/* Fold a constant sub-tree into a single node for C-compiler
   Copyright (C) 1987, 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998, 1999,
   2000, 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009
   Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

/*@@ This file should be rewritten to use an arbitrary precision
  @@ representation for "struct tree_int_cst" and "struct tree_real_cst".
  @@ Perhaps the routines could also be used for bc/dc, and made a lib.
  @@ The routines that translate from the ap rep should
  @@ warn if precision et. al. is lost.
  @@ This would also make life easier when this technology is used
  @@ for cross-compilers.  */

/* The entry points in this file are fold, size_int_wide, size_binop
   and force_fit_type_double.

   fold takes a tree as argument and returns a simplified tree.

   size_binop takes a tree code for an arithmetic operation
   and two operands that are trees, and produces a tree for the
   result, assuming the type comes from `sizetype'.

   size_int takes an integer value, and creates a tree constant
   with type from `sizetype'.

   force_fit_type_double takes a constant, an overflowable flag and a
   prior overflow indicator.  It forces the value to fit the type and
   sets TREE_OVERFLOW.

   Note: Since the folders get called on non-gimple code as well as
   gimple code, we need to handle GIMPLE tuples as well as their
   corresponding tree equivalents.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "flags.h"
#include "tree.h"
#include "real.h"
#include "fixed-value.h"
#include "rtl.h"
#include "expr.h"
#include "tm_p.h"
#include "target.h"
#include "toplev.h"
#include "intl.h"
#include "ggc.h"
#include "hashtab.h"
#include "langhooks.h"
#include "md5.h"
#include "gimple.h"

/* Nonzero if we are folding constants inside an initializer; zero
   otherwise.  */
int folding_initializer = 0;

/* The following constants represent a bit based encoding of GCC's
   comparison operators.  This encoding simplifies transformations
   on relational comparison operators, such as AND and OR.  */
enum comparison_code {
  COMPCODE_FALSE = 0,
  COMPCODE_LT = 1,
  COMPCODE_EQ = 2,
  COMPCODE_LE = 3,
  COMPCODE_GT = 4,
  COMPCODE_LTGT = 5,
  COMPCODE_GE = 6,
  COMPCODE_ORD = 7,
  COMPCODE_UNORD = 8,
  COMPCODE_UNLT = 9,
  COMPCODE_UNEQ = 10,
  COMPCODE_UNLE = 11,
  COMPCODE_UNGT = 12,
  COMPCODE_NE = 13,
  COMPCODE_UNGE = 14,
  COMPCODE_TRUE = 15
};

static void encode (HOST_WIDE_INT *, unsigned HOST_WIDE_INT, HOST_WIDE_INT);
static void decode (HOST_WIDE_INT *, unsigned HOST_WIDE_INT *, HOST_WIDE_INT *);
static bool negate_mathfn_p (enum built_in_function);
static bool negate_expr_p (tree);
static tree negate_expr (tree);
static tree split_tree (tree, enum tree_code, tree *, tree *, tree *, int);
static tree associate_trees (location_t, tree, tree, enum tree_code, tree);
static tree const_binop (enum tree_code, tree, tree, int);
static enum comparison_code comparison_to_compcode (enum tree_code);
static enum tree_code compcode_to_comparison (enum comparison_code);
static int operand_equal_for_comparison_p (tree, tree, tree);
static int twoval_comparison_p (tree, tree *, tree *, int *);
static tree eval_subst (location_t, tree, tree, tree, tree, tree);
static tree pedantic_omit_one_operand_loc (location_t, tree, tree, tree);
static tree distribute_bit_expr (location_t, enum tree_code, tree, tree, tree);
static tree make_bit_field_ref (location_t, tree, tree,
                                HOST_WIDE_INT, HOST_WIDE_INT, int);
static tree optimize_bit_field_compare (location_t, enum tree_code,
                                        tree, tree, tree);
static tree decode_field_reference (location_t, tree, HOST_WIDE_INT *,
                                    HOST_WIDE_INT *,
                                    enum machine_mode *, int *, int *,
                                    tree *, tree *);
static int all_ones_mask_p (const_tree, int);
static tree sign_bit_p (tree, const_tree);
static int simple_operand_p (const_tree);
static tree range_binop (enum tree_code, tree, tree, int, tree, int);
static tree range_predecessor (tree);
static tree range_successor (tree);
extern tree make_range (tree, int *, tree *, tree *, bool *);
extern bool merge_ranges (int *, tree *, tree *, int, tree, tree, int,
                          tree, tree);
static tree fold_range_test (location_t, enum tree_code, tree, tree, tree);
static tree fold_cond_expr_with_comparison (location_t, tree, tree, tree, tree);
static tree unextend (tree, int, int, tree);
static tree fold_truthop (location_t, enum tree_code, tree, tree, tree);
static tree optimize_minmax_comparison (location_t, enum tree_code,
                                        tree, tree, tree);
static tree extract_muldiv (tree, tree, enum tree_code, tree, bool *);
static tree extract_muldiv_1 (tree, tree, enum tree_code, tree, bool *);
static tree fold_binary_op_with_conditional_arg (location_t,
                                                 enum tree_code, tree,
                                                 tree, tree,
                                                 tree, tree, int);
static tree fold_mathfn_compare (location_t,
                                 enum built_in_function, enum tree_code,
                                 tree, tree, tree);
static tree fold_inf_compare (location_t, enum tree_code, tree, tree, tree);
static tree fold_div_compare (location_t, enum tree_code, tree, tree, tree);
static bool reorder_operands_p (const_tree, const_tree);
static tree fold_negate_const (tree, tree);
static tree fold_not_const (tree, tree);
static tree fold_relational_const (enum tree_code, tree, tree, tree);
static tree fold_convert_const (enum tree_code, tree, tree);

/* We know that A1 + B1 = SUM1, using 2's complement arithmetic and ignoring
   overflow.  Suppose A, B and SUM have the same respective signs as A1, B1,
   and SUM1.  Then this yields nonzero if overflow occurred during the
   addition.

   Overflow occurs if A and B have the same sign, but A and SUM differ in
   sign.  Use `^' to test whether signs differ, and `< 0' to isolate the
   sign.  */
#define OVERFLOW_SUM_SIGN(a, b, sum) ((~((a) ^ (b)) & ((a) ^ (sum))) < 0)

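/* Example: with 8-bit operands, 100 + 100 wraps to -56.  A and B are
   both positive while SUM is negative, so ~(A ^ B) and (A ^ SUM) both
   have the sign bit set, the AND of the two is negative, and the
   macro yields 1.  When A and B have opposite signs, ~(A ^ B) clears
   the sign bit and the macro yields 0, since such a sum can never
   overflow.  */
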
/* To do constant folding on INTEGER_CST nodes requires two-word arithmetic.
   We do that by representing the two-word integer in 4 words, with only
   HOST_BITS_PER_WIDE_INT / 2 bits stored in each word, as a positive
   number.  The value of the word is LOWPART + HIGHPART * BASE.  */

#define LOWPART(x) \
  ((x) & (((unsigned HOST_WIDE_INT) 1 << (HOST_BITS_PER_WIDE_INT / 2)) - 1))
#define HIGHPART(x) \
  ((unsigned HOST_WIDE_INT) (x) >> HOST_BITS_PER_WIDE_INT / 2)
#define BASE ((unsigned HOST_WIDE_INT) 1 << HOST_BITS_PER_WIDE_INT / 2)

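/* Example: with a 64-bit HOST_WIDE_INT, BASE is 2^32 and the value
   0x123456789abcdef0 splits into LOWPART 0x9abcdef0 and HIGHPART
   0x12345678, so that LOWPART + HIGHPART * BASE recovers the input.  */
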
/* Unpack a two-word integer into 4 words.
   LOW and HI are the integer, as two `HOST_WIDE_INT' pieces.
   WORDS points to the array of HOST_WIDE_INTs.  */

static void
encode (HOST_WIDE_INT *words, unsigned HOST_WIDE_INT low, HOST_WIDE_INT hi)
{
  words[0] = LOWPART (low);
  words[1] = HIGHPART (low);
  words[2] = LOWPART (hi);
  words[3] = HIGHPART (hi);
}

/* Pack an array of 4 words into a two-word integer.
   WORDS points to the array of words.
   The integer is stored into *LOW and *HI as two `HOST_WIDE_INT' pieces.  */

static void
decode (HOST_WIDE_INT *words, unsigned HOST_WIDE_INT *low,
        HOST_WIDE_INT *hi)
{
  *low = words[0] + words[1] * BASE;
  *hi = words[2] + words[3] * BASE;
}

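/* Example (assuming a host with 64-bit HOST_WIDE_INT): the call
   encode (w, 0x100000002, 3) produces w[0] == 2, w[1] == 1,
   w[2] == 3, w[3] == 0, and decode maps those four half-words back
   to the original LOW/HI pair.  */
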
/* Force the double-word integer L1, H1 to be within the range of the
   integer type TYPE.  Stores the properly truncated and sign-extended
   double-word integer in *LV, *HV.  Returns true if the operation
   overflows, that is, argument and result are different.  */

int
fit_double_type (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
                 unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv,
                 const_tree type)
{
  unsigned HOST_WIDE_INT low0 = l1;
  HOST_WIDE_INT high0 = h1;
  unsigned int prec;
  int sign_extended_type;

  if (POINTER_TYPE_P (type)
      || TREE_CODE (type) == OFFSET_TYPE)
    prec = POINTER_SIZE;
  else
    prec = TYPE_PRECISION (type);

  /* Size types *are* sign extended.  */
  sign_extended_type = (!TYPE_UNSIGNED (type)
                        || (TREE_CODE (type) == INTEGER_TYPE
                            && TYPE_IS_SIZETYPE (type)));

  /* First clear all bits that are beyond the type's precision.  */
  if (prec >= 2 * HOST_BITS_PER_WIDE_INT)
    ;
  else if (prec > HOST_BITS_PER_WIDE_INT)
    h1 &= ~((HOST_WIDE_INT) (-1) << (prec - HOST_BITS_PER_WIDE_INT));
  else
    {
      h1 = 0;
      if (prec < HOST_BITS_PER_WIDE_INT)
        l1 &= ~((HOST_WIDE_INT) (-1) << prec);
    }

  /* Then do sign extension if necessary.  */
  if (!sign_extended_type)
    /* No sign extension */;
  else if (prec >= 2 * HOST_BITS_PER_WIDE_INT)
    /* Correct width already.  */;
  else if (prec > HOST_BITS_PER_WIDE_INT)
    {
      /* Sign extend top half? */
      if (h1 & ((unsigned HOST_WIDE_INT) 1
                << (prec - HOST_BITS_PER_WIDE_INT - 1)))
        h1 |= (HOST_WIDE_INT) (-1) << (prec - HOST_BITS_PER_WIDE_INT);
    }
  else if (prec == HOST_BITS_PER_WIDE_INT)
    {
      if ((HOST_WIDE_INT) l1 < 0)
        h1 = -1;
    }
  else
    {
      /* Sign extend bottom half? */
      if (l1 & ((unsigned HOST_WIDE_INT) 1 << (prec - 1)))
        {
          h1 = -1;
          l1 |= (HOST_WIDE_INT) (-1) << prec;
        }
    }

  *lv = l1;
  *hv = h1;

  /* If the value didn't fit, signal overflow.  */
  return l1 != low0 || h1 != high0;
}

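/* Example: forcing the double-word value 511 (0x1ff) into a signed
   8-bit type first masks it down to 0xff; the masked value has bit 7
   set, so sign extension turns *LV/*HV into -1.  The result differs
   from the argument, so the function returns nonzero to signal
   overflow.  */
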
/* We force the double-int HIGH:LOW to the range of the type TYPE by
   sign or zero extending it.
   OVERFLOWABLE indicates if we are interested
   in overflow of the value, when >0 we are only interested in signed
   overflow, for <0 we are interested in any overflow.  OVERFLOWED
   indicates whether overflow has already occurred.  CONST_OVERFLOWED
   indicates whether constant overflow has already occurred.  We force
   T's value to be within range of T's type (by setting to 0 or 1 all
   the bits outside the type's range).  We set TREE_OVERFLOWED if,
        OVERFLOWED is nonzero,
        or OVERFLOWABLE is >0 and signed overflow occurs
        or OVERFLOWABLE is <0 and any overflow occurs
   We return a new tree node for the extended double-int.  The node
   is shared if no overflow flags are set.  */

tree
force_fit_type_double (tree type, unsigned HOST_WIDE_INT low,
                       HOST_WIDE_INT high, int overflowable,
                       bool overflowed)
{
  int sign_extended_type;
  bool overflow;

  /* Size types *are* sign extended.  */
  sign_extended_type = (!TYPE_UNSIGNED (type)
                        || (TREE_CODE (type) == INTEGER_TYPE
                            && TYPE_IS_SIZETYPE (type)));

  overflow = fit_double_type (low, high, &low, &high, type);

  /* If we need to set overflow flags, return a new unshared node.  */
  if (overflowed || overflow)
    {
      if (overflowed
          || overflowable < 0
          || (overflowable > 0 && sign_extended_type))
        {
          tree t = make_node (INTEGER_CST);
          TREE_INT_CST_LOW (t) = low;
          TREE_INT_CST_HIGH (t) = high;
          TREE_TYPE (t) = type;
          TREE_OVERFLOW (t) = 1;
          return t;
        }
    }

  /* Else build a shared node.  */
  return build_int_cst_wide (type, low, high);
}

/* Add two doubleword integers with doubleword result.
   Return nonzero if the operation overflows according to UNSIGNED_P.
   Each argument is given as two `HOST_WIDE_INT' pieces.
   One argument is L1 and H1; the other, L2 and H2.
   The value is stored as two `HOST_WIDE_INT' pieces in *LV and *HV.  */

int
add_double_with_sign (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
                      unsigned HOST_WIDE_INT l2, HOST_WIDE_INT h2,
                      unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv,
                      bool unsigned_p)
{
  unsigned HOST_WIDE_INT l;
  HOST_WIDE_INT h;

  l = l1 + l2;
  h = h1 + h2 + (l < l1);

  *lv = l;
  *hv = h;

  if (unsigned_p)
    return (unsigned HOST_WIDE_INT) h < (unsigned HOST_WIDE_INT) h1;
  else
    return OVERFLOW_SUM_SIGN (h1, h2, h);
}

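/* Example of the unsigned case: adding 1 to the all-ones double-word
   value (L1 == -1, H1 == -1) wraps the low word to zero and carries
   into the high word, which wraps to zero as well; the test
   (unsigned) h < (unsigned) h1 then holds and overflow is
   reported.  */
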
/* Negate a doubleword integer with doubleword result.
   Return nonzero if the operation overflows, assuming it's signed.
   The argument is given as two `HOST_WIDE_INT' pieces in L1 and H1.
   The value is stored as two `HOST_WIDE_INT' pieces in *LV and *HV.  */

int
neg_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
            unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv)
{
  if (l1 == 0)
    {
      *lv = 0;
      *hv = - h1;
      return (*hv & h1) < 0;
    }
  else
    {
      *lv = -l1;
      *hv = ~h1;
      return 0;
    }
}

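/* The only signed value whose negation overflows is the most negative
   double-word integer (L1 == 0, H1 with just the sign bit set):
   negating it reproduces itself, and the test (*hv & h1) < 0 detects
   exactly that case.  */
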
/* Multiply two doubleword integers with doubleword result.
   Return nonzero if the operation overflows according to UNSIGNED_P.
   Each argument is given as two `HOST_WIDE_INT' pieces.
   One argument is L1 and H1; the other, L2 and H2.
   The value is stored as two `HOST_WIDE_INT' pieces in *LV and *HV.  */

int
mul_double_with_sign (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
                      unsigned HOST_WIDE_INT l2, HOST_WIDE_INT h2,
                      unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv,
                      bool unsigned_p)
{
  HOST_WIDE_INT arg1[4];
  HOST_WIDE_INT arg2[4];
  HOST_WIDE_INT prod[4 * 2];
  unsigned HOST_WIDE_INT carry;
  int i, j, k;
  unsigned HOST_WIDE_INT toplow, neglow;
  HOST_WIDE_INT tophigh, neghigh;

  encode (arg1, l1, h1);
  encode (arg2, l2, h2);

  memset (prod, 0, sizeof prod);

  for (i = 0; i < 4; i++)
    {
      carry = 0;
      for (j = 0; j < 4; j++)
        {
          k = i + j;
          /* This product is <= 0xFFFE0001, the sum <= 0xFFFF0000.  */
          carry += arg1[i] * arg2[j];
          /* Since prod[p] < 0xFFFF, this sum <= 0xFFFFFFFF.  */
          carry += prod[k];
          prod[k] = LOWPART (carry);
          carry = HIGHPART (carry);
        }
      prod[i + 4] = carry;
    }

  decode (prod, lv, hv);
  decode (prod + 4, &toplow, &tophigh);

  /* Unsigned overflow is immediate.  */
  if (unsigned_p)
    return (toplow | tophigh) != 0;

  /* Check for signed overflow by calculating the signed representation of the
     top half of the result; it should agree with the low half's sign bit.  */
  if (h1 < 0)
    {
      neg_double (l2, h2, &neglow, &neghigh);
      add_double (neglow, neghigh, toplow, tophigh, &toplow, &tophigh);
    }
  if (h2 < 0)
    {
      neg_double (l1, h1, &neglow, &neghigh);
      add_double (neglow, neghigh, toplow, tophigh, &toplow, &tophigh);
    }
  return (*hv < 0 ? ~(toplow & tophigh) : toplow | tophigh) != 0;
}

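/* The signed overflow test relies on the fact that for a
   representable signed product the top double-word must be the sign
   extension of the low double-word: all zeros when *HV is
   nonnegative, all ones when *HV is negative.  Subtracting the other
   operand from the top half once for each negative multiplicand
   converts the unsigned top half into that signed form before the
   comparison.  */
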
/* Shift the doubleword integer in L1, H1 left by COUNT places
   keeping only PREC bits of result.
   Shift right if COUNT is negative.
   ARITH nonzero specifies arithmetic shifting; otherwise use logical shift.
   Store the value as two `HOST_WIDE_INT' pieces in *LV and *HV.  */

void
lshift_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
               HOST_WIDE_INT count, unsigned int prec,
               unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv, int arith)
{
  unsigned HOST_WIDE_INT signmask;

  if (count < 0)
    {
      rshift_double (l1, h1, -count, prec, lv, hv, arith);
      return;
    }

  if (SHIFT_COUNT_TRUNCATED)
    count %= prec;

  if (count >= 2 * HOST_BITS_PER_WIDE_INT)
    {
      /* Shifting by the host word size is undefined according to the
         ANSI standard, so we must handle this as a special case.  */
      *hv = 0;
      *lv = 0;
    }
  else if (count >= HOST_BITS_PER_WIDE_INT)
    {
      *hv = l1 << (count - HOST_BITS_PER_WIDE_INT);
      *lv = 0;
    }
  else
    {
      *hv = (((unsigned HOST_WIDE_INT) h1 << count)
             | (l1 >> (HOST_BITS_PER_WIDE_INT - count - 1) >> 1));
      *lv = l1 << count;
    }

  /* Sign extend all bits that are beyond the precision.  */

  signmask = -((prec > HOST_BITS_PER_WIDE_INT
                ? ((unsigned HOST_WIDE_INT) *hv
                   >> (prec - HOST_BITS_PER_WIDE_INT - 1))
                : (*lv >> (prec - 1))) & 1);

  if (prec >= 2 * HOST_BITS_PER_WIDE_INT)
    ;
  else if (prec >= HOST_BITS_PER_WIDE_INT)
    {
      *hv &= ~((HOST_WIDE_INT) (-1) << (prec - HOST_BITS_PER_WIDE_INT));
      *hv |= signmask << (prec - HOST_BITS_PER_WIDE_INT);
    }
  else
    {
      *hv = signmask;
      *lv &= ~((unsigned HOST_WIDE_INT) (-1) << prec);
      *lv |= signmask << prec;
    }
}

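/* Example: with PREC == 2 * HOST_BITS_PER_WIDE_INT, shifting
   L1 == 1, H1 == 0 left by exactly HOST_BITS_PER_WIDE_INT moves the
   bit to the bottom of the high word (*HV == 1, *LV == 0); for
   narrower PREC the signmask step above then truncates or sign
   extends the result to the type's precision.  Note the double shift
   `>> (HOST_BITS_PER_WIDE_INT - count - 1) >> 1', which avoids the
   undefined full-width shift when COUNT is zero.  */
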
/* Shift the doubleword integer in L1, H1 right by COUNT places
   keeping only PREC bits of result.  COUNT must be positive.
   ARITH nonzero specifies arithmetic shifting; otherwise use logical shift.
   Store the value as two `HOST_WIDE_INT' pieces in *LV and *HV.  */

void
rshift_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
               HOST_WIDE_INT count, unsigned int prec,
               unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv,
               int arith)
{
  unsigned HOST_WIDE_INT signmask;

  signmask = (arith
              ? -((unsigned HOST_WIDE_INT) h1 >> (HOST_BITS_PER_WIDE_INT - 1))
              : 0);

  if (SHIFT_COUNT_TRUNCATED)
    count %= prec;

  if (count >= 2 * HOST_BITS_PER_WIDE_INT)
    {
      /* Shifting by the host word size is undefined according to the
         ANSI standard, so we must handle this as a special case.  */
      *hv = 0;
      *lv = 0;
    }
  else if (count >= HOST_BITS_PER_WIDE_INT)
    {
      *hv = 0;
      *lv = (unsigned HOST_WIDE_INT) h1 >> (count - HOST_BITS_PER_WIDE_INT);
    }
  else
    {
      *hv = (unsigned HOST_WIDE_INT) h1 >> count;
      *lv = ((l1 >> count)
             | ((unsigned HOST_WIDE_INT) h1
                << (HOST_BITS_PER_WIDE_INT - count - 1) << 1));
    }

  /* Zero / sign extend all bits that are beyond the precision.  */

  if (count >= (HOST_WIDE_INT) prec)
    {
      *hv = signmask;
      *lv = signmask;
    }
  else if ((prec - count) >= 2 * HOST_BITS_PER_WIDE_INT)
    ;
  else if ((prec - count) >= HOST_BITS_PER_WIDE_INT)
    {
      *hv &= ~((HOST_WIDE_INT) (-1) << (prec - count - HOST_BITS_PER_WIDE_INT));
      *hv |= signmask << (prec - count - HOST_BITS_PER_WIDE_INT);
    }
  else
    {
      *hv = signmask;
      *lv &= ~((unsigned HOST_WIDE_INT) (-1) << (prec - count));
      *lv |= signmask << (prec - count);
    }
}

/* Rotate the doubleword integer in L1, H1 left by COUNT places
   keeping only PREC bits of result.
   Rotate right if COUNT is negative.
   Store the value as two `HOST_WIDE_INT' pieces in *LV and *HV.  */

void
lrotate_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
                HOST_WIDE_INT count, unsigned int prec,
                unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv)
{
  unsigned HOST_WIDE_INT s1l, s2l;
  HOST_WIDE_INT s1h, s2h;

  count %= prec;
  if (count < 0)
    count += prec;

  lshift_double (l1, h1, count, prec, &s1l, &s1h, 0);
  rshift_double (l1, h1, prec - count, prec, &s2l, &s2h, 0);
  *lv = s1l | s2l;
  *hv = s1h | s2h;
}

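/* The rotation is composed from the two logical shifts above,
   (X << COUNT) | (X >> (PREC - COUNT)).  Example: with PREC == 8 and
   COUNT == 4, the byte 0xb1 rotates left to 0x1b.  */
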
/* Rotate the doubleword integer in L1, H1 right by COUNT places
   keeping only PREC bits of result.  COUNT must be positive.
   Store the value as two `HOST_WIDE_INT' pieces in *LV and *HV.  */

void
rrotate_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
                HOST_WIDE_INT count, unsigned int prec,
                unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv)
{
  unsigned HOST_WIDE_INT s1l, s2l;
  HOST_WIDE_INT s1h, s2h;

  count %= prec;
  if (count < 0)
    count += prec;

  rshift_double (l1, h1, count, prec, &s1l, &s1h, 0);
  lshift_double (l1, h1, prec - count, prec, &s2l, &s2h, 0);
  *lv = s1l | s2l;
  *hv = s1h | s2h;
}

/* Divide doubleword integer LNUM, HNUM by doubleword integer LDEN, HDEN
   for a quotient (stored in *LQUO, *HQUO) and remainder (in *LREM, *HREM).
   CODE is a tree code for a kind of division, one of
   TRUNC_DIV_EXPR, FLOOR_DIV_EXPR, CEIL_DIV_EXPR, ROUND_DIV_EXPR
   or EXACT_DIV_EXPR
   It controls how the quotient is rounded to an integer.
   Return nonzero if the operation overflows.
   UNS nonzero says do unsigned division.  */

int
div_and_round_double (enum tree_code code, int uns,
                      unsigned HOST_WIDE_INT lnum_orig, /* num == numerator == dividend */
                      HOST_WIDE_INT hnum_orig,
                      unsigned HOST_WIDE_INT lden_orig, /* den == denominator == divisor */
                      HOST_WIDE_INT hden_orig,
                      unsigned HOST_WIDE_INT *lquo,
                      HOST_WIDE_INT *hquo, unsigned HOST_WIDE_INT *lrem,
                      HOST_WIDE_INT *hrem)
{
  int quo_neg = 0;
  HOST_WIDE_INT num[4 + 1];     /* extra element for scaling.  */
  HOST_WIDE_INT den[4], quo[4];
  int i, j;
  unsigned HOST_WIDE_INT work;
  unsigned HOST_WIDE_INT carry = 0;
  unsigned HOST_WIDE_INT lnum = lnum_orig;
  HOST_WIDE_INT hnum = hnum_orig;
  unsigned HOST_WIDE_INT lden = lden_orig;
  HOST_WIDE_INT hden = hden_orig;
  int overflow = 0;

  if (hden == 0 && lden == 0)
    overflow = 1, lden = 1;

  /* Calculate quotient sign and convert operands to unsigned.  */
  if (!uns)
    {
      if (hnum < 0)
        {
          quo_neg = ~ quo_neg;
          /* (minimum integer) / (-1) is the only overflow case.  */
          if (neg_double (lnum, hnum, &lnum, &hnum)
              && ((HOST_WIDE_INT) lden & hden) == -1)
            overflow = 1;
        }
      if (hden < 0)
        {
          quo_neg = ~ quo_neg;
          neg_double (lden, hden, &lden, &hden);
        }
    }

  if (hnum == 0 && hden == 0)
    {                           /* single precision */
      *hquo = *hrem = 0;
      /* This unsigned division rounds toward zero.  */
      *lquo = lnum / lden;
      goto finish_up;
    }

  if (hnum == 0)
    {                           /* trivial case: dividend < divisor */
      /* hden != 0 already checked.  */
      *hquo = *lquo = 0;
      *hrem = hnum;
      *lrem = lnum;
      goto finish_up;
    }

  memset (quo, 0, sizeof quo);

  memset (num, 0, sizeof num);  /* to zero 9th element */
  memset (den, 0, sizeof den);

  encode (num, lnum, hnum);
  encode (den, lden, hden);

  /* Special code for when the divisor < BASE.  */
  if (hden == 0 && lden < (unsigned HOST_WIDE_INT) BASE)
    {
      /* hnum != 0 already checked.  */
      for (i = 4 - 1; i >= 0; i--)
        {
          work = num[i] + carry * BASE;
          quo[i] = work / lden;
          carry = work % lden;
        }
    }
  else
    {
      /* Full double precision division,
         with thanks to Don Knuth's "Seminumerical Algorithms".  */
      int num_hi_sig, den_hi_sig;
      unsigned HOST_WIDE_INT quo_est, scale;

      /* Find the highest nonzero divisor digit.  */
      for (i = 4 - 1;; i--)
        if (den[i] != 0)
          {
            den_hi_sig = i;
            break;
          }

      /* Insure that the first digit of the divisor is at least BASE/2.
         This is required by the quotient digit estimation algorithm.  */

      scale = BASE / (den[den_hi_sig] + 1);
      if (scale > 1)
        {                       /* scale divisor and dividend */
          carry = 0;
          for (i = 0; i <= 4 - 1; i++)
            {
              work = (num[i] * scale) + carry;
              num[i] = LOWPART (work);
              carry = HIGHPART (work);
            }

          num[4] = carry;
          carry = 0;
          for (i = 0; i <= 4 - 1; i++)
            {
              work = (den[i] * scale) + carry;
              den[i] = LOWPART (work);
              carry = HIGHPART (work);
              if (den[i] != 0) den_hi_sig = i;
            }
        }

      num_hi_sig = 4;

      /* Main loop */
      for (i = num_hi_sig - den_hi_sig - 1; i >= 0; i--)
        {
          /* Guess the next quotient digit, quo_est, by dividing the first
             two remaining dividend digits by the high order quotient digit.
             quo_est is never low and is at most 2 high.  */
          unsigned HOST_WIDE_INT tmp;

          num_hi_sig = i + den_hi_sig + 1;
          work = num[num_hi_sig] * BASE + num[num_hi_sig - 1];
          if (num[num_hi_sig] != den[den_hi_sig])
            quo_est = work / den[den_hi_sig];
          else
            quo_est = BASE - 1;

          /* Refine quo_est so it's usually correct, and at most one high.  */
          tmp = work - quo_est * den[den_hi_sig];
          if (tmp < BASE
              && (den[den_hi_sig - 1] * quo_est
                  > (tmp * BASE + num[num_hi_sig - 2])))
            quo_est--;

          /* Try QUO_EST as the quotient digit, by multiplying the
             divisor by QUO_EST and subtracting from the remaining dividend.
             Keep in mind that QUO_EST is the I - 1st digit.  */

          carry = 0;
          for (j = 0; j <= den_hi_sig; j++)
            {
              work = quo_est * den[j] + carry;
              carry = HIGHPART (work);
              work = num[i + j] - LOWPART (work);
              num[i + j] = LOWPART (work);
              carry += HIGHPART (work) != 0;
            }

          /* If quo_est was high by one, then num[i] went negative and
             we need to correct things.  */
          if (num[num_hi_sig] < (HOST_WIDE_INT) carry)
            {
              quo_est--;
              carry = 0;        /* add divisor back in */
              for (j = 0; j <= den_hi_sig; j++)
                {
                  work = num[i + j] + den[j] + carry;
                  carry = HIGHPART (work);
                  num[i + j] = LOWPART (work);
                }

              num[num_hi_sig] += carry;
            }

          /* Store the quotient digit.  */
          quo[i] = quo_est;
        }
    }

  decode (quo, lquo, hquo);

 finish_up:
  /* If result is negative, make it so.  */
  if (quo_neg)
    neg_double (*lquo, *hquo, lquo, hquo);

  /* Compute trial remainder:  rem = num - (quo * den)  */
  mul_double (*lquo, *hquo, lden_orig, hden_orig, lrem, hrem);
  neg_double (*lrem, *hrem, lrem, hrem);
  add_double (lnum_orig, hnum_orig, *lrem, *hrem, lrem, hrem);

  switch (code)
    {
    case TRUNC_DIV_EXPR:
    case TRUNC_MOD_EXPR:        /* round toward zero */
    case EXACT_DIV_EXPR:        /* for this one, it shouldn't matter */
      return overflow;

    case FLOOR_DIV_EXPR:
    case FLOOR_MOD_EXPR:        /* round toward negative infinity */
      if (quo_neg && (*lrem != 0 || *hrem != 0))  /* ratio < 0 && rem != 0 */
        {
          /* quo = quo - 1;  */
          add_double (*lquo, *hquo, (HOST_WIDE_INT) -1, (HOST_WIDE_INT) -1,
                      lquo, hquo);
        }
      else
        return overflow;
      break;

    case CEIL_DIV_EXPR:
    case CEIL_MOD_EXPR:         /* round toward positive infinity */
      if (!quo_neg && (*lrem != 0 || *hrem != 0)) /* ratio > 0 && rem != 0 */
        {
          add_double (*lquo, *hquo, (HOST_WIDE_INT) 1, (HOST_WIDE_INT) 0,
                      lquo, hquo);
        }
      else
        return overflow;
      break;

    case ROUND_DIV_EXPR:
    case ROUND_MOD_EXPR:        /* round to closest integer */
      {
        unsigned HOST_WIDE_INT labs_rem = *lrem;
        HOST_WIDE_INT habs_rem = *hrem;
        unsigned HOST_WIDE_INT labs_den = lden, ltwice;
        HOST_WIDE_INT habs_den = hden, htwice;

        /* Get absolute values.  */
        if (*hrem < 0)
          neg_double (*lrem, *hrem, &labs_rem, &habs_rem);
        if (hden < 0)
          neg_double (lden, hden, &labs_den, &habs_den);

        /* If (2 * abs (lrem) >= abs (lden)), adjust the quotient.  */
        mul_double ((HOST_WIDE_INT) 2, (HOST_WIDE_INT) 0,
                    labs_rem, habs_rem, &ltwice, &htwice);

        if (((unsigned HOST_WIDE_INT) habs_den
             < (unsigned HOST_WIDE_INT) htwice)
            || (((unsigned HOST_WIDE_INT) habs_den
                 == (unsigned HOST_WIDE_INT) htwice)
                && (labs_den <= ltwice)))
          {
            if (*hquo < 0)
              /* quo = quo - 1;  */
              add_double (*lquo, *hquo,
                          (HOST_WIDE_INT) -1, (HOST_WIDE_INT) -1, lquo, hquo);
            else
              /* quo = quo + 1;  */
              add_double (*lquo, *hquo, (HOST_WIDE_INT) 1, (HOST_WIDE_INT) 0,
                          lquo, hquo);
          }
        else
          return overflow;
      }
      break;

    default:
      gcc_unreachable ();
    }

  /* Compute true remainder:  rem = num - (quo * den)  */
  mul_double (*lquo, *hquo, lden_orig, hden_orig, lrem, hrem);
  neg_double (*lrem, *hrem, lrem, hrem);
  add_double (lnum_orig, hnum_orig, *lrem, *hrem, lrem, hrem);
  return overflow;
}

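/* Example of the rounding adjustments for -7 divided by 2:
   TRUNC_DIV_EXPR yields -3 rem -1; FLOOR_DIV_EXPR adjusts to -4 rem 1;
   CEIL_DIV_EXPR keeps -3 rem -1 (the quotient is negative, so no
   adjustment); and ROUND_DIV_EXPR, seeing 2 * |rem| >= |den|, moves
   the quotient away from zero to -4 rem 1.  */
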
/* If ARG2 divides ARG1 with zero remainder, carries out the division
   of type CODE and returns the quotient.
   Otherwise returns NULL_TREE.  */

tree
div_if_zero_remainder (enum tree_code code, const_tree arg1, const_tree arg2)
{
  unsigned HOST_WIDE_INT int1l, int2l;
  HOST_WIDE_INT int1h, int2h;
  unsigned HOST_WIDE_INT quol, reml;
  HOST_WIDE_INT quoh, remh;
  tree type = TREE_TYPE (arg1);
  int uns = TYPE_UNSIGNED (type);

  int1l = TREE_INT_CST_LOW (arg1);
  int1h = TREE_INT_CST_HIGH (arg1);
  /* &obj[0] + -128 really should be compiled as &obj[-8] rather than
     &obj[some_exotic_number].  */
  if (POINTER_TYPE_P (type))
    {
      uns = false;
      type = signed_type_for (type);
      fit_double_type (int1l, int1h, &int1l, &int1h, type);
    }
  else
    fit_double_type (int1l, int1h, &int1l, &int1h, type);
  int2l = TREE_INT_CST_LOW (arg2);
  int2h = TREE_INT_CST_HIGH (arg2);

  div_and_round_double (code, uns, int1l, int1h, int2l, int2h,
                        &quol, &quoh, &reml, &remh);
  if (remh != 0 || reml != 0)
    return NULL_TREE;

  return build_int_cst_wide (type, quol, quoh);
}

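/* Example: dividing the constant 12 by 4 with EXACT_DIV_EXPR folds to
   the constant 3, while a dividend of 13 leaves a nonzero remainder
   and therefore yields NULL_TREE, so the caller keeps the original
   division expression.  */
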
/* This is nonzero if we should defer warnings about undefined
   overflow.  This facility exists because these warnings are a
   special case.  The code to estimate loop iterations does not want
   to issue any warnings, since it works with expressions which do not
   occur in user code.  Various bits of cleanup code call fold(), but
   only use the result if it has certain characteristics (e.g., is a
   constant); that code only wants to issue a warning if the result is
   used.  */

static int fold_deferring_overflow_warnings;

/* If a warning about undefined overflow is deferred, this is the
   warning.  Note that this may cause us to turn two warnings into
   one, but that is fine since it is sufficient to only give one
   warning per expression.  */

static const char* fold_deferred_overflow_warning;

/* If a warning about undefined overflow is deferred, this is the
   level at which the warning should be emitted.  */

static enum warn_strict_overflow_code fold_deferred_overflow_code;

/* Start deferring overflow warnings.  We could use a stack here to
   permit nested calls, but at present it is not necessary.  */

void
fold_defer_overflow_warnings (void)
{
  ++fold_deferring_overflow_warnings;
}

/* Stop deferring overflow warnings.  If there is a pending warning,
   and ISSUE is true, then issue the warning if appropriate.  STMT is
   the statement with which the warning should be associated (used for
   location information); STMT may be NULL.  CODE is the level of the
   warning--a warn_strict_overflow_code value.  This function will use
   the smaller of CODE and the deferred code when deciding whether to
   issue the warning.  CODE may be zero to mean to always use the
   deferred code.  */

void
fold_undefer_overflow_warnings (bool issue, const_gimple stmt, int code)
{
  const char *warnmsg;
  location_t locus;

  gcc_assert (fold_deferring_overflow_warnings > 0);
  --fold_deferring_overflow_warnings;
  if (fold_deferring_overflow_warnings > 0)
    {
      if (fold_deferred_overflow_warning != NULL
          && code != 0
          && code < (int) fold_deferred_overflow_code)
        fold_deferred_overflow_code = (enum warn_strict_overflow_code) code;
      return;
    }

  warnmsg = fold_deferred_overflow_warning;
  fold_deferred_overflow_warning = NULL;

  if (!issue || warnmsg == NULL)
    return;

  if (gimple_no_warning_p (stmt))
    return;

  /* Use the smallest code level when deciding to issue the
     warning.  */
  if (code == 0 || code > (int) fold_deferred_overflow_code)
    code = fold_deferred_overflow_code;

  if (!issue_strict_overflow_warning (code))
    return;

  if (stmt == NULL)
    locus = input_location;
  else
    locus = gimple_location (stmt);
  warning_at (locus, OPT_Wstrict_overflow, "%s", warnmsg);
}

/* Stop deferring overflow warnings, ignoring any deferred
   warnings.  */

void
fold_undefer_and_ignore_overflow_warnings (void)
{
  fold_undefer_overflow_warnings (false, NULL, 0);
}

/* Whether we are deferring overflow warnings.  */

bool
fold_deferring_overflow_warnings_p (void)
{
  return fold_deferring_overflow_warnings > 0;
}

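/* A typical caller brackets a speculative fold with these routines,
   along the lines of:

     fold_defer_overflow_warnings ();
     tem = fold_binary (code, type, op0, op1);
     fold_undefer_overflow_warnings (tem != NULL_TREE, stmt, 0);

   so that any -Wstrict-overflow warning triggered while folding is
   only issued if the folded result is actually used.  */
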
/* This is called when we fold something based on the fact that signed
   overflow is undefined.  */

static void
fold_overflow_warning (const char* gmsgid, enum warn_strict_overflow_code wc)
{
  if (fold_deferring_overflow_warnings > 0)
    {
      if (fold_deferred_overflow_warning == NULL
          || wc < fold_deferred_overflow_code)
        {
          fold_deferred_overflow_warning = gmsgid;
          fold_deferred_overflow_code = wc;
        }
    }
  else if (issue_strict_overflow_warning (wc))
    warning (OPT_Wstrict_overflow, gmsgid);
}

/* Return true if the built-in mathematical function specified by CODE
   is odd, i.e. -f(x) == f(-x).  */

static bool
negate_mathfn_p (enum built_in_function code)
{
  switch (code)
    {
    CASE_FLT_FN (BUILT_IN_ASIN):
    CASE_FLT_FN (BUILT_IN_ASINH):
    CASE_FLT_FN (BUILT_IN_ATAN):
    CASE_FLT_FN (BUILT_IN_ATANH):
    CASE_FLT_FN (BUILT_IN_CASIN):
    CASE_FLT_FN (BUILT_IN_CASINH):
    CASE_FLT_FN (BUILT_IN_CATAN):
    CASE_FLT_FN (BUILT_IN_CATANH):
    CASE_FLT_FN (BUILT_IN_CBRT):
    CASE_FLT_FN (BUILT_IN_CPROJ):
    CASE_FLT_FN (BUILT_IN_CSIN):
    CASE_FLT_FN (BUILT_IN_CSINH):
    CASE_FLT_FN (BUILT_IN_CTAN):
    CASE_FLT_FN (BUILT_IN_CTANH):
    CASE_FLT_FN (BUILT_IN_ERF):
    CASE_FLT_FN (BUILT_IN_LLROUND):
    CASE_FLT_FN (BUILT_IN_LROUND):
    CASE_FLT_FN (BUILT_IN_ROUND):
    CASE_FLT_FN (BUILT_IN_SIN):
    CASE_FLT_FN (BUILT_IN_SINH):
    CASE_FLT_FN (BUILT_IN_TAN):
    CASE_FLT_FN (BUILT_IN_TANH):
    CASE_FLT_FN (BUILT_IN_TRUNC):
      return true;

    CASE_FLT_FN (BUILT_IN_LLRINT):
    CASE_FLT_FN (BUILT_IN_LRINT):
    CASE_FLT_FN (BUILT_IN_NEARBYINT):
    CASE_FLT_FN (BUILT_IN_RINT):
      return !flag_rounding_math;

    default:
      break;
    }
  return false;
}

/* Check whether we may negate an integer constant T without causing
   overflow.  */

bool
may_negate_without_overflow_p (const_tree t)
{
  unsigned HOST_WIDE_INT val;
  unsigned int prec;
  tree type;

  gcc_assert (TREE_CODE (t) == INTEGER_CST);

  type = TREE_TYPE (t);
  if (TYPE_UNSIGNED (type))
    return false;

  prec = TYPE_PRECISION (type);
  if (prec > HOST_BITS_PER_WIDE_INT)
    {
      if (TREE_INT_CST_LOW (t) != 0)
        return true;
      prec -= HOST_BITS_PER_WIDE_INT;
      val = TREE_INT_CST_HIGH (t);
    }
  else
    val = TREE_INT_CST_LOW (t);
  if (prec < HOST_BITS_PER_WIDE_INT)
    val &= ((unsigned HOST_WIDE_INT) 1 << prec) - 1;
  return val != ((unsigned HOST_WIDE_INT) 1 << (prec - 1));
}

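/* Example: for a signed 8-bit type the test rejects only -128, the
   lone value whose negation is not representable; every other
   constant, including -127 and 127, may be negated safely.  */
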
/* Determine whether an expression T can be cheaply negated using
   the function negate_expr without introducing undefined overflow.  */

static bool
negate_expr_p (tree t)
{
  tree type;

  if (t == 0)
    return false;

  type = TREE_TYPE (t);

  STRIP_SIGN_NOPS (t);
  switch (TREE_CODE (t))
    {
    case INTEGER_CST:
      if (TYPE_OVERFLOW_WRAPS (type))
        return true;

      /* Check that -CST will not overflow type.  */
      return may_negate_without_overflow_p (t);
    case BIT_NOT_EXPR:
      return (INTEGRAL_TYPE_P (type)
              && TYPE_OVERFLOW_WRAPS (type));

    case FIXED_CST:
    case REAL_CST:
    case NEGATE_EXPR:
      return true;

    case COMPLEX_CST:
      return negate_expr_p (TREE_REALPART (t))
             && negate_expr_p (TREE_IMAGPART (t));

    case COMPLEX_EXPR:
      return negate_expr_p (TREE_OPERAND (t, 0))
             && negate_expr_p (TREE_OPERAND (t, 1));

    case CONJ_EXPR:
      return negate_expr_p (TREE_OPERAND (t, 0));

    case PLUS_EXPR:
      if (HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type))
          || HONOR_SIGNED_ZEROS (TYPE_MODE (type)))
        return false;
      /* -(A + B) -> (-B) - A.  */
      if (negate_expr_p (TREE_OPERAND (t, 1))
          && reorder_operands_p (TREE_OPERAND (t, 0),
                                 TREE_OPERAND (t, 1)))
        return true;
      /* -(A + B) -> (-A) - B.  */
      return negate_expr_p (TREE_OPERAND (t, 0));

    case MINUS_EXPR:
      /* We can't turn -(A-B) into B-A when we honor signed zeros.  */
      return !HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type))
             && !HONOR_SIGNED_ZEROS (TYPE_MODE (type))
             && reorder_operands_p (TREE_OPERAND (t, 0),
                                    TREE_OPERAND (t, 1));

    case MULT_EXPR:
      if (TYPE_UNSIGNED (TREE_TYPE (t)))
        break;

      /* Fall through.  */

    case RDIV_EXPR:
      if (! HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (TREE_TYPE (t))))
        return negate_expr_p (TREE_OPERAND (t, 1))
               || negate_expr_p (TREE_OPERAND (t, 0));
      break;

    case TRUNC_DIV_EXPR:
    case ROUND_DIV_EXPR:
    case FLOOR_DIV_EXPR:
    case CEIL_DIV_EXPR:
    case EXACT_DIV_EXPR:
      /* In general we can't negate A / B, because if A is INT_MIN and
         B is 1, we may turn this into INT_MIN / -1 which is undefined
         and actually traps on some architectures.  But if overflow is
         undefined, we can negate, because - (INT_MIN / 1) is an
         overflow.  */
      if (INTEGRAL_TYPE_P (TREE_TYPE (t))
          && !TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (t)))
        break;
      return negate_expr_p (TREE_OPERAND (t, 1))
             || negate_expr_p (TREE_OPERAND (t, 0));

    case NOP_EXPR:
      /* Negate -((double)float) as (double)(-float).  */
      if (TREE_CODE (type) == REAL_TYPE)
        {
          tree tem = strip_float_extensions (t);
          if (tem != t)
            return negate_expr_p (tem);
        }
      break;

    case CALL_EXPR:
      /* Negate -f(x) as f(-x).  */
      if (negate_mathfn_p (builtin_mathfn_code (t)))
        return negate_expr_p (CALL_EXPR_ARG (t, 0));
      break;

    case RSHIFT_EXPR:
      /* Optimize -((int)x >> 31) into (unsigned)x >> 31.  */
      if (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST)
        {
          tree op1 = TREE_OPERAND (t, 1);
          if (TREE_INT_CST_HIGH (op1) == 0
              && (unsigned HOST_WIDE_INT) (TYPE_PRECISION (type) - 1)
                 == TREE_INT_CST_LOW (op1))
            return true;
        }
      break;

    default:
      break;
    }
  return false;
}

/* Given T, an expression, return a folded tree for -T or NULL_TREE, if no
   simplification is possible.
   If negate_expr_p would return true for T, NULL_TREE will never be
   returned.  */

static tree
fold_negate_expr (location_t loc, tree t)
{
  tree type = TREE_TYPE (t);
  tree tem;

  switch (TREE_CODE (t))
    {
    /* Convert - (~A) to A + 1.  */
    case BIT_NOT_EXPR:
      if (INTEGRAL_TYPE_P (type))
        return fold_build2_loc (loc, PLUS_EXPR, type, TREE_OPERAND (t, 0),
                                build_int_cst (type, 1));
      break;

    case INTEGER_CST:
      tem = fold_negate_const (t, type);
      if (TREE_OVERFLOW (tem) == TREE_OVERFLOW (t)
          || !TYPE_OVERFLOW_TRAPS (type))
        return tem;
      break;

    case REAL_CST:
      tem = fold_negate_const (t, type);
      /* Two's complement FP formats, such as c4x, may overflow.  */
      if (!TREE_OVERFLOW (tem) || !flag_trapping_math)
        return tem;
      break;

    case FIXED_CST:
      tem = fold_negate_const (t, type);
      return tem;

    case COMPLEX_CST:
      {
        tree rpart = negate_expr (TREE_REALPART (t));
        tree ipart = negate_expr (TREE_IMAGPART (t));

        if ((TREE_CODE (rpart) == REAL_CST
             && TREE_CODE (ipart) == REAL_CST)
            || (TREE_CODE (rpart) == INTEGER_CST
                && TREE_CODE (ipart) == INTEGER_CST))
          return build_complex (type, rpart, ipart);
      }
      break;

    case COMPLEX_EXPR:
      if (negate_expr_p (t))
        return fold_build2_loc (loc, COMPLEX_EXPR, type,
                                fold_negate_expr (loc, TREE_OPERAND (t, 0)),
                                fold_negate_expr (loc, TREE_OPERAND (t, 1)));
      break;

    case CONJ_EXPR:
      if (negate_expr_p (t))
        return fold_build1_loc (loc, CONJ_EXPR, type,
                                fold_negate_expr (loc, TREE_OPERAND (t, 0)));
      break;

    case NEGATE_EXPR:
      return TREE_OPERAND (t, 0);

    case PLUS_EXPR:
      if (!HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type))
          && !HONOR_SIGNED_ZEROS (TYPE_MODE (type)))
        {
          /* -(A + B) -> (-B) - A.  */
          if (negate_expr_p (TREE_OPERAND (t, 1))
              && reorder_operands_p (TREE_OPERAND (t, 0),
                                     TREE_OPERAND (t, 1)))
            {
              tem = negate_expr (TREE_OPERAND (t, 1));
              return fold_build2_loc (loc, MINUS_EXPR, type,
                                      tem, TREE_OPERAND (t, 0));
            }

          /* -(A + B) -> (-A) - B.  */
          if (negate_expr_p (TREE_OPERAND (t, 0)))
            {
              tem = negate_expr (TREE_OPERAND (t, 0));
              return fold_build2_loc (loc, MINUS_EXPR, type,
                                      tem, TREE_OPERAND (t, 1));
            }
        }
      break;

    case MINUS_EXPR:
      /* - (A - B) -> B - A  */
      if (!HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type))
          && !HONOR_SIGNED_ZEROS (TYPE_MODE (type))
          && reorder_operands_p (TREE_OPERAND (t, 0), TREE_OPERAND (t, 1)))
        return fold_build2_loc (loc, MINUS_EXPR, type,
                                TREE_OPERAND (t, 1), TREE_OPERAND (t, 0));
      break;

    case MULT_EXPR:
      if (TYPE_UNSIGNED (type))
        break;

      /* Fall through.  */

    case RDIV_EXPR:
      if (! HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type)))
        {
          tem = TREE_OPERAND (t, 1);
          if (negate_expr_p (tem))
            return fold_build2_loc (loc, TREE_CODE (t), type,
                                    TREE_OPERAND (t, 0), negate_expr (tem));
          tem = TREE_OPERAND (t, 0);
          if (negate_expr_p (tem))
            return fold_build2_loc (loc, TREE_CODE (t), type,
                                    negate_expr (tem), TREE_OPERAND (t, 1));
        }
      break;

    case TRUNC_DIV_EXPR:
    case ROUND_DIV_EXPR:
    case FLOOR_DIV_EXPR:
    case CEIL_DIV_EXPR:
    case EXACT_DIV_EXPR:
      /* In general we can't negate A / B, because if A is INT_MIN and
         B is 1, we may turn this into INT_MIN / -1 which is undefined
         and actually traps on some architectures.  But if overflow is
         undefined, we can negate, because - (INT_MIN / 1) is an
         overflow.  */
      if (!INTEGRAL_TYPE_P (type) || TYPE_OVERFLOW_UNDEFINED (type))
        {
          const char * const warnmsg = G_("assuming signed overflow does not "
                                          "occur when negating a division");
          tem = TREE_OPERAND (t, 1);
          if (negate_expr_p (tem))
            {
              if (INTEGRAL_TYPE_P (type)
                  && (TREE_CODE (tem) != INTEGER_CST
                      || integer_onep (tem)))
                fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_MISC);
              return fold_build2_loc (loc, TREE_CODE (t), type,
                                      TREE_OPERAND (t, 0), negate_expr (tem));
            }
          tem = TREE_OPERAND (t, 0);
          if (negate_expr_p (tem))
            {
              if (INTEGRAL_TYPE_P (type)
                  && (TREE_CODE (tem) != INTEGER_CST
                      || tree_int_cst_equal (tem, TYPE_MIN_VALUE (type))))
                fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_MISC);
              return fold_build2_loc (loc, TREE_CODE (t), type,
                                      negate_expr (tem), TREE_OPERAND (t, 1));
            }
        }
      break;

    case NOP_EXPR:
      /* Convert -((double)float) into (double)(-float).  */
      if (TREE_CODE (type) == REAL_TYPE)
        {
          tem = strip_float_extensions (t);
          if (tem != t && negate_expr_p (tem))
            return fold_convert_loc (loc, type, negate_expr (tem));
        }
      break;

    case CALL_EXPR:
      /* Negate -f(x) as f(-x).  */
      if (negate_mathfn_p (builtin_mathfn_code (t))
          && negate_expr_p (CALL_EXPR_ARG (t, 0)))
        {
          tree fndecl, arg;

          fndecl = get_callee_fndecl (t);
          arg = negate_expr (CALL_EXPR_ARG (t, 0));
          return build_call_expr_loc (loc, fndecl, 1, arg);
        }
      break;

    case RSHIFT_EXPR:
      /* Optimize -((int)x >> 31) into (unsigned)x >> 31.  */
      if (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST)
        {
          tree op1 = TREE_OPERAND (t, 1);
          if (TREE_INT_CST_HIGH (op1) == 0
              && (unsigned HOST_WIDE_INT) (TYPE_PRECISION (type) - 1)
                 == TREE_INT_CST_LOW (op1))
            {
              tree ntype = TYPE_UNSIGNED (type)
                           ? signed_type_for (type)
                           : unsigned_type_for (type);
              tree temp = fold_convert_loc (loc, ntype, TREE_OPERAND (t, 0));
              temp = fold_build2_loc (loc, RSHIFT_EXPR, ntype, temp, op1);
              return fold_convert_loc (loc, type, temp);
            }
        }
      break;

    default:
      break;
    }

  return NULL_TREE;
}

/* Like fold_negate_expr, but return a NEGATE_EXPR tree, if T can not be
   negated in a simpler way.  Also allow for T to be NULL_TREE, in which case
   return NULL_TREE.  */

static tree
negate_expr (tree t)
{
  tree type, tem;
  location_t loc;

  if (t == NULL_TREE)
    return NULL_TREE;

  loc = EXPR_LOCATION (t);
  type = TREE_TYPE (t);
  STRIP_SIGN_NOPS (t);

  tem = fold_negate_expr (loc, t);
  if (!tem)
    {
      tem = build1 (NEGATE_EXPR, TREE_TYPE (t), t);
      SET_EXPR_LOCATION (tem, loc);
    }
  return fold_convert_loc (loc, type, tem);
}

/* Split a tree IN into a constant, literal and variable parts that could be
   combined with CODE to make IN.  "constant" means an expression with
   TREE_CONSTANT but that isn't an actual constant.  CODE must be a
   commutative arithmetic operation.  Store the constant part into *CONP,
   the literal in *LITP and return the variable part.  If a part isn't
   present, set it to null.  If the tree does not decompose in this way,
   return the entire tree as the variable part and the other parts as null.

   If CODE is PLUS_EXPR we also split trees that use MINUS_EXPR.  In that
   case, we negate an operand that was subtracted.  Except if it is a
   literal for which we use *MINUS_LITP instead.

   If NEGATE_P is true, we are negating all of IN, again except a literal
   for which we use *MINUS_LITP instead.

   If IN is itself a literal or constant, return it as appropriate.

   Note that we do not guarantee that any of the three values will be the
   same type as IN, but they will have the same signedness and mode.  */

static tree
split_tree (tree in, enum tree_code code, tree *conp, tree *litp,
            tree *minus_litp, int negate_p)
{
  tree var = 0;

  *conp = 0;
  *litp = 0;
  *minus_litp = 0;

  /* Strip any conversions that don't change the machine mode or signedness.  */
  STRIP_SIGN_NOPS (in);

  if (TREE_CODE (in) == INTEGER_CST || TREE_CODE (in) == REAL_CST
      || TREE_CODE (in) == FIXED_CST)
    *litp = in;
  else if (TREE_CODE (in) == code
           || ((! FLOAT_TYPE_P (TREE_TYPE (in)) || flag_associative_math)
               && ! SAT_FIXED_POINT_TYPE_P (TREE_TYPE (in))
               /* We can associate addition and subtraction together (even
                  though the C standard doesn't say so) for integers because
                  the value is not affected.  For reals, the value might be
                  affected, so we can't.  */
               && ((code == PLUS_EXPR && TREE_CODE (in) == MINUS_EXPR)
                   || (code == MINUS_EXPR && TREE_CODE (in) == PLUS_EXPR))))
    {
      tree op0 = TREE_OPERAND (in, 0);
      tree op1 = TREE_OPERAND (in, 1);
      int neg1_p = TREE_CODE (in) == MINUS_EXPR;
      int neg_litp_p = 0, neg_conp_p = 0, neg_var_p = 0;

      /* First see if either of the operands is a literal, then a constant.  */
      if (TREE_CODE (op0) == INTEGER_CST || TREE_CODE (op0) == REAL_CST
          || TREE_CODE (op0) == FIXED_CST)
        *litp = op0, op0 = 0;
      else if (TREE_CODE (op1) == INTEGER_CST || TREE_CODE (op1) == REAL_CST
               || TREE_CODE (op1) == FIXED_CST)
        *litp = op1, neg_litp_p = neg1_p, op1 = 0;

      if (op0 != 0 && TREE_CONSTANT (op0))
        *conp = op0, op0 = 0;
      else if (op1 != 0 && TREE_CONSTANT (op1))
        *conp = op1, neg_conp_p = neg1_p, op1 = 0;

      /* If we haven't dealt with either operand, this is not a case we can
         decompose.  Otherwise, VAR is either of the ones remaining, if any.  */
      if (op0 != 0 && op1 != 0)
        var = in;
      else if (op0 != 0)
        var = op0;
      else
        var = op1, neg_var_p = neg1_p;

      /* Now do any needed negations.  */
      if (neg_litp_p)
        *minus_litp = *litp, *litp = 0;
      if (neg_conp_p)
        *conp = negate_expr (*conp);
      if (neg_var_p)
        var = negate_expr (var);
    }
  else if (TREE_CONSTANT (in))
    *conp = in;
  else
    var = in;

  if (negate_p)
    {
      if (*litp)
        *minus_litp = *litp, *litp = 0;
      else if (*minus_litp)
        *litp = *minus_litp, *minus_litp = 0;
      *conp = negate_expr (*conp);
      var = negate_expr (var);
    }

  return var;
}

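/* Example: splitting IN == X - 5 with CODE == PLUS_EXPR and
   NEGATE_P == 0 returns X as the variable part and stores the literal
   5 in *MINUS_LITP (because it was subtracted), leaving *CONP and
   *LITP null.  */
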
/* Re-associate trees split by the above function.  T1 and T2 are
   either expressions to associate or null.  Return the new
   expression, if any.  LOC is the location of the new expression.  If
   we build an operation, do it in TYPE and with CODE.  */

static tree
associate_trees (location_t loc, tree t1, tree t2, enum tree_code code, tree type)
{
  tree tem;

  if (t1 == 0)
    return t2;
  else if (t2 == 0)
    return t1;

  /* If either input is CODE, a PLUS_EXPR, or a MINUS_EXPR, don't
     try to fold this since we will have infinite recursion.  But do
     deal with any NEGATE_EXPRs.  */
  if (TREE_CODE (t1) == code || TREE_CODE (t2) == code
      || TREE_CODE (t1) == MINUS_EXPR || TREE_CODE (t2) == MINUS_EXPR)
    {
      if (code == PLUS_EXPR)
        {
          if (TREE_CODE (t1) == NEGATE_EXPR)
            tem = build2 (MINUS_EXPR, type, fold_convert_loc (loc, type, t2),
                          fold_convert_loc (loc, type, TREE_OPERAND (t1, 0)));
          else if (TREE_CODE (t2) == NEGATE_EXPR)
            tem = build2 (MINUS_EXPR, type, fold_convert_loc (loc, type, t1),
                          fold_convert_loc (loc, type, TREE_OPERAND (t2, 0)));
          else if (integer_zerop (t2))
            return fold_convert_loc (loc, type, t1);
        }
      else if (code == MINUS_EXPR)
        {
          if (integer_zerop (t2))
            return fold_convert_loc (loc, type, t1);
        }

      tem = build2 (code, type, fold_convert_loc (loc, type, t1),
                    fold_convert_loc (loc, type, t2));
      goto associate_trees_exit;
    }

  return fold_build2_loc (loc, code, type, fold_convert_loc (loc, type, t1),
                          fold_convert_loc (loc, type, t2));
 associate_trees_exit:
  protected_set_expr_location (tem, loc);
  return tem;
}

/* Check whether TYPE1 and TYPE2 are equivalent integer types, suitable
   for use in int_const_binop, size_binop and size_diffop.  */

static bool
int_binop_types_match_p (enum tree_code code, const_tree type1, const_tree type2)
{
  if (TREE_CODE (type1) != INTEGER_TYPE && !POINTER_TYPE_P (type1))
    return false;
  if (TREE_CODE (type2) != INTEGER_TYPE && !POINTER_TYPE_P (type2))
    return false;

  switch (code)
    {
    case LSHIFT_EXPR:
    case RSHIFT_EXPR:
    case LROTATE_EXPR:
    case RROTATE_EXPR:
      return true;

    default:
      break;
    }

  return TYPE_UNSIGNED (type1) == TYPE_UNSIGNED (type2)
         && TYPE_PRECISION (type1) == TYPE_PRECISION (type2)
         && TYPE_MODE (type1) == TYPE_MODE (type2);
}

/* Combine two integer constants ARG1 and ARG2 under operation CODE
   to produce a new constant.  Return NULL_TREE if we don't know how
   to evaluate CODE at compile-time.

   If NOTRUNC is nonzero, do not truncate the result to fit the data type.  */

tree
int_const_binop (enum tree_code code, const_tree arg1, const_tree arg2, int notrunc)
{
  unsigned HOST_WIDE_INT int1l, int2l;
  HOST_WIDE_INT int1h, int2h;
  unsigned HOST_WIDE_INT low;
  HOST_WIDE_INT hi;
  unsigned HOST_WIDE_INT garbagel;
  HOST_WIDE_INT garbageh;
  tree t;
  tree type = TREE_TYPE (arg1);
  int uns = TYPE_UNSIGNED (type);
  int is_sizetype
    = (TREE_CODE (type) == INTEGER_TYPE && TYPE_IS_SIZETYPE (type));
  int overflow = 0;

  int1l = TREE_INT_CST_LOW (arg1);
  int1h = TREE_INT_CST_HIGH (arg1);
  int2l = TREE_INT_CST_LOW (arg2);
  int2h = TREE_INT_CST_HIGH (arg2);

  switch (code)
    {
    case BIT_IOR_EXPR:
      low = int1l | int2l, hi = int1h | int2h;
      break;

    case BIT_XOR_EXPR:
      low = int1l ^ int2l, hi = int1h ^ int2h;
      break;

    case BIT_AND_EXPR:
      low = int1l & int2l, hi = int1h & int2h;
      break;

    case RSHIFT_EXPR:
      int2l = -int2l;
    case LSHIFT_EXPR:
      /* It's unclear from the C standard whether shifts can overflow.
         The following code ignores overflow; perhaps a C standard
         interpretation ruling is needed.  */
      lshift_double (int1l, int1h, int2l, TYPE_PRECISION (type),
                     &low, &hi, !uns);
      break;

    case RROTATE_EXPR:
      int2l = - int2l;
    case LROTATE_EXPR:
      lrotate_double (int1l, int1h, int2l, TYPE_PRECISION (type),
                      &low, &hi);
      break;

    case PLUS_EXPR:
      overflow = add_double (int1l, int1h, int2l, int2h, &low, &hi);
      break;

    case MINUS_EXPR:
      neg_double (int2l, int2h, &low, &hi);
      add_double (int1l, int1h, low, hi, &low, &hi);
      overflow = OVERFLOW_SUM_SIGN (hi, int2h, int1h);
      break;

    case MULT_EXPR:
      overflow = mul_double (int1l, int1h, int2l, int2h, &low, &hi);
      break;

    case TRUNC_DIV_EXPR:
    case FLOOR_DIV_EXPR: case CEIL_DIV_EXPR:
    case EXACT_DIV_EXPR:
      /* This is a shortcut for a common special case.  */
      if (int2h == 0 && (HOST_WIDE_INT) int2l > 0
          && !TREE_OVERFLOW (arg1)
          && !TREE_OVERFLOW (arg2)
          && int1h == 0 && (HOST_WIDE_INT) int1l >= 0)
        {
          if (code == CEIL_DIV_EXPR)
            int1l += int2l - 1;

          low = int1l / int2l, hi = 0;
          break;
        }

      /* ... fall through ...  */

    case ROUND_DIV_EXPR:
      if (int2h == 0 && int2l == 0)
        return NULL_TREE;
      if (int2h == 0 && int2l == 1)
        {
          low = int1l, hi = int1h;
          break;
        }
      if (int1l == int2l && int1h == int2h
          && ! (int1l == 0 && int1h == 0))
        {
          low = 1, hi = 0;
          break;
        }
      overflow = div_and_round_double (code, uns, int1l, int1h, int2l, int2h,
                                       &low, &hi, &garbagel, &garbageh);
      break;

    case TRUNC_MOD_EXPR:
    case FLOOR_MOD_EXPR: case CEIL_MOD_EXPR:
      /* This is a shortcut for a common special case.  */
      if (int2h == 0 && (HOST_WIDE_INT) int2l > 0
          && !TREE_OVERFLOW (arg1)
          && !TREE_OVERFLOW (arg2)
          && int1h == 0 && (HOST_WIDE_INT) int1l >= 0)
        {
          if (code == CEIL_MOD_EXPR)
            int1l += int2l - 1;
          low = int1l % int2l, hi = 0;
          break;
        }

      /* ... fall through ...  */

    case ROUND_MOD_EXPR:
      if (int2h == 0 && int2l == 0)
        return NULL_TREE;
      overflow = div_and_round_double (code, uns,
                                       int1l, int1h, int2l, int2h,
                                       &garbagel, &garbageh, &low, &hi);
      break;

    case MIN_EXPR:
    case MAX_EXPR:
      if (uns)
        low = (((unsigned HOST_WIDE_INT) int1h
                < (unsigned HOST_WIDE_INT) int2h)
               || (((unsigned HOST_WIDE_INT) int1h
                    == (unsigned HOST_WIDE_INT) int2h)
                   && int1l < int2l));
      else
        low = (int1h < int2h
               || (int1h == int2h && int1l < int2l));

      if (low == (code == MIN_EXPR))
        low = int1l, hi = int1h;
      else
        low = int2l, hi = int2h;
      break;

    default:
      return NULL_TREE;
    }

  if (notrunc)
    {
      t = build_int_cst_wide (TREE_TYPE (arg1), low, hi);

      /* Propagate overflow flags ourselves.  */
      if (((!uns || is_sizetype) && overflow)
          | TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2))
        {
          t = copy_node (t);
          TREE_OVERFLOW (t) = 1;
        }
    }
  else
    t = force_fit_type_double (TREE_TYPE (arg1), low, hi, 1,
                               ((!uns || is_sizetype) && overflow)
                               | TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2));

  return t;
}

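/* Example: folding INT_MAX + 1 in a signed 32-bit type computes the
   double-word sum 0x80000000 exactly; force_fit_type_double then
   finds that the value does not fit the 32-bit precision, so the
   result is an unshared INTEGER_CST for INT_MIN with TREE_OVERFLOW
   set.  */
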
/* Combine two constants ARG1 and ARG2 under operation CODE to produce a new
   constant.  We assume ARG1 and ARG2 have the same data type, or at least
   are the same kind of constant and the same machine mode.  Return zero if
   combining the constants is not allowed in the current operating mode.

   If NOTRUNC is nonzero, do not truncate the result to fit the data type.  */

static tree
const_binop (enum tree_code code, tree arg1, tree arg2, int notrunc)
{
  /* Sanity check for the recursive cases.  */
  if (!arg1 || !arg2)
    return NULL_TREE;

  STRIP_NOPS (arg1);
  STRIP_NOPS (arg2);

  if (TREE_CODE (arg1) == INTEGER_CST)
    return int_const_binop (code, arg1, arg2, notrunc);

  if (TREE_CODE (arg1) == REAL_CST)
    {
      enum machine_mode mode;
      REAL_VALUE_TYPE d1;
      REAL_VALUE_TYPE d2;
      REAL_VALUE_TYPE value;
      REAL_VALUE_TYPE result;
      bool inexact;
      tree t, type;

      /* The following codes are handled by real_arithmetic.  */
      switch (code)
        {
        case PLUS_EXPR:
        case MINUS_EXPR:
        case MULT_EXPR:
        case RDIV_EXPR:
        case MIN_EXPR:
        case MAX_EXPR:
          break;

        default:
          return NULL_TREE;
        }

      d1 = TREE_REAL_CST (arg1);
      d2 = TREE_REAL_CST (arg2);

      type = TREE_TYPE (arg1);
      mode = TYPE_MODE (type);

      /* Don't perform operation if we honor signaling NaNs and
         either operand is a NaN.  */
      if (HONOR_SNANS (mode)
          && (REAL_VALUE_ISNAN (d1) || REAL_VALUE_ISNAN (d2)))
        return NULL_TREE;

      /* Don't perform operation if it would raise a division
         by zero exception.  */
      if (code == RDIV_EXPR
          && REAL_VALUES_EQUAL (d2, dconst0)
          && (flag_trapping_math || ! MODE_HAS_INFINITIES (mode)))
        return NULL_TREE;

      /* If either operand is a NaN, just return it.  Otherwise, set up
         for floating-point trap; we return an overflow.  */
      if (REAL_VALUE_ISNAN (d1))
        return arg1;
      else if (REAL_VALUE_ISNAN (d2))
        return arg2;

      inexact = real_arithmetic (&value, code, &d1, &d2);
      real_convert (&result, mode, &value);

      /* Don't constant fold this floating point operation if
         the result has overflowed and flag_trapping_math.  */
      if (flag_trapping_math
          && MODE_HAS_INFINITIES (mode)
          && REAL_VALUE_ISINF (result)
          && !REAL_VALUE_ISINF (d1)
          && !REAL_VALUE_ISINF (d2))
        return NULL_TREE;

      /* Don't constant fold this floating point operation if the
         result may depend upon the run-time rounding mode and
         flag_rounding_math is set, or if GCC's software emulation
         is unable to accurately represent the result.  */
      if ((flag_rounding_math
           || (MODE_COMPOSITE_P (mode) && !flag_unsafe_math_optimizations))
          && (inexact || !real_identical (&result, &value)))
        return NULL_TREE;

      t = build_real (type, result);

      TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2);
      return t;
    }

  if (TREE_CODE (arg1) == FIXED_CST)
    {
      FIXED_VALUE_TYPE f1;
      FIXED_VALUE_TYPE f2;
      FIXED_VALUE_TYPE result;
      tree t, type;
      int sat_p;
      bool overflow_p;

      /* The following codes are handled by fixed_arithmetic.  */
      switch (code)
        {
        case PLUS_EXPR:
        case MINUS_EXPR:
        case MULT_EXPR:
        case TRUNC_DIV_EXPR:
          f2 = TREE_FIXED_CST (arg2);
          break;

        case LSHIFT_EXPR:
        case RSHIFT_EXPR:
          f2.data.high = TREE_INT_CST_HIGH (arg2);
          f2.data.low = TREE_INT_CST_LOW (arg2);
          f2.mode = SImode;
          break;

        default:
          return NULL_TREE;
        }

      f1 = TREE_FIXED_CST (arg1);
      type = TREE_TYPE (arg1);
      sat_p = TYPE_SATURATING (type);
      overflow_p = fixed_arithmetic (&result, code, &f1, &f2, sat_p);
      t = build_fixed (type, result);
      /* Propagate overflow flags.  */
      if (overflow_p | TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2))
        TREE_OVERFLOW (t) = 1;
      return t;
    }

  if (TREE_CODE (arg1) == COMPLEX_CST)
    {
      tree type = TREE_TYPE (arg1);
      tree r1 = TREE_REALPART (arg1);
      tree i1 = TREE_IMAGPART (arg1);
      tree r2 = TREE_REALPART (arg2);
      tree i2 = TREE_IMAGPART (arg2);
      tree real, imag;

      switch (code)
        {
        case PLUS_EXPR:
        case MINUS_EXPR:
          real = const_binop (code, r1, r2, notrunc);
          imag = const_binop (code, i1, i2, notrunc);
          break;

        case MULT_EXPR:
#ifdef HAVE_mpc
          if (COMPLEX_FLOAT_TYPE_P (type))
            return do_mpc_arg2 (arg1, arg2, type, mpc_mul);
#endif

          real = const_binop (MINUS_EXPR,
                              const_binop (MULT_EXPR, r1, r2, notrunc),
                              const_binop (MULT_EXPR, i1, i2, notrunc),
                              notrunc);
          imag = const_binop (PLUS_EXPR,
                              const_binop (MULT_EXPR, r1, i2, notrunc),
                              const_binop (MULT_EXPR, i1, r2, notrunc),
                              notrunc);
          break;

        case RDIV_EXPR:
#ifdef HAVE_mpc
          if (COMPLEX_FLOAT_TYPE_P (type))
            return do_mpc_arg2 (arg1, arg2, type, mpc_div);
#endif
1997 tree magsquared
1998 = const_binop (PLUS_EXPR,
1999 const_binop (MULT_EXPR, r2, r2, notrunc),
2000 const_binop (MULT_EXPR, i2, i2, notrunc),
2001 notrunc);
2002 tree t1
2003 = const_binop (PLUS_EXPR,
2004 const_binop (MULT_EXPR, r1, r2, notrunc),
2005 const_binop (MULT_EXPR, i1, i2, notrunc),
2006 notrunc);
2007 tree t2
2008 = const_binop (MINUS_EXPR,
2009 const_binop (MULT_EXPR, i1, r2, notrunc),
2010 const_binop (MULT_EXPR, r1, i2, notrunc),
2011 notrunc);
2013 if (INTEGRAL_TYPE_P (TREE_TYPE (r1)))
2014 code = TRUNC_DIV_EXPR;
2016 real = const_binop (code, t1, magsquared, notrunc);
2017 imag = const_binop (code, t2, magsquared, notrunc);
2019 break;
2021 default:
2022 return NULL_TREE;
2025 if (real && imag)
2026 return build_complex (type, real, imag);
2029 if (TREE_CODE (arg1) == VECTOR_CST)
2031 tree type = TREE_TYPE (arg1);
2032 int count = TYPE_VECTOR_SUBPARTS (type), i;
2033 tree elements1, elements2, list = NULL_TREE;
2035 if (TREE_CODE (arg2) != VECTOR_CST)
2036 return NULL_TREE;
2038 elements1 = TREE_VECTOR_CST_ELTS (arg1);
2039 elements2 = TREE_VECTOR_CST_ELTS (arg2);
2041 for (i = 0; i < count; i++)
2043 tree elem1, elem2, elem;
2045 /* The trailing elements can be empty and should be treated as 0.  */
2046 if (!elements1)
2047 elem1 = fold_convert_const (NOP_EXPR, TREE_TYPE (type), integer_zero_node);
2048 else
2050 elem1 = TREE_VALUE (elements1);
2051 elements1 = TREE_CHAIN (elements1);
2054 if (!elements2)
2055 elem2 = fold_convert_const (NOP_EXPR, TREE_TYPE (type), integer_zero_node);
2056 else
2058 elem2 = TREE_VALUE (elements2);
2059 elements2 = TREE_CHAIN (elements2);
2062 elem = const_binop (code, elem1, elem2, notrunc);
2064 /* It is possible that const_binop cannot handle the given
2065 code and returns NULL_TREE.  */
2066 if (elem == NULL_TREE)
2067 return NULL_TREE;
2069 list = tree_cons (NULL_TREE, elem, list);
2071 return build_vector (type, nreverse (list));
2073 return NULL_TREE;
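/* Illustrative sketch (kept out of the build with #if 0): a minimal
   use of const_binop on two REAL_CSTs, using the usual real.h
   helpers; the wrapper function itself is hypothetical.  */
#if 0
static tree
example_const_binop_real (void)
{
  tree one = build_real (double_type_node, dconst1);
  tree two = build_real (double_type_node, dconst2);
  /* Folds 1.0 + 2.0 to a REAL_CST holding 3.0; returns NULL_TREE
     instead when folding is unsafe, e.g. with signaling NaNs or a
     division by zero under -ftrapping-math.  */
  return const_binop (PLUS_EXPR, one, two, 0);
}
#endif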
2076 /* Create a size type INT_CST node with NUMBER sign extended. KIND
2077 indicates which particular sizetype to create. */
2079 tree
2080 size_int_kind (HOST_WIDE_INT number, enum size_type_kind kind)
2082 return build_int_cst (sizetype_tab[(int) kind], number);
2085 /* Combine operands OP1 and OP2 with arithmetic operation CODE. CODE
2086 is a tree code. The type of the result is taken from the operands.
2087 Both must be equivalent integer types, a la int_binop_types_match_p.
2088 If the operands are constant, so is the result. */
2090 tree
2091 size_binop_loc (location_t loc, enum tree_code code, tree arg0, tree arg1)
2093 tree type = TREE_TYPE (arg0);
2095 if (arg0 == error_mark_node || arg1 == error_mark_node)
2096 return error_mark_node;
2098 gcc_assert (int_binop_types_match_p (code, TREE_TYPE (arg0),
2099 TREE_TYPE (arg1)));
2101 /* Handle the special case of two integer constants faster. */
2102 if (TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
2104 /* And some specific cases even faster than that. */
2105 if (code == PLUS_EXPR)
2107 if (integer_zerop (arg0) && !TREE_OVERFLOW (arg0))
2108 return arg1;
2109 if (integer_zerop (arg1) && !TREE_OVERFLOW (arg1))
2110 return arg0;
2112 else if (code == MINUS_EXPR)
2114 if (integer_zerop (arg1) && !TREE_OVERFLOW (arg1))
2115 return arg0;
2117 else if (code == MULT_EXPR)
2119 if (integer_onep (arg0) && !TREE_OVERFLOW (arg0))
2120 return arg1;
2123 /* Handle general case of two integer constants. */
2124 return int_const_binop (code, arg0, arg1, 0);
2127 return fold_build2_loc (loc, code, type, arg0, arg1);
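/* Illustrative sketch (not compiled): folding a sizetype sum with
   size_binop_loc.  size_int is the standard wrapper around
   size_int_kind above; the example function is hypothetical.  */
#if 0
static tree
example_size_sum (location_t loc)
{
  /* Both operands are sizetype INTEGER_CSTs, so this takes the
     int_const_binop fast path and yields a sizetype constant 12;
     adding size_int (0) instead would return the other operand
     unchanged via the special cases above.  */
  return size_binop_loc (loc, PLUS_EXPR, size_int (4), size_int (8));
}
#endif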
2130 /* Given two values, either both of sizetype or both of bitsizetype,
2131 compute the difference between the two values. Return the value
2132 in signed type corresponding to the type of the operands. */
2134 tree
2135 size_diffop_loc (location_t loc, tree arg0, tree arg1)
2137 tree type = TREE_TYPE (arg0);
2138 tree ctype;
2140 gcc_assert (int_binop_types_match_p (MINUS_EXPR, TREE_TYPE (arg0),
2141 TREE_TYPE (arg1)));
2143 /* If the type is already signed, just do the simple thing. */
2144 if (!TYPE_UNSIGNED (type))
2145 return size_binop_loc (loc, MINUS_EXPR, arg0, arg1);
2147 if (type == sizetype)
2148 ctype = ssizetype;
2149 else if (type == bitsizetype)
2150 ctype = sbitsizetype;
2151 else
2152 ctype = signed_type_for (type);
2154 /* If either operand is not a constant, do the conversions to the signed
2155 type and subtract. The hardware will do the right thing with any
2156 overflow in the subtraction. */
2157 if (TREE_CODE (arg0) != INTEGER_CST || TREE_CODE (arg1) != INTEGER_CST)
2158 return size_binop_loc (loc, MINUS_EXPR,
2159 fold_convert_loc (loc, ctype, arg0),
2160 fold_convert_loc (loc, ctype, arg1));
2162 /* If ARG0 is larger than ARG1, subtract and return the result in CTYPE.
2163 Otherwise, subtract the other way, convert to CTYPE (we know that can't
2164 overflow) and negate (which can't either). Special-case a result
2165 of zero while we're here. */
2166 if (tree_int_cst_equal (arg0, arg1))
2167 return build_int_cst (ctype, 0);
2168 else if (tree_int_cst_lt (arg1, arg0))
2169 return fold_convert_loc (loc, ctype,
2170 size_binop_loc (loc, MINUS_EXPR, arg0, arg1));
2171 else
2172 return size_binop_loc (loc, MINUS_EXPR, build_int_cst (ctype, 0),
2173 fold_convert_loc (loc, ctype,
2174 size_binop_loc (loc,
2175 MINUS_EXPR,
2176 arg1, arg0)));
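/* Illustrative sketch (not compiled, hypothetical wrapper): for the
   unsigned sizetype constants 2 and 5, size_diffop_loc folds 2 - 5
   to the ssizetype constant -3 by computing -(5 - 2), so the
   intermediate subtraction never wraps.  */
#if 0
static tree
example_size_diff (location_t loc)
{
  return size_diffop_loc (loc, size_int (2), size_int (5));
}
#endif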
2179 /* A subroutine of fold_convert_const handling conversions of an
2180 INTEGER_CST to another integer type. */
2182 static tree
2183 fold_convert_const_int_from_int (tree type, const_tree arg1)
2185 tree t;
2187 /* Given an integer constant, make new constant with new type,
2188 appropriately sign-extended or truncated. */
2189 t = force_fit_type_double (type, TREE_INT_CST_LOW (arg1),
2190 TREE_INT_CST_HIGH (arg1),
2191 /* Don't set the overflow when
2192 converting from a pointer, */
2193 !POINTER_TYPE_P (TREE_TYPE (arg1))
2194 /* or to a sizetype with same signedness
2195 and the precision is unchanged.
2196 ??? sizetype is always sign-extended,
2197 but its signedness depends on the
2198 frontend. Thus we see spurious overflows
2199 here if we do not check this. */
2200 && !((TYPE_PRECISION (TREE_TYPE (arg1))
2201 == TYPE_PRECISION (type))
2202 && (TYPE_UNSIGNED (TREE_TYPE (arg1))
2203 == TYPE_UNSIGNED (type))
2204 && ((TREE_CODE (TREE_TYPE (arg1)) == INTEGER_TYPE
2205 && TYPE_IS_SIZETYPE (TREE_TYPE (arg1)))
2206 || (TREE_CODE (type) == INTEGER_TYPE
2207 && TYPE_IS_SIZETYPE (type)))),
2208 (TREE_INT_CST_HIGH (arg1) < 0
2209 && (TYPE_UNSIGNED (type)
2210 < TYPE_UNSIGNED (TREE_TYPE (arg1))))
2211 | TREE_OVERFLOW (arg1));
2213 return t;
2216 /* A subroutine of fold_convert_const handling conversions of a REAL_CST
2217 to an integer type. */
2219 static tree
2220 fold_convert_const_int_from_real (enum tree_code code, tree type, const_tree arg1)
2222 int overflow = 0;
2223 tree t;
2225 /* The following code implements the floating point to integer
2226 conversion rules required by the Java Language Specification,
2227 that IEEE NaNs are mapped to zero and values that overflow
2228 the target precision saturate, i.e. values greater than
2229 INT_MAX are mapped to INT_MAX, and values less than INT_MIN
2230 are mapped to INT_MIN. These semantics are allowed by the
2231 C and C++ standards that simply state that the behavior of
2232 FP-to-integer conversion is unspecified upon overflow. */
2234 HOST_WIDE_INT high, low;
2235 REAL_VALUE_TYPE r;
2236 REAL_VALUE_TYPE x = TREE_REAL_CST (arg1);
2238 switch (code)
2240 case FIX_TRUNC_EXPR:
2241 real_trunc (&r, VOIDmode, &x);
2242 break;
2244 default:
2245 gcc_unreachable ();
2248 /* If R is NaN, return zero and show we have an overflow. */
2249 if (REAL_VALUE_ISNAN (r))
2251 overflow = 1;
2252 high = 0;
2253 low = 0;
2256 /* See if R is less than the lower bound or greater than the
2257 upper bound. */
2259 if (! overflow)
2261 tree lt = TYPE_MIN_VALUE (type);
2262 REAL_VALUE_TYPE l = real_value_from_int_cst (NULL_TREE, lt);
2263 if (REAL_VALUES_LESS (r, l))
2265 overflow = 1;
2266 high = TREE_INT_CST_HIGH (lt);
2267 low = TREE_INT_CST_LOW (lt);
2271 if (! overflow)
2273 tree ut = TYPE_MAX_VALUE (type);
2274 if (ut)
2276 REAL_VALUE_TYPE u = real_value_from_int_cst (NULL_TREE, ut);
2277 if (REAL_VALUES_LESS (u, r))
2279 overflow = 1;
2280 high = TREE_INT_CST_HIGH (ut);
2281 low = TREE_INT_CST_LOW (ut);
2286 if (! overflow)
2287 REAL_VALUE_TO_INT (&low, &high, r);
2289 t = force_fit_type_double (type, low, high, -1,
2290 overflow | TREE_OVERFLOW (arg1));
2291 return t;
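/* Illustrative sketch (not compiled): the saturating semantics above
   in action.  real_from_string and the *_type_node globals are the
   usual helpers; the wrapper itself is hypothetical.  */
#if 0
static tree
example_saturating_fix_trunc (void)
{
  REAL_VALUE_TYPE r;
  tree cst;
  /* 1.0e30 is far above INT_MAX, so the fold saturates to
     TYPE_MAX_VALUE (integer_type_node) and sets TREE_OVERFLOW on the
     result; a NaN input would fold to zero instead.  */
  real_from_string (&r, "1.0e30");
  cst = build_real (double_type_node, r);
  return fold_convert_const (FIX_TRUNC_EXPR, integer_type_node, cst);
}
#endif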
2294 /* A subroutine of fold_convert_const handling conversions of a
2295 FIXED_CST to an integer type. */
2297 static tree
2298 fold_convert_const_int_from_fixed (tree type, const_tree arg1)
2300 tree t;
2301 double_int temp, temp_trunc;
2302 unsigned int mode;
2304 /* Right shift FIXED_CST to temp by fbit. */
2305 temp = TREE_FIXED_CST (arg1).data;
2306 mode = TREE_FIXED_CST (arg1).mode;
2307 if (GET_MODE_FBIT (mode) < 2 * HOST_BITS_PER_WIDE_INT)
2309 lshift_double (temp.low, temp.high,
2310 - GET_MODE_FBIT (mode), 2 * HOST_BITS_PER_WIDE_INT,
2311 &temp.low, &temp.high, SIGNED_FIXED_POINT_MODE_P (mode));
2313 /* Left shift temp to temp_trunc by fbit. */
2314 lshift_double (temp.low, temp.high,
2315 GET_MODE_FBIT (mode), 2 * HOST_BITS_PER_WIDE_INT,
2316 &temp_trunc.low, &temp_trunc.high,
2317 SIGNED_FIXED_POINT_MODE_P (mode));
2319 else
2321 temp.low = 0;
2322 temp.high = 0;
2323 temp_trunc.low = 0;
2324 temp_trunc.high = 0;
2327 /* If FIXED_CST is negative, we need to round the value toward 0:
2328 if the fractional bits are nonzero, add 1 to TEMP. */
2329 if (SIGNED_FIXED_POINT_MODE_P (mode) && temp_trunc.high < 0
2330 && !double_int_equal_p (TREE_FIXED_CST (arg1).data, temp_trunc))
2332 double_int one;
2333 one.low = 1;
2334 one.high = 0;
2335 temp = double_int_add (temp, one);
2338 /* Given a fixed-point constant, make new constant with new type,
2339 appropriately sign-extended or truncated. */
2340 t = force_fit_type_double (type, temp.low, temp.high, -1,
2341 (temp.high < 0
2342 && (TYPE_UNSIGNED (type)
2343 < TYPE_UNSIGNED (TREE_TYPE (arg1))))
2344 | TREE_OVERFLOW (arg1));
2346 return t;
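/* For example, under the rounding rule above the fixed-point value
   -2.5 converts to the integer -2: the arithmetic right shift alone
   would yield -3, and the nonzero fractional bits then trigger the
   +1 correction toward zero.  */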
2349 /* A subroutine of fold_convert_const handling conversions of a REAL_CST
2350 to another floating point type. */
2352 static tree
2353 fold_convert_const_real_from_real (tree type, const_tree arg1)
2355 REAL_VALUE_TYPE value;
2356 tree t;
2358 real_convert (&value, TYPE_MODE (type), &TREE_REAL_CST (arg1));
2359 t = build_real (type, value);
2361 /* If converting an infinity or NAN to a representation that doesn't
2362 have one, set the overflow bit so that we can produce some kind of
2363 error message at the appropriate point if necessary. It's not the
2364 most user-friendly message, but it's better than nothing. */
2365 if (REAL_VALUE_ISINF (TREE_REAL_CST (arg1))
2366 && !MODE_HAS_INFINITIES (TYPE_MODE (type)))
2367 TREE_OVERFLOW (t) = 1;
2368 else if (REAL_VALUE_ISNAN (TREE_REAL_CST (arg1))
2369 && !MODE_HAS_NANS (TYPE_MODE (type)))
2370 TREE_OVERFLOW (t) = 1;
2371 /* Regular overflow: the conversion produced an infinity in a mode
2372 that can't represent it. */
2373 else if (!MODE_HAS_INFINITIES (TYPE_MODE (type))
2374 && REAL_VALUE_ISINF (value)
2375 && !REAL_VALUE_ISINF (TREE_REAL_CST (arg1)))
2376 TREE_OVERFLOW (t) = 1;
2377 else
2378 TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1);
2379 return t;
2382 /* A subroutine of fold_convert_const handling conversions of a FIXED_CST
2383 to a floating point type. */
2385 static tree
2386 fold_convert_const_real_from_fixed (tree type, const_tree arg1)
2388 REAL_VALUE_TYPE value;
2389 tree t;
2391 real_convert_from_fixed (&value, TYPE_MODE (type), &TREE_FIXED_CST (arg1));
2392 t = build_real (type, value);
2394 TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1);
2395 return t;
2398 /* A subroutine of fold_convert_const handling conversions of a FIXED_CST
2399 to another fixed-point type. */
2401 static tree
2402 fold_convert_const_fixed_from_fixed (tree type, const_tree arg1)
2404 FIXED_VALUE_TYPE value;
2405 tree t;
2406 bool overflow_p;
2408 overflow_p = fixed_convert (&value, TYPE_MODE (type), &TREE_FIXED_CST (arg1),
2409 TYPE_SATURATING (type));
2410 t = build_fixed (type, value);
2412 /* Propagate overflow flags. */
2413 if (overflow_p | TREE_OVERFLOW (arg1))
2414 TREE_OVERFLOW (t) = 1;
2415 return t;
2418 /* A subroutine of fold_convert_const handling conversions of an INTEGER_CST
2419 to a fixed-point type. */
2421 static tree
2422 fold_convert_const_fixed_from_int (tree type, const_tree arg1)
2424 FIXED_VALUE_TYPE value;
2425 tree t;
2426 bool overflow_p;
2428 overflow_p = fixed_convert_from_int (&value, TYPE_MODE (type),
2429 TREE_INT_CST (arg1),
2430 TYPE_UNSIGNED (TREE_TYPE (arg1)),
2431 TYPE_SATURATING (type));
2432 t = build_fixed (type, value);
2434 /* Propagate overflow flags. */
2435 if (overflow_p | TREE_OVERFLOW (arg1))
2436 TREE_OVERFLOW (t) = 1;
2437 return t;
2440 /* A subroutine of fold_convert_const handling conversions of a REAL_CST
2441 to a fixed-point type. */
2443 static tree
2444 fold_convert_const_fixed_from_real (tree type, const_tree arg1)
2446 FIXED_VALUE_TYPE value;
2447 tree t;
2448 bool overflow_p;
2450 overflow_p = fixed_convert_from_real (&value, TYPE_MODE (type),
2451 &TREE_REAL_CST (arg1),
2452 TYPE_SATURATING (type));
2453 t = build_fixed (type, value);
2455 /* Propagate overflow flags. */
2456 if (overflow_p | TREE_OVERFLOW (arg1))
2457 TREE_OVERFLOW (t) = 1;
2458 return t;
2461 /* Attempt to fold type conversion operation CODE of expression ARG1 to
2462 type TYPE. If no simplification can be done return NULL_TREE. */
2464 static tree
2465 fold_convert_const (enum tree_code code, tree type, tree arg1)
2467 if (TREE_TYPE (arg1) == type)
2468 return arg1;
2470 if (POINTER_TYPE_P (type) || INTEGRAL_TYPE_P (type)
2471 || TREE_CODE (type) == OFFSET_TYPE)
2473 if (TREE_CODE (arg1) == INTEGER_CST)
2474 return fold_convert_const_int_from_int (type, arg1);
2475 else if (TREE_CODE (arg1) == REAL_CST)
2476 return fold_convert_const_int_from_real (code, type, arg1);
2477 else if (TREE_CODE (arg1) == FIXED_CST)
2478 return fold_convert_const_int_from_fixed (type, arg1);
2480 else if (TREE_CODE (type) == REAL_TYPE)
2482 if (TREE_CODE (arg1) == INTEGER_CST)
2483 return build_real_from_int_cst (type, arg1);
2484 else if (TREE_CODE (arg1) == REAL_CST)
2485 return fold_convert_const_real_from_real (type, arg1);
2486 else if (TREE_CODE (arg1) == FIXED_CST)
2487 return fold_convert_const_real_from_fixed (type, arg1);
2489 else if (TREE_CODE (type) == FIXED_POINT_TYPE)
2491 if (TREE_CODE (arg1) == FIXED_CST)
2492 return fold_convert_const_fixed_from_fixed (type, arg1);
2493 else if (TREE_CODE (arg1) == INTEGER_CST)
2494 return fold_convert_const_fixed_from_int (type, arg1);
2495 else if (TREE_CODE (arg1) == REAL_CST)
2496 return fold_convert_const_fixed_from_real (type, arg1);
2498 return NULL_TREE;
2501 /* Construct a vector of zero elements of vector type TYPE. */
2503 static tree
2504 build_zero_vector (tree type)
2506 tree elem, list;
2507 int i, units;
2509 elem = fold_convert_const (NOP_EXPR, TREE_TYPE (type), integer_zero_node);
2510 units = TYPE_VECTOR_SUBPARTS (type);
2512 list = NULL_TREE;
2513 for (i = 0; i < units; i++)
2514 list = tree_cons (NULL_TREE, elem, list);
2515 return build_vector (type, list);
2518 /* Returns true if ARG is convertible to TYPE using a NOP_EXPR. */
2520 bool
2521 fold_convertible_p (const_tree type, const_tree arg)
2523 tree orig = TREE_TYPE (arg);
2525 if (type == orig)
2526 return true;
2528 if (TREE_CODE (arg) == ERROR_MARK
2529 || TREE_CODE (type) == ERROR_MARK
2530 || TREE_CODE (orig) == ERROR_MARK)
2531 return false;
2533 if (TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (orig))
2534 return true;
2536 switch (TREE_CODE (type))
2538 case INTEGER_TYPE: case ENUMERAL_TYPE: case BOOLEAN_TYPE:
2539 case POINTER_TYPE: case REFERENCE_TYPE:
2540 case OFFSET_TYPE:
2541 if (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig)
2542 || TREE_CODE (orig) == OFFSET_TYPE)
2543 return true;
2544 return (TREE_CODE (orig) == VECTOR_TYPE
2545 && tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (orig)));
2547 case REAL_TYPE:
2548 case FIXED_POINT_TYPE:
2549 case COMPLEX_TYPE:
2550 case VECTOR_TYPE:
2551 case VOID_TYPE:
2552 return TREE_CODE (type) == TREE_CODE (orig);
2554 default:
2555 return false;
2559 /* Convert expression ARG to type TYPE. Used by the middle-end for
2560 simple conversions in preference to calling the front-end's convert. */
2562 tree
2563 fold_convert_loc (location_t loc, tree type, tree arg)
2565 tree orig = TREE_TYPE (arg);
2566 tree tem;
2568 if (type == orig)
2569 return arg;
2571 if (TREE_CODE (arg) == ERROR_MARK
2572 || TREE_CODE (type) == ERROR_MARK
2573 || TREE_CODE (orig) == ERROR_MARK)
2574 return error_mark_node;
2576 if (TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (orig))
2577 return fold_build1_loc (loc, NOP_EXPR, type, arg);
2579 switch (TREE_CODE (type))
2581 case INTEGER_TYPE: case ENUMERAL_TYPE: case BOOLEAN_TYPE:
2582 case POINTER_TYPE: case REFERENCE_TYPE:
2583 case OFFSET_TYPE:
2584 if (TREE_CODE (arg) == INTEGER_CST)
2586 tem = fold_convert_const (NOP_EXPR, type, arg);
2587 if (tem != NULL_TREE)
2588 return tem;
2590 if (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig)
2591 || TREE_CODE (orig) == OFFSET_TYPE)
2592 return fold_build1_loc (loc, NOP_EXPR, type, arg);
2593 if (TREE_CODE (orig) == COMPLEX_TYPE)
2594 return fold_convert_loc (loc, type,
2595 fold_build1_loc (loc, REALPART_EXPR,
2596 TREE_TYPE (orig), arg));
2597 gcc_assert (TREE_CODE (orig) == VECTOR_TYPE
2598 && tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (orig)));
2599 return fold_build1_loc (loc, NOP_EXPR, type, arg);
2601 case REAL_TYPE:
2602 if (TREE_CODE (arg) == INTEGER_CST)
2604 tem = fold_convert_const (FLOAT_EXPR, type, arg);
2605 if (tem != NULL_TREE)
2606 return tem;
2608 else if (TREE_CODE (arg) == REAL_CST)
2610 tem = fold_convert_const (NOP_EXPR, type, arg);
2611 if (tem != NULL_TREE)
2612 return tem;
2614 else if (TREE_CODE (arg) == FIXED_CST)
2616 tem = fold_convert_const (FIXED_CONVERT_EXPR, type, arg);
2617 if (tem != NULL_TREE)
2618 return tem;
2621 switch (TREE_CODE (orig))
2623 case INTEGER_TYPE:
2624 case BOOLEAN_TYPE: case ENUMERAL_TYPE:
2625 case POINTER_TYPE: case REFERENCE_TYPE:
2626 return fold_build1_loc (loc, FLOAT_EXPR, type, arg);
2628 case REAL_TYPE:
2629 return fold_build1_loc (loc, NOP_EXPR, type, arg);
2631 case FIXED_POINT_TYPE:
2632 return fold_build1_loc (loc, FIXED_CONVERT_EXPR, type, arg);
2634 case COMPLEX_TYPE:
2635 tem = fold_build1_loc (loc, REALPART_EXPR, TREE_TYPE (orig), arg);
2636 return fold_convert_loc (loc, type, tem);
2638 default:
2639 gcc_unreachable ();
2642 case FIXED_POINT_TYPE:
2643 if (TREE_CODE (arg) == FIXED_CST || TREE_CODE (arg) == INTEGER_CST
2644 || TREE_CODE (arg) == REAL_CST)
2646 tem = fold_convert_const (FIXED_CONVERT_EXPR, type, arg);
2647 if (tem != NULL_TREE)
2648 goto fold_convert_exit;
2651 switch (TREE_CODE (orig))
2653 case FIXED_POINT_TYPE:
2654 case INTEGER_TYPE:
2655 case ENUMERAL_TYPE:
2656 case BOOLEAN_TYPE:
2657 case REAL_TYPE:
2658 return fold_build1_loc (loc, FIXED_CONVERT_EXPR, type, arg);
2660 case COMPLEX_TYPE:
2661 tem = fold_build1_loc (loc, REALPART_EXPR, TREE_TYPE (orig), arg);
2662 return fold_convert_loc (loc, type, tem);
2664 default:
2665 gcc_unreachable ();
2668 case COMPLEX_TYPE:
2669 switch (TREE_CODE (orig))
2671 case INTEGER_TYPE:
2672 case BOOLEAN_TYPE: case ENUMERAL_TYPE:
2673 case POINTER_TYPE: case REFERENCE_TYPE:
2674 case REAL_TYPE:
2675 case FIXED_POINT_TYPE:
2676 return fold_build2_loc (loc, COMPLEX_EXPR, type,
2677 fold_convert_loc (loc, TREE_TYPE (type), arg),
2678 fold_convert_loc (loc, TREE_TYPE (type),
2679 integer_zero_node));
2680 case COMPLEX_TYPE:
2682 tree rpart, ipart;
2684 if (TREE_CODE (arg) == COMPLEX_EXPR)
2686 rpart = fold_convert_loc (loc, TREE_TYPE (type),
2687 TREE_OPERAND (arg, 0));
2688 ipart = fold_convert_loc (loc, TREE_TYPE (type),
2689 TREE_OPERAND (arg, 1));
2690 return fold_build2_loc (loc, COMPLEX_EXPR, type, rpart, ipart);
2693 arg = save_expr (arg);
2694 rpart = fold_build1_loc (loc, REALPART_EXPR, TREE_TYPE (orig), arg);
2695 ipart = fold_build1_loc (loc, IMAGPART_EXPR, TREE_TYPE (orig), arg);
2696 rpart = fold_convert_loc (loc, TREE_TYPE (type), rpart);
2697 ipart = fold_convert_loc (loc, TREE_TYPE (type), ipart);
2698 return fold_build2_loc (loc, COMPLEX_EXPR, type, rpart, ipart);
2701 default:
2702 gcc_unreachable ();
2705 case VECTOR_TYPE:
2706 if (integer_zerop (arg))
2707 return build_zero_vector (type);
2708 gcc_assert (tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (orig)));
2709 gcc_assert (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig)
2710 || TREE_CODE (orig) == VECTOR_TYPE);
2711 return fold_build1_loc (loc, VIEW_CONVERT_EXPR, type, arg);
2713 case VOID_TYPE:
2714 tem = fold_ignored_result (arg);
2715 if (TREE_CODE (tem) == MODIFY_EXPR)
2716 goto fold_convert_exit;
2717 return fold_build1_loc (loc, NOP_EXPR, type, tem);
2719 default:
2720 gcc_unreachable ();
2722 fold_convert_exit:
2723 protected_set_expr_location (tem, loc);
2724 return tem;
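/* Illustrative sketch (not compiled, hypothetical wrapper): per the
   COMPLEX_TYPE handling above, converting a complex value to a
   scalar type keeps only the real part.  */
#if 0
static tree
example_complex_to_double (location_t loc, tree complex_val)
{
  /* Equivalent to taking REALPART_EXPR of COMPLEX_VAL and then
     converting that to double_type_node.  */
  return fold_convert_loc (loc, double_type_node, complex_val);
}
#endif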
2727 /* Return false if expr can be assumed not to be an lvalue, true
2728 otherwise. */
2730 static bool
2731 maybe_lvalue_p (const_tree x)
2733 /* We only need to wrap lvalue tree codes. */
2734 switch (TREE_CODE (x))
2736 case VAR_DECL:
2737 case PARM_DECL:
2738 case RESULT_DECL:
2739 case LABEL_DECL:
2740 case FUNCTION_DECL:
2741 case SSA_NAME:
2743 case COMPONENT_REF:
2744 case INDIRECT_REF:
2745 case ALIGN_INDIRECT_REF:
2746 case MISALIGNED_INDIRECT_REF:
2747 case ARRAY_REF:
2748 case ARRAY_RANGE_REF:
2749 case BIT_FIELD_REF:
2750 case OBJ_TYPE_REF:
2752 case REALPART_EXPR:
2753 case IMAGPART_EXPR:
2754 case PREINCREMENT_EXPR:
2755 case PREDECREMENT_EXPR:
2756 case SAVE_EXPR:
2757 case TRY_CATCH_EXPR:
2758 case WITH_CLEANUP_EXPR:
2759 case COMPOUND_EXPR:
2760 case MODIFY_EXPR:
2761 case TARGET_EXPR:
2762 case COND_EXPR:
2763 case BIND_EXPR:
2764 case MIN_EXPR:
2765 case MAX_EXPR:
2766 break;
2768 default:
2769 /* Assume the worst for front-end tree codes. */
2770 if ((int)TREE_CODE (x) >= NUM_TREE_CODES)
2771 break;
2772 return false;
2775 return true;
2778 /* Return an expr equal to X but certainly not valid as an lvalue. */
2780 tree
2781 non_lvalue_loc (location_t loc, tree x)
2783 /* While we are in GIMPLE, NON_LVALUE_EXPR doesn't mean anything to
2784 us. */
2785 if (in_gimple_form)
2786 return x;
2788 if (! maybe_lvalue_p (x))
2789 return x;
2790 x = build1 (NON_LVALUE_EXPR, TREE_TYPE (x), x);
2791 SET_EXPR_LOCATION (x, loc);
2792 return x;
2795 /* Nonzero means lvalues are limited to those valid in pedantic ANSI C.
2796 Zero means allow extended lvalues. */
2798 int pedantic_lvalues;
2800 /* When pedantic, return an expr equal to X but certainly not valid as a
2801 pedantic lvalue. Otherwise, return X. */
2803 static tree
2804 pedantic_non_lvalue_loc (location_t loc, tree x)
2806 if (pedantic_lvalues)
2807 return non_lvalue_loc (loc, x);
2808 protected_set_expr_location (x, loc);
2809 return x;
2812 /* Given a tree comparison code, return the code that is the logical inverse
2813 of the given code. It is not safe to do this for floating-point
2814 comparisons, except for NE_EXPR and EQ_EXPR, so we receive HONOR_NANS
2815 as well: if reversing the comparison is unsafe, return ERROR_MARK. */
2817 enum tree_code
2818 invert_tree_comparison (enum tree_code code, bool honor_nans)
2820 if (honor_nans && flag_trapping_math)
2821 return ERROR_MARK;
2823 switch (code)
2825 case EQ_EXPR:
2826 return NE_EXPR;
2827 case NE_EXPR:
2828 return EQ_EXPR;
2829 case GT_EXPR:
2830 return honor_nans ? UNLE_EXPR : LE_EXPR;
2831 case GE_EXPR:
2832 return honor_nans ? UNLT_EXPR : LT_EXPR;
2833 case LT_EXPR:
2834 return honor_nans ? UNGE_EXPR : GE_EXPR;
2835 case LE_EXPR:
2836 return honor_nans ? UNGT_EXPR : GT_EXPR;
2837 case LTGT_EXPR:
2838 return UNEQ_EXPR;
2839 case UNEQ_EXPR:
2840 return LTGT_EXPR;
2841 case UNGT_EXPR:
2842 return LE_EXPR;
2843 case UNGE_EXPR:
2844 return LT_EXPR;
2845 case UNLT_EXPR:
2846 return GE_EXPR;
2847 case UNLE_EXPR:
2848 return GT_EXPR;
2849 case ORDERED_EXPR:
2850 return UNORDERED_EXPR;
2851 case UNORDERED_EXPR:
2852 return ORDERED_EXPR;
2853 default:
2854 gcc_unreachable ();
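/* For example, inverting x < y yields x >= y for integers, but
   x unge y when NaNs are honored, since !(x < y) must also cover
   the unordered case; under -ftrapping-math the unordered form
   would lose the trap on a NaN operand, so ERROR_MARK is returned
   and no inversion is done.  */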
2858 /* Similar, but return the comparison that results if the operands are
2859 swapped. This is safe for floating-point. */
2861 enum tree_code
2862 swap_tree_comparison (enum tree_code code)
2864 switch (code)
2866 case EQ_EXPR:
2867 case NE_EXPR:
2868 case ORDERED_EXPR:
2869 case UNORDERED_EXPR:
2870 case LTGT_EXPR:
2871 case UNEQ_EXPR:
2872 return code;
2873 case GT_EXPR:
2874 return LT_EXPR;
2875 case GE_EXPR:
2876 return LE_EXPR;
2877 case LT_EXPR:
2878 return GT_EXPR;
2879 case LE_EXPR:
2880 return GE_EXPR;
2881 case UNGT_EXPR:
2882 return UNLT_EXPR;
2883 case UNGE_EXPR:
2884 return UNLE_EXPR;
2885 case UNLT_EXPR:
2886 return UNGT_EXPR;
2887 case UNLE_EXPR:
2888 return UNGE_EXPR;
2889 default:
2890 gcc_unreachable ();
2895 /* Convert a comparison tree code from an enum tree_code representation
2896 into a compcode bit-based encoding. This function is the inverse of
2897 compcode_to_comparison. */
2899 static enum comparison_code
2900 comparison_to_compcode (enum tree_code code)
2902 switch (code)
2904 case LT_EXPR:
2905 return COMPCODE_LT;
2906 case EQ_EXPR:
2907 return COMPCODE_EQ;
2908 case LE_EXPR:
2909 return COMPCODE_LE;
2910 case GT_EXPR:
2911 return COMPCODE_GT;
2912 case NE_EXPR:
2913 return COMPCODE_NE;
2914 case GE_EXPR:
2915 return COMPCODE_GE;
2916 case ORDERED_EXPR:
2917 return COMPCODE_ORD;
2918 case UNORDERED_EXPR:
2919 return COMPCODE_UNORD;
2920 case UNLT_EXPR:
2921 return COMPCODE_UNLT;
2922 case UNEQ_EXPR:
2923 return COMPCODE_UNEQ;
2924 case UNLE_EXPR:
2925 return COMPCODE_UNLE;
2926 case UNGT_EXPR:
2927 return COMPCODE_UNGT;
2928 case LTGT_EXPR:
2929 return COMPCODE_LTGT;
2930 case UNGE_EXPR:
2931 return COMPCODE_UNGE;
2932 default:
2933 gcc_unreachable ();
2937 /* Convert a compcode bit-based encoding of a comparison operator back
2938 to GCC's enum tree_code representation. This function is the
2939 inverse of comparison_to_compcode. */
2941 static enum tree_code
2942 compcode_to_comparison (enum comparison_code code)
2944 switch (code)
2946 case COMPCODE_LT:
2947 return LT_EXPR;
2948 case COMPCODE_EQ:
2949 return EQ_EXPR;
2950 case COMPCODE_LE:
2951 return LE_EXPR;
2952 case COMPCODE_GT:
2953 return GT_EXPR;
2954 case COMPCODE_NE:
2955 return NE_EXPR;
2956 case COMPCODE_GE:
2957 return GE_EXPR;
2958 case COMPCODE_ORD:
2959 return ORDERED_EXPR;
2960 case COMPCODE_UNORD:
2961 return UNORDERED_EXPR;
2962 case COMPCODE_UNLT:
2963 return UNLT_EXPR;
2964 case COMPCODE_UNEQ:
2965 return UNEQ_EXPR;
2966 case COMPCODE_UNLE:
2967 return UNLE_EXPR;
2968 case COMPCODE_UNGT:
2969 return UNGT_EXPR;
2970 case COMPCODE_LTGT:
2971 return LTGT_EXPR;
2972 case COMPCODE_UNGE:
2973 return UNGE_EXPR;
2974 default:
2975 gcc_unreachable ();
2979 /* Return a tree for the comparison which is the combination of
2980 doing the AND or OR (depending on CODE) of the two operations LCODE
2981 and RCODE on the identical operands LL_ARG and LR_ARG. Take into account
2982 the possibility of trapping if the mode has NaNs, and return NULL_TREE
2983 if this makes the transformation invalid. */
2985 tree
2986 combine_comparisons (location_t loc,
2987 enum tree_code code, enum tree_code lcode,
2988 enum tree_code rcode, tree truth_type,
2989 tree ll_arg, tree lr_arg)
2991 bool honor_nans = HONOR_NANS (TYPE_MODE (TREE_TYPE (ll_arg)));
2992 enum comparison_code lcompcode = comparison_to_compcode (lcode);
2993 enum comparison_code rcompcode = comparison_to_compcode (rcode);
2994 int compcode;
2996 switch (code)
2998 case TRUTH_AND_EXPR: case TRUTH_ANDIF_EXPR:
2999 compcode = lcompcode & rcompcode;
3000 break;
3002 case TRUTH_OR_EXPR: case TRUTH_ORIF_EXPR:
3003 compcode = lcompcode | rcompcode;
3004 break;
3006 default:
3007 return NULL_TREE;
3010 if (!honor_nans)
3012 /* Eliminate unordered comparisons, as well as LTGT and ORD
3013 which are not used unless the mode has NaNs. */
3014 compcode &= ~COMPCODE_UNORD;
3015 if (compcode == COMPCODE_LTGT)
3016 compcode = COMPCODE_NE;
3017 else if (compcode == COMPCODE_ORD)
3018 compcode = COMPCODE_TRUE;
3020 else if (flag_trapping_math)
3022 /* Check that the original operation and the optimized ones will trap
3023 under the same condition. */
3024 bool ltrap = (lcompcode & COMPCODE_UNORD) == 0
3025 && (lcompcode != COMPCODE_EQ)
3026 && (lcompcode != COMPCODE_ORD);
3027 bool rtrap = (rcompcode & COMPCODE_UNORD) == 0
3028 && (rcompcode != COMPCODE_EQ)
3029 && (rcompcode != COMPCODE_ORD);
3030 bool trap = (compcode & COMPCODE_UNORD) == 0
3031 && (compcode != COMPCODE_EQ)
3032 && (compcode != COMPCODE_ORD);
3034 /* In a short-circuited boolean expression the LHS might be
3035 such that the RHS, if evaluated, will never trap. For
3036 example, in ORD (x, y) && (x < y), we evaluate the RHS only
3037 if neither x nor y is NaN. (This is a mixed blessing: for
3038 example, the expression above will never trap, hence
3039 optimizing it to x < y would be invalid). */
3040 if ((code == TRUTH_ORIF_EXPR && (lcompcode & COMPCODE_UNORD))
3041 || (code == TRUTH_ANDIF_EXPR && !(lcompcode & COMPCODE_UNORD)))
3042 rtrap = false;
3044 /* If the comparison was short-circuited, and only the RHS
3045 trapped, we may now generate a spurious trap. */
3046 if (rtrap && !ltrap
3047 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR))
3048 return NULL_TREE;
3050 /* If we changed the conditions that cause a trap, we lose. */
3051 if ((ltrap || rtrap) != trap)
3052 return NULL_TREE;
3055 if (compcode == COMPCODE_TRUE)
3056 return constant_boolean_node (true, truth_type);
3057 else if (compcode == COMPCODE_FALSE)
3058 return constant_boolean_node (false, truth_type);
3059 else
3061 enum tree_code tcode;
3063 tcode = compcode_to_comparison ((enum comparison_code) compcode);
3064 return fold_build2_loc (loc, tcode, truth_type, ll_arg, lr_arg);
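/* Worked example of the bit encoding used here: x <= y && x >= y
   combines as (LT|EQ) & (GT|EQ) = EQ, i.e. x == y, while
   x < y || x == y combines as LT|EQ, i.e. x <= y.  If the bits
   collapse to COMPCODE_TRUE or COMPCODE_FALSE, a constant boolean
   is built instead.  */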
3068 /* Return nonzero if two operands (typically of the same tree node)
3069 are necessarily equal. If either argument has side-effects this
3070 function returns zero. FLAGS modifies behavior as follows:
3072 If OEP_ONLY_CONST is set, only return nonzero for constants.
3073 This function tests whether the operands are indistinguishable;
3074 it does not test whether they are equal using C's == operation.
3075 The distinction is important for IEEE floating point, because
3076 (1) -0.0 and 0.0 are distinguishable, but -0.0==0.0, and
3077 (2) two NaNs may be indistinguishable, but NaN!=NaN.
3079 If OEP_ONLY_CONST is unset, a VAR_DECL is considered equal to itself
3080 even though it may hold multiple values during a function.
3081 This is because a GCC tree node guarantees that nothing else is
3082 executed between the evaluation of its "operands" (which may often
3083 be evaluated in arbitrary order). Hence if the operands themselves
3084 don't side-effect, the VAR_DECLs, PARM_DECLs etc... must hold the
3085 same value in each operand/subexpression. Hence leaving OEP_ONLY_CONST
3086 unset means assuming isochronic (or instantaneous) tree equivalence.
3087 Unless comparing arbitrary expression trees, such as from different
3088 statements, this flag can usually be left unset.
3090 If OEP_PURE_SAME is set, then pure functions with identical arguments
3091 are considered the same. It is used when the caller has other ways
3092 to ensure that global memory is unchanged in between. */
3094 int
3095 operand_equal_p (const_tree arg0, const_tree arg1, unsigned int flags)
3097 /* If either is ERROR_MARK, they aren't equal. */
3098 if (TREE_CODE (arg0) == ERROR_MARK || TREE_CODE (arg1) == ERROR_MARK)
3099 return 0;
3101 /* Check equality of integer constants before bailing out due to
3102 precision differences. */
3103 if (TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
3104 return tree_int_cst_equal (arg0, arg1);
3106 /* If both types don't have the same signedness, then we can't consider
3107 them equal. We must check this before the STRIP_NOPS calls
3108 because they may change the signedness of the arguments. As pointers
3109 strictly don't have a signedness, require either two pointers or
3110 two non-pointers as well. */
3111 if (TYPE_UNSIGNED (TREE_TYPE (arg0)) != TYPE_UNSIGNED (TREE_TYPE (arg1))
3112 || POINTER_TYPE_P (TREE_TYPE (arg0)) != POINTER_TYPE_P (TREE_TYPE (arg1)))
3113 return 0;
3115 /* If both types don't have the same precision, then it is not safe
3116 to strip NOPs. */
3117 if (TYPE_PRECISION (TREE_TYPE (arg0)) != TYPE_PRECISION (TREE_TYPE (arg1)))
3118 return 0;
3120 STRIP_NOPS (arg0);
3121 STRIP_NOPS (arg1);
3123 /* In case both args are comparisons but with different comparison
3124 code, try to swap the comparison operands of one arg to produce
3125 a match and compare that variant. */
3126 if (TREE_CODE (arg0) != TREE_CODE (arg1)
3127 && COMPARISON_CLASS_P (arg0)
3128 && COMPARISON_CLASS_P (arg1))
3130 enum tree_code swap_code = swap_tree_comparison (TREE_CODE (arg1));
3132 if (TREE_CODE (arg0) == swap_code)
3133 return operand_equal_p (TREE_OPERAND (arg0, 0),
3134 TREE_OPERAND (arg1, 1), flags)
3135 && operand_equal_p (TREE_OPERAND (arg0, 1),
3136 TREE_OPERAND (arg1, 0), flags);
3139 if (TREE_CODE (arg0) != TREE_CODE (arg1)
3140 /* This is needed for conversions and for COMPONENT_REF.
3141 Might as well play it safe and always test this. */
3142 || TREE_CODE (TREE_TYPE (arg0)) == ERROR_MARK
3143 || TREE_CODE (TREE_TYPE (arg1)) == ERROR_MARK
3144 || TYPE_MODE (TREE_TYPE (arg0)) != TYPE_MODE (TREE_TYPE (arg1)))
3145 return 0;
3147 /* If ARG0 and ARG1 are the same SAVE_EXPR, they are necessarily equal.
3148 We don't care about side effects in that case because the SAVE_EXPR
3149 takes care of that for us. In all other cases, two expressions are
3150 equal if they have no side effects. If we have two identical
3151 expressions with side effects that should be treated the same due
3152 to the only side effects being identical SAVE_EXPR's, that will
3153 be detected in the recursive calls below. */
3154 if (arg0 == arg1 && ! (flags & OEP_ONLY_CONST)
3155 && (TREE_CODE (arg0) == SAVE_EXPR
3156 || (! TREE_SIDE_EFFECTS (arg0) && ! TREE_SIDE_EFFECTS (arg1))))
3157 return 1;
3159 /* Next handle constant cases, those for which we can return 1 even
3160 if ONLY_CONST is set. */
3161 if (TREE_CONSTANT (arg0) && TREE_CONSTANT (arg1))
3162 switch (TREE_CODE (arg0))
3164 case INTEGER_CST:
3165 return tree_int_cst_equal (arg0, arg1);
3167 case FIXED_CST:
3168 return FIXED_VALUES_IDENTICAL (TREE_FIXED_CST (arg0),
3169 TREE_FIXED_CST (arg1));
3171 case REAL_CST:
3172 if (REAL_VALUES_IDENTICAL (TREE_REAL_CST (arg0),
3173 TREE_REAL_CST (arg1)))
3174 return 1;
3177 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0))))
3179 /* If we do not distinguish between signed and unsigned zero,
3180 consider them equal. */
3181 if (real_zerop (arg0) && real_zerop (arg1))
3182 return 1;
3184 return 0;
3186 case VECTOR_CST:
3188 tree v1, v2;
3190 v1 = TREE_VECTOR_CST_ELTS (arg0);
3191 v2 = TREE_VECTOR_CST_ELTS (arg1);
3192 while (v1 && v2)
3194 if (!operand_equal_p (TREE_VALUE (v1), TREE_VALUE (v2),
3195 flags))
3196 return 0;
3197 v1 = TREE_CHAIN (v1);
3198 v2 = TREE_CHAIN (v2);
3201 return v1 == v2;
3204 case COMPLEX_CST:
3205 return (operand_equal_p (TREE_REALPART (arg0), TREE_REALPART (arg1),
3206 flags)
3207 && operand_equal_p (TREE_IMAGPART (arg0), TREE_IMAGPART (arg1),
3208 flags));
3210 case STRING_CST:
3211 return (TREE_STRING_LENGTH (arg0) == TREE_STRING_LENGTH (arg1)
3212 && ! memcmp (TREE_STRING_POINTER (arg0),
3213 TREE_STRING_POINTER (arg1),
3214 TREE_STRING_LENGTH (arg0)));
3216 case ADDR_EXPR:
3217 return operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 0),
3218 0);
3219 default:
3220 break;
3223 if (flags & OEP_ONLY_CONST)
3224 return 0;
3226 /* Define macros to test an operand from arg0 and arg1 for equality and a
3227 variant that allows null and views null as being different from any
3228 non-null value. In the latter case, if either is null, then both
3229 must be; otherwise, do the normal comparison. */
3230 #define OP_SAME(N) operand_equal_p (TREE_OPERAND (arg0, N), \
3231 TREE_OPERAND (arg1, N), flags)
3233 #define OP_SAME_WITH_NULL(N) \
3234 ((!TREE_OPERAND (arg0, N) || !TREE_OPERAND (arg1, N)) \
3235 ? TREE_OPERAND (arg0, N) == TREE_OPERAND (arg1, N) : OP_SAME (N))
3237 switch (TREE_CODE_CLASS (TREE_CODE (arg0)))
3239 case tcc_unary:
3240 /* Two conversions are equal only if signedness and modes match. */
3241 switch (TREE_CODE (arg0))
3243 CASE_CONVERT:
3244 case FIX_TRUNC_EXPR:
3245 if (TYPE_UNSIGNED (TREE_TYPE (arg0))
3246 != TYPE_UNSIGNED (TREE_TYPE (arg1)))
3247 return 0;
3248 break;
3249 default:
3250 break;
3253 return OP_SAME (0);
3256 case tcc_comparison:
3257 case tcc_binary:
3258 if (OP_SAME (0) && OP_SAME (1))
3259 return 1;
3261 /* For commutative ops, allow the other order. */
3262 return (commutative_tree_code (TREE_CODE (arg0))
3263 && operand_equal_p (TREE_OPERAND (arg0, 0),
3264 TREE_OPERAND (arg1, 1), flags)
3265 && operand_equal_p (TREE_OPERAND (arg0, 1),
3266 TREE_OPERAND (arg1, 0), flags));
3268 case tcc_reference:
3269 /* If either of the pointer (or reference) expressions we are
3270 dereferencing contain a side effect, these cannot be equal. */
3271 if (TREE_SIDE_EFFECTS (arg0)
3272 || TREE_SIDE_EFFECTS (arg1))
3273 return 0;
3275 switch (TREE_CODE (arg0))
3277 case INDIRECT_REF:
3278 case ALIGN_INDIRECT_REF:
3279 case MISALIGNED_INDIRECT_REF:
3280 case REALPART_EXPR:
3281 case IMAGPART_EXPR:
3282 return OP_SAME (0);
3284 case ARRAY_REF:
3285 case ARRAY_RANGE_REF:
3286 /* Operands 2 and 3 may be null.
3287 Compare the array index by value first if it is constant, as we
3288 may have different types but the same value here. */
3289 return (OP_SAME (0)
3290 && (tree_int_cst_equal (TREE_OPERAND (arg0, 1),
3291 TREE_OPERAND (arg1, 1))
3292 || OP_SAME (1))
3293 && OP_SAME_WITH_NULL (2)
3294 && OP_SAME_WITH_NULL (3));
3296 case COMPONENT_REF:
3297 /* Handle operand 2 the same as for ARRAY_REF. Operand 0
3298 may be NULL when we're called to compare MEM_EXPRs. */
3299 return OP_SAME_WITH_NULL (0)
3300 && OP_SAME (1)
3301 && OP_SAME_WITH_NULL (2);
3303 case BIT_FIELD_REF:
3304 return OP_SAME (0) && OP_SAME (1) && OP_SAME (2);
3306 default:
3307 return 0;
3310 case tcc_expression:
3311 switch (TREE_CODE (arg0))
3313 case ADDR_EXPR:
3314 case TRUTH_NOT_EXPR:
3315 return OP_SAME (0);
3317 case TRUTH_ANDIF_EXPR:
3318 case TRUTH_ORIF_EXPR:
3319 return OP_SAME (0) && OP_SAME (1);
3321 case TRUTH_AND_EXPR:
3322 case TRUTH_OR_EXPR:
3323 case TRUTH_XOR_EXPR:
3324 if (OP_SAME (0) && OP_SAME (1))
3325 return 1;
3327 /* Otherwise take into account that this is a commutative operation. */
3328 return (operand_equal_p (TREE_OPERAND (arg0, 0),
3329 TREE_OPERAND (arg1, 1), flags)
3330 && operand_equal_p (TREE_OPERAND (arg0, 1),
3331 TREE_OPERAND (arg1, 0), flags));
3333 case COND_EXPR:
3334 return OP_SAME (0) && OP_SAME (1) && OP_SAME (2);
3336 default:
3337 return 0;
3340 case tcc_vl_exp:
3341 switch (TREE_CODE (arg0))
3343 case CALL_EXPR:
3344 /* If the CALL_EXPRs call different functions, then they
3345 clearly cannot be equal. */
3346 if (! operand_equal_p (CALL_EXPR_FN (arg0), CALL_EXPR_FN (arg1),
3347 flags))
3348 return 0;
3351 unsigned int cef = call_expr_flags (arg0);
3352 if (flags & OEP_PURE_SAME)
3353 cef &= ECF_CONST | ECF_PURE;
3354 else
3355 cef &= ECF_CONST;
3356 if (!cef)
3357 return 0;
3360 /* Now see if all the arguments are the same. */
3362 const_call_expr_arg_iterator iter0, iter1;
3363 const_tree a0, a1;
3364 for (a0 = first_const_call_expr_arg (arg0, &iter0),
3365 a1 = first_const_call_expr_arg (arg1, &iter1);
3366 a0 && a1;
3367 a0 = next_const_call_expr_arg (&iter0),
3368 a1 = next_const_call_expr_arg (&iter1))
3369 if (! operand_equal_p (a0, a1, flags))
3370 return 0;
3372 /* If we get here and both argument lists are exhausted
3373 then the CALL_EXPRs are equal. */
3374 return ! (a0 || a1);
3376 default:
3377 return 0;
3380 case tcc_declaration:
3381 /* Consider __builtin_sqrt equal to sqrt. */
3382 return (TREE_CODE (arg0) == FUNCTION_DECL
3383 && DECL_BUILT_IN (arg0) && DECL_BUILT_IN (arg1)
3384 && DECL_BUILT_IN_CLASS (arg0) == DECL_BUILT_IN_CLASS (arg1)
3385 && DECL_FUNCTION_CODE (arg0) == DECL_FUNCTION_CODE (arg1));
3387 default:
3388 return 0;
3391 #undef OP_SAME
3392 #undef OP_SAME_WITH_NULL
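/* Illustrative sketch (not compiled, hypothetical wrapper): the
   tcc_binary case above tries both operand orders for commutative
   codes, so a + b and b + a compare equal.  */
#if 0
static int
example_commutative_equal (tree a, tree b)
{
  tree t1 = build2 (PLUS_EXPR, integer_type_node, a, b);
  tree t2 = build2 (PLUS_EXPR, integer_type_node, b, a);
  /* Nonzero provided A and B are themselves side-effect-free.  */
  return operand_equal_p (t1, t2, 0);
}
#endif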
3395 /* Similar to operand_equal_p, but see if ARG0 might have been made by
3396 shorten_compare from ARG1 when ARG1 was being compared with OTHER.
3398 When in doubt, return 0. */
3400 static int
3401 operand_equal_for_comparison_p (tree arg0, tree arg1, tree other)
3403 int unsignedp1, unsignedpo;
3404 tree primarg0, primarg1, primother;
3405 unsigned int correct_width;
3407 if (operand_equal_p (arg0, arg1, 0))
3408 return 1;
3410 if (! INTEGRAL_TYPE_P (TREE_TYPE (arg0))
3411 || ! INTEGRAL_TYPE_P (TREE_TYPE (arg1)))
3412 return 0;
3414 /* Discard any conversions that don't change the modes of ARG0 and ARG1
3415 and see if the inner values are the same. This removes any
3416 signedness comparison, which doesn't matter here. */
3417 primarg0 = arg0, primarg1 = arg1;
3418 STRIP_NOPS (primarg0);
3419 STRIP_NOPS (primarg1);
3420 if (operand_equal_p (primarg0, primarg1, 0))
3421 return 1;
3423 /* Duplicate what shorten_compare does to ARG1 and see if that gives the
3424 actual comparison operand, ARG0.
3426 First throw away any conversions to wider types
3427 already present in the operands. */
3429 primarg1 = get_narrower (arg1, &unsignedp1);
3430 primother = get_narrower (other, &unsignedpo);
3432 correct_width = TYPE_PRECISION (TREE_TYPE (arg1));
3433 if (unsignedp1 == unsignedpo
3434 && TYPE_PRECISION (TREE_TYPE (primarg1)) < correct_width
3435 && TYPE_PRECISION (TREE_TYPE (primother)) < correct_width)
3437 tree type = TREE_TYPE (arg0);
3439 /* Make sure shorter operand is extended the right way
3440 to match the longer operand. */
3441 primarg1 = fold_convert (signed_or_unsigned_type_for
3442 (unsignedp1, TREE_TYPE (primarg1)), primarg1);
3444 if (operand_equal_p (arg0, fold_convert (type, primarg1), 0))
3445 return 1;
3448 return 0;
3451 /* See if ARG is an expression that is either a comparison or is performing
3452 arithmetic on comparisons. The comparisons must only be comparing
3453 two different values, which will be stored in *CVAL1 and *CVAL2; if
3454 they are nonzero it means that some operands have already been found.
3455 No variables may be used anywhere else in the expression except in the
3456 comparisons. If SAVE_P is true it means we removed a SAVE_EXPR around
3457 the expression and save_expr needs to be called with CVAL1 and CVAL2.
3459 If this is true, return 1. Otherwise, return zero. */
3461 static int
3462 twoval_comparison_p (tree arg, tree *cval1, tree *cval2, int *save_p)
3464 enum tree_code code = TREE_CODE (arg);
3465 enum tree_code_class tclass = TREE_CODE_CLASS (code);
3467 /* We can handle some of the tcc_expression cases here. */
3468 if (tclass == tcc_expression && code == TRUTH_NOT_EXPR)
3469 tclass = tcc_unary;
3470 else if (tclass == tcc_expression
3471 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR
3472 || code == COMPOUND_EXPR))
3473 tclass = tcc_binary;
3475 else if (tclass == tcc_expression && code == SAVE_EXPR
3476 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (arg, 0)))
3478 /* If we've already found a CVAL1 or CVAL2, this expression is
3479 too complex to handle. */
3480 if (*cval1 || *cval2)
3481 return 0;
3483 tclass = tcc_unary;
3484 *save_p = 1;
3487 switch (tclass)
3489 case tcc_unary:
3490 return twoval_comparison_p (TREE_OPERAND (arg, 0), cval1, cval2, save_p);
3492 case tcc_binary:
3493 return (twoval_comparison_p (TREE_OPERAND (arg, 0), cval1, cval2, save_p)
3494 && twoval_comparison_p (TREE_OPERAND (arg, 1),
3495 cval1, cval2, save_p));
3497 case tcc_constant:
3498 return 1;
3500 case tcc_expression:
3501 if (code == COND_EXPR)
3502 return (twoval_comparison_p (TREE_OPERAND (arg, 0),
3503 cval1, cval2, save_p)
3504 && twoval_comparison_p (TREE_OPERAND (arg, 1),
3505 cval1, cval2, save_p)
3506 && twoval_comparison_p (TREE_OPERAND (arg, 2),
3507 cval1, cval2, save_p));
3508 return 0;
3510 case tcc_comparison:
3511 /* First see if we can handle the first operand, then the second. For
3512 the second operand, we know *CVAL1 can't be zero. It must be that
3513 one side of the comparison is each of the values; test for the
3514 case where this isn't true by failing if the two operands
3515 are the same. */
3517 if (operand_equal_p (TREE_OPERAND (arg, 0),
3518 TREE_OPERAND (arg, 1), 0))
3519 return 0;
3521 if (*cval1 == 0)
3522 *cval1 = TREE_OPERAND (arg, 0);
3523 else if (operand_equal_p (*cval1, TREE_OPERAND (arg, 0), 0))
3525 else if (*cval2 == 0)
3526 *cval2 = TREE_OPERAND (arg, 0);
3527 else if (operand_equal_p (*cval2, TREE_OPERAND (arg, 0), 0))
3529 else
3530 return 0;
3532 if (operand_equal_p (*cval1, TREE_OPERAND (arg, 1), 0))
3534 else if (*cval2 == 0)
3535 *cval2 = TREE_OPERAND (arg, 1);
3536 else if (operand_equal_p (*cval2, TREE_OPERAND (arg, 1), 0))
3538 else
3539 return 0;
3541 return 1;
3543 default:
3544 return 0;
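/* For example, (a < b) | (a == b) satisfies this predicate with
   *CVAL1 == a and *CVAL2 == b, but (a < b) | (a < c) does not,
   since it mentions three distinct values.  */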
3548 /* ARG is a tree that is known to contain just arithmetic operations and
3549 comparisons. Evaluate the operations in the tree substituting NEW0 for
3550 any occurrence of OLD0 as an operand of a comparison and likewise for
3551 NEW1 and OLD1. */
3553 static tree
3554 eval_subst (location_t loc, tree arg, tree old0, tree new0,
3555 tree old1, tree new1)
3557 tree type = TREE_TYPE (arg);
3558 enum tree_code code = TREE_CODE (arg);
3559 enum tree_code_class tclass = TREE_CODE_CLASS (code);
3561 /* We can handle some of the tcc_expression cases here. */
3562 if (tclass == tcc_expression && code == TRUTH_NOT_EXPR)
3563 tclass = tcc_unary;
3564 else if (tclass == tcc_expression
3565 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR))
3566 tclass = tcc_binary;
3568 switch (tclass)
3570 case tcc_unary:
3571 return fold_build1_loc (loc, code, type,
3572 eval_subst (loc, TREE_OPERAND (arg, 0),
3573 old0, new0, old1, new1));
3575 case tcc_binary:
3576 return fold_build2_loc (loc, code, type,
3577 eval_subst (loc, TREE_OPERAND (arg, 0),
3578 old0, new0, old1, new1),
3579 eval_subst (loc, TREE_OPERAND (arg, 1),
3580 old0, new0, old1, new1));
3582 case tcc_expression:
3583 switch (code)
3585 case SAVE_EXPR:
3586 return eval_subst (loc, TREE_OPERAND (arg, 0), old0, new0,
3587 old1, new1);
3589 case COMPOUND_EXPR:
3590 return eval_subst (loc, TREE_OPERAND (arg, 1), old0, new0,
3591 old1, new1);
3593 case COND_EXPR:
3594 return fold_build3_loc (loc, code, type,
3595 eval_subst (loc, TREE_OPERAND (arg, 0),
3596 old0, new0, old1, new1),
3597 eval_subst (loc, TREE_OPERAND (arg, 1),
3598 old0, new0, old1, new1),
3599 eval_subst (loc, TREE_OPERAND (arg, 2),
3600 old0, new0, old1, new1));
3601 default:
3602 break;
3604 /* Fall through - ??? */
3606 case tcc_comparison:
3608 tree arg0 = TREE_OPERAND (arg, 0);
3609 tree arg1 = TREE_OPERAND (arg, 1);
3611 /* We need to check both for exact equality and tree equality. The
3612 former will be true if the operand has a side-effect. In that
3613 case, we know the operand occurred exactly once. */
3615 if (arg0 == old0 || operand_equal_p (arg0, old0, 0))
3616 arg0 = new0;
3617 else if (arg0 == old1 || operand_equal_p (arg0, old1, 0))
3618 arg0 = new1;
3620 if (arg1 == old0 || operand_equal_p (arg1, old0, 0))
3621 arg1 = new0;
3622 else if (arg1 == old1 || operand_equal_p (arg1, old1, 0))
3623 arg1 = new1;
3625 return fold_build2_loc (loc, code, type, arg0, arg1);
3628 default:
3629 return arg;
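/* For example, with OLD0 = a, NEW0 = 0, OLD1 = b, NEW1 = 1,
   eval_subst rewrites (a < b) | (a == b) into (0 < 1) | (0 == 1),
   which the fold_build* calls then reduce toward a constant,
   letting callers probe an expression at a known comparison
   outcome.  */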
3633 /* Return a tree for the case when the result of an expression is RESULT
3634 converted to TYPE and OMITTED was previously an operand of the expression
3635 but is now not needed (e.g., we folded OMITTED * 0).
3637 If OMITTED has side effects, we must evaluate it. Otherwise, just do
3638 the conversion of RESULT to TYPE. */
3640 tree
3641 omit_one_operand_loc (location_t loc, tree type, tree result, tree omitted)
3643 tree t = fold_convert_loc (loc, type, result);
3645 /* If the resulting operand is an empty statement, just return the omitted
3646 statement cast to void. */
3647 if (IS_EMPTY_STMT (t) && TREE_SIDE_EFFECTS (omitted))
3649 t = build1 (NOP_EXPR, void_type_node, fold_ignored_result (omitted));
3650 goto omit_one_operand_exit;
3653 if (TREE_SIDE_EFFECTS (omitted))
3655 t = build2 (COMPOUND_EXPR, type, fold_ignored_result (omitted), t);
3656 goto omit_one_operand_exit;
3659 return non_lvalue_loc (loc, t);
3661 omit_one_operand_exit:
3662 protected_set_expr_location (t, loc);
3663 return t;
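/* For example, when x * 0 is folded and x is a call with side
   effects, omit_one_operand_loc (loc, type, zero, x) yields the
   COMPOUND_EXPR (x, 0) so the call still executes; with no side
   effects it is just 0 converted to TYPE.  */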
3666 /* Similar, but call pedantic_non_lvalue instead of non_lvalue. */
3668 static tree
3669 pedantic_omit_one_operand_loc (location_t loc, tree type, tree result,
3670 tree omitted)
3672 tree t = fold_convert_loc (loc, type, result);
3674 /* If the resulting operand is an empty statement, just return the omitted
3675 statement cast to void. */
3676 if (IS_EMPTY_STMT (t) && TREE_SIDE_EFFECTS (omitted))
3678 t = build1 (NOP_EXPR, void_type_node, fold_ignored_result (omitted));
3679 goto pedantic_omit_one_operand_exit;
3682 if (TREE_SIDE_EFFECTS (omitted))
3684 t = build2 (COMPOUND_EXPR, type, fold_ignored_result (omitted), t);
3685 goto pedantic_omit_one_operand_exit;
3688 return pedantic_non_lvalue_loc (loc, t);
3690 pedantic_omit_one_operand_exit:
3691 protected_set_expr_location (t, loc);
3692 return t;
3695 /* Return a tree for the case when the result of an expression is RESULT
3696 converted to TYPE and OMITTED1 and OMITTED2 were previously operands
3697 of the expression but are now not needed.
3699 If OMITTED1 or OMITTED2 has side effects, they must be evaluated.
3700 If both OMITTED1 and OMITTED2 have side effects, OMITTED1 is
3701 evaluated before OMITTED2. Otherwise, if neither has side effects,
3702 just do the conversion of RESULT to TYPE. */
3704 tree
3705 omit_two_operands_loc (location_t loc, tree type, tree result,
3706 tree omitted1, tree omitted2)
3708 tree t = fold_convert_loc (loc, type, result);
3710 if (TREE_SIDE_EFFECTS (omitted2))
3712 t = build2 (COMPOUND_EXPR, type, omitted2, t);
3713 SET_EXPR_LOCATION (t, loc);
3715 if (TREE_SIDE_EFFECTS (omitted1))
3717 t = build2 (COMPOUND_EXPR, type, omitted1, t);
3718 SET_EXPR_LOCATION (t, loc);
3721 return TREE_CODE (t) != COMPOUND_EXPR ? non_lvalue_loc (loc, t) : t;
3725 /* Return a simplified tree node for the truth-negation of ARG. This
3726 never alters ARG itself. We assume that ARG is an operation that
3727 returns a truth value (0 or 1).
3729 FIXME: one would think we would fold the result, but it causes
3730 problems with the dominator optimizer. */
3732 tree
3733 fold_truth_not_expr (location_t loc, tree arg)
3735 tree t, type = TREE_TYPE (arg);
3736 enum tree_code code = TREE_CODE (arg);
3737 location_t loc1, loc2;
3739 /* If this is a comparison, we can simply invert it, except for
3740 floating-point non-equality comparisons, in which case we just
3741 enclose a TRUTH_NOT_EXPR around what we have. */
3743 if (TREE_CODE_CLASS (code) == tcc_comparison)
3745 tree op_type = TREE_TYPE (TREE_OPERAND (arg, 0));
3746 if (FLOAT_TYPE_P (op_type)
3747 && flag_trapping_math
3748 && code != ORDERED_EXPR && code != UNORDERED_EXPR
3749 && code != NE_EXPR && code != EQ_EXPR)
3750 return NULL_TREE;
3752 code = invert_tree_comparison (code, HONOR_NANS (TYPE_MODE (op_type)));
3753 if (code == ERROR_MARK)
3754 return NULL_TREE;
3756 t = build2 (code, type, TREE_OPERAND (arg, 0), TREE_OPERAND (arg, 1));
3757 SET_EXPR_LOCATION (t, loc);
3758 return t;
3761 switch (code)
3763 case INTEGER_CST:
3764 return constant_boolean_node (integer_zerop (arg), type);
3766 case TRUTH_AND_EXPR:
3767 loc1 = EXPR_LOCATION (TREE_OPERAND (arg, 0));
3768 loc2 = EXPR_LOCATION (TREE_OPERAND (arg, 1));
3769 if (loc1 == UNKNOWN_LOCATION)
3770 loc1 = loc;
3771 if (loc2 == UNKNOWN_LOCATION)
3772 loc2 = loc;
3773 t = build2 (TRUTH_OR_EXPR, type,
3774 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)),
3775 invert_truthvalue_loc (loc2, TREE_OPERAND (arg, 1)));
3776 break;
3778 case TRUTH_OR_EXPR:
3779 loc1 = EXPR_LOCATION (TREE_OPERAND (arg, 0));
3780 loc2 = EXPR_LOCATION (TREE_OPERAND (arg, 1));
3781 if (loc1 == UNKNOWN_LOCATION)
3782 loc1 = loc;
3783 if (loc2 == UNKNOWN_LOCATION)
3784 loc2 = loc;
3785 t = build2 (TRUTH_AND_EXPR, type,
3786 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)),
3787 invert_truthvalue_loc (loc2, TREE_OPERAND (arg, 1)));
3788 break;
3790 case TRUTH_XOR_EXPR:
3791 /* Here we can invert either operand. We invert the first operand
3792 unless the second operand is a TRUTH_NOT_EXPR, in which case our
3793 result is the XOR of the first operand with the inside of the
3794 negation of the second operand. */
3796 if (TREE_CODE (TREE_OPERAND (arg, 1)) == TRUTH_NOT_EXPR)
3797 t = build2 (TRUTH_XOR_EXPR, type, TREE_OPERAND (arg, 0),
3798 TREE_OPERAND (TREE_OPERAND (arg, 1), 0));
3799 else
3800 t = build2 (TRUTH_XOR_EXPR, type,
3801 invert_truthvalue_loc (loc, TREE_OPERAND (arg, 0)),
3802 TREE_OPERAND (arg, 1));
3803 break;
3805 case TRUTH_ANDIF_EXPR:
3806 loc1 = EXPR_LOCATION (TREE_OPERAND (arg, 0));
3807 loc2 = EXPR_LOCATION (TREE_OPERAND (arg, 1));
3808 if (loc1 == UNKNOWN_LOCATION)
3809 loc1 = loc;
3810 if (loc2 == UNKNOWN_LOCATION)
3811 loc2 = loc;
3812 t = build2 (TRUTH_ORIF_EXPR, type,
3813 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)),
3814 invert_truthvalue_loc (loc2, TREE_OPERAND (arg, 1)));
3815 break;
3817 case TRUTH_ORIF_EXPR:
3818 loc1 = EXPR_LOCATION (TREE_OPERAND (arg, 0));
3819 loc2 = EXPR_LOCATION (TREE_OPERAND (arg, 1));
3820 if (loc1 == UNKNOWN_LOCATION)
3821 loc1 = loc;
3822 if (loc2 == UNKNOWN_LOCATION)
3823 loc2 = loc;
3824 t = build2 (TRUTH_ANDIF_EXPR, type,
3825 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)),
3826 invert_truthvalue_loc (loc2, TREE_OPERAND (arg, 1)));
3827 break;
3829 case TRUTH_NOT_EXPR:
3830 return TREE_OPERAND (arg, 0);
3832 case COND_EXPR:
3834 tree arg1 = TREE_OPERAND (arg, 1);
3835 tree arg2 = TREE_OPERAND (arg, 2);
3837 loc1 = EXPR_LOCATION (TREE_OPERAND (arg, 1));
3838 loc2 = EXPR_LOCATION (TREE_OPERAND (arg, 2));
3839 if (loc1 == UNKNOWN_LOCATION)
3840 loc1 = loc;
3841 if (loc2 == UNKNOWN_LOCATION)
3842 loc2 = loc;
3844 /* A COND_EXPR may have a throw as one operand, which
3845 then has void type. Just leave void operands
3846 as they are. */
3847 t = build3 (COND_EXPR, type, TREE_OPERAND (arg, 0),
3848 VOID_TYPE_P (TREE_TYPE (arg1))
3849 ? arg1 : invert_truthvalue_loc (loc1, arg1),
3850 VOID_TYPE_P (TREE_TYPE (arg2))
3851 ? arg2 : invert_truthvalue_loc (loc2, arg2));
3852 break;
3855 case COMPOUND_EXPR:
3856 loc1 = EXPR_LOCATION (TREE_OPERAND (arg, 1));
3857 if (loc1 == UNKNOWN_LOCATION)
3858 loc1 = loc;
3859 t = build2 (COMPOUND_EXPR, type,
3860 TREE_OPERAND (arg, 0),
3861 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 1)));
3862 break;
3864 case NON_LVALUE_EXPR:
3865 loc1 = EXPR_LOCATION (TREE_OPERAND (arg, 0));
3866 if (loc1 == UNKNOWN_LOCATION)
3867 loc1 = loc;
3868 return invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0));
3870 CASE_CONVERT:
3871 if (TREE_CODE (TREE_TYPE (arg)) == BOOLEAN_TYPE)
3873 t = build1 (TRUTH_NOT_EXPR, type, arg);
3874 break;
3877 /* ... fall through ... */
3879 case FLOAT_EXPR:
3880 loc1 = EXPR_LOCATION (TREE_OPERAND (arg, 0));
3881 if (loc1 == UNKNOWN_LOCATION)
3882 loc1 = loc;
3883 t = build1 (TREE_CODE (arg), type,
3884 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)));
3885 break;
3887 case BIT_AND_EXPR:
3888 if (!integer_onep (TREE_OPERAND (arg, 1)))
3889 return NULL_TREE;
3890 t = build2 (EQ_EXPR, type, arg, build_int_cst (type, 0));
3891 break;
3893 case SAVE_EXPR:
3894 t = build1 (TRUTH_NOT_EXPR, type, arg);
3895 break;
3897 case CLEANUP_POINT_EXPR:
3898 loc1 = EXPR_LOCATION (TREE_OPERAND (arg, 0));
3899 if (loc1 == UNKNOWN_LOCATION)
3900 loc1 = loc;
3901 t = build1 (CLEANUP_POINT_EXPR, type,
3902 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)));
3903 break;
3905 default:
3906 t = NULL_TREE;
3907 break;
3910 if (t)
3911 SET_EXPR_LOCATION (t, loc);
3913 return t;
3916 /* Return a simplified tree node for the truth-negation of ARG. This
3917 never alters ARG itself. We assume that ARG is an operation that
3918 returns a truth value (0 or 1).
3920 FIXME: one would think we would fold the result, but it causes
3921 problems with the dominator optimizer. */
3923 tree
3924 invert_truthvalue_loc (location_t loc, tree arg)
3926 tree tem;
3928 if (TREE_CODE (arg) == ERROR_MARK)
3929 return arg;
3931 tem = fold_truth_not_expr (loc, arg);
3932 if (!tem)
3934 tem = build1 (TRUTH_NOT_EXPR, TREE_TYPE (arg), arg);
3935 SET_EXPR_LOCATION (tem, loc);
3938 return tem;
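/* Illustration (hypothetical integer condition, not taken from this
   file):  for TRUTH_ANDIF_EXPR <a > 0, b > 0>, fold_truth_not_expr
   applies De Morgan's law and invert_tree_comparison to produce
   TRUTH_ORIF_EXPR <a <= 0, b <= 0> rather than wrapping the whole
   expression in a TRUTH_NOT_EXPR; invert_truthvalue_loc only falls
   back to the explicit negation when no such rewrite is found.  For
   floating-point operands with NaNs honored, invert_tree_comparison
   returns ERROR_MARK and the comparison rewrite is abandoned.  */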
3941 /* Given a bit-wise operation CODE applied to ARG0 and ARG1, see if both
3942 operands are another bit-wise operation with a common input. If so,
3943 distribute the bit operations to save an operation and possibly two if
3944 constants are involved. For example, convert
3945 (A | B) & (A | C) into A | (B & C)
3946 Further simplification will occur if B and C are constants.
3948 If this optimization cannot be done, 0 will be returned. */
3950 static tree
3951 distribute_bit_expr (location_t loc, enum tree_code code, tree type,
3952 tree arg0, tree arg1)
3954 tree common;
3955 tree left, right;
3957 if (TREE_CODE (arg0) != TREE_CODE (arg1)
3958 || TREE_CODE (arg0) == code
3959 || (TREE_CODE (arg0) != BIT_AND_EXPR
3960 && TREE_CODE (arg0) != BIT_IOR_EXPR))
3961 return 0;
3963 if (operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 0), 0))
3965 common = TREE_OPERAND (arg0, 0);
3966 left = TREE_OPERAND (arg0, 1);
3967 right = TREE_OPERAND (arg1, 1);
3969 else if (operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 1), 0))
3971 common = TREE_OPERAND (arg0, 0);
3972 left = TREE_OPERAND (arg0, 1);
3973 right = TREE_OPERAND (arg1, 0);
3975 else if (operand_equal_p (TREE_OPERAND (arg0, 1), TREE_OPERAND (arg1, 0), 0))
3977 common = TREE_OPERAND (arg0, 1);
3978 left = TREE_OPERAND (arg0, 0);
3979 right = TREE_OPERAND (arg1, 1);
3981 else if (operand_equal_p (TREE_OPERAND (arg0, 1), TREE_OPERAND (arg1, 1), 0))
3983 common = TREE_OPERAND (arg0, 1);
3984 left = TREE_OPERAND (arg0, 0);
3985 right = TREE_OPERAND (arg1, 0);
3987 else
3988 return 0;
3990 common = fold_convert_loc (loc, type, common);
3991 left = fold_convert_loc (loc, type, left);
3992 right = fold_convert_loc (loc, type, right);
3993 return fold_build2_loc (loc, TREE_CODE (arg0), type, common,
3994 fold_build2_loc (loc, code, type, left, right));
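/* Worked example (hypothetical integer operands):  for
   (x | 4) & (x | 5) the common operand is x, so the function above
   rebuilds the tree as x | (4 & 5); fold_build2_loc then folds the
   constant operand, leaving x | 4 -- one bitwise operation instead
   of three.  */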
3997 /* Knowing that ARG0 and ARG1 are both RDIV_EXPRs, simplify a binary operation
3998 with code CODE. This optimization is unsafe. */
3999 static tree
4000 distribute_real_division (location_t loc, enum tree_code code, tree type,
4001 tree arg0, tree arg1)
4003 bool mul0 = TREE_CODE (arg0) == MULT_EXPR;
4004 bool mul1 = TREE_CODE (arg1) == MULT_EXPR;
4006 /* (A / C) +- (B / C) -> (A +- B) / C. */
4007 if (mul0 == mul1
4008 && operand_equal_p (TREE_OPERAND (arg0, 1),
4009 TREE_OPERAND (arg1, 1), 0))
4010 return fold_build2_loc (loc, mul0 ? MULT_EXPR : RDIV_EXPR, type,
4011 fold_build2_loc (loc, code, type,
4012 TREE_OPERAND (arg0, 0),
4013 TREE_OPERAND (arg1, 0)),
4014 TREE_OPERAND (arg0, 1));
4016 /* (A / C1) +- (A / C2) -> A * (1 / C1 +- 1 / C2). */
4017 if (operand_equal_p (TREE_OPERAND (arg0, 0),
4018 TREE_OPERAND (arg1, 0), 0)
4019 && TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
4020 && TREE_CODE (TREE_OPERAND (arg1, 1)) == REAL_CST)
4022 REAL_VALUE_TYPE r0, r1;
4023 r0 = TREE_REAL_CST (TREE_OPERAND (arg0, 1));
4024 r1 = TREE_REAL_CST (TREE_OPERAND (arg1, 1));
4025 if (!mul0)
4026 real_arithmetic (&r0, RDIV_EXPR, &dconst1, &r0);
4027 if (!mul1)
4028 real_arithmetic (&r1, RDIV_EXPR, &dconst1, &r1);
4029 real_arithmetic (&r0, code, &r0, &r1);
4030 return fold_build2_loc (loc, MULT_EXPR, type,
4031 TREE_OPERAND (arg0, 0),
4032 build_real (type, r0));
4035 return NULL_TREE;
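/* Worked example (hypothetical operands; the rewrite can change
   rounding, which is why the comment above calls it unsafe):
   a / 3.0 + b / 3.0 becomes (a + b) / 3.0, saving a division, while
   a / 2.0 + a / 4.0 matches the second pattern and becomes
   a * (1.0/2.0 + 1.0/4.0), i.e. a * 0.75, with the reciprocal sum
   computed at compile time.  */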
4038 /* Return a BIT_FIELD_REF of type TYPE to refer to BITSIZE bits of INNER
4039 starting at BITPOS. The field is unsigned if UNSIGNEDP is nonzero. */
4041 static tree
4042 make_bit_field_ref (location_t loc, tree inner, tree type,
4043 HOST_WIDE_INT bitsize, HOST_WIDE_INT bitpos, int unsignedp)
4045 tree result, bftype;
4047 if (bitpos == 0)
4049 tree size = TYPE_SIZE (TREE_TYPE (inner));
4050 if ((INTEGRAL_TYPE_P (TREE_TYPE (inner))
4051 || POINTER_TYPE_P (TREE_TYPE (inner)))
4052 && host_integerp (size, 0)
4053 && tree_low_cst (size, 0) == bitsize)
4054 return fold_convert_loc (loc, type, inner);
4057 bftype = type;
4058 if (TYPE_PRECISION (bftype) != bitsize
4059 || TYPE_UNSIGNED (bftype) == !unsignedp)
4060 bftype = build_nonstandard_integer_type (bitsize, 0);
4062 result = build3 (BIT_FIELD_REF, bftype, inner,
4063 size_int (bitsize), bitsize_int (bitpos));
4064 SET_EXPR_LOCATION (result, loc);
4066 if (bftype != type)
4067 result = fold_convert_loc (loc, type, result);
4069 return result;
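/* Sketch of the resulting tree (hypothetical operands):  asked for 8
   bits at bit position 16 of a 32-bit object OBJ, this function
   produces BIT_FIELD_REF <OBJ, 8, 16> with an 8-bit result type, the
   second and third operands being the size in bits and the bit
   offset; the bitpos == 0 shortcut above instead returns a plain
   conversion when the reference covers the whole object.  */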
4072 /* Optimize a bit-field compare.
4074 There are two cases: the first is a compare against a constant and the
4075 second is a comparison of two items where the fields are at the same
4076 bit position relative to the start of a chunk (byte, halfword, word)
4077 large enough to contain it. In these cases we can avoid the shift
4078 implicit in bitfield extractions.
4080 For constants, we emit a compare of the shifted constant with the
4081 BIT_AND_EXPR of a mask and a byte, halfword, or word of the operand being
4082 compared. For two fields at the same position, we do the ANDs with a
4083 similar mask and compare the result of the ANDs.
4085 CODE is the comparison code, known to be either NE_EXPR or EQ_EXPR.
4086 COMPARE_TYPE is the type of the comparison, and LHS and RHS
4087 are the left and right operands of the comparison, respectively.
4089 If the optimization described above can be done, we return the resulting
4090 tree. Otherwise we return zero. */
4092 static tree
4093 optimize_bit_field_compare (location_t loc, enum tree_code code,
4094 tree compare_type, tree lhs, tree rhs)
4096 HOST_WIDE_INT lbitpos, lbitsize, rbitpos, rbitsize, nbitpos, nbitsize;
4097 tree type = TREE_TYPE (lhs);
4098 tree signed_type, unsigned_type;
4099 int const_p = TREE_CODE (rhs) == INTEGER_CST;
4100 enum machine_mode lmode, rmode, nmode;
4101 int lunsignedp, runsignedp;
4102 int lvolatilep = 0, rvolatilep = 0;
4103 tree linner, rinner = NULL_TREE;
4104 tree mask;
4105 tree offset;
4107 /* Get all the information about the extractions being done. If the bit size
4108 is the same as the size of the underlying object, we aren't doing an
4109 extraction at all and so can do nothing. We also don't want to
4110 do anything if the inner expression is a PLACEHOLDER_EXPR since we
4111 then will no longer be able to replace it. */
4112 linner = get_inner_reference (lhs, &lbitsize, &lbitpos, &offset, &lmode,
4113 &lunsignedp, &lvolatilep, false);
4114 if (linner == lhs || lbitsize == GET_MODE_BITSIZE (lmode) || lbitsize < 0
4115 || offset != 0 || TREE_CODE (linner) == PLACEHOLDER_EXPR)
4116 return 0;
4118 if (!const_p)
4120 /* If this is not a constant, we can only do something if bit positions,
4121 sizes, and signedness are the same. */
4122 rinner = get_inner_reference (rhs, &rbitsize, &rbitpos, &offset, &rmode,
4123 &runsignedp, &rvolatilep, false);
4125 if (rinner == rhs || lbitpos != rbitpos || lbitsize != rbitsize
4126 || lunsignedp != runsignedp || offset != 0
4127 || TREE_CODE (rinner) == PLACEHOLDER_EXPR)
4128 return 0;
4131 /* See if we can find a mode to refer to this field. We should be able to,
4132 but fail if we can't. */
4133 nmode = get_best_mode (lbitsize, lbitpos,
4134 const_p ? TYPE_ALIGN (TREE_TYPE (linner))
4135 : MIN (TYPE_ALIGN (TREE_TYPE (linner)),
4136 TYPE_ALIGN (TREE_TYPE (rinner))),
4137 word_mode, lvolatilep || rvolatilep);
4138 if (nmode == VOIDmode)
4139 return 0;
4141 /* Set signed and unsigned types of the precision of this mode for the
4142 shifts below. */
4143 signed_type = lang_hooks.types.type_for_mode (nmode, 0);
4144 unsigned_type = lang_hooks.types.type_for_mode (nmode, 1);
4146 /* Compute the bit position and size for the new reference and our offset
4147 within it. If the new reference is the same size as the original, we
4148 won't optimize anything, so return zero. */
4149 nbitsize = GET_MODE_BITSIZE (nmode);
4150 nbitpos = lbitpos & ~ (nbitsize - 1);
4151 lbitpos -= nbitpos;
4152 if (nbitsize == lbitsize)
4153 return 0;
4155 if (BYTES_BIG_ENDIAN)
4156 lbitpos = nbitsize - lbitsize - lbitpos;
4158 /* Make the mask to be used against the extracted field. */
4159 mask = build_int_cst_type (unsigned_type, -1);
4160 mask = const_binop (LSHIFT_EXPR, mask, size_int (nbitsize - lbitsize), 0);
4161 mask = const_binop (RSHIFT_EXPR, mask,
4162 size_int (nbitsize - lbitsize - lbitpos), 0);
4164 if (! const_p)
4165 /* If not comparing with constant, just rework the comparison
4166 and return. */
4167 return fold_build2_loc (loc, code, compare_type,
4168 fold_build2_loc (loc, BIT_AND_EXPR, unsigned_type,
4169 make_bit_field_ref (loc, linner,
4170 unsigned_type,
4171 nbitsize, nbitpos,
4173 mask),
4174 fold_build2_loc (loc, BIT_AND_EXPR, unsigned_type,
4175 make_bit_field_ref (loc, rinner,
4176 unsigned_type,
4177 nbitsize, nbitpos,
4179 mask));
4181 /* Otherwise, we are handling the constant case. See if the constant is too
4182 big for the field. Warn and return a tree for 0 (false) if so. We do
4183 this not only for its own sake, but to avoid having to test for this
4184 error case below. If we didn't, we might generate wrong code.
4186 For unsigned fields, the constant shifted right by the field length should
4187 be all zero. For signed fields, the high-order bits should agree with
4188 the sign bit. */
4190 if (lunsignedp)
4192 if (! integer_zerop (const_binop (RSHIFT_EXPR,
4193 fold_convert_loc (loc,
4194 unsigned_type, rhs),
4195 size_int (lbitsize), 0)))
4197 warning (0, "comparison is always %d due to width of bit-field",
4198 code == NE_EXPR);
4199 return constant_boolean_node (code == NE_EXPR, compare_type);
4202 else
4204 tree tem = const_binop (RSHIFT_EXPR,
4205 fold_convert_loc (loc, signed_type, rhs),
4206 size_int (lbitsize - 1), 0);
4207 if (! integer_zerop (tem) && ! integer_all_onesp (tem))
4209 warning (0, "comparison is always %d due to width of bit-field",
4210 code == NE_EXPR);
4211 return constant_boolean_node (code == NE_EXPR, compare_type);
4215 /* Single-bit compares should always be against zero. */
4216 if (lbitsize == 1 && ! integer_zerop (rhs))
4218 code = code == EQ_EXPR ? NE_EXPR : EQ_EXPR;
4219 rhs = build_int_cst (type, 0);
4222 /* Make a new bitfield reference, shift the constant over the
4223 appropriate number of bits and mask it with the computed mask
4224 (in case this was a signed field). If we changed it, make a new one. */
4225 lhs = make_bit_field_ref (loc, linner, unsigned_type, nbitsize, nbitpos, 1);
4226 if (lvolatilep)
4228 TREE_SIDE_EFFECTS (lhs) = 1;
4229 TREE_THIS_VOLATILE (lhs) = 1;
4232 rhs = const_binop (BIT_AND_EXPR,
4233 const_binop (LSHIFT_EXPR,
4234 fold_convert_loc (loc, unsigned_type, rhs),
4235 size_int (lbitpos), 0),
4236 mask, 0);
4238 lhs = build2 (code, compare_type,
4239 build2 (BIT_AND_EXPR, unsigned_type, lhs, mask),
4240 rhs);
4241 SET_EXPR_LOCATION (lhs, loc);
4242 return lhs;
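/* Worked example (hypothetical struct, constant case, assuming a
   layout that places b at bit position 4 within a 32-bit word):  for

     struct s { unsigned a : 4; unsigned b : 4; } x;  ... x.b == 3 ...

   the comparison is rewritten roughly as

     (WORD (x) & 0xf0) == 0x30

   where WORD (x) is the word-sized load built by make_bit_field_ref:
   the constant is shifted into place (3 << 4) and masked, so the
   shift a plain bit-field extraction would need disappears.  */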
4245 /* Subroutine for fold_truthop: decode a field reference.
4247 If EXP is a comparison reference, we return the innermost reference.
4249 *PBITSIZE is set to the number of bits in the reference, *PBITPOS is
4250 set to the starting bit number.
4252 If the innermost field can be completely contained in a mode-sized
4253 unit, *PMODE is set to that mode. Otherwise, it is set to VOIDmode.
4255 *PVOLATILEP is set to 1 if any expression encountered is volatile;
4256 otherwise it is not changed.
4258 *PUNSIGNEDP is set to the signedness of the field.
4260 *PMASK is set to the mask used. This is either contained in a
4261 BIT_AND_EXPR or derived from the width of the field.
4263 *PAND_MASK is set to the mask found in a BIT_AND_EXPR, if any.
4265 Return 0 if this is not a component reference or is one that we can't
4266 do anything with. */
4268 static tree
4269 decode_field_reference (location_t loc, tree exp, HOST_WIDE_INT *pbitsize,
4270 HOST_WIDE_INT *pbitpos, enum machine_mode *pmode,
4271 int *punsignedp, int *pvolatilep,
4272 tree *pmask, tree *pand_mask)
4274 tree outer_type = 0;
4275 tree and_mask = 0;
4276 tree mask, inner, offset;
4277 tree unsigned_type;
4278 unsigned int precision;
4280 /* All the optimizations using this function assume integer fields.
4281 There are problems with FP fields since the type_for_size call
4282 below can fail for, e.g., XFmode. */
4283 if (! INTEGRAL_TYPE_P (TREE_TYPE (exp)))
4284 return 0;
4286 /* We are interested in the bare arrangement of bits, so strip everything
4287 that doesn't affect the machine mode. However, record the type of the
4288 outermost expression if it may matter below. */
4289 if (CONVERT_EXPR_P (exp)
4290 || TREE_CODE (exp) == NON_LVALUE_EXPR)
4291 outer_type = TREE_TYPE (exp);
4292 STRIP_NOPS (exp);
4294 if (TREE_CODE (exp) == BIT_AND_EXPR)
4296 and_mask = TREE_OPERAND (exp, 1);
4297 exp = TREE_OPERAND (exp, 0);
4298 STRIP_NOPS (exp); STRIP_NOPS (and_mask);
4299 if (TREE_CODE (and_mask) != INTEGER_CST)
4300 return 0;
4303 inner = get_inner_reference (exp, pbitsize, pbitpos, &offset, pmode,
4304 punsignedp, pvolatilep, false);
4305 if ((inner == exp && and_mask == 0)
4306 || *pbitsize < 0 || offset != 0
4307 || TREE_CODE (inner) == PLACEHOLDER_EXPR)
4308 return 0;
4310 /* If the number of bits in the reference is the same as the bitsize of
4311 the outer type, then the outer type gives the signedness. Otherwise
4312 (in case of a small bitfield) the signedness is unchanged. */
4313 if (outer_type && *pbitsize == TYPE_PRECISION (outer_type))
4314 *punsignedp = TYPE_UNSIGNED (outer_type);
4316 /* Compute the mask to access the bitfield. */
4317 unsigned_type = lang_hooks.types.type_for_size (*pbitsize, 1);
4318 precision = TYPE_PRECISION (unsigned_type);
4320 mask = build_int_cst_type (unsigned_type, -1);
4322 mask = const_binop (LSHIFT_EXPR, mask, size_int (precision - *pbitsize), 0);
4323 mask = const_binop (RSHIFT_EXPR, mask, size_int (precision - *pbitsize), 0);
4325 /* Merge it with the mask we found in the BIT_AND_EXPR, if any. */
4326 if (and_mask != 0)
4327 mask = fold_build2_loc (loc, BIT_AND_EXPR, unsigned_type,
4328 fold_convert_loc (loc, unsigned_type, and_mask), mask);
4330 *pmask = mask;
4331 *pand_mask = and_mask;
4332 return inner;
4335 /* Return nonzero if MASK represents a mask of SIZE ones in the low-order
4336 bit positions. */
4338 static int
4339 all_ones_mask_p (const_tree mask, int size)
4341 tree type = TREE_TYPE (mask);
4342 unsigned int precision = TYPE_PRECISION (type);
4343 tree tmask;
4345 tmask = build_int_cst_type (signed_type_for (type), -1);
4347 return
4348 tree_int_cst_equal (mask,
4349 const_binop (RSHIFT_EXPR,
4350 const_binop (LSHIFT_EXPR, tmask,
4351 size_int (precision - size),
4353 size_int (precision - size), 0));
4356 /* Subroutine for fold: determine if VAL is the INTEGER_CONST that
4357 represents the sign bit of EXP's type. If EXP represents a sign
4358 or zero extension, also test VAL against the unextended type.
4359 The return value is the (sub)expression whose sign bit is VAL,
4360 or NULL_TREE otherwise. */
4362 static tree
4363 sign_bit_p (tree exp, const_tree val)
4365 unsigned HOST_WIDE_INT mask_lo, lo;
4366 HOST_WIDE_INT mask_hi, hi;
4367 int width;
4368 tree t;
4370 /* Tree EXP must have an integral type. */
4371 t = TREE_TYPE (exp);
4372 if (! INTEGRAL_TYPE_P (t))
4373 return NULL_TREE;
4375 /* Tree VAL must be an integer constant. */
4376 if (TREE_CODE (val) != INTEGER_CST
4377 || TREE_OVERFLOW (val))
4378 return NULL_TREE;
4380 width = TYPE_PRECISION (t);
4381 if (width > HOST_BITS_PER_WIDE_INT)
4383 hi = (unsigned HOST_WIDE_INT) 1 << (width - HOST_BITS_PER_WIDE_INT - 1);
4384 lo = 0;
4386 mask_hi = ((unsigned HOST_WIDE_INT) -1
4387 >> (2 * HOST_BITS_PER_WIDE_INT - width));
4388 mask_lo = -1;
4390 else
4392 hi = 0;
4393 lo = (unsigned HOST_WIDE_INT) 1 << (width - 1);
4395 mask_hi = 0;
4396 mask_lo = ((unsigned HOST_WIDE_INT) -1
4397 >> (HOST_BITS_PER_WIDE_INT - width));
4400 /* We mask off those bits beyond TREE_TYPE (exp) so that we can
4401 treat VAL as if it were unsigned. */
4402 if ((TREE_INT_CST_HIGH (val) & mask_hi) == hi
4403 && (TREE_INT_CST_LOW (val) & mask_lo) == lo)
4404 return exp;
4406 /* Handle extension from a narrower type. */
4407 if (TREE_CODE (exp) == NOP_EXPR
4408 && TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0))) < width)
4409 return sign_bit_p (TREE_OPERAND (exp, 0), val);
4411 return NULL_TREE;
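/* Worked example:  for a 16-bit EXP, width == 16 fits in a
   HOST_WIDE_INT, so hi == 0, lo == 1 << 15 == 0x8000, and mask_lo
   keeps only the low 16 bits; VAL == 0x8000 therefore matches.  If
   EXP is a widening NOP_EXPR from an 8-bit operand, the recursion at
   the end also accepts 0x80, the sign bit of the unextended type.  */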
4414 /* Subroutine for fold_truthop: determine if an operand is simple enough
4415 to be evaluated unconditionally. */
4417 static int
4418 simple_operand_p (const_tree exp)
4420 /* Strip any conversions that don't change the machine mode. */
4421 STRIP_NOPS (exp);
4423 return (CONSTANT_CLASS_P (exp)
4424 || TREE_CODE (exp) == SSA_NAME
4425 || (DECL_P (exp)
4426 && ! TREE_ADDRESSABLE (exp)
4427 && ! TREE_THIS_VOLATILE (exp)
4428 && ! DECL_NONLOCAL (exp)
4429 /* Don't regard global variables as simple. They may be
4430 allocated in ways unknown to the compiler (shared memory,
4431 #pragma weak, etc). */
4432 && ! TREE_PUBLIC (exp)
4433 && ! DECL_EXTERNAL (exp)
4434 /* Loading a static variable is unduly expensive, but global
4435 registers aren't expensive. */
4436 && (! TREE_STATIC (exp) || DECL_REGISTER (exp))));
4439 /* The following functions are subroutines to fold_range_test and allow it to
4440 try to change a logical combination of comparisons into a range test.
4442 For example, both
4443 X == 2 || X == 3 || X == 4 || X == 5
4444 and
4445 X >= 2 && X <= 5
4446 are converted to
4447 (unsigned) (X - 2) <= 3
4449 We describe each set of comparisons as being either inside or outside
4450 a range, using a variable named like IN_P, and then describe the
4451 range with a lower and upper bound. If one of the bounds is omitted,
4452 it represents either the highest or lowest value of the type.
4454 In the comments below, we represent a range by two numbers in brackets
4455 preceded by a "+" to designate being inside that range, or a "-" to
4456 designate being outside that range, so the condition can be inverted by
4457 flipping the prefix. An omitted bound is represented by a "-". For
4458 example, "- [-, 10]" means being outside the range starting at the lowest
4459 possible value and ending at 10, in other words, being greater than 10.
4460 The range "+ [-, -]" is always true and hence the range "- [-, -]" is
4461 always false.
4463 We set up things so that the missing bounds are handled in a consistent
4464 manner so neither a missing bound nor "true" and "false" need to be
4465 handled using a special case. */
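#if 0
/* Hypothetical user code, for illustration: the machinery below
   recognizes each equality as the range + [c, c], merges the pieces
   into + [2, 5], and build_range_check emits the single test
   (unsigned) (x - 2) <= 3, since subtracting the low bound sends
   in-range values to [0, 3] and wraps everything else above 3.  */
static int
example_range_test (int x)
{
  return x == 2 || x == 3 || x == 4 || x == 5;
}
#endif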
4467 /* Return the result of applying CODE to ARG0 and ARG1, but handle the case
4468 of ARG0 and/or ARG1 being omitted, meaning an unlimited range. UPPER0_P
4469 and UPPER1_P are nonzero if the respective argument is an upper bound
4470 and zero for a lower. TYPE, if nonzero, is the type of the result; it
4471 must be specified for a comparison. ARG1 will be converted to ARG0's
4472 type if both are specified. */
4474 static tree
4475 range_binop (enum tree_code code, tree type, tree arg0, int upper0_p,
4476 tree arg1, int upper1_p)
4478 tree tem;
4479 int result;
4480 int sgn0, sgn1;
4482 /* If neither arg represents infinity, do the normal operation.
4483 Else, if not a comparison, return infinity. Else handle the special
4484 comparison rules. Note that most of the cases below won't occur, but
4485 are handled for consistency. */
4487 if (arg0 != 0 && arg1 != 0)
4489 tem = fold_build2 (code, type != 0 ? type : TREE_TYPE (arg0),
4490 arg0, fold_convert (TREE_TYPE (arg0), arg1));
4491 STRIP_NOPS (tem);
4492 return TREE_CODE (tem) == INTEGER_CST ? tem : 0;
4495 if (TREE_CODE_CLASS (code) != tcc_comparison)
4496 return 0;
4498 /* Set SGN[01] to -1 if ARG[01] is a lower bound, 1 for upper, and 0
4499 for neither. In real maths, we cannot assume open-ended ranges are
4500 the same. But, this is computer arithmetic, where numbers are finite.
4501 We can therefore make the transformation of any unbounded range with
4502 the value Z, Z being greater than any representable number. This permits
4503 us to treat unbounded ranges as equal. */
4504 sgn0 = arg0 != 0 ? 0 : (upper0_p ? 1 : -1);
4505 sgn1 = arg1 != 0 ? 0 : (upper1_p ? 1 : -1);
4506 switch (code)
4508 case EQ_EXPR:
4509 result = sgn0 == sgn1;
4510 break;
4511 case NE_EXPR:
4512 result = sgn0 != sgn1;
4513 break;
4514 case LT_EXPR:
4515 result = sgn0 < sgn1;
4516 break;
4517 case LE_EXPR:
4518 result = sgn0 <= sgn1;
4519 break;
4520 case GT_EXPR:
4521 result = sgn0 > sgn1;
4522 break;
4523 case GE_EXPR:
4524 result = sgn0 >= sgn1;
4525 break;
4526 default:
4527 gcc_unreachable ();
4530 return constant_boolean_node (result, type);
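/* Worked example:  with ARG0 omitted as a lower bound (sgn0 == -1)
   and ARG1 == 10 (sgn1 == 0), LT_EXPR yields true: the missing bound
   acts like a value below everything representable, which is what
   lets the callers handle a range such as + [-, 10] uniformly rather
   than special-casing the absent end.  */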
4533 /* Given EXP, a logical expression, set the range it is testing into
4534 variables denoted by PIN_P, PLOW, and PHIGH. Return the expression
4535 actually being tested. *PLOW and *PHIGH will be made of the same
4536 type as the returned expression. If EXP is not a comparison, we
4537 will most likely not be returning a useful value and range. Set
4538 *STRICT_OVERFLOW_P to true if the return value is only valid
4539 because signed overflow is undefined; otherwise, do not change
4540 *STRICT_OVERFLOW_P. */
4542 tree
4543 make_range (tree exp, int *pin_p, tree *plow, tree *phigh,
4544 bool *strict_overflow_p)
4546 enum tree_code code;
4547 tree arg0 = NULL_TREE, arg1 = NULL_TREE;
4548 tree exp_type = NULL_TREE, arg0_type = NULL_TREE;
4549 int in_p, n_in_p;
4550 tree low, high, n_low, n_high;
4551 location_t loc = EXPR_LOCATION (exp);
4553 /* Start with simply saying "EXP != 0" and then look at the code of EXP
4554 and see if we can refine the range. Some of the cases below may not
4555 happen, but it doesn't seem worth worrying about this. We "continue"
4556 the outer loop when we've changed something; otherwise we "break"
4557 the switch, which will "break" the while. */
4559 in_p = 0;
4560 low = high = build_int_cst (TREE_TYPE (exp), 0);
4562 while (1)
4564 code = TREE_CODE (exp);
4565 exp_type = TREE_TYPE (exp);
4567 if (IS_EXPR_CODE_CLASS (TREE_CODE_CLASS (code)))
4569 if (TREE_OPERAND_LENGTH (exp) > 0)
4570 arg0 = TREE_OPERAND (exp, 0);
4571 if (TREE_CODE_CLASS (code) == tcc_comparison
4572 || TREE_CODE_CLASS (code) == tcc_unary
4573 || TREE_CODE_CLASS (code) == tcc_binary)
4574 arg0_type = TREE_TYPE (arg0);
4575 if (TREE_CODE_CLASS (code) == tcc_binary
4576 || TREE_CODE_CLASS (code) == tcc_comparison
4577 || (TREE_CODE_CLASS (code) == tcc_expression
4578 && TREE_OPERAND_LENGTH (exp) > 1))
4579 arg1 = TREE_OPERAND (exp, 1);
4582 switch (code)
4584 case TRUTH_NOT_EXPR:
4585 in_p = ! in_p, exp = arg0;
4586 continue;
4588 case EQ_EXPR: case NE_EXPR:
4589 case LT_EXPR: case LE_EXPR: case GE_EXPR: case GT_EXPR:
4590 /* We can only do something if the range is testing for zero
4591 and if the second operand is an integer constant. Note that
4592 saying something is "in" the range we make is done by
4593 complementing IN_P, since it is set for the initial case of
4594 being not equal to zero; "out" leaves it alone. */
4595 if (low == 0 || high == 0
4596 || ! integer_zerop (low) || ! integer_zerop (high)
4597 || TREE_CODE (arg1) != INTEGER_CST)
4598 break;
4600 switch (code)
4602 case NE_EXPR: /* - [c, c] */
4603 low = high = arg1;
4604 break;
4605 case EQ_EXPR: /* + [c, c] */
4606 in_p = ! in_p, low = high = arg1;
4607 break;
4608 case GT_EXPR: /* - [-, c] */
4609 low = 0, high = arg1;
4610 break;
4611 case GE_EXPR: /* + [c, -] */
4612 in_p = ! in_p, low = arg1, high = 0;
4613 break;
4614 case LT_EXPR: /* - [c, -] */
4615 low = arg1, high = 0;
4616 break;
4617 case LE_EXPR: /* + [-, c] */
4618 in_p = ! in_p, low = 0, high = arg1;
4619 break;
4620 default:
4621 gcc_unreachable ();
4624 /* If this is an unsigned comparison, we also know that EXP is
4625 greater than or equal to zero. We base the range tests we make
4626 on that fact, so we record it here so we can parse existing
4627 range tests. We test arg0_type since often the return type
4628 of, e.g. EQ_EXPR, is boolean. */
4629 if (TYPE_UNSIGNED (arg0_type) && (low == 0 || high == 0))
4631 if (! merge_ranges (&n_in_p, &n_low, &n_high,
4632 in_p, low, high, 1,
4633 build_int_cst (arg0_type, 0),
4634 NULL_TREE))
4635 break;
4637 in_p = n_in_p, low = n_low, high = n_high;
4639 /* If the high bound is missing, but we have a nonzero low
4640 bound, reverse the range so it goes from zero to the low bound
4641 minus 1. */
4642 if (high == 0 && low && ! integer_zerop (low))
4644 in_p = ! in_p;
4645 high = range_binop (MINUS_EXPR, NULL_TREE, low, 0,
4646 integer_one_node, 0);
4647 low = build_int_cst (arg0_type, 0);
4651 exp = arg0;
4652 continue;
4654 case NEGATE_EXPR:
4655 /* (-x) IN [a,b] -> x in [-b, -a] */
4656 n_low = range_binop (MINUS_EXPR, exp_type,
4657 build_int_cst (exp_type, 0),
4658 0, high, 1);
4659 n_high = range_binop (MINUS_EXPR, exp_type,
4660 build_int_cst (exp_type, 0),
4661 0, low, 0);
4662 low = n_low, high = n_high;
4663 exp = arg0;
4664 continue;
4666 case BIT_NOT_EXPR:
4667 /* ~ X -> -X - 1 */
4668 exp = build2 (MINUS_EXPR, exp_type, negate_expr (arg0),
4669 build_int_cst (exp_type, 1));
4670 SET_EXPR_LOCATION (exp, loc);
4671 continue;
4673 case PLUS_EXPR: case MINUS_EXPR:
4674 if (TREE_CODE (arg1) != INTEGER_CST)
4675 break;
4677 /* If flag_wrapv and ARG0_TYPE is signed, then we cannot
4678 move a constant to the other side. */
4679 if (!TYPE_UNSIGNED (arg0_type)
4680 && !TYPE_OVERFLOW_UNDEFINED (arg0_type))
4681 break;
4683 /* If EXP is signed, any overflow in the computation is undefined,
4684 so we don't worry about it so long as our computations on
4685 the bounds don't overflow. For unsigned, overflow is defined
4686 and this is exactly the right thing. */
4687 n_low = range_binop (code == MINUS_EXPR ? PLUS_EXPR : MINUS_EXPR,
4688 arg0_type, low, 0, arg1, 0);
4689 n_high = range_binop (code == MINUS_EXPR ? PLUS_EXPR : MINUS_EXPR,
4690 arg0_type, high, 1, arg1, 0);
4691 if ((n_low != 0 && TREE_OVERFLOW (n_low))
4692 || (n_high != 0 && TREE_OVERFLOW (n_high)))
4693 break;
4695 if (TYPE_OVERFLOW_UNDEFINED (arg0_type))
4696 *strict_overflow_p = true;
4698 /* Check for an unsigned range which has wrapped around the maximum
4699 value thus making n_high < n_low, and normalize it. */
4700 if (n_low && n_high && tree_int_cst_lt (n_high, n_low))
4702 low = range_binop (PLUS_EXPR, arg0_type, n_high, 0,
4703 integer_one_node, 0);
4704 high = range_binop (MINUS_EXPR, arg0_type, n_low, 0,
4705 integer_one_node, 0);
4707 /* If the range is of the form +/- [ x+1, x ], we won't
4708 be able to normalize it. But then, it represents the
4709 whole range or the empty set, so make it
4710 +/- [ -, - ]. */
4711 if (tree_int_cst_equal (n_low, low)
4712 && tree_int_cst_equal (n_high, high))
4713 low = high = 0;
4714 else
4715 in_p = ! in_p;
4717 else
4718 low = n_low, high = n_high;
4720 exp = arg0;
4721 continue;
4723 CASE_CONVERT: case NON_LVALUE_EXPR:
4724 if (TYPE_PRECISION (arg0_type) > TYPE_PRECISION (exp_type))
4725 break;
4727 if (! INTEGRAL_TYPE_P (arg0_type)
4728 || (low != 0 && ! int_fits_type_p (low, arg0_type))
4729 || (high != 0 && ! int_fits_type_p (high, arg0_type)))
4730 break;
4732 n_low = low, n_high = high;
4734 if (n_low != 0)
4735 n_low = fold_convert_loc (loc, arg0_type, n_low);
4737 if (n_high != 0)
4738 n_high = fold_convert_loc (loc, arg0_type, n_high);
4741 /* If we're converting arg0 from an unsigned type to exp's
4742 signed type, we will be doing the comparison as unsigned.
4743 The tests above have already verified that LOW and HIGH
4744 are both positive.
4746 So we have to ensure that we will handle large unsigned
4747 values the same way that the current signed bounds treat
4748 negative values. */
4750 if (!TYPE_UNSIGNED (exp_type) && TYPE_UNSIGNED (arg0_type))
4752 tree high_positive;
4753 tree equiv_type;
4754 /* For fixed-point modes, we need to pass the saturating flag
4755 as the 2nd parameter. */
4756 if (ALL_FIXED_POINT_MODE_P (TYPE_MODE (arg0_type)))
4757 equiv_type = lang_hooks.types.type_for_mode
4758 (TYPE_MODE (arg0_type),
4759 TYPE_SATURATING (arg0_type));
4760 else
4761 equiv_type = lang_hooks.types.type_for_mode
4762 (TYPE_MODE (arg0_type), 1);
4764 /* A range without an upper bound is, naturally, unbounded.
4765 Since convert would have cropped a very large value, use
4766 the max value for the destination type. */
4767 high_positive
4768 = TYPE_MAX_VALUE (equiv_type) ? TYPE_MAX_VALUE (equiv_type)
4769 : TYPE_MAX_VALUE (arg0_type);
4771 if (TYPE_PRECISION (exp_type) == TYPE_PRECISION (arg0_type))
4772 high_positive = fold_build2_loc (loc, RSHIFT_EXPR, arg0_type,
4773 fold_convert_loc (loc, arg0_type,
4774 high_positive),
4775 build_int_cst (arg0_type, 1));
4777 /* If the low bound is specified, "and" the range with the
4778 range for which the original unsigned value will be
4779 positive. */
4780 if (low != 0)
4782 if (! merge_ranges (&n_in_p, &n_low, &n_high,
4783 1, n_low, n_high, 1,
4784 fold_convert_loc (loc, arg0_type,
4785 integer_zero_node),
4786 high_positive))
4787 break;
4789 in_p = (n_in_p == in_p);
4791 else
4793 /* Otherwise, "or" the range with the range of the input
4794 that will be interpreted as negative. */
4795 if (! merge_ranges (&n_in_p, &n_low, &n_high,
4796 0, n_low, n_high, 1,
4797 fold_convert_loc (loc, arg0_type,
4798 integer_zero_node),
4799 high_positive))
4800 break;
4802 in_p = (in_p != n_in_p);
4806 exp = arg0;
4807 low = n_low, high = n_high;
4808 continue;
4810 default:
4811 break;
4814 break;
4817 /* If EXP is a constant, we can evaluate whether this is true or false. */
4818 if (TREE_CODE (exp) == INTEGER_CST)
4820 in_p = in_p == (integer_onep (range_binop (GE_EXPR, integer_type_node,
4821 exp, 0, low, 0))
4822 && integer_onep (range_binop (LE_EXPR, integer_type_node,
4823 exp, 1, high, 1)));
4824 low = high = 0;
4825 exp = 0;
4828 *pin_p = in_p, *plow = low, *phigh = high;
4829 return exp;
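/* Worked example (hypothetical signed operand with undefined
   overflow):  for x + 10 < 30, the comparison case first records
   - [30, -], then the PLUS_EXPR case moves the constant across the
   bound, giving - [20, -] for x itself, i.e. x < 20;
   *STRICT_OVERFLOW_P is set because shifting the bound like this is
   only valid when signed overflow is undefined.  */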
4832 /* Given a range, LOW, HIGH, and IN_P, an expression, EXP, and a result
4833 type, TYPE, return an expression to test if EXP is in (or out of, depending
4834 on IN_P) the range. Return 0 if the test couldn't be created. */
4836 tree
4837 build_range_check (location_t loc, tree type, tree exp, int in_p,
4838 tree low, tree high)
4840 tree etype = TREE_TYPE (exp), value;
4842 #ifdef HAVE_canonicalize_funcptr_for_compare
4843 /* Disable this optimization for function pointer expressions
4844 on targets that require function pointer canonicalization. */
4845 if (HAVE_canonicalize_funcptr_for_compare
4846 && TREE_CODE (etype) == POINTER_TYPE
4847 && TREE_CODE (TREE_TYPE (etype)) == FUNCTION_TYPE)
4848 return NULL_TREE;
4849 #endif
4851 if (! in_p)
4853 value = build_range_check (loc, type, exp, 1, low, high);
4854 if (value != 0)
4855 return invert_truthvalue_loc (loc, value);
4857 return 0;
4860 if (low == 0 && high == 0)
4861 return build_int_cst (type, 1);
4863 if (low == 0)
4864 return fold_build2_loc (loc, LE_EXPR, type, exp,
4865 fold_convert_loc (loc, etype, high));
4867 if (high == 0)
4868 return fold_build2_loc (loc, GE_EXPR, type, exp,
4869 fold_convert_loc (loc, etype, low));
4871 if (operand_equal_p (low, high, 0))
4872 return fold_build2_loc (loc, EQ_EXPR, type, exp,
4873 fold_convert_loc (loc, etype, low));
4875 if (integer_zerop (low))
4877 if (! TYPE_UNSIGNED (etype))
4879 etype = unsigned_type_for (etype);
4880 high = fold_convert_loc (loc, etype, high);
4881 exp = fold_convert_loc (loc, etype, exp);
4883 return build_range_check (loc, type, exp, 1, 0, high);
4886 /* Optimize (c>=1) && (c<=127) into (signed char)c > 0. */
4887 if (integer_onep (low) && TREE_CODE (high) == INTEGER_CST)
4889 unsigned HOST_WIDE_INT lo;
4890 HOST_WIDE_INT hi;
4891 int prec;
4893 prec = TYPE_PRECISION (etype);
4894 if (prec <= HOST_BITS_PER_WIDE_INT)
4896 hi = 0;
4897 lo = ((unsigned HOST_WIDE_INT) 1 << (prec - 1)) - 1;
4899 else
4901 hi = ((HOST_WIDE_INT) 1 << (prec - HOST_BITS_PER_WIDE_INT - 1)) - 1;
4902 lo = (unsigned HOST_WIDE_INT) -1;
4905 if (TREE_INT_CST_HIGH (high) == hi && TREE_INT_CST_LOW (high) == lo)
4907 if (TYPE_UNSIGNED (etype))
4909 tree signed_etype = signed_type_for (etype);
4910 if (TYPE_PRECISION (signed_etype) != TYPE_PRECISION (etype))
4911 etype
4912 = build_nonstandard_integer_type (TYPE_PRECISION (etype), 0);
4913 else
4914 etype = signed_etype;
4915 exp = fold_convert_loc (loc, etype, exp);
4917 return fold_build2_loc (loc, GT_EXPR, type, exp,
4918 build_int_cst (etype, 0));
4922 /* Optimize (c>=low) && (c<=high) into (c-low>=0) && (c-low<=high-low).
4923 This requires wrap-around arithmetic for the type of the expression.
4924 First make sure that arithmetic in this type is valid, then make sure
4925 that it wraps around. */
4926 if (TREE_CODE (etype) == ENUMERAL_TYPE || TREE_CODE (etype) == BOOLEAN_TYPE)
4927 etype = lang_hooks.types.type_for_size (TYPE_PRECISION (etype),
4928 TYPE_UNSIGNED (etype));
4930 if (TREE_CODE (etype) == INTEGER_TYPE && !TYPE_OVERFLOW_WRAPS (etype))
4932 tree utype, minv, maxv;
4934 /* Check if (unsigned) INT_MAX + 1 == (unsigned) INT_MIN
4935 for the type in question, as we rely on this here. */
4936 utype = unsigned_type_for (etype);
4937 maxv = fold_convert_loc (loc, utype, TYPE_MAX_VALUE (etype));
4938 maxv = range_binop (PLUS_EXPR, NULL_TREE, maxv, 1,
4939 integer_one_node, 1);
4940 minv = fold_convert_loc (loc, utype, TYPE_MIN_VALUE (etype));
4942 if (integer_zerop (range_binop (NE_EXPR, integer_type_node,
4943 minv, 1, maxv, 1)))
4944 etype = utype;
4945 else
4946 return 0;
4949 high = fold_convert_loc (loc, etype, high);
4950 low = fold_convert_loc (loc, etype, low);
4951 exp = fold_convert_loc (loc, etype, exp);
4953 value = const_binop (MINUS_EXPR, high, low, 0);
4956 if (POINTER_TYPE_P (etype))
4958 if (value != 0 && !TREE_OVERFLOW (value))
4960 low = fold_convert_loc (loc, sizetype, low);
4961 low = fold_build1_loc (loc, NEGATE_EXPR, sizetype, low);
4962 return build_range_check (loc, type,
4963 fold_build2_loc (loc, POINTER_PLUS_EXPR,
4964 etype, exp, low),
4965 1, build_int_cst (etype, 0), value);
4967 return 0;
4970 if (value != 0 && !TREE_OVERFLOW (value))
4971 return build_range_check (loc, type,
4972 fold_build2_loc (loc, MINUS_EXPR, etype, exp, low),
4973 1, build_int_cst (etype, 0), value);
4975 return 0;
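/* Worked example of the special case above:  for an 8-bit unsigned
   ETYPE and the range + [1, 127], HIGH is the signed maximum
   (prec == 8, lo == 0x7f), so the test collapses to roughly
   (signed char) exp > 0 -- a single signed comparison against zero
   rather than a subtraction plus an unsigned compare.  */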
4978 /* Return the predecessor of VAL in its type, handling the infinite case. */
4980 static tree
4981 range_predecessor (tree val)
4983 tree type = TREE_TYPE (val);
4985 if (INTEGRAL_TYPE_P (type)
4986 && operand_equal_p (val, TYPE_MIN_VALUE (type), 0))
4987 return 0;
4988 else
4989 return range_binop (MINUS_EXPR, NULL_TREE, val, 0, integer_one_node, 0);
4992 /* Return the successor of VAL in its type, handling the infinite case. */
4994 static tree
4995 range_successor (tree val)
4997 tree type = TREE_TYPE (val);
4999 if (INTEGRAL_TYPE_P (type)
5000 && operand_equal_p (val, TYPE_MAX_VALUE (type), 0))
5001 return 0;
5002 else
5003 return range_binop (PLUS_EXPR, NULL_TREE, val, 0, integer_one_node, 0);
5006 /* Given two ranges, see if we can merge them into one. Return 1 if we
5007 can, 0 if we can't. Set the output range into the specified parameters. */
5009 bool
5010 merge_ranges (int *pin_p, tree *plow, tree *phigh, int in0_p, tree low0,
5011 tree high0, int in1_p, tree low1, tree high1)
5013 int no_overlap;
5014 int subset;
5015 int temp;
5016 tree tem;
5017 int in_p;
5018 tree low, high;
5019 int lowequal = ((low0 == 0 && low1 == 0)
5020 || integer_onep (range_binop (EQ_EXPR, integer_type_node,
5021 low0, 0, low1, 0)));
5022 int highequal = ((high0 == 0 && high1 == 0)
5023 || integer_onep (range_binop (EQ_EXPR, integer_type_node,
5024 high0, 1, high1, 1)));
5026 /* Make range 0 be the range that starts first, or ends last if they
5027 start at the same value. Swap them if necessary.
5028 if (integer_onep (range_binop (GT_EXPR, integer_type_node,
5029 low0, 0, low1, 0))
5030 || (lowequal
5031 && integer_onep (range_binop (GT_EXPR, integer_type_node,
5032 high1, 1, high0, 1))))
5034 temp = in0_p, in0_p = in1_p, in1_p = temp;
5035 tem = low0, low0 = low1, low1 = tem;
5036 tem = high0, high0 = high1, high1 = tem;
5039 /* Now flag two cases, whether the ranges are disjoint or whether the
5040 second range is totally subsumed in the first. Note that the tests
5041 below are simplified by the ones above. */
5042 no_overlap = integer_onep (range_binop (LT_EXPR, integer_type_node,
5043 high0, 1, low1, 0));
5044 subset = integer_onep (range_binop (LE_EXPR, integer_type_node,
5045 high1, 1, high0, 1));
5047 /* We now have four cases, depending on whether we are including or
5048 excluding the two ranges. */
5049 if (in0_p && in1_p)
5051 /* If they don't overlap, the result is false. If the second range
5052 is a subset it is the result. Otherwise, the range is from the start
5053 of the second to the end of the first. */
5054 if (no_overlap)
5055 in_p = 0, low = high = 0;
5056 else if (subset)
5057 in_p = 1, low = low1, high = high1;
5058 else
5059 in_p = 1, low = low1, high = high0;
5062 else if (in0_p && ! in1_p)
5064 /* If they don't overlap, the result is the first range. If they are
5065 equal, the result is false. If the second range is a subset of the
5066 first, and the ranges begin at the same place, we go from just after
5067 the end of the second range to the end of the first. If the second
5068 range is not a subset of the first, or if it is a subset and both
5069 ranges end at the same place, the range starts at the start of the
5070 first range and ends just before the second range.
5071 Otherwise, we can't describe this as a single range. */
5072 if (no_overlap)
5073 in_p = 1, low = low0, high = high0;
5074 else if (lowequal && highequal)
5075 in_p = 0, low = high = 0;
5076 else if (subset && lowequal)
5078 low = range_successor (high1);
5079 high = high0;
5080 in_p = 1;
5081 if (low == 0)
5083 /* We are in the weird situation where high0 > high1 but
5084 high1 has no successor. Punt. */
5085 return 0;
5088 else if (! subset || highequal)
5090 low = low0;
5091 high = range_predecessor (low1);
5092 in_p = 1;
5093 if (high == 0)
5095 /* low0 < low1 but low1 has no predecessor. Punt. */
5096 return 0;
5099 else
5100 return 0;
5103 else if (! in0_p && in1_p)
5105 /* If they don't overlap, the result is the second range. If the second
5106 is a subset of the first, the result is false. Otherwise,
5107 the range starts just after the first range and ends at the
5108 end of the second. */
5109 if (no_overlap)
5110 in_p = 1, low = low1, high = high1;
5111 else if (subset || highequal)
5112 in_p = 0, low = high = 0;
5113 else
5115 low = range_successor (high0);
5116 high = high1;
5117 in_p = 1;
5118 if (low == 0)
5120 /* high1 > high0 but high0 has no successor. Punt. */
5121 return 0;
5126 else
5128 /* The case where we are excluding both ranges. Here the complex case
5129 is if they don't overlap. In that case, the only time we have a
5130 range is if they are adjacent. If the second is a subset of the
5131 first, the result is the first. Otherwise, the range to exclude
5132 starts at the beginning of the first range and ends at the end of the
5133 second. */
5134 if (no_overlap)
5136 if (integer_onep (range_binop (EQ_EXPR, integer_type_node,
5137 range_successor (high0),
5138 1, low1, 0)))
5139 in_p = 0, low = low0, high = high1;
5140 else
5142 /* Canonicalize - [min, x] into - [-, x]. */
5143 if (low0 && TREE_CODE (low0) == INTEGER_CST)
5144 switch (TREE_CODE (TREE_TYPE (low0)))
5146 case ENUMERAL_TYPE:
5147 if (TYPE_PRECISION (TREE_TYPE (low0))
5148 != GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (low0))))
5149 break;
5150 /* FALLTHROUGH */
5151 case INTEGER_TYPE:
5152 if (tree_int_cst_equal (low0,
5153 TYPE_MIN_VALUE (TREE_TYPE (low0))))
5154 low0 = 0;
5155 break;
5156 case POINTER_TYPE:
5157 if (TYPE_UNSIGNED (TREE_TYPE (low0))
5158 && integer_zerop (low0))
5159 low0 = 0;
5160 break;
5161 default:
5162 break;
5165 /* Canonicalize - [x, max] into - [x, -]. */
5166 if (high1 && TREE_CODE (high1) == INTEGER_CST)
5167 switch (TREE_CODE (TREE_TYPE (high1)))
5169 case ENUMERAL_TYPE:
5170 if (TYPE_PRECISION (TREE_TYPE (high1))
5171 != GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (high1))))
5172 break;
5173 /* FALLTHROUGH */
5174 case INTEGER_TYPE:
5175 if (tree_int_cst_equal (high1,
5176 TYPE_MAX_VALUE (TREE_TYPE (high1))))
5177 high1 = 0;
5178 break;
5179 case POINTER_TYPE:
5180 if (TYPE_UNSIGNED (TREE_TYPE (high1))
5181 && integer_zerop (range_binop (PLUS_EXPR, NULL_TREE,
5182 high1, 1,
5183 integer_one_node, 1)))
5184 high1 = 0;
5185 break;
5186 default:
5187 break;
5190 /* The ranges might be also adjacent between the maximum and
5191 minimum values of the given type. For
5192 - [{min,-}, x] and - [y, {max,-}] ranges where x + 1 < y
5193 return + [x + 1, y - 1]. */
5194 if (low0 == 0 && high1 == 0)
5196 low = range_successor (high0);
5197 high = range_predecessor (low1);
5198 if (low == 0 || high == 0)
5199 return 0;
5201 in_p = 1;
5203 else
5204 return 0;
5207 else if (subset)
5208 in_p = 0, low = low0, high = high0;
5209 else
5210 in_p = 0, low = low0, high = high1;
5213 *pin_p = in_p, *plow = low, *phigh = high;
5214 return 1;
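/* Worked examples:  merging + [2, 5] with + [4, 9] (an AND of two
   range tests) takes the in0_p && in1_p branch with overlap but no
   subset, producing the intersection + [4, 5].  Merging - [2, 5]
   with - [6, 9] finds no_overlap true but the ranges adjacent
   (range_successor (5) == 6), so both exclusions combine into the
   single range - [2, 9].  */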
5218 /* Subroutine of fold, looking inside expressions of the form
5219 A op B ? A : C, where ARG0, ARG1 and ARG2 are the three operands
5220 of the COND_EXPR. This function is also used to optimize
5221 A op B ? C : A, by reversing the comparison first.
5223 Return a folded expression whose code is not a COND_EXPR
5224 anymore, or NULL_TREE if no folding opportunity is found. */
5226 static tree
5227 fold_cond_expr_with_comparison (location_t loc, tree type,
5228 tree arg0, tree arg1, tree arg2)
5230 enum tree_code comp_code = TREE_CODE (arg0);
5231 tree arg00 = TREE_OPERAND (arg0, 0);
5232 tree arg01 = TREE_OPERAND (arg0, 1);
5233 tree arg1_type = TREE_TYPE (arg1);
5234 tree tem;
5236 STRIP_NOPS (arg1);
5237 STRIP_NOPS (arg2);
5239 /* If we have A op 0 ? A : -A, consider applying the following
5240 transformations:
5242 A == 0? A : -A same as -A
5243 A != 0? A : -A same as A
5244 A >= 0? A : -A same as abs (A)
5245 A > 0? A : -A same as abs (A)
5246 A <= 0? A : -A same as -abs (A)
5247 A < 0? A : -A same as -abs (A)
5249 None of these transformations work for modes with signed
5250 zeros. If A is +/-0, the first two transformations will
5251 change the sign of the result (from +0 to -0, or vice
5252 versa). The last four will fix the sign of the result,
5253 even though the original expressions could be positive or
5254 negative, depending on the sign of A.
5256 Note that all these transformations are correct if A is
5257 NaN, since the two alternatives (A and -A) are also NaNs. */
5258 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (type))
5259 && (FLOAT_TYPE_P (TREE_TYPE (arg01))
5260 ? real_zerop (arg01)
5261 : integer_zerop (arg01))
5262 && ((TREE_CODE (arg2) == NEGATE_EXPR
5263 && operand_equal_p (TREE_OPERAND (arg2, 0), arg1, 0))
5264 /* In the case that A is of the form X-Y, '-A' (arg2) may
5265 have already been folded to Y-X, check for that. */
5266 || (TREE_CODE (arg1) == MINUS_EXPR
5267 && TREE_CODE (arg2) == MINUS_EXPR
5268 && operand_equal_p (TREE_OPERAND (arg1, 0),
5269 TREE_OPERAND (arg2, 1), 0)
5270 && operand_equal_p (TREE_OPERAND (arg1, 1),
5271 TREE_OPERAND (arg2, 0), 0))))
5272 switch (comp_code)
5274 case EQ_EXPR:
5275 case UNEQ_EXPR:
5276 tem = fold_convert_loc (loc, arg1_type, arg1);
5277 return pedantic_non_lvalue_loc (loc,
5278 fold_convert_loc (loc, type,
5279 negate_expr (tem)));
5280 case NE_EXPR:
5281 case LTGT_EXPR:
5282 return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
5283 case UNGE_EXPR:
5284 case UNGT_EXPR:
5285 if (flag_trapping_math)
5286 break;
5287 /* Fall through. */
5288 case GE_EXPR:
5289 case GT_EXPR:
5290 if (TYPE_UNSIGNED (TREE_TYPE (arg1)))
5291 arg1 = fold_convert_loc (loc, signed_type_for
5292 (TREE_TYPE (arg1)), arg1);
5293 tem = fold_build1_loc (loc, ABS_EXPR, TREE_TYPE (arg1), arg1);
5294 return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, tem));
5295 case UNLE_EXPR:
5296 case UNLT_EXPR:
5297 if (flag_trapping_math)
5298 break;
5299 case LE_EXPR:
5300 case LT_EXPR:
5301 if (TYPE_UNSIGNED (TREE_TYPE (arg1)))
5302 arg1 = fold_convert_loc (loc, signed_type_for
5303 (TREE_TYPE (arg1)), arg1);
5304 tem = fold_build1_loc (loc, ABS_EXPR, TREE_TYPE (arg1), arg1);
5305 return negate_expr (fold_convert_loc (loc, type, tem));
5306 default:
5307 gcc_assert (TREE_CODE_CLASS (comp_code) == tcc_comparison);
5308 break;
5311 /* A != 0 ? A : 0 is simply A, unless A is -0. Likewise
5312 A == 0 ? A : 0 is always 0 unless A is -0. Note that
5313 both transformations are correct when A is NaN: A != 0
5314 is then true, and A == 0 is false. */
5316 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (type))
5317 && integer_zerop (arg01) && integer_zerop (arg2))
5319 if (comp_code == NE_EXPR)
5320 return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
5321 else if (comp_code == EQ_EXPR)
5322 return build_int_cst (type, 0);
5325 /* Try some transformations of A op B ? A : B.
5327 A == B? A : B same as B
5328 A != B? A : B same as A
5329 A >= B? A : B same as max (A, B)
5330 A > B? A : B same as max (B, A)
5331 A <= B? A : B same as min (A, B)
5332 A < B? A : B same as min (B, A)
5334 As above, these transformations don't work in the presence
5335 of signed zeros. For example, if A and B are zeros of
5336 opposite sign, the first two transformations will change
5337 the sign of the result. In the last four, the original
5338 expressions give different results for (A=+0, B=-0) and
5339 (A=-0, B=+0), but the transformed expressions do not.
5341 The first two transformations are correct if either A or B
5342 is a NaN. In the first transformation, the condition will
5343 be false, and B will indeed be chosen. In the case of the
5344 second transformation, the condition A != B will be true,
5345 and A will be chosen.
5347 The conversions to max() and min() are not correct if B is
5348 a number and A is not. The conditions in the original
5349 expressions will be false, so all four give B. The min()
5350 and max() versions would give a NaN instead. */
5351 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (type))
5352 && operand_equal_for_comparison_p (arg01, arg2, arg00)
5353 /* Avoid these transformations if the COND_EXPR may be used
5354 as an lvalue in the C++ front-end. PR c++/19199. */
5355 && (in_gimple_form
5356 || (strcmp (lang_hooks.name, "GNU C++") != 0
5357 && strcmp (lang_hooks.name, "GNU Objective-C++") != 0)
5358 || ! maybe_lvalue_p (arg1)
5359 || ! maybe_lvalue_p (arg2)))
5361 tree comp_op0 = arg00;
5362 tree comp_op1 = arg01;
5363 tree comp_type = TREE_TYPE (comp_op0);
5365 /* Avoid adding NOP_EXPRs in case this is an lvalue. */
5366 if (TYPE_MAIN_VARIANT (comp_type) == TYPE_MAIN_VARIANT (type))
5368 comp_type = type;
5369 comp_op0 = arg1;
5370 comp_op1 = arg2;
5373 switch (comp_code)
5375 case EQ_EXPR:
5376 return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, arg2));
5377 case NE_EXPR:
5378 return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
5379 case LE_EXPR:
5380 case LT_EXPR:
5381 case UNLE_EXPR:
5382 case UNLT_EXPR:
5383 /* In C++ a ?: expression can be an lvalue, so put the
5384 operand which will be used if they are equal first
5385 so that we can convert this back to the
5386 corresponding COND_EXPR. */
5387 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
5389 comp_op0 = fold_convert_loc (loc, comp_type, comp_op0);
5390 comp_op1 = fold_convert_loc (loc, comp_type, comp_op1);
5391 tem = (comp_code == LE_EXPR || comp_code == UNLE_EXPR)
5392 ? fold_build2_loc (loc, MIN_EXPR, comp_type, comp_op0, comp_op1)
5393 : fold_build2_loc (loc, MIN_EXPR, comp_type,
5394 comp_op1, comp_op0);
5395 return pedantic_non_lvalue_loc (loc,
5396 fold_convert_loc (loc, type, tem));
5398 break;
5399 case GE_EXPR:
5400 case GT_EXPR:
5401 case UNGE_EXPR:
5402 case UNGT_EXPR:
5403 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
5405 comp_op0 = fold_convert_loc (loc, comp_type, comp_op0);
5406 comp_op1 = fold_convert_loc (loc, comp_type, comp_op1);
5407 tem = (comp_code == GE_EXPR || comp_code == UNGE_EXPR)
5408 ? fold_build2_loc (loc, MAX_EXPR, comp_type, comp_op0, comp_op1)
5409 : fold_build2_loc (loc, MAX_EXPR, comp_type,
5410 comp_op1, comp_op0);
5411 return pedantic_non_lvalue_loc (loc,
5412 fold_convert_loc (loc, type, tem));
5414 break;
5415 case UNEQ_EXPR:
5416 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
5417 return pedantic_non_lvalue_loc (loc,
5418 fold_convert_loc (loc, type, arg2));
5419 break;
5420 case LTGT_EXPR:
5421 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
5422 return pedantic_non_lvalue_loc (loc,
5423 fold_convert_loc (loc, type, arg1));
5424 break;
5425 default:
5426 gcc_assert (TREE_CODE_CLASS (comp_code) == tcc_comparison);
5427 break;
5431 /* If this is A op C1 ? A : C2 with C1 and C2 constant integers,
5432 we might still be able to simplify this. For example,
5433 if C1 is one less or one more than C2, this might have started
5434 out as a MIN or MAX and been transformed by this function.
5435 Only good for INTEGER_TYPEs, because we need TYPE_MAX_VALUE. */
5437 if (INTEGRAL_TYPE_P (type)
5438 && TREE_CODE (arg01) == INTEGER_CST
5439 && TREE_CODE (arg2) == INTEGER_CST)
5440 switch (comp_code)
5442 case EQ_EXPR:
5443 if (TREE_CODE (arg1) == INTEGER_CST)
5444 break;
5445 /* We can replace A with C1 in this case. */
5446 arg1 = fold_convert_loc (loc, type, arg01);
5447 return fold_build3_loc (loc, COND_EXPR, type, arg0, arg1, arg2);
5449 case LT_EXPR:
5450 /* If C1 is C2 + 1, this is min(A, C2), but use ARG00's type for
5451 MIN_EXPR, to preserve the signedness of the comparison. */
5452 if (! operand_equal_p (arg2, TYPE_MAX_VALUE (type),
5453 OEP_ONLY_CONST)
5454 && operand_equal_p (arg01,
5455 const_binop (PLUS_EXPR, arg2,
5456 build_int_cst (type, 1), 0),
5457 OEP_ONLY_CONST))
5459 tem = fold_build2_loc (loc, MIN_EXPR, TREE_TYPE (arg00), arg00,
5460 fold_convert_loc (loc, TREE_TYPE (arg00),
5461 arg2));
5462 return pedantic_non_lvalue_loc (loc,
5463 fold_convert_loc (loc, type, tem));
5465 break;
5467 case LE_EXPR:
5468 /* If C1 is C2 - 1, this is min(A, C2), with the same care
5469 as above. */
5470 if (! operand_equal_p (arg2, TYPE_MIN_VALUE (type),
5471 OEP_ONLY_CONST)
5472 && operand_equal_p (arg01,
5473 const_binop (MINUS_EXPR, arg2,
5474 build_int_cst (type, 1), 0),
5475 OEP_ONLY_CONST))
5477 tem = fold_build2_loc (loc, MIN_EXPR, TREE_TYPE (arg00), arg00,
5478 fold_convert_loc (loc, TREE_TYPE (arg00),
5479 arg2));
5480 return pedantic_non_lvalue_loc (loc,
5481 fold_convert_loc (loc, type, tem));
5483 break;
5485 case GT_EXPR:
5486 /* If C1 is C2 - 1, this is max(A, C2), but use ARG00's type for
5487 MAX_EXPR, to preserve the signedness of the comparison. */
5488 if (! operand_equal_p (arg2, TYPE_MIN_VALUE (type),
5489 OEP_ONLY_CONST)
5490 && operand_equal_p (arg01,
5491 const_binop (MINUS_EXPR, arg2,
5492 build_int_cst (type, 1), 0),
5493 OEP_ONLY_CONST))
5495 tem = fold_build2_loc (loc, MAX_EXPR, TREE_TYPE (arg00), arg00,
5496 fold_convert_loc (loc, TREE_TYPE (arg00),
5497 arg2));
5498 return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, tem));
5500 break;
5502 case GE_EXPR:
5503 /* If C1 is C2 + 1, this is max(A, C2), with the same care as above. */
5504 if (! operand_equal_p (arg2, TYPE_MAX_VALUE (type),
5505 OEP_ONLY_CONST)
5506 && operand_equal_p (arg01,
5507 const_binop (PLUS_EXPR, arg2,
5508 build_int_cst (type, 1), 0),
5509 OEP_ONLY_CONST))
5511 tem = fold_build2_loc (loc, MAX_EXPR, TREE_TYPE (arg00), arg00,
5512 fold_convert_loc (loc, TREE_TYPE (arg00),
5513 arg2));
5514 return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, tem));
5516 break;
5517 case NE_EXPR:
5518 break;
5519 default:
5520 gcc_unreachable ();
5523 return NULL_TREE;
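/* Worked examples (hypothetical operands, subject to the signed-zero
   and NaN caveats spelled out above):

     x > 0 ? x : -x    folds to ABS_EXPR <x>
     x >= y ? x : y    folds to MAX_EXPR <x, y>
     x < 2 ? x : 1     matches the LT_EXPR constant case, since
                       C1 == C2 + 1, and folds to MIN_EXPR <x, 1>.  */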
5528 #ifndef LOGICAL_OP_NON_SHORT_CIRCUIT
5529 #define LOGICAL_OP_NON_SHORT_CIRCUIT \
5530 (BRANCH_COST (optimize_function_for_speed_p (cfun), \
5531 false) >= 2)
5532 #endif
5534 /* EXP is some logical combination of boolean tests. See if we can
5535 merge it into some range test. Return the new tree if so. */
5537 static tree
5538 fold_range_test (location_t loc, enum tree_code code, tree type,
5539 tree op0, tree op1)
5541 int or_op = (code == TRUTH_ORIF_EXPR
5542 || code == TRUTH_OR_EXPR);
5543 int in0_p, in1_p, in_p;
5544 tree low0, low1, low, high0, high1, high;
5545 bool strict_overflow_p = false;
5546 tree lhs = make_range (op0, &in0_p, &low0, &high0, &strict_overflow_p);
5547 tree rhs = make_range (op1, &in1_p, &low1, &high1, &strict_overflow_p);
5548 tree tem;
5549 const char * const warnmsg = G_("assuming signed overflow does not occur "
5550 "when simplifying range test");
5552 /* If this is an OR operation, invert both sides; we will invert
5553 again at the end. */
5554 if (or_op)
5555 in0_p = ! in0_p, in1_p = ! in1_p;
5557 /* If both expressions are the same, if we can merge the ranges, and we
5558 can build the range test, return it or it inverted. If one of the
5559 ranges is always true or always false, consider it to be the same
5560 expression as the other. */
5561 if ((lhs == 0 || rhs == 0 || operand_equal_p (lhs, rhs, 0))
5562 && merge_ranges (&in_p, &low, &high, in0_p, low0, high0,
5563 in1_p, low1, high1)
5564 && 0 != (tem = (build_range_check (UNKNOWN_LOCATION, type,
5565 lhs != 0 ? lhs
5566 : rhs != 0 ? rhs : integer_zero_node,
5567 in_p, low, high))))
5569 if (strict_overflow_p)
5570 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_COMPARISON);
5571 return or_op ? invert_truthvalue_loc (loc, tem) : tem;
5574 /* On machines where the branch cost is expensive, if this is a
5575 short-circuited branch and the underlying object on both sides
5576 is the same, make a non-short-circuit operation. */
5577 else if (LOGICAL_OP_NON_SHORT_CIRCUIT
5578 && lhs != 0 && rhs != 0
5579 && (code == TRUTH_ANDIF_EXPR
5580 || code == TRUTH_ORIF_EXPR)
5581 && operand_equal_p (lhs, rhs, 0))
5583 /* If simple enough, just rewrite. Otherwise, make a SAVE_EXPR
5584 unless we are at top level or LHS contains a PLACEHOLDER_EXPR, in
5585 which cases we can't do this. */
5586 if (simple_operand_p (lhs))
5588 tem = build2 (code == TRUTH_ANDIF_EXPR
5589 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR,
5590 type, op0, op1);
5591 SET_EXPR_LOCATION (tem, loc);
5592 return tem;
5595 else if (lang_hooks.decls.global_bindings_p () == 0
5596 && ! CONTAINS_PLACEHOLDER_P (lhs))
5598 tree common = save_expr (lhs);
5600 if (0 != (lhs = build_range_check (loc, type, common,
5601 or_op ? ! in0_p : in0_p,
5602 low0, high0))
5603 && (0 != (rhs = build_range_check (loc, type, common,
5604 or_op ? ! in1_p : in1_p,
5605 low1, high1))))
5607 if (strict_overflow_p)
5608 fold_overflow_warning (warnmsg,
5609 WARN_STRICT_OVERFLOW_COMPARISON);
5610 tem = build2 (code == TRUTH_ANDIF_EXPR
5611 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR,
5612 type, lhs, rhs);
5613 SET_EXPR_LOCATION (tem, loc);
5614 return tem;
5619 return 0;
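#if 0
/* Hypothetical user code, for illustration: both comparisons make
   ranges over the same variable, merge_ranges joins them into
   + ['0', '9'], and the whole short-circuit AND becomes the single
   branch (unsigned) (c - '0') <= 9.  */
static int
example_is_digit (int c)
{
  return c >= '0' && c <= '9';
}
#endif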
5622 /* Subroutine for fold_truthop: C is an INTEGER_CST interpreted as a P
5623 bit value. Arrange things so the extra bits will be set to zero if and
5624 only if C is signed-extended to its full width. If MASK is nonzero,
5625 it is an INTEGER_CST that should be AND'ed with the extra bits. */
5627 static tree
5628 unextend (tree c, int p, int unsignedp, tree mask)
5630 tree type = TREE_TYPE (c);
5631 int modesize = GET_MODE_BITSIZE (TYPE_MODE (type));
5632 tree temp;
5634 if (p == modesize || unsignedp)
5635 return c;
5637 /* We work by getting just the sign bit into the low-order bit, then
5638 into the high-order bit, then sign-extend. We then XOR that value
5639 with C. */
5640 temp = const_binop (RSHIFT_EXPR, c, size_int (p - 1), 0);
5641 temp = const_binop (BIT_AND_EXPR, temp, size_int (1), 0);
5643 /* We must use a signed type in order to get an arithmetic right shift.
5644 However, we must also avoid introducing accidental overflows, so that
5645 a subsequent call to integer_zerop will work. Hence we must
5646 do the type conversion here. At this point, the constant is either
5647 zero or one, and the conversion to a signed type can never overflow.
5648 We could get an overflow if this conversion is done anywhere else. */
5649 if (TYPE_UNSIGNED (type))
5650 temp = fold_convert (signed_type_for (type), temp);
5652 temp = const_binop (LSHIFT_EXPR, temp, size_int (modesize - 1), 0);
5653 temp = const_binop (RSHIFT_EXPR, temp, size_int (modesize - p - 1), 0);
5654 if (mask != 0)
5655 temp = const_binop (BIT_AND_EXPR, temp,
5656 fold_convert (TREE_TYPE (c), mask), 0);
5658 /* If necessary, convert the type back to match the type of C. */
5659 if (TYPE_UNSIGNED (type))
5660 temp = fold_convert (type, temp);
5662 return fold_convert (type,
5663 const_binop (BIT_XOR_EXPR, c, temp, 0));
5666 /* Find ways of folding logical expressions of LHS and RHS:
5667 Try to merge two comparisons to the same innermost item.
5668 Look for range tests like "ch >= '0' && ch <= '9'".
5669 Look for combinations of simple terms on machines with expensive branches
5670 and evaluate the RHS unconditionally.
5672 For example, if we have p->a == 2 && p->b == 4 and we can make an
5673 object large enough to span both A and B, we can do this with a comparison
5674 against the object ANDed with a mask.
5676 If we have p->a == q->a && p->b == q->b, we may be able to use bit masking
5677 operations to do this with one comparison.
5679 We check for both normal comparisons and the BIT_AND_EXPRs made by this
5680 function and the one above.
5682 CODE is the logical operation being done. It can be TRUTH_ANDIF_EXPR,
5683 TRUTH_AND_EXPR, TRUTH_ORIF_EXPR, or TRUTH_OR_EXPR.
5685 TRUTH_TYPE is the type of the logical operand and LHS and RHS are its
5686 two operands.
5688 We return the simplified tree or 0 if no optimization is possible. */
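/* For illustration, with a target-dependent bit-field layout
   assumed for concreteness: given

     struct s { unsigned a : 4; unsigned b : 4; } *p;

   where A occupies bits 0-3 and B bits 4-7 of one byte, the test

     p->a == 2 && p->b == 4

   can become a single byte load and compare, roughly

     *(unsigned char *) p == 0x42

   with the AND omitted because the combined mask covers the whole
   byte.  */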
5690 static tree
5691 fold_truthop (location_t loc, enum tree_code code, tree truth_type,
5692 tree lhs, tree rhs)
5694 /* If this is the "or" of two comparisons, we can do something if
5695 the comparisons are NE_EXPR. If this is the "and", we can do something
5696 if the comparisons are EQ_EXPR. I.e.,
5697 (a->b == 2 && a->c == 4) can become (a->new == NEW).
5699 WANTED_CODE is this operation code. For single bit fields, we can
5700 convert EQ_EXPR to NE_EXPR so we need not reject the "wrong"
5701 comparison for one-bit fields. */
5703 enum tree_code wanted_code;
5704 enum tree_code lcode, rcode;
5705 tree ll_arg, lr_arg, rl_arg, rr_arg;
5706 tree ll_inner, lr_inner, rl_inner, rr_inner;
5707 HOST_WIDE_INT ll_bitsize, ll_bitpos, lr_bitsize, lr_bitpos;
5708 HOST_WIDE_INT rl_bitsize, rl_bitpos, rr_bitsize, rr_bitpos;
5709 HOST_WIDE_INT xll_bitpos, xlr_bitpos, xrl_bitpos, xrr_bitpos;
5710 HOST_WIDE_INT lnbitsize, lnbitpos, rnbitsize, rnbitpos;
5711 int ll_unsignedp, lr_unsignedp, rl_unsignedp, rr_unsignedp;
5712 enum machine_mode ll_mode, lr_mode, rl_mode, rr_mode;
5713 enum machine_mode lnmode, rnmode;
5714 tree ll_mask, lr_mask, rl_mask, rr_mask;
5715 tree ll_and_mask, lr_and_mask, rl_and_mask, rr_and_mask;
5716 tree l_const, r_const;
5717 tree lntype, rntype, result;
5718 HOST_WIDE_INT first_bit, end_bit;
5719 int volatilep;
5720 tree orig_lhs = lhs, orig_rhs = rhs;
5721 enum tree_code orig_code = code;
5723 /* Start by getting the comparison codes. Fail if anything is volatile.
5724 If one operand is a BIT_AND_EXPR with the constant one, treat it as if
5725 it were surrounded with a NE_EXPR. */
5727 if (TREE_SIDE_EFFECTS (lhs) || TREE_SIDE_EFFECTS (rhs))
5728 return 0;
5730 lcode = TREE_CODE (lhs);
5731 rcode = TREE_CODE (rhs);
5733 if (lcode == BIT_AND_EXPR && integer_onep (TREE_OPERAND (lhs, 1)))
5735 lhs = build2 (NE_EXPR, truth_type, lhs,
5736 build_int_cst (TREE_TYPE (lhs), 0));
5737 lcode = NE_EXPR;
5740 if (rcode == BIT_AND_EXPR && integer_onep (TREE_OPERAND (rhs, 1)))
5742 rhs = build2 (NE_EXPR, truth_type, rhs,
5743 build_int_cst (TREE_TYPE (rhs), 0));
5744 rcode = NE_EXPR;
5747 if (TREE_CODE_CLASS (lcode) != tcc_comparison
5748 || TREE_CODE_CLASS (rcode) != tcc_comparison)
5749 return 0;
5751 ll_arg = TREE_OPERAND (lhs, 0);
5752 lr_arg = TREE_OPERAND (lhs, 1);
5753 rl_arg = TREE_OPERAND (rhs, 0);
5754 rr_arg = TREE_OPERAND (rhs, 1);
5756 /* Simplify (x<y) && (x==y) into (x<=y) and related optimizations. */
5757 if (simple_operand_p (ll_arg)
5758 && simple_operand_p (lr_arg))
5760 tree result;
5761 if (operand_equal_p (ll_arg, rl_arg, 0)
5762 && operand_equal_p (lr_arg, rr_arg, 0))
5764 result = combine_comparisons (loc, code, lcode, rcode,
5765 truth_type, ll_arg, lr_arg);
5766 if (result)
5767 return result;
5769 else if (operand_equal_p (ll_arg, rr_arg, 0)
5770 && operand_equal_p (lr_arg, rl_arg, 0))
5772 result = combine_comparisons (loc, code, lcode,
5773 swap_tree_comparison (rcode),
5774 truth_type, ll_arg, lr_arg);
5775 if (result)
5776 return result;
5780 code = ((code == TRUTH_AND_EXPR || code == TRUTH_ANDIF_EXPR)
5781 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR);
5783 /* If the RHS can be evaluated unconditionally and its operands are
5784 simple, it wins to evaluate the RHS unconditionally on machines
5785 with expensive branches. In this case, this isn't a comparison
5786 that can be merged. Avoid doing this if the RHS is a floating-point
5787 comparison since those can trap. */
5789 if (BRANCH_COST (optimize_function_for_speed_p (cfun),
5790 false) >= 2
5791 && ! FLOAT_TYPE_P (TREE_TYPE (rl_arg))
5792 && simple_operand_p (rl_arg)
5793 && simple_operand_p (rr_arg))
5795 /* Convert (a != 0) || (b != 0) into (a | b) != 0. */
5796 if (code == TRUTH_OR_EXPR
5797 && lcode == NE_EXPR && integer_zerop (lr_arg)
5798 && rcode == NE_EXPR && integer_zerop (rr_arg)
5799 && TREE_TYPE (ll_arg) == TREE_TYPE (rl_arg)
5800 && INTEGRAL_TYPE_P (TREE_TYPE (ll_arg)))
5802 result = build2 (NE_EXPR, truth_type,
5803 build2 (BIT_IOR_EXPR, TREE_TYPE (ll_arg),
5804 ll_arg, rl_arg),
5805 build_int_cst (TREE_TYPE (ll_arg), 0));
5806 goto fold_truthop_exit;
5809 /* Convert (a == 0) && (b == 0) into (a | b) == 0. */
5810 if (code == TRUTH_AND_EXPR
5811 && lcode == EQ_EXPR && integer_zerop (lr_arg)
5812 && rcode == EQ_EXPR && integer_zerop (rr_arg)
5813 && TREE_TYPE (ll_arg) == TREE_TYPE (rl_arg)
5814 && INTEGRAL_TYPE_P (TREE_TYPE (ll_arg)))
5816 result = build2 (EQ_EXPR, truth_type,
5817 build2 (BIT_IOR_EXPR, TREE_TYPE (ll_arg),
5818 ll_arg, rl_arg),
5819 build_int_cst (TREE_TYPE (ll_arg), 0));
5820 goto fold_truthop_exit;
5823 if (LOGICAL_OP_NON_SHORT_CIRCUIT)
5825 if (code != orig_code || lhs != orig_lhs || rhs != orig_rhs)
5827 result = build2 (code, truth_type, lhs, rhs);
5828 goto fold_truthop_exit;
5830 return NULL_TREE;
5834 /* See if the comparisons can be merged. Then get all the parameters for
5835 each side. */
5837 if ((lcode != EQ_EXPR && lcode != NE_EXPR)
5838 || (rcode != EQ_EXPR && rcode != NE_EXPR))
5839 return 0;
5841 volatilep = 0;
5842 ll_inner = decode_field_reference (loc, ll_arg,
5843 &ll_bitsize, &ll_bitpos, &ll_mode,
5844 &ll_unsignedp, &volatilep, &ll_mask,
5845 &ll_and_mask);
5846 lr_inner = decode_field_reference (loc, lr_arg,
5847 &lr_bitsize, &lr_bitpos, &lr_mode,
5848 &lr_unsignedp, &volatilep, &lr_mask,
5849 &lr_and_mask);
5850 rl_inner = decode_field_reference (loc, rl_arg,
5851 &rl_bitsize, &rl_bitpos, &rl_mode,
5852 &rl_unsignedp, &volatilep, &rl_mask,
5853 &rl_and_mask);
5854 rr_inner = decode_field_reference (loc, rr_arg,
5855 &rr_bitsize, &rr_bitpos, &rr_mode,
5856 &rr_unsignedp, &volatilep, &rr_mask,
5857 &rr_and_mask);
5859 /* The inner operation on the lhs of each comparison must be the
5860 same if we are to be able to do anything.
5861 Then see if we have constants. If not, the same must be true for
5862 the rhs's. */
5863 if (volatilep || ll_inner == 0 || rl_inner == 0
5864 || ! operand_equal_p (ll_inner, rl_inner, 0))
5865 return 0;
5867 if (TREE_CODE (lr_arg) == INTEGER_CST
5868 && TREE_CODE (rr_arg) == INTEGER_CST)
5869 l_const = lr_arg, r_const = rr_arg;
5870 else if (lr_inner == 0 || rr_inner == 0
5871 || ! operand_equal_p (lr_inner, rr_inner, 0))
5872 return 0;
5873 else
5874 l_const = r_const = 0;
5876 /* If either comparison code is not correct for our logical operation,
5877 fail. However, we can convert a one-bit comparison against zero into
5878 the opposite comparison against that bit being set in the field. */
5880 wanted_code = (code == TRUTH_AND_EXPR ? EQ_EXPR : NE_EXPR);
5881 if (lcode != wanted_code)
5883 if (l_const && integer_zerop (l_const) && integer_pow2p (ll_mask))
5885 /* Make the left operand unsigned, since we are only interested
5886 in the value of one bit. Otherwise we are doing the wrong
5887 thing below. */
5888 ll_unsignedp = 1;
5889 l_const = ll_mask;
5891 else
5892 return 0;
5895 /* This is analogous to the code for l_const above. */
5896 if (rcode != wanted_code)
5898 if (r_const && integer_zerop (r_const) && integer_pow2p (rl_mask))
5900 rl_unsignedp = 1;
5901 r_const = rl_mask;
5903 else
5904 return 0;
5907 /* See if we can find a mode that contains both fields being compared on
5908 the left. If we can't, fail. Otherwise, update all constants and masks
5909 to be relative to a field of that size. */
5910 first_bit = MIN (ll_bitpos, rl_bitpos);
5911 end_bit = MAX (ll_bitpos + ll_bitsize, rl_bitpos + rl_bitsize);
5912 lnmode = get_best_mode (end_bit - first_bit, first_bit,
5913 TYPE_ALIGN (TREE_TYPE (ll_inner)), word_mode,
5914 volatilep);
5915 if (lnmode == VOIDmode)
5916 return 0;
5918 lnbitsize = GET_MODE_BITSIZE (lnmode);
5919 lnbitpos = first_bit & ~ (lnbitsize - 1);
5920 lntype = lang_hooks.types.type_for_size (lnbitsize, 1);
5921 xll_bitpos = ll_bitpos - lnbitpos, xrl_bitpos = rl_bitpos - lnbitpos;
5923 if (BYTES_BIG_ENDIAN)
5925 xll_bitpos = lnbitsize - xll_bitpos - ll_bitsize;
5926 xrl_bitpos = lnbitsize - xrl_bitpos - rl_bitsize;
5929 ll_mask = const_binop (LSHIFT_EXPR, fold_convert_loc (loc, lntype, ll_mask),
5930 size_int (xll_bitpos), 0);
5931 rl_mask = const_binop (LSHIFT_EXPR, fold_convert_loc (loc, lntype, rl_mask),
5932 size_int (xrl_bitpos), 0);
5934 if (l_const)
5936 l_const = fold_convert_loc (loc, lntype, l_const);
5937 l_const = unextend (l_const, ll_bitsize, ll_unsignedp, ll_and_mask);
5938 l_const = const_binop (LSHIFT_EXPR, l_const, size_int (xll_bitpos), 0);
5939 if (! integer_zerop (const_binop (BIT_AND_EXPR, l_const,
5940 fold_build1_loc (loc, BIT_NOT_EXPR,
5941 lntype, ll_mask),
5942 0)))
5944 warning (0, "comparison is always %d", wanted_code == NE_EXPR);
5946 return constant_boolean_node (wanted_code == NE_EXPR, truth_type);
5949 if (r_const)
5951 r_const = fold_convert_loc (loc, lntype, r_const);
5952 r_const = unextend (r_const, rl_bitsize, rl_unsignedp, rl_and_mask);
5953 r_const = const_binop (LSHIFT_EXPR, r_const, size_int (xrl_bitpos), 0);
5954 if (! integer_zerop (const_binop (BIT_AND_EXPR, r_const,
5955 fold_build1_loc (loc, BIT_NOT_EXPR,
5956 lntype, rl_mask),
5957 0)))
5959 warning (0, "comparison is always %d", wanted_code == NE_EXPR);
5961 return constant_boolean_node (wanted_code == NE_EXPR, truth_type);
5965 /* If the right sides are not constant, do the same for them. Also,
5966 disallow this optimization if a size or signedness mismatch occurs
5967 between the left and right sides. */
5968 if (l_const == 0)
5970 if (ll_bitsize != lr_bitsize || rl_bitsize != rr_bitsize
5971 || ll_unsignedp != lr_unsignedp || rl_unsignedp != rr_unsignedp
5972 /* Make sure the two fields on the right
5973 correspond to the left without being swapped. */
5974 || ll_bitpos - rl_bitpos != lr_bitpos - rr_bitpos)
5975 return 0;
5977 first_bit = MIN (lr_bitpos, rr_bitpos);
5978 end_bit = MAX (lr_bitpos + lr_bitsize, rr_bitpos + rr_bitsize);
5979 rnmode = get_best_mode (end_bit - first_bit, first_bit,
5980 TYPE_ALIGN (TREE_TYPE (lr_inner)), word_mode,
5981 volatilep);
5982 if (rnmode == VOIDmode)
5983 return 0;
5985 rnbitsize = GET_MODE_BITSIZE (rnmode);
5986 rnbitpos = first_bit & ~ (rnbitsize - 1);
5987 rntype = lang_hooks.types.type_for_size (rnbitsize, 1);
5988 xlr_bitpos = lr_bitpos - rnbitpos, xrr_bitpos = rr_bitpos - rnbitpos;
5990 if (BYTES_BIG_ENDIAN)
5992 xlr_bitpos = rnbitsize - xlr_bitpos - lr_bitsize;
5993 xrr_bitpos = rnbitsize - xrr_bitpos - rr_bitsize;
5996 lr_mask = const_binop (LSHIFT_EXPR, fold_convert_loc (loc,
5997 rntype, lr_mask),
5998 size_int (xlr_bitpos), 0);
5999 rr_mask = const_binop (LSHIFT_EXPR, fold_convert_loc (loc,
6000 rntype, rr_mask),
6001 size_int (xrr_bitpos), 0);
6003 /* Make a mask that corresponds to both fields being compared.
6004 Do this for both items being compared. If the operands are the
6005 same size and the bits being compared are in the same position
6006 then we can do this by masking both and comparing the masked
6007 results. */
6008 ll_mask = const_binop (BIT_IOR_EXPR, ll_mask, rl_mask, 0);
6009 lr_mask = const_binop (BIT_IOR_EXPR, lr_mask, rr_mask, 0);
6010 if (lnbitsize == rnbitsize && xll_bitpos == xlr_bitpos)
6012 lhs = make_bit_field_ref (loc, ll_inner, lntype, lnbitsize, lnbitpos,
6013 ll_unsignedp || rl_unsignedp);
6014 if (! all_ones_mask_p (ll_mask, lnbitsize))
6015 lhs = build2 (BIT_AND_EXPR, lntype, lhs, ll_mask);
6017 rhs = make_bit_field_ref (loc, lr_inner, rntype, rnbitsize, rnbitpos,
6018 lr_unsignedp || rr_unsignedp);
6019 if (! all_ones_mask_p (lr_mask, rnbitsize))
6020 rhs = build2 (BIT_AND_EXPR, rntype, rhs, lr_mask);
6022 result = build2 (wanted_code, truth_type, lhs, rhs);
6023 goto fold_truthop_exit;
6026 /* There is still another way we can do something: If both pairs of
6027 fields being compared are adjacent, we may be able to make a wider
6028 field containing them both.
6030 Note that we still must mask the lhs/rhs expressions. Furthermore,
6031 the mask must be shifted to account for the shift done by
6032 make_bit_field_ref. */
6033 if ((ll_bitsize + ll_bitpos == rl_bitpos
6034 && lr_bitsize + lr_bitpos == rr_bitpos)
6035 || (ll_bitpos == rl_bitpos + rl_bitsize
6036 && lr_bitpos == rr_bitpos + rr_bitsize))
6038 tree type;
6040 lhs = make_bit_field_ref (loc, ll_inner, lntype,
6041 ll_bitsize + rl_bitsize,
6042 MIN (ll_bitpos, rl_bitpos), ll_unsignedp);
6043 rhs = make_bit_field_ref (loc, lr_inner, rntype,
6044 lr_bitsize + rr_bitsize,
6045 MIN (lr_bitpos, rr_bitpos), lr_unsignedp);
6047 ll_mask = const_binop (RSHIFT_EXPR, ll_mask,
6048 size_int (MIN (xll_bitpos, xrl_bitpos)), 0);
6049 lr_mask = const_binop (RSHIFT_EXPR, lr_mask,
6050 size_int (MIN (xlr_bitpos, xrr_bitpos)), 0);
6052 /* Convert to the smaller type before masking out unwanted bits. */
6053 type = lntype;
6054 if (lntype != rntype)
6056 if (lnbitsize > rnbitsize)
6058 lhs = fold_convert_loc (loc, rntype, lhs);
6059 ll_mask = fold_convert_loc (loc, rntype, ll_mask);
6060 type = rntype;
6062 else if (lnbitsize < rnbitsize)
6064 rhs = fold_convert_loc (loc, lntype, rhs);
6065 lr_mask = fold_convert_loc (loc, lntype, lr_mask);
6066 type = lntype;
6070 if (! all_ones_mask_p (ll_mask, ll_bitsize + rl_bitsize))
6071 lhs = build2 (BIT_AND_EXPR, type, lhs, ll_mask);
6073 if (! all_ones_mask_p (lr_mask, lr_bitsize + rr_bitsize))
6074 rhs = build2 (BIT_AND_EXPR, type, rhs, lr_mask);
6076 result = build2 (wanted_code, truth_type, lhs, rhs);
6077 goto fold_truthop_exit;
6080 return 0;
6083 /* Handle the case of comparisons with constants. If there is something in
6084 common between the masks, those bits of the constants must be the same.
6085 If not, the condition is always false. Test for this to avoid generating
6086 incorrect code below. */
6087 result = const_binop (BIT_AND_EXPR, ll_mask, rl_mask, 0);
6088 if (! integer_zerop (result)
6089 && simple_cst_equal (const_binop (BIT_AND_EXPR, result, l_const, 0),
6090 const_binop (BIT_AND_EXPR, result, r_const, 0)) != 1)
6092 if (wanted_code == NE_EXPR)
6094 warning (0, "%<or%> of unmatched not-equal tests is always 1");
6095 return constant_boolean_node (true, truth_type);
6097 else
6099 warning (0, "%<and%> of mutually exclusive equal-tests is always 0");
6100 return constant_boolean_node (false, truth_type);
6104 /* Construct the expression we will return. First get the component
6105 reference we will make. Unless the mask is all ones for the full width of
6106 that field, perform the mask operation. Then compare with the
6107 merged constant. */
6108 result = make_bit_field_ref (loc, ll_inner, lntype, lnbitsize, lnbitpos,
6109 ll_unsignedp || rl_unsignedp);
6111 ll_mask = const_binop (BIT_IOR_EXPR, ll_mask, rl_mask, 0);
6112 if (! all_ones_mask_p (ll_mask, lnbitsize))
6114 result = build2 (BIT_AND_EXPR, lntype, result, ll_mask);
6115 SET_EXPR_LOCATION (result, loc);
6118 result = build2 (wanted_code, truth_type, result,
6119 const_binop (BIT_IOR_EXPR, l_const, r_const, 0));
6121 fold_truthop_exit:
6122 SET_EXPR_LOCATION (result, loc);
6123 return result;
6126 /* Optimize T, which is a comparison of a MIN_EXPR or MAX_EXPR with a
6127 constant. */
6129 static tree
6130 optimize_minmax_comparison (location_t loc, enum tree_code code, tree type,
6131 tree op0, tree op1)
6133 tree arg0 = op0;
6134 enum tree_code op_code;
6135 tree comp_const;
6136 tree minmax_const;
6137 int consts_equal, consts_lt;
6138 tree inner;
6140 STRIP_SIGN_NOPS (arg0);
6142 op_code = TREE_CODE (arg0);
6143 minmax_const = TREE_OPERAND (arg0, 1);
6144 comp_const = fold_convert_loc (loc, TREE_TYPE (arg0), op1);
6145 consts_equal = tree_int_cst_equal (minmax_const, comp_const);
6146 consts_lt = tree_int_cst_lt (minmax_const, comp_const);
6147 inner = TREE_OPERAND (arg0, 0);
6149 /* If something does not permit us to optimize, return NULL_TREE. */
6150 if ((op_code != MIN_EXPR && op_code != MAX_EXPR)
6151 || TREE_CODE (comp_const) != INTEGER_CST
6152 || TREE_OVERFLOW (comp_const)
6153 || TREE_CODE (minmax_const) != INTEGER_CST
6154 || TREE_OVERFLOW (minmax_const))
6155 return NULL_TREE;
6157 /* Now handle all the various comparison codes. We only handle EQ_EXPR
6158 and GT_EXPR, doing the rest with recursive calls using logical
6159 simplifications. */
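/* For illustration: MAX (X, 0) != 0 is handled by recursing on the
   inverted comparison MAX (X, 0) == 0, which folds to X <= 0, and
   then inverting that result to give X > 0.  */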
6160 switch (code)
6162 case NE_EXPR: case LT_EXPR: case LE_EXPR:
6164 tree tem
6165 = optimize_minmax_comparison (loc,
6166 invert_tree_comparison (code, false),
6167 type, op0, op1);
6168 if (tem)
6169 return invert_truthvalue_loc (loc, tem);
6170 return NULL_TREE;
6173 case GE_EXPR:
6174 return
6175 fold_build2_loc (loc, TRUTH_ORIF_EXPR, type,
6176 optimize_minmax_comparison
6177 (loc, EQ_EXPR, type, arg0, comp_const),
6178 optimize_minmax_comparison
6179 (loc, GT_EXPR, type, arg0, comp_const));
6181 case EQ_EXPR:
6182 if (op_code == MAX_EXPR && consts_equal)
6183 /* MAX (X, 0) == 0 -> X <= 0 */
6184 return fold_build2_loc (loc, LE_EXPR, type, inner, comp_const);
6186 else if (op_code == MAX_EXPR && consts_lt)
6187 /* MAX (X, 0) == 5 -> X == 5 */
6188 return fold_build2_loc (loc, EQ_EXPR, type, inner, comp_const);
6190 else if (op_code == MAX_EXPR)
6191 /* MAX (X, 0) == -1 -> false */
6192 return omit_one_operand_loc (loc, type, integer_zero_node, inner);
6194 else if (consts_equal)
6195 /* MIN (X, 0) == 0 -> X >= 0 */
6196 return fold_build2_loc (loc, GE_EXPR, type, inner, comp_const);
6198 else if (consts_lt)
6199 /* MIN (X, 0) == 5 -> false */
6200 return omit_one_operand_loc (loc, type, integer_zero_node, inner);
6202 else
6203 /* MIN (X, 0) == -1 -> X == -1 */
6204 return fold_build2_loc (loc, EQ_EXPR, type, inner, comp_const);
6206 case GT_EXPR:
6207 if (op_code == MAX_EXPR && (consts_equal || consts_lt))
6208 /* MAX (X, 0) > 0 -> X > 0
6209 MAX (X, 0) > 5 -> X > 5 */
6210 return fold_build2_loc (loc, GT_EXPR, type, inner, comp_const);
6212 else if (op_code == MAX_EXPR)
6213 /* MAX (X, 0) > -1 -> true */
6214 return omit_one_operand_loc (loc, type, integer_one_node, inner);
6216 else if (op_code == MIN_EXPR && (consts_equal || consts_lt))
6217 /* MIN (X, 0) > 0 -> false
6218 MIN (X, 0) > 5 -> false */
6219 return omit_one_operand_loc (loc, type, integer_zero_node, inner);
6221 else
6222 /* MIN (X, 0) > -1 -> X > -1 */
6223 return fold_build2_loc (loc, GT_EXPR, type, inner, comp_const);
6225 default:
6226 return NULL_TREE;
6230 /* T is an integer expression that is being multiplied, divided, or taken a
6231 modulus (CODE says which and what kind of divide or modulus) by a
6232 constant C. See if we can eliminate that operation by folding it with
6233 other operations already in T. WIDE_TYPE, if non-null, is a type that
6234 should be used for the computation if wider than our type.
6236 For example, if we are dividing (X * 8) + (Y * 16) by 4, we can return
6237 (X * 2) + (Y * 4). We must, however, be assured that either the original
6238 expression would not overflow or that overflow is undefined for the type
6239 in the language in question.
6241 If we return a non-null expression, it is an equivalent form of the
6242 original computation, but need not be in the original type.
6244 We set *STRICT_OVERFLOW_P to true if the return values depends on
6245 signed overflow being undefined. Otherwise we do not change
6246 *STRICT_OVERFLOW_P. */
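/* For illustration, assuming overflow is undefined for the type:
   (X * 8 + 4) / 4 becomes X * 2 + 1, since both addends are known
   multiples of 4, whereas (X * 8 + 6) / 4 is left alone because 6
   is not divisible by 4 and the division cannot be distributed
   exactly.  */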
6248 static tree
6249 extract_muldiv (tree t, tree c, enum tree_code code, tree wide_type,
6250 bool *strict_overflow_p)
6252 /* To avoid exponential search depth, refuse to allow recursion past
6253 three levels. Beyond that (1) it's highly unlikely that we'll find
6254 something interesting and (2) we've probably processed it before
6255 when we built the inner expression. */
6257 static int depth;
6258 tree ret;
6260 if (depth > 3)
6261 return NULL;
6263 depth++;
6264 ret = extract_muldiv_1 (t, c, code, wide_type, strict_overflow_p);
6265 depth--;
6267 return ret;
6270 static tree
6271 extract_muldiv_1 (tree t, tree c, enum tree_code code, tree wide_type,
6272 bool *strict_overflow_p)
6274 tree type = TREE_TYPE (t);
6275 enum tree_code tcode = TREE_CODE (t);
6276 tree ctype = (wide_type != 0 && (GET_MODE_SIZE (TYPE_MODE (wide_type))
6277 > GET_MODE_SIZE (TYPE_MODE (type)))
6278 ? wide_type : type);
6279 tree t1, t2;
6280 int same_p = tcode == code;
6281 tree op0 = NULL_TREE, op1 = NULL_TREE;
6282 bool sub_strict_overflow_p;
6284 /* Don't deal with constants of zero here; they confuse the code below. */
6285 if (integer_zerop (c))
6286 return NULL_TREE;
6288 if (TREE_CODE_CLASS (tcode) == tcc_unary)
6289 op0 = TREE_OPERAND (t, 0);
6291 if (TREE_CODE_CLASS (tcode) == tcc_binary)
6292 op0 = TREE_OPERAND (t, 0), op1 = TREE_OPERAND (t, 1);
6294 /* Note that we need not handle conditional operations here since fold
6295 already handles those cases. So just do arithmetic here. */
6296 switch (tcode)
6298 case INTEGER_CST:
6299 /* For a constant, we can always simplify if we are a multiply
6300 or (for divide and modulus) if it is a multiple of our constant. */
6301 if (code == MULT_EXPR
6302 || integer_zerop (const_binop (TRUNC_MOD_EXPR, t, c, 0)))
6303 return const_binop (code, fold_convert (ctype, t),
6304 fold_convert (ctype, c), 0);
6305 break;
6307 CASE_CONVERT: case NON_LVALUE_EXPR:
6308 /* If op0 is an expression ... */
6309 if ((COMPARISON_CLASS_P (op0)
6310 || UNARY_CLASS_P (op0)
6311 || BINARY_CLASS_P (op0)
6312 || VL_EXP_CLASS_P (op0)
6313 || EXPRESSION_CLASS_P (op0))
6314 /* ... and has wrapping overflow, and its type is smaller
6315 than ctype, then we cannot pass through as widening. */
6316 && ((TYPE_OVERFLOW_WRAPS (TREE_TYPE (op0))
6317 && ! (TREE_CODE (TREE_TYPE (op0)) == INTEGER_TYPE
6318 && TYPE_IS_SIZETYPE (TREE_TYPE (op0)))
6319 && (TYPE_PRECISION (ctype)
6320 > TYPE_PRECISION (TREE_TYPE (op0))))
6321 /* ... or this is a truncation (t is narrower than op0),
6322 then we cannot pass through this narrowing. */
6323 || (TYPE_PRECISION (type)
6324 < TYPE_PRECISION (TREE_TYPE (op0)))
6325 /* ... or signedness changes for division or modulus,
6326 then we cannot pass through this conversion. */
6327 || (code != MULT_EXPR
6328 && (TYPE_UNSIGNED (ctype)
6329 != TYPE_UNSIGNED (TREE_TYPE (op0))))
6330 /* ... or has undefined overflow while the type being
6331 converted to has not, we cannot do the operation in the
6332 inner type as that would introduce undefined overflow. */
6333 || (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (op0))
6334 && !TYPE_OVERFLOW_UNDEFINED (type))))
6335 break;
6337 /* Pass the constant down and see if we can make a simplification. If
6338 we can, replace this expression with the inner simplification for
6339 possible later conversion to our or some other type. */
6340 if ((t2 = fold_convert (TREE_TYPE (op0), c)) != 0
6341 && TREE_CODE (t2) == INTEGER_CST
6342 && !TREE_OVERFLOW (t2)
6343 && (0 != (t1 = extract_muldiv (op0, t2, code,
6344 code == MULT_EXPR
6345 ? ctype : NULL_TREE,
6346 strict_overflow_p))))
6347 return t1;
6348 break;
6350 case ABS_EXPR:
6351 /* If widening the type changes it from signed to unsigned, then we
6352 must avoid building ABS_EXPR itself as unsigned. */
6353 if (TYPE_UNSIGNED (ctype) && !TYPE_UNSIGNED (type))
6355 tree cstype = (*signed_type_for) (ctype);
6356 if ((t1 = extract_muldiv (op0, c, code, cstype, strict_overflow_p))
6357 != 0)
6359 t1 = fold_build1 (tcode, cstype, fold_convert (cstype, t1));
6360 return fold_convert (ctype, t1);
6362 break;
6364 /* If the constant is negative, we cannot simplify this. */
6365 if (tree_int_cst_sgn (c) == -1)
6366 break;
6367 /* FALLTHROUGH */
6368 case NEGATE_EXPR:
6369 if ((t1 = extract_muldiv (op0, c, code, wide_type, strict_overflow_p))
6370 != 0)
6371 return fold_build1 (tcode, ctype, fold_convert (ctype, t1));
6372 break;
6374 case MIN_EXPR: case MAX_EXPR:
6375 /* If widening the type changes the signedness, then we can't perform
6376 this optimization as that changes the result. */
6377 if (TYPE_UNSIGNED (ctype) != TYPE_UNSIGNED (type))
6378 break;
6380 /* MIN (a, b) / 5 -> MIN (a / 5, b / 5) */
6381 sub_strict_overflow_p = false;
6382 if ((t1 = extract_muldiv (op0, c, code, wide_type,
6383 &sub_strict_overflow_p)) != 0
6384 && (t2 = extract_muldiv (op1, c, code, wide_type,
6385 &sub_strict_overflow_p)) != 0)
6387 if (tree_int_cst_sgn (c) < 0)
6388 tcode = (tcode == MIN_EXPR ? MAX_EXPR : MIN_EXPR);
6389 if (sub_strict_overflow_p)
6390 *strict_overflow_p = true;
6391 return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
6392 fold_convert (ctype, t2));
6394 break;
6396 case LSHIFT_EXPR: case RSHIFT_EXPR:
6397 /* If the second operand is constant, this is a multiplication
6398 or floor division by a power of two, so we can treat it that
6399 way unless the multiplier or divisor overflows. Signed
6400 left-shift overflow is implementation-defined rather than
6401 undefined in C90, so do not convert signed left shift into
6402 multiplication. */
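/* For illustration: an unsigned X << 3 is re-expressed here as
   X * 8, and X >> 3 as FLOOR_DIV_EXPR by 8, before recursing;
   a signed X << 3 is deliberately left alone.  */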
6403 if (TREE_CODE (op1) == INTEGER_CST
6404 && (tcode == RSHIFT_EXPR || TYPE_UNSIGNED (TREE_TYPE (op0)))
6405 /* const_binop may not detect overflow correctly,
6406 so check for it explicitly here. */
6407 && TYPE_PRECISION (TREE_TYPE (size_one_node)) > TREE_INT_CST_LOW (op1)
6408 && TREE_INT_CST_HIGH (op1) == 0
6409 && 0 != (t1 = fold_convert (ctype,
6410 const_binop (LSHIFT_EXPR,
6411 size_one_node,
6412 op1, 0)))
6413 && !TREE_OVERFLOW (t1))
6414 return extract_muldiv (build2 (tcode == LSHIFT_EXPR
6415 ? MULT_EXPR : FLOOR_DIV_EXPR,
6416 ctype,
6417 fold_convert (ctype, op0),
6418 t1),
6419 c, code, wide_type, strict_overflow_p);
6420 break;
6422 case PLUS_EXPR: case MINUS_EXPR:
6423 /* See if we can eliminate the operation on both sides. If we can, we
6424 can return a new PLUS or MINUS. If we can't, the only remaining
6425 cases where we can do anything are if the second operand is a
6426 constant. */
6427 sub_strict_overflow_p = false;
6428 t1 = extract_muldiv (op0, c, code, wide_type, &sub_strict_overflow_p);
6429 t2 = extract_muldiv (op1, c, code, wide_type, &sub_strict_overflow_p);
6430 if (t1 != 0 && t2 != 0
6431 && (code == MULT_EXPR
6432 /* If not multiplication, we can only do this if both operands
6433 are divisible by c. */
6434 || (multiple_of_p (ctype, op0, c)
6435 && multiple_of_p (ctype, op1, c))))
6437 if (sub_strict_overflow_p)
6438 *strict_overflow_p = true;
6439 return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
6440 fold_convert (ctype, t2));
6443 /* If this was a subtraction, negate OP1 and set it to be an addition.
6444 This simplifies the logic below. */
6445 if (tcode == MINUS_EXPR)
6446 tcode = PLUS_EXPR, op1 = negate_expr (op1);
6448 if (TREE_CODE (op1) != INTEGER_CST)
6449 break;
6451 /* If either OP1 or C is negative, this optimization is not safe for
6452 some of the division and remainder types while for others we need
6453 to change the code. */
6454 if (tree_int_cst_sgn (op1) < 0 || tree_int_cst_sgn (c) < 0)
6456 if (code == CEIL_DIV_EXPR)
6457 code = FLOOR_DIV_EXPR;
6458 else if (code == FLOOR_DIV_EXPR)
6459 code = CEIL_DIV_EXPR;
6460 else if (code != MULT_EXPR
6461 && code != CEIL_MOD_EXPR && code != FLOOR_MOD_EXPR)
6462 break;
6465 /* If it's a multiply or a division/modulus operation of a multiple
6466 of our constant, do the operation and verify it doesn't overflow. */
6467 if (code == MULT_EXPR
6468 || integer_zerop (const_binop (TRUNC_MOD_EXPR, op1, c, 0)))
6470 op1 = const_binop (code, fold_convert (ctype, op1),
6471 fold_convert (ctype, c), 0);
6472 /* We allow the constant to overflow with wrapping semantics. */
6473 if (op1 == 0
6474 || (TREE_OVERFLOW (op1) && !TYPE_OVERFLOW_WRAPS (ctype)))
6475 break;
6477 else
6478 break;
6480 /* If we have an unsigned type that is not a sizetype, we cannot widen
6481 the operation since it will change the result if the original
6482 computation overflowed. */
6483 if (TYPE_UNSIGNED (ctype)
6484 && ! (TREE_CODE (ctype) == INTEGER_TYPE && TYPE_IS_SIZETYPE (ctype))
6485 && ctype != type)
6486 break;
6488 /* If we were able to eliminate our operation from the first side,
6489 apply our operation to the second side and reform the PLUS. */
6490 if (t1 != 0 && (TREE_CODE (t1) != code || code == MULT_EXPR))
6491 return fold_build2 (tcode, ctype, fold_convert (ctype, t1), op1);
6493 /* The last case is if we are a multiply. In that case, we can
6494 apply the distributive law to commute the multiply and addition
6495 if the multiplication of the constants doesn't overflow. */
6496 if (code == MULT_EXPR)
6497 return fold_build2 (tcode, ctype,
6498 fold_build2 (code, ctype,
6499 fold_convert (ctype, op0),
6500 fold_convert (ctype, c)),
6501 op1);
6503 break;
6505 case MULT_EXPR:
6506 /* We have a special case here if we are doing something like
6507 (C * 8) % 4 since we know that's zero. */
6508 if ((code == TRUNC_MOD_EXPR || code == CEIL_MOD_EXPR
6509 || code == FLOOR_MOD_EXPR || code == ROUND_MOD_EXPR)
6510 /* If the multiplication can overflow we cannot optimize this.
6511 ??? Until we can properly mark individual operations as
6512 not overflowing we need to treat sizetype special here as
6513 stor-layout relies on this optimization to make
6514 DECL_FIELD_BIT_OFFSET always a constant. */
6515 && (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (t))
6516 || (TREE_CODE (TREE_TYPE (t)) == INTEGER_TYPE
6517 && TYPE_IS_SIZETYPE (TREE_TYPE (t))))
6518 && TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST
6519 && integer_zerop (const_binop (TRUNC_MOD_EXPR, op1, c, 0)))
6521 *strict_overflow_p = true;
6522 return omit_one_operand (type, integer_zero_node, op0);
6525 /* ... fall through ... */
6527 case TRUNC_DIV_EXPR: case CEIL_DIV_EXPR: case FLOOR_DIV_EXPR:
6528 case ROUND_DIV_EXPR: case EXACT_DIV_EXPR:
6529 /* If we can extract our operation from the LHS, do so and return a
6530 new operation. Likewise for the RHS from a MULT_EXPR. Otherwise,
6531 do something only if the second operand is a constant. */
6532 if (same_p
6533 && (t1 = extract_muldiv (op0, c, code, wide_type,
6534 strict_overflow_p)) != 0)
6535 return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
6536 fold_convert (ctype, op1));
6537 else if (tcode == MULT_EXPR && code == MULT_EXPR
6538 && (t1 = extract_muldiv (op1, c, code, wide_type,
6539 strict_overflow_p)) != 0)
6540 return fold_build2 (tcode, ctype, fold_convert (ctype, op0),
6541 fold_convert (ctype, t1));
6542 else if (TREE_CODE (op1) != INTEGER_CST)
6543 return 0;
6545 /* If these are the same operation types, we can associate them
6546 assuming no overflow. */
6547 if (tcode == code
6548 && 0 != (t1 = int_const_binop (MULT_EXPR,
6549 fold_convert (ctype, op1),
6550 fold_convert (ctype, c), 1))
6551 && 0 != (t1 = force_fit_type_double (ctype, TREE_INT_CST_LOW (t1),
6552 TREE_INT_CST_HIGH (t1),
6553 (TYPE_UNSIGNED (ctype)
6554 && tcode != MULT_EXPR) ? -1 : 1,
6555 TREE_OVERFLOW (t1)))
6556 && !TREE_OVERFLOW (t1))
6557 return fold_build2 (tcode, ctype, fold_convert (ctype, op0), t1);
6559 /* If these operations "cancel" each other, we have the main
6560 optimizations of this pass, which occur when either constant is a
6561 multiple of the other, in which case we replace this with an
6562 operation of either CODE or TCODE.
6564 If we have an unsigned type that is not a sizetype, we cannot do
6565 this since it will change the result if the original computation
6566 overflowed. */
6567 if ((TYPE_OVERFLOW_UNDEFINED (ctype)
6568 || (TREE_CODE (ctype) == INTEGER_TYPE && TYPE_IS_SIZETYPE (ctype)))
6569 && ((code == MULT_EXPR && tcode == EXACT_DIV_EXPR)
6570 || (tcode == MULT_EXPR
6571 && code != TRUNC_MOD_EXPR && code != CEIL_MOD_EXPR
6572 && code != FLOOR_MOD_EXPR && code != ROUND_MOD_EXPR
6573 && code != MULT_EXPR)))
6575 if (integer_zerop (const_binop (TRUNC_MOD_EXPR, op1, c, 0)))
6577 if (TYPE_OVERFLOW_UNDEFINED (ctype))
6578 *strict_overflow_p = true;
6579 return fold_build2 (tcode, ctype, fold_convert (ctype, op0),
6580 fold_convert (ctype,
6581 const_binop (TRUNC_DIV_EXPR,
6582 op1, c, 0)));
6584 else if (integer_zerop (const_binop (TRUNC_MOD_EXPR, c, op1, 0)))
6586 if (TYPE_OVERFLOW_UNDEFINED (ctype))
6587 *strict_overflow_p = true;
6588 return fold_build2 (code, ctype, fold_convert (ctype, op0),
6589 fold_convert (ctype,
6590 const_binop (TRUNC_DIV_EXPR,
6591 c, op1, 0)));
6594 break;
6596 default:
6597 break;
6600 return 0;
6603 /* Return a node which has the indicated constant VALUE (either 0 or
6604 1), and is of the indicated TYPE. */
6606 tree
6607 constant_boolean_node (int value, tree type)
6609 if (type == integer_type_node)
6610 return value ? integer_one_node : integer_zero_node;
6611 else if (type == boolean_type_node)
6612 return value ? boolean_true_node : boolean_false_node;
6613 else
6614 return build_int_cst (type, value);
6618 /* Transform `a + (b ? x : y)' into `b ? (a + x) : (a + y)'.
6619 Transform, `a + (x < y)' into `(x < y) ? (a + 1) : (a + 0)'. Here
6620 CODE corresponds to the `+', COND to the `(b ? x : y)' or `(x < y)'
6621 expression, and ARG to `a'. If COND_FIRST_P is nonzero, then the
6622 COND is the first argument to CODE; otherwise (as in the example
6623 given here), it is the second argument. TYPE is the type of the
6624 original expression. Return NULL_TREE if no simplification is
6625 possible. */
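/* For illustration: with CODE == PLUS_EXPR, ARG == 1 and
   COND == (x < y), the comparison is treated as (x < y) ? 1 : 0
   and the whole expression folds to (x < y) ? 2 : 1, performing
   the addition on constants in both arms.  */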
6627 static tree
6628 fold_binary_op_with_conditional_arg (location_t loc,
6629 enum tree_code code,
6630 tree type, tree op0, tree op1,
6631 tree cond, tree arg, int cond_first_p)
6633 tree cond_type = cond_first_p ? TREE_TYPE (op0) : TREE_TYPE (op1);
6634 tree arg_type = cond_first_p ? TREE_TYPE (op1) : TREE_TYPE (op0);
6635 tree test, true_value, false_value;
6636 tree lhs = NULL_TREE;
6637 tree rhs = NULL_TREE;
6639 /* This transformation is only worthwhile if we don't have to wrap
6640 arg in a SAVE_EXPR, and the operation can be simplified on at least
6641 one of the branches once it's pushed inside the COND_EXPR. */
6642 if (!TREE_CONSTANT (arg))
6643 return NULL_TREE;
6645 if (TREE_CODE (cond) == COND_EXPR)
6647 test = TREE_OPERAND (cond, 0);
6648 true_value = TREE_OPERAND (cond, 1);
6649 false_value = TREE_OPERAND (cond, 2);
6650 /* If this operand is a throw expression (and hence has void
6651 type), then it does not make sense to try to perform a logical
6652 or arithmetic operation involving it. */
6653 if (VOID_TYPE_P (TREE_TYPE (true_value)))
6654 lhs = true_value;
6655 if (VOID_TYPE_P (TREE_TYPE (false_value)))
6656 rhs = false_value;
6658 else
6660 tree testtype = TREE_TYPE (cond);
6661 test = cond;
6662 true_value = constant_boolean_node (true, testtype);
6663 false_value = constant_boolean_node (false, testtype);
6666 arg = fold_convert_loc (loc, arg_type, arg);
6667 if (lhs == 0)
6669 true_value = fold_convert_loc (loc, cond_type, true_value);
6670 if (cond_first_p)
6671 lhs = fold_build2_loc (loc, code, type, true_value, arg);
6672 else
6673 lhs = fold_build2_loc (loc, code, type, arg, true_value);
6675 if (rhs == 0)
6677 false_value = fold_convert_loc (loc, cond_type, false_value);
6678 if (cond_first_p)
6679 rhs = fold_build2_loc (loc, code, type, false_value, arg);
6680 else
6681 rhs = fold_build2_loc (loc, code, type, arg, false_value);
6684 test = fold_build3_loc (loc, COND_EXPR, type, test, lhs, rhs);
6685 return fold_convert_loc (loc, type, test);
6689 /* Subroutine of fold() that checks for the addition of +/- 0.0.
6691 If !NEGATE, return true if ADDEND is +/-0.0 and, for all X of type
6692 TYPE, X + ADDEND is the same as X. If NEGATE, return true if X -
6693 ADDEND is the same as X.
6695 X + 0 and X - 0 both give X when X is NaN, infinite, or nonzero
6696 and finite. The problematic cases are when X is zero, and its mode
6697 has signed zeros. In the case of rounding towards -infinity,
6698 X - 0 is not the same as X because 0 - 0 is -0. In other rounding
6699 modes, X + 0 is not the same as X because -0 + 0 is 0. */
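/* For illustration: under the default round-to-nearest mode,
   -0.0 + 0.0 evaluates to +0.0, so X + 0.0 must not be folded to X
   when X might be -0.0; X - 0.0 is X for every X, NaNs included,
   unless rounding towards -infinity makes 0.0 - 0.0 yield -0.0.  */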
6701 bool
6702 fold_real_zero_addition_p (const_tree type, const_tree addend, int negate)
6704 if (!real_zerop (addend))
6705 return false;
6707 /* Don't allow the fold with -fsignaling-nans. */
6708 if (HONOR_SNANS (TYPE_MODE (type)))
6709 return false;
6711 /* Allow the fold if zeros aren't signed, or their sign isn't important. */
6712 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (type)))
6713 return true;
6715 /* Treat x + -0 as x - 0 and x - -0 as x + 0. */
6716 if (TREE_CODE (addend) == REAL_CST
6717 && REAL_VALUE_MINUS_ZERO (TREE_REAL_CST (addend)))
6718 negate = !negate;
6720 /* The mode has signed zeros, and we have to honor their sign.
6721 In this situation, there is only one case we can return true for.
6722 X - 0 is the same as X unless rounding towards -infinity is
6723 supported. */
6724 return negate && !HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type));
6727 /* Subroutine of fold() that checks comparisons of built-in math
6728 functions against real constants.
6730 FCODE is the DECL_FUNCTION_CODE of the built-in, CODE is the comparison
6731 operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR, GE_EXPR or LE_EXPR. TYPE
6732 is the type of the result and ARG0 and ARG1 are the operands of the
6733 comparison. ARG1 must be a TREE_REAL_CST.
6735 The function returns the constant folded tree if a simplification
6736 can be made, and NULL_TREE otherwise. */
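/* For illustration: sqrt(x) > 3.0 folds to x > 9.0, and
   sqrt(x) < 2.0 folds to x < 4.0 when NaNs can be ignored, or to
   x >= 0.0 && x < 4.0 when they cannot.  */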
6738 static tree
6739 fold_mathfn_compare (location_t loc,
6740 enum built_in_function fcode, enum tree_code code,
6741 tree type, tree arg0, tree arg1)
6743 REAL_VALUE_TYPE c;
6745 if (BUILTIN_SQRT_P (fcode))
6747 tree arg = CALL_EXPR_ARG (arg0, 0);
6748 enum machine_mode mode = TYPE_MODE (TREE_TYPE (arg0));
6750 c = TREE_REAL_CST (arg1);
6751 if (REAL_VALUE_NEGATIVE (c))
6753 /* sqrt(x) < y is always false, if y is negative. */
6754 if (code == EQ_EXPR || code == LT_EXPR || code == LE_EXPR)
6755 return omit_one_operand_loc (loc, type, integer_zero_node, arg);
6757 /* sqrt(x) > y is always true, if y is negative and we
6758 don't care about NaNs, i.e. negative values of x. */
6759 if (code == NE_EXPR || !HONOR_NANS (mode))
6760 return omit_one_operand_loc (loc, type, integer_one_node, arg);
6762 /* sqrt(x) > y is the same as x >= 0, if y is negative. */
6763 return fold_build2_loc (loc, GE_EXPR, type, arg,
6764 build_real (TREE_TYPE (arg), dconst0));
6766 else if (code == GT_EXPR || code == GE_EXPR)
6768 REAL_VALUE_TYPE c2;
6770 REAL_ARITHMETIC (c2, MULT_EXPR, c, c);
6771 real_convert (&c2, mode, &c2);
6773 if (REAL_VALUE_ISINF (c2))
6775 /* sqrt(x) > y is x == +Inf, when y is very large. */
6776 if (HONOR_INFINITIES (mode))
6777 return fold_build2_loc (loc, EQ_EXPR, type, arg,
6778 build_real (TREE_TYPE (arg), c2));
6780 /* sqrt(x) > y is always false, when y is very large
6781 and we don't care about infinities. */
6782 return omit_one_operand_loc (loc, type, integer_zero_node, arg);
6785 /* sqrt(x) > c is the same as x > c*c. */
6786 return fold_build2_loc (loc, code, type, arg,
6787 build_real (TREE_TYPE (arg), c2));
6789 else if (code == LT_EXPR || code == LE_EXPR)
6791 REAL_VALUE_TYPE c2;
6793 REAL_ARITHMETIC (c2, MULT_EXPR, c, c);
6794 real_convert (&c2, mode, &c2);
6796 if (REAL_VALUE_ISINF (c2))
6798 /* sqrt(x) < y is always true, when y is a very large
6799 value and we don't care about NaNs or Infinities. */
6800 if (! HONOR_NANS (mode) && ! HONOR_INFINITIES (mode))
6801 return omit_one_operand_loc (loc, type, integer_one_node, arg);
6803 /* sqrt(x) < y is x != +Inf when y is very large and we
6804 don't care about NaNs. */
6805 if (! HONOR_NANS (mode))
6806 return fold_build2_loc (loc, NE_EXPR, type, arg,
6807 build_real (TREE_TYPE (arg), c2));
6809 /* sqrt(x) < y is x >= 0 when y is very large and we
6810 don't care about Infinities. */
6811 if (! HONOR_INFINITIES (mode))
6812 return fold_build2_loc (loc, GE_EXPR, type, arg,
6813 build_real (TREE_TYPE (arg), dconst0));
6815 /* sqrt(x) < y is x >= 0 && x != +Inf, when y is large. */
6816 if (lang_hooks.decls.global_bindings_p () != 0
6817 || CONTAINS_PLACEHOLDER_P (arg))
6818 return NULL_TREE;
6820 arg = save_expr (arg);
6821 return fold_build2_loc (loc, TRUTH_ANDIF_EXPR, type,
6822 fold_build2_loc (loc, GE_EXPR, type, arg,
6823 build_real (TREE_TYPE (arg),
6824 dconst0)),
6825 fold_build2_loc (loc, NE_EXPR, type, arg,
6826 build_real (TREE_TYPE (arg),
6827 c2)));
6830 /* sqrt(x) < c is the same as x < c*c, if we ignore NaNs. */
6831 if (! HONOR_NANS (mode))
6832 return fold_build2_loc (loc, code, type, arg,
6833 build_real (TREE_TYPE (arg), c2));
6835 /* sqrt(x) < c is the same as x >= 0 && x < c*c. */
6836 if (lang_hooks.decls.global_bindings_p () == 0
6837 && ! CONTAINS_PLACEHOLDER_P (arg))
6839 arg = save_expr (arg);
6840 return fold_build2_loc (loc, TRUTH_ANDIF_EXPR, type,
6841 fold_build2_loc (loc, GE_EXPR, type, arg,
6842 build_real (TREE_TYPE (arg),
6843 dconst0)),
6844 fold_build2_loc (loc, code, type, arg,
6845 build_real (TREE_TYPE (arg),
6846 c2)));
6851 return NULL_TREE;
6854 /* Subroutine of fold() that optimizes comparisons against Infinities,
6855 either +Inf or -Inf.
6857 CODE is the comparison operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR,
6858 GE_EXPR or LE_EXPR. TYPE is the type of the result and ARG0 and ARG1
6859 are the operands of the comparison. ARG1 must be a TREE_REAL_CST.
6861 The function returns the constant folded tree if a simplification
6862 can be made, and NULL_TREE otherwise. */
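/* For illustration, for a double X: X < +Inf folds to
   X <= DBL_MAX, X >= +Inf folds to X > DBL_MAX, and comparisons
   against -Inf are handled by first swapping the sense of the
   comparison.  */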
6864 static tree
6865 fold_inf_compare (location_t loc, enum tree_code code, tree type,
6866 tree arg0, tree arg1)
6868 enum machine_mode mode;
6869 REAL_VALUE_TYPE max;
6870 tree temp;
6871 bool neg;
6873 mode = TYPE_MODE (TREE_TYPE (arg0));
6875 /* For negative infinity swap the sense of the comparison. */
6876 neg = REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg1));
6877 if (neg)
6878 code = swap_tree_comparison (code);
6880 switch (code)
6882 case GT_EXPR:
6883 /* x > +Inf is always false, if we ignore sNaNs. */
6884 if (HONOR_SNANS (mode))
6885 return NULL_TREE;
6886 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
6888 case LE_EXPR:
6889 /* x <= +Inf is always true, if we don't care about NaNs. */
6890 if (! HONOR_NANS (mode))
6891 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
6893 /* x <= +Inf is the same as x == x, i.e. !isnan(x). */
6894 if (lang_hooks.decls.global_bindings_p () == 0
6895 && ! CONTAINS_PLACEHOLDER_P (arg0))
6897 arg0 = save_expr (arg0);
6898 return fold_build2_loc (loc, EQ_EXPR, type, arg0, arg0);
6900 break;
6902 case EQ_EXPR:
6903 case GE_EXPR:
6904 /* x == +Inf and x >= +Inf are always equal to x > DBL_MAX. */
6905 real_maxval (&max, neg, mode);
6906 return fold_build2_loc (loc, neg ? LT_EXPR : GT_EXPR, type,
6907 arg0, build_real (TREE_TYPE (arg0), max));
6909 case LT_EXPR:
6910 /* x < +Inf is always equal to x <= DBL_MAX. */
6911 real_maxval (&max, neg, mode);
6912 return fold_build2_loc (loc, neg ? GE_EXPR : LE_EXPR, type,
6913 arg0, build_real (TREE_TYPE (arg0), max));
6915 case NE_EXPR:
6916 /* x != +Inf is always equal to !(x > DBL_MAX). */
6917 real_maxval (&max, neg, mode);
6918 if (! HONOR_NANS (mode))
6919 return fold_build2_loc (loc, neg ? GE_EXPR : LE_EXPR, type,
6920 arg0, build_real (TREE_TYPE (arg0), max));
6922 temp = fold_build2_loc (loc, neg ? LT_EXPR : GT_EXPR, type,
6923 arg0, build_real (TREE_TYPE (arg0), max));
6924 return fold_build1_loc (loc, TRUTH_NOT_EXPR, type, temp);
6926 default:
6927 break;
6930 return NULL_TREE;
6933 /* Subroutine of fold() that optimizes comparisons of a division by
6934 a nonzero integer constant against an integer constant, i.e.
6935 X/C1 op C2.
6937 CODE is the comparison operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR,
6938 GE_EXPR or LE_EXPR. TYPE is the type of the result and ARG0 and ARG1
6939 are the operands of the comparison. ARG1 must be an INTEGER_CST.
6941 The function returns the constant folded tree if a simplification
6942 can be made, and NULL_TREE otherwise. */
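/* For illustration, with signed X: X / 4 == 2 holds exactly for
   8 <= X && X <= 11, so it folds to a single range check with
   lo == 8 and hi == 8 + 3; a negative divisor first reverses the
   relational operators.  */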
6944 static tree
6945 fold_div_compare (location_t loc,
6946 enum tree_code code, tree type, tree arg0, tree arg1)
6948 tree prod, tmp, hi, lo;
6949 tree arg00 = TREE_OPERAND (arg0, 0);
6950 tree arg01 = TREE_OPERAND (arg0, 1);
6951 unsigned HOST_WIDE_INT lpart;
6952 HOST_WIDE_INT hpart;
6953 bool unsigned_p = TYPE_UNSIGNED (TREE_TYPE (arg0));
6954 bool neg_overflow;
6955 int overflow;
6957 /* We have to do this the hard way to detect unsigned overflow.
6958 prod = int_const_binop (MULT_EXPR, arg01, arg1, 0); */
6959 overflow = mul_double_with_sign (TREE_INT_CST_LOW (arg01),
6960 TREE_INT_CST_HIGH (arg01),
6961 TREE_INT_CST_LOW (arg1),
6962 TREE_INT_CST_HIGH (arg1),
6963 &lpart, &hpart, unsigned_p);
6964 prod = force_fit_type_double (TREE_TYPE (arg00), lpart, hpart,
6965 -1, overflow);
6966 neg_overflow = false;
6968 if (unsigned_p)
6970 tmp = int_const_binop (MINUS_EXPR, arg01,
6971 build_int_cst (TREE_TYPE (arg01), 1), 0);
6972 lo = prod;
6974 /* Likewise hi = int_const_binop (PLUS_EXPR, prod, tmp, 0). */
6975 overflow = add_double_with_sign (TREE_INT_CST_LOW (prod),
6976 TREE_INT_CST_HIGH (prod),
6977 TREE_INT_CST_LOW (tmp),
6978 TREE_INT_CST_HIGH (tmp),
6979 &lpart, &hpart, unsigned_p);
6980 hi = force_fit_type_double (TREE_TYPE (arg00), lpart, hpart,
6981 -1, overflow | TREE_OVERFLOW (prod));
6983 else if (tree_int_cst_sgn (arg01) >= 0)
6985 tmp = int_const_binop (MINUS_EXPR, arg01,
6986 build_int_cst (TREE_TYPE (arg01), 1), 0);
6987 switch (tree_int_cst_sgn (arg1))
6989 case -1:
6990 neg_overflow = true;
6991 lo = int_const_binop (MINUS_EXPR, prod, tmp, 0);
6992 hi = prod;
6993 break;
6995 case 0:
6996 lo = fold_negate_const (tmp, TREE_TYPE (arg0));
6997 hi = tmp;
6998 break;
7000 case 1:
7001 hi = int_const_binop (PLUS_EXPR, prod, tmp, 0);
7002 lo = prod;
7003 break;
7005 default:
7006 gcc_unreachable ();
7009 else
7011 /* A negative divisor reverses the relational operators. */
7012 code = swap_tree_comparison (code);
7014 tmp = int_const_binop (PLUS_EXPR, arg01,
7015 build_int_cst (TREE_TYPE (arg01), 1), 0);
7016 switch (tree_int_cst_sgn (arg1))
7018 case -1:
7019 hi = int_const_binop (MINUS_EXPR, prod, tmp, 0);
7020 lo = prod;
7021 break;
7023 case 0:
7024 hi = fold_negate_const (tmp, TREE_TYPE (arg0));
7025 lo = tmp;
7026 break;
7028 case 1:
7029 neg_overflow = true;
7030 lo = int_const_binop (PLUS_EXPR, prod, tmp, 0);
7031 hi = prod;
7032 break;
7034 default:
7035 gcc_unreachable ();
7039 switch (code)
7041 case EQ_EXPR:
7042 if (TREE_OVERFLOW (lo) && TREE_OVERFLOW (hi))
7043 return omit_one_operand_loc (loc, type, integer_zero_node, arg00);
7044 if (TREE_OVERFLOW (hi))
7045 return fold_build2_loc (loc, GE_EXPR, type, arg00, lo);
7046 if (TREE_OVERFLOW (lo))
7047 return fold_build2_loc (loc, LE_EXPR, type, arg00, hi);
7048 return build_range_check (loc, type, arg00, 1, lo, hi);
7050 case NE_EXPR:
7051 if (TREE_OVERFLOW (lo) && TREE_OVERFLOW (hi))
7052 return omit_one_operand_loc (loc, type, integer_one_node, arg00);
7053 if (TREE_OVERFLOW (hi))
7054 return fold_build2_loc (loc, LT_EXPR, type, arg00, lo);
7055 if (TREE_OVERFLOW (lo))
7056 return fold_build2_loc (loc, GT_EXPR, type, arg00, hi);
7057 return build_range_check (loc, type, arg00, 0, lo, hi);
7059 case LT_EXPR:
7060 if (TREE_OVERFLOW (lo))
7062 tmp = neg_overflow ? integer_zero_node : integer_one_node;
7063 return omit_one_operand_loc (loc, type, tmp, arg00);
7065 return fold_build2_loc (loc, LT_EXPR, type, arg00, lo);
7067 case LE_EXPR:
7068 if (TREE_OVERFLOW (hi))
7070 tmp = neg_overflow ? integer_zero_node : integer_one_node;
7071 return omit_one_operand_loc (loc, type, tmp, arg00);
7073 return fold_build2_loc (loc, LE_EXPR, type, arg00, hi);
7075 case GT_EXPR:
7076 if (TREE_OVERFLOW (hi))
7078 tmp = neg_overflow ? integer_one_node : integer_zero_node;
7079 return omit_one_operand_loc (loc, type, tmp, arg00);
7081 return fold_build2_loc (loc, GT_EXPR, type, arg00, hi);
7083 case GE_EXPR:
7084 if (TREE_OVERFLOW (lo))
7086 tmp = neg_overflow ? integer_one_node : integer_zero_node;
7087 return omit_one_operand_loc (loc, type, tmp, arg00);
7089 return fold_build2_loc (loc, GE_EXPR, type, arg00, lo);
7091 default:
7092 break;
7095 return NULL_TREE;
7099 /* If CODE with arguments ARG0 and ARG1 represents a single bit
7100 equality/inequality test, then return a simplified form of the test
7101 using a sign test. Otherwise return NULL. TYPE is the desired
7102 result type. */
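/* For illustration, assuming X has an 8-bit unsigned type:
   (X & 0x80) != 0 tests the sign bit of the corresponding signed
   type, so it folds to (signed char) X < 0, and (X & 0x80) == 0
   to (signed char) X >= 0.  */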
7104 static tree
7105 fold_single_bit_test_into_sign_test (location_t loc,
7106 enum tree_code code, tree arg0, tree arg1,
7107 tree result_type)
7109 /* If this is testing a single bit, we can optimize the test. */
7110 if ((code == NE_EXPR || code == EQ_EXPR)
7111 && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
7112 && integer_pow2p (TREE_OPERAND (arg0, 1)))
7114 /* If we have (A & C) != 0 where C is the sign bit of A, convert
7115 this into A < 0. Similarly for (A & C) == 0 into A >= 0. */
7116 tree arg00 = sign_bit_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg0, 1));
7118 if (arg00 != NULL_TREE
7119 /* This is only a win if casting to a signed type is cheap,
7120 i.e. when arg00's type is not a partial mode. */
7121 && TYPE_PRECISION (TREE_TYPE (arg00))
7122 == GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (arg00))))
7124 tree stype = signed_type_for (TREE_TYPE (arg00));
7125 return fold_build2_loc (loc, code == EQ_EXPR ? GE_EXPR : LT_EXPR,
7126 result_type,
7127 fold_convert_loc (loc, stype, arg00),
7128 build_int_cst (stype, 0));
7132 return NULL_TREE;
7135 /* If CODE with arguments ARG0 and ARG1 represents a single bit
7136 equality/inequality test, then return a simplified form of
7137 the test using shifts and logical operations. Otherwise return
7138 NULL. TYPE is the desired result type. */
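/* For illustration: (X & 8) != 0 folds to (X >> 3) & 1 computed in
   a suitable unsigned type, and (X & 8) == 0 additionally XORs the
   shifted value with 1 before the final AND.  */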
7140 tree
7141 fold_single_bit_test (location_t loc, enum tree_code code,
7142 tree arg0, tree arg1, tree result_type)
7144 /* If this is testing a single bit, we can optimize the test. */
7145 if ((code == NE_EXPR || code == EQ_EXPR)
7146 && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
7147 && integer_pow2p (TREE_OPERAND (arg0, 1)))
7149 tree inner = TREE_OPERAND (arg0, 0);
7150 tree type = TREE_TYPE (arg0);
7151 int bitnum = tree_log2 (TREE_OPERAND (arg0, 1));
7152 enum machine_mode operand_mode = TYPE_MODE (type);
7153 int ops_unsigned;
7154 tree signed_type, unsigned_type, intermediate_type;
7155 tree tem, one;
7157 /* First, see if we can fold the single bit test into a sign-bit
7158 test. */
7159 tem = fold_single_bit_test_into_sign_test (loc, code, arg0, arg1,
7160 result_type);
7161 if (tem)
7162 return tem;
7164 /* Otherwise we have (A & C) != 0 where C is a single bit,
7165 convert that into ((A >> C2) & 1), where C2 = log2(C).
7166 Similarly for (A & C) == 0. */
7168 /* If INNER is a right shift of a constant and it plus BITNUM does
7169 not overflow, adjust BITNUM and INNER. */
7170 if (TREE_CODE (inner) == RSHIFT_EXPR
7171 && TREE_CODE (TREE_OPERAND (inner, 1)) == INTEGER_CST
7172 && TREE_INT_CST_HIGH (TREE_OPERAND (inner, 1)) == 0
7173 && bitnum < TYPE_PRECISION (type)
7174 && 0 > compare_tree_int (TREE_OPERAND (inner, 1),
7175 bitnum - TYPE_PRECISION (type)))
7177 bitnum += TREE_INT_CST_LOW (TREE_OPERAND (inner, 1));
7178 inner = TREE_OPERAND (inner, 0);
7181 /* If we are going to be able to omit the AND below, we must do our
7182 operations as unsigned. If we must use the AND, we have a choice.
7183 Normally unsigned is faster, but for some machines signed is. */
7184 #ifdef LOAD_EXTEND_OP
7185 ops_unsigned = (LOAD_EXTEND_OP (operand_mode) == SIGN_EXTEND
7186 && !flag_syntax_only) ? 0 : 1;
7187 #else
7188 ops_unsigned = 1;
7189 #endif
7191 signed_type = lang_hooks.types.type_for_mode (operand_mode, 0);
7192 unsigned_type = lang_hooks.types.type_for_mode (operand_mode, 1);
7193 intermediate_type = ops_unsigned ? unsigned_type : signed_type;
7194 inner = fold_convert_loc (loc, intermediate_type, inner);
7196 if (bitnum != 0)
7197 inner = build2 (RSHIFT_EXPR, intermediate_type,
7198 inner, size_int (bitnum));
7200 one = build_int_cst (intermediate_type, 1);
7202 if (code == EQ_EXPR)
7203 inner = fold_build2_loc (loc, BIT_XOR_EXPR, intermediate_type, inner, one);
7205 /* Put the AND last so it can combine with more things. */
7206 inner = build2 (BIT_AND_EXPR, intermediate_type, inner, one);
7208 /* Make sure to return the proper type. */
7209 inner = fold_convert_loc (loc, result_type, inner);
7211 return inner;
7213 return NULL_TREE;
7216 /* Check whether we are allowed to reorder operands arg0 and arg1,
7217 such that the evaluation of arg1 occurs before arg0. */
7219 static bool
7220 reorder_operands_p (const_tree arg0, const_tree arg1)
7222 if (! flag_evaluation_order)
7223 return true;
7224 if (TREE_CONSTANT (arg0) || TREE_CONSTANT (arg1))
7225 return true;
7226 return ! TREE_SIDE_EFFECTS (arg0)
7227 && ! TREE_SIDE_EFFECTS (arg1);
7230 /* Test whether it is preferable to swap two operands, ARG0 and
7231 ARG1, for example because ARG0 is an integer constant and ARG1
7232 isn't. If REORDER is true, only recommend swapping if we can
7233 evaluate the operands in reverse order. */
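/* For illustration: callers such as fold_binary use this to
   canonicalize commutative operands, e.g. rewriting 5 < x as
   x > 5 so that constants consistently appear as the second
   operand.  */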
7235 bool
7236 tree_swap_operands_p (const_tree arg0, const_tree arg1, bool reorder)
7238 STRIP_SIGN_NOPS (arg0);
7239 STRIP_SIGN_NOPS (arg1);
7241 if (TREE_CODE (arg1) == INTEGER_CST)
7242 return 0;
7243 if (TREE_CODE (arg0) == INTEGER_CST)
7244 return 1;
7246 if (TREE_CODE (arg1) == REAL_CST)
7247 return 0;
7248 if (TREE_CODE (arg0) == REAL_CST)
7249 return 1;
7251 if (TREE_CODE (arg1) == FIXED_CST)
7252 return 0;
7253 if (TREE_CODE (arg0) == FIXED_CST)
7254 return 1;
7256 if (TREE_CODE (arg1) == COMPLEX_CST)
7257 return 0;
7258 if (TREE_CODE (arg0) == COMPLEX_CST)
7259 return 1;
7261 if (TREE_CONSTANT (arg1))
7262 return 0;
7263 if (TREE_CONSTANT (arg0))
7264 return 1;
7266 if (optimize_function_for_size_p (cfun))
7267 return 0;
7269 if (reorder && flag_evaluation_order
7270 && (TREE_SIDE_EFFECTS (arg0) || TREE_SIDE_EFFECTS (arg1)))
7271 return 0;
7273 /* It is preferable to swap two SSA_NAMEs to ensure a canonical form
7274 for commutative and comparison operators. Ensuring a canonical
7275 form allows the optimizers to find additional redundancies without
7276 having to explicitly check for both orderings. */
7277 if (TREE_CODE (arg0) == SSA_NAME
7278 && TREE_CODE (arg1) == SSA_NAME
7279 && SSA_NAME_VERSION (arg0) > SSA_NAME_VERSION (arg1))
7280 return 1;
7282 /* Put SSA_NAMEs last. */
7283 if (TREE_CODE (arg1) == SSA_NAME)
7284 return 0;
7285 if (TREE_CODE (arg0) == SSA_NAME)
7286 return 1;
7288 /* Put variables last. */
7289 if (DECL_P (arg1))
7290 return 0;
7291 if (DECL_P (arg0))
7292 return 1;
7294 return 0;
7297 /* Fold comparison ARG0 CODE ARG1 (with result in TYPE), where
7298 ARG0 is extended to a wider type. */
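/* For illustration: if UC has unsigned char type, (int) UC < 300
   is always true because UC is at most 255, so the comparison
   folds to constant 1 while keeping UC for its side effects.  */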
7300 static tree
7301 fold_widened_comparison (location_t loc, enum tree_code code,
7302 tree type, tree arg0, tree arg1)
7304 tree arg0_unw = get_unwidened (arg0, NULL_TREE);
7305 tree arg1_unw;
7306 tree shorter_type, outer_type;
7307 tree min, max;
7308 bool above, below;
7310 if (arg0_unw == arg0)
7311 return NULL_TREE;
7312 shorter_type = TREE_TYPE (arg0_unw);
7314 #ifdef HAVE_canonicalize_funcptr_for_compare
7315 /* Disable this optimization if we're casting a function pointer
7316 type on targets that require function pointer canonicalization. */
7317 if (HAVE_canonicalize_funcptr_for_compare
7318 && TREE_CODE (shorter_type) == POINTER_TYPE
7319 && TREE_CODE (TREE_TYPE (shorter_type)) == FUNCTION_TYPE)
7320 return NULL_TREE;
7321 #endif
7323 if (TYPE_PRECISION (TREE_TYPE (arg0)) <= TYPE_PRECISION (shorter_type))
7324 return NULL_TREE;
7326 arg1_unw = get_unwidened (arg1, NULL_TREE);
7328 /* If possible, express the comparison in the shorter mode. */
7329 if ((code == EQ_EXPR || code == NE_EXPR
7330 || TYPE_UNSIGNED (TREE_TYPE (arg0)) == TYPE_UNSIGNED (shorter_type))
7331 && (TREE_TYPE (arg1_unw) == shorter_type
7332 || ((TYPE_PRECISION (shorter_type)
7333 >= TYPE_PRECISION (TREE_TYPE (arg1_unw)))
7334 && (TYPE_UNSIGNED (shorter_type)
7335 == TYPE_UNSIGNED (TREE_TYPE (arg1_unw))))
7336 || (TREE_CODE (arg1_unw) == INTEGER_CST
7337 && (TREE_CODE (shorter_type) == INTEGER_TYPE
7338 || TREE_CODE (shorter_type) == BOOLEAN_TYPE)
7339 && int_fits_type_p (arg1_unw, shorter_type))))
7340 return fold_build2_loc (loc, code, type, arg0_unw,
7341 fold_convert_loc (loc, shorter_type, arg1_unw));
7343 if (TREE_CODE (arg1_unw) != INTEGER_CST
7344 || TREE_CODE (shorter_type) != INTEGER_TYPE
7345 || int_fits_type_p (arg1_unw, shorter_type))
7346 return NULL_TREE;
7348 /* If we are comparing with an integer that does not fit into the range
7349 of the shorter type, the result is known. */
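/* Illustrative example: for unsigned char c, (int)c < 300 folds to
   constant true, because 300 lies above 255, the upper bound of the
   shorter type (assuming an 8-bit unsigned char). */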
7350 outer_type = TREE_TYPE (arg1_unw);
7351 min = lower_bound_in_type (outer_type, shorter_type);
7352 max = upper_bound_in_type (outer_type, shorter_type);
7354 above = integer_nonzerop (fold_relational_const (LT_EXPR, type,
7355 max, arg1_unw));
7356 below = integer_nonzerop (fold_relational_const (LT_EXPR, type,
7357 arg1_unw, min));
7359 switch (code)
7361 case EQ_EXPR:
7362 if (above || below)
7363 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
7364 break;
7366 case NE_EXPR:
7367 if (above || below)
7368 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
7369 break;
7371 case LT_EXPR:
7372 case LE_EXPR:
7373 if (above)
7374 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
7375 else if (below)
7376 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
break;
7378 case GT_EXPR:
7379 case GE_EXPR:
7380 if (above)
7381 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
7382 else if (below)
7383 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
break;
7385 default:
7386 break;
7389 return NULL_TREE;
7392 /* Fold comparison ARG0 CODE ARG1 (with result in TYPE), where for
7393 ARG0 just the signedness is changed. */
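/* Illustrative example: (unsigned int)i == 5 with i of type int
   folds to i == 5, while ordering comparisons such as < are left
   alone when the conversion changes signedness. */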
7395 static tree
7396 fold_sign_changed_comparison (location_t loc, enum tree_code code, tree type,
7397 tree arg0, tree arg1)
7399 tree arg0_inner;
7400 tree inner_type, outer_type;
7402 if (!CONVERT_EXPR_P (arg0))
7403 return NULL_TREE;
7405 outer_type = TREE_TYPE (arg0);
7406 arg0_inner = TREE_OPERAND (arg0, 0);
7407 inner_type = TREE_TYPE (arg0_inner);
7409 #ifdef HAVE_canonicalize_funcptr_for_compare
7410 /* Disable this optimization if we're casting a function pointer
7411 type on targets that require function pointer canonicalization. */
7412 if (HAVE_canonicalize_funcptr_for_compare
7413 && TREE_CODE (inner_type) == POINTER_TYPE
7414 && TREE_CODE (TREE_TYPE (inner_type)) == FUNCTION_TYPE)
7415 return NULL_TREE;
7416 #endif
7418 if (TYPE_PRECISION (inner_type) != TYPE_PRECISION (outer_type))
7419 return NULL_TREE;
7421 if (TREE_CODE (arg1) != INTEGER_CST
7422 && !(CONVERT_EXPR_P (arg1)
7423 && TREE_TYPE (TREE_OPERAND (arg1, 0)) == inner_type))
7424 return NULL_TREE;
7426 if ((TYPE_UNSIGNED (inner_type) != TYPE_UNSIGNED (outer_type)
7427 || POINTER_TYPE_P (inner_type) != POINTER_TYPE_P (outer_type))
7428 && code != NE_EXPR
7429 && code != EQ_EXPR)
7430 return NULL_TREE;
7432 if (TREE_CODE (arg1) == INTEGER_CST)
7433 arg1 = force_fit_type_double (inner_type, TREE_INT_CST_LOW (arg1),
7434 TREE_INT_CST_HIGH (arg1), 0,
7435 TREE_OVERFLOW (arg1));
7436 else
7437 arg1 = fold_convert_loc (loc, inner_type, arg1);
7439 return fold_build2_loc (loc, code, type, arg0_inner, arg1);
7442 /* Tries to replace &a[idx] p+ s * delta with &a[idx + delta], if s is
7443 step of the array. Reconstructs s and delta in the case of s *
7444 delta being an integer constant (and thus already folded). ADDR is
7445 the address. MULT is the multiplicative expression. If the
7446 function succeeds, the new address expression is returned.
7447 Otherwise NULL_TREE is returned. LOC is the location of the
7448 resulting expression. */
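/* Illustrative examples, assuming a 4-byte int: for int b[10],
   &b[i] p+ 4 * d becomes &b[i + d]; for int a[10][10], &a[1][2] p+ 8
   becomes &a[1][4], where the multi-dimensional check below verifies
   that the new index 4 still lies within the domain 0 .. 9. */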
7450 static tree
7451 try_move_mult_to_index (location_t loc, tree addr, tree op1)
7453 tree s, delta, step;
7454 tree ref = TREE_OPERAND (addr, 0), pref;
7455 tree ret, pos;
7456 tree itype;
7457 bool mdim = false;
7459 /* Strip the nops that might be added when converting op1 to sizetype. */
7460 STRIP_NOPS (op1);
7462 /* Canonicalize op1 into a possibly non-constant delta
7463 and an INTEGER_CST s. */
7464 if (TREE_CODE (op1) == MULT_EXPR)
7466 tree arg0 = TREE_OPERAND (op1, 0), arg1 = TREE_OPERAND (op1, 1);
7468 STRIP_NOPS (arg0);
7469 STRIP_NOPS (arg1);
7471 if (TREE_CODE (arg0) == INTEGER_CST)
7473 s = arg0;
7474 delta = arg1;
7476 else if (TREE_CODE (arg1) == INTEGER_CST)
7478 s = arg1;
7479 delta = arg0;
7481 else
7482 return NULL_TREE;
7484 else if (TREE_CODE (op1) == INTEGER_CST)
7486 delta = op1;
7487 s = NULL_TREE;
7489 else
7491 /* Pretend the multiplication is delta * 1. */
7492 delta = op1;
7493 s = integer_one_node;
7496 for (;; ref = TREE_OPERAND (ref, 0))
7498 if (TREE_CODE (ref) == ARRAY_REF)
7500 /* Remember if this was a multi-dimensional array. */
7501 if (TREE_CODE (TREE_OPERAND (ref, 0)) == ARRAY_REF)
7502 mdim = true;
7504 itype = TYPE_DOMAIN (TREE_TYPE (TREE_OPERAND (ref, 0)));
7505 if (! itype)
7506 continue;
7508 step = array_ref_element_size (ref);
7509 if (TREE_CODE (step) != INTEGER_CST)
7510 continue;
7512 if (s)
7514 if (! tree_int_cst_equal (step, s))
7515 continue;
7517 else
7519 /* Check whether delta is a multiple of step. */
7520 tree tmp = div_if_zero_remainder (EXACT_DIV_EXPR, op1, step);
7521 if (! tmp)
7522 continue;
7523 delta = tmp;
7526 /* Only fold here if we can verify we do not overflow one
7527 dimension of a multi-dimensional array. */
7528 if (mdim)
7530 tree tmp;
7532 if (TREE_CODE (TREE_OPERAND (ref, 1)) != INTEGER_CST
7533 || !INTEGRAL_TYPE_P (itype)
7534 || !TYPE_MAX_VALUE (itype)
7535 || TREE_CODE (TYPE_MAX_VALUE (itype)) != INTEGER_CST)
7536 continue;
7538 tmp = fold_binary_loc (loc, PLUS_EXPR, itype,
7539 fold_convert_loc (loc, itype,
7540 TREE_OPERAND (ref, 1)),
7541 fold_convert_loc (loc, itype, delta));
7542 if (!tmp
7543 || TREE_CODE (tmp) != INTEGER_CST
7544 || tree_int_cst_lt (TYPE_MAX_VALUE (itype), tmp))
7545 continue;
7548 break;
7550 else
7551 mdim = false;
7553 if (!handled_component_p (ref))
7554 return NULL_TREE;
7557 /* We found a suitable array reference. So copy everything up to it,
7558 and replace the index. */
7560 pref = TREE_OPERAND (addr, 0);
7561 ret = copy_node (pref);
7562 SET_EXPR_LOCATION (ret, loc);
7563 pos = ret;
7565 while (pref != ref)
7567 pref = TREE_OPERAND (pref, 0);
7568 TREE_OPERAND (pos, 0) = copy_node (pref);
7569 pos = TREE_OPERAND (pos, 0);
7572 TREE_OPERAND (pos, 1) = fold_build2_loc (loc, PLUS_EXPR, itype,
7573 fold_convert_loc (loc, itype,
7574 TREE_OPERAND (pos, 1)),
7575 fold_convert_loc (loc, itype, delta));
7577 return fold_build1_loc (loc, ADDR_EXPR, TREE_TYPE (addr), ret);
7581 /* Fold A < X && A + 1 > Y to A < X && A >= Y. Normally A + 1 > Y
7582 means A >= Y && A != MAX, but in this case we know that
7583 A < X <= MAX. INEQ is A + 1 > Y, BOUND is A < X. */
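/* Concretely, the only value for which A + 1 > Y and A >= Y can
   disagree is A == MAX, where A + 1 wraps around; the bound A < X
   rules that value out, which is what makes the rewrite safe. */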
7585 static tree
7586 fold_to_nonsharp_ineq_using_bound (location_t loc, tree ineq, tree bound)
7588 tree a, typea, type = TREE_TYPE (ineq), a1, diff, y;
7590 if (TREE_CODE (bound) == LT_EXPR)
7591 a = TREE_OPERAND (bound, 0);
7592 else if (TREE_CODE (bound) == GT_EXPR)
7593 a = TREE_OPERAND (bound, 1);
7594 else
7595 return NULL_TREE;
7597 typea = TREE_TYPE (a);
7598 if (!INTEGRAL_TYPE_P (typea)
7599 && !POINTER_TYPE_P (typea))
7600 return NULL_TREE;
7602 if (TREE_CODE (ineq) == LT_EXPR)
7604 a1 = TREE_OPERAND (ineq, 1);
7605 y = TREE_OPERAND (ineq, 0);
7607 else if (TREE_CODE (ineq) == GT_EXPR)
7609 a1 = TREE_OPERAND (ineq, 0);
7610 y = TREE_OPERAND (ineq, 1);
7612 else
7613 return NULL_TREE;
7615 if (TREE_TYPE (a1) != typea)
7616 return NULL_TREE;
7618 if (POINTER_TYPE_P (typea))
7620 /* Convert the pointer types into integer before taking the difference. */
7621 tree ta = fold_convert_loc (loc, ssizetype, a);
7622 tree ta1 = fold_convert_loc (loc, ssizetype, a1);
7623 diff = fold_binary_loc (loc, MINUS_EXPR, ssizetype, ta1, ta);
7625 else
7626 diff = fold_binary_loc (loc, MINUS_EXPR, typea, a1, a);
7628 if (!diff || !integer_onep (diff))
7629 return NULL_TREE;
7631 return fold_build2_loc (loc, GE_EXPR, type, a, y);
7634 /* Fold a sum or difference of at least one multiplication.
7635 Returns the folded tree or NULL if no simplification could be made. */
7637 static tree
7638 fold_plusminus_mult_expr (location_t loc, enum tree_code code, tree type,
7639 tree arg0, tree arg1)
7641 tree arg00, arg01, arg10, arg11;
7642 tree alt0 = NULL_TREE, alt1 = NULL_TREE, same;
7644 /* (A * C) +- (B * C) -> (A+-B) * C.
7645 (A * C) +- A -> A * (C+-1).
7646 We are most concerned about the case where C is a constant,
7647 but other combinations show up during loop reduction. Since
7648 it is not difficult, try all four possibilities. */
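/* E.g. i * 4 + j * 4 becomes (i + j) * 4, and x * 8 - x becomes
   x * (8 - 1), i.e. x * 7. */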
7650 if (TREE_CODE (arg0) == MULT_EXPR)
7652 arg00 = TREE_OPERAND (arg0, 0);
7653 arg01 = TREE_OPERAND (arg0, 1);
7655 else if (TREE_CODE (arg0) == INTEGER_CST)
7657 arg00 = build_one_cst (type);
7658 arg01 = arg0;
7660 else
7662 /* We cannot generate constant 1 for fract. */
7663 if (ALL_FRACT_MODE_P (TYPE_MODE (type)))
7664 return NULL_TREE;
7665 arg00 = arg0;
7666 arg01 = build_one_cst (type);
7668 if (TREE_CODE (arg1) == MULT_EXPR)
7670 arg10 = TREE_OPERAND (arg1, 0);
7671 arg11 = TREE_OPERAND (arg1, 1);
7673 else if (TREE_CODE (arg1) == INTEGER_CST)
7675 arg10 = build_one_cst (type);
7676 /* As we canonicalize A - 2 to A + -2, get rid of that sign for
7677 the purpose of this canonicalization. */
7678 if (TREE_INT_CST_HIGH (arg1) == -1
7679 && negate_expr_p (arg1)
7680 && code == PLUS_EXPR)
7682 arg11 = negate_expr (arg1);
7683 code = MINUS_EXPR;
7685 else
7686 arg11 = arg1;
7688 else
7690 /* We cannot generate constant 1 for fract. */
7691 if (ALL_FRACT_MODE_P (TYPE_MODE (type)))
7692 return NULL_TREE;
7693 arg10 = arg1;
7694 arg11 = build_one_cst (type);
7696 same = NULL_TREE;
7698 if (operand_equal_p (arg01, arg11, 0))
7699 same = arg01, alt0 = arg00, alt1 = arg10;
7700 else if (operand_equal_p (arg00, arg10, 0))
7701 same = arg00, alt0 = arg01, alt1 = arg11;
7702 else if (operand_equal_p (arg00, arg11, 0))
7703 same = arg00, alt0 = arg01, alt1 = arg10;
7704 else if (operand_equal_p (arg01, arg10, 0))
7705 same = arg01, alt0 = arg00, alt1 = arg11;
7707 /* No identical multiplicands; see if we can find a common
7708 power-of-two factor in non-power-of-two multiplies. This
7709 can help in multi-dimensional array access. */
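/* E.g. i * 12 + j * 4 can become (i * 3 + j) * 4, reusing the
   power-of-two factor 4. */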
7710 else if (host_integerp (arg01, 0)
7711 && host_integerp (arg11, 0))
7713 HOST_WIDE_INT int01, int11, tmp;
7714 bool swap = false;
7715 tree maybe_same;
7716 int01 = TREE_INT_CST_LOW (arg01);
7717 int11 = TREE_INT_CST_LOW (arg11);
7719 /* Move min of absolute values to int11. */
7720 if ((int01 >= 0 ? int01 : -int01)
7721 < (int11 >= 0 ? int11 : -int11))
7723 tmp = int01, int01 = int11, int11 = tmp;
7724 alt0 = arg00, arg00 = arg10, arg10 = alt0;
7725 maybe_same = arg01;
7726 swap = true;
7728 else
7729 maybe_same = arg11;
7731 if (exact_log2 (abs (int11)) > 0 && int01 % int11 == 0
7732 /* The remainder should not be a constant, otherwise we
7733 would end up folding i * 4 + 2 to (i * 2 + 1) * 2, which
7734 would increase the number of multiplications. */
7735 && TREE_CODE (arg10) != INTEGER_CST)
7737 alt0 = fold_build2_loc (loc, MULT_EXPR, TREE_TYPE (arg00), arg00,
7738 build_int_cst (TREE_TYPE (arg00),
7739 int01 / int11));
7740 alt1 = arg10;
7741 same = maybe_same;
7742 if (swap)
7743 maybe_same = alt0, alt0 = alt1, alt1 = maybe_same;
7747 if (same)
7748 return fold_build2_loc (loc, MULT_EXPR, type,
7749 fold_build2_loc (loc, code, type,
7750 fold_convert_loc (loc, type, alt0),
7751 fold_convert_loc (loc, type, alt1)),
7752 fold_convert_loc (loc, type, same));
7754 return NULL_TREE;
7757 /* Subroutine of native_encode_expr. Encode the INTEGER_CST
7758 specified by EXPR into the buffer PTR of length LEN bytes.
7759 Return the number of bytes placed in the buffer, or zero
7760 upon failure. */
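/* The bytes are laid out in the target's byte (and, for constants
   wider than a word, word) order; e.g. the 32-bit constant 0x01020304
   is written as the bytes 04 03 02 01 on a little-endian target
   (an illustrative example). */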
7762 static int
7763 native_encode_int (const_tree expr, unsigned char *ptr, int len)
7765 tree type = TREE_TYPE (expr);
7766 int total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
7767 int byte, offset, word, words;
7768 unsigned char value;
7770 if (total_bytes > len)
7771 return 0;
7772 words = total_bytes / UNITS_PER_WORD;
7774 for (byte = 0; byte < total_bytes; byte++)
7776 int bitpos = byte * BITS_PER_UNIT;
7777 if (bitpos < HOST_BITS_PER_WIDE_INT)
7778 value = (unsigned char) (TREE_INT_CST_LOW (expr) >> bitpos);
7779 else
7780 value = (unsigned char) (TREE_INT_CST_HIGH (expr)
7781 >> (bitpos - HOST_BITS_PER_WIDE_INT));
7783 if (total_bytes > UNITS_PER_WORD)
7785 word = byte / UNITS_PER_WORD;
7786 if (WORDS_BIG_ENDIAN)
7787 word = (words - 1) - word;
7788 offset = word * UNITS_PER_WORD;
7789 if (BYTES_BIG_ENDIAN)
7790 offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
7791 else
7792 offset += byte % UNITS_PER_WORD;
7794 else
7795 offset = BYTES_BIG_ENDIAN ? (total_bytes - 1) - byte : byte;
7796 ptr[offset] = value;
7798 return total_bytes;
7802 /* Subroutine of native_encode_expr. Encode the REAL_CST
7803 specified by EXPR into the buffer PTR of length LEN bytes.
7804 Return the number of bytes placed in the buffer, or zero
7805 upon failure. */
7807 static int
7808 native_encode_real (const_tree expr, unsigned char *ptr, int len)
7810 tree type = TREE_TYPE (expr);
7811 int total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
7812 int byte, offset, word, words, bitpos;
7813 unsigned char value;
7815 /* real_to_target always puts 32 bits in each long, no matter the
7816 size of the host's long. We handle floating point representations
7817 with up to 192 bits. */
7818 long tmp[6];
7820 if (total_bytes > len)
7821 return 0;
7822 words = (32 / BITS_PER_UNIT) / UNITS_PER_WORD;
7824 real_to_target (tmp, TREE_REAL_CST_PTR (expr), TYPE_MODE (type));
7826 for (bitpos = 0; bitpos < total_bytes * BITS_PER_UNIT;
7827 bitpos += BITS_PER_UNIT)
7829 byte = (bitpos / BITS_PER_UNIT) & 3;
7830 value = (unsigned char) (tmp[bitpos / 32] >> (bitpos & 31));
7832 if (UNITS_PER_WORD < 4)
7834 word = byte / UNITS_PER_WORD;
7835 if (WORDS_BIG_ENDIAN)
7836 word = (words - 1) - word;
7837 offset = word * UNITS_PER_WORD;
7838 if (BYTES_BIG_ENDIAN)
7839 offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
7840 else
7841 offset += byte % UNITS_PER_WORD;
7843 else
7844 offset = BYTES_BIG_ENDIAN ? 3 - byte : byte;
7845 ptr[offset + ((bitpos / BITS_PER_UNIT) & ~3)] = value;
7847 return total_bytes;
7850 /* Subroutine of native_encode_expr. Encode the COMPLEX_CST
7851 specified by EXPR into the buffer PTR of length LEN bytes.
7852 Return the number of bytes placed in the buffer, or zero
7853 upon failure. */
7855 static int
7856 native_encode_complex (const_tree expr, unsigned char *ptr, int len)
7858 int rsize, isize;
7859 tree part;
7861 part = TREE_REALPART (expr);
7862 rsize = native_encode_expr (part, ptr, len);
7863 if (rsize == 0)
7864 return 0;
7865 part = TREE_IMAGPART (expr);
7866 isize = native_encode_expr (part, ptr+rsize, len-rsize);
7867 if (isize != rsize)
7868 return 0;
7869 return rsize + isize;
7873 /* Subroutine of native_encode_expr. Encode the VECTOR_CST
7874 specified by EXPR into the buffer PTR of length LEN bytes.
7875 Return the number of bytes placed in the buffer, or zero
7876 upon failure. */
7878 static int
7879 native_encode_vector (const_tree expr, unsigned char *ptr, int len)
7881 int i, size, offset, count;
7882 tree itype, elem, elements;
7884 offset = 0;
7885 elements = TREE_VECTOR_CST_ELTS (expr);
7886 count = TYPE_VECTOR_SUBPARTS (TREE_TYPE (expr));
7887 itype = TREE_TYPE (TREE_TYPE (expr));
7888 size = GET_MODE_SIZE (TYPE_MODE (itype));
7889 for (i = 0; i < count; i++)
7891 if (elements)
7893 elem = TREE_VALUE (elements);
7894 elements = TREE_CHAIN (elements);
7896 else
7897 elem = NULL_TREE;
7899 if (elem)
7901 if (native_encode_expr (elem, ptr+offset, len-offset) != size)
7902 return 0;
7904 else
7906 if (offset + size > len)
7907 return 0;
7908 memset (ptr+offset, 0, size);
7910 offset += size;
7912 return offset;
7916 /* Subroutine of native_encode_expr. Encode the STRING_CST
7917 specified by EXPR into the buffer PTR of length LEN bytes.
7918 Return the number of bytes placed in the buffer, or zero
7919 upon failure. */
7921 static int
7922 native_encode_string (const_tree expr, unsigned char *ptr, int len)
7924 tree type = TREE_TYPE (expr);
7925 HOST_WIDE_INT total_bytes;
7927 if (TREE_CODE (type) != ARRAY_TYPE
7928 || TREE_CODE (TREE_TYPE (type)) != INTEGER_TYPE
7929 || GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (type))) != BITS_PER_UNIT
7930 || !host_integerp (TYPE_SIZE_UNIT (type), 0))
7931 return 0;
7932 total_bytes = tree_low_cst (TYPE_SIZE_UNIT (type), 0);
7933 if (total_bytes > len)
7934 return 0;
7935 if (TREE_STRING_LENGTH (expr) < total_bytes)
7937 memcpy (ptr, TREE_STRING_POINTER (expr), TREE_STRING_LENGTH (expr));
7938 memset (ptr + TREE_STRING_LENGTH (expr), 0,
7939 total_bytes - TREE_STRING_LENGTH (expr));
7941 else
7942 memcpy (ptr, TREE_STRING_POINTER (expr), total_bytes);
7943 return total_bytes;
7947 /* Subroutine of fold_view_convert_expr. Encode the INTEGER_CST,
7948 REAL_CST, COMPLEX_CST, STRING_CST or VECTOR_CST specified by EXPR
7949 into the buffer PTR of length LEN bytes. Return the number of
7950 bytes placed in the buffer, or zero upon failure. */
7952 int
7953 native_encode_expr (const_tree expr, unsigned char *ptr, int len)
7955 switch (TREE_CODE (expr))
7957 case INTEGER_CST:
7958 return native_encode_int (expr, ptr, len);
7960 case REAL_CST:
7961 return native_encode_real (expr, ptr, len);
7963 case COMPLEX_CST:
7964 return native_encode_complex (expr, ptr, len);
7966 case VECTOR_CST:
7967 return native_encode_vector (expr, ptr, len);
7969 case STRING_CST:
7970 return native_encode_string (expr, ptr, len);
7972 default:
7973 return 0;
7978 /* Subroutine of native_interpret_expr. Interpret the contents of
7979 the buffer PTR of length LEN as an INTEGER_CST of type TYPE.
7980 If the buffer cannot be interpreted, return NULL_TREE. */
7982 static tree
7983 native_interpret_int (tree type, const unsigned char *ptr, int len)
7985 int total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
7986 int byte, offset, word, words;
7987 unsigned char value;
7988 unsigned HOST_WIDE_INT lo = 0;
7989 HOST_WIDE_INT hi = 0;
7991 if (total_bytes > len)
7992 return NULL_TREE;
7993 if (total_bytes * BITS_PER_UNIT > 2 * HOST_BITS_PER_WIDE_INT)
7994 return NULL_TREE;
7995 words = total_bytes / UNITS_PER_WORD;
7997 for (byte = 0; byte < total_bytes; byte++)
7999 int bitpos = byte * BITS_PER_UNIT;
8000 if (total_bytes > UNITS_PER_WORD)
8002 word = byte / UNITS_PER_WORD;
8003 if (WORDS_BIG_ENDIAN)
8004 word = (words - 1) - word;
8005 offset = word * UNITS_PER_WORD;
8006 if (BYTES_BIG_ENDIAN)
8007 offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
8008 else
8009 offset += byte % UNITS_PER_WORD;
8011 else
8012 offset = BYTES_BIG_ENDIAN ? (total_bytes - 1) - byte : byte;
8013 value = ptr[offset];
8015 if (bitpos < HOST_BITS_PER_WIDE_INT)
8016 lo |= (unsigned HOST_WIDE_INT) value << bitpos;
8017 else
8018 hi |= (unsigned HOST_WIDE_INT) value
8019 << (bitpos - HOST_BITS_PER_WIDE_INT);
8022 return build_int_cst_wide_type (type, lo, hi);
8026 /* Subroutine of native_interpret_expr. Interpret the contents of
8027 the buffer PTR of length LEN as a REAL_CST of type TYPE.
8028 If the buffer cannot be interpreted, return NULL_TREE. */
8030 static tree
8031 native_interpret_real (tree type, const unsigned char *ptr, int len)
8033 enum machine_mode mode = TYPE_MODE (type);
8034 int total_bytes = GET_MODE_SIZE (mode);
8035 int byte, offset, word, words, bitpos;
8036 unsigned char value;
8037 /* real_from_target expects 32 bits in each long, no matter the
8038 size of the host's long. We handle floating point representations
8039 with up to 192 bits. */
8040 REAL_VALUE_TYPE r;
8041 long tmp[6];
8044 if (total_bytes > len || total_bytes > 24)
8045 return NULL_TREE;
8046 words = (32 / BITS_PER_UNIT) / UNITS_PER_WORD;
8048 memset (tmp, 0, sizeof (tmp));
8049 for (bitpos = 0; bitpos < total_bytes * BITS_PER_UNIT;
8050 bitpos += BITS_PER_UNIT)
8052 byte = (bitpos / BITS_PER_UNIT) & 3;
8053 if (UNITS_PER_WORD < 4)
8055 word = byte / UNITS_PER_WORD;
8056 if (WORDS_BIG_ENDIAN)
8057 word = (words - 1) - word;
8058 offset = word * UNITS_PER_WORD;
8059 if (BYTES_BIG_ENDIAN)
8060 offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
8061 else
8062 offset += byte % UNITS_PER_WORD;
8064 else
8065 offset = BYTES_BIG_ENDIAN ? 3 - byte : byte;
8066 value = ptr[offset + ((bitpos / BITS_PER_UNIT) & ~3)];
8068 tmp[bitpos / 32] |= (unsigned long)value << (bitpos & 31);
8071 real_from_target (&r, tmp, mode);
8072 return build_real (type, r);
8076 /* Subroutine of native_interpret_expr. Interpret the contents of
8077 the buffer PTR of length LEN as a COMPLEX_CST of type TYPE.
8078 If the buffer cannot be interpreted, return NULL_TREE. */
8080 static tree
8081 native_interpret_complex (tree type, const unsigned char *ptr, int len)
8083 tree etype, rpart, ipart;
8084 int size;
8086 etype = TREE_TYPE (type);
8087 size = GET_MODE_SIZE (TYPE_MODE (etype));
8088 if (size * 2 > len)
8089 return NULL_TREE;
8090 rpart = native_interpret_expr (etype, ptr, size);
8091 if (!rpart)
8092 return NULL_TREE;
8093 ipart = native_interpret_expr (etype, ptr+size, size);
8094 if (!ipart)
8095 return NULL_TREE;
8096 return build_complex (type, rpart, ipart);
8100 /* Subroutine of native_interpret_expr. Interpret the contents of
8101 the buffer PTR of length LEN as a VECTOR_CST of type TYPE.
8102 If the buffer cannot be interpreted, return NULL_TREE. */
8104 static tree
8105 native_interpret_vector (tree type, const unsigned char *ptr, int len)
8107 tree etype, elem, elements;
8108 int i, size, count;
8110 etype = TREE_TYPE (type);
8111 size = GET_MODE_SIZE (TYPE_MODE (etype));
8112 count = TYPE_VECTOR_SUBPARTS (type);
8113 if (size * count > len)
8114 return NULL_TREE;
8116 elements = NULL_TREE;
8117 for (i = count - 1; i >= 0; i--)
8119 elem = native_interpret_expr (etype, ptr+(i*size), size);
8120 if (!elem)
8121 return NULL_TREE;
8122 elements = tree_cons (NULL_TREE, elem, elements);
8124 return build_vector (type, elements);
8128 /* Subroutine of fold_view_convert_expr. Interpret the contents of
8129 the buffer PTR of length LEN as a constant of type TYPE. For
8130 INTEGRAL_TYPE_P we return an INTEGER_CST, for SCALAR_FLOAT_TYPE_P
8131 we return a REAL_CST, etc... If the buffer cannot be interpreted,
8132 return NULL_TREE. */
8134 tree
8135 native_interpret_expr (tree type, const unsigned char *ptr, int len)
8137 switch (TREE_CODE (type))
8139 case INTEGER_TYPE:
8140 case ENUMERAL_TYPE:
8141 case BOOLEAN_TYPE:
8142 return native_interpret_int (type, ptr, len);
8144 case REAL_TYPE:
8145 return native_interpret_real (type, ptr, len);
8147 case COMPLEX_TYPE:
8148 return native_interpret_complex (type, ptr, len);
8150 case VECTOR_TYPE:
8151 return native_interpret_vector (type, ptr, len);
8153 default:
8154 return NULL_TREE;
8159 /* Fold a VIEW_CONVERT_EXPR of a constant expression EXPR to type
8160 TYPE at compile-time. If we're unable to perform the conversion
8161 return NULL_TREE. */
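/* Illustrative example, assuming a 32-bit int and IEEE single
   precision: VIEW_CONVERT_EXPR<int>(1.0f) encodes the float into its
   byte image and reinterprets those bytes as the integer
   0x3f800000. */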
8163 static tree
8164 fold_view_convert_expr (tree type, tree expr)
8166 /* We support up to 512-bit values (for V8DFmode). */
8167 unsigned char buffer[64];
8168 int len;
8170 /* Check that the host and target are sane. */
8171 if (CHAR_BIT != 8 || BITS_PER_UNIT != 8)
8172 return NULL_TREE;
8174 len = native_encode_expr (expr, buffer, sizeof (buffer));
8175 if (len == 0)
8176 return NULL_TREE;
8178 return native_interpret_expr (type, buffer, len);
8181 /* Build an expression for the address of T. Folds away INDIRECT_REF
8182 to avoid confusing the gimplify process. */
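/* E.g. taking the address of *p simply yields p again (wrapped in a
   NOP_EXPR when the pointer type differs from PTRTYPE), instead of
   building ADDR_EXPR <INDIRECT_REF <p>>. */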
8184 tree
8185 build_fold_addr_expr_with_type_loc (location_t loc, tree t, tree ptrtype)
8187 /* The size of the object is not relevant when talking about its address. */
8188 if (TREE_CODE (t) == WITH_SIZE_EXPR)
8189 t = TREE_OPERAND (t, 0);
8191 /* Note: doesn't apply to ALIGN_INDIRECT_REF */
8192 if (TREE_CODE (t) == INDIRECT_REF
8193 || TREE_CODE (t) == MISALIGNED_INDIRECT_REF)
8195 t = TREE_OPERAND (t, 0);
8197 if (TREE_TYPE (t) != ptrtype)
8199 t = build1 (NOP_EXPR, ptrtype, t);
8200 SET_EXPR_LOCATION (t, loc);
8203 else if (TREE_CODE (t) == VIEW_CONVERT_EXPR)
8205 t = build_fold_addr_expr_loc (loc, TREE_OPERAND (t, 0));
8207 if (TREE_TYPE (t) != ptrtype)
8208 t = fold_convert_loc (loc, ptrtype, t);
8210 else
8212 t = build1 (ADDR_EXPR, ptrtype, t);
8213 SET_EXPR_LOCATION (t, loc);
8216 return t;
8219 /* Build an expression for the address of T. */
8221 tree
8222 build_fold_addr_expr_loc (location_t loc, tree t)
8224 tree ptrtype = build_pointer_type (TREE_TYPE (t));
8226 return build_fold_addr_expr_with_type_loc (loc, t, ptrtype);
8229 /* Fold a unary expression of code CODE and type TYPE with operand
8230 OP0. Return the folded expression if folding is successful.
8231 Otherwise, return NULL_TREE. */
8233 tree
8234 fold_unary_loc (location_t loc, enum tree_code code, tree type, tree op0)
8236 tree tem;
8237 tree arg0;
8238 enum tree_code_class kind = TREE_CODE_CLASS (code);
8240 gcc_assert (IS_EXPR_CODE_CLASS (kind)
8241 && TREE_CODE_LENGTH (code) == 1);
8243 arg0 = op0;
8244 if (arg0)
8246 if (CONVERT_EXPR_CODE_P (code)
8247 || code == FLOAT_EXPR || code == ABS_EXPR)
8249 /* Don't use STRIP_NOPS, because signedness of argument type
8250 matters. */
8251 STRIP_SIGN_NOPS (arg0);
8253 else
8255 /* Strip any conversions that don't change the mode. This
8256 is safe for every expression, except for a comparison
8257 expression because its signedness is derived from its
8258 operands.
8260 Note that this is done as an internal manipulation within
8261 the constant folder, in order to find the simplest
8262 representation of the arguments so that their form can be
8263 studied. In any case, the appropriate type conversions
8264 should be put back in the tree that will get out of the
8265 constant folder. */
8266 STRIP_NOPS (arg0);
8270 if (TREE_CODE_CLASS (code) == tcc_unary)
8272 if (TREE_CODE (arg0) == COMPOUND_EXPR)
8273 return build2 (COMPOUND_EXPR, type, TREE_OPERAND (arg0, 0),
8274 fold_build1_loc (loc, code, type,
8275 fold_convert_loc (loc, TREE_TYPE (op0),
8276 TREE_OPERAND (arg0, 1))));
8277 else if (TREE_CODE (arg0) == COND_EXPR)
8279 tree arg01 = TREE_OPERAND (arg0, 1);
8280 tree arg02 = TREE_OPERAND (arg0, 2);
8281 if (! VOID_TYPE_P (TREE_TYPE (arg01)))
8282 arg01 = fold_build1_loc (loc, code, type,
8283 fold_convert_loc (loc,
8284 TREE_TYPE (op0), arg01));
8285 if (! VOID_TYPE_P (TREE_TYPE (arg02)))
8286 arg02 = fold_build1_loc (loc, code, type,
8287 fold_convert_loc (loc,
8288 TREE_TYPE (op0), arg02));
8289 tem = fold_build3_loc (loc, COND_EXPR, type, TREE_OPERAND (arg0, 0),
8290 arg01, arg02);
8292 /* If this was a conversion, and all we did was to move it
8293 inside the COND_EXPR, bring it back out. But leave it if
8294 it is a conversion from integer to integer and the
8295 result precision is no wider than a word since such a
8296 conversion is cheap and may be optimized away by combine,
8297 while it couldn't if it were outside the COND_EXPR. Then return
8298 so we don't get into an infinite recursion loop taking the
8299 conversion out and then back in. */
8301 if ((CONVERT_EXPR_CODE_P (code)
8302 || code == NON_LVALUE_EXPR)
8303 && TREE_CODE (tem) == COND_EXPR
8304 && TREE_CODE (TREE_OPERAND (tem, 1)) == code
8305 && TREE_CODE (TREE_OPERAND (tem, 2)) == code
8306 && ! VOID_TYPE_P (TREE_OPERAND (tem, 1))
8307 && ! VOID_TYPE_P (TREE_OPERAND (tem, 2))
8308 && (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 1), 0))
8309 == TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 2), 0)))
8310 && (! (INTEGRAL_TYPE_P (TREE_TYPE (tem))
8311 && (INTEGRAL_TYPE_P
8312 (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 1), 0))))
8313 && TYPE_PRECISION (TREE_TYPE (tem)) <= BITS_PER_WORD)
8314 || flag_syntax_only))
8316 tem = build1 (code, type,
8317 build3 (COND_EXPR,
8318 TREE_TYPE (TREE_OPERAND
8319 (TREE_OPERAND (tem, 1), 0)),
8320 TREE_OPERAND (tem, 0),
8321 TREE_OPERAND (TREE_OPERAND (tem, 1), 0),
8322 TREE_OPERAND (TREE_OPERAND (tem, 2), 0)));
8323 SET_EXPR_LOCATION (tem, loc);
8325 return tem;
8327 else if (COMPARISON_CLASS_P (arg0))
8329 if (TREE_CODE (type) == BOOLEAN_TYPE)
8331 arg0 = copy_node (arg0);
8332 TREE_TYPE (arg0) = type;
8333 return arg0;
8335 else if (TREE_CODE (type) != INTEGER_TYPE)
8336 return fold_build3_loc (loc, COND_EXPR, type, arg0,
8337 fold_build1_loc (loc, code, type,
8338 integer_one_node),
8339 fold_build1_loc (loc, code, type,
8340 integer_zero_node));
8344 switch (code)
8346 case PAREN_EXPR:
8347 /* Re-association barriers around constants and other re-association
8348 barriers can be removed. */
8349 if (CONSTANT_CLASS_P (op0)
8350 || TREE_CODE (op0) == PAREN_EXPR)
8351 return fold_convert_loc (loc, type, op0);
8352 return NULL_TREE;
8354 CASE_CONVERT:
8355 case FLOAT_EXPR:
8356 case FIX_TRUNC_EXPR:
8357 if (TREE_TYPE (op0) == type)
8358 return op0;
8360 /* If we have (type) (a CMP b) and type is an integral type, return
8361 new expression involving the new type. */
8362 if (COMPARISON_CLASS_P (op0) && INTEGRAL_TYPE_P (type))
8363 return fold_build2_loc (loc, TREE_CODE (op0), type, TREE_OPERAND (op0, 0),
8364 TREE_OPERAND (op0, 1));
8366 /* Handle cases of two conversions in a row. */
8367 if (CONVERT_EXPR_P (op0))
8369 tree inside_type = TREE_TYPE (TREE_OPERAND (op0, 0));
8370 tree inter_type = TREE_TYPE (op0);
8371 int inside_int = INTEGRAL_TYPE_P (inside_type);
8372 int inside_ptr = POINTER_TYPE_P (inside_type);
8373 int inside_float = FLOAT_TYPE_P (inside_type);
8374 int inside_vec = TREE_CODE (inside_type) == VECTOR_TYPE;
8375 unsigned int inside_prec = TYPE_PRECISION (inside_type);
8376 int inside_unsignedp = TYPE_UNSIGNED (inside_type);
8377 int inter_int = INTEGRAL_TYPE_P (inter_type);
8378 int inter_ptr = POINTER_TYPE_P (inter_type);
8379 int inter_float = FLOAT_TYPE_P (inter_type);
8380 int inter_vec = TREE_CODE (inter_type) == VECTOR_TYPE;
8381 unsigned int inter_prec = TYPE_PRECISION (inter_type);
8382 int inter_unsignedp = TYPE_UNSIGNED (inter_type);
8383 int final_int = INTEGRAL_TYPE_P (type);
8384 int final_ptr = POINTER_TYPE_P (type);
8385 int final_float = FLOAT_TYPE_P (type);
8386 int final_vec = TREE_CODE (type) == VECTOR_TYPE;
8387 unsigned int final_prec = TYPE_PRECISION (type);
8388 int final_unsignedp = TYPE_UNSIGNED (type);
8390 /* In addition to the cases of two conversions in a row
8391 handled below, if we are converting something to its own
8392 type via an object of identical or wider precision, neither
8393 conversion is needed. */
8394 if (TYPE_MAIN_VARIANT (inside_type) == TYPE_MAIN_VARIANT (type)
8395 && (((inter_int || inter_ptr) && final_int)
8396 || (inter_float && final_float))
8397 && inter_prec >= final_prec)
8398 return fold_build1_loc (loc, code, type, TREE_OPERAND (op0, 0));
8400 /* Likewise, if the intermediate and initial types are either both
8401 float or both integer, we don't need the middle conversion if the
8402 former is wider than the latter and doesn't change the signedness
8403 (for integers). Avoid this if the final type is a pointer since
8404 then we sometimes need the middle conversion. Likewise if the
8405 final type has a precision not equal to the size of its mode. */
8406 if (((inter_int && inside_int)
8407 || (inter_float && inside_float)
8408 || (inter_vec && inside_vec))
8409 && inter_prec >= inside_prec
8410 && (inter_float || inter_vec
8411 || inter_unsignedp == inside_unsignedp)
8412 && ! (final_prec != GET_MODE_BITSIZE (TYPE_MODE (type))
8413 && TYPE_MODE (type) == TYPE_MODE (inter_type))
8414 && ! final_ptr
8415 && (! final_vec || inter_prec == inside_prec))
8416 return fold_build1_loc (loc, code, type, TREE_OPERAND (op0, 0));
8418 /* If we have a sign-extension of a zero-extended value, we can
8419 replace that by a single zero-extension. */
8420 if (inside_int && inter_int && final_int
8421 && inside_prec < inter_prec && inter_prec < final_prec
8422 && inside_unsignedp && !inter_unsignedp)
8423 return fold_build1_loc (loc, code, type, TREE_OPERAND (op0, 0));
8425 /* Two conversions in a row are not needed unless:
8426 - some conversion is floating-point (overstrict for now), or
8427 - some conversion is a vector (overstrict for now), or
8428 - the intermediate type is narrower than both initial and
8429 final, or
8430 - the intermediate type and innermost type differ in signedness,
8431 and the outermost type is wider than the intermediate, or
8432 - the initial type is a pointer type and the precisions of the
8433 intermediate and final types differ, or
8434 - the final type is a pointer type and the precisions of the
8435 initial and intermediate types differ. */
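/* Illustrative example, assuming 16/32/64-bit short/int/long:
   (short)(long)i with i of type int becomes (short)i, since none of
   the exceptions above applies and the intermediate widening can be
   dropped. */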
8436 if (! inside_float && ! inter_float && ! final_float
8437 && ! inside_vec && ! inter_vec && ! final_vec
8438 && (inter_prec >= inside_prec || inter_prec >= final_prec)
8439 && ! (inside_int && inter_int
8440 && inter_unsignedp != inside_unsignedp
8441 && inter_prec < final_prec)
8442 && ((inter_unsignedp && inter_prec > inside_prec)
8443 == (final_unsignedp && final_prec > inter_prec))
8444 && ! (inside_ptr && inter_prec != final_prec)
8445 && ! (final_ptr && inside_prec != inter_prec)
8446 && ! (final_prec != GET_MODE_BITSIZE (TYPE_MODE (type))
8447 && TYPE_MODE (type) == TYPE_MODE (inter_type)))
8448 return fold_build1_loc (loc, code, type, TREE_OPERAND (op0, 0));
8451 /* Handle (T *)&A.B.C for A being of type T and B and C
8452 living at offset zero. This occurs frequently in
8453 C++ upcasting and then accessing the base. */
8454 if (TREE_CODE (op0) == ADDR_EXPR
8455 && POINTER_TYPE_P (type)
8456 && handled_component_p (TREE_OPERAND (op0, 0)))
8458 HOST_WIDE_INT bitsize, bitpos;
8459 tree offset;
8460 enum machine_mode mode;
8461 int unsignedp, volatilep;
8462 tree base = TREE_OPERAND (op0, 0);
8463 base = get_inner_reference (base, &bitsize, &bitpos, &offset,
8464 &mode, &unsignedp, &volatilep, false);
8465 /* If the reference was to a (constant) zero offset, we can use
8466 the address of the base if it has the same base type
8467 as the result type. */
8468 if (! offset && bitpos == 0
8469 && TYPE_MAIN_VARIANT (TREE_TYPE (type))
8470 == TYPE_MAIN_VARIANT (TREE_TYPE (base)))
8471 return fold_convert_loc (loc, type,
8472 build_fold_addr_expr_loc (loc, base));
8475 if (TREE_CODE (op0) == MODIFY_EXPR
8476 && TREE_CONSTANT (TREE_OPERAND (op0, 1))
8477 /* Detect assigning a bitfield. */
8478 && !(TREE_CODE (TREE_OPERAND (op0, 0)) == COMPONENT_REF
8479 && DECL_BIT_FIELD
8480 (TREE_OPERAND (TREE_OPERAND (op0, 0), 1))))
8482 /* Don't leave an assignment inside a conversion
8483 unless assigning a bitfield. */
8484 tem = fold_build1_loc (loc, code, type, TREE_OPERAND (op0, 1));
8485 /* First do the assignment, then return converted constant. */
8486 tem = build2 (COMPOUND_EXPR, TREE_TYPE (tem), op0, tem);
8487 TREE_NO_WARNING (tem) = 1;
8488 TREE_USED (tem) = 1;
8489 SET_EXPR_LOCATION (tem, loc);
8490 return tem;
8493 /* Convert (T)(x & c) into (T)x & (T)c, if c is an integer
8494 constants (if x has signed type, the sign bit cannot be set
8495 in c). This folds extension into the BIT_AND_EXPR.
8496 ??? We don't do it for BOOLEAN_TYPE or ENUMERAL_TYPE because they
8497 very likely don't have maximal range for their precision and this
8498 transformation effectively doesn't preserve non-maximal ranges. */
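/* E.g. for unsigned char x, (unsigned int)(x & 0x7f) becomes
   (unsigned int)x & 0x7f, so only a single widening remains
   (an illustrative tree-level example). */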
8499 if (TREE_CODE (type) == INTEGER_TYPE
8500 && TREE_CODE (op0) == BIT_AND_EXPR
8501 && TREE_CODE (TREE_OPERAND (op0, 1)) == INTEGER_CST)
8503 tree and_expr = op0;
8504 tree and0 = TREE_OPERAND (and_expr, 0);
8505 tree and1 = TREE_OPERAND (and_expr, 1);
8506 int change = 0;
8508 if (TYPE_UNSIGNED (TREE_TYPE (and_expr))
8509 || (TYPE_PRECISION (type)
8510 <= TYPE_PRECISION (TREE_TYPE (and_expr))))
8511 change = 1;
8512 else if (TYPE_PRECISION (TREE_TYPE (and1))
8513 <= HOST_BITS_PER_WIDE_INT
8514 && host_integerp (and1, 1))
8516 unsigned HOST_WIDE_INT cst;
8518 cst = tree_low_cst (and1, 1);
8519 cst &= (HOST_WIDE_INT) -1
8520 << (TYPE_PRECISION (TREE_TYPE (and1)) - 1);
8521 change = (cst == 0);
8522 #ifdef LOAD_EXTEND_OP
8523 if (change
8524 && !flag_syntax_only
8525 && (LOAD_EXTEND_OP (TYPE_MODE (TREE_TYPE (and0)))
8526 == ZERO_EXTEND))
8528 tree uns = unsigned_type_for (TREE_TYPE (and0));
8529 and0 = fold_convert_loc (loc, uns, and0);
8530 and1 = fold_convert_loc (loc, uns, and1);
8532 #endif
8534 if (change)
8536 tem = force_fit_type_double (type, TREE_INT_CST_LOW (and1),
8537 TREE_INT_CST_HIGH (and1), 0,
8538 TREE_OVERFLOW (and1));
8539 return fold_build2_loc (loc, BIT_AND_EXPR, type,
8540 fold_convert_loc (loc, type, and0), tem);
8544 /* Convert (T1)(X p+ Y) into ((T1)X p+ Y), for pointer type,
8545 when one of the new casts will fold away. Conservatively we assume
8546 that this happens when X or Y is NOP_EXPR or Y is INTEGER_CST. */
8547 if (POINTER_TYPE_P (type)
8548 && TREE_CODE (arg0) == POINTER_PLUS_EXPR
8549 && (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
8550 || TREE_CODE (TREE_OPERAND (arg0, 0)) == NOP_EXPR
8551 || TREE_CODE (TREE_OPERAND (arg0, 1)) == NOP_EXPR))
8553 tree arg00 = TREE_OPERAND (arg0, 0);
8554 tree arg01 = TREE_OPERAND (arg0, 1);
8556 return fold_build2_loc (loc,
8557 TREE_CODE (arg0), type,
8558 fold_convert_loc (loc, type, arg00),
8559 fold_convert_loc (loc, sizetype, arg01));
8562 /* Convert (T1)(~(T2)X) into ~(T1)X if T1 and T2 are integral types
8563 of the same precision, and X has an integral type not narrower than
8564 T1 or T2, i.e. the cast (T2)X isn't an extension. */
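/* E.g. (int)~(unsigned int)x with x of type int becomes ~x, since
   int and unsigned int have the same precision. */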
8565 if (INTEGRAL_TYPE_P (type)
8566 && TREE_CODE (op0) == BIT_NOT_EXPR
8567 && INTEGRAL_TYPE_P (TREE_TYPE (op0))
8568 && CONVERT_EXPR_P (TREE_OPERAND (op0, 0))
8569 && TYPE_PRECISION (type) == TYPE_PRECISION (TREE_TYPE (op0)))
8571 tem = TREE_OPERAND (TREE_OPERAND (op0, 0), 0);
8572 if (INTEGRAL_TYPE_P (TREE_TYPE (tem))
8573 && TYPE_PRECISION (type) <= TYPE_PRECISION (TREE_TYPE (tem)))
8574 return fold_build1_loc (loc, BIT_NOT_EXPR, type,
8575 fold_convert_loc (loc, type, tem));
8578 /* Convert (T1)(X * Y) into (T1)X * (T1)Y if T1 is narrower than the
8579 type of X and Y (integer types only). */
8580 if (INTEGRAL_TYPE_P (type)
8581 && TREE_CODE (op0) == MULT_EXPR
8582 && INTEGRAL_TYPE_P (TREE_TYPE (op0))
8583 && TYPE_PRECISION (type) < TYPE_PRECISION (TREE_TYPE (op0)))
8585 /* Be careful not to introduce new overflows. */
8586 tree mult_type;
8587 if (TYPE_OVERFLOW_WRAPS (type))
8588 mult_type = type;
8589 else
8590 mult_type = unsigned_type_for (type);
8592 if (TYPE_PRECISION (mult_type) < TYPE_PRECISION (TREE_TYPE (op0)))
8594 tem = fold_build2_loc (loc, MULT_EXPR, mult_type,
8595 fold_convert_loc (loc, mult_type,
8596 TREE_OPERAND (op0, 0)),
8597 fold_convert_loc (loc, mult_type,
8598 TREE_OPERAND (op0, 1)));
8599 return fold_convert_loc (loc, type, tem);
8603 tem = fold_convert_const (code, type, op0);
8604 return tem ? tem : NULL_TREE;
8606 case FIXED_CONVERT_EXPR:
8607 tem = fold_convert_const (code, type, arg0);
8608 return tem ? tem : NULL_TREE;
8610 case VIEW_CONVERT_EXPR:
8611 if (TREE_TYPE (op0) == type)
8612 return op0;
8613 if (TREE_CODE (op0) == VIEW_CONVERT_EXPR)
8614 return fold_build1_loc (loc, VIEW_CONVERT_EXPR,
8615 type, TREE_OPERAND (op0, 0));
8617 /* For integral conversions with the same precision or pointer
8618 conversions use a NOP_EXPR instead. */
8619 if ((INTEGRAL_TYPE_P (type)
8620 || POINTER_TYPE_P (type))
8621 && (INTEGRAL_TYPE_P (TREE_TYPE (op0))
8622 || POINTER_TYPE_P (TREE_TYPE (op0)))
8623 && TYPE_PRECISION (type) == TYPE_PRECISION (TREE_TYPE (op0)))
8624 return fold_convert_loc (loc, type, op0);
8626 /* Strip inner integral conversions that do not change the precision. */
8627 if (CONVERT_EXPR_P (op0)
8628 && (INTEGRAL_TYPE_P (TREE_TYPE (op0))
8629 || POINTER_TYPE_P (TREE_TYPE (op0)))
8630 && (INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (op0, 0)))
8631 || POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (op0, 0))))
8632 && (TYPE_PRECISION (TREE_TYPE (op0))
8633 == TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (op0, 0)))))
8634 return fold_build1_loc (loc, VIEW_CONVERT_EXPR,
8635 type, TREE_OPERAND (op0, 0));
8637 return fold_view_convert_expr (type, op0);
8639 case NEGATE_EXPR:
8640 tem = fold_negate_expr (loc, arg0);
8641 if (tem)
8642 return fold_convert_loc (loc, type, tem);
8643 return NULL_TREE;
8645 case ABS_EXPR:
8646 if (TREE_CODE (arg0) == INTEGER_CST || TREE_CODE (arg0) == REAL_CST)
8647 return fold_abs_const (arg0, type);
8648 else if (TREE_CODE (arg0) == NEGATE_EXPR)
8649 return fold_build1_loc (loc, ABS_EXPR, type, TREE_OPERAND (arg0, 0));
8650 /* Convert fabs((double)float) into (double)fabsf(float). */
8651 else if (TREE_CODE (arg0) == NOP_EXPR
8652 && TREE_CODE (type) == REAL_TYPE)
8654 tree targ0 = strip_float_extensions (arg0);
8655 if (targ0 != arg0)
8656 return fold_convert_loc (loc, type,
8657 fold_build1_loc (loc, ABS_EXPR,
8658 TREE_TYPE (targ0),
8659 targ0));
8661 /* ABS_EXPR<ABS_EXPR<x>> = ABS_EXPR<x> even if flag_wrapv is on. */
8662 else if (TREE_CODE (arg0) == ABS_EXPR)
8663 return arg0;
8664 else if (tree_expr_nonnegative_p (arg0))
8665 return arg0;
8667 /* Strip sign ops from argument. */
8668 if (TREE_CODE (type) == REAL_TYPE)
8670 tem = fold_strip_sign_ops (arg0);
8671 if (tem)
8672 return fold_build1_loc (loc, ABS_EXPR, type,
8673 fold_convert_loc (loc, type, tem));
8675 return NULL_TREE;
8677 case CONJ_EXPR:
8678 if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
8679 return fold_convert_loc (loc, type, arg0);
8680 if (TREE_CODE (arg0) == COMPLEX_EXPR)
8682 tree itype = TREE_TYPE (type);
8683 tree rpart = fold_convert_loc (loc, itype, TREE_OPERAND (arg0, 0));
8684 tree ipart = fold_convert_loc (loc, itype, TREE_OPERAND (arg0, 1));
8685 return fold_build2_loc (loc, COMPLEX_EXPR, type, rpart,
8686 negate_expr (ipart));
8688 if (TREE_CODE (arg0) == COMPLEX_CST)
8690 tree itype = TREE_TYPE (type);
8691 tree rpart = fold_convert_loc (loc, itype, TREE_REALPART (arg0));
8692 tree ipart = fold_convert_loc (loc, itype, TREE_IMAGPART (arg0));
8693 return build_complex (type, rpart, negate_expr (ipart));
8695 if (TREE_CODE (arg0) == CONJ_EXPR)
8696 return fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
8697 return NULL_TREE;
8699 case BIT_NOT_EXPR:
8700 if (TREE_CODE (arg0) == INTEGER_CST)
8701 return fold_not_const (arg0, type);
8702 else if (TREE_CODE (arg0) == BIT_NOT_EXPR)
8703 return fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
8704 /* Convert ~ (-A) to A - 1. */
8705 else if (INTEGRAL_TYPE_P (type) && TREE_CODE (arg0) == NEGATE_EXPR)
8706 return fold_build2_loc (loc, MINUS_EXPR, type,
8707 fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0)),
8708 build_int_cst (type, 1));
8709 /* Convert ~ (A - 1) or ~ (A + -1) to -A. */
8710 else if (INTEGRAL_TYPE_P (type)
8711 && ((TREE_CODE (arg0) == MINUS_EXPR
8712 && integer_onep (TREE_OPERAND (arg0, 1)))
8713 || (TREE_CODE (arg0) == PLUS_EXPR
8714 && integer_all_onesp (TREE_OPERAND (arg0, 1)))))
8715 return fold_build1_loc (loc, NEGATE_EXPR, type,
8716 fold_convert_loc (loc, type,
8717 TREE_OPERAND (arg0, 0)));
8718 /* Convert ~(X ^ Y) to ~X ^ Y or X ^ ~Y if ~X or ~Y simplify. */
8719 else if (TREE_CODE (arg0) == BIT_XOR_EXPR
8720 && (tem = fold_unary_loc (loc, BIT_NOT_EXPR, type,
8721 fold_convert_loc (loc, type,
8722 TREE_OPERAND (arg0, 0)))))
8723 return fold_build2_loc (loc, BIT_XOR_EXPR, type, tem,
8724 fold_convert_loc (loc, type,
8725 TREE_OPERAND (arg0, 1)));
8726 else if (TREE_CODE (arg0) == BIT_XOR_EXPR
8727 && (tem = fold_unary_loc (loc, BIT_NOT_EXPR, type,
8728 fold_convert_loc (loc, type,
8729 TREE_OPERAND (arg0, 1)))))
8730 return fold_build2_loc (loc, BIT_XOR_EXPR, type,
8731 fold_convert_loc (loc, type,
8732 TREE_OPERAND (arg0, 0)), tem);
8733 /* Perform BIT_NOT_EXPR on each element individually. */
8734 else if (TREE_CODE (arg0) == VECTOR_CST)
8736 tree elements = TREE_VECTOR_CST_ELTS (arg0), elem, list = NULL_TREE;
8737 int count = TYPE_VECTOR_SUBPARTS (type), i;
8739 for (i = 0; i < count; i++)
8741 if (elements)
8743 elem = TREE_VALUE (elements);
8744 elem = fold_unary_loc (loc, BIT_NOT_EXPR, TREE_TYPE (type), elem);
8745 if (elem == NULL_TREE)
8746 break;
8747 elements = TREE_CHAIN (elements);
8749 else
8750 elem = build_int_cst (TREE_TYPE (type), -1);
8751 list = tree_cons (NULL_TREE, elem, list);
8753 if (i == count)
8754 return build_vector (type, nreverse (list));
8757 return NULL_TREE;
8759 case TRUTH_NOT_EXPR:
8760 /* The argument to invert_truthvalue must have Boolean type. */
8761 if (TREE_CODE (TREE_TYPE (arg0)) != BOOLEAN_TYPE)
8762 arg0 = fold_convert_loc (loc, boolean_type_node, arg0);
8764 /* Note that the operand of this must be an int
8765 and its values must be 0 or 1.
8766 ("true" is a fixed value perhaps depending on the language,
8767 but we don't handle values other than 1 correctly yet.) */
8768 tem = fold_truth_not_expr (loc, arg0);
8769 if (!tem)
8770 return NULL_TREE;
8771 return fold_convert_loc (loc, type, tem);
8773 case REALPART_EXPR:
8774 if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
8775 return fold_convert_loc (loc, type, arg0);
8776 if (TREE_CODE (arg0) == COMPLEX_EXPR)
8777 return omit_one_operand_loc (loc, type, TREE_OPERAND (arg0, 0),
8778 TREE_OPERAND (arg0, 1));
8779 if (TREE_CODE (arg0) == COMPLEX_CST)
8780 return fold_convert_loc (loc, type, TREE_REALPART (arg0));
8781 if (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
8783 tree itype = TREE_TYPE (TREE_TYPE (arg0));
8784 tem = fold_build2_loc (loc, TREE_CODE (arg0), itype,
8785 fold_build1_loc (loc, REALPART_EXPR, itype,
8786 TREE_OPERAND (arg0, 0)),
8787 fold_build1_loc (loc, REALPART_EXPR, itype,
8788 TREE_OPERAND (arg0, 1)));
8789 return fold_convert_loc (loc, type, tem);
8791 if (TREE_CODE (arg0) == CONJ_EXPR)
8793 tree itype = TREE_TYPE (TREE_TYPE (arg0));
8794 tem = fold_build1_loc (loc, REALPART_EXPR, itype,
8795 TREE_OPERAND (arg0, 0));
8796 return fold_convert_loc (loc, type, tem);
8798 if (TREE_CODE (arg0) == CALL_EXPR)
8800 tree fn = get_callee_fndecl (arg0);
8801 if (fn && DECL_BUILT_IN_CLASS (fn) == BUILT_IN_NORMAL)
8802 switch (DECL_FUNCTION_CODE (fn))
8804 CASE_FLT_FN (BUILT_IN_CEXPI):
8805 fn = mathfn_built_in (type, BUILT_IN_COS);
8806 if (fn)
8807 return build_call_expr_loc (loc, fn, 1, CALL_EXPR_ARG (arg0, 0));
8808 break;
8810 default:
8811 break;
8814 return NULL_TREE;
8816 case IMAGPART_EXPR:
8817 if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
8818 return fold_convert_loc (loc, type, integer_zero_node);
8819 if (TREE_CODE (arg0) == COMPLEX_EXPR)
8820 return omit_one_operand_loc (loc, type, TREE_OPERAND (arg0, 1),
8821 TREE_OPERAND (arg0, 0));
8822 if (TREE_CODE (arg0) == COMPLEX_CST)
8823 return fold_convert_loc (loc, type, TREE_IMAGPART (arg0));
8824 if (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
8826 tree itype = TREE_TYPE (TREE_TYPE (arg0));
8827 tem = fold_build2_loc (loc, TREE_CODE (arg0), itype,
8828 fold_build1_loc (loc, IMAGPART_EXPR, itype,
8829 TREE_OPERAND (arg0, 0)),
8830 fold_build1_loc (loc, IMAGPART_EXPR, itype,
8831 TREE_OPERAND (arg0, 1)));
8832 return fold_convert_loc (loc, type, tem);
8834 if (TREE_CODE (arg0) == CONJ_EXPR)
8836 tree itype = TREE_TYPE (TREE_TYPE (arg0));
8837 tem = fold_build1_loc (loc, IMAGPART_EXPR, itype, TREE_OPERAND (arg0, 0));
8838 return fold_convert_loc (loc, type, negate_expr (tem));
8840 if (TREE_CODE (arg0) == CALL_EXPR)
8842 tree fn = get_callee_fndecl (arg0);
8843 if (fn && DECL_BUILT_IN_CLASS (fn) == BUILT_IN_NORMAL)
8844 switch (DECL_FUNCTION_CODE (fn))
8846 CASE_FLT_FN (BUILT_IN_CEXPI):
8847 fn = mathfn_built_in (type, BUILT_IN_SIN);
8848 if (fn)
8849 return build_call_expr_loc (loc, fn, 1, CALL_EXPR_ARG (arg0, 0));
8850 break;
8852 default:
8853 break;
8856 return NULL_TREE;
8858 default:
8859 return NULL_TREE;
8860 } /* switch (code) */
8864 /* If the operation was a conversion, do _not_ mark a resulting constant
8865 with TREE_OVERFLOW if the original constant was not. These conversions
8866 have implementation-defined behavior and retaining the TREE_OVERFLOW
8867 flag here would confuse later passes such as VRP. */
8868 tree
8869 fold_unary_ignore_overflow_loc (location_t loc, enum tree_code code,
8870 tree type, tree op0)
8872 tree res = fold_unary_loc (loc, code, type, op0);
8873 if (res
8874 && TREE_CODE (res) == INTEGER_CST
8875 && TREE_CODE (op0) == INTEGER_CST
8876 && CONVERT_EXPR_CODE_P (code))
8877 TREE_OVERFLOW (res) = TREE_OVERFLOW (op0);
8879 return res;
8882 /* Fold a binary expression of code CODE and type TYPE with operands
8883 OP0 and OP1, containing either a MIN-MAX or a MAX-MIN combination.
8884 Return the folded expression if folding is successful. Otherwise,
8885 return NULL_TREE. */
8887 static tree
8888 fold_minmax (location_t loc, enum tree_code code, tree type, tree op0, tree op1)
8890 enum tree_code compl_code;
8892 if (code == MIN_EXPR)
8893 compl_code = MAX_EXPR;
8894 else if (code == MAX_EXPR)
8895 compl_code = MIN_EXPR;
8896 else
8897 gcc_unreachable ();
8899 /* MIN (MAX (a, b), b) == b. */
8900 if (TREE_CODE (op0) == compl_code
8901 && operand_equal_p (TREE_OPERAND (op0, 1), op1, 0))
8902 return omit_one_operand_loc (loc, type, op1, TREE_OPERAND (op0, 0));
8904 /* MIN (MAX (b, a), b) == b. */
8905 if (TREE_CODE (op0) == compl_code
8906 && operand_equal_p (TREE_OPERAND (op0, 0), op1, 0)
8907 && reorder_operands_p (TREE_OPERAND (op0, 1), op1))
8908 return omit_one_operand_loc (loc, type, op1, TREE_OPERAND (op0, 1));
8910 /* MIN (a, MAX (a, b)) == a. */
8911 if (TREE_CODE (op1) == compl_code
8912 && operand_equal_p (op0, TREE_OPERAND (op1, 0), 0)
8913 && reorder_operands_p (op0, TREE_OPERAND (op1, 1)))
8914 return omit_one_operand_loc (loc, type, op0, TREE_OPERAND (op1, 1));
8916 /* MIN (a, MAX (b, a)) == a. */
8917 if (TREE_CODE (op1) == compl_code
8918 && operand_equal_p (op0, TREE_OPERAND (op1, 1), 0)
8919 && reorder_operands_p (op0, TREE_OPERAND (op1, 0)))
8920 return omit_one_operand_loc (loc, type, op0, TREE_OPERAND (op1, 0));
8922 return NULL_TREE;
8925 /* Helper that tries to canonicalize the comparison ARG0 CODE ARG1
8926 by changing CODE to reduce the magnitude of constants involved in
8927 ARG0 of the comparison.
8928 Returns a canonicalized comparison tree if a simplification was
8929 possible, otherwise returns NULL_TREE.
8930 Set *STRICT_OVERFLOW_P to true if the canonicalization is only
8931 valid if signed overflow is undefined. */
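/* E.g. assuming signed overflow is undefined, x + 2 > y is
   canonicalized to x + 1 >= y, reducing the magnitude of the
   constant. */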
8933 static tree
8934 maybe_canonicalize_comparison_1 (location_t loc, enum tree_code code, tree type,
8935 tree arg0, tree arg1,
8936 bool *strict_overflow_p)
8938 enum tree_code code0 = TREE_CODE (arg0);
8939 tree t, cst0 = NULL_TREE;
8940 int sgn0;
8941 bool swap = false;
8943 /* Match A +- CST code arg1 and CST code arg1. We can change the
8944 first form only if overflow is undefined. */
8945 if (!((TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0))
8946 /* In principle pointers also have undefined overflow behavior,
8947 but that causes problems elsewhere. */
8948 && !POINTER_TYPE_P (TREE_TYPE (arg0))
8949 && (code0 == MINUS_EXPR
8950 || code0 == PLUS_EXPR)
8951 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
8952 || code0 == INTEGER_CST))
8953 return NULL_TREE;
8955 /* Identify the constant in arg0 and its sign. */
8956 if (code0 == INTEGER_CST)
8957 cst0 = arg0;
8958 else
8959 cst0 = TREE_OPERAND (arg0, 1);
8960 sgn0 = tree_int_cst_sgn (cst0);
8962 /* Overflowed constants and zero will cause problems. */
8963 if (integer_zerop (cst0)
8964 || TREE_OVERFLOW (cst0))
8965 return NULL_TREE;
8967 /* See if we can reduce the magnitude of the constant in
8968 arg0 by changing the comparison code. */
8969 if (code0 == INTEGER_CST)
8971 /* CST <= arg1 -> CST-1 < arg1. */
8972 if (code == LE_EXPR && sgn0 == 1)
8973 code = LT_EXPR;
8974 /* -CST < arg1 -> -CST-1 <= arg1. */
8975 else if (code == LT_EXPR && sgn0 == -1)
8976 code = LE_EXPR;
8977 /* CST > arg1 -> CST-1 >= arg1. */
8978 else if (code == GT_EXPR && sgn0 == 1)
8979 code = GE_EXPR;
8980 /* -CST >= arg1 -> -CST-1 > arg1. */
8981 else if (code == GE_EXPR && sgn0 == -1)
8982 code = GT_EXPR;
8983 else
8984 return NULL_TREE;
8985 /* arg1 code' CST' might be more canonical. */
8986 swap = true;
8988 else
8990 /* A - CST < arg1 -> A - CST-1 <= arg1. */
8991 if (code == LT_EXPR
8992 && code0 == ((sgn0 == -1) ? PLUS_EXPR : MINUS_EXPR))
8993 code = LE_EXPR;
8994 /* A + CST > arg1 -> A + CST-1 >= arg1. */
8995 else if (code == GT_EXPR
8996 && code0 == ((sgn0 == -1) ? MINUS_EXPR : PLUS_EXPR))
8997 code = GE_EXPR;
8998 /* A + CST <= arg1 -> A + CST-1 < arg1. */
8999 else if (code == LE_EXPR
9000 && code0 == ((sgn0 == -1) ? MINUS_EXPR : PLUS_EXPR))
9001 code = LT_EXPR;
9002 /* A - CST >= arg1 -> A - CST-1 > arg1. */
9003 else if (code == GE_EXPR
9004 && code0 == ((sgn0 == -1) ? PLUS_EXPR : MINUS_EXPR))
9005 code = GT_EXPR;
9006 else
9007 return NULL_TREE;
9008 *strict_overflow_p = true;
9011 /* Now build the constant reduced in magnitude. But not if that
9012 would produce one outside of its type's range. */
9013 if (INTEGRAL_TYPE_P (TREE_TYPE (cst0))
9014 && ((sgn0 == 1
9015 && TYPE_MIN_VALUE (TREE_TYPE (cst0))
9016 && tree_int_cst_equal (cst0, TYPE_MIN_VALUE (TREE_TYPE (cst0))))
9017 || (sgn0 == -1
9018 && TYPE_MAX_VALUE (TREE_TYPE (cst0))
9019 && tree_int_cst_equal (cst0, TYPE_MAX_VALUE (TREE_TYPE (cst0))))))
9020 /* We cannot swap the comparison here as that would cause us to
9021 endlessly recurse. */
9022 return NULL_TREE;
9024 t = int_const_binop (sgn0 == -1 ? PLUS_EXPR : MINUS_EXPR,
9025 cst0, build_int_cst (TREE_TYPE (cst0), 1), 0);
9026 if (code0 != INTEGER_CST)
9027 t = fold_build2_loc (loc, code0, TREE_TYPE (arg0), TREE_OPERAND (arg0, 0), t);
9029 /* If swapping might yield a more canonical form, do so. */
9030 if (swap)
9031 return fold_build2_loc (loc, swap_tree_comparison (code), type, arg1, t);
9032 else
9033 return fold_build2_loc (loc, code, type, t, arg1);
9036 /* Canonicalize the comparison ARG0 CODE ARG1, of type TYPE, further,
9037 exploiting undefined signed overflow. Try to decrease the magnitude
9038 of the constants involved by changing LE_EXPR and GE_EXPR to
9039 LT_EXPR and GT_EXPR or vice versa, and put sole constants at the
9040 second argument position. Returns the canonicalized tree if changed, otherwise NULL_TREE. */
9042 static tree
9043 maybe_canonicalize_comparison (location_t loc, enum tree_code code, tree type,
9044 tree arg0, tree arg1)
9046 tree t;
9047 bool strict_overflow_p;
9048 const char * const warnmsg = G_("assuming signed overflow does not occur "
9049 "when reducing constant in comparison");
9051 /* Try canonicalization by simplifying arg0. */
9052 strict_overflow_p = false;
9053 t = maybe_canonicalize_comparison_1 (loc, code, type, arg0, arg1,
9054 &strict_overflow_p);
9055 if (t)
9057 if (strict_overflow_p)
9058 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_MAGNITUDE);
9059 return t;
9062 /* Try canonicalization by simplifying arg1 using the swapped
9063 comparison. */
9064 code = swap_tree_comparison (code);
9065 strict_overflow_p = false;
9066 t = maybe_canonicalize_comparison_1 (loc, code, type, arg1, arg0,
9067 &strict_overflow_p);
9068 if (t && strict_overflow_p)
9069 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_MAGNITUDE);
9070 return t;
9073 /* Return whether BASE + OFFSET + BITPOS may wrap around the address
9074 space. This is used to avoid issuing overflow warnings for
9075 expressions like &p->x, which cannot wrap. */
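/* Illustrative example: for struct s { int i; } *p, the address
   &p->i adds a small constant offset to p; the checks below verify
   that OFFSET plus the byte position of BITPOS stays within the size
   of *p (or of the whole object when BASE is an ADDR_EXPR). */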
9077 static bool
9078 pointer_may_wrap_p (tree base, tree offset, HOST_WIDE_INT bitpos)
9080 unsigned HOST_WIDE_INT offset_low, total_low;
9081 HOST_WIDE_INT size, offset_high, total_high;
9083 if (!POINTER_TYPE_P (TREE_TYPE (base)))
9084 return true;
9086 if (bitpos < 0)
9087 return true;
9089 if (offset == NULL_TREE)
9091 offset_low = 0;
9092 offset_high = 0;
9094 else if (TREE_CODE (offset) != INTEGER_CST || TREE_OVERFLOW (offset))
9095 return true;
9096 else
9098 offset_low = TREE_INT_CST_LOW (offset);
9099 offset_high = TREE_INT_CST_HIGH (offset);
9102 if (add_double_with_sign (offset_low, offset_high,
9103 bitpos / BITS_PER_UNIT, 0,
9104 &total_low, &total_high,
9105 true))
9106 return true;
9108 if (total_high != 0)
9109 return true;
9111 size = int_size_in_bytes (TREE_TYPE (TREE_TYPE (base)));
9112 if (size <= 0)
9113 return true;
9115 /* We can do slightly better for SIZE if we have an ADDR_EXPR of an
9116 array. */
9117 if (TREE_CODE (base) == ADDR_EXPR)
9119 HOST_WIDE_INT base_size;
9121 base_size = int_size_in_bytes (TREE_TYPE (TREE_OPERAND (base, 0)));
9122 if (base_size > 0 && size < base_size)
9123 size = base_size;
9126 return total_low > (unsigned HOST_WIDE_INT) size;
9129 /* Subroutine of fold_binary. This routine performs all of the
9130 transformations that are common to the equality/inequality
9131 operators (EQ_EXPR and NE_EXPR) and the ordering operators
9132 (LT_EXPR, LE_EXPR, GE_EXPR and GT_EXPR). Callers other than
9133 fold_binary should use fold_binary instead of calling this directly. Fold a comparison with
9134 tree code CODE and type TYPE with operands OP0 and OP1. Return
9135 the folded comparison or NULL_TREE. */
9137 static tree
9138 fold_comparison (location_t loc, enum tree_code code, tree type,
9139 tree op0, tree op1)
9141 tree arg0, arg1, tem;
9143 arg0 = op0;
9144 arg1 = op1;
9146 STRIP_SIGN_NOPS (arg0);
9147 STRIP_SIGN_NOPS (arg1);
9149 tem = fold_relational_const (code, type, arg0, arg1);
9150 if (tem != NULL_TREE)
9151 return tem;
9153 /* If one arg is a real or integer constant, put it last. */
9154 if (tree_swap_operands_p (arg0, arg1, true))
9155 return fold_build2_loc (loc, swap_tree_comparison (code), type, op1, op0);
9157 /* Transform comparisons of the form X +- C1 CMP C2 to X CMP C2 +- C1. */
9158 if ((TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
9159 && (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
9160 && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1))
9161 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
9162 && (TREE_CODE (arg1) == INTEGER_CST
9163 && !TREE_OVERFLOW (arg1)))
9165 tree const1 = TREE_OPERAND (arg0, 1);
9166 tree const2 = arg1;
9167 tree variable = TREE_OPERAND (arg0, 0);
9168 tree lhs;
9169 int lhs_add;
9170 lhs_add = TREE_CODE (arg0) != PLUS_EXPR;
9172 lhs = fold_build2_loc (loc, lhs_add ? PLUS_EXPR : MINUS_EXPR,
9173 TREE_TYPE (arg1), const2, const1);
9175 /* If the constant operation overflowed this can be
9176 simplified as a comparison against INT_MAX/INT_MIN. */
9177 if (TREE_CODE (lhs) == INTEGER_CST
9178 && TREE_OVERFLOW (lhs))
9180 int const1_sgn = tree_int_cst_sgn (const1);
9181 enum tree_code code2 = code;
9183 /* Get the sign of the constant on the lhs if the
9184 operation were VARIABLE + CONST1. */
9185 if (TREE_CODE (arg0) == MINUS_EXPR)
9186 const1_sgn = -const1_sgn;
9188 /* The sign of the constant determines if we overflowed
9189 INT_MAX (const1_sgn == -1) or INT_MIN (const1_sgn == 1).
9190 Canonicalize to the INT_MIN overflow by swapping the comparison
9191 if necessary. */
9192 if (const1_sgn == -1)
9193 code2 = swap_tree_comparison (code);
9195 /* We can now look at the canonicalized case
9196 VARIABLE + 1 CODE2 INT_MIN
9197 and decide on the result. */
9198 if (code2 == LT_EXPR
9199 || code2 == LE_EXPR
9200 || code2 == EQ_EXPR)
9201 return omit_one_operand_loc (loc, type, boolean_false_node, variable);
9202 else if (code2 == NE_EXPR
9203 || code2 == GE_EXPR
9204 || code2 == GT_EXPR)
9205 return omit_one_operand_loc (loc, type, boolean_true_node, variable);
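/* Concretely: for signed int X, "X + 1 < INT_MIN" overflows when the
   constants are combined, and via the canonicalized form above it
   folds to false (X + 1 cannot be below INT_MIN when signed overflow
   is undefined); "X + 1 >= INT_MIN" likewise folds to true.  */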
9208 if (TREE_CODE (lhs) == TREE_CODE (arg1)
9209 && (TREE_CODE (lhs) != INTEGER_CST
9210 || !TREE_OVERFLOW (lhs)))
9212 fold_overflow_warning (("assuming signed overflow does not occur "
9213 "when changing X +- C1 cmp C2 to "
9214 "X cmp C1 +- C2"),
9215 WARN_STRICT_OVERFLOW_COMPARISON);
9216 return fold_build2_loc (loc, code, type, variable, lhs);
9220 /* For comparisons of pointers we can decompose it to a compile time
9221 comparison of the base objects and the offsets into the object.
9222 This requires at least one operand being an ADDR_EXPR or a
9223 POINTER_PLUS_EXPR to do more than the operand_equal_p test below. */
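/* E.g. "&s.f0 == &s.f1" for distinct fields compares the two field
   offsets at compile time and folds to a constant; the relational
   variants additionally require POINTER_TYPE_OVERFLOW_UNDEFINED.  */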
9224 if (POINTER_TYPE_P (TREE_TYPE (arg0))
9225 && (TREE_CODE (arg0) == ADDR_EXPR
9226 || TREE_CODE (arg1) == ADDR_EXPR
9227 || TREE_CODE (arg0) == POINTER_PLUS_EXPR
9228 || TREE_CODE (arg1) == POINTER_PLUS_EXPR))
9230 tree base0, base1, offset0 = NULL_TREE, offset1 = NULL_TREE;
9231 HOST_WIDE_INT bitsize, bitpos0 = 0, bitpos1 = 0;
9232 enum machine_mode mode;
9233 int volatilep, unsignedp;
9234 bool indirect_base0 = false, indirect_base1 = false;
9236 /* Get base and offset for the access. Strip ADDR_EXPR for
9237 get_inner_reference, but put it back by stripping INDIRECT_REF
9238 off the base object if possible. indirect_baseN will be true
9239 if baseN is not an address but refers to the object itself. */
9240 base0 = arg0;
9241 if (TREE_CODE (arg0) == ADDR_EXPR)
9243 base0 = get_inner_reference (TREE_OPERAND (arg0, 0),
9244 &bitsize, &bitpos0, &offset0, &mode,
9245 &unsignedp, &volatilep, false);
9246 if (TREE_CODE (base0) == INDIRECT_REF)
9247 base0 = TREE_OPERAND (base0, 0);
9248 else
9249 indirect_base0 = true;
9251 else if (TREE_CODE (arg0) == POINTER_PLUS_EXPR)
9253 base0 = TREE_OPERAND (arg0, 0);
9254 offset0 = TREE_OPERAND (arg0, 1);
9257 base1 = arg1;
9258 if (TREE_CODE (arg1) == ADDR_EXPR)
9260 base1 = get_inner_reference (TREE_OPERAND (arg1, 0),
9261 &bitsize, &bitpos1, &offset1, &mode,
9262 &unsignedp, &volatilep, false);
9263 if (TREE_CODE (base1) == INDIRECT_REF)
9264 base1 = TREE_OPERAND (base1, 0);
9265 else
9266 indirect_base1 = true;
9268 else if (TREE_CODE (arg1) == POINTER_PLUS_EXPR)
9270 base1 = TREE_OPERAND (arg1, 0);
9271 offset1 = TREE_OPERAND (arg1, 1);
9274 /* If we have equivalent bases we might be able to simplify. */
9275 if (indirect_base0 == indirect_base1
9276 && operand_equal_p (base0, base1, 0))
9278 /* We can fold this expression to a constant if the non-constant
9279 offset parts are equal. */
9280 if ((offset0 == offset1
9281 || (offset0 && offset1
9282 && operand_equal_p (offset0, offset1, 0)))
9283 && (code == EQ_EXPR
9284 || code == NE_EXPR
9285 || POINTER_TYPE_OVERFLOW_UNDEFINED))
9288 if (code != EQ_EXPR
9289 && code != NE_EXPR
9290 && bitpos0 != bitpos1
9291 && (pointer_may_wrap_p (base0, offset0, bitpos0)
9292 || pointer_may_wrap_p (base1, offset1, bitpos1)))
9293 fold_overflow_warning (("assuming pointer wraparound does not "
9294 "occur when comparing P +- C1 with "
9295 "P +- C2"),
9296 WARN_STRICT_OVERFLOW_CONDITIONAL);
9298 switch (code)
9300 case EQ_EXPR:
9301 return constant_boolean_node (bitpos0 == bitpos1, type);
9302 case NE_EXPR:
9303 return constant_boolean_node (bitpos0 != bitpos1, type);
9304 case LT_EXPR:
9305 return constant_boolean_node (bitpos0 < bitpos1, type);
9306 case LE_EXPR:
9307 return constant_boolean_node (bitpos0 <= bitpos1, type);
9308 case GE_EXPR:
9309 return constant_boolean_node (bitpos0 >= bitpos1, type);
9310 case GT_EXPR:
9311 return constant_boolean_node (bitpos0 > bitpos1, type);
9312 default:;
9315 /* We can simplify the comparison to a comparison of the variable
9316 offset parts if the constant offset parts are equal.
9317 Be careful to use signed size type here because otherwise we
9318 mess with array offsets in the wrong way. This is possible
9319 because pointer arithmetic is restricted to remain within an
9320 object and overflow on pointer differences is undefined as of
9321 6.5.6/8 and /9 with respect to the signed ptrdiff_t. */
9322 else if (bitpos0 == bitpos1
9323 && ((code == EQ_EXPR || code == NE_EXPR)
9324 || POINTER_TYPE_OVERFLOW_UNDEFINED))
9326 tree signed_size_type_node;
9327 signed_size_type_node = signed_type_for (size_type_node);
9329 /* By converting to signed size type we cover middle-end pointer
9330 arithmetic, which operates on unsigned pointer types of size
9331 type width, and ARRAY_REF offsets, which are properly sign or
9332 zero extended from their type in case it is narrower than
9333 size type. */
9334 if (offset0 == NULL_TREE)
9335 offset0 = build_int_cst (signed_size_type_node, 0);
9336 else
9337 offset0 = fold_convert_loc (loc, signed_size_type_node,
9338 offset0);
9339 if (offset1 == NULL_TREE)
9340 offset1 = build_int_cst (signed_size_type_node, 0);
9341 else
9342 offset1 = fold_convert_loc (loc, signed_size_type_node,
9343 offset1);
9345 if (code != EQ_EXPR
9346 && code != NE_EXPR
9347 && (pointer_may_wrap_p (base0, offset0, bitpos0)
9348 || pointer_may_wrap_p (base1, offset1, bitpos1)))
9349 fold_overflow_warning (("assuming pointer wraparound does not "
9350 "occur when comparing P +- C1 with "
9351 "P +- C2"),
9352 WARN_STRICT_OVERFLOW_COMPARISON);
9354 return fold_build2_loc (loc, code, type, offset0, offset1);
9357 /* For non-equal bases we can simplify if they are addresses
9358 of local binding decls or constants. */
9359 else if (indirect_base0 && indirect_base1
9360 /* We know that !operand_equal_p (base0, base1, 0)
9361 because the if condition was false. But make
9362 sure two decls are not the same. */
9363 && base0 != base1
9364 && TREE_CODE (arg0) == ADDR_EXPR
9365 && TREE_CODE (arg1) == ADDR_EXPR
9366 && (((TREE_CODE (base0) == VAR_DECL
9367 || TREE_CODE (base0) == PARM_DECL)
9368 && (targetm.binds_local_p (base0)
9369 || CONSTANT_CLASS_P (base1)))
9370 || CONSTANT_CLASS_P (base0))
9371 && (((TREE_CODE (base1) == VAR_DECL
9372 || TREE_CODE (base1) == PARM_DECL)
9373 && (targetm.binds_local_p (base1)
9374 || CONSTANT_CLASS_P (base0)))
9375 || CONSTANT_CLASS_P (base1)))
9377 if (code == EQ_EXPR)
9378 return omit_two_operands_loc (loc, type, boolean_false_node,
9379 arg0, arg1);
9380 else if (code == NE_EXPR)
9381 return omit_two_operands_loc (loc, type, boolean_true_node,
9382 arg0, arg1);
9384 /* For equal offsets we can simplify to a comparison of the
9385 base addresses. */
9386 else if (bitpos0 == bitpos1
9387 && (indirect_base0
9388 ? base0 != TREE_OPERAND (arg0, 0) : base0 != arg0)
9389 && (indirect_base1
9390 ? base1 != TREE_OPERAND (arg1, 0) : base1 != arg1)
9391 && ((offset0 == offset1)
9392 || (offset0 && offset1
9393 && operand_equal_p (offset0, offset1, 0))))
9395 if (indirect_base0)
9396 base0 = build_fold_addr_expr_loc (loc, base0);
9397 if (indirect_base1)
9398 base1 = build_fold_addr_expr_loc (loc, base1);
9399 return fold_build2_loc (loc, code, type, base0, base1);
9403 /* Transform comparisons of the form X +- C1 CMP Y +- C2 to
9404 X CMP Y +- C2 +- C1 for signed X, Y. This is valid if
9405 the resulting offset is smaller in absolute value than the
9406 original one. */
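/* E.g. "X + 5 < Y + 2" becomes "X < Y + -3": the combined constant 3
   is smaller in absolute value than the original 5.  */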
9407 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0))
9408 && (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
9409 && (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
9410 && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1)))
9411 && (TREE_CODE (arg1) == PLUS_EXPR || TREE_CODE (arg1) == MINUS_EXPR)
9412 && (TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
9413 && !TREE_OVERFLOW (TREE_OPERAND (arg1, 1))))
9415 tree const1 = TREE_OPERAND (arg0, 1);
9416 tree const2 = TREE_OPERAND (arg1, 1);
9417 tree variable1 = TREE_OPERAND (arg0, 0);
9418 tree variable2 = TREE_OPERAND (arg1, 0);
9419 tree cst;
9420 const char * const warnmsg = G_("assuming signed overflow does not "
9421 "occur when combining constants around "
9422 "a comparison");
9424 /* Put the constant on the side where it doesn't overflow and is
9425 of lower absolute value than before. */
9426 cst = int_const_binop (TREE_CODE (arg0) == TREE_CODE (arg1)
9427 ? MINUS_EXPR : PLUS_EXPR,
9428 const2, const1, 0);
9429 if (!TREE_OVERFLOW (cst)
9430 && tree_int_cst_compare (const2, cst) == tree_int_cst_sgn (const2))
9432 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_COMPARISON);
9433 return fold_build2_loc (loc, code, type,
9434 variable1,
9435 fold_build2_loc (loc,
9436 TREE_CODE (arg1), TREE_TYPE (arg1),
9437 variable2, cst));
9440 cst = int_const_binop (TREE_CODE (arg0) == TREE_CODE (arg1)
9441 ? MINUS_EXPR : PLUS_EXPR,
9442 const1, const2, 0);
9443 if (!TREE_OVERFLOW (cst)
9444 && tree_int_cst_compare (const1, cst) == tree_int_cst_sgn (const1))
9446 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_COMPARISON);
9447 return fold_build2_loc (loc, code, type,
9448 fold_build2_loc (loc, TREE_CODE (arg0), TREE_TYPE (arg0),
9449 variable1, cst),
9450 variable2);
9454 /* Transform comparisons of the form X * C1 CMP 0 to X CMP 0 in the
9455 signed arithmetic case. That form is created by the compiler
9456 often enough for folding it to be of value. One example is in
9457 computing loop trip counts after Operator Strength Reduction. */
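/* E.g. for signed X, "X * 4 > 0" becomes "X > 0" and "X * -4 > 0"
   becomes "X < 0"; the sign of the nonzero constant factor alone
   decides whether the comparison sense is swapped.  */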
9458 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0))
9459 && TREE_CODE (arg0) == MULT_EXPR
9460 && (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
9461 && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1)))
9462 && integer_zerop (arg1))
9464 tree const1 = TREE_OPERAND (arg0, 1);
9465 tree const2 = arg1; /* zero */
9466 tree variable1 = TREE_OPERAND (arg0, 0);
9467 enum tree_code cmp_code = code;
9469 gcc_assert (!integer_zerop (const1));
9471 fold_overflow_warning (("assuming signed overflow does not occur when "
9472 "eliminating multiplication in comparison "
9473 "with zero"),
9474 WARN_STRICT_OVERFLOW_COMPARISON);
9476 /* If const1 is negative we swap the sense of the comparison. */
9477 if (tree_int_cst_sgn (const1) < 0)
9478 cmp_code = swap_tree_comparison (cmp_code);
9480 return fold_build2_loc (loc, cmp_code, type, variable1, const2);
9483 tem = maybe_canonicalize_comparison (loc, code, type, op0, op1);
9484 if (tem)
9485 return tem;
9487 if (FLOAT_TYPE_P (TREE_TYPE (arg0)))
9489 tree targ0 = strip_float_extensions (arg0);
9490 tree targ1 = strip_float_extensions (arg1);
9491 tree newtype = TREE_TYPE (targ0);
9493 if (TYPE_PRECISION (TREE_TYPE (targ1)) > TYPE_PRECISION (newtype))
9494 newtype = TREE_TYPE (targ1);
9496 /* Fold (double)float1 CMP (double)float2 into float1 CMP float2. */
9497 if (TYPE_PRECISION (newtype) < TYPE_PRECISION (TREE_TYPE (arg0)))
9498 return fold_build2_loc (loc, code, type,
9499 fold_convert_loc (loc, newtype, targ0),
9500 fold_convert_loc (loc, newtype, targ1));
9502 /* (-a) CMP (-b) -> b CMP a */
9503 if (TREE_CODE (arg0) == NEGATE_EXPR
9504 && TREE_CODE (arg1) == NEGATE_EXPR)
9505 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg1, 0),
9506 TREE_OPERAND (arg0, 0));
9508 if (TREE_CODE (arg1) == REAL_CST)
9510 REAL_VALUE_TYPE cst;
9511 cst = TREE_REAL_CST (arg1);
9513 /* (-a) CMP CST -> a swap(CMP) (-CST) */
9514 if (TREE_CODE (arg0) == NEGATE_EXPR)
9515 return fold_build2_loc (loc, swap_tree_comparison (code), type,
9516 TREE_OPERAND (arg0, 0),
9517 build_real (TREE_TYPE (arg1),
9518 REAL_VALUE_NEGATE (cst)));
9520 /* IEEE doesn't distinguish +0 and -0 in comparisons. */
9521 /* a CMP (-0) -> a CMP 0 */
9522 if (REAL_VALUE_MINUS_ZERO (cst))
9523 return fold_build2_loc (loc, code, type, arg0,
9524 build_real (TREE_TYPE (arg1), dconst0));
9526 /* x != NaN is always true, other ops are always false. */
9527 if (REAL_VALUE_ISNAN (cst)
9528 && ! HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg1))))
9530 tem = (code == NE_EXPR) ? integer_one_node : integer_zero_node;
9531 return omit_one_operand_loc (loc, type, tem, arg0);
9534 /* Fold comparisons against infinity. */
9535 if (REAL_VALUE_ISINF (cst)
9536 && MODE_HAS_INFINITIES (TYPE_MODE (TREE_TYPE (arg1))))
9538 tem = fold_inf_compare (loc, code, type, arg0, arg1);
9539 if (tem != NULL_TREE)
9540 return tem;
9544 /* If this is a comparison of a real constant with a PLUS_EXPR
9545 or a MINUS_EXPR of a real constant, we can convert it into a
9546 comparison with a revised real constant as long as no overflow
9547 occurs when unsafe_math_optimizations are enabled. */
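/* E.g. with -funsafe-math-optimizations, "(x + 1.5) < 3.0" becomes
   "x < 1.5" by folding the two constants together.  */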
9548 if (flag_unsafe_math_optimizations
9549 && TREE_CODE (arg1) == REAL_CST
9550 && (TREE_CODE (arg0) == PLUS_EXPR
9551 || TREE_CODE (arg0) == MINUS_EXPR)
9552 && TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
9553 && 0 != (tem = const_binop (TREE_CODE (arg0) == PLUS_EXPR
9554 ? MINUS_EXPR : PLUS_EXPR,
9555 arg1, TREE_OPERAND (arg0, 1), 0))
9556 && !TREE_OVERFLOW (tem))
9557 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0), tem);
9559 /* Likewise, we can simplify a comparison of a real constant with
9560 a MINUS_EXPR whose first operand is also a real constant, i.e.
9561 (c1 - x) < c2 becomes x > c1-c2. Reordering is allowed on
9562 floating-point types only if -fassociative-math is set. */
9563 if (flag_associative_math
9564 && TREE_CODE (arg1) == REAL_CST
9565 && TREE_CODE (arg0) == MINUS_EXPR
9566 && TREE_CODE (TREE_OPERAND (arg0, 0)) == REAL_CST
9567 && 0 != (tem = const_binop (MINUS_EXPR, TREE_OPERAND (arg0, 0),
9568 arg1, 0))
9569 && !TREE_OVERFLOW (tem))
9570 return fold_build2_loc (loc, swap_tree_comparison (code), type,
9571 TREE_OPERAND (arg0, 1), tem);
9573 /* Fold comparisons against built-in math functions. */
9574 if (TREE_CODE (arg1) == REAL_CST
9575 && flag_unsafe_math_optimizations
9576 && ! flag_errno_math)
9578 enum built_in_function fcode = builtin_mathfn_code (arg0);
9580 if (fcode != END_BUILTINS)
9582 tem = fold_mathfn_compare (loc, fcode, code, type, arg0, arg1);
9583 if (tem != NULL_TREE)
9584 return tem;
9589 if (TREE_CODE (TREE_TYPE (arg0)) == INTEGER_TYPE
9590 && CONVERT_EXPR_P (arg0))
9592 /* If we are widening one operand of an integer comparison,
9593 see if the other operand is similarly being widened. Perhaps we
9594 can do the comparison in the narrower type. */
9595 tem = fold_widened_comparison (loc, code, type, arg0, arg1);
9596 if (tem)
9597 return tem;
9599 /* Or if we are changing signedness. */
9600 tem = fold_sign_changed_comparison (loc, code, type, arg0, arg1);
9601 if (tem)
9602 return tem;
9605 /* If this is comparing a constant with a MIN_EXPR or a MAX_EXPR of a
9606 constant, we can simplify it. */
9607 if (TREE_CODE (arg1) == INTEGER_CST
9608 && (TREE_CODE (arg0) == MIN_EXPR
9609 || TREE_CODE (arg0) == MAX_EXPR)
9610 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
9612 tem = optimize_minmax_comparison (loc, code, type, op0, op1);
9613 if (tem)
9614 return tem;
9617 /* Simplify comparison of something with itself. (For IEEE
9618 floating-point, we can only do some of these simplifications.) */
9619 if (operand_equal_p (arg0, arg1, 0))
9621 switch (code)
9623 case EQ_EXPR:
9624 if (! FLOAT_TYPE_P (TREE_TYPE (arg0))
9625 || ! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
9626 return constant_boolean_node (1, type);
9627 break;
9629 case GE_EXPR:
9630 case LE_EXPR:
9631 if (! FLOAT_TYPE_P (TREE_TYPE (arg0))
9632 || ! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
9633 return constant_boolean_node (1, type);
9634 return fold_build2_loc (loc, EQ_EXPR, type, arg0, arg1);
9636 case NE_EXPR:
9637 /* For NE, we can only do this simplification for integer types
9638 or when we don't honor IEEE floating point NaNs. */
9639 if (FLOAT_TYPE_P (TREE_TYPE (arg0))
9640 && HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
9641 break;
9642 /* ... fall through ... */
9643 case GT_EXPR:
9644 case LT_EXPR:
9645 return constant_boolean_node (0, type);
9646 default:
9647 gcc_unreachable ();
9651 /* If we are comparing an expression that just has comparisons
9652 of two integer values, arithmetic expressions of those comparisons,
9653 and constants, we can simplify it. There are only three cases
9654 to check: the two values can either be equal, the first can be
9655 greater, or the second can be greater. Fold the expression for
9656 those three values. Since each value must be 0 or 1, we have
9657 eight possibilities, each of which corresponds to the constant 0
9658 or 1 or one of the six possible comparisons.
9660 This handles common cases like (a > b) == 0 but also handles
9661 expressions like ((x > y) - (y > x)) > 0, which supposedly
9662 occur in macroized code. */
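/* Example: for "((x > y) - (y > x)) > 0", evaluating the expression
   for the x>y, x==y and x<y orderings gives 1, 0, 0, i.e. mask 4 in
   the switch below, so the whole thing folds to "x > y".  */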
9664 if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg0) != INTEGER_CST)
9666 tree cval1 = 0, cval2 = 0;
9667 int save_p = 0;
9669 if (twoval_comparison_p (arg0, &cval1, &cval2, &save_p)
9670 /* Don't handle degenerate cases here; they should already
9671 have been handled anyway. */
9672 && cval1 != 0 && cval2 != 0
9673 && ! (TREE_CONSTANT (cval1) && TREE_CONSTANT (cval2))
9674 && TREE_TYPE (cval1) == TREE_TYPE (cval2)
9675 && INTEGRAL_TYPE_P (TREE_TYPE (cval1))
9676 && TYPE_MAX_VALUE (TREE_TYPE (cval1))
9677 && TYPE_MAX_VALUE (TREE_TYPE (cval2))
9678 && ! operand_equal_p (TYPE_MIN_VALUE (TREE_TYPE (cval1)),
9679 TYPE_MAX_VALUE (TREE_TYPE (cval2)), 0))
9681 tree maxval = TYPE_MAX_VALUE (TREE_TYPE (cval1));
9682 tree minval = TYPE_MIN_VALUE (TREE_TYPE (cval1));
9684 /* We can't just pass T to eval_subst in case cval1 or cval2
9685 was the same as ARG1. */
9687 tree high_result
9688 = fold_build2_loc (loc, code, type,
9689 eval_subst (loc, arg0, cval1, maxval,
9690 cval2, minval),
9691 arg1);
9692 tree equal_result
9693 = fold_build2_loc (loc, code, type,
9694 eval_subst (loc, arg0, cval1, maxval,
9695 cval2, maxval),
9696 arg1);
9697 tree low_result
9698 = fold_build2_loc (loc, code, type,
9699 eval_subst (loc, arg0, cval1, minval,
9700 cval2, maxval),
9701 arg1);
9703 /* All three of these results should be 0 or 1. Confirm they are.
9704 Then use those values to select the proper code to use. */
9706 if (TREE_CODE (high_result) == INTEGER_CST
9707 && TREE_CODE (equal_result) == INTEGER_CST
9708 && TREE_CODE (low_result) == INTEGER_CST)
9710 /* Make a 3-bit mask with the high-order bit being the
9711 value for `>', the next for '=', and the low for '<'. */
9712 switch ((integer_onep (high_result) * 4)
9713 + (integer_onep (equal_result) * 2)
9714 + integer_onep (low_result))
9716 case 0:
9717 /* Always false. */
9718 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
9719 case 1:
9720 code = LT_EXPR;
9721 break;
9722 case 2:
9723 code = EQ_EXPR;
9724 break;
9725 case 3:
9726 code = LE_EXPR;
9727 break;
9728 case 4:
9729 code = GT_EXPR;
9730 break;
9731 case 5:
9732 code = NE_EXPR;
9733 break;
9734 case 6:
9735 code = GE_EXPR;
9736 break;
9737 case 7:
9738 /* Always true. */
9739 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
9742 if (save_p)
9744 tem = save_expr (build2 (code, type, cval1, cval2));
9745 SET_EXPR_LOCATION (tem, loc);
9746 return tem;
9748 return fold_build2_loc (loc, code, type, cval1, cval2);
9753 /* We can fold X/C1 op C2 where C1 and C2 are integer constants
9754 into a single range test. */
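/* E.g. for unsigned X, "X / 4 == 2" becomes a range test equivalent
   to "8 <= X && X <= 11"; fold_div_compare builds the actual form.  */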
9755 if ((TREE_CODE (arg0) == TRUNC_DIV_EXPR
9756 || TREE_CODE (arg0) == EXACT_DIV_EXPR)
9757 && TREE_CODE (arg1) == INTEGER_CST
9758 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
9759 && !integer_zerop (TREE_OPERAND (arg0, 1))
9760 && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1))
9761 && !TREE_OVERFLOW (arg1))
9763 tem = fold_div_compare (loc, code, type, arg0, arg1);
9764 if (tem != NULL_TREE)
9765 return tem;
9768 /* Fold ~X op ~Y as Y op X. */
9769 if (TREE_CODE (arg0) == BIT_NOT_EXPR
9770 && TREE_CODE (arg1) == BIT_NOT_EXPR)
9772 tree cmp_type = TREE_TYPE (TREE_OPERAND (arg0, 0));
9773 return fold_build2_loc (loc, code, type,
9774 fold_convert_loc (loc, cmp_type,
9775 TREE_OPERAND (arg1, 0)),
9776 TREE_OPERAND (arg0, 0));
9779 /* Fold ~X op C as X op' ~C, where op' is the swapped comparison. */
9780 if (TREE_CODE (arg0) == BIT_NOT_EXPR
9781 && TREE_CODE (arg1) == INTEGER_CST)
9783 tree cmp_type = TREE_TYPE (TREE_OPERAND (arg0, 0));
9784 return fold_build2_loc (loc, swap_tree_comparison (code), type,
9785 TREE_OPERAND (arg0, 0),
9786 fold_build1_loc (loc, BIT_NOT_EXPR, cmp_type,
9787 fold_convert_loc (loc, cmp_type, arg1)));
9790 return NULL_TREE;
9794 /* Subroutine of fold_binary. Optimize complex multiplications of the
9795 form z * conj(z), as pow(realpart(z),2) + pow(imagpart(z),2). The
9796 argument EXPR represents the expression "z" of type TYPE. */
9798 static tree
9799 fold_mult_zconjz (location_t loc, tree type, tree expr)
9801 tree itype = TREE_TYPE (type);
9802 tree rpart, ipart, tem;
9804 if (TREE_CODE (expr) == COMPLEX_EXPR)
9806 rpart = TREE_OPERAND (expr, 0);
9807 ipart = TREE_OPERAND (expr, 1);
9809 else if (TREE_CODE (expr) == COMPLEX_CST)
9811 rpart = TREE_REALPART (expr);
9812 ipart = TREE_IMAGPART (expr);
9814 else
9816 expr = save_expr (expr);
9817 rpart = fold_build1_loc (loc, REALPART_EXPR, itype, expr);
9818 ipart = fold_build1_loc (loc, IMAGPART_EXPR, itype, expr);
9821 rpart = save_expr (rpart);
9822 ipart = save_expr (ipart);
9823 tem = fold_build2_loc (loc, PLUS_EXPR, itype,
9824 fold_build2_loc (loc, MULT_EXPR, itype, rpart, rpart),
9825 fold_build2_loc (loc, MULT_EXPR, itype, ipart, ipart));
9826 return fold_build2_loc (loc, COMPLEX_EXPR, type, tem,
9827 fold_convert_loc (loc, itype, integer_zero_node));
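/* The result has the shape COMPLEX_EXPR <r*r + i*i, 0>: for
   z = a + bi, z * conj(z) yields a*a + b*b with a zero imaginary
   part.  */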
9831 /* Subroutine of fold_binary. If P is the value of EXPR, computes
9832 power-of-two M and (arbitrary) N such that M divides (P-N). This condition
9833 guarantees that P and N have the same least significant log2(M) bits.
9834 N is not otherwise constrained. In particular, N is not normalized to
9835 0 <= N < M as is common. In general, the precise value of P is unknown.
9836 M is chosen as large as possible such that constant N can be determined.
9838 Returns M and sets *RESIDUE to N.
9840 If ALLOW_FUNC_ALIGN is true, do take functions' DECL_ALIGN_UNIT into
9841 account. This is not always possible due to PR 35705.
9844 static unsigned HOST_WIDE_INT
9845 get_pointer_modulus_and_residue (tree expr, unsigned HOST_WIDE_INT *residue,
9846 bool allow_func_align)
9848 enum tree_code code;
9850 *residue = 0;
9852 code = TREE_CODE (expr);
9853 if (code == ADDR_EXPR)
9855 expr = TREE_OPERAND (expr, 0);
9856 if (handled_component_p (expr))
9858 HOST_WIDE_INT bitsize, bitpos;
9859 tree offset;
9860 enum machine_mode mode;
9861 int unsignedp, volatilep;
9863 expr = get_inner_reference (expr, &bitsize, &bitpos, &offset,
9864 &mode, &unsignedp, &volatilep, false);
9865 *residue = bitpos / BITS_PER_UNIT;
9866 if (offset)
9868 if (TREE_CODE (offset) == INTEGER_CST)
9869 *residue += TREE_INT_CST_LOW (offset);
9870 else
9871 /* We don't handle more complicated offset expressions. */
9872 return 1;
9876 if (DECL_P (expr)
9877 && (allow_func_align || TREE_CODE (expr) != FUNCTION_DECL))
9878 return DECL_ALIGN_UNIT (expr);
9880 else if (code == POINTER_PLUS_EXPR)
9882 tree op0, op1;
9883 unsigned HOST_WIDE_INT modulus;
9884 enum tree_code inner_code;
9886 op0 = TREE_OPERAND (expr, 0);
9887 STRIP_NOPS (op0);
9888 modulus = get_pointer_modulus_and_residue (op0, residue,
9889 allow_func_align);
9891 op1 = TREE_OPERAND (expr, 1);
9892 STRIP_NOPS (op1);
9893 inner_code = TREE_CODE (op1);
9894 if (inner_code == INTEGER_CST)
9896 *residue += TREE_INT_CST_LOW (op1);
9897 return modulus;
9899 else if (inner_code == MULT_EXPR)
9901 op1 = TREE_OPERAND (op1, 1);
9902 if (TREE_CODE (op1) == INTEGER_CST)
9904 unsigned HOST_WIDE_INT align;
9906 /* Compute the greatest power-of-2 divisor of op1. */
9907 align = TREE_INT_CST_LOW (op1);
9908 align &= -align;
9910 /* If align is non-zero and less than modulus, replace
9911 modulus with align. If align is 0, then either op1 is 0
9912 or the greatest power-of-2 divisor of op1 doesn't fit in an
9913 unsigned HOST_WIDE_INT. In either case, no additional
9914 constraint is imposed. */
9915 if (align)
9916 modulus = MIN (modulus, align);
9918 return modulus;
9923 /* If we get here, we were unable to determine anything useful about the
9924 expression. */
9925 return 1;
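/* Example (assumed declarations): for "&buf[6]" where buf is a char
   array whose DECL_ALIGN_UNIT is 16, this returns modulus 16 with
   residue 6, i.e. the pointer value is known to be 6 mod 16.  */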
9929 /* Fold a binary expression of code CODE and type TYPE with operands
9930 OP0 and OP1. LOC is the location of the resulting expression.
9931 Return the folded expression if folding is successful. Otherwise,
9932 return NULL_TREE. */
9934 tree
9935 fold_binary_loc (location_t loc,
9936 enum tree_code code, tree type, tree op0, tree op1)
9938 enum tree_code_class kind = TREE_CODE_CLASS (code);
9939 tree arg0, arg1, tem;
9940 tree t1 = NULL_TREE;
9941 bool strict_overflow_p;
9943 gcc_assert (IS_EXPR_CODE_CLASS (kind)
9944 && TREE_CODE_LENGTH (code) == 2
9945 && op0 != NULL_TREE
9946 && op1 != NULL_TREE);
9948 arg0 = op0;
9949 arg1 = op1;
9951 /* Strip any conversions that don't change the mode. This is
9952 safe for every expression, except for a comparison expression
9953 because its signedness is derived from its operands. So, in
9954 the latter case, only strip conversions that don't change the
9955 signedness. MIN_EXPR/MAX_EXPR also need signedness of arguments
9956 preserved.
9958 Note that this is done as an internal manipulation within the
9959 constant folder, in order to find the simplest representation
9960 of the arguments so that their form can be studied. In any
9961 case, the appropriate type conversions should be put back in
9962 the tree that will get out of the constant folder. */
9964 if (kind == tcc_comparison || code == MIN_EXPR || code == MAX_EXPR)
9966 STRIP_SIGN_NOPS (arg0);
9967 STRIP_SIGN_NOPS (arg1);
9969 else
9971 STRIP_NOPS (arg0);
9972 STRIP_NOPS (arg1);
9975 /* Note that TREE_CONSTANT isn't enough: static var addresses are
9976 constant but we can't do arithmetic on them. */
9977 if ((TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
9978 || (TREE_CODE (arg0) == REAL_CST && TREE_CODE (arg1) == REAL_CST)
9979 || (TREE_CODE (arg0) == FIXED_CST && TREE_CODE (arg1) == FIXED_CST)
9980 || (TREE_CODE (arg0) == FIXED_CST && TREE_CODE (arg1) == INTEGER_CST)
9981 || (TREE_CODE (arg0) == COMPLEX_CST && TREE_CODE (arg1) == COMPLEX_CST)
9982 || (TREE_CODE (arg0) == VECTOR_CST && TREE_CODE (arg1) == VECTOR_CST))
9984 if (kind == tcc_binary)
9986 /* Make sure type and arg0 have the same saturating flag. */
9987 gcc_assert (TYPE_SATURATING (type)
9988 == TYPE_SATURATING (TREE_TYPE (arg0)));
9989 tem = const_binop (code, arg0, arg1, 0);
9991 else if (kind == tcc_comparison)
9992 tem = fold_relational_const (code, type, arg0, arg1);
9993 else
9994 tem = NULL_TREE;
9996 if (tem != NULL_TREE)
9998 if (TREE_TYPE (tem) != type)
9999 tem = fold_convert_loc (loc, type, tem);
10000 return tem;
10004 /* If this is a commutative operation, and ARG0 is a constant, move it
10005 to ARG1 to reduce the number of tests below. */
10006 if (commutative_tree_code (code)
10007 && tree_swap_operands_p (arg0, arg1, true))
10008 return fold_build2_loc (loc, code, type, op1, op0);
10010 /* ARG0 is the first operand of EXPR, and ARG1 is the second operand.
10012 First check for cases where an arithmetic operation is applied to a
10013 compound, conditional, or comparison operation. Push the arithmetic
10014 operation inside the compound or conditional to see if any folding
10015 can then be done. Convert comparison to conditional for this purpose.
10016 This also optimizes non-constant cases that used to be done in
10017 expand_expr.
10019 Before we do that, see if this is a BIT_AND_EXPR or a BIT_IOR_EXPR where
10020 one of the operands is a comparison and the other is a comparison, a
10021 BIT_AND_EXPR with the constant 1, or a truth value. In that case, the
10022 code below would make the expression more complex. Change it to a
10023 TRUTH_{AND,OR}_EXPR. Likewise, convert a similar NE_EXPR to
10024 TRUTH_XOR_EXPR and an EQ_EXPR to the inversion of a TRUTH_XOR_EXPR. */
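/* E.g. "(a < b) & (c < d)" becomes the TRUTH_AND_EXPR
   "(a < b) && (c < d)", and "(a < b) == (c < d)" becomes the
   inversion of a TRUTH_XOR_EXPR of the two comparisons.  */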
10026 if ((code == BIT_AND_EXPR || code == BIT_IOR_EXPR
10027 || code == EQ_EXPR || code == NE_EXPR)
10028 && ((truth_value_p (TREE_CODE (arg0))
10029 && (truth_value_p (TREE_CODE (arg1))
10030 || (TREE_CODE (arg1) == BIT_AND_EXPR
10031 && integer_onep (TREE_OPERAND (arg1, 1)))))
10032 || (truth_value_p (TREE_CODE (arg1))
10033 && (truth_value_p (TREE_CODE (arg0))
10034 || (TREE_CODE (arg0) == BIT_AND_EXPR
10035 && integer_onep (TREE_OPERAND (arg0, 1)))))))
10037 tem = fold_build2_loc (loc, code == BIT_AND_EXPR ? TRUTH_AND_EXPR
10038 : code == BIT_IOR_EXPR ? TRUTH_OR_EXPR
10039 : TRUTH_XOR_EXPR,
10040 boolean_type_node,
10041 fold_convert_loc (loc, boolean_type_node, arg0),
10042 fold_convert_loc (loc, boolean_type_node, arg1));
10044 if (code == EQ_EXPR)
10045 tem = invert_truthvalue_loc (loc, tem);
10047 return fold_convert_loc (loc, type, tem);
10050 if (TREE_CODE_CLASS (code) == tcc_binary
10051 || TREE_CODE_CLASS (code) == tcc_comparison)
10053 if (TREE_CODE (arg0) == COMPOUND_EXPR)
10055 tem = fold_build2_loc (loc, code, type,
10056 fold_convert_loc (loc, TREE_TYPE (op0),
10057 TREE_OPERAND (arg0, 1)), op1);
10058 protected_set_expr_location (tem, loc);
10059 tem = build2 (COMPOUND_EXPR, type, TREE_OPERAND (arg0, 0), tem);
10060 goto fold_binary_exit;
10062 if (TREE_CODE (arg1) == COMPOUND_EXPR
10063 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
10065 tem = fold_build2_loc (loc, code, type, op0,
10066 fold_convert_loc (loc, TREE_TYPE (op1),
10067 TREE_OPERAND (arg1, 1)));
10068 protected_set_expr_location (tem, loc);
10069 tem = build2 (COMPOUND_EXPR, type, TREE_OPERAND (arg1, 0), tem);
10070 goto fold_binary_exit;
10073 if (TREE_CODE (arg0) == COND_EXPR || COMPARISON_CLASS_P (arg0))
10075 tem = fold_binary_op_with_conditional_arg (loc, code, type, op0, op1,
10076 arg0, arg1,
10077 /*cond_first_p=*/1);
10078 if (tem != NULL_TREE)
10079 return tem;
10082 if (TREE_CODE (arg1) == COND_EXPR || COMPARISON_CLASS_P (arg1))
10084 tem = fold_binary_op_with_conditional_arg (loc, code, type, op0, op1,
10085 arg1, arg0,
10086 /*cond_first_p=*/0);
10087 if (tem != NULL_TREE)
10088 return tem;
10092 switch (code)
10094 case POINTER_PLUS_EXPR:
10095 /* 0 +p index -> (type)index */
10096 if (integer_zerop (arg0))
10097 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
10099 /* PTR +p 0 -> PTR */
10100 if (integer_zerop (arg1))
10101 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
10103 /* INT +p INT -> (PTR)(INT + INT). Stripping types allows for this. */
10104 if (INTEGRAL_TYPE_P (TREE_TYPE (arg1))
10105 && INTEGRAL_TYPE_P (TREE_TYPE (arg0)))
10106 return fold_convert_loc (loc, type,
10107 fold_build2_loc (loc, PLUS_EXPR, sizetype,
10108 fold_convert_loc (loc, sizetype,
10109 arg1),
10110 fold_convert_loc (loc, sizetype,
10111 arg0)));
10113 /* index +p PTR -> PTR +p index */
10114 if (POINTER_TYPE_P (TREE_TYPE (arg1))
10115 && INTEGRAL_TYPE_P (TREE_TYPE (arg0)))
10116 return fold_build2_loc (loc, POINTER_PLUS_EXPR, type,
10117 fold_convert_loc (loc, type, arg1),
10118 fold_convert_loc (loc, sizetype, arg0));
10120 /* (PTR +p B) +p A -> PTR +p (B + A) */
10121 if (TREE_CODE (arg0) == POINTER_PLUS_EXPR)
10123 tree inner;
10124 tree arg01 = fold_convert_loc (loc, sizetype, TREE_OPERAND (arg0, 1));
10125 tree arg00 = TREE_OPERAND (arg0, 0);
10126 inner = fold_build2_loc (loc, PLUS_EXPR, sizetype,
10127 arg01, fold_convert_loc (loc, sizetype, arg1));
10128 return fold_convert_loc (loc, type,
10129 fold_build2_loc (loc, POINTER_PLUS_EXPR,
10130 TREE_TYPE (arg00),
10131 arg00, inner));
10134 /* PTR_CST +p CST -> CST1 */
10135 if (TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
10136 return fold_build2_loc (loc, PLUS_EXPR, type, arg0,
10137 fold_convert_loc (loc, type, arg1));
10139 /* Try replacing &a[i1] +p c * i2 with &a[i1 + i2], if c is the step
10140 of the array. The loop optimizer sometimes produces this type of
10141 expression. */
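/* E.g. for "int a[N]" (assuming 4-byte ints so the element size is
   4), "&a[i] p+ 4 * j" can become "&a[i + j]".  */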
10142 if (TREE_CODE (arg0) == ADDR_EXPR)
10144 tem = try_move_mult_to_index (loc, arg0,
10145 fold_convert_loc (loc, sizetype, arg1));
10146 if (tem)
10147 return fold_convert_loc (loc, type, tem);
10150 return NULL_TREE;
10152 case PLUS_EXPR:
10153 /* A + (-B) -> A - B */
10154 if (TREE_CODE (arg1) == NEGATE_EXPR)
10155 return fold_build2_loc (loc, MINUS_EXPR, type,
10156 fold_convert_loc (loc, type, arg0),
10157 fold_convert_loc (loc, type,
10158 TREE_OPERAND (arg1, 0)));
10159 /* (-A) + B -> B - A */
10160 if (TREE_CODE (arg0) == NEGATE_EXPR
10161 && reorder_operands_p (TREE_OPERAND (arg0, 0), arg1))
10162 return fold_build2_loc (loc, MINUS_EXPR, type,
10163 fold_convert_loc (loc, type, arg1),
10164 fold_convert_loc (loc, type,
10165 TREE_OPERAND (arg0, 0)));
10167 if (INTEGRAL_TYPE_P (type))
10169 /* Convert ~A + 1 to -A. */
10170 if (TREE_CODE (arg0) == BIT_NOT_EXPR
10171 && integer_onep (arg1))
10172 return fold_build1_loc (loc, NEGATE_EXPR, type,
10173 fold_convert_loc (loc, type,
10174 TREE_OPERAND (arg0, 0)));
10176 /* ~X + X is -1. */
10177 if (TREE_CODE (arg0) == BIT_NOT_EXPR
10178 && !TYPE_OVERFLOW_TRAPS (type))
10180 tree tem = TREE_OPERAND (arg0, 0);
10182 STRIP_NOPS (tem);
10183 if (operand_equal_p (tem, arg1, 0))
10185 t1 = build_int_cst_type (type, -1);
10186 return omit_one_operand_loc (loc, type, t1, arg1);
10190 /* X + ~X is -1. */
10191 if (TREE_CODE (arg1) == BIT_NOT_EXPR
10192 && !TYPE_OVERFLOW_TRAPS (type))
10194 tree tem = TREE_OPERAND (arg1, 0);
10196 STRIP_NOPS (tem);
10197 if (operand_equal_p (arg0, tem, 0))
10199 t1 = build_int_cst_type (type, -1);
10200 return omit_one_operand_loc (loc, type, t1, arg0);
10204 /* X + (X / CST) * -CST is X % CST. */
10205 if (TREE_CODE (arg1) == MULT_EXPR
10206 && TREE_CODE (TREE_OPERAND (arg1, 0)) == TRUNC_DIV_EXPR
10207 && operand_equal_p (arg0,
10208 TREE_OPERAND (TREE_OPERAND (arg1, 0), 0), 0))
10210 tree cst0 = TREE_OPERAND (TREE_OPERAND (arg1, 0), 1);
10211 tree cst1 = TREE_OPERAND (arg1, 1);
10212 tree sum = fold_binary_loc (loc, PLUS_EXPR, TREE_TYPE (cst1),
10213 cst1, cst0);
10214 if (sum && integer_zerop (sum))
10215 return fold_convert_loc (loc, type,
10216 fold_build2_loc (loc, TRUNC_MOD_EXPR,
10217 TREE_TYPE (arg0), arg0,
10218 cst0));
10222 /* Handle (A1 * C1) + (A2 * C2) with A1, A2 or C1, C2 being the
10223 same or one. Make sure type is not saturating.
10224 fold_plusminus_mult_expr will re-associate. */
10225 if ((TREE_CODE (arg0) == MULT_EXPR
10226 || TREE_CODE (arg1) == MULT_EXPR)
10227 && !TYPE_SATURATING (type)
10228 && (!FLOAT_TYPE_P (type) || flag_associative_math))
10230 tree tem = fold_plusminus_mult_expr (loc, code, type, arg0, arg1);
10231 if (tem)
10232 return tem;
10235 if (! FLOAT_TYPE_P (type))
10237 if (integer_zerop (arg1))
10238 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
10240 /* If we are adding two BIT_AND_EXPR's, both of which are and'ing
10241 with a constant, and the two constants have no bits in common,
10242 we should treat this as a BIT_IOR_EXPR since this may produce more
10243 simplifications. */
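/* E.g. "(X & 0xF0) + (Y & 0x0F)": the two masks share no bits, so
   this is handled as "(X & 0xF0) | (Y & 0x0F)" below.  */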
10244 if (TREE_CODE (arg0) == BIT_AND_EXPR
10245 && TREE_CODE (arg1) == BIT_AND_EXPR
10246 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
10247 && TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
10248 && integer_zerop (const_binop (BIT_AND_EXPR,
10249 TREE_OPERAND (arg0, 1),
10250 TREE_OPERAND (arg1, 1), 0)))
10252 code = BIT_IOR_EXPR;
10253 goto bit_ior;
10256 /* Reassociate (plus (plus (mult) (foo)) (mult)) as
10257 (plus (plus (mult) (mult)) (foo)) so that we can
10258 take advantage of the factoring cases below. */
10259 if (((TREE_CODE (arg0) == PLUS_EXPR
10260 || TREE_CODE (arg0) == MINUS_EXPR)
10261 && TREE_CODE (arg1) == MULT_EXPR)
10262 || ((TREE_CODE (arg1) == PLUS_EXPR
10263 || TREE_CODE (arg1) == MINUS_EXPR)
10264 && TREE_CODE (arg0) == MULT_EXPR))
10266 tree parg0, parg1, parg, marg;
10267 enum tree_code pcode;
10269 if (TREE_CODE (arg1) == MULT_EXPR)
10270 parg = arg0, marg = arg1;
10271 else
10272 parg = arg1, marg = arg0;
10273 pcode = TREE_CODE (parg);
10274 parg0 = TREE_OPERAND (parg, 0);
10275 parg1 = TREE_OPERAND (parg, 1);
10276 STRIP_NOPS (parg0);
10277 STRIP_NOPS (parg1);
10279 if (TREE_CODE (parg0) == MULT_EXPR
10280 && TREE_CODE (parg1) != MULT_EXPR)
10281 return fold_build2_loc (loc, pcode, type,
10282 fold_build2_loc (loc, PLUS_EXPR, type,
10283 fold_convert_loc (loc, type,
10284 parg0),
10285 fold_convert_loc (loc, type,
10286 marg)),
10287 fold_convert_loc (loc, type, parg1));
10288 if (TREE_CODE (parg0) != MULT_EXPR
10289 && TREE_CODE (parg1) == MULT_EXPR)
10290 return
10291 fold_build2_loc (loc, PLUS_EXPR, type,
10292 fold_convert_loc (loc, type, parg0),
10293 fold_build2_loc (loc, pcode, type,
10294 fold_convert_loc (loc, type, marg),
10295 fold_convert_loc (loc, type,
10296 parg1)));
10299 else
10301 /* See if ARG1 is zero and X + ARG1 reduces to X. */
10302 if (fold_real_zero_addition_p (TREE_TYPE (arg0), arg1, 0))
10303 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
10305 /* Likewise if the operands are reversed. */
10306 if (fold_real_zero_addition_p (TREE_TYPE (arg1), arg0, 0))
10307 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
10309 /* Convert X + -C into X - C. */
10310 if (TREE_CODE (arg1) == REAL_CST
10311 && REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg1)))
10313 tem = fold_negate_const (arg1, type);
10314 if (!TREE_OVERFLOW (arg1) || !flag_trapping_math)
10315 return fold_build2_loc (loc, MINUS_EXPR, type,
10316 fold_convert_loc (loc, type, arg0),
10317 fold_convert_loc (loc, type, tem));
10320 /* Fold __complex__ ( x, 0 ) + __complex__ ( 0, y )
10321 to __complex__ ( x, y ). This is not the same for SNaNs or
10322 if signed zeros are involved. */
10323 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
10324 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
10325 && COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0)))
10327 tree rtype = TREE_TYPE (TREE_TYPE (arg0));
10328 tree arg0r = fold_unary_loc (loc, REALPART_EXPR, rtype, arg0);
10329 tree arg0i = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg0);
10330 bool arg0rz = false, arg0iz = false;
10331 if ((arg0r && (arg0rz = real_zerop (arg0r)))
10332 || (arg0i && (arg0iz = real_zerop (arg0i))))
10334 tree arg1r = fold_unary_loc (loc, REALPART_EXPR, rtype, arg1);
10335 tree arg1i = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg1);
10336 if (arg0rz && arg1i && real_zerop (arg1i))
10338 tree rp = arg1r ? arg1r
10339 : build1 (REALPART_EXPR, rtype, arg1);
10340 tree ip = arg0i ? arg0i
10341 : build1 (IMAGPART_EXPR, rtype, arg0);
10342 return fold_build2_loc (loc, COMPLEX_EXPR, type, rp, ip);
10344 else if (arg0iz && arg1r && real_zerop (arg1r))
10346 tree rp = arg0r ? arg0r
10347 : build1 (REALPART_EXPR, rtype, arg0);
10348 tree ip = arg1i ? arg1i
10349 : build1 (IMAGPART_EXPR, rtype, arg1);
10350 return fold_build2_loc (loc, COMPLEX_EXPR, type, rp, ip);
10355 if (flag_unsafe_math_optimizations
10356 && (TREE_CODE (arg0) == RDIV_EXPR || TREE_CODE (arg0) == MULT_EXPR)
10357 && (TREE_CODE (arg1) == RDIV_EXPR || TREE_CODE (arg1) == MULT_EXPR)
10358 && (tem = distribute_real_division (loc, code, type, arg0, arg1)))
10359 return tem;
10361 /* Convert x+x into x*2.0. */
10362 if (operand_equal_p (arg0, arg1, 0)
10363 && SCALAR_FLOAT_TYPE_P (type))
10364 return fold_build2_loc (loc, MULT_EXPR, type, arg0,
10365 build_real (type, dconst2));
10367 /* Convert a + (b*c + d*e) into (a + b*c) + d*e.
10368 We associate floats only if the user has specified
10369 -fassociative-math. */
10370 if (flag_associative_math
10371 && TREE_CODE (arg1) == PLUS_EXPR
10372 && TREE_CODE (arg0) != MULT_EXPR)
10374 tree tree10 = TREE_OPERAND (arg1, 0);
10375 tree tree11 = TREE_OPERAND (arg1, 1);
10376 if (TREE_CODE (tree11) == MULT_EXPR
10377 && TREE_CODE (tree10) == MULT_EXPR)
10379 tree tree0;
10380 tree0 = fold_build2_loc (loc, PLUS_EXPR, type, arg0, tree10);
10381 return fold_build2_loc (loc, PLUS_EXPR, type, tree0, tree11);
10384 /* Convert (b*c + d*e) + a into b*c + (d*e + a).
10385 We associate floats only if the user has specified
10386 -fassociative-math. */
10387 if (flag_associative_math
10388 && TREE_CODE (arg0) == PLUS_EXPR
10389 && TREE_CODE (arg1) != MULT_EXPR)
10391 tree tree00 = TREE_OPERAND (arg0, 0);
10392 tree tree01 = TREE_OPERAND (arg0, 1);
10393 if (TREE_CODE (tree01) == MULT_EXPR
10394 && TREE_CODE (tree00) == MULT_EXPR)
10396 tree tree0;
10397 tree0 = fold_build2_loc (loc, PLUS_EXPR, type, tree01, arg1);
10398 return fold_build2_loc (loc, PLUS_EXPR, type, tree00, tree0);
10403 bit_rotate:
10404 /* (A << C1) + (A >> C2) if A is unsigned and C1+C2 is the size of A
10405 is a rotate of A by C1 bits. */
10406 /* (A << B) + (A >> (Z - B)) if A is unsigned and Z is the size of A
10407 is a rotate of A by B bits. */
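/* E.g. for a 32-bit unsigned int x, "(x << 3) + (x >> 29)" and
   "(x << n) + (x >> (32 - n))" both become rotates of x to the
   left.  */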
10409 enum tree_code code0, code1;
10410 tree rtype;
10411 code0 = TREE_CODE (arg0);
10412 code1 = TREE_CODE (arg1);
10413 if (((code0 == RSHIFT_EXPR && code1 == LSHIFT_EXPR)
10414 || (code1 == RSHIFT_EXPR && code0 == LSHIFT_EXPR))
10415 && operand_equal_p (TREE_OPERAND (arg0, 0),
10416 TREE_OPERAND (arg1, 0), 0)
10417 && (rtype = TREE_TYPE (TREE_OPERAND (arg0, 0)),
10418 TYPE_UNSIGNED (rtype))
10419 /* Only create rotates in complete modes. Other cases are not
10420 expanded properly. */
10421 && TYPE_PRECISION (rtype) == GET_MODE_PRECISION (TYPE_MODE (rtype)))
10423 tree tree01, tree11;
10424 enum tree_code code01, code11;
10426 tree01 = TREE_OPERAND (arg0, 1);
10427 tree11 = TREE_OPERAND (arg1, 1);
10428 STRIP_NOPS (tree01);
10429 STRIP_NOPS (tree11);
10430 code01 = TREE_CODE (tree01);
10431 code11 = TREE_CODE (tree11);
10432 if (code01 == INTEGER_CST
10433 && code11 == INTEGER_CST
10434 && TREE_INT_CST_HIGH (tree01) == 0
10435 && TREE_INT_CST_HIGH (tree11) == 0
10436 && ((TREE_INT_CST_LOW (tree01) + TREE_INT_CST_LOW (tree11))
10437 == TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg0, 0)))))
10439 tem = build2 (LROTATE_EXPR,
10440 TREE_TYPE (TREE_OPERAND (arg0, 0)),
10441 TREE_OPERAND (arg0, 0),
10442 code0 == LSHIFT_EXPR
10443 ? tree01 : tree11);
10444 SET_EXPR_LOCATION (tem, loc);
10445 return fold_convert_loc (loc, type, tem);
10447 else if (code11 == MINUS_EXPR)
10449 tree tree110, tree111;
10450 tree110 = TREE_OPERAND (tree11, 0);
10451 tree111 = TREE_OPERAND (tree11, 1);
10452 STRIP_NOPS (tree110);
10453 STRIP_NOPS (tree111);
10454 if (TREE_CODE (tree110) == INTEGER_CST
10455 && 0 == compare_tree_int (tree110,
10456 TYPE_PRECISION
10457 (TREE_TYPE (TREE_OPERAND
10458 (arg0, 0))))
10459 && operand_equal_p (tree01, tree111, 0))
10460 return
10461 fold_convert_loc (loc, type,
10462 build2 ((code0 == LSHIFT_EXPR
10463 ? LROTATE_EXPR
10464 : RROTATE_EXPR),
10465 TREE_TYPE (TREE_OPERAND (arg0, 0)),
10466 TREE_OPERAND (arg0, 0), tree01));
10468 else if (code01 == MINUS_EXPR)
10470 tree tree010, tree011;
10471 tree010 = TREE_OPERAND (tree01, 0);
10472 tree011 = TREE_OPERAND (tree01, 1);
10473 STRIP_NOPS (tree010);
10474 STRIP_NOPS (tree011);
10475 if (TREE_CODE (tree010) == INTEGER_CST
10476 && 0 == compare_tree_int (tree010,
10477 TYPE_PRECISION
10478 (TREE_TYPE (TREE_OPERAND
10479 (arg0, 0))))
10480 && operand_equal_p (tree11, tree011, 0))
10481 return fold_convert_loc
10482 (loc, type,
10483 build2 ((code0 != LSHIFT_EXPR
10484 ? LROTATE_EXPR
10485 : RROTATE_EXPR),
10486 TREE_TYPE (TREE_OPERAND (arg0, 0)),
10487 TREE_OPERAND (arg0, 0), tree11));
10492 associate:
10493 /* In most languages, we can't associate operations on floats through
10494 parentheses. Rather than remember where the parentheses were, we
10495 don't associate floats at all, unless the user has specified
10496 -fassociative-math.
10497 And, we need to make sure type is not saturating. */
10499 if ((! FLOAT_TYPE_P (type) || flag_associative_math)
10500 && !TYPE_SATURATING (type))
10502 tree var0, con0, lit0, minus_lit0;
10503 tree var1, con1, lit1, minus_lit1;
10504 bool ok = true;
10506 /* Split both trees into variables, constants, and literals. Then
10507 associate each group together, the constants with literals,
10508 then the result with variables. This increases the chances of
10509 literals being recombined later and of generating relocatable
10510 expressions for the sum of a constant and literal. */
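/* E.g. "(x + 3) + (y + 5)" splits into variables x, y and literals
   3, 5, which recombine below as "(x + y) + 8".  */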
10511 var0 = split_tree (arg0, code, &con0, &lit0, &minus_lit0, 0);
10512 var1 = split_tree (arg1, code, &con1, &lit1, &minus_lit1,
10513 code == MINUS_EXPR);
10515 /* With undefined overflow we can only associate constants
10516 with one variable. */
10517 if (((POINTER_TYPE_P (type) && POINTER_TYPE_OVERFLOW_UNDEFINED)
10518 || (INTEGRAL_TYPE_P (type) && !TYPE_OVERFLOW_WRAPS (type)))
10519 && var0 && var1)
10521 tree tmp0 = var0;
10522 tree tmp1 = var1;
10524 if (TREE_CODE (tmp0) == NEGATE_EXPR)
10525 tmp0 = TREE_OPERAND (tmp0, 0);
10526 if (TREE_CODE (tmp1) == NEGATE_EXPR)
10527 tmp1 = TREE_OPERAND (tmp1, 0);
10528 /* The only case we can still associate with two variables
10529 is if they are the same, modulo negation. */
10530 if (!operand_equal_p (tmp0, tmp1, 0))
10531 ok = false;
10534 /* Only do something if we found more than two objects. Otherwise,
10535 nothing has changed and we risk infinite recursion. */
10536 if (ok
10537 && (2 < ((var0 != 0) + (var1 != 0)
10538 + (con0 != 0) + (con1 != 0)
10539 + (lit0 != 0) + (lit1 != 0)
10540 + (minus_lit0 != 0) + (minus_lit1 != 0))))
10542 /* Recombine MINUS_EXPR operands by using PLUS_EXPR. */
10543 if (code == MINUS_EXPR)
10544 code = PLUS_EXPR;
10546 var0 = associate_trees (loc, var0, var1, code, type);
10547 con0 = associate_trees (loc, con0, con1, code, type);
10548 lit0 = associate_trees (loc, lit0, lit1, code, type);
10549 minus_lit0 = associate_trees (loc, minus_lit0, minus_lit1, code, type);
10551 /* Preserve the MINUS_EXPR if the negative part of the literal is
10552 greater than the positive part. Otherwise, the multiplicative
10553 folding code (i.e. extract_muldiv) may be fooled in case
10554 unsigned constants are subtracted, like in the following
10555 example: ((X*2 + 4) - 8U)/2. */
10556 if (minus_lit0 && lit0)
10558 if (TREE_CODE (lit0) == INTEGER_CST
10559 && TREE_CODE (minus_lit0) == INTEGER_CST
10560 && tree_int_cst_lt (lit0, minus_lit0))
10562 minus_lit0 = associate_trees (loc, minus_lit0, lit0,
10563 MINUS_EXPR, type);
10564 lit0 = 0;
10566 else
10568 lit0 = associate_trees (loc, lit0, minus_lit0,
10569 MINUS_EXPR, type);
10570 minus_lit0 = 0;
10573 if (minus_lit0)
10575 if (con0 == 0)
10576 return
10577 fold_convert_loc (loc, type,
10578 associate_trees (loc, var0, minus_lit0,
10579 MINUS_EXPR, type));
10580 else
10582 con0 = associate_trees (loc, con0, minus_lit0,
10583 MINUS_EXPR, type);
10584 return
10585 fold_convert_loc (loc, type,
10586 associate_trees (loc, var0, con0,
10587 PLUS_EXPR, type));
10591 con0 = associate_trees (loc, con0, lit0, code, type);
10592 return
10593 fold_convert_loc (loc, type, associate_trees (loc, var0, con0,
10594 code, type));
10598 return NULL_TREE;
10600 case MINUS_EXPR:
10601 /* Pointer simplifications for subtraction, simple reassociations. */
10602 if (POINTER_TYPE_P (TREE_TYPE (arg1)) && POINTER_TYPE_P (TREE_TYPE (arg0)))
10604 /* (PTR0 p+ A) - (PTR1 p+ B) -> (PTR0 - PTR1) + (A - B) */
10605 if (TREE_CODE (arg0) == POINTER_PLUS_EXPR
10606 && TREE_CODE (arg1) == POINTER_PLUS_EXPR)
10608 tree arg00 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
10609 tree arg01 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
10610 tree arg10 = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 0));
10611 tree arg11 = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 1));
10612 return fold_build2_loc (loc, PLUS_EXPR, type,
10613 fold_build2_loc (loc, MINUS_EXPR, type,
10614 arg00, arg10),
10615 fold_build2_loc (loc, MINUS_EXPR, type,
10616 arg01, arg11));
10618 /* (PTR0 p+ A) - PTR1 -> (PTR0 - PTR1) + A, assuming PTR0 - PTR1 simplifies. */
10619 else if (TREE_CODE (arg0) == POINTER_PLUS_EXPR)
10621 tree arg00 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
10622 tree arg01 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
10623 tree tmp = fold_binary_loc (loc, MINUS_EXPR, type, arg00,
10624 fold_convert_loc (loc, type, arg1));
10625 if (tmp)
10626 return fold_build2_loc (loc, PLUS_EXPR, type, tmp, arg01);
10629 /* A - (-B) -> A + B */
10630 if (TREE_CODE (arg1) == NEGATE_EXPR)
10631 return fold_build2_loc (loc, PLUS_EXPR, type, op0,
10632 fold_convert_loc (loc, type,
10633 TREE_OPERAND (arg1, 0)));
10634 /* (-A) - B -> (-B) - A where B is easily negated and we can swap. */
10635 if (TREE_CODE (arg0) == NEGATE_EXPR
10636 && (FLOAT_TYPE_P (type)
10637 || INTEGRAL_TYPE_P (type))
10638 && negate_expr_p (arg1)
10639 && reorder_operands_p (arg0, arg1))
10640 return fold_build2_loc (loc, MINUS_EXPR, type,
10641 fold_convert_loc (loc, type,
10642 negate_expr (arg1)),
10643 fold_convert_loc (loc, type,
10644 TREE_OPERAND (arg0, 0)));
10645 /* Convert -A - 1 to ~A. */
10646 if (INTEGRAL_TYPE_P (type)
10647 && TREE_CODE (arg0) == NEGATE_EXPR
10648 && integer_onep (arg1)
10649 && !TYPE_OVERFLOW_TRAPS (type))
10650 return fold_build1_loc (loc, BIT_NOT_EXPR, type,
10651 fold_convert_loc (loc, type,
10652 TREE_OPERAND (arg0, 0)));
10654 /* Convert -1 - A to ~A. */
10655 if (INTEGRAL_TYPE_P (type)
10656 && integer_all_onesp (arg0))
10657 return fold_build1_loc (loc, BIT_NOT_EXPR, type, op1);
10660 /* X - (X / CST) * CST is X % CST. */
10661 if (INTEGRAL_TYPE_P (type)
10662 && TREE_CODE (arg1) == MULT_EXPR
10663 && TREE_CODE (TREE_OPERAND (arg1, 0)) == TRUNC_DIV_EXPR
10664 && operand_equal_p (arg0,
10665 TREE_OPERAND (TREE_OPERAND (arg1, 0), 0), 0)
10666 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg1, 0), 1),
10667 TREE_OPERAND (arg1, 1), 0))
10668 return
10669 fold_convert_loc (loc, type,
10670 fold_build2_loc (loc, TRUNC_MOD_EXPR, TREE_TYPE (arg0),
10671 arg0, TREE_OPERAND (arg1, 1)));
10673 if (! FLOAT_TYPE_P (type))
10675 if (integer_zerop (arg0))
10676 return negate_expr (fold_convert_loc (loc, type, arg1));
10677 if (integer_zerop (arg1))
10678 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
10680 /* Fold A - (A & B) into ~B & A. */
10681 if (!TREE_SIDE_EFFECTS (arg0)
10682 && TREE_CODE (arg1) == BIT_AND_EXPR)
10684 if (operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0))
10686 tree arg10 = fold_convert_loc (loc, type,
10687 TREE_OPERAND (arg1, 0));
10688 return fold_build2_loc (loc, BIT_AND_EXPR, type,
10689 fold_build1_loc (loc, BIT_NOT_EXPR,
10690 type, arg10),
10691 fold_convert_loc (loc, type, arg0));
10693 if (operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
10695 tree arg11 = fold_convert_loc (loc,
10696 type, TREE_OPERAND (arg1, 1));
10697 return fold_build2_loc (loc, BIT_AND_EXPR, type,
10698 fold_build1_loc (loc, BIT_NOT_EXPR,
10699 type, arg11),
10700 fold_convert_loc (loc, type, arg0));
10704 /* Fold (A & ~B) - (A & B) into (A ^ B) - B, where B is
10705 any power of 2 minus 1. */
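/* E.g. with B = 7: "(A & ~7) - (A & 7)" becomes "(A ^ 7) - 7";
   both expressions equal A - 2*(A & 7).  */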
10706 if (TREE_CODE (arg0) == BIT_AND_EXPR
10707 && TREE_CODE (arg1) == BIT_AND_EXPR
10708 && operand_equal_p (TREE_OPERAND (arg0, 0),
10709 TREE_OPERAND (arg1, 0), 0))
10711 tree mask0 = TREE_OPERAND (arg0, 1);
10712 tree mask1 = TREE_OPERAND (arg1, 1);
10713 tree tem = fold_build1_loc (loc, BIT_NOT_EXPR, type, mask0);
10715 if (operand_equal_p (tem, mask1, 0))
10717 tem = fold_build2_loc (loc, BIT_XOR_EXPR, type,
10718 TREE_OPERAND (arg0, 0), mask1);
10719 return fold_build2_loc (loc, MINUS_EXPR, type, tem, mask1);
10724 /* See if ARG1 is zero and X - ARG1 reduces to X. */
10725 else if (fold_real_zero_addition_p (TREE_TYPE (arg0), arg1, 1))
10726 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
10728 /* (ARG0 - ARG1) is the same as (-ARG1 + ARG0). So check whether
10729 ARG0 is zero and X + ARG0 reduces to X, since that would mean
10730 (-ARG1 + ARG0) reduces to -ARG1. */
10731 else if (fold_real_zero_addition_p (TREE_TYPE (arg1), arg0, 0))
10732 return negate_expr (fold_convert_loc (loc, type, arg1));
10734 /* Fold __complex__ ( x, 0 ) - __complex__ ( 0, y ) to
10735 __complex__ ( x, -y ). This is not the same for SNaNs or if
10736 signed zeros are involved. */
10737 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
10738 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
10739 && COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0)))
10741 tree rtype = TREE_TYPE (TREE_TYPE (arg0));
10742 tree arg0r = fold_unary_loc (loc, REALPART_EXPR, rtype, arg0);
10743 tree arg0i = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg0);
10744 bool arg0rz = false, arg0iz = false;
10745 if ((arg0r && (arg0rz = real_zerop (arg0r)))
10746 || (arg0i && (arg0iz = real_zerop (arg0i))))
10748 tree arg1r = fold_unary_loc (loc, REALPART_EXPR, rtype, arg1);
10749 tree arg1i = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg1);
10750 if (arg0rz && arg1i && real_zerop (arg1i))
10752 tree rp = fold_build1_loc (loc, NEGATE_EXPR, rtype,
10753 arg1r ? arg1r
10754 : build1 (REALPART_EXPR, rtype, arg1));
10755 tree ip = arg0i ? arg0i
10756 : build1 (IMAGPART_EXPR, rtype, arg0);
10757 return fold_build2_loc (loc, COMPLEX_EXPR, type, rp, ip);
10759 else if (arg0iz && arg1r && real_zerop (arg1r))
10761 tree rp = arg0r ? arg0r
10762 : build1 (REALPART_EXPR, rtype, arg0);
10763 tree ip = fold_build1_loc (loc, NEGATE_EXPR, rtype,
10764 arg1i ? arg1i
10765 : build1 (IMAGPART_EXPR, rtype, arg1));
10766 return fold_build2_loc (loc, COMPLEX_EXPR, type, rp, ip);
10771 /* Fold &x - &x. This can happen from &x.foo - &x.
10772 This is unsafe for certain floats even in non-IEEE formats.
10773 In IEEE, it is unsafe because it gives the wrong result for NaNs.
10774 Also note that operand_equal_p is always false if an operand
10775 is volatile. */
10777 if ((!FLOAT_TYPE_P (type) || !HONOR_NANS (TYPE_MODE (type)))
10778 && operand_equal_p (arg0, arg1, 0))
10779 return fold_convert_loc (loc, type, integer_zero_node);
10781 /* A - B -> A + (-B) if B is easily negatable. */
10782 if (negate_expr_p (arg1)
10783 && ((FLOAT_TYPE_P (type)
10784 /* Avoid this transformation if B is a positive REAL_CST. */
10785 && (TREE_CODE (arg1) != REAL_CST
10786 || REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg1))))
10787 || INTEGRAL_TYPE_P (type)))
10788 return fold_build2_loc (loc, PLUS_EXPR, type,
10789 fold_convert_loc (loc, type, arg0),
10790 fold_convert_loc (loc, type,
10791 negate_expr (arg1)));
10793 /* Try folding difference of addresses. */
10795 HOST_WIDE_INT diff;
10797 if ((TREE_CODE (arg0) == ADDR_EXPR
10798 || TREE_CODE (arg1) == ADDR_EXPR)
10799 && ptr_difference_const (arg0, arg1, &diff))
10800 return build_int_cst_type (type, diff);
10803 /* Fold &a[i] - &a[j] to (i-j) scaled by the element size. */
10804 if (TREE_CODE (arg0) == ADDR_EXPR
10805 && TREE_CODE (TREE_OPERAND (arg0, 0)) == ARRAY_REF
10806 && TREE_CODE (arg1) == ADDR_EXPR
10807 && TREE_CODE (TREE_OPERAND (arg1, 0)) == ARRAY_REF)
10809 tree aref0 = TREE_OPERAND (arg0, 0);
10810 tree aref1 = TREE_OPERAND (arg1, 0);
10811 if (operand_equal_p (TREE_OPERAND (aref0, 0),
10812 TREE_OPERAND (aref1, 0), 0))
10814 tree op0 = fold_convert_loc (loc, type, TREE_OPERAND (aref0, 1));
10815 tree op1 = fold_convert_loc (loc, type, TREE_OPERAND (aref1, 1));
10816 tree esz = array_ref_element_size (aref0);
10817 tree diff = build2 (MINUS_EXPR, type, op0, op1);
10818 return fold_build2_loc (loc, MULT_EXPR, type, diff,
10819 fold_convert_loc (loc, type, esz));
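/* For example, &a[5] - &a[2] folds to (5 - 2) * sizeof (a[0]),
   i.e. the byte offset between the two elements.  */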
10824 if (FLOAT_TYPE_P (type)
10825 && flag_unsafe_math_optimizations
10826 && (TREE_CODE (arg0) == RDIV_EXPR || TREE_CODE (arg0) == MULT_EXPR)
10827 && (TREE_CODE (arg1) == RDIV_EXPR || TREE_CODE (arg1) == MULT_EXPR)
10828 && (tem = distribute_real_division (loc, code, type, arg0, arg1)))
10829 return tem;
10831 /* Handle (A1 * C1) - (A2 * C2) with A1, A2 or C1, C2 being the
10832 same or one. Make sure the type is not saturating.
10833 fold_plusminus_mult_expr will re-associate. */
10834 if ((TREE_CODE (arg0) == MULT_EXPR
10835 || TREE_CODE (arg1) == MULT_EXPR)
10836 && !TYPE_SATURATING (type)
10837 && (!FLOAT_TYPE_P (type) || flag_associative_math))
10839 tree tem = fold_plusminus_mult_expr (loc, code, type, arg0, arg1);
10840 if (tem)
10841 return tem;
10844 goto associate;
10846 case MULT_EXPR:
10847 /* (-A) * (-B) -> A * B */
10848 if (TREE_CODE (arg0) == NEGATE_EXPR && negate_expr_p (arg1))
10849 return fold_build2_loc (loc, MULT_EXPR, type,
10850 fold_convert_loc (loc, type,
10851 TREE_OPERAND (arg0, 0)),
10852 fold_convert_loc (loc, type,
10853 negate_expr (arg1)));
10854 if (TREE_CODE (arg1) == NEGATE_EXPR && negate_expr_p (arg0))
10855 return fold_build2_loc (loc, MULT_EXPR, type,
10856 fold_convert_loc (loc, type,
10857 negate_expr (arg0)),
10858 fold_convert_loc (loc, type,
10859 TREE_OPERAND (arg1, 0)));
10861 if (! FLOAT_TYPE_P (type))
10863 if (integer_zerop (arg1))
10864 return omit_one_operand_loc (loc, type, arg1, arg0);
10865 if (integer_onep (arg1))
10866 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
10867 /* Transform x * -1 into -x. Make sure to do the negation
10868 on the original operand with conversions not stripped
10869 because we can only strip non-sign-changing conversions. */
10870 if (integer_all_onesp (arg1))
10871 return fold_convert_loc (loc, type, negate_expr (op0));
10872 /* Transform x * -C into -x * C if x is easily negatable. */
10873 if (TREE_CODE (arg1) == INTEGER_CST
10874 && tree_int_cst_sgn (arg1) == -1
10875 && negate_expr_p (arg0)
10876 && (tem = negate_expr (arg1)) != arg1
10877 && !TREE_OVERFLOW (tem))
10878 return fold_build2_loc (loc, MULT_EXPR, type,
10879 fold_convert_loc (loc, type,
10880 negate_expr (arg0)),
10881 tem);
10883 /* (a * (1 << b)) is (a << b) */
10884 if (TREE_CODE (arg1) == LSHIFT_EXPR
10885 && integer_onep (TREE_OPERAND (arg1, 0)))
10886 return fold_build2_loc (loc, LSHIFT_EXPR, type, op0,
10887 TREE_OPERAND (arg1, 1));
10888 if (TREE_CODE (arg0) == LSHIFT_EXPR
10889 && integer_onep (TREE_OPERAND (arg0, 0)))
10890 return fold_build2_loc (loc, LSHIFT_EXPR, type, op1,
10891 TREE_OPERAND (arg0, 1));
10893 /* (A + A) * C -> A * 2 * C */
10894 if (TREE_CODE (arg0) == PLUS_EXPR
10895 && TREE_CODE (arg1) == INTEGER_CST
10896 && operand_equal_p (TREE_OPERAND (arg0, 0),
10897 TREE_OPERAND (arg0, 1), 0))
10898 return fold_build2_loc (loc, MULT_EXPR, type,
10899 omit_one_operand_loc (loc, type,
10900 TREE_OPERAND (arg0, 0),
10901 TREE_OPERAND (arg0, 1)),
10902 fold_build2_loc (loc, MULT_EXPR, type,
10903 build_int_cst (type, 2), arg1));
10905 strict_overflow_p = false;
10906 if (TREE_CODE (arg1) == INTEGER_CST
10907 && 0 != (tem = extract_muldiv (op0, arg1, code, NULL_TREE,
10908 &strict_overflow_p)))
10910 if (strict_overflow_p)
10911 fold_overflow_warning (("assuming signed overflow does not "
10912 "occur when simplifying "
10913 "multiplication"),
10914 WARN_STRICT_OVERFLOW_MISC);
10915 return fold_convert_loc (loc, type, tem);
10918 /* Optimize z * conj(z) for integer complex numbers. */
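/* For z == a + bi, z * conj(z) == a*a + b*b with a zero imaginary
   part, so the product needs no full complex multiplication.  */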
10919 if (TREE_CODE (arg0) == CONJ_EXPR
10920 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
10921 return fold_mult_zconjz (loc, type, arg1);
10922 if (TREE_CODE (arg1) == CONJ_EXPR
10923 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
10924 return fold_mult_zconjz (loc, type, arg0);
10926 else
10928 /* Maybe fold x * 0 to 0. The expressions aren't the same
10929 when x is NaN, since x * 0 is also NaN. Nor are they the
10930 same in modes with signed zeros, since multiplying a
10931 negative value by 0 gives -0, not +0. */
10932 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0)))
10933 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
10934 && real_zerop (arg1))
10935 return omit_one_operand_loc (loc, type, arg1, arg0);
10936 /* In IEEE floating point, x*1 is not equivalent to x for snans.
10937 Likewise for complex arithmetic with signed zeros. */
10938 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
10939 && (!HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
10940 || !COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0)))
10941 && real_onep (arg1))
10942 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
10944 /* Transform x * -1.0 into -x. */
10945 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
10946 && (!HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
10947 || !COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0)))
10948 && real_minus_onep (arg1))
10949 return fold_convert_loc (loc, type, negate_expr (arg0));
10951 /* Convert (C1/X)*C2 into (C1*C2)/X. This transformation may change
10952 the result for floating point types due to rounding, so it is applied
10953 only if -fassociative-math is specified. */
10954 if (flag_associative_math
10955 && TREE_CODE (arg0) == RDIV_EXPR
10956 && TREE_CODE (arg1) == REAL_CST
10957 && TREE_CODE (TREE_OPERAND (arg0, 0)) == REAL_CST)
10959 tree tem = const_binop (MULT_EXPR, TREE_OPERAND (arg0, 0),
10960 arg1, 0);
10961 if (tem)
10962 return fold_build2_loc (loc, RDIV_EXPR, type, tem,
10963 TREE_OPERAND (arg0, 1));
10966 /* Strip sign operations from X in X*X, i.e. -Y*-Y -> Y*Y. */
10967 if (operand_equal_p (arg0, arg1, 0))
10969 tree tem = fold_strip_sign_ops (arg0);
10970 if (tem != NULL_TREE)
10972 tem = fold_convert_loc (loc, type, tem);
10973 return fold_build2_loc (loc, MULT_EXPR, type, tem, tem);
10977 /* Fold z * +-I to __complex__ (-+__imag z, +-__real z).
10978 This is not the same for NaNs or if signed zeros are
10979 involved. */
10980 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0)))
10981 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
10982 && COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0))
10983 && TREE_CODE (arg1) == COMPLEX_CST
10984 && real_zerop (TREE_REALPART (arg1)))
10986 tree rtype = TREE_TYPE (TREE_TYPE (arg0));
10987 if (real_onep (TREE_IMAGPART (arg1)))
10988 return
10989 fold_build2_loc (loc, COMPLEX_EXPR, type,
10990 negate_expr (fold_build1_loc (loc, IMAGPART_EXPR,
10991 rtype, arg0)),
10992 fold_build1_loc (loc, REALPART_EXPR, rtype, arg0));
10993 else if (real_minus_onep (TREE_IMAGPART (arg1)))
10994 return
10995 fold_build2_loc (loc, COMPLEX_EXPR, type,
10996 fold_build1_loc (loc, IMAGPART_EXPR, rtype, arg0),
10997 negate_expr (fold_build1_loc (loc, REALPART_EXPR,
10998 rtype, arg0)));
11001 /* Optimize z * conj(z) for floating point complex numbers.
11002 Guarded by flag_unsafe_math_optimizations as non-finite
11003 imaginary components don't produce scalar results. */
11004 if (flag_unsafe_math_optimizations
11005 && TREE_CODE (arg0) == CONJ_EXPR
11006 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
11007 return fold_mult_zconjz (loc, type, arg1);
11008 if (flag_unsafe_math_optimizations
11009 && TREE_CODE (arg1) == CONJ_EXPR
11010 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
11011 return fold_mult_zconjz (loc, type, arg0);
11013 if (flag_unsafe_math_optimizations)
11015 enum built_in_function fcode0 = builtin_mathfn_code (arg0);
11016 enum built_in_function fcode1 = builtin_mathfn_code (arg1);
11018 /* Optimizations of root(...)*root(...). */
11019 if (fcode0 == fcode1 && BUILTIN_ROOT_P (fcode0))
11021 tree rootfn, arg;
11022 tree arg00 = CALL_EXPR_ARG (arg0, 0);
11023 tree arg10 = CALL_EXPR_ARG (arg1, 0);
11025 /* Optimize sqrt(x)*sqrt(x) as x. */
11026 if (BUILTIN_SQRT_P (fcode0)
11027 && operand_equal_p (arg00, arg10, 0)
11028 && ! HONOR_SNANS (TYPE_MODE (type)))
11029 return arg00;
11031 /* Optimize root(x)*root(y) as root(x*y). */
11032 rootfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
11033 arg = fold_build2_loc (loc, MULT_EXPR, type, arg00, arg10);
11034 return build_call_expr_loc (loc, rootfn, 1, arg);
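/* For example, sqrt (2.0) * sqrt (8.0) becomes sqrt (16.0) == 4.0.
   This is only valid under -funsafe-math-optimizations: for negative
   arguments the original expression is NaN while the folded one need
   not be.  */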
11037 /* Optimize expN(x)*expN(y) as expN(x+y). */
11038 if (fcode0 == fcode1 && BUILTIN_EXPONENT_P (fcode0))
11040 tree expfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
11041 tree arg = fold_build2_loc (loc, PLUS_EXPR, type,
11042 CALL_EXPR_ARG (arg0, 0),
11043 CALL_EXPR_ARG (arg1, 0));
11044 return build_call_expr_loc (loc, expfn, 1, arg);
11047 /* Optimizations of pow(...)*pow(...). */
11048 if ((fcode0 == BUILT_IN_POW && fcode1 == BUILT_IN_POW)
11049 || (fcode0 == BUILT_IN_POWF && fcode1 == BUILT_IN_POWF)
11050 || (fcode0 == BUILT_IN_POWL && fcode1 == BUILT_IN_POWL))
11052 tree arg00 = CALL_EXPR_ARG (arg0, 0);
11053 tree arg01 = CALL_EXPR_ARG (arg0, 1);
11054 tree arg10 = CALL_EXPR_ARG (arg1, 0);
11055 tree arg11 = CALL_EXPR_ARG (arg1, 1);
11057 /* Optimize pow(x,y)*pow(z,y) as pow(x*z,y). */
11058 if (operand_equal_p (arg01, arg11, 0))
11060 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
11061 tree arg = fold_build2_loc (loc, MULT_EXPR, type,
11062 arg00, arg10);
11063 return build_call_expr_loc (loc, powfn, 2, arg, arg01);
11066 /* Optimize pow(x,y)*pow(x,z) as pow(x,y+z). */
11067 if (operand_equal_p (arg00, arg10, 0))
11069 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
11070 tree arg = fold_build2_loc (loc, PLUS_EXPR, type,
11071 arg01, arg11);
11072 return build_call_expr_loc (loc, powfn, 2, arg00, arg);
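/* For example, pow (x, 2.0) * pow (x, 3.0) becomes pow (x, 5.0).  */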
11076 /* Optimize tan(x)*cos(x) as sin(x). */
11077 if (((fcode0 == BUILT_IN_TAN && fcode1 == BUILT_IN_COS)
11078 || (fcode0 == BUILT_IN_TANF && fcode1 == BUILT_IN_COSF)
11079 || (fcode0 == BUILT_IN_TANL && fcode1 == BUILT_IN_COSL)
11080 || (fcode0 == BUILT_IN_COS && fcode1 == BUILT_IN_TAN)
11081 || (fcode0 == BUILT_IN_COSF && fcode1 == BUILT_IN_TANF)
11082 || (fcode0 == BUILT_IN_COSL && fcode1 == BUILT_IN_TANL))
11083 && operand_equal_p (CALL_EXPR_ARG (arg0, 0),
11084 CALL_EXPR_ARG (arg1, 0), 0))
11086 tree sinfn = mathfn_built_in (type, BUILT_IN_SIN);
11088 if (sinfn != NULL_TREE)
11089 return build_call_expr_loc (loc, sinfn, 1,
11090 CALL_EXPR_ARG (arg0, 0));
11093 /* Optimize x*pow(x,c) as pow(x,c+1). */
11094 if (fcode1 == BUILT_IN_POW
11095 || fcode1 == BUILT_IN_POWF
11096 || fcode1 == BUILT_IN_POWL)
11098 tree arg10 = CALL_EXPR_ARG (arg1, 0);
11099 tree arg11 = CALL_EXPR_ARG (arg1, 1);
11100 if (TREE_CODE (arg11) == REAL_CST
11101 && !TREE_OVERFLOW (arg11)
11102 && operand_equal_p (arg0, arg10, 0))
11104 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg1), 0);
11105 REAL_VALUE_TYPE c;
11106 tree arg;
11108 c = TREE_REAL_CST (arg11);
11109 real_arithmetic (&c, PLUS_EXPR, &c, &dconst1);
11110 arg = build_real (type, c);
11111 return build_call_expr_loc (loc, powfn, 2, arg0, arg);
11115 /* Optimize pow(x,c)*x as pow(x,c+1). */
11116 if (fcode0 == BUILT_IN_POW
11117 || fcode0 == BUILT_IN_POWF
11118 || fcode0 == BUILT_IN_POWL)
11120 tree arg00 = CALL_EXPR_ARG (arg0, 0);
11121 tree arg01 = CALL_EXPR_ARG (arg0, 1);
11122 if (TREE_CODE (arg01) == REAL_CST
11123 && !TREE_OVERFLOW (arg01)
11124 && operand_equal_p (arg1, arg00, 0))
11126 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
11127 REAL_VALUE_TYPE c;
11128 tree arg;
11130 c = TREE_REAL_CST (arg01);
11131 real_arithmetic (&c, PLUS_EXPR, &c, &dconst1);
11132 arg = build_real (type, c);
11133 return build_call_expr_loc (loc, powfn, 2, arg1, arg);
11137 /* Optimize x*x as pow(x,2.0), which is expanded as x*x. */
11138 if (optimize_function_for_speed_p (cfun)
11139 && operand_equal_p (arg0, arg1, 0))
11141 tree powfn = mathfn_built_in (type, BUILT_IN_POW);
11143 if (powfn)
11145 tree arg = build_real (type, dconst2);
11146 return build_call_expr_loc (loc, powfn, 2, arg0, arg);
11151 goto associate;
11153 case BIT_IOR_EXPR:
11154 bit_ior:
11155 if (integer_all_onesp (arg1))
11156 return omit_one_operand_loc (loc, type, arg1, arg0);
11157 if (integer_zerop (arg1))
11158 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
11159 if (operand_equal_p (arg0, arg1, 0))
11160 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
11162 /* ~X | X is -1. */
11163 if (TREE_CODE (arg0) == BIT_NOT_EXPR
11164 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
11166 t1 = fold_convert_loc (loc, type, integer_zero_node);
11167 t1 = fold_unary_loc (loc, BIT_NOT_EXPR, type, t1);
11168 return omit_one_operand_loc (loc, type, t1, arg1);
11171 /* X | ~X is -1. */
11172 if (TREE_CODE (arg1) == BIT_NOT_EXPR
11173 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
11175 t1 = fold_convert_loc (loc, type, integer_zero_node);
11176 t1 = fold_unary_loc (loc, BIT_NOT_EXPR, type, t1);
11177 return omit_one_operand_loc (loc, type, t1, arg0);
11180 /* Canonicalize (X & C1) | C2. */
11181 if (TREE_CODE (arg0) == BIT_AND_EXPR
11182 && TREE_CODE (arg1) == INTEGER_CST
11183 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
11185 unsigned HOST_WIDE_INT hi1, lo1, hi2, lo2, hi3, lo3, mlo, mhi;
11186 int width = TYPE_PRECISION (type), w;
11187 hi1 = TREE_INT_CST_HIGH (TREE_OPERAND (arg0, 1));
11188 lo1 = TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1));
11189 hi2 = TREE_INT_CST_HIGH (arg1);
11190 lo2 = TREE_INT_CST_LOW (arg1);
11192 /* If (C1&C2) == C1, then (X&C1)|C2 becomes (X,C2). */
11193 if ((hi1 & hi2) == hi1 && (lo1 & lo2) == lo1)
11194 return omit_one_operand_loc (loc, type, arg1,
11195 TREE_OPERAND (arg0, 0));
11197 if (width > HOST_BITS_PER_WIDE_INT)
11199 mhi = (unsigned HOST_WIDE_INT) -1
11200 >> (2 * HOST_BITS_PER_WIDE_INT - width);
11201 mlo = -1;
11203 else
11205 mhi = 0;
11206 mlo = (unsigned HOST_WIDE_INT) -1
11207 >> (HOST_BITS_PER_WIDE_INT - width);
11210 /* If (C1|C2) == ~0 then (X&C1)|C2 becomes X|C2. */
11211 if ((~(hi1 | hi2) & mhi) == 0 && (~(lo1 | lo2) & mlo) == 0)
11212 return fold_build2_loc (loc, BIT_IOR_EXPR, type,
11213 TREE_OPERAND (arg0, 0), arg1);
11215 /* Minimize the number of bits set in C1, i.e. C1 := C1 & ~C2,
11216 unless (C1 & ~C2) | (C2 & C3) for some C3 is a mask of some
11217 mode which allows further optimizations. */
11218 hi1 &= mhi;
11219 lo1 &= mlo;
11220 hi2 &= mhi;
11221 lo2 &= mlo;
11222 hi3 = hi1 & ~hi2;
11223 lo3 = lo1 & ~lo2;
11224 for (w = BITS_PER_UNIT;
11225 w <= width && w <= HOST_BITS_PER_WIDE_INT;
11226 w <<= 1)
11228 unsigned HOST_WIDE_INT mask
11229 = (unsigned HOST_WIDE_INT) -1 >> (HOST_BITS_PER_WIDE_INT - w);
11230 if (((lo1 | lo2) & mask) == mask
11231 && (lo1 & ~mask) == 0 && hi1 == 0)
11233 hi3 = 0;
11234 lo3 = mask;
11235 break;
11238 if (hi3 != hi1 || lo3 != lo1)
11239 return fold_build2_loc (loc, BIT_IOR_EXPR, type,
11240 fold_build2_loc (loc, BIT_AND_EXPR, type,
11241 TREE_OPERAND (arg0, 0),
11242 build_int_cst_wide (type,
11243 lo3, hi3)),
11244 arg1);
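/* For example, in an 8-bit type, (X & 0x3C) | 0x0F: here C1 & C2 != C1
   and C1 | C2 != ~0, so C1 is minimized to C1 & ~C2 == 0x30, giving
   (X & 0x30) | 0x0F.  */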
11247 /* (X & Y) | Y is (X, Y). */
11248 if (TREE_CODE (arg0) == BIT_AND_EXPR
11249 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
11250 return omit_one_operand_loc (loc, type, arg1, TREE_OPERAND (arg0, 0));
11251 /* (X & Y) | X is (Y, X). */
11252 if (TREE_CODE (arg0) == BIT_AND_EXPR
11253 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
11254 && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
11255 return omit_one_operand_loc (loc, type, arg1, TREE_OPERAND (arg0, 1));
11256 /* X | (X & Y) is (Y, X). */
11257 if (TREE_CODE (arg1) == BIT_AND_EXPR
11258 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0)
11259 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 1)))
11260 return omit_one_operand_loc (loc, type, arg0, TREE_OPERAND (arg1, 1));
11261 /* X | (Y & X) is (Y, X). */
11262 if (TREE_CODE (arg1) == BIT_AND_EXPR
11263 && operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0)
11264 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
11265 return omit_one_operand_loc (loc, type, arg0, TREE_OPERAND (arg1, 0));
11267 t1 = distribute_bit_expr (loc, code, type, arg0, arg1);
11268 if (t1 != NULL_TREE)
11269 return t1;
11271 /* Convert (or (not arg0) (not arg1)) to (not (and (arg0) (arg1))).
11273 This results in more efficient code for machines without a NAND
11274 instruction. Combine will canonicalize to the first form
11275 which will allow use of NAND instructions provided by the
11276 backend if they exist. */
11277 if (TREE_CODE (arg0) == BIT_NOT_EXPR
11278 && TREE_CODE (arg1) == BIT_NOT_EXPR)
11280 return
11281 fold_build1_loc (loc, BIT_NOT_EXPR, type,
11282 build2 (BIT_AND_EXPR, type,
11283 fold_convert_loc (loc, type,
11284 TREE_OPERAND (arg0, 0)),
11285 fold_convert_loc (loc, type,
11286 TREE_OPERAND (arg1, 0))));
11289 /* See if this can be simplified into a rotate first. If that
11290 is unsuccessful continue in the association code. */
11291 goto bit_rotate;
11293 case BIT_XOR_EXPR:
11294 if (integer_zerop (arg1))
11295 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
11296 if (integer_all_onesp (arg1))
11297 return fold_build1_loc (loc, BIT_NOT_EXPR, type, op0);
11298 if (operand_equal_p (arg0, arg1, 0))
11299 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
11301 /* ~X ^ X is -1. */
11302 if (TREE_CODE (arg0) == BIT_NOT_EXPR
11303 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
11305 t1 = fold_convert_loc (loc, type, integer_zero_node);
11306 t1 = fold_unary_loc (loc, BIT_NOT_EXPR, type, t1);
11307 return omit_one_operand_loc (loc, type, t1, arg1);
11310 /* X ^ ~X is -1. */
11311 if (TREE_CODE (arg1) == BIT_NOT_EXPR
11312 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
11314 t1 = fold_convert_loc (loc, type, integer_zero_node);
11315 t1 = fold_unary_loc (loc, BIT_NOT_EXPR, type, t1);
11316 return omit_one_operand_loc (loc, type, t1, arg0);
11319 /* If we are XORing two BIT_AND_EXPR's, both of which are and'ing
11320 with a constant, and the two constants have no bits in common,
11321 we should treat this as a BIT_IOR_EXPR since this may produce more
11322 simplifications. */
11323 if (TREE_CODE (arg0) == BIT_AND_EXPR
11324 && TREE_CODE (arg1) == BIT_AND_EXPR
11325 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
11326 && TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
11327 && integer_zerop (const_binop (BIT_AND_EXPR,
11328 TREE_OPERAND (arg0, 1),
11329 TREE_OPERAND (arg1, 1), 0)))
11331 code = BIT_IOR_EXPR;
11332 goto bit_ior;
11335 /* (X | Y) ^ X -> Y & ~X. */
11336 if (TREE_CODE (arg0) == BIT_IOR_EXPR
11337 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
11339 tree t2 = TREE_OPERAND (arg0, 1);
11340 t1 = fold_build1_loc (loc, BIT_NOT_EXPR, TREE_TYPE (arg1),
11341 arg1);
11342 t1 = fold_build2_loc (loc, BIT_AND_EXPR, type,
11343 fold_convert_loc (loc, type, t2),
11344 fold_convert_loc (loc, type, t1));
11345 return t1;
11348 /* (Y | X) ^ X -> Y & ~X. */
11349 if (TREE_CODE (arg0) == BIT_IOR_EXPR
11350 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
11352 tree t2 = TREE_OPERAND (arg0, 0);
11353 t1 = fold_build1_loc (loc, BIT_NOT_EXPR, TREE_TYPE (arg1),
11354 arg1);
11355 t1 = fold_build2_loc (loc, BIT_AND_EXPR, type,
11356 fold_convert_loc (loc, type, t2),
11357 fold_convert_loc (loc, type, t1));
11358 return t1;
11361 /* X ^ (X | Y) -> Y & ~X. */
11362 if (TREE_CODE (arg1) == BIT_IOR_EXPR
11363 && operand_equal_p (TREE_OPERAND (arg1, 0), arg0, 0))
11365 tree t2 = TREE_OPERAND (arg1, 1);
11366 t1 = fold_build1_loc (loc, BIT_NOT_EXPR, TREE_TYPE (arg0),
11367 arg0);
11368 t1 = fold_build2_loc (loc, BIT_AND_EXPR, type,
11369 fold_convert_loc (loc, type, t2),
11370 fold_convert_loc (loc, type, t1));
11371 return t1;
11374 /* X ^ (Y | X) -> Y & ~X. */
11375 if (TREE_CODE (arg1) == BIT_IOR_EXPR
11376 && operand_equal_p (TREE_OPERAND (arg1, 1), arg0, 0))
11378 tree t2 = TREE_OPERAND (arg1, 0);
11379 t1 = fold_build1_loc (loc, BIT_NOT_EXPR, TREE_TYPE (arg0),
11380 arg0);
11381 t1 = fold_build2_loc (loc, BIT_AND_EXPR, type,
11382 fold_convert_loc (loc, type, t2),
11383 fold_convert_loc (loc, type, t1));
11384 return t1;
11387 /* Convert ~X ^ ~Y to X ^ Y. */
11388 if (TREE_CODE (arg0) == BIT_NOT_EXPR
11389 && TREE_CODE (arg1) == BIT_NOT_EXPR)
11390 return fold_build2_loc (loc, code, type,
11391 fold_convert_loc (loc, type,
11392 TREE_OPERAND (arg0, 0)),
11393 fold_convert_loc (loc, type,
11394 TREE_OPERAND (arg1, 0)));
11396 /* Convert ~X ^ C to X ^ ~C. */
11397 if (TREE_CODE (arg0) == BIT_NOT_EXPR
11398 && TREE_CODE (arg1) == INTEGER_CST)
11399 return fold_build2_loc (loc, code, type,
11400 fold_convert_loc (loc, type,
11401 TREE_OPERAND (arg0, 0)),
11402 fold_build1_loc (loc, BIT_NOT_EXPR, type, arg1));
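/* For example, in an 8-bit type, ~X ^ 0x0F becomes X ^ 0xF0.  */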
11404 /* Fold (X & 1) ^ 1 as (X & 1) == 0. */
11405 if (TREE_CODE (arg0) == BIT_AND_EXPR
11406 && integer_onep (TREE_OPERAND (arg0, 1))
11407 && integer_onep (arg1))
11408 return fold_build2_loc (loc, EQ_EXPR, type, arg0,
11409 build_int_cst (TREE_TYPE (arg0), 0));
11411 /* Fold (X & Y) ^ Y as ~X & Y. */
11412 if (TREE_CODE (arg0) == BIT_AND_EXPR
11413 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
11415 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
11416 return fold_build2_loc (loc, BIT_AND_EXPR, type,
11417 fold_build1_loc (loc, BIT_NOT_EXPR, type, tem),
11418 fold_convert_loc (loc, type, arg1));
11420 /* Fold (X & Y) ^ X as ~Y & X. */
11421 if (TREE_CODE (arg0) == BIT_AND_EXPR
11422 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
11423 && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
11425 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
11426 return fold_build2_loc (loc, BIT_AND_EXPR, type,
11427 fold_build1_loc (loc, BIT_NOT_EXPR, type, tem),
11428 fold_convert_loc (loc, type, arg1));
11430 /* Fold X ^ (X & Y) as X & ~Y. */
11431 if (TREE_CODE (arg1) == BIT_AND_EXPR
11432 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
11434 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 1));
11435 return fold_build2_loc (loc, BIT_AND_EXPR, type,
11436 fold_convert_loc (loc, type, arg0),
11437 fold_build1_loc (loc, BIT_NOT_EXPR, type, tem));
11439 /* Fold X ^ (Y & X) as ~Y & X. */
11440 if (TREE_CODE (arg1) == BIT_AND_EXPR
11441 && operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0)
11442 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
11444 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 0));
11445 return fold_build2_loc (loc, BIT_AND_EXPR, type,
11446 fold_build1_loc (loc, BIT_NOT_EXPR, type, tem),
11447 fold_convert_loc (loc, type, arg0));
11450 /* See if this can be simplified into a rotate first. If that
11451 is unsuccessful continue in the association code. */
11452 goto bit_rotate;
11454 case BIT_AND_EXPR:
11455 if (integer_all_onesp (arg1))
11456 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
11457 if (integer_zerop (arg1))
11458 return omit_one_operand_loc (loc, type, arg1, arg0);
11459 if (operand_equal_p (arg0, arg1, 0))
11460 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
11462 /* ~X & X is always zero. */
11463 if (TREE_CODE (arg0) == BIT_NOT_EXPR
11464 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
11465 return omit_one_operand_loc (loc, type, integer_zero_node, arg1);
11467 /* X & ~X is always zero. */
11468 if (TREE_CODE (arg1) == BIT_NOT_EXPR
11469 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
11470 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
11472 /* Canonicalize (X | C1) & C2 as (X & C2) | (C1 & C2). */
11473 if (TREE_CODE (arg0) == BIT_IOR_EXPR
11474 && TREE_CODE (arg1) == INTEGER_CST
11475 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
11477 tree tmp1 = fold_convert_loc (loc, type, arg1);
11478 tree tmp2 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
11479 tree tmp3 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
11480 tmp2 = fold_build2_loc (loc, BIT_AND_EXPR, type, tmp2, tmp1);
11481 tmp3 = fold_build2_loc (loc, BIT_AND_EXPR, type, tmp3, tmp1);
11482 return
11483 fold_convert_loc (loc, type,
11484 fold_build2_loc (loc, BIT_IOR_EXPR,
11485 type, tmp2, tmp3));
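/* For example, (X | 0x0F) & 0x3C becomes (X & 0x3C) | 0x0C,
   since 0x0F & 0x3C == 0x0C.  */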
11488 /* (X | Y) & Y is (X, Y). */
11489 if (TREE_CODE (arg0) == BIT_IOR_EXPR
11490 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
11491 return omit_one_operand_loc (loc, type, arg1, TREE_OPERAND (arg0, 0));
11492 /* (X | Y) & X is (Y, X). */
11493 if (TREE_CODE (arg0) == BIT_IOR_EXPR
11494 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
11495 && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
11496 return omit_one_operand_loc (loc, type, arg1, TREE_OPERAND (arg0, 1));
11497 /* X & (X | Y) is (Y, X). */
11498 if (TREE_CODE (arg1) == BIT_IOR_EXPR
11499 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0)
11500 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 1)))
11501 return omit_one_operand_loc (loc, type, arg0, TREE_OPERAND (arg1, 1));
11502 /* X & (Y | X) is (Y, X). */
11503 if (TREE_CODE (arg1) == BIT_IOR_EXPR
11504 && operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0)
11505 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
11506 return omit_one_operand_loc (loc, type, arg0, TREE_OPERAND (arg1, 0));
11508 /* Fold (X ^ 1) & 1 as (X & 1) == 0. */
11509 if (TREE_CODE (arg0) == BIT_XOR_EXPR
11510 && integer_onep (TREE_OPERAND (arg0, 1))
11511 && integer_onep (arg1))
11513 tem = TREE_OPERAND (arg0, 0);
11514 return fold_build2_loc (loc, EQ_EXPR, type,
11515 fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (tem), tem,
11516 build_int_cst (TREE_TYPE (tem), 1)),
11517 build_int_cst (TREE_TYPE (tem), 0));
11519 /* Fold ~X & 1 as (X & 1) == 0. */
11520 if (TREE_CODE (arg0) == BIT_NOT_EXPR
11521 && integer_onep (arg1))
11523 tem = TREE_OPERAND (arg0, 0);
11524 return fold_build2_loc (loc, EQ_EXPR, type,
11525 fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (tem), tem,
11526 build_int_cst (TREE_TYPE (tem), 1)),
11527 build_int_cst (TREE_TYPE (tem), 0));
11530 /* Fold (X ^ Y) & Y as ~X & Y. */
11531 if (TREE_CODE (arg0) == BIT_XOR_EXPR
11532 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
11534 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
11535 return fold_build2_loc (loc, BIT_AND_EXPR, type,
11536 fold_build1_loc (loc, BIT_NOT_EXPR, type, tem),
11537 fold_convert_loc (loc, type, arg1));
11539 /* Fold (X ^ Y) & X as ~Y & X. */
11540 if (TREE_CODE (arg0) == BIT_XOR_EXPR
11541 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
11542 && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
11544 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
11545 return fold_build2_loc (loc, BIT_AND_EXPR, type,
11546 fold_build1_loc (loc, BIT_NOT_EXPR, type, tem),
11547 fold_convert_loc (loc, type, arg1));
11549 /* Fold X & (X ^ Y) as X & ~Y. */
11550 if (TREE_CODE (arg1) == BIT_XOR_EXPR
11551 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
11553 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 1));
11554 return fold_build2_loc (loc, BIT_AND_EXPR, type,
11555 fold_convert_loc (loc, type, arg0),
11556 fold_build1_loc (loc, BIT_NOT_EXPR, type, tem));
11558 /* Fold X & (Y ^ X) as ~Y & X. */
11559 if (TREE_CODE (arg1) == BIT_XOR_EXPR
11560 && operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0)
11561 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
11563 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 0));
11564 return fold_build2_loc (loc, BIT_AND_EXPR, type,
11565 fold_build1_loc (loc, BIT_NOT_EXPR, type, tem),
11566 fold_convert_loc (loc, type, arg0));
11569 t1 = distribute_bit_expr (loc, code, type, arg0, arg1);
11570 if (t1 != NULL_TREE)
11571 return t1;
11572 /* Simplify ((int)c & 0377) into (int)c, if c is unsigned char. */
11573 if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg0) == NOP_EXPR
11574 && TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg0, 0))))
11576 unsigned int prec
11577 = TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg0, 0)));
11579 if (prec < BITS_PER_WORD && prec < HOST_BITS_PER_WIDE_INT
11580 && (~TREE_INT_CST_LOW (arg1)
11581 & (((HOST_WIDE_INT) 1 << prec) - 1)) == 0)
11582 return
11583 fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
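/* For example, with an 8-bit unsigned char c, ((int) c & 0xff) keeps
   every bit the conversion can produce, so the mask is redundant and
   the result is just (int) c.  */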
11586 /* Convert (and (not arg0) (not arg1)) to (not (or (arg0) (arg1))).
11588 This results in more efficient code for machines without a NOR
11589 instruction. Combine will canonicalize to the first form
11590 which will allow use of NOR instructions provided by the
11591 backend if they exist. */
11592 if (TREE_CODE (arg0) == BIT_NOT_EXPR
11593 && TREE_CODE (arg1) == BIT_NOT_EXPR)
11595 return fold_build1_loc (loc, BIT_NOT_EXPR, type,
11596 build2 (BIT_IOR_EXPR, type,
11597 fold_convert_loc (loc, type,
11598 TREE_OPERAND (arg0, 0)),
11599 fold_convert_loc (loc, type,
11600 TREE_OPERAND (arg1, 0))));
11603 /* If arg0 is derived from the address of an object or function, we may
11604 be able to fold this expression using the object or function's
11605 alignment. */
11606 if (POINTER_TYPE_P (TREE_TYPE (arg0)) && host_integerp (arg1, 1))
11608 unsigned HOST_WIDE_INT modulus, residue;
11609 unsigned HOST_WIDE_INT low = TREE_INT_CST_LOW (arg1);
11611 modulus = get_pointer_modulus_and_residue (arg0, &residue,
11612 integer_onep (arg1));
11614 /* This works because modulus is a power of 2. If this weren't the
11615 case, we'd have to replace it by its greatest power-of-2
11616 divisor: modulus & -modulus. */
11617 if (low < modulus)
11618 return build_int_cst (type, residue & low);
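/* For example, if ARG0 is the address of an object known to be 8-byte
   aligned (modulus 8, residue 0), then ARG0 & 7 folds to 0.  */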
11621 /* Fold (X << C1) & C2 into (X << C1) & (C2 | ((1 << C1) - 1)), and
11622 (X >> C1) & C2 into (X >> C1) & (C2 | ~((type) -1 >> C1)),
11623 if the new mask might be further optimized. */
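/* For example, in an 8-bit unsigned type, (X << 3) & 0xF9: the low
   three bits of X << 3 are already zero, so the mask widens to
   0xF9 | 0x07 == 0xFF and the expression is rewritten as
   (X << 3) & 0xFF.  */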
11624 if ((TREE_CODE (arg0) == LSHIFT_EXPR
11625 || TREE_CODE (arg0) == RSHIFT_EXPR)
11626 && host_integerp (TREE_OPERAND (arg0, 1), 1)
11627 && host_integerp (arg1, TYPE_UNSIGNED (TREE_TYPE (arg1)))
11628 && tree_low_cst (TREE_OPERAND (arg0, 1), 1)
11629 < TYPE_PRECISION (TREE_TYPE (arg0))
11630 && TYPE_PRECISION (TREE_TYPE (arg0)) <= HOST_BITS_PER_WIDE_INT
11631 && tree_low_cst (TREE_OPERAND (arg0, 1), 1) > 0)
11633 unsigned int shiftc = tree_low_cst (TREE_OPERAND (arg0, 1), 1);
11634 unsigned HOST_WIDE_INT mask
11635 = tree_low_cst (arg1, TYPE_UNSIGNED (TREE_TYPE (arg1)));
11636 unsigned HOST_WIDE_INT newmask, zerobits = 0;
11637 tree shift_type = TREE_TYPE (arg0);
11639 if (TREE_CODE (arg0) == LSHIFT_EXPR)
11640 zerobits = ((((unsigned HOST_WIDE_INT) 1) << shiftc) - 1);
11641 else if (TREE_CODE (arg0) == RSHIFT_EXPR
11642 && TYPE_PRECISION (TREE_TYPE (arg0))
11643 == GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (arg0))))
11645 unsigned int prec = TYPE_PRECISION (TREE_TYPE (arg0));
11646 tree arg00 = TREE_OPERAND (arg0, 0);
11647 /* See if more bits can be proven as zero because of
11648 zero extension. */
11649 if (TREE_CODE (arg00) == NOP_EXPR
11650 && TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg00, 0))))
11652 tree inner_type = TREE_TYPE (TREE_OPERAND (arg00, 0));
11653 if (TYPE_PRECISION (inner_type)
11654 == GET_MODE_BITSIZE (TYPE_MODE (inner_type))
11655 && TYPE_PRECISION (inner_type) < prec)
11657 prec = TYPE_PRECISION (inner_type);
11658 /* See if we can shorten the right shift. */
11659 if (shiftc < prec)
11660 shift_type = inner_type;
11663 zerobits = ~(unsigned HOST_WIDE_INT) 0;
11664 zerobits >>= HOST_BITS_PER_WIDE_INT - shiftc;
11665 zerobits <<= prec - shiftc;
11666 /* For an arithmetic shift, if the sign bit could be set, zerobits
11667 can actually contain sign bits, so no transformation is
11668 possible, unless MASK masks them all away. In that
11669 case the shift needs to be converted into a logical shift. */
11670 if (!TYPE_UNSIGNED (TREE_TYPE (arg0))
11671 && prec == TYPE_PRECISION (TREE_TYPE (arg0)))
11673 if ((mask & zerobits) == 0)
11674 shift_type = unsigned_type_for (TREE_TYPE (arg0));
11675 else
11676 zerobits = 0;
11680 /* ((X << 16) & 0xff00) is (X, 0). */
11681 if ((mask & zerobits) == mask)
11682 return omit_one_operand_loc (loc, type,
11683 build_int_cst (type, 0), arg0);
11685 newmask = mask | zerobits;
11686 if (newmask != mask && (newmask & (newmask + 1)) == 0)
11688 unsigned int prec;
11690 /* Only do the transformation if NEWMASK is some integer
11691 mode's mask. */
11692 for (prec = BITS_PER_UNIT;
11693 prec < HOST_BITS_PER_WIDE_INT; prec <<= 1)
11694 if (newmask == (((unsigned HOST_WIDE_INT) 1) << prec) - 1)
11695 break;
11696 if (prec < HOST_BITS_PER_WIDE_INT
11697 || newmask == ~(unsigned HOST_WIDE_INT) 0)
11699 tree newmaskt;
11701 if (shift_type != TREE_TYPE (arg0))
11703 tem = fold_build2_loc (loc, TREE_CODE (arg0), shift_type,
11704 fold_convert_loc (loc, shift_type,
11705 TREE_OPERAND (arg0, 0)),
11706 TREE_OPERAND (arg0, 1));
11707 tem = fold_convert_loc (loc, type, tem);
11709 else
11710 tem = op0;
11711 newmaskt = build_int_cst_type (TREE_TYPE (op1), newmask);
11712 if (!tree_int_cst_equal (newmaskt, arg1))
11713 return fold_build2_loc (loc, BIT_AND_EXPR, type, tem, newmaskt);
11718 goto associate;
11720 case RDIV_EXPR:
11721 /* Don't touch a floating-point divide by zero unless the mode
11722 of the constant can represent infinity. */
11723 if (TREE_CODE (arg1) == REAL_CST
11724 && !MODE_HAS_INFINITIES (TYPE_MODE (TREE_TYPE (arg1)))
11725 && real_zerop (arg1))
11726 return NULL_TREE;
11728 /* Optimize A / A to 1.0 if we don't care about
11729 NaNs or Infinities. Skip the transformation
11730 for non-real operands. */
11731 if (SCALAR_FLOAT_TYPE_P (TREE_TYPE (arg0))
11732 && ! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0)))
11733 && ! HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (arg0)))
11734 && operand_equal_p (arg0, arg1, 0))
11736 tree r = build_real (TREE_TYPE (arg0), dconst1);
11738 return omit_two_operands_loc (loc, type, r, arg0, arg1);
11741 /* The complex version of the above A / A optimization. */
11742 if (COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0))
11743 && operand_equal_p (arg0, arg1, 0))
11745 tree elem_type = TREE_TYPE (TREE_TYPE (arg0));
11746 if (! HONOR_NANS (TYPE_MODE (elem_type))
11747 && ! HONOR_INFINITIES (TYPE_MODE (elem_type)))
11749 tree r = build_real (elem_type, dconst1);
11750 /* omit_two_operands will call fold_convert for us. */
11751 return omit_two_operands_loc (loc, type, r, arg0, arg1);
11755 /* (-A) / (-B) -> A / B */
11756 if (TREE_CODE (arg0) == NEGATE_EXPR && negate_expr_p (arg1))
11757 return fold_build2_loc (loc, RDIV_EXPR, type,
11758 TREE_OPERAND (arg0, 0),
11759 negate_expr (arg1));
11760 if (TREE_CODE (arg1) == NEGATE_EXPR && negate_expr_p (arg0))
11761 return fold_build2_loc (loc, RDIV_EXPR, type,
11762 negate_expr (arg0),
11763 TREE_OPERAND (arg1, 0));
11765 /* In IEEE floating point, x/1 is not equivalent to x for snans. */
11766 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
11767 && real_onep (arg1))
11768 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
11770 /* In IEEE floating point, x/-1 is not equivalent to -x for snans. */
11771 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
11772 && real_minus_onep (arg1))
11773 return non_lvalue_loc (loc, fold_convert_loc (loc, type,
11774 negate_expr (arg0)));
11776 /* If ARG1 is a constant, we can convert this to a multiply by the
11777 reciprocal. This does not have the same rounding properties,
11778 so only do this if -freciprocal-math. We can actually
11779 always safely do it if ARG1 is a power of two, but it's hard to
11780 tell if it is or not in a portable manner. */
11781 if (TREE_CODE (arg1) == REAL_CST)
11783 if (flag_reciprocal_math
11784 && 0 != (tem = const_binop (code, build_real (type, dconst1),
11785 arg1, 0)))
11786 return fold_build2_loc (loc, MULT_EXPR, type, arg0, tem);
11787 /* Find the reciprocal if optimizing and the result is exact. */
11788 if (optimize)
11790 REAL_VALUE_TYPE r;
11791 r = TREE_REAL_CST (arg1);
11792 if (exact_real_inverse (TYPE_MODE (TREE_TYPE (arg0)), &r))
11794 tem = build_real (type, r);
11795 return fold_build2_loc (loc, MULT_EXPR, type,
11796 fold_convert_loc (loc, type, arg0), tem);
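/* For example, X / 2.0 becomes X * 0.5, since 0.5 is exactly
   representable; X / 10.0 is left alone by this branch because 0.1
   has no exact binary representation (though the -freciprocal-math
   branch above may still transform it).  */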
11800 /* Convert A/B/C to A/(B*C). */
11801 if (flag_reciprocal_math
11802 && TREE_CODE (arg0) == RDIV_EXPR)
11803 return fold_build2_loc (loc, RDIV_EXPR, type, TREE_OPERAND (arg0, 0),
11804 fold_build2_loc (loc, MULT_EXPR, type,
11805 TREE_OPERAND (arg0, 1), arg1));
11807 /* Convert A/(B/C) to (A/B)*C. */
11808 if (flag_reciprocal_math
11809 && TREE_CODE (arg1) == RDIV_EXPR)
11810 return fold_build2_loc (loc, MULT_EXPR, type,
11811 fold_build2_loc (loc, RDIV_EXPR, type, arg0,
11812 TREE_OPERAND (arg1, 0)),
11813 TREE_OPERAND (arg1, 1));
11815 /* Convert C1/(X*C2) into (C1/C2)/X. */
11816 if (flag_reciprocal_math
11817 && TREE_CODE (arg1) == MULT_EXPR
11818 && TREE_CODE (arg0) == REAL_CST
11819 && TREE_CODE (TREE_OPERAND (arg1, 1)) == REAL_CST)
11821 tree tem = const_binop (RDIV_EXPR, arg0,
11822 TREE_OPERAND (arg1, 1), 0);
11823 if (tem)
11824 return fold_build2_loc (loc, RDIV_EXPR, type, tem,
11825 TREE_OPERAND (arg1, 0));
11828 if (flag_unsafe_math_optimizations)
11830 enum built_in_function fcode0 = builtin_mathfn_code (arg0);
11831 enum built_in_function fcode1 = builtin_mathfn_code (arg1);
11833 /* Optimize sin(x)/cos(x) as tan(x). */
11834 if (((fcode0 == BUILT_IN_SIN && fcode1 == BUILT_IN_COS)
11835 || (fcode0 == BUILT_IN_SINF && fcode1 == BUILT_IN_COSF)
11836 || (fcode0 == BUILT_IN_SINL && fcode1 == BUILT_IN_COSL))
11837 && operand_equal_p (CALL_EXPR_ARG (arg0, 0),
11838 CALL_EXPR_ARG (arg1, 0), 0))
11840 tree tanfn = mathfn_built_in (type, BUILT_IN_TAN);
11842 if (tanfn != NULL_TREE)
11843 return build_call_expr_loc (loc, tanfn, 1, CALL_EXPR_ARG (arg0, 0));
11846 /* Optimize cos(x)/sin(x) as 1.0/tan(x). */
11847 if (((fcode0 == BUILT_IN_COS && fcode1 == BUILT_IN_SIN)
11848 || (fcode0 == BUILT_IN_COSF && fcode1 == BUILT_IN_SINF)
11849 || (fcode0 == BUILT_IN_COSL && fcode1 == BUILT_IN_SINL))
11850 && operand_equal_p (CALL_EXPR_ARG (arg0, 0),
11851 CALL_EXPR_ARG (arg1, 0), 0))
11853 tree tanfn = mathfn_built_in (type, BUILT_IN_TAN);
11855 if (tanfn != NULL_TREE)
11857 tree tmp = build_call_expr_loc (loc, tanfn, 1,
11858 CALL_EXPR_ARG (arg0, 0));
11859 return fold_build2_loc (loc, RDIV_EXPR, type,
11860 build_real (type, dconst1), tmp);
11864 /* Optimize sin(x)/tan(x) as cos(x) if we don't care about
11865 NaNs or Infinities. */
11866 if (((fcode0 == BUILT_IN_SIN && fcode1 == BUILT_IN_TAN)
11867 || (fcode0 == BUILT_IN_SINF && fcode1 == BUILT_IN_TANF)
11868 || (fcode0 == BUILT_IN_SINL && fcode1 == BUILT_IN_TANL)))
11870 tree arg00 = CALL_EXPR_ARG (arg0, 0);
11871 tree arg01 = CALL_EXPR_ARG (arg1, 0);
11873 if (! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg00)))
11874 && ! HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (arg00)))
11875 && operand_equal_p (arg00, arg01, 0))
11877 tree cosfn = mathfn_built_in (type, BUILT_IN_COS);
11879 if (cosfn != NULL_TREE)
11880 return build_call_expr_loc (loc, cosfn, 1, arg00);
11884 /* Optimize tan(x)/sin(x) as 1.0/cos(x) if we don't care about
11885 NaNs or Infinities. */
11886 if (((fcode0 == BUILT_IN_TAN && fcode1 == BUILT_IN_SIN)
11887 || (fcode0 == BUILT_IN_TANF && fcode1 == BUILT_IN_SINF)
11888 || (fcode0 == BUILT_IN_TANL && fcode1 == BUILT_IN_SINL)))
11890 tree arg00 = CALL_EXPR_ARG (arg0, 0);
11891 tree arg01 = CALL_EXPR_ARG (arg1, 0);
11893 if (! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg00)))
11894 && ! HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (arg00)))
11895 && operand_equal_p (arg00, arg01, 0))
11897 tree cosfn = mathfn_built_in (type, BUILT_IN_COS);
11899 if (cosfn != NULL_TREE)
11901 tree tmp = build_call_expr_loc (loc, cosfn, 1, arg00);
11902 return fold_build2_loc (loc, RDIV_EXPR, type,
11903 build_real (type, dconst1),
11904 tmp);
11909 /* Optimize pow(x,c)/x as pow(x,c-1). */
11910 if (fcode0 == BUILT_IN_POW
11911 || fcode0 == BUILT_IN_POWF
11912 || fcode0 == BUILT_IN_POWL)
11914 tree arg00 = CALL_EXPR_ARG (arg0, 0);
11915 tree arg01 = CALL_EXPR_ARG (arg0, 1);
11916 if (TREE_CODE (arg01) == REAL_CST
11917 && !TREE_OVERFLOW (arg01)
11918 && operand_equal_p (arg1, arg00, 0))
11920 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
11921 REAL_VALUE_TYPE c;
11922 tree arg;
11924 c = TREE_REAL_CST (arg01);
11925 real_arithmetic (&c, MINUS_EXPR, &c, &dconst1);
11926 arg = build_real (type, c);
11927 return build_call_expr_loc (loc, powfn, 2, arg1, arg);
11931 /* Optimize a/root(b/c) into a*root(c/b). */
11932 if (BUILTIN_ROOT_P (fcode1))
11934 tree rootarg = CALL_EXPR_ARG (arg1, 0);
11936 if (TREE_CODE (rootarg) == RDIV_EXPR)
11938 tree rootfn = TREE_OPERAND (CALL_EXPR_FN (arg1), 0);
11939 tree b = TREE_OPERAND (rootarg, 0);
11940 tree c = TREE_OPERAND (rootarg, 1);
11942 tree tmp = fold_build2_loc (loc, RDIV_EXPR, type, c, b);
11944 tmp = build_call_expr_loc (loc, rootfn, 1, tmp);
11945 return fold_build2_loc (loc, MULT_EXPR, type, arg0, tmp);
11949 /* Optimize x/expN(y) into x*expN(-y). */
11950 if (BUILTIN_EXPONENT_P (fcode1))
11952 tree expfn = TREE_OPERAND (CALL_EXPR_FN (arg1), 0);
11953 tree arg = negate_expr (CALL_EXPR_ARG (arg1, 0));
11954 arg1 = build_call_expr_loc (loc,
11955 expfn, 1,
11956 fold_convert_loc (loc, type, arg));
11957 return fold_build2_loc (loc, MULT_EXPR, type, arg0, arg1);
11960 /* Optimize x/pow(y,z) into x*pow(y,-z). */
11961 if (fcode1 == BUILT_IN_POW
11962 || fcode1 == BUILT_IN_POWF
11963 || fcode1 == BUILT_IN_POWL)
11965 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg1), 0);
11966 tree arg10 = CALL_EXPR_ARG (arg1, 0);
11967 tree arg11 = CALL_EXPR_ARG (arg1, 1);
11968 tree neg11 = fold_convert_loc (loc, type,
11969 negate_expr (arg11));
11970 arg1 = build_call_expr_loc (loc, powfn, 2, arg10, neg11);
11971 return fold_build2_loc (loc, MULT_EXPR, type, arg0, arg1);
11974 return NULL_TREE;
11976 case TRUNC_DIV_EXPR:
11977 case FLOOR_DIV_EXPR:
11978 /* Simplify A / (B << N) where A and B are positive and B is
11979 a power of 2, to A >> (N + log2(B)). */
11980 strict_overflow_p = false;
11981 if (TREE_CODE (arg1) == LSHIFT_EXPR
11982 && (TYPE_UNSIGNED (type)
11983 || tree_expr_nonnegative_warnv_p (op0, &strict_overflow_p)))
11985 tree sval = TREE_OPERAND (arg1, 0);
11986 if (integer_pow2p (sval) && tree_int_cst_sgn (sval) > 0)
11988 tree sh_cnt = TREE_OPERAND (arg1, 1);
11989 unsigned long pow2 = exact_log2 (TREE_INT_CST_LOW (sval));
11991 if (strict_overflow_p)
11992 fold_overflow_warning (("assuming signed overflow does not "
11993 "occur when simplifying A / (B << N)"),
11994 WARN_STRICT_OVERFLOW_MISC);
11996 sh_cnt = fold_build2_loc (loc, PLUS_EXPR, TREE_TYPE (sh_cnt),
11997 sh_cnt, build_int_cst (NULL_TREE, pow2));
11998 return fold_build2_loc (loc, RSHIFT_EXPR, type,
11999 fold_convert_loc (loc, type, arg0), sh_cnt);
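/* For example, for unsigned X, X / (4 << N) becomes X >> (N + 2).  */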
12003 /* For unsigned integral types, FLOOR_DIV_EXPR is the same as
12004 TRUNC_DIV_EXPR. Rewrite into the latter in this case. */
12005 if (INTEGRAL_TYPE_P (type)
12006 && TYPE_UNSIGNED (type)
12007 && code == FLOOR_DIV_EXPR)
12008 return fold_build2_loc (loc, TRUNC_DIV_EXPR, type, op0, op1);
12010 /* Fall through. */
12012 case ROUND_DIV_EXPR:
12013 case CEIL_DIV_EXPR:
12014 case EXACT_DIV_EXPR:
12015 if (integer_onep (arg1))
12016 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
12017 if (integer_zerop (arg1))
12018 return NULL_TREE;
12019 /* X / -1 is -X. */
12020 if (!TYPE_UNSIGNED (type)
12021 && TREE_CODE (arg1) == INTEGER_CST
12022 && TREE_INT_CST_LOW (arg1) == (unsigned HOST_WIDE_INT) -1
12023 && TREE_INT_CST_HIGH (arg1) == -1)
12024 return fold_convert_loc (loc, type, negate_expr (arg0));
12026 /* Convert -A / -B to A / B when the type is signed and overflow is
12027 undefined. */
12028 if ((!INTEGRAL_TYPE_P (type) || TYPE_OVERFLOW_UNDEFINED (type))
12029 && TREE_CODE (arg0) == NEGATE_EXPR
12030 && negate_expr_p (arg1))
12032 if (INTEGRAL_TYPE_P (type))
12033 fold_overflow_warning (("assuming signed overflow does not occur "
12034 "when distributing negation across "
12035 "division"),
12036 WARN_STRICT_OVERFLOW_MISC);
12037 return fold_build2_loc (loc, code, type,
12038 fold_convert_loc (loc, type,
12039 TREE_OPERAND (arg0, 0)),
12040 fold_convert_loc (loc, type,
12041 negate_expr (arg1)));
12043 if ((!INTEGRAL_TYPE_P (type) || TYPE_OVERFLOW_UNDEFINED (type))
12044 && TREE_CODE (arg1) == NEGATE_EXPR
12045 && negate_expr_p (arg0))
12047 if (INTEGRAL_TYPE_P (type))
12048 fold_overflow_warning (("assuming signed overflow does not occur "
12049 "when distributing negation across "
12050 "division"),
12051 WARN_STRICT_OVERFLOW_MISC);
12052 return fold_build2_loc (loc, code, type,
12053 fold_convert_loc (loc, type,
12054 negate_expr (arg0)),
12055 fold_convert_loc (loc, type,
12056 TREE_OPERAND (arg1, 0)));
12059 /* If arg0 is a multiple of arg1, then rewrite to the fastest div
12060 operation, EXACT_DIV_EXPR.
12062 Note that only CEIL_DIV_EXPR and FLOOR_DIV_EXPR are rewritten now.
12063 At one time others generated faster code, but it's not clear whether
12064 they still do after the last round of changes to the DIV code in expmed.c. */
12065 if ((code == CEIL_DIV_EXPR || code == FLOOR_DIV_EXPR)
12066 && multiple_of_p (type, arg0, arg1))
12067 return fold_build2_loc (loc, EXACT_DIV_EXPR, type, arg0, arg1);
12069 strict_overflow_p = false;
12070 if (TREE_CODE (arg1) == INTEGER_CST
12071 && 0 != (tem = extract_muldiv (op0, arg1, code, NULL_TREE,
12072 &strict_overflow_p)))
12074 if (strict_overflow_p)
12075 fold_overflow_warning (("assuming signed overflow does not occur "
12076 "when simplifying division"),
12077 WARN_STRICT_OVERFLOW_MISC);
12078 return fold_convert_loc (loc, type, tem);
12081 return NULL_TREE;
12083 case CEIL_MOD_EXPR:
12084 case FLOOR_MOD_EXPR:
12085 case ROUND_MOD_EXPR:
12086 case TRUNC_MOD_EXPR:
12087 /* X % 1 is always zero, but be sure to preserve any side
12088 effects in X. */
12089 if (integer_onep (arg1))
12090 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
12092 /* For X % 0, return X % 0 unchanged so that we can get the
12093 proper warnings and errors. */
12094 if (integer_zerop (arg1))
12095 return NULL_TREE;
12097 /* 0 % X is always zero, but be sure to preserve any side
12098 effects in X. Place this after checking for X == 0. */
12099 if (integer_zerop (arg0))
12100 return omit_one_operand_loc (loc, type, integer_zero_node, arg1);
12102 /* X % -1 is zero. */
12103 if (!TYPE_UNSIGNED (type)
12104 && TREE_CODE (arg1) == INTEGER_CST
12105 && TREE_INT_CST_LOW (arg1) == (unsigned HOST_WIDE_INT) -1
12106 && TREE_INT_CST_HIGH (arg1) == -1)
12107 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
12109 /* Optimize TRUNC_MOD_EXPR by a power of two into a BIT_AND_EXPR,
12110 i.e. "X % C" into "X & (C - 1)", if X and C are positive. */
12111 strict_overflow_p = false;
12112 if ((code == TRUNC_MOD_EXPR || code == FLOOR_MOD_EXPR)
12113 && (TYPE_UNSIGNED (type)
12114 || tree_expr_nonnegative_warnv_p (op0, &strict_overflow_p)))
12116 tree c = arg1;
12117 /* Also optimize A % (C << N) where C is a power of 2,
12118 to A & ((C << N) - 1). */
12119 if (TREE_CODE (arg1) == LSHIFT_EXPR)
12120 c = TREE_OPERAND (arg1, 0);
12122 if (integer_pow2p (c) && tree_int_cst_sgn (c) > 0)
12124 tree mask = fold_build2_loc (loc, MINUS_EXPR, TREE_TYPE (arg1), arg1,
12125 build_int_cst (TREE_TYPE (arg1), 1));
12126 if (strict_overflow_p)
12127 fold_overflow_warning (("assuming signed overflow does not "
12128 "occur when simplifying "
12129 "X % (power of two)"),
12130 WARN_STRICT_OVERFLOW_MISC);
12131 return fold_build2_loc (loc, BIT_AND_EXPR, type,
12132 fold_convert_loc (loc, type, arg0),
12133 fold_convert_loc (loc, type, mask));
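/* For example, for unsigned X, X % 8 becomes X & 7, and
   X % (2 << N) becomes X & ((2 << N) - 1).  */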
12137 /* X % -C is the same as X % C. */
12138 if (code == TRUNC_MOD_EXPR
12139 && !TYPE_UNSIGNED (type)
12140 && TREE_CODE (arg1) == INTEGER_CST
12141 && !TREE_OVERFLOW (arg1)
12142 && TREE_INT_CST_HIGH (arg1) < 0
12143 && !TYPE_OVERFLOW_TRAPS (type)
12144 /* Avoid this transformation if C is INT_MIN, i.e. C == -C. */
12145 && !sign_bit_p (arg1, arg1))
12146 return fold_build2_loc (loc, code, type,
12147 fold_convert_loc (loc, type, arg0),
12148 fold_convert_loc (loc, type,
12149 negate_expr (arg1)));
12151 /* X % -Y is the same as X % Y. */
12152 if (code == TRUNC_MOD_EXPR
12153 && !TYPE_UNSIGNED (type)
12154 && TREE_CODE (arg1) == NEGATE_EXPR
12155 && !TYPE_OVERFLOW_TRAPS (type))
12156 return fold_build2_loc (loc, code, type, fold_convert_loc (loc, type, arg0),
12157 fold_convert_loc (loc, type,
12158 TREE_OPERAND (arg1, 0)));
12160 if (TREE_CODE (arg1) == INTEGER_CST
12161 && 0 != (tem = extract_muldiv (op0, arg1, code, NULL_TREE,
12162 &strict_overflow_p)))
12164 if (strict_overflow_p)
12165 fold_overflow_warning (("assuming signed overflow does not occur "
12166 "when simplifying modulus"),
12167 WARN_STRICT_OVERFLOW_MISC);
12168 return fold_convert_loc (loc, type, tem);
12171 return NULL_TREE;
12173 case LROTATE_EXPR:
12174 case RROTATE_EXPR:
12175 if (integer_all_onesp (arg0))
12176 return omit_one_operand_loc (loc, type, arg0, arg1);
12177 goto shift;
12179 case RSHIFT_EXPR:
12180 /* Optimize -1 >> x for arithmetic right shifts. */
12181 if (integer_all_onesp (arg0) && !TYPE_UNSIGNED (type)
12182 && tree_expr_nonnegative_p (arg1))
12183 return omit_one_operand_loc (loc, type, arg0, arg1);
12184 /* ... fall through ... */
12186 case LSHIFT_EXPR:
12187 shift:
12188 if (integer_zerop (arg1))
12189 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
12190 if (integer_zerop (arg0))
12191 return omit_one_operand_loc (loc, type, arg0, arg1);
12193 /* Since a negative shift count is not well-defined,
12194 don't try to compute it in the compiler. */
12195 if (TREE_CODE (arg1) == INTEGER_CST && tree_int_cst_sgn (arg1) < 0)
12196 return NULL_TREE;
12198 /* Turn (a OP c1) OP c2 into a OP (c1+c2). */
12199 if (TREE_CODE (op0) == code && host_integerp (arg1, false)
12200 && TREE_INT_CST_LOW (arg1) < TYPE_PRECISION (type)
12201 && host_integerp (TREE_OPERAND (arg0, 1), false)
12202 && TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1)) < TYPE_PRECISION (type))
12204 HOST_WIDE_INT low = (TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1))
12205 + TREE_INT_CST_LOW (arg1));
12207 /* Deal with a OP (c1 + c2) being undefined but (a OP c1) OP c2
12208 being well defined. */
12209 if (low >= TYPE_PRECISION (type))
12211 if (code == LROTATE_EXPR || code == RROTATE_EXPR)
12212 low = low % TYPE_PRECISION (type);
12213 else if (TYPE_UNSIGNED (type) || code == LSHIFT_EXPR)
12214 return omit_one_operand_loc (loc, type, build_int_cst (type, 0),
12215 TREE_OPERAND (arg0, 0));
12216 else
12217 low = TYPE_PRECISION (type) - 1;
12220 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0),
12221 build_int_cst (type, low));
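/* For example, (X >> 2) >> 3 becomes X >> 5.  If the combined count
   reaches the precision, e.g. (X >> 20) >> 20 in a 32-bit type, the
   result is 0 for unsigned X and X >> 31 for signed X.  */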
12224 /* Transform (x >> c) << c into x & (-1<<c), or transform (x << c) >> c
12225 into x & ((unsigned)-1 >> c) for unsigned types. */
12226 if (((code == LSHIFT_EXPR && TREE_CODE (arg0) == RSHIFT_EXPR)
12227 || (TYPE_UNSIGNED (type)
12228 && code == RSHIFT_EXPR && TREE_CODE (arg0) == LSHIFT_EXPR))
12229 && host_integerp (arg1, false)
12230 && TREE_INT_CST_LOW (arg1) < TYPE_PRECISION (type)
12231 && host_integerp (TREE_OPERAND (arg0, 1), false)
12232 && TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1)) < TYPE_PRECISION (type))
12234 HOST_WIDE_INT low0 = TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1));
12235 HOST_WIDE_INT low1 = TREE_INT_CST_LOW (arg1);
12236 tree lshift;
12237 tree arg00;
12239 if (low0 == low1)
12241 arg00 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
12243 lshift = build_int_cst (type, -1);
12244 lshift = int_const_binop (code, lshift, arg1, 0);
12246 return fold_build2_loc (loc, BIT_AND_EXPR, type, arg00, lshift);
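/* For example, in a 32-bit unsigned type, (X >> 4) << 4 becomes
   X & 0xfffffff0, and (X << 4) >> 4 becomes X & 0x0fffffff.  */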
12250 /* Rewrite an LROTATE_EXPR by a constant into an
12251 RROTATE_EXPR by a new constant. */
12252 if (code == LROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST)
12254 tree tem = build_int_cst (TREE_TYPE (arg1),
12255 TYPE_PRECISION (type));
12256 tem = const_binop (MINUS_EXPR, tem, arg1, 0);
12257 return fold_build2_loc (loc, RROTATE_EXPR, type, op0, tem);
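/* For example, in a 32-bit type, a left-rotate by 8 becomes a
   right-rotate by 24.  */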
12260 /* If we have a rotate of a bit operation with the rotate count and
12261 the second operand of the bit operation both constant,
12262 permute the two operations. */
12263 if (code == RROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST
12264 && (TREE_CODE (arg0) == BIT_AND_EXPR
12265 || TREE_CODE (arg0) == BIT_IOR_EXPR
12266 || TREE_CODE (arg0) == BIT_XOR_EXPR)
12267 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
12268 return fold_build2_loc (loc, TREE_CODE (arg0), type,
12269 fold_build2_loc (loc, code, type,
12270 TREE_OPERAND (arg0, 0), arg1),
12271 fold_build2_loc (loc, code, type,
12272 TREE_OPERAND (arg0, 1), arg1));
12274 /* Two consecutive rotates adding up to the precision of the
12275 type can be ignored. */
12276 if (code == RROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST
12277 && TREE_CODE (arg0) == RROTATE_EXPR
12278 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
12279 && TREE_INT_CST_HIGH (arg1) == 0
12280 && TREE_INT_CST_HIGH (TREE_OPERAND (arg0, 1)) == 0
12281 && ((TREE_INT_CST_LOW (arg1)
12282 + TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1)))
12283 == (unsigned int) TYPE_PRECISION (type)))
12284 return TREE_OPERAND (arg0, 0);
12286 /* Fold (X & C2) << C1 into (X << C1) & (C2 << C1)
12287 (X & C2) >> C1 into (X >> C1) & (C2 >> C1)
12288 if the latter can be further optimized. */
12289 if ((code == LSHIFT_EXPR || code == RSHIFT_EXPR)
12290 && TREE_CODE (arg0) == BIT_AND_EXPR
12291 && TREE_CODE (arg1) == INTEGER_CST
12292 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
12294 tree mask = fold_build2_loc (loc, code, type,
12295 fold_convert_loc (loc, type,
12296 TREE_OPERAND (arg0, 1)),
12297 arg1);
12298 tree shift = fold_build2_loc (loc, code, type,
12299 fold_convert_loc (loc, type,
12300 TREE_OPERAND (arg0, 0)),
12301 arg1);
12302 tem = fold_binary_loc (loc, BIT_AND_EXPR, type, shift, mask);
12303 if (tem)
12304 return tem;
12307 return NULL_TREE;
12309 case MIN_EXPR:
12310 if (operand_equal_p (arg0, arg1, 0))
12311 return omit_one_operand_loc (loc, type, arg0, arg1);
12312 if (INTEGRAL_TYPE_P (type)
12313 && operand_equal_p (arg1, TYPE_MIN_VALUE (type), OEP_ONLY_CONST))
12314 return omit_one_operand_loc (loc, type, arg1, arg0);
12315 tem = fold_minmax (loc, MIN_EXPR, type, arg0, arg1);
12316 if (tem)
12317 return tem;
12318 goto associate;
12320 case MAX_EXPR:
12321 if (operand_equal_p (arg0, arg1, 0))
12322 return omit_one_operand_loc (loc, type, arg0, arg1);
12323 if (INTEGRAL_TYPE_P (type)
12324 && TYPE_MAX_VALUE (type)
12325 && operand_equal_p (arg1, TYPE_MAX_VALUE (type), OEP_ONLY_CONST))
12326 return omit_one_operand_loc (loc, type, arg1, arg0);
12327 tem = fold_minmax (loc, MAX_EXPR, type, arg0, arg1);
12328 if (tem)
12329 return tem;
12330 goto associate;
12332 case TRUTH_ANDIF_EXPR:
12333 /* Note that the operands of this must be ints
12334 and their values must be 0 or 1.
12335 ("true" is a fixed value perhaps depending on the language.) */
12336 /* If first arg is constant zero, return it. */
12337 if (integer_zerop (arg0))
12338 return fold_convert_loc (loc, type, arg0);
12339 case TRUTH_AND_EXPR:
12340 /* If either arg is constant true, drop it. */
12341 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
12342 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
12343 if (TREE_CODE (arg1) == INTEGER_CST && ! integer_zerop (arg1)
12344 /* Preserve sequence points. */
12345 && (code != TRUTH_ANDIF_EXPR || ! TREE_SIDE_EFFECTS (arg0)))
12346 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
12347 /* If second arg is constant zero, result is zero, but first arg
12348 must be evaluated. */
12349 if (integer_zerop (arg1))
12350 return omit_one_operand_loc (loc, type, arg1, arg0);
12351 /* Likewise for first arg, but note that only the TRUTH_AND_EXPR
12352 case will be handled here. */
12353 if (integer_zerop (arg0))
12354 return omit_one_operand_loc (loc, type, arg0, arg1);
12356 /* !X && X is always false. */
12357 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
12358 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
12359 return omit_one_operand_loc (loc, type, integer_zero_node, arg1);
12360 /* X && !X is always false. */
12361 if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
12362 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
12363 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
12365 /* A < X && A + 1 > Y ==> A < X && A >= Y. Normally A + 1 > Y
12366 means A >= Y && A != MAX, but in this case we know that
12367 A < X <= MAX. */
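/* (A + 1 > Y differs from A >= Y only when A + 1 wraps, i.e. when
   A == MAX, and A < X rules that out.) */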
12369 if (!TREE_SIDE_EFFECTS (arg0)
12370 && !TREE_SIDE_EFFECTS (arg1))
12372 tem = fold_to_nonsharp_ineq_using_bound (loc, arg0, arg1);
12373 if (tem && !operand_equal_p (tem, arg0, 0))
12374 return fold_build2_loc (loc, code, type, tem, arg1);
12376 tem = fold_to_nonsharp_ineq_using_bound (loc, arg1, arg0);
12377 if (tem && !operand_equal_p (tem, arg1, 0))
12378 return fold_build2_loc (loc, code, type, arg0, tem);
12381 truth_andor:
12382 /* We only do these simplifications if we are optimizing. */
12383 if (!optimize)
12384 return NULL_TREE;
12386 /* Check for things like (A || B) && (A || C). We can convert this
12387 to A || (B && C). Note that either operator can be any of the four
12388 truth and/or operations and the transformation will still be
12389 valid. Also note that we only care about order for the
12390 ANDIF and ORIF operators. If B contains side effects, this
12391 might change the truth-value of A. */
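/* For instance, (A || B) && (C || A) shares A in the a00 == a11
   position, which is only handled by the commutative cases below
   (plain TRUTH_AND/TRUTH_OR on both levels). */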
12392 if (TREE_CODE (arg0) == TREE_CODE (arg1)
12393 && (TREE_CODE (arg0) == TRUTH_ANDIF_EXPR
12394 || TREE_CODE (arg0) == TRUTH_ORIF_EXPR
12395 || TREE_CODE (arg0) == TRUTH_AND_EXPR
12396 || TREE_CODE (arg0) == TRUTH_OR_EXPR)
12397 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (arg0, 1)))
12399 tree a00 = TREE_OPERAND (arg0, 0);
12400 tree a01 = TREE_OPERAND (arg0, 1);
12401 tree a10 = TREE_OPERAND (arg1, 0);
12402 tree a11 = TREE_OPERAND (arg1, 1);
12403 int commutative = ((TREE_CODE (arg0) == TRUTH_OR_EXPR
12404 || TREE_CODE (arg0) == TRUTH_AND_EXPR)
12405 && (code == TRUTH_AND_EXPR
12406 || code == TRUTH_OR_EXPR));
12408 if (operand_equal_p (a00, a10, 0))
12409 return fold_build2_loc (loc, TREE_CODE (arg0), type, a00,
12410 fold_build2_loc (loc, code, type, a01, a11));
12411 else if (commutative && operand_equal_p (a00, a11, 0))
12412 return fold_build2_loc (loc, TREE_CODE (arg0), type, a00,
12413 fold_build2_loc (loc, code, type, a01, a10));
12414 else if (commutative && operand_equal_p (a01, a10, 0))
12415 return fold_build2_loc (loc, TREE_CODE (arg0), type, a01,
12416 fold_build2_loc (loc, code, type, a00, a11));
12418 /* This case is tricky because we must either have commutative
12419 operators or else A10 must not have side-effects. */
12421 else if ((commutative || ! TREE_SIDE_EFFECTS (a10))
12422 && operand_equal_p (a01, a11, 0))
12423 return fold_build2_loc (loc, TREE_CODE (arg0), type,
12424 fold_build2_loc (loc, code, type, a00, a10),
12425 a01);
12428 /* See if we can build a range comparison. */
12429 if (0 != (tem = fold_range_test (loc, code, type, op0, op1)))
12430 return tem;
12432 /* Check for the possibility of merging component references. If our
12433 lhs is another similar operation, try to merge its rhs with our
12434 rhs. Then try to merge our lhs and rhs. */
12435 if (TREE_CODE (arg0) == code
12436 && 0 != (tem = fold_truthop (loc, code, type,
12437 TREE_OPERAND (arg0, 1), arg1)))
12438 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0), tem);
12440 if ((tem = fold_truthop (loc, code, type, arg0, arg1)) != 0)
12441 return tem;
12443 return NULL_TREE;
12445 case TRUTH_ORIF_EXPR:
12446 /* Note that the operands of this must be ints
12447 and their values must be 0 or 1.
12448 ("true" is a fixed value perhaps depending on the language.) */
12449 /* If first arg is constant true, return it. */
12450 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
12451 return fold_convert_loc (loc, type, arg0);
12452 case TRUTH_OR_EXPR:
12453 /* If either arg is constant zero, drop it. */
12454 if (TREE_CODE (arg0) == INTEGER_CST && integer_zerop (arg0))
12455 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
12456 if (TREE_CODE (arg1) == INTEGER_CST && integer_zerop (arg1)
12457 /* Preserve sequence points. */
12458 && (code != TRUTH_ORIF_EXPR || ! TREE_SIDE_EFFECTS (arg0)))
12459 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
12460 /* If second arg is constant true, result is true, but we must
12461 evaluate first arg. */
12462 if (TREE_CODE (arg1) == INTEGER_CST && ! integer_zerop (arg1))
12463 return omit_one_operand_loc (loc, type, arg1, arg0);
12464 /* Likewise for first arg, but note this only occurs here for
12465 TRUTH_OR_EXPR. */
12466 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
12467 return omit_one_operand_loc (loc, type, arg0, arg1);
12469 /* !X || X is always true. */
12470 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
12471 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
12472 return omit_one_operand_loc (loc, type, integer_one_node, arg1);
12473 /* X || !X is always true. */
12474 if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
12475 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
12476 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
12478 goto truth_andor;
12480 case TRUTH_XOR_EXPR:
12481 /* If the second arg is constant zero, drop it. */
12482 if (integer_zerop (arg1))
12483 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
12484 /* If the second arg is constant true, this is a logical inversion. */
12485 if (integer_onep (arg1))
12487 /* Only call invert_truthvalue if operand is a truth value. */
12488 if (TREE_CODE (TREE_TYPE (arg0)) != BOOLEAN_TYPE)
12489 tem = fold_build1_loc (loc, TRUTH_NOT_EXPR, TREE_TYPE (arg0), arg0);
12490 else
12491 tem = invert_truthvalue_loc (loc, arg0);
12492 return non_lvalue_loc (loc, fold_convert_loc (loc, type, tem));
12494 /* Identical arguments cancel to zero. */
12495 if (operand_equal_p (arg0, arg1, 0))
12496 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
12498 /* !X ^ X is always true. */
12499 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
12500 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
12501 return omit_one_operand_loc (loc, type, integer_one_node, arg1);
12503 /* X ^ !X is always true. */
12504 if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
12505 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
12506 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
12508 return NULL_TREE;
12510 case EQ_EXPR:
12511 case NE_EXPR:
12512 tem = fold_comparison (loc, code, type, op0, op1);
12513 if (tem != NULL_TREE)
12514 return tem;
12516 /* bool_var != 0 becomes bool_var. */
12517 if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_zerop (arg1)
12518 && code == NE_EXPR)
12519 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
12521 /* bool_var == 1 becomes bool_var. */
12522 if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_onep (arg1)
12523 && code == EQ_EXPR)
12524 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
12526 /* bool_var != 1 becomes !bool_var. */
12527 if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_onep (arg1)
12528 && code == NE_EXPR)
12529 return fold_build1_loc (loc, TRUTH_NOT_EXPR, type,
12530 fold_convert_loc (loc, type, arg0));
12532 /* bool_var == 0 becomes !bool_var. */
12533 if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_zerop (arg1)
12534 && code == EQ_EXPR)
12535 return fold_build1_loc (loc, TRUTH_NOT_EXPR, type,
12536 fold_convert_loc (loc, type, arg0));
12538 /* If this is an equality comparison of the address of two non-weak,
12539 unaliased symbols neither of which are extern (since we do not
12540 have access to attributes for externs), then we know the result. */
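/* E.g. for distinct non-weak, non-aliased locals a and b,
   &a == &b folds to 0 and &a == &a folds to 1. */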
12541 if (TREE_CODE (arg0) == ADDR_EXPR
12542 && VAR_OR_FUNCTION_DECL_P (TREE_OPERAND (arg0, 0))
12543 && ! DECL_WEAK (TREE_OPERAND (arg0, 0))
12544 && ! lookup_attribute ("alias",
12545 DECL_ATTRIBUTES (TREE_OPERAND (arg0, 0)))
12546 && ! DECL_EXTERNAL (TREE_OPERAND (arg0, 0))
12547 && TREE_CODE (arg1) == ADDR_EXPR
12548 && VAR_OR_FUNCTION_DECL_P (TREE_OPERAND (arg1, 0))
12549 && ! DECL_WEAK (TREE_OPERAND (arg1, 0))
12550 && ! lookup_attribute ("alias",
12551 DECL_ATTRIBUTES (TREE_OPERAND (arg1, 0)))
12552 && ! DECL_EXTERNAL (TREE_OPERAND (arg1, 0)))
12554 /* We know that we're looking at the address of two
12555 non-weak, unaliased, static _DECL nodes.
12557 It is both wasteful and incorrect to call operand_equal_p
12558 to compare the two ADDR_EXPR nodes. It is wasteful in that
12559 all we need to do is test pointer equality for the arguments
12560 to the two ADDR_EXPR nodes. It is incorrect to use
12561 operand_equal_p as that function is NOT equivalent to a
12562 C equality test. It can in fact return false for two
12563 objects which would test as equal using the C equality
12564 operator. */
12565 bool equal = TREE_OPERAND (arg0, 0) == TREE_OPERAND (arg1, 0);
12566 return constant_boolean_node (equal
12567 ? code == EQ_EXPR : code != EQ_EXPR,
12568 type);
12571 /* If this is an EQ or NE comparison of a constant with a PLUS_EXPR or
12572 a MINUS_EXPR of a constant, we can convert it into a comparison with
12573 a revised constant as long as no overflow occurs. */
12574 if (TREE_CODE (arg1) == INTEGER_CST
12575 && (TREE_CODE (arg0) == PLUS_EXPR
12576 || TREE_CODE (arg0) == MINUS_EXPR)
12577 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
12578 && 0 != (tem = const_binop (TREE_CODE (arg0) == PLUS_EXPR
12579 ? MINUS_EXPR : PLUS_EXPR,
12580 fold_convert_loc (loc, TREE_TYPE (arg0),
12581 arg1),
12582 TREE_OPERAND (arg0, 1), 0))
12583 && !TREE_OVERFLOW (tem))
12584 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0), tem);
12586 /* Similarly for a NEGATE_EXPR. */
12587 if (TREE_CODE (arg0) == NEGATE_EXPR
12588 && TREE_CODE (arg1) == INTEGER_CST
12589 && 0 != (tem = negate_expr (arg1))
12590 && TREE_CODE (tem) == INTEGER_CST
12591 && !TREE_OVERFLOW (tem))
12592 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0), tem);
12594 /* Similarly for a BIT_XOR_EXPR; X ^ C1 == C2 is X == (C1 ^ C2). */
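/* E.g. (X ^ 3) == 5 becomes X == 6, since 3 ^ 5 == 6. */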
12595 if (TREE_CODE (arg0) == BIT_XOR_EXPR
12596 && TREE_CODE (arg1) == INTEGER_CST
12597 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
12598 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0),
12599 fold_build2_loc (loc, BIT_XOR_EXPR, TREE_TYPE (arg0),
12600 fold_convert_loc (loc,
12601 TREE_TYPE (arg0),
12602 arg1),
12603 TREE_OPERAND (arg0, 1)));
12605 /* Transform comparisons of the form X +- Y CMP X to Y CMP 0. */
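/* E.g. X + Y == X becomes Y == 0; this holds even under modulo
   wraparound. */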
12606 if ((TREE_CODE (arg0) == PLUS_EXPR
12607 || TREE_CODE (arg0) == POINTER_PLUS_EXPR
12608 || TREE_CODE (arg0) == MINUS_EXPR)
12609 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
12610 && (INTEGRAL_TYPE_P (TREE_TYPE (arg0))
12611 || POINTER_TYPE_P (TREE_TYPE (arg0))))
12613 tree val = TREE_OPERAND (arg0, 1);
12614 return omit_two_operands_loc (loc, type,
12615 fold_build2_loc (loc, code, type,
12616 val,
12617 build_int_cst (TREE_TYPE (val),
12618 0)),
12619 TREE_OPERAND (arg0, 0), arg1);
12622 /* Transform comparisons of the form C - X CMP X if C % 2 == 1. */
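/* C - X == X would require 2*X == C, which has no solution modulo a
   power of two when C is odd; so == folds to false and != to true. */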
12623 if (TREE_CODE (arg0) == MINUS_EXPR
12624 && TREE_CODE (TREE_OPERAND (arg0, 0)) == INTEGER_CST
12625 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0)
12626 && (TREE_INT_CST_LOW (TREE_OPERAND (arg0, 0)) & 1) == 1)
12628 return omit_two_operands_loc (loc, type,
12629 code == NE_EXPR
12630 ? boolean_true_node : boolean_false_node,
12631 TREE_OPERAND (arg0, 1), arg1);
12634 /* If we have X - Y == 0, we can convert that to X == Y and similarly
12635 for !=. Don't do this for ordered comparisons due to overflow. */
12636 if (TREE_CODE (arg0) == MINUS_EXPR
12637 && integer_zerop (arg1))
12638 return fold_build2_loc (loc, code, type,
12639 TREE_OPERAND (arg0, 0), TREE_OPERAND (arg0, 1));
12641 /* Convert ABS_EXPR<x> == 0 or ABS_EXPR<x> != 0 to x == 0 or x != 0. */
12642 if (TREE_CODE (arg0) == ABS_EXPR
12643 && (integer_zerop (arg1) || real_zerop (arg1)))
12644 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0), arg1);
12646 /* If this is an EQ or NE comparison with zero and ARG0 is
12647 (1 << foo) & bar, convert it to (bar >> foo) & 1. Both require
12648 two operations, but the latter can be done in one less insn
12649 on machines that have only two-operand insns or on which a
12650 constant cannot be the first operand. */
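/* E.g. ((1 << N) & X) == 0 becomes ((X >> N) & 1) == 0. */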
12651 if (TREE_CODE (arg0) == BIT_AND_EXPR
12652 && integer_zerop (arg1))
12654 tree arg00 = TREE_OPERAND (arg0, 0);
12655 tree arg01 = TREE_OPERAND (arg0, 1);
12656 if (TREE_CODE (arg00) == LSHIFT_EXPR
12657 && integer_onep (TREE_OPERAND (arg00, 0)))
12659 tree tem = fold_build2_loc (loc, RSHIFT_EXPR, TREE_TYPE (arg00),
12660 arg01, TREE_OPERAND (arg00, 1));
12661 tem = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg0), tem,
12662 build_int_cst (TREE_TYPE (arg0), 1));
12663 return fold_build2_loc (loc, code, type,
12664 fold_convert_loc (loc, TREE_TYPE (arg1), tem),
12665 arg1);
12667 else if (TREE_CODE (arg01) == LSHIFT_EXPR
12668 && integer_onep (TREE_OPERAND (arg01, 0)))
12670 tree tem = fold_build2_loc (loc, RSHIFT_EXPR, TREE_TYPE (arg01),
12671 arg00, TREE_OPERAND (arg01, 1));
12672 tem = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg0), tem,
12673 build_int_cst (TREE_TYPE (arg0), 1));
12674 return fold_build2_loc (loc, code, type,
12675 fold_convert_loc (loc, TREE_TYPE (arg1), tem),
12676 arg1);
12680 /* If this is an NE or EQ comparison of zero against the result of a
12681 signed MOD operation whose second operand is a power of 2, make
12682 the MOD operation unsigned since it is simpler and equivalent. */
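/* E.g. X % 4 == 0 just tests that the low two bits of X are clear,
   the same test performed by (unsigned) X % 4 == 0. */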
12683 if (integer_zerop (arg1)
12684 && !TYPE_UNSIGNED (TREE_TYPE (arg0))
12685 && (TREE_CODE (arg0) == TRUNC_MOD_EXPR
12686 || TREE_CODE (arg0) == CEIL_MOD_EXPR
12687 || TREE_CODE (arg0) == FLOOR_MOD_EXPR
12688 || TREE_CODE (arg0) == ROUND_MOD_EXPR)
12689 && integer_pow2p (TREE_OPERAND (arg0, 1)))
12691 tree newtype = unsigned_type_for (TREE_TYPE (arg0));
12692 tree newmod = fold_build2_loc (loc, TREE_CODE (arg0), newtype,
12693 fold_convert_loc (loc, newtype,
12694 TREE_OPERAND (arg0, 0)),
12695 fold_convert_loc (loc, newtype,
12696 TREE_OPERAND (arg0, 1)));
12698 return fold_build2_loc (loc, code, type, newmod,
12699 fold_convert_loc (loc, newtype, arg1));
12702 /* Fold ((X >> C1) & C2) == 0 and ((X >> C1) & C2) != 0 where
12703 C1 is a valid shift constant, and C2 is a power of two, i.e.
12704 a single bit. */
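/* E.g. ((X >> 3) & 4) != 0 becomes (X & 32) != 0, since the shifted
   mask 4 << 3 still fits in the type. */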
12705 if (TREE_CODE (arg0) == BIT_AND_EXPR
12706 && TREE_CODE (TREE_OPERAND (arg0, 0)) == RSHIFT_EXPR
12707 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1))
12708 == INTEGER_CST
12709 && integer_pow2p (TREE_OPERAND (arg0, 1))
12710 && integer_zerop (arg1))
12712 tree itype = TREE_TYPE (arg0);
12713 unsigned HOST_WIDE_INT prec = TYPE_PRECISION (itype);
12714 tree arg001 = TREE_OPERAND (TREE_OPERAND (arg0, 0), 1);
12716 /* Check for a valid shift count. */
12717 if (TREE_INT_CST_HIGH (arg001) == 0
12718 && TREE_INT_CST_LOW (arg001) < prec)
12720 tree arg01 = TREE_OPERAND (arg0, 1);
12721 tree arg000 = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
12722 unsigned HOST_WIDE_INT log2 = tree_log2 (arg01);
12723 /* If (C2 << C1) doesn't overflow, then ((X >> C1) & C2) != 0
12724 can be rewritten as (X & (C2 << C1)) != 0. */
12725 if ((log2 + TREE_INT_CST_LOW (arg001)) < prec)
12727 tem = fold_build2_loc (loc, LSHIFT_EXPR, itype, arg01, arg001);
12728 tem = fold_build2_loc (loc, BIT_AND_EXPR, itype, arg000, tem);
12729 return fold_build2_loc (loc, code, type, tem, arg1);
12731 /* Otherwise, for signed (arithmetic) shifts,
12732 ((X >> C1) & C2) != 0 is rewritten as X < 0, and
12733 ((X >> C1) & C2) == 0 is rewritten as X >= 0. */
12734 else if (!TYPE_UNSIGNED (itype))
12735 return fold_build2_loc (loc, code == EQ_EXPR ? GE_EXPR : LT_EXPR, type,
12736 arg000, build_int_cst (itype, 0));
12737 /* Otherwise, for unsigned (logical) shifts,
12738 ((X >> C1) & C2) != 0 is rewritten as (X,false), and
12739 ((X >> C1) & C2) == 0 is rewritten as (X,true). */
12740 else
12741 return omit_one_operand_loc (loc, type,
12742 code == EQ_EXPR ? integer_one_node
12743 : integer_zero_node,
12744 arg000);
12748 /* If this is an NE comparison of zero with an AND of one, remove the
12749 comparison since the AND will give the correct value. */
12750 if (code == NE_EXPR
12751 && integer_zerop (arg1)
12752 && TREE_CODE (arg0) == BIT_AND_EXPR
12753 && integer_onep (TREE_OPERAND (arg0, 1)))
12754 return fold_convert_loc (loc, type, arg0);
12756 /* If we have (A & C) == C where C is a power of 2, convert this into
12757 (A & C) != 0. Similarly for NE_EXPR. */
12758 if (TREE_CODE (arg0) == BIT_AND_EXPR
12759 && integer_pow2p (TREE_OPERAND (arg0, 1))
12760 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
12761 return fold_build2_loc (loc, code == EQ_EXPR ? NE_EXPR : EQ_EXPR, type,
12762 arg0, fold_convert_loc (loc, TREE_TYPE (arg0),
12763 integer_zero_node));
12765 /* If we have (A & C) != 0 or (A & C) == 0 and C is the sign
12766 bit, then fold the expression into A < 0 or A >= 0. */
12767 tem = fold_single_bit_test_into_sign_test (loc, code, arg0, arg1, type);
12768 if (tem)
12769 return tem;
12771 /* If we have (A & C) == D where D & ~C != 0, convert this into 0.
12772 Similarly for NE_EXPR. */
12773 if (TREE_CODE (arg0) == BIT_AND_EXPR
12774 && TREE_CODE (arg1) == INTEGER_CST
12775 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
12777 tree notc = fold_build1_loc (loc, BIT_NOT_EXPR,
12778 TREE_TYPE (TREE_OPERAND (arg0, 1)),
12779 TREE_OPERAND (arg0, 1));
12780 tree dandnotc = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg0),
12781 arg1, notc);
12782 tree rslt = code == EQ_EXPR ? integer_zero_node : integer_one_node;
12783 if (integer_nonzerop (dandnotc))
12784 return omit_one_operand_loc (loc, type, rslt, arg0);
12787 /* If we have (A | C) == D where C & ~D != 0, convert this into 0.
12788 Similarly for NE_EXPR. */
12789 if (TREE_CODE (arg0) == BIT_IOR_EXPR
12790 && TREE_CODE (arg1) == INTEGER_CST
12791 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
12793 tree notd = fold_build1_loc (loc, BIT_NOT_EXPR, TREE_TYPE (arg1), arg1);
12794 tree candnotd = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg0),
12795 TREE_OPERAND (arg0, 1), notd);
12796 tree rslt = code == EQ_EXPR ? integer_zero_node : integer_one_node;
12797 if (integer_nonzerop (candnotd))
12798 return omit_one_operand_loc (loc, type, rslt, arg0);
12801 /* If this is a comparison of a field, we may be able to simplify it. */
12802 if ((TREE_CODE (arg0) == COMPONENT_REF
12803 || TREE_CODE (arg0) == BIT_FIELD_REF)
12804 /* Handle the constant case even without -O
12805 to make sure the warnings are given. */
12806 && (optimize || TREE_CODE (arg1) == INTEGER_CST))
12808 t1 = optimize_bit_field_compare (loc, code, type, arg0, arg1);
12809 if (t1)
12810 return t1;
12813 /* Optimize comparisons of strlen vs zero to a compare of the
12814 first character of the string vs zero. To wit,
12815 strlen(ptr) == 0 => *ptr == 0
12816 strlen(ptr) != 0 => *ptr != 0
12817 Other cases should reduce to one of these two (or a constant)
12818 due to the return value of strlen being unsigned. */
12819 if (TREE_CODE (arg0) == CALL_EXPR
12820 && integer_zerop (arg1))
12822 tree fndecl = get_callee_fndecl (arg0);
12824 if (fndecl
12825 && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
12826 && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_STRLEN
12827 && call_expr_nargs (arg0) == 1
12828 && TREE_CODE (TREE_TYPE (CALL_EXPR_ARG (arg0, 0))) == POINTER_TYPE)
12830 tree iref = build_fold_indirect_ref_loc (loc,
12831 CALL_EXPR_ARG (arg0, 0));
12832 return fold_build2_loc (loc, code, type, iref,
12833 build_int_cst (TREE_TYPE (iref), 0));
12837 /* Fold (X >> C) != 0 into X < 0 if C is one less than the width
12838 of X. Similarly fold (X >> C) == 0 into X >= 0. */
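/* E.g. for a 32-bit X, (X >> 31) isolates the sign, so
   (X >> 31) != 0 is exactly X < 0. */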
12839 if (TREE_CODE (arg0) == RSHIFT_EXPR
12840 && integer_zerop (arg1)
12841 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
12843 tree arg00 = TREE_OPERAND (arg0, 0);
12844 tree arg01 = TREE_OPERAND (arg0, 1);
12845 tree itype = TREE_TYPE (arg00);
12846 if (TREE_INT_CST_HIGH (arg01) == 0
12847 && TREE_INT_CST_LOW (arg01)
12848 == (unsigned HOST_WIDE_INT) (TYPE_PRECISION (itype) - 1))
12850 if (TYPE_UNSIGNED (itype))
12852 itype = signed_type_for (itype);
12853 arg00 = fold_convert_loc (loc, itype, arg00);
12855 return fold_build2_loc (loc, code == EQ_EXPR ? GE_EXPR : LT_EXPR,
12856 type, arg00, build_int_cst (itype, 0));
12860 /* (X ^ Y) == 0 becomes X == Y, and (X ^ Y) != 0 becomes X != Y. */
12861 if (integer_zerop (arg1)
12862 && TREE_CODE (arg0) == BIT_XOR_EXPR)
12863 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0),
12864 TREE_OPERAND (arg0, 1));
12866 /* (X ^ Y) == Y becomes X == 0. We know that Y has no side-effects. */
12867 if (TREE_CODE (arg0) == BIT_XOR_EXPR
12868 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
12869 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0),
12870 build_int_cst (TREE_TYPE (arg1), 0));
12871 /* Likewise (X ^ Y) == X becomes Y == 0. X has no side-effects. */
12872 if (TREE_CODE (arg0) == BIT_XOR_EXPR
12873 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
12874 && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
12875 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 1),
12876 build_int_cst (TREE_TYPE (arg1), 0));
12878 /* (X ^ C1) op C2 can be rewritten as X op (C1 ^ C2). */
12879 if (TREE_CODE (arg0) == BIT_XOR_EXPR
12880 && TREE_CODE (arg1) == INTEGER_CST
12881 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
12882 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0),
12883 fold_build2_loc (loc, BIT_XOR_EXPR, TREE_TYPE (arg1),
12884 TREE_OPERAND (arg0, 1), arg1));
12886 /* Fold (~X & C) == 0 into (X & C) != 0 and (~X & C) != 0 into
12887 (X & C) == 0 when C is a single bit. */
12888 if (TREE_CODE (arg0) == BIT_AND_EXPR
12889 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_NOT_EXPR
12890 && integer_zerop (arg1)
12891 && integer_pow2p (TREE_OPERAND (arg0, 1)))
12893 tem = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg0),
12894 TREE_OPERAND (TREE_OPERAND (arg0, 0), 0),
12895 TREE_OPERAND (arg0, 1));
12896 return fold_build2_loc (loc, code == EQ_EXPR ? NE_EXPR : EQ_EXPR,
12897 type, tem, arg1);
12900 /* Fold ((X & C) ^ C) eq/ne 0 into (X & C) ne/eq 0, when the
12901 constant C is a power of two, i.e. a single bit. */
12902 if (TREE_CODE (arg0) == BIT_XOR_EXPR
12903 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_AND_EXPR
12904 && integer_zerop (arg1)
12905 && integer_pow2p (TREE_OPERAND (arg0, 1))
12906 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
12907 TREE_OPERAND (arg0, 1), OEP_ONLY_CONST))
12909 tree arg00 = TREE_OPERAND (arg0, 0);
12910 return fold_build2_loc (loc, code == EQ_EXPR ? NE_EXPR : EQ_EXPR, type,
12911 arg00, build_int_cst (TREE_TYPE (arg00), 0));
12914 /* Likewise, fold ((X ^ C) & C) eq/ne 0 into (X & C) ne/eq 0,
12915 when C is a power of two, i.e. a single bit. */
12916 if (TREE_CODE (arg0) == BIT_AND_EXPR
12917 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_XOR_EXPR
12918 && integer_zerop (arg1)
12919 && integer_pow2p (TREE_OPERAND (arg0, 1))
12920 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
12921 TREE_OPERAND (arg0, 1), OEP_ONLY_CONST))
12923 tree arg000 = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
12924 tem = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg000),
12925 arg000, TREE_OPERAND (arg0, 1));
12926 return fold_build2_loc (loc, code == EQ_EXPR ? NE_EXPR : EQ_EXPR, type,
12927 tem, build_int_cst (TREE_TYPE (tem), 0));
12930 if (integer_zerop (arg1)
12931 && tree_expr_nonzero_p (arg0))
12933 tree res = constant_boolean_node (code == NE_EXPR, type);
12934 return omit_one_operand_loc (loc, type, res, arg0);
12937 /* Fold -X op -Y as X op Y, where op is eq/ne. */
12938 if (TREE_CODE (arg0) == NEGATE_EXPR
12939 && TREE_CODE (arg1) == NEGATE_EXPR)
12940 return fold_build2_loc (loc, code, type,
12941 TREE_OPERAND (arg0, 0),
12942 TREE_OPERAND (arg1, 0));
12944 /* Fold (X & C) op (Y & C) as ((X ^ Y) & C) op 0, and symmetries. */
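/* E.g. (X & 7) == (Y & 7) becomes ((X ^ Y) & 7) == 0. */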
12945 if (TREE_CODE (arg0) == BIT_AND_EXPR
12946 && TREE_CODE (arg1) == BIT_AND_EXPR)
12948 tree arg00 = TREE_OPERAND (arg0, 0);
12949 tree arg01 = TREE_OPERAND (arg0, 1);
12950 tree arg10 = TREE_OPERAND (arg1, 0);
12951 tree arg11 = TREE_OPERAND (arg1, 1);
12952 tree itype = TREE_TYPE (arg0);
12954 if (operand_equal_p (arg01, arg11, 0))
12955 return fold_build2_loc (loc, code, type,
12956 fold_build2_loc (loc, BIT_AND_EXPR, itype,
12957 fold_build2_loc (loc,
12958 BIT_XOR_EXPR, itype,
12959 arg00, arg10),
12960 arg01),
12961 build_int_cst (itype, 0));
12963 if (operand_equal_p (arg01, arg10, 0))
12964 return fold_build2_loc (loc, code, type,
12965 fold_build2_loc (loc, BIT_AND_EXPR, itype,
12966 fold_build2_loc (loc,
12967 BIT_XOR_EXPR, itype,
12968 arg00, arg11),
12969 arg01),
12970 build_int_cst (itype, 0));
12972 if (operand_equal_p (arg00, arg11, 0))
12973 return fold_build2_loc (loc, code, type,
12974 fold_build2_loc (loc, BIT_AND_EXPR, itype,
12975 fold_build2_loc (loc,
12976 BIT_XOR_EXPR, itype,
12977 arg01, arg10),
12978 arg00),
12979 build_int_cst (itype, 0));
12981 if (operand_equal_p (arg00, arg10, 0))
12982 return fold_build2_loc (loc, code, type,
12983 fold_build2_loc (loc, BIT_AND_EXPR, itype,
12984 fold_build2_loc (loc,
12985 BIT_XOR_EXPR, itype,
12986 arg01, arg11),
12987 arg00),
12988 build_int_cst (itype, 0));
12991 if (TREE_CODE (arg0) == BIT_XOR_EXPR
12992 && TREE_CODE (arg1) == BIT_XOR_EXPR)
12994 tree arg00 = TREE_OPERAND (arg0, 0);
12995 tree arg01 = TREE_OPERAND (arg0, 1);
12996 tree arg10 = TREE_OPERAND (arg1, 0);
12997 tree arg11 = TREE_OPERAND (arg1, 1);
12998 tree itype = TREE_TYPE (arg0);
13000 /* Optimize (X ^ Z) op (Y ^ Z) as X op Y, and symmetries.
13001 operand_equal_p guarantees no side-effects so we don't need
13002 to use omit_one_operand on Z. */
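/* E.g. (X ^ Z) == (Y ^ Z) becomes X == Y: XOR with the same Z is a
   bijection. */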
13003 if (operand_equal_p (arg01, arg11, 0))
13004 return fold_build2_loc (loc, code, type, arg00, arg10);
13005 if (operand_equal_p (arg01, arg10, 0))
13006 return fold_build2_loc (loc, code, type, arg00, arg11);
13007 if (operand_equal_p (arg00, arg11, 0))
13008 return fold_build2_loc (loc, code, type, arg01, arg10);
13009 if (operand_equal_p (arg00, arg10, 0))
13010 return fold_build2_loc (loc, code, type, arg01, arg11);
13012 /* Optimize (X ^ C1) op (Y ^ C2) as (X ^ (C1 ^ C2)) op Y. */
13013 if (TREE_CODE (arg01) == INTEGER_CST
13014 && TREE_CODE (arg11) == INTEGER_CST)
13015 return fold_build2_loc (loc, code, type,
13016 fold_build2_loc (loc, BIT_XOR_EXPR, itype, arg00,
13017 fold_build2_loc (loc,
13018 BIT_XOR_EXPR, itype,
13019 arg01, arg11)),
13020 arg10);
13023 /* Attempt to simplify equality/inequality comparisons of complex
13024 values. Only lower the comparison if the result is known or
13025 can be simplified to a single scalar comparison. */
13026 if ((TREE_CODE (arg0) == COMPLEX_EXPR
13027 || TREE_CODE (arg0) == COMPLEX_CST)
13028 && (TREE_CODE (arg1) == COMPLEX_EXPR
13029 || TREE_CODE (arg1) == COMPLEX_CST))
13031 tree real0, imag0, real1, imag1;
13032 tree rcond, icond;
13034 if (TREE_CODE (arg0) == COMPLEX_EXPR)
13036 real0 = TREE_OPERAND (arg0, 0);
13037 imag0 = TREE_OPERAND (arg0, 1);
13039 else
13041 real0 = TREE_REALPART (arg0);
13042 imag0 = TREE_IMAGPART (arg0);
13045 if (TREE_CODE (arg1) == COMPLEX_EXPR)
13047 real1 = TREE_OPERAND (arg1, 0);
13048 imag1 = TREE_OPERAND (arg1, 1);
13050 else
13052 real1 = TREE_REALPART (arg1);
13053 imag1 = TREE_IMAGPART (arg1);
13056 rcond = fold_binary_loc (loc, code, type, real0, real1);
13057 if (rcond && TREE_CODE (rcond) == INTEGER_CST)
13059 if (integer_zerop (rcond))
13061 if (code == EQ_EXPR)
13062 return omit_two_operands_loc (loc, type, boolean_false_node,
13063 imag0, imag1);
13064 return fold_build2_loc (loc, NE_EXPR, type, imag0, imag1);
13066 else
13068 if (code == NE_EXPR)
13069 return omit_two_operands_loc (loc, type, boolean_true_node,
13070 imag0, imag1);
13071 return fold_build2_loc (loc, EQ_EXPR, type, imag0, imag1);
13075 icond = fold_binary_loc (loc, code, type, imag0, imag1);
13076 if (icond && TREE_CODE (icond) == INTEGER_CST)
13078 if (integer_zerop (icond))
13080 if (code == EQ_EXPR)
13081 return omit_two_operands_loc (loc, type, boolean_false_node,
13082 real0, real1);
13083 return fold_build2_loc (loc, NE_EXPR, type, real0, real1);
13085 else
13087 if (code == NE_EXPR)
13088 return omit_two_operands_loc (loc, type, boolean_true_node,
13089 real0, real1);
13090 return fold_build2_loc (loc, EQ_EXPR, type, real0, real1);
13095 return NULL_TREE;
13097 case LT_EXPR:
13098 case GT_EXPR:
13099 case LE_EXPR:
13100 case GE_EXPR:
13101 tem = fold_comparison (loc, code, type, op0, op1);
13102 if (tem != NULL_TREE)
13103 return tem;
13105 /* Transform comparisons of the form X +- C CMP X. */
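/* E.g. X + 1 > X folds to true for signed X whose overflow is
   undefined; fold_overflow_warning notes the strict-overflow
   assumption below. */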
13106 if ((TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
13107 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
13108 && ((TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
13109 && !HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0))))
13110 || (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
13111 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))))
13113 tree arg01 = TREE_OPERAND (arg0, 1);
13114 enum tree_code code0 = TREE_CODE (arg0);
13115 int is_positive;
13117 if (TREE_CODE (arg01) == REAL_CST)
13118 is_positive = REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg01)) ? -1 : 1;
13119 else
13120 is_positive = tree_int_cst_sgn (arg01);
13122 /* (X - c) > X becomes false. */
13123 if (code == GT_EXPR
13124 && ((code0 == MINUS_EXPR && is_positive >= 0)
13125 || (code0 == PLUS_EXPR && is_positive <= 0)))
13127 if (TREE_CODE (arg01) == INTEGER_CST
13128 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
13129 fold_overflow_warning (("assuming signed overflow does not "
13130 "occur when assuming that (X - c) > X "
13131 "is always false"),
13132 WARN_STRICT_OVERFLOW_ALL);
13133 return constant_boolean_node (0, type);
13136 /* Likewise (X + c) < X becomes false. */
13137 if (code == LT_EXPR
13138 && ((code0 == PLUS_EXPR && is_positive >= 0)
13139 || (code0 == MINUS_EXPR && is_positive <= 0)))
13141 if (TREE_CODE (arg01) == INTEGER_CST
13142 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
13143 fold_overflow_warning (("assuming signed overflow does not "
13144 "occur when assuming that "
13145 "(X + c) < X is always false"),
13146 WARN_STRICT_OVERFLOW_ALL);
13147 return constant_boolean_node (0, type);
13150 /* Convert (X - c) <= X to true. */
13151 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1)))
13152 && code == LE_EXPR
13153 && ((code0 == MINUS_EXPR && is_positive >= 0)
13154 || (code0 == PLUS_EXPR && is_positive <= 0)))
13156 if (TREE_CODE (arg01) == INTEGER_CST
13157 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
13158 fold_overflow_warning (("assuming signed overflow does not "
13159 "occur when assuming that "
13160 "(X - c) <= X is always true"),
13161 WARN_STRICT_OVERFLOW_ALL);
13162 return constant_boolean_node (1, type);
13165 /* Convert (X + c) >= X to true. */
13166 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1)))
13167 && code == GE_EXPR
13168 && ((code0 == PLUS_EXPR && is_positive >= 0)
13169 || (code0 == MINUS_EXPR && is_positive <= 0)))
13171 if (TREE_CODE (arg01) == INTEGER_CST
13172 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
13173 fold_overflow_warning (("assuming signed overflow does not "
13174 "occur when assuming that "
13175 "(X + c) >= X is always true"),
13176 WARN_STRICT_OVERFLOW_ALL);
13177 return constant_boolean_node (1, type);
13180 if (TREE_CODE (arg01) == INTEGER_CST)
13182 /* Convert X + c > X and X - c < X to true for integers. */
13183 if (code == GT_EXPR
13184 && ((code0 == PLUS_EXPR && is_positive > 0)
13185 || (code0 == MINUS_EXPR && is_positive < 0)))
13187 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
13188 fold_overflow_warning (("assuming signed overflow does "
13189 "not occur when assuming that "
13190 "(X + c) > X is always true"),
13191 WARN_STRICT_OVERFLOW_ALL);
13192 return constant_boolean_node (1, type);
13195 if (code == LT_EXPR
13196 && ((code0 == MINUS_EXPR && is_positive > 0)
13197 || (code0 == PLUS_EXPR && is_positive < 0)))
13199 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
13200 fold_overflow_warning (("assuming signed overflow does "
13201 "not occur when assuming that "
13202 "(X - c) < X is always true"),
13203 WARN_STRICT_OVERFLOW_ALL);
13204 return constant_boolean_node (1, type);
13207 /* Convert X + c <= X and X - c >= X to false for integers. */
13208 if (code == LE_EXPR
13209 && ((code0 == PLUS_EXPR && is_positive > 0)
13210 || (code0 == MINUS_EXPR && is_positive < 0)))
13212 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
13213 fold_overflow_warning (("assuming signed overflow does "
13214 "not occur when assuming that "
13215 "(X + c) <= X is always false"),
13216 WARN_STRICT_OVERFLOW_ALL);
13217 return constant_boolean_node (0, type);
13220 if (code == GE_EXPR
13221 && ((code0 == MINUS_EXPR && is_positive > 0)
13222 || (code0 == PLUS_EXPR && is_positive < 0)))
13224 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
13225 fold_overflow_warning (("assuming signed overflow does "
13226 "not occur when assuming that "
13227 "(X - c) >= X is always false"),
13228 WARN_STRICT_OVERFLOW_ALL);
13229 return constant_boolean_node (0, type);
13234 /* Comparisons with the highest or lowest possible integer of
13235 the specified precision will have known values. */
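/* E.g. for unsigned char X: X > 255 is false, X <= 255 is true,
   and X >= 255 becomes X == 255. */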
13237 tree arg1_type = TREE_TYPE (arg1);
13238 unsigned int width = TYPE_PRECISION (arg1_type);
13240 if (TREE_CODE (arg1) == INTEGER_CST
13241 && width <= 2 * HOST_BITS_PER_WIDE_INT
13242 && (INTEGRAL_TYPE_P (arg1_type) || POINTER_TYPE_P (arg1_type)))
13244 HOST_WIDE_INT signed_max_hi;
13245 unsigned HOST_WIDE_INT signed_max_lo;
13246 unsigned HOST_WIDE_INT max_hi, max_lo, min_hi, min_lo;
13248 if (width <= HOST_BITS_PER_WIDE_INT)
13250 signed_max_lo = ((unsigned HOST_WIDE_INT) 1 << (width - 1))
13251 - 1;
13252 signed_max_hi = 0;
13253 max_hi = 0;
13255 if (TYPE_UNSIGNED (arg1_type))
13257 max_lo = ((unsigned HOST_WIDE_INT) 2 << (width - 1)) - 1;
13258 min_lo = 0;
13259 min_hi = 0;
13261 else
13263 max_lo = signed_max_lo;
13264 min_lo = ((unsigned HOST_WIDE_INT) -1 << (width - 1));
13265 min_hi = -1;
13268 else
13270 width -= HOST_BITS_PER_WIDE_INT;
13271 signed_max_lo = -1;
13272 signed_max_hi = ((unsigned HOST_WIDE_INT) 1 << (width - 1))
13273 - 1;
13274 max_lo = -1;
13275 min_lo = 0;
13277 if (TYPE_UNSIGNED (arg1_type))
13279 max_hi = ((unsigned HOST_WIDE_INT) 2 << (width - 1)) - 1;
13280 min_hi = 0;
13282 else
13284 max_hi = signed_max_hi;
13285 min_hi = ((unsigned HOST_WIDE_INT) -1 << (width - 1));
13289 if ((unsigned HOST_WIDE_INT) TREE_INT_CST_HIGH (arg1) == max_hi
13290 && TREE_INT_CST_LOW (arg1) == max_lo)
13291 switch (code)
13293 case GT_EXPR:
13294 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
13296 case GE_EXPR:
13297 return fold_build2_loc (loc, EQ_EXPR, type, op0, op1);
13299 case LE_EXPR:
13300 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
13302 case LT_EXPR:
13303 return fold_build2_loc (loc, NE_EXPR, type, op0, op1);
13305 /* The GE_EXPR and LT_EXPR cases above are not normally
13306 reached because of previous transformations. */
13308 default:
13309 break;
13311 else if ((unsigned HOST_WIDE_INT) TREE_INT_CST_HIGH (arg1)
13312 == max_hi
13313 && TREE_INT_CST_LOW (arg1) == max_lo - 1)
13314 switch (code)
13316 case GT_EXPR:
13317 arg1 = const_binop (PLUS_EXPR, arg1,
13318 build_int_cst (TREE_TYPE (arg1), 1), 0);
13319 return fold_build2_loc (loc, EQ_EXPR, type,
13320 fold_convert_loc (loc,
13321 TREE_TYPE (arg1), arg0),
13322 arg1);
13323 case LE_EXPR:
13324 arg1 = const_binop (PLUS_EXPR, arg1,
13325 build_int_cst (TREE_TYPE (arg1), 1), 0);
13326 return fold_build2_loc (loc, NE_EXPR, type,
13327 fold_convert_loc (loc, TREE_TYPE (arg1),
13328 arg0),
13329 arg1);
13330 default:
13331 break;
13333 else if ((unsigned HOST_WIDE_INT) TREE_INT_CST_HIGH (arg1)
13334 == min_hi
13335 && TREE_INT_CST_LOW (arg1) == min_lo)
13336 switch (code)
13338 case LT_EXPR:
13339 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
13341 case LE_EXPR:
13342 return fold_build2_loc (loc, EQ_EXPR, type, op0, op1);
13344 case GE_EXPR:
13345 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
13347 case GT_EXPR:
13348 return fold_build2_loc (loc, NE_EXPR, type, op0, op1);
13350 default:
13351 break;
13353 else if ((unsigned HOST_WIDE_INT) TREE_INT_CST_HIGH (arg1)
13354 == min_hi
13355 && TREE_INT_CST_LOW (arg1) == min_lo + 1)
13356 switch (code)
13358 case GE_EXPR:
13359 arg1 = const_binop (MINUS_EXPR, arg1, integer_one_node, 0);
13360 return fold_build2_loc (loc, NE_EXPR, type,
13361 fold_convert_loc (loc,
13362 TREE_TYPE (arg1), arg0),
13363 arg1);
13364 case LT_EXPR:
13365 arg1 = const_binop (MINUS_EXPR, arg1, integer_one_node, 0);
13366 return fold_build2_loc (loc, EQ_EXPR, type,
13367 fold_convert_loc (loc, TREE_TYPE (arg1),
13368 arg0),
13369 arg1);
13370 default:
13371 break;
13374 else if (TREE_INT_CST_HIGH (arg1) == signed_max_hi
13375 && TREE_INT_CST_LOW (arg1) == signed_max_lo
13376 && TYPE_UNSIGNED (arg1_type)
13377 /* We will flip the signedness of the comparison operator
13378 associated with the mode of arg1, so the sign bit is
13379 specified by this mode. Check that arg1 is the signed
13380 max associated with this sign bit. */
13381 && width == GET_MODE_BITSIZE (TYPE_MODE (arg1_type))
13382 /* signed_type does not work on pointer types. */
13383 && INTEGRAL_TYPE_P (arg1_type))
13385 /* The following case also applies to X < signed_max+1
13386 and X >= signed_max+1 because of previous transformations. */
13387 if (code == LE_EXPR || code == GT_EXPR)
13389 tree st;
13390 st = signed_type_for (TREE_TYPE (arg1));
13391 return fold_build2_loc (loc,
13392 code == LE_EXPR ? GE_EXPR : LT_EXPR,
13393 type, fold_convert_loc (loc, st, arg0),
13394 build_int_cst (st, 0));
13400 /* If we are comparing an ABS_EXPR with a constant, we can
13401 convert all the cases into explicit comparisons, but they may
13402 well not be faster than doing the ABS and one comparison.
13403 But ABS (X) <= C is a range comparison, which becomes a subtraction
13404 and a comparison, and is probably faster. */
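/* ABS (X) <= C becomes X >= -C && X <= C below. */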
13405 if (code == LE_EXPR
13406 && TREE_CODE (arg1) == INTEGER_CST
13407 && TREE_CODE (arg0) == ABS_EXPR
13408 && ! TREE_SIDE_EFFECTS (arg0)
13409 && (0 != (tem = negate_expr (arg1)))
13410 && TREE_CODE (tem) == INTEGER_CST
13411 && !TREE_OVERFLOW (tem))
13412 return fold_build2_loc (loc, TRUTH_ANDIF_EXPR, type,
13413 build2 (GE_EXPR, type,
13414 TREE_OPERAND (arg0, 0), tem),
13415 build2 (LE_EXPR, type,
13416 TREE_OPERAND (arg0, 0), arg1));
13418 /* Convert ABS_EXPR<x> >= 0 to true. */
13419 strict_overflow_p = false;
13420 if (code == GE_EXPR
13421 && (integer_zerop (arg1)
13422 || (! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0)))
13423 && real_zerop (arg1)))
13424 && tree_expr_nonnegative_warnv_p (arg0, &strict_overflow_p))
13426 if (strict_overflow_p)
13427 fold_overflow_warning (("assuming signed overflow does not occur "
13428 "when simplifying comparison of "
13429 "absolute value and zero"),
13430 WARN_STRICT_OVERFLOW_CONDITIONAL);
13431 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
13434 /* Convert ABS_EXPR<x> < 0 to false. */
13435 strict_overflow_p = false;
13436 if (code == LT_EXPR
13437 && (integer_zerop (arg1) || real_zerop (arg1))
13438 && tree_expr_nonnegative_warnv_p (arg0, &strict_overflow_p))
13440 if (strict_overflow_p)
13441 fold_overflow_warning (("assuming signed overflow does not occur "
13442 "when simplifying comparison of "
13443 "absolute value and zero"),
13444 WARN_STRICT_OVERFLOW_CONDITIONAL);
13445 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
13448 /* If X is unsigned, convert X < (1 << Y) into X >> Y == 0
13449 and similarly for >= into !=. */
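/* For unsigned X, X < (1 << Y) says every bit at position Y or above
   is clear, which is exactly X >> Y == 0. */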
13450 if ((code == LT_EXPR || code == GE_EXPR)
13451 && TYPE_UNSIGNED (TREE_TYPE (arg0))
13452 && TREE_CODE (arg1) == LSHIFT_EXPR
13453 && integer_onep (TREE_OPERAND (arg1, 0)))
13455 tem = build2 (code == LT_EXPR ? EQ_EXPR : NE_EXPR, type,
13456 build2 (RSHIFT_EXPR, TREE_TYPE (arg0), arg0,
13457 TREE_OPERAND (arg1, 1)),
13458 build_int_cst (TREE_TYPE (arg0), 0));
13459 goto fold_binary_exit;
13462 if ((code == LT_EXPR || code == GE_EXPR)
13463 && TYPE_UNSIGNED (TREE_TYPE (arg0))
13464 && CONVERT_EXPR_P (arg1)
13465 && TREE_CODE (TREE_OPERAND (arg1, 0)) == LSHIFT_EXPR
13466 && integer_onep (TREE_OPERAND (TREE_OPERAND (arg1, 0), 0)))
13468 tem = build2 (code == LT_EXPR ? EQ_EXPR : NE_EXPR, type,
13469 fold_convert_loc (loc, TREE_TYPE (arg0),
13470 build2 (RSHIFT_EXPR,
13471 TREE_TYPE (arg0), arg0,
13472 TREE_OPERAND (TREE_OPERAND (arg1, 0),
13473 1))),
13474 build_int_cst (TREE_TYPE (arg0), 0));
13475 goto fold_binary_exit;
13478 return NULL_TREE;
13480 case UNORDERED_EXPR:
13481 case ORDERED_EXPR:
13482 case UNLT_EXPR:
13483 case UNLE_EXPR:
13484 case UNGT_EXPR:
13485 case UNGE_EXPR:
13486 case UNEQ_EXPR:
13487 case LTGT_EXPR:
13488 if (TREE_CODE (arg0) == REAL_CST && TREE_CODE (arg1) == REAL_CST)
13490 t1 = fold_relational_const (code, type, arg0, arg1);
13491 if (t1 != NULL_TREE)
13492 return t1;
13495 /* If the first operand is NaN, the result is constant. */
13496 if (TREE_CODE (arg0) == REAL_CST
13497 && REAL_VALUE_ISNAN (TREE_REAL_CST (arg0))
13498 && (code != LTGT_EXPR || ! flag_trapping_math))
13500 t1 = (code == ORDERED_EXPR || code == LTGT_EXPR)
13501 ? integer_zero_node
13502 : integer_one_node;
13503 return omit_one_operand_loc (loc, type, t1, arg1);
13506 /* If the second operand is NaN, the result is constant. */
13507 if (TREE_CODE (arg1) == REAL_CST
13508 && REAL_VALUE_ISNAN (TREE_REAL_CST (arg1))
13509 && (code != LTGT_EXPR || ! flag_trapping_math))
13511 t1 = (code == ORDERED_EXPR || code == LTGT_EXPR)
13512 ? integer_zero_node
13513 : integer_one_node;
13514 return omit_one_operand_loc (loc, type, t1, arg0);
13517 /* Simplify unordered comparison of something with itself. */
13518 if ((code == UNLE_EXPR || code == UNGE_EXPR || code == UNEQ_EXPR)
13519 && operand_equal_p (arg0, arg1, 0))
13520 return constant_boolean_node (1, type);
13522 if (code == LTGT_EXPR
13523 && !flag_trapping_math
13524 && operand_equal_p (arg0, arg1, 0))
13525 return constant_boolean_node (0, type);
13527 /* Fold (double)float1 CMP (double)float2 into float1 CMP float2. */
13529 tree targ0 = strip_float_extensions (arg0);
13530 tree targ1 = strip_float_extensions (arg1);
13531 tree newtype = TREE_TYPE (targ0);
13533 if (TYPE_PRECISION (TREE_TYPE (targ1)) > TYPE_PRECISION (newtype))
13534 newtype = TREE_TYPE (targ1);
13536 if (TYPE_PRECISION (newtype) < TYPE_PRECISION (TREE_TYPE (arg0)))
13537 return fold_build2_loc (loc, code, type,
13538 fold_convert_loc (loc, newtype, targ0),
13539 fold_convert_loc (loc, newtype, targ1));
13542 return NULL_TREE;
13544 case COMPOUND_EXPR:
13545 /* When pedantic, a compound expression can be neither an lvalue
13546 nor an integer constant expression. */
13547 if (TREE_SIDE_EFFECTS (arg0) || TREE_CONSTANT (arg1))
13548 return NULL_TREE;
13549 /* Don't let (0, 0) be null pointer constant. */
13550 tem = integer_zerop (arg1) ? build1 (NOP_EXPR, type, arg1)
13551 : fold_convert_loc (loc, type, arg1);
13552 return pedantic_non_lvalue_loc (loc, tem);
13554 case COMPLEX_EXPR:
13555 if ((TREE_CODE (arg0) == REAL_CST
13556 && TREE_CODE (arg1) == REAL_CST)
13557 || (TREE_CODE (arg0) == INTEGER_CST
13558 && TREE_CODE (arg1) == INTEGER_CST))
13559 return build_complex (type, arg0, arg1);
13560 return NULL_TREE;
13562 case ASSERT_EXPR:
13563 /* An ASSERT_EXPR should never be passed to fold_binary. */
13564 gcc_unreachable ();
13566 default:
13567 return NULL_TREE;
13568 } /* switch (code) */
13569 fold_binary_exit:
13570 protected_set_expr_location (tem, loc);
13571 return tem;
13574 /* Callback for walk_tree, looking for LABEL_EXPR. Return *TP if it is
13575 a LABEL_EXPR; otherwise return NULL_TREE. Do not check the subtrees
13576 of GOTO_EXPR. */
13578 static tree
13579 contains_label_1 (tree *tp, int *walk_subtrees, void *data ATTRIBUTE_UNUSED)
13581 switch (TREE_CODE (*tp))
13583 case LABEL_EXPR:
13584 return *tp;
13586 case GOTO_EXPR:
13587 *walk_subtrees = 0;
13589 /* ... fall through ... */
13591 default:
13592 return NULL_TREE;
13596 /* Return whether the sub-tree ST contains a label which is accessible from
13597 outside the sub-tree. */
13599 static bool
13600 contains_label_p (tree st)
13602 return
13603 (walk_tree_without_duplicates (&st, contains_label_1, NULL) != NULL_TREE);
13606 /* Fold a ternary expression of code CODE and type TYPE with operands
13607 OP0, OP1, and OP2. Return the folded expression if folding is
13608 successful. Otherwise, return NULL_TREE. */
13610 tree
13611 fold_ternary_loc (location_t loc, enum tree_code code, tree type,
13612 tree op0, tree op1, tree op2)
13614 tree tem;
13615 tree arg0 = NULL_TREE, arg1 = NULL_TREE;
13616 enum tree_code_class kind = TREE_CODE_CLASS (code);
13618 gcc_assert (IS_EXPR_CODE_CLASS (kind)
13619 && TREE_CODE_LENGTH (code) == 3);
13621 /* Strip any conversions that don't change the mode. This is safe
13622 for every expression, except for a comparison expression because
13623 its signedness is derived from its operands. So, in the latter
13624 case, only strip conversions that don't change the signedness.
13626 Note that this is done as an internal manipulation within the
13627 constant folder, in order to find the simplest representation of
13628 the arguments so that their form can be studied. In any cases,
13629 the appropriate type conversions should be put back in the tree
13630 that will get out of the constant folder. */
13631 if (op0)
13633 arg0 = op0;
13634 STRIP_NOPS (arg0);
13637 if (op1)
13639 arg1 = op1;
13640 STRIP_NOPS (arg1);
13643 switch (code)
13645 case COMPONENT_REF:
13646 if (TREE_CODE (arg0) == CONSTRUCTOR
13647 && ! type_contains_placeholder_p (TREE_TYPE (arg0)))
13649 unsigned HOST_WIDE_INT idx;
13650 tree field, value;
13651 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (arg0), idx, field, value)
13652 if (field == arg1)
13653 return value;
13655 return NULL_TREE;
13657 case COND_EXPR:
13658 /* Pedantic ANSI C says that a conditional expression is never an lvalue,
13659 so all simple results must be passed through pedantic_non_lvalue. */
13660 if (TREE_CODE (arg0) == INTEGER_CST)
13662 tree unused_op = integer_zerop (arg0) ? op1 : op2;
13663 tem = integer_zerop (arg0) ? op2 : op1;
13664 /* Only optimize constant conditions when the selected branch
13665 has the same type as the COND_EXPR. This avoids optimizing
13666 away "c ? x : throw", where the throw has a void type.
13667 Avoid throwing away the operand that contains a label. */
13668 if ((!TREE_SIDE_EFFECTS (unused_op)
13669 || !contains_label_p (unused_op))
13670 && (! VOID_TYPE_P (TREE_TYPE (tem))
13671 || VOID_TYPE_P (type)))
13672 return pedantic_non_lvalue_loc (loc, tem);
13673 return NULL_TREE;
13675 if (operand_equal_p (arg1, op2, 0))
13676 return pedantic_omit_one_operand_loc (loc, type, arg1, arg0);
13678 /* If we have A op B ? A : C, we may be able to convert this to a
13679 simpler expression, depending on the operation and the values
13680 of B and C. Signed zeros prevent all of these transformations,
13681 for reasons given above each one.
13683 Also try swapping the arguments and inverting the conditional. */
13684 if (COMPARISON_CLASS_P (arg0)
13685 && operand_equal_for_comparison_p (TREE_OPERAND (arg0, 0),
13686 arg1, TREE_OPERAND (arg0, 1))
13687 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg1))))
13689 tem = fold_cond_expr_with_comparison (loc, type, arg0, op1, op2);
13690 if (tem)
13691 return tem;
13694 if (COMPARISON_CLASS_P (arg0)
13695 && operand_equal_for_comparison_p (TREE_OPERAND (arg0, 0),
13696 op2,
13697 TREE_OPERAND (arg0, 1))
13698 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (op2))))
13700 tem = fold_truth_not_expr (loc, arg0);
13701 if (tem && COMPARISON_CLASS_P (tem))
13703 tem = fold_cond_expr_with_comparison (loc, type, tem, op2, op1);
13704 if (tem)
13705 return tem;
13709 /* If the second operand is simpler than the third, swap them
13710 since that produces better jump optimization results. */
13711 if (truth_value_p (TREE_CODE (arg0))
13712 && tree_swap_operands_p (op1, op2, false))
13714 /* See if this can be inverted. If it can't, possibly because
13715 it was a floating-point inequality comparison, don't do
13716 anything. */
13717 tem = fold_truth_not_expr (loc, arg0);
13718 if (tem)
13719 return fold_build3_loc (loc, code, type, tem, op2, op1);
13722 /* Convert A ? 1 : 0 to simply A. */
13723 if (integer_onep (op1)
13724 && integer_zerop (op2)
13725 /* If we try to convert OP0 to our type, the
13726 call to fold will try to move the conversion inside
13727 a COND, which will recurse. In that case, the COND_EXPR
13728 is probably the best choice, so leave it alone. */
13729 && type == TREE_TYPE (arg0))
13730 return pedantic_non_lvalue_loc (loc, arg0);
13732 /* Convert A ? 0 : 1 to !A. This prefers the use of NOT_EXPR
13733 over COND_EXPR in cases such as floating point comparisons. */
13734 if (integer_zerop (op1)
13735 && integer_onep (op2)
13736 && truth_value_p (TREE_CODE (arg0)))
13737 return pedantic_non_lvalue_loc (loc,
13738 fold_convert_loc (loc, type,
13739 invert_truthvalue_loc (loc,
13740 arg0)));
13742 /* A < 0 ? <sign bit of A> : 0 is simply (A & <sign bit of A>). */
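/* E.g. for a 32-bit int A, A < 0 ? 0x80000000 : 0 is simply
   A & 0x80000000. */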
13743 if (TREE_CODE (arg0) == LT_EXPR
13744 && integer_zerop (TREE_OPERAND (arg0, 1))
13745 && integer_zerop (op2)
13746 && (tem = sign_bit_p (TREE_OPERAND (arg0, 0), arg1)))
13748 /* sign_bit_p only checks ARG1 bits within A's precision.
13749 If <sign bit of A> has wider type than A, bits outside
13750 of A's precision in <sign bit of A> need to be checked.
13751 If they are all 0, this optimization needs to be done
13752 in unsigned A's type; if they are all 1, in signed A's
13753 type; otherwise this can't be done. */
13754 if (TYPE_PRECISION (TREE_TYPE (tem))
13755 < TYPE_PRECISION (TREE_TYPE (arg1))
13756 && TYPE_PRECISION (TREE_TYPE (tem))
13757 < TYPE_PRECISION (type))
13759 unsigned HOST_WIDE_INT mask_lo;
13760 HOST_WIDE_INT mask_hi;
13761 int inner_width, outer_width;
13762 tree tem_type;
13764 inner_width = TYPE_PRECISION (TREE_TYPE (tem));
13765 outer_width = TYPE_PRECISION (TREE_TYPE (arg1));
13766 if (outer_width > TYPE_PRECISION (type))
13767 outer_width = TYPE_PRECISION (type);
13769 if (outer_width > HOST_BITS_PER_WIDE_INT)
13771 mask_hi = ((unsigned HOST_WIDE_INT) -1
13772 >> (2 * HOST_BITS_PER_WIDE_INT - outer_width));
13773 mask_lo = -1;
13775 else
13777 mask_hi = 0;
13778 mask_lo = ((unsigned HOST_WIDE_INT) -1
13779 >> (HOST_BITS_PER_WIDE_INT - outer_width));
13781 if (inner_width > HOST_BITS_PER_WIDE_INT)
13783 mask_hi &= ~((unsigned HOST_WIDE_INT) -1
13784 >> (HOST_BITS_PER_WIDE_INT - inner_width));
13785 mask_lo = 0;
13787 else
13788 mask_lo &= ~((unsigned HOST_WIDE_INT) -1
13789 >> (HOST_BITS_PER_WIDE_INT - inner_width));
13791 if ((TREE_INT_CST_HIGH (arg1) & mask_hi) == mask_hi
13792 && (TREE_INT_CST_LOW (arg1) & mask_lo) == mask_lo)
13794 tem_type = signed_type_for (TREE_TYPE (tem));
13795 tem = fold_convert_loc (loc, tem_type, tem);
13797 else if ((TREE_INT_CST_HIGH (arg1) & mask_hi) == 0
13798 && (TREE_INT_CST_LOW (arg1) & mask_lo) == 0)
13800 tem_type = unsigned_type_for (TREE_TYPE (tem));
13801 tem = fold_convert_loc (loc, tem_type, tem);
13803 else
13804 tem = NULL;
13807 if (tem)
13808 return
13809 fold_convert_loc (loc, type,
13810 fold_build2_loc (loc, BIT_AND_EXPR,
13811 TREE_TYPE (tem), tem,
13812 fold_convert_loc (loc,
13813 TREE_TYPE (tem),
13814 arg1)));
13817 /* (A >> N) & 1 ? (1 << N) : 0 is simply A & (1 << N). A & 1 was
13818 already handled above. */
13819 if (TREE_CODE (arg0) == BIT_AND_EXPR
13820 && integer_onep (TREE_OPERAND (arg0, 1))
13821 && integer_zerop (op2)
13822 && integer_pow2p (arg1))
13824 tree tem = TREE_OPERAND (arg0, 0);
13825 STRIP_NOPS (tem);
13826 if (TREE_CODE (tem) == RSHIFT_EXPR
13827 && TREE_CODE (TREE_OPERAND (tem, 1)) == INTEGER_CST
13828 && (unsigned HOST_WIDE_INT) tree_log2 (arg1) ==
13829 TREE_INT_CST_LOW (TREE_OPERAND (tem, 1)))
13830 return fold_build2_loc (loc, BIT_AND_EXPR, type,
13831 TREE_OPERAND (tem, 0), arg1);
13834 /* A & N ? N : 0 is simply A & N if N is a power of two. This
13835 is probably obsolete because the first operand should be a
13836 truth value (that's why we have the two cases above), but let's
13837 leave it in until we can confirm this for all front-ends. */
13838 if (integer_zerop (op2)
13839 && TREE_CODE (arg0) == NE_EXPR
13840 && integer_zerop (TREE_OPERAND (arg0, 1))
13841 && integer_pow2p (arg1)
13842 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_AND_EXPR
13843 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
13844 arg1, OEP_ONLY_CONST))
13845 return pedantic_non_lvalue_loc (loc,
13846 fold_convert_loc (loc, type,
13847 TREE_OPERAND (arg0, 0)));
13849 /* Convert A ? B : 0 into A && B if A and B are truth values. */
13850 if (integer_zerop (op2)
13851 && truth_value_p (TREE_CODE (arg0))
13852 && truth_value_p (TREE_CODE (arg1)))
13853 return fold_build2_loc (loc, TRUTH_ANDIF_EXPR, type,
13854 fold_convert_loc (loc, type, arg0),
13855 arg1);
13857 /* Convert A ? B : 1 into !A || B if A and B are truth values. */
13858 if (integer_onep (op2)
13859 && truth_value_p (TREE_CODE (arg0))
13860 && truth_value_p (TREE_CODE (arg1)))
13862 /* Only perform transformation if ARG0 is easily inverted. */
13863 tem = fold_truth_not_expr (loc, arg0);
13864 if (tem)
13865 return fold_build2_loc (loc, TRUTH_ORIF_EXPR, type,
13866 fold_convert_loc (loc, type, tem),
13867 arg1);
13870 /* Convert A ? 0 : B into !A && B if A and B are truth values. */
13871 if (integer_zerop (arg1)
13872 && truth_value_p (TREE_CODE (arg0))
13873 && truth_value_p (TREE_CODE (op2)))
13875 /* Only perform transformation if ARG0 is easily inverted. */
13876 tem = fold_truth_not_expr (loc, arg0);
13877 if (tem)
13878 return fold_build2_loc (loc, TRUTH_ANDIF_EXPR, type,
13879 fold_convert_loc (loc, type, tem),
13880 op2);
13883 /* Convert A ? 1 : B into A || B if A and B are truth values. */
13884 if (integer_onep (arg1)
13885 && truth_value_p (TREE_CODE (arg0))
13886 && truth_value_p (TREE_CODE (op2)))
13887 return fold_build2_loc (loc, TRUTH_ORIF_EXPR, type,
13888 fold_convert_loc (loc, type, arg0),
13889 op2);
13891 return NULL_TREE;
13893 case CALL_EXPR:
13894 /* CALL_EXPRs used to be ternary exprs. Catch any mistaken uses
13895 of fold_ternary on them. */
13896 gcc_unreachable ();
13898 case BIT_FIELD_REF:
13899 if ((TREE_CODE (arg0) == VECTOR_CST
13900 || (TREE_CODE (arg0) == CONSTRUCTOR && TREE_CONSTANT (arg0)))
13901 && type == TREE_TYPE (TREE_TYPE (arg0)))
13903 unsigned HOST_WIDE_INT width = tree_low_cst (arg1, 1);
13904 unsigned HOST_WIDE_INT idx = tree_low_cst (op2, 1);
13906 if (width != 0
13907 && simple_cst_equal (arg1, TYPE_SIZE (type)) == 1
13908 && (idx % width) == 0
13909 && (idx = idx / width)
13910 < TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0)))
13912 tree elements = NULL_TREE;
13914 if (TREE_CODE (arg0) == VECTOR_CST)
13915 elements = TREE_VECTOR_CST_ELTS (arg0);
13916 else
13918 unsigned HOST_WIDE_INT idx;
13919 tree value;
13921 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (arg0), idx, value)
13922 elements = tree_cons (NULL_TREE, value, elements);
13924 while (idx-- > 0 && elements)
13925 elements = TREE_CHAIN (elements);
13926 if (elements)
13927 return TREE_VALUE (elements);
13928 else
13929 return fold_convert_loc (loc, type, integer_zero_node);
13933 /* A bit-field-ref that referenced the full argument can be stripped. */
13934 if (INTEGRAL_TYPE_P (TREE_TYPE (arg0))
13935 && TYPE_PRECISION (TREE_TYPE (arg0)) == tree_low_cst (arg1, 1)
13936 && integer_zerop (op2))
13937 return fold_convert_loc (loc, type, arg0);
13939 return NULL_TREE;
13941 default:
13942 return NULL_TREE;
13943 } /* switch (code) */
13946 /* Perform constant folding and related simplification of EXPR.
13947 The related simplifications include x*1 => x, x*0 => 0, etc.,
13948 and application of the associative law.
13949 NOP_EXPR conversions may be removed freely (as long as we
13950 are careful not to change the type of the overall expression).
13951 We cannot simplify through a CONVERT_EXPR, FIX_EXPR or FLOAT_EXPR,
13952 but we can constant-fold them if they have constant operands. */
13954 #ifdef ENABLE_FOLD_CHECKING
13955 # define fold(x) fold_1 (x)
13956 static tree fold_1 (tree);
13957 static
13958 #endif
13959 tree
13960 fold (tree expr)
13962 const tree t = expr;
13963 enum tree_code code = TREE_CODE (t);
13964 enum tree_code_class kind = TREE_CODE_CLASS (code);
13965 tree tem;
13966 location_t loc = EXPR_LOCATION (expr);
13968 /* Return right away if a constant. */
13969 if (kind == tcc_constant)
13970 return t;
13972 /* CALL_EXPR-like objects with variable numbers of operands are
13973 treated specially. */
13974 if (kind == tcc_vl_exp)
13976 if (code == CALL_EXPR)
13978 tem = fold_call_expr (loc, expr, false);
13979 return tem ? tem : expr;
13981 return expr;
13984 if (IS_EXPR_CODE_CLASS (kind))
13986 tree type = TREE_TYPE (t);
13987 tree op0, op1, op2;
13989 switch (TREE_CODE_LENGTH (code))
13991 case 1:
13992 op0 = TREE_OPERAND (t, 0);
13993 tem = fold_unary_loc (loc, code, type, op0);
13994 return tem ? tem : expr;
13995 case 2:
13996 op0 = TREE_OPERAND (t, 0);
13997 op1 = TREE_OPERAND (t, 1);
13998 tem = fold_binary_loc (loc, code, type, op0, op1);
13999 return tem ? tem : expr;
14000 case 3:
14001 op0 = TREE_OPERAND (t, 0);
14002 op1 = TREE_OPERAND (t, 1);
14003 op2 = TREE_OPERAND (t, 2);
14004 tem = fold_ternary_loc (loc, code, type, op0, op1, op2);
14005 return tem ? tem : expr;
14006 default:
14007 break;
14011 switch (code)
14013 case ARRAY_REF:
14015 tree op0 = TREE_OPERAND (t, 0);
14016 tree op1 = TREE_OPERAND (t, 1);
14018 if (TREE_CODE (op1) == INTEGER_CST
14019 && TREE_CODE (op0) == CONSTRUCTOR
14020 && ! type_contains_placeholder_p (TREE_TYPE (op0)))
14022 VEC(constructor_elt,gc) *elts = CONSTRUCTOR_ELTS (op0);
14023 unsigned HOST_WIDE_INT end = VEC_length (constructor_elt, elts);
14024 unsigned HOST_WIDE_INT begin = 0;
14026 /* Find a matching index by means of a binary search. */
14027 while (begin != end)
14029 unsigned HOST_WIDE_INT middle = (begin + end) / 2;
14030 tree index = VEC_index (constructor_elt, elts, middle)->index;
14032 if (TREE_CODE (index) == INTEGER_CST
14033 && tree_int_cst_lt (index, op1))
14034 begin = middle + 1;
14035 else if (TREE_CODE (index) == INTEGER_CST
14036 && tree_int_cst_lt (op1, index))
14037 end = middle;
14038 else if (TREE_CODE (index) == RANGE_EXPR
14039 && tree_int_cst_lt (TREE_OPERAND (index, 1), op1))
14040 begin = middle + 1;
14041 else if (TREE_CODE (index) == RANGE_EXPR
14042 && tree_int_cst_lt (op1, TREE_OPERAND (index, 0)))
14043 end = middle;
14044 else
14045 return VEC_index (constructor_elt, elts, middle)->value;
14049 return t;
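/* Worked example (editorial sketch): for a CONSTRUCTOR with sorted
   index/value pairs {0: 10, 1: 20, 2: 30} and OP1 == 1, the search
   starts with begin == 0, end == 3, probes middle == 1, finds an
   INTEGER_CST index equal to OP1 (neither less-than test fires), and
   returns the value 20.  */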
14052 case CONST_DECL:
14053 return fold (DECL_INITIAL (t));
14055 default:
14056 return t;
14057 } /* switch (code) */
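#if 0
/* Editorial usage sketch (not part of the original source): folding a
   constant addition through the entry point above.  TREE_CODE_LENGTH
   (PLUS_EXPR) is 2, so fold dispatches to fold_binary_loc.  */
static tree
example_fold_plus (void)
{
  tree two = build_int_cst (integer_type_node, 2);
  tree three = build_int_cst (integer_type_node, 3);
  /* Yields the INTEGER_CST 5; a tree that cannot be simplified is
     returned unchanged.  */
  return fold (build2 (PLUS_EXPR, integer_type_node, two, three));
}
#endif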
14060 #ifdef ENABLE_FOLD_CHECKING
14061 #undef fold
14063 static void fold_checksum_tree (const_tree, struct md5_ctx *, htab_t);
14064 static void fold_check_failed (const_tree, const_tree);
14065 void print_fold_checksum (const_tree);
14067 /* When --enable-checking=fold, compute a digest of EXPR before
14068 and after the actual fold call, to verify that fold did not
14069 accidentally change the original expression. */
14071 tree
14072 fold (tree expr)
14074 tree ret;
14075 struct md5_ctx ctx;
14076 unsigned char checksum_before[16], checksum_after[16];
14077 htab_t ht;
14079 ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
14080 md5_init_ctx (&ctx);
14081 fold_checksum_tree (expr, &ctx, ht);
14082 md5_finish_ctx (&ctx, checksum_before);
14083 htab_empty (ht);
14085 ret = fold_1 (expr);
14087 md5_init_ctx (&ctx);
14088 fold_checksum_tree (expr, &ctx, ht);
14089 md5_finish_ctx (&ctx, checksum_after);
14090 htab_delete (ht);
14092 if (memcmp (checksum_before, checksum_after, 16))
14093 fold_check_failed (expr, ret);
14095 return ret;
14098 void
14099 print_fold_checksum (const_tree expr)
14101 struct md5_ctx ctx;
14102 unsigned char checksum[16], cnt;
14103 htab_t ht;
14105 ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
14106 md5_init_ctx (&ctx);
14107 fold_checksum_tree (expr, &ctx, ht);
14108 md5_finish_ctx (&ctx, checksum);
14109 htab_delete (ht);
14110 for (cnt = 0; cnt < 16; ++cnt)
14111 fprintf (stderr, "%02x", checksum[cnt]);
14112 putc ('\n', stderr);
14115 static void
14116 fold_check_failed (const_tree expr ATTRIBUTE_UNUSED, const_tree ret ATTRIBUTE_UNUSED)
14118 internal_error ("fold check: original tree changed by fold");
14121 static void
14122 fold_checksum_tree (const_tree expr, struct md5_ctx *ctx, htab_t ht)
14124 const void **slot;
14125 enum tree_code code;
14126 union tree_node buf;
14127 int i, len;
14129 recursive_label:
14131 gcc_assert ((sizeof (struct tree_exp) + 5 * sizeof (tree)
14132 <= sizeof (struct tree_function_decl))
14133 && sizeof (struct tree_type) <= sizeof (struct tree_function_decl));
14134 if (expr == NULL)
14135 return;
14136 slot = (const void **) htab_find_slot (ht, expr, INSERT);
14137 if (*slot != NULL)
14138 return;
14139 *slot = expr;
14140 code = TREE_CODE (expr);
14141 if (TREE_CODE_CLASS (code) == tcc_declaration
14142 && DECL_ASSEMBLER_NAME_SET_P (expr))
14144 /* Allow DECL_ASSEMBLER_NAME to be modified. */
14145 memcpy ((char *) &buf, expr, tree_size (expr));
14146 SET_DECL_ASSEMBLER_NAME ((tree)&buf, NULL);
14147 expr = (tree) &buf;
14149 else if (TREE_CODE_CLASS (code) == tcc_type
14150 && (TYPE_POINTER_TO (expr)
14151 || TYPE_REFERENCE_TO (expr)
14152 || TYPE_CACHED_VALUES_P (expr)
14153 || TYPE_CONTAINS_PLACEHOLDER_INTERNAL (expr)
14154 || TYPE_NEXT_VARIANT (expr)))
14156 /* Allow these fields to be modified. */
14157 tree tmp;
14158 memcpy ((char *) &buf, expr, tree_size (expr));
14159 expr = tmp = (tree) &buf;
14160 TYPE_CONTAINS_PLACEHOLDER_INTERNAL (tmp) = 0;
14161 TYPE_POINTER_TO (tmp) = NULL;
14162 TYPE_REFERENCE_TO (tmp) = NULL;
14163 TYPE_NEXT_VARIANT (tmp) = NULL;
14164 if (TYPE_CACHED_VALUES_P (tmp))
14166 TYPE_CACHED_VALUES_P (tmp) = 0;
14167 TYPE_CACHED_VALUES (tmp) = NULL;
14170 md5_process_bytes (expr, tree_size (expr), ctx);
14171 fold_checksum_tree (TREE_TYPE (expr), ctx, ht);
14172 if (TREE_CODE_CLASS (code) != tcc_type
14173 && TREE_CODE_CLASS (code) != tcc_declaration
14174 && code != TREE_LIST
14175 && code != SSA_NAME)
14176 fold_checksum_tree (TREE_CHAIN (expr), ctx, ht);
14177 switch (TREE_CODE_CLASS (code))
14179 case tcc_constant:
14180 switch (code)
14182 case STRING_CST:
14183 md5_process_bytes (TREE_STRING_POINTER (expr),
14184 TREE_STRING_LENGTH (expr), ctx);
14185 break;
14186 case COMPLEX_CST:
14187 fold_checksum_tree (TREE_REALPART (expr), ctx, ht);
14188 fold_checksum_tree (TREE_IMAGPART (expr), ctx, ht);
14189 break;
14190 case VECTOR_CST:
14191 fold_checksum_tree (TREE_VECTOR_CST_ELTS (expr), ctx, ht);
14192 break;
14193 default:
14194 break;
14196 break;
14197 case tcc_exceptional:
14198 switch (code)
14200 case TREE_LIST:
14201 fold_checksum_tree (TREE_PURPOSE (expr), ctx, ht);
14202 fold_checksum_tree (TREE_VALUE (expr), ctx, ht);
14203 expr = TREE_CHAIN (expr);
14204 goto recursive_label;
14205 break;
14206 case TREE_VEC:
14207 for (i = 0; i < TREE_VEC_LENGTH (expr); ++i)
14208 fold_checksum_tree (TREE_VEC_ELT (expr, i), ctx, ht);
14209 break;
14210 default:
14211 break;
14213 break;
14214 case tcc_expression:
14215 case tcc_reference:
14216 case tcc_comparison:
14217 case tcc_unary:
14218 case tcc_binary:
14219 case tcc_statement:
14220 case tcc_vl_exp:
14221 len = TREE_OPERAND_LENGTH (expr);
14222 for (i = 0; i < len; ++i)
14223 fold_checksum_tree (TREE_OPERAND (expr, i), ctx, ht);
14224 break;
14225 case tcc_declaration:
14226 fold_checksum_tree (DECL_NAME (expr), ctx, ht);
14227 fold_checksum_tree (DECL_CONTEXT (expr), ctx, ht);
14228 if (CODE_CONTAINS_STRUCT (TREE_CODE (expr), TS_DECL_COMMON))
14230 fold_checksum_tree (DECL_SIZE (expr), ctx, ht);
14231 fold_checksum_tree (DECL_SIZE_UNIT (expr), ctx, ht);
14232 fold_checksum_tree (DECL_INITIAL (expr), ctx, ht);
14233 fold_checksum_tree (DECL_ABSTRACT_ORIGIN (expr), ctx, ht);
14234 fold_checksum_tree (DECL_ATTRIBUTES (expr), ctx, ht);
14236 if (CODE_CONTAINS_STRUCT (TREE_CODE (expr), TS_DECL_WITH_VIS))
14237 fold_checksum_tree (DECL_SECTION_NAME (expr), ctx, ht);
14239 if (CODE_CONTAINS_STRUCT (TREE_CODE (expr), TS_DECL_NON_COMMON))
14241 fold_checksum_tree (DECL_VINDEX (expr), ctx, ht);
14242 fold_checksum_tree (DECL_RESULT_FLD (expr), ctx, ht);
14243 fold_checksum_tree (DECL_ARGUMENT_FLD (expr), ctx, ht);
14245 break;
14246 case tcc_type:
14247 if (TREE_CODE (expr) == ENUMERAL_TYPE)
14248 fold_checksum_tree (TYPE_VALUES (expr), ctx, ht);
14249 fold_checksum_tree (TYPE_SIZE (expr), ctx, ht);
14250 fold_checksum_tree (TYPE_SIZE_UNIT (expr), ctx, ht);
14251 fold_checksum_tree (TYPE_ATTRIBUTES (expr), ctx, ht);
14252 fold_checksum_tree (TYPE_NAME (expr), ctx, ht);
14253 if (INTEGRAL_TYPE_P (expr)
14254 || SCALAR_FLOAT_TYPE_P (expr))
14256 fold_checksum_tree (TYPE_MIN_VALUE (expr), ctx, ht);
14257 fold_checksum_tree (TYPE_MAX_VALUE (expr), ctx, ht);
14259 fold_checksum_tree (TYPE_MAIN_VARIANT (expr), ctx, ht);
14260 if (TREE_CODE (expr) == RECORD_TYPE
14261 || TREE_CODE (expr) == UNION_TYPE
14262 || TREE_CODE (expr) == QUAL_UNION_TYPE)
14263 fold_checksum_tree (TYPE_BINFO (expr), ctx, ht);
14264 fold_checksum_tree (TYPE_CONTEXT (expr), ctx, ht);
14265 break;
14266 default:
14267 break;
14271 /* Helper function for outputting the checksum of a tree T. When
14272 debugging with gdb, you can "define mynext" to be "next" followed
14273 by "call debug_fold_checksum (op0)", then just trace down till the
14274 outputs differ. */
14276 void
14277 debug_fold_checksum (const_tree t)
14279 int i;
14280 unsigned char checksum[16];
14281 struct md5_ctx ctx;
14282 htab_t ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
14284 md5_init_ctx (&ctx);
14285 fold_checksum_tree (t, &ctx, ht);
14286 md5_finish_ctx (&ctx, checksum);
14287 htab_empty (ht);
14289 for (i = 0; i < 16; i++)
14290 fprintf (stderr, "%d ", checksum[i]);
14292 fprintf (stderr, "\n");
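/* Editorial sketch of the gdb recipe described above (commands are
   illustrative):

     (gdb) define mynext
     > next
     > call debug_fold_checksum (op0)
     > end
     (gdb) mynext

   Repeating "mynext" prints a checksum after every statement; the
   first step at which the digits change is the one that mutated OP0.  */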
14295 #endif
14297 /* Fold a unary tree expression with code CODE of type TYPE with an
14298 operand OP0. LOC is the location of the resulting expression.
14299 Return a folded expression if successful. Otherwise, return a tree
14300 expression with code CODE of type TYPE with an operand OP0. */
14302 tree
14303 fold_build1_stat_loc (location_t loc,
14304 enum tree_code code, tree type, tree op0 MEM_STAT_DECL)
14306 tree tem;
14307 #ifdef ENABLE_FOLD_CHECKING
14308 unsigned char checksum_before[16], checksum_after[16];
14309 struct md5_ctx ctx;
14310 htab_t ht;
14312 ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
14313 md5_init_ctx (&ctx);
14314 fold_checksum_tree (op0, &ctx, ht);
14315 md5_finish_ctx (&ctx, checksum_before);
14316 htab_empty (ht);
14317 #endif
14319 tem = fold_unary_loc (loc, code, type, op0);
14320 if (!tem)
14322 tem = build1_stat (code, type, op0 PASS_MEM_STAT);
14323 SET_EXPR_LOCATION (tem, loc);
14326 #ifdef ENABLE_FOLD_CHECKING
14327 md5_init_ctx (&ctx);
14328 fold_checksum_tree (op0, &ctx, ht);
14329 md5_finish_ctx (&ctx, checksum_after);
14330 htab_delete (ht);
14332 if (memcmp (checksum_before, checksum_after, 16))
14333 fold_check_failed (op0, tem);
14334 #endif
14335 return tem;
14338 /* Fold a binary tree expression with code CODE of type TYPE with
14339 operands OP0 and OP1. LOC is the location of the resulting
14340 expression. Return a folded expression if successful. Otherwise,
14341 return a tree expression with code CODE of type TYPE with operands
14342 OP0 and OP1. */
14344 tree
14345 fold_build2_stat_loc (location_t loc,
14346 enum tree_code code, tree type, tree op0, tree op1
14347 MEM_STAT_DECL)
14349 tree tem;
14350 #ifdef ENABLE_FOLD_CHECKING
14351 unsigned char checksum_before_op0[16],
14352 checksum_before_op1[16],
14353 checksum_after_op0[16],
14354 checksum_after_op1[16];
14355 struct md5_ctx ctx;
14356 htab_t ht;
14358 ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
14359 md5_init_ctx (&ctx);
14360 fold_checksum_tree (op0, &ctx, ht);
14361 md5_finish_ctx (&ctx, checksum_before_op0);
14362 htab_empty (ht);
14364 md5_init_ctx (&ctx);
14365 fold_checksum_tree (op1, &ctx, ht);
14366 md5_finish_ctx (&ctx, checksum_before_op1);
14367 htab_empty (ht);
14368 #endif
14370 tem = fold_binary_loc (loc, code, type, op0, op1);
14371 if (!tem)
14373 tem = build2_stat (code, type, op0, op1 PASS_MEM_STAT);
14374 SET_EXPR_LOCATION (tem, loc);
14377 #ifdef ENABLE_FOLD_CHECKING
14378 md5_init_ctx (&ctx);
14379 fold_checksum_tree (op0, &ctx, ht);
14380 md5_finish_ctx (&ctx, checksum_after_op0);
14381 htab_empty (ht);
14383 if (memcmp (checksum_before_op0, checksum_after_op0, 16))
14384 fold_check_failed (op0, tem);
14386 md5_init_ctx (&ctx);
14387 fold_checksum_tree (op1, &ctx, ht);
14388 md5_finish_ctx (&ctx, checksum_after_op1);
14389 htab_delete (ht);
14391 if (memcmp (checksum_before_op1, checksum_after_op1, 16))
14392 fold_check_failed (op1, tem);
14393 #endif
14394 return tem;
14397 /* Fold a ternary tree expression with code CODE of type TYPE with
14398 operands OP0, OP1, and OP2. Return a folded expression if
14399 successful. Otherwise, return a tree expression with code CODE of
14400 type TYPE with operands OP0, OP1, and OP2. */
14402 tree
14403 fold_build3_stat_loc (location_t loc, enum tree_code code, tree type,
14404 tree op0, tree op1, tree op2 MEM_STAT_DECL)
14406 tree tem;
14407 #ifdef ENABLE_FOLD_CHECKING
14408 unsigned char checksum_before_op0[16],
14409 checksum_before_op1[16],
14410 checksum_before_op2[16],
14411 checksum_after_op0[16],
14412 checksum_after_op1[16],
14413 checksum_after_op2[16];
14414 struct md5_ctx ctx;
14415 htab_t ht;
14417 ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
14418 md5_init_ctx (&ctx);
14419 fold_checksum_tree (op0, &ctx, ht);
14420 md5_finish_ctx (&ctx, checksum_before_op0);
14421 htab_empty (ht);
14423 md5_init_ctx (&ctx);
14424 fold_checksum_tree (op1, &ctx, ht);
14425 md5_finish_ctx (&ctx, checksum_before_op1);
14426 htab_empty (ht);
14428 md5_init_ctx (&ctx);
14429 fold_checksum_tree (op2, &ctx, ht);
14430 md5_finish_ctx (&ctx, checksum_before_op2);
14431 htab_empty (ht);
14432 #endif
14434 gcc_assert (TREE_CODE_CLASS (code) != tcc_vl_exp);
14435 tem = fold_ternary_loc (loc, code, type, op0, op1, op2);
14436 if (!tem)
14438 tem = build3_stat (code, type, op0, op1, op2 PASS_MEM_STAT);
14439 SET_EXPR_LOCATION (tem, loc);
14442 #ifdef ENABLE_FOLD_CHECKING
14443 md5_init_ctx (&ctx);
14444 fold_checksum_tree (op0, &ctx, ht);
14445 md5_finish_ctx (&ctx, checksum_after_op0);
14446 htab_empty (ht);
14448 if (memcmp (checksum_before_op0, checksum_after_op0, 16))
14449 fold_check_failed (op0, tem);
14451 md5_init_ctx (&ctx);
14452 fold_checksum_tree (op1, &ctx, ht);
14453 md5_finish_ctx (&ctx, checksum_after_op1);
14454 htab_empty (ht);
14456 if (memcmp (checksum_before_op1, checksum_after_op1, 16))
14457 fold_check_failed (op1, tem);
14459 md5_init_ctx (&ctx);
14460 fold_checksum_tree (op2, &ctx, ht);
14461 md5_finish_ctx (&ctx, checksum_after_op2);
14462 htab_delete (ht);
14464 if (memcmp (checksum_before_op2, checksum_after_op2, 16))
14465 fold_check_failed (op2, tem);
14466 #endif
14467 return tem;
14470 /* Fold a CALL_EXPR of type TYPE that calls function FN with the NARGS
14471 arguments in ARGARRAY and a null static chain.
14472 Return a folded expression if successful. Otherwise, return a CALL_EXPR
14473 of type TYPE from the given operands as constructed by build_call_array. */
14475 tree
14476 fold_build_call_array_loc (location_t loc, tree type, tree fn,
14477 int nargs, tree *argarray)
14479 tree tem;
14480 #ifdef ENABLE_FOLD_CHECKING
14481 unsigned char checksum_before_fn[16],
14482 checksum_before_arglist[16],
14483 checksum_after_fn[16],
14484 checksum_after_arglist[16];
14485 struct md5_ctx ctx;
14486 htab_t ht;
14487 int i;
14489 ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
14490 md5_init_ctx (&ctx);
14491 fold_checksum_tree (fn, &ctx, ht);
14492 md5_finish_ctx (&ctx, checksum_before_fn);
14493 htab_empty (ht);
14495 md5_init_ctx (&ctx);
14496 for (i = 0; i < nargs; i++)
14497 fold_checksum_tree (argarray[i], &ctx, ht);
14498 md5_finish_ctx (&ctx, checksum_before_arglist);
14499 htab_empty (ht);
14500 #endif
14502 tem = fold_builtin_call_array (loc, type, fn, nargs, argarray);
14504 #ifdef ENABLE_FOLD_CHECKING
14505 md5_init_ctx (&ctx);
14506 fold_checksum_tree (fn, &ctx, ht);
14507 md5_finish_ctx (&ctx, checksum_after_fn);
14508 htab_empty (ht);
14510 if (memcmp (checksum_before_fn, checksum_after_fn, 16))
14511 fold_check_failed (fn, tem);
14513 md5_init_ctx (&ctx);
14514 for (i = 0; i < nargs; i++)
14515 fold_checksum_tree (argarray[i], &ctx, ht);
14516 md5_finish_ctx (&ctx, checksum_after_arglist);
14517 htab_delete (ht);
14519 if (memcmp (checksum_before_arglist, checksum_after_arglist, 16))
14520 fold_check_failed (NULL_TREE, tem);
14521 #endif
14522 return tem;
14525 /* Perform constant folding and related simplification of initializer
14526 expression EXPR. These behave identically to "fold_buildN" but ignore
14527 potential run-time traps and exceptions that fold must preserve. */
14529 #define START_FOLD_INIT \
14530 int saved_signaling_nans = flag_signaling_nans;\
14531 int saved_trapping_math = flag_trapping_math;\
14532 int saved_rounding_math = flag_rounding_math;\
14533 int saved_trapv = flag_trapv;\
14534 int saved_folding_initializer = folding_initializer;\
14535 flag_signaling_nans = 0;\
14536 flag_trapping_math = 0;\
14537 flag_rounding_math = 0;\
14538 flag_trapv = 0;\
14539 folding_initializer = 1;
14541 #define END_FOLD_INIT \
14542 flag_signaling_nans = saved_signaling_nans;\
14543 flag_trapping_math = saved_trapping_math;\
14544 flag_rounding_math = saved_rounding_math;\
14545 flag_trapv = saved_trapv;\
14546 folding_initializer = saved_folding_initializer;
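/* Editorial example (sketch): under -frounding-math, fold will not
   evaluate an inexact expression such as 1.0/3.0 because the run-time
   rounding mode is unknown; in a static initializer there is no run
   time, so the wrappers below clear flag_rounding_math (and the other
   flags saved above) around the fold call and the division folds to a
   REAL_CST.  */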
14548 tree
14549 fold_build1_initializer_loc (location_t loc, enum tree_code code,
14550 tree type, tree op)
14552 tree result;
14553 START_FOLD_INIT;
14555 result = fold_build1_loc (loc, code, type, op);
14557 END_FOLD_INIT;
14558 return result;
14561 tree
14562 fold_build2_initializer_loc (location_t loc, enum tree_code code,
14563 tree type, tree op0, tree op1)
14565 tree result;
14566 START_FOLD_INIT;
14568 result = fold_build2_loc (loc, code, type, op0, op1);
14570 END_FOLD_INIT;
14571 return result;
14574 tree
14575 fold_build3_initializer_loc (location_t loc, enum tree_code code,
14576 tree type, tree op0, tree op1, tree op2)
14578 tree result;
14579 START_FOLD_INIT;
14581 result = fold_build3_loc (loc, code, type, op0, op1, op2);
14583 END_FOLD_INIT;
14584 return result;
14587 tree
14588 fold_build_call_array_initializer_loc (location_t loc, tree type, tree fn,
14589 int nargs, tree *argarray)
14591 tree result;
14592 START_FOLD_INIT;
14594 result = fold_build_call_array_loc (loc, type, fn, nargs, argarray);
14596 END_FOLD_INIT;
14597 return result;
14600 #undef START_FOLD_INIT
14601 #undef END_FOLD_INIT
14603 /* Determine whether the first argument is a multiple of the second argument.
14604 Return 0 if it is not, or if we cannot easily determine that it is.
14606 An example of the sort of thing we care about (at this point; this routine
14607 could surely be made more general, and expanded to do what the *_DIV_EXPR's
14608 fold cases do now) is discovering that
14610 SAVE_EXPR (I) * SAVE_EXPR (J * 8)
14612 is a multiple of
14614 SAVE_EXPR (J * 8)
14616 when we know that the two SAVE_EXPR (J * 8) nodes are the same node.
14618 This code also handles discovering that
14620 SAVE_EXPR (I) * SAVE_EXPR (J * 8)
14622 is a multiple of 8 so we don't have to worry about a
14623 possible remainder.
14625 Note that we *look* inside a SAVE_EXPR only to determine how it was
14626 calculated; it is not safe for fold to do much of anything else with the
14627 internals of a SAVE_EXPR, since it cannot know when it will be evaluated
14628 at run time. For example, the latter example above *cannot* be implemented
14629 as SAVE_EXPR (I) * J or any variant thereof, since the value of J at
14630 evaluation time of the original SAVE_EXPR is not necessarily the same at
14631 the time the new expression is evaluated. The only optimization of this
14632 sort that would be valid is changing
14634 SAVE_EXPR (I) * SAVE_EXPR (SAVE_EXPR (J) * 8)
14636 divided by 8 to
14638 SAVE_EXPR (I) * SAVE_EXPR (J)
14640 (where the same SAVE_EXPR (J) is used in the original and the
14641 transformed version). */
14643 static int
14644 multiple_of_p (tree type, const_tree top, const_tree bottom)
14646 if (operand_equal_p (top, bottom, 0))
14647 return 1;
14649 if (TREE_CODE (type) != INTEGER_TYPE)
14650 return 0;
14652 switch (TREE_CODE (top))
14654 case BIT_AND_EXPR:
14655 /* A bitwise AND can only clear bits, so if BOTTOM is a power of two
14656 and either operand of the AND is a multiple of it, TOP is too. */
14657 if (!integer_pow2p (bottom))
14658 return 0;
14659 /* FALLTHRU */
14661 case MULT_EXPR:
14662 return (multiple_of_p (type, TREE_OPERAND (top, 0), bottom)
14663 || multiple_of_p (type, TREE_OPERAND (top, 1), bottom));
14665 case PLUS_EXPR:
14666 case MINUS_EXPR:
14667 return (multiple_of_p (type, TREE_OPERAND (top, 0), bottom)
14668 && multiple_of_p (type, TREE_OPERAND (top, 1), bottom));
14670 case LSHIFT_EXPR:
14671 if (TREE_CODE (TREE_OPERAND (top, 1)) == INTEGER_CST)
14673 tree op1, t1;
14675 op1 = TREE_OPERAND (top, 1);
14676 /* const_binop may not detect overflow correctly,
14677 so check for it explicitly here. */
14678 if (TYPE_PRECISION (TREE_TYPE (size_one_node))
14679 > TREE_INT_CST_LOW (op1)
14680 && TREE_INT_CST_HIGH (op1) == 0
14681 && 0 != (t1 = fold_convert (type,
14682 const_binop (LSHIFT_EXPR,
14683 size_one_node,
14684 op1, 0)))
14685 && !TREE_OVERFLOW (t1))
14686 return multiple_of_p (type, t1, bottom);
14688 return 0;
14690 case NOP_EXPR:
14691 /* Can't handle conversions from non-integral or wider integral type. */
14692 if ((TREE_CODE (TREE_TYPE (TREE_OPERAND (top, 0))) != INTEGER_TYPE)
14693 || (TYPE_PRECISION (type)
14694 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (top, 0)))))
14695 return 0;
14697 /* ... fall through ... */
14699 case SAVE_EXPR:
14700 return multiple_of_p (type, TREE_OPERAND (top, 0), bottom);
14702 case INTEGER_CST:
14703 if (TREE_CODE (bottom) != INTEGER_CST
14704 || integer_zerop (bottom)
14705 || (TYPE_UNSIGNED (type)
14706 && (tree_int_cst_sgn (top) < 0
14707 || tree_int_cst_sgn (bottom) < 0)))
14708 return 0;
14709 return integer_zerop (int_const_binop (TRUNC_MOD_EXPR,
14710 top, bottom, 0));
14712 default:
14713 return 0;
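/* Editorial usage sketch: with TOP the tree for "i * 8" (a MULT_EXPR
   whose second operand is the INTEGER_CST 8) and BOTTOM the
   INTEGER_CST 4, the MULT_EXPR case above succeeds on the constant
   operand, since the INTEGER_CST case computes 8 mod 4 == 0, so
   multiple_of_p returns 1.  */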
14717 /* Return true if an expression with code CODE and type TYPE is known
14717 to be non-negative from the code and type alone. */
14719 static bool
14720 tree_simple_nonnegative_warnv_p (enum tree_code code, tree type)
14722 if ((TYPE_PRECISION (type) != 1 || TYPE_UNSIGNED (type))
14723 && truth_value_p (code))
14724 /* Truth values evaluate to 0 or 1, which is nonnegative unless we
14725 have a signed:1 type (where the values are -1 and 0). */
14726 return true;
14727 return false;
14730 /* Return true if (CODE OP0) is known to be non-negative. If the return
14731 value is based on the assumption that signed overflow is undefined,
14732 set *STRICT_OVERFLOW_P to true; otherwise, don't change
14733 *STRICT_OVERFLOW_P. */
14735 bool
14736 tree_unary_nonnegative_warnv_p (enum tree_code code, tree type, tree op0,
14737 bool *strict_overflow_p)
14739 if (TYPE_UNSIGNED (type))
14740 return true;
14742 switch (code)
14744 case ABS_EXPR:
14745 /* We can't return 1 if flag_wrapv is set because
14746 ABS_EXPR<INT_MIN> = INT_MIN. */
14747 if (!INTEGRAL_TYPE_P (type))
14748 return true;
14749 if (TYPE_OVERFLOW_UNDEFINED (type))
14751 *strict_overflow_p = true;
14752 return true;
14754 break;
14756 case NON_LVALUE_EXPR:
14757 case FLOAT_EXPR:
14758 case FIX_TRUNC_EXPR:
14759 return tree_expr_nonnegative_warnv_p (op0,
14760 strict_overflow_p);
14762 case NOP_EXPR:
14764 tree inner_type = TREE_TYPE (op0);
14765 tree outer_type = type;
14767 if (TREE_CODE (outer_type) == REAL_TYPE)
14769 if (TREE_CODE (inner_type) == REAL_TYPE)
14770 return tree_expr_nonnegative_warnv_p (op0,
14771 strict_overflow_p);
14772 if (TREE_CODE (inner_type) == INTEGER_TYPE)
14774 if (TYPE_UNSIGNED (inner_type))
14775 return true;
14776 return tree_expr_nonnegative_warnv_p (op0,
14777 strict_overflow_p);
14780 else if (TREE_CODE (outer_type) == INTEGER_TYPE)
14782 if (TREE_CODE (inner_type) == REAL_TYPE)
14783 return tree_expr_nonnegative_warnv_p (op0,
14784 strict_overflow_p);
14785 if (TREE_CODE (inner_type) == INTEGER_TYPE)
14786 return TYPE_PRECISION (inner_type) < TYPE_PRECISION (outer_type)
14787 && TYPE_UNSIGNED (inner_type);
14790 break;
14792 default:
14793 return tree_simple_nonnegative_warnv_p (code, type);
14796 /* We don't know sign of `t', so be conservative and return false. */
14797 return false;
14800 /* Return true if (CODE OP0 OP1) is known to be non-negative. If the return
14801 value is based on the assumption that signed overflow is undefined,
14802 set *STRICT_OVERFLOW_P to true; otherwise, don't change
14803 *STRICT_OVERFLOW_P. */
14805 bool
14806 tree_binary_nonnegative_warnv_p (enum tree_code code, tree type, tree op0,
14807 tree op1, bool *strict_overflow_p)
14809 if (TYPE_UNSIGNED (type))
14810 return true;
14812 switch (code)
14814 case POINTER_PLUS_EXPR:
14815 case PLUS_EXPR:
14816 if (FLOAT_TYPE_P (type))
14817 return (tree_expr_nonnegative_warnv_p (op0,
14818 strict_overflow_p)
14819 && tree_expr_nonnegative_warnv_p (op1,
14820 strict_overflow_p));
14822 /* zero_extend(x) + zero_extend(y) is non-negative if x and y are
14823 both unsigned and at least 2 bits shorter than the result. */
14824 if (TREE_CODE (type) == INTEGER_TYPE
14825 && TREE_CODE (op0) == NOP_EXPR
14826 && TREE_CODE (op1) == NOP_EXPR)
14828 tree inner1 = TREE_TYPE (TREE_OPERAND (op0, 0));
14829 tree inner2 = TREE_TYPE (TREE_OPERAND (op1, 0));
14830 if (TREE_CODE (inner1) == INTEGER_TYPE && TYPE_UNSIGNED (inner1)
14831 && TREE_CODE (inner2) == INTEGER_TYPE && TYPE_UNSIGNED (inner2))
14833 unsigned int prec = MAX (TYPE_PRECISION (inner1),
14834 TYPE_PRECISION (inner2)) + 1;
14835 return prec < TYPE_PRECISION (type);
14838 break;
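/* Worked example (editorial sketch): two unsigned chars zero-extended
   and added as 32-bit int: each addend is < 2^8, so the sum is < 2^9;
   prec computes to MAX (8, 8) + 1 == 9 < 32, hence the sign bit can
   never be set and the sum is known non-negative.  */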
14840 case MULT_EXPR:
14841 if (FLOAT_TYPE_P (type))
14843 /* x * x for floating point x is always non-negative. */
14844 if (operand_equal_p (op0, op1, 0))
14845 return true;
14846 return (tree_expr_nonnegative_warnv_p (op0,
14847 strict_overflow_p)
14848 && tree_expr_nonnegative_warnv_p (op1,
14849 strict_overflow_p));
14852 /* zero_extend(x) * zero_extend(y) is non-negative if x and y are
14853 both unsigned and their combined precision is less than that of the result. */
14854 if (TREE_CODE (type) == INTEGER_TYPE
14855 && (TREE_CODE (op0) == NOP_EXPR || TREE_CODE (op0) == INTEGER_CST)
14856 && (TREE_CODE (op1) == NOP_EXPR || TREE_CODE (op1) == INTEGER_CST))
14858 tree inner0 = (TREE_CODE (op0) == NOP_EXPR)
14859 ? TREE_TYPE (TREE_OPERAND (op0, 0))
14860 : TREE_TYPE (op0);
14861 tree inner1 = (TREE_CODE (op1) == NOP_EXPR)
14862 ? TREE_TYPE (TREE_OPERAND (op1, 0))
14863 : TREE_TYPE (op1);
14865 bool unsigned0 = TYPE_UNSIGNED (inner0);
14866 bool unsigned1 = TYPE_UNSIGNED (inner1);
14868 if (TREE_CODE (op0) == INTEGER_CST)
14869 unsigned0 = unsigned0 || tree_int_cst_sgn (op0) >= 0;
14871 if (TREE_CODE (op1) == INTEGER_CST)
14872 unsigned1 = unsigned1 || tree_int_cst_sgn (op1) >= 0;
14874 if (TREE_CODE (inner0) == INTEGER_TYPE && unsigned0
14875 && TREE_CODE (inner1) == INTEGER_TYPE && unsigned1)
14877 unsigned int precision0 = (TREE_CODE (op0) == INTEGER_CST)
14878 ? tree_int_cst_min_precision (op0, /*unsignedp=*/true)
14879 : TYPE_PRECISION (inner0);
14881 unsigned int precision1 = (TREE_CODE (op1) == INTEGER_CST)
14882 ? tree_int_cst_min_precision (op1, /*unsignedp=*/true)
14883 : TYPE_PRECISION (inner1);
14885 return precision0 + precision1 < TYPE_PRECISION (type);
14888 return false;
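/* Worked example (editorial sketch): two unsigned shorts multiplied as
   a 64-bit integer: the product is < 2^16 * 2^16 == 2^32, and
   precision0 + precision1 == 32 < 64, so the result cannot reach the
   sign bit and is known non-negative.  */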
14890 case BIT_AND_EXPR:
14891 case MAX_EXPR:
14892 return (tree_expr_nonnegative_warnv_p (op0,
14893 strict_overflow_p)
14894 || tree_expr_nonnegative_warnv_p (op1,
14895 strict_overflow_p));
14897 case BIT_IOR_EXPR:
14898 case BIT_XOR_EXPR:
14899 case MIN_EXPR:
14900 case RDIV_EXPR:
14901 case TRUNC_DIV_EXPR:
14902 case CEIL_DIV_EXPR:
14903 case FLOOR_DIV_EXPR:
14904 case ROUND_DIV_EXPR:
14905 return (tree_expr_nonnegative_warnv_p (op0,
14906 strict_overflow_p)
14907 && tree_expr_nonnegative_warnv_p (op1,
14908 strict_overflow_p));
14910 case TRUNC_MOD_EXPR:
14911 case CEIL_MOD_EXPR:
14912 case FLOOR_MOD_EXPR:
14913 case ROUND_MOD_EXPR:
14914 return tree_expr_nonnegative_warnv_p (op0,
14915 strict_overflow_p);
14916 default:
14917 return tree_simple_nonnegative_warnv_p (code, type);
14920 /* We don't know sign of `t', so be conservative and return false. */
14921 return false;
14924 /* Return true if T is known to be non-negative. If the return
14925 value is based on the assumption that signed overflow is undefined,
14926 set *STRICT_OVERFLOW_P to true; otherwise, don't change
14927 *STRICT_OVERFLOW_P. */
14929 bool
14930 tree_single_nonnegative_warnv_p (tree t, bool *strict_overflow_p)
14932 if (TYPE_UNSIGNED (TREE_TYPE (t)))
14933 return true;
14935 switch (TREE_CODE (t))
14937 case INTEGER_CST:
14938 return tree_int_cst_sgn (t) >= 0;
14940 case REAL_CST:
14941 return ! REAL_VALUE_NEGATIVE (TREE_REAL_CST (t));
14943 case FIXED_CST:
14944 return ! FIXED_VALUE_NEGATIVE (TREE_FIXED_CST (t));
14946 case COND_EXPR:
14947 return (tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 1),
14948 strict_overflow_p)
14949 && tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 2),
14950 strict_overflow_p));
14951 default:
14952 return tree_simple_nonnegative_warnv_p (TREE_CODE (t),
14953 TREE_TYPE (t));
14955 /* We don't know sign of `t', so be conservative and return false. */
14956 return false;
14959 /* Return true if T is known to be non-negative. If the return
14960 value is based on the assumption that signed overflow is undefined,
14961 set *STRICT_OVERFLOW_P to true; otherwise, don't change
14962 *STRICT_OVERFLOW_P. */
14964 bool
14965 tree_call_nonnegative_warnv_p (tree type, tree fndecl,
14966 tree arg0, tree arg1, bool *strict_overflow_p)
14968 if (fndecl && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL)
14969 switch (DECL_FUNCTION_CODE (fndecl))
14971 CASE_FLT_FN (BUILT_IN_ACOS):
14972 CASE_FLT_FN (BUILT_IN_ACOSH):
14973 CASE_FLT_FN (BUILT_IN_CABS):
14974 CASE_FLT_FN (BUILT_IN_COSH):
14975 CASE_FLT_FN (BUILT_IN_ERFC):
14976 CASE_FLT_FN (BUILT_IN_EXP):
14977 CASE_FLT_FN (BUILT_IN_EXP10):
14978 CASE_FLT_FN (BUILT_IN_EXP2):
14979 CASE_FLT_FN (BUILT_IN_FABS):
14980 CASE_FLT_FN (BUILT_IN_FDIM):
14981 CASE_FLT_FN (BUILT_IN_HYPOT):
14982 CASE_FLT_FN (BUILT_IN_POW10):
14983 CASE_INT_FN (BUILT_IN_FFS):
14984 CASE_INT_FN (BUILT_IN_PARITY):
14985 CASE_INT_FN (BUILT_IN_POPCOUNT):
14986 case BUILT_IN_BSWAP32:
14987 case BUILT_IN_BSWAP64:
14988 /* Always true. */
14989 return true;
14991 CASE_FLT_FN (BUILT_IN_SQRT):
14992 /* sqrt(-0.0) is -0.0. */
14993 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (type)))
14994 return true;
14995 return tree_expr_nonnegative_warnv_p (arg0,
14996 strict_overflow_p);
14998 CASE_FLT_FN (BUILT_IN_ASINH):
14999 CASE_FLT_FN (BUILT_IN_ATAN):
15000 CASE_FLT_FN (BUILT_IN_ATANH):
15001 CASE_FLT_FN (BUILT_IN_CBRT):
15002 CASE_FLT_FN (BUILT_IN_CEIL):
15003 CASE_FLT_FN (BUILT_IN_ERF):
15004 CASE_FLT_FN (BUILT_IN_EXPM1):
15005 CASE_FLT_FN (BUILT_IN_FLOOR):
15006 CASE_FLT_FN (BUILT_IN_FMOD):
15007 CASE_FLT_FN (BUILT_IN_FREXP):
15008 CASE_FLT_FN (BUILT_IN_LCEIL):
15009 CASE_FLT_FN (BUILT_IN_LDEXP):
15010 CASE_FLT_FN (BUILT_IN_LFLOOR):
15011 CASE_FLT_FN (BUILT_IN_LLCEIL):
15012 CASE_FLT_FN (BUILT_IN_LLFLOOR):
15013 CASE_FLT_FN (BUILT_IN_LLRINT):
15014 CASE_FLT_FN (BUILT_IN_LLROUND):
15015 CASE_FLT_FN (BUILT_IN_LRINT):
15016 CASE_FLT_FN (BUILT_IN_LROUND):
15017 CASE_FLT_FN (BUILT_IN_MODF):
15018 CASE_FLT_FN (BUILT_IN_NEARBYINT):
15019 CASE_FLT_FN (BUILT_IN_RINT):
15020 CASE_FLT_FN (BUILT_IN_ROUND):
15021 CASE_FLT_FN (BUILT_IN_SCALB):
15022 CASE_FLT_FN (BUILT_IN_SCALBLN):
15023 CASE_FLT_FN (BUILT_IN_SCALBN):
15024 CASE_FLT_FN (BUILT_IN_SIGNBIT):
15025 CASE_FLT_FN (BUILT_IN_SIGNIFICAND):
15026 CASE_FLT_FN (BUILT_IN_SINH):
15027 CASE_FLT_FN (BUILT_IN_TANH):
15028 CASE_FLT_FN (BUILT_IN_TRUNC):
15029 /* True if the 1st argument is nonnegative. */
15030 return tree_expr_nonnegative_warnv_p (arg0,
15031 strict_overflow_p);
15033 CASE_FLT_FN (BUILT_IN_FMAX):
15034 /* True if the 1st OR the 2nd argument is nonnegative. */
15035 return (tree_expr_nonnegative_warnv_p (arg0,
15036 strict_overflow_p)
15037 || (tree_expr_nonnegative_warnv_p (arg1,
15038 strict_overflow_p)));
15040 CASE_FLT_FN (BUILT_IN_FMIN):
15041 /* True if the 1st AND 2nd arguments are nonnegative. */
15042 return (tree_expr_nonnegative_warnv_p (arg0,
15043 strict_overflow_p)
15044 && (tree_expr_nonnegative_warnv_p (arg1,
15045 strict_overflow_p)));
15047 CASE_FLT_FN (BUILT_IN_COPYSIGN):
15048 /* True if the 2nd argument is nonnegative. */
15049 return tree_expr_nonnegative_warnv_p (arg1,
15050 strict_overflow_p);
15052 CASE_FLT_FN (BUILT_IN_POWI):
15053 /* True if the 1st argument is nonnegative or the second
15054 argument is an even integer. */
15055 if (TREE_CODE (arg1) == INTEGER_CST
15056 && (TREE_INT_CST_LOW (arg1) & 1) == 0)
15057 return true;
15058 return tree_expr_nonnegative_warnv_p (arg0,
15059 strict_overflow_p);
15061 CASE_FLT_FN (BUILT_IN_POW):
15062 /* True if the 1st argument is nonnegative or the second
15063 argument is an even integer valued real. */
15064 if (TREE_CODE (arg1) == REAL_CST)
15066 REAL_VALUE_TYPE c;
15067 HOST_WIDE_INT n;
15069 c = TREE_REAL_CST (arg1);
15070 n = real_to_integer (&c);
15071 if ((n & 1) == 0)
15073 REAL_VALUE_TYPE cint;
15074 real_from_integer (&cint, VOIDmode, n,
15075 n < 0 ? -1 : 0, 0);
15076 if (real_identical (&c, &cint))
15077 return true;
15080 return tree_expr_nonnegative_warnv_p (arg0,
15081 strict_overflow_p);
15083 default:
15084 break;
15086 return tree_simple_nonnegative_warnv_p (CALL_EXPR,
15087 type);
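/* Editorial example (sketch): for pow (x, 2.0) the REAL_CST 2.0
   converts to the even integer 2 and round-trips through
   real_from_integer unchanged, so the call is known non-negative for
   any x; pow (x, 3.0) instead falls back to asking whether x itself
   is non-negative.  */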
15090 /* Return true if T is known to be non-negative. If the return
15091 value is based on the assumption that signed overflow is undefined,
15092 set *STRICT_OVERFLOW_P to true; otherwise, don't change
15093 *STRICT_OVERFLOW_P. */
15095 bool
15096 tree_invalid_nonnegative_warnv_p (tree t, bool *strict_overflow_p)
15098 enum tree_code code = TREE_CODE (t);
15099 if (TYPE_UNSIGNED (TREE_TYPE (t)))
15100 return true;
15102 switch (code)
15104 case TARGET_EXPR:
15106 tree temp = TARGET_EXPR_SLOT (t);
15107 t = TARGET_EXPR_INITIAL (t);
15109 /* If the initializer is non-void, then it's a normal expression
15110 that will be assigned to the slot. */
15111 if (!VOID_TYPE_P (t))
15112 return tree_expr_nonnegative_warnv_p (t, strict_overflow_p);
15114 /* Otherwise, the initializer sets the slot in some way. One common
15115 way is an assignment statement at the end of the initializer. */
15116 while (1)
15118 if (TREE_CODE (t) == BIND_EXPR)
15119 t = expr_last (BIND_EXPR_BODY (t));
15120 else if (TREE_CODE (t) == TRY_FINALLY_EXPR
15121 || TREE_CODE (t) == TRY_CATCH_EXPR)
15122 t = expr_last (TREE_OPERAND (t, 0));
15123 else if (TREE_CODE (t) == STATEMENT_LIST)
15124 t = expr_last (t);
15125 else
15126 break;
15128 if (TREE_CODE (t) == MODIFY_EXPR
15129 && TREE_OPERAND (t, 0) == temp)
15130 return tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 1),
15131 strict_overflow_p);
15133 return false;
15136 case CALL_EXPR:
15138 tree arg0 = call_expr_nargs (t) > 0 ? CALL_EXPR_ARG (t, 0) : NULL_TREE;
15139 tree arg1 = call_expr_nargs (t) > 1 ? CALL_EXPR_ARG (t, 1) : NULL_TREE;
15141 return tree_call_nonnegative_warnv_p (TREE_TYPE (t),
15142 get_callee_fndecl (t),
15143 arg0,
15144 arg1,
15145 strict_overflow_p);
15147 case COMPOUND_EXPR:
15148 case MODIFY_EXPR:
15149 return tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 1),
15150 strict_overflow_p);
15151 case BIND_EXPR:
15152 return tree_expr_nonnegative_warnv_p (expr_last (TREE_OPERAND (t, 1)),
15153 strict_overflow_p);
15154 case SAVE_EXPR:
15155 return tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 0),
15156 strict_overflow_p);
15158 default:
15159 return tree_simple_nonnegative_warnv_p (TREE_CODE (t),
15160 TREE_TYPE (t));
15163 /* We don't know sign of `t', so be conservative and return false. */
15164 return false;
15167 /* Return true if T is known to be non-negative. If the return
15168 value is based on the assumption that signed overflow is undefined,
15169 set *STRICT_OVERFLOW_P to true; otherwise, don't change
15170 *STRICT_OVERFLOW_P. */
15172 bool
15173 tree_expr_nonnegative_warnv_p (tree t, bool *strict_overflow_p)
15175 enum tree_code code;
15176 if (t == error_mark_node)
15177 return false;
15179 code = TREE_CODE (t);
15180 switch (TREE_CODE_CLASS (code))
15182 case tcc_binary:
15183 case tcc_comparison:
15184 return tree_binary_nonnegative_warnv_p (TREE_CODE (t),
15185 TREE_TYPE (t),
15186 TREE_OPERAND (t, 0),
15187 TREE_OPERAND (t, 1),
15188 strict_overflow_p);
15190 case tcc_unary:
15191 return tree_unary_nonnegative_warnv_p (TREE_CODE (t),
15192 TREE_TYPE (t),
15193 TREE_OPERAND (t, 0),
15194 strict_overflow_p);
15196 case tcc_constant:
15197 case tcc_declaration:
15198 case tcc_reference:
15199 return tree_single_nonnegative_warnv_p (t, strict_overflow_p);
15201 default:
15202 break;
15205 switch (code)
15207 case TRUTH_AND_EXPR:
15208 case TRUTH_OR_EXPR:
15209 case TRUTH_XOR_EXPR:
15210 return tree_binary_nonnegative_warnv_p (TREE_CODE (t),
15211 TREE_TYPE (t),
15212 TREE_OPERAND (t, 0),
15213 TREE_OPERAND (t, 1),
15214 strict_overflow_p);
15215 case TRUTH_NOT_EXPR:
15216 return tree_unary_nonnegative_warnv_p (TREE_CODE (t),
15217 TREE_TYPE (t),
15218 TREE_OPERAND (t, 0),
15219 strict_overflow_p);
15221 case COND_EXPR:
15222 case CONSTRUCTOR:
15223 case OBJ_TYPE_REF:
15224 case ASSERT_EXPR:
15225 case ADDR_EXPR:
15226 case WITH_SIZE_EXPR:
15227 case EXC_PTR_EXPR:
15228 case SSA_NAME:
15229 case FILTER_EXPR:
15230 return tree_single_nonnegative_warnv_p (t, strict_overflow_p);
15232 default:
15233 return tree_invalid_nonnegative_warnv_p (t, strict_overflow_p);
15237 /* Return true if `t' is known to be non-negative. Handle warnings
15238 about undefined signed overflow. */
15240 bool
15241 tree_expr_nonnegative_p (tree t)
15243 bool ret, strict_overflow_p;
15245 strict_overflow_p = false;
15246 ret = tree_expr_nonnegative_warnv_p (t, &strict_overflow_p);
15247 if (strict_overflow_p)
15248 fold_overflow_warning (("assuming signed overflow does not occur when "
15249 "determining that expression is always "
15250 "non-negative"),
15251 WARN_STRICT_OVERFLOW_MISC);
15252 return ret;
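/* Editorial example (sketch): for signed int x, tree_expr_nonnegative_p
   on ABS_EXPR <x> returns true only by assuming signed overflow is
   undefined (with wrapping overflow, ABS_EXPR <INT_MIN> is INT_MIN),
   so the ABS_EXPR case sets *STRICT_OVERFLOW_P and this wrapper emits
   the -Wstrict-overflow note via fold_overflow_warning.  */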
15256 /* Return true when (CODE OP0) is known to be nonzero.
15257 For floating point we further ensure that T is not denormal.
15258 Similar logic is present in nonzero_address in rtlanal.c.
15260 If the return value is based on the assumption that signed overflow
15261 is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
15262 change *STRICT_OVERFLOW_P. */
15264 bool
15265 tree_unary_nonzero_warnv_p (enum tree_code code, tree type, tree op0,
15266 bool *strict_overflow_p)
15268 switch (code)
15270 case ABS_EXPR:
15271 return tree_expr_nonzero_warnv_p (op0,
15272 strict_overflow_p);
15274 case NOP_EXPR:
15276 tree inner_type = TREE_TYPE (op0);
15277 tree outer_type = type;
15279 return (TYPE_PRECISION (outer_type) >= TYPE_PRECISION (inner_type)
15280 && tree_expr_nonzero_warnv_p (op0,
15281 strict_overflow_p));
15283 break;
15285 case NON_LVALUE_EXPR:
15286 return tree_expr_nonzero_warnv_p (op0,
15287 strict_overflow_p);
15289 default:
15290 break;
15293 return false;
15296 /* Return true when (CODE OP0 OP1) is known to be nonzero.
15297 For floating point we further ensure that T is not denormal.
15298 Similar logic is present in nonzero_address in rtlanal.c.
15300 If the return value is based on the assumption that signed overflow
15301 is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
15302 change *STRICT_OVERFLOW_P. */
15304 bool
15305 tree_binary_nonzero_warnv_p (enum tree_code code,
15306 tree type,
15307 tree op0,
15308 tree op1, bool *strict_overflow_p)
15310 bool sub_strict_overflow_p;
15311 switch (code)
15313 case POINTER_PLUS_EXPR:
15314 case PLUS_EXPR:
15315 if (TYPE_OVERFLOW_UNDEFINED (type))
15317 /* In the presence of negative values it is hard
15318 to say anything definite. */
15319 sub_strict_overflow_p = false;
15320 if (!tree_expr_nonnegative_warnv_p (op0,
15321 &sub_strict_overflow_p)
15322 || !tree_expr_nonnegative_warnv_p (op1,
15323 &sub_strict_overflow_p))
15324 return false;
15325 /* Both operands are non-negative here, so the sum is non-zero if either one is. */
15326 /* We don't set *STRICT_OVERFLOW_P here: even if this value
15327 overflows, on a twos-complement machine the sum of two
15328 nonnegative numbers can never be zero. */
15329 return (tree_expr_nonzero_warnv_p (op0,
15330 strict_overflow_p)
15331 || tree_expr_nonzero_warnv_p (op1,
15332 strict_overflow_p));
15334 break;
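/* Editorial example (sketch): with signed overflow assumed undefined
   (-fstrict-overflow), the tree ABS_EXPR <x> + 1 is known non-zero:
   both operands are non-negative (the first under the undefined
   overflow assumption) and 1 is non-zero, and as noted above a sum of
   two non-negative values is zero only when both are.  */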
15336 case MULT_EXPR:
15337 if (TYPE_OVERFLOW_UNDEFINED (type))
15339 if (tree_expr_nonzero_warnv_p (op0,
15340 strict_overflow_p)
15341 && tree_expr_nonzero_warnv_p (op1,
15342 strict_overflow_p))
15344 *strict_overflow_p = true;
15345 return true;
15348 break;
15350 case MIN_EXPR:
15351 sub_strict_overflow_p = false;
15352 if (tree_expr_nonzero_warnv_p (op0,
15353 &sub_strict_overflow_p)
15354 && tree_expr_nonzero_warnv_p (op1,
15355 &sub_strict_overflow_p))
15357 if (sub_strict_overflow_p)
15358 *strict_overflow_p = true;
15360 break;
15362 case MAX_EXPR:
15363 sub_strict_overflow_p = false;
15364 if (tree_expr_nonzero_warnv_p (op0,
15365 &sub_strict_overflow_p))
15367 if (sub_strict_overflow_p)
15368 *strict_overflow_p = true;
15370 /* When both operands are nonzero, then MAX must be too. */
15371 if (tree_expr_nonzero_warnv_p (op1,
15372 strict_overflow_p))
15373 return true;
15375 /* MAX where operand 0 is positive is positive. */
15376 return tree_expr_nonnegative_warnv_p (op0,
15377 strict_overflow_p);
15379 /* MAX where operand 1 is positive is positive. */
15380 else if (tree_expr_nonzero_warnv_p (op1,
15381 &sub_strict_overflow_p)
15382 && tree_expr_nonnegative_warnv_p (op1,
15383 &sub_strict_overflow_p))
15385 if (sub_strict_overflow_p)
15386 *strict_overflow_p = true;
15387 return true;
15389 break;
15391 case BIT_IOR_EXPR:
15392 return (tree_expr_nonzero_warnv_p (op1,
15393 strict_overflow_p)
15394 || tree_expr_nonzero_warnv_p (op0,
15395 strict_overflow_p));
15397 default:
15398 break;
15401 return false;
15404 /* Return true when T is known to be nonzero.
15405 For floating point we further ensure that T is not denormal.
15406 Similar logic is present in nonzero_address in rtlanal.c.
15408 If the return value is based on the assumption that signed overflow
15409 is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
15410 change *STRICT_OVERFLOW_P. */
15412 bool
15413 tree_single_nonzero_warnv_p (tree t, bool *strict_overflow_p)
15415 bool sub_strict_overflow_p;
15416 switch (TREE_CODE (t))
15418 case INTEGER_CST:
15419 return !integer_zerop (t);
15421 case ADDR_EXPR:
15423 tree base = get_base_address (TREE_OPERAND (t, 0));
15425 if (!base)
15426 return false;
15428 /* Weak declarations may resolve to NULL. Other objects may also end up
15429 at address zero, so require -fdelete-null-pointer-checks for them; but
15430 variables allocated on the stack can never be NULL. */
15431 if (DECL_P (base)
15432 && (flag_delete_null_pointer_checks
15433 || (TREE_CODE (base) == VAR_DECL && !TREE_STATIC (base))))
15434 return !VAR_OR_FUNCTION_DECL_P (base) || !DECL_WEAK (base);
15436 /* Constants are never weak. */
15437 if (CONSTANT_CLASS_P (base))
15438 return true;
15440 return false;
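/* Editorial example (sketch): &stack_local and &"literal" are always
   non-zero here.  For "extern int w __attribute__ ((weak));", &w is
   not, even with -fdelete-null-pointer-checks in effect: an undefined
   weak symbol may resolve to address 0, which the DECL_WEAK test
   above rejects.  */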
15443 case COND_EXPR:
15444 sub_strict_overflow_p = false;
15445 if (tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 1),
15446 &sub_strict_overflow_p)
15447 && tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 2),
15448 &sub_strict_overflow_p))
15450 if (sub_strict_overflow_p)
15451 *strict_overflow_p = true;
15452 return true;
15454 break;
15456 default:
15457 break;
15459 return false;
15462 /* Return true when T is known to be nonzero.
15463 For floating point we further ensure that T is not denormal.
15464 Similar logic is present in nonzero_address in rtlanal.c.
15466 If the return value is based on the assumption that signed overflow
15467 is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
15468 change *STRICT_OVERFLOW_P. */
15470 bool
15471 tree_expr_nonzero_warnv_p (tree t, bool *strict_overflow_p)
15473 tree type = TREE_TYPE (t);
15474 enum tree_code code;
15476 /* Doing something useful for floating point would need more work. */
15477 if (!INTEGRAL_TYPE_P (type) && !POINTER_TYPE_P (type))
15478 return false;
15480 code = TREE_CODE (t);
15481 switch (TREE_CODE_CLASS (code))
15483 case tcc_unary:
15484 return tree_unary_nonzero_warnv_p (code, type, TREE_OPERAND (t, 0),
15485 strict_overflow_p);
15486 case tcc_binary:
15487 case tcc_comparison:
15488 return tree_binary_nonzero_warnv_p (code, type,
15489 TREE_OPERAND (t, 0),
15490 TREE_OPERAND (t, 1),
15491 strict_overflow_p);
15492 case tcc_constant:
15493 case tcc_declaration:
15494 case tcc_reference:
15495 return tree_single_nonzero_warnv_p (t, strict_overflow_p);
15497 default:
15498 break;
15501 switch (code)
15503 case TRUTH_NOT_EXPR:
15504 return tree_unary_nonzero_warnv_p (code, type, TREE_OPERAND (t, 0),
15505 strict_overflow_p);
15507 case TRUTH_AND_EXPR:
15508 case TRUTH_OR_EXPR:
15509 case TRUTH_XOR_EXPR:
15510 return tree_binary_nonzero_warnv_p (code, type,
15511 TREE_OPERAND (t, 0),
15512 TREE_OPERAND (t, 1),
15513 strict_overflow_p);
15515 case COND_EXPR:
15516 case CONSTRUCTOR:
15517 case OBJ_TYPE_REF:
15518 case ASSERT_EXPR:
15519 case ADDR_EXPR:
15520 case WITH_SIZE_EXPR:
15521 case EXC_PTR_EXPR:
15522 case SSA_NAME:
15523 case FILTER_EXPR:
15524 return tree_single_nonzero_warnv_p (t, strict_overflow_p);
15526 case COMPOUND_EXPR:
15527 case MODIFY_EXPR:
15528 case BIND_EXPR:
15529 return tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 1),
15530 strict_overflow_p);
15532 case SAVE_EXPR:
15533 return tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 0),
15534 strict_overflow_p);
15536 case CALL_EXPR:
15537 return alloca_call_p (t);
15539 default:
15540 break;
15542 return false;
15545 /* Return true when T is known to be nonzero.
15546 Handle warnings about undefined signed overflow. */
15548 bool
15549 tree_expr_nonzero_p (tree t)
15551 bool ret, strict_overflow_p;
15553 strict_overflow_p = false;
15554 ret = tree_expr_nonzero_warnv_p (t, &strict_overflow_p);
15555 if (strict_overflow_p)
15556 fold_overflow_warning (("assuming signed overflow does not occur when "
15557 "determining that expression is always "
15558 "non-zero"),
15559 WARN_STRICT_OVERFLOW_MISC);
15560 return ret;
15563 /* Given the components of a binary expression CODE, TYPE, OP0 and OP1,
15564 attempt to fold the expression to a constant without modifying TYPE,
15565 OP0 or OP1.
15567 If the expression could be simplified to a constant, then return
15568 the constant. If the expression would not be simplified to a
15569 constant, then return NULL_TREE. */
15571 tree
15572 fold_binary_to_constant (enum tree_code code, tree type, tree op0, tree op1)
15574 tree tem = fold_binary (code, type, op0, op1);
15575 return (tem && TREE_CONSTANT (tem)) ? tem : NULL_TREE;
15578 /* Given the components of a unary expression CODE, TYPE and OP0,
15579 attempt to fold the expression to a constant without modifying
15580 TYPE or OP0.
15582 If the expression could be simplified to a constant, then return
15583 the constant. If the expression would not be simplified to a
15584 constant, then return NULL_TREE. */
15586 tree
15587 fold_unary_to_constant (enum tree_code code, tree type, tree op0)
15589 tree tem = fold_unary (code, type, op0);
15590 return (tem && TREE_CONSTANT (tem)) ? tem : NULL_TREE;
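#if 0
/* Editorial usage sketch (not part of the original source): these
   wrappers only report simplifications that reach a constant.  */
static void
example_fold_to_constant (void)
{
  tree two = build_int_cst (integer_type_node, 2);
  tree three = build_int_cst (integer_type_node, 3);
  /* 2 + 3 simplifies to the INTEGER_CST 5, which is TREE_CONSTANT,
     so the constant is returned.  With a VAR_DECL operand any fold
     result would not be constant and NULL_TREE is returned.  */
  tree five = fold_binary_to_constant (PLUS_EXPR, integer_type_node,
				       two, three);
  (void) five;
}
#endif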
15593 /* If EXP represents referencing an element in a constant string
15594 (either via pointer arithmetic or array indexing), return the
15595 tree representing the value accessed, otherwise return NULL. */
15597 tree
15598 fold_read_from_constant_string (tree exp)
15600 if ((TREE_CODE (exp) == INDIRECT_REF
15601 || TREE_CODE (exp) == ARRAY_REF)
15602 && TREE_CODE (TREE_TYPE (exp)) == INTEGER_TYPE)
15604 tree exp1 = TREE_OPERAND (exp, 0);
15605 tree index;
15606 tree string;
15607 location_t loc = EXPR_LOCATION (exp);
15609 if (TREE_CODE (exp) == INDIRECT_REF)
15610 string = string_constant (exp1, &index);
15611 else
15613 tree low_bound = array_ref_low_bound (exp);
15614 index = fold_convert_loc (loc, sizetype, TREE_OPERAND (exp, 1));
15616 /* Optimize the special case of a zero lower bound.
15618 We convert the low_bound to sizetype to avoid some problems
15619 with constant folding. (E.g. suppose the lower bound is 1,
15620 and its mode is QI. Without the conversion, (ARRAY
15621 +(INDEX-(unsigned char)1)) becomes ((ARRAY+(-(unsigned char)1))
15622 +INDEX), which becomes (ARRAY+255+INDEX). Oops!) */
15623 if (! integer_zerop (low_bound))
15624 index = size_diffop_loc (loc, index,
15625 fold_convert_loc (loc, sizetype, low_bound));
15627 string = exp1;
15630 if (string
15631 && TYPE_MODE (TREE_TYPE (exp)) == TYPE_MODE (TREE_TYPE (TREE_TYPE (string)))
15632 && TREE_CODE (string) == STRING_CST
15633 && TREE_CODE (index) == INTEGER_CST
15634 && compare_tree_int (index, TREE_STRING_LENGTH (string)) < 0
15635 && (GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_TYPE (string))))
15636 == MODE_INT)
15637 && (GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (TREE_TYPE (string)))) == 1))
15638 return build_int_cst_type (TREE_TYPE (exp),
15639 (TREE_STRING_POINTER (string)
15640 [TREE_INT_CST_LOW (index)]));
15642 return NULL;
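/* Editorial example (sketch): for the C expression "abc"[1], STRING
   is the STRING_CST "abc", INDEX is the INTEGER_CST 1 (the array's
   lower bound being 0), 1 < TREE_STRING_LENGTH == 4, and the result
   is build_int_cst_type (TREE_TYPE (exp), 'b').  */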
15645 /* Return the tree for neg (ARG0) when ARG0 is known to be either
15646 an integer constant, real, or fixed-point constant.
15648 TYPE is the type of the result. */
15650 static tree
15651 fold_negate_const (tree arg0, tree type)
15653 tree t = NULL_TREE;
15655 switch (TREE_CODE (arg0))
15657 case INTEGER_CST:
15659 unsigned HOST_WIDE_INT low;
15660 HOST_WIDE_INT high;
15661 int overflow = neg_double (TREE_INT_CST_LOW (arg0),
15662 TREE_INT_CST_HIGH (arg0),
15663 &low, &high);
15664 t = force_fit_type_double (type, low, high, 1,
15665 (overflow | TREE_OVERFLOW (arg0))
15666 && !TYPE_UNSIGNED (type));
15667 break;
15670 case REAL_CST:
15671 t = build_real (type, REAL_VALUE_NEGATE (TREE_REAL_CST (arg0)));
15672 break;
15674 case FIXED_CST:
15676 FIXED_VALUE_TYPE f;
15677 bool overflow_p = fixed_arithmetic (&f, NEGATE_EXPR,
15678 &(TREE_FIXED_CST (arg0)), NULL,
15679 TYPE_SATURATING (type));
15680 t = build_fixed (type, f);
15681 /* Propagate overflow flags. */
15682 if (overflow_p | TREE_OVERFLOW (arg0))
15683 TREE_OVERFLOW (t) = 1;
15684 break;
15687 default:
15688 gcc_unreachable ();
15691 return t;
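/* Editorial example (sketch): negating the 32-bit signed INTEGER_CST
   -2147483648 (INT_MIN) has no representable result; neg_double
   reports the overflow, so the constant returned above carries
   TREE_OVERFLOW via force_fit_type_double.  */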
15694 /* Return the tree for abs (ARG0) when ARG0 is known to be either
15695 an integer constant or real constant.
15697 TYPE is the type of the result. */
15699 tree
15700 fold_abs_const (tree arg0, tree type)
15702 tree t = NULL_TREE;
15704 switch (TREE_CODE (arg0))
15706 case INTEGER_CST:
15707 /* If the value is unsigned, then the absolute value is
15708 the same as the ordinary value. */
15709 if (TYPE_UNSIGNED (type))
15710 t = arg0;
15711 /* Similarly, if the value is non-negative. */
15712 else if (INT_CST_LT (integer_minus_one_node, arg0))
15713 t = arg0;
15714 /* If the value is negative, then the absolute value is
15715 its negation. */
15716 else
15718 unsigned HOST_WIDE_INT low;
15719 HOST_WIDE_INT high;
15720 int overflow = neg_double (TREE_INT_CST_LOW (arg0),
15721 TREE_INT_CST_HIGH (arg0),
15722 &low, &high);
15723 t = force_fit_type_double (type, low, high, -1,
15724 overflow | TREE_OVERFLOW (arg0));
15726 break;
15728 case REAL_CST:
15729 if (REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg0)))
15730 t = build_real (type, REAL_VALUE_NEGATE (TREE_REAL_CST (arg0)));
15731 else
15732 t = arg0;
15733 break;
15735 default:
15736 gcc_unreachable ();
15739 return t;
15742 /* Return the tree for not (ARG0) when ARG0 is known to be an integer
15743 constant. TYPE is the type of the result. */
15745 static tree
15746 fold_not_const (tree arg0, tree type)
15748 tree t = NULL_TREE;
15750 gcc_assert (TREE_CODE (arg0) == INTEGER_CST);
15752 t = force_fit_type_double (type, ~TREE_INT_CST_LOW (arg0),
15753 ~TREE_INT_CST_HIGH (arg0), 0,
15754 TREE_OVERFLOW (arg0));
15756 return t;
15759 /* Given CODE, a relational operator, the target type, TYPE and two
15760 constant operands OP0 and OP1, return the result of the
15761 relational operation. If the result is not a compile time
15762 constant, then return NULL_TREE. */
15764 static tree
15765 fold_relational_const (enum tree_code code, tree type, tree op0, tree op1)
15767 int result, invert;
15769 /* From here on, the only cases we handle are when the result is
15770 known to be a constant. */
15772 if (TREE_CODE (op0) == REAL_CST && TREE_CODE (op1) == REAL_CST)
15774 const REAL_VALUE_TYPE *c0 = TREE_REAL_CST_PTR (op0);
15775 const REAL_VALUE_TYPE *c1 = TREE_REAL_CST_PTR (op1);
15777 /* Handle the cases where either operand is a NaN. */
15778 if (real_isnan (c0) || real_isnan (c1))
15780 switch (code)
15782 case EQ_EXPR:
15783 case ORDERED_EXPR:
15784 result = 0;
15785 break;
15787 case NE_EXPR:
15788 case UNORDERED_EXPR:
15789 case UNLT_EXPR:
15790 case UNLE_EXPR:
15791 case UNGT_EXPR:
15792 case UNGE_EXPR:
15793 case UNEQ_EXPR:
15794 result = 1;
15795 break;
15797 case LT_EXPR:
15798 case LE_EXPR:
15799 case GT_EXPR:
15800 case GE_EXPR:
15801 case LTGT_EXPR:
15802 if (flag_trapping_math)
15803 return NULL_TREE;
15804 result = 0;
15805 break;
15807 default:
15808 gcc_unreachable ();
15811 return constant_boolean_node (result, type);
15814 return constant_boolean_node (real_compare (code, c0, c1), type);
15817 if (TREE_CODE (op0) == FIXED_CST && TREE_CODE (op1) == FIXED_CST)
15819 const FIXED_VALUE_TYPE *c0 = TREE_FIXED_CST_PTR (op0);
15820 const FIXED_VALUE_TYPE *c1 = TREE_FIXED_CST_PTR (op1);
15821 return constant_boolean_node (fixed_compare (code, c0, c1), type);
15824 /* Handle equality/inequality of complex constants. */
15825 if (TREE_CODE (op0) == COMPLEX_CST && TREE_CODE (op1) == COMPLEX_CST)
15827 tree rcond = fold_relational_const (code, type,
15828 TREE_REALPART (op0),
15829 TREE_REALPART (op1));
15830 tree icond = fold_relational_const (code, type,
15831 TREE_IMAGPART (op0),
15832 TREE_IMAGPART (op1));
15833 if (code == EQ_EXPR)
15834 return fold_build2 (TRUTH_ANDIF_EXPR, type, rcond, icond);
15835 else if (code == NE_EXPR)
15836 return fold_build2 (TRUTH_ORIF_EXPR, type, rcond, icond);
15837 else
15838 return NULL_TREE;
15841 /* From here on we only handle LT, LE, GT, GE, EQ and NE.
15843 To compute GT, swap the arguments and do LT.
15844 To compute GE, do LT and invert the result.
15845 To compute LE, swap the arguments, do LT and invert the result.
15846 To compute NE, do EQ and invert the result.
15848 Therefore, the code below must handle only EQ and LT. */
15850 if (code == LE_EXPR || code == GT_EXPR)
15852 tree tem = op0;
15853 op0 = op1;
15854 op1 = tem;
15855 code = swap_tree_comparison (code);
15858 /* Note that it is safe to invert for real values here because we
15859 have already handled the one case where it matters (the NaN cases above). */
15861 invert = 0;
15862 if (code == NE_EXPR || code == GE_EXPR)
15864 invert = 1;
15865 code = invert_tree_comparison (code, false);
15868 /* Compute a result for LT or EQ if args permit;
15869 otherwise return NULL_TREE. */
15870 if (TREE_CODE (op0) == INTEGER_CST && TREE_CODE (op1) == INTEGER_CST)
15872 if (code == EQ_EXPR)
15873 result = tree_int_cst_equal (op0, op1);
15874 else if (TYPE_UNSIGNED (TREE_TYPE (op0)))
15875 result = INT_CST_LT_UNSIGNED (op0, op1);
15876 else
15877 result = INT_CST_LT (op0, op1);
15879 else
15880 return NULL_TREE;
15882 if (invert)
15883 result ^= 1;
15884 return constant_boolean_node (result, type);
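/* Worked example (editorial sketch): folding 5 > 3 swaps the operands
   and does 3 < 5 (code becomes LT_EXPR via swap_tree_comparison);
   INT_CST_LT yields 1, no inversion is requested since the code is
   neither NE nor GE, and the result is constant_boolean_node (1, type),
   i.e. boolean true.  */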
15887 /* If necessary, return a CLEANUP_POINT_EXPR for EXPR with the
15888 indicated TYPE. If no CLEANUP_POINT_EXPR is necessary, return EXPR
15889 itself. */
15891 tree
15892 fold_build_cleanup_point_expr (tree type, tree expr)
15894 /* If the expression does not have side effects then we don't have to wrap
15895 it with a cleanup point expression. */
15896 if (!TREE_SIDE_EFFECTS (expr))
15897 return expr;
15899 /* If the expression is a return, check whether the expression inside
15900 the return, or the right-hand side of the MODIFY_EXPR inside the
15901 return, has no side effects. If either has none, we need not wrap
15902 the expression in a cleanup point expression. We don't check the
15903 left-hand side of the modify because it should always be a return decl. */
15904 if (TREE_CODE (expr) == RETURN_EXPR)
15906 tree op = TREE_OPERAND (expr, 0);
15907 if (!op || !TREE_SIDE_EFFECTS (op))
15908 return expr;
15909 op = TREE_OPERAND (op, 1);
15910 if (!TREE_SIDE_EFFECTS (op))
15911 return expr;
15914 return build1 (CLEANUP_POINT_EXPR, type, expr);
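/* Editorial example (sketch): "return x" reaches the RETURN_EXPR test
   above with a MODIFY_EXPR whose right-hand side x has no side
   effects, so it is returned unwrapped; "return f ()" keeps the
   CALL_EXPR's side effects and gets wrapped in a
   CLEANUP_POINT_EXPR.  */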
/* Given a pointer value OP0 and a type TYPE, return a simplified version
   of an indirection through OP0, or NULL_TREE if no simplification is
   possible.  */

tree
fold_indirect_ref_1 (location_t loc, tree type, tree op0)
{
  tree sub = op0;
  tree subtype;

  STRIP_NOPS (sub);
  subtype = TREE_TYPE (sub);
  if (!POINTER_TYPE_P (subtype))
    return NULL_TREE;

  if (TREE_CODE (sub) == ADDR_EXPR)
    {
      tree op = TREE_OPERAND (sub, 0);
      tree optype = TREE_TYPE (op);
      /* *&CONST_DECL -> the value of the const decl.  */
      if (TREE_CODE (op) == CONST_DECL)
        return DECL_INITIAL (op);
      /* *&p => p; make sure to handle *&"str"[cst] here.  */
      if (type == optype)
        {
          tree fop = fold_read_from_constant_string (op);
          if (fop)
            return fop;
          else
            return op;
        }
      /* *(foo *)&fooarray => fooarray[0] */
      else if (TREE_CODE (optype) == ARRAY_TYPE
               && type == TREE_TYPE (optype))
        {
          tree type_domain = TYPE_DOMAIN (optype);
          tree min_val = size_zero_node;
          if (type_domain && TYPE_MIN_VALUE (type_domain))
            min_val = TYPE_MIN_VALUE (type_domain);
          op0 = build4 (ARRAY_REF, type, op, min_val, NULL_TREE, NULL_TREE);
          SET_EXPR_LOCATION (op0, loc);
          return op0;
        }
      /* *(foo *)&complexfoo => __real__ complexfoo */
      else if (TREE_CODE (optype) == COMPLEX_TYPE
               && type == TREE_TYPE (optype))
        return fold_build1_loc (loc, REALPART_EXPR, type, op);
      /* *(foo *)&vectorfoo => BIT_FIELD_REF<vectorfoo,...> */
      else if (TREE_CODE (optype) == VECTOR_TYPE
               && type == TREE_TYPE (optype))
        {
          tree part_width = TYPE_SIZE (type);
          tree index = bitsize_int (0);
          return fold_build3_loc (loc, BIT_FIELD_REF, type, op,
                                  part_width, index);
        }
    }

  /* ((foo*)&vectorfoo)[1] => BIT_FIELD_REF<vectorfoo,...> */
  if (TREE_CODE (sub) == POINTER_PLUS_EXPR
      && TREE_CODE (TREE_OPERAND (sub, 1)) == INTEGER_CST)
    {
      tree op00 = TREE_OPERAND (sub, 0);
      tree op01 = TREE_OPERAND (sub, 1);
      tree op00type;

      STRIP_NOPS (op00);
      op00type = TREE_TYPE (op00);
      if (TREE_CODE (op00) == ADDR_EXPR
          && TREE_CODE (TREE_TYPE (op00type)) == VECTOR_TYPE
          && type == TREE_TYPE (TREE_TYPE (op00type)))
        {
          HOST_WIDE_INT offset = tree_low_cst (op01, 0);
          tree part_width = TYPE_SIZE (type);
          unsigned HOST_WIDE_INT part_widthi
            = tree_low_cst (part_width, 0) / BITS_PER_UNIT;
          unsigned HOST_WIDE_INT indexi = offset * BITS_PER_UNIT;
          tree index = bitsize_int (indexi);

          if (offset / part_widthi <= TYPE_VECTOR_SUBPARTS (TREE_TYPE (op00type)))
            return fold_build3_loc (loc,
                                    BIT_FIELD_REF, type, TREE_OPERAND (op00, 0),
                                    part_width, index);
        }
    }

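  /* As a sketch of the arithmetic above, assuming a "v4sf" vector type
     declared with __attribute__ ((vector_size (16))): for
     "((float *)&vecfoo)[1]" the byte offset is 4, part_widthi is
     sizeof (float) == 4, indexi is 32, and the access folds to
     BIT_FIELD_REF <vecfoo, 32, 32>.  */
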
  /* ((foo*)&complexfoo)[1] => __imag__ complexfoo */
  if (TREE_CODE (sub) == POINTER_PLUS_EXPR
      && TREE_CODE (TREE_OPERAND (sub, 1)) == INTEGER_CST)
    {
      tree op00 = TREE_OPERAND (sub, 0);
      tree op01 = TREE_OPERAND (sub, 1);
      tree op00type;

      STRIP_NOPS (op00);
      op00type = TREE_TYPE (op00);
      if (TREE_CODE (op00) == ADDR_EXPR
          && TREE_CODE (TREE_TYPE (op00type)) == COMPLEX_TYPE
          && type == TREE_TYPE (TREE_TYPE (op00type)))
        {
          tree size = TYPE_SIZE_UNIT (type);
          if (tree_int_cst_equal (size, op01))
            return fold_build1_loc (loc, IMAGPART_EXPR, type,
                                    TREE_OPERAND (op00, 0));
        }
    }

  /* *(foo *)fooarrptr => (*fooarrptr)[0] */
  if (TREE_CODE (TREE_TYPE (subtype)) == ARRAY_TYPE
      && type == TREE_TYPE (TREE_TYPE (subtype)))
    {
      tree type_domain;
      tree min_val = size_zero_node;
      sub = build_fold_indirect_ref_loc (loc, sub);
      type_domain = TYPE_DOMAIN (TREE_TYPE (sub));
      if (type_domain && TYPE_MIN_VALUE (type_domain))
        min_val = TYPE_MIN_VALUE (type_domain);
      op0 = build4 (ARRAY_REF, type, sub, min_val, NULL_TREE, NULL_TREE);
      SET_EXPR_LOCATION (op0, loc);
      return op0;
    }

  return NULL_TREE;
}

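/* Putting the cases above together: given "_Complex double z;",
   "*(double *)&z" folds to "__real__ z" via the ADDR_EXPR case, and
   "((double *)&z)[1]" folds to "__imag__ z" via the POINTER_PLUS_EXPR
   case, because the byte offset equals sizeof (double).  */
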
/* Builds an expression for an indirection through T, simplifying some
   cases.  */

tree
build_fold_indirect_ref_loc (location_t loc, tree t)
{
  tree type = TREE_TYPE (TREE_TYPE (t));
  tree sub = fold_indirect_ref_1 (loc, type, t);

  if (sub)
    return sub;

  t = build1 (INDIRECT_REF, type, t);
  SET_EXPR_LOCATION (t, loc);
  return t;
}

/* Given an INDIRECT_REF T, return either T or a simplified version.  */

tree
fold_indirect_ref_loc (location_t loc, tree t)
{
  tree sub = fold_indirect_ref_1 (loc, TREE_TYPE (t), TREE_OPERAND (t, 0));

  if (sub)
    return sub;
  else
    return t;
}

/* Strip non-trapping, non-side-effecting tree nodes from an expression
   whose result is ignored.  The type of the returned tree need not be
   the same as the original expression.  */

tree
fold_ignored_result (tree t)
{
  if (!TREE_SIDE_EFFECTS (t))
    return integer_zero_node;

  for (;;)
    switch (TREE_CODE_CLASS (TREE_CODE (t)))
      {
      case tcc_unary:
        t = TREE_OPERAND (t, 0);
        break;

      case tcc_binary:
      case tcc_comparison:
        if (!TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1)))
          t = TREE_OPERAND (t, 0);
        else if (!TREE_SIDE_EFFECTS (TREE_OPERAND (t, 0)))
          t = TREE_OPERAND (t, 1);
        else
          return t;
        break;

      case tcc_expression:
        switch (TREE_CODE (t))
          {
          case COMPOUND_EXPR:
            if (TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1)))
              return t;
            t = TREE_OPERAND (t, 0);
            break;

          case COND_EXPR:
            if (TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1))
                || TREE_SIDE_EFFECTS (TREE_OPERAND (t, 2)))
              return t;
            t = TREE_OPERAND (t, 0);
            break;

          default:
            return t;
          }
        break;

      default:
        return t;
      }
}

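/* For example, if the result of "i++ < n" is ignored, the comparison's
   second operand has no side effects, so the loop above strips the
   expression down to "i++"; a COND_EXPR whose arms both have side
   effects is returned unchanged.  */
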
/* Return the value of VALUE, rounded up to a multiple of DIVISOR.
   This can only be applied to objects of a sizetype.  */

tree
round_up_loc (location_t loc, tree value, int divisor)
{
  tree div = NULL_TREE;

  gcc_assert (divisor > 0);
  if (divisor == 1)
    return value;

  /* See if VALUE is already a multiple of DIVISOR.  If so, we don't
     have to do anything.  Only do this when VALUE is not a constant,
     because in that case the multiple_of_p check is more expensive
     than just performing the rounding.  */
  if (TREE_CODE (value) != INTEGER_CST)
    {
      div = build_int_cst (TREE_TYPE (value), divisor);

      if (multiple_of_p (TREE_TYPE (value), value, div))
        return value;
    }

  /* If divisor is a power of two, simplify this to bit manipulation.  */
  if (divisor == (divisor & -divisor))
    {
      if (TREE_CODE (value) == INTEGER_CST)
        {
          unsigned HOST_WIDE_INT low = TREE_INT_CST_LOW (value);
          unsigned HOST_WIDE_INT high;
          bool overflow_p;

          if ((low & (divisor - 1)) == 0)
            return value;

          overflow_p = TREE_OVERFLOW (value);
          high = TREE_INT_CST_HIGH (value);
          low &= ~(divisor - 1);
          low += divisor;
          if (low == 0)
            {
              high++;
              if (high == 0)
                overflow_p = true;
            }

          return force_fit_type_double (TREE_TYPE (value), low, high,
                                        -1, overflow_p);
        }
      else
        {
          tree t;

          t = build_int_cst (TREE_TYPE (value), divisor - 1);
          value = size_binop_loc (loc, PLUS_EXPR, value, t);
          t = build_int_cst (TREE_TYPE (value), -divisor);
          value = size_binop_loc (loc, BIT_AND_EXPR, value, t);
        }
    }
  else
    {
      if (!div)
        div = build_int_cst (TREE_TYPE (value), divisor);
      value = size_binop_loc (loc, CEIL_DIV_EXPR, value, div);
      value = size_binop_loc (loc, MULT_EXPR, value, div);
    }

  return value;
}

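/* Worked example: rounding the constant 13 up to a multiple of 8
   clears the low bits (13 & ~7 == 8) and adds the divisor, giving 16;
   for a non-constant VALUE the equivalent "(value + 7) & -8" is
   emitted instead.  A non-power-of-two divisor such as 12 goes
   through CEIL_DIV_EXPR and MULT_EXPR, so 13 becomes 24.  */
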
/* Likewise, but round down.  */

tree
round_down_loc (location_t loc, tree value, int divisor)
{
  tree div = NULL_TREE;

  gcc_assert (divisor > 0);
  if (divisor == 1)
    return value;

  /* See if VALUE is already a multiple of DIVISOR.  If so, we don't
     have to do anything.  Only do this when VALUE is not a constant,
     because in that case the multiple_of_p check is more expensive
     than just performing the rounding.  */
  if (TREE_CODE (value) != INTEGER_CST)
    {
      div = build_int_cst (TREE_TYPE (value), divisor);

      if (multiple_of_p (TREE_TYPE (value), value, div))
        return value;
    }

  /* If divisor is a power of two, simplify this to bit manipulation.  */
  if (divisor == (divisor & -divisor))
    {
      tree t;

      t = build_int_cst (TREE_TYPE (value), -divisor);
      value = size_binop_loc (loc, BIT_AND_EXPR, value, t);
    }
  else
    {
      if (!div)
        div = build_int_cst (TREE_TYPE (value), divisor);
      value = size_binop_loc (loc, FLOOR_DIV_EXPR, value, div);
      value = size_binop_loc (loc, MULT_EXPR, value, div);
    }

  return value;
}

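/* Here the power-of-two case needs no addition: rounding 13 down to a
   multiple of 8 is just "13 & -8", i.e. 8, since -8 in two's
   complement masks off the low three bits.  */
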
/* Returns the pointer to the base of the object addressed by EXP and
   extracts the information about the offset of the access, storing it
   in *PBITPOS and *POFFSET.  */

static tree
split_address_to_core_and_offset (tree exp,
                                  HOST_WIDE_INT *pbitpos, tree *poffset)
{
  tree core;
  enum machine_mode mode;
  int unsignedp, volatilep;
  HOST_WIDE_INT bitsize;
  location_t loc = EXPR_LOCATION (exp);

  if (TREE_CODE (exp) == ADDR_EXPR)
    {
      core = get_inner_reference (TREE_OPERAND (exp, 0), &bitsize, pbitpos,
                                  poffset, &mode, &unsignedp, &volatilep,
                                  false);
      core = build_fold_addr_expr_loc (loc, core);
    }
  else
    {
      core = exp;
      *pbitpos = 0;
      *poffset = NULL_TREE;
    }

  return core;
}

/* Returns true if addresses of E1 and E2 differ by a constant, false
   otherwise.  If they do, E1 - E2 is stored in *DIFF.  */

bool
ptr_difference_const (tree e1, tree e2, HOST_WIDE_INT *diff)
{
  tree core1, core2;
  HOST_WIDE_INT bitpos1, bitpos2;
  tree toffset1, toffset2, tdiff, type;

  core1 = split_address_to_core_and_offset (e1, &bitpos1, &toffset1);
  core2 = split_address_to_core_and_offset (e2, &bitpos2, &toffset2);

  if (bitpos1 % BITS_PER_UNIT != 0
      || bitpos2 % BITS_PER_UNIT != 0
      || !operand_equal_p (core1, core2, 0))
    return false;

  if (toffset1 && toffset2)
    {
      type = TREE_TYPE (toffset1);
      if (type != TREE_TYPE (toffset2))
        toffset2 = fold_convert (type, toffset2);

      tdiff = fold_build2 (MINUS_EXPR, type, toffset1, toffset2);
      if (!cst_and_fits_in_hwi (tdiff))
        return false;

      *diff = int_cst_value (tdiff);
    }
  else if (toffset1 || toffset2)
    {
      /* If only one of the offsets is non-constant, the difference cannot
         be a constant.  */
      return false;
    }
  else
    *diff = 0;

  *diff += (bitpos1 - bitpos2) / BITS_PER_UNIT;
  return true;
}

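/* For example, given "int a[10];" with 4-byte ints, the addresses
   &a[3] and &a[1] share the core &a and differ only in their constant
   bit positions 96 and 32, so *DIFF is set to
   (96 - 32) / BITS_PER_UNIT, i.e. 8.  */
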
/* Simplify the floating point expression EXP when the sign of the
   result is not significant.  Return NULL_TREE if no simplification
   is possible.  */

tree
fold_strip_sign_ops (tree exp)
{
  tree arg0, arg1;
  location_t loc = EXPR_LOCATION (exp);

  switch (TREE_CODE (exp))
    {
    case ABS_EXPR:
    case NEGATE_EXPR:
      arg0 = fold_strip_sign_ops (TREE_OPERAND (exp, 0));
      return arg0 ? arg0 : TREE_OPERAND (exp, 0);

    case MULT_EXPR:
    case RDIV_EXPR:
      if (HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (TREE_TYPE (exp))))
        return NULL_TREE;
      arg0 = fold_strip_sign_ops (TREE_OPERAND (exp, 0));
      arg1 = fold_strip_sign_ops (TREE_OPERAND (exp, 1));
      if (arg0 != NULL_TREE || arg1 != NULL_TREE)
        return fold_build2_loc (loc, TREE_CODE (exp), TREE_TYPE (exp),
                                arg0 ? arg0 : TREE_OPERAND (exp, 0),
                                arg1 ? arg1 : TREE_OPERAND (exp, 1));
      break;

    case COMPOUND_EXPR:
      arg0 = TREE_OPERAND (exp, 0);
      arg1 = fold_strip_sign_ops (TREE_OPERAND (exp, 1));
      if (arg1)
        return fold_build2_loc (loc, COMPOUND_EXPR, TREE_TYPE (exp),
                                arg0, arg1);
      break;

    case COND_EXPR:
      arg0 = fold_strip_sign_ops (TREE_OPERAND (exp, 1));
      arg1 = fold_strip_sign_ops (TREE_OPERAND (exp, 2));
      if (arg0 || arg1)
        return fold_build3_loc (loc,
                                COND_EXPR, TREE_TYPE (exp),
                                TREE_OPERAND (exp, 0),
                                arg0 ? arg0 : TREE_OPERAND (exp, 1),
                                arg1 ? arg1 : TREE_OPERAND (exp, 2));
      break;

    case CALL_EXPR:
      {
        const enum built_in_function fcode = builtin_mathfn_code (exp);
        switch (fcode)
          {
          CASE_FLT_FN (BUILT_IN_COPYSIGN):
            /* Strip copysign function call, return the 1st argument.  */
            arg0 = CALL_EXPR_ARG (exp, 0);
            arg1 = CALL_EXPR_ARG (exp, 1);
            return omit_one_operand_loc (loc, TREE_TYPE (exp), arg0, arg1);

          default:
            /* Strip sign ops from the argument of "odd" math functions.  */
            if (negate_mathfn_p (fcode))
              {
                arg0 = fold_strip_sign_ops (CALL_EXPR_ARG (exp, 0));
                if (arg0)
                  return build_call_expr_loc (loc, get_callee_fndecl (exp),
                                              1, arg0);
              }
            break;
          }
      }
      break;

    default:
      break;
    }
  return NULL_TREE;
}

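/* For instance, when only the magnitude of the result matters (say,
   under fabs), "-x * y" strips to "x * y", "copysign (x, y)" strips
   to "x" (preserving any side effects of "y" via
   omit_one_operand_loc), and "sin (-x)" strips to "sin (x)" because
   sin is an odd function.  */
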