/* Fold a constant sub-tree into a single node for C-compiler
   Copyright (C) 1987, 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998, 1999,
   2000, 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009
   Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

/*@@ This file should be rewritten to use an arbitrary precision
  @@ representation for "struct tree_int_cst" and "struct tree_real_cst".
  @@ Perhaps the routines could also be used for bc/dc, and made a lib.
  @@ The routines that translate from the ap rep should
  @@ warn if precision et. al. is lost.
  @@ This would also make life easier when this technology is used
  @@ for cross-compilers.  */

/* The entry points in this file are fold, size_int_wide, size_binop
   and force_fit_type_double.

   fold takes a tree as argument and returns a simplified tree.

   size_binop takes a tree code for an arithmetic operation
   and two operands that are trees, and produces a tree for the
   result, assuming the type comes from `sizetype'.

   size_int takes an integer value, and creates a tree constant
   with type from `sizetype'.

   force_fit_type_double takes a constant, an overflowable flag and a
   prior overflow indicator.  It forces the value to fit the type and
   sets TREE_OVERFLOW.

   Note: Since the folders get called on non-gimple code as well as
   gimple code, we need to handle GIMPLE tuples as well as their
   corresponding tree equivalents.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "flags.h"
#include "tree.h"
#include "real.h"
#include "fixed-value.h"
#include "rtl.h"
#include "expr.h"
#include "tm_p.h"
#include "target.h"
#include "toplev.h"
#include "intl.h"
#include "ggc.h"
#include "hashtab.h"
#include "langhooks.h"
#include "md5.h"
#include "gimple.h"

/* Nonzero if we are folding constants inside an initializer; zero
   otherwise.  */
int folding_initializer = 0;

/* The following constants represent a bit based encoding of GCC's
   comparison operators.  This encoding simplifies transformations
   on relational comparison operators, such as AND and OR.  */
enum comparison_code {
  COMPCODE_FALSE = 0,
  COMPCODE_LT = 1,
  COMPCODE_EQ = 2,
  COMPCODE_LE = 3,
  COMPCODE_GT = 4,
  COMPCODE_LTGT = 5,
  COMPCODE_GE = 6,
  COMPCODE_ORD = 7,
  COMPCODE_UNORD = 8,
  COMPCODE_UNLT = 9,
  COMPCODE_UNEQ = 10,
  COMPCODE_UNLE = 11,
  COMPCODE_UNGT = 12,
  COMPCODE_NE = 13,
  COMPCODE_UNGE = 14,
  COMPCODE_TRUE = 15
};

static void encode (HOST_WIDE_INT *, unsigned HOST_WIDE_INT, HOST_WIDE_INT);
static void decode (HOST_WIDE_INT *, unsigned HOST_WIDE_INT *, HOST_WIDE_INT *);
static bool negate_mathfn_p (enum built_in_function);
static bool negate_expr_p (tree);
static tree negate_expr (tree);
static tree split_tree (tree, enum tree_code, tree *, tree *, tree *, int);
static tree associate_trees (tree, tree, enum tree_code, tree);
static tree const_binop (enum tree_code, tree, tree, int);
static enum comparison_code comparison_to_compcode (enum tree_code);
static enum tree_code compcode_to_comparison (enum comparison_code);
static tree combine_comparisons (enum tree_code, enum tree_code,
                                 enum tree_code, tree, tree, tree);
static int operand_equal_for_comparison_p (tree, tree, tree);
static int twoval_comparison_p (tree, tree *, tree *, int *);
static tree eval_subst (tree, tree, tree, tree, tree);
static tree pedantic_omit_one_operand (tree, tree, tree);
static tree distribute_bit_expr (enum tree_code, tree, tree, tree);
static tree make_bit_field_ref (tree, tree, HOST_WIDE_INT, HOST_WIDE_INT, int);
static tree optimize_bit_field_compare (enum tree_code, tree, tree, tree);
static tree decode_field_reference (tree, HOST_WIDE_INT *, HOST_WIDE_INT *,
                                    enum machine_mode *, int *, int *,
                                    tree *, tree *);
static int all_ones_mask_p (const_tree, int);
static tree sign_bit_p (tree, const_tree);
static int simple_operand_p (const_tree);
static tree range_binop (enum tree_code, tree, tree, int, tree, int);
static tree range_predecessor (tree);
static tree range_successor (tree);
static tree make_range (tree, int *, tree *, tree *, bool *);
static tree build_range_check (tree, tree, int, tree, tree);
static int merge_ranges (int *, tree *, tree *, int, tree, tree, int, tree,
                         tree);
static tree fold_range_test (enum tree_code, tree, tree, tree);
static tree fold_cond_expr_with_comparison (tree, tree, tree, tree);
static tree unextend (tree, int, int, tree);
static tree fold_truthop (enum tree_code, tree, tree, tree);
static tree optimize_minmax_comparison (enum tree_code, tree, tree, tree);
static tree extract_muldiv (tree, tree, enum tree_code, tree, bool *);
static tree extract_muldiv_1 (tree, tree, enum tree_code, tree, bool *);
static tree fold_binary_op_with_conditional_arg (enum tree_code, tree,
                                                 tree, tree,
                                                 tree, tree, int);
static tree fold_mathfn_compare (enum built_in_function, enum tree_code,
                                 tree, tree, tree);
static tree fold_inf_compare (enum tree_code, tree, tree, tree);
static tree fold_div_compare (enum tree_code, tree, tree, tree);
static bool reorder_operands_p (const_tree, const_tree);
static tree fold_negate_const (tree, tree);
static tree fold_not_const (tree, tree);
static tree fold_relational_const (enum tree_code, tree, tree, tree);

/* We know that A1 + B1 = SUM1, using 2's complement arithmetic and ignoring
   overflow.  Suppose A, B and SUM have the same respective signs as A1, B1,
   and SUM1.  Then this yields nonzero if overflow occurred during the
   addition.

   Overflow occurs if A and B have the same sign, but A and SUM differ in
   sign.  Use `^' to test whether signs differ, and `< 0' to isolate the
   sign.  */
#define OVERFLOW_SUM_SIGN(a, b, sum) ((~((a) ^ (b)) & ((a) ^ (sum))) < 0)

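/* For example, with 8-bit values for illustration: 100 + 100 wraps to
   -56.  Here a ^ b has a clear sign bit (same operand signs), so
   ~(a ^ b) has it set, and a ^ sum has it set too (operand and result
   signs differ), making the whole expression negative: overflow is
   detected.  For 100 + (-56) = 44 the operand signs differ, ~(a ^ b)
   has a clear sign bit, and no overflow is reported.  */
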
/* To do constant folding on INTEGER_CST nodes requires two-word arithmetic.
   We do that by representing the two-word integer in 4 words, with only
   HOST_BITS_PER_WIDE_INT / 2 bits stored in each word, as a positive
   number.  The value of the word is LOWPART + HIGHPART * BASE.  */

#define LOWPART(x) \
  ((x) & (((unsigned HOST_WIDE_INT) 1 << (HOST_BITS_PER_WIDE_INT / 2)) - 1))
#define HIGHPART(x) \
  ((unsigned HOST_WIDE_INT) (x) >> HOST_BITS_PER_WIDE_INT / 2)
#define BASE ((unsigned HOST_WIDE_INT) 1 << HOST_BITS_PER_WIDE_INT / 2)

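/* For example, with a 64-bit HOST_WIDE_INT, BASE is 2^32, and for
   x = 0x123456789abcdef0 we get LOWPART (x) = 0x9abcdef0 and
   HIGHPART (x) = 0x12345678, so x == LOWPART (x) + HIGHPART (x) * BASE.
   A product of two such half-words always fits in one full word, which
   is what makes the long multiplication and division below work.  */
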
/* Unpack a two-word integer into 4 words.
   LOW and HI are the integer, as two `HOST_WIDE_INT' pieces.
   WORDS points to the array of HOST_WIDE_INTs.  */

static void
encode (HOST_WIDE_INT *words, unsigned HOST_WIDE_INT low, HOST_WIDE_INT hi)
{
  words[0] = LOWPART (low);
  words[1] = HIGHPART (low);
  words[2] = LOWPART (hi);
  words[3] = HIGHPART (hi);
}

/* Pack an array of 4 words into a two-word integer.
   WORDS points to the array of words.
   The integer is stored into *LOW and *HI as two `HOST_WIDE_INT' pieces.  */

static void
decode (HOST_WIDE_INT *words, unsigned HOST_WIDE_INT *low,
        HOST_WIDE_INT *hi)
{
  *low = words[0] + words[1] * BASE;
  *hi = words[2] + words[3] * BASE;
}

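/* encode and decode are inverses: after encode (w, low, hi) the
   represented value is w[0] + w[1]*BASE + w[2]*BASE^2 + w[3]*BASE^3,
   and decode (w, &low, &hi) reassembles exactly the original LOW and
   HI pieces.  */
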
/* Force the double-word integer L1, H1 to be within the range of the
   integer type TYPE.  Stores the properly truncated and sign-extended
   double-word integer in *LV, *HV.  Returns true if the operation
   overflows, that is, argument and result are different.  */

int
fit_double_type (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
                 unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv, const_tree type)
{
  unsigned HOST_WIDE_INT low0 = l1;
  HOST_WIDE_INT high0 = h1;
  unsigned int prec;
  int sign_extended_type;

  if (POINTER_TYPE_P (type)
      || TREE_CODE (type) == OFFSET_TYPE)
    prec = POINTER_SIZE;
  else
    prec = TYPE_PRECISION (type);

  /* Size types *are* sign extended.  */
  sign_extended_type = (!TYPE_UNSIGNED (type)
                        || (TREE_CODE (type) == INTEGER_TYPE
                            && TYPE_IS_SIZETYPE (type)));

  /* First clear all bits that are beyond the type's precision.  */
  if (prec >= 2 * HOST_BITS_PER_WIDE_INT)
    ;
  else if (prec > HOST_BITS_PER_WIDE_INT)
    h1 &= ~((HOST_WIDE_INT) (-1) << (prec - HOST_BITS_PER_WIDE_INT));
  else
    {
      h1 = 0;
      if (prec < HOST_BITS_PER_WIDE_INT)
        l1 &= ~((HOST_WIDE_INT) (-1) << prec);
    }

  /* Then do sign extension if necessary.  */
  if (!sign_extended_type)
    /* No sign extension */;
  else if (prec >= 2 * HOST_BITS_PER_WIDE_INT)
    /* Correct width already.  */;
  else if (prec > HOST_BITS_PER_WIDE_INT)
    {
      /* Sign extend top half?  */
      if (h1 & ((unsigned HOST_WIDE_INT)1
                << (prec - HOST_BITS_PER_WIDE_INT - 1)))
        h1 |= (HOST_WIDE_INT) (-1) << (prec - HOST_BITS_PER_WIDE_INT);
    }
  else if (prec == HOST_BITS_PER_WIDE_INT)
    {
      if ((HOST_WIDE_INT)l1 < 0)
        h1 = -1;
    }
  else
    {
      /* Sign extend bottom half?  */
      if (l1 & ((unsigned HOST_WIDE_INT)1 << (prec - 1)))
        {
          h1 = -1;
          l1 |= (HOST_WIDE_INT)(-1) << prec;
        }
    }

  *lv = l1;
  *hv = h1;

  /* If the value didn't fit, signal overflow.  */
  return l1 != low0 || h1 != high0;
}

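/* For example, for a signed 8-bit type and the double-word value 0x1ff
   (l1 = 0x1ff, h1 = 0): the bits beyond the precision are cleared,
   leaving 0xff; then, since bit 7 is set, sign extension yields
   l1 = all-ones and h1 = -1, i.e. the value -1.  Argument and result
   differ, so fit_double_type returns nonzero to signal overflow.  */
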
/* We force the double-int HIGH:LOW to the range of the type TYPE by
   sign or zero extending it.
   OVERFLOWABLE indicates if we are interested
   in overflow of the value, when >0 we are only interested in signed
   overflow, for <0 we are interested in any overflow.  OVERFLOWED
   indicates whether overflow has already occurred.  We force
   the value to be within range of the type (by setting to 0 or 1 all
   the bits outside the type's range).  We set TREE_OVERFLOW if
        OVERFLOWED is nonzero,
        or OVERFLOWABLE is >0 and signed overflow occurs,
        or OVERFLOWABLE is <0 and any overflow occurs.
   We return a new tree node for the extended double-int.  The node
   is shared if no overflow flags are set.  */

tree
force_fit_type_double (tree type, unsigned HOST_WIDE_INT low,
                       HOST_WIDE_INT high, int overflowable,
                       bool overflowed)
{
  int sign_extended_type;
  bool overflow;

  /* Size types *are* sign extended.  */
  sign_extended_type = (!TYPE_UNSIGNED (type)
                        || (TREE_CODE (type) == INTEGER_TYPE
                            && TYPE_IS_SIZETYPE (type)));

  overflow = fit_double_type (low, high, &low, &high, type);

  /* If we need to set overflow flags, return a new unshared node.  */
  if (overflowed || overflow)
    {
      if (overflowed
          || overflowable < 0
          || (overflowable > 0 && sign_extended_type))
        {
          tree t = make_node (INTEGER_CST);
          TREE_INT_CST_LOW (t) = low;
          TREE_INT_CST_HIGH (t) = high;
          TREE_TYPE (t) = type;
          TREE_OVERFLOW (t) = 1;
          return t;
        }
    }

  /* Else build a shared node.  */
  return build_int_cst_wide (type, low, high);
}

/* Add two doubleword integers with doubleword result.
   Return nonzero if the operation overflows according to UNSIGNED_P.
   Each argument is given as two `HOST_WIDE_INT' pieces.
   One argument is L1 and H1; the other, L2 and H2.
   The value is stored as two `HOST_WIDE_INT' pieces in *LV and *HV.  */

int
add_double_with_sign (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
                      unsigned HOST_WIDE_INT l2, HOST_WIDE_INT h2,
                      unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv,
                      bool unsigned_p)
{
  unsigned HOST_WIDE_INT l;
  HOST_WIDE_INT h;

  l = l1 + l2;
  h = (HOST_WIDE_INT) ((unsigned HOST_WIDE_INT) h1
                       + (unsigned HOST_WIDE_INT) h2
                       + (l < l1));

  *lv = l;
  *hv = h;

  if (unsigned_p)
    return ((unsigned HOST_WIDE_INT) h < (unsigned HOST_WIDE_INT) h1
            || (h == h1
                && l < l1));
  else
    return OVERFLOW_SUM_SIGN (h1, h2, h);
}

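/* The carry out of the low word is detected by the unsigned wrap-around
   test L < L1: the low sum is computed modulo 2^HOST_BITS_PER_WIDE_INT,
   so it is smaller than an addend exactly when the addition wrapped,
   and that carry of one is folded into the high word.  */
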
/* Negate a doubleword integer with doubleword result.
   Return nonzero if the operation overflows, assuming it's signed.
   The argument is given as two `HOST_WIDE_INT' pieces in L1 and H1.
   The value is stored as two `HOST_WIDE_INT' pieces in *LV and *HV.  */

int
neg_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
            unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv)
{
  if (l1 == 0)
    {
      *lv = 0;
      *hv = - h1;
      return (*hv & h1) < 0;
    }
  else
    {
      *lv = -l1;
      *hv = ~h1;
      return 0;
    }
}

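/* This is two's complement negation -x = ~x + 1 done word-wise: when
   the low word is nonzero, adding 1 to its complement cannot carry
   out, so the high word is simply inverted.  When the low word is
   zero, the carry propagates, giving -h1 in the high word; overflow
   then happens only for the most negative value, where -h1 == h1 and
   (*hv & h1) keeps the sign bit set.  */
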
/* Multiply two doubleword integers with doubleword result.
   Return nonzero if the operation overflows according to UNSIGNED_P.
   Each argument is given as two `HOST_WIDE_INT' pieces.
   One argument is L1 and H1; the other, L2 and H2.
   The value is stored as two `HOST_WIDE_INT' pieces in *LV and *HV.  */

int
mul_double_with_sign (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
                      unsigned HOST_WIDE_INT l2, HOST_WIDE_INT h2,
                      unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv,
                      bool unsigned_p)
{
  HOST_WIDE_INT arg1[4];
  HOST_WIDE_INT arg2[4];
  HOST_WIDE_INT prod[4 * 2];
  unsigned HOST_WIDE_INT carry;
  int i, j, k;
  unsigned HOST_WIDE_INT toplow, neglow;
  HOST_WIDE_INT tophigh, neghigh;

  encode (arg1, l1, h1);
  encode (arg2, l2, h2);

  memset (prod, 0, sizeof prod);

  for (i = 0; i < 4; i++)
    {
      carry = 0;
      for (j = 0; j < 4; j++)
        {
          k = i + j;
          /* This product is <= 0xFFFE0001, the sum <= 0xFFFF0000.  */
          carry += arg1[i] * arg2[j];
          /* Since prod[p] < 0xFFFF, this sum <= 0xFFFFFFFF.  */
          carry += prod[k];
          prod[k] = LOWPART (carry);
          carry = HIGHPART (carry);
        }
      prod[i + 4] = carry;
    }

  decode (prod, lv, hv);
  decode (prod + 4, &toplow, &tophigh);

  /* Unsigned overflow is immediate.  */
  if (unsigned_p)
    return (toplow | tophigh) != 0;

  /* Check for signed overflow by calculating the signed representation of the
     top half of the result; it should agree with the low half's sign bit.  */
  if (h1 < 0)
    {
      neg_double (l2, h2, &neglow, &neghigh);
      add_double (neglow, neghigh, toplow, tophigh, &toplow, &tophigh);
    }
  if (h2 < 0)
    {
      neg_double (l1, h1, &neglow, &neghigh);
      add_double (neglow, neghigh, toplow, tophigh, &toplow, &tophigh);
    }
  return (*hv < 0 ? ~(toplow & tophigh) : toplow | tophigh) != 0;
}

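/* The schoolbook loop above computes an unsigned 8-half-word product;
   the low four half-words are the result and the top four the overflow
   part.  For signed operands the unsigned reading of a negative operand
   is too large by 2^(2*HOST_BITS_PER_WIDE_INT), so each negative
   operand contributes an excess of the *other* operand to the top
   half; the neg_double/add_double corrections subtract that excess.
   The final test then checks that the corrected top half is the sign
   extension of the low half: all zero bits for a nonnegative result,
   all one bits for a negative one.  */
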
/* Shift the doubleword integer in L1, H1 left by COUNT places
   keeping only PREC bits of result.
   Shift right if COUNT is negative.
   ARITH nonzero specifies arithmetic shifting; otherwise use logical shift.
   Store the value as two `HOST_WIDE_INT' pieces in *LV and *HV.  */

void
lshift_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
               HOST_WIDE_INT count, unsigned int prec,
               unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv, int arith)
{
  unsigned HOST_WIDE_INT signmask;

  if (count < 0)
    {
      rshift_double (l1, h1, -count, prec, lv, hv, arith);
      return;
    }

  if (SHIFT_COUNT_TRUNCATED)
    count %= prec;

  if (count >= 2 * HOST_BITS_PER_WIDE_INT)
    {
      /* Shifting by the host word size is undefined according to the
         ANSI standard, so we must handle this as a special case.  */
      *hv = 0;
      *lv = 0;
    }
  else if (count >= HOST_BITS_PER_WIDE_INT)
    {
      *hv = l1 << (count - HOST_BITS_PER_WIDE_INT);
      *lv = 0;
    }
  else
    {
      *hv = (((unsigned HOST_WIDE_INT) h1 << count)
             | (l1 >> (HOST_BITS_PER_WIDE_INT - count - 1) >> 1));
      *lv = l1 << count;
    }

  /* Sign extend all bits that are beyond the precision.  */

  signmask = -((prec > HOST_BITS_PER_WIDE_INT
                ? ((unsigned HOST_WIDE_INT) *hv
                   >> (prec - HOST_BITS_PER_WIDE_INT - 1))
                : (*lv >> (prec - 1))) & 1);

  if (prec >= 2 * HOST_BITS_PER_WIDE_INT)
    ;
  else if (prec >= HOST_BITS_PER_WIDE_INT)
    {
      *hv &= ~((HOST_WIDE_INT) (-1) << (prec - HOST_BITS_PER_WIDE_INT));
      *hv |= signmask << (prec - HOST_BITS_PER_WIDE_INT);
    }
  else
    {
      *hv = signmask;
      *lv &= ~((unsigned HOST_WIDE_INT) (-1) << prec);
      *lv |= signmask << prec;
    }
}

/* Shift the doubleword integer in L1, H1 right by COUNT places
   keeping only PREC bits of result.  COUNT must be positive.
   ARITH nonzero specifies arithmetic shifting; otherwise use logical shift.
   Store the value as two `HOST_WIDE_INT' pieces in *LV and *HV.  */

void
rshift_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
               HOST_WIDE_INT count, unsigned int prec,
               unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv,
               int arith)
{
  unsigned HOST_WIDE_INT signmask;

  signmask = (arith
              ? -((unsigned HOST_WIDE_INT) h1 >> (HOST_BITS_PER_WIDE_INT - 1))
              : 0);

  if (SHIFT_COUNT_TRUNCATED)
    count %= prec;

  if (count >= 2 * HOST_BITS_PER_WIDE_INT)
    {
      /* Shifting by the host word size is undefined according to the
         ANSI standard, so we must handle this as a special case.  */
      *hv = 0;
      *lv = 0;
    }
  else if (count >= HOST_BITS_PER_WIDE_INT)
    {
      *hv = 0;
      *lv = (unsigned HOST_WIDE_INT) h1 >> (count - HOST_BITS_PER_WIDE_INT);
    }
  else
    {
      *hv = (unsigned HOST_WIDE_INT) h1 >> count;
      *lv = ((l1 >> count)
             | ((unsigned HOST_WIDE_INT) h1
                << (HOST_BITS_PER_WIDE_INT - count - 1) << 1));
    }

  /* Zero / sign extend all bits that are beyond the precision.  */

  if (count >= (HOST_WIDE_INT)prec)
    {
      *hv = signmask;
      *lv = signmask;
    }
  else if ((prec - count) >= 2 * HOST_BITS_PER_WIDE_INT)
    ;
  else if ((prec - count) >= HOST_BITS_PER_WIDE_INT)
    {
      *hv &= ~((HOST_WIDE_INT) (-1) << (prec - count - HOST_BITS_PER_WIDE_INT));
      *hv |= signmask << (prec - count - HOST_BITS_PER_WIDE_INT);
    }
  else
    {
      *hv = signmask;
      *lv &= ~((unsigned HOST_WIDE_INT) (-1) << (prec - count));
      *lv |= signmask << (prec - count);
    }
}

/* Rotate the doubleword integer in L1, H1 left by COUNT places
   keeping only PREC bits of result.
   Rotate right if COUNT is negative.
   Store the value as two `HOST_WIDE_INT' pieces in *LV and *HV.  */

void
lrotate_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
                HOST_WIDE_INT count, unsigned int prec,
                unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv)
{
  unsigned HOST_WIDE_INT s1l, s2l;
  HOST_WIDE_INT s1h, s2h;

  count %= prec;
  if (count < 0)
    count += prec;

  lshift_double (l1, h1, count, prec, &s1l, &s1h, 0);
  rshift_double (l1, h1, prec - count, prec, &s2l, &s2h, 0);
  *lv = s1l | s2l;
  *hv = s1h | s2h;
}

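/* This is the usual decomposition of a rotate into two logical shifts,
   rot (x, n) == (x << n) | (x >> (prec - n)), applied to the two-word
   representation: the bits shifted out at one end reappear at the
   other.  */
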
/* Rotate the doubleword integer in L1, H1 left by COUNT places
   keeping only PREC bits of result.  COUNT must be positive.
   Store the value as two `HOST_WIDE_INT' pieces in *LV and *HV.  */

void
rrotate_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
                HOST_WIDE_INT count, unsigned int prec,
                unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv)
{
  unsigned HOST_WIDE_INT s1l, s2l;
  HOST_WIDE_INT s1h, s2h;

  count %= prec;
  if (count < 0)
    count += prec;

  rshift_double (l1, h1, count, prec, &s1l, &s1h, 0);
  lshift_double (l1, h1, prec - count, prec, &s2l, &s2h, 0);
  *lv = s1l | s2l;
  *hv = s1h | s2h;
}

/* Divide doubleword integer LNUM, HNUM by doubleword integer LDEN, HDEN
   for a quotient (stored in *LQUO, *HQUO) and remainder (in *LREM, *HREM).
   CODE is a tree code for a kind of division, one of
   TRUNC_DIV_EXPR, FLOOR_DIV_EXPR, CEIL_DIV_EXPR, ROUND_DIV_EXPR
   or EXACT_DIV_EXPR
   It controls how the quotient is rounded to an integer.
   Return nonzero if the operation overflows.
   UNS nonzero says do unsigned division.  */

int
div_and_round_double (enum tree_code code, int uns,
                      unsigned HOST_WIDE_INT lnum_orig, /* num == numerator == dividend */
                      HOST_WIDE_INT hnum_orig,
                      unsigned HOST_WIDE_INT lden_orig, /* den == denominator == divisor */
                      HOST_WIDE_INT hden_orig,
                      unsigned HOST_WIDE_INT *lquo,
                      HOST_WIDE_INT *hquo, unsigned HOST_WIDE_INT *lrem,
                      HOST_WIDE_INT *hrem)
{
  int quo_neg = 0;
  HOST_WIDE_INT num[4 + 1];	/* extra element for scaling.  */
  HOST_WIDE_INT den[4], quo[4];
  int i, j;
  unsigned HOST_WIDE_INT work;
  unsigned HOST_WIDE_INT carry = 0;
  unsigned HOST_WIDE_INT lnum = lnum_orig;
  HOST_WIDE_INT hnum = hnum_orig;
  unsigned HOST_WIDE_INT lden = lden_orig;
  HOST_WIDE_INT hden = hden_orig;
  int overflow = 0;

  if (hden == 0 && lden == 0)
    overflow = 1, lden = 1;

  /* Calculate quotient sign and convert operands to unsigned.  */
  if (!uns)
    {
      if (hnum < 0)
        {
          quo_neg = ~ quo_neg;
          /* (minimum integer) / (-1) is the only overflow case.  */
          if (neg_double (lnum, hnum, &lnum, &hnum)
              && ((HOST_WIDE_INT) lden & hden) == -1)
            overflow = 1;
        }
      if (hden < 0)
        {
          quo_neg = ~ quo_neg;
          neg_double (lden, hden, &lden, &hden);
        }
    }

  if (hnum == 0 && hden == 0)
    {				/* single precision */
      *hquo = *hrem = 0;
      /* This unsigned division rounds toward zero.  */
      *lquo = lnum / lden;
      goto finish_up;
    }

  if (hnum == 0)
    {				/* trivial case: dividend < divisor */
      /* hden != 0 already checked.  */
      *hquo = *lquo = 0;
      *hrem = hnum;
      *lrem = lnum;
      goto finish_up;
    }

  memset (quo, 0, sizeof quo);

  memset (num, 0, sizeof num);	/* to zero 9th element */
  memset (den, 0, sizeof den);

  encode (num, lnum, hnum);
  encode (den, lden, hden);

  /* Special code for when the divisor < BASE.  */
  if (hden == 0 && lden < (unsigned HOST_WIDE_INT) BASE)
    {
      /* hnum != 0 already checked.  */
      for (i = 4 - 1; i >= 0; i--)
        {
          work = num[i] + carry * BASE;
          quo[i] = work / lden;
          carry = work % lden;
        }
    }
  else
    {
      /* Full double precision division,
         with thanks to Don Knuth's "Seminumerical Algorithms".  */
      int num_hi_sig, den_hi_sig;
      unsigned HOST_WIDE_INT quo_est, scale;

      /* Find the highest nonzero divisor digit.  */
      for (i = 4 - 1;; i--)
        if (den[i] != 0)
          {
            den_hi_sig = i;
            break;
          }

      /* Insure that the first digit of the divisor is at least BASE/2.
         This is required by the quotient digit estimation algorithm.  */

      scale = BASE / (den[den_hi_sig] + 1);
      if (scale > 1)
        {			/* scale divisor and dividend */
          carry = 0;
          for (i = 0; i <= 4 - 1; i++)
            {
              work = (num[i] * scale) + carry;
              num[i] = LOWPART (work);
              carry = HIGHPART (work);
            }

          num[4] = carry;
          carry = 0;
          for (i = 0; i <= 4 - 1; i++)
            {
              work = (den[i] * scale) + carry;
              den[i] = LOWPART (work);
              carry = HIGHPART (work);
              if (den[i] != 0) den_hi_sig = i;
            }
        }

      num_hi_sig = 4;

      /* Main loop */
      for (i = num_hi_sig - den_hi_sig - 1; i >= 0; i--)
        {
          /* Guess the next quotient digit, quo_est, by dividing the first
             two remaining dividend digits by the high order quotient digit.
             quo_est is never low and is at most 2 high.  */
          unsigned HOST_WIDE_INT tmp;

          num_hi_sig = i + den_hi_sig + 1;
          work = num[num_hi_sig] * BASE + num[num_hi_sig - 1];
          if (num[num_hi_sig] != den[den_hi_sig])
            quo_est = work / den[den_hi_sig];
          else
            quo_est = BASE - 1;

          /* Refine quo_est so it's usually correct, and at most one high.  */
          tmp = work - quo_est * den[den_hi_sig];
          if (tmp < BASE
              && (den[den_hi_sig - 1] * quo_est
                  > (tmp * BASE + num[num_hi_sig - 2])))
            quo_est--;

          /* Try QUO_EST as the quotient digit, by multiplying the
             divisor by QUO_EST and subtracting from the remaining dividend.
             Keep in mind that QUO_EST is the I - 1st digit.  */

          carry = 0;
          for (j = 0; j <= den_hi_sig; j++)
            {
              work = quo_est * den[j] + carry;
              carry = HIGHPART (work);
              work = num[i + j] - LOWPART (work);
              num[i + j] = LOWPART (work);
              carry += HIGHPART (work) != 0;
            }

          /* If quo_est was high by one, then num[i] went negative and
             we need to correct things.  */
          if (num[num_hi_sig] < (HOST_WIDE_INT) carry)
            {
              quo_est--;
              carry = 0;		/* add divisor back in */
              for (j = 0; j <= den_hi_sig; j++)
                {
                  work = num[i + j] + den[j] + carry;
                  carry = HIGHPART (work);
                  num[i + j] = LOWPART (work);
                }

              num [num_hi_sig] += carry;
            }

          /* Store the quotient digit.  */
          quo[i] = quo_est;
        }
    }

  decode (quo, lquo, hquo);

 finish_up:
  /* If result is negative, make it so.  */
  if (quo_neg)
    neg_double (*lquo, *hquo, lquo, hquo);

  /* Compute trial remainder:  rem = num - (quo * den)  */
  mul_double (*lquo, *hquo, lden_orig, hden_orig, lrem, hrem);
  neg_double (*lrem, *hrem, lrem, hrem);
  add_double (lnum_orig, hnum_orig, *lrem, *hrem, lrem, hrem);

  switch (code)
    {
    case TRUNC_DIV_EXPR:
    case TRUNC_MOD_EXPR:	/* round toward zero */
    case EXACT_DIV_EXPR:	/* for this one, it shouldn't matter */
      return overflow;

    case FLOOR_DIV_EXPR:
    case FLOOR_MOD_EXPR:	/* round toward negative infinity */
      if (quo_neg && (*lrem != 0 || *hrem != 0))   /* ratio < 0 && rem != 0 */
        {
          /* quo = quo - 1;  */
          add_double (*lquo, *hquo, (HOST_WIDE_INT) -1, (HOST_WIDE_INT) -1,
                      lquo, hquo);
        }
      else
        return overflow;
      break;

    case CEIL_DIV_EXPR:
    case CEIL_MOD_EXPR:		/* round toward positive infinity */
      if (!quo_neg && (*lrem != 0 || *hrem != 0))  /* ratio > 0 && rem != 0 */
        {
          add_double (*lquo, *hquo, (HOST_WIDE_INT) 1, (HOST_WIDE_INT) 0,
                      lquo, hquo);
        }
      else
        return overflow;
      break;

    case ROUND_DIV_EXPR:
    case ROUND_MOD_EXPR:	/* round to closest integer */
      {
        unsigned HOST_WIDE_INT labs_rem = *lrem;
        HOST_WIDE_INT habs_rem = *hrem;
        unsigned HOST_WIDE_INT labs_den = lden, ltwice;
        HOST_WIDE_INT habs_den = hden, htwice;

        /* Get absolute values.  */
        if (*hrem < 0)
          neg_double (*lrem, *hrem, &labs_rem, &habs_rem);
        if (hden < 0)
          neg_double (lden, hden, &labs_den, &habs_den);

        /* If (2 * abs (lrem) >= abs (lden)), adjust the quotient.  */
        mul_double ((HOST_WIDE_INT) 2, (HOST_WIDE_INT) 0,
                    labs_rem, habs_rem, &ltwice, &htwice);

        if (((unsigned HOST_WIDE_INT) habs_den
             < (unsigned HOST_WIDE_INT) htwice)
            || (((unsigned HOST_WIDE_INT) habs_den
                 == (unsigned HOST_WIDE_INT) htwice)
                && (labs_den <= ltwice)))
          {
            if (*hquo < 0)
              /* quo = quo - 1;  */
              add_double (*lquo, *hquo,
                          (HOST_WIDE_INT) -1, (HOST_WIDE_INT) -1, lquo, hquo);
            else
              /* quo = quo + 1;  */
              add_double (*lquo, *hquo, (HOST_WIDE_INT) 1, (HOST_WIDE_INT) 0,
                          lquo, hquo);
          }
        else
          return overflow;
      }
      break;

    default:
      gcc_unreachable ();
    }

  /* Compute true remainder:  rem = num - (quo * den)  */
  mul_double (*lquo, *hquo, lden_orig, hden_orig, lrem, hrem);
  neg_double (*lrem, *hrem, lrem, hrem);
  add_double (lnum_orig, hnum_orig, *lrem, *hrem, lrem, hrem);
  return overflow;
}

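/* The rounding adjustments above work on the truncated quotient and
   remainder.  For example, -7 / 2 truncates to -3 with remainder -1:
   FLOOR_DIV_EXPR decrements the quotient to -4, CEIL_DIV_EXPR leaves
   it at -3, and ROUND_DIV_EXPR compares 2*|rem| = 2 against |den| = 2
   and, since it is not smaller, rounds away from zero to -4.  */
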
/* If ARG2 divides ARG1 with zero remainder, carries out the division
   of type CODE and returns the quotient.
   Otherwise returns NULL_TREE.  */

static tree
div_if_zero_remainder (enum tree_code code, const_tree arg1, const_tree arg2)
{
  unsigned HOST_WIDE_INT int1l, int2l;
  HOST_WIDE_INT int1h, int2h;
  unsigned HOST_WIDE_INT quol, reml;
  HOST_WIDE_INT quoh, remh;
  tree type = TREE_TYPE (arg1);
  int uns = TYPE_UNSIGNED (type);

  int1l = TREE_INT_CST_LOW (arg1);
  int1h = TREE_INT_CST_HIGH (arg1);
  /* &obj[0] + -128 really should be compiled as &obj[-8] rather than
     &obj[some_exotic_number].  */
  if (POINTER_TYPE_P (type))
    {
      uns = false;
      type = signed_type_for (type);
      fit_double_type (int1l, int1h, &int1l, &int1h,
                       type);
    }
  else
    fit_double_type (int1l, int1h, &int1l, &int1h, type);
  int2l = TREE_INT_CST_LOW (arg2);
  int2h = TREE_INT_CST_HIGH (arg2);

  div_and_round_double (code, uns, int1l, int1h, int2l, int2h,
                        &quol, &quoh, &reml, &remh);
  if (remh != 0 || reml != 0)
    return NULL_TREE;

  return build_int_cst_wide (type, quol, quoh);
}

/* This is nonzero if we should defer warnings about undefined
   overflow.  This facility exists because these warnings are a
   special case.  The code to estimate loop iterations does not want
   to issue any warnings, since it works with expressions which do not
   occur in user code.  Various bits of cleanup code call fold(), but
   only use the result if it has certain characteristics (e.g., is a
   constant); that code only wants to issue a warning if the result is
   used.  */

static int fold_deferring_overflow_warnings;

/* If a warning about undefined overflow is deferred, this is the
   warning.  Note that this may cause us to turn two warnings into
   one, but that is fine since it is sufficient to only give one
   warning per expression.  */

static const char* fold_deferred_overflow_warning;

/* If a warning about undefined overflow is deferred, this is the
   level at which the warning should be emitted.  */

static enum warn_strict_overflow_code fold_deferred_overflow_code;

/* Start deferring overflow warnings.  We could use a stack here to
   permit nested calls, but at present it is not necessary.  */

void
fold_defer_overflow_warnings (void)
{
  ++fold_deferring_overflow_warnings;
}

/* Stop deferring overflow warnings.  If there is a pending warning,
   and ISSUE is true, then issue the warning if appropriate.  STMT is
   the statement with which the warning should be associated (used for
   location information); STMT may be NULL.  CODE is the level of the
   warning--a warn_strict_overflow_code value.  This function will use
   the smaller of CODE and the deferred code when deciding whether to
   issue the warning.  CODE may be zero to mean to always use the
   deferred code.  */

void
fold_undefer_overflow_warnings (bool issue, const_gimple stmt, int code)
{
  const char *warnmsg;
  location_t locus;

  gcc_assert (fold_deferring_overflow_warnings > 0);
  --fold_deferring_overflow_warnings;
  if (fold_deferring_overflow_warnings > 0)
    {
      if (fold_deferred_overflow_warning != NULL
          && code != 0
          && code < (int) fold_deferred_overflow_code)
        fold_deferred_overflow_code = code;
      return;
    }

  warnmsg = fold_deferred_overflow_warning;
  fold_deferred_overflow_warning = NULL;

  if (!issue || warnmsg == NULL)
    return;

  if (gimple_no_warning_p (stmt))
    return;

  /* Use the smallest code level when deciding to issue the
     warning.  */
  if (code == 0 || code > (int) fold_deferred_overflow_code)
    code = fold_deferred_overflow_code;

  if (!issue_strict_overflow_warning (code))
    return;

  if (stmt == NULL)
    locus = input_location;
  else
    locus = gimple_location (stmt);
  warning (OPT_Wstrict_overflow, "%H%s", &locus, warnmsg);
}

/* Stop deferring overflow warnings, ignoring any deferred
   warnings.  */

void
fold_undefer_and_ignore_overflow_warnings (void)
{
  fold_undefer_overflow_warnings (false, NULL, 0);
}

/* Whether we are deferring overflow warnings.  */

bool
fold_deferring_overflow_warnings_p (void)
{
  return fold_deferring_overflow_warnings > 0;
}

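/* A typical use, sketched here as a hypothetical caller, brackets a
   fold whose warnings should only be issued if the result is used:

     fold_defer_overflow_warnings ();
     t = fold_binary (code, type, op0, op1);
     fold_undefer_overflow_warnings (t != NULL_TREE, stmt, 0);

   If the result is discarded instead, calling
   fold_undefer_and_ignore_overflow_warnings drops any pending
   warning.  */
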
/* This is called when we fold something based on the fact that signed
   overflow is undefined.  */

static void
fold_overflow_warning (const char* gmsgid, enum warn_strict_overflow_code wc)
{
  if (fold_deferring_overflow_warnings > 0)
    {
      if (fold_deferred_overflow_warning == NULL
          || wc < fold_deferred_overflow_code)
        {
          fold_deferred_overflow_warning = gmsgid;
          fold_deferred_overflow_code = wc;
        }
    }
  else if (issue_strict_overflow_warning (wc))
    warning (OPT_Wstrict_overflow, gmsgid);
}

/* Return true if the built-in mathematical function specified by CODE
   is odd, i.e. -f(x) == f(-x).  */

static bool
negate_mathfn_p (enum built_in_function code)
{
  switch (code)
    {
    CASE_FLT_FN (BUILT_IN_ASIN):
    CASE_FLT_FN (BUILT_IN_ASINH):
    CASE_FLT_FN (BUILT_IN_ATAN):
    CASE_FLT_FN (BUILT_IN_ATANH):
    CASE_FLT_FN (BUILT_IN_CASIN):
    CASE_FLT_FN (BUILT_IN_CASINH):
    CASE_FLT_FN (BUILT_IN_CATAN):
    CASE_FLT_FN (BUILT_IN_CATANH):
    CASE_FLT_FN (BUILT_IN_CBRT):
    CASE_FLT_FN (BUILT_IN_CPROJ):
    CASE_FLT_FN (BUILT_IN_CSIN):
    CASE_FLT_FN (BUILT_IN_CSINH):
    CASE_FLT_FN (BUILT_IN_CTAN):
    CASE_FLT_FN (BUILT_IN_CTANH):
    CASE_FLT_FN (BUILT_IN_ERF):
    CASE_FLT_FN (BUILT_IN_LLROUND):
    CASE_FLT_FN (BUILT_IN_LROUND):
    CASE_FLT_FN (BUILT_IN_ROUND):
    CASE_FLT_FN (BUILT_IN_SIN):
    CASE_FLT_FN (BUILT_IN_SINH):
    CASE_FLT_FN (BUILT_IN_TAN):
    CASE_FLT_FN (BUILT_IN_TANH):
    CASE_FLT_FN (BUILT_IN_TRUNC):
      return true;

    CASE_FLT_FN (BUILT_IN_LLRINT):
    CASE_FLT_FN (BUILT_IN_LRINT):
    CASE_FLT_FN (BUILT_IN_NEARBYINT):
    CASE_FLT_FN (BUILT_IN_RINT):
      return !flag_rounding_math;

    default:
      break;
    }
  return false;
}

/* Check whether we may negate an integer constant T without causing
   overflow.  */

bool
may_negate_without_overflow_p (const_tree t)
{
  unsigned HOST_WIDE_INT val;
  unsigned int prec;
  tree type;

  gcc_assert (TREE_CODE (t) == INTEGER_CST);

  type = TREE_TYPE (t);
  if (TYPE_UNSIGNED (type))
    return integer_zerop (t);

  prec = TYPE_PRECISION (type);
  if (prec > HOST_BITS_PER_WIDE_INT)
    {
      if (TREE_INT_CST_LOW (t) != 0)
        return true;
      prec -= HOST_BITS_PER_WIDE_INT;
      val = TREE_INT_CST_HIGH (t);
    }
  else
    val = TREE_INT_CST_LOW (t);
  if (prec < HOST_BITS_PER_WIDE_INT)
    val &= ((unsigned HOST_WIDE_INT) 1 << prec) - 1;
  return val != ((unsigned HOST_WIDE_INT) 1 << (prec - 1));
}

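/* In a signed type only the most negative value has no negatable
   counterpart: e.g. for 32-bit int, -(-2147483648) is not
   representable, so the check above compares against the lone
   sign-bit pattern 1 << (prec - 1).  */
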
/* Determine whether an expression T can be cheaply negated using
   the function negate_expr without introducing undefined overflow.  */

static bool
negate_expr_p (tree t)
{
  tree type;

  if (t == 0)
    return false;

  type = TREE_TYPE (t);

  STRIP_SIGN_NOPS (t);
  switch (TREE_CODE (t))
    {
    case INTEGER_CST:
      if (TYPE_OVERFLOW_TRAPS (type))
        return may_negate_without_overflow_p (t);
      return true;

    case BIT_NOT_EXPR:
      return INTEGRAL_TYPE_P (type);

    case FIXED_CST:
    case REAL_CST:
    case NEGATE_EXPR:
    case NEGATENV_EXPR:
      return true;

    case COMPLEX_CST:
      return negate_expr_p (TREE_REALPART (t))
             && negate_expr_p (TREE_IMAGPART (t));

    case COMPLEX_EXPR:
      return negate_expr_p (TREE_OPERAND (t, 0))
             && negate_expr_p (TREE_OPERAND (t, 1));

    case CONJ_EXPR:
      return negate_expr_p (TREE_OPERAND (t, 0));

    case PLUS_EXPR:
    case PLUSNV_EXPR:
      if (HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type))
          || HONOR_SIGNED_ZEROS (TYPE_MODE (type)))
        return false;
      /* -(A + B) -> (-B) - A.  */
      if (negate_expr_p (TREE_OPERAND (t, 1))
          && reorder_operands_p (TREE_OPERAND (t, 0),
                                 TREE_OPERAND (t, 1)))
        return true;
      /* -(A + B) -> (-A) - B.  */
      return negate_expr_p (TREE_OPERAND (t, 0));

    case MINUS_EXPR:
    case MINUSNV_EXPR:
      /* We can't turn -(A-B) into B-A when we honor signed zeros.  */
      return !HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type))
             && !HONOR_SIGNED_ZEROS (TYPE_MODE (type))
             && reorder_operands_p (TREE_OPERAND (t, 0),
                                    TREE_OPERAND (t, 1));

    case MULT_EXPR:
    case MULTNV_EXPR:
      if (TYPE_UNSIGNED (TREE_TYPE (t)))
        break;

      /* Fall through.  */

    case RDIV_EXPR:
      if (! HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (TREE_TYPE (t))))
        return negate_expr_p (TREE_OPERAND (t, 1))
               || negate_expr_p (TREE_OPERAND (t, 0));
      break;

    case TRUNC_DIV_EXPR:
    case ROUND_DIV_EXPR:
    case FLOOR_DIV_EXPR:
    case CEIL_DIV_EXPR:
    case EXACT_DIV_EXPR:
      /* In general we can't negate A / B, because if A is INT_MIN and
         B is 1, we may turn this into INT_MIN / -1 which is undefined
         and actually traps on some architectures.  */
      if (INTEGRAL_TYPE_P (TREE_TYPE (t)))
        break;
      return negate_expr_p (TREE_OPERAND (t, 1))
             || negate_expr_p (TREE_OPERAND (t, 0));

    CASE_CONVERT:
      /* Negate -((double)float) as (double)(-float).  */
      if (TREE_CODE (type) == REAL_TYPE)
        {
          tree tem = strip_float_extensions (t);
          if (tem != t)
            return negate_expr_p (tem);
        }
      break;

    case CALL_EXPR:
      /* Negate -f(x) as f(-x).  */
      if (negate_mathfn_p (builtin_mathfn_code (t)))
        return negate_expr_p (CALL_EXPR_ARG (t, 0));
      break;

    case RSHIFT_EXPR:
      /* Optimize -((int)x >> 31) into (unsigned)x >> 31.  */
      if (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST)
        {
          tree op1 = TREE_OPERAND (t, 1);
          if (TREE_INT_CST_HIGH (op1) == 0
              && (unsigned HOST_WIDE_INT) (TYPE_PRECISION (type) - 1)
                 == TREE_INT_CST_LOW (op1))
            return true;
        }
      break;

    default:
      break;
    }
  return false;
}

/* Given T, an expression, return a folded tree for -T or NULL_TREE, if no
   simplification is possible.
   If negate_expr_p would return true for T, NULL_TREE will never be
   returned.  */

static tree
fold_negate_expr (tree t)
{
  tree type = TREE_TYPE (t);
  tree tem;

  switch (TREE_CODE (t))
    {
    /* Convert - (~A) to A + 1.  */
    case BIT_NOT_EXPR:
      if (INTEGRAL_TYPE_P (type))
        return fold_build2 (PLUS_EXPR, type, TREE_OPERAND (t, 0),
                            build_int_cst (type, 1));
      break;

    case INTEGER_CST:
      tem = fold_negate_const (t, type);
      if (TREE_OVERFLOW (tem) == TREE_OVERFLOW (t)
          || !TYPE_OVERFLOW_TRAPS (type))
        return tem;
      break;

    case REAL_CST:
      tem = fold_negate_const (t, type);
      /* Two's complement FP formats, such as c4x, may overflow.  */
      if (!TREE_OVERFLOW (tem) || !flag_trapping_math)
        return tem;
      break;

    case FIXED_CST:
      tem = fold_negate_const (t, type);
      return tem;

    case COMPLEX_CST:
      {
        tree rpart = negate_expr (TREE_REALPART (t));
        tree ipart = negate_expr (TREE_IMAGPART (t));

        if ((TREE_CODE (rpart) == REAL_CST
             && TREE_CODE (ipart) == REAL_CST)
            || (TREE_CODE (rpart) == INTEGER_CST
                && TREE_CODE (ipart) == INTEGER_CST))
          return build_complex (type, rpart, ipart);
      }
      break;

    case COMPLEX_EXPR:
      if (negate_expr_p (t))
        return fold_build2 (COMPLEX_EXPR, type,
                            fold_negate_expr (TREE_OPERAND (t, 0)),
                            fold_negate_expr (TREE_OPERAND (t, 1)));
      break;

    case CONJ_EXPR:
      if (negate_expr_p (t))
        return fold_build1 (CONJ_EXPR, type,
                            fold_negate_expr (TREE_OPERAND (t, 0)));
      break;

    case NEGATE_EXPR:
    case NEGATENV_EXPR:
      return TREE_OPERAND (t, 0);

    case PLUS_EXPR:
    case PLUSNV_EXPR:
      if (!HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type))
          && !HONOR_SIGNED_ZEROS (TYPE_MODE (type)))
        {
          /* -(A + B) -> (-B) - A.  */
          if (negate_expr_p (TREE_OPERAND (t, 1))
              && reorder_operands_p (TREE_OPERAND (t, 0),
                                     TREE_OPERAND (t, 1)))
            {
              tem = negate_expr (TREE_OPERAND (t, 1));
              return fold_build2 (MINUS_EXPR, type,
                                  tem, TREE_OPERAND (t, 0));
            }

          /* -(A + B) -> (-A) - B.  */
          if (negate_expr_p (TREE_OPERAND (t, 0)))
            {
              tem = negate_expr (TREE_OPERAND (t, 0));
              return fold_build2 (MINUS_EXPR, type,
                                  tem, TREE_OPERAND (t, 1));
            }
        }
      break;

    case MINUS_EXPR:
    case MINUSNV_EXPR:
      /* - (A - B) -> B - A  */
      if (!HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type))
          && !HONOR_SIGNED_ZEROS (TYPE_MODE (type))
          && reorder_operands_p (TREE_OPERAND (t, 0), TREE_OPERAND (t, 1)))
        return fold_build2 (MINUS_EXPR, type,
                            TREE_OPERAND (t, 1), TREE_OPERAND (t, 0));
      break;

    case MULT_EXPR:
    case MULTNV_EXPR:
      if (TYPE_UNSIGNED (type))
        break;

      /* Fall through.  */

    case RDIV_EXPR:
      if (! HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type)))
        {
          tem = TREE_OPERAND (t, 1);
          if (negate_expr_p (tem))
            return fold_build2 (TREE_CODE (t), type,
                                TREE_OPERAND (t, 0), negate_expr (tem));
          tem = TREE_OPERAND (t, 0);
          if (negate_expr_p (tem))
            return fold_build2 (TREE_CODE (t), type,
                                negate_expr (tem), TREE_OPERAND (t, 1));
        }
      break;

    case TRUNC_DIV_EXPR:
    case ROUND_DIV_EXPR:
    case FLOOR_DIV_EXPR:
    case CEIL_DIV_EXPR:
    case EXACT_DIV_EXPR:
      /* In general we can't negate A / B, because if A is INT_MIN and
         B is 1, we may turn this into INT_MIN / -1 which is undefined
         and actually traps on some architectures.  */
      if (!INTEGRAL_TYPE_P (type))
        {
          tem = TREE_OPERAND (t, 1);
          if (negate_expr_p (tem))
            return fold_build2 (TREE_CODE (t), type,
                                TREE_OPERAND (t, 0), negate_expr (tem));
          tem = TREE_OPERAND (t, 0);
          if (negate_expr_p (tem))
            return fold_build2 (TREE_CODE (t), type,
                                negate_expr (tem), TREE_OPERAND (t, 1));
        }
      break;

    CASE_CONVERT:
      /* Convert -((double)float) into (double)(-float).  */
      if (TREE_CODE (type) == REAL_TYPE)
        {
          tem = strip_float_extensions (t);
          if (tem != t && negate_expr_p (tem))
            return fold_convert (type, negate_expr (tem));
        }
      break;

    case CALL_EXPR:
      /* Negate -f(x) as f(-x).  */
      if (negate_mathfn_p (builtin_mathfn_code (t))
          && negate_expr_p (CALL_EXPR_ARG (t, 0)))
        {
          tree fndecl, arg;

          fndecl = get_callee_fndecl (t);
          arg = negate_expr (CALL_EXPR_ARG (t, 0));
          return build_call_expr (fndecl, 1, arg);
        }
      break;

    case RSHIFT_EXPR:
      /* Optimize -((int)x >> 31) into (unsigned)x >> 31.  */
      if (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST)
        {
          tree op1 = TREE_OPERAND (t, 1);
          if (TREE_INT_CST_HIGH (op1) == 0
              && (unsigned HOST_WIDE_INT) (TYPE_PRECISION (type) - 1)
                 == TREE_INT_CST_LOW (op1))
            {
              tree ntype = TYPE_UNSIGNED (type)
                           ? signed_type_for (type)
                           : unsigned_type_for (type);
              tree temp = fold_convert (ntype, TREE_OPERAND (t, 0));
              temp = fold_build2 (RSHIFT_EXPR, ntype, temp, op1);
              return fold_convert (type, temp);
            }
        }
      break;

    default:
      break;
    }

  return NULL_TREE;
}

/* Like fold_negate_expr, but return a NEGATE_EXPR tree, if T can not be
   negated in a simpler way.  Also allow for T to be NULL_TREE, in which case
   return NULL_TREE.  */

static tree
negate_expr (tree t)
{
  tree type, tem;

  if (t == NULL_TREE)
    return NULL_TREE;

  type = TREE_TYPE (t);
  STRIP_SIGN_NOPS (t);

  tem = fold_negate_expr (t);
  if (!tem)
    tem = build1 (NEGATE_EXPR, TREE_TYPE (t), t);
  return fold_convert (type, tem);
}

/* Split a tree IN into a constant, literal and variable parts that could be
   combined with CODE to make IN.  "constant" means an expression with
   TREE_CONSTANT but that isn't an actual constant.  CODE must be a
   commutative arithmetic operation.  Store the constant part into *CONP,
   the literal in *LITP and return the variable part.  If a part isn't
   present, set it to null.  If the tree does not decompose in this way,
   return the entire tree as the variable part and the other parts as null.

   If CODE is PLUS_EXPR we also split trees that use MINUS_EXPR.  In that
   case, we negate an operand that was subtracted.  Except if it is a
   literal for which we use *MINUS_LITP instead.

   If NEGATE_P is true, we are negating all of IN, again except a literal
   for which we use *MINUS_LITP instead.

   If IN is itself a literal or constant, return it as appropriate.

   Note that we do not guarantee that any of the three values will be the
   same type as IN, but they will have the same signedness and mode.  */

static tree
split_tree (tree in, enum tree_code code, tree *conp, tree *litp,
            tree *minus_litp, int negate_p)
{
  tree var = 0;

  *conp = 0;
  *litp = 0;
  *minus_litp = 0;

  /* Strip any conversions that don't change the machine mode or signedness.  */
  STRIP_SIGN_NOPS (in);

  if (TREE_CODE (in) == INTEGER_CST || TREE_CODE (in) == REAL_CST
      || TREE_CODE (in) == FIXED_CST)
    *litp = in;
  else if (TREE_CODE (in) == code
           || ((! FLOAT_TYPE_P (TREE_TYPE (in)) || flag_associative_math)
               && ! SAT_FIXED_POINT_TYPE_P (TREE_TYPE (in))
               /* We can associate addition and subtraction together (even
                  though the C standard doesn't say so) for integers because
                  the value is not affected.  For reals, the value might be
                  affected, so we can't.  */
               && ((PLUS_EXPR_CODE_P (code) && MINUS_EXPR_P (in))
                   || (MINUS_EXPR_CODE_P (code) && PLUS_EXPR_P (in)))))
    {
      tree op0 = TREE_OPERAND (in, 0);
      tree op1 = TREE_OPERAND (in, 1);
      int neg1_p = MINUS_EXPR_P (in);
      int neg_litp_p = 0, neg_conp_p = 0, neg_var_p = 0;

      /* First see if either of the operands is a literal, then a constant.  */
      if (TREE_CODE (op0) == INTEGER_CST || TREE_CODE (op0) == REAL_CST
          || TREE_CODE (op0) == FIXED_CST)
        *litp = op0, op0 = 0;
      else if (TREE_CODE (op1) == INTEGER_CST || TREE_CODE (op1) == REAL_CST
               || TREE_CODE (op1) == FIXED_CST)
        *litp = op1, neg_litp_p = neg1_p, op1 = 0;

      if (op0 != 0 && TREE_CONSTANT (op0))
        *conp = op0, op0 = 0;
      else if (op1 != 0 && TREE_CONSTANT (op1))
        *conp = op1, neg_conp_p = neg1_p, op1 = 0;

      /* If we haven't dealt with either operand, this is not a case we can
         decompose.  Otherwise, VAR is either of the ones remaining, if any.  */
      if (op0 != 0 && op1 != 0)
        var = in;
      else if (op0 != 0)
        var = op0;
      else
        var = op1, neg_var_p = neg1_p;

      /* Now do any needed negations.  */
      if (neg_litp_p)
        *minus_litp = *litp, *litp = 0;
      if (neg_conp_p)
        *conp = negate_expr (*conp);
      if (neg_var_p)
        var = negate_expr (var);
    }
  else if (TREE_CONSTANT (in))
    *conp = in;
  else
    var = in;

  if (negate_p)
    {
      if (*litp)
        *minus_litp = *litp, *litp = 0;
      else if (*minus_litp)
        *litp = *minus_litp, *minus_litp = 0;
      *conp = negate_expr (*conp);
      var = negate_expr (var);
    }

  return var;
}

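/* For example, splitting IN = x - 5 with CODE == PLUS_EXPR returns the
   variable part x, sets *MINUS_LITP to 5 (the subtracted literal) and
   leaves *LITP and *CONP null, so the caller can rebuild the value as
   x + (-5) or fold the literal together with another one.  */
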
/* Re-associate trees split by the above function.  T1 and T2 are either
   expressions to associate or null.  Return the new expression, if any.  If
   we build an operation, do it in TYPE and with CODE.  */

static tree
associate_trees (tree t1, tree t2, enum tree_code code, tree type)
{
  if (t1 == 0)
    return t2;
  else if (t2 == 0)
    return t1;

  /* If either input is CODE, a PLUS_EXPR, or a MINUS_EXPR, don't
     try to fold this since we will have infinite recursion.  But do
     deal with any NEGATE_EXPRs.  */
  if (TREE_CODE (t1) == code
      || TREE_CODE (t2) == code
      || MINUS_EXPR_P (t1) || MINUS_EXPR_P (t2))
    {
      if (PLUS_EXPR_CODE_P (code))
        {
          if (NEGATE_EXPR_P (t1))
            return build2 (MINUS_EXPR, type, fold_convert (type, t2),
                           fold_convert (type, TREE_OPERAND (t1, 0)));
          else if (NEGATE_EXPR_P (t2))
            return build2 (MINUS_EXPR, type, fold_convert (type, t1),
                           fold_convert (type, TREE_OPERAND (t2, 0)));
          else if (integer_zerop (t2))
            return fold_convert (type, t1);
        }
      else if (MINUS_EXPR_CODE_P (code))
        {
          if (integer_zerop (t2))
            return fold_convert (type, t1);
        }

      return build2 (strip_nv (code), type, fold_convert (type, t1),
                     fold_convert (type, t2));
    }

  return fold_build2 (strip_nv (code), type, fold_convert (type, t1),
                      fold_convert (type, t2));
}

/* Check whether TYPE1 and TYPE2 are equivalent integer types, suitable
   for use in int_const_binop, size_binop and size_diffop.  */

static bool
int_binop_types_match_p (enum tree_code code, const_tree type1, const_tree type2)
{
  if (TREE_CODE (type1) != INTEGER_TYPE && !POINTER_TYPE_P (type1))
    return false;
  if (TREE_CODE (type2) != INTEGER_TYPE && !POINTER_TYPE_P (type2))
    return false;

  switch (code)
    {
    case LSHIFT_EXPR:
    case RSHIFT_EXPR:
    case LROTATE_EXPR:
    case RROTATE_EXPR:
      return true;

    default:
      break;
    }

  return TYPE_UNSIGNED (type1) == TYPE_UNSIGNED (type2)
         && TYPE_PRECISION (type1) == TYPE_PRECISION (type2)
         && TYPE_MODE (type1) == TYPE_MODE (type2);
}

/* Combine two integer constants ARG1 and ARG2 under operation CODE
   to produce a new constant in *LOWP, *HIP.  Return -1 if we don't know
   how to evaluate CODE at compile-time, otherwise return 1 if the
   operation overflowed and 0 if not.  */

int
int_const_binop_1 (enum tree_code code, const_tree arg1, const_tree arg2,
                   unsigned HOST_WIDE_INT *lowp, HOST_WIDE_INT *hip)
{
  unsigned HOST_WIDE_INT int1l, int2l;
  HOST_WIDE_INT int1h, int2h;
  unsigned HOST_WIDE_INT low;
  HOST_WIDE_INT hi;
  unsigned HOST_WIDE_INT garbagel;
  HOST_WIDE_INT garbageh;
  tree type = TREE_TYPE (arg1);
  int uns = TYPE_UNSIGNED (type);
  int overflow = 0;

  int1l = TREE_INT_CST_LOW (arg1);
  int1h = TREE_INT_CST_HIGH (arg1);
  int2l = TREE_INT_CST_LOW (arg2);
  int2h = TREE_INT_CST_HIGH (arg2);

  switch (code)
    {
    case BIT_IOR_EXPR:
      low = int1l | int2l, hi = int1h | int2h;
      break;

    case BIT_XOR_EXPR:
      low = int1l ^ int2l, hi = int1h ^ int2h;
      break;

    case BIT_AND_EXPR:
      low = int1l & int2l, hi = int1h & int2h;
      break;

    case RSHIFT_EXPR:
      int2l = -int2l;
    case LSHIFT_EXPR:
      /* It's unclear from the C standard whether shifts can overflow.
         The following code ignores overflow; perhaps a C standard
         interpretation ruling is needed.  */
      lshift_double (int1l, int1h, int2l, TYPE_PRECISION (type),
                     &low, &hi, !uns);
      break;

    case RROTATE_EXPR:
      int2l = - int2l;
    case LROTATE_EXPR:
      lrotate_double (int1l, int1h, int2l, TYPE_PRECISION (type),
                      &low, &hi);
      break;

    case PLUS_EXPR:
    case PLUSNV_EXPR:
      overflow = add_double_with_sign (int1l, int1h, int2l, int2h,
                                       &low, &hi, uns);
      break;

    case MINUS_EXPR:
    case MINUSNV_EXPR:
      neg_double (int2l, int2h, &low, &hi);
      add_double (int1l, int1h, low, hi, &low, &hi);
      if (uns)
        overflow = ((unsigned HOST_WIDE_INT) hi > (unsigned HOST_WIDE_INT) int1h
                    || (hi == int1h
                        && low > int1l));
      else
        overflow = OVERFLOW_SUM_SIGN (hi, int2h, int1h);
      break;

    case MULT_EXPR:
    case MULTNV_EXPR:
      overflow = mul_double_with_sign (int1l, int1h, int2l, int2h,
                                       &low, &hi, uns);
      break;

    case TRUNC_DIV_EXPR:
    case FLOOR_DIV_EXPR: case CEIL_DIV_EXPR:
    case EXACT_DIV_EXPR:
      /* This is a shortcut for a common special case.  */
      if (int2h == 0 && (HOST_WIDE_INT) int2l > 0
          && !TREE_OVERFLOW (arg1)
          && !TREE_OVERFLOW (arg2)
          && int1h == 0 && (HOST_WIDE_INT) int1l >= 0)
        {
          if (code == CEIL_DIV_EXPR)
            int1l += int2l - 1;

          low = int1l / int2l, hi = 0;
          break;
        }

      /* ... fall through ...  */

    case ROUND_DIV_EXPR:
      if (int2h == 0 && int2l == 0)
        return -1;
      if (int2h == 0 && int2l == 1)
        {
          low = int1l, hi = int1h;
          break;
        }
      if (int1l == int2l && int1h == int2h
          && ! (int1l == 0 && int1h == 0))
        {
          low = 1, hi = 0;
          break;
        }
      overflow = div_and_round_double (code, uns, int1l, int1h, int2l, int2h,
                                       &low, &hi, &garbagel, &garbageh);
      break;

    case TRUNC_MOD_EXPR:
    case FLOOR_MOD_EXPR: case CEIL_MOD_EXPR:
      /* This is a shortcut for a common special case.  */
      if (int2h == 0 && (HOST_WIDE_INT) int2l > 0
          && !TREE_OVERFLOW (arg1)
          && !TREE_OVERFLOW (arg2)
          && int1h == 0 && (HOST_WIDE_INT) int1l >= 0)
        {
          if (code == CEIL_MOD_EXPR)
            int1l += int2l - 1;
          low = int1l % int2l, hi = 0;
          break;
        }

      /* ... fall through ...  */

    case ROUND_MOD_EXPR:
      if (int2h == 0 && int2l == 0)
        return -1;
      overflow = div_and_round_double (code, uns,
                                       int1l, int1h, int2l, int2h,
                                       &garbagel, &garbageh, &low, &hi);
      break;

    case MIN_EXPR:
    case MAX_EXPR:
      if (uns)
        low = (((unsigned HOST_WIDE_INT) int1h
                < (unsigned HOST_WIDE_INT) int2h)
               || (((unsigned HOST_WIDE_INT) int1h
                    == (unsigned HOST_WIDE_INT) int2h)
                   && int1l < int2l));
      else
        low = (int1h < int2h
               || (int1h == int2h && int1l < int2l));

      if (low == (code == MIN_EXPR))
        low = int1l, hi = int1h;
      else
        low = int2l, hi = int2h;
      break;

    default:
      return -1;
    }

  *lowp = low;
  *hip = hi;

  return overflow;
}

/* Combine two integer constants ARG1 and ARG2 under operation CODE
   to produce a new constant.  Return NULL_TREE if we don't know how
   to evaluate CODE at compile-time.

   If NOTRUNC is nonzero, do not truncate the result to fit the data type.  */

tree
int_const_binop (enum tree_code code, const_tree arg1, const_tree arg2,
                 int notrunc)
{
  unsigned HOST_WIDE_INT low;
  HOST_WIDE_INT hi;
  tree t;
  tree type = TREE_TYPE (arg1);
  int uns = TYPE_UNSIGNED (type);
  int is_sizetype
    = (TREE_CODE (type) == INTEGER_TYPE && TYPE_IS_SIZETYPE (type));
  int overflow;

  overflow = int_const_binop_1 (code, arg1, arg2, &low, &hi);
  if (overflow == -1)
    return NULL_TREE;

  if (notrunc)
    {
      t = build_int_cst_wide (TREE_TYPE (arg1), low, hi);

      /* Propagate overflow flags ourselves.  */
      if (((!uns || is_sizetype) && overflow)
          | TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2))
        {
          t = copy_node (t);
          TREE_OVERFLOW (t) = 1;
        }
    }
  else
    t = force_fit_type_double (TREE_TYPE (arg1), low, hi, 1,
                               ((!uns || is_sizetype) && overflow)
                               | TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2));

  return t;
}

/* Combine two constants ARG1 and ARG2 under operation CODE to produce a new
   constant.  We assume ARG1 and ARG2 have the same data type, or at least
   are the same kind of constant and the same machine mode.  Return zero if
   combining the constants is not allowed in the current operating mode.

   If NOTRUNC is nonzero, do not truncate the result to fit the data type.  */

static tree
const_binop (enum tree_code code, tree arg1, tree arg2, int notrunc)
{
  /* Sanity check for the recursive cases.  */
  if (!arg1 || !arg2)
    return NULL_TREE;

  STRIP_NOPS (arg1);
  STRIP_NOPS (arg2);

  if (TREE_CODE (arg1) == INTEGER_CST)
    return int_const_binop (code, arg1, arg2, notrunc);

  if (TREE_CODE (arg1) == REAL_CST)
    {
      enum machine_mode mode;
      REAL_VALUE_TYPE d1;
      REAL_VALUE_TYPE d2;
      REAL_VALUE_TYPE value;
      REAL_VALUE_TYPE result;
      bool inexact;
      tree t, type;

      /* The following codes are handled by real_arithmetic.  */
      switch (code)
        {
        case PLUS_EXPR:
        case MINUS_EXPR:
        case MULT_EXPR:
        case RDIV_EXPR:
        case MIN_EXPR:
        case MAX_EXPR:
          break;

        default:
          return NULL_TREE;
        }

      d1 = TREE_REAL_CST (arg1);
      d2 = TREE_REAL_CST (arg2);

      type = TREE_TYPE (arg1);
      mode = TYPE_MODE (type);

      /* Don't perform operation if we honor signaling NaNs and
         either operand is a NaN.  */
      if (HONOR_SNANS (mode)
          && (REAL_VALUE_ISNAN (d1) || REAL_VALUE_ISNAN (d2)))
        return NULL_TREE;

      /* Don't perform operation if it would raise a division
         by zero exception.  */
      if (code == RDIV_EXPR
          && REAL_VALUES_EQUAL (d2, dconst0)
          && (flag_trapping_math || ! MODE_HAS_INFINITIES (mode)))
        return NULL_TREE;

      /* If either operand is a NaN, just return it.  Otherwise, set up
         for floating-point trap; we return an overflow.  */
      if (REAL_VALUE_ISNAN (d1))
        return arg1;
      else if (REAL_VALUE_ISNAN (d2))
        return arg2;

      inexact = real_arithmetic (&value, code, &d1, &d2);
      real_convert (&result, mode, &value);

      /* Don't constant fold this floating point operation if
         the result has overflowed and flag_trapping_math.  */
      if (flag_trapping_math
          && MODE_HAS_INFINITIES (mode)
          && REAL_VALUE_ISINF (result)
          && !REAL_VALUE_ISINF (d1)
          && !REAL_VALUE_ISINF (d2))
        return NULL_TREE;

      /* Don't constant fold this floating point operation if the
         result may depend upon the run-time rounding mode and
         flag_rounding_math is set, or if GCC's software emulation
         is unable to accurately represent the result.  */
      if ((flag_rounding_math
           || (MODE_COMPOSITE_P (mode) && !flag_unsafe_math_optimizations))
          && (inexact || !real_identical (&result, &value)))
        return NULL_TREE;

      t = build_real (type, result);

      TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2);
      return t;
    }

  if (TREE_CODE (arg1) == FIXED_CST)
    {
      FIXED_VALUE_TYPE f1;
      FIXED_VALUE_TYPE f2;
      FIXED_VALUE_TYPE result;
      tree t, type;
      int sat_p;
      bool overflow_p;

      /* The following codes are handled by fixed_arithmetic.  */
      switch (code)
        {
        case PLUS_EXPR:
        case MINUS_EXPR:
        case MULT_EXPR:
        case TRUNC_DIV_EXPR:
          f2 = TREE_FIXED_CST (arg2);
          break;

        case LSHIFT_EXPR:
        case RSHIFT_EXPR:
          f2.data.high = TREE_INT_CST_HIGH (arg2);
          f2.data.low = TREE_INT_CST_LOW (arg2);
          f2.mode = SImode;
          break;

        default:
          return NULL_TREE;
        }

      f1 = TREE_FIXED_CST (arg1);
      type = TREE_TYPE (arg1);
      sat_p = TYPE_SATURATING (type);
      overflow_p = fixed_arithmetic (&result, code, &f1, &f2, sat_p);
      t = build_fixed (type, result);
      /* Propagate overflow flags.  */
      if (overflow_p | TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2))
        {
          TREE_OVERFLOW (t) = 1;
          TREE_CONSTANT_OVERFLOW (t) = 1;
        }
      else if (TREE_CONSTANT_OVERFLOW (arg1) | TREE_CONSTANT_OVERFLOW (arg2))
        TREE_CONSTANT_OVERFLOW (t) = 1;
      return t;
    }

  if (TREE_CODE (arg1) == COMPLEX_CST)
    {
      tree type = TREE_TYPE (arg1);
      tree r1 = TREE_REALPART (arg1);
      tree i1 = TREE_IMAGPART (arg1);
      tree r2 = TREE_REALPART (arg2);
      tree i2 = TREE_IMAGPART (arg2);
      tree real, imag;

      switch (code)
        {
        case PLUS_EXPR:
        case MINUS_EXPR:
          real = const_binop (code, r1, r2, notrunc);
          imag = const_binop (code, i1, i2, notrunc);
          break;

        case MULT_EXPR:
          real = const_binop (MINUS_EXPR,
                              const_binop (MULT_EXPR, r1, r2, notrunc),
                              const_binop (MULT_EXPR, i1, i2, notrunc),
                              notrunc);
          imag = const_binop (PLUS_EXPR,
                              const_binop (MULT_EXPR, r1, i2, notrunc),
                              const_binop (MULT_EXPR, i1, r2, notrunc),
                              notrunc);
          break;
2002 case RDIV_EXPR:
2004 tree magsquared
2005 = const_binop (PLUS_EXPR,
2006 const_binop (MULT_EXPR, r2, r2, notrunc),
2007 const_binop (MULT_EXPR, i2, i2, notrunc),
2008 notrunc);
2009 tree t1
2010 = const_binop (PLUS_EXPR,
2011 const_binop (MULT_EXPR, r1, r2, notrunc),
2012 const_binop (MULT_EXPR, i1, i2, notrunc),
2013 notrunc);
2014 tree t2
2015 = const_binop (MINUS_EXPR,
2016 const_binop (MULT_EXPR, i1, r2, notrunc),
2017 const_binop (MULT_EXPR, r1, i2, notrunc),
2018 notrunc);
2020 if (INTEGRAL_TYPE_P (TREE_TYPE (r1)))
2021 code = TRUNC_DIV_EXPR;
2023 real = const_binop (code, t1, magsquared, notrunc);
2024 imag = const_binop (code, t2, magsquared, notrunc);
2026 break;
2028 default:
2029 return NULL_TREE;
2032 if (real && imag)
2033 return build_complex (type, real, imag);
2036 return NULL_TREE;
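/* Illustrative sketch, not part of the original file: the COMPLEX_CST
   folding above uses the textbook identities for complex multiplication
   and division.  A standalone C analogue, with plain doubles standing in
   for the folded REAL_CST operands (the helper names are hypothetical):  */
#if 0
static void
complex_mult (double r1, double i1, double r2, double i2,
              double *re, double *im)
{
  *re = r1 * r2 - i1 * i2;          /* the MINUS_EXPR of the two products */
  *im = r1 * i2 + i1 * r2;          /* the PLUS_EXPR of the cross products */
}

static void
complex_div (double r1, double i1, double r2, double i2,
             double *re, double *im)
{
  double magsquared = r2 * r2 + i2 * i2;
  *re = (r1 * r2 + i1 * i2) / magsquared;   /* t1 / magsquared */
  *im = (i1 * r2 - r1 * i2) / magsquared;   /* t2 / magsquared */
}
#endif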
2039 /* Create a size type INT_CST node with NUMBER sign extended. KIND
2040 indicates which particular sizetype to create. */
2042 tree
2043 size_int_kind (HOST_WIDE_INT number, enum size_type_kind kind)
2045 return build_int_cst (sizetype_tab[(int) kind], number);
2048 /* Combine operands OP1 and OP2 with arithmetic operation CODE. CODE
2049 is a tree code. The type of the result is taken from the operands.
2050 Both must be equivalent integer types, ala int_binop_types_match_p.
2051 If the operands are constant, so is the result. */
2053 tree
2054 size_binop (enum tree_code code, tree arg0, tree arg1)
2056 tree type = TREE_TYPE (arg0);
2058 if (arg0 == error_mark_node || arg1 == error_mark_node)
2059 return error_mark_node;
2061 gcc_assert (int_binop_types_match_p (code, TREE_TYPE (arg0),
2062 TREE_TYPE (arg1)));
2064 /* Handle the special case of two integer constants faster. */
2065 if (TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
2067 /* And some specific cases even faster than that. */
2068 if (code == PLUS_EXPR)
2070 if (integer_zerop (arg0) && !TREE_OVERFLOW (arg0))
2071 return arg1;
2072 if (integer_zerop (arg1) && !TREE_OVERFLOW (arg1))
2073 return arg0;
2075 else if (code == MINUS_EXPR)
2077 if (integer_zerop (arg1) && !TREE_OVERFLOW (arg1))
2078 return arg0;
2080 else if (code == MULT_EXPR)
2082 if (integer_onep (arg0) && !TREE_OVERFLOW (arg0))
2083 return arg1;
2086 /* Handle general case of two integer constants. */
2087 return int_const_binop (code, arg0, arg1, 0);
2090 return fold_build2 (code, type, arg0, arg1);
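/* Illustrative usage, not part of the original file: assuming ARG is a
   sizetype INTEGER_CST, the fast paths above give

     size_binop (PLUS_EXPR, size_int (0), arg)    => arg
     size_binop (MINUS_EXPR, arg, size_int (0))   => arg
     size_binop (MULT_EXPR, size_int (1), arg)    => arg

   while size_binop (PLUS_EXPR, size_int (4), size_int (8)) falls through
   to int_const_binop and yields the constant 12.  */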
2093 /* Given two values, either both of sizetype or both of bitsizetype,
2094 compute the difference between the two values. Return the value
2095 in signed type corresponding to the type of the operands. */
2097 tree
2098 size_diffop (tree arg0, tree arg1)
2100 tree type = TREE_TYPE (arg0);
2101 tree ctype;
2103 gcc_assert (int_binop_types_match_p (MINUS_EXPR, TREE_TYPE (arg0),
2104 TREE_TYPE (arg1)));
2106 /* If the type is already signed, just do the simple thing. */
2107 if (!TYPE_UNSIGNED (type))
2108 return size_binop (MINUS_EXPR, arg0, arg1);
2110 if (type == sizetype)
2111 ctype = ssizetype;
2112 else if (type == bitsizetype)
2113 ctype = sbitsizetype;
2114 else
2115 ctype = signed_type_for (type);
2117 /* If either operand is not a constant, do the conversions to the signed
2118 type and subtract. The hardware will do the right thing with any
2119 overflow in the subtraction. */
2120 if (TREE_CODE (arg0) != INTEGER_CST || TREE_CODE (arg1) != INTEGER_CST)
2121 return size_binop (MINUS_EXPR, fold_convert (ctype, arg0),
2122 fold_convert (ctype, arg1));
2124 /* If ARG0 is larger than ARG1, subtract and return the result in CTYPE.
2125 Otherwise, subtract the other way, convert to CTYPE (we know that can't
2126 overflow) and negate (which can't either). Special-case a result
2127 of zero while we're here. */
2128 if (tree_int_cst_equal (arg0, arg1))
2129 return build_int_cst (ctype, 0);
2130 else if (tree_int_cst_lt (arg1, arg0))
2131 return fold_convert (ctype, size_binop (MINUS_EXPR, arg0, arg1));
2132 else
2133 return size_binop (MINUS_EXPR, build_int_cst (ctype, 0),
2134 fold_convert (ctype, size_binop (MINUS_EXPR,
2135 arg1, arg0)));
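/* Illustrative sketch, not part of the original file: the same
   compare-then-subtract trick in plain C, assuming long is the signed
   counterpart of unsigned long (the helper name is hypothetical).
   Subtracting the smaller value from the larger keeps the difference
   representable, exactly as size_diffop does with sizetype/ssizetype.  */
#if 0
static long
size_diff_example (unsigned long a, unsigned long b)
{
  if (a == b)
    return 0;                  /* special-cased result of zero */
  else if (b < a)
    return (long) (a - b);     /* difference fits; convert directly */
  else
    return -(long) (b - a);    /* subtract the other way and negate */
}
#endif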
2138 /* A subroutine of fold_convert_const handling conversions of an
2139 INTEGER_CST to another integer type. */
2141 static tree
2142 fold_convert_const_int_from_int (tree type, const_tree arg1)
2144 tree t;
2146 /* Given an integer constant, make new constant with new type,
2147 appropriately sign-extended or truncated. */
2148 t = force_fit_type_double (type, TREE_INT_CST_LOW (arg1),
2149 TREE_INT_CST_HIGH (arg1),
2150 /* Don't set the overflow when
2151 converting from a pointer, */
2152 !POINTER_TYPE_P (TREE_TYPE (arg1))
2153 /* or to a sizetype with same signedness
2154 and the precision is unchanged.
2155 ??? sizetype is always sign-extended,
2156 but its signedness depends on the
2157 frontend. Thus we see spurious overflows
2158 here if we do not check this. */
2159 && !((TYPE_PRECISION (TREE_TYPE (arg1))
2160 == TYPE_PRECISION (type))
2161 && (TYPE_UNSIGNED (TREE_TYPE (arg1))
2162 == TYPE_UNSIGNED (type))
2163 && ((TREE_CODE (TREE_TYPE (arg1)) == INTEGER_TYPE
2164 && TYPE_IS_SIZETYPE (TREE_TYPE (arg1)))
2165 || (TREE_CODE (type) == INTEGER_TYPE
2166 && TYPE_IS_SIZETYPE (type)))),
2167 (TREE_INT_CST_HIGH (arg1) < 0
2168 && (TYPE_UNSIGNED (type)
2169 < TYPE_UNSIGNED (TREE_TYPE (arg1))))
2170 | TREE_OVERFLOW (arg1));
2172 return t;
2175 /* A subroutine of fold_convert_const handling conversions of a REAL_CST
2176 to an integer type. */
2178 static tree
2179 fold_convert_const_int_from_real (enum tree_code code, tree type, const_tree arg1)
2181 int overflow = 0;
2182 tree t;
2184 /* The following code implements the floating point to integer
2185 conversion rules required by the Java Language Specification,
2186 that IEEE NaNs are mapped to zero and values that overflow
2187 the target precision saturate, i.e. values greater than
2188 INT_MAX are mapped to INT_MAX, and values less than INT_MIN
2189 are mapped to INT_MIN. These semantics are allowed by the
2190 C and C++ standards that simply state that the behavior of
2191 FP-to-integer conversion is unspecified upon overflow. */
2193 HOST_WIDE_INT high, low;
2194 REAL_VALUE_TYPE r;
2195 REAL_VALUE_TYPE x = TREE_REAL_CST (arg1);
2197 switch (code)
2199 case FIX_TRUNC_EXPR:
2200 real_trunc (&r, VOIDmode, &x);
2201 break;
2203 default:
2204 gcc_unreachable ();
2207 /* If R is NaN, return zero and show we have an overflow. */
2208 if (REAL_VALUE_ISNAN (r))
2210 overflow = 1;
2211 high = 0;
2212 low = 0;
2215 /* See if R is less than the lower bound or greater than the
2216 upper bound. */
2218 if (! overflow)
2220 tree lt = TYPE_MIN_VALUE (type);
2221 REAL_VALUE_TYPE l = real_value_from_int_cst (NULL_TREE, lt);
2222 if (REAL_VALUES_LESS (r, l))
2224 overflow = 1;
2225 high = TREE_INT_CST_HIGH (lt);
2226 low = TREE_INT_CST_LOW (lt);
2230 if (! overflow)
2232 tree ut = TYPE_MAX_VALUE (type);
2233 if (ut)
2235 REAL_VALUE_TYPE u = real_value_from_int_cst (NULL_TREE, ut);
2236 if (REAL_VALUES_LESS (u, r))
2238 overflow = 1;
2239 high = TREE_INT_CST_HIGH (ut);
2240 low = TREE_INT_CST_LOW (ut);
2245 if (! overflow)
2246 REAL_VALUE_TO_INT (&low, &high, r);
2248 t = force_fit_type_double (type, low, high, -1,
2249 overflow | TREE_OVERFLOW (arg1));
2250 return t;
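/* Illustrative sketch, not part of the original file: the saturating
   semantics described above, restated for a 32-bit int target (the
   helper name is hypothetical).  */
#if 0
#include <limits.h>
#include <math.h>

static int
fp_to_int_saturating (double x)
{
  if (isnan (x))
    return 0;                    /* NaN maps to zero, overflow noted */
  if (x < (double) INT_MIN)
    return INT_MIN;              /* below the lower bound: saturate */
  if (x > (double) INT_MAX)
    return INT_MAX;              /* above the upper bound: saturate */
  return (int) x;                /* in range: truncate toward zero */
}
#endif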
2253 /* A subroutine of fold_convert_const handling conversions of a
2254 FIXED_CST to an integer type. */
2256 static tree
2257 fold_convert_const_int_from_fixed (tree type, const_tree arg1)
2259 tree t;
2260 double_int temp, temp_trunc;
2261 unsigned int mode;
2263 /* Right shift FIXED_CST to temp by fbit. */
2264 temp = TREE_FIXED_CST (arg1).data;
2265 mode = TREE_FIXED_CST (arg1).mode;
2266 if (GET_MODE_FBIT (mode) < 2 * HOST_BITS_PER_WIDE_INT)
2268 lshift_double (temp.low, temp.high,
2269 - GET_MODE_FBIT (mode), 2 * HOST_BITS_PER_WIDE_INT,
2270 &temp.low, &temp.high, SIGNED_FIXED_POINT_MODE_P (mode));
2272 /* Left shift temp to temp_trunc by fbit. */
2273 lshift_double (temp.low, temp.high,
2274 GET_MODE_FBIT (mode), 2 * HOST_BITS_PER_WIDE_INT,
2275 &temp_trunc.low, &temp_trunc.high,
2276 SIGNED_FIXED_POINT_MODE_P (mode));
2278 else
2280 temp.low = 0;
2281 temp.high = 0;
2282 temp_trunc.low = 0;
2283 temp_trunc.high = 0;
2286 /* If FIXED_CST is negative, we need to round the value toward 0:
2287 if the fractional bits are not zero, add 1 to temp. */
2288 if (SIGNED_FIXED_POINT_MODE_P (mode) && temp_trunc.high < 0
2289 && !double_int_equal_p (TREE_FIXED_CST (arg1).data, temp_trunc))
2291 double_int one;
2292 one.low = 1;
2293 one.high = 0;
2294 temp = double_int_add (temp, one);
2297 /* Given a fixed-point constant, make new constant with new type,
2298 appropriately sign-extended or truncated. */
2299 t = force_fit_type_double (type, temp.low, temp.high, -1,
2300 (temp.high < 0
2301 && (TYPE_UNSIGNED (type)
2302 < TYPE_UNSIGNED (TREE_TYPE (arg1))))
2303 | TREE_OVERFLOW (arg1));
2305 return t;
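/* Illustrative sketch, not part of the original file: rounding a signed
   fixed-point value with FBIT fractional bits toward zero, as done
   above.  Assumes an arithmetic right shift on negative values (which
   rounds toward -infinity, hence the +1 correction); the helper name is
   hypothetical.  */
#if 0
static long
fixed_to_int_example (long fixed_val, int fbit)
{
  long truncated = fixed_val >> fbit;            /* rounds toward -inf */
  long frac_mask = ((long) 1 << fbit) - 1;
  if (fixed_val < 0 && (fixed_val & frac_mask) != 0)
    truncated += 1;                              /* correct toward zero */
  return truncated;
}
#endif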
2308 /* A subroutine of fold_convert_const handling conversions of a REAL_CST
2309 to another floating point type. */
2311 static tree
2312 fold_convert_const_real_from_real (tree type, const_tree arg1)
2314 REAL_VALUE_TYPE value;
2315 tree t;
2317 real_convert (&value, TYPE_MODE (type), &TREE_REAL_CST (arg1));
2318 t = build_real (type, value);
2320 TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1);
2321 return t;
2324 /* A subroutine of fold_convert_const handling conversions of a FIXED_CST
2325 to a floating point type. */
2327 static tree
2328 fold_convert_const_real_from_fixed (tree type, const_tree arg1)
2330 REAL_VALUE_TYPE value;
2331 tree t;
2333 real_convert_from_fixed (&value, TYPE_MODE (type), &TREE_FIXED_CST (arg1));
2334 t = build_real (type, value);
2336 TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1);
2337 TREE_CONSTANT_OVERFLOW (t)
2338 = TREE_OVERFLOW (t) | TREE_CONSTANT_OVERFLOW (arg1);
2339 return t;
2342 /* A subroutine of fold_convert_const handling conversions of a FIXED_CST
2343 to another fixed-point type. */
2345 static tree
2346 fold_convert_const_fixed_from_fixed (tree type, const_tree arg1)
2348 FIXED_VALUE_TYPE value;
2349 tree t;
2350 bool overflow_p;
2352 overflow_p = fixed_convert (&value, TYPE_MODE (type), &TREE_FIXED_CST (arg1),
2353 TYPE_SATURATING (type));
2354 t = build_fixed (type, value);
2356 /* Propagate overflow flags. */
2357 if (overflow_p | TREE_OVERFLOW (arg1))
2359 TREE_OVERFLOW (t) = 1;
2360 TREE_CONSTANT_OVERFLOW (t) = 1;
2362 else if (TREE_CONSTANT_OVERFLOW (arg1))
2363 TREE_CONSTANT_OVERFLOW (t) = 1;
2364 return t;
2367 /* A subroutine of fold_convert_const handling conversions of an INTEGER_CST
2368 to a fixed-point type. */
2370 static tree
2371 fold_convert_const_fixed_from_int (tree type, const_tree arg1)
2373 FIXED_VALUE_TYPE value;
2374 tree t;
2375 bool overflow_p;
2377 overflow_p = fixed_convert_from_int (&value, TYPE_MODE (type),
2378 TREE_INT_CST (arg1),
2379 TYPE_UNSIGNED (TREE_TYPE (arg1)),
2380 TYPE_SATURATING (type));
2381 t = build_fixed (type, value);
2383 /* Propagate overflow flags. */
2384 if (overflow_p | TREE_OVERFLOW (arg1))
2386 TREE_OVERFLOW (t) = 1;
2387 TREE_CONSTANT_OVERFLOW (t) = 1;
2389 else if (TREE_CONSTANT_OVERFLOW (arg1))
2390 TREE_CONSTANT_OVERFLOW (t) = 1;
2391 return t;
2394 /* A subroutine of fold_convert_const handling conversions of a REAL_CST
2395 to a fixed-point type. */
2397 static tree
2398 fold_convert_const_fixed_from_real (tree type, const_tree arg1)
2400 FIXED_VALUE_TYPE value;
2401 tree t;
2402 bool overflow_p;
2404 overflow_p = fixed_convert_from_real (&value, TYPE_MODE (type),
2405 &TREE_REAL_CST (arg1),
2406 TYPE_SATURATING (type));
2407 t = build_fixed (type, value);
2409 /* Propagate overflow flags. */
2410 if (overflow_p | TREE_OVERFLOW (arg1))
2412 TREE_OVERFLOW (t) = 1;
2413 TREE_CONSTANT_OVERFLOW (t) = 1;
2415 else if (TREE_CONSTANT_OVERFLOW (arg1))
2416 TREE_CONSTANT_OVERFLOW (t) = 1;
2417 return t;
2420 /* Attempt to fold type conversion operation CODE of expression ARG1 to
2421 type TYPE. If no simplification can be done return NULL_TREE. */
2423 static tree
2424 fold_convert_const (enum tree_code code, tree type, tree arg1)
2426 if (TREE_TYPE (arg1) == type)
2427 return arg1;
2429 if (POINTER_TYPE_P (type) || INTEGRAL_TYPE_P (type)
2430 || TREE_CODE (type) == OFFSET_TYPE)
2432 if (TREE_CODE (arg1) == INTEGER_CST)
2433 return fold_convert_const_int_from_int (type, arg1);
2434 else if (TREE_CODE (arg1) == REAL_CST)
2435 return fold_convert_const_int_from_real (code, type, arg1);
2436 else if (TREE_CODE (arg1) == FIXED_CST)
2437 return fold_convert_const_int_from_fixed (type, arg1);
2439 else if (TREE_CODE (type) == REAL_TYPE)
2441 if (TREE_CODE (arg1) == INTEGER_CST)
2442 return build_real_from_int_cst (type, arg1);
2443 else if (TREE_CODE (arg1) == REAL_CST)
2444 return fold_convert_const_real_from_real (type, arg1);
2445 else if (TREE_CODE (arg1) == FIXED_CST)
2446 return fold_convert_const_real_from_fixed (type, arg1);
2448 else if (TREE_CODE (type) == FIXED_POINT_TYPE)
2450 if (TREE_CODE (arg1) == FIXED_CST)
2451 return fold_convert_const_fixed_from_fixed (type, arg1);
2452 else if (TREE_CODE (arg1) == INTEGER_CST)
2453 return fold_convert_const_fixed_from_int (type, arg1);
2454 else if (TREE_CODE (arg1) == REAL_CST)
2455 return fold_convert_const_fixed_from_real (type, arg1);
2457 return NULL_TREE;
2460 /* Construct a vector of zero elements of vector type TYPE. */
2462 static tree
2463 build_zero_vector (tree type)
2465 tree elem, list;
2466 int i, units;
2468 elem = fold_convert_const (NOP_EXPR, TREE_TYPE (type), integer_zero_node);
2469 units = TYPE_VECTOR_SUBPARTS (type);
2471 list = NULL_TREE;
2472 for (i = 0; i < units; i++)
2473 list = tree_cons (NULL_TREE, elem, list);
2474 return build_vector (type, list);
2477 /* Returns true, if ARG is convertible to TYPE using a NOP_EXPR. */
2479 bool
2480 fold_convertible_p (const_tree type, const_tree arg)
2482 tree orig = TREE_TYPE (arg);
2484 if (type == orig)
2485 return true;
2487 if (TREE_CODE (arg) == ERROR_MARK
2488 || TREE_CODE (type) == ERROR_MARK
2489 || TREE_CODE (orig) == ERROR_MARK)
2490 return false;
2492 if (TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (orig))
2493 return true;
2495 switch (TREE_CODE (type))
2497 case INTEGER_TYPE: case ENUMERAL_TYPE: case BOOLEAN_TYPE:
2498 case POINTER_TYPE: case REFERENCE_TYPE:
2499 case OFFSET_TYPE:
2500 if (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig)
2501 || TREE_CODE (orig) == OFFSET_TYPE)
2502 return true;
2503 return (TREE_CODE (orig) == VECTOR_TYPE
2504 && tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (orig)));
2506 case REAL_TYPE:
2507 case FIXED_POINT_TYPE:
2508 case COMPLEX_TYPE:
2509 case VECTOR_TYPE:
2510 case VOID_TYPE:
2511 return TREE_CODE (type) == TREE_CODE (orig);
2513 default:
2514 return false;
2518 /* Convert expression ARG to type TYPE. Used by the middle-end for
2519 simple conversions in preference to calling the front-end's convert. */
2521 tree
2522 fold_convert (tree type, tree arg)
2524 tree orig = TREE_TYPE (arg);
2525 tree tem;
2527 if (type == orig)
2528 return arg;
2530 if (TREE_CODE (arg) == ERROR_MARK
2531 || TREE_CODE (type) == ERROR_MARK
2532 || TREE_CODE (orig) == ERROR_MARK)
2533 return error_mark_node;
2535 if (TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (orig))
2536 return fold_build1 (NOP_EXPR, type, arg);
2538 switch (TREE_CODE (type))
2540 case INTEGER_TYPE: case ENUMERAL_TYPE: case BOOLEAN_TYPE:
2541 case POINTER_TYPE: case REFERENCE_TYPE:
2542 case OFFSET_TYPE:
2543 if (TREE_CODE (arg) == INTEGER_CST)
2545 tem = fold_convert_const (NOP_EXPR, type, arg);
2546 if (tem != NULL_TREE)
2547 return tem;
2549 if (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig)
2550 || TREE_CODE (orig) == OFFSET_TYPE)
2551 return fold_build1 (NOP_EXPR, type, arg);
2552 if (TREE_CODE (orig) == COMPLEX_TYPE)
2554 tem = fold_build1 (REALPART_EXPR, TREE_TYPE (orig), arg);
2555 return fold_convert (type, tem);
2557 gcc_assert (TREE_CODE (orig) == VECTOR_TYPE
2558 && tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (orig)));
2559 return fold_build1 (NOP_EXPR, type, arg);
2561 case REAL_TYPE:
2562 if (TREE_CODE (arg) == INTEGER_CST)
2564 tem = fold_convert_const (FLOAT_EXPR, type, arg);
2565 if (tem != NULL_TREE)
2566 return tem;
2568 else if (TREE_CODE (arg) == REAL_CST)
2570 tem = fold_convert_const (NOP_EXPR, type, arg);
2571 if (tem != NULL_TREE)
2572 return tem;
2574 else if (TREE_CODE (arg) == FIXED_CST)
2576 tem = fold_convert_const (FIXED_CONVERT_EXPR, type, arg);
2577 if (tem != NULL_TREE)
2578 return tem;
2581 switch (TREE_CODE (orig))
2583 case INTEGER_TYPE:
2584 case BOOLEAN_TYPE: case ENUMERAL_TYPE:
2585 case POINTER_TYPE: case REFERENCE_TYPE:
2586 return fold_build1 (FLOAT_EXPR, type, arg);
2588 case REAL_TYPE:
2589 return fold_build1 (NOP_EXPR, type, arg);
2591 case FIXED_POINT_TYPE:
2592 return fold_build1 (FIXED_CONVERT_EXPR, type, arg);
2594 case COMPLEX_TYPE:
2595 tem = fold_build1 (REALPART_EXPR, TREE_TYPE (orig), arg);
2596 return fold_convert (type, tem);
2598 default:
2599 gcc_unreachable ();
2602 case FIXED_POINT_TYPE:
2603 if (TREE_CODE (arg) == FIXED_CST || TREE_CODE (arg) == INTEGER_CST
2604 || TREE_CODE (arg) == REAL_CST)
2606 tem = fold_convert_const (FIXED_CONVERT_EXPR, type, arg);
2607 if (tem != NULL_TREE)
2608 return tem;
2611 switch (TREE_CODE (orig))
2613 case FIXED_POINT_TYPE:
2614 case INTEGER_TYPE:
2615 case ENUMERAL_TYPE:
2616 case BOOLEAN_TYPE:
2617 case REAL_TYPE:
2618 return fold_build1 (FIXED_CONVERT_EXPR, type, arg);
2620 case COMPLEX_TYPE:
2621 tem = fold_build1 (REALPART_EXPR, TREE_TYPE (orig), arg);
2622 return fold_convert (type, tem);
2624 default:
2625 gcc_unreachable ();
2628 case COMPLEX_TYPE:
2629 switch (TREE_CODE (orig))
2631 case INTEGER_TYPE:
2632 case BOOLEAN_TYPE: case ENUMERAL_TYPE:
2633 case POINTER_TYPE: case REFERENCE_TYPE:
2634 case REAL_TYPE:
2635 case FIXED_POINT_TYPE:
2636 return build2 (COMPLEX_EXPR, type,
2637 fold_convert (TREE_TYPE (type), arg),
2638 fold_convert (TREE_TYPE (type), integer_zero_node));
2639 case COMPLEX_TYPE:
2641 tree rpart, ipart;
2643 if (TREE_CODE (arg) == COMPLEX_EXPR)
2645 rpart = fold_convert (TREE_TYPE (type), TREE_OPERAND (arg, 0));
2646 ipart = fold_convert (TREE_TYPE (type), TREE_OPERAND (arg, 1));
2647 return fold_build2 (COMPLEX_EXPR, type, rpart, ipart);
2650 arg = save_expr (arg);
2651 rpart = fold_build1 (REALPART_EXPR, TREE_TYPE (orig), arg);
2652 ipart = fold_build1 (IMAGPART_EXPR, TREE_TYPE (orig), arg);
2653 rpart = fold_convert (TREE_TYPE (type), rpart);
2654 ipart = fold_convert (TREE_TYPE (type), ipart);
2655 return fold_build2 (COMPLEX_EXPR, type, rpart, ipart);
2658 default:
2659 gcc_unreachable ();
2662 case VECTOR_TYPE:
2663 if (integer_zerop (arg))
2664 return build_zero_vector (type);
2665 gcc_assert (tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (orig)));
2666 gcc_assert (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig)
2667 || TREE_CODE (orig) == VECTOR_TYPE);
2668 return fold_build1 (VIEW_CONVERT_EXPR, type, arg);
2670 case VOID_TYPE:
2671 tem = fold_ignored_result (arg);
2672 if (TREE_CODE (tem) == MODIFY_EXPR)
2673 return tem;
2674 return fold_build1 (NOP_EXPR, type, tem);
2676 default:
2677 gcc_unreachable ();
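/* Illustrative usage, not part of the original file: converting a scalar
   to a COMPLEX_TYPE above pairs the converted value with a zero
   imaginary part, so e.g. fold_convert (complex_double_type_node, x)
   builds COMPLEX_EXPR <(double) x, 0.0>.  */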
2681 /* Return false if expr can be assumed not to be an lvalue, true
2682 otherwise. */
2684 static bool
2685 maybe_lvalue_p (const_tree x)
2687 /* We only need to wrap lvalue tree codes. */
2688 switch (TREE_CODE (x))
2690 case VAR_DECL:
2691 case PARM_DECL:
2692 case RESULT_DECL:
2693 case LABEL_DECL:
2694 case FUNCTION_DECL:
2695 case SSA_NAME:
2697 case COMPONENT_REF:
2698 case INDIRECT_REF:
2699 case ALIGN_INDIRECT_REF:
2700 case MISALIGNED_INDIRECT_REF:
2701 case ARRAY_REF:
2702 case ARRAY_RANGE_REF:
2703 case BIT_FIELD_REF:
2704 case OBJ_TYPE_REF:
2706 case REALPART_EXPR:
2707 case IMAGPART_EXPR:
2708 case PREINCREMENT_EXPR:
2709 case PREDECREMENT_EXPR:
2710 case SAVE_EXPR:
2711 case TRY_CATCH_EXPR:
2712 case WITH_CLEANUP_EXPR:
2713 case COMPOUND_EXPR:
2714 case MODIFY_EXPR:
2715 case TARGET_EXPR:
2716 case COND_EXPR:
2717 case BIND_EXPR:
2718 case MIN_EXPR:
2719 case MAX_EXPR:
2720 break;
2722 default:
2723 /* Assume the worst for front-end tree codes. */
2724 if ((int)TREE_CODE (x) >= NUM_TREE_CODES)
2725 break;
2726 return false;
2729 return true;
2732 /* Return an expr equal to X but certainly not valid as an lvalue. */
2734 tree
2735 non_lvalue (tree x)
2737 /* While we are in GIMPLE, NON_LVALUE_EXPR doesn't mean anything to
2738 us. */
2739 if (in_gimple_form)
2740 return x;
2742 if (! maybe_lvalue_p (x))
2743 return x;
2744 return build1 (NON_LVALUE_EXPR, TREE_TYPE (x), x);
2747 /* Nonzero means lvalues are limited to those valid in pedantic ANSI C.
2748 Zero means allow extended lvalues. */
2750 int pedantic_lvalues;
2752 /* When pedantic, return an expr equal to X but certainly not valid as a
2753 pedantic lvalue. Otherwise, return X. */
2755 static tree
2756 pedantic_non_lvalue (tree x)
2758 if (pedantic_lvalues)
2759 return non_lvalue (x);
2760 else
2761 return x;
2764 /* Given a tree comparison code, return the code that is the logical inverse
2765 of the given code. It is not safe to do this for floating-point
2766 comparisons, except for NE_EXPR and EQ_EXPR, so we are also told whether
2767 NaNs must be honored: if reversing the comparison is unsafe, return ERROR_MARK. */
2769 enum tree_code
2770 invert_tree_comparison (enum tree_code code, bool honor_nans)
2772 if (honor_nans && flag_trapping_math)
2773 return ERROR_MARK;
2775 switch (code)
2777 case EQ_EXPR:
2778 return NE_EXPR;
2779 case NE_EXPR:
2780 return EQ_EXPR;
2781 case GT_EXPR:
2782 return honor_nans ? UNLE_EXPR : LE_EXPR;
2783 case GE_EXPR:
2784 return honor_nans ? UNLT_EXPR : LT_EXPR;
2785 case LT_EXPR:
2786 return honor_nans ? UNGE_EXPR : GE_EXPR;
2787 case LE_EXPR:
2788 return honor_nans ? UNGT_EXPR : GT_EXPR;
2789 case LTGT_EXPR:
2790 return UNEQ_EXPR;
2791 case UNEQ_EXPR:
2792 return LTGT_EXPR;
2793 case UNGT_EXPR:
2794 return LE_EXPR;
2795 case UNGE_EXPR:
2796 return LT_EXPR;
2797 case UNLT_EXPR:
2798 return GE_EXPR;
2799 case UNLE_EXPR:
2800 return GT_EXPR;
2801 case ORDERED_EXPR:
2802 return UNORDERED_EXPR;
2803 case UNORDERED_EXPR:
2804 return ORDERED_EXPR;
2805 default:
2806 gcc_unreachable ();
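/* Illustrative examples, not part of the original file:

     invert_tree_comparison (LT_EXPR, false)  => GE_EXPR
     invert_tree_comparison (LT_EXPR, true)   => UNGE_EXPR
       (! (x < y) holds when x >= y or when x and y are unordered)

   and with both honor_nans and flag_trapping_math set the function
   returns ERROR_MARK, since the inverted form could change which
   operations trap.  */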
2810 /* Similar, but return the comparison that results if the operands are
2811 swapped. This is safe for floating-point. */
2813 enum tree_code
2814 swap_tree_comparison (enum tree_code code)
2816 switch (code)
2818 case EQ_EXPR:
2819 case NE_EXPR:
2820 case ORDERED_EXPR:
2821 case UNORDERED_EXPR:
2822 case LTGT_EXPR:
2823 case UNEQ_EXPR:
2824 return code;
2825 case GT_EXPR:
2826 return LT_EXPR;
2827 case GE_EXPR:
2828 return LE_EXPR;
2829 case LT_EXPR:
2830 return GT_EXPR;
2831 case LE_EXPR:
2832 return GE_EXPR;
2833 case UNGT_EXPR:
2834 return UNLT_EXPR;
2835 case UNGE_EXPR:
2836 return UNLE_EXPR;
2837 case UNLT_EXPR:
2838 return UNGT_EXPR;
2839 case UNLE_EXPR:
2840 return UNGE_EXPR;
2841 default:
2842 gcc_unreachable ();
2847 /* Convert a comparison tree code from an enum tree_code representation
2848 into a compcode bit-based encoding. This function is the inverse of
2849 compcode_to_comparison. */
2851 static enum comparison_code
2852 comparison_to_compcode (enum tree_code code)
2854 switch (code)
2856 case LT_EXPR:
2857 return COMPCODE_LT;
2858 case EQ_EXPR:
2859 return COMPCODE_EQ;
2860 case LE_EXPR:
2861 return COMPCODE_LE;
2862 case GT_EXPR:
2863 return COMPCODE_GT;
2864 case NE_EXPR:
2865 return COMPCODE_NE;
2866 case GE_EXPR:
2867 return COMPCODE_GE;
2868 case ORDERED_EXPR:
2869 return COMPCODE_ORD;
2870 case UNORDERED_EXPR:
2871 return COMPCODE_UNORD;
2872 case UNLT_EXPR:
2873 return COMPCODE_UNLT;
2874 case UNEQ_EXPR:
2875 return COMPCODE_UNEQ;
2876 case UNLE_EXPR:
2877 return COMPCODE_UNLE;
2878 case UNGT_EXPR:
2879 return COMPCODE_UNGT;
2880 case LTGT_EXPR:
2881 return COMPCODE_LTGT;
2882 case UNGE_EXPR:
2883 return COMPCODE_UNGE;
2884 default:
2885 gcc_unreachable ();
2889 /* Convert a compcode bit-based encoding of a comparison operator back
2890 to GCC's enum tree_code representation. This function is the
2891 inverse of comparison_to_compcode. */
2893 static enum tree_code
2894 compcode_to_comparison (enum comparison_code code)
2896 switch (code)
2898 case COMPCODE_LT:
2899 return LT_EXPR;
2900 case COMPCODE_EQ:
2901 return EQ_EXPR;
2902 case COMPCODE_LE:
2903 return LE_EXPR;
2904 case COMPCODE_GT:
2905 return GT_EXPR;
2906 case COMPCODE_NE:
2907 return NE_EXPR;
2908 case COMPCODE_GE:
2909 return GE_EXPR;
2910 case COMPCODE_ORD:
2911 return ORDERED_EXPR;
2912 case COMPCODE_UNORD:
2913 return UNORDERED_EXPR;
2914 case COMPCODE_UNLT:
2915 return UNLT_EXPR;
2916 case COMPCODE_UNEQ:
2917 return UNEQ_EXPR;
2918 case COMPCODE_UNLE:
2919 return UNLE_EXPR;
2920 case COMPCODE_UNGT:
2921 return UNGT_EXPR;
2922 case COMPCODE_LTGT:
2923 return LTGT_EXPR;
2924 case COMPCODE_UNGE:
2925 return UNGE_EXPR;
2926 default:
2927 gcc_unreachable ();
2931 /* Return a tree for the comparison which is the combination of
2932 doing the AND or OR (depending on CODE) of the two operations LCODE
2933 and RCODE on the identical operands LL_ARG and LR_ARG. Take into account
2934 the possibility of trapping if the mode has NaNs, and return NULL_TREE
2935 if this makes the transformation invalid. */
2937 tree
2938 combine_comparisons (enum tree_code code, enum tree_code lcode,
2939 enum tree_code rcode, tree truth_type,
2940 tree ll_arg, tree lr_arg)
2942 bool honor_nans = HONOR_NANS (TYPE_MODE (TREE_TYPE (ll_arg)));
2943 enum comparison_code lcompcode = comparison_to_compcode (lcode);
2944 enum comparison_code rcompcode = comparison_to_compcode (rcode);
2945 enum comparison_code compcode;
2947 switch (code)
2949 case TRUTH_AND_EXPR: case TRUTH_ANDIF_EXPR:
2950 compcode = lcompcode & rcompcode;
2951 break;
2953 case TRUTH_OR_EXPR: case TRUTH_ORIF_EXPR:
2954 compcode = lcompcode | rcompcode;
2955 break;
2957 default:
2958 return NULL_TREE;
2961 if (!honor_nans)
2963 /* Eliminate unordered comparisons, as well as LTGT and ORD
2964 which are not used unless the mode has NaNs. */
2965 compcode &= ~COMPCODE_UNORD;
2966 if (compcode == COMPCODE_LTGT)
2967 compcode = COMPCODE_NE;
2968 else if (compcode == COMPCODE_ORD)
2969 compcode = COMPCODE_TRUE;
2971 else if (flag_trapping_math)
2973 /* Check that the original operation and the optimized ones will trap
2974 under the same condition. */
2975 bool ltrap = (lcompcode & COMPCODE_UNORD) == 0
2976 && (lcompcode != COMPCODE_EQ)
2977 && (lcompcode != COMPCODE_ORD);
2978 bool rtrap = (rcompcode & COMPCODE_UNORD) == 0
2979 && (rcompcode != COMPCODE_EQ)
2980 && (rcompcode != COMPCODE_ORD);
2981 bool trap = (compcode & COMPCODE_UNORD) == 0
2982 && (compcode != COMPCODE_EQ)
2983 && (compcode != COMPCODE_ORD);
2985 /* In a short-circuited boolean expression the LHS might be
2986 such that the RHS, if evaluated, will never trap. For
2987 example, in ORD (x, y) && (x < y), we evaluate the RHS only
2988 if neither x nor y is NaN. (This is a mixed blessing: for
2989 example, the expression above will never trap, hence
2990 optimizing it to x < y would be invalid). */
2991 if ((code == TRUTH_ORIF_EXPR && (lcompcode & COMPCODE_UNORD))
2992 || (code == TRUTH_ANDIF_EXPR && !(lcompcode & COMPCODE_UNORD)))
2993 rtrap = false;
2995 /* If the comparison was short-circuited, and only the RHS
2996 trapped, we may now generate a spurious trap. */
2997 if (rtrap && !ltrap
2998 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR))
2999 return NULL_TREE;
3001 /* If we changed the conditions that cause a trap, we lose. */
3002 if ((ltrap || rtrap) != trap)
3003 return NULL_TREE;
3006 if (compcode == COMPCODE_TRUE)
3007 return constant_boolean_node (true, truth_type);
3008 else if (compcode == COMPCODE_FALSE)
3009 return constant_boolean_node (false, truth_type);
3010 else
3011 return fold_build2 (compcode_to_comparison (compcode),
3012 truth_type, ll_arg, lr_arg);
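/* Illustrative examples, not part of the original file, using the
   bit-based compcode encoding (LT = 1, EQ = 2, GT = 4, UNORD = 8):

     (x < y) && (x == y)  ->  COMPCODE_LT & COMPCODE_EQ = COMPCODE_FALSE
                          ->  constant false
     (x < y) || (x == y)  ->  COMPCODE_LT | COMPCODE_EQ = COMPCODE_LE
                          ->  x <= y, when NaNs need not be honored.  */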
3015 /* Return nonzero if two operands (typically of the same tree node)
3016 are necessarily equal. If either argument has side-effects this
3017 function returns zero. FLAGS modifies behavior as follows:
3019 If OEP_ONLY_CONST is set, only return nonzero for constants.
3020 This function tests whether the operands are indistinguishable;
3021 it does not test whether they are equal using C's == operation.
3022 The distinction is important for IEEE floating point, because
3023 (1) -0.0 and 0.0 are distinguishable, but -0.0==0.0, and
3024 (2) two NaNs may be indistinguishable, but NaN!=NaN.
3026 If OEP_ONLY_CONST is unset, a VAR_DECL is considered equal to itself
3027 even though it may hold multiple values during a function.
3028 This is because a GCC tree node guarantees that nothing else is
3029 executed between the evaluation of its "operands" (which may often
3030 be evaluated in arbitrary order). Hence if the operands themselves
3031 don't side-effect, the VAR_DECLs, PARM_DECLs etc... must hold the
3032 same value in each operand/subexpression. Hence leaving OEP_ONLY_CONST
3033 unset means assuming isochronic (or instantaneous) tree equivalence.
3034 Unless comparing arbitrary expression trees, such as from different
3035 statements, this flag can usually be left unset.
3037 If OEP_PURE_SAME is set, then pure functions with identical arguments
3038 are considered the same. It is used when the caller has other ways
3039 to ensure that global memory is unchanged in between. */
3041 int
3042 operand_equal_p (const_tree arg0, const_tree arg1, unsigned int flags)
3044 /* If either is ERROR_MARK, they aren't equal. */
3045 if (TREE_CODE (arg0) == ERROR_MARK || TREE_CODE (arg1) == ERROR_MARK)
3046 return 0;
3048 /* Check equality of integer constants before bailing out due to
3049 precision differences. */
3050 if (TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
3051 return tree_int_cst_equal (arg0, arg1);
3053 /* If both types don't have the same signedness, then we can't consider
3054 them equal. We must check this before the STRIP_NOPS calls
3055 because they may change the signedness of the arguments. As pointers
3056 strictly don't have a signedness, require either two pointers or
3057 two non-pointers as well. */
3058 if (TYPE_UNSIGNED (TREE_TYPE (arg0)) != TYPE_UNSIGNED (TREE_TYPE (arg1))
3059 || POINTER_TYPE_P (TREE_TYPE (arg0)) != POINTER_TYPE_P (TREE_TYPE (arg1)))
3060 return 0;
3062 /* If both types don't have the same precision, then it is not safe
3063 to strip NOPs. */
3064 if (TYPE_PRECISION (TREE_TYPE (arg0)) != TYPE_PRECISION (TREE_TYPE (arg1)))
3065 return 0;
3067 STRIP_NOPS (arg0);
3068 STRIP_NOPS (arg1);
3070 /* In case both args are comparisons but with different comparison
3071 code, try to swap the comparison operands of one arg to produce
3072 a match and compare that variant. */
3073 if (TREE_CODE (arg0) != TREE_CODE (arg1)
3074 && COMPARISON_CLASS_P (arg0)
3075 && COMPARISON_CLASS_P (arg1))
3077 enum tree_code swap_code = swap_tree_comparison (TREE_CODE (arg1));
3079 if (TREE_CODE (arg0) == swap_code)
3080 return operand_equal_p (TREE_OPERAND (arg0, 0),
3081 TREE_OPERAND (arg1, 1), flags)
3082 && operand_equal_p (TREE_OPERAND (arg0, 1),
3083 TREE_OPERAND (arg1, 0), flags);
3086 if (TREE_CODE (arg0) != TREE_CODE (arg1)
3087 /* This is needed for conversions and for COMPONENT_REF.
3088 Might as well play it safe and always test this. */
3089 || TREE_CODE (TREE_TYPE (arg0)) == ERROR_MARK
3090 || TREE_CODE (TREE_TYPE (arg1)) == ERROR_MARK
3091 || TYPE_MODE (TREE_TYPE (arg0)) != TYPE_MODE (TREE_TYPE (arg1)))
3092 return 0;
3094 /* If ARG0 and ARG1 are the same SAVE_EXPR, they are necessarily equal.
3095 We don't care about side effects in that case because the SAVE_EXPR
3096 takes care of that for us. In all other cases, two expressions are
3097 equal if they have no side effects. If we have two identical
3098 expressions with side effects that should be treated the same due
3099 to the only side effects being identical SAVE_EXPR's, that will
3100 be detected in the recursive calls below. */
3101 if (arg0 == arg1 && ! (flags & OEP_ONLY_CONST)
3102 && (TREE_CODE (arg0) == SAVE_EXPR
3103 || (! TREE_SIDE_EFFECTS (arg0) && ! TREE_SIDE_EFFECTS (arg1))))
3104 return 1;
3106 /* Next handle constant cases, those for which we can return 1 even
3107 if ONLY_CONST is set. */
3108 if (TREE_CONSTANT (arg0) && TREE_CONSTANT (arg1))
3109 switch (TREE_CODE (arg0))
3111 case INTEGER_CST:
3112 return tree_int_cst_equal (arg0, arg1);
3114 case FIXED_CST:
3115 return FIXED_VALUES_IDENTICAL (TREE_FIXED_CST (arg0),
3116 TREE_FIXED_CST (arg1));
3118 case REAL_CST:
3119 if (REAL_VALUES_IDENTICAL (TREE_REAL_CST (arg0),
3120 TREE_REAL_CST (arg1)))
3121 return 1;
3124 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0))))
3126 /* If we do not distinguish between signed and unsigned zero,
3127 consider them equal. */
3128 if (real_zerop (arg0) && real_zerop (arg1))
3129 return 1;
3131 return 0;
3133 case VECTOR_CST:
3135 tree v1, v2;
3137 v1 = TREE_VECTOR_CST_ELTS (arg0);
3138 v2 = TREE_VECTOR_CST_ELTS (arg1);
3139 while (v1 && v2)
3141 if (!operand_equal_p (TREE_VALUE (v1), TREE_VALUE (v2),
3142 flags))
3143 return 0;
3144 v1 = TREE_CHAIN (v1);
3145 v2 = TREE_CHAIN (v2);
3148 return v1 == v2;
3151 case COMPLEX_CST:
3152 return (operand_equal_p (TREE_REALPART (arg0), TREE_REALPART (arg1),
3153 flags)
3154 && operand_equal_p (TREE_IMAGPART (arg0), TREE_IMAGPART (arg1),
3155 flags));
3157 case STRING_CST:
3158 return (TREE_STRING_LENGTH (arg0) == TREE_STRING_LENGTH (arg1)
3159 && ! memcmp (TREE_STRING_POINTER (arg0),
3160 TREE_STRING_POINTER (arg1),
3161 TREE_STRING_LENGTH (arg0)));
3163 case ADDR_EXPR:
3164 return operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 0),
3165 0);
3166 default:
3167 break;
3170 if (flags & OEP_ONLY_CONST)
3171 return 0;
3173 /* Define macros to test an operand from arg0 and arg1 for equality and a
3174 variant that allows null and views null as being different from any
3175 non-null value. In the latter case, if either is null, then both
3176 must be; otherwise, do the normal comparison. */
3177 #define OP_SAME(N) operand_equal_p (TREE_OPERAND (arg0, N), \
3178 TREE_OPERAND (arg1, N), flags)
3180 #define OP_SAME_WITH_NULL(N) \
3181 ((!TREE_OPERAND (arg0, N) || !TREE_OPERAND (arg1, N)) \
3182 ? TREE_OPERAND (arg0, N) == TREE_OPERAND (arg1, N) : OP_SAME (N))
3184 switch (TREE_CODE_CLASS (TREE_CODE (arg0)))
3186 case tcc_unary:
3187 /* Two conversions are equal only if signedness and modes match. */
3188 switch (TREE_CODE (arg0))
3190 CASE_CONVERT:
3191 case FIX_TRUNC_EXPR:
3192 if (TYPE_UNSIGNED (TREE_TYPE (arg0))
3193 != TYPE_UNSIGNED (TREE_TYPE (arg1)))
3194 return 0;
3195 break;
3196 default:
3197 break;
3200 return OP_SAME (0);
3203 case tcc_comparison:
3204 case tcc_binary:
3205 if (OP_SAME (0) && OP_SAME (1))
3206 return 1;
3208 /* For commutative ops, allow the other order. */
3209 return (commutative_tree_code (TREE_CODE (arg0))
3210 && operand_equal_p (TREE_OPERAND (arg0, 0),
3211 TREE_OPERAND (arg1, 1), flags)
3212 && operand_equal_p (TREE_OPERAND (arg0, 1),
3213 TREE_OPERAND (arg1, 0), flags));
3215 case tcc_reference:
3216 /* If either of the pointer (or reference) expressions we are
3217 dereferencing contain a side effect, these cannot be equal. */
3218 if (TREE_SIDE_EFFECTS (arg0)
3219 || TREE_SIDE_EFFECTS (arg1))
3220 return 0;
3222 switch (TREE_CODE (arg0))
3224 case INDIRECT_REF:
3225 case ALIGN_INDIRECT_REF:
3226 case MISALIGNED_INDIRECT_REF:
3227 case REALPART_EXPR:
3228 case IMAGPART_EXPR:
3229 return OP_SAME (0);
3231 case ARRAY_REF:
3232 case ARRAY_RANGE_REF:
3233 /* Operands 2 and 3 may be null.
3234 Compare the array index by value first if it is constant, as we
3235 may have different types but the same value here. */
3236 return (OP_SAME (0)
3237 && (tree_int_cst_equal (TREE_OPERAND (arg0, 1),
3238 TREE_OPERAND (arg1, 1))
3239 || OP_SAME (1))
3240 && OP_SAME_WITH_NULL (2)
3241 && OP_SAME_WITH_NULL (3));
3243 case COMPONENT_REF:
3244 /* Handle operand 2 the same as for ARRAY_REF. Operand 0
3245 may be NULL when we're called to compare MEM_EXPRs. */
3246 return OP_SAME_WITH_NULL (0)
3247 && OP_SAME (1)
3248 && OP_SAME_WITH_NULL (2);
3250 case BIT_FIELD_REF:
3251 return OP_SAME (0) && OP_SAME (1) && OP_SAME (2);
3253 default:
3254 return 0;
3257 case tcc_expression:
3258 switch (TREE_CODE (arg0))
3260 case ADDR_EXPR:
3261 case TRUTH_NOT_EXPR:
3262 return OP_SAME (0);
3264 case TRUTH_ANDIF_EXPR:
3265 case TRUTH_ORIF_EXPR:
3266 return OP_SAME (0) && OP_SAME (1);
3268 case TRUTH_AND_EXPR:
3269 case TRUTH_OR_EXPR:
3270 case TRUTH_XOR_EXPR:
3271 if (OP_SAME (0) && OP_SAME (1))
3272 return 1;
3274 /* Otherwise take into account this is a commutative operation. */
3275 return (operand_equal_p (TREE_OPERAND (arg0, 0),
3276 TREE_OPERAND (arg1, 1), flags)
3277 && operand_equal_p (TREE_OPERAND (arg0, 1),
3278 TREE_OPERAND (arg1, 0), flags));
3280 case COND_EXPR:
3281 return OP_SAME (0) && OP_SAME (1) && OP_SAME (2);
3283 default:
3284 return 0;
3287 case tcc_vl_exp:
3288 switch (TREE_CODE (arg0))
3290 case CALL_EXPR:
3291 /* If the CALL_EXPRs call different functions, then they
3292 clearly cannot be equal. */
3293 if (! operand_equal_p (CALL_EXPR_FN (arg0), CALL_EXPR_FN (arg1),
3294 flags))
3295 return 0;
3298 unsigned int cef = call_expr_flags (arg0);
3299 if (flags & OEP_PURE_SAME)
3300 cef &= ECF_CONST | ECF_PURE;
3301 else
3302 cef &= ECF_CONST;
3303 if (!cef)
3304 return 0;
3307 /* Now see if all the arguments are the same. */
3309 const_call_expr_arg_iterator iter0, iter1;
3310 const_tree a0, a1;
3311 for (a0 = first_const_call_expr_arg (arg0, &iter0),
3312 a1 = first_const_call_expr_arg (arg1, &iter1);
3313 a0 && a1;
3314 a0 = next_const_call_expr_arg (&iter0),
3315 a1 = next_const_call_expr_arg (&iter1))
3316 if (! operand_equal_p (a0, a1, flags))
3317 return 0;
3319 /* If we get here and both argument lists are exhausted
3320 then the CALL_EXPRs are equal. */
3321 return ! (a0 || a1);
3323 default:
3324 return 0;
3327 case tcc_declaration:
3328 /* Consider __builtin_sqrt equal to sqrt. */
3329 return (TREE_CODE (arg0) == FUNCTION_DECL
3330 && DECL_BUILT_IN (arg0) && DECL_BUILT_IN (arg1)
3331 && DECL_BUILT_IN_CLASS (arg0) == DECL_BUILT_IN_CLASS (arg1)
3332 && DECL_FUNCTION_CODE (arg0) == DECL_FUNCTION_CODE (arg1));
3334 default:
3335 return 0;
3338 #undef OP_SAME
3339 #undef OP_SAME_WITH_NULL
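/* Illustrative examples, not part of the original file:

     operand_equal_p on -0.0 and 0.0 returns 0 when signed zeros are
     honored, even though -0.0 == 0.0 evaluates to true;
     two bit-identical NaN constants compare equal here, even though
     NaN != NaN evaluates to true;
     with OEP_ONLY_CONST unset, a VAR_DECL with no side effects compares
     equal to itself.  */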
3342 /* Similar to operand_equal_p, but see if ARG0 might have been made by
3343 shorten_compare from ARG1 when ARG1 was being compared with OTHER.
3345 When in doubt, return 0. */
3347 static int
3348 operand_equal_for_comparison_p (tree arg0, tree arg1, tree other)
3350 int unsignedp1, unsignedpo;
3351 tree primarg0, primarg1, primother;
3352 unsigned int correct_width;
3354 if (operand_equal_p (arg0, arg1, 0))
3355 return 1;
3357 if (! INTEGRAL_TYPE_P (TREE_TYPE (arg0))
3358 || ! INTEGRAL_TYPE_P (TREE_TYPE (arg1)))
3359 return 0;
3361 /* Discard any conversions that don't change the modes of ARG0 and ARG1
3362 and see if the inner values are the same. This removes any
3363 signedness comparison, which doesn't matter here. */
3364 primarg0 = arg0, primarg1 = arg1;
3365 STRIP_NOPS (primarg0);
3366 STRIP_NOPS (primarg1);
3367 if (operand_equal_p (primarg0, primarg1, 0))
3368 return 1;
3370 /* Duplicate what shorten_compare does to ARG1 and see if that gives the
3371 actual comparison operand, ARG0.
3373 First throw away any conversions to wider types
3374 already present in the operands. */
3376 primarg1 = get_narrower (arg1, &unsignedp1);
3377 primother = get_narrower (other, &unsignedpo);
3379 correct_width = TYPE_PRECISION (TREE_TYPE (arg1));
3380 if (unsignedp1 == unsignedpo
3381 && TYPE_PRECISION (TREE_TYPE (primarg1)) < correct_width
3382 && TYPE_PRECISION (TREE_TYPE (primother)) < correct_width)
3384 tree type = TREE_TYPE (arg0);
3386 /* Make sure shorter operand is extended the right way
3387 to match the longer operand. */
3388 primarg1 = fold_convert (signed_or_unsigned_type_for
3389 (unsignedp1, TREE_TYPE (primarg1)), primarg1);
3391 if (operand_equal_p (arg0, fold_convert (type, primarg1), 0))
3392 return 1;
3395 return 0;
3398 /* See if ARG is an expression that is either a comparison or is performing
3399 arithmetic on comparisons. The comparisons must only be comparing
3400 two different values, which will be stored in *CVAL1 and *CVAL2; if
3401 they are nonzero it means that some operands have already been found.
3402 No variables may be used anywhere else in the expression except in the
3403 comparisons. If SAVE_P is true it means we removed a SAVE_EXPR around
3404 the expression and save_expr needs to be called with CVAL1 and CVAL2.
3406 If this is true, return 1. Otherwise, return zero. */
3408 static int
3409 twoval_comparison_p (tree arg, tree *cval1, tree *cval2, int *save_p)
3411 enum tree_code code = TREE_CODE (arg);
3412 enum tree_code_class tclass = TREE_CODE_CLASS (code);
3414 /* We can handle some of the tcc_expression cases here. */
3415 if (tclass == tcc_expression && code == TRUTH_NOT_EXPR)
3416 tclass = tcc_unary;
3417 else if (tclass == tcc_expression
3418 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR
3419 || code == COMPOUND_EXPR))
3420 tclass = tcc_binary;
3422 else if (tclass == tcc_expression && code == SAVE_EXPR
3423 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (arg, 0)))
3425 /* If we've already found a CVAL1 or CVAL2, this expression is
3426 too complex to handle. */
3427 if (*cval1 || *cval2)
3428 return 0;
3430 tclass = tcc_unary;
3431 *save_p = 1;
3434 switch (tclass)
3436 case tcc_unary:
3437 return twoval_comparison_p (TREE_OPERAND (arg, 0), cval1, cval2, save_p);
3439 case tcc_binary:
3440 return (twoval_comparison_p (TREE_OPERAND (arg, 0), cval1, cval2, save_p)
3441 && twoval_comparison_p (TREE_OPERAND (arg, 1),
3442 cval1, cval2, save_p));
3444 case tcc_constant:
3445 return 1;
3447 case tcc_expression:
3448 if (code == COND_EXPR)
3449 return (twoval_comparison_p (TREE_OPERAND (arg, 0),
3450 cval1, cval2, save_p)
3451 && twoval_comparison_p (TREE_OPERAND (arg, 1),
3452 cval1, cval2, save_p)
3453 && twoval_comparison_p (TREE_OPERAND (arg, 2),
3454 cval1, cval2, save_p));
3455 return 0;
3457 case tcc_comparison:
3458 /* First see if we can handle the first operand, then the second. For
3459 the second operand, we know *CVAL1 can't be zero. It must be that
3460 one side of the comparison is each of the values; test for the
3461 case where this isn't true by failing if the two operands
3462 are the same. */
3464 if (operand_equal_p (TREE_OPERAND (arg, 0),
3465 TREE_OPERAND (arg, 1), 0))
3466 return 0;
3468 if (*cval1 == 0)
3469 *cval1 = TREE_OPERAND (arg, 0);
3470 else if (operand_equal_p (*cval1, TREE_OPERAND (arg, 0), 0))
3472 else if (*cval2 == 0)
3473 *cval2 = TREE_OPERAND (arg, 0);
3474 else if (operand_equal_p (*cval2, TREE_OPERAND (arg, 0), 0))
3476 else
3477 return 0;
3479 if (operand_equal_p (*cval1, TREE_OPERAND (arg, 1), 0))
3481 else if (*cval2 == 0)
3482 *cval2 = TREE_OPERAND (arg, 1);
3483 else if (operand_equal_p (*cval2, TREE_OPERAND (arg, 1), 0))
3485 else
3486 return 0;
3488 return 1;
3490 default:
3491 return 0;
3495 /* ARG is a tree that is known to contain just arithmetic operations and
3496 comparisons. Evaluate the operations in the tree substituting NEW0 for
3497 any occurrence of OLD0 as an operand of a comparison and likewise for
3498 NEW1 and OLD1. */
3500 static tree
3501 eval_subst (tree arg, tree old0, tree new0, tree old1, tree new1)
3503 tree type = TREE_TYPE (arg);
3504 enum tree_code code = TREE_CODE (arg);
3505 enum tree_code_class tclass = TREE_CODE_CLASS (code);
3507 /* We can handle some of the tcc_expression cases here. */
3508 if (tclass == tcc_expression && code == TRUTH_NOT_EXPR)
3509 tclass = tcc_unary;
3510 else if (tclass == tcc_expression
3511 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR))
3512 tclass = tcc_binary;
3514 switch (tclass)
3516 case tcc_unary:
3517 return fold_build1 (code, type,
3518 eval_subst (TREE_OPERAND (arg, 0),
3519 old0, new0, old1, new1));
3521 case tcc_binary:
3522 return fold_build2 (code, type,
3523 eval_subst (TREE_OPERAND (arg, 0),
3524 old0, new0, old1, new1),
3525 eval_subst (TREE_OPERAND (arg, 1),
3526 old0, new0, old1, new1));
3528 case tcc_expression:
3529 switch (code)
3531 case SAVE_EXPR:
3532 return eval_subst (TREE_OPERAND (arg, 0), old0, new0, old1, new1);
3534 case COMPOUND_EXPR:
3535 return eval_subst (TREE_OPERAND (arg, 1), old0, new0, old1, new1);
3537 case COND_EXPR:
3538 return fold_build3 (code, type,
3539 eval_subst (TREE_OPERAND (arg, 0),
3540 old0, new0, old1, new1),
3541 eval_subst (TREE_OPERAND (arg, 1),
3542 old0, new0, old1, new1),
3543 eval_subst (TREE_OPERAND (arg, 2),
3544 old0, new0, old1, new1));
3545 default:
3546 break;
3548 /* Fall through - ??? */
3550 case tcc_comparison:
3552 tree arg0 = TREE_OPERAND (arg, 0);
3553 tree arg1 = TREE_OPERAND (arg, 1);
3555 /* We need to check both for exact equality and tree equality. The
3556 former will be true if the operand has a side-effect. In that
3557 case, we know the operand occurred exactly once. */
3559 if (arg0 == old0 || operand_equal_p (arg0, old0, 0))
3560 arg0 = new0;
3561 else if (arg0 == old1 || operand_equal_p (arg0, old1, 0))
3562 arg0 = new1;
3564 if (arg1 == old0 || operand_equal_p (arg1, old0, 0))
3565 arg1 = new0;
3566 else if (arg1 == old1 || operand_equal_p (arg1, old1, 0))
3567 arg1 = new1;
3569 return fold_build2 (code, type, arg0, arg1);
3572 default:
3573 return arg;
3577 /* Return a tree for the case when the result of an expression is RESULT
3578 converted to TYPE and OMITTED was previously an operand of the expression
3579 but is now not needed (e.g., we folded OMITTED * 0).
3581 If OMITTED has side effects, we must evaluate it. Otherwise, just do
3582 the conversion of RESULT to TYPE. */
3584 tree
3585 omit_one_operand (tree type, tree result, tree omitted)
3587 tree t = fold_convert (type, result);
3589 /* If the resulting operand is an empty statement, just return the omitted
3590 statement cast to void. */
3591 if (IS_EMPTY_STMT (t) && TREE_SIDE_EFFECTS (omitted))
3592 return build1 (NOP_EXPR, void_type_node, fold_ignored_result (omitted));
3594 if (TREE_SIDE_EFFECTS (omitted))
3595 return build2 (COMPOUND_EXPR, type, fold_ignored_result (omitted), t);
3597 return non_lvalue (t);
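/* Illustrative usage, not part of the original file: when folding
   f () * 0, the call cannot simply be dropped, so a caller would use

     omit_one_operand (type, integer_zero_node, call_to_f)

   which yields COMPOUND_EXPR <f (), 0> because the call has side
   effects, and would yield plain 0 for a side-effect-free operand.
   (call_to_f is a hypothetical tree for the call.)  */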
3600 /* Similar, but call pedantic_non_lvalue instead of non_lvalue. */
3602 static tree
3603 pedantic_omit_one_operand (tree type, tree result, tree omitted)
3605 tree t = fold_convert (type, result);
3607 /* If the resulting operand is an empty statement, just return the omitted
3608 statement cast to void. */
3609 if (IS_EMPTY_STMT (t) && TREE_SIDE_EFFECTS (omitted))
3610 return build1 (NOP_EXPR, void_type_node, fold_ignored_result (omitted));
3612 if (TREE_SIDE_EFFECTS (omitted))
3613 return build2 (COMPOUND_EXPR, type, fold_ignored_result (omitted), t);
3615 return pedantic_non_lvalue (t);
3618 /* Return a tree for the case when the result of an expression is RESULT
3619 converted to TYPE and OMITTED1 and OMITTED2 were previously operands
3620 of the expression but are now not needed.
3622 If OMITTED1 or OMITTED2 has side effects, they must be evaluated.
3623 If both OMITTED1 and OMITTED2 have side effects, OMITTED1 is
3624 evaluated before OMITTED2. Otherwise, if neither has side effects,
3625 just do the conversion of RESULT to TYPE. */
3627 tree
3628 omit_two_operands (tree type, tree result, tree omitted1, tree omitted2)
3630 tree t = fold_convert (type, result);
3632 if (TREE_SIDE_EFFECTS (omitted2))
3633 t = build2 (COMPOUND_EXPR, type, omitted2, t);
3634 if (TREE_SIDE_EFFECTS (omitted1))
3635 t = build2 (COMPOUND_EXPR, type, omitted1, t);
3637 return TREE_CODE (t) != COMPOUND_EXPR ? non_lvalue (t) : t;
3641 /* Return a simplified tree node for the truth-negation of ARG. This
3642 never alters ARG itself. We assume that ARG is an operation that
3643 returns a truth value (0 or 1).
3645 FIXME: one would think we would fold the result, but it causes
3646 problems with the dominator optimizer. */
3648 tree
3649 fold_truth_not_expr (tree arg)
3651 tree type = TREE_TYPE (arg);
3652 enum tree_code code = TREE_CODE (arg);
3654 /* If this is a comparison, we can simply invert it, except for
3655 floating-point non-equality comparisons, in which case we just
3656 enclose a TRUTH_NOT_EXPR around what we have. */
3658 if (TREE_CODE_CLASS (code) == tcc_comparison)
3660 tree op_type = TREE_TYPE (TREE_OPERAND (arg, 0));
3661 if (FLOAT_TYPE_P (op_type)
3662 && flag_trapping_math
3663 && code != ORDERED_EXPR && code != UNORDERED_EXPR
3664 && code != NE_EXPR && code != EQ_EXPR)
3665 return NULL_TREE;
3666 else
3668 code = invert_tree_comparison (code,
3669 HONOR_NANS (TYPE_MODE (op_type)));
3670 if (code == ERROR_MARK)
3671 return NULL_TREE;
3672 else
3673 return build2 (code, type,
3674 TREE_OPERAND (arg, 0), TREE_OPERAND (arg, 1));
3678 switch (code)
3680 case INTEGER_CST:
3681 return constant_boolean_node (integer_zerop (arg), type);
3683 case TRUTH_AND_EXPR:
3684 return build2 (TRUTH_OR_EXPR, type,
3685 invert_truthvalue (TREE_OPERAND (arg, 0)),
3686 invert_truthvalue (TREE_OPERAND (arg, 1)));
3688 case TRUTH_OR_EXPR:
3689 return build2 (TRUTH_AND_EXPR, type,
3690 invert_truthvalue (TREE_OPERAND (arg, 0)),
3691 invert_truthvalue (TREE_OPERAND (arg, 1)));
3693 case TRUTH_XOR_EXPR:
3694 /* Here we can invert either operand. We invert the first operand
3695 unless the second operand is a TRUTH_NOT_EXPR in which case our
3696 result is the XOR of the first operand with the inside of the
3697 negation of the second operand. */
3699 if (TREE_CODE (TREE_OPERAND (arg, 1)) == TRUTH_NOT_EXPR)
3700 return build2 (TRUTH_XOR_EXPR, type, TREE_OPERAND (arg, 0),
3701 TREE_OPERAND (TREE_OPERAND (arg, 1), 0));
3702 else
3703 return build2 (TRUTH_XOR_EXPR, type,
3704 invert_truthvalue (TREE_OPERAND (arg, 0)),
3705 TREE_OPERAND (arg, 1));
3707 case TRUTH_ANDIF_EXPR:
3708 return build2 (TRUTH_ORIF_EXPR, type,
3709 invert_truthvalue (TREE_OPERAND (arg, 0)),
3710 invert_truthvalue (TREE_OPERAND (arg, 1)));
3712 case TRUTH_ORIF_EXPR:
3713 return build2 (TRUTH_ANDIF_EXPR, type,
3714 invert_truthvalue (TREE_OPERAND (arg, 0)),
3715 invert_truthvalue (TREE_OPERAND (arg, 1)));
3717 case TRUTH_NOT_EXPR:
3718 return TREE_OPERAND (arg, 0);
3720 case COND_EXPR:
3722 tree arg1 = TREE_OPERAND (arg, 1);
3723 tree arg2 = TREE_OPERAND (arg, 2);
3724 /* A COND_EXPR may have a throw as one operand, which
3725 then has void type. Just leave void operands
3726 as they are. */
3727 return build3 (COND_EXPR, type, TREE_OPERAND (arg, 0),
3728 VOID_TYPE_P (TREE_TYPE (arg1))
3729 ? arg1 : invert_truthvalue (arg1),
3730 VOID_TYPE_P (TREE_TYPE (arg2))
3731 ? arg2 : invert_truthvalue (arg2));
3734 case COMPOUND_EXPR:
3735 return build2 (COMPOUND_EXPR, type, TREE_OPERAND (arg, 0),
3736 invert_truthvalue (TREE_OPERAND (arg, 1)));
3738 case NON_LVALUE_EXPR:
3739 return invert_truthvalue (TREE_OPERAND (arg, 0));
3741 case NOP_EXPR:
3742 if (TREE_CODE (TREE_TYPE (arg)) == BOOLEAN_TYPE)
3743 return build1 (TRUTH_NOT_EXPR, type, arg);
3745 case CONVERT_EXPR:
3746 case FLOAT_EXPR:
3747 return build1 (TREE_CODE (arg), type,
3748 invert_truthvalue (TREE_OPERAND (arg, 0)));
3750 case BIT_AND_EXPR:
3751 if (!integer_onep (TREE_OPERAND (arg, 1)))
3752 break;
3753 return build2 (EQ_EXPR, type, arg,
3754 build_int_cst (type, 0));
3756 case SAVE_EXPR:
3757 return build1 (TRUTH_NOT_EXPR, type, arg);
3759 case CLEANUP_POINT_EXPR:
3760 return build1 (CLEANUP_POINT_EXPR, type,
3761 invert_truthvalue (TREE_OPERAND (arg, 0)));
3763 default:
3764 break;
3767 return NULL_TREE;
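/* Illustrative examples, not part of the original file:

     ! (a && b)  ->  !a || !b         (TRUTH_AND_EXPR case, De Morgan)
     ! (a || b)  ->  !a && !b         (TRUTH_OR_EXPR case)
     ! (x < y)   ->  x >= y           (comparison case, NaNs not honored)
     ! (b & 1)   ->  (b & 1) == 0     (BIT_AND_EXPR with mask 1)  */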
3770 /* Return a simplified tree node for the truth-negation of ARG. This
3771 never alters ARG itself. We assume that ARG is an operation that
3772 returns a truth value (0 or 1).
3774 FIXME: one would think we would fold the result, but it causes
3775 problems with the dominator optimizer. */
3777 tree
3778 invert_truthvalue (tree arg)
3780 tree tem;
3782 if (TREE_CODE (arg) == ERROR_MARK)
3783 return arg;
3785 tem = fold_truth_not_expr (arg);
3786 if (!tem)
3787 tem = build1 (TRUTH_NOT_EXPR, TREE_TYPE (arg), arg);
3789 return tem;
3792 /* Given a bit-wise operation CODE applied to ARG0 and ARG1, see if both
3793 operands are another bit-wise operation with a common input. If so,
3794 distribute the bit operations to save an operation and possibly two if
3795 constants are involved. For example, convert
3796 (A | B) & (A | C) into A | (B & C)
3797 Further simplification will occur if B and C are constants.
3799 If this optimization cannot be done, 0 will be returned. */
3801 static tree
3802 distribute_bit_expr (enum tree_code code, tree type, tree arg0, tree arg1)
3804 tree common;
3805 tree left, right;
3807 if (TREE_CODE (arg0) != TREE_CODE (arg1)
3808 || TREE_CODE (arg0) == code
3809 || (TREE_CODE (arg0) != BIT_AND_EXPR
3810 && TREE_CODE (arg0) != BIT_IOR_EXPR))
3811 return 0;
3813 if (operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 0), 0))
3815 common = TREE_OPERAND (arg0, 0);
3816 left = TREE_OPERAND (arg0, 1);
3817 right = TREE_OPERAND (arg1, 1);
3819 else if (operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 1), 0))
3821 common = TREE_OPERAND (arg0, 0);
3822 left = TREE_OPERAND (arg0, 1);
3823 right = TREE_OPERAND (arg1, 0);
3825 else if (operand_equal_p (TREE_OPERAND (arg0, 1), TREE_OPERAND (arg1, 0), 0))
3827 common = TREE_OPERAND (arg0, 1);
3828 left = TREE_OPERAND (arg0, 0);
3829 right = TREE_OPERAND (arg1, 1);
3831 else if (operand_equal_p (TREE_OPERAND (arg0, 1), TREE_OPERAND (arg1, 1), 0))
3833 common = TREE_OPERAND (arg0, 1);
3834 left = TREE_OPERAND (arg0, 0);
3835 right = TREE_OPERAND (arg1, 0);
3837 else
3838 return 0;
3840 common = fold_convert (type, common);
3841 left = fold_convert (type, left);
3842 right = fold_convert (type, right);
3843 return fold_build2 (TREE_CODE (arg0), type, common,
3844 fold_build2 (code, type, left, right));
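/* Illustrative examples, not part of the original file:

     (A | B) & (A | C)        ->  A | (B & C)
     (A & 0xf0) | (A & 0x0f)  ->  A & (0xf0 | 0x0f)  ->  A & 0xff

   The second form shows the further simplification the comment above
   anticipates when B and C are constants.  */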
3847 /* Knowing that ARG0 and ARG1 are both RDIV_EXPRs, simplify a binary operation
3848 with code CODE. This optimization is unsafe. */
3849 static tree
3850 distribute_real_division (enum tree_code code, tree type, tree arg0, tree arg1)
3852 bool mul0 = TREE_CODE (arg0) == MULT_EXPR;
3853 bool mul1 = TREE_CODE (arg1) == MULT_EXPR;
3855 /* (A / C) +- (B / C) -> (A +- B) / C. */
3856 if (mul0 == mul1
3857 && operand_equal_p (TREE_OPERAND (arg0, 1),
3858 TREE_OPERAND (arg1, 1), 0))
3859 return fold_build2 (mul0 ? MULT_EXPR : RDIV_EXPR, type,
3860 fold_build2 (code, type,
3861 TREE_OPERAND (arg0, 0),
3862 TREE_OPERAND (arg1, 0)),
3863 TREE_OPERAND (arg0, 1));
3865 /* (A / C1) +- (A / C2) -> A * (1 / C1 +- 1 / C2). */
3866 if (operand_equal_p (TREE_OPERAND (arg0, 0),
3867 TREE_OPERAND (arg1, 0), 0)
3868 && TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
3869 && TREE_CODE (TREE_OPERAND (arg1, 1)) == REAL_CST)
3871 REAL_VALUE_TYPE r0, r1;
3872 r0 = TREE_REAL_CST (TREE_OPERAND (arg0, 1));
3873 r1 = TREE_REAL_CST (TREE_OPERAND (arg1, 1));
3874 if (!mul0)
3875 real_arithmetic (&r0, RDIV_EXPR, &dconst1, &r0);
3876 if (!mul1)
3877 real_arithmetic (&r1, RDIV_EXPR, &dconst1, &r1);
3878 real_arithmetic (&r0, code, &r0, &r1);
3879 return fold_build2 (MULT_EXPR, type,
3880 TREE_OPERAND (arg0, 0),
3881 build_real (type, r0));
3884 return NULL_TREE;
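/* Illustrative sketch, not part of the original file: why this transform
   is unsafe for floating point.  Under round-to-nearest, a/c + b/c and
   (a + b)/c can differ in the last bit, and a + b may overflow where the
   separate quotients do not, so the rewrite is only acceptable under
   unsafe-math-style assumptions.  */
#if 0
#include <stdio.h>

static void
distribute_demo (void)
{
  double a = 1e308, b = 1e308, c = 4.0;
  printf ("%g\n", a / c + b / c);   /* 5e307, finite */
  printf ("%g\n", (a + b) / c);     /* a + b overflows to inf */
}
#endif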
3887 /* Return a BIT_FIELD_REF of type TYPE to refer to BITSIZE bits of INNER
3888 starting at BITPOS. The field is unsigned if UNSIGNEDP is nonzero. */
3890 static tree
3891 make_bit_field_ref (tree inner, tree type, HOST_WIDE_INT bitsize,
3892 HOST_WIDE_INT bitpos, int unsignedp)
3894 tree result, bftype;
3896 if (bitpos == 0)
3898 tree size = TYPE_SIZE (TREE_TYPE (inner));
3899 if ((INTEGRAL_TYPE_P (TREE_TYPE (inner))
3900 || POINTER_TYPE_P (TREE_TYPE (inner)))
3901 && host_integerp (size, 0)
3902 && tree_low_cst (size, 0) == bitsize)
3903 return fold_convert (type, inner);
3906 bftype = type;
3907 if (TYPE_PRECISION (bftype) != bitsize
3908 || TYPE_UNSIGNED (bftype) == !unsignedp)
3909 bftype = build_nonstandard_integer_type (bitsize, 0);
3911 result = build3 (BIT_FIELD_REF, bftype, inner,
3912 size_int (bitsize), bitsize_int (bitpos));
3914 if (bftype != type)
3915 result = fold_convert (type, result);
3917 return result;
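/* [Editor's note: illustrative sketch, not part of fold-const.c.  With
   little-endian bit numbering, the BIT_FIELD_REF built above denotes the
   same value as a shift-and-mask extraction; `extract_bits' is a
   hypothetical plain-C rendering, assuming a 32-bit word, 0 < bitsize < 32
   and bitpos + bitsize <= 32.] */
static unsigned
extract_bits (unsigned word, int bitpos, int bitsize)
{
  /* Drop the low BITPOS bits, then keep the low BITSIZE bits.  */
  return (word >> bitpos) & ((1u << bitsize) - 1);
}
/* For example, extract_bits (0xabcd, 4, 8) == 0xbc.  */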
3920 /* Optimize a bit-field compare.
3922 There are two cases: First is a compare against a constant and the
3923 second is a comparison of two items where the fields are at the same
3924 bit position relative to the start of a chunk (byte, halfword, word)
3925 large enough to contain it. In these cases we can avoid the shift
3926 implicit in bitfield extractions.
3928 For constants, we emit a compare of the shifted constant with the
3929 BIT_AND_EXPR of a mask and a byte, halfword, or word of the operand being
3930 compared. For two fields at the same position, we do the ANDs with the
3931 similar mask and compare the result of the ANDs.
3933 CODE is the comparison code, known to be either NE_EXPR or EQ_EXPR.
3934 COMPARE_TYPE is the type of the comparison, and LHS and RHS
3935 are the left and right operands of the comparison, respectively.
3937 If the optimization described above can be done, we return the resulting
3938 tree. Otherwise we return zero. */
3940 static tree
3941 optimize_bit_field_compare (enum tree_code code, tree compare_type,
3942 tree lhs, tree rhs)
3944 HOST_WIDE_INT lbitpos, lbitsize, rbitpos, rbitsize, nbitpos, nbitsize;
3945 tree type = TREE_TYPE (lhs);
3946 tree signed_type, unsigned_type;
3947 int const_p = TREE_CODE (rhs) == INTEGER_CST;
3948 enum machine_mode lmode, rmode, nmode;
3949 int lunsignedp, runsignedp;
3950 int lvolatilep = 0, rvolatilep = 0;
3951 tree linner, rinner = NULL_TREE;
3952 tree mask;
3953 tree offset;
3955 /* Get all the information about the extractions being done. If the bit size
3956 is the same as the size of the underlying object, we aren't doing an
3957 extraction at all and so can do nothing. We also don't want to
3958 do anything if the inner expression is a PLACEHOLDER_EXPR since we
3959 then will no longer be able to replace it. */
3960 linner = get_inner_reference (lhs, &lbitsize, &lbitpos, &offset, &lmode,
3961 &lunsignedp, &lvolatilep, false);
3962 if (linner == lhs || lbitsize == GET_MODE_BITSIZE (lmode) || lbitsize < 0
3963 || offset != 0 || TREE_CODE (linner) == PLACEHOLDER_EXPR)
3964 return 0;
3966 if (!const_p)
3968 /* If this is not a constant, we can only do something if bit positions,
3969 sizes, and signedness are the same. */
3970 rinner = get_inner_reference (rhs, &rbitsize, &rbitpos, &offset, &rmode,
3971 &runsignedp, &rvolatilep, false);
3973 if (rinner == rhs || lbitpos != rbitpos || lbitsize != rbitsize
3974 || lunsignedp != runsignedp || offset != 0
3975 || TREE_CODE (rinner) == PLACEHOLDER_EXPR)
3976 return 0;
3979 /* See if we can find a mode to refer to this field. We should be able to,
3980 but fail if we can't. */
3981 nmode = get_best_mode (lbitsize, lbitpos,
3982 const_p ? TYPE_ALIGN (TREE_TYPE (linner))
3983 : MIN (TYPE_ALIGN (TREE_TYPE (linner)),
3984 TYPE_ALIGN (TREE_TYPE (rinner))),
3985 word_mode, lvolatilep || rvolatilep);
3986 if (nmode == VOIDmode)
3987 return 0;
3989 /* Set signed and unsigned types of the precision of this mode for the
3990 shifts below. */
3991 signed_type = lang_hooks.types.type_for_mode (nmode, 0);
3992 unsigned_type = lang_hooks.types.type_for_mode (nmode, 1);
3994 /* Compute the bit position and size for the new reference and our offset
3995 within it. If the new reference is the same size as the original, we
3996 won't optimize anything, so return zero. */
3997 nbitsize = GET_MODE_BITSIZE (nmode);
3998 nbitpos = lbitpos & ~ (nbitsize - 1);
3999 lbitpos -= nbitpos;
4000 if (nbitsize == lbitsize)
4001 return 0;
4003 if (BYTES_BIG_ENDIAN)
4004 lbitpos = nbitsize - lbitsize - lbitpos;
4006 /* Make the mask to be used against the extracted field. */
4007 mask = build_int_cst_type (unsigned_type, -1);
4008 mask = const_binop (LSHIFT_EXPR, mask, size_int (nbitsize - lbitsize), 0);
4009 mask = const_binop (RSHIFT_EXPR, mask,
4010 size_int (nbitsize - lbitsize - lbitpos), 0);
4012 if (! const_p)
4013 /* If not comparing with constant, just rework the comparison
4014 and return. */
4015 return fold_build2 (code, compare_type,
4016 fold_build2 (BIT_AND_EXPR, unsigned_type,
4017 make_bit_field_ref (linner,
4018 unsigned_type,
4019 nbitsize, nbitpos,
4020 1),
4021 mask),
4022 fold_build2 (BIT_AND_EXPR, unsigned_type,
4023 make_bit_field_ref (rinner,
4024 unsigned_type,
4025 nbitsize, nbitpos,
4026 1),
4027 mask));
4029 /* Otherwise, we are handling the constant case. See if the constant is too
4030 big for the field. Warn and return a tree for 0 (false) if so. We do
4031 this not only for its own sake, but to avoid having to test for this
4032 error case below. If we didn't, we might generate wrong code.
4034 For unsigned fields, the constant shifted right by the field length should
4035 be all zero. For signed fields, the high-order bits should agree with
4036 the sign bit. */
4038 if (lunsignedp)
4040 if (! integer_zerop (const_binop (RSHIFT_EXPR,
4041 fold_convert (unsigned_type, rhs),
4042 size_int (lbitsize), 0)))
4044 warning (0, "comparison is always %d due to width of bit-field",
4045 code == NE_EXPR);
4046 return constant_boolean_node (code == NE_EXPR, compare_type);
4049 else
4051 tree tem = const_binop (RSHIFT_EXPR, fold_convert (signed_type, rhs),
4052 size_int (lbitsize - 1), 0);
4053 if (! integer_zerop (tem) && ! integer_all_onesp (tem))
4055 warning (0, "comparison is always %d due to width of bit-field",
4056 code == NE_EXPR);
4057 return constant_boolean_node (code == NE_EXPR, compare_type);
4061 /* Single-bit compares should always be against zero. */
4062 if (lbitsize == 1 && ! integer_zerop (rhs))
4064 code = code == EQ_EXPR ? NE_EXPR : EQ_EXPR;
4065 rhs = build_int_cst (type, 0);
4068 /* Make a new bitfield reference, shift the constant over the
4069 appropriate number of bits and mask it with the computed mask
4070 (in case this was a signed field). If we changed it, make a new one. */
4071 lhs = make_bit_field_ref (linner, unsigned_type, nbitsize, nbitpos, 1);
4072 if (lvolatilep)
4074 TREE_SIDE_EFFECTS (lhs) = 1;
4075 TREE_THIS_VOLATILE (lhs) = 1;
4078 rhs = const_binop (BIT_AND_EXPR,
4079 const_binop (LSHIFT_EXPR,
4080 fold_convert (unsigned_type, rhs),
4081 size_int (lbitpos), 0),
4082 mask, 0);
4084 return build2 (code, compare_type,
4085 build2 (BIT_AND_EXPR, unsigned_type, lhs, mask),
4086 rhs);
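/* [Editor's note: illustrative sketch, not part of fold-const.c.  The
   constant case above boils down to the comparison below: build the mask
   with the same two shifts of an all-ones value, then compare the masked
   word against the constant shifted into place, with no extraction shift.
   `field_eq_const' is hypothetical; it assumes a 32-bit word,
   0 < bitsize <= 32 and bitpos + bitsize <= 32.] */
static int
field_eq_const (unsigned word, int bitpos, int bitsize, unsigned cst)
{
  /* BITSIZE ones, positioned BITPOS bits up from the low end.  */
  unsigned mask = (~0u >> (32 - bitsize)) << bitpos;
  return (word & mask) == ((cst << bitpos) & mask);
}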
4089 /* Subroutine for fold_truthop: decode a field reference.
4091 If EXP is a comparison reference, we return the innermost reference.
4093 *PBITSIZE is set to the number of bits in the reference, *PBITPOS is
4094 set to the starting bit number.
4096 If the innermost field can be completely contained in a mode-sized
4097 unit, *PMODE is set to that mode. Otherwise, it is set to VOIDmode.
4099 *PVOLATILEP is set to 1 if any expression encountered is volatile;
4100 otherwise it is not changed.
4102 *PUNSIGNEDP is set to the signedness of the field.
4104 *PMASK is set to the mask used. This is either contained in a
4105 BIT_AND_EXPR or derived from the width of the field.
4107 *PAND_MASK is set to the mask found in a BIT_AND_EXPR, if any.
4109 Return 0 if this is not a component reference or is one that we can't
4110 do anything with. */
4112 static tree
4113 decode_field_reference (tree exp, HOST_WIDE_INT *pbitsize,
4114 HOST_WIDE_INT *pbitpos, enum machine_mode *pmode,
4115 int *punsignedp, int *pvolatilep,
4116 tree *pmask, tree *pand_mask)
4118 tree outer_type = 0;
4119 tree and_mask = 0;
4120 tree mask, inner, offset;
4121 tree unsigned_type;
4122 unsigned int precision;
4124 /* All the optimizations using this function assume integer fields.
4125 There are problems with FP fields since the type_for_size call
4126 below can fail for, e.g., XFmode. */
4127 if (! INTEGRAL_TYPE_P (TREE_TYPE (exp)))
4128 return 0;
4130 /* We are interested in the bare arrangement of bits, so strip everything
4131 that doesn't affect the machine mode. However, record the type of the
4132 outermost expression if it may matter below. */
4133 if (CONVERT_EXPR_P (exp)
4134 || TREE_CODE (exp) == NON_LVALUE_EXPR)
4135 outer_type = TREE_TYPE (exp);
4136 STRIP_NOPS (exp);
4138 if (TREE_CODE (exp) == BIT_AND_EXPR)
4140 and_mask = TREE_OPERAND (exp, 1);
4141 exp = TREE_OPERAND (exp, 0);
4142 STRIP_NOPS (exp); STRIP_NOPS (and_mask);
4143 if (TREE_CODE (and_mask) != INTEGER_CST)
4144 return 0;
4147 inner = get_inner_reference (exp, pbitsize, pbitpos, &offset, pmode,
4148 punsignedp, pvolatilep, false);
4149 if ((inner == exp && and_mask == 0)
4150 || *pbitsize < 0 || offset != 0
4151 || TREE_CODE (inner) == PLACEHOLDER_EXPR)
4152 return 0;
4154 /* If the number of bits in the reference is the same as the bitsize of
4155 the outer type, then the outer type gives the signedness. Otherwise
4156 (in case of a small bitfield) the signedness is unchanged. */
4157 if (outer_type && *pbitsize == TYPE_PRECISION (outer_type))
4158 *punsignedp = TYPE_UNSIGNED (outer_type);
4160 /* Compute the mask to access the bitfield. */
4161 unsigned_type = lang_hooks.types.type_for_size (*pbitsize, 1);
4162 precision = TYPE_PRECISION (unsigned_type);
4164 mask = build_int_cst_type (unsigned_type, -1);
4166 mask = const_binop (LSHIFT_EXPR, mask, size_int (precision - *pbitsize), 0);
4167 mask = const_binop (RSHIFT_EXPR, mask, size_int (precision - *pbitsize), 0);
4169 /* Merge it with the mask we found in the BIT_AND_EXPR, if any. */
4170 if (and_mask != 0)
4171 mask = fold_build2 (BIT_AND_EXPR, unsigned_type,
4172 fold_convert (unsigned_type, and_mask), mask);
4174 *pmask = mask;
4175 *pand_mask = and_mask;
4176 return inner;
4179 /* Return nonzero if MASK represents a mask of SIZE ones in the low-order
4180 bit positions. */
4182 static int
4183 all_ones_mask_p (const_tree mask, int size)
4185 tree type = TREE_TYPE (mask);
4186 unsigned int precision = TYPE_PRECISION (type);
4187 tree tmask;
4189 tmask = build_int_cst_type (signed_type_for (type), -1);
4191 return
4192 tree_int_cst_equal (mask,
4193 const_binop (RSHIFT_EXPR,
4194 const_binop (LSHIFT_EXPR, tmask,
4195 size_int (precision - size),
4196 0),
4197 size_int (precision - size), 0));
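/* [Editor's note: illustrative sketch, not part of fold-const.c.  The
   shift-left-then-right idiom above produces exactly SIZE low-order ones.
   `low_ones' is a hypothetical plain-C version; PRECISION is the width of
   the unsigned type (32 here), with 0 < size <= precision so neither
   shift is by the full width.] */
static unsigned
low_ones (int precision, int size)
{
  return (~0u << (precision - size)) >> (precision - size);
}
/* low_ones (32, 5) == 0x1f, a mask that satisfies the test above for
   SIZE == 5.  */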
4200 /* Subroutine for fold: determine if VAL is the INTEGER_CST that
4201 represents the sign bit of EXP's type. If EXP represents a sign
4202 or zero extension, also test VAL against the unextended type.
4203 The return value is the (sub)expression whose sign bit is VAL,
4204 or NULL_TREE otherwise. */
4206 static tree
4207 sign_bit_p (tree exp, const_tree val)
4209 unsigned HOST_WIDE_INT mask_lo, lo;
4210 HOST_WIDE_INT mask_hi, hi;
4211 int width;
4212 tree t;
4214 /* Tree EXP must have an integral type. */
4215 t = TREE_TYPE (exp);
4216 if (! INTEGRAL_TYPE_P (t))
4217 return NULL_TREE;
4219 /* Tree VAL must be an integer constant. */
4220 if (TREE_CODE (val) != INTEGER_CST
4221 || TREE_OVERFLOW (val))
4222 return NULL_TREE;
4224 width = TYPE_PRECISION (t);
4225 if (width > HOST_BITS_PER_WIDE_INT)
4227 hi = (unsigned HOST_WIDE_INT) 1 << (width - HOST_BITS_PER_WIDE_INT - 1);
4228 lo = 0;
4230 mask_hi = ((unsigned HOST_WIDE_INT) -1
4231 >> (2 * HOST_BITS_PER_WIDE_INT - width));
4232 mask_lo = -1;
4234 else
4236 hi = 0;
4237 lo = (unsigned HOST_WIDE_INT) 1 << (width - 1);
4239 mask_hi = 0;
4240 mask_lo = ((unsigned HOST_WIDE_INT) -1
4241 >> (HOST_BITS_PER_WIDE_INT - width));
4244 /* We mask off those bits beyond TREE_TYPE (exp) so that we can
4245 treat VAL as if it were unsigned. */
4246 if ((TREE_INT_CST_HIGH (val) & mask_hi) == hi
4247 && (TREE_INT_CST_LOW (val) & mask_lo) == lo)
4248 return exp;
4250 /* Handle extension from a narrower type. */
4251 if (TREE_CODE (exp) == NOP_EXPR
4252 && TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0))) < width)
4253 return sign_bit_p (TREE_OPERAND (exp, 0), val);
4255 return NULL_TREE;
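/* [Editor's note: illustrative sketch, not part of fold-const.c.  For a
   W-bit two's complement type the sign-bit constant computed above is
   1 << (W - 1), and masking with it is the same test as comparing against
   zero.  Hypothetical check, assuming two's complement int.] */
#include <assert.h>
#include <limits.h>
static void
check_sign_bit (int x)
{
  unsigned width = sizeof (int) * CHAR_BIT;
  unsigned sign = 1u << (width - 1);
  assert ((((unsigned) x & sign) != 0) == (x < 0));
}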
4258 /* Subroutine for fold_truthop: determine if an operand is simple enough
4259 to be evaluated unconditionally. */
4261 static int
4262 simple_operand_p (const_tree exp)
4264 /* Strip any conversions that don't change the machine mode. */
4265 STRIP_NOPS (exp);
4267 return (CONSTANT_CLASS_P (exp)
4268 || TREE_CODE (exp) == SSA_NAME
4269 || (DECL_P (exp)
4270 && ! TREE_ADDRESSABLE (exp)
4271 && ! TREE_THIS_VOLATILE (exp)
4272 && ! DECL_NONLOCAL (exp)
4273 /* Don't regard global variables as simple. They may be
4274 allocated in ways unknown to the compiler (shared memory,
4275 #pragma weak, etc). */
4276 && ! TREE_PUBLIC (exp)
4277 && ! DECL_EXTERNAL (exp)
4278 /* Loading a static variable is unduly expensive, but global
4279 registers aren't expensive. */
4280 && (! TREE_STATIC (exp) || DECL_REGISTER (exp))));
4283 /* The following functions are subroutines to fold_range_test and allow it to
4284 try to change a logical combination of comparisons into a range test.
4286 For example, both
4287 X == 2 || X == 3 || X == 4 || X == 5
4288 and
4289 X >= 2 && X <= 5
4290 are converted to
4291 (unsigned) (X - 2) <= 3
4293 We describe each set of comparisons as being either inside or outside
4294 a range, using a variable named like IN_P, and then describe the
4295 range with a lower and upper bound. If one of the bounds is omitted,
4296 it represents either the highest or lowest value of the type.
4298 In the comments below, we represent a range by two numbers in brackets
4299 preceded by a "+" to designate being inside that range, or a "-" to
4300 designate being outside that range, so the condition can be inverted by
4301 flipping the prefix. An omitted bound is represented by a "-". For
4302 example, "- [-, 10]" means being outside the range starting at the lowest
4303 possible value and ending at 10, in other words, being greater than 10.
4304 The range "+ [-, -]" is always true and hence the range "- [-, -]" is
4305 always false.
4307 We set up things so that the missing bounds are handled in a consistent
4308 manner so neither a missing bound nor "true" and "false" need to be
4309 handled using a special case. */
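/* [Editor's note: illustrative sketch, not part of fold-const.c.  The
   conversion described above can be spot-checked in plain C; doing the
   subtraction in unsigned arithmetic keeps it well defined for every int
   input.  `check_range_fold' is a hypothetical standalone test.] */
#include <assert.h>
static void
check_range_fold (int x)
{
  int ors = (x == 2 || x == 3 || x == 4 || x == 5);
  int range = (x >= 2 && x <= 5);
  /* The single unsigned comparison both forms fold to.  */
  int folded = ((unsigned) x - 2u <= 3u);
  assert (ors == range && range == folded);
}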
4311 /* Return the result of applying CODE to ARG0 and ARG1, but handle the case
4312 of ARG0 and/or ARG1 being omitted, meaning an unlimited range. UPPER0_P
4313 and UPPER1_P are nonzero if the respective argument is an upper bound
4314 and zero for a lower. TYPE, if nonzero, is the type of the result; it
4315 must be specified for a comparison. ARG1 will be converted to ARG0's
4316 type if both are specified. */
4318 static tree
4319 range_binop (enum tree_code code, tree type, tree arg0, int upper0_p,
4320 tree arg1, int upper1_p)
4322 tree tem;
4323 int result;
4324 int sgn0, sgn1;
4326 /* If neither arg represents infinity, do the normal operation.
4327 Else, if not a comparison, return infinity. Else handle the special
4328 comparison rules. Note that most of the cases below won't occur, but
4329 are handled for consistency. */
4331 if (arg0 != 0 && arg1 != 0)
4333 tem = fold_build2 (code, type != 0 ? type : TREE_TYPE (arg0),
4334 arg0, fold_convert (TREE_TYPE (arg0), arg1));
4335 STRIP_NOPS (tem);
4336 return TREE_CODE (tem) == INTEGER_CST ? tem : 0;
4339 if (TREE_CODE_CLASS (code) != tcc_comparison)
4340 return 0;
4342 /* Set SGN[01] to -1 if ARG[01] is a lower bound, 1 for upper, and 0
4343 for neither. In real maths, we cannot assume open ended ranges are
4344 the same. But, this is computer arithmetic, where numbers are finite.
4345 We can therefore make the transformation of any unbounded range with
4346 the value Z, Z being greater than any representable number. This permits
4347 us to treat unbounded ranges as equal. */
4348 sgn0 = arg0 != 0 ? 0 : (upper0_p ? 1 : -1);
4349 sgn1 = arg1 != 0 ? 0 : (upper1_p ? 1 : -1);
4350 switch (code)
4352 case EQ_EXPR:
4353 result = sgn0 == sgn1;
4354 break;
4355 case NE_EXPR:
4356 result = sgn0 != sgn1;
4357 break;
4358 case LT_EXPR:
4359 result = sgn0 < sgn1;
4360 break;
4361 case LE_EXPR:
4362 result = sgn0 <= sgn1;
4363 break;
4364 case GT_EXPR:
4365 result = sgn0 > sgn1;
4366 break;
4367 case GE_EXPR:
4368 result = sgn0 >= sgn1;
4369 break;
4370 default:
4371 gcc_unreachable ();
4374 return constant_boolean_node (result, type);
4377 /* Given EXP, a logical expression, set the range it is testing into
4378 variables denoted by PIN_P, PLOW, and PHIGH. Return the expression
4379 actually being tested. *PLOW and *PHIGH will be made of the same
4380 type as the returned expression. If EXP is not a comparison, we
4381 will most likely not be returning a useful value and range. Set
4382 *STRICT_OVERFLOW_P to true if the return value is only valid
4383 because signed overflow is undefined; otherwise, do not change
4384 *STRICT_OVERFLOW_P. */
4386 static tree
4387 make_range (tree exp, int *pin_p, tree *plow, tree *phigh,
4388 bool *strict_overflow_p)
4390 enum tree_code code;
4391 tree arg0 = NULL_TREE, arg1 = NULL_TREE;
4392 tree exp_type = NULL_TREE, arg0_type = NULL_TREE;
4393 int in_p, n_in_p;
4394 tree low, high, n_low, n_high;
4396 /* Start with simply saying "EXP != 0" and then look at the code of EXP
4397 and see if we can refine the range. Some of the cases below may not
4398 happen, but it doesn't seem worth worrying about this. We "continue"
4399 the outer loop when we've changed something; otherwise we "break"
4400 the switch, which will "break" the while. */
4402 in_p = 0;
4403 low = high = build_int_cst (TREE_TYPE (exp), 0);
4405 while (1)
4407 code = TREE_CODE (exp);
4408 exp_type = TREE_TYPE (exp);
4410 if (IS_EXPR_CODE_CLASS (TREE_CODE_CLASS (code)))
4412 if (TREE_OPERAND_LENGTH (exp) > 0)
4413 arg0 = TREE_OPERAND (exp, 0);
4414 if (TREE_CODE_CLASS (code) == tcc_comparison
4415 || TREE_CODE_CLASS (code) == tcc_unary
4416 || TREE_CODE_CLASS (code) == tcc_binary)
4417 arg0_type = TREE_TYPE (arg0);
4418 if (TREE_CODE_CLASS (code) == tcc_binary
4419 || TREE_CODE_CLASS (code) == tcc_comparison
4420 || (TREE_CODE_CLASS (code) == tcc_expression
4421 && TREE_OPERAND_LENGTH (exp) > 1))
4422 arg1 = TREE_OPERAND (exp, 1);
4425 switch (code)
4427 case TRUTH_NOT_EXPR:
4428 in_p = ! in_p, exp = arg0;
4429 continue;
4431 case EQ_EXPR: case NE_EXPR:
4432 case LT_EXPR: case LE_EXPR: case GE_EXPR: case GT_EXPR:
4433 /* We can only do something if the range is testing for zero
4434 and if the second operand is an integer constant. Note that
4435 saying something is "in" the range we make is done by
4436 complementing IN_P since it will be set in the initial case of
4437 being not equal to zero; "out" is leaving it alone. */
4438 if (low == 0 || high == 0
4439 || ! integer_zerop (low) || ! integer_zerop (high)
4440 || TREE_CODE (arg1) != INTEGER_CST)
4441 break;
4443 switch (code)
4445 case NE_EXPR: /* - [c, c] */
4446 low = high = arg1;
4447 break;
4448 case EQ_EXPR: /* + [c, c] */
4449 in_p = ! in_p, low = high = arg1;
4450 break;
4451 case GT_EXPR: /* - [-, c] */
4452 low = 0, high = arg1;
4453 break;
4454 case GE_EXPR: /* + [c, -] */
4455 in_p = ! in_p, low = arg1, high = 0;
4456 break;
4457 case LT_EXPR: /* - [c, -] */
4458 low = arg1, high = 0;
4459 break;
4460 case LE_EXPR: /* + [-, c] */
4461 in_p = ! in_p, low = 0, high = arg1;
4462 break;
4463 default:
4464 gcc_unreachable ();
4467 /* If this is an unsigned comparison, we also know that EXP is
4468 greater than or equal to zero. We base the range tests we make
4469 on that fact, so we record it here so we can parse existing
4470 range tests. We test arg0_type since often the return type
4471 of, e.g. EQ_EXPR, is boolean. */
4472 if (TYPE_UNSIGNED (arg0_type) && (low == 0 || high == 0))
4474 if (! merge_ranges (&n_in_p, &n_low, &n_high,
4475 in_p, low, high, 1,
4476 build_int_cst (arg0_type, 0),
4477 NULL_TREE))
4478 break;
4480 in_p = n_in_p, low = n_low, high = n_high;
4482 /* If the high bound is missing, but we have a nonzero low
4483 bound, reverse the range so it goes from zero to the low bound
4484 minus 1. */
4485 if (high == 0 && low && ! integer_zerop (low))
4487 in_p = ! in_p;
4488 high = range_binop (MINUS_EXPR, NULL_TREE, low, 0,
4489 integer_one_node, 0);
4490 low = build_int_cst (arg0_type, 0);
4494 exp = arg0;
4495 continue;
4497 case NEGATE_EXPR:
4498 /* (-x) IN [a,b] -> x in [-b, -a] */
4499 n_low = range_binop (MINUS_EXPR, exp_type,
4500 build_int_cst (exp_type, 0),
4501 0, high, 1);
4502 n_high = range_binop (MINUS_EXPR, exp_type,
4503 build_int_cst (exp_type, 0),
4504 0, low, 0);
4505 low = n_low, high = n_high;
4506 exp = arg0;
4507 continue;
4509 case BIT_NOT_EXPR:
4510 /* ~ X -> -X - 1 */
4511 exp = build2 (MINUS_EXPR, exp_type, negate_expr (arg0),
4512 build_int_cst (exp_type, 1));
4513 continue;
4515 case PLUS_EXPR: case MINUS_EXPR:
4516 if (TREE_CODE (arg1) != INTEGER_CST)
4517 break;
4519 /* If flag_wrapv and ARG0_TYPE is signed, then we cannot
4520 move a constant to the other side. */
4521 if (!TYPE_UNSIGNED (arg0_type)
4522 && !TYPE_OVERFLOW_UNDEFINED (arg0_type))
4523 break;
4525 /* If EXP is signed, any overflow in the computation is undefined,
4526 so we don't worry about it so long as our computations on
4527 the bounds don't overflow. For unsigned, overflow is defined
4528 and this is exactly the right thing. */
4529 n_low = range_binop (code == MINUS_EXPR ? PLUS_EXPR : MINUS_EXPR,
4530 arg0_type, low, 0, arg1, 0);
4531 n_high = range_binop (code == MINUS_EXPR ? PLUS_EXPR : MINUS_EXPR,
4532 arg0_type, high, 1, arg1, 0);
4533 if ((n_low != 0 && TREE_OVERFLOW (n_low))
4534 || (n_high != 0 && TREE_OVERFLOW (n_high)))
4535 break;
4537 if (TYPE_OVERFLOW_UNDEFINED (arg0_type))
4538 *strict_overflow_p = true;
4540 /* Check for an unsigned range which has wrapped around the maximum
4541 value thus making n_high < n_low, and normalize it. */
4542 if (n_low && n_high && tree_int_cst_lt (n_high, n_low))
4544 low = range_binop (PLUS_EXPR, arg0_type, n_high, 0,
4545 integer_one_node, 0);
4546 high = range_binop (MINUS_EXPR, arg0_type, n_low, 0,
4547 integer_one_node, 0);
4549 /* If the range is of the form +/- [ x+1, x ], we won't
4550 be able to normalize it. But then, it represents the
4551 whole range or the empty set, so make it
4552 +/- [ -, - ]. */
4553 if (tree_int_cst_equal (n_low, low)
4554 && tree_int_cst_equal (n_high, high))
4555 low = high = 0;
4556 else
4557 in_p = ! in_p;
4559 else
4560 low = n_low, high = n_high;
4562 exp = arg0;
4563 continue;
4565 CASE_CONVERT: case NON_LVALUE_EXPR:
4566 if (TYPE_PRECISION (arg0_type) > TYPE_PRECISION (exp_type))
4567 break;
4569 if (! INTEGRAL_TYPE_P (arg0_type)
4570 || (low != 0 && ! int_fits_type_p (low, arg0_type))
4571 || (high != 0 && ! int_fits_type_p (high, arg0_type)))
4572 break;
4574 n_low = low, n_high = high;
4576 if (n_low != 0)
4577 n_low = fold_convert (arg0_type, n_low);
4579 if (n_high != 0)
4580 n_high = fold_convert (arg0_type, n_high);
4583 /* If we're converting arg0 from an unsigned type to the signed
4584 type of exp, we will be doing the comparison as unsigned.
4585 The tests above have already verified that LOW and HIGH
4586 are both positive.
4588 So we have to ensure that we will handle large unsigned
4589 values the same way that the current signed bounds treat
4590 negative values. */
4592 if (!TYPE_UNSIGNED (exp_type) && TYPE_UNSIGNED (arg0_type))
4594 tree high_positive;
4595 tree equiv_type;
4596 /* For fixed-point modes, we need to pass the saturating flag
4597 as the 2nd parameter. */
4598 if (ALL_FIXED_POINT_MODE_P (TYPE_MODE (arg0_type)))
4599 equiv_type = lang_hooks.types.type_for_mode
4600 (TYPE_MODE (arg0_type),
4601 TYPE_SATURATING (arg0_type));
4602 else
4603 equiv_type = lang_hooks.types.type_for_mode
4604 (TYPE_MODE (arg0_type), 1);
4606 /* A range without an upper bound is, naturally, unbounded.
4607 Since convert would have cropped a very large value, use
4608 the max value for the destination type. */
4609 high_positive
4610 = TYPE_MAX_VALUE (equiv_type) ? TYPE_MAX_VALUE (equiv_type)
4611 : TYPE_MAX_VALUE (arg0_type);
4613 if (TYPE_PRECISION (exp_type) == TYPE_PRECISION (arg0_type))
4614 high_positive = fold_build2 (RSHIFT_EXPR, arg0_type,
4615 fold_convert (arg0_type,
4616 high_positive),
4617 build_int_cst (arg0_type, 1));
4619 /* If the low bound is specified, "and" the range with the
4620 range for which the original unsigned value will be
4621 positive. */
4622 if (low != 0)
4624 if (! merge_ranges (&n_in_p, &n_low, &n_high,
4625 1, n_low, n_high, 1,
4626 fold_convert (arg0_type,
4627 integer_zero_node),
4628 high_positive))
4629 break;
4631 in_p = (n_in_p == in_p);
4633 else
4635 /* Otherwise, "or" the range with the range of the input
4636 that will be interpreted as negative. */
4637 if (! merge_ranges (&n_in_p, &n_low, &n_high,
4638 0, n_low, n_high, 1,
4639 fold_convert (arg0_type,
4640 integer_zero_node),
4641 high_positive))
4642 break;
4644 in_p = (in_p != n_in_p);
4648 exp = arg0;
4649 low = n_low, high = n_high;
4650 continue;
4652 default:
4653 break;
4656 break;
4659 /* If EXP is a constant, we can evaluate whether this is true or false. */
4660 if (TREE_CODE (exp) == INTEGER_CST)
4662 in_p = in_p == (integer_onep (range_binop (GE_EXPR, integer_type_node,
4663 exp, 0, low, 0))
4664 && integer_onep (range_binop (LE_EXPR, integer_type_node,
4665 exp, 1, high, 1)));
4666 low = high = 0;
4667 exp = 0;
4670 *pin_p = in_p, *plow = low, *phigh = high;
4671 return exp;
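/* [Editor's note: illustrative sketch, not part of fold-const.c.  The
   BIT_NOT_EXPR case above rewrites ~X as -X - 1, so the range machinery
   only has to understand negation and subtraction.  On two's complement
   the identity is exact; keep X != INT_MIN, where -X overflows.] */
#include <assert.h>
static void
check_bit_not_identity (int x)
{
  assert (~x == -x - 1);
}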
4674 /* Given a range, LOW, HIGH, and IN_P, an expression, EXP, and a result
4675 type, TYPE, return an expression to test if EXP is in (or out of, depending
4676 on IN_P) the range. Return 0 if the test couldn't be created. */
4678 static tree
4679 build_range_check (tree type, tree exp, int in_p, tree low, tree high)
4681 tree etype = TREE_TYPE (exp);
4682 tree value;
4684 #ifdef HAVE_canonicalize_funcptr_for_compare
4685 /* Disable this optimization for function pointer expressions
4686 on targets that require function pointer canonicalization. */
4687 if (HAVE_canonicalize_funcptr_for_compare
4688 && TREE_CODE (etype) == POINTER_TYPE
4689 && TREE_CODE (TREE_TYPE (etype)) == FUNCTION_TYPE)
4690 return NULL_TREE;
4691 #endif
4693 if (! in_p)
4695 value = build_range_check (type, exp, 1, low, high);
4696 if (value != 0)
4697 return invert_truthvalue (value);
4699 return 0;
4702 if (low == 0 && high == 0)
4703 return build_int_cst (type, 1);
4705 if (low == 0)
4706 return fold_build2 (LE_EXPR, type, exp,
4707 fold_convert (etype, high));
4709 if (high == 0)
4710 return fold_build2 (GE_EXPR, type, exp,
4711 fold_convert (etype, low));
4713 if (operand_equal_p (low, high, 0))
4714 return fold_build2 (EQ_EXPR, type, exp,
4715 fold_convert (etype, low));
4717 if (integer_zerop (low))
4719 if (! TYPE_UNSIGNED (etype))
4721 etype = unsigned_type_for (etype);
4722 high = fold_convert (etype, high);
4723 exp = fold_convert (etype, exp);
4725 return build_range_check (type, exp, 1, 0, high);
4728 /* Optimize (c>=1) && (c<=127) into (signed char)c > 0. */
4729 if (integer_onep (low) && TREE_CODE (high) == INTEGER_CST)
4731 unsigned HOST_WIDE_INT lo;
4732 HOST_WIDE_INT hi;
4733 int prec;
4735 prec = TYPE_PRECISION (etype);
4736 if (prec <= HOST_BITS_PER_WIDE_INT)
4738 hi = 0;
4739 lo = ((unsigned HOST_WIDE_INT) 1 << (prec - 1)) - 1;
4741 else
4743 hi = ((HOST_WIDE_INT) 1 << (prec - HOST_BITS_PER_WIDE_INT - 1)) - 1;
4744 lo = (unsigned HOST_WIDE_INT) -1;
4747 if (TREE_INT_CST_HIGH (high) == hi && TREE_INT_CST_LOW (high) == lo)
4749 if (TYPE_UNSIGNED (etype))
4751 tree signed_etype = signed_type_for (etype);
4752 if (TYPE_PRECISION (signed_etype) != TYPE_PRECISION (etype))
4753 etype
4754 = build_nonstandard_integer_type (TYPE_PRECISION (etype), 0);
4755 else
4756 etype = signed_etype;
4757 exp = fold_convert (etype, exp);
4759 return fold_build2 (GT_EXPR, type, exp,
4760 build_int_cst (etype, 0));
4764 /* Optimize (c>=low) && (c<=high) into (c-low>=0) && (c-low<=high-low).
4765 This requires wrap-around arithmetic for the type of the expression. */
4766 switch (TREE_CODE (etype))
4768 case INTEGER_TYPE:
4769 /* There is no requirement that LOW be within the range of ETYPE
4770 if the latter is a subtype. It must, however, be within the base
4771 type of ETYPE. So be sure we do the subtraction in that type. */
4772 if (TREE_TYPE (etype))
4773 etype = TREE_TYPE (etype);
4774 break;
4776 case ENUMERAL_TYPE:
4777 case BOOLEAN_TYPE:
4778 etype = lang_hooks.types.type_for_size (TYPE_PRECISION (etype),
4779 TYPE_UNSIGNED (etype));
4780 break;
4782 default:
4783 break;
4786 high = fold_convert (etype, high);
4787 low = fold_convert (etype, low);
4788 exp = fold_convert (etype, exp);
4790 value = const_binop (MINUS_EXPR, high, low, 0);
4793 if (POINTER_TYPE_P (etype))
4795 if (value != 0 && !TREE_OVERFLOW (value))
4797 low = fold_convert (sizetype, low);
4798 low = fold_build1 (NEGATE_EXPR, sizetype, low);
4799 return build_range_check (type,
4800 fold_build2 (POINTER_PLUS_EXPR, etype, exp, low),
4801 1, build_int_cst (etype, 0), value);
4803 return 0;
4806 if (value != 0 && !TREE_OVERFLOW (value))
4807 return build_range_check (type,
4808 fold_build2 (MINUS_EXPR, etype, exp, low),
4809 1, build_int_cst (etype, 0), value);
4811 return 0;
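/* [Editor's note: illustrative sketch, not part of fold-const.c.  Two of
   the range checks built above, spot-checked in plain C.  The second
   assertion assumes the usual two's complement wrap when an out-of-range
   value is converted to signed char.  Hypothetical helper.] */
#include <assert.h>
static void
check_range_checks (unsigned char c)
{
  /* The generic trick: c in [low, high] iff c - low <= high - low,
     computed in a wrap-around (unsigned) type.  */
  assert ((c >= 20 && c <= 30) == ((unsigned) (c - 20) <= 10));
  /* The (c >= 1 && c <= 127) special case becomes a sign test.  */
  assert ((c >= 1 && c <= 127) == ((signed char) c > 0));
}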
4814 /* Return the predecessor of VAL in its type, handling the infinite case. */
4816 static tree
4817 range_predecessor (tree val)
4819 tree type = TREE_TYPE (val);
4821 if (INTEGRAL_TYPE_P (type)
4822 && operand_equal_p (val, TYPE_MIN_VALUE (type), 0))
4823 return 0;
4824 else
4825 return range_binop (MINUS_EXPR, NULL_TREE, val, 0, integer_one_node, 0);
4828 /* Return the successor of VAL in its type, handling the infinite case. */
4830 static tree
4831 range_successor (tree val)
4833 tree type = TREE_TYPE (val);
4835 if (INTEGRAL_TYPE_P (type)
4836 && operand_equal_p (val, TYPE_MAX_VALUE (type), 0))
4837 return 0;
4838 else
4839 return range_binop (PLUS_EXPR, NULL_TREE, val, 0, integer_one_node, 0);
4842 /* Given two ranges, see if we can merge them into one. Return 1 if we
4843 can, 0 if we can't. Set the output range into the specified parameters. */
4845 static int
4846 merge_ranges (int *pin_p, tree *plow, tree *phigh, int in0_p, tree low0,
4847 tree high0, int in1_p, tree low1, tree high1)
4849 int no_overlap;
4850 int subset;
4851 int temp;
4852 tree tem;
4853 int in_p;
4854 tree low, high;
4855 int lowequal = ((low0 == 0 && low1 == 0)
4856 || integer_onep (range_binop (EQ_EXPR, integer_type_node,
4857 low0, 0, low1, 0)));
4858 int highequal = ((high0 == 0 && high1 == 0)
4859 || integer_onep (range_binop (EQ_EXPR, integer_type_node,
4860 high0, 1, high1, 1)));
4862 /* Make range 0 be the range that starts first, or ends last if they
4863 start at the same value. Swap them if it isn't. */
4864 if (integer_onep (range_binop (GT_EXPR, integer_type_node,
4865 low0, 0, low1, 0))
4866 || (lowequal
4867 && integer_onep (range_binop (GT_EXPR, integer_type_node,
4868 high1, 1, high0, 1))))
4870 temp = in0_p, in0_p = in1_p, in1_p = temp;
4871 tem = low0, low0 = low1, low1 = tem;
4872 tem = high0, high0 = high1, high1 = tem;
4875 /* Now flag two cases, whether the ranges are disjoint or whether the
4876 second range is totally subsumed in the first. Note that the tests
4877 below are simplified by the ones above. */
4878 no_overlap = integer_onep (range_binop (LT_EXPR, integer_type_node,
4879 high0, 1, low1, 0));
4880 subset = integer_onep (range_binop (LE_EXPR, integer_type_node,
4881 high1, 1, high0, 1));
4883 /* We now have four cases, depending on whether we are including or
4884 excluding the two ranges. */
4885 if (in0_p && in1_p)
4887 /* If they don't overlap, the result is false. If the second range
4888 is a subset it is the result. Otherwise, the range is from the start
4889 of the second to the end of the first. */
4890 if (no_overlap)
4891 in_p = 0, low = high = 0;
4892 else if (subset)
4893 in_p = 1, low = low1, high = high1;
4894 else
4895 in_p = 1, low = low1, high = high0;
4898 else if (in0_p && ! in1_p)
4900 /* If they don't overlap, the result is the first range. If they are
4901 equal, the result is false. If the second range is a subset of the
4902 first, and the ranges begin at the same place, we go from just after
4903 the end of the second range to the end of the first. If the second
4904 range is not a subset of the first, or if it is a subset and both
4905 ranges end at the same place, the range starts at the start of the
4906 first range and ends just before the second range.
4907 Otherwise, we can't describe this as a single range. */
4908 if (no_overlap)
4909 in_p = 1, low = low0, high = high0;
4910 else if (lowequal && highequal)
4911 in_p = 0, low = high = 0;
4912 else if (subset && lowequal)
4914 low = range_successor (high1);
4915 high = high0;
4916 in_p = 1;
4917 if (low == 0)
4919 /* We are in the weird situation where high0 > high1 but
4920 high1 has no successor. Punt. */
4921 return 0;
4924 else if (! subset || highequal)
4926 low = low0;
4927 high = range_predecessor (low1);
4928 in_p = 1;
4929 if (high == 0)
4931 /* low0 < low1 but low1 has no predecessor. Punt. */
4932 return 0;
4935 else
4936 return 0;
4939 else if (! in0_p && in1_p)
4941 /* If they don't overlap, the result is the second range. If the second
4942 is a subset of the first, the result is false. Otherwise,
4943 the range starts just after the first range and ends at the
4944 end of the second. */
4945 if (no_overlap)
4946 in_p = 1, low = low1, high = high1;
4947 else if (subset || highequal)
4948 in_p = 0, low = high = 0;
4949 else
4951 low = range_successor (high0);
4952 high = high1;
4953 in_p = 1;
4954 if (low == 0)
4956 /* high1 > high0 but high0 has no successor. Punt. */
4957 return 0;
4962 else
4964 /* The case where we are excluding both ranges. Here the complex case
4965 is if they don't overlap. In that case, the only time we have a
4966 range is if they are adjacent. If the second is a subset of the
4967 first, the result is the first. Otherwise, the range to exclude
4968 starts at the beginning of the first range and ends at the end of the
4969 second. */
4970 if (no_overlap)
4972 if (integer_onep (range_binop (EQ_EXPR, integer_type_node,
4973 range_successor (high0),
4974 1, low1, 0)))
4975 in_p = 0, low = low0, high = high1;
4976 else
4978 /* Canonicalize - [min, x] into - [-, x]. */
4979 if (low0 && TREE_CODE (low0) == INTEGER_CST)
4980 switch (TREE_CODE (TREE_TYPE (low0)))
4982 case ENUMERAL_TYPE:
4983 if (TYPE_PRECISION (TREE_TYPE (low0))
4984 != GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (low0))))
4985 break;
4986 /* FALLTHROUGH */
4987 case INTEGER_TYPE:
4988 if (tree_int_cst_equal (low0,
4989 TYPE_MIN_VALUE (TREE_TYPE (low0))))
4990 low0 = 0;
4991 break;
4992 case POINTER_TYPE:
4993 if (TYPE_UNSIGNED (TREE_TYPE (low0))
4994 && integer_zerop (low0))
4995 low0 = 0;
4996 break;
4997 default:
4998 break;
5001 /* Canonicalize - [x, max] into - [x, -]. */
5002 if (high1 && TREE_CODE (high1) == INTEGER_CST)
5003 switch (TREE_CODE (TREE_TYPE (high1)))
5005 case ENUMERAL_TYPE:
5006 if (TYPE_PRECISION (TREE_TYPE (high1))
5007 != GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (high1))))
5008 break;
5009 /* FALLTHROUGH */
5010 case INTEGER_TYPE:
5011 if (tree_int_cst_equal (high1,
5012 TYPE_MAX_VALUE (TREE_TYPE (high1))))
5013 high1 = 0;
5014 break;
5015 case POINTER_TYPE:
5016 if (TYPE_UNSIGNED (TREE_TYPE (high1))
5017 && integer_zerop (range_binop (PLUS_EXPR, NULL_TREE,
5018 high1, 1,
5019 integer_one_node, 1)))
5020 high1 = 0;
5021 break;
5022 default:
5023 break;
5026 /* The ranges might also be adjacent between the maximum and
5027 minimum values of the given type. For
5028 - [{min,-}, x] and - [y, {max,-}] ranges where x + 1 < y
5029 return + [x + 1, y - 1]. */
5030 if (low0 == 0 && high1 == 0)
5032 low = range_successor (high0);
5033 high = range_predecessor (low1);
5034 if (low == 0 || high == 0)
5035 return 0;
5037 in_p = 1;
5039 else
5040 return 0;
5043 else if (subset)
5044 in_p = 0, low = low0, high = high0;
5045 else
5046 in_p = 0, low = low0, high = high1;
5049 *pin_p = in_p, *plow = low, *phigh = high;
5050 return 1;
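/* [Editor's note: illustrative sketch, not part of fold-const.c.  The
   in0_p && in1_p case above is interval intersection.  A hypothetical
   plain-C rendering with finite int bounds, assuming range 0 has already
   been arranged to start first as the code above does.] */
#include <assert.h>
static int
intersect_ranges (int low0, int high0, int low1, int high1,
                  int *plow, int *phigh)
{
  assert (low0 <= low1);  /* Range 0 starts first, as arranged above.  */
  if (high0 < low1)
    return 0;             /* Disjoint; merge_ranges instead encodes this
                             as the always-false range.  */
  *plow = low1;           /* Overlap runs from the later low bound...  */
  *phigh = high1 < high0 ? high1 : high0;  /* ...to the earlier high.  */
  return 1;
}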
5054 /* Subroutine of fold, looking inside expressions of the form
5055 A op B ? A : C, where ARG0, ARG1 and ARG2 are the three operands
5056 of the COND_EXPR. This function is being used also to optimize
5057 A op B ? C : A, by reversing the comparison first.
5059 Return a folded expression whose code is not a COND_EXPR
5060 anymore, or NULL_TREE if no folding opportunity is found. */
5062 static tree
5063 fold_cond_expr_with_comparison (tree type, tree arg0, tree arg1, tree arg2)
5065 enum tree_code comp_code = TREE_CODE (arg0);
5066 tree arg00 = TREE_OPERAND (arg0, 0);
5067 tree arg01 = TREE_OPERAND (arg0, 1);
5068 tree arg1_type = TREE_TYPE (arg1);
5069 tree tem;
5071 STRIP_NOPS (arg1);
5072 STRIP_NOPS (arg2);
5074 /* If we have A op 0 ? A : -A, consider applying the following
5075 transformations:
5077 A == 0? A : -A same as -A
5078 A != 0? A : -A same as A
5079 A >= 0? A : -A same as abs (A)
5080 A > 0? A : -A same as abs (A)
5081 A <= 0? A : -A same as -abs (A)
5082 A < 0? A : -A same as -abs (A)
5084 None of these transformations work for modes with signed
5085 zeros. If A is +/-0, the first two transformations will
5086 change the sign of the result (from +0 to -0, or vice
5087 versa). The last four will fix the sign of the result,
5088 even though the original expressions could be positive or
5089 negative, depending on the sign of A.
5091 Note that all these transformations are correct if A is
5092 NaN, since the two alternatives (A and -A) are also NaNs. */
5093 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (type))
5094 && (FLOAT_TYPE_P (TREE_TYPE (arg01))
5095 ? real_zerop (arg01)
5096 : integer_zerop (arg01))
5097 && ((TREE_CODE (arg2) == NEGATE_EXPR
5098 && operand_equal_p (TREE_OPERAND (arg2, 0), arg1, 0))
5099 /* In the case that A is of the form X-Y, '-A' (arg2) may
5100 have already been folded to Y-X, check for that. */
5101 || (TREE_CODE (arg1) == MINUS_EXPR
5102 && TREE_CODE (arg2) == MINUS_EXPR
5103 && operand_equal_p (TREE_OPERAND (arg1, 0),
5104 TREE_OPERAND (arg2, 1), 0)
5105 && operand_equal_p (TREE_OPERAND (arg1, 1),
5106 TREE_OPERAND (arg2, 0), 0))))
5107 switch (comp_code)
5109 case EQ_EXPR:
5110 case UNEQ_EXPR:
5111 tem = fold_convert (arg1_type, arg1);
5112 return pedantic_non_lvalue (fold_convert (type, negate_expr (tem)));
5113 case NE_EXPR:
5114 case LTGT_EXPR:
5115 return pedantic_non_lvalue (fold_convert (type, arg1));
5116 case UNGE_EXPR:
5117 case UNGT_EXPR:
5118 if (flag_trapping_math)
5119 break;
5120 /* Fall through. */
5121 case GE_EXPR:
5122 case GT_EXPR:
5123 if (TYPE_UNSIGNED (TREE_TYPE (arg1)))
5124 arg1 = fold_convert (signed_type_for
5125 (TREE_TYPE (arg1)), arg1);
5126 tem = fold_build1 (ABS_EXPR, TREE_TYPE (arg1), arg1);
5127 return pedantic_non_lvalue (fold_convert (type, tem));
5128 case UNLE_EXPR:
5129 case UNLT_EXPR:
5130 if (flag_trapping_math)
5131 break;
5132 case LE_EXPR:
5133 case LT_EXPR:
5134 if (TYPE_UNSIGNED (TREE_TYPE (arg1)))
5135 arg1 = fold_convert (signed_type_for
5136 (TREE_TYPE (arg1)), arg1);
5137 tem = fold_build1 (ABS_EXPR, TREE_TYPE (arg1), arg1);
5138 return negate_expr (fold_convert (type, tem));
5139 default:
5140 gcc_assert (TREE_CODE_CLASS (comp_code) == tcc_comparison);
5141 break;
5144 /* A != 0 ? A : 0 is simply A, unless A is -0. Likewise
5145 A == 0 ? A : 0 is always 0 unless A is -0. Note that
5146 both transformations are correct when A is NaN: A != 0
5147 is then true, and A == 0 is false. */
5149 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (type))
5150 && integer_zerop (arg01) && integer_zerop (arg2))
5152 if (comp_code == NE_EXPR)
5153 return pedantic_non_lvalue (fold_convert (type, arg1));
5154 else if (comp_code == EQ_EXPR)
5155 return build_int_cst (type, 0);
5158 /* Try some transformations of A op B ? A : B.
5160 A == B? A : B same as B
5161 A != B? A : B same as A
5162 A >= B? A : B same as max (A, B)
5163 A > B? A : B same as max (B, A)
5164 A <= B? A : B same as min (A, B)
5165 A < B? A : B same as min (B, A)
5167 As above, these transformations don't work in the presence
5168 of signed zeros. For example, if A and B are zeros of
5169 opposite sign, the first two transformations will change
5170 the sign of the result. In the last four, the original
5171 expressions give different results for (A=+0, B=-0) and
5172 (A=-0, B=+0), but the transformed expressions do not.
5174 The first two transformations are correct if either A or B
5175 is a NaN. In the first transformation, the condition will
5176 be false, and B will indeed be chosen. In the case of the
5177 second transformation, the condition A != B will be true,
5178 and A will be chosen.
5180 The conversions to max() and min() are not correct if B is
5181 a number and A is not. The conditions in the original
5182 expressions will be false, so all four give B. The min()
5183 and max() versions would give a NaN instead. */
5184 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (type))
5185 && operand_equal_for_comparison_p (arg01, arg2, arg00)
5186 /* Avoid these transformations if the COND_EXPR may be used
5187 as an lvalue in the C++ front-end. PR c++/19199. */
5188 && (in_gimple_form
5189 || (strcmp (lang_hooks.name, "GNU C++") != 0
5190 && strcmp (lang_hooks.name, "GNU Objective-C++") != 0)
5191 || ! maybe_lvalue_p (arg1)
5192 || ! maybe_lvalue_p (arg2)))
5194 tree comp_op0 = arg00;
5195 tree comp_op1 = arg01;
5196 tree comp_type = TREE_TYPE (comp_op0);
5198 /* Avoid adding NOP_EXPRs in case this is an lvalue. */
5199 if (TYPE_MAIN_VARIANT (comp_type) == TYPE_MAIN_VARIANT (type))
5201 comp_type = type;
5202 comp_op0 = arg1;
5203 comp_op1 = arg2;
5206 switch (comp_code)
5208 case EQ_EXPR:
5209 return pedantic_non_lvalue (fold_convert (type, arg2));
5210 case NE_EXPR:
5211 return pedantic_non_lvalue (fold_convert (type, arg1));
5212 case LE_EXPR:
5213 case LT_EXPR:
5214 case UNLE_EXPR:
5215 case UNLT_EXPR:
5216 /* In C++ a ?: expression can be an lvalue, so put the
5217 operand which will be used if they are equal first
5218 so that we can convert this back to the
5219 corresponding COND_EXPR. */
5220 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
5222 comp_op0 = fold_convert (comp_type, comp_op0);
5223 comp_op1 = fold_convert (comp_type, comp_op1);
5224 tem = (comp_code == LE_EXPR || comp_code == UNLE_EXPR)
5225 ? fold_build2 (MIN_EXPR, comp_type, comp_op0, comp_op1)
5226 : fold_build2 (MIN_EXPR, comp_type, comp_op1, comp_op0);
5227 return pedantic_non_lvalue (fold_convert (type, tem));
5229 break;
5230 case GE_EXPR:
5231 case GT_EXPR:
5232 case UNGE_EXPR:
5233 case UNGT_EXPR:
5234 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
5236 comp_op0 = fold_convert (comp_type, comp_op0);
5237 comp_op1 = fold_convert (comp_type, comp_op1);
5238 tem = (comp_code == GE_EXPR || comp_code == UNGE_EXPR)
5239 ? fold_build2 (MAX_EXPR, comp_type, comp_op0, comp_op1)
5240 : fold_build2 (MAX_EXPR, comp_type, comp_op1, comp_op0);
5241 return pedantic_non_lvalue (fold_convert (type, tem));
5243 break;
5244 case UNEQ_EXPR:
5245 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
5246 return pedantic_non_lvalue (fold_convert (type, arg2));
5247 break;
5248 case LTGT_EXPR:
5249 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
5250 return pedantic_non_lvalue (fold_convert (type, arg1));
5251 break;
5252 default:
5253 gcc_assert (TREE_CODE_CLASS (comp_code) == tcc_comparison);
5254 break;
5258 /* If this is A op C1 ? A : C2 with C1 and C2 constant integers,
5259 we might still be able to simplify this. For example,
5260 if C1 is one less or one more than C2, this might have started
5261 out as a MIN or MAX and been transformed by this function.
5262 Only good for INTEGER_TYPEs, because we need TYPE_MAX_VALUE. */
5264 if (INTEGRAL_TYPE_P (type)
5265 && TREE_CODE (arg01) == INTEGER_CST
5266 && TREE_CODE (arg2) == INTEGER_CST)
5267 switch (comp_code)
5269 case EQ_EXPR:
5270 /* We can replace A with C1 in this case. */
5271 arg1 = fold_convert (type, arg01);
5272 return fold_build3 (COND_EXPR, type, arg0, arg1, arg2);
5274 case LT_EXPR:
5275 /* If C1 is C2 + 1, this is min(A, C2). */
5276 if (! operand_equal_p (arg2, TYPE_MAX_VALUE (type),
5277 OEP_ONLY_CONST)
5278 && operand_equal_p (arg01,
5279 const_binop (PLUS_EXPR, arg2,
5280 build_int_cst (type, 1), 0),
5281 OEP_ONLY_CONST))
5282 return pedantic_non_lvalue (fold_build2 (MIN_EXPR,
5283 type,
5284 fold_convert (type, arg1),
5285 arg2));
5286 break;
5288 case LE_EXPR:
5289 /* If C1 is C2 - 1, this is min(A, C2). */
5290 if (! operand_equal_p (arg2, TYPE_MIN_VALUE (type),
5291 OEP_ONLY_CONST)
5292 && operand_equal_p (arg01,
5293 const_binop (MINUS_EXPR, arg2,
5294 build_int_cst (type, 1), 0),
5295 OEP_ONLY_CONST))
5296 return pedantic_non_lvalue (fold_build2 (MIN_EXPR,
5297 type,
5298 fold_convert (type, arg1),
5299 arg2));
5300 break;
5302 case GT_EXPR:
5303 /* If C1 is C2 - 1, this is max(A, C2). */
5304 if (! operand_equal_p (arg2, TYPE_MIN_VALUE (type),
5305 OEP_ONLY_CONST)
5306 && operand_equal_p (arg01,
5307 const_binop (MINUS_EXPR, arg2,
5308 build_int_cst (type, 1), 0),
5309 OEP_ONLY_CONST))
5310 return pedantic_non_lvalue (fold_build2 (MAX_EXPR,
5311 type,
5312 fold_convert (type, arg1),
5313 arg2));
5314 break;
5316 case GE_EXPR:
5317 /* If C1 is C2 + 1, this is max(A, C2). */
5318 if (! operand_equal_p (arg2, TYPE_MAX_VALUE (type),
5319 OEP_ONLY_CONST)
5320 && operand_equal_p (arg01,
5321 const_binop (PLUS_EXPR, arg2,
5322 build_int_cst (type, 1), 0),
5323 OEP_ONLY_CONST))
5324 return pedantic_non_lvalue (fold_build2 (MAX_EXPR,
5325 type,
5326 fold_convert (type, arg1),
5327 arg2));
5328 break;
5329 case NE_EXPR:
5330 break;
5331 default:
5332 gcc_unreachable ();
5335 return NULL_TREE;
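/* [Editor's note: illustrative sketch, not part of fold-const.c.  The
   A op 0 ? A : -A table near the top of this function, spot-checked for
   int, where signed zeros do not exist; keep A != INT_MIN so that
   negation and abs are well defined.  Hypothetical standalone test.] */
#include <assert.h>
#include <stdlib.h>
static void
check_abs_folds (int a)
{
  assert ((a >= 0 ? a : -a) == abs (a));
  assert ((a > 0 ? a : -a) == abs (a));
  assert ((a <= 0 ? a : -a) == -abs (a));
  assert ((a < 0 ? a : -a) == -abs (a));
}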
5340 #ifndef LOGICAL_OP_NON_SHORT_CIRCUIT
5341 #define LOGICAL_OP_NON_SHORT_CIRCUIT \
5342 (BRANCH_COST (optimize_function_for_speed_p (cfun), \
5343 false) >= 2)
5344 #endif
5346 /* EXP is some logical combination of boolean tests. See if we can
5347 merge it into some range test. Return the new tree if so. */
5349 static tree
5350 fold_range_test (enum tree_code code, tree type, tree op0, tree op1)
5352 int or_op = (code == TRUTH_ORIF_EXPR
5353 || code == TRUTH_OR_EXPR);
5354 int in0_p, in1_p, in_p;
5355 tree low0, low1, low, high0, high1, high;
5356 bool strict_overflow_p = false;
5357 tree lhs = make_range (op0, &in0_p, &low0, &high0, &strict_overflow_p);
5358 tree rhs = make_range (op1, &in1_p, &low1, &high1, &strict_overflow_p);
5359 tree tem;
5360 const char * const warnmsg = G_("assuming signed overflow does not occur "
5361 "when simplifying range test");
5363 /* If this is an OR operation, invert both sides; we will invert
5364 again at the end. */
5365 if (or_op)
5366 in0_p = ! in0_p, in1_p = ! in1_p;
5368 /* If both expressions are the same, if we can merge the ranges, and we
5369 can build the range test, return it or it inverted. If one of the
5370 ranges is always true or always false, consider it to be the same
5371 expression as the other. */
5372 if ((lhs == 0 || rhs == 0 || operand_equal_p (lhs, rhs, 0))
5373 && merge_ranges (&in_p, &low, &high, in0_p, low0, high0,
5374 in1_p, low1, high1)
5375 && 0 != (tem = (build_range_check (type,
5376 lhs != 0 ? lhs
5377 : rhs != 0 ? rhs : integer_zero_node,
5378 in_p, low, high))))
5380 if (strict_overflow_p)
5381 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_COMPARISON);
5382 return or_op ? invert_truthvalue (tem) : tem;
5385 /* On machines where the branch cost is expensive, if this is a
5386 short-circuited branch and the underlying object on both sides
5387 is the same, make a non-short-circuit operation. */
5388 else if (LOGICAL_OP_NON_SHORT_CIRCUIT
5389 && lhs != 0 && rhs != 0
5390 && (code == TRUTH_ANDIF_EXPR
5391 || code == TRUTH_ORIF_EXPR)
5392 && operand_equal_p (lhs, rhs, 0))
5394 /* If simple enough, just rewrite. Otherwise, make a SAVE_EXPR
5395 unless we are at top level or LHS contains a PLACEHOLDER_EXPR, in
5396 which cases we can't do this. */
5397 if (simple_operand_p (lhs))
5398 return build2 (code == TRUTH_ANDIF_EXPR
5399 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR,
5400 type, op0, op1);
5402 else if (lang_hooks.decls.global_bindings_p () == 0
5403 && ! CONTAINS_PLACEHOLDER_P (lhs))
5405 tree common = save_expr (lhs);
5407 if (0 != (lhs = build_range_check (type, common,
5408 or_op ? ! in0_p : in0_p,
5409 low0, high0))
5410 && (0 != (rhs = build_range_check (type, common,
5411 or_op ? ! in1_p : in1_p,
5412 low1, high1))))
5414 if (strict_overflow_p)
5415 fold_overflow_warning (warnmsg,
5416 WARN_STRICT_OVERFLOW_COMPARISON);
5417 return build2 (code == TRUTH_ANDIF_EXPR
5418 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR,
5419 type, lhs, rhs);
5424 return 0;
5427 /* Subroutine for fold_truthop: C is an INTEGER_CST interpreted as a P
5428 bit value. Arrange things so the extra bits will be set to zero if and
5429 only if C is sign-extended to its full width. If MASK is nonzero,
5430 it is an INTEGER_CST that should be AND'ed with the extra bits. */
5432 static tree
5433 unextend (tree c, int p, int unsignedp, tree mask)
5435 tree type = TREE_TYPE (c);
5436 int modesize = GET_MODE_BITSIZE (TYPE_MODE (type));
5437 tree temp;
5439 if (p == modesize || unsignedp)
5440 return c;
5442 /* We work by getting just the sign bit into the low-order bit, then
5443 into the high-order bit, then sign-extend. We then XOR that value
5444 with C. */
5445 temp = const_binop (RSHIFT_EXPR, c, size_int (p - 1), 0);
5446 temp = const_binop (BIT_AND_EXPR, temp, size_int (1), 0);
5448 /* We must use a signed type in order to get an arithmetic right shift.
5449 However, we must also avoid introducing accidental overflows, so that
5450 a subsequent call to integer_zerop will work. Hence we must
5451 do the type conversion here. At this point, the constant is either
5452 zero or one, and the conversion to a signed type can never overflow.
5453 We could get an overflow if this conversion is done anywhere else. */
5454 if (TYPE_UNSIGNED (type))
5455 temp = fold_convert (signed_type_for (type), temp);
5457 temp = const_binop (LSHIFT_EXPR, temp, size_int (modesize - 1), 0);
5458 temp = const_binop (RSHIFT_EXPR, temp, size_int (modesize - p - 1), 0);
5459 if (mask != 0)
5460 temp = const_binop (BIT_AND_EXPR, temp,
5461 fold_convert (TREE_TYPE (c), mask), 0);
5462 /* If necessary, convert the type back to match the type of C. */
5463 if (TYPE_UNSIGNED (type))
5464 temp = fold_convert (type, temp);
5466 return fold_convert (type, const_binop (BIT_XOR_EXPR, c, temp, 0));
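/* [Editor's note: illustrative sketch, not part of fold-const.c.  The
   shift/XOR sign-bit juggling above is a relative of the textbook way of
   sign-extending a P-bit field in plain C, shown here as a hypothetical
   helper; assumes two's complement and 0 < p < 32.] */
static int
sign_extend_field (unsigned c, int p)
{
  unsigned m = 1u << (p - 1);   /* Sign bit of a P-bit field.  */
  c &= (1u << p) - 1;           /* Keep only the low P bits.  */
  return (int) ((c ^ m) - m);   /* XOR then subtract propagates it.  */
}
/* sign_extend_field (0xF, 4) == -1; sign_extend_field (0x7, 4) == 7.  */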
5469 /* Find ways of folding logical expressions of LHS and RHS:
5470 Try to merge two comparisons to the same innermost item.
5471 Look for range tests like "ch >= '0' && ch <= '9'".
5472 Look for combinations of simple terms on machines with expensive branches
5473 and evaluate the RHS unconditionally.
5475 For example, if we have p->a == 2 && p->b == 4 and we can make an
5476 object large enough to span both A and B, we can do this with a comparison
5477 against the object ANDed with the a mask.
5479 If we have p->a == q->a && p->b == q->b, we may be able to use bit masking
5480 operations to do this with one comparison.
5482 We check for both normal comparisons and the BIT_AND_EXPRs made by this
5483 function and the one above.
5485 CODE is the logical operation being done. It can be TRUTH_ANDIF_EXPR,
5486 TRUTH_AND_EXPR, TRUTH_ORIF_EXPR, or TRUTH_OR_EXPR.
5488 TRUTH_TYPE is the type of the logical operation and LHS and RHS are its
5489 two operands.
5491 We return the simplified tree or 0 if no optimization is possible. */
5493 static tree
5494 fold_truthop (enum tree_code code, tree truth_type, tree lhs, tree rhs)
5496 /* If this is the "or" of two comparisons, we can do something if
5497 the comparisons are NE_EXPR. If this is the "and", we can do something
5498 if the comparisons are EQ_EXPR. I.e.,
5499 (a->b == 2 && a->c == 4) can become (a->new == NEW).
5501 WANTED_CODE is this operation code. For single bit fields, we can
5502 convert EQ_EXPR to NE_EXPR so we need not reject the "wrong"
5503 comparison for one-bit fields. */
5505 enum tree_code wanted_code;
5506 enum tree_code lcode, rcode;
5507 tree ll_arg, lr_arg, rl_arg, rr_arg;
5508 tree ll_inner, lr_inner, rl_inner, rr_inner;
5509 HOST_WIDE_INT ll_bitsize, ll_bitpos, lr_bitsize, lr_bitpos;
5510 HOST_WIDE_INT rl_bitsize, rl_bitpos, rr_bitsize, rr_bitpos;
5511 HOST_WIDE_INT xll_bitpos, xlr_bitpos, xrl_bitpos, xrr_bitpos;
5512 HOST_WIDE_INT lnbitsize, lnbitpos, rnbitsize, rnbitpos;
5513 int ll_unsignedp, lr_unsignedp, rl_unsignedp, rr_unsignedp;
5514 enum machine_mode ll_mode, lr_mode, rl_mode, rr_mode;
5515 enum machine_mode lnmode, rnmode;
5516 tree ll_mask, lr_mask, rl_mask, rr_mask;
5517 tree ll_and_mask, lr_and_mask, rl_and_mask, rr_and_mask;
5518 tree l_const, r_const;
5519 tree lntype, rntype, result;
5520 HOST_WIDE_INT first_bit, end_bit;
5521 int volatilep;
5522 tree orig_lhs = lhs, orig_rhs = rhs;
5523 enum tree_code orig_code = code;
5525 /* Start by getting the comparison codes. Fail if anything is volatile.
5526 If one operand is a BIT_AND_EXPR with the constant one, treat it as if
5527 it were surrounded with a NE_EXPR. */
5529 if (TREE_SIDE_EFFECTS (lhs) || TREE_SIDE_EFFECTS (rhs))
5530 return 0;
5532 lcode = TREE_CODE (lhs);
5533 rcode = TREE_CODE (rhs);
5535 if (lcode == BIT_AND_EXPR && integer_onep (TREE_OPERAND (lhs, 1)))
5537 lhs = build2 (NE_EXPR, truth_type, lhs,
5538 build_int_cst (TREE_TYPE (lhs), 0));
5539 lcode = NE_EXPR;
5542 if (rcode == BIT_AND_EXPR && integer_onep (TREE_OPERAND (rhs, 1)))
5544 rhs = build2 (NE_EXPR, truth_type, rhs,
5545 build_int_cst (TREE_TYPE (rhs), 0));
5546 rcode = NE_EXPR;
5549 if (TREE_CODE_CLASS (lcode) != tcc_comparison
5550 || TREE_CODE_CLASS (rcode) != tcc_comparison)
5551 return 0;
5553 ll_arg = TREE_OPERAND (lhs, 0);
5554 lr_arg = TREE_OPERAND (lhs, 1);
5555 rl_arg = TREE_OPERAND (rhs, 0);
5556 rr_arg = TREE_OPERAND (rhs, 1);
5558 /* Simplify (x<y) && (x==y) into (x<=y) and related optimizations. */
5559 if (simple_operand_p (ll_arg)
5560 && simple_operand_p (lr_arg))
5562 tree result;
5563 if (operand_equal_p (ll_arg, rl_arg, 0)
5564 && operand_equal_p (lr_arg, rr_arg, 0))
5566 result = combine_comparisons (code, lcode, rcode,
5567 truth_type, ll_arg, lr_arg);
5568 if (result)
5569 return result;
5571 else if (operand_equal_p (ll_arg, rr_arg, 0)
5572 && operand_equal_p (lr_arg, rl_arg, 0))
5574 result = combine_comparisons (code, lcode,
5575 swap_tree_comparison (rcode),
5576 truth_type, ll_arg, lr_arg);
5577 if (result)
5578 return result;
5582 code = ((code == TRUTH_AND_EXPR || code == TRUTH_ANDIF_EXPR)
5583 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR);
5585 /* If the RHS can be evaluated unconditionally and its operands are
5586 simple, it wins to evaluate the RHS unconditionally on machines
5587 with expensive branches. In this case, this isn't a comparison
5588 that can be merged. Avoid doing this if the RHS is a floating-point
5589 comparison since those can trap. */
5591 if (BRANCH_COST (optimize_function_for_speed_p (cfun),
5592 false) >= 2
5593 && ! FLOAT_TYPE_P (TREE_TYPE (rl_arg))
5594 && simple_operand_p (rl_arg)
5595 && simple_operand_p (rr_arg))
5597 /* Convert (a != 0) || (b != 0) into (a | b) != 0. */
5598 if (code == TRUTH_OR_EXPR
5599 && lcode == NE_EXPR && integer_zerop (lr_arg)
5600 && rcode == NE_EXPR && integer_zerop (rr_arg)
5601 && TREE_TYPE (ll_arg) == TREE_TYPE (rl_arg)
5602 && INTEGRAL_TYPE_P (TREE_TYPE (ll_arg)))
5603 return build2 (NE_EXPR, truth_type,
5604 build2 (BIT_IOR_EXPR, TREE_TYPE (ll_arg),
5605 ll_arg, rl_arg),
5606 build_int_cst (TREE_TYPE (ll_arg), 0));
5608 /* Convert (a == 0) && (b == 0) into (a | b) == 0. */
5609 if (code == TRUTH_AND_EXPR
5610 && lcode == EQ_EXPR && integer_zerop (lr_arg)
5611 && rcode == EQ_EXPR && integer_zerop (rr_arg)
5612 && TREE_TYPE (ll_arg) == TREE_TYPE (rl_arg)
5613 && INTEGRAL_TYPE_P (TREE_TYPE (ll_arg)))
5614 return build2 (EQ_EXPR, truth_type,
5615 build2 (BIT_IOR_EXPR, TREE_TYPE (ll_arg),
5616 ll_arg, rl_arg),
5617 build_int_cst (TREE_TYPE (ll_arg), 0));
5619 if (LOGICAL_OP_NON_SHORT_CIRCUIT)
5621 if (code != orig_code || lhs != orig_lhs || rhs != orig_rhs)
5622 return build2 (code, truth_type, lhs, rhs);
5623 return NULL_TREE;
5627 /* See if the comparisons can be merged. Then get all the parameters for
5628 each side. */
5630 if ((lcode != EQ_EXPR && lcode != NE_EXPR)
5631 || (rcode != EQ_EXPR && rcode != NE_EXPR))
5632 return 0;
5634 volatilep = 0;
5635 ll_inner = decode_field_reference (ll_arg,
5636 &ll_bitsize, &ll_bitpos, &ll_mode,
5637 &ll_unsignedp, &volatilep, &ll_mask,
5638 &ll_and_mask);
5639 lr_inner = decode_field_reference (lr_arg,
5640 &lr_bitsize, &lr_bitpos, &lr_mode,
5641 &lr_unsignedp, &volatilep, &lr_mask,
5642 &lr_and_mask);
5643 rl_inner = decode_field_reference (rl_arg,
5644 &rl_bitsize, &rl_bitpos, &rl_mode,
5645 &rl_unsignedp, &volatilep, &rl_mask,
5646 &rl_and_mask);
5647 rr_inner = decode_field_reference (rr_arg,
5648 &rr_bitsize, &rr_bitpos, &rr_mode,
5649 &rr_unsignedp, &volatilep, &rr_mask,
5650 &rr_and_mask);
5652 /* The inner operation on the lhs of each comparison must be the same
5653 if we are to be able to do anything.  Then see if we have constants.
5654 If not, the same must be true for the rhs's.  */
5656 if (volatilep || ll_inner == 0 || rl_inner == 0
5657 || ! operand_equal_p (ll_inner, rl_inner, 0))
5658 return 0;
5660 if (TREE_CODE (lr_arg) == INTEGER_CST
5661 && TREE_CODE (rr_arg) == INTEGER_CST)
5662 l_const = lr_arg, r_const = rr_arg;
5663 else if (lr_inner == 0 || rr_inner == 0
5664 || ! operand_equal_p (lr_inner, rr_inner, 0))
5665 return 0;
5666 else
5667 l_const = r_const = 0;
5669 /* If either comparison code is not correct for our logical operation,
5670 fail. However, we can convert a one-bit comparison against zero into
5671 the opposite comparison against that bit being set in the field. */
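/* For example, when the wanted code is EQ_EXPR, a one-bit test such as
   (x & 8) != 0 is recast as the equal-test (x & 8) == 8: L_CONST simply
   becomes the mask itself.  */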
5673 wanted_code = (code == TRUTH_AND_EXPR ? EQ_EXPR : NE_EXPR);
5674 if (lcode != wanted_code)
5676 if (l_const && integer_zerop (l_const) && integer_pow2p (ll_mask))
5678 /* Make the left operand unsigned, since we are only interested
5679 in the value of one bit. Otherwise we are doing the wrong
5680 thing below. */
5681 ll_unsignedp = 1;
5682 l_const = ll_mask;
5684 else
5685 return 0;
5688 /* This is analogous to the code for l_const above. */
5689 if (rcode != wanted_code)
5691 if (r_const && integer_zerop (r_const) && integer_pow2p (rl_mask))
5693 rl_unsignedp = 1;
5694 r_const = rl_mask;
5696 else
5697 return 0;
5700 /* See if we can find a mode that contains both fields being compared on
5701 the left. If we can't, fail. Otherwise, update all constants and masks
5702 to be relative to a field of that size. */
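/* For instance, single-bit fields at bit positions 1 and 6 of the same
   byte can both be read with one QImode access; XLL_BITPOS and
   XRL_BITPOS below are the original bit positions re-expressed relative
   to that containing field.  */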
5703 first_bit = MIN (ll_bitpos, rl_bitpos);
5704 end_bit = MAX (ll_bitpos + ll_bitsize, rl_bitpos + rl_bitsize);
5705 lnmode = get_best_mode (end_bit - first_bit, first_bit,
5706 TYPE_ALIGN (TREE_TYPE (ll_inner)), word_mode,
5707 volatilep);
5708 if (lnmode == VOIDmode)
5709 return 0;
5711 lnbitsize = GET_MODE_BITSIZE (lnmode);
5712 lnbitpos = first_bit & ~ (lnbitsize - 1);
5713 lntype = lang_hooks.types.type_for_size (lnbitsize, 1);
5714 xll_bitpos = ll_bitpos - lnbitpos, xrl_bitpos = rl_bitpos - lnbitpos;
5716 if (BYTES_BIG_ENDIAN)
5718 xll_bitpos = lnbitsize - xll_bitpos - ll_bitsize;
5719 xrl_bitpos = lnbitsize - xrl_bitpos - rl_bitsize;
5722 ll_mask = const_binop (LSHIFT_EXPR, fold_convert (lntype, ll_mask),
5723 size_int (xll_bitpos), 0);
5724 rl_mask = const_binop (LSHIFT_EXPR, fold_convert (lntype, rl_mask),
5725 size_int (xrl_bitpos), 0);
5727 if (l_const)
5729 l_const = fold_convert (lntype, l_const);
5730 l_const = unextend (l_const, ll_bitsize, ll_unsignedp, ll_and_mask);
5731 l_const = const_binop (LSHIFT_EXPR, l_const, size_int (xll_bitpos), 0);
5732 if (! integer_zerop (const_binop (BIT_AND_EXPR, l_const,
5733 fold_build1 (BIT_NOT_EXPR,
5734 lntype, ll_mask),
5735 0)))
5737 warning (0, "comparison is always %d", wanted_code == NE_EXPR);
5739 return constant_boolean_node (wanted_code == NE_EXPR, truth_type);
5742 if (r_const)
5744 r_const = fold_convert (lntype, r_const);
5745 r_const = unextend (r_const, rl_bitsize, rl_unsignedp, rl_and_mask);
5746 r_const = const_binop (LSHIFT_EXPR, r_const, size_int (xrl_bitpos), 0);
5747 if (! integer_zerop (const_binop (BIT_AND_EXPR, r_const,
5748 fold_build1 (BIT_NOT_EXPR,
5749 lntype, rl_mask),
5750 0)))
5752 warning (0, "comparison is always %d", wanted_code == NE_EXPR);
5754 return constant_boolean_node (wanted_code == NE_EXPR, truth_type);
5758 /* If the right sides are not constant, do the same for them.  Also,
5759 disallow this optimization if a size or signedness mismatch occurs
5760 between the left and right sides. */
5761 if (l_const == 0)
5763 if (ll_bitsize != lr_bitsize || rl_bitsize != rr_bitsize
5764 || ll_unsignedp != lr_unsignedp || rl_unsignedp != rr_unsignedp
5765 /* Make sure the two fields on the right
5766 correspond to the left without being swapped. */
5767 || ll_bitpos - rl_bitpos != lr_bitpos - rr_bitpos)
5768 return 0;
5770 first_bit = MIN (lr_bitpos, rr_bitpos);
5771 end_bit = MAX (lr_bitpos + lr_bitsize, rr_bitpos + rr_bitsize);
5772 rnmode = get_best_mode (end_bit - first_bit, first_bit,
5773 TYPE_ALIGN (TREE_TYPE (lr_inner)), word_mode,
5774 volatilep);
5775 if (rnmode == VOIDmode)
5776 return 0;
5778 rnbitsize = GET_MODE_BITSIZE (rnmode);
5779 rnbitpos = first_bit & ~ (rnbitsize - 1);
5780 rntype = lang_hooks.types.type_for_size (rnbitsize, 1);
5781 xlr_bitpos = lr_bitpos - rnbitpos, xrr_bitpos = rr_bitpos - rnbitpos;
5783 if (BYTES_BIG_ENDIAN)
5785 xlr_bitpos = rnbitsize - xlr_bitpos - lr_bitsize;
5786 xrr_bitpos = rnbitsize - xrr_bitpos - rr_bitsize;
5789 lr_mask = const_binop (LSHIFT_EXPR, fold_convert (rntype, lr_mask),
5790 size_int (xlr_bitpos), 0);
5791 rr_mask = const_binop (LSHIFT_EXPR, fold_convert (rntype, rr_mask),
5792 size_int (xrr_bitpos), 0);
5794 /* Make a mask that corresponds to both fields being compared.
5795 Do this for both items being compared. If the operands are the
5796 same size and the bits being compared are in the same position
5797 then we can do this by masking both and comparing the masked
5798 results. */
5799 ll_mask = const_binop (BIT_IOR_EXPR, ll_mask, rl_mask, 0);
5800 lr_mask = const_binop (BIT_IOR_EXPR, lr_mask, rr_mask, 0);
5801 if (lnbitsize == rnbitsize && xll_bitpos == xlr_bitpos)
5803 lhs = make_bit_field_ref (ll_inner, lntype, lnbitsize, lnbitpos,
5804 ll_unsignedp || rl_unsignedp);
5805 if (! all_ones_mask_p (ll_mask, lnbitsize))
5806 lhs = build2 (BIT_AND_EXPR, lntype, lhs, ll_mask);
5808 rhs = make_bit_field_ref (lr_inner, rntype, rnbitsize, rnbitpos,
5809 lr_unsignedp || rr_unsignedp);
5810 if (! all_ones_mask_p (lr_mask, rnbitsize))
5811 rhs = build2 (BIT_AND_EXPR, rntype, rhs, lr_mask);
5813 return build2 (wanted_code, truth_type, lhs, rhs);
5816 /* There is still another way we can do something: If both pairs of
5817 fields being compared are adjacent, we may be able to make a wider
5818 field containing them both.
5820 Note that we still must mask the lhs/rhs expressions. Furthermore,
5821 the mask must be shifted to account for the shift done by
5822 make_bit_field_ref. */
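/* For example (with illustrative field names), comparing two adjacent
   bit-fields s.a and s.b against the correspondingly adjacent t.a and
   t.b can be done as a single comparison of one wider bit-field
   reference on each side.  */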
5823 if ((ll_bitsize + ll_bitpos == rl_bitpos
5824 && lr_bitsize + lr_bitpos == rr_bitpos)
5825 || (ll_bitpos == rl_bitpos + rl_bitsize
5826 && lr_bitpos == rr_bitpos + rr_bitsize))
5828 tree type;
5830 lhs = make_bit_field_ref (ll_inner, lntype, ll_bitsize + rl_bitsize,
5831 MIN (ll_bitpos, rl_bitpos), ll_unsignedp);
5832 rhs = make_bit_field_ref (lr_inner, rntype, lr_bitsize + rr_bitsize,
5833 MIN (lr_bitpos, rr_bitpos), lr_unsignedp);
5835 ll_mask = const_binop (RSHIFT_EXPR, ll_mask,
5836 size_int (MIN (xll_bitpos, xrl_bitpos)), 0);
5837 lr_mask = const_binop (RSHIFT_EXPR, lr_mask,
5838 size_int (MIN (xlr_bitpos, xrr_bitpos)), 0);
5840 /* Convert to the smaller type before masking out unwanted bits. */
5841 type = lntype;
5842 if (lntype != rntype)
5844 if (lnbitsize > rnbitsize)
5846 lhs = fold_convert (rntype, lhs);
5847 ll_mask = fold_convert (rntype, ll_mask);
5848 type = rntype;
5850 else if (lnbitsize < rnbitsize)
5852 rhs = fold_convert (lntype, rhs);
5853 lr_mask = fold_convert (lntype, lr_mask);
5854 type = lntype;
5858 if (! all_ones_mask_p (ll_mask, ll_bitsize + rl_bitsize))
5859 lhs = build2 (BIT_AND_EXPR, type, lhs, ll_mask);
5861 if (! all_ones_mask_p (lr_mask, lr_bitsize + rr_bitsize))
5862 rhs = build2 (BIT_AND_EXPR, type, rhs, lr_mask);
5864 return build2 (wanted_code, truth_type, lhs, rhs);
5867 return 0;
5870 /* Handle the case of comparisons with constants. If there is something in
5871 common between the masks, those bits of the constants must be the same.
5872 If not, the condition is always false. Test for this to avoid generating
5873 incorrect code below. */
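/* For example, (x & 3) == 1 && (x & 5) == 4 is always false: the masks
   share bit 0, but the constants disagree on it (1 vs. 0), so the
   warning and constant result below apply.  */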
5874 result = const_binop (BIT_AND_EXPR, ll_mask, rl_mask, 0);
5875 if (! integer_zerop (result)
5876 && simple_cst_equal (const_binop (BIT_AND_EXPR, result, l_const, 0),
5877 const_binop (BIT_AND_EXPR, result, r_const, 0)) != 1)
5879 if (wanted_code == NE_EXPR)
5881 warning (0, "%<or%> of unmatched not-equal tests is always 1");
5882 return constant_boolean_node (true, truth_type);
5884 else
5886 warning (0, "%<and%> of mutually exclusive equal-tests is always 0");
5887 return constant_boolean_node (false, truth_type);
5891 /* Construct the expression we will return. First get the component
5892 reference we will make.  Unless the mask is all ones for the width of
5893 that field, perform the mask operation. Then compare with the
5894 merged constant. */
5895 result = make_bit_field_ref (ll_inner, lntype, lnbitsize, lnbitpos,
5896 ll_unsignedp || rl_unsignedp);
5898 ll_mask = const_binop (BIT_IOR_EXPR, ll_mask, rl_mask, 0);
5899 if (! all_ones_mask_p (ll_mask, lnbitsize))
5900 result = build2 (BIT_AND_EXPR, lntype, result, ll_mask);
5902 return build2 (wanted_code, truth_type, result,
5903 const_binop (BIT_IOR_EXPR, l_const, r_const, 0));
5906 /* Optimize T, which is a comparison of a MIN_EXPR or MAX_EXPR with a
5907 constant. */
5909 static tree
5910 optimize_minmax_comparison (enum tree_code code, tree type, tree op0, tree op1)
5912 tree arg0 = op0;
5913 enum tree_code op_code;
5914 tree comp_const;
5915 tree minmax_const;
5916 int consts_equal, consts_lt;
5917 tree inner;
5919 STRIP_SIGN_NOPS (arg0);
5921 op_code = TREE_CODE (arg0);
5922 minmax_const = TREE_OPERAND (arg0, 1);
5923 comp_const = fold_convert (TREE_TYPE (arg0), op1);
5924 consts_equal = tree_int_cst_equal (minmax_const, comp_const);
5925 consts_lt = tree_int_cst_lt (minmax_const, comp_const);
5926 inner = TREE_OPERAND (arg0, 0);
5928 /* If something does not permit us to optimize, fail by returning NULL_TREE. */
5929 if ((op_code != MIN_EXPR && op_code != MAX_EXPR)
5930 || TREE_CODE (comp_const) != INTEGER_CST
5931 || TREE_OVERFLOW (comp_const)
5932 || TREE_CODE (minmax_const) != INTEGER_CST
5933 || TREE_OVERFLOW (minmax_const))
5934 return NULL_TREE;
5936 /* Now handle all the various comparison codes. We only handle EQ_EXPR
5937 and GT_EXPR, doing the rest with recursive calls using logical
5938 simplifications. */
5939 switch (code)
5941 case NE_EXPR: case LT_EXPR: case LE_EXPR:
5943 tree tem = optimize_minmax_comparison (invert_tree_comparison (code, false),
5944 type, op0, op1);
5945 if (tem)
5946 return invert_truthvalue (tem);
5947 return NULL_TREE;
5950 case GE_EXPR:
5951 return
5952 fold_build2 (TRUTH_ORIF_EXPR, type,
5953 optimize_minmax_comparison
5954 (EQ_EXPR, type, arg0, comp_const),
5955 optimize_minmax_comparison
5956 (GT_EXPR, type, arg0, comp_const));
5958 case EQ_EXPR:
5959 if (op_code == MAX_EXPR && consts_equal)
5960 /* MAX (X, 0) == 0 -> X <= 0 */
5961 return fold_build2 (LE_EXPR, type, inner, comp_const);
5963 else if (op_code == MAX_EXPR && consts_lt)
5964 /* MAX (X, 0) == 5 -> X == 5 */
5965 return fold_build2 (EQ_EXPR, type, inner, comp_const);
5967 else if (op_code == MAX_EXPR)
5968 /* MAX (X, 0) == -1 -> false */
5969 return omit_one_operand (type, integer_zero_node, inner);
5971 else if (consts_equal)
5972 /* MIN (X, 0) == 0 -> X >= 0 */
5973 return fold_build2 (GE_EXPR, type, inner, comp_const);
5975 else if (consts_lt)
5976 /* MIN (X, 0) == 5 -> false */
5977 return omit_one_operand (type, integer_zero_node, inner);
5979 else
5980 /* MIN (X, 0) == -1 -> X == -1 */
5981 return fold_build2 (EQ_EXPR, type, inner, comp_const);
5983 case GT_EXPR:
5984 if (op_code == MAX_EXPR && (consts_equal || consts_lt))
5985 /* MAX (X, 0) > 0 -> X > 0
5986 MAX (X, 0) > 5 -> X > 5 */
5987 return fold_build2 (GT_EXPR, type, inner, comp_const);
5989 else if (op_code == MAX_EXPR)
5990 /* MAX (X, 0) > -1 -> true */
5991 return omit_one_operand (type, integer_one_node, inner);
5993 else if (op_code == MIN_EXPR && (consts_equal || consts_lt))
5994 /* MIN (X, 0) > 0 -> false
5995 MIN (X, 0) > 5 -> false */
5996 return omit_one_operand (type, integer_zero_node, inner);
5998 else
5999 /* MIN (X, 0) > -1 -> X > -1 */
6000 return fold_build2 (GT_EXPR, type, inner, comp_const);
6002 default:
6003 return NULL_TREE;
6007 /* T is an integer expression that is being multiplied or divided by, or
6008 reduced modulo, a constant C (CODE says which operation and what kind
6009 of divide or modulus).  See if we can eliminate that operation by folding it with
6010 other operations already in T. WIDE_TYPE, if non-null, is a type that
6011 should be used for the computation if wider than our type.
6013 For example, if we are dividing (X * 8) + (Y * 16) by 4, we can return
6014 (X * 2) + (Y * 4). We must, however, be assured that either the original
6015 expression would not overflow or that overflow is undefined for the type
6016 in the language in question.
6018 If we return a non-null expression, it is an equivalent form of the
6019 original computation, but need not be in the original type.
6021 We set *STRICT_OVERFLOW_P to true if the return value depends on
6022 signed overflow being undefined. Otherwise we do not change
6023 *STRICT_OVERFLOW_P. */
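/* As a concrete illustration, (X * 4) / 4 folds to X for signed X only
   because signed overflow is undefined; had X * 4 wrapped, the result
   would differ.  *STRICT_OVERFLOW_P records exactly that dependency.  */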
6025 static tree
6026 extract_muldiv (tree t, tree c, enum tree_code code, tree wide_type,
6027 bool *strict_overflow_p)
6029 /* To avoid exponential search depth, refuse to allow recursion past
6030 three levels. Beyond that (1) it's highly unlikely that we'll find
6031 something interesting and (2) we've probably processed it before
6032 when we built the inner expression. */
6034 static int depth;
6035 tree ret;
6037 if (depth > 3)
6038 return NULL;
6040 depth++;
6041 ret = extract_muldiv_1 (t, c, code, wide_type, strict_overflow_p);
6042 depth--;
6044 return ret;
6047 static tree
6048 extract_muldiv_1 (tree t, tree c, enum tree_code code, tree wide_type,
6049 bool *strict_overflow_p)
6051 tree type = TREE_TYPE (t);
6052 enum tree_code tcode = TREE_CODE (t);
6053 tree ctype = (wide_type != 0 && (GET_MODE_SIZE (TYPE_MODE (wide_type))
6054 > GET_MODE_SIZE (TYPE_MODE (type)))
6055 ? wide_type : type);
6056 tree t1, t2;
6057 int same_p = tcode == code;
6058 tree op0 = NULL_TREE, op1 = NULL_TREE;
6059 bool sub_strict_overflow_p;
6061 /* Don't deal with constants of zero here; they confuse the code below. */
6062 if (integer_zerop (c))
6063 return NULL_TREE;
6065 if (TREE_CODE_CLASS (tcode) == tcc_unary)
6066 op0 = TREE_OPERAND (t, 0);
6068 if (TREE_CODE_CLASS (tcode) == tcc_binary)
6069 op0 = TREE_OPERAND (t, 0), op1 = TREE_OPERAND (t, 1);
6071 /* Note that we need not handle conditional operations here since fold
6072 already handles those cases. So just do arithmetic here. */
6073 switch (tcode)
6075 case INTEGER_CST:
6076 /* For a constant, we can always simplify if we are a multiply
6077 or (for divide and modulus) if it is a multiple of our constant. */
6078 if (code == MULT_EXPR
6079 || integer_zerop (const_binop (TRUNC_MOD_EXPR, t, c, 0)))
6080 return const_binop (code, fold_convert (ctype, t),
6081 fold_convert (ctype, c), 0);
6082 break;
6084 CASE_CONVERT: case NON_LVALUE_EXPR:
6085 /* If op0 is an expression ... */
6086 if ((COMPARISON_CLASS_P (op0)
6087 || UNARY_CLASS_P (op0)
6088 || BINARY_CLASS_P (op0)
6089 || VL_EXP_CLASS_P (op0)
6090 || EXPRESSION_CLASS_P (op0))
6091 /* ... and has wrapping overflow, and its type is smaller
6092 than ctype, then we cannot pass through as widening. */
6093 && ((TYPE_OVERFLOW_WRAPS (TREE_TYPE (op0))
6094 && ! (TREE_CODE (TREE_TYPE (op0)) == INTEGER_TYPE
6095 && TYPE_IS_SIZETYPE (TREE_TYPE (op0)))
6096 && (TYPE_PRECISION (ctype)
6097 > TYPE_PRECISION (TREE_TYPE (op0))))
6098 /* ... or this is a truncation (t is narrower than op0),
6099 then we cannot pass through this narrowing. */
6100 || (TYPE_PRECISION (type)
6101 < TYPE_PRECISION (TREE_TYPE (op0)))
6102 /* ... or signedness changes for division or modulus,
6103 then we cannot pass through this conversion. */
6104 || (code != MULT_EXPR
6105 && (TYPE_UNSIGNED (ctype)
6106 != TYPE_UNSIGNED (TREE_TYPE (op0))))
6107 /* ... or has undefined overflow while the converted to
6108 type has not, we cannot do the operation in the inner type
6109 as that would introduce undefined overflow. */
6110 || (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (op0))
6111 && !TYPE_OVERFLOW_UNDEFINED (type))))
6112 break;
6114 /* Pass the constant down and see if we can make a simplification. If
6115 we can, replace this expression with the inner simplification for
6116 possible later conversion to our or some other type. */
6117 if ((t2 = fold_convert (TREE_TYPE (op0), c)) != 0
6118 && TREE_CODE (t2) == INTEGER_CST
6119 && !TREE_OVERFLOW (t2)
6120 && (0 != (t1 = extract_muldiv (op0, t2, code,
6121 code == MULT_EXPR
6122 ? ctype : NULL_TREE,
6123 strict_overflow_p))))
6124 return t1;
6125 break;
6127 case ABS_EXPR:
6128 /* If widening the type changes it from signed to unsigned, then we
6129 must avoid building ABS_EXPR itself as unsigned. */
6130 if (TYPE_UNSIGNED (ctype) && !TYPE_UNSIGNED (type))
6132 tree cstype = (*signed_type_for) (ctype);
6133 if ((t1 = extract_muldiv (op0, c, code, cstype, strict_overflow_p))
6134 != 0)
6136 t1 = fold_build1 (tcode, cstype, fold_convert (cstype, t1));
6137 return fold_convert (ctype, t1);
6139 break;
6141 /* If the constant is negative, we cannot simplify this. */
6142 if (tree_int_cst_sgn (c) == -1)
6143 break;
6144 /* FALLTHROUGH */
6145 case NEGATE_EXPR:
6146 if ((t1 = extract_muldiv (op0, c, code, wide_type, strict_overflow_p))
6147 != 0)
6148 return fold_build1 (tcode, ctype, fold_convert (ctype, t1));
6149 break;
6151 case MIN_EXPR: case MAX_EXPR:
6152 /* If widening the type changes the signedness, then we can't perform
6153 this optimization as that changes the result. */
6154 if (TYPE_UNSIGNED (ctype) != TYPE_UNSIGNED (type))
6155 break;
6157 /* MIN (a, b) / 5 -> MIN (a / 5, b / 5) */
6158 sub_strict_overflow_p = false;
6159 if ((t1 = extract_muldiv (op0, c, code, wide_type,
6160 &sub_strict_overflow_p)) != 0
6161 && (t2 = extract_muldiv (op1, c, code, wide_type,
6162 &sub_strict_overflow_p)) != 0)
6164 if (tree_int_cst_sgn (c) < 0)
6165 tcode = (tcode == MIN_EXPR ? MAX_EXPR : MIN_EXPR);
6166 if (sub_strict_overflow_p)
6167 *strict_overflow_p = true;
6168 return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
6169 fold_convert (ctype, t2));
6171 break;
6173 case LSHIFT_EXPR: case RSHIFT_EXPR:
6174 /* If the second operand is constant, this is a multiplication
6175 or floor division, by a power of two, so we can treat it that
6176 way unless the multiplier or divisor overflows. Signed
6177 left-shift overflow is implementation-defined rather than
6178 undefined in C90, so do not convert signed left shift into
6179 multiplication. */
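/* For example, for unsigned x, x << 3 is handled below as x * 8 and
   x >> 3 as x / 8 (floor division), with T1 holding the power-of-two
   constant 1 << 3.  */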
6180 if (TREE_CODE (op1) == INTEGER_CST
6181 && (tcode == RSHIFT_EXPR || TYPE_UNSIGNED (TREE_TYPE (op0)))
6182 /* const_binop may not detect overflow correctly,
6183 so check for it explicitly here. */
6184 && TYPE_PRECISION (TREE_TYPE (size_one_node)) > TREE_INT_CST_LOW (op1)
6185 && TREE_INT_CST_HIGH (op1) == 0
6186 && 0 != (t1 = fold_convert (ctype,
6187 const_binop (LSHIFT_EXPR,
6188 size_one_node,
6189 op1, 0)))
6190 && !TREE_OVERFLOW (t1))
6191 return extract_muldiv (build2 (tcode == LSHIFT_EXPR
6192 ? MULT_EXPR : FLOOR_DIV_EXPR,
6193 ctype, fold_convert (ctype, op0), t1),
6194 c, code, wide_type, strict_overflow_p);
6195 break;
6197 case PLUS_EXPR: case MINUS_EXPR:
6198 /* See if we can eliminate the operation on both sides. If we can, we
6199 can return a new PLUS or MINUS. If we can't, the only remaining
6200 cases where we can do anything are if the second operand is a
6201 constant. */
6202 sub_strict_overflow_p = false;
6203 t1 = extract_muldiv (op0, c, code, wide_type, &sub_strict_overflow_p);
6204 t2 = extract_muldiv (op1, c, code, wide_type, &sub_strict_overflow_p);
6205 if (t1 != 0 && t2 != 0
6206 && (code == MULT_EXPR
6207 /* If not multiplication, we can only do this if both operands
6208 are divisible by c. */
6209 || (multiple_of_p (ctype, op0, c)
6210 && multiple_of_p (ctype, op1, c))))
6212 if (sub_strict_overflow_p)
6213 *strict_overflow_p = true;
6214 return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
6215 fold_convert (ctype, t2));
6218 /* If this was a subtraction, negate OP1 and set it to be an addition.
6219 This simplifies the logic below. */
6220 if (tcode == MINUS_EXPR)
6221 tcode = PLUS_EXPR, op1 = negate_expr (op1);
6223 if (TREE_CODE (op1) != INTEGER_CST)
6224 break;
6226 /* If either OP1 or C is negative, this optimization is not safe for
6227 some of the division and remainder types while for others we need
6228 to change the code. */
6229 if (tree_int_cst_sgn (op1) < 0 || tree_int_cst_sgn (c) < 0)
6231 if (code == CEIL_DIV_EXPR)
6232 code = FLOOR_DIV_EXPR;
6233 else if (code == FLOOR_DIV_EXPR)
6234 code = CEIL_DIV_EXPR;
6235 else if (code != MULT_EXPR
6236 && code != CEIL_MOD_EXPR && code != FLOOR_MOD_EXPR)
6237 break;
6240 /* If it's a multiply or a division/modulus operation of a multiple
6241 of our constant, do the operation. */
6242 if (code == MULT_EXPR
6243 || integer_zerop (const_binop (TRUNC_MOD_EXPR, op1, c, 0)))
6245 op1 = const_binop (code, fold_convert (ctype, op1),
6246 fold_convert (ctype, c), 0);
6247 if (op1 == 0)
6248 break;
6250 else
6251 break;
6253 /* If we have an unsigned type that is not a sizetype, we cannot widen
6254 the operation since it will change the result if the original
6255 computation overflowed. */
6256 if (TYPE_UNSIGNED (ctype)
6257 && ! (TREE_CODE (ctype) == INTEGER_TYPE && TYPE_IS_SIZETYPE (ctype))
6258 && ctype != type)
6259 break;
6261 /* If we were able to eliminate our operation from the first side,
6262 apply our operation to the second side and reform the PLUS. */
6263 if (t1 != 0 && (TREE_CODE (t1) != code || code == MULT_EXPR))
6264 return fold_build2 (tcode, ctype, fold_convert (ctype, t1), op1);
6266 /* The last case is if we are a multiply. In that case, we can
6267 apply the distributive law to commute the multiply and addition
6268 if the multiplication of the constants doesn't overflow. */
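/* For example, (X + 3) * 4 is rewritten here as X * 4 + 12, OP1 having
   already been folded to the constant product above.  */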
6269 if (code == MULT_EXPR)
6270 return fold_build2 (tcode, ctype,
6271 fold_build2 (code, ctype,
6272 fold_convert (ctype, op0),
6273 fold_convert (ctype, c)),
6274 op1);
6276 break;
6278 case MULT_EXPR:
6279 /* We have a special case here if we are doing something like
6280 (C * 8) % 4 since we know that's zero. */
6281 if ((code == TRUNC_MOD_EXPR || code == CEIL_MOD_EXPR
6282 || code == FLOOR_MOD_EXPR || code == ROUND_MOD_EXPR)
6283 /* If the multiplication can overflow we cannot optimize this.
6284 ??? Until we can properly mark individual operations as
6285 not overflowing we need to treat sizetype special here as
6286 stor-layout relies on this optimization to make
6287 DECL_FIELD_BIT_OFFSET always a constant. */
6288 && (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (t))
6289 || (TREE_CODE (TREE_TYPE (t)) == INTEGER_TYPE
6290 && TYPE_IS_SIZETYPE (TREE_TYPE (t))))
6291 && TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST
6292 && integer_zerop (const_binop (TRUNC_MOD_EXPR, op1, c, 0)))
6294 *strict_overflow_p = true;
6295 return omit_one_operand (type, integer_zero_node, op0);
6298 /* ... fall through ... */
6300 case TRUNC_DIV_EXPR: case CEIL_DIV_EXPR: case FLOOR_DIV_EXPR:
6301 case ROUND_DIV_EXPR: case EXACT_DIV_EXPR:
6302 /* If we can extract our operation from the LHS, do so and return a
6303 new operation. Likewise for the RHS from a MULT_EXPR. Otherwise,
6304 do something only if the second operand is a constant. */
6305 if (same_p
6306 && (t1 = extract_muldiv (op0, c, code, wide_type,
6307 strict_overflow_p)) != 0)
6308 return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
6309 fold_convert (ctype, op1));
6310 else if (tcode == MULT_EXPR && code == MULT_EXPR
6311 && (t1 = extract_muldiv (op1, c, code, wide_type,
6312 strict_overflow_p)) != 0)
6313 return fold_build2 (tcode, ctype, fold_convert (ctype, op0),
6314 fold_convert (ctype, t1));
6315 else if (TREE_CODE (op1) != INTEGER_CST)
6316 return 0;
6318 /* If these are the same operation types, we can associate them
6319 assuming no overflow. */
6320 if (tcode == code
6321 && 0 != (t1 = int_const_binop (MULT_EXPR, fold_convert (ctype, op1),
6322 fold_convert (ctype, c), 1))
6323 && 0 != (t1 = force_fit_type_double (ctype, TREE_INT_CST_LOW (t1),
6324 TREE_INT_CST_HIGH (t1),
6325 (TYPE_UNSIGNED (ctype)
6326 && tcode != MULT_EXPR) ? -1 : 1,
6327 TREE_OVERFLOW (t1)))
6328 && !TREE_OVERFLOW (t1))
6329 return fold_build2 (tcode, ctype, fold_convert (ctype, op0), t1);
6331 /* If these operations "cancel" each other, we have the main
6332 optimizations of this pass, which occur when either constant is a
6333 multiple of the other, in which case we replace this with an
6334 operation of either CODE or TCODE.
6336 If we have an unsigned type that is not a sizetype, we cannot do
6337 this since it will change the result if the original computation
6338 overflowed. */
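/* For example, dividing (X * 8) exactly by 4 yields X * 2 through the
   first branch below (OP1 is a multiple of C), while dividing (X * 4)
   exactly by 8 yields X divided exactly by 2 through the second branch
   (C is a multiple of OP1).  */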
6339 if ((TYPE_OVERFLOW_UNDEFINED (ctype)
6340 || (TREE_CODE (ctype) == INTEGER_TYPE && TYPE_IS_SIZETYPE (ctype)))
6341 && ((code == MULT_EXPR && tcode == EXACT_DIV_EXPR)
6342 || (tcode == MULT_EXPR
6343 && code != TRUNC_MOD_EXPR && code != CEIL_MOD_EXPR
6344 && code != FLOOR_MOD_EXPR && code != ROUND_MOD_EXPR
6345 && code != MULT_EXPR)))
6347 if (integer_zerop (const_binop (TRUNC_MOD_EXPR, op1, c, 0)))
6349 if (TYPE_OVERFLOW_UNDEFINED (ctype))
6350 *strict_overflow_p = true;
6351 return fold_build2 (tcode, ctype, fold_convert (ctype, op0),
6352 fold_convert (ctype,
6353 const_binop (TRUNC_DIV_EXPR,
6354 op1, c, 0)));
6356 else if (integer_zerop (const_binop (TRUNC_MOD_EXPR, c, op1, 0)))
6358 if (TYPE_OVERFLOW_UNDEFINED (ctype))
6359 *strict_overflow_p = true;
6360 return fold_build2 (code, ctype, fold_convert (ctype, op0),
6361 fold_convert (ctype,
6362 const_binop (TRUNC_DIV_EXPR,
6363 c, op1, 0)));
6366 break;
6368 default:
6369 break;
6372 return 0;
6375 /* Return a node which has the indicated constant VALUE (either 0 or
6376 1), and is of the indicated TYPE. */
6378 tree
6379 constant_boolean_node (int value, tree type)
6381 if (type == integer_type_node)
6382 return value ? integer_one_node : integer_zero_node;
6383 else if (type == boolean_type_node)
6384 return value ? boolean_true_node : boolean_false_node;
6385 else
6386 return build_int_cst (type, value);
6390 /* Transform `a + (b ? x : y)' into `b ? (a + x) : (a + y)'.
6391 Transform `a + (x < y)' into `(x < y) ? (a + 1) : (a + 0)'.  Here
6392 CODE corresponds to the `+', COND to the `(b ? x : y)' or `(x < y)'
6393 expression, and ARG to `a'. If COND_FIRST_P is nonzero, then the
6394 COND is the first argument to CODE; otherwise (as in the example
6395 given here), it is the second argument. TYPE is the type of the
6396 original expression. Return NULL_TREE if no simplification is
6397 possible. */
6399 static tree
6400 fold_binary_op_with_conditional_arg (enum tree_code code,
6401 tree type, tree op0, tree op1,
6402 tree cond, tree arg, int cond_first_p)
6404 tree cond_type = cond_first_p ? TREE_TYPE (op0) : TREE_TYPE (op1);
6405 tree arg_type = cond_first_p ? TREE_TYPE (op1) : TREE_TYPE (op0);
6406 tree test, true_value, false_value;
6407 tree lhs = NULL_TREE;
6408 tree rhs = NULL_TREE;
6410 /* This transformation is only worthwhile if we don't have to wrap
6411 arg in a SAVE_EXPR, and the operation can be simplified on at least
6412 one of the branches once it is pushed inside the COND_EXPR. */
6413 if (!TREE_CONSTANT (arg))
6414 return NULL_TREE;
6416 if (TREE_CODE (cond) == COND_EXPR)
6418 test = TREE_OPERAND (cond, 0);
6419 true_value = TREE_OPERAND (cond, 1);
6420 false_value = TREE_OPERAND (cond, 2);
6421 /* If an arm of the conditional has void type (e.g. it is a throw
6422 expression), it does not make sense to try to perform a logical
6423 or arithmetic operation involving it.  */
6424 if (VOID_TYPE_P (TREE_TYPE (true_value)))
6425 lhs = true_value;
6426 if (VOID_TYPE_P (TREE_TYPE (false_value)))
6427 rhs = false_value;
6429 else
6431 tree testtype = TREE_TYPE (cond);
6432 test = cond;
6433 true_value = constant_boolean_node (true, testtype);
6434 false_value = constant_boolean_node (false, testtype);
6437 arg = fold_convert (arg_type, arg);
6438 if (lhs == 0)
6440 true_value = fold_convert (cond_type, true_value);
6441 if (cond_first_p)
6442 lhs = fold_build2 (code, type, true_value, arg);
6443 else
6444 lhs = fold_build2 (code, type, arg, true_value);
6446 if (rhs == 0)
6448 false_value = fold_convert (cond_type, false_value);
6449 if (cond_first_p)
6450 rhs = fold_build2 (code, type, false_value, arg);
6451 else
6452 rhs = fold_build2 (code, type, arg, false_value);
6455 test = fold_build3 (COND_EXPR, type, test, lhs, rhs);
6456 return fold_convert (type, test);
6460 /* Subroutine of fold() that checks for the addition of +/- 0.0.
6462 If !NEGATE, return true if ADDEND is +/-0.0 and, for all X of type
6463 TYPE, X + ADDEND is the same as X. If NEGATE, return true if X -
6464 ADDEND is the same as X.
6466 X + 0 and X - 0 both give X when X is NaN, infinite, or nonzero
6467 and finite. The problematic cases are when X is zero, and its mode
6468 has signed zeros. In the case of rounding towards -infinity,
6469 X - 0 is not the same as X because 0 - 0 is -0. In other rounding
6470 modes, X + 0 is not the same as X because -0 + 0 is 0. */
6472 bool
6473 fold_real_zero_addition_p (const_tree type, const_tree addend, int negate)
6475 if (!real_zerop (addend))
6476 return false;
6478 /* Don't allow the fold with -fsignaling-nans. */
6479 if (HONOR_SNANS (TYPE_MODE (type)))
6480 return false;
6482 /* Allow the fold if zeros aren't signed, or their sign isn't important. */
6483 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (type)))
6484 return true;
6486 /* Treat x + -0 as x - 0 and x - -0 as x + 0. */
6487 if (TREE_CODE (addend) == REAL_CST
6488 && REAL_VALUE_MINUS_ZERO (TREE_REAL_CST (addend)))
6489 negate = !negate;
6491 /* The mode has signed zeros, and we have to honor their sign.
6492 In this situation, there is only one case we can return true for.
6493 X - 0 is the same as X unless rounding towards -infinity is
6494 supported. */
6495 return negate && !HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type));
6498 /* Subroutine of fold() that checks comparisons of built-in math
6499 functions against real constants.
6501 FCODE is the DECL_FUNCTION_CODE of the built-in, CODE is the comparison
6502 operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR, GE_EXPR or LE_EXPR. TYPE
6503 is the type of the result and ARG0 and ARG1 are the operands of the
6504 comparison. ARG1 must be a TREE_REAL_CST.
6506 The function returns the constant folded tree if a simplification
6507 can be made, and NULL_TREE otherwise. */
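/* For example, sqrt(x) > 2.0 is simplified to x > 4.0 below; negative
   constants and constants whose square overflows to infinity receive
   the special handling that follows.  */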
6509 static tree
6510 fold_mathfn_compare (enum built_in_function fcode, enum tree_code code,
6511 tree type, tree arg0, tree arg1)
6513 REAL_VALUE_TYPE c;
6515 if (BUILTIN_SQRT_P (fcode))
6517 tree arg = CALL_EXPR_ARG (arg0, 0);
6518 enum machine_mode mode = TYPE_MODE (TREE_TYPE (arg0));
6520 c = TREE_REAL_CST (arg1);
6521 if (REAL_VALUE_NEGATIVE (c))
6523 /* sqrt(x) == y, < y and <= y are always false, if y is negative. */
6524 if (code == EQ_EXPR || code == LT_EXPR || code == LE_EXPR)
6525 return omit_one_operand (type, integer_zero_node, arg);
6527 /* sqrt(x) > y is always true, if y is negative and we
6528 don't care about NaNs, i.e. negative values of x. */
6529 if (code == NE_EXPR || !HONOR_NANS (mode))
6530 return omit_one_operand (type, integer_one_node, arg);
6532 /* sqrt(x) > y is the same as x >= 0, if y is negative. */
6533 return fold_build2 (GE_EXPR, type, arg,
6534 build_real (TREE_TYPE (arg), dconst0));
6536 else if (code == GT_EXPR || code == GE_EXPR)
6538 REAL_VALUE_TYPE c2;
6540 REAL_ARITHMETIC (c2, MULT_EXPR, c, c);
6541 real_convert (&c2, mode, &c2);
6543 if (REAL_VALUE_ISINF (c2))
6545 /* sqrt(x) > y is x == +Inf, when y is very large. */
6546 if (HONOR_INFINITIES (mode))
6547 return fold_build2 (EQ_EXPR, type, arg,
6548 build_real (TREE_TYPE (arg), c2));
6550 /* sqrt(x) > y is always false, when y is very large
6551 and we don't care about infinities. */
6552 return omit_one_operand (type, integer_zero_node, arg);
6555 /* sqrt(x) > c is the same as x > c*c. */
6556 return fold_build2 (code, type, arg,
6557 build_real (TREE_TYPE (arg), c2));
6559 else if (code == LT_EXPR || code == LE_EXPR)
6561 REAL_VALUE_TYPE c2;
6563 REAL_ARITHMETIC (c2, MULT_EXPR, c, c);
6564 real_convert (&c2, mode, &c2);
6566 if (REAL_VALUE_ISINF (c2))
6568 /* sqrt(x) < y is always true, when y is a very large
6569 value and we don't care about NaNs or Infinities. */
6570 if (! HONOR_NANS (mode) && ! HONOR_INFINITIES (mode))
6571 return omit_one_operand (type, integer_one_node, arg);
6573 /* sqrt(x) < y is x != +Inf when y is very large and we
6574 don't care about NaNs. */
6575 if (! HONOR_NANS (mode))
6576 return fold_build2 (NE_EXPR, type, arg,
6577 build_real (TREE_TYPE (arg), c2));
6579 /* sqrt(x) < y is x >= 0 when y is very large and we
6580 don't care about Infinities. */
6581 if (! HONOR_INFINITIES (mode))
6582 return fold_build2 (GE_EXPR, type, arg,
6583 build_real (TREE_TYPE (arg), dconst0));
6585 /* sqrt(x) < y is x >= 0 && x != +Inf, when y is large. */
6586 if (lang_hooks.decls.global_bindings_p () != 0
6587 || CONTAINS_PLACEHOLDER_P (arg))
6588 return NULL_TREE;
6590 arg = save_expr (arg);
6591 return fold_build2 (TRUTH_ANDIF_EXPR, type,
6592 fold_build2 (GE_EXPR, type, arg,
6593 build_real (TREE_TYPE (arg),
6594 dconst0)),
6595 fold_build2 (NE_EXPR, type, arg,
6596 build_real (TREE_TYPE (arg),
6597 c2)));
6600 /* sqrt(x) < c is the same as x < c*c, if we ignore NaNs. */
6601 if (! HONOR_NANS (mode))
6602 return fold_build2 (code, type, arg,
6603 build_real (TREE_TYPE (arg), c2));
6605 /* sqrt(x) < c is the same as x >= 0 && x < c*c. */
6606 if (lang_hooks.decls.global_bindings_p () == 0
6607 && ! CONTAINS_PLACEHOLDER_P (arg))
6609 arg = save_expr (arg);
6610 return fold_build2 (TRUTH_ANDIF_EXPR, type,
6611 fold_build2 (GE_EXPR, type, arg,
6612 build_real (TREE_TYPE (arg),
6613 dconst0)),
6614 fold_build2 (code, type, arg,
6615 build_real (TREE_TYPE (arg),
6616 c2)));
6621 return NULL_TREE;
6624 /* Subroutine of fold() that optimizes comparisons against Infinities,
6625 either +Inf or -Inf.
6627 CODE is the comparison operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR,
6628 GE_EXPR or LE_EXPR. TYPE is the type of the result and ARG0 and ARG1
6629 are the operands of the comparison. ARG1 must be a TREE_REAL_CST.
6631 The function returns the constant folded tree if a simplification
6632 can be made, and NULL_TREE otherwise. */
6634 static tree
6635 fold_inf_compare (enum tree_code code, tree type, tree arg0, tree arg1)
6637 enum machine_mode mode;
6638 REAL_VALUE_TYPE max;
6639 tree temp;
6640 bool neg;
6642 mode = TYPE_MODE (TREE_TYPE (arg0));
6644 /* For negative infinity swap the sense of the comparison. */
6645 neg = REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg1));
6646 if (neg)
6647 code = swap_tree_comparison (code);
6649 switch (code)
6651 case GT_EXPR:
6652 /* x > +Inf is always false, if we ignore sNaNs. */
6653 if (HONOR_SNANS (mode))
6654 return NULL_TREE;
6655 return omit_one_operand (type, integer_zero_node, arg0);
6657 case LE_EXPR:
6658 /* x <= +Inf is always true, if we don't care about NaNs. */
6659 if (! HONOR_NANS (mode))
6660 return omit_one_operand (type, integer_one_node, arg0);
6662 /* x <= +Inf is the same as x == x, i.e. isfinite(x). */
6663 if (lang_hooks.decls.global_bindings_p () == 0
6664 && ! CONTAINS_PLACEHOLDER_P (arg0))
6666 arg0 = save_expr (arg0);
6667 return fold_build2 (EQ_EXPR, type, arg0, arg0);
6669 break;
6671 case EQ_EXPR:
6672 case GE_EXPR:
6673 /* x == +Inf and x >= +Inf are always equal to x > DBL_MAX. */
6674 real_maxval (&max, neg, mode);
6675 return fold_build2 (neg ? LT_EXPR : GT_EXPR, type,
6676 arg0, build_real (TREE_TYPE (arg0), max));
6678 case LT_EXPR:
6679 /* x < +Inf is always equal to x <= DBL_MAX. */
6680 real_maxval (&max, neg, mode);
6681 return fold_build2 (neg ? GE_EXPR : LE_EXPR, type,
6682 arg0, build_real (TREE_TYPE (arg0), max));
6684 case NE_EXPR:
6685 /* x != +Inf is always equal to !(x > DBL_MAX). */
6686 real_maxval (&max, neg, mode);
6687 if (! HONOR_NANS (mode))
6688 return fold_build2 (neg ? GE_EXPR : LE_EXPR, type,
6689 arg0, build_real (TREE_TYPE (arg0), max));
6691 temp = fold_build2 (neg ? LT_EXPR : GT_EXPR, type,
6692 arg0, build_real (TREE_TYPE (arg0), max));
6693 return fold_build1 (TRUTH_NOT_EXPR, type, temp);
6695 default:
6696 break;
6699 return NULL_TREE;
6702 /* Subroutine of fold() that optimizes comparisons of a division by
6703 a nonzero integer constant against an integer constant, i.e.
6704 X/C1 op C2.
6706 CODE is the comparison operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR,
6707 GE_EXPR or LE_EXPR. TYPE is the type of the result and ARG0 and ARG1
6708 are the operands of the comparison.  ARG1 must be an INTEGER_CST.
6710 The function returns the constant folded tree if a simplification
6711 can be made, and NULL_TREE otherwise. */
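/* For example, with signed truncating division, X / 4 == 3 holds
   exactly for X in [12, 15], so the EQ_EXPR case below produces the
   range check 12 <= X && X <= 15.  */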
6713 static tree
6714 fold_div_compare (enum tree_code code, tree type, tree arg0, tree arg1)
6716 tree prod, tmp, hi, lo;
6717 tree arg00 = TREE_OPERAND (arg0, 0);
6718 tree arg01 = TREE_OPERAND (arg0, 1);
6719 unsigned HOST_WIDE_INT lpart;
6720 HOST_WIDE_INT hpart;
6721 bool unsigned_p = TYPE_UNSIGNED (TREE_TYPE (arg0));
6722 bool neg_overflow;
6723 int overflow;
6725 /* We have to do this the hard way to detect unsigned overflow.
6726 prod = int_const_binop (MULT_EXPR, arg01, arg1, 0); */
6727 overflow = mul_double_with_sign (TREE_INT_CST_LOW (arg01),
6728 TREE_INT_CST_HIGH (arg01),
6729 TREE_INT_CST_LOW (arg1),
6730 TREE_INT_CST_HIGH (arg1),
6731 &lpart, &hpart, unsigned_p);
6732 prod = force_fit_type_double (TREE_TYPE (arg00), lpart, hpart,
6733 -1, overflow);
6734 neg_overflow = false;
6736 if (unsigned_p)
6738 tmp = int_const_binop (MINUS_EXPR, arg01,
6739 build_int_cst (TREE_TYPE (arg01), 1), 0);
6740 lo = prod;
6742 /* Likewise hi = int_const_binop (PLUS_EXPR, prod, tmp, 0). */
6743 overflow = add_double_with_sign (TREE_INT_CST_LOW (prod),
6744 TREE_INT_CST_HIGH (prod),
6745 TREE_INT_CST_LOW (tmp),
6746 TREE_INT_CST_HIGH (tmp),
6747 &lpart, &hpart, unsigned_p);
6748 hi = force_fit_type_double (TREE_TYPE (arg00), lpart, hpart,
6749 -1, overflow | TREE_OVERFLOW (prod));
6751 else if (tree_int_cst_sgn (arg01) >= 0)
6753 tmp = int_const_binop (MINUS_EXPR, arg01,
6754 build_int_cst (TREE_TYPE (arg01), 1), 0);
6755 switch (tree_int_cst_sgn (arg1))
6757 case -1:
6758 neg_overflow = true;
6759 lo = int_const_binop (MINUS_EXPR, prod, tmp, 0);
6760 hi = prod;
6761 break;
6763 case 0:
6764 lo = fold_negate_const (tmp, TREE_TYPE (arg0));
6765 hi = tmp;
6766 break;
6768 case 1:
6769 hi = int_const_binop (PLUS_EXPR, prod, tmp, 0);
6770 lo = prod;
6771 break;
6773 default:
6774 gcc_unreachable ();
6777 else
6779 /* A negative divisor reverses the relational operators. */
6780 code = swap_tree_comparison (code);
6782 tmp = int_const_binop (PLUS_EXPR, arg01,
6783 build_int_cst (TREE_TYPE (arg01), 1), 0);
6784 switch (tree_int_cst_sgn (arg1))
6786 case -1:
6787 hi = int_const_binop (MINUS_EXPR, prod, tmp, 0);
6788 lo = prod;
6789 break;
6791 case 0:
6792 hi = fold_negate_const (tmp, TREE_TYPE (arg0));
6793 lo = tmp;
6794 break;
6796 case 1:
6797 neg_overflow = true;
6798 lo = int_const_binop (PLUS_EXPR, prod, tmp, 0);
6799 hi = prod;
6800 break;
6802 default:
6803 gcc_unreachable ();
6807 switch (code)
6809 case EQ_EXPR:
6810 if (TREE_OVERFLOW (lo) && TREE_OVERFLOW (hi))
6811 return omit_one_operand (type, integer_zero_node, arg00);
6812 if (TREE_OVERFLOW (hi))
6813 return fold_build2 (GE_EXPR, type, arg00, lo);
6814 if (TREE_OVERFLOW (lo))
6815 return fold_build2 (LE_EXPR, type, arg00, hi);
6816 return build_range_check (type, arg00, 1, lo, hi);
6818 case NE_EXPR:
6819 if (TREE_OVERFLOW (lo) && TREE_OVERFLOW (hi))
6820 return omit_one_operand (type, integer_one_node, arg00);
6821 if (TREE_OVERFLOW (hi))
6822 return fold_build2 (LT_EXPR, type, arg00, lo);
6823 if (TREE_OVERFLOW (lo))
6824 return fold_build2 (GT_EXPR, type, arg00, hi);
6825 return build_range_check (type, arg00, 0, lo, hi);
6827 case LT_EXPR:
6828 if (TREE_OVERFLOW (lo))
6830 tmp = neg_overflow ? integer_zero_node : integer_one_node;
6831 return omit_one_operand (type, tmp, arg00);
6833 return fold_build2 (LT_EXPR, type, arg00, lo);
6835 case LE_EXPR:
6836 if (TREE_OVERFLOW (hi))
6838 tmp = neg_overflow ? integer_zero_node : integer_one_node;
6839 return omit_one_operand (type, tmp, arg00);
6841 return fold_build2 (LE_EXPR, type, arg00, hi);
6843 case GT_EXPR:
6844 if (TREE_OVERFLOW (hi))
6846 tmp = neg_overflow ? integer_one_node : integer_zero_node;
6847 return omit_one_operand (type, tmp, arg00);
6849 return fold_build2 (GT_EXPR, type, arg00, hi);
6851 case GE_EXPR:
6852 if (TREE_OVERFLOW (lo))
6854 tmp = neg_overflow ? integer_one_node : integer_zero_node;
6855 return omit_one_operand (type, tmp, arg00);
6857 return fold_build2 (GE_EXPR, type, arg00, lo);
6859 default:
6860 break;
6863 return NULL_TREE;
6867 /* If CODE with arguments ARG0 and ARG1 represents a single bit
6868 equality/inequality test, then return a simplified form of the test
6869 using a sign test.  Otherwise return NULL.  RESULT_TYPE is the desired
6870 result type. */
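/* For example, for 32-bit x, (x & 0x80000000) != 0 becomes
   (int) x < 0, and (x & 0x80000000) == 0 becomes (int) x >= 0.  */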
6872 static tree
6873 fold_single_bit_test_into_sign_test (enum tree_code code, tree arg0, tree arg1,
6874 tree result_type)
6876 /* If this is testing a single bit, we can optimize the test. */
6877 if ((code == NE_EXPR || code == EQ_EXPR)
6878 && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
6879 && integer_pow2p (TREE_OPERAND (arg0, 1)))
6881 /* If we have (A & C) != 0 where C is the sign bit of A, convert
6882 this into A < 0. Similarly for (A & C) == 0 into A >= 0. */
6883 tree arg00 = sign_bit_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg0, 1));
6885 if (arg00 != NULL_TREE
6886 /* This is only a win if casting to a signed type is cheap,
6887 i.e. when arg00's type is not a partial mode. */
6888 && TYPE_PRECISION (TREE_TYPE (arg00))
6889 == GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (arg00))))
6891 tree stype = signed_type_for (TREE_TYPE (arg00));
6892 return fold_build2 (code == EQ_EXPR ? GE_EXPR : LT_EXPR,
6893 result_type, fold_convert (stype, arg00),
6894 build_int_cst (stype, 0));
6898 return NULL_TREE;
6901 /* If CODE with arguments ARG0 and ARG1 represents a single bit
6902 equality/inequality test, then return a simplified form of
6903 the test using shifts and logical operations. Otherwise return
6904 NULL.  RESULT_TYPE is the desired result type. */
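/* For example, (x & 8) != 0 becomes ((x >> 3) & 1), and (x & 8) == 0
   becomes (((x >> 3) ^ 1) & 1), keeping the AND last so it can combine
   with other operations.  */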
6906 tree
6907 fold_single_bit_test (enum tree_code code, tree arg0, tree arg1,
6908 tree result_type)
6910 /* If this is testing a single bit, we can optimize the test. */
6911 if ((code == NE_EXPR || code == EQ_EXPR)
6912 && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
6913 && integer_pow2p (TREE_OPERAND (arg0, 1)))
6915 tree inner = TREE_OPERAND (arg0, 0);
6916 tree type = TREE_TYPE (arg0);
6917 int bitnum = tree_log2 (TREE_OPERAND (arg0, 1));
6918 enum machine_mode operand_mode = TYPE_MODE (type);
6919 int ops_unsigned;
6920 tree signed_type, unsigned_type, intermediate_type;
6921 tree tem, one;
6923 /* First, see if we can fold the single bit test into a sign-bit
6924 test. */
6925 tem = fold_single_bit_test_into_sign_test (code, arg0, arg1,
6926 result_type);
6927 if (tem)
6928 return tem;
6930 /* Otherwise we have (A & C) != 0 where C is a single bit,
6931 convert that into ((A >> C2) & 1), where C2 = log2(C).
6932 Similarly for (A & C) == 0. */
6934 /* If INNER is a right shift of a constant and it plus BITNUM does
6935 not overflow, adjust BITNUM and INNER. */
6936 if (TREE_CODE (inner) == RSHIFT_EXPR
6937 && TREE_CODE (TREE_OPERAND (inner, 1)) == INTEGER_CST
6938 && TREE_INT_CST_HIGH (TREE_OPERAND (inner, 1)) == 0
6939 && bitnum < TYPE_PRECISION (type)
6940 && 0 > compare_tree_int (TREE_OPERAND (inner, 1),
6941 bitnum - TYPE_PRECISION (type)))
6943 bitnum += TREE_INT_CST_LOW (TREE_OPERAND (inner, 1));
6944 inner = TREE_OPERAND (inner, 0);
6947 /* If we are going to be able to omit the AND below, we must do our
6948 operations as unsigned. If we must use the AND, we have a choice.
6949 Normally unsigned is faster, but for some machines signed is. */
6950 #ifdef LOAD_EXTEND_OP
6951 ops_unsigned = (LOAD_EXTEND_OP (operand_mode) == SIGN_EXTEND
6952 && !flag_syntax_only) ? 0 : 1;
6953 #else
6954 ops_unsigned = 1;
6955 #endif
6957 signed_type = lang_hooks.types.type_for_mode (operand_mode, 0);
6958 unsigned_type = lang_hooks.types.type_for_mode (operand_mode, 1);
6959 intermediate_type = ops_unsigned ? unsigned_type : signed_type;
6960 inner = fold_convert (intermediate_type, inner);
6962 if (bitnum != 0)
6963 inner = build2 (RSHIFT_EXPR, intermediate_type,
6964 inner, size_int (bitnum));
6966 one = build_int_cst (intermediate_type, 1);
6968 if (code == EQ_EXPR)
6969 inner = fold_build2 (BIT_XOR_EXPR, intermediate_type, inner, one);
6971 /* Put the AND last so it can combine with more things. */
6972 inner = build2 (BIT_AND_EXPR, intermediate_type, inner, one);
6974 /* Make sure to return the proper type. */
6975 inner = fold_convert (result_type, inner);
6977 return inner;
6979 return NULL_TREE;
6982 /* Check whether we are allowed to reorder operands arg0 and arg1,
6983 such that the evaluation of arg1 occurs before arg0. */
6985 static bool
6986 reorder_operands_p (const_tree arg0, const_tree arg1)
6988 if (! flag_evaluation_order)
6989 return true;
6990 if (TREE_CONSTANT (arg0) || TREE_CONSTANT (arg1))
6991 return true;
6992 return ! TREE_SIDE_EFFECTS (arg0)
6993 && ! TREE_SIDE_EFFECTS (arg1);
6996 /* Test whether it is preferable to swap two operands, ARG0 and
6997 ARG1, for example because ARG0 is an integer constant and ARG1
6998 isn't. If REORDER is true, only recommend swapping if we can
6999 evaluate the operands in reverse order. */
7001 bool
7002 tree_swap_operands_p (const_tree arg0, const_tree arg1, bool reorder)
7004 STRIP_SIGN_NOPS (arg0);
7005 STRIP_SIGN_NOPS (arg1);
7007 if (TREE_CODE (arg1) == INTEGER_CST)
7008 return 0;
7009 if (TREE_CODE (arg0) == INTEGER_CST)
7010 return 1;
7012 if (TREE_CODE (arg1) == REAL_CST)
7013 return 0;
7014 if (TREE_CODE (arg0) == REAL_CST)
7015 return 1;
7017 if (TREE_CODE (arg1) == FIXED_CST)
7018 return 0;
7019 if (TREE_CODE (arg0) == FIXED_CST)
7020 return 1;
7022 if (TREE_CODE (arg1) == COMPLEX_CST)
7023 return 0;
7024 if (TREE_CODE (arg0) == COMPLEX_CST)
7025 return 1;
7027 if (TREE_CONSTANT (arg1))
7028 return 0;
7029 if (TREE_CONSTANT (arg0))
7030 return 1;
7032 if (optimize_function_for_size_p (cfun))
7033 return 0;
7035 if (reorder && flag_evaluation_order
7036 && (TREE_SIDE_EFFECTS (arg0) || TREE_SIDE_EFFECTS (arg1)))
7037 return 0;
7039 /* It is preferable to swap two SSA_NAME to ensure a canonical form
7040 for commutative and comparison operators. Ensuring a canonical
7041 form allows the optimizers to find additional redundancies without
7042 having to explicitly check for both orderings. */
7043 if (TREE_CODE (arg0) == SSA_NAME
7044 && TREE_CODE (arg1) == SSA_NAME
7045 && SSA_NAME_VERSION (arg0) > SSA_NAME_VERSION (arg1))
7046 return 1;
7048 /* Put SSA_NAMEs last. */
7049 if (TREE_CODE (arg1) == SSA_NAME)
7050 return 0;
7051 if (TREE_CODE (arg0) == SSA_NAME)
7052 return 1;
7054 /* Put variables last. */
7055 if (DECL_P (arg1))
7056 return 0;
7057 if (DECL_P (arg0))
7058 return 1;
7060 return 0;
7063 /* Fold comparison ARG0 CODE ARG1 (with result in TYPE), where
7064 ARG0 is extended to a wider type. */
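/* For example, for short s, (int) s == 0x1234 can be done as
   s == (short) 0x1234, while (int) s == 0x54321 is known to be false
   because 0x54321 is outside the range of short.  */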
7066 static tree
7067 fold_widened_comparison (enum tree_code code, tree type, tree arg0, tree arg1)
7069 tree arg0_unw = get_unwidened (arg0, NULL_TREE);
7070 tree arg1_unw;
7071 tree shorter_type, outer_type;
7072 tree min, max;
7073 bool above, below;
7075 if (arg0_unw == arg0)
7076 return NULL_TREE;
7077 shorter_type = TREE_TYPE (arg0_unw);
7079 #ifdef HAVE_canonicalize_funcptr_for_compare
7080 /* Disable this optimization if we're casting a function pointer
7081 type on targets that require function pointer canonicalization. */
7082 if (HAVE_canonicalize_funcptr_for_compare
7083 && TREE_CODE (shorter_type) == POINTER_TYPE
7084 && TREE_CODE (TREE_TYPE (shorter_type)) == FUNCTION_TYPE)
7085 return NULL_TREE;
7086 #endif
7088 if (TYPE_PRECISION (TREE_TYPE (arg0)) <= TYPE_PRECISION (shorter_type))
7089 return NULL_TREE;
7091 arg1_unw = get_unwidened (arg1, NULL_TREE);
7093 /* If possible, express the comparison in the shorter mode. */
7094 if ((code == EQ_EXPR || code == NE_EXPR
7095 || TYPE_UNSIGNED (TREE_TYPE (arg0)) == TYPE_UNSIGNED (shorter_type))
7096 && (TREE_TYPE (arg1_unw) == shorter_type
7097 || ((TYPE_PRECISION (shorter_type)
7098 >= TYPE_PRECISION (TREE_TYPE (arg1_unw)))
7099 && (TYPE_UNSIGNED (shorter_type)
7100 == TYPE_UNSIGNED (TREE_TYPE (arg1_unw))))
7101 || (TREE_CODE (arg1_unw) == INTEGER_CST
7102 && (TREE_CODE (shorter_type) == INTEGER_TYPE
7103 || TREE_CODE (shorter_type) == BOOLEAN_TYPE)
7104 && int_fits_type_p (arg1_unw, shorter_type))))
7105 return fold_build2 (code, type, arg0_unw,
7106 fold_convert (shorter_type, arg1_unw));
7108 if (TREE_CODE (arg1_unw) != INTEGER_CST
7109 || TREE_CODE (shorter_type) != INTEGER_TYPE
7110 || !int_fits_type_p (arg1_unw, shorter_type))
7111 return NULL_TREE;
7113 /* If we are comparing with an integer that does not fit into the range
7114 of the shorter type, the result is known. */
7115 outer_type = TREE_TYPE (arg1_unw);
7116 min = lower_bound_in_type (outer_type, shorter_type);
7117 max = upper_bound_in_type (outer_type, shorter_type);
7119 above = integer_nonzerop (fold_relational_const (LT_EXPR, type,
7120 max, arg1_unw));
7121 below = integer_nonzerop (fold_relational_const (LT_EXPR, type,
7122 arg1_unw, min));
7124 switch (code)
7126 case EQ_EXPR:
7127 if (above || below)
7128 return omit_one_operand (type, integer_zero_node, arg0);
7129 break;
7131 case NE_EXPR:
7132 if (above || below)
7133 return omit_one_operand (type, integer_one_node, arg0);
7134 break;
7136 case LT_EXPR:
7137 case LE_EXPR:
7138 if (above)
7139 return omit_one_operand (type, integer_one_node, arg0);
7140 else if (below)
7141 return omit_one_operand (type, integer_zero_node, arg0);
break;
7143 case GT_EXPR:
7144 case GE_EXPR:
7145 if (above)
7146 return omit_one_operand (type, integer_zero_node, arg0);
7147 else if (below)
7148 return omit_one_operand (type, integer_one_node, arg0);
7150 default:
7151 break;
7154 return NULL_TREE;
7157 /* Fold comparison ARG0 CODE ARG1 (with result in TYPE), where ARG0 is
7158 a conversion that changes just the signedness.  */
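/* For example, for int x, (unsigned) x == 5U becomes x == 5.  Only
   EQ_EXPR and NE_EXPR survive a signedness change; ordered comparisons
   would change meaning.  */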
7160 static tree
7161 fold_sign_changed_comparison (enum tree_code code, tree type,
7162 tree arg0, tree arg1)
7164 tree arg0_inner;
7165 tree inner_type, outer_type;
7167 if (!CONVERT_EXPR_P (arg0))
7168 return NULL_TREE;
7170 outer_type = TREE_TYPE (arg0);
7171 arg0_inner = TREE_OPERAND (arg0, 0);
7172 inner_type = TREE_TYPE (arg0_inner);
7174 #ifdef HAVE_canonicalize_funcptr_for_compare
7175 /* Disable this optimization if we're casting a function pointer
7176 type on targets that require function pointer canonicalization. */
7177 if (HAVE_canonicalize_funcptr_for_compare
7178 && TREE_CODE (inner_type) == POINTER_TYPE
7179 && TREE_CODE (TREE_TYPE (inner_type)) == FUNCTION_TYPE)
7180 return NULL_TREE;
7181 #endif
7183 if (TYPE_PRECISION (inner_type) != TYPE_PRECISION (outer_type))
7184 return NULL_TREE;
7186 /* If the conversion is from an integral subtype to its basetype,
7187 leave it alone. */
7188 if (TREE_TYPE (inner_type) == outer_type)
7189 return NULL_TREE;
7191 if (TREE_CODE (arg1) != INTEGER_CST
7192 && !(CONVERT_EXPR_P (arg1)
7193 && TREE_TYPE (TREE_OPERAND (arg1, 0)) == inner_type))
7194 return NULL_TREE;
7196 if ((TYPE_UNSIGNED (inner_type) != TYPE_UNSIGNED (outer_type)
7197 || POINTER_TYPE_P (inner_type) != POINTER_TYPE_P (outer_type))
7198 && code != NE_EXPR
7199 && code != EQ_EXPR)
7200 return NULL_TREE;
7202 if (TREE_CODE (arg1) == INTEGER_CST)
7203 arg1 = force_fit_type_double (inner_type, TREE_INT_CST_LOW (arg1),
7204 TREE_INT_CST_HIGH (arg1), 0,
7205 TREE_OVERFLOW (arg1));
7206 else
7207 arg1 = fold_convert (inner_type, arg1);
7209 return fold_build2 (code, type, arg0_inner, arg1);
7212 /* Tries to replace &a[idx] p+ s * delta with &a[idx + delta], if s is
7213 the step of the array.  Reconstructs s and delta in the case of s * delta
7214 being an integer constant (and thus already folded).
7215 ADDR is the address.  OP1 is the multiplicative expression.
7216 If the function succeeds, the new address expression is returned. Otherwise
7217 NULL_TREE is returned. */
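/* For example, with int a[10] (step 4), &a[1] p+ 4 * i is rewritten as
   &a[1 + i], and the already-folded constant case &a[1] p+ 8 becomes
   &a[3].  */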
7219 static tree
7220 try_move_mult_to_index (tree addr, tree op1)
7222 tree s, delta, step;
7223 tree ref = TREE_OPERAND (addr, 0), pref;
7224 tree ret, pos;
7225 tree itype;
7226 bool mdim = false;
7228 /* Strip the nops that might be added when converting op1 to sizetype. */
7229 STRIP_NOPS (op1);
7231 /* Canonicalize op1 into a possibly non-constant delta
7232 and an INTEGER_CST s. */
7233 if (TREE_CODE (op1) == MULTNV_EXPR)
7235 tree arg0 = TREE_OPERAND (op1, 0), arg1 = TREE_OPERAND (op1, 1);
7237 STRIP_NOPS (arg0);
7238 STRIP_NOPS (arg1);
7240 if (TREE_CODE (arg0) == INTEGER_CST)
7242 s = arg0;
7243 delta = arg1;
7245 else if (TREE_CODE (arg1) == INTEGER_CST)
7247 s = arg1;
7248 delta = arg0;
7250 else
7251 return NULL_TREE;
7253 else if (TREE_CODE (op1) == INTEGER_CST)
7255 delta = op1;
7256 s = NULL_TREE;
7258 else
7260 /* Treat op1 as delta * 1. */
7261 delta = op1;
7262 s = integer_one_node;
7265 for (;; ref = TREE_OPERAND (ref, 0))
7267 if (TREE_CODE (ref) == ARRAY_REF)
7269 /* Remember if this was a multi-dimensional array. */
7270 if (TREE_CODE (TREE_OPERAND (ref, 0)) == ARRAY_REF)
7271 mdim = true;
7273 itype = TYPE_DOMAIN (TREE_TYPE (TREE_OPERAND (ref, 0)));
7274 if (! itype)
7275 continue;
7277 step = array_ref_element_size (ref);
7278 if (TREE_CODE (step) != INTEGER_CST)
7279 continue;
7281 if (s)
7283 if (! tree_int_cst_equal (step, s))
7284 continue;
7286 else
7288 /* Check whether delta is a multiple of step. */
7289 tree tmp = div_if_zero_remainder (EXACT_DIV_EXPR, op1, step);
7290 if (! tmp)
7291 continue;
7292 delta = tmp;
7295 /* Only fold here if we can verify we do not overflow one
7296 dimension of a multi-dimensional array. */
7297 if (mdim)
7299 tree tmp;
7301 if (TREE_CODE (TREE_OPERAND (ref, 1)) != INTEGER_CST
7302 || !INTEGRAL_TYPE_P (itype)
7303 || !TYPE_MAX_VALUE (itype)
7304 || TREE_CODE (TYPE_MAX_VALUE (itype)) != INTEGER_CST)
7305 continue;
7307 tmp = fold_binary (PLUS_EXPR, itype,
7308 fold_convert (itype,
7309 TREE_OPERAND (ref, 1)),
7310 fold_convert (itype, delta));
7311 if (!tmp
7312 || TREE_CODE (tmp) != INTEGER_CST
7313 || tree_int_cst_lt (TYPE_MAX_VALUE (itype), tmp))
7314 continue;
7317 break;
7319 else
7320 mdim = false;
7322 if (!handled_component_p (ref))
7323 return NULL_TREE;
7326 /* We found a suitable array reference.  Copy everything up to it,
7327 and replace the index. */
7329 pref = TREE_OPERAND (addr, 0);
7330 ret = copy_node (pref);
7331 pos = ret;
7333 while (pref != ref)
7335 pref = TREE_OPERAND (pref, 0);
7336 TREE_OPERAND (pos, 0) = copy_node (pref);
7337 pos = TREE_OPERAND (pos, 0);
7340 TREE_OPERAND (pos, 1) = fold_build2 (PLUS_EXPR, itype,
7341 fold_convert (itype,
7342 TREE_OPERAND (pos, 1)),
7343 fold_convert (itype, delta));
7345 return fold_build1 (ADDR_EXPR, TREE_TYPE (addr), ret);
7349 /* Fold A < X && A + 1 > Y to A < X && A >= Y. Normally A + 1 > Y
7350 means A >= Y && A != MAX, but in this case we know that
7351 A < X <= MAX. INEQ is A + 1 > Y, BOUND is A < X. */
7353 static tree
7354 fold_to_nonsharp_ineq_using_bound (tree ineq, tree bound)
7356 tree a, typea, type = TREE_TYPE (ineq), a1, diff, y;
7358 if (TREE_CODE (bound) == LT_EXPR)
7359 a = TREE_OPERAND (bound, 0);
7360 else if (TREE_CODE (bound) == GT_EXPR)
7361 a = TREE_OPERAND (bound, 1);
7362 else
7363 return NULL_TREE;
7365 typea = TREE_TYPE (a);
7366 if (!INTEGRAL_TYPE_P (typea)
7367 && !POINTER_TYPE_P (typea))
7368 return NULL_TREE;
7370 if (TREE_CODE (ineq) == LT_EXPR)
7372 a1 = TREE_OPERAND (ineq, 1);
7373 y = TREE_OPERAND (ineq, 0);
7375 else if (TREE_CODE (ineq) == GT_EXPR)
7377 a1 = TREE_OPERAND (ineq, 0);
7378 y = TREE_OPERAND (ineq, 1);
7380 else
7381 return NULL_TREE;
7383 if (TREE_TYPE (a1) != typea)
7384 return NULL_TREE;
7386 if (POINTER_TYPE_P (typea))
7388 /* Convert the pointers to integers before taking the difference. */
7389 tree ta = fold_convert (ssizetype, a);
7390 tree ta1 = fold_convert (ssizetype, a1);
7391 diff = fold_binary (MINUS_EXPR, ssizetype, ta1, ta);
7393 else
7394 diff = fold_binary (MINUS_EXPR, typea, a1, a);
7396 if (!diff || !integer_onep (diff))
7397 return NULL_TREE;
7399 return fold_build2 (GE_EXPR, type, a, y);
7402 /* Fold a sum or difference of at least one multiplication.
7403 Returns the folded tree or NULL if no simplification could be made. */
7405 static tree
7406 fold_plusminus_mult_expr (enum tree_code code, tree type, tree arg0, tree arg1)
7408 tree arg00, arg01, arg10, arg11;
7409 tree alt0 = NULL_TREE, alt1 = NULL_TREE, same;
7411 /* (A * C) +- (B * C) -> (A+-B) * C.
7412 (A * C) +- A -> A * (C+-1).
7413 We are most concerned about the case where C is a constant,
7414 but other combinations show up during loop reduction. Since
7415 it is not difficult, try all four possibilities. */
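/* For instance, a * 4 + b * 4 folds to (a + b) * 4, and a * 5 + a
   folds to a * 6.  */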
7417 if (MULT_EXPR_P (arg0))
7419 arg00 = TREE_OPERAND (arg0, 0);
7420 arg01 = TREE_OPERAND (arg0, 1);
7422 else if (TREE_CODE (arg0) == INTEGER_CST)
7424 arg00 = build_one_cst (type);
7425 arg01 = arg0;
7427 else
7429 /* We cannot generate constant 1 for fract. */
7430 if (ALL_FRACT_MODE_P (TYPE_MODE (type)))
7431 return NULL_TREE;
7432 arg00 = arg0;
7433 arg01 = build_one_cst (type);
7435 if (MULT_EXPR_P (arg1))
7437 arg10 = TREE_OPERAND (arg1, 0);
7438 arg11 = TREE_OPERAND (arg1, 1);
7440 else if (TREE_CODE (arg1) == INTEGER_CST)
7442 arg10 = build_one_cst (type);
7443       /* Since A - 2 is canonicalized to A + -2, undo that sign change for
7444 	 the purpose of this transformation.  */
7445 if (TREE_INT_CST_HIGH (arg1) == -1
7446 && negate_expr_p (arg1)
7447 && PLUS_EXPR_CODE_P (code))
7449 arg11 = negate_expr (arg1);
7450 code = MINUS_EXPR;
7452 else
7453 arg11 = arg1;
7455 else
7457 /* We cannot generate constant 1 for fract. */
7458 if (ALL_FRACT_MODE_P (TYPE_MODE (type)))
7459 return NULL_TREE;
7460 arg10 = arg1;
7461 arg11 = build_one_cst (type);
7463 same = NULL_TREE;
7465 if (operand_equal_p (arg01, arg11, 0))
7466 same = arg01, alt0 = arg00, alt1 = arg10;
7467 else if (operand_equal_p (arg00, arg10, 0))
7468 same = arg00, alt0 = arg01, alt1 = arg11;
7469 else if (operand_equal_p (arg00, arg11, 0))
7470 same = arg00, alt0 = arg01, alt1 = arg10;
7471 else if (operand_equal_p (arg01, arg10, 0))
7472 same = arg01, alt0 = arg00, alt1 = arg11;
7474 /* No identical multiplicands; see if we can find a common
7475 power-of-two factor in non-power-of-two multiplies. This
7476 can help in multi-dimensional array access. */
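/* For instance, a * 12 + b * 4 folds to (a * 3 + b) * 4 here, since
   4 is a power of two that divides both multipliers.  */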
7477 else if (host_integerp (arg01, 0)
7478 && host_integerp (arg11, 0))
7480 HOST_WIDE_INT int01, int11, tmp;
7481 bool swap = false;
7482 tree maybe_same;
7483 int01 = TREE_INT_CST_LOW (arg01);
7484 int11 = TREE_INT_CST_LOW (arg11);
7486 /* Move min of absolute values to int11. */
7487 if ((int01 >= 0 ? int01 : -int01)
7488 < (int11 >= 0 ? int11 : -int11))
7490 tmp = int01, int01 = int11, int11 = tmp;
7491 alt0 = arg00, arg00 = arg10, arg10 = alt0;
7492 maybe_same = arg01;
7493 swap = true;
7495 else
7496 maybe_same = arg11;
7498 if (exact_log2 (abs (int11)) > 0 && int01 % int11 == 0)
7500 alt0 = fold_build2 (MULT_EXPR, TREE_TYPE (arg00), arg00,
7501 build_int_cst (TREE_TYPE (arg00),
7502 int01 / int11));
7503 alt1 = arg10;
7504 same = maybe_same;
7505 if (swap)
7506 maybe_same = alt0, alt0 = alt1, alt1 = maybe_same;
7510 if (same)
7511 return fold_build2 (MULT_EXPR, type,
7512 fold_build2 (strip_nv (code), type,
7513 fold_convert (type, alt0),
7514 fold_convert (type, alt1)),
7515 fold_convert (type, same));
7517 return NULL_TREE;
7520 /* Subroutine of native_encode_expr. Encode the INTEGER_CST
7521 specified by EXPR into the buffer PTR of length LEN bytes.
7522 Return the number of bytes placed in the buffer, or zero
7523 upon failure. */
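/* For instance, on a little-endian target with 8-bit bytes, encoding
   the 32-bit INTEGER_CST 0x01020304 produces the bytes
   { 0x04, 0x03, 0x02, 0x01 }; a big-endian target produces them in
   the reverse order.  */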
7525 static int
7526 native_encode_int (const_tree expr, unsigned char *ptr, int len)
7528 tree type = TREE_TYPE (expr);
7529 int total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
7530 int byte, offset, word, words;
7531 unsigned char value;
7533 if (total_bytes > len)
7534 return 0;
7535 words = total_bytes / UNITS_PER_WORD;
7537 for (byte = 0; byte < total_bytes; byte++)
7539 int bitpos = byte * BITS_PER_UNIT;
7540 if (bitpos < HOST_BITS_PER_WIDE_INT)
7541 value = (unsigned char) (TREE_INT_CST_LOW (expr) >> bitpos);
7542 else
7543 value = (unsigned char) (TREE_INT_CST_HIGH (expr)
7544 >> (bitpos - HOST_BITS_PER_WIDE_INT));
7546 if (total_bytes > UNITS_PER_WORD)
7548 word = byte / UNITS_PER_WORD;
7549 if (WORDS_BIG_ENDIAN)
7550 word = (words - 1) - word;
7551 offset = word * UNITS_PER_WORD;
7552 if (BYTES_BIG_ENDIAN)
7553 offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
7554 else
7555 offset += byte % UNITS_PER_WORD;
7557 else
7558 offset = BYTES_BIG_ENDIAN ? (total_bytes - 1) - byte : byte;
7559 ptr[offset] = value;
7561 return total_bytes;
7565 /* Subroutine of native_encode_expr. Encode the REAL_CST
7566 specified by EXPR into the buffer PTR of length LEN bytes.
7567 Return the number of bytes placed in the buffer, or zero
7568 upon failure. */
7570 static int
7571 native_encode_real (const_tree expr, unsigned char *ptr, int len)
7573 tree type = TREE_TYPE (expr);
7574 int total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
7575 int byte, offset, word, words, bitpos;
7576 unsigned char value;
7578 /* There are always 32 bits in each long, no matter the size of
7579      the host's long.  We handle floating point representations with
7580 up to 192 bits. */
7581 long tmp[6];
7583 if (total_bytes > len)
7584 return 0;
7585 words = (32 / BITS_PER_UNIT) / UNITS_PER_WORD;
7587 real_to_target (tmp, TREE_REAL_CST_PTR (expr), TYPE_MODE (type));
7589 for (bitpos = 0; bitpos < total_bytes * BITS_PER_UNIT;
7590 bitpos += BITS_PER_UNIT)
7592 byte = (bitpos / BITS_PER_UNIT) & 3;
7593 value = (unsigned char) (tmp[bitpos / 32] >> (bitpos & 31));
7595 if (UNITS_PER_WORD < 4)
7597 word = byte / UNITS_PER_WORD;
7598 if (WORDS_BIG_ENDIAN)
7599 word = (words - 1) - word;
7600 offset = word * UNITS_PER_WORD;
7601 if (BYTES_BIG_ENDIAN)
7602 offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
7603 else
7604 offset += byte % UNITS_PER_WORD;
7606 else
7607 offset = BYTES_BIG_ENDIAN ? 3 - byte : byte;
7608 ptr[offset + ((bitpos / BITS_PER_UNIT) & ~3)] = value;
7610 return total_bytes;
7613 /* Subroutine of native_encode_expr. Encode the COMPLEX_CST
7614 specified by EXPR into the buffer PTR of length LEN bytes.
7615 Return the number of bytes placed in the buffer, or zero
7616 upon failure. */
7618 static int
7619 native_encode_complex (const_tree expr, unsigned char *ptr, int len)
7621 int rsize, isize;
7622 tree part;
7624 part = TREE_REALPART (expr);
7625 rsize = native_encode_expr (part, ptr, len);
7626 if (rsize == 0)
7627 return 0;
7628 part = TREE_IMAGPART (expr);
7629 isize = native_encode_expr (part, ptr+rsize, len-rsize);
7630 if (isize != rsize)
7631 return 0;
7632 return rsize + isize;
7636 /* Subroutine of native_encode_expr. Encode the VECTOR_CST
7637 specified by EXPR into the buffer PTR of length LEN bytes.
7638 Return the number of bytes placed in the buffer, or zero
7639 upon failure. */
7641 static int
7642 native_encode_vector (const_tree expr, unsigned char *ptr, int len)
7644 int i, size, offset, count;
7645 tree itype, elem, elements;
7647 offset = 0;
7648 elements = TREE_VECTOR_CST_ELTS (expr);
7649 count = TYPE_VECTOR_SUBPARTS (TREE_TYPE (expr));
7650 itype = TREE_TYPE (TREE_TYPE (expr));
7651 size = GET_MODE_SIZE (TYPE_MODE (itype));
7652 for (i = 0; i < count; i++)
7654 if (elements)
7656 elem = TREE_VALUE (elements);
7657 elements = TREE_CHAIN (elements);
7659 else
7660 elem = NULL_TREE;
7662 if (elem)
7664 if (native_encode_expr (elem, ptr+offset, len-offset) != size)
7665 return 0;
7667 else
7669 if (offset + size > len)
7670 return 0;
7671 memset (ptr+offset, 0, size);
7673 offset += size;
7675 return offset;
7679 /* Subroutine of native_encode_expr. Encode the STRING_CST
7680 specified by EXPR into the buffer PTR of length LEN bytes.
7681 Return the number of bytes placed in the buffer, or zero
7682 upon failure. */
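/* For instance, encoding the STRING_CST "ab" (three bytes, as the
   length typically includes the terminating NUL) with type char[8]
   copies those three bytes and zero-fills the remaining five.  */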
7684 static int
7685 native_encode_string (const_tree expr, unsigned char *ptr, int len)
7687 tree type = TREE_TYPE (expr);
7688 HOST_WIDE_INT total_bytes;
7690 if (TREE_CODE (type) != ARRAY_TYPE
7691 || TREE_CODE (TREE_TYPE (type)) != INTEGER_TYPE
7692 || GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (type))) != BITS_PER_UNIT
7693 || !host_integerp (TYPE_SIZE_UNIT (type), 0))
7694 return 0;
7695 total_bytes = tree_low_cst (TYPE_SIZE_UNIT (type), 0);
7696 if (total_bytes > len)
7697 return 0;
7698 if (TREE_STRING_LENGTH (expr) < total_bytes)
7700 memcpy (ptr, TREE_STRING_POINTER (expr), TREE_STRING_LENGTH (expr));
7701 memset (ptr + TREE_STRING_LENGTH (expr), 0,
7702 total_bytes - TREE_STRING_LENGTH (expr));
7704 else
7705 memcpy (ptr, TREE_STRING_POINTER (expr), total_bytes);
7706 return total_bytes;
7710 /* Subroutine of fold_view_convert_expr. Encode the INTEGER_CST,
7711    REAL_CST, COMPLEX_CST, VECTOR_CST or STRING_CST specified by EXPR into the
7712 buffer PTR of length LEN bytes. Return the number of bytes
7713 placed in the buffer, or zero upon failure. */
7715 int
7716 native_encode_expr (const_tree expr, unsigned char *ptr, int len)
7718 switch (TREE_CODE (expr))
7720 case INTEGER_CST:
7721 return native_encode_int (expr, ptr, len);
7723 case REAL_CST:
7724 return native_encode_real (expr, ptr, len);
7726 case COMPLEX_CST:
7727 return native_encode_complex (expr, ptr, len);
7729 case VECTOR_CST:
7730 return native_encode_vector (expr, ptr, len);
7732 case STRING_CST:
7733 return native_encode_string (expr, ptr, len);
7735 default:
7736 return 0;
7741 /* Subroutine of native_interpret_expr. Interpret the contents of
7742 the buffer PTR of length LEN as an INTEGER_CST of type TYPE.
7743 If the buffer cannot be interpreted, return NULL_TREE. */
7745 static tree
7746 native_interpret_int (tree type, const unsigned char *ptr, int len)
7748 int total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
7749 int byte, offset, word, words;
7750 unsigned char value;
7751   unsigned HOST_WIDE_INT lo = 0;
7752 HOST_WIDE_INT hi = 0;
7754 if (total_bytes > len)
7755 return NULL_TREE;
7756 if (total_bytes * BITS_PER_UNIT > 2 * HOST_BITS_PER_WIDE_INT)
7757 return NULL_TREE;
7758 words = total_bytes / UNITS_PER_WORD;
7760 for (byte = 0; byte < total_bytes; byte++)
7762 int bitpos = byte * BITS_PER_UNIT;
7763 if (total_bytes > UNITS_PER_WORD)
7765 word = byte / UNITS_PER_WORD;
7766 if (WORDS_BIG_ENDIAN)
7767 word = (words - 1) - word;
7768 offset = word * UNITS_PER_WORD;
7769 if (BYTES_BIG_ENDIAN)
7770 offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
7771 else
7772 offset += byte % UNITS_PER_WORD;
7774 else
7775 offset = BYTES_BIG_ENDIAN ? (total_bytes - 1) - byte : byte;
7776 value = ptr[offset];
7778 if (bitpos < HOST_BITS_PER_WIDE_INT)
7779 lo |= (unsigned HOST_WIDE_INT) value << bitpos;
7780 else
7781 hi |= (unsigned HOST_WIDE_INT) value
7782 << (bitpos - HOST_BITS_PER_WIDE_INT);
7785 return build_int_cst_wide_type (type, lo, hi);
7789 /* Subroutine of native_interpret_expr. Interpret the contents of
7790 the buffer PTR of length LEN as a REAL_CST of type TYPE.
7791 If the buffer cannot be interpreted, return NULL_TREE. */
7793 static tree
7794 native_interpret_real (tree type, const unsigned char *ptr, int len)
7796 enum machine_mode mode = TYPE_MODE (type);
7797 int total_bytes = GET_MODE_SIZE (mode);
7798 int byte, offset, word, words, bitpos;
7799 unsigned char value;
7800 /* There are always 32 bits in each long, no matter the size of
7801      the host's long.  We handle floating point representations with
7802 up to 192 bits. */
7803 REAL_VALUE_TYPE r;
7804 long tmp[6];
7806 total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
7807 if (total_bytes > len || total_bytes > 24)
7808 return NULL_TREE;
7809 words = (32 / BITS_PER_UNIT) / UNITS_PER_WORD;
7811 memset (tmp, 0, sizeof (tmp));
7812 for (bitpos = 0; bitpos < total_bytes * BITS_PER_UNIT;
7813 bitpos += BITS_PER_UNIT)
7815 byte = (bitpos / BITS_PER_UNIT) & 3;
7816 if (UNITS_PER_WORD < 4)
7818 word = byte / UNITS_PER_WORD;
7819 if (WORDS_BIG_ENDIAN)
7820 word = (words - 1) - word;
7821 offset = word * UNITS_PER_WORD;
7822 if (BYTES_BIG_ENDIAN)
7823 offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
7824 else
7825 offset += byte % UNITS_PER_WORD;
7827 else
7828 offset = BYTES_BIG_ENDIAN ? 3 - byte : byte;
7829 value = ptr[offset + ((bitpos / BITS_PER_UNIT) & ~3)];
7831 tmp[bitpos / 32] |= (unsigned long)value << (bitpos & 31);
7834 real_from_target (&r, tmp, mode);
7835 return build_real (type, r);
7839 /* Subroutine of native_interpret_expr. Interpret the contents of
7840 the buffer PTR of length LEN as a COMPLEX_CST of type TYPE.
7841 If the buffer cannot be interpreted, return NULL_TREE. */
7843 static tree
7844 native_interpret_complex (tree type, const unsigned char *ptr, int len)
7846 tree etype, rpart, ipart;
7847 int size;
7849 etype = TREE_TYPE (type);
7850 size = GET_MODE_SIZE (TYPE_MODE (etype));
7851 if (size * 2 > len)
7852 return NULL_TREE;
7853 rpart = native_interpret_expr (etype, ptr, size);
7854 if (!rpart)
7855 return NULL_TREE;
7856 ipart = native_interpret_expr (etype, ptr+size, size);
7857 if (!ipart)
7858 return NULL_TREE;
7859 return build_complex (type, rpart, ipart);
7863 /* Subroutine of native_interpret_expr. Interpret the contents of
7864 the buffer PTR of length LEN as a VECTOR_CST of type TYPE.
7865 If the buffer cannot be interpreted, return NULL_TREE. */
7867 static tree
7868 native_interpret_vector (tree type, const unsigned char *ptr, int len)
7870 tree etype, elem, elements;
7871 int i, size, count;
7873 etype = TREE_TYPE (type);
7874 size = GET_MODE_SIZE (TYPE_MODE (etype));
7875 count = TYPE_VECTOR_SUBPARTS (type);
7876 if (size * count > len)
7877 return NULL_TREE;
7879 elements = NULL_TREE;
7880 for (i = count - 1; i >= 0; i--)
7882 elem = native_interpret_expr (etype, ptr+(i*size), size);
7883 if (!elem)
7884 return NULL_TREE;
7885 elements = tree_cons (NULL_TREE, elem, elements);
7887 return build_vector (type, elements);
7891 /* Subroutine of fold_view_convert_expr. Interpret the contents of
7892 the buffer PTR of length LEN as a constant of type TYPE. For
7893 INTEGRAL_TYPE_P we return an INTEGER_CST, for SCALAR_FLOAT_TYPE_P
7894 we return a REAL_CST, etc... If the buffer cannot be interpreted,
7895 return NULL_TREE. */
7897 tree
7898 native_interpret_expr (tree type, const unsigned char *ptr, int len)
7900 switch (TREE_CODE (type))
7902 case INTEGER_TYPE:
7903 case ENUMERAL_TYPE:
7904 case BOOLEAN_TYPE:
7905 return native_interpret_int (type, ptr, len);
7907 case REAL_TYPE:
7908 return native_interpret_real (type, ptr, len);
7910 case COMPLEX_TYPE:
7911 return native_interpret_complex (type, ptr, len);
7913 case VECTOR_TYPE:
7914 return native_interpret_vector (type, ptr, len);
7916 default:
7917 return NULL_TREE;
7922 /* Fold a VIEW_CONVERT_EXPR of a constant expression EXPR to type
7923 TYPE at compile-time. If we're unable to perform the conversion
7924 return NULL_TREE. */
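/* For instance, on a target using IEEE single precision floats,
   VIEW_CONVERT_EXPR<float>(0x3f800000) folds to 1.0f: the integer is
   encoded into its target byte representation and those bytes are
   interpreted back as a float.  */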
7926 static tree
7927 fold_view_convert_expr (tree type, tree expr)
7929 /* We support up to 512-bit values (for V8DFmode). */
7930 unsigned char buffer[64];
7931 int len;
7933 /* Check that the host and target are sane. */
7934 if (CHAR_BIT != 8 || BITS_PER_UNIT != 8)
7935 return NULL_TREE;
7937 len = native_encode_expr (expr, buffer, sizeof (buffer));
7938 if (len == 0)
7939 return NULL_TREE;
7941 return native_interpret_expr (type, buffer, len);
7944 /* Build an expression for the address of T. Folds away INDIRECT_REF
7945 to avoid confusing the gimplify process. When IN_FOLD is true
7946 avoid modifications of T. */
7948 static tree
7949 build_fold_addr_expr_with_type_1 (tree t, tree ptrtype, bool in_fold)
7951 /* The size of the object is not relevant when talking about its address. */
7952 if (TREE_CODE (t) == WITH_SIZE_EXPR)
7953 t = TREE_OPERAND (t, 0);
7955   /* Note: this doesn't apply to ALIGN_INDIRECT_REF.  */
7956 if (TREE_CODE (t) == INDIRECT_REF
7957 || TREE_CODE (t) == MISALIGNED_INDIRECT_REF)
7959 t = TREE_OPERAND (t, 0);
7961 if (TREE_TYPE (t) != ptrtype)
7962 t = build1 (NOP_EXPR, ptrtype, t);
7964 else if (!in_fold)
7966 tree base = t;
7968 while (handled_component_p (base))
7969 base = TREE_OPERAND (base, 0);
7971 if (DECL_P (base))
7972 TREE_ADDRESSABLE (base) = 1;
7974 t = build1 (ADDR_EXPR, ptrtype, t);
7976 else
7977 t = build1 (ADDR_EXPR, ptrtype, t);
7979 return t;
7982 /* Build an expression for the address of T with type PTRTYPE. This
7983 function modifies the input parameter 'T' by sometimes setting the
7984 TREE_ADDRESSABLE flag. */
7986 tree
7987 build_fold_addr_expr_with_type (tree t, tree ptrtype)
7989 return build_fold_addr_expr_with_type_1 (t, ptrtype, false);
7992 /* Build an expression for the address of T. This function modifies
7993 the input parameter 'T' by sometimes setting the TREE_ADDRESSABLE
7994 flag. When called from fold functions, use fold_addr_expr instead. */
7996 tree
7997 build_fold_addr_expr (tree t)
7999 return build_fold_addr_expr_with_type_1 (t,
8000 build_pointer_type (TREE_TYPE (t)),
8001 false);
8004 /* Same as build_fold_addr_expr, builds an expression for the address
8005 of T, but avoids touching the input node 't'. Fold functions
8006 should use this version. */
8008 static tree
8009 fold_addr_expr (tree t)
8011 tree ptrtype = build_pointer_type (TREE_TYPE (t));
8013 return build_fold_addr_expr_with_type_1 (t, ptrtype, true);
8016 /* Fold a unary expression of code CODE and type TYPE with operand
8017 OP0. Return the folded expression if folding is successful.
8018 Otherwise, return NULL_TREE. */
8020 tree
8021 fold_unary (enum tree_code code, tree type, tree op0)
8023 tree tem;
8024 tree arg0;
8025 enum tree_code_class kind = TREE_CODE_CLASS (code);
8027 gcc_assert (IS_EXPR_CODE_CLASS (kind)
8028 && TREE_CODE_LENGTH (code) == 1);
8030 arg0 = op0;
8031 if (arg0)
8033 if (CONVERT_EXPR_CODE_P (code)
8034 || code == FLOAT_EXPR || code == ABS_EXPR)
8036 /* Don't use STRIP_NOPS, because signedness of argument type
8037 matters. */
8038 STRIP_SIGN_NOPS (arg0);
8040 else
8042 /* Strip any conversions that don't change the mode. This
8043 is safe for every expression, except for a comparison
8044 expression because its signedness is derived from its
8045 operands.
8047 Note that this is done as an internal manipulation within
8048 the constant folder, in order to find the simplest
8049 representation of the arguments so that their form can be
8050 	     studied.  In any case, the appropriate type conversions
8051 should be put back in the tree that will get out of the
8052 constant folder. */
8053 STRIP_NOPS (arg0);
8057 if (TREE_CODE_CLASS (code) == tcc_unary)
8059 if (TREE_CODE (arg0) == COMPOUND_EXPR)
8060 return build2 (COMPOUND_EXPR, type, TREE_OPERAND (arg0, 0),
8061 fold_build1 (code, type,
8062 fold_convert (TREE_TYPE (op0),
8063 TREE_OPERAND (arg0, 1))));
8064 else if (TREE_CODE (arg0) == COND_EXPR)
8066 tree arg01 = TREE_OPERAND (arg0, 1);
8067 tree arg02 = TREE_OPERAND (arg0, 2);
8068 if (! VOID_TYPE_P (TREE_TYPE (arg01)))
8069 arg01 = fold_build1 (code, type,
8070 fold_convert (TREE_TYPE (op0), arg01));
8071 if (! VOID_TYPE_P (TREE_TYPE (arg02)))
8072 arg02 = fold_build1 (code, type,
8073 fold_convert (TREE_TYPE (op0), arg02));
8074 tem = fold_build3 (COND_EXPR, type, TREE_OPERAND (arg0, 0),
8075 arg01, arg02);
8077 /* If this was a conversion, and all we did was to move into
8078 inside the COND_EXPR, bring it back out. But leave it if
8079 it is a conversion from integer to integer and the
8080 result precision is no wider than a word since such a
8081 conversion is cheap and may be optimized away by combine,
8082 while it couldn't if it were outside the COND_EXPR. Then return
8083 so we don't get into an infinite recursion loop taking the
8084 conversion out and then back in. */
8086 if ((CONVERT_EXPR_CODE_P (code)
8087 || code == NON_LVALUE_EXPR)
8088 && TREE_CODE (tem) == COND_EXPR
8089 && TREE_CODE (TREE_OPERAND (tem, 1)) == code
8090 && TREE_CODE (TREE_OPERAND (tem, 2)) == code
8091 && ! VOID_TYPE_P (TREE_OPERAND (tem, 1))
8092 && ! VOID_TYPE_P (TREE_OPERAND (tem, 2))
8093 && (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 1), 0))
8094 == TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 2), 0)))
8095 && (! (INTEGRAL_TYPE_P (TREE_TYPE (tem))
8096 && (INTEGRAL_TYPE_P
8097 (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 1), 0))))
8098 && TYPE_PRECISION (TREE_TYPE (tem)) <= BITS_PER_WORD)
8099 || flag_syntax_only))
8100 tem = build1 (code, type,
8101 build3 (COND_EXPR,
8102 TREE_TYPE (TREE_OPERAND
8103 (TREE_OPERAND (tem, 1), 0)),
8104 TREE_OPERAND (tem, 0),
8105 TREE_OPERAND (TREE_OPERAND (tem, 1), 0),
8106 TREE_OPERAND (TREE_OPERAND (tem, 2), 0)));
8107 return tem;
8109 else if (COMPARISON_CLASS_P (arg0))
8111 if (TREE_CODE (type) == BOOLEAN_TYPE)
8113 arg0 = copy_node (arg0);
8114 TREE_TYPE (arg0) = type;
8115 return arg0;
8117 else if (TREE_CODE (type) != INTEGER_TYPE)
8118 return fold_build3 (COND_EXPR, type, arg0,
8119 fold_build1 (code, type,
8120 integer_one_node),
8121 fold_build1 (code, type,
8122 integer_zero_node));
8126 switch (code)
8128 case PAREN_EXPR:
8129 /* Re-association barriers around constants and other re-association
8130 barriers can be removed. */
8131 if (CONSTANT_CLASS_P (op0)
8132 || TREE_CODE (op0) == PAREN_EXPR)
8133 return fold_convert (type, op0);
8134 return NULL_TREE;
8136 CASE_CONVERT:
8137 case FLOAT_EXPR:
8138 case FIX_TRUNC_EXPR:
8139 if (TREE_TYPE (op0) == type)
8140 return op0;
8142 /* If we have (type) (a CMP b) and type is an integral type, return
8143 new expression involving the new type. */
8144 if (COMPARISON_CLASS_P (op0) && INTEGRAL_TYPE_P (type))
8145 return fold_build2 (TREE_CODE (op0), type, TREE_OPERAND (op0, 0),
8146 TREE_OPERAND (op0, 1));
8148 /* Handle cases of two conversions in a row. */
8149 if (CONVERT_EXPR_P (op0))
8151 tree inside_type = TREE_TYPE (TREE_OPERAND (op0, 0));
8152 tree inter_type = TREE_TYPE (op0);
8153 int inside_int = INTEGRAL_TYPE_P (inside_type);
8154 int inside_ptr = POINTER_TYPE_P (inside_type);
8155 int inside_float = FLOAT_TYPE_P (inside_type);
8156 int inside_vec = TREE_CODE (inside_type) == VECTOR_TYPE;
8157 unsigned int inside_prec = TYPE_PRECISION (inside_type);
8158 int inside_unsignedp = TYPE_UNSIGNED (inside_type);
8159 int inter_int = INTEGRAL_TYPE_P (inter_type);
8160 int inter_ptr = POINTER_TYPE_P (inter_type);
8161 int inter_float = FLOAT_TYPE_P (inter_type);
8162 int inter_vec = TREE_CODE (inter_type) == VECTOR_TYPE;
8163 unsigned int inter_prec = TYPE_PRECISION (inter_type);
8164 int inter_unsignedp = TYPE_UNSIGNED (inter_type);
8165 int final_int = INTEGRAL_TYPE_P (type);
8166 int final_ptr = POINTER_TYPE_P (type);
8167 int final_float = FLOAT_TYPE_P (type);
8168 int final_vec = TREE_CODE (type) == VECTOR_TYPE;
8169 unsigned int final_prec = TYPE_PRECISION (type);
8170 int final_unsignedp = TYPE_UNSIGNED (type);
8172 /* In addition to the cases of two conversions in a row
8173 handled below, if we are converting something to its own
8174 type via an object of identical or wider precision, neither
8175 conversion is needed. */
8176 if (TYPE_MAIN_VARIANT (inside_type) == TYPE_MAIN_VARIANT (type)
8177 && (((inter_int || inter_ptr) && final_int)
8178 || (inter_float && final_float))
8179 && inter_prec >= final_prec)
8180 return fold_build1 (code, type, TREE_OPERAND (op0, 0));
8182 /* Likewise, if the intermediate and final types are either both
8183 float or both integer, we don't need the middle conversion if
8184 it is wider than the final type and doesn't change the signedness
8185 (for integers). Avoid this if the final type is a pointer
8186 since then we sometimes need the inner conversion. Likewise if
8187 the outer has a precision not equal to the size of its mode. */
8188 if (((inter_int && inside_int)
8189 || (inter_float && inside_float)
8190 || (inter_vec && inside_vec))
8191 && inter_prec >= inside_prec
8192 && (inter_float || inter_vec
8193 || inter_unsignedp == inside_unsignedp)
8194 && ! (final_prec != GET_MODE_BITSIZE (TYPE_MODE (type))
8195 && TYPE_MODE (type) == TYPE_MODE (inter_type))
8196 && ! final_ptr
8197 && (! final_vec || inter_prec == inside_prec))
8198 return fold_build1 (code, type, TREE_OPERAND (op0, 0));
8200 /* If we have a sign-extension of a zero-extended value, we can
8201 replace that by a single zero-extension. */
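/* For instance, with unsigned short us, (long long)(int)us is
   equivalent to a direct zero-extension of us to long long, as the
   zero-extended intermediate value can never have its sign bit
   set.  */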
8202 if (inside_int && inter_int && final_int
8203 && inside_prec < inter_prec && inter_prec < final_prec
8204 && inside_unsignedp && !inter_unsignedp)
8205 return fold_build1 (code, type, TREE_OPERAND (op0, 0));
8207 /* Two conversions in a row are not needed unless:
8208 - some conversion is floating-point (overstrict for now), or
8209 - some conversion is a vector (overstrict for now), or
8210 - the intermediate type is narrower than both initial and
8211 final, or
8212 - the intermediate type and innermost type differ in signedness,
8213 and the outermost type is wider than the intermediate, or
8214 - the initial type is a pointer type and the precisions of the
8215 intermediate and final types differ, or
8216 - the final type is a pointer type and the precisions of the
8217 initial and intermediate types differ. */
8218 if (! inside_float && ! inter_float && ! final_float
8219 && ! inside_vec && ! inter_vec && ! final_vec
8220 && (inter_prec >= inside_prec || inter_prec >= final_prec)
8221 && ! (inside_int && inter_int
8222 && inter_unsignedp != inside_unsignedp
8223 && inter_prec < final_prec)
8224 && ((inter_unsignedp && inter_prec > inside_prec)
8225 == (final_unsignedp && final_prec > inter_prec))
8226 && ! (inside_ptr && inter_prec != final_prec)
8227 && ! (final_ptr && inside_prec != inter_prec)
8228 && ! (final_prec != GET_MODE_BITSIZE (TYPE_MODE (type))
8229 && TYPE_MODE (type) == TYPE_MODE (inter_type)))
8230 return fold_build1 (code, type, TREE_OPERAND (op0, 0));
8233 /* Handle (T *)&A.B.C for A being of type T and B and C
8234 living at offset zero. This occurs frequently in
8235 C++ upcasting and then accessing the base. */
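/* For instance, with struct B { struct A a; } b; the expression
   (struct B *)&b.a folds to &b, because member a lives at offset
   zero and the base object b already has type struct B.  */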
8236 if (TREE_CODE (op0) == ADDR_EXPR
8237 && POINTER_TYPE_P (type)
8238 && handled_component_p (TREE_OPERAND (op0, 0)))
8240 HOST_WIDE_INT bitsize, bitpos;
8241 tree offset;
8242 enum machine_mode mode;
8243 int unsignedp, volatilep;
8244 tree base = TREE_OPERAND (op0, 0);
8245 base = get_inner_reference (base, &bitsize, &bitpos, &offset,
8246 &mode, &unsignedp, &volatilep, false);
8247 /* If the reference was to a (constant) zero offset, we can use
8248 the address of the base if it has the same base type
8249 as the result type. */
8250 if (! offset && bitpos == 0
8251 && TYPE_MAIN_VARIANT (TREE_TYPE (type))
8252 == TYPE_MAIN_VARIANT (TREE_TYPE (base)))
8253 return fold_convert (type, fold_addr_expr (base));
8256 if (TREE_CODE (op0) == MODIFY_EXPR
8257 && TREE_CONSTANT (TREE_OPERAND (op0, 1))
8258 /* Detect assigning a bitfield. */
8259 && !(TREE_CODE (TREE_OPERAND (op0, 0)) == COMPONENT_REF
8260 && DECL_BIT_FIELD
8261 (TREE_OPERAND (TREE_OPERAND (op0, 0), 1))))
8263 /* Don't leave an assignment inside a conversion
8264 unless assigning a bitfield. */
8265 tem = fold_build1 (code, type, TREE_OPERAND (op0, 1));
8266 /* First do the assignment, then return converted constant. */
8267 tem = build2 (COMPOUND_EXPR, TREE_TYPE (tem), op0, tem);
8268 TREE_NO_WARNING (tem) = 1;
8269 TREE_USED (tem) = 1;
8270 return tem;
8273 /* Convert (T)(x & c) into (T)x & (T)c, if c is an integer
8274 constants (if x has signed type, the sign bit cannot be set
8275 in c). This folds extension into the BIT_AND_EXPR.
8276 ??? We don't do it for BOOLEAN_TYPE or ENUMERAL_TYPE because they
8277 very likely don't have maximal range for their precision and this
8278 transformation effectively doesn't preserve non-maximal ranges. */
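/* For instance, with unsigned short us, (unsigned int)(us & 0xff)
   folds to (unsigned int)us & 0xff, performing the masking in the
   wider type.  */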
8279 if (TREE_CODE (type) == INTEGER_TYPE
8280 && TREE_CODE (op0) == BIT_AND_EXPR
8281 && TREE_CODE (TREE_OPERAND (op0, 1)) == INTEGER_CST
8282 /* Not if the conversion is to the sub-type. */
8283 && TREE_TYPE (type) != TREE_TYPE (op0))
8285 tree and = op0;
8286 tree and0 = TREE_OPERAND (and, 0), and1 = TREE_OPERAND (and, 1);
8287 int change = 0;
8289 if (TYPE_UNSIGNED (TREE_TYPE (and))
8290 || (TYPE_PRECISION (type)
8291 <= TYPE_PRECISION (TREE_TYPE (and))))
8292 change = 1;
8293 else if (TYPE_PRECISION (TREE_TYPE (and1))
8294 <= HOST_BITS_PER_WIDE_INT
8295 && host_integerp (and1, 1))
8297 unsigned HOST_WIDE_INT cst;
8299 cst = tree_low_cst (and1, 1);
8300 cst &= (HOST_WIDE_INT) -1
8301 << (TYPE_PRECISION (TREE_TYPE (and1)) - 1);
8302 change = (cst == 0);
8303 #ifdef LOAD_EXTEND_OP
8304 if (change
8305 && !flag_syntax_only
8306 && (LOAD_EXTEND_OP (TYPE_MODE (TREE_TYPE (and0)))
8307 == ZERO_EXTEND))
8309 tree uns = unsigned_type_for (TREE_TYPE (and0));
8310 and0 = fold_convert (uns, and0);
8311 and1 = fold_convert (uns, and1);
8313 #endif
8315 if (change)
8317 tem = force_fit_type_double (type, TREE_INT_CST_LOW (and1),
8318 TREE_INT_CST_HIGH (and1), 0,
8319 TREE_OVERFLOW (and1));
8320 return fold_build2 (BIT_AND_EXPR, type,
8321 fold_convert (type, and0), tem);
8325 /* Convert (T1)(X p+ Y) into ((T1)X p+ Y), for pointer type. */
8326 if (POINTER_TYPE_P (type)
8327 && POINTER_PLUS_EXPR_P (arg0))
8329 tree arg00 = TREE_OPERAND (arg0, 0);
8330 tree arg01 = TREE_OPERAND (arg0, 1);
8332 return fold_build2 (TREE_CODE (arg0), type,
8333 fold_convert (type, arg00), arg01);
8336 /* Convert (T1)(~(T2)X) into ~(T1)X if T1 and T2 are integral types
8337 of the same precision, and X is an integer type not narrower than
8338 types T1 or T2, i.e. the cast (T2)X isn't an extension. */
8339 if (INTEGRAL_TYPE_P (type)
8340 && TREE_CODE (op0) == BIT_NOT_EXPR
8341 && INTEGRAL_TYPE_P (TREE_TYPE (op0))
8342 && CONVERT_EXPR_P (TREE_OPERAND (op0, 0))
8343 && TYPE_PRECISION (type) == TYPE_PRECISION (TREE_TYPE (op0)))
8345 tem = TREE_OPERAND (TREE_OPERAND (op0, 0), 0);
8346 if (INTEGRAL_TYPE_P (TREE_TYPE (tem))
8347 && TYPE_PRECISION (type) <= TYPE_PRECISION (TREE_TYPE (tem)))
8348 return fold_build1 (BIT_NOT_EXPR, type, fold_convert (type, tem));
8351 /* Convert (T1)(X * Y) into (T1)X * (T1)Y if T1 is narrower than the
8352 type of X and Y (integer types only). */
8353 if (INTEGRAL_TYPE_P (type)
8354 && MULT_EXPR_P (op0)
8355 && INTEGRAL_TYPE_P (TREE_TYPE (op0))
8356 && TYPE_PRECISION (type) < TYPE_PRECISION (TREE_TYPE (op0)))
8358 /* Be careful to fall back to MULT_EXPR from MULTNV_EXPR. */
8359 return fold_build2 (MULT_EXPR, type,
8360 fold_convert (type,
8361 TREE_OPERAND (op0, 0)),
8362 fold_convert (type,
8363 TREE_OPERAND (op0, 1)));
8366 tem = fold_convert_const (code, type, op0);
8367 return tem ? tem : NULL_TREE;
8369 case FIXED_CONVERT_EXPR:
8370 tem = fold_convert_const (code, type, arg0);
8371 return tem ? tem : NULL_TREE;
8373 case VIEW_CONVERT_EXPR:
8374 if (TREE_TYPE (op0) == type)
8375 return op0;
8376 if (TREE_CODE (op0) == VIEW_CONVERT_EXPR)
8377 return fold_build1 (VIEW_CONVERT_EXPR, type, TREE_OPERAND (op0, 0));
8379 /* For integral conversions with the same precision or pointer
8380 conversions use a NOP_EXPR instead. */
8381 if ((INTEGRAL_TYPE_P (type)
8382 || POINTER_TYPE_P (type))
8383 && (INTEGRAL_TYPE_P (TREE_TYPE (op0))
8384 || POINTER_TYPE_P (TREE_TYPE (op0)))
8385 && TYPE_PRECISION (type) == TYPE_PRECISION (TREE_TYPE (op0))
8386 /* Do not muck with VIEW_CONVERT_EXPRs that convert from
8387 a sub-type to its base type as generated by the Ada FE. */
8388 && !(INTEGRAL_TYPE_P (TREE_TYPE (op0))
8389 && TREE_TYPE (TREE_TYPE (op0))))
8390 return fold_convert (type, op0);
8392 /* Strip inner integral conversions that do not change the precision. */
8393 if (CONVERT_EXPR_P (op0)
8394 && (INTEGRAL_TYPE_P (TREE_TYPE (op0))
8395 || POINTER_TYPE_P (TREE_TYPE (op0)))
8396 && (INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (op0, 0)))
8397 || POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (op0, 0))))
8398 && (TYPE_PRECISION (TREE_TYPE (op0))
8399 == TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (op0, 0)))))
8400 return fold_build1 (VIEW_CONVERT_EXPR, type, TREE_OPERAND (op0, 0));
8402 return fold_view_convert_expr (type, op0);
8404 case NEGATE_EXPR:
8405 case NEGATENV_EXPR:
8406 tem = fold_negate_expr (arg0);
8407 if (tem)
8408 return fold_convert (type, tem);
8409 return NULL_TREE;
8411 case ABS_EXPR:
8412 if (TREE_CODE (arg0) == INTEGER_CST || TREE_CODE (arg0) == REAL_CST)
8413 return fold_abs_const (arg0, type);
8414 else if (NEGATE_EXPR_P (arg0))
8415 return fold_build1 (ABS_EXPR, type, TREE_OPERAND (arg0, 0));
8416 /* Convert fabs((double)float) into (double)fabsf(float). */
8417 else if (TREE_CODE (arg0) == NOP_EXPR
8418 && TREE_CODE (type) == REAL_TYPE)
8420 tree targ0 = strip_float_extensions (arg0);
8421 if (targ0 != arg0)
8422 return fold_convert (type, fold_build1 (ABS_EXPR,
8423 TREE_TYPE (targ0),
8424 targ0));
8426 /* ABS_EXPR<ABS_EXPR<x>> = ABS_EXPR<x> even if flag_wrapv is on. */
8427 else if (TREE_CODE (arg0) == ABS_EXPR)
8428 return arg0;
8429 else if (tree_expr_nonnegative_p (arg0))
8430 return arg0;
8432 /* Strip sign ops from argument. */
8433 if (TREE_CODE (type) == REAL_TYPE)
8435 tem = fold_strip_sign_ops (arg0);
8436 if (tem)
8437 return fold_build1 (ABS_EXPR, type, fold_convert (type, tem));
8439 return NULL_TREE;
8441 case CONJ_EXPR:
8442 if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
8443 return fold_convert (type, arg0);
8444 if (TREE_CODE (arg0) == COMPLEX_EXPR)
8446 tree itype = TREE_TYPE (type);
8447 tree rpart = fold_convert (itype, TREE_OPERAND (arg0, 0));
8448 tree ipart = fold_convert (itype, TREE_OPERAND (arg0, 1));
8449 return fold_build2 (COMPLEX_EXPR, type, rpart, negate_expr (ipart));
8451 if (TREE_CODE (arg0) == COMPLEX_CST)
8453 tree itype = TREE_TYPE (type);
8454 tree rpart = fold_convert (itype, TREE_REALPART (arg0));
8455 tree ipart = fold_convert (itype, TREE_IMAGPART (arg0));
8456 return build_complex (type, rpart, negate_expr (ipart));
8458 if (TREE_CODE (arg0) == CONJ_EXPR)
8459 return fold_convert (type, TREE_OPERAND (arg0, 0));
8460 return NULL_TREE;
8462 case BIT_NOT_EXPR:
8463 if (TREE_CODE (arg0) == INTEGER_CST)
8464 return fold_not_const (arg0, type);
8465 else if (TREE_CODE (arg0) == BIT_NOT_EXPR)
8466 return fold_convert (type, TREE_OPERAND (arg0, 0));
8467 /* Convert ~ (-A) to A - 1. */
8468 else if (INTEGRAL_TYPE_P (type) && TREE_CODE (arg0) == NEGATE_EXPR)
8469 return fold_build2 (MINUS_EXPR, type,
8470 fold_convert (type, TREE_OPERAND (arg0, 0)),
8471 build_int_cst (type, 1));
8472 /* Convert ~ (A - 1) or ~ (A + -1) to -A. */
8473 else if (INTEGRAL_TYPE_P (type)
8474 && ((TREE_CODE (arg0) == MINUS_EXPR
8475 && integer_onep (TREE_OPERAND (arg0, 1)))
8476 || (TREE_CODE (arg0) == PLUS_EXPR
8477 && integer_all_onesp (TREE_OPERAND (arg0, 1)))))
8478 return fold_build1 (NEGATE_EXPR, type,
8479 fold_convert (type, TREE_OPERAND (arg0, 0)));
8480 /* Convert ~(X ^ Y) to ~X ^ Y or X ^ ~Y if ~X or ~Y simplify. */
8481 else if (TREE_CODE (arg0) == BIT_XOR_EXPR
8482 && (tem = fold_unary (BIT_NOT_EXPR, type,
8483 fold_convert (type,
8484 TREE_OPERAND (arg0, 0)))))
8485 return fold_build2 (BIT_XOR_EXPR, type, tem,
8486 fold_convert (type, TREE_OPERAND (arg0, 1)));
8487 else if (TREE_CODE (arg0) == BIT_XOR_EXPR
8488 && (tem = fold_unary (BIT_NOT_EXPR, type,
8489 fold_convert (type,
8490 TREE_OPERAND (arg0, 1)))))
8491 return fold_build2 (BIT_XOR_EXPR, type,
8492 fold_convert (type, TREE_OPERAND (arg0, 0)), tem);
8493 /* Perform BIT_NOT_EXPR on each element individually. */
8494 else if (TREE_CODE (arg0) == VECTOR_CST)
8496 tree elements = TREE_VECTOR_CST_ELTS (arg0), elem, list = NULL_TREE;
8497 int count = TYPE_VECTOR_SUBPARTS (type), i;
8499 for (i = 0; i < count; i++)
8501 if (elements)
8503 elem = TREE_VALUE (elements);
8504 elem = fold_unary (BIT_NOT_EXPR, TREE_TYPE (type), elem);
8505 if (elem == NULL_TREE)
8506 break;
8507 elements = TREE_CHAIN (elements);
8509 else
8510 elem = build_int_cst (TREE_TYPE (type), -1);
8511 list = tree_cons (NULL_TREE, elem, list);
8513 if (i == count)
8514 return build_vector (type, nreverse (list));
8517 return NULL_TREE;
8519 case TRUTH_NOT_EXPR:
8520 /* The argument to invert_truthvalue must have Boolean type. */
8521 if (TREE_CODE (TREE_TYPE (arg0)) != BOOLEAN_TYPE)
8522 arg0 = fold_convert (boolean_type_node, arg0);
8524 /* Note that the operand of this must be an int
8525 and its values must be 0 or 1.
8526 ("true" is a fixed value perhaps depending on the language,
8527 but we don't handle values other than 1 correctly yet.) */
8528 tem = fold_truth_not_expr (arg0);
8529 if (!tem)
8530 return NULL_TREE;
8531 return fold_convert (type, tem);
8533 case REALPART_EXPR:
8534 if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
8535 return fold_convert (type, arg0);
8536 if (TREE_CODE (arg0) == COMPLEX_EXPR)
8537 return omit_one_operand (type, TREE_OPERAND (arg0, 0),
8538 TREE_OPERAND (arg0, 1));
8539 if (TREE_CODE (arg0) == COMPLEX_CST)
8540 return fold_convert (type, TREE_REALPART (arg0));
8541 if (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
8543 tree itype = TREE_TYPE (TREE_TYPE (arg0));
8544 tem = fold_build2 (TREE_CODE (arg0), itype,
8545 fold_build1 (REALPART_EXPR, itype,
8546 TREE_OPERAND (arg0, 0)),
8547 fold_build1 (REALPART_EXPR, itype,
8548 TREE_OPERAND (arg0, 1)));
8549 return fold_convert (type, tem);
8551 if (TREE_CODE (arg0) == CONJ_EXPR)
8553 tree itype = TREE_TYPE (TREE_TYPE (arg0));
8554 tem = fold_build1 (REALPART_EXPR, itype, TREE_OPERAND (arg0, 0));
8555 return fold_convert (type, tem);
8557 if (TREE_CODE (arg0) == CALL_EXPR)
8559 tree fn = get_callee_fndecl (arg0);
8560 if (fn && DECL_BUILT_IN_CLASS (fn) == BUILT_IN_NORMAL)
8561 switch (DECL_FUNCTION_CODE (fn))
8563 CASE_FLT_FN (BUILT_IN_CEXPI):
8564 fn = mathfn_built_in (type, BUILT_IN_COS);
8565 if (fn)
8566 return build_call_expr (fn, 1, CALL_EXPR_ARG (arg0, 0));
8567 break;
8569 default:
8570 break;
8573 return NULL_TREE;
8575 case IMAGPART_EXPR:
8576 if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
8577 return fold_convert (type, integer_zero_node);
8578 if (TREE_CODE (arg0) == COMPLEX_EXPR)
8579 return omit_one_operand (type, TREE_OPERAND (arg0, 1),
8580 TREE_OPERAND (arg0, 0));
8581 if (TREE_CODE (arg0) == COMPLEX_CST)
8582 return fold_convert (type, TREE_IMAGPART (arg0));
8583 if (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
8585 tree itype = TREE_TYPE (TREE_TYPE (arg0));
8586 tem = fold_build2 (TREE_CODE (arg0), itype,
8587 fold_build1 (IMAGPART_EXPR, itype,
8588 TREE_OPERAND (arg0, 0)),
8589 fold_build1 (IMAGPART_EXPR, itype,
8590 TREE_OPERAND (arg0, 1)));
8591 return fold_convert (type, tem);
8593 if (TREE_CODE (arg0) == CONJ_EXPR)
8595 tree itype = TREE_TYPE (TREE_TYPE (arg0));
8596 tem = fold_build1 (IMAGPART_EXPR, itype, TREE_OPERAND (arg0, 0));
8597 return fold_convert (type, negate_expr (tem));
8599 if (TREE_CODE (arg0) == CALL_EXPR)
8601 tree fn = get_callee_fndecl (arg0);
8602 if (fn && DECL_BUILT_IN_CLASS (fn) == BUILT_IN_NORMAL)
8603 switch (DECL_FUNCTION_CODE (fn))
8605 CASE_FLT_FN (BUILT_IN_CEXPI):
8606 fn = mathfn_built_in (type, BUILT_IN_SIN);
8607 if (fn)
8608 return build_call_expr (fn, 1, CALL_EXPR_ARG (arg0, 0));
8609 break;
8611 default:
8612 break;
8615 return NULL_TREE;
8617 default:
8618 return NULL_TREE;
8619 } /* switch (code) */
8623 /* If the operation was a conversion do _not_ mark a resulting constant
8624 with TREE_OVERFLOW if the original constant was not. These conversions
8625 have implementation defined behavior and retaining the TREE_OVERFLOW
8626 flag here would confuse later passes such as VRP. */
8627 tree
8628 fold_unary_ignore_overflow (enum tree_code code, tree type, tree op0)
8630 tree res = fold_unary (code, type, op0);
8631 if (res
8632 && TREE_CODE (res) == INTEGER_CST
8633 && TREE_CODE (op0) == INTEGER_CST
8634 && CONVERT_EXPR_CODE_P (code))
8635 TREE_OVERFLOW (res) = TREE_OVERFLOW (op0);
8637 return res;
8640 /* Fold a binary expression of code CODE and type TYPE with operands
8641 OP0 and OP1, containing either a MIN-MAX or a MAX-MIN combination.
8642 Return the folded expression if folding is successful. Otherwise,
8643 return NULL_TREE. */
8645 static tree
8646 fold_minmax (enum tree_code code, tree type, tree op0, tree op1)
8648 enum tree_code compl_code;
8650 if (code == MIN_EXPR)
8651 compl_code = MAX_EXPR;
8652 else if (code == MAX_EXPR)
8653 compl_code = MIN_EXPR;
8654 else
8655 gcc_unreachable ();
8657 /* MIN (MAX (a, b), b) == b. */
8658 if (TREE_CODE (op0) == compl_code
8659 && operand_equal_p (TREE_OPERAND (op0, 1), op1, 0))
8660 return omit_one_operand (type, op1, TREE_OPERAND (op0, 0));
8662 /* MIN (MAX (b, a), b) == b. */
8663 if (TREE_CODE (op0) == compl_code
8664 && operand_equal_p (TREE_OPERAND (op0, 0), op1, 0)
8665 && reorder_operands_p (TREE_OPERAND (op0, 1), op1))
8666 return omit_one_operand (type, op1, TREE_OPERAND (op0, 1));
8668 /* MIN (a, MAX (a, b)) == a. */
8669 if (TREE_CODE (op1) == compl_code
8670 && operand_equal_p (op0, TREE_OPERAND (op1, 0), 0)
8671 && reorder_operands_p (op0, TREE_OPERAND (op1, 1)))
8672 return omit_one_operand (type, op0, TREE_OPERAND (op1, 1));
8674 /* MIN (a, MAX (b, a)) == a. */
8675 if (TREE_CODE (op1) == compl_code
8676 && operand_equal_p (op0, TREE_OPERAND (op1, 1), 0)
8677 && reorder_operands_p (op0, TREE_OPERAND (op1, 0)))
8678 return omit_one_operand (type, op0, TREE_OPERAND (op1, 0));
8680 return NULL_TREE;
8683 /* Helper that tries to canonicalize the comparison ARG0 CODE ARG1
8684 by changing CODE to reduce the magnitude of constants involved in
8685 ARG0 of the comparison.
8686 Returns a canonicalized comparison tree if a simplification was
8687 possible, otherwise returns NULL_TREE.
8688 Set *STRICT_OVERFLOW_P to true if the canonicalization is only
8689 valid if signed overflow is undefined. */
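/* For instance, x + 2 <= y with a known non-overflowing addition is
   canonicalized to x + 1 < y, reducing the magnitude of the
   constant.  */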
8691 static tree
8692 maybe_canonicalize_comparison_1 (enum tree_code code, tree type,
8693 tree arg0, tree arg1,
8694 bool *strict_overflow_p)
8696 enum tree_code code0 = TREE_CODE (arg0);
8697 tree t, cst0 = NULL_TREE;
8698 int sgn0;
8699 bool swap = false;
8701 /* Match A +- CST code arg1 and CST code arg1. We can change the
8702 first form only if the operation does not wrap. */
8703 if (!((/* In principle pointer arithmetic also can be non-wrapping,
8704 but that causes problems elsewhere. */
8705 (code0 == MINUSNV_EXPR
8706 || code0 == PLUSNV_EXPR)
8707 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
8708 || code0 == INTEGER_CST))
8709 return NULL_TREE;
8711 /* Identify the constant in arg0 and its sign. */
8712 if (code0 == INTEGER_CST)
8713 cst0 = arg0;
8714 else
8715 cst0 = TREE_OPERAND (arg0, 1);
8716 sgn0 = tree_int_cst_sgn (cst0);
8718 /* Overflowed constants and zero will cause problems. */
8719 if (integer_zerop (cst0)
8720 || TREE_OVERFLOW (cst0))
8721 return NULL_TREE;
8723 /* See if we can reduce the magnitude of the constant in
8724 arg0 by changing the comparison code. */
8725 if (code0 == INTEGER_CST)
8727 /* CST <= arg1 -> CST-1 < arg1. */
8728 if (code == LE_EXPR && sgn0 == 1)
8729 code = LT_EXPR;
8730 /* -CST < arg1 -> -CST-1 <= arg1. */
8731 else if (code == LT_EXPR && sgn0 == -1)
8732 code = LE_EXPR;
8733 /* CST > arg1 -> CST-1 >= arg1. */
8734 else if (code == GT_EXPR && sgn0 == 1)
8735 code = GE_EXPR;
8736 /* -CST >= arg1 -> -CST-1 > arg1. */
8737 else if (code == GE_EXPR && sgn0 == -1)
8738 code = GT_EXPR;
8739 else
8740 return NULL_TREE;
8741 /* arg1 code' CST' might be more canonical. */
8742 swap = true;
8744 else
8746 /* A - CST < arg1 -> A - CST-1 <= arg1. */
8747 if (code == LT_EXPR
8748 && code0 == ((sgn0 == -1) ? PLUSNV_EXPR : MINUSNV_EXPR))
8749 code = LE_EXPR;
8750 /* A + CST > arg1 -> A + CST-1 >= arg1. */
8751 else if (code == GT_EXPR
8752 && code0 == ((sgn0 == -1) ? MINUSNV_EXPR : PLUSNV_EXPR))
8753 code = GE_EXPR;
8754 /* A + CST <= arg1 -> A + CST-1 < arg1. */
8755 else if (code == LE_EXPR
8756 && code0 == ((sgn0 == -1) ? MINUSNV_EXPR : PLUSNV_EXPR))
8757 code = LT_EXPR;
8758 /* A - CST >= arg1 -> A - CST-1 > arg1. */
8759 else if (code == GE_EXPR
8760 && code0 == ((sgn0 == -1) ? PLUSNV_EXPR : MINUSNV_EXPR))
8761 code = GT_EXPR;
8762 else
8763 return NULL_TREE;
8764 if (!TREE_NO_WARNING (arg0))
8765 *strict_overflow_p = true;
8768 /* Now build the constant reduced in magnitude. But not if that
8769      would produce one outside of its type's range.  */
8770 if (INTEGRAL_TYPE_P (TREE_TYPE (cst0))
8771 && ((sgn0 == 1
8772 && TYPE_MIN_VALUE (TREE_TYPE (cst0))
8773 && tree_int_cst_equal (cst0, TYPE_MIN_VALUE (TREE_TYPE (cst0))))
8774 || (sgn0 == -1
8775 && TYPE_MAX_VALUE (TREE_TYPE (cst0))
8776 && tree_int_cst_equal (cst0, TYPE_MAX_VALUE (TREE_TYPE (cst0))))))
8777 /* We cannot swap the comparison here as that would cause us to
8778 endlessly recurse. */
8779 return NULL_TREE;
8781 t = int_const_binop (sgn0 == -1 ? PLUS_EXPR : MINUS_EXPR,
8782 cst0, build_int_cst (TREE_TYPE (cst0), 1), 0);
8783   /* If A - CST didn't overflow then neither does A - (CST - 1).  So it is safe
8784 to keep the *NV_EXPR variants. */
8785 if (code0 != INTEGER_CST)
8786 t = fold_build2 (code0, TREE_TYPE (arg0), TREE_OPERAND (arg0, 0), t);
8788   /* If swapping might yield a more canonical form, do so.  */
8789 if (swap)
8790 return fold_build2 (swap_tree_comparison (code), type, arg1, t);
8791 else
8792 return fold_build2 (code, type, t, arg1);
8795 /* Canonicalize the comparison ARG0 CODE ARG1 with type TYPE with undefined
8796 overflow further. Try to decrease the magnitude of constants involved
8797 by changing LE_EXPR and GE_EXPR to LT_EXPR and GT_EXPR or vice versa
8798 and put sole constants at the second argument position.
8799 Returns the canonicalized tree if changed, otherwise NULL_TREE. */
8801 static tree
8802 maybe_canonicalize_comparison (enum tree_code code, tree type,
8803 tree arg0, tree arg1)
8805 tree t;
8806 bool strict_overflow_p;
8807 const char * const warnmsg = G_("assuming signed overflow does not occur "
8808 "when reducing constant in comparison");
8810 /* Try canonicalization by simplifying arg0. */
8811 strict_overflow_p = false;
8812 t = maybe_canonicalize_comparison_1 (code, type, arg0, arg1,
8813 &strict_overflow_p);
8814 if (t)
8816 if (strict_overflow_p)
8817 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_MAGNITUDE);
8818 return t;
8821 /* Try canonicalization by simplifying arg1 using the swapped
8822 comparison. */
8823 code = swap_tree_comparison (code);
8824 strict_overflow_p = false;
8825 t = maybe_canonicalize_comparison_1 (code, type, arg1, arg0,
8826 &strict_overflow_p);
8827 if (t && strict_overflow_p)
8828 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_MAGNITUDE);
8829 return t;
8832 /* Return whether BASE + OFFSET + BITPOS may wrap around the address
8833 space. This is used to avoid issuing overflow warnings for
8834    expressions like &p->x which cannot wrap.  */
8836 static bool
8837 pointer_may_wrap_p (tree base, tree offset, HOST_WIDE_INT bitpos)
8839 unsigned HOST_WIDE_INT offset_low, total_low;
8840 HOST_WIDE_INT size, offset_high, total_high;
8842 if (!POINTER_TYPE_P (TREE_TYPE (base)))
8843 return true;
8845 if (bitpos < 0)
8846 return true;
8848 if (offset == NULL_TREE)
8850 offset_low = 0;
8851 offset_high = 0;
8853 else if (TREE_CODE (offset) != INTEGER_CST || TREE_OVERFLOW (offset))
8854 return true;
8855 else
8857 offset_low = TREE_INT_CST_LOW (offset);
8858 offset_high = TREE_INT_CST_HIGH (offset);
8861 if (add_double_with_sign (offset_low, offset_high,
8862 bitpos / BITS_PER_UNIT, 0,
8863 &total_low, &total_high,
8864 true))
8865 return true;
8867 if (total_high != 0)
8868 return true;
8870 size = int_size_in_bytes (TREE_TYPE (TREE_TYPE (base)));
8871 if (size <= 0)
8872 return true;
8874 /* We can do slightly better for SIZE if we have an ADDR_EXPR of an
8875 array. */
8876 if (TREE_CODE (base) == ADDR_EXPR)
8878 HOST_WIDE_INT base_size;
8880 base_size = int_size_in_bytes (TREE_TYPE (TREE_OPERAND (base, 0)));
8881 if (base_size > 0 && size < base_size)
8882 size = base_size;
8885 return total_low > (unsigned HOST_WIDE_INT) size;
8888 /* Subroutine of fold_binary. This routine performs all of the
8889 transformations that are common to the equality/inequality
8890 operators (EQ_EXPR and NE_EXPR) and the ordering operators
8891    (LT_EXPR, LE_EXPR, GE_EXPR and GT_EXPR).  Callers other than
8892    fold_binary itself should go through fold_binary.  Fold a comparison with
8893 tree code CODE and type TYPE with operands OP0 and OP1. Return
8894 the folded comparison or NULL_TREE. */
8896 static tree
8897 fold_comparison (enum tree_code code, tree type, tree op0, tree op1)
8899 tree arg0, arg1, tem;
8901 arg0 = op0;
8902 arg1 = op1;
8904 STRIP_SIGN_NOPS (arg0);
8905 STRIP_SIGN_NOPS (arg1);
8907 tem = fold_relational_const (code, type, arg0, arg1);
8908 if (tem != NULL_TREE)
8909 return tem;
8911 /* If one arg is a real or integer constant, put it last. */
8912 if (tree_swap_operands_p (arg0, arg1, true))
8913 return fold_build2 (swap_tree_comparison (code), type, op1, op0);
8915 /* Transform comparisons of the form X +- C1 CMP C2 to X CMP C2 +- C1
8916 if the original addition does not overflow. */
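/* For instance, x + 5 < 10 with a non-overflowing addition becomes
   x < 5 by moving the constant to the right-hand side.  */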
8917 if ((TREE_CODE (arg0) == PLUSNV_EXPR || TREE_CODE (arg0) == MINUSNV_EXPR)
8918 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
8919 && TREE_CODE (arg1) == INTEGER_CST)
8921 tree const1 = TREE_OPERAND (arg0, 1);
8922 tree const2 = arg1;
8923 tree variable = TREE_OPERAND (arg0, 0);
8924 tree lhs;
8925 int lhs_add;
8926 unsigned HOST_WIDE_INT low;
8927 HOST_WIDE_INT hi;
8928 int overflow;
8930 lhs_add = TREE_CODE (arg0) != PLUSNV_EXPR;
8932 overflow = int_const_binop_1 (lhs_add ? PLUS_EXPR : MINUS_EXPR,
8933 const2, const1, &low, &hi);
8934 overflow |= fit_double_type (low, hi, &low, &hi, TREE_TYPE (arg1));
8936 /* If there was overflow on combining the two constants we have to
8937 flip the comparison code. */
8938 if (overflow)
8939 code = swap_tree_comparison (code);
8941 lhs = build_int_cst_wide (TREE_TYPE (arg1), low, hi);
8942 if (!TREE_NO_WARNING (arg0))
8943 fold_overflow_warning (("assuming signed overflow does not occur "
8944 "when changing X +- C1 cmp C2 to "
8945 "X cmp C1 +- C2"),
8946 WARN_STRICT_OVERFLOW_COMPARISON);
8947 return fold_build2 (code, type, variable, lhs);
8950 /* For comparisons of pointers we can decompose it to a compile time
8951 comparison of the base objects and the offsets into the object.
8952 This requires at least one operand being an ADDR_EXPR or a
8953 POINTER_PLUS_EXPR to do more than the operand_equal_p test below. */
8954 if (POINTER_TYPE_P (TREE_TYPE (arg0))
8955 && (TREE_CODE (arg0) == ADDR_EXPR
8956 || TREE_CODE (arg1) == ADDR_EXPR
8957 || POINTER_PLUS_EXPR_P (arg0)
8958 || POINTER_PLUS_EXPR_P (arg1)))
8960 tree base0, base1, offset0 = NULL_TREE, offset1 = NULL_TREE;
8961 HOST_WIDE_INT bitsize, bitpos0 = 0, bitpos1 = 0;
8962 enum machine_mode mode;
8963 int volatilep, unsignedp;
8964 bool indirect_base0 = false, indirect_base1 = false;
8965 bool no_overflow = true;
8967 /* Get base and offset for the access. Strip ADDR_EXPR for
8968 get_inner_reference, but put it back by stripping INDIRECT_REF
8969 off the base object if possible. indirect_baseN will be true
8970 if baseN is not an address but refers to the object itself. */
8971 base0 = arg0;
8972 if (TREE_CODE (arg0) == ADDR_EXPR)
8974 base0 = get_inner_reference (TREE_OPERAND (arg0, 0),
8975 &bitsize, &bitpos0, &offset0, &mode,
8976 &unsignedp, &volatilep, false);
8977 if (TREE_CODE (base0) == INDIRECT_REF)
8978 base0 = TREE_OPERAND (base0, 0);
8979 else
8980 indirect_base0 = true;
8982 else if (POINTER_PLUS_EXPR_P (arg0))
8984 if (TREE_CODE (arg0) == POINTER_PLUS_EXPR)
8985 no_overflow = false;
8986 base0 = TREE_OPERAND (arg0, 0);
8987 offset0 = TREE_OPERAND (arg0, 1);
8990 base1 = arg1;
8991 if (TREE_CODE (arg1) == ADDR_EXPR)
8993 base1 = get_inner_reference (TREE_OPERAND (arg1, 0),
8994 &bitsize, &bitpos1, &offset1, &mode,
8995 &unsignedp, &volatilep, false);
8996 if (TREE_CODE (base1) == INDIRECT_REF)
8997 base1 = TREE_OPERAND (base1, 0);
8998 else
8999 indirect_base1 = true;
9001 else if (POINTER_PLUS_EXPR_P (arg1))
9003 if (TREE_CODE (arg1) == POINTER_PLUS_EXPR)
9004 no_overflow = false;
9005 base1 = TREE_OPERAND (arg1, 0);
9006 offset1 = TREE_OPERAND (arg1, 1);
9009 /* If we have equivalent bases we might be able to simplify. */
9010 if (indirect_base0 == indirect_base1
9011 && operand_equal_p (base0, base1, 0))
9013 /* We can fold this expression to a constant if the non-constant
9014 offset parts are equal. */
9015 if ((offset0 == offset1
9016 || (offset0 && offset1
9017 && operand_equal_p (offset0, offset1, 0)))
9018 && (code == EQ_EXPR
9019 || code == NE_EXPR
9020 || no_overflow))
9022 if (code != EQ_EXPR
9023 && code != NE_EXPR
9024 && bitpos0 != bitpos1
9025 && (pointer_may_wrap_p (base0, offset0, bitpos0)
9026 || pointer_may_wrap_p (base1, offset1, bitpos1)))
9027 fold_overflow_warning (("assuming pointer wraparound does not "
9028 "occur when comparing P +- C1 with "
9029 "P +- C2"),
9030 WARN_STRICT_OVERFLOW_CONDITIONAL);
9032 switch (code)
9034 case EQ_EXPR:
9035 return constant_boolean_node (bitpos0 == bitpos1, type);
9036 case NE_EXPR:
9037 return constant_boolean_node (bitpos0 != bitpos1, type);
9038 case LT_EXPR:
9039 return constant_boolean_node (bitpos0 < bitpos1, type);
9040 case LE_EXPR:
9041 return constant_boolean_node (bitpos0 <= bitpos1, type);
9042 case GE_EXPR:
9043 return constant_boolean_node (bitpos0 >= bitpos1, type);
9044 case GT_EXPR:
9045 return constant_boolean_node (bitpos0 > bitpos1, type);
9046 default:;
9049 /* We can simplify the comparison to a comparison of the variable
9050 offset parts if the constant offset parts are equal.
9051 Be careful to use signed size type here because otherwise we
9052 mess with array offsets in the wrong way. This is possible
9053 because pointer arithmetic is restricted to retain within an
9054 object and overflow on pointer differences is undefined as of
9055 6.5.6/8 and /9 with respect to the signed ptrdiff_t. */
9056 else if (bitpos0 == bitpos1
9057 && ((code == EQ_EXPR || code == NE_EXPR)
9058 || no_overflow))
9060 tree signed_size_type_node;
9061 signed_size_type_node = signed_type_for (size_type_node);
9063 /* By converting to signed size type we cover middle-end pointer
9064 arithmetic which operates on unsigned pointer types of size
9065 type size and ARRAY_REF offsets which are properly sign or
9066 zero extended from their type in case it is narrower than
9067 size type. */
9068 if (offset0 == NULL_TREE)
9069 offset0 = build_int_cst (signed_size_type_node, 0);
9070 else
9071 offset0 = fold_convert (signed_size_type_node, offset0);
9072 if (offset1 == NULL_TREE)
9073 offset1 = build_int_cst (signed_size_type_node, 0);
9074 else
9075 offset1 = fold_convert (signed_size_type_node, offset1);
9077 if (code != EQ_EXPR
9078 && code != NE_EXPR
9079 && (pointer_may_wrap_p (base0, offset0, bitpos0)
9080 || pointer_may_wrap_p (base1, offset1, bitpos1)))
9081 fold_overflow_warning (("assuming pointer wraparound does not "
9082 "occur when comparing P +- C1 with "
9083 "P +- C2"),
9084 WARN_STRICT_OVERFLOW_COMPARISON);
9086 return fold_build2 (code, type, offset0, offset1);
9089 /* For non-equal bases we can simplify if they are addresses
9090 of local binding decls or constants. */
9091 else if (indirect_base0 && indirect_base1
9092 /* We know that !operand_equal_p (base0, base1, 0)
9093 because the if condition was false. But make
9094 sure two decls are not the same. */
9095 && base0 != base1
9096 && TREE_CODE (arg0) == ADDR_EXPR
9097 && TREE_CODE (arg1) == ADDR_EXPR
9098 && (((TREE_CODE (base0) == VAR_DECL
9099 || TREE_CODE (base0) == PARM_DECL)
9100 && (targetm.binds_local_p (base0)
9101 || CONSTANT_CLASS_P (base1)))
9102 || CONSTANT_CLASS_P (base0))
9103 && (((TREE_CODE (base1) == VAR_DECL
9104 || TREE_CODE (base1) == PARM_DECL)
9105 && (targetm.binds_local_p (base1)
9106 || CONSTANT_CLASS_P (base0)))
9107 || CONSTANT_CLASS_P (base1)))
9109 if (code == EQ_EXPR)
9110 return omit_two_operands (type, boolean_false_node, arg0, arg1);
9111 else if (code == NE_EXPR)
9112 return omit_two_operands (type, boolean_true_node, arg0, arg1);
9114 /* For equal offsets we can simplify to a comparison of the
9115 base addresses. */
9116 else if (bitpos0 == bitpos1
9117 && (indirect_base0
9118 ? base0 != TREE_OPERAND (arg0, 0) : base0 != arg0)
9119 && (indirect_base1
9120 ? base1 != TREE_OPERAND (arg1, 0) : base1 != arg1)
9121 && ((offset0 == offset1)
9122 || (offset0 && offset1
9123 && operand_equal_p (offset0, offset1, 0))))
9125 if (indirect_base0)
9126 base0 = fold_addr_expr (base0);
9127 if (indirect_base1)
9128 base1 = fold_addr_expr (base1);
9129 return fold_build2 (code, type, base0, base1);
9133 /* Transform comparisons of the form X +- C1 CMP Y +- C2 to
9134      X CMP Y +- C2 +- C1 if X +- C1 does not overflow.  This is then
9135 valid if the resulting offset is smaller in absolute value than the
9136 original one. */
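/* For instance, x + 3 < y + 5 with non-overflowing x + 3 becomes
   x < y + 2, which keeps the constant of smaller absolute value.  */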
9137 if ((((TREE_CODE (arg0) == PLUSNV_EXPR || TREE_CODE (arg0) == MINUSNV_EXPR)
9138 && (PLUS_EXPR_P (arg1) || MINUS_EXPR_P (arg1)))
9139 || ((TREE_CODE (arg1) == PLUSNV_EXPR || TREE_CODE (arg1) == MINUSNV_EXPR)
9140 && (PLUS_EXPR_P (arg0) || MINUS_EXPR_P (arg0))))
9141 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
9142 && TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST)
9144 tree const1 = TREE_OPERAND (arg0, 1);
9145 tree const2 = TREE_OPERAND (arg1, 1);
9146 tree variable1 = TREE_OPERAND (arg0, 0);
9147 tree variable2 = TREE_OPERAND (arg1, 0);
9148 tree cst;
9149 unsigned HOST_WIDE_INT low;
9150 HOST_WIDE_INT hi;
9151 int overflow;
9152 const char * const warnmsg = G_("assuming signed overflow does not "
9153 "occur when combining constants around "
9154 "a comparison");
9156 /* Move the constant to the side where it doesn't overflow and is
9157 of lower absolute value than before, provided the operation on
9158 the other side doesn't overflow. */
9159 if (TREE_CODE (arg0) == PLUSNV_EXPR || TREE_CODE (arg0) == MINUSNV_EXPR)
9161 overflow = int_const_binop_1 ((strip_nv (TREE_CODE (arg0))
9162 == strip_nv (TREE_CODE (arg1)))
9163 ? MINUS_EXPR : PLUS_EXPR,
9164 const2, const1, &low, &hi);
9165 overflow |= fit_double_type (low, hi, &low, &hi, TREE_TYPE (arg1));
9166 cst = build_int_cst_wide (TREE_TYPE (arg1), low, hi);
9167 if (!overflow
9168 && tree_int_cst_compare (const2, cst) == tree_int_cst_sgn (const2))
9170 if (!TREE_NO_WARNING (arg0))
9171 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_COMPARISON);
9172 return fold_build2 (code, type,
9173 variable1,
9174 fold_build2 (strip_nv (TREE_CODE (arg1)),
9175 TREE_TYPE (arg1),
9176 variable2, cst));
9180 if (TREE_CODE (arg1) == PLUSNV_EXPR || TREE_CODE (arg1) == MINUSNV_EXPR)
9182 overflow = int_const_binop_1 ((strip_nv (TREE_CODE (arg0))
9183 == strip_nv (TREE_CODE (arg1)))
9184 ? MINUS_EXPR : PLUS_EXPR,
9185 const1, const2, &low, &hi);
9186 overflow |= fit_double_type (low, hi, &low, &hi, TREE_TYPE (arg0));
9187 cst = build_int_cst_wide (TREE_TYPE (arg0), low, hi);
9188 if (!overflow
9189 && tree_int_cst_compare (const1, cst) == tree_int_cst_sgn (const1))
9191 if (!TREE_NO_WARNING (arg1))
9192 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_COMPARISON);
9193 return fold_build2 (code, type,
9194 fold_build2 (strip_nv (TREE_CODE (arg0)),
9195 TREE_TYPE (arg0),
9196 variable1, cst),
9197 variable2);
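      /* Illustrative example (added annotation, not in the original
	 source): if x + 10 is known not to overflow (PLUSNV_EXPR), then
	   x + 10 < y + 12   becomes   x < y + 2,
	 valid because the combined constant 2 is smaller in absolute
	 value than the original 12.  */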
9202 /* Transform comparisons of the form X * C1 CMP 0 to X CMP 0 in the
9203 signed arithmetic case. That form is created by the compiler
9204 often enough for folding it to be of value. One example is in
9205 computing loop trip counts after Operator Strength Reduction. */
9206 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0))
9207 && TREE_CODE (arg0) == MULT_EXPR
9208 && (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
9209 && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1)))
9210 && integer_zerop (arg1))
9212 tree const1 = TREE_OPERAND (arg0, 1);
9213 tree const2 = arg1; /* zero */
9214 tree variable1 = TREE_OPERAND (arg0, 0);
9215 enum tree_code cmp_code = code;
9217 gcc_assert (!integer_zerop (const1));
9219 fold_overflow_warning (("assuming signed overflow does not occur when "
9220 "eliminating multiplication in comparison "
9221 "with zero"),
9222 WARN_STRICT_OVERFLOW_COMPARISON);
9224 /* If const1 is negative we swap the sense of the comparison. */
9225 if (tree_int_cst_sgn (const1) < 0)
9226 cmp_code = swap_tree_comparison (cmp_code);
9228 return fold_build2 (cmp_code, type, variable1, const2);
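      /* Illustrative example (added annotation, not in the original
	 source): with signed overflow undefined,
	   x * 4 > 0   becomes   x > 0,  while
	   x * -4 > 0  becomes   x < 0  (the comparison is swapped for
	 a negative multiplier).  */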
9231 tem = maybe_canonicalize_comparison (code, type, op0, op1);
9232 if (tem)
9233 return tem;
9235 if (FLOAT_TYPE_P (TREE_TYPE (arg0)))
9237 tree targ0 = strip_float_extensions (arg0);
9238 tree targ1 = strip_float_extensions (arg1);
9239 tree newtype = TREE_TYPE (targ0);
9241 if (TYPE_PRECISION (TREE_TYPE (targ1)) > TYPE_PRECISION (newtype))
9242 newtype = TREE_TYPE (targ1);
9244 /* Fold (double)float1 CMP (double)float2 into float1 CMP float2. */
9245 if (TYPE_PRECISION (newtype) < TYPE_PRECISION (TREE_TYPE (arg0)))
9246 return fold_build2 (code, type, fold_convert (newtype, targ0),
9247 fold_convert (newtype, targ1));
9249 /* (-a) CMP (-b) -> b CMP a */
9250 if (TREE_CODE (arg0) == NEGATE_EXPR
9251 && TREE_CODE (arg1) == NEGATE_EXPR)
9252 return fold_build2 (code, type, TREE_OPERAND (arg1, 0),
9253 TREE_OPERAND (arg0, 0));
9255 if (TREE_CODE (arg1) == REAL_CST)
9257 REAL_VALUE_TYPE cst;
9258 cst = TREE_REAL_CST (arg1);
9260 /* (-a) CMP CST -> a swap(CMP) (-CST) */
9261 if (TREE_CODE (arg0) == NEGATE_EXPR)
9262 return fold_build2 (swap_tree_comparison (code), type,
9263 TREE_OPERAND (arg0, 0),
9264 build_real (TREE_TYPE (arg1),
9265 REAL_VALUE_NEGATE (cst)));
9267 /* IEEE doesn't distinguish +0 and -0 in comparisons. */
9268 /* a CMP (-0) -> a CMP 0 */
9269 if (REAL_VALUE_MINUS_ZERO (cst))
9270 return fold_build2 (code, type, arg0,
9271 build_real (TREE_TYPE (arg1), dconst0));
9273 /* x != NaN is always true, other ops are always false. */
9274 if (REAL_VALUE_ISNAN (cst)
9275 && ! HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg1))))
9277 tem = (code == NE_EXPR) ? integer_one_node : integer_zero_node;
9278 return omit_one_operand (type, tem, arg0);
9281 /* Fold comparisons against infinity. */
9282 if (REAL_VALUE_ISINF (cst))
9284 tem = fold_inf_compare (code, type, arg0, arg1);
9285 if (tem != NULL_TREE)
9286 return tem;
9290 /* If this is a comparison of a real constant with a PLUS_EXPR
9291 or a MINUS_EXPR of a real constant, we can convert it into a
9292 comparison with a revised real constant as long as no overflow
9293 occurs when unsafe_math_optimizations are enabled. */
9294 if (flag_unsafe_math_optimizations
9295 && TREE_CODE (arg1) == REAL_CST
9296 && (TREE_CODE (arg0) == PLUS_EXPR
9297 || TREE_CODE (arg0) == MINUS_EXPR)
9298 && TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
9299 && 0 != (tem = const_binop (TREE_CODE (arg0) == PLUS_EXPR
9300 ? MINUS_EXPR : PLUS_EXPR,
9301 arg1, TREE_OPERAND (arg0, 1), 0))
9302 && !TREE_OVERFLOW (tem))
9303 return fold_build2 (code, type, TREE_OPERAND (arg0, 0), tem);
9305 /* Likewise, we can simplify a comparison of a real constant with
9306 a MINUS_EXPR whose first operand is also a real constant, i.e.
9307 (c1 - x) < c2 becomes x > c1-c2. Reordering is allowed on
9308 floating-point types only if -fassociative-math is set. */
9309 if (flag_associative_math
9310 && TREE_CODE (arg1) == REAL_CST
9311 && TREE_CODE (arg0) == MINUS_EXPR
9312 && TREE_CODE (TREE_OPERAND (arg0, 0)) == REAL_CST
9313 && 0 != (tem = const_binop (MINUS_EXPR, TREE_OPERAND (arg0, 0),
9314 arg1, 0))
9315 && !TREE_OVERFLOW (tem))
9316 return fold_build2 (swap_tree_comparison (code), type,
9317 TREE_OPERAND (arg0, 1), tem);
9319 /* Fold comparisons against built-in math functions. */
9320 if (TREE_CODE (arg1) == REAL_CST
9321 && flag_unsafe_math_optimizations
9322 && ! flag_errno_math)
9324 enum built_in_function fcode = builtin_mathfn_code (arg0);
9326 if (fcode != END_BUILTINS)
9328 tem = fold_mathfn_compare (fcode, code, type, arg0, arg1);
9329 if (tem != NULL_TREE)
9330 return tem;
9335 if (TREE_CODE (TREE_TYPE (arg0)) == INTEGER_TYPE
9336 && CONVERT_EXPR_P (arg0))
9338 /* If we are widening one operand of an integer comparison,
9339 see if the other operand is similarly being widened. Perhaps we
9340 can do the comparison in the narrower type. */
9341 tem = fold_widened_comparison (code, type, arg0, arg1);
9342 if (tem)
9343 return tem;
9345 /* Or if we are changing signedness. */
9346 tem = fold_sign_changed_comparison (code, type, arg0, arg1);
9347 if (tem)
9348 return tem;
9351 /* If this is comparing a constant with a MIN_EXPR or a MAX_EXPR of a
9352 constant, we can simplify it. */
9353 if (TREE_CODE (arg1) == INTEGER_CST
9354 && (TREE_CODE (arg0) == MIN_EXPR
9355 || TREE_CODE (arg0) == MAX_EXPR)
9356 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
9358 tem = optimize_minmax_comparison (code, type, op0, op1);
9359 if (tem)
9360 return tem;
9363 /* Simplify comparison of something with itself. (For IEEE
9364 floating-point, we can only do some of these simplifications.) */
9365 if (operand_equal_p (arg0, arg1, 0))
9367 switch (code)
9369 case EQ_EXPR:
9370 if (! FLOAT_TYPE_P (TREE_TYPE (arg0))
9371 || ! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
9372 return constant_boolean_node (1, type);
9373 break;
9375 case GE_EXPR:
9376 case LE_EXPR:
9377 if (! FLOAT_TYPE_P (TREE_TYPE (arg0))
9378 || ! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
9379 return constant_boolean_node (1, type);
9380 return fold_build2 (EQ_EXPR, type, arg0, arg1);
9382 case NE_EXPR:
9383 /* For NE, we can only do this simplification if integer
9384 or we don't honor IEEE floating point NaNs. */
9385 if (FLOAT_TYPE_P (TREE_TYPE (arg0))
9386 && HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
9387 break;
9388 /* ... fall through ... */
9389 case GT_EXPR:
9390 case LT_EXPR:
9391 return constant_boolean_node (0, type);
9392 default:
9393 gcc_unreachable ();
9397 /* If we are comparing an expression that just has comparisons
9398 of two integer values, arithmetic expressions of those comparisons,
9399 and constants, we can simplify it. There are only three cases
9400 to check: the two values can either be equal, the first can be
9401 greater, or the second can be greater. Fold the expression for
9402 those three values. Since each value must be 0 or 1, we have
9403 eight possibilities, each of which corresponds to the constant 0
9404 or 1 or one of the six possible comparisons.
9406 This handles common cases like (a > b) == 0 but also handles
9407 expressions like ((x > y) - (y > x)) > 0, which supposedly
9408 occur in macroized code. */
9410 if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg0) != INTEGER_CST)
9412 tree cval1 = 0, cval2 = 0;
9413 int save_p = 0;
9415 if (twoval_comparison_p (arg0, &cval1, &cval2, &save_p)
9416 /* Don't handle degenerate cases here; they should already
9417 have been handled anyway. */
9418 && cval1 != 0 && cval2 != 0
9419 && ! (TREE_CONSTANT (cval1) && TREE_CONSTANT (cval2))
9420 && TREE_TYPE (cval1) == TREE_TYPE (cval2)
9421 && INTEGRAL_TYPE_P (TREE_TYPE (cval1))
9422 && TYPE_MAX_VALUE (TREE_TYPE (cval1))
9423 && TYPE_MAX_VALUE (TREE_TYPE (cval2))
9424 && ! operand_equal_p (TYPE_MIN_VALUE (TREE_TYPE (cval1)),
9425 TYPE_MAX_VALUE (TREE_TYPE (cval2)), 0))
9427 tree maxval = TYPE_MAX_VALUE (TREE_TYPE (cval1));
9428 tree minval = TYPE_MIN_VALUE (TREE_TYPE (cval1));
9430 /* We can't just pass T to eval_subst in case cval1 or cval2
9431 was the same as ARG1. */
9433 tree high_result
9434 = fold_build2 (code, type,
9435 eval_subst (arg0, cval1, maxval,
9436 cval2, minval),
9437 arg1);
9438 tree equal_result
9439 = fold_build2 (code, type,
9440 eval_subst (arg0, cval1, maxval,
9441 cval2, maxval),
9442 arg1);
9443 tree low_result
9444 = fold_build2 (code, type,
9445 eval_subst (arg0, cval1, minval,
9446 cval2, maxval),
9447 arg1);
9449 /* All three of these results should be 0 or 1. Confirm they are.
9450 Then use those values to select the proper code to use. */
9452 if (TREE_CODE (high_result) == INTEGER_CST
9453 && TREE_CODE (equal_result) == INTEGER_CST
9454 && TREE_CODE (low_result) == INTEGER_CST)
9456 /* Make a 3-bit mask with the high-order bit being the
9457 value for `>', the next for '=', and the low for '<'. */
9458 switch ((integer_onep (high_result) * 4)
9459 + (integer_onep (equal_result) * 2)
9460 + integer_onep (low_result))
9462 case 0:
9463 /* Always false. */
9464 return omit_one_operand (type, integer_zero_node, arg0);
9465 case 1:
9466 code = LT_EXPR;
9467 break;
9468 case 2:
9469 code = EQ_EXPR;
9470 break;
9471 case 3:
9472 code = LE_EXPR;
9473 break;
9474 case 4:
9475 code = GT_EXPR;
9476 break;
9477 case 5:
9478 code = NE_EXPR;
9479 break;
9480 case 6:
9481 code = GE_EXPR;
9482 break;
9483 case 7:
9484 /* Always true. */
9485 return omit_one_operand (type, integer_one_node, arg0);
9488 if (save_p)
9489 return save_expr (build2 (code, type, cval1, cval2));
9490 return fold_build2 (code, type, cval1, cval2);
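	  /* Illustrative example (added annotation, not in the original
	     source): for ((x > y) - (y > x)) > 0 the three trial
	     substitutions yield high=1, equal=0, low=0, i.e. mask 4,
	     so the whole expression folds to  x > y.  */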
9495 /* We can fold X/C1 op C2 where C1 and C2 are integer constants
9496 into a single range test. */
9497 if ((TREE_CODE (arg0) == TRUNC_DIV_EXPR
9498 || TREE_CODE (arg0) == EXACT_DIV_EXPR)
9499 && TREE_CODE (arg1) == INTEGER_CST
9500 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
9501 && !integer_zerop (TREE_OPERAND (arg0, 1))
9502 && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1))
9503 && !TREE_OVERFLOW (arg1))
9505 tem = fold_div_compare (code, type, arg0, arg1);
9506 if (tem != NULL_TREE)
9507 return tem;
9510 /* Fold ~X op ~Y as Y op X. */
9511 if (TREE_CODE (arg0) == BIT_NOT_EXPR
9512 && TREE_CODE (arg1) == BIT_NOT_EXPR)
9514 tree cmp_type = TREE_TYPE (TREE_OPERAND (arg0, 0));
9515 return fold_build2 (code, type,
9516 fold_convert (cmp_type, TREE_OPERAND (arg1, 0)),
9517 TREE_OPERAND (arg0, 0));
9520 /* Fold ~X op C as X op' ~C, where op' is the swapped comparison. */
9521 if (TREE_CODE (arg0) == BIT_NOT_EXPR
9522 && TREE_CODE (arg1) == INTEGER_CST)
9524 tree cmp_type = TREE_TYPE (TREE_OPERAND (arg0, 0));
9525 return fold_build2 (swap_tree_comparison (code), type,
9526 TREE_OPERAND (arg0, 0),
9527 fold_build1 (BIT_NOT_EXPR, cmp_type,
9528 fold_convert (cmp_type, arg1)));
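      /* Illustrative example (added annotation, not in the original
	 source): for a signed x,  ~x < 5  becomes  x > ~5,  i.e.
	 x > -6, since BIT_NOT_EXPR reverses the ordering.  */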
9531 return NULL_TREE;
9535 /* Subroutine of fold_binary. Optimize complex multiplications of the
9536 form z * conj(z), as pow(realpart(z),2) + pow(imagpart(z),2). The
9537 argument EXPR represents the expression "z" of type TYPE. */
9539 static tree
9540 fold_mult_zconjz (tree type, tree expr)
9542 tree itype = TREE_TYPE (type);
9543 tree rpart, ipart, tem;
9545 if (TREE_CODE (expr) == COMPLEX_EXPR)
9547 rpart = TREE_OPERAND (expr, 0);
9548 ipart = TREE_OPERAND (expr, 1);
9550 else if (TREE_CODE (expr) == COMPLEX_CST)
9552 rpart = TREE_REALPART (expr);
9553 ipart = TREE_IMAGPART (expr);
9555 else
9557 expr = save_expr (expr);
9558 rpart = fold_build1 (REALPART_EXPR, itype, expr);
9559 ipart = fold_build1 (IMAGPART_EXPR, itype, expr);
9562 rpart = save_expr (rpart);
9563 ipart = save_expr (ipart);
9564 tem = fold_build2 (PLUS_EXPR, itype,
9565 fold_build2 (MULT_EXPR, itype, rpart, rpart),
9566 fold_build2 (MULT_EXPR, itype, ipart, ipart));
9567 return fold_build2 (COMPLEX_EXPR, type, tem,
9568 fold_convert (itype, integer_zero_node));
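/* Illustrative example (added annotation, not in the original source):
   for z = a + b*i we have
     z * conj(z) = (a + b*i) * (a - b*i) = a*a + b*b + 0*i,
   which is exactly the COMPLEX_EXPR constructed above.  */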
9572 /* Subroutine of fold_binary. If P is the value of EXPR, computes
9573 power-of-two M and (arbitrary) N such that M divides (P-N). This condition
9574 guarantees that P and N have the same least significant log2(M) bits.
9575 N is not otherwise constrained. In particular, N is not normalized to
9576 0 <= N < M as is common. In general, the precise value of P is unknown.
9577 M is chosen as large as possible such that constant N can be determined.
9579 Returns M and sets *RESIDUE to N. */
9581 static unsigned HOST_WIDE_INT
9582 get_pointer_modulus_and_residue (tree expr, unsigned HOST_WIDE_INT *residue)
9584 enum tree_code code;
9586 *residue = 0;
9588 code = TREE_CODE (expr);
9589 if (code == ADDR_EXPR)
9591 expr = TREE_OPERAND (expr, 0);
9592 if (handled_component_p (expr))
9594 HOST_WIDE_INT bitsize, bitpos;
9595 tree offset;
9596 enum machine_mode mode;
9597 int unsignedp, volatilep;
9599 expr = get_inner_reference (expr, &bitsize, &bitpos, &offset,
9600 &mode, &unsignedp, &volatilep, false);
9601 *residue = bitpos / BITS_PER_UNIT;
9602 if (offset)
9604 if (TREE_CODE (offset) == INTEGER_CST)
9605 *residue += TREE_INT_CST_LOW (offset);
9606 else
9607 /* We don't handle more complicated offset expressions. */
9608 return 1;
9612 if (DECL_P (expr) && TREE_CODE (expr) != FUNCTION_DECL)
9613 return DECL_ALIGN_UNIT (expr);
9615 else if (code == POINTER_PLUS_EXPR)
9617 tree op0, op1;
9618 unsigned HOST_WIDE_INT modulus;
9619 enum tree_code inner_code;
9621 op0 = TREE_OPERAND (expr, 0);
9622 STRIP_NOPS (op0);
9623 modulus = get_pointer_modulus_and_residue (op0, residue);
9625 op1 = TREE_OPERAND (expr, 1);
9626 STRIP_NOPS (op1);
9627 inner_code = TREE_CODE (op1);
9628 if (inner_code == INTEGER_CST)
9630 *residue += TREE_INT_CST_LOW (op1);
9631 return modulus;
9633 else if (inner_code == MULT_EXPR)
9635 op1 = TREE_OPERAND (op1, 1);
9636 if (TREE_CODE (op1) == INTEGER_CST)
9638 unsigned HOST_WIDE_INT align;
9640 /* Compute the greatest power-of-2 divisor of op1. */
9641 align = TREE_INT_CST_LOW (op1);
9642 align &= -align;
9644 /* If align is non-zero and less than *modulus, replace
9645 *modulus with align. If align is 0, then either op1 is 0
9646 or the greatest power-of-2 divisor of op1 doesn't fit in an
9647 unsigned HOST_WIDE_INT. In either case, no additional
9648 constraint is imposed. */
9649 if (align)
9650 modulus = MIN (modulus, align);
9652 return modulus;
9657 /* If we get here, we were unable to determine anything useful about the
9658 expression. */
9659 return 1;
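/* Illustrative example (added annotation, not in the original source):
   for a declaration  struct { int a, b; } s;  that happens to be 8-byte
   aligned, the address &s.b would yield modulus M = 8 and residue
   N = 4, i.e. the pointer value P satisfies P = N (mod M).  */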
9663 /* Fold a binary expression of code CODE and type TYPE with operands
9664 OP0 and OP1. Return the folded expression if folding is
9665 successful. Otherwise, return NULL_TREE. */
9667 tree
9668 fold_binary (enum tree_code code, tree type, tree op0, tree op1)
9670 enum tree_code_class kind = TREE_CODE_CLASS (code);
9671 tree arg0, arg1, tem;
9672 tree t1 = NULL_TREE;
9673 bool strict_overflow_p;
9675 gcc_assert (IS_EXPR_CODE_CLASS (kind)
9676 && TREE_CODE_LENGTH (code) == 2
9677 && op0 != NULL_TREE
9678 && op1 != NULL_TREE);
9680 arg0 = op0;
9681 arg1 = op1;
9683 /* Strip any conversions that don't change the mode. This is
9684 safe for every expression, except for a comparison expression
9685 because its signedness is derived from its operands. So, in
9686 the latter case, only strip conversions that don't change the
9687 signedness. MIN_EXPR/MAX_EXPR also need signedness of arguments
9688 preserved.
9690 Note that this is done as an internal manipulation within the
9691 constant folder, in order to find the simplest representation
9692 of the arguments so that their form can be studied. In any
9693 case, the appropriate type conversions should be put back in
9694 the tree that will get out of the constant folder. */
9696 if (kind == tcc_comparison || code == MIN_EXPR || code == MAX_EXPR)
9698 STRIP_SIGN_NOPS (arg0);
9699 STRIP_SIGN_NOPS (arg1);
9701 else
9703 STRIP_NOPS (arg0);
9704 STRIP_NOPS (arg1);
9707 /* Note that TREE_CONSTANT isn't enough: static var addresses are
9708 constant but we can't do arithmetic on them. */
9709 if ((TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
9710 || (TREE_CODE (arg0) == REAL_CST && TREE_CODE (arg1) == REAL_CST)
9711 || (TREE_CODE (arg0) == FIXED_CST && TREE_CODE (arg1) == FIXED_CST)
9712 || (TREE_CODE (arg0) == FIXED_CST && TREE_CODE (arg1) == INTEGER_CST)
9713 || (TREE_CODE (arg0) == COMPLEX_CST && TREE_CODE (arg1) == COMPLEX_CST)
9714 || (TREE_CODE (arg0) == VECTOR_CST && TREE_CODE (arg1) == VECTOR_CST))
9716 if (kind == tcc_binary)
9718 /* Make sure type and arg0 have the same saturating flag. */
9719 gcc_assert (TYPE_SATURATING (type)
9720 == TYPE_SATURATING (TREE_TYPE (arg0)));
9721 tem = const_binop (code, arg0, arg1, 0);
9723 else if (kind == tcc_comparison)
9724 tem = fold_relational_const (code, type, arg0, arg1);
9725 else
9726 tem = NULL_TREE;
9728 if (tem != NULL_TREE)
9730 if (TREE_TYPE (tem) != type)
9731 tem = fold_convert (type, tem);
9732 return tem;
9736 /* If this is a commutative operation, and ARG0 is a constant, move it
9737 to ARG1 to reduce the number of tests below. */
9738 if (commutative_tree_code (code)
9739 && tree_swap_operands_p (arg0, arg1, true))
9740 return fold_build2 (code, type, op1, op0);
9742 /* ARG0 is the first operand of EXPR, and ARG1 is the second operand.
9744 First check for cases where an arithmetic operation is applied to a
9745 compound, conditional, or comparison operation. Push the arithmetic
9746 operation inside the compound or conditional to see if any folding
9747 can then be done. Convert comparison to conditional for this purpose.
9748 This also optimizes non-constant cases that used to be done in
9749 expand_expr.
9751 Before we do that, see if this is a BIT_AND_EXPR or a BIT_IOR_EXPR
9752 where one of the operands is a comparison and the other is a comparison, a
9753 BIT_AND_EXPR with the constant 1, or a truth value. In that case, the
9754 code below would make the expression more complex. Change it to a
9755 TRUTH_{AND,OR}_EXPR. Likewise, convert a similar NE_EXPR to
9756 TRUTH_XOR_EXPR and an EQ_EXPR to the inversion of a TRUTH_XOR_EXPR. */
9758 if ((code == BIT_AND_EXPR || code == BIT_IOR_EXPR
9759 || code == EQ_EXPR || code == NE_EXPR)
9760 && ((truth_value_p (TREE_CODE (arg0))
9761 && (truth_value_p (TREE_CODE (arg1))
9762 || (TREE_CODE (arg1) == BIT_AND_EXPR
9763 && integer_onep (TREE_OPERAND (arg1, 1)))))
9764 || (truth_value_p (TREE_CODE (arg1))
9765 && (truth_value_p (TREE_CODE (arg0))
9766 || (TREE_CODE (arg0) == BIT_AND_EXPR
9767 && integer_onep (TREE_OPERAND (arg0, 1)))))))
9769 tem = fold_build2 (code == BIT_AND_EXPR ? TRUTH_AND_EXPR
9770 : code == BIT_IOR_EXPR ? TRUTH_OR_EXPR
9771 : TRUTH_XOR_EXPR,
9772 boolean_type_node,
9773 fold_convert (boolean_type_node, arg0),
9774 fold_convert (boolean_type_node, arg1));
9776 if (code == EQ_EXPR)
9777 tem = invert_truthvalue (tem);
9779 return fold_convert (type, tem);
9782 if (TREE_CODE_CLASS (code) == tcc_binary
9783 || TREE_CODE_CLASS (code) == tcc_comparison)
9785 if (TREE_CODE (arg0) == COMPOUND_EXPR)
9786 return build2 (COMPOUND_EXPR, type, TREE_OPERAND (arg0, 0),
9787 fold_build2 (code, type,
9788 fold_convert (TREE_TYPE (op0),
9789 TREE_OPERAND (arg0, 1)),
9790 op1));
9791 if (TREE_CODE (arg1) == COMPOUND_EXPR
9792 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
9793 return build2 (COMPOUND_EXPR, type, TREE_OPERAND (arg1, 0),
9794 fold_build2 (code, type, op0,
9795 fold_convert (TREE_TYPE (op1),
9796 TREE_OPERAND (arg1, 1))));
9798 if (TREE_CODE (arg0) == COND_EXPR || COMPARISON_CLASS_P (arg0))
9800 tem = fold_binary_op_with_conditional_arg (code, type, op0, op1,
9801 arg0, arg1,
9802 /*cond_first_p=*/1);
9803 if (tem != NULL_TREE)
9804 return tem;
9807 if (TREE_CODE (arg1) == COND_EXPR || COMPARISON_CLASS_P (arg1))
9809 tem = fold_binary_op_with_conditional_arg (code, type, op0, op1,
9810 arg1, arg0,
9811 /*cond_first_p=*/0);
9812 if (tem != NULL_TREE)
9813 return tem;
9817 switch (code)
9819 case POINTER_PLUS_EXPR:
9820 case POINTER_PLUSNV_EXPR:
9821 /* 0 +p index -> (type)index */
9822 if (integer_zerop (arg0))
9823 return non_lvalue (fold_convert (type, arg1));
9825 /* PTR +p 0 -> PTR */
9826 if (integer_zerop (arg1))
9827 return non_lvalue (fold_convert (type, arg0));
9829 /* INT +p INT -> (PTR)(INT + INT). Stripping types allows for this. */
9830 if (INTEGRAL_TYPE_P (TREE_TYPE (arg1))
9831 && INTEGRAL_TYPE_P (TREE_TYPE (arg0)))
9832 return fold_convert (type, fold_build2 (PLUS_EXPR, sizetype,
9833 fold_convert (sizetype, arg1),
9834 fold_convert (sizetype, arg0)));
9836 /* index +p PTR -> PTR +p index */
9837 if (POINTER_TYPE_P (TREE_TYPE (arg1))
9838 && INTEGRAL_TYPE_P (TREE_TYPE (arg0)))
9839 return fold_build2 (code, type,
9840 fold_convert (type, arg1),
9841 fold_convert (sizetype, arg0));
9843 /* (PTR +p B) +p A -> PTR +p (B + A) */
9844 if (POINTER_PLUS_EXPR_P (arg0))
9846 tree inner;
9847 tree arg01 = fold_convert (sizetype, TREE_OPERAND (arg0, 1));
9848 tree arg00 = TREE_OPERAND (arg0, 0);
9849 enum tree_code ncode = POINTER_PLUS_EXPR;
9850 if (code == POINTER_PLUSNV_EXPR
9851 && TREE_CODE (arg0) == POINTER_PLUSNV_EXPR)
9852 ncode = POINTER_PLUSNV_EXPR;
9853 inner = fold_build2 (PLUS_EXPR, sizetype,
9854 arg01, fold_convert (sizetype, arg1));
9855 return fold_convert (type,
9856 fold_build2 (ncode,
9857 TREE_TYPE (arg00), arg00, inner));
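      /* Illustrative example (added annotation, not in the original
	 source): the reassociation above turns
	   (p p+ 4) p+ 8   into   p p+ 12,
	 folding the two sizetype offsets into one constant.  */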
9860 /* PTR_CST +p CST -> CST1 */
9861 if (TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
9862 return fold_build2 (PLUS_EXPR, type, arg0, fold_convert (type, arg1));
9864 /* Try replacing &a[i1] +p c * i2 with &a[i1 + i2], if c is the step
9865 of the array. The loop optimizer sometimes produces this kind of
9866 expression. */
9867 if (TREE_CODE (arg0) == ADDR_EXPR)
9869 tem = try_move_mult_to_index (arg0, fold_convert (sizetype, arg1));
9870 if (tem)
9871 return fold_convert (type, tem);
9874 return NULL_TREE;
9876 case PLUS_EXPR:
9877 case PLUSNV_EXPR:
9878 /* A + (-B) -> A - B */
9879 if (NEGATE_EXPR_P (arg1))
9880 return fold_build2 (MINUS_EXPR, type,
9881 fold_convert (type, arg0),
9882 fold_convert (type, TREE_OPERAND (arg1, 0)));
9883 /* (-A) + B -> B - A */
9884 if (NEGATE_EXPR_P (arg0)
9885 && reorder_operands_p (TREE_OPERAND (arg0, 0), arg1))
9886 return fold_build2 (MINUS_EXPR, type,
9887 fold_convert (type, arg1),
9888 fold_convert (type, TREE_OPERAND (arg0, 0)));
9890 if (INTEGRAL_TYPE_P (type))
9892 /* Convert ~A + 1 to -A. */
9893 if (TREE_CODE (arg0) == BIT_NOT_EXPR
9894 && integer_onep (arg1))
9895 return fold_build1 (NEGATE_EXPR, type,
9896 fold_convert (type, TREE_OPERAND (arg0, 0)));
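	  /* Illustrative example (added annotation, not in the original
	     source): in two's complement  -x == ~x + 1,  which is the
	     identity the fold above relies on.  */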
9898 /* ~X + X is -1. */
9899 if (TREE_CODE (arg0) == BIT_NOT_EXPR
9900 && !TYPE_OVERFLOW_TRAPS (type))
9902 tree tem = TREE_OPERAND (arg0, 0);
9904 STRIP_NOPS (tem);
9905 if (operand_equal_p (tem, arg1, 0))
9907 t1 = build_int_cst_type (type, -1);
9908 return omit_one_operand (type, t1, arg1);
9912 /* X + ~X is -1. */
9913 if (TREE_CODE (arg1) == BIT_NOT_EXPR
9914 && !TYPE_OVERFLOW_TRAPS (type))
9916 tree tem = TREE_OPERAND (arg1, 0);
9918 STRIP_NOPS (tem);
9919 if (operand_equal_p (arg0, tem, 0))
9921 t1 = build_int_cst_type (type, -1);
9922 return omit_one_operand (type, t1, arg0);
9926 /* X + (X / CST) * -CST is X % CST. */
9927 if (MULT_EXPR_P (arg1)
9928 && TREE_CODE (TREE_OPERAND (arg1, 0)) == TRUNC_DIV_EXPR
9929 && operand_equal_p (arg0,
9930 TREE_OPERAND (TREE_OPERAND (arg1, 0), 0), 0))
9932 tree cst0 = TREE_OPERAND (TREE_OPERAND (arg1, 0), 1);
9933 tree cst1 = TREE_OPERAND (arg1, 1);
9934 tree sum = fold_binary (PLUS_EXPR, TREE_TYPE (cst1), cst1, cst0);
9935 if (sum && integer_zerop (sum))
9936 return fold_convert (type,
9937 fold_build2 (TRUNC_MOD_EXPR,
9938 TREE_TYPE (arg0), arg0, cst0));
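	  /* Illustrative example (added annotation, not in the original
	     source): since 4 + -4 folds to zero,
	       x + (x / 4) * -4   becomes   x % 4,
	     matching the identity x % C == x - (x / C) * C for
	     truncating division.  */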
9942 /* Handle (A1 * C1) + (A2 * C2) with A1, A2 or C1, C2 being the
9943 same or one. Make sure type is not saturating.
9944 fold_plusminus_mult_expr will re-associate. */
9945 if ((MULT_EXPR_P (arg0)
9946 || MULT_EXPR_P (arg1))
9947 && !TYPE_SATURATING (type)
9948 && (!FLOAT_TYPE_P (type) || flag_associative_math))
9950 tree tem = fold_plusminus_mult_expr (code, type, arg0, arg1);
9951 if (tem)
9952 return tem;
9955 if (! FLOAT_TYPE_P (type))
9957 if (integer_zerop (arg1))
9958 return non_lvalue (fold_convert (type, arg0));
9960 /* If we are adding two BIT_AND_EXPR's, both of which are and'ing
9961 with a constant, and the two constants have no bits in common,
9962 we should treat this as a BIT_IOR_EXPR since this may produce more
9963 simplifications. */
9964 if (TREE_CODE (arg0) == BIT_AND_EXPR
9965 && TREE_CODE (arg1) == BIT_AND_EXPR
9966 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
9967 && TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
9968 && integer_zerop (const_binop (BIT_AND_EXPR,
9969 TREE_OPERAND (arg0, 1),
9970 TREE_OPERAND (arg1, 1), 0)))
9972 code = BIT_IOR_EXPR;
9973 goto bit_ior;
9976 /* Reassociate (plus (plus (mult) (foo)) (mult)) as
9977 (plus (plus (mult) (mult)) (foo)) so that we can
9978 take advantage of the factoring cases below. */
9979 if (((PLUS_EXPR_P (arg0) || MINUS_EXPR_P (arg0))
9980 && MULT_EXPR_P (arg1))
9981 || ((PLUS_EXPR_P (arg1) || MINUS_EXPR_P (arg1))
9982 && MULT_EXPR_P (arg0)))
9984 tree parg0, parg1, parg, marg;
9985 enum tree_code pcode;
9987 if (MULT_EXPR_P (arg1))
9988 parg = arg0, marg = arg1;
9989 else
9990 parg = arg1, marg = arg0;
9991 pcode = strip_nv (TREE_CODE (parg));
9992 parg0 = TREE_OPERAND (parg, 0);
9993 parg1 = TREE_OPERAND (parg, 1);
9994 STRIP_NOPS (parg0);
9995 STRIP_NOPS (parg1);
9997 if (MULT_EXPR_P (parg0)
9998 && !MULT_EXPR_P (parg1))
9999 return fold_build2 (pcode, type,
10000 fold_build2 (PLUS_EXPR, type,
10001 fold_convert (type, parg0),
10002 fold_convert (type, marg)),
10003 fold_convert (type, parg1));
10004 if (!MULT_EXPR_P (parg0)
10005 && MULT_EXPR_P (parg1))
10006 return fold_build2 (PLUS_EXPR, type,
10007 fold_convert (type, parg0),
10008 fold_build2 (pcode, type,
10009 fold_convert (type, marg),
10010 fold_convert (type,
10011 parg1)));
10014 else
10016 /* See if ARG1 is zero and X + ARG1 reduces to X. */
10017 if (fold_real_zero_addition_p (TREE_TYPE (arg0), arg1, 0))
10018 return non_lvalue (fold_convert (type, arg0));
10020 /* Likewise if the operands are reversed. */
10021 if (fold_real_zero_addition_p (TREE_TYPE (arg1), arg0, 0))
10022 return non_lvalue (fold_convert (type, arg1));
10024 /* Convert X + -C into X - C. */
10025 if (TREE_CODE (arg1) == REAL_CST
10026 && REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg1)))
10028 tem = fold_negate_const (arg1, type);
10029 if (!TREE_OVERFLOW (arg1) || !flag_trapping_math)
10030 return fold_build2 (MINUS_EXPR, type,
10031 fold_convert (type, arg0),
10032 fold_convert (type, tem));
10035 /* Fold __complex__ ( x, 0 ) + __complex__ ( 0, y )
10036 to __complex__ ( x, y ). This is not the same for SNaNs or
10037 if signed zeros are involved. */
10038 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
10039 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
10040 && COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0)))
10042 tree rtype = TREE_TYPE (TREE_TYPE (arg0));
10043 tree arg0r = fold_unary (REALPART_EXPR, rtype, arg0);
10044 tree arg0i = fold_unary (IMAGPART_EXPR, rtype, arg0);
10045 bool arg0rz = false, arg0iz = false;
10046 if ((arg0r && (arg0rz = real_zerop (arg0r)))
10047 || (arg0i && (arg0iz = real_zerop (arg0i))))
10049 tree arg1r = fold_unary (REALPART_EXPR, rtype, arg1);
10050 tree arg1i = fold_unary (IMAGPART_EXPR, rtype, arg1);
10051 if (arg0rz && arg1i && real_zerop (arg1i))
10053 tree rp = arg1r ? arg1r
10054 : build1 (REALPART_EXPR, rtype, arg1);
10055 tree ip = arg0i ? arg0i
10056 : build1 (IMAGPART_EXPR, rtype, arg0);
10057 return fold_build2 (COMPLEX_EXPR, type, rp, ip);
10059 else if (arg0iz && arg1r && real_zerop (arg1r))
10061 tree rp = arg0r ? arg0r
10062 : build1 (REALPART_EXPR, rtype, arg0);
10063 tree ip = arg1i ? arg1i
10064 : build1 (IMAGPART_EXPR, rtype, arg1);
10065 return fold_build2 (COMPLEX_EXPR, type, rp, ip);
10070 if (flag_unsafe_math_optimizations
10071 && (TREE_CODE (arg0) == RDIV_EXPR || TREE_CODE (arg0) == MULT_EXPR)
10072 && (TREE_CODE (arg1) == RDIV_EXPR || TREE_CODE (arg1) == MULT_EXPR)
10073 && (tem = distribute_real_division (code, type, arg0, arg1)))
10074 return tem;
10076 /* Convert x+x into x*2.0. */
10077 if (operand_equal_p (arg0, arg1, 0)
10078 && SCALAR_FLOAT_TYPE_P (type))
10079 return fold_build2 (MULT_EXPR, type, arg0,
10080 build_real (type, dconst2));
10082 /* Convert a + (b*c + d*e) into (a + b*c) + d*e.
10083 We associate floats only if the user has specified
10084 -fassociative-math. */
10085 if (flag_associative_math
10086 && TREE_CODE (arg1) == PLUS_EXPR
10087 && TREE_CODE (arg0) != MULT_EXPR)
10089 tree tree10 = TREE_OPERAND (arg1, 0);
10090 tree tree11 = TREE_OPERAND (arg1, 1);
10091 if (TREE_CODE (tree11) == MULT_EXPR
10092 && TREE_CODE (tree10) == MULT_EXPR)
10094 tree tree0;
10095 tree0 = fold_build2 (PLUS_EXPR, type, arg0, tree10);
10096 return fold_build2 (PLUS_EXPR, type, tree0, tree11);
10099 /* Convert (b*c + d*e) + a into b*c + (d*e + a).
10100 We associate floats only if the user has specified
10101 -fassociative-math. */
10102 if (flag_associative_math
10103 && TREE_CODE (arg0) == PLUS_EXPR
10104 && TREE_CODE (arg1) != MULT_EXPR)
10106 tree tree00 = TREE_OPERAND (arg0, 0);
10107 tree tree01 = TREE_OPERAND (arg0, 1);
10108 if (TREE_CODE (tree01) == MULT_EXPR
10109 && TREE_CODE (tree00) == MULT_EXPR)
10111 tree tree0;
10112 tree0 = fold_build2 (PLUS_EXPR, type, tree01, arg1);
10113 return fold_build2 (PLUS_EXPR, type, tree00, tree0);
10118 bit_rotate:
10119 /* (A << C1) + (A >> C2) if A is unsigned and C1+C2 is the size of A
10120 is a rotate of A by C1 bits. */
10121 /* (A << B) + (A >> (Z - B)) if A is unsigned and Z is the size of A
10122 is a rotate of A by B bits. */
10124 enum tree_code code0, code1;
10125 tree rtype;
10126 code0 = TREE_CODE (arg0);
10127 code1 = TREE_CODE (arg1);
10128 if (((code0 == RSHIFT_EXPR && code1 == LSHIFT_EXPR)
10129 || (code1 == RSHIFT_EXPR && code0 == LSHIFT_EXPR))
10130 && operand_equal_p (TREE_OPERAND (arg0, 0),
10131 TREE_OPERAND (arg1, 0), 0)
10132 && (rtype = TREE_TYPE (TREE_OPERAND (arg0, 0)),
10133 TYPE_UNSIGNED (rtype))
10134 /* Only create rotates in complete modes. Other cases are not
10135 expanded properly. */
10136 && TYPE_PRECISION (rtype) == GET_MODE_PRECISION (TYPE_MODE (rtype)))
10138 tree tree01, tree11;
10139 enum tree_code code01, code11;
10141 tree01 = TREE_OPERAND (arg0, 1);
10142 tree11 = TREE_OPERAND (arg1, 1);
10143 STRIP_NOPS (tree01);
10144 STRIP_NOPS (tree11);
10145 code01 = TREE_CODE (tree01);
10146 code11 = TREE_CODE (tree11);
10147 if (code01 == INTEGER_CST
10148 && code11 == INTEGER_CST
10149 && TREE_INT_CST_HIGH (tree01) == 0
10150 && TREE_INT_CST_HIGH (tree11) == 0
10151 && ((TREE_INT_CST_LOW (tree01) + TREE_INT_CST_LOW (tree11))
10152 == TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg0, 0)))))
10153 return build2 (LROTATE_EXPR, type, TREE_OPERAND (arg0, 0),
10154 code0 == LSHIFT_EXPR ? tree01 : tree11);
10155 else if (MINUS_EXPR_CODE_P (code11))
10157 tree tree110, tree111;
10158 tree110 = TREE_OPERAND (tree11, 0);
10159 tree111 = TREE_OPERAND (tree11, 1);
10160 STRIP_NOPS (tree110);
10161 STRIP_NOPS (tree111);
10162 if (TREE_CODE (tree110) == INTEGER_CST
10163 && 0 == compare_tree_int (tree110,
10164 TYPE_PRECISION
10165 (TREE_TYPE (TREE_OPERAND
10166 (arg0, 0))))
10167 && operand_equal_p (tree01, tree111, 0))
10168 return build2 ((code0 == LSHIFT_EXPR
10169 ? LROTATE_EXPR
10170 : RROTATE_EXPR),
10171 type, TREE_OPERAND (arg0, 0), tree01);
10173 else if (MINUS_EXPR_CODE_P (code01))
10175 tree tree010, tree011;
10176 tree010 = TREE_OPERAND (tree01, 0);
10177 tree011 = TREE_OPERAND (tree01, 1);
10178 STRIP_NOPS (tree010);
10179 STRIP_NOPS (tree011);
10180 if (TREE_CODE (tree010) == INTEGER_CST
10181 && 0 == compare_tree_int (tree010,
10182 TYPE_PRECISION
10183 (TREE_TYPE (TREE_OPERAND
10184 (arg0, 0))))
10185 && operand_equal_p (tree11, tree011, 0))
10186 return build2 ((code0 != LSHIFT_EXPR
10187 ? LROTATE_EXPR
10188 : RROTATE_EXPR),
10189 type, TREE_OPERAND (arg0, 0), tree11);
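      /* Illustrative example (added annotation, not in the original
	 source): for a 32-bit unsigned a, both
	   (a << 3) + (a >> 29)  and  (a << b) + (a >> (32 - b))
	 match the patterns above and become left rotates of a.  */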
10194 associate:
10195 /* In most languages, we can't associate operations on floats through
10196 parentheses. Rather than remember where the parentheses were, we
10197 don't associate floats at all, unless the user has specified
10198 -fassociative-math.
10199 And, we need to make sure type is not saturating. */
10201 if ((! FLOAT_TYPE_P (type) || flag_associative_math)
10202 && !TYPE_SATURATING (type))
10204 tree var0, con0, lit0, minus_lit0;
10205 tree var1, con1, lit1, minus_lit1;
10207 /* Split both trees into variables, constants, and literals. Then
10208 associate each group together, the constants with literals,
10209 then the result with variables. This increases the chances of
10210 literals being recombined later and of generating relocatable
10211 expressions for the sum of a constant and literal. */
10212 var0 = split_tree (arg0, code, &con0, &lit0, &minus_lit0, 0);
10213 var1 = split_tree (arg1, code, &con1, &lit1, &minus_lit1,
10214 MINUS_EXPR_CODE_P (code));
10216 /* Only do something if we found more than two objects. Otherwise,
10217 nothing has changed and we risk infinite recursion. */
10218 if (2 < ((var0 != 0) + (var1 != 0)
10219 + (con0 != 0) + (con1 != 0)
10220 + (lit0 != 0) + (lit1 != 0)
10221 + (minus_lit0 != 0) + (minus_lit1 != 0)))
10223 /* Recombine MINUS_EXPR operands by using PLUS_EXPR. */
10224 if (MINUS_EXPR_CODE_P (code))
10225 code = PLUS_EXPR;
10227 var0 = associate_trees (var0, var1, code, type);
10228 con0 = associate_trees (con0, con1, code, type);
10229 lit0 = associate_trees (lit0, lit1, code, type);
10230 minus_lit0 = associate_trees (minus_lit0, minus_lit1, code, type);
10232 /* Preserve the MINUS_EXPR if the negative part of the literal is
10233 greater than the positive part. Otherwise, the multiplicative
10234 folding code (i.e. extract_muldiv) may be fooled in case
10235 unsigned constants are subtracted, as in the following
10236 example: ((X*2 + 4) - 8U)/2. */
10237 if (minus_lit0 && lit0)
10239 if (TREE_CODE (lit0) == INTEGER_CST
10240 && TREE_CODE (minus_lit0) == INTEGER_CST
10241 && tree_int_cst_lt (lit0, minus_lit0))
10243 minus_lit0 = associate_trees (minus_lit0, lit0,
10244 MINUS_EXPR, type);
10245 lit0 = 0;
10247 else
10249 lit0 = associate_trees (lit0, minus_lit0,
10250 MINUS_EXPR, type);
10251 minus_lit0 = 0;
10254 if (minus_lit0)
10256 if (con0 == 0)
10257 return fold_convert (type,
10258 associate_trees (var0, minus_lit0,
10259 MINUS_EXPR, type));
10260 else
10262 con0 = associate_trees (con0, minus_lit0,
10263 MINUS_EXPR, type);
10264 return fold_convert (type,
10265 associate_trees (var0, con0,
10266 PLUS_EXPR, type));
10270 con0 = associate_trees (con0, lit0, code, type);
10271 return fold_convert (type, associate_trees (var0, con0,
10272 code, type));
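      /* Illustrative example (added annotation, not in the original
	 source): splitting and re-associating turns
	   (x + 4) + (y + 5)   into   (x + y) + 9,
	 combining the two literal parts into a single constant.  */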
10276 return NULL_TREE;
10278 case MINUS_EXPR:
10279 case MINUSNV_EXPR:
10280 /* Pointer simplifications for subtraction, simple reassociations. */
10281 if (POINTER_TYPE_P (TREE_TYPE (arg1)) && POINTER_TYPE_P (TREE_TYPE (arg0)))
10283 /* (PTR0 p+ A) - (PTR1 p+ B) -> (PTR0 - PTR1) + (A - B) */
10284 if (POINTER_PLUS_EXPR_P (arg0)
10285 && POINTER_PLUS_EXPR_P (arg1))
10287 tree arg00 = fold_convert (type, TREE_OPERAND (arg0, 0));
10288 tree arg01 = fold_convert (type, TREE_OPERAND (arg0, 1));
10289 tree arg10 = fold_convert (type, TREE_OPERAND (arg1, 0));
10290 tree arg11 = fold_convert (type, TREE_OPERAND (arg1, 1));
10291 return fold_build2 (PLUS_EXPR, type,
10292 fold_build2 (MINUS_EXPR, type, arg00, arg10),
10293 fold_build2 (MINUS_EXPR, type, arg01, arg11));
10295 /* (PTR0 p+ A) - PTR1 -> (PTR0 - PTR1) + A, assuming
10296 PTR0 - PTR1 simplifies. */
10297 else if (POINTER_PLUS_EXPR_P (arg0))
10299 tree arg00 = fold_convert (type, TREE_OPERAND (arg0, 0));
10300 tree arg01 = fold_convert (type, TREE_OPERAND (arg0, 1));
10301 tree tmp = fold_binary (MINUS_EXPR, type, arg00,
10302 fold_convert (type, arg1));
10303 if (tmp)
10304 return fold_build2 (PLUS_EXPR, type, tmp, arg01);
10308 /* A - (-B) -> A + B */
10309 if (NEGATE_EXPR_P (arg1))
10310 return fold_build2 (PLUS_EXPR, type, op0,
10311 fold_convert (type, TREE_OPERAND (arg1, 0)));
10312 /* (-A) - B -> (-B) - A where B is easily negated and we can swap. */
10313 if (NEGATE_EXPR_P (arg0)
10314 && (FLOAT_TYPE_P (type)
10315 || INTEGRAL_TYPE_P (type))
10316 && negate_expr_p (arg1)
10317 && reorder_operands_p (arg0, arg1))
10318 return fold_build2 (MINUS_EXPR, type,
10319 fold_convert (type, negate_expr (arg1)),
10320 fold_convert (type, TREE_OPERAND (arg0, 0)));
10321 /* Convert -A - 1 to ~A. */
10322 if (INTEGRAL_TYPE_P (type)
10323 && ((TREE_CODE (arg0) == NEGATE_EXPR
10324 && !TYPE_OVERFLOW_TRAPS (type))
10325 || TREE_CODE (arg0) == NEGATENV_EXPR)
10326 && integer_onep (arg1))
10327 return fold_build1 (BIT_NOT_EXPR, type,
10328 fold_convert (type, TREE_OPERAND (arg0, 0)));
10330 /* Convert -1 - A to ~A. */
10331 if (INTEGRAL_TYPE_P (type)
10332 && integer_all_onesp (arg0))
10333 return fold_build1 (BIT_NOT_EXPR, type, op1);
10336 /* X - (X / CST) * CST is X % CST. */
10337 if (INTEGRAL_TYPE_P (type)
10338 && MULT_EXPR_P (arg1)
10339 && TREE_CODE (TREE_OPERAND (arg1, 0)) == TRUNC_DIV_EXPR
10340 && operand_equal_p (arg0,
10341 TREE_OPERAND (TREE_OPERAND (arg1, 0), 0), 0)
10342 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg1, 0), 1),
10343 TREE_OPERAND (arg1, 1), 0))
10344 return fold_convert (type,
10345 fold_build2 (TRUNC_MOD_EXPR, TREE_TYPE (arg0),
10346 arg0, TREE_OPERAND (arg1, 1)));
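      /* Illustrative example (added annotation, not in the original
	 source): by the identity x % 8 == x - (x / 8) * 8 for
	 truncating division,  x - (x / 8) * 8  is rewritten into the
	 cheaper  x % 8.  */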
10348 if (! FLOAT_TYPE_P (type))
10350 if (integer_zerop (arg0))
10351 return negate_expr (fold_convert (type, arg1));
10352 if (integer_zerop (arg1))
10353 return non_lvalue (fold_convert (type, arg0));
10355 /* Fold A - (A & B) into ~B & A. */
10356 if (!TREE_SIDE_EFFECTS (arg0)
10357 && TREE_CODE (arg1) == BIT_AND_EXPR)
10359 if (operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0))
10361 tree arg10 = fold_convert (type, TREE_OPERAND (arg1, 0));
10362 return fold_build2 (BIT_AND_EXPR, type,
10363 fold_build1 (BIT_NOT_EXPR, type, arg10),
10364 fold_convert (type, arg0));
10366 if (operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
10368 tree arg11 = fold_convert (type, TREE_OPERAND (arg1, 1));
10369 return fold_build2 (BIT_AND_EXPR, type,
10370 fold_build1 (BIT_NOT_EXPR, type, arg11),
10371 fold_convert (type, arg0));
10375 /* Fold (A & ~B) - (A & B) into (A ^ B) - B, where B is
10376 any power of 2 minus 1. */
10377 if (TREE_CODE (arg0) == BIT_AND_EXPR
10378 && TREE_CODE (arg1) == BIT_AND_EXPR
10379 && operand_equal_p (TREE_OPERAND (arg0, 0),
10380 TREE_OPERAND (arg1, 0), 0))
10382 tree mask0 = TREE_OPERAND (arg0, 1);
10383 tree mask1 = TREE_OPERAND (arg1, 1);
10384 tree tem = fold_build1 (BIT_NOT_EXPR, type, mask0);
10386 if (operand_equal_p (tem, mask1, 0))
10388 tem = fold_build2 (BIT_XOR_EXPR, type,
10389 TREE_OPERAND (arg0, 0), mask1);
10390 return fold_build2 (MINUS_EXPR, type, tem, mask1);
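	      /* Illustrative example (added annotation, not in the
		 original source): with B = 7, a power of 2 minus 1,
		   (a & ~7) - (a & 7)   becomes   (a ^ 7) - 7.  */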
10395 /* See if ARG1 is zero and X - ARG1 reduces to X. */
10396 else if (fold_real_zero_addition_p (TREE_TYPE (arg0), arg1, 1))
10397 return non_lvalue (fold_convert (type, arg0));
10399 /* (ARG0 - ARG1) is the same as (-ARG1 + ARG0). So check whether
10400 ARG0 is zero and X + ARG0 reduces to X, since that would mean
10401 (-ARG1 + ARG0) reduces to -ARG1. */
10402 else if (fold_real_zero_addition_p (TREE_TYPE (arg1), arg0, 0))
10403 return negate_expr (fold_convert (type, arg1));
10405 /* Fold __complex__ ( x, 0 ) - __complex__ ( 0, y ) to
10406 __complex__ ( x, -y ). This is not the same for SNaNs or if
10407 signed zeros are involved. */
10408 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
10409 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
10410 && COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0)))
10412 tree rtype = TREE_TYPE (TREE_TYPE (arg0));
10413 tree arg0r = fold_unary (REALPART_EXPR, rtype, arg0);
10414 tree arg0i = fold_unary (IMAGPART_EXPR, rtype, arg0);
10415 bool arg0rz = false, arg0iz = false;
10416 if ((arg0r && (arg0rz = real_zerop (arg0r)))
10417 || (arg0i && (arg0iz = real_zerop (arg0i))))
10419 tree arg1r = fold_unary (REALPART_EXPR, rtype, arg1);
10420 tree arg1i = fold_unary (IMAGPART_EXPR, rtype, arg1);
10421 if (arg0rz && arg1i && real_zerop (arg1i))
10423 tree rp = fold_build1 (NEGATE_EXPR, rtype,
10424 arg1r ? arg1r
10425 : build1 (REALPART_EXPR, rtype, arg1));
10426 tree ip = arg0i ? arg0i
10427 : build1 (IMAGPART_EXPR, rtype, arg0);
10428 return fold_build2 (COMPLEX_EXPR, type, rp, ip);
10430 else if (arg0iz && arg1r && real_zerop (arg1r))
10432 tree rp = arg0r ? arg0r
10433 : build1 (REALPART_EXPR, rtype, arg0);
10434 tree ip = fold_build1 (NEGATE_EXPR, rtype,
10435 arg1i ? arg1i
10436 : build1 (IMAGPART_EXPR, rtype, arg1));
10437 return fold_build2 (COMPLEX_EXPR, type, rp, ip);
10442 /* Fold &x - &x. This can happen from &x.foo - &x.
10443 This is unsafe for certain floats even in non-IEEE formats.
10444 In IEEE, it is unsafe because it gives the wrong result for NaNs.
10445 Also note that operand_equal_p is always false if an operand
10446 is volatile. */
10448 if ((!FLOAT_TYPE_P (type) || !HONOR_NANS (TYPE_MODE (type)))
10449 && operand_equal_p (arg0, arg1, 0))
10450 return fold_convert (type, integer_zero_node);
10452 /* A - B -> A + (-B) if B is easily negatable. */
10453 if ((negate_expr_p (arg1)
10454 /* Avoid negating constants if that would change overflow
10455 behavior. */
10456 && (code == MINUS_EXPR
10457 || TREE_CODE (arg1) != INTEGER_CST
10458 || may_negate_without_overflow_p (arg1)))
10459 && ((FLOAT_TYPE_P (type)
10460 /* Avoid this transformation if B is a positive REAL_CST. */
10461 && (TREE_CODE (arg1) != REAL_CST
10462 || REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg1))))
10463 || INTEGRAL_TYPE_P (type)))
10465 enum tree_code ncode = PLUS_EXPR;
10466 /* If the original subtraction is signed and did not overflow,
10467 neither does the new addition, provided the negation of arg1
10468 does not overflow (which we can know for constants only). */
10469 if (code == MINUSNV_EXPR
10470 && TREE_CODE (arg1) == INTEGER_CST
10471 && !TYPE_UNSIGNED (type))
10472 ncode = PLUSNV_EXPR;
10473 return fold_build2 (ncode, type,
10474 fold_convert (type, arg0),
10475 fold_convert (type, negate_expr (arg1)));
10478 /* Try folding difference of addresses. */
10480 HOST_WIDE_INT diff;
10482 if ((TREE_CODE (arg0) == ADDR_EXPR
10483 || TREE_CODE (arg1) == ADDR_EXPR)
10484 && ptr_difference_const (arg0, arg1, &diff))
10485 return build_int_cst_type (type, diff);
10488 /* Fold &a[i] - &a[j] to i-j. */
10489 if (TREE_CODE (arg0) == ADDR_EXPR
10490 && TREE_CODE (TREE_OPERAND (arg0, 0)) == ARRAY_REF
10491 && TREE_CODE (arg1) == ADDR_EXPR
10492 && TREE_CODE (TREE_OPERAND (arg1, 0)) == ARRAY_REF)
10494 tree aref0 = TREE_OPERAND (arg0, 0);
10495 tree aref1 = TREE_OPERAND (arg1, 0);
10496 if (operand_equal_p (TREE_OPERAND (aref0, 0),
10497 TREE_OPERAND (aref1, 0), 0))
10499 tree op0 = fold_convert (type, TREE_OPERAND (aref0, 1));
10500 tree op1 = fold_convert (type, TREE_OPERAND (aref1, 1));
10501 tree esz = array_ref_element_size (aref0);
10502 tree diff = build2 (MINUS_EXPR, type, op0, op1);
10503 return fold_build2 (MULT_EXPR, type, diff,
10504 fold_convert (type, esz));
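	  /* Illustrative example (added annotation, not in the original
	     source): for  int a[N],  the byte difference between
	     &a[i] and &a[j] becomes  (i - j) * 4  here (assuming
	     4-byte int); the later division by the element size that
	     C pointer subtraction performs cancels the multiplication.  */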
10509 if (flag_unsafe_math_optimizations
10510 && (TREE_CODE (arg0) == RDIV_EXPR || TREE_CODE (arg0) == MULT_EXPR)
10511 && (TREE_CODE (arg1) == RDIV_EXPR || TREE_CODE (arg1) == MULT_EXPR)
10512 && (tem = distribute_real_division (code, type, arg0, arg1)))
10513 return tem;
10515 /* Handle (A1 * C1) - (A2 * C2) with A1, A2 or C1, C2 being the
10516 same or one. Make sure type is not saturating.
10517 fold_plusminus_mult_expr will re-associate. */
10518 if ((MULT_EXPR_P (arg0)
10519 || MULT_EXPR_P (arg1))
10520 && !TYPE_SATURATING (type)
10521 && (!FLOAT_TYPE_P (type) || flag_associative_math))
10523 tree tem = fold_plusminus_mult_expr (code, type, arg0, arg1);
10524 if (tem)
10525 return tem;
10528 goto associate;
10530 case MULT_EXPR:
10531 case MULTNV_EXPR:
10532 if (! FLOAT_TYPE_P (type))
10534 if (integer_zerop (arg1))
10535 return omit_one_operand (type, arg1, arg0);
10536 if (integer_onep (arg1))
10537 return non_lvalue (fold_convert (type, arg0));
10538 /* Transform x * -1 into -x. Make sure to do the negation
10539 on the original operand with conversions not stripped
10540 because we can only strip non-sign-changing conversions. */
10541 if (integer_all_onesp (arg1))
10542 return fold_convert (type, negate_expr (op0));
10545 /* (-A) * (-B) -> A * B. */
10546 if (NEGATE_EXPR_P (arg0) && negate_expr_p (arg1))
10547 return fold_build2 (MULT_EXPR, type,
10548 fold_convert (type, TREE_OPERAND (arg0, 0)),
10549 fold_convert (type, negate_expr (arg1)));
10550 if (NEGATE_EXPR_P (arg1) && negate_expr_p (arg0))
10551 return fold_build2 (MULT_EXPR, type,
10552 fold_convert (type, negate_expr (arg0)),
10553 fold_convert (type, TREE_OPERAND (arg1, 0)));
10555 if (! FLOAT_TYPE_P (type))
10557 /* Transform x * -C into -x * C if x is easily negatable. */
10558 if (TREE_CODE (arg1) == INTEGER_CST
10559 && tree_int_cst_sgn (arg1) == -1
10560 && negate_expr_p (arg0)
10561 && (tem = negate_expr (arg1)) != arg1
10562 && !TREE_OVERFLOW (tem))
10563 return fold_build2 (MULT_EXPR, type,
10564 fold_convert (type, negate_expr (arg0)), tem);
10566 /* (a * (1 << b)) is (a << b) */
10567 if (TREE_CODE (arg1) == LSHIFT_EXPR
10568 && integer_onep (TREE_OPERAND (arg1, 0)))
10569 return fold_build2 (LSHIFT_EXPR, type, op0,
10570 TREE_OPERAND (arg1, 1));
10571 if (TREE_CODE (arg0) == LSHIFT_EXPR
10572 && integer_onep (TREE_OPERAND (arg0, 0)))
10573 return fold_build2 (LSHIFT_EXPR, type, op1,
10574 TREE_OPERAND (arg0, 1));
10576 /* (A + A) * C -> A * 2 * C */
10577 if (PLUS_EXPR_P (arg0)
10578 && TREE_CODE (arg1) == INTEGER_CST
10579 && operand_equal_p (TREE_OPERAND (arg0, 0),
10580 TREE_OPERAND (arg0, 1), 0))
10581 return fold_build2 (MULT_EXPR, type,
10582 omit_one_operand (type, TREE_OPERAND (arg0, 0),
10583 TREE_OPERAND (arg0, 1)),
10584 fold_build2 (MULT_EXPR, type,
10585 build_int_cst (type, 2) , arg1));
10587 strict_overflow_p = false;
10588 if (TREE_CODE (arg1) == INTEGER_CST
10589 && 0 != (tem = extract_muldiv (op0, arg1, code, NULL_TREE,
10590 &strict_overflow_p)))
10592 if (strict_overflow_p)
10593 fold_overflow_warning (("assuming signed overflow does not "
10594 "occur when simplifying "
10595 "multiplication"),
10596 WARN_STRICT_OVERFLOW_MISC);
10597 return fold_convert (type, tem);
10600 /* Optimize z * conj(z) for integer complex numbers. */
10601 if (TREE_CODE (arg0) == CONJ_EXPR
10602 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
10603 return fold_mult_zconjz (type, arg1);
10604 if (TREE_CODE (arg1) == CONJ_EXPR
10605 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
10606 return fold_mult_zconjz (type, arg0);
10608 else
10610 /* Maybe fold x * 0 to 0. The expressions aren't the same
10611 when x is NaN, since x * 0 is also NaN. Nor are they the
10612 same in modes with signed zeros, since multiplying a
10613 negative value by 0 gives -0, not +0. */
10614 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0)))
10615 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
10616 && real_zerop (arg1))
10617 return omit_one_operand (type, arg1, arg0);
10618 /* In IEEE floating point, x*1 is not equivalent to x for snans. */
10619 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
10620 && real_onep (arg1))
10621 return non_lvalue (fold_convert (type, arg0));
10623 /* Transform x * -1.0 into -x. */
10624 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
10625 && real_minus_onep (arg1))
10626 return fold_convert (type, negate_expr (arg0));
10628 /* Convert (C1/X)*C2 into (C1*C2)/X. This transformation may change
10629 the result for floating point types due to rounding so it is applied
10630 only if -fassociative-math was specified. */
10631 if (flag_associative_math
10632 && TREE_CODE (arg0) == RDIV_EXPR
10633 && TREE_CODE (arg1) == REAL_CST
10634 && TREE_CODE (TREE_OPERAND (arg0, 0)) == REAL_CST)
10636 tree tem = const_binop (MULT_EXPR, TREE_OPERAND (arg0, 0),
10637 arg1, 0);
10638 if (tem)
10639 return fold_build2 (RDIV_EXPR, type, tem,
10640 TREE_OPERAND (arg0, 1));
10643 /* Strip sign operations from X in X*X, i.e. -Y*-Y -> Y*Y. */
10644 if (operand_equal_p (arg0, arg1, 0))
10646 tree tem = fold_strip_sign_ops (arg0);
10647 if (tem != NULL_TREE)
10649 tem = fold_convert (type, tem);
10650 return fold_build2 (MULT_EXPR, type, tem, tem);
10654 /* Fold z * +-I to __complex__ (-+__imag z, +-__real z).
10655 This is not the same for NaNs or if signed zeros are
10656 involved. */
10657 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0)))
10658 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
10659 && COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0))
10660 && TREE_CODE (arg1) == COMPLEX_CST
10661 && real_zerop (TREE_REALPART (arg1)))
10663 tree rtype = TREE_TYPE (TREE_TYPE (arg0));
10664 if (real_onep (TREE_IMAGPART (arg1)))
10665 return fold_build2 (COMPLEX_EXPR, type,
10666 negate_expr (fold_build1 (IMAGPART_EXPR,
10667 rtype, arg0)),
10668 fold_build1 (REALPART_EXPR, rtype, arg0));
10669 else if (real_minus_onep (TREE_IMAGPART (arg1)))
10670 return fold_build2 (COMPLEX_EXPR, type,
10671 fold_build1 (IMAGPART_EXPR, rtype, arg0),
10672 negate_expr (fold_build1 (REALPART_EXPR,
10673 rtype, arg0)));
10676 /* Optimize z * conj(z) for floating point complex numbers.
10677 Guarded by flag_unsafe_math_optimizations as non-finite
10678 imaginary components don't produce scalar results. */
10679 if (flag_unsafe_math_optimizations
10680 && TREE_CODE (arg0) == CONJ_EXPR
10681 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
10682 return fold_mult_zconjz (type, arg1);
10683 if (flag_unsafe_math_optimizations
10684 && TREE_CODE (arg1) == CONJ_EXPR
10685 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
10686 return fold_mult_zconjz (type, arg0);
10688 if (flag_unsafe_math_optimizations)
10690 enum built_in_function fcode0 = builtin_mathfn_code (arg0);
10691 enum built_in_function fcode1 = builtin_mathfn_code (arg1);
10693 /* Optimizations of root(...)*root(...). */
10694 if (fcode0 == fcode1 && BUILTIN_ROOT_P (fcode0))
10696 tree rootfn, arg;
10697 tree arg00 = CALL_EXPR_ARG (arg0, 0);
10698 tree arg10 = CALL_EXPR_ARG (arg1, 0);
10700 /* Optimize sqrt(x)*sqrt(x) as x. */
10701 if (BUILTIN_SQRT_P (fcode0)
10702 && operand_equal_p (arg00, arg10, 0)
10703 && ! HONOR_SNANS (TYPE_MODE (type)))
10704 return arg00;
10706 /* Optimize root(x)*root(y) as root(x*y). */
10707 rootfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
10708 arg = fold_build2 (MULT_EXPR, type, arg00, arg10);
10709 return build_call_expr (rootfn, 1, arg);
10712 /* Optimize expN(x)*expN(y) as expN(x+y). */
10713 if (fcode0 == fcode1 && BUILTIN_EXPONENT_P (fcode0))
10715 tree expfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
10716 tree arg = fold_build2 (PLUS_EXPR, type,
10717 CALL_EXPR_ARG (arg0, 0),
10718 CALL_EXPR_ARG (arg1, 0));
10719 return build_call_expr (expfn, 1, arg);
10722 /* Optimizations of pow(...)*pow(...). */
10723 if ((fcode0 == BUILT_IN_POW && fcode1 == BUILT_IN_POW)
10724 || (fcode0 == BUILT_IN_POWF && fcode1 == BUILT_IN_POWF)
10725 || (fcode0 == BUILT_IN_POWL && fcode1 == BUILT_IN_POWL))
10727 tree arg00 = CALL_EXPR_ARG (arg0, 0);
10728 tree arg01 = CALL_EXPR_ARG (arg0, 1);
10729 tree arg10 = CALL_EXPR_ARG (arg1, 0);
10730 tree arg11 = CALL_EXPR_ARG (arg1, 1);
10732 /* Optimize pow(x,y)*pow(z,y) as pow(x*z,y). */
10733 if (operand_equal_p (arg01, arg11, 0))
10735 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
10736 tree arg = fold_build2 (MULT_EXPR, type, arg00, arg10);
10737 return build_call_expr (powfn, 2, arg, arg01);
10740 /* Optimize pow(x,y)*pow(x,z) as pow(x,y+z). */
10741 if (operand_equal_p (arg00, arg10, 0))
10743 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
10744 tree arg = fold_build2 (PLUS_EXPR, type, arg01, arg11);
10745 return build_call_expr (powfn, 2, arg00, arg);
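	      /* Illustrative examples (added annotation, not in the
		 original source): under -funsafe-math-optimizations,
		   pow (x, 2.0) * pow (x, 3.0)  becomes  pow (x, 5.0)
		 and
		   pow (x, y) * pow (z, y)      becomes  pow (x * z, y).  */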
10749 /* Optimize tan(x)*cos(x) as sin(x). */
10750 if (((fcode0 == BUILT_IN_TAN && fcode1 == BUILT_IN_COS)
10751 || (fcode0 == BUILT_IN_TANF && fcode1 == BUILT_IN_COSF)
10752 || (fcode0 == BUILT_IN_TANL && fcode1 == BUILT_IN_COSL)
10753 || (fcode0 == BUILT_IN_COS && fcode1 == BUILT_IN_TAN)
10754 || (fcode0 == BUILT_IN_COSF && fcode1 == BUILT_IN_TANF)
10755 || (fcode0 == BUILT_IN_COSL && fcode1 == BUILT_IN_TANL))
10756 && operand_equal_p (CALL_EXPR_ARG (arg0, 0),
10757 CALL_EXPR_ARG (arg1, 0), 0))
10759 tree sinfn = mathfn_built_in (type, BUILT_IN_SIN);
10761 if (sinfn != NULL_TREE)
10762 return build_call_expr (sinfn, 1, CALL_EXPR_ARG (arg0, 0));
10765 /* Optimize x*pow(x,c) as pow(x,c+1). */
10766 if (fcode1 == BUILT_IN_POW
10767 || fcode1 == BUILT_IN_POWF
10768 || fcode1 == BUILT_IN_POWL)
10770 tree arg10 = CALL_EXPR_ARG (arg1, 0);
10771 tree arg11 = CALL_EXPR_ARG (arg1, 1);
10772 if (TREE_CODE (arg11) == REAL_CST
10773 && !TREE_OVERFLOW (arg11)
10774 && operand_equal_p (arg0, arg10, 0))
10776 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg1), 0);
10777 REAL_VALUE_TYPE c;
10778 tree arg;
10780 c = TREE_REAL_CST (arg11);
10781 real_arithmetic (&c, PLUS_EXPR, &c, &dconst1);
10782 arg = build_real (type, c);
10783 return build_call_expr (powfn, 2, arg0, arg);
10787 /* Optimize pow(x,c)*x as pow(x,c+1). */
10788 if (fcode0 == BUILT_IN_POW
10789 || fcode0 == BUILT_IN_POWF
10790 || fcode0 == BUILT_IN_POWL)
10792 tree arg00 = CALL_EXPR_ARG (arg0, 0);
10793 tree arg01 = CALL_EXPR_ARG (arg0, 1);
10794 if (TREE_CODE (arg01) == REAL_CST
10795 && !TREE_OVERFLOW (arg01)
10796 && operand_equal_p (arg1, arg00, 0))
10798 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
10799 REAL_VALUE_TYPE c;
10800 tree arg;
10802 c = TREE_REAL_CST (arg01);
10803 real_arithmetic (&c, PLUS_EXPR, &c, &dconst1);
10804 arg = build_real (type, c);
10805 return build_call_expr (powfn, 2, arg1, arg);
10809 /* Optimize x*x as pow(x,2.0), which is expanded as x*x. */
10810 if (optimize_function_for_speed_p (cfun)
10811 && operand_equal_p (arg0, arg1, 0))
10813 tree powfn = mathfn_built_in (type, BUILT_IN_POW);
10815 if (powfn)
10817 tree arg = build_real (type, dconst2);
10818 return build_call_expr (powfn, 2, arg0, arg);
10823 goto associate;
10825 case BIT_IOR_EXPR:
10826 bit_ior:
10827 if (integer_all_onesp (arg1))
10828 return omit_one_operand (type, arg1, arg0);
10829 if (integer_zerop (arg1))
10830 return non_lvalue (fold_convert (type, arg0));
10831 if (operand_equal_p (arg0, arg1, 0))
10832 return non_lvalue (fold_convert (type, arg0));
10834 /* ~X | X is -1. */
10835 if (TREE_CODE (arg0) == BIT_NOT_EXPR
10836 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
10838 t1 = fold_convert (type, integer_zero_node);
10839 t1 = fold_unary (BIT_NOT_EXPR, type, t1);
10840 return omit_one_operand (type, t1, arg1);
10843 /* X | ~X is -1. */
10844 if (TREE_CODE (arg1) == BIT_NOT_EXPR
10845 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
10847 t1 = fold_convert (type, integer_zero_node);
10848 t1 = fold_unary (BIT_NOT_EXPR, type, t1);
10849 return omit_one_operand (type, t1, arg0);
10852 /* Canonicalize (X & C1) | C2. */
10853 if (TREE_CODE (arg0) == BIT_AND_EXPR
10854 && TREE_CODE (arg1) == INTEGER_CST
10855 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
10857 unsigned HOST_WIDE_INT hi1, lo1, hi2, lo2, hi3, lo3, mlo, mhi;
10858 int width = TYPE_PRECISION (type), w;
10859 hi1 = TREE_INT_CST_HIGH (TREE_OPERAND (arg0, 1));
10860 lo1 = TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1));
10861 hi2 = TREE_INT_CST_HIGH (arg1);
10862 lo2 = TREE_INT_CST_LOW (arg1);
10864 /* If (C1&C2) == C1, then (X&C1)|C2 becomes (X,C2). */
10865 if ((hi1 & hi2) == hi1 && (lo1 & lo2) == lo1)
10866 return omit_one_operand (type, arg1, TREE_OPERAND (arg0, 0));
10868 if (width > HOST_BITS_PER_WIDE_INT)
10870 mhi = (unsigned HOST_WIDE_INT) -1
10871 >> (2 * HOST_BITS_PER_WIDE_INT - width);
10872 mlo = -1;
10874 else
10876 mhi = 0;
10877 mlo = (unsigned HOST_WIDE_INT) -1
10878 >> (HOST_BITS_PER_WIDE_INT - width);
10881 /* If (C1|C2) == ~0 then (X&C1)|C2 becomes X|C2. */
10882 if ((~(hi1 | hi2) & mhi) == 0 && (~(lo1 | lo2) & mlo) == 0)
10883 return fold_build2 (BIT_IOR_EXPR, type,
10884 TREE_OPERAND (arg0, 0), arg1);
10886 /* Minimize the number of bits set in C1, i.e. C1 := C1 & ~C2,
10887 unless (C1 & ~C2) | (C2 & C3) for some C3 is a mask of some
10888 mode which allows further optimizations. */
10889 hi1 &= mhi;
10890 lo1 &= mlo;
10891 hi2 &= mhi;
10892 lo2 &= mlo;
10893 hi3 = hi1 & ~hi2;
10894 lo3 = lo1 & ~lo2;
10895 for (w = BITS_PER_UNIT;
10896 w <= width && w <= HOST_BITS_PER_WIDE_INT;
10897 w <<= 1)
10899 unsigned HOST_WIDE_INT mask
10900 = (unsigned HOST_WIDE_INT) -1 >> (HOST_BITS_PER_WIDE_INT - w);
10901 if (((lo1 | lo2) & mask) == mask
10902 && (lo1 & ~mask) == 0 && hi1 == 0)
10904 hi3 = 0;
10905 lo3 = mask;
10906 break;
10909 if (hi3 != hi1 || lo3 != lo1)
10910 return fold_build2 (BIT_IOR_EXPR, type,
10911 fold_build2 (BIT_AND_EXPR, type,
10912 TREE_OPERAND (arg0, 0),
10913 build_int_cst_wide (type,
10914 lo3, hi3)),
10915 arg1);
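/* Worked example (illustrative): (x & 0x0f0f) | 0x00ff shrinks C1 to
   C1 & ~C2, giving (x & 0x0f00) | 0x00ff; and (x & 0x00ff) | 0x00ff
   reduces to the constant 0x00ff since (C1 & C2) == C1.  */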
10918 /* (X & Y) | Y is (X, Y). */
10919 if (TREE_CODE (arg0) == BIT_AND_EXPR
10920 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
10921 return omit_one_operand (type, arg1, TREE_OPERAND (arg0, 0));
10922 /* (X & Y) | X is (Y, X). */
10923 if (TREE_CODE (arg0) == BIT_AND_EXPR
10924 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
10925 && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
10926 return omit_one_operand (type, arg1, TREE_OPERAND (arg0, 1));
10927 /* X | (X & Y) is (Y, X). */
10928 if (TREE_CODE (arg1) == BIT_AND_EXPR
10929 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0)
10930 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 1)))
10931 return omit_one_operand (type, arg0, TREE_OPERAND (arg1, 1));
10932 /* X | (Y & X) is (Y, X). */
10933 if (TREE_CODE (arg1) == BIT_AND_EXPR
10934 && operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0)
10935 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
10936 return omit_one_operand (type, arg0, TREE_OPERAND (arg1, 0));
10938 t1 = distribute_bit_expr (code, type, arg0, arg1);
10939 if (t1 != NULL_TREE)
10940 return t1;
10942 /* Convert (or (not arg0) (not arg1)) to (not (and (arg0) (arg1))).
10944 This results in more efficient code for machines without a NAND
10945 instruction. Combine will canonicalize to the first form
10946 which will allow use of NAND instructions provided by the
10947 backend if they exist. */
10948 if (TREE_CODE (arg0) == BIT_NOT_EXPR
10949 && TREE_CODE (arg1) == BIT_NOT_EXPR)
10951 return fold_build1 (BIT_NOT_EXPR, type,
10952 build2 (BIT_AND_EXPR, type,
10953 fold_convert (type,
10954 TREE_OPERAND (arg0, 0)),
10955 fold_convert (type,
10956 TREE_OPERAND (arg1, 0))));
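/* For example (illustrative): ~a | ~b becomes ~(a & b), which the
   combiner can match to a single NAND instruction where one exists.  */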
10959 /* See if this can be simplified into a rotate first. If that
10960 is unsuccessful continue in the association code. */
10961 goto bit_rotate;
10963 case BIT_XOR_EXPR:
10964 if (integer_zerop (arg1))
10965 return non_lvalue (fold_convert (type, arg0));
10966 if (integer_all_onesp (arg1))
10967 return fold_build1 (BIT_NOT_EXPR, type, op0);
10968 if (operand_equal_p (arg0, arg1, 0))
10969 return omit_one_operand (type, integer_zero_node, arg0);
10971 /* ~X ^ X is -1. */
10972 if (TREE_CODE (arg0) == BIT_NOT_EXPR
10973 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
10975 t1 = fold_convert (type, integer_zero_node);
10976 t1 = fold_unary (BIT_NOT_EXPR, type, t1);
10977 return omit_one_operand (type, t1, arg1);
10980 /* X ^ ~X is -1. */
10981 if (TREE_CODE (arg1) == BIT_NOT_EXPR
10982 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
10984 t1 = fold_convert (type, integer_zero_node);
10985 t1 = fold_unary (BIT_NOT_EXPR, type, t1);
10986 return omit_one_operand (type, t1, arg0);
10989 /* If we are XORing two BIT_AND_EXPR's, both of which are and'ing
10990 with a constant, and the two constants have no bits in common,
10991 we should treat this as a BIT_IOR_EXPR since this may produce more
10992 simplifications. */
10993 if (TREE_CODE (arg0) == BIT_AND_EXPR
10994 && TREE_CODE (arg1) == BIT_AND_EXPR
10995 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
10996 && TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
10997 && integer_zerop (const_binop (BIT_AND_EXPR,
10998 TREE_OPERAND (arg0, 1),
10999 TREE_OPERAND (arg1, 1), 0)))
11001 code = BIT_IOR_EXPR;
11002 goto bit_ior;
11005 /* (X | Y) ^ X -> Y & ~X.  */
11006 if (TREE_CODE (arg0) == BIT_IOR_EXPR
11007 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
11009 tree t2 = TREE_OPERAND (arg0, 1);
11010 t1 = fold_build1 (BIT_NOT_EXPR, TREE_TYPE (arg1),
11011 arg1);
11012 t1 = fold_build2 (BIT_AND_EXPR, type, fold_convert (type, t2),
11013 fold_convert (type, t1));
11014 return t1;
11017 /* (Y | X) ^ X -> Y & ~X.  */
11018 if (TREE_CODE (arg0) == BIT_IOR_EXPR
11019 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
11021 tree t2 = TREE_OPERAND (arg0, 0);
11022 t1 = fold_build1 (BIT_NOT_EXPR, TREE_TYPE (arg1),
11023 arg1);
11024 t1 = fold_build2 (BIT_AND_EXPR, type, fold_convert (type, t2),
11025 fold_convert (type, t1));
11026 return t1;
11029 /* X ^ (X | Y) -> Y & ~X.  */
11030 if (TREE_CODE (arg1) == BIT_IOR_EXPR
11031 && operand_equal_p (TREE_OPERAND (arg1, 0), arg0, 0))
11033 tree t2 = TREE_OPERAND (arg1, 1);
11034 t1 = fold_build1 (BIT_NOT_EXPR, TREE_TYPE (arg0),
11035 arg0);
11036 t1 = fold_build2 (BIT_AND_EXPR, type, fold_convert (type, t2),
11037 fold_convert (type, t1));
11038 return t1;
11041 /* X ^ (Y | X) -> Y & ~X.  */
11042 if (TREE_CODE (arg1) == BIT_IOR_EXPR
11043 && operand_equal_p (TREE_OPERAND (arg1, 1), arg0, 0))
11045 tree t2 = TREE_OPERAND (arg1, 0);
11046 t1 = fold_build1 (BIT_NOT_EXPR, TREE_TYPE (arg0),
11047 arg0);
11048 t1 = fold_build2 (BIT_AND_EXPR, type, fold_convert (type, t2),
11049 fold_convert (type, t1));
11050 return t1;
11053 /* Convert ~X ^ ~Y to X ^ Y. */
11054 if (TREE_CODE (arg0) == BIT_NOT_EXPR
11055 && TREE_CODE (arg1) == BIT_NOT_EXPR)
11056 return fold_build2 (code, type,
11057 fold_convert (type, TREE_OPERAND (arg0, 0)),
11058 fold_convert (type, TREE_OPERAND (arg1, 0)));
11060 /* Convert ~X ^ C to X ^ ~C. */
11061 if (TREE_CODE (arg0) == BIT_NOT_EXPR
11062 && TREE_CODE (arg1) == INTEGER_CST)
11063 return fold_build2 (code, type,
11064 fold_convert (type, TREE_OPERAND (arg0, 0)),
11065 fold_build1 (BIT_NOT_EXPR, type, arg1));
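/* For example (illustrative): ~a ^ ~b becomes a ^ b, and ~a ^ 5
   becomes a ^ ~5, folding the inversion into the constant.  */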
11067 /* Fold (X & 1) ^ 1 as (X & 1) == 0. */
11068 if (TREE_CODE (arg0) == BIT_AND_EXPR
11069 && integer_onep (TREE_OPERAND (arg0, 1))
11070 && integer_onep (arg1))
11071 return fold_build2 (EQ_EXPR, type, arg0,
11072 build_int_cst (TREE_TYPE (arg0), 0));
11074 /* Fold (X & Y) ^ Y as ~X & Y. */
11075 if (TREE_CODE (arg0) == BIT_AND_EXPR
11076 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
11078 tem = fold_convert (type, TREE_OPERAND (arg0, 0));
11079 return fold_build2 (BIT_AND_EXPR, type,
11080 fold_build1 (BIT_NOT_EXPR, type, tem),
11081 fold_convert (type, arg1));
11083 /* Fold (X & Y) ^ X as ~Y & X. */
11084 if (TREE_CODE (arg0) == BIT_AND_EXPR
11085 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
11086 && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
11088 tem = fold_convert (type, TREE_OPERAND (arg0, 1));
11089 return fold_build2 (BIT_AND_EXPR, type,
11090 fold_build1 (BIT_NOT_EXPR, type, tem),
11091 fold_convert (type, arg1));
11093 /* Fold X ^ (X & Y) as X & ~Y. */
11094 if (TREE_CODE (arg1) == BIT_AND_EXPR
11095 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
11097 tem = fold_convert (type, TREE_OPERAND (arg1, 1));
11098 return fold_build2 (BIT_AND_EXPR, type,
11099 fold_convert (type, arg0),
11100 fold_build1 (BIT_NOT_EXPR, type, tem));
11102 /* Fold X ^ (Y & X) as ~Y & X. */
11103 if (TREE_CODE (arg1) == BIT_AND_EXPR
11104 && operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0)
11105 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
11107 tem = fold_convert (type, TREE_OPERAND (arg1, 0));
11108 return fold_build2 (BIT_AND_EXPR, type,
11109 fold_build1 (BIT_NOT_EXPR, type, tem),
11110 fold_convert (type, arg0));
11113 /* See if this can be simplified into a rotate first. If that
11114 is unsuccessful continue in the association code. */
11115 goto bit_rotate;
11117 case BIT_AND_EXPR:
11118 if (integer_all_onesp (arg1))
11119 return non_lvalue (fold_convert (type, arg0));
11120 if (integer_zerop (arg1))
11121 return omit_one_operand (type, arg1, arg0);
11122 if (operand_equal_p (arg0, arg1, 0))
11123 return non_lvalue (fold_convert (type, arg0));
11125 /* ~X & X is always zero. */
11126 if (TREE_CODE (arg0) == BIT_NOT_EXPR
11127 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
11128 return omit_one_operand (type, integer_zero_node, arg1);
11130 /* X & ~X is always zero. */
11131 if (TREE_CODE (arg1) == BIT_NOT_EXPR
11132 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
11133 return omit_one_operand (type, integer_zero_node, arg0);
11135 /* Canonicalize (X | C1) & C2 as (X & C2) | (C1 & C2). */
11136 if (TREE_CODE (arg0) == BIT_IOR_EXPR
11137 && TREE_CODE (arg1) == INTEGER_CST
11138 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
11140 tree tmp1 = fold_convert (type, arg1);
11141 tree tmp2 = fold_convert (type, TREE_OPERAND (arg0, 0));
11142 tree tmp3 = fold_convert (type, TREE_OPERAND (arg0, 1));
11143 tmp2 = fold_build2 (BIT_AND_EXPR, type, tmp2, tmp1);
11144 tmp3 = fold_build2 (BIT_AND_EXPR, type, tmp3, tmp1);
11145 return fold_convert (type,
11146 fold_build2 (BIT_IOR_EXPR, type, tmp2, tmp3));
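/* Worked example (illustrative): (x | 0x0ff0) & 0x00ff distributes to
   (x & 0x00ff) | (0x0ff0 & 0x00ff), i.e. (x & 0x00ff) | 0x00f0.  */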
11149 /* (X | Y) & Y is (X, Y). */
11150 if (TREE_CODE (arg0) == BIT_IOR_EXPR
11151 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
11152 return omit_one_operand (type, arg1, TREE_OPERAND (arg0, 0));
11153 /* (X | Y) & X is (Y, X). */
11154 if (TREE_CODE (arg0) == BIT_IOR_EXPR
11155 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
11156 && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
11157 return omit_one_operand (type, arg1, TREE_OPERAND (arg0, 1));
11158 /* X & (X | Y) is (Y, X). */
11159 if (TREE_CODE (arg1) == BIT_IOR_EXPR
11160 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0)
11161 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 1)))
11162 return omit_one_operand (type, arg0, TREE_OPERAND (arg1, 1));
11163 /* X & (Y | X) is (Y, X). */
11164 if (TREE_CODE (arg1) == BIT_IOR_EXPR
11165 && operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0)
11166 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
11167 return omit_one_operand (type, arg0, TREE_OPERAND (arg1, 0));
11169 /* Fold (X ^ 1) & 1 as (X & 1) == 0. */
11170 if (TREE_CODE (arg0) == BIT_XOR_EXPR
11171 && integer_onep (TREE_OPERAND (arg0, 1))
11172 && integer_onep (arg1))
11174 tem = TREE_OPERAND (arg0, 0);
11175 return fold_build2 (EQ_EXPR, type,
11176 fold_build2 (BIT_AND_EXPR, TREE_TYPE (tem), tem,
11177 build_int_cst (TREE_TYPE (tem), 1)),
11178 build_int_cst (TREE_TYPE (tem), 0));
11180 /* Fold ~X & 1 as (X & 1) == 0. */
11181 if (TREE_CODE (arg0) == BIT_NOT_EXPR
11182 && integer_onep (arg1))
11184 tem = TREE_OPERAND (arg0, 0);
11185 return fold_build2 (EQ_EXPR, type,
11186 fold_build2 (BIT_AND_EXPR, TREE_TYPE (tem), tem,
11187 build_int_cst (TREE_TYPE (tem), 1)),
11188 build_int_cst (TREE_TYPE (tem), 0));
11191 /* Fold (X ^ Y) & Y as ~X & Y. */
11192 if (TREE_CODE (arg0) == BIT_XOR_EXPR
11193 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
11195 tem = fold_convert (type, TREE_OPERAND (arg0, 0));
11196 return fold_build2 (BIT_AND_EXPR, type,
11197 fold_build1 (BIT_NOT_EXPR, type, tem),
11198 fold_convert (type, arg1));
11200 /* Fold (X ^ Y) & X as ~Y & X. */
11201 if (TREE_CODE (arg0) == BIT_XOR_EXPR
11202 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
11203 && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
11205 tem = fold_convert (type, TREE_OPERAND (arg0, 1));
11206 return fold_build2 (BIT_AND_EXPR, type,
11207 fold_build1 (BIT_NOT_EXPR, type, tem),
11208 fold_convert (type, arg1));
11210 /* Fold X & (X ^ Y) as X & ~Y. */
11211 if (TREE_CODE (arg1) == BIT_XOR_EXPR
11212 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
11214 tem = fold_convert (type, TREE_OPERAND (arg1, 1));
11215 return fold_build2 (BIT_AND_EXPR, type,
11216 fold_convert (type, arg0),
11217 fold_build1 (BIT_NOT_EXPR, type, tem));
11219 /* Fold X & (Y ^ X) as ~Y & X. */
11220 if (TREE_CODE (arg1) == BIT_XOR_EXPR
11221 && operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0)
11222 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
11224 tem = fold_convert (type, TREE_OPERAND (arg1, 0));
11225 return fold_build2 (BIT_AND_EXPR, type,
11226 fold_build1 (BIT_NOT_EXPR, type, tem),
11227 fold_convert (type, arg0));
11230 t1 = distribute_bit_expr (code, type, arg0, arg1);
11231 if (t1 != NULL_TREE)
11232 return t1;
11233 /* Simplify ((int)c & 0377) into (int)c, if c is unsigned char. */
11234 if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg0) == NOP_EXPR
11235 && TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg0, 0))))
11237 unsigned int prec
11238 = TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg0, 0)));
11240 if (prec < BITS_PER_WORD && prec < HOST_BITS_PER_WIDE_INT
11241 && (~TREE_INT_CST_LOW (arg1)
11242 & (((HOST_WIDE_INT) 1 << prec) - 1)) == 0)
11243 return fold_convert (type, TREE_OPERAND (arg0, 0));
11246 /* Convert (and (not arg0) (not arg1)) to (not (or (arg0) (arg1))).
11248 This results in more efficient code for machines without a NOR
11249 instruction. Combine will canonicalize to the first form
11250 which will allow use of NOR instructions provided by the
11251 backend if they exist. */
11252 if (TREE_CODE (arg0) == BIT_NOT_EXPR
11253 && TREE_CODE (arg1) == BIT_NOT_EXPR)
11255 return fold_build1 (BIT_NOT_EXPR, type,
11256 build2 (BIT_IOR_EXPR, type,
11257 fold_convert (type,
11258 TREE_OPERAND (arg0, 0)),
11259 fold_convert (type,
11260 TREE_OPERAND (arg1, 0))));
11263 /* If arg0 is derived from the address of an object or function, we may
11264 be able to fold this expression using the object or function's
11265 alignment. */
11266 if (POINTER_TYPE_P (TREE_TYPE (arg0)) && host_integerp (arg1, 1))
11268 unsigned HOST_WIDE_INT modulus, residue;
11269 unsigned HOST_WIDE_INT low = TREE_INT_CST_LOW (arg1);
11271 modulus = get_pointer_modulus_and_residue (arg0, &residue);
11273 /* This works because modulus is a power of 2. If this weren't the
11274 case, we'd have to replace it by its greatest power-of-2
11275 divisor: modulus & -modulus. */
11276 if (low < modulus)
11277 return build_int_cst (type, residue & low);
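/* For example (illustrative): if arg0 is &var where var is known to be
   16-byte aligned, then arg0 & 15 folds to the constant residue of the
   address modulo 16, typically 0.  */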
11280 /* Fold (X << C1) & C2 into (X << C1) & (C2 | ((1 << C1) - 1))
11281 (X >> C1) & C2 into (X >> C1) & (C2 | ~((type) -1 >> C1))
11282 if the new mask might be further optimized. */
11283 if ((TREE_CODE (arg0) == LSHIFT_EXPR
11284 || TREE_CODE (arg0) == RSHIFT_EXPR)
11285 && host_integerp (TREE_OPERAND (arg0, 1), 1)
11286 && host_integerp (arg1, TYPE_UNSIGNED (TREE_TYPE (arg1)))
11287 && tree_low_cst (TREE_OPERAND (arg0, 1), 1)
11288 < TYPE_PRECISION (TREE_TYPE (arg0))
11289 && TYPE_PRECISION (TREE_TYPE (arg0)) <= HOST_BITS_PER_WIDE_INT
11290 && tree_low_cst (TREE_OPERAND (arg0, 1), 1) > 0)
11292 unsigned int shiftc = tree_low_cst (TREE_OPERAND (arg0, 1), 1);
11293 unsigned HOST_WIDE_INT mask
11294 = tree_low_cst (arg1, TYPE_UNSIGNED (TREE_TYPE (arg1)));
11295 unsigned HOST_WIDE_INT newmask, zerobits = 0;
11296 tree shift_type = TREE_TYPE (arg0);
11298 if (TREE_CODE (arg0) == LSHIFT_EXPR)
11299 zerobits = ((((unsigned HOST_WIDE_INT) 1) << shiftc) - 1);
11300 else if (TREE_CODE (arg0) == RSHIFT_EXPR
11301 && TYPE_PRECISION (TREE_TYPE (arg0))
11302 == GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (arg0))))
11304 unsigned int prec = TYPE_PRECISION (TREE_TYPE (arg0));
11305 tree arg00 = TREE_OPERAND (arg0, 0);
11306 /* See if more bits can be proven as zero because of
11307 zero extension. */
11308 if (TREE_CODE (arg00) == NOP_EXPR
11309 && TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg00, 0))))
11311 tree inner_type = TREE_TYPE (TREE_OPERAND (arg00, 0));
11312 if (TYPE_PRECISION (inner_type)
11313 == GET_MODE_BITSIZE (TYPE_MODE (inner_type))
11314 && TYPE_PRECISION (inner_type) < prec)
11316 prec = TYPE_PRECISION (inner_type);
11317 /* See if we can shorten the right shift. */
11318 if (shiftc < prec)
11319 shift_type = inner_type;
11322 zerobits = ~(unsigned HOST_WIDE_INT) 0;
11323 zerobits >>= HOST_BITS_PER_WIDE_INT - shiftc;
11324 zerobits <<= prec - shiftc;
11325 /* For an arithmetic shift, if the sign bit could be set, zerobits
11326 may actually contain sign bits, so no transformation is
11327 possible unless MASK masks them all away. In that
11328 case the shift needs to be converted into a logical shift. */
11329 if (!TYPE_UNSIGNED (TREE_TYPE (arg0))
11330 && prec == TYPE_PRECISION (TREE_TYPE (arg0)))
11332 if ((mask & zerobits) == 0)
11333 shift_type = unsigned_type_for (TREE_TYPE (arg0));
11334 else
11335 zerobits = 0;
11339 /* ((X << 16) & 0xff00) is (X, 0). */
11340 if ((mask & zerobits) == mask)
11341 return omit_one_operand (type, build_int_cst (type, 0), arg0);
11343 newmask = mask | zerobits;
11344 if (newmask != mask && (newmask & (newmask + 1)) == 0)
11346 unsigned int prec;
11348 /* Only do the transformation if NEWMASK is some integer
11349 mode's mask. */
11350 for (prec = BITS_PER_UNIT;
11351 prec < HOST_BITS_PER_WIDE_INT; prec <<= 1)
11352 if (newmask == (((unsigned HOST_WIDE_INT) 1) << prec) - 1)
11353 break;
11354 if (prec < HOST_BITS_PER_WIDE_INT
11355 || newmask == ~(unsigned HOST_WIDE_INT) 0)
11357 if (shift_type != TREE_TYPE (arg0))
11359 tem = fold_build2 (TREE_CODE (arg0), shift_type,
11360 fold_convert (shift_type,
11361 TREE_OPERAND (arg0, 0)),
11362 TREE_OPERAND (arg0, 1));
11363 tem = fold_convert (type, tem);
11365 else
11366 tem = op0;
11367 return fold_build2 (BIT_AND_EXPR, type, tem,
11368 build_int_cst_type (TREE_TYPE (op1),
11369 newmask));
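/* Worked example (illustrative): for a 32-bit unsigned x, bits
   0xffffff00 of x >> 24 are known zero, so (x >> 24) & 0xff widens its
   mask to 0xffffffff and the AND then folds away entirely.  */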
11374 goto associate;
11376 case RDIV_EXPR:
11377 /* Don't touch a floating-point divide by zero unless the mode
11378 of the constant can represent infinity. */
11379 if (TREE_CODE (arg1) == REAL_CST
11380 && !MODE_HAS_INFINITIES (TYPE_MODE (TREE_TYPE (arg1)))
11381 && real_zerop (arg1))
11382 return NULL_TREE;
11384 /* Optimize A / A to 1.0 if we don't care about
11385 NaNs or Infinities. Skip the transformation
11386 for non-real operands. */
11387 if (SCALAR_FLOAT_TYPE_P (TREE_TYPE (arg0))
11388 && ! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0)))
11389 && ! HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (arg0)))
11390 && operand_equal_p (arg0, arg1, 0))
11392 tree r = build_real (TREE_TYPE (arg0), dconst1);
11394 return omit_two_operands (type, r, arg0, arg1);
11397 /* The complex version of the above A / A optimization. */
11398 if (COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0))
11399 && operand_equal_p (arg0, arg1, 0))
11401 tree elem_type = TREE_TYPE (TREE_TYPE (arg0));
11402 if (! HONOR_NANS (TYPE_MODE (elem_type))
11403 && ! HONOR_INFINITIES (TYPE_MODE (elem_type)))
11405 tree r = build_real (elem_type, dconst1);
11406 /* omit_two_operands will call fold_convert for us. */
11407 return omit_two_operands (type, r, arg0, arg1);
11411 /* (-A) / (-B) -> A / B */
11412 if (TREE_CODE (arg0) == NEGATE_EXPR && negate_expr_p (arg1))
11413 return fold_build2 (RDIV_EXPR, type,
11414 TREE_OPERAND (arg0, 0),
11415 negate_expr (arg1));
11416 if (TREE_CODE (arg1) == NEGATE_EXPR && negate_expr_p (arg0))
11417 return fold_build2 (RDIV_EXPR, type,
11418 negate_expr (arg0),
11419 TREE_OPERAND (arg1, 0));
11421 /* In IEEE floating point, x/1 is not equivalent to x for snans. */
11422 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
11423 && real_onep (arg1))
11424 return non_lvalue (fold_convert (type, arg0));
11426 /* In IEEE floating point, x/-1 is not equivalent to -x for snans. */
11427 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
11428 && real_minus_onep (arg1))
11429 return non_lvalue (fold_convert (type, negate_expr (arg0)));
11431 /* If ARG1 is a constant, we can convert this to a multiply by the
11432 reciprocal. This does not have the same rounding properties,
11433 so only do this if -freciprocal-math. We can actually
11434 always safely do it if ARG1 is a power of two, but it's hard to
11435 tell if it is or not in a portable manner. */
11436 if (TREE_CODE (arg1) == REAL_CST)
11438 if (flag_reciprocal_math
11439 && 0 != (tem = const_binop (code, build_real (type, dconst1),
11440 arg1, 0)))
11441 return fold_build2 (MULT_EXPR, type, arg0, tem);
11442 /* Find the reciprocal if optimizing and the result is exact. */
11443 if (optimize)
11445 REAL_VALUE_TYPE r;
11446 r = TREE_REAL_CST (arg1);
11447 if (exact_real_inverse (TYPE_MODE (TREE_TYPE (arg0)), &r))
11449 tem = build_real (type, r);
11450 return fold_build2 (MULT_EXPR, type,
11451 fold_convert (type, arg0), tem);
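/* For example (illustrative): with -freciprocal-math, x / 3.0 becomes
   x * (1.0 / 3.0); and when optimizing, x / 4.0 becomes x * 0.25 even
   without the flag, since 0.25 is an exact reciprocal.  */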
11455 /* Convert A/B/C to A/(B*C). */
11456 if (flag_reciprocal_math
11457 && TREE_CODE (arg0) == RDIV_EXPR)
11458 return fold_build2 (RDIV_EXPR, type, TREE_OPERAND (arg0, 0),
11459 fold_build2 (MULT_EXPR, type,
11460 TREE_OPERAND (arg0, 1), arg1));
11462 /* Convert A/(B/C) to (A/B)*C. */
11463 if (flag_reciprocal_math
11464 && TREE_CODE (arg1) == RDIV_EXPR)
11465 return fold_build2 (MULT_EXPR, type,
11466 fold_build2 (RDIV_EXPR, type, arg0,
11467 TREE_OPERAND (arg1, 0)),
11468 TREE_OPERAND (arg1, 1));
11470 /* Convert C1/(X*C2) into (C1/C2)/X. */
11471 if (flag_reciprocal_math
11472 && TREE_CODE (arg1) == MULT_EXPR
11473 && TREE_CODE (arg0) == REAL_CST
11474 && TREE_CODE (TREE_OPERAND (arg1, 1)) == REAL_CST)
11476 tree tem = const_binop (RDIV_EXPR, arg0,
11477 TREE_OPERAND (arg1, 1), 0);
11478 if (tem)
11479 return fold_build2 (RDIV_EXPR, type, tem,
11480 TREE_OPERAND (arg1, 0));
11483 if (flag_unsafe_math_optimizations)
11485 enum built_in_function fcode0 = builtin_mathfn_code (arg0);
11486 enum built_in_function fcode1 = builtin_mathfn_code (arg1);
11488 /* Optimize sin(x)/cos(x) as tan(x). */
11489 if (((fcode0 == BUILT_IN_SIN && fcode1 == BUILT_IN_COS)
11490 || (fcode0 == BUILT_IN_SINF && fcode1 == BUILT_IN_COSF)
11491 || (fcode0 == BUILT_IN_SINL && fcode1 == BUILT_IN_COSL))
11492 && operand_equal_p (CALL_EXPR_ARG (arg0, 0),
11493 CALL_EXPR_ARG (arg1, 0), 0))
11495 tree tanfn = mathfn_built_in (type, BUILT_IN_TAN);
11497 if (tanfn != NULL_TREE)
11498 return build_call_expr (tanfn, 1, CALL_EXPR_ARG (arg0, 0));
11501 /* Optimize cos(x)/sin(x) as 1.0/tan(x). */
11502 if (((fcode0 == BUILT_IN_COS && fcode1 == BUILT_IN_SIN)
11503 || (fcode0 == BUILT_IN_COSF && fcode1 == BUILT_IN_SINF)
11504 || (fcode0 == BUILT_IN_COSL && fcode1 == BUILT_IN_SINL))
11505 && operand_equal_p (CALL_EXPR_ARG (arg0, 0),
11506 CALL_EXPR_ARG (arg1, 0), 0))
11508 tree tanfn = mathfn_built_in (type, BUILT_IN_TAN);
11510 if (tanfn != NULL_TREE)
11512 tree tmp = build_call_expr (tanfn, 1, CALL_EXPR_ARG (arg0, 0));
11513 return fold_build2 (RDIV_EXPR, type,
11514 build_real (type, dconst1), tmp);
11518 /* Optimize sin(x)/tan(x) as cos(x) if we don't care about
11519 NaNs or Infinities. */
11520 if (((fcode0 == BUILT_IN_SIN && fcode1 == BUILT_IN_TAN)
11521 || (fcode0 == BUILT_IN_SINF && fcode1 == BUILT_IN_TANF)
11522 || (fcode0 == BUILT_IN_SINL && fcode1 == BUILT_IN_TANL)))
11524 tree arg00 = CALL_EXPR_ARG (arg0, 0);
11525 tree arg01 = CALL_EXPR_ARG (arg1, 0);
11527 if (! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg00)))
11528 && ! HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (arg00)))
11529 && operand_equal_p (arg00, arg01, 0))
11531 tree cosfn = mathfn_built_in (type, BUILT_IN_COS);
11533 if (cosfn != NULL_TREE)
11534 return build_call_expr (cosfn, 1, arg00);
11538 /* Optimize tan(x)/sin(x) as 1.0/cos(x) if we don't care about
11539 NaNs or Infinities. */
11540 if (((fcode0 == BUILT_IN_TAN && fcode1 == BUILT_IN_SIN)
11541 || (fcode0 == BUILT_IN_TANF && fcode1 == BUILT_IN_SINF)
11542 || (fcode0 == BUILT_IN_TANL && fcode1 == BUILT_IN_SINL)))
11544 tree arg00 = CALL_EXPR_ARG (arg0, 0);
11545 tree arg01 = CALL_EXPR_ARG (arg1, 0);
11547 if (! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg00)))
11548 && ! HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (arg00)))
11549 && operand_equal_p (arg00, arg01, 0))
11551 tree cosfn = mathfn_built_in (type, BUILT_IN_COS);
11553 if (cosfn != NULL_TREE)
11555 tree tmp = build_call_expr (cosfn, 1, arg00);
11556 return fold_build2 (RDIV_EXPR, type,
11557 build_real (type, dconst1),
11558 tmp);
11563 /* Optimize pow(x,c)/x as pow(x,c-1). */
11564 if (fcode0 == BUILT_IN_POW
11565 || fcode0 == BUILT_IN_POWF
11566 || fcode0 == BUILT_IN_POWL)
11568 tree arg00 = CALL_EXPR_ARG (arg0, 0);
11569 tree arg01 = CALL_EXPR_ARG (arg0, 1);
11570 if (TREE_CODE (arg01) == REAL_CST
11571 && !TREE_OVERFLOW (arg01)
11572 && operand_equal_p (arg1, arg00, 0))
11574 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
11575 REAL_VALUE_TYPE c;
11576 tree arg;
11578 c = TREE_REAL_CST (arg01);
11579 real_arithmetic (&c, MINUS_EXPR, &c, &dconst1);
11580 arg = build_real (type, c);
11581 return build_call_expr (powfn, 2, arg1, arg);
11585 /* Optimize a/root(b/c) into a*root(c/b). */
11586 if (BUILTIN_ROOT_P (fcode1))
11588 tree rootarg = CALL_EXPR_ARG (arg1, 0);
11590 if (TREE_CODE (rootarg) == RDIV_EXPR)
11592 tree rootfn = TREE_OPERAND (CALL_EXPR_FN (arg1), 0);
11593 tree b = TREE_OPERAND (rootarg, 0);
11594 tree c = TREE_OPERAND (rootarg, 1);
11596 tree tmp = fold_build2 (RDIV_EXPR, type, c, b);
11598 tmp = build_call_expr (rootfn, 1, tmp);
11599 return fold_build2 (MULT_EXPR, type, arg0, tmp);
11603 /* Optimize x/expN(y) into x*expN(-y). */
11604 if (BUILTIN_EXPONENT_P (fcode1))
11606 tree expfn = TREE_OPERAND (CALL_EXPR_FN (arg1), 0);
11607 tree arg = negate_expr (CALL_EXPR_ARG (arg1, 0));
11608 arg1 = build_call_expr (expfn, 1, fold_convert (type, arg));
11609 return fold_build2 (MULT_EXPR, type, arg0, arg1);
11612 /* Optimize x/pow(y,z) into x*pow(y,-z). */
11613 if (fcode1 == BUILT_IN_POW
11614 || fcode1 == BUILT_IN_POWF
11615 || fcode1 == BUILT_IN_POWL)
11617 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg1), 0);
11618 tree arg10 = CALL_EXPR_ARG (arg1, 0);
11619 tree arg11 = CALL_EXPR_ARG (arg1, 1);
11620 tree neg11 = fold_convert (type, negate_expr (arg11));
11621 arg1 = build_call_expr (powfn, 2, arg10, neg11);
11622 return fold_build2 (MULT_EXPR, type, arg0, arg1);
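/* For example (illustrative): x / exp (y) becomes x * exp (-y) and
   x / pow (y, z) becomes x * pow (y, -z), trading the division for a
   multiplication.  */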
11625 return NULL_TREE;
11627 case TRUNC_DIV_EXPR:
11628 case FLOOR_DIV_EXPR:
11629 /* Simplify A / (B << N) where A and B are positive and B is
11630 a power of 2, to A >> (N + log2(B)). */
11631 strict_overflow_p = false;
11632 if (TREE_CODE (arg1) == LSHIFT_EXPR
11633 && (TYPE_UNSIGNED (type)
11634 || tree_expr_nonnegative_warnv_p (op0, &strict_overflow_p)))
11636 tree sval = TREE_OPERAND (arg1, 0);
11637 if (integer_pow2p (sval) && tree_int_cst_sgn (sval) > 0)
11639 tree sh_cnt = TREE_OPERAND (arg1, 1);
11640 unsigned long pow2 = exact_log2 (TREE_INT_CST_LOW (sval));
11642 if (strict_overflow_p)
11643 fold_overflow_warning (("assuming signed overflow does not "
11644 "occur when simplifying A / (B << N)"),
11645 WARN_STRICT_OVERFLOW_MISC);
11647 sh_cnt = fold_build2 (PLUS_EXPR, TREE_TYPE (sh_cnt),
11648 sh_cnt, build_int_cst (NULL_TREE, pow2));
11649 return fold_build2 (RSHIFT_EXPR, type,
11650 fold_convert (type, arg0), sh_cnt);
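/* Worked example (illustrative): for unsigned a, a / (4 << n) becomes
   a >> (n + 2), since log2 (4) == 2.  */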
11654 /* For unsigned integral types, FLOOR_DIV_EXPR is the same as
11655 TRUNC_DIV_EXPR. Rewrite into the latter in this case. */
11656 if (INTEGRAL_TYPE_P (type)
11657 && TYPE_UNSIGNED (type)
11658 && code == FLOOR_DIV_EXPR)
11659 return fold_build2 (TRUNC_DIV_EXPR, type, op0, op1);
11661 /* Fall through. */
11663 case ROUND_DIV_EXPR:
11664 case CEIL_DIV_EXPR:
11665 case EXACT_DIV_EXPR:
11666 if (integer_onep (arg1))
11667 return non_lvalue (fold_convert (type, arg0));
11668 if (integer_zerop (arg1))
11669 return NULL_TREE;
11670 /* X / -1 is -X. */
11671 if (!TYPE_UNSIGNED (type)
11672 && TREE_CODE (arg1) == INTEGER_CST
11673 && TREE_INT_CST_LOW (arg1) == (unsigned HOST_WIDE_INT) -1
11674 && TREE_INT_CST_HIGH (arg1) == -1)
11675 return fold_convert (type, negate_expr (arg0));
11677 /* Convert -A / -B to A / B when the type is signed and overflow is
11678 undefined. */
11679 if ((!INTEGRAL_TYPE_P (type) || TYPE_OVERFLOW_UNDEFINED (type))
11680 && TREE_CODE (arg0) == NEGATE_EXPR
11681 && negate_expr_p (arg1))
11683 if (INTEGRAL_TYPE_P (type))
11684 fold_overflow_warning (("assuming signed overflow does not occur "
11685 "when distributing negation across "
11686 "division"),
11687 WARN_STRICT_OVERFLOW_MISC);
11688 return fold_build2 (code, type,
11689 fold_convert (type, TREE_OPERAND (arg0, 0)),
11690 fold_convert (type, negate_expr (arg1)));
11692 if ((!INTEGRAL_TYPE_P (type) || TYPE_OVERFLOW_UNDEFINED (type))
11693 && TREE_CODE (arg1) == NEGATE_EXPR
11694 && negate_expr_p (arg0))
11696 if (INTEGRAL_TYPE_P (type))
11697 fold_overflow_warning (("assuming signed overflow does not occur "
11698 "when distributing negation across "
11699 "division"),
11700 WARN_STRICT_OVERFLOW_MISC);
11701 return fold_build2 (code, type,
11702 fold_convert (type, negate_expr (arg0)),
11703 fold_convert (type, TREE_OPERAND (arg1, 0)));
11706 /* If arg0 is a multiple of arg1, then rewrite to the fastest div
11707 operation, EXACT_DIV_EXPR.
11709 Note that only CEIL_DIV_EXPR and FLOOR_DIV_EXPR are rewritten now.
11710 At one time the others generated faster code; it's not clear whether
11711 they still do after the last round of changes to the DIV code in expmed.c. */
11712 if ((code == CEIL_DIV_EXPR || code == FLOOR_DIV_EXPR)
11713 && multiple_of_p (type, arg0, arg1))
11714 return fold_build2 (EXACT_DIV_EXPR, type, arg0, arg1);
11716 strict_overflow_p = false;
11717 if (TREE_CODE (arg1) == INTEGER_CST
11718 && 0 != (tem = extract_muldiv (op0, arg1, code, NULL_TREE,
11719 &strict_overflow_p)))
11721 if (strict_overflow_p)
11722 fold_overflow_warning (("assuming signed overflow does not occur "
11723 "when simplifying division"),
11724 WARN_STRICT_OVERFLOW_MISC);
11725 return fold_convert (type, tem);
11728 return NULL_TREE;
11730 case CEIL_MOD_EXPR:
11731 case FLOOR_MOD_EXPR:
11732 case ROUND_MOD_EXPR:
11733 case TRUNC_MOD_EXPR:
11734 /* X % 1 is always zero, but be sure to preserve any side
11735 effects in X. */
11736 if (integer_onep (arg1))
11737 return omit_one_operand (type, integer_zero_node, arg0);
11739 /* For X % 0, return the expression unchanged so that we get the
11740 proper warnings and errors. */
11741 if (integer_zerop (arg1))
11742 return NULL_TREE;
11744 /* 0 % X is always zero, but be sure to preserve any side
11745 effects in X. Place this after checking for X == 0. */
11746 if (integer_zerop (arg0))
11747 return omit_one_operand (type, integer_zero_node, arg1);
11749 /* X % -1 is zero. */
11750 if (!TYPE_UNSIGNED (type)
11751 && TREE_CODE (arg1) == INTEGER_CST
11752 && TREE_INT_CST_LOW (arg1) == (unsigned HOST_WIDE_INT) -1
11753 && TREE_INT_CST_HIGH (arg1) == -1)
11754 return omit_one_operand (type, integer_zero_node, arg0);
11756 /* Optimize TRUNC_MOD_EXPR by a power of two into a BIT_AND_EXPR,
11757 i.e. "X % C" into "X & (C - 1)", if X and C are positive. */
11758 strict_overflow_p = false;
11759 if ((code == TRUNC_MOD_EXPR || code == FLOOR_MOD_EXPR)
11760 && (TYPE_UNSIGNED (type)
11761 || tree_expr_nonnegative_warnv_p (op0, &strict_overflow_p)))
11763 tree c = arg1;
11764 /* Also optimize A % (C << N) where C is a power of 2,
11765 to A & ((C << N) - 1). */
11766 if (TREE_CODE (arg1) == LSHIFT_EXPR)
11767 c = TREE_OPERAND (arg1, 0);
11769 if (integer_pow2p (c) && tree_int_cst_sgn (c) > 0)
11771 tree mask = fold_build2 (MINUS_EXPR, TREE_TYPE (arg1), arg1,
11772 build_int_cst (TREE_TYPE (arg1), 1));
11773 if (strict_overflow_p)
11774 fold_overflow_warning (("assuming signed overflow does not "
11775 "occur when simplifying "
11776 "X % (power of two)"),
11777 WARN_STRICT_OVERFLOW_MISC);
11778 return fold_build2 (BIT_AND_EXPR, type,
11779 fold_convert (type, arg0),
11780 fold_convert (type, mask));
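/* Worked example (illustrative): for unsigned x, x % 8 becomes x & 7,
   and x % (2 << n) becomes x & ((2 << n) - 1).  */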
11784 /* X % -C is the same as X % C. */
11785 if (code == TRUNC_MOD_EXPR
11786 && !TYPE_UNSIGNED (type)
11787 && TREE_CODE (arg1) == INTEGER_CST
11788 && !TREE_OVERFLOW (arg1)
11789 && TREE_INT_CST_HIGH (arg1) < 0
11790 && !TYPE_OVERFLOW_TRAPS (type)
11791 /* Avoid this transformation if C is INT_MIN, i.e. C == -C. */
11792 && !sign_bit_p (arg1, arg1))
11793 return fold_build2 (code, type, fold_convert (type, arg0),
11794 fold_convert (type, negate_expr (arg1)));
11796 /* X % -Y is the same as X % Y. */
11797 if (code == TRUNC_MOD_EXPR
11798 && !TYPE_UNSIGNED (type)
11799 && TREE_CODE (arg1) == NEGATE_EXPR
11800 && !TYPE_OVERFLOW_TRAPS (type))
11801 return fold_build2 (code, type, fold_convert (type, arg0),
11802 fold_convert (type, TREE_OPERAND (arg1, 0)));
11804 if (TREE_CODE (arg1) == INTEGER_CST
11805 && 0 != (tem = extract_muldiv (op0, arg1, code, NULL_TREE,
11806 &strict_overflow_p)))
11808 if (strict_overflow_p)
11809 fold_overflow_warning (("assuming signed overflow does not occur "
11810 "when simplifying modulus"),
11811 WARN_STRICT_OVERFLOW_MISC);
11812 return fold_convert (type, tem);
11815 return NULL_TREE;
11817 case LROTATE_EXPR:
11818 case RROTATE_EXPR:
11819 if (integer_all_onesp (arg0))
11820 return omit_one_operand (type, arg0, arg1);
11821 goto shift;
11823 case RSHIFT_EXPR:
11824 /* Optimize -1 >> x for arithmetic right shifts. */
11825 if (integer_all_onesp (arg0) && !TYPE_UNSIGNED (type)
11826 && tree_expr_nonnegative_p (arg1))
11827 return omit_one_operand (type, arg0, arg1);
11828 /* ... fall through ... */
11830 case LSHIFT_EXPR:
11831 shift:
11832 if (integer_zerop (arg1))
11833 return non_lvalue (fold_convert (type, arg0));
11834 if (integer_zerop (arg0))
11835 return omit_one_operand (type, arg0, arg1);
11837 /* Since a negative shift count is not well-defined,
11838 don't try to compute it in the compiler. */
11839 if (TREE_CODE (arg1) == INTEGER_CST && tree_int_cst_sgn (arg1) < 0)
11840 return NULL_TREE;
11842 /* Turn (a OP c1) OP c2 into a OP (c1+c2). */
11843 if (TREE_CODE (op0) == code && host_integerp (arg1, false)
11844 && TREE_INT_CST_LOW (arg1) < TYPE_PRECISION (type)
11845 && host_integerp (TREE_OPERAND (arg0, 1), false)
11846 && TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1)) < TYPE_PRECISION (type))
11848 HOST_WIDE_INT low = (TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1))
11849 + TREE_INT_CST_LOW (arg1));
11851 /* Deal with a OP (c1 + c2) being undefined but (a OP c1) OP c2
11852 being well defined. */
11853 if (low >= TYPE_PRECISION (type))
11855 if (code == LROTATE_EXPR || code == RROTATE_EXPR)
11856 low = low % TYPE_PRECISION (type);
11857 else if (TYPE_UNSIGNED (type) || code == LSHIFT_EXPR)
11858 return build_int_cst (type, 0);
11859 else
11860 low = TYPE_PRECISION (type) - 1;
11863 return fold_build2 (code, type, TREE_OPERAND (arg0, 0),
11864 build_int_cst (type, low));
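/* Worked example (illustrative): (x >> 3) >> 5 becomes x >> 8; if the
   combined count reaches the precision, the result is 0 for unsigned
   or left shifts and a shift by precision - 1 for signed right shifts.  */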
11867 /* Transform (x >> c) << c into x & (-1<<c), or transform (x << c) >> c
11868 into x & ((unsigned)-1 >> c) for unsigned types. */
11869 if (((code == LSHIFT_EXPR && TREE_CODE (arg0) == RSHIFT_EXPR)
11870 || (TYPE_UNSIGNED (type)
11871 && code == RSHIFT_EXPR && TREE_CODE (arg0) == LSHIFT_EXPR))
11872 && host_integerp (arg1, false)
11873 && TREE_INT_CST_LOW (arg1) < TYPE_PRECISION (type)
11874 && host_integerp (TREE_OPERAND (arg0, 1), false)
11875 && TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1)) < TYPE_PRECISION (type))
11877 HOST_WIDE_INT low0 = TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1));
11878 HOST_WIDE_INT low1 = TREE_INT_CST_LOW (arg1);
11879 tree lshift;
11880 tree arg00;
11882 if (low0 == low1)
11884 arg00 = fold_convert (type, TREE_OPERAND (arg0, 0));
11886 lshift = build_int_cst (type, -1);
11887 lshift = int_const_binop (code, lshift, arg1, 0);
11889 return fold_build2 (BIT_AND_EXPR, type, arg00, lshift);
11893 /* Rewrite an LROTATE_EXPR by a constant into an
11894 RROTATE_EXPR by a new constant. */
11895 if (code == LROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST)
11897 tree tem = build_int_cst (TREE_TYPE (arg1),
11898 TYPE_PRECISION (type));
11899 tem = const_binop (MINUS_EXPR, tem, arg1, 0);
11900 return fold_build2 (RROTATE_EXPR, type, op0, tem);
11903 /* If we have a rotate of a bit operation with the rotate count and
11904 the second operand of the bit operation both constant,
11905 permute the two operations. */
11906 if (code == RROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST
11907 && (TREE_CODE (arg0) == BIT_AND_EXPR
11908 || TREE_CODE (arg0) == BIT_IOR_EXPR
11909 || TREE_CODE (arg0) == BIT_XOR_EXPR)
11910 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
11911 return fold_build2 (TREE_CODE (arg0), type,
11912 fold_build2 (code, type,
11913 TREE_OPERAND (arg0, 0), arg1),
11914 fold_build2 (code, type,
11915 TREE_OPERAND (arg0, 1), arg1));
11917 /* Two consecutive rotates adding up to the precision of the
11918 type can be ignored. */
11919 if (code == RROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST
11920 && TREE_CODE (arg0) == RROTATE_EXPR
11921 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
11922 && TREE_INT_CST_HIGH (arg1) == 0
11923 && TREE_INT_CST_HIGH (TREE_OPERAND (arg0, 1)) == 0
11924 && ((TREE_INT_CST_LOW (arg1)
11925 + TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1)))
11926 == (unsigned int) TYPE_PRECISION (type)))
11927 return TREE_OPERAND (arg0, 0);
11929 /* Fold (X & C2) << C1 into (X << C1) & (C2 << C1)
11930 (X & C2) >> C1 into (X >> C1) & (C2 >> C1)
11931 if the latter can be further optimized. */
11932 if ((code == LSHIFT_EXPR || code == RSHIFT_EXPR)
11933 && TREE_CODE (arg0) == BIT_AND_EXPR
11934 && TREE_CODE (arg1) == INTEGER_CST
11935 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
11937 tree mask = fold_build2 (code, type,
11938 fold_convert (type, TREE_OPERAND (arg0, 1)),
11939 arg1);
11940 tree shift = fold_build2 (code, type,
11941 fold_convert (type, TREE_OPERAND (arg0, 0)),
11942 arg1);
11943 tem = fold_binary (BIT_AND_EXPR, type, shift, mask);
11944 if (tem)
11945 return tem;
11948 return NULL_TREE;
11950 case MIN_EXPR:
11951 if (operand_equal_p (arg0, arg1, 0))
11952 return omit_one_operand (type, arg0, arg1);
11953 if (INTEGRAL_TYPE_P (type)
11954 && operand_equal_p (arg1, TYPE_MIN_VALUE (type), OEP_ONLY_CONST))
11955 return omit_one_operand (type, arg1, arg0);
11956 tem = fold_minmax (MIN_EXPR, type, arg0, arg1);
11957 if (tem)
11958 return tem;
11959 goto associate;
11961 case MAX_EXPR:
11962 if (operand_equal_p (arg0, arg1, 0))
11963 return omit_one_operand (type, arg0, arg1);
11964 if (INTEGRAL_TYPE_P (type)
11965 && TYPE_MAX_VALUE (type)
11966 && operand_equal_p (arg1, TYPE_MAX_VALUE (type), OEP_ONLY_CONST))
11967 return omit_one_operand (type, arg1, arg0);
11968 tem = fold_minmax (MAX_EXPR, type, arg0, arg1);
11969 if (tem)
11970 return tem;
11971 goto associate;
11973 case TRUTH_ANDIF_EXPR:
11974 /* Note that the operands of this must be ints
11975 and their values must be 0 or 1.
11976 ("true" is a fixed value perhaps depending on the language.) */
11977 /* If first arg is constant zero, return it. */
11978 if (integer_zerop (arg0))
11979 return fold_convert (type, arg0);
11980 case TRUTH_AND_EXPR:
11981 /* If either arg is constant true, drop it. */
11982 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
11983 return non_lvalue (fold_convert (type, arg1));
11984 if (TREE_CODE (arg1) == INTEGER_CST && ! integer_zerop (arg1)
11985 /* Preserve sequence points. */
11986 && (code != TRUTH_ANDIF_EXPR || ! TREE_SIDE_EFFECTS (arg0)))
11987 return non_lvalue (fold_convert (type, arg0));
11988 /* If second arg is constant zero, result is zero, but first arg
11989 must be evaluated. */
11990 if (integer_zerop (arg1))
11991 return omit_one_operand (type, arg1, arg0);
11992 /* Likewise for first arg, but note that only the TRUTH_AND_EXPR
11993 case will be handled here. */
11994 if (integer_zerop (arg0))
11995 return omit_one_operand (type, arg0, arg1);
11997 /* !X && X is always false. */
11998 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
11999 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
12000 return omit_one_operand (type, integer_zero_node, arg1);
12001 /* X && !X is always false. */
12002 if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
12003 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
12004 return omit_one_operand (type, integer_zero_node, arg0);
12006 /* A < X && A + 1 > Y ==> A < X && A >= Y. Normally A + 1 > Y
12007 means A >= Y && A != MAX, but in this case we know that
12008 A < X <= MAX. */
12010 if (!TREE_SIDE_EFFECTS (arg0)
12011 && !TREE_SIDE_EFFECTS (arg1))
12013 tem = fold_to_nonsharp_ineq_using_bound (arg0, arg1);
12014 if (tem && !operand_equal_p (tem, arg0, 0))
12015 return fold_build2 (code, type, tem, arg1);
12017 tem = fold_to_nonsharp_ineq_using_bound (arg1, arg0);
12018 if (tem && !operand_equal_p (tem, arg1, 0))
12019 return fold_build2 (code, type, arg0, tem);
12022 truth_andor:
12023 /* We only do these simplifications if we are optimizing. */
12024 if (!optimize)
12025 return NULL_TREE;
12027 /* Check for things like (A || B) && (A || C). We can convert this
12028 to A || (B && C). Note that either operator can be any of the four
12029 truth and/or operations and the transformation will still be
12030 valid. Also note that we only care about order for the
12031 ANDIF and ORIF operators. If B contains side effects, this
12032 might change the truth-value of A. */
12033 if (TREE_CODE (arg0) == TREE_CODE (arg1)
12034 && (TREE_CODE (arg0) == TRUTH_ANDIF_EXPR
12035 || TREE_CODE (arg0) == TRUTH_ORIF_EXPR
12036 || TREE_CODE (arg0) == TRUTH_AND_EXPR
12037 || TREE_CODE (arg0) == TRUTH_OR_EXPR)
12038 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (arg0, 1)))
12040 tree a00 = TREE_OPERAND (arg0, 0);
12041 tree a01 = TREE_OPERAND (arg0, 1);
12042 tree a10 = TREE_OPERAND (arg1, 0);
12043 tree a11 = TREE_OPERAND (arg1, 1);
12044 int commutative = ((TREE_CODE (arg0) == TRUTH_OR_EXPR
12045 || TREE_CODE (arg0) == TRUTH_AND_EXPR)
12046 && (code == TRUTH_AND_EXPR
12047 || code == TRUTH_OR_EXPR));
12049 if (operand_equal_p (a00, a10, 0))
12050 return fold_build2 (TREE_CODE (arg0), type, a00,
12051 fold_build2 (code, type, a01, a11));
12052 else if (commutative && operand_equal_p (a00, a11, 0))
12053 return fold_build2 (TREE_CODE (arg0), type, a00,
12054 fold_build2 (code, type, a01, a10));
12055 else if (commutative && operand_equal_p (a01, a10, 0))
12056 return fold_build2 (TREE_CODE (arg0), type, a01,
12057 fold_build2 (code, type, a00, a11));
12059 /* This case is tricky because we must either have commutative
12060 operators or else A10 must not have side-effects. */
12062 else if ((commutative || ! TREE_SIDE_EFFECTS (a10))
12063 && operand_equal_p (a01, a11, 0))
12064 return fold_build2 (TREE_CODE (arg0), type,
12065 fold_build2 (code, type, a00, a10),
12066 a01);
12069 /* See if we can build a range comparison. */
12070 if (0 != (tem = fold_range_test (code, type, op0, op1)))
12071 return tem;
12073 /* Check for the possibility of merging component references. If our
12074 lhs is another similar operation, try to merge its rhs with our
12075 rhs. Then try to merge our lhs and rhs. */
12076 if (TREE_CODE (arg0) == code
12077 && 0 != (tem = fold_truthop (code, type,
12078 TREE_OPERAND (arg0, 1), arg1)))
12079 return fold_build2 (code, type, TREE_OPERAND (arg0, 0), tem);
12081 if ((tem = fold_truthop (code, type, arg0, arg1)) != 0)
12082 return tem;
12084 return NULL_TREE;
12086 case TRUTH_ORIF_EXPR:
12087 /* Note that the operands of this must be ints
12088 and their values must be 0 or 1.
12089 ("true" is a fixed value perhaps depending on the language.) */
12090 /* If first arg is constant true, return it. */
12091 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
12092 return fold_convert (type, arg0);
12093 case TRUTH_OR_EXPR:
12094 /* If either arg is constant zero, drop it. */
12095 if (TREE_CODE (arg0) == INTEGER_CST && integer_zerop (arg0))
12096 return non_lvalue (fold_convert (type, arg1));
12097 if (TREE_CODE (arg1) == INTEGER_CST && integer_zerop (arg1)
12098 /* Preserve sequence points. */
12099 && (code != TRUTH_ORIF_EXPR || ! TREE_SIDE_EFFECTS (arg0)))
12100 return non_lvalue (fold_convert (type, arg0));
12101 /* If second arg is constant true, result is true, but we must
12102 evaluate first arg. */
12103 if (TREE_CODE (arg1) == INTEGER_CST && ! integer_zerop (arg1))
12104 return omit_one_operand (type, arg1, arg0);
12105 /* Likewise for first arg, but note this only occurs here for
12106 TRUTH_OR_EXPR. */
12107 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
12108 return omit_one_operand (type, arg0, arg1);
12110 /* !X || X is always true. */
12111 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
12112 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
12113 return omit_one_operand (type, integer_one_node, arg1);
12114 /* X || !X is always true. */
12115 if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
12116 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
12117 return omit_one_operand (type, integer_one_node, arg0);
12119 goto truth_andor;
12121 case TRUTH_XOR_EXPR:
12122 /* If the second arg is constant zero, drop it. */
12123 if (integer_zerop (arg1))
12124 return non_lvalue (fold_convert (type, arg0));
12125 /* If the second arg is constant true, this is a logical inversion. */
12126 if (integer_onep (arg1))
12128 /* Only call invert_truthvalue if operand is a truth value. */
12129 if (TREE_CODE (TREE_TYPE (arg0)) != BOOLEAN_TYPE)
12130 tem = fold_build1 (TRUTH_NOT_EXPR, TREE_TYPE (arg0), arg0);
12131 else
12132 tem = invert_truthvalue (arg0);
12133 return non_lvalue (fold_convert (type, tem));
12135 /* Identical arguments cancel to zero. */
12136 if (operand_equal_p (arg0, arg1, 0))
12137 return omit_one_operand (type, integer_zero_node, arg0);
12139 /* !X ^ X is always true. */
12140 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
12141 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
12142 return omit_one_operand (type, integer_one_node, arg1);
12144 /* X ^ !X is always true. */
12145 if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
12146 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
12147 return omit_one_operand (type, integer_one_node, arg0);
12149 return NULL_TREE;
12151 case EQ_EXPR:
12152 case NE_EXPR:
12153 tem = fold_comparison (code, type, op0, op1);
12154 if (tem != NULL_TREE)
12155 return tem;
12157 /* bool_var != 0 becomes bool_var. */
12158 if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_zerop (arg1)
12159 && code == NE_EXPR)
12160 return non_lvalue (fold_convert (type, arg0));
12162 /* bool_var == 1 becomes bool_var. */
12163 if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_onep (arg1)
12164 && code == EQ_EXPR)
12165 return non_lvalue (fold_convert (type, arg0));
12167 /* bool_var != 1 becomes !bool_var. */
12168 if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_onep (arg1)
12169 && code == NE_EXPR)
12170 return fold_build1 (TRUTH_NOT_EXPR, type, fold_convert (type, arg0));
12172 /* bool_var == 0 becomes !bool_var. */
12173 if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_zerop (arg1)
12174 && code == EQ_EXPR)
12175 return fold_build1 (TRUTH_NOT_EXPR, type, fold_convert (type, arg0));
12177 /* If this is an equality comparison of the address of two non-weak,
12178 unaliased symbols neither of which are extern (since we do not
12179 have access to attributes for externs), then we know the result. */
12180 if (TREE_CODE (arg0) == ADDR_EXPR
12181 && VAR_OR_FUNCTION_DECL_P (TREE_OPERAND (arg0, 0))
12182 && ! DECL_WEAK (TREE_OPERAND (arg0, 0))
12183 && ! lookup_attribute ("alias",
12184 DECL_ATTRIBUTES (TREE_OPERAND (arg0, 0)))
12185 && ! DECL_EXTERNAL (TREE_OPERAND (arg0, 0))
12186 && TREE_CODE (arg1) == ADDR_EXPR
12187 && VAR_OR_FUNCTION_DECL_P (TREE_OPERAND (arg1, 0))
12188 && ! DECL_WEAK (TREE_OPERAND (arg1, 0))
12189 && ! lookup_attribute ("alias",
12190 DECL_ATTRIBUTES (TREE_OPERAND (arg1, 0)))
12191 && ! DECL_EXTERNAL (TREE_OPERAND (arg1, 0)))
12193 /* We know that we're looking at the address of two
12194 non-weak, unaliased, static _DECL nodes.
12196 It is both wasteful and incorrect to call operand_equal_p
12197 to compare the two ADDR_EXPR nodes. It is wasteful in that
12198 all we need to do is test pointer equality for the arguments
12199 to the two ADDR_EXPR nodes. It is incorrect to use
12200 operand_equal_p as that function is NOT equivalent to a
12201 C equality test. It can in fact return false for two
12202 objects which would test as equal using the C equality
12203 operator. */
12204 bool equal = TREE_OPERAND (arg0, 0) == TREE_OPERAND (arg1, 0);
12205 return constant_boolean_node (equal
12206 ? code == EQ_EXPR : code != EQ_EXPR,
12207 type);
12210 /* If this is an EQ or NE comparison of a constant with a PLUS_EXPR or
12211 a MINUS_EXPR of a constant, we can convert it into a comparison with
12212 a revised constant as long as no overflow occurs. */
12213 if (TREE_CODE (arg1) == INTEGER_CST
12214 && (TREE_CODE (arg0) == PLUS_EXPR
12215 || TREE_CODE (arg0) == MINUS_EXPR)
12216 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
12217 && 0 != (tem = const_binop (TREE_CODE (arg0) == PLUS_EXPR
12218 ? MINUS_EXPR : PLUS_EXPR,
12219 fold_convert (TREE_TYPE (arg0), arg1),
12220 TREE_OPERAND (arg0, 1), 0))
12221 && !TREE_OVERFLOW (tem))
12222 return fold_build2 (code, type, TREE_OPERAND (arg0, 0), tem);
12224 /* Similarly for a NEGATE_EXPR. */
12225 if (TREE_CODE (arg0) == NEGATE_EXPR
12226 && TREE_CODE (arg1) == INTEGER_CST
12227 && 0 != (tem = negate_expr (arg1))
12228 && TREE_CODE (tem) == INTEGER_CST
12229 && !TREE_OVERFLOW (tem))
12230 return fold_build2 (code, type, TREE_OPERAND (arg0, 0), tem);
12232 /* Similarly for a BIT_XOR_EXPR; X ^ C1 == C2 is X == (C1 ^ C2). */
12233 if (TREE_CODE (arg0) == BIT_XOR_EXPR
12234 && TREE_CODE (arg1) == INTEGER_CST
12235 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
12236 return fold_build2 (code, type, TREE_OPERAND (arg0, 0),
12237 fold_build2 (BIT_XOR_EXPR, TREE_TYPE (arg0),
12238 fold_convert (TREE_TYPE (arg0), arg1),
12239 TREE_OPERAND (arg0, 1)));
12241 /* Transform comparisons of the form X +- C CMP X. */
12242 if ((TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
12243 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
12244 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
12245 && (INTEGRAL_TYPE_P (TREE_TYPE (arg0))
12246 || POINTER_TYPE_P (TREE_TYPE (arg0))))
12248 tree cst = TREE_OPERAND (arg0, 1);
12250 if (code == EQ_EXPR
12251 && !integer_zerop (cst))
12252 return omit_two_operands (type, boolean_false_node,
12253 TREE_OPERAND (arg0, 0), arg1);
12254 else
12255 return omit_two_operands (type, boolean_true_node,
12256 TREE_OPERAND (arg0, 0), arg1);
12259 /* If we have X - Y == 0, we can convert that to X == Y and similarly
12260 for !=. Don't do this for ordered comparisons due to overflow. */
12261 if (TREE_CODE (arg0) == MINUS_EXPR
12262 && integer_zerop (arg1))
12263 return fold_build2 (code, type,
12264 TREE_OPERAND (arg0, 0), TREE_OPERAND (arg0, 1));
12266 /* Convert ABS_EXPR<x> == 0 or ABS_EXPR<x> != 0 to x == 0 or x != 0. */
12267 if (TREE_CODE (arg0) == ABS_EXPR
12268 && (integer_zerop (arg1) || real_zerop (arg1)))
12269 return fold_build2 (code, type, TREE_OPERAND (arg0, 0), arg1);
12271 /* If this is an EQ or NE comparison with zero and ARG0 is
12272 (1 << foo) & bar, convert it to (bar >> foo) & 1. Both require
12273 two operations, but the latter can be done in one less insn
12274 on machines that have only two-operand insns or on which a
12275 constant cannot be the first operand. */
12276 if (TREE_CODE (arg0) == BIT_AND_EXPR
12277 && integer_zerop (arg1))
12279 tree arg00 = TREE_OPERAND (arg0, 0);
12280 tree arg01 = TREE_OPERAND (arg0, 1);
12281 if (TREE_CODE (arg00) == LSHIFT_EXPR
12282 && integer_onep (TREE_OPERAND (arg00, 0)))
12284 tree tem = fold_build2 (RSHIFT_EXPR, TREE_TYPE (arg00),
12285 arg01, TREE_OPERAND (arg00, 1));
12286 tem = fold_build2 (BIT_AND_EXPR, TREE_TYPE (arg0), tem,
12287 build_int_cst (TREE_TYPE (arg0), 1));
12288 return fold_build2 (code, type,
12289 fold_convert (TREE_TYPE (arg1), tem), arg1);
12291 else if (TREE_CODE (arg01) == LSHIFT_EXPR
12292 && integer_onep (TREE_OPERAND (arg01, 0)))
12294 tree tem = fold_build2 (RSHIFT_EXPR, TREE_TYPE (arg01),
12295 arg00, TREE_OPERAND (arg01, 1));
12296 tem = fold_build2 (BIT_AND_EXPR, TREE_TYPE (arg0), tem,
12297 build_int_cst (TREE_TYPE (arg0), 1));
12298 return fold_build2 (code, type,
12299 fold_convert (TREE_TYPE (arg1), tem), arg1);
12303 /* If this is an NE or EQ comparison of zero against the result of a
12304 signed MOD operation whose second operand is a power of 2, make
12305 the MOD operation unsigned since it is simpler and equivalent. */
12306 if (integer_zerop (arg1)
12307 && !TYPE_UNSIGNED (TREE_TYPE (arg0))
12308 && (TREE_CODE (arg0) == TRUNC_MOD_EXPR
12309 || TREE_CODE (arg0) == CEIL_MOD_EXPR
12310 || TREE_CODE (arg0) == FLOOR_MOD_EXPR
12311 || TREE_CODE (arg0) == ROUND_MOD_EXPR)
12312 && integer_pow2p (TREE_OPERAND (arg0, 1)))
12314 tree newtype = unsigned_type_for (TREE_TYPE (arg0));
12315 tree newmod = fold_build2 (TREE_CODE (arg0), newtype,
12316 fold_convert (newtype,
12317 TREE_OPERAND (arg0, 0)),
12318 fold_convert (newtype,
12319 TREE_OPERAND (arg0, 1)));
12321 return fold_build2 (code, type, newmod,
12322 fold_convert (newtype, arg1));
12325 /* Fold ((X >> C1) & C2) == 0 and ((X >> C1) & C2) != 0 where
12326 C1 is a valid shift constant, and C2 is a power of two, i.e.
12327 a single bit. */
12328 if (TREE_CODE (arg0) == BIT_AND_EXPR
12329 && TREE_CODE (TREE_OPERAND (arg0, 0)) == RSHIFT_EXPR
12330 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1))
12331 == INTEGER_CST
12332 && integer_pow2p (TREE_OPERAND (arg0, 1))
12333 && integer_zerop (arg1))
12335 tree itype = TREE_TYPE (arg0);
12336 unsigned HOST_WIDE_INT prec = TYPE_PRECISION (itype);
12337 tree arg001 = TREE_OPERAND (TREE_OPERAND (arg0, 0), 1);
12339 /* Check for a valid shift count. */
12340 if (TREE_INT_CST_HIGH (arg001) == 0
12341 && TREE_INT_CST_LOW (arg001) < prec)
12343 tree arg01 = TREE_OPERAND (arg0, 1);
12344 tree arg000 = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
12345 unsigned HOST_WIDE_INT log2 = tree_log2 (arg01);
12346 /* If (C2 << C1) doesn't overflow, then ((X >> C1) & C2) != 0
12347 can be rewritten as (X & (C2 << C1)) != 0. */
12348 if ((log2 + TREE_INT_CST_LOW (arg001)) < prec)
12350 tem = fold_build2 (LSHIFT_EXPR, itype, arg01, arg001);
12351 tem = fold_build2 (BIT_AND_EXPR, itype, arg000, tem);
12352 return fold_build2 (code, type, tem, arg1);
12354 /* Otherwise, for signed (arithmetic) shifts,
12355 ((X >> C1) & C2) != 0 is rewritten as X < 0, and
12356 ((X >> C1) & C2) == 0 is rewritten as X >= 0. */
12357 else if (!TYPE_UNSIGNED (itype))
12358 return fold_build2 (code == EQ_EXPR ? GE_EXPR : LT_EXPR, type,
12359 arg000, build_int_cst (itype, 0));
12360 /* Otherwise, for unsigned (logical) shifts,
12361 ((X >> C1) & C2) != 0 is rewritten as (X,false), and
12362 ((X >> C1) & C2) == 0 is rewritten as (X,true). */
12363 else
12364 return omit_one_operand (type,
12365 code == EQ_EXPR ? integer_one_node
12366 : integer_zero_node,
12367 arg000);
12371 /* If this is an NE comparison of zero with an AND of one, remove the
12372 comparison since the AND will give the correct value. */
12373 if (code == NE_EXPR
12374 && integer_zerop (arg1)
12375 && TREE_CODE (arg0) == BIT_AND_EXPR
12376 && integer_onep (TREE_OPERAND (arg0, 1)))
12377 return fold_convert (type, arg0);
12379 /* If we have (A & C) == C where C is a power of 2, convert this into
12380 (A & C) != 0. Similarly for NE_EXPR. */
12381 if (TREE_CODE (arg0) == BIT_AND_EXPR
12382 && integer_pow2p (TREE_OPERAND (arg0, 1))
12383 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
12384 return fold_build2 (code == EQ_EXPR ? NE_EXPR : EQ_EXPR, type,
12385 arg0, fold_convert (TREE_TYPE (arg0),
12386 integer_zero_node));
12388 /* If we have (A & C) != 0 or (A & C) == 0 and C is the sign
12389 bit, then fold the expression into A < 0 or A >= 0. */
12390 tem = fold_single_bit_test_into_sign_test (code, arg0, arg1, type);
12391 if (tem)
12392 return tem;
12394 /* If we have (A & C) == D where D & ~C != 0, convert this into 0.
12395 Similarly for NE_EXPR. */
12396 if (TREE_CODE (arg0) == BIT_AND_EXPR
12397 && TREE_CODE (arg1) == INTEGER_CST
12398 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
12400 tree notc = fold_build1 (BIT_NOT_EXPR,
12401 TREE_TYPE (TREE_OPERAND (arg0, 1)),
12402 TREE_OPERAND (arg0, 1));
12403 tree dandnotc = fold_build2 (BIT_AND_EXPR, TREE_TYPE (arg0),
12404 arg1, notc);
12405 tree rslt = code == EQ_EXPR ? integer_zero_node : integer_one_node;
12406 if (integer_nonzerop (dandnotc))
12407 return omit_one_operand (type, rslt, arg0);
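/* Illustrative example, added for exposition (not part of the original
   source): (a & 3) == 4 can never hold, because bit 2 of D lies
   outside the mask C, so the comparison folds to constant 0 (and the
   corresponding != to 1).  */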
12410 /* If we have (A | C) == D where C & ~D != 0, convert this into 0.
12411 Similarly for NE_EXPR. */
12412 if (TREE_CODE (arg0) == BIT_IOR_EXPR
12413 && TREE_CODE (arg1) == INTEGER_CST
12414 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
12416 tree notd = fold_build1 (BIT_NOT_EXPR, TREE_TYPE (arg1), arg1);
12417 tree candnotd = fold_build2 (BIT_AND_EXPR, TREE_TYPE (arg0),
12418 TREE_OPERAND (arg0, 1), notd);
12419 tree rslt = code == EQ_EXPR ? integer_zero_node : integer_one_node;
12420 if (integer_nonzerop (candnotd))
12421 return omit_one_operand (type, rslt, arg0);
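/* Illustrative example, added for exposition (not part of the original
   source): (a | 4) == 3 can never hold, because bit 2 is forced on by
   the OR but clear in D, so the comparison folds to constant 0 (and
   the corresponding != to 1).  */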
12424 /* If this is a comparison of a field, we may be able to simplify it. */
12425 if ((TREE_CODE (arg0) == COMPONENT_REF
12426 || TREE_CODE (arg0) == BIT_FIELD_REF)
12427 /* Handle the constant case even without -O
12428 to make sure the warnings are given. */
12429 && (optimize || TREE_CODE (arg1) == INTEGER_CST))
12431 t1 = optimize_bit_field_compare (code, type, arg0, arg1);
12432 if (t1)
12433 return t1;
12436 /* Optimize comparisons of strlen vs zero to a compare of the
12437 first character of the string vs zero. To wit,
12438 strlen(ptr) == 0 => *ptr == 0
12439 strlen(ptr) != 0 => *ptr != 0
12440 Other cases should reduce to one of these two (or a constant)
12441 due to the return value of strlen being unsigned. */
12442 if (TREE_CODE (arg0) == CALL_EXPR
12443 && integer_zerop (arg1))
12445 tree fndecl = get_callee_fndecl (arg0);
12447 if (fndecl
12448 && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
12449 && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_STRLEN
12450 && call_expr_nargs (arg0) == 1
12451 && TREE_CODE (TREE_TYPE (CALL_EXPR_ARG (arg0, 0))) == POINTER_TYPE)
12453 tree iref = build_fold_indirect_ref (CALL_EXPR_ARG (arg0, 0));
12454 return fold_build2 (code, type, iref,
12455 build_int_cst (TREE_TYPE (iref), 0));
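/* Illustrative example, added for exposition (not part of the original
   source): at the C level,

     if (strlen (p) == 0)   becomes   if (*p == 0)

   avoiding a scan of the whole string just to test for emptiness.  */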
12459 /* Fold (X >> C) != 0 into X < 0 if C is one less than the width
12460 of X. Similarly fold (X >> C) == 0 into X >= 0. */
12461 if (TREE_CODE (arg0) == RSHIFT_EXPR
12462 && integer_zerop (arg1)
12463 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
12465 tree arg00 = TREE_OPERAND (arg0, 0);
12466 tree arg01 = TREE_OPERAND (arg0, 1);
12467 tree itype = TREE_TYPE (arg00);
12468 if (TREE_INT_CST_HIGH (arg01) == 0
12469 && TREE_INT_CST_LOW (arg01)
12470 == (unsigned HOST_WIDE_INT) (TYPE_PRECISION (itype) - 1))
12472 if (TYPE_UNSIGNED (itype))
12474 itype = signed_type_for (itype);
12475 arg00 = fold_convert (itype, arg00);
12477 return fold_build2 (code == EQ_EXPR ? GE_EXPR : LT_EXPR,
12478 type, arg00, build_int_cst (itype, 0));
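/* Illustrative example, added for exposition (not part of the original
   source): for 32-bit x, (x >> 31) != 0 is exactly the sign-bit test
   x < 0 (after converting an unsigned x to the corresponding signed
   type), and (x >> 31) == 0 is x >= 0.  */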
12482 /* (X ^ Y) == 0 becomes X == Y, and (X ^ Y) != 0 becomes X != Y. */
12483 if (integer_zerop (arg1)
12484 && TREE_CODE (arg0) == BIT_XOR_EXPR)
12485 return fold_build2 (code, type, TREE_OPERAND (arg0, 0),
12486 TREE_OPERAND (arg0, 1));
12488 /* (X ^ Y) == Y becomes X == 0. We know that Y has no side-effects. */
12489 if (TREE_CODE (arg0) == BIT_XOR_EXPR
12490 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
12491 return fold_build2 (code, type, TREE_OPERAND (arg0, 0),
12492 build_int_cst (TREE_TYPE (arg1), 0));
12493 /* Likewise (X ^ Y) == X becomes Y == 0. X has no side-effects. */
12494 if (TREE_CODE (arg0) == BIT_XOR_EXPR
12495 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
12496 && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
12497 return fold_build2 (code, type, TREE_OPERAND (arg0, 1),
12498 build_int_cst (TREE_TYPE (arg1), 0));
12500 /* (X ^ C1) op C2 can be rewritten as X op (C1 ^ C2). */
12501 if (TREE_CODE (arg0) == BIT_XOR_EXPR
12502 && TREE_CODE (arg1) == INTEGER_CST
12503 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
12504 return fold_build2 (code, type, TREE_OPERAND (arg0, 0),
12505 fold_build2 (BIT_XOR_EXPR, TREE_TYPE (arg1),
12506 TREE_OPERAND (arg0, 1), arg1));
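/* Illustrative example, added for exposition (not part of the original
   source): (x ^ 5) == 3 becomes x == 6, since 5 ^ 3 == 6 is computed
   at compile time.  */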
12508 /* Fold (~X & C) == 0 into (X & C) != 0 and (~X & C) != 0 into
12509 (X & C) == 0 when C is a single bit. */
12510 if (TREE_CODE (arg0) == BIT_AND_EXPR
12511 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_NOT_EXPR
12512 && integer_zerop (arg1)
12513 && integer_pow2p (TREE_OPERAND (arg0, 1)))
12515 tem = fold_build2 (BIT_AND_EXPR, TREE_TYPE (arg0),
12516 TREE_OPERAND (TREE_OPERAND (arg0, 0), 0),
12517 TREE_OPERAND (arg0, 1));
12518 return fold_build2 (code == EQ_EXPR ? NE_EXPR : EQ_EXPR,
12519 type, tem, arg1);
12522 /* Fold ((X & C) ^ C) eq/ne 0 into (X & C) ne/eq 0, when the
12523 constant C is a power of two, i.e. a single bit. */
12524 if (TREE_CODE (arg0) == BIT_XOR_EXPR
12525 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_AND_EXPR
12526 && integer_zerop (arg1)
12527 && integer_pow2p (TREE_OPERAND (arg0, 1))
12528 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
12529 TREE_OPERAND (arg0, 1), OEP_ONLY_CONST))
12531 tree arg00 = TREE_OPERAND (arg0, 0);
12532 return fold_build2 (code == EQ_EXPR ? NE_EXPR : EQ_EXPR, type,
12533 arg00, build_int_cst (TREE_TYPE (arg00), 0));
12536 /* Likewise, fold ((X ^ C) & C) eq/ne 0 into (X & C) ne/eq 0,
12537 when C is a power of two, i.e. a single bit. */
12538 if (TREE_CODE (arg0) == BIT_AND_EXPR
12539 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_XOR_EXPR
12540 && integer_zerop (arg1)
12541 && integer_pow2p (TREE_OPERAND (arg0, 1))
12542 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
12543 TREE_OPERAND (arg0, 1), OEP_ONLY_CONST))
12545 tree arg000 = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
12546 tem = fold_build2 (BIT_AND_EXPR, TREE_TYPE (arg000),
12547 arg000, TREE_OPERAND (arg0, 1));
12548 return fold_build2 (code == EQ_EXPR ? NE_EXPR : EQ_EXPR, type,
12549 tem, build_int_cst (TREE_TYPE (tem), 0));
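/* Illustrative example, added for exposition (not part of the original
   source): ((x ^ 8) & 8) == 0 holds exactly when bit 3 of x is set,
   so it becomes (x & 8) != 0.  */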
12552 if (integer_zerop (arg1)
12553 && tree_expr_nonzero_p (arg0))
12555 tree res = constant_boolean_node (code == NE_EXPR, type);
12556 return omit_one_operand (type, res, arg0);
12559 /* Fold -X op -Y as X op Y, where op is eq/ne. */
12560 if (TREE_CODE (arg0) == NEGATE_EXPR
12561 && TREE_CODE (arg1) == NEGATE_EXPR)
12562 return fold_build2 (code, type,
12563 TREE_OPERAND (arg0, 0),
12564 TREE_OPERAND (arg1, 0));
12566 /* Fold (X & C) op (Y & C) as ((X ^ Y) & C) op 0, and symmetries. */
12567 if (TREE_CODE (arg0) == BIT_AND_EXPR
12568 && TREE_CODE (arg1) == BIT_AND_EXPR)
12570 tree arg00 = TREE_OPERAND (arg0, 0);
12571 tree arg01 = TREE_OPERAND (arg0, 1);
12572 tree arg10 = TREE_OPERAND (arg1, 0);
12573 tree arg11 = TREE_OPERAND (arg1, 1);
12574 tree itype = TREE_TYPE (arg0);
12576 if (operand_equal_p (arg01, arg11, 0))
12577 return fold_build2 (code, type,
12578 fold_build2 (BIT_AND_EXPR, itype,
12579 fold_build2 (BIT_XOR_EXPR, itype,
12580 arg00, arg10),
12581 arg01),
12582 build_int_cst (itype, 0));
12584 if (operand_equal_p (arg01, arg10, 0))
12585 return fold_build2 (code, type,
12586 fold_build2 (BIT_AND_EXPR, itype,
12587 fold_build2 (BIT_XOR_EXPR, itype,
12588 arg00, arg11),
12589 arg01),
12590 build_int_cst (itype, 0));
12592 if (operand_equal_p (arg00, arg11, 0))
12593 return fold_build2 (code, type,
12594 fold_build2 (BIT_AND_EXPR, itype,
12595 fold_build2 (BIT_XOR_EXPR, itype,
12596 arg01, arg10),
12597 arg00),
12598 build_int_cst (itype, 0));
12600 if (operand_equal_p (arg00, arg10, 0))
12601 return fold_build2 (code, type,
12602 fold_build2 (BIT_AND_EXPR, itype,
12603 fold_build2 (BIT_XOR_EXPR, itype,
12604 arg01, arg11),
12605 arg00),
12606 build_int_cst (itype, 0));
12609 if (TREE_CODE (arg0) == BIT_XOR_EXPR
12610 && TREE_CODE (arg1) == BIT_XOR_EXPR)
12612 tree arg00 = TREE_OPERAND (arg0, 0);
12613 tree arg01 = TREE_OPERAND (arg0, 1);
12614 tree arg10 = TREE_OPERAND (arg1, 0);
12615 tree arg11 = TREE_OPERAND (arg1, 1);
12616 tree itype = TREE_TYPE (arg0);
12618 /* Optimize (X ^ Z) op (Y ^ Z) as X op Y, and symmetries.
12619 operand_equal_p guarantees no side-effects so we don't need
12620 to use omit_one_operand on Z. */
12621 if (operand_equal_p (arg01, arg11, 0))
12622 return fold_build2 (code, type, arg00, arg10);
12623 if (operand_equal_p (arg01, arg10, 0))
12624 return fold_build2 (code, type, arg00, arg11);
12625 if (operand_equal_p (arg00, arg11, 0))
12626 return fold_build2 (code, type, arg01, arg10);
12627 if (operand_equal_p (arg00, arg10, 0))
12628 return fold_build2 (code, type, arg01, arg11);
12630 /* Optimize (X ^ C1) op (Y ^ C2) as (X ^ (C1 ^ C2)) op Y. */
12631 if (TREE_CODE (arg01) == INTEGER_CST
12632 && TREE_CODE (arg11) == INTEGER_CST)
12633 return fold_build2 (code, type,
12634 fold_build2 (BIT_XOR_EXPR, itype, arg00,
12635 fold_build2 (BIT_XOR_EXPR, itype,
12636 arg01, arg11)),
12637 arg10);
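/* Illustrative example, added for exposition (not part of the original
   source): (x ^ 1) == (y ^ 2) becomes (x ^ 3) == y, combining the two
   constants into a single XOR.  */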
12640 /* Attempt to simplify equality/inequality comparisons of complex
12641 values. Only lower the comparison if the result is known or
12642 can be simplified to a single scalar comparison. */
12643 if ((TREE_CODE (arg0) == COMPLEX_EXPR
12644 || TREE_CODE (arg0) == COMPLEX_CST)
12645 && (TREE_CODE (arg1) == COMPLEX_EXPR
12646 || TREE_CODE (arg1) == COMPLEX_CST))
12648 tree real0, imag0, real1, imag1;
12649 tree rcond, icond;
12651 if (TREE_CODE (arg0) == COMPLEX_EXPR)
12653 real0 = TREE_OPERAND (arg0, 0);
12654 imag0 = TREE_OPERAND (arg0, 1);
12656 else
12658 real0 = TREE_REALPART (arg0);
12659 imag0 = TREE_IMAGPART (arg0);
12662 if (TREE_CODE (arg1) == COMPLEX_EXPR)
12664 real1 = TREE_OPERAND (arg1, 0);
12665 imag1 = TREE_OPERAND (arg1, 1);
12667 else
12669 real1 = TREE_REALPART (arg1);
12670 imag1 = TREE_IMAGPART (arg1);
12673 rcond = fold_binary (code, type, real0, real1);
12674 if (rcond && TREE_CODE (rcond) == INTEGER_CST)
12676 if (integer_zerop (rcond))
12678 if (code == EQ_EXPR)
12679 return omit_two_operands (type, boolean_false_node,
12680 imag0, imag1);
12681 return fold_build2 (NE_EXPR, type, imag0, imag1);
12683 else
12685 if (code == NE_EXPR)
12686 return omit_two_operands (type, boolean_true_node,
12687 imag0, imag1);
12688 return fold_build2 (EQ_EXPR, type, imag0, imag1);
12692 icond = fold_binary (code, type, imag0, imag1);
12693 if (icond && TREE_CODE (icond) == INTEGER_CST)
12695 if (integer_zerop (icond))
12697 if (code == EQ_EXPR)
12698 return omit_two_operands (type, boolean_false_node,
12699 real0, real1);
12700 return fold_build2 (NE_EXPR, type, real0, real1);
12702 else
12704 if (code == NE_EXPR)
12705 return omit_two_operands (type, boolean_true_node,
12706 real0, real1);
12707 return fold_build2 (EQ_EXPR, type, real0, real1);
12712 return NULL_TREE;
12714 case LT_EXPR:
12715 case GT_EXPR:
12716 case LE_EXPR:
12717 case GE_EXPR:
12718 tem = fold_comparison (code, type, op0, op1);
12719 if (tem != NULL_TREE)
12720 return tem;
12722 /* Transform comparisons of the form X +- C CMP X. */
12723 if ((TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
12724 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
12725 && ((TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
12726 && !HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0))))
12727 || (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
12728 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))))
12730 tree arg01 = TREE_OPERAND (arg0, 1);
12731 enum tree_code code0 = TREE_CODE (arg0);
12732 int is_positive;
12734 if (TREE_CODE (arg01) == REAL_CST)
12735 is_positive = REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg01)) ? -1 : 1;
12736 else
12737 is_positive = tree_int_cst_sgn (arg01);
12739 /* (X - c) > X becomes false. */
12740 if (code == GT_EXPR
12741 && ((code0 == MINUS_EXPR && is_positive >= 0)
12742 || (code0 == PLUS_EXPR && is_positive <= 0)))
12744 if (TREE_CODE (arg01) == INTEGER_CST
12745 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
12746 fold_overflow_warning (("assuming signed overflow does not "
12747 "occur when assuming that (X - c) > X "
12748 "is always false"),
12749 WARN_STRICT_OVERFLOW_ALL);
12750 return constant_boolean_node (0, type);
12753 /* Likewise (X + c) < X becomes false. */
12754 if (code == LT_EXPR
12755 && ((code0 == PLUS_EXPR && is_positive >= 0)
12756 || (code0 == MINUS_EXPR && is_positive <= 0)))
12758 if (TREE_CODE (arg01) == INTEGER_CST
12759 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
12760 fold_overflow_warning (("assuming signed overflow does not "
12761 "occur when assuming that "
12762 "(X + c) < X is always false"),
12763 WARN_STRICT_OVERFLOW_ALL);
12764 return constant_boolean_node (0, type);
12767 /* Convert (X - c) <= X to true. */
12768 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1)))
12769 && code == LE_EXPR
12770 && ((code0 == MINUS_EXPR && is_positive >= 0)
12771 || (code0 == PLUS_EXPR && is_positive <= 0)))
12773 if (TREE_CODE (arg01) == INTEGER_CST
12774 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
12775 fold_overflow_warning (("assuming signed overflow does not "
12776 "occur when assuming that "
12777 "(X - c) <= X is always true"),
12778 WARN_STRICT_OVERFLOW_ALL);
12779 return constant_boolean_node (1, type);
12782 /* Convert (X + c) >= X to true. */
12783 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1)))
12784 && code == GE_EXPR
12785 && ((code0 == PLUS_EXPR && is_positive >= 0)
12786 || (code0 == MINUS_EXPR && is_positive <= 0)))
12788 if (TREE_CODE (arg01) == INTEGER_CST
12789 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
12790 fold_overflow_warning (("assuming signed overflow does not "
12791 "occur when assuming that "
12792 "(X + c) >= X is always true"),
12793 WARN_STRICT_OVERFLOW_ALL);
12794 return constant_boolean_node (1, type);
12797 if (TREE_CODE (arg01) == INTEGER_CST)
12799 /* Convert X + c > X and X - c < X to true for integers. */
12800 if (code == GT_EXPR
12801 && ((code0 == PLUS_EXPR && is_positive > 0)
12802 || (code0 == MINUS_EXPR && is_positive < 0)))
12804 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
12805 fold_overflow_warning (("assuming signed overflow does "
12806 "not occur when assuming that "
12807 "(X + c) > X is always true"),
12808 WARN_STRICT_OVERFLOW_ALL);
12809 return constant_boolean_node (1, type);
12812 if (code == LT_EXPR
12813 && ((code0 == MINUS_EXPR && is_positive > 0)
12814 || (code0 == PLUS_EXPR && is_positive < 0)))
12816 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
12817 fold_overflow_warning (("assuming signed overflow does "
12818 "not occur when assuming that "
12819 "(X - c) < X is always true"),
12820 WARN_STRICT_OVERFLOW_ALL);
12821 return constant_boolean_node (1, type);
12824 /* Convert X + c <= X and X - c >= X to false for integers. */
12825 if (code == LE_EXPR
12826 && ((code0 == PLUS_EXPR && is_positive > 0)
12827 || (code0 == MINUS_EXPR && is_positive < 0)))
12829 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
12830 fold_overflow_warning (("assuming signed overflow does "
12831 "not occur when assuming that "
12832 "(X + c) <= X is always false"),
12833 WARN_STRICT_OVERFLOW_ALL);
12834 return constant_boolean_node (0, type);
12837 if (code == GE_EXPR
12838 && ((code0 == MINUS_EXPR && is_positive > 0)
12839 || (code0 == PLUS_EXPR && is_positive < 0)))
12841 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
12842 fold_overflow_warning (("assuming signed overflow does "
12843 "not occur when assuming that "
12844 "(X - c) >= X is always false"),
12845 WARN_STRICT_OVERFLOW_ALL);
12846 return constant_boolean_node (0, type);
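/* Illustrative example, added for exposition (not part of the original
   source): when signed overflow is undefined (e.g. -fstrict-overflow),
   x + 1 > x folds to 1 and x + 1 <= x folds to 0, and
   -Wstrict-overflow can report that this assumption was used.  */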
12851 /* Comparisons with the highest or lowest possible integer of
12852 the specified precision will have known values. */
12854 tree arg1_type = TREE_TYPE (arg1);
12855 unsigned int width = TYPE_PRECISION (arg1_type);
12857 if (TREE_CODE (arg1) == INTEGER_CST
12858 && width <= 2 * HOST_BITS_PER_WIDE_INT
12859 && (INTEGRAL_TYPE_P (arg1_type) || POINTER_TYPE_P (arg1_type)))
12861 HOST_WIDE_INT signed_max_hi;
12862 unsigned HOST_WIDE_INT signed_max_lo;
12863 unsigned HOST_WIDE_INT max_hi, max_lo, min_hi, min_lo;
12865 if (width <= HOST_BITS_PER_WIDE_INT)
12867 signed_max_lo = ((unsigned HOST_WIDE_INT) 1 << (width - 1))
12868 - 1;
12869 signed_max_hi = 0;
12870 max_hi = 0;
12872 if (TYPE_UNSIGNED (arg1_type))
12874 max_lo = ((unsigned HOST_WIDE_INT) 2 << (width - 1)) - 1;
12875 min_lo = 0;
12876 min_hi = 0;
12878 else
12880 max_lo = signed_max_lo;
12881 min_lo = ((unsigned HOST_WIDE_INT) -1 << (width - 1));
12882 min_hi = -1;
12885 else
12887 width -= HOST_BITS_PER_WIDE_INT;
12888 signed_max_lo = -1;
12889 signed_max_hi = ((unsigned HOST_WIDE_INT) 1 << (width - 1))
12890 - 1;
12891 max_lo = -1;
12892 min_lo = 0;
12894 if (TYPE_UNSIGNED (arg1_type))
12896 max_hi = ((unsigned HOST_WIDE_INT) 2 << (width - 1)) - 1;
12897 min_hi = 0;
12899 else
12901 max_hi = signed_max_hi;
12902 min_hi = ((unsigned HOST_WIDE_INT) -1 << (width - 1));
12906 if ((unsigned HOST_WIDE_INT) TREE_INT_CST_HIGH (arg1) == max_hi
12907 && TREE_INT_CST_LOW (arg1) == max_lo)
12908 switch (code)
12910 case GT_EXPR:
12911 return omit_one_operand (type, integer_zero_node, arg0);
12913 case GE_EXPR:
12914 return fold_build2 (EQ_EXPR, type, op0, op1);
12916 case LE_EXPR:
12917 return omit_one_operand (type, integer_one_node, arg0);
12919 case LT_EXPR:
12920 return fold_build2 (NE_EXPR, type, op0, op1);
12922 /* The GE_EXPR and LT_EXPR cases above are not normally
12923 reached because of previous transformations. */
12925 default:
12926 break;
12928 else if ((unsigned HOST_WIDE_INT) TREE_INT_CST_HIGH (arg1)
12929 == max_hi
12930 && TREE_INT_CST_LOW (arg1) == max_lo - 1)
12931 switch (code)
12933 case GT_EXPR:
12934 arg1 = const_binop (PLUS_EXPR, arg1,
12935 build_int_cst (TREE_TYPE (arg1), 1), 0);
12936 return fold_build2 (EQ_EXPR, type,
12937 fold_convert (TREE_TYPE (arg1), arg0),
12938 arg1);
12939 case LE_EXPR:
12940 arg1 = const_binop (PLUS_EXPR, arg1,
12941 build_int_cst (TREE_TYPE (arg1), 1), 0);
12942 return fold_build2 (NE_EXPR, type,
12943 fold_convert (TREE_TYPE (arg1), arg0),
12944 arg1);
12945 default:
12946 break;
12948 else if ((unsigned HOST_WIDE_INT) TREE_INT_CST_HIGH (arg1)
12949 == min_hi
12950 && TREE_INT_CST_LOW (arg1) == min_lo)
12951 switch (code)
12953 case LT_EXPR:
12954 return omit_one_operand (type, integer_zero_node, arg0);
12956 case LE_EXPR:
12957 return fold_build2 (EQ_EXPR, type, op0, op1);
12959 case GE_EXPR:
12960 return omit_one_operand (type, integer_one_node, arg0);
12962 case GT_EXPR:
12963 return fold_build2 (NE_EXPR, type, op0, op1);
12965 default:
12966 break;
12968 else if ((unsigned HOST_WIDE_INT) TREE_INT_CST_HIGH (arg1)
12969 == min_hi
12970 && TREE_INT_CST_LOW (arg1) == min_lo + 1)
12971 switch (code)
12973 case GE_EXPR:
12974 arg1 = const_binop (MINUS_EXPR, arg1, integer_one_node, 0);
12975 return fold_build2 (NE_EXPR, type,
12976 fold_convert (TREE_TYPE (arg1), arg0),
12977 arg1);
12978 case LT_EXPR:
12979 arg1 = const_binop (MINUS_EXPR, arg1, integer_one_node, 0);
12980 return fold_build2 (EQ_EXPR, type,
12981 fold_convert (TREE_TYPE (arg1), arg0),
12982 arg1);
12983 default:
12984 break;
12987 else if (TREE_INT_CST_HIGH (arg1) == signed_max_hi
12988 && TREE_INT_CST_LOW (arg1) == signed_max_lo
12989 && TYPE_UNSIGNED (arg1_type)
12990 /* We will flip the signedness of the comparison operator
12991 associated with the mode of arg1, so the sign bit is
12992 specified by this mode. Check that arg1 is the signed
12993 max associated with this sign bit. */
12994 && width == GET_MODE_BITSIZE (TYPE_MODE (arg1_type))
12995 /* signed_type_for does not work on pointer types. */
12996 && INTEGRAL_TYPE_P (arg1_type))
12998 /* The following case also applies to X < signed_max+1
12999 and X >= signed_max+1 because of previous transformations. */
13000 if (code == LE_EXPR || code == GT_EXPR)
13002 tree st;
13003 st = signed_type_for (TREE_TYPE (arg1));
13004 return fold_build2 (code == LE_EXPR ? GE_EXPR : LT_EXPR,
13005 type, fold_convert (st, arg0),
13006 build_int_cst (st, 0));
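/* Illustrative example, added for exposition (not part of the original
   source): for 32-bit unsigned x, x > 0x7fffffff becomes (int) x < 0,
   turning the comparison against the unsigned signed-max constant into
   a sign-bit test.  */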
13012 /* If we are comparing an ABS_EXPR with a constant, we can
13013 convert all the cases into explicit comparisons, but they may
13014 well not be faster than doing the ABS and one comparison.
13015 But ABS (X) <= C is a range comparison, which becomes a subtraction
13016 and a comparison, and is probably faster. */
13017 if (code == LE_EXPR
13018 && TREE_CODE (arg1) == INTEGER_CST
13019 && TREE_CODE (arg0) == ABS_EXPR
13020 && ! TREE_SIDE_EFFECTS (arg0)
13021 && (0 != (tem = negate_expr (arg1)))
13022 && TREE_CODE (tem) == INTEGER_CST
13023 && !TREE_OVERFLOW (tem))
13024 return fold_build2 (TRUTH_ANDIF_EXPR, type,
13025 build2 (GE_EXPR, type,
13026 TREE_OPERAND (arg0, 0), tem),
13027 build2 (LE_EXPR, type,
13028 TREE_OPERAND (arg0, 0), arg1));
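/* Illustrative example, added for exposition (not part of the original
   source): abs (x) <= 7 becomes x >= -7 && x <= 7, a plain range
   check that avoids computing the absolute value.  */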
13030 /* Convert ABS_EXPR<x> >= 0 to true. */
13031 strict_overflow_p = false;
13032 if (code == GE_EXPR
13033 && (integer_zerop (arg1)
13034 || (! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0)))
13035 && real_zerop (arg1)))
13036 && tree_expr_nonnegative_warnv_p (arg0, &strict_overflow_p))
13038 if (strict_overflow_p)
13039 fold_overflow_warning (("assuming signed overflow does not occur "
13040 "when simplifying comparison of "
13041 "absolute value and zero"),
13042 WARN_STRICT_OVERFLOW_CONDITIONAL);
13043 return omit_one_operand (type, integer_one_node, arg0);
13046 /* Convert ABS_EXPR<x> < 0 to false. */
13047 strict_overflow_p = false;
13048 if (code == LT_EXPR
13049 && (integer_zerop (arg1) || real_zerop (arg1))
13050 && tree_expr_nonnegative_warnv_p (arg0, &strict_overflow_p))
13052 if (strict_overflow_p)
13053 fold_overflow_warning (("assuming signed overflow does not occur "
13054 "when simplifying comparison of "
13055 "absolute value and zero"),
13056 WARN_STRICT_OVERFLOW_CONDITIONAL);
13057 return omit_one_operand (type, integer_zero_node, arg0);
13060 /* If X is unsigned, convert X < (1 << Y) into X >> Y == 0
13061 and similarly for >= into !=. */
13062 if ((code == LT_EXPR || code == GE_EXPR)
13063 && TYPE_UNSIGNED (TREE_TYPE (arg0))
13064 && TREE_CODE (arg1) == LSHIFT_EXPR
13065 && integer_onep (TREE_OPERAND (arg1, 0)))
13066 return build2 (code == LT_EXPR ? EQ_EXPR : NE_EXPR, type,
13067 build2 (RSHIFT_EXPR, TREE_TYPE (arg0), arg0,
13068 TREE_OPERAND (arg1, 1)),
13069 build_int_cst (TREE_TYPE (arg0), 0));
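/* Illustrative example, added for exposition (not part of the original
   source): for unsigned x, x < (1u << y) becomes (x >> y) == 0 and
   x >= (1u << y) becomes (x >> y) != 0.  */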
13071 if ((code == LT_EXPR || code == GE_EXPR)
13072 && TYPE_UNSIGNED (TREE_TYPE (arg0))
13073 && CONVERT_EXPR_P (arg1)
13074 && TREE_CODE (TREE_OPERAND (arg1, 0)) == LSHIFT_EXPR
13075 && integer_onep (TREE_OPERAND (TREE_OPERAND (arg1, 0), 0)))
13076 return
13077 build2 (code == LT_EXPR ? EQ_EXPR : NE_EXPR, type,
13078 fold_convert (TREE_TYPE (arg0),
13079 build2 (RSHIFT_EXPR, TREE_TYPE (arg0), arg0,
13080 TREE_OPERAND (TREE_OPERAND (arg1, 0),
13081 1))),
13082 build_int_cst (TREE_TYPE (arg0), 0));
13084 return NULL_TREE;
13086 case UNORDERED_EXPR:
13087 case ORDERED_EXPR:
13088 case UNLT_EXPR:
13089 case UNLE_EXPR:
13090 case UNGT_EXPR:
13091 case UNGE_EXPR:
13092 case UNEQ_EXPR:
13093 case LTGT_EXPR:
13094 if (TREE_CODE (arg0) == REAL_CST && TREE_CODE (arg1) == REAL_CST)
13096 t1 = fold_relational_const (code, type, arg0, arg1);
13097 if (t1 != NULL_TREE)
13098 return t1;
13101 /* If the first operand is NaN, the result is constant. */
13102 if (TREE_CODE (arg0) == REAL_CST
13103 && REAL_VALUE_ISNAN (TREE_REAL_CST (arg0))
13104 && (code != LTGT_EXPR || ! flag_trapping_math))
13106 t1 = (code == ORDERED_EXPR || code == LTGT_EXPR)
13107 ? integer_zero_node
13108 : integer_one_node;
13109 return omit_one_operand (type, t1, arg1);
13112 /* If the second operand is NaN, the result is constant. */
13113 if (TREE_CODE (arg1) == REAL_CST
13114 && REAL_VALUE_ISNAN (TREE_REAL_CST (arg1))
13115 && (code != LTGT_EXPR || ! flag_trapping_math))
13117 t1 = (code == ORDERED_EXPR || code == LTGT_EXPR)
13118 ? integer_zero_node
13119 : integer_one_node;
13120 return omit_one_operand (type, t1, arg0);
13123 /* Simplify unordered comparison of something with itself. */
13124 if ((code == UNLE_EXPR || code == UNGE_EXPR || code == UNEQ_EXPR)
13125 && operand_equal_p (arg0, arg1, 0))
13126 return constant_boolean_node (1, type);
13128 if (code == LTGT_EXPR
13129 && !flag_trapping_math
13130 && operand_equal_p (arg0, arg1, 0))
13131 return constant_boolean_node (0, type);
13133 /* Fold (double)float1 CMP (double)float2 into float1 CMP float2. */
13135 tree targ0 = strip_float_extensions (arg0);
13136 tree targ1 = strip_float_extensions (arg1);
13137 tree newtype = TREE_TYPE (targ0);
13139 if (TYPE_PRECISION (TREE_TYPE (targ1)) > TYPE_PRECISION (newtype))
13140 newtype = TREE_TYPE (targ1);
13142 if (TYPE_PRECISION (newtype) < TYPE_PRECISION (TREE_TYPE (arg0)))
13143 return fold_build2 (code, type, fold_convert (newtype, targ0),
13144 fold_convert (newtype, targ1));
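/* Illustrative example, added for exposition (not part of the original
   source): for floats f and g, (double) f < (double) g is compared
   directly as f < g, since widening both operands cannot change the
   result.  */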
13147 return NULL_TREE;
13149 case COMPOUND_EXPR:
13150 /* When pedantic, a compound expression can be neither an lvalue
13151 nor an integer constant expression. */
13152 if (TREE_SIDE_EFFECTS (arg0) || TREE_CONSTANT (arg1))
13153 return NULL_TREE;
13154 /* Don't let (0, 0) be null pointer constant. */
13155 tem = integer_zerop (arg1) ? build1 (NOP_EXPR, type, arg1)
13156 : fold_convert (type, arg1);
13157 return pedantic_non_lvalue (tem);
13159 case COMPLEX_EXPR:
13160 if ((TREE_CODE (arg0) == REAL_CST
13161 && TREE_CODE (arg1) == REAL_CST)
13162 || (TREE_CODE (arg0) == INTEGER_CST
13163 && TREE_CODE (arg1) == INTEGER_CST))
13164 return build_complex (type, arg0, arg1);
13165 return NULL_TREE;
13167 case ASSERT_EXPR:
13168 /* An ASSERT_EXPR should never be passed to fold_binary. */
13169 gcc_unreachable ();
13171 default:
13172 return NULL_TREE;
13173 } /* switch (code) */
13176 /* Callback for walk_tree, looking for LABEL_EXPR. Returns *TP if it
13177 is a LABEL_EXPR, otherwise NULL_TREE. Does not descend into the
13178 sub-tree of a GOTO_EXPR. */
13180 static tree
13181 contains_label_1 (tree *tp,
13182 int *walk_subtrees,
13183 void *data ATTRIBUTE_UNUSED)
13185 switch (TREE_CODE (*tp))
13187 case LABEL_EXPR:
13188 return *tp;
13189 case GOTO_EXPR:
13190 *walk_subtrees = 0;
13191 /* no break */
13192 default:
13193 return NULL_TREE;
13197 /* Checks whether the sub-tree ST contains a label LABEL_EXPR which is
13198 accessible from outside the sub-tree. Returns true if such a label
13199 is found, false otherwise. */
13201 static bool
13202 contains_label_p (tree st)
13204 return (walk_tree (&st, contains_label_1 , NULL, NULL) != NULL_TREE);
13207 /* Fold a ternary expression of code CODE and type TYPE with operands
13208 OP0, OP1, and OP2. Return the folded expression if folding is
13209 successful. Otherwise, return NULL_TREE. */
13211 tree
13212 fold_ternary (enum tree_code code, tree type, tree op0, tree op1, tree op2)
13214 tree tem;
13215 tree arg0 = NULL_TREE, arg1 = NULL_TREE;
13216 enum tree_code_class kind = TREE_CODE_CLASS (code);
13218 gcc_assert (IS_EXPR_CODE_CLASS (kind)
13219 && TREE_CODE_LENGTH (code) == 3);
13221 /* Strip any conversions that don't change the mode. This is safe
13222 for every expression, except for a comparison expression because
13223 its signedness is derived from its operands. So, in the latter
13224 case, only strip conversions that don't change the signedness.
13226 Note that this is done as an internal manipulation within the
13227 constant folder, in order to find the simplest representation of
13228 the arguments so that their form can be studied. In any case,
13229 the appropriate type conversions should be put back in the tree
13230 that will get out of the constant folder. */
13231 if (op0)
13233 arg0 = op0;
13234 STRIP_NOPS (arg0);
13237 if (op1)
13239 arg1 = op1;
13240 STRIP_NOPS (arg1);
13243 switch (code)
13245 case COMPONENT_REF:
13246 if (TREE_CODE (arg0) == CONSTRUCTOR
13247 && ! type_contains_placeholder_p (TREE_TYPE (arg0)))
13249 unsigned HOST_WIDE_INT idx;
13250 tree field, value;
13251 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (arg0), idx, field, value)
13252 if (field == arg1)
13253 return value;
13255 return NULL_TREE;
13257 case COND_EXPR:
13258 /* Pedantic ANSI C says that a conditional expression is never an lvalue,
13259 so all simple results must be passed through pedantic_non_lvalue. */
13260 if (TREE_CODE (arg0) == INTEGER_CST)
13262 tree unused_op = integer_zerop (arg0) ? op1 : op2;
13263 tem = integer_zerop (arg0) ? op2 : op1;
13264 /* Only optimize constant conditions when the selected branch
13265 has the same type as the COND_EXPR. This avoids optimizing
13266 away "c ? x : throw", where the throw has a void type.
13267 Also avoid discarding an unused operand that contains a label. */
13268 if ((!TREE_SIDE_EFFECTS (unused_op)
13269 || !contains_label_p (unused_op))
13270 && (! VOID_TYPE_P (TREE_TYPE (tem))
13271 || VOID_TYPE_P (type)))
13272 return pedantic_non_lvalue (tem);
13273 return NULL_TREE;
13275 if (operand_equal_p (arg1, op2, 0))
13276 return pedantic_omit_one_operand (type, arg1, arg0);
13278 /* If we have A op B ? A : C, we may be able to convert this to a
13279 simpler expression, depending on the operation and the values
13280 of B and C. Signed zeros prevent all of these transformations,
13281 for reasons given above each one.
13283 Also try swapping the arguments and inverting the conditional. */
13284 if (COMPARISON_CLASS_P (arg0)
13285 && operand_equal_for_comparison_p (TREE_OPERAND (arg0, 0),
13286 arg1, TREE_OPERAND (arg0, 1))
13287 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg1))))
13289 tem = fold_cond_expr_with_comparison (type, arg0, op1, op2);
13290 if (tem)
13291 return tem;
13294 if (COMPARISON_CLASS_P (arg0)
13295 && operand_equal_for_comparison_p (TREE_OPERAND (arg0, 0),
13296 op2,
13297 TREE_OPERAND (arg0, 1))
13298 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (op2))))
13300 tem = fold_truth_not_expr (arg0);
13301 if (tem && COMPARISON_CLASS_P (tem))
13303 tem = fold_cond_expr_with_comparison (type, tem, op2, op1);
13304 if (tem)
13305 return tem;
13309 /* If the second operand is simpler than the third, swap them
13310 since that produces better jump optimization results. */
13311 if (truth_value_p (TREE_CODE (arg0))
13312 && tree_swap_operands_p (op1, op2, false))
13314 /* See if this can be inverted. If it can't, possibly because
13315 it was a floating-point inequality comparison, don't do
13316 anything. */
13317 tem = fold_truth_not_expr (arg0);
13318 if (tem)
13319 return fold_build3 (code, type, tem, op2, op1);
13322 /* Convert A ? 1 : 0 to simply A. */
13323 if (integer_onep (op1)
13324 && integer_zerop (op2)
13325 /* If we try to convert OP0 to our type, the
13326 call to fold will try to move the conversion inside
13327 a COND, which will recurse. In that case, the COND_EXPR
13328 is probably the best choice, so leave it alone. */
13329 && type == TREE_TYPE (arg0))
13330 return pedantic_non_lvalue (arg0);
13332 /* Convert A ? 0 : 1 to !A. This prefers the use of NOT_EXPR
13333 over COND_EXPR in cases such as floating point comparisons. */
13334 if (integer_zerop (op1)
13335 && integer_onep (op2)
13336 && truth_value_p (TREE_CODE (arg0)))
13337 return pedantic_non_lvalue (fold_convert (type,
13338 invert_truthvalue (arg0)));
13340 /* A < 0 ? <sign bit of A> : 0 is simply (A & <sign bit of A>). */
13341 if (TREE_CODE (arg0) == LT_EXPR
13342 && integer_zerop (TREE_OPERAND (arg0, 1))
13343 && integer_zerop (op2)
13344 && (tem = sign_bit_p (TREE_OPERAND (arg0, 0), arg1)))
13346 /* sign_bit_p only checks ARG1 bits within A's precision.
13347 If <sign bit of A> has wider type than A, bits outside
13348 of A's precision in <sign bit of A> need to be checked.
13349 If they are all 0, this optimization needs to be done
13350 in unsigned A's type; if they are all 1, in signed A's type;
13351 otherwise this can't be done. */
13352 if (TYPE_PRECISION (TREE_TYPE (tem))
13353 < TYPE_PRECISION (TREE_TYPE (arg1))
13354 && TYPE_PRECISION (TREE_TYPE (tem))
13355 < TYPE_PRECISION (type))
13357 unsigned HOST_WIDE_INT mask_lo;
13358 HOST_WIDE_INT mask_hi;
13359 int inner_width, outer_width;
13360 tree tem_type;
13362 inner_width = TYPE_PRECISION (TREE_TYPE (tem));
13363 outer_width = TYPE_PRECISION (TREE_TYPE (arg1));
13364 if (outer_width > TYPE_PRECISION (type))
13365 outer_width = TYPE_PRECISION (type);
13367 if (outer_width > HOST_BITS_PER_WIDE_INT)
13369 mask_hi = ((unsigned HOST_WIDE_INT) -1
13370 >> (2 * HOST_BITS_PER_WIDE_INT - outer_width));
13371 mask_lo = -1;
13373 else
13375 mask_hi = 0;
13376 mask_lo = ((unsigned HOST_WIDE_INT) -1
13377 >> (HOST_BITS_PER_WIDE_INT - outer_width));
13379 if (inner_width > HOST_BITS_PER_WIDE_INT)
13381 mask_hi &= ~((unsigned HOST_WIDE_INT) -1
13382 >> (HOST_BITS_PER_WIDE_INT - inner_width));
13383 mask_lo = 0;
13385 else
13386 mask_lo &= ~((unsigned HOST_WIDE_INT) -1
13387 >> (HOST_BITS_PER_WIDE_INT - inner_width));
13389 if ((TREE_INT_CST_HIGH (arg1) & mask_hi) == mask_hi
13390 && (TREE_INT_CST_LOW (arg1) & mask_lo) == mask_lo)
13392 tem_type = signed_type_for (TREE_TYPE (tem));
13393 tem = fold_convert (tem_type, tem);
13395 else if ((TREE_INT_CST_HIGH (arg1) & mask_hi) == 0
13396 && (TREE_INT_CST_LOW (arg1) & mask_lo) == 0)
13398 tem_type = unsigned_type_for (TREE_TYPE (tem));
13399 tem = fold_convert (tem_type, tem);
13401 else
13402 tem = NULL;
13405 if (tem)
13406 return fold_convert (type,
13407 fold_build2 (BIT_AND_EXPR,
13408 TREE_TYPE (tem), tem,
13409 fold_convert (TREE_TYPE (tem),
13410 arg1)));
13413 /* (A >> N) & 1 ? (1 << N) : 0 is simply A & (1 << N). A & 1 was
13414 already handled above. */
13415 if (TREE_CODE (arg0) == BIT_AND_EXPR
13416 && integer_onep (TREE_OPERAND (arg0, 1))
13417 && integer_zerop (op2)
13418 && integer_pow2p (arg1))
13420 tree tem = TREE_OPERAND (arg0, 0);
13421 STRIP_NOPS (tem);
13422 if (TREE_CODE (tem) == RSHIFT_EXPR
13423 && TREE_CODE (TREE_OPERAND (tem, 1)) == INTEGER_CST
13424 && (unsigned HOST_WIDE_INT) tree_log2 (arg1) ==
13425 TREE_INT_CST_LOW (TREE_OPERAND (tem, 1)))
13426 return fold_build2 (BIT_AND_EXPR, type,
13427 TREE_OPERAND (tem, 0), arg1);
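/* Illustrative example, added for exposition (not part of the original
   source): ((a >> 4) & 1) ? 16 : 0 becomes a & 16, reusing the tested
   bit directly as the result.  */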
13430 /* A & N ? N : 0 is simply A & N if N is a power of two. This
13431 is probably obsolete because the first operand should be a
13432 truth value (that's why we have the two cases above), but let's
13433 leave it in until we can confirm this for all front-ends. */
13434 if (integer_zerop (op2)
13435 && TREE_CODE (arg0) == NE_EXPR
13436 && integer_zerop (TREE_OPERAND (arg0, 1))
13437 && integer_pow2p (arg1)
13438 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_AND_EXPR
13439 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
13440 arg1, OEP_ONLY_CONST))
13441 return pedantic_non_lvalue (fold_convert (type,
13442 TREE_OPERAND (arg0, 0)));
13444 /* Convert A ? B : 0 into A && B if A and B are truth values. */
13445 if (integer_zerop (op2)
13446 && truth_value_p (TREE_CODE (arg0))
13447 && truth_value_p (TREE_CODE (arg1)))
13448 return fold_build2 (TRUTH_ANDIF_EXPR, type,
13449 fold_convert (type, arg0),
13450 arg1);
13452 /* Convert A ? B : 1 into !A || B if A and B are truth values. */
13453 if (integer_onep (op2)
13454 && truth_value_p (TREE_CODE (arg0))
13455 && truth_value_p (TREE_CODE (arg1)))
13457 /* Only perform transformation if ARG0 is easily inverted. */
13458 tem = fold_truth_not_expr (arg0);
13459 if (tem)
13460 return fold_build2 (TRUTH_ORIF_EXPR, type,
13461 fold_convert (type, tem),
13462 arg1);
13465 /* Convert A ? 0 : B into !A && B if A and B are truth values. */
13466 if (integer_zerop (arg1)
13467 && truth_value_p (TREE_CODE (arg0))
13468 && truth_value_p (TREE_CODE (op2)))
13470 /* Only perform transformation if ARG0 is easily inverted. */
13471 tem = fold_truth_not_expr (arg0);
13472 if (tem)
13473 return fold_build2 (TRUTH_ANDIF_EXPR, type,
13474 fold_convert (type, tem),
13475 op2);
13478 /* Convert A ? 1 : B into A || B if A and B are truth values. */
13479 if (integer_onep (arg1)
13480 && truth_value_p (TREE_CODE (arg0))
13481 && truth_value_p (TREE_CODE (op2)))
13482 return fold_build2 (TRUTH_ORIF_EXPR, type,
13483 fold_convert (type, arg0),
13484 op2);
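/* Illustrative examples, added for exposition (not part of the
   original source): for truth values a and b, a ? b : 0 becomes
   a && b, a ? b : 1 becomes !a || b, a ? 0 : b becomes !a && b, and
   a ? 1 : b becomes a || b.  */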
13486 return NULL_TREE;
13488 case CALL_EXPR:
13489 /* CALL_EXPRs used to be ternary exprs. Catch any mistaken uses
13490 of fold_ternary on them. */
13491 gcc_unreachable ();
13493 case BIT_FIELD_REF:
13494 if ((TREE_CODE (arg0) == VECTOR_CST
13495 || (TREE_CODE (arg0) == CONSTRUCTOR && TREE_CONSTANT (arg0)))
13496 && type == TREE_TYPE (TREE_TYPE (arg0)))
13498 unsigned HOST_WIDE_INT width = tree_low_cst (arg1, 1);
13499 unsigned HOST_WIDE_INT idx = tree_low_cst (op2, 1);
13501 if (width != 0
13502 && simple_cst_equal (arg1, TYPE_SIZE (type)) == 1
13503 && (idx % width) == 0
13504 && (idx = idx / width)
13505 < TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0)))
13507 tree elements = NULL_TREE;
13509 if (TREE_CODE (arg0) == VECTOR_CST)
13510 elements = TREE_VECTOR_CST_ELTS (arg0);
13511 else
13513 unsigned HOST_WIDE_INT idx;
13514 tree value;
13516 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (arg0), idx, value)
13517 elements = tree_cons (NULL_TREE, value, elements);
13519 while (idx-- > 0 && elements)
13520 elements = TREE_CHAIN (elements);
13521 if (elements)
13522 return TREE_VALUE (elements);
13523 else
13524 return fold_convert (type, integer_zero_node);
13528 /* A BIT_FIELD_REF that references the full argument can be stripped. */
13529 if (INTEGRAL_TYPE_P (TREE_TYPE (arg0))
13530 && TYPE_PRECISION (TREE_TYPE (arg0)) == tree_low_cst (arg1, 1)
13531 && integer_zerop (op2))
13532 return fold_convert (type, arg0);
13534 return NULL_TREE;
13536 default:
13537 return NULL_TREE;
13538 } /* switch (code) */
13541 /* Perform constant folding and related simplification of EXPR.
13542 The related simplifications include x*1 => x, x*0 => 0, etc.,
13543 and application of the associative law.
13544 NOP_EXPR conversions may be removed freely (as long as we
13545 are careful not to change the type of the overall expression).
13546 We cannot simplify through a CONVERT_EXPR, FIX_EXPR or FLOAT_EXPR,
13547 but we can constant-fold them if they have constant operands. */
13549 #ifdef ENABLE_FOLD_CHECKING
13550 # define fold(x) fold_1 (x)
13551 static tree fold_1 (tree);
13552 static
13553 #endif
13554 tree
13555 fold (tree expr)
13557 const tree t = expr;
13558 enum tree_code code = TREE_CODE (t);
13559 enum tree_code_class kind = TREE_CODE_CLASS (code);
13560 tree tem;
13562 /* Return right away if a constant. */
13563 if (kind == tcc_constant)
13564 return t;
13566 /* CALL_EXPR-like objects with variable numbers of operands are
13567 treated specially. */
13568 if (kind == tcc_vl_exp)
13570 if (code == CALL_EXPR)
13572 tem = fold_call_expr (expr, false);
13573 return tem ? tem : expr;
13575 return expr;
13578 if (IS_EXPR_CODE_CLASS (kind))
13580 tree type = TREE_TYPE (t);
13581 tree op0, op1, op2;
13583 switch (TREE_CODE_LENGTH (code))
13585 case 1:
13586 op0 = TREE_OPERAND (t, 0);
13587 tem = fold_unary (code, type, op0);
13588 return tem ? tem : expr;
13589 case 2:
13590 op0 = TREE_OPERAND (t, 0);
13591 op1 = TREE_OPERAND (t, 1);
13592 tem = fold_binary (code, type, op0, op1);
13593 return tem ? tem : expr;
13594 case 3:
13595 op0 = TREE_OPERAND (t, 0);
13596 op1 = TREE_OPERAND (t, 1);
13597 op2 = TREE_OPERAND (t, 2);
13598 tem = fold_ternary (code, type, op0, op1, op2);
13599 return tem ? tem : expr;
13600 default:
13601 break;
13605 switch (code)
13607 case ARRAY_REF:
13609 tree op0 = TREE_OPERAND (t, 0);
13610 tree op1 = TREE_OPERAND (t, 1);
13612 if (TREE_CODE (op1) == INTEGER_CST
13613 && TREE_CODE (op0) == CONSTRUCTOR
13614 && ! type_contains_placeholder_p (TREE_TYPE (op0)))
13616 VEC(constructor_elt,gc) *elts = CONSTRUCTOR_ELTS (op0);
13617 unsigned HOST_WIDE_INT end = VEC_length (constructor_elt, elts);
13618 unsigned HOST_WIDE_INT begin = 0;
13620 /* Find a matching index by means of a binary search. */
13621 while (begin != end)
13623 unsigned HOST_WIDE_INT middle = (begin + end) / 2;
13624 tree index = VEC_index (constructor_elt, elts, middle)->index;
13626 if (TREE_CODE (index) == INTEGER_CST
13627 && tree_int_cst_lt (index, op1))
13628 begin = middle + 1;
13629 else if (TREE_CODE (index) == INTEGER_CST
13630 && tree_int_cst_lt (op1, index))
13631 end = middle;
13632 else if (TREE_CODE (index) == RANGE_EXPR
13633 && tree_int_cst_lt (TREE_OPERAND (index, 1), op1))
13634 begin = middle + 1;
13635 else if (TREE_CODE (index) == RANGE_EXPR
13636 && tree_int_cst_lt (op1, TREE_OPERAND (index, 0)))
13637 end = middle;
13638 else
13639 return VEC_index (constructor_elt, elts, middle)->value;
13643 return t;
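/* Illustrative example, added for exposition (not part of the original
   source): folding a constant array reference such as a[3], where a is
   a hypothetical constant-initialized array whose CONSTRUCTOR is
   known, binary-searches the sorted constructor elements for index 3,
   handling RANGE_EXPR index spans as well.  */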
13646 case CONST_DECL:
13647 return fold (DECL_INITIAL (t));
13649 default:
13650 return t;
13651 } /* switch (code) */
13654 #ifdef ENABLE_FOLD_CHECKING
13655 #undef fold
13657 static void fold_checksum_tree (const_tree, struct md5_ctx *, htab_t);
13658 static void fold_check_failed (const_tree, const_tree);
13659 void print_fold_checksum (const_tree);
13661 /* When --enable-checking=fold, compute a digest of expr before
13662 and after actual fold call to see if fold did not accidentally
13663 change original expr. */
13665 tree
13666 fold (tree expr)
13668 tree ret;
13669 struct md5_ctx ctx;
13670 unsigned char checksum_before[16], checksum_after[16];
13671 htab_t ht;
13673 ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
13674 md5_init_ctx (&ctx);
13675 fold_checksum_tree (expr, &ctx, ht);
13676 md5_finish_ctx (&ctx, checksum_before);
13677 htab_empty (ht);
13679 ret = fold_1 (expr);
13681 md5_init_ctx (&ctx);
13682 fold_checksum_tree (expr, &ctx, ht);
13683 md5_finish_ctx (&ctx, checksum_after);
13684 htab_delete (ht);
13686 if (memcmp (checksum_before, checksum_after, 16))
13687 fold_check_failed (expr, ret);
13689 return ret;
13692 void
13693 print_fold_checksum (const_tree expr)
13695 struct md5_ctx ctx;
13696 unsigned char checksum[16], cnt;
13697 htab_t ht;
13699 ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
13700 md5_init_ctx (&ctx);
13701 fold_checksum_tree (expr, &ctx, ht);
13702 md5_finish_ctx (&ctx, checksum);
13703 htab_delete (ht);
13704 for (cnt = 0; cnt < 16; ++cnt)
13705 fprintf (stderr, "%02x", checksum[cnt]);
13706 putc ('\n', stderr);
13709 static void
13710 fold_check_failed (const_tree expr ATTRIBUTE_UNUSED, const_tree ret ATTRIBUTE_UNUSED)
13712 internal_error ("fold check: original tree changed by fold");
13715 static void
13716 fold_checksum_tree (const_tree expr, struct md5_ctx *ctx, htab_t ht)
13718 const void **slot;
13719 enum tree_code code;
13720 union tree_node buf;
13721 int i, len;
13723 recursive_label:
13725 gcc_assert ((sizeof (struct tree_exp) + 5 * sizeof (tree)
13726 <= sizeof (struct tree_function_decl))
13727 && sizeof (struct tree_type) <= sizeof (struct tree_function_decl));
13728 if (expr == NULL)
13729 return;
13730 slot = (const void **) htab_find_slot (ht, expr, INSERT);
13731 if (*slot != NULL)
13732 return;
13733 *slot = expr;
13734 code = TREE_CODE (expr);
13735 if (TREE_CODE_CLASS (code) == tcc_declaration
13736 && DECL_ASSEMBLER_NAME_SET_P (expr))
13738 /* Allow DECL_ASSEMBLER_NAME to be modified. */
13739 memcpy ((char *) &buf, expr, tree_size (expr));
13740 SET_DECL_ASSEMBLER_NAME ((tree)&buf, NULL);
13741 expr = (tree) &buf;
13743 else if (TREE_CODE_CLASS (code) == tcc_type
13744 && (TYPE_POINTER_TO (expr)
13745 || TYPE_REFERENCE_TO (expr)
13746 || TYPE_CACHED_VALUES_P (expr)
13747 || TYPE_CONTAINS_PLACEHOLDER_INTERNAL (expr)
13748 || TYPE_NEXT_VARIANT (expr)))
13750 /* Allow these fields to be modified. */
13751 tree tmp;
13752 memcpy ((char *) &buf, expr, tree_size (expr));
13753 expr = tmp = (tree) &buf;
13754 TYPE_CONTAINS_PLACEHOLDER_INTERNAL (tmp) = 0;
13755 TYPE_POINTER_TO (tmp) = NULL;
13756 TYPE_REFERENCE_TO (tmp) = NULL;
13757 TYPE_NEXT_VARIANT (tmp) = NULL;
13758 if (TYPE_CACHED_VALUES_P (tmp))
13760 TYPE_CACHED_VALUES_P (tmp) = 0;
13761 TYPE_CACHED_VALUES (tmp) = NULL;
13764 md5_process_bytes (expr, tree_size (expr), ctx);
13765 fold_checksum_tree (TREE_TYPE (expr), ctx, ht);
13766 if (TREE_CODE_CLASS (code) != tcc_type
13767 && TREE_CODE_CLASS (code) != tcc_declaration
13768 && code != TREE_LIST
13769 && code != SSA_NAME)
13770 fold_checksum_tree (TREE_CHAIN (expr), ctx, ht);
13771 switch (TREE_CODE_CLASS (code))
13773 case tcc_constant:
13774 switch (code)
13776 case STRING_CST:
13777 md5_process_bytes (TREE_STRING_POINTER (expr),
13778 TREE_STRING_LENGTH (expr), ctx);
13779 break;
13780 case COMPLEX_CST:
13781 fold_checksum_tree (TREE_REALPART (expr), ctx, ht);
13782 fold_checksum_tree (TREE_IMAGPART (expr), ctx, ht);
13783 break;
13784 case VECTOR_CST:
13785 fold_checksum_tree (TREE_VECTOR_CST_ELTS (expr), ctx, ht);
13786 break;
13787 default:
13788 break;
13790 break;
13791 case tcc_exceptional:
13792 switch (code)
13794 case TREE_LIST:
13795 fold_checksum_tree (TREE_PURPOSE (expr), ctx, ht);
13796 fold_checksum_tree (TREE_VALUE (expr), ctx, ht);
13797 expr = TREE_CHAIN (expr);
13798 goto recursive_label;
13799 break;
13800 case TREE_VEC:
13801 for (i = 0; i < TREE_VEC_LENGTH (expr); ++i)
13802 fold_checksum_tree (TREE_VEC_ELT (expr, i), ctx, ht);
13803 break;
13804 default:
13805 break;
13807 break;
13808 case tcc_expression:
13809 case tcc_reference:
13810 case tcc_comparison:
13811 case tcc_unary:
13812 case tcc_binary:
13813 case tcc_statement:
13814 case tcc_vl_exp:
13815 len = TREE_OPERAND_LENGTH (expr);
13816 for (i = 0; i < len; ++i)
13817 fold_checksum_tree (TREE_OPERAND (expr, i), ctx, ht);
13818 break;
13819 case tcc_declaration:
13820 fold_checksum_tree (DECL_NAME (expr), ctx, ht);
13821 fold_checksum_tree (DECL_CONTEXT (expr), ctx, ht);
13822 if (CODE_CONTAINS_STRUCT (TREE_CODE (expr), TS_DECL_COMMON))
13824 fold_checksum_tree (DECL_SIZE (expr), ctx, ht);
13825 fold_checksum_tree (DECL_SIZE_UNIT (expr), ctx, ht);
13826 fold_checksum_tree (DECL_INITIAL (expr), ctx, ht);
13827 fold_checksum_tree (DECL_ABSTRACT_ORIGIN (expr), ctx, ht);
13828 fold_checksum_tree (DECL_ATTRIBUTES (expr), ctx, ht);
13830 if (CODE_CONTAINS_STRUCT (TREE_CODE (expr), TS_DECL_WITH_VIS))
13831 fold_checksum_tree (DECL_SECTION_NAME (expr), ctx, ht);
13833 if (CODE_CONTAINS_STRUCT (TREE_CODE (expr), TS_DECL_NON_COMMON))
13835 fold_checksum_tree (DECL_VINDEX (expr), ctx, ht);
13836 fold_checksum_tree (DECL_RESULT_FLD (expr), ctx, ht);
13837 fold_checksum_tree (DECL_ARGUMENT_FLD (expr), ctx, ht);
13839 break;
13840 case tcc_type:
13841 if (TREE_CODE (expr) == ENUMERAL_TYPE)
13842 fold_checksum_tree (TYPE_VALUES (expr), ctx, ht);
13843 fold_checksum_tree (TYPE_SIZE (expr), ctx, ht);
13844 fold_checksum_tree (TYPE_SIZE_UNIT (expr), ctx, ht);
13845 fold_checksum_tree (TYPE_ATTRIBUTES (expr), ctx, ht);
13846 fold_checksum_tree (TYPE_NAME (expr), ctx, ht);
13847 if (INTEGRAL_TYPE_P (expr)
13848 || SCALAR_FLOAT_TYPE_P (expr))
13850 fold_checksum_tree (TYPE_MIN_VALUE (expr), ctx, ht);
13851 fold_checksum_tree (TYPE_MAX_VALUE (expr), ctx, ht);
13853 fold_checksum_tree (TYPE_MAIN_VARIANT (expr), ctx, ht);
13854 if (TREE_CODE (expr) == RECORD_TYPE
13855 || TREE_CODE (expr) == UNION_TYPE
13856 || TREE_CODE (expr) == QUAL_UNION_TYPE)
13857 fold_checksum_tree (TYPE_BINFO (expr), ctx, ht);
13858 fold_checksum_tree (TYPE_CONTEXT (expr), ctx, ht);
13859 break;
13860 default:
13861 break;
13865 /* Helper function for outputting the checksum of a tree T. When
13866 debugging with gdb, you can "define mynext" to be "next" followed
13867 by "call debug_fold_checksum (op0)", then just trace down till the
13868 outputs differ. */
13870 void
13871 debug_fold_checksum (const_tree t)
13873 int i;
13874 unsigned char checksum[16];
13875 struct md5_ctx ctx;
13876 htab_t ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
13878 md5_init_ctx (&ctx);
13879 fold_checksum_tree (t, &ctx, ht);
13880 md5_finish_ctx (&ctx, checksum);
13881 htab_empty (ht);
13883 for (i = 0; i < 16; i++)
13884 fprintf (stderr, "%d ", checksum[i]);
13886 fprintf (stderr, "\n");
13889 #endif
13891 /* Fold a unary tree expression with code CODE of type TYPE with an
13892 operand OP0. Return a folded expression if successful. Otherwise,
13893 return a tree expression with code CODE of type TYPE with an
13894 operand OP0. */
13896 tree
13897 fold_build1_stat (enum tree_code code, tree type, tree op0 MEM_STAT_DECL)
13899 tree tem;
13900 #ifdef ENABLE_FOLD_CHECKING
13901 unsigned char checksum_before[16], checksum_after[16];
13902 struct md5_ctx ctx;
13903 htab_t ht;
13905 ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
13906 md5_init_ctx (&ctx);
13907 fold_checksum_tree (op0, &ctx, ht);
13908 md5_finish_ctx (&ctx, checksum_before);
13909 htab_empty (ht);
13910 #endif
13912 tem = fold_unary (code, type, op0);
13913 if (!tem)
13914 tem = build1_stat (code, type, op0 PASS_MEM_STAT);
13916 #ifdef ENABLE_FOLD_CHECKING
13917 md5_init_ctx (&ctx);
13918 fold_checksum_tree (op0, &ctx, ht);
13919 md5_finish_ctx (&ctx, checksum_after);
13920 htab_delete (ht);
13922 if (memcmp (checksum_before, checksum_after, 16))
13923 fold_check_failed (op0, tem);
13924 #endif
13925 return tem;
13928 /* Fold a binary tree expression with code CODE of type TYPE with
13929 operands OP0 and OP1. Return a folded expression if successful.
13930 Otherwise, return a tree expression with code CODE of type TYPE
13931 with operands OP0 and OP1. */
13933 tree
13934 fold_build2_stat (enum tree_code code, tree type, tree op0, tree op1
13935 MEM_STAT_DECL)
13937 tree tem;
13938 #ifdef ENABLE_FOLD_CHECKING
13939 unsigned char checksum_before_op0[16],
13940 checksum_before_op1[16],
13941 checksum_after_op0[16],
13942 checksum_after_op1[16];
13943 struct md5_ctx ctx;
13944 htab_t ht;
13946 ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
13947 md5_init_ctx (&ctx);
13948 fold_checksum_tree (op0, &ctx, ht);
13949 md5_finish_ctx (&ctx, checksum_before_op0);
13950 htab_empty (ht);
13952 md5_init_ctx (&ctx);
13953 fold_checksum_tree (op1, &ctx, ht);
13954 md5_finish_ctx (&ctx, checksum_before_op1);
13955 htab_empty (ht);
13956 #endif
13958 tem = fold_binary (code, type, op0, op1);
13959 if (!tem)
13960 tem = build2_stat (code, type, op0, op1 PASS_MEM_STAT);
13962 #ifdef ENABLE_FOLD_CHECKING
13963 md5_init_ctx (&ctx);
13964 fold_checksum_tree (op0, &ctx, ht);
13965 md5_finish_ctx (&ctx, checksum_after_op0);
13966 htab_empty (ht);
13968 if (memcmp (checksum_before_op0, checksum_after_op0, 16))
13969 fold_check_failed (op0, tem);
13971 md5_init_ctx (&ctx);
13972 fold_checksum_tree (op1, &ctx, ht);
13973 md5_finish_ctx (&ctx, checksum_after_op1);
13974 htab_delete (ht);
13976 if (memcmp (checksum_before_op1, checksum_after_op1, 16))
13977 fold_check_failed (op1, tem);
13978 #endif
13979 return tem;
13982 /* Fold a ternary tree expression with code CODE of type TYPE with
13983 operands OP0, OP1, and OP2. Return a folded expression if
13984 successful. Otherwise, return a tree expression with code CODE of
13985 type TYPE with operands OP0, OP1, and OP2. */
13987 tree
13988 fold_build3_stat (enum tree_code code, tree type, tree op0, tree op1, tree op2
13989 MEM_STAT_DECL)
13991 tree tem;
13992 #ifdef ENABLE_FOLD_CHECKING
13993 unsigned char checksum_before_op0[16],
13994 checksum_before_op1[16],
13995 checksum_before_op2[16],
13996 checksum_after_op0[16],
13997 checksum_after_op1[16],
13998 checksum_after_op2[16];
13999 struct md5_ctx ctx;
14000 htab_t ht;
14002 ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
14003 md5_init_ctx (&ctx);
14004 fold_checksum_tree (op0, &ctx, ht);
14005 md5_finish_ctx (&ctx, checksum_before_op0);
14006 htab_empty (ht);
14008 md5_init_ctx (&ctx);
14009 fold_checksum_tree (op1, &ctx, ht);
14010 md5_finish_ctx (&ctx, checksum_before_op1);
14011 htab_empty (ht);
14013 md5_init_ctx (&ctx);
14014 fold_checksum_tree (op2, &ctx, ht);
14015 md5_finish_ctx (&ctx, checksum_before_op2);
14016 htab_empty (ht);
14017 #endif
14019 gcc_assert (TREE_CODE_CLASS (code) != tcc_vl_exp);
14020 tem = fold_ternary (code, type, op0, op1, op2);
14021 if (!tem)
14022 tem = build3_stat (code, type, op0, op1, op2 PASS_MEM_STAT);
14024 #ifdef ENABLE_FOLD_CHECKING
14025 md5_init_ctx (&ctx);
14026 fold_checksum_tree (op0, &ctx, ht);
14027 md5_finish_ctx (&ctx, checksum_after_op0);
14028 htab_empty (ht);
14030 if (memcmp (checksum_before_op0, checksum_after_op0, 16))
14031 fold_check_failed (op0, tem);
14033 md5_init_ctx (&ctx);
14034 fold_checksum_tree (op1, &ctx, ht);
14035 md5_finish_ctx (&ctx, checksum_after_op1);
14036 htab_empty (ht);
14038 if (memcmp (checksum_before_op1, checksum_after_op1, 16))
14039 fold_check_failed (op1, tem);
14041 md5_init_ctx (&ctx);
14042 fold_checksum_tree (op2, &ctx, ht);
14043 md5_finish_ctx (&ctx, checksum_after_op2);
14044 htab_delete (ht);
14046 if (memcmp (checksum_before_op2, checksum_after_op2, 16))
14047 fold_check_failed (op2, tem);
14048 #endif
14049 return tem;
14052 /* Fold a CALL_EXPR expression of type TYPE with operands FN and NARGS
14053 arguments in ARGARRAY, and a null static chain.
14054 Return a folded expression if successful. Otherwise, return a CALL_EXPR
14055 of type TYPE from the given operands as constructed by build_call_array. */
14057 tree
14058 fold_build_call_array (tree type, tree fn, int nargs, tree *argarray)
14060 tree tem;
14061 #ifdef ENABLE_FOLD_CHECKING
14062 unsigned char checksum_before_fn[16],
14063 checksum_before_arglist[16],
14064 checksum_after_fn[16],
14065 checksum_after_arglist[16];
14066 struct md5_ctx ctx;
14067 htab_t ht;
14068 int i;
14070 ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
14071 md5_init_ctx (&ctx);
14072 fold_checksum_tree (fn, &ctx, ht);
14073 md5_finish_ctx (&ctx, checksum_before_fn);
14074 htab_empty (ht);
14076 md5_init_ctx (&ctx);
14077 for (i = 0; i < nargs; i++)
14078 fold_checksum_tree (argarray[i], &ctx, ht);
14079 md5_finish_ctx (&ctx, checksum_before_arglist);
14080 htab_empty (ht);
14081 #endif
14083 tem = fold_builtin_call_array (type, fn, nargs, argarray);
14085 #ifdef ENABLE_FOLD_CHECKING
14086 md5_init_ctx (&ctx);
14087 fold_checksum_tree (fn, &ctx, ht);
14088 md5_finish_ctx (&ctx, checksum_after_fn);
14089 htab_empty (ht);
14091 if (memcmp (checksum_before_fn, checksum_after_fn, 16))
14092 fold_check_failed (fn, tem);
14094 md5_init_ctx (&ctx);
14095 for (i = 0; i < nargs; i++)
14096 fold_checksum_tree (argarray[i], &ctx, ht);
14097 md5_finish_ctx (&ctx, checksum_after_arglist);
14098 htab_delete (ht);
14100 if (memcmp (checksum_before_arglist, checksum_after_arglist, 16))
14101 fold_check_failed (NULL_TREE, tem);
14102 #endif
14103 return tem;
14106 /* Perform constant folding and related simplification of initializer
14107 expression EXPR. These behave identically to "fold_buildN" but ignore
14108 potential run-time traps and exceptions that fold must preserve. */
14110 #define START_FOLD_INIT \
14111 int saved_signaling_nans = flag_signaling_nans;\
14112 int saved_trapping_math = flag_trapping_math;\
14113 int saved_rounding_math = flag_rounding_math;\
14114 int saved_trapv = flag_trapv;\
14115 int saved_folding_initializer = folding_initializer;\
14116 flag_signaling_nans = 0;\
14117 flag_trapping_math = 0;\
14118 flag_rounding_math = 0;\
14119 flag_trapv = 0;\
14120 folding_initializer = 1;
14122 #define END_FOLD_INIT \
14123 flag_signaling_nans = saved_signaling_nans;\
14124 flag_trapping_math = saved_trapping_math;\
14125 flag_rounding_math = saved_rounding_math;\
14126 flag_trapv = saved_trapv;\
14127 folding_initializer = saved_folding_initializer;
14129 tree
14130 fold_build1_initializer (enum tree_code code, tree type, tree op)
14132 tree result;
14133 START_FOLD_INIT;
14135 result = fold_build1 (code, type, op);
14137 END_FOLD_INIT;
14138 return result;
14141 tree
14142 fold_build2_initializer (enum tree_code code, tree type, tree op0, tree op1)
14144 tree result;
14145 START_FOLD_INIT;
14147 result = fold_build2 (code, type, op0, op1);
14149 END_FOLD_INIT;
14150 return result;
14153 tree
14154 fold_build3_initializer (enum tree_code code, tree type, tree op0, tree op1,
14155 tree op2)
14157 tree result;
14158 START_FOLD_INIT;
14160 result = fold_build3 (code, type, op0, op1, op2);
14162 END_FOLD_INIT;
14163 return result;
14166 tree
14167 fold_build_call_array_initializer (tree type, tree fn,
14168 int nargs, tree *argarray)
14170 tree result;
14171 START_FOLD_INIT;
14173 result = fold_build_call_array (type, fn, nargs, argarray);
14175 END_FOLD_INIT;
14176 return result;
14179 #undef START_FOLD_INIT
14180 #undef END_FOLD_INIT
14182 /* Determine if first argument is a multiple of second argument. Return 0 if
14183 it is not, or if we cannot easily determine it to be.
14185 An example of the sort of thing we care about (at this point; this routine
14186 could surely be made more general, and expanded to do what the *_DIV_EXPR's
14187 fold cases do now) is discovering that
14189 SAVE_EXPR (I) * SAVE_EXPR (J * 8)
14191 is a multiple of
14193 SAVE_EXPR (J * 8)
14195 when we know that the two SAVE_EXPR (J * 8) nodes are the same node.
14197 This code also handles discovering that
14199 SAVE_EXPR (I) * SAVE_EXPR (J * 8)
14201 is a multiple of 8 so we don't have to worry about dealing with a
14202 possible remainder.
14204 Note that we *look* inside a SAVE_EXPR only to determine how it was
14205 calculated; it is not safe for fold to do much of anything else with the
14206 internals of a SAVE_EXPR, since it cannot know when it will be evaluated
14207 at run time. For example, the latter example above *cannot* be implemented
14208 as SAVE_EXPR (I) * J or any variant thereof, since the value of J at
14209 evaluation time of the original SAVE_EXPR is not necessarily the same at
14210 the time the new expression is evaluated. The only optimization of this
14211 sort that would be valid is changing
14213 SAVE_EXPR (I) * SAVE_EXPR (SAVE_EXPR (J) * 8)
14215 divided by 8 to
14217 SAVE_EXPR (I) * SAVE_EXPR (J)
14219 (where the same SAVE_EXPR (J) is used in the original and the
14220 transformed version). */
14222 static int
14223 multiple_of_p (tree type, const_tree top, const_tree bottom)
14225 if (operand_equal_p (top, bottom, 0))
14226 return 1;
14228 if (TREE_CODE (type) != INTEGER_TYPE)
14229 return 0;
14231 switch (TREE_CODE (top))
14233 case BIT_AND_EXPR:
14234 /* Bitwise and provides a power of two multiple. If the mask is
14235 a multiple of BOTTOM then TOP is a multiple of BOTTOM. */
14236 if (!integer_pow2p (bottom))
14237 return 0;
14238 /* FALLTHRU */
14240 case MULT_EXPR:
14241 return (multiple_of_p (type, TREE_OPERAND (top, 0), bottom)
14242 || multiple_of_p (type, TREE_OPERAND (top, 1), bottom));
14244 case PLUS_EXPR:
14245 case MINUS_EXPR:
14246 return (multiple_of_p (type, TREE_OPERAND (top, 0), bottom)
14247 && multiple_of_p (type, TREE_OPERAND (top, 1), bottom));
14249 case LSHIFT_EXPR:
14250 if (TREE_CODE (TREE_OPERAND (top, 1)) == INTEGER_CST)
14252 tree op1, t1;
14254 op1 = TREE_OPERAND (top, 1);
14255 /* const_binop may not detect overflow correctly,
14256 so check for it explicitly here. */
14257 if (TYPE_PRECISION (TREE_TYPE (size_one_node))
14258 > TREE_INT_CST_LOW (op1)
14259 && TREE_INT_CST_HIGH (op1) == 0
14260 && 0 != (t1 = fold_convert (type,
14261 const_binop (LSHIFT_EXPR,
14262 size_one_node,
14263 op1, 0)))
14264 && !TREE_OVERFLOW (t1))
14265 return multiple_of_p (type, t1, bottom);
14267 return 0;
14269 case NOP_EXPR:
14270 /* Can't handle conversions from non-integral or wider integral type. */
14271 if ((TREE_CODE (TREE_TYPE (TREE_OPERAND (top, 0))) != INTEGER_TYPE)
14272 || (TYPE_PRECISION (type)
14273 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (top, 0)))))
14274 return 0;
14276 /* .. fall through ... */
14278 case SAVE_EXPR:
14279 return multiple_of_p (type, TREE_OPERAND (top, 0), bottom);
14281 case INTEGER_CST:
14282 if (TREE_CODE (bottom) != INTEGER_CST
14283 || integer_zerop (bottom)
14284 || (TYPE_UNSIGNED (type)
14285 && (tree_int_cst_sgn (top) < 0
14286 || tree_int_cst_sgn (bottom) < 0)))
14287 return 0;
14288 return integer_zerop (int_const_binop (TRUNC_MOD_EXPR,
14289 top, bottom, 0));
14291 default:
14292 return 0;
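/* Editorial sketch, illustrative only: a direct use of multiple_of_p
   on constants.  */
static int
multiple_of_p_example (void)
{
  tree eight = build_int_cst (integer_type_node, 8);
  tree forty = build_int_cst (integer_type_node, 40);
  /* 40 % 8 == 0, so this returns 1; swapping TOP and BOTTOM would
     return 0.  */
  return multiple_of_p (integer_type_node, forty, eight);
}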
14296 /* Return true if an expression of code CODE and type TYPE is known to be
      non-negative from the code and type alone. */
14298 static bool
14299 tree_simple_nonnegative_warnv_p (enum tree_code code, tree type)
14301 if ((TYPE_PRECISION (type) != 1 || TYPE_UNSIGNED (type))
14302 && truth_value_p (code))
14303 /* Truth values evaluate to 0 or 1, which is nonnegative unless we
14304 have a signed:1 type (where the values are -1 and 0). */
14305 return true;
14306 return false;
14309 /* Return true if (CODE OP0) is known to be non-negative. If the return
14310 value is based on the assumption that signed overflow is undefined,
14311 set *STRICT_OVERFLOW_P to true; otherwise, don't change
14312 *STRICT_OVERFLOW_P. */
14314 bool
14315 tree_unary_nonnegative_warnv_p (enum tree_code code, tree type, tree op0,
14316 bool *strict_overflow_p)
14318 if (TYPE_UNSIGNED (type))
14319 return true;
14321 switch (code)
14323 case ABS_EXPR:
14324 /* We can't return 1 if flag_wrapv is set because
14325 ABS_EXPR<INT_MIN> = INT_MIN. */
14326 if (!INTEGRAL_TYPE_P (type))
14327 return true;
14328 break;
14330 case NON_LVALUE_EXPR:
14331 case FLOAT_EXPR:
14332 case FIX_TRUNC_EXPR:
14333 return tree_expr_nonnegative_warnv_p (op0,
14334 strict_overflow_p);
14336 CASE_CONVERT:
14338 tree inner_type = TREE_TYPE (op0);
14339 tree outer_type = type;
14341 if (TREE_CODE (outer_type) == REAL_TYPE)
14343 if (TREE_CODE (inner_type) == REAL_TYPE)
14344 return tree_expr_nonnegative_warnv_p (op0,
14345 strict_overflow_p);
14346 if (TREE_CODE (inner_type) == INTEGER_TYPE)
14348 if (TYPE_UNSIGNED (inner_type))
14349 return true;
14350 return tree_expr_nonnegative_warnv_p (op0,
14351 strict_overflow_p);
14354 else if (TREE_CODE (outer_type) == INTEGER_TYPE)
14356 if (TREE_CODE (inner_type) == REAL_TYPE)
14357 return tree_expr_nonnegative_warnv_p (op0,
14358 strict_overflow_p);
14359 if (TREE_CODE (inner_type) == INTEGER_TYPE)
14360 return TYPE_PRECISION (inner_type) < TYPE_PRECISION (outer_type)
14361 && TYPE_UNSIGNED (inner_type);
14364 break;
14366 default:
14367 return tree_simple_nonnegative_warnv_p (code, type);
14370 /* We don't know sign of `t', so be conservative and return false. */
14371 return false;
14374 /* Return true if (CODE OP0 OP1) is known to be non-negative. If the return
14375 value is based on the assumption that signed overflow is undefined,
14376 set *STRICT_OVERFLOW_P to true; otherwise, don't change
14377 *STRICT_OVERFLOW_P. */
14379 bool
14380 tree_binary_nonnegative_warnv_p (enum tree_code code, tree type, tree op0,
14381 tree op1, bool *strict_overflow_p)
14383 if (TYPE_UNSIGNED (type))
14384 return true;
14386 switch (code)
14388 case POINTER_PLUSNV_EXPR:
14389 case POINTER_PLUS_EXPR:
14390 /* Pointers do not have a "sign". */
14391 return false;
14393 case PLUSNV_EXPR:
14394 if (INTEGRAL_TYPE_P (type))
14396 *strict_overflow_p = true;
14397 return (tree_expr_nonnegative_warnv_p (op0, strict_overflow_p)
14398 && tree_expr_nonnegative_warnv_p (op1, strict_overflow_p));
14401 /* Fallthru. */
14402 case PLUS_EXPR:
14403 if (FLOAT_TYPE_P (type))
14404 return (tree_expr_nonnegative_warnv_p (op0,
14405 strict_overflow_p)
14406 && tree_expr_nonnegative_warnv_p (op1,
14407 strict_overflow_p));
14409 /* zero_extend(x) + zero_extend(y) is non-negative if x and y are
14410 both unsigned and at least 2 bits shorter than the result. */
14411 if (TREE_CODE (type) == INTEGER_TYPE
14412 && TREE_CODE (op0) == NOP_EXPR
14413 && TREE_CODE (op1) == NOP_EXPR)
14415 tree inner1 = TREE_TYPE (TREE_OPERAND (op0, 0));
14416 tree inner2 = TREE_TYPE (TREE_OPERAND (op1, 0));
14417 if (TREE_CODE (inner1) == INTEGER_TYPE && TYPE_UNSIGNED (inner1)
14418 && TREE_CODE (inner2) == INTEGER_TYPE && TYPE_UNSIGNED (inner2))
14420 unsigned int prec = MAX (TYPE_PRECISION (inner1),
14421 TYPE_PRECISION (inner2)) + 1;
14422 return prec < TYPE_PRECISION (type);
14425 break;
14427 case MULTNV_EXPR:
14428 if (INTEGRAL_TYPE_P (type))
14430 *strict_overflow_p = true;
14431 /* x * x without overflowing is always non-negative. */
14432 if (operand_equal_p (op0, op1, 0))
14433 return true;
14434 return (tree_expr_nonnegative_warnv_p (op0, strict_overflow_p)
14435 && tree_expr_nonnegative_warnv_p (op1, strict_overflow_p));
14438 /* Fallthru. */
14439 case MULT_EXPR:
14440 if (FLOAT_TYPE_P (type))
14442 /* x * x for floating point x is always non-negative. */
14443 if (operand_equal_p (op0, op1, 0))
14444 return true;
14445 return (tree_expr_nonnegative_warnv_p (op0,
14446 strict_overflow_p)
14447 && tree_expr_nonnegative_warnv_p (op1,
14448 strict_overflow_p));
14451 /* zero_extend(x) * zero_extend(y) is non-negative if x and y are
14452 both unsigned and the total of their precisions is less than that of the result. */
14453 if (TREE_CODE (type) == INTEGER_TYPE
14454 && (TREE_CODE (op0) == NOP_EXPR || TREE_CODE (op0) == INTEGER_CST)
14455 && (TREE_CODE (op1) == NOP_EXPR || TREE_CODE (op1) == INTEGER_CST))
14457 tree inner0 = (TREE_CODE (op0) == NOP_EXPR)
14458 ? TREE_TYPE (TREE_OPERAND (op0, 0))
14459 : TREE_TYPE (op0);
14460 tree inner1 = (TREE_CODE (op1) == NOP_EXPR)
14461 ? TREE_TYPE (TREE_OPERAND (op1, 0))
14462 : TREE_TYPE (op1);
14464 bool unsigned0 = TYPE_UNSIGNED (inner0);
14465 bool unsigned1 = TYPE_UNSIGNED (inner1);
14467 if (TREE_CODE (op0) == INTEGER_CST)
14468 unsigned0 = unsigned0 || tree_int_cst_sgn (op0) >= 0;
14470 if (TREE_CODE (op1) == INTEGER_CST)
14471 unsigned1 = unsigned1 || tree_int_cst_sgn (op1) >= 0;
14473 if (TREE_CODE (inner0) == INTEGER_TYPE && unsigned0
14474 && TREE_CODE (inner1) == INTEGER_TYPE && unsigned1)
14476 unsigned int precision0 = (TREE_CODE (op0) == INTEGER_CST)
14477 ? tree_int_cst_min_precision (op0, /*unsignedp=*/true)
14478 : TYPE_PRECISION (inner0);
14480 unsigned int precision1 = (TREE_CODE (op1) == INTEGER_CST)
14481 ? tree_int_cst_min_precision (op1, /*unsignedp=*/true)
14482 : TYPE_PRECISION (inner1);
14484 return precision0 + precision1 < TYPE_PRECISION (type);
14487 return false;
14489 case BIT_AND_EXPR:
14490 case MAX_EXPR:
14491 return (tree_expr_nonnegative_warnv_p (op0,
14492 strict_overflow_p)
14493 || tree_expr_nonnegative_warnv_p (op1,
14494 strict_overflow_p));
14496 case BIT_IOR_EXPR:
14497 case BIT_XOR_EXPR:
14498 case MIN_EXPR:
14499 case RDIV_EXPR:
14500 case TRUNC_DIV_EXPR:
14501 case CEIL_DIV_EXPR:
14502 case FLOOR_DIV_EXPR:
14503 case ROUND_DIV_EXPR:
14504 return (tree_expr_nonnegative_warnv_p (op0,
14505 strict_overflow_p)
14506 && tree_expr_nonnegative_warnv_p (op1,
14507 strict_overflow_p));
14509 case TRUNC_MOD_EXPR:
14510 case CEIL_MOD_EXPR:
14511 case FLOOR_MOD_EXPR:
14512 case ROUND_MOD_EXPR:
14513 return tree_expr_nonnegative_warnv_p (op0,
14514 strict_overflow_p);
14515 default:
14516 return tree_simple_nonnegative_warnv_p (code, type);
14519 /* We don't know sign of `t', so be conservative and return false. */
14520 return false;
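/* Editorial sketch, illustrative only, assuming X8 and Y8 are
   unsigned-char-typed trees and int is 32 bits wide: both widened
   operands fit in 8 bits, so MAX (8, 8) + 1 = 9 < 32 and the
   PLUS_EXPR case above proves the sum non-negative.  */
static bool
zero_extend_sum_example (tree x8, tree y8)
{
  tree xi = fold_convert (integer_type_node, x8);
  tree yi = fold_convert (integer_type_node, y8);
  tree sum = fold_build2 (PLUS_EXPR, integer_type_node, xi, yi);
  return tree_expr_nonnegative_p (sum);
}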
14523 /* Return true if T is known to be non-negative. If the return
14524 value is based on the assumption that signed overflow is undefined,
14525 set *STRICT_OVERFLOW_P to true; otherwise, don't change
14526 *STRICT_OVERFLOW_P. */
14528 bool
14529 tree_single_nonnegative_warnv_p (tree t, bool *strict_overflow_p)
14531 if (TYPE_UNSIGNED (TREE_TYPE (t)))
14532 return true;
14534 switch (TREE_CODE (t))
14536 case INTEGER_CST:
14537 return tree_int_cst_sgn (t) >= 0;
14539 case REAL_CST:
14540 return ! REAL_VALUE_NEGATIVE (TREE_REAL_CST (t));
14542 case FIXED_CST:
14543 return ! FIXED_VALUE_NEGATIVE (TREE_FIXED_CST (t));
14545 case COND_EXPR:
14546 return (tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 1),
14547 strict_overflow_p)
14548 && tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 2),
14549 strict_overflow_p));
14550 default:
14551 return tree_simple_nonnegative_warnv_p (TREE_CODE (t),
14552 TREE_TYPE (t));
14554 /* We don't know sign of `t', so be conservative and return false. */
14555 return false;
14558 /* Return true if T is known to be non-negative. If the return
14559 value is based on the assumption that signed overflow is undefined,
14560 set *STRICT_OVERFLOW_P to true; otherwise, don't change
14561 *STRICT_OVERFLOW_P. */
14563 bool
14564 tree_call_nonnegative_warnv_p (tree type, tree fndecl,
14565 tree arg0, tree arg1, bool *strict_overflow_p)
14567 if (fndecl && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL)
14568 switch (DECL_FUNCTION_CODE (fndecl))
14570 CASE_FLT_FN (BUILT_IN_ACOS):
14571 CASE_FLT_FN (BUILT_IN_ACOSH):
14572 CASE_FLT_FN (BUILT_IN_CABS):
14573 CASE_FLT_FN (BUILT_IN_COSH):
14574 CASE_FLT_FN (BUILT_IN_ERFC):
14575 CASE_FLT_FN (BUILT_IN_EXP):
14576 CASE_FLT_FN (BUILT_IN_EXP10):
14577 CASE_FLT_FN (BUILT_IN_EXP2):
14578 CASE_FLT_FN (BUILT_IN_FABS):
14579 CASE_FLT_FN (BUILT_IN_FDIM):
14580 CASE_FLT_FN (BUILT_IN_HYPOT):
14581 CASE_FLT_FN (BUILT_IN_POW10):
14582 CASE_INT_FN (BUILT_IN_FFS):
14583 CASE_INT_FN (BUILT_IN_PARITY):
14584 CASE_INT_FN (BUILT_IN_POPCOUNT):
14585 case BUILT_IN_BSWAP32:
14586 case BUILT_IN_BSWAP64:
14587 /* Always true. */
14588 return true;
14590 CASE_FLT_FN (BUILT_IN_SQRT):
14591 /* sqrt(-0.0) is -0.0. */
14592 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (type)))
14593 return true;
14594 return tree_expr_nonnegative_warnv_p (arg0,
14595 strict_overflow_p);
14597 CASE_FLT_FN (BUILT_IN_ASINH):
14598 CASE_FLT_FN (BUILT_IN_ATAN):
14599 CASE_FLT_FN (BUILT_IN_ATANH):
14600 CASE_FLT_FN (BUILT_IN_CBRT):
14601 CASE_FLT_FN (BUILT_IN_CEIL):
14602 CASE_FLT_FN (BUILT_IN_ERF):
14603 CASE_FLT_FN (BUILT_IN_EXPM1):
14604 CASE_FLT_FN (BUILT_IN_FLOOR):
14605 CASE_FLT_FN (BUILT_IN_FMOD):
14606 CASE_FLT_FN (BUILT_IN_FREXP):
14607 CASE_FLT_FN (BUILT_IN_LCEIL):
14608 CASE_FLT_FN (BUILT_IN_LDEXP):
14609 CASE_FLT_FN (BUILT_IN_LFLOOR):
14610 CASE_FLT_FN (BUILT_IN_LLCEIL):
14611 CASE_FLT_FN (BUILT_IN_LLFLOOR):
14612 CASE_FLT_FN (BUILT_IN_LLRINT):
14613 CASE_FLT_FN (BUILT_IN_LLROUND):
14614 CASE_FLT_FN (BUILT_IN_LRINT):
14615 CASE_FLT_FN (BUILT_IN_LROUND):
14616 CASE_FLT_FN (BUILT_IN_MODF):
14617 CASE_FLT_FN (BUILT_IN_NEARBYINT):
14618 CASE_FLT_FN (BUILT_IN_RINT):
14619 CASE_FLT_FN (BUILT_IN_ROUND):
14620 CASE_FLT_FN (BUILT_IN_SCALB):
14621 CASE_FLT_FN (BUILT_IN_SCALBLN):
14622 CASE_FLT_FN (BUILT_IN_SCALBN):
14623 CASE_FLT_FN (BUILT_IN_SIGNBIT):
14624 CASE_FLT_FN (BUILT_IN_SIGNIFICAND):
14625 CASE_FLT_FN (BUILT_IN_SINH):
14626 CASE_FLT_FN (BUILT_IN_TANH):
14627 CASE_FLT_FN (BUILT_IN_TRUNC):
14628 /* True if the 1st argument is nonnegative. */
14629 return tree_expr_nonnegative_warnv_p (arg0,
14630 strict_overflow_p);
14632 CASE_FLT_FN (BUILT_IN_FMAX):
14633 /* True if the 1st OR 2nd arguments are nonnegative. */
14634 return (tree_expr_nonnegative_warnv_p (arg0,
14635 strict_overflow_p)
14636 || (tree_expr_nonnegative_warnv_p (arg1,
14637 strict_overflow_p)));
14639 CASE_FLT_FN (BUILT_IN_FMIN):
14640 /* True if the 1st AND 2nd arguments are nonnegative. */
14641 return (tree_expr_nonnegative_warnv_p (arg0,
14642 strict_overflow_p)
14643 && (tree_expr_nonnegative_warnv_p (arg1,
14644 strict_overflow_p)));
14646 CASE_FLT_FN (BUILT_IN_COPYSIGN):
14647 /* True if the 2nd argument is nonnegative. */
14648 return tree_expr_nonnegative_warnv_p (arg1,
14649 strict_overflow_p);
14651 CASE_FLT_FN (BUILT_IN_POWI):
14652 /* True if the 1st argument is nonnegative or the second
14653 argument is an even integer. */
14654 if (TREE_CODE (arg1) == INTEGER_CST
14655 && (TREE_INT_CST_LOW (arg1) & 1) == 0)
14656 return true;
14657 return tree_expr_nonnegative_warnv_p (arg0,
14658 strict_overflow_p);
14660 CASE_FLT_FN (BUILT_IN_POW):
14661 /* True if the 1st argument is nonnegative or the second
14662 argument is an even integer valued real. */
14663 if (TREE_CODE (arg1) == REAL_CST)
14665 REAL_VALUE_TYPE c;
14666 HOST_WIDE_INT n;
14668 c = TREE_REAL_CST (arg1);
14669 n = real_to_integer (&c);
14670 if ((n & 1) == 0)
14672 REAL_VALUE_TYPE cint;
14673 real_from_integer (&cint, VOIDmode, n,
14674 n < 0 ? -1 : 0, 0);
14675 if (real_identical (&c, &cint))
14676 return true;
14679 return tree_expr_nonnegative_warnv_p (arg0,
14680 strict_overflow_p);
14682 default:
14683 break;
14685 return tree_simple_nonnegative_warnv_p (CALL_EXPR,
14686 type);
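/* Editorial sketch, illustrative only, assuming the pow builtin has
   been registered in built_in_decls: the exponent 2.0 is an even
   integer-valued REAL_CST, so the BUILT_IN_POW case above answers
   true no matter what X is.  */
static bool
pow_even_exponent_example (tree x)
{
  tree two = build_real (double_type_node, dconst2);
  bool sop = false;
  return tree_call_nonnegative_warnv_p (double_type_node,
                                        built_in_decls[BUILT_IN_POW],
                                        x, two, &sop);
}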
14689 /* Return true if T is known to be non-negative. If the return
14690 value is based on the assumption that signed overflow is undefined,
14691 set *STRICT_OVERFLOW_P to true; otherwise, don't change
14692 *STRICT_OVERFLOW_P. */
14694 bool
14695 tree_invalid_nonnegative_warnv_p (tree t, bool *strict_overflow_p)
14697 enum tree_code code = TREE_CODE (t);
14698 if (TYPE_UNSIGNED (TREE_TYPE (t)))
14699 return true;
14701 switch (code)
14703 case TARGET_EXPR:
14705 tree temp = TARGET_EXPR_SLOT (t);
14706 t = TARGET_EXPR_INITIAL (t);
14708 /* If the initializer is non-void, then it's a normal expression
14709 that will be assigned to the slot. */
14710 if (!VOID_TYPE_P (t))
14711 return tree_expr_nonnegative_warnv_p (t, strict_overflow_p);
14713 /* Otherwise, the initializer sets the slot in some way. One common
14714 way is an assignment statement at the end of the initializer. */
14715 while (1)
14717 if (TREE_CODE (t) == BIND_EXPR)
14718 t = expr_last (BIND_EXPR_BODY (t));
14719 else if (TREE_CODE (t) == TRY_FINALLY_EXPR
14720 || TREE_CODE (t) == TRY_CATCH_EXPR)
14721 t = expr_last (TREE_OPERAND (t, 0));
14722 else if (TREE_CODE (t) == STATEMENT_LIST)
14723 t = expr_last (t);
14724 else
14725 break;
14727 if (TREE_CODE (t) == MODIFY_EXPR
14728 && TREE_OPERAND (t, 0) == temp)
14729 return tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 1),
14730 strict_overflow_p);
14732 return false;
14735 case CALL_EXPR:
14737 tree arg0 = call_expr_nargs (t) > 0 ? CALL_EXPR_ARG (t, 0) : NULL_TREE;
14738 tree arg1 = call_expr_nargs (t) > 1 ? CALL_EXPR_ARG (t, 1) : NULL_TREE;
14740 return tree_call_nonnegative_warnv_p (TREE_TYPE (t),
14741 get_callee_fndecl (t),
14742 arg0,
14743 arg1,
14744 strict_overflow_p);
14746 case COMPOUND_EXPR:
14747 case MODIFY_EXPR:
14748 return tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 1),
14749 strict_overflow_p);
14750 case BIND_EXPR:
14751 return tree_expr_nonnegative_warnv_p (expr_last (TREE_OPERAND (t, 1)),
14752 strict_overflow_p);
14753 case SAVE_EXPR:
14754 return tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 0),
14755 strict_overflow_p);
14757 default:
14758 return tree_simple_nonnegative_warnv_p (TREE_CODE (t),
14759 TREE_TYPE (t));
14762 /* We don't know sign of `t', so be conservative and return false. */
14763 return false;
14766 /* Return true if T is known to be non-negative. If the return
14767 value is based on the assumption that signed overflow is undefined,
14768 set *STRICT_OVERFLOW_P to true; otherwise, don't change
14769 *STRICT_OVERFLOW_P. */
14771 bool
14772 tree_expr_nonnegative_warnv_p (tree t, bool *strict_overflow_p)
14774 enum tree_code code;
14775 if (t == error_mark_node)
14776 return false;
14778 code = TREE_CODE (t);
14779 switch (TREE_CODE_CLASS (code))
14781 case tcc_binary:
14782 case tcc_comparison:
14783 return tree_binary_nonnegative_warnv_p (TREE_CODE (t),
14784 TREE_TYPE (t),
14785 TREE_OPERAND (t, 0),
14786 TREE_OPERAND (t, 1),
14787 strict_overflow_p);
14789 case tcc_unary:
14790 return tree_unary_nonnegative_warnv_p (TREE_CODE (t),
14791 TREE_TYPE (t),
14792 TREE_OPERAND (t, 0),
14793 strict_overflow_p);
14795 case tcc_constant:
14796 case tcc_declaration:
14797 case tcc_reference:
14798 return tree_single_nonnegative_warnv_p (t, strict_overflow_p);
14800 default:
14801 break;
14804 switch (code)
14806 case TRUTH_AND_EXPR:
14807 case TRUTH_OR_EXPR:
14808 case TRUTH_XOR_EXPR:
14809 return tree_binary_nonnegative_warnv_p (TREE_CODE (t),
14810 TREE_TYPE (t),
14811 TREE_OPERAND (t, 0),
14812 TREE_OPERAND (t, 1),
14813 strict_overflow_p);
14814 case TRUTH_NOT_EXPR:
14815 return tree_unary_nonnegative_warnv_p (TREE_CODE (t),
14816 TREE_TYPE (t),
14817 TREE_OPERAND (t, 0),
14818 strict_overflow_p);
14820 case COND_EXPR:
14821 case CONSTRUCTOR:
14822 case OBJ_TYPE_REF:
14823 case ASSERT_EXPR:
14824 case ADDR_EXPR:
14825 case WITH_SIZE_EXPR:
14826 case EXC_PTR_EXPR:
14827 case SSA_NAME:
14828 case FILTER_EXPR:
14829 return tree_single_nonnegative_warnv_p (t, strict_overflow_p);
14831 default:
14832 return tree_invalid_nonnegative_warnv_p (t, strict_overflow_p);
14836 /* Return true if `t' is known to be non-negative. Handle warnings
14837 about undefined signed overflow. */
14839 bool
14840 tree_expr_nonnegative_p (tree t)
14842 bool ret, strict_overflow_p;
14844 strict_overflow_p = false;
14845 ret = tree_expr_nonnegative_warnv_p (t, &strict_overflow_p);
14846 if (strict_overflow_p)
14847 fold_overflow_warning (("assuming signed overflow does not occur when "
14848 "determining that expression is always "
14849 "non-negative"),
14850 WARN_STRICT_OVERFLOW_MISC);
14851 return ret;
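/* Editorial sketch, illustrative only: ABS_EXPR of a double is
   always non-negative via the ABS_EXPR case in
   tree_unary_nonnegative_warnv_p; the INT_MIN caveat there applies
   only to integral types.  */
static bool
abs_nonnegative_example (tree x)
{
  return tree_expr_nonnegative_p (fold_build1 (ABS_EXPR,
                                               double_type_node, x));
}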
14855 /* Return true when (CODE OP0) is known to be nonzero.
14856 For floating point we further ensure that T is not denormal.
14857 Similar logic is present in nonzero_address_p in rtlanal.c.
14859 If the return value is based on the assumption that signed overflow
14860 is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
14861 change *STRICT_OVERFLOW_P. */
14863 bool
14864 tree_unary_nonzero_warnv_p (enum tree_code code, tree type, tree op0,
14865 bool *strict_overflow_p)
14867 switch (code)
14869 case ABS_EXPR:
14870 return tree_expr_nonzero_warnv_p (op0,
14871 strict_overflow_p);
14873 case NOP_EXPR:
14875 tree inner_type = TREE_TYPE (op0);
14876 tree outer_type = type;
14878 return (TYPE_PRECISION (outer_type) >= TYPE_PRECISION (inner_type)
14879 && tree_expr_nonzero_warnv_p (op0,
14880 strict_overflow_p));
14882 break;
14884 case NON_LVALUE_EXPR:
14885 return tree_expr_nonzero_warnv_p (op0,
14886 strict_overflow_p);
14888 default:
14889 break;
14892 return false;
14895 /* Return true when (CODE OP0 OP1) is known to be nonzero.
14896 For floating point we further ensure that T is not denormal.
14897 Similar logic is present in nonzero_address_p in rtlanal.c.
14899 If the return value is based on the assumption that signed overflow
14900 is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
14901 change *STRICT_OVERFLOW_P. */
14903 bool
14904 tree_binary_nonzero_warnv_p (enum tree_code code,
14905 tree type ATTRIBUTE_UNUSED,
14906 tree op0,
14907 tree op1, bool *strict_overflow_p)
14909 bool sub_strict_overflow_p;
14910 switch (code)
14912 case POINTER_PLUS_EXPR:
14913 case PLUS_EXPR:
14914 if (TYPE_OVERFLOW_UNDEFINED (type))
14916 /* In the presence of negative values it is hard
14917 to say anything. */
14918 sub_strict_overflow_p = false;
14919 if (!tree_expr_nonnegative_warnv_p (op0,
14920 &sub_strict_overflow_p)
14921 || !tree_expr_nonnegative_warnv_p (op1,
14922 &sub_strict_overflow_p))
14923 return false;
14924 /* One of the operands must be positive and the other non-negative. */
14925 /* We don't set *STRICT_OVERFLOW_P here: even if this value
14926 overflows, on a twos-complement machine the sum of two
14927 nonnegative numbers can never be zero. */
14928 return (tree_expr_nonzero_warnv_p (op0,
14929 strict_overflow_p)
14930 || tree_expr_nonzero_warnv_p (op1,
14931 strict_overflow_p));
14933 break;
14935 case MULT_EXPR:
14936 if (TYPE_OVERFLOW_UNDEFINED (type))
14938 if (tree_expr_nonzero_warnv_p (op0,
14939 strict_overflow_p)
14940 && tree_expr_nonzero_warnv_p (op1,
14941 strict_overflow_p))
14943 *strict_overflow_p = true;
14944 return true;
14947 break;
14949 case MIN_EXPR:
14950 sub_strict_overflow_p = false;
14951 if (tree_expr_nonzero_warnv_p (op0,
14952 &sub_strict_overflow_p)
14953 && tree_expr_nonzero_warnv_p (op1,
14954 &sub_strict_overflow_p))
14956 if (sub_strict_overflow_p)
14957 *strict_overflow_p = true;
14959 break;
14961 case MAX_EXPR:
14962 sub_strict_overflow_p = false;
14963 if (tree_expr_nonzero_warnv_p (op0,
14964 &sub_strict_overflow_p))
14966 if (sub_strict_overflow_p)
14967 *strict_overflow_p = true;
14969 /* When both operands are nonzero, then MAX must be too. */
14970 if (tree_expr_nonzero_warnv_p (op1,
14971 strict_overflow_p))
14972 return true;
14974 /* MAX where operand 0 is positive is positive. */
14975 return tree_expr_nonnegative_warnv_p (op0,
14976 strict_overflow_p);
14978 /* MAX where operand 1 is positive is positive. */
14979 else if (tree_expr_nonzero_warnv_p (op1,
14980 &sub_strict_overflow_p)
14981 && tree_expr_nonnegative_warnv_p (op1,
14982 &sub_strict_overflow_p))
14984 if (sub_strict_overflow_p)
14985 *strict_overflow_p = true;
14986 return true;
14988 break;
14990 case BIT_IOR_EXPR:
14991 return (tree_expr_nonzero_warnv_p (op1,
14992 strict_overflow_p)
14993 || tree_expr_nonzero_warnv_p (op0,
14994 strict_overflow_p));
14996 default:
14997 break;
15000 return false;
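/* Editorial sketch, illustrative only, assuming signed overflow is
   undefined (no -fwrapv): X != 0 and Y != 0 imply X * Y != 0, and
   the MULT_EXPR case above records that this conclusion relies on
   the strict overflow assumption.  */
static bool
product_nonzero_example (tree x, tree y)
{
  bool strict_overflow_p = false;
  return tree_binary_nonzero_warnv_p (MULT_EXPR, integer_type_node,
                                      x, y, &strict_overflow_p);
}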
15003 /* Return true when T is known to be nonzero.
15004 For floating point we further ensure that T is not denormal.
15005 Similar logic is present in nonzero_address_p in rtlanal.c.
15007 If the return value is based on the assumption that signed overflow
15008 is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
15009 change *STRICT_OVERFLOW_P. */
15011 bool
15012 tree_single_nonzero_warnv_p (tree t, bool *strict_overflow_p)
15014 bool sub_strict_overflow_p;
15015 switch (TREE_CODE (t))
15017 case INTEGER_CST:
15018 return !integer_zerop (t);
15020 case ADDR_EXPR:
15022 tree base = get_base_address (TREE_OPERAND (t, 0));
15024 if (!base)
15025 return false;
15027 /* Weak declarations may link to NULL. */
15028 if (VAR_OR_FUNCTION_DECL_P (base))
15029 return !DECL_WEAK (base);
15031 /* Constants are never weak. */
15032 if (CONSTANT_CLASS_P (base))
15033 return true;
15035 return false;
15038 case COND_EXPR:
15039 sub_strict_overflow_p = false;
15040 if (tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 1),
15041 &sub_strict_overflow_p)
15042 && tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 2),
15043 &sub_strict_overflow_p))
15045 if (sub_strict_overflow_p)
15046 *strict_overflow_p = true;
15047 return true;
15049 break;
15051 default:
15052 break;
15054 return false;
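/* Editorial sketch, illustrative only: the address of an ordinary
   VAR_DECL is known nonzero, but a weak declaration may resolve to a
   null address, which is exactly what the DECL_WEAK test above
   guards against.  */
static bool
address_nonzero_example (tree decl)
{
  bool sop = false;
  return tree_single_nonzero_warnv_p (build_fold_addr_expr (decl), &sop);
}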
15057 /* Return true when T is known to be nonzero.
15058 For floating point we further ensure that T is not denormal.
15059 Similar logic is present in nonzero_address_p in rtlanal.c.
15061 If the return value is based on the assumption that signed overflow
15062 is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
15063 change *STRICT_OVERFLOW_P. */
15065 bool
15066 tree_expr_nonzero_warnv_p (tree t, bool *strict_overflow_p)
15068 tree type = TREE_TYPE (t);
15069 enum tree_code code;
15071 /* Doing something useful for floating point would need more work. */
15072 if (!INTEGRAL_TYPE_P (type) && !POINTER_TYPE_P (type))
15073 return false;
15075 code = TREE_CODE (t);
15076 switch (TREE_CODE_CLASS (code))
15078 case tcc_unary:
15079 return tree_unary_nonzero_warnv_p (code, type, TREE_OPERAND (t, 0),
15080 strict_overflow_p);
15081 case tcc_binary:
15082 case tcc_comparison:
15083 return tree_binary_nonzero_warnv_p (code, type,
15084 TREE_OPERAND (t, 0),
15085 TREE_OPERAND (t, 1),
15086 strict_overflow_p);
15087 case tcc_constant:
15088 case tcc_declaration:
15089 case tcc_reference:
15090 return tree_single_nonzero_warnv_p (t, strict_overflow_p);
15092 default:
15093 break;
15096 switch (code)
15098 case TRUTH_NOT_EXPR:
15099 return tree_unary_nonzero_warnv_p (code, type, TREE_OPERAND (t, 0),
15100 strict_overflow_p);
15102 case TRUTH_AND_EXPR:
15103 case TRUTH_OR_EXPR:
15104 case TRUTH_XOR_EXPR:
15105 return tree_binary_nonzero_warnv_p (code, type,
15106 TREE_OPERAND (t, 0),
15107 TREE_OPERAND (t, 1),
15108 strict_overflow_p);
15110 case COND_EXPR:
15111 case CONSTRUCTOR:
15112 case OBJ_TYPE_REF:
15113 case ASSERT_EXPR:
15114 case ADDR_EXPR:
15115 case WITH_SIZE_EXPR:
15116 case EXC_PTR_EXPR:
15117 case SSA_NAME:
15118 case FILTER_EXPR:
15119 return tree_single_nonzero_warnv_p (t, strict_overflow_p);
15121 case COMPOUND_EXPR:
15122 case MODIFY_EXPR:
15123 case BIND_EXPR:
15124 return tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 1),
15125 strict_overflow_p);
15127 case SAVE_EXPR:
15128 return tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 0),
15129 strict_overflow_p);
15131 case CALL_EXPR:
15132 return alloca_call_p (t);
15134 default:
15135 break;
15137 return false;
15140 /* Return true when T is known to be nonzero.
15141 Handle warnings about undefined signed overflow. */
15143 bool
15144 tree_expr_nonzero_p (tree t)
15146 bool ret, strict_overflow_p;
15148 strict_overflow_p = false;
15149 ret = tree_expr_nonzero_warnv_p (t, &strict_overflow_p);
15150 if (strict_overflow_p)
15151 fold_overflow_warning (("assuming signed overflow does not occur when "
15152 "determining that expression is always "
15153 "non-zero"),
15154 WARN_STRICT_OVERFLOW_MISC);
15155 return ret;
15158 /* Given the components of a binary expression CODE, TYPE, OP0 and OP1,
15159 attempt to fold the expression to a constant without modifying TYPE,
15160 OP0 or OP1.
15162 If the expression could be simplified to a constant, then return
15163 the constant. If the expression would not be simplified to a
15164 constant, then return NULL_TREE. */
15166 tree
15167 fold_binary_to_constant (enum tree_code code, tree type, tree op0, tree op1)
15169 tree tem = fold_binary (code, type, op0, op1);
15170 return (tem && TREE_CONSTANT (tem)) ? tem : NULL_TREE;
15173 /* Given the components of a unary expression CODE, TYPE and OP0,
15174 attempt to fold the expression to a constant without modifying
15175 TYPE or OP0.
15177 If the expression could be simplified to a constant, then return
15178 the constant. If the expression would not be simplified to a
15179 constant, then return NULL_TREE. */
15181 tree
15182 fold_unary_to_constant (enum tree_code code, tree type, tree op0)
15184 tree tem = fold_unary (code, type, op0);
15185 return (tem && TREE_CONSTANT (tem)) ? tem : NULL_TREE;
15188 /* If EXP represents referencing an element in a constant string
15189 (either via pointer arithmetic or array indexing), return the
15190 tree representing the value accessed, otherwise return NULL. */
15192 tree
15193 fold_read_from_constant_string (tree exp)
15195 if ((TREE_CODE (exp) == INDIRECT_REF
15196 || TREE_CODE (exp) == ARRAY_REF)
15197 && TREE_CODE (TREE_TYPE (exp)) == INTEGER_TYPE)
15199 tree exp1 = TREE_OPERAND (exp, 0);
15200 tree index;
15201 tree string;
15203 if (TREE_CODE (exp) == INDIRECT_REF)
15204 string = string_constant (exp1, &index);
15205 else
15207 tree low_bound = array_ref_low_bound (exp);
15208 index = fold_convert (sizetype, TREE_OPERAND (exp, 1));
15210 /* Optimize the special case of a zero lower bound.
15212 We convert the low_bound to sizetype to avoid some problems
15213 with constant folding. (E.g. suppose the lower bound is 1,
15214 and its mode is QI. Without the conversion, (ARRAY
15215 +(INDEX-(unsigned char)1)) becomes ((ARRAY+(-(unsigned char)1))
15216 +INDEX), which becomes (ARRAY+255+INDEX). Oops!) */
15217 if (! integer_zerop (low_bound))
15218 index = size_diffop (index, fold_convert (sizetype, low_bound));
15220 string = exp1;
15223 if (string
15224 && TYPE_MODE (TREE_TYPE (exp)) == TYPE_MODE (TREE_TYPE (TREE_TYPE (string)))
15225 && TREE_CODE (string) == STRING_CST
15226 && TREE_CODE (index) == INTEGER_CST
15227 && compare_tree_int (index, TREE_STRING_LENGTH (string)) < 0
15228 && (GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_TYPE (string))))
15229 == MODE_INT)
15230 && (GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (TREE_TYPE (string)))) == 1))
15231 return build_int_cst_type (TREE_TYPE (exp),
15232 (TREE_STRING_POINTER (string)
15233 [TREE_INT_CST_LOW (index)]));
15235 return NULL;
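/* Editorial sketch, illustrative only: building "abc"[1] by hand and
   folding it back to the character constant 'b'.  The array type and
   domain are assembled explicitly here; in real input the front end
   provides them.  */
static tree
read_from_string_example (void)
{
  tree str = build_string (4, "abc");
  tree domain = build_index_type (size_int (3));
  tree array_type = build_array_type (char_type_node, domain);
  tree ref;

  TREE_TYPE (str) = array_type;
  ref = build4 (ARRAY_REF, char_type_node, str, size_int (1),
                NULL_TREE, NULL_TREE);
  /* Yields build_int_cst_type (char_type_node, 'b').  */
  return fold_read_from_constant_string (ref);
}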
15238 /* Return the tree for neg (ARG0) when ARG0 is known to be either
15239 an integer constant, real, or fixed-point constant.
15241 TYPE is the type of the result. */
15243 static tree
15244 fold_negate_const (tree arg0, tree type)
15246 tree t = NULL_TREE;
15248 switch (TREE_CODE (arg0))
15250 case INTEGER_CST:
15252 unsigned HOST_WIDE_INT low;
15253 HOST_WIDE_INT high;
15254 int overflow = neg_double (TREE_INT_CST_LOW (arg0),
15255 TREE_INT_CST_HIGH (arg0),
15256 &low, &high);
15257 t = force_fit_type_double (type, low, high, 1,
15258 (overflow | TREE_OVERFLOW (arg0))
15259 && !TYPE_UNSIGNED (type));
15260 break;
15263 case REAL_CST:
15264 t = build_real (type, REAL_VALUE_NEGATE (TREE_REAL_CST (arg0)));
15265 break;
15267 case FIXED_CST:
15269 FIXED_VALUE_TYPE f;
15270 bool overflow_p = fixed_arithmetic (&f, NEGATE_EXPR,
15271 &(TREE_FIXED_CST (arg0)), NULL,
15272 TYPE_SATURATING (type));
15273 t = build_fixed (type, f);
15274 /* Propagate overflow flags. */
15275 if (overflow_p | TREE_OVERFLOW (arg0))
15277 TREE_OVERFLOW (t) = 1;
15278 TREE_CONSTANT_OVERFLOW (t) = 1;
15280 else if (TREE_CONSTANT_OVERFLOW (arg0))
15281 TREE_CONSTANT_OVERFLOW (t) = 1;
15282 break;
15285 default:
15286 gcc_unreachable ();
15289 return t;
15292 /* Return the tree for abs (ARG0) when ARG0 is known to be either
15293 an integer constant or real constant.
15295 TYPE is the type of the result. */
15297 tree
15298 fold_abs_const (tree arg0, tree type)
15300 tree t = NULL_TREE;
15302 switch (TREE_CODE (arg0))
15304 case INTEGER_CST:
15305 /* If the value is unsigned, then the absolute value is
15306 the same as the ordinary value. */
15307 if (TYPE_UNSIGNED (type))
15308 t = arg0;
15309 /* Similarly, if the value is non-negative. */
15310 else if (INT_CST_LT (integer_minus_one_node, arg0))
15311 t = arg0;
15312 /* If the value is negative, then the absolute value is
15313 its negation. */
15314 else
15316 unsigned HOST_WIDE_INT low;
15317 HOST_WIDE_INT high;
15318 int overflow = neg_double (TREE_INT_CST_LOW (arg0),
15319 TREE_INT_CST_HIGH (arg0),
15320 &low, &high);
15321 t = force_fit_type_double (type, low, high, -1,
15322 overflow | TREE_OVERFLOW (arg0));
15324 break;
15326 case REAL_CST:
15327 if (REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg0)))
15328 t = build_real (type, REAL_VALUE_NEGATE (TREE_REAL_CST (arg0)));
15329 else
15330 t = arg0;
15331 break;
15333 default:
15334 gcc_unreachable ();
15337 return t;
15340 /* Return the tree for not (ARG0) when ARG0 is known to be an integer
15341 constant. TYPE is the type of the result. */
15343 static tree
15344 fold_not_const (tree arg0, tree type)
15346 tree t = NULL_TREE;
15348 gcc_assert (TREE_CODE (arg0) == INTEGER_CST);
15350 t = force_fit_type_double (type, ~TREE_INT_CST_LOW (arg0),
15351 ~TREE_INT_CST_HIGH (arg0), 0,
15352 TREE_OVERFLOW (arg0));
15354 return t;
15357 /* Given CODE, a relational operator, the target type, TYPE and two
15358 constant operands OP0 and OP1, return the result of the
15359 relational operation. If the result is not a compile time
15360 constant, then return NULL_TREE. */
15362 static tree
15363 fold_relational_const (enum tree_code code, tree type, tree op0, tree op1)
15365 int result, invert;
15367 /* From here on, the only cases we handle are when the result is
15368 known to be a constant. */
15370 if (TREE_CODE (op0) == REAL_CST && TREE_CODE (op1) == REAL_CST)
15372 const REAL_VALUE_TYPE *c0 = TREE_REAL_CST_PTR (op0);
15373 const REAL_VALUE_TYPE *c1 = TREE_REAL_CST_PTR (op1);
15375 /* Handle the cases where either operand is a NaN. */
15376 if (real_isnan (c0) || real_isnan (c1))
15378 switch (code)
15380 case EQ_EXPR:
15381 case ORDERED_EXPR:
15382 result = 0;
15383 break;
15385 case NE_EXPR:
15386 case UNORDERED_EXPR:
15387 case UNLT_EXPR:
15388 case UNLE_EXPR:
15389 case UNGT_EXPR:
15390 case UNGE_EXPR:
15391 case UNEQ_EXPR:
15392 result = 1;
15393 break;
15395 case LT_EXPR:
15396 case LE_EXPR:
15397 case GT_EXPR:
15398 case GE_EXPR:
15399 case LTGT_EXPR:
15400 if (flag_trapping_math)
15401 return NULL_TREE;
15402 result = 0;
15403 break;
15405 default:
15406 gcc_unreachable ();
15409 return constant_boolean_node (result, type);
15412 return constant_boolean_node (real_compare (code, c0, c1), type);
15415 if (TREE_CODE (op0) == FIXED_CST && TREE_CODE (op1) == FIXED_CST)
15417 const FIXED_VALUE_TYPE *c0 = TREE_FIXED_CST_PTR (op0);
15418 const FIXED_VALUE_TYPE *c1 = TREE_FIXED_CST_PTR (op1);
15419 return constant_boolean_node (fixed_compare (code, c0, c1), type);
15422 /* Handle equality/inequality of complex constants. */
15423 if (TREE_CODE (op0) == COMPLEX_CST && TREE_CODE (op1) == COMPLEX_CST)
15425 tree rcond = fold_relational_const (code, type,
15426 TREE_REALPART (op0),
15427 TREE_REALPART (op1));
15428 tree icond = fold_relational_const (code, type,
15429 TREE_IMAGPART (op0),
15430 TREE_IMAGPART (op1));
15431 if (code == EQ_EXPR)
15432 return fold_build2 (TRUTH_ANDIF_EXPR, type, rcond, icond);
15433 else if (code == NE_EXPR)
15434 return fold_build2 (TRUTH_ORIF_EXPR, type, rcond, icond);
15435 else
15436 return NULL_TREE;
15439 /* From here on we only handle LT, LE, GT, GE, EQ and NE.
15441 To compute GT, swap the arguments and do LT.
15442 To compute GE, do LT and invert the result.
15443 To compute LE, swap the arguments, do LT and invert the result.
15444 To compute NE, do EQ and invert the result.
15446 Therefore, the code below must handle only EQ and LT. */
15448 if (code == LE_EXPR || code == GT_EXPR)
15450 tree tem = op0;
15451 op0 = op1;
15452 op1 = tem;
15453 code = swap_tree_comparison (code);
15456 /* Note that it is safe to invert for real values here because we
15457 have already handled the one case where it matters. */
15459 invert = 0;
15460 if (code == NE_EXPR || code == GE_EXPR)
15462 invert = 1;
15463 code = invert_tree_comparison (code, false);
15466 /* Compute a result for LT or EQ if args permit;
15467 otherwise return NULL_TREE. */
15468 if (TREE_CODE (op0) == INTEGER_CST && TREE_CODE (op1) == INTEGER_CST)
15470 if (code == EQ_EXPR)
15471 result = tree_int_cst_equal (op0, op1);
15472 else if (TYPE_UNSIGNED (TREE_TYPE (op0)))
15473 result = INT_CST_LT_UNSIGNED (op0, op1);
15474 else
15475 result = INT_CST_LT (op0, op1);
15477 else
15478 return NULL_TREE;
15480 if (invert)
15481 result ^= 1;
15482 return constant_boolean_node (result, type);
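/* Editorial sketch, illustrative only: 5 >= 3 is canonicalized per
   the comment above into the inverse of 5 < 3; INT_CST_LT yields 0
   and the inversion produces boolean_true_node.  */
static tree
relational_const_example (void)
{
  tree five = build_int_cst (integer_type_node, 5);
  tree three = build_int_cst (integer_type_node, 3);
  return fold_relational_const (GE_EXPR, boolean_type_node, five, three);
}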
15485 /* If necessary, return a CLEANUP_POINT_EXPR for EXPR with the
15486 indicated TYPE. If no CLEANUP_POINT_EXPR is necessary, return EXPR
15487 itself. */
15489 tree
15490 fold_build_cleanup_point_expr (tree type, tree expr)
15492 /* If the expression does not have side effects then we don't have to wrap
15493 it with a cleanup point expression. */
15494 if (!TREE_SIDE_EFFECTS (expr))
15495 return expr;
15497 /* If the expression is a return, check the expression inside the return,
15498 or, if that is a modify expression, its right-hand side. If either has
15499 no side effects, we do not need to wrap the expression in a cleanup
15500 point expression. Note we don't check the left hand side of the modify
15501 because it should always be a return decl. */
15502 if (TREE_CODE (expr) == RETURN_EXPR)
15504 tree op = TREE_OPERAND (expr, 0);
15505 if (!op || !TREE_SIDE_EFFECTS (op))
15506 return expr;
15507 op = TREE_OPERAND (op, 1);
15508 if (!TREE_SIDE_EFFECTS (op))
15509 return expr;
15512 return build1 (CLEANUP_POINT_EXPR, type, expr);
15515 /* Given a pointer value OP0 and a type TYPE, return a simplified version
15516 of an indirection through OP0, or NULL_TREE if no simplification is
15517 possible. */
15519 tree
15520 fold_indirect_ref_1 (tree type, tree op0)
15522 tree sub = op0;
15523 tree subtype;
15525 STRIP_NOPS (sub);
15526 subtype = TREE_TYPE (sub);
15527 if (!POINTER_TYPE_P (subtype))
15528 return NULL_TREE;
15530 if (TREE_CODE (sub) == ADDR_EXPR)
15532 tree op = TREE_OPERAND (sub, 0);
15533 tree optype = TREE_TYPE (op);
15534 /* *&CONST_DECL -> to the value of the const decl. */
15535 if (TREE_CODE (op) == CONST_DECL)
15536 return DECL_INITIAL (op);
15537 /* *&p => p; make sure to handle *&"str"[cst] here. */
15538 if (type == optype)
15540 tree fop = fold_read_from_constant_string (op);
15541 if (fop)
15542 return fop;
15543 else
15544 return op;
15546 /* *(foo *)&fooarray => fooarray[0] */
15547 else if (TREE_CODE (optype) == ARRAY_TYPE
15548 && type == TREE_TYPE (optype))
15550 tree type_domain = TYPE_DOMAIN (optype);
15551 tree min_val = size_zero_node;
15552 if (type_domain && TYPE_MIN_VALUE (type_domain))
15553 min_val = TYPE_MIN_VALUE (type_domain);
15554 return build4 (ARRAY_REF, type, op, min_val, NULL_TREE, NULL_TREE);
15556 /* *(foo *)&complexfoo => __real__ complexfoo */
15557 else if (TREE_CODE (optype) == COMPLEX_TYPE
15558 && type == TREE_TYPE (optype))
15559 return fold_build1 (REALPART_EXPR, type, op);
15560 /* *(foo *)&vectorfoo => BIT_FIELD_REF<vectorfoo,...> */
15561 else if (TREE_CODE (optype) == VECTOR_TYPE
15562 && type == TREE_TYPE (optype))
15564 tree part_width = TYPE_SIZE (type);
15565 tree index = bitsize_int (0);
15566 return fold_build3 (BIT_FIELD_REF, type, op, part_width, index);
15570 /* ((foo*)&vectorfoo)[1] => BIT_FIELD_REF<vectorfoo,...> */
15571 if (TREE_CODE (sub) == POINTER_PLUS_EXPR
15572 && TREE_CODE (TREE_OPERAND (sub, 1)) == INTEGER_CST)
15574 tree op00 = TREE_OPERAND (sub, 0);
15575 tree op01 = TREE_OPERAND (sub, 1);
15576 tree op00type;
15578 STRIP_NOPS (op00);
15579 op00type = TREE_TYPE (op00);
15580 if (TREE_CODE (op00) == ADDR_EXPR
15581 && TREE_CODE (TREE_TYPE (op00type)) == VECTOR_TYPE
15582 && type == TREE_TYPE (TREE_TYPE (op00type)))
15584 HOST_WIDE_INT offset = tree_low_cst (op01, 0);
15585 tree part_width = TYPE_SIZE (type);
15586 unsigned HOST_WIDE_INT part_widthi = tree_low_cst (part_width, 0)/BITS_PER_UNIT;
15587 unsigned HOST_WIDE_INT indexi = offset * BITS_PER_UNIT;
15588 tree index = bitsize_int (indexi);
15590 if (offset/part_widthi < TYPE_VECTOR_SUBPARTS (TREE_TYPE (op00type)))
15591 return fold_build3 (BIT_FIELD_REF, type, TREE_OPERAND (op00, 0),
15592 part_width, index);
15598 /* ((foo*)&complexfoo)[1] => __imag__ complexfoo */
15599 if (TREE_CODE (sub) == POINTER_PLUS_EXPR
15600 && TREE_CODE (TREE_OPERAND (sub, 1)) == INTEGER_CST)
15602 tree op00 = TREE_OPERAND (sub, 0);
15603 tree op01 = TREE_OPERAND (sub, 1);
15604 tree op00type;
15606 STRIP_NOPS (op00);
15607 op00type = TREE_TYPE (op00);
15608 if (TREE_CODE (op00) == ADDR_EXPR
15609 && TREE_CODE (TREE_TYPE (op00type)) == COMPLEX_TYPE
15610 && type == TREE_TYPE (TREE_TYPE (op00type)))
15612 tree size = TYPE_SIZE_UNIT (type);
15613 if (tree_int_cst_equal (size, op01))
15614 return fold_build1 (IMAGPART_EXPR, type, TREE_OPERAND (op00, 0));
15618 /* *(foo *)fooarrptr => (*fooarrptr)[0] */
15619 if (TREE_CODE (TREE_TYPE (subtype)) == ARRAY_TYPE
15620 && type == TREE_TYPE (TREE_TYPE (subtype)))
15622 tree type_domain;
15623 tree min_val = size_zero_node;
15624 sub = build_fold_indirect_ref (sub);
15625 type_domain = TYPE_DOMAIN (TREE_TYPE (sub));
15626 if (type_domain && TYPE_MIN_VALUE (type_domain))
15627 min_val = TYPE_MIN_VALUE (type_domain);
15628 return build4 (ARRAY_REF, type, sub, min_val, NULL_TREE, NULL_TREE);
15631 return NULL_TREE;
15634 /* Builds an expression for an indirection through T, simplifying some
15635 cases. */
15637 tree
15638 build_fold_indirect_ref (tree t)
15640 tree type = TREE_TYPE (TREE_TYPE (t));
15641 tree sub = fold_indirect_ref_1 (type, t);
15643 if (sub)
15644 return sub;
15645 else
15646 return build1 (INDIRECT_REF, type, t);
15649 /* Given an INDIRECT_REF T, return either T or a simplified version. */
15651 tree
15652 fold_indirect_ref (tree t)
15654 tree sub = fold_indirect_ref_1 (TREE_TYPE (t), TREE_OPERAND (t, 0));
15656 if (sub)
15657 return sub;
15658 else
15659 return t;
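/* Editorial sketch, illustrative only, assuming ARRAY_VAR is a
   VAR_DECL of array type: *(elem *)&array simplifies to array[0] via
   the ARRAY_TYPE case of fold_indirect_ref_1 above.  */
static tree
fold_indirect_example (tree array_var)
{
  tree elem_type = TREE_TYPE (TREE_TYPE (array_var));
  tree addr = fold_convert (build_pointer_type (elem_type),
                            build_fold_addr_expr (array_var));
  return build_fold_indirect_ref (addr);
}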
15662 /* Strip non-trapping, non-side-effecting tree nodes from an expression
15663 whose result is ignored. The type of the returned tree need not be
15664 the same as the original expression. */
15666 tree
15667 fold_ignored_result (tree t)
15669 if (!TREE_SIDE_EFFECTS (t))
15670 return integer_zero_node;
15672 for (;;)
15673 switch (TREE_CODE_CLASS (TREE_CODE (t)))
15675 case tcc_unary:
15676 t = TREE_OPERAND (t, 0);
15677 break;
15679 case tcc_binary:
15680 case tcc_comparison:
15681 if (!TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1)))
15682 t = TREE_OPERAND (t, 0);
15683 else if (!TREE_SIDE_EFFECTS (TREE_OPERAND (t, 0)))
15684 t = TREE_OPERAND (t, 1);
15685 else
15686 return t;
15687 break;
15689 case tcc_expression:
15690 switch (TREE_CODE (t))
15692 case COMPOUND_EXPR:
15693 if (TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1)))
15694 return t;
15695 t = TREE_OPERAND (t, 0);
15696 break;
15698 case COND_EXPR:
15699 if (TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1))
15700 || TREE_SIDE_EFFECTS (TREE_OPERAND (t, 2)))
15701 return t;
15702 t = TREE_OPERAND (t, 0);
15703 break;
15705 default:
15706 return t;
15708 break;
15710 default:
15711 return t;
15715 /* Return the value of VALUE, rounded up to a multiple of DIVISOR.
15716 This can only be applied to objects of a sizetype. */
15718 tree
15719 round_up (tree value, int divisor)
15721 tree div = NULL_TREE;
15723 gcc_assert (divisor > 0);
15724 if (divisor == 1)
15725 return value;
15727 /* See if VALUE is already a multiple of DIVISOR. If so, we don't
15728 have to do anything. Only do this check when VALUE is not a constant,
15729 because for a constant the check is more expensive than simply
15730 doing the rounding. */
15731 if (TREE_CODE (value) != INTEGER_CST)
15733 div = build_int_cst (TREE_TYPE (value), divisor);
15735 if (multiple_of_p (TREE_TYPE (value), value, div))
15736 return value;
15739 /* If divisor is a power of two, simplify this to bit manipulation. */
15740 if (divisor == (divisor & -divisor))
15742 if (TREE_CODE (value) == INTEGER_CST)
15744 unsigned HOST_WIDE_INT low = TREE_INT_CST_LOW (value);
15745 unsigned HOST_WIDE_INT high;
15746 bool overflow_p;
15748 if ((low & (divisor - 1)) == 0)
15749 return value;
15751 overflow_p = TREE_OVERFLOW (value);
15752 high = TREE_INT_CST_HIGH (value);
15753 low &= ~(divisor - 1);
15754 low += divisor;
15755 if (low == 0)
15757 high++;
15758 if (high == 0)
15759 overflow_p = true;
15762 return force_fit_type_double (TREE_TYPE (value), low, high,
15763 -1, overflow_p);
15765 else
15767 tree t;
15769 t = build_int_cst (TREE_TYPE (value), divisor - 1);
15770 value = size_binop (PLUS_EXPR, value, t);
15771 t = build_int_cst (TREE_TYPE (value), -divisor);
15772 value = size_binop (BIT_AND_EXPR, value, t);
15775 else
15777 if (!div)
15778 div = build_int_cst (TREE_TYPE (value), divisor);
15779 value = size_binop (CEIL_DIV_EXPR, value, div);
15780 value = size_binop (MULT_EXPR, value, div);
15783 return value;
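/* Editorial sketch, illustrative only: with a power-of-two divisor
   the constant path above computes (37 & ~7) + 8, i.e. 40, directly
   on the INTEGER_CST words.  */
static tree
round_up_example (void)
{
  return round_up (build_int_cst (sizetype, 37), 8);
}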
15786 /* Likewise, but round down. */
15788 tree
15789 round_down (tree value, int divisor)
15791 tree div = NULL_TREE;
15793 gcc_assert (divisor > 0);
15794 if (divisor == 1)
15795 return value;
15797 /* See if VALUE is already a multiple of DIVISOR. If so, we don't
15798 have to do anything. Only do this check when VALUE is not a constant,
15799 because for a constant the check is more expensive than simply
15800 doing the rounding. */
15801 if (TREE_CODE (value) != INTEGER_CST)
15803 div = build_int_cst (TREE_TYPE (value), divisor);
15805 if (multiple_of_p (TREE_TYPE (value), value, div))
15806 return value;
15809 /* If divisor is a power of two, simplify this to bit manipulation. */
15810 if (divisor == (divisor & -divisor))
15812 tree t;
15814 t = build_int_cst (TREE_TYPE (value), -divisor);
15815 value = size_binop (BIT_AND_EXPR, value, t);
15817 else
15819 if (!div)
15820 div = build_int_cst (TREE_TYPE (value), divisor);
15821 value = size_binop (FLOOR_DIV_EXPR, value, div);
15822 value = size_binop (MULT_EXPR, value, div);
15825 return value;
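/* Editorial sketch, illustrative only: the power-of-two path above
   reduces to a single mask, 37 & -8 == 32.  */
static tree
round_down_example (void)
{
  return round_down (build_int_cst (sizetype, 37), 8);
}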
15828 /* Returns the pointer to the base of the object addressed by EXP and
15829 extracts the information about the offset of the access, storing it
15830 to PBITPOS and POFFSET. */
15832 static tree
15833 split_address_to_core_and_offset (tree exp,
15834 HOST_WIDE_INT *pbitpos, tree *poffset)
15836 tree core;
15837 enum machine_mode mode;
15838 int unsignedp, volatilep;
15839 HOST_WIDE_INT bitsize;
15841 if (TREE_CODE (exp) == ADDR_EXPR)
15843 core = get_inner_reference (TREE_OPERAND (exp, 0), &bitsize, pbitpos,
15844 poffset, &mode, &unsignedp, &volatilep,
15845 false);
15846 core = fold_addr_expr (core);
15848 else
15850 core = exp;
15851 *pbitpos = 0;
15852 *poffset = NULL_TREE;
15855 return core;
15858 /* Returns true if addresses of E1 and E2 differ by a constant, false
15859 otherwise. If they do, E1 - E2 is stored in *DIFF. */
15861 bool
15862 ptr_difference_const (tree e1, tree e2, HOST_WIDE_INT *diff)
15864 tree core1, core2;
15865 HOST_WIDE_INT bitpos1, bitpos2;
15866 tree toffset1, toffset2, tdiff, type;
15868 core1 = split_address_to_core_and_offset (e1, &bitpos1, &toffset1);
15869 core2 = split_address_to_core_and_offset (e2, &bitpos2, &toffset2);
15871 if (bitpos1 % BITS_PER_UNIT != 0
15872 || bitpos2 % BITS_PER_UNIT != 0
15873 || !operand_equal_p (core1, core2, 0))
15874 return false;
15876 if (toffset1 && toffset2)
15878 type = TREE_TYPE (toffset1);
15879 if (type != TREE_TYPE (toffset2))
15880 toffset2 = fold_convert (type, toffset2);
15882 tdiff = fold_build2 (MINUS_EXPR, type, toffset1, toffset2);
15883 if (!cst_and_fits_in_hwi (tdiff))
15884 return false;
15886 *diff = int_cst_value (tdiff);
15888 else if (toffset1 || toffset2)
15890 /* If only one of the offsets is non-constant, the difference cannot
15891 be a constant. */
15892 return false;
15894 else
15895 *diff = 0;
15897 *diff += (bitpos1 - bitpos2) / BITS_PER_UNIT;
15898 return true;
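/* Editorial worked example for ptr_difference_const, illustrative
   only: for char a[10], E1 = &a[3] and E2 = &a[1] split to the same
   core &a with bit positions 24 and 8.  Both are byte-aligned and
   there are no variable offsets, so *DIFF = (24 - 8) / BITS_PER_UNIT
   = 2 and the function returns true.  */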
15901 /* Simplify the floating point expression EXP when the sign of the
15902 result is not significant. Return NULL_TREE if no simplification
15903 is possible. */
15905 tree
15906 fold_strip_sign_ops (tree exp)
15908 tree arg0, arg1;
15910 switch (TREE_CODE (exp))
15912 case ABS_EXPR:
15913 case NEGATE_EXPR:
15914 arg0 = fold_strip_sign_ops (TREE_OPERAND (exp, 0));
15915 return arg0 ? arg0 : TREE_OPERAND (exp, 0);
15917 case MULT_EXPR:
15918 case RDIV_EXPR:
15919 if (HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (TREE_TYPE (exp))))
15920 return NULL_TREE;
15921 arg0 = fold_strip_sign_ops (TREE_OPERAND (exp, 0));
15922 arg1 = fold_strip_sign_ops (TREE_OPERAND (exp, 1));
15923 if (arg0 != NULL_TREE || arg1 != NULL_TREE)
15924 return fold_build2 (TREE_CODE (exp), TREE_TYPE (exp),
15925 arg0 ? arg0 : TREE_OPERAND (exp, 0),
15926 arg1 ? arg1 : TREE_OPERAND (exp, 1));
15927 break;
15929 case COMPOUND_EXPR:
15930 arg0 = TREE_OPERAND (exp, 0);
15931 arg1 = fold_strip_sign_ops (TREE_OPERAND (exp, 1));
15932 if (arg1)
15933 return fold_build2 (COMPOUND_EXPR, TREE_TYPE (exp), arg0, arg1);
15934 break;
15936 case COND_EXPR:
15937 arg0 = fold_strip_sign_ops (TREE_OPERAND (exp, 1));
15938 arg1 = fold_strip_sign_ops (TREE_OPERAND (exp, 2));
15939 if (arg0 || arg1)
15940 return fold_build3 (COND_EXPR, TREE_TYPE (exp), TREE_OPERAND (exp, 0),
15941 arg0 ? arg0 : TREE_OPERAND (exp, 1),
15942 arg1 ? arg1 : TREE_OPERAND (exp, 2));
15943 break;
15945 case CALL_EXPR:
15947 const enum built_in_function fcode = builtin_mathfn_code (exp);
15948 switch (fcode)
15950 CASE_FLT_FN (BUILT_IN_COPYSIGN):
15951 /* Strip copysign function call, return the 1st argument. */
15952 arg0 = CALL_EXPR_ARG (exp, 0);
15953 arg1 = CALL_EXPR_ARG (exp, 1);
15954 return omit_one_operand (TREE_TYPE (exp), arg0, arg1);
15956 default:
15957 /* Strip sign ops from the argument of "odd" math functions. */
15958 if (negate_mathfn_p (fcode))
15960 arg0 = fold_strip_sign_ops (CALL_EXPR_ARG (exp, 0));
15961 if (arg0)
15962 return build_call_expr (get_callee_fndecl (exp), 1, arg0);
15964 break;
15967 break;
15969 default:
15970 break;
15972 return NULL_TREE;
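/* Editorial sketch, illustrative only, assuming X and Y are plain
   variable references and sign-dependent rounding is not honored:
   when only the magnitude of the result matters, as for the argument
   of fabs, (-X) * Y strips to X * Y.  */
static tree
strip_sign_ops_example (tree x, tree y)
{
  tree expr = fold_build2 (MULT_EXPR, double_type_node,
                           fold_build1 (NEGATE_EXPR, double_type_node, x),
                           y);
  tree stripped = fold_strip_sign_ops (expr);
  return stripped ? stripped : expr;
}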