/* Fold a constant sub-tree into a single node for C-compiler
   Copyright (C) 1987, 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998, 1999,
   2000, 2001, 2002, 2003, 2004, 2005, 2006, 2007 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 2, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING.  If not, write to the Free
Software Foundation, 51 Franklin Street, Fifth Floor, Boston, MA
02110-1301, USA.  */

/*@@ This file should be rewritten to use an arbitrary precision
  @@ representation for "struct tree_int_cst" and "struct tree_real_cst".
  @@ Perhaps the routines could also be used for bc/dc, and made a lib.
  @@ The routines that translate from the ap rep should
  @@ warn if precision et al. is lost.
  @@ This would also make life easier when this technology is used
  @@ for cross-compilers.  */

/* The entry points in this file are fold, size_int_wide, size_binop
   and force_fit_type.

   fold takes a tree as argument and returns a simplified tree.

   size_binop takes a tree code for an arithmetic operation
   and two operands that are trees, and produces a tree for the
   result, assuming the type comes from `sizetype'.

   size_int takes an integer value, and creates a tree constant
   with type from `sizetype'.

   force_fit_type takes a constant, an overflowable flag and prior
   overflow indicators.  It forces the value to fit the type and sets
   TREE_OVERFLOW as appropriate.

   Note: Since the folders get called on non-gimple code as well as
   gimple code, we need to handle GIMPLE tuples as well as their
   corresponding tree equivalents.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "flags.h"
#include "tree.h"
#include "real.h"
#include "rtl.h"
#include "expr.h"
#include "tm_p.h"
#include "toplev.h"
#include "ggc.h"
#include "hashtab.h"
#include "langhooks.h"
#include "md5.h"

/* Non-zero if we are folding constants inside an initializer; zero
   otherwise.  */
int folding_initializer = 0;

/* The following constants represent a bit-based encoding of GCC's
   comparison operators.  This encoding simplifies transformations
   on relational comparison operators, such as AND and OR.  */
enum comparison_code {
  COMPCODE_FALSE = 0,
  COMPCODE_LT = 1,
  COMPCODE_EQ = 2,
  COMPCODE_LE = 3,
  COMPCODE_GT = 4,
  COMPCODE_LTGT = 5,
  COMPCODE_GE = 6,
  COMPCODE_ORD = 7,
  COMPCODE_UNORD = 8,
  COMPCODE_UNLT = 9,
  COMPCODE_UNEQ = 10,
  COMPCODE_UNLE = 11,
  COMPCODE_UNGT = 12,
  COMPCODE_NE = 13,
  COMPCODE_UNGE = 14,
  COMPCODE_TRUE = 15
};

static void encode (HOST_WIDE_INT *, unsigned HOST_WIDE_INT, HOST_WIDE_INT);
static void decode (HOST_WIDE_INT *, unsigned HOST_WIDE_INT *, HOST_WIDE_INT *);
static bool negate_mathfn_p (enum built_in_function);
static bool negate_expr_p (tree);
static tree negate_expr (tree);
static tree split_tree (tree, enum tree_code, tree *, tree *, tree *, int);
static tree associate_trees (tree, tree, enum tree_code, tree);
static tree const_binop (enum tree_code, tree, tree, int);
static enum comparison_code comparison_to_compcode (enum tree_code);
static enum tree_code compcode_to_comparison (enum comparison_code);
static tree combine_comparisons (enum tree_code, enum tree_code,
                                 enum tree_code, tree, tree, tree);
static int truth_value_p (enum tree_code);
static int operand_equal_for_comparison_p (tree, tree, tree);
static int twoval_comparison_p (tree, tree *, tree *, int *);
static tree eval_subst (tree, tree, tree, tree, tree);
static tree pedantic_omit_one_operand (tree, tree, tree);
static tree distribute_bit_expr (enum tree_code, tree, tree, tree);
static tree make_bit_field_ref (tree, tree, int, int, int);
static tree optimize_bit_field_compare (enum tree_code, tree, tree, tree);
static tree decode_field_reference (tree, HOST_WIDE_INT *, HOST_WIDE_INT *,
                                    enum machine_mode *, int *, int *,
                                    tree *, tree *);
static int all_ones_mask_p (tree, int);
static tree sign_bit_p (tree, tree);
static int simple_operand_p (tree);
static tree range_binop (enum tree_code, tree, tree, int, tree, int);
static tree range_predecessor (tree);
static tree range_successor (tree);
static tree make_range (tree, int *, tree *, tree *);
static tree build_range_check (tree, tree, int, tree, tree);
static int merge_ranges (int *, tree *, tree *, int, tree, tree, int, tree,
                         tree);
static tree fold_range_test (enum tree_code, tree, tree, tree);
static tree fold_cond_expr_with_comparison (tree, tree, tree, tree);
static tree unextend (tree, int, int, tree);
static tree fold_truthop (enum tree_code, tree, tree, tree);
static tree optimize_minmax_comparison (enum tree_code, tree, tree, tree);
static tree extract_muldiv (tree, tree, enum tree_code, tree);
static tree extract_muldiv_1 (tree, tree, enum tree_code, tree);
static int multiple_of_p (tree, tree, tree);
static tree fold_binary_op_with_conditional_arg (enum tree_code, tree,
                                                 tree, tree,
                                                 tree, tree, int);
static bool fold_real_zero_addition_p (tree, tree, int);
static tree fold_mathfn_compare (enum built_in_function, enum tree_code,
                                 tree, tree, tree);
static tree fold_inf_compare (enum tree_code, tree, tree, tree);
static tree fold_div_compare (enum tree_code, tree, tree, tree);
static bool reorder_operands_p (tree, tree);
static tree fold_negate_const (tree, tree);
static tree fold_not_const (tree, tree);
static tree fold_relational_const (enum tree_code, tree, tree, tree);
static int native_encode_expr (tree, unsigned char *, int);
static tree native_interpret_expr (tree, unsigned char *, int);

/* We know that A1 + B1 = SUM1, using 2's complement arithmetic and ignoring
   overflow.  Suppose A, B and SUM have the same respective signs as A1, B1,
   and SUM1.  Then this yields nonzero if overflow occurred during the
   addition.

   Overflow occurs if A and B have the same sign, but A and SUM differ in
   sign.  Use `^' to test whether signs differ, and `< 0' to isolate the
   sign.  */
#define OVERFLOW_SUM_SIGN(a, b, sum) ((~((a) ^ (b)) & ((a) ^ (sum))) < 0)

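/* For example, using 8-bit values for illustration: a = 0x70 and b = 0x70
   agree in sign, but their sum 0xE0 is negative, so ~(a ^ b) & (a ^ sum)
   has its sign bit set and the macro reports the overflow.  */
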
/* To do constant folding on INTEGER_CST nodes requires two-word arithmetic.
   We do that by representing the two-word integer in 4 words, with only
   HOST_BITS_PER_WIDE_INT / 2 bits stored in each word, as a positive
   number.  The value of the word is LOWPART + HIGHPART * BASE.  */

#define LOWPART(x) \
  ((x) & (((unsigned HOST_WIDE_INT) 1 << (HOST_BITS_PER_WIDE_INT / 2)) - 1))
#define HIGHPART(x) \
  ((unsigned HOST_WIDE_INT) (x) >> HOST_BITS_PER_WIDE_INT / 2)
#define BASE ((unsigned HOST_WIDE_INT) 1 << HOST_BITS_PER_WIDE_INT / 2)

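/* For example, assuming HOST_BITS_PER_WIDE_INT == 32, BASE is 0x10000 and
   for x == 0x12345678 we have LOWPART (x) == 0x5678 and
   HIGHPART (x) == 0x1234, so x == LOWPART (x) + HIGHPART (x) * BASE.  */
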
/* Unpack a two-word integer into 4 words.
   LOW and HI are the integer, as two `HOST_WIDE_INT' pieces.
   WORDS points to the array of HOST_WIDE_INTs.  */

static void
encode (HOST_WIDE_INT *words, unsigned HOST_WIDE_INT low, HOST_WIDE_INT hi)
{
  words[0] = LOWPART (low);
  words[1] = HIGHPART (low);
  words[2] = LOWPART (hi);
  words[3] = HIGHPART (hi);
}

/* Pack an array of 4 words into a two-word integer.
   WORDS points to the array of words.
   The integer is stored into *LOW and *HI as two `HOST_WIDE_INT' pieces.  */

static void
decode (HOST_WIDE_INT *words, unsigned HOST_WIDE_INT *low,
        HOST_WIDE_INT *hi)
{
  *low = words[0] + words[1] * BASE;
  *hi = words[2] + words[3] * BASE;
}

/* Force the double-word integer L1, H1 to be within the range of the
   integer type TYPE.  Stores the properly truncated and sign-extended
   double-word integer in *LV, *HV.  Returns true if the operation
   overflows, that is, argument and result are different.  */

int
fit_double_type (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
                 unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv, tree type)
{
  unsigned HOST_WIDE_INT low0 = l1;
  HOST_WIDE_INT high0 = h1;
  unsigned int prec;
  int sign_extended_type;

  if (POINTER_TYPE_P (type)
      || TREE_CODE (type) == OFFSET_TYPE)
    prec = POINTER_SIZE;
  else
    prec = TYPE_PRECISION (type);

  /* Size types *are* sign extended.  */
  sign_extended_type = (!TYPE_UNSIGNED (type)
                        || (TREE_CODE (type) == INTEGER_TYPE
                            && TYPE_IS_SIZETYPE (type)));

  /* First clear all bits that are beyond the type's precision.  */
  if (prec >= 2 * HOST_BITS_PER_WIDE_INT)
    ;
  else if (prec > HOST_BITS_PER_WIDE_INT)
    h1 &= ~((HOST_WIDE_INT) (-1) << (prec - HOST_BITS_PER_WIDE_INT));
  else
    {
      h1 = 0;
      if (prec < HOST_BITS_PER_WIDE_INT)
        l1 &= ~((HOST_WIDE_INT) (-1) << prec);
    }

  /* Then do sign extension if necessary.  */
  if (!sign_extended_type)
    /* No sign extension */;
  else if (prec >= 2 * HOST_BITS_PER_WIDE_INT)
    /* Correct width already.  */;
  else if (prec > HOST_BITS_PER_WIDE_INT)
    {
      /* Sign extend top half?  */
      if (h1 & ((unsigned HOST_WIDE_INT) 1
                << (prec - HOST_BITS_PER_WIDE_INT - 1)))
        h1 |= (HOST_WIDE_INT) (-1) << (prec - HOST_BITS_PER_WIDE_INT);
    }
  else if (prec == HOST_BITS_PER_WIDE_INT)
    {
      if ((HOST_WIDE_INT) l1 < 0)
        h1 = -1;
    }
  else
    {
      /* Sign extend bottom half?  */
      if (l1 & ((unsigned HOST_WIDE_INT) 1 << (prec - 1)))
        {
          h1 = -1;
          l1 |= (HOST_WIDE_INT) (-1) << prec;
        }
    }

  *lv = l1;
  *hv = h1;

  /* If the value didn't fit, signal overflow.  */
  return l1 != low0 || h1 != high0;
}

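/* For example, forcing 0x1F0 into an 8-bit signed type keeps only the low
   eight bits (0xF0) and sign extends them to -16; because that differs
   from the original value, the function reports overflow.  */
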
/* We force the double-int HIGH:LOW to the range of the type TYPE by
   sign or zero extending it.
   OVERFLOWABLE indicates if we are interested
   in overflow of the value, when >0 we are only interested in signed
   overflow, for <0 we are interested in any overflow.  OVERFLOWED
   indicates whether overflow has already occurred.  CONST_OVERFLOWED
   indicates whether constant overflow has already occurred.  We force
   T's value to be within range of T's type (by setting to 0 or 1 all
   the bits outside the type's range).  We set TREE_OVERFLOW if
        OVERFLOWED is nonzero,
        or OVERFLOWABLE is >0 and signed overflow occurs,
        or OVERFLOWABLE is <0 and any overflow occurs.
   We return a new tree node for the extended double-int.  The node
   is shared if no overflow flags are set.  */

tree
force_fit_type_double (tree type, unsigned HOST_WIDE_INT low,
                       HOST_WIDE_INT high, int overflowable,
                       bool overflowed, bool overflowed_const)
{
  int sign_extended_type;
  bool overflow;

  /* Size types *are* sign extended.  */
  sign_extended_type = (!TYPE_UNSIGNED (type)
                        || (TREE_CODE (type) == INTEGER_TYPE
                            && TYPE_IS_SIZETYPE (type)));

  overflow = fit_double_type (low, high, &low, &high, type);

  /* If we need to set overflow flags, return a new unshared node.  */
  if (overflowed || overflowed_const || overflow)
    {
      if (overflowed
          || overflowable < 0
          || (overflowable > 0 && sign_extended_type))
        {
          tree t = make_node (INTEGER_CST);
          TREE_INT_CST_LOW (t) = low;
          TREE_INT_CST_HIGH (t) = high;
          TREE_TYPE (t) = type;
          TREE_OVERFLOW (t) = 1;
          return t;
        }
    }

  /* Else build a shared node.  */
  return build_int_cst_wide (type, low, high);
}

/* Add two doubleword integers with doubleword result.
   Return nonzero if the operation overflows according to UNSIGNED_P.
   Each argument is given as two `HOST_WIDE_INT' pieces.
   One argument is L1 and H1; the other, L2 and H2.
   The value is stored as two `HOST_WIDE_INT' pieces in *LV and *HV.  */

int
add_double_with_sign (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
                      unsigned HOST_WIDE_INT l2, HOST_WIDE_INT h2,
                      unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv,
                      bool unsigned_p)
{
  unsigned HOST_WIDE_INT l;
  HOST_WIDE_INT h;

  l = l1 + l2;
  h = h1 + h2 + (l < l1);

  *lv = l;
  *hv = h;

  if (unsigned_p)
    return (unsigned HOST_WIDE_INT) h < (unsigned HOST_WIDE_INT) h1;
  else
    return OVERFLOW_SUM_SIGN (h1, h2, h);
}

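/* The (l < l1) term above is the carry out of the low word: the unsigned
   addition l1 + l2 wrapped around exactly when the result is smaller than
   one of the addends.  */
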
/* Negate a doubleword integer with doubleword result.
   Return nonzero if the operation overflows, assuming it's signed.
   The argument is given as two `HOST_WIDE_INT' pieces in L1 and H1.
   The value is stored as two `HOST_WIDE_INT' pieces in *LV and *HV.  */

int
neg_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
            unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv)
{
  if (l1 == 0)
    {
      *lv = 0;
      *hv = - h1;
      return (*hv & h1) < 0;
    }
  else
    {
      *lv = -l1;
      *hv = ~h1;
      return 0;
    }
}

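/* The only overflowing case is negating the most negative value, where
   H1 is the minimum HOST_WIDE_INT and L1 is zero: then -h1 wraps back to
   h1 itself, which (*hv & h1) < 0 detects.  */
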
/* Multiply two doubleword integers with doubleword result.
   Return nonzero if the operation overflows according to UNSIGNED_P.
   Each argument is given as two `HOST_WIDE_INT' pieces.
   One argument is L1 and H1; the other, L2 and H2.
   The value is stored as two `HOST_WIDE_INT' pieces in *LV and *HV.  */

int
mul_double_with_sign (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
                      unsigned HOST_WIDE_INT l2, HOST_WIDE_INT h2,
                      unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv,
                      bool unsigned_p)
{
  HOST_WIDE_INT arg1[4];
  HOST_WIDE_INT arg2[4];
  HOST_WIDE_INT prod[4 * 2];
  unsigned HOST_WIDE_INT carry;
  int i, j, k;
  unsigned HOST_WIDE_INT toplow, neglow;
  HOST_WIDE_INT tophigh, neghigh;

  encode (arg1, l1, h1);
  encode (arg2, l2, h2);

  memset (prod, 0, sizeof prod);

  for (i = 0; i < 4; i++)
    {
      carry = 0;
      for (j = 0; j < 4; j++)
        {
          k = i + j;
          /* This product is <= 0xFFFE0001, the sum <= 0xFFFF0000.  */
          carry += arg1[i] * arg2[j];
          /* Since prod[p] < 0xFFFF, this sum <= 0xFFFFFFFF.  */
          carry += prod[k];
          prod[k] = LOWPART (carry);
          carry = HIGHPART (carry);
        }
      prod[i + 4] = carry;
    }

  decode (prod, lv, hv);
  decode (prod + 4, &toplow, &tophigh);

  /* Unsigned overflow is immediate.  */
  if (unsigned_p)
    return (toplow | tophigh) != 0;

  /* Check for signed overflow by calculating the signed representation of the
     top half of the result; it should agree with the low half's sign bit.  */
  if (h1 < 0)
    {
      neg_double (l2, h2, &neglow, &neghigh);
      add_double (neglow, neghigh, toplow, tophigh, &toplow, &tophigh);
    }
  if (h2 < 0)
    {
      neg_double (l1, h1, &neglow, &neghigh);
      add_double (neglow, neghigh, toplow, tophigh, &toplow, &tophigh);
    }
  return (*hv < 0 ? ~(toplow & tophigh) : toplow | tophigh) != 0;
}

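/* The signed overflow test works because a non-overflowing signed product
   has a top doubleword equal to the sign extension of the bottom one: all
   zero bits for a non-negative result, all one bits for a negative one.
   The two conditional negations above convert the unsigned top half into
   that signed form before the comparison.  */
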
/* Shift the doubleword integer in L1, H1 left by COUNT places
   keeping only PREC bits of result.
   Shift right if COUNT is negative.
   ARITH nonzero specifies arithmetic shifting; otherwise use logical shift.
   Store the value as two `HOST_WIDE_INT' pieces in *LV and *HV.  */

void
lshift_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
               HOST_WIDE_INT count, unsigned int prec,
               unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv, int arith)
{
  unsigned HOST_WIDE_INT signmask;

  if (count < 0)
    {
      rshift_double (l1, h1, -count, prec, lv, hv, arith);
      return;
    }

  if (SHIFT_COUNT_TRUNCATED)
    count %= prec;

  if (count >= 2 * HOST_BITS_PER_WIDE_INT)
    {
      /* Shifting by the host word size is undefined according to the
         ANSI standard, so we must handle this as a special case.  */
      *hv = 0;
      *lv = 0;
    }
  else if (count >= HOST_BITS_PER_WIDE_INT)
    {
      *hv = l1 << (count - HOST_BITS_PER_WIDE_INT);
      *lv = 0;
    }
  else
    {
      *hv = (((unsigned HOST_WIDE_INT) h1 << count)
             | (l1 >> (HOST_BITS_PER_WIDE_INT - count - 1) >> 1));
      *lv = l1 << count;
    }

  /* Sign extend all bits that are beyond the precision.  */

  signmask = -((prec > HOST_BITS_PER_WIDE_INT
                ? ((unsigned HOST_WIDE_INT) *hv
                   >> (prec - HOST_BITS_PER_WIDE_INT - 1))
                : (*lv >> (prec - 1))) & 1);

  if (prec >= 2 * HOST_BITS_PER_WIDE_INT)
    ;
  else if (prec >= HOST_BITS_PER_WIDE_INT)
    {
      *hv &= ~((HOST_WIDE_INT) (-1) << (prec - HOST_BITS_PER_WIDE_INT));
      *hv |= signmask << (prec - HOST_BITS_PER_WIDE_INT);
    }
  else
    {
      *hv = signmask;
      *lv &= ~((unsigned HOST_WIDE_INT) (-1) << prec);
      *lv |= signmask << prec;
    }
}

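/* Note the split shift above (">> ... - count - 1 >> 1"): shifting by
   HOST_BITS_PER_WIDE_INT - count in a single step would be undefined
   behavior when COUNT is zero, so the bits moving between the words are
   shifted in two steps instead.  */
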
/* Shift the doubleword integer in L1, H1 right by COUNT places
   keeping only PREC bits of result.  COUNT must be positive.
   ARITH nonzero specifies arithmetic shifting; otherwise use logical shift.
   Store the value as two `HOST_WIDE_INT' pieces in *LV and *HV.  */

void
rshift_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
               HOST_WIDE_INT count, unsigned int prec,
               unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv,
               int arith)
{
  unsigned HOST_WIDE_INT signmask;

  signmask = (arith
              ? -((unsigned HOST_WIDE_INT) h1 >> (HOST_BITS_PER_WIDE_INT - 1))
              : 0);

  if (SHIFT_COUNT_TRUNCATED)
    count %= prec;

  if (count >= 2 * HOST_BITS_PER_WIDE_INT)
    {
      /* Shifting by the host word size is undefined according to the
         ANSI standard, so we must handle this as a special case.  */
      *hv = 0;
      *lv = 0;
    }
  else if (count >= HOST_BITS_PER_WIDE_INT)
    {
      *hv = 0;
      *lv = (unsigned HOST_WIDE_INT) h1 >> (count - HOST_BITS_PER_WIDE_INT);
    }
  else
    {
      *hv = (unsigned HOST_WIDE_INT) h1 >> count;
      *lv = ((l1 >> count)
             | ((unsigned HOST_WIDE_INT) h1
                << (HOST_BITS_PER_WIDE_INT - count - 1) << 1));
    }

  /* Zero / sign extend all bits that are beyond the precision.  */

  if (count >= (HOST_WIDE_INT) prec)
    {
      *hv = signmask;
      *lv = signmask;
    }
  else if ((prec - count) >= 2 * HOST_BITS_PER_WIDE_INT)
    ;
  else if ((prec - count) >= HOST_BITS_PER_WIDE_INT)
    {
      *hv &= ~((HOST_WIDE_INT) (-1) << (prec - count - HOST_BITS_PER_WIDE_INT));
      *hv |= signmask << (prec - count - HOST_BITS_PER_WIDE_INT);
    }
  else
    {
      *hv = signmask;
      *lv &= ~((unsigned HOST_WIDE_INT) (-1) << (prec - count));
      *lv |= signmask << (prec - count);
    }
}

/* Rotate the doubleword integer in L1, H1 left by COUNT places
   keeping only PREC bits of result.
   Rotate right if COUNT is negative.
   Store the value as two `HOST_WIDE_INT' pieces in *LV and *HV.  */

void
lrotate_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
                HOST_WIDE_INT count, unsigned int prec,
                unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv)
{
  unsigned HOST_WIDE_INT s1l, s2l;
  HOST_WIDE_INT s1h, s2h;

  count %= prec;
  if (count < 0)
    count += prec;

  lshift_double (l1, h1, count, prec, &s1l, &s1h, 0);
  rshift_double (l1, h1, prec - count, prec, &s2l, &s2h, 0);
  *lv = s1l | s2l;
  *hv = s1h | s2h;
}

/* Rotate the doubleword integer in L1, H1 right by COUNT places
   keeping only PREC bits of result.  COUNT must be positive.
   Store the value as two `HOST_WIDE_INT' pieces in *LV and *HV.  */

void
rrotate_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
                HOST_WIDE_INT count, unsigned int prec,
                unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv)
{
  unsigned HOST_WIDE_INT s1l, s2l;
  HOST_WIDE_INT s1h, s2h;

  count %= prec;
  if (count < 0)
    count += prec;

  rshift_double (l1, h1, count, prec, &s1l, &s1h, 0);
  lshift_double (l1, h1, prec - count, prec, &s2l, &s2h, 0);
  *lv = s1l | s2l;
  *hv = s1h | s2h;
}

/* Divide doubleword integer LNUM, HNUM by doubleword integer LDEN, HDEN
   for a quotient (stored in *LQUO, *HQUO) and remainder (in *LREM, *HREM).
   CODE is a tree code for a kind of division, one of
   TRUNC_DIV_EXPR, FLOOR_DIV_EXPR, CEIL_DIV_EXPR, ROUND_DIV_EXPR
   or EXACT_DIV_EXPR.
   It controls how the quotient is rounded to an integer.
   Return nonzero if the operation overflows.
   UNS nonzero says do unsigned division.  */

int
div_and_round_double (enum tree_code code, int uns,
                      unsigned HOST_WIDE_INT lnum_orig, /* num == numerator == dividend */
                      HOST_WIDE_INT hnum_orig,
                      unsigned HOST_WIDE_INT lden_orig, /* den == denominator == divisor */
                      HOST_WIDE_INT hden_orig,
                      unsigned HOST_WIDE_INT *lquo,
                      HOST_WIDE_INT *hquo, unsigned HOST_WIDE_INT *lrem,
                      HOST_WIDE_INT *hrem)
{
  int quo_neg = 0;
  HOST_WIDE_INT num[4 + 1];	/* extra element for scaling.  */
  HOST_WIDE_INT den[4], quo[4];
  int i, j;
  unsigned HOST_WIDE_INT work;
  unsigned HOST_WIDE_INT carry = 0;
  unsigned HOST_WIDE_INT lnum = lnum_orig;
  HOST_WIDE_INT hnum = hnum_orig;
  unsigned HOST_WIDE_INT lden = lden_orig;
  HOST_WIDE_INT hden = hden_orig;
  int overflow = 0;

  if (hden == 0 && lden == 0)
    overflow = 1, lden = 1;

  /* Calculate quotient sign and convert operands to unsigned.  */
  if (!uns)
    {
      if (hnum < 0)
        {
          quo_neg = ~ quo_neg;
          /* (minimum integer) / (-1) is the only overflow case.  */
          if (neg_double (lnum, hnum, &lnum, &hnum)
              && ((HOST_WIDE_INT) lden & hden) == -1)
            overflow = 1;
        }
      if (hden < 0)
        {
          quo_neg = ~ quo_neg;
          neg_double (lden, hden, &lden, &hden);
        }
    }

  if (hnum == 0 && hden == 0)
    {				/* single precision */
      *hquo = *hrem = 0;
      /* This unsigned division rounds toward zero.  */
      *lquo = lnum / lden;
      goto finish_up;
    }

  if (hnum == 0)
    {				/* trivial case: dividend < divisor */
      /* hden != 0 already checked.  */
      *hquo = *lquo = 0;
      *hrem = hnum;
      *lrem = lnum;
      goto finish_up;
    }

  memset (quo, 0, sizeof quo);

  memset (num, 0, sizeof num);	/* to zero 9th element */
  memset (den, 0, sizeof den);

  encode (num, lnum, hnum);
  encode (den, lden, hden);

  /* Special code for when the divisor < BASE.  */
  if (hden == 0 && lden < (unsigned HOST_WIDE_INT) BASE)
    {
      /* hnum != 0 already checked.  */
      for (i = 4 - 1; i >= 0; i--)
        {
          work = num[i] + carry * BASE;
          quo[i] = work / lden;
          carry = work % lden;
        }
    }
  else
    {
      /* Full double precision division,
         with thanks to Don Knuth's "Seminumerical Algorithms".  */
      int num_hi_sig, den_hi_sig;
      unsigned HOST_WIDE_INT quo_est, scale;

      /* Find the highest nonzero divisor digit.  */
      for (i = 4 - 1;; i--)
        if (den[i] != 0)
          {
            den_hi_sig = i;
            break;
          }

      /* Ensure that the first digit of the divisor is at least BASE/2.
         This is required by the quotient digit estimation algorithm.  */

      scale = BASE / (den[den_hi_sig] + 1);
      if (scale > 1)
        {			/* scale divisor and dividend */
          carry = 0;
          for (i = 0; i <= 4 - 1; i++)
            {
              work = (num[i] * scale) + carry;
              num[i] = LOWPART (work);
              carry = HIGHPART (work);
            }

          num[4] = carry;
          carry = 0;
          for (i = 0; i <= 4 - 1; i++)
            {
              work = (den[i] * scale) + carry;
              den[i] = LOWPART (work);
              carry = HIGHPART (work);
              if (den[i] != 0) den_hi_sig = i;
            }
        }

      num_hi_sig = 4;

      /* Main loop */
      for (i = num_hi_sig - den_hi_sig - 1; i >= 0; i--)
        {
          /* Guess the next quotient digit, quo_est, by dividing the first
             two remaining dividend digits by the high order quotient digit.
             quo_est is never low and is at most 2 high.  */
          unsigned HOST_WIDE_INT tmp;

          num_hi_sig = i + den_hi_sig + 1;
          work = num[num_hi_sig] * BASE + num[num_hi_sig - 1];
          if (num[num_hi_sig] != den[den_hi_sig])
            quo_est = work / den[den_hi_sig];
          else
            quo_est = BASE - 1;

          /* Refine quo_est so it's usually correct, and at most one high.  */
          tmp = work - quo_est * den[den_hi_sig];
          if (tmp < BASE
              && (den[den_hi_sig - 1] * quo_est
                  > (tmp * BASE + num[num_hi_sig - 2])))
            quo_est--;

          /* Try QUO_EST as the quotient digit, by multiplying the
             divisor by QUO_EST and subtracting from the remaining dividend.
             Keep in mind that QUO_EST is the I - 1st digit.  */

          carry = 0;
          for (j = 0; j <= den_hi_sig; j++)
            {
              work = quo_est * den[j] + carry;
              carry = HIGHPART (work);
              work = num[i + j] - LOWPART (work);
              num[i + j] = LOWPART (work);
              carry += HIGHPART (work) != 0;
            }

          /* If quo_est was high by one, then num[i] went negative and
             we need to correct things.  */
          if (num[num_hi_sig] < (HOST_WIDE_INT) carry)
            {
              quo_est--;
              carry = 0;	/* add divisor back in */
              for (j = 0; j <= den_hi_sig; j++)
                {
                  work = num[i + j] + den[j] + carry;
                  carry = HIGHPART (work);
                  num[i + j] = LOWPART (work);
                }

              num[num_hi_sig] += carry;
            }

          /* Store the quotient digit.  */
          quo[i] = quo_est;
        }
    }

  decode (quo, lquo, hquo);

 finish_up:
  /* If result is negative, make it so.  */
  if (quo_neg)
    neg_double (*lquo, *hquo, lquo, hquo);

  /* Compute trial remainder:  rem = num - (quo * den)  */
  mul_double (*lquo, *hquo, lden_orig, hden_orig, lrem, hrem);
  neg_double (*lrem, *hrem, lrem, hrem);
  add_double (lnum_orig, hnum_orig, *lrem, *hrem, lrem, hrem);

  switch (code)
    {
    case TRUNC_DIV_EXPR:
    case TRUNC_MOD_EXPR:	/* round toward zero */
    case EXACT_DIV_EXPR:	/* for this one, it shouldn't matter */
      return overflow;

    case FLOOR_DIV_EXPR:
    case FLOOR_MOD_EXPR:	/* round toward negative infinity */
      if (quo_neg && (*lrem != 0 || *hrem != 0))  /* ratio < 0 && rem != 0 */
        {
          /* quo = quo - 1;  */
          add_double (*lquo, *hquo, (HOST_WIDE_INT) -1, (HOST_WIDE_INT) -1,
                      lquo, hquo);
        }
      else
        return overflow;
      break;

    case CEIL_DIV_EXPR:
    case CEIL_MOD_EXPR:		/* round toward positive infinity */
      if (!quo_neg && (*lrem != 0 || *hrem != 0))  /* ratio > 0 && rem != 0 */
        {
          add_double (*lquo, *hquo, (HOST_WIDE_INT) 1, (HOST_WIDE_INT) 0,
                      lquo, hquo);
        }
      else
        return overflow;
      break;

    case ROUND_DIV_EXPR:
    case ROUND_MOD_EXPR:	/* round to closest integer */
      {
        unsigned HOST_WIDE_INT labs_rem = *lrem;
        HOST_WIDE_INT habs_rem = *hrem;
        unsigned HOST_WIDE_INT labs_den = lden, ltwice;
        HOST_WIDE_INT habs_den = hden, htwice;

        /* Get absolute values.  */
        if (*hrem < 0)
          neg_double (*lrem, *hrem, &labs_rem, &habs_rem);
        if (hden < 0)
          neg_double (lden, hden, &labs_den, &habs_den);

        /* If (2 * abs (lrem) >= abs (lden)) */
        mul_double ((HOST_WIDE_INT) 2, (HOST_WIDE_INT) 0,
                    labs_rem, habs_rem, &ltwice, &htwice);

        if (((unsigned HOST_WIDE_INT) habs_den
             < (unsigned HOST_WIDE_INT) htwice)
            || (((unsigned HOST_WIDE_INT) habs_den
                 == (unsigned HOST_WIDE_INT) htwice)
                && (labs_den < ltwice)))
          {
            if (*hquo < 0)
              /* quo = quo - 1;  */
              add_double (*lquo, *hquo,
                          (HOST_WIDE_INT) -1, (HOST_WIDE_INT) -1, lquo, hquo);
            else
              /* quo = quo + 1;  */
              add_double (*lquo, *hquo, (HOST_WIDE_INT) 1, (HOST_WIDE_INT) 0,
                          lquo, hquo);
          }
        else
          return overflow;
      }
      break;

    default:
      gcc_unreachable ();
    }

  /* Compute true remainder:  rem = num - (quo * den)  */
  mul_double (*lquo, *hquo, lden_orig, hden_orig, lrem, hrem);
  neg_double (*lrem, *hrem, lrem, hrem);
  add_double (lnum_orig, hnum_orig, *lrem, *hrem, lrem, hrem);
  return overflow;
}

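/* As a worked example, dividing -7 by 2 first produces the truncated
   quotient -3 with remainder -1.  TRUNC_DIV_EXPR keeps -3;
   FLOOR_DIV_EXPR adjusts to -4 (true remainder 1); CEIL_DIV_EXPR keeps
   -3; and ROUND_DIV_EXPR, seeing 2 * |rem| >= |den|, rounds the halfway
   case away from zero to -4.  */
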
/* If ARG2 divides ARG1 with zero remainder, carries out the division
   of type CODE and returns the quotient.
   Otherwise returns NULL_TREE.  */

static tree
div_if_zero_remainder (enum tree_code code, tree arg1, tree arg2)
{
  unsigned HOST_WIDE_INT int1l, int2l;
  HOST_WIDE_INT int1h, int2h;
  unsigned HOST_WIDE_INT quol, reml;
  HOST_WIDE_INT quoh, remh;
  tree type = TREE_TYPE (arg1);
  int uns = TYPE_UNSIGNED (type);

  int1l = TREE_INT_CST_LOW (arg1);
  int1h = TREE_INT_CST_HIGH (arg1);
  int2l = TREE_INT_CST_LOW (arg2);
  int2h = TREE_INT_CST_HIGH (arg2);

  div_and_round_double (code, uns, int1l, int1h, int2l, int2h,
                        &quol, &quoh, &reml, &remh);
  if (remh != 0 || reml != 0)
    return NULL_TREE;

  return build_int_cst_wide (type, quol, quoh);
}

/* Return true if the built-in mathematical function specified by CODE
   is odd, i.e. -f(x) == f(-x).  */

static bool
negate_mathfn_p (enum built_in_function code)
{
  switch (code)
    {
    CASE_FLT_FN (BUILT_IN_ASIN):
    CASE_FLT_FN (BUILT_IN_ASINH):
    CASE_FLT_FN (BUILT_IN_ATAN):
    CASE_FLT_FN (BUILT_IN_ATANH):
    CASE_FLT_FN (BUILT_IN_CBRT):
    CASE_FLT_FN (BUILT_IN_ERF):
    CASE_FLT_FN (BUILT_IN_LLROUND):
    CASE_FLT_FN (BUILT_IN_LROUND):
    CASE_FLT_FN (BUILT_IN_ROUND):
    CASE_FLT_FN (BUILT_IN_SIN):
    CASE_FLT_FN (BUILT_IN_SINH):
    CASE_FLT_FN (BUILT_IN_TAN):
    CASE_FLT_FN (BUILT_IN_TANH):
    CASE_FLT_FN (BUILT_IN_TRUNC):
      return true;

    CASE_FLT_FN (BUILT_IN_LLRINT):
    CASE_FLT_FN (BUILT_IN_LRINT):
    CASE_FLT_FN (BUILT_IN_NEARBYINT):
    CASE_FLT_FN (BUILT_IN_RINT):
      return !flag_rounding_math;

    default:
      break;
    }
  return false;
}

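/* The functions in the first group satisfy f(-x) == -f(x) exactly.  The
   rint family is odd only when the dynamic rounding mode cannot be
   observed: under FE_DOWNWARD, for example, rint (-0.5) is -1.0 while
   -rint (0.5) is -0.0, hence the !flag_rounding_math condition.  */
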
/* Check whether we may negate an integer constant T without causing
   overflow.  */

bool
may_negate_without_overflow_p (tree t)
{
  unsigned HOST_WIDE_INT val;
  unsigned int prec;
  tree type;

  gcc_assert (TREE_CODE (t) == INTEGER_CST);

  type = TREE_TYPE (t);
  if (TYPE_UNSIGNED (type))
    return false;

  prec = TYPE_PRECISION (type);
  if (prec > HOST_BITS_PER_WIDE_INT)
    {
      if (TREE_INT_CST_LOW (t) != 0)
        return true;
      prec -= HOST_BITS_PER_WIDE_INT;
      val = TREE_INT_CST_HIGH (t);
    }
  else
    val = TREE_INT_CST_LOW (t);
  if (prec < HOST_BITS_PER_WIDE_INT)
    val &= ((unsigned HOST_WIDE_INT) 1 << prec) - 1;
  return val != ((unsigned HOST_WIDE_INT) 1 << (prec - 1));
}

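/* In other words, the only constant that cannot be negated is the type's
   most negative value, whose low PREC bits are exactly 1 << (prec - 1),
   e.g. -2147483648 for a 32-bit int.  */
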
/* Determine whether an expression T can be cheaply negated using
   the function negate_expr without introducing undefined overflow.  */

static bool
negate_expr_p (tree t)
{
  tree type;

  if (t == 0)
    return false;

  type = TREE_TYPE (t);

  STRIP_SIGN_NOPS (t);
  switch (TREE_CODE (t))
    {
    case INTEGER_CST:
      if (TYPE_UNSIGNED (type)
          || (flag_wrapv && ! flag_trapv))
        return true;

      /* Check that -CST will not overflow type.  */
      return may_negate_without_overflow_p (t);

    case BIT_NOT_EXPR:
      return INTEGRAL_TYPE_P (type)
             && (TYPE_UNSIGNED (type)
                 || (flag_wrapv && !flag_trapv));

    case REAL_CST:
    case NEGATE_EXPR:
      return true;

    case COMPLEX_CST:
      return negate_expr_p (TREE_REALPART (t))
             && negate_expr_p (TREE_IMAGPART (t));

    case PLUS_EXPR:
      if (HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type))
          || HONOR_SIGNED_ZEROS (TYPE_MODE (type)))
        return false;
      /* -(A + B) -> (-B) - A.  */
      if (negate_expr_p (TREE_OPERAND (t, 1))
          && reorder_operands_p (TREE_OPERAND (t, 0),
                                 TREE_OPERAND (t, 1)))
        return true;
      /* -(A + B) -> (-A) - B.  */
      return negate_expr_p (TREE_OPERAND (t, 0));

    case MINUS_EXPR:
      /* We can't turn -(A-B) into B-A when we honor signed zeros.  */
      return !HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type))
             && !HONOR_SIGNED_ZEROS (TYPE_MODE (type))
             && reorder_operands_p (TREE_OPERAND (t, 0),
                                    TREE_OPERAND (t, 1));

    case MULT_EXPR:
      if (TYPE_UNSIGNED (TREE_TYPE (t)))
        break;

      /* Fall through.  */

    case RDIV_EXPR:
      if (! HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (TREE_TYPE (t))))
        return negate_expr_p (TREE_OPERAND (t, 1))
               || negate_expr_p (TREE_OPERAND (t, 0));
      break;

    case TRUNC_DIV_EXPR:
    case ROUND_DIV_EXPR:
    case FLOOR_DIV_EXPR:
    case CEIL_DIV_EXPR:
    case EXACT_DIV_EXPR:
      if (TYPE_UNSIGNED (TREE_TYPE (t)) || flag_wrapv)
        break;
      return negate_expr_p (TREE_OPERAND (t, 1))
             || negate_expr_p (TREE_OPERAND (t, 0));

    case NOP_EXPR:
      /* Negate -((double)float) as (double)(-float).  */
      if (TREE_CODE (type) == REAL_TYPE)
        {
          tree tem = strip_float_extensions (t);
          if (tem != t)
            return negate_expr_p (tem);
        }
      break;

    case CALL_EXPR:
      /* Negate -f(x) as f(-x).  */
      if (negate_mathfn_p (builtin_mathfn_code (t)))
        return negate_expr_p (TREE_VALUE (TREE_OPERAND (t, 1)));
      break;

    case RSHIFT_EXPR:
      /* Optimize -((int)x >> 31) into (unsigned)x >> 31.  */
      if (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST)
        {
          tree op1 = TREE_OPERAND (t, 1);
          if (TREE_INT_CST_HIGH (op1) == 0
              && (unsigned HOST_WIDE_INT) (TYPE_PRECISION (type) - 1)
                 == TREE_INT_CST_LOW (op1))
            return true;
        }
      break;

    default:
      break;
    }
  return false;
}

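/* The RSHIFT_EXPR case relies on the shift isolating the sign bit: for a
   32-bit int, (int) x >> 31 is either 0 or -1, so its negation (0 or 1)
   equals (unsigned) x >> 31 and no explicit negation is needed.  */
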
/* Given T, an expression, return a folded tree for -T or NULL_TREE, if no
   simplification is possible.
   If negate_expr_p would return true for T, NULL_TREE will never be
   returned.  */

static tree
fold_negate_expr (tree t)
{
  tree type = TREE_TYPE (t);
  tree tem;

  switch (TREE_CODE (t))
    {
    /* Convert - (~A) to A + 1.  */
    case BIT_NOT_EXPR:
      if (INTEGRAL_TYPE_P (type))
        return fold_build2 (PLUS_EXPR, type, TREE_OPERAND (t, 0),
                            build_int_cst (type, 1));
      break;

    case INTEGER_CST:
      tem = fold_negate_const (t, type);
      if (!TREE_OVERFLOW (tem)
          || TYPE_UNSIGNED (type)
          || !flag_trapv)
        return tem;
      break;

    case REAL_CST:
      tem = fold_negate_const (t, type);
      /* Two's complement FP formats, such as c4x, may overflow.  */
      if (!TREE_OVERFLOW (tem) || !flag_trapping_math)
        return tem;
      break;

    case COMPLEX_CST:
      {
        tree rpart = negate_expr (TREE_REALPART (t));
        tree ipart = negate_expr (TREE_IMAGPART (t));

        if ((TREE_CODE (rpart) == REAL_CST
             && TREE_CODE (ipart) == REAL_CST)
            || (TREE_CODE (rpart) == INTEGER_CST
                && TREE_CODE (ipart) == INTEGER_CST))
          return build_complex (type, rpart, ipart);
      }
      break;

    case NEGATE_EXPR:
      return TREE_OPERAND (t, 0);

    case PLUS_EXPR:
      if (!HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type))
          && !HONOR_SIGNED_ZEROS (TYPE_MODE (type)))
        {
          /* -(A + B) -> (-B) - A.  */
          if (negate_expr_p (TREE_OPERAND (t, 1))
              && reorder_operands_p (TREE_OPERAND (t, 0),
                                     TREE_OPERAND (t, 1)))
            {
              tem = negate_expr (TREE_OPERAND (t, 1));
              return fold_build2 (MINUS_EXPR, type,
                                  tem, TREE_OPERAND (t, 0));
            }

          /* -(A + B) -> (-A) - B.  */
          if (negate_expr_p (TREE_OPERAND (t, 0)))
            {
              tem = negate_expr (TREE_OPERAND (t, 0));
              return fold_build2 (MINUS_EXPR, type,
                                  tem, TREE_OPERAND (t, 1));
            }
        }
      break;

    case MINUS_EXPR:
      /* - (A - B) -> B - A  */
      if (!HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type))
          && !HONOR_SIGNED_ZEROS (TYPE_MODE (type))
          && reorder_operands_p (TREE_OPERAND (t, 0), TREE_OPERAND (t, 1)))
        return fold_build2 (MINUS_EXPR, type,
                            TREE_OPERAND (t, 1), TREE_OPERAND (t, 0));
      break;

    case MULT_EXPR:
      if (TYPE_UNSIGNED (type))
        break;

      /* Fall through.  */

    case RDIV_EXPR:
      if (! HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type)))
        {
          tem = TREE_OPERAND (t, 1);
          if (negate_expr_p (tem))
            return fold_build2 (TREE_CODE (t), type,
                                TREE_OPERAND (t, 0), negate_expr (tem));
          tem = TREE_OPERAND (t, 0);
          if (negate_expr_p (tem))
            return fold_build2 (TREE_CODE (t), type,
                                negate_expr (tem), TREE_OPERAND (t, 1));
        }
      break;

    case TRUNC_DIV_EXPR:
    case ROUND_DIV_EXPR:
    case FLOOR_DIV_EXPR:
    case CEIL_DIV_EXPR:
    case EXACT_DIV_EXPR:
      if (!TYPE_UNSIGNED (type) && !flag_wrapv)
        {
          tem = TREE_OPERAND (t, 1);
          if (negate_expr_p (tem))
            return fold_build2 (TREE_CODE (t), type,
                                TREE_OPERAND (t, 0), negate_expr (tem));
          tem = TREE_OPERAND (t, 0);
          if (negate_expr_p (tem))
            return fold_build2 (TREE_CODE (t), type,
                                negate_expr (tem), TREE_OPERAND (t, 1));
        }
      break;

    case NOP_EXPR:
      /* Convert -((double)float) into (double)(-float).  */
      if (TREE_CODE (type) == REAL_TYPE)
        {
          tem = strip_float_extensions (t);
          if (tem != t && negate_expr_p (tem))
            return negate_expr (tem);
        }
      break;

    case CALL_EXPR:
      /* Negate -f(x) as f(-x).  */
      if (negate_mathfn_p (builtin_mathfn_code (t))
          && negate_expr_p (TREE_VALUE (TREE_OPERAND (t, 1))))
        {
          tree fndecl, arg, arglist;

          fndecl = get_callee_fndecl (t);
          arg = negate_expr (TREE_VALUE (TREE_OPERAND (t, 1)));
          arglist = build_tree_list (NULL_TREE, arg);
          return build_function_call_expr (fndecl, arglist);
        }
      break;

    case RSHIFT_EXPR:
      /* Optimize -((int)x >> 31) into (unsigned)x >> 31.  */
      if (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST)
        {
          tree op1 = TREE_OPERAND (t, 1);
          if (TREE_INT_CST_HIGH (op1) == 0
              && (unsigned HOST_WIDE_INT) (TYPE_PRECISION (type) - 1)
                 == TREE_INT_CST_LOW (op1))
            {
              tree ntype = TYPE_UNSIGNED (type)
                           ? lang_hooks.types.signed_type (type)
                           : lang_hooks.types.unsigned_type (type);
              tree temp = fold_convert (ntype, TREE_OPERAND (t, 0));
              temp = fold_build2 (RSHIFT_EXPR, ntype, temp, op1);
              return fold_convert (type, temp);
            }
        }
      break;

    default:
      break;
    }

  return NULL_TREE;
}

/* Like fold_negate_expr, but return a NEGATE_EXPR tree if T cannot be
   negated in a simpler way.  Also allow for T to be NULL_TREE, in which
   case return NULL_TREE.  */

static tree
negate_expr (tree t)
{
  tree type, tem;

  if (t == NULL_TREE)
    return NULL_TREE;

  type = TREE_TYPE (t);
  STRIP_SIGN_NOPS (t);

  tem = fold_negate_expr (t);
  if (!tem)
    tem = build1 (NEGATE_EXPR, TREE_TYPE (t), t);
  return fold_convert (type, tem);
}

/* Split a tree IN into a constant, literal and variable parts that could be
   combined with CODE to make IN.  "constant" means an expression with
   TREE_CONSTANT but that isn't an actual constant.  CODE must be a
   commutative arithmetic operation.  Store the constant part into *CONP,
   the literal in *LITP and return the variable part.  If a part isn't
   present, set it to null.  If the tree does not decompose in this way,
   return the entire tree as the variable part and the other parts as null.

   If CODE is PLUS_EXPR we also split trees that use MINUS_EXPR.  In that
   case, we negate an operand that was subtracted.  Except if it is a
   literal for which we use *MINUS_LITP instead.

   If NEGATE_P is true, we are negating all of IN, again except a literal
   for which we use *MINUS_LITP instead.

   If IN is itself a literal or constant, return it as appropriate.

   Note that we do not guarantee that any of the three values will be the
   same type as IN, but they will have the same signedness and mode.  */

static tree
split_tree (tree in, enum tree_code code, tree *conp, tree *litp,
            tree *minus_litp, int negate_p)
{
  tree var = 0;

  *conp = 0;
  *litp = 0;
  *minus_litp = 0;

  /* Strip any conversions that don't change the machine mode or signedness.  */
  STRIP_SIGN_NOPS (in);

  if (TREE_CODE (in) == INTEGER_CST || TREE_CODE (in) == REAL_CST)
    *litp = in;
  else if (TREE_CODE (in) == code
           || (! FLOAT_TYPE_P (TREE_TYPE (in))
               /* We can associate addition and subtraction together (even
                  though the C standard doesn't say so) for integers because
                  the value is not affected.  For reals, the value might be
                  affected, so we can't.  */
               && ((code == PLUS_EXPR && TREE_CODE (in) == MINUS_EXPR)
                   || (code == MINUS_EXPR && TREE_CODE (in) == PLUS_EXPR))))
    {
      tree op0 = TREE_OPERAND (in, 0);
      tree op1 = TREE_OPERAND (in, 1);
      int neg1_p = TREE_CODE (in) == MINUS_EXPR;
      int neg_litp_p = 0, neg_conp_p = 0, neg_var_p = 0;

      /* First see if either of the operands is a literal, then a constant.  */
      if (TREE_CODE (op0) == INTEGER_CST || TREE_CODE (op0) == REAL_CST)
        *litp = op0, op0 = 0;
      else if (TREE_CODE (op1) == INTEGER_CST || TREE_CODE (op1) == REAL_CST)
        *litp = op1, neg_litp_p = neg1_p, op1 = 0;

      if (op0 != 0 && TREE_CONSTANT (op0))
        *conp = op0, op0 = 0;
      else if (op1 != 0 && TREE_CONSTANT (op1))
        *conp = op1, neg_conp_p = neg1_p, op1 = 0;

      /* If we haven't dealt with either operand, this is not a case we can
         decompose.  Otherwise, VAR is either of the ones remaining, if any.  */
      if (op0 != 0 && op1 != 0)
        var = in;
      else if (op0 != 0)
        var = op0;
      else
        var = op1, neg_var_p = neg1_p;

      /* Now do any needed negations.  */
      if (neg_litp_p)
        *minus_litp = *litp, *litp = 0;
      if (neg_conp_p)
        *conp = negate_expr (*conp);
      if (neg_var_p)
        var = negate_expr (var);
    }
  else if (TREE_CONSTANT (in))
    *conp = in;
  else
    var = in;

  if (negate_p)
    {
      if (*litp)
        *minus_litp = *litp, *litp = 0;
      else if (*minus_litp)
        *litp = *minus_litp, *minus_litp = 0;
      *conp = negate_expr (*conp);
      var = negate_expr (var);
    }

  return var;
}

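/* For example, splitting A - 5 with CODE == PLUS_EXPR returns A as the
   variable part and stores 5 in *MINUS_LITP, since the literal was
   subtracted; *CONP and *LITP are left null.  */
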
/* Re-associate trees split by the above function.  T1 and T2 are either
   expressions to associate or null.  Return the new expression, if any.  If
   we build an operation, do it in TYPE and with CODE.  */

static tree
associate_trees (tree t1, tree t2, enum tree_code code, tree type)
{
  if (t1 == 0)
    return t2;
  else if (t2 == 0)
    return t1;

  /* If either input is CODE, a PLUS_EXPR, or a MINUS_EXPR, don't
     try to fold this since we will have infinite recursion.  But do
     deal with any NEGATE_EXPRs.  */
  if (TREE_CODE (t1) == code || TREE_CODE (t2) == code
      || TREE_CODE (t1) == MINUS_EXPR || TREE_CODE (t2) == MINUS_EXPR)
    {
      if (code == PLUS_EXPR)
        {
          if (TREE_CODE (t1) == NEGATE_EXPR)
            return build2 (MINUS_EXPR, type, fold_convert (type, t2),
                           fold_convert (type, TREE_OPERAND (t1, 0)));
          else if (TREE_CODE (t2) == NEGATE_EXPR)
            return build2 (MINUS_EXPR, type, fold_convert (type, t1),
                           fold_convert (type, TREE_OPERAND (t2, 0)));
          else if (integer_zerop (t2))
            return fold_convert (type, t1);
        }
      else if (code == MINUS_EXPR)
        {
          if (integer_zerop (t2))
            return fold_convert (type, t1);
        }

      return build2 (code, type, fold_convert (type, t1),
                     fold_convert (type, t2));
    }

  return fold_build2 (code, type, fold_convert (type, t1),
                      fold_convert (type, t2));
}

/* Check whether TYPE1 and TYPE2 are equivalent integer types, suitable
   for use in int_const_binop, size_binop and size_diffop.  */

static bool
int_binop_types_match_p (enum tree_code code, tree type1, tree type2)
{
  if (TREE_CODE (type1) != INTEGER_TYPE && !POINTER_TYPE_P (type1))
    return false;
  if (TREE_CODE (type2) != INTEGER_TYPE && !POINTER_TYPE_P (type2))
    return false;

  switch (code)
    {
    case LSHIFT_EXPR:
    case RSHIFT_EXPR:
    case LROTATE_EXPR:
    case RROTATE_EXPR:
      return true;

    default:
      break;
    }

  return TYPE_UNSIGNED (type1) == TYPE_UNSIGNED (type2)
         && TYPE_PRECISION (type1) == TYPE_PRECISION (type2)
         && TYPE_MODE (type1) == TYPE_MODE (type2);
}

/* Combine two integer constants ARG1 and ARG2 under operation CODE
   to produce a new constant.  Return NULL_TREE if we don't know how
   to evaluate CODE at compile-time.

   If NOTRUNC is nonzero, do not truncate the result to fit the data type.  */

tree
int_const_binop (enum tree_code code, tree arg1, tree arg2, int notrunc)
{
  unsigned HOST_WIDE_INT int1l, int2l;
  HOST_WIDE_INT int1h, int2h;
  unsigned HOST_WIDE_INT low;
  HOST_WIDE_INT hi;
  unsigned HOST_WIDE_INT garbagel;
  HOST_WIDE_INT garbageh;
  tree t;
  tree type = TREE_TYPE (arg1);
  int uns = TYPE_UNSIGNED (type);
  int is_sizetype
    = (TREE_CODE (type) == INTEGER_TYPE && TYPE_IS_SIZETYPE (type));
  int overflow = 0;

  int1l = TREE_INT_CST_LOW (arg1);
  int1h = TREE_INT_CST_HIGH (arg1);
  int2l = TREE_INT_CST_LOW (arg2);
  int2h = TREE_INT_CST_HIGH (arg2);

  switch (code)
    {
    case BIT_IOR_EXPR:
      low = int1l | int2l, hi = int1h | int2h;
      break;

    case BIT_XOR_EXPR:
      low = int1l ^ int2l, hi = int1h ^ int2h;
      break;

    case BIT_AND_EXPR:
      low = int1l & int2l, hi = int1h & int2h;
      break;

    case RSHIFT_EXPR:
      int2l = -int2l;
    case LSHIFT_EXPR:
      /* It's unclear from the C standard whether shifts can overflow.
         The following code ignores overflow; perhaps a C standard
         interpretation ruling is needed.  */
      lshift_double (int1l, int1h, int2l, TYPE_PRECISION (type),
                     &low, &hi, !uns);
      break;

    case RROTATE_EXPR:
      int2l = - int2l;
    case LROTATE_EXPR:
      lrotate_double (int1l, int1h, int2l, TYPE_PRECISION (type),
                      &low, &hi);
      break;

    case PLUS_EXPR:
      overflow = add_double (int1l, int1h, int2l, int2h, &low, &hi);
      break;

    case MINUS_EXPR:
      neg_double (int2l, int2h, &low, &hi);
      add_double (int1l, int1h, low, hi, &low, &hi);
      overflow = OVERFLOW_SUM_SIGN (hi, int2h, int1h);
      break;

    case MULT_EXPR:
      overflow = mul_double (int1l, int1h, int2l, int2h, &low, &hi);
      break;

    case TRUNC_DIV_EXPR:
    case FLOOR_DIV_EXPR: case CEIL_DIV_EXPR:
    case EXACT_DIV_EXPR:
      /* This is a shortcut for a common special case.  */
      if (int2h == 0 && (HOST_WIDE_INT) int2l > 0
          && !TREE_OVERFLOW (arg1)
          && !TREE_OVERFLOW (arg2)
          && int1h == 0 && (HOST_WIDE_INT) int1l >= 0)
        {
          if (code == CEIL_DIV_EXPR)
            int1l += int2l - 1;

          low = int1l / int2l, hi = 0;
          break;
        }

      /* ... fall through ...  */

    case ROUND_DIV_EXPR:
      if (int2h == 0 && int2l == 0)
        return NULL_TREE;
      if (int2h == 0 && int2l == 1)
        {
          low = int1l, hi = int1h;
          break;
        }
      if (int1l == int2l && int1h == int2h
          && ! (int1l == 0 && int1h == 0))
        {
          low = 1, hi = 0;
          break;
        }
      overflow = div_and_round_double (code, uns, int1l, int1h, int2l, int2h,
                                       &low, &hi, &garbagel, &garbageh);
      break;

    case TRUNC_MOD_EXPR:
    case FLOOR_MOD_EXPR: case CEIL_MOD_EXPR:
      /* This is a shortcut for a common special case.  */
      if (int2h == 0 && (HOST_WIDE_INT) int2l > 0
          && !TREE_OVERFLOW (arg1)
          && !TREE_OVERFLOW (arg2)
          && int1h == 0 && (HOST_WIDE_INT) int1l >= 0)
        {
          if (code == CEIL_MOD_EXPR)
            int1l += int2l - 1;
          low = int1l % int2l, hi = 0;
          break;
        }

      /* ... fall through ...  */

    case ROUND_MOD_EXPR:
      if (int2h == 0 && int2l == 0)
        return NULL_TREE;
      overflow = div_and_round_double (code, uns,
                                       int1l, int1h, int2l, int2h,
                                       &garbagel, &garbageh, &low, &hi);
      break;

    case MIN_EXPR:
    case MAX_EXPR:
      if (uns)
        low = (((unsigned HOST_WIDE_INT) int1h
                < (unsigned HOST_WIDE_INT) int2h)
               || (((unsigned HOST_WIDE_INT) int1h
                    == (unsigned HOST_WIDE_INT) int2h)
                   && int1l < int2l));
      else
        low = (int1h < int2h
               || (int1h == int2h && int1l < int2l));

      if (low == (code == MIN_EXPR))
        low = int1l, hi = int1h;
      else
        low = int2l, hi = int2h;
      break;

    default:
      return NULL_TREE;
    }

  if (notrunc)
    {
      t = build_int_cst_wide (TREE_TYPE (arg1), low, hi);

      /* Propagate overflow flags ourselves.  */
      if (((!uns || is_sizetype) && overflow)
          | TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2))
        {
          t = copy_node (t);
          TREE_OVERFLOW (t) = 1;
        }
    }
  else
    t = force_fit_type_double (TREE_TYPE (arg1), low, hi, 1,
                               ((!uns || is_sizetype) && overflow)
                               | TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2),
                               false);

  return t;
}

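/* For example, adding the 32-bit constants 0x7FFFFFFF and 1 overflows no
   double-word arithmetic, but force_fit_type_double sign extends the
   result to -2147483648, sees that it no longer matches the double-word
   value, and returns a node with TREE_OVERFLOW set.  */
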
/* Combine two constants ARG1 and ARG2 under operation CODE to produce a new
   constant.  We assume ARG1 and ARG2 have the same data type, or at least
   are the same kind of constant and the same machine mode.  Return zero if
   combining the constants is not allowed in the current operating mode.

   If NOTRUNC is nonzero, do not truncate the result to fit the data type.  */

static tree
const_binop (enum tree_code code, tree arg1, tree arg2, int notrunc)
{
  /* Sanity check for the recursive cases.  */
  if (!arg1 || !arg2)
    return NULL_TREE;

  STRIP_NOPS (arg1);
  STRIP_NOPS (arg2);

  if (TREE_CODE (arg1) == INTEGER_CST)
    return int_const_binop (code, arg1, arg2, notrunc);

  if (TREE_CODE (arg1) == REAL_CST)
    {
      enum machine_mode mode;
      REAL_VALUE_TYPE d1;
      REAL_VALUE_TYPE d2;
      REAL_VALUE_TYPE value;
      REAL_VALUE_TYPE result;
      bool inexact;
      tree t, type;

      /* The following codes are handled by real_arithmetic.  */
      switch (code)
        {
        case PLUS_EXPR:
        case MINUS_EXPR:
        case MULT_EXPR:
        case RDIV_EXPR:
        case MIN_EXPR:
        case MAX_EXPR:
          break;

        default:
          return NULL_TREE;
        }

      d1 = TREE_REAL_CST (arg1);
      d2 = TREE_REAL_CST (arg2);

      type = TREE_TYPE (arg1);
      mode = TYPE_MODE (type);

      /* Don't perform operation if we honor signaling NaNs and
         either operand is a NaN.  */
      if (HONOR_SNANS (mode)
          && (REAL_VALUE_ISNAN (d1) || REAL_VALUE_ISNAN (d2)))
        return NULL_TREE;

      /* Don't perform operation if it would raise a division
         by zero exception.  */
      if (code == RDIV_EXPR
          && REAL_VALUES_EQUAL (d2, dconst0)
          && (flag_trapping_math || ! MODE_HAS_INFINITIES (mode)))
        return NULL_TREE;

      /* If either operand is a NaN, just return it.  Otherwise, set up
         for floating-point trap; we return an overflow.  */
      if (REAL_VALUE_ISNAN (d1))
        return arg1;
      else if (REAL_VALUE_ISNAN (d2))
        return arg2;

      inexact = real_arithmetic (&value, code, &d1, &d2);
      real_convert (&result, mode, &value);

      /* Don't constant fold this floating point operation if
         the result has overflowed and flag_trapping_math.  */
      if (flag_trapping_math
          && MODE_HAS_INFINITIES (mode)
          && REAL_VALUE_ISINF (result)
          && !REAL_VALUE_ISINF (d1)
          && !REAL_VALUE_ISINF (d2))
        return NULL_TREE;

      /* Don't constant fold this floating point operation if the
         result may depend upon the run-time rounding mode and
         flag_rounding_math is set, or if GCC's software emulation
         is unable to accurately represent the result.  */
      if ((flag_rounding_math
           || (REAL_MODE_FORMAT_COMPOSITE_P (mode)
               && !flag_unsafe_math_optimizations))
          && (inexact || !real_identical (&result, &value)))
        return NULL_TREE;

      t = build_real (type, result);

      TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2);
      return t;
    }

  if (TREE_CODE (arg1) == COMPLEX_CST)
    {
      tree type = TREE_TYPE (arg1);
      tree r1 = TREE_REALPART (arg1);
      tree i1 = TREE_IMAGPART (arg1);
      tree r2 = TREE_REALPART (arg2);
      tree i2 = TREE_IMAGPART (arg2);
      tree real, imag;

      switch (code)
        {
        case PLUS_EXPR:
        case MINUS_EXPR:
          real = const_binop (code, r1, r2, notrunc);
          imag = const_binop (code, i1, i2, notrunc);
          break;

        case MULT_EXPR:
          real = const_binop (MINUS_EXPR,
                              const_binop (MULT_EXPR, r1, r2, notrunc),
                              const_binop (MULT_EXPR, i1, i2, notrunc),
                              notrunc);
          imag = const_binop (PLUS_EXPR,
                              const_binop (MULT_EXPR, r1, i2, notrunc),
                              const_binop (MULT_EXPR, i1, r2, notrunc),
                              notrunc);
          break;

        case RDIV_EXPR:
          {
            tree magsquared
              = const_binop (PLUS_EXPR,
                             const_binop (MULT_EXPR, r2, r2, notrunc),
                             const_binop (MULT_EXPR, i2, i2, notrunc),
                             notrunc);
            tree t1
              = const_binop (PLUS_EXPR,
                             const_binop (MULT_EXPR, r1, r2, notrunc),
                             const_binop (MULT_EXPR, i1, i2, notrunc),
                             notrunc);
            tree t2
              = const_binop (MINUS_EXPR,
                             const_binop (MULT_EXPR, i1, r2, notrunc),
                             const_binop (MULT_EXPR, r1, i2, notrunc),
                             notrunc);

            if (INTEGRAL_TYPE_P (TREE_TYPE (r1)))
              code = TRUNC_DIV_EXPR;

            real = const_binop (code, t1, magsquared, notrunc);
            imag = const_binop (code, t2, magsquared, notrunc);
          }
          break;

        default:
          return NULL_TREE;
        }

      if (real && imag)
        return build_complex (type, real, imag);
    }

  return NULL_TREE;
}

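/* The complex RDIV_EXPR case above follows the textbook formula
   (r1 + i1*i) / (r2 + i2*i)
     == ((r1*r2 + i1*i2) + (i1*r2 - r1*i2)*i) / (r2*r2 + i2*i2),
   computing the squared magnitude of the divisor once and dividing both
   intermediate products by it.  */
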
/* Create a size type INT_CST node with NUMBER sign extended.  KIND
   indicates which particular sizetype to create.  */

tree
size_int_kind (HOST_WIDE_INT number, enum size_type_kind kind)
{
  return build_int_cst (sizetype_tab[(int) kind], number);
}

/* Combine operands OP1 and OP2 with arithmetic operation CODE.  CODE
   is a tree code.  The type of the result is taken from the operands.
   Both must be equivalent integer types, ala int_binop_types_match_p.
   If the operands are constant, so is the result.  */

tree
size_binop (enum tree_code code, tree arg0, tree arg1)
{
  tree type = TREE_TYPE (arg0);

  if (arg0 == error_mark_node || arg1 == error_mark_node)
    return error_mark_node;

  gcc_assert (int_binop_types_match_p (code, TREE_TYPE (arg0),
                                       TREE_TYPE (arg1)));

  /* Handle the special case of two integer constants faster.  */
  if (TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
    {
      /* And some specific cases even faster than that.  */
      if (code == PLUS_EXPR && integer_zerop (arg0))
        return arg1;
      else if ((code == MINUS_EXPR || code == PLUS_EXPR)
               && integer_zerop (arg1))
        return arg0;
      else if (code == MULT_EXPR && integer_onep (arg0))
        return arg1;

      /* Handle general case of two integer constants.  */
      return int_const_binop (code, arg0, arg1, 0);
    }

  return fold_build2 (code, type, arg0, arg1);
}

/* Given two values, either both of sizetype or both of bitsizetype,
   compute the difference between the two values.  Return the value
   in signed type corresponding to the type of the operands.  */

tree
size_diffop (tree arg0, tree arg1)
{
  tree type = TREE_TYPE (arg0);
  tree ctype;

  gcc_assert (int_binop_types_match_p (MINUS_EXPR, TREE_TYPE (arg0),
                                       TREE_TYPE (arg1)));

  /* If the type is already signed, just do the simple thing.  */
  if (!TYPE_UNSIGNED (type))
    return size_binop (MINUS_EXPR, arg0, arg1);

  if (type == sizetype)
    ctype = ssizetype;
  else if (type == bitsizetype)
    ctype = sbitsizetype;
  else
    ctype = lang_hooks.types.signed_type (type);

  /* If either operand is not a constant, do the conversions to the signed
     type and subtract.  The hardware will do the right thing with any
     overflow in the subtraction.  */
  if (TREE_CODE (arg0) != INTEGER_CST || TREE_CODE (arg1) != INTEGER_CST)
    return size_binop (MINUS_EXPR, fold_convert (ctype, arg0),
                       fold_convert (ctype, arg1));

  /* If ARG0 is larger than ARG1, subtract and return the result in CTYPE.
     Otherwise, subtract the other way, convert to CTYPE (we know that can't
     overflow) and negate (which can't either).  Special-case a result
     of zero while we're here.  */
  if (tree_int_cst_equal (arg0, arg1))
    return build_int_cst (ctype, 0);
  else if (tree_int_cst_lt (arg1, arg0))
    return fold_convert (ctype, size_binop (MINUS_EXPR, arg0, arg1));
  else
    return size_binop (MINUS_EXPR, build_int_cst (ctype, 0),
                       fold_convert (ctype, size_binop (MINUS_EXPR,
                                                        arg1, arg0)));
}

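/* For example, applying size_diffop to the sizetype constants 4 and 8
   computes 8 - 4 == 4 in the unsigned type first and then negates in
   ssizetype, yielding -4 without relying on unsigned wraparound.  */
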
/* A subroutine of fold_convert_const handling conversions of an
   INTEGER_CST to another integer type.  */

static tree
fold_convert_const_int_from_int (tree type, tree arg1)
{
  tree t;

  /* Given an integer constant, make new constant with new type,
     appropriately sign-extended or truncated.  */
  t = force_fit_type_double (type, TREE_INT_CST_LOW (arg1),
                             TREE_INT_CST_HIGH (arg1),
                             /* Don't set the overflow when
                                converting a pointer.  */
                             !POINTER_TYPE_P (TREE_TYPE (arg1)),
                             (TREE_INT_CST_HIGH (arg1) < 0
                              && (TYPE_UNSIGNED (type)
                                  < TYPE_UNSIGNED (TREE_TYPE (arg1))))
                             | TREE_OVERFLOW (arg1),
                             false);

  return t;
}

/* A subroutine of fold_convert_const handling conversions of a REAL_CST
   to an integer type.  */

static tree
fold_convert_const_int_from_real (enum tree_code code, tree type, tree arg1)
{
  int overflow = 0;
  tree t;

  /* The following code implements the floating point to integer
     conversion rules required by the Java Language Specification,
     that IEEE NaNs are mapped to zero and values that overflow
     the target precision saturate, i.e. values greater than
     INT_MAX are mapped to INT_MAX, and values less than INT_MIN
     are mapped to INT_MIN.  These semantics are allowed by the
     C and C++ standards that simply state that the behavior of
     FP-to-integer conversion is unspecified upon overflow.  */

  HOST_WIDE_INT high, low;
  REAL_VALUE_TYPE r;
  REAL_VALUE_TYPE x = TREE_REAL_CST (arg1);

  switch (code)
    {
    case FIX_TRUNC_EXPR:
      real_trunc (&r, VOIDmode, &x);
      break;

    default:
      gcc_unreachable ();
    }

  /* If R is NaN, return zero and show we have an overflow.  */
  if (REAL_VALUE_ISNAN (r))
    {
      overflow = 1;
      high = 0;
      low = 0;
    }

  /* See if R is less than the lower bound or greater than the
     upper bound.  */

  if (! overflow)
    {
      tree lt = TYPE_MIN_VALUE (type);
      REAL_VALUE_TYPE l = real_value_from_int_cst (NULL_TREE, lt);
      if (REAL_VALUES_LESS (r, l))
        {
          overflow = 1;
          high = TREE_INT_CST_HIGH (lt);
          low = TREE_INT_CST_LOW (lt);
        }
    }

  if (! overflow)
    {
      tree ut = TYPE_MAX_VALUE (type);
      if (ut)
        {
          REAL_VALUE_TYPE u = real_value_from_int_cst (NULL_TREE, ut);
          if (REAL_VALUES_LESS (u, r))
            {
              overflow = 1;
              high = TREE_INT_CST_HIGH (ut);
              low = TREE_INT_CST_LOW (ut);
            }
        }
    }

  if (! overflow)
    REAL_VALUE_TO_INT (&low, &high, r);

  t = force_fit_type_double (type, low, high, -1,
                             overflow | TREE_OVERFLOW (arg1),
                             false);
  return t;
}

1972 /* A subroutine of fold_convert_const handling conversions of a REAL_CST
1973 to another floating point type. */
1975 static tree
1976 fold_convert_const_real_from_real (tree type, tree arg1)
1978 REAL_VALUE_TYPE value;
1979 tree t;
1981 real_convert (&value, TYPE_MODE (type), &TREE_REAL_CST (arg1));
1982 t = build_real (type, value);
1984 TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1);
1985 return t;
1988 /* Attempt to fold type conversion operation CODE of expression ARG1 to
1989 type TYPE. If no simplification can be done return NULL_TREE. */
1991 static tree
1992 fold_convert_const (enum tree_code code, tree type, tree arg1)
1994 if (TREE_TYPE (arg1) == type)
1995 return arg1;
1997 if (POINTER_TYPE_P (type) || INTEGRAL_TYPE_P (type))
1999 if (TREE_CODE (arg1) == INTEGER_CST)
2000 return fold_convert_const_int_from_int (type, arg1);
2001 else if (TREE_CODE (arg1) == REAL_CST)
2002 return fold_convert_const_int_from_real (code, type, arg1);
2004 else if (TREE_CODE (type) == REAL_TYPE)
2006 if (TREE_CODE (arg1) == INTEGER_CST)
2007 return build_real_from_int_cst (type, arg1);
2008 if (TREE_CODE (arg1) == REAL_CST)
2009 return fold_convert_const_real_from_real (type, arg1);
2011 return NULL_TREE;
2014 /* Construct a vector of zero elements of vector type TYPE. */
2016 static tree
2017 build_zero_vector (tree type)
2019 tree elem, list;
2020 int i, units;
2022 elem = fold_convert_const (NOP_EXPR, TREE_TYPE (type), integer_zero_node);
2023 units = TYPE_VECTOR_SUBPARTS (type);
2025 list = NULL_TREE;
2026 for (i = 0; i < units; i++)
2027 list = tree_cons (NULL_TREE, elem, list);
2028 return build_vector (type, list);
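/* Illustration (editor's note): for a four-element integer vector type
   this yields the constant { 0, 0, 0, 0 }.  tree_cons prepends to the
   list, but since every element is the same zero node the ordering
   does not matter.  */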
2031 /* Convert expression ARG to type TYPE. Used by the middle-end for
2032 simple conversions in preference to calling the front-end's convert. */
2034 tree
2035 fold_convert (tree type, tree arg)
2037 tree orig = TREE_TYPE (arg);
2038 tree tem;
2040 if (type == orig)
2041 return arg;
2043 if (TREE_CODE (arg) == ERROR_MARK
2044 || TREE_CODE (type) == ERROR_MARK
2045 || TREE_CODE (orig) == ERROR_MARK)
2046 return error_mark_node;
2048 if (TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (orig)
2049 || lang_hooks.types_compatible_p (TYPE_MAIN_VARIANT (type),
2050 TYPE_MAIN_VARIANT (orig)))
2051 return fold_build1 (NOP_EXPR, type, arg);
2053 switch (TREE_CODE (type))
2055 case INTEGER_TYPE: case ENUMERAL_TYPE: case BOOLEAN_TYPE:
2056 case POINTER_TYPE: case REFERENCE_TYPE:
2057 case OFFSET_TYPE:
2058 if (TREE_CODE (arg) == INTEGER_CST)
2060 tem = fold_convert_const (NOP_EXPR, type, arg);
2061 if (tem != NULL_TREE)
2062 return tem;
2064 if (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig)
2065 || TREE_CODE (orig) == OFFSET_TYPE)
2066 return fold_build1 (NOP_EXPR, type, arg);
2067 if (TREE_CODE (orig) == COMPLEX_TYPE)
2069 tem = fold_build1 (REALPART_EXPR, TREE_TYPE (orig), arg);
2070 return fold_convert (type, tem);
2072 gcc_assert (TREE_CODE (orig) == VECTOR_TYPE
2073 && tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (orig)));
2074 return fold_build1 (NOP_EXPR, type, arg);
2076 case REAL_TYPE:
2077 if (TREE_CODE (arg) == INTEGER_CST)
2079 tem = fold_convert_const (FLOAT_EXPR, type, arg);
2080 if (tem != NULL_TREE)
2081 return tem;
2083 else if (TREE_CODE (arg) == REAL_CST)
2085 tem = fold_convert_const (NOP_EXPR, type, arg);
2086 if (tem != NULL_TREE)
2087 return tem;
2090 switch (TREE_CODE (orig))
2092 case INTEGER_TYPE:
2093 case BOOLEAN_TYPE: case ENUMERAL_TYPE:
2094 case POINTER_TYPE: case REFERENCE_TYPE:
2095 return fold_build1 (FLOAT_EXPR, type, arg);
2097 case REAL_TYPE:
2098 return fold_build1 (NOP_EXPR, type, arg);
2100 case COMPLEX_TYPE:
2101 tem = fold_build1 (REALPART_EXPR, TREE_TYPE (orig), arg);
2102 return fold_convert (type, tem);
2104 default:
2105 gcc_unreachable ();
2108 case COMPLEX_TYPE:
2109 switch (TREE_CODE (orig))
2111 case INTEGER_TYPE:
2112 case BOOLEAN_TYPE: case ENUMERAL_TYPE:
2113 case POINTER_TYPE: case REFERENCE_TYPE:
2114 case REAL_TYPE:
2115 return build2 (COMPLEX_EXPR, type,
2116 fold_convert (TREE_TYPE (type), arg),
2117 fold_convert (TREE_TYPE (type), integer_zero_node));
2118 case COMPLEX_TYPE:
2120 tree rpart, ipart;
2122 if (TREE_CODE (arg) == COMPLEX_EXPR)
2124 rpart = fold_convert (TREE_TYPE (type), TREE_OPERAND (arg, 0));
2125 ipart = fold_convert (TREE_TYPE (type), TREE_OPERAND (arg, 1));
2126 return fold_build2 (COMPLEX_EXPR, type, rpart, ipart);
2129 arg = save_expr (arg);
2130 rpart = fold_build1 (REALPART_EXPR, TREE_TYPE (orig), arg);
2131 ipart = fold_build1 (IMAGPART_EXPR, TREE_TYPE (orig), arg);
2132 rpart = fold_convert (TREE_TYPE (type), rpart);
2133 ipart = fold_convert (TREE_TYPE (type), ipart);
2134 return fold_build2 (COMPLEX_EXPR, type, rpart, ipart);
2137 default:
2138 gcc_unreachable ();
2141 case VECTOR_TYPE:
2142 if (integer_zerop (arg))
2143 return build_zero_vector (type);
2144 gcc_assert (tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (orig)));
2145 gcc_assert (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig)
2146 || TREE_CODE (orig) == VECTOR_TYPE);
2147 return fold_build1 (VIEW_CONVERT_EXPR, type, arg);
2149 case VOID_TYPE:
2150 tem = fold_ignored_result (arg);
2151 if (TREE_CODE (tem) == GIMPLE_MODIFY_STMT)
2152 return tem;
2153 return fold_build1 (NOP_EXPR, type, tem);
2155 default:
2156 gcc_unreachable ();
2160 /* Return false if expr can be assumed not to be an lvalue, true
2161 otherwise. */
2163 static bool
2164 maybe_lvalue_p (tree x)
2166 /* We only need to wrap lvalue tree codes. */
2167 switch (TREE_CODE (x))
2169 case VAR_DECL:
2170 case PARM_DECL:
2171 case RESULT_DECL:
2172 case LABEL_DECL:
2173 case FUNCTION_DECL:
2174 case SSA_NAME:
2176 case COMPONENT_REF:
2177 case INDIRECT_REF:
2178 case ALIGN_INDIRECT_REF:
2179 case MISALIGNED_INDIRECT_REF:
2180 case ARRAY_REF:
2181 case ARRAY_RANGE_REF:
2182 case BIT_FIELD_REF:
2183 case OBJ_TYPE_REF:
2185 case REALPART_EXPR:
2186 case IMAGPART_EXPR:
2187 case PREINCREMENT_EXPR:
2188 case PREDECREMENT_EXPR:
2189 case SAVE_EXPR:
2190 case TRY_CATCH_EXPR:
2191 case WITH_CLEANUP_EXPR:
2192 case COMPOUND_EXPR:
2193 case MODIFY_EXPR:
2194 case GIMPLE_MODIFY_STMT:
2195 case TARGET_EXPR:
2196 case COND_EXPR:
2197 case BIND_EXPR:
2198 case MIN_EXPR:
2199 case MAX_EXPR:
2200 break;
2202 default:
2203 /* Assume the worst for front-end tree codes. */
2204 if ((int)TREE_CODE (x) >= NUM_TREE_CODES)
2205 break;
2206 return false;
2209 return true;
2212 /* Return an expr equal to X but certainly not valid as an lvalue. */
2214 tree
2215 non_lvalue (tree x)
2217 /* While we are in GIMPLE, NON_LVALUE_EXPR doesn't mean anything to
2218 us. */
2219 if (in_gimple_form)
2220 return x;
2222 if (! maybe_lvalue_p (x))
2223 return x;
2224 return build1 (NON_LVALUE_EXPR, TREE_TYPE (x), x);
2227 /* Nonzero means lvalues are limited to those valid in pedantic ANSI C.
2228 Zero means allow extended lvalues. */
2230 int pedantic_lvalues;
2232 /* When pedantic, return an expr equal to X but certainly not valid as a
2233 pedantic lvalue. Otherwise, return X. */
2235 static tree
2236 pedantic_non_lvalue (tree x)
2238 if (pedantic_lvalues)
2239 return non_lvalue (x);
2240 else
2241 return x;
2244 /* Given a tree comparison code, return the code that is the logical inverse
2245 of the given code. It is not safe to do this for floating-point
2246 comparisons, except for NE_EXPR and EQ_EXPR, so we receive an HONOR_NANS
2247 flag as well: if reversing the comparison is unsafe, return ERROR_MARK. */
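/* Example (editor's note): with HONOR_NANS set, the inverse of LT_EXPR
   is UNGE_EXPR rather than GE_EXPR, because a < b and a >= b are both
   false when either operand is a NaN.  Under -ftrapping-math we give
   up instead, since swapping an ordered comparison for an unordered
   one could change which operations raise FP exceptions.  */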
2249 enum tree_code
2250 invert_tree_comparison (enum tree_code code, bool honor_nans)
2252 if (honor_nans && flag_trapping_math)
2253 return ERROR_MARK;
2255 switch (code)
2257 case EQ_EXPR:
2258 return NE_EXPR;
2259 case NE_EXPR:
2260 return EQ_EXPR;
2261 case GT_EXPR:
2262 return honor_nans ? UNLE_EXPR : LE_EXPR;
2263 case GE_EXPR:
2264 return honor_nans ? UNLT_EXPR : LT_EXPR;
2265 case LT_EXPR:
2266 return honor_nans ? UNGE_EXPR : GE_EXPR;
2267 case LE_EXPR:
2268 return honor_nans ? UNGT_EXPR : GT_EXPR;
2269 case LTGT_EXPR:
2270 return UNEQ_EXPR;
2271 case UNEQ_EXPR:
2272 return LTGT_EXPR;
2273 case UNGT_EXPR:
2274 return LE_EXPR;
2275 case UNGE_EXPR:
2276 return LT_EXPR;
2277 case UNLT_EXPR:
2278 return GE_EXPR;
2279 case UNLE_EXPR:
2280 return GT_EXPR;
2281 case ORDERED_EXPR:
2282 return UNORDERED_EXPR;
2283 case UNORDERED_EXPR:
2284 return ORDERED_EXPR;
2285 default:
2286 gcc_unreachable ();
2290 /* Similar, but return the comparison that results if the operands are
2291 swapped. This is safe for floating-point. */
2293 enum tree_code
2294 swap_tree_comparison (enum tree_code code)
2296 switch (code)
2298 case EQ_EXPR:
2299 case NE_EXPR:
2300 case ORDERED_EXPR:
2301 case UNORDERED_EXPR:
2302 case LTGT_EXPR:
2303 case UNEQ_EXPR:
2304 return code;
2305 case GT_EXPR:
2306 return LT_EXPR;
2307 case GE_EXPR:
2308 return LE_EXPR;
2309 case LT_EXPR:
2310 return GT_EXPR;
2311 case LE_EXPR:
2312 return GE_EXPR;
2313 case UNGT_EXPR:
2314 return UNLT_EXPR;
2315 case UNGE_EXPR:
2316 return UNLE_EXPR;
2317 case UNLT_EXPR:
2318 return UNGT_EXPR;
2319 case UNLE_EXPR:
2320 return UNGE_EXPR;
2321 default:
2322 gcc_unreachable ();
2327 /* Convert a comparison tree code from an enum tree_code representation
2328 into a compcode bit-based encoding. This function is the inverse of
2329 compcode_to_comparison. */
2331 static enum comparison_code
2332 comparison_to_compcode (enum tree_code code)
2334 switch (code)
2336 case LT_EXPR:
2337 return COMPCODE_LT;
2338 case EQ_EXPR:
2339 return COMPCODE_EQ;
2340 case LE_EXPR:
2341 return COMPCODE_LE;
2342 case GT_EXPR:
2343 return COMPCODE_GT;
2344 case NE_EXPR:
2345 return COMPCODE_NE;
2346 case GE_EXPR:
2347 return COMPCODE_GE;
2348 case ORDERED_EXPR:
2349 return COMPCODE_ORD;
2350 case UNORDERED_EXPR:
2351 return COMPCODE_UNORD;
2352 case UNLT_EXPR:
2353 return COMPCODE_UNLT;
2354 case UNEQ_EXPR:
2355 return COMPCODE_UNEQ;
2356 case UNLE_EXPR:
2357 return COMPCODE_UNLE;
2358 case UNGT_EXPR:
2359 return COMPCODE_UNGT;
2360 case LTGT_EXPR:
2361 return COMPCODE_LTGT;
2362 case UNGE_EXPR:
2363 return COMPCODE_UNGE;
2364 default:
2365 gcc_unreachable ();
2369 /* Convert a compcode bit-based encoding of a comparison operator back
2370 to GCC's enum tree_code representation. This function is the
2371 inverse of comparison_to_compcode. */
2373 static enum tree_code
2374 compcode_to_comparison (enum comparison_code code)
2376 switch (code)
2378 case COMPCODE_LT:
2379 return LT_EXPR;
2380 case COMPCODE_EQ:
2381 return EQ_EXPR;
2382 case COMPCODE_LE:
2383 return LE_EXPR;
2384 case COMPCODE_GT:
2385 return GT_EXPR;
2386 case COMPCODE_NE:
2387 return NE_EXPR;
2388 case COMPCODE_GE:
2389 return GE_EXPR;
2390 case COMPCODE_ORD:
2391 return ORDERED_EXPR;
2392 case COMPCODE_UNORD:
2393 return UNORDERED_EXPR;
2394 case COMPCODE_UNLT:
2395 return UNLT_EXPR;
2396 case COMPCODE_UNEQ:
2397 return UNEQ_EXPR;
2398 case COMPCODE_UNLE:
2399 return UNLE_EXPR;
2400 case COMPCODE_UNGT:
2401 return UNGT_EXPR;
2402 case COMPCODE_LTGT:
2403 return LTGT_EXPR;
2404 case COMPCODE_UNGE:
2405 return UNGE_EXPR;
2406 default:
2407 gcc_unreachable ();
2411 /* Return a tree for the comparison which is the combination of
2412 doing the AND or OR (depending on CODE) of the two operations LCODE
2413 and RCODE on the identical operands LL_ARG and LR_ARG. Take into account
2414 the possibility of trapping if the mode has NaNs, and return NULL_TREE
2415 if this makes the transformation invalid. */
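/* Worked example (editor's illustration): for (a < b) || (a == b) on
   the same integer operands, lcompcode is COMPCODE_LT (1) and
   rcompcode is COMPCODE_EQ (2); the TRUTH_OR case below ORs them into
   COMPCODE_LE (3), so the pair folds to the single test a <= b.  */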
2417 tree
2418 combine_comparisons (enum tree_code code, enum tree_code lcode,
2419 enum tree_code rcode, tree truth_type,
2420 tree ll_arg, tree lr_arg)
2422 bool honor_nans = HONOR_NANS (TYPE_MODE (TREE_TYPE (ll_arg)));
2423 enum comparison_code lcompcode = comparison_to_compcode (lcode);
2424 enum comparison_code rcompcode = comparison_to_compcode (rcode);
2425 enum comparison_code compcode;
2427 switch (code)
2429 case TRUTH_AND_EXPR: case TRUTH_ANDIF_EXPR:
2430 compcode = lcompcode & rcompcode;
2431 break;
2433 case TRUTH_OR_EXPR: case TRUTH_ORIF_EXPR:
2434 compcode = lcompcode | rcompcode;
2435 break;
2437 default:
2438 return NULL_TREE;
2441 if (!honor_nans)
2443 /* Eliminate unordered comparisons, as well as LTGT and ORD
2444 which are not used unless the mode has NaNs. */
2445 compcode &= ~COMPCODE_UNORD;
2446 if (compcode == COMPCODE_LTGT)
2447 compcode = COMPCODE_NE;
2448 else if (compcode == COMPCODE_ORD)
2449 compcode = COMPCODE_TRUE;
2451 else if (flag_trapping_math)
2453 /* Check that the original operation and the optimized ones will trap
2454 under the same condition. */
2455 bool ltrap = (lcompcode & COMPCODE_UNORD) == 0
2456 && (lcompcode != COMPCODE_EQ)
2457 && (lcompcode != COMPCODE_ORD);
2458 bool rtrap = (rcompcode & COMPCODE_UNORD) == 0
2459 && (rcompcode != COMPCODE_EQ)
2460 && (rcompcode != COMPCODE_ORD);
2461 bool trap = (compcode & COMPCODE_UNORD) == 0
2462 && (compcode != COMPCODE_EQ)
2463 && (compcode != COMPCODE_ORD);
2465 /* In a short-circuited boolean expression the LHS might be
2466 such that the RHS, if evaluated, will never trap. For
2467 example, in ORD (x, y) && (x < y), we evaluate the RHS only
2468 if neither x nor y is NaN. (This is a mixed blessing: for
2469 example, the expression above will never trap, hence
2470 optimizing it to x < y would be invalid). */
2471 if ((code == TRUTH_ORIF_EXPR && (lcompcode & COMPCODE_UNORD))
2472 || (code == TRUTH_ANDIF_EXPR && !(lcompcode & COMPCODE_UNORD)))
2473 rtrap = false;
2475 /* If the comparison was short-circuited, and only the RHS
2476 trapped, we may now generate a spurious trap. */
2477 if (rtrap && !ltrap
2478 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR))
2479 return NULL_TREE;
2481 /* If we changed the conditions that cause a trap, we lose. */
2482 if ((ltrap || rtrap) != trap)
2483 return NULL_TREE;
2486 if (compcode == COMPCODE_TRUE)
2487 return constant_boolean_node (true, truth_type);
2488 else if (compcode == COMPCODE_FALSE)
2489 return constant_boolean_node (false, truth_type);
2490 else
2491 return fold_build2 (compcode_to_comparison (compcode),
2492 truth_type, ll_arg, lr_arg);
2495 /* Return nonzero if CODE is a tree code that represents a truth value. */
2497 static int
2498 truth_value_p (enum tree_code code)
2500 return (TREE_CODE_CLASS (code) == tcc_comparison
2501 || code == TRUTH_AND_EXPR || code == TRUTH_ANDIF_EXPR
2502 || code == TRUTH_OR_EXPR || code == TRUTH_ORIF_EXPR
2503 || code == TRUTH_XOR_EXPR || code == TRUTH_NOT_EXPR);
2506 /* Return nonzero if two operands (typically of the same tree node)
2507 are necessarily equal. If either argument has side-effects this
2508 function returns zero. FLAGS modifies behavior as follows:
2510 If OEP_ONLY_CONST is set, only return nonzero for constants.
2511 This function tests whether the operands are indistinguishable;
2512 it does not test whether they are equal using C's == operation.
2513 The distinction is important for IEEE floating point, because
2514 (1) -0.0 and 0.0 are distinguishable, but -0.0==0.0, and
2515 (2) two NaNs may be indistinguishable, but NaN!=NaN.
2517 If OEP_ONLY_CONST is unset, a VAR_DECL is considered equal to itself
2518 even though it may hold multiple values during a function.
2519 This is because a GCC tree node guarantees that nothing else is
2520 executed between the evaluation of its "operands" (which may often
2521 be evaluated in arbitrary order). Hence if the operands themselves
2522 don't side-effect, the VAR_DECLs, PARM_DECLs etc... must hold the
2523 same value in each operand/subexpression. Hence leaving OEP_ONLY_CONST
2524 unset means assuming isochronic (or instantaneous) tree equivalence.
2525 Unless comparing arbitrary expression trees, such as from different
2526 statements, this flag can usually be left unset.
2528 If OEP_PURE_SAME is set, then pure functions with identical arguments
2529 are considered the same. It is used when the caller has other ways
2530 to ensure that global memory is unchanged in between. */
2532 int
2533 operand_equal_p (tree arg0, tree arg1, unsigned int flags)
2535 /* If either is ERROR_MARK, they aren't equal. */
2536 if (TREE_CODE (arg0) == ERROR_MARK || TREE_CODE (arg1) == ERROR_MARK)
2537 return 0;
2539 /* If both types don't have the same signedness, then we can't consider
2540 them equal. We must check this before the STRIP_NOPS calls
2541 because they may change the signedness of the arguments. */
2542 if (TYPE_UNSIGNED (TREE_TYPE (arg0)) != TYPE_UNSIGNED (TREE_TYPE (arg1)))
2543 return 0;
2545 /* If both types don't have the same precision, then it is not safe
2546 to strip NOPs. */
2547 if (TYPE_PRECISION (TREE_TYPE (arg0)) != TYPE_PRECISION (TREE_TYPE (arg1)))
2548 return 0;
2550 STRIP_NOPS (arg0);
2551 STRIP_NOPS (arg1);
2553 /* In case both args are comparisons but with different comparison
2554 code, try to swap the comparison operands of one arg to produce
2555 a match and compare that variant. */
2556 if (TREE_CODE (arg0) != TREE_CODE (arg1)
2557 && COMPARISON_CLASS_P (arg0)
2558 && COMPARISON_CLASS_P (arg1))
2560 enum tree_code swap_code = swap_tree_comparison (TREE_CODE (arg1));
2562 if (TREE_CODE (arg0) == swap_code)
2563 return operand_equal_p (TREE_OPERAND (arg0, 0),
2564 TREE_OPERAND (arg1, 1), flags)
2565 && operand_equal_p (TREE_OPERAND (arg0, 1),
2566 TREE_OPERAND (arg1, 0), flags);
2569 if (TREE_CODE (arg0) != TREE_CODE (arg1)
2570 /* This is needed for conversions and for COMPONENT_REF.
2571 Might as well play it safe and always test this. */
2572 || TREE_CODE (TREE_TYPE (arg0)) == ERROR_MARK
2573 || TREE_CODE (TREE_TYPE (arg1)) == ERROR_MARK
2574 || TYPE_MODE (TREE_TYPE (arg0)) != TYPE_MODE (TREE_TYPE (arg1)))
2575 return 0;
2577 /* If ARG0 and ARG1 are the same SAVE_EXPR, they are necessarily equal.
2578 We don't care about side effects in that case because the SAVE_EXPR
2579 takes care of that for us. In all other cases, two expressions are
2580 equal if they have no side effects. If we have two identical
2581 expressions with side effects that should be treated the same due
2582 to the only side effects being identical SAVE_EXPR's, that will
2583 be detected in the recursive calls below. */
2584 if (arg0 == arg1 && ! (flags & OEP_ONLY_CONST)
2585 && (TREE_CODE (arg0) == SAVE_EXPR
2586 || (! TREE_SIDE_EFFECTS (arg0) && ! TREE_SIDE_EFFECTS (arg1))))
2587 return 1;
2589 /* Next handle constant cases, those for which we can return 1 even
2590 if ONLY_CONST is set. */
2591 if (TREE_CONSTANT (arg0) && TREE_CONSTANT (arg1))
2592 switch (TREE_CODE (arg0))
2594 case INTEGER_CST:
2595 return tree_int_cst_equal (arg0, arg1);
2597 case REAL_CST:
2598 if (REAL_VALUES_IDENTICAL (TREE_REAL_CST (arg0),
2599 TREE_REAL_CST (arg1)))
2600 return 1;
2603 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0))))
2605 /* If we do not distinguish between signed and unsigned zero,
2606 consider them equal. */
2607 if (real_zerop (arg0) && real_zerop (arg1))
2608 return 1;
2610 return 0;
2612 case VECTOR_CST:
2614 tree v1, v2;
2616 v1 = TREE_VECTOR_CST_ELTS (arg0);
2617 v2 = TREE_VECTOR_CST_ELTS (arg1);
2618 while (v1 && v2)
2620 if (!operand_equal_p (TREE_VALUE (v1), TREE_VALUE (v2),
2621 flags))
2622 return 0;
2623 v1 = TREE_CHAIN (v1);
2624 v2 = TREE_CHAIN (v2);
2627 return v1 == v2;
2630 case COMPLEX_CST:
2631 return (operand_equal_p (TREE_REALPART (arg0), TREE_REALPART (arg1),
2632 flags)
2633 && operand_equal_p (TREE_IMAGPART (arg0), TREE_IMAGPART (arg1),
2634 flags));
2636 case STRING_CST:
2637 return (TREE_STRING_LENGTH (arg0) == TREE_STRING_LENGTH (arg1)
2638 && ! memcmp (TREE_STRING_POINTER (arg0),
2639 TREE_STRING_POINTER (arg1),
2640 TREE_STRING_LENGTH (arg0)));
2642 case ADDR_EXPR:
2643 return operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 0),
2644 0);
2645 default:
2646 break;
2649 if (flags & OEP_ONLY_CONST)
2650 return 0;
2652 /* Define macros to test an operand from arg0 and arg1 for equality and a
2653 variant that allows null and views null as being different from any
2654 non-null value. In the latter case, if either is null, then both
2655 must be; otherwise, do the normal comparison. */
2656 #define OP_SAME(N) operand_equal_p (TREE_OPERAND (arg0, N), \
2657 TREE_OPERAND (arg1, N), flags)
2659 #define OP_SAME_WITH_NULL(N) \
2660 ((!TREE_OPERAND (arg0, N) || !TREE_OPERAND (arg1, N)) \
2661 ? TREE_OPERAND (arg0, N) == TREE_OPERAND (arg1, N) : OP_SAME (N))
2663 switch (TREE_CODE_CLASS (TREE_CODE (arg0)))
2665 case tcc_unary:
2666 /* Two conversions are equal only if signedness and modes match. */
2667 switch (TREE_CODE (arg0))
2669 case NOP_EXPR:
2670 case CONVERT_EXPR:
2671 case FIX_TRUNC_EXPR:
2672 if (TYPE_UNSIGNED (TREE_TYPE (arg0))
2673 != TYPE_UNSIGNED (TREE_TYPE (arg1)))
2674 return 0;
2675 break;
2676 default:
2677 break;
2680 return OP_SAME (0);
2683 case tcc_comparison:
2684 case tcc_binary:
2685 if (OP_SAME (0) && OP_SAME (1))
2686 return 1;
2688 /* For commutative ops, allow the other order. */
2689 return (commutative_tree_code (TREE_CODE (arg0))
2690 && operand_equal_p (TREE_OPERAND (arg0, 0),
2691 TREE_OPERAND (arg1, 1), flags)
2692 && operand_equal_p (TREE_OPERAND (arg0, 1),
2693 TREE_OPERAND (arg1, 0), flags));
2695 case tcc_reference:
2696 /* If either of the pointer (or reference) expressions we are
2697 dereferencing contain a side effect, these cannot be equal. */
2698 if (TREE_SIDE_EFFECTS (arg0)
2699 || TREE_SIDE_EFFECTS (arg1))
2700 return 0;
2702 switch (TREE_CODE (arg0))
2704 case INDIRECT_REF:
2705 case ALIGN_INDIRECT_REF:
2706 case MISALIGNED_INDIRECT_REF:
2707 case REALPART_EXPR:
2708 case IMAGPART_EXPR:
2709 return OP_SAME (0);
2711 case ARRAY_REF:
2712 case ARRAY_RANGE_REF:
2713 /* Operands 2 and 3 may be null. */
2714 return (OP_SAME (0)
2715 && OP_SAME (1)
2716 && OP_SAME_WITH_NULL (2)
2717 && OP_SAME_WITH_NULL (3));
2719 case COMPONENT_REF:
2720 /* Handle operand 2 the same as for ARRAY_REF. Operand 0
2721 may be NULL when we're called to compare MEM_EXPRs. */
2722 return OP_SAME_WITH_NULL (0)
2723 && OP_SAME (1)
2724 && OP_SAME_WITH_NULL (2);
2726 case BIT_FIELD_REF:
2727 return OP_SAME (0) && OP_SAME (1) && OP_SAME (2);
2729 default:
2730 return 0;
2733 case tcc_expression:
2734 switch (TREE_CODE (arg0))
2736 case ADDR_EXPR:
2737 case TRUTH_NOT_EXPR:
2738 return OP_SAME (0);
2740 case TRUTH_ANDIF_EXPR:
2741 case TRUTH_ORIF_EXPR:
2742 return OP_SAME (0) && OP_SAME (1);
2744 case TRUTH_AND_EXPR:
2745 case TRUTH_OR_EXPR:
2746 case TRUTH_XOR_EXPR:
2747 if (OP_SAME (0) && OP_SAME (1))
2748 return 1;
2750 /* Otherwise take into account this is a commutative operation. */
2751 return (operand_equal_p (TREE_OPERAND (arg0, 0),
2752 TREE_OPERAND (arg1, 1), flags)
2753 && operand_equal_p (TREE_OPERAND (arg0, 1),
2754 TREE_OPERAND (arg1, 0), flags));
2756 case CALL_EXPR:
2757 /* If the CALL_EXPRs call different functions, then they
2758 clearly cannot be equal. */
2759 if (!OP_SAME (0))
2760 return 0;
2763 unsigned int cef = call_expr_flags (arg0);
2764 if (flags & OEP_PURE_SAME)
2765 cef &= ECF_CONST | ECF_PURE;
2766 else
2767 cef &= ECF_CONST;
2768 if (!cef)
2769 return 0;
2772 /* Now see if all the arguments are the same. operand_equal_p
2773 does not handle TREE_LIST, so we walk the operands here
2774 feeding them to operand_equal_p. */
2775 arg0 = TREE_OPERAND (arg0, 1);
2776 arg1 = TREE_OPERAND (arg1, 1);
2777 while (arg0 && arg1)
2779 if (! operand_equal_p (TREE_VALUE (arg0), TREE_VALUE (arg1),
2780 flags))
2781 return 0;
2783 arg0 = TREE_CHAIN (arg0);
2784 arg1 = TREE_CHAIN (arg1);
2787 /* If we get here and both argument lists are exhausted
2788 then the CALL_EXPRs are equal. */
2789 return ! (arg0 || arg1);
2791 default:
2792 return 0;
2795 case tcc_declaration:
2796 /* Consider __builtin_sqrt equal to sqrt. */
2797 return (TREE_CODE (arg0) == FUNCTION_DECL
2798 && DECL_BUILT_IN (arg0) && DECL_BUILT_IN (arg1)
2799 && DECL_BUILT_IN_CLASS (arg0) == DECL_BUILT_IN_CLASS (arg1)
2800 && DECL_FUNCTION_CODE (arg0) == DECL_FUNCTION_CODE (arg1));
2802 default:
2803 return 0;
2806 #undef OP_SAME
2807 #undef OP_SAME_WITH_NULL
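/* Usage sketch (editor's note): operand_equal_p is what lets fold
   recognize the two occurrences of a + b in (a + b) - (a + b) as the
   same value, since the shared VAR_DECLs compare pointer-equal and
   nothing has side effects.  With OEP_ONLY_CONST, only constants such
   as two matching INTEGER_CSTs would be accepted.  */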
2810 /* Similar to operand_equal_p, but see if ARG0 might have been made by
2811 shorten_compare from ARG1 when ARG1 was being compared with OTHER.
2813 When in doubt, return 0. */
2815 static int
2816 operand_equal_for_comparison_p (tree arg0, tree arg1, tree other)
2818 int unsignedp1, unsignedpo;
2819 tree primarg0, primarg1, primother;
2820 unsigned int correct_width;
2822 if (operand_equal_p (arg0, arg1, 0))
2823 return 1;
2825 if (! INTEGRAL_TYPE_P (TREE_TYPE (arg0))
2826 || ! INTEGRAL_TYPE_P (TREE_TYPE (arg1)))
2827 return 0;
2829 /* Discard any conversions that don't change the modes of ARG0 and ARG1
2830 and see if the inner values are the same. This removes any
2831 signedness comparison, which doesn't matter here. */
2832 primarg0 = arg0, primarg1 = arg1;
2833 STRIP_NOPS (primarg0);
2834 STRIP_NOPS (primarg1);
2835 if (operand_equal_p (primarg0, primarg1, 0))
2836 return 1;
2838 /* Duplicate what shorten_compare does to ARG1 and see if that gives the
2839 actual comparison operand, ARG0.
2841 First throw away any conversions to wider types
2842 already present in the operands. */
2844 primarg1 = get_narrower (arg1, &unsignedp1);
2845 primother = get_narrower (other, &unsignedpo);
2847 correct_width = TYPE_PRECISION (TREE_TYPE (arg1));
2848 if (unsignedp1 == unsignedpo
2849 && TYPE_PRECISION (TREE_TYPE (primarg1)) < correct_width
2850 && TYPE_PRECISION (TREE_TYPE (primother)) < correct_width)
2852 tree type = TREE_TYPE (arg0);
2854 /* Make sure shorter operand is extended the right way
2855 to match the longer operand. */
2856 primarg1 = fold_convert (lang_hooks.types.signed_or_unsigned_type
2857 (unsignedp1, TREE_TYPE (primarg1)), primarg1);
2859 if (operand_equal_p (arg0, fold_convert (type, primarg1), 0))
2860 return 1;
2863 return 0;
2866 /* See if ARG is an expression that is either a comparison or is performing
2867 arithmetic on comparisons. The comparisons must only be comparing
2868 two different values, which will be stored in *CVAL1 and *CVAL2; if
2869 they are nonzero it means that some operands have already been found.
2870 No variables may be used anywhere else in the expression except in the
2871 comparisons. If SAVE_P is true it means we removed a SAVE_EXPR around
2872 the expression and save_expr needs to be called with CVAL1 and CVAL2.
2874 If this is true, return 1. Otherwise, return zero. */
2876 static int
2877 twoval_comparison_p (tree arg, tree *cval1, tree *cval2, int *save_p)
2879 enum tree_code code = TREE_CODE (arg);
2880 enum tree_code_class class = TREE_CODE_CLASS (code);
2882 /* We can handle some of the tcc_expression cases here. */
2883 if (class == tcc_expression && code == TRUTH_NOT_EXPR)
2884 class = tcc_unary;
2885 else if (class == tcc_expression
2886 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR
2887 || code == COMPOUND_EXPR))
2888 class = tcc_binary;
2890 else if (class == tcc_expression && code == SAVE_EXPR
2891 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (arg, 0)))
2893 /* If we've already found a CVAL1 or CVAL2, this expression is
2894 too complex to handle. */
2895 if (*cval1 || *cval2)
2896 return 0;
2898 class = tcc_unary;
2899 *save_p = 1;
2902 switch (class)
2904 case tcc_unary:
2905 return twoval_comparison_p (TREE_OPERAND (arg, 0), cval1, cval2, save_p);
2907 case tcc_binary:
2908 return (twoval_comparison_p (TREE_OPERAND (arg, 0), cval1, cval2, save_p)
2909 && twoval_comparison_p (TREE_OPERAND (arg, 1),
2910 cval1, cval2, save_p));
2912 case tcc_constant:
2913 return 1;
2915 case tcc_expression:
2916 if (code == COND_EXPR)
2917 return (twoval_comparison_p (TREE_OPERAND (arg, 0),
2918 cval1, cval2, save_p)
2919 && twoval_comparison_p (TREE_OPERAND (arg, 1),
2920 cval1, cval2, save_p)
2921 && twoval_comparison_p (TREE_OPERAND (arg, 2),
2922 cval1, cval2, save_p));
2923 return 0;
2925 case tcc_comparison:
2926 /* First see if we can handle the first operand, then the second. For
2927 the second operand, we know *CVAL1 can't be zero. It must be that
2928 one side of the comparison is each of the values; test for the
2929 case where this isn't true by failing if the two operands
2930 are the same. */
2932 if (operand_equal_p (TREE_OPERAND (arg, 0),
2933 TREE_OPERAND (arg, 1), 0))
2934 return 0;
2936 if (*cval1 == 0)
2937 *cval1 = TREE_OPERAND (arg, 0);
2938 else if (operand_equal_p (*cval1, TREE_OPERAND (arg, 0), 0))
2940 else if (*cval2 == 0)
2941 *cval2 = TREE_OPERAND (arg, 0);
2942 else if (operand_equal_p (*cval2, TREE_OPERAND (arg, 0), 0))
2944 else
2945 return 0;
2947 if (operand_equal_p (*cval1, TREE_OPERAND (arg, 1), 0))
2949 else if (*cval2 == 0)
2950 *cval2 = TREE_OPERAND (arg, 1);
2951 else if (operand_equal_p (*cval2, TREE_OPERAND (arg, 1), 0))
2953 else
2954 return 0;
2956 return 1;
2958 default:
2959 return 0;
2963 /* ARG is a tree that is known to contain just arithmetic operations and
2964 comparisons. Evaluate the operations in the tree substituting NEW0 for
2965 any occurrence of OLD0 as an operand of a comparison and likewise for
2966 NEW1 and OLD1. */
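/* Example (editor's illustration): eval_subst on (a < b) | (a == b)
   with OLD0 = a, NEW0 = 1, OLD1 = b, NEW1 = 0 rebuilds the tree as
   (1 < 0) | (1 == 0).  Only operands of the comparisons are replaced;
   the arithmetic structure around them is preserved.  */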
2968 static tree
2969 eval_subst (tree arg, tree old0, tree new0, tree old1, tree new1)
2971 tree type = TREE_TYPE (arg);
2972 enum tree_code code = TREE_CODE (arg);
2973 enum tree_code_class class = TREE_CODE_CLASS (code);
2975 /* We can handle some of the tcc_expression cases here. */
2976 if (class == tcc_expression && code == TRUTH_NOT_EXPR)
2977 class = tcc_unary;
2978 else if (class == tcc_expression
2979 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR))
2980 class = tcc_binary;
2982 switch (class)
2984 case tcc_unary:
2985 return fold_build1 (code, type,
2986 eval_subst (TREE_OPERAND (arg, 0),
2987 old0, new0, old1, new1));
2989 case tcc_binary:
2990 return fold_build2 (code, type,
2991 eval_subst (TREE_OPERAND (arg, 0),
2992 old0, new0, old1, new1),
2993 eval_subst (TREE_OPERAND (arg, 1),
2994 old0, new0, old1, new1));
2996 case tcc_expression:
2997 switch (code)
2999 case SAVE_EXPR:
3000 return eval_subst (TREE_OPERAND (arg, 0), old0, new0, old1, new1);
3002 case COMPOUND_EXPR:
3003 return eval_subst (TREE_OPERAND (arg, 1), old0, new0, old1, new1);
3005 case COND_EXPR:
3006 return fold_build3 (code, type,
3007 eval_subst (TREE_OPERAND (arg, 0),
3008 old0, new0, old1, new1),
3009 eval_subst (TREE_OPERAND (arg, 1),
3010 old0, new0, old1, new1),
3011 eval_subst (TREE_OPERAND (arg, 2),
3012 old0, new0, old1, new1));
3013 default:
3014 break;
3016 /* Fall through - ??? */
3018 case tcc_comparison:
3020 tree arg0 = TREE_OPERAND (arg, 0);
3021 tree arg1 = TREE_OPERAND (arg, 1);
3023 /* We need to check both for exact equality and tree equality. The
3024 former will be true if the operand has a side-effect. In that
3025 case, we know the operand occurred exactly once. */
3027 if (arg0 == old0 || operand_equal_p (arg0, old0, 0))
3028 arg0 = new0;
3029 else if (arg0 == old1 || operand_equal_p (arg0, old1, 0))
3030 arg0 = new1;
3032 if (arg1 == old0 || operand_equal_p (arg1, old0, 0))
3033 arg1 = new0;
3034 else if (arg1 == old1 || operand_equal_p (arg1, old1, 0))
3035 arg1 = new1;
3037 return fold_build2 (code, type, arg0, arg1);
3040 default:
3041 return arg;
3045 /* Return a tree for the case when the result of an expression is RESULT
3046 converted to TYPE and OMITTED was previously an operand of the expression
3047 but is now not needed (e.g., we folded OMITTED * 0).
3049 If OMITTED has side effects, we must evaluate it. Otherwise, just do
3050 the conversion of RESULT to TYPE. */
3052 tree
3053 omit_one_operand (tree type, tree result, tree omitted)
3055 tree t = fold_convert (type, result);
3057 if (TREE_SIDE_EFFECTS (omitted))
3058 return build2 (COMPOUND_EXPR, type, fold_ignored_result (omitted), t);
3060 return non_lvalue (t);
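/* Example (editor's note): when fold rewrites f () * 0 to 0, the call
   f () is the OMITTED operand; because it has side effects the result
   is the COMPOUND_EXPR (f (), 0) rather than a bare 0, so the call is
   still evaluated.  */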
3063 /* Similar, but call pedantic_non_lvalue instead of non_lvalue. */
3065 static tree
3066 pedantic_omit_one_operand (tree type, tree result, tree omitted)
3068 tree t = fold_convert (type, result);
3070 if (TREE_SIDE_EFFECTS (omitted))
3071 return build2 (COMPOUND_EXPR, type, fold_ignored_result (omitted), t);
3073 return pedantic_non_lvalue (t);
3076 /* Return a tree for the case when the result of an expression is RESULT
3077 converted to TYPE and OMITTED1 and OMITTED2 were previously operands
3078 of the expression but are now not needed.
3080 If OMITTED1 or OMITTED2 has side effects, they must be evaluated.
3081 If both OMITTED1 and OMITTED2 have side effects, OMITTED1 is
3082 evaluated before OMITTED2. Otherwise, if neither has side effects,
3083 just do the conversion of RESULT to TYPE. */
3085 tree
3086 omit_two_operands (tree type, tree result, tree omitted1, tree omitted2)
3088 tree t = fold_convert (type, result);
3090 if (TREE_SIDE_EFFECTS (omitted2))
3091 t = build2 (COMPOUND_EXPR, type, omitted2, t);
3092 if (TREE_SIDE_EFFECTS (omitted1))
3093 t = build2 (COMPOUND_EXPR, type, omitted1, t);
3095 return TREE_CODE (t) != COMPOUND_EXPR ? non_lvalue (t) : t;
3099 /* Return a simplified tree node for the truth-negation of ARG. This
3100 never alters ARG itself. We assume that ARG is an operation that
3101 returns a truth value (0 or 1).
3103 FIXME: one would think we would fold the result, but it causes
3104 problems with the dominator optimizer. */
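/* Illustrations (editor's notes): the TRUTH_AND_EXPR and TRUTH_OR_EXPR
   cases below are De Morgan's laws, !(a && b) -> !a || !b and
   !(a || b) -> !a && !b, while a comparison such as a == b is simply
   inverted to a != b via invert_tree_comparison.  */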
3106 tree
3107 fold_truth_not_expr (tree arg)
3109 tree type = TREE_TYPE (arg);
3110 enum tree_code code = TREE_CODE (arg);
3112 /* If this is a comparison, we can simply invert it, except for
3113 floating-point non-equality comparisons, in which case we just
3114 enclose a TRUTH_NOT_EXPR around what we have. */
3116 if (TREE_CODE_CLASS (code) == tcc_comparison)
3118 tree op_type = TREE_TYPE (TREE_OPERAND (arg, 0));
3119 if (FLOAT_TYPE_P (op_type)
3120 && flag_trapping_math
3121 && code != ORDERED_EXPR && code != UNORDERED_EXPR
3122 && code != NE_EXPR && code != EQ_EXPR)
3123 return NULL_TREE;
3124 else
3126 code = invert_tree_comparison (code,
3127 HONOR_NANS (TYPE_MODE (op_type)));
3128 if (code == ERROR_MARK)
3129 return NULL_TREE;
3130 else
3131 return build2 (code, type,
3132 TREE_OPERAND (arg, 0), TREE_OPERAND (arg, 1));
3136 switch (code)
3138 case INTEGER_CST:
3139 return constant_boolean_node (integer_zerop (arg), type);
3141 case TRUTH_AND_EXPR:
3142 return build2 (TRUTH_OR_EXPR, type,
3143 invert_truthvalue (TREE_OPERAND (arg, 0)),
3144 invert_truthvalue (TREE_OPERAND (arg, 1)));
3146 case TRUTH_OR_EXPR:
3147 return build2 (TRUTH_AND_EXPR, type,
3148 invert_truthvalue (TREE_OPERAND (arg, 0)),
3149 invert_truthvalue (TREE_OPERAND (arg, 1)));
3151 case TRUTH_XOR_EXPR:
3152 /* Here we can invert either operand. We invert the first operand
3153 unless the second operand is a TRUTH_NOT_EXPR in which case our
3154 result is the XOR of the first operand with the inside of the
3155 negation of the second operand. */
3157 if (TREE_CODE (TREE_OPERAND (arg, 1)) == TRUTH_NOT_EXPR)
3158 return build2 (TRUTH_XOR_EXPR, type, TREE_OPERAND (arg, 0),
3159 TREE_OPERAND (TREE_OPERAND (arg, 1), 0));
3160 else
3161 return build2 (TRUTH_XOR_EXPR, type,
3162 invert_truthvalue (TREE_OPERAND (arg, 0)),
3163 TREE_OPERAND (arg, 1));
3165 case TRUTH_ANDIF_EXPR:
3166 return build2 (TRUTH_ORIF_EXPR, type,
3167 invert_truthvalue (TREE_OPERAND (arg, 0)),
3168 invert_truthvalue (TREE_OPERAND (arg, 1)));
3170 case TRUTH_ORIF_EXPR:
3171 return build2 (TRUTH_ANDIF_EXPR, type,
3172 invert_truthvalue (TREE_OPERAND (arg, 0)),
3173 invert_truthvalue (TREE_OPERAND (arg, 1)));
3175 case TRUTH_NOT_EXPR:
3176 return TREE_OPERAND (arg, 0);
3178 case COND_EXPR:
3180 tree arg1 = TREE_OPERAND (arg, 1);
3181 tree arg2 = TREE_OPERAND (arg, 2);
3182 /* A COND_EXPR may have a throw as one operand, which
3183 then has void type. Just leave void operands
3184 as they are. */
3185 return build3 (COND_EXPR, type, TREE_OPERAND (arg, 0),
3186 VOID_TYPE_P (TREE_TYPE (arg1))
3187 ? arg1 : invert_truthvalue (arg1),
3188 VOID_TYPE_P (TREE_TYPE (arg2))
3189 ? arg2 : invert_truthvalue (arg2));
3192 case COMPOUND_EXPR:
3193 return build2 (COMPOUND_EXPR, type, TREE_OPERAND (arg, 0),
3194 invert_truthvalue (TREE_OPERAND (arg, 1)));
3196 case NON_LVALUE_EXPR:
3197 return invert_truthvalue (TREE_OPERAND (arg, 0));
3199 case NOP_EXPR:
3200 if (TREE_CODE (TREE_TYPE (arg)) == BOOLEAN_TYPE)
3201 return build1 (TRUTH_NOT_EXPR, type, arg);
3203 case CONVERT_EXPR:
3204 case FLOAT_EXPR:
3205 return build1 (TREE_CODE (arg), type,
3206 invert_truthvalue (TREE_OPERAND (arg, 0)));
3208 case BIT_AND_EXPR:
3209 if (!integer_onep (TREE_OPERAND (arg, 1)))
3210 break;
3211 return build2 (EQ_EXPR, type, arg,
3212 build_int_cst (type, 0));
3214 case SAVE_EXPR:
3215 return build1 (TRUTH_NOT_EXPR, type, arg);
3217 case CLEANUP_POINT_EXPR:
3218 return build1 (CLEANUP_POINT_EXPR, type,
3219 invert_truthvalue (TREE_OPERAND (arg, 0)));
3221 default:
3222 break;
3225 return NULL_TREE;
3228 /* Return a simplified tree node for the truth-negation of ARG. This
3229 never alters ARG itself. We assume that ARG is an operation that
3230 returns a truth value (0 or 1).
3232 FIXME: one would think we would fold the result, but it causes
3233 problems with the dominator optimizer. */
3235 tree
3236 invert_truthvalue (tree arg)
3238 tree tem;
3240 if (TREE_CODE (arg) == ERROR_MARK)
3241 return arg;
3243 tem = fold_truth_not_expr (arg);
3244 if (!tem)
3245 tem = build1 (TRUTH_NOT_EXPR, TREE_TYPE (arg), arg);
3247 return tem;
3250 /* Given a bit-wise operation CODE applied to ARG0 and ARG1, see if both
3251 operands are another bit-wise operation with a common input. If so,
3252 distribute the bit operations to save an operation and possibly two if
3253 constants are involved. For example, convert
3254 (A | B) & (A | C) into A | (B & C)
3255 Further simplification will occur if B and C are constants.
3257 If this optimization cannot be done, 0 will be returned. */
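/* Further worked example (editor's note): (x | 4) & (x | 1) becomes
   x | (4 & 1), i.e. x | 0, which subsequent folding reduces to plain
   x -- the extra operations saved when constants are involved.  */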
3259 static tree
3260 distribute_bit_expr (enum tree_code code, tree type, tree arg0, tree arg1)
3262 tree common;
3263 tree left, right;
3265 if (TREE_CODE (arg0) != TREE_CODE (arg1)
3266 || TREE_CODE (arg0) == code
3267 || (TREE_CODE (arg0) != BIT_AND_EXPR
3268 && TREE_CODE (arg0) != BIT_IOR_EXPR))
3269 return 0;
3271 if (operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 0), 0))
3273 common = TREE_OPERAND (arg0, 0);
3274 left = TREE_OPERAND (arg0, 1);
3275 right = TREE_OPERAND (arg1, 1);
3277 else if (operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 1), 0))
3279 common = TREE_OPERAND (arg0, 0);
3280 left = TREE_OPERAND (arg0, 1);
3281 right = TREE_OPERAND (arg1, 0);
3283 else if (operand_equal_p (TREE_OPERAND (arg0, 1), TREE_OPERAND (arg1, 0), 0))
3285 common = TREE_OPERAND (arg0, 1);
3286 left = TREE_OPERAND (arg0, 0);
3287 right = TREE_OPERAND (arg1, 1);
3289 else if (operand_equal_p (TREE_OPERAND (arg0, 1), TREE_OPERAND (arg1, 1), 0))
3291 common = TREE_OPERAND (arg0, 1);
3292 left = TREE_OPERAND (arg0, 0);
3293 right = TREE_OPERAND (arg1, 0);
3295 else
3296 return 0;
3298 return fold_build2 (TREE_CODE (arg0), type, common,
3299 fold_build2 (code, type, left, right));
3302 /* Knowing that ARG0 and ARG1 are each either a MULT_EXPR or an RDIV_EXPR,
3303 simplify a binary operation with code CODE. This optimization is unsafe. */
3304 static tree
3305 distribute_real_division (enum tree_code code, tree type, tree arg0, tree arg1)
3307 bool mul0 = TREE_CODE (arg0) == MULT_EXPR;
3308 bool mul1 = TREE_CODE (arg1) == MULT_EXPR;
3310 /* (A / C) +- (B / C) -> (A +- B) / C. */
3311 if (mul0 == mul1
3312 && operand_equal_p (TREE_OPERAND (arg0, 1),
3313 TREE_OPERAND (arg1, 1), 0))
3314 return fold_build2 (mul0 ? MULT_EXPR : RDIV_EXPR, type,
3315 fold_build2 (code, type,
3316 TREE_OPERAND (arg0, 0),
3317 TREE_OPERAND (arg1, 0)),
3318 TREE_OPERAND (arg0, 1));
3320 /* (A / C1) +- (A / C2) -> A * (1 / C1 +- 1 / C2). */
3321 if (operand_equal_p (TREE_OPERAND (arg0, 0),
3322 TREE_OPERAND (arg1, 0), 0)
3323 && TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
3324 && TREE_CODE (TREE_OPERAND (arg1, 1)) == REAL_CST)
3326 REAL_VALUE_TYPE r0, r1;
3327 r0 = TREE_REAL_CST (TREE_OPERAND (arg0, 1));
3328 r1 = TREE_REAL_CST (TREE_OPERAND (arg1, 1));
3329 if (!mul0)
3330 real_arithmetic (&r0, RDIV_EXPR, &dconst1, &r0);
3331 if (!mul1)
3332 real_arithmetic (&r1, RDIV_EXPR, &dconst1, &r1);
3333 real_arithmetic (&r0, code, &r0, &r1);
3334 return fold_build2 (MULT_EXPR, type,
3335 TREE_OPERAND (arg0, 0),
3336 build_real (type, r0));
3339 return NULL_TREE;
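/* Example of the second transform (editor's illustration):
   x / 2.0 + x / 4.0 becomes x * (0.5 + 0.25), i.e. x * 0.75.  The
   rewritten form can round differently from the original, which is
   why the comment above calls this optimization unsafe.  */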
3342 /* Return a BIT_FIELD_REF of type TYPE to refer to BITSIZE bits of INNER
3343 starting at BITPOS. The field is unsigned if UNSIGNEDP is nonzero. */
3345 static tree
3346 make_bit_field_ref (tree inner, tree type, int bitsize, int bitpos,
3347 int unsignedp)
3349 tree result;
3351 if (bitpos == 0)
3353 tree size = TYPE_SIZE (TREE_TYPE (inner));
3354 if ((INTEGRAL_TYPE_P (TREE_TYPE (inner))
3355 || POINTER_TYPE_P (TREE_TYPE (inner)))
3356 && host_integerp (size, 0)
3357 && tree_low_cst (size, 0) == bitsize)
3358 return fold_convert (type, inner);
3361 result = build3 (BIT_FIELD_REF, type, inner,
3362 size_int (bitsize), bitsize_int (bitpos));
3364 BIT_FIELD_REF_UNSIGNED (result) = unsignedp;
3366 return result;
3369 /* Optimize a bit-field compare.
3371 There are two cases: First is a compare against a constant and the
3372 second is a comparison of two items where the fields are at the same
3373 bit position relative to the start of a chunk (byte, halfword, word)
3374 large enough to contain it. In these cases we can avoid the shift
3375 implicit in bitfield extractions.
3377 For constants, we emit a compare of the shifted constant with the
3378 BIT_AND_EXPR of a mask and a byte, halfword, or word of the operand being
3379 compared. For two fields at the same position, we do the ANDs with the
3380 similar mask and compare the result of the ANDs.
3382 CODE is the comparison code, known to be either NE_EXPR or EQ_EXPR.
3383 COMPARE_TYPE is the type of the comparison, and LHS and RHS
3384 are the left and right operands of the comparison, respectively.
3386 If the optimization described above can be done, we return the resulting
3387 tree. Otherwise we return zero. */
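/* Sketch of the constant case (hypothetical field, editor's note):
   given struct { unsigned f : 3; } s, the test s.f == 5 becomes
   roughly (WORD & MASK) == ((5 << lbitpos) & MASK), where WORD is a
   mode-sized load covering the field and MASK selects its bits -- one
   masked compare instead of a shift-and-extract.  */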
3389 static tree
3390 optimize_bit_field_compare (enum tree_code code, tree compare_type,
3391 tree lhs, tree rhs)
3393 HOST_WIDE_INT lbitpos, lbitsize, rbitpos, rbitsize, nbitpos, nbitsize;
3394 tree type = TREE_TYPE (lhs);
3395 tree signed_type, unsigned_type;
3396 int const_p = TREE_CODE (rhs) == INTEGER_CST;
3397 enum machine_mode lmode, rmode, nmode;
3398 int lunsignedp, runsignedp;
3399 int lvolatilep = 0, rvolatilep = 0;
3400 tree linner, rinner = NULL_TREE;
3401 tree mask;
3402 tree offset;
3404 /* Get all the information about the extractions being done. If the bit size
3405 is the same as the size of the underlying object, we aren't doing an
3406 extraction at all and so can do nothing. We also don't want to
3407 do anything if the inner expression is a PLACEHOLDER_EXPR since we
3408 then will no longer be able to replace it. */
3409 linner = get_inner_reference (lhs, &lbitsize, &lbitpos, &offset, &lmode,
3410 &lunsignedp, &lvolatilep, false);
3411 if (linner == lhs || lbitsize == GET_MODE_BITSIZE (lmode) || lbitsize < 0
3412 || offset != 0 || TREE_CODE (linner) == PLACEHOLDER_EXPR)
3413 return 0;
3415 if (!const_p)
3417 /* If this is not a constant, we can only do something if bit positions,
3418 sizes, and signedness are the same. */
3419 rinner = get_inner_reference (rhs, &rbitsize, &rbitpos, &offset, &rmode,
3420 &runsignedp, &rvolatilep, false);
3422 if (rinner == rhs || lbitpos != rbitpos || lbitsize != rbitsize
3423 || lunsignedp != runsignedp || offset != 0
3424 || TREE_CODE (rinner) == PLACEHOLDER_EXPR)
3425 return 0;
3428 /* See if we can find a mode to refer to this field. We should be able to,
3429 but fail if we can't. */
3430 nmode = get_best_mode (lbitsize, lbitpos,
3431 const_p ? TYPE_ALIGN (TREE_TYPE (linner))
3432 : MIN (TYPE_ALIGN (TREE_TYPE (linner)),
3433 TYPE_ALIGN (TREE_TYPE (rinner))),
3434 word_mode, lvolatilep || rvolatilep);
3435 if (nmode == VOIDmode)
3436 return 0;
3438 /* Set signed and unsigned types of the precision of this mode for the
3439 shifts below. */
3440 signed_type = lang_hooks.types.type_for_mode (nmode, 0);
3441 unsigned_type = lang_hooks.types.type_for_mode (nmode, 1);
3443 /* Compute the bit position and size for the new reference and our offset
3444 within it. If the new reference is the same size as the original, we
3445 won't optimize anything, so return zero. */
3446 nbitsize = GET_MODE_BITSIZE (nmode);
3447 nbitpos = lbitpos & ~ (nbitsize - 1);
3448 lbitpos -= nbitpos;
3449 if (nbitsize == lbitsize)
3450 return 0;
3452 if (BYTES_BIG_ENDIAN)
3453 lbitpos = nbitsize - lbitsize - lbitpos;
3455 /* Make the mask to be used against the extracted field. */
3456 mask = build_int_cst_type (unsigned_type, -1);
3457 mask = const_binop (LSHIFT_EXPR, mask, size_int (nbitsize - lbitsize), 0);
3458 mask = const_binop (RSHIFT_EXPR, mask,
3459 size_int (nbitsize - lbitsize - lbitpos), 0);
3461 if (! const_p)
3462 /* If not comparing with constant, just rework the comparison
3463 and return. */
3464 return fold_build2 (code, compare_type,
3465 fold_build2 (BIT_AND_EXPR, unsigned_type,
3466 make_bit_field_ref (linner,
3467 unsigned_type,
3468 nbitsize, nbitpos,
3469 1),
3470 mask),
3471 fold_build2 (BIT_AND_EXPR, unsigned_type,
3472 make_bit_field_ref (rinner,
3473 unsigned_type,
3474 nbitsize, nbitpos,
3475 1),
3476 mask));
3478 /* Otherwise, we are handling the constant case. See if the constant is too
3479 big for the field. Warn and return a tree for 0 (false) if so. We do
3480 this not only for its own sake, but to avoid having to test for this
3481 error case below. If we didn't, we might generate wrong code.
3483 For unsigned fields, the constant shifted right by the field length should
3484 be all zero. For signed fields, the high-order bits should agree with
3485 the sign bit. */
3487 if (lunsignedp)
3489 if (! integer_zerop (const_binop (RSHIFT_EXPR,
3490 fold_convert (unsigned_type, rhs),
3491 size_int (lbitsize), 0)))
3493 warning (0, "comparison is always %d due to width of bit-field",
3494 code == NE_EXPR);
3495 return constant_boolean_node (code == NE_EXPR, compare_type);
3498 else
3500 tree tem = const_binop (RSHIFT_EXPR, fold_convert (signed_type, rhs),
3501 size_int (lbitsize - 1), 0);
3502 if (! integer_zerop (tem) && ! integer_all_onesp (tem))
3504 warning (0, "comparison is always %d due to width of bit-field",
3505 code == NE_EXPR);
3506 return constant_boolean_node (code == NE_EXPR, compare_type);
3510 /* Single-bit compares should always be against zero. */
3511 if (lbitsize == 1 && ! integer_zerop (rhs))
3513 code = code == EQ_EXPR ? NE_EXPR : EQ_EXPR;
3514 rhs = build_int_cst (type, 0);
3517 /* Make a new bitfield reference, shift the constant over the
3518 appropriate number of bits and mask it with the computed mask
3519 (in case this was a signed field). If we changed it, make a new one. */
3520 lhs = make_bit_field_ref (linner, unsigned_type, nbitsize, nbitpos, 1);
3521 if (lvolatilep)
3523 TREE_SIDE_EFFECTS (lhs) = 1;
3524 TREE_THIS_VOLATILE (lhs) = 1;
3527 rhs = const_binop (BIT_AND_EXPR,
3528 const_binop (LSHIFT_EXPR,
3529 fold_convert (unsigned_type, rhs),
3530 size_int (lbitpos), 0),
3531 mask, 0);
3533 return build2 (code, compare_type,
3534 build2 (BIT_AND_EXPR, unsigned_type, lhs, mask),
3535 rhs);
3538 /* Subroutine for fold_truthop: decode a field reference.
3540 If EXP is a comparison reference, we return the innermost reference.
3542 *PBITSIZE is set to the number of bits in the reference, *PBITPOS is
3543 set to the starting bit number.
3545 If the innermost field can be completely contained in a mode-sized
3546 unit, *PMODE is set to that mode. Otherwise, it is set to VOIDmode.
3548 *PVOLATILEP is set to 1 if any expression encountered is volatile;
3549 otherwise it is not changed.
3551 *PUNSIGNEDP is set to the signedness of the field.
3553 *PMASK is set to the mask used. This is either contained in a
3554 BIT_AND_EXPR or derived from the width of the field.
3556 *PAND_MASK is set to the mask found in a BIT_AND_EXPR, if any.
3558 Return 0 if this is not a component reference or is one that we can't
3559 do anything with. */
3561 static tree
3562 decode_field_reference (tree exp, HOST_WIDE_INT *pbitsize,
3563 HOST_WIDE_INT *pbitpos, enum machine_mode *pmode,
3564 int *punsignedp, int *pvolatilep,
3565 tree *pmask, tree *pand_mask)
3567 tree outer_type = 0;
3568 tree and_mask = 0;
3569 tree mask, inner, offset;
3570 tree unsigned_type;
3571 unsigned int precision;
3573 /* All the optimizations using this function assume integer fields.
3574 There are problems with FP fields since the type_for_size call
3575 below can fail for, e.g., XFmode. */
3576 if (! INTEGRAL_TYPE_P (TREE_TYPE (exp)))
3577 return 0;
3579 /* We are interested in the bare arrangement of bits, so strip everything
3580 that doesn't affect the machine mode. However, record the type of the
3581 outermost expression if it may matter below. */
3582 if (TREE_CODE (exp) == NOP_EXPR
3583 || TREE_CODE (exp) == CONVERT_EXPR
3584 || TREE_CODE (exp) == NON_LVALUE_EXPR)
3585 outer_type = TREE_TYPE (exp);
3586 STRIP_NOPS (exp);
3588 if (TREE_CODE (exp) == BIT_AND_EXPR)
3590 and_mask = TREE_OPERAND (exp, 1);
3591 exp = TREE_OPERAND (exp, 0);
3592 STRIP_NOPS (exp); STRIP_NOPS (and_mask);
3593 if (TREE_CODE (and_mask) != INTEGER_CST)
3594 return 0;
3597 inner = get_inner_reference (exp, pbitsize, pbitpos, &offset, pmode,
3598 punsignedp, pvolatilep, false);
3599 if ((inner == exp && and_mask == 0)
3600 || *pbitsize < 0 || offset != 0
3601 || TREE_CODE (inner) == PLACEHOLDER_EXPR)
3602 return 0;
3604 /* If the number of bits in the reference is the same as the bitsize of
3605 the outer type, then the outer type gives the signedness. Otherwise
3606 (in case of a small bitfield) the signedness is unchanged. */
3607 if (outer_type && *pbitsize == TYPE_PRECISION (outer_type))
3608 *punsignedp = TYPE_UNSIGNED (outer_type);
3610 /* Compute the mask to access the bitfield. */
3611 unsigned_type = lang_hooks.types.type_for_size (*pbitsize, 1);
3612 precision = TYPE_PRECISION (unsigned_type);
3614 mask = build_int_cst_type (unsigned_type, -1);
3616 mask = const_binop (LSHIFT_EXPR, mask, size_int (precision - *pbitsize), 0);
3617 mask = const_binop (RSHIFT_EXPR, mask, size_int (precision - *pbitsize), 0);
3619 /* Merge it with the mask we found in the BIT_AND_EXPR, if any. */
3620 if (and_mask != 0)
3621 mask = fold_build2 (BIT_AND_EXPR, unsigned_type,
3622 fold_convert (unsigned_type, and_mask), mask);
3624 *pmask = mask;
3625 *pand_mask = and_mask;
3626 return inner;
3629 /* Return nonzero if MASK represents a mask of SIZE ones in the low-order
3630 bit positions. */
3632 static int
3633 all_ones_mask_p (tree mask, int size)
3635 tree type = TREE_TYPE (mask);
3636 unsigned int precision = TYPE_PRECISION (type);
3637 tree tmask;
3639 tmask = build_int_cst_type (lang_hooks.types.signed_type (type), -1);
3641 return
3642 tree_int_cst_equal (mask,
3643 const_binop (RSHIFT_EXPR,
3644 const_binop (LSHIFT_EXPR, tmask,
3645 size_int (precision - size),
3646 0),
3647 size_int (precision - size), 0));
3650 /* Subroutine for fold: determine if VAL is the INTEGER_CST that
3651 represents the sign bit of EXP's type. If EXP represents a sign
3652 or zero extension, also test VAL against the unextended type.
3653 The return value is the (sub)expression whose sign bit is VAL,
3654 or NULL_TREE otherwise. */
3656 static tree
3657 sign_bit_p (tree exp, tree val)
3659 unsigned HOST_WIDE_INT mask_lo, lo;
3660 HOST_WIDE_INT mask_hi, hi;
3661 int width;
3662 tree t;
3664 /* Tree EXP must have an integral type. */
3665 t = TREE_TYPE (exp);
3666 if (! INTEGRAL_TYPE_P (t))
3667 return NULL_TREE;
3669 /* Tree VAL must be an integer constant. */
3670 if (TREE_CODE (val) != INTEGER_CST
3671 || TREE_OVERFLOW (val))
3672 return NULL_TREE;
3674 width = TYPE_PRECISION (t);
3675 if (width > HOST_BITS_PER_WIDE_INT)
3677 hi = (unsigned HOST_WIDE_INT) 1 << (width - HOST_BITS_PER_WIDE_INT - 1);
3678 lo = 0;
3680 mask_hi = ((unsigned HOST_WIDE_INT) -1
3681 >> (2 * HOST_BITS_PER_WIDE_INT - width));
3682 mask_lo = -1;
3684 else
3686 hi = 0;
3687 lo = (unsigned HOST_WIDE_INT) 1 << (width - 1);
3689 mask_hi = 0;
3690 mask_lo = ((unsigned HOST_WIDE_INT) -1
3691 >> (HOST_BITS_PER_WIDE_INT - width));
3694 /* We mask off those bits beyond TREE_TYPE (exp) so that we can
3695 treat VAL as if it were unsigned. */
3696 if ((TREE_INT_CST_HIGH (val) & mask_hi) == hi
3697 && (TREE_INT_CST_LOW (val) & mask_lo) == lo)
3698 return exp;
3700 /* Handle extension from a narrower type. */
3701 if (TREE_CODE (exp) == NOP_EXPR
3702 && TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0))) < width)
3703 return sign_bit_p (TREE_OPERAND (exp, 0), val);
3705 return NULL_TREE;
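/* Example (editor's note): for a 32-bit signed EXP, the masked test
   above accepts VAL exactly when its low 32 bits are 0x80000000, i.e.
   the INTEGER_CST -2147483648; bits beyond the type's precision are
   masked off first so VAL is treated as unsigned.  */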
3708 /* Subroutine for fold_truthop: determine if an operand is simple enough
3709 to be evaluated unconditionally. */
3711 static int
3712 simple_operand_p (tree exp)
3714 /* Strip any conversions that don't change the machine mode. */
3715 STRIP_NOPS (exp);
3717 return (CONSTANT_CLASS_P (exp)
3718 || TREE_CODE (exp) == SSA_NAME
3719 || (DECL_P (exp)
3720 && ! TREE_ADDRESSABLE (exp)
3721 && ! TREE_THIS_VOLATILE (exp)
3722 && ! DECL_NONLOCAL (exp)
3723 /* Don't regard global variables as simple. They may be
3724 allocated in ways unknown to the compiler (shared memory,
3725 #pragma weak, etc). */
3726 && ! TREE_PUBLIC (exp)
3727 && ! DECL_EXTERNAL (exp)
3728 /* Loading a static variable is unduly expensive, but global
3729 registers aren't expensive. */
3730 && (! TREE_STATIC (exp) || DECL_REGISTER (exp))));
3733 /* The following functions are subroutines to fold_range_test and allow it to
3734 try to change a logical combination of comparisons into a range test.
3736 For example, both
3737 X == 2 || X == 3 || X == 4 || X == 5
3738 and
3739 X >= 2 && X <= 5
3740 are converted to
3741 (unsigned) (X - 2) <= 3
3743 We describe each set of comparisons as being either inside or outside
3744 a range, using a variable named like IN_P, and then describe the
3745 range with a lower and upper bound. If one of the bounds is omitted,
3746 it represents either the highest or lowest value of the type.
3748 In the comments below, we represent a range by two numbers in brackets
3749 preceded by a "+" to designate being inside that range, or a "-" to
3750 designate being outside that range, so the condition can be inverted by
3751 flipping the prefix. An omitted bound is represented by a "-". For
3752 example, "- [-, 10]" means being outside the range starting at the lowest
3753 possible value and ending at 10, in other words, being greater than 10.
3754 The range "+ [-, -]" is always true and hence the range "- [-, -]" is
3755 always false.
3757 We set up things so that the missing bounds are handled in a consistent
3758 manner so neither a missing bound nor "true" and "false" need to be
3759 handled using a special case. */
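/* Why the canonical form above works (editor's note): rewriting
   2 <= X && X <= 5 as (unsigned) (X - 2) <= 3 relies on unsigned
   wrap-around -- subtracting the low bound maps in-range values to
   0..3, while values below 2 wrap to very large unsigned numbers, so
   a single unsigned comparison checks both bounds at once.  */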
3761 /* Return the result of applying CODE to ARG0 and ARG1, but handle the case
3762 of ARG0 and/or ARG1 being omitted, meaning an unlimited range. UPPER0_P
3763 and UPPER1_P are nonzero if the respective argument is an upper bound
3764 and zero for a lower. TYPE, if nonzero, is the type of the result; it
3765 must be specified for a comparison. ARG1 will be converted to ARG0's
3766 type if both are specified. */
3768 static tree
3769 range_binop (enum tree_code code, tree type, tree arg0, int upper0_p,
3770 tree arg1, int upper1_p)
3772 tree tem;
3773 int result;
3774 int sgn0, sgn1;
3776 /* If neither arg represents infinity, do the normal operation.
3777 Else, if not a comparison, return infinity. Else handle the special
3778 comparison rules. Note that most of the cases below won't occur, but
3779 are handled for consistency. */
3781 if (arg0 != 0 && arg1 != 0)
3783 tem = fold_build2 (code, type != 0 ? type : TREE_TYPE (arg0),
3784 arg0, fold_convert (TREE_TYPE (arg0), arg1));
3785 STRIP_NOPS (tem);
3786 return TREE_CODE (tem) == INTEGER_CST ? tem : 0;
3789 if (TREE_CODE_CLASS (code) != tcc_comparison)
3790 return 0;
3792 /* Set SGN[01] to -1 if ARG[01] is a lower bound, 1 for upper, and 0
3793 for neither. In real maths, we cannot assume open-ended ranges are
3794 the same. But, this is computer arithmetic, where numbers are finite.
3795 We can therefore make the transformation of any unbounded range with
3796 the value Z, Z being greater than any representable number. This permits
3797 us to treat unbounded ranges as equal. */
3798 sgn0 = arg0 != 0 ? 0 : (upper0_p ? 1 : -1);
3799 sgn1 = arg1 != 0 ? 0 : (upper1_p ? 1 : -1);
3800 switch (code)
3802 case EQ_EXPR:
3803 result = sgn0 == sgn1;
3804 break;
3805 case NE_EXPR:
3806 result = sgn0 != sgn1;
3807 break;
3808 case LT_EXPR:
3809 result = sgn0 < sgn1;
3810 break;
3811 case LE_EXPR:
3812 result = sgn0 <= sgn1;
3813 break;
3814 case GT_EXPR:
3815 result = sgn0 > sgn1;
3816 break;
3817 case GE_EXPR:
3818 result = sgn0 >= sgn1;
3819 break;
3820 default:
3821 gcc_unreachable ();
3824 return constant_boolean_node (result, type);
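/* A usage sketch for range_binop (for exposition only): a null tree
   stands for a missing bound, and the UPPER*_P flag says which
   infinity the missing bound denotes.  */
#if 0
  /* "5 < +infinity": arg1 omitted and marked as an upper bound, so
     sgn1 == 1 beats sgn0 == 0 and the result is a constant 1.  */
  tree t = range_binop (LT_EXPR, integer_type_node,
			build_int_cst (integer_type_node, 5), 0,
			NULL_TREE, 1);
#endif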
3827 /* Given EXP, a logical expression, set the range it is testing into
3828 variables denoted by PIN_P, PLOW, and PHIGH. Return the expression
3829 actually being tested. *PLOW and *PHIGH will be made of the same type
3830 as the returned expression. If EXP is not a comparison, we will most
3831 likely not be returning a useful value and range. */
3833 static tree
3834 make_range (tree exp, int *pin_p, tree *plow, tree *phigh)
3836 enum tree_code code;
3837 tree arg0 = NULL_TREE, arg1 = NULL_TREE;
3838 tree exp_type = NULL_TREE, arg0_type = NULL_TREE;
3839 int in_p, n_in_p;
3840 tree low, high, n_low, n_high;
3842 /* Start with simply saying "EXP != 0" and then look at the code of EXP
3843 and see if we can refine the range. Some of the cases below may not
3844 happen, but it doesn't seem worth worrying about this. We "continue"
3845 the outer loop when we've changed something; otherwise we "break"
3846 the switch, which will "break" the while. */
3848 in_p = 0;
3849 low = high = build_int_cst (TREE_TYPE (exp), 0);
3851 while (1)
3853 code = TREE_CODE (exp);
3854 exp_type = TREE_TYPE (exp);
3856 if (IS_EXPR_CODE_CLASS (TREE_CODE_CLASS (code)))
3858 if (TREE_CODE_LENGTH (code) > 0)
3859 arg0 = TREE_OPERAND (exp, 0);
3860 if (TREE_CODE_CLASS (code) == tcc_comparison
3861 || TREE_CODE_CLASS (code) == tcc_unary
3862 || TREE_CODE_CLASS (code) == tcc_binary)
3863 arg0_type = TREE_TYPE (arg0);
3864 if (TREE_CODE_CLASS (code) == tcc_binary
3865 || TREE_CODE_CLASS (code) == tcc_comparison
3866 || (TREE_CODE_CLASS (code) == tcc_expression
3867 && TREE_CODE_LENGTH (code) > 1))
3868 arg1 = TREE_OPERAND (exp, 1);
3871 switch (code)
3873 case TRUTH_NOT_EXPR:
3874 in_p = ! in_p, exp = arg0;
3875 continue;
3877 case EQ_EXPR: case NE_EXPR:
3878 case LT_EXPR: case LE_EXPR: case GE_EXPR: case GT_EXPR:
3879 /* We can only do something if the range is testing for zero
3880 and if the second operand is an integer constant. Note that
3881 saying something is "in" the range we make is done by
3882 complementing IN_P, since IN_P is set for the initial case of
3883 being not equal to zero; "out" means leaving it alone. */
3884 if (low == 0 || high == 0
3885 || ! integer_zerop (low) || ! integer_zerop (high)
3886 || TREE_CODE (arg1) != INTEGER_CST)
3887 break;
3889 switch (code)
3891 case NE_EXPR: /* - [c, c] */
3892 low = high = arg1;
3893 break;
3894 case EQ_EXPR: /* + [c, c] */
3895 in_p = ! in_p, low = high = arg1;
3896 break;
3897 case GT_EXPR: /* - [-, c] */
3898 low = 0, high = arg1;
3899 break;
3900 case GE_EXPR: /* + [c, -] */
3901 in_p = ! in_p, low = arg1, high = 0;
3902 break;
3903 case LT_EXPR: /* - [c, -] */
3904 low = arg1, high = 0;
3905 break;
3906 case LE_EXPR: /* + [-, c] */
3907 in_p = ! in_p, low = 0, high = arg1;
3908 break;
3909 default:
3910 gcc_unreachable ();
3913 /* If this is an unsigned comparison, we also know that EXP is
3914 greater than or equal to zero. We base the range tests we make
3915 on that fact, so we record it here so we can parse existing
3916 range tests. We test arg0_type since often the return type
3917 of, e.g. EQ_EXPR, is boolean. */
3918 if (TYPE_UNSIGNED (arg0_type) && (low == 0 || high == 0))
3920 if (! merge_ranges (&n_in_p, &n_low, &n_high,
3921 in_p, low, high, 1,
3922 build_int_cst (arg0_type, 0),
3923 NULL_TREE))
3924 break;
3926 in_p = n_in_p, low = n_low, high = n_high;
3928 /* If the high bound is missing, but we have a nonzero low
3929 bound, reverse the range so it goes from zero to the low bound
3930 minus 1. */
3931 if (high == 0 && low && ! integer_zerop (low))
3933 in_p = ! in_p;
3934 high = range_binop (MINUS_EXPR, NULL_TREE, low, 0,
3935 integer_one_node, 0);
3936 low = build_int_cst (arg0_type, 0);
3940 exp = arg0;
3941 continue;
3943 case NEGATE_EXPR:
3944 /* (-x) IN [a,b] -> x in [-b, -a] */
3945 n_low = range_binop (MINUS_EXPR, exp_type,
3946 build_int_cst (exp_type, 0),
3947 0, high, 1);
3948 n_high = range_binop (MINUS_EXPR, exp_type,
3949 build_int_cst (exp_type, 0),
3950 0, low, 0);
3951 low = n_low, high = n_high;
3952 exp = arg0;
3953 continue;
3955 case BIT_NOT_EXPR:
3956 /* ~ X -> -X - 1 */
3957 exp = build2 (MINUS_EXPR, exp_type, negate_expr (arg0),
3958 build_int_cst (exp_type, 1));
3959 continue;
3961 case PLUS_EXPR: case MINUS_EXPR:
3962 if (TREE_CODE (arg1) != INTEGER_CST)
3963 break;
3965 /* If flag_wrapv and ARG0_TYPE is signed, then we cannot
3966 move a constant to the other side. */
3967 if (flag_wrapv && !TYPE_UNSIGNED (arg0_type))
3968 break;
3970 /* If EXP is signed, any overflow in the computation is undefined,
3971 so we don't worry about it so long as our computations on
3972 the bounds don't overflow. For unsigned, overflow is defined
3973 and this is exactly the right thing. */
3974 n_low = range_binop (code == MINUS_EXPR ? PLUS_EXPR : MINUS_EXPR,
3975 arg0_type, low, 0, arg1, 0);
3976 n_high = range_binop (code == MINUS_EXPR ? PLUS_EXPR : MINUS_EXPR,
3977 arg0_type, high, 1, arg1, 0);
3978 if ((n_low != 0 && TREE_OVERFLOW (n_low))
3979 || (n_high != 0 && TREE_OVERFLOW (n_high)))
3980 break;
3982 /* Check for an unsigned range which has wrapped around the maximum
3983 value thus making n_high < n_low, and normalize it. */
3984 if (n_low && n_high && tree_int_cst_lt (n_high, n_low))
3986 low = range_binop (PLUS_EXPR, arg0_type, n_high, 0,
3987 integer_one_node, 0);
3988 high = range_binop (MINUS_EXPR, arg0_type, n_low, 0,
3989 integer_one_node, 0);
3991 /* If the range is of the form +/- [ x+1, x ], we won't
3992 be able to normalize it. But then, it represents the
3993 whole range or the empty set, so make it
3994 +/- [ -, - ]. */
3995 if (tree_int_cst_equal (n_low, low)
3996 && tree_int_cst_equal (n_high, high))
3997 low = high = 0;
3998 else
3999 in_p = ! in_p;
4001 else
4002 low = n_low, high = n_high;
4004 exp = arg0;
4005 continue;
4007 case NOP_EXPR: case NON_LVALUE_EXPR: case CONVERT_EXPR:
4008 if (TYPE_PRECISION (arg0_type) > TYPE_PRECISION (exp_type))
4009 break;
4011 if (! INTEGRAL_TYPE_P (arg0_type)
4012 || (low != 0 && ! int_fits_type_p (low, arg0_type))
4013 || (high != 0 && ! int_fits_type_p (high, arg0_type)))
4014 break;
4016 n_low = low, n_high = high;
4018 if (n_low != 0)
4019 n_low = fold_convert (arg0_type, n_low);
4021 if (n_high != 0)
4022 n_high = fold_convert (arg0_type, n_high);
4025 /* If we're converting arg0 from an unsigned type to exp's
4026 signed type, we will be doing the comparison as unsigned.
4027 The tests above have already verified that LOW and HIGH
4028 are both positive.
4030 So we have to ensure that we will handle large unsigned
4031 values the same way that the current signed bounds treat
4032 negative values. */
4034 if (!TYPE_UNSIGNED (exp_type) && TYPE_UNSIGNED (arg0_type))
4036 tree high_positive;
4037 tree equiv_type = lang_hooks.types.type_for_mode
4038 (TYPE_MODE (arg0_type), 1);
4040 /* A range without an upper bound is, naturally, unbounded.
4041 Since convert would have cropped a very large value, use
4042 the max value for the destination type. */
4043 high_positive
4044 = TYPE_MAX_VALUE (equiv_type) ? TYPE_MAX_VALUE (equiv_type)
4045 : TYPE_MAX_VALUE (arg0_type);
4047 if (TYPE_PRECISION (exp_type) == TYPE_PRECISION (arg0_type))
4048 high_positive = fold_build2 (RSHIFT_EXPR, arg0_type,
4049 fold_convert (arg0_type,
4050 high_positive),
4051 build_int_cst (arg0_type, 1));
4053 /* If the low bound is specified, "and" the range with the
4054 range for which the original unsigned value will be
4055 positive. */
4056 if (low != 0)
4058 if (! merge_ranges (&n_in_p, &n_low, &n_high,
4059 1, n_low, n_high, 1,
4060 fold_convert (arg0_type,
4061 integer_zero_node),
4062 high_positive))
4063 break;
4065 in_p = (n_in_p == in_p);
4067 else
4069 /* Otherwise, "or" the range with the range of the input
4070 that will be interpreted as negative. */
4071 if (! merge_ranges (&n_in_p, &n_low, &n_high,
4072 0, n_low, n_high, 1,
4073 fold_convert (arg0_type,
4074 integer_zero_node),
4075 high_positive))
4076 break;
4078 in_p = (in_p != n_in_p);
4082 exp = arg0;
4083 low = n_low, high = n_high;
4084 continue;
4086 default:
4087 break;
4090 break;
4093 /* If EXP is a constant, we can evaluate whether this is true or false. */
4094 if (TREE_CODE (exp) == INTEGER_CST)
4096 in_p = in_p == (integer_onep (range_binop (GE_EXPR, integer_type_node,
4097 exp, 0, low, 0))
4098 && integer_onep (range_binop (LE_EXPR, integer_type_node,
4099 exp, 1, high, 1)));
4100 low = high = 0;
4101 exp = 0;
4104 *pin_p = in_p, *plow = low, *phigh = high;
4105 return exp;
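/* A worked example of make_range (for exposition): for EXP the tree
   for "x > 5" with x signed, the loop sees GT_EXPR against the
   constant 5 and produces "- [-, 5]"; that is, it returns "x" with
   *PIN_P == 0, *PLOW null (the type's minimum), and *PHIGH the
   constant 5 -- "x is outside [min, 5]".  A TRUTH_NOT_EXPR wrapped
   around EXP would simply flip *PIN_P.  */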
4108 /* Given a range, LOW, HIGH, and IN_P, an expression, EXP, and a result
4109 type, TYPE, return an expression to test if EXP is in (or out of, depending
4110 on IN_P) the range. Return 0 if the test couldn't be created. */
4112 static tree
4113 build_range_check (tree type, tree exp, int in_p, tree low, tree high)
4115 tree etype = TREE_TYPE (exp);
4116 tree value;
4118 #ifdef HAVE_canonicalize_funcptr_for_compare
4119 /* Disable this optimization for function pointer expressions
4120 on targets that require function pointer canonicalization. */
4121 if (HAVE_canonicalize_funcptr_for_compare
4122 && TREE_CODE (etype) == POINTER_TYPE
4123 && TREE_CODE (TREE_TYPE (etype)) == FUNCTION_TYPE)
4124 return NULL_TREE;
4125 #endif
4127 if (! in_p)
4129 value = build_range_check (type, exp, 1, low, high);
4130 if (value != 0)
4131 return invert_truthvalue (value);
4133 return 0;
4136 if (low == 0 && high == 0)
4137 return build_int_cst (type, 1);
4139 if (low == 0)
4140 return fold_build2 (LE_EXPR, type, exp,
4141 fold_convert (etype, high));
4143 if (high == 0)
4144 return fold_build2 (GE_EXPR, type, exp,
4145 fold_convert (etype, low));
4147 if (operand_equal_p (low, high, 0))
4148 return fold_build2 (EQ_EXPR, type, exp,
4149 fold_convert (etype, low));
4151 if (integer_zerop (low))
4153 if (! TYPE_UNSIGNED (etype))
4155 etype = lang_hooks.types.unsigned_type (etype);
4156 high = fold_convert (etype, high);
4157 exp = fold_convert (etype, exp);
4159 return build_range_check (type, exp, 1, 0, high);
4162 /* Optimize (c>=1) && (c<=127) into (signed char)c > 0. */
4163 if (integer_onep (low) && TREE_CODE (high) == INTEGER_CST)
4165 unsigned HOST_WIDE_INT lo;
4166 HOST_WIDE_INT hi;
4167 int prec;
4169 prec = TYPE_PRECISION (etype);
4170 if (prec <= HOST_BITS_PER_WIDE_INT)
4172 hi = 0;
4173 lo = ((unsigned HOST_WIDE_INT) 1 << (prec - 1)) - 1;
4175 else
4177 hi = ((HOST_WIDE_INT) 1 << (prec - HOST_BITS_PER_WIDE_INT - 1)) - 1;
4178 lo = (unsigned HOST_WIDE_INT) -1;
4181 if (TREE_INT_CST_HIGH (high) == hi && TREE_INT_CST_LOW (high) == lo)
4183 if (TYPE_UNSIGNED (etype))
4185 etype = lang_hooks.types.signed_type (etype);
4186 exp = fold_convert (etype, exp);
4188 return fold_build2 (GT_EXPR, type, exp,
4189 build_int_cst (etype, 0));
4193 /* Optimize (c>=low) && (c<=high) into (c-low>=0) && (c-low<=high-low).
4194 This requires wrap-around arithmetic for the type of the expression. */
4195 switch (TREE_CODE (etype))
4197 case INTEGER_TYPE:
4198 /* There is no requirement that LOW be within the range of ETYPE
4199 if the latter is a subtype. It must, however, be within the base
4200 type of ETYPE. So be sure we do the subtraction in that type. */
4201 if (TREE_TYPE (etype))
4202 etype = TREE_TYPE (etype);
4203 break;
4205 case ENUMERAL_TYPE:
4206 case BOOLEAN_TYPE:
4207 etype = lang_hooks.types.type_for_size (TYPE_PRECISION (etype),
4208 TYPE_UNSIGNED (etype));
4209 break;
4211 default:
4212 break;
4215 /* If we don't have wrap-around arithmetic upfront, try to force it. */
4216 if (TREE_CODE (etype) == INTEGER_TYPE
4217 && !TYPE_UNSIGNED (etype) && !flag_wrapv)
4219 tree utype, minv, maxv;
4221 /* Check if (unsigned) INT_MAX + 1 == (unsigned) INT_MIN
4222 for the type in question, as we rely on this here. */
4223 utype = lang_hooks.types.unsigned_type (etype);
4224 maxv = fold_convert (utype, TYPE_MAX_VALUE (etype));
4225 maxv = range_binop (PLUS_EXPR, NULL_TREE, maxv, 1,
4226 integer_one_node, 1);
4227 minv = fold_convert (utype, TYPE_MIN_VALUE (etype));
4229 if (integer_zerop (range_binop (NE_EXPR, integer_type_node,
4230 minv, 1, maxv, 1)))
4231 etype = utype;
4232 else
4233 return 0;
4236 high = fold_convert (etype, high);
4237 low = fold_convert (etype, low);
4238 exp = fold_convert (etype, exp);
4240 value = const_binop (MINUS_EXPR, high, low, 0);
4242 if (value != 0 && !TREE_OVERFLOW (value))
4243 return build_range_check (type,
4244 fold_build2 (MINUS_EXPR, etype, exp, low),
4245 1, build_int_cst (etype, 0), value);
4247 return 0;
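/* A usage sketch for build_range_check (for exposition; `expr' is a
   hypothetical operand, not a name from this file): build the test
   "1 <= expr && expr <= 10" as a single check.  */
#if 0
  tree lo = build_int_cst (integer_type_node, 1);
  tree hi = build_int_cst (integer_type_node, 10);
  /* IN_P == 1 requests a membership test; per the cases above this
     folds to the form "(unsigned) (expr - 1) <= 9".  */
  tree test = build_range_check (boolean_type_node, expr, 1, lo, hi);
#endif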
4250 /* Return the predecessor of VAL in its type, handling the infinite case. */
4252 static tree
4253 range_predecessor (tree val)
4255 tree type = TREE_TYPE (val);
4257 if (INTEGRAL_TYPE_P (type)
4258 && operand_equal_p (val, TYPE_MIN_VALUE (type), 0))
4259 return 0;
4260 else
4261 return range_binop (MINUS_EXPR, NULL_TREE, val, 0, integer_one_node, 0);
4264 /* Return the successor of VAL in its type, handling the infinite case. */
4266 static tree
4267 range_successor (tree val)
4269 tree type = TREE_TYPE (val);
4271 if (INTEGRAL_TYPE_P (type)
4272 && operand_equal_p (val, TYPE_MAX_VALUE (type), 0))
4273 return 0;
4274 else
4275 return range_binop (PLUS_EXPR, NULL_TREE, val, 0, integer_one_node, 0);
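/* For exposition: with 32-bit int, range_predecessor (0) is -1, while
   range_successor (INT_MAX) is a null tree -- the successor of the
   type's maximum does not exist, which callers read as "infinite".  */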
4278 /* Given two ranges, see if we can merge them into one. Return 1 if we
4279 can, 0 if we can't. Set the output range into the specified parameters. */
4281 static int
4282 merge_ranges (int *pin_p, tree *plow, tree *phigh, int in0_p, tree low0,
4283 tree high0, int in1_p, tree low1, tree high1)
4285 int no_overlap;
4286 int subset;
4287 int temp;
4288 tree tem;
4289 int in_p;
4290 tree low, high;
4291 int lowequal = ((low0 == 0 && low1 == 0)
4292 || integer_onep (range_binop (EQ_EXPR, integer_type_node,
4293 low0, 0, low1, 0)));
4294 int highequal = ((high0 == 0 && high1 == 0)
4295 || integer_onep (range_binop (EQ_EXPR, integer_type_node,
4296 high0, 1, high1, 1)));
4298 /* Make range 0 be the range that starts first, or ends last if they
4299 start at the same value. Swap them if that isn't the case. */
4300 if (integer_onep (range_binop (GT_EXPR, integer_type_node,
4301 low0, 0, low1, 0))
4302 || (lowequal
4303 && integer_onep (range_binop (GT_EXPR, integer_type_node,
4304 high1, 1, high0, 1))))
4306 temp = in0_p, in0_p = in1_p, in1_p = temp;
4307 tem = low0, low0 = low1, low1 = tem;
4308 tem = high0, high0 = high1, high1 = tem;
4311 /* Now flag two cases, whether the ranges are disjoint or whether the
4312 second range is totally subsumed in the first. Note that the tests
4313 below are simplified by the ones above. */
4314 no_overlap = integer_onep (range_binop (LT_EXPR, integer_type_node,
4315 high0, 1, low1, 0));
4316 subset = integer_onep (range_binop (LE_EXPR, integer_type_node,
4317 high1, 1, high0, 1));
4319 /* We now have four cases, depending on whether we are including or
4320 excluding the two ranges. */
4321 if (in0_p && in1_p)
4323 /* If they don't overlap, the result is false. If the second range
4324 is a subset, it is the result. Otherwise, the range is from the start
4325 of the second to the end of the first. */
4326 if (no_overlap)
4327 in_p = 0, low = high = 0;
4328 else if (subset)
4329 in_p = 1, low = low1, high = high1;
4330 else
4331 in_p = 1, low = low1, high = high0;
4334 else if (in0_p && ! in1_p)
4336 /* If they don't overlap, the result is the first range. If they are
4337 equal, the result is false. If the second range is a subset of the
4338 first, and the ranges begin at the same place, we go from just after
4339 the end of the second range to the end of the first. If the second
4340 range is not a subset of the first, or if it is a subset and both
4341 ranges end at the same place, the range starts at the start of the
4342 first range and ends just before the second range.
4343 Otherwise, we can't describe this as a single range. */
4344 if (no_overlap)
4345 in_p = 1, low = low0, high = high0;
4346 else if (lowequal && highequal)
4347 in_p = 0, low = high = 0;
4348 else if (subset && lowequal)
4350 low = range_successor (high1);
4351 high = high0;
4352 in_p = (low != 0);
4354 else if (! subset || highequal)
4356 low = low0;
4357 high = range_predecessor (low1);
4358 in_p = (high != 0);
4360 else
4361 return 0;
4364 else if (! in0_p && in1_p)
4366 /* If they don't overlap, the result is the second range. If the second
4367 is a subset of the first, the result is false. Otherwise,
4368 the range starts just after the first range and ends at the
4369 end of the second. */
4370 if (no_overlap)
4371 in_p = 1, low = low1, high = high1;
4372 else if (subset || highequal)
4373 in_p = 0, low = high = 0;
4374 else
4376 low = range_successor (high0);
4377 high = high1;
4378 in_p = (low != 0);
4382 else
4384 /* The case where we are excluding both ranges. Here the complex case
4385 is if they don't overlap. In that case, the only time we have a
4386 range is if they are adjacent. If the second is a subset of the
4387 first, the result is the first. Otherwise, the range to exclude
4388 starts at the beginning of the first range and ends at the end of the
4389 second. */
4390 if (no_overlap)
4392 if (integer_onep (range_binop (EQ_EXPR, integer_type_node,
4393 range_successor (high0),
4394 1, low1, 0)))
4395 in_p = 0, low = low0, high = high1;
4396 else
4398 /* Canonicalize - [min, x] into - [-, x]. */
4399 if (low0 && TREE_CODE (low0) == INTEGER_CST)
4400 switch (TREE_CODE (TREE_TYPE (low0)))
4402 case ENUMERAL_TYPE:
4403 if (TYPE_PRECISION (TREE_TYPE (low0))
4404 != GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (low0))))
4405 break;
4406 /* FALLTHROUGH */
4407 case INTEGER_TYPE:
4408 if (tree_int_cst_equal (low0,
4409 TYPE_MIN_VALUE (TREE_TYPE (low0))))
4410 low0 = 0;
4411 break;
4412 case POINTER_TYPE:
4413 if (TYPE_UNSIGNED (TREE_TYPE (low0))
4414 && integer_zerop (low0))
4415 low0 = 0;
4416 break;
4417 default:
4418 break;
4421 /* Canonicalize - [x, max] into - [x, -]. */
4422 if (high1 && TREE_CODE (high1) == INTEGER_CST)
4423 switch (TREE_CODE (TREE_TYPE (high1)))
4425 case ENUMERAL_TYPE:
4426 if (TYPE_PRECISION (TREE_TYPE (high1))
4427 != GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (high1))))
4428 break;
4429 /* FALLTHROUGH */
4430 case INTEGER_TYPE:
4431 if (tree_int_cst_equal (high1,
4432 TYPE_MAX_VALUE (TREE_TYPE (high1))))
4433 high1 = 0;
4434 break;
4435 case POINTER_TYPE:
4436 if (TYPE_UNSIGNED (TREE_TYPE (high1))
4437 && integer_zerop (range_binop (PLUS_EXPR, NULL_TREE,
4438 high1, 1,
4439 integer_one_node, 1)))
4440 high1 = 0;
4441 break;
4442 default:
4443 break;
4446 /* The ranges might be also adjacent between the maximum and
4447 minimum values of the given type. For
4448 - [{min,-}, x] and - [y, {max,-}] ranges where x + 1 < y
4449 return + [x + 1, y - 1]. */
4450 if (low0 == 0 && high1 == 0)
4452 low = range_successor (high0);
4453 high = range_predecessor (low1);
4454 if (low == 0 || high == 0)
4455 return 0;
4457 in_p = 1;
4459 else
4460 return 0;
4463 else if (subset)
4464 in_p = 0, low = low0, high = high0;
4465 else
4466 in_p = 0, low = low0, high = high1;
4469 *pin_p = in_p, *plow = low, *phigh = high;
4470 return 1;
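/* A worked example of merge_ranges (for exposition): intersecting
   + [2, 5] with + [4, 9] (IN0_P and IN1_P both set).  The ranges
   overlap and neither subsumes the other, so the result runs from the
   start of the second to the end of the first: + [4, 5].  That is,
   "2 <= x && x <= 5 && 4 <= x && x <= 9" becomes "4 <= x && x <= 5".  */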
4474 /* Subroutine of fold, looking inside expressions of the form
4475 A op B ? A : C, where ARG0, ARG1 and ARG2 are the three operands
4476 of the COND_EXPR. This function is being used also to optimize
4477 A op B ? C : A, by reversing the comparison first.
4479 Return a folded expression whose code is not a COND_EXPR
4480 anymore, or NULL_TREE if no folding opportunity is found. */
4482 static tree
4483 fold_cond_expr_with_comparison (tree type, tree arg0, tree arg1, tree arg2)
4485 enum tree_code comp_code = TREE_CODE (arg0);
4486 tree arg00 = TREE_OPERAND (arg0, 0);
4487 tree arg01 = TREE_OPERAND (arg0, 1);
4488 tree arg1_type = TREE_TYPE (arg1);
4489 tree tem;
4491 STRIP_NOPS (arg1);
4492 STRIP_NOPS (arg2);
4494 /* If we have A op 0 ? A : -A, consider applying the following
4495 transformations:
4497 A == 0? A : -A same as -A
4498 A != 0? A : -A same as A
4499 A >= 0? A : -A same as abs (A)
4500 A > 0? A : -A same as abs (A)
4501 A <= 0? A : -A same as -abs (A)
4502 A < 0? A : -A same as -abs (A)
4504 None of these transformations work for modes with signed
4505 zeros. If A is +/-0, the first two transformations will
4506 change the sign of the result (from +0 to -0, or vice
4507 versa). The last four will fix the sign of the result,
4508 even though the original expressions could be positive or
4509 negative, depending on the sign of A.
4511 Note that all these transformations are correct if A is
4512 NaN, since the two alternatives (A and -A) are also NaNs. */
4513 if ((FLOAT_TYPE_P (TREE_TYPE (arg01))
4514 ? real_zerop (arg01)
4515 : integer_zerop (arg01))
4516 && ((TREE_CODE (arg2) == NEGATE_EXPR
4517 && operand_equal_p (TREE_OPERAND (arg2, 0), arg1, 0))
4518 /* In the case that A is of the form X-Y, '-A' (arg2) may
4519 have already been folded to Y-X, check for that. */
4520 || (TREE_CODE (arg1) == MINUS_EXPR
4521 && TREE_CODE (arg2) == MINUS_EXPR
4522 && operand_equal_p (TREE_OPERAND (arg1, 0),
4523 TREE_OPERAND (arg2, 1), 0)
4524 && operand_equal_p (TREE_OPERAND (arg1, 1),
4525 TREE_OPERAND (arg2, 0), 0))))
4526 switch (comp_code)
4528 case EQ_EXPR:
4529 case UNEQ_EXPR:
4530 tem = fold_convert (arg1_type, arg1);
4531 return pedantic_non_lvalue (fold_convert (type, negate_expr (tem)));
4532 case NE_EXPR:
4533 case LTGT_EXPR:
4534 return pedantic_non_lvalue (fold_convert (type, arg1));
4535 case UNGE_EXPR:
4536 case UNGT_EXPR:
4537 if (flag_trapping_math)
4538 break;
4539 /* Fall through. */
4540 case GE_EXPR:
4541 case GT_EXPR:
4542 if (TYPE_UNSIGNED (TREE_TYPE (arg1)))
4543 arg1 = fold_convert (lang_hooks.types.signed_type
4544 (TREE_TYPE (arg1)), arg1);
4545 tem = fold_build1 (ABS_EXPR, TREE_TYPE (arg1), arg1);
4546 return pedantic_non_lvalue (fold_convert (type, tem));
4547 case UNLE_EXPR:
4548 case UNLT_EXPR:
4549 if (flag_trapping_math)
4550 break;
4551 case LE_EXPR:
4552 case LT_EXPR:
4553 if (TYPE_UNSIGNED (TREE_TYPE (arg1)))
4554 arg1 = fold_convert (lang_hooks.types.signed_type
4555 (TREE_TYPE (arg1)), arg1);
4556 tem = fold_build1 (ABS_EXPR, TREE_TYPE (arg1), arg1);
4557 return negate_expr (fold_convert (type, tem));
4558 default:
4559 gcc_assert (TREE_CODE_CLASS (comp_code) == tcc_comparison);
4560 break;
4563 /* A != 0 ? A : 0 is simply A, unless A is -0. Likewise
4564 A == 0 ? A : 0 is always 0 unless A is -0. Note that
4565 both transformations are correct when A is NaN: A != 0
4566 is then true, and A == 0 is false. */
4568 if (integer_zerop (arg01) && integer_zerop (arg2))
4570 if (comp_code == NE_EXPR)
4571 return pedantic_non_lvalue (fold_convert (type, arg1));
4572 else if (comp_code == EQ_EXPR)
4573 return build_int_cst (type, 0);
4576 /* Try some transformations of A op B ? A : B.
4578 A == B? A : B same as B
4579 A != B? A : B same as A
4580 A >= B? A : B same as max (A, B)
4581 A > B? A : B same as max (B, A)
4582 A <= B? A : B same as min (A, B)
4583 A < B? A : B same as min (B, A)
4585 As above, these transformations don't work in the presence
4586 of signed zeros. For example, if A and B are zeros of
4587 opposite sign, the first two transformations will change
4588 the sign of the result. In the last four, the original
4589 expressions give different results for (A=+0, B=-0) and
4590 (A=-0, B=+0), but the transformed expressions do not.
4592 The first two transformations are correct if either A or B
4593 is a NaN. In the first transformation, the condition will
4594 be false, and B will indeed be chosen. In the case of the
4595 second transformation, the condition A != B will be true,
4596 and A will be chosen.
4598 The conversions to max() and min() are not correct if B is
4599 a number and A is not. The conditions in the original
4600 expressions will be false, so all four give B. The min()
4601 and max() versions would give a NaN instead. */
4602 if (operand_equal_for_comparison_p (arg01, arg2, arg00)
4603 /* Avoid these transformations if the COND_EXPR may be used
4604 as an lvalue in the C++ front-end. PR c++/19199. */
4605 && (in_gimple_form
4606 || (strcmp (lang_hooks.name, "GNU C++") != 0
4607 && strcmp (lang_hooks.name, "GNU Objective-C++") != 0)
4608 || ! maybe_lvalue_p (arg1)
4609 || ! maybe_lvalue_p (arg2)))
4611 tree comp_op0 = arg00;
4612 tree comp_op1 = arg01;
4613 tree comp_type = TREE_TYPE (comp_op0);
4615 /* Avoid adding NOP_EXPRs in case this is an lvalue. */
4616 if (TYPE_MAIN_VARIANT (comp_type) == TYPE_MAIN_VARIANT (type))
4618 comp_type = type;
4619 comp_op0 = arg1;
4620 comp_op1 = arg2;
4623 switch (comp_code)
4625 case EQ_EXPR:
4626 return pedantic_non_lvalue (fold_convert (type, arg2));
4627 case NE_EXPR:
4628 return pedantic_non_lvalue (fold_convert (type, arg1));
4629 case LE_EXPR:
4630 case LT_EXPR:
4631 case UNLE_EXPR:
4632 case UNLT_EXPR:
4633 /* In C++ a ?: expression can be an lvalue, so put the
4634 operand which will be used if they are equal first
4635 so that we can convert this back to the
4636 corresponding COND_EXPR. */
4637 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
4639 comp_op0 = fold_convert (comp_type, comp_op0);
4640 comp_op1 = fold_convert (comp_type, comp_op1);
4641 tem = (comp_code == LE_EXPR || comp_code == UNLE_EXPR)
4642 ? fold_build2 (MIN_EXPR, comp_type, comp_op0, comp_op1)
4643 : fold_build2 (MIN_EXPR, comp_type, comp_op1, comp_op0);
4644 return pedantic_non_lvalue (fold_convert (type, tem));
4646 break;
4647 case GE_EXPR:
4648 case GT_EXPR:
4649 case UNGE_EXPR:
4650 case UNGT_EXPR:
4651 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
4653 comp_op0 = fold_convert (comp_type, comp_op0);
4654 comp_op1 = fold_convert (comp_type, comp_op1);
4655 tem = (comp_code == GE_EXPR || comp_code == UNGE_EXPR)
4656 ? fold_build2 (MAX_EXPR, comp_type, comp_op0, comp_op1)
4657 : fold_build2 (MAX_EXPR, comp_type, comp_op1, comp_op0);
4658 return pedantic_non_lvalue (fold_convert (type, tem));
4660 break;
4661 case UNEQ_EXPR:
4662 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
4663 return pedantic_non_lvalue (fold_convert (type, arg2));
4664 break;
4665 case LTGT_EXPR:
4666 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
4667 return pedantic_non_lvalue (fold_convert (type, arg1));
4668 break;
4669 default:
4670 gcc_assert (TREE_CODE_CLASS (comp_code) == tcc_comparison);
4671 break;
4675 /* If this is A op C1 ? A : C2 with C1 and C2 constant integers,
4676 we might still be able to simplify this. For example,
4677 if C1 is one less or one more than C2, this might have started
4678 out as a MIN or MAX and been transformed by this function.
4679 Only good for INTEGER_TYPEs, because we need TYPE_MAX_VALUE. */
4681 if (INTEGRAL_TYPE_P (type)
4682 && TREE_CODE (arg01) == INTEGER_CST
4683 && TREE_CODE (arg2) == INTEGER_CST)
4684 switch (comp_code)
4686 case EQ_EXPR:
4687 /* We can replace A with C1 in this case. */
4688 arg1 = fold_convert (type, arg01);
4689 return fold_build3 (COND_EXPR, type, arg0, arg1, arg2);
4691 case LT_EXPR:
4692 /* If C1 is C2 + 1, this is min(A, C2). */
4693 if (! operand_equal_p (arg2, TYPE_MAX_VALUE (type),
4694 OEP_ONLY_CONST)
4695 && operand_equal_p (arg01,
4696 const_binop (PLUS_EXPR, arg2,
4697 build_int_cst (type, 1), 0),
4698 OEP_ONLY_CONST))
4699 return pedantic_non_lvalue (fold_build2 (MIN_EXPR,
4700 type, arg1, arg2));
4701 break;
4703 case LE_EXPR:
4704 /* If C1 is C2 - 1, this is min(A, C2). */
4705 if (! operand_equal_p (arg2, TYPE_MIN_VALUE (type),
4706 OEP_ONLY_CONST)
4707 && operand_equal_p (arg01,
4708 const_binop (MINUS_EXPR, arg2,
4709 build_int_cst (type, 1), 0),
4710 OEP_ONLY_CONST))
4711 return pedantic_non_lvalue (fold_build2 (MIN_EXPR,
4712 type, arg1, arg2));
4713 break;
4715 case GT_EXPR:
4716 /* If C1 is C2 - 1, this is max(A, C2). */
4717 if (! operand_equal_p (arg2, TYPE_MIN_VALUE (type),
4718 OEP_ONLY_CONST)
4719 && operand_equal_p (arg01,
4720 const_binop (MINUS_EXPR, arg2,
4721 build_int_cst (type, 1), 0),
4722 OEP_ONLY_CONST))
4723 return pedantic_non_lvalue (fold_build2 (MAX_EXPR,
4724 type, arg1, arg2));
4725 break;
4727 case GE_EXPR:
4728 /* If C1 is C2 + 1, this is max(A, C2). */
4729 if (! operand_equal_p (arg2, TYPE_MAX_VALUE (type),
4730 OEP_ONLY_CONST)
4731 && operand_equal_p (arg01,
4732 const_binop (PLUS_EXPR, arg2,
4733 build_int_cst (type, 1), 0),
4734 OEP_ONLY_CONST))
4735 return pedantic_non_lvalue (fold_build2 (MAX_EXPR,
4736 type, arg1, arg2));
4737 break;
4738 case NE_EXPR:
4739 break;
4740 default:
4741 gcc_unreachable ();
4744 return NULL_TREE;
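/* For exposition, two instances of the transformations above:

     x >= 0 ? x : -x   becomes   ABS_EXPR <x>
     x < y  ? x : y    becomes   MIN_EXPR <y, x>

   the MIN/MAX rewrites being applied only when NaNs need not be
   honored for the operands' mode.  */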
4749 #ifndef LOGICAL_OP_NON_SHORT_CIRCUIT
4750 #define LOGICAL_OP_NON_SHORT_CIRCUIT (BRANCH_COST >= 2)
4751 #endif
4753 /* OP0 CODE OP1 is some logical combination of boolean tests. See if
4754 we can merge it into some range test. Return the new tree if so. */
4756 static tree
4757 fold_range_test (enum tree_code code, tree type, tree op0, tree op1)
4759 int or_op = (code == TRUTH_ORIF_EXPR
4760 || code == TRUTH_OR_EXPR);
4761 int in0_p, in1_p, in_p;
4762 tree low0, low1, low, high0, high1, high;
4763 tree lhs = make_range (op0, &in0_p, &low0, &high0);
4764 tree rhs = make_range (op1, &in1_p, &low1, &high1);
4765 tree tem;
4767 /* If this is an OR operation, invert both sides; we will invert
4768 again at the end. */
4769 if (or_op)
4770 in0_p = ! in0_p, in1_p = ! in1_p;
4772 /* If both expressions are the same, if we can merge the ranges, and we
4773 can build the range test, return it or it inverted. If one of the
4774 ranges is always true or always false, consider it to be the same
4775 expression as the other. */
4776 if ((lhs == 0 || rhs == 0 || operand_equal_p (lhs, rhs, 0))
4777 && merge_ranges (&in_p, &low, &high, in0_p, low0, high0,
4778 in1_p, low1, high1)
4779 && 0 != (tem = (build_range_check (type,
4780 lhs != 0 ? lhs
4781 : rhs != 0 ? rhs : integer_zero_node,
4782 in_p, low, high))))
4783 return or_op ? invert_truthvalue (tem) : tem;
4785 /* On machines where the branch cost is expensive, if this is a
4786 short-circuited branch and the underlying object on both sides
4787 is the same, make a non-short-circuit operation. */
4788 else if (LOGICAL_OP_NON_SHORT_CIRCUIT
4789 && lhs != 0 && rhs != 0
4790 && (code == TRUTH_ANDIF_EXPR
4791 || code == TRUTH_ORIF_EXPR)
4792 && operand_equal_p (lhs, rhs, 0))
4794 /* If simple enough, just rewrite. Otherwise, make a SAVE_EXPR
4795 unless we are at top level or LHS contains a PLACEHOLDER_EXPR, in
4796 which cases we can't do this. */
4797 if (simple_operand_p (lhs))
4798 return build2 (code == TRUTH_ANDIF_EXPR
4799 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR,
4800 type, op0, op1);
4802 else if (lang_hooks.decls.global_bindings_p () == 0
4803 && ! CONTAINS_PLACEHOLDER_P (lhs))
4805 tree common = save_expr (lhs);
4807 if (0 != (lhs = build_range_check (type, common,
4808 or_op ? ! in0_p : in0_p,
4809 low0, high0))
4810 && (0 != (rhs = build_range_check (type, common,
4811 or_op ? ! in1_p : in1_p,
4812 low1, high1))))
4813 return build2 (code == TRUTH_ANDIF_EXPR
4814 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR,
4815 type, lhs, rhs);
4819 return 0;
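/* For exposition: fold_range_test is what turns

     ch >= '0' && ch <= '9'

   into a single check of the shape "(unsigned) (ch - '0') <= 9":
   each comparison parses as a range over the same operand,
   merge_ranges intersects the two, and build_range_check emits the
   subtract-and-compare form.  */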
4822 /* Subroutine for fold_truthop: C is an INTEGER_CST interpreted as a P
4823 bit value. Arrange things so the extra bits will be set to zero if and
4824 only if C is sign-extended to its full width. If MASK is nonzero,
4825 it is an INTEGER_CST that should be AND'ed with the extra bits. */
4827 static tree
4828 unextend (tree c, int p, int unsignedp, tree mask)
4830 tree type = TREE_TYPE (c);
4831 int modesize = GET_MODE_BITSIZE (TYPE_MODE (type));
4832 tree temp;
4834 if (p == modesize || unsignedp)
4835 return c;
4837 /* We work by getting just the sign bit into the low-order bit, then
4838 into the high-order bit, then sign-extend. We then XOR that value
4839 with C. */
4840 temp = const_binop (RSHIFT_EXPR, c, size_int (p - 1), 0);
4841 temp = const_binop (BIT_AND_EXPR, temp, size_int (1), 0);
4843 /* We must use a signed type in order to get an arithmetic right shift.
4844 However, we must also avoid introducing accidental overflows, so that
4845 a subsequent call to integer_zerop will work. Hence we must
4846 do the type conversion here. At this point, the constant is either
4847 zero or one, and the conversion to a signed type can never overflow.
4848 We could get an overflow if this conversion is done anywhere else. */
4849 if (TYPE_UNSIGNED (type))
4850 temp = fold_convert (lang_hooks.types.signed_type (type), temp);
4852 temp = const_binop (LSHIFT_EXPR, temp, size_int (modesize - 1), 0);
4853 temp = const_binop (RSHIFT_EXPR, temp, size_int (modesize - p - 1), 0);
4854 if (mask != 0)
4855 temp = const_binop (BIT_AND_EXPR, temp,
4856 fold_convert (TREE_TYPE (c), mask), 0);
4857 /* If necessary, convert the type back to match the type of C. */
4858 if (TYPE_UNSIGNED (type))
4859 temp = fold_convert (type, temp);
4861 return fold_convert (type, const_binop (BIT_XOR_EXPR, c, temp, 0));
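/* A worked example of unextend (for exposition), with a 32-bit mode
   and an 8-bit signed field (P == 8, UNSIGNEDP == 0, MASK == 0):

     C == 0xffffffff (-1, already sign-extended): result 0x000000ff,
       extra bits zero;
     C == 0x000000ff (not sign-extended): result 0xffffffff, extra
       bits all set.

   So the extra bits of the result are zero exactly when C was
   sign-extended to the full width, as the comment above states.  */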
4864 /* Find ways of folding logical expressions of LHS and RHS:
4865 Try to merge two comparisons to the same innermost item.
4866 Look for range tests like "ch >= '0' && ch <= '9'".
4867 Look for combinations of simple terms on machines with expensive branches
4868 and evaluate the RHS unconditionally.
4870 For example, if we have p->a == 2 && p->b == 4 and we can make an
4871 object large enough to span both A and B, we can do this with a comparison
4872 against the object ANDed with the a mask.
4874 If we have p->a == q->a && p->b == q->b, we may be able to use bit masking
4875 operations to do this with one comparison.
4877 We check for both normal comparisons and the BIT_AND_EXPRs made by
4878 this function and the one above.
4880 CODE is the logical operation being done. It can be TRUTH_ANDIF_EXPR,
4881 TRUTH_AND_EXPR, TRUTH_ORIF_EXPR, or TRUTH_OR_EXPR.
4883 TRUTH_TYPE is the type of the logical operand and LHS and RHS are its
4884 two operands.
4886 We return the simplified tree or 0 if no optimization is possible. */
4888 static tree
4889 fold_truthop (enum tree_code code, tree truth_type, tree lhs, tree rhs)
4891 /* If this is the "or" of two comparisons, we can do something if
4892 the comparisons are NE_EXPR. If this is the "and", we can do something
4893 if the comparisons are EQ_EXPR. I.e.,
4894 (a->b == 2 && a->c == 4) can become (a->new == NEW).
4896 WANTED_CODE is this operation code. For single bit fields, we can
4897 convert EQ_EXPR to NE_EXPR so we need not reject the "wrong"
4898 comparison for one-bit fields. */
4900 enum tree_code wanted_code;
4901 enum tree_code lcode, rcode;
4902 tree ll_arg, lr_arg, rl_arg, rr_arg;
4903 tree ll_inner, lr_inner, rl_inner, rr_inner;
4904 HOST_WIDE_INT ll_bitsize, ll_bitpos, lr_bitsize, lr_bitpos;
4905 HOST_WIDE_INT rl_bitsize, rl_bitpos, rr_bitsize, rr_bitpos;
4906 HOST_WIDE_INT xll_bitpos, xlr_bitpos, xrl_bitpos, xrr_bitpos;
4907 HOST_WIDE_INT lnbitsize, lnbitpos, rnbitsize, rnbitpos;
4908 int ll_unsignedp, lr_unsignedp, rl_unsignedp, rr_unsignedp;
4909 enum machine_mode ll_mode, lr_mode, rl_mode, rr_mode;
4910 enum machine_mode lnmode, rnmode;
4911 tree ll_mask, lr_mask, rl_mask, rr_mask;
4912 tree ll_and_mask, lr_and_mask, rl_and_mask, rr_and_mask;
4913 tree l_const, r_const;
4914 tree lntype, rntype, result;
4915 int first_bit, end_bit;
4916 int volatilep;
4917 tree orig_lhs = lhs, orig_rhs = rhs;
4918 enum tree_code orig_code = code;
4920 /* Start by getting the comparison codes. Fail if anything is volatile.
4921 If one operand is a BIT_AND_EXPR with the constant one, treat it as if
4922 it were surrounded by a NE_EXPR.
4924 if (TREE_SIDE_EFFECTS (lhs) || TREE_SIDE_EFFECTS (rhs))
4925 return 0;
4927 lcode = TREE_CODE (lhs);
4928 rcode = TREE_CODE (rhs);
4930 if (lcode == BIT_AND_EXPR && integer_onep (TREE_OPERAND (lhs, 1)))
4932 lhs = build2 (NE_EXPR, truth_type, lhs,
4933 build_int_cst (TREE_TYPE (lhs), 0));
4934 lcode = NE_EXPR;
4937 if (rcode == BIT_AND_EXPR && integer_onep (TREE_OPERAND (rhs, 1)))
4939 rhs = build2 (NE_EXPR, truth_type, rhs,
4940 build_int_cst (TREE_TYPE (rhs), 0));
4941 rcode = NE_EXPR;
4944 if (TREE_CODE_CLASS (lcode) != tcc_comparison
4945 || TREE_CODE_CLASS (rcode) != tcc_comparison)
4946 return 0;
4948 ll_arg = TREE_OPERAND (lhs, 0);
4949 lr_arg = TREE_OPERAND (lhs, 1);
4950 rl_arg = TREE_OPERAND (rhs, 0);
4951 rr_arg = TREE_OPERAND (rhs, 1);
4953 /* Simplify (x<y) && (x==y) into (x<=y) and related optimizations. */
4954 if (simple_operand_p (ll_arg)
4955 && simple_operand_p (lr_arg))
4957 tree result;
4958 if (operand_equal_p (ll_arg, rl_arg, 0)
4959 && operand_equal_p (lr_arg, rr_arg, 0))
4961 result = combine_comparisons (code, lcode, rcode,
4962 truth_type, ll_arg, lr_arg);
4963 if (result)
4964 return result;
4966 else if (operand_equal_p (ll_arg, rr_arg, 0)
4967 && operand_equal_p (lr_arg, rl_arg, 0))
4969 result = combine_comparisons (code, lcode,
4970 swap_tree_comparison (rcode),
4971 truth_type, ll_arg, lr_arg);
4972 if (result)
4973 return result;
4977 code = ((code == TRUTH_AND_EXPR || code == TRUTH_ANDIF_EXPR)
4978 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR);
4980 /* If the RHS can be evaluated unconditionally and its operands are
4981 simple, it wins to evaluate the RHS unconditionally on machines
4982 with expensive branches. In this case, this isn't a comparison
4983 that can be merged. Avoid doing this if the RHS is a floating-point
4984 comparison since those can trap. */
4986 if (BRANCH_COST >= 2
4987 && ! FLOAT_TYPE_P (TREE_TYPE (rl_arg))
4988 && simple_operand_p (rl_arg)
4989 && simple_operand_p (rr_arg))
4991 /* Convert (a != 0) || (b != 0) into (a | b) != 0. */
4992 if (code == TRUTH_OR_EXPR
4993 && lcode == NE_EXPR && integer_zerop (lr_arg)
4994 && rcode == NE_EXPR && integer_zerop (rr_arg)
4995 && TREE_TYPE (ll_arg) == TREE_TYPE (rl_arg))
4996 return build2 (NE_EXPR, truth_type,
4997 build2 (BIT_IOR_EXPR, TREE_TYPE (ll_arg),
4998 ll_arg, rl_arg),
4999 build_int_cst (TREE_TYPE (ll_arg), 0));
5001 /* Convert (a == 0) && (b == 0) into (a | b) == 0. */
5002 if (code == TRUTH_AND_EXPR
5003 && lcode == EQ_EXPR && integer_zerop (lr_arg)
5004 && rcode == EQ_EXPR && integer_zerop (rr_arg)
5005 && TREE_TYPE (ll_arg) == TREE_TYPE (rl_arg))
5006 return build2 (EQ_EXPR, truth_type,
5007 build2 (BIT_IOR_EXPR, TREE_TYPE (ll_arg),
5008 ll_arg, rl_arg),
5009 build_int_cst (TREE_TYPE (ll_arg), 0));
5011 if (LOGICAL_OP_NON_SHORT_CIRCUIT)
5013 if (code != orig_code || lhs != orig_lhs || rhs != orig_rhs)
5014 return build2 (code, truth_type, lhs, rhs);
5015 return NULL_TREE;
5019 /* See if the comparisons can be merged. Then get all the parameters for
5020 each side. */
5022 if ((lcode != EQ_EXPR && lcode != NE_EXPR)
5023 || (rcode != EQ_EXPR && rcode != NE_EXPR))
5024 return 0;
5026 volatilep = 0;
5027 ll_inner = decode_field_reference (ll_arg,
5028 &ll_bitsize, &ll_bitpos, &ll_mode,
5029 &ll_unsignedp, &volatilep, &ll_mask,
5030 &ll_and_mask);
5031 lr_inner = decode_field_reference (lr_arg,
5032 &lr_bitsize, &lr_bitpos, &lr_mode,
5033 &lr_unsignedp, &volatilep, &lr_mask,
5034 &lr_and_mask);
5035 rl_inner = decode_field_reference (rl_arg,
5036 &rl_bitsize, &rl_bitpos, &rl_mode,
5037 &rl_unsignedp, &volatilep, &rl_mask,
5038 &rl_and_mask);
5039 rr_inner = decode_field_reference (rr_arg,
5040 &rr_bitsize, &rr_bitpos, &rr_mode,
5041 &rr_unsignedp, &volatilep, &rr_mask,
5042 &rr_and_mask);
5044 /* The inner operand on the lhs of each comparison must be the
5045 same if we are to be able to do anything.
5046 Then see if we have constants. If not, the same must be true for
5047 the rhs's. */
5048 if (volatilep || ll_inner == 0 || rl_inner == 0
5049 || ! operand_equal_p (ll_inner, rl_inner, 0))
5050 return 0;
5052 if (TREE_CODE (lr_arg) == INTEGER_CST
5053 && TREE_CODE (rr_arg) == INTEGER_CST)
5054 l_const = lr_arg, r_const = rr_arg;
5055 else if (lr_inner == 0 || rr_inner == 0
5056 || ! operand_equal_p (lr_inner, rr_inner, 0))
5057 return 0;
5058 else
5059 l_const = r_const = 0;
5061 /* If either comparison code is not correct for our logical operation,
5062 fail. However, we can convert a one-bit comparison against zero into
5063 the opposite comparison against that bit being set in the field. */
5065 wanted_code = (code == TRUTH_AND_EXPR ? EQ_EXPR : NE_EXPR);
5066 if (lcode != wanted_code)
5068 if (l_const && integer_zerop (l_const) && integer_pow2p (ll_mask))
5070 /* Make the left operand unsigned, since we are only interested
5071 in the value of one bit. Otherwise we are doing the wrong
5072 thing below. */
5073 ll_unsignedp = 1;
5074 l_const = ll_mask;
5076 else
5077 return 0;
5080 /* This is analogous to the code for l_const above. */
5081 if (rcode != wanted_code)
5083 if (r_const && integer_zerop (r_const) && integer_pow2p (rl_mask))
5085 rl_unsignedp = 1;
5086 r_const = rl_mask;
5088 else
5089 return 0;
5092 /* See if we can find a mode that contains both fields being compared on
5093 the left. If we can't, fail. Otherwise, update all constants and masks
5094 to be relative to a field of that size. */
5095 first_bit = MIN (ll_bitpos, rl_bitpos);
5096 end_bit = MAX (ll_bitpos + ll_bitsize, rl_bitpos + rl_bitsize);
5097 lnmode = get_best_mode (end_bit - first_bit, first_bit,
5098 TYPE_ALIGN (TREE_TYPE (ll_inner)), word_mode,
5099 volatilep);
5100 if (lnmode == VOIDmode)
5101 return 0;
5103 lnbitsize = GET_MODE_BITSIZE (lnmode);
5104 lnbitpos = first_bit & ~ (lnbitsize - 1);
5105 lntype = lang_hooks.types.type_for_size (lnbitsize, 1);
5106 xll_bitpos = ll_bitpos - lnbitpos, xrl_bitpos = rl_bitpos - lnbitpos;
5108 if (BYTES_BIG_ENDIAN)
5110 xll_bitpos = lnbitsize - xll_bitpos - ll_bitsize;
5111 xrl_bitpos = lnbitsize - xrl_bitpos - rl_bitsize;
5114 ll_mask = const_binop (LSHIFT_EXPR, fold_convert (lntype, ll_mask),
5115 size_int (xll_bitpos), 0);
5116 rl_mask = const_binop (LSHIFT_EXPR, fold_convert (lntype, rl_mask),
5117 size_int (xrl_bitpos), 0);
5119 if (l_const)
5121 l_const = fold_convert (lntype, l_const);
5122 l_const = unextend (l_const, ll_bitsize, ll_unsignedp, ll_and_mask);
5123 l_const = const_binop (LSHIFT_EXPR, l_const, size_int (xll_bitpos), 0);
5124 if (! integer_zerop (const_binop (BIT_AND_EXPR, l_const,
5125 fold_build1 (BIT_NOT_EXPR,
5126 lntype, ll_mask),
5127 0)))
5129 warning (0, "comparison is always %d", wanted_code == NE_EXPR);
5131 return constant_boolean_node (wanted_code == NE_EXPR, truth_type);
5134 if (r_const)
5136 r_const = fold_convert (lntype, r_const);
5137 r_const = unextend (r_const, rl_bitsize, rl_unsignedp, rl_and_mask);
5138 r_const = const_binop (LSHIFT_EXPR, r_const, size_int (xrl_bitpos), 0);
5139 if (! integer_zerop (const_binop (BIT_AND_EXPR, r_const,
5140 fold_build1 (BIT_NOT_EXPR,
5141 lntype, rl_mask),
5142 0)))
5144 warning (0, "comparison is always %d", wanted_code == NE_EXPR);
5146 return constant_boolean_node (wanted_code == NE_EXPR, truth_type);
5150 /* If the right sides are not constant, do the same for them. Also,
5151 disallow this optimization if a size or signedness mismatch occurs
5152 between the left and right sides. */
5153 if (l_const == 0)
5155 if (ll_bitsize != lr_bitsize || rl_bitsize != rr_bitsize
5156 || ll_unsignedp != lr_unsignedp || rl_unsignedp != rr_unsignedp
5157 /* Make sure the two fields on the right
5158 correspond to the left without being swapped. */
5159 || ll_bitpos - rl_bitpos != lr_bitpos - rr_bitpos)
5160 return 0;
5162 first_bit = MIN (lr_bitpos, rr_bitpos);
5163 end_bit = MAX (lr_bitpos + lr_bitsize, rr_bitpos + rr_bitsize);
5164 rnmode = get_best_mode (end_bit - first_bit, first_bit,
5165 TYPE_ALIGN (TREE_TYPE (lr_inner)), word_mode,
5166 volatilep);
5167 if (rnmode == VOIDmode)
5168 return 0;
5170 rnbitsize = GET_MODE_BITSIZE (rnmode);
5171 rnbitpos = first_bit & ~ (rnbitsize - 1);
5172 rntype = lang_hooks.types.type_for_size (rnbitsize, 1);
5173 xlr_bitpos = lr_bitpos - rnbitpos, xrr_bitpos = rr_bitpos - rnbitpos;
5175 if (BYTES_BIG_ENDIAN)
5177 xlr_bitpos = rnbitsize - xlr_bitpos - lr_bitsize;
5178 xrr_bitpos = rnbitsize - xrr_bitpos - rr_bitsize;
5181 lr_mask = const_binop (LSHIFT_EXPR, fold_convert (rntype, lr_mask),
5182 size_int (xlr_bitpos), 0);
5183 rr_mask = const_binop (LSHIFT_EXPR, fold_convert (rntype, rr_mask),
5184 size_int (xrr_bitpos), 0);
5186 /* Make a mask that corresponds to both fields being compared.
5187 Do this for both items being compared. If the operands are the
5188 same size and the bits being compared are in the same position
5189 then we can do this by masking both and comparing the masked
5190 results. */
5191 ll_mask = const_binop (BIT_IOR_EXPR, ll_mask, rl_mask, 0);
5192 lr_mask = const_binop (BIT_IOR_EXPR, lr_mask, rr_mask, 0);
5193 if (lnbitsize == rnbitsize && xll_bitpos == xlr_bitpos)
5195 lhs = make_bit_field_ref (ll_inner, lntype, lnbitsize, lnbitpos,
5196 ll_unsignedp || rl_unsignedp);
5197 if (! all_ones_mask_p (ll_mask, lnbitsize))
5198 lhs = build2 (BIT_AND_EXPR, lntype, lhs, ll_mask);
5200 rhs = make_bit_field_ref (lr_inner, rntype, rnbitsize, rnbitpos,
5201 lr_unsignedp || rr_unsignedp);
5202 if (! all_ones_mask_p (lr_mask, rnbitsize))
5203 rhs = build2 (BIT_AND_EXPR, rntype, rhs, lr_mask);
5205 return build2 (wanted_code, truth_type, lhs, rhs);
5208 /* There is still another way we can do something: If both pairs of
5209 fields being compared are adjacent, we may be able to make a wider
5210 field containing them both.
5212 Note that we still must mask the lhs/rhs expressions. Furthermore,
5213 the mask must be shifted to account for the shift done by
5214 make_bit_field_ref. */
5215 if ((ll_bitsize + ll_bitpos == rl_bitpos
5216 && lr_bitsize + lr_bitpos == rr_bitpos)
5217 || (ll_bitpos == rl_bitpos + rl_bitsize
5218 && lr_bitpos == rr_bitpos + rr_bitsize))
5220 tree type;
5222 lhs = make_bit_field_ref (ll_inner, lntype, ll_bitsize + rl_bitsize,
5223 MIN (ll_bitpos, rl_bitpos), ll_unsignedp);
5224 rhs = make_bit_field_ref (lr_inner, rntype, lr_bitsize + rr_bitsize,
5225 MIN (lr_bitpos, rr_bitpos), lr_unsignedp);
5227 ll_mask = const_binop (RSHIFT_EXPR, ll_mask,
5228 size_int (MIN (xll_bitpos, xrl_bitpos)), 0);
5229 lr_mask = const_binop (RSHIFT_EXPR, lr_mask,
5230 size_int (MIN (xlr_bitpos, xrr_bitpos)), 0);
5232 /* Convert to the smaller type before masking out unwanted bits. */
5233 type = lntype;
5234 if (lntype != rntype)
5236 if (lnbitsize > rnbitsize)
5238 lhs = fold_convert (rntype, lhs);
5239 ll_mask = fold_convert (rntype, ll_mask);
5240 type = rntype;
5242 else if (lnbitsize < rnbitsize)
5244 rhs = fold_convert (lntype, rhs);
5245 lr_mask = fold_convert (lntype, lr_mask);
5246 type = lntype;
5250 if (! all_ones_mask_p (ll_mask, ll_bitsize + rl_bitsize))
5251 lhs = build2 (BIT_AND_EXPR, type, lhs, ll_mask);
5253 if (! all_ones_mask_p (lr_mask, lr_bitsize + rr_bitsize))
5254 rhs = build2 (BIT_AND_EXPR, type, rhs, lr_mask);
5256 return build2 (wanted_code, truth_type, lhs, rhs);
5259 return 0;
5262 /* Handle the case of comparisons with constants. If there is something in
5263 common between the masks, those bits of the constants must be the same.
5264 If not, the condition is always false. Test for this to avoid generating
5265 incorrect code below. */
5266 result = const_binop (BIT_AND_EXPR, ll_mask, rl_mask, 0);
5267 if (! integer_zerop (result)
5268 && simple_cst_equal (const_binop (BIT_AND_EXPR, result, l_const, 0),
5269 const_binop (BIT_AND_EXPR, result, r_const, 0)) != 1)
5271 if (wanted_code == NE_EXPR)
5273 warning (0, "%<or%> of unmatched not-equal tests is always 1");
5274 return constant_boolean_node (true, truth_type);
5276 else
5278 warning (0, "%<and%> of mutually exclusive equal-tests is always 0");
5279 return constant_boolean_node (false, truth_type);
5283 /* Construct the expression we will return. First get the component
5284 reference we will make. Unless the mask is all ones the width of
5285 that field, perform the mask operation. Then compare with the
5286 merged constant. */
5287 result = make_bit_field_ref (ll_inner, lntype, lnbitsize, lnbitpos,
5288 ll_unsignedp || rl_unsignedp);
5290 ll_mask = const_binop (BIT_IOR_EXPR, ll_mask, rl_mask, 0);
5291 if (! all_ones_mask_p (ll_mask, lnbitsize))
5292 result = build2 (BIT_AND_EXPR, lntype, result, ll_mask);
5294 return build2 (wanted_code, truth_type, result,
5295 const_binop (BIT_IOR_EXPR, l_const, r_const, 0));
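/* For exposition, the payoff of fold_truthop on adjacent bitfields
   (layout-dependent; the exact mask and constant vary with
   endianness):

     struct s { unsigned a : 4; unsigned b : 4; };
     p->a == 2 && p->b == 4

   can load the byte containing both fields once and compare it,
   masked, against the single merged constant built above.  */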
5298 /* Optimize T, which is a comparison of a MIN_EXPR or MAX_EXPR with a
5299 constant. */
5301 static tree
5302 optimize_minmax_comparison (enum tree_code code, tree type, tree op0, tree op1)
5304 tree arg0 = op0;
5305 enum tree_code op_code;
5306 tree comp_const = op1;
5307 tree minmax_const;
5308 int consts_equal, consts_lt;
5309 tree inner;
5311 STRIP_SIGN_NOPS (arg0);
5313 op_code = TREE_CODE (arg0);
5314 minmax_const = TREE_OPERAND (arg0, 1);
5315 consts_equal = tree_int_cst_equal (minmax_const, comp_const);
5316 consts_lt = tree_int_cst_lt (minmax_const, comp_const);
5317 inner = TREE_OPERAND (arg0, 0);
5319 /* If something does not permit us to optimize, return NULL_TREE. */
5320 if ((op_code != MIN_EXPR && op_code != MAX_EXPR)
5321 || TREE_CODE (comp_const) != INTEGER_CST
5322 || TREE_OVERFLOW (comp_const)
5323 || TREE_CODE (minmax_const) != INTEGER_CST
5324 || TREE_OVERFLOW (minmax_const))
5325 return NULL_TREE;
5327 /* Now handle all the various comparison codes. We only handle EQ_EXPR
5328 and GT_EXPR, doing the rest with recursive calls using logical
5329 simplifications. */
5330 switch (code)
5332 case NE_EXPR: case LT_EXPR: case LE_EXPR:
5334 tree tem = optimize_minmax_comparison (invert_tree_comparison (code, false),
5335 type, op0, op1);
5336 if (tem)
5337 return invert_truthvalue (tem);
5338 return NULL_TREE;
5341 case GE_EXPR:
5342 return
5343 fold_build2 (TRUTH_ORIF_EXPR, type,
5344 optimize_minmax_comparison
5345 (EQ_EXPR, type, arg0, comp_const),
5346 optimize_minmax_comparison
5347 (GT_EXPR, type, arg0, comp_const));
5349 case EQ_EXPR:
5350 if (op_code == MAX_EXPR && consts_equal)
5351 /* MAX (X, 0) == 0 -> X <= 0 */
5352 return fold_build2 (LE_EXPR, type, inner, comp_const);
5354 else if (op_code == MAX_EXPR && consts_lt)
5355 /* MAX (X, 0) == 5 -> X == 5 */
5356 return fold_build2 (EQ_EXPR, type, inner, comp_const);
5358 else if (op_code == MAX_EXPR)
5359 /* MAX (X, 0) == -1 -> false */
5360 return omit_one_operand (type, integer_zero_node, inner);
5362 else if (consts_equal)
5363 /* MIN (X, 0) == 0 -> X >= 0 */
5364 return fold_build2 (GE_EXPR, type, inner, comp_const);
5366 else if (consts_lt)
5367 /* MIN (X, 0) == 5 -> false */
5368 return omit_one_operand (type, integer_zero_node, inner);
5370 else
5371 /* MIN (X, 0) == -1 -> X == -1 */
5372 return fold_build2 (EQ_EXPR, type, inner, comp_const);
5374 case GT_EXPR:
5375 if (op_code == MAX_EXPR && (consts_equal || consts_lt))
5376 /* MAX (X, 0) > 0 -> X > 0
5377 MAX (X, 0) > 5 -> X > 5 */
5378 return fold_build2 (GT_EXPR, type, inner, comp_const);
5380 else if (op_code == MAX_EXPR)
5381 /* MAX (X, 0) > -1 -> true */
5382 return omit_one_operand (type, integer_one_node, inner);
5384 else if (op_code == MIN_EXPR && (consts_equal || consts_lt))
5385 /* MIN (X, 0) > 0 -> false
5386 MIN (X, 0) > 5 -> false */
5387 return omit_one_operand (type, integer_zero_node, inner);
5389 else
5390 /* MIN (X, 0) > -1 -> X > -1 */
5391 return fold_build2 (GT_EXPR, type, inner, comp_const);
5393 default:
5394 return NULL_TREE;
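/* For exposition, how the recursion above composes: "MIN (x, 0) <= 5"
   is LE_EXPR, so it is inverted to "MIN (x, 0) > 5"; with MIN and
   minmax_const 0 < comp_const 5 that case folds to false, and
   inverting back yields true (wrapped so any side effects of x are
   preserved).  */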
5398 /* T is an integer expression that is being multiplied by, divided by,
5399 or reduced modulo a constant C (CODE says which operation and what kind
5400 of divide or modulus). See if we can eliminate that operation by folding it with
5401 other operations already in T. WIDE_TYPE, if non-null, is a type that
5402 should be used for the computation if wider than our type.
5404 For example, if we are dividing (X * 8) + (Y * 16) by 4, we can return
5405 (X * 2) + (Y * 4). We must, however, be assured that either the original
5406 expression would not overflow or that overflow is undefined for the type
5407 in the language in question.
5409 We also canonicalize (X + 7) * 4 into X * 4 + 28 in the hope that either
5410 the machine has a multiply-accumulate insn or that this is part of an
5411 addressing calculation.
5413 If we return a non-null expression, it is an equivalent form of the
5414 original computation, but need not be in the original type. */
5416 static tree
5417 extract_muldiv (tree t, tree c, enum tree_code code, tree wide_type)
5419 /* To avoid exponential search depth, refuse to allow recursion past
5420 three levels. Beyond that (1) it's highly unlikely that we'll find
5421 something interesting and (2) we've probably processed it before
5422 when we built the inner expression. */
5424 static int depth;
5425 tree ret;
5427 if (depth > 3)
5428 return NULL;
5430 depth++;
5431 ret = extract_muldiv_1 (t, c, code, wide_type);
5432 depth--;
5434 return ret;
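/* A sketch of this entry point in action (for exposition; `t' and
   `type' are hypothetical): divide "x * 8 + 4" exactly by 4.  */
#if 0
  /* Both terms of t = x * 8 + 4 are multiples of 4, so this returns
     a tree for "x * 2 + 1" with no division left in it, as in the
     (X * 8) + (Y * 16) example in the comment above.  */
  tree simplified = extract_muldiv (t, build_int_cst (type, 4),
				    EXACT_DIV_EXPR, NULL_TREE);
#endif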
5437 static tree
5438 extract_muldiv_1 (tree t, tree c, enum tree_code code, tree wide_type)
5440 tree type = TREE_TYPE (t);
5441 enum tree_code tcode = TREE_CODE (t);
5442 tree ctype = (wide_type != 0 && (GET_MODE_SIZE (TYPE_MODE (wide_type))
5443 > GET_MODE_SIZE (TYPE_MODE (type)))
5444 ? wide_type : type);
5445 tree t1, t2;
5446 int same_p = tcode == code;
5447 tree op0 = NULL_TREE, op1 = NULL_TREE;
5449 /* Don't deal with constants of zero here; they confuse the code below. */
5450 if (integer_zerop (c))
5451 return NULL_TREE;
5453 if (TREE_CODE_CLASS (tcode) == tcc_unary)
5454 op0 = TREE_OPERAND (t, 0);
5456 if (TREE_CODE_CLASS (tcode) == tcc_binary)
5457 op0 = TREE_OPERAND (t, 0), op1 = TREE_OPERAND (t, 1);
5459 /* Note that we need not handle conditional operations here since fold
5460 already handles those cases. So just do arithmetic here. */
5461 switch (tcode)
5463 case INTEGER_CST:
5464 /* For a constant, we can always simplify if we are a multiply
5465 or (for divide and modulus) if it is a multiple of our constant. */
5466 if (code == MULT_EXPR
5467 || integer_zerop (const_binop (TRUNC_MOD_EXPR, t, c, 0)))
5468 return const_binop (code, fold_convert (ctype, t),
5469 fold_convert (ctype, c), 0);
5470 break;
5472 case CONVERT_EXPR: case NON_LVALUE_EXPR: case NOP_EXPR:
5473 /* If op0 is an expression ... */
5474 if ((COMPARISON_CLASS_P (op0)
5475 || UNARY_CLASS_P (op0)
5476 || BINARY_CLASS_P (op0)
5477 || EXPRESSION_CLASS_P (op0))
5478 /* ... and is unsigned, and its type is smaller than ctype,
5479 then we cannot pass through as widening. */
5480 && ((TYPE_UNSIGNED (TREE_TYPE (op0))
5481 && ! (TREE_CODE (TREE_TYPE (op0)) == INTEGER_TYPE
5482 && TYPE_IS_SIZETYPE (TREE_TYPE (op0)))
5483 && (GET_MODE_SIZE (TYPE_MODE (ctype))
5484 > GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (op0)))))
5485 /* ... or this is a truncation (t is narrower than op0),
5486 then we cannot pass through this narrowing. */
5487 || (GET_MODE_SIZE (TYPE_MODE (type))
5488 < GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (op0))))
5489 /* ... or signedness changes for division or modulus,
5490 then we cannot pass through this conversion. */
5491 || (code != MULT_EXPR
5492 && (TYPE_UNSIGNED (ctype)
5493 != TYPE_UNSIGNED (TREE_TYPE (op0))))))
5494 break;
5496 /* Pass the constant down and see if we can make a simplification. If
5497 we can, replace this expression with the inner simplification for
5498 possible later conversion to our or some other type. */
5499 if ((t2 = fold_convert (TREE_TYPE (op0), c)) != 0
5500 && TREE_CODE (t2) == INTEGER_CST
5501 && !TREE_OVERFLOW (t2)
5502 && (0 != (t1 = extract_muldiv (op0, t2, code,
5503 code == MULT_EXPR
5504 ? ctype : NULL_TREE))))
5505 return t1;
5506 break;
5508 case ABS_EXPR:
5509 /* If widening the type changes it from signed to unsigned, then we
5510 must avoid building ABS_EXPR itself as unsigned. */
5511 if (TYPE_UNSIGNED (ctype) && !TYPE_UNSIGNED (type))
5513 tree cstype = (*lang_hooks.types.signed_type) (ctype);
5514 if ((t1 = extract_muldiv (op0, c, code, cstype)) != 0)
5516 t1 = fold_build1 (tcode, cstype, fold_convert (cstype, t1));
5517 return fold_convert (ctype, t1);
5519 break;
5521 /* FALLTHROUGH */
5522 case NEGATE_EXPR:
5523 if ((t1 = extract_muldiv (op0, c, code, wide_type)) != 0)
5524 return fold_build1 (tcode, ctype, fold_convert (ctype, t1));
5525 break;
5527 case MIN_EXPR: case MAX_EXPR:
5528 /* If widening the type changes the signedness, then we can't perform
5529 this optimization as that changes the result. */
5530 if (TYPE_UNSIGNED (ctype) != TYPE_UNSIGNED (type))
5531 break;
5533 /* MIN (a, b) / 5 -> MIN (a / 5, b / 5) */
5534 if ((t1 = extract_muldiv (op0, c, code, wide_type)) != 0
5535 && (t2 = extract_muldiv (op1, c, code, wide_type)) != 0)
5537 if (tree_int_cst_sgn (c) < 0)
5538 tcode = (tcode == MIN_EXPR ? MAX_EXPR : MIN_EXPR);
5540 return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
5541 fold_convert (ctype, t2));
5543 break;
5545 case LSHIFT_EXPR: case RSHIFT_EXPR:
5546 /* If the second operand is constant, this is a multiplication
5547 or floor division, by a power of two, so we can treat it that
5548 way unless the multiplier or divisor overflows. Signed
5549 left-shift overflow is implementation-defined rather than
5550 undefined in C90, so do not convert signed left shift into
5551 multiplication. */
5552 if (TREE_CODE (op1) == INTEGER_CST
5553 && (tcode == RSHIFT_EXPR || TYPE_UNSIGNED (TREE_TYPE (op0)))
5554 /* const_binop may not detect overflow correctly,
5555 so check for it explicitly here. */
5556 && TYPE_PRECISION (TREE_TYPE (size_one_node)) > TREE_INT_CST_LOW (op1)
5557 && TREE_INT_CST_HIGH (op1) == 0
5558 && 0 != (t1 = fold_convert (ctype,
5559 const_binop (LSHIFT_EXPR,
5560 size_one_node,
5561 op1, 0)))
5562 && !TREE_OVERFLOW (t1))
5563 return extract_muldiv (build2 (tcode == LSHIFT_EXPR
5564 ? MULT_EXPR : FLOOR_DIV_EXPR,
5565 ctype, fold_convert (ctype, op0), t1),
5566 c, code, wide_type);
5567 break;
5569 case PLUS_EXPR: case MINUS_EXPR:
5570 /* See if we can eliminate the operation on both sides. If we can, we
5571 can return a new PLUS or MINUS. If we can't, the only remaining
5572 cases where we can do anything are if the second operand is a
5573 constant. */
5574 t1 = extract_muldiv (op0, c, code, wide_type);
5575 t2 = extract_muldiv (op1, c, code, wide_type);
5576 if (t1 != 0 && t2 != 0
5577 && (code == MULT_EXPR
5578 /* If not multiplication, we can only do this if both operands
5579 are divisible by c. */
5580 || (multiple_of_p (ctype, op0, c)
5581 && multiple_of_p (ctype, op1, c))))
5582 return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
5583 fold_convert (ctype, t2));
5585 /* If this was a subtraction, negate OP1 and set it to be an addition.
5586 This simplifies the logic below. */
5587 if (tcode == MINUS_EXPR)
5588 tcode = PLUS_EXPR, op1 = negate_expr (op1);
5590 if (TREE_CODE (op1) != INTEGER_CST)
5591 break;
5593 /* If either OP1 or C is negative, this optimization is not safe for
5594 some of the division and remainder types, while for others we need
5595 to change the code. */
5596 if (tree_int_cst_sgn (op1) < 0 || tree_int_cst_sgn (c) < 0)
5598 if (code == CEIL_DIV_EXPR)
5599 code = FLOOR_DIV_EXPR;
5600 else if (code == FLOOR_DIV_EXPR)
5601 code = CEIL_DIV_EXPR;
5602 else if (code != MULT_EXPR
5603 && code != CEIL_MOD_EXPR && code != FLOOR_MOD_EXPR)
5604 break;
5607 /* If it's a multiply or a division/modulus operation of a multiple
5608 of our constant, do the operation and verify it doesn't overflow. */
5609 if (code == MULT_EXPR
5610 || integer_zerop (const_binop (TRUNC_MOD_EXPR, op1, c, 0)))
5612 op1 = const_binop (code, fold_convert (ctype, op1),
5613 fold_convert (ctype, c), 0);
5614 /* We allow the constant to overflow with wrapping semantics. */
5615 if (op1 == 0
5616 || (TREE_OVERFLOW (op1) && ! flag_wrapv))
5617 break;
5619 else
5620 break;
5622 /* If we have an unsigned type that is not a sizetype, we cannot widen
5623 the operation since it will change the result if the original
5624 computation overflowed. */
5625 if (TYPE_UNSIGNED (ctype)
5626 && ! (TREE_CODE (ctype) == INTEGER_TYPE && TYPE_IS_SIZETYPE (ctype))
5627 && ctype != type)
5628 break;
5630 /* If we were able to eliminate our operation from the first side,
5631 apply our operation to the second side and reform the PLUS. */
5632 if (t1 != 0 && (TREE_CODE (t1) != code || code == MULT_EXPR))
5633 return fold_build2 (tcode, ctype, fold_convert (ctype, t1), op1);
5635 /* The last case is if we are a multiply. In that case, we can
5636 apply the distributive law to commute the multiply and addition
5637 if the multiplication of the constants doesn't overflow. */
5638 if (code == MULT_EXPR)
5639 return fold_build2 (tcode, ctype,
5640 fold_build2 (code, ctype,
5641 fold_convert (ctype, op0),
5642 fold_convert (ctype, c)),
5643 op1);
5645 break;
5647 case MULT_EXPR:
5648 /* We have a special case here if we are doing something like
5649 (C * 8) % 4 since we know that's zero. */
5650 if ((code == TRUNC_MOD_EXPR || code == CEIL_MOD_EXPR
5651 || code == FLOOR_MOD_EXPR || code == ROUND_MOD_EXPR)
5652 && TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST
5653 && integer_zerop (const_binop (TRUNC_MOD_EXPR, op1, c, 0)))
5654 return omit_one_operand (type, integer_zero_node, op0);
5656 /* ... fall through ... */
5658 case TRUNC_DIV_EXPR: case CEIL_DIV_EXPR: case FLOOR_DIV_EXPR:
5659 case ROUND_DIV_EXPR: case EXACT_DIV_EXPR:
5660 /* If we can extract our operation from the LHS, do so and return a
5661 new operation. Likewise for the RHS from a MULT_EXPR. Otherwise,
5662 do something only if the second operand is a constant. */
5663 if (same_p
5664 && (t1 = extract_muldiv (op0, c, code, wide_type)) != 0)
5665 return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
5666 fold_convert (ctype, op1));
5667 else if (tcode == MULT_EXPR && code == MULT_EXPR
5668 && (t1 = extract_muldiv (op1, c, code, wide_type)) != 0)
5669 return fold_build2 (tcode, ctype, fold_convert (ctype, op0),
5670 fold_convert (ctype, t1));
5671 else if (TREE_CODE (op1) != INTEGER_CST)
5672 return 0;
5674 /* If these are the same operation types, we can associate them
5675 assuming no overflow. */
5676 if (tcode == code
5677 && 0 != (t1 = const_binop (MULT_EXPR, fold_convert (ctype, op1),
5678 fold_convert (ctype, c), 0))
5679 && !TREE_OVERFLOW (t1))
5680 return fold_build2 (tcode, ctype, fold_convert (ctype, op0), t1);
5682 /* If these operations "cancel" each other, we have the main
5683 optimizations of this pass, which occur when either constant is a
5684 multiple of the other, in which case we replace this with either an
5685 operation of CODE or TCODE.
5687 If we have an unsigned type that is not a sizetype, we cannot do
5688 this since it will change the result if the original computation
5689 overflowed. */
5690 if ((! TYPE_UNSIGNED (ctype)
5691 || (TREE_CODE (ctype) == INTEGER_TYPE && TYPE_IS_SIZETYPE (ctype)))
5692 && ! flag_wrapv
5693 && ((code == MULT_EXPR && tcode == EXACT_DIV_EXPR)
5694 || (tcode == MULT_EXPR
5695 && code != TRUNC_MOD_EXPR && code != CEIL_MOD_EXPR
5696 && code != FLOOR_MOD_EXPR && code != ROUND_MOD_EXPR)))
5698 if (integer_zerop (const_binop (TRUNC_MOD_EXPR, op1, c, 0)))
5699 return fold_build2 (tcode, ctype, fold_convert (ctype, op0),
5700 fold_convert (ctype,
5701 const_binop (TRUNC_DIV_EXPR,
5702 op1, c, 0)));
5703 else if (integer_zerop (const_binop (TRUNC_MOD_EXPR, c, op1, 0)))
5704 return fold_build2 (code, ctype, fold_convert (ctype, op0),
5705 fold_convert (ctype,
5706 const_binop (TRUNC_DIV_EXPR,
5707 c, op1, 0)));
5709 break;
5711 default:
5712 break;
5715 return 0;
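/* A standalone sketch (illustrative only, not part of this file's
   interface) of the identities the code above justifies, using plain
   ints and assuming no signed overflow.  */
static int
extract_muldiv_sketch (int x)
{
  /* The MULT_EXPR case: (x * 8) / 4 simplifies to x * 2 because 8 is
     a multiple of 4.  The PLUS_EXPR case: (x + 6) * 4 distributes to
     x * 4 + 24.  */
  return ((x * 8) / 4 == x * 2) && ((x + 6) * 4 == x * 4 + 24);
}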
5718 /* Return a node which has the indicated constant VALUE (either 0 or
5719 1), and is of the indicated TYPE. */
5721 tree
5722 constant_boolean_node (int value, tree type)
5724 if (type == integer_type_node)
5725 return value ? integer_one_node : integer_zero_node;
5726 else if (type == boolean_type_node)
5727 return value ? boolean_true_node : boolean_false_node;
5728 else
5729 return build_int_cst (type, value);
5733 /* Return true if expr looks like an ARRAY_REF and set base and
5734 offset to the appropriate trees. If there is no offset,
5735 offset is set to NULL_TREE. Base will be canonicalized to
5736 something you can get the element type from using
5737 TREE_TYPE (TREE_TYPE (base)). Offset will be the offset
5738 in bytes from the base. */
5740 static bool
5741 extract_array_ref (tree expr, tree *base, tree *offset)
5743 /* One canonical form is a PLUS_EXPR with the first
5744 argument being an ADDR_EXPR with a possible NOP_EXPR
5745 attached. */
5746 if (TREE_CODE (expr) == PLUS_EXPR)
5748 tree op0 = TREE_OPERAND (expr, 0);
5749 tree inner_base, dummy1;
5750 /* Strip NOP_EXPRs here because the C frontends and/or
5751 folders may present us with (int *)&x.a + 4B. */
5752 STRIP_NOPS (op0);
5753 if (extract_array_ref (op0, &inner_base, &dummy1))
5755 *base = inner_base;
5756 if (dummy1 == NULL_TREE)
5757 *offset = TREE_OPERAND (expr, 1);
5758 else
5759 *offset = fold_build2 (PLUS_EXPR, TREE_TYPE (expr),
5760 dummy1, TREE_OPERAND (expr, 1));
5761 return true;
5764 /* The other canonical form is an ADDR_EXPR of an ARRAY_REF,
5765 which we transform into an ADDR_EXPR with appropriate
5766 offset. For other arguments to the ADDR_EXPR we assume
5767 zero offset and as such do not care about the ADDR_EXPR
5768 type and strip possible nops from it. */
5769 else if (TREE_CODE (expr) == ADDR_EXPR)
5771 tree op0 = TREE_OPERAND (expr, 0);
5772 if (TREE_CODE (op0) == ARRAY_REF)
5774 tree idx = TREE_OPERAND (op0, 1);
5775 *base = TREE_OPERAND (op0, 0);
5776 *offset = fold_build2 (MULT_EXPR, TREE_TYPE (idx), idx,
5777 array_ref_element_size (op0));
5779 else
5781 /* Handle array-to-pointer decay as &a. */
5782 if (TREE_CODE (TREE_TYPE (op0)) == ARRAY_TYPE)
5783 *base = TREE_OPERAND (expr, 0);
5784 else
5785 *base = expr;
5786 *offset = NULL_TREE;
5788 return true;
5790 /* The next canonical form is a VAR_DECL with POINTER_TYPE. */
5791 else if (SSA_VAR_P (expr)
5792 && TREE_CODE (TREE_TYPE (expr)) == POINTER_TYPE)
5794 *base = expr;
5795 *offset = NULL_TREE;
5796 return true;
5799 return false;
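/* A sketch of the decomposition above in source terms; the function
   name is hypothetical.  For &a[2] on an int array, the base is a and
   the byte offset is 2 * sizeof (int).  */
static int
extract_array_ref_sketch (void)
{
  int a[10];
  int *p = &a[2];  /* ADDR_EXPR of an ARRAY_REF.  */
  return (char *) p - (char *) a == 2 * (int) sizeof (int);
}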
5803 /* Transform `a + (b ? x : y)' into `b ? (a + x) : (a + y)'.
5804 Transform `a + (x < y)' into `(x < y) ? (a + 1) : (a + 0)'. Here
5805 CODE corresponds to the `+', COND to the `(b ? x : y)' or `(x < y)'
5806 expression, and ARG to `a'. If COND_FIRST_P is nonzero, then the
5807 COND is the first argument to CODE; otherwise (as in the example
5808 given here), it is the second argument. TYPE is the type of the
5809 original expression. Return NULL_TREE if no simplification is
5810 possible. */
5812 static tree
5813 fold_binary_op_with_conditional_arg (enum tree_code code,
5814 tree type, tree op0, tree op1,
5815 tree cond, tree arg, int cond_first_p)
5817 tree cond_type = cond_first_p ? TREE_TYPE (op0) : TREE_TYPE (op1);
5818 tree arg_type = cond_first_p ? TREE_TYPE (op1) : TREE_TYPE (op0);
5819 tree test, true_value, false_value;
5820 tree lhs = NULL_TREE;
5821 tree rhs = NULL_TREE;
5823 /* This transformation is only worthwhile if we don't have to wrap
5824 arg in a SAVE_EXPR, and the operation can be simplified on at least
5825 one of the branches once it's pushed inside the COND_EXPR. */
5826 if (!TREE_CONSTANT (arg))
5827 return NULL_TREE;
5829 if (TREE_CODE (cond) == COND_EXPR)
5831 test = TREE_OPERAND (cond, 0);
5832 true_value = TREE_OPERAND (cond, 1);
5833 false_value = TREE_OPERAND (cond, 2);
5834 /* If this operand throws an exception, then it does not make
5835 sense to try to perform a logical or arithmetic operation
5836 involving it. */
5837 if (VOID_TYPE_P (TREE_TYPE (true_value)))
5838 lhs = true_value;
5839 if (VOID_TYPE_P (TREE_TYPE (false_value)))
5840 rhs = false_value;
5842 else
5844 tree testtype = TREE_TYPE (cond);
5845 test = cond;
5846 true_value = constant_boolean_node (true, testtype);
5847 false_value = constant_boolean_node (false, testtype);
5850 arg = fold_convert (arg_type, arg);
5851 if (lhs == 0)
5853 true_value = fold_convert (cond_type, true_value);
5854 if (cond_first_p)
5855 lhs = fold_build2 (code, type, true_value, arg);
5856 else
5857 lhs = fold_build2 (code, type, arg, true_value);
5859 if (rhs == 0)
5861 false_value = fold_convert (cond_type, false_value);
5862 if (cond_first_p)
5863 rhs = fold_build2 (code, type, false_value, arg);
5864 else
5865 rhs = fold_build2 (code, type, arg, false_value);
5868 test = fold_build3 (COND_EXPR, type, test, lhs, rhs);
5869 return fold_convert (type, test);
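/* A sketch of the rewrite above with ordinary ints: both forms agree
   for all inputs, which is why a constant ARG may be pushed inside
   the COND_EXPR (names illustrative).  */
static int
cond_arg_sketch (int a, int b, int x, int y)
{
  return (a + (b ? x : y)) == (b ? (a + x) : (a + y));
}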
5873 /* Subroutine of fold() that checks for the addition of +/- 0.0.
5875 If !NEGATE, return true if ADDEND is +/-0.0 and, for all X of type
5876 TYPE, X + ADDEND is the same as X. If NEGATE, return true if X -
5877 ADDEND is the same as X.
5879 X + 0 and X - 0 both give X when X is NaN, infinite, or nonzero
5880 and finite. The problematic cases are when X is zero, and its mode
5881 has signed zeros. In the case of rounding towards -infinity,
5882 X - 0 is not the same as X because 0 - 0 is -0. In other rounding
5883 modes, X + 0 is not the same as X because -0 + 0 is 0. */
5885 static bool
5886 fold_real_zero_addition_p (tree type, tree addend, int negate)
5888 if (!real_zerop (addend))
5889 return false;
5891 /* Don't allow the fold with -fsignaling-nans. */
5892 if (HONOR_SNANS (TYPE_MODE (type)))
5893 return false;
5895 /* Allow the fold if zeros aren't signed, or their sign isn't important. */
5896 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (type)))
5897 return true;
5899 /* Treat x + -0 as x - 0 and x - -0 as x + 0. */
5900 if (TREE_CODE (addend) == REAL_CST
5901 && REAL_VALUE_MINUS_ZERO (TREE_REAL_CST (addend)))
5902 negate = !negate;
5904 /* The mode has signed zeros, and we have to honor their sign.
5905 In this situation, there is only one case we can return true for.
5906 X - 0 is the same as X unless rounding towards -infinity is
5907 supported. */
5908 return negate && !HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type));
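/* A hedged demonstration, assuming IEEE arithmetic, of why X + 0.0
   cannot be dropped when signed zeros are honored: -0.0 + 0.0 yields
   +0.0 under round-to-nearest, so the addition changes the sign.  */
static int
signed_zero_sketch (void)
{
  double nz = -0.0;
  double sum = nz + 0.0;
  /* 1.0 / -0.0 is -Inf while 1.0 / +0.0 is +Inf, exposing the sign.  */
  return 1.0 / nz < 0.0 && 1.0 / sum > 0.0;
}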
5911 /* Subroutine of fold() that checks comparisons of built-in math
5912 functions against real constants.
5914 FCODE is the DECL_FUNCTION_CODE of the built-in, CODE is the comparison
5915 operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR, GE_EXPR or LE_EXPR. TYPE
5916 is the type of the result and ARG0 and ARG1 are the operands of the
5917 comparison. ARG1 must be a TREE_REAL_CST.
5919 The function returns the constant folded tree if a simplification
5920 can be made, and NULL_TREE otherwise. */
5922 static tree
5923 fold_mathfn_compare (enum built_in_function fcode, enum tree_code code,
5924 tree type, tree arg0, tree arg1)
5926 REAL_VALUE_TYPE c;
5928 if (BUILTIN_SQRT_P (fcode))
5930 tree arg = TREE_VALUE (TREE_OPERAND (arg0, 1));
5931 enum machine_mode mode = TYPE_MODE (TREE_TYPE (arg0));
5933 c = TREE_REAL_CST (arg1);
5934 if (REAL_VALUE_NEGATIVE (c))
5936 /* sqrt(x) < y is always false, if y is negative. */
5937 if (code == EQ_EXPR || code == LT_EXPR || code == LE_EXPR)
5938 return omit_one_operand (type, integer_zero_node, arg);
5940 /* sqrt(x) > y is always true, if y is negative and we
5941 don't care about NaNs, i.e. negative values of x. */
5942 if (code == NE_EXPR || !HONOR_NANS (mode))
5943 return omit_one_operand (type, integer_one_node, arg);
5945 /* sqrt(x) > y is the same as x >= 0, if y is negative. */
5946 return fold_build2 (GE_EXPR, type, arg,
5947 build_real (TREE_TYPE (arg), dconst0));
5949 else if (code == GT_EXPR || code == GE_EXPR)
5951 REAL_VALUE_TYPE c2;
5953 REAL_ARITHMETIC (c2, MULT_EXPR, c, c);
5954 real_convert (&c2, mode, &c2);
5956 if (REAL_VALUE_ISINF (c2))
5958 /* sqrt(x) > y is x == +Inf, when y is very large. */
5959 if (HONOR_INFINITIES (mode))
5960 return fold_build2 (EQ_EXPR, type, arg,
5961 build_real (TREE_TYPE (arg), c2));
5963 /* sqrt(x) > y is always false, when y is very large
5964 and we don't care about infinities. */
5965 return omit_one_operand (type, integer_zero_node, arg);
5968 /* sqrt(x) > c is the same as x > c*c. */
5969 return fold_build2 (code, type, arg,
5970 build_real (TREE_TYPE (arg), c2));
5972 else if (code == LT_EXPR || code == LE_EXPR)
5974 REAL_VALUE_TYPE c2;
5976 REAL_ARITHMETIC (c2, MULT_EXPR, c, c);
5977 real_convert (&c2, mode, &c2);
5979 if (REAL_VALUE_ISINF (c2))
5981 /* sqrt(x) < y is always true, when y is a very large
5982 value and we don't care about NaNs or Infinities. */
5983 if (! HONOR_NANS (mode) && ! HONOR_INFINITIES (mode))
5984 return omit_one_operand (type, integer_one_node, arg);
5986 /* sqrt(x) < y is x != +Inf when y is very large and we
5987 don't care about NaNs. */
5988 if (! HONOR_NANS (mode))
5989 return fold_build2 (NE_EXPR, type, arg,
5990 build_real (TREE_TYPE (arg), c2));
5992 /* sqrt(x) < y is x >= 0 when y is very large and we
5993 don't care about Infinities. */
5994 if (! HONOR_INFINITIES (mode))
5995 return fold_build2 (GE_EXPR, type, arg,
5996 build_real (TREE_TYPE (arg), dconst0));
5998 /* sqrt(x) < y is x >= 0 && x != +Inf, when y is large. */
5999 if (lang_hooks.decls.global_bindings_p () != 0
6000 || CONTAINS_PLACEHOLDER_P (arg))
6001 return NULL_TREE;
6003 arg = save_expr (arg);
6004 return fold_build2 (TRUTH_ANDIF_EXPR, type,
6005 fold_build2 (GE_EXPR, type, arg,
6006 build_real (TREE_TYPE (arg),
6007 dconst0)),
6008 fold_build2 (NE_EXPR, type, arg,
6009 build_real (TREE_TYPE (arg),
6010 c2)));
6013 /* sqrt(x) < c is the same as x < c*c, if we ignore NaNs. */
6014 if (! HONOR_NANS (mode))
6015 return fold_build2 (code, type, arg,
6016 build_real (TREE_TYPE (arg), c2));
6018 /* sqrt(x) < c is the same as x >= 0 && x < c*c. */
6019 if (lang_hooks.decls.global_bindings_p () == 0
6020 && ! CONTAINS_PLACEHOLDER_P (arg))
6022 arg = save_expr (arg);
6023 return fold_build2 (TRUTH_ANDIF_EXPR, type,
6024 fold_build2 (GE_EXPR, type, arg,
6025 build_real (TREE_TYPE (arg),
6026 dconst0)),
6027 fold_build2 (code, type, arg,
6028 build_real (TREE_TYPE (arg),
6029 c2)));
6034 return NULL_TREE;
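/* The core identity above, sketched with doubles: over the reals,
   sqrt (x) > c is x > c*c for nonnegative c.  The rewrite is only
   safe under relaxed floating-point rules because c*c is rounded to
   the target mode (and is special-cased when it overflows to +Inf).  */
static int
sqrt_gt_sketch (double x, double c)
{
  /* The rewritten form of sqrt (x) > c for c >= 0; no sqrt call.  */
  return x > c * c;
}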
6037 /* Subroutine of fold() that optimizes comparisons against Infinities,
6038 either +Inf or -Inf.
6040 CODE is the comparison operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR,
6041 GE_EXPR or LE_EXPR. TYPE is the type of the result and ARG0 and ARG1
6042 are the operands of the comparison. ARG1 must be a TREE_REAL_CST.
6044 The function returns the constant folded tree if a simplification
6045 can be made, and NULL_TREE otherwise. */
6047 static tree
6048 fold_inf_compare (enum tree_code code, tree type, tree arg0, tree arg1)
6050 enum machine_mode mode;
6051 REAL_VALUE_TYPE max;
6052 tree temp;
6053 bool neg;
6055 mode = TYPE_MODE (TREE_TYPE (arg0));
6057 /* For negative infinity swap the sense of the comparison. */
6058 neg = REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg1));
6059 if (neg)
6060 code = swap_tree_comparison (code);
6062 switch (code)
6064 case GT_EXPR:
6065 /* x > +Inf is always false, if we ignore sNaNs. */
6066 if (HONOR_SNANS (mode))
6067 return NULL_TREE;
6068 return omit_one_operand (type, integer_zero_node, arg0);
6070 case LE_EXPR:
6071 /* x <= +Inf is always true, if we don't care about NaNs. */
6072 if (! HONOR_NANS (mode))
6073 return omit_one_operand (type, integer_one_node, arg0);
6075 /* x <= +Inf is the same as x == x, i.e. isfinite(x). */
6076 if (lang_hooks.decls.global_bindings_p () == 0
6077 && ! CONTAINS_PLACEHOLDER_P (arg0))
6079 arg0 = save_expr (arg0);
6080 return fold_build2 (EQ_EXPR, type, arg0, arg0);
6082 break;
6084 case EQ_EXPR:
6085 case GE_EXPR:
6086 /* x == +Inf and x >= +Inf are always equal to x > DBL_MAX. */
6087 real_maxval (&max, neg, mode);
6088 return fold_build2 (neg ? LT_EXPR : GT_EXPR, type,
6089 arg0, build_real (TREE_TYPE (arg0), max));
6091 case LT_EXPR:
6092 /* x < +Inf is always equal to x <= DBL_MAX. */
6093 real_maxval (&max, neg, mode);
6094 return fold_build2 (neg ? GE_EXPR : LE_EXPR, type,
6095 arg0, build_real (TREE_TYPE (arg0), max));
6097 case NE_EXPR:
6098 /* x != +Inf is always equal to !(x > DBL_MAX). */
6099 real_maxval (&max, neg, mode);
6100 if (! HONOR_NANS (mode))
6101 return fold_build2 (neg ? GE_EXPR : LE_EXPR, type,
6102 arg0, build_real (TREE_TYPE (arg0), max));
6104 temp = fold_build2 (neg ? LT_EXPR : GT_EXPR, type,
6105 arg0, build_real (TREE_TYPE (arg0), max));
6106 return fold_build1 (TRUTH_NOT_EXPR, type, temp);
6108 default:
6109 break;
6112 return NULL_TREE;
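/* A sketch, assuming <float.h> for DBL_MAX: x < +Inf holds exactly
   when x <= DBL_MAX, and both comparisons are false for NaN, so the
   LT_EXPR rewrite above is safe even when NaNs are honored.  */
static int
inf_compare_sketch (double x)
{
  return x <= DBL_MAX;  /* The rewritten form of x < +Inf.  */
}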
6115 /* Subroutine of fold() that optimizes comparisons of a division by
6116 a nonzero integer constant against an integer constant, i.e.
6117 X/C1 op C2.
6119 CODE is the comparison operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR,
6120 GE_EXPR or LE_EXPR. TYPE is the type of the result and ARG0 and ARG1
6121 are the operands of the comparison. ARG1 must be an INTEGER_CST.
6123 The function returns the constant folded tree if a simplification
6124 can be made, and NULL_TREE otherwise. */
6126 static tree
6127 fold_div_compare (enum tree_code code, tree type, tree arg0, tree arg1)
6129 tree prod, tmp, hi, lo;
6130 tree arg00 = TREE_OPERAND (arg0, 0);
6131 tree arg01 = TREE_OPERAND (arg0, 1);
6132 unsigned HOST_WIDE_INT lpart;
6133 HOST_WIDE_INT hpart;
6134 bool unsigned_p = TYPE_UNSIGNED (TREE_TYPE (arg0));
6135 bool neg_overflow;
6136 int overflow;
6138 /* We have to do this the hard way to detect unsigned overflow.
6139 prod = int_const_binop (MULT_EXPR, arg01, arg1, 0); */
6140 overflow = mul_double_with_sign (TREE_INT_CST_LOW (arg01),
6141 TREE_INT_CST_HIGH (arg01),
6142 TREE_INT_CST_LOW (arg1),
6143 TREE_INT_CST_HIGH (arg1),
6144 &lpart, &hpart, unsigned_p);
6145 prod = force_fit_type_double (TREE_TYPE (arg00), lpart, hpart,
6146 -1, overflow, false);
6147 neg_overflow = false;
6149 if (unsigned_p)
6151 tmp = int_const_binop (MINUS_EXPR, arg01,
6152 build_int_cst (TREE_TYPE (arg01), 1), 0);
6153 lo = prod;
6155 /* Likewise hi = int_const_binop (PLUS_EXPR, prod, tmp, 0). */
6156 overflow = add_double_with_sign (TREE_INT_CST_LOW (prod),
6157 TREE_INT_CST_HIGH (prod),
6158 TREE_INT_CST_LOW (tmp),
6159 TREE_INT_CST_HIGH (tmp),
6160 &lpart, &hpart, unsigned_p);
6161 hi = force_fit_type_double (TREE_TYPE (arg00), lpart, hpart,
6162 -1, overflow | TREE_OVERFLOW (prod),
6163 false);
6165 else if (tree_int_cst_sgn (arg01) >= 0)
6167 tmp = int_const_binop (MINUS_EXPR, arg01,
6168 build_int_cst (TREE_TYPE (arg01), 1), 0);
6169 switch (tree_int_cst_sgn (arg1))
6171 case -1:
6172 neg_overflow = true;
6173 lo = int_const_binop (MINUS_EXPR, prod, tmp, 0);
6174 hi = prod;
6175 break;
6177 case 0:
6178 lo = fold_negate_const (tmp, TREE_TYPE (arg0));
6179 hi = tmp;
6180 break;
6182 case 1:
6183 hi = int_const_binop (PLUS_EXPR, prod, tmp, 0);
6184 lo = prod;
6185 break;
6187 default:
6188 gcc_unreachable ();
6191 else
6193 /* A negative divisor reverses the relational operators. */
6194 code = swap_tree_comparison (code);
6196 tmp = int_const_binop (PLUS_EXPR, arg01,
6197 build_int_cst (TREE_TYPE (arg01), 1), 0);
6198 switch (tree_int_cst_sgn (arg1))
6200 case -1:
6201 hi = int_const_binop (MINUS_EXPR, prod, tmp, 0);
6202 lo = prod;
6203 break;
6205 case 0:
6206 hi = fold_negate_const (tmp, TREE_TYPE (arg0));
6207 lo = tmp;
6208 break;
6210 case 1:
6211 neg_overflow = true;
6212 lo = int_const_binop (PLUS_EXPR, prod, tmp, 0);
6213 hi = prod;
6214 break;
6216 default:
6217 gcc_unreachable ();
6221 switch (code)
6223 case EQ_EXPR:
6224 if (TREE_OVERFLOW (lo) && TREE_OVERFLOW (hi))
6225 return omit_one_operand (type, integer_zero_node, arg00);
6226 if (TREE_OVERFLOW (hi))
6227 return fold_build2 (GE_EXPR, type, arg00, lo);
6228 if (TREE_OVERFLOW (lo))
6229 return fold_build2 (LE_EXPR, type, arg00, hi);
6230 return build_range_check (type, arg00, 1, lo, hi);
6232 case NE_EXPR:
6233 if (TREE_OVERFLOW (lo) && TREE_OVERFLOW (hi))
6234 return omit_one_operand (type, integer_one_node, arg00);
6235 if (TREE_OVERFLOW (hi))
6236 return fold_build2 (LT_EXPR, type, arg00, lo);
6237 if (TREE_OVERFLOW (lo))
6238 return fold_build2 (GT_EXPR, type, arg00, hi);
6239 return build_range_check (type, arg00, 0, lo, hi);
6241 case LT_EXPR:
6242 if (TREE_OVERFLOW (lo))
6244 tmp = neg_overflow ? integer_zero_node : integer_one_node;
6245 return omit_one_operand (type, tmp, arg00);
6247 return fold_build2 (LT_EXPR, type, arg00, lo);
6249 case LE_EXPR:
6250 if (TREE_OVERFLOW (hi))
6252 tmp = neg_overflow ? integer_zero_node : integer_one_node;
6253 return omit_one_operand (type, tmp, arg00);
6255 return fold_build2 (LE_EXPR, type, arg00, hi);
6257 case GT_EXPR:
6258 if (TREE_OVERFLOW (hi))
6260 tmp = neg_overflow ? integer_one_node : integer_zero_node;
6261 return omit_one_operand (type, tmp, arg00);
6263 return fold_build2 (GT_EXPR, type, arg00, hi);
6265 case GE_EXPR:
6266 if (TREE_OVERFLOW (lo))
6268 tmp = neg_overflow ? integer_one_node : integer_zero_node;
6269 return omit_one_operand (type, tmp, arg00);
6271 return fold_build2 (GE_EXPR, type, arg00, lo);
6273 default:
6274 break;
6277 return NULL_TREE;
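/* A sketch of the range rewrite above using C's truncating division:
   x / 3 == 2 holds exactly for x in [6, 8], i.e. lo = 2*3 and
   hi = lo + (3 - 1), matching the build_range_check call.  */
static int
div_compare_sketch (int x)
{
  return (x / 3 == 2) == (x >= 6 && x <= 8);
}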
6281 /* If CODE with arguments ARG0 and ARG1 represents a single bit
6282 equality/inequality test, then return a simplified form of the test
6283 using a sign test. Otherwise return NULL. TYPE is the desired
6284 result type. */
6286 static tree
6287 fold_single_bit_test_into_sign_test (enum tree_code code, tree arg0, tree arg1,
6288 tree result_type)
6290 /* If this is testing a single bit, we can optimize the test. */
6291 if ((code == NE_EXPR || code == EQ_EXPR)
6292 && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
6293 && integer_pow2p (TREE_OPERAND (arg0, 1)))
6295 /* If we have (A & C) != 0 where C is the sign bit of A, convert
6296 this into A < 0. Similarly for (A & C) == 0 into A >= 0. */
6297 tree arg00 = sign_bit_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg0, 1));
6299 if (arg00 != NULL_TREE
6300 /* This is only a win if casting to a signed type is cheap,
6301 i.e. when arg00's type is not a partial mode. */
6302 && TYPE_PRECISION (TREE_TYPE (arg00))
6303 == GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (arg00))))
6305 tree stype = lang_hooks.types.signed_type (TREE_TYPE (arg00));
6306 return fold_build2 (code == EQ_EXPR ? GE_EXPR : LT_EXPR,
6307 result_type, fold_convert (stype, arg00),
6308 build_int_cst (stype, 0));
6312 return NULL_TREE;
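/* A sketch, assuming two's complement and <limits.h>: when C is the
   sign bit of A, (a & C) != 0 is simply a < 0, and (a & C) == 0 is
   a >= 0.  */
static int
sign_test_sketch (int a)
{
  return ((a & INT_MIN) != 0) == (a < 0);
}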
6315 /* If CODE with arguments ARG0 and ARG1 represents a single bit
6316 equality/inequality test, then return a simplified form of
6317 the test using shifts and logical operations. Otherwise return
6318 NULL. TYPE is the desired result type. */
6320 tree
6321 fold_single_bit_test (enum tree_code code, tree arg0, tree arg1,
6322 tree result_type)
6324 /* If this is testing a single bit, we can optimize the test. */
6325 if ((code == NE_EXPR || code == EQ_EXPR)
6326 && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
6327 && integer_pow2p (TREE_OPERAND (arg0, 1)))
6329 tree inner = TREE_OPERAND (arg0, 0);
6330 tree type = TREE_TYPE (arg0);
6331 int bitnum = tree_log2 (TREE_OPERAND (arg0, 1));
6332 enum machine_mode operand_mode = TYPE_MODE (type);
6333 int ops_unsigned;
6334 tree signed_type, unsigned_type, intermediate_type;
6335 tree tem, one;
6337 /* First, see if we can fold the single bit test into a sign-bit
6338 test. */
6339 tem = fold_single_bit_test_into_sign_test (code, arg0, arg1,
6340 result_type);
6341 if (tem)
6342 return tem;
6344 /* Otherwise we have (A & C) != 0 where C is a single bit,
6345 convert that into ((A >> C2) & 1), where C2 = log2(C).
6346 Similarly for (A & C) == 0. */
6348 /* If INNER is a right shift of a constant and it plus BITNUM does
6349 not overflow, adjust BITNUM and INNER. */
6350 if (TREE_CODE (inner) == RSHIFT_EXPR
6351 && TREE_CODE (TREE_OPERAND (inner, 1)) == INTEGER_CST
6352 && TREE_INT_CST_HIGH (TREE_OPERAND (inner, 1)) == 0
6353 && bitnum < TYPE_PRECISION (type)
6354 && 0 > compare_tree_int (TREE_OPERAND (inner, 1),
6355 bitnum - TYPE_PRECISION (type)))
6357 bitnum += TREE_INT_CST_LOW (TREE_OPERAND (inner, 1));
6358 inner = TREE_OPERAND (inner, 0);
6361 /* If we are going to be able to omit the AND below, we must do our
6362 operations as unsigned. If we must use the AND, we have a choice.
6363 Normally unsigned is faster, but for some machines signed is. */
6364 #ifdef LOAD_EXTEND_OP
6365 ops_unsigned = (LOAD_EXTEND_OP (operand_mode) == SIGN_EXTEND
6366 && !flag_syntax_only) ? 0 : 1;
6367 #else
6368 ops_unsigned = 1;
6369 #endif
6371 signed_type = lang_hooks.types.type_for_mode (operand_mode, 0);
6372 unsigned_type = lang_hooks.types.type_for_mode (operand_mode, 1);
6373 intermediate_type = ops_unsigned ? unsigned_type : signed_type;
6374 inner = fold_convert (intermediate_type, inner);
6376 if (bitnum != 0)
6377 inner = build2 (RSHIFT_EXPR, intermediate_type,
6378 inner, size_int (bitnum));
6380 one = build_int_cst (intermediate_type, 1);
6382 if (code == EQ_EXPR)
6383 inner = fold_build2 (BIT_XOR_EXPR, intermediate_type, inner, one);
6385 /* Put the AND last so it can combine with more things. */
6386 inner = build2 (BIT_AND_EXPR, intermediate_type, inner, one);
6388 /* Make sure to return the proper type. */
6389 inner = fold_convert (result_type, inner);
6391 return inner;
6393 return NULL_TREE;
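/* A sketch of the shift-and-mask form built above: for C == 8 we
   have C2 == 3, so (a & 8) != 0 becomes ((a >> 3) & 1).  */
static unsigned
single_bit_test_sketch (unsigned a)
{
  return (a >> 3) & 1;  /* Equals (a & 8) != 0.  */
}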
6396 /* Check whether we are allowed to reorder operands arg0 and arg1,
6397 such that the evaluation of arg1 occurs before arg0. */
6399 static bool
6400 reorder_operands_p (tree arg0, tree arg1)
6402 if (! flag_evaluation_order)
6403 return true;
6404 if (TREE_CONSTANT (arg0) || TREE_CONSTANT (arg1))
6405 return true;
6406 return ! TREE_SIDE_EFFECTS (arg0)
6407 && ! TREE_SIDE_EFFECTS (arg1);
6410 /* Test whether it is preferable to swap two operands, ARG0 and
6411 ARG1, for example because ARG0 is an integer constant and ARG1
6412 isn't. If REORDER is true, only recommend swapping if we can
6413 evaluate the operands in reverse order. */
6415 bool
6416 tree_swap_operands_p (tree arg0, tree arg1, bool reorder)
6418 STRIP_SIGN_NOPS (arg0);
6419 STRIP_SIGN_NOPS (arg1);
6421 if (TREE_CODE (arg1) == INTEGER_CST)
6422 return 0;
6423 if (TREE_CODE (arg0) == INTEGER_CST)
6424 return 1;
6426 if (TREE_CODE (arg1) == REAL_CST)
6427 return 0;
6428 if (TREE_CODE (arg0) == REAL_CST)
6429 return 1;
6431 if (TREE_CODE (arg1) == COMPLEX_CST)
6432 return 0;
6433 if (TREE_CODE (arg0) == COMPLEX_CST)
6434 return 1;
6436 if (TREE_CONSTANT (arg1))
6437 return 0;
6438 if (TREE_CONSTANT (arg0))
6439 return 1;
6441 if (optimize_size)
6442 return 0;
6444 if (reorder && flag_evaluation_order
6445 && (TREE_SIDE_EFFECTS (arg0) || TREE_SIDE_EFFECTS (arg1)))
6446 return 0;
6448 if (DECL_P (arg1))
6449 return 0;
6450 if (DECL_P (arg0))
6451 return 1;
6453 /* It is preferable to swap two SSA_NAMEs to ensure a canonical form
6454 for commutative and comparison operators. Ensuring a canonical
6455 form allows the optimizers to find additional redundancies without
6456 having to explicitly check for both orderings. */
6457 if (TREE_CODE (arg0) == SSA_NAME
6458 && TREE_CODE (arg1) == SSA_NAME
6459 && SSA_NAME_VERSION (arg0) > SSA_NAME_VERSION (arg1))
6460 return 1;
6462 return 0;
6465 /* Fold comparison ARG0 CODE ARG1 (with result in TYPE), where
6466 ARG0 is extended to a wider type. */
6468 static tree
6469 fold_widened_comparison (enum tree_code code, tree type, tree arg0, tree arg1)
6471 tree arg0_unw = get_unwidened (arg0, NULL_TREE);
6472 tree arg1_unw;
6473 tree shorter_type, outer_type;
6474 tree min, max;
6475 bool above, below;
6477 if (arg0_unw == arg0)
6478 return NULL_TREE;
6479 shorter_type = TREE_TYPE (arg0_unw);
6481 #ifdef HAVE_canonicalize_funcptr_for_compare
6482 /* Disable this optimization if we're casting a function pointer
6483 type on targets that require function pointer canonicalization. */
6484 if (HAVE_canonicalize_funcptr_for_compare
6485 && TREE_CODE (shorter_type) == POINTER_TYPE
6486 && TREE_CODE (TREE_TYPE (shorter_type)) == FUNCTION_TYPE)
6487 return NULL_TREE;
6488 #endif
6490 if (TYPE_PRECISION (TREE_TYPE (arg0)) <= TYPE_PRECISION (shorter_type))
6491 return NULL_TREE;
6493 arg1_unw = get_unwidened (arg1, shorter_type);
6495 /* If possible, express the comparison in the shorter mode. */
6496 if ((code == EQ_EXPR || code == NE_EXPR
6497 || TYPE_UNSIGNED (TREE_TYPE (arg0)) == TYPE_UNSIGNED (shorter_type))
6498 && (TREE_TYPE (arg1_unw) == shorter_type
6499 || (TREE_CODE (arg1_unw) == INTEGER_CST
6500 && (TREE_CODE (shorter_type) == INTEGER_TYPE
6501 || TREE_CODE (shorter_type) == BOOLEAN_TYPE)
6502 && int_fits_type_p (arg1_unw, shorter_type))))
6503 return fold_build2 (code, type, arg0_unw,
6504 fold_convert (shorter_type, arg1_unw));
6506 if (TREE_CODE (arg1_unw) != INTEGER_CST
6507 || TREE_CODE (shorter_type) != INTEGER_TYPE
6508 || !int_fits_type_p (arg1_unw, shorter_type))
6509 return NULL_TREE;
6511 /* If we are comparing with an integer that does not fit into the range
6512 of the shorter type, the result is known. */
6513 outer_type = TREE_TYPE (arg1_unw);
6514 min = lower_bound_in_type (outer_type, shorter_type);
6515 max = upper_bound_in_type (outer_type, shorter_type);
6517 above = integer_nonzerop (fold_relational_const (LT_EXPR, type,
6518 max, arg1_unw));
6519 below = integer_nonzerop (fold_relational_const (LT_EXPR, type,
6520 arg1_unw, min));
6522 switch (code)
6524 case EQ_EXPR:
6525 if (above || below)
6526 return omit_one_operand (type, integer_zero_node, arg0);
6527 break;
6529 case NE_EXPR:
6530 if (above || below)
6531 return omit_one_operand (type, integer_one_node, arg0);
6532 break;
6534 case LT_EXPR:
6535 case LE_EXPR:
6536 if (above)
6537 return omit_one_operand (type, integer_one_node, arg0);
6538 else if (below)
6539 return omit_one_operand (type, integer_zero_node, arg0);
6541 case GT_EXPR:
6542 case GE_EXPR:
6543 if (above)
6544 return omit_one_operand (type, integer_zero_node, arg0);
6545 else if (below)
6546 return omit_one_operand (type, integer_one_node, arg0);
6548 default:
6549 break;
6552 return NULL_TREE;
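/* A sketch, assuming a 16-bit short: 100000 is above the range of
   short, so (int) s < 100000 is always true and the LT_EXPR case
   above folds the whole comparison to 1.  */
static int
widened_compare_sketch (short s)
{
  return (int) s < 100000;  /* Folds to 1.  */
}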
6555 /* Fold comparison ARG0 CODE ARG1 (with result in TYPE), where for
6556 ARG0 just the signedness is changed. */
6558 static tree
6559 fold_sign_changed_comparison (enum tree_code code, tree type,
6560 tree arg0, tree arg1)
6562 tree arg0_inner;
6563 tree inner_type, outer_type;
6565 if (TREE_CODE (arg0) != NOP_EXPR
6566 && TREE_CODE (arg0) != CONVERT_EXPR)
6567 return NULL_TREE;
6569 outer_type = TREE_TYPE (arg0);
6570 arg0_inner = TREE_OPERAND (arg0, 0);
6571 inner_type = TREE_TYPE (arg0_inner);
6573 #ifdef HAVE_canonicalize_funcptr_for_compare
6574 /* Disable this optimization if we're casting a function pointer
6575 type on targets that require function pointer canonicalization. */
6576 if (HAVE_canonicalize_funcptr_for_compare
6577 && TREE_CODE (inner_type) == POINTER_TYPE
6578 && TREE_CODE (TREE_TYPE (inner_type)) == FUNCTION_TYPE)
6579 return NULL_TREE;
6580 #endif
6582 if (TYPE_PRECISION (inner_type) != TYPE_PRECISION (outer_type))
6583 return NULL_TREE;
6585 if (TREE_CODE (arg1) != INTEGER_CST
6586 && !((TREE_CODE (arg1) == NOP_EXPR
6587 || TREE_CODE (arg1) == CONVERT_EXPR)
6588 && TREE_TYPE (TREE_OPERAND (arg1, 0)) == inner_type))
6589 return NULL_TREE;
6591 if (TYPE_UNSIGNED (inner_type) != TYPE_UNSIGNED (outer_type)
6592 && code != NE_EXPR
6593 && code != EQ_EXPR)
6594 return NULL_TREE;
6596 if (TREE_CODE (arg1) == INTEGER_CST)
6597 arg1 = force_fit_type_double (inner_type, TREE_INT_CST_LOW (arg1),
6598 TREE_INT_CST_HIGH (arg1), 0,
6599 TREE_OVERFLOW (arg1),
6600 false);
6601 else
6602 arg1 = fold_convert (inner_type, arg1);
6604 return fold_build2 (code, type, arg0_inner, arg1);
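/* A sketch of the sign-change rewrite: (unsigned) x == 5u tests the
   same bits as x == 5, so the conversion may be stripped for
   equality comparisons.  */
static int
sign_change_sketch (int x)
{
  return ((unsigned) x == 5u) == (x == 5);
}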
6607 /* Tries to replace &a[idx] CODE s * delta with &a[idx CODE delta], if s is
6608 the step of the array. Reconstructs s and delta in the case of s * delta
6609 being an integer constant (and thus already folded).
6610 ADDR is the address. OP1 is the multiplicative expression.
6611 If the function succeeds, the new address expression is returned. Otherwise
6612 NULL_TREE is returned. */
6614 static tree
6615 try_move_mult_to_index (enum tree_code code, tree addr, tree op1)
6617 tree s, delta, step;
6618 tree ref = TREE_OPERAND (addr, 0), pref;
6619 tree ret, pos;
6620 tree itype;
6622 /* Canonicalize op1 into a possibly non-constant delta
6623 and an INTEGER_CST s. */
6624 if (TREE_CODE (op1) == MULT_EXPR)
6626 tree arg0 = TREE_OPERAND (op1, 0), arg1 = TREE_OPERAND (op1, 1);
6628 STRIP_NOPS (arg0);
6629 STRIP_NOPS (arg1);
6631 if (TREE_CODE (arg0) == INTEGER_CST)
6633 s = arg0;
6634 delta = arg1;
6636 else if (TREE_CODE (arg1) == INTEGER_CST)
6638 s = arg1;
6639 delta = arg0;
6641 else
6642 return NULL_TREE;
6644 else if (TREE_CODE (op1) == INTEGER_CST)
6646 delta = op1;
6647 s = NULL_TREE;
6649 else
6651 /* Treat op1 as delta * 1. */
6652 delta = op1;
6653 s = integer_one_node;
6656 for (;; ref = TREE_OPERAND (ref, 0))
6658 if (TREE_CODE (ref) == ARRAY_REF)
6660 itype = TYPE_DOMAIN (TREE_TYPE (TREE_OPERAND (ref, 0)));
6661 if (! itype)
6662 continue;
6664 step = array_ref_element_size (ref);
6665 if (TREE_CODE (step) != INTEGER_CST)
6666 continue;
6668 if (s)
6670 if (! tree_int_cst_equal (step, s))
6671 continue;
6673 else
6675 /* Check whether delta is a multiple of step. */
6676 tree tmp = div_if_zero_remainder (EXACT_DIV_EXPR, delta, step);
6677 if (! tmp)
6678 continue;
6679 delta = tmp;
6682 break;
6685 if (!handled_component_p (ref))
6686 return NULL_TREE;
6689 /* We found a suitable array reference. So copy everything up to it,
6690 and replace the index. */
6692 pref = TREE_OPERAND (addr, 0);
6693 ret = copy_node (pref);
6694 pos = ret;
6696 while (pref != ref)
6698 pref = TREE_OPERAND (pref, 0);
6699 TREE_OPERAND (pos, 0) = copy_node (pref);
6700 pos = TREE_OPERAND (pos, 0);
6703 TREE_OPERAND (pos, 1) = fold_build2 (code, itype,
6704 fold_convert (itype,
6705 TREE_OPERAND (pos, 1)),
6706 fold_convert (itype, delta));
6708 return fold_build1 (ADDR_EXPR, TREE_TYPE (addr), ret);
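/* A sketch in source terms of the index rewrite above: adding d
   elements' worth of bytes to &a[i] lands on &a[i + d], so the
   ARRAY_REF index absorbs the addend instead of the address decaying
   to pointer arithmetic.  */
static int *
move_mult_sketch (int a[], int i, int d)
{
  return &a[i] + d;  /* The same address as &a[i + d].  */
}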
6712 /* Fold A < X && A + 1 > Y to A < X && A >= Y. Normally A + 1 > Y
6713 means A >= Y && A != MAX, but in this case we know that
6714 A < X <= MAX. INEQ is A + 1 > Y, BOUND is A < X. */
6716 static tree
6717 fold_to_nonsharp_ineq_using_bound (tree ineq, tree bound)
6719 tree a, typea, type = TREE_TYPE (ineq), a1, diff, y;
6721 if (TREE_CODE (bound) == LT_EXPR)
6722 a = TREE_OPERAND (bound, 0);
6723 else if (TREE_CODE (bound) == GT_EXPR)
6724 a = TREE_OPERAND (bound, 1);
6725 else
6726 return NULL_TREE;
6728 typea = TREE_TYPE (a);
6729 if (!INTEGRAL_TYPE_P (typea)
6730 && !POINTER_TYPE_P (typea))
6731 return NULL_TREE;
6733 if (TREE_CODE (ineq) == LT_EXPR)
6735 a1 = TREE_OPERAND (ineq, 1);
6736 y = TREE_OPERAND (ineq, 0);
6738 else if (TREE_CODE (ineq) == GT_EXPR)
6740 a1 = TREE_OPERAND (ineq, 0);
6741 y = TREE_OPERAND (ineq, 1);
6743 else
6744 return NULL_TREE;
6746 if (TREE_TYPE (a1) != typea)
6747 return NULL_TREE;
6749 diff = fold_build2 (MINUS_EXPR, typea, a1, a);
6750 if (!integer_onep (diff))
6751 return NULL_TREE;
6753 return fold_build2 (GE_EXPR, type, a, y);
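/* A sketch with plain ints: under the bound a < x, a cannot be the
   maximum value, so a + 1 does not wrap and a + 1 > y is exactly
   a >= y.  */
static int
nonsharp_ineq_sketch (int a, int x, int y)
{
  return (a < x && a + 1 > y) == (a < x && a >= y);
}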
6756 /* Fold a sum or difference involving at least one multiplication.
6757 Returns the folded tree or NULL if no simplification could be made. */
6759 static tree
6760 fold_plusminus_mult_expr (enum tree_code code, tree type, tree arg0, tree arg1)
6762 tree arg00, arg01, arg10, arg11;
6763 tree alt0 = NULL_TREE, alt1 = NULL_TREE, same;
6765 /* (A * C) +- (B * C) -> (A+-B) * C.
6766 (A * C) +- A -> A * (C+-1).
6767 We are most concerned about the case where C is a constant,
6768 but other combinations show up during loop reduction. Since
6769 it is not difficult, try all four possibilities. */
6771 if (TREE_CODE (arg0) == MULT_EXPR)
6773 arg00 = TREE_OPERAND (arg0, 0);
6774 arg01 = TREE_OPERAND (arg0, 1);
6776 else
6778 arg00 = arg0;
6779 arg01 = build_one_cst (type);
6781 if (TREE_CODE (arg1) == MULT_EXPR)
6783 arg10 = TREE_OPERAND (arg1, 0);
6784 arg11 = TREE_OPERAND (arg1, 1);
6786 else
6788 arg10 = arg1;
6789 arg11 = build_one_cst (type);
6791 same = NULL_TREE;
6793 if (operand_equal_p (arg01, arg11, 0))
6794 same = arg01, alt0 = arg00, alt1 = arg10;
6795 else if (operand_equal_p (arg00, arg10, 0))
6796 same = arg00, alt0 = arg01, alt1 = arg11;
6797 else if (operand_equal_p (arg00, arg11, 0))
6798 same = arg00, alt0 = arg01, alt1 = arg10;
6799 else if (operand_equal_p (arg01, arg10, 0))
6800 same = arg01, alt0 = arg00, alt1 = arg11;
6802 /* No identical multiplicands; see if we can find a common
6803 power-of-two factor in non-power-of-two multiplies. This
6804 can help in multi-dimensional array access. */
6805 else if (host_integerp (arg01, 0)
6806 && host_integerp (arg11, 0))
6808 HOST_WIDE_INT int01, int11, tmp;
6809 bool swap = false;
6810 tree maybe_same;
6811 int01 = TREE_INT_CST_LOW (arg01);
6812 int11 = TREE_INT_CST_LOW (arg11);
6814 /* Move min of absolute values to int11. */
6815 if ((int01 >= 0 ? int01 : -int01)
6816 < (int11 >= 0 ? int11 : -int11))
6818 tmp = int01, int01 = int11, int11 = tmp;
6819 alt0 = arg00, arg00 = arg10, arg10 = alt0;
6820 maybe_same = arg01;
6821 swap = true;
6823 else
6824 maybe_same = arg11;
6826 if (exact_log2 (abs (int11)) > 0 && int01 % int11 == 0)
6828 alt0 = fold_build2 (MULT_EXPR, TREE_TYPE (arg00), arg00,
6829 build_int_cst (TREE_TYPE (arg00),
6830 int01 / int11));
6831 alt1 = arg10;
6832 same = maybe_same;
6833 if (swap)
6834 maybe_same = alt0, alt0 = alt1, alt1 = maybe_same;
6838 if (same)
6839 return fold_build2 (MULT_EXPR, type,
6840 fold_build2 (code, type,
6841 fold_convert (type, alt0),
6842 fold_convert (type, alt1)),
6843 fold_convert (type, same));
6845 return NULL_TREE;
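/* A sketch of the factoring above; exact for unsigned arithmetic,
   and for signed operands assuming no intermediate overflow.  */
static int
plusminus_mult_sketch (unsigned a, unsigned b, unsigned c)
{
  /* a*c + b*c -> (a + b) * c; a bare term counts as a multiply by
     one, so a*c + a -> a * (c + 1).  */
  return (a * c + b * c) == (a + b) * c
	 && (a * c + a) == a * (c + 1);
}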
6848 /* Subroutine of native_encode_expr. Encode the INTEGER_CST
6849 specified by EXPR into the buffer PTR of length LEN bytes.
6850 Return the number of bytes placed in the buffer, or zero
6851 upon failure. */
6853 static int
6854 native_encode_int (tree expr, unsigned char *ptr, int len)
6856 tree type = TREE_TYPE (expr);
6857 int total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
6858 int byte, offset, word, words;
6859 unsigned char value;
6861 if (total_bytes > len)
6862 return 0;
6863 words = total_bytes / UNITS_PER_WORD;
6865 for (byte = 0; byte < total_bytes; byte++)
6867 int bitpos = byte * BITS_PER_UNIT;
6868 if (bitpos < HOST_BITS_PER_WIDE_INT)
6869 value = (unsigned char) (TREE_INT_CST_LOW (expr) >> bitpos);
6870 else
6871 value = (unsigned char) (TREE_INT_CST_HIGH (expr)
6872 >> (bitpos - HOST_BITS_PER_WIDE_INT));
6874 if (total_bytes > UNITS_PER_WORD)
6876 word = byte / UNITS_PER_WORD;
6877 if (WORDS_BIG_ENDIAN)
6878 word = (words - 1) - word;
6879 offset = word * UNITS_PER_WORD;
6880 if (BYTES_BIG_ENDIAN)
6881 offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
6882 else
6883 offset += byte % UNITS_PER_WORD;
6885 else
6886 offset = BYTES_BIG_ENDIAN ? (total_bytes - 1) - byte : byte;
6887 ptr[offset] = value;
6889 return total_bytes;
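/* A sketch of the byte ordering computed above, assuming a 32-bit
   unsigned: a little-endian target stores 0x01020304 as the bytes
   04 03 02 01, i.e. ptr[0] receives the low-order byte, while a
   big-endian target reverses the offsets.  */
static int
encode_int_sketch (void)
{
  unsigned v = 0x01020304u;
  return (unsigned char) v == 0x04             /* Low-order byte.  */
	 && (unsigned char) (v >> 24) == 0x01; /* High-order byte.  */
}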
6893 /* Subroutine of native_encode_expr. Encode the REAL_CST
6894 specified by EXPR into the buffer PTR of length LEN bytes.
6895 Return the number of bytes placed in the buffer, or zero
6896 upon failure. */
6898 static int
6899 native_encode_real (tree expr, unsigned char *ptr, int len)
6901 tree type = TREE_TYPE (expr);
6902 int total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
6903 int byte, offset, word, words;
6904 unsigned char value;
6906 /* There are always 32 bits in each long, no matter the size of
6907 the host's long. We handle floating point representations with
6908 up to 192 bits. */
6909 long tmp[6];
6911 if (total_bytes > len)
6912 return 0;
6913 words = total_bytes / UNITS_PER_WORD;
6915 real_to_target (tmp, TREE_REAL_CST_PTR (expr), TYPE_MODE (type));
6917 for (byte = 0; byte < total_bytes; byte++)
6919 int bitpos = byte * BITS_PER_UNIT;
6920 value = (unsigned char) (tmp[bitpos / 32] >> (bitpos & 31));
6922 if (total_bytes > UNITS_PER_WORD)
6924 word = byte / UNITS_PER_WORD;
6925 if (FLOAT_WORDS_BIG_ENDIAN)
6926 word = (words - 1) - word;
6927 offset = word * UNITS_PER_WORD;
6928 if (BYTES_BIG_ENDIAN)
6929 offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
6930 else
6931 offset += byte % UNITS_PER_WORD;
6933 else
6934 offset = BYTES_BIG_ENDIAN ? (total_bytes - 1) - byte : byte;
6935 ptr[offset] = value;
6937 return total_bytes;
6940 /* Subroutine of native_encode_expr. Encode the COMPLEX_CST
6941 specified by EXPR into the buffer PTR of length LEN bytes.
6942 Return the number of bytes placed in the buffer, or zero
6943 upon failure. */
6945 static int
6946 native_encode_complex (tree expr, unsigned char *ptr, int len)
6948 int rsize, isize;
6949 tree part;
6951 part = TREE_REALPART (expr);
6952 rsize = native_encode_expr (part, ptr, len);
6953 if (rsize == 0)
6954 return 0;
6955 part = TREE_IMAGPART (expr);
6956 isize = native_encode_expr (part, ptr+rsize, len-rsize);
6957 if (isize != rsize)
6958 return 0;
6959 return rsize + isize;
6963 /* Subroutine of native_encode_expr. Encode the VECTOR_CST
6964 specified by EXPR into the buffer PTR of length LEN bytes.
6965 Return the number of bytes placed in the buffer, or zero
6966 upon failure. */
6968 static int
6969 native_encode_vector (tree expr, unsigned char *ptr, int len)
6971 int i, size, offset, count;
6972 tree itype, elem, elements;
6974 offset = 0;
6975 elements = TREE_VECTOR_CST_ELTS (expr);
6976 count = TYPE_VECTOR_SUBPARTS (TREE_TYPE (expr));
6977 itype = TREE_TYPE (TREE_TYPE (expr));
6978 size = GET_MODE_SIZE (TYPE_MODE (itype));
6979 for (i = 0; i < count; i++)
6981 if (elements)
6983 elem = TREE_VALUE (elements);
6984 elements = TREE_CHAIN (elements);
6986 else
6987 elem = NULL_TREE;
6989 if (elem)
6991 if (native_encode_expr (elem, ptr+offset, len-offset) != size)
6992 return 0;
6994 else
6996 if (offset + size > len)
6997 return 0;
6998 memset (ptr+offset, 0, size);
7000 offset += size;
7002 return offset;
7006 /* Subroutine of fold_view_convert_expr. Encode the INTEGER_CST,
7007 REAL_CST, COMPLEX_CST or VECTOR_CST specified by EXPR into the
7008 buffer PTR of length LEN bytes. Return the number of bytes
7009 placed in the buffer, or zero upon failure. */
7011 static int
7012 native_encode_expr (tree expr, unsigned char *ptr, int len)
7014 switch (TREE_CODE (expr))
7016 case INTEGER_CST:
7017 return native_encode_int (expr, ptr, len);
7019 case REAL_CST:
7020 return native_encode_real (expr, ptr, len);
7022 case COMPLEX_CST:
7023 return native_encode_complex (expr, ptr, len);
7025 case VECTOR_CST:
7026 return native_encode_vector (expr, ptr, len);
7028 default:
7029 return 0;
7034 /* Subroutine of native_interpret_expr. Interpret the contents of
7035 the buffer PTR of length LEN as an INTEGER_CST of type TYPE.
7036 If the buffer cannot be interpreted, return NULL_TREE. */
7038 static tree
7039 native_interpret_int (tree type, unsigned char *ptr, int len)
7041 int total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
7042 int byte, offset, word, words;
7043 unsigned char value;
7044 unsigned HOST_WIDE_INT lo = 0;
7045 HOST_WIDE_INT hi = 0;
7047 if (total_bytes > len)
7048 return NULL_TREE;
7049 if (total_bytes * BITS_PER_UNIT > 2 * HOST_BITS_PER_WIDE_INT)
7050 return NULL_TREE;
7051 words = total_bytes / UNITS_PER_WORD;
7053 for (byte = 0; byte < total_bytes; byte++)
7055 int bitpos = byte * BITS_PER_UNIT;
7056 if (total_bytes > UNITS_PER_WORD)
7058 word = byte / UNITS_PER_WORD;
7059 if (WORDS_BIG_ENDIAN)
7060 word = (words - 1) - word;
7061 offset = word * UNITS_PER_WORD;
7062 if (BYTES_BIG_ENDIAN)
7063 offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
7064 else
7065 offset += byte % UNITS_PER_WORD;
7067 else
7068 offset = BYTES_BIG_ENDIAN ? (total_bytes - 1) - byte : byte;
7069 value = ptr[offset];
7071 if (bitpos < HOST_BITS_PER_WIDE_INT)
7072 lo |= (unsigned HOST_WIDE_INT) value << bitpos;
7073 else
7074 hi |= (unsigned HOST_WIDE_INT) value
7075 << (bitpos - HOST_BITS_PER_WIDE_INT);
7078 return build_int_cst_wide_type (type, lo, hi);
7082 /* Subroutine of native_interpret_expr. Interpret the contents of
7083 the buffer PTR of length LEN as a REAL_CST of type TYPE.
7084 If the buffer cannot be interpreted, return NULL_TREE. */
7086 static tree
7087 native_interpret_real (tree type, unsigned char *ptr, int len)
7089 enum machine_mode mode = TYPE_MODE (type);
7090 int total_bytes = GET_MODE_SIZE (mode);
7091 int byte, offset, word, words;
7092 unsigned char value;
7093 /* There are always 32 bits in each long, no matter the size of
7094 the host's long. We handle floating point representations with
7095 up to 192 bits. */
7096 REAL_VALUE_TYPE r;
7097 long tmp[6];
7099 total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
7100 if (total_bytes > len || total_bytes > 24)
7101 return NULL_TREE;
7102 words = total_bytes / UNITS_PER_WORD;
7104 memset (tmp, 0, sizeof (tmp));
7105 for (byte = 0; byte < total_bytes; byte++)
7107 int bitpos = byte * BITS_PER_UNIT;
7108 if (total_bytes > UNITS_PER_WORD)
7110 word = byte / UNITS_PER_WORD;
7111 if (FLOAT_WORDS_BIG_ENDIAN)
7112 word = (words - 1) - word;
7113 offset = word * UNITS_PER_WORD;
7114 if (BYTES_BIG_ENDIAN)
7115 offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
7116 else
7117 offset += byte % UNITS_PER_WORD;
7119 else
7120 offset = BYTES_BIG_ENDIAN ? (total_bytes - 1) - byte : byte;
7121 value = ptr[offset];
7123 tmp[bitpos / 32] |= (unsigned long)value << (bitpos & 31);
7126 real_from_target (&r, tmp, mode);
7127 return build_real (type, r);
7131 /* Subroutine of native_interpret_expr. Interpret the contents of
7132 the buffer PTR of length LEN as a COMPLEX_CST of type TYPE.
7133 If the buffer cannot be interpreted, return NULL_TREE. */
7135 static tree
7136 native_interpret_complex (tree type, unsigned char *ptr, int len)
7138 tree etype, rpart, ipart;
7139 int size;
7141 etype = TREE_TYPE (type);
7142 size = GET_MODE_SIZE (TYPE_MODE (etype));
7143 if (size * 2 > len)
7144 return NULL_TREE;
7145 rpart = native_interpret_expr (etype, ptr, size);
7146 if (!rpart)
7147 return NULL_TREE;
7148 ipart = native_interpret_expr (etype, ptr+size, size);
7149 if (!ipart)
7150 return NULL_TREE;
7151 return build_complex (type, rpart, ipart);
7155 /* Subroutine of native_interpret_expr. Interpret the contents of
7156 the buffer PTR of length LEN as a VECTOR_CST of type TYPE.
7157 If the buffer cannot be interpreted, return NULL_TREE. */
7159 static tree
7160 native_interpret_vector (tree type, unsigned char *ptr, int len)
7162 tree etype, elem, elements;
7163 int i, size, count;
7165 etype = TREE_TYPE (type);
7166 size = GET_MODE_SIZE (TYPE_MODE (etype));
7167 count = TYPE_VECTOR_SUBPARTS (type);
7168 if (size * count > len)
7169 return NULL_TREE;
7171 elements = NULL_TREE;
7172 for (i = count - 1; i >= 0; i--)
7174 elem = native_interpret_expr (etype, ptr+(i*size), size);
7175 if (!elem)
7176 return NULL_TREE;
7177 elements = tree_cons (NULL_TREE, elem, elements);
7179 return build_vector (type, elements);
7183 /* Subroutine of fold_view_convert_expr. Interpret the contents of
7184 the buffer PTR of length LEN as a constant of type TYPE. For
7185 INTEGRAL_TYPE_P we return an INTEGER_CST, for SCALAR_FLOAT_TYPE_P
7186 we return a REAL_CST, etc... If the buffer cannot be interpreted,
7187 return NULL_TREE. */
7189 static tree
7190 native_interpret_expr (tree type, unsigned char *ptr, int len)
7192 switch (TREE_CODE (type))
7194 case INTEGER_TYPE:
7195 case ENUMERAL_TYPE:
7196 case BOOLEAN_TYPE:
7197 return native_interpret_int (type, ptr, len);
7199 case REAL_TYPE:
7200 return native_interpret_real (type, ptr, len);
7202 case COMPLEX_TYPE:
7203 return native_interpret_complex (type, ptr, len);
7205 case VECTOR_TYPE:
7206 return native_interpret_vector (type, ptr, len);
7208 default:
7209 return NULL_TREE;
7214 /* Fold a VIEW_CONVERT_EXPR of a constant expression EXPR to type
7215 TYPE at compile-time. If we're unable to perform the conversion
7216 return NULL_TREE. */
7218 static tree
7219 fold_view_convert_expr (tree type, tree expr)
7221 /* We support up to 512-bit values (for V8DFmode). */
7222 unsigned char buffer[64];
7223 int len;
7225 /* Check that the host and target are sane. */
7226 if (CHAR_BIT != 8 || BITS_PER_UNIT != 8)
7227 return NULL_TREE;
7229 len = native_encode_expr (expr, buffer, sizeof (buffer));
7230 if (len == 0)
7231 return NULL_TREE;
7233 return native_interpret_expr (type, buffer, len);
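/* A sketch of what encode-then-interpret computes, assuming IEEE
   single precision, 4-byte float and unsigned, and <string.h> for
   memcpy: the bits of 1.0f viewed as an unsigned are 0x3f800000.  */
static unsigned
view_convert_sketch (void)
{
  float f = 1.0f;
  unsigned u;
  memcpy (&u, &f, sizeof u);
  return u;  /* 0x3f800000.  */
}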
7237 /* Fold a unary expression of code CODE and type TYPE with operand
7238 OP0. Return the folded expression if folding is successful.
7239 Otherwise, return NULL_TREE. */
7241 tree
7242 fold_unary (enum tree_code code, tree type, tree op0)
7244 tree tem;
7245 tree arg0;
7246 enum tree_code_class kind = TREE_CODE_CLASS (code);
7248 gcc_assert (IS_EXPR_CODE_CLASS (kind)
7249 && TREE_CODE_LENGTH (code) == 1);
7251 arg0 = op0;
7252 if (arg0)
7254 if (code == NOP_EXPR || code == CONVERT_EXPR
7255 || code == FLOAT_EXPR || code == ABS_EXPR)
7257 /* Don't use STRIP_NOPS, because signedness of argument type
7258 matters. */
7259 STRIP_SIGN_NOPS (arg0);
7261 else
7263 /* Strip any conversions that don't change the mode. This
7264 is safe for every expression, except for a comparison
7265 expression because its signedness is derived from its
7266 operands.
7268 Note that this is done as an internal manipulation within
7269 the constant folder, in order to find the simplest
7270 representation of the arguments so that their form can be
7271 studied. In any case, the appropriate type conversions
7272 should be put back in the tree that will get out of the
7273 constant folder. */
7274 STRIP_NOPS (arg0);
7278 if (TREE_CODE_CLASS (code) == tcc_unary)
7280 if (TREE_CODE (arg0) == COMPOUND_EXPR)
7281 return build2 (COMPOUND_EXPR, type, TREE_OPERAND (arg0, 0),
7282 fold_build1 (code, type, TREE_OPERAND (arg0, 1)));
7283 else if (TREE_CODE (arg0) == COND_EXPR)
7285 tree arg01 = TREE_OPERAND (arg0, 1);
7286 tree arg02 = TREE_OPERAND (arg0, 2);
7287 if (! VOID_TYPE_P (TREE_TYPE (arg01)))
7288 arg01 = fold_build1 (code, type, arg01);
7289 if (! VOID_TYPE_P (TREE_TYPE (arg02)))
7290 arg02 = fold_build1 (code, type, arg02);
7291 tem = fold_build3 (COND_EXPR, type, TREE_OPERAND (arg0, 0),
7292 arg01, arg02);
7294 /* If this was a conversion, and all we did was to move it
7295 inside the COND_EXPR, bring it back out. But leave it if
7296 it is a conversion from integer to integer and the
7297 result precision is no wider than a word since such a
7298 conversion is cheap and may be optimized away by combine,
7299 while it couldn't if it were outside the COND_EXPR. Then return
7300 so we don't get into an infinite recursion loop taking the
7301 conversion out and then back in. */
7303 if ((code == NOP_EXPR || code == CONVERT_EXPR
7304 || code == NON_LVALUE_EXPR)
7305 && TREE_CODE (tem) == COND_EXPR
7306 && TREE_CODE (TREE_OPERAND (tem, 1)) == code
7307 && TREE_CODE (TREE_OPERAND (tem, 2)) == code
7308 && ! VOID_TYPE_P (TREE_OPERAND (tem, 1))
7309 && ! VOID_TYPE_P (TREE_OPERAND (tem, 2))
7310 && (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 1), 0))
7311 == TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 2), 0)))
7312 && (! (INTEGRAL_TYPE_P (TREE_TYPE (tem))
7313 && (INTEGRAL_TYPE_P
7314 (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 1), 0))))
7315 && TYPE_PRECISION (TREE_TYPE (tem)) <= BITS_PER_WORD)
7316 || flag_syntax_only))
7317 tem = build1 (code, type,
7318 build3 (COND_EXPR,
7319 TREE_TYPE (TREE_OPERAND
7320 (TREE_OPERAND (tem, 1), 0)),
7321 TREE_OPERAND (tem, 0),
7322 TREE_OPERAND (TREE_OPERAND (tem, 1), 0),
7323 TREE_OPERAND (TREE_OPERAND (tem, 2), 0)));
7324 return tem;
7326 else if (COMPARISON_CLASS_P (arg0))
7328 if (TREE_CODE (type) == BOOLEAN_TYPE)
7330 arg0 = copy_node (arg0);
7331 TREE_TYPE (arg0) = type;
7332 return arg0;
7334 else if (TREE_CODE (type) != INTEGER_TYPE)
7335 return fold_build3 (COND_EXPR, type, arg0,
7336 fold_build1 (code, type,
7337 integer_one_node),
7338 fold_build1 (code, type,
7339 integer_zero_node));
7343 switch (code)
7345 case NOP_EXPR:
7346 case FLOAT_EXPR:
7347 case CONVERT_EXPR:
7348 case FIX_TRUNC_EXPR:
7349 if (TREE_TYPE (op0) == type)
7350 return op0;
7352 /* If we have (type) (a CMP b) and type is an integral type, return
7353 new expression involving the new type. */
7354 if (COMPARISON_CLASS_P (op0) && INTEGRAL_TYPE_P (type))
7355 return fold_build2 (TREE_CODE (op0), type, TREE_OPERAND (op0, 0),
7356 TREE_OPERAND (op0, 1));
7358 /* Handle cases of two conversions in a row. */
7359 if (TREE_CODE (op0) == NOP_EXPR
7360 || TREE_CODE (op0) == CONVERT_EXPR)
7362 tree inside_type = TREE_TYPE (TREE_OPERAND (op0, 0));
7363 tree inter_type = TREE_TYPE (op0);
7364 int inside_int = INTEGRAL_TYPE_P (inside_type);
7365 int inside_ptr = POINTER_TYPE_P (inside_type);
7366 int inside_float = FLOAT_TYPE_P (inside_type);
7367 int inside_vec = TREE_CODE (inside_type) == VECTOR_TYPE;
7368 unsigned int inside_prec = TYPE_PRECISION (inside_type);
7369 int inside_unsignedp = TYPE_UNSIGNED (inside_type);
7370 int inter_int = INTEGRAL_TYPE_P (inter_type);
7371 int inter_ptr = POINTER_TYPE_P (inter_type);
7372 int inter_float = FLOAT_TYPE_P (inter_type);
7373 int inter_vec = TREE_CODE (inter_type) == VECTOR_TYPE;
7374 unsigned int inter_prec = TYPE_PRECISION (inter_type);
7375 int inter_unsignedp = TYPE_UNSIGNED (inter_type);
7376 int final_int = INTEGRAL_TYPE_P (type);
7377 int final_ptr = POINTER_TYPE_P (type);
7378 int final_float = FLOAT_TYPE_P (type);
7379 int final_vec = TREE_CODE (type) == VECTOR_TYPE;
7380 unsigned int final_prec = TYPE_PRECISION (type);
7381 int final_unsignedp = TYPE_UNSIGNED (type);
7383 /* In addition to the cases of two conversions in a row
7384 handled below, if we are converting something to its own
7385 type via an object of identical or wider precision, neither
7386 conversion is needed. */
7387 if (TYPE_MAIN_VARIANT (inside_type) == TYPE_MAIN_VARIANT (type)
7388 && (((inter_int || inter_ptr) && final_int)
7389 || (inter_float && final_float))
7390 && inter_prec >= final_prec)
7391 return fold_build1 (code, type, TREE_OPERAND (op0, 0));
7393 /* Likewise, if the intermediate and final types are either both
7394 float or both integer, we don't need the middle conversion if
7395 it is wider than the final type and doesn't change the signedness
7396 (for integers). Avoid this if the final type is a pointer
7397 since then we sometimes need the inner conversion. Likewise if
7398 the outer has a precision not equal to the size of its mode. */
7399 if ((((inter_int || inter_ptr) && (inside_int || inside_ptr))
7400 || (inter_float && inside_float)
7401 || (inter_vec && inside_vec))
7402 && inter_prec >= inside_prec
7403 && (inter_float || inter_vec
7404 || inter_unsignedp == inside_unsignedp)
7405 && ! (final_prec != GET_MODE_BITSIZE (TYPE_MODE (type))
7406 && TYPE_MODE (type) == TYPE_MODE (inter_type))
7407 && ! final_ptr
7408 && (! final_vec || inter_prec == inside_prec))
7409 return fold_build1 (code, type, TREE_OPERAND (op0, 0));
7411 /* If we have a sign-extension of a zero-extended value, we can
7412 replace that by a single zero-extension. */
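/* Illustrative sketch, not from the original source (assumes 8-bit
chars, 32-bit ints and 64-bit long longs): in
(long long)(int)(unsigned char)c the inner cast already zero-extends,
so the chain folds to a single zero-extension from unsigned char. */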
7413 if (inside_int && inter_int && final_int
7414 && inside_prec < inter_prec && inter_prec < final_prec
7415 && inside_unsignedp && !inter_unsignedp)
7416 return fold_build1 (code, type, TREE_OPERAND (op0, 0));
7418 /* Two conversions in a row are not needed unless:
7419 - some conversion is floating-point (overstrict for now), or
7420 - some conversion is a vector (overstrict for now), or
7421 - the intermediate type is narrower than both initial and
7422 final, or
7423 - the intermediate type and innermost type differ in signedness,
7424 and the outermost type is wider than the intermediate, or
7425 - the initial type is a pointer type and the precisions of the
7426 intermediate and final types differ, or
7427 - the final type is a pointer type and the precisions of the
7428 initial and intermediate types differ, or
7429 - the final type is a pointer type and the initial type is not, or
7430 - the initial type is a pointer to an array and the final type
7431 is not. */
7432 if (! inside_float && ! inter_float && ! final_float
7433 && ! inside_vec && ! inter_vec && ! final_vec
7434 && (inter_prec >= inside_prec || inter_prec >= final_prec)
7435 && ! (inside_int && inter_int
7436 && inter_unsignedp != inside_unsignedp
7437 && inter_prec < final_prec)
7438 && ((inter_unsignedp && inter_prec > inside_prec)
7439 == (final_unsignedp && final_prec > inter_prec))
7440 && ! (inside_ptr && inter_prec != final_prec)
7441 && ! (final_ptr && inside_prec != inter_prec)
7442 && ! (final_prec != GET_MODE_BITSIZE (TYPE_MODE (type))
7443 && TYPE_MODE (type) == TYPE_MODE (inter_type))
7444 && final_ptr == inside_ptr
7445 && ! (inside_ptr
7446 && TREE_CODE (TREE_TYPE (inside_type)) == ARRAY_TYPE
7447 && TREE_CODE (TREE_TYPE (type)) != ARRAY_TYPE))
7448 return fold_build1 (code, type, TREE_OPERAND (op0, 0));
7451 /* Handle (T *)&A.B.C for A being of type T and B and C
7452 living at offset zero. This occurs frequently in
7453 C++ upcasting and then accessing the base. */
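/* Illustrative example with hypothetical types: given
struct B { int i; }; struct D { struct B b; } d;
the expression (struct D *) &d.b is folded back to &d, since
member b lives at offset zero and the base object has type struct D. */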
7454 if (TREE_CODE (op0) == ADDR_EXPR
7455 && POINTER_TYPE_P (type)
7456 && handled_component_p (TREE_OPERAND (op0, 0)))
7458 HOST_WIDE_INT bitsize, bitpos;
7459 tree offset;
7460 enum machine_mode mode;
7461 int unsignedp, volatilep;
7462 tree base = TREE_OPERAND (op0, 0);
7463 base = get_inner_reference (base, &bitsize, &bitpos, &offset,
7464 &mode, &unsignedp, &volatilep, false);
7465 /* If the reference was to a (constant) zero offset, we can use
7466 the address of the base if it has the same base type
7467 as the result type. */
7468 if (! offset && bitpos == 0
7469 && TYPE_MAIN_VARIANT (TREE_TYPE (type))
7470 == TYPE_MAIN_VARIANT (TREE_TYPE (base)))
7471 return fold_convert (type, build_fold_addr_expr (base));
7474 if ((TREE_CODE (op0) == MODIFY_EXPR
7475 || TREE_CODE (op0) == GIMPLE_MODIFY_STMT)
7476 && TREE_CONSTANT (GENERIC_TREE_OPERAND (op0, 1))
7477 /* Detect assigning a bitfield. */
7478 && !(TREE_CODE (GENERIC_TREE_OPERAND (op0, 0)) == COMPONENT_REF
7479 && DECL_BIT_FIELD
7480 (TREE_OPERAND (GENERIC_TREE_OPERAND (op0, 0), 1))))
7482 /* Don't leave an assignment inside a conversion
7483 unless assigning a bitfield. */
7484 tem = fold_build1 (code, type, GENERIC_TREE_OPERAND (op0, 1));
7485 /* First do the assignment, then return converted constant. */
7486 tem = build2 (COMPOUND_EXPR, TREE_TYPE (tem), op0, tem);
7487 TREE_NO_WARNING (tem) = 1;
7488 TREE_USED (tem) = 1;
7489 return tem;
7492 /* Convert (T)(x & c) into (T)x & (T)c, if c is an integer
7493 constant (if x has signed type, the sign bit cannot be set
7494 in c). This folds extension into the BIT_AND_EXPR. */
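/* Illustrative example, assuming 32-bit int x:
(unsigned short) (x & 0xff) becomes (unsigned short) x & 0xff,
because the target precision (16) does not exceed that of the
BIT_AND_EXPR operand (32). */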
7495 if (INTEGRAL_TYPE_P (type)
7496 && TREE_CODE (type) != BOOLEAN_TYPE
7497 && TREE_CODE (op0) == BIT_AND_EXPR
7498 && TREE_CODE (TREE_OPERAND (op0, 1)) == INTEGER_CST)
7500 tree and = op0;
7501 tree and0 = TREE_OPERAND (and, 0), and1 = TREE_OPERAND (and, 1);
7502 int change = 0;
7504 if (TYPE_UNSIGNED (TREE_TYPE (and))
7505 || (TYPE_PRECISION (type)
7506 <= TYPE_PRECISION (TREE_TYPE (and))))
7507 change = 1;
7508 else if (TYPE_PRECISION (TREE_TYPE (and1))
7509 <= HOST_BITS_PER_WIDE_INT
7510 && host_integerp (and1, 1))
7512 unsigned HOST_WIDE_INT cst;
7514 cst = tree_low_cst (and1, 1);
7515 cst &= (HOST_WIDE_INT) -1
7516 << (TYPE_PRECISION (TREE_TYPE (and1)) - 1);
7517 change = (cst == 0);
7518 #ifdef LOAD_EXTEND_OP
7519 if (change
7520 && !flag_syntax_only
7521 && (LOAD_EXTEND_OP (TYPE_MODE (TREE_TYPE (and0)))
7522 == ZERO_EXTEND))
7524 tree uns = lang_hooks.types.unsigned_type (TREE_TYPE (and0));
7525 and0 = fold_convert (uns, and0);
7526 and1 = fold_convert (uns, and1);
7528 #endif
7530 if (change)
7532 tem = force_fit_type_double (type, TREE_INT_CST_LOW (and1),
7533 TREE_INT_CST_HIGH (and1), 0,
7534 TREE_OVERFLOW (and1),
7535 false);
7536 return fold_build2 (BIT_AND_EXPR, type,
7537 fold_convert (type, and0), tem);
7541 /* Convert (T1)((T2)X op Y) into (T1)X op Y, for pointer types T1 and
7542 T2 being pointers to types of the same size. */
7543 if (POINTER_TYPE_P (type)
7544 && BINARY_CLASS_P (arg0)
7545 && TREE_CODE (TREE_OPERAND (arg0, 0)) == NOP_EXPR
7546 && POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (arg0, 0))))
7548 tree arg00 = TREE_OPERAND (arg0, 0);
7549 tree t0 = type;
7550 tree t1 = TREE_TYPE (arg00);
7551 tree tt0 = TREE_TYPE (t0);
7552 tree tt1 = TREE_TYPE (t1);
7553 tree s0 = TYPE_SIZE (tt0);
7554 tree s1 = TYPE_SIZE (tt1);
7556 if (s0 && s1 && operand_equal_p (s0, s1, OEP_ONLY_CONST))
7557 return build2 (TREE_CODE (arg0), t0, fold_convert (t0, arg00),
7558 TREE_OPERAND (arg0, 1));
7561 /* Convert (T1)(~(T2)X) into ~(T1)X if T1 and T2 are integral types
7562 of the same precision, and X is an integer type not narrower than
7563 types T1 or T2, i.e. the cast (T2)X isn't an extension. */
7564 if (INTEGRAL_TYPE_P (type)
7565 && TREE_CODE (op0) == BIT_NOT_EXPR
7566 && INTEGRAL_TYPE_P (TREE_TYPE (op0))
7567 && (TREE_CODE (TREE_OPERAND (op0, 0)) == NOP_EXPR
7568 || TREE_CODE (TREE_OPERAND (op0, 0)) == CONVERT_EXPR)
7569 && TYPE_PRECISION (type) == TYPE_PRECISION (TREE_TYPE (op0)))
7571 tem = TREE_OPERAND (TREE_OPERAND (op0, 0), 0);
7572 if (INTEGRAL_TYPE_P (TREE_TYPE (tem))
7573 && TYPE_PRECISION (type) <= TYPE_PRECISION (TREE_TYPE (tem)))
7574 return fold_build1 (BIT_NOT_EXPR, type, fold_convert (type, tem));
7577 tem = fold_convert_const (code, type, arg0);
7578 return tem ? tem : NULL_TREE;
7580 case VIEW_CONVERT_EXPR:
7581 if (TREE_CODE (op0) == VIEW_CONVERT_EXPR)
7582 return fold_build1 (VIEW_CONVERT_EXPR, type, TREE_OPERAND (op0, 0));
7583 return fold_view_convert_expr (type, op0);
7585 case NEGATE_EXPR:
7586 tem = fold_negate_expr (arg0);
7587 if (tem)
7588 return fold_convert (type, tem);
7589 return NULL_TREE;
7591 case ABS_EXPR:
7592 if (TREE_CODE (arg0) == INTEGER_CST || TREE_CODE (arg0) == REAL_CST)
7593 return fold_abs_const (arg0, type);
7594 else if (TREE_CODE (arg0) == NEGATE_EXPR)
7595 return fold_build1 (ABS_EXPR, type, TREE_OPERAND (arg0, 0));
7596 /* Convert fabs((double)float) into (double)fabsf(float). */
7597 else if (TREE_CODE (arg0) == NOP_EXPR
7598 && TREE_CODE (type) == REAL_TYPE)
7600 tree targ0 = strip_float_extensions (arg0);
7601 if (targ0 != arg0)
7602 return fold_convert (type, fold_build1 (ABS_EXPR,
7603 TREE_TYPE (targ0),
7604 targ0));
7606 /* ABS_EXPR<ABS_EXPR<x>> = ABS_EXPR<x> even if flag_wrapv is on. */
7607 else if (tree_expr_nonnegative_p (arg0) || TREE_CODE (arg0) == ABS_EXPR)
7608 return arg0;
7610 /* Strip sign ops from argument. */
7611 if (TREE_CODE (type) == REAL_TYPE)
7613 tem = fold_strip_sign_ops (arg0);
7614 if (tem)
7615 return fold_build1 (ABS_EXPR, type, fold_convert (type, tem));
7617 return NULL_TREE;
7619 case CONJ_EXPR:
7620 if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
7621 return fold_convert (type, arg0);
7622 if (TREE_CODE (arg0) == COMPLEX_EXPR)
7624 tree itype = TREE_TYPE (type);
7625 tree rpart = fold_convert (itype, TREE_OPERAND (arg0, 0));
7626 tree ipart = fold_convert (itype, TREE_OPERAND (arg0, 1));
7627 return fold_build2 (COMPLEX_EXPR, type, rpart, negate_expr (ipart));
7629 if (TREE_CODE (arg0) == COMPLEX_CST)
7631 tree itype = TREE_TYPE (type);
7632 tree rpart = fold_convert (itype, TREE_REALPART (arg0));
7633 tree ipart = fold_convert (itype, TREE_IMAGPART (arg0));
7634 return build_complex (type, rpart, negate_expr (ipart));
7636 if (TREE_CODE (arg0) == CONJ_EXPR)
7637 return fold_convert (type, TREE_OPERAND (arg0, 0));
7638 return NULL_TREE;
7640 case BIT_NOT_EXPR:
7641 if (TREE_CODE (arg0) == INTEGER_CST)
7642 return fold_not_const (arg0, type);
7643 else if (TREE_CODE (arg0) == BIT_NOT_EXPR)
7644 return TREE_OPERAND (arg0, 0);
7645 /* Convert ~ (-A) to A - 1. */
7646 else if (INTEGRAL_TYPE_P (type) && TREE_CODE (arg0) == NEGATE_EXPR)
7647 return fold_build2 (MINUS_EXPR, type, TREE_OPERAND (arg0, 0),
7648 build_int_cst (type, 1));
7649 /* Convert ~ (A - 1) or ~ (A + -1) to -A. */
7650 else if (INTEGRAL_TYPE_P (type)
7651 && ((TREE_CODE (arg0) == MINUS_EXPR
7652 && integer_onep (TREE_OPERAND (arg0, 1)))
7653 || (TREE_CODE (arg0) == PLUS_EXPR
7654 && integer_all_onesp (TREE_OPERAND (arg0, 1)))))
7655 return fold_build1 (NEGATE_EXPR, type, TREE_OPERAND (arg0, 0));
7656 /* Convert ~(X ^ Y) to ~X ^ Y or X ^ ~Y if ~X or ~Y simplify. */
7657 else if (TREE_CODE (arg0) == BIT_XOR_EXPR
7658 && (tem = fold_unary (BIT_NOT_EXPR, type,
7659 fold_convert (type,
7660 TREE_OPERAND (arg0, 0)))))
7661 return fold_build2 (BIT_XOR_EXPR, type, tem,
7662 fold_convert (type, TREE_OPERAND (arg0, 1)));
7663 else if (TREE_CODE (arg0) == BIT_XOR_EXPR
7664 && (tem = fold_unary (BIT_NOT_EXPR, type,
7665 fold_convert (type,
7666 TREE_OPERAND (arg0, 1)))))
7667 return fold_build2 (BIT_XOR_EXPR, type,
7668 fold_convert (type, TREE_OPERAND (arg0, 0)), tem);
7670 return NULL_TREE;
7672 case TRUTH_NOT_EXPR:
7673 /* The argument to invert_truthvalue must have Boolean type. */
7674 if (TREE_CODE (TREE_TYPE (arg0)) != BOOLEAN_TYPE)
7675 arg0 = fold_convert (boolean_type_node, arg0);
7677 /* Note that the operand of this must be an int
7678 and its values must be 0 or 1.
7679 ("true" is a fixed value perhaps depending on the language,
7680 but we don't handle values other than 1 correctly yet.) */
7681 tem = fold_truth_not_expr (arg0);
7682 if (!tem)
7683 return NULL_TREE;
7684 return fold_convert (type, tem);
7686 case REALPART_EXPR:
7687 if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
7688 return fold_convert (type, arg0);
7689 if (TREE_CODE (arg0) == COMPLEX_EXPR)
7690 return omit_one_operand (type, TREE_OPERAND (arg0, 0),
7691 TREE_OPERAND (arg0, 1));
7692 if (TREE_CODE (arg0) == COMPLEX_CST)
7693 return fold_convert (type, TREE_REALPART (arg0));
7694 if (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
7696 tree itype = TREE_TYPE (TREE_TYPE (arg0));
7697 tem = fold_build2 (TREE_CODE (arg0), itype,
7698 fold_build1 (REALPART_EXPR, itype,
7699 TREE_OPERAND (arg0, 0)),
7700 fold_build1 (REALPART_EXPR, itype,
7701 TREE_OPERAND (arg0, 1)));
7702 return fold_convert (type, tem);
7704 if (TREE_CODE (arg0) == CONJ_EXPR)
7706 tree itype = TREE_TYPE (TREE_TYPE (arg0));
7707 tem = fold_build1 (REALPART_EXPR, itype, TREE_OPERAND (arg0, 0));
7708 return fold_convert (type, tem);
7710 if (TREE_CODE (arg0) == CALL_EXPR)
7712 tree fn = get_callee_fndecl (arg0);
7713 if (DECL_BUILT_IN_CLASS (fn) == BUILT_IN_NORMAL)
7714 switch (DECL_FUNCTION_CODE (fn))
7716 CASE_FLT_FN (BUILT_IN_CEXPI):
7717 fn = mathfn_built_in (type, BUILT_IN_COS);
7718 return build_function_call_expr (fn, TREE_OPERAND (arg0, 1));
7720 default:;
7723 return NULL_TREE;
7725 case IMAGPART_EXPR:
7726 if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
7727 return fold_convert (type, integer_zero_node);
7728 if (TREE_CODE (arg0) == COMPLEX_EXPR)
7729 return omit_one_operand (type, TREE_OPERAND (arg0, 1),
7730 TREE_OPERAND (arg0, 0));
7731 if (TREE_CODE (arg0) == COMPLEX_CST)
7732 return fold_convert (type, TREE_IMAGPART (arg0));
7733 if (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
7735 tree itype = TREE_TYPE (TREE_TYPE (arg0));
7736 tem = fold_build2 (TREE_CODE (arg0), itype,
7737 fold_build1 (IMAGPART_EXPR, itype,
7738 TREE_OPERAND (arg0, 0)),
7739 fold_build1 (IMAGPART_EXPR, itype,
7740 TREE_OPERAND (arg0, 1)));
7741 return fold_convert (type, tem);
7743 if (TREE_CODE (arg0) == CONJ_EXPR)
7745 tree itype = TREE_TYPE (TREE_TYPE (arg0));
7746 tem = fold_build1 (IMAGPART_EXPR, itype, TREE_OPERAND (arg0, 0));
7747 return fold_convert (type, negate_expr (tem));
7749 if (TREE_CODE (arg0) == CALL_EXPR)
7751 tree fn = get_callee_fndecl (arg0);
7752 if (DECL_BUILT_IN_CLASS (fn) == BUILT_IN_NORMAL)
7753 switch (DECL_FUNCTION_CODE (fn))
7755 CASE_FLT_FN (BUILT_IN_CEXPI):
7756 fn = mathfn_built_in (type, BUILT_IN_SIN);
7757 return build_function_call_expr (fn, TREE_OPERAND (arg0, 1));
7759 default:;
7762 return NULL_TREE;
7764 default:
7765 return NULL_TREE;
7766 } /* switch (code) */
7769 /* Fold a binary expression of code CODE and type TYPE with operands
7770 OP0 and OP1, containing either a MIN-MAX or a MAX-MIN combination.
7771 Return the folded expression if folding is successful. Otherwise,
7772 return NULL_TREE. */
7774 static tree
7775 fold_minmax (enum tree_code code, tree type, tree op0, tree op1)
7777 enum tree_code compl_code;
7779 if (code == MIN_EXPR)
7780 compl_code = MAX_EXPR;
7781 else if (code == MAX_EXPR)
7782 compl_code = MIN_EXPR;
7783 else
7784 gcc_unreachable ();
7786 /* MIN (MAX (a, b), b) == b. */
7787 if (TREE_CODE (op0) == compl_code
7788 && operand_equal_p (TREE_OPERAND (op0, 1), op1, 0))
7789 return omit_one_operand (type, op1, TREE_OPERAND (op0, 0));
7791 /* MIN (MAX (b, a), b) == b. */
7792 if (TREE_CODE (op0) == compl_code
7793 && operand_equal_p (TREE_OPERAND (op0, 0), op1, 0)
7794 && reorder_operands_p (TREE_OPERAND (op0, 1), op1))
7795 return omit_one_operand (type, op1, TREE_OPERAND (op0, 1));
7797 /* MIN (a, MAX (a, b)) == a. */
7798 if (TREE_CODE (op1) == compl_code
7799 && operand_equal_p (op0, TREE_OPERAND (op1, 0), 0)
7800 && reorder_operands_p (op0, TREE_OPERAND (op1, 1)))
7801 return omit_one_operand (type, op0, TREE_OPERAND (op1, 1));
7803 /* MIN (a, MAX (b, a)) == a. */
7804 if (TREE_CODE (op1) == compl_code
7805 && operand_equal_p (op0, TREE_OPERAND (op1, 1), 0)
7806 && reorder_operands_p (op0, TREE_OPERAND (op1, 0)))
7807 return omit_one_operand (type, op0, TREE_OPERAND (op1, 0));
7809 return NULL_TREE;
7812 /* Helper that tries to canonicalize the comparison ARG0 CODE ARG1
7813 by changing CODE to reduce the magnitude of constants involved in
7814 ARG0 of the comparison.
7815 Returns a canonicalized comparison tree if a simplification was
7816 possible, otherwise returns NULL_TREE. */
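/* Illustrative examples, not in the original comment:
X + 4 <= Y is canonicalized to X + 3 < Y, and the sole-constant
form 5 <= Y to the swapped Y > 4. */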
7818 static tree
7819 maybe_canonicalize_comparison_1 (enum tree_code code, tree type,
7820 tree arg0, tree arg1)
7822 enum tree_code code0 = TREE_CODE (arg0);
7823 tree t, cst0 = NULL_TREE;
7824 int sgn0;
7825 bool swap = false;
7827 /* Match A +- CST code arg1 and CST code arg1. */
7828 if (!(((code0 == MINUS_EXPR
7829 || code0 == PLUS_EXPR)
7830 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
7831 || code0 == INTEGER_CST))
7832 return NULL_TREE;
7834 /* Identify the constant in arg0 and its sign. */
7835 if (code0 == INTEGER_CST)
7836 cst0 = arg0;
7837 else
7838 cst0 = TREE_OPERAND (arg0, 1);
7839 sgn0 = tree_int_cst_sgn (cst0);
7841 /* Overflowed constants and zero will cause problems. */
7842 if (integer_zerop (cst0)
7843 || TREE_OVERFLOW (cst0))
7844 return NULL_TREE;
7846 /* See if we can reduce the magnitude of the constant in
7847 arg0 by changing the comparison code. */
7848 if (code0 == INTEGER_CST)
7850 /* CST <= arg1 -> CST-1 < arg1. */
7851 if (code == LE_EXPR && sgn0 == 1)
7852 code = LT_EXPR;
7853 /* -CST < arg1 -> -CST-1 <= arg1. */
7854 else if (code == LT_EXPR && sgn0 == -1)
7855 code = LE_EXPR;
7856 /* CST > arg1 -> CST-1 >= arg1. */
7857 else if (code == GT_EXPR && sgn0 == 1)
7858 code = GE_EXPR;
7859 /* -CST >= arg1 -> -CST-1 > arg1. */
7860 else if (code == GE_EXPR && sgn0 == -1)
7861 code = GT_EXPR;
7862 else
7863 return NULL_TREE;
7864 /* arg1 code' CST' might be more canonical. */
7865 swap = true;
7867 else
7869 /* A - CST < arg1 -> A - CST-1 <= arg1. */
7870 if (code == LT_EXPR
7871 && code0 == ((sgn0 == -1) ? PLUS_EXPR : MINUS_EXPR))
7872 code = LE_EXPR;
7873 /* A + CST > arg1 -> A + CST-1 >= arg1. */
7874 else if (code == GT_EXPR
7875 && code0 == ((sgn0 == -1) ? MINUS_EXPR : PLUS_EXPR))
7876 code = GE_EXPR;
7877 /* A + CST <= arg1 -> A + CST-1 < arg1. */
7878 else if (code == LE_EXPR
7879 && code0 == ((sgn0 == -1) ? MINUS_EXPR : PLUS_EXPR))
7880 code = LT_EXPR;
7881 /* A - CST >= arg1 -> A - CST-1 > arg1. */
7882 else if (code == GE_EXPR
7883 && code0 == ((sgn0 == -1) ? PLUS_EXPR : MINUS_EXPR))
7884 code = GT_EXPR;
7885 else
7886 return NULL_TREE;
7889 /* Now build the constant reduced in magnitude. */
7890 t = int_const_binop (sgn0 == -1 ? PLUS_EXPR : MINUS_EXPR,
7891 cst0, build_int_cst (TREE_TYPE (cst0), 1), 0);
7892 if (code0 != INTEGER_CST)
7893 t = fold_build2 (code0, TREE_TYPE (arg0), TREE_OPERAND (arg0, 0), t);
7895 /* If swapping might yield a more canonical form, do so. */
7896 if (swap)
7897 return fold_build2 (swap_tree_comparison (code), type, arg1, t);
7898 else
7899 return fold_build2 (code, type, t, arg1);
7902 /* Canonicalize the comparison ARG0 CODE ARG1 with type TYPE with undefined
7903 overflow further. Try to decrease the magnitude of constants involved
7904 by changing LE_EXPR and GE_EXPR to LT_EXPR and GT_EXPR or vice versa
7905 and put sole constants at the second argument position.
7906 Returns the canonicalized tree if changed, otherwise NULL_TREE. */
7908 static tree
7909 maybe_canonicalize_comparison (enum tree_code code, tree type,
7910 tree arg0, tree arg1)
7912 tree t;
7914 /* In principle pointers also have undefined overflow behavior,
7915 but that causes problems elsewhere. */
7916 if ((flag_wrapv || flag_trapv)
7917 || (TYPE_UNSIGNED (TREE_TYPE (arg0))
7918 || POINTER_TYPE_P (TREE_TYPE (arg0))))
7919 return NULL_TREE;
7921 /* Try canonicalization by simplifying arg0. */
7922 t = maybe_canonicalize_comparison_1 (code, type, arg0, arg1);
7923 if (t)
7924 return t;
7926 /* Try canonicalization by simplifying arg1 using the swapped
7927 comparison. */
7928 code = swap_tree_comparison (code);
7929 return maybe_canonicalize_comparison_1 (code, type, arg1, arg0);
7932 /* Subroutine of fold_binary. This routine performs all of the
7933 transformations that are common to the equality/inequality
7934 operators (EQ_EXPR and NE_EXPR) and the ordering operators
7935 (LT_EXPR, LE_EXPR, GE_EXPR and GT_EXPR). Callers other than
7936 fold_binary should call fold_binary. Fold a comparison with
7937 tree code CODE and type TYPE with operands OP0 and OP1. Return
7938 the folded comparison or NULL_TREE. */
7940 static tree
7941 fold_comparison (enum tree_code code, tree type, tree op0, tree op1)
7943 tree arg0, arg1, tem;
7945 arg0 = op0;
7946 arg1 = op1;
7948 STRIP_SIGN_NOPS (arg0);
7949 STRIP_SIGN_NOPS (arg1);
7951 tem = fold_relational_const (code, type, arg0, arg1);
7952 if (tem != NULL_TREE)
7953 return tem;
7955 /* If one arg is a real or integer constant, put it last. */
7956 if (tree_swap_operands_p (arg0, arg1, true))
7957 return fold_build2 (swap_tree_comparison (code), type, op1, op0);
7959 /* Transform comparisons of the form X +- C1 CMP C2 to X CMP C2 +- C1. */
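/* Illustrative example: for signed int x without -fwrapv/-ftrapv,
x + 3 < 10 becomes x < 7, since 10 - 3 cannot overflow. */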
7960 if ((TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
7961 && (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
7962 && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1))
7963 && !TYPE_UNSIGNED (TREE_TYPE (arg1))
7964 && !(flag_wrapv || flag_trapv))
7965 && (TREE_CODE (arg1) == INTEGER_CST
7966 && !TREE_OVERFLOW (arg1)))
7968 tree const1 = TREE_OPERAND (arg0, 1);
7969 tree const2 = arg1;
7970 tree variable = TREE_OPERAND (arg0, 0);
7971 tree lhs;
7972 int lhs_add;
7973 lhs_add = TREE_CODE (arg0) != PLUS_EXPR;
7975 lhs = fold_build2 (lhs_add ? PLUS_EXPR : MINUS_EXPR,
7976 TREE_TYPE (arg1), const2, const1);
7977 if (TREE_CODE (lhs) == TREE_CODE (arg1)
7978 && (TREE_CODE (lhs) != INTEGER_CST
7979 || !TREE_OVERFLOW (lhs)))
7980 return fold_build2 (code, type, variable, lhs);
7983 /* For comparisons of pointers we can decompose them into a compile-time
7984 comparison of the base objects and of the offsets into the object.
7985 This requires at least one operand being an ADDR_EXPR to do more
7986 than the operand_equal_p test below. */
7987 if (POINTER_TYPE_P (TREE_TYPE (arg0))
7988 && (TREE_CODE (arg0) == ADDR_EXPR
7989 || TREE_CODE (arg1) == ADDR_EXPR))
7991 tree base0, base1, offset0 = NULL_TREE, offset1 = NULL_TREE;
7992 HOST_WIDE_INT bitsize, bitpos0 = 0, bitpos1 = 0;
7993 enum machine_mode mode;
7994 int volatilep, unsignedp;
7995 bool indirect_base0 = false;
7997 /* Get base and offset for the access. Strip ADDR_EXPR for
7998 get_inner_reference, but put it back by stripping INDIRECT_REF
7999 off the base object if possible. */
8000 base0 = arg0;
8001 if (TREE_CODE (arg0) == ADDR_EXPR)
8003 base0 = get_inner_reference (TREE_OPERAND (arg0, 0),
8004 &bitsize, &bitpos0, &offset0, &mode,
8005 &unsignedp, &volatilep, false);
8006 if (TREE_CODE (base0) == INDIRECT_REF)
8007 base0 = TREE_OPERAND (base0, 0);
8008 else
8009 indirect_base0 = true;
8012 base1 = arg1;
8013 if (TREE_CODE (arg1) == ADDR_EXPR)
8015 base1 = get_inner_reference (TREE_OPERAND (arg1, 0),
8016 &bitsize, &bitpos1, &offset1, &mode,
8017 &unsignedp, &volatilep, false);
8018 /* Make base1 indirect or non-indirect to match what we
8019 did for base0. */
8020 if (TREE_CODE (base1) == INDIRECT_REF
8021 && !indirect_base0)
8022 base1 = TREE_OPERAND (base1, 0);
8023 else if (!indirect_base0)
8024 base1 = NULL_TREE;
8026 else if (indirect_base0)
8027 base1 = NULL_TREE;
8029 /* If we have equivalent bases we might be able to simplify. */
8030 if (base0 && base1
8031 && operand_equal_p (base0, base1, 0))
8033 /* We can fold this expression to a constant if the non-constant
8034 offset parts are equal. */
8035 if (offset0 == offset1
8036 || (offset0 && offset1
8037 && operand_equal_p (offset0, offset1, 0)))
8039 switch (code)
8041 case EQ_EXPR:
8042 return build_int_cst (boolean_type_node, bitpos0 == bitpos1);
8043 case NE_EXPR:
8044 return build_int_cst (boolean_type_node, bitpos0 != bitpos1);
8045 case LT_EXPR:
8046 return build_int_cst (boolean_type_node, bitpos0 < bitpos1);
8047 case LE_EXPR:
8048 return build_int_cst (boolean_type_node, bitpos0 <= bitpos1);
8049 case GE_EXPR:
8050 return build_int_cst (boolean_type_node, bitpos0 >= bitpos1);
8051 case GT_EXPR:
8052 return build_int_cst (boolean_type_node, bitpos0 > bitpos1);
8053 default:;
8056 /* We can simplify the comparison to a comparison of the variable
8057 offset parts if the constant offset parts are equal.
8058 Be careful to use signed size type here because otherwise we
8059 mess with array offsets in the wrong way. This is possible
8060 because pointer arithmetic is restricted to remain within an
8061 object and overflow on pointer differences is undefined as of
8062 6.5.6/8 and /9 with respect to the signed ptrdiff_t. */
8063 else if (bitpos0 == bitpos1)
8065 tree signed_size_type_node;
8066 signed_size_type_node = signed_type_for (size_type_node);
8068 /* By converting to signed size type we cover middle-end pointer
8069 arithmetic which operates on unsigned pointer types of size
8070 type size and ARRAY_REF offsets which are properly sign or
8071 zero extended from their type in case it is narrower than
8072 size type. */
8073 if (offset0 == NULL_TREE)
8074 offset0 = build_int_cst (signed_size_type_node, 0);
8075 else
8076 offset0 = fold_convert (signed_size_type_node, offset0);
8077 if (offset1 == NULL_TREE)
8078 offset1 = build_int_cst (signed_size_type_node, 0);
8079 else
8080 offset1 = fold_convert (signed_size_type_node, offset1);
8082 return fold_build2 (code, type, offset0, offset1);
8087 /* If this is a comparison of two exprs that look like an ARRAY_REF of the
8088 same object, then we can fold this to a comparison of the two offsets in
8089 signed size type. This is possible because pointer arithmetic is
8090 restricted to remain within an object and overflow on pointer differences
8091 is undefined as of 6.5.6/8 and /9 with respect to the signed ptrdiff_t. */
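/* Illustrative sketch (exact behavior depends on extract_array_ref):
&a[i] < &a[j] on the same array A folds to a comparison of the two
element offsets in the signed size type. */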
8092 if (POINTER_TYPE_P (TREE_TYPE (arg0))
8093 && !flag_wrapv && !flag_trapv)
8095 tree base0, offset0, base1, offset1;
8097 if (extract_array_ref (arg0, &base0, &offset0)
8098 && extract_array_ref (arg1, &base1, &offset1)
8099 && operand_equal_p (base0, base1, 0))
8101 tree signed_size_type_node;
8102 signed_size_type_node = signed_type_for (size_type_node);
8104 /* By converting to signed size type we cover middle-end pointer
8105 arithmetic which operates on unsigned pointer types of size
8106 type size and ARRAY_REF offsets which are properly sign or
8107 zero extended from their type in case it is narrower than
8108 size type. */
8109 if (offset0 == NULL_TREE)
8110 offset0 = build_int_cst (signed_size_type_node, 0);
8111 else
8112 offset0 = fold_convert (signed_size_type_node, offset0);
8113 if (offset1 == NULL_TREE)
8114 offset1 = build_int_cst (signed_size_type_node, 0);
8115 else
8116 offset1 = fold_convert (signed_size_type_node, offset1);
8118 return fold_build2 (code, type, offset0, offset1);
8122 /* Transform comparisons of the form X +- C1 CMP Y +- C2 to
8123 X CMP Y +- C2 +- C1 for signed X, Y. This is valid if
8124 the resulting offset is smaller in absolute value than the
8125 original one. */
8126 if (!(flag_wrapv || flag_trapv)
8127 && !TYPE_UNSIGNED (TREE_TYPE (arg0))
8128 && (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
8129 && (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
8130 && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1)))
8131 && (TREE_CODE (arg1) == PLUS_EXPR || TREE_CODE (arg1) == MINUS_EXPR)
8132 && (TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
8133 && !TREE_OVERFLOW (TREE_OPERAND (arg1, 1))))
8135 tree const1 = TREE_OPERAND (arg0, 1);
8136 tree const2 = TREE_OPERAND (arg1, 1);
8137 tree variable1 = TREE_OPERAND (arg0, 0);
8138 tree variable2 = TREE_OPERAND (arg1, 0);
8139 tree cst;
8141 /* Put the constant on the side where it doesn't overflow and is
8142 of lower absolute value than before. */
8143 cst = int_const_binop (TREE_CODE (arg0) == TREE_CODE (arg1)
8144 ? MINUS_EXPR : PLUS_EXPR,
8145 const2, const1, 0);
8146 if (!TREE_OVERFLOW (cst)
8147 && tree_int_cst_compare (const2, cst) == tree_int_cst_sgn (const2))
8148 return fold_build2 (code, type,
8149 variable1,
8150 fold_build2 (TREE_CODE (arg1), TREE_TYPE (arg1),
8151 variable2, cst));
8153 cst = int_const_binop (TREE_CODE (arg0) == TREE_CODE (arg1)
8154 ? MINUS_EXPR : PLUS_EXPR,
8155 const1, const2, 0);
8156 if (!TREE_OVERFLOW (cst)
8157 && tree_int_cst_compare (const1, cst) == tree_int_cst_sgn (const1))
8158 return fold_build2 (code, type,
8159 fold_build2 (TREE_CODE (arg0), TREE_TYPE (arg0),
8160 variable1, cst),
8161 variable2);
8164 /* Transform comparisons of the form X * C1 CMP 0 to X CMP 0 in the
8165 signed arithmetic case. That form is created by the compiler
8166 often enough for folding it to be of value. One example is in
8167 computing loop trip counts after Operator Strength Reduction. */
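/* Illustrative example: for signed x, x * 4 > 0 folds to x > 0,
while x * -4 > 0 folds to the swapped x < 0. */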
8168 if (!(flag_wrapv || flag_trapv)
8169 && !TYPE_UNSIGNED (TREE_TYPE (arg0))
8170 && TREE_CODE (arg0) == MULT_EXPR
8171 && (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
8172 && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1)))
8173 && integer_zerop (arg1))
8175 tree const1 = TREE_OPERAND (arg0, 1);
8176 tree const2 = arg1; /* zero */
8177 tree variable1 = TREE_OPERAND (arg0, 0);
8178 enum tree_code cmp_code = code;
8180 gcc_assert (!integer_zerop (const1));
8182 /* If const1 is negative we swap the sense of the comparison. */
8183 if (tree_int_cst_sgn (const1) < 0)
8184 cmp_code = swap_tree_comparison (cmp_code);
8186 return fold_build2 (cmp_code, type, variable1, const2);
8189 tem = maybe_canonicalize_comparison (code, type, arg0, arg1);
8190 if (tem)
8191 return tem;
8193 if (FLOAT_TYPE_P (TREE_TYPE (arg0)))
8195 tree targ0 = strip_float_extensions (arg0);
8196 tree targ1 = strip_float_extensions (arg1);
8197 tree newtype = TREE_TYPE (targ0);
8199 if (TYPE_PRECISION (TREE_TYPE (targ1)) > TYPE_PRECISION (newtype))
8200 newtype = TREE_TYPE (targ1);
8202 /* Fold (double)float1 CMP (double)float2 into float1 CMP float2. */
8203 if (TYPE_PRECISION (newtype) < TYPE_PRECISION (TREE_TYPE (arg0)))
8204 return fold_build2 (code, type, fold_convert (newtype, targ0),
8205 fold_convert (newtype, targ1));
8207 /* (-a) CMP (-b) -> b CMP a */
8208 if (TREE_CODE (arg0) == NEGATE_EXPR
8209 && TREE_CODE (arg1) == NEGATE_EXPR)
8210 return fold_build2 (code, type, TREE_OPERAND (arg1, 0),
8211 TREE_OPERAND (arg0, 0));
8213 if (TREE_CODE (arg1) == REAL_CST)
8215 REAL_VALUE_TYPE cst;
8216 cst = TREE_REAL_CST (arg1);
8218 /* (-a) CMP CST -> a swap(CMP) (-CST) */
8219 if (TREE_CODE (arg0) == NEGATE_EXPR)
8220 return fold_build2 (swap_tree_comparison (code), type,
8221 TREE_OPERAND (arg0, 0),
8222 build_real (TREE_TYPE (arg1),
8223 REAL_VALUE_NEGATE (cst)));
8225 /* IEEE doesn't distinguish +0 and -0 in comparisons. */
8226 /* a CMP (-0) -> a CMP 0 */
8227 if (REAL_VALUE_MINUS_ZERO (cst))
8228 return fold_build2 (code, type, arg0,
8229 build_real (TREE_TYPE (arg1), dconst0));
8231 /* x != NaN is always true, other ops are always false. */
8232 if (REAL_VALUE_ISNAN (cst)
8233 && ! HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg1))))
8235 tem = (code == NE_EXPR) ? integer_one_node : integer_zero_node;
8236 return omit_one_operand (type, tem, arg0);
8239 /* Fold comparisons against infinity. */
8240 if (REAL_VALUE_ISINF (cst))
8242 tem = fold_inf_compare (code, type, arg0, arg1);
8243 if (tem != NULL_TREE)
8244 return tem;
8248 /* If this is a comparison of a real constant with a PLUS_EXPR
8249 or a MINUS_EXPR of a real constant, we can convert it into a
8250 comparison with a revised real constant as long as no overflow
8251 occurs when unsafe_math_optimizations are enabled. */
8252 if (flag_unsafe_math_optimizations
8253 && TREE_CODE (arg1) == REAL_CST
8254 && (TREE_CODE (arg0) == PLUS_EXPR
8255 || TREE_CODE (arg0) == MINUS_EXPR)
8256 && TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
8257 && 0 != (tem = const_binop (TREE_CODE (arg0) == PLUS_EXPR
8258 ? MINUS_EXPR : PLUS_EXPR,
8259 arg1, TREE_OPERAND (arg0, 1), 0))
8260 && !TREE_OVERFLOW (tem))
8261 return fold_build2 (code, type, TREE_OPERAND (arg0, 0), tem);
8263 /* Likewise, we can simplify a comparison of a real constant with
8264 a MINUS_EXPR whose first operand is also a real constant, i.e.
8265 (c1 - x) < c2 becomes x > c1-c2. */
8266 if (flag_unsafe_math_optimizations
8267 && TREE_CODE (arg1) == REAL_CST
8268 && TREE_CODE (arg0) == MINUS_EXPR
8269 && TREE_CODE (TREE_OPERAND (arg0, 0)) == REAL_CST
8270 && 0 != (tem = const_binop (MINUS_EXPR, TREE_OPERAND (arg0, 0),
8271 arg1, 0))
8272 && !TREE_OVERFLOW (tem))
8273 return fold_build2 (swap_tree_comparison (code), type,
8274 TREE_OPERAND (arg0, 1), tem);
8276 /* Fold comparisons against built-in math functions. */
8277 if (TREE_CODE (arg1) == REAL_CST
8278 && flag_unsafe_math_optimizations
8279 && ! flag_errno_math)
8281 enum built_in_function fcode = builtin_mathfn_code (arg0);
8283 if (fcode != END_BUILTINS)
8285 tem = fold_mathfn_compare (fcode, code, type, arg0, arg1);
8286 if (tem != NULL_TREE)
8287 return tem;
8292 /* Convert foo++ == CONST into ++foo == CONST + INCR. */
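/* Illustrative example: for an int counter i, the equality test
i++ == 5 is rewritten as ++i == 6. */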
8293 if (TREE_CONSTANT (arg1)
8294 && (TREE_CODE (arg0) == POSTINCREMENT_EXPR
8295 || TREE_CODE (arg0) == POSTDECREMENT_EXPR)
8296 /* This optimization is invalid for ordered comparisons
8297 if CONST+INCR overflows or if foo+incr might overflow.
8298 This optimization is invalid for floating point due to rounding.
8299 For pointer types we assume overflow doesn't happen. */
8300 && (POINTER_TYPE_P (TREE_TYPE (arg0))
8301 || (INTEGRAL_TYPE_P (TREE_TYPE (arg0))
8302 && (code == EQ_EXPR || code == NE_EXPR))))
8304 tree varop, newconst;
8306 if (TREE_CODE (arg0) == POSTINCREMENT_EXPR)
8308 newconst = fold_build2 (PLUS_EXPR, TREE_TYPE (arg0),
8309 arg1, TREE_OPERAND (arg0, 1));
8310 varop = build2 (PREINCREMENT_EXPR, TREE_TYPE (arg0),
8311 TREE_OPERAND (arg0, 0),
8312 TREE_OPERAND (arg0, 1));
8314 else
8316 newconst = fold_build2 (MINUS_EXPR, TREE_TYPE (arg0),
8317 arg1, TREE_OPERAND (arg0, 1));
8318 varop = build2 (PREDECREMENT_EXPR, TREE_TYPE (arg0),
8319 TREE_OPERAND (arg0, 0),
8320 TREE_OPERAND (arg0, 1));
8324 /* If VAROP is a reference to a bitfield, we must mask
8325 the constant by the width of the field. */
8326 if (TREE_CODE (TREE_OPERAND (varop, 0)) == COMPONENT_REF
8327 && DECL_BIT_FIELD (TREE_OPERAND (TREE_OPERAND (varop, 0), 1))
8328 && host_integerp (DECL_SIZE (TREE_OPERAND
8329 (TREE_OPERAND (varop, 0), 1)), 1))
8331 tree fielddecl = TREE_OPERAND (TREE_OPERAND (varop, 0), 1);
8332 HOST_WIDE_INT size = tree_low_cst (DECL_SIZE (fielddecl), 1);
8333 tree folded_compare, shift;
8335 /* First check whether the comparison would come out
8336 always the same. If we don't do that we would
8337 change the meaning with the masking. */
8338 folded_compare = fold_build2 (code, type,
8339 TREE_OPERAND (varop, 0), arg1);
8340 if (TREE_CODE (folded_compare) == INTEGER_CST)
8341 return omit_one_operand (type, folded_compare, varop);
8343 shift = build_int_cst (NULL_TREE,
8344 TYPE_PRECISION (TREE_TYPE (varop)) - size);
8345 shift = fold_convert (TREE_TYPE (varop), shift);
8346 newconst = fold_build2 (LSHIFT_EXPR, TREE_TYPE (varop),
8347 newconst, shift);
8348 newconst = fold_build2 (RSHIFT_EXPR, TREE_TYPE (varop),
8349 newconst, shift);
8352 return fold_build2 (code, type, varop, newconst);
8355 if (TREE_CODE (TREE_TYPE (arg0)) == INTEGER_TYPE
8356 && (TREE_CODE (arg0) == NOP_EXPR
8357 || TREE_CODE (arg0) == CONVERT_EXPR))
8359 /* If we are widening one operand of an integer comparison,
8360 see if the other operand is similarly being widened. Perhaps we
8361 can do the comparison in the narrower type. */
8362 tem = fold_widened_comparison (code, type, arg0, arg1);
8363 if (tem)
8364 return tem;
8366 /* Or if we are changing signedness. */
8367 tem = fold_sign_changed_comparison (code, type, arg0, arg1);
8368 if (tem)
8369 return tem;
8372 /* If this is comparing a constant with a MIN_EXPR or a MAX_EXPR of a
8373 constant, we can simplify it. */
8374 if (TREE_CODE (arg1) == INTEGER_CST
8375 && (TREE_CODE (arg0) == MIN_EXPR
8376 || TREE_CODE (arg0) == MAX_EXPR)
8377 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
8379 tem = optimize_minmax_comparison (code, type, op0, op1);
8380 if (tem)
8381 return tem;
8384 /* Simplify comparison of something with itself. (For IEEE
8385 floating-point, we can only do some of these simplifications.) */
8386 if (operand_equal_p (arg0, arg1, 0))
8388 switch (code)
8390 case EQ_EXPR:
8391 if (! FLOAT_TYPE_P (TREE_TYPE (arg0))
8392 || ! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
8393 return constant_boolean_node (1, type);
8394 break;
8396 case GE_EXPR:
8397 case LE_EXPR:
8398 if (! FLOAT_TYPE_P (TREE_TYPE (arg0))
8399 || ! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
8400 return constant_boolean_node (1, type);
8401 return fold_build2 (EQ_EXPR, type, arg0, arg1);
8403 case NE_EXPR:
8404 /* For NE, we can only do this simplification if integer
8405 or we don't honor IEEE floating point NaNs. */
8406 if (FLOAT_TYPE_P (TREE_TYPE (arg0))
8407 && HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
8408 break;
8409 /* ... fall through ... */
8410 case GT_EXPR:
8411 case LT_EXPR:
8412 return constant_boolean_node (0, type);
8413 default:
8414 gcc_unreachable ();
8418 /* If we are comparing an expression that just has comparisons
8419 of two integer values, arithmetic expressions of those comparisons,
8420 and constants, we can simplify it. There are only three cases
8421 to check: the two values can either be equal, the first can be
8422 greater, or the second can be greater. Fold the expression for
8423 those three values. Since each value must be 0 or 1, we have
8424 eight possibilities, each of which corresponds to the constant 0
8425 or 1 or one of the six possible comparisons.
8427 This handles common cases like (a > b) == 0 but also handles
8428 expressions like ((x > y) - (y > x)) > 0, which supposedly
8429 occur in macroized code. */
8431 if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg0) != INTEGER_CST)
8433 tree cval1 = 0, cval2 = 0;
8434 int save_p = 0;
8436 if (twoval_comparison_p (arg0, &cval1, &cval2, &save_p)
8437 /* Don't handle degenerate cases here; they should already
8438 have been handled anyway. */
8439 && cval1 != 0 && cval2 != 0
8440 && ! (TREE_CONSTANT (cval1) && TREE_CONSTANT (cval2))
8441 && TREE_TYPE (cval1) == TREE_TYPE (cval2)
8442 && INTEGRAL_TYPE_P (TREE_TYPE (cval1))
8443 && TYPE_MAX_VALUE (TREE_TYPE (cval1))
8444 && TYPE_MAX_VALUE (TREE_TYPE (cval2))
8445 && ! operand_equal_p (TYPE_MIN_VALUE (TREE_TYPE (cval1)),
8446 TYPE_MAX_VALUE (TREE_TYPE (cval2)), 0))
8448 tree maxval = TYPE_MAX_VALUE (TREE_TYPE (cval1));
8449 tree minval = TYPE_MIN_VALUE (TREE_TYPE (cval1));
8451 /* We can't just pass T to eval_subst in case cval1 or cval2
8452 was the same as ARG1. */
8454 tree high_result
8455 = fold_build2 (code, type,
8456 eval_subst (arg0, cval1, maxval,
8457 cval2, minval),
8458 arg1);
8459 tree equal_result
8460 = fold_build2 (code, type,
8461 eval_subst (arg0, cval1, maxval,
8462 cval2, maxval),
8463 arg1);
8464 tree low_result
8465 = fold_build2 (code, type,
8466 eval_subst (arg0, cval1, minval,
8467 cval2, maxval),
8468 arg1);
8470 /* All three of these results should be 0 or 1. Confirm they are.
8471 Then use those values to select the proper code to use. */
8473 if (TREE_CODE (high_result) == INTEGER_CST
8474 && TREE_CODE (equal_result) == INTEGER_CST
8475 && TREE_CODE (low_result) == INTEGER_CST)
8477 /* Make a 3-bit mask with the high-order bit being the
8478 value for `>', the next for '=', and the low for '<'. */
8479 switch ((integer_onep (high_result) * 4)
8480 + (integer_onep (equal_result) * 2)
8481 + integer_onep (low_result))
8483 case 0:
8484 /* Always false. */
8485 return omit_one_operand (type, integer_zero_node, arg0);
8486 case 1:
8487 code = LT_EXPR;
8488 break;
8489 case 2:
8490 code = EQ_EXPR;
8491 break;
8492 case 3:
8493 code = LE_EXPR;
8494 break;
8495 case 4:
8496 code = GT_EXPR;
8497 break;
8498 case 5:
8499 code = NE_EXPR;
8500 break;
8501 case 6:
8502 code = GE_EXPR;
8503 break;
8504 case 7:
8505 /* Always true. */
8506 return omit_one_operand (type, integer_one_node, arg0);
8509 if (save_p)
8510 return save_expr (build2 (code, type, cval1, cval2));
8511 return fold_build2 (code, type, cval1, cval2);
8516 /* Fold a comparison of the address of COMPONENT_REFs with the same
8517 type and component to a comparison of the address of the base
8518 object. In short, &x->a OP &y->a to x OP y and
8519 &x->a OP &y.a to x OP &y */
8520 if (TREE_CODE (arg0) == ADDR_EXPR
8521 && TREE_CODE (TREE_OPERAND (arg0, 0)) == COMPONENT_REF
8522 && TREE_CODE (arg1) == ADDR_EXPR
8523 && TREE_CODE (TREE_OPERAND (arg1, 0)) == COMPONENT_REF)
8525 tree cref0 = TREE_OPERAND (arg0, 0);
8526 tree cref1 = TREE_OPERAND (arg1, 0);
8527 if (TREE_OPERAND (cref0, 1) == TREE_OPERAND (cref1, 1))
8529 tree op0 = TREE_OPERAND (cref0, 0);
8530 tree op1 = TREE_OPERAND (cref1, 0);
8531 return fold_build2 (code, type,
8532 build_fold_addr_expr (op0),
8533 build_fold_addr_expr (op1));
8537 /* We can fold X/C1 op C2 where C1 and C2 are integer constants
8538 into a single range test. */
8539 if ((TREE_CODE (arg0) == TRUNC_DIV_EXPR
8540 || TREE_CODE (arg0) == EXACT_DIV_EXPR)
8541 && TREE_CODE (arg1) == INTEGER_CST
8542 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
8543 && !integer_zerop (TREE_OPERAND (arg0, 1))
8544 && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1))
8545 && !TREE_OVERFLOW (arg1))
8547 tem = fold_div_compare (code, type, arg0, arg1);
8548 if (tem != NULL_TREE)
8549 return tem;
8552 /* Fold ~X op ~Y as Y op X. */
8553 if (TREE_CODE (arg0) == BIT_NOT_EXPR
8554 && TREE_CODE (arg1) == BIT_NOT_EXPR)
8555 return fold_build2 (code, type,
8556 TREE_OPERAND (arg1, 0),
8557 TREE_OPERAND (arg0, 0));
8559 /* Fold ~X op C as X op' ~C, where op' is the swapped comparison. */
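/* Illustrative example, assuming 32-bit int: ~x < 5 becomes
x > ~5, i.e. x > -6. */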
8560 if (TREE_CODE (arg0) == BIT_NOT_EXPR
8561 && TREE_CODE (arg1) == INTEGER_CST)
8562 return fold_build2 (swap_tree_comparison (code), type,
8563 TREE_OPERAND (arg0, 0),
8564 fold_build1 (BIT_NOT_EXPR, TREE_TYPE (arg1), arg1));
8566 return NULL_TREE;
8570 /* Subroutine of fold_binary. Optimize complex multiplications of the
8571 form z * conj(z), as pow(realpart(z),2) + pow(imagpart(z),2). The
8572 argument EXPR represents the expression "z" of type TYPE. */
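/* Illustrative identity: for z = a + b*i this computes
z * conj(z) = (a*a + b*b) + 0*i. */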
8574 static tree
8575 fold_mult_zconjz (tree type, tree expr)
8577 tree itype = TREE_TYPE (type);
8578 tree rpart, ipart, tem;
8580 if (TREE_CODE (expr) == COMPLEX_EXPR)
8582 rpart = TREE_OPERAND (expr, 0);
8583 ipart = TREE_OPERAND (expr, 1);
8585 else if (TREE_CODE (expr) == COMPLEX_CST)
8587 rpart = TREE_REALPART (expr);
8588 ipart = TREE_IMAGPART (expr);
8590 else
8592 expr = save_expr (expr);
8593 rpart = fold_build1 (REALPART_EXPR, itype, expr);
8594 ipart = fold_build1 (IMAGPART_EXPR, itype, expr);
8597 rpart = save_expr (rpart);
8598 ipart = save_expr (ipart);
8599 tem = fold_build2 (PLUS_EXPR, itype,
8600 fold_build2 (MULT_EXPR, itype, rpart, rpart),
8601 fold_build2 (MULT_EXPR, itype, ipart, ipart));
8602 return fold_build2 (COMPLEX_EXPR, type, tem,
8603 fold_convert (itype, integer_zero_node));
8607 /* Fold a binary expression of code CODE and type TYPE with operands
8608 OP0 and OP1. Return the folded expression if folding is
8609 successful. Otherwise, return NULL_TREE. */
8611 tree
8612 fold_binary (enum tree_code code, tree type, tree op0, tree op1)
8614 enum tree_code_class kind = TREE_CODE_CLASS (code);
8615 tree arg0, arg1, tem;
8616 tree t1 = NULL_TREE;
8618 gcc_assert ((IS_EXPR_CODE_CLASS (kind)
8619 || IS_GIMPLE_STMT_CODE_CLASS (kind))
8620 && TREE_CODE_LENGTH (code) == 2
8621 && op0 != NULL_TREE
8622 && op1 != NULL_TREE);
8624 arg0 = op0;
8625 arg1 = op1;
8627 /* Strip any conversions that don't change the mode. This is
8628 safe for every expression, except for a comparison expression
8629 because its signedness is derived from its operands. So, in
8630 the latter case, only strip conversions that don't change the
8631 signedness.
8633 Note that this is done as an internal manipulation within the
8634 constant folder, in order to find the simplest representation
8635 of the arguments so that their form can be studied. In any
8636 case, the appropriate type conversions should be put back in
8637 the tree that will get out of the constant folder. */
8639 if (kind == tcc_comparison)
8641 STRIP_SIGN_NOPS (arg0);
8642 STRIP_SIGN_NOPS (arg1);
8644 else
8646 STRIP_NOPS (arg0);
8647 STRIP_NOPS (arg1);
8650 /* Note that TREE_CONSTANT isn't enough: static var addresses are
8651 constant but we can't do arithmetic on them. */
8652 if ((TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
8653 || (TREE_CODE (arg0) == REAL_CST && TREE_CODE (arg1) == REAL_CST)
8654 || (TREE_CODE (arg0) == COMPLEX_CST && TREE_CODE (arg1) == COMPLEX_CST)
8655 || (TREE_CODE (arg0) == VECTOR_CST && TREE_CODE (arg1) == VECTOR_CST))
8657 if (kind == tcc_binary)
8658 tem = const_binop (code, arg0, arg1, 0);
8659 else if (kind == tcc_comparison)
8660 tem = fold_relational_const (code, type, arg0, arg1);
8661 else
8662 tem = NULL_TREE;
8664 if (tem != NULL_TREE)
8666 if (TREE_TYPE (tem) != type)
8667 tem = fold_convert (type, tem);
8668 return tem;
8672 /* If this is a commutative operation, and ARG0 is a constant, move it
8673 to ARG1 to reduce the number of tests below. */
8674 if (commutative_tree_code (code)
8675 && tree_swap_operands_p (arg0, arg1, true))
8676 return fold_build2 (code, type, op1, op0);
8678 /* ARG0 is the first operand of EXPR, and ARG1 is the second operand.
8680 First check for cases where an arithmetic operation is applied to a
8681 compound, conditional, or comparison operation. Push the arithmetic
8682 operation inside the compound or conditional to see if any folding
8683 can then be done. Convert comparison to conditional for this purpose.
8684 This also optimizes non-constant cases that used to be done in
8685 expand_expr.
8687 Before we do that, see if this is a BIT_AND_EXPR or a BIT_IOR_EXPR,
8688 where one of the operands is a comparison and the other is a comparison, a
8689 BIT_AND_EXPR with the constant 1, or a truth value. In that case, the
8690 code below would make the expression more complex. Change it to a
8691 TRUTH_{AND,OR}_EXPR. Likewise, convert a similar NE_EXPR to
8692 TRUTH_XOR_EXPR and an EQ_EXPR to the inversion of a TRUTH_XOR_EXPR. */
8694 if ((code == BIT_AND_EXPR || code == BIT_IOR_EXPR
8695 || code == EQ_EXPR || code == NE_EXPR)
8696 && ((truth_value_p (TREE_CODE (arg0))
8697 && (truth_value_p (TREE_CODE (arg1))
8698 || (TREE_CODE (arg1) == BIT_AND_EXPR
8699 && integer_onep (TREE_OPERAND (arg1, 1)))))
8700 || (truth_value_p (TREE_CODE (arg1))
8701 && (truth_value_p (TREE_CODE (arg0))
8702 || (TREE_CODE (arg0) == BIT_AND_EXPR
8703 && integer_onep (TREE_OPERAND (arg0, 1)))))))
8705 tem = fold_build2 (code == BIT_AND_EXPR ? TRUTH_AND_EXPR
8706 : code == BIT_IOR_EXPR ? TRUTH_OR_EXPR
8707 : TRUTH_XOR_EXPR,
8708 boolean_type_node,
8709 fold_convert (boolean_type_node, arg0),
8710 fold_convert (boolean_type_node, arg1));
8712 if (code == EQ_EXPR)
8713 tem = invert_truthvalue (tem);
8715 return fold_convert (type, tem);
8718 if (TREE_CODE_CLASS (code) == tcc_binary
8719 || TREE_CODE_CLASS (code) == tcc_comparison)
8721 if (TREE_CODE (arg0) == COMPOUND_EXPR)
8722 return build2 (COMPOUND_EXPR, type, TREE_OPERAND (arg0, 0),
8723 fold_build2 (code, type,
8724 TREE_OPERAND (arg0, 1), op1));
8725 if (TREE_CODE (arg1) == COMPOUND_EXPR
8726 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
8727 return build2 (COMPOUND_EXPR, type, TREE_OPERAND (arg1, 0),
8728 fold_build2 (code, type,
8729 op0, TREE_OPERAND (arg1, 1)));
8731 if (TREE_CODE (arg0) == COND_EXPR || COMPARISON_CLASS_P (arg0))
8733 tem = fold_binary_op_with_conditional_arg (code, type, op0, op1,
8734 arg0, arg1,
8735 /*cond_first_p=*/1);
8736 if (tem != NULL_TREE)
8737 return tem;
8740 if (TREE_CODE (arg1) == COND_EXPR || COMPARISON_CLASS_P (arg1))
8742 tem = fold_binary_op_with_conditional_arg (code, type, op0, op1,
8743 arg1, arg0,
8744 /*cond_first_p=*/0);
8745 if (tem != NULL_TREE)
8746 return tem;
8750 switch (code)
8752 case PLUS_EXPR:
8753 /* A + (-B) -> A - B */
8754 if (TREE_CODE (arg1) == NEGATE_EXPR)
8755 return fold_build2 (MINUS_EXPR, type,
8756 fold_convert (type, arg0),
8757 fold_convert (type, TREE_OPERAND (arg1, 0)));
8758 /* (-A) + B -> B - A */
8759 if (TREE_CODE (arg0) == NEGATE_EXPR
8760 && reorder_operands_p (TREE_OPERAND (arg0, 0), arg1))
8761 return fold_build2 (MINUS_EXPR, type,
8762 fold_convert (type, arg1),
8763 fold_convert (type, TREE_OPERAND (arg0, 0)));
8764 /* Convert ~A + 1 to -A. */
8765 if (INTEGRAL_TYPE_P (type)
8766 && TREE_CODE (arg0) == BIT_NOT_EXPR
8767 && integer_onep (arg1))
8768 return fold_build1 (NEGATE_EXPR, type, TREE_OPERAND (arg0, 0));
8770 /* Handle (A1 * C1) + (A2 * C2) with A1, A2 or C1, C2 being the
8771 same or one. */
8772 if ((TREE_CODE (arg0) == MULT_EXPR
8773 || TREE_CODE (arg1) == MULT_EXPR)
8774 && (!FLOAT_TYPE_P (type) || flag_unsafe_math_optimizations))
8776 tree tem = fold_plusminus_mult_expr (code, type, arg0, arg1);
8777 if (tem)
8778 return tem;
8781 if (! FLOAT_TYPE_P (type))
8783 if (integer_zerop (arg1))
8784 return non_lvalue (fold_convert (type, arg0));
8786 /* ~X + X is -1. */
8787 if (TREE_CODE (arg0) == BIT_NOT_EXPR
8788 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
8789 && !TYPE_TRAP_SIGNED (type))
8791 t1 = build_int_cst_type (type, -1);
8792 return omit_one_operand (type, t1, arg1);
8795 /* X + ~X is -1. */
8796 if (TREE_CODE (arg1) == BIT_NOT_EXPR
8797 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0)
8798 && !TYPE_TRAP_SIGNED (type))
8800 t1 = build_int_cst_type (type, -1);
8801 return omit_one_operand (type, t1, arg0);
8804 /* If we are adding two BIT_AND_EXPR's, both of which are and'ing
8805 with a constant, and the two constants have no bits in common,
8806 we should treat this as a BIT_IOR_EXPR since this may produce more
8807 simplifications. */
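/* Illustrative example: (x & 1) + (y & 6) uses disjoint mask bits,
so it is treated as (x & 1) | (y & 6). */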
8808 if (TREE_CODE (arg0) == BIT_AND_EXPR
8809 && TREE_CODE (arg1) == BIT_AND_EXPR
8810 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
8811 && TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
8812 && integer_zerop (const_binop (BIT_AND_EXPR,
8813 TREE_OPERAND (arg0, 1),
8814 TREE_OPERAND (arg1, 1), 0)))
8816 code = BIT_IOR_EXPR;
8817 goto bit_ior;
8820 /* Reassociate (plus (plus (mult) (foo)) (mult)) as
8821 (plus (plus (mult) (mult)) (foo)) so that we can
8822 take advantage of the factoring cases below. */
8823 if (((TREE_CODE (arg0) == PLUS_EXPR
8824 || TREE_CODE (arg0) == MINUS_EXPR)
8825 && TREE_CODE (arg1) == MULT_EXPR)
8826 || ((TREE_CODE (arg1) == PLUS_EXPR
8827 || TREE_CODE (arg1) == MINUS_EXPR)
8828 && TREE_CODE (arg0) == MULT_EXPR))
8830 tree parg0, parg1, parg, marg;
8831 enum tree_code pcode;
8833 if (TREE_CODE (arg1) == MULT_EXPR)
8834 parg = arg0, marg = arg1;
8835 else
8836 parg = arg1, marg = arg0;
8837 pcode = TREE_CODE (parg);
8838 parg0 = TREE_OPERAND (parg, 0);
8839 parg1 = TREE_OPERAND (parg, 1);
8840 STRIP_NOPS (parg0);
8841 STRIP_NOPS (parg1);
8843 if (TREE_CODE (parg0) == MULT_EXPR
8844 && TREE_CODE (parg1) != MULT_EXPR)
8845 return fold_build2 (pcode, type,
8846 fold_build2 (PLUS_EXPR, type,
8847 fold_convert (type, parg0),
8848 fold_convert (type, marg)),
8849 fold_convert (type, parg1));
8850 if (TREE_CODE (parg0) != MULT_EXPR
8851 && TREE_CODE (parg1) == MULT_EXPR)
8852 return fold_build2 (PLUS_EXPR, type,
8853 fold_convert (type, parg0),
8854 fold_build2 (pcode, type,
8855 fold_convert (type, marg),
8856 fold_convert (type,
8857 parg1)));
8860 /* Try replacing &a[i1] + c * i2 with &a[i1 + i2], if c is the step
8861 of the array. The loop optimizer sometimes produces this type of
8862 expression. */
8863 if (TREE_CODE (arg0) == ADDR_EXPR)
8865 tem = try_move_mult_to_index (PLUS_EXPR, arg0, arg1);
8866 if (tem)
8867 return fold_convert (type, tem);
8869 else if (TREE_CODE (arg1) == ADDR_EXPR)
8871 tem = try_move_mult_to_index (PLUS_EXPR, arg1, arg0);
8872 if (tem)
8873 return fold_convert (type, tem);
8876 else
8878 /* See if ARG1 is zero and X + ARG1 reduces to X. */
8879 if (fold_real_zero_addition_p (TREE_TYPE (arg0), arg1, 0))
8880 return non_lvalue (fold_convert (type, arg0));
8882 /* Likewise if the operands are reversed. */
8883 if (fold_real_zero_addition_p (TREE_TYPE (arg1), arg0, 0))
8884 return non_lvalue (fold_convert (type, arg1));
8886 /* Convert X + -C into X - C. */
8887 if (TREE_CODE (arg1) == REAL_CST
8888 && REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg1)))
8890 tem = fold_negate_const (arg1, type);
8891 if (!TREE_OVERFLOW (arg1) || !flag_trapping_math)
8892 return fold_build2 (MINUS_EXPR, type,
8893 fold_convert (type, arg0),
8894 fold_convert (type, tem));
8897 /* Fold __complex__ ( x, 0 ) + __complex__ ( 0, y )
8898 to __complex__ ( x, y ). This is not the same for SNaNs or
8899 if signed zeros are involved. */
8900 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
8901 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
8902 && COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0)))
8904 tree rtype = TREE_TYPE (TREE_TYPE (arg0));
8905 tree arg0r = fold_unary (REALPART_EXPR, rtype, arg0);
8906 tree arg0i = fold_unary (IMAGPART_EXPR, rtype, arg0);
8907 bool arg0rz = false, arg0iz = false;
8908 if ((arg0r && (arg0rz = real_zerop (arg0r)))
8909 || (arg0i && (arg0iz = real_zerop (arg0i))))
8911 tree arg1r = fold_unary (REALPART_EXPR, rtype, arg1);
8912 tree arg1i = fold_unary (IMAGPART_EXPR, rtype, arg1);
8913 if (arg0rz && arg1i && real_zerop (arg1i))
8915 tree rp = arg1r ? arg1r
8916 : build1 (REALPART_EXPR, rtype, arg1);
8917 tree ip = arg0i ? arg0i
8918 : build1 (IMAGPART_EXPR, rtype, arg0);
8919 return fold_build2 (COMPLEX_EXPR, type, rp, ip);
8921 else if (arg0iz && arg1r && real_zerop (arg1r))
8923 tree rp = arg0r ? arg0r
8924 : build1 (REALPART_EXPR, rtype, arg0);
8925 tree ip = arg1i ? arg1i
8926 : build1 (IMAGPART_EXPR, rtype, arg1);
8927 return fold_build2 (COMPLEX_EXPR, type, rp, ip);
8932 if (flag_unsafe_math_optimizations
8933 && (TREE_CODE (arg0) == RDIV_EXPR || TREE_CODE (arg0) == MULT_EXPR)
8934 && (TREE_CODE (arg1) == RDIV_EXPR || TREE_CODE (arg1) == MULT_EXPR)
8935 && (tem = distribute_real_division (code, type, arg0, arg1)))
8936 return tem;
8938 /* Convert x+x into x*2.0. */
8939 if (operand_equal_p (arg0, arg1, 0)
8940 && SCALAR_FLOAT_TYPE_P (type))
8941 return fold_build2 (MULT_EXPR, type, arg0,
8942 build_real (type, dconst2));
8944 /* Convert a + (b*c + d*e) into (a + b*c) + d*e. */
8945 if (flag_unsafe_math_optimizations
8946 && TREE_CODE (arg1) == PLUS_EXPR
8947 && TREE_CODE (arg0) != MULT_EXPR)
8949 tree tree10 = TREE_OPERAND (arg1, 0);
8950 tree tree11 = TREE_OPERAND (arg1, 1);
8951 if (TREE_CODE (tree11) == MULT_EXPR
8952 && TREE_CODE (tree10) == MULT_EXPR)
8954 tree tree0;
8955 tree0 = fold_build2 (PLUS_EXPR, type, arg0, tree10);
8956 return fold_build2 (PLUS_EXPR, type, tree0, tree11);
8959 /* Convert (b*c + d*e) + a into b*c + (d*e +a). */
8960 if (flag_unsafe_math_optimizations
8961 && TREE_CODE (arg0) == PLUS_EXPR
8962 && TREE_CODE (arg1) != MULT_EXPR)
8964 tree tree00 = TREE_OPERAND (arg0, 0);
8965 tree tree01 = TREE_OPERAND (arg0, 1);
8966 if (TREE_CODE (tree01) == MULT_EXPR
8967 && TREE_CODE (tree00) == MULT_EXPR)
8969 tree tree0;
8970 tree0 = fold_build2 (PLUS_EXPR, type, tree01, arg1);
8971 return fold_build2 (PLUS_EXPR, type, tree00, tree0);
8976 bit_rotate:
8977 /* (A << C1) + (A >> C2) if A is unsigned and C1+C2 is the size of A
8978 is a rotate of A by C1 bits. */
8979 /* (A << B) + (A >> (Z - B)) if A is unsigned and Z is the size of A
8980 is a rotate of A by B bits. */
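/* Illustrative examples, assuming 32-bit unsigned x:
(x << 3) + (x >> 29) and (x << n) + (x >> (32 - n)) are both
recognized as left rotates of x. */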
8982 enum tree_code code0, code1;
8983 code0 = TREE_CODE (arg0);
8984 code1 = TREE_CODE (arg1);
8985 if (((code0 == RSHIFT_EXPR && code1 == LSHIFT_EXPR)
8986 || (code1 == RSHIFT_EXPR && code0 == LSHIFT_EXPR))
8987 && operand_equal_p (TREE_OPERAND (arg0, 0),
8988 TREE_OPERAND (arg1, 0), 0)
8989 && TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg0, 0))))
8991 tree tree01, tree11;
8992 enum tree_code code01, code11;
8994 tree01 = TREE_OPERAND (arg0, 1);
8995 tree11 = TREE_OPERAND (arg1, 1);
8996 STRIP_NOPS (tree01);
8997 STRIP_NOPS (tree11);
8998 code01 = TREE_CODE (tree01);
8999 code11 = TREE_CODE (tree11);
9000 if (code01 == INTEGER_CST
9001 && code11 == INTEGER_CST
9002 && TREE_INT_CST_HIGH (tree01) == 0
9003 && TREE_INT_CST_HIGH (tree11) == 0
9004 && ((TREE_INT_CST_LOW (tree01) + TREE_INT_CST_LOW (tree11))
9005 == TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg0, 0)))))
9006 return build2 (LROTATE_EXPR, type, TREE_OPERAND (arg0, 0),
9007 code0 == LSHIFT_EXPR ? tree01 : tree11);
9008 else if (code11 == MINUS_EXPR)
9010 tree tree110, tree111;
9011 tree110 = TREE_OPERAND (tree11, 0);
9012 tree111 = TREE_OPERAND (tree11, 1);
9013 STRIP_NOPS (tree110);
9014 STRIP_NOPS (tree111);
9015 if (TREE_CODE (tree110) == INTEGER_CST
9016 && 0 == compare_tree_int (tree110,
9017 TYPE_PRECISION
9018 (TREE_TYPE (TREE_OPERAND
9019 (arg0, 0))))
9020 && operand_equal_p (tree01, tree111, 0))
9021 return build2 ((code0 == LSHIFT_EXPR
9022 ? LROTATE_EXPR
9023 : RROTATE_EXPR),
9024 type, TREE_OPERAND (arg0, 0), tree01);
9026 else if (code01 == MINUS_EXPR)
9028 tree tree010, tree011;
9029 tree010 = TREE_OPERAND (tree01, 0);
9030 tree011 = TREE_OPERAND (tree01, 1);
9031 STRIP_NOPS (tree010);
9032 STRIP_NOPS (tree011);
9033 if (TREE_CODE (tree010) == INTEGER_CST
9034 && 0 == compare_tree_int (tree010,
9035 TYPE_PRECISION
9036 (TREE_TYPE (TREE_OPERAND
9037 (arg0, 0))))
9038 && operand_equal_p (tree11, tree011, 0))
9039 return build2 ((code0 != LSHIFT_EXPR
9040 ? LROTATE_EXPR
9041 : RROTATE_EXPR),
9042 type, TREE_OPERAND (arg0, 0), tree11);
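/* Editorial aside -- a standalone sketch, not part of fold-const.c.
   The source-level idiom recognized above: for an unsigned A, adding
   (or, via bit_ior below, or-ing) a left shift and the complementary
   right shift is a rotate.  The uint32_t width and sample value are
   illustrative assumptions only.  */
#if 0  /* Illustration only; not compiled with this file.  */
#include <assert.h>
#include <stdint.h>

/* The (A << B) + (A >> (Z - B)) shape that folds to LROTATE_EXPR.  */
static uint32_t
rotl_idiom (uint32_t a, unsigned b)
{
  return (a << b) + (a >> (32 - b));  /* Shift counts sum to the width.  */
}

int
main (void)
{
  uint32_t a = 0x12345678u;
  /* Start at 1: a shift by the full width (32 - 0) is undefined.  */
  for (unsigned b = 1; b < 32; b++)
    assert (rotl_idiom (a, b) == ((a << b) | (a >> (32 - b))));
  return 0;
}
#endif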
9047 associate:
9048 /* In most languages, we can't associate operations on floats through
9049 parentheses. Rather than remember where the parentheses were, we
9050 don't associate floats at all, unless the user has specified
9051 -funsafe-math-optimizations. */
9053 if (! FLOAT_TYPE_P (type) || flag_unsafe_math_optimizations)
9055 tree var0, con0, lit0, minus_lit0;
9056 tree var1, con1, lit1, minus_lit1;
9058 /* Split both trees into variables, constants, and literals. Then
9059 associate each group together, the constants with literals,
9060 then the result with variables. This increases the chances of
9061 literals being recombined later and of generating relocatable
9062 expressions for the sum of a constant and literal. */
9063 var0 = split_tree (arg0, code, &con0, &lit0, &minus_lit0, 0);
9064 var1 = split_tree (arg1, code, &con1, &lit1, &minus_lit1,
9065 code == MINUS_EXPR);
9067 /* Only do something if we found more than two objects. Otherwise,
9068 nothing has changed and we risk infinite recursion. */
9069 if (2 < ((var0 != 0) + (var1 != 0)
9070 + (con0 != 0) + (con1 != 0)
9071 + (lit0 != 0) + (lit1 != 0)
9072 + (minus_lit0 != 0) + (minus_lit1 != 0)))
9074 /* Recombine MINUS_EXPR operands by using PLUS_EXPR. */
9075 if (code == MINUS_EXPR)
9076 code = PLUS_EXPR;
9078 var0 = associate_trees (var0, var1, code, type);
9079 con0 = associate_trees (con0, con1, code, type);
9080 lit0 = associate_trees (lit0, lit1, code, type);
9081 minus_lit0 = associate_trees (minus_lit0, minus_lit1, code, type);
9083 /* Preserve the MINUS_EXPR if the negative part of the literal is
9084 greater than the positive part. Otherwise, the multiplicative
9085 folding code (i.e. extract_muldiv) may be fooled when
9086 unsigned constants are subtracted, as in the following
9087 example: ((X*2 + 4) - 8U)/2. */
9088 if (minus_lit0 && lit0)
9090 if (TREE_CODE (lit0) == INTEGER_CST
9091 && TREE_CODE (minus_lit0) == INTEGER_CST
9092 && tree_int_cst_lt (lit0, minus_lit0))
9094 minus_lit0 = associate_trees (minus_lit0, lit0,
9095 MINUS_EXPR, type);
9096 lit0 = 0;
9098 else
9100 lit0 = associate_trees (lit0, minus_lit0,
9101 MINUS_EXPR, type);
9102 minus_lit0 = 0;
9105 if (minus_lit0)
9107 if (con0 == 0)
9108 return fold_convert (type,
9109 associate_trees (var0, minus_lit0,
9110 MINUS_EXPR, type));
9111 else
9113 con0 = associate_trees (con0, minus_lit0,
9114 MINUS_EXPR, type);
9115 return fold_convert (type,
9116 associate_trees (var0, con0,
9117 PLUS_EXPR, type));
9121 con0 = associate_trees (con0, lit0, code, type);
9122 return fold_convert (type, associate_trees (var0, con0,
9123 code, type));
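/* Editorial aside -- a standalone sketch, not part of fold-const.c.
   What the split/associate machinery buys at the source level:
   literals are regrouped across parentheses so they fold into one
   constant.  Integer examples only, since (per the comment above)
   floats are not reassociated without -funsafe-math-optimizations;
   the values are illustrative.  */
#if 0  /* Illustration only; not compiled with this file.  */
#include <assert.h>

int
main (void)
{
  int x = 40;
  /* split_tree sees var {x} and literals {1, 2}; associate_trees
     recombines the literals into the single constant 3.  */
  assert ((x + 1) + 2 == x + 3);
  /* MINUS_EXPR operands are recombined via PLUS_EXPR.  */
  assert ((x + 5) - 2 == x + 3);
  return 0;
}
#endif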
9127 return NULL_TREE;
9129 case MINUS_EXPR:
9130 /* A - (-B) -> A + B */
9131 if (TREE_CODE (arg1) == NEGATE_EXPR)
9132 return fold_build2 (PLUS_EXPR, type, arg0, TREE_OPERAND (arg1, 0));
9133 /* (-A) - B -> (-B) - A where B is easily negated and we can swap. */
9134 if (TREE_CODE (arg0) == NEGATE_EXPR
9135 && (FLOAT_TYPE_P (type)
9136 || INTEGRAL_TYPE_P (type))
9137 && negate_expr_p (arg1)
9138 && reorder_operands_p (arg0, arg1))
9139 return fold_build2 (MINUS_EXPR, type, negate_expr (arg1),
9140 TREE_OPERAND (arg0, 0));
9141 /* Convert -A - 1 to ~A. */
9142 if (INTEGRAL_TYPE_P (type)
9143 && TREE_CODE (arg0) == NEGATE_EXPR
9144 && integer_onep (arg1)
9145 && !TYPE_TRAP_SIGNED (type))
9146 return fold_build1 (BIT_NOT_EXPR, type,
9147 fold_convert (type, TREE_OPERAND (arg0, 0)));
9149 /* Convert -1 - A to ~A. */
9150 if (INTEGRAL_TYPE_P (type)
9151 && integer_all_onesp (arg0))
9152 return fold_build1 (BIT_NOT_EXPR, type, op1);
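/* Editorial aside -- a standalone sketch, not part of fold-const.c.
   Both folds rest on the two's-complement identity -A == ~A + 1,
   from which -A - 1 == ~A and -1 - A == ~A.  The value is
   illustrative.  */
#if 0  /* Illustration only; not compiled with this file.  */
#include <assert.h>

int
main (void)
{
  int a = 12345;
  assert (-a - 1 == ~a);  /* -A - 1 -> ~A */
  assert (-1 - a == ~a);  /* -1 - A -> ~A */
  return 0;
}
#endif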
9154 if (! FLOAT_TYPE_P (type))
9156 if (integer_zerop (arg0))
9157 return negate_expr (fold_convert (type, arg1));
9158 if (integer_zerop (arg1))
9159 return non_lvalue (fold_convert (type, arg0));
9161 /* Fold A - (A & B) into ~B & A. */
9162 if (!TREE_SIDE_EFFECTS (arg0)
9163 && TREE_CODE (arg1) == BIT_AND_EXPR)
9165 if (operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0))
9166 return fold_build2 (BIT_AND_EXPR, type,
9167 fold_build1 (BIT_NOT_EXPR, type,
9168 TREE_OPERAND (arg1, 0)),
9169 arg0);
9170 if (operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
9171 return fold_build2 (BIT_AND_EXPR, type,
9172 fold_build1 (BIT_NOT_EXPR, type,
9173 TREE_OPERAND (arg1, 1)),
9174 arg0);
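/* Editorial aside -- a standalone sketch, not part of fold-const.c.
   A & B is a submask of A's set bits, so subtracting it cannot
   borrow; it just clears the bits B covers, which is ~B & A.  The
   values are illustrative.  */
#if 0  /* Illustration only; not compiled with this file.  */
#include <assert.h>

int
main (void)
{
  unsigned a = 0xDEADBEEFu, b = 0x0000FFFFu;
  assert (a - (a & b) == (~b & a));
  return 0;
}
#endif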
9177 /* Fold (A & ~B) - (A & B) into (A ^ B) - B, where B is
9178 any power of 2 minus 1. */
9179 if (TREE_CODE (arg0) == BIT_AND_EXPR
9180 && TREE_CODE (arg1) == BIT_AND_EXPR
9181 && operand_equal_p (TREE_OPERAND (arg0, 0),
9182 TREE_OPERAND (arg1, 0), 0))
9184 tree mask0 = TREE_OPERAND (arg0, 1);
9185 tree mask1 = TREE_OPERAND (arg1, 1);
9186 tree tem = fold_build1 (BIT_NOT_EXPR, type, mask0);
9188 if (operand_equal_p (tem, mask1, 0))
9190 tem = fold_build2 (BIT_XOR_EXPR, type,
9191 TREE_OPERAND (arg0, 0), mask1);
9192 return fold_build2 (MINUS_EXPR, type, tem, mask1);
9197 /* See if ARG1 is zero and X - ARG1 reduces to X. */
9198 else if (fold_real_zero_addition_p (TREE_TYPE (arg0), arg1, 1))
9199 return non_lvalue (fold_convert (type, arg0));
9201 /* (ARG0 - ARG1) is the same as (-ARG1 + ARG0). So check whether
9202 ARG0 is zero and X + ARG0 reduces to X, since that would mean
9203 (-ARG1 + ARG0) reduces to -ARG1. */
9204 else if (fold_real_zero_addition_p (TREE_TYPE (arg1), arg0, 0))
9205 return negate_expr (fold_convert (type, arg1));
9207 /* Fold &x - &x. This can happen from &x.foo - &x.
9208 This is unsafe for certain floats even in non-IEEE formats.
9209 In IEEE, it is unsafe because it gives the wrong result for NaNs.
9210 Also note that operand_equal_p is always false if an operand
9211 is volatile. */
9213 if ((! FLOAT_TYPE_P (type) || flag_unsafe_math_optimizations)
9214 && operand_equal_p (arg0, arg1, 0))
9215 return fold_convert (type, integer_zero_node);
9217 /* A - B -> A + (-B) if B is easily negatable. */
9218 if (negate_expr_p (arg1)
9219 && ((FLOAT_TYPE_P (type)
9220 /* Avoid this transformation if B is a positive REAL_CST. */
9221 && (TREE_CODE (arg1) != REAL_CST
9222 || REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg1))))
9223 || INTEGRAL_TYPE_P (type)))
9224 return fold_build2 (PLUS_EXPR, type,
9225 fold_convert (type, arg0),
9226 fold_convert (type, negate_expr (arg1)));
9228 /* Try folding difference of addresses. */
9230 HOST_WIDE_INT diff;
9232 if ((TREE_CODE (arg0) == ADDR_EXPR
9233 || TREE_CODE (arg1) == ADDR_EXPR)
9234 && ptr_difference_const (arg0, arg1, &diff))
9235 return build_int_cst_type (type, diff);
9238 /* Fold &a[i] - &a[j] to i-j. */
9239 if (TREE_CODE (arg0) == ADDR_EXPR
9240 && TREE_CODE (TREE_OPERAND (arg0, 0)) == ARRAY_REF
9241 && TREE_CODE (arg1) == ADDR_EXPR
9242 && TREE_CODE (TREE_OPERAND (arg1, 0)) == ARRAY_REF)
9244 tree aref0 = TREE_OPERAND (arg0, 0);
9245 tree aref1 = TREE_OPERAND (arg1, 0);
9246 if (operand_equal_p (TREE_OPERAND (aref0, 0),
9247 TREE_OPERAND (aref1, 0), 0))
9249 tree op0 = fold_convert (type, TREE_OPERAND (aref0, 1));
9250 tree op1 = fold_convert (type, TREE_OPERAND (aref1, 1));
9251 tree esz = array_ref_element_size (aref0);
9252 tree diff = build2 (MINUS_EXPR, type, op0, op1);
9253 return fold_build2 (MULT_EXPR, type, diff,
9254 fold_convert (type, esz));
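/* Editorial aside -- a standalone sketch, not part of fold-const.c.
   The tree-level difference of the two addresses is in bytes, hence
   the multiply by the element size above; at the C level the same
   fold surfaces as the plain index difference, with no address ever
   formed.  Array size and indices are illustrative.  */
#if 0  /* Illustration only; not compiled with this file.  */
#include <assert.h>
#include <stddef.h>

int
main (void)
{
  double a[16];
  ptrdiff_t i = 11, j = 4;
  assert (&a[i] - &a[j] == i - j);
  return 0;
}
#endif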
9259 /* Try replacing &a[i1] - c * i2 with &a[i1 - i2], if c is the step
9260 of the array. The loop optimizer sometimes produces this type of
9261 expression. */
9262 if (TREE_CODE (arg0) == ADDR_EXPR)
9264 tem = try_move_mult_to_index (MINUS_EXPR, arg0, arg1);
9265 if (tem)
9266 return fold_convert (type, tem);
9269 if (flag_unsafe_math_optimizations
9270 && (TREE_CODE (arg0) == RDIV_EXPR || TREE_CODE (arg0) == MULT_EXPR)
9271 && (TREE_CODE (arg1) == RDIV_EXPR || TREE_CODE (arg1) == MULT_EXPR)
9272 && (tem = distribute_real_division (code, type, arg0, arg1)))
9273 return tem;
9275 /* Handle (A1 * C1) - (A2 * C2) where A1 and A2, or C1 and C2,
9276 are the same or one of them is 1. */
9277 if ((TREE_CODE (arg0) == MULT_EXPR
9278 || TREE_CODE (arg1) == MULT_EXPR)
9279 && (!FLOAT_TYPE_P (type) || flag_unsafe_math_optimizations))
9281 tree tem = fold_plusminus_mult_expr (code, type, arg0, arg1);
9282 if (tem)
9283 return tem;
9286 goto associate;
9288 case MULT_EXPR:
9289 /* (-A) * (-B) -> A * B */
9290 if (TREE_CODE (arg0) == NEGATE_EXPR && negate_expr_p (arg1))
9291 return fold_build2 (MULT_EXPR, type,
9292 fold_convert (type, TREE_OPERAND (arg0, 0)),
9293 fold_convert (type, negate_expr (arg1)));
9294 if (TREE_CODE (arg1) == NEGATE_EXPR && negate_expr_p (arg0))
9295 return fold_build2 (MULT_EXPR, type,
9296 fold_convert (type, negate_expr (arg0)),
9297 fold_convert (type, TREE_OPERAND (arg1, 0)));
9299 if (! FLOAT_TYPE_P (type))
9301 if (integer_zerop (arg1))
9302 return omit_one_operand (type, arg1, arg0);
9303 if (integer_onep (arg1))
9304 return non_lvalue (fold_convert (type, arg0));
9305 /* Transform x * -1 into -x. */
9306 if (integer_all_onesp (arg1))
9307 return fold_convert (type, negate_expr (arg0));
9308 /* Transform x * -C into -x * C if x is easily negatable. */
9309 if (TREE_CODE (arg1) == INTEGER_CST
9310 && tree_int_cst_sgn (arg1) == -1
9311 && negate_expr_p (arg0)
9312 && (tem = negate_expr (arg1)) != arg1
9313 && !TREE_OVERFLOW (tem))
9314 return fold_build2 (MULT_EXPR, type,
9315 negate_expr (arg0), tem);
9317 /* (a * (1 << b)) is (a << b). */
9318 if (TREE_CODE (arg1) == LSHIFT_EXPR
9319 && integer_onep (TREE_OPERAND (arg1, 0)))
9320 return fold_build2 (LSHIFT_EXPR, type, arg0,
9321 TREE_OPERAND (arg1, 1));
9322 if (TREE_CODE (arg0) == LSHIFT_EXPR
9323 && integer_onep (TREE_OPERAND (arg0, 0)))
9324 return fold_build2 (LSHIFT_EXPR, type, arg1,
9325 TREE_OPERAND (arg0, 1));
9327 if (TREE_CODE (arg1) == INTEGER_CST
9328 && 0 != (tem = extract_muldiv (op0,
9329 fold_convert (type, arg1),
9330 code, NULL_TREE)))
9331 return fold_convert (type, tem);
9333 /* Optimize z * conj(z) for integer complex numbers. */
9334 if (TREE_CODE (arg0) == CONJ_EXPR
9335 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
9336 return fold_mult_zconjz (type, arg1);
9337 if (TREE_CODE (arg1) == CONJ_EXPR
9338 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
9339 return fold_mult_zconjz (type, arg0);
9341 else
9343 /* Maybe fold x * 0 to 0. The expressions aren't the same
9344 when x is NaN, since x * 0 is also NaN. Nor are they the
9345 same in modes with signed zeros, since multiplying a
9346 negative value by 0 gives -0, not +0. */
9347 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0)))
9348 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
9349 && real_zerop (arg1))
9350 return omit_one_operand (type, arg1, arg0);
9351 /* In IEEE floating point, x*1 is not equivalent to x for snans. */
9352 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
9353 && real_onep (arg1))
9354 return non_lvalue (fold_convert (type, arg0));
9356 /* Transform x * -1.0 into -x. */
9357 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
9358 && real_minus_onep (arg1))
9359 return fold_convert (type, negate_expr (arg0));
9361 /* Convert (C1/X)*C2 into (C1*C2)/X. */
9362 if (flag_unsafe_math_optimizations
9363 && TREE_CODE (arg0) == RDIV_EXPR
9364 && TREE_CODE (arg1) == REAL_CST
9365 && TREE_CODE (TREE_OPERAND (arg0, 0)) == REAL_CST)
9367 tree tem = const_binop (MULT_EXPR, TREE_OPERAND (arg0, 0),
9368 arg1, 0);
9369 if (tem)
9370 return fold_build2 (RDIV_EXPR, type, tem,
9371 TREE_OPERAND (arg0, 1));
9374 /* Strip sign operations from X in X*X, i.e. -Y*-Y -> Y*Y. */
9375 if (operand_equal_p (arg0, arg1, 0))
9377 tree tem = fold_strip_sign_ops (arg0);
9378 if (tem != NULL_TREE)
9380 tem = fold_convert (type, tem);
9381 return fold_build2 (MULT_EXPR, type, tem, tem);
9385 /* Fold z * +-I to __complex__ (-+__imag z, +-__real z).
9386 This is not the same for NaNs or if signed zeros are
9387 involved. */
9388 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0)))
9389 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
9390 && COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0))
9391 && TREE_CODE (arg1) == COMPLEX_CST
9392 && real_zerop (TREE_REALPART (arg1)))
9394 tree rtype = TREE_TYPE (TREE_TYPE (arg0));
9395 if (real_onep (TREE_IMAGPART (arg1)))
9396 return fold_build2 (COMPLEX_EXPR, type,
9397 negate_expr (fold_build1 (IMAGPART_EXPR,
9398 rtype, arg0)),
9399 fold_build1 (REALPART_EXPR, rtype, arg0));
9400 else if (real_minus_onep (TREE_IMAGPART (arg1)))
9401 return fold_build2 (COMPLEX_EXPR, type,
9402 fold_build1 (IMAGPART_EXPR, rtype, arg0),
9403 negate_expr (fold_build1 (REALPART_EXPR,
9404 rtype, arg0)));
9407 /* Optimize z * conj(z) for floating point complex numbers.
9408 Guarded by flag_unsafe_math_optimizations as non-finite
9409 imaginary components don't produce scalar results. */
9410 if (flag_unsafe_math_optimizations
9411 && TREE_CODE (arg0) == CONJ_EXPR
9412 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
9413 return fold_mult_zconjz (type, arg1);
9414 if (flag_unsafe_math_optimizations
9415 && TREE_CODE (arg1) == CONJ_EXPR
9416 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
9417 return fold_mult_zconjz (type, arg0);
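/* Editorial aside -- a standalone sketch, not part of fold-const.c.
   z * conj(z) equals creal(z)^2 + cimag(z)^2 with a zero imaginary
   part, which is the scalar form fold_mult_zconjz builds.  3+4i is
   chosen so the textbook complex multiply is exact in binary
   floating point.  C99; link with -lm if your platform requires
   it.  */
#if 0  /* Illustration only; not compiled with this file.  */
#include <assert.h>
#include <complex.h>

int
main (void)
{
  double complex z = 3.0 + 4.0 * I;
  double complex p = z * conj (z);
  assert (creal (p) == 25.0);  /* 3*3 + 4*4, exact for these values.  */
  assert (cimag (p) == 0.0);
  return 0;
}
#endif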
9419 if (flag_unsafe_math_optimizations)
9421 enum built_in_function fcode0 = builtin_mathfn_code (arg0);
9422 enum built_in_function fcode1 = builtin_mathfn_code (arg1);
9424 /* Optimizations of root(...)*root(...). */
9425 if (fcode0 == fcode1 && BUILTIN_ROOT_P (fcode0))
9427 tree rootfn, arg, arglist;
9428 tree arg00 = TREE_VALUE (TREE_OPERAND (arg0, 1));
9429 tree arg10 = TREE_VALUE (TREE_OPERAND (arg1, 1));
9431 /* Optimize sqrt(x)*sqrt(x) as x. */
9432 if (BUILTIN_SQRT_P (fcode0)
9433 && operand_equal_p (arg00, arg10, 0)
9434 && ! HONOR_SNANS (TYPE_MODE (type)))
9435 return arg00;
9437 /* Optimize root(x)*root(y) as root(x*y). */
9438 rootfn = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
9439 arg = fold_build2 (MULT_EXPR, type, arg00, arg10);
9440 arglist = build_tree_list (NULL_TREE, arg);
9441 return build_function_call_expr (rootfn, arglist);
9444 /* Optimize expN(x)*expN(y) as expN(x+y). */
9445 if (fcode0 == fcode1 && BUILTIN_EXPONENT_P (fcode0))
9447 tree expfn = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
9448 tree arg = fold_build2 (PLUS_EXPR, type,
9449 TREE_VALUE (TREE_OPERAND (arg0, 1)),
9450 TREE_VALUE (TREE_OPERAND (arg1, 1)));
9451 tree arglist = build_tree_list (NULL_TREE, arg);
9452 return build_function_call_expr (expfn, arglist);
9455 /* Optimizations of pow(...)*pow(...). */
9456 if ((fcode0 == BUILT_IN_POW && fcode1 == BUILT_IN_POW)
9457 || (fcode0 == BUILT_IN_POWF && fcode1 == BUILT_IN_POWF)
9458 || (fcode0 == BUILT_IN_POWL && fcode1 == BUILT_IN_POWL))
9460 tree arg00 = TREE_VALUE (TREE_OPERAND (arg0, 1));
9461 tree arg01 = TREE_VALUE (TREE_CHAIN (TREE_OPERAND (arg0,
9462 1)));
9463 tree arg10 = TREE_VALUE (TREE_OPERAND (arg1, 1));
9464 tree arg11 = TREE_VALUE (TREE_CHAIN (TREE_OPERAND (arg1,
9465 1)));
9467 /* Optimize pow(x,y)*pow(z,y) as pow(x*z,y). */
9468 if (operand_equal_p (arg01, arg11, 0))
9470 tree powfn = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
9471 tree arg = fold_build2 (MULT_EXPR, type, arg00, arg10);
9472 tree arglist = tree_cons (NULL_TREE, arg,
9473 build_tree_list (NULL_TREE,
9474 arg01));
9475 return build_function_call_expr (powfn, arglist);
9478 /* Optimize pow(x,y)*pow(x,z) as pow(x,y+z). */
9479 if (operand_equal_p (arg00, arg10, 0))
9481 tree powfn = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
9482 tree arg = fold_build2 (PLUS_EXPR, type, arg01, arg11);
9483 tree arglist = tree_cons (NULL_TREE, arg00,
9484 build_tree_list (NULL_TREE,
9485 arg));
9486 return build_function_call_expr (powfn, arglist);
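/* Editorial aside -- a standalone sketch, not part of fold-const.c.
   The two pow identities used above, checkable numerically; they
   live under -funsafe-math-optimizations because the two sides may
   round differently.  The operand values are illustrative; link
   with -lm.  */
#if 0  /* Illustration only; not compiled with this file.  */
#include <math.h>
#include <stdio.h>

int
main (void)
{
  double x = 1.7, z = 2.9, y = 3.0;
  /* pow(x,y)*pow(z,y) vs. pow(x*z,y): close but not bit-identical.  */
  printf ("%.17g %.17g\n", pow (x, y) * pow (z, y), pow (x * z, y));
  /* pow(x,y)*pow(x,z) vs. pow(x,y+z).  */
  printf ("%.17g %.17g\n", pow (x, y) * pow (x, z), pow (x, y + z));
  return 0;
}
#endif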
9490 /* Optimize tan(x)*cos(x) as sin(x). */
9491 if (((fcode0 == BUILT_IN_TAN && fcode1 == BUILT_IN_COS)
9492 || (fcode0 == BUILT_IN_TANF && fcode1 == BUILT_IN_COSF)
9493 || (fcode0 == BUILT_IN_TANL && fcode1 == BUILT_IN_COSL)
9494 || (fcode0 == BUILT_IN_COS && fcode1 == BUILT_IN_TAN)
9495 || (fcode0 == BUILT_IN_COSF && fcode1 == BUILT_IN_TANF)
9496 || (fcode0 == BUILT_IN_COSL && fcode1 == BUILT_IN_TANL))
9497 && operand_equal_p (TREE_VALUE (TREE_OPERAND (arg0, 1)),
9498 TREE_VALUE (TREE_OPERAND (arg1, 1)), 0))
9500 tree sinfn = mathfn_built_in (type, BUILT_IN_SIN);
9502 if (sinfn != NULL_TREE)
9503 return build_function_call_expr (sinfn,
9504 TREE_OPERAND (arg0, 1));
9507 /* Optimize x*pow(x,c) as pow(x,c+1). */
9508 if (fcode1 == BUILT_IN_POW
9509 || fcode1 == BUILT_IN_POWF
9510 || fcode1 == BUILT_IN_POWL)
9512 tree arg10 = TREE_VALUE (TREE_OPERAND (arg1, 1));
9513 tree arg11 = TREE_VALUE (TREE_CHAIN (TREE_OPERAND (arg1,
9514 1)));
9515 if (TREE_CODE (arg11) == REAL_CST
9516 && !TREE_OVERFLOW (arg11)
9517 && operand_equal_p (arg0, arg10, 0))
9519 tree powfn = TREE_OPERAND (TREE_OPERAND (arg1, 0), 0);
9520 REAL_VALUE_TYPE c;
9521 tree arg, arglist;
9523 c = TREE_REAL_CST (arg11);
9524 real_arithmetic (&c, PLUS_EXPR, &c, &dconst1);
9525 arg = build_real (type, c);
9526 arglist = build_tree_list (NULL_TREE, arg);
9527 arglist = tree_cons (NULL_TREE, arg0, arglist);
9528 return build_function_call_expr (powfn, arglist);
9532 /* Optimize pow(x,c)*x as pow(x,c+1). */
9533 if (fcode0 == BUILT_IN_POW
9534 || fcode0 == BUILT_IN_POWF
9535 || fcode0 == BUILT_IN_POWL)
9537 tree arg00 = TREE_VALUE (TREE_OPERAND (arg0, 1));
9538 tree arg01 = TREE_VALUE (TREE_CHAIN (TREE_OPERAND (arg0,
9539 1)));
9540 if (TREE_CODE (arg01) == REAL_CST
9541 && !TREE_OVERFLOW (arg01)
9542 && operand_equal_p (arg1, arg00, 0))
9544 tree powfn = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
9545 REAL_VALUE_TYPE c;
9546 tree arg, arglist;
9548 c = TREE_REAL_CST (arg01);
9549 real_arithmetic (&c, PLUS_EXPR, &c, &dconst1);
9550 arg = build_real (type, c);
9551 arglist = build_tree_list (NULL_TREE, arg);
9552 arglist = tree_cons (NULL_TREE, arg1, arglist);
9553 return build_function_call_expr (powfn, arglist);
9557 /* Optimize x*x as pow(x,2.0), which is expanded as x*x. */
9558 if (! optimize_size
9559 && operand_equal_p (arg0, arg1, 0))
9561 tree powfn = mathfn_built_in (type, BUILT_IN_POW);
9563 if (powfn)
9565 tree arg = build_real (type, dconst2);
9566 tree arglist = build_tree_list (NULL_TREE, arg);
9567 arglist = tree_cons (NULL_TREE, arg0, arglist);
9568 return build_function_call_expr (powfn, arglist);
9573 goto associate;
9575 case BIT_IOR_EXPR:
9576 bit_ior:
9577 if (integer_all_onesp (arg1))
9578 return omit_one_operand (type, arg1, arg0);
9579 if (integer_zerop (arg1))
9580 return non_lvalue (fold_convert (type, arg0));
9581 if (operand_equal_p (arg0, arg1, 0))
9582 return non_lvalue (fold_convert (type, arg0));
9584 /* ~X | X is -1. */
9585 if (TREE_CODE (arg0) == BIT_NOT_EXPR
9586 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
9588 t1 = build_int_cst_type (type, -1);
9589 return omit_one_operand (type, t1, arg1);
9592 /* X | ~X is -1. */
9593 if (TREE_CODE (arg1) == BIT_NOT_EXPR
9594 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
9596 t1 = build_int_cst_type (type, -1);
9597 return omit_one_operand (type, t1, arg0);
9600 /* Canonicalize (X & C1) | C2. */
9601 if (TREE_CODE (arg0) == BIT_AND_EXPR
9602 && TREE_CODE (arg1) == INTEGER_CST
9603 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
9605 unsigned HOST_WIDE_INT hi1, lo1, hi2, lo2, mlo, mhi;
9606 int width = TYPE_PRECISION (type);
9607 hi1 = TREE_INT_CST_HIGH (TREE_OPERAND (arg0, 1));
9608 lo1 = TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1));
9609 hi2 = TREE_INT_CST_HIGH (arg1);
9610 lo2 = TREE_INT_CST_LOW (arg1);
9612 /* If (C1&C2) == C1, then (X&C1)|C2 becomes (X,C2). */
9613 if ((hi1 & hi2) == hi1 && (lo1 & lo2) == lo1)
9614 return omit_one_operand (type, arg1, TREE_OPERAND (arg0, 0));
9616 if (width > HOST_BITS_PER_WIDE_INT)
9618 mhi = (unsigned HOST_WIDE_INT) -1
9619 >> (2 * HOST_BITS_PER_WIDE_INT - width);
9620 mlo = -1;
9622 else
9624 mhi = 0;
9625 mlo = (unsigned HOST_WIDE_INT) -1
9626 >> (HOST_BITS_PER_WIDE_INT - width);
9629 /* If (C1|C2) == ~0 then (X&C1)|C2 becomes X|C2. */
9630 if ((~(hi1 | hi2) & mhi) == 0 && (~(lo1 | lo2) & mlo) == 0)
9631 return fold_build2 (BIT_IOR_EXPR, type,
9632 TREE_OPERAND (arg0, 0), arg1);
9634 /* Minimize the number of bits set in C1, i.e. C1 := C1 & ~C2. */
9635 hi1 &= mhi;
9636 lo1 &= mlo;
9637 if ((hi1 & ~hi2) != hi1 || (lo1 & ~lo2) != lo1)
9638 return fold_build2 (BIT_IOR_EXPR, type,
9639 fold_build2 (BIT_AND_EXPR, type,
9640 TREE_OPERAND (arg0, 0),
9641 build_int_cst_wide (type,
9642 lo1 & ~lo2,
9643 hi1 & ~hi2)),
9644 arg1);
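/* Editorial aside -- a standalone sketch, not part of fold-const.c.
   The three cases of the canonicalization above, in C terms: when C1
   is a subset of C2 the AND is dead; when C1|C2 is all ones the AND
   cannot clear anything C2 misses; otherwise the bits of C1 already
   inside C2 can be dropped.  The masks are illustrative.  */
#if 0  /* Illustration only; not compiled with this file.  */
#include <assert.h>

int
main (void)
{
  unsigned x = 0xCAFEBABEu;
  /* (C1 & C2) == C1: the AND result is swallowed by C2.  */
  assert (((x & 0x0F0u) | 0xFF0u) == 0xFF0u);
  /* (C1 | C2) == ~0: becomes X | C2.  */
  assert (((x & 0xFFFFFF00u) | 0x000000FFu) == (x | 0x000000FFu));
  /* Otherwise minimize C1 to C1 & ~C2.  */
  assert (((x & 0x3Cu) | 0x0Fu) == ((x & 0x30u) | 0x0Fu));
  return 0;
}
#endif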
9647 /* (X & Y) | Y is (X, Y). */
9648 if (TREE_CODE (arg0) == BIT_AND_EXPR
9649 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
9650 return omit_one_operand (type, arg1, TREE_OPERAND (arg0, 0));
9651 /* (X & Y) | X is (Y, X). */
9652 if (TREE_CODE (arg0) == BIT_AND_EXPR
9653 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
9654 && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
9655 return omit_one_operand (type, arg1, TREE_OPERAND (arg0, 1));
9656 /* X | (X & Y) is (Y, X). */
9657 if (TREE_CODE (arg1) == BIT_AND_EXPR
9658 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0)
9659 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 1)))
9660 return omit_one_operand (type, arg0, TREE_OPERAND (arg1, 1));
9661 /* X | (Y & X) is (Y, X). */
9662 if (TREE_CODE (arg1) == BIT_AND_EXPR
9663 && operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0)
9664 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
9665 return omit_one_operand (type, arg0, TREE_OPERAND (arg1, 0));
9667 t1 = distribute_bit_expr (code, type, arg0, arg1);
9668 if (t1 != NULL_TREE)
9669 return t1;
9671 /* Convert (or (not arg0) (not arg1)) to (not (and (arg0) (arg1))).
9673 This results in more efficient code for machines without a NAND
9674 instruction. Combine will canonicalize to the first form
9675 which will allow use of NAND instructions provided by the
9676 backend if they exist. */
9677 if (TREE_CODE (arg0) == BIT_NOT_EXPR
9678 && TREE_CODE (arg1) == BIT_NOT_EXPR)
9680 return fold_build1 (BIT_NOT_EXPR, type,
9681 build2 (BIT_AND_EXPR, type,
9682 TREE_OPERAND (arg0, 0),
9683 TREE_OPERAND (arg1, 0)));
9686 /* See if this can be simplified into a rotate first. If that
9687 is unsuccessful continue in the association code. */
9688 goto bit_rotate;
9690 case BIT_XOR_EXPR:
9691 if (integer_zerop (arg1))
9692 return non_lvalue (fold_convert (type, arg0));
9693 if (integer_all_onesp (arg1))
9694 return fold_build1 (BIT_NOT_EXPR, type, arg0);
9695 if (operand_equal_p (arg0, arg1, 0))
9696 return omit_one_operand (type, integer_zero_node, arg0);
9698 /* ~X ^ X is -1. */
9699 if (TREE_CODE (arg0) == BIT_NOT_EXPR
9700 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
9702 t1 = build_int_cst_type (type, -1);
9703 return omit_one_operand (type, t1, arg1);
9706 /* X ^ ~X is -1. */
9707 if (TREE_CODE (arg1) == BIT_NOT_EXPR
9708 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
9710 t1 = build_int_cst_type (type, -1);
9711 return omit_one_operand (type, t1, arg0);
9714 /* If we are XORing two BIT_AND_EXPR's, both of which are and'ing
9715 with a constant, and the two constants have no bits in common,
9716 we should treat this as a BIT_IOR_EXPR since this may produce more
9717 simplifications. */
9718 if (TREE_CODE (arg0) == BIT_AND_EXPR
9719 && TREE_CODE (arg1) == BIT_AND_EXPR
9720 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
9721 && TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
9722 && integer_zerop (const_binop (BIT_AND_EXPR,
9723 TREE_OPERAND (arg0, 1),
9724 TREE_OPERAND (arg1, 1), 0)))
9726 code = BIT_IOR_EXPR;
9727 goto bit_ior;
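/* Editorial aside -- a standalone sketch, not part of fold-const.c.
   When the two AND masks share no bits, the XOR operands can never
   both have a 1 in the same position, so XOR and IOR agree and the
   rewrite exposes the IOR simplifications above.  The masks are
   illustrative.  */
#if 0  /* Illustration only; not compiled with this file.  */
#include <assert.h>

int
main (void)
{
  unsigned x = 0x1234u, y = 0xABCDu;
  /* 0xF0 and 0x0F are disjoint, so ^ and | coincide.  */
  assert (((x & 0xF0u) ^ (y & 0x0Fu)) == ((x & 0xF0u) | (y & 0x0Fu)));
  return 0;
}
#endif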
9730 /* (X | Y) ^ X -> Y & ~X. */
9731 if (TREE_CODE (arg0) == BIT_IOR_EXPR
9732 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
9734 tree t2 = TREE_OPERAND (arg0, 1);
9735 t1 = fold_build1 (BIT_NOT_EXPR, TREE_TYPE (arg1),
9736 arg1);
9737 t1 = fold_build2 (BIT_AND_EXPR, type, fold_convert (type, t2),
9738 fold_convert (type, t1));
9739 return t1;
9742 /* (Y | X) ^ X -> Y & ~X. */
9743 if (TREE_CODE (arg0) == BIT_IOR_EXPR
9744 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
9746 tree t2 = TREE_OPERAND (arg0, 0);
9747 t1 = fold_build1 (BIT_NOT_EXPR, TREE_TYPE (arg1),
9748 arg1);
9749 t1 = fold_build2 (BIT_AND_EXPR, type, fold_convert (type, t2),
9750 fold_convert (type, t1));
9751 return t1;
9754 /* X ^ (X | Y) -> Y & ~X. */
9755 if (TREE_CODE (arg1) == BIT_IOR_EXPR
9756 && operand_equal_p (TREE_OPERAND (arg1, 0), arg0, 0))
9758 tree t2 = TREE_OPERAND (arg1, 1);
9759 t1 = fold_build1 (BIT_NOT_EXPR, TREE_TYPE (arg0),
9760 arg0);
9761 t1 = fold_build2 (BIT_AND_EXPR, type, fold_convert (type, t2),
9762 fold_convert (type, t1));
9763 return t1;
9766 /* X ^ (Y | X) -> Y & ~X. */
9767 if (TREE_CODE (arg1) == BIT_IOR_EXPR
9768 && operand_equal_p (TREE_OPERAND (arg1, 1), arg0, 0))
9770 tree t2 = TREE_OPERAND (arg1, 0);
9771 t1 = fold_build1 (BIT_NOT_EXPR, TREE_TYPE (arg0),
9772 arg0);
9773 t1 = fold_build2 (BIT_AND_EXPR, type, fold_convert (type, t2),
9774 fold_convert (type, t1));
9775 return t1;
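/* Editorial aside -- a standalone sketch, not part of fold-const.c.
   XOR-ing (X | Y) with X flips off exactly the X bits, leaving the
   bits of Y outside X; hence all four commuted shapes above reduce
   to Y & ~X.  The values are illustrative.  */
#if 0  /* Illustration only; not compiled with this file.  */
#include <assert.h>

int
main (void)
{
  unsigned x = 0x5A5Au, y = 0x1234u;
  assert (((x | y) ^ x) == (y & ~x));
  assert ((x ^ (y | x)) == (y & ~x));
  return 0;
}
#endif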
9778 /* Convert ~X ^ ~Y to X ^ Y. */
9779 if (TREE_CODE (arg0) == BIT_NOT_EXPR
9780 && TREE_CODE (arg1) == BIT_NOT_EXPR)
9781 return fold_build2 (code, type,
9782 fold_convert (type, TREE_OPERAND (arg0, 0)),
9783 fold_convert (type, TREE_OPERAND (arg1, 0)));
9785 /* Convert ~X ^ C to X ^ ~C. */
9786 if (TREE_CODE (arg0) == BIT_NOT_EXPR
9787 && TREE_CODE (arg1) == INTEGER_CST)
9788 return fold_build2 (code, type,
9789 fold_convert (type, TREE_OPERAND (arg0, 0)),
9790 fold_build1 (BIT_NOT_EXPR, type, arg1));
9792 /* Fold (X & 1) ^ 1 as (X & 1) == 0. */
9793 if (TREE_CODE (arg0) == BIT_AND_EXPR
9794 && integer_onep (TREE_OPERAND (arg0, 1))
9795 && integer_onep (arg1))
9796 return fold_build2 (EQ_EXPR, type, arg0,
9797 build_int_cst (TREE_TYPE (arg0), 0));
9799 /* Fold (X & Y) ^ Y as ~X & Y. */
9800 if (TREE_CODE (arg0) == BIT_AND_EXPR
9801 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
9803 tem = fold_convert (type, TREE_OPERAND (arg0, 0));
9804 return fold_build2 (BIT_AND_EXPR, type,
9805 fold_build1 (BIT_NOT_EXPR, type, tem),
9806 fold_convert (type, arg1));
9808 /* Fold (X & Y) ^ X as ~Y & X. */
9809 if (TREE_CODE (arg0) == BIT_AND_EXPR
9810 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
9811 && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
9813 tem = fold_convert (type, TREE_OPERAND (arg0, 1));
9814 return fold_build2 (BIT_AND_EXPR, type,
9815 fold_build1 (BIT_NOT_EXPR, type, tem),
9816 fold_convert (type, arg1));
9818 /* Fold X ^ (X & Y) as X & ~Y. */
9819 if (TREE_CODE (arg1) == BIT_AND_EXPR
9820 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
9822 tem = fold_convert (type, TREE_OPERAND (arg1, 1));
9823 return fold_build2 (BIT_AND_EXPR, type,
9824 fold_convert (type, arg0),
9825 fold_build1 (BIT_NOT_EXPR, type, tem));
9827 /* Fold X ^ (Y & X) as ~Y & X. */
9828 if (TREE_CODE (arg1) == BIT_AND_EXPR
9829 && operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0)
9830 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
9832 tem = fold_convert (type, TREE_OPERAND (arg1, 0));
9833 return fold_build2 (BIT_AND_EXPR, type,
9834 fold_build1 (BIT_NOT_EXPR, type, tem),
9835 fold_convert (type, arg0));
9838 /* See if this can be simplified into a rotate first. If that
9839 is unsuccessful continue in the association code. */
9840 goto bit_rotate;
9842 case BIT_AND_EXPR:
9843 if (integer_all_onesp (arg1))
9844 return non_lvalue (fold_convert (type, arg0));
9845 if (integer_zerop (arg1))
9846 return omit_one_operand (type, arg1, arg0);
9847 if (operand_equal_p (arg0, arg1, 0))
9848 return non_lvalue (fold_convert (type, arg0));
9850 /* ~X & X is always zero. */
9851 if (TREE_CODE (arg0) == BIT_NOT_EXPR
9852 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
9853 return omit_one_operand (type, integer_zero_node, arg1);
9855 /* X & ~X is always zero. */
9856 if (TREE_CODE (arg1) == BIT_NOT_EXPR
9857 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
9858 return omit_one_operand (type, integer_zero_node, arg0);
9860 /* Canonicalize (X | C1) & C2 as (X & C2) | (C1 & C2). */
9861 if (TREE_CODE (arg0) == BIT_IOR_EXPR
9862 && TREE_CODE (arg1) == INTEGER_CST
9863 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
9864 return fold_build2 (BIT_IOR_EXPR, type,
9865 fold_build2 (BIT_AND_EXPR, type,
9866 TREE_OPERAND (arg0, 0), arg1),
9867 fold_build2 (BIT_AND_EXPR, type,
9868 TREE_OPERAND (arg0, 1), arg1));
9870 /* (X | Y) & Y is (X, Y). */
9871 if (TREE_CODE (arg0) == BIT_IOR_EXPR
9872 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
9873 return omit_one_operand (type, arg1, TREE_OPERAND (arg0, 0));
9874 /* (X | Y) & X is (Y, X). */
9875 if (TREE_CODE (arg0) == BIT_IOR_EXPR
9876 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
9877 && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
9878 return omit_one_operand (type, arg1, TREE_OPERAND (arg0, 1));
9879 /* X & (X | Y) is (Y, X). */
9880 if (TREE_CODE (arg1) == BIT_IOR_EXPR
9881 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0)
9882 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 1)))
9883 return omit_one_operand (type, arg0, TREE_OPERAND (arg1, 1));
9884 /* X & (Y | X) is (Y, X). */
9885 if (TREE_CODE (arg1) == BIT_IOR_EXPR
9886 && operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0)
9887 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
9888 return omit_one_operand (type, arg0, TREE_OPERAND (arg1, 0));
9890 /* Fold (X ^ 1) & 1 as (X & 1) == 0. */
9891 if (TREE_CODE (arg0) == BIT_XOR_EXPR
9892 && integer_onep (TREE_OPERAND (arg0, 1))
9893 && integer_onep (arg1))
9895 tem = TREE_OPERAND (arg0, 0);
9896 return fold_build2 (EQ_EXPR, type,
9897 fold_build2 (BIT_AND_EXPR, TREE_TYPE (tem), tem,
9898 build_int_cst (TREE_TYPE (tem), 1)),
9899 build_int_cst (TREE_TYPE (tem), 0));
9901 /* Fold ~X & 1 as (X & 1) == 0. */
9902 if (TREE_CODE (arg0) == BIT_NOT_EXPR
9903 && integer_onep (arg1))
9905 tem = TREE_OPERAND (arg0, 0);
9906 return fold_build2 (EQ_EXPR, type,
9907 fold_build2 (BIT_AND_EXPR, TREE_TYPE (tem), tem,
9908 build_int_cst (TREE_TYPE (tem), 1)),
9909 build_int_cst (TREE_TYPE (tem), 0));
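/* Editorial aside -- a standalone sketch, not part of fold-const.c.
   Both folds turn a low-bit computation into a comparison:
   (X ^ 1) & 1 and ~X & 1 are each 1 exactly when the low bit of X
   is clear, i.e. (X & 1) == 0.  Two's complement assumed for the
   negative values; the range tested is illustrative.  */
#if 0  /* Illustration only; not compiled with this file.  */
#include <assert.h>

int
main (void)
{
  for (int x = -4; x <= 4; x++)
    {
      assert (((x ^ 1) & 1) == ((x & 1) == 0));
      assert ((~x & 1) == ((x & 1) == 0));
    }
  return 0;
}
#endif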
9912 /* Fold (X ^ Y) & Y as ~X & Y. */
9913 if (TREE_CODE (arg0) == BIT_XOR_EXPR
9914 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
9916 tem = fold_convert (type, TREE_OPERAND (arg0, 0));
9917 return fold_build2 (BIT_AND_EXPR, type,
9918 fold_build1 (BIT_NOT_EXPR, type, tem),
9919 fold_convert (type, arg1));
9921 /* Fold (X ^ Y) & X as ~Y & X. */
9922 if (TREE_CODE (arg0) == BIT_XOR_EXPR
9923 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
9924 && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
9926 tem = fold_convert (type, TREE_OPERAND (arg0, 1));
9927 return fold_build2 (BIT_AND_EXPR, type,
9928 fold_build1 (BIT_NOT_EXPR, type, tem),
9929 fold_convert (type, arg1));
9931 /* Fold X & (X ^ Y) as X & ~Y. */
9932 if (TREE_CODE (arg1) == BIT_XOR_EXPR
9933 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
9935 tem = fold_convert (type, TREE_OPERAND (arg1, 1));
9936 return fold_build2 (BIT_AND_EXPR, type,
9937 fold_convert (type, arg0),
9938 fold_build1 (BIT_NOT_EXPR, type, tem));
9940 /* Fold X & (Y ^ X) as ~Y & X. */
9941 if (TREE_CODE (arg1) == BIT_XOR_EXPR
9942 && operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0)
9943 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
9945 tem = fold_convert (type, TREE_OPERAND (arg1, 0));
9946 return fold_build2 (BIT_AND_EXPR, type,
9947 fold_build1 (BIT_NOT_EXPR, type, tem),
9948 fold_convert (type, arg0));
9951 t1 = distribute_bit_expr (code, type, arg0, arg1);
9952 if (t1 != NULL_TREE)
9953 return t1;
9954 /* Simplify ((int)c & 0377) into (int)c, if c is unsigned char. */
9955 if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg0) == NOP_EXPR
9956 && TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg0, 0))))
9958 unsigned int prec
9959 = TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg0, 0)));
9961 if (prec < BITS_PER_WORD && prec < HOST_BITS_PER_WIDE_INT
9962 && (~TREE_INT_CST_LOW (arg1)
9963 & (((HOST_WIDE_INT) 1 << prec) - 1)) == 0)
9964 return fold_convert (type, TREE_OPERAND (arg0, 0));
9967 /* Convert (and (not arg0) (not arg1)) to (not (or (arg0) (arg1))).
9969 This results in more efficient code for machines without a NOR
9970 instruction. Combine will canonicalize to the first form
9971 which will allow use of NOR instructions provided by the
9972 backend if they exist. */
9973 if (TREE_CODE (arg0) == BIT_NOT_EXPR
9974 && TREE_CODE (arg1) == BIT_NOT_EXPR)
9976 return fold_build1 (BIT_NOT_EXPR, type,
9977 build2 (BIT_IOR_EXPR, type,
9978 TREE_OPERAND (arg0, 0),
9979 TREE_OPERAND (arg1, 0)));
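/* Editorial aside -- a standalone sketch, not part of fold-const.c.
   This rewrite and its bit_ior counterpart above are De Morgan's
   laws: ~a & ~b == ~(a | b) and ~a | ~b == ~(a & b), which later
   lets combine match a single NOR or NAND.  The values are
   illustrative.  */
#if 0  /* Illustration only; not compiled with this file.  */
#include <assert.h>

int
main (void)
{
  unsigned a = 0x0FF0u, b = 0x3C3Cu;
  assert ((~a & ~b) == ~(a | b));  /* NOR shape (this case).  */
  assert ((~a | ~b) == ~(a & b));  /* NAND shape (bit_ior case).  */
  return 0;
}
#endif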
9982 goto associate;
9984 case RDIV_EXPR:
9985 /* Don't touch a floating-point divide by zero unless the mode
9986 of the constant can represent infinity. */
9987 if (TREE_CODE (arg1) == REAL_CST
9988 && !MODE_HAS_INFINITIES (TYPE_MODE (TREE_TYPE (arg1)))
9989 && real_zerop (arg1))
9990 return NULL_TREE;
9992 /* Optimize A / A to 1.0 if we don't care about
9993 NaNs or Infinities. Skip the transformation
9994 for non-real operands. */
9995 if (SCALAR_FLOAT_TYPE_P (TREE_TYPE (arg0))
9996 && ! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0)))
9997 && ! HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (arg0)))
9998 && operand_equal_p (arg0, arg1, 0))
10000 tree r = build_real (TREE_TYPE (arg0), dconst1);
10002 return omit_two_operands (type, r, arg0, arg1);
10005 /* The complex version of the above A / A optimization. */
10006 if (COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0))
10007 && operand_equal_p (arg0, arg1, 0))
10009 tree elem_type = TREE_TYPE (TREE_TYPE (arg0));
10010 if (! HONOR_NANS (TYPE_MODE (elem_type))
10011 && ! HONOR_INFINITIES (TYPE_MODE (elem_type)))
10013 tree r = build_real (elem_type, dconst1);
10014 /* omit_two_operands will call fold_convert for us. */
10015 return omit_two_operands (type, r, arg0, arg1);
10019 /* (-A) / (-B) -> A / B */
10020 if (TREE_CODE (arg0) == NEGATE_EXPR && negate_expr_p (arg1))
10021 return fold_build2 (RDIV_EXPR, type,
10022 TREE_OPERAND (arg0, 0),
10023 negate_expr (arg1));
10024 if (TREE_CODE (arg1) == NEGATE_EXPR && negate_expr_p (arg0))
10025 return fold_build2 (RDIV_EXPR, type,
10026 negate_expr (arg0),
10027 TREE_OPERAND (arg1, 0));
10029 /* In IEEE floating point, x/1 is not equivalent to x for snans. */
10030 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
10031 && real_onep (arg1))
10032 return non_lvalue (fold_convert (type, arg0));
10034 /* In IEEE floating point, x/-1 is not equivalent to -x for snans. */
10035 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
10036 && real_minus_onep (arg1))
10037 return non_lvalue (fold_convert (type, negate_expr (arg0)));
10039 /* If ARG1 is a constant, we can convert this to a multiply by the
10040 reciprocal. This does not have the same rounding properties,
10041 so only do this if -funsafe-math-optimizations. We can actually
10042 always safely do it if ARG1 is a power of two, but it's hard to
10043 tell if it is or not in a portable manner. */
10044 if (TREE_CODE (arg1) == REAL_CST)
10046 if (flag_unsafe_math_optimizations
10047 && 0 != (tem = const_binop (code, build_real (type, dconst1),
10048 arg1, 0)))
10049 return fold_build2 (MULT_EXPR, type, arg0, tem);
10050 /* Find the reciprocal if optimizing and the result is exact. */
10051 if (optimize)
10053 REAL_VALUE_TYPE r;
10054 r = TREE_REAL_CST (arg1);
10055 if (exact_real_inverse (TYPE_MODE (TREE_TYPE (arg0)), &r))
10057 tem = build_real (type, r);
10058 return fold_build2 (MULT_EXPR, type,
10059 fold_convert (type, arg0), tem);
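/* Editorial aside -- a standalone sketch, not part of fold-const.c.
   Why the exact_real_inverse path needs no unsafe-math flag:
   dividing by a power of two only adjusts the exponent, so absent
   overflow or underflow, x / 8.0 and x * 0.125 are bit-identical.
   The value is illustrative.  */
#if 0  /* Illustration only; not compiled with this file.  */
#include <assert.h>

int
main (void)
{
  double x = 3.141592653589793;
  /* 1/8 is exactly representable, so the rewrite is exact.  */
  assert (x / 8.0 == x * 0.125);
  return 0;
}
#endif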
10063 /* Convert A/B/C to A/(B*C). */
10064 if (flag_unsafe_math_optimizations
10065 && TREE_CODE (arg0) == RDIV_EXPR)
10066 return fold_build2 (RDIV_EXPR, type, TREE_OPERAND (arg0, 0),
10067 fold_build2 (MULT_EXPR, type,
10068 TREE_OPERAND (arg0, 1), arg1));
10070 /* Convert A/(B/C) to (A/B)*C. */
10071 if (flag_unsafe_math_optimizations
10072 && TREE_CODE (arg1) == RDIV_EXPR)
10073 return fold_build2 (MULT_EXPR, type,
10074 fold_build2 (RDIV_EXPR, type, arg0,
10075 TREE_OPERAND (arg1, 0)),
10076 TREE_OPERAND (arg1, 1));
10078 /* Convert C1/(X*C2) into (C1/C2)/X. */
10079 if (flag_unsafe_math_optimizations
10080 && TREE_CODE (arg1) == MULT_EXPR
10081 && TREE_CODE (arg0) == REAL_CST
10082 && TREE_CODE (TREE_OPERAND (arg1, 1)) == REAL_CST)
10084 tree tem = const_binop (RDIV_EXPR, arg0,
10085 TREE_OPERAND (arg1, 1), 0);
10086 if (tem)
10087 return fold_build2 (RDIV_EXPR, type, tem,
10088 TREE_OPERAND (arg1, 0));
10091 if (flag_unsafe_math_optimizations)
10093 enum built_in_function fcode0 = builtin_mathfn_code (arg0);
10094 enum built_in_function fcode1 = builtin_mathfn_code (arg1);
10096 /* Optimize sin(x)/cos(x) as tan(x). */
10097 if (((fcode0 == BUILT_IN_SIN && fcode1 == BUILT_IN_COS)
10098 || (fcode0 == BUILT_IN_SINF && fcode1 == BUILT_IN_COSF)
10099 || (fcode0 == BUILT_IN_SINL && fcode1 == BUILT_IN_COSL))
10100 && operand_equal_p (TREE_VALUE (TREE_OPERAND (arg0, 1)),
10101 TREE_VALUE (TREE_OPERAND (arg1, 1)), 0))
10103 tree tanfn = mathfn_built_in (type, BUILT_IN_TAN);
10105 if (tanfn != NULL_TREE)
10106 return build_function_call_expr (tanfn,
10107 TREE_OPERAND (arg0, 1));
10110 /* Optimize cos(x)/sin(x) as 1.0/tan(x). */
10111 if (((fcode0 == BUILT_IN_COS && fcode1 == BUILT_IN_SIN)
10112 || (fcode0 == BUILT_IN_COSF && fcode1 == BUILT_IN_SINF)
10113 || (fcode0 == BUILT_IN_COSL && fcode1 == BUILT_IN_SINL))
10114 && operand_equal_p (TREE_VALUE (TREE_OPERAND (arg0, 1)),
10115 TREE_VALUE (TREE_OPERAND (arg1, 1)), 0))
10117 tree tanfn = mathfn_built_in (type, BUILT_IN_TAN);
10119 if (tanfn != NULL_TREE)
10121 tree tmp = TREE_OPERAND (arg0, 1);
10122 tmp = build_function_call_expr (tanfn, tmp);
10123 return fold_build2 (RDIV_EXPR, type,
10124 build_real (type, dconst1), tmp);
10128 /* Optimize sin(x)/tan(x) as cos(x) if we don't care about
10129 NaNs or Infinities. */
10130 if (((fcode0 == BUILT_IN_SIN && fcode1 == BUILT_IN_TAN)
10131 || (fcode0 == BUILT_IN_SINF && fcode1 == BUILT_IN_TANF)
10132 || (fcode0 == BUILT_IN_SINL && fcode1 == BUILT_IN_TANL)))
10134 tree arg00 = TREE_VALUE (TREE_OPERAND (arg0, 1));
10135 tree arg01 = TREE_VALUE (TREE_OPERAND (arg1, 1));
10137 if (! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg00)))
10138 && ! HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (arg00)))
10139 && operand_equal_p (arg00, arg01, 0))
10141 tree cosfn = mathfn_built_in (type, BUILT_IN_COS);
10143 if (cosfn != NULL_TREE)
10144 return build_function_call_expr (cosfn,
10145 TREE_OPERAND (arg0, 1));
10149 /* Optimize tan(x)/sin(x) as 1.0/cos(x) if we don't care about
10150 NaNs or Infinities. */
10151 if (((fcode0 == BUILT_IN_TAN && fcode1 == BUILT_IN_SIN)
10152 || (fcode0 == BUILT_IN_TANF && fcode1 == BUILT_IN_SINF)
10153 || (fcode0 == BUILT_IN_TANL && fcode1 == BUILT_IN_SINL)))
10155 tree arg00 = TREE_VALUE (TREE_OPERAND (arg0, 1));
10156 tree arg01 = TREE_VALUE (TREE_OPERAND (arg1, 1));
10158 if (! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg00)))
10159 && ! HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (arg00)))
10160 && operand_equal_p (arg00, arg01, 0))
10162 tree cosfn = mathfn_built_in (type, BUILT_IN_COS);
10164 if (cosfn != NULL_TREE)
10166 tree tmp = TREE_OPERAND (arg0, 1);
10167 tmp = build_function_call_expr (cosfn, tmp);
10168 return fold_build2 (RDIV_EXPR, type,
10169 build_real (type, dconst1),
10170 tmp);
10175 /* Optimize pow(x,c)/x as pow(x,c-1). */
10176 if (fcode0 == BUILT_IN_POW
10177 || fcode0 == BUILT_IN_POWF
10178 || fcode0 == BUILT_IN_POWL)
10180 tree arg00 = TREE_VALUE (TREE_OPERAND (arg0, 1));
10181 tree arg01 = TREE_VALUE (TREE_CHAIN (TREE_OPERAND (arg0, 1)));
10182 if (TREE_CODE (arg01) == REAL_CST
10183 && !TREE_OVERFLOW (arg01)
10184 && operand_equal_p (arg1, arg00, 0))
10186 tree powfn = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
10187 REAL_VALUE_TYPE c;
10188 tree arg, arglist;
10190 c = TREE_REAL_CST (arg01);
10191 real_arithmetic (&c, MINUS_EXPR, &c, &dconst1);
10192 arg = build_real (type, c);
10193 arglist = build_tree_list (NULL_TREE, arg);
10194 arglist = tree_cons (NULL_TREE, arg1, arglist);
10195 return build_function_call_expr (powfn, arglist);
10199 /* Optimize x/expN(y) into x*expN(-y). */
10200 if (BUILTIN_EXPONENT_P (fcode1))
10202 tree expfn = TREE_OPERAND (TREE_OPERAND (arg1, 0), 0);
10203 tree arg = negate_expr (TREE_VALUE (TREE_OPERAND (arg1, 1)));
10204 tree arglist = build_tree_list (NULL_TREE,
10205 fold_convert (type, arg));
10206 arg1 = build_function_call_expr (expfn, arglist);
10207 return fold_build2 (MULT_EXPR, type, arg0, arg1);
10210 /* Optimize x/pow(y,z) into x*pow(y,-z). */
10211 if (fcode1 == BUILT_IN_POW
10212 || fcode1 == BUILT_IN_POWF
10213 || fcode1 == BUILT_IN_POWL)
10215 tree powfn = TREE_OPERAND (TREE_OPERAND (arg1, 0), 0);
10216 tree arg10 = TREE_VALUE (TREE_OPERAND (arg1, 1));
10217 tree arg11 = TREE_VALUE (TREE_CHAIN (TREE_OPERAND (arg1, 1)));
10218 tree neg11 = fold_convert (type, negate_expr (arg11));
10219 tree arglist = tree_cons (NULL_TREE, arg10,
10220 build_tree_list (NULL_TREE, neg11));
10221 arg1 = build_function_call_expr (powfn, arglist);
10222 return fold_build2 (MULT_EXPR, type, arg0, arg1);
10225 return NULL_TREE;
10227 case TRUNC_DIV_EXPR:
10228 case FLOOR_DIV_EXPR:
10229 /* Simplify A / (B << N) where A and B are positive and B is
10230 a power of 2, to A >> (N + log2(B)). */
10231 if (TREE_CODE (arg1) == LSHIFT_EXPR
10232 && (TYPE_UNSIGNED (type) || tree_expr_nonnegative_p (arg0)))
10234 tree sval = TREE_OPERAND (arg1, 0);
10235 if (integer_pow2p (sval) && tree_int_cst_sgn (sval) > 0)
10237 tree sh_cnt = TREE_OPERAND (arg1, 1);
10238 unsigned long pow2 = exact_log2 (TREE_INT_CST_LOW (sval));
10240 sh_cnt = fold_build2 (PLUS_EXPR, TREE_TYPE (sh_cnt),
10241 sh_cnt, build_int_cst (NULL_TREE, pow2));
10242 return fold_build2 (RSHIFT_EXPR, type,
10243 fold_convert (type, arg0), sh_cnt);
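/* Editorial aside -- a standalone sketch, not part of fold-const.c.
   With B a power of two, A / (B << N) divides by 2**(N + log2(B)),
   which the fold expresses as a single right shift.  The values are
   illustrative.  */
#if 0  /* Illustration only; not compiled with this file.  */
#include <assert.h>

int
main (void)
{
  unsigned a = 1000u, n = 2;
  /* B == 4, so A / (4 << n) == A >> (n + log2(4)).  */
  assert (a / (4u << n) == a >> (n + 2));
  return 0;
}
#endif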
10246 /* Fall thru */
10248 case ROUND_DIV_EXPR:
10249 case CEIL_DIV_EXPR:
10250 case EXACT_DIV_EXPR:
10251 if (integer_onep (arg1))
10252 return non_lvalue (fold_convert (type, arg0));
10253 if (integer_zerop (arg1))
10254 return NULL_TREE;
10255 /* X / -1 is -X. */
10256 if (!TYPE_UNSIGNED (type)
10257 && TREE_CODE (arg1) == INTEGER_CST
10258 && TREE_INT_CST_LOW (arg1) == (unsigned HOST_WIDE_INT) -1
10259 && TREE_INT_CST_HIGH (arg1) == -1)
10260 return fold_convert (type, negate_expr (arg0));
10262 /* Convert -A / -B to A / B when the type is signed and overflow is
10263 undefined. */
10264 if (!TYPE_UNSIGNED (type) && !flag_wrapv
10265 && TREE_CODE (arg0) == NEGATE_EXPR
10266 && negate_expr_p (arg1))
10267 return fold_build2 (code, type, TREE_OPERAND (arg0, 0),
10268 negate_expr (arg1));
10269 if (!TYPE_UNSIGNED (type) && !flag_wrapv
10270 && TREE_CODE (arg1) == NEGATE_EXPR
10271 && negate_expr_p (arg0))
10272 return fold_build2 (code, type, negate_expr (arg0),
10273 TREE_OPERAND (arg1, 0));
10275 /* If arg0 is a multiple of arg1, then rewrite to the fastest div
10276 operation, EXACT_DIV_EXPR.
10278 Note that only CEIL_DIV_EXPR and FLOOR_DIV_EXPR are rewritten now.
10279 At one time others generated faster code; it's not clear whether they do
10280 after the last round of changes to the DIV code in expmed.c. */
10281 if ((code == CEIL_DIV_EXPR || code == FLOOR_DIV_EXPR)
10282 && multiple_of_p (type, arg0, arg1))
10283 return fold_build2 (EXACT_DIV_EXPR, type, arg0, arg1);
10285 if (TREE_CODE (arg1) == INTEGER_CST
10286 && 0 != (tem = extract_muldiv (op0, arg1, code, NULL_TREE)))
10287 return fold_convert (type, tem);
10289 return NULL_TREE;
10291 case CEIL_MOD_EXPR:
10292 case FLOOR_MOD_EXPR:
10293 case ROUND_MOD_EXPR:
10294 case TRUNC_MOD_EXPR:
10295 /* X % 1 is always zero, but be sure to preserve any side
10296 effects in X. */
10297 if (integer_onep (arg1))
10298 return omit_one_operand (type, integer_zero_node, arg0);
10300 /* For X % 0, return X % 0 unchanged so that we get the
10301 proper warnings and errors. */
10302 if (integer_zerop (arg1))
10303 return NULL_TREE;
10305 /* 0 % X is always zero, but be sure to preserve any side
10306 effects in X. Place this after checking for X == 0. */
10307 if (integer_zerop (arg0))
10308 return omit_one_operand (type, integer_zero_node, arg1);
10310 /* X % -1 is zero. */
10311 if (!TYPE_UNSIGNED (type)
10312 && TREE_CODE (arg1) == INTEGER_CST
10313 && TREE_INT_CST_LOW (arg1) == (unsigned HOST_WIDE_INT) -1
10314 && TREE_INT_CST_HIGH (arg1) == -1)
10315 return omit_one_operand (type, integer_zero_node, arg0);
10317 /* Optimize TRUNC_MOD_EXPR by a power of two into a BIT_AND_EXPR,
10318 i.e. "X % C" into "X & (C - 1)", if X and C are positive. */
10319 if ((code == TRUNC_MOD_EXPR || code == FLOOR_MOD_EXPR)
10320 && (TYPE_UNSIGNED (type) || tree_expr_nonnegative_p (arg0)))
10322 tree c = arg1;
10323 /* Also optimize A % (C << N) where C is a power of 2,
10324 to A & ((C << N) - 1). */
10325 if (TREE_CODE (arg1) == LSHIFT_EXPR)
10326 c = TREE_OPERAND (arg1, 0);
10328 if (integer_pow2p (c) && tree_int_cst_sgn (c) > 0)
10330 tree mask = fold_build2 (MINUS_EXPR, TREE_TYPE (arg1), arg1,
10331 build_int_cst (TREE_TYPE (arg1), 1));
10332 return fold_build2 (BIT_AND_EXPR, type,
10333 fold_convert (type, arg0),
10334 fold_convert (type, mask));
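/* Editorial aside -- a standalone sketch, not part of fold-const.c.
   For nonnegative X and a power-of-two C, the remainder is just the
   low bits, so X % C becomes X & (C - 1); the same holds for the
   shifted form A % (C << N).  The values are illustrative.  */
#if 0  /* Illustration only; not compiled with this file.  */
#include <assert.h>

int
main (void)
{
  unsigned x = 1000003u;
  assert (x % 8u == (x & 7u));
  assert (x % (4u << 3) == (x & ((4u << 3) - 1)));  /* A % (C << N).  */
  return 0;
}
#endif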
10338 /* X % -C is the same as X % C. */
10339 if (code == TRUNC_MOD_EXPR
10340 && !TYPE_UNSIGNED (type)
10341 && TREE_CODE (arg1) == INTEGER_CST
10342 && !TREE_OVERFLOW (arg1)
10343 && TREE_INT_CST_HIGH (arg1) < 0
10344 && !flag_trapv
10345 /* Avoid this transformation if C is INT_MIN, i.e. C == -C. */
10346 && !sign_bit_p (arg1, arg1))
10347 return fold_build2 (code, type, fold_convert (type, arg0),
10348 fold_convert (type, negate_expr (arg1)));
10350 /* X % -Y is the same as X % Y. */
10351 if (code == TRUNC_MOD_EXPR
10352 && !TYPE_UNSIGNED (type)
10353 && TREE_CODE (arg1) == NEGATE_EXPR
10354 && !flag_trapv)
10355 return fold_build2 (code, type, fold_convert (type, arg0),
10356 fold_convert (type, TREE_OPERAND (arg1, 0)));
10358 if (TREE_CODE (arg1) == INTEGER_CST
10359 && 0 != (tem = extract_muldiv (op0, arg1, code, NULL_TREE)))
10360 return fold_convert (type, tem);
10362 return NULL_TREE;
10364 case LROTATE_EXPR:
10365 case RROTATE_EXPR:
10366 if (integer_all_onesp (arg0))
10367 return omit_one_operand (type, arg0, arg1);
10368 goto shift;
10370 case RSHIFT_EXPR:
10371 /* Optimize -1 >> x for arithmetic right shifts. */
10372 if (integer_all_onesp (arg0) && !TYPE_UNSIGNED (type))
10373 return omit_one_operand (type, arg0, arg1);
10374 /* ... fall through ... */
10376 case LSHIFT_EXPR:
10377 shift:
10378 if (integer_zerop (arg1))
10379 return non_lvalue (fold_convert (type, arg0));
10380 if (integer_zerop (arg0))
10381 return omit_one_operand (type, arg0, arg1);
10383 /* Since a negative shift count is not well-defined,
10384 don't try to compute it in the compiler. */
10385 if (TREE_CODE (arg1) == INTEGER_CST && tree_int_cst_sgn (arg1) < 0)
10386 return NULL_TREE;
10388 /* Turn (a OP c1) OP c2 into a OP (c1+c2). */
10389 if (TREE_CODE (op0) == code && host_integerp (arg1, false)
10390 && TREE_INT_CST_LOW (arg1) < TYPE_PRECISION (type)
10391 && host_integerp (TREE_OPERAND (arg0, 1), false)
10392 && TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1)) < TYPE_PRECISION (type))
10394 HOST_WIDE_INT low = (TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1))
10395 + TREE_INT_CST_LOW (arg1));
10397 /* Deal with a OP (c1 + c2) being undefined but (a OP c1) OP c2
10398 being well defined. */
10399 if (low >= TYPE_PRECISION (type))
10401 if (code == LROTATE_EXPR || code == RROTATE_EXPR)
10402 low = low % TYPE_PRECISION (type);
10403 else if (TYPE_UNSIGNED (type) || code == LSHIFT_EXPR)
10404 return build_int_cst (type, 0);
10405 else
10406 low = TYPE_PRECISION (type) - 1;
10409 return fold_build2 (code, type, TREE_OPERAND (arg0, 0),
10410 build_int_cst (type, low));
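/* Editorial aside -- a standalone sketch, not part of fold-const.c.
   Two same-direction shifts add their counts; when the sum reaches
   the width, the fold pins the result (to 0 for unsigned or left
   shifts) instead of emitting the undefined single wide shift.  The
   values are illustrative.  */
#if 0  /* Illustration only; not compiled with this file.  */
#include <assert.h>
#include <stdint.h>

int
main (void)
{
  uint32_t a = 0x87654321u;
  assert (((a >> 3) >> 5) == (a >> 8));  /* Counts add: 3 + 5.  */
  assert (((a >> 20) >> 12) == 0u);      /* Sum == width folds to 0.  */
  return 0;
}
#endif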
10413 /* Transform (x >> c) << c into x & (-1<<c), or transform (x << c) >> c
10414 into x & ((unsigned)-1 >> c) for unsigned types. */
10415 if (((code == LSHIFT_EXPR && TREE_CODE (arg0) == RSHIFT_EXPR)
10416 || (TYPE_UNSIGNED (type)
10417 && code == RSHIFT_EXPR && TREE_CODE (arg0) == LSHIFT_EXPR))
10418 && host_integerp (arg1, false)
10419 && TREE_INT_CST_LOW (arg1) < TYPE_PRECISION (type)
10420 && host_integerp (TREE_OPERAND (arg0, 1), false)
10421 && TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1)) < TYPE_PRECISION (type))
10423 HOST_WIDE_INT low0 = TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1));
10424 HOST_WIDE_INT low1 = TREE_INT_CST_LOW (arg1);
10425 tree lshift;
10426 tree arg00;
10428 if (low0 == low1)
10430 arg00 = fold_convert (type, TREE_OPERAND (arg0, 0));
10432 lshift = build_int_cst (type, -1);
10433 lshift = int_const_binop (code, lshift, arg1, 0);
10435 return fold_build2 (BIT_AND_EXPR, type, arg00, lshift);
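/* Editorial aside -- a standalone sketch, not part of fold-const.c.
   A shift down and back up clears the low C bits, and (for unsigned
   types) up and back down clears the high C bits; either way a
   single AND mask suffices.  The values are illustrative.  */
#if 0  /* Illustration only; not compiled with this file.  */
#include <assert.h>
#include <stdint.h>

int
main (void)
{
  uint32_t x = 0xDEADBEEFu;
  assert (((x >> 4) << 4) == (x & 0xFFFFFFF0u));  /* x & (-1 << 4).  */
  assert (((x << 4) >> 4) == (x & 0x0FFFFFFFu));  /* x & (-1u >> 4).  */
  return 0;
}
#endif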
10439 /* Rewrite an LROTATE_EXPR by a constant into an
10440 RROTATE_EXPR by a new constant. */
10441 if (code == LROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST)
10443 tree tem = build_int_cst (TREE_TYPE (arg1),
10444 GET_MODE_BITSIZE (TYPE_MODE (type)));
10445 tem = const_binop (MINUS_EXPR, tem, arg1, 0);
10446 return fold_build2 (RROTATE_EXPR, type, arg0, tem);
10449 /* If we have a rotate of a bit operation with the rotate count and
10450 the second operand of the bit operation both constant,
10451 permute the two operations. */
10452 if (code == RROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST
10453 && (TREE_CODE (arg0) == BIT_AND_EXPR
10454 || TREE_CODE (arg0) == BIT_IOR_EXPR
10455 || TREE_CODE (arg0) == BIT_XOR_EXPR)
10456 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
10457 return fold_build2 (TREE_CODE (arg0), type,
10458 fold_build2 (code, type,
10459 TREE_OPERAND (arg0, 0), arg1),
10460 fold_build2 (code, type,
10461 TREE_OPERAND (arg0, 1), arg1));
10463 /* Two consecutive rotates adding up to the width of the mode can
10464 be ignored. */
10465 if (code == RROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST
10466 && TREE_CODE (arg0) == RROTATE_EXPR
10467 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
10468 && TREE_INT_CST_HIGH (arg1) == 0
10469 && TREE_INT_CST_HIGH (TREE_OPERAND (arg0, 1)) == 0
10470 && ((TREE_INT_CST_LOW (arg1)
10471 + TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1)))
10472 == (unsigned int) GET_MODE_BITSIZE (TYPE_MODE (type))))
10473 return TREE_OPERAND (arg0, 0);
10475 return NULL_TREE;
10477 case MIN_EXPR:
10478 if (operand_equal_p (arg0, arg1, 0))
10479 return omit_one_operand (type, arg0, arg1);
10480 if (INTEGRAL_TYPE_P (type)
10481 && operand_equal_p (arg1, TYPE_MIN_VALUE (type), OEP_ONLY_CONST))
10482 return omit_one_operand (type, arg1, arg0);
10483 tem = fold_minmax (MIN_EXPR, type, arg0, arg1);
10484 if (tem)
10485 return tem;
10486 goto associate;
10488 case MAX_EXPR:
10489 if (operand_equal_p (arg0, arg1, 0))
10490 return omit_one_operand (type, arg0, arg1);
10491 if (INTEGRAL_TYPE_P (type)
10492 && TYPE_MAX_VALUE (type)
10493 && operand_equal_p (arg1, TYPE_MAX_VALUE (type), OEP_ONLY_CONST))
10494 return omit_one_operand (type, arg1, arg0);
10495 tem = fold_minmax (MAX_EXPR, type, arg0, arg1);
10496 if (tem)
10497 return tem;
10498 goto associate;
10500 case TRUTH_ANDIF_EXPR:
10501 /* Note that the operands of this must be ints
10502 and their values must be 0 or 1.
10503 ("true" is a fixed value perhaps depending on the language.) */
10504 /* If first arg is constant zero, return it. */
10505 if (integer_zerop (arg0))
10506 return fold_convert (type, arg0);
10507 case TRUTH_AND_EXPR:
10508 /* If either arg is constant true, drop it. */
10509 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
10510 return non_lvalue (fold_convert (type, arg1));
10511 if (TREE_CODE (arg1) == INTEGER_CST && ! integer_zerop (arg1)
10512 /* Preserve sequence points. */
10513 && (code != TRUTH_ANDIF_EXPR || ! TREE_SIDE_EFFECTS (arg0)))
10514 return non_lvalue (fold_convert (type, arg0));
10515 /* If second arg is constant zero, result is zero, but first arg
10516 must be evaluated. */
10517 if (integer_zerop (arg1))
10518 return omit_one_operand (type, arg1, arg0);
10519 /* Likewise for first arg, but note that only the TRUTH_AND_EXPR
10520 case will be handled here. */
10521 if (integer_zerop (arg0))
10522 return omit_one_operand (type, arg0, arg1);
10524 /* !X && X is always false. */
10525 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
10526 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
10527 return omit_one_operand (type, integer_zero_node, arg1);
10528 /* X && !X is always false. */
10529 if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
10530 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
10531 return omit_one_operand (type, integer_zero_node, arg0);
10533 /* A < X && A + 1 > Y ==> A < X && A >= Y. Normally A + 1 > Y
10534 means A >= Y && A != MAX, but in this case we know that
10535 A < X <= MAX. */
10537 if (!TREE_SIDE_EFFECTS (arg0)
10538 && !TREE_SIDE_EFFECTS (arg1))
10540 tem = fold_to_nonsharp_ineq_using_bound (arg0, arg1);
10541 if (tem && !operand_equal_p (tem, arg0, 0))
10542 return fold_build2 (code, type, tem, arg1);
10544 tem = fold_to_nonsharp_ineq_using_bound (arg1, arg0);
10545 if (tem && !operand_equal_p (tem, arg1, 0))
10546 return fold_build2 (code, type, arg0, tem);
10549 truth_andor:
10550 /* We only do these simplifications if we are optimizing. */
10551 if (!optimize)
10552 return NULL_TREE;
10554 /* Check for things like (A || B) && (A || C). We can convert this
10555 to A || (B && C). Note that either operator can be any of the four
10556 truth and/or operations and the transformation will still be
10557 valid. Also note that we only care about order for the
10558 ANDIF and ORIF operators. If B contains side effects, this
10559 might change the truth-value of A. */
10560 if (TREE_CODE (arg0) == TREE_CODE (arg1)
10561 && (TREE_CODE (arg0) == TRUTH_ANDIF_EXPR
10562 || TREE_CODE (arg0) == TRUTH_ORIF_EXPR
10563 || TREE_CODE (arg0) == TRUTH_AND_EXPR
10564 || TREE_CODE (arg0) == TRUTH_OR_EXPR)
10565 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (arg0, 1)))
10567 tree a00 = TREE_OPERAND (arg0, 0);
10568 tree a01 = TREE_OPERAND (arg0, 1);
10569 tree a10 = TREE_OPERAND (arg1, 0);
10570 tree a11 = TREE_OPERAND (arg1, 1);
10571 int commutative = ((TREE_CODE (arg0) == TRUTH_OR_EXPR
10572 || TREE_CODE (arg0) == TRUTH_AND_EXPR)
10573 && (code == TRUTH_AND_EXPR
10574 || code == TRUTH_OR_EXPR));
10576 if (operand_equal_p (a00, a10, 0))
10577 return fold_build2 (TREE_CODE (arg0), type, a00,
10578 fold_build2 (code, type, a01, a11));
10579 else if (commutative && operand_equal_p (a00, a11, 0))
10580 return fold_build2 (TREE_CODE (arg0), type, a00,
10581 fold_build2 (code, type, a01, a10));
10582 else if (commutative && operand_equal_p (a01, a10, 0))
10583 return fold_build2 (TREE_CODE (arg0), type, a01,
10584 fold_build2 (code, type, a00, a11));
10586 /* This case is tricky because we must either have commutative
10587 operators or else A10 must not have side-effects. */
10589 else if ((commutative || ! TREE_SIDE_EFFECTS (a10))
10590 && operand_equal_p (a01, a11, 0))
10591 return fold_build2 (TREE_CODE (arg0), type,
10592 fold_build2 (code, type, a00, a10),
10593 a01);
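/* Editorial aside -- a standalone sketch, not part of fold-const.c.
   The factoring above in its simplest shape, checked over all
   boolean inputs: (A || B) && (A || C) == A || (B && C).  The
   guards in the code exist because, with side effects or the
   short-circuit forms, evaluation order must also be preserved.
   Purely illustrative.  */
#if 0  /* Illustration only; not compiled with this file.  */
#include <assert.h>

int
main (void)
{
  for (int a = 0; a <= 1; a++)
    for (int b = 0; b <= 1; b++)
      for (int c = 0; c <= 1; c++)
        assert (((a || b) && (a || c)) == (a || (b && c)));
  return 0;
}
#endif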
10596 /* See if we can build a range comparison. */
10597 if (0 != (tem = fold_range_test (code, type, op0, op1)))
10598 return tem;
10600 /* Check for the possibility of merging component references. If our
10601 lhs is another similar operation, try to merge its rhs with our
10602 rhs. Then try to merge our lhs and rhs. */
10603 if (TREE_CODE (arg0) == code
10604 && 0 != (tem = fold_truthop (code, type,
10605 TREE_OPERAND (arg0, 1), arg1)))
10606 return fold_build2 (code, type, TREE_OPERAND (arg0, 0), tem);
10608 if ((tem = fold_truthop (code, type, arg0, arg1)) != 0)
10609 return tem;
10611 return NULL_TREE;
10613 case TRUTH_ORIF_EXPR:
10614 /* Note that the operands of this must be ints
10615 and their values must be 0 or 1.
10616 ("true" is a fixed value perhaps depending on the language.) */
10617 /* If first arg is constant true, return it. */
10618 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
10619 return fold_convert (type, arg0);
10620 case TRUTH_OR_EXPR:
10621 /* If either arg is constant zero, drop it. */
10622 if (TREE_CODE (arg0) == INTEGER_CST && integer_zerop (arg0))
10623 return non_lvalue (fold_convert (type, arg1));
10624 if (TREE_CODE (arg1) == INTEGER_CST && integer_zerop (arg1)
10625 /* Preserve sequence points. */
10626 && (code != TRUTH_ORIF_EXPR || ! TREE_SIDE_EFFECTS (arg0)))
10627 return non_lvalue (fold_convert (type, arg0));
10628 /* If second arg is constant true, result is true, but we must
10629 evaluate first arg. */
10630 if (TREE_CODE (arg1) == INTEGER_CST && ! integer_zerop (arg1))
10631 return omit_one_operand (type, arg1, arg0);
10632 /* Likewise for first arg, but note this only occurs here for
10633 TRUTH_OR_EXPR. */
10634 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
10635 return omit_one_operand (type, arg0, arg1);
10637 /* !X || X is always true. */
10638 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
10639 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
10640 return omit_one_operand (type, integer_one_node, arg1);
10641 /* X || !X is always true. */
10642 if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
10643 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
10644 return omit_one_operand (type, integer_one_node, arg0);
10646 goto truth_andor;
10648 case TRUTH_XOR_EXPR:
10649 /* If the second arg is constant zero, drop it. */
10650 if (integer_zerop (arg1))
10651 return non_lvalue (fold_convert (type, arg0));
10652 /* If the second arg is constant true, this is a logical inversion. */
10653 if (integer_onep (arg1))
10655 /* Only call invert_truthvalue if operand is a truth value. */
10656 if (TREE_CODE (TREE_TYPE (arg0)) != BOOLEAN_TYPE)
10657 tem = fold_build1 (TRUTH_NOT_EXPR, TREE_TYPE (arg0), arg0);
10658 else
10659 tem = invert_truthvalue (arg0);
10660 return non_lvalue (fold_convert (type, tem));
10662 /* Identical arguments cancel to zero. */
10663 if (operand_equal_p (arg0, arg1, 0))
10664 return omit_one_operand (type, integer_zero_node, arg0);
10666 /* !X ^ X is always true. */
10667 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
10668 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
10669 return omit_one_operand (type, integer_one_node, arg1);
10671 /* X ^ !X is always true. */
10672 if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
10673 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
10674 return omit_one_operand (type, integer_one_node, arg0);
10676 return NULL_TREE;
10678 case EQ_EXPR:
10679 case NE_EXPR:
10680 tem = fold_comparison (code, type, op0, op1);
10681 if (tem != NULL_TREE)
10682 return tem;
10684 /* bool_var != 0 becomes bool_var. */
10685 if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_zerop (arg1)
10686 && code == NE_EXPR)
10687 return non_lvalue (fold_convert (type, arg0));
10689 /* bool_var == 1 becomes bool_var. */
10690 if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_onep (arg1)
10691 && code == EQ_EXPR)
10692 return non_lvalue (fold_convert (type, arg0));
10694 /* bool_var != 1 becomes !bool_var. */
10695 if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_onep (arg1)
10696 && code == NE_EXPR)
10697 return fold_build1 (TRUTH_NOT_EXPR, type, arg0);
10699 /* bool_var == 0 becomes !bool_var. */
10700 if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_zerop (arg1)
10701 && code == EQ_EXPR)
10702 return fold_build1 (TRUTH_NOT_EXPR, type, arg0);
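      /* Editor's illustration (not in the original source): for a
	 BOOLEAN_TYPE variable b, the four folds above give
	   b != 0 -> b,   b == 1 -> b,   b != 1 -> !b,   b == 0 -> !b.  */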
10704 /* If this is an equality comparison of the address of a non-weak
10705 object against zero, then we know the result. */
10706 if (TREE_CODE (arg0) == ADDR_EXPR
10707 && VAR_OR_FUNCTION_DECL_P (TREE_OPERAND (arg0, 0))
10708 && ! DECL_WEAK (TREE_OPERAND (arg0, 0))
10709 && integer_zerop (arg1))
10710 return constant_boolean_node (code != EQ_EXPR, type);
10712 /* If this is an equality comparison of the address of two non-weak,
10713 unaliased symbols neither of which are extern (since we do not
10714 have access to attributes for externs), then we know the result. */
10715 if (TREE_CODE (arg0) == ADDR_EXPR
10716 && VAR_OR_FUNCTION_DECL_P (TREE_OPERAND (arg0, 0))
10717 && ! DECL_WEAK (TREE_OPERAND (arg0, 0))
10718 && ! lookup_attribute ("alias",
10719 DECL_ATTRIBUTES (TREE_OPERAND (arg0, 0)))
10720 && ! DECL_EXTERNAL (TREE_OPERAND (arg0, 0))
10721 && TREE_CODE (arg1) == ADDR_EXPR
10722 && VAR_OR_FUNCTION_DECL_P (TREE_OPERAND (arg1, 0))
10723 && ! DECL_WEAK (TREE_OPERAND (arg1, 0))
10724 && ! lookup_attribute ("alias",
10725 DECL_ATTRIBUTES (TREE_OPERAND (arg1, 0)))
10726 && ! DECL_EXTERNAL (TREE_OPERAND (arg1, 0)))
10728 /* We know that we're looking at the address of two
10729 non-weak, unaliased, static _DECL nodes.
10731 It is both wasteful and incorrect to call operand_equal_p
10732 to compare the two ADDR_EXPR nodes. It is wasteful in that
10733 all we need to do is test pointer equality for the arguments
10734 to the two ADDR_EXPR nodes. It is incorrect to use
10735 operand_equal_p as that function is NOT equivalent to a
10736 C equality test. It can in fact return false for two
10737 objects which would test as equal using the C equality
10738 operator. */
10739 bool equal = TREE_OPERAND (arg0, 0) == TREE_OPERAND (arg1, 0);
10740 return constant_boolean_node (equal
10741 ? code == EQ_EXPR : code != EQ_EXPR,
10742 type);
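      /* Editor's illustration (not in the original source): given
	 "static int x, y;" (non-weak, unaliased, not extern), the two
	 folds above give
	   &x == 0 -> 0,   &x == &x -> 1,   &x == &y -> 0.  */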
10745 /* If this is an EQ or NE comparison of a constant with a PLUS_EXPR or
10746 a MINUS_EXPR of a constant, we can convert it into a comparison with
10747 a revised constant as long as no overflow occurs. */
10748 if (TREE_CODE (arg1) == INTEGER_CST
10749 && (TREE_CODE (arg0) == PLUS_EXPR
10750 || TREE_CODE (arg0) == MINUS_EXPR)
10751 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
10752 && 0 != (tem = const_binop (TREE_CODE (arg0) == PLUS_EXPR
10753 ? MINUS_EXPR : PLUS_EXPR,
10754 fold_convert (TREE_TYPE (arg0), arg1),
10755 TREE_OPERAND (arg0, 1), 0))
10756 && !TREE_OVERFLOW (tem))
10757 return fold_build2 (code, type, TREE_OPERAND (arg0, 0), tem);
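      /* Editor's illustration (not in the original source):
	   (x + 3) == 10   folds to   x == 7,
	 provided computing 10 - 3 does not overflow.  */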
10759 /* Similarly for a NEGATE_EXPR. */
10760 if (TREE_CODE (arg0) == NEGATE_EXPR
10761 && TREE_CODE (arg1) == INTEGER_CST
10762 && 0 != (tem = negate_expr (arg1))
10763 && TREE_CODE (tem) == INTEGER_CST
10764 && !TREE_OVERFLOW (tem))
10765 return fold_build2 (code, type, TREE_OPERAND (arg0, 0), tem);
10767 /* Similarly for a BIT_XOR_EXPR; X ^ C1 == C2 is X == (C1 ^ C2). */
10768 if (TREE_CODE (arg0) == BIT_XOR_EXPR
10769 && TREE_CODE (arg1) == INTEGER_CST
10770 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
10771 return fold_build2 (code, type, TREE_OPERAND (arg0, 0),
10772 fold_build2 (BIT_XOR_EXPR, TREE_TYPE (arg0),
10773 fold_convert (TREE_TYPE (arg0), arg1),
10774 TREE_OPERAND (arg0, 1)));
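      /* Editor's illustration (not in the original source):
	   (x ^ 5) == 3   folds to   x == 6,  since 5 ^ 3 == 6.  */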
10776 /* If we have X - Y == 0, we can convert that to X == Y and similarly
10777 for !=. Don't do this for ordered comparisons due to overflow. */
10778 if (TREE_CODE (arg0) == MINUS_EXPR
10779 && integer_zerop (arg1))
10780 return fold_build2 (code, type,
10781 TREE_OPERAND (arg0, 0), TREE_OPERAND (arg0, 1));
10783 /* Convert ABS_EXPR<x> == 0 or ABS_EXPR<x> != 0 to x == 0 or x != 0. */
10784 if (TREE_CODE (arg0) == ABS_EXPR
10785 && (integer_zerop (arg1) || real_zerop (arg1)))
10786 return fold_build2 (code, type, TREE_OPERAND (arg0, 0), arg1);
10788 /* If this is an EQ or NE comparison with zero and ARG0 is
10789 (1 << foo) & bar, convert it to (bar >> foo) & 1. Both require
10790 two operations, but the latter can be done in one less insn
10791 on machines that have only two-operand insns or on which a
10792 constant cannot be the first operand. */
10793 if (TREE_CODE (arg0) == BIT_AND_EXPR
10794 && integer_zerop (arg1))
10796 tree arg00 = TREE_OPERAND (arg0, 0);
10797 tree arg01 = TREE_OPERAND (arg0, 1);
10798 if (TREE_CODE (arg00) == LSHIFT_EXPR
10799 && integer_onep (TREE_OPERAND (arg00, 0)))
10800 return
10801 fold_build2 (code, type,
10802 build2 (BIT_AND_EXPR, TREE_TYPE (arg0),
10803 build2 (RSHIFT_EXPR, TREE_TYPE (arg00),
10804 arg01, TREE_OPERAND (arg00, 1)),
10805 fold_convert (TREE_TYPE (arg0),
10806 integer_one_node)),
10807 arg1);
10808 else if (TREE_CODE (TREE_OPERAND (arg0, 1)) == LSHIFT_EXPR
10809 && integer_onep (TREE_OPERAND (TREE_OPERAND (arg0, 1), 0)))
10810 return
10811 fold_build2 (code, type,
10812 build2 (BIT_AND_EXPR, TREE_TYPE (arg0),
10813 build2 (RSHIFT_EXPR, TREE_TYPE (arg01),
10814 arg00, TREE_OPERAND (arg01, 1)),
10815 fold_convert (TREE_TYPE (arg0),
10816 integer_one_node)),
10817 arg1);
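      /* Editor's illustration (not in the original source):
	   ((1 << n) & b) != 0   folds to   ((b >> n) & 1) != 0,
	 which avoids a constant first operand to the shift.  */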
10820 /* If this is an NE or EQ comparison of zero against the result of a
10821 signed MOD operation whose second operand is a power of 2, make
10822 the MOD operation unsigned since it is simpler and equivalent. */
10823 if (integer_zerop (arg1)
10824 && !TYPE_UNSIGNED (TREE_TYPE (arg0))
10825 && (TREE_CODE (arg0) == TRUNC_MOD_EXPR
10826 || TREE_CODE (arg0) == CEIL_MOD_EXPR
10827 || TREE_CODE (arg0) == FLOOR_MOD_EXPR
10828 || TREE_CODE (arg0) == ROUND_MOD_EXPR)
10829 && integer_pow2p (TREE_OPERAND (arg0, 1)))
10831 tree newtype = lang_hooks.types.unsigned_type (TREE_TYPE (arg0));
10832 tree newmod = fold_build2 (TREE_CODE (arg0), newtype,
10833 fold_convert (newtype,
10834 TREE_OPERAND (arg0, 0)),
10835 fold_convert (newtype,
10836 TREE_OPERAND (arg0, 1)));
10838 return fold_build2 (code, type, newmod,
10839 fold_convert (newtype, arg1));
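      /* Editor's illustration (not in the original source): for a
	 signed int x,
	   x % 8 == 0   folds to   (unsigned int) x % 8 == 0,
	 which is equivalent because the modulus is a power of two.  */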
10842 /* Fold ((X >> C1) & C2) == 0 and ((X >> C1) & C2) != 0 where
10843 C1 is a valid shift constant, and C2 is a power of two, i.e.
10844 a single bit. */
10845 if (TREE_CODE (arg0) == BIT_AND_EXPR
10846 && TREE_CODE (TREE_OPERAND (arg0, 0)) == RSHIFT_EXPR
10847 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1))
10848 == INTEGER_CST
10849 && integer_pow2p (TREE_OPERAND (arg0, 1))
10850 && integer_zerop (arg1))
10852 tree itype = TREE_TYPE (arg0);
10853 unsigned HOST_WIDE_INT prec = TYPE_PRECISION (itype);
10854 tree arg001 = TREE_OPERAND (TREE_OPERAND (arg0, 0), 1);
10856 /* Check for a valid shift count. */
10857 if (TREE_INT_CST_HIGH (arg001) == 0
10858 && TREE_INT_CST_LOW (arg001) < prec)
10860 tree arg01 = TREE_OPERAND (arg0, 1);
10861 tree arg000 = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
10862 unsigned HOST_WIDE_INT log2 = tree_log2 (arg01);
10863 /* If (C2 << C1) doesn't overflow, then ((X >> C1) & C2) != 0
10864 can be rewritten as (X & (C2 << C1)) != 0. */
10865 if ((log2 + TREE_INT_CST_LOW (arg001)) < prec)
10867 tem = fold_build2 (LSHIFT_EXPR, itype, arg01, arg001);
10868 tem = fold_build2 (BIT_AND_EXPR, itype, arg000, tem);
10869 return fold_build2 (code, type, tem, arg1);
10871 /* Otherwise, for signed (arithmetic) shifts,
10872 ((X >> C1) & C2) != 0 is rewritten as X < 0, and
10873 ((X >> C1) & C2) == 0 is rewritten as X >= 0. */
10874 else if (!TYPE_UNSIGNED (itype))
10875 return fold_build2 (code == EQ_EXPR ? GE_EXPR : LT_EXPR, type,
10876 arg000, build_int_cst (itype, 0));
10877 /* Otherwise, for unsigned (logical) shifts,
10878 ((X >> C1) & C2) != 0 is rewritten as (X,false), and
10879 ((X >> C1) & C2) == 0 is rewritten as (X,true). */
10880 else
10881 return omit_one_operand (type,
10882 code == EQ_EXPR ? integer_one_node
10883 : integer_zero_node,
10884 arg000);
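      /* Editor's illustration (not in the original source), assuming a
	 32-bit signed int x:
	   ((x >> 3) & 4) != 0    folds to   (x & 32) != 0
	 since 4 << 3 == 32 does not overflow, whereas
	   ((x >> 30) & 4) != 0   folds to   x < 0.  */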
10888 /* If this is an NE comparison of zero with an AND of one, remove the
10889 comparison since the AND will give the correct value. */
10890 if (code == NE_EXPR
10891 && integer_zerop (arg1)
10892 && TREE_CODE (arg0) == BIT_AND_EXPR
10893 && integer_onep (TREE_OPERAND (arg0, 1)))
10894 return fold_convert (type, arg0);
10896 /* If we have (A & C) == C where C is a power of 2, convert this into
10897 (A & C) != 0. Similarly for NE_EXPR. */
10898 if (TREE_CODE (arg0) == BIT_AND_EXPR
10899 && integer_pow2p (TREE_OPERAND (arg0, 1))
10900 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
10901 return fold_build2 (code == EQ_EXPR ? NE_EXPR : EQ_EXPR, type,
10902 arg0, fold_convert (TREE_TYPE (arg0),
10903 integer_zero_node));
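      /* Editor's illustration (not in the original source):
	   (a & 16) == 16   folds to   (a & 16) != 0.  */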
10905 /* If we have (A & C) != 0 or (A & C) == 0 and C is the sign
10906 bit, then fold the expression into A < 0 or A >= 0. */
10907 tem = fold_single_bit_test_into_sign_test (code, arg0, arg1, type);
10908 if (tem)
10909 return tem;
10911 /* If we have (A & C) == D where D & ~C != 0, convert this into 0.
10912 Similarly for NE_EXPR. */
10913 if (TREE_CODE (arg0) == BIT_AND_EXPR
10914 && TREE_CODE (arg1) == INTEGER_CST
10915 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
10917 tree notc = fold_build1 (BIT_NOT_EXPR,
10918 TREE_TYPE (TREE_OPERAND (arg0, 1)),
10919 TREE_OPERAND (arg0, 1));
10920 tree dandnotc = fold_build2 (BIT_AND_EXPR, TREE_TYPE (arg0),
10921 arg1, notc);
10922 tree rslt = code == EQ_EXPR ? integer_zero_node : integer_one_node;
10923 if (integer_nonzerop (dandnotc))
10924 return omit_one_operand (type, rslt, arg0);
10927 /* If we have (A | C) == D where C & ~D != 0, convert this into 0.
10928 Similarly for NE_EXPR. */
10929 if (TREE_CODE (arg0) == BIT_IOR_EXPR
10930 && TREE_CODE (arg1) == INTEGER_CST
10931 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
10933 tree notd = fold_build1 (BIT_NOT_EXPR, TREE_TYPE (arg1), arg1);
10934 tree candnotd = fold_build2 (BIT_AND_EXPR, TREE_TYPE (arg0),
10935 TREE_OPERAND (arg0, 1), notd);
10936 tree rslt = code == EQ_EXPR ? integer_zero_node : integer_one_node;
10937 if (integer_nonzerop (candnotd))
10938 return omit_one_operand (type, rslt, arg0);
10941 /* If this is a comparison of a field, we may be able to simplify it. */
10942 if ((TREE_CODE (arg0) == COMPONENT_REF
10943 || TREE_CODE (arg0) == BIT_FIELD_REF)
10944 /* Handle the constant case even without -O
10945 to make sure the warnings are given. */
10946 && (optimize || TREE_CODE (arg1) == INTEGER_CST))
10948 t1 = optimize_bit_field_compare (code, type, arg0, arg1);
10949 if (t1)
10950 return t1;
10953 /* Optimize comparisons of strlen vs zero to a compare of the
10954 first character of the string vs zero. To wit,
10955 strlen(ptr) == 0 => *ptr == 0
10956 strlen(ptr) != 0 => *ptr != 0
10957 Other cases should reduce to one of these two (or a constant)
10958 due to the return value of strlen being unsigned. */
10959 if (TREE_CODE (arg0) == CALL_EXPR
10960 && integer_zerop (arg1))
10962 tree fndecl = get_callee_fndecl (arg0);
10963 tree arglist;
10965 if (fndecl
10966 && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
10967 && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_STRLEN
10968 && (arglist = TREE_OPERAND (arg0, 1))
10969 && TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) == POINTER_TYPE
10970 && ! TREE_CHAIN (arglist))
10972 tree iref = build_fold_indirect_ref (TREE_VALUE (arglist));
10973 return fold_build2 (code, type, iref,
10974 build_int_cst (TREE_TYPE (iref), 0));
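      /* Editor's illustration (not in the original source):
	   strlen (p) == 0   folds to   *p == 0.  */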
10978 /* Fold (X >> C) != 0 into X < 0 if C is one less than the width
10979 of X. Similarly fold (X >> C) == 0 into X >= 0. */
10980 if (TREE_CODE (arg0) == RSHIFT_EXPR
10981 && integer_zerop (arg1)
10982 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
10984 tree arg00 = TREE_OPERAND (arg0, 0);
10985 tree arg01 = TREE_OPERAND (arg0, 1);
10986 tree itype = TREE_TYPE (arg00);
10987 if (TREE_INT_CST_HIGH (arg01) == 0
10988 && TREE_INT_CST_LOW (arg01)
10989 == (unsigned HOST_WIDE_INT) (TYPE_PRECISION (itype) - 1))
10991 if (TYPE_UNSIGNED (itype))
10993 itype = lang_hooks.types.signed_type (itype);
10994 arg00 = fold_convert (itype, arg00);
10996 return fold_build2 (code == EQ_EXPR ? GE_EXPR : LT_EXPR,
10997 type, arg00, build_int_cst (itype, 0));
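      /* Editor's illustration (not in the original source), assuming a
	 32-bit int x:
	   (x >> 31) != 0   folds to   x < 0,  and
	   (x >> 31) == 0   folds to   x >= 0;
	 an unsigned X is first converted to the signed type.  */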
11001 /* (X ^ Y) == 0 becomes X == Y, and (X ^ Y) != 0 becomes X != Y. */
11002 if (integer_zerop (arg1)
11003 && TREE_CODE (arg0) == BIT_XOR_EXPR)
11004 return fold_build2 (code, type, TREE_OPERAND (arg0, 0),
11005 TREE_OPERAND (arg0, 1));
11007 /* (X ^ Y) == Y becomes X == 0. We know that Y has no side-effects. */
11008 if (TREE_CODE (arg0) == BIT_XOR_EXPR
11009 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
11010 return fold_build2 (code, type, TREE_OPERAND (arg0, 0),
11011 build_int_cst (TREE_TYPE (arg1), 0));
11012 /* Likewise (X ^ Y) == X becomes Y == 0. X has no side-effects. */
11013 if (TREE_CODE (arg0) == BIT_XOR_EXPR
11014 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
11015 && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
11016 return fold_build2 (code, type, TREE_OPERAND (arg0, 1),
11017 build_int_cst (TREE_TYPE (arg1), 0));
11019 /* (X ^ C1) op C2 can be rewritten as X op (C1 ^ C2). */
11020 if (TREE_CODE (arg0) == BIT_XOR_EXPR
11021 && TREE_CODE (arg1) == INTEGER_CST
11022 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
11023 return fold_build2 (code, type, TREE_OPERAND (arg0, 0),
11024 fold_build2 (BIT_XOR_EXPR, TREE_TYPE (arg1),
11025 TREE_OPERAND (arg0, 1), arg1));
11027 /* Fold (~X & C) == 0 into (X & C) != 0 and (~X & C) != 0 into
11028 (X & C) == 0 when C is a single bit. */
11029 if (TREE_CODE (arg0) == BIT_AND_EXPR
11030 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_NOT_EXPR
11031 && integer_zerop (arg1)
11032 && integer_pow2p (TREE_OPERAND (arg0, 1)))
11034 tem = fold_build2 (BIT_AND_EXPR, TREE_TYPE (arg0),
11035 TREE_OPERAND (TREE_OPERAND (arg0, 0), 0),
11036 TREE_OPERAND (arg0, 1));
11037 return fold_build2 (code == EQ_EXPR ? NE_EXPR : EQ_EXPR,
11038 type, tem, arg1);
11041 /* Fold ((X & C) ^ C) eq/ne 0 into (X & C) ne/eq 0, when the
11042 constant C is a power of two, i.e. a single bit. */
11043 if (TREE_CODE (arg0) == BIT_XOR_EXPR
11044 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_AND_EXPR
11045 && integer_zerop (arg1)
11046 && integer_pow2p (TREE_OPERAND (arg0, 1))
11047 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
11048 TREE_OPERAND (arg0, 1), OEP_ONLY_CONST))
11050 tree arg00 = TREE_OPERAND (arg0, 0);
11051 return fold_build2 (code == EQ_EXPR ? NE_EXPR : EQ_EXPR, type,
11052 arg00, build_int_cst (TREE_TYPE (arg00), 0));
11055 /* Likewise, fold ((X ^ C) & C) eq/ne 0 into (X & C) ne/eq 0,
11056 when C is a power of two, i.e. a single bit. */
11057 if (TREE_CODE (arg0) == BIT_AND_EXPR
11058 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_XOR_EXPR
11059 && integer_zerop (arg1)
11060 && integer_pow2p (TREE_OPERAND (arg0, 1))
11061 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
11062 TREE_OPERAND (arg0, 1), OEP_ONLY_CONST))
11064 tree arg000 = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
11065 tem = fold_build2 (BIT_AND_EXPR, TREE_TYPE (arg000),
11066 arg000, TREE_OPERAND (arg0, 1));
11067 return fold_build2 (code == EQ_EXPR ? NE_EXPR : EQ_EXPR, type,
11068 tem, build_int_cst (TREE_TYPE (tem), 0));
11071 if (integer_zerop (arg1)
11072 && tree_expr_nonzero_p (arg0))
11074 tree res = constant_boolean_node (code==NE_EXPR, type);
11075 return omit_one_operand (type, res, arg0);
11078 /* Fold -X op -Y as X op Y, where op is eq/ne. */
11079 if (TREE_CODE (arg0) == NEGATE_EXPR
11080 && TREE_CODE (arg1) == NEGATE_EXPR)
11081 return fold_build2 (code, type,
11082 TREE_OPERAND (arg0, 0),
11083 TREE_OPERAND (arg1, 0));
11085 /* Fold (X & C) op (Y & C) as ((X ^ Y) & C) op 0, and symmetries. */
11086 if (TREE_CODE (arg0) == BIT_AND_EXPR
11087 && TREE_CODE (arg1) == BIT_AND_EXPR)
11089 tree arg00 = TREE_OPERAND (arg0, 0);
11090 tree arg01 = TREE_OPERAND (arg0, 1);
11091 tree arg10 = TREE_OPERAND (arg1, 0);
11092 tree arg11 = TREE_OPERAND (arg1, 1);
11093 tree itype = TREE_TYPE (arg0);
11095 if (operand_equal_p (arg01, arg11, 0))
11096 return fold_build2 (code, type,
11097 fold_build2 (BIT_AND_EXPR, itype,
11098 fold_build2 (BIT_XOR_EXPR, itype,
11099 arg00, arg10),
11100 arg01),
11101 build_int_cst (itype, 0));
11103 if (operand_equal_p (arg01, arg10, 0))
11104 return fold_build2 (code, type,
11105 fold_build2 (BIT_AND_EXPR, itype,
11106 fold_build2 (BIT_XOR_EXPR, itype,
11107 arg00, arg11),
11108 arg01),
11109 build_int_cst (itype, 0));
11111 if (operand_equal_p (arg00, arg11, 0))
11112 return fold_build2 (code, type,
11113 fold_build2 (BIT_AND_EXPR, itype,
11114 fold_build2 (BIT_XOR_EXPR, itype,
11115 arg01, arg10),
11116 arg00),
11117 build_int_cst (itype, 0));
11119 if (operand_equal_p (arg00, arg10, 0))
11120 return fold_build2 (code, type,
11121 fold_build2 (BIT_AND_EXPR, itype,
11122 fold_build2 (BIT_XOR_EXPR, itype,
11123 arg01, arg11),
11124 arg00),
11125 build_int_cst (itype, 0));
11128 if (TREE_CODE (arg0) == BIT_XOR_EXPR
11129 && TREE_CODE (arg1) == BIT_XOR_EXPR)
11131 tree arg00 = TREE_OPERAND (arg0, 0);
11132 tree arg01 = TREE_OPERAND (arg0, 1);
11133 tree arg10 = TREE_OPERAND (arg1, 0);
11134 tree arg11 = TREE_OPERAND (arg1, 1);
11135 tree itype = TREE_TYPE (arg0);
11137 /* Optimize (X ^ Z) op (Y ^ Z) as X op Y, and symmetries.
11138 operand_equal_p guarantees no side-effects so we don't need
11139 to use omit_one_operand on Z. */
11140 if (operand_equal_p (arg01, arg11, 0))
11141 return fold_build2 (code, type, arg00, arg10);
11142 if (operand_equal_p (arg01, arg10, 0))
11143 return fold_build2 (code, type, arg00, arg11);
11144 if (operand_equal_p (arg00, arg11, 0))
11145 return fold_build2 (code, type, arg01, arg10);
11146 if (operand_equal_p (arg00, arg10, 0))
11147 return fold_build2 (code, type, arg01, arg11);
11149 /* Optimize (X ^ C1) op (Y ^ C2) as (X ^ (C1 ^ C2)) op Y. */
11150 if (TREE_CODE (arg01) == INTEGER_CST
11151 && TREE_CODE (arg11) == INTEGER_CST)
11152 return fold_build2 (code, type,
11153 fold_build2 (BIT_XOR_EXPR, itype, arg00,
11154 fold_build2 (BIT_XOR_EXPR, itype,
11155 arg01, arg11)),
11156 arg10);
11158 return NULL_TREE;
11160 case LT_EXPR:
11161 case GT_EXPR:
11162 case LE_EXPR:
11163 case GE_EXPR:
11164 tem = fold_comparison (code, type, op0, op1);
11165 if (tem != NULL_TREE)
11166 return tem;
11168 /* Transform comparisons of the form X +- C CMP X. */
11169 if ((TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
11170 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
11171 && ((TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
11172 && !HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0))))
11173 || (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
11174 && !TYPE_UNSIGNED (TREE_TYPE (arg1))
11175 && !(flag_wrapv || flag_trapv))))
11177 tree arg01 = TREE_OPERAND (arg0, 1);
11178 enum tree_code code0 = TREE_CODE (arg0);
11179 int is_positive;
11181 if (TREE_CODE (arg01) == REAL_CST)
11182 is_positive = REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg01)) ? -1 : 1;
11183 else
11184 is_positive = tree_int_cst_sgn (arg01);
11186 /* (X - c) > X becomes false. */
11187 if (code == GT_EXPR
11188 && ((code0 == MINUS_EXPR && is_positive >= 0)
11189 || (code0 == PLUS_EXPR && is_positive <= 0)))
11190 return constant_boolean_node (0, type);
11192 /* Likewise (X + c) < X becomes false. */
11193 if (code == LT_EXPR
11194 && ((code0 == PLUS_EXPR && is_positive >= 0)
11195 || (code0 == MINUS_EXPR && is_positive <= 0)))
11196 return constant_boolean_node (0, type);
11198 /* Convert (X - c) <= X to true. */
11199 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1)))
11200 && code == LE_EXPR
11201 && ((code0 == MINUS_EXPR && is_positive >= 0)
11202 || (code0 == PLUS_EXPR && is_positive <= 0)))
11203 return constant_boolean_node (1, type);
11205 /* Convert (X + c) >= X to true. */
11206 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1)))
11207 && code == GE_EXPR
11208 && ((code0 == PLUS_EXPR && is_positive >= 0)
11209 || (code0 == MINUS_EXPR && is_positive <= 0)))
11210 return constant_boolean_node (1, type);
11212 if (TREE_CODE (arg01) == INTEGER_CST)
11214 /* Convert X + c > X and X - c < X to true for integers. */
11215 if (code == GT_EXPR
11216 && ((code0 == PLUS_EXPR && is_positive > 0)
11217 || (code0 == MINUS_EXPR && is_positive < 0)))
11218 return constant_boolean_node (1, type);
11220 if (code == LT_EXPR
11221 && ((code0 == MINUS_EXPR && is_positive > 0)
11222 || (code0 == PLUS_EXPR && is_positive < 0)))
11223 return constant_boolean_node (1, type);
11225 /* Convert X + c <= X and X - c >= X to false for integers. */
11226 if (code == LE_EXPR
11227 && ((code0 == PLUS_EXPR && is_positive > 0)
11228 || (code0 == MINUS_EXPR && is_positive < 0)))
11229 return constant_boolean_node (0, type);
11231 if (code == GE_EXPR
11232 && ((code0 == MINUS_EXPR && is_positive > 0)
11233 || (code0 == PLUS_EXPR && is_positive < 0)))
11234 return constant_boolean_node (0, type);
11238 /* Change X >= C to X > (C - 1) and X < C to X <= (C - 1) if C > 0.
11239 This transformation affects the cases which are handled in later
11240 optimizations involving comparisons with non-negative constants. */
11241 if (TREE_CODE (arg1) == INTEGER_CST
11242 && TREE_CODE (arg0) != INTEGER_CST
11243 && tree_int_cst_sgn (arg1) > 0)
11245 if (code == GE_EXPR)
11247 arg1 = const_binop (MINUS_EXPR, arg1,
11248 build_int_cst (TREE_TYPE (arg1), 1), 0);
11249 return fold_build2 (GT_EXPR, type, arg0,
11250 fold_convert (TREE_TYPE (arg0), arg1));
11252 if (code == LT_EXPR)
11254 arg1 = const_binop (MINUS_EXPR, arg1,
11255 build_int_cst (TREE_TYPE (arg1), 1), 0);
11256 return fold_build2 (LE_EXPR, type, arg0,
11257 fold_convert (TREE_TYPE (arg0), arg1));
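      /* Editor's illustration (not in the original source):
	   x >= 5   folds to   x > 4,  and   x < 5   folds to   x <= 4.  */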
11261 /* Comparisons with the highest or lowest possible integer of
11262 the specified precision will have known values. */
11264 tree arg1_type = TREE_TYPE (arg1);
11265 unsigned int width = TYPE_PRECISION (arg1_type);
11267 if (TREE_CODE (arg1) == INTEGER_CST
11268 && !TREE_OVERFLOW (arg1)
11269 && width <= 2 * HOST_BITS_PER_WIDE_INT
11270 && (INTEGRAL_TYPE_P (arg1_type) || POINTER_TYPE_P (arg1_type)))
11272 HOST_WIDE_INT signed_max_hi;
11273 unsigned HOST_WIDE_INT signed_max_lo;
11274 unsigned HOST_WIDE_INT max_hi, max_lo, min_hi, min_lo;
11276 if (width <= HOST_BITS_PER_WIDE_INT)
11278 signed_max_lo = ((unsigned HOST_WIDE_INT) 1 << (width - 1))
11279 - 1;
11280 signed_max_hi = 0;
11281 max_hi = 0;
11283 if (TYPE_UNSIGNED (arg1_type))
11285 max_lo = ((unsigned HOST_WIDE_INT) 2 << (width - 1)) - 1;
11286 min_lo = 0;
11287 min_hi = 0;
11289 else
11291 max_lo = signed_max_lo;
11292 min_lo = ((unsigned HOST_WIDE_INT) -1 << (width - 1));
11293 min_hi = -1;
11296 else
11298 width -= HOST_BITS_PER_WIDE_INT;
11299 signed_max_lo = -1;
11300 signed_max_hi = ((unsigned HOST_WIDE_INT) 1 << (width - 1))
11301 - 1;
11302 max_lo = -1;
11303 min_lo = 0;
11305 if (TYPE_UNSIGNED (arg1_type))
11307 max_hi = ((unsigned HOST_WIDE_INT) 2 << (width - 1)) - 1;
11308 min_hi = 0;
11310 else
11312 max_hi = signed_max_hi;
11313 min_hi = ((unsigned HOST_WIDE_INT) -1 << (width - 1));
11317 if ((unsigned HOST_WIDE_INT) TREE_INT_CST_HIGH (arg1) == max_hi
11318 && TREE_INT_CST_LOW (arg1) == max_lo)
11319 switch (code)
11321 case GT_EXPR:
11322 return omit_one_operand (type, integer_zero_node, arg0);
11324 case GE_EXPR:
11325 return fold_build2 (EQ_EXPR, type, arg0, arg1);
11327 case LE_EXPR:
11328 return omit_one_operand (type, integer_one_node, arg0);
11330 case LT_EXPR:
11331 return fold_build2 (NE_EXPR, type, arg0, arg1);
11333 /* The GE_EXPR and LT_EXPR cases above are not normally
11334 reached because of previous transformations. */
11336 default:
11337 break;
11339 else if ((unsigned HOST_WIDE_INT) TREE_INT_CST_HIGH (arg1)
11340 == max_hi
11341 && TREE_INT_CST_LOW (arg1) == max_lo - 1)
11342 switch (code)
11344 case GT_EXPR:
11345 arg1 = const_binop (PLUS_EXPR, arg1,
11346 build_int_cst (TREE_TYPE (arg1), 1), 0);
11347 return fold_build2 (EQ_EXPR, type, arg0, arg1);
11348 case LE_EXPR:
11349 arg1 = const_binop (PLUS_EXPR, arg1,
11350 build_int_cst (TREE_TYPE (arg1), 1), 0);
11351 return fold_build2 (NE_EXPR, type, arg0, arg1);
11352 default:
11353 break;
11355 else if ((unsigned HOST_WIDE_INT) TREE_INT_CST_HIGH (arg1)
11356 == min_hi
11357 && TREE_INT_CST_LOW (arg1) == min_lo)
11358 switch (code)
11360 case LT_EXPR:
11361 return omit_one_operand (type, integer_zero_node, arg0);
11363 case LE_EXPR:
11364 return fold_build2 (EQ_EXPR, type, arg0, arg1);
11366 case GE_EXPR:
11367 return omit_one_operand (type, integer_one_node, arg0);
11369 case GT_EXPR:
11370 return fold_build2 (NE_EXPR, type, op0, op1);
11372 default:
11373 break;
11375 else if ((unsigned HOST_WIDE_INT) TREE_INT_CST_HIGH (arg1)
11376 == min_hi
11377 && TREE_INT_CST_LOW (arg1) == min_lo + 1)
11378 switch (code)
11380 case GE_EXPR:
11381 arg1 = const_binop (MINUS_EXPR, arg1, integer_one_node, 0);
11382 return fold_build2 (NE_EXPR, type, arg0, arg1);
11383 case LT_EXPR:
11384 arg1 = const_binop (MINUS_EXPR, arg1, integer_one_node, 0);
11385 return fold_build2 (EQ_EXPR, type, arg0, arg1);
11386 default:
11387 break;
11390 else if (TREE_INT_CST_HIGH (arg1) == signed_max_hi
11391 && TREE_INT_CST_LOW (arg1) == signed_max_lo
11392 && TYPE_UNSIGNED (arg1_type)
11393 /* We will flip the signedness of the comparison operator
11394 associated with the mode of arg1, so the sign bit is
11395 specified by this mode. Check that arg1 is the signed
11396 max associated with this sign bit. */
11397 && width == GET_MODE_BITSIZE (TYPE_MODE (arg1_type))
11398 /* signed_type does not work on pointer types. */
11399 && INTEGRAL_TYPE_P (arg1_type))
11401 /* The following case also applies to X < signed_max+1
11402 and X >= signed_max+1 because of previous transformations. */
11403 if (code == LE_EXPR || code == GT_EXPR)
11405 tree st0, st1;
11406 st0 = lang_hooks.types.signed_type (TREE_TYPE (arg0));
11407 st1 = lang_hooks.types.signed_type (TREE_TYPE (arg1));
11408 return fold_build2 (code == LE_EXPR ? GE_EXPR: LT_EXPR,
11409 type, fold_convert (st0, arg0),
11410 build_int_cst (st1, 0));
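      /* Editor's illustration (not in the original source): for a
	 32-bit unsigned int x,
	   x > 2147483647    folds to   (int) x < 0,  and
	   x <= 2147483647   folds to   (int) x >= 0.  */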
11416 /* If we are comparing an ABS_EXPR with a constant, we can
11417 convert all the cases into explicit comparisons, but they may
11418 well not be faster than doing the ABS and one comparison.
11419 But ABS (X) <= C is a range comparison, which becomes a subtraction
11420 and a comparison, and is probably faster. */
11421 if (code == LE_EXPR
11422 && TREE_CODE (arg1) == INTEGER_CST
11423 && TREE_CODE (arg0) == ABS_EXPR
11424 && ! TREE_SIDE_EFFECTS (arg0)
11425 && (0 != (tem = negate_expr (arg1)))
11426 && TREE_CODE (tem) == INTEGER_CST
11427 && !TREE_OVERFLOW (tem))
11428 return fold_build2 (TRUTH_ANDIF_EXPR, type,
11429 build2 (GE_EXPR, type,
11430 TREE_OPERAND (arg0, 0), tem),
11431 build2 (LE_EXPR, type,
11432 TREE_OPERAND (arg0, 0), arg1));
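      /* Editor's illustration (not in the original source):
	   abs (x) <= 5   folds to   x >= -5 && x <= 5.  */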
11434 /* Convert ABS_EXPR<x> >= 0 to true. */
11435 if (code == GE_EXPR
11436 && tree_expr_nonnegative_p (arg0)
11437 && (integer_zerop (arg1)
11438 || (! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0)))
11439 && real_zerop (arg1))))
11440 return omit_one_operand (type, integer_one_node, arg0);
11442 /* Convert ABS_EXPR<x> < 0 to false. */
11443 if (code == LT_EXPR
11444 && tree_expr_nonnegative_p (arg0)
11445 && (integer_zerop (arg1) || real_zerop (arg1)))
11446 return omit_one_operand (type, integer_zero_node, arg0);
11448 /* If X is unsigned, convert X < (1 << Y) into X >> Y == 0
11449 and similarly for >= into !=. */
11450 if ((code == LT_EXPR || code == GE_EXPR)
11451 && TYPE_UNSIGNED (TREE_TYPE (arg0))
11452 && TREE_CODE (arg1) == LSHIFT_EXPR
11453 && integer_onep (TREE_OPERAND (arg1, 0)))
11454 return build2 (code == LT_EXPR ? EQ_EXPR : NE_EXPR, type,
11455 build2 (RSHIFT_EXPR, TREE_TYPE (arg0), arg0,
11456 TREE_OPERAND (arg1, 1)),
11457 build_int_cst (TREE_TYPE (arg0), 0));
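      /* Editor's illustration (not in the original source): for an
	 unsigned x,
	   x < (1 << y)    folds to   (x >> y) == 0,  and
	   x >= (1 << y)   folds to   (x >> y) != 0.  */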
11459 if ((code == LT_EXPR || code == GE_EXPR)
11460 && TYPE_UNSIGNED (TREE_TYPE (arg0))
11461 && (TREE_CODE (arg1) == NOP_EXPR
11462 || TREE_CODE (arg1) == CONVERT_EXPR)
11463 && TREE_CODE (TREE_OPERAND (arg1, 0)) == LSHIFT_EXPR
11464 && integer_onep (TREE_OPERAND (TREE_OPERAND (arg1, 0), 0)))
11465 return
11466 build2 (code == LT_EXPR ? EQ_EXPR : NE_EXPR, type,
11467 fold_convert (TREE_TYPE (arg0),
11468 build2 (RSHIFT_EXPR, TREE_TYPE (arg0), arg0,
11469 TREE_OPERAND (TREE_OPERAND (arg1, 0),
11470 1))),
11471 build_int_cst (TREE_TYPE (arg0), 0));
11473 return NULL_TREE;
11475 case UNORDERED_EXPR:
11476 case ORDERED_EXPR:
11477 case UNLT_EXPR:
11478 case UNLE_EXPR:
11479 case UNGT_EXPR:
11480 case UNGE_EXPR:
11481 case UNEQ_EXPR:
11482 case LTGT_EXPR:
11483 if (TREE_CODE (arg0) == REAL_CST && TREE_CODE (arg1) == REAL_CST)
11485 t1 = fold_relational_const (code, type, arg0, arg1);
11486 if (t1 != NULL_TREE)
11487 return t1;
11490 /* If the first operand is NaN, the result is constant. */
11491 if (TREE_CODE (arg0) == REAL_CST
11492 && REAL_VALUE_ISNAN (TREE_REAL_CST (arg0))
11493 && (code != LTGT_EXPR || ! flag_trapping_math))
11495 t1 = (code == ORDERED_EXPR || code == LTGT_EXPR)
11496 ? integer_zero_node
11497 : integer_one_node;
11498 return omit_one_operand (type, t1, arg1);
11501 /* If the second operand is NaN, the result is constant. */
11502 if (TREE_CODE (arg1) == REAL_CST
11503 && REAL_VALUE_ISNAN (TREE_REAL_CST (arg1))
11504 && (code != LTGT_EXPR || ! flag_trapping_math))
11506 t1 = (code == ORDERED_EXPR || code == LTGT_EXPR)
11507 ? integer_zero_node
11508 : integer_one_node;
11509 return omit_one_operand (type, t1, arg0);
11512 /* Simplify unordered comparison of something with itself. */
11513 if ((code == UNLE_EXPR || code == UNGE_EXPR || code == UNEQ_EXPR)
11514 && operand_equal_p (arg0, arg1, 0))
11515 return constant_boolean_node (1, type);
11517 if (code == LTGT_EXPR
11518 && !flag_trapping_math
11519 && operand_equal_p (arg0, arg1, 0))
11520 return constant_boolean_node (0, type);
11522 /* Fold (double)float1 CMP (double)float2 into float1 CMP float2. */
11524 tree targ0 = strip_float_extensions (arg0);
11525 tree targ1 = strip_float_extensions (arg1);
11526 tree newtype = TREE_TYPE (targ0);
11528 if (TYPE_PRECISION (TREE_TYPE (targ1)) > TYPE_PRECISION (newtype))
11529 newtype = TREE_TYPE (targ1);
11531 if (TYPE_PRECISION (newtype) < TYPE_PRECISION (TREE_TYPE (arg0)))
11532 return fold_build2 (code, type, fold_convert (newtype, targ0),
11533 fold_convert (newtype, targ1));
11536 return NULL_TREE;
11538 case COMPOUND_EXPR:
11539 /* When pedantic, a compound expression can be neither an lvalue
11540 nor an integer constant expression. */
11541 if (TREE_SIDE_EFFECTS (arg0) || TREE_CONSTANT (arg1))
11542 return NULL_TREE;
11543 /* Don't let (0, 0) be a null pointer constant. */
11544 tem = integer_zerop (arg1) ? build1 (NOP_EXPR, type, arg1)
11545 : fold_convert (type, arg1);
11546 return pedantic_non_lvalue (tem);
11548 case COMPLEX_EXPR:
11549 if ((TREE_CODE (arg0) == REAL_CST
11550 && TREE_CODE (arg1) == REAL_CST)
11551 || (TREE_CODE (arg0) == INTEGER_CST
11552 && TREE_CODE (arg1) == INTEGER_CST))
11553 return build_complex (type, arg0, arg1);
11554 return NULL_TREE;
11556 case ASSERT_EXPR:
11557 /* An ASSERT_EXPR should never be passed to fold_binary. */
11558 gcc_unreachable ();
11560 default:
11561 return NULL_TREE;
11562 } /* switch (code) */
11565 /* Callback for walk_tree, looking for a LABEL_EXPR. Returns *TP if it
11566 is a LABEL_EXPR, and NULL_TREE otherwise. Does not descend into the
11567 sub-tree of a GOTO_EXPR. */
11569 static tree
11570 contains_label_1 (tree *tp,
11571 int *walk_subtrees,
11572 void *data ATTRIBUTE_UNUSED)
11574 switch (TREE_CODE (*tp))
11576 case LABEL_EXPR:
11577 return *tp;
11578 case GOTO_EXPR:
11579 *walk_subtrees = 0;
11580 /* no break */
11581 default:
11582 return NULL_TREE;
11586 /* Checks whether the sub-tree ST contains a LABEL_EXPR that is
11587 accessible from outside the sub-tree. Returns false if no
11588 such addressable label is found. */
11590 static bool
11591 contains_label_p (tree st)
11593 return (walk_tree (&st, contains_label_1 , NULL, NULL) != NULL_TREE);
11596 /* Fold a ternary expression of code CODE and type TYPE with operands
11597 OP0, OP1, and OP2. Return the folded expression if folding is
11598 successful. Otherwise, return NULL_TREE. */
11600 tree
11601 fold_ternary (enum tree_code code, tree type, tree op0, tree op1, tree op2)
11603 tree tem;
11604 tree arg0 = NULL_TREE, arg1 = NULL_TREE;
11605 enum tree_code_class kind = TREE_CODE_CLASS (code);
11607 gcc_assert (IS_EXPR_CODE_CLASS (kind)
11608 && TREE_CODE_LENGTH (code) == 3);
11610 /* Strip any conversions that don't change the mode. This is safe
11611 for every expression, except for a comparison expression because
11612 its signedness is derived from its operands. So, in the latter
11613 case, only strip conversions that don't change the signedness.
11615 Note that this is done as an internal manipulation within the
11616 constant folder, in order to find the simplest representation of
11617 the arguments so that their form can be studied. In any cases,
11618 the appropriate type conversions should be put back in the tree
11619 that will get out of the constant folder. */
11620 if (op0)
11622 arg0 = op0;
11623 STRIP_NOPS (arg0);
11626 if (op1)
11628 arg1 = op1;
11629 STRIP_NOPS (arg1);
11632 switch (code)
11634 case COMPONENT_REF:
11635 if (TREE_CODE (arg0) == CONSTRUCTOR
11636 && ! type_contains_placeholder_p (TREE_TYPE (arg0)))
11638 unsigned HOST_WIDE_INT idx;
11639 tree field, value;
11640 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (arg0), idx, field, value)
11641 if (field == arg1)
11642 return value;
11644 return NULL_TREE;
11646 case COND_EXPR:
11647 /* Pedantic ANSI C says that a conditional expression is never an lvalue,
11648 so all simple results must be passed through pedantic_non_lvalue. */
11649 if (TREE_CODE (arg0) == INTEGER_CST)
11651 tree unused_op = integer_zerop (arg0) ? op1 : op2;
11652 tem = integer_zerop (arg0) ? op2 : op1;
11653 /* Only optimize constant conditions when the selected branch
11654 has the same type as the COND_EXPR. This avoids optimizing
11655 away "c ? x : throw", where the throw has a void type.
11656 Avoid throwing away the operand that contains a label. */
11657 if ((!TREE_SIDE_EFFECTS (unused_op)
11658 || !contains_label_p (unused_op))
11659 && (! VOID_TYPE_P (TREE_TYPE (tem))
11660 || VOID_TYPE_P (type)))
11661 return pedantic_non_lvalue (tem);
11662 return NULL_TREE;
11664 if (operand_equal_p (arg1, op2, 0))
11665 return pedantic_omit_one_operand (type, arg1, arg0);
11667 /* If we have A op B ? A : C, we may be able to convert this to a
11668 simpler expression, depending on the operation and the values
11669 of B and C. Signed zeros prevent all of these transformations,
11670 for reasons given above each one.
11672 Also try swapping the arguments and inverting the conditional. */
11673 if (COMPARISON_CLASS_P (arg0)
11674 && operand_equal_for_comparison_p (TREE_OPERAND (arg0, 0),
11675 arg1, TREE_OPERAND (arg0, 1))
11676 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg1))))
11678 tem = fold_cond_expr_with_comparison (type, arg0, op1, op2);
11679 if (tem)
11680 return tem;
11683 if (COMPARISON_CLASS_P (arg0)
11684 && operand_equal_for_comparison_p (TREE_OPERAND (arg0, 0),
11685 op2,
11686 TREE_OPERAND (arg0, 1))
11687 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (op2))))
11689 tem = fold_truth_not_expr (arg0);
11690 if (tem && COMPARISON_CLASS_P (tem))
11692 tem = fold_cond_expr_with_comparison (type, tem, op2, op1);
11693 if (tem)
11694 return tem;
11698 /* If the second operand is simpler than the third, swap them
11699 since that produces better jump optimization results. */
11700 if (truth_value_p (TREE_CODE (arg0))
11701 && tree_swap_operands_p (op1, op2, false))
11703 /* See if this can be inverted. If it can't, possibly because
11704 it was a floating-point inequality comparison, don't do
11705 anything. */
11706 tem = fold_truth_not_expr (arg0);
11707 if (tem)
11708 return fold_build3 (code, type, tem, op2, op1);
11711 /* Convert A ? 1 : 0 to simply A. */
11712 if (integer_onep (op1)
11713 && integer_zerop (op2)
11714 /* If we try to convert OP0 to our type, the
11715 call to fold will try to move the conversion inside
11716 a COND, which will recurse. In that case, the COND_EXPR
11717 is probably the best choice, so leave it alone. */
11718 && type == TREE_TYPE (arg0))
11719 return pedantic_non_lvalue (arg0);
11721 /* Convert A ? 0 : 1 to !A. This prefers the use of NOT_EXPR
11722 over COND_EXPR in cases such as floating point comparisons. */
11723 if (integer_zerop (op1)
11724 && integer_onep (op2)
11725 && truth_value_p (TREE_CODE (arg0)))
11726 return pedantic_non_lvalue (fold_convert (type,
11727 invert_truthvalue (arg0)));
11729 /* A < 0 ? <sign bit of A> : 0 is simply (A & <sign bit of A>). */
11730 if (TREE_CODE (arg0) == LT_EXPR
11731 && integer_zerop (TREE_OPERAND (arg0, 1))
11732 && integer_zerop (op2)
11733 && (tem = sign_bit_p (TREE_OPERAND (arg0, 0), arg1)))
11735 /* sign_bit_p only checks ARG1 bits within A's precision.
11736 If <sign bit of A> has wider type than A, bits outside
11737 of A's precision in <sign bit of A> need to be checked.
11738 If they are all 0, this optimization must be done
11739 in the unsigned variant of A's type; if they are all 1, in the
11740 signed variant; otherwise it can't be done. */
11741 if (TYPE_PRECISION (TREE_TYPE (tem))
11742 < TYPE_PRECISION (TREE_TYPE (arg1))
11743 && TYPE_PRECISION (TREE_TYPE (tem))
11744 < TYPE_PRECISION (type))
11746 unsigned HOST_WIDE_INT mask_lo;
11747 HOST_WIDE_INT mask_hi;
11748 int inner_width, outer_width;
11749 tree tem_type;
11751 inner_width = TYPE_PRECISION (TREE_TYPE (tem));
11752 outer_width = TYPE_PRECISION (TREE_TYPE (arg1));
11753 if (outer_width > TYPE_PRECISION (type))
11754 outer_width = TYPE_PRECISION (type);
11756 if (outer_width > HOST_BITS_PER_WIDE_INT)
11758 mask_hi = ((unsigned HOST_WIDE_INT) -1
11759 >> (2 * HOST_BITS_PER_WIDE_INT - outer_width));
11760 mask_lo = -1;
11762 else
11764 mask_hi = 0;
11765 mask_lo = ((unsigned HOST_WIDE_INT) -1
11766 >> (HOST_BITS_PER_WIDE_INT - outer_width));
11768 if (inner_width > HOST_BITS_PER_WIDE_INT)
11770 mask_hi &= ~((unsigned HOST_WIDE_INT) -1
11771 >> (HOST_BITS_PER_WIDE_INT - inner_width));
11772 mask_lo = 0;
11774 else
11775 mask_lo &= ~((unsigned HOST_WIDE_INT) -1
11776 >> (HOST_BITS_PER_WIDE_INT - inner_width));
11778 if ((TREE_INT_CST_HIGH (arg1) & mask_hi) == mask_hi
11779 && (TREE_INT_CST_LOW (arg1) & mask_lo) == mask_lo)
11781 tem_type = lang_hooks.types.signed_type (TREE_TYPE (tem));
11782 tem = fold_convert (tem_type, tem);
11784 else if ((TREE_INT_CST_HIGH (arg1) & mask_hi) == 0
11785 && (TREE_INT_CST_LOW (arg1) & mask_lo) == 0)
11787 tem_type = lang_hooks.types.unsigned_type (TREE_TYPE (tem));
11788 tem = fold_convert (tem_type, tem);
11790 else
11791 tem = NULL;
11794 if (tem)
11795 return fold_convert (type,
11796 fold_build2 (BIT_AND_EXPR,
11797 TREE_TYPE (tem), tem,
11798 fold_convert (TREE_TYPE (tem),
11799 arg1)));
11802 /* (A >> N) & 1 ? (1 << N) : 0 is simply A & (1 << N). A & 1 was
11803 already handled above. */
11804 if (TREE_CODE (arg0) == BIT_AND_EXPR
11805 && integer_onep (TREE_OPERAND (arg0, 1))
11806 && integer_zerop (op2)
11807 && integer_pow2p (arg1))
11809 tree tem = TREE_OPERAND (arg0, 0);
11810 STRIP_NOPS (tem);
11811 if (TREE_CODE (tem) == RSHIFT_EXPR
11812 && TREE_CODE (TREE_OPERAND (tem, 1)) == INTEGER_CST
11813 && (unsigned HOST_WIDE_INT) tree_log2 (arg1) ==
11814 TREE_INT_CST_LOW (TREE_OPERAND (tem, 1)))
11815 return fold_build2 (BIT_AND_EXPR, type,
11816 TREE_OPERAND (tem, 0), arg1);
11819 /* A & N ? N : 0 is simply A & N if N is a power of two. This
11820 is probably obsolete because the first operand should be a
11821 truth value (that's why we have the two cases above), but let's
11822 leave it in until we can confirm this for all front-ends. */
11823 if (integer_zerop (op2)
11824 && TREE_CODE (arg0) == NE_EXPR
11825 && integer_zerop (TREE_OPERAND (arg0, 1))
11826 && integer_pow2p (arg1)
11827 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_AND_EXPR
11828 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
11829 arg1, OEP_ONLY_CONST))
11830 return pedantic_non_lvalue (fold_convert (type,
11831 TREE_OPERAND (arg0, 0)));
11833 /* Convert A ? B : 0 into A && B if A and B are truth values. */
11834 if (integer_zerop (op2)
11835 && truth_value_p (TREE_CODE (arg0))
11836 && truth_value_p (TREE_CODE (arg1)))
11837 return fold_build2 (TRUTH_ANDIF_EXPR, type,
11838 fold_convert (type, arg0),
11839 arg1);
11841 /* Convert A ? B : 1 into !A || B if A and B are truth values. */
11842 if (integer_onep (op2)
11843 && truth_value_p (TREE_CODE (arg0))
11844 && truth_value_p (TREE_CODE (arg1)))
11846 /* Only perform transformation if ARG0 is easily inverted. */
11847 tem = fold_truth_not_expr (arg0);
11848 if (tem)
11849 return fold_build2 (TRUTH_ORIF_EXPR, type,
11850 fold_convert (type, tem),
11851 arg1);
11854 /* Convert A ? 0 : B into !A && B if A and B are truth values. */
11855 if (integer_zerop (arg1)
11856 && truth_value_p (TREE_CODE (arg0))
11857 && truth_value_p (TREE_CODE (op2)))
11859 /* Only perform transformation if ARG0 is easily inverted. */
11860 tem = fold_truth_not_expr (arg0);
11861 if (tem)
11862 return fold_build2 (TRUTH_ANDIF_EXPR, type,
11863 fold_convert (type, tem),
11864 op2);
11867 /* Convert A ? 1 : B into A || B if A and B are truth values. */
11868 if (integer_onep (arg1)
11869 && truth_value_p (TREE_CODE (arg0))
11870 && truth_value_p (TREE_CODE (op2)))
11871 return fold_build2 (TRUTH_ORIF_EXPR, type,
11872 fold_convert (type, arg0),
11873 op2);
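      /* Editor's illustration (not in the original source): for truth
	 values a and b, the four folds above give
	   a ? b : 0 -> a && b,    a ? b : 1 -> !a || b,
	   a ? 0 : b -> !a && b,   a ? 1 : b -> a || b.  */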
11875 return NULL_TREE;
11877 case CALL_EXPR:
11878 /* Check for a built-in function. */
11879 if (TREE_CODE (op0) == ADDR_EXPR
11880 && TREE_CODE (TREE_OPERAND (op0, 0)) == FUNCTION_DECL
11881 && DECL_BUILT_IN (TREE_OPERAND (op0, 0)))
11882 return fold_builtin (TREE_OPERAND (op0, 0), op1, false);
11883 return NULL_TREE;
11885 case BIT_FIELD_REF:
11886 if (TREE_CODE (arg0) == VECTOR_CST
11887 && type == TREE_TYPE (TREE_TYPE (arg0))
11888 && host_integerp (arg1, 1)
11889 && host_integerp (op2, 1))
11891 unsigned HOST_WIDE_INT width = tree_low_cst (arg1, 1);
11892 unsigned HOST_WIDE_INT idx = tree_low_cst (op2, 1);
11894 if (width != 0
11895 && simple_cst_equal (arg1, TYPE_SIZE (type)) == 1
11896 && (idx % width) == 0
11897 && (idx = idx / width)
11898 < TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0)))
11900 tree elements = TREE_VECTOR_CST_ELTS (arg0);
11901 while (idx-- > 0 && elements)
11902 elements = TREE_CHAIN (elements);
11903 if (elements)
11904 return TREE_VALUE (elements);
11905 else
11906 return fold_convert (type, integer_zero_node);
11909 return NULL_TREE;
11911 default:
11912 return NULL_TREE;
11913 } /* switch (code) */
11916 /* Perform constant folding and related simplification of EXPR.
11917 The related simplifications include x*1 => x, x*0 => 0, etc.,
11918 and application of the associative law.
11919 NOP_EXPR conversions may be removed freely (as long as we
11920 are careful not to change the type of the overall expression).
11921 We cannot simplify through a CONVERT_EXPR, FIX_EXPR or FLOAT_EXPR,
11922 but we can constant-fold them if they have constant operands. */
11924 #ifdef ENABLE_FOLD_CHECKING
11925 # define fold(x) fold_1 (x)
11926 static tree fold_1 (tree);
11927 static
11928 #endif
11929 tree
11930 fold (tree expr)
11932 const tree t = expr;
11933 enum tree_code code = TREE_CODE (t);
11934 enum tree_code_class kind = TREE_CODE_CLASS (code);
11935 tree tem;
11937 /* Return right away if a constant. */
11938 if (kind == tcc_constant)
11939 return t;
11941 if (IS_EXPR_CODE_CLASS (kind)
11942 || IS_GIMPLE_STMT_CODE_CLASS (kind))
11944 tree type = TREE_TYPE (t);
11945 tree op0, op1, op2;
11947 switch (TREE_CODE_LENGTH (code))
11949 case 1:
11950 op0 = TREE_OPERAND (t, 0);
11951 tem = fold_unary (code, type, op0);
11952 return tem ? tem : expr;
11953 case 2:
11954 op0 = TREE_OPERAND (t, 0);
11955 op1 = TREE_OPERAND (t, 1);
11956 tem = fold_binary (code, type, op0, op1);
11957 return tem ? tem : expr;
11958 case 3:
11959 op0 = TREE_OPERAND (t, 0);
11960 op1 = TREE_OPERAND (t, 1);
11961 op2 = TREE_OPERAND (t, 2);
11962 tem = fold_ternary (code, type, op0, op1, op2);
11963 return tem ? tem : expr;
11964 default:
11965 break;
11969 switch (code)
11971 case CONST_DECL:
11972 return fold (DECL_INITIAL (t));
11974 default:
11975 return t;
11976 } /* switch (code) */
11979 #ifdef ENABLE_FOLD_CHECKING
11980 #undef fold
11982 static void fold_checksum_tree (tree, struct md5_ctx *, htab_t);
11983 static void fold_check_failed (tree, tree);
11984 void print_fold_checksum (tree);
11986 /* When --enable-checking=fold, compute a digest of expr before
11987 and after the actual fold call, to verify that fold did not
11988 accidentally change the original expr. */
11990 tree
11991 fold (tree expr)
11993 tree ret;
11994 struct md5_ctx ctx;
11995 unsigned char checksum_before[16], checksum_after[16];
11996 htab_t ht;
11998 ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
11999 md5_init_ctx (&ctx);
12000 fold_checksum_tree (expr, &ctx, ht);
12001 md5_finish_ctx (&ctx, checksum_before);
12002 htab_empty (ht);
12004 ret = fold_1 (expr);
12006 md5_init_ctx (&ctx);
12007 fold_checksum_tree (expr, &ctx, ht);
12008 md5_finish_ctx (&ctx, checksum_after);
12009 htab_delete (ht);
12011 if (memcmp (checksum_before, checksum_after, 16))
12012 fold_check_failed (expr, ret);
12014 return ret;
12017 void
12018 print_fold_checksum (tree expr)
12020 struct md5_ctx ctx;
12021 unsigned char checksum[16], cnt;
12022 htab_t ht;
12024 ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
12025 md5_init_ctx (&ctx);
12026 fold_checksum_tree (expr, &ctx, ht);
12027 md5_finish_ctx (&ctx, checksum);
12028 htab_delete (ht);
12029 for (cnt = 0; cnt < 16; ++cnt)
12030 fprintf (stderr, "%02x", checksum[cnt]);
12031 putc ('\n', stderr);
12034 static void
12035 fold_check_failed (tree expr ATTRIBUTE_UNUSED, tree ret ATTRIBUTE_UNUSED)
12037 internal_error ("fold check: original tree changed by fold");
12040 static void
12041 fold_checksum_tree (tree expr, struct md5_ctx *ctx, htab_t ht)
12043 void **slot;
12044 enum tree_code code;
12045 struct tree_function_decl buf;
12046 int i, len;
12048 recursive_label:
12050 gcc_assert ((sizeof (struct tree_exp) + 5 * sizeof (tree)
12051 <= sizeof (struct tree_function_decl))
12052 && sizeof (struct tree_type) <= sizeof (struct tree_function_decl));
12053 if (expr == NULL)
12054 return;
12055 slot = htab_find_slot (ht, expr, INSERT);
12056 if (*slot != NULL)
12057 return;
12058 *slot = expr;
12059 code = TREE_CODE (expr);
12060 if (TREE_CODE_CLASS (code) == tcc_declaration
12061 && DECL_ASSEMBLER_NAME_SET_P (expr))
12063 /* Allow DECL_ASSEMBLER_NAME to be modified. */
12064 memcpy ((char *) &buf, expr, tree_size (expr));
12065 expr = (tree) &buf;
12066 SET_DECL_ASSEMBLER_NAME (expr, NULL);
12068 else if (TREE_CODE_CLASS (code) == tcc_type
12069 && (TYPE_POINTER_TO (expr) || TYPE_REFERENCE_TO (expr)
12070 || TYPE_CACHED_VALUES_P (expr)
12071 || TYPE_CONTAINS_PLACEHOLDER_INTERNAL (expr)))
12073 /* Allow these fields to be modified. */
12074 memcpy ((char *) &buf, expr, tree_size (expr));
12075 expr = (tree) &buf;
12076 TYPE_CONTAINS_PLACEHOLDER_INTERNAL (expr) = 0;
12077 TYPE_POINTER_TO (expr) = NULL;
12078 TYPE_REFERENCE_TO (expr) = NULL;
12079 if (TYPE_CACHED_VALUES_P (expr))
12081 TYPE_CACHED_VALUES_P (expr) = 0;
12082 TYPE_CACHED_VALUES (expr) = NULL;
12085 md5_process_bytes (expr, tree_size (expr), ctx);
12086 fold_checksum_tree (TREE_TYPE (expr), ctx, ht);
12087 if (TREE_CODE_CLASS (code) != tcc_type
12088 && TREE_CODE_CLASS (code) != tcc_declaration
12089 && code != TREE_LIST)
12090 fold_checksum_tree (TREE_CHAIN (expr), ctx, ht);
12091 switch (TREE_CODE_CLASS (code))
12093 case tcc_constant:
12094 switch (code)
12096 case STRING_CST:
12097 md5_process_bytes (TREE_STRING_POINTER (expr),
12098 TREE_STRING_LENGTH (expr), ctx);
12099 break;
12100 case COMPLEX_CST:
12101 fold_checksum_tree (TREE_REALPART (expr), ctx, ht);
12102 fold_checksum_tree (TREE_IMAGPART (expr), ctx, ht);
12103 break;
12104 case VECTOR_CST:
12105 fold_checksum_tree (TREE_VECTOR_CST_ELTS (expr), ctx, ht);
12106 break;
12107 default:
12108 break;
12110 break;
12111 case tcc_exceptional:
12112 switch (code)
12114 case TREE_LIST:
12115 fold_checksum_tree (TREE_PURPOSE (expr), ctx, ht);
12116 fold_checksum_tree (TREE_VALUE (expr), ctx, ht);
12117 expr = TREE_CHAIN (expr);
12118 goto recursive_label;
12119 break;
12120 case TREE_VEC:
12121 for (i = 0; i < TREE_VEC_LENGTH (expr); ++i)
12122 fold_checksum_tree (TREE_VEC_ELT (expr, i), ctx, ht);
12123 break;
12124 default:
12125 break;
12127 break;
12128 case tcc_expression:
12129 case tcc_reference:
12130 case tcc_comparison:
12131 case tcc_unary:
12132 case tcc_binary:
12133 case tcc_statement:
12134 len = TREE_CODE_LENGTH (code);
12135 for (i = 0; i < len; ++i)
12136 fold_checksum_tree (TREE_OPERAND (expr, i), ctx, ht);
12137 break;
12138 case tcc_declaration:
12139 fold_checksum_tree (DECL_NAME (expr), ctx, ht);
12140 fold_checksum_tree (DECL_CONTEXT (expr), ctx, ht);
12141 if (CODE_CONTAINS_STRUCT (TREE_CODE (expr), TS_DECL_COMMON))
12143 fold_checksum_tree (DECL_SIZE (expr), ctx, ht);
12144 fold_checksum_tree (DECL_SIZE_UNIT (expr), ctx, ht);
12145 fold_checksum_tree (DECL_INITIAL (expr), ctx, ht);
12146 fold_checksum_tree (DECL_ABSTRACT_ORIGIN (expr), ctx, ht);
12147 fold_checksum_tree (DECL_ATTRIBUTES (expr), ctx, ht);
12149 if (CODE_CONTAINS_STRUCT (TREE_CODE (expr), TS_DECL_WITH_VIS))
12150 fold_checksum_tree (DECL_SECTION_NAME (expr), ctx, ht);
12152 if (CODE_CONTAINS_STRUCT (TREE_CODE (expr), TS_DECL_NON_COMMON))
12154 fold_checksum_tree (DECL_VINDEX (expr), ctx, ht);
12155 fold_checksum_tree (DECL_RESULT_FLD (expr), ctx, ht);
12156 fold_checksum_tree (DECL_ARGUMENT_FLD (expr), ctx, ht);
12158 break;
12159 case tcc_type:
12160 if (TREE_CODE (expr) == ENUMERAL_TYPE)
12161 fold_checksum_tree (TYPE_VALUES (expr), ctx, ht);
12162 fold_checksum_tree (TYPE_SIZE (expr), ctx, ht);
12163 fold_checksum_tree (TYPE_SIZE_UNIT (expr), ctx, ht);
12164 fold_checksum_tree (TYPE_ATTRIBUTES (expr), ctx, ht);
12165 fold_checksum_tree (TYPE_NAME (expr), ctx, ht);
12166 if (INTEGRAL_TYPE_P (expr)
12167 || SCALAR_FLOAT_TYPE_P (expr))
12169 fold_checksum_tree (TYPE_MIN_VALUE (expr), ctx, ht);
12170 fold_checksum_tree (TYPE_MAX_VALUE (expr), ctx, ht);
12172 fold_checksum_tree (TYPE_MAIN_VARIANT (expr), ctx, ht);
12173 if (TREE_CODE (expr) == RECORD_TYPE
12174 || TREE_CODE (expr) == UNION_TYPE
12175 || TREE_CODE (expr) == QUAL_UNION_TYPE)
12176 fold_checksum_tree (TYPE_BINFO (expr), ctx, ht);
12177 fold_checksum_tree (TYPE_CONTEXT (expr), ctx, ht);
12178 break;
12179 default:
12180 break;
12184 #endif
12186 /* Fold a unary tree expression with code CODE of type TYPE with an
12187 operand OP0. Return a folded expression if successful. Otherwise,
12188 return a tree expression with code CODE of type TYPE with an
12189 operand OP0. */
12191 tree
12192 fold_build1_stat (enum tree_code code, tree type, tree op0 MEM_STAT_DECL)
12194 tree tem;
12195 #ifdef ENABLE_FOLD_CHECKING
12196 unsigned char checksum_before[16], checksum_after[16];
12197 struct md5_ctx ctx;
12198 htab_t ht;
12200 ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
12201 md5_init_ctx (&ctx);
12202 fold_checksum_tree (op0, &ctx, ht);
12203 md5_finish_ctx (&ctx, checksum_before);
12204 htab_empty (ht);
12205 #endif
12207 tem = fold_unary (code, type, op0);
12208 if (!tem)
12209 tem = build1_stat (code, type, op0 PASS_MEM_STAT);
12211 #ifdef ENABLE_FOLD_CHECKING
12212 md5_init_ctx (&ctx);
12213 fold_checksum_tree (op0, &ctx, ht);
12214 md5_finish_ctx (&ctx, checksum_after);
12215 htab_delete (ht);
12217 if (memcmp (checksum_before, checksum_after, 16))
12218 fold_check_failed (op0, tem);
12219 #endif
12220 return tem;
12223 /* Fold a binary tree expression with code CODE of type TYPE with
12224 operands OP0 and OP1. Return a folded expression if successful.
12225 Otherwise, return a tree expression with code CODE of type TYPE
12226 with operands OP0 and OP1. */
12228 tree
12229 fold_build2_stat (enum tree_code code, tree type, tree op0, tree op1
12230 MEM_STAT_DECL)
12232 tree tem;
12233 #ifdef ENABLE_FOLD_CHECKING
12234 unsigned char checksum_before_op0[16],
12235 checksum_before_op1[16],
12236 checksum_after_op0[16],
12237 checksum_after_op1[16];
12238 struct md5_ctx ctx;
12239 htab_t ht;
12241 ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
12242 md5_init_ctx (&ctx);
12243 fold_checksum_tree (op0, &ctx, ht);
12244 md5_finish_ctx (&ctx, checksum_before_op0);
12245 htab_empty (ht);
12247 md5_init_ctx (&ctx);
12248 fold_checksum_tree (op1, &ctx, ht);
12249 md5_finish_ctx (&ctx, checksum_before_op1);
12250 htab_empty (ht);
12251 #endif
12253 tem = fold_binary (code, type, op0, op1);
12254 if (!tem)
12255 tem = build2_stat (code, type, op0, op1 PASS_MEM_STAT);
12257 #ifdef ENABLE_FOLD_CHECKING
12258 md5_init_ctx (&ctx);
12259 fold_checksum_tree (op0, &ctx, ht);
12260 md5_finish_ctx (&ctx, checksum_after_op0);
12261 htab_empty (ht);
12263 if (memcmp (checksum_before_op0, checksum_after_op0, 16))
12264 fold_check_failed (op0, tem);
12266 md5_init_ctx (&ctx);
12267 fold_checksum_tree (op1, &ctx, ht);
12268 md5_finish_ctx (&ctx, checksum_after_op1);
12269 htab_delete (ht);
12271 if (memcmp (checksum_before_op1, checksum_after_op1, 16))
12272 fold_check_failed (op1, tem);
12273 #endif
12274 return tem;
12277 /* Fold a ternary tree expression with code CODE of type TYPE with
12278 operands OP0, OP1, and OP2. Return a folded expression if
12279 successful. Otherwise, return a tree expression with code CODE of
12280 type TYPE with operands OP0, OP1, and OP2. */
12282 tree
12283 fold_build3_stat (enum tree_code code, tree type, tree op0, tree op1, tree op2
12284 MEM_STAT_DECL)
12286 tree tem;
12287 #ifdef ENABLE_FOLD_CHECKING
12288 unsigned char checksum_before_op0[16],
12289 checksum_before_op1[16],
12290 checksum_before_op2[16],
12291 checksum_after_op0[16],
12292 checksum_after_op1[16],
12293 checksum_after_op2[16];
12294 struct md5_ctx ctx;
12295 htab_t ht;
12297 ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
12298 md5_init_ctx (&ctx);
12299 fold_checksum_tree (op0, &ctx, ht);
12300 md5_finish_ctx (&ctx, checksum_before_op0);
12301 htab_empty (ht);
12303 md5_init_ctx (&ctx);
12304 fold_checksum_tree (op1, &ctx, ht);
12305 md5_finish_ctx (&ctx, checksum_before_op1);
12306 htab_empty (ht);
12308 md5_init_ctx (&ctx);
12309 fold_checksum_tree (op2, &ctx, ht);
12310 md5_finish_ctx (&ctx, checksum_before_op2);
12311 htab_empty (ht);
12312 #endif
12314 tem = fold_ternary (code, type, op0, op1, op2);
12315 if (!tem)
12316 tem = build3_stat (code, type, op0, op1, op2 PASS_MEM_STAT);
12318 #ifdef ENABLE_FOLD_CHECKING
12319 md5_init_ctx (&ctx);
12320 fold_checksum_tree (op0, &ctx, ht);
12321 md5_finish_ctx (&ctx, checksum_after_op0);
12322 htab_empty (ht);
12324 if (memcmp (checksum_before_op0, checksum_after_op0, 16))
12325 fold_check_failed (op0, tem);
12327 md5_init_ctx (&ctx);
12328 fold_checksum_tree (op1, &ctx, ht);
12329 md5_finish_ctx (&ctx, checksum_after_op1);
12330 htab_empty (ht);
12332 if (memcmp (checksum_before_op1, checksum_after_op1, 16))
12333 fold_check_failed (op1, tem);
12335 md5_init_ctx (&ctx);
12336 fold_checksum_tree (op2, &ctx, ht);
12337 md5_finish_ctx (&ctx, checksum_after_op2);
12338 htab_delete (ht);
12340 if (memcmp (checksum_before_op2, checksum_after_op2, 16))
12341 fold_check_failed (op2, tem);
12342 #endif
12343 return tem;
12346 /* Perform constant folding and related simplification of initializer
12347 expression EXPR. These behave identically to "fold_buildN" but ignore
12348 potential run-time traps and exceptions that fold must preserve. */
12350 #define START_FOLD_INIT \
12351 int saved_signaling_nans = flag_signaling_nans;\
12352 int saved_trapping_math = flag_trapping_math;\
12353 int saved_rounding_math = flag_rounding_math;\
12354 int saved_trapv = flag_trapv;\
12355 int saved_folding_initializer = folding_initializer;\
12356 flag_signaling_nans = 0;\
12357 flag_trapping_math = 0;\
12358 flag_rounding_math = 0;\
12359 flag_trapv = 0;\
12360 folding_initializer = 1;
12362 #define END_FOLD_INIT \
12363 flag_signaling_nans = saved_signaling_nans;\
12364 flag_trapping_math = saved_trapping_math;\
12365 flag_rounding_math = saved_rounding_math;\
12366 flag_trapv = saved_trapv;\
12367 folding_initializer = saved_folding_initializer;
12369 tree
12370 fold_build1_initializer (enum tree_code code, tree type, tree op)
12372 tree result;
12373 START_FOLD_INIT;
12375 result = fold_build1 (code, type, op);
12377 END_FOLD_INIT;
12378 return result;
12381 tree
12382 fold_build2_initializer (enum tree_code code, tree type, tree op0, tree op1)
12384 tree result;
12385 START_FOLD_INIT;
12387 result = fold_build2 (code, type, op0, op1);
12389 END_FOLD_INIT;
12390 return result;
12393 tree
12394 fold_build3_initializer (enum tree_code code, tree type, tree op0, tree op1,
12395 tree op2)
12397 tree result;
12398 START_FOLD_INIT;
12400 result = fold_build3 (code, type, op0, op1, op2);
12402 END_FOLD_INIT;
12403 return result;
12406 #undef START_FOLD_INIT
12407 #undef END_FOLD_INIT
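/* Illustrative sketch (not part of the original file): folding an inexact
   floating-point division as an initializer.  Under -frounding-math plain
   fold must leave 1.0/3.0 alone, but in an initializer context the flag is
   temporarily cleared by START_FOLD_INIT and the division folds to a
   REAL_CST.  */
#if 0
static tree
example_fold_initializer (void)
{
  REAL_VALUE_TYPE three;
  tree c1, c3;
  real_from_integer (&three, VOIDmode, 3, 0, 0);
  c1 = build_real (double_type_node, dconst1);
  c3 = build_real (double_type_node, three);
  return fold_build2_initializer (RDIV_EXPR, double_type_node, c1, c3);
}
#endif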
12409 /* Determine if the first argument is a multiple of the second argument. Return 0 if
12410 it is not, or if we cannot easily determine it to be.
12412 An example of the sort of thing we care about (at this point; this routine
12413 could surely be made more general, and expanded to do what the *_DIV_EXPR's
12414 fold cases do now) is discovering that
12416 SAVE_EXPR (I) * SAVE_EXPR (J * 8)
12418 is a multiple of
12420 SAVE_EXPR (J * 8)
12422 when we know that the two SAVE_EXPR (J * 8) nodes are the same node.
12424 This code also handles discovering that
12426 SAVE_EXPR (I) * SAVE_EXPR (J * 8)
12428 is a multiple of 8 so we don't have to worry about dealing with a
12429 possible remainder.
12431 Note that we *look* inside a SAVE_EXPR only to determine how it was
12432 calculated; it is not safe for fold to do much of anything else with the
12433 internals of a SAVE_EXPR, since it cannot know when it will be evaluated
12434 at run time. For example, the latter example above *cannot* be implemented
12435 as SAVE_EXPR (I) * J or any variant thereof, since the value of J at
12436 evaluation time of the original SAVE_EXPR is not necessarily the same at
12437 the time the new expression is evaluated. The only optimization of this
12438 sort that would be valid is changing
12440 SAVE_EXPR (I) * SAVE_EXPR (SAVE_EXPR (J) * 8)
12442 divided by 8 to
12444 SAVE_EXPR (I) * SAVE_EXPR (J)
12446 (where the same SAVE_EXPR (J) is used in the original and the
12447 transformed version). */
12449 static int
12450 multiple_of_p (tree type, tree top, tree bottom)
12452 if (operand_equal_p (top, bottom, 0))
12453 return 1;
12455 if (TREE_CODE (type) != INTEGER_TYPE)
12456 return 0;
12458 switch (TREE_CODE (top))
12460 case BIT_AND_EXPR:
12461 /* Bitwise and provides a power of two multiple. If the mask is
12462 a multiple of BOTTOM then TOP is a multiple of BOTTOM. */
12463 if (!integer_pow2p (bottom))
12464 return 0;
12465 /* FALLTHRU */
12467 case MULT_EXPR:
12468 return (multiple_of_p (type, TREE_OPERAND (top, 0), bottom)
12469 || multiple_of_p (type, TREE_OPERAND (top, 1), bottom));
12471 case PLUS_EXPR:
12472 case MINUS_EXPR:
12473 return (multiple_of_p (type, TREE_OPERAND (top, 0), bottom)
12474 && multiple_of_p (type, TREE_OPERAND (top, 1), bottom));
12476 case LSHIFT_EXPR:
12477 if (TREE_CODE (TREE_OPERAND (top, 1)) == INTEGER_CST)
12479 tree op1, t1;
12481 op1 = TREE_OPERAND (top, 1);
12482 /* const_binop may not detect overflow correctly,
12483 so check for it explicitly here. */
12484 if (TYPE_PRECISION (TREE_TYPE (size_one_node))
12485 > TREE_INT_CST_LOW (op1)
12486 && TREE_INT_CST_HIGH (op1) == 0
12487 && 0 != (t1 = fold_convert (type,
12488 const_binop (LSHIFT_EXPR,
12489 size_one_node,
12490 op1, 0)))
12491 && !TREE_OVERFLOW (t1))
12492 return multiple_of_p (type, t1, bottom);
12494 return 0;
12496 case NOP_EXPR:
12497 /* Can't handle conversions from a non-integral or wider integral type. */
12498 if ((TREE_CODE (TREE_TYPE (TREE_OPERAND (top, 0))) != INTEGER_TYPE)
12499 || (TYPE_PRECISION (type)
12500 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (top, 0)))))
12501 return 0;
12503 /* ... fall through ... */
12505 case SAVE_EXPR:
12506 return multiple_of_p (type, TREE_OPERAND (top, 0), bottom);
12508 case INTEGER_CST:
12509 if (TREE_CODE (bottom) != INTEGER_CST
12510 || (TYPE_UNSIGNED (type)
12511 && (tree_int_cst_sgn (top) < 0
12512 || tree_int_cst_sgn (bottom) < 0)))
12513 return 0;
12514 return integer_zerop (const_binop (TRUNC_MOD_EXPR,
12515 top, bottom, 0));
12517 default:
12518 return 0;
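/* Illustrative sketch (not part of the original file): multiple_of_p on
   integer constants.  24 is a multiple of 8, so the INTEGER_CST case
   folds 24 % 8 and finds it zero.  */
#if 0
static int
example_multiple_of_p (void)
{
  tree top = build_int_cst (integer_type_node, 24);
  tree bottom = build_int_cst (integer_type_node, 8);
  return multiple_of_p (integer_type_node, top, bottom);  /* 1 */
}
#endif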
12522 /* Return true if `t' is known to be non-negative. */
12524 bool
12525 tree_expr_nonnegative_p (tree t)
12527 if (t == error_mark_node)
12528 return false;
12530 if (TYPE_UNSIGNED (TREE_TYPE (t)))
12531 return true;
12533 switch (TREE_CODE (t))
12535 case SSA_NAME:
12536 /* Query VRP to see if it has recorded any information about
12537 the range of this object. */
12538 return ssa_name_nonnegative_p (t);
12540 case ABS_EXPR:
12541 /* We can't return 1 if flag_wrapv is set because
12542 ABS_EXPR<INT_MIN> = INT_MIN. */
12543 if (!(flag_wrapv && INTEGRAL_TYPE_P (TREE_TYPE (t))))
12544 return true;
12545 break;
12547 case INTEGER_CST:
12548 return tree_int_cst_sgn (t) >= 0;
12550 case REAL_CST:
12551 return ! REAL_VALUE_NEGATIVE (TREE_REAL_CST (t));
12553 case PLUS_EXPR:
12554 if (FLOAT_TYPE_P (TREE_TYPE (t)))
12555 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0))
12556 && tree_expr_nonnegative_p (TREE_OPERAND (t, 1));
12558 /* zero_extend(x) + zero_extend(y) is non-negative if x and y are
12559 both unsigned and at least 2 bits shorter than the result. */
12560 if (TREE_CODE (TREE_TYPE (t)) == INTEGER_TYPE
12561 && TREE_CODE (TREE_OPERAND (t, 0)) == NOP_EXPR
12562 && TREE_CODE (TREE_OPERAND (t, 1)) == NOP_EXPR)
12564 tree inner1 = TREE_TYPE (TREE_OPERAND (TREE_OPERAND (t, 0), 0));
12565 tree inner2 = TREE_TYPE (TREE_OPERAND (TREE_OPERAND (t, 1), 0));
12566 if (TREE_CODE (inner1) == INTEGER_TYPE && TYPE_UNSIGNED (inner1)
12567 && TREE_CODE (inner2) == INTEGER_TYPE && TYPE_UNSIGNED (inner2))
12569 unsigned int prec = MAX (TYPE_PRECISION (inner1),
12570 TYPE_PRECISION (inner2)) + 1;
12571 return prec < TYPE_PRECISION (TREE_TYPE (t));
12574 break;
12576 case MULT_EXPR:
12577 if (FLOAT_TYPE_P (TREE_TYPE (t)))
12579 /* x * x for floating point x is always non-negative. */
12580 if (operand_equal_p (TREE_OPERAND (t, 0), TREE_OPERAND (t, 1), 0))
12581 return true;
12582 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0))
12583 && tree_expr_nonnegative_p (TREE_OPERAND (t, 1));
12586 /* zero_extend(x) * zero_extend(y) is non-negative if x and y are
12587 both unsigned and their combined precision is smaller than that of the result. */
12588 if (TREE_CODE (TREE_TYPE (t)) == INTEGER_TYPE
12589 && TREE_CODE (TREE_OPERAND (t, 0)) == NOP_EXPR
12590 && TREE_CODE (TREE_OPERAND (t, 1)) == NOP_EXPR)
12592 tree inner1 = TREE_TYPE (TREE_OPERAND (TREE_OPERAND (t, 0), 0));
12593 tree inner2 = TREE_TYPE (TREE_OPERAND (TREE_OPERAND (t, 1), 0));
12594 if (TREE_CODE (inner1) == INTEGER_TYPE && TYPE_UNSIGNED (inner1)
12595 && TREE_CODE (inner2) == INTEGER_TYPE && TYPE_UNSIGNED (inner2))
12596 return TYPE_PRECISION (inner1) + TYPE_PRECISION (inner2)
12597 < TYPE_PRECISION (TREE_TYPE (t));
12599 return false;
12601 case BIT_AND_EXPR:
12602 case MAX_EXPR:
12603 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0))
12604 || tree_expr_nonnegative_p (TREE_OPERAND (t, 1));
12606 case BIT_IOR_EXPR:
12607 case BIT_XOR_EXPR:
12608 case MIN_EXPR:
12609 case RDIV_EXPR:
12610 case TRUNC_DIV_EXPR:
12611 case CEIL_DIV_EXPR:
12612 case FLOOR_DIV_EXPR:
12613 case ROUND_DIV_EXPR:
12614 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0))
12615 && tree_expr_nonnegative_p (TREE_OPERAND (t, 1));
12617 case TRUNC_MOD_EXPR:
12618 case CEIL_MOD_EXPR:
12619 case FLOOR_MOD_EXPR:
12620 case ROUND_MOD_EXPR:
12621 case SAVE_EXPR:
12622 case NON_LVALUE_EXPR:
12623 case FLOAT_EXPR:
12624 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0));
12626 case COMPOUND_EXPR:
12627 case MODIFY_EXPR:
12628 case GIMPLE_MODIFY_STMT:
12629 return tree_expr_nonnegative_p (GENERIC_TREE_OPERAND (t, 1));
12631 case BIND_EXPR:
12632 return tree_expr_nonnegative_p (expr_last (TREE_OPERAND (t, 1)));
12634 case COND_EXPR:
12635 return tree_expr_nonnegative_p (TREE_OPERAND (t, 1))
12636 && tree_expr_nonnegative_p (TREE_OPERAND (t, 2));
12638 case NOP_EXPR:
12640 tree inner_type = TREE_TYPE (TREE_OPERAND (t, 0));
12641 tree outer_type = TREE_TYPE (t);
12643 if (TREE_CODE (outer_type) == REAL_TYPE)
12645 if (TREE_CODE (inner_type) == REAL_TYPE)
12646 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0));
12647 if (TREE_CODE (inner_type) == INTEGER_TYPE)
12649 if (TYPE_UNSIGNED (inner_type))
12650 return true;
12651 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0));
12654 else if (TREE_CODE (outer_type) == INTEGER_TYPE)
12656 if (TREE_CODE (inner_type) == REAL_TYPE)
12657 return tree_expr_nonnegative_p (TREE_OPERAND (t,0));
12658 if (TREE_CODE (inner_type) == INTEGER_TYPE)
12659 return TYPE_PRECISION (inner_type) < TYPE_PRECISION (outer_type)
12660 && TYPE_UNSIGNED (inner_type);
12663 break;
12665 case TARGET_EXPR:
12667 tree temp = TARGET_EXPR_SLOT (t);
12668 t = TARGET_EXPR_INITIAL (t);
12670 /* If the initializer is non-void, then it's a normal expression
12671 that will be assigned to the slot. */
12672 if (!VOID_TYPE_P (t))
12673 return tree_expr_nonnegative_p (t);
12675 /* Otherwise, the initializer sets the slot in some way. One common
12676 way is an assignment statement at the end of the initializer. */
12677 while (1)
12679 if (TREE_CODE (t) == BIND_EXPR)
12680 t = expr_last (BIND_EXPR_BODY (t));
12681 else if (TREE_CODE (t) == TRY_FINALLY_EXPR
12682 || TREE_CODE (t) == TRY_CATCH_EXPR)
12683 t = expr_last (TREE_OPERAND (t, 0));
12684 else if (TREE_CODE (t) == STATEMENT_LIST)
12685 t = expr_last (t);
12686 else
12687 break;
12689 if ((TREE_CODE (t) == MODIFY_EXPR
12690 || TREE_CODE (t) == GIMPLE_MODIFY_STMT)
12691 && GENERIC_TREE_OPERAND (t, 0) == temp)
12692 return tree_expr_nonnegative_p (GENERIC_TREE_OPERAND (t, 1));
12694 return false;
12697 case CALL_EXPR:
12699 tree fndecl = get_callee_fndecl (t);
12700 tree arglist = TREE_OPERAND (t, 1);
12701 if (fndecl && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL)
12702 switch (DECL_FUNCTION_CODE (fndecl))
12704 CASE_FLT_FN (BUILT_IN_ACOS):
12705 CASE_FLT_FN (BUILT_IN_ACOSH):
12706 CASE_FLT_FN (BUILT_IN_CABS):
12707 CASE_FLT_FN (BUILT_IN_COSH):
12708 CASE_FLT_FN (BUILT_IN_ERFC):
12709 CASE_FLT_FN (BUILT_IN_EXP):
12710 CASE_FLT_FN (BUILT_IN_EXP10):
12711 CASE_FLT_FN (BUILT_IN_EXP2):
12712 CASE_FLT_FN (BUILT_IN_FABS):
12713 CASE_FLT_FN (BUILT_IN_FDIM):
12714 CASE_FLT_FN (BUILT_IN_HYPOT):
12715 CASE_FLT_FN (BUILT_IN_POW10):
12716 CASE_INT_FN (BUILT_IN_FFS):
12717 CASE_INT_FN (BUILT_IN_PARITY):
12718 CASE_INT_FN (BUILT_IN_POPCOUNT):
12719 case BUILT_IN_BSWAP32:
12720 case BUILT_IN_BSWAP64:
12721 /* Always true. */
12722 return true;
12724 CASE_FLT_FN (BUILT_IN_SQRT):
12725 /* sqrt(-0.0) is -0.0. */
12726 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (t))))
12727 return true;
12728 return tree_expr_nonnegative_p (TREE_VALUE (arglist));
12730 CASE_FLT_FN (BUILT_IN_ASINH):
12731 CASE_FLT_FN (BUILT_IN_ATAN):
12732 CASE_FLT_FN (BUILT_IN_ATANH):
12733 CASE_FLT_FN (BUILT_IN_CBRT):
12734 CASE_FLT_FN (BUILT_IN_CEIL):
12735 CASE_FLT_FN (BUILT_IN_ERF):
12736 CASE_FLT_FN (BUILT_IN_EXPM1):
12737 CASE_FLT_FN (BUILT_IN_FLOOR):
12738 CASE_FLT_FN (BUILT_IN_FMOD):
12739 CASE_FLT_FN (BUILT_IN_FREXP):
12740 CASE_FLT_FN (BUILT_IN_LCEIL):
12741 CASE_FLT_FN (BUILT_IN_LDEXP):
12742 CASE_FLT_FN (BUILT_IN_LFLOOR):
12743 CASE_FLT_FN (BUILT_IN_LLCEIL):
12744 CASE_FLT_FN (BUILT_IN_LLFLOOR):
12745 CASE_FLT_FN (BUILT_IN_LLRINT):
12746 CASE_FLT_FN (BUILT_IN_LLROUND):
12747 CASE_FLT_FN (BUILT_IN_LRINT):
12748 CASE_FLT_FN (BUILT_IN_LROUND):
12749 CASE_FLT_FN (BUILT_IN_MODF):
12750 CASE_FLT_FN (BUILT_IN_NEARBYINT):
12751 CASE_FLT_FN (BUILT_IN_RINT):
12752 CASE_FLT_FN (BUILT_IN_ROUND):
12753 CASE_FLT_FN (BUILT_IN_SIGNBIT):
12754 CASE_FLT_FN (BUILT_IN_SINH):
12755 CASE_FLT_FN (BUILT_IN_TANH):
12756 CASE_FLT_FN (BUILT_IN_TRUNC):
12757 /* True if the 1st argument is nonnegative. */
12758 return tree_expr_nonnegative_p (TREE_VALUE (arglist));
12760 CASE_FLT_FN (BUILT_IN_FMAX):
12761 /* True if the 1st OR 2nd arguments are nonnegative. */
12762 return tree_expr_nonnegative_p (TREE_VALUE (arglist))
12763 || tree_expr_nonnegative_p (TREE_VALUE (TREE_CHAIN (arglist)));
12765 CASE_FLT_FN (BUILT_IN_FMIN):
12766 /* True if the 1st AND 2nd arguments are nonnegative. */
12767 return tree_expr_nonnegative_p (TREE_VALUE (arglist))
12768 && tree_expr_nonnegative_p (TREE_VALUE (TREE_CHAIN (arglist)));
12770 CASE_FLT_FN (BUILT_IN_COPYSIGN):
12771 /* True if the 2nd argument is nonnegative. */
12772 return tree_expr_nonnegative_p (TREE_VALUE (TREE_CHAIN (arglist)));
12774 CASE_FLT_FN (BUILT_IN_POWI):
12775 /* True if the 1st argument is nonnegative or the second
12776 argument is an even integer. */
12777 if (TREE_CODE (TREE_VALUE (TREE_CHAIN (arglist))) == INTEGER_CST)
12779 tree arg1 = TREE_VALUE (TREE_CHAIN (arglist));
12780 if ((TREE_INT_CST_LOW (arg1) & 1) == 0)
12781 return true;
12783 return tree_expr_nonnegative_p (TREE_VALUE (arglist));
12785 CASE_FLT_FN (BUILT_IN_POW):
12786 /* True if the 1st argument is nonnegative or the second
12787 argument is an even integer valued real. */
12788 if (TREE_CODE (TREE_VALUE (TREE_CHAIN (arglist))) == REAL_CST)
12790 REAL_VALUE_TYPE c;
12791 HOST_WIDE_INT n;
12793 c = TREE_REAL_CST (TREE_VALUE (TREE_CHAIN (arglist)));
12794 n = real_to_integer (&c);
12795 if ((n & 1) == 0)
12797 REAL_VALUE_TYPE cint;
12798 real_from_integer (&cint, VOIDmode, n,
12799 n < 0 ? -1 : 0, 0);
12800 if (real_identical (&c, &cint))
12801 return true;
12804 return tree_expr_nonnegative_p (TREE_VALUE (arglist));
12806 default:
12807 break;
12811 /* ... fall through ... */
12813 default:
12814 if (truth_value_p (TREE_CODE (t)))
12815 /* Truth values evaluate to 0 or 1, which is nonnegative. */
12816 return true;
12819 /* We don't know the sign of `t', so be conservative and return false. */
12820 return false;
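/* Illustrative sketch (not part of the original file):
   tree_expr_nonnegative_p on an ABS_EXPR.  With flag_wrapv clear the
   ABS_EXPR case answers true; for a bare signed variable the default
   case gives a conservative false.  */
#if 0
static void
example_nonnegative (void)
{
  tree x, ax;
  x = build_decl (VAR_DECL, get_identifier ("x"), integer_type_node);
  ax = build1 (ABS_EXPR, integer_type_node, x);
  /* Holds whenever flag_wrapv is clear; with -fwrapv the routine must
     refuse, since ABS_EXPR<INT_MIN> is INT_MIN.  */
  gcc_assert (tree_expr_nonnegative_p (ax) || flag_wrapv);
}
#endif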
12823 /* Return true when T is an address and is known to be nonzero.
12824 For floating point we further ensure that T is not denormal.
12825 Similar logic is present in nonzero_address in rtlanal.c. */
12827 bool
12828 tree_expr_nonzero_p (tree t)
12830 tree type = TREE_TYPE (t);
12832 /* Doing something useful for floating point would need more work. */
12833 if (!INTEGRAL_TYPE_P (type) && !POINTER_TYPE_P (type))
12834 return false;
12836 switch (TREE_CODE (t))
12838 case SSA_NAME:
12839 /* Query VRP to see if it has recorded any information about
12840 the range of this object. */
12841 return ssa_name_nonzero_p (t);
12843 case ABS_EXPR:
12844 return tree_expr_nonzero_p (TREE_OPERAND (t, 0));
12846 case INTEGER_CST:
12847 return !integer_zerop (t);
12849 case PLUS_EXPR:
12850 if (!TYPE_UNSIGNED (type) && !flag_wrapv)
12852 /* In the presence of negative values it is hard
12853 to say anything definite. */
12854 if (!tree_expr_nonnegative_p (TREE_OPERAND (t, 0))
12855 || !tree_expr_nonnegative_p (TREE_OPERAND (t, 1)))
12856 return false;
12857 /* One of the operands must be positive and the other non-negative. */
12858 return (tree_expr_nonzero_p (TREE_OPERAND (t, 0))
12859 || tree_expr_nonzero_p (TREE_OPERAND (t, 1)));
12861 break;
12863 case MULT_EXPR:
12864 if (!TYPE_UNSIGNED (type) && !flag_wrapv)
12866 return (tree_expr_nonzero_p (TREE_OPERAND (t, 0))
12867 && tree_expr_nonzero_p (TREE_OPERAND (t, 1)));
12869 break;
12871 case NOP_EXPR:
12873 tree inner_type = TREE_TYPE (TREE_OPERAND (t, 0));
12874 tree outer_type = TREE_TYPE (t);
12876 return (TYPE_PRECISION (outer_type) >= TYPE_PRECISION (inner_type)
12877 && tree_expr_nonzero_p (TREE_OPERAND (t, 0)));
12879 break;
12881 case ADDR_EXPR:
12883 tree base = get_base_address (TREE_OPERAND (t, 0));
12885 if (!base)
12886 return false;
12888 /* Weak declarations may link to NULL. */
12889 if (VAR_OR_FUNCTION_DECL_P (base))
12890 return !DECL_WEAK (base);
12892 /* Constants are never weak. */
12893 if (CONSTANT_CLASS_P (base))
12894 return true;
12896 return false;
12899 case COND_EXPR:
12900 return (tree_expr_nonzero_p (TREE_OPERAND (t, 1))
12901 && tree_expr_nonzero_p (TREE_OPERAND (t, 2)));
12903 case MIN_EXPR:
12904 return (tree_expr_nonzero_p (TREE_OPERAND (t, 0))
12905 && tree_expr_nonzero_p (TREE_OPERAND (t, 1)));
12907 case MAX_EXPR:
12908 if (tree_expr_nonzero_p (TREE_OPERAND (t, 0)))
12910 /* When both operands are nonzero, then MAX must be too. */
12911 if (tree_expr_nonzero_p (TREE_OPERAND (t, 1)))
12912 return true;
12914 /* MAX where operand 0 is positive is positive. */
12915 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0));
12917 /* MAX where operand 1 is positive is positive. */
12918 else if (tree_expr_nonzero_p (TREE_OPERAND (t, 1))
12919 && tree_expr_nonnegative_p (TREE_OPERAND (t, 1)))
12920 return true;
12921 break;
12923 case COMPOUND_EXPR:
12924 case MODIFY_EXPR:
12925 case GIMPLE_MODIFY_STMT:
12926 case BIND_EXPR:
12927 return tree_expr_nonzero_p (GENERIC_TREE_OPERAND (t, 1));
12929 case SAVE_EXPR:
12930 case NON_LVALUE_EXPR:
12931 return tree_expr_nonzero_p (TREE_OPERAND (t, 0));
12933 case BIT_IOR_EXPR:
12934 return tree_expr_nonzero_p (TREE_OPERAND (t, 1))
12935 || tree_expr_nonzero_p (TREE_OPERAND (t, 0));
12937 case CALL_EXPR:
12938 return alloca_call_p (t);
12940 default:
12941 break;
12943 return false;
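/* Illustrative sketch (not part of the original file): tree_expr_nonzero_p
   via the ADDR_EXPR case.  The address of a non-weak variable can never
   be null, so the answer is true.  */
#if 0
static void
example_nonzero (void)
{
  tree v, addr;
  v = build_decl (VAR_DECL, get_identifier ("v"), integer_type_node);
  addr = build_fold_addr_expr (v);
  gcc_assert (tree_expr_nonzero_p (addr));
}
#endif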
12946 /* Given the components of a binary expression CODE, TYPE, OP0 and OP1,
12947 attempt to fold the expression to a constant without modifying TYPE,
12948 OP0 or OP1.
12950 If the expression can be simplified to a constant, then return
12951 the constant. If the expression cannot be simplified to a
12952 constant, then return NULL_TREE. */
12954 tree
12955 fold_binary_to_constant (enum tree_code code, tree type, tree op0, tree op1)
12957 tree tem = fold_binary (code, type, op0, op1);
12958 return (tem && TREE_CONSTANT (tem)) ? tem : NULL_TREE;
12961 /* Given the components of a unary expression CODE, TYPE and OP0,
12962 attempt to fold the expression to a constant without modifying
12963 TYPE or OP0.
12965 If the expression can be simplified to a constant, then return
12966 the constant. If the expression cannot be simplified to a
12967 constant, then return NULL_TREE. */
12969 tree
12970 fold_unary_to_constant (enum tree_code code, tree type, tree op0)
12972 tree tem = fold_unary (code, type, op0);
12973 return (tem && TREE_CONSTANT (tem)) ? tem : NULL_TREE;
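/* Illustrative sketch (not part of the original file): the *_to_constant
   entry points return a constant or NULL_TREE, never a partially
   simplified tree.  */
#if 0
static void
example_fold_to_constant (void)
{
  tree two, three, x, c, n;
  two = build_int_cst (integer_type_node, 2);
  three = build_int_cst (integer_type_node, 3);
  x = build_decl (VAR_DECL, get_identifier ("x"), integer_type_node);
  /* 2 + 3 folds to the INTEGER_CST 5.  */
  c = fold_binary_to_constant (PLUS_EXPR, integer_type_node, two, three);
  /* -x is not a constant, so NULL_TREE comes back.  */
  n = fold_unary_to_constant (NEGATE_EXPR, integer_type_node, x);
  gcc_assert (c && !n);
}
#endif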
12976 /* If EXP represents referencing an element in a constant string
12977 (either via pointer arithmetic or array indexing), return the
12978 tree representing the value accessed; otherwise return NULL. */
12980 tree
12981 fold_read_from_constant_string (tree exp)
12983 if ((TREE_CODE (exp) == INDIRECT_REF
12984 || TREE_CODE (exp) == ARRAY_REF)
12985 && TREE_CODE (TREE_TYPE (exp)) == INTEGER_TYPE)
12987 tree exp1 = TREE_OPERAND (exp, 0);
12988 tree index;
12989 tree string;
12991 if (TREE_CODE (exp) == INDIRECT_REF)
12992 string = string_constant (exp1, &index);
12993 else
12995 tree low_bound = array_ref_low_bound (exp);
12996 index = fold_convert (sizetype, TREE_OPERAND (exp, 1));
12998 /* Optimize the special case of a zero lower bound.
13000 We convert the low_bound to sizetype to avoid some problems
13001 with constant folding. (E.g. suppose the lower bound is 1,
13002 and its mode is QI. Without the conversion, (ARRAY
13003 +(INDEX-(unsigned char)1)) becomes ((ARRAY+(-(unsigned char)1))
13004 +INDEX), which becomes (ARRAY+255+INDEX). Oops!) */
13005 if (! integer_zerop (low_bound))
13006 index = size_diffop (index, fold_convert (sizetype, low_bound));
13008 string = exp1;
13011 if (string
13012 && TYPE_MODE (TREE_TYPE (exp)) == TYPE_MODE (TREE_TYPE (TREE_TYPE (string)))
13013 && TREE_CODE (string) == STRING_CST
13014 && TREE_CODE (index) == INTEGER_CST
13015 && compare_tree_int (index, TREE_STRING_LENGTH (string)) < 0
13016 && (GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_TYPE (string))))
13017 == MODE_INT)
13018 && (GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (TREE_TYPE (string)))) == 1))
13019 return fold_convert (TREE_TYPE (exp),
13020 build_int_cst (NULL_TREE,
13021 (TREE_STRING_POINTER (string)
13022 [TREE_INT_CST_LOW (index)])));
13024 return NULL;
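/* Illustrative sketch (not part of the original file): reading "hi"[1]
   at compile time.  Building the STRING_CST and its array type by hand
   here is only for demonstration.  */
#if 0
static void
example_read_string (void)
{
  tree str, ref, c;
  str = build_string (3, "hi");  /* Includes the trailing NUL.  */
  TREE_TYPE (str) = build_array_type (char_type_node,
				      build_index_type (size_int (2)));
  ref = build4 (ARRAY_REF, char_type_node, str, size_int (1),
		NULL_TREE, NULL_TREE);
  /* Folds to the INTEGER_CST 'i'.  */
  c = fold_read_from_constant_string (ref);
  gcc_assert (c && TREE_CODE (c) == INTEGER_CST);
}
#endif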
13027 /* Return the tree for neg (ARG0) when ARG0 is known to be either
13028 an integer constant or real constant.
13030 TYPE is the type of the result. */
13032 static tree
13033 fold_negate_const (tree arg0, tree type)
13035 tree t = NULL_TREE;
13037 switch (TREE_CODE (arg0))
13039 case INTEGER_CST:
13041 unsigned HOST_WIDE_INT low;
13042 HOST_WIDE_INT high;
13043 int overflow = neg_double (TREE_INT_CST_LOW (arg0),
13044 TREE_INT_CST_HIGH (arg0),
13045 &low, &high);
13046 t = force_fit_type_double (type, low, high, 1,
13047 (overflow | TREE_OVERFLOW (arg0))
13048 && !TYPE_UNSIGNED (type),
13049 false);
13050 break;
13053 case REAL_CST:
13054 t = build_real (type, REAL_VALUE_NEGATE (TREE_REAL_CST (arg0)));
13055 break;
13057 default:
13058 gcc_unreachable ();
13061 return t;
13064 /* Return the tree for abs (ARG0) when ARG0 is known to be either
13065 an integer constant or real constant.
13067 TYPE is the type of the result. */
13069 tree
13070 fold_abs_const (tree arg0, tree type)
13072 tree t = NULL_TREE;
13074 switch (TREE_CODE (arg0))
13076 case INTEGER_CST:
13077 /* If the value is unsigned, then the absolute value is
13078 the same as the ordinary value. */
13079 if (TYPE_UNSIGNED (type))
13080 t = arg0;
13081 /* Similarly, if the value is non-negative. */
13082 else if (INT_CST_LT (integer_minus_one_node, arg0))
13083 t = arg0;
13084 /* If the value is negative, then the absolute value is
13085 its negation. */
13086 else
13088 unsigned HOST_WIDE_INT low;
13089 HOST_WIDE_INT high;
13090 int overflow = neg_double (TREE_INT_CST_LOW (arg0),
13091 TREE_INT_CST_HIGH (arg0),
13092 &low, &high);
13093 t = force_fit_type_double (type, low, high, -1,
13094 overflow | TREE_OVERFLOW (arg0),
13095 false);
13097 break;
13099 case REAL_CST:
13100 if (REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg0)))
13101 t = build_real (type, REAL_VALUE_NEGATE (TREE_REAL_CST (arg0)));
13102 else
13103 t = arg0;
13104 break;
13106 default:
13107 gcc_unreachable ();
13110 return t;
13113 /* Return the tree for not (ARG0) when ARG0 is known to be an integer
13114 constant. TYPE is the type of the result. */
13116 static tree
13117 fold_not_const (tree arg0, tree type)
13119 tree t = NULL_TREE;
13121 gcc_assert (TREE_CODE (arg0) == INTEGER_CST);
13123 t = force_fit_type_double (type, ~TREE_INT_CST_LOW (arg0),
13124 ~TREE_INT_CST_HIGH (arg0), 0,
13125 TREE_OVERFLOW (arg0),
13126 false);
13128 return t;
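/* Illustrative sketch (not part of the original file): the constant
   helpers negate, take the absolute value of, and complement
   INTEGER_CSTs directly.  */
#if 0
static void
example_const_helpers (void)
{
  tree m5, t;
  m5 = build_int_cst (integer_type_node, -5);
  t = fold_negate_const (m5, integer_type_node);  /* 5 */
  t = fold_abs_const (m5, integer_type_node);     /* 5 */
  t = fold_not_const (m5, integer_type_node);     /* ~-5 == 4 */
  (void) t;
}
#endif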
13131 /* Given CODE, a relational operator, the target type TYPE, and two
13132 constant operands OP0 and OP1, return the result of the
13133 relational operation. If the result is not a compile time
13134 constant, then return NULL_TREE. */
13136 static tree
13137 fold_relational_const (enum tree_code code, tree type, tree op0, tree op1)
13139 int result, invert;
13141 /* From here on, the only cases we handle are when the result is
13142 known to be a constant. */
13144 if (TREE_CODE (op0) == REAL_CST && TREE_CODE (op1) == REAL_CST)
13146 const REAL_VALUE_TYPE *c0 = TREE_REAL_CST_PTR (op0);
13147 const REAL_VALUE_TYPE *c1 = TREE_REAL_CST_PTR (op1);
13149 /* Handle the cases where either operand is a NaN. */
13150 if (real_isnan (c0) || real_isnan (c1))
13152 switch (code)
13154 case EQ_EXPR:
13155 case ORDERED_EXPR:
13156 result = 0;
13157 break;
13159 case NE_EXPR:
13160 case UNORDERED_EXPR:
13161 case UNLT_EXPR:
13162 case UNLE_EXPR:
13163 case UNGT_EXPR:
13164 case UNGE_EXPR:
13165 case UNEQ_EXPR:
13166 result = 1;
13167 break;
13169 case LT_EXPR:
13170 case LE_EXPR:
13171 case GT_EXPR:
13172 case GE_EXPR:
13173 case LTGT_EXPR:
13174 if (flag_trapping_math)
13175 return NULL_TREE;
13176 result = 0;
13177 break;
13179 default:
13180 gcc_unreachable ();
13183 return constant_boolean_node (result, type);
13186 return constant_boolean_node (real_compare (code, c0, c1), type);
13189 /* From here on we only handle LT, LE, GT, GE, EQ and NE.
13191 To compute GT, swap the arguments and do LT.
13192 To compute GE, do LT and invert the result.
13193 To compute LE, swap the arguments, do LT and invert the result.
13194 To compute NE, do EQ and invert the result.
13196 Therefore, the code below must handle only EQ and LT. */
13198 if (code == LE_EXPR || code == GT_EXPR)
13200 tree tem = op0;
13201 op0 = op1;
13202 op1 = tem;
13203 code = swap_tree_comparison (code);
13206 /* Note that it is safe to invert for real values here because we
13207 have already handled the one case where it matters. */
13209 invert = 0;
13210 if (code == NE_EXPR || code == GE_EXPR)
13212 invert = 1;
13213 code = invert_tree_comparison (code, false);
13216 /* Compute a result for LT or EQ if the arguments permit;
13217 otherwise return NULL_TREE. */
13218 if (TREE_CODE (op0) == INTEGER_CST && TREE_CODE (op1) == INTEGER_CST)
13220 if (code == EQ_EXPR)
13221 result = tree_int_cst_equal (op0, op1);
13222 else if (TYPE_UNSIGNED (TREE_TYPE (op0)))
13223 result = INT_CST_LT_UNSIGNED (op0, op1);
13224 else
13225 result = INT_CST_LT (op0, op1);
13227 else
13228 return NULL_TREE;
13230 if (invert)
13231 result ^= 1;
13232 return constant_boolean_node (result, type);
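/* Illustrative sketch (not part of the original file): only EQ and LT
   are computed directly; GE, for instance, is rewritten as LT with the
   result inverted, exactly as the comment above describes.  */
#if 0
static void
example_relational_const (void)
{
  tree two, seven, res;
  two = build_int_cst (integer_type_node, 2);
  seven = build_int_cst (integer_type_node, 7);
  /* 2 >= 7 becomes !(2 < 7), i.e. boolean false.  */
  res = fold_relational_const (GE_EXPR, boolean_type_node, two, seven);
  gcc_assert (integer_zerop (res));
}
#endif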
13235 /* Build an expression for a cleanup point containing EXPR, with type TYPE.
13236 Don't build a cleanup point expression for EXPR if it doesn't have side
13237 effects. */
13239 tree
13240 fold_build_cleanup_point_expr (tree type, tree expr)
13242 /* If the expression does not have side effects then we don't have to wrap
13243 it with a cleanup point expression. */
13244 if (!TREE_SIDE_EFFECTS (expr))
13245 return expr;
13247 /* If the expression is a return, check whether the expression inside the
13248 return, or the right-hand side of the modify expression inside the
13249 return, has no side effects. If either has none, we don't need to
13250 wrap the expression in a cleanup point expression. Note we don't check the
13251 left-hand side of the modify because it should always be a return decl. */
13252 if (TREE_CODE (expr) == RETURN_EXPR)
13254 tree op = TREE_OPERAND (expr, 0);
13255 if (!op || !TREE_SIDE_EFFECTS (op))
13256 return expr;
13257 op = TREE_OPERAND (op, 1);
13258 if (!TREE_SIDE_EFFECTS (op))
13259 return expr;
13262 return build1 (CLEANUP_POINT_EXPR, type, expr);
13265 /* Build an expression for the address of T. Folds away INDIRECT_REF to
13266 avoid confusing the gimplify process. */
13268 tree
13269 build_fold_addr_expr_with_type (tree t, tree ptrtype)
13271 /* The size of the object is not relevant when talking about its address. */
13272 if (TREE_CODE (t) == WITH_SIZE_EXPR)
13273 t = TREE_OPERAND (t, 0);
13275 /* Note: doesn't apply to ALIGN_INDIRECT_REF */
13276 if (TREE_CODE (t) == INDIRECT_REF
13277 || TREE_CODE (t) == MISALIGNED_INDIRECT_REF)
13279 t = TREE_OPERAND (t, 0);
13280 if (TREE_TYPE (t) != ptrtype)
13281 t = build1 (NOP_EXPR, ptrtype, t);
13283 else
13285 tree base = t;
13287 while (handled_component_p (base))
13288 base = TREE_OPERAND (base, 0);
13289 if (DECL_P (base))
13290 TREE_ADDRESSABLE (base) = 1;
13292 t = build1 (ADDR_EXPR, ptrtype, t);
13295 return t;
13298 tree
13299 build_fold_addr_expr (tree t)
13301 return build_fold_addr_expr_with_type (t, build_pointer_type (TREE_TYPE (t)));
13304 /* Given a pointer value OP0 and a type TYPE, return a simplified version
13305 of an indirection through OP0, or NULL_TREE if no simplification is
13306 possible. */
13308 tree
13309 fold_indirect_ref_1 (tree type, tree op0)
13311 tree sub = op0;
13312 tree subtype;
13314 STRIP_NOPS (sub);
13315 subtype = TREE_TYPE (sub);
13316 if (!POINTER_TYPE_P (subtype))
13317 return NULL_TREE;
13319 if (TREE_CODE (sub) == ADDR_EXPR)
13321 tree op = TREE_OPERAND (sub, 0);
13322 tree optype = TREE_TYPE (op);
13323 /* *&CONST_DECL -> to the value of the const decl. */
13324 if (TREE_CODE (op) == CONST_DECL)
13325 return DECL_INITIAL (op);
13326 /* *&p => p; make sure to handle *&"str"[cst] here. */
13327 if (type == optype)
13329 tree fop = fold_read_from_constant_string (op);
13330 if (fop)
13331 return fop;
13332 else
13333 return op;
13335 /* *(foo *)&fooarray => fooarray[0] */
13336 else if (TREE_CODE (optype) == ARRAY_TYPE
13337 && type == TREE_TYPE (optype))
13339 tree type_domain = TYPE_DOMAIN (optype);
13340 tree min_val = size_zero_node;
13341 if (type_domain && TYPE_MIN_VALUE (type_domain))
13342 min_val = TYPE_MIN_VALUE (type_domain);
13343 return build4 (ARRAY_REF, type, op, min_val, NULL_TREE, NULL_TREE);
13345 /* *(foo *)&complexfoo => __real__ complexfoo */
13346 else if (TREE_CODE (optype) == COMPLEX_TYPE
13347 && type == TREE_TYPE (optype))
13348 return fold_build1 (REALPART_EXPR, type, op);
13349 /* *(foo *)&vectorfoo => BIT_FIELD_REF<vectorfoo,...> */
13350 else if (TREE_CODE (optype) == VECTOR_TYPE
13351 && type == TREE_TYPE (optype))
13353 tree part_width = TYPE_SIZE (type);
13354 tree index = bitsize_int (0);
13355 return fold_build3 (BIT_FIELD_REF, type, op, part_width, index);
13359 /* ((foo*)&complexfoo)[1] => __imag__ complexfoo */
13360 if (TREE_CODE (sub) == PLUS_EXPR
13361 && TREE_CODE (TREE_OPERAND (sub, 1)) == INTEGER_CST)
13363 tree op00 = TREE_OPERAND (sub, 0);
13364 tree op01 = TREE_OPERAND (sub, 1);
13365 tree op00type;
13367 STRIP_NOPS (op00);
13368 op00type = TREE_TYPE (op00);
13369 if (TREE_CODE (op00) == ADDR_EXPR
13370 && TREE_CODE (TREE_TYPE (op00type)) == COMPLEX_TYPE
13371 && type == TREE_TYPE (TREE_TYPE (op00type)))
13373 tree size = TYPE_SIZE_UNIT (type);
13374 if (tree_int_cst_equal (size, op01))
13375 return fold_build1 (IMAGPART_EXPR, type, TREE_OPERAND (op00, 0));
13379 /* *(foo *)fooarrptr => (*fooarrptr)[0] */
13380 if (TREE_CODE (TREE_TYPE (subtype)) == ARRAY_TYPE
13381 && type == TREE_TYPE (TREE_TYPE (subtype)))
13383 tree type_domain;
13384 tree min_val = size_zero_node;
13385 sub = build_fold_indirect_ref (sub);
13386 type_domain = TYPE_DOMAIN (TREE_TYPE (sub));
13387 if (type_domain && TYPE_MIN_VALUE (type_domain))
13388 min_val = TYPE_MIN_VALUE (type_domain);
13389 return build4 (ARRAY_REF, type, sub, min_val, NULL_TREE, NULL_TREE);
13392 return NULL_TREE;
13395 /* Builds an expression for an indirection through T, simplifying some
13396 cases. */
13398 tree
13399 build_fold_indirect_ref (tree t)
13401 tree type = TREE_TYPE (TREE_TYPE (t));
13402 tree sub = fold_indirect_ref_1 (type, t);
13404 if (sub)
13405 return sub;
13406 else
13407 return build1 (INDIRECT_REF, type, t);
13410 /* Given an INDIRECT_REF T, return either T or a simplified version. */
13412 tree
13413 fold_indirect_ref (tree t)
13415 tree sub = fold_indirect_ref_1 (TREE_TYPE (t), TREE_OPERAND (t, 0));
13417 if (sub)
13418 return sub;
13419 else
13420 return t;
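/* Illustrative sketch (not part of the original file): the address and
   indirection builders fold each other away, so *&v comes back as just
   v.  */
#if 0
static void
example_addr_round_trip (void)
{
  tree v, addr, back;
  v = build_decl (VAR_DECL, get_identifier ("v"), integer_type_node);
  addr = build_fold_addr_expr (v);        /* &v; marks v addressable.  */
  back = build_fold_indirect_ref (addr);  /* Folds *&v back to v.  */
  gcc_assert (back == v);
}
#endif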
13423 /* Strip non-trapping, non-side-effecting tree nodes from an expression
13424 whose result is ignored. The type of the returned tree need not be
13425 the same as the original expression. */
13427 tree
13428 fold_ignored_result (tree t)
13430 if (!TREE_SIDE_EFFECTS (t))
13431 return integer_zero_node;
13433 for (;;)
13434 switch (TREE_CODE_CLASS (TREE_CODE (t)))
13436 case tcc_unary:
13437 t = TREE_OPERAND (t, 0);
13438 break;
13440 case tcc_binary:
13441 case tcc_comparison:
13442 if (!TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1)))
13443 t = TREE_OPERAND (t, 0);
13444 else if (!TREE_SIDE_EFFECTS (TREE_OPERAND (t, 0)))
13445 t = TREE_OPERAND (t, 1);
13446 else
13447 return t;
13448 break;
13450 case tcc_expression:
13451 switch (TREE_CODE (t))
13453 case COMPOUND_EXPR:
13454 if (TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1)))
13455 return t;
13456 t = TREE_OPERAND (t, 0);
13457 break;
13459 case COND_EXPR:
13460 if (TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1))
13461 || TREE_SIDE_EFFECTS (TREE_OPERAND (t, 2)))
13462 return t;
13463 t = TREE_OPERAND (t, 0);
13464 break;
13466 default:
13467 return t;
13469 break;
13471 default:
13472 return t;
13476 /* Return the value of VALUE, rounded up to a multiple of DIVISOR.
13477 This can only be applied to objects of a sizetype. */
13479 tree
13480 round_up (tree value, int divisor)
13482 tree div = NULL_TREE;
13484 gcc_assert (divisor > 0);
13485 if (divisor == 1)
13486 return value;
13488 /* See if VALUE is already a multiple of DIVISOR. If so, we don't
13489 have to do anything. Only do this when VALUE is not a constant,
13490 because for a constant this check is more expensive than just
13491 doing the rounding. */
13492 if (TREE_CODE (value) != INTEGER_CST)
13494 div = build_int_cst (TREE_TYPE (value), divisor);
13496 if (multiple_of_p (TREE_TYPE (value), value, div))
13497 return value;
13500 /* If divisor is a power of two, simplify this to bit manipulation. */
13501 if (divisor == (divisor & -divisor))
13503 tree t;
13505 t = build_int_cst (TREE_TYPE (value), divisor - 1);
13506 value = size_binop (PLUS_EXPR, value, t);
13507 t = build_int_cst (TREE_TYPE (value), -divisor);
13508 value = size_binop (BIT_AND_EXPR, value, t);
13510 else
13512 if (!div)
13513 div = build_int_cst (TREE_TYPE (value), divisor);
13514 value = size_binop (CEIL_DIV_EXPR, value, div);
13515 value = size_binop (MULT_EXPR, value, div);
13518 return value;
13521 /* Likewise, but round down. */
13523 tree
13524 round_down (tree value, int divisor)
13526 tree div = NULL_TREE;
13528 gcc_assert (divisor > 0);
13529 if (divisor == 1)
13530 return value;
13532 /* See if VALUE is already a multiple of DIVISOR. If so, we don't
13533 have to do anything. Only do this when VALUE is not a constant,
13534 because for a constant this check is more expensive than just
13535 doing the rounding. */
13536 if (TREE_CODE (value) != INTEGER_CST)
13538 div = build_int_cst (TREE_TYPE (value), divisor);
13540 if (multiple_of_p (TREE_TYPE (value), value, div))
13541 return value;
13544 /* If divisor is a power of two, simplify this to bit manipulation. */
13545 if (divisor == (divisor & -divisor))
13547 tree t;
13549 t = build_int_cst (TREE_TYPE (value), -divisor);
13550 value = size_binop (BIT_AND_EXPR, value, t);
13552 else
13554 if (!div)
13555 div = build_int_cst (TREE_TYPE (value), divisor);
13556 value = size_binop (FLOOR_DIV_EXPR, value, div);
13557 value = size_binop (MULT_EXPR, value, div);
13560 return value;
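/* Illustrative sketch (not part of the original file): with a
   power-of-two divisor both routines reduce to the bit tricks noted
   above, (VALUE + DIVISOR-1) & -DIVISOR and VALUE & -DIVISOR.  */
#if 0
static void
example_rounding (void)
{
  tree v, up, down;
  v = size_int (37);
  up = round_up (v, 8);      /* (37 + 7) & -8 == 40 */
  down = round_down (v, 8);  /* 37 & -8 == 32 */
  gcc_assert (tree_int_cst_equal (up, size_int (40)));
  gcc_assert (tree_int_cst_equal (down, size_int (32)));
}
#endif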
13563 /* Returns the pointer to the base of the object addressed by EXP and
13564 extracts the information about the offset of the access, storing it
13565 in *PBITPOS and *POFFSET. */
13567 static tree
13568 split_address_to_core_and_offset (tree exp,
13569 HOST_WIDE_INT *pbitpos, tree *poffset)
13571 tree core;
13572 enum machine_mode mode;
13573 int unsignedp, volatilep;
13574 HOST_WIDE_INT bitsize;
13576 if (TREE_CODE (exp) == ADDR_EXPR)
13578 core = get_inner_reference (TREE_OPERAND (exp, 0), &bitsize, pbitpos,
13579 poffset, &mode, &unsignedp, &volatilep,
13580 false);
13581 core = build_fold_addr_expr (core);
13583 else
13585 core = exp;
13586 *pbitpos = 0;
13587 *poffset = NULL_TREE;
13590 return core;
13593 /* Returns true if addresses of E1 and E2 differ by a constant, false
13594 otherwise. If they do, E1 - E2 is stored in *DIFF. */
13596 bool
13597 ptr_difference_const (tree e1, tree e2, HOST_WIDE_INT *diff)
13599 tree core1, core2;
13600 HOST_WIDE_INT bitpos1, bitpos2;
13601 tree toffset1, toffset2, tdiff, type;
13603 core1 = split_address_to_core_and_offset (e1, &bitpos1, &toffset1);
13604 core2 = split_address_to_core_and_offset (e2, &bitpos2, &toffset2);
13606 if (bitpos1 % BITS_PER_UNIT != 0
13607 || bitpos2 % BITS_PER_UNIT != 0
13608 || !operand_equal_p (core1, core2, 0))
13609 return false;
13611 if (toffset1 && toffset2)
13613 type = TREE_TYPE (toffset1);
13614 if (type != TREE_TYPE (toffset2))
13615 toffset2 = fold_convert (type, toffset2);
13617 tdiff = fold_build2 (MINUS_EXPR, type, toffset1, toffset2);
13618 if (!cst_and_fits_in_hwi (tdiff))
13619 return false;
13621 *diff = int_cst_value (tdiff);
13623 else if (toffset1 || toffset2)
13625 /* If only one of the offsets is non-constant, the difference cannot
13626 be a constant. */
13627 return false;
13629 else
13630 *diff = 0;
13632 *diff += (bitpos1 - bitpos2) / BITS_PER_UNIT;
13633 return true;
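/* Illustrative sketch (not part of the original file): two addresses
   into the same array differ by a compile-time constant, here 3 bytes
   for a char array.  */
#if 0
static void
example_ptr_difference (void)
{
  tree a, e1, e2;
  HOST_WIDE_INT diff;
  a = build_decl (VAR_DECL, get_identifier ("a"),
		  build_array_type (char_type_node,
				    build_index_type (size_int (15))));
  e1 = build_fold_addr_expr (build4 (ARRAY_REF, char_type_node, a,
				     size_int (5), NULL_TREE, NULL_TREE));
  e2 = build_fold_addr_expr (build4 (ARRAY_REF, char_type_node, a,
				     size_int (2), NULL_TREE, NULL_TREE));
  if (ptr_difference_const (e1, e2, &diff))
    gcc_assert (diff == 3);
}
#endif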
13636 /* Simplify the floating point expression EXP when the sign of the
13637 result is not significant. Return NULL_TREE if no simplification
13638 is possible. */
13640 tree
13641 fold_strip_sign_ops (tree exp)
13643 tree arg0, arg1;
13645 switch (TREE_CODE (exp))
13647 case ABS_EXPR:
13648 case NEGATE_EXPR:
13649 arg0 = fold_strip_sign_ops (TREE_OPERAND (exp, 0));
13650 return arg0 ? arg0 : TREE_OPERAND (exp, 0);
13652 case MULT_EXPR:
13653 case RDIV_EXPR:
13654 if (HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (TREE_TYPE (exp))))
13655 return NULL_TREE;
13656 arg0 = fold_strip_sign_ops (TREE_OPERAND (exp, 0));
13657 arg1 = fold_strip_sign_ops (TREE_OPERAND (exp, 1));
13658 if (arg0 != NULL_TREE || arg1 != NULL_TREE)
13659 return fold_build2 (TREE_CODE (exp), TREE_TYPE (exp),
13660 arg0 ? arg0 : TREE_OPERAND (exp, 0),
13661 arg1 ? arg1 : TREE_OPERAND (exp, 1));
13662 break;
13664 case COMPOUND_EXPR:
13665 arg0 = TREE_OPERAND (exp, 0);
13666 arg1 = fold_strip_sign_ops (TREE_OPERAND (exp, 1));
13667 if (arg1)
13668 return fold_build2 (COMPOUND_EXPR, TREE_TYPE (exp), arg0, arg1);
13669 break;
13671 case COND_EXPR:
13672 arg0 = fold_strip_sign_ops (TREE_OPERAND (exp, 1));
13673 arg1 = fold_strip_sign_ops (TREE_OPERAND (exp, 2));
13674 if (arg0 || arg1)
13675 return fold_build3 (COND_EXPR, TREE_TYPE (exp), TREE_OPERAND (exp, 0),
13676 arg0 ? arg0 : TREE_OPERAND (exp, 1),
13677 arg1 ? arg1 : TREE_OPERAND (exp, 2));
13678 break;
13680 case CALL_EXPR:
13682 const enum built_in_function fcode = builtin_mathfn_code (exp);
13683 switch (fcode)
13685 CASE_FLT_FN (BUILT_IN_COPYSIGN):
13686 /* Strip copysign function call, return the 1st argument. */
13687 arg0 = TREE_VALUE (TREE_OPERAND (exp, 1));
13688 arg1 = TREE_VALUE (TREE_CHAIN (TREE_OPERAND (exp, 1)));
13689 return omit_one_operand (TREE_TYPE (exp), arg0, arg1);
13691 default:
13692 /* Strip sign ops from the argument of "odd" math functions. */
13693 if (negate_mathfn_p (fcode))
13695 arg0 = fold_strip_sign_ops (TREE_VALUE (TREE_OPERAND (exp, 1)));
13696 if (arg0)
13697 return build_function_call_expr (get_callee_fndecl (exp),
13698 build_tree_list (NULL_TREE,
13699 arg0));
13701 break;
13704 break;
13706 default:
13707 break;
13709 return NULL_TREE;
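/* Illustrative sketch (not part of the original file): when only the
   magnitude matters, a NEGATE_EXPR is stripped and the operand returned
   unchanged.  */
#if 0
static void
example_strip_sign_ops (void)
{
  tree x, neg, stripped;
  x = build_decl (VAR_DECL, get_identifier ("x"), double_type_node);
  neg = build1 (NEGATE_EXPR, double_type_node, x);
  /* Just x: -x and x have the same absolute value.  */
  stripped = fold_strip_sign_ops (neg);
  gcc_assert (stripped == x);
}
#endif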