tree.h: Include real.h and fixed-value.h as basic datatypes.
[official-gcc.git] / gcc / fold-const.c
blob65ded14e95fe2a5f08a3e97840e09025a4f6102f
1 /* Fold a constant sub-tree into a single node for C-compiler
2 Copyright (C) 1987, 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998, 1999,
3 2000, 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009, 2010
4 Free Software Foundation, Inc.
6 This file is part of GCC.
8 GCC is free software; you can redistribute it and/or modify it under
9 the terms of the GNU General Public License as published by the Free
10 Software Foundation; either version 3, or (at your option) any later
11 version.
13 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
14 WARRANTY; without even the implied warranty of MERCHANTABILITY or
15 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
16 for more details.
18 You should have received a copy of the GNU General Public License
19 along with GCC; see the file COPYING3. If not see
20 <http://www.gnu.org/licenses/>. */
22 /*@@ This file should be rewritten to use an arbitrary precision
23 @@ representation for "struct tree_int_cst" and "struct tree_real_cst".
24 @@ Perhaps the routines could also be used for bc/dc, and made a lib.
25 @@ The routines that translate from the ap rep should
26 @@ warn if precision et. al. is lost.
27 @@ This would also make life easier when this technology is used
28 @@ for cross-compilers. */
30 /* The entry points in this file are fold, size_int_wide, size_binop
31 and force_fit_type_double.
33 fold takes a tree as argument and returns a simplified tree.
35 size_binop takes a tree code for an arithmetic operation
36 and two operands that are trees, and produces a tree for the
37 result, assuming the type comes from `sizetype'.
39 size_int takes an integer value, and creates a tree constant
40 with type from `sizetype'.
42 force_fit_type_double takes a constant, an overflowable flag and a
43 prior overflow indicator. It forces the value to fit the type and
44 sets TREE_OVERFLOW.
46 Note: Since the folders get called on non-gimple code as well as
47 gimple code, we need to handle GIMPLE tuples as well as their
48 corresponding tree equivalents. */
50 #include "config.h"
51 #include "system.h"
52 #include "coretypes.h"
53 #include "tm.h"
54 #include "flags.h"
55 #include "tree.h"
56 #include "rtl.h"
57 #include "expr.h"
58 #include "tm_p.h"
59 #include "target.h"
60 #include "toplev.h"
61 #include "intl.h"
62 #include "ggc.h"
63 #include "hashtab.h"
64 #include "langhooks.h"
65 #include "md5.h"
66 #include "gimple.h"
68 /* Nonzero if we are folding constants inside an initializer; zero
69 otherwise. */
70 int folding_initializer = 0;
72 /* The following constants represent a bit based encoding of GCC's
73 comparison operators. This encoding simplifies transformations
74 on relational comparison operators, such as AND and OR. */
75 enum comparison_code {
76 COMPCODE_FALSE = 0,
77 COMPCODE_LT = 1,
78 COMPCODE_EQ = 2,
79 COMPCODE_LE = 3,
80 COMPCODE_GT = 4,
81 COMPCODE_LTGT = 5,
82 COMPCODE_GE = 6,
83 COMPCODE_ORD = 7,
84 COMPCODE_UNORD = 8,
85 COMPCODE_UNLT = 9,
86 COMPCODE_UNEQ = 10,
87 COMPCODE_UNLE = 11,
88 COMPCODE_UNGT = 12,
89 COMPCODE_NE = 13,
90 COMPCODE_UNGE = 14,
91 COMPCODE_TRUE = 15
94 static bool negate_mathfn_p (enum built_in_function);
95 static bool negate_expr_p (tree);
96 static tree negate_expr (tree);
97 static tree split_tree (tree, enum tree_code, tree *, tree *, tree *, int);
98 static tree associate_trees (location_t, tree, tree, enum tree_code, tree);
99 static tree const_binop (enum tree_code, tree, tree, int);
100 static enum comparison_code comparison_to_compcode (enum tree_code);
101 static enum tree_code compcode_to_comparison (enum comparison_code);
102 static int operand_equal_for_comparison_p (tree, tree, tree);
103 static int twoval_comparison_p (tree, tree *, tree *, int *);
104 static tree eval_subst (location_t, tree, tree, tree, tree, tree);
105 static tree pedantic_omit_one_operand_loc (location_t, tree, tree, tree);
106 static tree distribute_bit_expr (location_t, enum tree_code, tree, tree, tree);
107 static tree make_bit_field_ref (location_t, tree, tree,
108 HOST_WIDE_INT, HOST_WIDE_INT, int);
109 static tree optimize_bit_field_compare (location_t, enum tree_code,
110 tree, tree, tree);
111 static tree decode_field_reference (location_t, tree, HOST_WIDE_INT *,
112 HOST_WIDE_INT *,
113 enum machine_mode *, int *, int *,
114 tree *, tree *);
115 static int all_ones_mask_p (const_tree, int);
116 static tree sign_bit_p (tree, const_tree);
117 static int simple_operand_p (const_tree);
118 static tree range_binop (enum tree_code, tree, tree, int, tree, int);
119 static tree range_predecessor (tree);
120 static tree range_successor (tree);
121 extern tree make_range (tree, int *, tree *, tree *, bool *);
122 extern bool merge_ranges (int *, tree *, tree *, int, tree, tree, int,
123 tree, tree);
124 static tree fold_range_test (location_t, enum tree_code, tree, tree, tree);
125 static tree fold_cond_expr_with_comparison (location_t, tree, tree, tree, tree);
126 static tree unextend (tree, int, int, tree);
127 static tree fold_truthop (location_t, enum tree_code, tree, tree, tree);
128 static tree optimize_minmax_comparison (location_t, enum tree_code,
129 tree, tree, tree);
130 static tree extract_muldiv (tree, tree, enum tree_code, tree, bool *);
131 static tree extract_muldiv_1 (tree, tree, enum tree_code, tree, bool *);
132 static tree fold_binary_op_with_conditional_arg (location_t,
133 enum tree_code, tree,
134 tree, tree,
135 tree, tree, int);
136 static tree fold_mathfn_compare (location_t,
137 enum built_in_function, enum tree_code,
138 tree, tree, tree);
139 static tree fold_inf_compare (location_t, enum tree_code, tree, tree, tree);
140 static tree fold_div_compare (location_t, enum tree_code, tree, tree, tree);
141 static bool reorder_operands_p (const_tree, const_tree);
142 static tree fold_negate_const (tree, tree);
143 static tree fold_not_const (tree, tree);
144 static tree fold_relational_const (enum tree_code, tree, tree, tree);
145 static tree fold_convert_const (enum tree_code, tree, tree);
148 /* We know that A1 + B1 = SUM1, using 2's complement arithmetic and ignoring
149 overflow. Suppose A, B and SUM have the same respective signs as A1, B1,
150 and SUM1. Then this yields nonzero if overflow occurred during the
151 addition.
153 Overflow occurs if A and B have the same sign, but A and SUM differ in
154 sign. Use `^' to test whether signs differ, and `< 0' to isolate the
155 sign. */
156 #define OVERFLOW_SUM_SIGN(a, b, sum) ((~((a) ^ (b)) & ((a) ^ (sum))) < 0)
158 /* If ARG2 divides ARG1 with zero remainder, carries out the division
159 of type CODE and returns the quotient.
160 Otherwise returns NULL_TREE. */
162 tree
163 div_if_zero_remainder (enum tree_code code, const_tree arg1, const_tree arg2)
165 double_int quo, rem;
166 int uns;
168 /* The sign of the division is according to operand two, that
169 does the correct thing for POINTER_PLUS_EXPR where we want
170 a signed division. */
171 uns = TYPE_UNSIGNED (TREE_TYPE (arg2));
172 if (TREE_CODE (TREE_TYPE (arg2)) == INTEGER_TYPE
173 && TYPE_IS_SIZETYPE (TREE_TYPE (arg2)))
174 uns = false;
176 quo = double_int_divmod (tree_to_double_int (arg1),
177 tree_to_double_int (arg2),
178 uns, code, &rem);
180 if (double_int_zero_p (rem))
181 return build_int_cst_wide (TREE_TYPE (arg1), quo.low, quo.high);
183 return NULL_TREE;
186 /* This is nonzero if we should defer warnings about undefined
187 overflow. This facility exists because these warnings are a
188 special case. The code to estimate loop iterations does not want
189 to issue any warnings, since it works with expressions which do not
190 occur in user code. Various bits of cleanup code call fold(), but
191 only use the result if it has certain characteristics (e.g., is a
192 constant); that code only wants to issue a warning if the result is
193 used. */
195 static int fold_deferring_overflow_warnings;
197 /* If a warning about undefined overflow is deferred, this is the
198 warning. Note that this may cause us to turn two warnings into
199 one, but that is fine since it is sufficient to only give one
200 warning per expression. */
202 static const char* fold_deferred_overflow_warning;
204 /* If a warning about undefined overflow is deferred, this is the
205 level at which the warning should be emitted. */
207 static enum warn_strict_overflow_code fold_deferred_overflow_code;
209 /* Start deferring overflow warnings. We could use a stack here to
210 permit nested calls, but at present it is not necessary. */
212 void
213 fold_defer_overflow_warnings (void)
215 ++fold_deferring_overflow_warnings;
218 /* Stop deferring overflow warnings. If there is a pending warning,
219 and ISSUE is true, then issue the warning if appropriate. STMT is
220 the statement with which the warning should be associated (used for
221 location information); STMT may be NULL. CODE is the level of the
222 warning--a warn_strict_overflow_code value. This function will use
223 the smaller of CODE and the deferred code when deciding whether to
224 issue the warning. CODE may be zero to mean to always use the
225 deferred code. */
227 void
228 fold_undefer_overflow_warnings (bool issue, const_gimple stmt, int code)
230 const char *warnmsg;
231 location_t locus;
233 gcc_assert (fold_deferring_overflow_warnings > 0);
234 --fold_deferring_overflow_warnings;
235 if (fold_deferring_overflow_warnings > 0)
237 if (fold_deferred_overflow_warning != NULL
238 && code != 0
239 && code < (int) fold_deferred_overflow_code)
240 fold_deferred_overflow_code = (enum warn_strict_overflow_code) code;
241 return;
244 warnmsg = fold_deferred_overflow_warning;
245 fold_deferred_overflow_warning = NULL;
247 if (!issue || warnmsg == NULL)
248 return;
250 if (gimple_no_warning_p (stmt))
251 return;
253 /* Use the smallest code level when deciding to issue the
254 warning. */
255 if (code == 0 || code > (int) fold_deferred_overflow_code)
256 code = fold_deferred_overflow_code;
258 if (!issue_strict_overflow_warning (code))
259 return;
261 if (stmt == NULL)
262 locus = input_location;
263 else
264 locus = gimple_location (stmt);
265 warning_at (locus, OPT_Wstrict_overflow, "%s", warnmsg);
268 /* Stop deferring overflow warnings, ignoring any deferred
269 warnings. */
271 void
272 fold_undefer_and_ignore_overflow_warnings (void)
274 fold_undefer_overflow_warnings (false, NULL, 0);
277 /* Whether we are deferring overflow warnings. */
279 bool
280 fold_deferring_overflow_warnings_p (void)
282 return fold_deferring_overflow_warnings > 0;
285 /* This is called when we fold something based on the fact that signed
286 overflow is undefined. */
288 static void
289 fold_overflow_warning (const char* gmsgid, enum warn_strict_overflow_code wc)
291 if (fold_deferring_overflow_warnings > 0)
293 if (fold_deferred_overflow_warning == NULL
294 || wc < fold_deferred_overflow_code)
296 fold_deferred_overflow_warning = gmsgid;
297 fold_deferred_overflow_code = wc;
300 else if (issue_strict_overflow_warning (wc))
301 warning (OPT_Wstrict_overflow, gmsgid);
304 /* Return true if the built-in mathematical function specified by CODE
305 is odd, i.e. -f(x) == f(-x). */
307 static bool
308 negate_mathfn_p (enum built_in_function code)
310 switch (code)
312 CASE_FLT_FN (BUILT_IN_ASIN):
313 CASE_FLT_FN (BUILT_IN_ASINH):
314 CASE_FLT_FN (BUILT_IN_ATAN):
315 CASE_FLT_FN (BUILT_IN_ATANH):
316 CASE_FLT_FN (BUILT_IN_CASIN):
317 CASE_FLT_FN (BUILT_IN_CASINH):
318 CASE_FLT_FN (BUILT_IN_CATAN):
319 CASE_FLT_FN (BUILT_IN_CATANH):
320 CASE_FLT_FN (BUILT_IN_CBRT):
321 CASE_FLT_FN (BUILT_IN_CPROJ):
322 CASE_FLT_FN (BUILT_IN_CSIN):
323 CASE_FLT_FN (BUILT_IN_CSINH):
324 CASE_FLT_FN (BUILT_IN_CTAN):
325 CASE_FLT_FN (BUILT_IN_CTANH):
326 CASE_FLT_FN (BUILT_IN_ERF):
327 CASE_FLT_FN (BUILT_IN_LLROUND):
328 CASE_FLT_FN (BUILT_IN_LROUND):
329 CASE_FLT_FN (BUILT_IN_ROUND):
330 CASE_FLT_FN (BUILT_IN_SIN):
331 CASE_FLT_FN (BUILT_IN_SINH):
332 CASE_FLT_FN (BUILT_IN_TAN):
333 CASE_FLT_FN (BUILT_IN_TANH):
334 CASE_FLT_FN (BUILT_IN_TRUNC):
335 return true;
337 CASE_FLT_FN (BUILT_IN_LLRINT):
338 CASE_FLT_FN (BUILT_IN_LRINT):
339 CASE_FLT_FN (BUILT_IN_NEARBYINT):
340 CASE_FLT_FN (BUILT_IN_RINT):
341 return !flag_rounding_math;
343 default:
344 break;
346 return false;
349 /* Check whether we may negate an integer constant T without causing
350 overflow. */
352 bool
353 may_negate_without_overflow_p (const_tree t)
355 unsigned HOST_WIDE_INT val;
356 unsigned int prec;
357 tree type;
359 gcc_assert (TREE_CODE (t) == INTEGER_CST);
361 type = TREE_TYPE (t);
362 if (TYPE_UNSIGNED (type))
363 return false;
365 prec = TYPE_PRECISION (type);
366 if (prec > HOST_BITS_PER_WIDE_INT)
368 if (TREE_INT_CST_LOW (t) != 0)
369 return true;
370 prec -= HOST_BITS_PER_WIDE_INT;
371 val = TREE_INT_CST_HIGH (t);
373 else
374 val = TREE_INT_CST_LOW (t);
375 if (prec < HOST_BITS_PER_WIDE_INT)
376 val &= ((unsigned HOST_WIDE_INT) 1 << prec) - 1;
377 return val != ((unsigned HOST_WIDE_INT) 1 << (prec - 1));
380 /* Determine whether an expression T can be cheaply negated using
381 the function negate_expr without introducing undefined overflow. */
383 static bool
384 negate_expr_p (tree t)
386 tree type;
388 if (t == 0)
389 return false;
391 type = TREE_TYPE (t);
393 STRIP_SIGN_NOPS (t);
394 switch (TREE_CODE (t))
396 case INTEGER_CST:
397 if (TYPE_OVERFLOW_WRAPS (type))
398 return true;
400 /* Check that -CST will not overflow type. */
401 return may_negate_without_overflow_p (t);
402 case BIT_NOT_EXPR:
403 return (INTEGRAL_TYPE_P (type)
404 && TYPE_OVERFLOW_WRAPS (type));
406 case FIXED_CST:
407 case NEGATE_EXPR:
408 return true;
410 case REAL_CST:
411 /* We want to canonicalize to positive real constants. Pretend
412 that only negative ones can be easily negated. */
413 return REAL_VALUE_NEGATIVE (TREE_REAL_CST (t));
415 case COMPLEX_CST:
416 return negate_expr_p (TREE_REALPART (t))
417 && negate_expr_p (TREE_IMAGPART (t));
419 case COMPLEX_EXPR:
420 return negate_expr_p (TREE_OPERAND (t, 0))
421 && negate_expr_p (TREE_OPERAND (t, 1));
423 case CONJ_EXPR:
424 return negate_expr_p (TREE_OPERAND (t, 0));
426 case PLUS_EXPR:
427 if (HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type))
428 || HONOR_SIGNED_ZEROS (TYPE_MODE (type)))
429 return false;
430 /* -(A + B) -> (-B) - A. */
431 if (negate_expr_p (TREE_OPERAND (t, 1))
432 && reorder_operands_p (TREE_OPERAND (t, 0),
433 TREE_OPERAND (t, 1)))
434 return true;
435 /* -(A + B) -> (-A) - B. */
436 return negate_expr_p (TREE_OPERAND (t, 0));
438 case MINUS_EXPR:
439 /* We can't turn -(A-B) into B-A when we honor signed zeros. */
440 return !HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type))
441 && !HONOR_SIGNED_ZEROS (TYPE_MODE (type))
442 && reorder_operands_p (TREE_OPERAND (t, 0),
443 TREE_OPERAND (t, 1));
445 case MULT_EXPR:
446 if (TYPE_UNSIGNED (TREE_TYPE (t)))
447 break;
449 /* Fall through. */
451 case RDIV_EXPR:
452 if (! HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (TREE_TYPE (t))))
453 return negate_expr_p (TREE_OPERAND (t, 1))
454 || negate_expr_p (TREE_OPERAND (t, 0));
455 break;
457 case TRUNC_DIV_EXPR:
458 case ROUND_DIV_EXPR:
459 case FLOOR_DIV_EXPR:
460 case CEIL_DIV_EXPR:
461 case EXACT_DIV_EXPR:
462 /* In general we can't negate A / B, because if A is INT_MIN and
463 B is 1, we may turn this into INT_MIN / -1 which is undefined
464 and actually traps on some architectures. But if overflow is
465 undefined, we can negate, because - (INT_MIN / 1) is an
466 overflow. */
467 if (INTEGRAL_TYPE_P (TREE_TYPE (t))
468 && !TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (t)))
469 break;
470 return negate_expr_p (TREE_OPERAND (t, 1))
471 || negate_expr_p (TREE_OPERAND (t, 0));
473 case NOP_EXPR:
474 /* Negate -((double)float) as (double)(-float). */
475 if (TREE_CODE (type) == REAL_TYPE)
477 tree tem = strip_float_extensions (t);
478 if (tem != t)
479 return negate_expr_p (tem);
481 break;
483 case CALL_EXPR:
484 /* Negate -f(x) as f(-x). */
485 if (negate_mathfn_p (builtin_mathfn_code (t)))
486 return negate_expr_p (CALL_EXPR_ARG (t, 0));
487 break;
489 case RSHIFT_EXPR:
490 /* Optimize -((int)x >> 31) into (unsigned)x >> 31. */
491 if (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST)
493 tree op1 = TREE_OPERAND (t, 1);
494 if (TREE_INT_CST_HIGH (op1) == 0
495 && (unsigned HOST_WIDE_INT) (TYPE_PRECISION (type) - 1)
496 == TREE_INT_CST_LOW (op1))
497 return true;
499 break;
501 default:
502 break;
504 return false;
507 /* Given T, an expression, return a folded tree for -T or NULL_TREE, if no
508 simplification is possible.
509 If negate_expr_p would return true for T, NULL_TREE will never be
510 returned. */
512 static tree
513 fold_negate_expr (location_t loc, tree t)
515 tree type = TREE_TYPE (t);
516 tree tem;
518 switch (TREE_CODE (t))
520 /* Convert - (~A) to A + 1. */
521 case BIT_NOT_EXPR:
522 if (INTEGRAL_TYPE_P (type))
523 return fold_build2_loc (loc, PLUS_EXPR, type, TREE_OPERAND (t, 0),
524 build_int_cst (type, 1));
525 break;
527 case INTEGER_CST:
528 tem = fold_negate_const (t, type);
529 if (TREE_OVERFLOW (tem) == TREE_OVERFLOW (t)
530 || !TYPE_OVERFLOW_TRAPS (type))
531 return tem;
532 break;
534 case REAL_CST:
535 tem = fold_negate_const (t, type);
536 /* Two's complement FP formats, such as c4x, may overflow. */
537 if (!TREE_OVERFLOW (tem) || !flag_trapping_math)
538 return tem;
539 break;
541 case FIXED_CST:
542 tem = fold_negate_const (t, type);
543 return tem;
545 case COMPLEX_CST:
547 tree rpart = negate_expr (TREE_REALPART (t));
548 tree ipart = negate_expr (TREE_IMAGPART (t));
550 if ((TREE_CODE (rpart) == REAL_CST
551 && TREE_CODE (ipart) == REAL_CST)
552 || (TREE_CODE (rpart) == INTEGER_CST
553 && TREE_CODE (ipart) == INTEGER_CST))
554 return build_complex (type, rpart, ipart);
556 break;
558 case COMPLEX_EXPR:
559 if (negate_expr_p (t))
560 return fold_build2_loc (loc, COMPLEX_EXPR, type,
561 fold_negate_expr (loc, TREE_OPERAND (t, 0)),
562 fold_negate_expr (loc, TREE_OPERAND (t, 1)));
563 break;
565 case CONJ_EXPR:
566 if (negate_expr_p (t))
567 return fold_build1_loc (loc, CONJ_EXPR, type,
568 fold_negate_expr (loc, TREE_OPERAND (t, 0)));
569 break;
571 case NEGATE_EXPR:
572 return TREE_OPERAND (t, 0);
574 case PLUS_EXPR:
575 if (!HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type))
576 && !HONOR_SIGNED_ZEROS (TYPE_MODE (type)))
578 /* -(A + B) -> (-B) - A. */
579 if (negate_expr_p (TREE_OPERAND (t, 1))
580 && reorder_operands_p (TREE_OPERAND (t, 0),
581 TREE_OPERAND (t, 1)))
583 tem = negate_expr (TREE_OPERAND (t, 1));
584 return fold_build2_loc (loc, MINUS_EXPR, type,
585 tem, TREE_OPERAND (t, 0));
588 /* -(A + B) -> (-A) - B. */
589 if (negate_expr_p (TREE_OPERAND (t, 0)))
591 tem = negate_expr (TREE_OPERAND (t, 0));
592 return fold_build2_loc (loc, MINUS_EXPR, type,
593 tem, TREE_OPERAND (t, 1));
596 break;
598 case MINUS_EXPR:
599 /* - (A - B) -> B - A */
600 if (!HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type))
601 && !HONOR_SIGNED_ZEROS (TYPE_MODE (type))
602 && reorder_operands_p (TREE_OPERAND (t, 0), TREE_OPERAND (t, 1)))
603 return fold_build2_loc (loc, MINUS_EXPR, type,
604 TREE_OPERAND (t, 1), TREE_OPERAND (t, 0));
605 break;
607 case MULT_EXPR:
608 if (TYPE_UNSIGNED (type))
609 break;
611 /* Fall through. */
613 case RDIV_EXPR:
614 if (! HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type)))
616 tem = TREE_OPERAND (t, 1);
617 if (negate_expr_p (tem))
618 return fold_build2_loc (loc, TREE_CODE (t), type,
619 TREE_OPERAND (t, 0), negate_expr (tem));
620 tem = TREE_OPERAND (t, 0);
621 if (negate_expr_p (tem))
622 return fold_build2_loc (loc, TREE_CODE (t), type,
623 negate_expr (tem), TREE_OPERAND (t, 1));
625 break;
627 case TRUNC_DIV_EXPR:
628 case ROUND_DIV_EXPR:
629 case FLOOR_DIV_EXPR:
630 case CEIL_DIV_EXPR:
631 case EXACT_DIV_EXPR:
632 /* In general we can't negate A / B, because if A is INT_MIN and
633 B is 1, we may turn this into INT_MIN / -1 which is undefined
634 and actually traps on some architectures. But if overflow is
635 undefined, we can negate, because - (INT_MIN / 1) is an
636 overflow. */
637 if (!INTEGRAL_TYPE_P (type) || TYPE_OVERFLOW_UNDEFINED (type))
639 const char * const warnmsg = G_("assuming signed overflow does not "
640 "occur when negating a division");
641 tem = TREE_OPERAND (t, 1);
642 if (negate_expr_p (tem))
644 if (INTEGRAL_TYPE_P (type)
645 && (TREE_CODE (tem) != INTEGER_CST
646 || integer_onep (tem)))
647 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_MISC);
648 return fold_build2_loc (loc, TREE_CODE (t), type,
649 TREE_OPERAND (t, 0), negate_expr (tem));
651 tem = TREE_OPERAND (t, 0);
652 if (negate_expr_p (tem))
654 if (INTEGRAL_TYPE_P (type)
655 && (TREE_CODE (tem) != INTEGER_CST
656 || tree_int_cst_equal (tem, TYPE_MIN_VALUE (type))))
657 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_MISC);
658 return fold_build2_loc (loc, TREE_CODE (t), type,
659 negate_expr (tem), TREE_OPERAND (t, 1));
662 break;
664 case NOP_EXPR:
665 /* Convert -((double)float) into (double)(-float). */
666 if (TREE_CODE (type) == REAL_TYPE)
668 tem = strip_float_extensions (t);
669 if (tem != t && negate_expr_p (tem))
670 return fold_convert_loc (loc, type, negate_expr (tem));
672 break;
674 case CALL_EXPR:
675 /* Negate -f(x) as f(-x). */
676 if (negate_mathfn_p (builtin_mathfn_code (t))
677 && negate_expr_p (CALL_EXPR_ARG (t, 0)))
679 tree fndecl, arg;
681 fndecl = get_callee_fndecl (t);
682 arg = negate_expr (CALL_EXPR_ARG (t, 0));
683 return build_call_expr_loc (loc, fndecl, 1, arg);
685 break;
687 case RSHIFT_EXPR:
688 /* Optimize -((int)x >> 31) into (unsigned)x >> 31. */
689 if (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST)
691 tree op1 = TREE_OPERAND (t, 1);
692 if (TREE_INT_CST_HIGH (op1) == 0
693 && (unsigned HOST_WIDE_INT) (TYPE_PRECISION (type) - 1)
694 == TREE_INT_CST_LOW (op1))
696 tree ntype = TYPE_UNSIGNED (type)
697 ? signed_type_for (type)
698 : unsigned_type_for (type);
699 tree temp = fold_convert_loc (loc, ntype, TREE_OPERAND (t, 0));
700 temp = fold_build2_loc (loc, RSHIFT_EXPR, ntype, temp, op1);
701 return fold_convert_loc (loc, type, temp);
704 break;
706 default:
707 break;
710 return NULL_TREE;
713 /* Like fold_negate_expr, but return a NEGATE_EXPR tree, if T can not be
714 negated in a simpler way. Also allow for T to be NULL_TREE, in which case
715 return NULL_TREE. */
717 static tree
718 negate_expr (tree t)
720 tree type, tem;
721 location_t loc;
723 if (t == NULL_TREE)
724 return NULL_TREE;
726 loc = EXPR_LOCATION (t);
727 type = TREE_TYPE (t);
728 STRIP_SIGN_NOPS (t);
730 tem = fold_negate_expr (loc, t);
731 if (!tem)
733 tem = build1 (NEGATE_EXPR, TREE_TYPE (t), t);
734 SET_EXPR_LOCATION (tem, loc);
736 return fold_convert_loc (loc, type, tem);
739 /* Split a tree IN into a constant, literal and variable parts that could be
740 combined with CODE to make IN. "constant" means an expression with
741 TREE_CONSTANT but that isn't an actual constant. CODE must be a
742 commutative arithmetic operation. Store the constant part into *CONP,
743 the literal in *LITP and return the variable part. If a part isn't
744 present, set it to null. If the tree does not decompose in this way,
745 return the entire tree as the variable part and the other parts as null.
747 If CODE is PLUS_EXPR we also split trees that use MINUS_EXPR. In that
748 case, we negate an operand that was subtracted. Except if it is a
749 literal for which we use *MINUS_LITP instead.
751 If NEGATE_P is true, we are negating all of IN, again except a literal
752 for which we use *MINUS_LITP instead.
754 If IN is itself a literal or constant, return it as appropriate.
756 Note that we do not guarantee that any of the three values will be the
757 same type as IN, but they will have the same signedness and mode. */
759 static tree
760 split_tree (tree in, enum tree_code code, tree *conp, tree *litp,
761 tree *minus_litp, int negate_p)
763 tree var = 0;
765 *conp = 0;
766 *litp = 0;
767 *minus_litp = 0;
769 /* Strip any conversions that don't change the machine mode or signedness. */
770 STRIP_SIGN_NOPS (in);
772 if (TREE_CODE (in) == INTEGER_CST || TREE_CODE (in) == REAL_CST
773 || TREE_CODE (in) == FIXED_CST)
774 *litp = in;
775 else if (TREE_CODE (in) == code
776 || ((! FLOAT_TYPE_P (TREE_TYPE (in)) || flag_associative_math)
777 && ! SAT_FIXED_POINT_TYPE_P (TREE_TYPE (in))
778 /* We can associate addition and subtraction together (even
779 though the C standard doesn't say so) for integers because
780 the value is not affected. For reals, the value might be
781 affected, so we can't. */
782 && ((code == PLUS_EXPR && TREE_CODE (in) == MINUS_EXPR)
783 || (code == MINUS_EXPR && TREE_CODE (in) == PLUS_EXPR))))
785 tree op0 = TREE_OPERAND (in, 0);
786 tree op1 = TREE_OPERAND (in, 1);
787 int neg1_p = TREE_CODE (in) == MINUS_EXPR;
788 int neg_litp_p = 0, neg_conp_p = 0, neg_var_p = 0;
790 /* First see if either of the operands is a literal, then a constant. */
791 if (TREE_CODE (op0) == INTEGER_CST || TREE_CODE (op0) == REAL_CST
792 || TREE_CODE (op0) == FIXED_CST)
793 *litp = op0, op0 = 0;
794 else if (TREE_CODE (op1) == INTEGER_CST || TREE_CODE (op1) == REAL_CST
795 || TREE_CODE (op1) == FIXED_CST)
796 *litp = op1, neg_litp_p = neg1_p, op1 = 0;
798 if (op0 != 0 && TREE_CONSTANT (op0))
799 *conp = op0, op0 = 0;
800 else if (op1 != 0 && TREE_CONSTANT (op1))
801 *conp = op1, neg_conp_p = neg1_p, op1 = 0;
803 /* If we haven't dealt with either operand, this is not a case we can
804 decompose. Otherwise, VAR is either of the ones remaining, if any. */
805 if (op0 != 0 && op1 != 0)
806 var = in;
807 else if (op0 != 0)
808 var = op0;
809 else
810 var = op1, neg_var_p = neg1_p;
812 /* Now do any needed negations. */
813 if (neg_litp_p)
814 *minus_litp = *litp, *litp = 0;
815 if (neg_conp_p)
816 *conp = negate_expr (*conp);
817 if (neg_var_p)
818 var = negate_expr (var);
820 else if (TREE_CONSTANT (in))
821 *conp = in;
822 else
823 var = in;
825 if (negate_p)
827 if (*litp)
828 *minus_litp = *litp, *litp = 0;
829 else if (*minus_litp)
830 *litp = *minus_litp, *minus_litp = 0;
831 *conp = negate_expr (*conp);
832 var = negate_expr (var);
835 return var;
838 /* Re-associate trees split by the above function. T1 and T2 are
839 either expressions to associate or null. Return the new
840 expression, if any. LOC is the location of the new expression. If
841 we build an operation, do it in TYPE and with CODE. */
843 static tree
844 associate_trees (location_t loc, tree t1, tree t2, enum tree_code code, tree type)
846 tree tem;
848 if (t1 == 0)
849 return t2;
850 else if (t2 == 0)
851 return t1;
853 /* If either input is CODE, a PLUS_EXPR, or a MINUS_EXPR, don't
854 try to fold this since we will have infinite recursion. But do
855 deal with any NEGATE_EXPRs. */
856 if (TREE_CODE (t1) == code || TREE_CODE (t2) == code
857 || TREE_CODE (t1) == MINUS_EXPR || TREE_CODE (t2) == MINUS_EXPR)
859 if (code == PLUS_EXPR)
861 if (TREE_CODE (t1) == NEGATE_EXPR)
862 tem = build2 (MINUS_EXPR, type, fold_convert_loc (loc, type, t2),
863 fold_convert_loc (loc, type, TREE_OPERAND (t1, 0)));
864 else if (TREE_CODE (t2) == NEGATE_EXPR)
865 tem = build2 (MINUS_EXPR, type, fold_convert_loc (loc, type, t1),
866 fold_convert_loc (loc, type, TREE_OPERAND (t2, 0)));
867 else if (integer_zerop (t2))
868 return fold_convert_loc (loc, type, t1);
870 else if (code == MINUS_EXPR)
872 if (integer_zerop (t2))
873 return fold_convert_loc (loc, type, t1);
876 tem = build2 (code, type, fold_convert_loc (loc, type, t1),
877 fold_convert_loc (loc, type, t2));
878 goto associate_trees_exit;
881 return fold_build2_loc (loc, code, type, fold_convert_loc (loc, type, t1),
882 fold_convert_loc (loc, type, t2));
883 associate_trees_exit:
884 protected_set_expr_location (tem, loc);
885 return tem;
888 /* Check whether TYPE1 and TYPE2 are equivalent integer types, suitable
889 for use in int_const_binop, size_binop and size_diffop. */
891 static bool
892 int_binop_types_match_p (enum tree_code code, const_tree type1, const_tree type2)
894 if (TREE_CODE (type1) != INTEGER_TYPE && !POINTER_TYPE_P (type1))
895 return false;
896 if (TREE_CODE (type2) != INTEGER_TYPE && !POINTER_TYPE_P (type2))
897 return false;
899 switch (code)
901 case LSHIFT_EXPR:
902 case RSHIFT_EXPR:
903 case LROTATE_EXPR:
904 case RROTATE_EXPR:
905 return true;
907 default:
908 break;
911 return TYPE_UNSIGNED (type1) == TYPE_UNSIGNED (type2)
912 && TYPE_PRECISION (type1) == TYPE_PRECISION (type2)
913 && TYPE_MODE (type1) == TYPE_MODE (type2);
917 /* Combine two integer constants ARG1 and ARG2 under operation CODE
918 to produce a new constant. Return NULL_TREE if we don't know how
919 to evaluate CODE at compile-time.
921 If NOTRUNC is nonzero, do not truncate the result to fit the data type. */
923 tree
924 int_const_binop (enum tree_code code, const_tree arg1, const_tree arg2, int notrunc)
926 unsigned HOST_WIDE_INT int1l, int2l;
927 HOST_WIDE_INT int1h, int2h;
928 unsigned HOST_WIDE_INT low;
929 HOST_WIDE_INT hi;
930 unsigned HOST_WIDE_INT garbagel;
931 HOST_WIDE_INT garbageh;
932 tree t;
933 tree type = TREE_TYPE (arg1);
934 int uns = TYPE_UNSIGNED (type);
935 int is_sizetype
936 = (TREE_CODE (type) == INTEGER_TYPE && TYPE_IS_SIZETYPE (type));
937 int overflow = 0;
939 int1l = TREE_INT_CST_LOW (arg1);
940 int1h = TREE_INT_CST_HIGH (arg1);
941 int2l = TREE_INT_CST_LOW (arg2);
942 int2h = TREE_INT_CST_HIGH (arg2);
944 switch (code)
946 case BIT_IOR_EXPR:
947 low = int1l | int2l, hi = int1h | int2h;
948 break;
950 case BIT_XOR_EXPR:
951 low = int1l ^ int2l, hi = int1h ^ int2h;
952 break;
954 case BIT_AND_EXPR:
955 low = int1l & int2l, hi = int1h & int2h;
956 break;
958 case RSHIFT_EXPR:
959 int2l = -int2l;
960 case LSHIFT_EXPR:
961 /* It's unclear from the C standard whether shifts can overflow.
962 The following code ignores overflow; perhaps a C standard
963 interpretation ruling is needed. */
964 lshift_double (int1l, int1h, int2l, TYPE_PRECISION (type),
965 &low, &hi, !uns);
966 break;
968 case RROTATE_EXPR:
969 int2l = - int2l;
970 case LROTATE_EXPR:
971 lrotate_double (int1l, int1h, int2l, TYPE_PRECISION (type),
972 &low, &hi);
973 break;
975 case PLUS_EXPR:
976 overflow = add_double (int1l, int1h, int2l, int2h, &low, &hi);
977 break;
979 case MINUS_EXPR:
980 neg_double (int2l, int2h, &low, &hi);
981 add_double (int1l, int1h, low, hi, &low, &hi);
982 overflow = OVERFLOW_SUM_SIGN (hi, int2h, int1h);
983 break;
985 case MULT_EXPR:
986 overflow = mul_double (int1l, int1h, int2l, int2h, &low, &hi);
987 break;
989 case TRUNC_DIV_EXPR:
990 case FLOOR_DIV_EXPR: case CEIL_DIV_EXPR:
991 case EXACT_DIV_EXPR:
992 /* This is a shortcut for a common special case. */
993 if (int2h == 0 && (HOST_WIDE_INT) int2l > 0
994 && !TREE_OVERFLOW (arg1)
995 && !TREE_OVERFLOW (arg2)
996 && int1h == 0 && (HOST_WIDE_INT) int1l >= 0)
998 if (code == CEIL_DIV_EXPR)
999 int1l += int2l - 1;
1001 low = int1l / int2l, hi = 0;
1002 break;
1005 /* ... fall through ... */
1007 case ROUND_DIV_EXPR:
1008 if (int2h == 0 && int2l == 0)
1009 return NULL_TREE;
1010 if (int2h == 0 && int2l == 1)
1012 low = int1l, hi = int1h;
1013 break;
1015 if (int1l == int2l && int1h == int2h
1016 && ! (int1l == 0 && int1h == 0))
1018 low = 1, hi = 0;
1019 break;
1021 overflow = div_and_round_double (code, uns, int1l, int1h, int2l, int2h,
1022 &low, &hi, &garbagel, &garbageh);
1023 break;
1025 case TRUNC_MOD_EXPR:
1026 case FLOOR_MOD_EXPR: case CEIL_MOD_EXPR:
1027 /* This is a shortcut for a common special case. */
1028 if (int2h == 0 && (HOST_WIDE_INT) int2l > 0
1029 && !TREE_OVERFLOW (arg1)
1030 && !TREE_OVERFLOW (arg2)
1031 && int1h == 0 && (HOST_WIDE_INT) int1l >= 0)
1033 if (code == CEIL_MOD_EXPR)
1034 int1l += int2l - 1;
1035 low = int1l % int2l, hi = 0;
1036 break;
1039 /* ... fall through ... */
1041 case ROUND_MOD_EXPR:
1042 if (int2h == 0 && int2l == 0)
1043 return NULL_TREE;
1044 overflow = div_and_round_double (code, uns,
1045 int1l, int1h, int2l, int2h,
1046 &garbagel, &garbageh, &low, &hi);
1047 break;
1049 case MIN_EXPR:
1050 case MAX_EXPR:
1051 if (uns)
1052 low = (((unsigned HOST_WIDE_INT) int1h
1053 < (unsigned HOST_WIDE_INT) int2h)
1054 || (((unsigned HOST_WIDE_INT) int1h
1055 == (unsigned HOST_WIDE_INT) int2h)
1056 && int1l < int2l));
1057 else
1058 low = (int1h < int2h
1059 || (int1h == int2h && int1l < int2l));
1061 if (low == (code == MIN_EXPR))
1062 low = int1l, hi = int1h;
1063 else
1064 low = int2l, hi = int2h;
1065 break;
1067 default:
1068 return NULL_TREE;
1071 if (notrunc)
1073 t = build_int_cst_wide (TREE_TYPE (arg1), low, hi);
1075 /* Propagate overflow flags ourselves. */
1076 if (((!uns || is_sizetype) && overflow)
1077 | TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2))
1079 t = copy_node (t);
1080 TREE_OVERFLOW (t) = 1;
1083 else
1084 t = force_fit_type_double (TREE_TYPE (arg1), low, hi, 1,
1085 ((!uns || is_sizetype) && overflow)
1086 | TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2));
1088 return t;
1091 /* Combine two constants ARG1 and ARG2 under operation CODE to produce a new
1092 constant. We assume ARG1 and ARG2 have the same data type, or at least
1093 are the same kind of constant and the same machine mode. Return zero if
1094 combining the constants is not allowed in the current operating mode.
1096 If NOTRUNC is nonzero, do not truncate the result to fit the data type. */
1098 static tree
1099 const_binop (enum tree_code code, tree arg1, tree arg2, int notrunc)
1101 /* Sanity check for the recursive cases. */
1102 if (!arg1 || !arg2)
1103 return NULL_TREE;
1105 STRIP_NOPS (arg1);
1106 STRIP_NOPS (arg2);
1108 if (TREE_CODE (arg1) == INTEGER_CST)
1109 return int_const_binop (code, arg1, arg2, notrunc);
1111 if (TREE_CODE (arg1) == REAL_CST)
1113 enum machine_mode mode;
1114 REAL_VALUE_TYPE d1;
1115 REAL_VALUE_TYPE d2;
1116 REAL_VALUE_TYPE value;
1117 REAL_VALUE_TYPE result;
1118 bool inexact;
1119 tree t, type;
1121 /* The following codes are handled by real_arithmetic. */
1122 switch (code)
1124 case PLUS_EXPR:
1125 case MINUS_EXPR:
1126 case MULT_EXPR:
1127 case RDIV_EXPR:
1128 case MIN_EXPR:
1129 case MAX_EXPR:
1130 break;
1132 default:
1133 return NULL_TREE;
1136 d1 = TREE_REAL_CST (arg1);
1137 d2 = TREE_REAL_CST (arg2);
1139 type = TREE_TYPE (arg1);
1140 mode = TYPE_MODE (type);
1142 /* Don't perform operation if we honor signaling NaNs and
1143 either operand is a NaN. */
1144 if (HONOR_SNANS (mode)
1145 && (REAL_VALUE_ISNAN (d1) || REAL_VALUE_ISNAN (d2)))
1146 return NULL_TREE;
1148 /* Don't perform operation if it would raise a division
1149 by zero exception. */
1150 if (code == RDIV_EXPR
1151 && REAL_VALUES_EQUAL (d2, dconst0)
1152 && (flag_trapping_math || ! MODE_HAS_INFINITIES (mode)))
1153 return NULL_TREE;
1155 /* If either operand is a NaN, just return it. Otherwise, set up
1156 for floating-point trap; we return an overflow. */
1157 if (REAL_VALUE_ISNAN (d1))
1158 return arg1;
1159 else if (REAL_VALUE_ISNAN (d2))
1160 return arg2;
1162 inexact = real_arithmetic (&value, code, &d1, &d2);
1163 real_convert (&result, mode, &value);
1165 /* Don't constant fold this floating point operation if
1166 the result has overflowed and flag_trapping_math. */
1167 if (flag_trapping_math
1168 && MODE_HAS_INFINITIES (mode)
1169 && REAL_VALUE_ISINF (result)
1170 && !REAL_VALUE_ISINF (d1)
1171 && !REAL_VALUE_ISINF (d2))
1172 return NULL_TREE;
1174 /* Don't constant fold this floating point operation if the
1175 result may dependent upon the run-time rounding mode and
1176 flag_rounding_math is set, or if GCC's software emulation
1177 is unable to accurately represent the result. */
1178 if ((flag_rounding_math
1179 || (MODE_COMPOSITE_P (mode) && !flag_unsafe_math_optimizations))
1180 && (inexact || !real_identical (&result, &value)))
1181 return NULL_TREE;
1183 t = build_real (type, result);
1185 TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2);
1186 return t;
1189 if (TREE_CODE (arg1) == FIXED_CST)
1191 FIXED_VALUE_TYPE f1;
1192 FIXED_VALUE_TYPE f2;
1193 FIXED_VALUE_TYPE result;
1194 tree t, type;
1195 int sat_p;
1196 bool overflow_p;
1198 /* The following codes are handled by fixed_arithmetic. */
1199 switch (code)
1201 case PLUS_EXPR:
1202 case MINUS_EXPR:
1203 case MULT_EXPR:
1204 case TRUNC_DIV_EXPR:
1205 f2 = TREE_FIXED_CST (arg2);
1206 break;
1208 case LSHIFT_EXPR:
1209 case RSHIFT_EXPR:
1210 f2.data.high = TREE_INT_CST_HIGH (arg2);
1211 f2.data.low = TREE_INT_CST_LOW (arg2);
1212 f2.mode = SImode;
1213 break;
1215 default:
1216 return NULL_TREE;
1219 f1 = TREE_FIXED_CST (arg1);
1220 type = TREE_TYPE (arg1);
1221 sat_p = TYPE_SATURATING (type);
1222 overflow_p = fixed_arithmetic (&result, code, &f1, &f2, sat_p);
1223 t = build_fixed (type, result);
1224 /* Propagate overflow flags. */
1225 if (overflow_p | TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2))
1226 TREE_OVERFLOW (t) = 1;
1227 return t;
1230 if (TREE_CODE (arg1) == COMPLEX_CST)
1232 tree type = TREE_TYPE (arg1);
1233 tree r1 = TREE_REALPART (arg1);
1234 tree i1 = TREE_IMAGPART (arg1);
1235 tree r2 = TREE_REALPART (arg2);
1236 tree i2 = TREE_IMAGPART (arg2);
1237 tree real, imag;
1239 switch (code)
1241 case PLUS_EXPR:
1242 case MINUS_EXPR:
1243 real = const_binop (code, r1, r2, notrunc);
1244 imag = const_binop (code, i1, i2, notrunc);
1245 break;
1247 case MULT_EXPR:
1248 if (COMPLEX_FLOAT_TYPE_P (type))
1249 return do_mpc_arg2 (arg1, arg2, type,
1250 /* do_nonfinite= */ folding_initializer,
1251 mpc_mul);
1253 real = const_binop (MINUS_EXPR,
1254 const_binop (MULT_EXPR, r1, r2, notrunc),
1255 const_binop (MULT_EXPR, i1, i2, notrunc),
1256 notrunc);
1257 imag = const_binop (PLUS_EXPR,
1258 const_binop (MULT_EXPR, r1, i2, notrunc),
1259 const_binop (MULT_EXPR, i1, r2, notrunc),
1260 notrunc);
1261 break;
1263 case RDIV_EXPR:
1264 if (COMPLEX_FLOAT_TYPE_P (type))
1265 return do_mpc_arg2 (arg1, arg2, type,
1266 /* do_nonfinite= */ folding_initializer,
1267 mpc_div);
1268 /* Fallthru ... */
1269 case TRUNC_DIV_EXPR:
1270 case CEIL_DIV_EXPR:
1271 case FLOOR_DIV_EXPR:
1272 case ROUND_DIV_EXPR:
1273 if (flag_complex_method == 0)
1275 /* Keep this algorithm in sync with
1276 tree-complex.c:expand_complex_div_straight().
1278 Expand complex division to scalars, straightforward algorithm.
1279 a / b = ((ar*br + ai*bi)/t) + i((ai*br - ar*bi)/t)
1280 t = br*br + bi*bi
1282 tree magsquared
1283 = const_binop (PLUS_EXPR,
1284 const_binop (MULT_EXPR, r2, r2, notrunc),
1285 const_binop (MULT_EXPR, i2, i2, notrunc),
1286 notrunc);
1287 tree t1
1288 = const_binop (PLUS_EXPR,
1289 const_binop (MULT_EXPR, r1, r2, notrunc),
1290 const_binop (MULT_EXPR, i1, i2, notrunc),
1291 notrunc);
1292 tree t2
1293 = const_binop (MINUS_EXPR,
1294 const_binop (MULT_EXPR, i1, r2, notrunc),
1295 const_binop (MULT_EXPR, r1, i2, notrunc),
1296 notrunc);
1298 real = const_binop (code, t1, magsquared, notrunc);
1299 imag = const_binop (code, t2, magsquared, notrunc);
1301 else
1303 /* Keep this algorithm in sync with
1304 tree-complex.c:expand_complex_div_wide().
1306 Expand complex division to scalars, modified algorithm to minimize
1307 overflow with wide input ranges. */
1308 tree compare = fold_build2 (LT_EXPR, boolean_type_node,
1309 fold_abs_const (r2, TREE_TYPE (type)),
1310 fold_abs_const (i2, TREE_TYPE (type)));
1312 if (integer_nonzerop (compare))
1314 /* In the TRUE branch, we compute
1315 ratio = br/bi;
1316 div = (br * ratio) + bi;
1317 tr = (ar * ratio) + ai;
1318 ti = (ai * ratio) - ar;
1319 tr = tr / div;
1320 ti = ti / div; */
1321 tree ratio = const_binop (code, r2, i2, notrunc);
1322 tree div = const_binop (PLUS_EXPR, i2,
1323 const_binop (MULT_EXPR, r2, ratio,
1324 notrunc),
1325 notrunc);
1326 real = const_binop (MULT_EXPR, r1, ratio, notrunc);
1327 real = const_binop (PLUS_EXPR, real, i1, notrunc);
1328 real = const_binop (code, real, div, notrunc);
1330 imag = const_binop (MULT_EXPR, i1, ratio, notrunc);
1331 imag = const_binop (MINUS_EXPR, imag, r1, notrunc);
1332 imag = const_binop (code, imag, div, notrunc);
1334 else
1336 /* In the FALSE branch, we compute
1337 ratio = d/c;
1338 divisor = (d * ratio) + c;
1339 tr = (b * ratio) + a;
1340 ti = b - (a * ratio);
1341 tr = tr / div;
1342 ti = ti / div; */
1343 tree ratio = const_binop (code, i2, r2, notrunc);
1344 tree div = const_binop (PLUS_EXPR, r2,
1345 const_binop (MULT_EXPR, i2, ratio,
1346 notrunc),
1347 notrunc);
1349 real = const_binop (MULT_EXPR, i1, ratio, notrunc);
1350 real = const_binop (PLUS_EXPR, real, r1, notrunc);
1351 real = const_binop (code, real, div, notrunc);
1353 imag = const_binop (MULT_EXPR, r1, ratio, notrunc);
1354 imag = const_binop (MINUS_EXPR, i1, imag, notrunc);
1355 imag = const_binop (code, imag, div, notrunc);
1358 break;
1360 default:
1361 return NULL_TREE;
1364 if (real && imag)
1365 return build_complex (type, real, imag);
1368 if (TREE_CODE (arg1) == VECTOR_CST)
1370 tree type = TREE_TYPE(arg1);
1371 int count = TYPE_VECTOR_SUBPARTS (type), i;
1372 tree elements1, elements2, list = NULL_TREE;
1374 if(TREE_CODE(arg2) != VECTOR_CST)
1375 return NULL_TREE;
1377 elements1 = TREE_VECTOR_CST_ELTS (arg1);
1378 elements2 = TREE_VECTOR_CST_ELTS (arg2);
1380 for (i = 0; i < count; i++)
1382 tree elem1, elem2, elem;
1384 /* The trailing elements can be empty and should be treated as 0 */
1385 if(!elements1)
1386 elem1 = fold_convert_const (NOP_EXPR, TREE_TYPE (type), integer_zero_node);
1387 else
1389 elem1 = TREE_VALUE(elements1);
1390 elements1 = TREE_CHAIN (elements1);
1393 if(!elements2)
1394 elem2 = fold_convert_const (NOP_EXPR, TREE_TYPE (type), integer_zero_node);
1395 else
1397 elem2 = TREE_VALUE(elements2);
1398 elements2 = TREE_CHAIN (elements2);
1401 elem = const_binop (code, elem1, elem2, notrunc);
1403 /* It is possible that const_binop cannot handle the given
1404 code and return NULL_TREE */
1405 if(elem == NULL_TREE)
1406 return NULL_TREE;
1408 list = tree_cons (NULL_TREE, elem, list);
1410 return build_vector(type, nreverse(list));
1412 return NULL_TREE;
1415 /* Create a size type INT_CST node with NUMBER sign extended. KIND
1416 indicates which particular sizetype to create. */
1418 tree
1419 size_int_kind (HOST_WIDE_INT number, enum size_type_kind kind)
1421 return build_int_cst (sizetype_tab[(int) kind], number);
1424 /* Combine operands OP1 and OP2 with arithmetic operation CODE. CODE
1425 is a tree code. The type of the result is taken from the operands.
1426 Both must be equivalent integer types, ala int_binop_types_match_p.
1427 If the operands are constant, so is the result. */
1429 tree
1430 size_binop_loc (location_t loc, enum tree_code code, tree arg0, tree arg1)
1432 tree type = TREE_TYPE (arg0);
1434 if (arg0 == error_mark_node || arg1 == error_mark_node)
1435 return error_mark_node;
1437 gcc_assert (int_binop_types_match_p (code, TREE_TYPE (arg0),
1438 TREE_TYPE (arg1)));
1440 /* Handle the special case of two integer constants faster. */
1441 if (TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
1443 /* And some specific cases even faster than that. */
1444 if (code == PLUS_EXPR)
1446 if (integer_zerop (arg0) && !TREE_OVERFLOW (arg0))
1447 return arg1;
1448 if (integer_zerop (arg1) && !TREE_OVERFLOW (arg1))
1449 return arg0;
1451 else if (code == MINUS_EXPR)
1453 if (integer_zerop (arg1) && !TREE_OVERFLOW (arg1))
1454 return arg0;
1456 else if (code == MULT_EXPR)
1458 if (integer_onep (arg0) && !TREE_OVERFLOW (arg0))
1459 return arg1;
1462 /* Handle general case of two integer constants. */
1463 return int_const_binop (code, arg0, arg1, 0);
1466 return fold_build2_loc (loc, code, type, arg0, arg1);
1469 /* Given two values, either both of sizetype or both of bitsizetype,
1470 compute the difference between the two values. Return the value
1471 in signed type corresponding to the type of the operands. */
1473 tree
1474 size_diffop_loc (location_t loc, tree arg0, tree arg1)
1476 tree type = TREE_TYPE (arg0);
1477 tree ctype;
1479 gcc_assert (int_binop_types_match_p (MINUS_EXPR, TREE_TYPE (arg0),
1480 TREE_TYPE (arg1)));
1482 /* If the type is already signed, just do the simple thing. */
1483 if (!TYPE_UNSIGNED (type))
1484 return size_binop_loc (loc, MINUS_EXPR, arg0, arg1);
1486 if (type == sizetype)
1487 ctype = ssizetype;
1488 else if (type == bitsizetype)
1489 ctype = sbitsizetype;
1490 else
1491 ctype = signed_type_for (type);
1493 /* If either operand is not a constant, do the conversions to the signed
1494 type and subtract. The hardware will do the right thing with any
1495 overflow in the subtraction. */
1496 if (TREE_CODE (arg0) != INTEGER_CST || TREE_CODE (arg1) != INTEGER_CST)
1497 return size_binop_loc (loc, MINUS_EXPR,
1498 fold_convert_loc (loc, ctype, arg0),
1499 fold_convert_loc (loc, ctype, arg1));
1501 /* If ARG0 is larger than ARG1, subtract and return the result in CTYPE.
1502 Otherwise, subtract the other way, convert to CTYPE (we know that can't
1503 overflow) and negate (which can't either). Special-case a result
1504 of zero while we're here. */
1505 if (tree_int_cst_equal (arg0, arg1))
1506 return build_int_cst (ctype, 0);
1507 else if (tree_int_cst_lt (arg1, arg0))
1508 return fold_convert_loc (loc, ctype,
1509 size_binop_loc (loc, MINUS_EXPR, arg0, arg1));
1510 else
1511 return size_binop_loc (loc, MINUS_EXPR, build_int_cst (ctype, 0),
1512 fold_convert_loc (loc, ctype,
1513 size_binop_loc (loc,
1514 MINUS_EXPR,
1515 arg1, arg0)));
1518 /* A subroutine of fold_convert_const handling conversions of an
1519 INTEGER_CST to another integer type. */
1521 static tree
1522 fold_convert_const_int_from_int (tree type, const_tree arg1)
1524 tree t;
1526 /* Given an integer constant, make new constant with new type,
1527 appropriately sign-extended or truncated. */
1528 t = force_fit_type_double (type, TREE_INT_CST_LOW (arg1),
1529 TREE_INT_CST_HIGH (arg1),
1530 !POINTER_TYPE_P (TREE_TYPE (arg1)),
1531 (TREE_INT_CST_HIGH (arg1) < 0
1532 && (TYPE_UNSIGNED (type)
1533 < TYPE_UNSIGNED (TREE_TYPE (arg1))))
1534 | TREE_OVERFLOW (arg1));
1536 return t;
1539 /* A subroutine of fold_convert_const handling conversions a REAL_CST
1540 to an integer type. */
1542 static tree
1543 fold_convert_const_int_from_real (enum tree_code code, tree type, const_tree arg1)
1545 int overflow = 0;
1546 tree t;
1548 /* The following code implements the floating point to integer
1549 conversion rules required by the Java Language Specification,
1550 that IEEE NaNs are mapped to zero and values that overflow
1551 the target precision saturate, i.e. values greater than
1552 INT_MAX are mapped to INT_MAX, and values less than INT_MIN
1553 are mapped to INT_MIN. These semantics are allowed by the
1554 C and C++ standards that simply state that the behavior of
1555 FP-to-integer conversion is unspecified upon overflow. */
1557 double_int val;
1558 REAL_VALUE_TYPE r;
1559 REAL_VALUE_TYPE x = TREE_REAL_CST (arg1);
1561 switch (code)
1563 case FIX_TRUNC_EXPR:
1564 real_trunc (&r, VOIDmode, &x);
1565 break;
1567 default:
1568 gcc_unreachable ();
1571 /* If R is NaN, return zero and show we have an overflow. */
1572 if (REAL_VALUE_ISNAN (r))
1574 overflow = 1;
1575 val = double_int_zero;
1578 /* See if R is less than the lower bound or greater than the
1579 upper bound. */
1581 if (! overflow)
1583 tree lt = TYPE_MIN_VALUE (type);
1584 REAL_VALUE_TYPE l = real_value_from_int_cst (NULL_TREE, lt);
1585 if (REAL_VALUES_LESS (r, l))
1587 overflow = 1;
1588 val = tree_to_double_int (lt);
1592 if (! overflow)
1594 tree ut = TYPE_MAX_VALUE (type);
1595 if (ut)
1597 REAL_VALUE_TYPE u = real_value_from_int_cst (NULL_TREE, ut);
1598 if (REAL_VALUES_LESS (u, r))
1600 overflow = 1;
1601 val = tree_to_double_int (ut);
1606 if (! overflow)
1607 real_to_integer2 ((HOST_WIDE_INT *) &val.low, &val.high, &r);
1609 t = force_fit_type_double (type, val.low, val.high, -1,
1610 overflow | TREE_OVERFLOW (arg1));
1611 return t;
1614 /* A subroutine of fold_convert_const handling conversions of a
1615 FIXED_CST to an integer type. */
1617 static tree
1618 fold_convert_const_int_from_fixed (tree type, const_tree arg1)
1620 tree t;
1621 double_int temp, temp_trunc;
1622 unsigned int mode;
1624 /* Right shift FIXED_CST to temp by fbit. */
1625 temp = TREE_FIXED_CST (arg1).data;
1626 mode = TREE_FIXED_CST (arg1).mode;
1627 if (GET_MODE_FBIT (mode) < 2 * HOST_BITS_PER_WIDE_INT)
1629 temp = double_int_rshift (temp, GET_MODE_FBIT (mode),
1630 HOST_BITS_PER_DOUBLE_INT,
1631 SIGNED_FIXED_POINT_MODE_P (mode));
1633 /* Left shift temp to temp_trunc by fbit. */
1634 temp_trunc = double_int_lshift (temp, GET_MODE_FBIT (mode),
1635 HOST_BITS_PER_DOUBLE_INT,
1636 SIGNED_FIXED_POINT_MODE_P (mode));
1638 else
1640 temp = double_int_zero;
1641 temp_trunc = double_int_zero;
1644 /* If FIXED_CST is negative, we need to round the value toward 0.
1645 By checking if the fractional bits are not zero to add 1 to temp. */
1646 if (SIGNED_FIXED_POINT_MODE_P (mode)
1647 && double_int_negative_p (temp_trunc)
1648 && !double_int_equal_p (TREE_FIXED_CST (arg1).data, temp_trunc))
1649 temp = double_int_add (temp, double_int_one);
1651 /* Given a fixed-point constant, make new constant with new type,
1652 appropriately sign-extended or truncated. */
1653 t = force_fit_type_double (type, temp.low, temp.high, -1,
1654 (double_int_negative_p (temp)
1655 && (TYPE_UNSIGNED (type)
1656 < TYPE_UNSIGNED (TREE_TYPE (arg1))))
1657 | TREE_OVERFLOW (arg1));
1659 return t;
1662 /* A subroutine of fold_convert_const handling conversions a REAL_CST
1663 to another floating point type. */
1665 static tree
1666 fold_convert_const_real_from_real (tree type, const_tree arg1)
1668 REAL_VALUE_TYPE value;
1669 tree t;
1671 real_convert (&value, TYPE_MODE (type), &TREE_REAL_CST (arg1));
1672 t = build_real (type, value);
1674 /* If converting an infinity or NAN to a representation that doesn't
1675 have one, set the overflow bit so that we can produce some kind of
1676 error message at the appropriate point if necessary. It's not the
1677 most user-friendly message, but it's better than nothing. */
1678 if (REAL_VALUE_ISINF (TREE_REAL_CST (arg1))
1679 && !MODE_HAS_INFINITIES (TYPE_MODE (type)))
1680 TREE_OVERFLOW (t) = 1;
1681 else if (REAL_VALUE_ISNAN (TREE_REAL_CST (arg1))
1682 && !MODE_HAS_NANS (TYPE_MODE (type)))
1683 TREE_OVERFLOW (t) = 1;
1684 /* Regular overflow, conversion produced an infinity in a mode that
1685 can't represent them. */
1686 else if (!MODE_HAS_INFINITIES (TYPE_MODE (type))
1687 && REAL_VALUE_ISINF (value)
1688 && !REAL_VALUE_ISINF (TREE_REAL_CST (arg1)))
1689 TREE_OVERFLOW (t) = 1;
1690 else
1691 TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1);
1692 return t;
1695 /* A subroutine of fold_convert_const handling conversions a FIXED_CST
1696 to a floating point type. */
1698 static tree
1699 fold_convert_const_real_from_fixed (tree type, const_tree arg1)
1701 REAL_VALUE_TYPE value;
1702 tree t;
1704 real_convert_from_fixed (&value, TYPE_MODE (type), &TREE_FIXED_CST (arg1));
1705 t = build_real (type, value);
1707 TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1);
1708 return t;
1711 /* A subroutine of fold_convert_const handling conversions a FIXED_CST
1712 to another fixed-point type. */
1714 static tree
1715 fold_convert_const_fixed_from_fixed (tree type, const_tree arg1)
1717 FIXED_VALUE_TYPE value;
1718 tree t;
1719 bool overflow_p;
1721 overflow_p = fixed_convert (&value, TYPE_MODE (type), &TREE_FIXED_CST (arg1),
1722 TYPE_SATURATING (type));
1723 t = build_fixed (type, value);
1725 /* Propagate overflow flags. */
1726 if (overflow_p | TREE_OVERFLOW (arg1))
1727 TREE_OVERFLOW (t) = 1;
1728 return t;
1731 /* A subroutine of fold_convert_const handling conversions an INTEGER_CST
1732 to a fixed-point type. */
1734 static tree
1735 fold_convert_const_fixed_from_int (tree type, const_tree arg1)
1737 FIXED_VALUE_TYPE value;
1738 tree t;
1739 bool overflow_p;
1741 overflow_p = fixed_convert_from_int (&value, TYPE_MODE (type),
1742 TREE_INT_CST (arg1),
1743 TYPE_UNSIGNED (TREE_TYPE (arg1)),
1744 TYPE_SATURATING (type));
1745 t = build_fixed (type, value);
1747 /* Propagate overflow flags. */
1748 if (overflow_p | TREE_OVERFLOW (arg1))
1749 TREE_OVERFLOW (t) = 1;
1750 return t;
1753 /* A subroutine of fold_convert_const handling conversions a REAL_CST
1754 to a fixed-point type. */
1756 static tree
1757 fold_convert_const_fixed_from_real (tree type, const_tree arg1)
1759 FIXED_VALUE_TYPE value;
1760 tree t;
1761 bool overflow_p;
1763 overflow_p = fixed_convert_from_real (&value, TYPE_MODE (type),
1764 &TREE_REAL_CST (arg1),
1765 TYPE_SATURATING (type));
1766 t = build_fixed (type, value);
1768 /* Propagate overflow flags. */
1769 if (overflow_p | TREE_OVERFLOW (arg1))
1770 TREE_OVERFLOW (t) = 1;
1771 return t;
1774 /* Attempt to fold type conversion operation CODE of expression ARG1 to
1775 type TYPE. If no simplification can be done return NULL_TREE. */
1777 static tree
1778 fold_convert_const (enum tree_code code, tree type, tree arg1)
1780 if (TREE_TYPE (arg1) == type)
1781 return arg1;
1783 if (POINTER_TYPE_P (type) || INTEGRAL_TYPE_P (type)
1784 || TREE_CODE (type) == OFFSET_TYPE)
1786 if (TREE_CODE (arg1) == INTEGER_CST)
1787 return fold_convert_const_int_from_int (type, arg1);
1788 else if (TREE_CODE (arg1) == REAL_CST)
1789 return fold_convert_const_int_from_real (code, type, arg1);
1790 else if (TREE_CODE (arg1) == FIXED_CST)
1791 return fold_convert_const_int_from_fixed (type, arg1);
1793 else if (TREE_CODE (type) == REAL_TYPE)
1795 if (TREE_CODE (arg1) == INTEGER_CST)
1796 return build_real_from_int_cst (type, arg1);
1797 else if (TREE_CODE (arg1) == REAL_CST)
1798 return fold_convert_const_real_from_real (type, arg1);
1799 else if (TREE_CODE (arg1) == FIXED_CST)
1800 return fold_convert_const_real_from_fixed (type, arg1);
1802 else if (TREE_CODE (type) == FIXED_POINT_TYPE)
1804 if (TREE_CODE (arg1) == FIXED_CST)
1805 return fold_convert_const_fixed_from_fixed (type, arg1);
1806 else if (TREE_CODE (arg1) == INTEGER_CST)
1807 return fold_convert_const_fixed_from_int (type, arg1);
1808 else if (TREE_CODE (arg1) == REAL_CST)
1809 return fold_convert_const_fixed_from_real (type, arg1);
1811 return NULL_TREE;
1814 /* Construct a vector of zero elements of vector type TYPE. */
1816 static tree
1817 build_zero_vector (tree type)
1819 tree elem, list;
1820 int i, units;
1822 elem = fold_convert_const (NOP_EXPR, TREE_TYPE (type), integer_zero_node);
1823 units = TYPE_VECTOR_SUBPARTS (type);
1825 list = NULL_TREE;
1826 for (i = 0; i < units; i++)
1827 list = tree_cons (NULL_TREE, elem, list);
1828 return build_vector (type, list);
1831 /* Returns true, if ARG is convertible to TYPE using a NOP_EXPR. */
1833 bool
1834 fold_convertible_p (const_tree type, const_tree arg)
1836 tree orig = TREE_TYPE (arg);
1838 if (type == orig)
1839 return true;
1841 if (TREE_CODE (arg) == ERROR_MARK
1842 || TREE_CODE (type) == ERROR_MARK
1843 || TREE_CODE (orig) == ERROR_MARK)
1844 return false;
1846 if (TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (orig))
1847 return true;
1849 switch (TREE_CODE (type))
1851 case INTEGER_TYPE: case ENUMERAL_TYPE: case BOOLEAN_TYPE:
1852 case POINTER_TYPE: case REFERENCE_TYPE:
1853 case OFFSET_TYPE:
1854 if (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig)
1855 || TREE_CODE (orig) == OFFSET_TYPE)
1856 return true;
1857 return (TREE_CODE (orig) == VECTOR_TYPE
1858 && tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (orig)));
1860 case REAL_TYPE:
1861 case FIXED_POINT_TYPE:
1862 case COMPLEX_TYPE:
1863 case VECTOR_TYPE:
1864 case VOID_TYPE:
1865 return TREE_CODE (type) == TREE_CODE (orig);
1867 default:
1868 return false;
1872 /* Convert expression ARG to type TYPE. Used by the middle-end for
1873 simple conversions in preference to calling the front-end's convert. */
1875 tree
1876 fold_convert_loc (location_t loc, tree type, tree arg)
1878 tree orig = TREE_TYPE (arg);
1879 tree tem;
1881 if (type == orig)
1882 return arg;
1884 if (TREE_CODE (arg) == ERROR_MARK
1885 || TREE_CODE (type) == ERROR_MARK
1886 || TREE_CODE (orig) == ERROR_MARK)
1887 return error_mark_node;
1889 if (TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (orig))
1890 return fold_build1_loc (loc, NOP_EXPR, type, arg);
1892 switch (TREE_CODE (type))
1894 case POINTER_TYPE:
1895 case REFERENCE_TYPE:
1896 /* Handle conversions between pointers to different address spaces. */
1897 if (POINTER_TYPE_P (orig)
1898 && (TYPE_ADDR_SPACE (TREE_TYPE (type))
1899 != TYPE_ADDR_SPACE (TREE_TYPE (orig))))
1900 return fold_build1_loc (loc, ADDR_SPACE_CONVERT_EXPR, type, arg);
1901 /* fall through */
1903 case INTEGER_TYPE: case ENUMERAL_TYPE: case BOOLEAN_TYPE:
1904 case OFFSET_TYPE:
1905 if (TREE_CODE (arg) == INTEGER_CST)
1907 tem = fold_convert_const (NOP_EXPR, type, arg);
1908 if (tem != NULL_TREE)
1909 return tem;
1911 if (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig)
1912 || TREE_CODE (orig) == OFFSET_TYPE)
1913 return fold_build1_loc (loc, NOP_EXPR, type, arg);
1914 if (TREE_CODE (orig) == COMPLEX_TYPE)
1915 return fold_convert_loc (loc, type,
1916 fold_build1_loc (loc, REALPART_EXPR,
1917 TREE_TYPE (orig), arg));
1918 gcc_assert (TREE_CODE (orig) == VECTOR_TYPE
1919 && tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (orig)));
1920 return fold_build1_loc (loc, NOP_EXPR, type, arg);
1922 case REAL_TYPE:
1923 if (TREE_CODE (arg) == INTEGER_CST)
1925 tem = fold_convert_const (FLOAT_EXPR, type, arg);
1926 if (tem != NULL_TREE)
1927 return tem;
1929 else if (TREE_CODE (arg) == REAL_CST)
1931 tem = fold_convert_const (NOP_EXPR, type, arg);
1932 if (tem != NULL_TREE)
1933 return tem;
1935 else if (TREE_CODE (arg) == FIXED_CST)
1937 tem = fold_convert_const (FIXED_CONVERT_EXPR, type, arg);
1938 if (tem != NULL_TREE)
1939 return tem;
1942 switch (TREE_CODE (orig))
1944 case INTEGER_TYPE:
1945 case BOOLEAN_TYPE: case ENUMERAL_TYPE:
1946 case POINTER_TYPE: case REFERENCE_TYPE:
1947 return fold_build1_loc (loc, FLOAT_EXPR, type, arg);
1949 case REAL_TYPE:
1950 return fold_build1_loc (loc, NOP_EXPR, type, arg);
1952 case FIXED_POINT_TYPE:
1953 return fold_build1_loc (loc, FIXED_CONVERT_EXPR, type, arg);
1955 case COMPLEX_TYPE:
1956 tem = fold_build1_loc (loc, REALPART_EXPR, TREE_TYPE (orig), arg);
1957 return fold_convert_loc (loc, type, tem);
1959 default:
1960 gcc_unreachable ();
1963 case FIXED_POINT_TYPE:
1964 if (TREE_CODE (arg) == FIXED_CST || TREE_CODE (arg) == INTEGER_CST
1965 || TREE_CODE (arg) == REAL_CST)
1967 tem = fold_convert_const (FIXED_CONVERT_EXPR, type, arg);
1968 if (tem != NULL_TREE)
1969 goto fold_convert_exit;
1972 switch (TREE_CODE (orig))
1974 case FIXED_POINT_TYPE:
1975 case INTEGER_TYPE:
1976 case ENUMERAL_TYPE:
1977 case BOOLEAN_TYPE:
1978 case REAL_TYPE:
1979 return fold_build1_loc (loc, FIXED_CONVERT_EXPR, type, arg);
1981 case COMPLEX_TYPE:
1982 tem = fold_build1_loc (loc, REALPART_EXPR, TREE_TYPE (orig), arg);
1983 return fold_convert_loc (loc, type, tem);
1985 default:
1986 gcc_unreachable ();
1989 case COMPLEX_TYPE:
1990 switch (TREE_CODE (orig))
1992 case INTEGER_TYPE:
1993 case BOOLEAN_TYPE: case ENUMERAL_TYPE:
1994 case POINTER_TYPE: case REFERENCE_TYPE:
1995 case REAL_TYPE:
1996 case FIXED_POINT_TYPE:
1997 return fold_build2_loc (loc, COMPLEX_EXPR, type,
1998 fold_convert_loc (loc, TREE_TYPE (type), arg),
1999 fold_convert_loc (loc, TREE_TYPE (type),
2000 integer_zero_node));
2001 case COMPLEX_TYPE:
2003 tree rpart, ipart;
2005 if (TREE_CODE (arg) == COMPLEX_EXPR)
2007 rpart = fold_convert_loc (loc, TREE_TYPE (type),
2008 TREE_OPERAND (arg, 0));
2009 ipart = fold_convert_loc (loc, TREE_TYPE (type),
2010 TREE_OPERAND (arg, 1));
2011 return fold_build2_loc (loc, COMPLEX_EXPR, type, rpart, ipart);
2014 arg = save_expr (arg);
2015 rpart = fold_build1_loc (loc, REALPART_EXPR, TREE_TYPE (orig), arg);
2016 ipart = fold_build1_loc (loc, IMAGPART_EXPR, TREE_TYPE (orig), arg);
2017 rpart = fold_convert_loc (loc, TREE_TYPE (type), rpart);
2018 ipart = fold_convert_loc (loc, TREE_TYPE (type), ipart);
2019 return fold_build2_loc (loc, COMPLEX_EXPR, type, rpart, ipart);
2022 default:
2023 gcc_unreachable ();
2026 case VECTOR_TYPE:
2027 if (integer_zerop (arg))
2028 return build_zero_vector (type);
2029 gcc_assert (tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (orig)));
2030 gcc_assert (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig)
2031 || TREE_CODE (orig) == VECTOR_TYPE);
2032 return fold_build1_loc (loc, VIEW_CONVERT_EXPR, type, arg);
2034 case VOID_TYPE:
2035 tem = fold_ignored_result (arg);
2036 if (TREE_CODE (tem) == MODIFY_EXPR)
2037 goto fold_convert_exit;
2038 return fold_build1_loc (loc, NOP_EXPR, type, tem);
2040 default:
2041 gcc_unreachable ();
2043 fold_convert_exit:
2044 protected_set_expr_location (tem, loc);
2045 return tem;
2048 /* Return false if expr can be assumed not to be an lvalue, true
2049 otherwise. */
2051 static bool
2052 maybe_lvalue_p (const_tree x)
2054 /* We only need to wrap lvalue tree codes. */
2055 switch (TREE_CODE (x))
2057 case VAR_DECL:
2058 case PARM_DECL:
2059 case RESULT_DECL:
2060 case LABEL_DECL:
2061 case FUNCTION_DECL:
2062 case SSA_NAME:
2064 case COMPONENT_REF:
2065 case INDIRECT_REF:
2066 case ALIGN_INDIRECT_REF:
2067 case MISALIGNED_INDIRECT_REF:
2068 case ARRAY_REF:
2069 case ARRAY_RANGE_REF:
2070 case BIT_FIELD_REF:
2071 case OBJ_TYPE_REF:
2073 case REALPART_EXPR:
2074 case IMAGPART_EXPR:
2075 case PREINCREMENT_EXPR:
2076 case PREDECREMENT_EXPR:
2077 case SAVE_EXPR:
2078 case TRY_CATCH_EXPR:
2079 case WITH_CLEANUP_EXPR:
2080 case COMPOUND_EXPR:
2081 case MODIFY_EXPR:
2082 case TARGET_EXPR:
2083 case COND_EXPR:
2084 case BIND_EXPR:
2085 break;
2087 default:
2088 /* Assume the worst for front-end tree codes. */
2089 if ((int)TREE_CODE (x) >= NUM_TREE_CODES)
2090 break;
2091 return false;
2094 return true;
2097 /* Return an expr equal to X but certainly not valid as an lvalue. */
2099 tree
2100 non_lvalue_loc (location_t loc, tree x)
2102 /* While we are in GIMPLE, NON_LVALUE_EXPR doesn't mean anything to
2103 us. */
2104 if (in_gimple_form)
2105 return x;
2107 if (! maybe_lvalue_p (x))
2108 return x;
2109 x = build1 (NON_LVALUE_EXPR, TREE_TYPE (x), x);
2110 SET_EXPR_LOCATION (x, loc);
2111 return x;
2114 /* Nonzero means lvalues are limited to those valid in pedantic ANSI C.
2115 Zero means allow extended lvalues. */
2117 int pedantic_lvalues;
2119 /* When pedantic, return an expr equal to X but certainly not valid as a
2120 pedantic lvalue. Otherwise, return X. */
2122 static tree
2123 pedantic_non_lvalue_loc (location_t loc, tree x)
2125 if (pedantic_lvalues)
2126 return non_lvalue_loc (loc, x);
2127 protected_set_expr_location (x, loc);
2128 return x;
2131 /* Given a tree comparison code, return the code that is the logical inverse
2132 of the given code. It is not safe to do this for floating-point
2133 comparisons, except for NE_EXPR and EQ_EXPR, so we receive a machine mode
2134 as well: if reversing the comparison is unsafe, return ERROR_MARK. */
2136 enum tree_code
2137 invert_tree_comparison (enum tree_code code, bool honor_nans)
2139 if (honor_nans && flag_trapping_math)
2140 return ERROR_MARK;
2142 switch (code)
2144 case EQ_EXPR:
2145 return NE_EXPR;
2146 case NE_EXPR:
2147 return EQ_EXPR;
2148 case GT_EXPR:
2149 return honor_nans ? UNLE_EXPR : LE_EXPR;
2150 case GE_EXPR:
2151 return honor_nans ? UNLT_EXPR : LT_EXPR;
2152 case LT_EXPR:
2153 return honor_nans ? UNGE_EXPR : GE_EXPR;
2154 case LE_EXPR:
2155 return honor_nans ? UNGT_EXPR : GT_EXPR;
2156 case LTGT_EXPR:
2157 return UNEQ_EXPR;
2158 case UNEQ_EXPR:
2159 return LTGT_EXPR;
2160 case UNGT_EXPR:
2161 return LE_EXPR;
2162 case UNGE_EXPR:
2163 return LT_EXPR;
2164 case UNLT_EXPR:
2165 return GE_EXPR;
2166 case UNLE_EXPR:
2167 return GT_EXPR;
2168 case ORDERED_EXPR:
2169 return UNORDERED_EXPR;
2170 case UNORDERED_EXPR:
2171 return ORDERED_EXPR;
2172 default:
2173 gcc_unreachable ();
2177 /* Similar, but return the comparison that results if the operands are
2178 swapped. This is safe for floating-point. */
2180 enum tree_code
2181 swap_tree_comparison (enum tree_code code)
2183 switch (code)
2185 case EQ_EXPR:
2186 case NE_EXPR:
2187 case ORDERED_EXPR:
2188 case UNORDERED_EXPR:
2189 case LTGT_EXPR:
2190 case UNEQ_EXPR:
2191 return code;
2192 case GT_EXPR:
2193 return LT_EXPR;
2194 case GE_EXPR:
2195 return LE_EXPR;
2196 case LT_EXPR:
2197 return GT_EXPR;
2198 case LE_EXPR:
2199 return GE_EXPR;
2200 case UNGT_EXPR:
2201 return UNLT_EXPR;
2202 case UNGE_EXPR:
2203 return UNLE_EXPR;
2204 case UNLT_EXPR:
2205 return UNGT_EXPR;
2206 case UNLE_EXPR:
2207 return UNGE_EXPR;
2208 default:
2209 gcc_unreachable ();
2214 /* Convert a comparison tree code from an enum tree_code representation
2215 into a compcode bit-based encoding. This function is the inverse of
2216 compcode_to_comparison. */
2218 static enum comparison_code
2219 comparison_to_compcode (enum tree_code code)
2221 switch (code)
2223 case LT_EXPR:
2224 return COMPCODE_LT;
2225 case EQ_EXPR:
2226 return COMPCODE_EQ;
2227 case LE_EXPR:
2228 return COMPCODE_LE;
2229 case GT_EXPR:
2230 return COMPCODE_GT;
2231 case NE_EXPR:
2232 return COMPCODE_NE;
2233 case GE_EXPR:
2234 return COMPCODE_GE;
2235 case ORDERED_EXPR:
2236 return COMPCODE_ORD;
2237 case UNORDERED_EXPR:
2238 return COMPCODE_UNORD;
2239 case UNLT_EXPR:
2240 return COMPCODE_UNLT;
2241 case UNEQ_EXPR:
2242 return COMPCODE_UNEQ;
2243 case UNLE_EXPR:
2244 return COMPCODE_UNLE;
2245 case UNGT_EXPR:
2246 return COMPCODE_UNGT;
2247 case LTGT_EXPR:
2248 return COMPCODE_LTGT;
2249 case UNGE_EXPR:
2250 return COMPCODE_UNGE;
2251 default:
2252 gcc_unreachable ();
2256 /* Convert a compcode bit-based encoding of a comparison operator back
2257 to GCC's enum tree_code representation. This function is the
2258 inverse of comparison_to_compcode. */
2260 static enum tree_code
2261 compcode_to_comparison (enum comparison_code code)
2263 switch (code)
2265 case COMPCODE_LT:
2266 return LT_EXPR;
2267 case COMPCODE_EQ:
2268 return EQ_EXPR;
2269 case COMPCODE_LE:
2270 return LE_EXPR;
2271 case COMPCODE_GT:
2272 return GT_EXPR;
2273 case COMPCODE_NE:
2274 return NE_EXPR;
2275 case COMPCODE_GE:
2276 return GE_EXPR;
2277 case COMPCODE_ORD:
2278 return ORDERED_EXPR;
2279 case COMPCODE_UNORD:
2280 return UNORDERED_EXPR;
2281 case COMPCODE_UNLT:
2282 return UNLT_EXPR;
2283 case COMPCODE_UNEQ:
2284 return UNEQ_EXPR;
2285 case COMPCODE_UNLE:
2286 return UNLE_EXPR;
2287 case COMPCODE_UNGT:
2288 return UNGT_EXPR;
2289 case COMPCODE_LTGT:
2290 return LTGT_EXPR;
2291 case COMPCODE_UNGE:
2292 return UNGE_EXPR;
2293 default:
2294 gcc_unreachable ();
2298 /* Return a tree for the comparison which is the combination of
2299 doing the AND or OR (depending on CODE) of the two operations LCODE
2300 and RCODE on the identical operands LL_ARG and LR_ARG. Take into account
2301 the possibility of trapping if the mode has NaNs, and return NULL_TREE
2302 if this makes the transformation invalid. */
2304 tree
2305 combine_comparisons (location_t loc,
2306 enum tree_code code, enum tree_code lcode,
2307 enum tree_code rcode, tree truth_type,
2308 tree ll_arg, tree lr_arg)
2310 bool honor_nans = HONOR_NANS (TYPE_MODE (TREE_TYPE (ll_arg)));
2311 enum comparison_code lcompcode = comparison_to_compcode (lcode);
2312 enum comparison_code rcompcode = comparison_to_compcode (rcode);
2313 int compcode;
2315 switch (code)
2317 case TRUTH_AND_EXPR: case TRUTH_ANDIF_EXPR:
2318 compcode = lcompcode & rcompcode;
2319 break;
2321 case TRUTH_OR_EXPR: case TRUTH_ORIF_EXPR:
2322 compcode = lcompcode | rcompcode;
2323 break;
2325 default:
2326 return NULL_TREE;
2329 if (!honor_nans)
2331 /* Eliminate unordered comparisons, as well as LTGT and ORD
2332 which are not used unless the mode has NaNs. */
2333 compcode &= ~COMPCODE_UNORD;
2334 if (compcode == COMPCODE_LTGT)
2335 compcode = COMPCODE_NE;
2336 else if (compcode == COMPCODE_ORD)
2337 compcode = COMPCODE_TRUE;
2339 else if (flag_trapping_math)
2341 /* Check that the original operation and the optimized ones will trap
2342 under the same condition. */
2343 bool ltrap = (lcompcode & COMPCODE_UNORD) == 0
2344 && (lcompcode != COMPCODE_EQ)
2345 && (lcompcode != COMPCODE_ORD);
2346 bool rtrap = (rcompcode & COMPCODE_UNORD) == 0
2347 && (rcompcode != COMPCODE_EQ)
2348 && (rcompcode != COMPCODE_ORD);
2349 bool trap = (compcode & COMPCODE_UNORD) == 0
2350 && (compcode != COMPCODE_EQ)
2351 && (compcode != COMPCODE_ORD);
2353 /* In a short-circuited boolean expression the LHS might be
2354 such that the RHS, if evaluated, will never trap. For
2355 example, in ORD (x, y) && (x < y), we evaluate the RHS only
2356 if neither x nor y is NaN. (This is a mixed blessing: for
2357 example, the expression above will never trap, hence
2358 optimizing it to x < y would be invalid). */
2359 if ((code == TRUTH_ORIF_EXPR && (lcompcode & COMPCODE_UNORD))
2360 || (code == TRUTH_ANDIF_EXPR && !(lcompcode & COMPCODE_UNORD)))
2361 rtrap = false;
2363 /* If the comparison was short-circuited, and only the RHS
2364 trapped, we may now generate a spurious trap. */
2365 if (rtrap && !ltrap
2366 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR))
2367 return NULL_TREE;
2369 /* If we changed the conditions that cause a trap, we lose. */
2370 if ((ltrap || rtrap) != trap)
2371 return NULL_TREE;
2374 if (compcode == COMPCODE_TRUE)
2375 return constant_boolean_node (true, truth_type);
2376 else if (compcode == COMPCODE_FALSE)
2377 return constant_boolean_node (false, truth_type);
2378 else
2380 enum tree_code tcode;
2382 tcode = compcode_to_comparison ((enum comparison_code) compcode);
2383 return fold_build2_loc (loc, tcode, truth_type, ll_arg, lr_arg);
2387 /* Return nonzero if two operands (typically of the same tree node)
2388 are necessarily equal. If either argument has side-effects this
2389 function returns zero. FLAGS modifies behavior as follows:
2391 If OEP_ONLY_CONST is set, only return nonzero for constants.
2392 This function tests whether the operands are indistinguishable;
2393 it does not test whether they are equal using C's == operation.
2394 The distinction is important for IEEE floating point, because
2395 (1) -0.0 and 0.0 are distinguishable, but -0.0==0.0, and
2396 (2) two NaNs may be indistinguishable, but NaN!=NaN.
2398 If OEP_ONLY_CONST is unset, a VAR_DECL is considered equal to itself
2399 even though it may hold multiple values during a function.
2400 This is because a GCC tree node guarantees that nothing else is
2401 executed between the evaluation of its "operands" (which may often
2402 be evaluated in arbitrary order). Hence if the operands themselves
2403 don't side-effect, the VAR_DECLs, PARM_DECLs etc... must hold the
2404 same value in each operand/subexpression. Hence leaving OEP_ONLY_CONST
2405 unset means assuming isochronic (or instantaneous) tree equivalence.
2406 Unless comparing arbitrary expression trees, such as from different
2407 statements, this flag can usually be left unset.
2409 If OEP_PURE_SAME is set, then pure functions with identical arguments
2410 are considered the same. It is used when the caller has other ways
2411 to ensure that global memory is unchanged in between. */
2414 operand_equal_p (const_tree arg0, const_tree arg1, unsigned int flags)
2416 /* If either is ERROR_MARK, they aren't equal. */
2417 if (TREE_CODE (arg0) == ERROR_MARK || TREE_CODE (arg1) == ERROR_MARK
2418 || TREE_TYPE (arg0) == error_mark_node
2419 || TREE_TYPE (arg1) == error_mark_node)
2420 return 0;
2422 /* Similar, if either does not have a type (like a released SSA name),
2423 they aren't equal. */
2424 if (!TREE_TYPE (arg0) || !TREE_TYPE (arg1))
2425 return 0;
2427 /* Check equality of integer constants before bailing out due to
2428 precision differences. */
2429 if (TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
2430 return tree_int_cst_equal (arg0, arg1);
2432 /* If both types don't have the same signedness, then we can't consider
2433 them equal. We must check this before the STRIP_NOPS calls
2434 because they may change the signedness of the arguments. As pointers
2435 strictly don't have a signedness, require either two pointers or
2436 two non-pointers as well. */
2437 if (TYPE_UNSIGNED (TREE_TYPE (arg0)) != TYPE_UNSIGNED (TREE_TYPE (arg1))
2438 || POINTER_TYPE_P (TREE_TYPE (arg0)) != POINTER_TYPE_P (TREE_TYPE (arg1)))
2439 return 0;
2441 /* We cannot consider pointers to different address space equal. */
2442 if (POINTER_TYPE_P (TREE_TYPE (arg0)) && POINTER_TYPE_P (TREE_TYPE (arg1))
2443 && (TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (arg0)))
2444 != TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (arg1)))))
2445 return 0;
2447 /* If both types don't have the same precision, then it is not safe
2448 to strip NOPs. */
2449 if (TYPE_PRECISION (TREE_TYPE (arg0)) != TYPE_PRECISION (TREE_TYPE (arg1)))
2450 return 0;
2452 STRIP_NOPS (arg0);
2453 STRIP_NOPS (arg1);
2455 /* In case both args are comparisons but with different comparison
2456 code, try to swap the comparison operands of one arg to produce
2457 a match and compare that variant. */
2458 if (TREE_CODE (arg0) != TREE_CODE (arg1)
2459 && COMPARISON_CLASS_P (arg0)
2460 && COMPARISON_CLASS_P (arg1))
2462 enum tree_code swap_code = swap_tree_comparison (TREE_CODE (arg1));
2464 if (TREE_CODE (arg0) == swap_code)
2465 return operand_equal_p (TREE_OPERAND (arg0, 0),
2466 TREE_OPERAND (arg1, 1), flags)
2467 && operand_equal_p (TREE_OPERAND (arg0, 1),
2468 TREE_OPERAND (arg1, 0), flags);
2471 if (TREE_CODE (arg0) != TREE_CODE (arg1)
2472 /* This is needed for conversions and for COMPONENT_REF.
2473 Might as well play it safe and always test this. */
2474 || TREE_CODE (TREE_TYPE (arg0)) == ERROR_MARK
2475 || TREE_CODE (TREE_TYPE (arg1)) == ERROR_MARK
2476 || TYPE_MODE (TREE_TYPE (arg0)) != TYPE_MODE (TREE_TYPE (arg1)))
2477 return 0;
2479 /* If ARG0 and ARG1 are the same SAVE_EXPR, they are necessarily equal.
2480 We don't care about side effects in that case because the SAVE_EXPR
2481 takes care of that for us. In all other cases, two expressions are
2482 equal if they have no side effects. If we have two identical
2483 expressions with side effects that should be treated the same due
2484 to the only side effects being identical SAVE_EXPR's, that will
2485 be detected in the recursive calls below. */
2486 if (arg0 == arg1 && ! (flags & OEP_ONLY_CONST)
2487 && (TREE_CODE (arg0) == SAVE_EXPR
2488 || (! TREE_SIDE_EFFECTS (arg0) && ! TREE_SIDE_EFFECTS (arg1))))
2489 return 1;
2491 /* Next handle constant cases, those for which we can return 1 even
2492 if ONLY_CONST is set. */
2493 if (TREE_CONSTANT (arg0) && TREE_CONSTANT (arg1))
2494 switch (TREE_CODE (arg0))
2496 case INTEGER_CST:
2497 return tree_int_cst_equal (arg0, arg1);
2499 case FIXED_CST:
2500 return FIXED_VALUES_IDENTICAL (TREE_FIXED_CST (arg0),
2501 TREE_FIXED_CST (arg1));
2503 case REAL_CST:
2504 if (REAL_VALUES_IDENTICAL (TREE_REAL_CST (arg0),
2505 TREE_REAL_CST (arg1)))
2506 return 1;
2509 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0))))
2511 /* If we do not distinguish between signed and unsigned zero,
2512 consider them equal. */
2513 if (real_zerop (arg0) && real_zerop (arg1))
2514 return 1;
2516 return 0;
2518 case VECTOR_CST:
2520 tree v1, v2;
2522 v1 = TREE_VECTOR_CST_ELTS (arg0);
2523 v2 = TREE_VECTOR_CST_ELTS (arg1);
2524 while (v1 && v2)
2526 if (!operand_equal_p (TREE_VALUE (v1), TREE_VALUE (v2),
2527 flags))
2528 return 0;
2529 v1 = TREE_CHAIN (v1);
2530 v2 = TREE_CHAIN (v2);
2533 return v1 == v2;
2536 case COMPLEX_CST:
2537 return (operand_equal_p (TREE_REALPART (arg0), TREE_REALPART (arg1),
2538 flags)
2539 && operand_equal_p (TREE_IMAGPART (arg0), TREE_IMAGPART (arg1),
2540 flags));
2542 case STRING_CST:
2543 return (TREE_STRING_LENGTH (arg0) == TREE_STRING_LENGTH (arg1)
2544 && ! memcmp (TREE_STRING_POINTER (arg0),
2545 TREE_STRING_POINTER (arg1),
2546 TREE_STRING_LENGTH (arg0)));
2548 case ADDR_EXPR:
2549 return operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 0),
2551 default:
2552 break;
2555 if (flags & OEP_ONLY_CONST)
2556 return 0;
2558 /* Define macros to test an operand from arg0 and arg1 for equality and a
2559 variant that allows null and views null as being different from any
2560 non-null value. In the latter case, if either is null, the both
2561 must be; otherwise, do the normal comparison. */
2562 #define OP_SAME(N) operand_equal_p (TREE_OPERAND (arg0, N), \
2563 TREE_OPERAND (arg1, N), flags)
2565 #define OP_SAME_WITH_NULL(N) \
2566 ((!TREE_OPERAND (arg0, N) || !TREE_OPERAND (arg1, N)) \
2567 ? TREE_OPERAND (arg0, N) == TREE_OPERAND (arg1, N) : OP_SAME (N))
2569 switch (TREE_CODE_CLASS (TREE_CODE (arg0)))
2571 case tcc_unary:
2572 /* Two conversions are equal only if signedness and modes match. */
2573 switch (TREE_CODE (arg0))
2575 CASE_CONVERT:
2576 case FIX_TRUNC_EXPR:
2577 if (TYPE_UNSIGNED (TREE_TYPE (arg0))
2578 != TYPE_UNSIGNED (TREE_TYPE (arg1)))
2579 return 0;
2580 break;
2581 default:
2582 break;
2585 return OP_SAME (0);
2588 case tcc_comparison:
2589 case tcc_binary:
2590 if (OP_SAME (0) && OP_SAME (1))
2591 return 1;
2593 /* For commutative ops, allow the other order. */
2594 return (commutative_tree_code (TREE_CODE (arg0))
2595 && operand_equal_p (TREE_OPERAND (arg0, 0),
2596 TREE_OPERAND (arg1, 1), flags)
2597 && operand_equal_p (TREE_OPERAND (arg0, 1),
2598 TREE_OPERAND (arg1, 0), flags));
2600 case tcc_reference:
2601 /* If either of the pointer (or reference) expressions we are
2602 dereferencing contain a side effect, these cannot be equal. */
2603 if (TREE_SIDE_EFFECTS (arg0)
2604 || TREE_SIDE_EFFECTS (arg1))
2605 return 0;
2607 switch (TREE_CODE (arg0))
2609 case INDIRECT_REF:
2610 case ALIGN_INDIRECT_REF:
2611 case MISALIGNED_INDIRECT_REF:
2612 case REALPART_EXPR:
2613 case IMAGPART_EXPR:
2614 return OP_SAME (0);
2616 case ARRAY_REF:
2617 case ARRAY_RANGE_REF:
2618 /* Operands 2 and 3 may be null.
2619 Compare the array index by value if it is constant first as we
2620 may have different types but same value here. */
2621 return (OP_SAME (0)
2622 && (tree_int_cst_equal (TREE_OPERAND (arg0, 1),
2623 TREE_OPERAND (arg1, 1))
2624 || OP_SAME (1))
2625 && OP_SAME_WITH_NULL (2)
2626 && OP_SAME_WITH_NULL (3));
2628 case COMPONENT_REF:
2629 /* Handle operand 2 the same as for ARRAY_REF. Operand 0
2630 may be NULL when we're called to compare MEM_EXPRs. */
2631 return OP_SAME_WITH_NULL (0)
2632 && OP_SAME (1)
2633 && OP_SAME_WITH_NULL (2);
2635 case BIT_FIELD_REF:
2636 return OP_SAME (0) && OP_SAME (1) && OP_SAME (2);
2638 default:
2639 return 0;
2642 case tcc_expression:
2643 switch (TREE_CODE (arg0))
2645 case ADDR_EXPR:
2646 case TRUTH_NOT_EXPR:
2647 return OP_SAME (0);
2649 case TRUTH_ANDIF_EXPR:
2650 case TRUTH_ORIF_EXPR:
2651 return OP_SAME (0) && OP_SAME (1);
2653 case TRUTH_AND_EXPR:
2654 case TRUTH_OR_EXPR:
2655 case TRUTH_XOR_EXPR:
2656 if (OP_SAME (0) && OP_SAME (1))
2657 return 1;
2659 /* Otherwise take into account this is a commutative operation. */
2660 return (operand_equal_p (TREE_OPERAND (arg0, 0),
2661 TREE_OPERAND (arg1, 1), flags)
2662 && operand_equal_p (TREE_OPERAND (arg0, 1),
2663 TREE_OPERAND (arg1, 0), flags));
2665 case COND_EXPR:
2666 return OP_SAME (0) && OP_SAME (1) && OP_SAME (2);
2668 default:
2669 return 0;
2672 case tcc_vl_exp:
2673 switch (TREE_CODE (arg0))
2675 case CALL_EXPR:
2676 /* If the CALL_EXPRs call different functions, then they
2677 clearly can not be equal. */
2678 if (! operand_equal_p (CALL_EXPR_FN (arg0), CALL_EXPR_FN (arg1),
2679 flags))
2680 return 0;
2683 unsigned int cef = call_expr_flags (arg0);
2684 if (flags & OEP_PURE_SAME)
2685 cef &= ECF_CONST | ECF_PURE;
2686 else
2687 cef &= ECF_CONST;
2688 if (!cef)
2689 return 0;
2692 /* Now see if all the arguments are the same. */
2694 const_call_expr_arg_iterator iter0, iter1;
2695 const_tree a0, a1;
2696 for (a0 = first_const_call_expr_arg (arg0, &iter0),
2697 a1 = first_const_call_expr_arg (arg1, &iter1);
2698 a0 && a1;
2699 a0 = next_const_call_expr_arg (&iter0),
2700 a1 = next_const_call_expr_arg (&iter1))
2701 if (! operand_equal_p (a0, a1, flags))
2702 return 0;
2704 /* If we get here and both argument lists are exhausted
2705 then the CALL_EXPRs are equal. */
2706 return ! (a0 || a1);
2708 default:
2709 return 0;
2712 case tcc_declaration:
2713 /* Consider __builtin_sqrt equal to sqrt. */
2714 return (TREE_CODE (arg0) == FUNCTION_DECL
2715 && DECL_BUILT_IN (arg0) && DECL_BUILT_IN (arg1)
2716 && DECL_BUILT_IN_CLASS (arg0) == DECL_BUILT_IN_CLASS (arg1)
2717 && DECL_FUNCTION_CODE (arg0) == DECL_FUNCTION_CODE (arg1));
2719 default:
2720 return 0;
2723 #undef OP_SAME
2724 #undef OP_SAME_WITH_NULL
2727 /* Similar to operand_equal_p, but see if ARG0 might have been made by
2728 shorten_compare from ARG1 when ARG1 was being compared with OTHER.
2730 When in doubt, return 0. */
2732 static int
2733 operand_equal_for_comparison_p (tree arg0, tree arg1, tree other)
2735 int unsignedp1, unsignedpo;
2736 tree primarg0, primarg1, primother;
2737 unsigned int correct_width;
2739 if (operand_equal_p (arg0, arg1, 0))
2740 return 1;
2742 if (! INTEGRAL_TYPE_P (TREE_TYPE (arg0))
2743 || ! INTEGRAL_TYPE_P (TREE_TYPE (arg1)))
2744 return 0;
2746 /* Discard any conversions that don't change the modes of ARG0 and ARG1
2747 and see if the inner values are the same. This removes any
2748 signedness comparison, which doesn't matter here. */
2749 primarg0 = arg0, primarg1 = arg1;
2750 STRIP_NOPS (primarg0);
2751 STRIP_NOPS (primarg1);
2752 if (operand_equal_p (primarg0, primarg1, 0))
2753 return 1;
2755 /* Duplicate what shorten_compare does to ARG1 and see if that gives the
2756 actual comparison operand, ARG0.
2758 First throw away any conversions to wider types
2759 already present in the operands. */
2761 primarg1 = get_narrower (arg1, &unsignedp1);
2762 primother = get_narrower (other, &unsignedpo);
2764 correct_width = TYPE_PRECISION (TREE_TYPE (arg1));
2765 if (unsignedp1 == unsignedpo
2766 && TYPE_PRECISION (TREE_TYPE (primarg1)) < correct_width
2767 && TYPE_PRECISION (TREE_TYPE (primother)) < correct_width)
2769 tree type = TREE_TYPE (arg0);
2771 /* Make sure shorter operand is extended the right way
2772 to match the longer operand. */
2773 primarg1 = fold_convert (signed_or_unsigned_type_for
2774 (unsignedp1, TREE_TYPE (primarg1)), primarg1);
2776 if (operand_equal_p (arg0, fold_convert (type, primarg1), 0))
2777 return 1;
2780 return 0;
2783 /* See if ARG is an expression that is either a comparison or is performing
2784 arithmetic on comparisons. The comparisons must only be comparing
2785 two different values, which will be stored in *CVAL1 and *CVAL2; if
2786 they are nonzero it means that some operands have already been found.
2787 No variables may be used anywhere else in the expression except in the
2788 comparisons. If SAVE_P is true it means we removed a SAVE_EXPR around
2789 the expression and save_expr needs to be called with CVAL1 and CVAL2.
2791 If this is true, return 1. Otherwise, return zero. */
2793 static int
2794 twoval_comparison_p (tree arg, tree *cval1, tree *cval2, int *save_p)
2796 enum tree_code code = TREE_CODE (arg);
2797 enum tree_code_class tclass = TREE_CODE_CLASS (code);
2799 /* We can handle some of the tcc_expression cases here. */
2800 if (tclass == tcc_expression && code == TRUTH_NOT_EXPR)
2801 tclass = tcc_unary;
2802 else if (tclass == tcc_expression
2803 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR
2804 || code == COMPOUND_EXPR))
2805 tclass = tcc_binary;
2807 else if (tclass == tcc_expression && code == SAVE_EXPR
2808 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (arg, 0)))
2810 /* If we've already found a CVAL1 or CVAL2, this expression is
2811 two complex to handle. */
2812 if (*cval1 || *cval2)
2813 return 0;
2815 tclass = tcc_unary;
2816 *save_p = 1;
2819 switch (tclass)
2821 case tcc_unary:
2822 return twoval_comparison_p (TREE_OPERAND (arg, 0), cval1, cval2, save_p);
2824 case tcc_binary:
2825 return (twoval_comparison_p (TREE_OPERAND (arg, 0), cval1, cval2, save_p)
2826 && twoval_comparison_p (TREE_OPERAND (arg, 1),
2827 cval1, cval2, save_p));
2829 case tcc_constant:
2830 return 1;
2832 case tcc_expression:
2833 if (code == COND_EXPR)
2834 return (twoval_comparison_p (TREE_OPERAND (arg, 0),
2835 cval1, cval2, save_p)
2836 && twoval_comparison_p (TREE_OPERAND (arg, 1),
2837 cval1, cval2, save_p)
2838 && twoval_comparison_p (TREE_OPERAND (arg, 2),
2839 cval1, cval2, save_p));
2840 return 0;
2842 case tcc_comparison:
2843 /* First see if we can handle the first operand, then the second. For
2844 the second operand, we know *CVAL1 can't be zero. It must be that
2845 one side of the comparison is each of the values; test for the
2846 case where this isn't true by failing if the two operands
2847 are the same. */
2849 if (operand_equal_p (TREE_OPERAND (arg, 0),
2850 TREE_OPERAND (arg, 1), 0))
2851 return 0;
2853 if (*cval1 == 0)
2854 *cval1 = TREE_OPERAND (arg, 0);
2855 else if (operand_equal_p (*cval1, TREE_OPERAND (arg, 0), 0))
2857 else if (*cval2 == 0)
2858 *cval2 = TREE_OPERAND (arg, 0);
2859 else if (operand_equal_p (*cval2, TREE_OPERAND (arg, 0), 0))
2861 else
2862 return 0;
2864 if (operand_equal_p (*cval1, TREE_OPERAND (arg, 1), 0))
2866 else if (*cval2 == 0)
2867 *cval2 = TREE_OPERAND (arg, 1);
2868 else if (operand_equal_p (*cval2, TREE_OPERAND (arg, 1), 0))
2870 else
2871 return 0;
2873 return 1;
2875 default:
2876 return 0;
2880 /* ARG is a tree that is known to contain just arithmetic operations and
2881 comparisons. Evaluate the operations in the tree substituting NEW0 for
2882 any occurrence of OLD0 as an operand of a comparison and likewise for
2883 NEW1 and OLD1. */
2885 static tree
2886 eval_subst (location_t loc, tree arg, tree old0, tree new0,
2887 tree old1, tree new1)
2889 tree type = TREE_TYPE (arg);
2890 enum tree_code code = TREE_CODE (arg);
2891 enum tree_code_class tclass = TREE_CODE_CLASS (code);
2893 /* We can handle some of the tcc_expression cases here. */
2894 if (tclass == tcc_expression && code == TRUTH_NOT_EXPR)
2895 tclass = tcc_unary;
2896 else if (tclass == tcc_expression
2897 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR))
2898 tclass = tcc_binary;
2900 switch (tclass)
2902 case tcc_unary:
2903 return fold_build1_loc (loc, code, type,
2904 eval_subst (loc, TREE_OPERAND (arg, 0),
2905 old0, new0, old1, new1));
2907 case tcc_binary:
2908 return fold_build2_loc (loc, code, type,
2909 eval_subst (loc, TREE_OPERAND (arg, 0),
2910 old0, new0, old1, new1),
2911 eval_subst (loc, TREE_OPERAND (arg, 1),
2912 old0, new0, old1, new1));
2914 case tcc_expression:
2915 switch (code)
2917 case SAVE_EXPR:
2918 return eval_subst (loc, TREE_OPERAND (arg, 0), old0, new0,
2919 old1, new1);
2921 case COMPOUND_EXPR:
2922 return eval_subst (loc, TREE_OPERAND (arg, 1), old0, new0,
2923 old1, new1);
2925 case COND_EXPR:
2926 return fold_build3_loc (loc, code, type,
2927 eval_subst (loc, TREE_OPERAND (arg, 0),
2928 old0, new0, old1, new1),
2929 eval_subst (loc, TREE_OPERAND (arg, 1),
2930 old0, new0, old1, new1),
2931 eval_subst (loc, TREE_OPERAND (arg, 2),
2932 old0, new0, old1, new1));
2933 default:
2934 break;
2936 /* Fall through - ??? */
2938 case tcc_comparison:
2940 tree arg0 = TREE_OPERAND (arg, 0);
2941 tree arg1 = TREE_OPERAND (arg, 1);
2943 /* We need to check both for exact equality and tree equality. The
2944 former will be true if the operand has a side-effect. In that
2945 case, we know the operand occurred exactly once. */
2947 if (arg0 == old0 || operand_equal_p (arg0, old0, 0))
2948 arg0 = new0;
2949 else if (arg0 == old1 || operand_equal_p (arg0, old1, 0))
2950 arg0 = new1;
2952 if (arg1 == old0 || operand_equal_p (arg1, old0, 0))
2953 arg1 = new0;
2954 else if (arg1 == old1 || operand_equal_p (arg1, old1, 0))
2955 arg1 = new1;
2957 return fold_build2_loc (loc, code, type, arg0, arg1);
2960 default:
2961 return arg;
2965 /* Return a tree for the case when the result of an expression is RESULT
2966 converted to TYPE and OMITTED was previously an operand of the expression
2967 but is now not needed (e.g., we folded OMITTED * 0).
2969 If OMITTED has side effects, we must evaluate it. Otherwise, just do
2970 the conversion of RESULT to TYPE. */
2972 tree
2973 omit_one_operand_loc (location_t loc, tree type, tree result, tree omitted)
2975 tree t = fold_convert_loc (loc, type, result);
2977 /* If the resulting operand is an empty statement, just return the omitted
2978 statement casted to void. */
2979 if (IS_EMPTY_STMT (t) && TREE_SIDE_EFFECTS (omitted))
2981 t = build1 (NOP_EXPR, void_type_node, fold_ignored_result (omitted));
2982 goto omit_one_operand_exit;
2985 if (TREE_SIDE_EFFECTS (omitted))
2987 t = build2 (COMPOUND_EXPR, type, fold_ignored_result (omitted), t);
2988 goto omit_one_operand_exit;
2991 return non_lvalue_loc (loc, t);
2993 omit_one_operand_exit:
2994 protected_set_expr_location (t, loc);
2995 return t;
2998 /* Similar, but call pedantic_non_lvalue instead of non_lvalue. */
3000 static tree
3001 pedantic_omit_one_operand_loc (location_t loc, tree type, tree result,
3002 tree omitted)
3004 tree t = fold_convert_loc (loc, type, result);
3006 /* If the resulting operand is an empty statement, just return the omitted
3007 statement casted to void. */
3008 if (IS_EMPTY_STMT (t) && TREE_SIDE_EFFECTS (omitted))
3010 t = build1 (NOP_EXPR, void_type_node, fold_ignored_result (omitted));
3011 goto pedantic_omit_one_operand_exit;
3014 if (TREE_SIDE_EFFECTS (omitted))
3016 t = build2 (COMPOUND_EXPR, type, fold_ignored_result (omitted), t);
3017 goto pedantic_omit_one_operand_exit;
3020 return pedantic_non_lvalue_loc (loc, t);
3022 pedantic_omit_one_operand_exit:
3023 protected_set_expr_location (t, loc);
3024 return t;
3027 /* Return a tree for the case when the result of an expression is RESULT
3028 converted to TYPE and OMITTED1 and OMITTED2 were previously operands
3029 of the expression but are now not needed.
3031 If OMITTED1 or OMITTED2 has side effects, they must be evaluated.
3032 If both OMITTED1 and OMITTED2 have side effects, OMITTED1 is
3033 evaluated before OMITTED2. Otherwise, if neither has side effects,
3034 just do the conversion of RESULT to TYPE. */
3036 tree
3037 omit_two_operands_loc (location_t loc, tree type, tree result,
3038 tree omitted1, tree omitted2)
3040 tree t = fold_convert_loc (loc, type, result);
3042 if (TREE_SIDE_EFFECTS (omitted2))
3044 t = build2 (COMPOUND_EXPR, type, omitted2, t);
3045 SET_EXPR_LOCATION (t, loc);
3047 if (TREE_SIDE_EFFECTS (omitted1))
3049 t = build2 (COMPOUND_EXPR, type, omitted1, t);
3050 SET_EXPR_LOCATION (t, loc);
3053 return TREE_CODE (t) != COMPOUND_EXPR ? non_lvalue_loc (loc, t) : t;
3057 /* Return a simplified tree node for the truth-negation of ARG. This
3058 never alters ARG itself. We assume that ARG is an operation that
3059 returns a truth value (0 or 1).
3061 FIXME: one would think we would fold the result, but it causes
3062 problems with the dominator optimizer. */
3064 tree
3065 fold_truth_not_expr (location_t loc, tree arg)
3067 tree t, type = TREE_TYPE (arg);
3068 enum tree_code code = TREE_CODE (arg);
3069 location_t loc1, loc2;
3071 /* If this is a comparison, we can simply invert it, except for
3072 floating-point non-equality comparisons, in which case we just
3073 enclose a TRUTH_NOT_EXPR around what we have. */
3075 if (TREE_CODE_CLASS (code) == tcc_comparison)
3077 tree op_type = TREE_TYPE (TREE_OPERAND (arg, 0));
3078 if (FLOAT_TYPE_P (op_type)
3079 && flag_trapping_math
3080 && code != ORDERED_EXPR && code != UNORDERED_EXPR
3081 && code != NE_EXPR && code != EQ_EXPR)
3082 return NULL_TREE;
3084 code = invert_tree_comparison (code, HONOR_NANS (TYPE_MODE (op_type)));
3085 if (code == ERROR_MARK)
3086 return NULL_TREE;
3088 t = build2 (code, type, TREE_OPERAND (arg, 0), TREE_OPERAND (arg, 1));
3089 SET_EXPR_LOCATION (t, loc);
3090 return t;
3093 switch (code)
3095 case INTEGER_CST:
3096 return constant_boolean_node (integer_zerop (arg), type);
3098 case TRUTH_AND_EXPR:
3099 loc1 = EXPR_LOCATION (TREE_OPERAND (arg, 0));
3100 loc2 = EXPR_LOCATION (TREE_OPERAND (arg, 1));
3101 if (loc1 == UNKNOWN_LOCATION)
3102 loc1 = loc;
3103 if (loc2 == UNKNOWN_LOCATION)
3104 loc2 = loc;
3105 t = build2 (TRUTH_OR_EXPR, type,
3106 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)),
3107 invert_truthvalue_loc (loc2, TREE_OPERAND (arg, 1)));
3108 break;
3110 case TRUTH_OR_EXPR:
3111 loc1 = EXPR_LOCATION (TREE_OPERAND (arg, 0));
3112 loc2 = EXPR_LOCATION (TREE_OPERAND (arg, 1));
3113 if (loc1 == UNKNOWN_LOCATION)
3114 loc1 = loc;
3115 if (loc2 == UNKNOWN_LOCATION)
3116 loc2 = loc;
3117 t = build2 (TRUTH_AND_EXPR, type,
3118 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)),
3119 invert_truthvalue_loc (loc2, TREE_OPERAND (arg, 1)));
3120 break;
3122 case TRUTH_XOR_EXPR:
3123 /* Here we can invert either operand. We invert the first operand
3124 unless the second operand is a TRUTH_NOT_EXPR in which case our
3125 result is the XOR of the first operand with the inside of the
3126 negation of the second operand. */
3128 if (TREE_CODE (TREE_OPERAND (arg, 1)) == TRUTH_NOT_EXPR)
3129 t = build2 (TRUTH_XOR_EXPR, type, TREE_OPERAND (arg, 0),
3130 TREE_OPERAND (TREE_OPERAND (arg, 1), 0));
3131 else
3132 t = build2 (TRUTH_XOR_EXPR, type,
3133 invert_truthvalue_loc (loc, TREE_OPERAND (arg, 0)),
3134 TREE_OPERAND (arg, 1));
3135 break;
3137 case TRUTH_ANDIF_EXPR:
3138 loc1 = EXPR_LOCATION (TREE_OPERAND (arg, 0));
3139 loc2 = EXPR_LOCATION (TREE_OPERAND (arg, 1));
3140 if (loc1 == UNKNOWN_LOCATION)
3141 loc1 = loc;
3142 if (loc2 == UNKNOWN_LOCATION)
3143 loc2 = loc;
3144 t = build2 (TRUTH_ORIF_EXPR, type,
3145 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)),
3146 invert_truthvalue_loc (loc2, TREE_OPERAND (arg, 1)));
3147 break;
3149 case TRUTH_ORIF_EXPR:
3150 loc1 = EXPR_LOCATION (TREE_OPERAND (arg, 0));
3151 loc2 = EXPR_LOCATION (TREE_OPERAND (arg, 1));
3152 if (loc1 == UNKNOWN_LOCATION)
3153 loc1 = loc;
3154 if (loc2 == UNKNOWN_LOCATION)
3155 loc2 = loc;
3156 t = build2 (TRUTH_ANDIF_EXPR, type,
3157 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)),
3158 invert_truthvalue_loc (loc2, TREE_OPERAND (arg, 1)));
3159 break;
3161 case TRUTH_NOT_EXPR:
3162 return TREE_OPERAND (arg, 0);
3164 case COND_EXPR:
3166 tree arg1 = TREE_OPERAND (arg, 1);
3167 tree arg2 = TREE_OPERAND (arg, 2);
3169 loc1 = EXPR_LOCATION (TREE_OPERAND (arg, 1));
3170 loc2 = EXPR_LOCATION (TREE_OPERAND (arg, 2));
3171 if (loc1 == UNKNOWN_LOCATION)
3172 loc1 = loc;
3173 if (loc2 == UNKNOWN_LOCATION)
3174 loc2 = loc;
3176 /* A COND_EXPR may have a throw as one operand, which
3177 then has void type. Just leave void operands
3178 as they are. */
3179 t = build3 (COND_EXPR, type, TREE_OPERAND (arg, 0),
3180 VOID_TYPE_P (TREE_TYPE (arg1))
3181 ? arg1 : invert_truthvalue_loc (loc1, arg1),
3182 VOID_TYPE_P (TREE_TYPE (arg2))
3183 ? arg2 : invert_truthvalue_loc (loc2, arg2));
3184 break;
3187 case COMPOUND_EXPR:
3188 loc1 = EXPR_LOCATION (TREE_OPERAND (arg, 1));
3189 if (loc1 == UNKNOWN_LOCATION)
3190 loc1 = loc;
3191 t = build2 (COMPOUND_EXPR, type,
3192 TREE_OPERAND (arg, 0),
3193 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 1)));
3194 break;
3196 case NON_LVALUE_EXPR:
3197 loc1 = EXPR_LOCATION (TREE_OPERAND (arg, 0));
3198 if (loc1 == UNKNOWN_LOCATION)
3199 loc1 = loc;
3200 return invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0));
3202 CASE_CONVERT:
3203 if (TREE_CODE (TREE_TYPE (arg)) == BOOLEAN_TYPE)
3205 t = build1 (TRUTH_NOT_EXPR, type, arg);
3206 break;
3209 /* ... fall through ... */
3211 case FLOAT_EXPR:
3212 loc1 = EXPR_LOCATION (TREE_OPERAND (arg, 0));
3213 if (loc1 == UNKNOWN_LOCATION)
3214 loc1 = loc;
3215 t = build1 (TREE_CODE (arg), type,
3216 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)));
3217 break;
3219 case BIT_AND_EXPR:
3220 if (!integer_onep (TREE_OPERAND (arg, 1)))
3221 return NULL_TREE;
3222 t = build2 (EQ_EXPR, type, arg, build_int_cst (type, 0));
3223 break;
3225 case SAVE_EXPR:
3226 t = build1 (TRUTH_NOT_EXPR, type, arg);
3227 break;
3229 case CLEANUP_POINT_EXPR:
3230 loc1 = EXPR_LOCATION (TREE_OPERAND (arg, 0));
3231 if (loc1 == UNKNOWN_LOCATION)
3232 loc1 = loc;
3233 t = build1 (CLEANUP_POINT_EXPR, type,
3234 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)));
3235 break;
3237 default:
3238 t = NULL_TREE;
3239 break;
3242 if (t)
3243 SET_EXPR_LOCATION (t, loc);
3245 return t;
3248 /* Return a simplified tree node for the truth-negation of ARG. This
3249 never alters ARG itself. We assume that ARG is an operation that
3250 returns a truth value (0 or 1).
3252 FIXME: one would think we would fold the result, but it causes
3253 problems with the dominator optimizer. */
3255 tree
3256 invert_truthvalue_loc (location_t loc, tree arg)
3258 tree tem;
3260 if (TREE_CODE (arg) == ERROR_MARK)
3261 return arg;
3263 tem = fold_truth_not_expr (loc, arg);
3264 if (!tem)
3266 tem = build1 (TRUTH_NOT_EXPR, TREE_TYPE (arg), arg);
3267 SET_EXPR_LOCATION (tem, loc);
3270 return tem;
3273 /* Given a bit-wise operation CODE applied to ARG0 and ARG1, see if both
3274 operands are another bit-wise operation with a common input. If so,
3275 distribute the bit operations to save an operation and possibly two if
3276 constants are involved. For example, convert
3277 (A | B) & (A | C) into A | (B & C)
3278 Further simplification will occur if B and C are constants.
3280 If this optimization cannot be done, 0 will be returned. */
3282 static tree
3283 distribute_bit_expr (location_t loc, enum tree_code code, tree type,
3284 tree arg0, tree arg1)
3286 tree common;
3287 tree left, right;
3289 if (TREE_CODE (arg0) != TREE_CODE (arg1)
3290 || TREE_CODE (arg0) == code
3291 || (TREE_CODE (arg0) != BIT_AND_EXPR
3292 && TREE_CODE (arg0) != BIT_IOR_EXPR))
3293 return 0;
3295 if (operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 0), 0))
3297 common = TREE_OPERAND (arg0, 0);
3298 left = TREE_OPERAND (arg0, 1);
3299 right = TREE_OPERAND (arg1, 1);
3301 else if (operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 1), 0))
3303 common = TREE_OPERAND (arg0, 0);
3304 left = TREE_OPERAND (arg0, 1);
3305 right = TREE_OPERAND (arg1, 0);
3307 else if (operand_equal_p (TREE_OPERAND (arg0, 1), TREE_OPERAND (arg1, 0), 0))
3309 common = TREE_OPERAND (arg0, 1);
3310 left = TREE_OPERAND (arg0, 0);
3311 right = TREE_OPERAND (arg1, 1);
3313 else if (operand_equal_p (TREE_OPERAND (arg0, 1), TREE_OPERAND (arg1, 1), 0))
3315 common = TREE_OPERAND (arg0, 1);
3316 left = TREE_OPERAND (arg0, 0);
3317 right = TREE_OPERAND (arg1, 0);
3319 else
3320 return 0;
3322 common = fold_convert_loc (loc, type, common);
3323 left = fold_convert_loc (loc, type, left);
3324 right = fold_convert_loc (loc, type, right);
3325 return fold_build2_loc (loc, TREE_CODE (arg0), type, common,
3326 fold_build2_loc (loc, code, type, left, right));
3329 /* Knowing that ARG0 and ARG1 are both RDIV_EXPRs, simplify a binary operation
3330 with code CODE. This optimization is unsafe. */
3331 static tree
3332 distribute_real_division (location_t loc, enum tree_code code, tree type,
3333 tree arg0, tree arg1)
3335 bool mul0 = TREE_CODE (arg0) == MULT_EXPR;
3336 bool mul1 = TREE_CODE (arg1) == MULT_EXPR;
3338 /* (A / C) +- (B / C) -> (A +- B) / C. */
3339 if (mul0 == mul1
3340 && operand_equal_p (TREE_OPERAND (arg0, 1),
3341 TREE_OPERAND (arg1, 1), 0))
3342 return fold_build2_loc (loc, mul0 ? MULT_EXPR : RDIV_EXPR, type,
3343 fold_build2_loc (loc, code, type,
3344 TREE_OPERAND (arg0, 0),
3345 TREE_OPERAND (arg1, 0)),
3346 TREE_OPERAND (arg0, 1));
3348 /* (A / C1) +- (A / C2) -> A * (1 / C1 +- 1 / C2). */
3349 if (operand_equal_p (TREE_OPERAND (arg0, 0),
3350 TREE_OPERAND (arg1, 0), 0)
3351 && TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
3352 && TREE_CODE (TREE_OPERAND (arg1, 1)) == REAL_CST)
3354 REAL_VALUE_TYPE r0, r1;
3355 r0 = TREE_REAL_CST (TREE_OPERAND (arg0, 1));
3356 r1 = TREE_REAL_CST (TREE_OPERAND (arg1, 1));
3357 if (!mul0)
3358 real_arithmetic (&r0, RDIV_EXPR, &dconst1, &r0);
3359 if (!mul1)
3360 real_arithmetic (&r1, RDIV_EXPR, &dconst1, &r1);
3361 real_arithmetic (&r0, code, &r0, &r1);
3362 return fold_build2_loc (loc, MULT_EXPR, type,
3363 TREE_OPERAND (arg0, 0),
3364 build_real (type, r0));
3367 return NULL_TREE;
3370 /* Return a BIT_FIELD_REF of type TYPE to refer to BITSIZE bits of INNER
3371 starting at BITPOS. The field is unsigned if UNSIGNEDP is nonzero. */
3373 static tree
3374 make_bit_field_ref (location_t loc, tree inner, tree type,
3375 HOST_WIDE_INT bitsize, HOST_WIDE_INT bitpos, int unsignedp)
3377 tree result, bftype;
3379 if (bitpos == 0)
3381 tree size = TYPE_SIZE (TREE_TYPE (inner));
3382 if ((INTEGRAL_TYPE_P (TREE_TYPE (inner))
3383 || POINTER_TYPE_P (TREE_TYPE (inner)))
3384 && host_integerp (size, 0)
3385 && tree_low_cst (size, 0) == bitsize)
3386 return fold_convert_loc (loc, type, inner);
3389 bftype = type;
3390 if (TYPE_PRECISION (bftype) != bitsize
3391 || TYPE_UNSIGNED (bftype) == !unsignedp)
3392 bftype = build_nonstandard_integer_type (bitsize, 0);
3394 result = build3 (BIT_FIELD_REF, bftype, inner,
3395 size_int (bitsize), bitsize_int (bitpos));
3396 SET_EXPR_LOCATION (result, loc);
3398 if (bftype != type)
3399 result = fold_convert_loc (loc, type, result);
3401 return result;
3404 /* Optimize a bit-field compare.
3406 There are two cases: First is a compare against a constant and the
3407 second is a comparison of two items where the fields are at the same
3408 bit position relative to the start of a chunk (byte, halfword, word)
3409 large enough to contain it. In these cases we can avoid the shift
3410 implicit in bitfield extractions.
3412 For constants, we emit a compare of the shifted constant with the
3413 BIT_AND_EXPR of a mask and a byte, halfword, or word of the operand being
3414 compared. For two fields at the same position, we do the ANDs with the
3415 similar mask and compare the result of the ANDs.
3417 CODE is the comparison code, known to be either NE_EXPR or EQ_EXPR.
3418 COMPARE_TYPE is the type of the comparison, and LHS and RHS
3419 are the left and right operands of the comparison, respectively.
3421 If the optimization described above can be done, we return the resulting
3422 tree. Otherwise we return zero. */
3424 static tree
3425 optimize_bit_field_compare (location_t loc, enum tree_code code,
3426 tree compare_type, tree lhs, tree rhs)
3428 HOST_WIDE_INT lbitpos, lbitsize, rbitpos, rbitsize, nbitpos, nbitsize;
3429 tree type = TREE_TYPE (lhs);
3430 tree signed_type, unsigned_type;
3431 int const_p = TREE_CODE (rhs) == INTEGER_CST;
3432 enum machine_mode lmode, rmode, nmode;
3433 int lunsignedp, runsignedp;
3434 int lvolatilep = 0, rvolatilep = 0;
3435 tree linner, rinner = NULL_TREE;
3436 tree mask;
3437 tree offset;
3439 /* Get all the information about the extractions being done. If the bit size
3440 if the same as the size of the underlying object, we aren't doing an
3441 extraction at all and so can do nothing. We also don't want to
3442 do anything if the inner expression is a PLACEHOLDER_EXPR since we
3443 then will no longer be able to replace it. */
3444 linner = get_inner_reference (lhs, &lbitsize, &lbitpos, &offset, &lmode,
3445 &lunsignedp, &lvolatilep, false);
3446 if (linner == lhs || lbitsize == GET_MODE_BITSIZE (lmode) || lbitsize < 0
3447 || offset != 0 || TREE_CODE (linner) == PLACEHOLDER_EXPR)
3448 return 0;
3450 if (!const_p)
3452 /* If this is not a constant, we can only do something if bit positions,
3453 sizes, and signedness are the same. */
3454 rinner = get_inner_reference (rhs, &rbitsize, &rbitpos, &offset, &rmode,
3455 &runsignedp, &rvolatilep, false);
3457 if (rinner == rhs || lbitpos != rbitpos || lbitsize != rbitsize
3458 || lunsignedp != runsignedp || offset != 0
3459 || TREE_CODE (rinner) == PLACEHOLDER_EXPR)
3460 return 0;
3463 /* See if we can find a mode to refer to this field. We should be able to,
3464 but fail if we can't. */
3465 nmode = get_best_mode (lbitsize, lbitpos,
3466 const_p ? TYPE_ALIGN (TREE_TYPE (linner))
3467 : MIN (TYPE_ALIGN (TREE_TYPE (linner)),
3468 TYPE_ALIGN (TREE_TYPE (rinner))),
3469 word_mode, lvolatilep || rvolatilep);
3470 if (nmode == VOIDmode)
3471 return 0;
3473 /* Set signed and unsigned types of the precision of this mode for the
3474 shifts below. */
3475 signed_type = lang_hooks.types.type_for_mode (nmode, 0);
3476 unsigned_type = lang_hooks.types.type_for_mode (nmode, 1);
3478 /* Compute the bit position and size for the new reference and our offset
3479 within it. If the new reference is the same size as the original, we
3480 won't optimize anything, so return zero. */
3481 nbitsize = GET_MODE_BITSIZE (nmode);
3482 nbitpos = lbitpos & ~ (nbitsize - 1);
3483 lbitpos -= nbitpos;
3484 if (nbitsize == lbitsize)
3485 return 0;
3487 if (BYTES_BIG_ENDIAN)
3488 lbitpos = nbitsize - lbitsize - lbitpos;
3490 /* Make the mask to be used against the extracted field. */
3491 mask = build_int_cst_type (unsigned_type, -1);
3492 mask = const_binop (LSHIFT_EXPR, mask, size_int (nbitsize - lbitsize), 0);
3493 mask = const_binop (RSHIFT_EXPR, mask,
3494 size_int (nbitsize - lbitsize - lbitpos), 0);
3496 if (! const_p)
3497 /* If not comparing with constant, just rework the comparison
3498 and return. */
3499 return fold_build2_loc (loc, code, compare_type,
3500 fold_build2_loc (loc, BIT_AND_EXPR, unsigned_type,
3501 make_bit_field_ref (loc, linner,
3502 unsigned_type,
3503 nbitsize, nbitpos,
3505 mask),
3506 fold_build2_loc (loc, BIT_AND_EXPR, unsigned_type,
3507 make_bit_field_ref (loc, rinner,
3508 unsigned_type,
3509 nbitsize, nbitpos,
3511 mask));
3513 /* Otherwise, we are handling the constant case. See if the constant is too
3514 big for the field. Warn and return a tree of for 0 (false) if so. We do
3515 this not only for its own sake, but to avoid having to test for this
3516 error case below. If we didn't, we might generate wrong code.
3518 For unsigned fields, the constant shifted right by the field length should
3519 be all zero. For signed fields, the high-order bits should agree with
3520 the sign bit. */
3522 if (lunsignedp)
3524 if (! integer_zerop (const_binop (RSHIFT_EXPR,
3525 fold_convert_loc (loc,
3526 unsigned_type, rhs),
3527 size_int (lbitsize), 0)))
3529 warning (0, "comparison is always %d due to width of bit-field",
3530 code == NE_EXPR);
3531 return constant_boolean_node (code == NE_EXPR, compare_type);
3534 else
3536 tree tem = const_binop (RSHIFT_EXPR,
3537 fold_convert_loc (loc, signed_type, rhs),
3538 size_int (lbitsize - 1), 0);
3539 if (! integer_zerop (tem) && ! integer_all_onesp (tem))
3541 warning (0, "comparison is always %d due to width of bit-field",
3542 code == NE_EXPR);
3543 return constant_boolean_node (code == NE_EXPR, compare_type);
3547 /* Single-bit compares should always be against zero. */
3548 if (lbitsize == 1 && ! integer_zerop (rhs))
3550 code = code == EQ_EXPR ? NE_EXPR : EQ_EXPR;
3551 rhs = build_int_cst (type, 0);
3554 /* Make a new bitfield reference, shift the constant over the
3555 appropriate number of bits and mask it with the computed mask
3556 (in case this was a signed field). If we changed it, make a new one. */
3557 lhs = make_bit_field_ref (loc, linner, unsigned_type, nbitsize, nbitpos, 1);
3558 if (lvolatilep)
3560 TREE_SIDE_EFFECTS (lhs) = 1;
3561 TREE_THIS_VOLATILE (lhs) = 1;
3564 rhs = const_binop (BIT_AND_EXPR,
3565 const_binop (LSHIFT_EXPR,
3566 fold_convert_loc (loc, unsigned_type, rhs),
3567 size_int (lbitpos), 0),
3568 mask, 0);
3570 lhs = build2 (code, compare_type,
3571 build2 (BIT_AND_EXPR, unsigned_type, lhs, mask),
3572 rhs);
3573 SET_EXPR_LOCATION (lhs, loc);
3574 return lhs;
3577 /* Subroutine for fold_truthop: decode a field reference.
3579 If EXP is a comparison reference, we return the innermost reference.
3581 *PBITSIZE is set to the number of bits in the reference, *PBITPOS is
3582 set to the starting bit number.
3584 If the innermost field can be completely contained in a mode-sized
3585 unit, *PMODE is set to that mode. Otherwise, it is set to VOIDmode.
3587 *PVOLATILEP is set to 1 if the any expression encountered is volatile;
3588 otherwise it is not changed.
3590 *PUNSIGNEDP is set to the signedness of the field.
3592 *PMASK is set to the mask used. This is either contained in a
3593 BIT_AND_EXPR or derived from the width of the field.
3595 *PAND_MASK is set to the mask found in a BIT_AND_EXPR, if any.
3597 Return 0 if this is not a component reference or is one that we can't
3598 do anything with. */
3600 static tree
3601 decode_field_reference (location_t loc, tree exp, HOST_WIDE_INT *pbitsize,
3602 HOST_WIDE_INT *pbitpos, enum machine_mode *pmode,
3603 int *punsignedp, int *pvolatilep,
3604 tree *pmask, tree *pand_mask)
3606 tree outer_type = 0;
3607 tree and_mask = 0;
3608 tree mask, inner, offset;
3609 tree unsigned_type;
3610 unsigned int precision;
3612 /* All the optimizations using this function assume integer fields.
3613 There are problems with FP fields since the type_for_size call
3614 below can fail for, e.g., XFmode. */
3615 if (! INTEGRAL_TYPE_P (TREE_TYPE (exp)))
3616 return 0;
3618 /* We are interested in the bare arrangement of bits, so strip everything
3619 that doesn't affect the machine mode. However, record the type of the
3620 outermost expression if it may matter below. */
3621 if (CONVERT_EXPR_P (exp)
3622 || TREE_CODE (exp) == NON_LVALUE_EXPR)
3623 outer_type = TREE_TYPE (exp);
3624 STRIP_NOPS (exp);
3626 if (TREE_CODE (exp) == BIT_AND_EXPR)
3628 and_mask = TREE_OPERAND (exp, 1);
3629 exp = TREE_OPERAND (exp, 0);
3630 STRIP_NOPS (exp); STRIP_NOPS (and_mask);
3631 if (TREE_CODE (and_mask) != INTEGER_CST)
3632 return 0;
3635 inner = get_inner_reference (exp, pbitsize, pbitpos, &offset, pmode,
3636 punsignedp, pvolatilep, false);
3637 if ((inner == exp && and_mask == 0)
3638 || *pbitsize < 0 || offset != 0
3639 || TREE_CODE (inner) == PLACEHOLDER_EXPR)
3640 return 0;
3642 /* If the number of bits in the reference is the same as the bitsize of
3643 the outer type, then the outer type gives the signedness. Otherwise
3644 (in case of a small bitfield) the signedness is unchanged. */
3645 if (outer_type && *pbitsize == TYPE_PRECISION (outer_type))
3646 *punsignedp = TYPE_UNSIGNED (outer_type);
3648 /* Compute the mask to access the bitfield. */
3649 unsigned_type = lang_hooks.types.type_for_size (*pbitsize, 1);
3650 precision = TYPE_PRECISION (unsigned_type);
3652 mask = build_int_cst_type (unsigned_type, -1);
3654 mask = const_binop (LSHIFT_EXPR, mask, size_int (precision - *pbitsize), 0);
3655 mask = const_binop (RSHIFT_EXPR, mask, size_int (precision - *pbitsize), 0);
3657 /* Merge it with the mask we found in the BIT_AND_EXPR, if any. */
3658 if (and_mask != 0)
3659 mask = fold_build2_loc (loc, BIT_AND_EXPR, unsigned_type,
3660 fold_convert_loc (loc, unsigned_type, and_mask), mask);
3662 *pmask = mask;
3663 *pand_mask = and_mask;
3664 return inner;
3667 /* Return nonzero if MASK represents a mask of SIZE ones in the low-order
3668 bit positions. */
3670 static int
3671 all_ones_mask_p (const_tree mask, int size)
3673 tree type = TREE_TYPE (mask);
3674 unsigned int precision = TYPE_PRECISION (type);
3675 tree tmask;
3677 tmask = build_int_cst_type (signed_type_for (type), -1);
3679 return
3680 tree_int_cst_equal (mask,
3681 const_binop (RSHIFT_EXPR,
3682 const_binop (LSHIFT_EXPR, tmask,
3683 size_int (precision - size),
3685 size_int (precision - size), 0));
3688 /* Subroutine for fold: determine if VAL is the INTEGER_CONST that
3689 represents the sign bit of EXP's type. If EXP represents a sign
3690 or zero extension, also test VAL against the unextended type.
3691 The return value is the (sub)expression whose sign bit is VAL,
3692 or NULL_TREE otherwise. */
3694 static tree
3695 sign_bit_p (tree exp, const_tree val)
3697 unsigned HOST_WIDE_INT mask_lo, lo;
3698 HOST_WIDE_INT mask_hi, hi;
3699 int width;
3700 tree t;
3702 /* Tree EXP must have an integral type. */
3703 t = TREE_TYPE (exp);
3704 if (! INTEGRAL_TYPE_P (t))
3705 return NULL_TREE;
3707 /* Tree VAL must be an integer constant. */
3708 if (TREE_CODE (val) != INTEGER_CST
3709 || TREE_OVERFLOW (val))
3710 return NULL_TREE;
3712 width = TYPE_PRECISION (t);
3713 if (width > HOST_BITS_PER_WIDE_INT)
3715 hi = (unsigned HOST_WIDE_INT) 1 << (width - HOST_BITS_PER_WIDE_INT - 1);
3716 lo = 0;
3718 mask_hi = ((unsigned HOST_WIDE_INT) -1
3719 >> (2 * HOST_BITS_PER_WIDE_INT - width));
3720 mask_lo = -1;
3722 else
3724 hi = 0;
3725 lo = (unsigned HOST_WIDE_INT) 1 << (width - 1);
3727 mask_hi = 0;
3728 mask_lo = ((unsigned HOST_WIDE_INT) -1
3729 >> (HOST_BITS_PER_WIDE_INT - width));
3732 /* We mask off those bits beyond TREE_TYPE (exp) so that we can
3733 treat VAL as if it were unsigned. */
3734 if ((TREE_INT_CST_HIGH (val) & mask_hi) == hi
3735 && (TREE_INT_CST_LOW (val) & mask_lo) == lo)
3736 return exp;
3738 /* Handle extension from a narrower type. */
3739 if (TREE_CODE (exp) == NOP_EXPR
3740 && TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0))) < width)
3741 return sign_bit_p (TREE_OPERAND (exp, 0), val);
3743 return NULL_TREE;
3746 /* Subroutine for fold_truthop: determine if an operand is simple enough
3747 to be evaluated unconditionally. */
3749 static int
3750 simple_operand_p (const_tree exp)
3752 /* Strip any conversions that don't change the machine mode. */
3753 STRIP_NOPS (exp);
3755 return (CONSTANT_CLASS_P (exp)
3756 || TREE_CODE (exp) == SSA_NAME
3757 || (DECL_P (exp)
3758 && ! TREE_ADDRESSABLE (exp)
3759 && ! TREE_THIS_VOLATILE (exp)
3760 && ! DECL_NONLOCAL (exp)
3761 /* Don't regard global variables as simple. They may be
3762 allocated in ways unknown to the compiler (shared memory,
3763 #pragma weak, etc). */
3764 && ! TREE_PUBLIC (exp)
3765 && ! DECL_EXTERNAL (exp)
3766 /* Loading a static variable is unduly expensive, but global
3767 registers aren't expensive. */
3768 && (! TREE_STATIC (exp) || DECL_REGISTER (exp))));
3771 /* The following functions are subroutines to fold_range_test and allow it to
3772 try to change a logical combination of comparisons into a range test.
3774 For example, both
3775 X == 2 || X == 3 || X == 4 || X == 5
3777 X >= 2 && X <= 5
3778 are converted to
3779 (unsigned) (X - 2) <= 3
3781 We describe each set of comparisons as being either inside or outside
3782 a range, using a variable named like IN_P, and then describe the
3783 range with a lower and upper bound. If one of the bounds is omitted,
3784 it represents either the highest or lowest value of the type.
3786 In the comments below, we represent a range by two numbers in brackets
3787 preceded by a "+" to designate being inside that range, or a "-" to
3788 designate being outside that range, so the condition can be inverted by
3789 flipping the prefix. An omitted bound is represented by a "-". For
3790 example, "- [-, 10]" means being outside the range starting at the lowest
3791 possible value and ending at 10, in other words, being greater than 10.
3792 The range "+ [-, -]" is always true and hence the range "- [-, -]" is
3793 always false.
3795 We set up things so that the missing bounds are handled in a consistent
3796 manner so neither a missing bound nor "true" and "false" need to be
3797 handled using a special case. */
3799 /* Return the result of applying CODE to ARG0 and ARG1, but handle the case
3800 of ARG0 and/or ARG1 being omitted, meaning an unlimited range. UPPER0_P
3801 and UPPER1_P are nonzero if the respective argument is an upper bound
3802 and zero for a lower. TYPE, if nonzero, is the type of the result; it
3803 must be specified for a comparison. ARG1 will be converted to ARG0's
3804 type if both are specified. */
3806 static tree
3807 range_binop (enum tree_code code, tree type, tree arg0, int upper0_p,
3808 tree arg1, int upper1_p)
3810 tree tem;
3811 int result;
3812 int sgn0, sgn1;
3814 /* If neither arg represents infinity, do the normal operation.
3815 Else, if not a comparison, return infinity. Else handle the special
3816 comparison rules. Note that most of the cases below won't occur, but
3817 are handled for consistency. */
3819 if (arg0 != 0 && arg1 != 0)
3821 tem = fold_build2 (code, type != 0 ? type : TREE_TYPE (arg0),
3822 arg0, fold_convert (TREE_TYPE (arg0), arg1));
3823 STRIP_NOPS (tem);
3824 return TREE_CODE (tem) == INTEGER_CST ? tem : 0;
3827 if (TREE_CODE_CLASS (code) != tcc_comparison)
3828 return 0;
3830 /* Set SGN[01] to -1 if ARG[01] is a lower bound, 1 for upper, and 0
3831 for neither. In real maths, we cannot assume open ended ranges are
3832 the same. But, this is computer arithmetic, where numbers are finite.
3833 We can therefore make the transformation of any unbounded range with
3834 the value Z, Z being greater than any representable number. This permits
3835 us to treat unbounded ranges as equal. */
3836 sgn0 = arg0 != 0 ? 0 : (upper0_p ? 1 : -1);
3837 sgn1 = arg1 != 0 ? 0 : (upper1_p ? 1 : -1);
3838 switch (code)
3840 case EQ_EXPR:
3841 result = sgn0 == sgn1;
3842 break;
3843 case NE_EXPR:
3844 result = sgn0 != sgn1;
3845 break;
3846 case LT_EXPR:
3847 result = sgn0 < sgn1;
3848 break;
3849 case LE_EXPR:
3850 result = sgn0 <= sgn1;
3851 break;
3852 case GT_EXPR:
3853 result = sgn0 > sgn1;
3854 break;
3855 case GE_EXPR:
3856 result = sgn0 >= sgn1;
3857 break;
3858 default:
3859 gcc_unreachable ();
3862 return constant_boolean_node (result, type);
3865 /* Given EXP, a logical expression, set the range it is testing into
3866 variables denoted by PIN_P, PLOW, and PHIGH. Return the expression
3867 actually being tested. *PLOW and *PHIGH will be made of the same
3868 type as the returned expression. If EXP is not a comparison, we
3869 will most likely not be returning a useful value and range. Set
3870 *STRICT_OVERFLOW_P to true if the return value is only valid
3871 because signed overflow is undefined; otherwise, do not change
3872 *STRICT_OVERFLOW_P. */
3874 tree
3875 make_range (tree exp, int *pin_p, tree *plow, tree *phigh,
3876 bool *strict_overflow_p)
3878 enum tree_code code;
3879 tree arg0 = NULL_TREE, arg1 = NULL_TREE;
3880 tree exp_type = NULL_TREE, arg0_type = NULL_TREE;
3881 int in_p, n_in_p;
3882 tree low, high, n_low, n_high;
3883 location_t loc = EXPR_LOCATION (exp);
3885 /* Start with simply saying "EXP != 0" and then look at the code of EXP
3886 and see if we can refine the range. Some of the cases below may not
3887 happen, but it doesn't seem worth worrying about this. We "continue"
3888 the outer loop when we've changed something; otherwise we "break"
3889 the switch, which will "break" the while. */
3891 in_p = 0;
3892 low = high = build_int_cst (TREE_TYPE (exp), 0);
3894 while (1)
3896 code = TREE_CODE (exp);
3897 exp_type = TREE_TYPE (exp);
3899 if (IS_EXPR_CODE_CLASS (TREE_CODE_CLASS (code)))
3901 if (TREE_OPERAND_LENGTH (exp) > 0)
3902 arg0 = TREE_OPERAND (exp, 0);
3903 if (TREE_CODE_CLASS (code) == tcc_comparison
3904 || TREE_CODE_CLASS (code) == tcc_unary
3905 || TREE_CODE_CLASS (code) == tcc_binary)
3906 arg0_type = TREE_TYPE (arg0);
3907 if (TREE_CODE_CLASS (code) == tcc_binary
3908 || TREE_CODE_CLASS (code) == tcc_comparison
3909 || (TREE_CODE_CLASS (code) == tcc_expression
3910 && TREE_OPERAND_LENGTH (exp) > 1))
3911 arg1 = TREE_OPERAND (exp, 1);
3914 switch (code)
3916 case TRUTH_NOT_EXPR:
3917 in_p = ! in_p, exp = arg0;
3918 continue;
3920 case EQ_EXPR: case NE_EXPR:
3921 case LT_EXPR: case LE_EXPR: case GE_EXPR: case GT_EXPR:
3922 /* We can only do something if the range is testing for zero
3923 and if the second operand is an integer constant. Note that
3924 saying something is "in" the range we make is done by
3925 complementing IN_P since it will set in the initial case of
3926 being not equal to zero; "out" is leaving it alone. */
3927 if (low == 0 || high == 0
3928 || ! integer_zerop (low) || ! integer_zerop (high)
3929 || TREE_CODE (arg1) != INTEGER_CST)
3930 break;
3932 switch (code)
3934 case NE_EXPR: /* - [c, c] */
3935 low = high = arg1;
3936 break;
3937 case EQ_EXPR: /* + [c, c] */
3938 in_p = ! in_p, low = high = arg1;
3939 break;
3940 case GT_EXPR: /* - [-, c] */
3941 low = 0, high = arg1;
3942 break;
3943 case GE_EXPR: /* + [c, -] */
3944 in_p = ! in_p, low = arg1, high = 0;
3945 break;
3946 case LT_EXPR: /* - [c, -] */
3947 low = arg1, high = 0;
3948 break;
3949 case LE_EXPR: /* + [-, c] */
3950 in_p = ! in_p, low = 0, high = arg1;
3951 break;
3952 default:
3953 gcc_unreachable ();
3956 /* If this is an unsigned comparison, we also know that EXP is
3957 greater than or equal to zero. We base the range tests we make
3958 on that fact, so we record it here so we can parse existing
3959 range tests. We test arg0_type since often the return type
3960 of, e.g. EQ_EXPR, is boolean. */
3961 if (TYPE_UNSIGNED (arg0_type) && (low == 0 || high == 0))
3963 if (! merge_ranges (&n_in_p, &n_low, &n_high,
3964 in_p, low, high, 1,
3965 build_int_cst (arg0_type, 0),
3966 NULL_TREE))
3967 break;
3969 in_p = n_in_p, low = n_low, high = n_high;
3971 /* If the high bound is missing, but we have a nonzero low
3972 bound, reverse the range so it goes from zero to the low bound
3973 minus 1. */
3974 if (high == 0 && low && ! integer_zerop (low))
3976 in_p = ! in_p;
3977 high = range_binop (MINUS_EXPR, NULL_TREE, low, 0,
3978 integer_one_node, 0);
3979 low = build_int_cst (arg0_type, 0);
3983 exp = arg0;
3984 continue;
3986 case NEGATE_EXPR:
3987 /* (-x) IN [a,b] -> x in [-b, -a] */
3988 n_low = range_binop (MINUS_EXPR, exp_type,
3989 build_int_cst (exp_type, 0),
3990 0, high, 1);
3991 n_high = range_binop (MINUS_EXPR, exp_type,
3992 build_int_cst (exp_type, 0),
3993 0, low, 0);
3994 low = n_low, high = n_high;
3995 exp = arg0;
3996 continue;
3998 case BIT_NOT_EXPR:
3999 /* ~ X -> -X - 1 */
4000 exp = build2 (MINUS_EXPR, exp_type, negate_expr (arg0),
4001 build_int_cst (exp_type, 1));
4002 SET_EXPR_LOCATION (exp, loc);
4003 continue;
4005 case PLUS_EXPR: case MINUS_EXPR:
4006 if (TREE_CODE (arg1) != INTEGER_CST)
4007 break;
4009 /* If flag_wrapv and ARG0_TYPE is signed, then we cannot
4010 move a constant to the other side. */
4011 if (!TYPE_UNSIGNED (arg0_type)
4012 && !TYPE_OVERFLOW_UNDEFINED (arg0_type))
4013 break;
4015 /* If EXP is signed, any overflow in the computation is undefined,
4016 so we don't worry about it so long as our computations on
4017 the bounds don't overflow. For unsigned, overflow is defined
4018 and this is exactly the right thing. */
4019 n_low = range_binop (code == MINUS_EXPR ? PLUS_EXPR : MINUS_EXPR,
4020 arg0_type, low, 0, arg1, 0);
4021 n_high = range_binop (code == MINUS_EXPR ? PLUS_EXPR : MINUS_EXPR,
4022 arg0_type, high, 1, arg1, 0);
4023 if ((n_low != 0 && TREE_OVERFLOW (n_low))
4024 || (n_high != 0 && TREE_OVERFLOW (n_high)))
4025 break;
4027 if (TYPE_OVERFLOW_UNDEFINED (arg0_type))
4028 *strict_overflow_p = true;
4030 /* Check for an unsigned range which has wrapped around the maximum
4031 value thus making n_high < n_low, and normalize it. */
4032 if (n_low && n_high && tree_int_cst_lt (n_high, n_low))
4034 low = range_binop (PLUS_EXPR, arg0_type, n_high, 0,
4035 integer_one_node, 0);
4036 high = range_binop (MINUS_EXPR, arg0_type, n_low, 0,
4037 integer_one_node, 0);
4039 /* If the range is of the form +/- [ x+1, x ], we won't
4040 be able to normalize it. But then, it represents the
4041 whole range or the empty set, so make it
4042 +/- [ -, - ]. */
4043 if (tree_int_cst_equal (n_low, low)
4044 && tree_int_cst_equal (n_high, high))
4045 low = high = 0;
4046 else
4047 in_p = ! in_p;
4049 else
4050 low = n_low, high = n_high;
4052 exp = arg0;
4053 continue;
4055 CASE_CONVERT: case NON_LVALUE_EXPR:
4056 if (TYPE_PRECISION (arg0_type) > TYPE_PRECISION (exp_type))
4057 break;
4059 if (! INTEGRAL_TYPE_P (arg0_type)
4060 || (low != 0 && ! int_fits_type_p (low, arg0_type))
4061 || (high != 0 && ! int_fits_type_p (high, arg0_type)))
4062 break;
4064 n_low = low, n_high = high;
4066 if (n_low != 0)
4067 n_low = fold_convert_loc (loc, arg0_type, n_low);
4069 if (n_high != 0)
4070 n_high = fold_convert_loc (loc, arg0_type, n_high);
4073 /* If we're converting arg0 from an unsigned type, to exp,
4074 a signed type, we will be doing the comparison as unsigned.
4075 The tests above have already verified that LOW and HIGH
4076 are both positive.
4078 So we have to ensure that we will handle large unsigned
4079 values the same way that the current signed bounds treat
4080 negative values. */
4082 if (!TYPE_UNSIGNED (exp_type) && TYPE_UNSIGNED (arg0_type))
4084 tree high_positive;
4085 tree equiv_type;
4086 /* For fixed-point modes, we need to pass the saturating flag
4087 as the 2nd parameter. */
4088 if (ALL_FIXED_POINT_MODE_P (TYPE_MODE (arg0_type)))
4089 equiv_type = lang_hooks.types.type_for_mode
4090 (TYPE_MODE (arg0_type),
4091 TYPE_SATURATING (arg0_type));
4092 else
4093 equiv_type = lang_hooks.types.type_for_mode
4094 (TYPE_MODE (arg0_type), 1);
4096 /* A range without an upper bound is, naturally, unbounded.
4097 Since convert would have cropped a very large value, use
4098 the max value for the destination type. */
4099 high_positive
4100 = TYPE_MAX_VALUE (equiv_type) ? TYPE_MAX_VALUE (equiv_type)
4101 : TYPE_MAX_VALUE (arg0_type);
4103 if (TYPE_PRECISION (exp_type) == TYPE_PRECISION (arg0_type))
4104 high_positive = fold_build2_loc (loc, RSHIFT_EXPR, arg0_type,
4105 fold_convert_loc (loc, arg0_type,
4106 high_positive),
4107 build_int_cst (arg0_type, 1));
4109 /* If the low bound is specified, "and" the range with the
4110 range for which the original unsigned value will be
4111 positive. */
4112 if (low != 0)
4114 if (! merge_ranges (&n_in_p, &n_low, &n_high,
4115 1, n_low, n_high, 1,
4116 fold_convert_loc (loc, arg0_type,
4117 integer_zero_node),
4118 high_positive))
4119 break;
4121 in_p = (n_in_p == in_p);
4123 else
4125 /* Otherwise, "or" the range with the range of the input
4126 that will be interpreted as negative. */
4127 if (! merge_ranges (&n_in_p, &n_low, &n_high,
4128 0, n_low, n_high, 1,
4129 fold_convert_loc (loc, arg0_type,
4130 integer_zero_node),
4131 high_positive))
4132 break;
4134 in_p = (in_p != n_in_p);
4138 exp = arg0;
4139 low = n_low, high = n_high;
4140 continue;
4142 default:
4143 break;
4146 break;
4149 /* If EXP is a constant, we can evaluate whether this is true or false. */
4150 if (TREE_CODE (exp) == INTEGER_CST)
4152 in_p = in_p == (integer_onep (range_binop (GE_EXPR, integer_type_node,
4153 exp, 0, low, 0))
4154 && integer_onep (range_binop (LE_EXPR, integer_type_node,
4155 exp, 1, high, 1)));
4156 low = high = 0;
4157 exp = 0;
4160 *pin_p = in_p, *plow = low, *phigh = high;
4161 return exp;
4164 /* Given a range, LOW, HIGH, and IN_P, an expression, EXP, and a result
4165 type, TYPE, return an expression to test if EXP is in (or out of, depending
4166 on IN_P) the range. Return 0 if the test couldn't be created. */
4168 tree
4169 build_range_check (location_t loc, tree type, tree exp, int in_p,
4170 tree low, tree high)
4172 tree etype = TREE_TYPE (exp), value;
4174 #ifdef HAVE_canonicalize_funcptr_for_compare
4175 /* Disable this optimization for function pointer expressions
4176 on targets that require function pointer canonicalization. */
4177 if (HAVE_canonicalize_funcptr_for_compare
4178 && TREE_CODE (etype) == POINTER_TYPE
4179 && TREE_CODE (TREE_TYPE (etype)) == FUNCTION_TYPE)
4180 return NULL_TREE;
4181 #endif
4183 if (! in_p)
4185 value = build_range_check (loc, type, exp, 1, low, high);
4186 if (value != 0)
4187 return invert_truthvalue_loc (loc, value);
4189 return 0;
4192 if (low == 0 && high == 0)
4193 return build_int_cst (type, 1);
4195 if (low == 0)
4196 return fold_build2_loc (loc, LE_EXPR, type, exp,
4197 fold_convert_loc (loc, etype, high));
4199 if (high == 0)
4200 return fold_build2_loc (loc, GE_EXPR, type, exp,
4201 fold_convert_loc (loc, etype, low));
4203 if (operand_equal_p (low, high, 0))
4204 return fold_build2_loc (loc, EQ_EXPR, type, exp,
4205 fold_convert_loc (loc, etype, low));
4207 if (integer_zerop (low))
4209 if (! TYPE_UNSIGNED (etype))
4211 etype = unsigned_type_for (etype);
4212 high = fold_convert_loc (loc, etype, high);
4213 exp = fold_convert_loc (loc, etype, exp);
4215 return build_range_check (loc, type, exp, 1, 0, high);
4218 /* Optimize (c>=1) && (c<=127) into (signed char)c > 0. */
4219 if (integer_onep (low) && TREE_CODE (high) == INTEGER_CST)
4221 unsigned HOST_WIDE_INT lo;
4222 HOST_WIDE_INT hi;
4223 int prec;
4225 prec = TYPE_PRECISION (etype);
4226 if (prec <= HOST_BITS_PER_WIDE_INT)
4228 hi = 0;
4229 lo = ((unsigned HOST_WIDE_INT) 1 << (prec - 1)) - 1;
4231 else
4233 hi = ((HOST_WIDE_INT) 1 << (prec - HOST_BITS_PER_WIDE_INT - 1)) - 1;
4234 lo = (unsigned HOST_WIDE_INT) -1;
4237 if (TREE_INT_CST_HIGH (high) == hi && TREE_INT_CST_LOW (high) == lo)
4239 if (TYPE_UNSIGNED (etype))
4241 tree signed_etype = signed_type_for (etype);
4242 if (TYPE_PRECISION (signed_etype) != TYPE_PRECISION (etype))
4243 etype
4244 = build_nonstandard_integer_type (TYPE_PRECISION (etype), 0);
4245 else
4246 etype = signed_etype;
4247 exp = fold_convert_loc (loc, etype, exp);
4249 return fold_build2_loc (loc, GT_EXPR, type, exp,
4250 build_int_cst (etype, 0));
4254 /* Optimize (c>=low) && (c<=high) into (c-low>=0) && (c-low<=high-low).
4255 This requires wrap-around arithmetics for the type of the expression.
4256 First make sure that arithmetics in this type is valid, then make sure
4257 that it wraps around. */
4258 if (TREE_CODE (etype) == ENUMERAL_TYPE || TREE_CODE (etype) == BOOLEAN_TYPE)
4259 etype = lang_hooks.types.type_for_size (TYPE_PRECISION (etype),
4260 TYPE_UNSIGNED (etype));
4262 if (TREE_CODE (etype) == INTEGER_TYPE && !TYPE_OVERFLOW_WRAPS (etype))
4264 tree utype, minv, maxv;
4266 /* Check if (unsigned) INT_MAX + 1 == (unsigned) INT_MIN
4267 for the type in question, as we rely on this here. */
4268 utype = unsigned_type_for (etype);
4269 maxv = fold_convert_loc (loc, utype, TYPE_MAX_VALUE (etype));
4270 maxv = range_binop (PLUS_EXPR, NULL_TREE, maxv, 1,
4271 integer_one_node, 1);
4272 minv = fold_convert_loc (loc, utype, TYPE_MIN_VALUE (etype));
4274 if (integer_zerop (range_binop (NE_EXPR, integer_type_node,
4275 minv, 1, maxv, 1)))
4276 etype = utype;
4277 else
4278 return 0;
4281 high = fold_convert_loc (loc, etype, high);
4282 low = fold_convert_loc (loc, etype, low);
4283 exp = fold_convert_loc (loc, etype, exp);
4285 value = const_binop (MINUS_EXPR, high, low, 0);
4288 if (POINTER_TYPE_P (etype))
4290 if (value != 0 && !TREE_OVERFLOW (value))
4292 low = fold_convert_loc (loc, sizetype, low);
4293 low = fold_build1_loc (loc, NEGATE_EXPR, sizetype, low);
4294 return build_range_check (loc, type,
4295 fold_build2_loc (loc, POINTER_PLUS_EXPR,
4296 etype, exp, low),
4297 1, build_int_cst (etype, 0), value);
4299 return 0;
4302 if (value != 0 && !TREE_OVERFLOW (value))
4303 return build_range_check (loc, type,
4304 fold_build2_loc (loc, MINUS_EXPR, etype, exp, low),
4305 1, build_int_cst (etype, 0), value);
4307 return 0;
4310 /* Return the predecessor of VAL in its type, handling the infinite case. */
4312 static tree
4313 range_predecessor (tree val)
4315 tree type = TREE_TYPE (val);
4317 if (INTEGRAL_TYPE_P (type)
4318 && operand_equal_p (val, TYPE_MIN_VALUE (type), 0))
4319 return 0;
4320 else
4321 return range_binop (MINUS_EXPR, NULL_TREE, val, 0, integer_one_node, 0);
4324 /* Return the successor of VAL in its type, handling the infinite case. */
4326 static tree
4327 range_successor (tree val)
4329 tree type = TREE_TYPE (val);
4331 if (INTEGRAL_TYPE_P (type)
4332 && operand_equal_p (val, TYPE_MAX_VALUE (type), 0))
4333 return 0;
4334 else
4335 return range_binop (PLUS_EXPR, NULL_TREE, val, 0, integer_one_node, 0);
4338 /* Given two ranges, see if we can merge them into one. Return 1 if we
4339 can, 0 if we can't. Set the output range into the specified parameters. */
4341 bool
4342 merge_ranges (int *pin_p, tree *plow, tree *phigh, int in0_p, tree low0,
4343 tree high0, int in1_p, tree low1, tree high1)
4345 int no_overlap;
4346 int subset;
4347 int temp;
4348 tree tem;
4349 int in_p;
4350 tree low, high;
4351 int lowequal = ((low0 == 0 && low1 == 0)
4352 || integer_onep (range_binop (EQ_EXPR, integer_type_node,
4353 low0, 0, low1, 0)));
4354 int highequal = ((high0 == 0 && high1 == 0)
4355 || integer_onep (range_binop (EQ_EXPR, integer_type_node,
4356 high0, 1, high1, 1)));
4358 /* Make range 0 be the range that starts first, or ends last if they
4359 start at the same value. Swap them if it isn't. */
4360 if (integer_onep (range_binop (GT_EXPR, integer_type_node,
4361 low0, 0, low1, 0))
4362 || (lowequal
4363 && integer_onep (range_binop (GT_EXPR, integer_type_node,
4364 high1, 1, high0, 1))))
4366 temp = in0_p, in0_p = in1_p, in1_p = temp;
4367 tem = low0, low0 = low1, low1 = tem;
4368 tem = high0, high0 = high1, high1 = tem;
4371 /* Now flag two cases, whether the ranges are disjoint or whether the
4372 second range is totally subsumed in the first. Note that the tests
4373 below are simplified by the ones above. */
4374 no_overlap = integer_onep (range_binop (LT_EXPR, integer_type_node,
4375 high0, 1, low1, 0));
4376 subset = integer_onep (range_binop (LE_EXPR, integer_type_node,
4377 high1, 1, high0, 1));
4379 /* We now have four cases, depending on whether we are including or
4380 excluding the two ranges. */
4381 if (in0_p && in1_p)
4383 /* If they don't overlap, the result is false. If the second range
4384 is a subset it is the result. Otherwise, the range is from the start
4385 of the second to the end of the first. */
4386 if (no_overlap)
4387 in_p = 0, low = high = 0;
4388 else if (subset)
4389 in_p = 1, low = low1, high = high1;
4390 else
4391 in_p = 1, low = low1, high = high0;
4394 else if (in0_p && ! in1_p)
4396 /* If they don't overlap, the result is the first range. If they are
4397 equal, the result is false. If the second range is a subset of the
4398 first, and the ranges begin at the same place, we go from just after
4399 the end of the second range to the end of the first. If the second
4400 range is not a subset of the first, or if it is a subset and both
4401 ranges end at the same place, the range starts at the start of the
4402 first range and ends just before the second range.
4403 Otherwise, we can't describe this as a single range. */
4404 if (no_overlap)
4405 in_p = 1, low = low0, high = high0;
4406 else if (lowequal && highequal)
4407 in_p = 0, low = high = 0;
4408 else if (subset && lowequal)
4410 low = range_successor (high1);
4411 high = high0;
4412 in_p = 1;
4413 if (low == 0)
4415 /* We are in the weird situation where high0 > high1 but
4416 high1 has no successor. Punt. */
4417 return 0;
4420 else if (! subset || highequal)
4422 low = low0;
4423 high = range_predecessor (low1);
4424 in_p = 1;
4425 if (high == 0)
4427 /* low0 < low1 but low1 has no predecessor. Punt. */
4428 return 0;
4431 else
4432 return 0;
4435 else if (! in0_p && in1_p)
4437 /* If they don't overlap, the result is the second range. If the second
4438 is a subset of the first, the result is false. Otherwise,
4439 the range starts just after the first range and ends at the
4440 end of the second. */
4441 if (no_overlap)
4442 in_p = 1, low = low1, high = high1;
4443 else if (subset || highequal)
4444 in_p = 0, low = high = 0;
4445 else
4447 low = range_successor (high0);
4448 high = high1;
4449 in_p = 1;
4450 if (low == 0)
4452 /* high1 > high0 but high0 has no successor. Punt. */
4453 return 0;
4458 else
4460 /* The case where we are excluding both ranges. Here the complex case
4461 is if they don't overlap. In that case, the only time we have a
4462 range is if they are adjacent. If the second is a subset of the
4463 first, the result is the first. Otherwise, the range to exclude
4464 starts at the beginning of the first range and ends at the end of the
4465 second. */
4466 if (no_overlap)
4468 if (integer_onep (range_binop (EQ_EXPR, integer_type_node,
4469 range_successor (high0),
4470 1, low1, 0)))
4471 in_p = 0, low = low0, high = high1;
4472 else
4474 /* Canonicalize - [min, x] into - [-, x]. */
4475 if (low0 && TREE_CODE (low0) == INTEGER_CST)
4476 switch (TREE_CODE (TREE_TYPE (low0)))
4478 case ENUMERAL_TYPE:
4479 if (TYPE_PRECISION (TREE_TYPE (low0))
4480 != GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (low0))))
4481 break;
4482 /* FALLTHROUGH */
4483 case INTEGER_TYPE:
4484 if (tree_int_cst_equal (low0,
4485 TYPE_MIN_VALUE (TREE_TYPE (low0))))
4486 low0 = 0;
4487 break;
4488 case POINTER_TYPE:
4489 if (TYPE_UNSIGNED (TREE_TYPE (low0))
4490 && integer_zerop (low0))
4491 low0 = 0;
4492 break;
4493 default:
4494 break;
4497 /* Canonicalize - [x, max] into - [x, -]. */
4498 if (high1 && TREE_CODE (high1) == INTEGER_CST)
4499 switch (TREE_CODE (TREE_TYPE (high1)))
4501 case ENUMERAL_TYPE:
4502 if (TYPE_PRECISION (TREE_TYPE (high1))
4503 != GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (high1))))
4504 break;
4505 /* FALLTHROUGH */
4506 case INTEGER_TYPE:
4507 if (tree_int_cst_equal (high1,
4508 TYPE_MAX_VALUE (TREE_TYPE (high1))))
4509 high1 = 0;
4510 break;
4511 case POINTER_TYPE:
4512 if (TYPE_UNSIGNED (TREE_TYPE (high1))
4513 && integer_zerop (range_binop (PLUS_EXPR, NULL_TREE,
4514 high1, 1,
4515 integer_one_node, 1)))
4516 high1 = 0;
4517 break;
4518 default:
4519 break;
4522 /* The ranges might be also adjacent between the maximum and
4523 minimum values of the given type. For
4524 - [{min,-}, x] and - [y, {max,-}] ranges where x + 1 < y
4525 return + [x + 1, y - 1]. */
4526 if (low0 == 0 && high1 == 0)
4528 low = range_successor (high0);
4529 high = range_predecessor (low1);
4530 if (low == 0 || high == 0)
4531 return 0;
4533 in_p = 1;
4535 else
4536 return 0;
4539 else if (subset)
4540 in_p = 0, low = low0, high = high0;
4541 else
4542 in_p = 0, low = low0, high = high1;
4545 *pin_p = in_p, *plow = low, *phigh = high;
4546 return 1;
4550 /* Subroutine of fold, looking inside expressions of the form
4551 A op B ? A : C, where ARG0, ARG1 and ARG2 are the three operands
4552 of the COND_EXPR. This function is being used also to optimize
4553 A op B ? C : A, by reversing the comparison first.
4555 Return a folded expression whose code is not a COND_EXPR
4556 anymore, or NULL_TREE if no folding opportunity is found. */
4558 static tree
4559 fold_cond_expr_with_comparison (location_t loc, tree type,
4560 tree arg0, tree arg1, tree arg2)
4562 enum tree_code comp_code = TREE_CODE (arg0);
4563 tree arg00 = TREE_OPERAND (arg0, 0);
4564 tree arg01 = TREE_OPERAND (arg0, 1);
4565 tree arg1_type = TREE_TYPE (arg1);
4566 tree tem;
4568 STRIP_NOPS (arg1);
4569 STRIP_NOPS (arg2);
4571 /* If we have A op 0 ? A : -A, consider applying the following
4572 transformations:
4574 A == 0? A : -A same as -A
4575 A != 0? A : -A same as A
4576 A >= 0? A : -A same as abs (A)
4577 A > 0? A : -A same as abs (A)
4578 A <= 0? A : -A same as -abs (A)
4579 A < 0? A : -A same as -abs (A)
4581 None of these transformations work for modes with signed
4582 zeros. If A is +/-0, the first two transformations will
4583 change the sign of the result (from +0 to -0, or vice
4584 versa). The last four will fix the sign of the result,
4585 even though the original expressions could be positive or
4586 negative, depending on the sign of A.
4588 Note that all these transformations are correct if A is
4589 NaN, since the two alternatives (A and -A) are also NaNs. */
4590 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (type))
4591 && (FLOAT_TYPE_P (TREE_TYPE (arg01))
4592 ? real_zerop (arg01)
4593 : integer_zerop (arg01))
4594 && ((TREE_CODE (arg2) == NEGATE_EXPR
4595 && operand_equal_p (TREE_OPERAND (arg2, 0), arg1, 0))
4596 /* In the case that A is of the form X-Y, '-A' (arg2) may
4597 have already been folded to Y-X, check for that. */
4598 || (TREE_CODE (arg1) == MINUS_EXPR
4599 && TREE_CODE (arg2) == MINUS_EXPR
4600 && operand_equal_p (TREE_OPERAND (arg1, 0),
4601 TREE_OPERAND (arg2, 1), 0)
4602 && operand_equal_p (TREE_OPERAND (arg1, 1),
4603 TREE_OPERAND (arg2, 0), 0))))
4604 switch (comp_code)
4606 case EQ_EXPR:
4607 case UNEQ_EXPR:
4608 tem = fold_convert_loc (loc, arg1_type, arg1);
4609 return pedantic_non_lvalue_loc (loc,
4610 fold_convert_loc (loc, type,
4611 negate_expr (tem)));
4612 case NE_EXPR:
4613 case LTGT_EXPR:
4614 return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
4615 case UNGE_EXPR:
4616 case UNGT_EXPR:
4617 if (flag_trapping_math)
4618 break;
4619 /* Fall through. */
4620 case GE_EXPR:
4621 case GT_EXPR:
4622 if (TYPE_UNSIGNED (TREE_TYPE (arg1)))
4623 arg1 = fold_convert_loc (loc, signed_type_for
4624 (TREE_TYPE (arg1)), arg1);
4625 tem = fold_build1_loc (loc, ABS_EXPR, TREE_TYPE (arg1), arg1);
4626 return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, tem));
4627 case UNLE_EXPR:
4628 case UNLT_EXPR:
4629 if (flag_trapping_math)
4630 break;
4631 case LE_EXPR:
4632 case LT_EXPR:
4633 if (TYPE_UNSIGNED (TREE_TYPE (arg1)))
4634 arg1 = fold_convert_loc (loc, signed_type_for
4635 (TREE_TYPE (arg1)), arg1);
4636 tem = fold_build1_loc (loc, ABS_EXPR, TREE_TYPE (arg1), arg1);
4637 return negate_expr (fold_convert_loc (loc, type, tem));
4638 default:
4639 gcc_assert (TREE_CODE_CLASS (comp_code) == tcc_comparison);
4640 break;
4643 /* A != 0 ? A : 0 is simply A, unless A is -0. Likewise
4644 A == 0 ? A : 0 is always 0 unless A is -0. Note that
4645 both transformations are correct when A is NaN: A != 0
4646 is then true, and A == 0 is false. */
4648 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (type))
4649 && integer_zerop (arg01) && integer_zerop (arg2))
4651 if (comp_code == NE_EXPR)
4652 return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
4653 else if (comp_code == EQ_EXPR)
4654 return build_int_cst (type, 0);
4657 /* Try some transformations of A op B ? A : B.
4659 A == B? A : B same as B
4660 A != B? A : B same as A
4661 A >= B? A : B same as max (A, B)
4662 A > B? A : B same as max (B, A)
4663 A <= B? A : B same as min (A, B)
4664 A < B? A : B same as min (B, A)
4666 As above, these transformations don't work in the presence
4667 of signed zeros. For example, if A and B are zeros of
4668 opposite sign, the first two transformations will change
4669 the sign of the result. In the last four, the original
4670 expressions give different results for (A=+0, B=-0) and
4671 (A=-0, B=+0), but the transformed expressions do not.
4673 The first two transformations are correct if either A or B
4674 is a NaN. In the first transformation, the condition will
4675 be false, and B will indeed be chosen. In the case of the
4676 second transformation, the condition A != B will be true,
4677 and A will be chosen.
4679 The conversions to max() and min() are not correct if B is
4680 a number and A is not. The conditions in the original
4681 expressions will be false, so all four give B. The min()
4682 and max() versions would give a NaN instead. */
4683 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (type))
4684 && operand_equal_for_comparison_p (arg01, arg2, arg00)
4685 /* Avoid these transformations if the COND_EXPR may be used
4686 as an lvalue in the C++ front-end. PR c++/19199. */
4687 && (in_gimple_form
4688 || (strcmp (lang_hooks.name, "GNU C++") != 0
4689 && strcmp (lang_hooks.name, "GNU Objective-C++") != 0)
4690 || ! maybe_lvalue_p (arg1)
4691 || ! maybe_lvalue_p (arg2)))
4693 tree comp_op0 = arg00;
4694 tree comp_op1 = arg01;
4695 tree comp_type = TREE_TYPE (comp_op0);
4697 /* Avoid adding NOP_EXPRs in case this is an lvalue. */
4698 if (TYPE_MAIN_VARIANT (comp_type) == TYPE_MAIN_VARIANT (type))
4700 comp_type = type;
4701 comp_op0 = arg1;
4702 comp_op1 = arg2;
4705 switch (comp_code)
4707 case EQ_EXPR:
4708 return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, arg2));
4709 case NE_EXPR:
4710 return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
4711 case LE_EXPR:
4712 case LT_EXPR:
4713 case UNLE_EXPR:
4714 case UNLT_EXPR:
4715 /* In C++ a ?: expression can be an lvalue, so put the
4716 operand which will be used if they are equal first
4717 so that we can convert this back to the
4718 corresponding COND_EXPR. */
4719 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
4721 comp_op0 = fold_convert_loc (loc, comp_type, comp_op0);
4722 comp_op1 = fold_convert_loc (loc, comp_type, comp_op1);
4723 tem = (comp_code == LE_EXPR || comp_code == UNLE_EXPR)
4724 ? fold_build2_loc (loc, MIN_EXPR, comp_type, comp_op0, comp_op1)
4725 : fold_build2_loc (loc, MIN_EXPR, comp_type,
4726 comp_op1, comp_op0);
4727 return pedantic_non_lvalue_loc (loc,
4728 fold_convert_loc (loc, type, tem));
4730 break;
4731 case GE_EXPR:
4732 case GT_EXPR:
4733 case UNGE_EXPR:
4734 case UNGT_EXPR:
4735 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
4737 comp_op0 = fold_convert_loc (loc, comp_type, comp_op0);
4738 comp_op1 = fold_convert_loc (loc, comp_type, comp_op1);
4739 tem = (comp_code == GE_EXPR || comp_code == UNGE_EXPR)
4740 ? fold_build2_loc (loc, MAX_EXPR, comp_type, comp_op0, comp_op1)
4741 : fold_build2_loc (loc, MAX_EXPR, comp_type,
4742 comp_op1, comp_op0);
4743 return pedantic_non_lvalue_loc (loc,
4744 fold_convert_loc (loc, type, tem));
4746 break;
4747 case UNEQ_EXPR:
4748 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
4749 return pedantic_non_lvalue_loc (loc,
4750 fold_convert_loc (loc, type, arg2));
4751 break;
4752 case LTGT_EXPR:
4753 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
4754 return pedantic_non_lvalue_loc (loc,
4755 fold_convert_loc (loc, type, arg1));
4756 break;
4757 default:
4758 gcc_assert (TREE_CODE_CLASS (comp_code) == tcc_comparison);
4759 break;
4763 /* If this is A op C1 ? A : C2 with C1 and C2 constant integers,
4764 we might still be able to simplify this. For example,
4765 if C1 is one less or one more than C2, this might have started
4766 out as a MIN or MAX and been transformed by this function.
4767 Only good for INTEGER_TYPEs, because we need TYPE_MAX_VALUE. */
4769 if (INTEGRAL_TYPE_P (type)
4770 && TREE_CODE (arg01) == INTEGER_CST
4771 && TREE_CODE (arg2) == INTEGER_CST)
4772 switch (comp_code)
4774 case EQ_EXPR:
4775 if (TREE_CODE (arg1) == INTEGER_CST)
4776 break;
4777 /* We can replace A with C1 in this case. */
4778 arg1 = fold_convert_loc (loc, type, arg01);
4779 return fold_build3_loc (loc, COND_EXPR, type, arg0, arg1, arg2);
4781 case LT_EXPR:
4782 /* If C1 is C2 + 1, this is min(A, C2), but use ARG00's type for
4783 MIN_EXPR, to preserve the signedness of the comparison. */
4784 if (! operand_equal_p (arg2, TYPE_MAX_VALUE (type),
4785 OEP_ONLY_CONST)
4786 && operand_equal_p (arg01,
4787 const_binop (PLUS_EXPR, arg2,
4788 build_int_cst (type, 1), 0),
4789 OEP_ONLY_CONST))
4791 tem = fold_build2_loc (loc, MIN_EXPR, TREE_TYPE (arg00), arg00,
4792 fold_convert_loc (loc, TREE_TYPE (arg00),
4793 arg2));
4794 return pedantic_non_lvalue_loc (loc,
4795 fold_convert_loc (loc, type, tem));
4797 break;
4799 case LE_EXPR:
4800 /* If C1 is C2 - 1, this is min(A, C2), with the same care
4801 as above. */
4802 if (! operand_equal_p (arg2, TYPE_MIN_VALUE (type),
4803 OEP_ONLY_CONST)
4804 && operand_equal_p (arg01,
4805 const_binop (MINUS_EXPR, arg2,
4806 build_int_cst (type, 1), 0),
4807 OEP_ONLY_CONST))
4809 tem = fold_build2_loc (loc, MIN_EXPR, TREE_TYPE (arg00), arg00,
4810 fold_convert_loc (loc, TREE_TYPE (arg00),
4811 arg2));
4812 return pedantic_non_lvalue_loc (loc,
4813 fold_convert_loc (loc, type, tem));
4815 break;
4817 case GT_EXPR:
4818 /* If C1 is C2 - 1, this is max(A, C2), but use ARG00's type for
4819 MAX_EXPR, to preserve the signedness of the comparison. */
4820 if (! operand_equal_p (arg2, TYPE_MIN_VALUE (type),
4821 OEP_ONLY_CONST)
4822 && operand_equal_p (arg01,
4823 const_binop (MINUS_EXPR, arg2,
4824 build_int_cst (type, 1), 0),
4825 OEP_ONLY_CONST))
4827 tem = fold_build2_loc (loc, MAX_EXPR, TREE_TYPE (arg00), arg00,
4828 fold_convert_loc (loc, TREE_TYPE (arg00),
4829 arg2));
4830 return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, tem));
4832 break;
4834 case GE_EXPR:
4835 /* If C1 is C2 + 1, this is max(A, C2), with the same care as above. */
4836 if (! operand_equal_p (arg2, TYPE_MAX_VALUE (type),
4837 OEP_ONLY_CONST)
4838 && operand_equal_p (arg01,
4839 const_binop (PLUS_EXPR, arg2,
4840 build_int_cst (type, 1), 0),
4841 OEP_ONLY_CONST))
4843 tem = fold_build2_loc (loc, MAX_EXPR, TREE_TYPE (arg00), arg00,
4844 fold_convert_loc (loc, TREE_TYPE (arg00),
4845 arg2));
4846 return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, tem));
4848 break;
4849 case NE_EXPR:
4850 break;
4851 default:
4852 gcc_unreachable ();
4855 return NULL_TREE;
4860 #ifndef LOGICAL_OP_NON_SHORT_CIRCUIT
4861 #define LOGICAL_OP_NON_SHORT_CIRCUIT \
4862 (BRANCH_COST (optimize_function_for_speed_p (cfun), \
4863 false) >= 2)
4864 #endif
4866 /* EXP is some logical combination of boolean tests. See if we can
4867 merge it into some range test. Return the new tree if so. */
4869 static tree
4870 fold_range_test (location_t loc, enum tree_code code, tree type,
4871 tree op0, tree op1)
4873 int or_op = (code == TRUTH_ORIF_EXPR
4874 || code == TRUTH_OR_EXPR);
4875 int in0_p, in1_p, in_p;
4876 tree low0, low1, low, high0, high1, high;
4877 bool strict_overflow_p = false;
4878 tree lhs = make_range (op0, &in0_p, &low0, &high0, &strict_overflow_p);
4879 tree rhs = make_range (op1, &in1_p, &low1, &high1, &strict_overflow_p);
4880 tree tem;
4881 const char * const warnmsg = G_("assuming signed overflow does not occur "
4882 "when simplifying range test");
4884 /* If this is an OR operation, invert both sides; we will invert
4885 again at the end. */
4886 if (or_op)
4887 in0_p = ! in0_p, in1_p = ! in1_p;
4889 /* If both expressions are the same, if we can merge the ranges, and we
4890 can build the range test, return it or it inverted. If one of the
4891 ranges is always true or always false, consider it to be the same
4892 expression as the other. */
4893 if ((lhs == 0 || rhs == 0 || operand_equal_p (lhs, rhs, 0))
4894 && merge_ranges (&in_p, &low, &high, in0_p, low0, high0,
4895 in1_p, low1, high1)
4896 && 0 != (tem = (build_range_check (UNKNOWN_LOCATION, type,
4897 lhs != 0 ? lhs
4898 : rhs != 0 ? rhs : integer_zero_node,
4899 in_p, low, high))))
4901 if (strict_overflow_p)
4902 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_COMPARISON);
4903 return or_op ? invert_truthvalue_loc (loc, tem) : tem;
4906 /* On machines where the branch cost is expensive, if this is a
4907 short-circuited branch and the underlying object on both sides
4908 is the same, make a non-short-circuit operation. */
4909 else if (LOGICAL_OP_NON_SHORT_CIRCUIT
4910 && lhs != 0 && rhs != 0
4911 && (code == TRUTH_ANDIF_EXPR
4912 || code == TRUTH_ORIF_EXPR)
4913 && operand_equal_p (lhs, rhs, 0))
4915 /* If simple enough, just rewrite. Otherwise, make a SAVE_EXPR
4916 unless we are at top level or LHS contains a PLACEHOLDER_EXPR, in
4917 which cases we can't do this. */
4918 if (simple_operand_p (lhs))
4920 tem = build2 (code == TRUTH_ANDIF_EXPR
4921 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR,
4922 type, op0, op1);
4923 SET_EXPR_LOCATION (tem, loc);
4924 return tem;
4927 else if (lang_hooks.decls.global_bindings_p () == 0
4928 && ! CONTAINS_PLACEHOLDER_P (lhs))
4930 tree common = save_expr (lhs);
4932 if (0 != (lhs = build_range_check (loc, type, common,
4933 or_op ? ! in0_p : in0_p,
4934 low0, high0))
4935 && (0 != (rhs = build_range_check (loc, type, common,
4936 or_op ? ! in1_p : in1_p,
4937 low1, high1))))
4939 if (strict_overflow_p)
4940 fold_overflow_warning (warnmsg,
4941 WARN_STRICT_OVERFLOW_COMPARISON);
4942 tem = build2 (code == TRUTH_ANDIF_EXPR
4943 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR,
4944 type, lhs, rhs);
4945 SET_EXPR_LOCATION (tem, loc);
4946 return tem;
4951 return 0;
4954 /* Subroutine for fold_truthop: C is an INTEGER_CST interpreted as a P
4955 bit value. Arrange things so the extra bits will be set to zero if and
4956 only if C is signed-extended to its full width. If MASK is nonzero,
4957 it is an INTEGER_CST that should be AND'ed with the extra bits. */
4959 static tree
4960 unextend (tree c, int p, int unsignedp, tree mask)
4962 tree type = TREE_TYPE (c);
4963 int modesize = GET_MODE_BITSIZE (TYPE_MODE (type));
4964 tree temp;
4966 if (p == modesize || unsignedp)
4967 return c;
4969 /* We work by getting just the sign bit into the low-order bit, then
4970 into the high-order bit, then sign-extend. We then XOR that value
4971 with C. */
4972 temp = const_binop (RSHIFT_EXPR, c, size_int (p - 1), 0);
4973 temp = const_binop (BIT_AND_EXPR, temp, size_int (1), 0);
4975 /* We must use a signed type in order to get an arithmetic right shift.
4976 However, we must also avoid introducing accidental overflows, so that
4977 a subsequent call to integer_zerop will work. Hence we must
4978 do the type conversion here. At this point, the constant is either
4979 zero or one, and the conversion to a signed type can never overflow.
4980 We could get an overflow if this conversion is done anywhere else. */
4981 if (TYPE_UNSIGNED (type))
4982 temp = fold_convert (signed_type_for (type), temp);
4984 temp = const_binop (LSHIFT_EXPR, temp, size_int (modesize - 1), 0);
4985 temp = const_binop (RSHIFT_EXPR, temp, size_int (modesize - p - 1), 0);
4986 if (mask != 0)
4987 temp = const_binop (BIT_AND_EXPR, temp,
4988 fold_convert (TREE_TYPE (c), mask),
4990 /* If necessary, convert the type back to match the type of C. */
4991 if (TYPE_UNSIGNED (type))
4992 temp = fold_convert (type, temp);
4994 return fold_convert (type,
4995 const_binop (BIT_XOR_EXPR, c, temp, 0));
4998 /* For an expression that has the form
4999 (A && B) || ~B
5001 (A || B) && ~B,
5002 we can drop one of the inner expressions and simplify to
5003 A || ~B
5005 A && ~B
5006 LOC is the location of the resulting expression. OP is the inner
5007 logical operation; the left-hand side in the examples above, while CMPOP
5008 is the right-hand side. RHS_ONLY is used to prevent us from accidentally
5009 removing a condition that guards another, as in
5010 (A != NULL && A->...) || A == NULL
5011 which we must not transform. If RHS_ONLY is true, only eliminate the
5012 right-most operand of the inner logical operation. */
5014 static tree
5015 merge_truthop_with_opposite_arm (location_t loc, tree op, tree cmpop,
5016 bool rhs_only)
5018 tree type = TREE_TYPE (cmpop);
5019 enum tree_code code = TREE_CODE (cmpop);
5020 enum tree_code truthop_code = TREE_CODE (op);
5021 tree lhs = TREE_OPERAND (op, 0);
5022 tree rhs = TREE_OPERAND (op, 1);
5023 tree orig_lhs = lhs, orig_rhs = rhs;
5024 enum tree_code rhs_code = TREE_CODE (rhs);
5025 enum tree_code lhs_code = TREE_CODE (lhs);
5026 enum tree_code inv_code;
5028 if (TREE_SIDE_EFFECTS (op) || TREE_SIDE_EFFECTS (cmpop))
5029 return NULL_TREE;
5031 if (TREE_CODE_CLASS (code) != tcc_comparison)
5032 return NULL_TREE;
5034 if (rhs_code == truthop_code)
5036 tree newrhs = merge_truthop_with_opposite_arm (loc, rhs, cmpop, rhs_only);
5037 if (newrhs != NULL_TREE)
5039 rhs = newrhs;
5040 rhs_code = TREE_CODE (rhs);
5043 if (lhs_code == truthop_code && !rhs_only)
5045 tree newlhs = merge_truthop_with_opposite_arm (loc, lhs, cmpop, false);
5046 if (newlhs != NULL_TREE)
5048 lhs = newlhs;
5049 lhs_code = TREE_CODE (lhs);
5053 inv_code = invert_tree_comparison (code, HONOR_NANS (TYPE_MODE (type)));
5054 if (inv_code == rhs_code
5055 && operand_equal_p (TREE_OPERAND (rhs, 0), TREE_OPERAND (cmpop, 0), 0)
5056 && operand_equal_p (TREE_OPERAND (rhs, 1), TREE_OPERAND (cmpop, 1), 0))
5057 return lhs;
5058 if (!rhs_only && inv_code == lhs_code
5059 && operand_equal_p (TREE_OPERAND (lhs, 0), TREE_OPERAND (cmpop, 0), 0)
5060 && operand_equal_p (TREE_OPERAND (lhs, 1), TREE_OPERAND (cmpop, 1), 0))
5061 return rhs;
5062 if (rhs != orig_rhs || lhs != orig_lhs)
5063 return fold_build2_loc (loc, truthop_code, TREE_TYPE (cmpop),
5064 lhs, rhs);
5065 return NULL_TREE;
5068 /* Find ways of folding logical expressions of LHS and RHS:
5069 Try to merge two comparisons to the same innermost item.
5070 Look for range tests like "ch >= '0' && ch <= '9'".
5071 Look for combinations of simple terms on machines with expensive branches
5072 and evaluate the RHS unconditionally.
5074 For example, if we have p->a == 2 && p->b == 4 and we can make an
5075 object large enough to span both A and B, we can do this with a comparison
5076 against the object ANDed with the a mask.
5078 If we have p->a == q->a && p->b == q->b, we may be able to use bit masking
5079 operations to do this with one comparison.
5081 We check for both normal comparisons and the BIT_AND_EXPRs made this by
5082 function and the one above.
5084 CODE is the logical operation being done. It can be TRUTH_ANDIF_EXPR,
5085 TRUTH_AND_EXPR, TRUTH_ORIF_EXPR, or TRUTH_OR_EXPR.
5087 TRUTH_TYPE is the type of the logical operand and LHS and RHS are its
5088 two operands.
5090 We return the simplified tree or 0 if no optimization is possible. */
5092 static tree
5093 fold_truthop (location_t loc, enum tree_code code, tree truth_type,
5094 tree lhs, tree rhs)
5096 /* If this is the "or" of two comparisons, we can do something if
5097 the comparisons are NE_EXPR. If this is the "and", we can do something
5098 if the comparisons are EQ_EXPR. I.e.,
5099 (a->b == 2 && a->c == 4) can become (a->new == NEW).
5101 WANTED_CODE is this operation code. For single bit fields, we can
5102 convert EQ_EXPR to NE_EXPR so we need not reject the "wrong"
5103 comparison for one-bit fields. */
5105 enum tree_code wanted_code;
5106 enum tree_code lcode, rcode;
5107 tree ll_arg, lr_arg, rl_arg, rr_arg;
5108 tree ll_inner, lr_inner, rl_inner, rr_inner;
5109 HOST_WIDE_INT ll_bitsize, ll_bitpos, lr_bitsize, lr_bitpos;
5110 HOST_WIDE_INT rl_bitsize, rl_bitpos, rr_bitsize, rr_bitpos;
5111 HOST_WIDE_INT xll_bitpos, xlr_bitpos, xrl_bitpos, xrr_bitpos;
5112 HOST_WIDE_INT lnbitsize, lnbitpos, rnbitsize, rnbitpos;
5113 int ll_unsignedp, lr_unsignedp, rl_unsignedp, rr_unsignedp;
5114 enum machine_mode ll_mode, lr_mode, rl_mode, rr_mode;
5115 enum machine_mode lnmode, rnmode;
5116 tree ll_mask, lr_mask, rl_mask, rr_mask;
5117 tree ll_and_mask, lr_and_mask, rl_and_mask, rr_and_mask;
5118 tree l_const, r_const;
5119 tree lntype, rntype, result;
5120 HOST_WIDE_INT first_bit, end_bit;
5121 int volatilep;
5122 tree orig_lhs = lhs, orig_rhs = rhs;
5123 enum tree_code orig_code = code;
5125 /* Start by getting the comparison codes. Fail if anything is volatile.
5126 If one operand is a BIT_AND_EXPR with the constant one, treat it as if
5127 it were surrounded with a NE_EXPR. */
5129 if (TREE_SIDE_EFFECTS (lhs) || TREE_SIDE_EFFECTS (rhs))
5130 return 0;
5132 lcode = TREE_CODE (lhs);
5133 rcode = TREE_CODE (rhs);
5135 if (lcode == BIT_AND_EXPR && integer_onep (TREE_OPERAND (lhs, 1)))
5137 lhs = build2 (NE_EXPR, truth_type, lhs,
5138 build_int_cst (TREE_TYPE (lhs), 0));
5139 lcode = NE_EXPR;
5142 if (rcode == BIT_AND_EXPR && integer_onep (TREE_OPERAND (rhs, 1)))
5144 rhs = build2 (NE_EXPR, truth_type, rhs,
5145 build_int_cst (TREE_TYPE (rhs), 0));
5146 rcode = NE_EXPR;
5149 if (TREE_CODE_CLASS (lcode) != tcc_comparison
5150 || TREE_CODE_CLASS (rcode) != tcc_comparison)
5151 return 0;
5153 ll_arg = TREE_OPERAND (lhs, 0);
5154 lr_arg = TREE_OPERAND (lhs, 1);
5155 rl_arg = TREE_OPERAND (rhs, 0);
5156 rr_arg = TREE_OPERAND (rhs, 1);
5158 /* Simplify (x<y) && (x==y) into (x<=y) and related optimizations. */
5159 if (simple_operand_p (ll_arg)
5160 && simple_operand_p (lr_arg))
5162 tree result;
5163 if (operand_equal_p (ll_arg, rl_arg, 0)
5164 && operand_equal_p (lr_arg, rr_arg, 0))
5166 result = combine_comparisons (loc, code, lcode, rcode,
5167 truth_type, ll_arg, lr_arg);
5168 if (result)
5169 return result;
5171 else if (operand_equal_p (ll_arg, rr_arg, 0)
5172 && operand_equal_p (lr_arg, rl_arg, 0))
5174 result = combine_comparisons (loc, code, lcode,
5175 swap_tree_comparison (rcode),
5176 truth_type, ll_arg, lr_arg);
5177 if (result)
5178 return result;
5182 code = ((code == TRUTH_AND_EXPR || code == TRUTH_ANDIF_EXPR)
5183 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR);
5185 /* If the RHS can be evaluated unconditionally and its operands are
5186 simple, it wins to evaluate the RHS unconditionally on machines
5187 with expensive branches. In this case, this isn't a comparison
5188 that can be merged. Avoid doing this if the RHS is a floating-point
5189 comparison since those can trap. */
5191 if (BRANCH_COST (optimize_function_for_speed_p (cfun),
5192 false) >= 2
5193 && ! FLOAT_TYPE_P (TREE_TYPE (rl_arg))
5194 && simple_operand_p (rl_arg)
5195 && simple_operand_p (rr_arg))
5197 /* Convert (a != 0) || (b != 0) into (a | b) != 0. */
5198 if (code == TRUTH_OR_EXPR
5199 && lcode == NE_EXPR && integer_zerop (lr_arg)
5200 && rcode == NE_EXPR && integer_zerop (rr_arg)
5201 && TREE_TYPE (ll_arg) == TREE_TYPE (rl_arg)
5202 && INTEGRAL_TYPE_P (TREE_TYPE (ll_arg)))
5204 result = build2 (NE_EXPR, truth_type,
5205 build2 (BIT_IOR_EXPR, TREE_TYPE (ll_arg),
5206 ll_arg, rl_arg),
5207 build_int_cst (TREE_TYPE (ll_arg), 0));
5208 goto fold_truthop_exit;
5211 /* Convert (a == 0) && (b == 0) into (a | b) == 0. */
5212 if (code == TRUTH_AND_EXPR
5213 && lcode == EQ_EXPR && integer_zerop (lr_arg)
5214 && rcode == EQ_EXPR && integer_zerop (rr_arg)
5215 && TREE_TYPE (ll_arg) == TREE_TYPE (rl_arg)
5216 && INTEGRAL_TYPE_P (TREE_TYPE (ll_arg)))
5218 result = build2 (EQ_EXPR, truth_type,
5219 build2 (BIT_IOR_EXPR, TREE_TYPE (ll_arg),
5220 ll_arg, rl_arg),
5221 build_int_cst (TREE_TYPE (ll_arg), 0));
5222 goto fold_truthop_exit;
5225 if (LOGICAL_OP_NON_SHORT_CIRCUIT)
5227 if (code != orig_code || lhs != orig_lhs || rhs != orig_rhs)
5229 result = build2 (code, truth_type, lhs, rhs);
5230 goto fold_truthop_exit;
5232 return NULL_TREE;
5236 /* See if the comparisons can be merged. Then get all the parameters for
5237 each side. */
5239 if ((lcode != EQ_EXPR && lcode != NE_EXPR)
5240 || (rcode != EQ_EXPR && rcode != NE_EXPR))
5241 return 0;
5243 volatilep = 0;
5244 ll_inner = decode_field_reference (loc, ll_arg,
5245 &ll_bitsize, &ll_bitpos, &ll_mode,
5246 &ll_unsignedp, &volatilep, &ll_mask,
5247 &ll_and_mask);
5248 lr_inner = decode_field_reference (loc, lr_arg,
5249 &lr_bitsize, &lr_bitpos, &lr_mode,
5250 &lr_unsignedp, &volatilep, &lr_mask,
5251 &lr_and_mask);
5252 rl_inner = decode_field_reference (loc, rl_arg,
5253 &rl_bitsize, &rl_bitpos, &rl_mode,
5254 &rl_unsignedp, &volatilep, &rl_mask,
5255 &rl_and_mask);
5256 rr_inner = decode_field_reference (loc, rr_arg,
5257 &rr_bitsize, &rr_bitpos, &rr_mode,
5258 &rr_unsignedp, &volatilep, &rr_mask,
5259 &rr_and_mask);
5261 /* It must be true that the inner operation on the lhs of each
5262 comparison must be the same if we are to be able to do anything.
5263 Then see if we have constants. If not, the same must be true for
5264 the rhs's. */
5265 if (volatilep || ll_inner == 0 || rl_inner == 0
5266 || ! operand_equal_p (ll_inner, rl_inner, 0))
5267 return 0;
5269 if (TREE_CODE (lr_arg) == INTEGER_CST
5270 && TREE_CODE (rr_arg) == INTEGER_CST)
5271 l_const = lr_arg, r_const = rr_arg;
5272 else if (lr_inner == 0 || rr_inner == 0
5273 || ! operand_equal_p (lr_inner, rr_inner, 0))
5274 return 0;
5275 else
5276 l_const = r_const = 0;
5278 /* If either comparison code is not correct for our logical operation,
5279 fail. However, we can convert a one-bit comparison against zero into
5280 the opposite comparison against that bit being set in the field. */
5282 wanted_code = (code == TRUTH_AND_EXPR ? EQ_EXPR : NE_EXPR);
5283 if (lcode != wanted_code)
5285 if (l_const && integer_zerop (l_const) && integer_pow2p (ll_mask))
5287 /* Make the left operand unsigned, since we are only interested
5288 in the value of one bit. Otherwise we are doing the wrong
5289 thing below. */
5290 ll_unsignedp = 1;
5291 l_const = ll_mask;
5293 else
5294 return 0;
5297 /* This is analogous to the code for l_const above. */
5298 if (rcode != wanted_code)
5300 if (r_const && integer_zerop (r_const) && integer_pow2p (rl_mask))
5302 rl_unsignedp = 1;
5303 r_const = rl_mask;
5305 else
5306 return 0;
5309 /* See if we can find a mode that contains both fields being compared on
5310 the left. If we can't, fail. Otherwise, update all constants and masks
5311 to be relative to a field of that size. */
5312 first_bit = MIN (ll_bitpos, rl_bitpos);
5313 end_bit = MAX (ll_bitpos + ll_bitsize, rl_bitpos + rl_bitsize);
5314 lnmode = get_best_mode (end_bit - first_bit, first_bit,
5315 TYPE_ALIGN (TREE_TYPE (ll_inner)), word_mode,
5316 volatilep);
5317 if (lnmode == VOIDmode)
5318 return 0;
5320 lnbitsize = GET_MODE_BITSIZE (lnmode);
5321 lnbitpos = first_bit & ~ (lnbitsize - 1);
5322 lntype = lang_hooks.types.type_for_size (lnbitsize, 1);
5323 xll_bitpos = ll_bitpos - lnbitpos, xrl_bitpos = rl_bitpos - lnbitpos;
5325 if (BYTES_BIG_ENDIAN)
5327 xll_bitpos = lnbitsize - xll_bitpos - ll_bitsize;
5328 xrl_bitpos = lnbitsize - xrl_bitpos - rl_bitsize;
5331 ll_mask = const_binop (LSHIFT_EXPR, fold_convert_loc (loc, lntype, ll_mask),
5332 size_int (xll_bitpos), 0);
5333 rl_mask = const_binop (LSHIFT_EXPR, fold_convert_loc (loc, lntype, rl_mask),
5334 size_int (xrl_bitpos), 0);
5336 if (l_const)
5338 l_const = fold_convert_loc (loc, lntype, l_const);
5339 l_const = unextend (l_const, ll_bitsize, ll_unsignedp, ll_and_mask);
5340 l_const = const_binop (LSHIFT_EXPR, l_const, size_int (xll_bitpos), 0);
5341 if (! integer_zerop (const_binop (BIT_AND_EXPR, l_const,
5342 fold_build1_loc (loc, BIT_NOT_EXPR,
5343 lntype, ll_mask),
5344 0)))
5346 warning (0, "comparison is always %d", wanted_code == NE_EXPR);
5348 return constant_boolean_node (wanted_code == NE_EXPR, truth_type);
5351 if (r_const)
5353 r_const = fold_convert_loc (loc, lntype, r_const);
5354 r_const = unextend (r_const, rl_bitsize, rl_unsignedp, rl_and_mask);
5355 r_const = const_binop (LSHIFT_EXPR, r_const, size_int (xrl_bitpos), 0);
5356 if (! integer_zerop (const_binop (BIT_AND_EXPR, r_const,
5357 fold_build1_loc (loc, BIT_NOT_EXPR,
5358 lntype, rl_mask),
5359 0)))
5361 warning (0, "comparison is always %d", wanted_code == NE_EXPR);
5363 return constant_boolean_node (wanted_code == NE_EXPR, truth_type);
5367 /* If the right sides are not constant, do the same for it. Also,
5368 disallow this optimization if a size or signedness mismatch occurs
5369 between the left and right sides. */
5370 if (l_const == 0)
5372 if (ll_bitsize != lr_bitsize || rl_bitsize != rr_bitsize
5373 || ll_unsignedp != lr_unsignedp || rl_unsignedp != rr_unsignedp
5374 /* Make sure the two fields on the right
5375 correspond to the left without being swapped. */
5376 || ll_bitpos - rl_bitpos != lr_bitpos - rr_bitpos)
5377 return 0;
5379 first_bit = MIN (lr_bitpos, rr_bitpos);
5380 end_bit = MAX (lr_bitpos + lr_bitsize, rr_bitpos + rr_bitsize);
5381 rnmode = get_best_mode (end_bit - first_bit, first_bit,
5382 TYPE_ALIGN (TREE_TYPE (lr_inner)), word_mode,
5383 volatilep);
5384 if (rnmode == VOIDmode)
5385 return 0;
5387 rnbitsize = GET_MODE_BITSIZE (rnmode);
5388 rnbitpos = first_bit & ~ (rnbitsize - 1);
5389 rntype = lang_hooks.types.type_for_size (rnbitsize, 1);
5390 xlr_bitpos = lr_bitpos - rnbitpos, xrr_bitpos = rr_bitpos - rnbitpos;
5392 if (BYTES_BIG_ENDIAN)
5394 xlr_bitpos = rnbitsize - xlr_bitpos - lr_bitsize;
5395 xrr_bitpos = rnbitsize - xrr_bitpos - rr_bitsize;
5398 lr_mask = const_binop (LSHIFT_EXPR, fold_convert_loc (loc,
5399 rntype, lr_mask),
5400 size_int (xlr_bitpos), 0);
5401 rr_mask = const_binop (LSHIFT_EXPR, fold_convert_loc (loc,
5402 rntype, rr_mask),
5403 size_int (xrr_bitpos), 0);
5405 /* Make a mask that corresponds to both fields being compared.
5406 Do this for both items being compared. If the operands are the
5407 same size and the bits being compared are in the same position
5408 then we can do this by masking both and comparing the masked
5409 results. */
5410 ll_mask = const_binop (BIT_IOR_EXPR, ll_mask, rl_mask, 0);
5411 lr_mask = const_binop (BIT_IOR_EXPR, lr_mask, rr_mask, 0);
5412 if (lnbitsize == rnbitsize && xll_bitpos == xlr_bitpos)
5414 lhs = make_bit_field_ref (loc, ll_inner, lntype, lnbitsize, lnbitpos,
5415 ll_unsignedp || rl_unsignedp);
5416 if (! all_ones_mask_p (ll_mask, lnbitsize))
5417 lhs = build2 (BIT_AND_EXPR, lntype, lhs, ll_mask);
5419 rhs = make_bit_field_ref (loc, lr_inner, rntype, rnbitsize, rnbitpos,
5420 lr_unsignedp || rr_unsignedp);
5421 if (! all_ones_mask_p (lr_mask, rnbitsize))
5422 rhs = build2 (BIT_AND_EXPR, rntype, rhs, lr_mask);
5424 result = build2 (wanted_code, truth_type, lhs, rhs);
5425 goto fold_truthop_exit;
5428 /* There is still another way we can do something: If both pairs of
5429 fields being compared are adjacent, we may be able to make a wider
5430 field containing them both.
5432 Note that we still must mask the lhs/rhs expressions. Furthermore,
5433 the mask must be shifted to account for the shift done by
5434 make_bit_field_ref. */
5435 if ((ll_bitsize + ll_bitpos == rl_bitpos
5436 && lr_bitsize + lr_bitpos == rr_bitpos)
5437 || (ll_bitpos == rl_bitpos + rl_bitsize
5438 && lr_bitpos == rr_bitpos + rr_bitsize))
5440 tree type;
5442 lhs = make_bit_field_ref (loc, ll_inner, lntype,
5443 ll_bitsize + rl_bitsize,
5444 MIN (ll_bitpos, rl_bitpos), ll_unsignedp);
5445 rhs = make_bit_field_ref (loc, lr_inner, rntype,
5446 lr_bitsize + rr_bitsize,
5447 MIN (lr_bitpos, rr_bitpos), lr_unsignedp);
5449 ll_mask = const_binop (RSHIFT_EXPR, ll_mask,
5450 size_int (MIN (xll_bitpos, xrl_bitpos)), 0);
5451 lr_mask = const_binop (RSHIFT_EXPR, lr_mask,
5452 size_int (MIN (xlr_bitpos, xrr_bitpos)), 0);
5454 /* Convert to the smaller type before masking out unwanted bits. */
5455 type = lntype;
5456 if (lntype != rntype)
5458 if (lnbitsize > rnbitsize)
5460 lhs = fold_convert_loc (loc, rntype, lhs);
5461 ll_mask = fold_convert_loc (loc, rntype, ll_mask);
5462 type = rntype;
5464 else if (lnbitsize < rnbitsize)
5466 rhs = fold_convert_loc (loc, lntype, rhs);
5467 lr_mask = fold_convert_loc (loc, lntype, lr_mask);
5468 type = lntype;
5472 if (! all_ones_mask_p (ll_mask, ll_bitsize + rl_bitsize))
5473 lhs = build2 (BIT_AND_EXPR, type, lhs, ll_mask);
5475 if (! all_ones_mask_p (lr_mask, lr_bitsize + rr_bitsize))
5476 rhs = build2 (BIT_AND_EXPR, type, rhs, lr_mask);
5478 result = build2 (wanted_code, truth_type, lhs, rhs);
5479 goto fold_truthop_exit;
5482 return 0;
5485 /* Handle the case of comparisons with constants. If there is something in
5486 common between the masks, those bits of the constants must be the same.
5487 If not, the condition is always false. Test for this to avoid generating
5488 incorrect code below. */
5489 result = const_binop (BIT_AND_EXPR, ll_mask, rl_mask, 0);
5490 if (! integer_zerop (result)
5491 && simple_cst_equal (const_binop (BIT_AND_EXPR, result, l_const, 0),
5492 const_binop (BIT_AND_EXPR, result, r_const, 0)) != 1)
5494 if (wanted_code == NE_EXPR)
5496 warning (0, "%<or%> of unmatched not-equal tests is always 1");
5497 return constant_boolean_node (true, truth_type);
5499 else
5501 warning (0, "%<and%> of mutually exclusive equal-tests is always 0");
5502 return constant_boolean_node (false, truth_type);
5506 /* Construct the expression we will return. First get the component
5507 reference we will make. Unless the mask is all ones the width of
5508 that field, perform the mask operation. Then compare with the
5509 merged constant. */
5510 result = make_bit_field_ref (loc, ll_inner, lntype, lnbitsize, lnbitpos,
5511 ll_unsignedp || rl_unsignedp);
5513 ll_mask = const_binop (BIT_IOR_EXPR, ll_mask, rl_mask, 0);
5514 if (! all_ones_mask_p (ll_mask, lnbitsize))
5516 result = build2 (BIT_AND_EXPR, lntype, result, ll_mask);
5517 SET_EXPR_LOCATION (result, loc);
5520 result = build2 (wanted_code, truth_type, result,
5521 const_binop (BIT_IOR_EXPR, l_const, r_const, 0));
5523 fold_truthop_exit:
5524 SET_EXPR_LOCATION (result, loc);
5525 return result;
5528 /* Optimize T, which is a comparison of a MIN_EXPR or MAX_EXPR with a
5529 constant. */
5531 static tree
5532 optimize_minmax_comparison (location_t loc, enum tree_code code, tree type,
5533 tree op0, tree op1)
5535 tree arg0 = op0;
5536 enum tree_code op_code;
5537 tree comp_const;
5538 tree minmax_const;
5539 int consts_equal, consts_lt;
5540 tree inner;
5542 STRIP_SIGN_NOPS (arg0);
5544 op_code = TREE_CODE (arg0);
5545 minmax_const = TREE_OPERAND (arg0, 1);
5546 comp_const = fold_convert_loc (loc, TREE_TYPE (arg0), op1);
5547 consts_equal = tree_int_cst_equal (minmax_const, comp_const);
5548 consts_lt = tree_int_cst_lt (minmax_const, comp_const);
5549 inner = TREE_OPERAND (arg0, 0);
5551 /* If something does not permit us to optimize, return the original tree. */
5552 if ((op_code != MIN_EXPR && op_code != MAX_EXPR)
5553 || TREE_CODE (comp_const) != INTEGER_CST
5554 || TREE_OVERFLOW (comp_const)
5555 || TREE_CODE (minmax_const) != INTEGER_CST
5556 || TREE_OVERFLOW (minmax_const))
5557 return NULL_TREE;
5559 /* Now handle all the various comparison codes. We only handle EQ_EXPR
5560 and GT_EXPR, doing the rest with recursive calls using logical
5561 simplifications. */
5562 switch (code)
5564 case NE_EXPR: case LT_EXPR: case LE_EXPR:
5566 tree tem
5567 = optimize_minmax_comparison (loc,
5568 invert_tree_comparison (code, false),
5569 type, op0, op1);
5570 if (tem)
5571 return invert_truthvalue_loc (loc, tem);
5572 return NULL_TREE;
5575 case GE_EXPR:
5576 return
5577 fold_build2_loc (loc, TRUTH_ORIF_EXPR, type,
5578 optimize_minmax_comparison
5579 (loc, EQ_EXPR, type, arg0, comp_const),
5580 optimize_minmax_comparison
5581 (loc, GT_EXPR, type, arg0, comp_const));
5583 case EQ_EXPR:
5584 if (op_code == MAX_EXPR && consts_equal)
5585 /* MAX (X, 0) == 0 -> X <= 0 */
5586 return fold_build2_loc (loc, LE_EXPR, type, inner, comp_const);
5588 else if (op_code == MAX_EXPR && consts_lt)
5589 /* MAX (X, 0) == 5 -> X == 5 */
5590 return fold_build2_loc (loc, EQ_EXPR, type, inner, comp_const);
5592 else if (op_code == MAX_EXPR)
5593 /* MAX (X, 0) == -1 -> false */
5594 return omit_one_operand_loc (loc, type, integer_zero_node, inner);
5596 else if (consts_equal)
5597 /* MIN (X, 0) == 0 -> X >= 0 */
5598 return fold_build2_loc (loc, GE_EXPR, type, inner, comp_const);
5600 else if (consts_lt)
5601 /* MIN (X, 0) == 5 -> false */
5602 return omit_one_operand_loc (loc, type, integer_zero_node, inner);
5604 else
5605 /* MIN (X, 0) == -1 -> X == -1 */
5606 return fold_build2_loc (loc, EQ_EXPR, type, inner, comp_const);
5608 case GT_EXPR:
5609 if (op_code == MAX_EXPR && (consts_equal || consts_lt))
5610 /* MAX (X, 0) > 0 -> X > 0
5611 MAX (X, 0) > 5 -> X > 5 */
5612 return fold_build2_loc (loc, GT_EXPR, type, inner, comp_const);
5614 else if (op_code == MAX_EXPR)
5615 /* MAX (X, 0) > -1 -> true */
5616 return omit_one_operand_loc (loc, type, integer_one_node, inner);
5618 else if (op_code == MIN_EXPR && (consts_equal || consts_lt))
5619 /* MIN (X, 0) > 0 -> false
5620 MIN (X, 0) > 5 -> false */
5621 return omit_one_operand_loc (loc, type, integer_zero_node, inner);
5623 else
5624 /* MIN (X, 0) > -1 -> X > -1 */
5625 return fold_build2_loc (loc, GT_EXPR, type, inner, comp_const);
5627 default:
5628 return NULL_TREE;
5632 /* T is an integer expression that is being multiplied, divided, or taken a
5633 modulus (CODE says which and what kind of divide or modulus) by a
5634 constant C. See if we can eliminate that operation by folding it with
5635 other operations already in T. WIDE_TYPE, if non-null, is a type that
5636 should be used for the computation if wider than our type.
5638 For example, if we are dividing (X * 8) + (Y * 16) by 4, we can return
5639 (X * 2) + (Y * 4). We must, however, be assured that either the original
5640 expression would not overflow or that overflow is undefined for the type
5641 in the language in question.
5643 If we return a non-null expression, it is an equivalent form of the
5644 original computation, but need not be in the original type.
5646 We set *STRICT_OVERFLOW_P to true if the return values depends on
5647 signed overflow being undefined. Otherwise we do not change
5648 *STRICT_OVERFLOW_P. */
5650 static tree
5651 extract_muldiv (tree t, tree c, enum tree_code code, tree wide_type,
5652 bool *strict_overflow_p)
5654 /* To avoid exponential search depth, refuse to allow recursion past
5655 three levels. Beyond that (1) it's highly unlikely that we'll find
5656 something interesting and (2) we've probably processed it before
5657 when we built the inner expression. */
5659 static int depth;
5660 tree ret;
5662 if (depth > 3)
5663 return NULL;
5665 depth++;
5666 ret = extract_muldiv_1 (t, c, code, wide_type, strict_overflow_p);
5667 depth--;
5669 return ret;
5672 static tree
5673 extract_muldiv_1 (tree t, tree c, enum tree_code code, tree wide_type,
5674 bool *strict_overflow_p)
5676 tree type = TREE_TYPE (t);
5677 enum tree_code tcode = TREE_CODE (t);
5678 tree ctype = (wide_type != 0 && (GET_MODE_SIZE (TYPE_MODE (wide_type))
5679 > GET_MODE_SIZE (TYPE_MODE (type)))
5680 ? wide_type : type);
5681 tree t1, t2;
5682 int same_p = tcode == code;
5683 tree op0 = NULL_TREE, op1 = NULL_TREE;
5684 bool sub_strict_overflow_p;
5686 /* Don't deal with constants of zero here; they confuse the code below. */
5687 if (integer_zerop (c))
5688 return NULL_TREE;
5690 if (TREE_CODE_CLASS (tcode) == tcc_unary)
5691 op0 = TREE_OPERAND (t, 0);
5693 if (TREE_CODE_CLASS (tcode) == tcc_binary)
5694 op0 = TREE_OPERAND (t, 0), op1 = TREE_OPERAND (t, 1);
5696 /* Note that we need not handle conditional operations here since fold
5697 already handles those cases. So just do arithmetic here. */
5698 switch (tcode)
5700 case INTEGER_CST:
5701 /* For a constant, we can always simplify if we are a multiply
5702 or (for divide and modulus) if it is a multiple of our constant. */
5703 if (code == MULT_EXPR
5704 || integer_zerop (const_binop (TRUNC_MOD_EXPR, t, c, 0)))
5705 return const_binop (code, fold_convert (ctype, t),
5706 fold_convert (ctype, c), 0);
5707 break;
5709 CASE_CONVERT: case NON_LVALUE_EXPR:
5710 /* If op0 is an expression ... */
5711 if ((COMPARISON_CLASS_P (op0)
5712 || UNARY_CLASS_P (op0)
5713 || BINARY_CLASS_P (op0)
5714 || VL_EXP_CLASS_P (op0)
5715 || EXPRESSION_CLASS_P (op0))
5716 /* ... and has wrapping overflow, and its type is smaller
5717 than ctype, then we cannot pass through as widening. */
5718 && ((TYPE_OVERFLOW_WRAPS (TREE_TYPE (op0))
5719 && ! (TREE_CODE (TREE_TYPE (op0)) == INTEGER_TYPE
5720 && TYPE_IS_SIZETYPE (TREE_TYPE (op0)))
5721 && (TYPE_PRECISION (ctype)
5722 > TYPE_PRECISION (TREE_TYPE (op0))))
5723 /* ... or this is a truncation (t is narrower than op0),
5724 then we cannot pass through this narrowing. */
5725 || (TYPE_PRECISION (type)
5726 < TYPE_PRECISION (TREE_TYPE (op0)))
5727 /* ... or signedness changes for division or modulus,
5728 then we cannot pass through this conversion. */
5729 || (code != MULT_EXPR
5730 && (TYPE_UNSIGNED (ctype)
5731 != TYPE_UNSIGNED (TREE_TYPE (op0))))
5732 /* ... or has undefined overflow while the converted to
5733 type has not, we cannot do the operation in the inner type
5734 as that would introduce undefined overflow. */
5735 || (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (op0))
5736 && !TYPE_OVERFLOW_UNDEFINED (type))))
5737 break;
5739 /* Pass the constant down and see if we can make a simplification. If
5740 we can, replace this expression with the inner simplification for
5741 possible later conversion to our or some other type. */
5742 if ((t2 = fold_convert (TREE_TYPE (op0), c)) != 0
5743 && TREE_CODE (t2) == INTEGER_CST
5744 && !TREE_OVERFLOW (t2)
5745 && (0 != (t1 = extract_muldiv (op0, t2, code,
5746 code == MULT_EXPR
5747 ? ctype : NULL_TREE,
5748 strict_overflow_p))))
5749 return t1;
5750 break;
5752 case ABS_EXPR:
5753 /* If widening the type changes it from signed to unsigned, then we
5754 must avoid building ABS_EXPR itself as unsigned. */
5755 if (TYPE_UNSIGNED (ctype) && !TYPE_UNSIGNED (type))
5757 tree cstype = (*signed_type_for) (ctype);
5758 if ((t1 = extract_muldiv (op0, c, code, cstype, strict_overflow_p))
5759 != 0)
5761 t1 = fold_build1 (tcode, cstype, fold_convert (cstype, t1));
5762 return fold_convert (ctype, t1);
5764 break;
5766 /* If the constant is negative, we cannot simplify this. */
5767 if (tree_int_cst_sgn (c) == -1)
5768 break;
5769 /* FALLTHROUGH */
5770 case NEGATE_EXPR:
5771 if ((t1 = extract_muldiv (op0, c, code, wide_type, strict_overflow_p))
5772 != 0)
5773 return fold_build1 (tcode, ctype, fold_convert (ctype, t1));
5774 break;
5776 case MIN_EXPR: case MAX_EXPR:
5777 /* If widening the type changes the signedness, then we can't perform
5778 this optimization as that changes the result. */
5779 if (TYPE_UNSIGNED (ctype) != TYPE_UNSIGNED (type))
5780 break;
5782 /* MIN (a, b) / 5 -> MIN (a / 5, b / 5) */
5783 sub_strict_overflow_p = false;
5784 if ((t1 = extract_muldiv (op0, c, code, wide_type,
5785 &sub_strict_overflow_p)) != 0
5786 && (t2 = extract_muldiv (op1, c, code, wide_type,
5787 &sub_strict_overflow_p)) != 0)
5789 if (tree_int_cst_sgn (c) < 0)
5790 tcode = (tcode == MIN_EXPR ? MAX_EXPR : MIN_EXPR);
5791 if (sub_strict_overflow_p)
5792 *strict_overflow_p = true;
5793 return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
5794 fold_convert (ctype, t2));
5796 break;
5798 case LSHIFT_EXPR: case RSHIFT_EXPR:
5799 /* If the second operand is constant, this is a multiplication
5800 or floor division, by a power of two, so we can treat it that
5801 way unless the multiplier or divisor overflows. Signed
5802 left-shift overflow is implementation-defined rather than
5803 undefined in C90, so do not convert signed left shift into
5804 multiplication. */
5805 if (TREE_CODE (op1) == INTEGER_CST
5806 && (tcode == RSHIFT_EXPR || TYPE_UNSIGNED (TREE_TYPE (op0)))
5807 /* const_binop may not detect overflow correctly,
5808 so check for it explicitly here. */
5809 && TYPE_PRECISION (TREE_TYPE (size_one_node)) > TREE_INT_CST_LOW (op1)
5810 && TREE_INT_CST_HIGH (op1) == 0
5811 && 0 != (t1 = fold_convert (ctype,
5812 const_binop (LSHIFT_EXPR,
5813 size_one_node,
5814 op1, 0)))
5815 && !TREE_OVERFLOW (t1))
5816 return extract_muldiv (build2 (tcode == LSHIFT_EXPR
5817 ? MULT_EXPR : FLOOR_DIV_EXPR,
5818 ctype,
5819 fold_convert (ctype, op0),
5820 t1),
5821 c, code, wide_type, strict_overflow_p);
5822 break;
5824 case PLUS_EXPR: case MINUS_EXPR:
5825 /* See if we can eliminate the operation on both sides. If we can, we
5826 can return a new PLUS or MINUS. If we can't, the only remaining
5827 cases where we can do anything are if the second operand is a
5828 constant. */
5829 sub_strict_overflow_p = false;
5830 t1 = extract_muldiv (op0, c, code, wide_type, &sub_strict_overflow_p);
5831 t2 = extract_muldiv (op1, c, code, wide_type, &sub_strict_overflow_p);
5832 if (t1 != 0 && t2 != 0
5833 && (code == MULT_EXPR
5834 /* If not multiplication, we can only do this if both operands
5835 are divisible by c. */
5836 || (multiple_of_p (ctype, op0, c)
5837 && multiple_of_p (ctype, op1, c))))
5839 if (sub_strict_overflow_p)
5840 *strict_overflow_p = true;
5841 return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
5842 fold_convert (ctype, t2));
5845 /* If this was a subtraction, negate OP1 and set it to be an addition.
5846 This simplifies the logic below. */
5847 if (tcode == MINUS_EXPR)
5849 tcode = PLUS_EXPR, op1 = negate_expr (op1);
5850 /* If OP1 was not easily negatable, the constant may be OP0. */
5851 if (TREE_CODE (op0) == INTEGER_CST)
5853 tree tem = op0;
5854 op0 = op1;
5855 op1 = tem;
5856 tem = t1;
5857 t1 = t2;
5858 t2 = tem;
5862 if (TREE_CODE (op1) != INTEGER_CST)
5863 break;
5865 /* If either OP1 or C are negative, this optimization is not safe for
5866 some of the division and remainder types while for others we need
5867 to change the code. */
5868 if (tree_int_cst_sgn (op1) < 0 || tree_int_cst_sgn (c) < 0)
5870 if (code == CEIL_DIV_EXPR)
5871 code = FLOOR_DIV_EXPR;
5872 else if (code == FLOOR_DIV_EXPR)
5873 code = CEIL_DIV_EXPR;
5874 else if (code != MULT_EXPR
5875 && code != CEIL_MOD_EXPR && code != FLOOR_MOD_EXPR)
5876 break;
5879 /* If it's a multiply or a division/modulus operation of a multiple
5880 of our constant, do the operation and verify it doesn't overflow. */
5881 if (code == MULT_EXPR
5882 || integer_zerop (const_binop (TRUNC_MOD_EXPR, op1, c, 0)))
5884 op1 = const_binop (code, fold_convert (ctype, op1),
5885 fold_convert (ctype, c), 0);
5886 /* We allow the constant to overflow with wrapping semantics. */
5887 if (op1 == 0
5888 || (TREE_OVERFLOW (op1) && !TYPE_OVERFLOW_WRAPS (ctype)))
5889 break;
5891 else
5892 break;
5894 /* If we have an unsigned type is not a sizetype, we cannot widen
5895 the operation since it will change the result if the original
5896 computation overflowed. */
5897 if (TYPE_UNSIGNED (ctype)
5898 && ! (TREE_CODE (ctype) == INTEGER_TYPE && TYPE_IS_SIZETYPE (ctype))
5899 && ctype != type)
5900 break;
5902 /* If we were able to eliminate our operation from the first side,
5903 apply our operation to the second side and reform the PLUS. */
5904 if (t1 != 0 && (TREE_CODE (t1) != code || code == MULT_EXPR))
5905 return fold_build2 (tcode, ctype, fold_convert (ctype, t1), op1);
5907 /* The last case is if we are a multiply. In that case, we can
5908 apply the distributive law to commute the multiply and addition
5909 if the multiplication of the constants doesn't overflow. */
5910 if (code == MULT_EXPR)
5911 return fold_build2 (tcode, ctype,
5912 fold_build2 (code, ctype,
5913 fold_convert (ctype, op0),
5914 fold_convert (ctype, c)),
5915 op1);
5917 break;
5919 case MULT_EXPR:
5920 /* We have a special case here if we are doing something like
5921 (C * 8) % 4 since we know that's zero. */
5922 if ((code == TRUNC_MOD_EXPR || code == CEIL_MOD_EXPR
5923 || code == FLOOR_MOD_EXPR || code == ROUND_MOD_EXPR)
5924 /* If the multiplication can overflow we cannot optimize this.
5925 ??? Until we can properly mark individual operations as
5926 not overflowing we need to treat sizetype special here as
5927 stor-layout relies on this opimization to make
5928 DECL_FIELD_BIT_OFFSET always a constant. */
5929 && (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (t))
5930 || (TREE_CODE (TREE_TYPE (t)) == INTEGER_TYPE
5931 && TYPE_IS_SIZETYPE (TREE_TYPE (t))))
5932 && TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST
5933 && integer_zerop (const_binop (TRUNC_MOD_EXPR, op1, c, 0)))
5935 *strict_overflow_p = true;
5936 return omit_one_operand (type, integer_zero_node, op0);
5939 /* ... fall through ... */
5941 case TRUNC_DIV_EXPR: case CEIL_DIV_EXPR: case FLOOR_DIV_EXPR:
5942 case ROUND_DIV_EXPR: case EXACT_DIV_EXPR:
5943 /* If we can extract our operation from the LHS, do so and return a
5944 new operation. Likewise for the RHS from a MULT_EXPR. Otherwise,
5945 do something only if the second operand is a constant. */
5946 if (same_p
5947 && (t1 = extract_muldiv (op0, c, code, wide_type,
5948 strict_overflow_p)) != 0)
5949 return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
5950 fold_convert (ctype, op1));
5951 else if (tcode == MULT_EXPR && code == MULT_EXPR
5952 && (t1 = extract_muldiv (op1, c, code, wide_type,
5953 strict_overflow_p)) != 0)
5954 return fold_build2 (tcode, ctype, fold_convert (ctype, op0),
5955 fold_convert (ctype, t1));
5956 else if (TREE_CODE (op1) != INTEGER_CST)
5957 return 0;
5959 /* If these are the same operation types, we can associate them
5960 assuming no overflow. */
5961 if (tcode == code
5962 && 0 != (t1 = int_const_binop (MULT_EXPR,
5963 fold_convert (ctype, op1),
5964 fold_convert (ctype, c), 1))
5965 && 0 != (t1 = force_fit_type_double (ctype, TREE_INT_CST_LOW (t1),
5966 TREE_INT_CST_HIGH (t1),
5967 (TYPE_UNSIGNED (ctype)
5968 && tcode != MULT_EXPR) ? -1 : 1,
5969 TREE_OVERFLOW (t1)))
5970 && !TREE_OVERFLOW (t1))
5971 return fold_build2 (tcode, ctype, fold_convert (ctype, op0), t1);
5973 /* If these operations "cancel" each other, we have the main
5974 optimizations of this pass, which occur when either constant is a
5975 multiple of the other, in which case we replace this with either an
5976 operation or CODE or TCODE.
5978 If we have an unsigned type that is not a sizetype, we cannot do
5979 this since it will change the result if the original computation
5980 overflowed. */
5981 if ((TYPE_OVERFLOW_UNDEFINED (ctype)
5982 || (TREE_CODE (ctype) == INTEGER_TYPE && TYPE_IS_SIZETYPE (ctype)))
5983 && ((code == MULT_EXPR && tcode == EXACT_DIV_EXPR)
5984 || (tcode == MULT_EXPR
5985 && code != TRUNC_MOD_EXPR && code != CEIL_MOD_EXPR
5986 && code != FLOOR_MOD_EXPR && code != ROUND_MOD_EXPR
5987 && code != MULT_EXPR)))
5989 if (integer_zerop (const_binop (TRUNC_MOD_EXPR, op1, c, 0)))
5991 if (TYPE_OVERFLOW_UNDEFINED (ctype))
5992 *strict_overflow_p = true;
5993 return fold_build2 (tcode, ctype, fold_convert (ctype, op0),
5994 fold_convert (ctype,
5995 const_binop (TRUNC_DIV_EXPR,
5996 op1, c, 0)));
5998 else if (integer_zerop (const_binop (TRUNC_MOD_EXPR, c, op1, 0)))
6000 if (TYPE_OVERFLOW_UNDEFINED (ctype))
6001 *strict_overflow_p = true;
6002 return fold_build2 (code, ctype, fold_convert (ctype, op0),
6003 fold_convert (ctype,
6004 const_binop (TRUNC_DIV_EXPR,
6005 c, op1, 0)));
6008 break;
6010 default:
6011 break;
6014 return 0;
6017 /* Return a node which has the indicated constant VALUE (either 0 or
6018 1), and is of the indicated TYPE. */
6020 tree
6021 constant_boolean_node (int value, tree type)
6023 if (type == integer_type_node)
6024 return value ? integer_one_node : integer_zero_node;
6025 else if (type == boolean_type_node)
6026 return value ? boolean_true_node : boolean_false_node;
6027 else
6028 return build_int_cst (type, value);
6032 /* Transform `a + (b ? x : y)' into `b ? (a + x) : (a + y)'.
6033 Transform, `a + (x < y)' into `(x < y) ? (a + 1) : (a + 0)'. Here
6034 CODE corresponds to the `+', COND to the `(b ? x : y)' or `(x < y)'
6035 expression, and ARG to `a'. If COND_FIRST_P is nonzero, then the
6036 COND is the first argument to CODE; otherwise (as in the example
6037 given here), it is the second argument. TYPE is the type of the
6038 original expression. Return NULL_TREE if no simplification is
6039 possible. */
6041 static tree
6042 fold_binary_op_with_conditional_arg (location_t loc,
6043 enum tree_code code,
6044 tree type, tree op0, tree op1,
6045 tree cond, tree arg, int cond_first_p)
6047 tree cond_type = cond_first_p ? TREE_TYPE (op0) : TREE_TYPE (op1);
6048 tree arg_type = cond_first_p ? TREE_TYPE (op1) : TREE_TYPE (op0);
6049 tree test, true_value, false_value;
6050 tree lhs = NULL_TREE;
6051 tree rhs = NULL_TREE;
6053 if (TREE_CODE (cond) == COND_EXPR)
6055 test = TREE_OPERAND (cond, 0);
6056 true_value = TREE_OPERAND (cond, 1);
6057 false_value = TREE_OPERAND (cond, 2);
6058 /* If this operand throws an expression, then it does not make
6059 sense to try to perform a logical or arithmetic operation
6060 involving it. */
6061 if (VOID_TYPE_P (TREE_TYPE (true_value)))
6062 lhs = true_value;
6063 if (VOID_TYPE_P (TREE_TYPE (false_value)))
6064 rhs = false_value;
6066 else
6068 tree testtype = TREE_TYPE (cond);
6069 test = cond;
6070 true_value = constant_boolean_node (true, testtype);
6071 false_value = constant_boolean_node (false, testtype);
6074 /* This transformation is only worthwhile if we don't have to wrap ARG
6075 in a SAVE_EXPR and the operation can be simplified on at least one
6076 of the branches once its pushed inside the COND_EXPR. */
6077 if (!TREE_CONSTANT (arg)
6078 && (TREE_SIDE_EFFECTS (arg)
6079 || TREE_CONSTANT (true_value) || TREE_CONSTANT (false_value)))
6080 return NULL_TREE;
6082 arg = fold_convert_loc (loc, arg_type, arg);
6083 if (lhs == 0)
6085 true_value = fold_convert_loc (loc, cond_type, true_value);
6086 if (cond_first_p)
6087 lhs = fold_build2_loc (loc, code, type, true_value, arg);
6088 else
6089 lhs = fold_build2_loc (loc, code, type, arg, true_value);
6091 if (rhs == 0)
6093 false_value = fold_convert_loc (loc, cond_type, false_value);
6094 if (cond_first_p)
6095 rhs = fold_build2_loc (loc, code, type, false_value, arg);
6096 else
6097 rhs = fold_build2_loc (loc, code, type, arg, false_value);
6100 /* Check that we have simplified at least one of the branches. */
6101 if (!TREE_CONSTANT (arg) && !TREE_CONSTANT (lhs) && !TREE_CONSTANT (rhs))
6102 return NULL_TREE;
6104 return fold_build3_loc (loc, COND_EXPR, type, test, lhs, rhs);
6108 /* Subroutine of fold() that checks for the addition of +/- 0.0.
6110 If !NEGATE, return true if ADDEND is +/-0.0 and, for all X of type
6111 TYPE, X + ADDEND is the same as X. If NEGATE, return true if X -
6112 ADDEND is the same as X.
6114 X + 0 and X - 0 both give X when X is NaN, infinite, or nonzero
6115 and finite. The problematic cases are when X is zero, and its mode
6116 has signed zeros. In the case of rounding towards -infinity,
6117 X - 0 is not the same as X because 0 - 0 is -0. In other rounding
6118 modes, X + 0 is not the same as X because -0 + 0 is 0. */
6120 bool
6121 fold_real_zero_addition_p (const_tree type, const_tree addend, int negate)
6123 if (!real_zerop (addend))
6124 return false;
6126 /* Don't allow the fold with -fsignaling-nans. */
6127 if (HONOR_SNANS (TYPE_MODE (type)))
6128 return false;
6130 /* Allow the fold if zeros aren't signed, or their sign isn't important. */
6131 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (type)))
6132 return true;
6134 /* Treat x + -0 as x - 0 and x - -0 as x + 0. */
6135 if (TREE_CODE (addend) == REAL_CST
6136 && REAL_VALUE_MINUS_ZERO (TREE_REAL_CST (addend)))
6137 negate = !negate;
6139 /* The mode has signed zeros, and we have to honor their sign.
6140 In this situation, there is only one case we can return true for.
6141 X - 0 is the same as X unless rounding towards -infinity is
6142 supported. */
6143 return negate && !HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type));
6146 /* Subroutine of fold() that checks comparisons of built-in math
6147 functions against real constants.
6149 FCODE is the DECL_FUNCTION_CODE of the built-in, CODE is the comparison
6150 operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR, GE_EXPR or LE_EXPR. TYPE
6151 is the type of the result and ARG0 and ARG1 are the operands of the
6152 comparison. ARG1 must be a TREE_REAL_CST.
6154 The function returns the constant folded tree if a simplification
6155 can be made, and NULL_TREE otherwise. */
6157 static tree
6158 fold_mathfn_compare (location_t loc,
6159 enum built_in_function fcode, enum tree_code code,
6160 tree type, tree arg0, tree arg1)
6162 REAL_VALUE_TYPE c;
6164 if (BUILTIN_SQRT_P (fcode))
6166 tree arg = CALL_EXPR_ARG (arg0, 0);
6167 enum machine_mode mode = TYPE_MODE (TREE_TYPE (arg0));
6169 c = TREE_REAL_CST (arg1);
6170 if (REAL_VALUE_NEGATIVE (c))
6172 /* sqrt(x) < y is always false, if y is negative. */
6173 if (code == EQ_EXPR || code == LT_EXPR || code == LE_EXPR)
6174 return omit_one_operand_loc (loc, type, integer_zero_node, arg);
6176 /* sqrt(x) > y is always true, if y is negative and we
6177 don't care about NaNs, i.e. negative values of x. */
6178 if (code == NE_EXPR || !HONOR_NANS (mode))
6179 return omit_one_operand_loc (loc, type, integer_one_node, arg);
6181 /* sqrt(x) > y is the same as x >= 0, if y is negative. */
6182 return fold_build2_loc (loc, GE_EXPR, type, arg,
6183 build_real (TREE_TYPE (arg), dconst0));
6185 else if (code == GT_EXPR || code == GE_EXPR)
6187 REAL_VALUE_TYPE c2;
6189 REAL_ARITHMETIC (c2, MULT_EXPR, c, c);
6190 real_convert (&c2, mode, &c2);
6192 if (REAL_VALUE_ISINF (c2))
6194 /* sqrt(x) > y is x == +Inf, when y is very large. */
6195 if (HONOR_INFINITIES (mode))
6196 return fold_build2_loc (loc, EQ_EXPR, type, arg,
6197 build_real (TREE_TYPE (arg), c2));
6199 /* sqrt(x) > y is always false, when y is very large
6200 and we don't care about infinities. */
6201 return omit_one_operand_loc (loc, type, integer_zero_node, arg);
6204 /* sqrt(x) > c is the same as x > c*c. */
6205 return fold_build2_loc (loc, code, type, arg,
6206 build_real (TREE_TYPE (arg), c2));
6208 else if (code == LT_EXPR || code == LE_EXPR)
6210 REAL_VALUE_TYPE c2;
6212 REAL_ARITHMETIC (c2, MULT_EXPR, c, c);
6213 real_convert (&c2, mode, &c2);
6215 if (REAL_VALUE_ISINF (c2))
6217 /* sqrt(x) < y is always true, when y is a very large
6218 value and we don't care about NaNs or Infinities. */
6219 if (! HONOR_NANS (mode) && ! HONOR_INFINITIES (mode))
6220 return omit_one_operand_loc (loc, type, integer_one_node, arg);
6222 /* sqrt(x) < y is x != +Inf when y is very large and we
6223 don't care about NaNs. */
6224 if (! HONOR_NANS (mode))
6225 return fold_build2_loc (loc, NE_EXPR, type, arg,
6226 build_real (TREE_TYPE (arg), c2));
6228 /* sqrt(x) < y is x >= 0 when y is very large and we
6229 don't care about Infinities. */
6230 if (! HONOR_INFINITIES (mode))
6231 return fold_build2_loc (loc, GE_EXPR, type, arg,
6232 build_real (TREE_TYPE (arg), dconst0));
6234 /* sqrt(x) < y is x >= 0 && x != +Inf, when y is large. */
6235 if (lang_hooks.decls.global_bindings_p () != 0
6236 || CONTAINS_PLACEHOLDER_P (arg))
6237 return NULL_TREE;
6239 arg = save_expr (arg);
6240 return fold_build2_loc (loc, TRUTH_ANDIF_EXPR, type,
6241 fold_build2_loc (loc, GE_EXPR, type, arg,
6242 build_real (TREE_TYPE (arg),
6243 dconst0)),
6244 fold_build2_loc (loc, NE_EXPR, type, arg,
6245 build_real (TREE_TYPE (arg),
6246 c2)));
6249 /* sqrt(x) < c is the same as x < c*c, if we ignore NaNs. */
6250 if (! HONOR_NANS (mode))
6251 return fold_build2_loc (loc, code, type, arg,
6252 build_real (TREE_TYPE (arg), c2));
6254 /* sqrt(x) < c is the same as x >= 0 && x < c*c. */
6255 if (lang_hooks.decls.global_bindings_p () == 0
6256 && ! CONTAINS_PLACEHOLDER_P (arg))
6258 arg = save_expr (arg);
6259 return fold_build2_loc (loc, TRUTH_ANDIF_EXPR, type,
6260 fold_build2_loc (loc, GE_EXPR, type, arg,
6261 build_real (TREE_TYPE (arg),
6262 dconst0)),
6263 fold_build2_loc (loc, code, type, arg,
6264 build_real (TREE_TYPE (arg),
6265 c2)));
6270 return NULL_TREE;
6273 /* Subroutine of fold() that optimizes comparisons against Infinities,
6274 either +Inf or -Inf.
6276 CODE is the comparison operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR,
6277 GE_EXPR or LE_EXPR. TYPE is the type of the result and ARG0 and ARG1
6278 are the operands of the comparison. ARG1 must be a TREE_REAL_CST.
6280 The function returns the constant folded tree if a simplification
6281 can be made, and NULL_TREE otherwise. */
6283 static tree
6284 fold_inf_compare (location_t loc, enum tree_code code, tree type,
6285 tree arg0, tree arg1)
6287 enum machine_mode mode;
6288 REAL_VALUE_TYPE max;
6289 tree temp;
6290 bool neg;
6292 mode = TYPE_MODE (TREE_TYPE (arg0));
6294 /* For negative infinity swap the sense of the comparison. */
6295 neg = REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg1));
6296 if (neg)
6297 code = swap_tree_comparison (code);
6299 switch (code)
6301 case GT_EXPR:
6302 /* x > +Inf is always false, if with ignore sNANs. */
6303 if (HONOR_SNANS (mode))
6304 return NULL_TREE;
6305 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
6307 case LE_EXPR:
6308 /* x <= +Inf is always true, if we don't case about NaNs. */
6309 if (! HONOR_NANS (mode))
6310 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
6312 /* x <= +Inf is the same as x == x, i.e. isfinite(x). */
6313 if (lang_hooks.decls.global_bindings_p () == 0
6314 && ! CONTAINS_PLACEHOLDER_P (arg0))
6316 arg0 = save_expr (arg0);
6317 return fold_build2_loc (loc, EQ_EXPR, type, arg0, arg0);
6319 break;
6321 case EQ_EXPR:
6322 case GE_EXPR:
6323 /* x == +Inf and x >= +Inf are always equal to x > DBL_MAX. */
6324 real_maxval (&max, neg, mode);
6325 return fold_build2_loc (loc, neg ? LT_EXPR : GT_EXPR, type,
6326 arg0, build_real (TREE_TYPE (arg0), max));
6328 case LT_EXPR:
6329 /* x < +Inf is always equal to x <= DBL_MAX. */
6330 real_maxval (&max, neg, mode);
6331 return fold_build2_loc (loc, neg ? GE_EXPR : LE_EXPR, type,
6332 arg0, build_real (TREE_TYPE (arg0), max));
6334 case NE_EXPR:
6335 /* x != +Inf is always equal to !(x > DBL_MAX). */
6336 real_maxval (&max, neg, mode);
6337 if (! HONOR_NANS (mode))
6338 return fold_build2_loc (loc, neg ? GE_EXPR : LE_EXPR, type,
6339 arg0, build_real (TREE_TYPE (arg0), max));
6341 temp = fold_build2_loc (loc, neg ? LT_EXPR : GT_EXPR, type,
6342 arg0, build_real (TREE_TYPE (arg0), max));
6343 return fold_build1_loc (loc, TRUTH_NOT_EXPR, type, temp);
6345 default:
6346 break;
6349 return NULL_TREE;
6352 /* Subroutine of fold() that optimizes comparisons of a division by
6353 a nonzero integer constant against an integer constant, i.e.
6354 X/C1 op C2.
6356 CODE is the comparison operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR,
6357 GE_EXPR or LE_EXPR. TYPE is the type of the result and ARG0 and ARG1
6358 are the operands of the comparison. ARG1 must be a TREE_REAL_CST.
6360 The function returns the constant folded tree if a simplification
6361 can be made, and NULL_TREE otherwise. */
6363 static tree
6364 fold_div_compare (location_t loc,
6365 enum tree_code code, tree type, tree arg0, tree arg1)
6367 tree prod, tmp, hi, lo;
6368 tree arg00 = TREE_OPERAND (arg0, 0);
6369 tree arg01 = TREE_OPERAND (arg0, 1);
6370 unsigned HOST_WIDE_INT lpart;
6371 HOST_WIDE_INT hpart;
6372 bool unsigned_p = TYPE_UNSIGNED (TREE_TYPE (arg0));
6373 bool neg_overflow;
6374 int overflow;
6376 /* We have to do this the hard way to detect unsigned overflow.
6377 prod = int_const_binop (MULT_EXPR, arg01, arg1, 0); */
6378 overflow = mul_double_with_sign (TREE_INT_CST_LOW (arg01),
6379 TREE_INT_CST_HIGH (arg01),
6380 TREE_INT_CST_LOW (arg1),
6381 TREE_INT_CST_HIGH (arg1),
6382 &lpart, &hpart, unsigned_p);
6383 prod = force_fit_type_double (TREE_TYPE (arg00), lpart, hpart,
6384 -1, overflow);
6385 neg_overflow = false;
6387 if (unsigned_p)
6389 tmp = int_const_binop (MINUS_EXPR, arg01,
6390 build_int_cst (TREE_TYPE (arg01), 1), 0);
6391 lo = prod;
6393 /* Likewise hi = int_const_binop (PLUS_EXPR, prod, tmp, 0). */
6394 overflow = add_double_with_sign (TREE_INT_CST_LOW (prod),
6395 TREE_INT_CST_HIGH (prod),
6396 TREE_INT_CST_LOW (tmp),
6397 TREE_INT_CST_HIGH (tmp),
6398 &lpart, &hpart, unsigned_p);
6399 hi = force_fit_type_double (TREE_TYPE (arg00), lpart, hpart,
6400 -1, overflow | TREE_OVERFLOW (prod));
6402 else if (tree_int_cst_sgn (arg01) >= 0)
6404 tmp = int_const_binop (MINUS_EXPR, arg01,
6405 build_int_cst (TREE_TYPE (arg01), 1), 0);
6406 switch (tree_int_cst_sgn (arg1))
6408 case -1:
6409 neg_overflow = true;
6410 lo = int_const_binop (MINUS_EXPR, prod, tmp, 0);
6411 hi = prod;
6412 break;
6414 case 0:
6415 lo = fold_negate_const (tmp, TREE_TYPE (arg0));
6416 hi = tmp;
6417 break;
6419 case 1:
6420 hi = int_const_binop (PLUS_EXPR, prod, tmp, 0);
6421 lo = prod;
6422 break;
6424 default:
6425 gcc_unreachable ();
6428 else
6430 /* A negative divisor reverses the relational operators. */
6431 code = swap_tree_comparison (code);
6433 tmp = int_const_binop (PLUS_EXPR, arg01,
6434 build_int_cst (TREE_TYPE (arg01), 1), 0);
6435 switch (tree_int_cst_sgn (arg1))
6437 case -1:
6438 hi = int_const_binop (MINUS_EXPR, prod, tmp, 0);
6439 lo = prod;
6440 break;
6442 case 0:
6443 hi = fold_negate_const (tmp, TREE_TYPE (arg0));
6444 lo = tmp;
6445 break;
6447 case 1:
6448 neg_overflow = true;
6449 lo = int_const_binop (PLUS_EXPR, prod, tmp, 0);
6450 hi = prod;
6451 break;
6453 default:
6454 gcc_unreachable ();
6458 switch (code)
6460 case EQ_EXPR:
6461 if (TREE_OVERFLOW (lo) && TREE_OVERFLOW (hi))
6462 return omit_one_operand_loc (loc, type, integer_zero_node, arg00);
6463 if (TREE_OVERFLOW (hi))
6464 return fold_build2_loc (loc, GE_EXPR, type, arg00, lo);
6465 if (TREE_OVERFLOW (lo))
6466 return fold_build2_loc (loc, LE_EXPR, type, arg00, hi);
6467 return build_range_check (loc, type, arg00, 1, lo, hi);
6469 case NE_EXPR:
6470 if (TREE_OVERFLOW (lo) && TREE_OVERFLOW (hi))
6471 return omit_one_operand_loc (loc, type, integer_one_node, arg00);
6472 if (TREE_OVERFLOW (hi))
6473 return fold_build2_loc (loc, LT_EXPR, type, arg00, lo);
6474 if (TREE_OVERFLOW (lo))
6475 return fold_build2_loc (loc, GT_EXPR, type, arg00, hi);
6476 return build_range_check (loc, type, arg00, 0, lo, hi);
6478 case LT_EXPR:
6479 if (TREE_OVERFLOW (lo))
6481 tmp = neg_overflow ? integer_zero_node : integer_one_node;
6482 return omit_one_operand_loc (loc, type, tmp, arg00);
6484 return fold_build2_loc (loc, LT_EXPR, type, arg00, lo);
6486 case LE_EXPR:
6487 if (TREE_OVERFLOW (hi))
6489 tmp = neg_overflow ? integer_zero_node : integer_one_node;
6490 return omit_one_operand_loc (loc, type, tmp, arg00);
6492 return fold_build2_loc (loc, LE_EXPR, type, arg00, hi);
6494 case GT_EXPR:
6495 if (TREE_OVERFLOW (hi))
6497 tmp = neg_overflow ? integer_one_node : integer_zero_node;
6498 return omit_one_operand_loc (loc, type, tmp, arg00);
6500 return fold_build2_loc (loc, GT_EXPR, type, arg00, hi);
6502 case GE_EXPR:
6503 if (TREE_OVERFLOW (lo))
6505 tmp = neg_overflow ? integer_one_node : integer_zero_node;
6506 return omit_one_operand_loc (loc, type, tmp, arg00);
6508 return fold_build2_loc (loc, GE_EXPR, type, arg00, lo);
6510 default:
6511 break;
6514 return NULL_TREE;
6518 /* If CODE with arguments ARG0 and ARG1 represents a single bit
6519 equality/inequality test, then return a simplified form of the test
6520 using a sign testing. Otherwise return NULL. TYPE is the desired
6521 result type. */
6523 static tree
6524 fold_single_bit_test_into_sign_test (location_t loc,
6525 enum tree_code code, tree arg0, tree arg1,
6526 tree result_type)
6528 /* If this is testing a single bit, we can optimize the test. */
6529 if ((code == NE_EXPR || code == EQ_EXPR)
6530 && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
6531 && integer_pow2p (TREE_OPERAND (arg0, 1)))
6533 /* If we have (A & C) != 0 where C is the sign bit of A, convert
6534 this into A < 0. Similarly for (A & C) == 0 into A >= 0. */
6535 tree arg00 = sign_bit_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg0, 1));
6537 if (arg00 != NULL_TREE
6538 /* This is only a win if casting to a signed type is cheap,
6539 i.e. when arg00's type is not a partial mode. */
6540 && TYPE_PRECISION (TREE_TYPE (arg00))
6541 == GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (arg00))))
6543 tree stype = signed_type_for (TREE_TYPE (arg00));
6544 return fold_build2_loc (loc, code == EQ_EXPR ? GE_EXPR : LT_EXPR,
6545 result_type,
6546 fold_convert_loc (loc, stype, arg00),
6547 build_int_cst (stype, 0));
6551 return NULL_TREE;
6554 /* If CODE with arguments ARG0 and ARG1 represents a single bit
6555 equality/inequality test, then return a simplified form of
6556 the test using shifts and logical operations. Otherwise return
6557 NULL. TYPE is the desired result type. */
6559 tree
6560 fold_single_bit_test (location_t loc, enum tree_code code,
6561 tree arg0, tree arg1, tree result_type)
6563 /* If this is testing a single bit, we can optimize the test. */
6564 if ((code == NE_EXPR || code == EQ_EXPR)
6565 && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
6566 && integer_pow2p (TREE_OPERAND (arg0, 1)))
6568 tree inner = TREE_OPERAND (arg0, 0);
6569 tree type = TREE_TYPE (arg0);
6570 int bitnum = tree_log2 (TREE_OPERAND (arg0, 1));
6571 enum machine_mode operand_mode = TYPE_MODE (type);
6572 int ops_unsigned;
6573 tree signed_type, unsigned_type, intermediate_type;
6574 tree tem, one;
6576 /* First, see if we can fold the single bit test into a sign-bit
6577 test. */
6578 tem = fold_single_bit_test_into_sign_test (loc, code, arg0, arg1,
6579 result_type);
6580 if (tem)
6581 return tem;
6583 /* Otherwise we have (A & C) != 0 where C is a single bit,
6584 convert that into ((A >> C2) & 1). Where C2 = log2(C).
6585 Similarly for (A & C) == 0. */
6587 /* If INNER is a right shift of a constant and it plus BITNUM does
6588 not overflow, adjust BITNUM and INNER. */
6589 if (TREE_CODE (inner) == RSHIFT_EXPR
6590 && TREE_CODE (TREE_OPERAND (inner, 1)) == INTEGER_CST
6591 && TREE_INT_CST_HIGH (TREE_OPERAND (inner, 1)) == 0
6592 && bitnum < TYPE_PRECISION (type)
6593 && 0 > compare_tree_int (TREE_OPERAND (inner, 1),
6594 bitnum - TYPE_PRECISION (type)))
6596 bitnum += TREE_INT_CST_LOW (TREE_OPERAND (inner, 1));
6597 inner = TREE_OPERAND (inner, 0);
6600 /* If we are going to be able to omit the AND below, we must do our
6601 operations as unsigned. If we must use the AND, we have a choice.
6602 Normally unsigned is faster, but for some machines signed is. */
6603 #ifdef LOAD_EXTEND_OP
6604 ops_unsigned = (LOAD_EXTEND_OP (operand_mode) == SIGN_EXTEND
6605 && !flag_syntax_only) ? 0 : 1;
6606 #else
6607 ops_unsigned = 1;
6608 #endif
6610 signed_type = lang_hooks.types.type_for_mode (operand_mode, 0);
6611 unsigned_type = lang_hooks.types.type_for_mode (operand_mode, 1);
6612 intermediate_type = ops_unsigned ? unsigned_type : signed_type;
6613 inner = fold_convert_loc (loc, intermediate_type, inner);
6615 if (bitnum != 0)
6616 inner = build2 (RSHIFT_EXPR, intermediate_type,
6617 inner, size_int (bitnum));
6619 one = build_int_cst (intermediate_type, 1);
6621 if (code == EQ_EXPR)
6622 inner = fold_build2_loc (loc, BIT_XOR_EXPR, intermediate_type, inner, one);
6624 /* Put the AND last so it can combine with more things. */
6625 inner = build2 (BIT_AND_EXPR, intermediate_type, inner, one);
6627 /* Make sure to return the proper type. */
6628 inner = fold_convert_loc (loc, result_type, inner);
6630 return inner;
6632 return NULL_TREE;
6635 /* Check whether we are allowed to reorder operands arg0 and arg1,
6636 such that the evaluation of arg1 occurs before arg0. */
6638 static bool
6639 reorder_operands_p (const_tree arg0, const_tree arg1)
6641 if (! flag_evaluation_order)
6642 return true;
6643 if (TREE_CONSTANT (arg0) || TREE_CONSTANT (arg1))
6644 return true;
6645 return ! TREE_SIDE_EFFECTS (arg0)
6646 && ! TREE_SIDE_EFFECTS (arg1);
6649 /* Test whether it is preferable two swap two operands, ARG0 and
6650 ARG1, for example because ARG0 is an integer constant and ARG1
6651 isn't. If REORDER is true, only recommend swapping if we can
6652 evaluate the operands in reverse order. */
6654 bool
6655 tree_swap_operands_p (const_tree arg0, const_tree arg1, bool reorder)
6657 STRIP_SIGN_NOPS (arg0);
6658 STRIP_SIGN_NOPS (arg1);
6660 if (TREE_CODE (arg1) == INTEGER_CST)
6661 return 0;
6662 if (TREE_CODE (arg0) == INTEGER_CST)
6663 return 1;
6665 if (TREE_CODE (arg1) == REAL_CST)
6666 return 0;
6667 if (TREE_CODE (arg0) == REAL_CST)
6668 return 1;
6670 if (TREE_CODE (arg1) == FIXED_CST)
6671 return 0;
6672 if (TREE_CODE (arg0) == FIXED_CST)
6673 return 1;
6675 if (TREE_CODE (arg1) == COMPLEX_CST)
6676 return 0;
6677 if (TREE_CODE (arg0) == COMPLEX_CST)
6678 return 1;
6680 if (TREE_CONSTANT (arg1))
6681 return 0;
6682 if (TREE_CONSTANT (arg0))
6683 return 1;
6685 if (optimize_function_for_size_p (cfun))
6686 return 0;
6688 if (reorder && flag_evaluation_order
6689 && (TREE_SIDE_EFFECTS (arg0) || TREE_SIDE_EFFECTS (arg1)))
6690 return 0;
6692 /* It is preferable to swap two SSA_NAME to ensure a canonical form
6693 for commutative and comparison operators. Ensuring a canonical
6694 form allows the optimizers to find additional redundancies without
6695 having to explicitly check for both orderings. */
6696 if (TREE_CODE (arg0) == SSA_NAME
6697 && TREE_CODE (arg1) == SSA_NAME
6698 && SSA_NAME_VERSION (arg0) > SSA_NAME_VERSION (arg1))
6699 return 1;
6701 /* Put SSA_NAMEs last. */
6702 if (TREE_CODE (arg1) == SSA_NAME)
6703 return 0;
6704 if (TREE_CODE (arg0) == SSA_NAME)
6705 return 1;
6707 /* Put variables last. */
6708 if (DECL_P (arg1))
6709 return 0;
6710 if (DECL_P (arg0))
6711 return 1;
6713 return 0;
6716 /* Fold comparison ARG0 CODE ARG1 (with result in TYPE), where
6717 ARG0 is extended to a wider type. */
6719 static tree
6720 fold_widened_comparison (location_t loc, enum tree_code code,
6721 tree type, tree arg0, tree arg1)
6723 tree arg0_unw = get_unwidened (arg0, NULL_TREE);
6724 tree arg1_unw;
6725 tree shorter_type, outer_type;
6726 tree min, max;
6727 bool above, below;
6729 if (arg0_unw == arg0)
6730 return NULL_TREE;
6731 shorter_type = TREE_TYPE (arg0_unw);
6733 #ifdef HAVE_canonicalize_funcptr_for_compare
6734 /* Disable this optimization if we're casting a function pointer
6735 type on targets that require function pointer canonicalization. */
6736 if (HAVE_canonicalize_funcptr_for_compare
6737 && TREE_CODE (shorter_type) == POINTER_TYPE
6738 && TREE_CODE (TREE_TYPE (shorter_type)) == FUNCTION_TYPE)
6739 return NULL_TREE;
6740 #endif
6742 if (TYPE_PRECISION (TREE_TYPE (arg0)) <= TYPE_PRECISION (shorter_type))
6743 return NULL_TREE;
6745 arg1_unw = get_unwidened (arg1, NULL_TREE);
6747 /* If possible, express the comparison in the shorter mode. */
6748 if ((code == EQ_EXPR || code == NE_EXPR
6749 || TYPE_UNSIGNED (TREE_TYPE (arg0)) == TYPE_UNSIGNED (shorter_type))
6750 && (TREE_TYPE (arg1_unw) == shorter_type
6751 || ((TYPE_PRECISION (shorter_type)
6752 >= TYPE_PRECISION (TREE_TYPE (arg1_unw)))
6753 && (TYPE_UNSIGNED (shorter_type)
6754 == TYPE_UNSIGNED (TREE_TYPE (arg1_unw))))
6755 || (TREE_CODE (arg1_unw) == INTEGER_CST
6756 && (TREE_CODE (shorter_type) == INTEGER_TYPE
6757 || TREE_CODE (shorter_type) == BOOLEAN_TYPE)
6758 && int_fits_type_p (arg1_unw, shorter_type))))
6759 return fold_build2_loc (loc, code, type, arg0_unw,
6760 fold_convert_loc (loc, shorter_type, arg1_unw));
6762 if (TREE_CODE (arg1_unw) != INTEGER_CST
6763 || TREE_CODE (shorter_type) != INTEGER_TYPE
6764 || !int_fits_type_p (arg1_unw, shorter_type))
6765 return NULL_TREE;
6767 /* If we are comparing with the integer that does not fit into the range
6768 of the shorter type, the result is known. */
6769 outer_type = TREE_TYPE (arg1_unw);
6770 min = lower_bound_in_type (outer_type, shorter_type);
6771 max = upper_bound_in_type (outer_type, shorter_type);
6773 above = integer_nonzerop (fold_relational_const (LT_EXPR, type,
6774 max, arg1_unw));
6775 below = integer_nonzerop (fold_relational_const (LT_EXPR, type,
6776 arg1_unw, min));
6778 switch (code)
6780 case EQ_EXPR:
6781 if (above || below)
6782 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
6783 break;
6785 case NE_EXPR:
6786 if (above || below)
6787 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
6788 break;
6790 case LT_EXPR:
6791 case LE_EXPR:
6792 if (above)
6793 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
6794 else if (below)
6795 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
6797 case GT_EXPR:
6798 case GE_EXPR:
6799 if (above)
6800 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
6801 else if (below)
6802 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
6804 default:
6805 break;
6808 return NULL_TREE;
6811 /* Fold comparison ARG0 CODE ARG1 (with result in TYPE), where for
6812 ARG0 just the signedness is changed. */
6814 static tree
6815 fold_sign_changed_comparison (location_t loc, enum tree_code code, tree type,
6816 tree arg0, tree arg1)
6818 tree arg0_inner;
6819 tree inner_type, outer_type;
6821 if (!CONVERT_EXPR_P (arg0))
6822 return NULL_TREE;
6824 outer_type = TREE_TYPE (arg0);
6825 arg0_inner = TREE_OPERAND (arg0, 0);
6826 inner_type = TREE_TYPE (arg0_inner);
6828 #ifdef HAVE_canonicalize_funcptr_for_compare
6829 /* Disable this optimization if we're casting a function pointer
6830 type on targets that require function pointer canonicalization. */
6831 if (HAVE_canonicalize_funcptr_for_compare
6832 && TREE_CODE (inner_type) == POINTER_TYPE
6833 && TREE_CODE (TREE_TYPE (inner_type)) == FUNCTION_TYPE)
6834 return NULL_TREE;
6835 #endif
6837 if (TYPE_PRECISION (inner_type) != TYPE_PRECISION (outer_type))
6838 return NULL_TREE;
6840 if (TREE_CODE (arg1) != INTEGER_CST
6841 && !(CONVERT_EXPR_P (arg1)
6842 && TREE_TYPE (TREE_OPERAND (arg1, 0)) == inner_type))
6843 return NULL_TREE;
6845 if ((TYPE_UNSIGNED (inner_type) != TYPE_UNSIGNED (outer_type)
6846 || POINTER_TYPE_P (inner_type) != POINTER_TYPE_P (outer_type))
6847 && code != NE_EXPR
6848 && code != EQ_EXPR)
6849 return NULL_TREE;
6851 if (TREE_CODE (arg1) == INTEGER_CST)
6852 arg1 = force_fit_type_double (inner_type, TREE_INT_CST_LOW (arg1),
6853 TREE_INT_CST_HIGH (arg1), 0,
6854 TREE_OVERFLOW (arg1));
6855 else
6856 arg1 = fold_convert_loc (loc, inner_type, arg1);
6858 return fold_build2_loc (loc, code, type, arg0_inner, arg1);
6861 /* Tries to replace &a[idx] p+ s * delta with &a[idx + delta], if s is
6862 step of the array. Reconstructs s and delta in the case of s *
6863 delta being an integer constant (and thus already folded). ADDR is
6864 the address. MULT is the multiplicative expression. If the
6865 function succeeds, the new address expression is returned.
6866 Otherwise NULL_TREE is returned. LOC is the location of the
6867 resulting expression. */
6869 static tree
6870 try_move_mult_to_index (location_t loc, tree addr, tree op1)
6872 tree s, delta, step;
6873 tree ref = TREE_OPERAND (addr, 0), pref;
6874 tree ret, pos;
6875 tree itype;
6876 bool mdim = false;
6878 /* Strip the nops that might be added when converting op1 to sizetype. */
6879 STRIP_NOPS (op1);
6881 /* Canonicalize op1 into a possibly non-constant delta
6882 and an INTEGER_CST s. */
6883 if (TREE_CODE (op1) == MULT_EXPR)
6885 tree arg0 = TREE_OPERAND (op1, 0), arg1 = TREE_OPERAND (op1, 1);
6887 STRIP_NOPS (arg0);
6888 STRIP_NOPS (arg1);
6890 if (TREE_CODE (arg0) == INTEGER_CST)
6892 s = arg0;
6893 delta = arg1;
6895 else if (TREE_CODE (arg1) == INTEGER_CST)
6897 s = arg1;
6898 delta = arg0;
6900 else
6901 return NULL_TREE;
6903 else if (TREE_CODE (op1) == INTEGER_CST)
6905 delta = op1;
6906 s = NULL_TREE;
6908 else
6910 /* Simulate we are delta * 1. */
6911 delta = op1;
6912 s = integer_one_node;
6915 for (;; ref = TREE_OPERAND (ref, 0))
6917 if (TREE_CODE (ref) == ARRAY_REF)
6919 tree domain;
6921 /* Remember if this was a multi-dimensional array. */
6922 if (TREE_CODE (TREE_OPERAND (ref, 0)) == ARRAY_REF)
6923 mdim = true;
6925 domain = TYPE_DOMAIN (TREE_TYPE (TREE_OPERAND (ref, 0)));
6926 if (! domain)
6927 continue;
6928 itype = TREE_TYPE (domain);
6930 step = array_ref_element_size (ref);
6931 if (TREE_CODE (step) != INTEGER_CST)
6932 continue;
6934 if (s)
6936 if (! tree_int_cst_equal (step, s))
6937 continue;
6939 else
6941 /* Try if delta is a multiple of step. */
6942 tree tmp = div_if_zero_remainder (EXACT_DIV_EXPR, op1, step);
6943 if (! tmp)
6944 continue;
6945 delta = tmp;
6948 /* Only fold here if we can verify we do not overflow one
6949 dimension of a multi-dimensional array. */
6950 if (mdim)
6952 tree tmp;
6954 if (TREE_CODE (TREE_OPERAND (ref, 1)) != INTEGER_CST
6955 || !TYPE_MAX_VALUE (domain)
6956 || TREE_CODE (TYPE_MAX_VALUE (domain)) != INTEGER_CST)
6957 continue;
6959 tmp = fold_binary_loc (loc, PLUS_EXPR, itype,
6960 fold_convert_loc (loc, itype,
6961 TREE_OPERAND (ref, 1)),
6962 fold_convert_loc (loc, itype, delta));
6963 if (!tmp
6964 || TREE_CODE (tmp) != INTEGER_CST
6965 || tree_int_cst_lt (TYPE_MAX_VALUE (domain), tmp))
6966 continue;
6969 break;
6971 else
6972 mdim = false;
6974 if (!handled_component_p (ref))
6975 return NULL_TREE;
6978 /* We found the suitable array reference. So copy everything up to it,
6979 and replace the index. */
6981 pref = TREE_OPERAND (addr, 0);
6982 ret = copy_node (pref);
6983 SET_EXPR_LOCATION (ret, loc);
6984 pos = ret;
6986 while (pref != ref)
6988 pref = TREE_OPERAND (pref, 0);
6989 TREE_OPERAND (pos, 0) = copy_node (pref);
6990 pos = TREE_OPERAND (pos, 0);
6993 TREE_OPERAND (pos, 1) = fold_build2_loc (loc, PLUS_EXPR, itype,
6994 fold_convert_loc (loc, itype,
6995 TREE_OPERAND (pos, 1)),
6996 fold_convert_loc (loc, itype, delta));
6998 return fold_build1_loc (loc, ADDR_EXPR, TREE_TYPE (addr), ret);
7002 /* Fold A < X && A + 1 > Y to A < X && A >= Y. Normally A + 1 > Y
7003 means A >= Y && A != MAX, but in this case we know that
7004 A < X <= MAX. INEQ is A + 1 > Y, BOUND is A < X. */
7006 static tree
7007 fold_to_nonsharp_ineq_using_bound (location_t loc, tree ineq, tree bound)
7009 tree a, typea, type = TREE_TYPE (ineq), a1, diff, y;
7011 if (TREE_CODE (bound) == LT_EXPR)
7012 a = TREE_OPERAND (bound, 0);
7013 else if (TREE_CODE (bound) == GT_EXPR)
7014 a = TREE_OPERAND (bound, 1);
7015 else
7016 return NULL_TREE;
7018 typea = TREE_TYPE (a);
7019 if (!INTEGRAL_TYPE_P (typea)
7020 && !POINTER_TYPE_P (typea))
7021 return NULL_TREE;
7023 if (TREE_CODE (ineq) == LT_EXPR)
7025 a1 = TREE_OPERAND (ineq, 1);
7026 y = TREE_OPERAND (ineq, 0);
7028 else if (TREE_CODE (ineq) == GT_EXPR)
7030 a1 = TREE_OPERAND (ineq, 0);
7031 y = TREE_OPERAND (ineq, 1);
7033 else
7034 return NULL_TREE;
7036 if (TREE_TYPE (a1) != typea)
7037 return NULL_TREE;
7039 if (POINTER_TYPE_P (typea))
7041 /* Convert the pointer types into integer before taking the difference. */
7042 tree ta = fold_convert_loc (loc, ssizetype, a);
7043 tree ta1 = fold_convert_loc (loc, ssizetype, a1);
7044 diff = fold_binary_loc (loc, MINUS_EXPR, ssizetype, ta1, ta);
7046 else
7047 diff = fold_binary_loc (loc, MINUS_EXPR, typea, a1, a);
7049 if (!diff || !integer_onep (diff))
7050 return NULL_TREE;
7052 return fold_build2_loc (loc, GE_EXPR, type, a, y);
7055 /* Fold a sum or difference of at least one multiplication.
7056 Returns the folded tree or NULL if no simplification could be made. */
7058 static tree
7059 fold_plusminus_mult_expr (location_t loc, enum tree_code code, tree type,
7060 tree arg0, tree arg1)
7062 tree arg00, arg01, arg10, arg11;
7063 tree alt0 = NULL_TREE, alt1 = NULL_TREE, same;
7065 /* (A * C) +- (B * C) -> (A+-B) * C.
7066 (A * C) +- A -> A * (C+-1).
7067 We are most concerned about the case where C is a constant,
7068 but other combinations show up during loop reduction. Since
7069 it is not difficult, try all four possibilities. */
7071 if (TREE_CODE (arg0) == MULT_EXPR)
7073 arg00 = TREE_OPERAND (arg0, 0);
7074 arg01 = TREE_OPERAND (arg0, 1);
7076 else if (TREE_CODE (arg0) == INTEGER_CST)
7078 arg00 = build_one_cst (type);
7079 arg01 = arg0;
7081 else
7083 /* We cannot generate constant 1 for fract. */
7084 if (ALL_FRACT_MODE_P (TYPE_MODE (type)))
7085 return NULL_TREE;
7086 arg00 = arg0;
7087 arg01 = build_one_cst (type);
7089 if (TREE_CODE (arg1) == MULT_EXPR)
7091 arg10 = TREE_OPERAND (arg1, 0);
7092 arg11 = TREE_OPERAND (arg1, 1);
7094 else if (TREE_CODE (arg1) == INTEGER_CST)
7096 arg10 = build_one_cst (type);
7097 /* As we canonicalize A - 2 to A + -2 get rid of that sign for
7098 the purpose of this canonicalization. */
7099 if (TREE_INT_CST_HIGH (arg1) == -1
7100 && negate_expr_p (arg1)
7101 && code == PLUS_EXPR)
7103 arg11 = negate_expr (arg1);
7104 code = MINUS_EXPR;
7106 else
7107 arg11 = arg1;
7109 else
7111 /* We cannot generate constant 1 for fract. */
7112 if (ALL_FRACT_MODE_P (TYPE_MODE (type)))
7113 return NULL_TREE;
7114 arg10 = arg1;
7115 arg11 = build_one_cst (type);
7117 same = NULL_TREE;
7119 if (operand_equal_p (arg01, arg11, 0))
7120 same = arg01, alt0 = arg00, alt1 = arg10;
7121 else if (operand_equal_p (arg00, arg10, 0))
7122 same = arg00, alt0 = arg01, alt1 = arg11;
7123 else if (operand_equal_p (arg00, arg11, 0))
7124 same = arg00, alt0 = arg01, alt1 = arg10;
7125 else if (operand_equal_p (arg01, arg10, 0))
7126 same = arg01, alt0 = arg00, alt1 = arg11;
7128 /* No identical multiplicands; see if we can find a common
7129 power-of-two factor in non-power-of-two multiplies. This
7130 can help in multi-dimensional array access. */
7131 else if (host_integerp (arg01, 0)
7132 && host_integerp (arg11, 0))
7134 HOST_WIDE_INT int01, int11, tmp;
7135 bool swap = false;
7136 tree maybe_same;
7137 int01 = TREE_INT_CST_LOW (arg01);
7138 int11 = TREE_INT_CST_LOW (arg11);
7140 /* Move min of absolute values to int11. */
7141 if ((int01 >= 0 ? int01 : -int01)
7142 < (int11 >= 0 ? int11 : -int11))
7144 tmp = int01, int01 = int11, int11 = tmp;
7145 alt0 = arg00, arg00 = arg10, arg10 = alt0;
7146 maybe_same = arg01;
7147 swap = true;
7149 else
7150 maybe_same = arg11;
7152 if (exact_log2 (abs (int11)) > 0 && int01 % int11 == 0
7153 /* The remainder should not be a constant, otherwise we
7154 end up folding i * 4 + 2 to (i * 2 + 1) * 2 which has
7155 increased the number of multiplications necessary. */
7156 && TREE_CODE (arg10) != INTEGER_CST)
7158 alt0 = fold_build2_loc (loc, MULT_EXPR, TREE_TYPE (arg00), arg00,
7159 build_int_cst (TREE_TYPE (arg00),
7160 int01 / int11));
7161 alt1 = arg10;
7162 same = maybe_same;
7163 if (swap)
7164 maybe_same = alt0, alt0 = alt1, alt1 = maybe_same;
7168 if (same)
7169 return fold_build2_loc (loc, MULT_EXPR, type,
7170 fold_build2_loc (loc, code, type,
7171 fold_convert_loc (loc, type, alt0),
7172 fold_convert_loc (loc, type, alt1)),
7173 fold_convert_loc (loc, type, same));
7175 return NULL_TREE;
7178 /* Subroutine of native_encode_expr. Encode the INTEGER_CST
7179 specified by EXPR into the buffer PTR of length LEN bytes.
7180 Return the number of bytes placed in the buffer, or zero
7181 upon failure. */
7183 static int
7184 native_encode_int (const_tree expr, unsigned char *ptr, int len)
7186 tree type = TREE_TYPE (expr);
7187 int total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
7188 int byte, offset, word, words;
7189 unsigned char value;
7191 if (total_bytes > len)
7192 return 0;
7193 words = total_bytes / UNITS_PER_WORD;
7195 for (byte = 0; byte < total_bytes; byte++)
7197 int bitpos = byte * BITS_PER_UNIT;
7198 if (bitpos < HOST_BITS_PER_WIDE_INT)
7199 value = (unsigned char) (TREE_INT_CST_LOW (expr) >> bitpos);
7200 else
7201 value = (unsigned char) (TREE_INT_CST_HIGH (expr)
7202 >> (bitpos - HOST_BITS_PER_WIDE_INT));
7204 if (total_bytes > UNITS_PER_WORD)
7206 word = byte / UNITS_PER_WORD;
7207 if (WORDS_BIG_ENDIAN)
7208 word = (words - 1) - word;
7209 offset = word * UNITS_PER_WORD;
7210 if (BYTES_BIG_ENDIAN)
7211 offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
7212 else
7213 offset += byte % UNITS_PER_WORD;
7215 else
7216 offset = BYTES_BIG_ENDIAN ? (total_bytes - 1) - byte : byte;
7217 ptr[offset] = value;
7219 return total_bytes;
7223 /* Subroutine of native_encode_expr. Encode the REAL_CST
7224 specified by EXPR into the buffer PTR of length LEN bytes.
7225 Return the number of bytes placed in the buffer, or zero
7226 upon failure. */
7228 static int
7229 native_encode_real (const_tree expr, unsigned char *ptr, int len)
7231 tree type = TREE_TYPE (expr);
7232 int total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
7233 int byte, offset, word, words, bitpos;
7234 unsigned char value;
7236 /* There are always 32 bits in each long, no matter the size of
7237 the hosts long. We handle floating point representations with
7238 up to 192 bits. */
7239 long tmp[6];
7241 if (total_bytes > len)
7242 return 0;
7243 words = (32 / BITS_PER_UNIT) / UNITS_PER_WORD;
7245 real_to_target (tmp, TREE_REAL_CST_PTR (expr), TYPE_MODE (type));
7247 for (bitpos = 0; bitpos < total_bytes * BITS_PER_UNIT;
7248 bitpos += BITS_PER_UNIT)
7250 byte = (bitpos / BITS_PER_UNIT) & 3;
7251 value = (unsigned char) (tmp[bitpos / 32] >> (bitpos & 31));
7253 if (UNITS_PER_WORD < 4)
7255 word = byte / UNITS_PER_WORD;
7256 if (WORDS_BIG_ENDIAN)
7257 word = (words - 1) - word;
7258 offset = word * UNITS_PER_WORD;
7259 if (BYTES_BIG_ENDIAN)
7260 offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
7261 else
7262 offset += byte % UNITS_PER_WORD;
7264 else
7265 offset = BYTES_BIG_ENDIAN ? 3 - byte : byte;
7266 ptr[offset + ((bitpos / BITS_PER_UNIT) & ~3)] = value;
7268 return total_bytes;
7271 /* Subroutine of native_encode_expr. Encode the COMPLEX_CST
7272 specified by EXPR into the buffer PTR of length LEN bytes.
7273 Return the number of bytes placed in the buffer, or zero
7274 upon failure. */
7276 static int
7277 native_encode_complex (const_tree expr, unsigned char *ptr, int len)
7279 int rsize, isize;
7280 tree part;
7282 part = TREE_REALPART (expr);
7283 rsize = native_encode_expr (part, ptr, len);
7284 if (rsize == 0)
7285 return 0;
7286 part = TREE_IMAGPART (expr);
7287 isize = native_encode_expr (part, ptr+rsize, len-rsize);
7288 if (isize != rsize)
7289 return 0;
7290 return rsize + isize;
7294 /* Subroutine of native_encode_expr. Encode the VECTOR_CST
7295 specified by EXPR into the buffer PTR of length LEN bytes.
7296 Return the number of bytes placed in the buffer, or zero
7297 upon failure. */
7299 static int
7300 native_encode_vector (const_tree expr, unsigned char *ptr, int len)
7302 int i, size, offset, count;
7303 tree itype, elem, elements;
7305 offset = 0;
7306 elements = TREE_VECTOR_CST_ELTS (expr);
7307 count = TYPE_VECTOR_SUBPARTS (TREE_TYPE (expr));
7308 itype = TREE_TYPE (TREE_TYPE (expr));
7309 size = GET_MODE_SIZE (TYPE_MODE (itype));
7310 for (i = 0; i < count; i++)
7312 if (elements)
7314 elem = TREE_VALUE (elements);
7315 elements = TREE_CHAIN (elements);
7317 else
7318 elem = NULL_TREE;
7320 if (elem)
7322 if (native_encode_expr (elem, ptr+offset, len-offset) != size)
7323 return 0;
7325 else
7327 if (offset + size > len)
7328 return 0;
7329 memset (ptr+offset, 0, size);
7331 offset += size;
7333 return offset;
7337 /* Subroutine of native_encode_expr. Encode the STRING_CST
7338 specified by EXPR into the buffer PTR of length LEN bytes.
7339 Return the number of bytes placed in the buffer, or zero
7340 upon failure. */
7342 static int
7343 native_encode_string (const_tree expr, unsigned char *ptr, int len)
7345 tree type = TREE_TYPE (expr);
7346 HOST_WIDE_INT total_bytes;
7348 if (TREE_CODE (type) != ARRAY_TYPE
7349 || TREE_CODE (TREE_TYPE (type)) != INTEGER_TYPE
7350 || GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (type))) != BITS_PER_UNIT
7351 || !host_integerp (TYPE_SIZE_UNIT (type), 0))
7352 return 0;
7353 total_bytes = tree_low_cst (TYPE_SIZE_UNIT (type), 0);
7354 if (total_bytes > len)
7355 return 0;
7356 if (TREE_STRING_LENGTH (expr) < total_bytes)
7358 memcpy (ptr, TREE_STRING_POINTER (expr), TREE_STRING_LENGTH (expr));
7359 memset (ptr + TREE_STRING_LENGTH (expr), 0,
7360 total_bytes - TREE_STRING_LENGTH (expr));
7362 else
7363 memcpy (ptr, TREE_STRING_POINTER (expr), total_bytes);
7364 return total_bytes;
7368 /* Subroutine of fold_view_convert_expr. Encode the INTEGER_CST,
7369 REAL_CST, COMPLEX_CST or VECTOR_CST specified by EXPR into the
7370 buffer PTR of length LEN bytes. Return the number of bytes
7371 placed in the buffer, or zero upon failure. */
7374 native_encode_expr (const_tree expr, unsigned char *ptr, int len)
7376 switch (TREE_CODE (expr))
7378 case INTEGER_CST:
7379 return native_encode_int (expr, ptr, len);
7381 case REAL_CST:
7382 return native_encode_real (expr, ptr, len);
7384 case COMPLEX_CST:
7385 return native_encode_complex (expr, ptr, len);
7387 case VECTOR_CST:
7388 return native_encode_vector (expr, ptr, len);
7390 case STRING_CST:
7391 return native_encode_string (expr, ptr, len);
7393 default:
7394 return 0;
7399 /* Subroutine of native_interpret_expr. Interpret the contents of
7400 the buffer PTR of length LEN as an INTEGER_CST of type TYPE.
7401 If the buffer cannot be interpreted, return NULL_TREE. */
7403 static tree
7404 native_interpret_int (tree type, const unsigned char *ptr, int len)
7406 int total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
7407 int byte, offset, word, words;
7408 unsigned char value;
7409 double_int result;
7411 if (total_bytes > len)
7412 return NULL_TREE;
7413 if (total_bytes * BITS_PER_UNIT > 2 * HOST_BITS_PER_WIDE_INT)
7414 return NULL_TREE;
7416 result = double_int_zero;
7417 words = total_bytes / UNITS_PER_WORD;
7419 for (byte = 0; byte < total_bytes; byte++)
7421 int bitpos = byte * BITS_PER_UNIT;
7422 if (total_bytes > UNITS_PER_WORD)
7424 word = byte / UNITS_PER_WORD;
7425 if (WORDS_BIG_ENDIAN)
7426 word = (words - 1) - word;
7427 offset = word * UNITS_PER_WORD;
7428 if (BYTES_BIG_ENDIAN)
7429 offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
7430 else
7431 offset += byte % UNITS_PER_WORD;
7433 else
7434 offset = BYTES_BIG_ENDIAN ? (total_bytes - 1) - byte : byte;
7435 value = ptr[offset];
7437 if (bitpos < HOST_BITS_PER_WIDE_INT)
7438 result.low |= (unsigned HOST_WIDE_INT) value << bitpos;
7439 else
7440 result.high |= (unsigned HOST_WIDE_INT) value
7441 << (bitpos - HOST_BITS_PER_WIDE_INT);
7444 return double_int_to_tree (type, result);
7448 /* Subroutine of native_interpret_expr. Interpret the contents of
7449 the buffer PTR of length LEN as a REAL_CST of type TYPE.
7450 If the buffer cannot be interpreted, return NULL_TREE. */
7452 static tree
7453 native_interpret_real (tree type, const unsigned char *ptr, int len)
7455 enum machine_mode mode = TYPE_MODE (type);
7456 int total_bytes = GET_MODE_SIZE (mode);
7457 int byte, offset, word, words, bitpos;
7458 unsigned char value;
7459 /* There are always 32 bits in each long, no matter the size of
7460 the hosts long. We handle floating point representations with
7461 up to 192 bits. */
7462 REAL_VALUE_TYPE r;
7463 long tmp[6];
7465 total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
7466 if (total_bytes > len || total_bytes > 24)
7467 return NULL_TREE;
7468 words = (32 / BITS_PER_UNIT) / UNITS_PER_WORD;
7470 memset (tmp, 0, sizeof (tmp));
7471 for (bitpos = 0; bitpos < total_bytes * BITS_PER_UNIT;
7472 bitpos += BITS_PER_UNIT)
7474 byte = (bitpos / BITS_PER_UNIT) & 3;
7475 if (UNITS_PER_WORD < 4)
7477 word = byte / UNITS_PER_WORD;
7478 if (WORDS_BIG_ENDIAN)
7479 word = (words - 1) - word;
7480 offset = word * UNITS_PER_WORD;
7481 if (BYTES_BIG_ENDIAN)
7482 offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
7483 else
7484 offset += byte % UNITS_PER_WORD;
7486 else
7487 offset = BYTES_BIG_ENDIAN ? 3 - byte : byte;
7488 value = ptr[offset + ((bitpos / BITS_PER_UNIT) & ~3)];
7490 tmp[bitpos / 32] |= (unsigned long)value << (bitpos & 31);
7493 real_from_target (&r, tmp, mode);
7494 return build_real (type, r);
7498 /* Subroutine of native_interpret_expr. Interpret the contents of
7499 the buffer PTR of length LEN as a COMPLEX_CST of type TYPE.
7500 If the buffer cannot be interpreted, return NULL_TREE. */
7502 static tree
7503 native_interpret_complex (tree type, const unsigned char *ptr, int len)
7505 tree etype, rpart, ipart;
7506 int size;
7508 etype = TREE_TYPE (type);
7509 size = GET_MODE_SIZE (TYPE_MODE (etype));
7510 if (size * 2 > len)
7511 return NULL_TREE;
7512 rpart = native_interpret_expr (etype, ptr, size);
7513 if (!rpart)
7514 return NULL_TREE;
7515 ipart = native_interpret_expr (etype, ptr+size, size);
7516 if (!ipart)
7517 return NULL_TREE;
7518 return build_complex (type, rpart, ipart);
7522 /* Subroutine of native_interpret_expr. Interpret the contents of
7523 the buffer PTR of length LEN as a VECTOR_CST of type TYPE.
7524 If the buffer cannot be interpreted, return NULL_TREE. */
7526 static tree
7527 native_interpret_vector (tree type, const unsigned char *ptr, int len)
7529 tree etype, elem, elements;
7530 int i, size, count;
7532 etype = TREE_TYPE (type);
7533 size = GET_MODE_SIZE (TYPE_MODE (etype));
7534 count = TYPE_VECTOR_SUBPARTS (type);
7535 if (size * count > len)
7536 return NULL_TREE;
7538 elements = NULL_TREE;
7539 for (i = count - 1; i >= 0; i--)
7541 elem = native_interpret_expr (etype, ptr+(i*size), size);
7542 if (!elem)
7543 return NULL_TREE;
7544 elements = tree_cons (NULL_TREE, elem, elements);
7546 return build_vector (type, elements);
7550 /* Subroutine of fold_view_convert_expr. Interpret the contents of
7551 the buffer PTR of length LEN as a constant of type TYPE. For
7552 INTEGRAL_TYPE_P we return an INTEGER_CST, for SCALAR_FLOAT_TYPE_P
7553 we return a REAL_CST, etc... If the buffer cannot be interpreted,
7554 return NULL_TREE. */
7556 tree
7557 native_interpret_expr (tree type, const unsigned char *ptr, int len)
7559 switch (TREE_CODE (type))
7561 case INTEGER_TYPE:
7562 case ENUMERAL_TYPE:
7563 case BOOLEAN_TYPE:
7564 return native_interpret_int (type, ptr, len);
7566 case REAL_TYPE:
7567 return native_interpret_real (type, ptr, len);
7569 case COMPLEX_TYPE:
7570 return native_interpret_complex (type, ptr, len);
7572 case VECTOR_TYPE:
7573 return native_interpret_vector (type, ptr, len);
7575 default:
7576 return NULL_TREE;
7581 /* Fold a VIEW_CONVERT_EXPR of a constant expression EXPR to type
7582 TYPE at compile-time. If we're unable to perform the conversion
7583 return NULL_TREE. */
7585 static tree
7586 fold_view_convert_expr (tree type, tree expr)
7588 /* We support up to 512-bit values (for V8DFmode). */
7589 unsigned char buffer[64];
7590 int len;
7592 /* Check that the host and target are sane. */
7593 if (CHAR_BIT != 8 || BITS_PER_UNIT != 8)
7594 return NULL_TREE;
7596 len = native_encode_expr (expr, buffer, sizeof (buffer));
7597 if (len == 0)
7598 return NULL_TREE;
7600 return native_interpret_expr (type, buffer, len);
7603 /* Build an expression for the address of T. Folds away INDIRECT_REF
7604 to avoid confusing the gimplify process. */
7606 tree
7607 build_fold_addr_expr_with_type_loc (location_t loc, tree t, tree ptrtype)
7609 /* The size of the object is not relevant when talking about its address. */
7610 if (TREE_CODE (t) == WITH_SIZE_EXPR)
7611 t = TREE_OPERAND (t, 0);
7613 /* Note: doesn't apply to ALIGN_INDIRECT_REF */
7614 if (TREE_CODE (t) == INDIRECT_REF
7615 || TREE_CODE (t) == MISALIGNED_INDIRECT_REF)
7617 t = TREE_OPERAND (t, 0);
7619 if (TREE_TYPE (t) != ptrtype)
7621 t = build1 (NOP_EXPR, ptrtype, t);
7622 SET_EXPR_LOCATION (t, loc);
7625 else if (TREE_CODE (t) == VIEW_CONVERT_EXPR)
7627 t = build_fold_addr_expr_loc (loc, TREE_OPERAND (t, 0));
7629 if (TREE_TYPE (t) != ptrtype)
7630 t = fold_convert_loc (loc, ptrtype, t);
7632 else
7634 t = build1 (ADDR_EXPR, ptrtype, t);
7635 SET_EXPR_LOCATION (t, loc);
7638 return t;
7641 /* Build an expression for the address of T. */
7643 tree
7644 build_fold_addr_expr_loc (location_t loc, tree t)
7646 tree ptrtype = build_pointer_type (TREE_TYPE (t));
7648 return build_fold_addr_expr_with_type_loc (loc, t, ptrtype);
7651 /* Fold a unary expression of code CODE and type TYPE with operand
7652 OP0. Return the folded expression if folding is successful.
7653 Otherwise, return NULL_TREE. */
7655 tree
7656 fold_unary_loc (location_t loc, enum tree_code code, tree type, tree op0)
7658 tree tem;
7659 tree arg0;
7660 enum tree_code_class kind = TREE_CODE_CLASS (code);
7662 gcc_assert (IS_EXPR_CODE_CLASS (kind)
7663 && TREE_CODE_LENGTH (code) == 1);
7665 arg0 = op0;
7666 if (arg0)
7668 if (CONVERT_EXPR_CODE_P (code)
7669 || code == FLOAT_EXPR || code == ABS_EXPR)
7671 /* Don't use STRIP_NOPS, because signedness of argument type
7672 matters. */
7673 STRIP_SIGN_NOPS (arg0);
7675 else
7677 /* Strip any conversions that don't change the mode. This
7678 is safe for every expression, except for a comparison
7679 expression because its signedness is derived from its
7680 operands.
7682 Note that this is done as an internal manipulation within
7683 the constant folder, in order to find the simplest
7684 representation of the arguments so that their form can be
7685 studied. In any cases, the appropriate type conversions
7686 should be put back in the tree that will get out of the
7687 constant folder. */
7688 STRIP_NOPS (arg0);
7692 if (TREE_CODE_CLASS (code) == tcc_unary)
7694 if (TREE_CODE (arg0) == COMPOUND_EXPR)
7695 return build2 (COMPOUND_EXPR, type, TREE_OPERAND (arg0, 0),
7696 fold_build1_loc (loc, code, type,
7697 fold_convert_loc (loc, TREE_TYPE (op0),
7698 TREE_OPERAND (arg0, 1))));
7699 else if (TREE_CODE (arg0) == COND_EXPR)
7701 tree arg01 = TREE_OPERAND (arg0, 1);
7702 tree arg02 = TREE_OPERAND (arg0, 2);
7703 if (! VOID_TYPE_P (TREE_TYPE (arg01)))
7704 arg01 = fold_build1_loc (loc, code, type,
7705 fold_convert_loc (loc,
7706 TREE_TYPE (op0), arg01));
7707 if (! VOID_TYPE_P (TREE_TYPE (arg02)))
7708 arg02 = fold_build1_loc (loc, code, type,
7709 fold_convert_loc (loc,
7710 TREE_TYPE (op0), arg02));
7711 tem = fold_build3_loc (loc, COND_EXPR, type, TREE_OPERAND (arg0, 0),
7712 arg01, arg02);
7714 /* If this was a conversion, and all we did was to move into
7715 inside the COND_EXPR, bring it back out. But leave it if
7716 it is a conversion from integer to integer and the
7717 result precision is no wider than a word since such a
7718 conversion is cheap and may be optimized away by combine,
7719 while it couldn't if it were outside the COND_EXPR. Then return
7720 so we don't get into an infinite recursion loop taking the
7721 conversion out and then back in. */
7723 if ((CONVERT_EXPR_CODE_P (code)
7724 || code == NON_LVALUE_EXPR)
7725 && TREE_CODE (tem) == COND_EXPR
7726 && TREE_CODE (TREE_OPERAND (tem, 1)) == code
7727 && TREE_CODE (TREE_OPERAND (tem, 2)) == code
7728 && ! VOID_TYPE_P (TREE_OPERAND (tem, 1))
7729 && ! VOID_TYPE_P (TREE_OPERAND (tem, 2))
7730 && (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 1), 0))
7731 == TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 2), 0)))
7732 && (! (INTEGRAL_TYPE_P (TREE_TYPE (tem))
7733 && (INTEGRAL_TYPE_P
7734 (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 1), 0))))
7735 && TYPE_PRECISION (TREE_TYPE (tem)) <= BITS_PER_WORD)
7736 || flag_syntax_only))
7738 tem = build1 (code, type,
7739 build3 (COND_EXPR,
7740 TREE_TYPE (TREE_OPERAND
7741 (TREE_OPERAND (tem, 1), 0)),
7742 TREE_OPERAND (tem, 0),
7743 TREE_OPERAND (TREE_OPERAND (tem, 1), 0),
7744 TREE_OPERAND (TREE_OPERAND (tem, 2), 0)));
7745 SET_EXPR_LOCATION (tem, loc);
7747 return tem;
7749 else if (COMPARISON_CLASS_P (arg0))
7751 if (TREE_CODE (type) == BOOLEAN_TYPE)
7753 arg0 = copy_node (arg0);
7754 TREE_TYPE (arg0) = type;
7755 return arg0;
7757 else if (TREE_CODE (type) != INTEGER_TYPE)
7758 return fold_build3_loc (loc, COND_EXPR, type, arg0,
7759 fold_build1_loc (loc, code, type,
7760 integer_one_node),
7761 fold_build1_loc (loc, code, type,
7762 integer_zero_node));
7766 switch (code)
7768 case PAREN_EXPR:
7769 /* Re-association barriers around constants and other re-association
7770 barriers can be removed. */
7771 if (CONSTANT_CLASS_P (op0)
7772 || TREE_CODE (op0) == PAREN_EXPR)
7773 return fold_convert_loc (loc, type, op0);
7774 return NULL_TREE;
7776 CASE_CONVERT:
7777 case FLOAT_EXPR:
7778 case FIX_TRUNC_EXPR:
7779 if (TREE_TYPE (op0) == type)
7780 return op0;
7782 /* If we have (type) (a CMP b) and type is an integral type, return
7783 new expression involving the new type. */
7784 if (COMPARISON_CLASS_P (op0) && INTEGRAL_TYPE_P (type))
7785 return fold_build2_loc (loc, TREE_CODE (op0), type, TREE_OPERAND (op0, 0),
7786 TREE_OPERAND (op0, 1));
7788 /* Handle cases of two conversions in a row. */
7789 if (CONVERT_EXPR_P (op0))
7791 tree inside_type = TREE_TYPE (TREE_OPERAND (op0, 0));
7792 tree inter_type = TREE_TYPE (op0);
7793 int inside_int = INTEGRAL_TYPE_P (inside_type);
7794 int inside_ptr = POINTER_TYPE_P (inside_type);
7795 int inside_float = FLOAT_TYPE_P (inside_type);
7796 int inside_vec = TREE_CODE (inside_type) == VECTOR_TYPE;
7797 unsigned int inside_prec = TYPE_PRECISION (inside_type);
7798 int inside_unsignedp = TYPE_UNSIGNED (inside_type);
7799 int inter_int = INTEGRAL_TYPE_P (inter_type);
7800 int inter_ptr = POINTER_TYPE_P (inter_type);
7801 int inter_float = FLOAT_TYPE_P (inter_type);
7802 int inter_vec = TREE_CODE (inter_type) == VECTOR_TYPE;
7803 unsigned int inter_prec = TYPE_PRECISION (inter_type);
7804 int inter_unsignedp = TYPE_UNSIGNED (inter_type);
7805 int final_int = INTEGRAL_TYPE_P (type);
7806 int final_ptr = POINTER_TYPE_P (type);
7807 int final_float = FLOAT_TYPE_P (type);
7808 int final_vec = TREE_CODE (type) == VECTOR_TYPE;
7809 unsigned int final_prec = TYPE_PRECISION (type);
7810 int final_unsignedp = TYPE_UNSIGNED (type);
7812 /* In addition to the cases of two conversions in a row
7813 handled below, if we are converting something to its own
7814 type via an object of identical or wider precision, neither
7815 conversion is needed. */
7816 if (TYPE_MAIN_VARIANT (inside_type) == TYPE_MAIN_VARIANT (type)
7817 && (((inter_int || inter_ptr) && final_int)
7818 || (inter_float && final_float))
7819 && inter_prec >= final_prec)
7820 return fold_build1_loc (loc, code, type, TREE_OPERAND (op0, 0));
7822 /* Likewise, if the intermediate and initial types are either both
7823 float or both integer, we don't need the middle conversion if the
7824 former is wider than the latter and doesn't change the signedness
7825 (for integers). Avoid this if the final type is a pointer since
7826 then we sometimes need the middle conversion. Likewise if the
7827 final type has a precision not equal to the size of its mode. */
7828 if (((inter_int && inside_int)
7829 || (inter_float && inside_float)
7830 || (inter_vec && inside_vec))
7831 && inter_prec >= inside_prec
7832 && (inter_float || inter_vec
7833 || inter_unsignedp == inside_unsignedp)
7834 && ! (final_prec != GET_MODE_BITSIZE (TYPE_MODE (type))
7835 && TYPE_MODE (type) == TYPE_MODE (inter_type))
7836 && ! final_ptr
7837 && (! final_vec || inter_prec == inside_prec))
7838 return fold_build1_loc (loc, code, type, TREE_OPERAND (op0, 0));
7840 /* If we have a sign-extension of a zero-extended value, we can
7841 replace that by a single zero-extension. */
7842 if (inside_int && inter_int && final_int
7843 && inside_prec < inter_prec && inter_prec < final_prec
7844 && inside_unsignedp && !inter_unsignedp)
7845 return fold_build1_loc (loc, code, type, TREE_OPERAND (op0, 0));
7847 /* Two conversions in a row are not needed unless:
7848 - some conversion is floating-point (overstrict for now), or
7849 - some conversion is a vector (overstrict for now), or
7850 - the intermediate type is narrower than both initial and
7851 final, or
7852 - the intermediate type and innermost type differ in signedness,
7853 and the outermost type is wider than the intermediate, or
7854 - the initial type is a pointer type and the precisions of the
7855 intermediate and final types differ, or
7856 - the final type is a pointer type and the precisions of the
7857 initial and intermediate types differ. */
7858 if (! inside_float && ! inter_float && ! final_float
7859 && ! inside_vec && ! inter_vec && ! final_vec
7860 && (inter_prec >= inside_prec || inter_prec >= final_prec)
7861 && ! (inside_int && inter_int
7862 && inter_unsignedp != inside_unsignedp
7863 && inter_prec < final_prec)
7864 && ((inter_unsignedp && inter_prec > inside_prec)
7865 == (final_unsignedp && final_prec > inter_prec))
7866 && ! (inside_ptr && inter_prec != final_prec)
7867 && ! (final_ptr && inside_prec != inter_prec)
7868 && ! (final_prec != GET_MODE_BITSIZE (TYPE_MODE (type))
7869 && TYPE_MODE (type) == TYPE_MODE (inter_type)))
7870 return fold_build1_loc (loc, code, type, TREE_OPERAND (op0, 0));
7873 /* Handle (T *)&A.B.C for A being of type T and B and C
7874 living at offset zero. This occurs frequently in
7875 C++ upcasting and then accessing the base. */
7876 if (TREE_CODE (op0) == ADDR_EXPR
7877 && POINTER_TYPE_P (type)
7878 && handled_component_p (TREE_OPERAND (op0, 0)))
7880 HOST_WIDE_INT bitsize, bitpos;
7881 tree offset;
7882 enum machine_mode mode;
7883 int unsignedp, volatilep;
7884 tree base = TREE_OPERAND (op0, 0);
7885 base = get_inner_reference (base, &bitsize, &bitpos, &offset,
7886 &mode, &unsignedp, &volatilep, false);
7887 /* If the reference was to a (constant) zero offset, we can use
7888 the address of the base if it has the same base type
7889 as the result type and the pointer type is unqualified. */
7890 if (! offset && bitpos == 0
7891 && (TYPE_MAIN_VARIANT (TREE_TYPE (type))
7892 == TYPE_MAIN_VARIANT (TREE_TYPE (base)))
7893 && TYPE_QUALS (type) == TYPE_UNQUALIFIED)
7894 return fold_convert_loc (loc, type,
7895 build_fold_addr_expr_loc (loc, base));
7898 if (TREE_CODE (op0) == MODIFY_EXPR
7899 && TREE_CONSTANT (TREE_OPERAND (op0, 1))
7900 /* Detect assigning a bitfield. */
7901 && !(TREE_CODE (TREE_OPERAND (op0, 0)) == COMPONENT_REF
7902 && DECL_BIT_FIELD
7903 (TREE_OPERAND (TREE_OPERAND (op0, 0), 1))))
7905 /* Don't leave an assignment inside a conversion
7906 unless assigning a bitfield. */
7907 tem = fold_build1_loc (loc, code, type, TREE_OPERAND (op0, 1));
7908 /* First do the assignment, then return converted constant. */
7909 tem = build2 (COMPOUND_EXPR, TREE_TYPE (tem), op0, tem);
7910 TREE_NO_WARNING (tem) = 1;
7911 TREE_USED (tem) = 1;
7912 SET_EXPR_LOCATION (tem, loc);
7913 return tem;
7916 /* Convert (T)(x & c) into (T)x & (T)c, if c is an integer
7917 constants (if x has signed type, the sign bit cannot be set
7918 in c). This folds extension into the BIT_AND_EXPR.
7919 ??? We don't do it for BOOLEAN_TYPE or ENUMERAL_TYPE because they
7920 very likely don't have maximal range for their precision and this
7921 transformation effectively doesn't preserve non-maximal ranges. */
7922 if (TREE_CODE (type) == INTEGER_TYPE
7923 && TREE_CODE (op0) == BIT_AND_EXPR
7924 && TREE_CODE (TREE_OPERAND (op0, 1)) == INTEGER_CST)
7926 tree and_expr = op0;
7927 tree and0 = TREE_OPERAND (and_expr, 0);
7928 tree and1 = TREE_OPERAND (and_expr, 1);
7929 int change = 0;
7931 if (TYPE_UNSIGNED (TREE_TYPE (and_expr))
7932 || (TYPE_PRECISION (type)
7933 <= TYPE_PRECISION (TREE_TYPE (and_expr))))
7934 change = 1;
7935 else if (TYPE_PRECISION (TREE_TYPE (and1))
7936 <= HOST_BITS_PER_WIDE_INT
7937 && host_integerp (and1, 1))
7939 unsigned HOST_WIDE_INT cst;
7941 cst = tree_low_cst (and1, 1);
7942 cst &= (HOST_WIDE_INT) -1
7943 << (TYPE_PRECISION (TREE_TYPE (and1)) - 1);
7944 change = (cst == 0);
7945 #ifdef LOAD_EXTEND_OP
7946 if (change
7947 && !flag_syntax_only
7948 && (LOAD_EXTEND_OP (TYPE_MODE (TREE_TYPE (and0)))
7949 == ZERO_EXTEND))
7951 tree uns = unsigned_type_for (TREE_TYPE (and0));
7952 and0 = fold_convert_loc (loc, uns, and0);
7953 and1 = fold_convert_loc (loc, uns, and1);
7955 #endif
7957 if (change)
7959 tem = force_fit_type_double (type, TREE_INT_CST_LOW (and1),
7960 TREE_INT_CST_HIGH (and1), 0,
7961 TREE_OVERFLOW (and1));
7962 return fold_build2_loc (loc, BIT_AND_EXPR, type,
7963 fold_convert_loc (loc, type, and0), tem);
7967 /* Convert (T1)(X p+ Y) into ((T1)X p+ Y), for pointer type,
7968 when one of the new casts will fold away. Conservatively we assume
7969 that this happens when X or Y is NOP_EXPR or Y is INTEGER_CST. */
7970 if (POINTER_TYPE_P (type)
7971 && TREE_CODE (arg0) == POINTER_PLUS_EXPR
7972 && (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
7973 || TREE_CODE (TREE_OPERAND (arg0, 0)) == NOP_EXPR
7974 || TREE_CODE (TREE_OPERAND (arg0, 1)) == NOP_EXPR))
7976 tree arg00 = TREE_OPERAND (arg0, 0);
7977 tree arg01 = TREE_OPERAND (arg0, 1);
7979 return fold_build2_loc (loc,
7980 TREE_CODE (arg0), type,
7981 fold_convert_loc (loc, type, arg00),
7982 fold_convert_loc (loc, sizetype, arg01));
7985 /* Convert (T1)(~(T2)X) into ~(T1)X if T1 and T2 are integral types
7986 of the same precision, and X is an integer type not narrower than
7987 types T1 or T2, i.e. the cast (T2)X isn't an extension. */
7988 if (INTEGRAL_TYPE_P (type)
7989 && TREE_CODE (op0) == BIT_NOT_EXPR
7990 && INTEGRAL_TYPE_P (TREE_TYPE (op0))
7991 && CONVERT_EXPR_P (TREE_OPERAND (op0, 0))
7992 && TYPE_PRECISION (type) == TYPE_PRECISION (TREE_TYPE (op0)))
7994 tem = TREE_OPERAND (TREE_OPERAND (op0, 0), 0);
7995 if (INTEGRAL_TYPE_P (TREE_TYPE (tem))
7996 && TYPE_PRECISION (type) <= TYPE_PRECISION (TREE_TYPE (tem)))
7997 return fold_build1_loc (loc, BIT_NOT_EXPR, type,
7998 fold_convert_loc (loc, type, tem));
8001 /* Convert (T1)(X * Y) into (T1)X * (T1)Y if T1 is narrower than the
8002 type of X and Y (integer types only). */
8003 if (INTEGRAL_TYPE_P (type)
8004 && TREE_CODE (op0) == MULT_EXPR
8005 && INTEGRAL_TYPE_P (TREE_TYPE (op0))
8006 && TYPE_PRECISION (type) < TYPE_PRECISION (TREE_TYPE (op0)))
8008 /* Be careful not to introduce new overflows. */
8009 tree mult_type;
8010 if (TYPE_OVERFLOW_WRAPS (type))
8011 mult_type = type;
8012 else
8013 mult_type = unsigned_type_for (type);
8015 if (TYPE_PRECISION (mult_type) < TYPE_PRECISION (TREE_TYPE (op0)))
8017 tem = fold_build2_loc (loc, MULT_EXPR, mult_type,
8018 fold_convert_loc (loc, mult_type,
8019 TREE_OPERAND (op0, 0)),
8020 fold_convert_loc (loc, mult_type,
8021 TREE_OPERAND (op0, 1)));
8022 return fold_convert_loc (loc, type, tem);
8026 tem = fold_convert_const (code, type, op0);
8027 return tem ? tem : NULL_TREE;
8029 case ADDR_SPACE_CONVERT_EXPR:
8030 if (integer_zerop (arg0))
8031 return fold_convert_const (code, type, arg0);
8032 return NULL_TREE;
8034 case FIXED_CONVERT_EXPR:
8035 tem = fold_convert_const (code, type, arg0);
8036 return tem ? tem : NULL_TREE;
8038 case VIEW_CONVERT_EXPR:
8039 if (TREE_TYPE (op0) == type)
8040 return op0;
8041 if (TREE_CODE (op0) == VIEW_CONVERT_EXPR)
8042 return fold_build1_loc (loc, VIEW_CONVERT_EXPR,
8043 type, TREE_OPERAND (op0, 0));
8045 /* For integral conversions with the same precision or pointer
8046 conversions use a NOP_EXPR instead. */
8047 if ((INTEGRAL_TYPE_P (type)
8048 || POINTER_TYPE_P (type))
8049 && (INTEGRAL_TYPE_P (TREE_TYPE (op0))
8050 || POINTER_TYPE_P (TREE_TYPE (op0)))
8051 && TYPE_PRECISION (type) == TYPE_PRECISION (TREE_TYPE (op0)))
8052 return fold_convert_loc (loc, type, op0);
8054 /* Strip inner integral conversions that do not change the precision. */
8055 if (CONVERT_EXPR_P (op0)
8056 && (INTEGRAL_TYPE_P (TREE_TYPE (op0))
8057 || POINTER_TYPE_P (TREE_TYPE (op0)))
8058 && (INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (op0, 0)))
8059 || POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (op0, 0))))
8060 && (TYPE_PRECISION (TREE_TYPE (op0))
8061 == TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (op0, 0)))))
8062 return fold_build1_loc (loc, VIEW_CONVERT_EXPR,
8063 type, TREE_OPERAND (op0, 0));
8065 return fold_view_convert_expr (type, op0);
8067 case NEGATE_EXPR:
8068 tem = fold_negate_expr (loc, arg0);
8069 if (tem)
8070 return fold_convert_loc (loc, type, tem);
8071 return NULL_TREE;
8073 case ABS_EXPR:
8074 if (TREE_CODE (arg0) == INTEGER_CST || TREE_CODE (arg0) == REAL_CST)
8075 return fold_abs_const (arg0, type);
8076 else if (TREE_CODE (arg0) == NEGATE_EXPR)
8077 return fold_build1_loc (loc, ABS_EXPR, type, TREE_OPERAND (arg0, 0));
8078 /* Convert fabs((double)float) into (double)fabsf(float). */
8079 else if (TREE_CODE (arg0) == NOP_EXPR
8080 && TREE_CODE (type) == REAL_TYPE)
8082 tree targ0 = strip_float_extensions (arg0);
8083 if (targ0 != arg0)
8084 return fold_convert_loc (loc, type,
8085 fold_build1_loc (loc, ABS_EXPR,
8086 TREE_TYPE (targ0),
8087 targ0));
8089 /* ABS_EXPR<ABS_EXPR<x>> = ABS_EXPR<x> even if flag_wrapv is on. */
8090 else if (TREE_CODE (arg0) == ABS_EXPR)
8091 return arg0;
8092 else if (tree_expr_nonnegative_p (arg0))
8093 return arg0;
8095 /* Strip sign ops from argument. */
8096 if (TREE_CODE (type) == REAL_TYPE)
8098 tem = fold_strip_sign_ops (arg0);
8099 if (tem)
8100 return fold_build1_loc (loc, ABS_EXPR, type,
8101 fold_convert_loc (loc, type, tem));
8103 return NULL_TREE;
8105 case CONJ_EXPR:
8106 if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
8107 return fold_convert_loc (loc, type, arg0);
8108 if (TREE_CODE (arg0) == COMPLEX_EXPR)
8110 tree itype = TREE_TYPE (type);
8111 tree rpart = fold_convert_loc (loc, itype, TREE_OPERAND (arg0, 0));
8112 tree ipart = fold_convert_loc (loc, itype, TREE_OPERAND (arg0, 1));
8113 return fold_build2_loc (loc, COMPLEX_EXPR, type, rpart,
8114 negate_expr (ipart));
8116 if (TREE_CODE (arg0) == COMPLEX_CST)
8118 tree itype = TREE_TYPE (type);
8119 tree rpart = fold_convert_loc (loc, itype, TREE_REALPART (arg0));
8120 tree ipart = fold_convert_loc (loc, itype, TREE_IMAGPART (arg0));
8121 return build_complex (type, rpart, negate_expr (ipart));
8123 if (TREE_CODE (arg0) == CONJ_EXPR)
8124 return fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
8125 return NULL_TREE;
8127 case BIT_NOT_EXPR:
8128 if (TREE_CODE (arg0) == INTEGER_CST)
8129 return fold_not_const (arg0, type);
8130 else if (TREE_CODE (arg0) == BIT_NOT_EXPR)
8131 return fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
8132 /* Convert ~ (-A) to A - 1. */
8133 else if (INTEGRAL_TYPE_P (type) && TREE_CODE (arg0) == NEGATE_EXPR)
8134 return fold_build2_loc (loc, MINUS_EXPR, type,
8135 fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0)),
8136 build_int_cst (type, 1));
8137 /* Convert ~ (A - 1) or ~ (A + -1) to -A. */
8138 else if (INTEGRAL_TYPE_P (type)
8139 && ((TREE_CODE (arg0) == MINUS_EXPR
8140 && integer_onep (TREE_OPERAND (arg0, 1)))
8141 || (TREE_CODE (arg0) == PLUS_EXPR
8142 && integer_all_onesp (TREE_OPERAND (arg0, 1)))))
8143 return fold_build1_loc (loc, NEGATE_EXPR, type,
8144 fold_convert_loc (loc, type,
8145 TREE_OPERAND (arg0, 0)));
8146 /* Convert ~(X ^ Y) to ~X ^ Y or X ^ ~Y if ~X or ~Y simplify. */
8147 else if (TREE_CODE (arg0) == BIT_XOR_EXPR
8148 && (tem = fold_unary_loc (loc, BIT_NOT_EXPR, type,
8149 fold_convert_loc (loc, type,
8150 TREE_OPERAND (arg0, 0)))))
8151 return fold_build2_loc (loc, BIT_XOR_EXPR, type, tem,
8152 fold_convert_loc (loc, type,
8153 TREE_OPERAND (arg0, 1)));
8154 else if (TREE_CODE (arg0) == BIT_XOR_EXPR
8155 && (tem = fold_unary_loc (loc, BIT_NOT_EXPR, type,
8156 fold_convert_loc (loc, type,
8157 TREE_OPERAND (arg0, 1)))))
8158 return fold_build2_loc (loc, BIT_XOR_EXPR, type,
8159 fold_convert_loc (loc, type,
8160 TREE_OPERAND (arg0, 0)), tem);
8161 /* Perform BIT_NOT_EXPR on each element individually. */
8162 else if (TREE_CODE (arg0) == VECTOR_CST)
8164 tree elements = TREE_VECTOR_CST_ELTS (arg0), elem, list = NULL_TREE;
8165 int count = TYPE_VECTOR_SUBPARTS (type), i;
8167 for (i = 0; i < count; i++)
8169 if (elements)
8171 elem = TREE_VALUE (elements);
8172 elem = fold_unary_loc (loc, BIT_NOT_EXPR, TREE_TYPE (type), elem);
8173 if (elem == NULL_TREE)
8174 break;
8175 elements = TREE_CHAIN (elements);
8177 else
8178 elem = build_int_cst (TREE_TYPE (type), -1);
8179 list = tree_cons (NULL_TREE, elem, list);
8181 if (i == count)
8182 return build_vector (type, nreverse (list));
8185 return NULL_TREE;
8187 case TRUTH_NOT_EXPR:
8188 /* The argument to invert_truthvalue must have Boolean type. */
8189 if (TREE_CODE (TREE_TYPE (arg0)) != BOOLEAN_TYPE)
8190 arg0 = fold_convert_loc (loc, boolean_type_node, arg0);
8192 /* Note that the operand of this must be an int
8193 and its values must be 0 or 1.
8194 ("true" is a fixed value perhaps depending on the language,
8195 but we don't handle values other than 1 correctly yet.) */
8196 tem = fold_truth_not_expr (loc, arg0);
8197 if (!tem)
8198 return NULL_TREE;
8199 return fold_convert_loc (loc, type, tem);
8201 case REALPART_EXPR:
8202 if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
8203 return fold_convert_loc (loc, type, arg0);
8204 if (TREE_CODE (arg0) == COMPLEX_EXPR)
8205 return omit_one_operand_loc (loc, type, TREE_OPERAND (arg0, 0),
8206 TREE_OPERAND (arg0, 1));
8207 if (TREE_CODE (arg0) == COMPLEX_CST)
8208 return fold_convert_loc (loc, type, TREE_REALPART (arg0));
8209 if (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
8211 tree itype = TREE_TYPE (TREE_TYPE (arg0));
8212 tem = fold_build2_loc (loc, TREE_CODE (arg0), itype,
8213 fold_build1_loc (loc, REALPART_EXPR, itype,
8214 TREE_OPERAND (arg0, 0)),
8215 fold_build1_loc (loc, REALPART_EXPR, itype,
8216 TREE_OPERAND (arg0, 1)));
8217 return fold_convert_loc (loc, type, tem);
8219 if (TREE_CODE (arg0) == CONJ_EXPR)
8221 tree itype = TREE_TYPE (TREE_TYPE (arg0));
8222 tem = fold_build1_loc (loc, REALPART_EXPR, itype,
8223 TREE_OPERAND (arg0, 0));
8224 return fold_convert_loc (loc, type, tem);
8226 if (TREE_CODE (arg0) == CALL_EXPR)
8228 tree fn = get_callee_fndecl (arg0);
8229 if (fn && DECL_BUILT_IN_CLASS (fn) == BUILT_IN_NORMAL)
8230 switch (DECL_FUNCTION_CODE (fn))
8232 CASE_FLT_FN (BUILT_IN_CEXPI):
8233 fn = mathfn_built_in (type, BUILT_IN_COS);
8234 if (fn)
8235 return build_call_expr_loc (loc, fn, 1, CALL_EXPR_ARG (arg0, 0));
8236 break;
8238 default:
8239 break;
8242 return NULL_TREE;
8244 case IMAGPART_EXPR:
8245 if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
8246 return fold_convert_loc (loc, type, integer_zero_node);
8247 if (TREE_CODE (arg0) == COMPLEX_EXPR)
8248 return omit_one_operand_loc (loc, type, TREE_OPERAND (arg0, 1),
8249 TREE_OPERAND (arg0, 0));
8250 if (TREE_CODE (arg0) == COMPLEX_CST)
8251 return fold_convert_loc (loc, type, TREE_IMAGPART (arg0));
8252 if (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
8254 tree itype = TREE_TYPE (TREE_TYPE (arg0));
8255 tem = fold_build2_loc (loc, TREE_CODE (arg0), itype,
8256 fold_build1_loc (loc, IMAGPART_EXPR, itype,
8257 TREE_OPERAND (arg0, 0)),
8258 fold_build1_loc (loc, IMAGPART_EXPR, itype,
8259 TREE_OPERAND (arg0, 1)));
8260 return fold_convert_loc (loc, type, tem);
8262 if (TREE_CODE (arg0) == CONJ_EXPR)
8264 tree itype = TREE_TYPE (TREE_TYPE (arg0));
8265 tem = fold_build1_loc (loc, IMAGPART_EXPR, itype, TREE_OPERAND (arg0, 0));
8266 return fold_convert_loc (loc, type, negate_expr (tem));
8268 if (TREE_CODE (arg0) == CALL_EXPR)
8270 tree fn = get_callee_fndecl (arg0);
8271 if (fn && DECL_BUILT_IN_CLASS (fn) == BUILT_IN_NORMAL)
8272 switch (DECL_FUNCTION_CODE (fn))
8274 CASE_FLT_FN (BUILT_IN_CEXPI):
8275 fn = mathfn_built_in (type, BUILT_IN_SIN);
8276 if (fn)
8277 return build_call_expr_loc (loc, fn, 1, CALL_EXPR_ARG (arg0, 0));
8278 break;
8280 default:
8281 break;
8284 return NULL_TREE;
8286 case INDIRECT_REF:
8287 /* Fold *&X to X if X is an lvalue. */
8288 if (TREE_CODE (op0) == ADDR_EXPR)
8290 tree op00 = TREE_OPERAND (op0, 0);
8291 if ((TREE_CODE (op00) == VAR_DECL
8292 || TREE_CODE (op00) == PARM_DECL
8293 || TREE_CODE (op00) == RESULT_DECL)
8294 && !TREE_READONLY (op00))
8295 return op00;
8297 return NULL_TREE;
8299 default:
8300 return NULL_TREE;
8301 } /* switch (code) */
8305 /* If the operation was a conversion do _not_ mark a resulting constant
8306 with TREE_OVERFLOW if the original constant was not. These conversions
8307 have implementation defined behavior and retaining the TREE_OVERFLOW
8308 flag here would confuse later passes such as VRP. */
8309 tree
8310 fold_unary_ignore_overflow_loc (location_t loc, enum tree_code code,
8311 tree type, tree op0)
8313 tree res = fold_unary_loc (loc, code, type, op0);
8314 if (res
8315 && TREE_CODE (res) == INTEGER_CST
8316 && TREE_CODE (op0) == INTEGER_CST
8317 && CONVERT_EXPR_CODE_P (code))
8318 TREE_OVERFLOW (res) = TREE_OVERFLOW (op0);
8320 return res;
8323 /* Fold a binary expression of code CODE and type TYPE with operands
8324 OP0 and OP1, containing either a MIN-MAX or a MAX-MIN combination.
8325 Return the folded expression if folding is successful. Otherwise,
8326 return NULL_TREE. */
8328 static tree
8329 fold_minmax (location_t loc, enum tree_code code, tree type, tree op0, tree op1)
8331 enum tree_code compl_code;
8333 if (code == MIN_EXPR)
8334 compl_code = MAX_EXPR;
8335 else if (code == MAX_EXPR)
8336 compl_code = MIN_EXPR;
8337 else
8338 gcc_unreachable ();
8340 /* MIN (MAX (a, b), b) == b. */
8341 if (TREE_CODE (op0) == compl_code
8342 && operand_equal_p (TREE_OPERAND (op0, 1), op1, 0))
8343 return omit_one_operand_loc (loc, type, op1, TREE_OPERAND (op0, 0));
8345 /* MIN (MAX (b, a), b) == b. */
8346 if (TREE_CODE (op0) == compl_code
8347 && operand_equal_p (TREE_OPERAND (op0, 0), op1, 0)
8348 && reorder_operands_p (TREE_OPERAND (op0, 1), op1))
8349 return omit_one_operand_loc (loc, type, op1, TREE_OPERAND (op0, 1));
8351 /* MIN (a, MAX (a, b)) == a. */
8352 if (TREE_CODE (op1) == compl_code
8353 && operand_equal_p (op0, TREE_OPERAND (op1, 0), 0)
8354 && reorder_operands_p (op0, TREE_OPERAND (op1, 1)))
8355 return omit_one_operand_loc (loc, type, op0, TREE_OPERAND (op1, 1));
8357 /* MIN (a, MAX (b, a)) == a. */
8358 if (TREE_CODE (op1) == compl_code
8359 && operand_equal_p (op0, TREE_OPERAND (op1, 1), 0)
8360 && reorder_operands_p (op0, TREE_OPERAND (op1, 0)))
8361 return omit_one_operand_loc (loc, type, op0, TREE_OPERAND (op1, 0));
8363 return NULL_TREE;
8366 /* Helper that tries to canonicalize the comparison ARG0 CODE ARG1
8367 by changing CODE to reduce the magnitude of constants involved in
8368 ARG0 of the comparison.
8369 Returns a canonicalized comparison tree if a simplification was
8370 possible, otherwise returns NULL_TREE.
8371 Set *STRICT_OVERFLOW_P to true if the canonicalization is only
8372 valid if signed overflow is undefined. */
8374 static tree
8375 maybe_canonicalize_comparison_1 (location_t loc, enum tree_code code, tree type,
8376 tree arg0, tree arg1,
8377 bool *strict_overflow_p)
8379 enum tree_code code0 = TREE_CODE (arg0);
8380 tree t, cst0 = NULL_TREE;
8381 int sgn0;
8382 bool swap = false;
8384 /* Match A +- CST code arg1 and CST code arg1. We can change the
8385 first form only if overflow is undefined. */
8386 if (!((TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0))
8387 /* In principle pointers also have undefined overflow behavior,
8388 but that causes problems elsewhere. */
8389 && !POINTER_TYPE_P (TREE_TYPE (arg0))
8390 && (code0 == MINUS_EXPR
8391 || code0 == PLUS_EXPR)
8392 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
8393 || code0 == INTEGER_CST))
8394 return NULL_TREE;
8396 /* Identify the constant in arg0 and its sign. */
8397 if (code0 == INTEGER_CST)
8398 cst0 = arg0;
8399 else
8400 cst0 = TREE_OPERAND (arg0, 1);
8401 sgn0 = tree_int_cst_sgn (cst0);
8403 /* Overflowed constants and zero will cause problems. */
8404 if (integer_zerop (cst0)
8405 || TREE_OVERFLOW (cst0))
8406 return NULL_TREE;
8408 /* See if we can reduce the magnitude of the constant in
8409 arg0 by changing the comparison code. */
8410 if (code0 == INTEGER_CST)
8412 /* CST <= arg1 -> CST-1 < arg1. */
8413 if (code == LE_EXPR && sgn0 == 1)
8414 code = LT_EXPR;
8415 /* -CST < arg1 -> -CST-1 <= arg1. */
8416 else if (code == LT_EXPR && sgn0 == -1)
8417 code = LE_EXPR;
8418 /* CST > arg1 -> CST-1 >= arg1. */
8419 else if (code == GT_EXPR && sgn0 == 1)
8420 code = GE_EXPR;
8421 /* -CST >= arg1 -> -CST-1 > arg1. */
8422 else if (code == GE_EXPR && sgn0 == -1)
8423 code = GT_EXPR;
8424 else
8425 return NULL_TREE;
8426 /* arg1 code' CST' might be more canonical. */
8427 swap = true;
8429 else
8431 /* A - CST < arg1 -> A - CST-1 <= arg1. */
8432 if (code == LT_EXPR
8433 && code0 == ((sgn0 == -1) ? PLUS_EXPR : MINUS_EXPR))
8434 code = LE_EXPR;
8435 /* A + CST > arg1 -> A + CST-1 >= arg1. */
8436 else if (code == GT_EXPR
8437 && code0 == ((sgn0 == -1) ? MINUS_EXPR : PLUS_EXPR))
8438 code = GE_EXPR;
8439 /* A + CST <= arg1 -> A + CST-1 < arg1. */
8440 else if (code == LE_EXPR
8441 && code0 == ((sgn0 == -1) ? MINUS_EXPR : PLUS_EXPR))
8442 code = LT_EXPR;
8443 /* A - CST >= arg1 -> A - CST-1 > arg1. */
8444 else if (code == GE_EXPR
8445 && code0 == ((sgn0 == -1) ? PLUS_EXPR : MINUS_EXPR))
8446 code = GT_EXPR;
8447 else
8448 return NULL_TREE;
8449 *strict_overflow_p = true;
8452 /* Now build the constant reduced in magnitude. But not if that
8453 would produce one outside of its types range. */
8454 if (INTEGRAL_TYPE_P (TREE_TYPE (cst0))
8455 && ((sgn0 == 1
8456 && TYPE_MIN_VALUE (TREE_TYPE (cst0))
8457 && tree_int_cst_equal (cst0, TYPE_MIN_VALUE (TREE_TYPE (cst0))))
8458 || (sgn0 == -1
8459 && TYPE_MAX_VALUE (TREE_TYPE (cst0))
8460 && tree_int_cst_equal (cst0, TYPE_MAX_VALUE (TREE_TYPE (cst0))))))
8461 /* We cannot swap the comparison here as that would cause us to
8462 endlessly recurse. */
8463 return NULL_TREE;
8465 t = int_const_binop (sgn0 == -1 ? PLUS_EXPR : MINUS_EXPR,
8466 cst0, build_int_cst (TREE_TYPE (cst0), 1), 0);
8467 if (code0 != INTEGER_CST)
8468 t = fold_build2_loc (loc, code0, TREE_TYPE (arg0), TREE_OPERAND (arg0, 0), t);
8470 /* If swapping might yield to a more canonical form, do so. */
8471 if (swap)
8472 return fold_build2_loc (loc, swap_tree_comparison (code), type, arg1, t);
8473 else
8474 return fold_build2_loc (loc, code, type, t, arg1);
8477 /* Canonicalize the comparison ARG0 CODE ARG1 with type TYPE with undefined
8478 overflow further. Try to decrease the magnitude of constants involved
8479 by changing LE_EXPR and GE_EXPR to LT_EXPR and GT_EXPR or vice versa
8480 and put sole constants at the second argument position.
8481 Returns the canonicalized tree if changed, otherwise NULL_TREE. */
8483 static tree
8484 maybe_canonicalize_comparison (location_t loc, enum tree_code code, tree type,
8485 tree arg0, tree arg1)
8487 tree t;
8488 bool strict_overflow_p;
8489 const char * const warnmsg = G_("assuming signed overflow does not occur "
8490 "when reducing constant in comparison");
8492 /* Try canonicalization by simplifying arg0. */
8493 strict_overflow_p = false;
8494 t = maybe_canonicalize_comparison_1 (loc, code, type, arg0, arg1,
8495 &strict_overflow_p);
8496 if (t)
8498 if (strict_overflow_p)
8499 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_MAGNITUDE);
8500 return t;
8503 /* Try canonicalization by simplifying arg1 using the swapped
8504 comparison. */
8505 code = swap_tree_comparison (code);
8506 strict_overflow_p = false;
8507 t = maybe_canonicalize_comparison_1 (loc, code, type, arg1, arg0,
8508 &strict_overflow_p);
8509 if (t && strict_overflow_p)
8510 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_MAGNITUDE);
8511 return t;
8514 /* Return whether BASE + OFFSET + BITPOS may wrap around the address
8515 space. This is used to avoid issuing overflow warnings for
8516 expressions like &p->x which can not wrap. */
8518 static bool
8519 pointer_may_wrap_p (tree base, tree offset, HOST_WIDE_INT bitpos)
8521 unsigned HOST_WIDE_INT offset_low, total_low;
8522 HOST_WIDE_INT size, offset_high, total_high;
8524 if (!POINTER_TYPE_P (TREE_TYPE (base)))
8525 return true;
8527 if (bitpos < 0)
8528 return true;
8530 if (offset == NULL_TREE)
8532 offset_low = 0;
8533 offset_high = 0;
8535 else if (TREE_CODE (offset) != INTEGER_CST || TREE_OVERFLOW (offset))
8536 return true;
8537 else
8539 offset_low = TREE_INT_CST_LOW (offset);
8540 offset_high = TREE_INT_CST_HIGH (offset);
8543 if (add_double_with_sign (offset_low, offset_high,
8544 bitpos / BITS_PER_UNIT, 0,
8545 &total_low, &total_high,
8546 true))
8547 return true;
8549 if (total_high != 0)
8550 return true;
8552 size = int_size_in_bytes (TREE_TYPE (TREE_TYPE (base)));
8553 if (size <= 0)
8554 return true;
8556 /* We can do slightly better for SIZE if we have an ADDR_EXPR of an
8557 array. */
8558 if (TREE_CODE (base) == ADDR_EXPR)
8560 HOST_WIDE_INT base_size;
8562 base_size = int_size_in_bytes (TREE_TYPE (TREE_OPERAND (base, 0)));
8563 if (base_size > 0 && size < base_size)
8564 size = base_size;
8567 return total_low > (unsigned HOST_WIDE_INT) size;
8570 /* Subroutine of fold_binary. This routine performs all of the
8571 transformations that are common to the equality/inequality
8572 operators (EQ_EXPR and NE_EXPR) and the ordering operators
8573 (LT_EXPR, LE_EXPR, GE_EXPR and GT_EXPR). Callers other than
8574 fold_binary should call fold_binary. Fold a comparison with
8575 tree code CODE and type TYPE with operands OP0 and OP1. Return
8576 the folded comparison or NULL_TREE. */
8578 static tree
8579 fold_comparison (location_t loc, enum tree_code code, tree type,
8580 tree op0, tree op1)
8582 tree arg0, arg1, tem;
8584 arg0 = op0;
8585 arg1 = op1;
8587 STRIP_SIGN_NOPS (arg0);
8588 STRIP_SIGN_NOPS (arg1);
8590 tem = fold_relational_const (code, type, arg0, arg1);
8591 if (tem != NULL_TREE)
8592 return tem;
8594 /* If one arg is a real or integer constant, put it last. */
8595 if (tree_swap_operands_p (arg0, arg1, true))
8596 return fold_build2_loc (loc, swap_tree_comparison (code), type, op1, op0);
8598 /* Transform comparisons of the form X +- C1 CMP C2 to X CMP C2 +- C1. */
8599 if ((TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
8600 && (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
8601 && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1))
8602 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
8603 && (TREE_CODE (arg1) == INTEGER_CST
8604 && !TREE_OVERFLOW (arg1)))
8606 tree const1 = TREE_OPERAND (arg0, 1);
8607 tree const2 = arg1;
8608 tree variable = TREE_OPERAND (arg0, 0);
8609 tree lhs;
8610 int lhs_add;
8611 lhs_add = TREE_CODE (arg0) != PLUS_EXPR;
8613 lhs = fold_build2_loc (loc, lhs_add ? PLUS_EXPR : MINUS_EXPR,
8614 TREE_TYPE (arg1), const2, const1);
8616 /* If the constant operation overflowed this can be
8617 simplified as a comparison against INT_MAX/INT_MIN. */
8618 if (TREE_CODE (lhs) == INTEGER_CST
8619 && TREE_OVERFLOW (lhs))
8621 int const1_sgn = tree_int_cst_sgn (const1);
8622 enum tree_code code2 = code;
8624 /* Get the sign of the constant on the lhs if the
8625 operation were VARIABLE + CONST1. */
8626 if (TREE_CODE (arg0) == MINUS_EXPR)
8627 const1_sgn = -const1_sgn;
8629 /* The sign of the constant determines if we overflowed
8630 INT_MAX (const1_sgn == -1) or INT_MIN (const1_sgn == 1).
8631 Canonicalize to the INT_MIN overflow by swapping the comparison
8632 if necessary. */
8633 if (const1_sgn == -1)
8634 code2 = swap_tree_comparison (code);
8636 /* We now can look at the canonicalized case
8637 VARIABLE + 1 CODE2 INT_MIN
8638 and decide on the result. */
8639 if (code2 == LT_EXPR
8640 || code2 == LE_EXPR
8641 || code2 == EQ_EXPR)
8642 return omit_one_operand_loc (loc, type, boolean_false_node, variable);
8643 else if (code2 == NE_EXPR
8644 || code2 == GE_EXPR
8645 || code2 == GT_EXPR)
8646 return omit_one_operand_loc (loc, type, boolean_true_node, variable);
8649 if (TREE_CODE (lhs) == TREE_CODE (arg1)
8650 && (TREE_CODE (lhs) != INTEGER_CST
8651 || !TREE_OVERFLOW (lhs)))
8653 fold_overflow_warning (("assuming signed overflow does not occur "
8654 "when changing X +- C1 cmp C2 to "
8655 "X cmp C1 +- C2"),
8656 WARN_STRICT_OVERFLOW_COMPARISON);
8657 return fold_build2_loc (loc, code, type, variable, lhs);
8661 /* For comparisons of pointers we can decompose it to a compile time
8662 comparison of the base objects and the offsets into the object.
8663 This requires at least one operand being an ADDR_EXPR or a
8664 POINTER_PLUS_EXPR to do more than the operand_equal_p test below. */
8665 if (POINTER_TYPE_P (TREE_TYPE (arg0))
8666 && (TREE_CODE (arg0) == ADDR_EXPR
8667 || TREE_CODE (arg1) == ADDR_EXPR
8668 || TREE_CODE (arg0) == POINTER_PLUS_EXPR
8669 || TREE_CODE (arg1) == POINTER_PLUS_EXPR))
8671 tree base0, base1, offset0 = NULL_TREE, offset1 = NULL_TREE;
8672 HOST_WIDE_INT bitsize, bitpos0 = 0, bitpos1 = 0;
8673 enum machine_mode mode;
8674 int volatilep, unsignedp;
8675 bool indirect_base0 = false, indirect_base1 = false;
8677 /* Get base and offset for the access. Strip ADDR_EXPR for
8678 get_inner_reference, but put it back by stripping INDIRECT_REF
8679 off the base object if possible. indirect_baseN will be true
8680 if baseN is not an address but refers to the object itself. */
8681 base0 = arg0;
8682 if (TREE_CODE (arg0) == ADDR_EXPR)
8684 base0 = get_inner_reference (TREE_OPERAND (arg0, 0),
8685 &bitsize, &bitpos0, &offset0, &mode,
8686 &unsignedp, &volatilep, false);
8687 if (TREE_CODE (base0) == INDIRECT_REF)
8688 base0 = TREE_OPERAND (base0, 0);
8689 else
8690 indirect_base0 = true;
8692 else if (TREE_CODE (arg0) == POINTER_PLUS_EXPR)
8694 base0 = TREE_OPERAND (arg0, 0);
8695 offset0 = TREE_OPERAND (arg0, 1);
8698 base1 = arg1;
8699 if (TREE_CODE (arg1) == ADDR_EXPR)
8701 base1 = get_inner_reference (TREE_OPERAND (arg1, 0),
8702 &bitsize, &bitpos1, &offset1, &mode,
8703 &unsignedp, &volatilep, false);
8704 if (TREE_CODE (base1) == INDIRECT_REF)
8705 base1 = TREE_OPERAND (base1, 0);
8706 else
8707 indirect_base1 = true;
8709 else if (TREE_CODE (arg1) == POINTER_PLUS_EXPR)
8711 base1 = TREE_OPERAND (arg1, 0);
8712 offset1 = TREE_OPERAND (arg1, 1);
8715 /* A local variable can never be pointed to by
8716 the default SSA name of an incoming parameter. */
8717 if ((TREE_CODE (arg0) == ADDR_EXPR
8718 && indirect_base0
8719 && TREE_CODE (base0) == VAR_DECL
8720 && auto_var_in_fn_p (base0, current_function_decl)
8721 && !indirect_base1
8722 && TREE_CODE (base1) == SSA_NAME
8723 && TREE_CODE (SSA_NAME_VAR (base1)) == PARM_DECL
8724 && SSA_NAME_IS_DEFAULT_DEF (base1))
8725 || (TREE_CODE (arg1) == ADDR_EXPR
8726 && indirect_base1
8727 && TREE_CODE (base1) == VAR_DECL
8728 && auto_var_in_fn_p (base1, current_function_decl)
8729 && !indirect_base0
8730 && TREE_CODE (base0) == SSA_NAME
8731 && TREE_CODE (SSA_NAME_VAR (base0)) == PARM_DECL
8732 && SSA_NAME_IS_DEFAULT_DEF (base0)))
8734 if (code == NE_EXPR)
8735 return constant_boolean_node (1, type);
8736 else if (code == EQ_EXPR)
8737 return constant_boolean_node (0, type);
8739 /* If we have equivalent bases we might be able to simplify. */
8740 else if (indirect_base0 == indirect_base1
8741 && operand_equal_p (base0, base1, 0))
8743 /* We can fold this expression to a constant if the non-constant
8744 offset parts are equal. */
8745 if ((offset0 == offset1
8746 || (offset0 && offset1
8747 && operand_equal_p (offset0, offset1, 0)))
8748 && (code == EQ_EXPR
8749 || code == NE_EXPR
8750 || POINTER_TYPE_OVERFLOW_UNDEFINED))
8753 if (code != EQ_EXPR
8754 && code != NE_EXPR
8755 && bitpos0 != bitpos1
8756 && (pointer_may_wrap_p (base0, offset0, bitpos0)
8757 || pointer_may_wrap_p (base1, offset1, bitpos1)))
8758 fold_overflow_warning (("assuming pointer wraparound does not "
8759 "occur when comparing P +- C1 with "
8760 "P +- C2"),
8761 WARN_STRICT_OVERFLOW_CONDITIONAL);
8763 switch (code)
8765 case EQ_EXPR:
8766 return constant_boolean_node (bitpos0 == bitpos1, type);
8767 case NE_EXPR:
8768 return constant_boolean_node (bitpos0 != bitpos1, type);
8769 case LT_EXPR:
8770 return constant_boolean_node (bitpos0 < bitpos1, type);
8771 case LE_EXPR:
8772 return constant_boolean_node (bitpos0 <= bitpos1, type);
8773 case GE_EXPR:
8774 return constant_boolean_node (bitpos0 >= bitpos1, type);
8775 case GT_EXPR:
8776 return constant_boolean_node (bitpos0 > bitpos1, type);
8777 default:;
8780 /* We can simplify the comparison to a comparison of the variable
8781 offset parts if the constant offset parts are equal.
8782 Be careful to use signed size type here because otherwise we
8783 mess with array offsets in the wrong way. This is possible
8784 because pointer arithmetic is restricted to retain within an
8785 object and overflow on pointer differences is undefined as of
8786 6.5.6/8 and /9 with respect to the signed ptrdiff_t. */
8787 else if (bitpos0 == bitpos1
8788 && ((code == EQ_EXPR || code == NE_EXPR)
8789 || POINTER_TYPE_OVERFLOW_UNDEFINED))
8791 /* By converting to signed size type we cover middle-end pointer
8792 arithmetic which operates on unsigned pointer types of size
8793 type size and ARRAY_REF offsets which are properly sign or
8794 zero extended from their type in case it is narrower than
8795 size type. */
8796 if (offset0 == NULL_TREE)
8797 offset0 = build_int_cst (ssizetype, 0);
8798 else
8799 offset0 = fold_convert_loc (loc, ssizetype, offset0);
8800 if (offset1 == NULL_TREE)
8801 offset1 = build_int_cst (ssizetype, 0);
8802 else
8803 offset1 = fold_convert_loc (loc, ssizetype, offset1);
8805 if (code != EQ_EXPR
8806 && code != NE_EXPR
8807 && (pointer_may_wrap_p (base0, offset0, bitpos0)
8808 || pointer_may_wrap_p (base1, offset1, bitpos1)))
8809 fold_overflow_warning (("assuming pointer wraparound does not "
8810 "occur when comparing P +- C1 with "
8811 "P +- C2"),
8812 WARN_STRICT_OVERFLOW_COMPARISON);
8814 return fold_build2_loc (loc, code, type, offset0, offset1);
8817 /* For non-equal bases we can simplify if they are addresses
8818 of local binding decls or constants. */
8819 else if (indirect_base0 && indirect_base1
8820 /* We know that !operand_equal_p (base0, base1, 0)
8821 because the if condition was false. But make
8822 sure two decls are not the same. */
8823 && base0 != base1
8824 && TREE_CODE (arg0) == ADDR_EXPR
8825 && TREE_CODE (arg1) == ADDR_EXPR
8826 && (((TREE_CODE (base0) == VAR_DECL
8827 || TREE_CODE (base0) == PARM_DECL)
8828 && (targetm.binds_local_p (base0)
8829 || CONSTANT_CLASS_P (base1)))
8830 || CONSTANT_CLASS_P (base0))
8831 && (((TREE_CODE (base1) == VAR_DECL
8832 || TREE_CODE (base1) == PARM_DECL)
8833 && (targetm.binds_local_p (base1)
8834 || CONSTANT_CLASS_P (base0)))
8835 || CONSTANT_CLASS_P (base1)))
8837 if (code == EQ_EXPR)
8838 return omit_two_operands_loc (loc, type, boolean_false_node,
8839 arg0, arg1);
8840 else if (code == NE_EXPR)
8841 return omit_two_operands_loc (loc, type, boolean_true_node,
8842 arg0, arg1);
8844 /* For equal offsets we can simplify to a comparison of the
8845 base addresses. */
8846 else if (bitpos0 == bitpos1
8847 && (indirect_base0
8848 ? base0 != TREE_OPERAND (arg0, 0) : base0 != arg0)
8849 && (indirect_base1
8850 ? base1 != TREE_OPERAND (arg1, 0) : base1 != arg1)
8851 && ((offset0 == offset1)
8852 || (offset0 && offset1
8853 && operand_equal_p (offset0, offset1, 0))))
8855 if (indirect_base0)
8856 base0 = build_fold_addr_expr_loc (loc, base0);
8857 if (indirect_base1)
8858 base1 = build_fold_addr_expr_loc (loc, base1);
8859 return fold_build2_loc (loc, code, type, base0, base1);
8863 /* Transform comparisons of the form X +- C1 CMP Y +- C2 to
8864 X CMP Y +- C2 +- C1 for signed X, Y. This is valid if
8865 the resulting offset is smaller in absolute value than the
8866 original one. */
8867 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0))
8868 && (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
8869 && (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
8870 && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1)))
8871 && (TREE_CODE (arg1) == PLUS_EXPR || TREE_CODE (arg1) == MINUS_EXPR)
8872 && (TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
8873 && !TREE_OVERFLOW (TREE_OPERAND (arg1, 1))))
8875 tree const1 = TREE_OPERAND (arg0, 1);
8876 tree const2 = TREE_OPERAND (arg1, 1);
8877 tree variable1 = TREE_OPERAND (arg0, 0);
8878 tree variable2 = TREE_OPERAND (arg1, 0);
8879 tree cst;
8880 const char * const warnmsg = G_("assuming signed overflow does not "
8881 "occur when combining constants around "
8882 "a comparison");
8884 /* Put the constant on the side where it doesn't overflow and is
8885 of lower absolute value than before. */
8886 cst = int_const_binop (TREE_CODE (arg0) == TREE_CODE (arg1)
8887 ? MINUS_EXPR : PLUS_EXPR,
8888 const2, const1, 0);
8889 if (!TREE_OVERFLOW (cst)
8890 && tree_int_cst_compare (const2, cst) == tree_int_cst_sgn (const2))
8892 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_COMPARISON);
8893 return fold_build2_loc (loc, code, type,
8894 variable1,
8895 fold_build2_loc (loc,
8896 TREE_CODE (arg1), TREE_TYPE (arg1),
8897 variable2, cst));
8900 cst = int_const_binop (TREE_CODE (arg0) == TREE_CODE (arg1)
8901 ? MINUS_EXPR : PLUS_EXPR,
8902 const1, const2, 0);
8903 if (!TREE_OVERFLOW (cst)
8904 && tree_int_cst_compare (const1, cst) == tree_int_cst_sgn (const1))
8906 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_COMPARISON);
8907 return fold_build2_loc (loc, code, type,
8908 fold_build2_loc (loc, TREE_CODE (arg0), TREE_TYPE (arg0),
8909 variable1, cst),
8910 variable2);
8914 /* Transform comparisons of the form X * C1 CMP 0 to X CMP 0 in the
8915 signed arithmetic case. That form is created by the compiler
8916 often enough for folding it to be of value. One example is in
8917 computing loop trip counts after Operator Strength Reduction. */
8918 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0))
8919 && TREE_CODE (arg0) == MULT_EXPR
8920 && (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
8921 && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1)))
8922 && integer_zerop (arg1))
8924 tree const1 = TREE_OPERAND (arg0, 1);
8925 tree const2 = arg1; /* zero */
8926 tree variable1 = TREE_OPERAND (arg0, 0);
8927 enum tree_code cmp_code = code;
8929 /* Handle unfolded multiplication by zero. */
8930 if (integer_zerop (const1))
8931 return fold_build2_loc (loc, cmp_code, type, const1, const2);
8933 fold_overflow_warning (("assuming signed overflow does not occur when "
8934 "eliminating multiplication in comparison "
8935 "with zero"),
8936 WARN_STRICT_OVERFLOW_COMPARISON);
8938 /* If const1 is negative we swap the sense of the comparison. */
8939 if (tree_int_cst_sgn (const1) < 0)
8940 cmp_code = swap_tree_comparison (cmp_code);
8942 return fold_build2_loc (loc, cmp_code, type, variable1, const2);
8945 tem = maybe_canonicalize_comparison (loc, code, type, op0, op1);
8946 if (tem)
8947 return tem;
8949 if (FLOAT_TYPE_P (TREE_TYPE (arg0)))
8951 tree targ0 = strip_float_extensions (arg0);
8952 tree targ1 = strip_float_extensions (arg1);
8953 tree newtype = TREE_TYPE (targ0);
8955 if (TYPE_PRECISION (TREE_TYPE (targ1)) > TYPE_PRECISION (newtype))
8956 newtype = TREE_TYPE (targ1);
8958 /* Fold (double)float1 CMP (double)float2 into float1 CMP float2. */
8959 if (TYPE_PRECISION (newtype) < TYPE_PRECISION (TREE_TYPE (arg0)))
8960 return fold_build2_loc (loc, code, type,
8961 fold_convert_loc (loc, newtype, targ0),
8962 fold_convert_loc (loc, newtype, targ1));
8964 /* (-a) CMP (-b) -> b CMP a */
8965 if (TREE_CODE (arg0) == NEGATE_EXPR
8966 && TREE_CODE (arg1) == NEGATE_EXPR)
8967 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg1, 0),
8968 TREE_OPERAND (arg0, 0));
8970 if (TREE_CODE (arg1) == REAL_CST)
8972 REAL_VALUE_TYPE cst;
8973 cst = TREE_REAL_CST (arg1);
8975 /* (-a) CMP CST -> a swap(CMP) (-CST) */
8976 if (TREE_CODE (arg0) == NEGATE_EXPR)
8977 return fold_build2_loc (loc, swap_tree_comparison (code), type,
8978 TREE_OPERAND (arg0, 0),
8979 build_real (TREE_TYPE (arg1),
8980 REAL_VALUE_NEGATE (cst)));
8982 /* IEEE doesn't distinguish +0 and -0 in comparisons. */
8983 /* a CMP (-0) -> a CMP 0 */
8984 if (REAL_VALUE_MINUS_ZERO (cst))
8985 return fold_build2_loc (loc, code, type, arg0,
8986 build_real (TREE_TYPE (arg1), dconst0));
8988 /* x != NaN is always true, other ops are always false. */
8989 if (REAL_VALUE_ISNAN (cst)
8990 && ! HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg1))))
8992 tem = (code == NE_EXPR) ? integer_one_node : integer_zero_node;
8993 return omit_one_operand_loc (loc, type, tem, arg0);
8996 /* Fold comparisons against infinity. */
8997 if (REAL_VALUE_ISINF (cst)
8998 && MODE_HAS_INFINITIES (TYPE_MODE (TREE_TYPE (arg1))))
9000 tem = fold_inf_compare (loc, code, type, arg0, arg1);
9001 if (tem != NULL_TREE)
9002 return tem;
9006 /* If this is a comparison of a real constant with a PLUS_EXPR
9007 or a MINUS_EXPR of a real constant, we can convert it into a
9008 comparison with a revised real constant as long as no overflow
9009 occurs when unsafe_math_optimizations are enabled. */
9010 if (flag_unsafe_math_optimizations
9011 && TREE_CODE (arg1) == REAL_CST
9012 && (TREE_CODE (arg0) == PLUS_EXPR
9013 || TREE_CODE (arg0) == MINUS_EXPR)
9014 && TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
9015 && 0 != (tem = const_binop (TREE_CODE (arg0) == PLUS_EXPR
9016 ? MINUS_EXPR : PLUS_EXPR,
9017 arg1, TREE_OPERAND (arg0, 1), 0))
9018 && !TREE_OVERFLOW (tem))
9019 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0), tem);
9021 /* Likewise, we can simplify a comparison of a real constant with
9022 a MINUS_EXPR whose first operand is also a real constant, i.e.
9023 (c1 - x) < c2 becomes x > c1-c2. Reordering is allowed on
9024 floating-point types only if -fassociative-math is set. */
9025 if (flag_associative_math
9026 && TREE_CODE (arg1) == REAL_CST
9027 && TREE_CODE (arg0) == MINUS_EXPR
9028 && TREE_CODE (TREE_OPERAND (arg0, 0)) == REAL_CST
9029 && 0 != (tem = const_binop (MINUS_EXPR, TREE_OPERAND (arg0, 0),
9030 arg1, 0))
9031 && !TREE_OVERFLOW (tem))
9032 return fold_build2_loc (loc, swap_tree_comparison (code), type,
9033 TREE_OPERAND (arg0, 1), tem);
9035 /* Fold comparisons against built-in math functions. */
9036 if (TREE_CODE (arg1) == REAL_CST
9037 && flag_unsafe_math_optimizations
9038 && ! flag_errno_math)
9040 enum built_in_function fcode = builtin_mathfn_code (arg0);
9042 if (fcode != END_BUILTINS)
9044 tem = fold_mathfn_compare (loc, fcode, code, type, arg0, arg1);
9045 if (tem != NULL_TREE)
9046 return tem;
9051 if (TREE_CODE (TREE_TYPE (arg0)) == INTEGER_TYPE
9052 && CONVERT_EXPR_P (arg0))
9054 /* If we are widening one operand of an integer comparison,
9055 see if the other operand is similarly being widened. Perhaps we
9056 can do the comparison in the narrower type. */
9057 tem = fold_widened_comparison (loc, code, type, arg0, arg1);
9058 if (tem)
9059 return tem;
9061 /* Or if we are changing signedness. */
9062 tem = fold_sign_changed_comparison (loc, code, type, arg0, arg1);
9063 if (tem)
9064 return tem;
9067 /* If this is comparing a constant with a MIN_EXPR or a MAX_EXPR of a
9068 constant, we can simplify it. */
9069 if (TREE_CODE (arg1) == INTEGER_CST
9070 && (TREE_CODE (arg0) == MIN_EXPR
9071 || TREE_CODE (arg0) == MAX_EXPR)
9072 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
9074 tem = optimize_minmax_comparison (loc, code, type, op0, op1);
9075 if (tem)
9076 return tem;
9079 /* Simplify comparison of something with itself. (For IEEE
9080 floating-point, we can only do some of these simplifications.) */
9081 if (operand_equal_p (arg0, arg1, 0))
9083 switch (code)
9085 case EQ_EXPR:
9086 if (! FLOAT_TYPE_P (TREE_TYPE (arg0))
9087 || ! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
9088 return constant_boolean_node (1, type);
9089 break;
9091 case GE_EXPR:
9092 case LE_EXPR:
9093 if (! FLOAT_TYPE_P (TREE_TYPE (arg0))
9094 || ! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
9095 return constant_boolean_node (1, type);
9096 return fold_build2_loc (loc, EQ_EXPR, type, arg0, arg1);
9098 case NE_EXPR:
9099 /* For NE, we can only do this simplification if integer
9100 or we don't honor IEEE floating point NaNs. */
9101 if (FLOAT_TYPE_P (TREE_TYPE (arg0))
9102 && HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
9103 break;
9104 /* ... fall through ... */
9105 case GT_EXPR:
9106 case LT_EXPR:
9107 return constant_boolean_node (0, type);
9108 default:
9109 gcc_unreachable ();
9113 /* If we are comparing an expression that just has comparisons
9114 of two integer values, arithmetic expressions of those comparisons,
9115 and constants, we can simplify it. There are only three cases
9116 to check: the two values can either be equal, the first can be
9117 greater, or the second can be greater. Fold the expression for
9118 those three values. Since each value must be 0 or 1, we have
9119 eight possibilities, each of which corresponds to the constant 0
9120 or 1 or one of the six possible comparisons.
9122 This handles common cases like (a > b) == 0 but also handles
9123 expressions like ((x > y) - (y > x)) > 0, which supposedly
9124 occur in macroized code. */
9126 if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg0) != INTEGER_CST)
9128 tree cval1 = 0, cval2 = 0;
9129 int save_p = 0;
9131 if (twoval_comparison_p (arg0, &cval1, &cval2, &save_p)
9132 /* Don't handle degenerate cases here; they should already
9133 have been handled anyway. */
9134 && cval1 != 0 && cval2 != 0
9135 && ! (TREE_CONSTANT (cval1) && TREE_CONSTANT (cval2))
9136 && TREE_TYPE (cval1) == TREE_TYPE (cval2)
9137 && INTEGRAL_TYPE_P (TREE_TYPE (cval1))
9138 && TYPE_MAX_VALUE (TREE_TYPE (cval1))
9139 && TYPE_MAX_VALUE (TREE_TYPE (cval2))
9140 && ! operand_equal_p (TYPE_MIN_VALUE (TREE_TYPE (cval1)),
9141 TYPE_MAX_VALUE (TREE_TYPE (cval2)), 0))
9143 tree maxval = TYPE_MAX_VALUE (TREE_TYPE (cval1));
9144 tree minval = TYPE_MIN_VALUE (TREE_TYPE (cval1));
9146 /* We can't just pass T to eval_subst in case cval1 or cval2
9147 was the same as ARG1. */
9149 tree high_result
9150 = fold_build2_loc (loc, code, type,
9151 eval_subst (loc, arg0, cval1, maxval,
9152 cval2, minval),
9153 arg1);
9154 tree equal_result
9155 = fold_build2_loc (loc, code, type,
9156 eval_subst (loc, arg0, cval1, maxval,
9157 cval2, maxval),
9158 arg1);
9159 tree low_result
9160 = fold_build2_loc (loc, code, type,
9161 eval_subst (loc, arg0, cval1, minval,
9162 cval2, maxval),
9163 arg1);
9165 /* All three of these results should be 0 or 1. Confirm they are.
9166 Then use those values to select the proper code to use. */
9168 if (TREE_CODE (high_result) == INTEGER_CST
9169 && TREE_CODE (equal_result) == INTEGER_CST
9170 && TREE_CODE (low_result) == INTEGER_CST)
9172 /* Make a 3-bit mask with the high-order bit being the
9173 value for `>', the next for '=', and the low for '<'. */
9174 switch ((integer_onep (high_result) * 4)
9175 + (integer_onep (equal_result) * 2)
9176 + integer_onep (low_result))
9178 case 0:
9179 /* Always false. */
9180 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
9181 case 1:
9182 code = LT_EXPR;
9183 break;
9184 case 2:
9185 code = EQ_EXPR;
9186 break;
9187 case 3:
9188 code = LE_EXPR;
9189 break;
9190 case 4:
9191 code = GT_EXPR;
9192 break;
9193 case 5:
9194 code = NE_EXPR;
9195 break;
9196 case 6:
9197 code = GE_EXPR;
9198 break;
9199 case 7:
9200 /* Always true. */
9201 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
9204 if (save_p)
9206 tem = save_expr (build2 (code, type, cval1, cval2));
9207 SET_EXPR_LOCATION (tem, loc);
9208 return tem;
9210 return fold_build2_loc (loc, code, type, cval1, cval2);
9215 /* We can fold X/C1 op C2 where C1 and C2 are integer constants
9216 into a single range test. */
9217 if ((TREE_CODE (arg0) == TRUNC_DIV_EXPR
9218 || TREE_CODE (arg0) == EXACT_DIV_EXPR)
9219 && TREE_CODE (arg1) == INTEGER_CST
9220 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
9221 && !integer_zerop (TREE_OPERAND (arg0, 1))
9222 && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1))
9223 && !TREE_OVERFLOW (arg1))
9225 tem = fold_div_compare (loc, code, type, arg0, arg1);
9226 if (tem != NULL_TREE)
9227 return tem;
9230 /* Fold ~X op ~Y as Y op X. */
9231 if (TREE_CODE (arg0) == BIT_NOT_EXPR
9232 && TREE_CODE (arg1) == BIT_NOT_EXPR)
9234 tree cmp_type = TREE_TYPE (TREE_OPERAND (arg0, 0));
9235 return fold_build2_loc (loc, code, type,
9236 fold_convert_loc (loc, cmp_type,
9237 TREE_OPERAND (arg1, 0)),
9238 TREE_OPERAND (arg0, 0));
9241 /* Fold ~X op C as X op' ~C, where op' is the swapped comparison. */
9242 if (TREE_CODE (arg0) == BIT_NOT_EXPR
9243 && TREE_CODE (arg1) == INTEGER_CST)
9245 tree cmp_type = TREE_TYPE (TREE_OPERAND (arg0, 0));
9246 return fold_build2_loc (loc, swap_tree_comparison (code), type,
9247 TREE_OPERAND (arg0, 0),
9248 fold_build1_loc (loc, BIT_NOT_EXPR, cmp_type,
9249 fold_convert_loc (loc, cmp_type, arg1)));
9252 return NULL_TREE;
9256 /* Subroutine of fold_binary. Optimize complex multiplications of the
9257 form z * conj(z), as pow(realpart(z),2) + pow(imagpart(z),2). The
9258 argument EXPR represents the expression "z" of type TYPE. */
9260 static tree
9261 fold_mult_zconjz (location_t loc, tree type, tree expr)
9263 tree itype = TREE_TYPE (type);
9264 tree rpart, ipart, tem;
9266 if (TREE_CODE (expr) == COMPLEX_EXPR)
9268 rpart = TREE_OPERAND (expr, 0);
9269 ipart = TREE_OPERAND (expr, 1);
9271 else if (TREE_CODE (expr) == COMPLEX_CST)
9273 rpart = TREE_REALPART (expr);
9274 ipart = TREE_IMAGPART (expr);
9276 else
9278 expr = save_expr (expr);
9279 rpart = fold_build1_loc (loc, REALPART_EXPR, itype, expr);
9280 ipart = fold_build1_loc (loc, IMAGPART_EXPR, itype, expr);
9283 rpart = save_expr (rpart);
9284 ipart = save_expr (ipart);
9285 tem = fold_build2_loc (loc, PLUS_EXPR, itype,
9286 fold_build2_loc (loc, MULT_EXPR, itype, rpart, rpart),
9287 fold_build2_loc (loc, MULT_EXPR, itype, ipart, ipart));
9288 return fold_build2_loc (loc, COMPLEX_EXPR, type, tem,
9289 fold_convert_loc (loc, itype, integer_zero_node));
9293 /* Subroutine of fold_binary. If P is the value of EXPR, computes
9294 power-of-two M and (arbitrary) N such that M divides (P-N). This condition
9295 guarantees that P and N have the same least significant log2(M) bits.
9296 N is not otherwise constrained. In particular, N is not normalized to
9297 0 <= N < M as is common. In general, the precise value of P is unknown.
9298 M is chosen as large as possible such that constant N can be determined.
9300 Returns M and sets *RESIDUE to N.
9302 If ALLOW_FUNC_ALIGN is true, do take functions' DECL_ALIGN_UNIT into
9303 account. This is not always possible due to PR 35705.
9306 static unsigned HOST_WIDE_INT
9307 get_pointer_modulus_and_residue (tree expr, unsigned HOST_WIDE_INT *residue,
9308 bool allow_func_align)
9310 enum tree_code code;
9312 *residue = 0;
9314 code = TREE_CODE (expr);
9315 if (code == ADDR_EXPR)
9317 expr = TREE_OPERAND (expr, 0);
9318 if (handled_component_p (expr))
9320 HOST_WIDE_INT bitsize, bitpos;
9321 tree offset;
9322 enum machine_mode mode;
9323 int unsignedp, volatilep;
9325 expr = get_inner_reference (expr, &bitsize, &bitpos, &offset,
9326 &mode, &unsignedp, &volatilep, false);
9327 *residue = bitpos / BITS_PER_UNIT;
9328 if (offset)
9330 if (TREE_CODE (offset) == INTEGER_CST)
9331 *residue += TREE_INT_CST_LOW (offset);
9332 else
9333 /* We don't handle more complicated offset expressions. */
9334 return 1;
9338 if (DECL_P (expr)
9339 && (allow_func_align || TREE_CODE (expr) != FUNCTION_DECL))
9340 return DECL_ALIGN_UNIT (expr);
9342 else if (code == POINTER_PLUS_EXPR)
9344 tree op0, op1;
9345 unsigned HOST_WIDE_INT modulus;
9346 enum tree_code inner_code;
9348 op0 = TREE_OPERAND (expr, 0);
9349 STRIP_NOPS (op0);
9350 modulus = get_pointer_modulus_and_residue (op0, residue,
9351 allow_func_align);
9353 op1 = TREE_OPERAND (expr, 1);
9354 STRIP_NOPS (op1);
9355 inner_code = TREE_CODE (op1);
9356 if (inner_code == INTEGER_CST)
9358 *residue += TREE_INT_CST_LOW (op1);
9359 return modulus;
9361 else if (inner_code == MULT_EXPR)
9363 op1 = TREE_OPERAND (op1, 1);
9364 if (TREE_CODE (op1) == INTEGER_CST)
9366 unsigned HOST_WIDE_INT align;
9368 /* Compute the greatest power-of-2 divisor of op1. */
9369 align = TREE_INT_CST_LOW (op1);
9370 align &= -align;
9372 /* If align is non-zero and less than *modulus, replace
9373 *modulus with align., If align is 0, then either op1 is 0
9374 or the greatest power-of-2 divisor of op1 doesn't fit in an
9375 unsigned HOST_WIDE_INT. In either case, no additional
9376 constraint is imposed. */
9377 if (align)
9378 modulus = MIN (modulus, align);
9380 return modulus;
9385 /* If we get here, we were unable to determine anything useful about the
9386 expression. */
9387 return 1;
9391 /* Fold a binary expression of code CODE and type TYPE with operands
9392 OP0 and OP1. LOC is the location of the resulting expression.
9393 Return the folded expression if folding is successful. Otherwise,
9394 return NULL_TREE. */
9396 tree
9397 fold_binary_loc (location_t loc,
9398 enum tree_code code, tree type, tree op0, tree op1)
9400 enum tree_code_class kind = TREE_CODE_CLASS (code);
9401 tree arg0, arg1, tem;
9402 tree t1 = NULL_TREE;
9403 bool strict_overflow_p;
9405 gcc_assert (IS_EXPR_CODE_CLASS (kind)
9406 && TREE_CODE_LENGTH (code) == 2
9407 && op0 != NULL_TREE
9408 && op1 != NULL_TREE);
9410 arg0 = op0;
9411 arg1 = op1;
9413 /* Strip any conversions that don't change the mode. This is
9414 safe for every expression, except for a comparison expression
9415 because its signedness is derived from its operands. So, in
9416 the latter case, only strip conversions that don't change the
9417 signedness. MIN_EXPR/MAX_EXPR also need signedness of arguments
9418 preserved.
9420 Note that this is done as an internal manipulation within the
9421 constant folder, in order to find the simplest representation
9422 of the arguments so that their form can be studied. In any
9423 cases, the appropriate type conversions should be put back in
9424 the tree that will get out of the constant folder. */
9426 if (kind == tcc_comparison || code == MIN_EXPR || code == MAX_EXPR)
9428 STRIP_SIGN_NOPS (arg0);
9429 STRIP_SIGN_NOPS (arg1);
9431 else
9433 STRIP_NOPS (arg0);
9434 STRIP_NOPS (arg1);
9437 /* Note that TREE_CONSTANT isn't enough: static var addresses are
9438 constant but we can't do arithmetic on them. */
9439 if ((TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
9440 || (TREE_CODE (arg0) == REAL_CST && TREE_CODE (arg1) == REAL_CST)
9441 || (TREE_CODE (arg0) == FIXED_CST && TREE_CODE (arg1) == FIXED_CST)
9442 || (TREE_CODE (arg0) == FIXED_CST && TREE_CODE (arg1) == INTEGER_CST)
9443 || (TREE_CODE (arg0) == COMPLEX_CST && TREE_CODE (arg1) == COMPLEX_CST)
9444 || (TREE_CODE (arg0) == VECTOR_CST && TREE_CODE (arg1) == VECTOR_CST))
9446 if (kind == tcc_binary)
9448 /* Make sure type and arg0 have the same saturating flag. */
9449 gcc_assert (TYPE_SATURATING (type)
9450 == TYPE_SATURATING (TREE_TYPE (arg0)));
9451 tem = const_binop (code, arg0, arg1, 0);
9453 else if (kind == tcc_comparison)
9454 tem = fold_relational_const (code, type, arg0, arg1);
9455 else
9456 tem = NULL_TREE;
9458 if (tem != NULL_TREE)
9460 if (TREE_TYPE (tem) != type)
9461 tem = fold_convert_loc (loc, type, tem);
9462 return tem;
9466 /* If this is a commutative operation, and ARG0 is a constant, move it
9467 to ARG1 to reduce the number of tests below. */
9468 if (commutative_tree_code (code)
9469 && tree_swap_operands_p (arg0, arg1, true))
9470 return fold_build2_loc (loc, code, type, op1, op0);
9472 /* ARG0 is the first operand of EXPR, and ARG1 is the second operand.
9474 First check for cases where an arithmetic operation is applied to a
9475 compound, conditional, or comparison operation. Push the arithmetic
9476 operation inside the compound or conditional to see if any folding
9477 can then be done. Convert comparison to conditional for this purpose.
9478 The also optimizes non-constant cases that used to be done in
9479 expand_expr.
9481 Before we do that, see if this is a BIT_AND_EXPR or a BIT_IOR_EXPR,
9482 one of the operands is a comparison and the other is a comparison, a
9483 BIT_AND_EXPR with the constant 1, or a truth value. In that case, the
9484 code below would make the expression more complex. Change it to a
9485 TRUTH_{AND,OR}_EXPR. Likewise, convert a similar NE_EXPR to
9486 TRUTH_XOR_EXPR and an EQ_EXPR to the inversion of a TRUTH_XOR_EXPR. */
9488 if ((code == BIT_AND_EXPR || code == BIT_IOR_EXPR
9489 || code == EQ_EXPR || code == NE_EXPR)
9490 && ((truth_value_p (TREE_CODE (arg0))
9491 && (truth_value_p (TREE_CODE (arg1))
9492 || (TREE_CODE (arg1) == BIT_AND_EXPR
9493 && integer_onep (TREE_OPERAND (arg1, 1)))))
9494 || (truth_value_p (TREE_CODE (arg1))
9495 && (truth_value_p (TREE_CODE (arg0))
9496 || (TREE_CODE (arg0) == BIT_AND_EXPR
9497 && integer_onep (TREE_OPERAND (arg0, 1)))))))
9499 tem = fold_build2_loc (loc, code == BIT_AND_EXPR ? TRUTH_AND_EXPR
9500 : code == BIT_IOR_EXPR ? TRUTH_OR_EXPR
9501 : TRUTH_XOR_EXPR,
9502 boolean_type_node,
9503 fold_convert_loc (loc, boolean_type_node, arg0),
9504 fold_convert_loc (loc, boolean_type_node, arg1));
9506 if (code == EQ_EXPR)
9507 tem = invert_truthvalue_loc (loc, tem);
9509 return fold_convert_loc (loc, type, tem);
9512 if (TREE_CODE_CLASS (code) == tcc_binary
9513 || TREE_CODE_CLASS (code) == tcc_comparison)
9515 if (TREE_CODE (arg0) == COMPOUND_EXPR)
9517 tem = fold_build2_loc (loc, code, type,
9518 fold_convert_loc (loc, TREE_TYPE (op0),
9519 TREE_OPERAND (arg0, 1)), op1);
9520 tem = build2 (COMPOUND_EXPR, type, TREE_OPERAND (arg0, 0), tem);
9521 goto fold_binary_exit;
9523 if (TREE_CODE (arg1) == COMPOUND_EXPR
9524 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
9526 tem = fold_build2_loc (loc, code, type, op0,
9527 fold_convert_loc (loc, TREE_TYPE (op1),
9528 TREE_OPERAND (arg1, 1)));
9529 tem = build2 (COMPOUND_EXPR, type, TREE_OPERAND (arg1, 0), tem);
9530 goto fold_binary_exit;
9533 if (TREE_CODE (arg0) == COND_EXPR || COMPARISON_CLASS_P (arg0))
9535 tem = fold_binary_op_with_conditional_arg (loc, code, type, op0, op1,
9536 arg0, arg1,
9537 /*cond_first_p=*/1);
9538 if (tem != NULL_TREE)
9539 return tem;
9542 if (TREE_CODE (arg1) == COND_EXPR || COMPARISON_CLASS_P (arg1))
9544 tem = fold_binary_op_with_conditional_arg (loc, code, type, op0, op1,
9545 arg1, arg0,
9546 /*cond_first_p=*/0);
9547 if (tem != NULL_TREE)
9548 return tem;
9552 switch (code)
9554 case POINTER_PLUS_EXPR:
9555 /* 0 +p index -> (type)index */
9556 if (integer_zerop (arg0))
9557 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
9559 /* PTR +p 0 -> PTR */
9560 if (integer_zerop (arg1))
9561 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
9563 /* INT +p INT -> (PTR)(INT + INT). Stripping types allows for this. */
9564 if (INTEGRAL_TYPE_P (TREE_TYPE (arg1))
9565 && INTEGRAL_TYPE_P (TREE_TYPE (arg0)))
9566 return fold_convert_loc (loc, type,
9567 fold_build2_loc (loc, PLUS_EXPR, sizetype,
9568 fold_convert_loc (loc, sizetype,
9569 arg1),
9570 fold_convert_loc (loc, sizetype,
9571 arg0)));
9573 /* index +p PTR -> PTR +p index */
9574 if (POINTER_TYPE_P (TREE_TYPE (arg1))
9575 && INTEGRAL_TYPE_P (TREE_TYPE (arg0)))
9576 return fold_build2_loc (loc, POINTER_PLUS_EXPR, type,
9577 fold_convert_loc (loc, type, arg1),
9578 fold_convert_loc (loc, sizetype, arg0));
9580 /* (PTR +p B) +p A -> PTR +p (B + A) */
9581 if (TREE_CODE (arg0) == POINTER_PLUS_EXPR)
9583 tree inner;
9584 tree arg01 = fold_convert_loc (loc, sizetype, TREE_OPERAND (arg0, 1));
9585 tree arg00 = TREE_OPERAND (arg0, 0);
9586 inner = fold_build2_loc (loc, PLUS_EXPR, sizetype,
9587 arg01, fold_convert_loc (loc, sizetype, arg1));
9588 return fold_convert_loc (loc, type,
9589 fold_build2_loc (loc, POINTER_PLUS_EXPR,
9590 TREE_TYPE (arg00),
9591 arg00, inner));
9594 /* PTR_CST +p CST -> CST1 */
9595 if (TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
9596 return fold_build2_loc (loc, PLUS_EXPR, type, arg0,
9597 fold_convert_loc (loc, type, arg1));
9599 /* Try replacing &a[i1] +p c * i2 with &a[i1 + i2], if c is step
9600 of the array. Loop optimizer sometimes produce this type of
9601 expressions. */
9602 if (TREE_CODE (arg0) == ADDR_EXPR)
9604 tem = try_move_mult_to_index (loc, arg0,
9605 fold_convert_loc (loc, sizetype, arg1));
9606 if (tem)
9607 return fold_convert_loc (loc, type, tem);
9610 return NULL_TREE;
9612 case PLUS_EXPR:
9613 /* A + (-B) -> A - B */
9614 if (TREE_CODE (arg1) == NEGATE_EXPR)
9615 return fold_build2_loc (loc, MINUS_EXPR, type,
9616 fold_convert_loc (loc, type, arg0),
9617 fold_convert_loc (loc, type,
9618 TREE_OPERAND (arg1, 0)));
9619 /* (-A) + B -> B - A */
9620 if (TREE_CODE (arg0) == NEGATE_EXPR
9621 && reorder_operands_p (TREE_OPERAND (arg0, 0), arg1))
9622 return fold_build2_loc (loc, MINUS_EXPR, type,
9623 fold_convert_loc (loc, type, arg1),
9624 fold_convert_loc (loc, type,
9625 TREE_OPERAND (arg0, 0)));
9627 if (INTEGRAL_TYPE_P (type))
9629 /* Convert ~A + 1 to -A. */
9630 if (TREE_CODE (arg0) == BIT_NOT_EXPR
9631 && integer_onep (arg1))
9632 return fold_build1_loc (loc, NEGATE_EXPR, type,
9633 fold_convert_loc (loc, type,
9634 TREE_OPERAND (arg0, 0)));
9636 /* ~X + X is -1. */
9637 if (TREE_CODE (arg0) == BIT_NOT_EXPR
9638 && !TYPE_OVERFLOW_TRAPS (type))
9640 tree tem = TREE_OPERAND (arg0, 0);
9642 STRIP_NOPS (tem);
9643 if (operand_equal_p (tem, arg1, 0))
9645 t1 = build_int_cst_type (type, -1);
9646 return omit_one_operand_loc (loc, type, t1, arg1);
9650 /* X + ~X is -1. */
9651 if (TREE_CODE (arg1) == BIT_NOT_EXPR
9652 && !TYPE_OVERFLOW_TRAPS (type))
9654 tree tem = TREE_OPERAND (arg1, 0);
9656 STRIP_NOPS (tem);
9657 if (operand_equal_p (arg0, tem, 0))
9659 t1 = build_int_cst_type (type, -1);
9660 return omit_one_operand_loc (loc, type, t1, arg0);
9664 /* X + (X / CST) * -CST is X % CST. */
9665 if (TREE_CODE (arg1) == MULT_EXPR
9666 && TREE_CODE (TREE_OPERAND (arg1, 0)) == TRUNC_DIV_EXPR
9667 && operand_equal_p (arg0,
9668 TREE_OPERAND (TREE_OPERAND (arg1, 0), 0), 0))
9670 tree cst0 = TREE_OPERAND (TREE_OPERAND (arg1, 0), 1);
9671 tree cst1 = TREE_OPERAND (arg1, 1);
9672 tree sum = fold_binary_loc (loc, PLUS_EXPR, TREE_TYPE (cst1),
9673 cst1, cst0);
9674 if (sum && integer_zerop (sum))
9675 return fold_convert_loc (loc, type,
9676 fold_build2_loc (loc, TRUNC_MOD_EXPR,
9677 TREE_TYPE (arg0), arg0,
9678 cst0));
9682 /* Handle (A1 * C1) + (A2 * C2) with A1, A2 or C1, C2 being the
9683 same or one. Make sure type is not saturating.
9684 fold_plusminus_mult_expr will re-associate. */
9685 if ((TREE_CODE (arg0) == MULT_EXPR
9686 || TREE_CODE (arg1) == MULT_EXPR)
9687 && !TYPE_SATURATING (type)
9688 && (!FLOAT_TYPE_P (type) || flag_associative_math))
9690 tree tem = fold_plusminus_mult_expr (loc, code, type, arg0, arg1);
9691 if (tem)
9692 return tem;
9695 if (! FLOAT_TYPE_P (type))
9697 if (integer_zerop (arg1))
9698 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
9700 /* If we are adding two BIT_AND_EXPR's, both of which are and'ing
9701 with a constant, and the two constants have no bits in common,
9702 we should treat this as a BIT_IOR_EXPR since this may produce more
9703 simplifications. */
9704 if (TREE_CODE (arg0) == BIT_AND_EXPR
9705 && TREE_CODE (arg1) == BIT_AND_EXPR
9706 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
9707 && TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
9708 && integer_zerop (const_binop (BIT_AND_EXPR,
9709 TREE_OPERAND (arg0, 1),
9710 TREE_OPERAND (arg1, 1), 0)))
9712 code = BIT_IOR_EXPR;
9713 goto bit_ior;
9716 /* Reassociate (plus (plus (mult) (foo)) (mult)) as
9717 (plus (plus (mult) (mult)) (foo)) so that we can
9718 take advantage of the factoring cases below. */
9719 if (((TREE_CODE (arg0) == PLUS_EXPR
9720 || TREE_CODE (arg0) == MINUS_EXPR)
9721 && TREE_CODE (arg1) == MULT_EXPR)
9722 || ((TREE_CODE (arg1) == PLUS_EXPR
9723 || TREE_CODE (arg1) == MINUS_EXPR)
9724 && TREE_CODE (arg0) == MULT_EXPR))
9726 tree parg0, parg1, parg, marg;
9727 enum tree_code pcode;
9729 if (TREE_CODE (arg1) == MULT_EXPR)
9730 parg = arg0, marg = arg1;
9731 else
9732 parg = arg1, marg = arg0;
9733 pcode = TREE_CODE (parg);
9734 parg0 = TREE_OPERAND (parg, 0);
9735 parg1 = TREE_OPERAND (parg, 1);
9736 STRIP_NOPS (parg0);
9737 STRIP_NOPS (parg1);
9739 if (TREE_CODE (parg0) == MULT_EXPR
9740 && TREE_CODE (parg1) != MULT_EXPR)
9741 return fold_build2_loc (loc, pcode, type,
9742 fold_build2_loc (loc, PLUS_EXPR, type,
9743 fold_convert_loc (loc, type,
9744 parg0),
9745 fold_convert_loc (loc, type,
9746 marg)),
9747 fold_convert_loc (loc, type, parg1));
9748 if (TREE_CODE (parg0) != MULT_EXPR
9749 && TREE_CODE (parg1) == MULT_EXPR)
9750 return
9751 fold_build2_loc (loc, PLUS_EXPR, type,
9752 fold_convert_loc (loc, type, parg0),
9753 fold_build2_loc (loc, pcode, type,
9754 fold_convert_loc (loc, type, marg),
9755 fold_convert_loc (loc, type,
9756 parg1)));
9759 else
9761 /* See if ARG1 is zero and X + ARG1 reduces to X. */
9762 if (fold_real_zero_addition_p (TREE_TYPE (arg0), arg1, 0))
9763 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
9765 /* Likewise if the operands are reversed. */
9766 if (fold_real_zero_addition_p (TREE_TYPE (arg1), arg0, 0))
9767 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
9769 /* Convert X + -C into X - C. */
9770 if (TREE_CODE (arg1) == REAL_CST
9771 && REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg1)))
9773 tem = fold_negate_const (arg1, type);
9774 if (!TREE_OVERFLOW (arg1) || !flag_trapping_math)
9775 return fold_build2_loc (loc, MINUS_EXPR, type,
9776 fold_convert_loc (loc, type, arg0),
9777 fold_convert_loc (loc, type, tem));
9780 /* Fold __complex__ ( x, 0 ) + __complex__ ( 0, y )
9781 to __complex__ ( x, y ). This is not the same for SNaNs or
9782 if signed zeros are involved. */
9783 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
9784 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
9785 && COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0)))
9787 tree rtype = TREE_TYPE (TREE_TYPE (arg0));
9788 tree arg0r = fold_unary_loc (loc, REALPART_EXPR, rtype, arg0);
9789 tree arg0i = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg0);
9790 bool arg0rz = false, arg0iz = false;
9791 if ((arg0r && (arg0rz = real_zerop (arg0r)))
9792 || (arg0i && (arg0iz = real_zerop (arg0i))))
9794 tree arg1r = fold_unary_loc (loc, REALPART_EXPR, rtype, arg1);
9795 tree arg1i = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg1);
9796 if (arg0rz && arg1i && real_zerop (arg1i))
9798 tree rp = arg1r ? arg1r
9799 : build1 (REALPART_EXPR, rtype, arg1);
9800 tree ip = arg0i ? arg0i
9801 : build1 (IMAGPART_EXPR, rtype, arg0);
9802 return fold_build2_loc (loc, COMPLEX_EXPR, type, rp, ip);
9804 else if (arg0iz && arg1r && real_zerop (arg1r))
9806 tree rp = arg0r ? arg0r
9807 : build1 (REALPART_EXPR, rtype, arg0);
9808 tree ip = arg1i ? arg1i
9809 : build1 (IMAGPART_EXPR, rtype, arg1);
9810 return fold_build2_loc (loc, COMPLEX_EXPR, type, rp, ip);
9815 if (flag_unsafe_math_optimizations
9816 && (TREE_CODE (arg0) == RDIV_EXPR || TREE_CODE (arg0) == MULT_EXPR)
9817 && (TREE_CODE (arg1) == RDIV_EXPR || TREE_CODE (arg1) == MULT_EXPR)
9818 && (tem = distribute_real_division (loc, code, type, arg0, arg1)))
9819 return tem;
9821 /* Convert x+x into x*2.0. */
9822 if (operand_equal_p (arg0, arg1, 0)
9823 && SCALAR_FLOAT_TYPE_P (type))
9824 return fold_build2_loc (loc, MULT_EXPR, type, arg0,
9825 build_real (type, dconst2));
9827 /* Convert a + (b*c + d*e) into (a + b*c) + d*e.
9828 We associate floats only if the user has specified
9829 -fassociative-math. */
9830 if (flag_associative_math
9831 && TREE_CODE (arg1) == PLUS_EXPR
9832 && TREE_CODE (arg0) != MULT_EXPR)
9834 tree tree10 = TREE_OPERAND (arg1, 0);
9835 tree tree11 = TREE_OPERAND (arg1, 1);
9836 if (TREE_CODE (tree11) == MULT_EXPR
9837 && TREE_CODE (tree10) == MULT_EXPR)
9839 tree tree0;
9840 tree0 = fold_build2_loc (loc, PLUS_EXPR, type, arg0, tree10);
9841 return fold_build2_loc (loc, PLUS_EXPR, type, tree0, tree11);
9844 /* Convert (b*c + d*e) + a into b*c + (d*e +a).
9845 We associate floats only if the user has specified
9846 -fassociative-math. */
9847 if (flag_associative_math
9848 && TREE_CODE (arg0) == PLUS_EXPR
9849 && TREE_CODE (arg1) != MULT_EXPR)
9851 tree tree00 = TREE_OPERAND (arg0, 0);
9852 tree tree01 = TREE_OPERAND (arg0, 1);
9853 if (TREE_CODE (tree01) == MULT_EXPR
9854 && TREE_CODE (tree00) == MULT_EXPR)
9856 tree tree0;
9857 tree0 = fold_build2_loc (loc, PLUS_EXPR, type, tree01, arg1);
9858 return fold_build2_loc (loc, PLUS_EXPR, type, tree00, tree0);
9863 bit_rotate:
9864 /* (A << C1) + (A >> C2) if A is unsigned and C1+C2 is the size of A
9865 is a rotate of A by C1 bits. */
9866 /* (A << B) + (A >> (Z - B)) if A is unsigned and Z is the size of A
9867 is a rotate of A by B bits. */
9869 enum tree_code code0, code1;
9870 tree rtype;
9871 code0 = TREE_CODE (arg0);
9872 code1 = TREE_CODE (arg1);
9873 if (((code0 == RSHIFT_EXPR && code1 == LSHIFT_EXPR)
9874 || (code1 == RSHIFT_EXPR && code0 == LSHIFT_EXPR))
9875 && operand_equal_p (TREE_OPERAND (arg0, 0),
9876 TREE_OPERAND (arg1, 0), 0)
9877 && (rtype = TREE_TYPE (TREE_OPERAND (arg0, 0)),
9878 TYPE_UNSIGNED (rtype))
9879 /* Only create rotates in complete modes. Other cases are not
9880 expanded properly. */
9881 && TYPE_PRECISION (rtype) == GET_MODE_PRECISION (TYPE_MODE (rtype)))
9883 tree tree01, tree11;
9884 enum tree_code code01, code11;
9886 tree01 = TREE_OPERAND (arg0, 1);
9887 tree11 = TREE_OPERAND (arg1, 1);
9888 STRIP_NOPS (tree01);
9889 STRIP_NOPS (tree11);
9890 code01 = TREE_CODE (tree01);
9891 code11 = TREE_CODE (tree11);
9892 if (code01 == INTEGER_CST
9893 && code11 == INTEGER_CST
9894 && TREE_INT_CST_HIGH (tree01) == 0
9895 && TREE_INT_CST_HIGH (tree11) == 0
9896 && ((TREE_INT_CST_LOW (tree01) + TREE_INT_CST_LOW (tree11))
9897 == TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg0, 0)))))
9899 tem = build2 (LROTATE_EXPR,
9900 TREE_TYPE (TREE_OPERAND (arg0, 0)),
9901 TREE_OPERAND (arg0, 0),
9902 code0 == LSHIFT_EXPR
9903 ? tree01 : tree11);
9904 SET_EXPR_LOCATION (tem, loc);
9905 return fold_convert_loc (loc, type, tem);
9907 else if (code11 == MINUS_EXPR)
9909 tree tree110, tree111;
9910 tree110 = TREE_OPERAND (tree11, 0);
9911 tree111 = TREE_OPERAND (tree11, 1);
9912 STRIP_NOPS (tree110);
9913 STRIP_NOPS (tree111);
9914 if (TREE_CODE (tree110) == INTEGER_CST
9915 && 0 == compare_tree_int (tree110,
9916 TYPE_PRECISION
9917 (TREE_TYPE (TREE_OPERAND
9918 (arg0, 0))))
9919 && operand_equal_p (tree01, tree111, 0))
9920 return
9921 fold_convert_loc (loc, type,
9922 build2 ((code0 == LSHIFT_EXPR
9923 ? LROTATE_EXPR
9924 : RROTATE_EXPR),
9925 TREE_TYPE (TREE_OPERAND (arg0, 0)),
9926 TREE_OPERAND (arg0, 0), tree01));
9928 else if (code01 == MINUS_EXPR)
9930 tree tree010, tree011;
9931 tree010 = TREE_OPERAND (tree01, 0);
9932 tree011 = TREE_OPERAND (tree01, 1);
9933 STRIP_NOPS (tree010);
9934 STRIP_NOPS (tree011);
9935 if (TREE_CODE (tree010) == INTEGER_CST
9936 && 0 == compare_tree_int (tree010,
9937 TYPE_PRECISION
9938 (TREE_TYPE (TREE_OPERAND
9939 (arg0, 0))))
9940 && operand_equal_p (tree11, tree011, 0))
9941 return fold_convert_loc
9942 (loc, type,
9943 build2 ((code0 != LSHIFT_EXPR
9944 ? LROTATE_EXPR
9945 : RROTATE_EXPR),
9946 TREE_TYPE (TREE_OPERAND (arg0, 0)),
9947 TREE_OPERAND (arg0, 0), tree11));
9952 associate:
9953 /* In most languages, can't associate operations on floats through
9954 parentheses. Rather than remember where the parentheses were, we
9955 don't associate floats at all, unless the user has specified
9956 -fassociative-math.
9957 And, we need to make sure type is not saturating. */
9959 if ((! FLOAT_TYPE_P (type) || flag_associative_math)
9960 && !TYPE_SATURATING (type))
9962 tree var0, con0, lit0, minus_lit0;
9963 tree var1, con1, lit1, minus_lit1;
9964 bool ok = true;
9966 /* Split both trees into variables, constants, and literals. Then
9967 associate each group together, the constants with literals,
9968 then the result with variables. This increases the chances of
9969 literals being recombined later and of generating relocatable
9970 expressions for the sum of a constant and literal. */
9971 var0 = split_tree (arg0, code, &con0, &lit0, &minus_lit0, 0);
9972 var1 = split_tree (arg1, code, &con1, &lit1, &minus_lit1,
9973 code == MINUS_EXPR);
9975 /* Recombine MINUS_EXPR operands by using PLUS_EXPR. */
9976 if (code == MINUS_EXPR)
9977 code = PLUS_EXPR;
9979 /* With undefined overflow we can only associate constants with one
9980 variable, and constants whose association doesn't overflow. */
9981 if ((POINTER_TYPE_P (type) && POINTER_TYPE_OVERFLOW_UNDEFINED)
9982 || (INTEGRAL_TYPE_P (type) && !TYPE_OVERFLOW_WRAPS (type)))
9984 if (var0 && var1)
9986 tree tmp0 = var0;
9987 tree tmp1 = var1;
9989 if (TREE_CODE (tmp0) == NEGATE_EXPR)
9990 tmp0 = TREE_OPERAND (tmp0, 0);
9991 if (TREE_CODE (tmp1) == NEGATE_EXPR)
9992 tmp1 = TREE_OPERAND (tmp1, 0);
9993 /* The only case we can still associate with two variables
9994 is if they are the same, modulo negation. */
9995 if (!operand_equal_p (tmp0, tmp1, 0))
9996 ok = false;
9999 if (ok && lit0 && lit1)
10001 tree tmp0 = fold_convert (type, lit0);
10002 tree tmp1 = fold_convert (type, lit1);
10004 if (!TREE_OVERFLOW (tmp0) && !TREE_OVERFLOW (tmp1)
10005 && TREE_OVERFLOW (fold_build2 (code, type, tmp0, tmp1)))
10006 ok = false;
10010 /* Only do something if we found more than two objects. Otherwise,
10011 nothing has changed and we risk infinite recursion. */
10012 if (ok
10013 && (2 < ((var0 != 0) + (var1 != 0)
10014 + (con0 != 0) + (con1 != 0)
10015 + (lit0 != 0) + (lit1 != 0)
10016 + (minus_lit0 != 0) + (minus_lit1 != 0))))
10018 var0 = associate_trees (loc, var0, var1, code, type);
10019 con0 = associate_trees (loc, con0, con1, code, type);
10020 lit0 = associate_trees (loc, lit0, lit1, code, type);
10021 minus_lit0 = associate_trees (loc, minus_lit0, minus_lit1, code, type);
10023 /* Preserve the MINUS_EXPR if the negative part of the literal is
10024 greater than the positive part. Otherwise, the multiplicative
10025 folding code (i.e extract_muldiv) may be fooled in case
10026 unsigned constants are subtracted, like in the following
10027 example: ((X*2 + 4) - 8U)/2. */
10028 if (minus_lit0 && lit0)
10030 if (TREE_CODE (lit0) == INTEGER_CST
10031 && TREE_CODE (minus_lit0) == INTEGER_CST
10032 && tree_int_cst_lt (lit0, minus_lit0))
10034 minus_lit0 = associate_trees (loc, minus_lit0, lit0,
10035 MINUS_EXPR, type);
10036 lit0 = 0;
10038 else
10040 lit0 = associate_trees (loc, lit0, minus_lit0,
10041 MINUS_EXPR, type);
10042 minus_lit0 = 0;
10045 if (minus_lit0)
10047 if (con0 == 0)
10048 return
10049 fold_convert_loc (loc, type,
10050 associate_trees (loc, var0, minus_lit0,
10051 MINUS_EXPR, type));
10052 else
10054 con0 = associate_trees (loc, con0, minus_lit0,
10055 MINUS_EXPR, type);
10056 return
10057 fold_convert_loc (loc, type,
10058 associate_trees (loc, var0, con0,
10059 PLUS_EXPR, type));
10063 con0 = associate_trees (loc, con0, lit0, code, type);
10064 return
10065 fold_convert_loc (loc, type, associate_trees (loc, var0, con0,
10066 code, type));
10070 return NULL_TREE;
10072 case MINUS_EXPR:
10073 /* Pointer simplifications for subtraction, simple reassociations. */
10074 if (POINTER_TYPE_P (TREE_TYPE (arg1)) && POINTER_TYPE_P (TREE_TYPE (arg0)))
10076 /* (PTR0 p+ A) - (PTR1 p+ B) -> (PTR0 - PTR1) + (A - B) */
10077 if (TREE_CODE (arg0) == POINTER_PLUS_EXPR
10078 && TREE_CODE (arg1) == POINTER_PLUS_EXPR)
10080 tree arg00 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
10081 tree arg01 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
10082 tree arg10 = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 0));
10083 tree arg11 = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 1));
10084 return fold_build2_loc (loc, PLUS_EXPR, type,
10085 fold_build2_loc (loc, MINUS_EXPR, type,
10086 arg00, arg10),
10087 fold_build2_loc (loc, MINUS_EXPR, type,
10088 arg01, arg11));
10090 /* (PTR0 p+ A) - PTR1 -> (PTR0 - PTR1) + A, assuming PTR0 - PTR1 simplifies. */
10091 else if (TREE_CODE (arg0) == POINTER_PLUS_EXPR)
10093 tree arg00 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
10094 tree arg01 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
10095 tree tmp = fold_binary_loc (loc, MINUS_EXPR, type, arg00,
10096 fold_convert_loc (loc, type, arg1));
10097 if (tmp)
10098 return fold_build2_loc (loc, PLUS_EXPR, type, tmp, arg01);
10101 /* A - (-B) -> A + B */
10102 if (TREE_CODE (arg1) == NEGATE_EXPR)
10103 return fold_build2_loc (loc, PLUS_EXPR, type, op0,
10104 fold_convert_loc (loc, type,
10105 TREE_OPERAND (arg1, 0)));
10106 /* (-A) - B -> (-B) - A where B is easily negated and we can swap. */
10107 if (TREE_CODE (arg0) == NEGATE_EXPR
10108 && (FLOAT_TYPE_P (type)
10109 || INTEGRAL_TYPE_P (type))
10110 && negate_expr_p (arg1)
10111 && reorder_operands_p (arg0, arg1))
10112 return fold_build2_loc (loc, MINUS_EXPR, type,
10113 fold_convert_loc (loc, type,
10114 negate_expr (arg1)),
10115 fold_convert_loc (loc, type,
10116 TREE_OPERAND (arg0, 0)));
10117 /* Convert -A - 1 to ~A. */
10118 if (INTEGRAL_TYPE_P (type)
10119 && TREE_CODE (arg0) == NEGATE_EXPR
10120 && integer_onep (arg1)
10121 && !TYPE_OVERFLOW_TRAPS (type))
10122 return fold_build1_loc (loc, BIT_NOT_EXPR, type,
10123 fold_convert_loc (loc, type,
10124 TREE_OPERAND (arg0, 0)));
10126 /* Convert -1 - A to ~A. */
10127 if (INTEGRAL_TYPE_P (type)
10128 && integer_all_onesp (arg0))
10129 return fold_build1_loc (loc, BIT_NOT_EXPR, type, op1);
10132 /* X - (X / CST) * CST is X % CST. */
10133 if (INTEGRAL_TYPE_P (type)
10134 && TREE_CODE (arg1) == MULT_EXPR
10135 && TREE_CODE (TREE_OPERAND (arg1, 0)) == TRUNC_DIV_EXPR
10136 && operand_equal_p (arg0,
10137 TREE_OPERAND (TREE_OPERAND (arg1, 0), 0), 0)
10138 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg1, 0), 1),
10139 TREE_OPERAND (arg1, 1), 0))
10140 return
10141 fold_convert_loc (loc, type,
10142 fold_build2_loc (loc, TRUNC_MOD_EXPR, TREE_TYPE (arg0),
10143 arg0, TREE_OPERAND (arg1, 1)));
10145 if (! FLOAT_TYPE_P (type))
10147 if (integer_zerop (arg0))
10148 return negate_expr (fold_convert_loc (loc, type, arg1));
10149 if (integer_zerop (arg1))
10150 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
10152 /* Fold A - (A & B) into ~B & A. */
10153 if (!TREE_SIDE_EFFECTS (arg0)
10154 && TREE_CODE (arg1) == BIT_AND_EXPR)
10156 if (operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0))
10158 tree arg10 = fold_convert_loc (loc, type,
10159 TREE_OPERAND (arg1, 0));
10160 return fold_build2_loc (loc, BIT_AND_EXPR, type,
10161 fold_build1_loc (loc, BIT_NOT_EXPR,
10162 type, arg10),
10163 fold_convert_loc (loc, type, arg0));
10165 if (operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
10167 tree arg11 = fold_convert_loc (loc,
10168 type, TREE_OPERAND (arg1, 1));
10169 return fold_build2_loc (loc, BIT_AND_EXPR, type,
10170 fold_build1_loc (loc, BIT_NOT_EXPR,
10171 type, arg11),
10172 fold_convert_loc (loc, type, arg0));
10176 /* Fold (A & ~B) - (A & B) into (A ^ B) - B, where B is
10177 any power of 2 minus 1. */
10178 if (TREE_CODE (arg0) == BIT_AND_EXPR
10179 && TREE_CODE (arg1) == BIT_AND_EXPR
10180 && operand_equal_p (TREE_OPERAND (arg0, 0),
10181 TREE_OPERAND (arg1, 0), 0))
10183 tree mask0 = TREE_OPERAND (arg0, 1);
10184 tree mask1 = TREE_OPERAND (arg1, 1);
10185 tree tem = fold_build1_loc (loc, BIT_NOT_EXPR, type, mask0);
10187 if (operand_equal_p (tem, mask1, 0))
10189 tem = fold_build2_loc (loc, BIT_XOR_EXPR, type,
10190 TREE_OPERAND (arg0, 0), mask1);
10191 return fold_build2_loc (loc, MINUS_EXPR, type, tem, mask1);
10196 /* See if ARG1 is zero and X - ARG1 reduces to X. */
10197 else if (fold_real_zero_addition_p (TREE_TYPE (arg0), arg1, 1))
10198 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
10200 /* (ARG0 - ARG1) is the same as (-ARG1 + ARG0). So check whether
10201 ARG0 is zero and X + ARG0 reduces to X, since that would mean
10202 (-ARG1 + ARG0) reduces to -ARG1. */
10203 else if (fold_real_zero_addition_p (TREE_TYPE (arg1), arg0, 0))
10204 return negate_expr (fold_convert_loc (loc, type, arg1));
10206 /* Fold __complex__ ( x, 0 ) - __complex__ ( 0, y ) to
10207 __complex__ ( x, -y ). This is not the same for SNaNs or if
10208 signed zeros are involved. */
10209 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
10210 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
10211 && COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0)))
10213 tree rtype = TREE_TYPE (TREE_TYPE (arg0));
10214 tree arg0r = fold_unary_loc (loc, REALPART_EXPR, rtype, arg0);
10215 tree arg0i = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg0);
10216 bool arg0rz = false, arg0iz = false;
10217 if ((arg0r && (arg0rz = real_zerop (arg0r)))
10218 || (arg0i && (arg0iz = real_zerop (arg0i))))
10220 tree arg1r = fold_unary_loc (loc, REALPART_EXPR, rtype, arg1);
10221 tree arg1i = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg1);
10222 if (arg0rz && arg1i && real_zerop (arg1i))
10224 tree rp = fold_build1_loc (loc, NEGATE_EXPR, rtype,
10225 arg1r ? arg1r
10226 : build1 (REALPART_EXPR, rtype, arg1));
10227 tree ip = arg0i ? arg0i
10228 : build1 (IMAGPART_EXPR, rtype, arg0);
10229 return fold_build2_loc (loc, COMPLEX_EXPR, type, rp, ip);
10231 else if (arg0iz && arg1r && real_zerop (arg1r))
10233 tree rp = arg0r ? arg0r
10234 : build1 (REALPART_EXPR, rtype, arg0);
10235 tree ip = fold_build1_loc (loc, NEGATE_EXPR, rtype,
10236 arg1i ? arg1i
10237 : build1 (IMAGPART_EXPR, rtype, arg1));
10238 return fold_build2_loc (loc, COMPLEX_EXPR, type, rp, ip);
10243 /* Fold &x - &x. This can happen from &x.foo - &x.
10244 This is unsafe for certain floats even in non-IEEE formats.
10245 In IEEE, it is unsafe because it does wrong for NaNs.
10246 Also note that operand_equal_p is always false if an operand
10247 is volatile. */
10249 if ((!FLOAT_TYPE_P (type) || !HONOR_NANS (TYPE_MODE (type)))
10250 && operand_equal_p (arg0, arg1, 0))
10251 return fold_convert_loc (loc, type, integer_zero_node);
10253 /* A - B -> A + (-B) if B is easily negatable. */
10254 if (negate_expr_p (arg1)
10255 && ((FLOAT_TYPE_P (type)
10256 /* Avoid this transformation if B is a positive REAL_CST. */
10257 && (TREE_CODE (arg1) != REAL_CST
10258 || REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg1))))
10259 || INTEGRAL_TYPE_P (type)))
10260 return fold_build2_loc (loc, PLUS_EXPR, type,
10261 fold_convert_loc (loc, type, arg0),
10262 fold_convert_loc (loc, type,
10263 negate_expr (arg1)));
10265 /* Try folding difference of addresses. */
10267 HOST_WIDE_INT diff;
10269 if ((TREE_CODE (arg0) == ADDR_EXPR
10270 || TREE_CODE (arg1) == ADDR_EXPR)
10271 && ptr_difference_const (arg0, arg1, &diff))
10272 return build_int_cst_type (type, diff);
10275 /* Fold &a[i] - &a[j] to i-j. */
10276 if (TREE_CODE (arg0) == ADDR_EXPR
10277 && TREE_CODE (TREE_OPERAND (arg0, 0)) == ARRAY_REF
10278 && TREE_CODE (arg1) == ADDR_EXPR
10279 && TREE_CODE (TREE_OPERAND (arg1, 0)) == ARRAY_REF)
10281 tree aref0 = TREE_OPERAND (arg0, 0);
10282 tree aref1 = TREE_OPERAND (arg1, 0);
10283 if (operand_equal_p (TREE_OPERAND (aref0, 0),
10284 TREE_OPERAND (aref1, 0), 0))
10286 tree op0 = fold_convert_loc (loc, type, TREE_OPERAND (aref0, 1));
10287 tree op1 = fold_convert_loc (loc, type, TREE_OPERAND (aref1, 1));
10288 tree esz = array_ref_element_size (aref0);
10289 tree diff = build2 (MINUS_EXPR, type, op0, op1);
10290 return fold_build2_loc (loc, MULT_EXPR, type, diff,
10291 fold_convert_loc (loc, type, esz));
10296 if (FLOAT_TYPE_P (type)
10297 && flag_unsafe_math_optimizations
10298 && (TREE_CODE (arg0) == RDIV_EXPR || TREE_CODE (arg0) == MULT_EXPR)
10299 && (TREE_CODE (arg1) == RDIV_EXPR || TREE_CODE (arg1) == MULT_EXPR)
10300 && (tem = distribute_real_division (loc, code, type, arg0, arg1)))
10301 return tem;
10303 /* Handle (A1 * C1) - (A2 * C2) with A1, A2 or C1, C2 being the
10304 same or one. Make sure type is not saturating.
10305 fold_plusminus_mult_expr will re-associate. */
10306 if ((TREE_CODE (arg0) == MULT_EXPR
10307 || TREE_CODE (arg1) == MULT_EXPR)
10308 && !TYPE_SATURATING (type)
10309 && (!FLOAT_TYPE_P (type) || flag_associative_math))
10311 tree tem = fold_plusminus_mult_expr (loc, code, type, arg0, arg1);
10312 if (tem)
10313 return tem;
10316 goto associate;
10318 case MULT_EXPR:
10319 /* (-A) * (-B) -> A * B */
10320 if (TREE_CODE (arg0) == NEGATE_EXPR && negate_expr_p (arg1))
10321 return fold_build2_loc (loc, MULT_EXPR, type,
10322 fold_convert_loc (loc, type,
10323 TREE_OPERAND (arg0, 0)),
10324 fold_convert_loc (loc, type,
10325 negate_expr (arg1)));
10326 if (TREE_CODE (arg1) == NEGATE_EXPR && negate_expr_p (arg0))
10327 return fold_build2_loc (loc, MULT_EXPR, type,
10328 fold_convert_loc (loc, type,
10329 negate_expr (arg0)),
10330 fold_convert_loc (loc, type,
10331 TREE_OPERAND (arg1, 0)));
10333 if (! FLOAT_TYPE_P (type))
10335 if (integer_zerop (arg1))
10336 return omit_one_operand_loc (loc, type, arg1, arg0);
10337 if (integer_onep (arg1))
10338 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
10339 /* Transform x * -1 into -x. Make sure to do the negation
10340 on the original operand with conversions not stripped
10341 because we can only strip non-sign-changing conversions. */
10342 if (integer_all_onesp (arg1))
10343 return fold_convert_loc (loc, type, negate_expr (op0));
10344 /* Transform x * -C into -x * C if x is easily negatable. */
10345 if (TREE_CODE (arg1) == INTEGER_CST
10346 && tree_int_cst_sgn (arg1) == -1
10347 && negate_expr_p (arg0)
10348 && (tem = negate_expr (arg1)) != arg1
10349 && !TREE_OVERFLOW (tem))
10350 return fold_build2_loc (loc, MULT_EXPR, type,
10351 fold_convert_loc (loc, type,
10352 negate_expr (arg0)),
10353 tem);
10355 /* (a * (1 << b)) is (a << b) */
10356 if (TREE_CODE (arg1) == LSHIFT_EXPR
10357 && integer_onep (TREE_OPERAND (arg1, 0)))
10358 return fold_build2_loc (loc, LSHIFT_EXPR, type, op0,
10359 TREE_OPERAND (arg1, 1));
10360 if (TREE_CODE (arg0) == LSHIFT_EXPR
10361 && integer_onep (TREE_OPERAND (arg0, 0)))
10362 return fold_build2_loc (loc, LSHIFT_EXPR, type, op1,
10363 TREE_OPERAND (arg0, 1));
10365 /* (A + A) * C -> A * 2 * C */
10366 if (TREE_CODE (arg0) == PLUS_EXPR
10367 && TREE_CODE (arg1) == INTEGER_CST
10368 && operand_equal_p (TREE_OPERAND (arg0, 0),
10369 TREE_OPERAND (arg0, 1), 0))
10370 return fold_build2_loc (loc, MULT_EXPR, type,
10371 omit_one_operand_loc (loc, type,
10372 TREE_OPERAND (arg0, 0),
10373 TREE_OPERAND (arg0, 1)),
10374 fold_build2_loc (loc, MULT_EXPR, type,
10375 build_int_cst (type, 2) , arg1));
10377 strict_overflow_p = false;
10378 if (TREE_CODE (arg1) == INTEGER_CST
10379 && 0 != (tem = extract_muldiv (op0, arg1, code, NULL_TREE,
10380 &strict_overflow_p)))
10382 if (strict_overflow_p)
10383 fold_overflow_warning (("assuming signed overflow does not "
10384 "occur when simplifying "
10385 "multiplication"),
10386 WARN_STRICT_OVERFLOW_MISC);
10387 return fold_convert_loc (loc, type, tem);
10390 /* Optimize z * conj(z) for integer complex numbers. */
10391 if (TREE_CODE (arg0) == CONJ_EXPR
10392 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
10393 return fold_mult_zconjz (loc, type, arg1);
10394 if (TREE_CODE (arg1) == CONJ_EXPR
10395 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
10396 return fold_mult_zconjz (loc, type, arg0);
10398 else
10400 /* Maybe fold x * 0 to 0. The expressions aren't the same
10401 when x is NaN, since x * 0 is also NaN. Nor are they the
10402 same in modes with signed zeros, since multiplying a
10403 negative value by 0 gives -0, not +0. */
10404 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0)))
10405 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
10406 && real_zerop (arg1))
10407 return omit_one_operand_loc (loc, type, arg1, arg0);
10408 /* In IEEE floating point, x*1 is not equivalent to x for snans.
10409 Likewise for complex arithmetic with signed zeros. */
10410 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
10411 && (!HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
10412 || !COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0)))
10413 && real_onep (arg1))
10414 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
10416 /* Transform x * -1.0 into -x. */
10417 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
10418 && (!HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
10419 || !COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0)))
10420 && real_minus_onep (arg1))
10421 return fold_convert_loc (loc, type, negate_expr (arg0));
10423 /* Convert (C1/X)*C2 into (C1*C2)/X. This transformation may change
10424 the result for floating point types due to rounding so it is applied
10425 only if -fassociative-math was specify. */
10426 if (flag_associative_math
10427 && TREE_CODE (arg0) == RDIV_EXPR
10428 && TREE_CODE (arg1) == REAL_CST
10429 && TREE_CODE (TREE_OPERAND (arg0, 0)) == REAL_CST)
10431 tree tem = const_binop (MULT_EXPR, TREE_OPERAND (arg0, 0),
10432 arg1, 0);
10433 if (tem)
10434 return fold_build2_loc (loc, RDIV_EXPR, type, tem,
10435 TREE_OPERAND (arg0, 1));
10438 /* Strip sign operations from X in X*X, i.e. -Y*-Y -> Y*Y. */
10439 if (operand_equal_p (arg0, arg1, 0))
10441 tree tem = fold_strip_sign_ops (arg0);
10442 if (tem != NULL_TREE)
10444 tem = fold_convert_loc (loc, type, tem);
10445 return fold_build2_loc (loc, MULT_EXPR, type, tem, tem);
10449 /* Fold z * +-I to __complex__ (-+__imag z, +-__real z).
10450 This is not the same for NaNs or if signed zeros are
10451 involved. */
10452 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0)))
10453 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
10454 && COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0))
10455 && TREE_CODE (arg1) == COMPLEX_CST
10456 && real_zerop (TREE_REALPART (arg1)))
10458 tree rtype = TREE_TYPE (TREE_TYPE (arg0));
10459 if (real_onep (TREE_IMAGPART (arg1)))
10460 return
10461 fold_build2_loc (loc, COMPLEX_EXPR, type,
10462 negate_expr (fold_build1_loc (loc, IMAGPART_EXPR,
10463 rtype, arg0)),
10464 fold_build1_loc (loc, REALPART_EXPR, rtype, arg0));
10465 else if (real_minus_onep (TREE_IMAGPART (arg1)))
10466 return
10467 fold_build2_loc (loc, COMPLEX_EXPR, type,
10468 fold_build1_loc (loc, IMAGPART_EXPR, rtype, arg0),
10469 negate_expr (fold_build1_loc (loc, REALPART_EXPR,
10470 rtype, arg0)));
10473 /* Optimize z * conj(z) for floating point complex numbers.
10474 Guarded by flag_unsafe_math_optimizations as non-finite
10475 imaginary components don't produce scalar results. */
10476 if (flag_unsafe_math_optimizations
10477 && TREE_CODE (arg0) == CONJ_EXPR
10478 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
10479 return fold_mult_zconjz (loc, type, arg1);
10480 if (flag_unsafe_math_optimizations
10481 && TREE_CODE (arg1) == CONJ_EXPR
10482 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
10483 return fold_mult_zconjz (loc, type, arg0);
10485 if (flag_unsafe_math_optimizations)
10487 enum built_in_function fcode0 = builtin_mathfn_code (arg0);
10488 enum built_in_function fcode1 = builtin_mathfn_code (arg1);
10490 /* Optimizations of root(...)*root(...). */
10491 if (fcode0 == fcode1 && BUILTIN_ROOT_P (fcode0))
10493 tree rootfn, arg;
10494 tree arg00 = CALL_EXPR_ARG (arg0, 0);
10495 tree arg10 = CALL_EXPR_ARG (arg1, 0);
10497 /* Optimize sqrt(x)*sqrt(x) as x. */
10498 if (BUILTIN_SQRT_P (fcode0)
10499 && operand_equal_p (arg00, arg10, 0)
10500 && ! HONOR_SNANS (TYPE_MODE (type)))
10501 return arg00;
10503 /* Optimize root(x)*root(y) as root(x*y). */
10504 rootfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
10505 arg = fold_build2_loc (loc, MULT_EXPR, type, arg00, arg10);
10506 return build_call_expr_loc (loc, rootfn, 1, arg);
10509 /* Optimize expN(x)*expN(y) as expN(x+y). */
10510 if (fcode0 == fcode1 && BUILTIN_EXPONENT_P (fcode0))
10512 tree expfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
10513 tree arg = fold_build2_loc (loc, PLUS_EXPR, type,
10514 CALL_EXPR_ARG (arg0, 0),
10515 CALL_EXPR_ARG (arg1, 0));
10516 return build_call_expr_loc (loc, expfn, 1, arg);
10519 /* Optimizations of pow(...)*pow(...). */
10520 if ((fcode0 == BUILT_IN_POW && fcode1 == BUILT_IN_POW)
10521 || (fcode0 == BUILT_IN_POWF && fcode1 == BUILT_IN_POWF)
10522 || (fcode0 == BUILT_IN_POWL && fcode1 == BUILT_IN_POWL))
10524 tree arg00 = CALL_EXPR_ARG (arg0, 0);
10525 tree arg01 = CALL_EXPR_ARG (arg0, 1);
10526 tree arg10 = CALL_EXPR_ARG (arg1, 0);
10527 tree arg11 = CALL_EXPR_ARG (arg1, 1);
10529 /* Optimize pow(x,y)*pow(z,y) as pow(x*z,y). */
10530 if (operand_equal_p (arg01, arg11, 0))
10532 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
10533 tree arg = fold_build2_loc (loc, MULT_EXPR, type,
10534 arg00, arg10);
10535 return build_call_expr_loc (loc, powfn, 2, arg, arg01);
10538 /* Optimize pow(x,y)*pow(x,z) as pow(x,y+z). */
10539 if (operand_equal_p (arg00, arg10, 0))
10541 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
10542 tree arg = fold_build2_loc (loc, PLUS_EXPR, type,
10543 arg01, arg11);
10544 return build_call_expr_loc (loc, powfn, 2, arg00, arg);
10548 /* Optimize tan(x)*cos(x) as sin(x). */
10549 if (((fcode0 == BUILT_IN_TAN && fcode1 == BUILT_IN_COS)
10550 || (fcode0 == BUILT_IN_TANF && fcode1 == BUILT_IN_COSF)
10551 || (fcode0 == BUILT_IN_TANL && fcode1 == BUILT_IN_COSL)
10552 || (fcode0 == BUILT_IN_COS && fcode1 == BUILT_IN_TAN)
10553 || (fcode0 == BUILT_IN_COSF && fcode1 == BUILT_IN_TANF)
10554 || (fcode0 == BUILT_IN_COSL && fcode1 == BUILT_IN_TANL))
10555 && operand_equal_p (CALL_EXPR_ARG (arg0, 0),
10556 CALL_EXPR_ARG (arg1, 0), 0))
10558 tree sinfn = mathfn_built_in (type, BUILT_IN_SIN);
10560 if (sinfn != NULL_TREE)
10561 return build_call_expr_loc (loc, sinfn, 1,
10562 CALL_EXPR_ARG (arg0, 0));
10565 /* Optimize x*pow(x,c) as pow(x,c+1). */
10566 if (fcode1 == BUILT_IN_POW
10567 || fcode1 == BUILT_IN_POWF
10568 || fcode1 == BUILT_IN_POWL)
10570 tree arg10 = CALL_EXPR_ARG (arg1, 0);
10571 tree arg11 = CALL_EXPR_ARG (arg1, 1);
10572 if (TREE_CODE (arg11) == REAL_CST
10573 && !TREE_OVERFLOW (arg11)
10574 && operand_equal_p (arg0, arg10, 0))
10576 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg1), 0);
10577 REAL_VALUE_TYPE c;
10578 tree arg;
10580 c = TREE_REAL_CST (arg11);
10581 real_arithmetic (&c, PLUS_EXPR, &c, &dconst1);
10582 arg = build_real (type, c);
10583 return build_call_expr_loc (loc, powfn, 2, arg0, arg);
10587 /* Optimize pow(x,c)*x as pow(x,c+1). */
10588 if (fcode0 == BUILT_IN_POW
10589 || fcode0 == BUILT_IN_POWF
10590 || fcode0 == BUILT_IN_POWL)
10592 tree arg00 = CALL_EXPR_ARG (arg0, 0);
10593 tree arg01 = CALL_EXPR_ARG (arg0, 1);
10594 if (TREE_CODE (arg01) == REAL_CST
10595 && !TREE_OVERFLOW (arg01)
10596 && operand_equal_p (arg1, arg00, 0))
10598 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
10599 REAL_VALUE_TYPE c;
10600 tree arg;
10602 c = TREE_REAL_CST (arg01);
10603 real_arithmetic (&c, PLUS_EXPR, &c, &dconst1);
10604 arg = build_real (type, c);
10605 return build_call_expr_loc (loc, powfn, 2, arg1, arg);
10609 /* Optimize x*x as pow(x,2.0), which is expanded as x*x. */
10610 if (optimize_function_for_speed_p (cfun)
10611 && operand_equal_p (arg0, arg1, 0))
10613 tree powfn = mathfn_built_in (type, BUILT_IN_POW);
10615 if (powfn)
10617 tree arg = build_real (type, dconst2);
10618 return build_call_expr_loc (loc, powfn, 2, arg0, arg);
10623 goto associate;
10625 case BIT_IOR_EXPR:
10626 bit_ior:
10627 if (integer_all_onesp (arg1))
10628 return omit_one_operand_loc (loc, type, arg1, arg0);
10629 if (integer_zerop (arg1))
10630 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
10631 if (operand_equal_p (arg0, arg1, 0))
10632 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
10634 /* ~X | X is -1. */
10635 if (TREE_CODE (arg0) == BIT_NOT_EXPR
10636 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
10638 t1 = fold_convert_loc (loc, type, integer_zero_node);
10639 t1 = fold_unary_loc (loc, BIT_NOT_EXPR, type, t1);
10640 return omit_one_operand_loc (loc, type, t1, arg1);
10643 /* X | ~X is -1. */
10644 if (TREE_CODE (arg1) == BIT_NOT_EXPR
10645 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
10647 t1 = fold_convert_loc (loc, type, integer_zero_node);
10648 t1 = fold_unary_loc (loc, BIT_NOT_EXPR, type, t1);
10649 return omit_one_operand_loc (loc, type, t1, arg0);
10652 /* Canonicalize (X & C1) | C2. */
10653 if (TREE_CODE (arg0) == BIT_AND_EXPR
10654 && TREE_CODE (arg1) == INTEGER_CST
10655 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
10657 unsigned HOST_WIDE_INT hi1, lo1, hi2, lo2, hi3, lo3, mlo, mhi;
10658 int width = TYPE_PRECISION (type), w;
10659 hi1 = TREE_INT_CST_HIGH (TREE_OPERAND (arg0, 1));
10660 lo1 = TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1));
10661 hi2 = TREE_INT_CST_HIGH (arg1);
10662 lo2 = TREE_INT_CST_LOW (arg1);
10664 /* If (C1&C2) == C1, then (X&C1)|C2 becomes (X,C2). */
10665 if ((hi1 & hi2) == hi1 && (lo1 & lo2) == lo1)
10666 return omit_one_operand_loc (loc, type, arg1,
10667 TREE_OPERAND (arg0, 0));
10669 if (width > HOST_BITS_PER_WIDE_INT)
10671 mhi = (unsigned HOST_WIDE_INT) -1
10672 >> (2 * HOST_BITS_PER_WIDE_INT - width);
10673 mlo = -1;
10675 else
10677 mhi = 0;
10678 mlo = (unsigned HOST_WIDE_INT) -1
10679 >> (HOST_BITS_PER_WIDE_INT - width);
10682 /* If (C1|C2) == ~0 then (X&C1)|C2 becomes X|C2. */
10683 if ((~(hi1 | hi2) & mhi) == 0 && (~(lo1 | lo2) & mlo) == 0)
10684 return fold_build2_loc (loc, BIT_IOR_EXPR, type,
10685 TREE_OPERAND (arg0, 0), arg1);
10687 /* Minimize the number of bits set in C1, i.e. C1 := C1 & ~C2,
10688 unless (C1 & ~C2) | (C2 & C3) for some C3 is a mask of some
10689 mode which allows further optimizations. */
10690 hi1 &= mhi;
10691 lo1 &= mlo;
10692 hi2 &= mhi;
10693 lo2 &= mlo;
10694 hi3 = hi1 & ~hi2;
10695 lo3 = lo1 & ~lo2;
10696 for (w = BITS_PER_UNIT;
10697 w <= width && w <= HOST_BITS_PER_WIDE_INT;
10698 w <<= 1)
10700 unsigned HOST_WIDE_INT mask
10701 = (unsigned HOST_WIDE_INT) -1 >> (HOST_BITS_PER_WIDE_INT - w);
10702 if (((lo1 | lo2) & mask) == mask
10703 && (lo1 & ~mask) == 0 && hi1 == 0)
10705 hi3 = 0;
10706 lo3 = mask;
10707 break;
10710 if (hi3 != hi1 || lo3 != lo1)
10711 return fold_build2_loc (loc, BIT_IOR_EXPR, type,
10712 fold_build2_loc (loc, BIT_AND_EXPR, type,
10713 TREE_OPERAND (arg0, 0),
10714 build_int_cst_wide (type,
10715 lo3, hi3)),
10716 arg1);
10719 /* (X & Y) | Y is (X, Y). */
10720 if (TREE_CODE (arg0) == BIT_AND_EXPR
10721 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
10722 return omit_one_operand_loc (loc, type, arg1, TREE_OPERAND (arg0, 0));
10723 /* (X & Y) | X is (Y, X). */
10724 if (TREE_CODE (arg0) == BIT_AND_EXPR
10725 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
10726 && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
10727 return omit_one_operand_loc (loc, type, arg1, TREE_OPERAND (arg0, 1));
10728 /* X | (X & Y) is (Y, X). */
10729 if (TREE_CODE (arg1) == BIT_AND_EXPR
10730 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0)
10731 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 1)))
10732 return omit_one_operand_loc (loc, type, arg0, TREE_OPERAND (arg1, 1));
10733 /* X | (Y & X) is (Y, X). */
10734 if (TREE_CODE (arg1) == BIT_AND_EXPR
10735 && operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0)
10736 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
10737 return omit_one_operand_loc (loc, type, arg0, TREE_OPERAND (arg1, 0));
10739 t1 = distribute_bit_expr (loc, code, type, arg0, arg1);
10740 if (t1 != NULL_TREE)
10741 return t1;
10743 /* Convert (or (not arg0) (not arg1)) to (not (and (arg0) (arg1))).
10745 This results in more efficient code for machines without a NAND
10746 instruction. Combine will canonicalize to the first form
10747 which will allow use of NAND instructions provided by the
10748 backend if they exist. */
10749 if (TREE_CODE (arg0) == BIT_NOT_EXPR
10750 && TREE_CODE (arg1) == BIT_NOT_EXPR)
10752 return
10753 fold_build1_loc (loc, BIT_NOT_EXPR, type,
10754 build2 (BIT_AND_EXPR, type,
10755 fold_convert_loc (loc, type,
10756 TREE_OPERAND (arg0, 0)),
10757 fold_convert_loc (loc, type,
10758 TREE_OPERAND (arg1, 0))));
10761 /* See if this can be simplified into a rotate first. If that
10762 is unsuccessful continue in the association code. */
10763 goto bit_rotate;
10765 case BIT_XOR_EXPR:
10766 if (integer_zerop (arg1))
10767 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
10768 if (integer_all_onesp (arg1))
10769 return fold_build1_loc (loc, BIT_NOT_EXPR, type, op0);
10770 if (operand_equal_p (arg0, arg1, 0))
10771 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
10773 /* ~X ^ X is -1. */
10774 if (TREE_CODE (arg0) == BIT_NOT_EXPR
10775 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
10777 t1 = fold_convert_loc (loc, type, integer_zero_node);
10778 t1 = fold_unary_loc (loc, BIT_NOT_EXPR, type, t1);
10779 return omit_one_operand_loc (loc, type, t1, arg1);
10782 /* X ^ ~X is -1. */
10783 if (TREE_CODE (arg1) == BIT_NOT_EXPR
10784 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
10786 t1 = fold_convert_loc (loc, type, integer_zero_node);
10787 t1 = fold_unary_loc (loc, BIT_NOT_EXPR, type, t1);
10788 return omit_one_operand_loc (loc, type, t1, arg0);
10791 /* If we are XORing two BIT_AND_EXPR's, both of which are and'ing
10792 with a constant, and the two constants have no bits in common,
10793 we should treat this as a BIT_IOR_EXPR since this may produce more
10794 simplifications. */
10795 if (TREE_CODE (arg0) == BIT_AND_EXPR
10796 && TREE_CODE (arg1) == BIT_AND_EXPR
10797 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
10798 && TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
10799 && integer_zerop (const_binop (BIT_AND_EXPR,
10800 TREE_OPERAND (arg0, 1),
10801 TREE_OPERAND (arg1, 1), 0)))
10803 code = BIT_IOR_EXPR;
10804 goto bit_ior;
10807 /* (X | Y) ^ X -> Y & ~ X*/
10808 if (TREE_CODE (arg0) == BIT_IOR_EXPR
10809 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
10811 tree t2 = TREE_OPERAND (arg0, 1);
10812 t1 = fold_build1_loc (loc, BIT_NOT_EXPR, TREE_TYPE (arg1),
10813 arg1);
10814 t1 = fold_build2_loc (loc, BIT_AND_EXPR, type,
10815 fold_convert_loc (loc, type, t2),
10816 fold_convert_loc (loc, type, t1));
10817 return t1;
10820 /* (Y | X) ^ X -> Y & ~ X*/
10821 if (TREE_CODE (arg0) == BIT_IOR_EXPR
10822 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
10824 tree t2 = TREE_OPERAND (arg0, 0);
10825 t1 = fold_build1_loc (loc, BIT_NOT_EXPR, TREE_TYPE (arg1),
10826 arg1);
10827 t1 = fold_build2_loc (loc, BIT_AND_EXPR, type,
10828 fold_convert_loc (loc, type, t2),
10829 fold_convert_loc (loc, type, t1));
10830 return t1;
10833 /* X ^ (X | Y) -> Y & ~ X*/
10834 if (TREE_CODE (arg1) == BIT_IOR_EXPR
10835 && operand_equal_p (TREE_OPERAND (arg1, 0), arg0, 0))
10837 tree t2 = TREE_OPERAND (arg1, 1);
10838 t1 = fold_build1_loc (loc, BIT_NOT_EXPR, TREE_TYPE (arg0),
10839 arg0);
10840 t1 = fold_build2_loc (loc, BIT_AND_EXPR, type,
10841 fold_convert_loc (loc, type, t2),
10842 fold_convert_loc (loc, type, t1));
10843 return t1;
10846 /* X ^ (Y | X) -> Y & ~ X*/
10847 if (TREE_CODE (arg1) == BIT_IOR_EXPR
10848 && operand_equal_p (TREE_OPERAND (arg1, 1), arg0, 0))
10850 tree t2 = TREE_OPERAND (arg1, 0);
10851 t1 = fold_build1_loc (loc, BIT_NOT_EXPR, TREE_TYPE (arg0),
10852 arg0);
10853 t1 = fold_build2_loc (loc, BIT_AND_EXPR, type,
10854 fold_convert_loc (loc, type, t2),
10855 fold_convert_loc (loc, type, t1));
10856 return t1;
10859 /* Convert ~X ^ ~Y to X ^ Y. */
10860 if (TREE_CODE (arg0) == BIT_NOT_EXPR
10861 && TREE_CODE (arg1) == BIT_NOT_EXPR)
10862 return fold_build2_loc (loc, code, type,
10863 fold_convert_loc (loc, type,
10864 TREE_OPERAND (arg0, 0)),
10865 fold_convert_loc (loc, type,
10866 TREE_OPERAND (arg1, 0)));
10868 /* Convert ~X ^ C to X ^ ~C. */
10869 if (TREE_CODE (arg0) == BIT_NOT_EXPR
10870 && TREE_CODE (arg1) == INTEGER_CST)
10871 return fold_build2_loc (loc, code, type,
10872 fold_convert_loc (loc, type,
10873 TREE_OPERAND (arg0, 0)),
10874 fold_build1_loc (loc, BIT_NOT_EXPR, type, arg1));
10876 /* Fold (X & 1) ^ 1 as (X & 1) == 0. */
10877 if (TREE_CODE (arg0) == BIT_AND_EXPR
10878 && integer_onep (TREE_OPERAND (arg0, 1))
10879 && integer_onep (arg1))
10880 return fold_build2_loc (loc, EQ_EXPR, type, arg0,
10881 build_int_cst (TREE_TYPE (arg0), 0));
10883 /* Fold (X & Y) ^ Y as ~X & Y. */
10884 if (TREE_CODE (arg0) == BIT_AND_EXPR
10885 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
10887 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
10888 return fold_build2_loc (loc, BIT_AND_EXPR, type,
10889 fold_build1_loc (loc, BIT_NOT_EXPR, type, tem),
10890 fold_convert_loc (loc, type, arg1));
10892 /* Fold (X & Y) ^ X as ~Y & X. */
10893 if (TREE_CODE (arg0) == BIT_AND_EXPR
10894 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
10895 && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
10897 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
10898 return fold_build2_loc (loc, BIT_AND_EXPR, type,
10899 fold_build1_loc (loc, BIT_NOT_EXPR, type, tem),
10900 fold_convert_loc (loc, type, arg1));
10902 /* Fold X ^ (X & Y) as X & ~Y. */
10903 if (TREE_CODE (arg1) == BIT_AND_EXPR
10904 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
10906 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 1));
10907 return fold_build2_loc (loc, BIT_AND_EXPR, type,
10908 fold_convert_loc (loc, type, arg0),
10909 fold_build1_loc (loc, BIT_NOT_EXPR, type, tem));
10911 /* Fold X ^ (Y & X) as ~Y & X. */
10912 if (TREE_CODE (arg1) == BIT_AND_EXPR
10913 && operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0)
10914 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
10916 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 0));
10917 return fold_build2_loc (loc, BIT_AND_EXPR, type,
10918 fold_build1_loc (loc, BIT_NOT_EXPR, type, tem),
10919 fold_convert_loc (loc, type, arg0));
10922 /* See if this can be simplified into a rotate first. If that
10923 is unsuccessful continue in the association code. */
10924 goto bit_rotate;
10926 case BIT_AND_EXPR:
10927 if (integer_all_onesp (arg1))
10928 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
10929 if (integer_zerop (arg1))
10930 return omit_one_operand_loc (loc, type, arg1, arg0);
10931 if (operand_equal_p (arg0, arg1, 0))
10932 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
10934 /* ~X & X is always zero. */
10935 if (TREE_CODE (arg0) == BIT_NOT_EXPR
10936 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
10937 return omit_one_operand_loc (loc, type, integer_zero_node, arg1);
10939 /* X & ~X is always zero. */
10940 if (TREE_CODE (arg1) == BIT_NOT_EXPR
10941 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
10942 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
10944 /* Canonicalize (X | C1) & C2 as (X & C2) | (C1 & C2). */
10945 if (TREE_CODE (arg0) == BIT_IOR_EXPR
10946 && TREE_CODE (arg1) == INTEGER_CST
10947 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
10949 tree tmp1 = fold_convert_loc (loc, type, arg1);
10950 tree tmp2 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
10951 tree tmp3 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
10952 tmp2 = fold_build2_loc (loc, BIT_AND_EXPR, type, tmp2, tmp1);
10953 tmp3 = fold_build2_loc (loc, BIT_AND_EXPR, type, tmp3, tmp1);
10954 return
10955 fold_convert_loc (loc, type,
10956 fold_build2_loc (loc, BIT_IOR_EXPR,
10957 type, tmp2, tmp3));
10960 /* (X | Y) & Y is (X, Y). */
10961 if (TREE_CODE (arg0) == BIT_IOR_EXPR
10962 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
10963 return omit_one_operand_loc (loc, type, arg1, TREE_OPERAND (arg0, 0));
10964 /* (X | Y) & X is (Y, X). */
10965 if (TREE_CODE (arg0) == BIT_IOR_EXPR
10966 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
10967 && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
10968 return omit_one_operand_loc (loc, type, arg1, TREE_OPERAND (arg0, 1));
10969 /* X & (X | Y) is (Y, X). */
10970 if (TREE_CODE (arg1) == BIT_IOR_EXPR
10971 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0)
10972 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 1)))
10973 return omit_one_operand_loc (loc, type, arg0, TREE_OPERAND (arg1, 1));
10974 /* X & (Y | X) is (Y, X). */
10975 if (TREE_CODE (arg1) == BIT_IOR_EXPR
10976 && operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0)
10977 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
10978 return omit_one_operand_loc (loc, type, arg0, TREE_OPERAND (arg1, 0));
10980 /* Fold (X ^ 1) & 1 as (X & 1) == 0. */
10981 if (TREE_CODE (arg0) == BIT_XOR_EXPR
10982 && integer_onep (TREE_OPERAND (arg0, 1))
10983 && integer_onep (arg1))
10985 tem = TREE_OPERAND (arg0, 0);
10986 return fold_build2_loc (loc, EQ_EXPR, type,
10987 fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (tem), tem,
10988 build_int_cst (TREE_TYPE (tem), 1)),
10989 build_int_cst (TREE_TYPE (tem), 0));
10991 /* Fold ~X & 1 as (X & 1) == 0. */
10992 if (TREE_CODE (arg0) == BIT_NOT_EXPR
10993 && integer_onep (arg1))
10995 tem = TREE_OPERAND (arg0, 0);
10996 return fold_build2_loc (loc, EQ_EXPR, type,
10997 fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (tem), tem,
10998 build_int_cst (TREE_TYPE (tem), 1)),
10999 build_int_cst (TREE_TYPE (tem), 0));
11002 /* Fold (X ^ Y) & Y as ~X & Y. */
11003 if (TREE_CODE (arg0) == BIT_XOR_EXPR
11004 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
11006 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
11007 return fold_build2_loc (loc, BIT_AND_EXPR, type,
11008 fold_build1_loc (loc, BIT_NOT_EXPR, type, tem),
11009 fold_convert_loc (loc, type, arg1));
11011 /* Fold (X ^ Y) & X as ~Y & X. */
11012 if (TREE_CODE (arg0) == BIT_XOR_EXPR
11013 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
11014 && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
11016 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
11017 return fold_build2_loc (loc, BIT_AND_EXPR, type,
11018 fold_build1_loc (loc, BIT_NOT_EXPR, type, tem),
11019 fold_convert_loc (loc, type, arg1));
11021 /* Fold X & (X ^ Y) as X & ~Y. */
11022 if (TREE_CODE (arg1) == BIT_XOR_EXPR
11023 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
11025 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 1));
11026 return fold_build2_loc (loc, BIT_AND_EXPR, type,
11027 fold_convert_loc (loc, type, arg0),
11028 fold_build1_loc (loc, BIT_NOT_EXPR, type, tem));
11030 /* Fold X & (Y ^ X) as ~Y & X. */
11031 if (TREE_CODE (arg1) == BIT_XOR_EXPR
11032 && operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0)
11033 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
11035 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 0));
11036 return fold_build2_loc (loc, BIT_AND_EXPR, type,
11037 fold_build1_loc (loc, BIT_NOT_EXPR, type, tem),
11038 fold_convert_loc (loc, type, arg0));
11041 t1 = distribute_bit_expr (loc, code, type, arg0, arg1);
11042 if (t1 != NULL_TREE)
11043 return t1;
11044 /* Simplify ((int)c & 0377) into (int)c, if c is unsigned char. */
11045 if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg0) == NOP_EXPR
11046 && TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg0, 0))))
11048 unsigned int prec
11049 = TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg0, 0)));
11051 if (prec < BITS_PER_WORD && prec < HOST_BITS_PER_WIDE_INT
11052 && (~TREE_INT_CST_LOW (arg1)
11053 & (((HOST_WIDE_INT) 1 << prec) - 1)) == 0)
11054 return
11055 fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
11058 /* Convert (and (not arg0) (not arg1)) to (not (or (arg0) (arg1))).
11060 This results in more efficient code for machines without a NOR
11061 instruction. Combine will canonicalize to the first form
11062 which will allow use of NOR instructions provided by the
11063 backend if they exist. */
11064 if (TREE_CODE (arg0) == BIT_NOT_EXPR
11065 && TREE_CODE (arg1) == BIT_NOT_EXPR)
11067 return fold_build1_loc (loc, BIT_NOT_EXPR, type,
11068 build2 (BIT_IOR_EXPR, type,
11069 fold_convert_loc (loc, type,
11070 TREE_OPERAND (arg0, 0)),
11071 fold_convert_loc (loc, type,
11072 TREE_OPERAND (arg1, 0))));
11075 /* If arg0 is derived from the address of an object or function, we may
11076 be able to fold this expression using the object or function's
11077 alignment. */
11078 if (POINTER_TYPE_P (TREE_TYPE (arg0)) && host_integerp (arg1, 1))
11080 unsigned HOST_WIDE_INT modulus, residue;
11081 unsigned HOST_WIDE_INT low = TREE_INT_CST_LOW (arg1);
11083 modulus = get_pointer_modulus_and_residue (arg0, &residue,
11084 integer_onep (arg1));
11086 /* This works because modulus is a power of 2. If this weren't the
11087 case, we'd have to replace it by its greatest power-of-2
11088 divisor: modulus & -modulus. */
11089 if (low < modulus)
11090 return build_int_cst (type, residue & low);
11093 /* Fold (X << C1) & C2 into (X << C1) & (C2 | ((1 << C1) - 1))
11094 (X >> C1) & C2 into (X >> C1) & (C2 | ~((type) -1 >> C1))
11095 if the new mask might be further optimized. */
11096 if ((TREE_CODE (arg0) == LSHIFT_EXPR
11097 || TREE_CODE (arg0) == RSHIFT_EXPR)
11098 && host_integerp (TREE_OPERAND (arg0, 1), 1)
11099 && host_integerp (arg1, TYPE_UNSIGNED (TREE_TYPE (arg1)))
11100 && tree_low_cst (TREE_OPERAND (arg0, 1), 1)
11101 < TYPE_PRECISION (TREE_TYPE (arg0))
11102 && TYPE_PRECISION (TREE_TYPE (arg0)) <= HOST_BITS_PER_WIDE_INT
11103 && tree_low_cst (TREE_OPERAND (arg0, 1), 1) > 0)
11105 unsigned int shiftc = tree_low_cst (TREE_OPERAND (arg0, 1), 1);
11106 unsigned HOST_WIDE_INT mask
11107 = tree_low_cst (arg1, TYPE_UNSIGNED (TREE_TYPE (arg1)));
11108 unsigned HOST_WIDE_INT newmask, zerobits = 0;
11109 tree shift_type = TREE_TYPE (arg0);
11111 if (TREE_CODE (arg0) == LSHIFT_EXPR)
11112 zerobits = ((((unsigned HOST_WIDE_INT) 1) << shiftc) - 1);
11113 else if (TREE_CODE (arg0) == RSHIFT_EXPR
11114 && TYPE_PRECISION (TREE_TYPE (arg0))
11115 == GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (arg0))))
11117 unsigned int prec = TYPE_PRECISION (TREE_TYPE (arg0));
11118 tree arg00 = TREE_OPERAND (arg0, 0);
11119 /* See if more bits can be proven as zero because of
11120 zero extension. */
11121 if (TREE_CODE (arg00) == NOP_EXPR
11122 && TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg00, 0))))
11124 tree inner_type = TREE_TYPE (TREE_OPERAND (arg00, 0));
11125 if (TYPE_PRECISION (inner_type)
11126 == GET_MODE_BITSIZE (TYPE_MODE (inner_type))
11127 && TYPE_PRECISION (inner_type) < prec)
11129 prec = TYPE_PRECISION (inner_type);
11130 /* See if we can shorten the right shift. */
11131 if (shiftc < prec)
11132 shift_type = inner_type;
11135 zerobits = ~(unsigned HOST_WIDE_INT) 0;
11136 zerobits >>= HOST_BITS_PER_WIDE_INT - shiftc;
11137 zerobits <<= prec - shiftc;
11138 /* For arithmetic shift if sign bit could be set, zerobits
11139 can contain actually sign bits, so no transformation is
11140 possible, unless MASK masks them all away. In that
11141 case the shift needs to be converted into logical shift. */
11142 if (!TYPE_UNSIGNED (TREE_TYPE (arg0))
11143 && prec == TYPE_PRECISION (TREE_TYPE (arg0)))
11145 if ((mask & zerobits) == 0)
11146 shift_type = unsigned_type_for (TREE_TYPE (arg0));
11147 else
11148 zerobits = 0;
11152 /* ((X << 16) & 0xff00) is (X, 0). */
11153 if ((mask & zerobits) == mask)
11154 return omit_one_operand_loc (loc, type,
11155 build_int_cst (type, 0), arg0);
11157 newmask = mask | zerobits;
11158 if (newmask != mask && (newmask & (newmask + 1)) == 0)
11160 unsigned int prec;
11162 /* Only do the transformation if NEWMASK is some integer
11163 mode's mask. */
11164 for (prec = BITS_PER_UNIT;
11165 prec < HOST_BITS_PER_WIDE_INT; prec <<= 1)
11166 if (newmask == (((unsigned HOST_WIDE_INT) 1) << prec) - 1)
11167 break;
11168 if (prec < HOST_BITS_PER_WIDE_INT
11169 || newmask == ~(unsigned HOST_WIDE_INT) 0)
11171 tree newmaskt;
11173 if (shift_type != TREE_TYPE (arg0))
11175 tem = fold_build2_loc (loc, TREE_CODE (arg0), shift_type,
11176 fold_convert_loc (loc, shift_type,
11177 TREE_OPERAND (arg0, 0)),
11178 TREE_OPERAND (arg0, 1));
11179 tem = fold_convert_loc (loc, type, tem);
11181 else
11182 tem = op0;
11183 newmaskt = build_int_cst_type (TREE_TYPE (op1), newmask);
11184 if (!tree_int_cst_equal (newmaskt, arg1))
11185 return fold_build2_loc (loc, BIT_AND_EXPR, type, tem, newmaskt);
11190 goto associate;
11192 case RDIV_EXPR:
11193 /* Don't touch a floating-point divide by zero unless the mode
11194 of the constant can represent infinity. */
11195 if (TREE_CODE (arg1) == REAL_CST
11196 && !MODE_HAS_INFINITIES (TYPE_MODE (TREE_TYPE (arg1)))
11197 && real_zerop (arg1))
11198 return NULL_TREE;
11200 /* Optimize A / A to 1.0 if we don't care about
11201 NaNs or Infinities. Skip the transformation
11202 for non-real operands. */
11203 if (SCALAR_FLOAT_TYPE_P (TREE_TYPE (arg0))
11204 && ! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0)))
11205 && ! HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (arg0)))
11206 && operand_equal_p (arg0, arg1, 0))
11208 tree r = build_real (TREE_TYPE (arg0), dconst1);
11210 return omit_two_operands_loc (loc, type, r, arg0, arg1);
11213 /* The complex version of the above A / A optimization. */
11214 if (COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0))
11215 && operand_equal_p (arg0, arg1, 0))
11217 tree elem_type = TREE_TYPE (TREE_TYPE (arg0));
11218 if (! HONOR_NANS (TYPE_MODE (elem_type))
11219 && ! HONOR_INFINITIES (TYPE_MODE (elem_type)))
11221 tree r = build_real (elem_type, dconst1);
11222 /* omit_two_operands will call fold_convert for us. */
11223 return omit_two_operands_loc (loc, type, r, arg0, arg1);
11227 /* (-A) / (-B) -> A / B */
11228 if (TREE_CODE (arg0) == NEGATE_EXPR && negate_expr_p (arg1))
11229 return fold_build2_loc (loc, RDIV_EXPR, type,
11230 TREE_OPERAND (arg0, 0),
11231 negate_expr (arg1));
11232 if (TREE_CODE (arg1) == NEGATE_EXPR && negate_expr_p (arg0))
11233 return fold_build2_loc (loc, RDIV_EXPR, type,
11234 negate_expr (arg0),
11235 TREE_OPERAND (arg1, 0));
11237 /* In IEEE floating point, x/1 is not equivalent to x for snans. */
11238 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
11239 && real_onep (arg1))
11240 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
11242 /* In IEEE floating point, x/-1 is not equivalent to -x for snans. */
11243 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
11244 && real_minus_onep (arg1))
11245 return non_lvalue_loc (loc, fold_convert_loc (loc, type,
11246 negate_expr (arg0)));
11248 /* If ARG1 is a constant, we can convert this to a multiply by the
11249 reciprocal. This does not have the same rounding properties,
11250 so only do this if -freciprocal-math. We can actually
11251 always safely do it if ARG1 is a power of two, but it's hard to
11252 tell if it is or not in a portable manner. */
11253 if (TREE_CODE (arg1) == REAL_CST)
11255 if (flag_reciprocal_math
11256 && 0 != (tem = const_binop (code, build_real (type, dconst1),
11257 arg1, 0)))
11258 return fold_build2_loc (loc, MULT_EXPR, type, arg0, tem);
11259 /* Find the reciprocal if optimizing and the result is exact. */
11260 if (optimize)
11262 REAL_VALUE_TYPE r;
11263 r = TREE_REAL_CST (arg1);
11264 if (exact_real_inverse (TYPE_MODE(TREE_TYPE(arg0)), &r))
11266 tem = build_real (type, r);
11267 return fold_build2_loc (loc, MULT_EXPR, type,
11268 fold_convert_loc (loc, type, arg0), tem);
11272 /* Convert A/B/C to A/(B*C). */
11273 if (flag_reciprocal_math
11274 && TREE_CODE (arg0) == RDIV_EXPR)
11275 return fold_build2_loc (loc, RDIV_EXPR, type, TREE_OPERAND (arg0, 0),
11276 fold_build2_loc (loc, MULT_EXPR, type,
11277 TREE_OPERAND (arg0, 1), arg1));
11279 /* Convert A/(B/C) to (A/B)*C. */
11280 if (flag_reciprocal_math
11281 && TREE_CODE (arg1) == RDIV_EXPR)
11282 return fold_build2_loc (loc, MULT_EXPR, type,
11283 fold_build2_loc (loc, RDIV_EXPR, type, arg0,
11284 TREE_OPERAND (arg1, 0)),
11285 TREE_OPERAND (arg1, 1));
11287 /* Convert C1/(X*C2) into (C1/C2)/X. */
11288 if (flag_reciprocal_math
11289 && TREE_CODE (arg1) == MULT_EXPR
11290 && TREE_CODE (arg0) == REAL_CST
11291 && TREE_CODE (TREE_OPERAND (arg1, 1)) == REAL_CST)
11293 tree tem = const_binop (RDIV_EXPR, arg0,
11294 TREE_OPERAND (arg1, 1), 0);
11295 if (tem)
11296 return fold_build2_loc (loc, RDIV_EXPR, type, tem,
11297 TREE_OPERAND (arg1, 0));
11300 if (flag_unsafe_math_optimizations)
11302 enum built_in_function fcode0 = builtin_mathfn_code (arg0);
11303 enum built_in_function fcode1 = builtin_mathfn_code (arg1);
11305 /* Optimize sin(x)/cos(x) as tan(x). */
11306 if (((fcode0 == BUILT_IN_SIN && fcode1 == BUILT_IN_COS)
11307 || (fcode0 == BUILT_IN_SINF && fcode1 == BUILT_IN_COSF)
11308 || (fcode0 == BUILT_IN_SINL && fcode1 == BUILT_IN_COSL))
11309 && operand_equal_p (CALL_EXPR_ARG (arg0, 0),
11310 CALL_EXPR_ARG (arg1, 0), 0))
11312 tree tanfn = mathfn_built_in (type, BUILT_IN_TAN);
11314 if (tanfn != NULL_TREE)
11315 return build_call_expr_loc (loc, tanfn, 1, CALL_EXPR_ARG (arg0, 0));
11318 /* Optimize cos(x)/sin(x) as 1.0/tan(x). */
11319 if (((fcode0 == BUILT_IN_COS && fcode1 == BUILT_IN_SIN)
11320 || (fcode0 == BUILT_IN_COSF && fcode1 == BUILT_IN_SINF)
11321 || (fcode0 == BUILT_IN_COSL && fcode1 == BUILT_IN_SINL))
11322 && operand_equal_p (CALL_EXPR_ARG (arg0, 0),
11323 CALL_EXPR_ARG (arg1, 0), 0))
11325 tree tanfn = mathfn_built_in (type, BUILT_IN_TAN);
11327 if (tanfn != NULL_TREE)
11329 tree tmp = build_call_expr_loc (loc, tanfn, 1,
11330 CALL_EXPR_ARG (arg0, 0));
11331 return fold_build2_loc (loc, RDIV_EXPR, type,
11332 build_real (type, dconst1), tmp);
11336 /* Optimize sin(x)/tan(x) as cos(x) if we don't care about
11337 NaNs or Infinities. */
11338 if (((fcode0 == BUILT_IN_SIN && fcode1 == BUILT_IN_TAN)
11339 || (fcode0 == BUILT_IN_SINF && fcode1 == BUILT_IN_TANF)
11340 || (fcode0 == BUILT_IN_SINL && fcode1 == BUILT_IN_TANL)))
11342 tree arg00 = CALL_EXPR_ARG (arg0, 0);
11343 tree arg01 = CALL_EXPR_ARG (arg1, 0);
11345 if (! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg00)))
11346 && ! HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (arg00)))
11347 && operand_equal_p (arg00, arg01, 0))
11349 tree cosfn = mathfn_built_in (type, BUILT_IN_COS);
11351 if (cosfn != NULL_TREE)
11352 return build_call_expr_loc (loc, cosfn, 1, arg00);
11356 /* Optimize tan(x)/sin(x) as 1.0/cos(x) if we don't care about
11357 NaNs or Infinities. */
11358 if (((fcode0 == BUILT_IN_TAN && fcode1 == BUILT_IN_SIN)
11359 || (fcode0 == BUILT_IN_TANF && fcode1 == BUILT_IN_SINF)
11360 || (fcode0 == BUILT_IN_TANL && fcode1 == BUILT_IN_SINL)))
11362 tree arg00 = CALL_EXPR_ARG (arg0, 0);
11363 tree arg01 = CALL_EXPR_ARG (arg1, 0);
11365 if (! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg00)))
11366 && ! HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (arg00)))
11367 && operand_equal_p (arg00, arg01, 0))
11369 tree cosfn = mathfn_built_in (type, BUILT_IN_COS);
11371 if (cosfn != NULL_TREE)
11373 tree tmp = build_call_expr_loc (loc, cosfn, 1, arg00);
11374 return fold_build2_loc (loc, RDIV_EXPR, type,
11375 build_real (type, dconst1),
11376 tmp);
11381 /* Optimize pow(x,c)/x as pow(x,c-1). */
11382 if (fcode0 == BUILT_IN_POW
11383 || fcode0 == BUILT_IN_POWF
11384 || fcode0 == BUILT_IN_POWL)
11386 tree arg00 = CALL_EXPR_ARG (arg0, 0);
11387 tree arg01 = CALL_EXPR_ARG (arg0, 1);
11388 if (TREE_CODE (arg01) == REAL_CST
11389 && !TREE_OVERFLOW (arg01)
11390 && operand_equal_p (arg1, arg00, 0))
11392 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
11393 REAL_VALUE_TYPE c;
11394 tree arg;
11396 c = TREE_REAL_CST (arg01);
11397 real_arithmetic (&c, MINUS_EXPR, &c, &dconst1);
11398 arg = build_real (type, c);
11399 return build_call_expr_loc (loc, powfn, 2, arg1, arg);
11403 /* Optimize a/root(b/c) into a*root(c/b). */
11404 if (BUILTIN_ROOT_P (fcode1))
11406 tree rootarg = CALL_EXPR_ARG (arg1, 0);
11408 if (TREE_CODE (rootarg) == RDIV_EXPR)
11410 tree rootfn = TREE_OPERAND (CALL_EXPR_FN (arg1), 0);
11411 tree b = TREE_OPERAND (rootarg, 0);
11412 tree c = TREE_OPERAND (rootarg, 1);
11414 tree tmp = fold_build2_loc (loc, RDIV_EXPR, type, c, b);
11416 tmp = build_call_expr_loc (loc, rootfn, 1, tmp);
11417 return fold_build2_loc (loc, MULT_EXPR, type, arg0, tmp);
11421 /* Optimize x/expN(y) into x*expN(-y). */
11422 if (BUILTIN_EXPONENT_P (fcode1))
11424 tree expfn = TREE_OPERAND (CALL_EXPR_FN (arg1), 0);
11425 tree arg = negate_expr (CALL_EXPR_ARG (arg1, 0));
11426 arg1 = build_call_expr_loc (loc,
11427 expfn, 1,
11428 fold_convert_loc (loc, type, arg));
11429 return fold_build2_loc (loc, MULT_EXPR, type, arg0, arg1);
11432 /* Optimize x/pow(y,z) into x*pow(y,-z). */
11433 if (fcode1 == BUILT_IN_POW
11434 || fcode1 == BUILT_IN_POWF
11435 || fcode1 == BUILT_IN_POWL)
11437 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg1), 0);
11438 tree arg10 = CALL_EXPR_ARG (arg1, 0);
11439 tree arg11 = CALL_EXPR_ARG (arg1, 1);
11440 tree neg11 = fold_convert_loc (loc, type,
11441 negate_expr (arg11));
11442 arg1 = build_call_expr_loc (loc, powfn, 2, arg10, neg11);
11443 return fold_build2_loc (loc, MULT_EXPR, type, arg0, arg1);
11446 return NULL_TREE;
11448 case TRUNC_DIV_EXPR:
11449 case FLOOR_DIV_EXPR:
11450 /* Simplify A / (B << N) where A and B are positive and B is
11451 a power of 2, to A >> (N + log2(B)). */
11452 strict_overflow_p = false;
11453 if (TREE_CODE (arg1) == LSHIFT_EXPR
11454 && (TYPE_UNSIGNED (type)
11455 || tree_expr_nonnegative_warnv_p (op0, &strict_overflow_p)))
11457 tree sval = TREE_OPERAND (arg1, 0);
11458 if (integer_pow2p (sval) && tree_int_cst_sgn (sval) > 0)
11460 tree sh_cnt = TREE_OPERAND (arg1, 1);
11461 unsigned long pow2 = exact_log2 (TREE_INT_CST_LOW (sval));
11463 if (strict_overflow_p)
11464 fold_overflow_warning (("assuming signed overflow does not "
11465 "occur when simplifying A / (B << N)"),
11466 WARN_STRICT_OVERFLOW_MISC);
11468 sh_cnt = fold_build2_loc (loc, PLUS_EXPR, TREE_TYPE (sh_cnt),
11469 sh_cnt, build_int_cst (NULL_TREE, pow2));
11470 return fold_build2_loc (loc, RSHIFT_EXPR, type,
11471 fold_convert_loc (loc, type, arg0), sh_cnt);
11475 /* For unsigned integral types, FLOOR_DIV_EXPR is the same as
11476 TRUNC_DIV_EXPR. Rewrite into the latter in this case. */
11477 if (INTEGRAL_TYPE_P (type)
11478 && TYPE_UNSIGNED (type)
11479 && code == FLOOR_DIV_EXPR)
11480 return fold_build2_loc (loc, TRUNC_DIV_EXPR, type, op0, op1);
11482 /* Fall thru */
11484 case ROUND_DIV_EXPR:
11485 case CEIL_DIV_EXPR:
11486 case EXACT_DIV_EXPR:
11487 if (integer_onep (arg1))
11488 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
11489 if (integer_zerop (arg1))
11490 return NULL_TREE;
11491 /* X / -1 is -X. */
11492 if (!TYPE_UNSIGNED (type)
11493 && TREE_CODE (arg1) == INTEGER_CST
11494 && TREE_INT_CST_LOW (arg1) == (unsigned HOST_WIDE_INT) -1
11495 && TREE_INT_CST_HIGH (arg1) == -1)
11496 return fold_convert_loc (loc, type, negate_expr (arg0));
11498 /* Convert -A / -B to A / B when the type is signed and overflow is
11499 undefined. */
11500 if ((!INTEGRAL_TYPE_P (type) || TYPE_OVERFLOW_UNDEFINED (type))
11501 && TREE_CODE (arg0) == NEGATE_EXPR
11502 && negate_expr_p (arg1))
11504 if (INTEGRAL_TYPE_P (type))
11505 fold_overflow_warning (("assuming signed overflow does not occur "
11506 "when distributing negation across "
11507 "division"),
11508 WARN_STRICT_OVERFLOW_MISC);
11509 return fold_build2_loc (loc, code, type,
11510 fold_convert_loc (loc, type,
11511 TREE_OPERAND (arg0, 0)),
11512 fold_convert_loc (loc, type,
11513 negate_expr (arg1)));
11515 if ((!INTEGRAL_TYPE_P (type) || TYPE_OVERFLOW_UNDEFINED (type))
11516 && TREE_CODE (arg1) == NEGATE_EXPR
11517 && negate_expr_p (arg0))
11519 if (INTEGRAL_TYPE_P (type))
11520 fold_overflow_warning (("assuming signed overflow does not occur "
11521 "when distributing negation across "
11522 "division"),
11523 WARN_STRICT_OVERFLOW_MISC);
11524 return fold_build2_loc (loc, code, type,
11525 fold_convert_loc (loc, type,
11526 negate_expr (arg0)),
11527 fold_convert_loc (loc, type,
11528 TREE_OPERAND (arg1, 0)));
11531 /* If arg0 is a multiple of arg1, then rewrite to the fastest div
11532 operation, EXACT_DIV_EXPR.
11534 Note that only CEIL_DIV_EXPR and FLOOR_DIV_EXPR are rewritten now.
11535 At one time others generated faster code, it's not clear if they do
11536 after the last round to changes to the DIV code in expmed.c. */
11537 if ((code == CEIL_DIV_EXPR || code == FLOOR_DIV_EXPR)
11538 && multiple_of_p (type, arg0, arg1))
11539 return fold_build2_loc (loc, EXACT_DIV_EXPR, type, arg0, arg1);
11541 strict_overflow_p = false;
11542 if (TREE_CODE (arg1) == INTEGER_CST
11543 && 0 != (tem = extract_muldiv (op0, arg1, code, NULL_TREE,
11544 &strict_overflow_p)))
11546 if (strict_overflow_p)
11547 fold_overflow_warning (("assuming signed overflow does not occur "
11548 "when simplifying division"),
11549 WARN_STRICT_OVERFLOW_MISC);
11550 return fold_convert_loc (loc, type, tem);
11553 return NULL_TREE;
11555 case CEIL_MOD_EXPR:
11556 case FLOOR_MOD_EXPR:
11557 case ROUND_MOD_EXPR:
11558 case TRUNC_MOD_EXPR:
11559 /* X % 1 is always zero, but be sure to preserve any side
11560 effects in X. */
11561 if (integer_onep (arg1))
11562 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
11564 /* X % 0, return X % 0 unchanged so that we can get the
11565 proper warnings and errors. */
11566 if (integer_zerop (arg1))
11567 return NULL_TREE;
11569 /* 0 % X is always zero, but be sure to preserve any side
11570 effects in X. Place this after checking for X == 0. */
11571 if (integer_zerop (arg0))
11572 return omit_one_operand_loc (loc, type, integer_zero_node, arg1);
11574 /* X % -1 is zero. */
11575 if (!TYPE_UNSIGNED (type)
11576 && TREE_CODE (arg1) == INTEGER_CST
11577 && TREE_INT_CST_LOW (arg1) == (unsigned HOST_WIDE_INT) -1
11578 && TREE_INT_CST_HIGH (arg1) == -1)
11579 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
11581 /* X % -C is the same as X % C. */
11582 if (code == TRUNC_MOD_EXPR
11583 && !TYPE_UNSIGNED (type)
11584 && TREE_CODE (arg1) == INTEGER_CST
11585 && !TREE_OVERFLOW (arg1)
11586 && TREE_INT_CST_HIGH (arg1) < 0
11587 && !TYPE_OVERFLOW_TRAPS (type)
11588 /* Avoid this transformation if C is INT_MIN, i.e. C == -C. */
11589 && !sign_bit_p (arg1, arg1))
11590 return fold_build2_loc (loc, code, type,
11591 fold_convert_loc (loc, type, arg0),
11592 fold_convert_loc (loc, type,
11593 negate_expr (arg1)));
11595 /* X % -Y is the same as X % Y. */
11596 if (code == TRUNC_MOD_EXPR
11597 && !TYPE_UNSIGNED (type)
11598 && TREE_CODE (arg1) == NEGATE_EXPR
11599 && !TYPE_OVERFLOW_TRAPS (type))
11600 return fold_build2_loc (loc, code, type, fold_convert_loc (loc, type, arg0),
11601 fold_convert_loc (loc, type,
11602 TREE_OPERAND (arg1, 0)));
11604 strict_overflow_p = false;
11605 if (TREE_CODE (arg1) == INTEGER_CST
11606 && 0 != (tem = extract_muldiv (op0, arg1, code, NULL_TREE,
11607 &strict_overflow_p)))
11609 if (strict_overflow_p)
11610 fold_overflow_warning (("assuming signed overflow does not occur "
11611 "when simplifying modulus"),
11612 WARN_STRICT_OVERFLOW_MISC);
11613 return fold_convert_loc (loc, type, tem);
11616 /* Optimize TRUNC_MOD_EXPR by a power of two into a BIT_AND_EXPR,
11617 i.e. "X % C" into "X & (C - 1)", if X and C are positive. */
11618 if ((code == TRUNC_MOD_EXPR || code == FLOOR_MOD_EXPR)
11619 && (TYPE_UNSIGNED (type)
11620 || tree_expr_nonnegative_warnv_p (op0, &strict_overflow_p)))
11622 tree c = arg1;
11623 /* Also optimize A % (C << N) where C is a power of 2,
11624 to A & ((C << N) - 1). */
11625 if (TREE_CODE (arg1) == LSHIFT_EXPR)
11626 c = TREE_OPERAND (arg1, 0);
11628 if (integer_pow2p (c) && tree_int_cst_sgn (c) > 0)
11630 tree mask
11631 = fold_build2_loc (loc, MINUS_EXPR, TREE_TYPE (arg1), arg1,
11632 build_int_cst (TREE_TYPE (arg1), 1));
11633 if (strict_overflow_p)
11634 fold_overflow_warning (("assuming signed overflow does not "
11635 "occur when simplifying "
11636 "X % (power of two)"),
11637 WARN_STRICT_OVERFLOW_MISC);
11638 return fold_build2_loc (loc, BIT_AND_EXPR, type,
11639 fold_convert_loc (loc, type, arg0),
11640 fold_convert_loc (loc, type, mask));
11644 return NULL_TREE;
11646 case LROTATE_EXPR:
11647 case RROTATE_EXPR:
11648 if (integer_all_onesp (arg0))
11649 return omit_one_operand_loc (loc, type, arg0, arg1);
11650 goto shift;
11652 case RSHIFT_EXPR:
11653 /* Optimize -1 >> x for arithmetic right shifts. */
11654 if (integer_all_onesp (arg0) && !TYPE_UNSIGNED (type)
11655 && tree_expr_nonnegative_p (arg1))
11656 return omit_one_operand_loc (loc, type, arg0, arg1);
11657 /* ... fall through ... */
11659 case LSHIFT_EXPR:
11660 shift:
11661 if (integer_zerop (arg1))
11662 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
11663 if (integer_zerop (arg0))
11664 return omit_one_operand_loc (loc, type, arg0, arg1);
11666 /* Since negative shift count is not well-defined,
11667 don't try to compute it in the compiler. */
11668 if (TREE_CODE (arg1) == INTEGER_CST && tree_int_cst_sgn (arg1) < 0)
11669 return NULL_TREE;
11671 /* Turn (a OP c1) OP c2 into a OP (c1+c2). */
11672 if (TREE_CODE (op0) == code && host_integerp (arg1, false)
11673 && TREE_INT_CST_LOW (arg1) < TYPE_PRECISION (type)
11674 && host_integerp (TREE_OPERAND (arg0, 1), false)
11675 && TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1)) < TYPE_PRECISION (type))
11677 HOST_WIDE_INT low = (TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1))
11678 + TREE_INT_CST_LOW (arg1));
11680 /* Deal with a OP (c1 + c2) being undefined but (a OP c1) OP c2
11681 being well defined. */
11682 if (low >= TYPE_PRECISION (type))
11684 if (code == LROTATE_EXPR || code == RROTATE_EXPR)
11685 low = low % TYPE_PRECISION (type);
11686 else if (TYPE_UNSIGNED (type) || code == LSHIFT_EXPR)
11687 return omit_one_operand_loc (loc, type, build_int_cst (type, 0),
11688 TREE_OPERAND (arg0, 0));
11689 else
11690 low = TYPE_PRECISION (type) - 1;
11693 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0),
11694 build_int_cst (type, low));
11697 /* Transform (x >> c) << c into x & (-1<<c), or transform (x << c) >> c
11698 into x & ((unsigned)-1 >> c) for unsigned types. */
11699 if (((code == LSHIFT_EXPR && TREE_CODE (arg0) == RSHIFT_EXPR)
11700 || (TYPE_UNSIGNED (type)
11701 && code == RSHIFT_EXPR && TREE_CODE (arg0) == LSHIFT_EXPR))
11702 && host_integerp (arg1, false)
11703 && TREE_INT_CST_LOW (arg1) < TYPE_PRECISION (type)
11704 && host_integerp (TREE_OPERAND (arg0, 1), false)
11705 && TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1)) < TYPE_PRECISION (type))
11707 HOST_WIDE_INT low0 = TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1));
11708 HOST_WIDE_INT low1 = TREE_INT_CST_LOW (arg1);
11709 tree lshift;
11710 tree arg00;
11712 if (low0 == low1)
11714 arg00 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
11716 lshift = build_int_cst (type, -1);
11717 lshift = int_const_binop (code, lshift, arg1, 0);
11719 return fold_build2_loc (loc, BIT_AND_EXPR, type, arg00, lshift);
11723 /* Rewrite an LROTATE_EXPR by a constant into an
11724 RROTATE_EXPR by a new constant. */
11725 if (code == LROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST)
11727 tree tem = build_int_cst (TREE_TYPE (arg1),
11728 TYPE_PRECISION (type));
11729 tem = const_binop (MINUS_EXPR, tem, arg1, 0);
11730 return fold_build2_loc (loc, RROTATE_EXPR, type, op0, tem);
11733 /* If we have a rotate of a bit operation with the rotate count and
11734 the second operand of the bit operation both constant,
11735 permute the two operations. */
11736 if (code == RROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST
11737 && (TREE_CODE (arg0) == BIT_AND_EXPR
11738 || TREE_CODE (arg0) == BIT_IOR_EXPR
11739 || TREE_CODE (arg0) == BIT_XOR_EXPR)
11740 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
11741 return fold_build2_loc (loc, TREE_CODE (arg0), type,
11742 fold_build2_loc (loc, code, type,
11743 TREE_OPERAND (arg0, 0), arg1),
11744 fold_build2_loc (loc, code, type,
11745 TREE_OPERAND (arg0, 1), arg1));
11747 /* Two consecutive rotates adding up to the precision of the
11748 type can be ignored. */
11749 if (code == RROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST
11750 && TREE_CODE (arg0) == RROTATE_EXPR
11751 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
11752 && TREE_INT_CST_HIGH (arg1) == 0
11753 && TREE_INT_CST_HIGH (TREE_OPERAND (arg0, 1)) == 0
11754 && ((TREE_INT_CST_LOW (arg1)
11755 + TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1)))
11756 == (unsigned int) TYPE_PRECISION (type)))
11757 return TREE_OPERAND (arg0, 0);
11759 /* Fold (X & C2) << C1 into (X << C1) & (C2 << C1)
11760 (X & C2) >> C1 into (X >> C1) & (C2 >> C1)
11761 if the latter can be further optimized. */
11762 if ((code == LSHIFT_EXPR || code == RSHIFT_EXPR)
11763 && TREE_CODE (arg0) == BIT_AND_EXPR
11764 && TREE_CODE (arg1) == INTEGER_CST
11765 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
11767 tree mask = fold_build2_loc (loc, code, type,
11768 fold_convert_loc (loc, type,
11769 TREE_OPERAND (arg0, 1)),
11770 arg1);
11771 tree shift = fold_build2_loc (loc, code, type,
11772 fold_convert_loc (loc, type,
11773 TREE_OPERAND (arg0, 0)),
11774 arg1);
11775 tem = fold_binary_loc (loc, BIT_AND_EXPR, type, shift, mask);
11776 if (tem)
11777 return tem;
11780 return NULL_TREE;
11782 case MIN_EXPR:
11783 if (operand_equal_p (arg0, arg1, 0))
11784 return omit_one_operand_loc (loc, type, arg0, arg1);
11785 if (INTEGRAL_TYPE_P (type)
11786 && operand_equal_p (arg1, TYPE_MIN_VALUE (type), OEP_ONLY_CONST))
11787 return omit_one_operand_loc (loc, type, arg1, arg0);
11788 tem = fold_minmax (loc, MIN_EXPR, type, arg0, arg1);
11789 if (tem)
11790 return tem;
11791 goto associate;
11793 case MAX_EXPR:
11794 if (operand_equal_p (arg0, arg1, 0))
11795 return omit_one_operand_loc (loc, type, arg0, arg1);
11796 if (INTEGRAL_TYPE_P (type)
11797 && TYPE_MAX_VALUE (type)
11798 && operand_equal_p (arg1, TYPE_MAX_VALUE (type), OEP_ONLY_CONST))
11799 return omit_one_operand_loc (loc, type, arg1, arg0);
11800 tem = fold_minmax (loc, MAX_EXPR, type, arg0, arg1);
11801 if (tem)
11802 return tem;
11803 goto associate;
11805 case TRUTH_ANDIF_EXPR:
11806 /* Note that the operands of this must be ints
11807 and their values must be 0 or 1.
11808 ("true" is a fixed value perhaps depending on the language.) */
11809 /* If first arg is constant zero, return it. */
11810 if (integer_zerop (arg0))
11811 return fold_convert_loc (loc, type, arg0);
11812 case TRUTH_AND_EXPR:
11813 /* If either arg is constant true, drop it. */
11814 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
11815 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
11816 if (TREE_CODE (arg1) == INTEGER_CST && ! integer_zerop (arg1)
11817 /* Preserve sequence points. */
11818 && (code != TRUTH_ANDIF_EXPR || ! TREE_SIDE_EFFECTS (arg0)))
11819 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
11820 /* If second arg is constant zero, result is zero, but first arg
11821 must be evaluated. */
11822 if (integer_zerop (arg1))
11823 return omit_one_operand_loc (loc, type, arg1, arg0);
11824 /* Likewise for first arg, but note that only the TRUTH_AND_EXPR
11825 case will be handled here. */
11826 if (integer_zerop (arg0))
11827 return omit_one_operand_loc (loc, type, arg0, arg1);
11829 /* !X && X is always false. */
11830 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
11831 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
11832 return omit_one_operand_loc (loc, type, integer_zero_node, arg1);
11833 /* X && !X is always false. */
11834 if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
11835 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
11836 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
11838 /* A < X && A + 1 > Y ==> A < X && A >= Y. Normally A + 1 > Y
11839 means A >= Y && A != MAX, but in this case we know that
11840 A < X <= MAX. */
11842 if (!TREE_SIDE_EFFECTS (arg0)
11843 && !TREE_SIDE_EFFECTS (arg1))
11845 tem = fold_to_nonsharp_ineq_using_bound (loc, arg0, arg1);
11846 if (tem && !operand_equal_p (tem, arg0, 0))
11847 return fold_build2_loc (loc, code, type, tem, arg1);
11849 tem = fold_to_nonsharp_ineq_using_bound (loc, arg1, arg0);
11850 if (tem && !operand_equal_p (tem, arg1, 0))
11851 return fold_build2_loc (loc, code, type, arg0, tem);
11854 truth_andor:
11855 /* We only do these simplifications if we are optimizing. */
11856 if (!optimize)
11857 return NULL_TREE;
11859 /* Check for things like (A || B) && (A || C). We can convert this
11860 to A || (B && C). Note that either operator can be any of the four
11861 truth and/or operations and the transformation will still be
11862 valid. Also note that we only care about order for the
11863 ANDIF and ORIF operators. If B contains side effects, this
11864 might change the truth-value of A. */
11865 if (TREE_CODE (arg0) == TREE_CODE (arg1)
11866 && (TREE_CODE (arg0) == TRUTH_ANDIF_EXPR
11867 || TREE_CODE (arg0) == TRUTH_ORIF_EXPR
11868 || TREE_CODE (arg0) == TRUTH_AND_EXPR
11869 || TREE_CODE (arg0) == TRUTH_OR_EXPR)
11870 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (arg0, 1)))
11872 tree a00 = TREE_OPERAND (arg0, 0);
11873 tree a01 = TREE_OPERAND (arg0, 1);
11874 tree a10 = TREE_OPERAND (arg1, 0);
11875 tree a11 = TREE_OPERAND (arg1, 1);
11876 int commutative = ((TREE_CODE (arg0) == TRUTH_OR_EXPR
11877 || TREE_CODE (arg0) == TRUTH_AND_EXPR)
11878 && (code == TRUTH_AND_EXPR
11879 || code == TRUTH_OR_EXPR));
11881 if (operand_equal_p (a00, a10, 0))
11882 return fold_build2_loc (loc, TREE_CODE (arg0), type, a00,
11883 fold_build2_loc (loc, code, type, a01, a11));
11884 else if (commutative && operand_equal_p (a00, a11, 0))
11885 return fold_build2_loc (loc, TREE_CODE (arg0), type, a00,
11886 fold_build2_loc (loc, code, type, a01, a10));
11887 else if (commutative && operand_equal_p (a01, a10, 0))
11888 return fold_build2_loc (loc, TREE_CODE (arg0), type, a01,
11889 fold_build2_loc (loc, code, type, a00, a11));
11891 /* This case if tricky because we must either have commutative
11892 operators or else A10 must not have side-effects. */
11894 else if ((commutative || ! TREE_SIDE_EFFECTS (a10))
11895 && operand_equal_p (a01, a11, 0))
11896 return fold_build2_loc (loc, TREE_CODE (arg0), type,
11897 fold_build2_loc (loc, code, type, a00, a10),
11898 a01);
11901 /* See if we can build a range comparison. */
11902 if (0 != (tem = fold_range_test (loc, code, type, op0, op1)))
11903 return tem;
11905 if ((code == TRUTH_ANDIF_EXPR && TREE_CODE (arg0) == TRUTH_ORIF_EXPR)
11906 || (code == TRUTH_ORIF_EXPR && TREE_CODE (arg0) == TRUTH_ANDIF_EXPR))
11908 tem = merge_truthop_with_opposite_arm (loc, arg0, arg1, true);
11909 if (tem)
11910 return fold_build2_loc (loc, code, type, tem, arg1);
11913 if ((code == TRUTH_ANDIF_EXPR && TREE_CODE (arg1) == TRUTH_ORIF_EXPR)
11914 || (code == TRUTH_ORIF_EXPR && TREE_CODE (arg1) == TRUTH_ANDIF_EXPR))
11916 tem = merge_truthop_with_opposite_arm (loc, arg1, arg0, false);
11917 if (tem)
11918 return fold_build2_loc (loc, code, type, arg0, tem);
11921 /* Check for the possibility of merging component references. If our
11922 lhs is another similar operation, try to merge its rhs with our
11923 rhs. Then try to merge our lhs and rhs. */
11924 if (TREE_CODE (arg0) == code
11925 && 0 != (tem = fold_truthop (loc, code, type,
11926 TREE_OPERAND (arg0, 1), arg1)))
11927 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0), tem);
11929 if ((tem = fold_truthop (loc, code, type, arg0, arg1)) != 0)
11930 return tem;
11932 return NULL_TREE;
11934 case TRUTH_ORIF_EXPR:
11935 /* Note that the operands of this must be ints
11936 and their values must be 0 or true.
11937 ("true" is a fixed value perhaps depending on the language.) */
11938 /* If first arg is constant true, return it. */
11939 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
11940 return fold_convert_loc (loc, type, arg0);
11941 case TRUTH_OR_EXPR:
11942 /* If either arg is constant zero, drop it. */
11943 if (TREE_CODE (arg0) == INTEGER_CST && integer_zerop (arg0))
11944 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
11945 if (TREE_CODE (arg1) == INTEGER_CST && integer_zerop (arg1)
11946 /* Preserve sequence points. */
11947 && (code != TRUTH_ORIF_EXPR || ! TREE_SIDE_EFFECTS (arg0)))
11948 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
11949 /* If second arg is constant true, result is true, but we must
11950 evaluate first arg. */
11951 if (TREE_CODE (arg1) == INTEGER_CST && ! integer_zerop (arg1))
11952 return omit_one_operand_loc (loc, type, arg1, arg0);
11953 /* Likewise for first arg, but note this only occurs here for
11954 TRUTH_OR_EXPR. */
11955 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
11956 return omit_one_operand_loc (loc, type, arg0, arg1);
11958 /* !X || X is always true. */
11959 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
11960 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
11961 return omit_one_operand_loc (loc, type, integer_one_node, arg1);
11962 /* X || !X is always true. */
11963 if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
11964 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
11965 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
11967 goto truth_andor;
11969 case TRUTH_XOR_EXPR:
11970 /* If the second arg is constant zero, drop it. */
11971 if (integer_zerop (arg1))
11972 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
11973 /* If the second arg is constant true, this is a logical inversion. */
11974 if (integer_onep (arg1))
11976 /* Only call invert_truthvalue if operand is a truth value. */
11977 if (TREE_CODE (TREE_TYPE (arg0)) != BOOLEAN_TYPE)
11978 tem = fold_build1_loc (loc, TRUTH_NOT_EXPR, TREE_TYPE (arg0), arg0);
11979 else
11980 tem = invert_truthvalue_loc (loc, arg0);
11981 return non_lvalue_loc (loc, fold_convert_loc (loc, type, tem));
11983 /* Identical arguments cancel to zero. */
11984 if (operand_equal_p (arg0, arg1, 0))
11985 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
11987 /* !X ^ X is always true. */
11988 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
11989 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
11990 return omit_one_operand_loc (loc, type, integer_one_node, arg1);
11992 /* X ^ !X is always true. */
11993 if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
11994 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
11995 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
11997 return NULL_TREE;
11999 case EQ_EXPR:
12000 case NE_EXPR:
12001 tem = fold_comparison (loc, code, type, op0, op1);
12002 if (tem != NULL_TREE)
12003 return tem;
12005 /* bool_var != 0 becomes bool_var. */
12006 if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_zerop (arg1)
12007 && code == NE_EXPR)
12008 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
12010 /* bool_var == 1 becomes bool_var. */
12011 if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_onep (arg1)
12012 && code == EQ_EXPR)
12013 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
12015 /* bool_var != 1 becomes !bool_var. */
12016 if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_onep (arg1)
12017 && code == NE_EXPR)
12018 return fold_build1_loc (loc, TRUTH_NOT_EXPR, type,
12019 fold_convert_loc (loc, type, arg0));
12021 /* bool_var == 0 becomes !bool_var. */
12022 if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_zerop (arg1)
12023 && code == EQ_EXPR)
12024 return fold_build1_loc (loc, TRUTH_NOT_EXPR, type,
12025 fold_convert_loc (loc, type, arg0));
12027 /* !exp != 0 becomes !exp */
12028 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR && integer_zerop (arg1)
12029 && code == NE_EXPR)
12030 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
12032 /* If this is an equality comparison of the address of two non-weak,
12033 unaliased symbols neither of which are extern (since we do not
12034 have access to attributes for externs), then we know the result. */
12035 if (TREE_CODE (arg0) == ADDR_EXPR
12036 && VAR_OR_FUNCTION_DECL_P (TREE_OPERAND (arg0, 0))
12037 && ! DECL_WEAK (TREE_OPERAND (arg0, 0))
12038 && ! lookup_attribute ("alias",
12039 DECL_ATTRIBUTES (TREE_OPERAND (arg0, 0)))
12040 && ! DECL_EXTERNAL (TREE_OPERAND (arg0, 0))
12041 && TREE_CODE (arg1) == ADDR_EXPR
12042 && VAR_OR_FUNCTION_DECL_P (TREE_OPERAND (arg1, 0))
12043 && ! DECL_WEAK (TREE_OPERAND (arg1, 0))
12044 && ! lookup_attribute ("alias",
12045 DECL_ATTRIBUTES (TREE_OPERAND (arg1, 0)))
12046 && ! DECL_EXTERNAL (TREE_OPERAND (arg1, 0)))
12048 /* We know that we're looking at the address of two
12049 non-weak, unaliased, static _DECL nodes.
12051 It is both wasteful and incorrect to call operand_equal_p
12052 to compare the two ADDR_EXPR nodes. It is wasteful in that
12053 all we need to do is test pointer equality for the arguments
12054 to the two ADDR_EXPR nodes. It is incorrect to use
12055 operand_equal_p as that function is NOT equivalent to a
12056 C equality test. It can in fact return false for two
12057 objects which would test as equal using the C equality
12058 operator. */
12059 bool equal = TREE_OPERAND (arg0, 0) == TREE_OPERAND (arg1, 0);
12060 return constant_boolean_node (equal
12061 ? code == EQ_EXPR : code != EQ_EXPR,
12062 type);
12065 /* If this is an EQ or NE comparison of a constant with a PLUS_EXPR or
12066 a MINUS_EXPR of a constant, we can convert it into a comparison with
12067 a revised constant as long as no overflow occurs. */
12068 if (TREE_CODE (arg1) == INTEGER_CST
12069 && (TREE_CODE (arg0) == PLUS_EXPR
12070 || TREE_CODE (arg0) == MINUS_EXPR)
12071 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
12072 && 0 != (tem = const_binop (TREE_CODE (arg0) == PLUS_EXPR
12073 ? MINUS_EXPR : PLUS_EXPR,
12074 fold_convert_loc (loc, TREE_TYPE (arg0),
12075 arg1),
12076 TREE_OPERAND (arg0, 1), 0))
12077 && !TREE_OVERFLOW (tem))
12078 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0), tem);
12080 /* Similarly for a NEGATE_EXPR. */
12081 if (TREE_CODE (arg0) == NEGATE_EXPR
12082 && TREE_CODE (arg1) == INTEGER_CST
12083 && 0 != (tem = negate_expr (arg1))
12084 && TREE_CODE (tem) == INTEGER_CST
12085 && !TREE_OVERFLOW (tem))
12086 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0), tem);
12088 /* Similarly for a BIT_XOR_EXPR; X ^ C1 == C2 is X == (C1 ^ C2). */
12089 if (TREE_CODE (arg0) == BIT_XOR_EXPR
12090 && TREE_CODE (arg1) == INTEGER_CST
12091 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
12092 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0),
12093 fold_build2_loc (loc, BIT_XOR_EXPR, TREE_TYPE (arg0),
12094 fold_convert_loc (loc,
12095 TREE_TYPE (arg0),
12096 arg1),
12097 TREE_OPERAND (arg0, 1)));
12099 /* Transform comparisons of the form X +- Y CMP X to Y CMP 0. */
12100 if ((TREE_CODE (arg0) == PLUS_EXPR
12101 || TREE_CODE (arg0) == POINTER_PLUS_EXPR
12102 || TREE_CODE (arg0) == MINUS_EXPR)
12103 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
12104 && (INTEGRAL_TYPE_P (TREE_TYPE (arg0))
12105 || POINTER_TYPE_P (TREE_TYPE (arg0))))
12107 tree val = TREE_OPERAND (arg0, 1);
12108 return omit_two_operands_loc (loc, type,
12109 fold_build2_loc (loc, code, type,
12110 val,
12111 build_int_cst (TREE_TYPE (val),
12112 0)),
12113 TREE_OPERAND (arg0, 0), arg1);
12116 /* Transform comparisons of the form C - X CMP X if C % 2 == 1. */
12117 if (TREE_CODE (arg0) == MINUS_EXPR
12118 && TREE_CODE (TREE_OPERAND (arg0, 0)) == INTEGER_CST
12119 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0)
12120 && (TREE_INT_CST_LOW (TREE_OPERAND (arg0, 0)) & 1) == 1)
12122 return omit_two_operands_loc (loc, type,
12123 code == NE_EXPR
12124 ? boolean_true_node : boolean_false_node,
12125 TREE_OPERAND (arg0, 1), arg1);
12128 /* If we have X - Y == 0, we can convert that to X == Y and similarly
12129 for !=. Don't do this for ordered comparisons due to overflow. */
12130 if (TREE_CODE (arg0) == MINUS_EXPR
12131 && integer_zerop (arg1))
12132 return fold_build2_loc (loc, code, type,
12133 TREE_OPERAND (arg0, 0), TREE_OPERAND (arg0, 1));
12135 /* Convert ABS_EXPR<x> == 0 or ABS_EXPR<x> != 0 to x == 0 or x != 0. */
12136 if (TREE_CODE (arg0) == ABS_EXPR
12137 && (integer_zerop (arg1) || real_zerop (arg1)))
12138 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0), arg1);
12140 /* If this is an EQ or NE comparison with zero and ARG0 is
12141 (1 << foo) & bar, convert it to (bar >> foo) & 1. Both require
12142 two operations, but the latter can be done in one less insn
12143 on machines that have only two-operand insns or on which a
12144 constant cannot be the first operand. */
12145 if (TREE_CODE (arg0) == BIT_AND_EXPR
12146 && integer_zerop (arg1))
12148 tree arg00 = TREE_OPERAND (arg0, 0);
12149 tree arg01 = TREE_OPERAND (arg0, 1);
12150 if (TREE_CODE (arg00) == LSHIFT_EXPR
12151 && integer_onep (TREE_OPERAND (arg00, 0)))
12153 tree tem = fold_build2_loc (loc, RSHIFT_EXPR, TREE_TYPE (arg00),
12154 arg01, TREE_OPERAND (arg00, 1));
12155 tem = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg0), tem,
12156 build_int_cst (TREE_TYPE (arg0), 1));
12157 return fold_build2_loc (loc, code, type,
12158 fold_convert_loc (loc, TREE_TYPE (arg1), tem),
12159 arg1);
12161 else if (TREE_CODE (arg01) == LSHIFT_EXPR
12162 && integer_onep (TREE_OPERAND (arg01, 0)))
12164 tree tem = fold_build2_loc (loc, RSHIFT_EXPR, TREE_TYPE (arg01),
12165 arg00, TREE_OPERAND (arg01, 1));
12166 tem = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg0), tem,
12167 build_int_cst (TREE_TYPE (arg0), 1));
12168 return fold_build2_loc (loc, code, type,
12169 fold_convert_loc (loc, TREE_TYPE (arg1), tem),
12170 arg1);
12174 /* If this is an NE or EQ comparison of zero against the result of a
12175 signed MOD operation whose second operand is a power of 2, make
12176 the MOD operation unsigned since it is simpler and equivalent. */
12177 if (integer_zerop (arg1)
12178 && !TYPE_UNSIGNED (TREE_TYPE (arg0))
12179 && (TREE_CODE (arg0) == TRUNC_MOD_EXPR
12180 || TREE_CODE (arg0) == CEIL_MOD_EXPR
12181 || TREE_CODE (arg0) == FLOOR_MOD_EXPR
12182 || TREE_CODE (arg0) == ROUND_MOD_EXPR)
12183 && integer_pow2p (TREE_OPERAND (arg0, 1)))
12185 tree newtype = unsigned_type_for (TREE_TYPE (arg0));
12186 tree newmod = fold_build2_loc (loc, TREE_CODE (arg0), newtype,
12187 fold_convert_loc (loc, newtype,
12188 TREE_OPERAND (arg0, 0)),
12189 fold_convert_loc (loc, newtype,
12190 TREE_OPERAND (arg0, 1)));
12192 return fold_build2_loc (loc, code, type, newmod,
12193 fold_convert_loc (loc, newtype, arg1));
12196 /* Fold ((X >> C1) & C2) == 0 and ((X >> C1) & C2) != 0 where
12197 C1 is a valid shift constant, and C2 is a power of two, i.e.
12198 a single bit. */
12199 if (TREE_CODE (arg0) == BIT_AND_EXPR
12200 && TREE_CODE (TREE_OPERAND (arg0, 0)) == RSHIFT_EXPR
12201 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1))
12202 == INTEGER_CST
12203 && integer_pow2p (TREE_OPERAND (arg0, 1))
12204 && integer_zerop (arg1))
12206 tree itype = TREE_TYPE (arg0);
12207 unsigned HOST_WIDE_INT prec = TYPE_PRECISION (itype);
12208 tree arg001 = TREE_OPERAND (TREE_OPERAND (arg0, 0), 1);
12210 /* Check for a valid shift count. */
12211 if (TREE_INT_CST_HIGH (arg001) == 0
12212 && TREE_INT_CST_LOW (arg001) < prec)
12214 tree arg01 = TREE_OPERAND (arg0, 1);
12215 tree arg000 = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
12216 unsigned HOST_WIDE_INT log2 = tree_log2 (arg01);
12217 /* If (C2 << C1) doesn't overflow, then ((X >> C1) & C2) != 0
12218 can be rewritten as (X & (C2 << C1)) != 0. */
12219 if ((log2 + TREE_INT_CST_LOW (arg001)) < prec)
12221 tem = fold_build2_loc (loc, LSHIFT_EXPR, itype, arg01, arg001);
12222 tem = fold_build2_loc (loc, BIT_AND_EXPR, itype, arg000, tem);
12223 return fold_build2_loc (loc, code, type, tem, arg1);
12225 /* Otherwise, for signed (arithmetic) shifts,
12226 ((X >> C1) & C2) != 0 is rewritten as X < 0, and
12227 ((X >> C1) & C2) == 0 is rewritten as X >= 0. */
12228 else if (!TYPE_UNSIGNED (itype))
12229 return fold_build2_loc (loc, code == EQ_EXPR ? GE_EXPR : LT_EXPR, type,
12230 arg000, build_int_cst (itype, 0));
12231 /* Otherwise, of unsigned (logical) shifts,
12232 ((X >> C1) & C2) != 0 is rewritten as (X,false), and
12233 ((X >> C1) & C2) == 0 is rewritten as (X,true). */
12234 else
12235 return omit_one_operand_loc (loc, type,
12236 code == EQ_EXPR ? integer_one_node
12237 : integer_zero_node,
12238 arg000);
12242 /* If this is an NE comparison of zero with an AND of one, remove the
12243 comparison since the AND will give the correct value. */
12244 if (code == NE_EXPR
12245 && integer_zerop (arg1)
12246 && TREE_CODE (arg0) == BIT_AND_EXPR
12247 && integer_onep (TREE_OPERAND (arg0, 1)))
12248 return fold_convert_loc (loc, type, arg0);
12250 /* If we have (A & C) == C where C is a power of 2, convert this into
12251 (A & C) != 0. Similarly for NE_EXPR. */
12252 if (TREE_CODE (arg0) == BIT_AND_EXPR
12253 && integer_pow2p (TREE_OPERAND (arg0, 1))
12254 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
12255 return fold_build2_loc (loc, code == EQ_EXPR ? NE_EXPR : EQ_EXPR, type,
12256 arg0, fold_convert_loc (loc, TREE_TYPE (arg0),
12257 integer_zero_node));
12259 /* If we have (A & C) != 0 or (A & C) == 0 and C is the sign
12260 bit, then fold the expression into A < 0 or A >= 0. */
12261 tem = fold_single_bit_test_into_sign_test (loc, code, arg0, arg1, type);
12262 if (tem)
12263 return tem;
12265 /* If we have (A & C) == D where D & ~C != 0, convert this into 0.
12266 Similarly for NE_EXPR. */
12267 if (TREE_CODE (arg0) == BIT_AND_EXPR
12268 && TREE_CODE (arg1) == INTEGER_CST
12269 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
12271 tree notc = fold_build1_loc (loc, BIT_NOT_EXPR,
12272 TREE_TYPE (TREE_OPERAND (arg0, 1)),
12273 TREE_OPERAND (arg0, 1));
12274 tree dandnotc = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg0),
12275 arg1, notc);
12276 tree rslt = code == EQ_EXPR ? integer_zero_node : integer_one_node;
12277 if (integer_nonzerop (dandnotc))
12278 return omit_one_operand_loc (loc, type, rslt, arg0);
12281 /* If we have (A | C) == D where C & ~D != 0, convert this into 0.
12282 Similarly for NE_EXPR. */
12283 if (TREE_CODE (arg0) == BIT_IOR_EXPR
12284 && TREE_CODE (arg1) == INTEGER_CST
12285 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
12287 tree notd = fold_build1_loc (loc, BIT_NOT_EXPR, TREE_TYPE (arg1), arg1);
12288 tree candnotd = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg0),
12289 TREE_OPERAND (arg0, 1), notd);
12290 tree rslt = code == EQ_EXPR ? integer_zero_node : integer_one_node;
12291 if (integer_nonzerop (candnotd))
12292 return omit_one_operand_loc (loc, type, rslt, arg0);
12295 /* If this is a comparison of a field, we may be able to simplify it. */
12296 if ((TREE_CODE (arg0) == COMPONENT_REF
12297 || TREE_CODE (arg0) == BIT_FIELD_REF)
12298 /* Handle the constant case even without -O
12299 to make sure the warnings are given. */
12300 && (optimize || TREE_CODE (arg1) == INTEGER_CST))
12302 t1 = optimize_bit_field_compare (loc, code, type, arg0, arg1);
12303 if (t1)
12304 return t1;
12307 /* Optimize comparisons of strlen vs zero to a compare of the
12308 first character of the string vs zero. To wit,
12309 strlen(ptr) == 0 => *ptr == 0
12310 strlen(ptr) != 0 => *ptr != 0
12311 Other cases should reduce to one of these two (or a constant)
12312 due to the return value of strlen being unsigned. */
12313 if (TREE_CODE (arg0) == CALL_EXPR
12314 && integer_zerop (arg1))
12316 tree fndecl = get_callee_fndecl (arg0);
12318 if (fndecl
12319 && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
12320 && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_STRLEN
12321 && call_expr_nargs (arg0) == 1
12322 && TREE_CODE (TREE_TYPE (CALL_EXPR_ARG (arg0, 0))) == POINTER_TYPE)
12324 tree iref = build_fold_indirect_ref_loc (loc,
12325 CALL_EXPR_ARG (arg0, 0));
12326 return fold_build2_loc (loc, code, type, iref,
12327 build_int_cst (TREE_TYPE (iref), 0));
12331 /* Fold (X >> C) != 0 into X < 0 if C is one less than the width
12332 of X. Similarly fold (X >> C) == 0 into X >= 0. */
12333 if (TREE_CODE (arg0) == RSHIFT_EXPR
12334 && integer_zerop (arg1)
12335 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
12337 tree arg00 = TREE_OPERAND (arg0, 0);
12338 tree arg01 = TREE_OPERAND (arg0, 1);
12339 tree itype = TREE_TYPE (arg00);
12340 if (TREE_INT_CST_HIGH (arg01) == 0
12341 && TREE_INT_CST_LOW (arg01)
12342 == (unsigned HOST_WIDE_INT) (TYPE_PRECISION (itype) - 1))
12344 if (TYPE_UNSIGNED (itype))
12346 itype = signed_type_for (itype);
12347 arg00 = fold_convert_loc (loc, itype, arg00);
12349 return fold_build2_loc (loc, code == EQ_EXPR ? GE_EXPR : LT_EXPR,
12350 type, arg00, build_int_cst (itype, 0));
12354 /* (X ^ Y) == 0 becomes X == Y, and (X ^ Y) != 0 becomes X != Y. */
12355 if (integer_zerop (arg1)
12356 && TREE_CODE (arg0) == BIT_XOR_EXPR)
12357 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0),
12358 TREE_OPERAND (arg0, 1));
12360 /* (X ^ Y) == Y becomes X == 0. We know that Y has no side-effects. */
12361 if (TREE_CODE (arg0) == BIT_XOR_EXPR
12362 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
12363 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0),
12364 build_int_cst (TREE_TYPE (arg1), 0));
12365 /* Likewise (X ^ Y) == X becomes Y == 0. X has no side-effects. */
12366 if (TREE_CODE (arg0) == BIT_XOR_EXPR
12367 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
12368 && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
12369 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 1),
12370 build_int_cst (TREE_TYPE (arg1), 0));
12372 /* (X ^ C1) op C2 can be rewritten as X op (C1 ^ C2). */
12373 if (TREE_CODE (arg0) == BIT_XOR_EXPR
12374 && TREE_CODE (arg1) == INTEGER_CST
12375 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
12376 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0),
12377 fold_build2_loc (loc, BIT_XOR_EXPR, TREE_TYPE (arg1),
12378 TREE_OPERAND (arg0, 1), arg1));
12380 /* Fold (~X & C) == 0 into (X & C) != 0 and (~X & C) != 0 into
12381 (X & C) == 0 when C is a single bit. */
12382 if (TREE_CODE (arg0) == BIT_AND_EXPR
12383 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_NOT_EXPR
12384 && integer_zerop (arg1)
12385 && integer_pow2p (TREE_OPERAND (arg0, 1)))
12387 tem = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg0),
12388 TREE_OPERAND (TREE_OPERAND (arg0, 0), 0),
12389 TREE_OPERAND (arg0, 1));
12390 return fold_build2_loc (loc, code == EQ_EXPR ? NE_EXPR : EQ_EXPR,
12391 type, tem, arg1);
12394 /* Fold ((X & C) ^ C) eq/ne 0 into (X & C) ne/eq 0, when the
12395 constant C is a power of two, i.e. a single bit. */
12396 if (TREE_CODE (arg0) == BIT_XOR_EXPR
12397 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_AND_EXPR
12398 && integer_zerop (arg1)
12399 && integer_pow2p (TREE_OPERAND (arg0, 1))
12400 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
12401 TREE_OPERAND (arg0, 1), OEP_ONLY_CONST))
12403 tree arg00 = TREE_OPERAND (arg0, 0);
12404 return fold_build2_loc (loc, code == EQ_EXPR ? NE_EXPR : EQ_EXPR, type,
12405 arg00, build_int_cst (TREE_TYPE (arg00), 0));
12408 /* Likewise, fold ((X ^ C) & C) eq/ne 0 into (X & C) ne/eq 0,
12409 when is C is a power of two, i.e. a single bit. */
12410 if (TREE_CODE (arg0) == BIT_AND_EXPR
12411 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_XOR_EXPR
12412 && integer_zerop (arg1)
12413 && integer_pow2p (TREE_OPERAND (arg0, 1))
12414 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
12415 TREE_OPERAND (arg0, 1), OEP_ONLY_CONST))
12417 tree arg000 = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
12418 tem = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg000),
12419 arg000, TREE_OPERAND (arg0, 1));
12420 return fold_build2_loc (loc, code == EQ_EXPR ? NE_EXPR : EQ_EXPR, type,
12421 tem, build_int_cst (TREE_TYPE (tem), 0));
12424 if (integer_zerop (arg1)
12425 && tree_expr_nonzero_p (arg0))
12427 tree res = constant_boolean_node (code==NE_EXPR, type);
12428 return omit_one_operand_loc (loc, type, res, arg0);
12431 /* Fold -X op -Y as X op Y, where op is eq/ne. */
12432 if (TREE_CODE (arg0) == NEGATE_EXPR
12433 && TREE_CODE (arg1) == NEGATE_EXPR)
12434 return fold_build2_loc (loc, code, type,
12435 TREE_OPERAND (arg0, 0),
12436 TREE_OPERAND (arg1, 0));
12438 /* Fold (X & C) op (Y & C) as (X ^ Y) & C op 0", and symmetries. */
12439 if (TREE_CODE (arg0) == BIT_AND_EXPR
12440 && TREE_CODE (arg1) == BIT_AND_EXPR)
12442 tree arg00 = TREE_OPERAND (arg0, 0);
12443 tree arg01 = TREE_OPERAND (arg0, 1);
12444 tree arg10 = TREE_OPERAND (arg1, 0);
12445 tree arg11 = TREE_OPERAND (arg1, 1);
12446 tree itype = TREE_TYPE (arg0);
12448 if (operand_equal_p (arg01, arg11, 0))
12449 return fold_build2_loc (loc, code, type,
12450 fold_build2_loc (loc, BIT_AND_EXPR, itype,
12451 fold_build2_loc (loc,
12452 BIT_XOR_EXPR, itype,
12453 arg00, arg10),
12454 arg01),
12455 build_int_cst (itype, 0));
12457 if (operand_equal_p (arg01, arg10, 0))
12458 return fold_build2_loc (loc, code, type,
12459 fold_build2_loc (loc, BIT_AND_EXPR, itype,
12460 fold_build2_loc (loc,
12461 BIT_XOR_EXPR, itype,
12462 arg00, arg11),
12463 arg01),
12464 build_int_cst (itype, 0));
12466 if (operand_equal_p (arg00, arg11, 0))
12467 return fold_build2_loc (loc, code, type,
12468 fold_build2_loc (loc, BIT_AND_EXPR, itype,
12469 fold_build2_loc (loc,
12470 BIT_XOR_EXPR, itype,
12471 arg01, arg10),
12472 arg00),
12473 build_int_cst (itype, 0));
12475 if (operand_equal_p (arg00, arg10, 0))
12476 return fold_build2_loc (loc, code, type,
12477 fold_build2_loc (loc, BIT_AND_EXPR, itype,
12478 fold_build2_loc (loc,
12479 BIT_XOR_EXPR, itype,
12480 arg01, arg11),
12481 arg00),
12482 build_int_cst (itype, 0));
12485 if (TREE_CODE (arg0) == BIT_XOR_EXPR
12486 && TREE_CODE (arg1) == BIT_XOR_EXPR)
12488 tree arg00 = TREE_OPERAND (arg0, 0);
12489 tree arg01 = TREE_OPERAND (arg0, 1);
12490 tree arg10 = TREE_OPERAND (arg1, 0);
12491 tree arg11 = TREE_OPERAND (arg1, 1);
12492 tree itype = TREE_TYPE (arg0);
12494 /* Optimize (X ^ Z) op (Y ^ Z) as X op Y, and symmetries.
12495 operand_equal_p guarantees no side-effects so we don't need
12496 to use omit_one_operand on Z. */
12497 if (operand_equal_p (arg01, arg11, 0))
12498 return fold_build2_loc (loc, code, type, arg00, arg10);
12499 if (operand_equal_p (arg01, arg10, 0))
12500 return fold_build2_loc (loc, code, type, arg00, arg11);
12501 if (operand_equal_p (arg00, arg11, 0))
12502 return fold_build2_loc (loc, code, type, arg01, arg10);
12503 if (operand_equal_p (arg00, arg10, 0))
12504 return fold_build2_loc (loc, code, type, arg01, arg11);
12506 /* Optimize (X ^ C1) op (Y ^ C2) as (X ^ (C1 ^ C2)) op Y. */
12507 if (TREE_CODE (arg01) == INTEGER_CST
12508 && TREE_CODE (arg11) == INTEGER_CST)
12509 return fold_build2_loc (loc, code, type,
12510 fold_build2_loc (loc, BIT_XOR_EXPR, itype, arg00,
12511 fold_build2_loc (loc,
12512 BIT_XOR_EXPR, itype,
12513 arg01, arg11)),
12514 arg10);
12517 /* Attempt to simplify equality/inequality comparisons of complex
12518 values. Only lower the comparison if the result is known or
12519 can be simplified to a single scalar comparison. */
12520 if ((TREE_CODE (arg0) == COMPLEX_EXPR
12521 || TREE_CODE (arg0) == COMPLEX_CST)
12522 && (TREE_CODE (arg1) == COMPLEX_EXPR
12523 || TREE_CODE (arg1) == COMPLEX_CST))
12525 tree real0, imag0, real1, imag1;
12526 tree rcond, icond;
12528 if (TREE_CODE (arg0) == COMPLEX_EXPR)
12530 real0 = TREE_OPERAND (arg0, 0);
12531 imag0 = TREE_OPERAND (arg0, 1);
12533 else
12535 real0 = TREE_REALPART (arg0);
12536 imag0 = TREE_IMAGPART (arg0);
12539 if (TREE_CODE (arg1) == COMPLEX_EXPR)
12541 real1 = TREE_OPERAND (arg1, 0);
12542 imag1 = TREE_OPERAND (arg1, 1);
12544 else
12546 real1 = TREE_REALPART (arg1);
12547 imag1 = TREE_IMAGPART (arg1);
12550 rcond = fold_binary_loc (loc, code, type, real0, real1);
12551 if (rcond && TREE_CODE (rcond) == INTEGER_CST)
12553 if (integer_zerop (rcond))
12555 if (code == EQ_EXPR)
12556 return omit_two_operands_loc (loc, type, boolean_false_node,
12557 imag0, imag1);
12558 return fold_build2_loc (loc, NE_EXPR, type, imag0, imag1);
12560 else
12562 if (code == NE_EXPR)
12563 return omit_two_operands_loc (loc, type, boolean_true_node,
12564 imag0, imag1);
12565 return fold_build2_loc (loc, EQ_EXPR, type, imag0, imag1);
12569 icond = fold_binary_loc (loc, code, type, imag0, imag1);
12570 if (icond && TREE_CODE (icond) == INTEGER_CST)
12572 if (integer_zerop (icond))
12574 if (code == EQ_EXPR)
12575 return omit_two_operands_loc (loc, type, boolean_false_node,
12576 real0, real1);
12577 return fold_build2_loc (loc, NE_EXPR, type, real0, real1);
12579 else
12581 if (code == NE_EXPR)
12582 return omit_two_operands_loc (loc, type, boolean_true_node,
12583 real0, real1);
12584 return fold_build2_loc (loc, EQ_EXPR, type, real0, real1);
12589 return NULL_TREE;
12591 case LT_EXPR:
12592 case GT_EXPR:
12593 case LE_EXPR:
12594 case GE_EXPR:
12595 tem = fold_comparison (loc, code, type, op0, op1);
12596 if (tem != NULL_TREE)
12597 return tem;
12599 /* Transform comparisons of the form X +- C CMP X. */
12600 if ((TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
12601 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
12602 && ((TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
12603 && !HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0))))
12604 || (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
12605 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))))
12607 tree arg01 = TREE_OPERAND (arg0, 1);
12608 enum tree_code code0 = TREE_CODE (arg0);
12609 int is_positive;
12611 if (TREE_CODE (arg01) == REAL_CST)
12612 is_positive = REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg01)) ? -1 : 1;
12613 else
12614 is_positive = tree_int_cst_sgn (arg01);
12616 /* (X - c) > X becomes false. */
12617 if (code == GT_EXPR
12618 && ((code0 == MINUS_EXPR && is_positive >= 0)
12619 || (code0 == PLUS_EXPR && is_positive <= 0)))
12621 if (TREE_CODE (arg01) == INTEGER_CST
12622 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
12623 fold_overflow_warning (("assuming signed overflow does not "
12624 "occur when assuming that (X - c) > X "
12625 "is always false"),
12626 WARN_STRICT_OVERFLOW_ALL);
12627 return constant_boolean_node (0, type);
12630 /* Likewise (X + c) < X becomes false. */
12631 if (code == LT_EXPR
12632 && ((code0 == PLUS_EXPR && is_positive >= 0)
12633 || (code0 == MINUS_EXPR && is_positive <= 0)))
12635 if (TREE_CODE (arg01) == INTEGER_CST
12636 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
12637 fold_overflow_warning (("assuming signed overflow does not "
12638 "occur when assuming that "
12639 "(X + c) < X is always false"),
12640 WARN_STRICT_OVERFLOW_ALL);
12641 return constant_boolean_node (0, type);
12644 /* Convert (X - c) <= X to true. */
12645 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1)))
12646 && code == LE_EXPR
12647 && ((code0 == MINUS_EXPR && is_positive >= 0)
12648 || (code0 == PLUS_EXPR && is_positive <= 0)))
12650 if (TREE_CODE (arg01) == INTEGER_CST
12651 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
12652 fold_overflow_warning (("assuming signed overflow does not "
12653 "occur when assuming that "
12654 "(X - c) <= X is always true"),
12655 WARN_STRICT_OVERFLOW_ALL);
12656 return constant_boolean_node (1, type);
12659 /* Convert (X + c) >= X to true. */
12660 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1)))
12661 && code == GE_EXPR
12662 && ((code0 == PLUS_EXPR && is_positive >= 0)
12663 || (code0 == MINUS_EXPR && is_positive <= 0)))
12665 if (TREE_CODE (arg01) == INTEGER_CST
12666 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
12667 fold_overflow_warning (("assuming signed overflow does not "
12668 "occur when assuming that "
12669 "(X + c) >= X is always true"),
12670 WARN_STRICT_OVERFLOW_ALL);
12671 return constant_boolean_node (1, type);
12674 if (TREE_CODE (arg01) == INTEGER_CST)
12676 /* Convert X + c > X and X - c < X to true for integers. */
12677 if (code == GT_EXPR
12678 && ((code0 == PLUS_EXPR && is_positive > 0)
12679 || (code0 == MINUS_EXPR && is_positive < 0)))
12681 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
12682 fold_overflow_warning (("assuming signed overflow does "
12683 "not occur when assuming that "
12684 "(X + c) > X is always true"),
12685 WARN_STRICT_OVERFLOW_ALL);
12686 return constant_boolean_node (1, type);
12689 if (code == LT_EXPR
12690 && ((code0 == MINUS_EXPR && is_positive > 0)
12691 || (code0 == PLUS_EXPR && is_positive < 0)))
12693 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
12694 fold_overflow_warning (("assuming signed overflow does "
12695 "not occur when assuming that "
12696 "(X - c) < X is always true"),
12697 WARN_STRICT_OVERFLOW_ALL);
12698 return constant_boolean_node (1, type);
12701 /* Convert X + c <= X and X - c >= X to false for integers. */
12702 if (code == LE_EXPR
12703 && ((code0 == PLUS_EXPR && is_positive > 0)
12704 || (code0 == MINUS_EXPR && is_positive < 0)))
12706 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
12707 fold_overflow_warning (("assuming signed overflow does "
12708 "not occur when assuming that "
12709 "(X + c) <= X is always false"),
12710 WARN_STRICT_OVERFLOW_ALL);
12711 return constant_boolean_node (0, type);
12714 if (code == GE_EXPR
12715 && ((code0 == MINUS_EXPR && is_positive > 0)
12716 || (code0 == PLUS_EXPR && is_positive < 0)))
12718 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
12719 fold_overflow_warning (("assuming signed overflow does "
12720 "not occur when assuming that "
12721 "(X - c) >= X is always false"),
12722 WARN_STRICT_OVERFLOW_ALL);
12723 return constant_boolean_node (0, type);
12728 /* Comparisons with the highest or lowest possible integer of
12729 the specified precision will have known values. */
12731 tree arg1_type = TREE_TYPE (arg1);
12732 unsigned int width = TYPE_PRECISION (arg1_type);
12734 if (TREE_CODE (arg1) == INTEGER_CST
12735 && width <= 2 * HOST_BITS_PER_WIDE_INT
12736 && (INTEGRAL_TYPE_P (arg1_type) || POINTER_TYPE_P (arg1_type)))
12738 HOST_WIDE_INT signed_max_hi;
12739 unsigned HOST_WIDE_INT signed_max_lo;
12740 unsigned HOST_WIDE_INT max_hi, max_lo, min_hi, min_lo;
12742 if (width <= HOST_BITS_PER_WIDE_INT)
12744 signed_max_lo = ((unsigned HOST_WIDE_INT) 1 << (width - 1))
12745 - 1;
12746 signed_max_hi = 0;
12747 max_hi = 0;
12749 if (TYPE_UNSIGNED (arg1_type))
12751 max_lo = ((unsigned HOST_WIDE_INT) 2 << (width - 1)) - 1;
12752 min_lo = 0;
12753 min_hi = 0;
12755 else
12757 max_lo = signed_max_lo;
12758 min_lo = ((unsigned HOST_WIDE_INT) -1 << (width - 1));
12759 min_hi = -1;
12762 else
12764 width -= HOST_BITS_PER_WIDE_INT;
12765 signed_max_lo = -1;
12766 signed_max_hi = ((unsigned HOST_WIDE_INT) 1 << (width - 1))
12767 - 1;
12768 max_lo = -1;
12769 min_lo = 0;
12771 if (TYPE_UNSIGNED (arg1_type))
12773 max_hi = ((unsigned HOST_WIDE_INT) 2 << (width - 1)) - 1;
12774 min_hi = 0;
12776 else
12778 max_hi = signed_max_hi;
12779 min_hi = ((unsigned HOST_WIDE_INT) -1 << (width - 1));
12783 if ((unsigned HOST_WIDE_INT) TREE_INT_CST_HIGH (arg1) == max_hi
12784 && TREE_INT_CST_LOW (arg1) == max_lo)
12785 switch (code)
12787 case GT_EXPR:
12788 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
12790 case GE_EXPR:
12791 return fold_build2_loc (loc, EQ_EXPR, type, op0, op1);
12793 case LE_EXPR:
12794 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
12796 case LT_EXPR:
12797 return fold_build2_loc (loc, NE_EXPR, type, op0, op1);
12799 /* The GE_EXPR and LT_EXPR cases above are not normally
12800 reached because of previous transformations. */
12802 default:
12803 break;
12805 else if ((unsigned HOST_WIDE_INT) TREE_INT_CST_HIGH (arg1)
12806 == max_hi
12807 && TREE_INT_CST_LOW (arg1) == max_lo - 1)
12808 switch (code)
12810 case GT_EXPR:
12811 arg1 = const_binop (PLUS_EXPR, arg1,
12812 build_int_cst (TREE_TYPE (arg1), 1), 0);
12813 return fold_build2_loc (loc, EQ_EXPR, type,
12814 fold_convert_loc (loc,
12815 TREE_TYPE (arg1), arg0),
12816 arg1);
12817 case LE_EXPR:
12818 arg1 = const_binop (PLUS_EXPR, arg1,
12819 build_int_cst (TREE_TYPE (arg1), 1), 0);
12820 return fold_build2_loc (loc, NE_EXPR, type,
12821 fold_convert_loc (loc, TREE_TYPE (arg1),
12822 arg0),
12823 arg1);
12824 default:
12825 break;
12827 else if ((unsigned HOST_WIDE_INT) TREE_INT_CST_HIGH (arg1)
12828 == min_hi
12829 && TREE_INT_CST_LOW (arg1) == min_lo)
12830 switch (code)
12832 case LT_EXPR:
12833 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
12835 case LE_EXPR:
12836 return fold_build2_loc (loc, EQ_EXPR, type, op0, op1);
12838 case GE_EXPR:
12839 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
12841 case GT_EXPR:
12842 return fold_build2_loc (loc, NE_EXPR, type, op0, op1);
12844 default:
12845 break;
12847 else if ((unsigned HOST_WIDE_INT) TREE_INT_CST_HIGH (arg1)
12848 == min_hi
12849 && TREE_INT_CST_LOW (arg1) == min_lo + 1)
12850 switch (code)
12852 case GE_EXPR:
12853 arg1 = const_binop (MINUS_EXPR, arg1, integer_one_node, 0);
12854 return fold_build2_loc (loc, NE_EXPR, type,
12855 fold_convert_loc (loc,
12856 TREE_TYPE (arg1), arg0),
12857 arg1);
12858 case LT_EXPR:
12859 arg1 = const_binop (MINUS_EXPR, arg1, integer_one_node, 0);
12860 return fold_build2_loc (loc, EQ_EXPR, type,
12861 fold_convert_loc (loc, TREE_TYPE (arg1),
12862 arg0),
12863 arg1);
12864 default:
12865 break;
12868 else if (TREE_INT_CST_HIGH (arg1) == signed_max_hi
12869 && TREE_INT_CST_LOW (arg1) == signed_max_lo
12870 && TYPE_UNSIGNED (arg1_type)
12871 /* We will flip the signedness of the comparison operator
12872 associated with the mode of arg1, so the sign bit is
12873 specified by this mode. Check that arg1 is the signed
12874 max associated with this sign bit. */
12875 && width == GET_MODE_BITSIZE (TYPE_MODE (arg1_type))
12876 /* signed_type does not work on pointer types. */
12877 && INTEGRAL_TYPE_P (arg1_type))
12879 /* The following case also applies to X < signed_max+1
12880 and X >= signed_max+1 because previous transformations. */
12881 if (code == LE_EXPR || code == GT_EXPR)
12883 tree st;
12884 st = signed_type_for (TREE_TYPE (arg1));
12885 return fold_build2_loc (loc,
12886 code == LE_EXPR ? GE_EXPR : LT_EXPR,
12887 type, fold_convert_loc (loc, st, arg0),
12888 build_int_cst (st, 0));
12894 /* If we are comparing an ABS_EXPR with a constant, we can
12895 convert all the cases into explicit comparisons, but they may
12896 well not be faster than doing the ABS and one comparison.
12897 But ABS (X) <= C is a range comparison, which becomes a subtraction
12898 and a comparison, and is probably faster. */
12899 if (code == LE_EXPR
12900 && TREE_CODE (arg1) == INTEGER_CST
12901 && TREE_CODE (arg0) == ABS_EXPR
12902 && ! TREE_SIDE_EFFECTS (arg0)
12903 && (0 != (tem = negate_expr (arg1)))
12904 && TREE_CODE (tem) == INTEGER_CST
12905 && !TREE_OVERFLOW (tem))
12906 return fold_build2_loc (loc, TRUTH_ANDIF_EXPR, type,
12907 build2 (GE_EXPR, type,
12908 TREE_OPERAND (arg0, 0), tem),
12909 build2 (LE_EXPR, type,
12910 TREE_OPERAND (arg0, 0), arg1));
12912 /* Convert ABS_EXPR<x> >= 0 to true. */
12913 strict_overflow_p = false;
12914 if (code == GE_EXPR
12915 && (integer_zerop (arg1)
12916 || (! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0)))
12917 && real_zerop (arg1)))
12918 && tree_expr_nonnegative_warnv_p (arg0, &strict_overflow_p))
12920 if (strict_overflow_p)
12921 fold_overflow_warning (("assuming signed overflow does not occur "
12922 "when simplifying comparison of "
12923 "absolute value and zero"),
12924 WARN_STRICT_OVERFLOW_CONDITIONAL);
12925 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
12928 /* Convert ABS_EXPR<x> < 0 to false. */
12929 strict_overflow_p = false;
12930 if (code == LT_EXPR
12931 && (integer_zerop (arg1) || real_zerop (arg1))
12932 && tree_expr_nonnegative_warnv_p (arg0, &strict_overflow_p))
12934 if (strict_overflow_p)
12935 fold_overflow_warning (("assuming signed overflow does not occur "
12936 "when simplifying comparison of "
12937 "absolute value and zero"),
12938 WARN_STRICT_OVERFLOW_CONDITIONAL);
12939 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
12942 /* If X is unsigned, convert X < (1 << Y) into X >> Y == 0
12943 and similarly for >= into !=. */
12944 if ((code == LT_EXPR || code == GE_EXPR)
12945 && TYPE_UNSIGNED (TREE_TYPE (arg0))
12946 && TREE_CODE (arg1) == LSHIFT_EXPR
12947 && integer_onep (TREE_OPERAND (arg1, 0)))
12949 tem = build2 (code == LT_EXPR ? EQ_EXPR : NE_EXPR, type,
12950 build2 (RSHIFT_EXPR, TREE_TYPE (arg0), arg0,
12951 TREE_OPERAND (arg1, 1)),
12952 build_int_cst (TREE_TYPE (arg0), 0));
12953 goto fold_binary_exit;
12956 if ((code == LT_EXPR || code == GE_EXPR)
12957 && TYPE_UNSIGNED (TREE_TYPE (arg0))
12958 && CONVERT_EXPR_P (arg1)
12959 && TREE_CODE (TREE_OPERAND (arg1, 0)) == LSHIFT_EXPR
12960 && integer_onep (TREE_OPERAND (TREE_OPERAND (arg1, 0), 0)))
12962 tem = build2 (code == LT_EXPR ? EQ_EXPR : NE_EXPR, type,
12963 fold_convert_loc (loc, TREE_TYPE (arg0),
12964 build2 (RSHIFT_EXPR,
12965 TREE_TYPE (arg0), arg0,
12966 TREE_OPERAND (TREE_OPERAND (arg1, 0),
12967 1))),
12968 build_int_cst (TREE_TYPE (arg0), 0));
12969 goto fold_binary_exit;
12972 return NULL_TREE;
12974 case UNORDERED_EXPR:
12975 case ORDERED_EXPR:
12976 case UNLT_EXPR:
12977 case UNLE_EXPR:
12978 case UNGT_EXPR:
12979 case UNGE_EXPR:
12980 case UNEQ_EXPR:
12981 case LTGT_EXPR:
12982 if (TREE_CODE (arg0) == REAL_CST && TREE_CODE (arg1) == REAL_CST)
12984 t1 = fold_relational_const (code, type, arg0, arg1);
12985 if (t1 != NULL_TREE)
12986 return t1;
12989 /* If the first operand is NaN, the result is constant. */
12990 if (TREE_CODE (arg0) == REAL_CST
12991 && REAL_VALUE_ISNAN (TREE_REAL_CST (arg0))
12992 && (code != LTGT_EXPR || ! flag_trapping_math))
12994 t1 = (code == ORDERED_EXPR || code == LTGT_EXPR)
12995 ? integer_zero_node
12996 : integer_one_node;
12997 return omit_one_operand_loc (loc, type, t1, arg1);
13000 /* If the second operand is NaN, the result is constant. */
13001 if (TREE_CODE (arg1) == REAL_CST
13002 && REAL_VALUE_ISNAN (TREE_REAL_CST (arg1))
13003 && (code != LTGT_EXPR || ! flag_trapping_math))
13005 t1 = (code == ORDERED_EXPR || code == LTGT_EXPR)
13006 ? integer_zero_node
13007 : integer_one_node;
13008 return omit_one_operand_loc (loc, type, t1, arg0);
13011 /* Simplify unordered comparison of something with itself. */
13012 if ((code == UNLE_EXPR || code == UNGE_EXPR || code == UNEQ_EXPR)
13013 && operand_equal_p (arg0, arg1, 0))
13014 return constant_boolean_node (1, type);
13016 if (code == LTGT_EXPR
13017 && !flag_trapping_math
13018 && operand_equal_p (arg0, arg1, 0))
13019 return constant_boolean_node (0, type);
13021 /* Fold (double)float1 CMP (double)float2 into float1 CMP float2. */
13023 tree targ0 = strip_float_extensions (arg0);
13024 tree targ1 = strip_float_extensions (arg1);
13025 tree newtype = TREE_TYPE (targ0);
13027 if (TYPE_PRECISION (TREE_TYPE (targ1)) > TYPE_PRECISION (newtype))
13028 newtype = TREE_TYPE (targ1);
13030 if (TYPE_PRECISION (newtype) < TYPE_PRECISION (TREE_TYPE (arg0)))
13031 return fold_build2_loc (loc, code, type,
13032 fold_convert_loc (loc, newtype, targ0),
13033 fold_convert_loc (loc, newtype, targ1));
13036 return NULL_TREE;
13038 case COMPOUND_EXPR:
13039 /* When pedantic, a compound expression can be neither an lvalue
13040 nor an integer constant expression. */
13041 if (TREE_SIDE_EFFECTS (arg0) || TREE_CONSTANT (arg1))
13042 return NULL_TREE;
13043 /* Don't let (0, 0) be null pointer constant. */
13044 tem = integer_zerop (arg1) ? build1 (NOP_EXPR, type, arg1)
13045 : fold_convert_loc (loc, type, arg1);
13046 return pedantic_non_lvalue_loc (loc, tem);
13048 case COMPLEX_EXPR:
13049 if ((TREE_CODE (arg0) == REAL_CST
13050 && TREE_CODE (arg1) == REAL_CST)
13051 || (TREE_CODE (arg0) == INTEGER_CST
13052 && TREE_CODE (arg1) == INTEGER_CST))
13053 return build_complex (type, arg0, arg1);
13054 return NULL_TREE;
13056 case ASSERT_EXPR:
13057 /* An ASSERT_EXPR should never be passed to fold_binary. */
13058 gcc_unreachable ();
13060 default:
13061 return NULL_TREE;
13062 } /* switch (code) */
13063 fold_binary_exit:
13064 protected_set_expr_location (tem, loc);
13065 return tem;
13068 /* Callback for walk_tree, looking for LABEL_EXPR. Return *TP if it is
13069 a LABEL_EXPR; otherwise return NULL_TREE. Do not check the subtrees
13070 of GOTO_EXPR. */
13072 static tree
13073 contains_label_1 (tree *tp, int *walk_subtrees, void *data ATTRIBUTE_UNUSED)
13075 switch (TREE_CODE (*tp))
13077 case LABEL_EXPR:
13078 return *tp;
13080 case GOTO_EXPR:
13081 *walk_subtrees = 0;
13083 /* ... fall through ... */
13085 default:
13086 return NULL_TREE;
13090 /* Return whether the sub-tree ST contains a label which is accessible from
13091 outside the sub-tree. */
13093 static bool
13094 contains_label_p (tree st)
13096 return
13097 (walk_tree_without_duplicates (&st, contains_label_1 , NULL) != NULL_TREE);
13100 /* Fold a ternary expression of code CODE and type TYPE with operands
13101 OP0, OP1, and OP2. Return the folded expression if folding is
13102 successful. Otherwise, return NULL_TREE. */
13104 tree
13105 fold_ternary_loc (location_t loc, enum tree_code code, tree type,
13106 tree op0, tree op1, tree op2)
13108 tree tem;
13109 tree arg0 = NULL_TREE, arg1 = NULL_TREE;
13110 enum tree_code_class kind = TREE_CODE_CLASS (code);
13112 gcc_assert (IS_EXPR_CODE_CLASS (kind)
13113 && TREE_CODE_LENGTH (code) == 3);
13115 /* Strip any conversions that don't change the mode. This is safe
13116 for every expression, except for a comparison expression because
13117 its signedness is derived from its operands. So, in the latter
13118 case, only strip conversions that don't change the signedness.
13120 Note that this is done as an internal manipulation within the
13121 constant folder, in order to find the simplest representation of
13122 the arguments so that their form can be studied. In any cases,
13123 the appropriate type conversions should be put back in the tree
13124 that will get out of the constant folder. */
13125 if (op0)
13127 arg0 = op0;
13128 STRIP_NOPS (arg0);
13131 if (op1)
13133 arg1 = op1;
13134 STRIP_NOPS (arg1);
13137 switch (code)
13139 case COMPONENT_REF:
13140 if (TREE_CODE (arg0) == CONSTRUCTOR
13141 && ! type_contains_placeholder_p (TREE_TYPE (arg0)))
13143 unsigned HOST_WIDE_INT idx;
13144 tree field, value;
13145 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (arg0), idx, field, value)
13146 if (field == arg1)
13147 return value;
13149 return NULL_TREE;
13151 case COND_EXPR:
13152 /* Pedantic ANSI C says that a conditional expression is never an lvalue,
13153 so all simple results must be passed through pedantic_non_lvalue. */
13154 if (TREE_CODE (arg0) == INTEGER_CST)
13156 tree unused_op = integer_zerop (arg0) ? op1 : op2;
13157 tem = integer_zerop (arg0) ? op2 : op1;
13158 /* Only optimize constant conditions when the selected branch
13159 has the same type as the COND_EXPR. This avoids optimizing
13160 away "c ? x : throw", where the throw has a void type.
13161 Avoid throwing away that operand which contains label. */
13162 if ((!TREE_SIDE_EFFECTS (unused_op)
13163 || !contains_label_p (unused_op))
13164 && (! VOID_TYPE_P (TREE_TYPE (tem))
13165 || VOID_TYPE_P (type)))
13166 return pedantic_non_lvalue_loc (loc, tem);
13167 return NULL_TREE;
13169 if (operand_equal_p (arg1, op2, 0))
13170 return pedantic_omit_one_operand_loc (loc, type, arg1, arg0);
13172 /* If we have A op B ? A : C, we may be able to convert this to a
13173 simpler expression, depending on the operation and the values
13174 of B and C. Signed zeros prevent all of these transformations,
13175 for reasons given above each one.
13177 Also try swapping the arguments and inverting the conditional. */
13178 if (COMPARISON_CLASS_P (arg0)
13179 && operand_equal_for_comparison_p (TREE_OPERAND (arg0, 0),
13180 arg1, TREE_OPERAND (arg0, 1))
13181 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg1))))
13183 tem = fold_cond_expr_with_comparison (loc, type, arg0, op1, op2);
13184 if (tem)
13185 return tem;
13188 if (COMPARISON_CLASS_P (arg0)
13189 && operand_equal_for_comparison_p (TREE_OPERAND (arg0, 0),
13190 op2,
13191 TREE_OPERAND (arg0, 1))
13192 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (op2))))
13194 tem = fold_truth_not_expr (loc, arg0);
13195 if (tem && COMPARISON_CLASS_P (tem))
13197 tem = fold_cond_expr_with_comparison (loc, type, tem, op2, op1);
13198 if (tem)
13199 return tem;
13203 /* If the second operand is simpler than the third, swap them
13204 since that produces better jump optimization results. */
13205 if (truth_value_p (TREE_CODE (arg0))
13206 && tree_swap_operands_p (op1, op2, false))
13208 /* See if this can be inverted. If it can't, possibly because
13209 it was a floating-point inequality comparison, don't do
13210 anything. */
13211 tem = fold_truth_not_expr (loc, arg0);
13212 if (tem)
13213 return fold_build3_loc (loc, code, type, tem, op2, op1);
13216 /* Convert A ? 1 : 0 to simply A. */
13217 if (integer_onep (op1)
13218 && integer_zerop (op2)
13219 /* If we try to convert OP0 to our type, the
13220 call to fold will try to move the conversion inside
13221 a COND, which will recurse. In that case, the COND_EXPR
13222 is probably the best choice, so leave it alone. */
13223 && type == TREE_TYPE (arg0))
13224 return pedantic_non_lvalue_loc (loc, arg0);
13226 /* Convert A ? 0 : 1 to !A. This prefers the use of NOT_EXPR
13227 over COND_EXPR in cases such as floating point comparisons. */
13228 if (integer_zerop (op1)
13229 && integer_onep (op2)
13230 && truth_value_p (TREE_CODE (arg0)))
13231 return pedantic_non_lvalue_loc (loc,
13232 fold_convert_loc (loc, type,
13233 invert_truthvalue_loc (loc,
13234 arg0)));
13236 /* A < 0 ? <sign bit of A> : 0 is simply (A & <sign bit of A>). */
13237 if (TREE_CODE (arg0) == LT_EXPR
13238 && integer_zerop (TREE_OPERAND (arg0, 1))
13239 && integer_zerop (op2)
13240 && (tem = sign_bit_p (TREE_OPERAND (arg0, 0), arg1)))
13242 /* sign_bit_p only checks ARG1 bits within A's precision.
13243 If <sign bit of A> has wider type than A, bits outside
13244 of A's precision in <sign bit of A> need to be checked.
13245 If they are all 0, this optimization needs to be done
13246 in unsigned A's type, if they are all 1 in signed A's type,
13247 otherwise this can't be done. */
13248 if (TYPE_PRECISION (TREE_TYPE (tem))
13249 < TYPE_PRECISION (TREE_TYPE (arg1))
13250 && TYPE_PRECISION (TREE_TYPE (tem))
13251 < TYPE_PRECISION (type))
13253 unsigned HOST_WIDE_INT mask_lo;
13254 HOST_WIDE_INT mask_hi;
13255 int inner_width, outer_width;
13256 tree tem_type;
13258 inner_width = TYPE_PRECISION (TREE_TYPE (tem));
13259 outer_width = TYPE_PRECISION (TREE_TYPE (arg1));
13260 if (outer_width > TYPE_PRECISION (type))
13261 outer_width = TYPE_PRECISION (type);
13263 if (outer_width > HOST_BITS_PER_WIDE_INT)
13265 mask_hi = ((unsigned HOST_WIDE_INT) -1
13266 >> (2 * HOST_BITS_PER_WIDE_INT - outer_width));
13267 mask_lo = -1;
13269 else
13271 mask_hi = 0;
13272 mask_lo = ((unsigned HOST_WIDE_INT) -1
13273 >> (HOST_BITS_PER_WIDE_INT - outer_width));
13275 if (inner_width > HOST_BITS_PER_WIDE_INT)
13277 mask_hi &= ~((unsigned HOST_WIDE_INT) -1
13278 >> (HOST_BITS_PER_WIDE_INT - inner_width));
13279 mask_lo = 0;
13281 else
13282 mask_lo &= ~((unsigned HOST_WIDE_INT) -1
13283 >> (HOST_BITS_PER_WIDE_INT - inner_width));
13285 if ((TREE_INT_CST_HIGH (arg1) & mask_hi) == mask_hi
13286 && (TREE_INT_CST_LOW (arg1) & mask_lo) == mask_lo)
13288 tem_type = signed_type_for (TREE_TYPE (tem));
13289 tem = fold_convert_loc (loc, tem_type, tem);
13291 else if ((TREE_INT_CST_HIGH (arg1) & mask_hi) == 0
13292 && (TREE_INT_CST_LOW (arg1) & mask_lo) == 0)
13294 tem_type = unsigned_type_for (TREE_TYPE (tem));
13295 tem = fold_convert_loc (loc, tem_type, tem);
13297 else
13298 tem = NULL;
13301 if (tem)
13302 return
13303 fold_convert_loc (loc, type,
13304 fold_build2_loc (loc, BIT_AND_EXPR,
13305 TREE_TYPE (tem), tem,
13306 fold_convert_loc (loc,
13307 TREE_TYPE (tem),
13308 arg1)));
13311 /* (A >> N) & 1 ? (1 << N) : 0 is simply A & (1 << N). A & 1 was
13312 already handled above. */
13313 if (TREE_CODE (arg0) == BIT_AND_EXPR
13314 && integer_onep (TREE_OPERAND (arg0, 1))
13315 && integer_zerop (op2)
13316 && integer_pow2p (arg1))
13318 tree tem = TREE_OPERAND (arg0, 0);
13319 STRIP_NOPS (tem);
13320 if (TREE_CODE (tem) == RSHIFT_EXPR
13321 && TREE_CODE (TREE_OPERAND (tem, 1)) == INTEGER_CST
13322 && (unsigned HOST_WIDE_INT) tree_log2 (arg1) ==
13323 TREE_INT_CST_LOW (TREE_OPERAND (tem, 1)))
13324 return fold_build2_loc (loc, BIT_AND_EXPR, type,
13325 TREE_OPERAND (tem, 0), arg1);
13328 /* A & N ? N : 0 is simply A & N if N is a power of two. This
13329 is probably obsolete because the first operand should be a
13330 truth value (that's why we have the two cases above), but let's
13331 leave it in until we can confirm this for all front-ends. */
13332 if (integer_zerop (op2)
13333 && TREE_CODE (arg0) == NE_EXPR
13334 && integer_zerop (TREE_OPERAND (arg0, 1))
13335 && integer_pow2p (arg1)
13336 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_AND_EXPR
13337 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
13338 arg1, OEP_ONLY_CONST))
13339 return pedantic_non_lvalue_loc (loc,
13340 fold_convert_loc (loc, type,
13341 TREE_OPERAND (arg0, 0)));
13343 /* Convert A ? B : 0 into A && B if A and B are truth values. */
13344 if (integer_zerop (op2)
13345 && truth_value_p (TREE_CODE (arg0))
13346 && truth_value_p (TREE_CODE (arg1)))
13347 return fold_build2_loc (loc, TRUTH_ANDIF_EXPR, type,
13348 fold_convert_loc (loc, type, arg0),
13349 arg1);
13351 /* Convert A ? B : 1 into !A || B if A and B are truth values. */
13352 if (integer_onep (op2)
13353 && truth_value_p (TREE_CODE (arg0))
13354 && truth_value_p (TREE_CODE (arg1)))
13356 /* Only perform transformation if ARG0 is easily inverted. */
13357 tem = fold_truth_not_expr (loc, arg0);
13358 if (tem)
13359 return fold_build2_loc (loc, TRUTH_ORIF_EXPR, type,
13360 fold_convert_loc (loc, type, tem),
13361 arg1);
13364 /* Convert A ? 0 : B into !A && B if A and B are truth values. */
13365 if (integer_zerop (arg1)
13366 && truth_value_p (TREE_CODE (arg0))
13367 && truth_value_p (TREE_CODE (op2)))
13369 /* Only perform transformation if ARG0 is easily inverted. */
13370 tem = fold_truth_not_expr (loc, arg0);
13371 if (tem)
13372 return fold_build2_loc (loc, TRUTH_ANDIF_EXPR, type,
13373 fold_convert_loc (loc, type, tem),
13374 op2);
13377 /* Convert A ? 1 : B into A || B if A and B are truth values. */
13378 if (integer_onep (arg1)
13379 && truth_value_p (TREE_CODE (arg0))
13380 && truth_value_p (TREE_CODE (op2)))
13381 return fold_build2_loc (loc, TRUTH_ORIF_EXPR, type,
13382 fold_convert_loc (loc, type, arg0),
13383 op2);
13385 return NULL_TREE;
13387 case CALL_EXPR:
13388 /* CALL_EXPRs used to be ternary exprs. Catch any mistaken uses
13389 of fold_ternary on them. */
13390 gcc_unreachable ();
13392 case BIT_FIELD_REF:
13393 if ((TREE_CODE (arg0) == VECTOR_CST
13394 || (TREE_CODE (arg0) == CONSTRUCTOR && TREE_CONSTANT (arg0)))
13395 && type == TREE_TYPE (TREE_TYPE (arg0)))
13397 unsigned HOST_WIDE_INT width = tree_low_cst (arg1, 1);
13398 unsigned HOST_WIDE_INT idx = tree_low_cst (op2, 1);
13400 if (width != 0
13401 && simple_cst_equal (arg1, TYPE_SIZE (type)) == 1
13402 && (idx % width) == 0
13403 && (idx = idx / width)
13404 < TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0)))
13406 tree elements = NULL_TREE;
13408 if (TREE_CODE (arg0) == VECTOR_CST)
13409 elements = TREE_VECTOR_CST_ELTS (arg0);
13410 else
13412 unsigned HOST_WIDE_INT idx;
13413 tree value;
13415 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (arg0), idx, value)
13416 elements = tree_cons (NULL_TREE, value, elements);
13418 while (idx-- > 0 && elements)
13419 elements = TREE_CHAIN (elements);
13420 if (elements)
13421 return TREE_VALUE (elements);
13422 else
13423 return fold_convert_loc (loc, type, integer_zero_node);
13427 /* A bit-field-ref that referenced the full argument can be stripped. */
13428 if (INTEGRAL_TYPE_P (TREE_TYPE (arg0))
13429 && TYPE_PRECISION (TREE_TYPE (arg0)) == tree_low_cst (arg1, 1)
13430 && integer_zerop (op2))
13431 return fold_convert_loc (loc, type, arg0);
13433 return NULL_TREE;
13435 default:
13436 return NULL_TREE;
13437 } /* switch (code) */
13440 /* Perform constant folding and related simplification of EXPR.
13441 The related simplifications include x*1 => x, x*0 => 0, etc.,
13442 and application of the associative law.
13443 NOP_EXPR conversions may be removed freely (as long as we
13444 are careful not to change the type of the overall expression).
13445 We cannot simplify through a CONVERT_EXPR, FIX_EXPR or FLOAT_EXPR,
13446 but we can constant-fold them if they have constant operands. */
13448 #ifdef ENABLE_FOLD_CHECKING
13449 # define fold(x) fold_1 (x)
13450 static tree fold_1 (tree);
13451 static
13452 #endif
13453 tree
13454 fold (tree expr)
13456 const tree t = expr;
13457 enum tree_code code = TREE_CODE (t);
13458 enum tree_code_class kind = TREE_CODE_CLASS (code);
13459 tree tem;
13460 location_t loc = EXPR_LOCATION (expr);
13462 /* Return right away if a constant. */
13463 if (kind == tcc_constant)
13464 return t;
13466 /* CALL_EXPR-like objects with variable numbers of operands are
13467 treated specially. */
13468 if (kind == tcc_vl_exp)
13470 if (code == CALL_EXPR)
13472 tem = fold_call_expr (loc, expr, false);
13473 return tem ? tem : expr;
13475 return expr;
13478 if (IS_EXPR_CODE_CLASS (kind))
13480 tree type = TREE_TYPE (t);
13481 tree op0, op1, op2;
13483 switch (TREE_CODE_LENGTH (code))
13485 case 1:
13486 op0 = TREE_OPERAND (t, 0);
13487 tem = fold_unary_loc (loc, code, type, op0);
13488 return tem ? tem : expr;
13489 case 2:
13490 op0 = TREE_OPERAND (t, 0);
13491 op1 = TREE_OPERAND (t, 1);
13492 tem = fold_binary_loc (loc, code, type, op0, op1);
13493 return tem ? tem : expr;
13494 case 3:
13495 op0 = TREE_OPERAND (t, 0);
13496 op1 = TREE_OPERAND (t, 1);
13497 op2 = TREE_OPERAND (t, 2);
13498 tem = fold_ternary_loc (loc, code, type, op0, op1, op2);
13499 return tem ? tem : expr;
13500 default:
13501 break;
13505 switch (code)
13507 case ARRAY_REF:
13509 tree op0 = TREE_OPERAND (t, 0);
13510 tree op1 = TREE_OPERAND (t, 1);
13512 if (TREE_CODE (op1) == INTEGER_CST
13513 && TREE_CODE (op0) == CONSTRUCTOR
13514 && ! type_contains_placeholder_p (TREE_TYPE (op0)))
13516 VEC(constructor_elt,gc) *elts = CONSTRUCTOR_ELTS (op0);
13517 unsigned HOST_WIDE_INT end = VEC_length (constructor_elt, elts);
13518 unsigned HOST_WIDE_INT begin = 0;
13520 /* Find a matching index by means of a binary search. */
13521 while (begin != end)
13523 unsigned HOST_WIDE_INT middle = (begin + end) / 2;
13524 tree index = VEC_index (constructor_elt, elts, middle)->index;
13526 if (TREE_CODE (index) == INTEGER_CST
13527 && tree_int_cst_lt (index, op1))
13528 begin = middle + 1;
13529 else if (TREE_CODE (index) == INTEGER_CST
13530 && tree_int_cst_lt (op1, index))
13531 end = middle;
13532 else if (TREE_CODE (index) == RANGE_EXPR
13533 && tree_int_cst_lt (TREE_OPERAND (index, 1), op1))
13534 begin = middle + 1;
13535 else if (TREE_CODE (index) == RANGE_EXPR
13536 && tree_int_cst_lt (op1, TREE_OPERAND (index, 0)))
13537 end = middle;
13538 else
13539 return VEC_index (constructor_elt, elts, middle)->value;
13543 return t;
13546 case CONST_DECL:
13547 return fold (DECL_INITIAL (t));
13549 default:
13550 return t;
13551 } /* switch (code) */
13554 #ifdef ENABLE_FOLD_CHECKING
13555 #undef fold
13557 static void fold_checksum_tree (const_tree, struct md5_ctx *, htab_t);
13558 static void fold_check_failed (const_tree, const_tree);
13559 void print_fold_checksum (const_tree);
13561 /* When --enable-checking=fold, compute a digest of expr before
13562 and after actual fold call to see if fold did not accidentally
13563 change original expr. */
13565 tree
13566 fold (tree expr)
13568 tree ret;
13569 struct md5_ctx ctx;
13570 unsigned char checksum_before[16], checksum_after[16];
13571 htab_t ht;
13573 ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
13574 md5_init_ctx (&ctx);
13575 fold_checksum_tree (expr, &ctx, ht);
13576 md5_finish_ctx (&ctx, checksum_before);
13577 htab_empty (ht);
13579 ret = fold_1 (expr);
13581 md5_init_ctx (&ctx);
13582 fold_checksum_tree (expr, &ctx, ht);
13583 md5_finish_ctx (&ctx, checksum_after);
13584 htab_delete (ht);
13586 if (memcmp (checksum_before, checksum_after, 16))
13587 fold_check_failed (expr, ret);
13589 return ret;
13592 void
13593 print_fold_checksum (const_tree expr)
13595 struct md5_ctx ctx;
13596 unsigned char checksum[16], cnt;
13597 htab_t ht;
13599 ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
13600 md5_init_ctx (&ctx);
13601 fold_checksum_tree (expr, &ctx, ht);
13602 md5_finish_ctx (&ctx, checksum);
13603 htab_delete (ht);
13604 for (cnt = 0; cnt < 16; ++cnt)
13605 fprintf (stderr, "%02x", checksum[cnt]);
13606 putc ('\n', stderr);
13609 static void
13610 fold_check_failed (const_tree expr ATTRIBUTE_UNUSED, const_tree ret ATTRIBUTE_UNUSED)
13612 internal_error ("fold check: original tree changed by fold");
13615 static void
13616 fold_checksum_tree (const_tree expr, struct md5_ctx *ctx, htab_t ht)
13618 const void **slot;
13619 enum tree_code code;
13620 union tree_node buf;
13621 int i, len;
13623 recursive_label:
13625 gcc_assert ((sizeof (struct tree_exp) + 5 * sizeof (tree)
13626 <= sizeof (struct tree_function_decl))
13627 && sizeof (struct tree_type) <= sizeof (struct tree_function_decl));
13628 if (expr == NULL)
13629 return;
13630 slot = (const void **) htab_find_slot (ht, expr, INSERT);
13631 if (*slot != NULL)
13632 return;
13633 *slot = expr;
13634 code = TREE_CODE (expr);
13635 if (TREE_CODE_CLASS (code) == tcc_declaration
13636 && DECL_ASSEMBLER_NAME_SET_P (expr))
13638 /* Allow DECL_ASSEMBLER_NAME to be modified. */
13639 memcpy ((char *) &buf, expr, tree_size (expr));
13640 SET_DECL_ASSEMBLER_NAME ((tree)&buf, NULL);
13641 expr = (tree) &buf;
13643 else if (TREE_CODE_CLASS (code) == tcc_type
13644 && (TYPE_POINTER_TO (expr)
13645 || TYPE_REFERENCE_TO (expr)
13646 || TYPE_CACHED_VALUES_P (expr)
13647 || TYPE_CONTAINS_PLACEHOLDER_INTERNAL (expr)
13648 || TYPE_NEXT_VARIANT (expr)))
13650 /* Allow these fields to be modified. */
13651 tree tmp;
13652 memcpy ((char *) &buf, expr, tree_size (expr));
13653 expr = tmp = (tree) &buf;
13654 TYPE_CONTAINS_PLACEHOLDER_INTERNAL (tmp) = 0;
13655 TYPE_POINTER_TO (tmp) = NULL;
13656 TYPE_REFERENCE_TO (tmp) = NULL;
13657 TYPE_NEXT_VARIANT (tmp) = NULL;
13658 if (TYPE_CACHED_VALUES_P (tmp))
13660 TYPE_CACHED_VALUES_P (tmp) = 0;
13661 TYPE_CACHED_VALUES (tmp) = NULL;
13664 md5_process_bytes (expr, tree_size (expr), ctx);
13665 fold_checksum_tree (TREE_TYPE (expr), ctx, ht);
13666 if (TREE_CODE_CLASS (code) != tcc_type
13667 && TREE_CODE_CLASS (code) != tcc_declaration
13668 && code != TREE_LIST
13669 && code != SSA_NAME)
13670 fold_checksum_tree (TREE_CHAIN (expr), ctx, ht);
13671 switch (TREE_CODE_CLASS (code))
13673 case tcc_constant:
13674 switch (code)
13676 case STRING_CST:
13677 md5_process_bytes (TREE_STRING_POINTER (expr),
13678 TREE_STRING_LENGTH (expr), ctx);
13679 break;
13680 case COMPLEX_CST:
13681 fold_checksum_tree (TREE_REALPART (expr), ctx, ht);
13682 fold_checksum_tree (TREE_IMAGPART (expr), ctx, ht);
13683 break;
13684 case VECTOR_CST:
13685 fold_checksum_tree (TREE_VECTOR_CST_ELTS (expr), ctx, ht);
13686 break;
13687 default:
13688 break;
13690 break;
13691 case tcc_exceptional:
13692 switch (code)
13694 case TREE_LIST:
13695 fold_checksum_tree (TREE_PURPOSE (expr), ctx, ht);
13696 fold_checksum_tree (TREE_VALUE (expr), ctx, ht);
13697 expr = TREE_CHAIN (expr);
13698 goto recursive_label;
13699 break;
13700 case TREE_VEC:
13701 for (i = 0; i < TREE_VEC_LENGTH (expr); ++i)
13702 fold_checksum_tree (TREE_VEC_ELT (expr, i), ctx, ht);
13703 break;
13704 default:
13705 break;
13707 break;
13708 case tcc_expression:
13709 case tcc_reference:
13710 case tcc_comparison:
13711 case tcc_unary:
13712 case tcc_binary:
13713 case tcc_statement:
13714 case tcc_vl_exp:
13715 len = TREE_OPERAND_LENGTH (expr);
13716 for (i = 0; i < len; ++i)
13717 fold_checksum_tree (TREE_OPERAND (expr, i), ctx, ht);
13718 break;
13719 case tcc_declaration:
13720 fold_checksum_tree (DECL_NAME (expr), ctx, ht);
13721 fold_checksum_tree (DECL_CONTEXT (expr), ctx, ht);
13722 if (CODE_CONTAINS_STRUCT (TREE_CODE (expr), TS_DECL_COMMON))
13724 fold_checksum_tree (DECL_SIZE (expr), ctx, ht);
13725 fold_checksum_tree (DECL_SIZE_UNIT (expr), ctx, ht);
13726 fold_checksum_tree (DECL_INITIAL (expr), ctx, ht);
13727 fold_checksum_tree (DECL_ABSTRACT_ORIGIN (expr), ctx, ht);
13728 fold_checksum_tree (DECL_ATTRIBUTES (expr), ctx, ht);
13730 if (CODE_CONTAINS_STRUCT (TREE_CODE (expr), TS_DECL_WITH_VIS))
13731 fold_checksum_tree (DECL_SECTION_NAME (expr), ctx, ht);
13733 if (CODE_CONTAINS_STRUCT (TREE_CODE (expr), TS_DECL_NON_COMMON))
13735 fold_checksum_tree (DECL_VINDEX (expr), ctx, ht);
13736 fold_checksum_tree (DECL_RESULT_FLD (expr), ctx, ht);
13737 fold_checksum_tree (DECL_ARGUMENT_FLD (expr), ctx, ht);
13739 break;
13740 case tcc_type:
13741 if (TREE_CODE (expr) == ENUMERAL_TYPE)
13742 fold_checksum_tree (TYPE_VALUES (expr), ctx, ht);
13743 fold_checksum_tree (TYPE_SIZE (expr), ctx, ht);
13744 fold_checksum_tree (TYPE_SIZE_UNIT (expr), ctx, ht);
13745 fold_checksum_tree (TYPE_ATTRIBUTES (expr), ctx, ht);
13746 fold_checksum_tree (TYPE_NAME (expr), ctx, ht);
13747 if (INTEGRAL_TYPE_P (expr)
13748 || SCALAR_FLOAT_TYPE_P (expr))
13750 fold_checksum_tree (TYPE_MIN_VALUE (expr), ctx, ht);
13751 fold_checksum_tree (TYPE_MAX_VALUE (expr), ctx, ht);
13753 fold_checksum_tree (TYPE_MAIN_VARIANT (expr), ctx, ht);
13754 if (TREE_CODE (expr) == RECORD_TYPE
13755 || TREE_CODE (expr) == UNION_TYPE
13756 || TREE_CODE (expr) == QUAL_UNION_TYPE)
13757 fold_checksum_tree (TYPE_BINFO (expr), ctx, ht);
13758 fold_checksum_tree (TYPE_CONTEXT (expr), ctx, ht);
13759 break;
13760 default:
13761 break;
13765 /* Helper function for outputting the checksum of a tree T. When
13766 debugging with gdb, you can "define mynext" to be "next" followed
13767 by "call debug_fold_checksum (op0)", then just trace down till the
13768 outputs differ. */
13770 void
13771 debug_fold_checksum (const_tree t)
13773 int i;
13774 unsigned char checksum[16];
13775 struct md5_ctx ctx;
13776 htab_t ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
13778 md5_init_ctx (&ctx);
13779 fold_checksum_tree (t, &ctx, ht);
13780 md5_finish_ctx (&ctx, checksum);
13781 htab_empty (ht);
13783 for (i = 0; i < 16; i++)
13784 fprintf (stderr, "%d ", checksum[i]);
13786 fprintf (stderr, "\n");
13789 #endif
13791 /* Fold a unary tree expression with code CODE of type TYPE with an
13792 operand OP0. LOC is the location of the resulting expression.
13793 Return a folded expression if successful. Otherwise, return a tree
13794 expression with code CODE of type TYPE with an operand OP0. */
13796 tree
13797 fold_build1_stat_loc (location_t loc,
13798 enum tree_code code, tree type, tree op0 MEM_STAT_DECL)
13800 tree tem;
13801 #ifdef ENABLE_FOLD_CHECKING
13802 unsigned char checksum_before[16], checksum_after[16];
13803 struct md5_ctx ctx;
13804 htab_t ht;
13806 ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
13807 md5_init_ctx (&ctx);
13808 fold_checksum_tree (op0, &ctx, ht);
13809 md5_finish_ctx (&ctx, checksum_before);
13810 htab_empty (ht);
13811 #endif
13813 tem = fold_unary_loc (loc, code, type, op0);
13814 if (!tem)
13816 tem = build1_stat (code, type, op0 PASS_MEM_STAT);
13817 SET_EXPR_LOCATION (tem, loc);
13820 #ifdef ENABLE_FOLD_CHECKING
13821 md5_init_ctx (&ctx);
13822 fold_checksum_tree (op0, &ctx, ht);
13823 md5_finish_ctx (&ctx, checksum_after);
13824 htab_delete (ht);
13826 if (memcmp (checksum_before, checksum_after, 16))
13827 fold_check_failed (op0, tem);
13828 #endif
13829 return tem;
13832 /* Fold a binary tree expression with code CODE of type TYPE with
13833 operands OP0 and OP1. LOC is the location of the resulting
13834 expression. Return a folded expression if successful. Otherwise,
13835 return a tree expression with code CODE of type TYPE with operands
13836 OP0 and OP1. */
13838 tree
13839 fold_build2_stat_loc (location_t loc,
13840 enum tree_code code, tree type, tree op0, tree op1
13841 MEM_STAT_DECL)
13843 tree tem;
13844 #ifdef ENABLE_FOLD_CHECKING
13845 unsigned char checksum_before_op0[16],
13846 checksum_before_op1[16],
13847 checksum_after_op0[16],
13848 checksum_after_op1[16];
13849 struct md5_ctx ctx;
13850 htab_t ht;
13852 ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
13853 md5_init_ctx (&ctx);
13854 fold_checksum_tree (op0, &ctx, ht);
13855 md5_finish_ctx (&ctx, checksum_before_op0);
13856 htab_empty (ht);
13858 md5_init_ctx (&ctx);
13859 fold_checksum_tree (op1, &ctx, ht);
13860 md5_finish_ctx (&ctx, checksum_before_op1);
13861 htab_empty (ht);
13862 #endif
13864 tem = fold_binary_loc (loc, code, type, op0, op1);
13865 if (!tem)
13867 tem = build2_stat (code, type, op0, op1 PASS_MEM_STAT);
13868 SET_EXPR_LOCATION (tem, loc);
13871 #ifdef ENABLE_FOLD_CHECKING
13872 md5_init_ctx (&ctx);
13873 fold_checksum_tree (op0, &ctx, ht);
13874 md5_finish_ctx (&ctx, checksum_after_op0);
13875 htab_empty (ht);
13877 if (memcmp (checksum_before_op0, checksum_after_op0, 16))
13878 fold_check_failed (op0, tem);
13880 md5_init_ctx (&ctx);
13881 fold_checksum_tree (op1, &ctx, ht);
13882 md5_finish_ctx (&ctx, checksum_after_op1);
13883 htab_delete (ht);
13885 if (memcmp (checksum_before_op1, checksum_after_op1, 16))
13886 fold_check_failed (op1, tem);
13887 #endif
13888 return tem;
13891 /* Fold a ternary tree expression with code CODE of type TYPE with
13892 operands OP0, OP1, and OP2. Return a folded expression if
13893 successful. Otherwise, return a tree expression with code CODE of
13894 type TYPE with operands OP0, OP1, and OP2. */
13896 tree
13897 fold_build3_stat_loc (location_t loc, enum tree_code code, tree type,
13898 tree op0, tree op1, tree op2 MEM_STAT_DECL)
13900 tree tem;
13901 #ifdef ENABLE_FOLD_CHECKING
13902 unsigned char checksum_before_op0[16],
13903 checksum_before_op1[16],
13904 checksum_before_op2[16],
13905 checksum_after_op0[16],
13906 checksum_after_op1[16],
13907 checksum_after_op2[16];
13908 struct md5_ctx ctx;
13909 htab_t ht;
13911 ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
13912 md5_init_ctx (&ctx);
13913 fold_checksum_tree (op0, &ctx, ht);
13914 md5_finish_ctx (&ctx, checksum_before_op0);
13915 htab_empty (ht);
13917 md5_init_ctx (&ctx);
13918 fold_checksum_tree (op1, &ctx, ht);
13919 md5_finish_ctx (&ctx, checksum_before_op1);
13920 htab_empty (ht);
13922 md5_init_ctx (&ctx);
13923 fold_checksum_tree (op2, &ctx, ht);
13924 md5_finish_ctx (&ctx, checksum_before_op2);
13925 htab_empty (ht);
13926 #endif
13928 gcc_assert (TREE_CODE_CLASS (code) != tcc_vl_exp);
13929 tem = fold_ternary_loc (loc, code, type, op0, op1, op2);
13930 if (!tem)
13932 tem = build3_stat (code, type, op0, op1, op2 PASS_MEM_STAT);
13933 SET_EXPR_LOCATION (tem, loc);
13936 #ifdef ENABLE_FOLD_CHECKING
13937 md5_init_ctx (&ctx);
13938 fold_checksum_tree (op0, &ctx, ht);
13939 md5_finish_ctx (&ctx, checksum_after_op0);
13940 htab_empty (ht);
13942 if (memcmp (checksum_before_op0, checksum_after_op0, 16))
13943 fold_check_failed (op0, tem);
13945 md5_init_ctx (&ctx);
13946 fold_checksum_tree (op1, &ctx, ht);
13947 md5_finish_ctx (&ctx, checksum_after_op1);
13948 htab_empty (ht);
13950 if (memcmp (checksum_before_op1, checksum_after_op1, 16))
13951 fold_check_failed (op1, tem);
13953 md5_init_ctx (&ctx);
13954 fold_checksum_tree (op2, &ctx, ht);
13955 md5_finish_ctx (&ctx, checksum_after_op2);
13956 htab_delete (ht);
13958 if (memcmp (checksum_before_op2, checksum_after_op2, 16))
13959 fold_check_failed (op2, tem);
13960 #endif
13961 return tem;
13964 /* Fold a CALL_EXPR expression of type TYPE with operands FN and NARGS
13965 arguments in ARGARRAY, and a null static chain.
13966 Return a folded expression if successful. Otherwise, return a CALL_EXPR
13967 of type TYPE from the given operands as constructed by build_call_array. */
13969 tree
13970 fold_build_call_array_loc (location_t loc, tree type, tree fn,
13971 int nargs, tree *argarray)
13973 tree tem;
13974 #ifdef ENABLE_FOLD_CHECKING
13975 unsigned char checksum_before_fn[16],
13976 checksum_before_arglist[16],
13977 checksum_after_fn[16],
13978 checksum_after_arglist[16];
13979 struct md5_ctx ctx;
13980 htab_t ht;
13981 int i;
13983 ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
13984 md5_init_ctx (&ctx);
13985 fold_checksum_tree (fn, &ctx, ht);
13986 md5_finish_ctx (&ctx, checksum_before_fn);
13987 htab_empty (ht);
13989 md5_init_ctx (&ctx);
13990 for (i = 0; i < nargs; i++)
13991 fold_checksum_tree (argarray[i], &ctx, ht);
13992 md5_finish_ctx (&ctx, checksum_before_arglist);
13993 htab_empty (ht);
13994 #endif
13996 tem = fold_builtin_call_array (loc, type, fn, nargs, argarray);
13998 #ifdef ENABLE_FOLD_CHECKING
13999 md5_init_ctx (&ctx);
14000 fold_checksum_tree (fn, &ctx, ht);
14001 md5_finish_ctx (&ctx, checksum_after_fn);
14002 htab_empty (ht);
14004 if (memcmp (checksum_before_fn, checksum_after_fn, 16))
14005 fold_check_failed (fn, tem);
14007 md5_init_ctx (&ctx);
14008 for (i = 0; i < nargs; i++)
14009 fold_checksum_tree (argarray[i], &ctx, ht);
14010 md5_finish_ctx (&ctx, checksum_after_arglist);
14011 htab_delete (ht);
14013 if (memcmp (checksum_before_arglist, checksum_after_arglist, 16))
14014 fold_check_failed (NULL_TREE, tem);
14015 #endif
14016 return tem;
14019 /* Perform constant folding and related simplification of initializer
14020 expression EXPR. These behave identically to "fold_buildN" but ignore
14021 potential run-time traps and exceptions that fold must preserve. */
14023 #define START_FOLD_INIT \
14024 int saved_signaling_nans = flag_signaling_nans;\
14025 int saved_trapping_math = flag_trapping_math;\
14026 int saved_rounding_math = flag_rounding_math;\
14027 int saved_trapv = flag_trapv;\
14028 int saved_folding_initializer = folding_initializer;\
14029 flag_signaling_nans = 0;\
14030 flag_trapping_math = 0;\
14031 flag_rounding_math = 0;\
14032 flag_trapv = 0;\
14033 folding_initializer = 1;
14035 #define END_FOLD_INIT \
14036 flag_signaling_nans = saved_signaling_nans;\
14037 flag_trapping_math = saved_trapping_math;\
14038 flag_rounding_math = saved_rounding_math;\
14039 flag_trapv = saved_trapv;\
14040 folding_initializer = saved_folding_initializer;
14042 tree
14043 fold_build1_initializer_loc (location_t loc, enum tree_code code,
14044 tree type, tree op)
14046 tree result;
14047 START_FOLD_INIT;
14049 result = fold_build1_loc (loc, code, type, op);
14051 END_FOLD_INIT;
14052 return result;
14055 tree
14056 fold_build2_initializer_loc (location_t loc, enum tree_code code,
14057 tree type, tree op0, tree op1)
14059 tree result;
14060 START_FOLD_INIT;
14062 result = fold_build2_loc (loc, code, type, op0, op1);
14064 END_FOLD_INIT;
14065 return result;
14068 tree
14069 fold_build3_initializer_loc (location_t loc, enum tree_code code,
14070 tree type, tree op0, tree op1, tree op2)
14072 tree result;
14073 START_FOLD_INIT;
14075 result = fold_build3_loc (loc, code, type, op0, op1, op2);
14077 END_FOLD_INIT;
14078 return result;
14081 tree
14082 fold_build_call_array_initializer_loc (location_t loc, tree type, tree fn,
14083 int nargs, tree *argarray)
14085 tree result;
14086 START_FOLD_INIT;
14088 result = fold_build_call_array_loc (loc, type, fn, nargs, argarray);
14090 END_FOLD_INIT;
14091 return result;
14094 #undef START_FOLD_INIT
14095 #undef END_FOLD_INIT
14097 /* Determine if first argument is a multiple of second argument. Return 0 if
14098 it is not, or we cannot easily determined it to be.
14100 An example of the sort of thing we care about (at this point; this routine
14101 could surely be made more general, and expanded to do what the *_DIV_EXPR's
14102 fold cases do now) is discovering that
14104 SAVE_EXPR (I) * SAVE_EXPR (J * 8)
14106 is a multiple of
14108 SAVE_EXPR (J * 8)
14110 when we know that the two SAVE_EXPR (J * 8) nodes are the same node.
14112 This code also handles discovering that
14114 SAVE_EXPR (I) * SAVE_EXPR (J * 8)
14116 is a multiple of 8 so we don't have to worry about dealing with a
14117 possible remainder.
14119 Note that we *look* inside a SAVE_EXPR only to determine how it was
14120 calculated; it is not safe for fold to do much of anything else with the
14121 internals of a SAVE_EXPR, since it cannot know when it will be evaluated
14122 at run time. For example, the latter example above *cannot* be implemented
14123 as SAVE_EXPR (I) * J or any variant thereof, since the value of J at
14124 evaluation time of the original SAVE_EXPR is not necessarily the same at
14125 the time the new expression is evaluated. The only optimization of this
14126 sort that would be valid is changing
14128 SAVE_EXPR (I) * SAVE_EXPR (SAVE_EXPR (J) * 8)
14130 divided by 8 to
14132 SAVE_EXPR (I) * SAVE_EXPR (J)
14134 (where the same SAVE_EXPR (J) is used in the original and the
14135 transformed version). */
14138 multiple_of_p (tree type, const_tree top, const_tree bottom)
14140 if (operand_equal_p (top, bottom, 0))
14141 return 1;
14143 if (TREE_CODE (type) != INTEGER_TYPE)
14144 return 0;
14146 switch (TREE_CODE (top))
14148 case BIT_AND_EXPR:
14149 /* Bitwise and provides a power of two multiple. If the mask is
14150 a multiple of BOTTOM then TOP is a multiple of BOTTOM. */
14151 if (!integer_pow2p (bottom))
14152 return 0;
14153 /* FALLTHRU */
14155 case MULT_EXPR:
14156 return (multiple_of_p (type, TREE_OPERAND (top, 0), bottom)
14157 || multiple_of_p (type, TREE_OPERAND (top, 1), bottom));
14159 case PLUS_EXPR:
14160 case MINUS_EXPR:
14161 return (multiple_of_p (type, TREE_OPERAND (top, 0), bottom)
14162 && multiple_of_p (type, TREE_OPERAND (top, 1), bottom));
14164 case LSHIFT_EXPR:
14165 if (TREE_CODE (TREE_OPERAND (top, 1)) == INTEGER_CST)
14167 tree op1, t1;
14169 op1 = TREE_OPERAND (top, 1);
14170 /* const_binop may not detect overflow correctly,
14171 so check for it explicitly here. */
14172 if (TYPE_PRECISION (TREE_TYPE (size_one_node))
14173 > TREE_INT_CST_LOW (op1)
14174 && TREE_INT_CST_HIGH (op1) == 0
14175 && 0 != (t1 = fold_convert (type,
14176 const_binop (LSHIFT_EXPR,
14177 size_one_node,
14178 op1, 0)))
14179 && !TREE_OVERFLOW (t1))
14180 return multiple_of_p (type, t1, bottom);
14182 return 0;
14184 case NOP_EXPR:
14185 /* Can't handle conversions from non-integral or wider integral type. */
14186 if ((TREE_CODE (TREE_TYPE (TREE_OPERAND (top, 0))) != INTEGER_TYPE)
14187 || (TYPE_PRECISION (type)
14188 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (top, 0)))))
14189 return 0;
14191 /* .. fall through ... */
14193 case SAVE_EXPR:
14194 return multiple_of_p (type, TREE_OPERAND (top, 0), bottom);
14196 case COND_EXPR:
14197 return (multiple_of_p (type, TREE_OPERAND (top, 1), bottom)
14198 && multiple_of_p (type, TREE_OPERAND (top, 2), bottom));
14200 case INTEGER_CST:
14201 if (TREE_CODE (bottom) != INTEGER_CST
14202 || integer_zerop (bottom)
14203 || (TYPE_UNSIGNED (type)
14204 && (tree_int_cst_sgn (top) < 0
14205 || tree_int_cst_sgn (bottom) < 0)))
14206 return 0;
14207 return integer_zerop (int_const_binop (TRUNC_MOD_EXPR,
14208 top, bottom, 0));
14210 default:
14211 return 0;
14215 /* Return true if CODE or TYPE is known to be non-negative. */
14217 static bool
14218 tree_simple_nonnegative_warnv_p (enum tree_code code, tree type)
14220 if ((TYPE_PRECISION (type) != 1 || TYPE_UNSIGNED (type))
14221 && truth_value_p (code))
14222 /* Truth values evaluate to 0 or 1, which is nonnegative unless we
14223 have a signed:1 type (where the value is -1 and 0). */
14224 return true;
14225 return false;
14228 /* Return true if (CODE OP0) is known to be non-negative. If the return
14229 value is based on the assumption that signed overflow is undefined,
14230 set *STRICT_OVERFLOW_P to true; otherwise, don't change
14231 *STRICT_OVERFLOW_P. */
14233 bool
14234 tree_unary_nonnegative_warnv_p (enum tree_code code, tree type, tree op0,
14235 bool *strict_overflow_p)
14237 if (TYPE_UNSIGNED (type))
14238 return true;
14240 switch (code)
14242 case ABS_EXPR:
14243 /* We can't return 1 if flag_wrapv is set because
14244 ABS_EXPR<INT_MIN> = INT_MIN. */
14245 if (!INTEGRAL_TYPE_P (type))
14246 return true;
14247 if (TYPE_OVERFLOW_UNDEFINED (type))
14249 *strict_overflow_p = true;
14250 return true;
14252 break;
14254 case NON_LVALUE_EXPR:
14255 case FLOAT_EXPR:
14256 case FIX_TRUNC_EXPR:
14257 return tree_expr_nonnegative_warnv_p (op0,
14258 strict_overflow_p);
14260 case NOP_EXPR:
14262 tree inner_type = TREE_TYPE (op0);
14263 tree outer_type = type;
14265 if (TREE_CODE (outer_type) == REAL_TYPE)
14267 if (TREE_CODE (inner_type) == REAL_TYPE)
14268 return tree_expr_nonnegative_warnv_p (op0,
14269 strict_overflow_p);
14270 if (TREE_CODE (inner_type) == INTEGER_TYPE)
14272 if (TYPE_UNSIGNED (inner_type))
14273 return true;
14274 return tree_expr_nonnegative_warnv_p (op0,
14275 strict_overflow_p);
14278 else if (TREE_CODE (outer_type) == INTEGER_TYPE)
14280 if (TREE_CODE (inner_type) == REAL_TYPE)
14281 return tree_expr_nonnegative_warnv_p (op0,
14282 strict_overflow_p);
14283 if (TREE_CODE (inner_type) == INTEGER_TYPE)
14284 return TYPE_PRECISION (inner_type) < TYPE_PRECISION (outer_type)
14285 && TYPE_UNSIGNED (inner_type);
14288 break;
14290 default:
14291 return tree_simple_nonnegative_warnv_p (code, type);
14294 /* We don't know sign of `t', so be conservative and return false. */
14295 return false;
14298 /* Return true if (CODE OP0 OP1) is known to be non-negative. If the return
14299 value is based on the assumption that signed overflow is undefined,
14300 set *STRICT_OVERFLOW_P to true; otherwise, don't change
14301 *STRICT_OVERFLOW_P. */
14303 bool
14304 tree_binary_nonnegative_warnv_p (enum tree_code code, tree type, tree op0,
14305 tree op1, bool *strict_overflow_p)
14307 if (TYPE_UNSIGNED (type))
14308 return true;
14310 switch (code)
14312 case POINTER_PLUS_EXPR:
14313 case PLUS_EXPR:
14314 if (FLOAT_TYPE_P (type))
14315 return (tree_expr_nonnegative_warnv_p (op0,
14316 strict_overflow_p)
14317 && tree_expr_nonnegative_warnv_p (op1,
14318 strict_overflow_p));
14320 /* zero_extend(x) + zero_extend(y) is non-negative if x and y are
14321 both unsigned and at least 2 bits shorter than the result. */
14322 if (TREE_CODE (type) == INTEGER_TYPE
14323 && TREE_CODE (op0) == NOP_EXPR
14324 && TREE_CODE (op1) == NOP_EXPR)
14326 tree inner1 = TREE_TYPE (TREE_OPERAND (op0, 0));
14327 tree inner2 = TREE_TYPE (TREE_OPERAND (op1, 0));
14328 if (TREE_CODE (inner1) == INTEGER_TYPE && TYPE_UNSIGNED (inner1)
14329 && TREE_CODE (inner2) == INTEGER_TYPE && TYPE_UNSIGNED (inner2))
14331 unsigned int prec = MAX (TYPE_PRECISION (inner1),
14332 TYPE_PRECISION (inner2)) + 1;
14333 return prec < TYPE_PRECISION (type);
14336 break;
14338 case MULT_EXPR:
14339 if (FLOAT_TYPE_P (type))
14341 /* x * x for floating point x is always non-negative. */
14342 if (operand_equal_p (op0, op1, 0))
14343 return true;
14344 return (tree_expr_nonnegative_warnv_p (op0,
14345 strict_overflow_p)
14346 && tree_expr_nonnegative_warnv_p (op1,
14347 strict_overflow_p));
14350 /* zero_extend(x) * zero_extend(y) is non-negative if x and y are
14351 both unsigned and their total bits is shorter than the result. */
14352 if (TREE_CODE (type) == INTEGER_TYPE
14353 && (TREE_CODE (op0) == NOP_EXPR || TREE_CODE (op0) == INTEGER_CST)
14354 && (TREE_CODE (op1) == NOP_EXPR || TREE_CODE (op1) == INTEGER_CST))
14356 tree inner0 = (TREE_CODE (op0) == NOP_EXPR)
14357 ? TREE_TYPE (TREE_OPERAND (op0, 0))
14358 : TREE_TYPE (op0);
14359 tree inner1 = (TREE_CODE (op1) == NOP_EXPR)
14360 ? TREE_TYPE (TREE_OPERAND (op1, 0))
14361 : TREE_TYPE (op1);
14363 bool unsigned0 = TYPE_UNSIGNED (inner0);
14364 bool unsigned1 = TYPE_UNSIGNED (inner1);
14366 if (TREE_CODE (op0) == INTEGER_CST)
14367 unsigned0 = unsigned0 || tree_int_cst_sgn (op0) >= 0;
14369 if (TREE_CODE (op1) == INTEGER_CST)
14370 unsigned1 = unsigned1 || tree_int_cst_sgn (op1) >= 0;
14372 if (TREE_CODE (inner0) == INTEGER_TYPE && unsigned0
14373 && TREE_CODE (inner1) == INTEGER_TYPE && unsigned1)
14375 unsigned int precision0 = (TREE_CODE (op0) == INTEGER_CST)
14376 ? tree_int_cst_min_precision (op0, /*unsignedp=*/true)
14377 : TYPE_PRECISION (inner0);
14379 unsigned int precision1 = (TREE_CODE (op1) == INTEGER_CST)
14380 ? tree_int_cst_min_precision (op1, /*unsignedp=*/true)
14381 : TYPE_PRECISION (inner1);
14383 return precision0 + precision1 < TYPE_PRECISION (type);
14386 return false;
14388 case BIT_AND_EXPR:
14389 case MAX_EXPR:
14390 return (tree_expr_nonnegative_warnv_p (op0,
14391 strict_overflow_p)
14392 || tree_expr_nonnegative_warnv_p (op1,
14393 strict_overflow_p));
14395 case BIT_IOR_EXPR:
14396 case BIT_XOR_EXPR:
14397 case MIN_EXPR:
14398 case RDIV_EXPR:
14399 case TRUNC_DIV_EXPR:
14400 case CEIL_DIV_EXPR:
14401 case FLOOR_DIV_EXPR:
14402 case ROUND_DIV_EXPR:
14403 return (tree_expr_nonnegative_warnv_p (op0,
14404 strict_overflow_p)
14405 && tree_expr_nonnegative_warnv_p (op1,
14406 strict_overflow_p));
14408 case TRUNC_MOD_EXPR:
14409 case CEIL_MOD_EXPR:
14410 case FLOOR_MOD_EXPR:
14411 case ROUND_MOD_EXPR:
14412 return tree_expr_nonnegative_warnv_p (op0,
14413 strict_overflow_p);
14414 default:
14415 return tree_simple_nonnegative_warnv_p (code, type);
14418 /* We don't know sign of `t', so be conservative and return false. */
14419 return false;
14422 /* Return true if T is known to be non-negative. If the return
14423 value is based on the assumption that signed overflow is undefined,
14424 set *STRICT_OVERFLOW_P to true; otherwise, don't change
14425 *STRICT_OVERFLOW_P. */
14427 bool
14428 tree_single_nonnegative_warnv_p (tree t, bool *strict_overflow_p)
14430 if (TYPE_UNSIGNED (TREE_TYPE (t)))
14431 return true;
14433 switch (TREE_CODE (t))
14435 case INTEGER_CST:
14436 return tree_int_cst_sgn (t) >= 0;
14438 case REAL_CST:
14439 return ! REAL_VALUE_NEGATIVE (TREE_REAL_CST (t));
14441 case FIXED_CST:
14442 return ! FIXED_VALUE_NEGATIVE (TREE_FIXED_CST (t));
14444 case COND_EXPR:
14445 return (tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 1),
14446 strict_overflow_p)
14447 && tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 2),
14448 strict_overflow_p));
14449 default:
14450 return tree_simple_nonnegative_warnv_p (TREE_CODE (t),
14451 TREE_TYPE (t));
14453 /* We don't know sign of `t', so be conservative and return false. */
14454 return false;
14457 /* Return true if T is known to be non-negative. If the return
14458 value is based on the assumption that signed overflow is undefined,
14459 set *STRICT_OVERFLOW_P to true; otherwise, don't change
14460 *STRICT_OVERFLOW_P. */
14462 bool
14463 tree_call_nonnegative_warnv_p (tree type, tree fndecl,
14464 tree arg0, tree arg1, bool *strict_overflow_p)
14466 if (fndecl && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL)
14467 switch (DECL_FUNCTION_CODE (fndecl))
14469 CASE_FLT_FN (BUILT_IN_ACOS):
14470 CASE_FLT_FN (BUILT_IN_ACOSH):
14471 CASE_FLT_FN (BUILT_IN_CABS):
14472 CASE_FLT_FN (BUILT_IN_COSH):
14473 CASE_FLT_FN (BUILT_IN_ERFC):
14474 CASE_FLT_FN (BUILT_IN_EXP):
14475 CASE_FLT_FN (BUILT_IN_EXP10):
14476 CASE_FLT_FN (BUILT_IN_EXP2):
14477 CASE_FLT_FN (BUILT_IN_FABS):
14478 CASE_FLT_FN (BUILT_IN_FDIM):
14479 CASE_FLT_FN (BUILT_IN_HYPOT):
14480 CASE_FLT_FN (BUILT_IN_POW10):
14481 CASE_INT_FN (BUILT_IN_FFS):
14482 CASE_INT_FN (BUILT_IN_PARITY):
14483 CASE_INT_FN (BUILT_IN_POPCOUNT):
14484 case BUILT_IN_BSWAP32:
14485 case BUILT_IN_BSWAP64:
14486 /* Always true. */
14487 return true;
14489 CASE_FLT_FN (BUILT_IN_SQRT):
14490 /* sqrt(-0.0) is -0.0. */
14491 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (type)))
14492 return true;
14493 return tree_expr_nonnegative_warnv_p (arg0,
14494 strict_overflow_p);
14496 CASE_FLT_FN (BUILT_IN_ASINH):
14497 CASE_FLT_FN (BUILT_IN_ATAN):
14498 CASE_FLT_FN (BUILT_IN_ATANH):
14499 CASE_FLT_FN (BUILT_IN_CBRT):
14500 CASE_FLT_FN (BUILT_IN_CEIL):
14501 CASE_FLT_FN (BUILT_IN_ERF):
14502 CASE_FLT_FN (BUILT_IN_EXPM1):
14503 CASE_FLT_FN (BUILT_IN_FLOOR):
14504 CASE_FLT_FN (BUILT_IN_FMOD):
14505 CASE_FLT_FN (BUILT_IN_FREXP):
14506 CASE_FLT_FN (BUILT_IN_LCEIL):
14507 CASE_FLT_FN (BUILT_IN_LDEXP):
14508 CASE_FLT_FN (BUILT_IN_LFLOOR):
14509 CASE_FLT_FN (BUILT_IN_LLCEIL):
14510 CASE_FLT_FN (BUILT_IN_LLFLOOR):
14511 CASE_FLT_FN (BUILT_IN_LLRINT):
14512 CASE_FLT_FN (BUILT_IN_LLROUND):
14513 CASE_FLT_FN (BUILT_IN_LRINT):
14514 CASE_FLT_FN (BUILT_IN_LROUND):
14515 CASE_FLT_FN (BUILT_IN_MODF):
14516 CASE_FLT_FN (BUILT_IN_NEARBYINT):
14517 CASE_FLT_FN (BUILT_IN_RINT):
14518 CASE_FLT_FN (BUILT_IN_ROUND):
14519 CASE_FLT_FN (BUILT_IN_SCALB):
14520 CASE_FLT_FN (BUILT_IN_SCALBLN):
14521 CASE_FLT_FN (BUILT_IN_SCALBN):
14522 CASE_FLT_FN (BUILT_IN_SIGNBIT):
14523 CASE_FLT_FN (BUILT_IN_SIGNIFICAND):
14524 CASE_FLT_FN (BUILT_IN_SINH):
14525 CASE_FLT_FN (BUILT_IN_TANH):
14526 CASE_FLT_FN (BUILT_IN_TRUNC):
14527 /* True if the 1st argument is nonnegative. */
14528 return tree_expr_nonnegative_warnv_p (arg0,
14529 strict_overflow_p);
14531 CASE_FLT_FN (BUILT_IN_FMAX):
14532 /* True if the 1st OR 2nd arguments are nonnegative. */
14533 return (tree_expr_nonnegative_warnv_p (arg0,
14534 strict_overflow_p)
14535 || (tree_expr_nonnegative_warnv_p (arg1,
14536 strict_overflow_p)));
14538 CASE_FLT_FN (BUILT_IN_FMIN):
14539 /* True if the 1st AND 2nd arguments are nonnegative. */
14540 return (tree_expr_nonnegative_warnv_p (arg0,
14541 strict_overflow_p)
14542 && (tree_expr_nonnegative_warnv_p (arg1,
14543 strict_overflow_p)));
14545 CASE_FLT_FN (BUILT_IN_COPYSIGN):
14546 /* True if the 2nd argument is nonnegative. */
14547 return tree_expr_nonnegative_warnv_p (arg1,
14548 strict_overflow_p);
14550 CASE_FLT_FN (BUILT_IN_POWI):
14551 /* True if the 1st argument is nonnegative or the second
14552 argument is an even integer. */
14553 if (TREE_CODE (arg1) == INTEGER_CST
14554 && (TREE_INT_CST_LOW (arg1) & 1) == 0)
14555 return true;
14556 return tree_expr_nonnegative_warnv_p (arg0,
14557 strict_overflow_p);
14559 CASE_FLT_FN (BUILT_IN_POW):
14560 /* True if the 1st argument is nonnegative or the second
14561 argument is an even integer valued real. */
14562 if (TREE_CODE (arg1) == REAL_CST)
14564 REAL_VALUE_TYPE c;
14565 HOST_WIDE_INT n;
14567 c = TREE_REAL_CST (arg1);
14568 n = real_to_integer (&c);
14569 if ((n & 1) == 0)
14571 REAL_VALUE_TYPE cint;
14572 real_from_integer (&cint, VOIDmode, n,
14573 n < 0 ? -1 : 0, 0);
14574 if (real_identical (&c, &cint))
14575 return true;
14578 return tree_expr_nonnegative_warnv_p (arg0,
14579 strict_overflow_p);
14581 default:
14582 break;
14584 return tree_simple_nonnegative_warnv_p (CALL_EXPR,
14585 type);
14588 /* Return true if T is known to be non-negative. If the return
14589 value is based on the assumption that signed overflow is undefined,
14590 set *STRICT_OVERFLOW_P to true; otherwise, don't change
14591 *STRICT_OVERFLOW_P. */
14593 bool
14594 tree_invalid_nonnegative_warnv_p (tree t, bool *strict_overflow_p)
14596 enum tree_code code = TREE_CODE (t);
14597 if (TYPE_UNSIGNED (TREE_TYPE (t)))
14598 return true;
14600 switch (code)
14602 case TARGET_EXPR:
14604 tree temp = TARGET_EXPR_SLOT (t);
14605 t = TARGET_EXPR_INITIAL (t);
14607 /* If the initializer is non-void, then it's a normal expression
14608 that will be assigned to the slot. */
14609 if (!VOID_TYPE_P (t))
14610 return tree_expr_nonnegative_warnv_p (t, strict_overflow_p);
14612 /* Otherwise, the initializer sets the slot in some way. One common
14613 way is an assignment statement at the end of the initializer. */
14614 while (1)
14616 if (TREE_CODE (t) == BIND_EXPR)
14617 t = expr_last (BIND_EXPR_BODY (t));
14618 else if (TREE_CODE (t) == TRY_FINALLY_EXPR
14619 || TREE_CODE (t) == TRY_CATCH_EXPR)
14620 t = expr_last (TREE_OPERAND (t, 0));
14621 else if (TREE_CODE (t) == STATEMENT_LIST)
14622 t = expr_last (t);
14623 else
14624 break;
14626 if (TREE_CODE (t) == MODIFY_EXPR
14627 && TREE_OPERAND (t, 0) == temp)
14628 return tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 1),
14629 strict_overflow_p);
14631 return false;
14634 case CALL_EXPR:
14636 tree arg0 = call_expr_nargs (t) > 0 ? CALL_EXPR_ARG (t, 0) : NULL_TREE;
14637 tree arg1 = call_expr_nargs (t) > 1 ? CALL_EXPR_ARG (t, 1) : NULL_TREE;
14639 return tree_call_nonnegative_warnv_p (TREE_TYPE (t),
14640 get_callee_fndecl (t),
14641 arg0,
14642 arg1,
14643 strict_overflow_p);
14645 case COMPOUND_EXPR:
14646 case MODIFY_EXPR:
14647 return tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 1),
14648 strict_overflow_p);
14649 case BIND_EXPR:
14650 return tree_expr_nonnegative_warnv_p (expr_last (TREE_OPERAND (t, 1)),
14651 strict_overflow_p);
14652 case SAVE_EXPR:
14653 return tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 0),
14654 strict_overflow_p);
14656 default:
14657 return tree_simple_nonnegative_warnv_p (TREE_CODE (t),
14658 TREE_TYPE (t));
14661 /* We don't know sign of `t', so be conservative and return false. */
14662 return false;
14665 /* Return true if T is known to be non-negative. If the return
14666 value is based on the assumption that signed overflow is undefined,
14667 set *STRICT_OVERFLOW_P to true; otherwise, don't change
14668 *STRICT_OVERFLOW_P. */
14670 bool
14671 tree_expr_nonnegative_warnv_p (tree t, bool *strict_overflow_p)
14673 enum tree_code code;
14674 if (t == error_mark_node)
14675 return false;
14677 code = TREE_CODE (t);
14678 switch (TREE_CODE_CLASS (code))
14680 case tcc_binary:
14681 case tcc_comparison:
14682 return tree_binary_nonnegative_warnv_p (TREE_CODE (t),
14683 TREE_TYPE (t),
14684 TREE_OPERAND (t, 0),
14685 TREE_OPERAND (t, 1),
14686 strict_overflow_p);
14688 case tcc_unary:
14689 return tree_unary_nonnegative_warnv_p (TREE_CODE (t),
14690 TREE_TYPE (t),
14691 TREE_OPERAND (t, 0),
14692 strict_overflow_p);
14694 case tcc_constant:
14695 case tcc_declaration:
14696 case tcc_reference:
14697 return tree_single_nonnegative_warnv_p (t, strict_overflow_p);
14699 default:
14700 break;
14703 switch (code)
14705 case TRUTH_AND_EXPR:
14706 case TRUTH_OR_EXPR:
14707 case TRUTH_XOR_EXPR:
14708 return tree_binary_nonnegative_warnv_p (TREE_CODE (t),
14709 TREE_TYPE (t),
14710 TREE_OPERAND (t, 0),
14711 TREE_OPERAND (t, 1),
14712 strict_overflow_p);
14713 case TRUTH_NOT_EXPR:
14714 return tree_unary_nonnegative_warnv_p (TREE_CODE (t),
14715 TREE_TYPE (t),
14716 TREE_OPERAND (t, 0),
14717 strict_overflow_p);
14719 case COND_EXPR:
14720 case CONSTRUCTOR:
14721 case OBJ_TYPE_REF:
14722 case ASSERT_EXPR:
14723 case ADDR_EXPR:
14724 case WITH_SIZE_EXPR:
14725 case SSA_NAME:
14726 return tree_single_nonnegative_warnv_p (t, strict_overflow_p);
14728 default:
14729 return tree_invalid_nonnegative_warnv_p (t, strict_overflow_p);
14733 /* Return true if `t' is known to be non-negative. Handle warnings
14734 about undefined signed overflow. */
14736 bool
14737 tree_expr_nonnegative_p (tree t)
14739 bool ret, strict_overflow_p;
14741 strict_overflow_p = false;
14742 ret = tree_expr_nonnegative_warnv_p (t, &strict_overflow_p);
14743 if (strict_overflow_p)
14744 fold_overflow_warning (("assuming signed overflow does not occur when "
14745 "determining that expression is always "
14746 "non-negative"),
14747 WARN_STRICT_OVERFLOW_MISC);
14748 return ret;
14752 /* Return true when (CODE OP0) is an address and is known to be nonzero.
14753 For floating point we further ensure that T is not denormal.
14754 Similar logic is present in nonzero_address in rtlanal.h.
14756 If the return value is based on the assumption that signed overflow
14757 is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
14758 change *STRICT_OVERFLOW_P. */
14760 bool
14761 tree_unary_nonzero_warnv_p (enum tree_code code, tree type, tree op0,
14762 bool *strict_overflow_p)
14764 switch (code)
14766 case ABS_EXPR:
14767 return tree_expr_nonzero_warnv_p (op0,
14768 strict_overflow_p);
14770 case NOP_EXPR:
14772 tree inner_type = TREE_TYPE (op0);
14773 tree outer_type = type;
14775 return (TYPE_PRECISION (outer_type) >= TYPE_PRECISION (inner_type)
14776 && tree_expr_nonzero_warnv_p (op0,
14777 strict_overflow_p));
14779 break;
14781 case NON_LVALUE_EXPR:
14782 return tree_expr_nonzero_warnv_p (op0,
14783 strict_overflow_p);
14785 default:
14786 break;
14789 return false;
14792 /* Return true when (CODE OP0 OP1) is an address and is known to be nonzero.
14793 For floating point we further ensure that T is not denormal.
14794 Similar logic is present in nonzero_address in rtlanal.h.
14796 If the return value is based on the assumption that signed overflow
14797 is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
14798 change *STRICT_OVERFLOW_P. */
14800 bool
14801 tree_binary_nonzero_warnv_p (enum tree_code code,
14802 tree type,
14803 tree op0,
14804 tree op1, bool *strict_overflow_p)
14806 bool sub_strict_overflow_p;
14807 switch (code)
14809 case POINTER_PLUS_EXPR:
14810 case PLUS_EXPR:
14811 if (TYPE_OVERFLOW_UNDEFINED (type))
14813 /* With the presence of negative values it is hard
14814 to say something. */
14815 sub_strict_overflow_p = false;
14816 if (!tree_expr_nonnegative_warnv_p (op0,
14817 &sub_strict_overflow_p)
14818 || !tree_expr_nonnegative_warnv_p (op1,
14819 &sub_strict_overflow_p))
14820 return false;
14821 /* One of operands must be positive and the other non-negative. */
14822 /* We don't set *STRICT_OVERFLOW_P here: even if this value
14823 overflows, on a twos-complement machine the sum of two
14824 nonnegative numbers can never be zero. */
14825 return (tree_expr_nonzero_warnv_p (op0,
14826 strict_overflow_p)
14827 || tree_expr_nonzero_warnv_p (op1,
14828 strict_overflow_p));
14830 break;
14832 case MULT_EXPR:
14833 if (TYPE_OVERFLOW_UNDEFINED (type))
14835 if (tree_expr_nonzero_warnv_p (op0,
14836 strict_overflow_p)
14837 && tree_expr_nonzero_warnv_p (op1,
14838 strict_overflow_p))
14840 *strict_overflow_p = true;
14841 return true;
14844 break;
14846 case MIN_EXPR:
14847 sub_strict_overflow_p = false;
14848 if (tree_expr_nonzero_warnv_p (op0,
14849 &sub_strict_overflow_p)
14850 && tree_expr_nonzero_warnv_p (op1,
14851 &sub_strict_overflow_p))
14853 if (sub_strict_overflow_p)
14854 *strict_overflow_p = true;
14856 break;
14858 case MAX_EXPR:
14859 sub_strict_overflow_p = false;
14860 if (tree_expr_nonzero_warnv_p (op0,
14861 &sub_strict_overflow_p))
14863 if (sub_strict_overflow_p)
14864 *strict_overflow_p = true;
14866 /* When both operands are nonzero, then MAX must be too. */
14867 if (tree_expr_nonzero_warnv_p (op1,
14868 strict_overflow_p))
14869 return true;
14871 /* MAX where operand 0 is positive is positive. */
14872 return tree_expr_nonnegative_warnv_p (op0,
14873 strict_overflow_p);
14875 /* MAX where operand 1 is positive is positive. */
14876 else if (tree_expr_nonzero_warnv_p (op1,
14877 &sub_strict_overflow_p)
14878 && tree_expr_nonnegative_warnv_p (op1,
14879 &sub_strict_overflow_p))
14881 if (sub_strict_overflow_p)
14882 *strict_overflow_p = true;
14883 return true;
14885 break;
14887 case BIT_IOR_EXPR:
14888 return (tree_expr_nonzero_warnv_p (op1,
14889 strict_overflow_p)
14890 || tree_expr_nonzero_warnv_p (op0,
14891 strict_overflow_p));
14893 default:
14894 break;
14897 return false;
14900 /* Return true when T is an address and is known to be nonzero.
14901 For floating point we further ensure that T is not denormal.
14902 Similar logic is present in nonzero_address in rtlanal.h.
14904 If the return value is based on the assumption that signed overflow
14905 is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
14906 change *STRICT_OVERFLOW_P. */
14908 bool
14909 tree_single_nonzero_warnv_p (tree t, bool *strict_overflow_p)
14911 bool sub_strict_overflow_p;
14912 switch (TREE_CODE (t))
14914 case INTEGER_CST:
14915 return !integer_zerop (t);
14917 case ADDR_EXPR:
14919 tree base = TREE_OPERAND (t, 0);
14920 if (!DECL_P (base))
14921 base = get_base_address (base);
14923 if (!base)
14924 return false;
14926 /* Weak declarations may link to NULL. Other things may also be NULL
14927 so protect with -fdelete-null-pointer-checks; but not variables
14928 allocated on the stack. */
14929 if (DECL_P (base)
14930 && (flag_delete_null_pointer_checks
14931 || (DECL_CONTEXT (base)
14932 && TREE_CODE (DECL_CONTEXT (base)) == FUNCTION_DECL
14933 && auto_var_in_fn_p (base, DECL_CONTEXT (base)))))
14934 return !VAR_OR_FUNCTION_DECL_P (base) || !DECL_WEAK (base);
14936 /* Constants are never weak. */
14937 if (CONSTANT_CLASS_P (base))
14938 return true;
14940 return false;
14943 case COND_EXPR:
14944 sub_strict_overflow_p = false;
14945 if (tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 1),
14946 &sub_strict_overflow_p)
14947 && tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 2),
14948 &sub_strict_overflow_p))
14950 if (sub_strict_overflow_p)
14951 *strict_overflow_p = true;
14952 return true;
14954 break;
14956 default:
14957 break;
14959 return false;
14962 /* Return true when T is an address and is known to be nonzero.
14963 For floating point we further ensure that T is not denormal.
14964 Similar logic is present in nonzero_address in rtlanal.h.
14966 If the return value is based on the assumption that signed overflow
14967 is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
14968 change *STRICT_OVERFLOW_P. */
14970 bool
14971 tree_expr_nonzero_warnv_p (tree t, bool *strict_overflow_p)
14973 tree type = TREE_TYPE (t);
14974 enum tree_code code;
14976 /* Doing something useful for floating point would need more work. */
14977 if (!INTEGRAL_TYPE_P (type) && !POINTER_TYPE_P (type))
14978 return false;
14980 code = TREE_CODE (t);
14981 switch (TREE_CODE_CLASS (code))
14983 case tcc_unary:
14984 return tree_unary_nonzero_warnv_p (code, type, TREE_OPERAND (t, 0),
14985 strict_overflow_p);
14986 case tcc_binary:
14987 case tcc_comparison:
14988 return tree_binary_nonzero_warnv_p (code, type,
14989 TREE_OPERAND (t, 0),
14990 TREE_OPERAND (t, 1),
14991 strict_overflow_p);
14992 case tcc_constant:
14993 case tcc_declaration:
14994 case tcc_reference:
14995 return tree_single_nonzero_warnv_p (t, strict_overflow_p);
14997 default:
14998 break;
15001 switch (code)
15003 case TRUTH_NOT_EXPR:
15004 return tree_unary_nonzero_warnv_p (code, type, TREE_OPERAND (t, 0),
15005 strict_overflow_p);
15007 case TRUTH_AND_EXPR:
15008 case TRUTH_OR_EXPR:
15009 case TRUTH_XOR_EXPR:
15010 return tree_binary_nonzero_warnv_p (code, type,
15011 TREE_OPERAND (t, 0),
15012 TREE_OPERAND (t, 1),
15013 strict_overflow_p);
15015 case COND_EXPR:
15016 case CONSTRUCTOR:
15017 case OBJ_TYPE_REF:
15018 case ASSERT_EXPR:
15019 case ADDR_EXPR:
15020 case WITH_SIZE_EXPR:
15021 case SSA_NAME:
15022 return tree_single_nonzero_warnv_p (t, strict_overflow_p);
15024 case COMPOUND_EXPR:
15025 case MODIFY_EXPR:
15026 case BIND_EXPR:
15027 return tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 1),
15028 strict_overflow_p);
15030 case SAVE_EXPR:
15031 return tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 0),
15032 strict_overflow_p);
15034 case CALL_EXPR:
15035 return alloca_call_p (t);
15037 default:
15038 break;
15040 return false;
15043 /* Return true when T is an address and is known to be nonzero.
15044 Handle warnings about undefined signed overflow. */
15046 bool
15047 tree_expr_nonzero_p (tree t)
15049 bool ret, strict_overflow_p;
15051 strict_overflow_p = false;
15052 ret = tree_expr_nonzero_warnv_p (t, &strict_overflow_p);
15053 if (strict_overflow_p)
15054 fold_overflow_warning (("assuming signed overflow does not occur when "
15055 "determining that expression is always "
15056 "non-zero"),
15057 WARN_STRICT_OVERFLOW_MISC);
15058 return ret;
15061 /* Given the components of a binary expression CODE, TYPE, OP0 and OP1,
15062 attempt to fold the expression to a constant without modifying TYPE,
15063 OP0 or OP1.
15065 If the expression could be simplified to a constant, then return
15066 the constant. If the expression would not be simplified to a
15067 constant, then return NULL_TREE. */
15069 tree
15070 fold_binary_to_constant (enum tree_code code, tree type, tree op0, tree op1)
15072 tree tem = fold_binary (code, type, op0, op1);
15073 return (tem && TREE_CONSTANT (tem)) ? tem : NULL_TREE;
15076 /* Given the components of a unary expression CODE, TYPE and OP0,
15077 attempt to fold the expression to a constant without modifying
15078 TYPE or OP0.
15080 If the expression could be simplified to a constant, then return
15081 the constant. If the expression would not be simplified to a
15082 constant, then return NULL_TREE. */
15084 tree
15085 fold_unary_to_constant (enum tree_code code, tree type, tree op0)
15087 tree tem = fold_unary (code, type, op0);
15088 return (tem && TREE_CONSTANT (tem)) ? tem : NULL_TREE;
15091 /* If EXP represents referencing an element in a constant string
15092 (either via pointer arithmetic or array indexing), return the
15093 tree representing the value accessed, otherwise return NULL. */
15095 tree
15096 fold_read_from_constant_string (tree exp)
15098 if ((TREE_CODE (exp) == INDIRECT_REF
15099 || TREE_CODE (exp) == ARRAY_REF)
15100 && TREE_CODE (TREE_TYPE (exp)) == INTEGER_TYPE)
15102 tree exp1 = TREE_OPERAND (exp, 0);
15103 tree index;
15104 tree string;
15105 location_t loc = EXPR_LOCATION (exp);
15107 if (TREE_CODE (exp) == INDIRECT_REF)
15108 string = string_constant (exp1, &index);
15109 else
15111 tree low_bound = array_ref_low_bound (exp);
15112 index = fold_convert_loc (loc, sizetype, TREE_OPERAND (exp, 1));
15114 /* Optimize the special-case of a zero lower bound.
15116 We convert the low_bound to sizetype to avoid some problems
15117 with constant folding. (E.g. suppose the lower bound is 1,
15118 and its mode is QI. Without the conversion,l (ARRAY
15119 +(INDEX-(unsigned char)1)) becomes ((ARRAY+(-(unsigned char)1))
15120 +INDEX), which becomes (ARRAY+255+INDEX). Oops!) */
15121 if (! integer_zerop (low_bound))
15122 index = size_diffop_loc (loc, index,
15123 fold_convert_loc (loc, sizetype, low_bound));
15125 string = exp1;
15128 if (string
15129 && TYPE_MODE (TREE_TYPE (exp)) == TYPE_MODE (TREE_TYPE (TREE_TYPE (string)))
15130 && TREE_CODE (string) == STRING_CST
15131 && TREE_CODE (index) == INTEGER_CST
15132 && compare_tree_int (index, TREE_STRING_LENGTH (string)) < 0
15133 && (GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_TYPE (string))))
15134 == MODE_INT)
15135 && (GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (TREE_TYPE (string)))) == 1))
15136 return build_int_cst_type (TREE_TYPE (exp),
15137 (TREE_STRING_POINTER (string)
15138 [TREE_INT_CST_LOW (index)]));
15140 return NULL;
15143 /* Return the tree for neg (ARG0) when ARG0 is known to be either
15144 an integer constant, real, or fixed-point constant.
15146 TYPE is the type of the result. */
15148 static tree
15149 fold_negate_const (tree arg0, tree type)
15151 tree t = NULL_TREE;
15153 switch (TREE_CODE (arg0))
15155 case INTEGER_CST:
15157 unsigned HOST_WIDE_INT low;
15158 HOST_WIDE_INT high;
15159 int overflow = neg_double (TREE_INT_CST_LOW (arg0),
15160 TREE_INT_CST_HIGH (arg0),
15161 &low, &high);
15162 t = force_fit_type_double (type, low, high, 1,
15163 (overflow | TREE_OVERFLOW (arg0))
15164 && !TYPE_UNSIGNED (type));
15165 break;
15168 case REAL_CST:
15169 t = build_real (type, REAL_VALUE_NEGATE (TREE_REAL_CST (arg0)));
15170 break;
15172 case FIXED_CST:
15174 FIXED_VALUE_TYPE f;
15175 bool overflow_p = fixed_arithmetic (&f, NEGATE_EXPR,
15176 &(TREE_FIXED_CST (arg0)), NULL,
15177 TYPE_SATURATING (type));
15178 t = build_fixed (type, f);
15179 /* Propagate overflow flags. */
15180 if (overflow_p | TREE_OVERFLOW (arg0))
15181 TREE_OVERFLOW (t) = 1;
15182 break;
15185 default:
15186 gcc_unreachable ();
15189 return t;
15192 /* Return the tree for abs (ARG0) when ARG0 is known to be either
15193 an integer constant or real constant.
15195 TYPE is the type of the result. */
15197 tree
15198 fold_abs_const (tree arg0, tree type)
15200 tree t = NULL_TREE;
15202 switch (TREE_CODE (arg0))
15204 case INTEGER_CST:
15205 /* If the value is unsigned, then the absolute value is
15206 the same as the ordinary value. */
15207 if (TYPE_UNSIGNED (type))
15208 t = arg0;
15209 /* Similarly, if the value is non-negative. */
15210 else if (INT_CST_LT (integer_minus_one_node, arg0))
15211 t = arg0;
15212 /* If the value is negative, then the absolute value is
15213 its negation. */
15214 else
15216 unsigned HOST_WIDE_INT low;
15217 HOST_WIDE_INT high;
15218 int overflow = neg_double (TREE_INT_CST_LOW (arg0),
15219 TREE_INT_CST_HIGH (arg0),
15220 &low, &high);
15221 t = force_fit_type_double (type, low, high, -1,
15222 overflow | TREE_OVERFLOW (arg0));
15224 break;
15226 case REAL_CST:
15227 if (REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg0)))
15228 t = build_real (type, REAL_VALUE_NEGATE (TREE_REAL_CST (arg0)));
15229 else
15230 t = arg0;
15231 break;
15233 default:
15234 gcc_unreachable ();
15237 return t;
15240 /* Return the tree for not (ARG0) when ARG0 is known to be an integer
15241 constant. TYPE is the type of the result. */
15243 static tree
15244 fold_not_const (tree arg0, tree type)
15246 tree t = NULL_TREE;
15248 gcc_assert (TREE_CODE (arg0) == INTEGER_CST);
15250 t = force_fit_type_double (type, ~TREE_INT_CST_LOW (arg0),
15251 ~TREE_INT_CST_HIGH (arg0), 0,
15252 TREE_OVERFLOW (arg0));
15254 return t;
15257 /* Given CODE, a relational operator, the target type, TYPE and two
15258 constant operands OP0 and OP1, return the result of the
15259 relational operation. If the result is not a compile time
15260 constant, then return NULL_TREE. */
15262 static tree
15263 fold_relational_const (enum tree_code code, tree type, tree op0, tree op1)
15265 int result, invert;
15267 /* From here on, the only cases we handle are when the result is
15268 known to be a constant. */
15270 if (TREE_CODE (op0) == REAL_CST && TREE_CODE (op1) == REAL_CST)
15272 const REAL_VALUE_TYPE *c0 = TREE_REAL_CST_PTR (op0);
15273 const REAL_VALUE_TYPE *c1 = TREE_REAL_CST_PTR (op1);
15275 /* Handle the cases where either operand is a NaN. */
15276 if (real_isnan (c0) || real_isnan (c1))
15278 switch (code)
15280 case EQ_EXPR:
15281 case ORDERED_EXPR:
15282 result = 0;
15283 break;
15285 case NE_EXPR:
15286 case UNORDERED_EXPR:
15287 case UNLT_EXPR:
15288 case UNLE_EXPR:
15289 case UNGT_EXPR:
15290 case UNGE_EXPR:
15291 case UNEQ_EXPR:
15292 result = 1;
15293 break;
15295 case LT_EXPR:
15296 case LE_EXPR:
15297 case GT_EXPR:
15298 case GE_EXPR:
15299 case LTGT_EXPR:
15300 if (flag_trapping_math)
15301 return NULL_TREE;
15302 result = 0;
15303 break;
15305 default:
15306 gcc_unreachable ();
15309 return constant_boolean_node (result, type);
15312 return constant_boolean_node (real_compare (code, c0, c1), type);
15315 if (TREE_CODE (op0) == FIXED_CST && TREE_CODE (op1) == FIXED_CST)
15317 const FIXED_VALUE_TYPE *c0 = TREE_FIXED_CST_PTR (op0);
15318 const FIXED_VALUE_TYPE *c1 = TREE_FIXED_CST_PTR (op1);
15319 return constant_boolean_node (fixed_compare (code, c0, c1), type);
15322 /* Handle equality/inequality of complex constants. */
15323 if (TREE_CODE (op0) == COMPLEX_CST && TREE_CODE (op1) == COMPLEX_CST)
15325 tree rcond = fold_relational_const (code, type,
15326 TREE_REALPART (op0),
15327 TREE_REALPART (op1));
15328 tree icond = fold_relational_const (code, type,
15329 TREE_IMAGPART (op0),
15330 TREE_IMAGPART (op1));
15331 if (code == EQ_EXPR)
15332 return fold_build2 (TRUTH_ANDIF_EXPR, type, rcond, icond);
15333 else if (code == NE_EXPR)
15334 return fold_build2 (TRUTH_ORIF_EXPR, type, rcond, icond);
15335 else
15336 return NULL_TREE;
15339 /* From here on we only handle LT, LE, GT, GE, EQ and NE.
15341 To compute GT, swap the arguments and do LT.
15342 To compute GE, do LT and invert the result.
15343 To compute LE, swap the arguments, do LT and invert the result.
15344 To compute NE, do EQ and invert the result.
15346 Therefore, the code below must handle only EQ and LT. */
15348 if (code == LE_EXPR || code == GT_EXPR)
15350 tree tem = op0;
15351 op0 = op1;
15352 op1 = tem;
15353 code = swap_tree_comparison (code);
15356 /* Note that it is safe to invert for real values here because we
15357 have already handled the one case that it matters. */
15359 invert = 0;
15360 if (code == NE_EXPR || code == GE_EXPR)
15362 invert = 1;
15363 code = invert_tree_comparison (code, false);
15366 /* Compute a result for LT or EQ if args permit;
15367 Otherwise return T. */
15368 if (TREE_CODE (op0) == INTEGER_CST && TREE_CODE (op1) == INTEGER_CST)
15370 if (code == EQ_EXPR)
15371 result = tree_int_cst_equal (op0, op1);
15372 else if (TYPE_UNSIGNED (TREE_TYPE (op0)))
15373 result = INT_CST_LT_UNSIGNED (op0, op1);
15374 else
15375 result = INT_CST_LT (op0, op1);
15377 else
15378 return NULL_TREE;
15380 if (invert)
15381 result ^= 1;
15382 return constant_boolean_node (result, type);
15385 /* If necessary, return a CLEANUP_POINT_EXPR for EXPR with the
15386 indicated TYPE. If no CLEANUP_POINT_EXPR is necessary, return EXPR
15387 itself. */
15389 tree
15390 fold_build_cleanup_point_expr (tree type, tree expr)
15392 /* If the expression does not have side effects then we don't have to wrap
15393 it with a cleanup point expression. */
15394 if (!TREE_SIDE_EFFECTS (expr))
15395 return expr;
15397 /* If the expression is a return, check to see if the expression inside the
15398 return has no side effects or the right hand side of the modify expression
15399 inside the return. If either don't have side effects set we don't need to
15400 wrap the expression in a cleanup point expression. Note we don't check the
15401 left hand side of the modify because it should always be a return decl. */
15402 if (TREE_CODE (expr) == RETURN_EXPR)
15404 tree op = TREE_OPERAND (expr, 0);
15405 if (!op || !TREE_SIDE_EFFECTS (op))
15406 return expr;
15407 op = TREE_OPERAND (op, 1);
15408 if (!TREE_SIDE_EFFECTS (op))
15409 return expr;
15412 return build1 (CLEANUP_POINT_EXPR, type, expr);
15415 /* Given a pointer value OP0 and a type TYPE, return a simplified version
15416 of an indirection through OP0, or NULL_TREE if no simplification is
15417 possible. */
15419 tree
15420 fold_indirect_ref_1 (location_t loc, tree type, tree op0)
15422 tree sub = op0;
15423 tree subtype;
15425 STRIP_NOPS (sub);
15426 subtype = TREE_TYPE (sub);
15427 if (!POINTER_TYPE_P (subtype))
15428 return NULL_TREE;
15430 if (TREE_CODE (sub) == ADDR_EXPR)
15432 tree op = TREE_OPERAND (sub, 0);
15433 tree optype = TREE_TYPE (op);
15434 /* *&CONST_DECL -> to the value of the const decl. */
15435 if (TREE_CODE (op) == CONST_DECL)
15436 return DECL_INITIAL (op);
15437 /* *&p => p; make sure to handle *&"str"[cst] here. */
15438 if (type == optype)
15440 tree fop = fold_read_from_constant_string (op);
15441 if (fop)
15442 return fop;
15443 else
15444 return op;
15446 /* *(foo *)&fooarray => fooarray[0] */
15447 else if (TREE_CODE (optype) == ARRAY_TYPE
15448 && type == TREE_TYPE (optype))
15450 tree type_domain = TYPE_DOMAIN (optype);
15451 tree min_val = size_zero_node;
15452 if (type_domain && TYPE_MIN_VALUE (type_domain))
15453 min_val = TYPE_MIN_VALUE (type_domain);
15454 op0 = build4 (ARRAY_REF, type, op, min_val, NULL_TREE, NULL_TREE);
15455 SET_EXPR_LOCATION (op0, loc);
15456 return op0;
15458 /* *(foo *)&complexfoo => __real__ complexfoo */
15459 else if (TREE_CODE (optype) == COMPLEX_TYPE
15460 && type == TREE_TYPE (optype))
15461 return fold_build1_loc (loc, REALPART_EXPR, type, op);
15462 /* *(foo *)&vectorfoo => BIT_FIELD_REF<vectorfoo,...> */
15463 else if (TREE_CODE (optype) == VECTOR_TYPE
15464 && type == TREE_TYPE (optype))
15466 tree part_width = TYPE_SIZE (type);
15467 tree index = bitsize_int (0);
15468 return fold_build3_loc (loc, BIT_FIELD_REF, type, op, part_width, index);
15472 /* ((foo*)&vectorfoo)[1] => BIT_FIELD_REF<vectorfoo,...> */
15473 if (TREE_CODE (sub) == POINTER_PLUS_EXPR
15474 && TREE_CODE (TREE_OPERAND (sub, 1)) == INTEGER_CST)
15476 tree op00 = TREE_OPERAND (sub, 0);
15477 tree op01 = TREE_OPERAND (sub, 1);
15478 tree op00type;
15480 STRIP_NOPS (op00);
15481 op00type = TREE_TYPE (op00);
15482 if (TREE_CODE (op00) == ADDR_EXPR
15483 && TREE_CODE (TREE_TYPE (op00type)) == VECTOR_TYPE
15484 && type == TREE_TYPE (TREE_TYPE (op00type)))
15486 HOST_WIDE_INT offset = tree_low_cst (op01, 0);
15487 tree part_width = TYPE_SIZE (type);
15488 unsigned HOST_WIDE_INT part_widthi = tree_low_cst (part_width, 0)/BITS_PER_UNIT;
15489 unsigned HOST_WIDE_INT indexi = offset * BITS_PER_UNIT;
15490 tree index = bitsize_int (indexi);
15492 if (offset/part_widthi <= TYPE_VECTOR_SUBPARTS (TREE_TYPE (op00type)))
15493 return fold_build3_loc (loc,
15494 BIT_FIELD_REF, type, TREE_OPERAND (op00, 0),
15495 part_width, index);
15501 /* ((foo*)&complexfoo)[1] => __imag__ complexfoo */
15502 if (TREE_CODE (sub) == POINTER_PLUS_EXPR
15503 && TREE_CODE (TREE_OPERAND (sub, 1)) == INTEGER_CST)
15505 tree op00 = TREE_OPERAND (sub, 0);
15506 tree op01 = TREE_OPERAND (sub, 1);
15507 tree op00type;
15509 STRIP_NOPS (op00);
15510 op00type = TREE_TYPE (op00);
15511 if (TREE_CODE (op00) == ADDR_EXPR
15512 && TREE_CODE (TREE_TYPE (op00type)) == COMPLEX_TYPE
15513 && type == TREE_TYPE (TREE_TYPE (op00type)))
15515 tree size = TYPE_SIZE_UNIT (type);
15516 if (tree_int_cst_equal (size, op01))
15517 return fold_build1_loc (loc, IMAGPART_EXPR, type,
15518 TREE_OPERAND (op00, 0));
15522 /* *(foo *)fooarrptr => (*fooarrptr)[0] */
15523 if (TREE_CODE (TREE_TYPE (subtype)) == ARRAY_TYPE
15524 && type == TREE_TYPE (TREE_TYPE (subtype)))
15526 tree type_domain;
15527 tree min_val = size_zero_node;
15528 sub = build_fold_indirect_ref_loc (loc, sub);
15529 type_domain = TYPE_DOMAIN (TREE_TYPE (sub));
15530 if (type_domain && TYPE_MIN_VALUE (type_domain))
15531 min_val = TYPE_MIN_VALUE (type_domain);
15532 op0 = build4 (ARRAY_REF, type, sub, min_val, NULL_TREE, NULL_TREE);
15533 SET_EXPR_LOCATION (op0, loc);
15534 return op0;
15537 return NULL_TREE;
15540 /* Builds an expression for an indirection through T, simplifying some
15541 cases. */
15543 tree
15544 build_fold_indirect_ref_loc (location_t loc, tree t)
15546 tree type = TREE_TYPE (TREE_TYPE (t));
15547 tree sub = fold_indirect_ref_1 (loc, type, t);
15549 if (sub)
15550 return sub;
15552 t = build1 (INDIRECT_REF, type, t);
15553 SET_EXPR_LOCATION (t, loc);
15554 return t;
15557 /* Given an INDIRECT_REF T, return either T or a simplified version. */
15559 tree
15560 fold_indirect_ref_loc (location_t loc, tree t)
15562 tree sub = fold_indirect_ref_1 (loc, TREE_TYPE (t), TREE_OPERAND (t, 0));
15564 if (sub)
15565 return sub;
15566 else
15567 return t;
15570 /* Strip non-trapping, non-side-effecting tree nodes from an expression
15571 whose result is ignored. The type of the returned tree need not be
15572 the same as the original expression. */
15574 tree
15575 fold_ignored_result (tree t)
15577 if (!TREE_SIDE_EFFECTS (t))
15578 return integer_zero_node;
15580 for (;;)
15581 switch (TREE_CODE_CLASS (TREE_CODE (t)))
15583 case tcc_unary:
15584 t = TREE_OPERAND (t, 0);
15585 break;
15587 case tcc_binary:
15588 case tcc_comparison:
15589 if (!TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1)))
15590 t = TREE_OPERAND (t, 0);
15591 else if (!TREE_SIDE_EFFECTS (TREE_OPERAND (t, 0)))
15592 t = TREE_OPERAND (t, 1);
15593 else
15594 return t;
15595 break;
15597 case tcc_expression:
15598 switch (TREE_CODE (t))
15600 case COMPOUND_EXPR:
15601 if (TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1)))
15602 return t;
15603 t = TREE_OPERAND (t, 0);
15604 break;
15606 case COND_EXPR:
15607 if (TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1))
15608 || TREE_SIDE_EFFECTS (TREE_OPERAND (t, 2)))
15609 return t;
15610 t = TREE_OPERAND (t, 0);
15611 break;
15613 default:
15614 return t;
15616 break;
15618 default:
15619 return t;
15623 /* Return the value of VALUE, rounded up to a multiple of DIVISOR.
15624 This can only be applied to objects of a sizetype. */
15626 tree
15627 round_up_loc (location_t loc, tree value, int divisor)
15629 tree div = NULL_TREE;
15631 gcc_assert (divisor > 0);
15632 if (divisor == 1)
15633 return value;
15635 /* See if VALUE is already a multiple of DIVISOR. If so, we don't
15636 have to do anything. Only do this when we are not given a const,
15637 because in that case, this check is more expensive than just
15638 doing it. */
15639 if (TREE_CODE (value) != INTEGER_CST)
15641 div = build_int_cst (TREE_TYPE (value), divisor);
15643 if (multiple_of_p (TREE_TYPE (value), value, div))
15644 return value;
15647 /* If divisor is a power of two, simplify this to bit manipulation. */
15648 if (divisor == (divisor & -divisor))
15650 if (TREE_CODE (value) == INTEGER_CST)
15652 unsigned HOST_WIDE_INT low = TREE_INT_CST_LOW (value);
15653 unsigned HOST_WIDE_INT high;
15654 bool overflow_p;
15656 if ((low & (divisor - 1)) == 0)
15657 return value;
15659 overflow_p = TREE_OVERFLOW (value);
15660 high = TREE_INT_CST_HIGH (value);
15661 low &= ~(divisor - 1);
15662 low += divisor;
15663 if (low == 0)
15665 high++;
15666 if (high == 0)
15667 overflow_p = true;
15670 return force_fit_type_double (TREE_TYPE (value), low, high,
15671 -1, overflow_p);
15673 else
15675 tree t;
15677 t = build_int_cst (TREE_TYPE (value), divisor - 1);
15678 value = size_binop_loc (loc, PLUS_EXPR, value, t);
15679 t = build_int_cst (TREE_TYPE (value), -divisor);
15680 value = size_binop_loc (loc, BIT_AND_EXPR, value, t);
15683 else
15685 if (!div)
15686 div = build_int_cst (TREE_TYPE (value), divisor);
15687 value = size_binop_loc (loc, CEIL_DIV_EXPR, value, div);
15688 value = size_binop_loc (loc, MULT_EXPR, value, div);
15691 return value;
15694 /* Likewise, but round down. */
15696 tree
15697 round_down_loc (location_t loc, tree value, int divisor)
15699 tree div = NULL_TREE;
15701 gcc_assert (divisor > 0);
15702 if (divisor == 1)
15703 return value;
15705 /* See if VALUE is already a multiple of DIVISOR. If so, we don't
15706 have to do anything. Only do this when we are not given a const,
15707 because in that case, this check is more expensive than just
15708 doing it. */
15709 if (TREE_CODE (value) != INTEGER_CST)
15711 div = build_int_cst (TREE_TYPE (value), divisor);
15713 if (multiple_of_p (TREE_TYPE (value), value, div))
15714 return value;
15717 /* If divisor is a power of two, simplify this to bit manipulation. */
15718 if (divisor == (divisor & -divisor))
15720 tree t;
15722 t = build_int_cst (TREE_TYPE (value), -divisor);
15723 value = size_binop_loc (loc, BIT_AND_EXPR, value, t);
15725 else
15727 if (!div)
15728 div = build_int_cst (TREE_TYPE (value), divisor);
15729 value = size_binop_loc (loc, FLOOR_DIV_EXPR, value, div);
15730 value = size_binop_loc (loc, MULT_EXPR, value, div);
15733 return value;
15736 /* Returns the pointer to the base of the object addressed by EXP and
15737 extracts the information about the offset of the access, storing it
15738 to PBITPOS and POFFSET. */
15740 static tree
15741 split_address_to_core_and_offset (tree exp,
15742 HOST_WIDE_INT *pbitpos, tree *poffset)
15744 tree core;
15745 enum machine_mode mode;
15746 int unsignedp, volatilep;
15747 HOST_WIDE_INT bitsize;
15748 location_t loc = EXPR_LOCATION (exp);
15750 if (TREE_CODE (exp) == ADDR_EXPR)
15752 core = get_inner_reference (TREE_OPERAND (exp, 0), &bitsize, pbitpos,
15753 poffset, &mode, &unsignedp, &volatilep,
15754 false);
15755 core = build_fold_addr_expr_loc (loc, core);
15757 else
15759 core = exp;
15760 *pbitpos = 0;
15761 *poffset = NULL_TREE;
15764 return core;
15767 /* Returns true if addresses of E1 and E2 differ by a constant, false
15768 otherwise. If they do, E1 - E2 is stored in *DIFF. */
15770 bool
15771 ptr_difference_const (tree e1, tree e2, HOST_WIDE_INT *diff)
15773 tree core1, core2;
15774 HOST_WIDE_INT bitpos1, bitpos2;
15775 tree toffset1, toffset2, tdiff, type;
15777 core1 = split_address_to_core_and_offset (e1, &bitpos1, &toffset1);
15778 core2 = split_address_to_core_and_offset (e2, &bitpos2, &toffset2);
15780 if (bitpos1 % BITS_PER_UNIT != 0
15781 || bitpos2 % BITS_PER_UNIT != 0
15782 || !operand_equal_p (core1, core2, 0))
15783 return false;
15785 if (toffset1 && toffset2)
15787 type = TREE_TYPE (toffset1);
15788 if (type != TREE_TYPE (toffset2))
15789 toffset2 = fold_convert (type, toffset2);
15791 tdiff = fold_build2 (MINUS_EXPR, type, toffset1, toffset2);
15792 if (!cst_and_fits_in_hwi (tdiff))
15793 return false;
15795 *diff = int_cst_value (tdiff);
15797 else if (toffset1 || toffset2)
15799 /* If only one of the offsets is non-constant, the difference cannot
15800 be a constant. */
15801 return false;
15803 else
15804 *diff = 0;
15806 *diff += (bitpos1 - bitpos2) / BITS_PER_UNIT;
15807 return true;
15810 /* Simplify the floating point expression EXP when the sign of the
15811 result is not significant. Return NULL_TREE if no simplification
15812 is possible. */
15814 tree
15815 fold_strip_sign_ops (tree exp)
15817 tree arg0, arg1;
15818 location_t loc = EXPR_LOCATION (exp);
15820 switch (TREE_CODE (exp))
15822 case ABS_EXPR:
15823 case NEGATE_EXPR:
15824 arg0 = fold_strip_sign_ops (TREE_OPERAND (exp, 0));
15825 return arg0 ? arg0 : TREE_OPERAND (exp, 0);
15827 case MULT_EXPR:
15828 case RDIV_EXPR:
15829 if (HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (TREE_TYPE (exp))))
15830 return NULL_TREE;
15831 arg0 = fold_strip_sign_ops (TREE_OPERAND (exp, 0));
15832 arg1 = fold_strip_sign_ops (TREE_OPERAND (exp, 1));
15833 if (arg0 != NULL_TREE || arg1 != NULL_TREE)
15834 return fold_build2_loc (loc, TREE_CODE (exp), TREE_TYPE (exp),
15835 arg0 ? arg0 : TREE_OPERAND (exp, 0),
15836 arg1 ? arg1 : TREE_OPERAND (exp, 1));
15837 break;
15839 case COMPOUND_EXPR:
15840 arg0 = TREE_OPERAND (exp, 0);
15841 arg1 = fold_strip_sign_ops (TREE_OPERAND (exp, 1));
15842 if (arg1)
15843 return fold_build2_loc (loc, COMPOUND_EXPR, TREE_TYPE (exp), arg0, arg1);
15844 break;
15846 case COND_EXPR:
15847 arg0 = fold_strip_sign_ops (TREE_OPERAND (exp, 1));
15848 arg1 = fold_strip_sign_ops (TREE_OPERAND (exp, 2));
15849 if (arg0 || arg1)
15850 return fold_build3_loc (loc,
15851 COND_EXPR, TREE_TYPE (exp), TREE_OPERAND (exp, 0),
15852 arg0 ? arg0 : TREE_OPERAND (exp, 1),
15853 arg1 ? arg1 : TREE_OPERAND (exp, 2));
15854 break;
15856 case CALL_EXPR:
15858 const enum built_in_function fcode = builtin_mathfn_code (exp);
15859 switch (fcode)
15861 CASE_FLT_FN (BUILT_IN_COPYSIGN):
15862 /* Strip copysign function call, return the 1st argument. */
15863 arg0 = CALL_EXPR_ARG (exp, 0);
15864 arg1 = CALL_EXPR_ARG (exp, 1);
15865 return omit_one_operand_loc (loc, TREE_TYPE (exp), arg0, arg1);
15867 default:
15868 /* Strip sign ops from the argument of "odd" math functions. */
15869 if (negate_mathfn_p (fcode))
15871 arg0 = fold_strip_sign_ops (CALL_EXPR_ARG (exp, 0));
15872 if (arg0)
15873 return build_call_expr_loc (loc, get_callee_fndecl (exp), 1, arg0);
15875 break;
15878 break;
15880 default:
15881 break;
15883 return NULL_TREE;