/* Fold a constant sub-tree into a single node for C-compiler
   Copyright (C) 1987, 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998, 1999,
   2000, 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009, 2010
   Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */
/*@@ This file should be rewritten to use an arbitrary precision
  @@ representation for "struct tree_int_cst" and "struct tree_real_cst".
  @@ Perhaps the routines could also be used for bc/dc, and made a lib.
  @@ The routines that translate from the ap rep should
  @@ warn if precision et. al. is lost.
  @@ This would also make life easier when this technology is used
  @@ for cross-compilers.  */

/* The entry points in this file are fold, size_int_wide and size_binop.

   fold takes a tree as argument and returns a simplified tree.

   size_binop takes a tree code for an arithmetic operation
   and two operands that are trees, and produces a tree for the
   result, assuming the type comes from `sizetype'.

   size_int takes an integer value, and creates a tree constant
   with type from `sizetype'.

   Note: Since the folders get called on non-gimple code as well as
   gimple code, we need to handle GIMPLE tuples as well as their
   corresponding tree equivalents.  */
#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "flags.h"
#include "tree.h"
#include "realmpfr.h"
#include "rtl.h"
#include "expr.h"
#include "tm_p.h"
#include "target.h"
#include "diagnostic-core.h"
#include "toplev.h"
#include "intl.h"
#include "ggc.h"
#include "hashtab.h"
#include "langhooks.h"
#include "md5.h"
#include "gimple.h"
#include "tree-flow.h"
/* Nonzero if we are folding constants inside an initializer; zero
   otherwise.  */
int folding_initializer = 0;

/* The following constants represent a bit based encoding of GCC's
   comparison operators.  This encoding simplifies transformations
   on relational comparison operators, such as AND and OR.  */
enum comparison_code {
  COMPCODE_FALSE = 0,
  COMPCODE_LT = 1,
  COMPCODE_EQ = 2,
  COMPCODE_LE = 3,
  COMPCODE_GT = 4,
  COMPCODE_LTGT = 5,
  COMPCODE_GE = 6,
  COMPCODE_ORD = 7,
  COMPCODE_UNORD = 8,
  COMPCODE_UNLT = 9,
  COMPCODE_UNEQ = 10,
  COMPCODE_UNLE = 11,
  COMPCODE_UNGT = 12,
  COMPCODE_NE = 13,
  COMPCODE_UNGE = 14,
  COMPCODE_TRUE = 15
};
static bool negate_mathfn_p (enum built_in_function);
static bool negate_expr_p (tree);
static tree negate_expr (tree);
static tree split_tree (tree, enum tree_code, tree *, tree *, tree *, int);
static tree associate_trees (location_t, tree, tree, enum tree_code, tree);
static tree const_binop (enum tree_code, tree, tree);
static enum comparison_code comparison_to_compcode (enum tree_code);
static enum tree_code compcode_to_comparison (enum comparison_code);
static int operand_equal_for_comparison_p (tree, tree, tree);
static int twoval_comparison_p (tree, tree *, tree *, int *);
static tree eval_subst (location_t, tree, tree, tree, tree, tree);
static tree pedantic_omit_one_operand_loc (location_t, tree, tree, tree);
static tree distribute_bit_expr (location_t, enum tree_code, tree, tree, tree);
static tree make_bit_field_ref (location_t, tree, tree,
                                HOST_WIDE_INT, HOST_WIDE_INT, int);
static tree optimize_bit_field_compare (location_t, enum tree_code,
                                        tree, tree, tree);
static tree decode_field_reference (location_t, tree, HOST_WIDE_INT *,
                                    HOST_WIDE_INT *,
                                    enum machine_mode *, int *, int *,
                                    tree *, tree *);
static int all_ones_mask_p (const_tree, int);
static tree sign_bit_p (tree, const_tree);
static int simple_operand_p (const_tree);
static tree range_binop (enum tree_code, tree, tree, int, tree, int);
static tree range_predecessor (tree);
static tree range_successor (tree);
extern tree make_range (tree, int *, tree *, tree *, bool *);
extern bool merge_ranges (int *, tree *, tree *, int, tree, tree, int,
                          tree, tree);
static tree fold_range_test (location_t, enum tree_code, tree, tree, tree);
static tree fold_cond_expr_with_comparison (location_t, tree, tree, tree, tree);
static tree unextend (tree, int, int, tree);
static tree fold_truthop (location_t, enum tree_code, tree, tree, tree);
static tree optimize_minmax_comparison (location_t, enum tree_code,
                                        tree, tree, tree);
static tree extract_muldiv (tree, tree, enum tree_code, tree, bool *);
static tree extract_muldiv_1 (tree, tree, enum tree_code, tree, bool *);
static tree fold_binary_op_with_conditional_arg (location_t,
                                                 enum tree_code, tree,
                                                 tree, tree,
                                                 tree, tree, int);
static tree fold_mathfn_compare (location_t,
                                 enum built_in_function, enum tree_code,
                                 tree, tree, tree);
static tree fold_inf_compare (location_t, enum tree_code, tree, tree, tree);
static tree fold_div_compare (location_t, enum tree_code, tree, tree, tree);
static bool reorder_operands_p (const_tree, const_tree);
static tree fold_negate_const (tree, tree);
static tree fold_not_const (const_tree, tree);
static tree fold_relational_const (enum tree_code, tree, tree, tree);
static tree fold_convert_const (enum tree_code, tree, tree);
/* We know that A1 + B1 = SUM1, using 2's complement arithmetic and ignoring
   overflow.  Suppose A, B and SUM have the same respective signs as A1, B1,
   and SUM1.  Then this yields nonzero if overflow occurred during the
   addition.

   Overflow occurs if A and B have the same sign, but A and SUM differ in
   sign.  Use `^' to test whether signs differ, and `< 0' to isolate the
   sign.  */
#define OVERFLOW_SUM_SIGN(a, b, sum) ((~((a) ^ (b)) & ((a) ^ (sum))) < 0)
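
/* Illustrative sketch (hypothetical, kept out of the build with #if 0):
   INT_MAX + 1 wraps to INT_MIN, so the operands share a sign while the
   sum does not, and the macro reports overflow; -1 + -1 = -2 keeps the
   sign bit in all three values, so it does not.  */
#if 0
#include <limits.h>
static void
overflow_sum_sign_example (void)
{
  int a = INT_MAX, b = 1;
  int sum = (int) ((unsigned int) a + (unsigned int) b);  /* wraps */
  int overflowed = OVERFLOW_SUM_SIGN (a, b, sum);         /* nonzero */
  int fine = OVERFLOW_SUM_SIGN (-1, -1, -2);              /* zero */
  (void) overflowed; (void) fine;
}
#endif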
/* If ARG2 divides ARG1 with zero remainder, carries out the division
   of type CODE and returns the quotient.
   Otherwise returns NULL_TREE.  */

tree
div_if_zero_remainder (enum tree_code code, const_tree arg1, const_tree arg2)
{
  double_int quo, rem;
  int uns;

  /* The sign of the division is taken from operand two, which does the
     correct thing for POINTER_PLUS_EXPR where we want a signed
     division.  */
  uns = TYPE_UNSIGNED (TREE_TYPE (arg2));
  if (TREE_CODE (TREE_TYPE (arg2)) == INTEGER_TYPE
      && TYPE_IS_SIZETYPE (TREE_TYPE (arg2)))
    uns = false;

  quo = double_int_divmod (tree_to_double_int (arg1),
                           tree_to_double_int (arg2),
                           uns, code, &rem);

  if (double_int_zero_p (rem))
    return build_int_cst_wide (TREE_TYPE (arg1), quo.low, quo.high);

  return NULL_TREE;
}
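
/* Hypothetical use (compiled out): 12 / 4 is exact, so a constant 3
   comes back; 13 / 4 leaves a remainder, so NULL_TREE says "do not
   fold".  */
#if 0
tree four = build_int_cst (integer_type_node, 4);
tree three = div_if_zero_remainder (TRUNC_DIV_EXPR,
                                    build_int_cst (integer_type_node, 12),
                                    four);
tree no_fold = div_if_zero_remainder (TRUNC_DIV_EXPR,
                                      build_int_cst (integer_type_node, 13),
                                      four);    /* NULL_TREE */
#endif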
/* This is nonzero if we should defer warnings about undefined
   overflow.  This facility exists because these warnings are a
   special case.  The code to estimate loop iterations does not want
   to issue any warnings, since it works with expressions which do not
   occur in user code.  Various bits of cleanup code call fold(), but
   only use the result if it has certain characteristics (e.g., is a
   constant); that code only wants to issue a warning if the result is
   used.  */

static int fold_deferring_overflow_warnings;

/* If a warning about undefined overflow is deferred, this is the
   warning.  Note that this may cause us to turn two warnings into
   one, but that is fine since it is sufficient to only give one
   warning per expression.  */

static const char* fold_deferred_overflow_warning;

/* If a warning about undefined overflow is deferred, this is the
   level at which the warning should be emitted.  */

static enum warn_strict_overflow_code fold_deferred_overflow_code;

/* Start deferring overflow warnings.  We could use a stack here to
   permit nested calls, but at present it is not necessary.  */

void
fold_defer_overflow_warnings (void)
{
  ++fold_deferring_overflow_warnings;
}

/* Stop deferring overflow warnings.  If there is a pending warning,
   and ISSUE is true, then issue the warning if appropriate.  STMT is
   the statement with which the warning should be associated (used for
   location information); STMT may be NULL.  CODE is the level of the
   warning--a warn_strict_overflow_code value.  This function will use
   the smaller of CODE and the deferred code when deciding whether to
   issue the warning.  CODE may be zero to mean to always use the
   deferred code.  */

void
fold_undefer_overflow_warnings (bool issue, const_gimple stmt, int code)
{
  const char *warnmsg;
  location_t locus;

  gcc_assert (fold_deferring_overflow_warnings > 0);
  --fold_deferring_overflow_warnings;
  if (fold_deferring_overflow_warnings > 0)
    {
      if (fold_deferred_overflow_warning != NULL
          && code != 0
          && code < (int) fold_deferred_overflow_code)
        fold_deferred_overflow_code = (enum warn_strict_overflow_code) code;
      return;
    }

  warnmsg = fold_deferred_overflow_warning;
  fold_deferred_overflow_warning = NULL;

  if (!issue || warnmsg == NULL)
    return;

  if (gimple_no_warning_p (stmt))
    return;

  /* Use the smallest code level when deciding to issue the
     warning.  */
  if (code == 0 || code > (int) fold_deferred_overflow_code)
    code = fold_deferred_overflow_code;

  if (!issue_strict_overflow_warning (code))
    return;

  if (stmt == NULL)
    locus = input_location;
  else
    locus = gimple_location (stmt);
  warning_at (locus, OPT_Wstrict_overflow, "%s", warnmsg);
}

/* Stop deferring overflow warnings, ignoring any deferred
   warnings.  */

void
fold_undefer_and_ignore_overflow_warnings (void)
{
  fold_undefer_overflow_warnings (false, NULL, 0);
}

/* Whether we are deferring overflow warnings.  */

bool
fold_deferring_overflow_warnings_p (void)
{
  return fold_deferring_overflow_warnings > 0;
}
/* This is called when we fold something based on the fact that signed
   overflow is undefined.  */

static void
fold_overflow_warning (const char* gmsgid, enum warn_strict_overflow_code wc)
{
  if (fold_deferring_overflow_warnings > 0)
    {
      if (fold_deferred_overflow_warning == NULL
          || wc < fold_deferred_overflow_code)
        {
          fold_deferred_overflow_warning = gmsgid;
          fold_deferred_overflow_code = wc;
        }
    }
  else if (issue_strict_overflow_warning (wc))
    warning (OPT_Wstrict_overflow, gmsgid);
}
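
/* Hypothetical caller (compiled out): defer warnings around a fold,
   then emit the pending one only if the folded result is used.  */
#if 0
fold_defer_overflow_warnings ();
tree res = fold_binary (PLUS_EXPR, type, op0, op1);
fold_undefer_overflow_warnings (res != NULL_TREE, /* stmt = */ NULL, 0);
#endif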
/* Return true if the built-in mathematical function specified by CODE
   is odd, i.e. -f(x) == f(-x).  */

static bool
negate_mathfn_p (enum built_in_function code)
{
  switch (code)
    {
    CASE_FLT_FN (BUILT_IN_ASIN):
    CASE_FLT_FN (BUILT_IN_ASINH):
    CASE_FLT_FN (BUILT_IN_ATAN):
    CASE_FLT_FN (BUILT_IN_ATANH):
    CASE_FLT_FN (BUILT_IN_CASIN):
    CASE_FLT_FN (BUILT_IN_CASINH):
    CASE_FLT_FN (BUILT_IN_CATAN):
    CASE_FLT_FN (BUILT_IN_CATANH):
    CASE_FLT_FN (BUILT_IN_CBRT):
    CASE_FLT_FN (BUILT_IN_CPROJ):
    CASE_FLT_FN (BUILT_IN_CSIN):
    CASE_FLT_FN (BUILT_IN_CSINH):
    CASE_FLT_FN (BUILT_IN_CTAN):
    CASE_FLT_FN (BUILT_IN_CTANH):
    CASE_FLT_FN (BUILT_IN_ERF):
    CASE_FLT_FN (BUILT_IN_LLROUND):
    CASE_FLT_FN (BUILT_IN_LROUND):
    CASE_FLT_FN (BUILT_IN_ROUND):
    CASE_FLT_FN (BUILT_IN_SIN):
    CASE_FLT_FN (BUILT_IN_SINH):
    CASE_FLT_FN (BUILT_IN_TAN):
    CASE_FLT_FN (BUILT_IN_TANH):
    CASE_FLT_FN (BUILT_IN_TRUNC):
      return true;

    CASE_FLT_FN (BUILT_IN_LLRINT):
    CASE_FLT_FN (BUILT_IN_LRINT):
    CASE_FLT_FN (BUILT_IN_NEARBYINT):
    CASE_FLT_FN (BUILT_IN_RINT):
      return !flag_rounding_math;

    default:
      break;
    }

  return false;
}
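
/* Illustrative sketch (hypothetical, compiled out): the "odd function"
   property this predicate asserts, -f(x) == f(-x), shown for two of
   the listed builtins.  Exact equality assumes a libm whose
   implementations are symmetric about zero.  */
#if 0
#include <math.h>
static int
odd_function_examples (void)
{
  double x = 0.5;
  return -sin (x) == sin (-x)       /* sin is odd */
         && -cbrt (x) == cbrt (-x); /* so is cbrt */
}
#endif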
/* Check whether we may negate an integer constant T without causing
   overflow.  */

bool
may_negate_without_overflow_p (const_tree t)
{
  unsigned HOST_WIDE_INT val;
  unsigned int prec;
  tree type;

  gcc_assert (TREE_CODE (t) == INTEGER_CST);

  type = TREE_TYPE (t);
  if (TYPE_UNSIGNED (type))
    return false;

  prec = TYPE_PRECISION (type);
  if (prec > HOST_BITS_PER_WIDE_INT)
    {
      if (TREE_INT_CST_LOW (t) != 0)
        return true;
      prec -= HOST_BITS_PER_WIDE_INT;
      val = TREE_INT_CST_HIGH (t);
    }
  else
    val = TREE_INT_CST_LOW (t);
  if (prec < HOST_BITS_PER_WIDE_INT)
    val &= ((unsigned HOST_WIDE_INT) 1 << prec) - 1;
  return val != ((unsigned HOST_WIDE_INT) 1 << (prec - 1));
}
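
/* Illustrative sketch (hypothetical, compiled out): in a signed 8-bit
   type the only value whose negation overflows is -128, whose low
   PREC bits equal 1 << (prec - 1), exactly the pattern the comparison
   above rejects.  */
#if 0
static int
may_negate_example (void)
{
  unsigned int prec = 8;
  unsigned long long val = 0x80;        /* bit pattern of -128 in 8 bits */
  return val != (1ULL << (prec - 1));   /* 0: negation would overflow */
}
#endif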
/* Determine whether an expression T can be cheaply negated using
   the function negate_expr without introducing undefined overflow.  */

static bool
negate_expr_p (tree t)
{
  tree type;

  if (t == 0)
    return false;

  type = TREE_TYPE (t);

  STRIP_SIGN_NOPS (t);
  switch (TREE_CODE (t))
    {
    case INTEGER_CST:
      if (TYPE_OVERFLOW_WRAPS (type))
        return true;

      /* Check that -CST will not overflow type.  */
      return may_negate_without_overflow_p (t);
    case BIT_NOT_EXPR:
      return (INTEGRAL_TYPE_P (type)
              && TYPE_OVERFLOW_WRAPS (type));

    case FIXED_CST:
    case NEGATE_EXPR:
      return true;

    case REAL_CST:
      /* We want to canonicalize to positive real constants.  Pretend
         that only negative ones can be easily negated.  */
      return REAL_VALUE_NEGATIVE (TREE_REAL_CST (t));

    case COMPLEX_CST:
      return negate_expr_p (TREE_REALPART (t))
             && negate_expr_p (TREE_IMAGPART (t));

    case COMPLEX_EXPR:
      return negate_expr_p (TREE_OPERAND (t, 0))
             && negate_expr_p (TREE_OPERAND (t, 1));

    case CONJ_EXPR:
      return negate_expr_p (TREE_OPERAND (t, 0));

    case PLUS_EXPR:
      if (HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type))
          || HONOR_SIGNED_ZEROS (TYPE_MODE (type)))
        return false;
      /* -(A + B) -> (-B) - A.  */
      if (negate_expr_p (TREE_OPERAND (t, 1))
          && reorder_operands_p (TREE_OPERAND (t, 0),
                                 TREE_OPERAND (t, 1)))
        return true;
      /* -(A + B) -> (-A) - B.  */
      return negate_expr_p (TREE_OPERAND (t, 0));

    case MINUS_EXPR:
      /* We can't turn -(A-B) into B-A when we honor signed zeros.  */
      return !HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type))
             && !HONOR_SIGNED_ZEROS (TYPE_MODE (type))
             && reorder_operands_p (TREE_OPERAND (t, 0),
                                    TREE_OPERAND (t, 1));

    case MULT_EXPR:
      if (TYPE_UNSIGNED (TREE_TYPE (t)))
        break;

      /* Fall through.  */

    case RDIV_EXPR:
      if (! HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (TREE_TYPE (t))))
        return negate_expr_p (TREE_OPERAND (t, 1))
               || negate_expr_p (TREE_OPERAND (t, 0));
      break;

    case TRUNC_DIV_EXPR:
    case ROUND_DIV_EXPR:
    case FLOOR_DIV_EXPR:
    case CEIL_DIV_EXPR:
    case EXACT_DIV_EXPR:
      /* In general we can't negate A / B, because if A is INT_MIN and
         B is 1, we may turn this into INT_MIN / -1 which is undefined
         and actually traps on some architectures.  But if overflow is
         undefined, we can negate, because - (INT_MIN / 1) is an
         overflow.  */
      if (INTEGRAL_TYPE_P (TREE_TYPE (t))
          && !TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (t)))
        break;
      return negate_expr_p (TREE_OPERAND (t, 1))
             || negate_expr_p (TREE_OPERAND (t, 0));

    case NOP_EXPR:
      /* Negate -((double)float) as (double)(-float).  */
      if (TREE_CODE (type) == REAL_TYPE)
        {
          tree tem = strip_float_extensions (t);
          if (tem != t)
            return negate_expr_p (tem);
        }
      break;

    case CALL_EXPR:
      /* Negate -f(x) as f(-x).  */
      if (negate_mathfn_p (builtin_mathfn_code (t)))
        return negate_expr_p (CALL_EXPR_ARG (t, 0));
      break;

    case RSHIFT_EXPR:
      /* Optimize -((int)x >> 31) into (unsigned)x >> 31.  */
      if (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST)
        {
          tree op1 = TREE_OPERAND (t, 1);
          if (TREE_INT_CST_HIGH (op1) == 0
              && (unsigned HOST_WIDE_INT) (TYPE_PRECISION (type) - 1)
                 == TREE_INT_CST_LOW (op1))
            return true;
        }
      break;

    default:
      break;
    }
  return false;
}
/* Given T, an expression, return a folded tree for -T or NULL_TREE, if no
   simplification is possible.
   If negate_expr_p would return true for T, NULL_TREE will never be
   returned.  */

static tree
fold_negate_expr (location_t loc, tree t)
{
  tree type = TREE_TYPE (t);
  tree tem;

  switch (TREE_CODE (t))
    {
    /* Convert - (~A) to A + 1.  */
    case BIT_NOT_EXPR:
      if (INTEGRAL_TYPE_P (type))
        return fold_build2_loc (loc, PLUS_EXPR, type, TREE_OPERAND (t, 0),
                                build_int_cst (type, 1));
      break;

    case INTEGER_CST:
      tem = fold_negate_const (t, type);
      if (TREE_OVERFLOW (tem) == TREE_OVERFLOW (t)
          || !TYPE_OVERFLOW_TRAPS (type))
        return tem;
      break;

    case REAL_CST:
      tem = fold_negate_const (t, type);
      /* Two's complement FP formats, such as c4x, may overflow.  */
      if (!TREE_OVERFLOW (tem) || !flag_trapping_math)
        return tem;
      break;

    case FIXED_CST:
      tem = fold_negate_const (t, type);
      return tem;

    case COMPLEX_CST:
      {
        tree rpart = negate_expr (TREE_REALPART (t));
        tree ipart = negate_expr (TREE_IMAGPART (t));

        if ((TREE_CODE (rpart) == REAL_CST
             && TREE_CODE (ipart) == REAL_CST)
            || (TREE_CODE (rpart) == INTEGER_CST
                && TREE_CODE (ipart) == INTEGER_CST))
          return build_complex (type, rpart, ipart);
      }
      break;

    case COMPLEX_EXPR:
      if (negate_expr_p (t))
        return fold_build2_loc (loc, COMPLEX_EXPR, type,
                                fold_negate_expr (loc, TREE_OPERAND (t, 0)),
                                fold_negate_expr (loc, TREE_OPERAND (t, 1)));
      break;

    case CONJ_EXPR:
      if (negate_expr_p (t))
        return fold_build1_loc (loc, CONJ_EXPR, type,
                                fold_negate_expr (loc, TREE_OPERAND (t, 0)));
      break;

    case NEGATE_EXPR:
      return TREE_OPERAND (t, 0);

    case PLUS_EXPR:
      if (!HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type))
          && !HONOR_SIGNED_ZEROS (TYPE_MODE (type)))
        {
          /* -(A + B) -> (-B) - A.  */
          if (negate_expr_p (TREE_OPERAND (t, 1))
              && reorder_operands_p (TREE_OPERAND (t, 0),
                                     TREE_OPERAND (t, 1)))
            {
              tem = negate_expr (TREE_OPERAND (t, 1));
              return fold_build2_loc (loc, MINUS_EXPR, type,
                                      tem, TREE_OPERAND (t, 0));
            }

          /* -(A + B) -> (-A) - B.  */
          if (negate_expr_p (TREE_OPERAND (t, 0)))
            {
              tem = negate_expr (TREE_OPERAND (t, 0));
              return fold_build2_loc (loc, MINUS_EXPR, type,
                                      tem, TREE_OPERAND (t, 1));
            }
        }
      break;

    case MINUS_EXPR:
      /* - (A - B) -> B - A  */
      if (!HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type))
          && !HONOR_SIGNED_ZEROS (TYPE_MODE (type))
          && reorder_operands_p (TREE_OPERAND (t, 0), TREE_OPERAND (t, 1)))
        return fold_build2_loc (loc, MINUS_EXPR, type,
                                TREE_OPERAND (t, 1), TREE_OPERAND (t, 0));
      break;

    case MULT_EXPR:
      if (TYPE_UNSIGNED (type))
        break;

      /* Fall through.  */

    case RDIV_EXPR:
      if (! HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type)))
        {
          tem = TREE_OPERAND (t, 1);
          if (negate_expr_p (tem))
            return fold_build2_loc (loc, TREE_CODE (t), type,
                                    TREE_OPERAND (t, 0), negate_expr (tem));
          tem = TREE_OPERAND (t, 0);
          if (negate_expr_p (tem))
            return fold_build2_loc (loc, TREE_CODE (t), type,
                                    negate_expr (tem), TREE_OPERAND (t, 1));
        }
      break;

    case TRUNC_DIV_EXPR:
    case ROUND_DIV_EXPR:
    case FLOOR_DIV_EXPR:
    case CEIL_DIV_EXPR:
    case EXACT_DIV_EXPR:
      /* In general we can't negate A / B, because if A is INT_MIN and
         B is 1, we may turn this into INT_MIN / -1 which is undefined
         and actually traps on some architectures.  But if overflow is
         undefined, we can negate, because - (INT_MIN / 1) is an
         overflow.  */
      if (!INTEGRAL_TYPE_P (type) || TYPE_OVERFLOW_UNDEFINED (type))
        {
          const char * const warnmsg = G_("assuming signed overflow does not "
                                          "occur when negating a division");
          tem = TREE_OPERAND (t, 1);
          if (negate_expr_p (tem))
            {
              if (INTEGRAL_TYPE_P (type)
                  && (TREE_CODE (tem) != INTEGER_CST
                      || integer_onep (tem)))
                fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_MISC);
              return fold_build2_loc (loc, TREE_CODE (t), type,
                                      TREE_OPERAND (t, 0), negate_expr (tem));
            }
          tem = TREE_OPERAND (t, 0);
          if (negate_expr_p (tem))
            {
              if (INTEGRAL_TYPE_P (type)
                  && (TREE_CODE (tem) != INTEGER_CST
                      || tree_int_cst_equal (tem, TYPE_MIN_VALUE (type))))
                fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_MISC);
              return fold_build2_loc (loc, TREE_CODE (t), type,
                                      negate_expr (tem), TREE_OPERAND (t, 1));
            }
        }
      break;

    case NOP_EXPR:
      /* Convert -((double)float) into (double)(-float).  */
      if (TREE_CODE (type) == REAL_TYPE)
        {
          tem = strip_float_extensions (t);
          if (tem != t && negate_expr_p (tem))
            return fold_convert_loc (loc, type, negate_expr (tem));
        }
      break;

    case CALL_EXPR:
      /* Negate -f(x) as f(-x).  */
      if (negate_mathfn_p (builtin_mathfn_code (t))
          && negate_expr_p (CALL_EXPR_ARG (t, 0)))
        {
          tree fndecl, arg;

          fndecl = get_callee_fndecl (t);
          arg = negate_expr (CALL_EXPR_ARG (t, 0));
          return build_call_expr_loc (loc, fndecl, 1, arg);
        }
      break;

    case RSHIFT_EXPR:
      /* Optimize -((int)x >> 31) into (unsigned)x >> 31.  */
      if (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST)
        {
          tree op1 = TREE_OPERAND (t, 1);
          if (TREE_INT_CST_HIGH (op1) == 0
              && (unsigned HOST_WIDE_INT) (TYPE_PRECISION (type) - 1)
                 == TREE_INT_CST_LOW (op1))
            {
              tree ntype = TYPE_UNSIGNED (type)
                           ? signed_type_for (type)
                           : unsigned_type_for (type);
              tree temp = fold_convert_loc (loc, ntype, TREE_OPERAND (t, 0));
              temp = fold_build2_loc (loc, RSHIFT_EXPR, ntype, temp, op1);
              return fold_convert_loc (loc, type, temp);
            }
        }
      break;

    default:
      break;
    }

  return NULL_TREE;
}
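
/* Quick sanity check (hypothetical, compiled out) of two identities
   used above, valid for 2's complement int and assuming an arithmetic
   right shift; the INT_MIN/INT_MAX corner cases are ignored here.  */
#if 0
static int
negate_identities (int a)
{
  int ok1 = -(~a) == a + 1;                     /* - (~A) -> A + 1 */
  int ok2 = (unsigned int) -(a >> 31)
            == (unsigned int) a >> 31;          /* -((int)x >> 31) */
  return ok1 && ok2;
}
#endif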
/* Like fold_negate_expr, but return a NEGATE_EXPR tree, if T can not be
   negated in a simpler way.  Also allow for T to be NULL_TREE, in which case
   return NULL_TREE.  */

static tree
negate_expr (tree t)
{
  tree type, tem;
  location_t loc;

  if (t == NULL_TREE)
    return NULL_TREE;

  loc = EXPR_LOCATION (t);
  type = TREE_TYPE (t);
  STRIP_SIGN_NOPS (t);

  tem = fold_negate_expr (loc, t);
  if (!tem)
    {
      tem = build1 (NEGATE_EXPR, TREE_TYPE (t), t);
      SET_EXPR_LOCATION (tem, loc);
    }
  return fold_convert_loc (loc, type, tem);
}
/* Split a tree IN into constant, literal and variable parts that could be
   combined with CODE to make IN.  "constant" means an expression with
   TREE_CONSTANT but that isn't an actual constant.  CODE must be a
   commutative arithmetic operation.  Store the constant part into *CONP,
   the literal in *LITP and return the variable part.  If a part isn't
   present, set it to null.  If the tree does not decompose in this way,
   return the entire tree as the variable part and the other parts as null.

   If CODE is PLUS_EXPR we also split trees that use MINUS_EXPR.  In that
   case, we negate an operand that was subtracted.  Except if it is a
   literal for which we use *MINUS_LITP instead.

   If NEGATE_P is true, we are negating all of IN, again except a literal
   for which we use *MINUS_LITP instead.

   If IN is itself a literal or constant, return it as appropriate.

   Note that we do not guarantee that any of the three values will be the
   same type as IN, but they will have the same signedness and mode.  */

static tree
split_tree (tree in, enum tree_code code, tree *conp, tree *litp,
            tree *minus_litp, int negate_p)
{
  tree var = 0;

  *conp = 0;
  *litp = 0;
  *minus_litp = 0;

  /* Strip any conversions that don't change the machine mode or signedness.  */
  STRIP_SIGN_NOPS (in);

  if (TREE_CODE (in) == INTEGER_CST || TREE_CODE (in) == REAL_CST
      || TREE_CODE (in) == FIXED_CST)
    *litp = in;
  else if (TREE_CODE (in) == code
           || ((! FLOAT_TYPE_P (TREE_TYPE (in)) || flag_associative_math)
               && ! SAT_FIXED_POINT_TYPE_P (TREE_TYPE (in))
               /* We can associate addition and subtraction together (even
                  though the C standard doesn't say so) for integers because
                  the value is not affected.  For reals, the value might be
                  affected, so we can't.  */
               && ((code == PLUS_EXPR && TREE_CODE (in) == MINUS_EXPR)
                   || (code == MINUS_EXPR && TREE_CODE (in) == PLUS_EXPR))))
    {
      tree op0 = TREE_OPERAND (in, 0);
      tree op1 = TREE_OPERAND (in, 1);
      int neg1_p = TREE_CODE (in) == MINUS_EXPR;
      int neg_litp_p = 0, neg_conp_p = 0, neg_var_p = 0;

      /* First see if either of the operands is a literal, then a constant.  */
      if (TREE_CODE (op0) == INTEGER_CST || TREE_CODE (op0) == REAL_CST
          || TREE_CODE (op0) == FIXED_CST)
        *litp = op0, op0 = 0;
      else if (TREE_CODE (op1) == INTEGER_CST || TREE_CODE (op1) == REAL_CST
               || TREE_CODE (op1) == FIXED_CST)
        *litp = op1, neg_litp_p = neg1_p, op1 = 0;

      if (op0 != 0 && TREE_CONSTANT (op0))
        *conp = op0, op0 = 0;
      else if (op1 != 0 && TREE_CONSTANT (op1))
        *conp = op1, neg_conp_p = neg1_p, op1 = 0;

      /* If we haven't dealt with either operand, this is not a case we can
         decompose.  Otherwise, VAR is either of the ones remaining, if any.  */
      if (op0 != 0 && op1 != 0)
        var = in;
      else if (op0 != 0)
        var = op0;
      else
        var = op1, neg_var_p = neg1_p;

      /* Now do any needed negations.  */
      if (neg_litp_p)
        *minus_litp = *litp, *litp = 0;
      if (neg_conp_p)
        *conp = negate_expr (*conp);
      if (neg_var_p)
        var = negate_expr (var);
    }
  else if (TREE_CONSTANT (in))
    *conp = in;
  else
    var = in;

  if (negate_p)
    {
      if (*litp)
        *minus_litp = *litp, *litp = 0;
      else if (*minus_litp)
        *litp = *minus_litp, *minus_litp = 0;
      *conp = negate_expr (*conp);
      var = negate_expr (var);
    }

  return var;
}
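
/* Hypothetical decomposition (compiled out): for IN = x + 5 and
   CODE = PLUS_EXPR, *litp receives 5, *conp and *minus_litp stay null,
   and x is returned as the variable part; for IN = x - 5 the literal
   goes to *minus_litp instead.  */
#if 0
tree con, lit, minus_lit;
tree var = split_tree (in, PLUS_EXPR, &con, &lit, &minus_lit, 0);
#endif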
/* Re-associate trees split by the above function.  T1 and T2 are
   either expressions to associate or null.  Return the new
   expression, if any.  LOC is the location of the new expression.  If
   we build an operation, do it in TYPE and with CODE.  */

static tree
associate_trees (location_t loc, tree t1, tree t2, enum tree_code code, tree type)
{
  tree tem;

  if (t1 == 0)
    return t2;
  else if (t2 == 0)
    return t1;

  /* If either input is CODE, a PLUS_EXPR, or a MINUS_EXPR, don't
     try to fold this since we will have infinite recursion.  But do
     deal with any NEGATE_EXPRs.  */
  if (TREE_CODE (t1) == code || TREE_CODE (t2) == code
      || TREE_CODE (t1) == MINUS_EXPR || TREE_CODE (t2) == MINUS_EXPR)
    {
      if (code == PLUS_EXPR)
        {
          if (TREE_CODE (t1) == NEGATE_EXPR)
            tem = build2 (MINUS_EXPR, type, fold_convert_loc (loc, type, t2),
                          fold_convert_loc (loc, type, TREE_OPERAND (t1, 0)));
          else if (TREE_CODE (t2) == NEGATE_EXPR)
            tem = build2 (MINUS_EXPR, type, fold_convert_loc (loc, type, t1),
                          fold_convert_loc (loc, type, TREE_OPERAND (t2, 0)));
          else if (integer_zerop (t2))
            return fold_convert_loc (loc, type, t1);
        }
      else if (code == MINUS_EXPR)
        {
          if (integer_zerop (t2))
            return fold_convert_loc (loc, type, t1);
        }

      tem = build2 (code, type, fold_convert_loc (loc, type, t1),
                    fold_convert_loc (loc, type, t2));
      goto associate_trees_exit;
    }

  return fold_build2_loc (loc, code, type, fold_convert_loc (loc, type, t1),
                          fold_convert_loc (loc, type, t2));
 associate_trees_exit:
  protected_set_expr_location (tem, loc);
  return tem;
}
/* Check whether TYPE1 and TYPE2 are equivalent integer types, suitable
   for use in int_const_binop, size_binop and size_diffop.  */

static bool
int_binop_types_match_p (enum tree_code code, const_tree type1, const_tree type2)
{
  if (TREE_CODE (type1) != INTEGER_TYPE && !POINTER_TYPE_P (type1))
    return false;
  if (TREE_CODE (type2) != INTEGER_TYPE && !POINTER_TYPE_P (type2))
    return false;

  switch (code)
    {
    case LSHIFT_EXPR:
    case RSHIFT_EXPR:
    case LROTATE_EXPR:
    case RROTATE_EXPR:
      return true;

    default:
      break;
    }

  return TYPE_UNSIGNED (type1) == TYPE_UNSIGNED (type2)
         && TYPE_PRECISION (type1) == TYPE_PRECISION (type2)
         && TYPE_MODE (type1) == TYPE_MODE (type2);
}
/* Combine two integer constants ARG1 and ARG2 under operation CODE
   to produce a new constant.  Return NULL_TREE if we don't know how
   to evaluate CODE at compile-time.

   If NOTRUNC is nonzero, do not truncate the result to fit the data type.  */

tree
int_const_binop (enum tree_code code, const_tree arg1, const_tree arg2, int notrunc)
{
  double_int op1, op2, res, tmp;
  tree t;
  tree type = TREE_TYPE (arg1);
  bool uns = TYPE_UNSIGNED (type);
  bool is_sizetype
    = (TREE_CODE (type) == INTEGER_TYPE && TYPE_IS_SIZETYPE (type));
  bool overflow = false;

  op1 = tree_to_double_int (arg1);
  op2 = tree_to_double_int (arg2);

  switch (code)
    {
    case BIT_IOR_EXPR:
      res = double_int_ior (op1, op2);
      break;

    case BIT_XOR_EXPR:
      res = double_int_xor (op1, op2);
      break;

    case BIT_AND_EXPR:
      res = double_int_and (op1, op2);
      break;

    case RSHIFT_EXPR:
      res = double_int_rshift (op1, double_int_to_shwi (op2),
                               TYPE_PRECISION (type), !uns);
      break;

    case LSHIFT_EXPR:
      /* It's unclear from the C standard whether shifts can overflow.
         The following code ignores overflow; perhaps a C standard
         interpretation ruling is needed.  */
      res = double_int_lshift (op1, double_int_to_shwi (op2),
                               TYPE_PRECISION (type), !uns);
      break;

    case RROTATE_EXPR:
      res = double_int_rrotate (op1, double_int_to_shwi (op2),
                                TYPE_PRECISION (type));
      break;

    case LROTATE_EXPR:
      res = double_int_lrotate (op1, double_int_to_shwi (op2),
                                TYPE_PRECISION (type));
      break;

    case PLUS_EXPR:
      overflow = add_double (op1.low, op1.high, op2.low, op2.high,
                             &res.low, &res.high);
      break;

    case MINUS_EXPR:
      neg_double (op2.low, op2.high, &res.low, &res.high);
      add_double (op1.low, op1.high, res.low, res.high,
                  &res.low, &res.high);
      overflow = OVERFLOW_SUM_SIGN (res.high, op2.high, op1.high);
      break;

    case MULT_EXPR:
      overflow = mul_double (op1.low, op1.high, op2.low, op2.high,
                             &res.low, &res.high);
      break;

    case TRUNC_DIV_EXPR:
    case FLOOR_DIV_EXPR: case CEIL_DIV_EXPR:
    case EXACT_DIV_EXPR:
      /* This is a shortcut for a common special case.  */
      if (op2.high == 0 && (HOST_WIDE_INT) op2.low > 0
          && !TREE_OVERFLOW (arg1)
          && !TREE_OVERFLOW (arg2)
          && op1.high == 0 && (HOST_WIDE_INT) op1.low >= 0)
        {
          if (code == CEIL_DIV_EXPR)
            op1.low += op2.low - 1;

          res.low = op1.low / op2.low, res.high = 0;
          break;
        }

      /* ... fall through ...  */

    case ROUND_DIV_EXPR:
      if (double_int_zero_p (op2))
        return NULL_TREE;
      if (double_int_one_p (op2))
        {
          res = op1;
          break;
        }
      if (double_int_equal_p (op1, op2)
          && ! double_int_zero_p (op1))
        {
          res = double_int_one;
          break;
        }
      overflow = div_and_round_double (code, uns,
                                       op1.low, op1.high, op2.low, op2.high,
                                       &res.low, &res.high,
                                       &tmp.low, &tmp.high);
      break;

    case TRUNC_MOD_EXPR:
    case FLOOR_MOD_EXPR: case CEIL_MOD_EXPR:
      /* This is a shortcut for a common special case.  */
      if (op2.high == 0 && (HOST_WIDE_INT) op2.low > 0
          && !TREE_OVERFLOW (arg1)
          && !TREE_OVERFLOW (arg2)
          && op1.high == 0 && (HOST_WIDE_INT) op1.low >= 0)
        {
          if (code == CEIL_MOD_EXPR)
            op1.low += op2.low - 1;
          res.low = op1.low % op2.low, res.high = 0;
          break;
        }

      /* ... fall through ...  */

    case ROUND_MOD_EXPR:
      if (double_int_zero_p (op2))
        return NULL_TREE;
      overflow = div_and_round_double (code, uns,
                                       op1.low, op1.high, op2.low, op2.high,
                                       &tmp.low, &tmp.high,
                                       &res.low, &res.high);
      break;

    case MIN_EXPR:
      res = double_int_min (op1, op2, uns);
      break;

    case MAX_EXPR:
      res = double_int_max (op1, op2, uns);
      break;

    default:
      return NULL_TREE;
    }

  if (notrunc)
    {
      t = build_int_cst_wide (TREE_TYPE (arg1), res.low, res.high);

      /* Propagate overflow flags ourselves.  */
      if (((!uns || is_sizetype) && overflow)
          | TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2))
        {
          t = copy_node (t);
          TREE_OVERFLOW (t) = 1;
        }
    }
  else
    t = force_fit_type_double (TREE_TYPE (arg1), res, 1,
                               ((!uns || is_sizetype) && overflow)
                               | TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2));

  return t;
}
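
/* Hypothetical use (compiled out): folding the constant addition
   2 + 3 into a single INTEGER_CST 5.  With notrunc == 0 the result is
   fitted to the type and overflow bits are propagated.  */
#if 0
tree two = build_int_cst (integer_type_node, 2);
tree three = build_int_cst (integer_type_node, 3);
tree five = int_const_binop (PLUS_EXPR, two, three, 0);
#endif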
/* Combine two constants ARG1 and ARG2 under operation CODE to produce a new
   constant.  We assume ARG1 and ARG2 have the same data type, or at least
   are the same kind of constant and the same machine mode.  Return zero if
   combining the constants is not allowed in the current operating mode.  */

static tree
const_binop (enum tree_code code, tree arg1, tree arg2)
{
  /* Sanity check for the recursive cases.  */
  if (!arg1 || !arg2)
    return NULL_TREE;

  STRIP_NOPS (arg1);
  STRIP_NOPS (arg2);

  if (TREE_CODE (arg1) == INTEGER_CST)
    return int_const_binop (code, arg1, arg2, 0);

  if (TREE_CODE (arg1) == REAL_CST)
    {
      enum machine_mode mode;
      REAL_VALUE_TYPE d1;
      REAL_VALUE_TYPE d2;
      REAL_VALUE_TYPE value;
      REAL_VALUE_TYPE result;
      bool inexact;
      tree t, type;

      /* The following codes are handled by real_arithmetic.  */
      switch (code)
        {
        case PLUS_EXPR:
        case MINUS_EXPR:
        case MULT_EXPR:
        case RDIV_EXPR:
        case MIN_EXPR:
        case MAX_EXPR:
          break;

        default:
          return NULL_TREE;
        }

      d1 = TREE_REAL_CST (arg1);
      d2 = TREE_REAL_CST (arg2);

      type = TREE_TYPE (arg1);
      mode = TYPE_MODE (type);

      /* Don't perform operation if we honor signaling NaNs and
         either operand is a NaN.  */
      if (HONOR_SNANS (mode)
          && (REAL_VALUE_ISNAN (d1) || REAL_VALUE_ISNAN (d2)))
        return NULL_TREE;

      /* Don't perform operation if it would raise a division
         by zero exception.  */
      if (code == RDIV_EXPR
          && REAL_VALUES_EQUAL (d2, dconst0)
          && (flag_trapping_math || ! MODE_HAS_INFINITIES (mode)))
        return NULL_TREE;

      /* If either operand is a NaN, just return it.  Otherwise, set up
         for floating-point trap; we return an overflow.  */
      if (REAL_VALUE_ISNAN (d1))
        return arg1;
      else if (REAL_VALUE_ISNAN (d2))
        return arg2;

      inexact = real_arithmetic (&value, code, &d1, &d2);
      real_convert (&result, mode, &value);

      /* Don't constant fold this floating point operation if
         the result has overflowed and flag_trapping_math.  */
      if (flag_trapping_math
          && MODE_HAS_INFINITIES (mode)
          && REAL_VALUE_ISINF (result)
          && !REAL_VALUE_ISINF (d1)
          && !REAL_VALUE_ISINF (d2))
        return NULL_TREE;

      /* Don't constant fold this floating point operation if the
         result may depend upon the run-time rounding mode and
         flag_rounding_math is set, or if GCC's software emulation
         is unable to accurately represent the result.  */
      if ((flag_rounding_math
           || (MODE_COMPOSITE_P (mode) && !flag_unsafe_math_optimizations))
          && (inexact || !real_identical (&result, &value)))
        return NULL_TREE;

      t = build_real (type, result);

      TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2);
      return t;
    }
  if (TREE_CODE (arg1) == FIXED_CST)
    {
      FIXED_VALUE_TYPE f1;
      FIXED_VALUE_TYPE f2;
      FIXED_VALUE_TYPE result;
      tree t, type;
      int sat_p;
      bool overflow_p;

      /* The following codes are handled by fixed_arithmetic.  */
      switch (code)
        {
        case PLUS_EXPR:
        case MINUS_EXPR:
        case MULT_EXPR:
        case TRUNC_DIV_EXPR:
          f2 = TREE_FIXED_CST (arg2);
          break;

        case LSHIFT_EXPR:
        case RSHIFT_EXPR:
          f2.data.high = TREE_INT_CST_HIGH (arg2);
          f2.data.low = TREE_INT_CST_LOW (arg2);
          f2.mode = SImode;
          break;

        default:
          return NULL_TREE;
        }

      f1 = TREE_FIXED_CST (arg1);
      type = TREE_TYPE (arg1);
      sat_p = TYPE_SATURATING (type);
      overflow_p = fixed_arithmetic (&result, code, &f1, &f2, sat_p);
      t = build_fixed (type, result);
      /* Propagate overflow flags.  */
      if (overflow_p | TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2))
        TREE_OVERFLOW (t) = 1;
      return t;
    }
  if (TREE_CODE (arg1) == COMPLEX_CST)
    {
      tree type = TREE_TYPE (arg1);
      tree r1 = TREE_REALPART (arg1);
      tree i1 = TREE_IMAGPART (arg1);
      tree r2 = TREE_REALPART (arg2);
      tree i2 = TREE_IMAGPART (arg2);
      tree real, imag;

      switch (code)
        {
        case PLUS_EXPR:
        case MINUS_EXPR:
          real = const_binop (code, r1, r2);
          imag = const_binop (code, i1, i2);
          break;

        case MULT_EXPR:
          if (COMPLEX_FLOAT_TYPE_P (type))
            return do_mpc_arg2 (arg1, arg2, type,
                                /* do_nonfinite= */ folding_initializer,
                                mpc_mul);

          real = const_binop (MINUS_EXPR,
                              const_binop (MULT_EXPR, r1, r2),
                              const_binop (MULT_EXPR, i1, i2));
          imag = const_binop (PLUS_EXPR,
                              const_binop (MULT_EXPR, r1, i2),
                              const_binop (MULT_EXPR, i1, r2));
          break;

        case RDIV_EXPR:
          if (COMPLEX_FLOAT_TYPE_P (type))
            return do_mpc_arg2 (arg1, arg2, type,
                                /* do_nonfinite= */ folding_initializer,
                                mpc_div);
          /* Fallthru ...  */
        case TRUNC_DIV_EXPR:
        case CEIL_DIV_EXPR:
        case FLOOR_DIV_EXPR:
        case ROUND_DIV_EXPR:
          if (flag_complex_method == 0)
            {
              /* Keep this algorithm in sync with
                 tree-complex.c:expand_complex_div_straight().

                 Expand complex division to scalars, straightforward algorithm.
                 a / b = ((ar*br + ai*bi)/t) + i((ai*br - ar*bi)/t)
                 t = br*br + bi*bi  */
              tree magsquared
                = const_binop (PLUS_EXPR,
                               const_binop (MULT_EXPR, r2, r2),
                               const_binop (MULT_EXPR, i2, i2));
              tree t1
                = const_binop (PLUS_EXPR,
                               const_binop (MULT_EXPR, r1, r2),
                               const_binop (MULT_EXPR, i1, i2));
              tree t2
                = const_binop (MINUS_EXPR,
                               const_binop (MULT_EXPR, i1, r2),
                               const_binop (MULT_EXPR, r1, i2));

              real = const_binop (code, t1, magsquared);
              imag = const_binop (code, t2, magsquared);
            }
          else
            {
              /* Keep this algorithm in sync with
                 tree-complex.c:expand_complex_div_wide().

                 Expand complex division to scalars, modified algorithm to
                 minimize overflow with wide input ranges.  */
              tree compare = fold_build2 (LT_EXPR, boolean_type_node,
                                          fold_abs_const (r2, TREE_TYPE (type)),
                                          fold_abs_const (i2, TREE_TYPE (type)));

              if (integer_nonzerop (compare))
                {
                  /* In the TRUE branch, we compute
                     ratio = br/bi;
                     div = (br * ratio) + bi;
                     tr = (ar * ratio) + ai;
                     ti = (ai * ratio) - ar;
                     tr = tr / div;
                     ti = ti / div;  */
                  tree ratio = const_binop (code, r2, i2);
                  tree div = const_binop (PLUS_EXPR, i2,
                                          const_binop (MULT_EXPR, r2, ratio));
                  real = const_binop (MULT_EXPR, r1, ratio);
                  real = const_binop (PLUS_EXPR, real, i1);
                  real = const_binop (code, real, div);

                  imag = const_binop (MULT_EXPR, i1, ratio);
                  imag = const_binop (MINUS_EXPR, imag, r1);
                  imag = const_binop (code, imag, div);
                }
              else
                {
                  /* In the FALSE branch, we compute
                     ratio = bi/br;
                     div = (bi * ratio) + br;
                     tr = (ai * ratio) + ar;
                     ti = ai - (ar * ratio);
                     tr = tr / div;
                     ti = ti / div;  */
                  tree ratio = const_binop (code, i2, r2);
                  tree div = const_binop (PLUS_EXPR, r2,
                                          const_binop (MULT_EXPR, i2, ratio));

                  real = const_binop (MULT_EXPR, i1, ratio);
                  real = const_binop (PLUS_EXPR, real, r1);
                  real = const_binop (code, real, div);

                  imag = const_binop (MULT_EXPR, r1, ratio);
                  imag = const_binop (MINUS_EXPR, i1, imag);
                  imag = const_binop (code, imag, div);
                }
            }
          break;

        default:
          return NULL_TREE;
        }

      if (real && imag)
        return build_complex (type, real, imag);
    }
  if (TREE_CODE (arg1) == VECTOR_CST)
    {
      tree type = TREE_TYPE (arg1);
      int count = TYPE_VECTOR_SUBPARTS (type), i;
      tree elements1, elements2, list = NULL_TREE;

      if (TREE_CODE (arg2) != VECTOR_CST)
        return NULL_TREE;

      elements1 = TREE_VECTOR_CST_ELTS (arg1);
      elements2 = TREE_VECTOR_CST_ELTS (arg2);

      for (i = 0; i < count; i++)
        {
          tree elem1, elem2, elem;

          /* The trailing elements can be empty and should be treated as 0.  */
          if (!elements1)
            elem1 = fold_convert_const (NOP_EXPR, TREE_TYPE (type),
                                        integer_zero_node);
          else
            {
              elem1 = TREE_VALUE (elements1);
              elements1 = TREE_CHAIN (elements1);
            }

          if (!elements2)
            elem2 = fold_convert_const (NOP_EXPR, TREE_TYPE (type),
                                        integer_zero_node);
          else
            {
              elem2 = TREE_VALUE (elements2);
              elements2 = TREE_CHAIN (elements2);
            }

          elem = const_binop (code, elem1, elem2);

          /* It is possible that const_binop cannot handle the given
             code and returns NULL_TREE.  */
          if (elem == NULL_TREE)
            return NULL_TREE;

          list = tree_cons (NULL_TREE, elem, list);
        }

      return build_vector (type, nreverse (list));
    }
  return NULL_TREE;
}
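
/* Worked example (hypothetical, compiled out) of the straightforward
   complex-division algorithm above: for (3 + 2i) / (1 + 1i),
   t = 1*1 + 1*1 = 2, tr = (3*1 + 2*1) / 2 = 2.5 and
   ti = (2*1 - 3*1) / 2 = -0.5; indeed (2.5 - 0.5i) * (1 + 1i) = 3 + 2i.  */
#if 0
static void
complex_div_straight_example (void)
{
  double ar = 3.0, ai = 2.0, br = 1.0, bi = 1.0;
  double t = br * br + bi * bi;         /* 2.0 */
  double tr = (ar * br + ai * bi) / t;  /* 2.5 */
  double ti = (ai * br - ar * bi) / t;  /* -0.5 */
  (void) tr; (void) ti;
}
#endif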
/* Create a size type INT_CST node with NUMBER sign extended.  KIND
   indicates which particular sizetype to create.  */

tree
size_int_kind (HOST_WIDE_INT number, enum size_type_kind kind)
{
  return build_int_cst (sizetype_tab[(int) kind], number);
}
/* Combine operands ARG0 and ARG1 with arithmetic operation CODE.  CODE
   is a tree code.  The type of the result is taken from the operands.
   Both must be equivalent integer types, ala int_binop_types_match_p.
   If the operands are constant, so is the result.  */

tree
size_binop_loc (location_t loc, enum tree_code code, tree arg0, tree arg1)
{
  tree type = TREE_TYPE (arg0);

  if (arg0 == error_mark_node || arg1 == error_mark_node)
    return error_mark_node;

  gcc_assert (int_binop_types_match_p (code, TREE_TYPE (arg0),
                                       TREE_TYPE (arg1)));

  /* Handle the special case of two integer constants faster.  */
  if (TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
    {
      /* And some specific cases even faster than that.  */
      if (code == PLUS_EXPR)
        {
          if (integer_zerop (arg0) && !TREE_OVERFLOW (arg0))
            return arg1;
          if (integer_zerop (arg1) && !TREE_OVERFLOW (arg1))
            return arg0;
        }
      else if (code == MINUS_EXPR)
        {
          if (integer_zerop (arg1) && !TREE_OVERFLOW (arg1))
            return arg0;
        }
      else if (code == MULT_EXPR)
        {
          if (integer_onep (arg0) && !TREE_OVERFLOW (arg0))
            return arg1;
        }

      /* Handle general case of two integer constants.  */
      return int_const_binop (code, arg0, arg1, 0);
    }

  return fold_build2_loc (loc, code, type, arg0, arg1);
}
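
/* Hypothetical use (compiled out): both operands carry sizetype, as
   int_binop_types_match_p demands, so the sum folds to the constant
   12 via the fast INTEGER_CST path above.  */
#if 0
tree off = size_binop (PLUS_EXPR, size_int (8), size_int (4));
#endif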
/* Given two values, either both of sizetype or both of bitsizetype,
   compute the difference between the two values.  Return the value
   in signed type corresponding to the type of the operands.  */

tree
size_diffop_loc (location_t loc, tree arg0, tree arg1)
{
  tree type = TREE_TYPE (arg0);
  tree ctype;

  gcc_assert (int_binop_types_match_p (MINUS_EXPR, TREE_TYPE (arg0),
                                       TREE_TYPE (arg1)));

  /* If the type is already signed, just do the simple thing.  */
  if (!TYPE_UNSIGNED (type))
    return size_binop_loc (loc, MINUS_EXPR, arg0, arg1);

  if (type == sizetype)
    ctype = ssizetype;
  else if (type == bitsizetype)
    ctype = sbitsizetype;
  else
    ctype = signed_type_for (type);

  /* If either operand is not a constant, do the conversions to the signed
     type and subtract.  The hardware will do the right thing with any
     overflow in the subtraction.  */
  if (TREE_CODE (arg0) != INTEGER_CST || TREE_CODE (arg1) != INTEGER_CST)
    return size_binop_loc (loc, MINUS_EXPR,
                           fold_convert_loc (loc, ctype, arg0),
                           fold_convert_loc (loc, ctype, arg1));

  /* If ARG0 is larger than ARG1, subtract and return the result in CTYPE.
     Otherwise, subtract the other way, convert to CTYPE (we know that can't
     overflow) and negate (which can't either).  Special-case a result
     of zero while we're here.  */
  if (tree_int_cst_equal (arg0, arg1))
    return build_int_cst (ctype, 0);
  else if (tree_int_cst_lt (arg1, arg0))
    return fold_convert_loc (loc, ctype,
                             size_binop_loc (loc, MINUS_EXPR, arg0, arg1));
  else
    return size_binop_loc (loc, MINUS_EXPR, build_int_cst (ctype, 0),
                           fold_convert_loc (loc, ctype,
                                             size_binop_loc (loc,
                                                             MINUS_EXPR,
                                                             arg1, arg0)));
}
/* A subroutine of fold_convert_const handling conversions of an
   INTEGER_CST to another integer type.  */

static tree
fold_convert_const_int_from_int (tree type, const_tree arg1)
{
  tree t;

  /* Given an integer constant, make new constant with new type,
     appropriately sign-extended or truncated.  */
  t = force_fit_type_double (type, tree_to_double_int (arg1),
                             !POINTER_TYPE_P (TREE_TYPE (arg1)),
                             (TREE_INT_CST_HIGH (arg1) < 0
                              && (TYPE_UNSIGNED (type)
                                  < TYPE_UNSIGNED (TREE_TYPE (arg1))))
                             | TREE_OVERFLOW (arg1));

  return t;
}
/* A subroutine of fold_convert_const handling conversions a REAL_CST
   to an integer type.  */

static tree
fold_convert_const_int_from_real (enum tree_code code, tree type, const_tree arg1)
{
  int overflow = 0;
  tree t;

  /* The following code implements the floating point to integer
     conversion rules required by the Java Language Specification,
     that IEEE NaNs are mapped to zero and values that overflow
     the target precision saturate, i.e. values greater than
     INT_MAX are mapped to INT_MAX, and values less than INT_MIN
     are mapped to INT_MIN.  These semantics are allowed by the
     C and C++ standards that simply state that the behavior of
     FP-to-integer conversion is unspecified upon overflow.  */

  double_int val;
  REAL_VALUE_TYPE r;
  REAL_VALUE_TYPE x = TREE_REAL_CST (arg1);

  switch (code)
    {
    case FIX_TRUNC_EXPR:
      real_trunc (&r, VOIDmode, &x);
      break;

    default:
      gcc_unreachable ();
    }

  /* If R is NaN, return zero and show we have an overflow.  */
  if (REAL_VALUE_ISNAN (r))
    {
      overflow = 1;
      val = double_int_zero;
    }

  /* See if R is less than the lower bound or greater than the
     upper bound.  */

  if (! overflow)
    {
      tree lt = TYPE_MIN_VALUE (type);
      REAL_VALUE_TYPE l = real_value_from_int_cst (NULL_TREE, lt);
      if (REAL_VALUES_LESS (r, l))
        {
          overflow = 1;
          val = tree_to_double_int (lt);
        }
    }

  if (! overflow)
    {
      tree ut = TYPE_MAX_VALUE (type);
      if (ut)
        {
          REAL_VALUE_TYPE u = real_value_from_int_cst (NULL_TREE, ut);
          if (REAL_VALUES_LESS (u, r))
            {
              overflow = 1;
              val = tree_to_double_int (ut);
            }
        }
    }

  if (! overflow)
    real_to_integer2 ((HOST_WIDE_INT *) &val.low, &val.high, &r);

  t = force_fit_type_double (type, val, -1, overflow | TREE_OVERFLOW (arg1));
  return t;
}
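
/* A plain-C rendering (hypothetical, compiled out) of the saturating
   rules implemented above, using int and <limits.h> bounds; the real
   code does the bound checks in the target type's precision.  */
#if 0
#include <limits.h>
static int
saturating_fp_to_int (double r)
{
  if (r != r)                   /* NaN maps to zero.  */
    return 0;
  if (r < (double) INT_MIN)     /* Below the lower bound: saturate.  */
    return INT_MIN;
  if (r > (double) INT_MAX)     /* Above the upper bound: saturate.  */
    return INT_MAX;
  return (int) r;               /* In range: truncate toward zero.  */
}
#endif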
/* A subroutine of fold_convert_const handling conversions of a
   FIXED_CST to an integer type.  */

static tree
fold_convert_const_int_from_fixed (tree type, const_tree arg1)
{
  tree t;
  double_int temp, temp_trunc;
  unsigned int mode;

  /* Right shift FIXED_CST to temp by fbit.  */
  temp = TREE_FIXED_CST (arg1).data;
  mode = TREE_FIXED_CST (arg1).mode;
  if (GET_MODE_FBIT (mode) < 2 * HOST_BITS_PER_WIDE_INT)
    {
      temp = double_int_rshift (temp, GET_MODE_FBIT (mode),
                                HOST_BITS_PER_DOUBLE_INT,
                                SIGNED_FIXED_POINT_MODE_P (mode));

      /* Left shift temp to temp_trunc by fbit.  */
      temp_trunc = double_int_lshift (temp, GET_MODE_FBIT (mode),
                                      HOST_BITS_PER_DOUBLE_INT,
                                      SIGNED_FIXED_POINT_MODE_P (mode));
    }
  else
    {
      temp = double_int_zero;
      temp_trunc = double_int_zero;
    }

  /* If FIXED_CST is negative, we need to round the value toward 0.
     We do this by adding 1 to temp when the fractional bits are
     not zero.  */
  if (SIGNED_FIXED_POINT_MODE_P (mode)
      && double_int_negative_p (temp_trunc)
      && !double_int_equal_p (TREE_FIXED_CST (arg1).data, temp_trunc))
    temp = double_int_add (temp, double_int_one);

  /* Given a fixed-point constant, make new constant with new type,
     appropriately sign-extended or truncated.  */
  t = force_fit_type_double (type, temp, -1,
                             (double_int_negative_p (temp)
                              && (TYPE_UNSIGNED (type)
                                  < TYPE_UNSIGNED (TREE_TYPE (arg1))))
                             | TREE_OVERFLOW (arg1));

  return t;
}
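
/* Numeric sketch (hypothetical, compiled out) of the rounding step
   above: -2.5 in a signed fixed-point format with fbit == 4 is the
   integer -40; an arithmetic right shift floors it to -3, and the
   "add 1 when fractional bits remain" correction yields -2, i.e.
   rounding toward zero.  Assumes an arithmetic right shift.  */
#if 0
static int
fixed_to_int_example (int data, int fbit)
{
  int temp = data >> fbit;              /* floors negative values */
  int temp_trunc = temp * (1 << fbit);
  if (temp_trunc < 0 && data != temp_trunc)
    temp += 1;                          /* round toward zero instead */
  return temp;          /* fixed_to_int_example (-40, 4) == -2 */
}
#endif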
/* A subroutine of fold_convert_const handling conversions a REAL_CST
   to another floating point type.  */

static tree
fold_convert_const_real_from_real (tree type, const_tree arg1)
{
  REAL_VALUE_TYPE value;
  tree t;

  real_convert (&value, TYPE_MODE (type), &TREE_REAL_CST (arg1));
  t = build_real (type, value);

  /* If converting an infinity or NAN to a representation that doesn't
     have one, set the overflow bit so that we can produce some kind of
     error message at the appropriate point if necessary.  It's not the
     most user-friendly message, but it's better than nothing.  */
  if (REAL_VALUE_ISINF (TREE_REAL_CST (arg1))
      && !MODE_HAS_INFINITIES (TYPE_MODE (type)))
    TREE_OVERFLOW (t) = 1;
  else if (REAL_VALUE_ISNAN (TREE_REAL_CST (arg1))
           && !MODE_HAS_NANS (TYPE_MODE (type)))
    TREE_OVERFLOW (t) = 1;
  /* Regular overflow, conversion produced an infinity in a mode that
     can't represent them.  */
  else if (!MODE_HAS_INFINITIES (TYPE_MODE (type))
           && REAL_VALUE_ISINF (value)
           && !REAL_VALUE_ISINF (TREE_REAL_CST (arg1)))
    TREE_OVERFLOW (t) = 1;
  else
    TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1);
  return t;
}
/* A subroutine of fold_convert_const handling conversions a FIXED_CST
   to a floating point type.  */

static tree
fold_convert_const_real_from_fixed (tree type, const_tree arg1)
{
  REAL_VALUE_TYPE value;
  tree t;

  real_convert_from_fixed (&value, TYPE_MODE (type), &TREE_FIXED_CST (arg1));
  t = build_real (type, value);

  TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1);
  return t;
}

/* A subroutine of fold_convert_const handling conversions a FIXED_CST
   to another fixed-point type.  */

static tree
fold_convert_const_fixed_from_fixed (tree type, const_tree arg1)
{
  FIXED_VALUE_TYPE value;
  tree t;
  bool overflow_p;

  overflow_p = fixed_convert (&value, TYPE_MODE (type), &TREE_FIXED_CST (arg1),
                              TYPE_SATURATING (type));
  t = build_fixed (type, value);

  /* Propagate overflow flags.  */
  if (overflow_p | TREE_OVERFLOW (arg1))
    TREE_OVERFLOW (t) = 1;
  return t;
}

/* A subroutine of fold_convert_const handling conversions an INTEGER_CST
   to a fixed-point type.  */

static tree
fold_convert_const_fixed_from_int (tree type, const_tree arg1)
{
  FIXED_VALUE_TYPE value;
  tree t;
  bool overflow_p;

  overflow_p = fixed_convert_from_int (&value, TYPE_MODE (type),
                                       TREE_INT_CST (arg1),
                                       TYPE_UNSIGNED (TREE_TYPE (arg1)),
                                       TYPE_SATURATING (type));
  t = build_fixed (type, value);

  /* Propagate overflow flags.  */
  if (overflow_p | TREE_OVERFLOW (arg1))
    TREE_OVERFLOW (t) = 1;
  return t;
}

/* A subroutine of fold_convert_const handling conversions a REAL_CST
   to a fixed-point type.  */

static tree
fold_convert_const_fixed_from_real (tree type, const_tree arg1)
{
  FIXED_VALUE_TYPE value;
  tree t;
  bool overflow_p;

  overflow_p = fixed_convert_from_real (&value, TYPE_MODE (type),
                                        &TREE_REAL_CST (arg1),
                                        TYPE_SATURATING (type));
  t = build_fixed (type, value);

  /* Propagate overflow flags.  */
  if (overflow_p | TREE_OVERFLOW (arg1))
    TREE_OVERFLOW (t) = 1;
  return t;
}
/* Attempt to fold type conversion operation CODE of expression ARG1 to
   type TYPE.  If no simplification can be done return NULL_TREE.  */

static tree
fold_convert_const (enum tree_code code, tree type, tree arg1)
{
  if (TREE_TYPE (arg1) == type)
    return arg1;

  if (POINTER_TYPE_P (type) || INTEGRAL_TYPE_P (type)
      || TREE_CODE (type) == OFFSET_TYPE)
    {
      if (TREE_CODE (arg1) == INTEGER_CST)
        return fold_convert_const_int_from_int (type, arg1);
      else if (TREE_CODE (arg1) == REAL_CST)
        return fold_convert_const_int_from_real (code, type, arg1);
      else if (TREE_CODE (arg1) == FIXED_CST)
        return fold_convert_const_int_from_fixed (type, arg1);
    }
  else if (TREE_CODE (type) == REAL_TYPE)
    {
      if (TREE_CODE (arg1) == INTEGER_CST)
        return build_real_from_int_cst (type, arg1);
      else if (TREE_CODE (arg1) == REAL_CST)
        return fold_convert_const_real_from_real (type, arg1);
      else if (TREE_CODE (arg1) == FIXED_CST)
        return fold_convert_const_real_from_fixed (type, arg1);
    }
  else if (TREE_CODE (type) == FIXED_POINT_TYPE)
    {
      if (TREE_CODE (arg1) == FIXED_CST)
        return fold_convert_const_fixed_from_fixed (type, arg1);
      else if (TREE_CODE (arg1) == INTEGER_CST)
        return fold_convert_const_fixed_from_int (type, arg1);
      else if (TREE_CODE (arg1) == REAL_CST)
        return fold_convert_const_fixed_from_real (type, arg1);
    }
  return NULL_TREE;
}
/* Construct a vector of zero elements of vector type TYPE.  */

static tree
build_zero_vector (tree type)
{
  tree elem, list;
  int i, units;

  elem = fold_convert_const (NOP_EXPR, TREE_TYPE (type), integer_zero_node);
  units = TYPE_VECTOR_SUBPARTS (type);

  list = NULL_TREE;
  for (i = 0; i < units; i++)
    list = tree_cons (NULL_TREE, elem, list);
  return build_vector (type, list);
}
/* Returns true if ARG is convertible to TYPE using a NOP_EXPR.  */

bool
fold_convertible_p (const_tree type, const_tree arg)
{
  tree orig = TREE_TYPE (arg);

  if (type == orig)
    return true;

  if (TREE_CODE (arg) == ERROR_MARK
      || TREE_CODE (type) == ERROR_MARK
      || TREE_CODE (orig) == ERROR_MARK)
    return false;

  if (TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (orig))
    return true;

  switch (TREE_CODE (type))
    {
    case INTEGER_TYPE: case ENUMERAL_TYPE: case BOOLEAN_TYPE:
    case POINTER_TYPE: case REFERENCE_TYPE:
    case OFFSET_TYPE:
      if (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig)
          || TREE_CODE (orig) == OFFSET_TYPE)
        return true;
      return (TREE_CODE (orig) == VECTOR_TYPE
              && tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (orig)));

    case REAL_TYPE:
    case FIXED_POINT_TYPE:
    case COMPLEX_TYPE:
    case VECTOR_TYPE:
    case VOID_TYPE:
      return TREE_CODE (type) == TREE_CODE (orig);

    default:
      return false;
    }
}
1852 /* Convert expression ARG to type TYPE. Used by the middle-end for
1853 simple conversions in preference to calling the front-end's convert. */
1855 tree
1856 fold_convert_loc (location_t loc, tree type, tree arg)
1858 tree orig = TREE_TYPE (arg);
1859 tree tem;
1861 if (type == orig)
1862 return arg;
1864 if (TREE_CODE (arg) == ERROR_MARK
1865 || TREE_CODE (type) == ERROR_MARK
1866 || TREE_CODE (orig) == ERROR_MARK)
1867 return error_mark_node;
1869 if (TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (orig))
1870 return fold_build1_loc (loc, NOP_EXPR, type, arg);
1872 switch (TREE_CODE (type))
1874 case POINTER_TYPE:
1875 case REFERENCE_TYPE:
1876 /* Handle conversions between pointers to different address spaces. */
1877 if (POINTER_TYPE_P (orig)
1878 && (TYPE_ADDR_SPACE (TREE_TYPE (type))
1879 != TYPE_ADDR_SPACE (TREE_TYPE (orig))))
1880 return fold_build1_loc (loc, ADDR_SPACE_CONVERT_EXPR, type, arg);
1881 /* fall through */
1883 case INTEGER_TYPE: case ENUMERAL_TYPE: case BOOLEAN_TYPE:
1884 case OFFSET_TYPE:
1885 if (TREE_CODE (arg) == INTEGER_CST)
1887 tem = fold_convert_const (NOP_EXPR, type, arg);
1888 if (tem != NULL_TREE)
1889 return tem;
1891 if (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig)
1892 || TREE_CODE (orig) == OFFSET_TYPE)
1893 return fold_build1_loc (loc, NOP_EXPR, type, arg);
1894 if (TREE_CODE (orig) == COMPLEX_TYPE)
1895 return fold_convert_loc (loc, type,
1896 fold_build1_loc (loc, REALPART_EXPR,
1897 TREE_TYPE (orig), arg));
1898 gcc_assert (TREE_CODE (orig) == VECTOR_TYPE
1899 && tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (orig)));
1900 return fold_build1_loc (loc, NOP_EXPR, type, arg);
1902 case REAL_TYPE:
1903 if (TREE_CODE (arg) == INTEGER_CST)
1905 tem = fold_convert_const (FLOAT_EXPR, type, arg);
1906 if (tem != NULL_TREE)
1907 return tem;
1909 else if (TREE_CODE (arg) == REAL_CST)
1911 tem = fold_convert_const (NOP_EXPR, type, arg);
1912 if (tem != NULL_TREE)
1913 return tem;
1915 else if (TREE_CODE (arg) == FIXED_CST)
1917 tem = fold_convert_const (FIXED_CONVERT_EXPR, type, arg);
1918 if (tem != NULL_TREE)
1919 return tem;
1922 switch (TREE_CODE (orig))
1924 case INTEGER_TYPE:
1925 case BOOLEAN_TYPE: case ENUMERAL_TYPE:
1926 case POINTER_TYPE: case REFERENCE_TYPE:
1927 return fold_build1_loc (loc, FLOAT_EXPR, type, arg);
1929 case REAL_TYPE:
1930 return fold_build1_loc (loc, NOP_EXPR, type, arg);
1932 case FIXED_POINT_TYPE:
1933 return fold_build1_loc (loc, FIXED_CONVERT_EXPR, type, arg);
1935 case COMPLEX_TYPE:
1936 tem = fold_build1_loc (loc, REALPART_EXPR, TREE_TYPE (orig), arg);
1937 return fold_convert_loc (loc, type, tem);
1939 default:
1940 gcc_unreachable ();
1943 case FIXED_POINT_TYPE:
1944 if (TREE_CODE (arg) == FIXED_CST || TREE_CODE (arg) == INTEGER_CST
1945 || TREE_CODE (arg) == REAL_CST)
1947 tem = fold_convert_const (FIXED_CONVERT_EXPR, type, arg);
1948 if (tem != NULL_TREE)
1949 goto fold_convert_exit;
1952 switch (TREE_CODE (orig))
1954 case FIXED_POINT_TYPE:
1955 case INTEGER_TYPE:
1956 case ENUMERAL_TYPE:
1957 case BOOLEAN_TYPE:
1958 case REAL_TYPE:
1959 return fold_build1_loc (loc, FIXED_CONVERT_EXPR, type, arg);
1961 case COMPLEX_TYPE:
1962 tem = fold_build1_loc (loc, REALPART_EXPR, TREE_TYPE (orig), arg);
1963 return fold_convert_loc (loc, type, tem);
1965 default:
1966 gcc_unreachable ();
1969 case COMPLEX_TYPE:
1970 switch (TREE_CODE (orig))
1972 case INTEGER_TYPE:
1973 case BOOLEAN_TYPE: case ENUMERAL_TYPE:
1974 case POINTER_TYPE: case REFERENCE_TYPE:
1975 case REAL_TYPE:
1976 case FIXED_POINT_TYPE:
1977 return fold_build2_loc (loc, COMPLEX_EXPR, type,
1978 fold_convert_loc (loc, TREE_TYPE (type), arg),
1979 fold_convert_loc (loc, TREE_TYPE (type),
1980 integer_zero_node));
1981 case COMPLEX_TYPE:
1983 tree rpart, ipart;
1985 if (TREE_CODE (arg) == COMPLEX_EXPR)
1987 rpart = fold_convert_loc (loc, TREE_TYPE (type),
1988 TREE_OPERAND (arg, 0));
1989 ipart = fold_convert_loc (loc, TREE_TYPE (type),
1990 TREE_OPERAND (arg, 1));
1991 return fold_build2_loc (loc, COMPLEX_EXPR, type, rpart, ipart);
1994 arg = save_expr (arg);
1995 rpart = fold_build1_loc (loc, REALPART_EXPR, TREE_TYPE (orig), arg);
1996 ipart = fold_build1_loc (loc, IMAGPART_EXPR, TREE_TYPE (orig), arg);
1997 rpart = fold_convert_loc (loc, TREE_TYPE (type), rpart);
1998 ipart = fold_convert_loc (loc, TREE_TYPE (type), ipart);
1999 return fold_build2_loc (loc, COMPLEX_EXPR, type, rpart, ipart);
2002 default:
2003 gcc_unreachable ();
2006 case VECTOR_TYPE:
2007 if (integer_zerop (arg))
2008 return build_zero_vector (type);
2009 gcc_assert (tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (orig)));
2010 gcc_assert (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig)
2011 || TREE_CODE (orig) == VECTOR_TYPE);
2012 return fold_build1_loc (loc, VIEW_CONVERT_EXPR, type, arg);
2014 case VOID_TYPE:
2015 tem = fold_ignored_result (arg);
2016 if (TREE_CODE (tem) == MODIFY_EXPR)
2017 goto fold_convert_exit;
2018 return fold_build1_loc (loc, NOP_EXPR, type, tem);
2020 default:
2021 gcc_unreachable ();
2023 fold_convert_exit:
2024 protected_set_expr_location (tem, loc);
2025 return tem;
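A standalone illustration (plain C99, not GCC internals) of the COMPLEX_TYPE-to-real path above: the language itself specifies that converting a complex value to a real type keeps only the real part, which is what the REALPART_EXPR branches implement.

#include <assert.h>
#include <complex.h>

int main (void)
{
  double _Complex z = 3.0 + 4.0 * I;
  double d = (double) z;  /* conversion drops the imaginary part (C99 6.3.1.7) */
  assert (d == 3.0);
  return 0;
}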
2028 /* Return false if expr can be assumed not to be an lvalue, true
2029 otherwise. */
2031 static bool
2032 maybe_lvalue_p (const_tree x)
2034 /* We only need to wrap lvalue tree codes. */
2035 switch (TREE_CODE (x))
2037 case VAR_DECL:
2038 case PARM_DECL:
2039 case RESULT_DECL:
2040 case LABEL_DECL:
2041 case FUNCTION_DECL:
2042 case SSA_NAME:
2044 case COMPONENT_REF:
2045 case MEM_REF:
2046 case INDIRECT_REF:
2047 case ARRAY_REF:
2048 case ARRAY_RANGE_REF:
2049 case BIT_FIELD_REF:
2050 case OBJ_TYPE_REF:
2052 case REALPART_EXPR:
2053 case IMAGPART_EXPR:
2054 case PREINCREMENT_EXPR:
2055 case PREDECREMENT_EXPR:
2056 case SAVE_EXPR:
2057 case TRY_CATCH_EXPR:
2058 case WITH_CLEANUP_EXPR:
2059 case COMPOUND_EXPR:
2060 case MODIFY_EXPR:
2061 case TARGET_EXPR:
2062 case COND_EXPR:
2063 case BIND_EXPR:
2064 break;
2066 default:
2067 /* Assume the worst for front-end tree codes. */
2068 if ((int)TREE_CODE (x) >= NUM_TREE_CODES)
2069 break;
2070 return false;
2073 return true;
2076 /* Return an expr equal to X but certainly not valid as an lvalue. */
2078 tree
2079 non_lvalue_loc (location_t loc, tree x)
2081 /* While we are in GIMPLE, NON_LVALUE_EXPR doesn't mean anything to
2082 us. */
2083 if (in_gimple_form)
2084 return x;
2086 if (! maybe_lvalue_p (x))
2087 return x;
2088 x = build1 (NON_LVALUE_EXPR, TREE_TYPE (x), x);
2089 SET_EXPR_LOCATION (x, loc);
2090 return x;
2093 /* Nonzero means lvalues are limited to those valid in pedantic ANSI C.
2094 Zero means allow extended lvalues. */
2096 int pedantic_lvalues;
2098 /* When pedantic, return an expr equal to X but certainly not valid as a
2099 pedantic lvalue. Otherwise, return X. */
2101 static tree
2102 pedantic_non_lvalue_loc (location_t loc, tree x)
2104 if (pedantic_lvalues)
2105 return non_lvalue_loc (loc, x);
2106 protected_set_expr_location (x, loc);
2107 return x;
2110 /* Given a tree comparison code, return the code that is the logical inverse
2111 of the given code. It is not safe to do this for floating-point
2112 comparisons, except for NE_EXPR and EQ_EXPR, so we also receive a
2113 HONOR_NANS flag: if reversing the comparison is unsafe, return ERROR_MARK. */
2115 enum tree_code
2116 invert_tree_comparison (enum tree_code code, bool honor_nans)
2118 if (honor_nans && flag_trapping_math)
2119 return ERROR_MARK;
2121 switch (code)
2123 case EQ_EXPR:
2124 return NE_EXPR;
2125 case NE_EXPR:
2126 return EQ_EXPR;
2127 case GT_EXPR:
2128 return honor_nans ? UNLE_EXPR : LE_EXPR;
2129 case GE_EXPR:
2130 return honor_nans ? UNLT_EXPR : LT_EXPR;
2131 case LT_EXPR:
2132 return honor_nans ? UNGE_EXPR : GE_EXPR;
2133 case LE_EXPR:
2134 return honor_nans ? UNGT_EXPR : GT_EXPR;
2135 case LTGT_EXPR:
2136 return UNEQ_EXPR;
2137 case UNEQ_EXPR:
2138 return LTGT_EXPR;
2139 case UNGT_EXPR:
2140 return LE_EXPR;
2141 case UNGE_EXPR:
2142 return LT_EXPR;
2143 case UNLT_EXPR:
2144 return GE_EXPR;
2145 case UNLE_EXPR:
2146 return GT_EXPR;
2147 case ORDERED_EXPR:
2148 return UNORDERED_EXPR;
2149 case UNORDERED_EXPR:
2150 return ORDERED_EXPR;
2151 default:
2152 gcc_unreachable ();
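Why the plain inverse is unsafe with NaNs: for a NaN operand both x > y and x <= y are false, so the inverse of GT must be UNLE, not LE. A self-contained C check of that reasoning (illustrative, not GCC code):

#include <assert.h>
#include <math.h>

int main (void)
{
  double x = nan (""), y = 1.0;
  assert (!(x > y));                     /* GT is false for a NaN operand...   */
  assert (!(x <= y));                    /* ...but so is LE, its naive inverse */
  assert (isunordered (x, y) || x <= y); /* the true inverse is UNLE           */
  return 0;
}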
2156 /* Similar, but return the comparison that results if the operands are
2157 swapped. This is safe for floating-point. */
2159 enum tree_code
2160 swap_tree_comparison (enum tree_code code)
2162 switch (code)
2164 case EQ_EXPR:
2165 case NE_EXPR:
2166 case ORDERED_EXPR:
2167 case UNORDERED_EXPR:
2168 case LTGT_EXPR:
2169 case UNEQ_EXPR:
2170 return code;
2171 case GT_EXPR:
2172 return LT_EXPR;
2173 case GE_EXPR:
2174 return LE_EXPR;
2175 case LT_EXPR:
2176 return GT_EXPR;
2177 case LE_EXPR:
2178 return GE_EXPR;
2179 case UNGT_EXPR:
2180 return UNLT_EXPR;
2181 case UNGE_EXPR:
2182 return UNLE_EXPR;
2183 case UNLT_EXPR:
2184 return UNGT_EXPR;
2185 case UNLE_EXPR:
2186 return UNGE_EXPR;
2187 default:
2188 gcc_unreachable ();
2193 /* Convert a comparison tree code from an enum tree_code representation
2194 into a compcode bit-based encoding. This function is the inverse of
2195 compcode_to_comparison. */
2197 static enum comparison_code
2198 comparison_to_compcode (enum tree_code code)
2200 switch (code)
2202 case LT_EXPR:
2203 return COMPCODE_LT;
2204 case EQ_EXPR:
2205 return COMPCODE_EQ;
2206 case LE_EXPR:
2207 return COMPCODE_LE;
2208 case GT_EXPR:
2209 return COMPCODE_GT;
2210 case NE_EXPR:
2211 return COMPCODE_NE;
2212 case GE_EXPR:
2213 return COMPCODE_GE;
2214 case ORDERED_EXPR:
2215 return COMPCODE_ORD;
2216 case UNORDERED_EXPR:
2217 return COMPCODE_UNORD;
2218 case UNLT_EXPR:
2219 return COMPCODE_UNLT;
2220 case UNEQ_EXPR:
2221 return COMPCODE_UNEQ;
2222 case UNLE_EXPR:
2223 return COMPCODE_UNLE;
2224 case UNGT_EXPR:
2225 return COMPCODE_UNGT;
2226 case LTGT_EXPR:
2227 return COMPCODE_LTGT;
2228 case UNGE_EXPR:
2229 return COMPCODE_UNGE;
2230 default:
2231 gcc_unreachable ();
2235 /* Convert a compcode bit-based encoding of a comparison operator back
2236 to GCC's enum tree_code representation. This function is the
2237 inverse of comparison_to_compcode. */
2239 static enum tree_code
2240 compcode_to_comparison (enum comparison_code code)
2242 switch (code)
2244 case COMPCODE_LT:
2245 return LT_EXPR;
2246 case COMPCODE_EQ:
2247 return EQ_EXPR;
2248 case COMPCODE_LE:
2249 return LE_EXPR;
2250 case COMPCODE_GT:
2251 return GT_EXPR;
2252 case COMPCODE_NE:
2253 return NE_EXPR;
2254 case COMPCODE_GE:
2255 return GE_EXPR;
2256 case COMPCODE_ORD:
2257 return ORDERED_EXPR;
2258 case COMPCODE_UNORD:
2259 return UNORDERED_EXPR;
2260 case COMPCODE_UNLT:
2261 return UNLT_EXPR;
2262 case COMPCODE_UNEQ:
2263 return UNEQ_EXPR;
2264 case COMPCODE_UNLE:
2265 return UNLE_EXPR;
2266 case COMPCODE_UNGT:
2267 return UNGT_EXPR;
2268 case COMPCODE_LTGT:
2269 return LTGT_EXPR;
2270 case COMPCODE_UNGE:
2271 return UNGE_EXPR;
2272 default:
2273 gcc_unreachable ();
2277 /* Return a tree for the comparison which is the combination of
2278 doing the AND or OR (depending on CODE) of the two operations LCODE
2279 and RCODE on the identical operands LL_ARG and LR_ARG. Take into account
2280 the possibility of trapping if the mode has NaNs, and return NULL_TREE
2281 if this makes the transformation invalid. */
2283 tree
2284 combine_comparisons (location_t loc,
2285 enum tree_code code, enum tree_code lcode,
2286 enum tree_code rcode, tree truth_type,
2287 tree ll_arg, tree lr_arg)
2289 bool honor_nans = HONOR_NANS (TYPE_MODE (TREE_TYPE (ll_arg)));
2290 enum comparison_code lcompcode = comparison_to_compcode (lcode);
2291 enum comparison_code rcompcode = comparison_to_compcode (rcode);
2292 int compcode;
2294 switch (code)
2296 case TRUTH_AND_EXPR: case TRUTH_ANDIF_EXPR:
2297 compcode = lcompcode & rcompcode;
2298 break;
2300 case TRUTH_OR_EXPR: case TRUTH_ORIF_EXPR:
2301 compcode = lcompcode | rcompcode;
2302 break;
2304 default:
2305 return NULL_TREE;
2308 if (!honor_nans)
2310 /* Eliminate unordered comparisons, as well as LTGT and ORD
2311 which are not used unless the mode has NaNs. */
2312 compcode &= ~COMPCODE_UNORD;
2313 if (compcode == COMPCODE_LTGT)
2314 compcode = COMPCODE_NE;
2315 else if (compcode == COMPCODE_ORD)
2316 compcode = COMPCODE_TRUE;
2318 else if (flag_trapping_math)
2320 /* Check that the original operation and the optimized ones will trap
2321 under the same condition. */
2322 bool ltrap = (lcompcode & COMPCODE_UNORD) == 0
2323 && (lcompcode != COMPCODE_EQ)
2324 && (lcompcode != COMPCODE_ORD);
2325 bool rtrap = (rcompcode & COMPCODE_UNORD) == 0
2326 && (rcompcode != COMPCODE_EQ)
2327 && (rcompcode != COMPCODE_ORD);
2328 bool trap = (compcode & COMPCODE_UNORD) == 0
2329 && (compcode != COMPCODE_EQ)
2330 && (compcode != COMPCODE_ORD);
2332 /* In a short-circuited boolean expression the LHS might be
2333 such that the RHS, if evaluated, will never trap. For
2334 example, in ORD (x, y) && (x < y), we evaluate the RHS only
2335 if neither x nor y is NaN. (This is a mixed blessing: for
2336 example, the expression above will never trap, hence
2337 optimizing it to x < y would be invalid). */
2338 if ((code == TRUTH_ORIF_EXPR && (lcompcode & COMPCODE_UNORD))
2339 || (code == TRUTH_ANDIF_EXPR && !(lcompcode & COMPCODE_UNORD)))
2340 rtrap = false;
2342 /* If the comparison was short-circuited, and only the RHS
2343 trapped, we may now generate a spurious trap. */
2344 if (rtrap && !ltrap
2345 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR))
2346 return NULL_TREE;
2348 /* If we changed the conditions that cause a trap, we lose. */
2349 if ((ltrap || rtrap) != trap)
2350 return NULL_TREE;
2353 if (compcode == COMPCODE_TRUE)
2354 return constant_boolean_node (true, truth_type);
2355 else if (compcode == COMPCODE_FALSE)
2356 return constant_boolean_node (false, truth_type);
2357 else
2359 enum tree_code tcode;
2361 tcode = compcode_to_comparison ((enum comparison_code) compcode);
2362 return fold_build2_loc (loc, tcode, truth_type, ll_arg, lr_arg);
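The payoff of the bit encoding is visible above: AND and OR of comparisons become bitwise & and | of compcodes. A sketch using a hypothetical bit assignment (the values below are for illustration only, chosen so LT, GT, EQ and UNORD occupy separate bits as the encoding requires):

#include <assert.h>

enum { LT = 1, GT = 2, EQ = 4, UNORD = 8,          /* hypothetical values */
       LE = LT | EQ, GE = GT | EQ, LTGT = LT | GT,
       NE = LT | GT | UNORD, ORD = LT | GT | EQ };

int main (void)
{
  assert ((LT | EQ) == LE);        /* (a < b) || (a == b)  ->  a <= b       */
  assert ((LE & GE) == EQ);        /* (a <= b) && (a >= b) ->  a == b       */
  assert ((NE & ~UNORD) == LTGT);  /* NE with NaNs removed is LTGT, as above */
  return 0;
}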
2366 /* Return nonzero if two operands (typically of the same tree node)
2367 are necessarily equal. If either argument has side-effects this
2368 function returns zero. FLAGS modifies behavior as follows:
2370 If OEP_ONLY_CONST is set, only return nonzero for constants.
2371 This function tests whether the operands are indistinguishable;
2372 it does not test whether they are equal using C's == operation.
2373 The distinction is important for IEEE floating point, because
2374 (1) -0.0 and 0.0 are distinguishable, but -0.0==0.0, and
2375 (2) two NaNs may be indistinguishable, but NaN!=NaN.
2377 If OEP_ONLY_CONST is unset, a VAR_DECL is considered equal to itself
2378 even though it may hold multiple values during a function.
2379 This is because a GCC tree node guarantees that nothing else is
2380 executed between the evaluation of its "operands" (which may often
2381 be evaluated in arbitrary order). Hence if the operands themselves
2382 don't side-effect, the VAR_DECLs, PARM_DECLs etc... must hold the
2383 same value in each operand/subexpression. Hence leaving OEP_ONLY_CONST
2384 unset means assuming isochronic (or instantaneous) tree equivalence.
2385 Unless comparing arbitrary expression trees, such as from different
2386 statements, this flag can usually be left unset.
2388 If OEP_PURE_SAME is set, then pure functions with identical arguments
2389 are considered the same. It is used when the caller has other ways
2390 to ensure that global memory is unchanged in between. */
2392 int
2393 operand_equal_p (const_tree arg0, const_tree arg1, unsigned int flags)
2395 /* If either is ERROR_MARK, they aren't equal. */
2396 if (TREE_CODE (arg0) == ERROR_MARK || TREE_CODE (arg1) == ERROR_MARK
2397 || TREE_TYPE (arg0) == error_mark_node
2398 || TREE_TYPE (arg1) == error_mark_node)
2399 return 0;
2401 /* Similar, if either does not have a type (like a released SSA name),
2402 they aren't equal. */
2403 if (!TREE_TYPE (arg0) || !TREE_TYPE (arg1))
2404 return 0;
2406 /* Check equality of integer constants before bailing out due to
2407 precision differences. */
2408 if (TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
2409 return tree_int_cst_equal (arg0, arg1);
2411 /* If the two types don't have the same signedness, then we can't consider
2412 them equal. We must check this before the STRIP_NOPS calls
2413 because they may change the signedness of the arguments. As pointers
2414 strictly don't have a signedness, require either two pointers or
2415 two non-pointers as well. */
2416 if (TYPE_UNSIGNED (TREE_TYPE (arg0)) != TYPE_UNSIGNED (TREE_TYPE (arg1))
2417 || POINTER_TYPE_P (TREE_TYPE (arg0)) != POINTER_TYPE_P (TREE_TYPE (arg1)))
2418 return 0;
2420 /* We cannot consider pointers to different address spaces equal. */
2421 if (POINTER_TYPE_P (TREE_TYPE (arg0)) && POINTER_TYPE_P (TREE_TYPE (arg1))
2422 && (TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (arg0)))
2423 != TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (arg1)))))
2424 return 0;
2426 /* If the two types don't have the same precision, then it is not safe
2427 to strip NOPs. */
2428 if (TYPE_PRECISION (TREE_TYPE (arg0)) != TYPE_PRECISION (TREE_TYPE (arg1)))
2429 return 0;
2431 STRIP_NOPS (arg0);
2432 STRIP_NOPS (arg1);
2434 /* In case both args are comparisons but with different comparison
2435 code, try to swap the comparison operands of one arg to produce
2436 a match and compare that variant. */
2437 if (TREE_CODE (arg0) != TREE_CODE (arg1)
2438 && COMPARISON_CLASS_P (arg0)
2439 && COMPARISON_CLASS_P (arg1))
2441 enum tree_code swap_code = swap_tree_comparison (TREE_CODE (arg1));
2443 if (TREE_CODE (arg0) == swap_code)
2444 return operand_equal_p (TREE_OPERAND (arg0, 0),
2445 TREE_OPERAND (arg1, 1), flags)
2446 && operand_equal_p (TREE_OPERAND (arg0, 1),
2447 TREE_OPERAND (arg1, 0), flags);
2450 if (TREE_CODE (arg0) != TREE_CODE (arg1)
2451 /* This is needed for conversions and for COMPONENT_REF.
2452 Might as well play it safe and always test this. */
2453 || TREE_CODE (TREE_TYPE (arg0)) == ERROR_MARK
2454 || TREE_CODE (TREE_TYPE (arg1)) == ERROR_MARK
2455 || TYPE_MODE (TREE_TYPE (arg0)) != TYPE_MODE (TREE_TYPE (arg1)))
2456 return 0;
2458 /* If ARG0 and ARG1 are the same SAVE_EXPR, they are necessarily equal.
2459 We don't care about side effects in that case because the SAVE_EXPR
2460 takes care of that for us. In all other cases, two expressions are
2461 equal if they have no side effects. If we have two identical
2462 expressions with side effects that should be treated the same due
2463 to the only side effects being identical SAVE_EXPR's, that will
2464 be detected in the recursive calls below. */
2465 if (arg0 == arg1 && ! (flags & OEP_ONLY_CONST)
2466 && (TREE_CODE (arg0) == SAVE_EXPR
2467 || (! TREE_SIDE_EFFECTS (arg0) && ! TREE_SIDE_EFFECTS (arg1))))
2468 return 1;
2470 /* Next handle constant cases, those for which we can return 1 even
2471 if ONLY_CONST is set. */
2472 if (TREE_CONSTANT (arg0) && TREE_CONSTANT (arg1))
2473 switch (TREE_CODE (arg0))
2475 case INTEGER_CST:
2476 return tree_int_cst_equal (arg0, arg1);
2478 case FIXED_CST:
2479 return FIXED_VALUES_IDENTICAL (TREE_FIXED_CST (arg0),
2480 TREE_FIXED_CST (arg1));
2482 case REAL_CST:
2483 if (REAL_VALUES_IDENTICAL (TREE_REAL_CST (arg0),
2484 TREE_REAL_CST (arg1)))
2485 return 1;
2488 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0))))
2490 /* If we do not distinguish between signed and unsigned zero,
2491 consider them equal. */
2492 if (real_zerop (arg0) && real_zerop (arg1))
2493 return 1;
2495 return 0;
2497 case VECTOR_CST:
2499 tree v1, v2;
2501 v1 = TREE_VECTOR_CST_ELTS (arg0);
2502 v2 = TREE_VECTOR_CST_ELTS (arg1);
2503 while (v1 && v2)
2505 if (!operand_equal_p (TREE_VALUE (v1), TREE_VALUE (v2),
2506 flags))
2507 return 0;
2508 v1 = TREE_CHAIN (v1);
2509 v2 = TREE_CHAIN (v2);
2512 return v1 == v2;
2515 case COMPLEX_CST:
2516 return (operand_equal_p (TREE_REALPART (arg0), TREE_REALPART (arg1),
2517 flags)
2518 && operand_equal_p (TREE_IMAGPART (arg0), TREE_IMAGPART (arg1),
2519 flags));
2521 case STRING_CST:
2522 return (TREE_STRING_LENGTH (arg0) == TREE_STRING_LENGTH (arg1)
2523 && ! memcmp (TREE_STRING_POINTER (arg0),
2524 TREE_STRING_POINTER (arg1),
2525 TREE_STRING_LENGTH (arg0)));
2527 case ADDR_EXPR:
2528 return operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 0),
2529 0);
2530 default:
2531 break;
2534 if (flags & OEP_ONLY_CONST)
2535 return 0;
2537 /* Define macros to test an operand from arg0 and arg1 for equality and a
2538 variant that allows null and views null as being different from any
2539 non-null value. In the latter case, if either is null, then both
2540 must be; otherwise, do the normal comparison. */
2541 #define OP_SAME(N) operand_equal_p (TREE_OPERAND (arg0, N), \
2542 TREE_OPERAND (arg1, N), flags)
2544 #define OP_SAME_WITH_NULL(N) \
2545 ((!TREE_OPERAND (arg0, N) || !TREE_OPERAND (arg1, N)) \
2546 ? TREE_OPERAND (arg0, N) == TREE_OPERAND (arg1, N) : OP_SAME (N))
2548 switch (TREE_CODE_CLASS (TREE_CODE (arg0)))
2550 case tcc_unary:
2551 /* Two conversions are equal only if signedness and modes match. */
2552 switch (TREE_CODE (arg0))
2554 CASE_CONVERT:
2555 case FIX_TRUNC_EXPR:
2556 if (TYPE_UNSIGNED (TREE_TYPE (arg0))
2557 != TYPE_UNSIGNED (TREE_TYPE (arg1)))
2558 return 0;
2559 break;
2560 default:
2561 break;
2564 return OP_SAME (0);
2567 case tcc_comparison:
2568 case tcc_binary:
2569 if (OP_SAME (0) && OP_SAME (1))
2570 return 1;
2572 /* For commutative ops, allow the other order. */
2573 return (commutative_tree_code (TREE_CODE (arg0))
2574 && operand_equal_p (TREE_OPERAND (arg0, 0),
2575 TREE_OPERAND (arg1, 1), flags)
2576 && operand_equal_p (TREE_OPERAND (arg0, 1),
2577 TREE_OPERAND (arg1, 0), flags));
2579 case tcc_reference:
2580 /* If either of the pointer (or reference) expressions we are
2581 dereferencing contains a side effect, these cannot be equal. */
2582 if (TREE_SIDE_EFFECTS (arg0)
2583 || TREE_SIDE_EFFECTS (arg1))
2584 return 0;
2586 switch (TREE_CODE (arg0))
2588 case INDIRECT_REF:
2589 case REALPART_EXPR:
2590 case IMAGPART_EXPR:
2591 return OP_SAME (0);
2593 case MEM_REF:
2594 /* Require equal access sizes, and similar pointer types.
2595 We can have incomplete types for array references of
2596 variable-sized arrays from the Fortran frontend
2597 though. */
2598 return ((TYPE_SIZE (TREE_TYPE (arg0)) == TYPE_SIZE (TREE_TYPE (arg1))
2599 || (TYPE_SIZE (TREE_TYPE (arg0))
2600 && TYPE_SIZE (TREE_TYPE (arg1))
2601 && operand_equal_p (TYPE_SIZE (TREE_TYPE (arg0)),
2602 TYPE_SIZE (TREE_TYPE (arg1)), flags)))
2603 && (TYPE_MAIN_VARIANT (TREE_TYPE (TREE_OPERAND (arg0, 1)))
2604 == TYPE_MAIN_VARIANT (TREE_TYPE (TREE_OPERAND (arg1, 1))))
2605 && OP_SAME (0) && OP_SAME (1));
2607 case ARRAY_REF:
2608 case ARRAY_RANGE_REF:
2609 /* Operands 2 and 3 may be null.
2610 First compare the array index by value if it is constant, as we
2611 may have different types but the same value here. */
2612 return (OP_SAME (0)
2613 && (tree_int_cst_equal (TREE_OPERAND (arg0, 1),
2614 TREE_OPERAND (arg1, 1))
2615 || OP_SAME (1))
2616 && OP_SAME_WITH_NULL (2)
2617 && OP_SAME_WITH_NULL (3));
2619 case COMPONENT_REF:
2620 /* Handle operand 2 the same as for ARRAY_REF. Operand 0
2621 may be NULL when we're called to compare MEM_EXPRs. */
2622 return OP_SAME_WITH_NULL (0)
2623 && OP_SAME (1)
2624 && OP_SAME_WITH_NULL (2);
2626 case BIT_FIELD_REF:
2627 return OP_SAME (0) && OP_SAME (1) && OP_SAME (2);
2629 default:
2630 return 0;
2633 case tcc_expression:
2634 switch (TREE_CODE (arg0))
2636 case ADDR_EXPR:
2637 case TRUTH_NOT_EXPR:
2638 return OP_SAME (0);
2640 case TRUTH_ANDIF_EXPR:
2641 case TRUTH_ORIF_EXPR:
2642 return OP_SAME (0) && OP_SAME (1);
2644 case TRUTH_AND_EXPR:
2645 case TRUTH_OR_EXPR:
2646 case TRUTH_XOR_EXPR:
2647 if (OP_SAME (0) && OP_SAME (1))
2648 return 1;
2650 /* Otherwise take into account this is a commutative operation. */
2651 return (operand_equal_p (TREE_OPERAND (arg0, 0),
2652 TREE_OPERAND (arg1, 1), flags)
2653 && operand_equal_p (TREE_OPERAND (arg0, 1),
2654 TREE_OPERAND (arg1, 0), flags));
2656 case COND_EXPR:
2657 return OP_SAME (0) && OP_SAME (1) && OP_SAME (2);
2659 default:
2660 return 0;
2663 case tcc_vl_exp:
2664 switch (TREE_CODE (arg0))
2666 case CALL_EXPR:
2667 /* If the CALL_EXPRs call different functions, then they
2668 clearly cannot be equal. */
2669 if (! operand_equal_p (CALL_EXPR_FN (arg0), CALL_EXPR_FN (arg1),
2670 flags))
2671 return 0;
2674 unsigned int cef = call_expr_flags (arg0);
2675 if (flags & OEP_PURE_SAME)
2676 cef &= ECF_CONST | ECF_PURE;
2677 else
2678 cef &= ECF_CONST;
2679 if (!cef)
2680 return 0;
2683 /* Now see if all the arguments are the same. */
2685 const_call_expr_arg_iterator iter0, iter1;
2686 const_tree a0, a1;
2687 for (a0 = first_const_call_expr_arg (arg0, &iter0),
2688 a1 = first_const_call_expr_arg (arg1, &iter1);
2689 a0 && a1;
2690 a0 = next_const_call_expr_arg (&iter0),
2691 a1 = next_const_call_expr_arg (&iter1))
2692 if (! operand_equal_p (a0, a1, flags))
2693 return 0;
2695 /* If we get here and both argument lists are exhausted
2696 then the CALL_EXPRs are equal. */
2697 return ! (a0 || a1);
2699 default:
2700 return 0;
2703 case tcc_declaration:
2704 /* Consider __builtin_sqrt equal to sqrt. */
2705 return (TREE_CODE (arg0) == FUNCTION_DECL
2706 && DECL_BUILT_IN (arg0) && DECL_BUILT_IN (arg1)
2707 && DECL_BUILT_IN_CLASS (arg0) == DECL_BUILT_IN_CLASS (arg1)
2708 && DECL_FUNCTION_CODE (arg0) == DECL_FUNCTION_CODE (arg1));
2710 default:
2711 return 0;
2714 #undef OP_SAME
2715 #undef OP_SAME_WITH_NULL
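The IEEE caveats in the comment before operand_equal_p are easy to demonstrate in plain C: -0.0 and 0.0 compare equal under == yet are distinguishable, and a NaN compares unequal even to itself (illustrative, not GCC code):

#include <assert.h>
#include <math.h>

int main (void)
{
  double pz = 0.0, nz = -0.0;
  assert (pz == nz);                     /* == calls them equal...          */
  assert (signbit (pz) != signbit (nz)); /* ...but they are distinguishable */
  double n = nan ("");
  assert (n != n);                       /* NaN is unequal even to itself   */
  return 0;
}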
2718 /* Similar to operand_equal_p, but see if ARG0 might have been made by
2719 shorten_compare from ARG1 when ARG1 was being compared with OTHER.
2721 When in doubt, return 0. */
2723 static int
2724 operand_equal_for_comparison_p (tree arg0, tree arg1, tree other)
2726 int unsignedp1, unsignedpo;
2727 tree primarg0, primarg1, primother;
2728 unsigned int correct_width;
2730 if (operand_equal_p (arg0, arg1, 0))
2731 return 1;
2733 if (! INTEGRAL_TYPE_P (TREE_TYPE (arg0))
2734 || ! INTEGRAL_TYPE_P (TREE_TYPE (arg1)))
2735 return 0;
2737 /* Discard any conversions that don't change the modes of ARG0 and ARG1
2738 and see if the inner values are the same. This removes any
2739 signedness comparison, which doesn't matter here. */
2740 primarg0 = arg0, primarg1 = arg1;
2741 STRIP_NOPS (primarg0);
2742 STRIP_NOPS (primarg1);
2743 if (operand_equal_p (primarg0, primarg1, 0))
2744 return 1;
2746 /* Duplicate what shorten_compare does to ARG1 and see if that gives the
2747 actual comparison operand, ARG0.
2749 First throw away any conversions to wider types
2750 already present in the operands. */
2752 primarg1 = get_narrower (arg1, &unsignedp1);
2753 primother = get_narrower (other, &unsignedpo);
2755 correct_width = TYPE_PRECISION (TREE_TYPE (arg1));
2756 if (unsignedp1 == unsignedpo
2757 && TYPE_PRECISION (TREE_TYPE (primarg1)) < correct_width
2758 && TYPE_PRECISION (TREE_TYPE (primother)) < correct_width)
2760 tree type = TREE_TYPE (arg0);
2762 /* Make sure shorter operand is extended the right way
2763 to match the longer operand. */
2764 primarg1 = fold_convert (signed_or_unsigned_type_for
2765 (unsignedp1, TREE_TYPE (primarg1)), primarg1);
2767 if (operand_equal_p (arg0, fold_convert (type, primarg1), 0))
2768 return 1;
2771 return 0;
2774 /* See if ARG is an expression that is either a comparison or is performing
2775 arithmetic on comparisons. The comparisons must only be comparing
2776 two different values, which will be stored in *CVAL1 and *CVAL2; if
2777 they are nonzero it means that some operands have already been found.
2778 No variables may be used anywhere else in the expression except in the
2779 comparisons. If SAVE_P is true it means we removed a SAVE_EXPR around
2780 the expression and save_expr needs to be called with CVAL1 and CVAL2.
2782 If this is true, return 1. Otherwise, return zero. */
2784 static int
2785 twoval_comparison_p (tree arg, tree *cval1, tree *cval2, int *save_p)
2787 enum tree_code code = TREE_CODE (arg);
2788 enum tree_code_class tclass = TREE_CODE_CLASS (code);
2790 /* We can handle some of the tcc_expression cases here. */
2791 if (tclass == tcc_expression && code == TRUTH_NOT_EXPR)
2792 tclass = tcc_unary;
2793 else if (tclass == tcc_expression
2794 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR
2795 || code == COMPOUND_EXPR))
2796 tclass = tcc_binary;
2798 else if (tclass == tcc_expression && code == SAVE_EXPR
2799 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (arg, 0)))
2801 /* If we've already found a CVAL1 or CVAL2, this expression is
2802 too complex to handle. */
2803 if (*cval1 || *cval2)
2804 return 0;
2806 tclass = tcc_unary;
2807 *save_p = 1;
2810 switch (tclass)
2812 case tcc_unary:
2813 return twoval_comparison_p (TREE_OPERAND (arg, 0), cval1, cval2, save_p);
2815 case tcc_binary:
2816 return (twoval_comparison_p (TREE_OPERAND (arg, 0), cval1, cval2, save_p)
2817 && twoval_comparison_p (TREE_OPERAND (arg, 1),
2818 cval1, cval2, save_p));
2820 case tcc_constant:
2821 return 1;
2823 case tcc_expression:
2824 if (code == COND_EXPR)
2825 return (twoval_comparison_p (TREE_OPERAND (arg, 0),
2826 cval1, cval2, save_p)
2827 && twoval_comparison_p (TREE_OPERAND (arg, 1),
2828 cval1, cval2, save_p)
2829 && twoval_comparison_p (TREE_OPERAND (arg, 2),
2830 cval1, cval2, save_p));
2831 return 0;
2833 case tcc_comparison:
2834 /* First see if we can handle the first operand, then the second. For
2835 the second operand, we know *CVAL1 can't be zero. It must be that
2836 one side of the comparison is each of the values; test for the
2837 case where this isn't true by failing if the two operands
2838 are the same. */
2840 if (operand_equal_p (TREE_OPERAND (arg, 0),
2841 TREE_OPERAND (arg, 1), 0))
2842 return 0;
2844 if (*cval1 == 0)
2845 *cval1 = TREE_OPERAND (arg, 0);
2846 else if (operand_equal_p (*cval1, TREE_OPERAND (arg, 0), 0))
2848 else if (*cval2 == 0)
2849 *cval2 = TREE_OPERAND (arg, 0);
2850 else if (operand_equal_p (*cval2, TREE_OPERAND (arg, 0), 0))
2852 else
2853 return 0;
2855 if (operand_equal_p (*cval1, TREE_OPERAND (arg, 1), 0))
2857 else if (*cval2 == 0)
2858 *cval2 = TREE_OPERAND (arg, 1);
2859 else if (operand_equal_p (*cval2, TREE_OPERAND (arg, 1), 0))
2861 else
2862 return 0;
2864 return 1;
2866 default:
2867 return 0;
2871 /* ARG is a tree that is known to contain just arithmetic operations and
2872 comparisons. Evaluate the operations in the tree substituting NEW0 for
2873 any occurrence of OLD0 as an operand of a comparison and likewise for
2874 NEW1 and OLD1. */
2876 static tree
2877 eval_subst (location_t loc, tree arg, tree old0, tree new0,
2878 tree old1, tree new1)
2880 tree type = TREE_TYPE (arg);
2881 enum tree_code code = TREE_CODE (arg);
2882 enum tree_code_class tclass = TREE_CODE_CLASS (code);
2884 /* We can handle some of the tcc_expression cases here. */
2885 if (tclass == tcc_expression && code == TRUTH_NOT_EXPR)
2886 tclass = tcc_unary;
2887 else if (tclass == tcc_expression
2888 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR))
2889 tclass = tcc_binary;
2891 switch (tclass)
2893 case tcc_unary:
2894 return fold_build1_loc (loc, code, type,
2895 eval_subst (loc, TREE_OPERAND (arg, 0),
2896 old0, new0, old1, new1));
2898 case tcc_binary:
2899 return fold_build2_loc (loc, code, type,
2900 eval_subst (loc, TREE_OPERAND (arg, 0),
2901 old0, new0, old1, new1),
2902 eval_subst (loc, TREE_OPERAND (arg, 1),
2903 old0, new0, old1, new1));
2905 case tcc_expression:
2906 switch (code)
2908 case SAVE_EXPR:
2909 return eval_subst (loc, TREE_OPERAND (arg, 0), old0, new0,
2910 old1, new1);
2912 case COMPOUND_EXPR:
2913 return eval_subst (loc, TREE_OPERAND (arg, 1), old0, new0,
2914 old1, new1);
2916 case COND_EXPR:
2917 return fold_build3_loc (loc, code, type,
2918 eval_subst (loc, TREE_OPERAND (arg, 0),
2919 old0, new0, old1, new1),
2920 eval_subst (loc, TREE_OPERAND (arg, 1),
2921 old0, new0, old1, new1),
2922 eval_subst (loc, TREE_OPERAND (arg, 2),
2923 old0, new0, old1, new1));
2924 default:
2925 break;
2927 /* Fall through - ??? */
2929 case tcc_comparison:
2931 tree arg0 = TREE_OPERAND (arg, 0);
2932 tree arg1 = TREE_OPERAND (arg, 1);
2934 /* We need to check both for exact equality and tree equality. The
2935 former will be true if the operand has a side-effect. In that
2936 case, we know the operand occurred exactly once. */
2938 if (arg0 == old0 || operand_equal_p (arg0, old0, 0))
2939 arg0 = new0;
2940 else if (arg0 == old1 || operand_equal_p (arg0, old1, 0))
2941 arg0 = new1;
2943 if (arg1 == old0 || operand_equal_p (arg1, old0, 0))
2944 arg1 = new0;
2945 else if (arg1 == old1 || operand_equal_p (arg1, old1, 0))
2946 arg1 = new1;
2948 return fold_build2_loc (loc, code, type, arg0, arg1);
2951 default:
2952 return arg;
2956 /* Return a tree for the case when the result of an expression is RESULT
2957 converted to TYPE and OMITTED was previously an operand of the expression
2958 but is now not needed (e.g., we folded OMITTED * 0).
2960 If OMITTED has side effects, we must evaluate it. Otherwise, just do
2961 the conversion of RESULT to TYPE. */
2963 tree
2964 omit_one_operand_loc (location_t loc, tree type, tree result, tree omitted)
2966 tree t = fold_convert_loc (loc, type, result);
2968 /* If the resulting operand is an empty statement, just return the omitted
2969 statement cast to void. */
2970 if (IS_EMPTY_STMT (t) && TREE_SIDE_EFFECTS (omitted))
2972 t = build1 (NOP_EXPR, void_type_node, fold_ignored_result (omitted));
2973 goto omit_one_operand_exit;
2976 if (TREE_SIDE_EFFECTS (omitted))
2978 t = build2 (COMPOUND_EXPR, type, fold_ignored_result (omitted), t);
2979 goto omit_one_operand_exit;
2982 return non_lvalue_loc (loc, t);
2984 omit_one_operand_exit:
2985 protected_set_expr_location (t, loc);
2986 return t;
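In source terms, omit_one_operand_loc is what lets the folder rewrite f () * 0 as the equivalent of (f (), 0) rather than plain 0 when f () has side effects; the COMPOUND_EXPR it builds corresponds to C's comma operator. A minimal sketch of the preserved behavior:

#include <assert.h>

static int calls;
static int f (void) { calls++; return 42; }

int main (void)
{
  int r = (f (), 0);  /* the omitted operand is still evaluated */
  assert (r == 0 && calls == 1);
  return 0;
}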
2989 /* Similar, but call pedantic_non_lvalue instead of non_lvalue. */
2991 static tree
2992 pedantic_omit_one_operand_loc (location_t loc, tree type, tree result,
2993 tree omitted)
2995 tree t = fold_convert_loc (loc, type, result);
2997 /* If the resulting operand is an empty statement, just return the omitted
2998 statement cast to void. */
2999 if (IS_EMPTY_STMT (t) && TREE_SIDE_EFFECTS (omitted))
3001 t = build1 (NOP_EXPR, void_type_node, fold_ignored_result (omitted));
3002 goto pedantic_omit_one_operand_exit;
3005 if (TREE_SIDE_EFFECTS (omitted))
3007 t = build2 (COMPOUND_EXPR, type, fold_ignored_result (omitted), t);
3008 goto pedantic_omit_one_operand_exit;
3011 return pedantic_non_lvalue_loc (loc, t);
3013 pedantic_omit_one_operand_exit:
3014 protected_set_expr_location (t, loc);
3015 return t;
3018 /* Return a tree for the case when the result of an expression is RESULT
3019 converted to TYPE and OMITTED1 and OMITTED2 were previously operands
3020 of the expression but are now not needed.
3022 If OMITTED1 or OMITTED2 has side effects, they must be evaluated.
3023 If both OMITTED1 and OMITTED2 have side effects, OMITTED1 is
3024 evaluated before OMITTED2. Otherwise, if neither has side effects,
3025 just do the conversion of RESULT to TYPE. */
3027 tree
3028 omit_two_operands_loc (location_t loc, tree type, tree result,
3029 tree omitted1, tree omitted2)
3031 tree t = fold_convert_loc (loc, type, result);
3033 if (TREE_SIDE_EFFECTS (omitted2))
3035 t = build2 (COMPOUND_EXPR, type, omitted2, t);
3036 SET_EXPR_LOCATION (t, loc);
3038 if (TREE_SIDE_EFFECTS (omitted1))
3040 t = build2 (COMPOUND_EXPR, type, omitted1, t);
3041 SET_EXPR_LOCATION (t, loc);
3044 return TREE_CODE (t) != COMPOUND_EXPR ? non_lvalue_loc (loc, t) : t;
3048 /* Return a simplified tree node for the truth-negation of ARG. This
3049 never alters ARG itself. We assume that ARG is an operation that
3050 returns a truth value (0 or 1).
3052 FIXME: one would think we would fold the result, but it causes
3053 problems with the dominator optimizer. */
3055 tree
3056 fold_truth_not_expr (location_t loc, tree arg)
3058 tree t, type = TREE_TYPE (arg);
3059 enum tree_code code = TREE_CODE (arg);
3060 location_t loc1, loc2;
3062 /* If this is a comparison, we can simply invert it, except for
3063 floating-point non-equality comparisons, in which case we just
3064 enclose a TRUTH_NOT_EXPR around what we have. */
3066 if (TREE_CODE_CLASS (code) == tcc_comparison)
3068 tree op_type = TREE_TYPE (TREE_OPERAND (arg, 0));
3069 if (FLOAT_TYPE_P (op_type)
3070 && flag_trapping_math
3071 && code != ORDERED_EXPR && code != UNORDERED_EXPR
3072 && code != NE_EXPR && code != EQ_EXPR)
3073 return NULL_TREE;
3075 code = invert_tree_comparison (code, HONOR_NANS (TYPE_MODE (op_type)));
3076 if (code == ERROR_MARK)
3077 return NULL_TREE;
3079 t = build2 (code, type, TREE_OPERAND (arg, 0), TREE_OPERAND (arg, 1));
3080 SET_EXPR_LOCATION (t, loc);
3081 return t;
3084 switch (code)
3086 case INTEGER_CST:
3087 return constant_boolean_node (integer_zerop (arg), type);
3089 case TRUTH_AND_EXPR:
3090 loc1 = EXPR_LOCATION (TREE_OPERAND (arg, 0));
3091 loc2 = EXPR_LOCATION (TREE_OPERAND (arg, 1));
3092 if (loc1 == UNKNOWN_LOCATION)
3093 loc1 = loc;
3094 if (loc2 == UNKNOWN_LOCATION)
3095 loc2 = loc;
3096 t = build2 (TRUTH_OR_EXPR, type,
3097 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)),
3098 invert_truthvalue_loc (loc2, TREE_OPERAND (arg, 1)));
3099 break;
3101 case TRUTH_OR_EXPR:
3102 loc1 = EXPR_LOCATION (TREE_OPERAND (arg, 0));
3103 loc2 = EXPR_LOCATION (TREE_OPERAND (arg, 1));
3104 if (loc1 == UNKNOWN_LOCATION)
3105 loc1 = loc;
3106 if (loc2 == UNKNOWN_LOCATION)
3107 loc2 = loc;
3108 t = build2 (TRUTH_AND_EXPR, type,
3109 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)),
3110 invert_truthvalue_loc (loc2, TREE_OPERAND (arg, 1)));
3111 break;
3113 case TRUTH_XOR_EXPR:
3114 /* Here we can invert either operand. We invert the first operand
3115 unless the second operand is a TRUTH_NOT_EXPR in which case our
3116 result is the XOR of the first operand with the inside of the
3117 negation of the second operand. */
3119 if (TREE_CODE (TREE_OPERAND (arg, 1)) == TRUTH_NOT_EXPR)
3120 t = build2 (TRUTH_XOR_EXPR, type, TREE_OPERAND (arg, 0),
3121 TREE_OPERAND (TREE_OPERAND (arg, 1), 0));
3122 else
3123 t = build2 (TRUTH_XOR_EXPR, type,
3124 invert_truthvalue_loc (loc, TREE_OPERAND (arg, 0)),
3125 TREE_OPERAND (arg, 1));
3126 break;
3128 case TRUTH_ANDIF_EXPR:
3129 loc1 = EXPR_LOCATION (TREE_OPERAND (arg, 0));
3130 loc2 = EXPR_LOCATION (TREE_OPERAND (arg, 1));
3131 if (loc1 == UNKNOWN_LOCATION)
3132 loc1 = loc;
3133 if (loc2 == UNKNOWN_LOCATION)
3134 loc2 = loc;
3135 t = build2 (TRUTH_ORIF_EXPR, type,
3136 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)),
3137 invert_truthvalue_loc (loc2, TREE_OPERAND (arg, 1)));
3138 break;
3140 case TRUTH_ORIF_EXPR:
3141 loc1 = EXPR_LOCATION (TREE_OPERAND (arg, 0));
3142 loc2 = EXPR_LOCATION (TREE_OPERAND (arg, 1));
3143 if (loc1 == UNKNOWN_LOCATION)
3144 loc1 = loc;
3145 if (loc2 == UNKNOWN_LOCATION)
3146 loc2 = loc;
3147 t = build2 (TRUTH_ANDIF_EXPR, type,
3148 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)),
3149 invert_truthvalue_loc (loc2, TREE_OPERAND (arg, 1)));
3150 break;
3152 case TRUTH_NOT_EXPR:
3153 return TREE_OPERAND (arg, 0);
3155 case COND_EXPR:
3157 tree arg1 = TREE_OPERAND (arg, 1);
3158 tree arg2 = TREE_OPERAND (arg, 2);
3160 loc1 = EXPR_LOCATION (TREE_OPERAND (arg, 1));
3161 loc2 = EXPR_LOCATION (TREE_OPERAND (arg, 2));
3162 if (loc1 == UNKNOWN_LOCATION)
3163 loc1 = loc;
3164 if (loc2 == UNKNOWN_LOCATION)
3165 loc2 = loc;
3167 /* A COND_EXPR may have a throw as one operand, which
3168 then has void type. Just leave void operands
3169 as they are. */
3170 t = build3 (COND_EXPR, type, TREE_OPERAND (arg, 0),
3171 VOID_TYPE_P (TREE_TYPE (arg1))
3172 ? arg1 : invert_truthvalue_loc (loc1, arg1),
3173 VOID_TYPE_P (TREE_TYPE (arg2))
3174 ? arg2 : invert_truthvalue_loc (loc2, arg2));
3175 break;
3178 case COMPOUND_EXPR:
3179 loc1 = EXPR_LOCATION (TREE_OPERAND (arg, 1));
3180 if (loc1 == UNKNOWN_LOCATION)
3181 loc1 = loc;
3182 t = build2 (COMPOUND_EXPR, type,
3183 TREE_OPERAND (arg, 0),
3184 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 1)));
3185 break;
3187 case NON_LVALUE_EXPR:
3188 loc1 = EXPR_LOCATION (TREE_OPERAND (arg, 0));
3189 if (loc1 == UNKNOWN_LOCATION)
3190 loc1 = loc;
3191 return invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0));
3193 CASE_CONVERT:
3194 if (TREE_CODE (TREE_TYPE (arg)) == BOOLEAN_TYPE)
3196 t = build1 (TRUTH_NOT_EXPR, type, arg);
3197 break;
3200 /* ... fall through ... */
3202 case FLOAT_EXPR:
3203 loc1 = EXPR_LOCATION (TREE_OPERAND (arg, 0));
3204 if (loc1 == UNKNOWN_LOCATION)
3205 loc1 = loc;
3206 t = build1 (TREE_CODE (arg), type,
3207 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)));
3208 break;
3210 case BIT_AND_EXPR:
3211 if (!integer_onep (TREE_OPERAND (arg, 1)))
3212 return NULL_TREE;
3213 t = build2 (EQ_EXPR, type, arg, build_int_cst (type, 0));
3214 break;
3216 case SAVE_EXPR:
3217 t = build1 (TRUTH_NOT_EXPR, type, arg);
3218 break;
3220 case CLEANUP_POINT_EXPR:
3221 loc1 = EXPR_LOCATION (TREE_OPERAND (arg, 0));
3222 if (loc1 == UNKNOWN_LOCATION)
3223 loc1 = loc;
3224 t = build1 (CLEANUP_POINT_EXPR, type,
3225 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)));
3226 break;
3228 default:
3229 t = NULL_TREE;
3230 break;
3233 if (t)
3234 SET_EXPR_LOCATION (t, loc);
3236 return t;
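The TRUTH_AND_EXPR and TRUTH_OR_EXPR arms above are De Morgan's laws, and the XOR arm inverts just one operand; both can be checked exhaustively over boolean operands (plain C, not GCC code):

#include <assert.h>

int main (void)
{
  for (int a = 0; a <= 1; a++)
    for (int b = 0; b <= 1; b++)
      {
        assert (!(a && b) == (!a || !b));  /* NOT (AND) -> OR of NOTs        */
        assert (!(a || b) == (!a && !b));  /* NOT (OR)  -> AND of NOTs       */
        assert ((a != !b) == !(a != b));   /* NOT (XOR) -> invert one operand */
      }
  return 0;
}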
3239 /* Return a simplified tree node for the truth-negation of ARG. This
3240 never alters ARG itself. We assume that ARG is an operation that
3241 returns a truth value (0 or 1).
3243 FIXME: one would think we would fold the result, but it causes
3244 problems with the dominator optimizer. */
3246 tree
3247 invert_truthvalue_loc (location_t loc, tree arg)
3249 tree tem;
3251 if (TREE_CODE (arg) == ERROR_MARK)
3252 return arg;
3254 tem = fold_truth_not_expr (loc, arg);
3255 if (!tem)
3257 tem = build1 (TRUTH_NOT_EXPR, TREE_TYPE (arg), arg);
3258 SET_EXPR_LOCATION (tem, loc);
3261 return tem;
3264 /* Given a bit-wise operation CODE applied to ARG0 and ARG1, see if both
3265 operands are another bit-wise operation with a common input. If so,
3266 distribute the bit operations to save an operation and possibly two if
3267 constants are involved. For example, convert
3268 (A | B) & (A | C) into A | (B & C)
3269 Further simplification will occur if B and C are constants.
3271 If this optimization cannot be done, 0 will be returned. */
3273 static tree
3274 distribute_bit_expr (location_t loc, enum tree_code code, tree type,
3275 tree arg0, tree arg1)
3277 tree common;
3278 tree left, right;
3280 if (TREE_CODE (arg0) != TREE_CODE (arg1)
3281 || TREE_CODE (arg0) == code
3282 || (TREE_CODE (arg0) != BIT_AND_EXPR
3283 && TREE_CODE (arg0) != BIT_IOR_EXPR))
3284 return 0;
3286 if (operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 0), 0))
3288 common = TREE_OPERAND (arg0, 0);
3289 left = TREE_OPERAND (arg0, 1);
3290 right = TREE_OPERAND (arg1, 1);
3292 else if (operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 1), 0))
3294 common = TREE_OPERAND (arg0, 0);
3295 left = TREE_OPERAND (arg0, 1);
3296 right = TREE_OPERAND (arg1, 0);
3298 else if (operand_equal_p (TREE_OPERAND (arg0, 1), TREE_OPERAND (arg1, 0), 0))
3300 common = TREE_OPERAND (arg0, 1);
3301 left = TREE_OPERAND (arg0, 0);
3302 right = TREE_OPERAND (arg1, 1);
3304 else if (operand_equal_p (TREE_OPERAND (arg0, 1), TREE_OPERAND (arg1, 1), 0))
3306 common = TREE_OPERAND (arg0, 1);
3307 left = TREE_OPERAND (arg0, 0);
3308 right = TREE_OPERAND (arg1, 0);
3310 else
3311 return 0;
3313 common = fold_convert_loc (loc, type, common);
3314 left = fold_convert_loc (loc, type, left);
3315 right = fold_convert_loc (loc, type, right);
3316 return fold_build2_loc (loc, TREE_CODE (arg0), type, common,
3317 fold_build2_loc (loc, code, type, left, right));
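The distributive identity used here, (A | B) & (A | C) == A | (B & C), and its dual hold for all bit patterns; an exhaustive check over small operands:

#include <assert.h>

int main (void)
{
  for (unsigned a = 0; a < 16; a++)
    for (unsigned b = 0; b < 16; b++)
      for (unsigned c = 0; c < 16; c++)
        {
          assert (((a | b) & (a | c)) == (a | (b & c)));
          assert (((a & b) | (a & c)) == (a & (b | c)));
        }
  return 0;
}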
3320 /* Knowing that ARG0 and ARG1 are both RDIV_EXPRs, simplify a binary operation
3321 with code CODE. This optimization is unsafe. */
3322 static tree
3323 distribute_real_division (location_t loc, enum tree_code code, tree type,
3324 tree arg0, tree arg1)
3326 bool mul0 = TREE_CODE (arg0) == MULT_EXPR;
3327 bool mul1 = TREE_CODE (arg1) == MULT_EXPR;
3329 /* (A / C) +- (B / C) -> (A +- B) / C. */
3330 if (mul0 == mul1
3331 && operand_equal_p (TREE_OPERAND (arg0, 1),
3332 TREE_OPERAND (arg1, 1), 0))
3333 return fold_build2_loc (loc, mul0 ? MULT_EXPR : RDIV_EXPR, type,
3334 fold_build2_loc (loc, code, type,
3335 TREE_OPERAND (arg0, 0),
3336 TREE_OPERAND (arg1, 0)),
3337 TREE_OPERAND (arg0, 1));
3339 /* (A / C1) +- (A / C2) -> A * (1 / C1 +- 1 / C2). */
3340 if (operand_equal_p (TREE_OPERAND (arg0, 0),
3341 TREE_OPERAND (arg1, 0), 0)
3342 && TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
3343 && TREE_CODE (TREE_OPERAND (arg1, 1)) == REAL_CST)
3345 REAL_VALUE_TYPE r0, r1;
3346 r0 = TREE_REAL_CST (TREE_OPERAND (arg0, 1));
3347 r1 = TREE_REAL_CST (TREE_OPERAND (arg1, 1));
3348 if (!mul0)
3349 real_arithmetic (&r0, RDIV_EXPR, &dconst1, &r0);
3350 if (!mul1)
3351 real_arithmetic (&r1, RDIV_EXPR, &dconst1, &r1);
3352 real_arithmetic (&r0, code, &r0, &r1);
3353 return fold_build2_loc (loc, MULT_EXPR, type,
3354 TREE_OPERAND (arg0, 0),
3355 build_real (type, r0));
3358 return NULL_TREE;
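"Unsafe" is meant literally: in the (A / C1) +- (A / C2) form, the reciprocal 1 / C1 can overflow even though A / C1 is finite. A demonstration with values chosen to trigger that (assumes IEEE double):

#include <assert.h>
#include <math.h>

int main (void)
{
  double a = 1e-10, c = 1e-310;      /* c is subnormal          */
  double direct = a / c;             /* 1e300: finite           */
  double rewritten = a * (1.0 / c);  /* 1 / c overflows to +inf */
  assert (isfinite (direct) && isinf (rewritten));
  return 0;
}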
3361 /* Return a BIT_FIELD_REF of type TYPE to refer to BITSIZE bits of INNER
3362 starting at BITPOS. The field is unsigned if UNSIGNEDP is nonzero. */
3364 static tree
3365 make_bit_field_ref (location_t loc, tree inner, tree type,
3366 HOST_WIDE_INT bitsize, HOST_WIDE_INT bitpos, int unsignedp)
3368 tree result, bftype;
3370 if (bitpos == 0)
3372 tree size = TYPE_SIZE (TREE_TYPE (inner));
3373 if ((INTEGRAL_TYPE_P (TREE_TYPE (inner))
3374 || POINTER_TYPE_P (TREE_TYPE (inner)))
3375 && host_integerp (size, 0)
3376 && tree_low_cst (size, 0) == bitsize)
3377 return fold_convert_loc (loc, type, inner);
3380 bftype = type;
3381 if (TYPE_PRECISION (bftype) != bitsize
3382 || TYPE_UNSIGNED (bftype) == !unsignedp)
3383 bftype = build_nonstandard_integer_type (bitsize, 0);
3385 result = build3 (BIT_FIELD_REF, bftype, inner,
3386 size_int (bitsize), bitsize_int (bitpos));
3387 SET_EXPR_LOCATION (result, loc);
3389 if (bftype != type)
3390 result = fold_convert_loc (loc, type, result);
3392 return result;
3395 /* Optimize a bit-field compare.
3397 There are two cases: First is a compare against a constant and the
3398 second is a comparison of two items where the fields are at the same
3399 bit position relative to the start of a chunk (byte, halfword, word)
3400 large enough to contain it. In these cases we can avoid the shift
3401 implicit in bitfield extractions.
3403 For constants, we emit a compare of the shifted constant with the
3404 BIT_AND_EXPR of a mask and a byte, halfword, or word of the operand being
3405 compared. For two fields at the same position, we do the ANDs with the
3406 similar mask and compare the result of the ANDs.
3408 CODE is the comparison code, known to be either NE_EXPR or EQ_EXPR.
3409 COMPARE_TYPE is the type of the comparison, and LHS and RHS
3410 are the left and right operands of the comparison, respectively.
3412 If the optimization described above can be done, we return the resulting
3413 tree. Otherwise we return zero. */
3415 static tree
3416 optimize_bit_field_compare (location_t loc, enum tree_code code,
3417 tree compare_type, tree lhs, tree rhs)
3419 HOST_WIDE_INT lbitpos, lbitsize, rbitpos, rbitsize, nbitpos, nbitsize;
3420 tree type = TREE_TYPE (lhs);
3421 tree signed_type, unsigned_type;
3422 int const_p = TREE_CODE (rhs) == INTEGER_CST;
3423 enum machine_mode lmode, rmode, nmode;
3424 int lunsignedp, runsignedp;
3425 int lvolatilep = 0, rvolatilep = 0;
3426 tree linner, rinner = NULL_TREE;
3427 tree mask;
3428 tree offset;
3430 /* Get all the information about the extractions being done. If the bit size
3431 is the same as the size of the underlying object, we aren't doing an
3432 extraction at all and so can do nothing. We also don't want to
3433 do anything if the inner expression is a PLACEHOLDER_EXPR since we
3434 then will no longer be able to replace it. */
3435 linner = get_inner_reference (lhs, &lbitsize, &lbitpos, &offset, &lmode,
3436 &lunsignedp, &lvolatilep, false);
3437 if (linner == lhs || lbitsize == GET_MODE_BITSIZE (lmode) || lbitsize < 0
3438 || offset != 0 || TREE_CODE (linner) == PLACEHOLDER_EXPR)
3439 return 0;
3441 if (!const_p)
3443 /* If this is not a constant, we can only do something if bit positions,
3444 sizes, and signedness are the same. */
3445 rinner = get_inner_reference (rhs, &rbitsize, &rbitpos, &offset, &rmode,
3446 &runsignedp, &rvolatilep, false);
3448 if (rinner == rhs || lbitpos != rbitpos || lbitsize != rbitsize
3449 || lunsignedp != runsignedp || offset != 0
3450 || TREE_CODE (rinner) == PLACEHOLDER_EXPR)
3451 return 0;
3454 /* See if we can find a mode to refer to this field. We should be able to,
3455 but fail if we can't. */
3456 if (lvolatilep
3457 && GET_MODE_BITSIZE (lmode) > 0
3458 && flag_strict_volatile_bitfields > 0)
3459 nmode = lmode;
3460 else
3461 nmode = get_best_mode (lbitsize, lbitpos,
3462 const_p ? TYPE_ALIGN (TREE_TYPE (linner))
3463 : MIN (TYPE_ALIGN (TREE_TYPE (linner)),
3464 TYPE_ALIGN (TREE_TYPE (rinner))),
3465 word_mode, lvolatilep || rvolatilep);
3466 if (nmode == VOIDmode)
3467 return 0;
3469 /* Set signed and unsigned types of the precision of this mode for the
3470 shifts below. */
3471 signed_type = lang_hooks.types.type_for_mode (nmode, 0);
3472 unsigned_type = lang_hooks.types.type_for_mode (nmode, 1);
3474 /* Compute the bit position and size for the new reference and our offset
3475 within it. If the new reference is the same size as the original, we
3476 won't optimize anything, so return zero. */
3477 nbitsize = GET_MODE_BITSIZE (nmode);
3478 nbitpos = lbitpos & ~ (nbitsize - 1);
3479 lbitpos -= nbitpos;
3480 if (nbitsize == lbitsize)
3481 return 0;
3483 if (BYTES_BIG_ENDIAN)
3484 lbitpos = nbitsize - lbitsize - lbitpos;
3486 /* Make the mask to be used against the extracted field. */
3487 mask = build_int_cst_type (unsigned_type, -1);
3488 mask = const_binop (LSHIFT_EXPR, mask, size_int (nbitsize - lbitsize));
3489 mask = const_binop (RSHIFT_EXPR, mask,
3490 size_int (nbitsize - lbitsize - lbitpos));
3492 if (! const_p)
3493 /* If not comparing with constant, just rework the comparison
3494 and return. */
3495 return fold_build2_loc (loc, code, compare_type,
3496 fold_build2_loc (loc, BIT_AND_EXPR, unsigned_type,
3497 make_bit_field_ref (loc, linner,
3498 unsigned_type,
3499 nbitsize, nbitpos,
3500 1),
3501 mask),
3502 fold_build2_loc (loc, BIT_AND_EXPR, unsigned_type,
3503 make_bit_field_ref (loc, rinner,
3504 unsigned_type,
3505 nbitsize, nbitpos,
3506 1),
3507 mask));
3509 /* Otherwise, we are handling the constant case. See if the constant is too
3510 big for the field. Warn and return a tree for 0 (false) if so. We do
3511 this not only for its own sake, but to avoid having to test for this
3512 error case below. If we didn't, we might generate wrong code.
3514 For unsigned fields, the constant shifted right by the field length should
3515 be all zero. For signed fields, the high-order bits should agree with
3516 the sign bit. */
3518 if (lunsignedp)
3520 if (! integer_zerop (const_binop (RSHIFT_EXPR,
3521 fold_convert_loc (loc,
3522 unsigned_type, rhs),
3523 size_int (lbitsize))))
3525 warning (0, "comparison is always %d due to width of bit-field",
3526 code == NE_EXPR);
3527 return constant_boolean_node (code == NE_EXPR, compare_type);
3530 else
3532 tree tem = const_binop (RSHIFT_EXPR,
3533 fold_convert_loc (loc, signed_type, rhs),
3534 size_int (lbitsize - 1));
3535 if (! integer_zerop (tem) && ! integer_all_onesp (tem))
3537 warning (0, "comparison is always %d due to width of bit-field",
3538 code == NE_EXPR);
3539 return constant_boolean_node (code == NE_EXPR, compare_type);
3543 /* Single-bit compares should always be against zero. */
3544 if (lbitsize == 1 && ! integer_zerop (rhs))
3546 code = code == EQ_EXPR ? NE_EXPR : EQ_EXPR;
3547 rhs = build_int_cst (type, 0);
3550 /* Make a new bitfield reference, shift the constant over the
3551 appropriate number of bits and mask it with the computed mask
3552 (in case this was a signed field). If we changed it, make a new one. */
3553 lhs = make_bit_field_ref (loc, linner, unsigned_type, nbitsize, nbitpos, 1);
3554 if (lvolatilep)
3556 TREE_SIDE_EFFECTS (lhs) = 1;
3557 TREE_THIS_VOLATILE (lhs) = 1;
3560 rhs = const_binop (BIT_AND_EXPR,
3561 const_binop (LSHIFT_EXPR,
3562 fold_convert_loc (loc, unsigned_type, rhs),
3563 size_int (lbitpos)),
3564 mask);
3566 lhs = build2 (code, compare_type,
3567 build2 (BIT_AND_EXPR, unsigned_type, lhs, mask),
3568 rhs);
3569 SET_EXPR_LOCATION (lhs, loc);
3570 return lhs;
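The shape of the constant case in ordinary C: rather than extracting the field (shift, then mask) and comparing, mask the containing word in place and compare against the constant shifted once at compile time. A hand-written sketch of the equivalence (the field position and width are made up for illustration):

#include <assert.h>

int main (void)
{
  unsigned word = 0x160;                  /* 4-bit field at bit 5 holds 0xB */
  unsigned pos = 5, fmask = 0xFu << pos;  /* mask left in place             */
  unsigned cst = 0xB;
  int extracted = ((word >> pos) & 0xFu) == cst;  /* run-time shift         */
  int folded    = (word & fmask) == (cst << pos); /* shift folded away      */
  assert (extracted && folded);
  return 0;
}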
3573 /* Subroutine for fold_truthop: decode a field reference.
3575 If EXP is a comparison reference, we return the innermost reference.
3577 *PBITSIZE is set to the number of bits in the reference, *PBITPOS is
3578 set to the starting bit number.
3580 If the innermost field can be completely contained in a mode-sized
3581 unit, *PMODE is set to that mode. Otherwise, it is set to VOIDmode.
3583 *PVOLATILEP is set to 1 if any expression encountered is volatile;
3584 otherwise it is not changed.
3586 *PUNSIGNEDP is set to the signedness of the field.
3588 *PMASK is set to the mask used. This is either contained in a
3589 BIT_AND_EXPR or derived from the width of the field.
3591 *PAND_MASK is set to the mask found in a BIT_AND_EXPR, if any.
3593 Return 0 if this is not a component reference or is one that we can't
3594 do anything with. */
3596 static tree
3597 decode_field_reference (location_t loc, tree exp, HOST_WIDE_INT *pbitsize,
3598 HOST_WIDE_INT *pbitpos, enum machine_mode *pmode,
3599 int *punsignedp, int *pvolatilep,
3600 tree *pmask, tree *pand_mask)
3602 tree outer_type = 0;
3603 tree and_mask = 0;
3604 tree mask, inner, offset;
3605 tree unsigned_type;
3606 unsigned int precision;
3608 /* All the optimizations using this function assume integer fields.
3609 There are problems with FP fields since the type_for_size call
3610 below can fail for, e.g., XFmode. */
3611 if (! INTEGRAL_TYPE_P (TREE_TYPE (exp)))
3612 return 0;
3614 /* We are interested in the bare arrangement of bits, so strip everything
3615 that doesn't affect the machine mode. However, record the type of the
3616 outermost expression if it may matter below. */
3617 if (CONVERT_EXPR_P (exp)
3618 || TREE_CODE (exp) == NON_LVALUE_EXPR)
3619 outer_type = TREE_TYPE (exp);
3620 STRIP_NOPS (exp);
3622 if (TREE_CODE (exp) == BIT_AND_EXPR)
3624 and_mask = TREE_OPERAND (exp, 1);
3625 exp = TREE_OPERAND (exp, 0);
3626 STRIP_NOPS (exp); STRIP_NOPS (and_mask);
3627 if (TREE_CODE (and_mask) != INTEGER_CST)
3628 return 0;
3631 inner = get_inner_reference (exp, pbitsize, pbitpos, &offset, pmode,
3632 punsignedp, pvolatilep, false);
3633 if ((inner == exp && and_mask == 0)
3634 || *pbitsize < 0 || offset != 0
3635 || TREE_CODE (inner) == PLACEHOLDER_EXPR)
3636 return 0;
3638 /* If the number of bits in the reference is the same as the bitsize of
3639 the outer type, then the outer type gives the signedness. Otherwise
3640 (in case of a small bitfield) the signedness is unchanged. */
3641 if (outer_type && *pbitsize == TYPE_PRECISION (outer_type))
3642 *punsignedp = TYPE_UNSIGNED (outer_type);
3644 /* Compute the mask to access the bitfield. */
3645 unsigned_type = lang_hooks.types.type_for_size (*pbitsize, 1);
3646 precision = TYPE_PRECISION (unsigned_type);
3648 mask = build_int_cst_type (unsigned_type, -1);
3650 mask = const_binop (LSHIFT_EXPR, mask, size_int (precision - *pbitsize));
3651 mask = const_binop (RSHIFT_EXPR, mask, size_int (precision - *pbitsize));
3653 /* Merge it with the mask we found in the BIT_AND_EXPR, if any. */
3654 if (and_mask != 0)
3655 mask = fold_build2_loc (loc, BIT_AND_EXPR, unsigned_type,
3656 fold_convert_loc (loc, unsigned_type, and_mask), mask);
3658 *pmask = mask;
3659 *pand_mask = and_mask;
3660 return inner;
3663 /* Return nonzero if MASK represents a mask of SIZE ones in the low-order
3664 bit positions. */
3666 static int
3667 all_ones_mask_p (const_tree mask, int size)
3669 tree type = TREE_TYPE (mask);
3670 unsigned int precision = TYPE_PRECISION (type);
3671 tree tmask;
3673 tmask = build_int_cst_type (signed_type_for (type), -1);
3675 return
3676 tree_int_cst_equal (mask,
3677 const_binop (RSHIFT_EXPR,
3678 const_binop (LSHIFT_EXPR, tmask,
3679 size_int (precision - size)),
3680 size_int (precision - size)));
3683 /* Subroutine for fold: determine if VAL is the INTEGER_CST that
3684 represents the sign bit of EXP's type. If EXP represents a sign
3685 or zero extension, also test VAL against the unextended type.
3686 The return value is the (sub)expression whose sign bit is VAL,
3687 or NULL_TREE otherwise. */
3689 static tree
3690 sign_bit_p (tree exp, const_tree val)
3692 unsigned HOST_WIDE_INT mask_lo, lo;
3693 HOST_WIDE_INT mask_hi, hi;
3694 int width;
3695 tree t;
3697 /* Tree EXP must have an integral type. */
3698 t = TREE_TYPE (exp);
3699 if (! INTEGRAL_TYPE_P (t))
3700 return NULL_TREE;
3702 /* Tree VAL must be an integer constant. */
3703 if (TREE_CODE (val) != INTEGER_CST
3704 || TREE_OVERFLOW (val))
3705 return NULL_TREE;
3707 width = TYPE_PRECISION (t);
3708 if (width > HOST_BITS_PER_WIDE_INT)
3710 hi = (unsigned HOST_WIDE_INT) 1 << (width - HOST_BITS_PER_WIDE_INT - 1);
3711 lo = 0;
3713 mask_hi = ((unsigned HOST_WIDE_INT) -1
3714 >> (2 * HOST_BITS_PER_WIDE_INT - width));
3715 mask_lo = -1;
3717 else
3719 hi = 0;
3720 lo = (unsigned HOST_WIDE_INT) 1 << (width - 1);
3722 mask_hi = 0;
3723 mask_lo = ((unsigned HOST_WIDE_INT) -1
3724 >> (HOST_BITS_PER_WIDE_INT - width));
3727 /* We mask off those bits beyond TREE_TYPE (exp) so that we can
3728 treat VAL as if it were unsigned. */
3729 if ((TREE_INT_CST_HIGH (val) & mask_hi) == hi
3730 && (TREE_INT_CST_LOW (val) & mask_lo) == lo)
3731 return exp;
3733 /* Handle extension from a narrower type. */
3734 if (TREE_CODE (exp) == NOP_EXPR
3735 && TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0))) < width)
3736 return sign_bit_p (TREE_OPERAND (exp, 0), val);
3738 return NULL_TREE;
3741 /* Subroutine for fold_truthop: determine if an operand is simple enough
3742 to be evaluated unconditionally. */
3744 static int
3745 simple_operand_p (const_tree exp)
3747 /* Strip any conversions that don't change the machine mode. */
3748 STRIP_NOPS (exp);
3750 return (CONSTANT_CLASS_P (exp)
3751 || TREE_CODE (exp) == SSA_NAME
3752 || (DECL_P (exp)
3753 && ! TREE_ADDRESSABLE (exp)
3754 && ! TREE_THIS_VOLATILE (exp)
3755 && ! DECL_NONLOCAL (exp)
3756 /* Don't regard global variables as simple. They may be
3757 allocated in ways unknown to the compiler (shared memory,
3758 #pragma weak, etc). */
3759 && ! TREE_PUBLIC (exp)
3760 && ! DECL_EXTERNAL (exp)
3761 /* Loading a static variable is unduly expensive, but global
3762 registers aren't expensive. */
3763 && (! TREE_STATIC (exp) || DECL_REGISTER (exp))));
3766 /* The following functions are subroutines to fold_range_test and allow it to
3767 try to change a logical combination of comparisons into a range test.
3769 For example, both
3770 X == 2 || X == 3 || X == 4 || X == 5
3772 X >= 2 && X <= 5
3773 are converted to
3774 (unsigned) (X - 2) <= 3
3776 We describe each set of comparisons as being either inside or outside
3777 a range, using a variable named like IN_P, and then describe the
3778 range with a lower and upper bound. If one of the bounds is omitted,
3779 it represents either the highest or lowest value of the type.
3781 In the comments below, we represent a range by two numbers in brackets
3782 preceded by a "+" to designate being inside that range, or a "-" to
3783 designate being outside that range, so the condition can be inverted by
3784 flipping the prefix. An omitted bound is represented by a "-". For
3785 example, "- [-, 10]" means being outside the range starting at the lowest
3786 possible value and ending at 10, in other words, being greater than 10.
3787 The range "+ [-, -]" is always true and hence the range "- [-, -]" is
3788 always false.
3790 We set up things so that the missing bounds are handled in a consistent
3791 manner so neither a missing bound nor "true" and "false" need to be
3792 handled using a special case. */
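/* A worked example (sketch): for
     X == 2 || X == 3 || X == 4 || X == 5
   each comparison contributes a one-element range, + [2, 2] through
   + [5, 5]; merging them pairwise yields + [2, 5], which
   build_range_check below turns into the single unsigned test
     (unsigned) (X - 2) <= 3.
   The complemented condition X != 2 && X != 3 && X != 4 && X != 5 is
   the same computation carried out on - [2, 5].  */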
3794 /* Return the result of applying CODE to ARG0 and ARG1, but handle the case
3795 of ARG0 and/or ARG1 being omitted, meaning an unlimited range. UPPER0_P
3796 and UPPER1_P are nonzero if the respective argument is an upper bound
3797 and zero for a lower. TYPE, if nonzero, is the type of the result; it
3798 must be specified for a comparison. ARG1 will be converted to ARG0's
3799 type if both are specified. */
3801 static tree
3802 range_binop (enum tree_code code, tree type, tree arg0, int upper0_p,
3803 tree arg1, int upper1_p)
3805 tree tem;
3806 int result;
3807 int sgn0, sgn1;
3809 /* If neither arg represents infinity, do the normal operation.
3810 Else, if not a comparison, return infinity. Else handle the special
3811 comparison rules. Note that most of the cases below won't occur, but
3812 are handled for consistency. */
3814 if (arg0 != 0 && arg1 != 0)
3816 tem = fold_build2 (code, type != 0 ? type : TREE_TYPE (arg0),
3817 arg0, fold_convert (TREE_TYPE (arg0), arg1));
3818 STRIP_NOPS (tem);
3819 return TREE_CODE (tem) == INTEGER_CST ? tem : 0;
3822 if (TREE_CODE_CLASS (code) != tcc_comparison)
3823 return 0;
3825 /* Set SGN[01] to -1 if ARG[01] is a lower bound, 1 for upper, and 0
3826 for neither. In real maths, we cannot assume open ended ranges are
3827 the same. But, this is computer arithmetic, where numbers are finite.
3828 We can therefore treat a missing bound as a value Z beyond every
3829 representable number -- above them for an upper bound, below them
3830 for a lower bound. This lets us treat unbounded ranges as equal.
3831 sgn0 = arg0 != 0 ? 0 : (upper0_p ? 1 : -1);
3832 sgn1 = arg1 != 0 ? 0 : (upper1_p ? 1 : -1);
3833 switch (code)
3835 case EQ_EXPR:
3836 result = sgn0 == sgn1;
3837 break;
3838 case NE_EXPR:
3839 result = sgn0 != sgn1;
3840 break;
3841 case LT_EXPR:
3842 result = sgn0 < sgn1;
3843 break;
3844 case LE_EXPR:
3845 result = sgn0 <= sgn1;
3846 break;
3847 case GT_EXPR:
3848 result = sgn0 > sgn1;
3849 break;
3850 case GE_EXPR:
3851 result = sgn0 >= sgn1;
3852 break;
3853 default:
3854 gcc_unreachable ();
3857 return constant_boolean_node (result, type);
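/* A worked example (sketch): a missing lower bound (SGN == -1) compares
   LT_EXPR against a missing upper bound (SGN == 1), and two missing
   lower bounds compare EQ_EXPR equal, so e.g.
   range_binop (EQ_EXPR, type, 0, 0, 0, 0) yields true.  */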
3860 /* Given EXP, a logical expression, set the range it is testing into
3861 variables denoted by PIN_P, PLOW, and PHIGH. Return the expression
3862 actually being tested. *PLOW and *PHIGH will be made of the same
3863 type as the returned expression. If EXP is not a comparison, we
3864 will most likely not be returning a useful value and range. Set
3865 *STRICT_OVERFLOW_P to true if the return value is only valid
3866 because signed overflow is undefined; otherwise, do not change
3867 *STRICT_OVERFLOW_P. */
3869 tree
3870 make_range (tree exp, int *pin_p, tree *plow, tree *phigh,
3871 bool *strict_overflow_p)
3873 enum tree_code code;
3874 tree arg0 = NULL_TREE, arg1 = NULL_TREE;
3875 tree exp_type = NULL_TREE, arg0_type = NULL_TREE;
3876 int in_p, n_in_p;
3877 tree low, high, n_low, n_high;
3878 location_t loc = EXPR_LOCATION (exp);
3880 /* Start with simply saying "EXP != 0" and then look at the code of EXP
3881 and see if we can refine the range. Some of the cases below may not
3882 happen, but it doesn't seem worth worrying about this. We "continue"
3883 the outer loop when we've changed something; otherwise we "break"
3884 the switch, which will "break" the while. */
3886 in_p = 0;
3887 low = high = build_int_cst (TREE_TYPE (exp), 0);
3889 while (1)
3891 code = TREE_CODE (exp);
3892 exp_type = TREE_TYPE (exp);
3894 if (IS_EXPR_CODE_CLASS (TREE_CODE_CLASS (code)))
3896 if (TREE_OPERAND_LENGTH (exp) > 0)
3897 arg0 = TREE_OPERAND (exp, 0);
3898 if (TREE_CODE_CLASS (code) == tcc_comparison
3899 || TREE_CODE_CLASS (code) == tcc_unary
3900 || TREE_CODE_CLASS (code) == tcc_binary)
3901 arg0_type = TREE_TYPE (arg0);
3902 if (TREE_CODE_CLASS (code) == tcc_binary
3903 || TREE_CODE_CLASS (code) == tcc_comparison
3904 || (TREE_CODE_CLASS (code) == tcc_expression
3905 && TREE_OPERAND_LENGTH (exp) > 1))
3906 arg1 = TREE_OPERAND (exp, 1);
3909 switch (code)
3911 case TRUTH_NOT_EXPR:
3912 in_p = ! in_p, exp = arg0;
3913 continue;
3915 case EQ_EXPR: case NE_EXPR:
3916 case LT_EXPR: case LE_EXPR: case GE_EXPR: case GT_EXPR:
3917 /* We can only do something if the range is testing for zero
3918 and if the second operand is an integer constant. Note that
3919 saying something is "in" the range we make is done by
3920 complementing IN_P, since IN_P is already set up for the initial
3921 case of being not equal to zero; "out" leaves it alone.
3922 if (low == 0 || high == 0
3923 || ! integer_zerop (low) || ! integer_zerop (high)
3924 || TREE_CODE (arg1) != INTEGER_CST)
3925 break;
3927 switch (code)
3929 case NE_EXPR: /* - [c, c] */
3930 low = high = arg1;
3931 break;
3932 case EQ_EXPR: /* + [c, c] */
3933 in_p = ! in_p, low = high = arg1;
3934 break;
3935 case GT_EXPR: /* - [-, c] */
3936 low = 0, high = arg1;
3937 break;
3938 case GE_EXPR: /* + [c, -] */
3939 in_p = ! in_p, low = arg1, high = 0;
3940 break;
3941 case LT_EXPR: /* - [c, -] */
3942 low = arg1, high = 0;
3943 break;
3944 case LE_EXPR: /* + [-, c] */
3945 in_p = ! in_p, low = 0, high = arg1;
3946 break;
3947 default:
3948 gcc_unreachable ();
3951 /* If this is an unsigned comparison, we also know that EXP is
3952 greater than or equal to zero. We base the range tests we make
3953 on that fact, so we record it here so we can parse existing
3954 range tests. We test arg0_type since often the return type
3955 of, e.g. EQ_EXPR, is boolean. */
3956 if (TYPE_UNSIGNED (arg0_type) && (low == 0 || high == 0))
3958 if (! merge_ranges (&n_in_p, &n_low, &n_high,
3959 in_p, low, high, 1,
3960 build_int_cst (arg0_type, 0),
3961 NULL_TREE))
3962 break;
3964 in_p = n_in_p, low = n_low, high = n_high;
3966 /* If the high bound is missing, but we have a nonzero low
3967 bound, reverse the range so it goes from zero to the low bound
3968 minus 1. */
3969 if (high == 0 && low && ! integer_zerop (low))
3971 in_p = ! in_p;
3972 high = range_binop (MINUS_EXPR, NULL_TREE, low, 0,
3973 integer_one_node, 0);
3974 low = build_int_cst (arg0_type, 0);
3978 exp = arg0;
3979 continue;
3981 case NEGATE_EXPR:
3982 /* (-x) IN [a,b] -> x in [-b, -a] */
3983 n_low = range_binop (MINUS_EXPR, exp_type,
3984 build_int_cst (exp_type, 0),
3985 0, high, 1);
3986 n_high = range_binop (MINUS_EXPR, exp_type,
3987 build_int_cst (exp_type, 0),
3988 0, low, 0);
3989 if (n_high != 0 && TREE_OVERFLOW (n_high))
3990 break;
3991 goto normalize;
3993 case BIT_NOT_EXPR:
3994 /* ~ X -> -X - 1 */
3995 exp = build2 (MINUS_EXPR, exp_type, negate_expr (arg0),
3996 build_int_cst (exp_type, 1));
3997 SET_EXPR_LOCATION (exp, loc);
3998 continue;
4000 case PLUS_EXPR: case MINUS_EXPR:
4001 if (TREE_CODE (arg1) != INTEGER_CST)
4002 break;
4004 /* If flag_wrapv and ARG0_TYPE is signed, then we cannot
4005 move a constant to the other side. */
4006 if (!TYPE_UNSIGNED (arg0_type)
4007 && !TYPE_OVERFLOW_UNDEFINED (arg0_type))
4008 break;
4010 /* If EXP is signed, any overflow in the computation is undefined,
4011 so we don't worry about it so long as our computations on
4012 the bounds don't overflow. For unsigned, overflow is defined
4013 and this is exactly the right thing. */
4014 n_low = range_binop (code == MINUS_EXPR ? PLUS_EXPR : MINUS_EXPR,
4015 arg0_type, low, 0, arg1, 0);
4016 n_high = range_binop (code == MINUS_EXPR ? PLUS_EXPR : MINUS_EXPR,
4017 arg0_type, high, 1, arg1, 0);
4018 if ((n_low != 0 && TREE_OVERFLOW (n_low))
4019 || (n_high != 0 && TREE_OVERFLOW (n_high)))
4020 break;
4022 if (TYPE_OVERFLOW_UNDEFINED (arg0_type))
4023 *strict_overflow_p = true;
4025 normalize:
4026 /* Check for an unsigned range which has wrapped around the maximum
4027 value thus making n_high < n_low, and normalize it. */
4028 if (n_low && n_high && tree_int_cst_lt (n_high, n_low))
4030 low = range_binop (PLUS_EXPR, arg0_type, n_high, 0,
4031 integer_one_node, 0);
4032 high = range_binop (MINUS_EXPR, arg0_type, n_low, 0,
4033 integer_one_node, 0);
4035 /* If the range is of the form +/- [ x+1, x ], we won't
4036 be able to normalize it. But then, it represents the
4037 whole range or the empty set, so make it
4038 +/- [ -, - ]. */
4039 if (tree_int_cst_equal (n_low, low)
4040 && tree_int_cst_equal (n_high, high))
4041 low = high = 0;
4042 else
4043 in_p = ! in_p;
4045 else
4046 low = n_low, high = n_high;
4048 exp = arg0;
4049 continue;
4051 CASE_CONVERT: case NON_LVALUE_EXPR:
4052 if (TYPE_PRECISION (arg0_type) > TYPE_PRECISION (exp_type))
4053 break;
4055 if (! INTEGRAL_TYPE_P (arg0_type)
4056 || (low != 0 && ! int_fits_type_p (low, arg0_type))
4057 || (high != 0 && ! int_fits_type_p (high, arg0_type)))
4058 break;
4060 n_low = low, n_high = high;
4062 if (n_low != 0)
4063 n_low = fold_convert_loc (loc, arg0_type, n_low);
4065 if (n_high != 0)
4066 n_high = fold_convert_loc (loc, arg0_type, n_high);
4069 /* If we're converting arg0 from an unsigned type to exp,
4070 a signed type, we will be doing the comparison as unsigned.
4071 The tests above have already verified that LOW and HIGH
4072 are both positive.
4074 So we have to ensure that we will handle large unsigned
4075 values the same way that the current signed bounds treat
4076 negative values. */
4078 if (!TYPE_UNSIGNED (exp_type) && TYPE_UNSIGNED (arg0_type))
4080 tree high_positive;
4081 tree equiv_type;
4082 /* For fixed-point modes, we need to pass the saturating flag
4083 as the 2nd parameter. */
4084 if (ALL_FIXED_POINT_MODE_P (TYPE_MODE (arg0_type)))
4085 equiv_type = lang_hooks.types.type_for_mode
4086 (TYPE_MODE (arg0_type),
4087 TYPE_SATURATING (arg0_type));
4088 else
4089 equiv_type = lang_hooks.types.type_for_mode
4090 (TYPE_MODE (arg0_type), 1);
4092 /* A range without an upper bound is, naturally, unbounded.
4093 Since convert would have cropped a very large value, use
4094 the max value for the destination type. */
4095 high_positive
4096 = TYPE_MAX_VALUE (equiv_type) ? TYPE_MAX_VALUE (equiv_type)
4097 : TYPE_MAX_VALUE (arg0_type);
4099 if (TYPE_PRECISION (exp_type) == TYPE_PRECISION (arg0_type))
4100 high_positive = fold_build2_loc (loc, RSHIFT_EXPR, arg0_type,
4101 fold_convert_loc (loc, arg0_type,
4102 high_positive),
4103 build_int_cst (arg0_type, 1));
4105 /* If the low bound is specified, "and" the range with the
4106 range for which the original unsigned value will be
4107 positive. */
4108 if (low != 0)
4110 if (! merge_ranges (&n_in_p, &n_low, &n_high,
4111 1, n_low, n_high, 1,
4112 fold_convert_loc (loc, arg0_type,
4113 integer_zero_node),
4114 high_positive))
4115 break;
4117 in_p = (n_in_p == in_p);
4119 else
4121 /* Otherwise, "or" the range with the range of the input
4122 that will be interpreted as negative. */
4123 if (! merge_ranges (&n_in_p, &n_low, &n_high,
4124 0, n_low, n_high, 1,
4125 fold_convert_loc (loc, arg0_type,
4126 integer_zero_node),
4127 high_positive))
4128 break;
4130 in_p = (in_p != n_in_p);
4134 exp = arg0;
4135 low = n_low, high = n_high;
4136 continue;
4138 default:
4139 break;
4142 break;
4145 /* If EXP is a constant, we can evaluate whether this is true or false. */
4146 if (TREE_CODE (exp) == INTEGER_CST)
4148 in_p = in_p == (integer_onep (range_binop (GE_EXPR, integer_type_node,
4149 exp, 0, low, 0))
4150 && integer_onep (range_binop (LE_EXPR, integer_type_node,
4151 exp, 1, high, 1)));
4152 low = high = 0;
4153 exp = 0;
4156 *pin_p = in_p, *plow = low, *phigh = high;
4157 return exp;
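/* A worked example (sketch): for a signed "x + 10 < 20" the comparison
   case first records "x + 10 in - [20, -]"; the PLUS_EXPR case then
   moves the constant across, leaving "x in - [10, -]", i.e. x <= 9,
   and sets *STRICT_OVERFLOW_P because the move is only valid when
   signed overflow is undefined.  */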
4160 /* Given a range, LOW, HIGH, and IN_P, an expression, EXP, and a result
4161 type, TYPE, return an expression to test if EXP is in (or out of, depending
4162 on IN_P) the range. Return 0 if the test couldn't be created. */
4164 tree
4165 build_range_check (location_t loc, tree type, tree exp, int in_p,
4166 tree low, tree high)
4168 tree etype = TREE_TYPE (exp), value;
4170 #ifdef HAVE_canonicalize_funcptr_for_compare
4171 /* Disable this optimization for function pointer expressions
4172 on targets that require function pointer canonicalization. */
4173 if (HAVE_canonicalize_funcptr_for_compare
4174 && TREE_CODE (etype) == POINTER_TYPE
4175 && TREE_CODE (TREE_TYPE (etype)) == FUNCTION_TYPE)
4176 return NULL_TREE;
4177 #endif
4179 if (! in_p)
4181 value = build_range_check (loc, type, exp, 1, low, high);
4182 if (value != 0)
4183 return invert_truthvalue_loc (loc, value);
4185 return 0;
4188 if (low == 0 && high == 0)
4189 return build_int_cst (type, 1);
4191 if (low == 0)
4192 return fold_build2_loc (loc, LE_EXPR, type, exp,
4193 fold_convert_loc (loc, etype, high));
4195 if (high == 0)
4196 return fold_build2_loc (loc, GE_EXPR, type, exp,
4197 fold_convert_loc (loc, etype, low));
4199 if (operand_equal_p (low, high, 0))
4200 return fold_build2_loc (loc, EQ_EXPR, type, exp,
4201 fold_convert_loc (loc, etype, low));
4203 if (integer_zerop (low))
4205 if (! TYPE_UNSIGNED (etype))
4207 etype = unsigned_type_for (etype);
4208 high = fold_convert_loc (loc, etype, high);
4209 exp = fold_convert_loc (loc, etype, exp);
4211 return build_range_check (loc, type, exp, 1, 0, high);
4214 /* Optimize (c>=1) && (c<=127) into (signed char)c > 0. */
4215 if (integer_onep (low) && TREE_CODE (high) == INTEGER_CST)
4217 unsigned HOST_WIDE_INT lo;
4218 HOST_WIDE_INT hi;
4219 int prec;
4221 prec = TYPE_PRECISION (etype);
4222 if (prec <= HOST_BITS_PER_WIDE_INT)
4224 hi = 0;
4225 lo = ((unsigned HOST_WIDE_INT) 1 << (prec - 1)) - 1;
4227 else
4229 hi = ((HOST_WIDE_INT) 1 << (prec - HOST_BITS_PER_WIDE_INT - 1)) - 1;
4230 lo = (unsigned HOST_WIDE_INT) -1;
4233 if (TREE_INT_CST_HIGH (high) == hi && TREE_INT_CST_LOW (high) == lo)
4235 if (TYPE_UNSIGNED (etype))
4237 tree signed_etype = signed_type_for (etype);
4238 if (TYPE_PRECISION (signed_etype) != TYPE_PRECISION (etype))
4239 etype
4240 = build_nonstandard_integer_type (TYPE_PRECISION (etype), 0);
4241 else
4242 etype = signed_etype;
4243 exp = fold_convert_loc (loc, etype, exp);
4245 return fold_build2_loc (loc, GT_EXPR, type, exp,
4246 build_int_cst (etype, 0));
4250 /* Optimize (c>=low) && (c<=high) into (c-low>=0) && (c-low<=high-low).
4251 This requires wrap-around arithmetic for the type of the expression.
4252 First make sure that arithmetic in this type is valid, then make sure
4253 that it wraps around. */
4254 if (TREE_CODE (etype) == ENUMERAL_TYPE || TREE_CODE (etype) == BOOLEAN_TYPE)
4255 etype = lang_hooks.types.type_for_size (TYPE_PRECISION (etype),
4256 TYPE_UNSIGNED (etype));
4258 if (TREE_CODE (etype) == INTEGER_TYPE && !TYPE_OVERFLOW_WRAPS (etype))
4260 tree utype, minv, maxv;
4262 /* Check if (unsigned) INT_MAX + 1 == (unsigned) INT_MIN
4263 for the type in question, as we rely on this here. */
4264 utype = unsigned_type_for (etype);
4265 maxv = fold_convert_loc (loc, utype, TYPE_MAX_VALUE (etype));
4266 maxv = range_binop (PLUS_EXPR, NULL_TREE, maxv, 1,
4267 integer_one_node, 1);
4268 minv = fold_convert_loc (loc, utype, TYPE_MIN_VALUE (etype));
4270 if (integer_zerop (range_binop (NE_EXPR, integer_type_node,
4271 minv, 1, maxv, 1)))
4272 etype = utype;
4273 else
4274 return 0;
4277 high = fold_convert_loc (loc, etype, high);
4278 low = fold_convert_loc (loc, etype, low);
4279 exp = fold_convert_loc (loc, etype, exp);
4281 value = const_binop (MINUS_EXPR, high, low);
4284 if (POINTER_TYPE_P (etype))
4286 if (value != 0 && !TREE_OVERFLOW (value))
4288 low = fold_convert_loc (loc, sizetype, low);
4289 low = fold_build1_loc (loc, NEGATE_EXPR, sizetype, low);
4290 return build_range_check (loc, type,
4291 fold_build2_loc (loc, POINTER_PLUS_EXPR,
4292 etype, exp, low),
4293 1, build_int_cst (etype, 0), value);
4295 return 0;
4298 if (value != 0 && !TREE_OVERFLOW (value))
4299 return build_range_check (loc, type,
4300 fold_build2_loc (loc, MINUS_EXPR, etype, exp, low),
4301 1, build_int_cst (etype, 0), value);
4303 return 0;
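/* Worked examples (sketch): with IN_P = 1, LOW = 2 and HIGH = 5 on an
   unsigned char EXP, the final recursion produces
     (unsigned char) (EXP - 2) <= 3,
   while LOW = 1 with HIGH = 127 takes the "(c>=1) && (c<=127)" shortcut
   above and yields (signed char) EXP > 0.  */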
4306 /* Return the predecessor of VAL in its type, handling the infinite case. */
4308 static tree
4309 range_predecessor (tree val)
4311 tree type = TREE_TYPE (val);
4313 if (INTEGRAL_TYPE_P (type)
4314 && operand_equal_p (val, TYPE_MIN_VALUE (type), 0))
4315 return 0;
4316 else
4317 return range_binop (MINUS_EXPR, NULL_TREE, val, 0, integer_one_node, 0);
4320 /* Return the successor of VAL in its type, handling the infinite case. */
4322 static tree
4323 range_successor (tree val)
4325 tree type = TREE_TYPE (val);
4327 if (INTEGRAL_TYPE_P (type)
4328 && operand_equal_p (val, TYPE_MAX_VALUE (type), 0))
4329 return 0;
4330 else
4331 return range_binop (PLUS_EXPR, NULL_TREE, val, 0, integer_one_node, 0);
4334 /* Given two ranges, see if we can merge them into one. Return 1 if we
4335 can, 0 if we can't. Set the output range into the specified parameters. */
4337 bool
4338 merge_ranges (int *pin_p, tree *plow, tree *phigh, int in0_p, tree low0,
4339 tree high0, int in1_p, tree low1, tree high1)
4341 int no_overlap;
4342 int subset;
4343 int temp;
4344 tree tem;
4345 int in_p;
4346 tree low, high;
4347 int lowequal = ((low0 == 0 && low1 == 0)
4348 || integer_onep (range_binop (EQ_EXPR, integer_type_node,
4349 low0, 0, low1, 0)));
4350 int highequal = ((high0 == 0 && high1 == 0)
4351 || integer_onep (range_binop (EQ_EXPR, integer_type_node,
4352 high0, 1, high1, 1)));
4354 /* Make range 0 be the range that starts first, or ends last if they
4355 start at the same value. Swap them if that isn't the case. */
4356 if (integer_onep (range_binop (GT_EXPR, integer_type_node,
4357 low0, 0, low1, 0))
4358 || (lowequal
4359 && integer_onep (range_binop (GT_EXPR, integer_type_node,
4360 high1, 1, high0, 1))))
4362 temp = in0_p, in0_p = in1_p, in1_p = temp;
4363 tem = low0, low0 = low1, low1 = tem;
4364 tem = high0, high0 = high1, high1 = tem;
4367 /* Now flag two cases, whether the ranges are disjoint or whether the
4368 second range is totally subsumed in the first. Note that the tests
4369 below are simplified by the ones above. */
4370 no_overlap = integer_onep (range_binop (LT_EXPR, integer_type_node,
4371 high0, 1, low1, 0));
4372 subset = integer_onep (range_binop (LE_EXPR, integer_type_node,
4373 high1, 1, high0, 1));
4375 /* We now have four cases, depending on whether we are including or
4376 excluding the two ranges. */
4377 if (in0_p && in1_p)
4379 /* If they don't overlap, the result is false. If the second range
4380 is a subset it is the result. Otherwise, the range is from the start
4381 of the second to the end of the first. */
4382 if (no_overlap)
4383 in_p = 0, low = high = 0;
4384 else if (subset)
4385 in_p = 1, low = low1, high = high1;
4386 else
4387 in_p = 1, low = low1, high = high0;
4390 else if (in0_p && ! in1_p)
4392 /* If they don't overlap, the result is the first range. If they are
4393 equal, the result is false. If the second range is a subset of the
4394 first, and the ranges begin at the same place, we go from just after
4395 the end of the second range to the end of the first. If the second
4396 range is not a subset of the first, or if it is a subset and both
4397 ranges end at the same place, the range starts at the start of the
4398 first range and ends just before the second range.
4399 Otherwise, we can't describe this as a single range. */
4400 if (no_overlap)
4401 in_p = 1, low = low0, high = high0;
4402 else if (lowequal && highequal)
4403 in_p = 0, low = high = 0;
4404 else if (subset && lowequal)
4406 low = range_successor (high1);
4407 high = high0;
4408 in_p = 1;
4409 if (low == 0)
4411 /* We are in the weird situation where high0 > high1 but
4412 high1 has no successor. Punt. */
4413 return 0;
4416 else if (! subset || highequal)
4418 low = low0;
4419 high = range_predecessor (low1);
4420 in_p = 1;
4421 if (high == 0)
4423 /* low0 < low1 but low1 has no predecessor. Punt. */
4424 return 0;
4427 else
4428 return 0;
4431 else if (! in0_p && in1_p)
4433 /* If they don't overlap, the result is the second range. If the second
4434 is a subset of the first, the result is false. Otherwise,
4435 the range starts just after the first range and ends at the
4436 end of the second. */
4437 if (no_overlap)
4438 in_p = 1, low = low1, high = high1;
4439 else if (subset || highequal)
4440 in_p = 0, low = high = 0;
4441 else
4443 low = range_successor (high0);
4444 high = high1;
4445 in_p = 1;
4446 if (low == 0)
4448 /* high1 > high0 but high0 has no successor. Punt. */
4449 return 0;
4454 else
4456 /* The case where we are excluding both ranges. Here the complex case
4457 is if they don't overlap. In that case, the only time we have a
4458 range is if they are adjacent. If the second is a subset of the
4459 first, the result is the first. Otherwise, the range to exclude
4460 starts at the beginning of the first range and ends at the end of the
4461 second. */
4462 if (no_overlap)
4464 if (integer_onep (range_binop (EQ_EXPR, integer_type_node,
4465 range_successor (high0),
4466 1, low1, 0)))
4467 in_p = 0, low = low0, high = high1;
4468 else
4470 /* Canonicalize - [min, x] into - [-, x]. */
4471 if (low0 && TREE_CODE (low0) == INTEGER_CST)
4472 switch (TREE_CODE (TREE_TYPE (low0)))
4474 case ENUMERAL_TYPE:
4475 if (TYPE_PRECISION (TREE_TYPE (low0))
4476 != GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (low0))))
4477 break;
4478 /* FALLTHROUGH */
4479 case INTEGER_TYPE:
4480 if (tree_int_cst_equal (low0,
4481 TYPE_MIN_VALUE (TREE_TYPE (low0))))
4482 low0 = 0;
4483 break;
4484 case POINTER_TYPE:
4485 if (TYPE_UNSIGNED (TREE_TYPE (low0))
4486 && integer_zerop (low0))
4487 low0 = 0;
4488 break;
4489 default:
4490 break;
4493 /* Canonicalize - [x, max] into - [x, -]. */
4494 if (high1 && TREE_CODE (high1) == INTEGER_CST)
4495 switch (TREE_CODE (TREE_TYPE (high1)))
4497 case ENUMERAL_TYPE:
4498 if (TYPE_PRECISION (TREE_TYPE (high1))
4499 != GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (high1))))
4500 break;
4501 /* FALLTHROUGH */
4502 case INTEGER_TYPE:
4503 if (tree_int_cst_equal (high1,
4504 TYPE_MAX_VALUE (TREE_TYPE (high1))))
4505 high1 = 0;
4506 break;
4507 case POINTER_TYPE:
4508 if (TYPE_UNSIGNED (TREE_TYPE (high1))
4509 && integer_zerop (range_binop (PLUS_EXPR, NULL_TREE,
4510 high1, 1,
4511 integer_one_node, 1)))
4512 high1 = 0;
4513 break;
4514 default:
4515 break;
4518 /* The ranges might also be adjacent between the maximum and
4519 minimum values of the given type. For
4520 - [{min,-}, x] and - [y, {max,-}] ranges where x + 1 < y
4521 return + [x + 1, y - 1]. */
4522 if (low0 == 0 && high1 == 0)
4524 low = range_successor (high0);
4525 high = range_predecessor (low1);
4526 if (low == 0 || high == 0)
4527 return 0;
4529 in_p = 1;
4531 else
4532 return 0;
4535 else if (subset)
4536 in_p = 0, low = low0, high = high0;
4537 else
4538 in_p = 0, low = low0, high = high1;
4541 *pin_p = in_p, *plow = low, *phigh = high;
4542 return 1;
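/* Worked examples (sketch): merging + [2, 5] with + [4, 9] gives their
   overlap + [4, 5]; the excluded, adjacent ranges - [2, 4] and - [5, 9]
   merge to - [2, 9]; but - [2, 3] with - [6, 9] leaves a two-piece set,
   so the function returns 0.  Disjoint included ranges are no problem:
   they merge to the always-false range - [-, -].  */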
4546 /* Subroutine of fold, looking inside expressions of the form
4547 A op B ? A : C, where ARG0, ARG1 and ARG2 are the three operands
4548 of the COND_EXPR. This function is being used also to optimize
4549 A op B ? C : A, by reversing the comparison first.
4551 Return a folded expression whose code is not a COND_EXPR
4552 anymore, or NULL_TREE if no folding opportunity is found. */
4554 static tree
4555 fold_cond_expr_with_comparison (location_t loc, tree type,
4556 tree arg0, tree arg1, tree arg2)
4558 enum tree_code comp_code = TREE_CODE (arg0);
4559 tree arg00 = TREE_OPERAND (arg0, 0);
4560 tree arg01 = TREE_OPERAND (arg0, 1);
4561 tree arg1_type = TREE_TYPE (arg1);
4562 tree tem;
4564 STRIP_NOPS (arg1);
4565 STRIP_NOPS (arg2);
4567 /* If we have A op 0 ? A : -A, consider applying the following
4568 transformations:
4570 A == 0? A : -A same as -A
4571 A != 0? A : -A same as A
4572 A >= 0? A : -A same as abs (A)
4573 A > 0? A : -A same as abs (A)
4574 A <= 0? A : -A same as -abs (A)
4575 A < 0? A : -A same as -abs (A)
4577 None of these transformations work for modes with signed
4578 zeros. If A is +/-0, the first two transformations will
4579 change the sign of the result (from +0 to -0, or vice
4580 versa). The last four will fix the sign of the result,
4581 even though the original expressions could be positive or
4582 negative, depending on the sign of A.
4584 Note that all these transformations are correct if A is
4585 NaN, since the two alternatives (A and -A) are also NaNs. */
4586 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (type))
4587 && (FLOAT_TYPE_P (TREE_TYPE (arg01))
4588 ? real_zerop (arg01)
4589 : integer_zerop (arg01))
4590 && ((TREE_CODE (arg2) == NEGATE_EXPR
4591 && operand_equal_p (TREE_OPERAND (arg2, 0), arg1, 0))
4592 /* In the case that A is of the form X-Y, '-A' (arg2) may
4593 have already been folded to Y-X; check for that. */
4594 || (TREE_CODE (arg1) == MINUS_EXPR
4595 && TREE_CODE (arg2) == MINUS_EXPR
4596 && operand_equal_p (TREE_OPERAND (arg1, 0),
4597 TREE_OPERAND (arg2, 1), 0)
4598 && operand_equal_p (TREE_OPERAND (arg1, 1),
4599 TREE_OPERAND (arg2, 0), 0))))
4600 switch (comp_code)
4602 case EQ_EXPR:
4603 case UNEQ_EXPR:
4604 tem = fold_convert_loc (loc, arg1_type, arg1);
4605 return pedantic_non_lvalue_loc (loc,
4606 fold_convert_loc (loc, type,
4607 negate_expr (tem)));
4608 case NE_EXPR:
4609 case LTGT_EXPR:
4610 return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
4611 case UNGE_EXPR:
4612 case UNGT_EXPR:
4613 if (flag_trapping_math)
4614 break;
4615 /* Fall through. */
4616 case GE_EXPR:
4617 case GT_EXPR:
4618 if (TYPE_UNSIGNED (TREE_TYPE (arg1)))
4619 arg1 = fold_convert_loc (loc, signed_type_for
4620 (TREE_TYPE (arg1)), arg1);
4621 tem = fold_build1_loc (loc, ABS_EXPR, TREE_TYPE (arg1), arg1);
4622 return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, tem));
4623 case UNLE_EXPR:
4624 case UNLT_EXPR:
4625 if (flag_trapping_math)
4626 break;
/* Fall through. */
4627 case LE_EXPR:
4628 case LT_EXPR:
4629 if (TYPE_UNSIGNED (TREE_TYPE (arg1)))
4630 arg1 = fold_convert_loc (loc, signed_type_for
4631 (TREE_TYPE (arg1)), arg1);
4632 tem = fold_build1_loc (loc, ABS_EXPR, TREE_TYPE (arg1), arg1);
4633 return negate_expr (fold_convert_loc (loc, type, tem));
4634 default:
4635 gcc_assert (TREE_CODE_CLASS (comp_code) == tcc_comparison);
4636 break;
4639 /* A != 0 ? A : 0 is simply A, unless A is -0. Likewise
4640 A == 0 ? A : 0 is always 0 unless A is -0. Note that
4641 both transformations are correct when A is NaN: A != 0
4642 is then true, and A == 0 is false. */
4644 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (type))
4645 && integer_zerop (arg01) && integer_zerop (arg2))
4647 if (comp_code == NE_EXPR)
4648 return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
4649 else if (comp_code == EQ_EXPR)
4650 return build_int_cst (type, 0);
4653 /* Try some transformations of A op B ? A : B.
4655 A == B? A : B same as B
4656 A != B? A : B same as A
4657 A >= B? A : B same as max (A, B)
4658 A > B? A : B same as max (B, A)
4659 A <= B? A : B same as min (A, B)
4660 A < B? A : B same as min (B, A)
4662 As above, these transformations don't work in the presence
4663 of signed zeros. For example, if A and B are zeros of
4664 opposite sign, the first two transformations will change
4665 the sign of the result. In the last four, the original
4666 expressions give different results for (A=+0, B=-0) and
4667 (A=-0, B=+0), but the transformed expressions do not.
4669 The first two transformations are correct if either A or B
4670 is a NaN. In the first transformation, the condition will
4671 be false, and B will indeed be chosen. In the case of the
4672 second transformation, the condition A != B will be true,
4673 and A will be chosen.
4675 The conversions to max() and min() are not correct if B is
4676 a number and A is not. The conditions in the original
4677 expressions will be false, so all four give B. The min()
4678 and max() versions would give a NaN instead. */
4679 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (type))
4680 && operand_equal_for_comparison_p (arg01, arg2, arg00)
4681 /* Avoid these transformations if the COND_EXPR may be used
4682 as an lvalue in the C++ front-end. PR c++/19199. */
4683 && (in_gimple_form
4684 || (strcmp (lang_hooks.name, "GNU C++") != 0
4685 && strcmp (lang_hooks.name, "GNU Objective-C++") != 0)
4686 || ! maybe_lvalue_p (arg1)
4687 || ! maybe_lvalue_p (arg2)))
4689 tree comp_op0 = arg00;
4690 tree comp_op1 = arg01;
4691 tree comp_type = TREE_TYPE (comp_op0);
4693 /* Avoid adding NOP_EXPRs in case this is an lvalue. */
4694 if (TYPE_MAIN_VARIANT (comp_type) == TYPE_MAIN_VARIANT (type))
4696 comp_type = type;
4697 comp_op0 = arg1;
4698 comp_op1 = arg2;
4701 switch (comp_code)
4703 case EQ_EXPR:
4704 return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, arg2));
4705 case NE_EXPR:
4706 return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
4707 case LE_EXPR:
4708 case LT_EXPR:
4709 case UNLE_EXPR:
4710 case UNLT_EXPR:
4711 /* In C++ a ?: expression can be an lvalue, so put the
4712 operand which will be used if they are equal first
4713 so that we can convert this back to the
4714 corresponding COND_EXPR. */
4715 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
4717 comp_op0 = fold_convert_loc (loc, comp_type, comp_op0);
4718 comp_op1 = fold_convert_loc (loc, comp_type, comp_op1);
4719 tem = (comp_code == LE_EXPR || comp_code == UNLE_EXPR)
4720 ? fold_build2_loc (loc, MIN_EXPR, comp_type, comp_op0, comp_op1)
4721 : fold_build2_loc (loc, MIN_EXPR, comp_type,
4722 comp_op1, comp_op0);
4723 return pedantic_non_lvalue_loc (loc,
4724 fold_convert_loc (loc, type, tem));
4726 break;
4727 case GE_EXPR:
4728 case GT_EXPR:
4729 case UNGE_EXPR:
4730 case UNGT_EXPR:
4731 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
4733 comp_op0 = fold_convert_loc (loc, comp_type, comp_op0);
4734 comp_op1 = fold_convert_loc (loc, comp_type, comp_op1);
4735 tem = (comp_code == GE_EXPR || comp_code == UNGE_EXPR)
4736 ? fold_build2_loc (loc, MAX_EXPR, comp_type, comp_op0, comp_op1)
4737 : fold_build2_loc (loc, MAX_EXPR, comp_type,
4738 comp_op1, comp_op0);
4739 return pedantic_non_lvalue_loc (loc,
4740 fold_convert_loc (loc, type, tem));
4742 break;
4743 case UNEQ_EXPR:
4744 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
4745 return pedantic_non_lvalue_loc (loc,
4746 fold_convert_loc (loc, type, arg2));
4747 break;
4748 case LTGT_EXPR:
4749 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
4750 return pedantic_non_lvalue_loc (loc,
4751 fold_convert_loc (loc, type, arg1));
4752 break;
4753 default:
4754 gcc_assert (TREE_CODE_CLASS (comp_code) == tcc_comparison);
4755 break;
4759 /* If this is A op C1 ? A : C2 with C1 and C2 constant integers,
4760 we might still be able to simplify this. For example,
4761 if C1 is one less or one more than C2, this might have started
4762 out as a MIN or MAX and been transformed by this function.
4763 Only good for INTEGER_TYPEs, because we need TYPE_MAX_VALUE. */
4765 if (INTEGRAL_TYPE_P (type)
4766 && TREE_CODE (arg01) == INTEGER_CST
4767 && TREE_CODE (arg2) == INTEGER_CST)
4768 switch (comp_code)
4770 case EQ_EXPR:
4771 if (TREE_CODE (arg1) == INTEGER_CST)
4772 break;
4773 /* We can replace A with C1 in this case. */
4774 arg1 = fold_convert_loc (loc, type, arg01);
4775 return fold_build3_loc (loc, COND_EXPR, type, arg0, arg1, arg2);
4777 case LT_EXPR:
4778 /* If C1 is C2 + 1, this is min(A, C2), but use ARG00's type for
4779 MIN_EXPR, to preserve the signedness of the comparison. */
4780 if (! operand_equal_p (arg2, TYPE_MAX_VALUE (type),
4781 OEP_ONLY_CONST)
4782 && operand_equal_p (arg01,
4783 const_binop (PLUS_EXPR, arg2,
4784 build_int_cst (type, 1)),
4785 OEP_ONLY_CONST))
4787 tem = fold_build2_loc (loc, MIN_EXPR, TREE_TYPE (arg00), arg00,
4788 fold_convert_loc (loc, TREE_TYPE (arg00),
4789 arg2));
4790 return pedantic_non_lvalue_loc (loc,
4791 fold_convert_loc (loc, type, tem));
4793 break;
4795 case LE_EXPR:
4796 /* If C1 is C2 - 1, this is min(A, C2), with the same care
4797 as above. */
4798 if (! operand_equal_p (arg2, TYPE_MIN_VALUE (type),
4799 OEP_ONLY_CONST)
4800 && operand_equal_p (arg01,
4801 const_binop (MINUS_EXPR, arg2,
4802 build_int_cst (type, 1)),
4803 OEP_ONLY_CONST))
4805 tem = fold_build2_loc (loc, MIN_EXPR, TREE_TYPE (arg00), arg00,
4806 fold_convert_loc (loc, TREE_TYPE (arg00),
4807 arg2));
4808 return pedantic_non_lvalue_loc (loc,
4809 fold_convert_loc (loc, type, tem));
4811 break;
4813 case GT_EXPR:
4814 /* If C1 is C2 - 1, this is max(A, C2), but use ARG00's type for
4815 MAX_EXPR, to preserve the signedness of the comparison. */
4816 if (! operand_equal_p (arg2, TYPE_MIN_VALUE (type),
4817 OEP_ONLY_CONST)
4818 && operand_equal_p (arg01,
4819 const_binop (MINUS_EXPR, arg2,
4820 build_int_cst (type, 1)),
4821 OEP_ONLY_CONST))
4823 tem = fold_build2_loc (loc, MAX_EXPR, TREE_TYPE (arg00), arg00,
4824 fold_convert_loc (loc, TREE_TYPE (arg00),
4825 arg2));
4826 return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, tem));
4828 break;
4830 case GE_EXPR:
4831 /* If C1 is C2 + 1, this is max(A, C2), with the same care as above. */
4832 if (! operand_equal_p (arg2, TYPE_MAX_VALUE (type),
4833 OEP_ONLY_CONST)
4834 && operand_equal_p (arg01,
4835 const_binop (PLUS_EXPR, arg2,
4836 build_int_cst (type, 1)),
4837 OEP_ONLY_CONST))
4839 tem = fold_build2_loc (loc, MAX_EXPR, TREE_TYPE (arg00), arg00,
4840 fold_convert_loc (loc, TREE_TYPE (arg00),
4841 arg2));
4842 return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, tem));
4844 break;
4845 case NE_EXPR:
4846 break;
4847 default:
4848 gcc_unreachable ();
4851 return NULL_TREE;
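/* Worked examples (sketch), assuming signed zeros need not be honored:
   "x > 0 ? x : -x" folds to ABS_EXPR <x>, "x < y ? x : y" to
   MIN_EXPR <x, y>, and "x == 5 ? x : 3" has its middle operand replaced
   by the constant, giving "x == 5 ? 5 : 3".  */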
4856 #ifndef LOGICAL_OP_NON_SHORT_CIRCUIT
4857 #define LOGICAL_OP_NON_SHORT_CIRCUIT \
4858 (BRANCH_COST (optimize_function_for_speed_p (cfun), \
4859 false) >= 2)
4860 #endif
4862 /* EXP is some logical combination of boolean tests. See if we can
4863 merge it into some range test. Return the new tree if so. */
4865 static tree
4866 fold_range_test (location_t loc, enum tree_code code, tree type,
4867 tree op0, tree op1)
4869 int or_op = (code == TRUTH_ORIF_EXPR
4870 || code == TRUTH_OR_EXPR);
4871 int in0_p, in1_p, in_p;
4872 tree low0, low1, low, high0, high1, high;
4873 bool strict_overflow_p = false;
4874 tree lhs = make_range (op0, &in0_p, &low0, &high0, &strict_overflow_p);
4875 tree rhs = make_range (op1, &in1_p, &low1, &high1, &strict_overflow_p);
4876 tree tem;
4877 const char * const warnmsg = G_("assuming signed overflow does not occur "
4878 "when simplifying range test");
4880 /* If this is an OR operation, invert both sides; we will invert
4881 again at the end. */
4882 if (or_op)
4883 in0_p = ! in0_p, in1_p = ! in1_p;
4885 /* If both expressions are the same, if we can merge the ranges, and we
4886 can build the range test, return it or it inverted. If one of the
4887 ranges is always true or always false, consider it to be the same
4888 expression as the other. */
4889 if ((lhs == 0 || rhs == 0 || operand_equal_p (lhs, rhs, 0))
4890 && merge_ranges (&in_p, &low, &high, in0_p, low0, high0,
4891 in1_p, low1, high1)
4892 && 0 != (tem = (build_range_check (UNKNOWN_LOCATION, type,
4893 lhs != 0 ? lhs
4894 : rhs != 0 ? rhs : integer_zero_node,
4895 in_p, low, high))))
4897 if (strict_overflow_p)
4898 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_COMPARISON);
4899 return or_op ? invert_truthvalue_loc (loc, tem) : tem;
4902 /* On machines where branches are expensive, if this is a
4903 short-circuited branch and the underlying object on both sides
4904 is the same, make a non-short-circuit operation. */
4905 else if (LOGICAL_OP_NON_SHORT_CIRCUIT
4906 && lhs != 0 && rhs != 0
4907 && (code == TRUTH_ANDIF_EXPR
4908 || code == TRUTH_ORIF_EXPR)
4909 && operand_equal_p (lhs, rhs, 0))
4911 /* If simple enough, just rewrite. Otherwise, make a SAVE_EXPR
4912 unless we are at top level or LHS contains a PLACEHOLDER_EXPR, in
4913 which cases we can't do this. */
4914 if (simple_operand_p (lhs))
4916 tem = build2 (code == TRUTH_ANDIF_EXPR
4917 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR,
4918 type, op0, op1);
4919 SET_EXPR_LOCATION (tem, loc);
4920 return tem;
4923 else if (lang_hooks.decls.global_bindings_p () == 0
4924 && ! CONTAINS_PLACEHOLDER_P (lhs))
4926 tree common = save_expr (lhs);
4928 if (0 != (lhs = build_range_check (loc, type, common,
4929 or_op ? ! in0_p : in0_p,
4930 low0, high0))
4931 && (0 != (rhs = build_range_check (loc, type, common,
4932 or_op ? ! in1_p : in1_p,
4933 low1, high1))))
4935 if (strict_overflow_p)
4936 fold_overflow_warning (warnmsg,
4937 WARN_STRICT_OVERFLOW_COMPARISON);
4938 tem = build2 (code == TRUTH_ANDIF_EXPR
4939 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR,
4940 type, lhs, rhs);
4941 SET_EXPR_LOCATION (tem, loc);
4942 return tem;
4947 return 0;
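/* A worked example (sketch, assuming ASCII): for
   "ch >= '0' && ch <= '9'" make_range yields + [48, -] and + [-, 57],
   merge_ranges combines them into + [48, 57], and build_range_check
   emits in effect ((unsigned char) ch - 48) <= 9 -- one branch
   instead of two.  */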
4950 /* Subroutine for fold_truthop: C is an INTEGER_CST interpreted as a P
4951 bit value. Arrange things so the extra bits will be set to zero if and
4952 only if C is sign-extended to its full width. If MASK is nonzero,
4953 it is an INTEGER_CST that should be AND'ed with the extra bits. */
4955 static tree
4956 unextend (tree c, int p, int unsignedp, tree mask)
4958 tree type = TREE_TYPE (c);
4959 int modesize = GET_MODE_BITSIZE (TYPE_MODE (type));
4960 tree temp;
4962 if (p == modesize || unsignedp)
4963 return c;
4965 /* We work by getting just the sign bit into the low-order bit, then
4966 into the high-order bit, then sign-extend. We then XOR that value
4967 with C. */
4968 temp = const_binop (RSHIFT_EXPR, c, size_int (p - 1));
4969 temp = const_binop (BIT_AND_EXPR, temp, size_int (1));
4971 /* We must use a signed type in order to get an arithmetic right shift.
4972 However, we must also avoid introducing accidental overflows, so that
4973 a subsequent call to integer_zerop will work. Hence we must
4974 do the type conversion here. At this point, the constant is either
4975 zero or one, and the conversion to a signed type can never overflow.
4976 We could get an overflow if this conversion is done anywhere else. */
4977 if (TYPE_UNSIGNED (type))
4978 temp = fold_convert (signed_type_for (type), temp);
4980 temp = const_binop (LSHIFT_EXPR, temp, size_int (modesize - 1));
4981 temp = const_binop (RSHIFT_EXPR, temp, size_int (modesize - p - 1));
4982 if (mask != 0)
4983 temp = const_binop (BIT_AND_EXPR, temp,
4984 fold_convert (TREE_TYPE (c), mask));
4985 /* If necessary, convert the type back to match the type of C. */
4986 if (TYPE_UNSIGNED (type))
4987 temp = fold_convert (type, temp);
4989 return fold_convert (type, const_binop (BIT_XOR_EXPR, c, temp));
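/* A bit-level example (sketch), with P = 8 in a 32-bit mode and no
   MASK: for C = 0xffffffff, a sign-extended 8-bit -1, TEMP becomes
   0xffffff00 and C ^ TEMP is 0x000000ff -- the extra bits end up zero.
   For the zero-extended C = 0x000000ff the same XOR gives 0xffffffff,
   i.e. nonzero extra bits, exactly the property promised above.  */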
4992 /* For an expression that has the form
4993 (A && B) || ~B
4994 or
4995 (A || B) && ~B,
4996 we can drop one of the inner expressions and simplify to
4997 A || ~B
4998 or
4999 A && ~B.
5000 LOC is the location of the resulting expression. OP is the inner
5001 logical operation; the left-hand side in the examples above, while CMPOP
5002 is the right-hand side. RHS_ONLY is used to prevent us from accidentally
5003 removing a condition that guards another, as in
5004 (A != NULL && A->...) || A == NULL
5005 which we must not transform. If RHS_ONLY is true, only eliminate the
5006 right-most operand of the inner logical operation. */
5008 static tree
5009 merge_truthop_with_opposite_arm (location_t loc, tree op, tree cmpop,
5010 bool rhs_only)
5012 tree type = TREE_TYPE (cmpop);
5013 enum tree_code code = TREE_CODE (cmpop);
5014 enum tree_code truthop_code = TREE_CODE (op);
5015 tree lhs = TREE_OPERAND (op, 0);
5016 tree rhs = TREE_OPERAND (op, 1);
5017 tree orig_lhs = lhs, orig_rhs = rhs;
5018 enum tree_code rhs_code = TREE_CODE (rhs);
5019 enum tree_code lhs_code = TREE_CODE (lhs);
5020 enum tree_code inv_code;
5022 if (TREE_SIDE_EFFECTS (op) || TREE_SIDE_EFFECTS (cmpop))
5023 return NULL_TREE;
5025 if (TREE_CODE_CLASS (code) != tcc_comparison)
5026 return NULL_TREE;
5028 if (rhs_code == truthop_code)
5030 tree newrhs = merge_truthop_with_opposite_arm (loc, rhs, cmpop, rhs_only);
5031 if (newrhs != NULL_TREE)
5033 rhs = newrhs;
5034 rhs_code = TREE_CODE (rhs);
5037 if (lhs_code == truthop_code && !rhs_only)
5039 tree newlhs = merge_truthop_with_opposite_arm (loc, lhs, cmpop, false);
5040 if (newlhs != NULL_TREE)
5042 lhs = newlhs;
5043 lhs_code = TREE_CODE (lhs);
5047 inv_code = invert_tree_comparison (code, HONOR_NANS (TYPE_MODE (type)));
5048 if (inv_code == rhs_code
5049 && operand_equal_p (TREE_OPERAND (rhs, 0), TREE_OPERAND (cmpop, 0), 0)
5050 && operand_equal_p (TREE_OPERAND (rhs, 1), TREE_OPERAND (cmpop, 1), 0))
5051 return lhs;
5052 if (!rhs_only && inv_code == lhs_code
5053 && operand_equal_p (TREE_OPERAND (lhs, 0), TREE_OPERAND (cmpop, 0), 0)
5054 && operand_equal_p (TREE_OPERAND (lhs, 1), TREE_OPERAND (cmpop, 1), 0))
5055 return rhs;
5056 if (rhs != orig_rhs || lhs != orig_lhs)
5057 return fold_build2_loc (loc, truthop_code, TREE_TYPE (cmpop),
5058 lhs, rhs);
5059 return NULL_TREE;
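/* A worked example (sketch): in
     (x < 5 && y == 2) || y != 2
   the inner "y == 2" is the inverse of the guard "y != 2", so OP is
   reduced to "x < 5" and the whole expression to "x < 5 || y != 2".
   Called with RHS_ONLY set on "(p != 0 && p->f > 1) || p == 0", the
   function leaves "p != 0" in place, since dropping it would unguard
   the dereference.  */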
5062 /* Find ways of folding logical expressions of LHS and RHS:
5063 Try to merge two comparisons to the same innermost item.
5064 Look for range tests like "ch >= '0' && ch <= '9'".
5065 Look for combinations of simple terms on machines with expensive branches
5066 and evaluate the RHS unconditionally.
5068 For example, if we have p->a == 2 && p->b == 4 and we can make an
5069 object large enough to span both A and B, we can do this with a comparison
5070 against the object ANDed with a mask.
5072 If we have p->a == q->a && p->b == q->b, we may be able to use bit masking
5073 operations to do this with one comparison.
5075 We check for both normal comparisons and the BIT_AND_EXPRs made by this
5076 function and the one above.
5078 CODE is the logical operation being done. It can be TRUTH_ANDIF_EXPR,
5079 TRUTH_AND_EXPR, TRUTH_ORIF_EXPR, or TRUTH_OR_EXPR.
5081 TRUTH_TYPE is the type of the logical operand and LHS and RHS are its
5082 two operands.
5084 We return the simplified tree or 0 if no optimization is possible. */
5086 static tree
5087 fold_truthop (location_t loc, enum tree_code code, tree truth_type,
5088 tree lhs, tree rhs)
5090 /* If this is the "or" of two comparisons, we can do something if
5091 the comparisons are NE_EXPR. If this is the "and", we can do something
5092 if the comparisons are EQ_EXPR. I.e.,
5093 (a->b == 2 && a->c == 4) can become (a->new == NEW).
5095 WANTED_CODE is this operation code. For single bit fields, we can
5096 convert EQ_EXPR to NE_EXPR so we need not reject the "wrong"
5097 comparison for one-bit fields. */
5099 enum tree_code wanted_code;
5100 enum tree_code lcode, rcode;
5101 tree ll_arg, lr_arg, rl_arg, rr_arg;
5102 tree ll_inner, lr_inner, rl_inner, rr_inner;
5103 HOST_WIDE_INT ll_bitsize, ll_bitpos, lr_bitsize, lr_bitpos;
5104 HOST_WIDE_INT rl_bitsize, rl_bitpos, rr_bitsize, rr_bitpos;
5105 HOST_WIDE_INT xll_bitpos, xlr_bitpos, xrl_bitpos, xrr_bitpos;
5106 HOST_WIDE_INT lnbitsize, lnbitpos, rnbitsize, rnbitpos;
5107 int ll_unsignedp, lr_unsignedp, rl_unsignedp, rr_unsignedp;
5108 enum machine_mode ll_mode, lr_mode, rl_mode, rr_mode;
5109 enum machine_mode lnmode, rnmode;
5110 tree ll_mask, lr_mask, rl_mask, rr_mask;
5111 tree ll_and_mask, lr_and_mask, rl_and_mask, rr_and_mask;
5112 tree l_const, r_const;
5113 tree lntype, rntype, result;
5114 HOST_WIDE_INT first_bit, end_bit;
5115 int volatilep;
5116 tree orig_lhs = lhs, orig_rhs = rhs;
5117 enum tree_code orig_code = code;
5119 /* Start by getting the comparison codes. Fail if anything is volatile.
5120 If one operand is a BIT_AND_EXPR with the constant one, treat it as if
5121 it were surrounded with a NE_EXPR. */
5123 if (TREE_SIDE_EFFECTS (lhs) || TREE_SIDE_EFFECTS (rhs))
5124 return 0;
5126 lcode = TREE_CODE (lhs);
5127 rcode = TREE_CODE (rhs);
5129 if (lcode == BIT_AND_EXPR && integer_onep (TREE_OPERAND (lhs, 1)))
5131 lhs = build2 (NE_EXPR, truth_type, lhs,
5132 build_int_cst (TREE_TYPE (lhs), 0));
5133 lcode = NE_EXPR;
5136 if (rcode == BIT_AND_EXPR && integer_onep (TREE_OPERAND (rhs, 1)))
5138 rhs = build2 (NE_EXPR, truth_type, rhs,
5139 build_int_cst (TREE_TYPE (rhs), 0));
5140 rcode = NE_EXPR;
5143 if (TREE_CODE_CLASS (lcode) != tcc_comparison
5144 || TREE_CODE_CLASS (rcode) != tcc_comparison)
5145 return 0;
5147 ll_arg = TREE_OPERAND (lhs, 0);
5148 lr_arg = TREE_OPERAND (lhs, 1);
5149 rl_arg = TREE_OPERAND (rhs, 0);
5150 rr_arg = TREE_OPERAND (rhs, 1);
5152 /* Simplify (x<y) && (x==y) into (x<=y) and related optimizations. */
5153 if (simple_operand_p (ll_arg)
5154 && simple_operand_p (lr_arg))
5156 tree result;
5157 if (operand_equal_p (ll_arg, rl_arg, 0)
5158 && operand_equal_p (lr_arg, rr_arg, 0))
5160 result = combine_comparisons (loc, code, lcode, rcode,
5161 truth_type, ll_arg, lr_arg);
5162 if (result)
5163 return result;
5165 else if (operand_equal_p (ll_arg, rr_arg, 0)
5166 && operand_equal_p (lr_arg, rl_arg, 0))
5168 result = combine_comparisons (loc, code, lcode,
5169 swap_tree_comparison (rcode),
5170 truth_type, ll_arg, lr_arg);
5171 if (result)
5172 return result;
5176 code = ((code == TRUTH_AND_EXPR || code == TRUTH_ANDIF_EXPR)
5177 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR);
5179 /* If the RHS can be evaluated unconditionally and its operands are
5180 simple, it wins to evaluate the RHS unconditionally on machines
5181 with expensive branches. In this case, this isn't a comparison
5182 that can be merged. Avoid doing this if the RHS is a floating-point
5183 comparison since those can trap. */
5185 if (BRANCH_COST (optimize_function_for_speed_p (cfun),
5186 false) >= 2
5187 && ! FLOAT_TYPE_P (TREE_TYPE (rl_arg))
5188 && simple_operand_p (rl_arg)
5189 && simple_operand_p (rr_arg))
5191 /* Convert (a != 0) || (b != 0) into (a | b) != 0. */
5192 if (code == TRUTH_OR_EXPR
5193 && lcode == NE_EXPR && integer_zerop (lr_arg)
5194 && rcode == NE_EXPR && integer_zerop (rr_arg)
5195 && TREE_TYPE (ll_arg) == TREE_TYPE (rl_arg)
5196 && INTEGRAL_TYPE_P (TREE_TYPE (ll_arg)))
5198 result = build2 (NE_EXPR, truth_type,
5199 build2 (BIT_IOR_EXPR, TREE_TYPE (ll_arg),
5200 ll_arg, rl_arg),
5201 build_int_cst (TREE_TYPE (ll_arg), 0));
5202 goto fold_truthop_exit;
5205 /* Convert (a == 0) && (b == 0) into (a | b) == 0. */
5206 if (code == TRUTH_AND_EXPR
5207 && lcode == EQ_EXPR && integer_zerop (lr_arg)
5208 && rcode == EQ_EXPR && integer_zerop (rr_arg)
5209 && TREE_TYPE (ll_arg) == TREE_TYPE (rl_arg)
5210 && INTEGRAL_TYPE_P (TREE_TYPE (ll_arg)))
5212 result = build2 (EQ_EXPR, truth_type,
5213 build2 (BIT_IOR_EXPR, TREE_TYPE (ll_arg),
5214 ll_arg, rl_arg),
5215 build_int_cst (TREE_TYPE (ll_arg), 0));
5216 goto fold_truthop_exit;
5219 if (LOGICAL_OP_NON_SHORT_CIRCUIT)
5221 if (code != orig_code || lhs != orig_lhs || rhs != orig_rhs)
5223 result = build2 (code, truth_type, lhs, rhs);
5224 goto fold_truthop_exit;
5226 return NULL_TREE;
5230 /* See if the comparisons can be merged. Then get all the parameters for
5231 each side. */
5233 if ((lcode != EQ_EXPR && lcode != NE_EXPR)
5234 || (rcode != EQ_EXPR && rcode != NE_EXPR))
5235 return 0;
5237 volatilep = 0;
5238 ll_inner = decode_field_reference (loc, ll_arg,
5239 &ll_bitsize, &ll_bitpos, &ll_mode,
5240 &ll_unsignedp, &volatilep, &ll_mask,
5241 &ll_and_mask);
5242 lr_inner = decode_field_reference (loc, lr_arg,
5243 &lr_bitsize, &lr_bitpos, &lr_mode,
5244 &lr_unsignedp, &volatilep, &lr_mask,
5245 &lr_and_mask);
5246 rl_inner = decode_field_reference (loc, rl_arg,
5247 &rl_bitsize, &rl_bitpos, &rl_mode,
5248 &rl_unsignedp, &volatilep, &rl_mask,
5249 &rl_and_mask);
5250 rr_inner = decode_field_reference (loc, rr_arg,
5251 &rr_bitsize, &rr_bitpos, &rr_mode,
5252 &rr_unsignedp, &volatilep, &rr_mask,
5253 &rr_and_mask);
5255 /* The inner operation on the lhs of each comparison must be the
5256 same if we are to be able to do anything.
5257 Then see if we have constants. If not, the same must be true for
5258 the rhs's. */
5259 if (volatilep || ll_inner == 0 || rl_inner == 0
5260 || ! operand_equal_p (ll_inner, rl_inner, 0))
5261 return 0;
5263 if (TREE_CODE (lr_arg) == INTEGER_CST
5264 && TREE_CODE (rr_arg) == INTEGER_CST)
5265 l_const = lr_arg, r_const = rr_arg;
5266 else if (lr_inner == 0 || rr_inner == 0
5267 || ! operand_equal_p (lr_inner, rr_inner, 0))
5268 return 0;
5269 else
5270 l_const = r_const = 0;
5272 /* If either comparison code is not correct for our logical operation,
5273 fail. However, we can convert a one-bit comparison against zero into
5274 the opposite comparison against that bit being set in the field. */
5276 wanted_code = (code == TRUTH_AND_EXPR ? EQ_EXPR : NE_EXPR);
5277 if (lcode != wanted_code)
5279 if (l_const && integer_zerop (l_const) && integer_pow2p (ll_mask))
5281 /* Make the left operand unsigned, since we are only interested
5282 in the value of one bit. Otherwise we are doing the wrong
5283 thing below. */
5284 ll_unsignedp = 1;
5285 l_const = ll_mask;
5287 else
5288 return 0;
5291 /* This is analogous to the code for l_const above. */
5292 if (rcode != wanted_code)
5294 if (r_const && integer_zerop (r_const) && integer_pow2p (rl_mask))
5296 rl_unsignedp = 1;
5297 r_const = rl_mask;
5299 else
5300 return 0;
5303 /* See if we can find a mode that contains both fields being compared on
5304 the left. If we can't, fail. Otherwise, update all constants and masks
5305 to be relative to a field of that size. */
5306 first_bit = MIN (ll_bitpos, rl_bitpos);
5307 end_bit = MAX (ll_bitpos + ll_bitsize, rl_bitpos + rl_bitsize);
5308 lnmode = get_best_mode (end_bit - first_bit, first_bit,
5309 TYPE_ALIGN (TREE_TYPE (ll_inner)), word_mode,
5310 volatilep);
5311 if (lnmode == VOIDmode)
5312 return 0;
5314 lnbitsize = GET_MODE_BITSIZE (lnmode);
5315 lnbitpos = first_bit & ~ (lnbitsize - 1);
5316 lntype = lang_hooks.types.type_for_size (lnbitsize, 1);
5317 xll_bitpos = ll_bitpos - lnbitpos, xrl_bitpos = rl_bitpos - lnbitpos;
5319 if (BYTES_BIG_ENDIAN)
5321 xll_bitpos = lnbitsize - xll_bitpos - ll_bitsize;
5322 xrl_bitpos = lnbitsize - xrl_bitpos - rl_bitsize;
5325 ll_mask = const_binop (LSHIFT_EXPR, fold_convert_loc (loc, lntype, ll_mask),
5326 size_int (xll_bitpos));
5327 rl_mask = const_binop (LSHIFT_EXPR, fold_convert_loc (loc, lntype, rl_mask),
5328 size_int (xrl_bitpos));
5330 if (l_const)
5332 l_const = fold_convert_loc (loc, lntype, l_const);
5333 l_const = unextend (l_const, ll_bitsize, ll_unsignedp, ll_and_mask);
5334 l_const = const_binop (LSHIFT_EXPR, l_const, size_int (xll_bitpos));
5335 if (! integer_zerop (const_binop (BIT_AND_EXPR, l_const,
5336 fold_build1_loc (loc, BIT_NOT_EXPR,
5337 lntype, ll_mask))))
5339 warning (0, "comparison is always %d", wanted_code == NE_EXPR);
5341 return constant_boolean_node (wanted_code == NE_EXPR, truth_type);
5344 if (r_const)
5346 r_const = fold_convert_loc (loc, lntype, r_const);
5347 r_const = unextend (r_const, rl_bitsize, rl_unsignedp, rl_and_mask);
5348 r_const = const_binop (LSHIFT_EXPR, r_const, size_int (xrl_bitpos));
5349 if (! integer_zerop (const_binop (BIT_AND_EXPR, r_const,
5350 fold_build1_loc (loc, BIT_NOT_EXPR,
5351 lntype, rl_mask))))
5353 warning (0, "comparison is always %d", wanted_code == NE_EXPR);
5355 return constant_boolean_node (wanted_code == NE_EXPR, truth_type);
5359 /* If the right sides are not constant, do the same for them. Also,
5360 disallow this optimization if a size or signedness mismatch occurs
5361 between the left and right sides. */
5362 if (l_const == 0)
5364 if (ll_bitsize != lr_bitsize || rl_bitsize != rr_bitsize
5365 || ll_unsignedp != lr_unsignedp || rl_unsignedp != rr_unsignedp
5366 /* Make sure the two fields on the right
5367 correspond to the left without being swapped. */
5368 || ll_bitpos - rl_bitpos != lr_bitpos - rr_bitpos)
5369 return 0;
5371 first_bit = MIN (lr_bitpos, rr_bitpos);
5372 end_bit = MAX (lr_bitpos + lr_bitsize, rr_bitpos + rr_bitsize);
5373 rnmode = get_best_mode (end_bit - first_bit, first_bit,
5374 TYPE_ALIGN (TREE_TYPE (lr_inner)), word_mode,
5375 volatilep);
5376 if (rnmode == VOIDmode)
5377 return 0;
5379 rnbitsize = GET_MODE_BITSIZE (rnmode);
5380 rnbitpos = first_bit & ~ (rnbitsize - 1);
5381 rntype = lang_hooks.types.type_for_size (rnbitsize, 1);
5382 xlr_bitpos = lr_bitpos - rnbitpos, xrr_bitpos = rr_bitpos - rnbitpos;
5384 if (BYTES_BIG_ENDIAN)
5386 xlr_bitpos = rnbitsize - xlr_bitpos - lr_bitsize;
5387 xrr_bitpos = rnbitsize - xrr_bitpos - rr_bitsize;
5390 lr_mask = const_binop (LSHIFT_EXPR, fold_convert_loc (loc,
5391 rntype, lr_mask),
5392 size_int (xlr_bitpos));
5393 rr_mask = const_binop (LSHIFT_EXPR, fold_convert_loc (loc,
5394 rntype, rr_mask),
5395 size_int (xrr_bitpos));
5397 /* Make a mask that corresponds to both fields being compared.
5398 Do this for both items being compared. If the operands are the
5399 same size and the bits being compared are in the same position
5400 then we can do this by masking both and comparing the masked
5401 results. */
5402 ll_mask = const_binop (BIT_IOR_EXPR, ll_mask, rl_mask);
5403 lr_mask = const_binop (BIT_IOR_EXPR, lr_mask, rr_mask);
5404 if (lnbitsize == rnbitsize && xll_bitpos == xlr_bitpos)
5406 lhs = make_bit_field_ref (loc, ll_inner, lntype, lnbitsize, lnbitpos,
5407 ll_unsignedp || rl_unsignedp);
5408 if (! all_ones_mask_p (ll_mask, lnbitsize))
5409 lhs = build2 (BIT_AND_EXPR, lntype, lhs, ll_mask);
5411 rhs = make_bit_field_ref (loc, lr_inner, rntype, rnbitsize, rnbitpos,
5412 lr_unsignedp || rr_unsignedp);
5413 if (! all_ones_mask_p (lr_mask, rnbitsize))
5414 rhs = build2 (BIT_AND_EXPR, rntype, rhs, lr_mask);
5416 result = build2 (wanted_code, truth_type, lhs, rhs);
5417 goto fold_truthop_exit;
5420 /* There is still another way we can do something: If both pairs of
5421 fields being compared are adjacent, we may be able to make a wider
5422 field containing them both.
5424 Note that we still must mask the lhs/rhs expressions. Furthermore,
5425 the mask must be shifted to account for the shift done by
5426 make_bit_field_ref. */
5427 if ((ll_bitsize + ll_bitpos == rl_bitpos
5428 && lr_bitsize + lr_bitpos == rr_bitpos)
5429 || (ll_bitpos == rl_bitpos + rl_bitsize
5430 && lr_bitpos == rr_bitpos + rr_bitsize))
5432 tree type;
5434 lhs = make_bit_field_ref (loc, ll_inner, lntype,
5435 ll_bitsize + rl_bitsize,
5436 MIN (ll_bitpos, rl_bitpos), ll_unsignedp);
5437 rhs = make_bit_field_ref (loc, lr_inner, rntype,
5438 lr_bitsize + rr_bitsize,
5439 MIN (lr_bitpos, rr_bitpos), lr_unsignedp);
5441 ll_mask = const_binop (RSHIFT_EXPR, ll_mask,
5442 size_int (MIN (xll_bitpos, xrl_bitpos)));
5443 lr_mask = const_binop (RSHIFT_EXPR, lr_mask,
5444 size_int (MIN (xlr_bitpos, xrr_bitpos)));
5446 /* Convert to the smaller type before masking out unwanted bits. */
5447 type = lntype;
5448 if (lntype != rntype)
5450 if (lnbitsize > rnbitsize)
5452 lhs = fold_convert_loc (loc, rntype, lhs);
5453 ll_mask = fold_convert_loc (loc, rntype, ll_mask);
5454 type = rntype;
5456 else if (lnbitsize < rnbitsize)
5458 rhs = fold_convert_loc (loc, lntype, rhs);
5459 lr_mask = fold_convert_loc (loc, lntype, lr_mask);
5460 type = lntype;
5464 if (! all_ones_mask_p (ll_mask, ll_bitsize + rl_bitsize))
5465 lhs = build2 (BIT_AND_EXPR, type, lhs, ll_mask);
5467 if (! all_ones_mask_p (lr_mask, lr_bitsize + rr_bitsize))
5468 rhs = build2 (BIT_AND_EXPR, type, rhs, lr_mask);
5470 result = build2 (wanted_code, truth_type, lhs, rhs);
5471 goto fold_truthop_exit;
5474 return 0;
5477 /* Handle the case of comparisons with constants. If there is something in
5478 common between the masks, those bits of the constants must be the same.
5479 If not, the condition is always false. Test for this to avoid generating
5480 incorrect code below. */
5481 result = const_binop (BIT_AND_EXPR, ll_mask, rl_mask);
5482 if (! integer_zerop (result)
5483 && simple_cst_equal (const_binop (BIT_AND_EXPR, result, l_const),
5484 const_binop (BIT_AND_EXPR, result, r_const)) != 1)
5486 if (wanted_code == NE_EXPR)
5488 warning (0, "%<or%> of unmatched not-equal tests is always 1");
5489 return constant_boolean_node (true, truth_type);
5491 else
5493 warning (0, "%<and%> of mutually exclusive equal-tests is always 0");
5494 return constant_boolean_node (false, truth_type);
5498 /* Construct the expression we will return. First get the component
5499 reference we will make. Unless the mask is all ones the width of
5500 that field, perform the mask operation. Then compare with the
5501 merged constant. */
5502 result = make_bit_field_ref (loc, ll_inner, lntype, lnbitsize, lnbitpos,
5503 ll_unsignedp || rl_unsignedp);
5505 ll_mask = const_binop (BIT_IOR_EXPR, ll_mask, rl_mask);
5506 if (! all_ones_mask_p (ll_mask, lnbitsize))
5508 result = build2 (BIT_AND_EXPR, lntype, result, ll_mask);
5509 SET_EXPR_LOCATION (result, loc);
5512 result = build2 (wanted_code, truth_type, result,
5513 const_binop (BIT_IOR_EXPR, l_const, r_const));
5515 fold_truthop_exit:
5516 SET_EXPR_LOCATION (result, loc);
5517 return result;
5520 /* Optimize T, which is a comparison of a MIN_EXPR or MAX_EXPR with a
5521 constant. */
5523 static tree
5524 optimize_minmax_comparison (location_t loc, enum tree_code code, tree type,
5525 tree op0, tree op1)
5527 tree arg0 = op0;
5528 enum tree_code op_code;
5529 tree comp_const;
5530 tree minmax_const;
5531 int consts_equal, consts_lt;
5532 tree inner;
5534 STRIP_SIGN_NOPS (arg0);
5536 op_code = TREE_CODE (arg0);
5537 minmax_const = TREE_OPERAND (arg0, 1);
5538 comp_const = fold_convert_loc (loc, TREE_TYPE (arg0), op1);
5539 consts_equal = tree_int_cst_equal (minmax_const, comp_const);
5540 consts_lt = tree_int_cst_lt (minmax_const, comp_const);
5541 inner = TREE_OPERAND (arg0, 0);
5543 /* If something does not permit us to optimize, punt by returning NULL_TREE. */
5544 if ((op_code != MIN_EXPR && op_code != MAX_EXPR)
5545 || TREE_CODE (comp_const) != INTEGER_CST
5546 || TREE_OVERFLOW (comp_const)
5547 || TREE_CODE (minmax_const) != INTEGER_CST
5548 || TREE_OVERFLOW (minmax_const))
5549 return NULL_TREE;
5551 /* Now handle all the various comparison codes. We only handle EQ_EXPR
5552 and GT_EXPR, doing the rest with recursive calls using logical
5553 simplifications. */
5554 switch (code)
5556 case NE_EXPR: case LT_EXPR: case LE_EXPR:
5558 tree tem
5559 = optimize_minmax_comparison (loc,
5560 invert_tree_comparison (code, false),
5561 type, op0, op1);
5562 if (tem)
5563 return invert_truthvalue_loc (loc, tem);
5564 return NULL_TREE;
5567 case GE_EXPR:
5568 return
5569 fold_build2_loc (loc, TRUTH_ORIF_EXPR, type,
5570 optimize_minmax_comparison
5571 (loc, EQ_EXPR, type, arg0, comp_const),
5572 optimize_minmax_comparison
5573 (loc, GT_EXPR, type, arg0, comp_const));
5575 case EQ_EXPR:
5576 if (op_code == MAX_EXPR && consts_equal)
5577 /* MAX (X, 0) == 0 -> X <= 0 */
5578 return fold_build2_loc (loc, LE_EXPR, type, inner, comp_const);
5580 else if (op_code == MAX_EXPR && consts_lt)
5581 /* MAX (X, 0) == 5 -> X == 5 */
5582 return fold_build2_loc (loc, EQ_EXPR, type, inner, comp_const);
5584 else if (op_code == MAX_EXPR)
5585 /* MAX (X, 0) == -1 -> false */
5586 return omit_one_operand_loc (loc, type, integer_zero_node, inner);
5588 else if (consts_equal)
5589 /* MIN (X, 0) == 0 -> X >= 0 */
5590 return fold_build2_loc (loc, GE_EXPR, type, inner, comp_const);
5592 else if (consts_lt)
5593 /* MIN (X, 0) == 5 -> false */
5594 return omit_one_operand_loc (loc, type, integer_zero_node, inner);
5596 else
5597 /* MIN (X, 0) == -1 -> X == -1 */
5598 return fold_build2_loc (loc, EQ_EXPR, type, inner, comp_const);
5600 case GT_EXPR:
5601 if (op_code == MAX_EXPR && (consts_equal || consts_lt))
5602 /* MAX (X, 0) > 0 -> X > 0
5603 MAX (X, 0) > 5 -> X > 5 */
5604 return fold_build2_loc (loc, GT_EXPR, type, inner, comp_const);
5606 else if (op_code == MAX_EXPR)
5607 /* MAX (X, 0) > -1 -> true */
5608 return omit_one_operand_loc (loc, type, integer_one_node, inner);
5610 else if (op_code == MIN_EXPR && (consts_equal || consts_lt))
5611 /* MIN (X, 0) > 0 -> false
5612 MIN (X, 0) > 5 -> false */
5613 return omit_one_operand_loc (loc, type, integer_zero_node, inner);
5615 else
5616 /* MIN (X, 0) > -1 -> X > -1 */
5617 return fold_build2_loc (loc, GT_EXPR, type, inner, comp_const);
5619 default:
5620 return NULL_TREE;
5624 /* T is an integer expression that is being multiplied or divided by a
5625 constant C, or taken modulo C (CODE says which operation, and what kind
5626 of divide or modulus). See if we can eliminate that operation by folding it with
5627 other operations already in T. WIDE_TYPE, if non-null, is a type that
5628 should be used for the computation if wider than our type.
5630 For example, if we are dividing (X * 8) + (Y * 16) by 4, we can return
5631 (X * 2) + (Y * 4). We must, however, be assured that either the original
5632 expression would not overflow or that overflow is undefined for the type
5633 in the language in question.
5635 If we return a non-null expression, it is an equivalent form of the
5636 original computation, but need not be in the original type.
5638 We set *STRICT_OVERFLOW_P to true if the return value depends on
5639 signed overflow being undefined. Otherwise we do not change
5640 *STRICT_OVERFLOW_P. */
5642 static tree
5643 extract_muldiv (tree t, tree c, enum tree_code code, tree wide_type,
5644 bool *strict_overflow_p)
5646 /* To avoid exponential search depth, refuse to allow recursion past
5647 three levels. Beyond that (1) it's highly unlikely that we'll find
5648 something interesting and (2) we've probably processed it before
5649 when we built the inner expression. */
5651 static int depth;
5652 tree ret;
5654 if (depth > 3)
5655 return NULL;
5657 depth++;
5658 ret = extract_muldiv_1 (t, c, code, wide_type, strict_overflow_p);
5659 depth--;
5661 return ret;
5664 static tree
5665 extract_muldiv_1 (tree t, tree c, enum tree_code code, tree wide_type,
5666 bool *strict_overflow_p)
5668 tree type = TREE_TYPE (t);
5669 enum tree_code tcode = TREE_CODE (t);
5670 tree ctype = (wide_type != 0 && (GET_MODE_SIZE (TYPE_MODE (wide_type))
5671 > GET_MODE_SIZE (TYPE_MODE (type)))
5672 ? wide_type : type);
5673 tree t1, t2;
5674 int same_p = tcode == code;
5675 tree op0 = NULL_TREE, op1 = NULL_TREE;
5676 bool sub_strict_overflow_p;
5678 /* Don't deal with constants of zero here; they confuse the code below. */
5679 if (integer_zerop (c))
5680 return NULL_TREE;
5682 if (TREE_CODE_CLASS (tcode) == tcc_unary)
5683 op0 = TREE_OPERAND (t, 0);
5685 if (TREE_CODE_CLASS (tcode) == tcc_binary)
5686 op0 = TREE_OPERAND (t, 0), op1 = TREE_OPERAND (t, 1);
5688 /* Note that we need not handle conditional operations here since fold
5689 already handles those cases. So just do arithmetic here. */
5690 switch (tcode)
5692 case INTEGER_CST:
5693 /* For a constant, we can always simplify if we are a multiply
5694 or (for divide and modulus) if it is a multiple of our constant. */
5695 if (code == MULT_EXPR
5696 || integer_zerop (const_binop (TRUNC_MOD_EXPR, t, c)))
5697 return const_binop (code, fold_convert (ctype, t),
5698 fold_convert (ctype, c));
5699 break;
5701 CASE_CONVERT: case NON_LVALUE_EXPR:
5702 /* If op0 is an expression ... */
5703 if ((COMPARISON_CLASS_P (op0)
5704 || UNARY_CLASS_P (op0)
5705 || BINARY_CLASS_P (op0)
5706 || VL_EXP_CLASS_P (op0)
5707 || EXPRESSION_CLASS_P (op0))
5708 /* ... and has wrapping overflow, and its type is smaller
5709 than ctype, then we cannot pass through as widening. */
5710 && ((TYPE_OVERFLOW_WRAPS (TREE_TYPE (op0))
5711 && ! (TREE_CODE (TREE_TYPE (op0)) == INTEGER_TYPE
5712 && TYPE_IS_SIZETYPE (TREE_TYPE (op0)))
5713 && (TYPE_PRECISION (ctype)
5714 > TYPE_PRECISION (TREE_TYPE (op0))))
5715 /* ... or this is a truncation (t is narrower than op0),
5716 then we cannot pass through this narrowing. */
5717 || (TYPE_PRECISION (type)
5718 < TYPE_PRECISION (TREE_TYPE (op0)))
5719 /* ... or signedness changes for division or modulus,
5720 then we cannot pass through this conversion. */
5721 || (code != MULT_EXPR
5722 && (TYPE_UNSIGNED (ctype)
5723 != TYPE_UNSIGNED (TREE_TYPE (op0))))
5724 /* ... or op0's type has undefined overflow while the type being
5725 converted to has not; then we cannot do the operation in the inner type
5726 as that would introduce undefined overflow. */
5727 || (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (op0))
5728 && !TYPE_OVERFLOW_UNDEFINED (type))))
5729 break;
5731 /* Pass the constant down and see if we can make a simplification. If
5732 we can, replace this expression with the inner simplification for
5733 possible later conversion to our or some other type. */
5734 if ((t2 = fold_convert (TREE_TYPE (op0), c)) != 0
5735 && TREE_CODE (t2) == INTEGER_CST
5736 && !TREE_OVERFLOW (t2)
5737 && (0 != (t1 = extract_muldiv (op0, t2, code,
5738 code == MULT_EXPR
5739 ? ctype : NULL_TREE,
5740 strict_overflow_p))))
5741 return t1;
5742 break;
5744 case ABS_EXPR:
5745 /* If widening the type changes it from signed to unsigned, then we
5746 must avoid building ABS_EXPR itself as unsigned. */
5747 if (TYPE_UNSIGNED (ctype) && !TYPE_UNSIGNED (type))
5749 tree cstype = (*signed_type_for) (ctype);
5750 if ((t1 = extract_muldiv (op0, c, code, cstype, strict_overflow_p))
5751 != 0)
5753 t1 = fold_build1 (tcode, cstype, fold_convert (cstype, t1));
5754 return fold_convert (ctype, t1);
5756 break;
5758 /* If the constant is negative, we cannot simplify this. */
5759 if (tree_int_cst_sgn (c) == -1)
5760 break;
5761 /* FALLTHROUGH */
5762 case NEGATE_EXPR:
5763 if ((t1 = extract_muldiv (op0, c, code, wide_type, strict_overflow_p))
5764 != 0)
5765 return fold_build1 (tcode, ctype, fold_convert (ctype, t1));
5766 break;
5768 case MIN_EXPR: case MAX_EXPR:
5769 /* If widening the type changes the signedness, then we can't perform
5770 this optimization as that changes the result. */
5771 if (TYPE_UNSIGNED (ctype) != TYPE_UNSIGNED (type))
5772 break;
5774 /* MIN (a, b) / 5 -> MIN (a / 5, b / 5) */
5775 sub_strict_overflow_p = false;
5776 if ((t1 = extract_muldiv (op0, c, code, wide_type,
5777 &sub_strict_overflow_p)) != 0
5778 && (t2 = extract_muldiv (op1, c, code, wide_type,
5779 &sub_strict_overflow_p)) != 0)
5781 if (tree_int_cst_sgn (c) < 0)
5782 tcode = (tcode == MIN_EXPR ? MAX_EXPR : MIN_EXPR);
5783 if (sub_strict_overflow_p)
5784 *strict_overflow_p = true;
5785 return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
5786 fold_convert (ctype, t2));
5788 break;
5790 case LSHIFT_EXPR: case RSHIFT_EXPR:
5791 /* If the second operand is constant, this is a multiplication
5792 or floor division by a power of two, so we can treat it that
5793 way unless the multiplier or divisor overflows. Signed
5794 left-shift overflow is implementation-defined rather than
5795 undefined in C90, so do not convert signed left shift into
5796 multiplication. */
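/* Illustration (added): for unsigned X, X << 3 is rewritten below as
   X * 8, and X >> 3 (signed or unsigned) as the floor division X / 8,
   after which the recursive call gets a chance to combine that
   multiply or divide with C. */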
5797 if (TREE_CODE (op1) == INTEGER_CST
5798 && (tcode == RSHIFT_EXPR || TYPE_UNSIGNED (TREE_TYPE (op0)))
5799 /* const_binop may not detect overflow correctly,
5800 so check for it explicitly here. */
5801 && TYPE_PRECISION (TREE_TYPE (size_one_node)) > TREE_INT_CST_LOW (op1)
5802 && TREE_INT_CST_HIGH (op1) == 0
5803 && 0 != (t1 = fold_convert (ctype,
5804 const_binop (LSHIFT_EXPR,
5805 size_one_node,
5806 op1)))
5807 && !TREE_OVERFLOW (t1))
5808 return extract_muldiv (build2 (tcode == LSHIFT_EXPR
5809 ? MULT_EXPR : FLOOR_DIV_EXPR,
5810 ctype,
5811 fold_convert (ctype, op0),
5812 t1),
5813 c, code, wide_type, strict_overflow_p);
5814 break;
5816 case PLUS_EXPR: case MINUS_EXPR:
5817 /* See if we can eliminate the operation on both sides. If we can, we
5818 can return a new PLUS or MINUS. If we can't, the only remaining
5819 cases where we can do anything are if the second operand is a
5820 constant. */
5821 sub_strict_overflow_p = false;
5822 t1 = extract_muldiv (op0, c, code, wide_type, &sub_strict_overflow_p);
5823 t2 = extract_muldiv (op1, c, code, wide_type, &sub_strict_overflow_p);
5824 if (t1 != 0 && t2 != 0
5825 && (code == MULT_EXPR
5826 /* If not multiplication, we can only do this if both operands
5827 are divisible by c. */
5828 || (multiple_of_p (ctype, op0, c)
5829 && multiple_of_p (ctype, op1, c))))
5831 if (sub_strict_overflow_p)
5832 *strict_overflow_p = true;
5833 return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
5834 fold_convert (ctype, t2));
5837 /* If this was a subtraction, negate OP1 and set it to be an addition.
5838 This simplifies the logic below. */
5839 if (tcode == MINUS_EXPR)
5841 tcode = PLUS_EXPR, op1 = negate_expr (op1);
5842 /* If OP1 was not easily negatable, the constant may be OP0. */
5843 if (TREE_CODE (op0) == INTEGER_CST)
5845 tree tem = op0;
5846 op0 = op1;
5847 op1 = tem;
5848 tem = t1;
5849 t1 = t2;
5850 t2 = tem;
5854 if (TREE_CODE (op1) != INTEGER_CST)
5855 break;
5857 /* If either OP1 or C is negative, this optimization is not safe for
5858 some of the division and remainder types while for others we need
5859 to change the code. */
5860 if (tree_int_cst_sgn (op1) < 0 || tree_int_cst_sgn (c) < 0)
5862 if (code == CEIL_DIV_EXPR)
5863 code = FLOOR_DIV_EXPR;
5864 else if (code == FLOOR_DIV_EXPR)
5865 code = CEIL_DIV_EXPR;
5866 else if (code != MULT_EXPR
5867 && code != CEIL_MOD_EXPR && code != FLOOR_MOD_EXPR)
5868 break;
5871 /* If it's a multiply or a division/modulus operation of a multiple
5872 of our constant, do the operation and verify it doesn't overflow. */
5873 if (code == MULT_EXPR
5874 || integer_zerop (const_binop (TRUNC_MOD_EXPR, op1, c)))
5876 op1 = const_binop (code, fold_convert (ctype, op1),
5877 fold_convert (ctype, c));
5878 /* We allow the constant to overflow with wrapping semantics. */
5879 if (op1 == 0
5880 || (TREE_OVERFLOW (op1) && !TYPE_OVERFLOW_WRAPS (ctype)))
5881 break;
5883 else
5884 break;
5886 /* If we have an unsigned type that is not a sizetype, we cannot widen
5887 the operation since it will change the result if the original
5888 computation overflowed. */
5889 if (TYPE_UNSIGNED (ctype)
5890 && ! (TREE_CODE (ctype) == INTEGER_TYPE && TYPE_IS_SIZETYPE (ctype))
5891 && ctype != type)
5892 break;
5894 /* If we were able to eliminate our operation from the first side,
5895 apply our operation to the second side and reform the PLUS. */
5896 if (t1 != 0 && (TREE_CODE (t1) != code || code == MULT_EXPR))
5897 return fold_build2 (tcode, ctype, fold_convert (ctype, t1), op1);
5899 /* The last case is if we are a multiply. In that case, we can
5900 apply the distributive law to commute the multiply and addition
5901 if the multiplication of the constants doesn't overflow. */
5902 if (code == MULT_EXPR)
5903 return fold_build2 (tcode, ctype,
5904 fold_build2 (code, ctype,
5905 fold_convert (ctype, op0),
5906 fold_convert (ctype, c)),
5907 op1);
5909 break;
5911 case MULT_EXPR:
5912 /* We have a special case here if we are doing something like
5913 (C * 8) % 4 since we know that's zero. */
5914 if ((code == TRUNC_MOD_EXPR || code == CEIL_MOD_EXPR
5915 || code == FLOOR_MOD_EXPR || code == ROUND_MOD_EXPR)
5916 /* If the multiplication can overflow we cannot optimize this.
5917 ??? Until we can properly mark individual operations as
5918 not overflowing we need to treat sizetype specially here, as
5919 stor-layout relies on this optimization to make
5920 DECL_FIELD_BIT_OFFSET always a constant. */
5921 && (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (t))
5922 || (TREE_CODE (TREE_TYPE (t)) == INTEGER_TYPE
5923 && TYPE_IS_SIZETYPE (TREE_TYPE (t))))
5924 && TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST
5925 && integer_zerop (const_binop (TRUNC_MOD_EXPR, op1, c)))
5927 *strict_overflow_p = true;
5928 return omit_one_operand (type, integer_zero_node, op0);
5931 /* ... fall through ... */
5933 case TRUNC_DIV_EXPR: case CEIL_DIV_EXPR: case FLOOR_DIV_EXPR:
5934 case ROUND_DIV_EXPR: case EXACT_DIV_EXPR:
5935 /* If we can extract our operation from the LHS, do so and return a
5936 new operation. Likewise for the RHS from a MULT_EXPR. Otherwise,
5937 do something only if the second operand is a constant. */
5938 if (same_p
5939 && (t1 = extract_muldiv (op0, c, code, wide_type,
5940 strict_overflow_p)) != 0)
5941 return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
5942 fold_convert (ctype, op1));
5943 else if (tcode == MULT_EXPR && code == MULT_EXPR
5944 && (t1 = extract_muldiv (op1, c, code, wide_type,
5945 strict_overflow_p)) != 0)
5946 return fold_build2 (tcode, ctype, fold_convert (ctype, op0),
5947 fold_convert (ctype, t1));
5948 else if (TREE_CODE (op1) != INTEGER_CST)
5949 return 0;
5951 /* If these are the same operation types, we can associate them
5952 assuming no overflow. */
5953 if (tcode == code
5954 && 0 != (t1 = int_const_binop (MULT_EXPR,
5955 fold_convert (ctype, op1),
5956 fold_convert (ctype, c), 1))
5957 && 0 != (t1 = force_fit_type_double (ctype, tree_to_double_int (t1),
5958 (TYPE_UNSIGNED (ctype)
5959 && tcode != MULT_EXPR) ? -1 : 1,
5960 TREE_OVERFLOW (t1)))
5961 && !TREE_OVERFLOW (t1))
5962 return fold_build2 (tcode, ctype, fold_convert (ctype, op0), t1);
5964 /* If these operations "cancel" each other, we have the main
5965 optimizations of this pass, which occur when either constant is a
5966 multiple of the other, in which case we replace this with either an
5967 operation of CODE or TCODE.
5969 If we have an unsigned type that is not a sizetype, we cannot do
5970 this since it will change the result if the original computation
5971 overflowed. */
5972 if ((TYPE_OVERFLOW_UNDEFINED (ctype)
5973 || (TREE_CODE (ctype) == INTEGER_TYPE && TYPE_IS_SIZETYPE (ctype)))
5974 && ((code == MULT_EXPR && tcode == EXACT_DIV_EXPR)
5975 || (tcode == MULT_EXPR
5976 && code != TRUNC_MOD_EXPR && code != CEIL_MOD_EXPR
5977 && code != FLOOR_MOD_EXPR && code != ROUND_MOD_EXPR
5978 && code != MULT_EXPR)))
5980 if (integer_zerop (const_binop (TRUNC_MOD_EXPR, op1, c)))
5982 if (TYPE_OVERFLOW_UNDEFINED (ctype))
5983 *strict_overflow_p = true;
5984 return fold_build2 (tcode, ctype, fold_convert (ctype, op0),
5985 fold_convert (ctype,
5986 const_binop (TRUNC_DIV_EXPR,
5987 op1, c)));
5989 else if (integer_zerop (const_binop (TRUNC_MOD_EXPR, c, op1)))
5991 if (TYPE_OVERFLOW_UNDEFINED (ctype))
5992 *strict_overflow_p = true;
5993 return fold_build2 (code, ctype, fold_convert (ctype, op0),
5994 fold_convert (ctype,
5995 const_binop (TRUNC_DIV_EXPR,
5996 c, op1)));
5999 break;
6001 default:
6002 break;
6005 return 0;
6008 /* Return a node which has the indicated constant VALUE (either 0 or
6009 1), and is of the indicated TYPE. */
6011 tree
6012 constant_boolean_node (int value, tree type)
6014 if (type == integer_type_node)
6015 return value ? integer_one_node : integer_zero_node;
6016 else if (type == boolean_type_node)
6017 return value ? boolean_true_node : boolean_false_node;
6018 else
6019 return build_int_cst (type, value);
6023 /* Transform `a + (b ? x : y)' into `b ? (a + x) : (a + y)'.
6024 Transform `a + (x < y)' into `(x < y) ? (a + 1) : (a + 0)'. Here
6025 CODE corresponds to the `+', COND to the `(b ? x : y)' or `(x < y)'
6026 expression, and ARG to `a'. If COND_FIRST_P is nonzero, then the
6027 COND is the first argument to CODE; otherwise (as in the example
6028 given here), it is the second argument. TYPE is the type of the
6029 original expression. Return NULL_TREE if no simplification is
6030 possible. */
6032 static tree
6033 fold_binary_op_with_conditional_arg (location_t loc,
6034 enum tree_code code,
6035 tree type, tree op0, tree op1,
6036 tree cond, tree arg, int cond_first_p)
6038 tree cond_type = cond_first_p ? TREE_TYPE (op0) : TREE_TYPE (op1);
6039 tree arg_type = cond_first_p ? TREE_TYPE (op1) : TREE_TYPE (op0);
6040 tree test, true_value, false_value;
6041 tree lhs = NULL_TREE;
6042 tree rhs = NULL_TREE;
6044 if (TREE_CODE (cond) == COND_EXPR)
6046 test = TREE_OPERAND (cond, 0);
6047 true_value = TREE_OPERAND (cond, 1);
6048 false_value = TREE_OPERAND (cond, 2);
6049 /* If this arm is a void-typed expression (such as a throw), then
6050 it does not make sense to try to perform a logical or arithmetic
6051 operation involving it. */
6052 if (VOID_TYPE_P (TREE_TYPE (true_value)))
6053 lhs = true_value;
6054 if (VOID_TYPE_P (TREE_TYPE (false_value)))
6055 rhs = false_value;
6057 else
6059 tree testtype = TREE_TYPE (cond);
6060 test = cond;
6061 true_value = constant_boolean_node (true, testtype);
6062 false_value = constant_boolean_node (false, testtype);
6065 /* This transformation is only worthwhile if we don't have to wrap ARG
6066 in a SAVE_EXPR and the operation can be simplified on at least one
6067 of the branches once it's pushed inside the COND_EXPR. */
6068 if (!TREE_CONSTANT (arg)
6069 && (TREE_SIDE_EFFECTS (arg)
6070 || TREE_CONSTANT (true_value) || TREE_CONSTANT (false_value)))
6071 return NULL_TREE;
6073 arg = fold_convert_loc (loc, arg_type, arg);
6074 if (lhs == 0)
6076 true_value = fold_convert_loc (loc, cond_type, true_value);
6077 if (cond_first_p)
6078 lhs = fold_build2_loc (loc, code, type, true_value, arg);
6079 else
6080 lhs = fold_build2_loc (loc, code, type, arg, true_value);
6082 if (rhs == 0)
6084 false_value = fold_convert_loc (loc, cond_type, false_value);
6085 if (cond_first_p)
6086 rhs = fold_build2_loc (loc, code, type, false_value, arg);
6087 else
6088 rhs = fold_build2_loc (loc, code, type, arg, false_value);
6091 /* Check that we have simplified at least one of the branches. */
6092 if (!TREE_CONSTANT (arg) && !TREE_CONSTANT (lhs) && !TREE_CONSTANT (rhs))
6093 return NULL_TREE;
6095 return fold_build3_loc (loc, COND_EXPR, type, test, lhs, rhs);
6099 /* Subroutine of fold() that checks for the addition of +/- 0.0.
6101 If !NEGATE, return true if ADDEND is +/-0.0 and, for all X of type
6102 TYPE, X + ADDEND is the same as X. If NEGATE, return true if X -
6103 ADDEND is the same as X.
6105 X + 0 and X - 0 both give X when X is NaN, infinite, or nonzero
6106 and finite. The problematic cases are when X is zero, and its mode
6107 has signed zeros. In the case of rounding towards -infinity,
6108 X - 0 is not the same as X because 0 - 0 is -0. In other rounding
6109 modes, X + 0 is not the same as X because -0 + 0 is 0. */
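/* Concretely (an added example): in the default rounding mode
   -0.0 + 0.0 yields +0.0, so folding X + 0.0 to X would be wrong
   for X == -0.0; X - 0.0 preserves the sign of zero and is safe
   unless rounding towards -infinity, where 0.0 - 0.0 yields -0.0. */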
6111 bool
6112 fold_real_zero_addition_p (const_tree type, const_tree addend, int negate)
6114 if (!real_zerop (addend))
6115 return false;
6117 /* Don't allow the fold with -fsignaling-nans. */
6118 if (HONOR_SNANS (TYPE_MODE (type)))
6119 return false;
6121 /* Allow the fold if zeros aren't signed, or their sign isn't important. */
6122 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (type)))
6123 return true;
6125 /* Treat x + -0 as x - 0 and x - -0 as x + 0. */
6126 if (TREE_CODE (addend) == REAL_CST
6127 && REAL_VALUE_MINUS_ZERO (TREE_REAL_CST (addend)))
6128 negate = !negate;
6130 /* The mode has signed zeros, and we have to honor their sign.
6131 In this situation, there is only one case we can return true for.
6132 X - 0 is the same as X unless rounding towards -infinity is
6133 supported. */
6134 return negate && !HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type));
6137 /* Subroutine of fold() that checks comparisons of built-in math
6138 functions against real constants.
6140 FCODE is the DECL_FUNCTION_CODE of the built-in, CODE is the comparison
6141 operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR, GE_EXPR or LE_EXPR. TYPE
6142 is the type of the result and ARG0 and ARG1 are the operands of the
6143 comparison. ARG1 must be a TREE_REAL_CST.
6145 The function returns the constant folded tree if a simplification
6146 can be made, and NULL_TREE otherwise. */
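/* An added example of the sqrt case below: sqrt(x) > 4.0 folds to
   x > 16.0; the two agree even when x is negative or NaN, since
   both comparisons are then false. */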
6148 static tree
6149 fold_mathfn_compare (location_t loc,
6150 enum built_in_function fcode, enum tree_code code,
6151 tree type, tree arg0, tree arg1)
6153 REAL_VALUE_TYPE c;
6155 if (BUILTIN_SQRT_P (fcode))
6157 tree arg = CALL_EXPR_ARG (arg0, 0);
6158 enum machine_mode mode = TYPE_MODE (TREE_TYPE (arg0));
6160 c = TREE_REAL_CST (arg1);
6161 if (REAL_VALUE_NEGATIVE (c))
6163 /* sqrt(x) < y, <= y and == y are always false, if y is negative. */
6164 if (code == EQ_EXPR || code == LT_EXPR || code == LE_EXPR)
6165 return omit_one_operand_loc (loc, type, integer_zero_node, arg);
6167 /* sqrt(x) > y is always true, if y is negative and we
6168 don't care about NaNs, i.e. negative values of x. */
6169 if (code == NE_EXPR || !HONOR_NANS (mode))
6170 return omit_one_operand_loc (loc, type, integer_one_node, arg);
6172 /* sqrt(x) > y is the same as x >= 0, if y is negative. */
6173 return fold_build2_loc (loc, GE_EXPR, type, arg,
6174 build_real (TREE_TYPE (arg), dconst0));
6176 else if (code == GT_EXPR || code == GE_EXPR)
6178 REAL_VALUE_TYPE c2;
6180 REAL_ARITHMETIC (c2, MULT_EXPR, c, c);
6181 real_convert (&c2, mode, &c2);
6183 if (REAL_VALUE_ISINF (c2))
6185 /* sqrt(x) > y is x == +Inf, when y is very large. */
6186 if (HONOR_INFINITIES (mode))
6187 return fold_build2_loc (loc, EQ_EXPR, type, arg,
6188 build_real (TREE_TYPE (arg), c2));
6190 /* sqrt(x) > y is always false, when y is very large
6191 and we don't care about infinities. */
6192 return omit_one_operand_loc (loc, type, integer_zero_node, arg);
6195 /* sqrt(x) > c is the same as x > c*c. */
6196 return fold_build2_loc (loc, code, type, arg,
6197 build_real (TREE_TYPE (arg), c2));
6199 else if (code == LT_EXPR || code == LE_EXPR)
6201 REAL_VALUE_TYPE c2;
6203 REAL_ARITHMETIC (c2, MULT_EXPR, c, c);
6204 real_convert (&c2, mode, &c2);
6206 if (REAL_VALUE_ISINF (c2))
6208 /* sqrt(x) < y is always true, when y is a very large
6209 value and we don't care about NaNs or Infinities. */
6210 if (! HONOR_NANS (mode) && ! HONOR_INFINITIES (mode))
6211 return omit_one_operand_loc (loc, type, integer_one_node, arg);
6213 /* sqrt(x) < y is x != +Inf when y is very large and we
6214 don't care about NaNs. */
6215 if (! HONOR_NANS (mode))
6216 return fold_build2_loc (loc, NE_EXPR, type, arg,
6217 build_real (TREE_TYPE (arg), c2));
6219 /* sqrt(x) < y is x >= 0 when y is very large and we
6220 don't care about Infinities. */
6221 if (! HONOR_INFINITIES (mode))
6222 return fold_build2_loc (loc, GE_EXPR, type, arg,
6223 build_real (TREE_TYPE (arg), dconst0));
6225 /* sqrt(x) < y is x >= 0 && x != +Inf, when y is large. */
6226 if (lang_hooks.decls.global_bindings_p () != 0
6227 || CONTAINS_PLACEHOLDER_P (arg))
6228 return NULL_TREE;
6230 arg = save_expr (arg);
6231 return fold_build2_loc (loc, TRUTH_ANDIF_EXPR, type,
6232 fold_build2_loc (loc, GE_EXPR, type, arg,
6233 build_real (TREE_TYPE (arg),
6234 dconst0)),
6235 fold_build2_loc (loc, NE_EXPR, type, arg,
6236 build_real (TREE_TYPE (arg),
6237 c2)));
6240 /* sqrt(x) < c is the same as x < c*c, if we ignore NaNs. */
6241 if (! HONOR_NANS (mode))
6242 return fold_build2_loc (loc, code, type, arg,
6243 build_real (TREE_TYPE (arg), c2));
6245 /* sqrt(x) < c is the same as x >= 0 && x < c*c. */
6246 if (lang_hooks.decls.global_bindings_p () == 0
6247 && ! CONTAINS_PLACEHOLDER_P (arg))
6249 arg = save_expr (arg);
6250 return fold_build2_loc (loc, TRUTH_ANDIF_EXPR, type,
6251 fold_build2_loc (loc, GE_EXPR, type, arg,
6252 build_real (TREE_TYPE (arg),
6253 dconst0)),
6254 fold_build2_loc (loc, code, type, arg,
6255 build_real (TREE_TYPE (arg),
6256 c2)));
6261 return NULL_TREE;
6264 /* Subroutine of fold() that optimizes comparisons against Infinities,
6265 either +Inf or -Inf.
6267 CODE is the comparison operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR,
6268 GE_EXPR or LE_EXPR. TYPE is the type of the result and ARG0 and ARG1
6269 are the operands of the comparison. ARG1 must be a TREE_REAL_CST.
6271 The function returns the constant folded tree if a simplification
6272 can be made, and NULL_TREE otherwise. */
6274 static tree
6275 fold_inf_compare (location_t loc, enum tree_code code, tree type,
6276 tree arg0, tree arg1)
6278 enum machine_mode mode;
6279 REAL_VALUE_TYPE max;
6280 tree temp;
6281 bool neg;
6283 mode = TYPE_MODE (TREE_TYPE (arg0));
6285 /* For negative infinity swap the sense of the comparison. */
6286 neg = REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg1));
6287 if (neg)
6288 code = swap_tree_comparison (code);
6290 switch (code)
6292 case GT_EXPR:
6293 /* x > +Inf is always false, if we ignore sNaNs. */
6294 if (HONOR_SNANS (mode))
6295 return NULL_TREE;
6296 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
6298 case LE_EXPR:
6299 /* x <= +Inf is always true, if we don't care about NaNs. */
6300 if (! HONOR_NANS (mode))
6301 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
6303 /* x <= +Inf is the same as x == x, i.e. !isnan(x). */
6304 if (lang_hooks.decls.global_bindings_p () == 0
6305 && ! CONTAINS_PLACEHOLDER_P (arg0))
6307 arg0 = save_expr (arg0);
6308 return fold_build2_loc (loc, EQ_EXPR, type, arg0, arg0);
6310 break;
6312 case EQ_EXPR:
6313 case GE_EXPR:
6314 /* x == +Inf and x >= +Inf are always equal to x > DBL_MAX. */
6315 real_maxval (&max, neg, mode);
6316 return fold_build2_loc (loc, neg ? LT_EXPR : GT_EXPR, type,
6317 arg0, build_real (TREE_TYPE (arg0), max));
6319 case LT_EXPR:
6320 /* x < +Inf is always equal to x <= DBL_MAX. */
6321 real_maxval (&max, neg, mode);
6322 return fold_build2_loc (loc, neg ? GE_EXPR : LE_EXPR, type,
6323 arg0, build_real (TREE_TYPE (arg0), max));
6325 case NE_EXPR:
6326 /* x != +Inf is always equal to !(x > DBL_MAX). */
6327 real_maxval (&max, neg, mode);
6328 if (! HONOR_NANS (mode))
6329 return fold_build2_loc (loc, neg ? GE_EXPR : LE_EXPR, type,
6330 arg0, build_real (TREE_TYPE (arg0), max));
6332 temp = fold_build2_loc (loc, neg ? LT_EXPR : GT_EXPR, type,
6333 arg0, build_real (TREE_TYPE (arg0), max));
6334 return fold_build1_loc (loc, TRUTH_NOT_EXPR, type, temp);
6336 default:
6337 break;
6340 return NULL_TREE;
6343 /* Subroutine of fold() that optimizes comparisons of a division by
6344 a nonzero integer constant against an integer constant, i.e.
6345 X/C1 op C2.
6347 CODE is the comparison operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR,
6348 GE_EXPR or LE_EXPR. TYPE is the type of the result and ARG0 and ARG1
6349 are the operands of the comparison. ARG1 must be an INTEGER_CST.
6351 The function returns the constant folded tree if a simplification
6352 can be made, and NULL_TREE otherwise. */
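/* A worked example (added): for unsigned X, the comparison
   X / 4 == 3 holds exactly for 12 <= X <= 15, so it folds to the
   range check built below; X / 4 > 3 likewise becomes X > 15. */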
6354 static tree
6355 fold_div_compare (location_t loc,
6356 enum tree_code code, tree type, tree arg0, tree arg1)
6358 tree prod, tmp, hi, lo;
6359 tree arg00 = TREE_OPERAND (arg0, 0);
6360 tree arg01 = TREE_OPERAND (arg0, 1);
6361 double_int val;
6362 bool unsigned_p = TYPE_UNSIGNED (TREE_TYPE (arg0));
6363 bool neg_overflow;
6364 int overflow;
6366 /* We have to do this the hard way to detect unsigned overflow.
6367 prod = int_const_binop (MULT_EXPR, arg01, arg1, 0); */
6368 overflow = mul_double_with_sign (TREE_INT_CST_LOW (arg01),
6369 TREE_INT_CST_HIGH (arg01),
6370 TREE_INT_CST_LOW (arg1),
6371 TREE_INT_CST_HIGH (arg1),
6372 &val.low, &val.high, unsigned_p);
6373 prod = force_fit_type_double (TREE_TYPE (arg00), val, -1, overflow);
6374 neg_overflow = false;
6376 if (unsigned_p)
6378 tmp = int_const_binop (MINUS_EXPR, arg01,
6379 build_int_cst (TREE_TYPE (arg01), 1), 0);
6380 lo = prod;
6382 /* Likewise hi = int_const_binop (PLUS_EXPR, prod, tmp, 0). */
6383 overflow = add_double_with_sign (TREE_INT_CST_LOW (prod),
6384 TREE_INT_CST_HIGH (prod),
6385 TREE_INT_CST_LOW (tmp),
6386 TREE_INT_CST_HIGH (tmp),
6387 &val.low, &val.high, unsigned_p);
6388 hi = force_fit_type_double (TREE_TYPE (arg00), val,
6389 -1, overflow | TREE_OVERFLOW (prod));
6391 else if (tree_int_cst_sgn (arg01) >= 0)
6393 tmp = int_const_binop (MINUS_EXPR, arg01,
6394 build_int_cst (TREE_TYPE (arg01), 1), 0);
6395 switch (tree_int_cst_sgn (arg1))
6397 case -1:
6398 neg_overflow = true;
6399 lo = int_const_binop (MINUS_EXPR, prod, tmp, 0);
6400 hi = prod;
6401 break;
6403 case 0:
6404 lo = fold_negate_const (tmp, TREE_TYPE (arg0));
6405 hi = tmp;
6406 break;
6408 case 1:
6409 hi = int_const_binop (PLUS_EXPR, prod, tmp, 0);
6410 lo = prod;
6411 break;
6413 default:
6414 gcc_unreachable ();
6417 else
6419 /* A negative divisor reverses the relational operators. */
6420 code = swap_tree_comparison (code);
6422 tmp = int_const_binop (PLUS_EXPR, arg01,
6423 build_int_cst (TREE_TYPE (arg01), 1), 0);
6424 switch (tree_int_cst_sgn (arg1))
6426 case -1:
6427 hi = int_const_binop (MINUS_EXPR, prod, tmp, 0);
6428 lo = prod;
6429 break;
6431 case 0:
6432 hi = fold_negate_const (tmp, TREE_TYPE (arg0));
6433 lo = tmp;
6434 break;
6436 case 1:
6437 neg_overflow = true;
6438 lo = int_const_binop (PLUS_EXPR, prod, tmp, 0);
6439 hi = prod;
6440 break;
6442 default:
6443 gcc_unreachable ();
6447 switch (code)
6449 case EQ_EXPR:
6450 if (TREE_OVERFLOW (lo) && TREE_OVERFLOW (hi))
6451 return omit_one_operand_loc (loc, type, integer_zero_node, arg00);
6452 if (TREE_OVERFLOW (hi))
6453 return fold_build2_loc (loc, GE_EXPR, type, arg00, lo);
6454 if (TREE_OVERFLOW (lo))
6455 return fold_build2_loc (loc, LE_EXPR, type, arg00, hi);
6456 return build_range_check (loc, type, arg00, 1, lo, hi);
6458 case NE_EXPR:
6459 if (TREE_OVERFLOW (lo) && TREE_OVERFLOW (hi))
6460 return omit_one_operand_loc (loc, type, integer_one_node, arg00);
6461 if (TREE_OVERFLOW (hi))
6462 return fold_build2_loc (loc, LT_EXPR, type, arg00, lo);
6463 if (TREE_OVERFLOW (lo))
6464 return fold_build2_loc (loc, GT_EXPR, type, arg00, hi);
6465 return build_range_check (loc, type, arg00, 0, lo, hi);
6467 case LT_EXPR:
6468 if (TREE_OVERFLOW (lo))
6470 tmp = neg_overflow ? integer_zero_node : integer_one_node;
6471 return omit_one_operand_loc (loc, type, tmp, arg00);
6473 return fold_build2_loc (loc, LT_EXPR, type, arg00, lo);
6475 case LE_EXPR:
6476 if (TREE_OVERFLOW (hi))
6478 tmp = neg_overflow ? integer_zero_node : integer_one_node;
6479 return omit_one_operand_loc (loc, type, tmp, arg00);
6481 return fold_build2_loc (loc, LE_EXPR, type, arg00, hi);
6483 case GT_EXPR:
6484 if (TREE_OVERFLOW (hi))
6486 tmp = neg_overflow ? integer_one_node : integer_zero_node;
6487 return omit_one_operand_loc (loc, type, tmp, arg00);
6489 return fold_build2_loc (loc, GT_EXPR, type, arg00, hi);
6491 case GE_EXPR:
6492 if (TREE_OVERFLOW (lo))
6494 tmp = neg_overflow ? integer_one_node : integer_zero_node;
6495 return omit_one_operand_loc (loc, type, tmp, arg00);
6497 return fold_build2_loc (loc, GE_EXPR, type, arg00, lo);
6499 default:
6500 break;
6503 return NULL_TREE;
6507 /* If CODE with arguments ARG0 and ARG1 represents a single bit
6508 equality/inequality test, then return a simplified form of the test
6509 using a sign test. Otherwise return NULL. TYPE is the desired
6510 result type. */
6512 static tree
6513 fold_single_bit_test_into_sign_test (location_t loc,
6514 enum tree_code code, tree arg0, tree arg1,
6515 tree result_type)
6517 /* If this is testing a single bit, we can optimize the test. */
6518 if ((code == NE_EXPR || code == EQ_EXPR)
6519 && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
6520 && integer_pow2p (TREE_OPERAND (arg0, 1)))
6522 /* If we have (A & C) != 0 where C is the sign bit of A, convert
6523 this into A < 0. Similarly for (A & C) == 0 into A >= 0. */
6524 tree arg00 = sign_bit_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg0, 1));
6526 if (arg00 != NULL_TREE
6527 /* This is only a win if casting to a signed type is cheap,
6528 i.e. when arg00's type is not a partial mode. */
6529 && TYPE_PRECISION (TREE_TYPE (arg00))
6530 == GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (arg00))))
6532 tree stype = signed_type_for (TREE_TYPE (arg00));
6533 return fold_build2_loc (loc, code == EQ_EXPR ? GE_EXPR : LT_EXPR,
6534 result_type,
6535 fold_convert_loc (loc, stype, arg00),
6536 build_int_cst (stype, 0));
6540 return NULL_TREE;
6543 /* If CODE with arguments ARG0 and ARG1 represents a single bit
6544 equality/inequality test, then return a simplified form of
6545 the test using shifts and logical operations. Otherwise return
6546 NULL. TYPE is the desired result type. */
6548 tree
6549 fold_single_bit_test (location_t loc, enum tree_code code,
6550 tree arg0, tree arg1, tree result_type)
6552 /* If this is testing a single bit, we can optimize the test. */
6553 if ((code == NE_EXPR || code == EQ_EXPR)
6554 && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
6555 && integer_pow2p (TREE_OPERAND (arg0, 1)))
6557 tree inner = TREE_OPERAND (arg0, 0);
6558 tree type = TREE_TYPE (arg0);
6559 int bitnum = tree_log2 (TREE_OPERAND (arg0, 1));
6560 enum machine_mode operand_mode = TYPE_MODE (type);
6561 int ops_unsigned;
6562 tree signed_type, unsigned_type, intermediate_type;
6563 tree tem, one;
6565 /* First, see if we can fold the single bit test into a sign-bit
6566 test. */
6567 tem = fold_single_bit_test_into_sign_test (loc, code, arg0, arg1,
6568 result_type);
6569 if (tem)
6570 return tem;
6572 /* Otherwise we have (A & C) != 0 where C is a single bit,
6573 convert that into ((A >> C2) & 1), where C2 = log2(C).
6574 Similarly for (A & C) == 0. */
6576 /* If INNER is a right shift of a constant and it plus BITNUM does
6577 not overflow, adjust BITNUM and INNER. */
6578 if (TREE_CODE (inner) == RSHIFT_EXPR
6579 && TREE_CODE (TREE_OPERAND (inner, 1)) == INTEGER_CST
6580 && TREE_INT_CST_HIGH (TREE_OPERAND (inner, 1)) == 0
6581 && bitnum < TYPE_PRECISION (type)
6582 && 0 > compare_tree_int (TREE_OPERAND (inner, 1),
6583 bitnum - TYPE_PRECISION (type)))
6585 bitnum += TREE_INT_CST_LOW (TREE_OPERAND (inner, 1));
6586 inner = TREE_OPERAND (inner, 0);
6589 /* If we are going to be able to omit the AND below, we must do our
6590 operations as unsigned. If we must use the AND, we have a choice.
6591 Normally unsigned is faster, but for some machines signed is. */
6592 #ifdef LOAD_EXTEND_OP
6593 ops_unsigned = (LOAD_EXTEND_OP (operand_mode) == SIGN_EXTEND
6594 && !flag_syntax_only) ? 0 : 1;
6595 #else
6596 ops_unsigned = 1;
6597 #endif
6599 signed_type = lang_hooks.types.type_for_mode (operand_mode, 0);
6600 unsigned_type = lang_hooks.types.type_for_mode (operand_mode, 1);
6601 intermediate_type = ops_unsigned ? unsigned_type : signed_type;
6602 inner = fold_convert_loc (loc, intermediate_type, inner);
6604 if (bitnum != 0)
6605 inner = build2 (RSHIFT_EXPR, intermediate_type,
6606 inner, size_int (bitnum));
6608 one = build_int_cst (intermediate_type, 1);
6610 if (code == EQ_EXPR)
6611 inner = fold_build2_loc (loc, BIT_XOR_EXPR, intermediate_type, inner, one);
6613 /* Put the AND last so it can combine with more things. */
6614 inner = build2 (BIT_AND_EXPR, intermediate_type, inner, one);
6616 /* Make sure to return the proper type. */
6617 inner = fold_convert_loc (loc, result_type, inner);
6619 return inner;
6621 return NULL_TREE;
6624 /* Check whether we are allowed to reorder operands arg0 and arg1,
6625 such that the evaluation of arg1 occurs before arg0. */
6627 static bool
6628 reorder_operands_p (const_tree arg0, const_tree arg1)
6630 if (! flag_evaluation_order)
6631 return true;
6632 if (TREE_CONSTANT (arg0) || TREE_CONSTANT (arg1))
6633 return true;
6634 return ! TREE_SIDE_EFFECTS (arg0)
6635 && ! TREE_SIDE_EFFECTS (arg1);
6638 /* Test whether it is preferable to swap two operands, ARG0 and
6639 ARG1, for example because ARG0 is an integer constant and ARG1
6640 isn't. If REORDER is true, only recommend swapping if we can
6641 evaluate the operands in reverse order. */
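/* Added illustration: for `2 < x' ARG0 is the INTEGER_CST, so this
   returns 1 and callers can canonicalize to `x > 2', swapping the
   operands together with the comparison code. */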
6643 bool
6644 tree_swap_operands_p (const_tree arg0, const_tree arg1, bool reorder)
6646 STRIP_SIGN_NOPS (arg0);
6647 STRIP_SIGN_NOPS (arg1);
6649 if (TREE_CODE (arg1) == INTEGER_CST)
6650 return 0;
6651 if (TREE_CODE (arg0) == INTEGER_CST)
6652 return 1;
6654 if (TREE_CODE (arg1) == REAL_CST)
6655 return 0;
6656 if (TREE_CODE (arg0) == REAL_CST)
6657 return 1;
6659 if (TREE_CODE (arg1) == FIXED_CST)
6660 return 0;
6661 if (TREE_CODE (arg0) == FIXED_CST)
6662 return 1;
6664 if (TREE_CODE (arg1) == COMPLEX_CST)
6665 return 0;
6666 if (TREE_CODE (arg0) == COMPLEX_CST)
6667 return 1;
6669 if (TREE_CONSTANT (arg1))
6670 return 0;
6671 if (TREE_CONSTANT (arg0))
6672 return 1;
6674 if (optimize_function_for_size_p (cfun))
6675 return 0;
6677 if (reorder && flag_evaluation_order
6678 && (TREE_SIDE_EFFECTS (arg0) || TREE_SIDE_EFFECTS (arg1)))
6679 return 0;
6681 /* It is preferable to swap two SSA_NAME to ensure a canonical form
6682 for commutative and comparison operators. Ensuring a canonical
6683 form allows the optimizers to find additional redundancies without
6684 having to explicitly check for both orderings. */
6685 if (TREE_CODE (arg0) == SSA_NAME
6686 && TREE_CODE (arg1) == SSA_NAME
6687 && SSA_NAME_VERSION (arg0) > SSA_NAME_VERSION (arg1))
6688 return 1;
6690 /* Put SSA_NAMEs last. */
6691 if (TREE_CODE (arg1) == SSA_NAME)
6692 return 0;
6693 if (TREE_CODE (arg0) == SSA_NAME)
6694 return 1;
6696 /* Put variables last. */
6697 if (DECL_P (arg1))
6698 return 0;
6699 if (DECL_P (arg0))
6700 return 1;
6702 return 0;
6705 /* Fold comparison ARG0 CODE ARG1 (with result in TYPE), where
6706 ARG0 is extended to a wider type. */
6708 static tree
6709 fold_widened_comparison (location_t loc, enum tree_code code,
6710 tree type, tree arg0, tree arg1)
6712 tree arg0_unw = get_unwidened (arg0, NULL_TREE);
6713 tree arg1_unw;
6714 tree shorter_type, outer_type;
6715 tree min, max;
6716 bool above, below;
6718 if (arg0_unw == arg0)
6719 return NULL_TREE;
6720 shorter_type = TREE_TYPE (arg0_unw);
6722 #ifdef HAVE_canonicalize_funcptr_for_compare
6723 /* Disable this optimization if we're casting a function pointer
6724 type on targets that require function pointer canonicalization. */
6725 if (HAVE_canonicalize_funcptr_for_compare
6726 && TREE_CODE (shorter_type) == POINTER_TYPE
6727 && TREE_CODE (TREE_TYPE (shorter_type)) == FUNCTION_TYPE)
6728 return NULL_TREE;
6729 #endif
6731 if (TYPE_PRECISION (TREE_TYPE (arg0)) <= TYPE_PRECISION (shorter_type))
6732 return NULL_TREE;
6734 arg1_unw = get_unwidened (arg1, NULL_TREE);
6736 /* If possible, express the comparison in the shorter mode. */
6737 if ((code == EQ_EXPR || code == NE_EXPR
6738 || TYPE_UNSIGNED (TREE_TYPE (arg0)) == TYPE_UNSIGNED (shorter_type))
6739 && (TREE_TYPE (arg1_unw) == shorter_type
6740 || ((TYPE_PRECISION (shorter_type)
6741 >= TYPE_PRECISION (TREE_TYPE (arg1_unw)))
6742 && (TYPE_UNSIGNED (shorter_type)
6743 == TYPE_UNSIGNED (TREE_TYPE (arg1_unw))))
6744 || (TREE_CODE (arg1_unw) == INTEGER_CST
6745 && (TREE_CODE (shorter_type) == INTEGER_TYPE
6746 || TREE_CODE (shorter_type) == BOOLEAN_TYPE)
6747 && int_fits_type_p (arg1_unw, shorter_type))))
6748 return fold_build2_loc (loc, code, type, arg0_unw,
6749 fold_convert_loc (loc, shorter_type, arg1_unw));
6751 if (TREE_CODE (arg1_unw) != INTEGER_CST
6752 || TREE_CODE (shorter_type) != INTEGER_TYPE
6753 || !int_fits_type_p (arg1_unw, shorter_type))
6754 return NULL_TREE;
6756 /* If we are comparing with an integer that does not fit into the range
6757 of the shorter type, the result is known. */
6758 outer_type = TREE_TYPE (arg1_unw);
6759 min = lower_bound_in_type (outer_type, shorter_type);
6760 max = upper_bound_in_type (outer_type, shorter_type);
6762 above = integer_nonzerop (fold_relational_const (LT_EXPR, type,
6763 max, arg1_unw));
6764 below = integer_nonzerop (fold_relational_const (LT_EXPR, type,
6765 arg1_unw, min));
6767 switch (code)
6769 case EQ_EXPR:
6770 if (above || below)
6771 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
6772 break;
6774 case NE_EXPR:
6775 if (above || below)
6776 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
6777 break;
6779 case LT_EXPR:
6780 case LE_EXPR:
6781 if (above)
6782 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
6783 else if (below)
6784 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
6786 case GT_EXPR:
6787 case GE_EXPR:
6788 if (above)
6789 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
6790 else if (below)
6791 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
6793 default:
6794 break;
6797 return NULL_TREE;
6800 /* Fold comparison ARG0 CODE ARG1 (with result in TYPE), where for
6801 ARG0 just the signedness is changed. */
6803 static tree
6804 fold_sign_changed_comparison (location_t loc, enum tree_code code, tree type,
6805 tree arg0, tree arg1)
6807 tree arg0_inner;
6808 tree inner_type, outer_type;
6810 if (!CONVERT_EXPR_P (arg0))
6811 return NULL_TREE;
6813 outer_type = TREE_TYPE (arg0);
6814 arg0_inner = TREE_OPERAND (arg0, 0);
6815 inner_type = TREE_TYPE (arg0_inner);
6817 #ifdef HAVE_canonicalize_funcptr_for_compare
6818 /* Disable this optimization if we're casting a function pointer
6819 type on targets that require function pointer canonicalization. */
6820 if (HAVE_canonicalize_funcptr_for_compare
6821 && TREE_CODE (inner_type) == POINTER_TYPE
6822 && TREE_CODE (TREE_TYPE (inner_type)) == FUNCTION_TYPE)
6823 return NULL_TREE;
6824 #endif
6826 if (TYPE_PRECISION (inner_type) != TYPE_PRECISION (outer_type))
6827 return NULL_TREE;
6829 if (TREE_CODE (arg1) != INTEGER_CST
6830 && !(CONVERT_EXPR_P (arg1)
6831 && TREE_TYPE (TREE_OPERAND (arg1, 0)) == inner_type))
6832 return NULL_TREE;
6834 if ((TYPE_UNSIGNED (inner_type) != TYPE_UNSIGNED (outer_type)
6835 || POINTER_TYPE_P (inner_type) != POINTER_TYPE_P (outer_type))
6836 && code != NE_EXPR
6837 && code != EQ_EXPR)
6838 return NULL_TREE;
6840 if (TREE_CODE (arg1) == INTEGER_CST)
6841 arg1 = force_fit_type_double (inner_type, tree_to_double_int (arg1),
6842 0, TREE_OVERFLOW (arg1));
6843 else
6844 arg1 = fold_convert_loc (loc, inner_type, arg1);
6846 return fold_build2_loc (loc, code, type, arg0_inner, arg1);
6849 /* Tries to replace &a[idx] p+ s * delta with &a[idx + delta], if s is
6850 the step of the array. Reconstructs s and delta in the case of s *
6851 delta being an integer constant (and thus already folded). ADDR is
6852 the address. OP1 is the multiplicative expression. If the
6853 function succeeds, the new address expression is returned.
6854 Otherwise NULL_TREE is returned. LOC is the location of the
6855 resulting expression. */
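/* An added example (assuming 4-byte int): with int a[10], the address
   &a[1] p+ 4 * i has s == 4 matching the array step, so it becomes
   &a[1 + i]; a bare constant offset such as &a[1] p+ 8 goes through
   the delta-is-a-multiple-of-step path and yields &a[1 + 2]. */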
6857 static tree
6858 try_move_mult_to_index (location_t loc, tree addr, tree op1)
6860 tree s, delta, step;
6861 tree ref = TREE_OPERAND (addr, 0), pref;
6862 tree ret, pos;
6863 tree itype;
6864 bool mdim = false;
6866 /* Strip the nops that might be added when converting op1 to sizetype. */
6867 STRIP_NOPS (op1);
6869 /* Canonicalize op1 into a possibly non-constant delta
6870 and an INTEGER_CST s. */
6871 if (TREE_CODE (op1) == MULT_EXPR)
6873 tree arg0 = TREE_OPERAND (op1, 0), arg1 = TREE_OPERAND (op1, 1);
6875 STRIP_NOPS (arg0);
6876 STRIP_NOPS (arg1);
6878 if (TREE_CODE (arg0) == INTEGER_CST)
6880 s = arg0;
6881 delta = arg1;
6883 else if (TREE_CODE (arg1) == INTEGER_CST)
6885 s = arg1;
6886 delta = arg0;
6888 else
6889 return NULL_TREE;
6891 else if (TREE_CODE (op1) == INTEGER_CST)
6893 delta = op1;
6894 s = NULL_TREE;
6896 else
6898 /* Treat op1 as delta * 1. */
6899 delta = op1;
6900 s = integer_one_node;
6903 for (;; ref = TREE_OPERAND (ref, 0))
6905 if (TREE_CODE (ref) == ARRAY_REF)
6907 tree domain;
6909 /* Remember if this was a multi-dimensional array. */
6910 if (TREE_CODE (TREE_OPERAND (ref, 0)) == ARRAY_REF)
6911 mdim = true;
6913 domain = TYPE_DOMAIN (TREE_TYPE (TREE_OPERAND (ref, 0)));
6914 if (! domain)
6915 continue;
6916 itype = TREE_TYPE (domain);
6918 step = array_ref_element_size (ref);
6919 if (TREE_CODE (step) != INTEGER_CST)
6920 continue;
6922 if (s)
6924 if (! tree_int_cst_equal (step, s))
6925 continue;
6927 else
6929 /* Check whether delta is a multiple of step. */
6930 tree tmp = div_if_zero_remainder (EXACT_DIV_EXPR, op1, step);
6931 if (! tmp)
6932 continue;
6933 delta = tmp;
6936 /* Only fold here if we can verify we do not overflow one
6937 dimension of a multi-dimensional array. */
6938 if (mdim)
6940 tree tmp;
6942 if (TREE_CODE (TREE_OPERAND (ref, 1)) != INTEGER_CST
6943 || !TYPE_MAX_VALUE (domain)
6944 || TREE_CODE (TYPE_MAX_VALUE (domain)) != INTEGER_CST)
6945 continue;
6947 tmp = fold_binary_loc (loc, PLUS_EXPR, itype,
6948 fold_convert_loc (loc, itype,
6949 TREE_OPERAND (ref, 1)),
6950 fold_convert_loc (loc, itype, delta));
6951 if (!tmp
6952 || TREE_CODE (tmp) != INTEGER_CST
6953 || tree_int_cst_lt (TYPE_MAX_VALUE (domain), tmp))
6954 continue;
6957 break;
6959 else
6960 mdim = false;
6962 if (!handled_component_p (ref))
6963 return NULL_TREE;
6966 /* We found a suitable array reference. So copy everything up to it,
6967 and replace the index. */
6969 pref = TREE_OPERAND (addr, 0);
6970 ret = copy_node (pref);
6971 SET_EXPR_LOCATION (ret, loc);
6972 pos = ret;
6974 while (pref != ref)
6976 pref = TREE_OPERAND (pref, 0);
6977 TREE_OPERAND (pos, 0) = copy_node (pref);
6978 pos = TREE_OPERAND (pos, 0);
6981 TREE_OPERAND (pos, 1) = fold_build2_loc (loc, PLUS_EXPR, itype,
6982 fold_convert_loc (loc, itype,
6983 TREE_OPERAND (pos, 1)),
6984 fold_convert_loc (loc, itype, delta));
6986 return fold_build1_loc (loc, ADDR_EXPR, TREE_TYPE (addr), ret);
6990 /* Fold A < X && A + 1 > Y to A < X && A >= Y. Normally A + 1 > Y
6991 means A >= Y && A != MAX, but in this case we know that
6992 A < X <= MAX. INEQ is A + 1 > Y, BOUND is A < X. */
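/* Added instance: with BOUND `i < n' and INEQ `i + 1 > j', the
   bound guarantees i + 1 cannot wrap, so the result is `i >= j'. */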
6994 static tree
6995 fold_to_nonsharp_ineq_using_bound (location_t loc, tree ineq, tree bound)
6997 tree a, typea, type = TREE_TYPE (ineq), a1, diff, y;
6999 if (TREE_CODE (bound) == LT_EXPR)
7000 a = TREE_OPERAND (bound, 0);
7001 else if (TREE_CODE (bound) == GT_EXPR)
7002 a = TREE_OPERAND (bound, 1);
7003 else
7004 return NULL_TREE;
7006 typea = TREE_TYPE (a);
7007 if (!INTEGRAL_TYPE_P (typea)
7008 && !POINTER_TYPE_P (typea))
7009 return NULL_TREE;
7011 if (TREE_CODE (ineq) == LT_EXPR)
7013 a1 = TREE_OPERAND (ineq, 1);
7014 y = TREE_OPERAND (ineq, 0);
7016 else if (TREE_CODE (ineq) == GT_EXPR)
7018 a1 = TREE_OPERAND (ineq, 0);
7019 y = TREE_OPERAND (ineq, 1);
7021 else
7022 return NULL_TREE;
7024 if (TREE_TYPE (a1) != typea)
7025 return NULL_TREE;
7027 if (POINTER_TYPE_P (typea))
7029 /* Convert the pointer types into integers before taking the difference. */
7030 tree ta = fold_convert_loc (loc, ssizetype, a);
7031 tree ta1 = fold_convert_loc (loc, ssizetype, a1);
7032 diff = fold_binary_loc (loc, MINUS_EXPR, ssizetype, ta1, ta);
7034 else
7035 diff = fold_binary_loc (loc, MINUS_EXPR, typea, a1, a);
7037 if (!diff || !integer_onep (diff))
7038 return NULL_TREE;
7040 return fold_build2_loc (loc, GE_EXPR, type, a, y);
7043 /* Fold a sum or difference of at least one multiplication.
7044 Returns the folded tree or NULL if no simplification could be made. */
7046 static tree
7047 fold_plusminus_mult_expr (location_t loc, enum tree_code code, tree type,
7048 tree arg0, tree arg1)
7050 tree arg00, arg01, arg10, arg11;
7051 tree alt0 = NULL_TREE, alt1 = NULL_TREE, same;
7053 /* (A * C) +- (B * C) -> (A+-B) * C.
7054 (A * C) +- A -> A * (C+-1).
7055 We are most concerned about the case where C is a constant,
7056 but other combinations show up during loop reduction. Since
7057 it is not difficult, try all four possibilities. */
7059 if (TREE_CODE (arg0) == MULT_EXPR)
7061 arg00 = TREE_OPERAND (arg0, 0);
7062 arg01 = TREE_OPERAND (arg0, 1);
7064 else if (TREE_CODE (arg0) == INTEGER_CST)
7066 arg00 = build_one_cst (type);
7067 arg01 = arg0;
7069 else
7071 /* We cannot generate constant 1 for fract. */
7072 if (ALL_FRACT_MODE_P (TYPE_MODE (type)))
7073 return NULL_TREE;
7074 arg00 = arg0;
7075 arg01 = build_one_cst (type);
7077 if (TREE_CODE (arg1) == MULT_EXPR)
7079 arg10 = TREE_OPERAND (arg1, 0);
7080 arg11 = TREE_OPERAND (arg1, 1);
7082 else if (TREE_CODE (arg1) == INTEGER_CST)
7084 arg10 = build_one_cst (type);
7085 /* As we canonicalize A - 2 to A + -2, get rid of that sign for
7086 the purpose of this canonicalization. */
7087 if (TREE_INT_CST_HIGH (arg1) == -1
7088 && negate_expr_p (arg1)
7089 && code == PLUS_EXPR)
7091 arg11 = negate_expr (arg1);
7092 code = MINUS_EXPR;
7094 else
7095 arg11 = arg1;
7097 else
7099 /* We cannot generate constant 1 for fract. */
7100 if (ALL_FRACT_MODE_P (TYPE_MODE (type)))
7101 return NULL_TREE;
7102 arg10 = arg1;
7103 arg11 = build_one_cst (type);
7105 same = NULL_TREE;
7107 if (operand_equal_p (arg01, arg11, 0))
7108 same = arg01, alt0 = arg00, alt1 = arg10;
7109 else if (operand_equal_p (arg00, arg10, 0))
7110 same = arg00, alt0 = arg01, alt1 = arg11;
7111 else if (operand_equal_p (arg00, arg11, 0))
7112 same = arg00, alt0 = arg01, alt1 = arg10;
7113 else if (operand_equal_p (arg01, arg10, 0))
7114 same = arg01, alt0 = arg00, alt1 = arg11;
7116 /* No identical multiplicands; see if we can find a common
7117 power-of-two factor in non-power-of-two multiplies. This
7118 can help in multi-dimensional array access. */
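/* E.g. (added): i * 12 + j * 4 has no identical multiplicand, but
   12 = 3 * 4 shares the power-of-two factor 4, so the sum can
   become (i * 3 + j) * 4. */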
7119 else if (host_integerp (arg01, 0)
7120 && host_integerp (arg11, 0))
7122 HOST_WIDE_INT int01, int11, tmp;
7123 bool swap = false;
7124 tree maybe_same;
7125 int01 = TREE_INT_CST_LOW (arg01);
7126 int11 = TREE_INT_CST_LOW (arg11);
7128 /* Move min of absolute values to int11. */
7129 if ((int01 >= 0 ? int01 : -int01)
7130 < (int11 >= 0 ? int11 : -int11))
7132 tmp = int01, int01 = int11, int11 = tmp;
7133 alt0 = arg00, arg00 = arg10, arg10 = alt0;
7134 maybe_same = arg01;
7135 swap = true;
7137 else
7138 maybe_same = arg11;
7140 if (exact_log2 (abs (int11)) > 0 && int01 % int11 == 0
7141 /* The remainder should not be a constant, otherwise we
7142 end up folding i * 4 + 2 to (i * 2 + 1) * 2, which would
7143 increase the number of multiplications necessary. */
7144 && TREE_CODE (arg10) != INTEGER_CST)
7146 alt0 = fold_build2_loc (loc, MULT_EXPR, TREE_TYPE (arg00), arg00,
7147 build_int_cst (TREE_TYPE (arg00),
7148 int01 / int11));
7149 alt1 = arg10;
7150 same = maybe_same;
7151 if (swap)
7152 maybe_same = alt0, alt0 = alt1, alt1 = maybe_same;
7156 if (same)
7157 return fold_build2_loc (loc, MULT_EXPR, type,
7158 fold_build2_loc (loc, code, type,
7159 fold_convert_loc (loc, type, alt0),
7160 fold_convert_loc (loc, type, alt1)),
7161 fold_convert_loc (loc, type, same));
7163 return NULL_TREE;
7166 /* Subroutine of native_encode_expr. Encode the INTEGER_CST
7167 specified by EXPR into the buffer PTR of length LEN bytes.
7168 Return the number of bytes placed in the buffer, or zero
7169 upon failure. */
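/* Added example: a 32-bit INTEGER_CST with value 0x01020304 is
   emitted in the target's byte order, i.e. as the bytes
   04 03 02 01 on a little-endian target and 01 02 03 04 on a
   big-endian one. */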
7171 static int
7172 native_encode_int (const_tree expr, unsigned char *ptr, int len)
7174 tree type = TREE_TYPE (expr);
7175 int total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
7176 int byte, offset, word, words;
7177 unsigned char value;
7179 if (total_bytes > len)
7180 return 0;
7181 words = total_bytes / UNITS_PER_WORD;
7183 for (byte = 0; byte < total_bytes; byte++)
7185 int bitpos = byte * BITS_PER_UNIT;
7186 if (bitpos < HOST_BITS_PER_WIDE_INT)
7187 value = (unsigned char) (TREE_INT_CST_LOW (expr) >> bitpos);
7188 else
7189 value = (unsigned char) (TREE_INT_CST_HIGH (expr)
7190 >> (bitpos - HOST_BITS_PER_WIDE_INT));
7192 if (total_bytes > UNITS_PER_WORD)
7194 word = byte / UNITS_PER_WORD;
7195 if (WORDS_BIG_ENDIAN)
7196 word = (words - 1) - word;
7197 offset = word * UNITS_PER_WORD;
7198 if (BYTES_BIG_ENDIAN)
7199 offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
7200 else
7201 offset += byte % UNITS_PER_WORD;
7203 else
7204 offset = BYTES_BIG_ENDIAN ? (total_bytes - 1) - byte : byte;
7205 ptr[offset] = value;
7207 return total_bytes;
7211 /* Subroutine of native_encode_expr. Encode the REAL_CST
7212 specified by EXPR into the buffer PTR of length LEN bytes.
7213 Return the number of bytes placed in the buffer, or zero
7214 upon failure. */
7216 static int
7217 native_encode_real (const_tree expr, unsigned char *ptr, int len)
7219 tree type = TREE_TYPE (expr);
7220 int total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
7221 int byte, offset, word, words, bitpos;
7222 unsigned char value;
7224 /* There are always 32 bits in each long, no matter the size of
7225 the host's long. We handle floating point representations with
7226 up to 192 bits. */
7227 long tmp[6];
7229 if (total_bytes > len)
7230 return 0;
7231 words = (32 / BITS_PER_UNIT) / UNITS_PER_WORD;
7233 real_to_target (tmp, TREE_REAL_CST_PTR (expr), TYPE_MODE (type));
7235 for (bitpos = 0; bitpos < total_bytes * BITS_PER_UNIT;
7236 bitpos += BITS_PER_UNIT)
7238 byte = (bitpos / BITS_PER_UNIT) & 3;
7239 value = (unsigned char) (tmp[bitpos / 32] >> (bitpos & 31));
7241 if (UNITS_PER_WORD < 4)
7243 word = byte / UNITS_PER_WORD;
7244 if (WORDS_BIG_ENDIAN)
7245 word = (words - 1) - word;
7246 offset = word * UNITS_PER_WORD;
7247 if (BYTES_BIG_ENDIAN)
7248 offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
7249 else
7250 offset += byte % UNITS_PER_WORD;
7252 else
7253 offset = BYTES_BIG_ENDIAN ? 3 - byte : byte;
7254 ptr[offset + ((bitpos / BITS_PER_UNIT) & ~3)] = value;
7256 return total_bytes;
7259 /* Subroutine of native_encode_expr. Encode the COMPLEX_CST
7260 specified by EXPR into the buffer PTR of length LEN bytes.
7261 Return the number of bytes placed in the buffer, or zero
7262 upon failure. */
7264 static int
7265 native_encode_complex (const_tree expr, unsigned char *ptr, int len)
7267 int rsize, isize;
7268 tree part;
7270 part = TREE_REALPART (expr);
7271 rsize = native_encode_expr (part, ptr, len);
7272 if (rsize == 0)
7273 return 0;
7274 part = TREE_IMAGPART (expr);
7275 isize = native_encode_expr (part, ptr+rsize, len-rsize);
7276 if (isize != rsize)
7277 return 0;
7278 return rsize + isize;
7282 /* Subroutine of native_encode_expr. Encode the VECTOR_CST
7283 specified by EXPR into the buffer PTR of length LEN bytes.
7284 Return the number of bytes placed in the buffer, or zero
7285 upon failure. */
7287 static int
7288 native_encode_vector (const_tree expr, unsigned char *ptr, int len)
7290 int i, size, offset, count;
7291 tree itype, elem, elements;
7293 offset = 0;
7294 elements = TREE_VECTOR_CST_ELTS (expr);
7295 count = TYPE_VECTOR_SUBPARTS (TREE_TYPE (expr));
7296 itype = TREE_TYPE (TREE_TYPE (expr));
7297 size = GET_MODE_SIZE (TYPE_MODE (itype));
7298 for (i = 0; i < count; i++)
7300 if (elements)
7302 elem = TREE_VALUE (elements);
7303 elements = TREE_CHAIN (elements);
7305 else
7306 elem = NULL_TREE;
7308 if (elem)
7310 if (native_encode_expr (elem, ptr+offset, len-offset) != size)
7311 return 0;
7313 else
7315 if (offset + size > len)
7316 return 0;
7317 memset (ptr+offset, 0, size);
7319 offset += size;
7321 return offset;
7325 /* Subroutine of native_encode_expr. Encode the STRING_CST
7326 specified by EXPR into the buffer PTR of length LEN bytes.
7327 Return the number of bytes placed in the buffer, or zero
7328 upon failure. */
7330 static int
7331 native_encode_string (const_tree expr, unsigned char *ptr, int len)
7333 tree type = TREE_TYPE (expr);
7334 HOST_WIDE_INT total_bytes;
7336 if (TREE_CODE (type) != ARRAY_TYPE
7337 || TREE_CODE (TREE_TYPE (type)) != INTEGER_TYPE
7338 || GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (type))) != BITS_PER_UNIT
7339 || !host_integerp (TYPE_SIZE_UNIT (type), 0))
7340 return 0;
7341 total_bytes = tree_low_cst (TYPE_SIZE_UNIT (type), 0);
7342 if (total_bytes > len)
7343 return 0;
7344 if (TREE_STRING_LENGTH (expr) < total_bytes)
7346 memcpy (ptr, TREE_STRING_POINTER (expr), TREE_STRING_LENGTH (expr));
7347 memset (ptr + TREE_STRING_LENGTH (expr), 0,
7348 total_bytes - TREE_STRING_LENGTH (expr));
7350 else
7351 memcpy (ptr, TREE_STRING_POINTER (expr), total_bytes);
7352 return total_bytes;
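/* For example, a STRING_CST "ab" of type char[4] typically has
   TREE_STRING_LENGTH 3 (the two characters plus the terminating NUL);
   those three bytes are copied, the fourth is zero filled, and 4 is
   returned.  */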
7356 /* Subroutine of fold_view_convert_expr. Encode the INTEGER_CST,
7357 REAL_CST, COMPLEX_CST or VECTOR_CST specified by EXPR into the
7358 buffer PTR of length LEN bytes. Return the number of bytes
7359 placed in the buffer, or zero upon failure. */
7361 int
7362 native_encode_expr (const_tree expr, unsigned char *ptr, int len)
7364 switch (TREE_CODE (expr))
7366 case INTEGER_CST:
7367 return native_encode_int (expr, ptr, len);
7369 case REAL_CST:
7370 return native_encode_real (expr, ptr, len);
7372 case COMPLEX_CST:
7373 return native_encode_complex (expr, ptr, len);
7375 case VECTOR_CST:
7376 return native_encode_vector (expr, ptr, len);
7378 case STRING_CST:
7379 return native_encode_string (expr, ptr, len);
7381 default:
7382 return 0;
7387 /* Subroutine of native_interpret_expr. Interpret the contents of
7388 the buffer PTR of length LEN as an INTEGER_CST of type TYPE.
7389 If the buffer cannot be interpreted, return NULL_TREE. */
7391 static tree
7392 native_interpret_int (tree type, const unsigned char *ptr, int len)
7394 int total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
7395 int byte, offset, word, words;
7396 unsigned char value;
7397 double_int result;
7399 if (total_bytes > len)
7400 return NULL_TREE;
7401 if (total_bytes * BITS_PER_UNIT > 2 * HOST_BITS_PER_WIDE_INT)
7402 return NULL_TREE;
7404 result = double_int_zero;
7405 words = total_bytes / UNITS_PER_WORD;
7407 for (byte = 0; byte < total_bytes; byte++)
7409 int bitpos = byte * BITS_PER_UNIT;
7410 if (total_bytes > UNITS_PER_WORD)
7412 word = byte / UNITS_PER_WORD;
7413 if (WORDS_BIG_ENDIAN)
7414 word = (words - 1) - word;
7415 offset = word * UNITS_PER_WORD;
7416 if (BYTES_BIG_ENDIAN)
7417 offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
7418 else
7419 offset += byte % UNITS_PER_WORD;
7421 else
7422 offset = BYTES_BIG_ENDIAN ? (total_bytes - 1) - byte : byte;
7423 value = ptr[offset];
7425 if (bitpos < HOST_BITS_PER_WIDE_INT)
7426 result.low |= (unsigned HOST_WIDE_INT) value << bitpos;
7427 else
7428 result.high |= (unsigned HOST_WIDE_INT) value
7429 << (bitpos - HOST_BITS_PER_WIDE_INT);
7432 return double_int_to_tree (type, result);
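/* For example, on a little-endian target a 16-bit integer type
   interprets the buffer { 0x34, 0x12 } as the constant 0x1234: byte 0
   is shifted in at bit position 0, byte 1 at bit position 8.  */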
7436 /* Subroutine of native_interpret_expr. Interpret the contents of
7437 the buffer PTR of length LEN as a REAL_CST of type TYPE.
7438 If the buffer cannot be interpreted, return NULL_TREE. */
7440 static tree
7441 native_interpret_real (tree type, const unsigned char *ptr, int len)
7443 enum machine_mode mode = TYPE_MODE (type);
7444 int total_bytes = GET_MODE_SIZE (mode);
7445 int byte, offset, word, words, bitpos;
7446 unsigned char value;
7447 /* There are always 32 bits in each long, no matter the size of
7448 the host's long. We handle floating point representations with
7449 up to 192 bits. */
7450 REAL_VALUE_TYPE r;
7451 long tmp[6];
7453 total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
7454 if (total_bytes > len || total_bytes > 24)
7455 return NULL_TREE;
7456 words = (32 / BITS_PER_UNIT) / UNITS_PER_WORD;
7458 memset (tmp, 0, sizeof (tmp));
7459 for (bitpos = 0; bitpos < total_bytes * BITS_PER_UNIT;
7460 bitpos += BITS_PER_UNIT)
7462 byte = (bitpos / BITS_PER_UNIT) & 3;
7463 if (UNITS_PER_WORD < 4)
7465 word = byte / UNITS_PER_WORD;
7466 if (WORDS_BIG_ENDIAN)
7467 word = (words - 1) - word;
7468 offset = word * UNITS_PER_WORD;
7469 if (BYTES_BIG_ENDIAN)
7470 offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
7471 else
7472 offset += byte % UNITS_PER_WORD;
7474 else
7475 offset = BYTES_BIG_ENDIAN ? 3 - byte : byte;
7476 value = ptr[offset + ((bitpos / BITS_PER_UNIT) & ~3)];
7478 tmp[bitpos / 32] |= (unsigned long)value << (bitpos & 31);
7481 real_from_target (&r, tmp, mode);
7482 return build_real (type, r);
7486 /* Subroutine of native_interpret_expr. Interpret the contents of
7487 the buffer PTR of length LEN as a COMPLEX_CST of type TYPE.
7488 If the buffer cannot be interpreted, return NULL_TREE. */
7490 static tree
7491 native_interpret_complex (tree type, const unsigned char *ptr, int len)
7493 tree etype, rpart, ipart;
7494 int size;
7496 etype = TREE_TYPE (type);
7497 size = GET_MODE_SIZE (TYPE_MODE (etype));
7498 if (size * 2 > len)
7499 return NULL_TREE;
7500 rpart = native_interpret_expr (etype, ptr, size);
7501 if (!rpart)
7502 return NULL_TREE;
7503 ipart = native_interpret_expr (etype, ptr+size, size);
7504 if (!ipart)
7505 return NULL_TREE;
7506 return build_complex (type, rpart, ipart);
7510 /* Subroutine of native_interpret_expr. Interpret the contents of
7511 the buffer PTR of length LEN as a VECTOR_CST of type TYPE.
7512 If the buffer cannot be interpreted, return NULL_TREE. */
7514 static tree
7515 native_interpret_vector (tree type, const unsigned char *ptr, int len)
7517 tree etype, elem, elements;
7518 int i, size, count;
7520 etype = TREE_TYPE (type);
7521 size = GET_MODE_SIZE (TYPE_MODE (etype));
7522 count = TYPE_VECTOR_SUBPARTS (type);
7523 if (size * count > len)
7524 return NULL_TREE;
7526 elements = NULL_TREE;
7527 for (i = count - 1; i >= 0; i--)
7529 elem = native_interpret_expr (etype, ptr+(i*size), size);
7530 if (!elem)
7531 return NULL_TREE;
7532 elements = tree_cons (NULL_TREE, elem, elements);
7534 return build_vector (type, elements);
7538 /* Subroutine of fold_view_convert_expr. Interpret the contents of
7539 the buffer PTR of length LEN as a constant of type TYPE. For
7540 INTEGRAL_TYPE_P we return an INTEGER_CST, for SCALAR_FLOAT_TYPE_P
7541 we return a REAL_CST, etc... If the buffer cannot be interpreted,
7542 return NULL_TREE. */
7544 tree
7545 native_interpret_expr (tree type, const unsigned char *ptr, int len)
7547 switch (TREE_CODE (type))
7549 case INTEGER_TYPE:
7550 case ENUMERAL_TYPE:
7551 case BOOLEAN_TYPE:
7552 return native_interpret_int (type, ptr, len);
7554 case REAL_TYPE:
7555 return native_interpret_real (type, ptr, len);
7557 case COMPLEX_TYPE:
7558 return native_interpret_complex (type, ptr, len);
7560 case VECTOR_TYPE:
7561 return native_interpret_vector (type, ptr, len);
7563 default:
7564 return NULL_TREE;
7569 /* Fold a VIEW_CONVERT_EXPR of a constant expression EXPR to type
7570 TYPE at compile-time. If we're unable to perform the conversion
7571 return NULL_TREE. */
7573 static tree
7574 fold_view_convert_expr (tree type, tree expr)
7576 /* We support up to 512-bit values (for V8DFmode). */
7577 unsigned char buffer[64];
7578 int len;
7580 /* Check that the host and target are sane. */
7581 if (CHAR_BIT != 8 || BITS_PER_UNIT != 8)
7582 return NULL_TREE;
7584 len = native_encode_expr (expr, buffer, sizeof (buffer));
7585 if (len == 0)
7586 return NULL_TREE;
7588 return native_interpret_expr (type, buffer, len);
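/* Illustrative sketch (not part of the build): on a target where float
   and int are both 32 bits wide, folding a VIEW_CONVERT_EXPR of the
   REAL_CST 1.0f yields an INTEGER_CST holding the IEEE-754 image
   0x3f800000.  */
#if 0
static tree
example_view_convert_float_bits (void)
{
  tree f = build_real (float_type_node, dconst1);
  /* Returns NULL_TREE if the encode/interpret pair punted.  */
  return fold_view_convert_expr (integer_type_node, f);
}
#endif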
7591 /* Build an expression for the address of T. Folds away INDIRECT_REF
7592 to avoid confusing the gimplify process. */
7594 tree
7595 build_fold_addr_expr_with_type_loc (location_t loc, tree t, tree ptrtype)
7597 /* The size of the object is not relevant when talking about its address. */
7598 if (TREE_CODE (t) == WITH_SIZE_EXPR)
7599 t = TREE_OPERAND (t, 0);
7601 if (TREE_CODE (t) == INDIRECT_REF)
7603 t = TREE_OPERAND (t, 0);
7605 if (TREE_TYPE (t) != ptrtype)
7607 t = build1 (NOP_EXPR, ptrtype, t);
7608 SET_EXPR_LOCATION (t, loc);
7611 else if (TREE_CODE (t) == MEM_REF
7612 && integer_zerop (TREE_OPERAND (t, 1)))
7613 return TREE_OPERAND (t, 0);
7614 else if (TREE_CODE (t) == VIEW_CONVERT_EXPR)
7616 t = build_fold_addr_expr_loc (loc, TREE_OPERAND (t, 0));
7618 if (TREE_TYPE (t) != ptrtype)
7619 t = fold_convert_loc (loc, ptrtype, t);
7621 else
7623 t = build1 (ADDR_EXPR, ptrtype, t);
7624 SET_EXPR_LOCATION (t, loc);
7627 return t;
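/* For example, building the address of "*p" yields "p" itself (wrapped
   in a NOP_EXPR when the pointer type differs), and the address of a
   zero-offset MEM_REF reuses the MEM_REF's pointer operand.  */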
7630 /* Build an expression for the address of T. */
7632 tree
7633 build_fold_addr_expr_loc (location_t loc, tree t)
7635 tree ptrtype = build_pointer_type (TREE_TYPE (t));
7637 return build_fold_addr_expr_with_type_loc (loc, t, ptrtype);
7640 /* Fold a unary expression of code CODE and type TYPE with operand
7641 OP0. Return the folded expression if folding is successful.
7642 Otherwise, return NULL_TREE. */
7644 tree
7645 fold_unary_loc (location_t loc, enum tree_code code, tree type, tree op0)
7647 tree tem;
7648 tree arg0;
7649 enum tree_code_class kind = TREE_CODE_CLASS (code);
7651 gcc_assert (IS_EXPR_CODE_CLASS (kind)
7652 && TREE_CODE_LENGTH (code) == 1);
7654 arg0 = op0;
7655 if (arg0)
7657 if (CONVERT_EXPR_CODE_P (code)
7658 || code == FLOAT_EXPR || code == ABS_EXPR)
7660 /* Don't use STRIP_NOPS, because signedness of argument type
7661 matters. */
7662 STRIP_SIGN_NOPS (arg0);
7664 else
7666 /* Strip any conversions that don't change the mode. This
7667 is safe for every expression, except for a comparison
7668 expression because its signedness is derived from its
7669 operands.
7671 Note that this is done as an internal manipulation within
7672 the constant folder, in order to find the simplest
7673 representation of the arguments so that their form can be
7674 studied. In any case, the appropriate type conversions
7675 should be put back in the tree that will get out of the
7676 constant folder. */
7677 STRIP_NOPS (arg0);
7681 if (TREE_CODE_CLASS (code) == tcc_unary)
7683 if (TREE_CODE (arg0) == COMPOUND_EXPR)
7684 return build2 (COMPOUND_EXPR, type, TREE_OPERAND (arg0, 0),
7685 fold_build1_loc (loc, code, type,
7686 fold_convert_loc (loc, TREE_TYPE (op0),
7687 TREE_OPERAND (arg0, 1))));
7688 else if (TREE_CODE (arg0) == COND_EXPR)
7690 tree arg01 = TREE_OPERAND (arg0, 1);
7691 tree arg02 = TREE_OPERAND (arg0, 2);
7692 if (! VOID_TYPE_P (TREE_TYPE (arg01)))
7693 arg01 = fold_build1_loc (loc, code, type,
7694 fold_convert_loc (loc,
7695 TREE_TYPE (op0), arg01));
7696 if (! VOID_TYPE_P (TREE_TYPE (arg02)))
7697 arg02 = fold_build1_loc (loc, code, type,
7698 fold_convert_loc (loc,
7699 TREE_TYPE (op0), arg02));
7700 tem = fold_build3_loc (loc, COND_EXPR, type, TREE_OPERAND (arg0, 0),
7701 arg01, arg02);
7703 /* If this was a conversion, and all we did was to move into
7704 inside the COND_EXPR, bring it back out. But leave it if
7705 it is a conversion from integer to integer and the
7706 result precision is no wider than a word since such a
7707 conversion is cheap and may be optimized away by combine,
7708 while it couldn't if it were outside the COND_EXPR. Then return
7709 so we don't get into an infinite recursion loop taking the
7710 conversion out and then back in. */
7712 if ((CONVERT_EXPR_CODE_P (code)
7713 || code == NON_LVALUE_EXPR)
7714 && TREE_CODE (tem) == COND_EXPR
7715 && TREE_CODE (TREE_OPERAND (tem, 1)) == code
7716 && TREE_CODE (TREE_OPERAND (tem, 2)) == code
7717 && ! VOID_TYPE_P (TREE_OPERAND (tem, 1))
7718 && ! VOID_TYPE_P (TREE_OPERAND (tem, 2))
7719 && (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 1), 0))
7720 == TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 2), 0)))
7721 && (! (INTEGRAL_TYPE_P (TREE_TYPE (tem))
7722 && (INTEGRAL_TYPE_P
7723 (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 1), 0))))
7724 && TYPE_PRECISION (TREE_TYPE (tem)) <= BITS_PER_WORD)
7725 || flag_syntax_only))
7727 tem = build1 (code, type,
7728 build3 (COND_EXPR,
7729 TREE_TYPE (TREE_OPERAND
7730 (TREE_OPERAND (tem, 1), 0)),
7731 TREE_OPERAND (tem, 0),
7732 TREE_OPERAND (TREE_OPERAND (tem, 1), 0),
7733 TREE_OPERAND (TREE_OPERAND (tem, 2), 0)));
7734 SET_EXPR_LOCATION (tem, loc);
7736 return tem;
7738 else if (COMPARISON_CLASS_P (arg0))
7740 if (TREE_CODE (type) == BOOLEAN_TYPE)
7742 arg0 = copy_node (arg0);
7743 TREE_TYPE (arg0) = type;
7744 return arg0;
7746 else if (TREE_CODE (type) != INTEGER_TYPE)
7747 return fold_build3_loc (loc, COND_EXPR, type, arg0,
7748 fold_build1_loc (loc, code, type,
7749 integer_one_node),
7750 fold_build1_loc (loc, code, type,
7751 integer_zero_node));
7755 switch (code)
7757 case PAREN_EXPR:
7758 /* Re-association barriers around constants and other re-association
7759 barriers can be removed. */
7760 if (CONSTANT_CLASS_P (op0)
7761 || TREE_CODE (op0) == PAREN_EXPR)
7762 return fold_convert_loc (loc, type, op0);
7763 return NULL_TREE;
7765 CASE_CONVERT:
7766 case FLOAT_EXPR:
7767 case FIX_TRUNC_EXPR:
7768 if (TREE_TYPE (op0) == type)
7769 return op0;
7771 /* If we have (type) (a CMP b) and type is an integral type, return
7772 a new expression involving the new type. */
7773 if (COMPARISON_CLASS_P (op0) && INTEGRAL_TYPE_P (type))
7774 return fold_build2_loc (loc, TREE_CODE (op0), type, TREE_OPERAND (op0, 0),
7775 TREE_OPERAND (op0, 1));
7777 /* Handle cases of two conversions in a row. */
7778 if (CONVERT_EXPR_P (op0))
7780 tree inside_type = TREE_TYPE (TREE_OPERAND (op0, 0));
7781 tree inter_type = TREE_TYPE (op0);
7782 int inside_int = INTEGRAL_TYPE_P (inside_type);
7783 int inside_ptr = POINTER_TYPE_P (inside_type);
7784 int inside_float = FLOAT_TYPE_P (inside_type);
7785 int inside_vec = TREE_CODE (inside_type) == VECTOR_TYPE;
7786 unsigned int inside_prec = TYPE_PRECISION (inside_type);
7787 int inside_unsignedp = TYPE_UNSIGNED (inside_type);
7788 int inter_int = INTEGRAL_TYPE_P (inter_type);
7789 int inter_ptr = POINTER_TYPE_P (inter_type);
7790 int inter_float = FLOAT_TYPE_P (inter_type);
7791 int inter_vec = TREE_CODE (inter_type) == VECTOR_TYPE;
7792 unsigned int inter_prec = TYPE_PRECISION (inter_type);
7793 int inter_unsignedp = TYPE_UNSIGNED (inter_type);
7794 int final_int = INTEGRAL_TYPE_P (type);
7795 int final_ptr = POINTER_TYPE_P (type);
7796 int final_float = FLOAT_TYPE_P (type);
7797 int final_vec = TREE_CODE (type) == VECTOR_TYPE;
7798 unsigned int final_prec = TYPE_PRECISION (type);
7799 int final_unsignedp = TYPE_UNSIGNED (type);
7801 /* In addition to the cases of two conversions in a row
7802 handled below, if we are converting something to its own
7803 type via an object of identical or wider precision, neither
7804 conversion is needed. */
7805 if (TYPE_MAIN_VARIANT (inside_type) == TYPE_MAIN_VARIANT (type)
7806 && (((inter_int || inter_ptr) && final_int)
7807 || (inter_float && final_float))
7808 && inter_prec >= final_prec)
7809 return fold_build1_loc (loc, code, type, TREE_OPERAND (op0, 0));
7811 /* Likewise, if the intermediate and initial types are either both
7812 float or both integer, we don't need the middle conversion if the
7813 former is wider than the latter and doesn't change the signedness
7814 (for integers). Avoid this if the final type is a pointer since
7815 then we sometimes need the middle conversion. Likewise if the
7816 final type has a precision not equal to the size of its mode. */
7817 if (((inter_int && inside_int)
7818 || (inter_float && inside_float)
7819 || (inter_vec && inside_vec))
7820 && inter_prec >= inside_prec
7821 && (inter_float || inter_vec
7822 || inter_unsignedp == inside_unsignedp)
7823 && ! (final_prec != GET_MODE_BITSIZE (TYPE_MODE (type))
7824 && TYPE_MODE (type) == TYPE_MODE (inter_type))
7825 && ! final_ptr
7826 && (! final_vec || inter_prec == inside_prec))
7827 return fold_build1_loc (loc, code, type, TREE_OPERAND (op0, 0));
7829 /* If we have a sign-extension of a zero-extended value, we can
7830 replace that by a single zero-extension. */
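/* For example, (int) (short) (unsigned char) X first zero-extends to
   16 bits and then sign-extends to 32; the sign bit can never be set,
   so this is the same as the single zero-extension
   (int) (unsigned char) X.  */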
7831 if (inside_int && inter_int && final_int
7832 && inside_prec < inter_prec && inter_prec < final_prec
7833 && inside_unsignedp && !inter_unsignedp)
7834 return fold_build1_loc (loc, code, type, TREE_OPERAND (op0, 0));
7836 /* Two conversions in a row are not needed unless:
7837 - some conversion is floating-point (overstrict for now), or
7838 - some conversion is a vector (overstrict for now), or
7839 - the intermediate type is narrower than both initial and
7840 final, or
7841 - the intermediate type and innermost type differ in signedness,
7842 and the outermost type is wider than the intermediate, or
7843 - the initial type is a pointer type and the precisions of the
7844 intermediate and final types differ, or
7845 - the final type is a pointer type and the precisions of the
7846 initial and intermediate types differ. */
7847 if (! inside_float && ! inter_float && ! final_float
7848 && ! inside_vec && ! inter_vec && ! final_vec
7849 && (inter_prec >= inside_prec || inter_prec >= final_prec)
7850 && ! (inside_int && inter_int
7851 && inter_unsignedp != inside_unsignedp
7852 && inter_prec < final_prec)
7853 && ((inter_unsignedp && inter_prec > inside_prec)
7854 == (final_unsignedp && final_prec > inter_prec))
7855 && ! (inside_ptr && inter_prec != final_prec)
7856 && ! (final_ptr && inside_prec != inter_prec)
7857 && ! (final_prec != GET_MODE_BITSIZE (TYPE_MODE (type))
7858 && TYPE_MODE (type) == TYPE_MODE (inter_type)))
7859 return fold_build1_loc (loc, code, type, TREE_OPERAND (op0, 0));
7862 /* Handle (T *)&A.B.C for A being of type T and B and C
7863 living at offset zero. This occurs frequently in
7864 C++ upcasting and then accessing the base. */
7865 if (TREE_CODE (op0) == ADDR_EXPR
7866 && POINTER_TYPE_P (type)
7867 && handled_component_p (TREE_OPERAND (op0, 0)))
7869 HOST_WIDE_INT bitsize, bitpos;
7870 tree offset;
7871 enum machine_mode mode;
7872 int unsignedp, volatilep;
7873 tree base = TREE_OPERAND (op0, 0);
7874 base = get_inner_reference (base, &bitsize, &bitpos, &offset,
7875 &mode, &unsignedp, &volatilep, false);
7876 /* If the reference was to a (constant) zero offset, we can use
7877 the address of the base if it has the same base type
7878 as the result type and the pointer type is unqualified. */
7879 if (! offset && bitpos == 0
7880 && (TYPE_MAIN_VARIANT (TREE_TYPE (type))
7881 == TYPE_MAIN_VARIANT (TREE_TYPE (base)))
7882 && TYPE_QUALS (type) == TYPE_UNQUALIFIED)
7883 return fold_convert_loc (loc, type,
7884 build_fold_addr_expr_loc (loc, base));
7887 if (TREE_CODE (op0) == MODIFY_EXPR
7888 && TREE_CONSTANT (TREE_OPERAND (op0, 1))
7889 /* Detect assigning a bitfield. */
7890 && !(TREE_CODE (TREE_OPERAND (op0, 0)) == COMPONENT_REF
7891 && DECL_BIT_FIELD
7892 (TREE_OPERAND (TREE_OPERAND (op0, 0), 1))))
7894 /* Don't leave an assignment inside a conversion
7895 unless assigning a bitfield. */
7896 tem = fold_build1_loc (loc, code, type, TREE_OPERAND (op0, 1));
7897 /* First do the assignment, then return converted constant. */
7898 tem = build2 (COMPOUND_EXPR, TREE_TYPE (tem), op0, tem);
7899 TREE_NO_WARNING (tem) = 1;
7900 TREE_USED (tem) = 1;
7901 SET_EXPR_LOCATION (tem, loc);
7902 return tem;
7905 /* Convert (T)(x & c) into (T)x & (T)c, if c is an integer
7906 constants (if x has signed type, the sign bit cannot be set
7907 in c). This folds extension into the BIT_AND_EXPR.
7908 ??? We don't do it for BOOLEAN_TYPE or ENUMERAL_TYPE because they
7909 very likely don't have maximal range for their precision and this
7910 transformation effectively doesn't preserve non-maximal ranges. */
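/* For example, with C of type signed char, (unsigned int) (C & 0x7f)
   becomes (unsigned int) C & 0x7f: the mask leaves the sign bit of C
   clear, so extending before or after the AND gives the same bits.  */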
7911 if (TREE_CODE (type) == INTEGER_TYPE
7912 && TREE_CODE (op0) == BIT_AND_EXPR
7913 && TREE_CODE (TREE_OPERAND (op0, 1)) == INTEGER_CST)
7915 tree and_expr = op0;
7916 tree and0 = TREE_OPERAND (and_expr, 0);
7917 tree and1 = TREE_OPERAND (and_expr, 1);
7918 int change = 0;
7920 if (TYPE_UNSIGNED (TREE_TYPE (and_expr))
7921 || (TYPE_PRECISION (type)
7922 <= TYPE_PRECISION (TREE_TYPE (and_expr))))
7923 change = 1;
7924 else if (TYPE_PRECISION (TREE_TYPE (and1))
7925 <= HOST_BITS_PER_WIDE_INT
7926 && host_integerp (and1, 1))
7928 unsigned HOST_WIDE_INT cst;
7930 cst = tree_low_cst (and1, 1);
7931 cst &= (HOST_WIDE_INT) -1
7932 << (TYPE_PRECISION (TREE_TYPE (and1)) - 1);
7933 change = (cst == 0);
7934 #ifdef LOAD_EXTEND_OP
7935 if (change
7936 && !flag_syntax_only
7937 && (LOAD_EXTEND_OP (TYPE_MODE (TREE_TYPE (and0)))
7938 == ZERO_EXTEND))
7940 tree uns = unsigned_type_for (TREE_TYPE (and0));
7941 and0 = fold_convert_loc (loc, uns, and0);
7942 and1 = fold_convert_loc (loc, uns, and1);
7944 #endif
7946 if (change)
7948 tem = force_fit_type_double (type, tree_to_double_int (and1),
7949 0, TREE_OVERFLOW (and1));
7950 return fold_build2_loc (loc, BIT_AND_EXPR, type,
7951 fold_convert_loc (loc, type, and0), tem);
7955 /* Convert (T1)(X p+ Y) into ((T1)X p+ Y), for pointer type,
7956 when one of the new casts will fold away. Conservatively we assume
7957 that this happens when X or Y is NOP_EXPR or Y is INTEGER_CST. */
7958 if (POINTER_TYPE_P (type)
7959 && TREE_CODE (arg0) == POINTER_PLUS_EXPR
7960 && (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
7961 || TREE_CODE (TREE_OPERAND (arg0, 0)) == NOP_EXPR
7962 || TREE_CODE (TREE_OPERAND (arg0, 1)) == NOP_EXPR))
7964 tree arg00 = TREE_OPERAND (arg0, 0);
7965 tree arg01 = TREE_OPERAND (arg0, 1);
7967 return fold_build2_loc (loc,
7968 TREE_CODE (arg0), type,
7969 fold_convert_loc (loc, type, arg00),
7970 fold_convert_loc (loc, sizetype, arg01));
7973 /* Convert (T1)(~(T2)X) into ~(T1)X if T1 and T2 are integral types
7974 of the same precision, and X is an integer type not narrower than
7975 types T1 or T2, i.e. the cast (T2)X isn't an extension. */
7976 if (INTEGRAL_TYPE_P (type)
7977 && TREE_CODE (op0) == BIT_NOT_EXPR
7978 && INTEGRAL_TYPE_P (TREE_TYPE (op0))
7979 && CONVERT_EXPR_P (TREE_OPERAND (op0, 0))
7980 && TYPE_PRECISION (type) == TYPE_PRECISION (TREE_TYPE (op0)))
7982 tem = TREE_OPERAND (TREE_OPERAND (op0, 0), 0);
7983 if (INTEGRAL_TYPE_P (TREE_TYPE (tem))
7984 && TYPE_PRECISION (type) <= TYPE_PRECISION (TREE_TYPE (tem)))
7985 return fold_build1_loc (loc, BIT_NOT_EXPR, type,
7986 fold_convert_loc (loc, type, tem));
7989 /* Convert (T1)(X * Y) into (T1)X * (T1)Y if T1 is narrower than the
7990 type of X and Y (integer types only). */
7991 if (INTEGRAL_TYPE_P (type)
7992 && TREE_CODE (op0) == MULT_EXPR
7993 && INTEGRAL_TYPE_P (TREE_TYPE (op0))
7994 && TYPE_PRECISION (type) < TYPE_PRECISION (TREE_TYPE (op0)))
7996 /* Be careful not to introduce new overflows. */
7997 tree mult_type;
7998 if (TYPE_OVERFLOW_WRAPS (type))
7999 mult_type = type;
8000 else
8001 mult_type = unsigned_type_for (type);
8003 if (TYPE_PRECISION (mult_type) < TYPE_PRECISION (TREE_TYPE (op0)))
8005 tem = fold_build2_loc (loc, MULT_EXPR, mult_type,
8006 fold_convert_loc (loc, mult_type,
8007 TREE_OPERAND (op0, 0)),
8008 fold_convert_loc (loc, mult_type,
8009 TREE_OPERAND (op0, 1)));
8010 return fold_convert_loc (loc, type, tem);
8014 tem = fold_convert_const (code, type, op0);
8015 return tem ? tem : NULL_TREE;
8017 case ADDR_SPACE_CONVERT_EXPR:
8018 if (integer_zerop (arg0))
8019 return fold_convert_const (code, type, arg0);
8020 return NULL_TREE;
8022 case FIXED_CONVERT_EXPR:
8023 tem = fold_convert_const (code, type, arg0);
8024 return tem ? tem : NULL_TREE;
8026 case VIEW_CONVERT_EXPR:
8027 if (TREE_TYPE (op0) == type)
8028 return op0;
8029 if (TREE_CODE (op0) == VIEW_CONVERT_EXPR)
8030 return fold_build1_loc (loc, VIEW_CONVERT_EXPR,
8031 type, TREE_OPERAND (op0, 0));
8032 if (TREE_CODE (op0) == MEM_REF)
8033 return fold_build2_loc (loc, MEM_REF, type,
8034 TREE_OPERAND (op0, 0), TREE_OPERAND (op0, 1));
8036 /* For integral conversions with the same precision or pointer
8037 conversions, use a NOP_EXPR instead. */
8038 if ((INTEGRAL_TYPE_P (type)
8039 || POINTER_TYPE_P (type))
8040 && (INTEGRAL_TYPE_P (TREE_TYPE (op0))
8041 || POINTER_TYPE_P (TREE_TYPE (op0)))
8042 && TYPE_PRECISION (type) == TYPE_PRECISION (TREE_TYPE (op0)))
8043 return fold_convert_loc (loc, type, op0);
8045 /* Strip inner integral conversions that do not change the precision. */
8046 if (CONVERT_EXPR_P (op0)
8047 && (INTEGRAL_TYPE_P (TREE_TYPE (op0))
8048 || POINTER_TYPE_P (TREE_TYPE (op0)))
8049 && (INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (op0, 0)))
8050 || POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (op0, 0))))
8051 && (TYPE_PRECISION (TREE_TYPE (op0))
8052 == TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (op0, 0)))))
8053 return fold_build1_loc (loc, VIEW_CONVERT_EXPR,
8054 type, TREE_OPERAND (op0, 0));
8056 return fold_view_convert_expr (type, op0);
8058 case NEGATE_EXPR:
8059 tem = fold_negate_expr (loc, arg0);
8060 if (tem)
8061 return fold_convert_loc (loc, type, tem);
8062 return NULL_TREE;
8064 case ABS_EXPR:
8065 if (TREE_CODE (arg0) == INTEGER_CST || TREE_CODE (arg0) == REAL_CST)
8066 return fold_abs_const (arg0, type);
8067 else if (TREE_CODE (arg0) == NEGATE_EXPR)
8068 return fold_build1_loc (loc, ABS_EXPR, type, TREE_OPERAND (arg0, 0));
8069 /* Convert fabs((double)float) into (double)fabsf(float). */
8070 else if (TREE_CODE (arg0) == NOP_EXPR
8071 && TREE_CODE (type) == REAL_TYPE)
8073 tree targ0 = strip_float_extensions (arg0);
8074 if (targ0 != arg0)
8075 return fold_convert_loc (loc, type,
8076 fold_build1_loc (loc, ABS_EXPR,
8077 TREE_TYPE (targ0),
8078 targ0));
8080 /* ABS_EXPR<ABS_EXPR<x>> = ABS_EXPR<x> even if flag_wrapv is on. */
8081 else if (TREE_CODE (arg0) == ABS_EXPR)
8082 return arg0;
8083 else if (tree_expr_nonnegative_p (arg0))
8084 return arg0;
8086 /* Strip sign ops from argument. */
8087 if (TREE_CODE (type) == REAL_TYPE)
8089 tem = fold_strip_sign_ops (arg0);
8090 if (tem)
8091 return fold_build1_loc (loc, ABS_EXPR, type,
8092 fold_convert_loc (loc, type, tem));
8094 return NULL_TREE;
8096 case CONJ_EXPR:
8097 if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
8098 return fold_convert_loc (loc, type, arg0);
8099 if (TREE_CODE (arg0) == COMPLEX_EXPR)
8101 tree itype = TREE_TYPE (type);
8102 tree rpart = fold_convert_loc (loc, itype, TREE_OPERAND (arg0, 0));
8103 tree ipart = fold_convert_loc (loc, itype, TREE_OPERAND (arg0, 1));
8104 return fold_build2_loc (loc, COMPLEX_EXPR, type, rpart,
8105 negate_expr (ipart));
8107 if (TREE_CODE (arg0) == COMPLEX_CST)
8109 tree itype = TREE_TYPE (type);
8110 tree rpart = fold_convert_loc (loc, itype, TREE_REALPART (arg0));
8111 tree ipart = fold_convert_loc (loc, itype, TREE_IMAGPART (arg0));
8112 return build_complex (type, rpart, negate_expr (ipart));
8114 if (TREE_CODE (arg0) == CONJ_EXPR)
8115 return fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
8116 return NULL_TREE;
8118 case BIT_NOT_EXPR:
8119 if (TREE_CODE (arg0) == INTEGER_CST)
8120 return fold_not_const (arg0, type);
8121 else if (TREE_CODE (arg0) == BIT_NOT_EXPR)
8122 return fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
8123 /* Convert ~ (-A) to A - 1. */
8124 else if (INTEGRAL_TYPE_P (type) && TREE_CODE (arg0) == NEGATE_EXPR)
8125 return fold_build2_loc (loc, MINUS_EXPR, type,
8126 fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0)),
8127 build_int_cst (type, 1));
8128 /* Convert ~ (A - 1) or ~ (A + -1) to -A. */
8129 else if (INTEGRAL_TYPE_P (type)
8130 && ((TREE_CODE (arg0) == MINUS_EXPR
8131 && integer_onep (TREE_OPERAND (arg0, 1)))
8132 || (TREE_CODE (arg0) == PLUS_EXPR
8133 && integer_all_onesp (TREE_OPERAND (arg0, 1)))))
8134 return fold_build1_loc (loc, NEGATE_EXPR, type,
8135 fold_convert_loc (loc, type,
8136 TREE_OPERAND (arg0, 0)));
8137 /* Convert ~(X ^ Y) to ~X ^ Y or X ^ ~Y if ~X or ~Y simplify. */
8138 else if (TREE_CODE (arg0) == BIT_XOR_EXPR
8139 && (tem = fold_unary_loc (loc, BIT_NOT_EXPR, type,
8140 fold_convert_loc (loc, type,
8141 TREE_OPERAND (arg0, 0)))))
8142 return fold_build2_loc (loc, BIT_XOR_EXPR, type, tem,
8143 fold_convert_loc (loc, type,
8144 TREE_OPERAND (arg0, 1)));
8145 else if (TREE_CODE (arg0) == BIT_XOR_EXPR
8146 && (tem = fold_unary_loc (loc, BIT_NOT_EXPR, type,
8147 fold_convert_loc (loc, type,
8148 TREE_OPERAND (arg0, 1)))))
8149 return fold_build2_loc (loc, BIT_XOR_EXPR, type,
8150 fold_convert_loc (loc, type,
8151 TREE_OPERAND (arg0, 0)), tem);
8152 /* Perform BIT_NOT_EXPR on each element individually. */
8153 else if (TREE_CODE (arg0) == VECTOR_CST)
8155 tree elements = TREE_VECTOR_CST_ELTS (arg0), elem, list = NULL_TREE;
8156 int count = TYPE_VECTOR_SUBPARTS (type), i;
8158 for (i = 0; i < count; i++)
8160 if (elements)
8162 elem = TREE_VALUE (elements);
8163 elem = fold_unary_loc (loc, BIT_NOT_EXPR, TREE_TYPE (type), elem);
8164 if (elem == NULL_TREE)
8165 break;
8166 elements = TREE_CHAIN (elements);
8168 else
8169 elem = build_int_cst (TREE_TYPE (type), -1);
8170 list = tree_cons (NULL_TREE, elem, list);
8172 if (i == count)
8173 return build_vector (type, nreverse (list));
8176 return NULL_TREE;
8178 case TRUTH_NOT_EXPR:
8179 /* The argument to invert_truthvalue must have Boolean type. */
8180 if (TREE_CODE (TREE_TYPE (arg0)) != BOOLEAN_TYPE)
8181 arg0 = fold_convert_loc (loc, boolean_type_node, arg0);
8183 /* Note that the operand of this must be an int
8184 and its values must be 0 or 1.
8185 ("true" is a fixed value perhaps depending on the language,
8186 but we don't handle values other than 1 correctly yet.) */
8187 tem = fold_truth_not_expr (loc, arg0);
8188 if (!tem)
8189 return NULL_TREE;
8190 return fold_convert_loc (loc, type, tem);
8192 case REALPART_EXPR:
8193 if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
8194 return fold_convert_loc (loc, type, arg0);
8195 if (TREE_CODE (arg0) == COMPLEX_EXPR)
8196 return omit_one_operand_loc (loc, type, TREE_OPERAND (arg0, 0),
8197 TREE_OPERAND (arg0, 1));
8198 if (TREE_CODE (arg0) == COMPLEX_CST)
8199 return fold_convert_loc (loc, type, TREE_REALPART (arg0));
8200 if (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
8202 tree itype = TREE_TYPE (TREE_TYPE (arg0));
8203 tem = fold_build2_loc (loc, TREE_CODE (arg0), itype,
8204 fold_build1_loc (loc, REALPART_EXPR, itype,
8205 TREE_OPERAND (arg0, 0)),
8206 fold_build1_loc (loc, REALPART_EXPR, itype,
8207 TREE_OPERAND (arg0, 1)));
8208 return fold_convert_loc (loc, type, tem);
8210 if (TREE_CODE (arg0) == CONJ_EXPR)
8212 tree itype = TREE_TYPE (TREE_TYPE (arg0));
8213 tem = fold_build1_loc (loc, REALPART_EXPR, itype,
8214 TREE_OPERAND (arg0, 0));
8215 return fold_convert_loc (loc, type, tem);
8217 if (TREE_CODE (arg0) == CALL_EXPR)
8219 tree fn = get_callee_fndecl (arg0);
8220 if (fn && DECL_BUILT_IN_CLASS (fn) == BUILT_IN_NORMAL)
8221 switch (DECL_FUNCTION_CODE (fn))
8223 CASE_FLT_FN (BUILT_IN_CEXPI):
8224 fn = mathfn_built_in (type, BUILT_IN_COS);
8225 if (fn)
8226 return build_call_expr_loc (loc, fn, 1, CALL_EXPR_ARG (arg0, 0));
8227 break;
8229 default:
8230 break;
8233 return NULL_TREE;
8235 case IMAGPART_EXPR:
8236 if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
8237 return fold_convert_loc (loc, type, integer_zero_node);
8238 if (TREE_CODE (arg0) == COMPLEX_EXPR)
8239 return omit_one_operand_loc (loc, type, TREE_OPERAND (arg0, 1),
8240 TREE_OPERAND (arg0, 0));
8241 if (TREE_CODE (arg0) == COMPLEX_CST)
8242 return fold_convert_loc (loc, type, TREE_IMAGPART (arg0));
8243 if (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
8245 tree itype = TREE_TYPE (TREE_TYPE (arg0));
8246 tem = fold_build2_loc (loc, TREE_CODE (arg0), itype,
8247 fold_build1_loc (loc, IMAGPART_EXPR, itype,
8248 TREE_OPERAND (arg0, 0)),
8249 fold_build1_loc (loc, IMAGPART_EXPR, itype,
8250 TREE_OPERAND (arg0, 1)));
8251 return fold_convert_loc (loc, type, tem);
8253 if (TREE_CODE (arg0) == CONJ_EXPR)
8255 tree itype = TREE_TYPE (TREE_TYPE (arg0));
8256 tem = fold_build1_loc (loc, IMAGPART_EXPR, itype, TREE_OPERAND (arg0, 0));
8257 return fold_convert_loc (loc, type, negate_expr (tem));
8259 if (TREE_CODE (arg0) == CALL_EXPR)
8261 tree fn = get_callee_fndecl (arg0);
8262 if (fn && DECL_BUILT_IN_CLASS (fn) == BUILT_IN_NORMAL)
8263 switch (DECL_FUNCTION_CODE (fn))
8265 CASE_FLT_FN (BUILT_IN_CEXPI):
8266 fn = mathfn_built_in (type, BUILT_IN_SIN);
8267 if (fn)
8268 return build_call_expr_loc (loc, fn, 1, CALL_EXPR_ARG (arg0, 0));
8269 break;
8271 default:
8272 break;
8275 return NULL_TREE;
8277 case INDIRECT_REF:
8278 /* Fold *&X to X if X is an lvalue. */
8279 if (TREE_CODE (op0) == ADDR_EXPR)
8281 tree op00 = TREE_OPERAND (op0, 0);
8282 if ((TREE_CODE (op00) == VAR_DECL
8283 || TREE_CODE (op00) == PARM_DECL
8284 || TREE_CODE (op00) == RESULT_DECL)
8285 && !TREE_READONLY (op00))
8286 return op00;
8288 return NULL_TREE;
8290 default:
8291 return NULL_TREE;
8292 } /* switch (code) */
8296 /* If the operation was a conversion, do _not_ mark a resulting constant
8297 with TREE_OVERFLOW if the original constant was not. These conversions
8298 have implementation defined behavior and retaining the TREE_OVERFLOW
8299 flag here would confuse later passes such as VRP. */
8300 tree
8301 fold_unary_ignore_overflow_loc (location_t loc, enum tree_code code,
8302 tree type, tree op0)
8304 tree res = fold_unary_loc (loc, code, type, op0);
8305 if (res
8306 && TREE_CODE (res) == INTEGER_CST
8307 && TREE_CODE (op0) == INTEGER_CST
8308 && CONVERT_EXPR_CODE_P (code))
8309 TREE_OVERFLOW (res) = TREE_OVERFLOW (op0);
8311 return res;
8314 /* Fold a binary expression of code CODE and type TYPE with operands
8315 OP0 and OP1, containing either a MIN-MAX or a MAX-MIN combination.
8316 Return the folded expression if folding is successful. Otherwise,
8317 return NULL_TREE. */
8319 static tree
8320 fold_minmax (location_t loc, enum tree_code code, tree type, tree op0, tree op1)
8322 enum tree_code compl_code;
8324 if (code == MIN_EXPR)
8325 compl_code = MAX_EXPR;
8326 else if (code == MAX_EXPR)
8327 compl_code = MIN_EXPR;
8328 else
8329 gcc_unreachable ();
8331 /* MIN (MAX (a, b), b) == b. */
8332 if (TREE_CODE (op0) == compl_code
8333 && operand_equal_p (TREE_OPERAND (op0, 1), op1, 0))
8334 return omit_one_operand_loc (loc, type, op1, TREE_OPERAND (op0, 0));
8336 /* MIN (MAX (b, a), b) == b. */
8337 if (TREE_CODE (op0) == compl_code
8338 && operand_equal_p (TREE_OPERAND (op0, 0), op1, 0)
8339 && reorder_operands_p (TREE_OPERAND (op0, 1), op1))
8340 return omit_one_operand_loc (loc, type, op1, TREE_OPERAND (op0, 1));
8342 /* MIN (a, MAX (a, b)) == a. */
8343 if (TREE_CODE (op1) == compl_code
8344 && operand_equal_p (op0, TREE_OPERAND (op1, 0), 0)
8345 && reorder_operands_p (op0, TREE_OPERAND (op1, 1)))
8346 return omit_one_operand_loc (loc, type, op0, TREE_OPERAND (op1, 1));
8348 /* MIN (a, MAX (b, a)) == a. */
8349 if (TREE_CODE (op1) == compl_code
8350 && operand_equal_p (op0, TREE_OPERAND (op1, 1), 0)
8351 && reorder_operands_p (op0, TREE_OPERAND (op1, 0)))
8352 return omit_one_operand_loc (loc, type, op0, TREE_OPERAND (op1, 0));
8354 return NULL_TREE;
8357 /* Helper that tries to canonicalize the comparison ARG0 CODE ARG1
8358 by changing CODE to reduce the magnitude of constants involved in
8359 ARG0 of the comparison.
8360 Returns a canonicalized comparison tree if a simplification was
8361 possible, otherwise returns NULL_TREE.
8362 Set *STRICT_OVERFLOW_P to true if the canonicalization is only
8363 valid if signed overflow is undefined. */
8365 static tree
8366 maybe_canonicalize_comparison_1 (location_t loc, enum tree_code code, tree type,
8367 tree arg0, tree arg1,
8368 bool *strict_overflow_p)
8370 enum tree_code code0 = TREE_CODE (arg0);
8371 tree t, cst0 = NULL_TREE;
8372 int sgn0;
8373 bool swap = false;
8375 /* Match A +- CST code arg1 and CST code arg1. We can change the
8376 first form only if overflow is undefined. */
8377 if (!((TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0))
8378 /* In principle pointers also have undefined overflow behavior,
8379 but that causes problems elsewhere. */
8380 && !POINTER_TYPE_P (TREE_TYPE (arg0))
8381 && (code0 == MINUS_EXPR
8382 || code0 == PLUS_EXPR)
8383 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
8384 || code0 == INTEGER_CST))
8385 return NULL_TREE;
8387 /* Identify the constant in arg0 and its sign. */
8388 if (code0 == INTEGER_CST)
8389 cst0 = arg0;
8390 else
8391 cst0 = TREE_OPERAND (arg0, 1);
8392 sgn0 = tree_int_cst_sgn (cst0);
8394 /* Overflowed constants and zero will cause problems. */
8395 if (integer_zerop (cst0)
8396 || TREE_OVERFLOW (cst0))
8397 return NULL_TREE;
8399 /* See if we can reduce the magnitude of the constant in
8400 arg0 by changing the comparison code. */
8401 if (code0 == INTEGER_CST)
8403 /* CST <= arg1 -> CST-1 < arg1. */
8404 if (code == LE_EXPR && sgn0 == 1)
8405 code = LT_EXPR;
8406 /* -CST < arg1 -> -CST-1 <= arg1. */
8407 else if (code == LT_EXPR && sgn0 == -1)
8408 code = LE_EXPR;
8409 /* CST > arg1 -> CST-1 >= arg1. */
8410 else if (code == GT_EXPR && sgn0 == 1)
8411 code = GE_EXPR;
8412 /* -CST >= arg1 -> -CST-1 > arg1. */
8413 else if (code == GE_EXPR && sgn0 == -1)
8414 code = GT_EXPR;
8415 else
8416 return NULL_TREE;
8417 /* arg1 code' CST' might be more canonical. */
8418 swap = true;
8420 else
8422 /* A - CST < arg1 -> A - CST-1 <= arg1. */
8423 if (code == LT_EXPR
8424 && code0 == ((sgn0 == -1) ? PLUS_EXPR : MINUS_EXPR))
8425 code = LE_EXPR;
8426 /* A + CST > arg1 -> A + CST-1 >= arg1. */
8427 else if (code == GT_EXPR
8428 && code0 == ((sgn0 == -1) ? MINUS_EXPR : PLUS_EXPR))
8429 code = GE_EXPR;
8430 /* A + CST <= arg1 -> A + CST-1 < arg1. */
8431 else if (code == LE_EXPR
8432 && code0 == ((sgn0 == -1) ? MINUS_EXPR : PLUS_EXPR))
8433 code = LT_EXPR;
8434 /* A - CST >= arg1 -> A - CST-1 > arg1. */
8435 else if (code == GE_EXPR
8436 && code0 == ((sgn0 == -1) ? PLUS_EXPR : MINUS_EXPR))
8437 code = GT_EXPR;
8438 else
8439 return NULL_TREE;
8440 *strict_overflow_p = true;
8443 /* Now build the constant reduced in magnitude. But not if that
8444 would produce one outside of its type's range. */
8445 if (INTEGRAL_TYPE_P (TREE_TYPE (cst0))
8446 && ((sgn0 == 1
8447 && TYPE_MIN_VALUE (TREE_TYPE (cst0))
8448 && tree_int_cst_equal (cst0, TYPE_MIN_VALUE (TREE_TYPE (cst0))))
8449 || (sgn0 == -1
8450 && TYPE_MAX_VALUE (TREE_TYPE (cst0))
8451 && tree_int_cst_equal (cst0, TYPE_MAX_VALUE (TREE_TYPE (cst0))))))
8452 /* We cannot swap the comparison here as that would cause us to
8453 endlessly recurse. */
8454 return NULL_TREE;
8456 t = int_const_binop (sgn0 == -1 ? PLUS_EXPR : MINUS_EXPR,
8457 cst0, build_int_cst (TREE_TYPE (cst0), 1), 0);
8458 if (code0 != INTEGER_CST)
8459 t = fold_build2_loc (loc, code0, TREE_TYPE (arg0), TREE_OPERAND (arg0, 0), t);
8461 /* If swapping might yield a more canonical form, do so. */
8462 if (swap)
8463 return fold_build2_loc (loc, swap_tree_comparison (code), type, arg1, t);
8464 else
8465 return fold_build2_loc (loc, code, type, t, arg1);
8468 /* Canonicalize the comparison ARG0 CODE ARG1 with type TYPE with undefined
8469 overflow further. Try to decrease the magnitude of constants involved
8470 by changing LE_EXPR and GE_EXPR to LT_EXPR and GT_EXPR or vice versa
8471 and put sole constants at the second argument position.
8472 Returns the canonicalized tree if changed, otherwise NULL_TREE. */
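/* For example, "5 <= X" is rewritten to "X > 4", and "X - 5 < Y" to
   "X - 4 <= Y"; the latter is valid only when signed overflow is
   undefined, which is why maybe_canonicalize_comparison_1 reports it
   through *STRICT_OVERFLOW_P and a warning may be issued below.  */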
8474 static tree
8475 maybe_canonicalize_comparison (location_t loc, enum tree_code code, tree type,
8476 tree arg0, tree arg1)
8478 tree t;
8479 bool strict_overflow_p;
8480 const char * const warnmsg = G_("assuming signed overflow does not occur "
8481 "when reducing constant in comparison");
8483 /* Try canonicalization by simplifying arg0. */
8484 strict_overflow_p = false;
8485 t = maybe_canonicalize_comparison_1 (loc, code, type, arg0, arg1,
8486 &strict_overflow_p);
8487 if (t)
8489 if (strict_overflow_p)
8490 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_MAGNITUDE);
8491 return t;
8494 /* Try canonicalization by simplifying arg1 using the swapped
8495 comparison. */
8496 code = swap_tree_comparison (code);
8497 strict_overflow_p = false;
8498 t = maybe_canonicalize_comparison_1 (loc, code, type, arg1, arg0,
8499 &strict_overflow_p);
8500 if (t && strict_overflow_p)
8501 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_MAGNITUDE);
8502 return t;
8505 /* Return whether BASE + OFFSET + BITPOS may wrap around the address
8506 space. This is used to avoid issuing overflow warnings for
8507 expressions like &p->x which cannot wrap. */
8509 static bool
8510 pointer_may_wrap_p (tree base, tree offset, HOST_WIDE_INT bitpos)
8512 unsigned HOST_WIDE_INT offset_low, total_low;
8513 HOST_WIDE_INT size, offset_high, total_high;
8515 if (!POINTER_TYPE_P (TREE_TYPE (base)))
8516 return true;
8518 if (bitpos < 0)
8519 return true;
8521 if (offset == NULL_TREE)
8523 offset_low = 0;
8524 offset_high = 0;
8526 else if (TREE_CODE (offset) != INTEGER_CST || TREE_OVERFLOW (offset))
8527 return true;
8528 else
8530 offset_low = TREE_INT_CST_LOW (offset);
8531 offset_high = TREE_INT_CST_HIGH (offset);
8534 if (add_double_with_sign (offset_low, offset_high,
8535 bitpos / BITS_PER_UNIT, 0,
8536 &total_low, &total_high,
8537 true))
8538 return true;
8540 if (total_high != 0)
8541 return true;
8543 size = int_size_in_bytes (TREE_TYPE (TREE_TYPE (base)));
8544 if (size <= 0)
8545 return true;
8547 /* We can do slightly better for SIZE if we have an ADDR_EXPR of an
8548 array. */
8549 if (TREE_CODE (base) == ADDR_EXPR)
8551 HOST_WIDE_INT base_size;
8553 base_size = int_size_in_bytes (TREE_TYPE (TREE_OPERAND (base, 0)));
8554 if (base_size > 0 && size < base_size)
8555 size = base_size;
8558 return total_low > (unsigned HOST_WIDE_INT) size;
8561 /* Subroutine of fold_binary. This routine performs all of the
8562 transformations that are common to the equality/inequality
8563 operators (EQ_EXPR and NE_EXPR) and the ordering operators
8564 (LT_EXPR, LE_EXPR, GE_EXPR and GT_EXPR). Callers other than
8565 fold_binary itself should call fold_binary instead. Fold a comparison with
8566 tree code CODE and type TYPE with operands OP0 and OP1. Return
8567 the folded comparison or NULL_TREE. */
8569 static tree
8570 fold_comparison (location_t loc, enum tree_code code, tree type,
8571 tree op0, tree op1)
8573 tree arg0, arg1, tem;
8575 arg0 = op0;
8576 arg1 = op1;
8578 STRIP_SIGN_NOPS (arg0);
8579 STRIP_SIGN_NOPS (arg1);
8581 tem = fold_relational_const (code, type, arg0, arg1);
8582 if (tem != NULL_TREE)
8583 return tem;
8585 /* If one arg is a real or integer constant, put it last. */
8586 if (tree_swap_operands_p (arg0, arg1, true))
8587 return fold_build2_loc (loc, swap_tree_comparison (code), type, op1, op0);
8589 /* Transform comparisons of the form X +- C1 CMP C2 to X CMP C2 +- C1. */
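/* For example, "X + 4 < 10" becomes "X < 6" when the type of X has
   undefined signed overflow; if computing C2 -+ C1 overflows, the
   comparison degenerates to a constant result below.  */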
8590 if ((TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
8591 && (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
8592 && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1))
8593 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
8594 && (TREE_CODE (arg1) == INTEGER_CST
8595 && !TREE_OVERFLOW (arg1)))
8597 tree const1 = TREE_OPERAND (arg0, 1);
8598 tree const2 = arg1;
8599 tree variable = TREE_OPERAND (arg0, 0);
8600 tree lhs;
8601 int lhs_add;
8602 lhs_add = TREE_CODE (arg0) != PLUS_EXPR;
8604 lhs = fold_build2_loc (loc, lhs_add ? PLUS_EXPR : MINUS_EXPR,
8605 TREE_TYPE (arg1), const2, const1);
8607 /* If the constant operation overflowed this can be
8608 simplified as a comparison against INT_MAX/INT_MIN. */
8609 if (TREE_CODE (lhs) == INTEGER_CST
8610 && TREE_OVERFLOW (lhs))
8612 int const1_sgn = tree_int_cst_sgn (const1);
8613 enum tree_code code2 = code;
8615 /* Get the sign of the constant on the lhs if the
8616 operation were VARIABLE + CONST1. */
8617 if (TREE_CODE (arg0) == MINUS_EXPR)
8618 const1_sgn = -const1_sgn;
8620 /* The sign of the constant determines if we overflowed
8621 INT_MAX (const1_sgn == -1) or INT_MIN (const1_sgn == 1).
8622 Canonicalize to the INT_MIN overflow by swapping the comparison
8623 if necessary. */
8624 if (const1_sgn == -1)
8625 code2 = swap_tree_comparison (code);
8627 /* We now can look at the canonicalized case
8628 VARIABLE + 1 CODE2 INT_MIN
8629 and decide on the result. */
8630 if (code2 == LT_EXPR
8631 || code2 == LE_EXPR
8632 || code2 == EQ_EXPR)
8633 return omit_one_operand_loc (loc, type, boolean_false_node, variable);
8634 else if (code2 == NE_EXPR
8635 || code2 == GE_EXPR
8636 || code2 == GT_EXPR)
8637 return omit_one_operand_loc (loc, type, boolean_true_node, variable);
8640 if (TREE_CODE (lhs) == TREE_CODE (arg1)
8641 && (TREE_CODE (lhs) != INTEGER_CST
8642 || !TREE_OVERFLOW (lhs)))
8644 fold_overflow_warning ("assuming signed overflow does not occur "
8645 "when changing X +- C1 cmp C2 to "
8646 "X cmp C1 +- C2",
8647 WARN_STRICT_OVERFLOW_COMPARISON);
8648 return fold_build2_loc (loc, code, type, variable, lhs);
8652 /* For comparisons of pointers we can decompose it to a compile time
8653 comparison of the base objects and the offsets into the object.
8654 This requires at least one operand being an ADDR_EXPR or a
8655 POINTER_PLUS_EXPR to do more than the operand_equal_p test below. */
8656 if (POINTER_TYPE_P (TREE_TYPE (arg0))
8657 && (TREE_CODE (arg0) == ADDR_EXPR
8658 || TREE_CODE (arg1) == ADDR_EXPR
8659 || TREE_CODE (arg0) == POINTER_PLUS_EXPR
8660 || TREE_CODE (arg1) == POINTER_PLUS_EXPR))
8662 tree base0, base1, offset0 = NULL_TREE, offset1 = NULL_TREE;
8663 HOST_WIDE_INT bitsize, bitpos0 = 0, bitpos1 = 0;
8664 enum machine_mode mode;
8665 int volatilep, unsignedp;
8666 bool indirect_base0 = false, indirect_base1 = false;
8668 /* Get base and offset for the access. Strip ADDR_EXPR for
8669 get_inner_reference, but put it back by stripping INDIRECT_REF
8670 off the base object if possible. indirect_baseN will be true
8671 if baseN is not an address but refers to the object itself. */
8672 base0 = arg0;
8673 if (TREE_CODE (arg0) == ADDR_EXPR)
8675 base0 = get_inner_reference (TREE_OPERAND (arg0, 0),
8676 &bitsize, &bitpos0, &offset0, &mode,
8677 &unsignedp, &volatilep, false);
8678 if (TREE_CODE (base0) == INDIRECT_REF)
8679 base0 = TREE_OPERAND (base0, 0);
8680 else
8681 indirect_base0 = true;
8683 else if (TREE_CODE (arg0) == POINTER_PLUS_EXPR)
8685 base0 = TREE_OPERAND (arg0, 0);
8686 STRIP_SIGN_NOPS (base0);
8687 if (TREE_CODE (base0) == ADDR_EXPR)
8689 base0 = TREE_OPERAND (base0, 0);
8690 indirect_base0 = true;
8692 offset0 = TREE_OPERAND (arg0, 1);
8695 base1 = arg1;
8696 if (TREE_CODE (arg1) == ADDR_EXPR)
8698 base1 = get_inner_reference (TREE_OPERAND (arg1, 0),
8699 &bitsize, &bitpos1, &offset1, &mode,
8700 &unsignedp, &volatilep, false);
8701 if (TREE_CODE (base1) == INDIRECT_REF)
8702 base1 = TREE_OPERAND (base1, 0);
8703 else
8704 indirect_base1 = true;
8706 else if (TREE_CODE (arg1) == POINTER_PLUS_EXPR)
8708 base1 = TREE_OPERAND (arg1, 0);
8709 STRIP_SIGN_NOPS (base1);
8710 if (TREE_CODE (base1) == ADDR_EXPR)
8712 base1 = TREE_OPERAND (base1, 0);
8713 indirect_base1 = true;
8715 offset1 = TREE_OPERAND (arg1, 1);
8718 /* A local variable can never be pointed to by
8719 the default SSA name of an incoming parameter. */
8720 if ((TREE_CODE (arg0) == ADDR_EXPR
8721 && indirect_base0
8722 && TREE_CODE (base0) == VAR_DECL
8723 && auto_var_in_fn_p (base0, current_function_decl)
8724 && !indirect_base1
8725 && TREE_CODE (base1) == SSA_NAME
8726 && TREE_CODE (SSA_NAME_VAR (base1)) == PARM_DECL
8727 && SSA_NAME_IS_DEFAULT_DEF (base1))
8728 || (TREE_CODE (arg1) == ADDR_EXPR
8729 && indirect_base1
8730 && TREE_CODE (base1) == VAR_DECL
8731 && auto_var_in_fn_p (base1, current_function_decl)
8732 && !indirect_base0
8733 && TREE_CODE (base0) == SSA_NAME
8734 && TREE_CODE (SSA_NAME_VAR (base0)) == PARM_DECL
8735 && SSA_NAME_IS_DEFAULT_DEF (base0)))
8737 if (code == NE_EXPR)
8738 return constant_boolean_node (1, type);
8739 else if (code == EQ_EXPR)
8740 return constant_boolean_node (0, type);
8742 /* If we have equivalent bases we might be able to simplify. */
8743 else if (indirect_base0 == indirect_base1
8744 && operand_equal_p (base0, base1, 0))
8746 /* We can fold this expression to a constant if the non-constant
8747 offset parts are equal. */
8748 if ((offset0 == offset1
8749 || (offset0 && offset1
8750 && operand_equal_p (offset0, offset1, 0)))
8751 && (code == EQ_EXPR
8752 || code == NE_EXPR
8753 || POINTER_TYPE_OVERFLOW_UNDEFINED))
8756 if (code != EQ_EXPR
8757 && code != NE_EXPR
8758 && bitpos0 != bitpos1
8759 && (pointer_may_wrap_p (base0, offset0, bitpos0)
8760 || pointer_may_wrap_p (base1, offset1, bitpos1)))
8761 fold_overflow_warning (("assuming pointer wraparound does not "
8762 "occur when comparing P +- C1 with "
8763 "P +- C2"),
8764 WARN_STRICT_OVERFLOW_CONDITIONAL);
8766 switch (code)
8768 case EQ_EXPR:
8769 return constant_boolean_node (bitpos0 == bitpos1, type);
8770 case NE_EXPR:
8771 return constant_boolean_node (bitpos0 != bitpos1, type);
8772 case LT_EXPR:
8773 return constant_boolean_node (bitpos0 < bitpos1, type);
8774 case LE_EXPR:
8775 return constant_boolean_node (bitpos0 <= bitpos1, type);
8776 case GE_EXPR:
8777 return constant_boolean_node (bitpos0 >= bitpos1, type);
8778 case GT_EXPR:
8779 return constant_boolean_node (bitpos0 > bitpos1, type);
8780 default:;
8783 /* We can simplify the comparison to a comparison of the variable
8784 offset parts if the constant offset parts are equal.
8785 Be careful to use signed size type here because otherwise we
8786 mess with array offsets in the wrong way. This is possible
8787 because pointer arithmetic is restricted to remain within an
8788 object and overflow on pointer differences is undefined as of
8789 6.5.6/8 and /9 with respect to the signed ptrdiff_t. */
8790 else if (bitpos0 == bitpos1
8791 && ((code == EQ_EXPR || code == NE_EXPR)
8792 || POINTER_TYPE_OVERFLOW_UNDEFINED))
8794 /* By converting to signed size type we cover both middle-end
8795 pointer arithmetic, which operates on unsigned types as wide
8796 as size type, and ARRAY_REF offsets, which are properly sign-
8797 or zero-extended from their type in case it is narrower than
8798 size type. */
8799 if (offset0 == NULL_TREE)
8800 offset0 = build_int_cst (ssizetype, 0);
8801 else
8802 offset0 = fold_convert_loc (loc, ssizetype, offset0);
8803 if (offset1 == NULL_TREE)
8804 offset1 = build_int_cst (ssizetype, 0);
8805 else
8806 offset1 = fold_convert_loc (loc, ssizetype, offset1);
8808 if (code != EQ_EXPR
8809 && code != NE_EXPR
8810 && (pointer_may_wrap_p (base0, offset0, bitpos0)
8811 || pointer_may_wrap_p (base1, offset1, bitpos1)))
8812 fold_overflow_warning (("assuming pointer wraparound does not "
8813 "occur when comparing P +- C1 with "
8814 "P +- C2"),
8815 WARN_STRICT_OVERFLOW_COMPARISON);
8817 return fold_build2_loc (loc, code, type, offset0, offset1);
8820 /* For non-equal bases we can simplify if they are addresses
8821 of local binding decls or constants. */
8822 else if (indirect_base0 && indirect_base1
8823 /* We know that !operand_equal_p (base0, base1, 0)
8824 because the if condition was false. But make
8825 sure two decls are not the same. */
8826 && base0 != base1
8827 && TREE_CODE (arg0) == ADDR_EXPR
8828 && TREE_CODE (arg1) == ADDR_EXPR
8829 && (((TREE_CODE (base0) == VAR_DECL
8830 || TREE_CODE (base0) == PARM_DECL)
8831 && (targetm.binds_local_p (base0)
8832 || CONSTANT_CLASS_P (base1)))
8833 || CONSTANT_CLASS_P (base0))
8834 && (((TREE_CODE (base1) == VAR_DECL
8835 || TREE_CODE (base1) == PARM_DECL)
8836 && (targetm.binds_local_p (base1)
8837 || CONSTANT_CLASS_P (base0)))
8838 || CONSTANT_CLASS_P (base1)))
8840 if (code == EQ_EXPR)
8841 return omit_two_operands_loc (loc, type, boolean_false_node,
8842 arg0, arg1);
8843 else if (code == NE_EXPR)
8844 return omit_two_operands_loc (loc, type, boolean_true_node,
8845 arg0, arg1);
8847 /* For equal offsets we can simplify to a comparison of the
8848 base addresses. */
8849 else if (bitpos0 == bitpos1
8850 && (indirect_base0
8851 ? base0 != TREE_OPERAND (arg0, 0) : base0 != arg0)
8852 && (indirect_base1
8853 ? base1 != TREE_OPERAND (arg1, 0) : base1 != arg1)
8854 && ((offset0 == offset1)
8855 || (offset0 && offset1
8856 && operand_equal_p (offset0, offset1, 0))))
8858 if (indirect_base0)
8859 base0 = build_fold_addr_expr_loc (loc, base0);
8860 if (indirect_base1)
8861 base1 = build_fold_addr_expr_loc (loc, base1);
8862 return fold_build2_loc (loc, code, type, base0, base1);
8866 /* Transform comparisons of the form X +- C1 CMP Y +- C2 to
8867 X CMP Y +- C2 +- C1 for signed X, Y. This is valid if
8868 the resulting offset is smaller in absolute value than the
8869 original one. */
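/* For example, "X + 2 < Y + 5" becomes "X < Y + 3"; the combined
   constant 3 is smaller in absolute value than the original 5, so no
   new overflow is introduced.  */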
8870 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0))
8871 && (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
8872 && (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
8873 && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1)))
8874 && (TREE_CODE (arg1) == PLUS_EXPR || TREE_CODE (arg1) == MINUS_EXPR)
8875 && (TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
8876 && !TREE_OVERFLOW (TREE_OPERAND (arg1, 1))))
8878 tree const1 = TREE_OPERAND (arg0, 1);
8879 tree const2 = TREE_OPERAND (arg1, 1);
8880 tree variable1 = TREE_OPERAND (arg0, 0);
8881 tree variable2 = TREE_OPERAND (arg1, 0);
8882 tree cst;
8883 const char * const warnmsg = G_("assuming signed overflow does not "
8884 "occur when combining constants around "
8885 "a comparison");
8887 /* Put the constant on the side where it doesn't overflow and is
8888 of lower absolute value than before. */
8889 cst = int_const_binop (TREE_CODE (arg0) == TREE_CODE (arg1)
8890 ? MINUS_EXPR : PLUS_EXPR,
8891 const2, const1, 0);
8892 if (!TREE_OVERFLOW (cst)
8893 && tree_int_cst_compare (const2, cst) == tree_int_cst_sgn (const2))
8895 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_COMPARISON);
8896 return fold_build2_loc (loc, code, type,
8897 variable1,
8898 fold_build2_loc (loc,
8899 TREE_CODE (arg1), TREE_TYPE (arg1),
8900 variable2, cst));
8903 cst = int_const_binop (TREE_CODE (arg0) == TREE_CODE (arg1)
8904 ? MINUS_EXPR : PLUS_EXPR,
8905 const1, const2, 0);
8906 if (!TREE_OVERFLOW (cst)
8907 && tree_int_cst_compare (const1, cst) == tree_int_cst_sgn (const1))
8909 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_COMPARISON);
8910 return fold_build2_loc (loc, code, type,
8911 fold_build2_loc (loc, TREE_CODE (arg0), TREE_TYPE (arg0),
8912 variable1, cst),
8913 variable2);
8917 /* Transform comparisons of the form X * C1 CMP 0 to X CMP 0 in the
8918 signed arithmetic case. That form is created by the compiler
8919 often enough for folding it to be of value. One example is in
8920 computing loop trip counts after Operator Strength Reduction. */
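/* For example, "X * 4 > 0" becomes "X > 0", and "X * -2 > 0" becomes
   "X < 0", since a negative multiplier flips the sense of the
   comparison.  */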
8921 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0))
8922 && TREE_CODE (arg0) == MULT_EXPR
8923 && (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
8924 && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1)))
8925 && integer_zerop (arg1))
8927 tree const1 = TREE_OPERAND (arg0, 1);
8928 tree const2 = arg1; /* zero */
8929 tree variable1 = TREE_OPERAND (arg0, 0);
8930 enum tree_code cmp_code = code;
8932 /* Handle unfolded multiplication by zero. */
8933 if (integer_zerop (const1))
8934 return fold_build2_loc (loc, cmp_code, type, const1, const2);
8936 fold_overflow_warning (("assuming signed overflow does not occur when "
8937 "eliminating multiplication in comparison "
8938 "with zero"),
8939 WARN_STRICT_OVERFLOW_COMPARISON);
8941 /* If const1 is negative we swap the sense of the comparison. */
8942 if (tree_int_cst_sgn (const1) < 0)
8943 cmp_code = swap_tree_comparison (cmp_code);
8945 return fold_build2_loc (loc, cmp_code, type, variable1, const2);
8948 tem = maybe_canonicalize_comparison (loc, code, type, op0, op1);
8949 if (tem)
8950 return tem;
8952 if (FLOAT_TYPE_P (TREE_TYPE (arg0)))
8954 tree targ0 = strip_float_extensions (arg0);
8955 tree targ1 = strip_float_extensions (arg1);
8956 tree newtype = TREE_TYPE (targ0);
8958 if (TYPE_PRECISION (TREE_TYPE (targ1)) > TYPE_PRECISION (newtype))
8959 newtype = TREE_TYPE (targ1);
8961 /* Fold (double)float1 CMP (double)float2 into float1 CMP float2. */
8962 if (TYPE_PRECISION (newtype) < TYPE_PRECISION (TREE_TYPE (arg0)))
8963 return fold_build2_loc (loc, code, type,
8964 fold_convert_loc (loc, newtype, targ0),
8965 fold_convert_loc (loc, newtype, targ1));
8967 /* (-a) CMP (-b) -> b CMP a */
8968 if (TREE_CODE (arg0) == NEGATE_EXPR
8969 && TREE_CODE (arg1) == NEGATE_EXPR)
8970 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg1, 0),
8971 TREE_OPERAND (arg0, 0));
8973 if (TREE_CODE (arg1) == REAL_CST)
8975 REAL_VALUE_TYPE cst;
8976 cst = TREE_REAL_CST (arg1);
8978 /* (-a) CMP CST -> a swap(CMP) (-CST) */
8979 if (TREE_CODE (arg0) == NEGATE_EXPR)
8980 return fold_build2_loc (loc, swap_tree_comparison (code), type,
8981 TREE_OPERAND (arg0, 0),
8982 build_real (TREE_TYPE (arg1),
8983 real_value_negate (&cst)));
8985 /* IEEE doesn't distinguish +0 and -0 in comparisons. */
8986 /* a CMP (-0) -> a CMP 0 */
8987 if (REAL_VALUE_MINUS_ZERO (cst))
8988 return fold_build2_loc (loc, code, type, arg0,
8989 build_real (TREE_TYPE (arg1), dconst0));
8991 /* x != NaN is always true, other ops are always false. */
8992 if (REAL_VALUE_ISNAN (cst)
8993 && ! HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg1))))
8995 tem = (code == NE_EXPR) ? integer_one_node : integer_zero_node;
8996 return omit_one_operand_loc (loc, type, tem, arg0);
8999 /* Fold comparisons against infinity. */
9000 if (REAL_VALUE_ISINF (cst)
9001 && MODE_HAS_INFINITIES (TYPE_MODE (TREE_TYPE (arg1))))
9003 tem = fold_inf_compare (loc, code, type, arg0, arg1);
9004 if (tem != NULL_TREE)
9005 return tem;
9009 /* If this is a comparison of a real constant with a PLUS_EXPR
9010 or a MINUS_EXPR of a real constant, we can convert it into a
9011 comparison with a revised real constant as long as no overflow
9012 occurs when unsafe_math_optimizations are enabled. */
9013 if (flag_unsafe_math_optimizations
9014 && TREE_CODE (arg1) == REAL_CST
9015 && (TREE_CODE (arg0) == PLUS_EXPR
9016 || TREE_CODE (arg0) == MINUS_EXPR)
9017 && TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
9018 && 0 != (tem = const_binop (TREE_CODE (arg0) == PLUS_EXPR
9019 ? MINUS_EXPR : PLUS_EXPR,
9020 arg1, TREE_OPERAND (arg0, 1)))
9021 && !TREE_OVERFLOW (tem))
9022 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0), tem);
9024 /* Likewise, we can simplify a comparison of a real constant with
9025 a MINUS_EXPR whose first operand is also a real constant, i.e.
9026 (c1 - x) < c2 becomes x > c1-c2. Reordering is allowed on
9027 floating-point types only if -fassociative-math is set. */
9028 if (flag_associative_math
9029 && TREE_CODE (arg1) == REAL_CST
9030 && TREE_CODE (arg0) == MINUS_EXPR
9031 && TREE_CODE (TREE_OPERAND (arg0, 0)) == REAL_CST
9032 && 0 != (tem = const_binop (MINUS_EXPR, TREE_OPERAND (arg0, 0),
9033 arg1))
9034 && !TREE_OVERFLOW (tem))
9035 return fold_build2_loc (loc, swap_tree_comparison (code), type,
9036 TREE_OPERAND (arg0, 1), tem);
9038 /* Fold comparisons against built-in math functions. */
9039 if (TREE_CODE (arg1) == REAL_CST
9040 && flag_unsafe_math_optimizations
9041 && ! flag_errno_math)
9043 enum built_in_function fcode = builtin_mathfn_code (arg0);
9045 if (fcode != END_BUILTINS)
9047 tem = fold_mathfn_compare (loc, fcode, code, type, arg0, arg1);
9048 if (tem != NULL_TREE)
9049 return tem;
9054 if (TREE_CODE (TREE_TYPE (arg0)) == INTEGER_TYPE
9055 && CONVERT_EXPR_P (arg0))
9057 /* If we are widening one operand of an integer comparison,
9058 see if the other operand is similarly being widened. Perhaps we
9059 can do the comparison in the narrower type. */
9060 tem = fold_widened_comparison (loc, code, type, arg0, arg1);
9061 if (tem)
9062 return tem;
9064 /* Or if we are changing signedness. */
9065 tem = fold_sign_changed_comparison (loc, code, type, arg0, arg1);
9066 if (tem)
9067 return tem;
9070 /* If this is comparing a constant with a MIN_EXPR or a MAX_EXPR of a
9071 constant, we can simplify it. */
9072 if (TREE_CODE (arg1) == INTEGER_CST
9073 && (TREE_CODE (arg0) == MIN_EXPR
9074 || TREE_CODE (arg0) == MAX_EXPR)
9075 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
9077 tem = optimize_minmax_comparison (loc, code, type, op0, op1);
9078 if (tem)
9079 return tem;
9082 /* Simplify comparison of something with itself. (For IEEE
9083 floating-point, we can only do some of these simplifications.) */
9084 if (operand_equal_p (arg0, arg1, 0))
9086 switch (code)
9088 case EQ_EXPR:
9089 if (! FLOAT_TYPE_P (TREE_TYPE (arg0))
9090 || ! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
9091 return constant_boolean_node (1, type);
9092 break;
9094 case GE_EXPR:
9095 case LE_EXPR:
9096 if (! FLOAT_TYPE_P (TREE_TYPE (arg0))
9097 || ! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
9098 return constant_boolean_node (1, type);
9099 return fold_build2_loc (loc, EQ_EXPR, type, arg0, arg1);
9101 case NE_EXPR:
9102 /* For NE, we can only do this simplification if integer
9103 or we don't honor IEEE floating point NaNs. */
9104 if (FLOAT_TYPE_P (TREE_TYPE (arg0))
9105 && HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
9106 break;
9107 /* ... fall through ... */
9108 case GT_EXPR:
9109 case LT_EXPR:
9110 return constant_boolean_node (0, type);
9111 default:
9112 gcc_unreachable ();
9116 /* If we are comparing an expression that just has comparisons
9117 of two integer values, arithmetic expressions of those comparisons,
9118 and constants, we can simplify it. There are only three cases
9119 to check: the two values can either be equal, the first can be
9120 greater, or the second can be greater. Fold the expression for
9121 those three values. Since each value must be 0 or 1, we have
9122 eight possibilities, each of which corresponds to the constant 0
9123 or 1 or one of the six possible comparisons.
9125 This handles common cases like (a > b) == 0 but also handles
9126 expressions like ((x > y) - (y > x)) > 0, which supposedly
9127 occur in macroized code. */
9129 if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg0) != INTEGER_CST)
9131 tree cval1 = 0, cval2 = 0;
9132 int save_p = 0;
9134 if (twoval_comparison_p (arg0, &cval1, &cval2, &save_p)
9135 /* Don't handle degenerate cases here; they should already
9136 have been handled anyway. */
9137 && cval1 != 0 && cval2 != 0
9138 && ! (TREE_CONSTANT (cval1) && TREE_CONSTANT (cval2))
9139 && TREE_TYPE (cval1) == TREE_TYPE (cval2)
9140 && INTEGRAL_TYPE_P (TREE_TYPE (cval1))
9141 && TYPE_MAX_VALUE (TREE_TYPE (cval1))
9142 && TYPE_MAX_VALUE (TREE_TYPE (cval2))
9143 && ! operand_equal_p (TYPE_MIN_VALUE (TREE_TYPE (cval1)),
9144 TYPE_MAX_VALUE (TREE_TYPE (cval2)), 0))
9146 tree maxval = TYPE_MAX_VALUE (TREE_TYPE (cval1));
9147 tree minval = TYPE_MIN_VALUE (TREE_TYPE (cval1));
9149 /* We can't just pass T to eval_subst in case cval1 or cval2
9150 was the same as ARG1. */
9152 tree high_result
9153 = fold_build2_loc (loc, code, type,
9154 eval_subst (loc, arg0, cval1, maxval,
9155 cval2, minval),
9156 arg1);
9157 tree equal_result
9158 = fold_build2_loc (loc, code, type,
9159 eval_subst (loc, arg0, cval1, maxval,
9160 cval2, maxval),
9161 arg1);
9162 tree low_result
9163 = fold_build2_loc (loc, code, type,
9164 eval_subst (loc, arg0, cval1, minval,
9165 cval2, maxval),
9166 arg1);
9168 /* All three of these results should be 0 or 1. Confirm they are.
9169 Then use those values to select the proper code to use. */
9171 if (TREE_CODE (high_result) == INTEGER_CST
9172 && TREE_CODE (equal_result) == INTEGER_CST
9173 && TREE_CODE (low_result) == INTEGER_CST)
9175 /* Make a 3-bit mask with the high-order bit being the
9176 value for `>', the next for '=', and the low for '<'. */
9177 switch ((integer_onep (high_result) * 4)
9178 + (integer_onep (equal_result) * 2)
9179 + integer_onep (low_result))
9181 case 0:
9182 /* Always false. */
9183 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
9184 case 1:
9185 code = LT_EXPR;
9186 break;
9187 case 2:
9188 code = EQ_EXPR;
9189 break;
9190 case 3:
9191 code = LE_EXPR;
9192 break;
9193 case 4:
9194 code = GT_EXPR;
9195 break;
9196 case 5:
9197 code = NE_EXPR;
9198 break;
9199 case 6:
9200 code = GE_EXPR;
9201 break;
9202 case 7:
9203 /* Always true. */
9204 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
9207 if (save_p)
9209 tem = save_expr (build2 (code, type, cval1, cval2));
9210 SET_EXPR_LOCATION (tem, loc);
9211 return tem;
9213 return fold_build2_loc (loc, code, type, cval1, cval2);
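/* Editorial illustration, not part of fold-const.c: a sketch of the
   kind of expression the three eval_subst evaluations above classify.
   The function name is hypothetical.  */
#if 0
static int
example_twoval (int a, int b)
{
  /* Evaluated at a > b, a == b and a < b this yields 1, 0, 0,
     i.e. mask 4, so the whole expression folds to a > b.
     Likewise (a > b) == 0 folds to a <= b.  */
  return ((a > b) - (b > a)) > 0;
}
#endif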
9218 /* We can fold X/C1 op C2 where C1 and C2 are integer constants
9219 into a single range test. */
9220 if ((TREE_CODE (arg0) == TRUNC_DIV_EXPR
9221 || TREE_CODE (arg0) == EXACT_DIV_EXPR)
9222 && TREE_CODE (arg1) == INTEGER_CST
9223 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
9224 && !integer_zerop (TREE_OPERAND (arg0, 1))
9225 && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1))
9226 && !TREE_OVERFLOW (arg1))
9228 tem = fold_div_compare (loc, code, type, arg0, arg1);
9229 if (tem != NULL_TREE)
9230 return tem;
9233 /* Fold ~X op ~Y as Y op X. */
9234 if (TREE_CODE (arg0) == BIT_NOT_EXPR
9235 && TREE_CODE (arg1) == BIT_NOT_EXPR)
9237 tree cmp_type = TREE_TYPE (TREE_OPERAND (arg0, 0));
9238 return fold_build2_loc (loc, code, type,
9239 fold_convert_loc (loc, cmp_type,
9240 TREE_OPERAND (arg1, 0)),
9241 TREE_OPERAND (arg0, 0));
9244 /* Fold ~X op C as X op' ~C, where op' is the swapped comparison. */
9245 if (TREE_CODE (arg0) == BIT_NOT_EXPR
9246 && TREE_CODE (arg1) == INTEGER_CST)
9248 tree cmp_type = TREE_TYPE (TREE_OPERAND (arg0, 0));
9249 return fold_build2_loc (loc, swap_tree_comparison (code), type,
9250 TREE_OPERAND (arg0, 0),
9251 fold_build1_loc (loc, BIT_NOT_EXPR, cmp_type,
9252 fold_convert_loc (loc, cmp_type, arg1)));
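/* Editorial illustration, not part of fold-const.c: the ~X op C fold
   above at the source level.  The constant 5 is hypothetical.  */
#if 0
static int
example_bit_not_cmp (int x)
{
  /* Complementing both sides reverses the ordering, so ~x < 5
     folds to x > ~5, i.e. x > -6.  */
  return ~x < 5;
}
#endif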
9255 return NULL_TREE;
9259 /* Subroutine of fold_binary. Optimize complex multiplications of the
9260 form z * conj(z), as pow(realpart(z),2) + pow(imagpart(z),2). The
9261 argument EXPR represents the expression "z" of type TYPE. */
9263 static tree
9264 fold_mult_zconjz (location_t loc, tree type, tree expr)
9266 tree itype = TREE_TYPE (type);
9267 tree rpart, ipart, tem;
9269 if (TREE_CODE (expr) == COMPLEX_EXPR)
9271 rpart = TREE_OPERAND (expr, 0);
9272 ipart = TREE_OPERAND (expr, 1);
9274 else if (TREE_CODE (expr) == COMPLEX_CST)
9276 rpart = TREE_REALPART (expr);
9277 ipart = TREE_IMAGPART (expr);
9279 else
9281 expr = save_expr (expr);
9282 rpart = fold_build1_loc (loc, REALPART_EXPR, itype, expr);
9283 ipart = fold_build1_loc (loc, IMAGPART_EXPR, itype, expr);
9286 rpart = save_expr (rpart);
9287 ipart = save_expr (ipart);
9288 tem = fold_build2_loc (loc, PLUS_EXPR, itype,
9289 fold_build2_loc (loc, MULT_EXPR, itype, rpart, rpart),
9290 fold_build2_loc (loc, MULT_EXPR, itype, ipart, ipart));
9291 return fold_build2_loc (loc, COMPLEX_EXPR, type, tem,
9292 fold_convert_loc (loc, itype, integer_zero_node));
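/* Editorial illustration, not part of fold-const.c: a C99 view of the
   z * conj(z) identity implemented by fold_mult_zconjz above.  */
#if 0
#include <complex.h>
static double complex
example_zconjz (double complex z)
{
  /* Folds to creal (z)*creal (z) + cimag (z)*cimag (z) with a zero
     imaginary part, avoiding the general complex multiply.  */
  return z * conj (z);
}
#endif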
9296 /* Subroutine of fold_binary. If P is the value of EXPR, computes
9297 power-of-two M and (arbitrary) N such that M divides (P-N). This condition
9298 guarantees that P and N have the same least significant log2(M) bits.
9299 N is not otherwise constrained. In particular, N is not normalized to
9300 0 <= N < M as is common. In general, the precise value of P is unknown.
9301 M is chosen as large as possible such that constant N can be determined.
9303 Returns M and sets *RESIDUE to N.
9305 If ALLOW_FUNC_ALIGN is true, do take functions' DECL_ALIGN_UNIT into
9306 account. This is not always possible due to PR 35705.
9309 static unsigned HOST_WIDE_INT
9310 get_pointer_modulus_and_residue (tree expr, unsigned HOST_WIDE_INT *residue,
9311 bool allow_func_align)
9313 enum tree_code code;
9315 *residue = 0;
9317 code = TREE_CODE (expr);
9318 if (code == ADDR_EXPR)
9320 expr = TREE_OPERAND (expr, 0);
9321 if (handled_component_p (expr))
9323 HOST_WIDE_INT bitsize, bitpos;
9324 tree offset;
9325 enum machine_mode mode;
9326 int unsignedp, volatilep;
9328 expr = get_inner_reference (expr, &bitsize, &bitpos, &offset,
9329 &mode, &unsignedp, &volatilep, false);
9330 *residue = bitpos / BITS_PER_UNIT;
9331 if (offset)
9333 if (TREE_CODE (offset) == INTEGER_CST)
9334 *residue += TREE_INT_CST_LOW (offset);
9335 else
9336 /* We don't handle more complicated offset expressions. */
9337 return 1;
9341 if (DECL_P (expr)
9342 && (allow_func_align || TREE_CODE (expr) != FUNCTION_DECL))
9343 return DECL_ALIGN_UNIT (expr);
9345 else if (code == POINTER_PLUS_EXPR)
9347 tree op0, op1;
9348 unsigned HOST_WIDE_INT modulus;
9349 enum tree_code inner_code;
9351 op0 = TREE_OPERAND (expr, 0);
9352 STRIP_NOPS (op0);
9353 modulus = get_pointer_modulus_and_residue (op0, residue,
9354 allow_func_align);
9356 op1 = TREE_OPERAND (expr, 1);
9357 STRIP_NOPS (op1);
9358 inner_code = TREE_CODE (op1);
9359 if (inner_code == INTEGER_CST)
9361 *residue += TREE_INT_CST_LOW (op1);
9362 return modulus;
9364 else if (inner_code == MULT_EXPR)
9366 op1 = TREE_OPERAND (op1, 1);
9367 if (TREE_CODE (op1) == INTEGER_CST)
9369 unsigned HOST_WIDE_INT align;
9371 /* Compute the greatest power-of-2 divisor of op1. */
9372 align = TREE_INT_CST_LOW (op1);
9373 align &= -align;
9375 /* If align is non-zero and less than *modulus, replace
9376 *modulus with align. If align is 0, then either op1 is 0
9377 or the greatest power-of-2 divisor of op1 doesn't fit in an
9378 unsigned HOST_WIDE_INT. In either case, no additional
9379 constraint is imposed. */
9380 if (align)
9381 modulus = MIN (modulus, align);
9383 return modulus;
9388 /* If we get here, we were unable to determine anything useful about the
9389 expression. */
9390 return 1;
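/* Editorial illustration, not part of fold-const.c: what modulus and
   residue mean for get_pointer_modulus_and_residue.  The buffer and
   its alignment are hypothetical.  */
#if 0
static char buf[32] __attribute__ ((aligned (16)));
static char *
example_modulus (void)
{
  /* The value of &buf[3] is congruent to 3 modulo 16, so the
     function returns modulus 16 and sets *residue to 3.  For
     p + 4 * i, the MULT_EXPR arm caps the modulus at 4, the
     largest power of two dividing the step.  */
  return &buf[3];
}
#endif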
9394 /* Fold a binary expression of code CODE and type TYPE with operands
9395 OP0 and OP1. LOC is the location of the resulting expression.
9396 Return the folded expression if folding is successful. Otherwise,
9397 return NULL_TREE. */
9399 tree
9400 fold_binary_loc (location_t loc,
9401 enum tree_code code, tree type, tree op0, tree op1)
9403 enum tree_code_class kind = TREE_CODE_CLASS (code);
9404 tree arg0, arg1, tem;
9405 tree t1 = NULL_TREE;
9406 bool strict_overflow_p;
9408 gcc_assert (IS_EXPR_CODE_CLASS (kind)
9409 && TREE_CODE_LENGTH (code) == 2
9410 && op0 != NULL_TREE
9411 && op1 != NULL_TREE);
9413 arg0 = op0;
9414 arg1 = op1;
9416 /* Strip any conversions that don't change the mode. This is
9417 safe for every expression, except for a comparison expression
9418 because its signedness is derived from its operands. So, in
9419 the latter case, only strip conversions that don't change the
9420 signedness. MIN_EXPR/MAX_EXPR also need signedness of arguments
9421 preserved.
9423 Note that this is done as an internal manipulation within the
9424 constant folder, in order to find the simplest representation
9425 of the arguments so that their form can be studied. In any
9426 case, the appropriate type conversions should be put back in
9427 the tree that will get out of the constant folder. */
9429 if (kind == tcc_comparison || code == MIN_EXPR || code == MAX_EXPR)
9431 STRIP_SIGN_NOPS (arg0);
9432 STRIP_SIGN_NOPS (arg1);
9434 else
9436 STRIP_NOPS (arg0);
9437 STRIP_NOPS (arg1);
9440 /* Note that TREE_CONSTANT isn't enough: static var addresses are
9441 constant but we can't do arithmetic on them. */
9442 if ((TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
9443 || (TREE_CODE (arg0) == REAL_CST && TREE_CODE (arg1) == REAL_CST)
9444 || (TREE_CODE (arg0) == FIXED_CST && TREE_CODE (arg1) == FIXED_CST)
9445 || (TREE_CODE (arg0) == FIXED_CST && TREE_CODE (arg1) == INTEGER_CST)
9446 || (TREE_CODE (arg0) == COMPLEX_CST && TREE_CODE (arg1) == COMPLEX_CST)
9447 || (TREE_CODE (arg0) == VECTOR_CST && TREE_CODE (arg1) == VECTOR_CST))
9449 if (kind == tcc_binary)
9451 /* Make sure type and arg0 have the same saturating flag. */
9452 gcc_assert (TYPE_SATURATING (type)
9453 == TYPE_SATURATING (TREE_TYPE (arg0)));
9454 tem = const_binop (code, arg0, arg1);
9456 else if (kind == tcc_comparison)
9457 tem = fold_relational_const (code, type, arg0, arg1);
9458 else
9459 tem = NULL_TREE;
9461 if (tem != NULL_TREE)
9463 if (TREE_TYPE (tem) != type)
9464 tem = fold_convert_loc (loc, type, tem);
9465 return tem;
9469 /* If this is a commutative operation, and ARG0 is a constant, move it
9470 to ARG1 to reduce the number of tests below. */
9471 if (commutative_tree_code (code)
9472 && tree_swap_operands_p (arg0, arg1, true))
9473 return fold_build2_loc (loc, code, type, op1, op0);
9475 /* ARG0 is the first operand of EXPR, and ARG1 is the second operand.
9477 First check for cases where an arithmetic operation is applied to a
9478 compound, conditional, or comparison operation. Push the arithmetic
9479 operation inside the compound or conditional to see if any folding
9480 can then be done. Convert comparison to conditional for this purpose.
9481 This also optimizes non-constant cases that used to be done in
9482 expand_expr.
9484 Before we do that, see if this is a BIT_AND_EXPR or a BIT_IOR_EXPR,
9485 one of the operands is a comparison and the other is a comparison, a
9486 BIT_AND_EXPR with the constant 1, or a truth value. In that case, the
9487 code below would make the expression more complex. Change it to a
9488 TRUTH_{AND,OR}_EXPR. Likewise, convert a similar NE_EXPR to
9489 TRUTH_XOR_EXPR and an EQ_EXPR to the inversion of a TRUTH_XOR_EXPR. */
9491 if ((code == BIT_AND_EXPR || code == BIT_IOR_EXPR
9492 || code == EQ_EXPR || code == NE_EXPR)
9493 && ((truth_value_p (TREE_CODE (arg0))
9494 && (truth_value_p (TREE_CODE (arg1))
9495 || (TREE_CODE (arg1) == BIT_AND_EXPR
9496 && integer_onep (TREE_OPERAND (arg1, 1)))))
9497 || (truth_value_p (TREE_CODE (arg1))
9498 && (truth_value_p (TREE_CODE (arg0))
9499 || (TREE_CODE (arg0) == BIT_AND_EXPR
9500 && integer_onep (TREE_OPERAND (arg0, 1)))))))
9502 tem = fold_build2_loc (loc, code == BIT_AND_EXPR ? TRUTH_AND_EXPR
9503 : code == BIT_IOR_EXPR ? TRUTH_OR_EXPR
9504 : TRUTH_XOR_EXPR,
9505 boolean_type_node,
9506 fold_convert_loc (loc, boolean_type_node, arg0),
9507 fold_convert_loc (loc, boolean_type_node, arg1));
9509 if (code == EQ_EXPR)
9510 tem = invert_truthvalue_loc (loc, tem);
9512 return fold_convert_loc (loc, type, tem);
9515 if (TREE_CODE_CLASS (code) == tcc_binary
9516 || TREE_CODE_CLASS (code) == tcc_comparison)
9518 if (TREE_CODE (arg0) == COMPOUND_EXPR)
9520 tem = fold_build2_loc (loc, code, type,
9521 fold_convert_loc (loc, TREE_TYPE (op0),
9522 TREE_OPERAND (arg0, 1)), op1);
9523 tem = build2 (COMPOUND_EXPR, type, TREE_OPERAND (arg0, 0), tem);
9524 goto fold_binary_exit;
9526 if (TREE_CODE (arg1) == COMPOUND_EXPR
9527 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
9529 tem = fold_build2_loc (loc, code, type, op0,
9530 fold_convert_loc (loc, TREE_TYPE (op1),
9531 TREE_OPERAND (arg1, 1)));
9532 tem = build2 (COMPOUND_EXPR, type, TREE_OPERAND (arg1, 0), tem);
9533 goto fold_binary_exit;
9536 if (TREE_CODE (arg0) == COND_EXPR || COMPARISON_CLASS_P (arg0))
9538 tem = fold_binary_op_with_conditional_arg (loc, code, type, op0, op1,
9539 arg0, arg1,
9540 /*cond_first_p=*/1);
9541 if (tem != NULL_TREE)
9542 return tem;
9545 if (TREE_CODE (arg1) == COND_EXPR || COMPARISON_CLASS_P (arg1))
9547 tem = fold_binary_op_with_conditional_arg (loc, code, type, op0, op1,
9548 arg1, arg0,
9549 /*cond_first_p=*/0);
9550 if (tem != NULL_TREE)
9551 return tem;
9555 switch (code)
9557 case MEM_REF:
9558 /* MEM[&MEM[p, CST1], CST2] -> MEM[p, CST1 + CST2]. */
9559 if (TREE_CODE (arg0) == ADDR_EXPR
9560 && TREE_CODE (TREE_OPERAND (arg0, 0)) == MEM_REF)
9562 tree iref = TREE_OPERAND (arg0, 0);
9563 return fold_build2 (MEM_REF, type,
9564 TREE_OPERAND (iref, 0),
9565 int_const_binop (PLUS_EXPR, arg1,
9566 TREE_OPERAND (iref, 1), 0));
9569 /* MEM[&a.b, CST2] -> MEM[&a, offsetof (a, b) + CST2]. */
9570 if (TREE_CODE (arg0) == ADDR_EXPR
9571 && handled_component_p (TREE_OPERAND (arg0, 0)))
9573 tree base;
9574 HOST_WIDE_INT coffset;
9575 base = get_addr_base_and_unit_offset (TREE_OPERAND (arg0, 0),
9576 &coffset);
9577 if (!base)
9578 return NULL_TREE;
9579 return fold_build2 (MEM_REF, type,
9580 build_fold_addr_expr (base),
9581 int_const_binop (PLUS_EXPR, arg1,
9582 size_int (coffset), 0));
9585 return NULL_TREE;
9587 case POINTER_PLUS_EXPR:
9588 /* 0 +p index -> (type)index */
9589 if (integer_zerop (arg0))
9590 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
9592 /* PTR +p 0 -> PTR */
9593 if (integer_zerop (arg1))
9594 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
9596 /* INT +p INT -> (PTR)(INT + INT). Stripping types allows for this. */
9597 if (INTEGRAL_TYPE_P (TREE_TYPE (arg1))
9598 && INTEGRAL_TYPE_P (TREE_TYPE (arg0)))
9599 return fold_convert_loc (loc, type,
9600 fold_build2_loc (loc, PLUS_EXPR, sizetype,
9601 fold_convert_loc (loc, sizetype,
9602 arg1),
9603 fold_convert_loc (loc, sizetype,
9604 arg0)));
9606 /* index +p PTR -> PTR +p index */
9607 if (POINTER_TYPE_P (TREE_TYPE (arg1))
9608 && INTEGRAL_TYPE_P (TREE_TYPE (arg0)))
9609 return fold_build2_loc (loc, POINTER_PLUS_EXPR, type,
9610 fold_convert_loc (loc, type, arg1),
9611 fold_convert_loc (loc, sizetype, arg0));
9613 /* (PTR +p B) +p A -> PTR +p (B + A) */
9614 if (TREE_CODE (arg0) == POINTER_PLUS_EXPR)
9616 tree inner;
9617 tree arg01 = fold_convert_loc (loc, sizetype, TREE_OPERAND (arg0, 1));
9618 tree arg00 = TREE_OPERAND (arg0, 0);
9619 inner = fold_build2_loc (loc, PLUS_EXPR, sizetype,
9620 arg01, fold_convert_loc (loc, sizetype, arg1));
9621 return fold_convert_loc (loc, type,
9622 fold_build2_loc (loc, POINTER_PLUS_EXPR,
9623 TREE_TYPE (arg00),
9624 arg00, inner));
9627 /* PTR_CST +p CST -> CST1 */
9628 if (TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
9629 return fold_build2_loc (loc, PLUS_EXPR, type, arg0,
9630 fold_convert_loc (loc, type, arg1));
9632 /* Try replacing &a[i1] +p c * i2 with &a[i1 + i2], if c is the step
9633 of the array. The loop optimizer sometimes produces this type of
9634 expression. */
9635 if (TREE_CODE (arg0) == ADDR_EXPR)
9637 tem = try_move_mult_to_index (loc, arg0,
9638 fold_convert_loc (loc, sizetype, arg1));
9639 if (tem)
9640 return fold_convert_loc (loc, type, tem);
9643 return NULL_TREE;
9645 case PLUS_EXPR:
9646 /* A + (-B) -> A - B */
9647 if (TREE_CODE (arg1) == NEGATE_EXPR)
9648 return fold_build2_loc (loc, MINUS_EXPR, type,
9649 fold_convert_loc (loc, type, arg0),
9650 fold_convert_loc (loc, type,
9651 TREE_OPERAND (arg1, 0)));
9652 /* (-A) + B -> B - A */
9653 if (TREE_CODE (arg0) == NEGATE_EXPR
9654 && reorder_operands_p (TREE_OPERAND (arg0, 0), arg1))
9655 return fold_build2_loc (loc, MINUS_EXPR, type,
9656 fold_convert_loc (loc, type, arg1),
9657 fold_convert_loc (loc, type,
9658 TREE_OPERAND (arg0, 0)));
9660 if (INTEGRAL_TYPE_P (type))
9662 /* Convert ~A + 1 to -A. */
9663 if (TREE_CODE (arg0) == BIT_NOT_EXPR
9664 && integer_onep (arg1))
9665 return fold_build1_loc (loc, NEGATE_EXPR, type,
9666 fold_convert_loc (loc, type,
9667 TREE_OPERAND (arg0, 0)));
9669 /* ~X + X is -1. */
9670 if (TREE_CODE (arg0) == BIT_NOT_EXPR
9671 && !TYPE_OVERFLOW_TRAPS (type))
9673 tree tem = TREE_OPERAND (arg0, 0);
9675 STRIP_NOPS (tem);
9676 if (operand_equal_p (tem, arg1, 0))
9678 t1 = build_int_cst_type (type, -1);
9679 return omit_one_operand_loc (loc, type, t1, arg1);
9683 /* X + ~X is -1. */
9684 if (TREE_CODE (arg1) == BIT_NOT_EXPR
9685 && !TYPE_OVERFLOW_TRAPS (type))
9687 tree tem = TREE_OPERAND (arg1, 0);
9689 STRIP_NOPS (tem);
9690 if (operand_equal_p (arg0, tem, 0))
9692 t1 = build_int_cst_type (type, -1);
9693 return omit_one_operand_loc (loc, type, t1, arg0);
9697 /* X + (X / CST) * -CST is X % CST. */
9698 if (TREE_CODE (arg1) == MULT_EXPR
9699 && TREE_CODE (TREE_OPERAND (arg1, 0)) == TRUNC_DIV_EXPR
9700 && operand_equal_p (arg0,
9701 TREE_OPERAND (TREE_OPERAND (arg1, 0), 0), 0))
9703 tree cst0 = TREE_OPERAND (TREE_OPERAND (arg1, 0), 1);
9704 tree cst1 = TREE_OPERAND (arg1, 1);
9705 tree sum = fold_binary_loc (loc, PLUS_EXPR, TREE_TYPE (cst1),
9706 cst1, cst0);
9707 if (sum && integer_zerop (sum))
9708 return fold_convert_loc (loc, type,
9709 fold_build2_loc (loc, TRUNC_MOD_EXPR,
9710 TREE_TYPE (arg0), arg0,
9711 cst0));
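/* Editorial illustration, not part of fold-const.c: the
   X + (X / CST) * -CST fold above at the source level.  The constant
   16 is hypothetical.  */
#if 0
static int
example_mod_from_div (int x)
{
  /* The multiply undoes all but the remainder of the truncating
     division, so this folds to x % 16.  */
  return x + (x / 16) * -16;
}
#endif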
9715 /* Handle (A1 * C1) + (A2 * C2) with A1, A2 or C1, C2 being the
9716 same or one. Make sure type is not saturating.
9717 fold_plusminus_mult_expr will re-associate. */
9718 if ((TREE_CODE (arg0) == MULT_EXPR
9719 || TREE_CODE (arg1) == MULT_EXPR)
9720 && !TYPE_SATURATING (type)
9721 && (!FLOAT_TYPE_P (type) || flag_associative_math))
9723 tree tem = fold_plusminus_mult_expr (loc, code, type, arg0, arg1);
9724 if (tem)
9725 return tem;
9728 if (! FLOAT_TYPE_P (type))
9730 if (integer_zerop (arg1))
9731 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
9733 /* If we are adding two BIT_AND_EXPR's, both of which are and'ing
9734 with a constant, and the two constants have no bits in common,
9735 we should treat this as a BIT_IOR_EXPR since this may produce more
9736 simplifications. */
9737 if (TREE_CODE (arg0) == BIT_AND_EXPR
9738 && TREE_CODE (arg1) == BIT_AND_EXPR
9739 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
9740 && TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
9741 && integer_zerop (const_binop (BIT_AND_EXPR,
9742 TREE_OPERAND (arg0, 1),
9743 TREE_OPERAND (arg1, 1))))
9745 code = BIT_IOR_EXPR;
9746 goto bit_ior;
9749 /* Reassociate (plus (plus (mult) (foo)) (mult)) as
9750 (plus (plus (mult) (mult)) (foo)) so that we can
9751 take advantage of the factoring cases below. */
9752 if (((TREE_CODE (arg0) == PLUS_EXPR
9753 || TREE_CODE (arg0) == MINUS_EXPR)
9754 && TREE_CODE (arg1) == MULT_EXPR)
9755 || ((TREE_CODE (arg1) == PLUS_EXPR
9756 || TREE_CODE (arg1) == MINUS_EXPR)
9757 && TREE_CODE (arg0) == MULT_EXPR))
9759 tree parg0, parg1, parg, marg;
9760 enum tree_code pcode;
9762 if (TREE_CODE (arg1) == MULT_EXPR)
9763 parg = arg0, marg = arg1;
9764 else
9765 parg = arg1, marg = arg0;
9766 pcode = TREE_CODE (parg);
9767 parg0 = TREE_OPERAND (parg, 0);
9768 parg1 = TREE_OPERAND (parg, 1);
9769 STRIP_NOPS (parg0);
9770 STRIP_NOPS (parg1);
9772 if (TREE_CODE (parg0) == MULT_EXPR
9773 && TREE_CODE (parg1) != MULT_EXPR)
9774 return fold_build2_loc (loc, pcode, type,
9775 fold_build2_loc (loc, PLUS_EXPR, type,
9776 fold_convert_loc (loc, type,
9777 parg0),
9778 fold_convert_loc (loc, type,
9779 marg)),
9780 fold_convert_loc (loc, type, parg1));
9781 if (TREE_CODE (parg0) != MULT_EXPR
9782 && TREE_CODE (parg1) == MULT_EXPR)
9783 return
9784 fold_build2_loc (loc, PLUS_EXPR, type,
9785 fold_convert_loc (loc, type, parg0),
9786 fold_build2_loc (loc, pcode, type,
9787 fold_convert_loc (loc, type, marg),
9788 fold_convert_loc (loc, type,
9789 parg1)));
9792 else
9794 /* See if ARG1 is zero and X + ARG1 reduces to X. */
9795 if (fold_real_zero_addition_p (TREE_TYPE (arg0), arg1, 0))
9796 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
9798 /* Likewise if the operands are reversed. */
9799 if (fold_real_zero_addition_p (TREE_TYPE (arg1), arg0, 0))
9800 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
9802 /* Convert X + -C into X - C. */
9803 if (TREE_CODE (arg1) == REAL_CST
9804 && REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg1)))
9806 tem = fold_negate_const (arg1, type);
9807 if (!TREE_OVERFLOW (arg1) || !flag_trapping_math)
9808 return fold_build2_loc (loc, MINUS_EXPR, type,
9809 fold_convert_loc (loc, type, arg0),
9810 fold_convert_loc (loc, type, tem));
9813 /* Fold __complex__ ( x, 0 ) + __complex__ ( 0, y )
9814 to __complex__ ( x, y ). This is not the same for SNaNs or
9815 if signed zeros are involved. */
9816 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
9817 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
9818 && COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0)))
9820 tree rtype = TREE_TYPE (TREE_TYPE (arg0));
9821 tree arg0r = fold_unary_loc (loc, REALPART_EXPR, rtype, arg0);
9822 tree arg0i = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg0);
9823 bool arg0rz = false, arg0iz = false;
9824 if ((arg0r && (arg0rz = real_zerop (arg0r)))
9825 || (arg0i && (arg0iz = real_zerop (arg0i))))
9827 tree arg1r = fold_unary_loc (loc, REALPART_EXPR, rtype, arg1);
9828 tree arg1i = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg1);
9829 if (arg0rz && arg1i && real_zerop (arg1i))
9831 tree rp = arg1r ? arg1r
9832 : build1 (REALPART_EXPR, rtype, arg1);
9833 tree ip = arg0i ? arg0i
9834 : build1 (IMAGPART_EXPR, rtype, arg0);
9835 return fold_build2_loc (loc, COMPLEX_EXPR, type, rp, ip);
9837 else if (arg0iz && arg1r && real_zerop (arg1r))
9839 tree rp = arg0r ? arg0r
9840 : build1 (REALPART_EXPR, rtype, arg0);
9841 tree ip = arg1i ? arg1i
9842 : build1 (IMAGPART_EXPR, rtype, arg1);
9843 return fold_build2_loc (loc, COMPLEX_EXPR, type, rp, ip);
9848 if (flag_unsafe_math_optimizations
9849 && (TREE_CODE (arg0) == RDIV_EXPR || TREE_CODE (arg0) == MULT_EXPR)
9850 && (TREE_CODE (arg1) == RDIV_EXPR || TREE_CODE (arg1) == MULT_EXPR)
9851 && (tem = distribute_real_division (loc, code, type, arg0, arg1)))
9852 return tem;
9854 /* Convert x+x into x*2.0. */
9855 if (operand_equal_p (arg0, arg1, 0)
9856 && SCALAR_FLOAT_TYPE_P (type))
9857 return fold_build2_loc (loc, MULT_EXPR, type, arg0,
9858 build_real (type, dconst2));
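/* Editorial illustration, not part of fold-const.c: the x+x fold
   above at the source level.  */
#if 0
static double
example_double_x (double x)
{
  /* Folds to x * 2.0 unconditionally: multiplying by 2.0 is exact
     in binary floating point, so no -ffast-math flag is needed.  */
  return x + x;
}
#endif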
9860 /* Convert a + (b*c + d*e) into (a + b*c) + d*e.
9861 We associate floats only if the user has specified
9862 -fassociative-math. */
9863 if (flag_associative_math
9864 && TREE_CODE (arg1) == PLUS_EXPR
9865 && TREE_CODE (arg0) != MULT_EXPR)
9867 tree tree10 = TREE_OPERAND (arg1, 0);
9868 tree tree11 = TREE_OPERAND (arg1, 1);
9869 if (TREE_CODE (tree11) == MULT_EXPR
9870 && TREE_CODE (tree10) == MULT_EXPR)
9872 tree tree0;
9873 tree0 = fold_build2_loc (loc, PLUS_EXPR, type, arg0, tree10);
9874 return fold_build2_loc (loc, PLUS_EXPR, type, tree0, tree11);
9877 /* Convert (b*c + d*e) + a into b*c + (d*e +a).
9878 We associate floats only if the user has specified
9879 -fassociative-math. */
9880 if (flag_associative_math
9881 && TREE_CODE (arg0) == PLUS_EXPR
9882 && TREE_CODE (arg1) != MULT_EXPR)
9884 tree tree00 = TREE_OPERAND (arg0, 0);
9885 tree tree01 = TREE_OPERAND (arg0, 1);
9886 if (TREE_CODE (tree01) == MULT_EXPR
9887 && TREE_CODE (tree00) == MULT_EXPR)
9889 tree tree0;
9890 tree0 = fold_build2_loc (loc, PLUS_EXPR, type, tree01, arg1);
9891 return fold_build2_loc (loc, PLUS_EXPR, type, tree00, tree0);
9896 bit_rotate:
9897 /* (A << C1) + (A >> C2) if A is unsigned and C1+C2 is the size of A
9898 is a rotate of A by C1 bits. */
9899 /* (A << B) + (A >> (Z - B)) if A is unsigned and Z is the size of A
9900 is a rotate of A by B bits. */
9902 enum tree_code code0, code1;
9903 tree rtype;
9904 code0 = TREE_CODE (arg0);
9905 code1 = TREE_CODE (arg1);
9906 if (((code0 == RSHIFT_EXPR && code1 == LSHIFT_EXPR)
9907 || (code1 == RSHIFT_EXPR && code0 == LSHIFT_EXPR))
9908 && operand_equal_p (TREE_OPERAND (arg0, 0),
9909 TREE_OPERAND (arg1, 0), 0)
9910 && (rtype = TREE_TYPE (TREE_OPERAND (arg0, 0)),
9911 TYPE_UNSIGNED (rtype))
9912 /* Only create rotates in complete modes. Other cases are not
9913 expanded properly. */
9914 && TYPE_PRECISION (rtype) == GET_MODE_PRECISION (TYPE_MODE (rtype)))
9916 tree tree01, tree11;
9917 enum tree_code code01, code11;
9919 tree01 = TREE_OPERAND (arg0, 1);
9920 tree11 = TREE_OPERAND (arg1, 1);
9921 STRIP_NOPS (tree01);
9922 STRIP_NOPS (tree11);
9923 code01 = TREE_CODE (tree01);
9924 code11 = TREE_CODE (tree11);
9925 if (code01 == INTEGER_CST
9926 && code11 == INTEGER_CST
9927 && TREE_INT_CST_HIGH (tree01) == 0
9928 && TREE_INT_CST_HIGH (tree11) == 0
9929 && ((TREE_INT_CST_LOW (tree01) + TREE_INT_CST_LOW (tree11))
9930 == TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg0, 0)))))
9932 tem = build2 (LROTATE_EXPR,
9933 TREE_TYPE (TREE_OPERAND (arg0, 0)),
9934 TREE_OPERAND (arg0, 0),
9935 code0 == LSHIFT_EXPR
9936 ? tree01 : tree11);
9937 SET_EXPR_LOCATION (tem, loc);
9938 return fold_convert_loc (loc, type, tem);
9940 else if (code11 == MINUS_EXPR)
9942 tree tree110, tree111;
9943 tree110 = TREE_OPERAND (tree11, 0);
9944 tree111 = TREE_OPERAND (tree11, 1);
9945 STRIP_NOPS (tree110);
9946 STRIP_NOPS (tree111);
9947 if (TREE_CODE (tree110) == INTEGER_CST
9948 && 0 == compare_tree_int (tree110,
9949 TYPE_PRECISION
9950 (TREE_TYPE (TREE_OPERAND
9951 (arg0, 0))))
9952 && operand_equal_p (tree01, tree111, 0))
9953 return
9954 fold_convert_loc (loc, type,
9955 build2 ((code0 == LSHIFT_EXPR
9956 ? LROTATE_EXPR
9957 : RROTATE_EXPR),
9958 TREE_TYPE (TREE_OPERAND (arg0, 0)),
9959 TREE_OPERAND (arg0, 0), tree01));
9961 else if (code01 == MINUS_EXPR)
9963 tree tree010, tree011;
9964 tree010 = TREE_OPERAND (tree01, 0);
9965 tree011 = TREE_OPERAND (tree01, 1);
9966 STRIP_NOPS (tree010);
9967 STRIP_NOPS (tree011);
9968 if (TREE_CODE (tree010) == INTEGER_CST
9969 && 0 == compare_tree_int (tree010,
9970 TYPE_PRECISION
9971 (TREE_TYPE (TREE_OPERAND
9972 (arg0, 0))))
9973 && operand_equal_p (tree11, tree011, 0))
9974 return fold_convert_loc
9975 (loc, type,
9976 build2 ((code0 != LSHIFT_EXPR
9977 ? LROTATE_EXPR
9978 : RROTATE_EXPR),
9979 TREE_TYPE (TREE_OPERAND (arg0, 0)),
9980 TREE_OPERAND (arg0, 0), tree11));
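/* Editorial illustration, not part of fold-const.c: the rotate
   patterns recognized above, assuming a 32-bit unsigned int.  */
#if 0
static unsigned int
example_rotate (unsigned int x)
{
  /* (x << 3) + (x >> 29) becomes a single LROTATE_EXPR by 3; the
     variable form (x << n) + (x >> (32 - n)) is caught by the
     MINUS_EXPR arms above.  Both require the shift counts to sum
     to the precision of the unsigned operand.  */
  return (x << 3) + (x >> 29);
}
#endif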
9985 associate:
9986 /* In most languages, we can't associate operations on floats through
9987 parentheses. Rather than remember where the parentheses were, we
9988 don't associate floats at all, unless the user has specified
9989 -fassociative-math.
9990 And, we need to make sure type is not saturating. */
9992 if ((! FLOAT_TYPE_P (type) || flag_associative_math)
9993 && !TYPE_SATURATING (type))
9995 tree var0, con0, lit0, minus_lit0;
9996 tree var1, con1, lit1, minus_lit1;
9997 bool ok = true;
9999 /* Split both trees into variables, constants, and literals. Then
10000 associate each group together, the constants with literals,
10001 then the result with variables. This increases the chances of
10002 literals being recombined later and of generating relocatable
10003 expressions for the sum of a constant and literal. */
10004 var0 = split_tree (arg0, code, &con0, &lit0, &minus_lit0, 0);
10005 var1 = split_tree (arg1, code, &con1, &lit1, &minus_lit1,
10006 code == MINUS_EXPR);
10008 /* Recombine MINUS_EXPR operands by using PLUS_EXPR. */
10009 if (code == MINUS_EXPR)
10010 code = PLUS_EXPR;
10012 /* With undefined overflow we can only associate constants with one
10013 variable, and constants whose association doesn't overflow. */
10014 if ((POINTER_TYPE_P (type) && POINTER_TYPE_OVERFLOW_UNDEFINED)
10015 || (INTEGRAL_TYPE_P (type) && !TYPE_OVERFLOW_WRAPS (type)))
10017 if (var0 && var1)
10019 tree tmp0 = var0;
10020 tree tmp1 = var1;
10022 if (TREE_CODE (tmp0) == NEGATE_EXPR)
10023 tmp0 = TREE_OPERAND (tmp0, 0);
10024 if (TREE_CODE (tmp1) == NEGATE_EXPR)
10025 tmp1 = TREE_OPERAND (tmp1, 0);
10026 /* The only case we can still associate with two variables
10027 is if they are the same, modulo negation. */
10028 if (!operand_equal_p (tmp0, tmp1, 0))
10029 ok = false;
10032 if (ok && lit0 && lit1)
10034 tree tmp0 = fold_convert (type, lit0);
10035 tree tmp1 = fold_convert (type, lit1);
10037 if (!TREE_OVERFLOW (tmp0) && !TREE_OVERFLOW (tmp1)
10038 && TREE_OVERFLOW (fold_build2 (code, type, tmp0, tmp1)))
10039 ok = false;
10043 /* Only do something if we found more than two objects. Otherwise,
10044 nothing has changed and we risk infinite recursion. */
10045 if (ok
10046 && (2 < ((var0 != 0) + (var1 != 0)
10047 + (con0 != 0) + (con1 != 0)
10048 + (lit0 != 0) + (lit1 != 0)
10049 + (minus_lit0 != 0) + (minus_lit1 != 0))))
10051 var0 = associate_trees (loc, var0, var1, code, type);
10052 con0 = associate_trees (loc, con0, con1, code, type);
10053 lit0 = associate_trees (loc, lit0, lit1, code, type);
10054 minus_lit0 = associate_trees (loc, minus_lit0, minus_lit1, code, type);
10056 /* Preserve the MINUS_EXPR if the negative part of the literal is
10057 greater than the positive part. Otherwise, the multiplicative
10058 folding code (i.e. extract_muldiv) may be fooled when
10059 unsigned constants are subtracted, as in the following
10060 example: ((X*2 + 4) - 8U)/2. */
10061 if (minus_lit0 && lit0)
10063 if (TREE_CODE (lit0) == INTEGER_CST
10064 && TREE_CODE (minus_lit0) == INTEGER_CST
10065 && tree_int_cst_lt (lit0, minus_lit0))
10067 minus_lit0 = associate_trees (loc, minus_lit0, lit0,
10068 MINUS_EXPR, type);
10069 lit0 = 0;
10071 else
10073 lit0 = associate_trees (loc, lit0, minus_lit0,
10074 MINUS_EXPR, type);
10075 minus_lit0 = 0;
10078 if (minus_lit0)
10080 if (con0 == 0)
10081 return
10082 fold_convert_loc (loc, type,
10083 associate_trees (loc, var0, minus_lit0,
10084 MINUS_EXPR, type));
10085 else
10087 con0 = associate_trees (loc, con0, minus_lit0,
10088 MINUS_EXPR, type);
10089 return
10090 fold_convert_loc (loc, type,
10091 associate_trees (loc, var0, con0,
10092 PLUS_EXPR, type));
10096 con0 = associate_trees (loc, con0, lit0, code, type);
10097 return
10098 fold_convert_loc (loc, type, associate_trees (loc, var0, con0,
10099 code, type));
10103 return NULL_TREE;
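/* Editorial illustration, not part of fold-const.c: the effect of the
   associate logic above.  The constants are hypothetical.  */
#if 0
static int
example_associate (int x)
{
  /* split_tree separates the variable from the literals, so this
     folds to x + 12; with undefined overflow the fold is refused
     if combining the two literals would itself overflow.  */
  return (x + 5) + 7;
}
#endif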
10105 case MINUS_EXPR:
10106 /* Pointer simplifications for subtraction, simple reassociations. */
10107 if (POINTER_TYPE_P (TREE_TYPE (arg1)) && POINTER_TYPE_P (TREE_TYPE (arg0)))
10109 /* (PTR0 p+ A) - (PTR1 p+ B) -> (PTR0 - PTR1) + (A - B) */
10110 if (TREE_CODE (arg0) == POINTER_PLUS_EXPR
10111 && TREE_CODE (arg1) == POINTER_PLUS_EXPR)
10113 tree arg00 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
10114 tree arg01 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
10115 tree arg10 = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 0));
10116 tree arg11 = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 1));
10117 return fold_build2_loc (loc, PLUS_EXPR, type,
10118 fold_build2_loc (loc, MINUS_EXPR, type,
10119 arg00, arg10),
10120 fold_build2_loc (loc, MINUS_EXPR, type,
10121 arg01, arg11));
10123 /* (PTR0 p+ A) - PTR1 -> (PTR0 - PTR1) + A, assuming PTR0 - PTR1 simplifies. */
10124 else if (TREE_CODE (arg0) == POINTER_PLUS_EXPR)
10126 tree arg00 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
10127 tree arg01 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
10128 tree tmp = fold_binary_loc (loc, MINUS_EXPR, type, arg00,
10129 fold_convert_loc (loc, type, arg1));
10130 if (tmp)
10131 return fold_build2_loc (loc, PLUS_EXPR, type, tmp, arg01);
10134 /* A - (-B) -> A + B */
10135 if (TREE_CODE (arg1) == NEGATE_EXPR)
10136 return fold_build2_loc (loc, PLUS_EXPR, type, op0,
10137 fold_convert_loc (loc, type,
10138 TREE_OPERAND (arg1, 0)));
10139 /* (-A) - B -> (-B) - A where B is easily negated and we can swap. */
10140 if (TREE_CODE (arg0) == NEGATE_EXPR
10141 && (FLOAT_TYPE_P (type)
10142 || INTEGRAL_TYPE_P (type))
10143 && negate_expr_p (arg1)
10144 && reorder_operands_p (arg0, arg1))
10145 return fold_build2_loc (loc, MINUS_EXPR, type,
10146 fold_convert_loc (loc, type,
10147 negate_expr (arg1)),
10148 fold_convert_loc (loc, type,
10149 TREE_OPERAND (arg0, 0)));
10150 /* Convert -A - 1 to ~A. */
10151 if (INTEGRAL_TYPE_P (type)
10152 && TREE_CODE (arg0) == NEGATE_EXPR
10153 && integer_onep (arg1)
10154 && !TYPE_OVERFLOW_TRAPS (type))
10155 return fold_build1_loc (loc, BIT_NOT_EXPR, type,
10156 fold_convert_loc (loc, type,
10157 TREE_OPERAND (arg0, 0)));
10159 /* Convert -1 - A to ~A. */
10160 if (INTEGRAL_TYPE_P (type)
10161 && integer_all_onesp (arg0))
10162 return fold_build1_loc (loc, BIT_NOT_EXPR, type, op1);
10165 /* X - (X / CST) * CST is X % CST. */
10166 if (INTEGRAL_TYPE_P (type)
10167 && TREE_CODE (arg1) == MULT_EXPR
10168 && TREE_CODE (TREE_OPERAND (arg1, 0)) == TRUNC_DIV_EXPR
10169 && operand_equal_p (arg0,
10170 TREE_OPERAND (TREE_OPERAND (arg1, 0), 0), 0)
10171 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg1, 0), 1),
10172 TREE_OPERAND (arg1, 1), 0))
10173 return
10174 fold_convert_loc (loc, type,
10175 fold_build2_loc (loc, TRUNC_MOD_EXPR, TREE_TYPE (arg0),
10176 arg0, TREE_OPERAND (arg1, 1)));
10178 if (! FLOAT_TYPE_P (type))
10180 if (integer_zerop (arg0))
10181 return negate_expr (fold_convert_loc (loc, type, arg1));
10182 if (integer_zerop (arg1))
10183 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
10185 /* Fold A - (A & B) into ~B & A. */
10186 if (!TREE_SIDE_EFFECTS (arg0)
10187 && TREE_CODE (arg1) == BIT_AND_EXPR)
10189 if (operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0))
10191 tree arg10 = fold_convert_loc (loc, type,
10192 TREE_OPERAND (arg1, 0));
10193 return fold_build2_loc (loc, BIT_AND_EXPR, type,
10194 fold_build1_loc (loc, BIT_NOT_EXPR,
10195 type, arg10),
10196 fold_convert_loc (loc, type, arg0));
10198 if (operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
10200 tree arg11 = fold_convert_loc (loc,
10201 type, TREE_OPERAND (arg1, 1));
10202 return fold_build2_loc (loc, BIT_AND_EXPR, type,
10203 fold_build1_loc (loc, BIT_NOT_EXPR,
10204 type, arg11),
10205 fold_convert_loc (loc, type, arg0));
10209 /* Fold (A & ~B) - (A & B) into (A ^ B) - B, where B is
10210 any power of 2 minus 1. */
10211 if (TREE_CODE (arg0) == BIT_AND_EXPR
10212 && TREE_CODE (arg1) == BIT_AND_EXPR
10213 && operand_equal_p (TREE_OPERAND (arg0, 0),
10214 TREE_OPERAND (arg1, 0), 0))
10216 tree mask0 = TREE_OPERAND (arg0, 1);
10217 tree mask1 = TREE_OPERAND (arg1, 1);
10218 tree tem = fold_build1_loc (loc, BIT_NOT_EXPR, type, mask0);
10220 if (operand_equal_p (tem, mask1, 0))
10222 tem = fold_build2_loc (loc, BIT_XOR_EXPR, type,
10223 TREE_OPERAND (arg0, 0), mask1);
10224 return fold_build2_loc (loc, MINUS_EXPR, type, tem, mask1);
10229 /* See if ARG1 is zero and X - ARG1 reduces to X. */
10230 else if (fold_real_zero_addition_p (TREE_TYPE (arg0), arg1, 1))
10231 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
10233 /* (ARG0 - ARG1) is the same as (-ARG1 + ARG0). So check whether
10234 ARG0 is zero and X + ARG0 reduces to X, since that would mean
10235 (-ARG1 + ARG0) reduces to -ARG1. */
10236 else if (fold_real_zero_addition_p (TREE_TYPE (arg1), arg0, 0))
10237 return negate_expr (fold_convert_loc (loc, type, arg1));
10239 /* Fold __complex__ ( x, 0 ) - __complex__ ( 0, y ) to
10240 __complex__ ( x, -y ). This is not the same for SNaNs or if
10241 signed zeros are involved. */
10242 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
10243 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
10244 && COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0)))
10246 tree rtype = TREE_TYPE (TREE_TYPE (arg0));
10247 tree arg0r = fold_unary_loc (loc, REALPART_EXPR, rtype, arg0);
10248 tree arg0i = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg0);
10249 bool arg0rz = false, arg0iz = false;
10250 if ((arg0r && (arg0rz = real_zerop (arg0r)))
10251 || (arg0i && (arg0iz = real_zerop (arg0i))))
10253 tree arg1r = fold_unary_loc (loc, REALPART_EXPR, rtype, arg1);
10254 tree arg1i = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg1);
10255 if (arg0rz && arg1i && real_zerop (arg1i))
10257 tree rp = fold_build1_loc (loc, NEGATE_EXPR, rtype,
10258 arg1r ? arg1r
10259 : build1 (REALPART_EXPR, rtype, arg1));
10260 tree ip = arg0i ? arg0i
10261 : build1 (IMAGPART_EXPR, rtype, arg0);
10262 return fold_build2_loc (loc, COMPLEX_EXPR, type, rp, ip);
10264 else if (arg0iz && arg1r && real_zerop (arg1r))
10266 tree rp = arg0r ? arg0r
10267 : build1 (REALPART_EXPR, rtype, arg0);
10268 tree ip = fold_build1_loc (loc, NEGATE_EXPR, rtype,
10269 arg1i ? arg1i
10270 : build1 (IMAGPART_EXPR, rtype, arg1));
10271 return fold_build2_loc (loc, COMPLEX_EXPR, type, rp, ip);
10276 /* Fold &x - &x. This can happen from &x.foo - &x.
10277 This is unsafe for certain floats even in non-IEEE formats.
10278 In IEEE, it is unsafe because it gives the wrong result for NaNs.
10279 Also note that operand_equal_p is always false if an operand
10280 is volatile. */
10282 if ((!FLOAT_TYPE_P (type) || !HONOR_NANS (TYPE_MODE (type)))
10283 && operand_equal_p (arg0, arg1, 0))
10284 return fold_convert_loc (loc, type, integer_zero_node);
10286 /* A - B -> A + (-B) if B is easily negatable. */
10287 if (negate_expr_p (arg1)
10288 && ((FLOAT_TYPE_P (type)
10289 /* Avoid this transformation if B is a positive REAL_CST. */
10290 && (TREE_CODE (arg1) != REAL_CST
10291 || REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg1))))
10292 || INTEGRAL_TYPE_P (type)))
10293 return fold_build2_loc (loc, PLUS_EXPR, type,
10294 fold_convert_loc (loc, type, arg0),
10295 fold_convert_loc (loc, type,
10296 negate_expr (arg1)));
10298 /* Try folding difference of addresses. */
10300 HOST_WIDE_INT diff;
10302 if ((TREE_CODE (arg0) == ADDR_EXPR
10303 || TREE_CODE (arg1) == ADDR_EXPR)
10304 && ptr_difference_const (arg0, arg1, &diff))
10305 return build_int_cst_type (type, diff);
10308 /* Fold &a[i] - &a[j] to i-j. */
10309 if (TREE_CODE (arg0) == ADDR_EXPR
10310 && TREE_CODE (TREE_OPERAND (arg0, 0)) == ARRAY_REF
10311 && TREE_CODE (arg1) == ADDR_EXPR
10312 && TREE_CODE (TREE_OPERAND (arg1, 0)) == ARRAY_REF)
10314 tree aref0 = TREE_OPERAND (arg0, 0);
10315 tree aref1 = TREE_OPERAND (arg1, 0);
10316 if (operand_equal_p (TREE_OPERAND (aref0, 0),
10317 TREE_OPERAND (aref1, 0), 0))
10319 tree op0 = fold_convert_loc (loc, type, TREE_OPERAND (aref0, 1));
10320 tree op1 = fold_convert_loc (loc, type, TREE_OPERAND (aref1, 1));
10321 tree esz = array_ref_element_size (aref0);
10322 tree diff = build2 (MINUS_EXPR, type, op0, op1);
10323 return fold_build2_loc (loc, MULT_EXPR, type, diff,
10324 fold_convert_loc (loc, type, esz));
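/* Editorial illustration, not part of fold-const.c: the
   &a[i] - &a[j] fold above.  The array and indices are
   hypothetical.  */
#if 0
static long
example_addr_diff (void)
{
  int a[10];
  /* Neither address is computed: the fold yields
     (7 - 2) * sizeof (int) in bytes, and the pointer subtraction
     divides the element size back out, leaving 5.  */
  return &a[7] - &a[2];
}
#endif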
10329 if (FLOAT_TYPE_P (type)
10330 && flag_unsafe_math_optimizations
10331 && (TREE_CODE (arg0) == RDIV_EXPR || TREE_CODE (arg0) == MULT_EXPR)
10332 && (TREE_CODE (arg1) == RDIV_EXPR || TREE_CODE (arg1) == MULT_EXPR)
10333 && (tem = distribute_real_division (loc, code, type, arg0, arg1)))
10334 return tem;
10336 /* Handle (A1 * C1) - (A2 * C2) with A1, A2 or C1, C2 being the
10337 same or one. Make sure type is not saturating.
10338 fold_plusminus_mult_expr will re-associate. */
10339 if ((TREE_CODE (arg0) == MULT_EXPR
10340 || TREE_CODE (arg1) == MULT_EXPR)
10341 && !TYPE_SATURATING (type)
10342 && (!FLOAT_TYPE_P (type) || flag_associative_math))
10344 tree tem = fold_plusminus_mult_expr (loc, code, type, arg0, arg1);
10345 if (tem)
10346 return tem;
10349 goto associate;
10351 case MULT_EXPR:
10352 /* (-A) * (-B) -> A * B */
10353 if (TREE_CODE (arg0) == NEGATE_EXPR && negate_expr_p (arg1))
10354 return fold_build2_loc (loc, MULT_EXPR, type,
10355 fold_convert_loc (loc, type,
10356 TREE_OPERAND (arg0, 0)),
10357 fold_convert_loc (loc, type,
10358 negate_expr (arg1)));
10359 if (TREE_CODE (arg1) == NEGATE_EXPR && negate_expr_p (arg0))
10360 return fold_build2_loc (loc, MULT_EXPR, type,
10361 fold_convert_loc (loc, type,
10362 negate_expr (arg0)),
10363 fold_convert_loc (loc, type,
10364 TREE_OPERAND (arg1, 0)));
10366 if (! FLOAT_TYPE_P (type))
10368 if (integer_zerop (arg1))
10369 return omit_one_operand_loc (loc, type, arg1, arg0);
10370 if (integer_onep (arg1))
10371 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
10372 /* Transform x * -1 into -x. Make sure to do the negation
10373 on the original operand with conversions not stripped
10374 because we can only strip non-sign-changing conversions. */
10375 if (integer_all_onesp (arg1))
10376 return fold_convert_loc (loc, type, negate_expr (op0));
10377 /* Transform x * -C into -x * C if x is easily negatable. */
10378 if (TREE_CODE (arg1) == INTEGER_CST
10379 && tree_int_cst_sgn (arg1) == -1
10380 && negate_expr_p (arg0)
10381 && (tem = negate_expr (arg1)) != arg1
10382 && !TREE_OVERFLOW (tem))
10383 return fold_build2_loc (loc, MULT_EXPR, type,
10384 fold_convert_loc (loc, type,
10385 negate_expr (arg0)),
10386 tem);
10388 /* (a * (1 << b)) is (a << b) */
10389 if (TREE_CODE (arg1) == LSHIFT_EXPR
10390 && integer_onep (TREE_OPERAND (arg1, 0)))
10391 return fold_build2_loc (loc, LSHIFT_EXPR, type, op0,
10392 TREE_OPERAND (arg1, 1));
10393 if (TREE_CODE (arg0) == LSHIFT_EXPR
10394 && integer_onep (TREE_OPERAND (arg0, 0)))
10395 return fold_build2_loc (loc, LSHIFT_EXPR, type, op1,
10396 TREE_OPERAND (arg0, 1));
10398 /* (A + A) * C -> A * 2 * C */
10399 if (TREE_CODE (arg0) == PLUS_EXPR
10400 && TREE_CODE (arg1) == INTEGER_CST
10401 && operand_equal_p (TREE_OPERAND (arg0, 0),
10402 TREE_OPERAND (arg0, 1), 0))
10403 return fold_build2_loc (loc, MULT_EXPR, type,
10404 omit_one_operand_loc (loc, type,
10405 TREE_OPERAND (arg0, 0),
10406 TREE_OPERAND (arg0, 1)),
10407 fold_build2_loc (loc, MULT_EXPR, type,
10408 build_int_cst (type, 2) , arg1));
10410 strict_overflow_p = false;
10411 if (TREE_CODE (arg1) == INTEGER_CST
10412 && 0 != (tem = extract_muldiv (op0, arg1, code, NULL_TREE,
10413 &strict_overflow_p)))
10415 if (strict_overflow_p)
10416 fold_overflow_warning (("assuming signed overflow does not "
10417 "occur when simplifying "
10418 "multiplication"),
10419 WARN_STRICT_OVERFLOW_MISC);
10420 return fold_convert_loc (loc, type, tem);
10423 /* Optimize z * conj(z) for integer complex numbers. */
10424 if (TREE_CODE (arg0) == CONJ_EXPR
10425 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
10426 return fold_mult_zconjz (loc, type, arg1);
10427 if (TREE_CODE (arg1) == CONJ_EXPR
10428 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
10429 return fold_mult_zconjz (loc, type, arg0);
10431 else
10433 /* Maybe fold x * 0 to 0. The expressions aren't the same
10434 when x is NaN, since x * 0 is also NaN. Nor are they the
10435 same in modes with signed zeros, since multiplying a
10436 negative value by 0 gives -0, not +0. */
10437 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0)))
10438 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
10439 && real_zerop (arg1))
10440 return omit_one_operand_loc (loc, type, arg1, arg0);
10441 /* In IEEE floating point, x*1 is not equivalent to x for snans.
10442 Likewise for complex arithmetic with signed zeros. */
10443 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
10444 && (!HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
10445 || !COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0)))
10446 && real_onep (arg1))
10447 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
10449 /* Transform x * -1.0 into -x. */
10450 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
10451 && (!HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
10452 || !COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0)))
10453 && real_minus_onep (arg1))
10454 return fold_convert_loc (loc, type, negate_expr (arg0));
10456 /* Convert (C1/X)*C2 into (C1*C2)/X. This transformation may change
10457 the result for floating-point types due to rounding, so it is applied
10458 only if -fassociative-math was specified. */
10459 if (flag_associative_math
10460 && TREE_CODE (arg0) == RDIV_EXPR
10461 && TREE_CODE (arg1) == REAL_CST
10462 && TREE_CODE (TREE_OPERAND (arg0, 0)) == REAL_CST)
10464 tree tem = const_binop (MULT_EXPR, TREE_OPERAND (arg0, 0),
10465 arg1);
10466 if (tem)
10467 return fold_build2_loc (loc, RDIV_EXPR, type, tem,
10468 TREE_OPERAND (arg0, 1));
10471 /* Strip sign operations from X in X*X, i.e. -Y*-Y -> Y*Y. */
10472 if (operand_equal_p (arg0, arg1, 0))
10474 tree tem = fold_strip_sign_ops (arg0);
10475 if (tem != NULL_TREE)
10477 tem = fold_convert_loc (loc, type, tem);
10478 return fold_build2_loc (loc, MULT_EXPR, type, tem, tem);
10482 /* Fold z * +-I to __complex__ (-+__imag z, +-__real z).
10483 This is not the same for NaNs or if signed zeros are
10484 involved. */
10485 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0)))
10486 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
10487 && COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0))
10488 && TREE_CODE (arg1) == COMPLEX_CST
10489 && real_zerop (TREE_REALPART (arg1)))
10491 tree rtype = TREE_TYPE (TREE_TYPE (arg0));
10492 if (real_onep (TREE_IMAGPART (arg1)))
10493 return
10494 fold_build2_loc (loc, COMPLEX_EXPR, type,
10495 negate_expr (fold_build1_loc (loc, IMAGPART_EXPR,
10496 rtype, arg0)),
10497 fold_build1_loc (loc, REALPART_EXPR, rtype, arg0));
10498 else if (real_minus_onep (TREE_IMAGPART (arg1)))
10499 return
10500 fold_build2_loc (loc, COMPLEX_EXPR, type,
10501 fold_build1_loc (loc, IMAGPART_EXPR, rtype, arg0),
10502 negate_expr (fold_build1_loc (loc, REALPART_EXPR,
10503 rtype, arg0)));
10506 /* Optimize z * conj(z) for floating point complex numbers.
10507 Guarded by flag_unsafe_math_optimizations as non-finite
10508 imaginary components don't produce scalar results. */
10509 if (flag_unsafe_math_optimizations
10510 && TREE_CODE (arg0) == CONJ_EXPR
10511 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
10512 return fold_mult_zconjz (loc, type, arg1);
10513 if (flag_unsafe_math_optimizations
10514 && TREE_CODE (arg1) == CONJ_EXPR
10515 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
10516 return fold_mult_zconjz (loc, type, arg0);
10518 if (flag_unsafe_math_optimizations)
10520 enum built_in_function fcode0 = builtin_mathfn_code (arg0);
10521 enum built_in_function fcode1 = builtin_mathfn_code (arg1);
10523 /* Optimizations of root(...)*root(...). */
10524 if (fcode0 == fcode1 && BUILTIN_ROOT_P (fcode0))
10526 tree rootfn, arg;
10527 tree arg00 = CALL_EXPR_ARG (arg0, 0);
10528 tree arg10 = CALL_EXPR_ARG (arg1, 0);
10530 /* Optimize sqrt(x)*sqrt(x) as x. */
10531 if (BUILTIN_SQRT_P (fcode0)
10532 && operand_equal_p (arg00, arg10, 0)
10533 && ! HONOR_SNANS (TYPE_MODE (type)))
10534 return arg00;
10536 /* Optimize root(x)*root(y) as root(x*y). */
10537 rootfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
10538 arg = fold_build2_loc (loc, MULT_EXPR, type, arg00, arg10);
10539 return build_call_expr_loc (loc, rootfn, 1, arg);
10542 /* Optimize expN(x)*expN(y) as expN(x+y). */
10543 if (fcode0 == fcode1 && BUILTIN_EXPONENT_P (fcode0))
10545 tree expfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
10546 tree arg = fold_build2_loc (loc, PLUS_EXPR, type,
10547 CALL_EXPR_ARG (arg0, 0),
10548 CALL_EXPR_ARG (arg1, 0));
10549 return build_call_expr_loc (loc, expfn, 1, arg);
10552 /* Optimizations of pow(...)*pow(...). */
10553 if ((fcode0 == BUILT_IN_POW && fcode1 == BUILT_IN_POW)
10554 || (fcode0 == BUILT_IN_POWF && fcode1 == BUILT_IN_POWF)
10555 || (fcode0 == BUILT_IN_POWL && fcode1 == BUILT_IN_POWL))
10557 tree arg00 = CALL_EXPR_ARG (arg0, 0);
10558 tree arg01 = CALL_EXPR_ARG (arg0, 1);
10559 tree arg10 = CALL_EXPR_ARG (arg1, 0);
10560 tree arg11 = CALL_EXPR_ARG (arg1, 1);
10562 /* Optimize pow(x,y)*pow(z,y) as pow(x*z,y). */
10563 if (operand_equal_p (arg01, arg11, 0))
10565 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
10566 tree arg = fold_build2_loc (loc, MULT_EXPR, type,
10567 arg00, arg10);
10568 return build_call_expr_loc (loc, powfn, 2, arg, arg01);
10571 /* Optimize pow(x,y)*pow(x,z) as pow(x,y+z). */
10572 if (operand_equal_p (arg00, arg10, 0))
10574 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
10575 tree arg = fold_build2_loc (loc, PLUS_EXPR, type,
10576 arg01, arg11);
10577 return build_call_expr_loc (loc, powfn, 2, arg00, arg);
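/* Editorial illustration, not part of fold-const.c: the pow
   combinations above at the source level.  */
#if 0
#include <math.h>
static double
example_pow_combine (double x, double y, double z)
{
  /* With -funsafe-math-optimizations this folds to
     pow (x, y + z); with a shared exponent,
     pow (x, y) * pow (z, y) folds to pow (x * z, y).  */
  return pow (x, y) * pow (x, z);
}
#endif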
10581 /* Optimize tan(x)*cos(x) as sin(x). */
10582 if (((fcode0 == BUILT_IN_TAN && fcode1 == BUILT_IN_COS)
10583 || (fcode0 == BUILT_IN_TANF && fcode1 == BUILT_IN_COSF)
10584 || (fcode0 == BUILT_IN_TANL && fcode1 == BUILT_IN_COSL)
10585 || (fcode0 == BUILT_IN_COS && fcode1 == BUILT_IN_TAN)
10586 || (fcode0 == BUILT_IN_COSF && fcode1 == BUILT_IN_TANF)
10587 || (fcode0 == BUILT_IN_COSL && fcode1 == BUILT_IN_TANL))
10588 && operand_equal_p (CALL_EXPR_ARG (arg0, 0),
10589 CALL_EXPR_ARG (arg1, 0), 0))
10591 tree sinfn = mathfn_built_in (type, BUILT_IN_SIN);
10593 if (sinfn != NULL_TREE)
10594 return build_call_expr_loc (loc, sinfn, 1,
10595 CALL_EXPR_ARG (arg0, 0));
10598 /* Optimize x*pow(x,c) as pow(x,c+1). */
10599 if (fcode1 == BUILT_IN_POW
10600 || fcode1 == BUILT_IN_POWF
10601 || fcode1 == BUILT_IN_POWL)
10603 tree arg10 = CALL_EXPR_ARG (arg1, 0);
10604 tree arg11 = CALL_EXPR_ARG (arg1, 1);
10605 if (TREE_CODE (arg11) == REAL_CST
10606 && !TREE_OVERFLOW (arg11)
10607 && operand_equal_p (arg0, arg10, 0))
10609 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg1), 0);
10610 REAL_VALUE_TYPE c;
10611 tree arg;
10613 c = TREE_REAL_CST (arg11);
10614 real_arithmetic (&c, PLUS_EXPR, &c, &dconst1);
10615 arg = build_real (type, c);
10616 return build_call_expr_loc (loc, powfn, 2, arg0, arg);
10620 /* Optimize pow(x,c)*x as pow(x,c+1). */
10621 if (fcode0 == BUILT_IN_POW
10622 || fcode0 == BUILT_IN_POWF
10623 || fcode0 == BUILT_IN_POWL)
10625 tree arg00 = CALL_EXPR_ARG (arg0, 0);
10626 tree arg01 = CALL_EXPR_ARG (arg0, 1);
10627 if (TREE_CODE (arg01) == REAL_CST
10628 && !TREE_OVERFLOW (arg01)
10629 && operand_equal_p (arg1, arg00, 0))
10631 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
10632 REAL_VALUE_TYPE c;
10633 tree arg;
10635 c = TREE_REAL_CST (arg01);
10636 real_arithmetic (&c, PLUS_EXPR, &c, &dconst1);
10637 arg = build_real (type, c);
10638 return build_call_expr_loc (loc, powfn, 2, arg1, arg);
10642 /* Optimize x*x as pow(x,2.0), which is expanded as x*x. */
10643 if (optimize_function_for_speed_p (cfun)
10644 && operand_equal_p (arg0, arg1, 0))
10646 tree powfn = mathfn_built_in (type, BUILT_IN_POW);
10648 if (powfn)
10650 tree arg = build_real (type, dconst2);
10651 return build_call_expr_loc (loc, powfn, 2, arg0, arg);
10656 goto associate;
10658 case BIT_IOR_EXPR:
10659 bit_ior:
10660 if (integer_all_onesp (arg1))
10661 return omit_one_operand_loc (loc, type, arg1, arg0);
10662 if (integer_zerop (arg1))
10663 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
10664 if (operand_equal_p (arg0, arg1, 0))
10665 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
10667 /* ~X | X is -1. */
10668 if (TREE_CODE (arg0) == BIT_NOT_EXPR
10669 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
10671 t1 = fold_convert_loc (loc, type, integer_zero_node);
10672 t1 = fold_unary_loc (loc, BIT_NOT_EXPR, type, t1);
10673 return omit_one_operand_loc (loc, type, t1, arg1);
10676 /* X | ~X is -1. */
10677 if (TREE_CODE (arg1) == BIT_NOT_EXPR
10678 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
10680 t1 = fold_convert_loc (loc, type, integer_zero_node);
10681 t1 = fold_unary_loc (loc, BIT_NOT_EXPR, type, t1);
10682 return omit_one_operand_loc (loc, type, t1, arg0);
10685 /* Canonicalize (X & C1) | C2. */
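/* Three outcomes are possible.  If (C1 & C2) == C1, every bit that
   C1 lets through is forced to 1 by C2 anyway, so the result is just
   C2.  If (C1 | C2) == ~0, every bit the AND clears is set again by
   the OR, so the AND drops out and this becomes X | C2.  Otherwise
   the bits C2 sets anyway are cleared from C1, e.g. (X & 0x0f) | 0x06
   becomes (X & 0x09) | 0x06 -- except that C1 is instead widened to a
   full mode mask such as 0xff when C1 | C2 covers one, since an AND
   with a mode mask is a cheap zero extension.  The constants are
   manipulated as (high, low) HOST_WIDE_INT pairs because they may be
   double-word values.  */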
10686 if (TREE_CODE (arg0) == BIT_AND_EXPR
10687 && TREE_CODE (arg1) == INTEGER_CST
10688 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
10690 unsigned HOST_WIDE_INT hi1, lo1, hi2, lo2, hi3, lo3, mlo, mhi;
10691 int width = TYPE_PRECISION (type), w;
10692 hi1 = TREE_INT_CST_HIGH (TREE_OPERAND (arg0, 1));
10693 lo1 = TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1));
10694 hi2 = TREE_INT_CST_HIGH (arg1);
10695 lo2 = TREE_INT_CST_LOW (arg1);
10697 /* If (C1&C2) == C1, then (X&C1)|C2 becomes (X,C2). */
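/* The (X,C2) notation denotes a compound expression: X is kept only
   for its side effects and C2 supplies the value, which is what
   omit_one_operand builds.  */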
10698 if ((hi1 & hi2) == hi1 && (lo1 & lo2) == lo1)
10699 return omit_one_operand_loc (loc, type, arg1,
10700 TREE_OPERAND (arg0, 0));
10702 if (width > HOST_BITS_PER_WIDE_INT)
10704 mhi = (unsigned HOST_WIDE_INT) -1
10705 >> (2 * HOST_BITS_PER_WIDE_INT - width);
10706 mlo = -1;
10708 else
10710 mhi = 0;
10711 mlo = (unsigned HOST_WIDE_INT) -1
10712 >> (HOST_BITS_PER_WIDE_INT - width);
10715 /* If (C1|C2) == ~0 then (X&C1)|C2 becomes X|C2. */
10716 if ((~(hi1 | hi2) & mhi) == 0 && (~(lo1 | lo2) & mlo) == 0)
10717 return fold_build2_loc (loc, BIT_IOR_EXPR, type,
10718 TREE_OPERAND (arg0, 0), arg1);
10720 /* Minimize the number of bits set in C1, i.e. C1 := C1 & ~C2,
10721 unless (C1 & ~C2) | (C2 & C3) for some C3 is a mask of some
10722 mode which allows further optimizations. */
10723 hi1 &= mhi;
10724 lo1 &= mlo;
10725 hi2 &= mhi;
10726 lo2 &= mlo;
10727 hi3 = hi1 & ~hi2;
10728 lo3 = lo1 & ~lo2;
10729 for (w = BITS_PER_UNIT;
10730 w <= width && w <= HOST_BITS_PER_WIDE_INT;
10731 w <<= 1)
10733 unsigned HOST_WIDE_INT mask
10734 = (unsigned HOST_WIDE_INT) -1 >> (HOST_BITS_PER_WIDE_INT - w);
10735 if (((lo1 | lo2) & mask) == mask
10736 && (lo1 & ~mask) == 0 && hi1 == 0)
10738 hi3 = 0;
10739 lo3 = mask;
10740 break;
10743 if (hi3 != hi1 || lo3 != lo1)
10744 return fold_build2_loc (loc, BIT_IOR_EXPR, type,
10745 fold_build2_loc (loc, BIT_AND_EXPR, type,
10746 TREE_OPERAND (arg0, 0),
10747 build_int_cst_wide (type,
10748 lo3, hi3)),
10749 arg1);
10752 /* (X & Y) | Y is (X, Y). */
10753 if (TREE_CODE (arg0) == BIT_AND_EXPR
10754 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
10755 return omit_one_operand_loc (loc, type, arg1, TREE_OPERAND (arg0, 0));
10756 /* (X & Y) | X is (Y, X). */
10757 if (TREE_CODE (arg0) == BIT_AND_EXPR
10758 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
10759 && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
10760 return omit_one_operand_loc (loc, type, arg1, TREE_OPERAND (arg0, 1));
10761 /* X | (X & Y) is (Y, X). */
10762 if (TREE_CODE (arg1) == BIT_AND_EXPR
10763 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0)
10764 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 1)))
10765 return omit_one_operand_loc (loc, type, arg0, TREE_OPERAND (arg1, 1));
10766 /* X | (Y & X) is (Y, X). */
10767 if (TREE_CODE (arg1) == BIT_AND_EXPR
10768 && operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0)
10769 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
10770 return omit_one_operand_loc (loc, type, arg0, TREE_OPERAND (arg1, 0));
10772 t1 = distribute_bit_expr (loc, code, type, arg0, arg1);
10773 if (t1 != NULL_TREE)
10774 return t1;
10776 /* Convert (or (not arg0) (not arg1)) to (not (and (arg0) (arg1))).
10778 This results in more efficient code for machines without a NAND
10779 instruction. Combine will canonicalize to the first form
10780 which will allow use of NAND instructions provided by the
10781 backend if they exist. */
10782 if (TREE_CODE (arg0) == BIT_NOT_EXPR
10783 && TREE_CODE (arg1) == BIT_NOT_EXPR)
10785 return
10786 fold_build1_loc (loc, BIT_NOT_EXPR, type,
10787 build2 (BIT_AND_EXPR, type,
10788 fold_convert_loc (loc, type,
10789 TREE_OPERAND (arg0, 0)),
10790 fold_convert_loc (loc, type,
10791 TREE_OPERAND (arg1, 0))));
10794 /* See if this can be simplified into a rotate first. If that
10795 is unsuccessful continue in the association code. */
10796 goto bit_rotate;
10798 case BIT_XOR_EXPR:
10799 if (integer_zerop (arg1))
10800 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
10801 if (integer_all_onesp (arg1))
10802 return fold_build1_loc (loc, BIT_NOT_EXPR, type, op0);
10803 if (operand_equal_p (arg0, arg1, 0))
10804 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
10806 /* ~X ^ X is -1. */
10807 if (TREE_CODE (arg0) == BIT_NOT_EXPR
10808 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
10810 t1 = fold_convert_loc (loc, type, integer_zero_node);
10811 t1 = fold_unary_loc (loc, BIT_NOT_EXPR, type, t1);
10812 return omit_one_operand_loc (loc, type, t1, arg1);
10815 /* X ^ ~X is -1. */
10816 if (TREE_CODE (arg1) == BIT_NOT_EXPR
10817 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
10819 t1 = fold_convert_loc (loc, type, integer_zero_node);
10820 t1 = fold_unary_loc (loc, BIT_NOT_EXPR, type, t1);
10821 return omit_one_operand_loc (loc, type, t1, arg0);
10824 /* If we are XORing two BIT_AND_EXPR's, both of which are and'ing
10825 with a constant, and the two constants have no bits in common,
10826 we should treat this as a BIT_IOR_EXPR since this may produce more
10827 simplifications. */
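/* E.g. (X & 4) ^ (Y & 8) is the same as (X & 4) | (Y & 8), because
   A ^ B and A | B agree whenever A & B == 0.  */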
10828 if (TREE_CODE (arg0) == BIT_AND_EXPR
10829 && TREE_CODE (arg1) == BIT_AND_EXPR
10830 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
10831 && TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
10832 && integer_zerop (const_binop (BIT_AND_EXPR,
10833 TREE_OPERAND (arg0, 1),
10834 TREE_OPERAND (arg1, 1))))
10836 code = BIT_IOR_EXPR;
10837 goto bit_ior;
10840 /* (X | Y) ^ X -> Y & ~X.  */
10841 if (TREE_CODE (arg0) == BIT_IOR_EXPR
10842 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
10844 tree t2 = TREE_OPERAND (arg0, 1);
10845 t1 = fold_build1_loc (loc, BIT_NOT_EXPR, TREE_TYPE (arg1),
10846 arg1);
10847 t1 = fold_build2_loc (loc, BIT_AND_EXPR, type,
10848 fold_convert_loc (loc, type, t2),
10849 fold_convert_loc (loc, type, t1));
10850 return t1;
10853 /* (Y | X) ^ X -> Y & ~X.  */
10854 if (TREE_CODE (arg0) == BIT_IOR_EXPR
10855 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
10857 tree t2 = TREE_OPERAND (arg0, 0);
10858 t1 = fold_build1_loc (loc, BIT_NOT_EXPR, TREE_TYPE (arg1),
10859 arg1);
10860 t1 = fold_build2_loc (loc, BIT_AND_EXPR, type,
10861 fold_convert_loc (loc, type, t2),
10862 fold_convert_loc (loc, type, t1));
10863 return t1;
10866 /* X ^ (X | Y) -> Y & ~X.  */
10867 if (TREE_CODE (arg1) == BIT_IOR_EXPR
10868 && operand_equal_p (TREE_OPERAND (arg1, 0), arg0, 0))
10870 tree t2 = TREE_OPERAND (arg1, 1);
10871 t1 = fold_build1_loc (loc, BIT_NOT_EXPR, TREE_TYPE (arg0),
10872 arg0);
10873 t1 = fold_build2_loc (loc, BIT_AND_EXPR, type,
10874 fold_convert_loc (loc, type, t2),
10875 fold_convert_loc (loc, type, t1));
10876 return t1;
10879 /* X ^ (Y | X) -> Y & ~X.  */
10880 if (TREE_CODE (arg1) == BIT_IOR_EXPR
10881 && operand_equal_p (TREE_OPERAND (arg1, 1), arg0, 0))
10883 tree t2 = TREE_OPERAND (arg1, 0);
10884 t1 = fold_build1_loc (loc, BIT_NOT_EXPR, TREE_TYPE (arg0),
10885 arg0);
10886 t1 = fold_build2_loc (loc, BIT_AND_EXPR, type,
10887 fold_convert_loc (loc, type, t2),
10888 fold_convert_loc (loc, type, t1));
10889 return t1;
10892 /* Convert ~X ^ ~Y to X ^ Y. */
10893 if (TREE_CODE (arg0) == BIT_NOT_EXPR
10894 && TREE_CODE (arg1) == BIT_NOT_EXPR)
10895 return fold_build2_loc (loc, code, type,
10896 fold_convert_loc (loc, type,
10897 TREE_OPERAND (arg0, 0)),
10898 fold_convert_loc (loc, type,
10899 TREE_OPERAND (arg1, 0)));
10901 /* Convert ~X ^ C to X ^ ~C. */
10902 if (TREE_CODE (arg0) == BIT_NOT_EXPR
10903 && TREE_CODE (arg1) == INTEGER_CST)
10904 return fold_build2_loc (loc, code, type,
10905 fold_convert_loc (loc, type,
10906 TREE_OPERAND (arg0, 0)),
10907 fold_build1_loc (loc, BIT_NOT_EXPR, type, arg1));
10909 /* Fold (X & 1) ^ 1 as (X & 1) == 0. */
10910 if (TREE_CODE (arg0) == BIT_AND_EXPR
10911 && integer_onep (TREE_OPERAND (arg0, 1))
10912 && integer_onep (arg1))
10913 return fold_build2_loc (loc, EQ_EXPR, type, arg0,
10914 build_int_cst (TREE_TYPE (arg0), 0));
10916 /* Fold (X & Y) ^ Y as ~X & Y. */
10917 if (TREE_CODE (arg0) == BIT_AND_EXPR
10918 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
10920 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
10921 return fold_build2_loc (loc, BIT_AND_EXPR, type,
10922 fold_build1_loc (loc, BIT_NOT_EXPR, type, tem),
10923 fold_convert_loc (loc, type, arg1));
10925 /* Fold (X & Y) ^ X as ~Y & X. */
10926 if (TREE_CODE (arg0) == BIT_AND_EXPR
10927 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
10928 && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
10930 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
10931 return fold_build2_loc (loc, BIT_AND_EXPR, type,
10932 fold_build1_loc (loc, BIT_NOT_EXPR, type, tem),
10933 fold_convert_loc (loc, type, arg1));
10935 /* Fold X ^ (X & Y) as X & ~Y. */
10936 if (TREE_CODE (arg1) == BIT_AND_EXPR
10937 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
10939 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 1));
10940 return fold_build2_loc (loc, BIT_AND_EXPR, type,
10941 fold_convert_loc (loc, type, arg0),
10942 fold_build1_loc (loc, BIT_NOT_EXPR, type, tem));
10944 /* Fold X ^ (Y & X) as ~Y & X. */
10945 if (TREE_CODE (arg1) == BIT_AND_EXPR
10946 && operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0)
10947 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
10949 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 0));
10950 return fold_build2_loc (loc, BIT_AND_EXPR, type,
10951 fold_build1_loc (loc, BIT_NOT_EXPR, type, tem),
10952 fold_convert_loc (loc, type, arg0));
10955 /* See if this can be simplified into a rotate first. If that
10956 is unsuccessful continue in the association code. */
10957 goto bit_rotate;
10959 case BIT_AND_EXPR:
10960 if (integer_all_onesp (arg1))
10961 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
10962 if (integer_zerop (arg1))
10963 return omit_one_operand_loc (loc, type, arg1, arg0);
10964 if (operand_equal_p (arg0, arg1, 0))
10965 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
10967 /* ~X & X is always zero. */
10968 if (TREE_CODE (arg0) == BIT_NOT_EXPR
10969 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
10970 return omit_one_operand_loc (loc, type, integer_zero_node, arg1);
10972 /* X & ~X is always zero. */
10973 if (TREE_CODE (arg1) == BIT_NOT_EXPR
10974 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
10975 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
10977 /* Canonicalize (X | C1) & C2 as (X & C2) | (C1 & C2). */
10978 if (TREE_CODE (arg0) == BIT_IOR_EXPR
10979 && TREE_CODE (arg1) == INTEGER_CST
10980 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
10982 tree tmp1 = fold_convert_loc (loc, type, arg1);
10983 tree tmp2 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
10984 tree tmp3 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
10985 tmp2 = fold_build2_loc (loc, BIT_AND_EXPR, type, tmp2, tmp1);
10986 tmp3 = fold_build2_loc (loc, BIT_AND_EXPR, type, tmp3, tmp1);
10987 return
10988 fold_convert_loc (loc, type,
10989 fold_build2_loc (loc, BIT_IOR_EXPR,
10990 type, tmp2, tmp3));
10993 /* (X | Y) & Y is (X, Y). */
10994 if (TREE_CODE (arg0) == BIT_IOR_EXPR
10995 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
10996 return omit_one_operand_loc (loc, type, arg1, TREE_OPERAND (arg0, 0));
10997 /* (X | Y) & X is (Y, X). */
10998 if (TREE_CODE (arg0) == BIT_IOR_EXPR
10999 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
11000 && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
11001 return omit_one_operand_loc (loc, type, arg1, TREE_OPERAND (arg0, 1));
11002 /* X & (X | Y) is (Y, X). */
11003 if (TREE_CODE (arg1) == BIT_IOR_EXPR
11004 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0)
11005 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 1)))
11006 return omit_one_operand_loc (loc, type, arg0, TREE_OPERAND (arg1, 1));
11007 /* X & (Y | X) is (Y, X). */
11008 if (TREE_CODE (arg1) == BIT_IOR_EXPR
11009 && operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0)
11010 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
11011 return omit_one_operand_loc (loc, type, arg0, TREE_OPERAND (arg1, 0));
11013 /* Fold (X ^ 1) & 1 as (X & 1) == 0. */
11014 if (TREE_CODE (arg0) == BIT_XOR_EXPR
11015 && integer_onep (TREE_OPERAND (arg0, 1))
11016 && integer_onep (arg1))
11018 tem = TREE_OPERAND (arg0, 0);
11019 return fold_build2_loc (loc, EQ_EXPR, type,
11020 fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (tem), tem,
11021 build_int_cst (TREE_TYPE (tem), 1)),
11022 build_int_cst (TREE_TYPE (tem), 0));
11024 /* Fold ~X & 1 as (X & 1) == 0. */
11025 if (TREE_CODE (arg0) == BIT_NOT_EXPR
11026 && integer_onep (arg1))
11028 tem = TREE_OPERAND (arg0, 0);
11029 return fold_build2_loc (loc, EQ_EXPR, type,
11030 fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (tem), tem,
11031 build_int_cst (TREE_TYPE (tem), 1)),
11032 build_int_cst (TREE_TYPE (tem), 0));
11035 /* Fold (X ^ Y) & Y as ~X & Y. */
11036 if (TREE_CODE (arg0) == BIT_XOR_EXPR
11037 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
11039 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
11040 return fold_build2_loc (loc, BIT_AND_EXPR, type,
11041 fold_build1_loc (loc, BIT_NOT_EXPR, type, tem),
11042 fold_convert_loc (loc, type, arg1));
11044 /* Fold (X ^ Y) & X as ~Y & X. */
11045 if (TREE_CODE (arg0) == BIT_XOR_EXPR
11046 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
11047 && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
11049 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
11050 return fold_build2_loc (loc, BIT_AND_EXPR, type,
11051 fold_build1_loc (loc, BIT_NOT_EXPR, type, tem),
11052 fold_convert_loc (loc, type, arg1));
11054 /* Fold X & (X ^ Y) as X & ~Y. */
11055 if (TREE_CODE (arg1) == BIT_XOR_EXPR
11056 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
11058 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 1));
11059 return fold_build2_loc (loc, BIT_AND_EXPR, type,
11060 fold_convert_loc (loc, type, arg0),
11061 fold_build1_loc (loc, BIT_NOT_EXPR, type, tem));
11063 /* Fold X & (Y ^ X) as ~Y & X. */
11064 if (TREE_CODE (arg1) == BIT_XOR_EXPR
11065 && operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0)
11066 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
11068 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 0));
11069 return fold_build2_loc (loc, BIT_AND_EXPR, type,
11070 fold_build1_loc (loc, BIT_NOT_EXPR, type, tem),
11071 fold_convert_loc (loc, type, arg0));
11074 /* For constants M and N, if M == (1LL << cst) - 1 && (N & M) == M,
11075 ((A & N) + B) & M -> (A + B) & M
11076 Similarly if (N & M) == 0,
11077 ((A | N) + B) & M -> (A + B) & M
11078 and for - instead of + (or unary - instead of +)
11079 and/or ^ instead of |.
11080 If B is constant and (B & M) == 0, fold into A & M. */
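/* E.g. ((A & 7) + B) & 3 -> (A + B) & 3: M + 1 is a power of two,
   so the low bits of the sum depend only on the low bits of the
   operands, and the inner AND keeps every bit that M selects.
   Likewise ((A | 8) + B) & 3 -> (A + B) & 3, since the OR only
   touches bits that the outer mask discards anyway.  */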
11081 if (host_integerp (arg1, 1))
11083 unsigned HOST_WIDE_INT cst1 = tree_low_cst (arg1, 1);
11084 if (~cst1 && (cst1 & (cst1 + 1)) == 0
11085 && INTEGRAL_TYPE_P (TREE_TYPE (arg0))
11086 && (TREE_CODE (arg0) == PLUS_EXPR
11087 || TREE_CODE (arg0) == MINUS_EXPR
11088 || TREE_CODE (arg0) == NEGATE_EXPR)
11089 && (TYPE_OVERFLOW_WRAPS (TREE_TYPE (arg0))
11090 || TREE_CODE (TREE_TYPE (arg0)) == INTEGER_TYPE))
11092 tree pmop[2];
11093 int which = 0;
11094 unsigned HOST_WIDE_INT cst0;
11096 /* Now we know that arg0 is (C + D) or (C - D) or
11097 -C and arg1 (M) == (1LL << cst) - 1.
11098 Store C into PMOP[0] and D into PMOP[1]. */
11099 pmop[0] = TREE_OPERAND (arg0, 0);
11100 pmop[1] = NULL;
11101 if (TREE_CODE (arg0) != NEGATE_EXPR)
11103 pmop[1] = TREE_OPERAND (arg0, 1);
11104 which = 1;
11107 if (!host_integerp (TYPE_MAX_VALUE (TREE_TYPE (arg0)), 1)
11108 || (tree_low_cst (TYPE_MAX_VALUE (TREE_TYPE (arg0)), 1)
11109 & cst1) != cst1)
11110 which = -1;
11112 for (; which >= 0; which--)
11113 switch (TREE_CODE (pmop[which]))
11115 case BIT_AND_EXPR:
11116 case BIT_IOR_EXPR:
11117 case BIT_XOR_EXPR:
11118 if (TREE_CODE (TREE_OPERAND (pmop[which], 1))
11119 != INTEGER_CST)
11120 break;
11121 /* tree_low_cst not used, because we don't care about
11122 the upper bits. */
11123 cst0 = TREE_INT_CST_LOW (TREE_OPERAND (pmop[which], 1));
11124 cst0 &= cst1;
11125 if (TREE_CODE (pmop[which]) == BIT_AND_EXPR)
11127 if (cst0 != cst1)
11128 break;
11130 else if (cst0 != 0)
11131 break;
11132 /* If C or D is of the form (A & N) where
11133 (N & M) == M, or of the form (A | N) or
11134 (A ^ N) where (N & M) == 0, replace it with A. */
11135 pmop[which] = TREE_OPERAND (pmop[which], 0);
11136 break;
11137 case INTEGER_CST:
11138 /* If C or D is an N where (N & M) == 0, it can be
11139 omitted (assumed 0). */
11140 if ((TREE_CODE (arg0) == PLUS_EXPR
11141 || (TREE_CODE (arg0) == MINUS_EXPR && which == 0))
11142 && (TREE_INT_CST_LOW (pmop[which]) & cst1) == 0)
11143 pmop[which] = NULL;
11144 break;
11145 default:
11146 break;
11149 /* Only build anything new if we optimized one or both arguments
11150 above. */
11151 if (pmop[0] != TREE_OPERAND (arg0, 0)
11152 || (TREE_CODE (arg0) != NEGATE_EXPR
11153 && pmop[1] != TREE_OPERAND (arg0, 1)))
11155 tree utype = TREE_TYPE (arg0);
11156 if (! TYPE_OVERFLOW_WRAPS (TREE_TYPE (arg0)))
11158 /* Perform the operations in a type that has defined
11159 overflow behavior. */
11160 utype = unsigned_type_for (TREE_TYPE (arg0));
11161 if (pmop[0] != NULL)
11162 pmop[0] = fold_convert_loc (loc, utype, pmop[0]);
11163 if (pmop[1] != NULL)
11164 pmop[1] = fold_convert_loc (loc, utype, pmop[1]);
11167 if (TREE_CODE (arg0) == NEGATE_EXPR)
11168 tem = fold_build1_loc (loc, NEGATE_EXPR, utype, pmop[0]);
11169 else if (TREE_CODE (arg0) == PLUS_EXPR)
11171 if (pmop[0] != NULL && pmop[1] != NULL)
11172 tem = fold_build2_loc (loc, PLUS_EXPR, utype,
11173 pmop[0], pmop[1]);
11174 else if (pmop[0] != NULL)
11175 tem = pmop[0];
11176 else if (pmop[1] != NULL)
11177 tem = pmop[1];
11178 else
11179 return build_int_cst (type, 0);
11181 else if (pmop[0] == NULL)
11182 tem = fold_build1_loc (loc, NEGATE_EXPR, utype, pmop[1]);
11183 else
11184 tem = fold_build2_loc (loc, MINUS_EXPR, utype,
11185 pmop[0], pmop[1]);
11186 /* TEM is now the new binary +, - or unary - replacement. */
11187 tem = fold_build2_loc (loc, BIT_AND_EXPR, utype, tem,
11188 fold_convert_loc (loc, utype, arg1));
11189 return fold_convert_loc (loc, type, tem);
11194 t1 = distribute_bit_expr (loc, code, type, arg0, arg1);
11195 if (t1 != NULL_TREE)
11196 return t1;
11197 /* Simplify ((int)c & 0377) into (int)c, if c is unsigned char. */
11198 if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg0) == NOP_EXPR
11199 && TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg0, 0))))
11201 unsigned int prec
11202 = TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg0, 0)));
11204 if (prec < BITS_PER_WORD && prec < HOST_BITS_PER_WIDE_INT
11205 && (~TREE_INT_CST_LOW (arg1)
11206 & (((HOST_WIDE_INT) 1 << prec) - 1)) == 0)
11207 return
11208 fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
11211 /* Convert (and (not arg0) (not arg1)) to (not (or (arg0) (arg1))).
11213 This results in more efficient code for machines without a NOR
11214 instruction. Combine will canonicalize to the first form
11215 which will allow use of NOR instructions provided by the
11216 backend if they exist. */
11217 if (TREE_CODE (arg0) == BIT_NOT_EXPR
11218 && TREE_CODE (arg1) == BIT_NOT_EXPR)
11220 return fold_build1_loc (loc, BIT_NOT_EXPR, type,
11221 build2 (BIT_IOR_EXPR, type,
11222 fold_convert_loc (loc, type,
11223 TREE_OPERAND (arg0, 0)),
11224 fold_convert_loc (loc, type,
11225 TREE_OPERAND (arg1, 0))));
11228 /* If arg0 is derived from the address of an object or function, we may
11229 be able to fold this expression using the object or function's
11230 alignment. */
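/* E.g. the address of a decl known to be 16-byte aligned has
   modulus 16 and residue 0, so ANDing it with 15 folds to the
   constant 0.  */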
11231 if (POINTER_TYPE_P (TREE_TYPE (arg0)) && host_integerp (arg1, 1))
11233 unsigned HOST_WIDE_INT modulus, residue;
11234 unsigned HOST_WIDE_INT low = TREE_INT_CST_LOW (arg1);
11236 modulus = get_pointer_modulus_and_residue (arg0, &residue,
11237 integer_onep (arg1));
11239 /* This works because modulus is a power of 2. If this weren't the
11240 case, we'd have to replace it by its greatest power-of-2
11241 divisor: modulus & -modulus. */
11242 if (low < modulus)
11243 return build_int_cst (type, residue & low);
11246 /* Fold (X << C1) & C2 into (X << C1) & (C2 | ((1 << C1) - 1))
11247 (X >> C1) & C2 into (X >> C1) & (C2 | ~((type) -1 >> C1))
11248 if the new mask might be further optimized. */
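/* E.g. for a 32-bit unsigned X, in (X >> 24) & 0xff the logical
   shift has already cleared the upper 24 bits, so the mask widens to
   0xffffffff and the BIT_AND can then be dropped entirely.  */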
11249 if ((TREE_CODE (arg0) == LSHIFT_EXPR
11250 || TREE_CODE (arg0) == RSHIFT_EXPR)
11251 && host_integerp (TREE_OPERAND (arg0, 1), 1)
11252 && host_integerp (arg1, TYPE_UNSIGNED (TREE_TYPE (arg1)))
11253 && tree_low_cst (TREE_OPERAND (arg0, 1), 1)
11254 < TYPE_PRECISION (TREE_TYPE (arg0))
11255 && TYPE_PRECISION (TREE_TYPE (arg0)) <= HOST_BITS_PER_WIDE_INT
11256 && tree_low_cst (TREE_OPERAND (arg0, 1), 1) > 0)
11258 unsigned int shiftc = tree_low_cst (TREE_OPERAND (arg0, 1), 1);
11259 unsigned HOST_WIDE_INT mask
11260 = tree_low_cst (arg1, TYPE_UNSIGNED (TREE_TYPE (arg1)));
11261 unsigned HOST_WIDE_INT newmask, zerobits = 0;
11262 tree shift_type = TREE_TYPE (arg0);
11264 if (TREE_CODE (arg0) == LSHIFT_EXPR)
11265 zerobits = ((((unsigned HOST_WIDE_INT) 1) << shiftc) - 1);
11266 else if (TREE_CODE (arg0) == RSHIFT_EXPR
11267 && TYPE_PRECISION (TREE_TYPE (arg0))
11268 == GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (arg0))))
11270 unsigned int prec = TYPE_PRECISION (TREE_TYPE (arg0));
11271 tree arg00 = TREE_OPERAND (arg0, 0);
11272 /* See if more bits can be proven as zero because of
11273 zero extension. */
11274 if (TREE_CODE (arg00) == NOP_EXPR
11275 && TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg00, 0))))
11277 tree inner_type = TREE_TYPE (TREE_OPERAND (arg00, 0));
11278 if (TYPE_PRECISION (inner_type)
11279 == GET_MODE_BITSIZE (TYPE_MODE (inner_type))
11280 && TYPE_PRECISION (inner_type) < prec)
11282 prec = TYPE_PRECISION (inner_type);
11283 /* See if we can shorten the right shift. */
11284 if (shiftc < prec)
11285 shift_type = inner_type;
11288 zerobits = ~(unsigned HOST_WIDE_INT) 0;
11289 zerobits >>= HOST_BITS_PER_WIDE_INT - shiftc;
11290 zerobits <<= prec - shiftc;
11291 /* For an arithmetic shift, if the sign bit could be set, zerobits
11292 may actually contain sign bits, so no transformation is
11293 possible, unless MASK masks them all away.  In that
11294 case the shift needs to be converted into a logical shift. */
11295 if (!TYPE_UNSIGNED (TREE_TYPE (arg0))
11296 && prec == TYPE_PRECISION (TREE_TYPE (arg0)))
11298 if ((mask & zerobits) == 0)
11299 shift_type = unsigned_type_for (TREE_TYPE (arg0));
11300 else
11301 zerobits = 0;
11305 /* ((X << 16) & 0xff00) is (X, 0). */
11306 if ((mask & zerobits) == mask)
11307 return omit_one_operand_loc (loc, type,
11308 build_int_cst (type, 0), arg0);
11310 newmask = mask | zerobits;
11311 if (newmask != mask && (newmask & (newmask + 1)) == 0)
11313 unsigned int prec;
11315 /* Only do the transformation if NEWMASK is some integer
11316 mode's mask. */
11317 for (prec = BITS_PER_UNIT;
11318 prec < HOST_BITS_PER_WIDE_INT; prec <<= 1)
11319 if (newmask == (((unsigned HOST_WIDE_INT) 1) << prec) - 1)
11320 break;
11321 if (prec < HOST_BITS_PER_WIDE_INT
11322 || newmask == ~(unsigned HOST_WIDE_INT) 0)
11324 tree newmaskt;
11326 if (shift_type != TREE_TYPE (arg0))
11328 tem = fold_build2_loc (loc, TREE_CODE (arg0), shift_type,
11329 fold_convert_loc (loc, shift_type,
11330 TREE_OPERAND (arg0, 0)),
11331 TREE_OPERAND (arg0, 1));
11332 tem = fold_convert_loc (loc, type, tem);
11334 else
11335 tem = op0;
11336 newmaskt = build_int_cst_type (TREE_TYPE (op1), newmask);
11337 if (!tree_int_cst_equal (newmaskt, arg1))
11338 return fold_build2_loc (loc, BIT_AND_EXPR, type, tem, newmaskt);
11343 goto associate;
11345 case RDIV_EXPR:
11346 /* Don't touch a floating-point divide by zero unless the mode
11347 of the constant can represent infinity. */
11348 if (TREE_CODE (arg1) == REAL_CST
11349 && !MODE_HAS_INFINITIES (TYPE_MODE (TREE_TYPE (arg1)))
11350 && real_zerop (arg1))
11351 return NULL_TREE;
11353 /* Optimize A / A to 1.0 if we don't care about
11354 NaNs or Infinities. Skip the transformation
11355 for non-real operands. */
11356 if (SCALAR_FLOAT_TYPE_P (TREE_TYPE (arg0))
11357 && ! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0)))
11358 && ! HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (arg0)))
11359 && operand_equal_p (arg0, arg1, 0))
11361 tree r = build_real (TREE_TYPE (arg0), dconst1);
11363 return omit_two_operands_loc (loc, type, r, arg0, arg1);
11366 /* The complex version of the above A / A optimization. */
11367 if (COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0))
11368 && operand_equal_p (arg0, arg1, 0))
11370 tree elem_type = TREE_TYPE (TREE_TYPE (arg0));
11371 if (! HONOR_NANS (TYPE_MODE (elem_type))
11372 && ! HONOR_INFINITIES (TYPE_MODE (elem_type)))
11374 tree r = build_real (elem_type, dconst1);
11375 /* omit_two_operands will call fold_convert for us. */
11376 return omit_two_operands_loc (loc, type, r, arg0, arg1);
11380 /* (-A) / (-B) -> A / B */
11381 if (TREE_CODE (arg0) == NEGATE_EXPR && negate_expr_p (arg1))
11382 return fold_build2_loc (loc, RDIV_EXPR, type,
11383 TREE_OPERAND (arg0, 0),
11384 negate_expr (arg1));
11385 if (TREE_CODE (arg1) == NEGATE_EXPR && negate_expr_p (arg0))
11386 return fold_build2_loc (loc, RDIV_EXPR, type,
11387 negate_expr (arg0),
11388 TREE_OPERAND (arg1, 0));
11390 /* In IEEE floating point, x/1 is not equivalent to x for snans. */
11391 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
11392 && real_onep (arg1))
11393 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
11395 /* In IEEE floating point, x/-1 is not equivalent to -x for snans. */
11396 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
11397 && real_minus_onep (arg1))
11398 return non_lvalue_loc (loc, fold_convert_loc (loc, type,
11399 negate_expr (arg0)));
11401 /* If ARG1 is a constant, we can convert this to a multiply by the
11402 reciprocal. This does not have the same rounding properties,
11403 so only do this if -freciprocal-math. We can actually
11404 always safely do it if ARG1 is a power of two, but it's hard to
11405 tell if it is or not in a portable manner. */
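/* E.g. x / 3.0 becomes x * (1.0/3.0) only under -freciprocal-math,
   while x / 4.0 becomes x * 0.25 whenever optimizing, because the
   exact_real_inverse check below verifies the reciprocal is
   exact.  */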
11406 if (TREE_CODE (arg1) == REAL_CST)
11408 if (flag_reciprocal_math
11409 && 0 != (tem = const_binop (code, build_real (type, dconst1),
11410 arg1)))
11411 return fold_build2_loc (loc, MULT_EXPR, type, arg0, tem);
11412 /* Find the reciprocal if optimizing and the result is exact. */
11413 if (optimize)
11415 REAL_VALUE_TYPE r;
11416 r = TREE_REAL_CST (arg1);
11417 if (exact_real_inverse (TYPE_MODE(TREE_TYPE(arg0)), &r))
11419 tem = build_real (type, r);
11420 return fold_build2_loc (loc, MULT_EXPR, type,
11421 fold_convert_loc (loc, type, arg0), tem);
11425 /* Convert A/B/C to A/(B*C). */
11426 if (flag_reciprocal_math
11427 && TREE_CODE (arg0) == RDIV_EXPR)
11428 return fold_build2_loc (loc, RDIV_EXPR, type, TREE_OPERAND (arg0, 0),
11429 fold_build2_loc (loc, MULT_EXPR, type,
11430 TREE_OPERAND (arg0, 1), arg1));
11432 /* Convert A/(B/C) to (A/B)*C. */
11433 if (flag_reciprocal_math
11434 && TREE_CODE (arg1) == RDIV_EXPR)
11435 return fold_build2_loc (loc, MULT_EXPR, type,
11436 fold_build2_loc (loc, RDIV_EXPR, type, arg0,
11437 TREE_OPERAND (arg1, 0)),
11438 TREE_OPERAND (arg1, 1));
11440 /* Convert C1/(X*C2) into (C1/C2)/X. */
11441 if (flag_reciprocal_math
11442 && TREE_CODE (arg1) == MULT_EXPR
11443 && TREE_CODE (arg0) == REAL_CST
11444 && TREE_CODE (TREE_OPERAND (arg1, 1)) == REAL_CST)
11446 tree tem = const_binop (RDIV_EXPR, arg0,
11447 TREE_OPERAND (arg1, 1));
11448 if (tem)
11449 return fold_build2_loc (loc, RDIV_EXPR, type, tem,
11450 TREE_OPERAND (arg1, 0));
11453 if (flag_unsafe_math_optimizations)
11455 enum built_in_function fcode0 = builtin_mathfn_code (arg0);
11456 enum built_in_function fcode1 = builtin_mathfn_code (arg1);
11458 /* Optimize sin(x)/cos(x) as tan(x). */
11459 if (((fcode0 == BUILT_IN_SIN && fcode1 == BUILT_IN_COS)
11460 || (fcode0 == BUILT_IN_SINF && fcode1 == BUILT_IN_COSF)
11461 || (fcode0 == BUILT_IN_SINL && fcode1 == BUILT_IN_COSL))
11462 && operand_equal_p (CALL_EXPR_ARG (arg0, 0),
11463 CALL_EXPR_ARG (arg1, 0), 0))
11465 tree tanfn = mathfn_built_in (type, BUILT_IN_TAN);
11467 if (tanfn != NULL_TREE)
11468 return build_call_expr_loc (loc, tanfn, 1, CALL_EXPR_ARG (arg0, 0));
11471 /* Optimize cos(x)/sin(x) as 1.0/tan(x). */
11472 if (((fcode0 == BUILT_IN_COS && fcode1 == BUILT_IN_SIN)
11473 || (fcode0 == BUILT_IN_COSF && fcode1 == BUILT_IN_SINF)
11474 || (fcode0 == BUILT_IN_COSL && fcode1 == BUILT_IN_SINL))
11475 && operand_equal_p (CALL_EXPR_ARG (arg0, 0),
11476 CALL_EXPR_ARG (arg1, 0), 0))
11478 tree tanfn = mathfn_built_in (type, BUILT_IN_TAN);
11480 if (tanfn != NULL_TREE)
11482 tree tmp = build_call_expr_loc (loc, tanfn, 1,
11483 CALL_EXPR_ARG (arg0, 0));
11484 return fold_build2_loc (loc, RDIV_EXPR, type,
11485 build_real (type, dconst1), tmp);
11489 /* Optimize sin(x)/tan(x) as cos(x) if we don't care about
11490 NaNs or Infinities. */
11491 if (((fcode0 == BUILT_IN_SIN && fcode1 == BUILT_IN_TAN)
11492 || (fcode0 == BUILT_IN_SINF && fcode1 == BUILT_IN_TANF)
11493 || (fcode0 == BUILT_IN_SINL && fcode1 == BUILT_IN_TANL)))
11495 tree arg00 = CALL_EXPR_ARG (arg0, 0);
11496 tree arg01 = CALL_EXPR_ARG (arg1, 0);
11498 if (! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg00)))
11499 && ! HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (arg00)))
11500 && operand_equal_p (arg00, arg01, 0))
11502 tree cosfn = mathfn_built_in (type, BUILT_IN_COS);
11504 if (cosfn != NULL_TREE)
11505 return build_call_expr_loc (loc, cosfn, 1, arg00);
11509 /* Optimize tan(x)/sin(x) as 1.0/cos(x) if we don't care about
11510 NaNs or Infinities. */
11511 if (((fcode0 == BUILT_IN_TAN && fcode1 == BUILT_IN_SIN)
11512 || (fcode0 == BUILT_IN_TANF && fcode1 == BUILT_IN_SINF)
11513 || (fcode0 == BUILT_IN_TANL && fcode1 == BUILT_IN_SINL)))
11515 tree arg00 = CALL_EXPR_ARG (arg0, 0);
11516 tree arg01 = CALL_EXPR_ARG (arg1, 0);
11518 if (! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg00)))
11519 && ! HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (arg00)))
11520 && operand_equal_p (arg00, arg01, 0))
11522 tree cosfn = mathfn_built_in (type, BUILT_IN_COS);
11524 if (cosfn != NULL_TREE)
11526 tree tmp = build_call_expr_loc (loc, cosfn, 1, arg00);
11527 return fold_build2_loc (loc, RDIV_EXPR, type,
11528 build_real (type, dconst1),
11529 tmp);
11534 /* Optimize pow(x,c)/x as pow(x,c-1). */
11535 if (fcode0 == BUILT_IN_POW
11536 || fcode0 == BUILT_IN_POWF
11537 || fcode0 == BUILT_IN_POWL)
11539 tree arg00 = CALL_EXPR_ARG (arg0, 0);
11540 tree arg01 = CALL_EXPR_ARG (arg0, 1);
11541 if (TREE_CODE (arg01) == REAL_CST
11542 && !TREE_OVERFLOW (arg01)
11543 && operand_equal_p (arg1, arg00, 0))
11545 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
11546 REAL_VALUE_TYPE c;
11547 tree arg;
11549 c = TREE_REAL_CST (arg01);
11550 real_arithmetic (&c, MINUS_EXPR, &c, &dconst1);
11551 arg = build_real (type, c);
11552 return build_call_expr_loc (loc, powfn, 2, arg1, arg);
11556 /* Optimize a/root(b/c) into a*root(c/b). */
11557 if (BUILTIN_ROOT_P (fcode1))
11559 tree rootarg = CALL_EXPR_ARG (arg1, 0);
11561 if (TREE_CODE (rootarg) == RDIV_EXPR)
11563 tree rootfn = TREE_OPERAND (CALL_EXPR_FN (arg1), 0);
11564 tree b = TREE_OPERAND (rootarg, 0);
11565 tree c = TREE_OPERAND (rootarg, 1);
11567 tree tmp = fold_build2_loc (loc, RDIV_EXPR, type, c, b);
11569 tmp = build_call_expr_loc (loc, rootfn, 1, tmp);
11570 return fold_build2_loc (loc, MULT_EXPR, type, arg0, tmp);
11574 /* Optimize x/expN(y) into x*expN(-y). */
11575 if (BUILTIN_EXPONENT_P (fcode1))
11577 tree expfn = TREE_OPERAND (CALL_EXPR_FN (arg1), 0);
11578 tree arg = negate_expr (CALL_EXPR_ARG (arg1, 0));
11579 arg1 = build_call_expr_loc (loc,
11580 expfn, 1,
11581 fold_convert_loc (loc, type, arg));
11582 return fold_build2_loc (loc, MULT_EXPR, type, arg0, arg1);
11585 /* Optimize x/pow(y,z) into x*pow(y,-z). */
11586 if (fcode1 == BUILT_IN_POW
11587 || fcode1 == BUILT_IN_POWF
11588 || fcode1 == BUILT_IN_POWL)
11590 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg1), 0);
11591 tree arg10 = CALL_EXPR_ARG (arg1, 0);
11592 tree arg11 = CALL_EXPR_ARG (arg1, 1);
11593 tree neg11 = fold_convert_loc (loc, type,
11594 negate_expr (arg11));
11595 arg1 = build_call_expr_loc (loc, powfn, 2, arg10, neg11);
11596 return fold_build2_loc (loc, MULT_EXPR, type, arg0, arg1);
11599 return NULL_TREE;
11601 case TRUNC_DIV_EXPR:
11602 /* Optimize (X & (-A)) / A where A is a power of 2,
11603 to X >> log2(A).  */
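/* E.g. (X & -8) / 8 becomes X >> 3: the AND guarantees the low
   three bits are zero, so the signed division is exact.  */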
11604 if (TREE_CODE (arg0) == BIT_AND_EXPR
11605 && !TYPE_UNSIGNED (type) && TREE_CODE (arg1) == INTEGER_CST
11606 && integer_pow2p (arg1) && tree_int_cst_sgn (arg1) > 0)
11608 tree sum = fold_binary_loc (loc, PLUS_EXPR, TREE_TYPE (arg1),
11609 arg1, TREE_OPERAND (arg0, 1));
11610 if (sum && integer_zerop (sum)) {
11611 unsigned long pow2;
11613 if (TREE_INT_CST_LOW (arg1))
11614 pow2 = exact_log2 (TREE_INT_CST_LOW (arg1));
11615 else
11616 pow2 = exact_log2 (TREE_INT_CST_HIGH (arg1))
11617 + HOST_BITS_PER_WIDE_INT;
11619 return fold_build2_loc (loc, RSHIFT_EXPR, type,
11620 TREE_OPERAND (arg0, 0),
11621 build_int_cst (NULL_TREE, pow2));
11625 /* Fall thru */
11627 case FLOOR_DIV_EXPR:
11628 /* Simplify A / (B << N) where A and B are positive and B is
11629 a power of 2, to A >> (N + log2(B)). */
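/* E.g. A / (4 << N) becomes A >> (N + 2) when A is known to be
   nonnegative.  */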
11630 strict_overflow_p = false;
11631 if (TREE_CODE (arg1) == LSHIFT_EXPR
11632 && (TYPE_UNSIGNED (type)
11633 || tree_expr_nonnegative_warnv_p (op0, &strict_overflow_p)))
11635 tree sval = TREE_OPERAND (arg1, 0);
11636 if (integer_pow2p (sval) && tree_int_cst_sgn (sval) > 0)
11638 tree sh_cnt = TREE_OPERAND (arg1, 1);
11639 unsigned long pow2;
11641 if (TREE_INT_CST_LOW (sval))
11642 pow2 = exact_log2 (TREE_INT_CST_LOW (sval));
11643 else
11644 pow2 = exact_log2 (TREE_INT_CST_HIGH (sval))
11645 + HOST_BITS_PER_WIDE_INT;
11647 if (strict_overflow_p)
11648 fold_overflow_warning (("assuming signed overflow does not "
11649 "occur when simplifying A / (B << N)"),
11650 WARN_STRICT_OVERFLOW_MISC);
11652 sh_cnt = fold_build2_loc (loc, PLUS_EXPR, TREE_TYPE (sh_cnt),
11653 sh_cnt, build_int_cst (NULL_TREE, pow2));
11654 return fold_build2_loc (loc, RSHIFT_EXPR, type,
11655 fold_convert_loc (loc, type, arg0), sh_cnt);
11659 /* For unsigned integral types, FLOOR_DIV_EXPR is the same as
11660 TRUNC_DIV_EXPR. Rewrite into the latter in this case. */
11661 if (INTEGRAL_TYPE_P (type)
11662 && TYPE_UNSIGNED (type)
11663 && code == FLOOR_DIV_EXPR)
11664 return fold_build2_loc (loc, TRUNC_DIV_EXPR, type, op0, op1);
11666 /* Fall thru */
11668 case ROUND_DIV_EXPR:
11669 case CEIL_DIV_EXPR:
11670 case EXACT_DIV_EXPR:
11671 if (integer_onep (arg1))
11672 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
11673 if (integer_zerop (arg1))
11674 return NULL_TREE;
11675 /* X / -1 is -X. */
11676 if (!TYPE_UNSIGNED (type)
11677 && TREE_CODE (arg1) == INTEGER_CST
11678 && TREE_INT_CST_LOW (arg1) == (unsigned HOST_WIDE_INT) -1
11679 && TREE_INT_CST_HIGH (arg1) == -1)
11680 return fold_convert_loc (loc, type, negate_expr (arg0));
11682 /* Convert -A / -B to A / B when the type is signed and overflow is
11683 undefined. */
11684 if ((!INTEGRAL_TYPE_P (type) || TYPE_OVERFLOW_UNDEFINED (type))
11685 && TREE_CODE (arg0) == NEGATE_EXPR
11686 && negate_expr_p (arg1))
11688 if (INTEGRAL_TYPE_P (type))
11689 fold_overflow_warning (("assuming signed overflow does not occur "
11690 "when distributing negation across "
11691 "division"),
11692 WARN_STRICT_OVERFLOW_MISC);
11693 return fold_build2_loc (loc, code, type,
11694 fold_convert_loc (loc, type,
11695 TREE_OPERAND (arg0, 0)),
11696 fold_convert_loc (loc, type,
11697 negate_expr (arg1)));
11699 if ((!INTEGRAL_TYPE_P (type) || TYPE_OVERFLOW_UNDEFINED (type))
11700 && TREE_CODE (arg1) == NEGATE_EXPR
11701 && negate_expr_p (arg0))
11703 if (INTEGRAL_TYPE_P (type))
11704 fold_overflow_warning (("assuming signed overflow does not occur "
11705 "when distributing negation across "
11706 "division"),
11707 WARN_STRICT_OVERFLOW_MISC);
11708 return fold_build2_loc (loc, code, type,
11709 fold_convert_loc (loc, type,
11710 negate_expr (arg0)),
11711 fold_convert_loc (loc, type,
11712 TREE_OPERAND (arg1, 0)));
11715 /* If arg0 is a multiple of arg1, then rewrite to the fastest div
11716 operation, EXACT_DIV_EXPR.
11718 Note that only CEIL_DIV_EXPR and FLOOR_DIV_EXPR are rewritten now.
11719 At one time others generated faster code; it's not clear if they do
11720 after the last round of changes to the DIV code in expmed.c. */
11721 if ((code == CEIL_DIV_EXPR || code == FLOOR_DIV_EXPR)
11722 && multiple_of_p (type, arg0, arg1))
11723 return fold_build2_loc (loc, EXACT_DIV_EXPR, type, arg0, arg1);
11725 strict_overflow_p = false;
11726 if (TREE_CODE (arg1) == INTEGER_CST
11727 && 0 != (tem = extract_muldiv (op0, arg1, code, NULL_TREE,
11728 &strict_overflow_p)))
11730 if (strict_overflow_p)
11731 fold_overflow_warning (("assuming signed overflow does not occur "
11732 "when simplifying division"),
11733 WARN_STRICT_OVERFLOW_MISC);
11734 return fold_convert_loc (loc, type, tem);
11737 return NULL_TREE;
11739 case CEIL_MOD_EXPR:
11740 case FLOOR_MOD_EXPR:
11741 case ROUND_MOD_EXPR:
11742 case TRUNC_MOD_EXPR:
11743 /* X % 1 is always zero, but be sure to preserve any side
11744 effects in X. */
11745 if (integer_onep (arg1))
11746 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
11748 /* For X % 0, return X % 0 unchanged so that we can get the
11749 proper warnings and errors. */
11750 if (integer_zerop (arg1))
11751 return NULL_TREE;
11753 /* 0 % X is always zero, but be sure to preserve any side
11754 effects in X. Place this after checking for X == 0. */
11755 if (integer_zerop (arg0))
11756 return omit_one_operand_loc (loc, type, integer_zero_node, arg1);
11758 /* X % -1 is zero. */
11759 if (!TYPE_UNSIGNED (type)
11760 && TREE_CODE (arg1) == INTEGER_CST
11761 && TREE_INT_CST_LOW (arg1) == (unsigned HOST_WIDE_INT) -1
11762 && TREE_INT_CST_HIGH (arg1) == -1)
11763 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
11765 /* X % -C is the same as X % C. */
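/* (TRUNC_MOD rounds the quotient toward zero, so the result takes
   the sign of X and the divisor's sign never matters, e.g.
   7 % -3 == 7 % 3 == 1.)  */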
11766 if (code == TRUNC_MOD_EXPR
11767 && !TYPE_UNSIGNED (type)
11768 && TREE_CODE (arg1) == INTEGER_CST
11769 && !TREE_OVERFLOW (arg1)
11770 && TREE_INT_CST_HIGH (arg1) < 0
11771 && !TYPE_OVERFLOW_TRAPS (type)
11772 /* Avoid this transformation if C is INT_MIN, i.e. C == -C. */
11773 && !sign_bit_p (arg1, arg1))
11774 return fold_build2_loc (loc, code, type,
11775 fold_convert_loc (loc, type, arg0),
11776 fold_convert_loc (loc, type,
11777 negate_expr (arg1)));
11779 /* X % -Y is the same as X % Y. */
11780 if (code == TRUNC_MOD_EXPR
11781 && !TYPE_UNSIGNED (type)
11782 && TREE_CODE (arg1) == NEGATE_EXPR
11783 && !TYPE_OVERFLOW_TRAPS (type))
11784 return fold_build2_loc (loc, code, type, fold_convert_loc (loc, type, arg0),
11785 fold_convert_loc (loc, type,
11786 TREE_OPERAND (arg1, 0)));
11788 strict_overflow_p = false;
11789 if (TREE_CODE (arg1) == INTEGER_CST
11790 && 0 != (tem = extract_muldiv (op0, arg1, code, NULL_TREE,
11791 &strict_overflow_p)))
11793 if (strict_overflow_p)
11794 fold_overflow_warning (("assuming signed overflow does not occur "
11795 "when simplifying modulus"),
11796 WARN_STRICT_OVERFLOW_MISC);
11797 return fold_convert_loc (loc, type, tem);
11800 /* Optimize TRUNC_MOD_EXPR by a power of two into a BIT_AND_EXPR,
11801 i.e. "X % C" into "X & (C - 1)", if X and C are positive. */
11802 if ((code == TRUNC_MOD_EXPR || code == FLOOR_MOD_EXPR)
11803 && (TYPE_UNSIGNED (type)
11804 || tree_expr_nonnegative_warnv_p (op0, &strict_overflow_p)))
11806 tree c = arg1;
11807 /* Also optimize A % (C << N) where C is a power of 2,
11808 to A & ((C << N) - 1). */
11809 if (TREE_CODE (arg1) == LSHIFT_EXPR)
11810 c = TREE_OPERAND (arg1, 0);
11812 if (integer_pow2p (c) && tree_int_cst_sgn (c) > 0)
11814 tree mask
11815 = fold_build2_loc (loc, MINUS_EXPR, TREE_TYPE (arg1), arg1,
11816 build_int_cst (TREE_TYPE (arg1), 1));
11817 if (strict_overflow_p)
11818 fold_overflow_warning (("assuming signed overflow does not "
11819 "occur when simplifying "
11820 "X % (power of two)"),
11821 WARN_STRICT_OVERFLOW_MISC);
11822 return fold_build2_loc (loc, BIT_AND_EXPR, type,
11823 fold_convert_loc (loc, type, arg0),
11824 fold_convert_loc (loc, type, mask));
11828 return NULL_TREE;
11830 case LROTATE_EXPR:
11831 case RROTATE_EXPR:
11832 if (integer_all_onesp (arg0))
11833 return omit_one_operand_loc (loc, type, arg0, arg1);
11834 goto shift;
11836 case RSHIFT_EXPR:
11837 /* Optimize -1 >> x for arithmetic right shifts. */
11838 if (integer_all_onesp (arg0) && !TYPE_UNSIGNED (type)
11839 && tree_expr_nonnegative_p (arg1))
11840 return omit_one_operand_loc (loc, type, arg0, arg1);
11841 /* ... fall through ... */
11843 case LSHIFT_EXPR:
11844 shift:
11845 if (integer_zerop (arg1))
11846 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
11847 if (integer_zerop (arg0))
11848 return omit_one_operand_loc (loc, type, arg0, arg1);
11850 /* Since a negative shift count is not well-defined,
11851 don't try to compute it in the compiler. */
11852 if (TREE_CODE (arg1) == INTEGER_CST && tree_int_cst_sgn (arg1) < 0)
11853 return NULL_TREE;
11855 /* Turn (a OP c1) OP c2 into a OP (c1+c2). */
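/* E.g. (X << 3) << 5 becomes X << 8.  When c1 + c2 reaches the
   type's precision the combined count is out of range: rotates
   simply wrap the count, left or unsigned shifts yield 0, and
   arithmetic right shifts clamp to precision - 1 (see below).  */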
11856 if (TREE_CODE (op0) == code && host_integerp (arg1, false)
11857 && TREE_INT_CST_LOW (arg1) < TYPE_PRECISION (type)
11858 && host_integerp (TREE_OPERAND (arg0, 1), false)
11859 && TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1)) < TYPE_PRECISION (type))
11861 HOST_WIDE_INT low = (TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1))
11862 + TREE_INT_CST_LOW (arg1));
11864 /* Deal with a OP (c1 + c2) being undefined but (a OP c1) OP c2
11865 being well defined. */
11866 if (low >= TYPE_PRECISION (type))
11868 if (code == LROTATE_EXPR || code == RROTATE_EXPR)
11869 low = low % TYPE_PRECISION (type);
11870 else if (TYPE_UNSIGNED (type) || code == LSHIFT_EXPR)
11871 return omit_one_operand_loc (loc, type, build_int_cst (type, 0),
11872 TREE_OPERAND (arg0, 0));
11873 else
11874 low = TYPE_PRECISION (type) - 1;
11877 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0),
11878 build_int_cst (type, low));
11881 /* Transform (x >> c) << c into x & (-1<<c), or transform (x << c) >> c
11882 into x & ((unsigned)-1 >> c) for unsigned types. */
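/* E.g. (x >> 4) << 4 becomes x & -16, clearing the low four bits
   with a single AND.  */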
11883 if (((code == LSHIFT_EXPR && TREE_CODE (arg0) == RSHIFT_EXPR)
11884 || (TYPE_UNSIGNED (type)
11885 && code == RSHIFT_EXPR && TREE_CODE (arg0) == LSHIFT_EXPR))
11886 && host_integerp (arg1, false)
11887 && TREE_INT_CST_LOW (arg1) < TYPE_PRECISION (type)
11888 && host_integerp (TREE_OPERAND (arg0, 1), false)
11889 && TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1)) < TYPE_PRECISION (type))
11891 HOST_WIDE_INT low0 = TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1));
11892 HOST_WIDE_INT low1 = TREE_INT_CST_LOW (arg1);
11893 tree lshift;
11894 tree arg00;
11896 if (low0 == low1)
11898 arg00 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
11900 lshift = build_int_cst (type, -1);
11901 lshift = int_const_binop (code, lshift, arg1, 0);
11903 return fold_build2_loc (loc, BIT_AND_EXPR, type, arg00, lshift);
11907 /* Rewrite an LROTATE_EXPR by a constant into an
11908 RROTATE_EXPR by a new constant. */
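/* E.g. a 32-bit rotate left by 8 becomes a rotate right by 24;
   the folds that follow then only need to handle RROTATE_EXPR.  */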
11909 if (code == LROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST)
11911 tree tem = build_int_cst (TREE_TYPE (arg1),
11912 TYPE_PRECISION (type));
11913 tem = const_binop (MINUS_EXPR, tem, arg1);
11914 return fold_build2_loc (loc, RROTATE_EXPR, type, op0, tem);
11917 /* If we have a rotate of a bit operation with the rotate count and
11918 the second operand of the bit operation both constant,
11919 permute the two operations. */
11920 if (code == RROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST
11921 && (TREE_CODE (arg0) == BIT_AND_EXPR
11922 || TREE_CODE (arg0) == BIT_IOR_EXPR
11923 || TREE_CODE (arg0) == BIT_XOR_EXPR)
11924 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
11925 return fold_build2_loc (loc, TREE_CODE (arg0), type,
11926 fold_build2_loc (loc, code, type,
11927 TREE_OPERAND (arg0, 0), arg1),
11928 fold_build2_loc (loc, code, type,
11929 TREE_OPERAND (arg0, 1), arg1));
11931 /* Two consecutive rotates adding up to the precision of the
11932 type can be ignored. */
11933 if (code == RROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST
11934 && TREE_CODE (arg0) == RROTATE_EXPR
11935 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
11936 && TREE_INT_CST_HIGH (arg1) == 0
11937 && TREE_INT_CST_HIGH (TREE_OPERAND (arg0, 1)) == 0
11938 && ((TREE_INT_CST_LOW (arg1)
11939 + TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1)))
11940 == (unsigned int) TYPE_PRECISION (type)))
11941 return TREE_OPERAND (arg0, 0);
11943 /* Fold (X & C2) << C1 into (X << C1) & (C2 << C1)
11944 (X & C2) >> C1 into (X >> C1) & (C2 >> C1)
11945 if the latter can be further optimized. */
11946 if ((code == LSHIFT_EXPR || code == RSHIFT_EXPR)
11947 && TREE_CODE (arg0) == BIT_AND_EXPR
11948 && TREE_CODE (arg1) == INTEGER_CST
11949 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
11951 tree mask = fold_build2_loc (loc, code, type,
11952 fold_convert_loc (loc, type,
11953 TREE_OPERAND (arg0, 1)),
11954 arg1);
11955 tree shift = fold_build2_loc (loc, code, type,
11956 fold_convert_loc (loc, type,
11957 TREE_OPERAND (arg0, 0)),
11958 arg1);
11959 tem = fold_binary_loc (loc, BIT_AND_EXPR, type, shift, mask);
11960 if (tem)
11961 return tem;
11964 return NULL_TREE;
11966 case MIN_EXPR:
11967 if (operand_equal_p (arg0, arg1, 0))
11968 return omit_one_operand_loc (loc, type, arg0, arg1);
11969 if (INTEGRAL_TYPE_P (type)
11970 && operand_equal_p (arg1, TYPE_MIN_VALUE (type), OEP_ONLY_CONST))
11971 return omit_one_operand_loc (loc, type, arg1, arg0);
11972 tem = fold_minmax (loc, MIN_EXPR, type, arg0, arg1);
11973 if (tem)
11974 return tem;
11975 goto associate;
11977 case MAX_EXPR:
11978 if (operand_equal_p (arg0, arg1, 0))
11979 return omit_one_operand_loc (loc, type, arg0, arg1);
11980 if (INTEGRAL_TYPE_P (type)
11981 && TYPE_MAX_VALUE (type)
11982 && operand_equal_p (arg1, TYPE_MAX_VALUE (type), OEP_ONLY_CONST))
11983 return omit_one_operand_loc (loc, type, arg1, arg0);
11984 tem = fold_minmax (loc, MAX_EXPR, type, arg0, arg1);
11985 if (tem)
11986 return tem;
11987 goto associate;
11989 case TRUTH_ANDIF_EXPR:
11990 /* Note that the operands of this must be ints
11991 and their values must be 0 or 1.
11992 ("true" is a fixed value perhaps depending on the language.) */
11993 /* If first arg is constant zero, return it. */
11994 if (integer_zerop (arg0))
11995 return fold_convert_loc (loc, type, arg0);
11996 case TRUTH_AND_EXPR:
11997 /* If either arg is constant true, drop it. */
11998 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
11999 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
12000 if (TREE_CODE (arg1) == INTEGER_CST && ! integer_zerop (arg1)
12001 /* Preserve sequence points. */
12002 && (code != TRUTH_ANDIF_EXPR || ! TREE_SIDE_EFFECTS (arg0)))
12003 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
12004 /* If second arg is constant zero, result is zero, but first arg
12005 must be evaluated. */
12006 if (integer_zerop (arg1))
12007 return omit_one_operand_loc (loc, type, arg1, arg0);
12008 /* Likewise for first arg, but note that only the TRUTH_AND_EXPR
12009 case will be handled here. */
12010 if (integer_zerop (arg0))
12011 return omit_one_operand_loc (loc, type, arg0, arg1);
12013 /* !X && X is always false. */
12014 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
12015 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
12016 return omit_one_operand_loc (loc, type, integer_zero_node, arg1);
12017 /* X && !X is always false. */
12018 if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
12019 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
12020 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
12022 /* A < X && A + 1 > Y ==> A < X && A >= Y. Normally A + 1 > Y
12023 means A >= Y && A != MAX, but in this case we know that
12024 A < X <= MAX. */
12026 if (!TREE_SIDE_EFFECTS (arg0)
12027 && !TREE_SIDE_EFFECTS (arg1))
12029 tem = fold_to_nonsharp_ineq_using_bound (loc, arg0, arg1);
12030 if (tem && !operand_equal_p (tem, arg0, 0))
12031 return fold_build2_loc (loc, code, type, tem, arg1);
12033 tem = fold_to_nonsharp_ineq_using_bound (loc, arg1, arg0);
12034 if (tem && !operand_equal_p (tem, arg1, 0))
12035 return fold_build2_loc (loc, code, type, arg0, tem);
12038 truth_andor:
12039 /* We only do these simplifications if we are optimizing. */
12040 if (!optimize)
12041 return NULL_TREE;
12043 /* Check for things like (A || B) && (A || C). We can convert this
12044 to A || (B && C). Note that either operator can be any of the four
12045 truth and/or operations and the transformation will still be
12046 valid. Also note that we only care about order for the
12047 ANDIF and ORIF operators. If B contains side effects, this
12048 might change the truth-value of A. */
12049 if (TREE_CODE (arg0) == TREE_CODE (arg1)
12050 && (TREE_CODE (arg0) == TRUTH_ANDIF_EXPR
12051 || TREE_CODE (arg0) == TRUTH_ORIF_EXPR
12052 || TREE_CODE (arg0) == TRUTH_AND_EXPR
12053 || TREE_CODE (arg0) == TRUTH_OR_EXPR)
12054 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (arg0, 1)))
12056 tree a00 = TREE_OPERAND (arg0, 0);
12057 tree a01 = TREE_OPERAND (arg0, 1);
12058 tree a10 = TREE_OPERAND (arg1, 0);
12059 tree a11 = TREE_OPERAND (arg1, 1);
12060 int commutative = ((TREE_CODE (arg0) == TRUTH_OR_EXPR
12061 || TREE_CODE (arg0) == TRUTH_AND_EXPR)
12062 && (code == TRUTH_AND_EXPR
12063 || code == TRUTH_OR_EXPR));
12065 if (operand_equal_p (a00, a10, 0))
12066 return fold_build2_loc (loc, TREE_CODE (arg0), type, a00,
12067 fold_build2_loc (loc, code, type, a01, a11));
12068 else if (commutative && operand_equal_p (a00, a11, 0))
12069 return fold_build2_loc (loc, TREE_CODE (arg0), type, a00,
12070 fold_build2_loc (loc, code, type, a01, a10));
12071 else if (commutative && operand_equal_p (a01, a10, 0))
12072 return fold_build2_loc (loc, TREE_CODE (arg0), type, a01,
12073 fold_build2_loc (loc, code, type, a00, a11));
12075 /* This case is tricky because we must either have commutative
12076 operators or else A10 must not have side-effects. */
12078 else if ((commutative || ! TREE_SIDE_EFFECTS (a10))
12079 && operand_equal_p (a01, a11, 0))
12080 return fold_build2_loc (loc, TREE_CODE (arg0), type,
12081 fold_build2_loc (loc, code, type, a00, a10),
12082 a01);
12085 /* See if we can build a range comparison. */
12086 if (0 != (tem = fold_range_test (loc, code, type, op0, op1)))
12087 return tem;
12089 if ((code == TRUTH_ANDIF_EXPR && TREE_CODE (arg0) == TRUTH_ORIF_EXPR)
12090 || (code == TRUTH_ORIF_EXPR && TREE_CODE (arg0) == TRUTH_ANDIF_EXPR))
12092 tem = merge_truthop_with_opposite_arm (loc, arg0, arg1, true);
12093 if (tem)
12094 return fold_build2_loc (loc, code, type, tem, arg1);
12097 if ((code == TRUTH_ANDIF_EXPR && TREE_CODE (arg1) == TRUTH_ORIF_EXPR)
12098 || (code == TRUTH_ORIF_EXPR && TREE_CODE (arg1) == TRUTH_ANDIF_EXPR))
12100 tem = merge_truthop_with_opposite_arm (loc, arg1, arg0, false);
12101 if (tem)
12102 return fold_build2_loc (loc, code, type, arg0, tem);
12105 /* Check for the possibility of merging component references. If our
12106 lhs is another similar operation, try to merge its rhs with our
12107 rhs. Then try to merge our lhs and rhs. */
12108 if (TREE_CODE (arg0) == code
12109 && 0 != (tem = fold_truthop (loc, code, type,
12110 TREE_OPERAND (arg0, 1), arg1)))
12111 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0), tem);
12113 if ((tem = fold_truthop (loc, code, type, arg0, arg1)) != 0)
12114 return tem;
12116 return NULL_TREE;
12118 case TRUTH_ORIF_EXPR:
12119 /* Note that the operands of this must be ints
12120 and their values must be 0 or true.
12121 ("true" is a fixed value perhaps depending on the language.) */
12122 /* If first arg is constant true, return it. */
12123 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
12124 return fold_convert_loc (loc, type, arg0);
12125 case TRUTH_OR_EXPR:
12126 /* If either arg is constant zero, drop it. */
12127 if (TREE_CODE (arg0) == INTEGER_CST && integer_zerop (arg0))
12128 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
12129 if (TREE_CODE (arg1) == INTEGER_CST && integer_zerop (arg1)
12130 /* Preserve sequence points. */
12131 && (code != TRUTH_ORIF_EXPR || ! TREE_SIDE_EFFECTS (arg0)))
12132 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
12133 /* If second arg is constant true, result is true, but we must
12134 evaluate first arg. */
12135 if (TREE_CODE (arg1) == INTEGER_CST && ! integer_zerop (arg1))
12136 return omit_one_operand_loc (loc, type, arg1, arg0);
12137 /* Likewise for first arg, but note this only occurs here for
12138 TRUTH_OR_EXPR. */
12139 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
12140 return omit_one_operand_loc (loc, type, arg0, arg1);
12142 /* !X || X is always true. */
12143 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
12144 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
12145 return omit_one_operand_loc (loc, type, integer_one_node, arg1);
12146 /* X || !X is always true. */
12147 if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
12148 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
12149 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
12151 goto truth_andor;
12153 case TRUTH_XOR_EXPR:
12154 /* If the second arg is constant zero, drop it. */
12155 if (integer_zerop (arg1))
12156 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
12157 /* If the second arg is constant true, this is a logical inversion. */
12158 if (integer_onep (arg1))
12160 /* Only call invert_truthvalue if operand is a truth value. */
12161 if (TREE_CODE (TREE_TYPE (arg0)) != BOOLEAN_TYPE)
12162 tem = fold_build1_loc (loc, TRUTH_NOT_EXPR, TREE_TYPE (arg0), arg0);
12163 else
12164 tem = invert_truthvalue_loc (loc, arg0);
12165 return non_lvalue_loc (loc, fold_convert_loc (loc, type, tem));
12167 /* Identical arguments cancel to zero. */
12168 if (operand_equal_p (arg0, arg1, 0))
12169 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
12171 /* !X ^ X is always true. */
12172 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
12173 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
12174 return omit_one_operand_loc (loc, type, integer_one_node, arg1);
12176 /* X ^ !X is always true. */
12177 if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
12178 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
12179 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
12181 return NULL_TREE;
12183 case EQ_EXPR:
12184 case NE_EXPR:
12185 tem = fold_comparison (loc, code, type, op0, op1);
12186 if (tem != NULL_TREE)
12187 return tem;
12189 /* bool_var != 0 becomes bool_var. */
12190 if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_zerop (arg1)
12191 && code == NE_EXPR)
12192 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
12194 /* bool_var == 1 becomes bool_var. */
12195 if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_onep (arg1)
12196 && code == EQ_EXPR)
12197 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
12199 /* bool_var != 1 becomes !bool_var. */
12200 if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_onep (arg1)
12201 && code == NE_EXPR)
12202 return fold_build1_loc (loc, TRUTH_NOT_EXPR, type,
12203 fold_convert_loc (loc, type, arg0));
12205 /* bool_var == 0 becomes !bool_var. */
12206 if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_zerop (arg1)
12207 && code == EQ_EXPR)
12208 return fold_build1_loc (loc, TRUTH_NOT_EXPR, type,
12209 fold_convert_loc (loc, type, arg0));
12211 /* !exp != 0 becomes !exp.  */
12212 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR && integer_zerop (arg1)
12213 && code == NE_EXPR)
12214 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
12216 /* If this is an equality comparison of the address of two non-weak,
12217 unaliased symbols, neither of which is extern (since we do not
12218 have access to attributes for externs), then we know the result. */
12219 if (TREE_CODE (arg0) == ADDR_EXPR
12220 && VAR_OR_FUNCTION_DECL_P (TREE_OPERAND (arg0, 0))
12221 && ! DECL_WEAK (TREE_OPERAND (arg0, 0))
12222 && ! lookup_attribute ("alias",
12223 DECL_ATTRIBUTES (TREE_OPERAND (arg0, 0)))
12224 && ! DECL_EXTERNAL (TREE_OPERAND (arg0, 0))
12225 && TREE_CODE (arg1) == ADDR_EXPR
12226 && VAR_OR_FUNCTION_DECL_P (TREE_OPERAND (arg1, 0))
12227 && ! DECL_WEAK (TREE_OPERAND (arg1, 0))
12228 && ! lookup_attribute ("alias",
12229 DECL_ATTRIBUTES (TREE_OPERAND (arg1, 0)))
12230 && ! DECL_EXTERNAL (TREE_OPERAND (arg1, 0)))
12232 /* We know that we're looking at the address of two
12233 non-weak, unaliased, static _DECL nodes.
12235 It is both wasteful and incorrect to call operand_equal_p
12236 to compare the two ADDR_EXPR nodes. It is wasteful in that
12237 all we need to do is test pointer equality for the arguments
12238 to the two ADDR_EXPR nodes. It is incorrect to use
12239 operand_equal_p as that function is NOT equivalent to a
12240 C equality test. It can in fact return false for two
12241 objects which would test as equal using the C equality
12242 operator. */
12243 bool equal = TREE_OPERAND (arg0, 0) == TREE_OPERAND (arg1, 0);
12244 return constant_boolean_node (equal
12245 ? code == EQ_EXPR : code != EQ_EXPR,
12246 type);
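/* For example, given "static int a, b;", "&a == &b" folds to 0 and
   "&a == &a" folds to 1: distinct non-weak, unaliased statics are
   guaranteed to have distinct addresses.  */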
12249 /* If this is an EQ or NE comparison of a constant with a PLUS_EXPR or
12250 a MINUS_EXPR of a constant, we can convert it into a comparison with
12251 a revised constant as long as no overflow occurs. */
12252 if (TREE_CODE (arg1) == INTEGER_CST
12253 && (TREE_CODE (arg0) == PLUS_EXPR
12254 || TREE_CODE (arg0) == MINUS_EXPR)
12255 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
12256 && 0 != (tem = const_binop (TREE_CODE (arg0) == PLUS_EXPR
12257 ? MINUS_EXPR : PLUS_EXPR,
12258 fold_convert_loc (loc, TREE_TYPE (arg0),
12259 arg1),
12260 TREE_OPERAND (arg0, 1)))
12261 && !TREE_OVERFLOW (tem))
12262 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0), tem);
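/* For example, "X + 3 == 7" folds to "X == 4" and "X - 5 != 2"
   folds to "X != 7", provided the adjusted constant is computed
   without overflow.  */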
12264 /* Similarly for a NEGATE_EXPR. */
12265 if (TREE_CODE (arg0) == NEGATE_EXPR
12266 && TREE_CODE (arg1) == INTEGER_CST
12267 && 0 != (tem = negate_expr (arg1))
12268 && TREE_CODE (tem) == INTEGER_CST
12269 && !TREE_OVERFLOW (tem))
12270 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0), tem);
12272 /* Similarly for a BIT_XOR_EXPR; X ^ C1 == C2 is X == (C1 ^ C2). */
12273 if (TREE_CODE (arg0) == BIT_XOR_EXPR
12274 && TREE_CODE (arg1) == INTEGER_CST
12275 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
12276 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0),
12277 fold_build2_loc (loc, BIT_XOR_EXPR, TREE_TYPE (arg0),
12278 fold_convert_loc (loc,
12279 TREE_TYPE (arg0),
12280 arg1),
12281 TREE_OPERAND (arg0, 1)));
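/* For example, "(X ^ 5) == 12" folds to "X == (5 ^ 12)",
   i.e. "X == 9".  */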
12283 /* Transform comparisons of the form X +- Y CMP X to Y CMP 0. */
12284 if ((TREE_CODE (arg0) == PLUS_EXPR
12285 || TREE_CODE (arg0) == POINTER_PLUS_EXPR
12286 || TREE_CODE (arg0) == MINUS_EXPR)
12287 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
12288 && (INTEGRAL_TYPE_P (TREE_TYPE (arg0))
12289 || POINTER_TYPE_P (TREE_TYPE (arg0))))
12291 tree val = TREE_OPERAND (arg0, 1);
12292 return omit_two_operands_loc (loc, type,
12293 fold_build2_loc (loc, code, type,
12294 val,
12295 build_int_cst (TREE_TYPE (val),
12296 0)),
12297 TREE_OPERAND (arg0, 0), arg1);
12300 /* Transform comparisons of the form C - X CMP X if C % 2 == 1. */
12301 if (TREE_CODE (arg0) == MINUS_EXPR
12302 && TREE_CODE (TREE_OPERAND (arg0, 0)) == INTEGER_CST
12303 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0)
12304 && (TREE_INT_CST_LOW (TREE_OPERAND (arg0, 0)) & 1) == 1)
12306 return omit_two_operands_loc (loc, type,
12307 code == NE_EXPR
12308 ? boolean_true_node : boolean_false_node,
12309 TREE_OPERAND (arg0, 1), arg1);
12312 /* If we have X - Y == 0, we can convert that to X == Y and similarly
12313 for !=. Don't do this for ordered comparisons due to overflow. */
12314 if (TREE_CODE (arg0) == MINUS_EXPR
12315 && integer_zerop (arg1))
12316 return fold_build2_loc (loc, code, type,
12317 TREE_OPERAND (arg0, 0), TREE_OPERAND (arg0, 1));
12319 /* Convert ABS_EXPR<x> == 0 or ABS_EXPR<x> != 0 to x == 0 or x != 0. */
12320 if (TREE_CODE (arg0) == ABS_EXPR
12321 && (integer_zerop (arg1) || real_zerop (arg1)))
12322 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0), arg1);
12324 /* If this is an EQ or NE comparison with zero and ARG0 is
12325 (1 << foo) & bar, convert it to (bar >> foo) & 1. Both require
12326 two operations, but the latter can be done in one less insn
12327 on machines that have only two-operand insns or on which a
12328 constant cannot be the first operand. */
12329 if (TREE_CODE (arg0) == BIT_AND_EXPR
12330 && integer_zerop (arg1))
12332 tree arg00 = TREE_OPERAND (arg0, 0);
12333 tree arg01 = TREE_OPERAND (arg0, 1);
12334 if (TREE_CODE (arg00) == LSHIFT_EXPR
12335 && integer_onep (TREE_OPERAND (arg00, 0)))
12337 tree tem = fold_build2_loc (loc, RSHIFT_EXPR, TREE_TYPE (arg00),
12338 arg01, TREE_OPERAND (arg00, 1));
12339 tem = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg0), tem,
12340 build_int_cst (TREE_TYPE (arg0), 1));
12341 return fold_build2_loc (loc, code, type,
12342 fold_convert_loc (loc, TREE_TYPE (arg1), tem),
12343 arg1);
12345 else if (TREE_CODE (arg01) == LSHIFT_EXPR
12346 && integer_onep (TREE_OPERAND (arg01, 0)))
12348 tree tem = fold_build2_loc (loc, RSHIFT_EXPR, TREE_TYPE (arg01),
12349 arg00, TREE_OPERAND (arg01, 1));
12350 tem = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg0), tem,
12351 build_int_cst (TREE_TYPE (arg0), 1));
12352 return fold_build2_loc (loc, code, type,
12353 fold_convert_loc (loc, TREE_TYPE (arg1), tem),
12354 arg1);
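/* For example, "((1 << n) & x) != 0" becomes "((x >> n) & 1) != 0",
   so no separate register is needed to hold the shifted constant.  */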
12358 /* If this is an NE or EQ comparison of zero against the result of a
12359 signed MOD operation whose second operand is a power of 2, make
12360 the MOD operation unsigned since it is simpler and equivalent. */
12361 if (integer_zerop (arg1)
12362 && !TYPE_UNSIGNED (TREE_TYPE (arg0))
12363 && (TREE_CODE (arg0) == TRUNC_MOD_EXPR
12364 || TREE_CODE (arg0) == CEIL_MOD_EXPR
12365 || TREE_CODE (arg0) == FLOOR_MOD_EXPR
12366 || TREE_CODE (arg0) == ROUND_MOD_EXPR)
12367 && integer_pow2p (TREE_OPERAND (arg0, 1)))
12369 tree newtype = unsigned_type_for (TREE_TYPE (arg0));
12370 tree newmod = fold_build2_loc (loc, TREE_CODE (arg0), newtype,
12371 fold_convert_loc (loc, newtype,
12372 TREE_OPERAND (arg0, 0)),
12373 fold_convert_loc (loc, newtype,
12374 TREE_OPERAND (arg0, 1)));
12376 return fold_build2_loc (loc, code, type, newmod,
12377 fold_convert_loc (loc, newtype, arg1));
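/* For example, for "int x" the test "x % 8 == 0" becomes
   "(unsigned int) x % 8 == 0", and the unsigned modulus can then be
   computed with a simple mask of the low bits.  */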
12380 /* Fold ((X >> C1) & C2) == 0 and ((X >> C1) & C2) != 0 where
12381 C1 is a valid shift constant, and C2 is a power of two, i.e.
12382 a single bit. */
12383 if (TREE_CODE (arg0) == BIT_AND_EXPR
12384 && TREE_CODE (TREE_OPERAND (arg0, 0)) == RSHIFT_EXPR
12385 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1))
12386 == INTEGER_CST
12387 && integer_pow2p (TREE_OPERAND (arg0, 1))
12388 && integer_zerop (arg1))
12390 tree itype = TREE_TYPE (arg0);
12391 unsigned HOST_WIDE_INT prec = TYPE_PRECISION (itype);
12392 tree arg001 = TREE_OPERAND (TREE_OPERAND (arg0, 0), 1);
12394 /* Check for a valid shift count. */
12395 if (TREE_INT_CST_HIGH (arg001) == 0
12396 && TREE_INT_CST_LOW (arg001) < prec)
12398 tree arg01 = TREE_OPERAND (arg0, 1);
12399 tree arg000 = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
12400 unsigned HOST_WIDE_INT log2 = tree_log2 (arg01);
12401 /* If (C2 << C1) doesn't overflow, then ((X >> C1) & C2) != 0
12402 can be rewritten as (X & (C2 << C1)) != 0. */
12403 if ((log2 + TREE_INT_CST_LOW (arg001)) < prec)
12405 tem = fold_build2_loc (loc, LSHIFT_EXPR, itype, arg01, arg001);
12406 tem = fold_build2_loc (loc, BIT_AND_EXPR, itype, arg000, tem);
12407 return fold_build2_loc (loc, code, type, tem, arg1);
12409 /* Otherwise, for signed (arithmetic) shifts,
12410 ((X >> C1) & C2) != 0 is rewritten as X < 0, and
12411 ((X >> C1) & C2) == 0 is rewritten as X >= 0. */
12412 else if (!TYPE_UNSIGNED (itype))
12413 return fold_build2_loc (loc, code == EQ_EXPR ? GE_EXPR : LT_EXPR, type,
12414 arg000, build_int_cst (itype, 0));
12415 /* Otherwise, for unsigned (logical) shifts,
12416 ((X >> C1) & C2) != 0 is rewritten as (X,false), and
12417 ((X >> C1) & C2) == 0 is rewritten as (X,true). */
12418 else
12419 return omit_one_operand_loc (loc, type,
12420 code == EQ_EXPR ? integer_one_node
12421 : integer_zero_node,
12422 arg000);
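/* For example, "((x >> 3) & 4) != 0" tests bit 5 of x and folds to
   "(x & 32) != 0".  If the shifted mask would land on or above the
   sign position of a signed x, the test degenerates to a sign test,
   e.g. "((x >> 28) & 16) != 0" becomes "x < 0" for 32-bit int x.  */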
12426 /* If this is an NE comparison of zero with an AND of one, remove the
12427 comparison since the AND will give the correct value. */
12428 if (code == NE_EXPR
12429 && integer_zerop (arg1)
12430 && TREE_CODE (arg0) == BIT_AND_EXPR
12431 && integer_onep (TREE_OPERAND (arg0, 1)))
12432 return fold_convert_loc (loc, type, arg0);
12434 /* If we have (A & C) == C where C is a power of 2, convert this into
12435 (A & C) != 0. Similarly for NE_EXPR. */
12436 if (TREE_CODE (arg0) == BIT_AND_EXPR
12437 && integer_pow2p (TREE_OPERAND (arg0, 1))
12438 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
12439 return fold_build2_loc (loc, code == EQ_EXPR ? NE_EXPR : EQ_EXPR, type,
12440 arg0, fold_convert_loc (loc, TREE_TYPE (arg0),
12441 integer_zero_node));
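/* For example, "(a & 8) == 8" folds to "(a & 8) != 0", the
   canonical form of a single-bit test.  */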
12443 /* If we have (A & C) != 0 or (A & C) == 0 and C is the sign
12444 bit, then fold the expression into A < 0 or A >= 0. */
12445 tem = fold_single_bit_test_into_sign_test (loc, code, arg0, arg1, type);
12446 if (tem)
12447 return tem;
12449 /* If we have (A & C) == D where D & ~C != 0, convert this into 0.
12450 Similarly for NE_EXPR. */
12451 if (TREE_CODE (arg0) == BIT_AND_EXPR
12452 && TREE_CODE (arg1) == INTEGER_CST
12453 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
12455 tree notc = fold_build1_loc (loc, BIT_NOT_EXPR,
12456 TREE_TYPE (TREE_OPERAND (arg0, 1)),
12457 TREE_OPERAND (arg0, 1));
12458 tree dandnotc = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg0),
12459 arg1, notc);
12460 tree rslt = code == EQ_EXPR ? integer_zero_node : integer_one_node;
12461 if (integer_nonzerop (dandnotc))
12462 return omit_one_operand_loc (loc, type, rslt, arg0);
12465 /* If we have (A | C) == D where C & ~D != 0, convert this into 0.
12466 Similarly for NE_EXPR. */
12467 if (TREE_CODE (arg0) == BIT_IOR_EXPR
12468 && TREE_CODE (arg1) == INTEGER_CST
12469 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
12471 tree notd = fold_build1_loc (loc, BIT_NOT_EXPR, TREE_TYPE (arg1), arg1);
12472 tree candnotd = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg0),
12473 TREE_OPERAND (arg0, 1), notd);
12474 tree rslt = code == EQ_EXPR ? integer_zero_node : integer_one_node;
12475 if (integer_nonzerop (candnotd))
12476 return omit_one_operand_loc (loc, type, rslt, arg0);
12479 /* If this is a comparison of a field, we may be able to simplify it. */
12480 if ((TREE_CODE (arg0) == COMPONENT_REF
12481 || TREE_CODE (arg0) == BIT_FIELD_REF)
12482 /* Handle the constant case even without -O
12483 to make sure the warnings are given. */
12484 && (optimize || TREE_CODE (arg1) == INTEGER_CST))
12486 t1 = optimize_bit_field_compare (loc, code, type, arg0, arg1);
12487 if (t1)
12488 return t1;
12491 /* Optimize comparisons of strlen vs zero to a compare of the
12492 first character of the string vs zero. To wit,
12493 strlen(ptr) == 0 => *ptr == 0
12494 strlen(ptr) != 0 => *ptr != 0
12495 Other cases should reduce to one of these two (or a constant)
12496 due to the return value of strlen being unsigned. */
12497 if (TREE_CODE (arg0) == CALL_EXPR
12498 && integer_zerop (arg1))
12500 tree fndecl = get_callee_fndecl (arg0);
12502 if (fndecl
12503 && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
12504 && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_STRLEN
12505 && call_expr_nargs (arg0) == 1
12506 && TREE_CODE (TREE_TYPE (CALL_EXPR_ARG (arg0, 0))) == POINTER_TYPE)
12508 tree iref = build_fold_indirect_ref_loc (loc,
12509 CALL_EXPR_ARG (arg0, 0));
12510 return fold_build2_loc (loc, code, type, iref,
12511 build_int_cst (TREE_TYPE (iref), 0));
12515 /* Fold (X >> C) != 0 into X < 0 if C is one less than the width
12516 of X. Similarly fold (X >> C) == 0 into X >= 0. */
12517 if (TREE_CODE (arg0) == RSHIFT_EXPR
12518 && integer_zerop (arg1)
12519 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
12521 tree arg00 = TREE_OPERAND (arg0, 0);
12522 tree arg01 = TREE_OPERAND (arg0, 1);
12523 tree itype = TREE_TYPE (arg00);
12524 if (TREE_INT_CST_HIGH (arg01) == 0
12525 && TREE_INT_CST_LOW (arg01)
12526 == (unsigned HOST_WIDE_INT) (TYPE_PRECISION (itype) - 1))
12528 if (TYPE_UNSIGNED (itype))
12530 itype = signed_type_for (itype);
12531 arg00 = fold_convert_loc (loc, itype, arg00);
12533 return fold_build2_loc (loc, code == EQ_EXPR ? GE_EXPR : LT_EXPR,
12534 type, arg00, build_int_cst (itype, 0));
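/* For example, for 32-bit int x, "(x >> 31) != 0" folds to "x < 0";
   an unsigned x is first converted to the corresponding signed type
   so that the sign test is meaningful.  */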
12538 /* (X ^ Y) == 0 becomes X == Y, and (X ^ Y) != 0 becomes X != Y. */
12539 if (integer_zerop (arg1)
12540 && TREE_CODE (arg0) == BIT_XOR_EXPR)
12541 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0),
12542 TREE_OPERAND (arg0, 1));
12544 /* (X ^ Y) == Y becomes X == 0. We know that Y has no side-effects. */
12545 if (TREE_CODE (arg0) == BIT_XOR_EXPR
12546 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
12547 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0),
12548 build_int_cst (TREE_TYPE (arg1), 0));
12549 /* Likewise (X ^ Y) == X becomes Y == 0. X has no side-effects. */
12550 if (TREE_CODE (arg0) == BIT_XOR_EXPR
12551 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
12552 && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
12553 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 1),
12554 build_int_cst (TREE_TYPE (arg1), 0));
12556 /* (X ^ C1) op C2 can be rewritten as X op (C1 ^ C2). */
12557 if (TREE_CODE (arg0) == BIT_XOR_EXPR
12558 && TREE_CODE (arg1) == INTEGER_CST
12559 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
12560 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0),
12561 fold_build2_loc (loc, BIT_XOR_EXPR, TREE_TYPE (arg1),
12562 TREE_OPERAND (arg0, 1), arg1));
12564 /* Fold (~X & C) == 0 into (X & C) != 0 and (~X & C) != 0 into
12565 (X & C) == 0 when C is a single bit. */
12566 if (TREE_CODE (arg0) == BIT_AND_EXPR
12567 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_NOT_EXPR
12568 && integer_zerop (arg1)
12569 && integer_pow2p (TREE_OPERAND (arg0, 1)))
12571 tem = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg0),
12572 TREE_OPERAND (TREE_OPERAND (arg0, 0), 0),
12573 TREE_OPERAND (arg0, 1));
12574 return fold_build2_loc (loc, code == EQ_EXPR ? NE_EXPR : EQ_EXPR,
12575 type, tem, arg1);
12578 /* Fold ((X & C) ^ C) eq/ne 0 into (X & C) ne/eq 0, when the
12579 constant C is a power of two, i.e. a single bit. */
12580 if (TREE_CODE (arg0) == BIT_XOR_EXPR
12581 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_AND_EXPR
12582 && integer_zerop (arg1)
12583 && integer_pow2p (TREE_OPERAND (arg0, 1))
12584 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
12585 TREE_OPERAND (arg0, 1), OEP_ONLY_CONST))
12587 tree arg00 = TREE_OPERAND (arg0, 0);
12588 return fold_build2_loc (loc, code == EQ_EXPR ? NE_EXPR : EQ_EXPR, type,
12589 arg00, build_int_cst (TREE_TYPE (arg00), 0));
12592 /* Likewise, fold ((X ^ C) & C) eq/ne 0 into (X & C) ne/eq 0,
12593 when C is a power of two, i.e. a single bit. */
12594 if (TREE_CODE (arg0) == BIT_AND_EXPR
12595 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_XOR_EXPR
12596 && integer_zerop (arg1)
12597 && integer_pow2p (TREE_OPERAND (arg0, 1))
12598 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
12599 TREE_OPERAND (arg0, 1), OEP_ONLY_CONST))
12601 tree arg000 = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
12602 tem = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg000),
12603 arg000, TREE_OPERAND (arg0, 1));
12604 return fold_build2_loc (loc, code == EQ_EXPR ? NE_EXPR : EQ_EXPR, type,
12605 tem, build_int_cst (TREE_TYPE (tem), 0));
12608 if (integer_zerop (arg1)
12609 && tree_expr_nonzero_p (arg0))
12611 tree res = constant_boolean_node (code == NE_EXPR, type);
12612 return omit_one_operand_loc (loc, type, res, arg0);
12615 /* Fold -X op -Y as X op Y, where op is eq/ne. */
12616 if (TREE_CODE (arg0) == NEGATE_EXPR
12617 && TREE_CODE (arg1) == NEGATE_EXPR)
12618 return fold_build2_loc (loc, code, type,
12619 TREE_OPERAND (arg0, 0),
12620 TREE_OPERAND (arg1, 0));
12622 /* Fold (X & C) op (Y & C) as ((X ^ Y) & C) op 0, and symmetries. */
12623 if (TREE_CODE (arg0) == BIT_AND_EXPR
12624 && TREE_CODE (arg1) == BIT_AND_EXPR)
12626 tree arg00 = TREE_OPERAND (arg0, 0);
12627 tree arg01 = TREE_OPERAND (arg0, 1);
12628 tree arg10 = TREE_OPERAND (arg1, 0);
12629 tree arg11 = TREE_OPERAND (arg1, 1);
12630 tree itype = TREE_TYPE (arg0);
12632 if (operand_equal_p (arg01, arg11, 0))
12633 return fold_build2_loc (loc, code, type,
12634 fold_build2_loc (loc, BIT_AND_EXPR, itype,
12635 fold_build2_loc (loc,
12636 BIT_XOR_EXPR, itype,
12637 arg00, arg10),
12638 arg01),
12639 build_int_cst (itype, 0));
12641 if (operand_equal_p (arg01, arg10, 0))
12642 return fold_build2_loc (loc, code, type,
12643 fold_build2_loc (loc, BIT_AND_EXPR, itype,
12644 fold_build2_loc (loc,
12645 BIT_XOR_EXPR, itype,
12646 arg00, arg11),
12647 arg01),
12648 build_int_cst (itype, 0));
12650 if (operand_equal_p (arg00, arg11, 0))
12651 return fold_build2_loc (loc, code, type,
12652 fold_build2_loc (loc, BIT_AND_EXPR, itype,
12653 fold_build2_loc (loc,
12654 BIT_XOR_EXPR, itype,
12655 arg01, arg10),
12656 arg00),
12657 build_int_cst (itype, 0));
12659 if (operand_equal_p (arg00, arg10, 0))
12660 return fold_build2_loc (loc, code, type,
12661 fold_build2_loc (loc, BIT_AND_EXPR, itype,
12662 fold_build2_loc (loc,
12663 BIT_XOR_EXPR, itype,
12664 arg01, arg11),
12665 arg00),
12666 build_int_cst (itype, 0));
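/* For example, "(x & 7) == (y & 7)" folds to "((x ^ y) & 7) == 0",
   trading two masks and a compare for an xor, one mask and a
   compare against zero.  */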
12669 if (TREE_CODE (arg0) == BIT_XOR_EXPR
12670 && TREE_CODE (arg1) == BIT_XOR_EXPR)
12672 tree arg00 = TREE_OPERAND (arg0, 0);
12673 tree arg01 = TREE_OPERAND (arg0, 1);
12674 tree arg10 = TREE_OPERAND (arg1, 0);
12675 tree arg11 = TREE_OPERAND (arg1, 1);
12676 tree itype = TREE_TYPE (arg0);
12678 /* Optimize (X ^ Z) op (Y ^ Z) as X op Y, and symmetries.
12679 operand_equal_p guarantees no side-effects so we don't need
12680 to use omit_one_operand on Z. */
12681 if (operand_equal_p (arg01, arg11, 0))
12682 return fold_build2_loc (loc, code, type, arg00, arg10);
12683 if (operand_equal_p (arg01, arg10, 0))
12684 return fold_build2_loc (loc, code, type, arg00, arg11);
12685 if (operand_equal_p (arg00, arg11, 0))
12686 return fold_build2_loc (loc, code, type, arg01, arg10);
12687 if (operand_equal_p (arg00, arg10, 0))
12688 return fold_build2_loc (loc, code, type, arg01, arg11);
12690 /* Optimize (X ^ C1) op (Y ^ C2) as (X ^ (C1 ^ C2)) op Y. */
12691 if (TREE_CODE (arg01) == INTEGER_CST
12692 && TREE_CODE (arg11) == INTEGER_CST)
12693 return fold_build2_loc (loc, code, type,
12694 fold_build2_loc (loc, BIT_XOR_EXPR, itype, arg00,
12695 fold_build2_loc (loc,
12696 BIT_XOR_EXPR, itype,
12697 arg01, arg11)),
12698 arg10);
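/* For example, "(x ^ 5) == (y ^ 3)" folds to "(x ^ (5 ^ 3)) == y",
   i.e. "(x ^ 6) == y".  */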
12701 /* Attempt to simplify equality/inequality comparisons of complex
12702 values. Only lower the comparison if the result is known or
12703 can be simplified to a single scalar comparison. */
12704 if ((TREE_CODE (arg0) == COMPLEX_EXPR
12705 || TREE_CODE (arg0) == COMPLEX_CST)
12706 && (TREE_CODE (arg1) == COMPLEX_EXPR
12707 || TREE_CODE (arg1) == COMPLEX_CST))
12709 tree real0, imag0, real1, imag1;
12710 tree rcond, icond;
12712 if (TREE_CODE (arg0) == COMPLEX_EXPR)
12714 real0 = TREE_OPERAND (arg0, 0);
12715 imag0 = TREE_OPERAND (arg0, 1);
12717 else
12719 real0 = TREE_REALPART (arg0);
12720 imag0 = TREE_IMAGPART (arg0);
12723 if (TREE_CODE (arg1) == COMPLEX_EXPR)
12725 real1 = TREE_OPERAND (arg1, 0);
12726 imag1 = TREE_OPERAND (arg1, 1);
12728 else
12730 real1 = TREE_REALPART (arg1);
12731 imag1 = TREE_IMAGPART (arg1);
12734 rcond = fold_binary_loc (loc, code, type, real0, real1);
12735 if (rcond && TREE_CODE (rcond) == INTEGER_CST)
12737 if (integer_zerop (rcond))
12739 if (code == EQ_EXPR)
12740 return omit_two_operands_loc (loc, type, boolean_false_node,
12741 imag0, imag1);
12742 return fold_build2_loc (loc, NE_EXPR, type, imag0, imag1);
12744 else
12746 if (code == NE_EXPR)
12747 return omit_two_operands_loc (loc, type, boolean_true_node,
12748 imag0, imag1);
12749 return fold_build2_loc (loc, EQ_EXPR, type, imag0, imag1);
12753 icond = fold_binary_loc (loc, code, type, imag0, imag1);
12754 if (icond && TREE_CODE (icond) == INTEGER_CST)
12756 if (integer_zerop (icond))
12758 if (code == EQ_EXPR)
12759 return omit_two_operands_loc (loc, type, boolean_false_node,
12760 real0, real1);
12761 return fold_build2_loc (loc, NE_EXPR, type, real0, real1);
12763 else
12765 if (code == NE_EXPR)
12766 return omit_two_operands_loc (loc, type, boolean_true_node,
12767 real0, real1);
12768 return fold_build2_loc (loc, EQ_EXPR, type, real0, real1);
12773 return NULL_TREE;
12775 case LT_EXPR:
12776 case GT_EXPR:
12777 case LE_EXPR:
12778 case GE_EXPR:
12779 tem = fold_comparison (loc, code, type, op0, op1);
12780 if (tem != NULL_TREE)
12781 return tem;
12783 /* Transform comparisons of the form X +- C CMP X. */
12784 if ((TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
12785 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
12786 && ((TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
12787 && !HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0))))
12788 || (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
12789 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))))
12791 tree arg01 = TREE_OPERAND (arg0, 1);
12792 enum tree_code code0 = TREE_CODE (arg0);
12793 int is_positive;
12795 if (TREE_CODE (arg01) == REAL_CST)
12796 is_positive = REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg01)) ? -1 : 1;
12797 else
12798 is_positive = tree_int_cst_sgn (arg01);
12800 /* (X - c) > X becomes false. */
12801 if (code == GT_EXPR
12802 && ((code0 == MINUS_EXPR && is_positive >= 0)
12803 || (code0 == PLUS_EXPR && is_positive <= 0)))
12805 if (TREE_CODE (arg01) == INTEGER_CST
12806 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
12807 fold_overflow_warning (("assuming signed overflow does not "
12808 "occur when assuming that (X - c) > X "
12809 "is always false"),
12810 WARN_STRICT_OVERFLOW_ALL);
12811 return constant_boolean_node (0, type);
12814 /* Likewise (X + c) < X becomes false. */
12815 if (code == LT_EXPR
12816 && ((code0 == PLUS_EXPR && is_positive >= 0)
12817 || (code0 == MINUS_EXPR && is_positive <= 0)))
12819 if (TREE_CODE (arg01) == INTEGER_CST
12820 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
12821 fold_overflow_warning (("assuming signed overflow does not "
12822 "occur when assuming that "
12823 "(X + c) < X is always false"),
12824 WARN_STRICT_OVERFLOW_ALL);
12825 return constant_boolean_node (0, type);
12828 /* Convert (X - c) <= X to true. */
12829 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1)))
12830 && code == LE_EXPR
12831 && ((code0 == MINUS_EXPR && is_positive >= 0)
12832 || (code0 == PLUS_EXPR && is_positive <= 0)))
12834 if (TREE_CODE (arg01) == INTEGER_CST
12835 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
12836 fold_overflow_warning (("assuming signed overflow does not "
12837 "occur when assuming that "
12838 "(X - c) <= X is always true"),
12839 WARN_STRICT_OVERFLOW_ALL);
12840 return constant_boolean_node (1, type);
12843 /* Convert (X + c) >= X to true. */
12844 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1)))
12845 && code == GE_EXPR
12846 && ((code0 == PLUS_EXPR && is_positive >= 0)
12847 || (code0 == MINUS_EXPR && is_positive <= 0)))
12849 if (TREE_CODE (arg01) == INTEGER_CST
12850 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
12851 fold_overflow_warning (("assuming signed overflow does not "
12852 "occur when assuming that "
12853 "(X + c) >= X is always true"),
12854 WARN_STRICT_OVERFLOW_ALL);
12855 return constant_boolean_node (1, type);
12858 if (TREE_CODE (arg01) == INTEGER_CST)
12860 /* Convert X + c > X and X - c < X to true for integers. */
12861 if (code == GT_EXPR
12862 && ((code0 == PLUS_EXPR && is_positive > 0)
12863 || (code0 == MINUS_EXPR && is_positive < 0)))
12865 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
12866 fold_overflow_warning (("assuming signed overflow does "
12867 "not occur when assuming that "
12868 "(X + c) > X is always true"),
12869 WARN_STRICT_OVERFLOW_ALL);
12870 return constant_boolean_node (1, type);
12873 if (code == LT_EXPR
12874 && ((code0 == MINUS_EXPR && is_positive > 0)
12875 || (code0 == PLUS_EXPR && is_positive < 0)))
12877 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
12878 fold_overflow_warning (("assuming signed overflow does "
12879 "not occur when assuming that "
12880 "(X - c) < X is always true"),
12881 WARN_STRICT_OVERFLOW_ALL);
12882 return constant_boolean_node (1, type);
12885 /* Convert X + c <= X and X - c >= X to false for integers. */
12886 if (code == LE_EXPR
12887 && ((code0 == PLUS_EXPR && is_positive > 0)
12888 || (code0 == MINUS_EXPR && is_positive < 0)))
12890 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
12891 fold_overflow_warning (("assuming signed overflow does "
12892 "not occur when assuming that "
12893 "(X + c) <= X is always false"),
12894 WARN_STRICT_OVERFLOW_ALL);
12895 return constant_boolean_node (0, type);
12898 if (code == GE_EXPR
12899 && ((code0 == MINUS_EXPR && is_positive > 0)
12900 || (code0 == PLUS_EXPR && is_positive < 0)))
12902 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
12903 fold_overflow_warning (("assuming signed overflow does "
12904 "not occur when assuming that "
12905 "(X - c) >= X is always false"),
12906 WARN_STRICT_OVERFLOW_ALL);
12907 return constant_boolean_node (0, type);
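/* For example, when signed overflow is undefined, "x + 1 > x" folds
   to 1 and "x - 1 >= x" folds to 0 for signed x, each case issuing
   a -Wstrict-overflow diagnostic when that warning is enabled.  */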
12912 /* Comparisons with the highest or lowest possible integer of
12913 the specified precision will have known values. */
12915 tree arg1_type = TREE_TYPE (arg1);
12916 unsigned int width = TYPE_PRECISION (arg1_type);
12918 if (TREE_CODE (arg1) == INTEGER_CST
12919 && width <= 2 * HOST_BITS_PER_WIDE_INT
12920 && (INTEGRAL_TYPE_P (arg1_type) || POINTER_TYPE_P (arg1_type)))
12922 HOST_WIDE_INT signed_max_hi;
12923 unsigned HOST_WIDE_INT signed_max_lo;
12924 unsigned HOST_WIDE_INT max_hi, max_lo, min_hi, min_lo;
12926 if (width <= HOST_BITS_PER_WIDE_INT)
12928 signed_max_lo = ((unsigned HOST_WIDE_INT) 1 << (width - 1))
12929 - 1;
12930 signed_max_hi = 0;
12931 max_hi = 0;
12933 if (TYPE_UNSIGNED (arg1_type))
12935 max_lo = ((unsigned HOST_WIDE_INT) 2 << (width - 1)) - 1;
12936 min_lo = 0;
12937 min_hi = 0;
12939 else
12941 max_lo = signed_max_lo;
12942 min_lo = ((unsigned HOST_WIDE_INT) -1 << (width - 1));
12943 min_hi = -1;
12946 else
12948 width -= HOST_BITS_PER_WIDE_INT;
12949 signed_max_lo = -1;
12950 signed_max_hi = ((unsigned HOST_WIDE_INT) 1 << (width - 1))
12951 - 1;
12952 max_lo = -1;
12953 min_lo = 0;
12955 if (TYPE_UNSIGNED (arg1_type))
12957 max_hi = ((unsigned HOST_WIDE_INT) 2 << (width - 1)) - 1;
12958 min_hi = 0;
12960 else
12962 max_hi = signed_max_hi;
12963 min_hi = ((unsigned HOST_WIDE_INT) -1 << (width - 1));
12967 if ((unsigned HOST_WIDE_INT) TREE_INT_CST_HIGH (arg1) == max_hi
12968 && TREE_INT_CST_LOW (arg1) == max_lo)
12969 switch (code)
12971 case GT_EXPR:
12972 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
12974 case GE_EXPR:
12975 return fold_build2_loc (loc, EQ_EXPR, type, op0, op1);
12977 case LE_EXPR:
12978 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
12980 case LT_EXPR:
12981 return fold_build2_loc (loc, NE_EXPR, type, op0, op1);
12983 /* The GE_EXPR and LT_EXPR cases above are not normally
12984 reached because of previous transformations. */
12986 default:
12987 break;
12989 else if ((unsigned HOST_WIDE_INT) TREE_INT_CST_HIGH (arg1)
12990 == max_hi
12991 && TREE_INT_CST_LOW (arg1) == max_lo - 1)
12992 switch (code)
12994 case GT_EXPR:
12995 arg1 = const_binop (PLUS_EXPR, arg1,
12996 build_int_cst (TREE_TYPE (arg1), 1));
12997 return fold_build2_loc (loc, EQ_EXPR, type,
12998 fold_convert_loc (loc,
12999 TREE_TYPE (arg1), arg0),
13000 arg1);
13001 case LE_EXPR:
13002 arg1 = const_binop (PLUS_EXPR, arg1,
13003 build_int_cst (TREE_TYPE (arg1), 1));
13004 return fold_build2_loc (loc, NE_EXPR, type,
13005 fold_convert_loc (loc, TREE_TYPE (arg1),
13006 arg0),
13007 arg1);
13008 default:
13009 break;
13011 else if ((unsigned HOST_WIDE_INT) TREE_INT_CST_HIGH (arg1)
13012 == min_hi
13013 && TREE_INT_CST_LOW (arg1) == min_lo)
13014 switch (code)
13016 case LT_EXPR:
13017 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
13019 case LE_EXPR:
13020 return fold_build2_loc (loc, EQ_EXPR, type, op0, op1);
13022 case GE_EXPR:
13023 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
13025 case GT_EXPR:
13026 return fold_build2_loc (loc, NE_EXPR, type, op0, op1);
13028 default:
13029 break;
13031 else if ((unsigned HOST_WIDE_INT) TREE_INT_CST_HIGH (arg1)
13032 == min_hi
13033 && TREE_INT_CST_LOW (arg1) == min_lo + 1)
13034 switch (code)
13036 case GE_EXPR:
13037 arg1 = const_binop (MINUS_EXPR, arg1, integer_one_node);
13038 return fold_build2_loc (loc, NE_EXPR, type,
13039 fold_convert_loc (loc,
13040 TREE_TYPE (arg1), arg0),
13041 arg1);
13042 case LT_EXPR:
13043 arg1 = const_binop (MINUS_EXPR, arg1, integer_one_node);
13044 return fold_build2_loc (loc, EQ_EXPR, type,
13045 fold_convert_loc (loc, TREE_TYPE (arg1),
13046 arg0),
13047 arg1);
13048 default:
13049 break;
13052 else if (TREE_INT_CST_HIGH (arg1) == signed_max_hi
13053 && TREE_INT_CST_LOW (arg1) == signed_max_lo
13054 && TYPE_UNSIGNED (arg1_type)
13055 /* We will flip the signedness of the comparison operator
13056 associated with the mode of arg1, so the sign bit is
13057 specified by this mode. Check that arg1 is the signed
13058 max associated with this sign bit. */
13059 && width == GET_MODE_BITSIZE (TYPE_MODE (arg1_type))
13060 /* signed_type does not work on pointer types. */
13061 && INTEGRAL_TYPE_P (arg1_type))
13063 /* The following case also applies to X < signed_max+1
13064 and X >= signed_max+1 because of previous transformations. */
13065 if (code == LE_EXPR || code == GT_EXPR)
13067 tree st;
13068 st = signed_type_for (TREE_TYPE (arg1));
13069 return fold_build2_loc (loc,
13070 code == LE_EXPR ? GE_EXPR : LT_EXPR,
13071 type, fold_convert_loc (loc, st, arg0),
13072 build_int_cst (st, 0));
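/* For example, for 32-bit unsigned x, "x <= 0x7fffffff" becomes
   "(int) x >= 0" and "x > 0x7fffffff" becomes "(int) x < 0",
   turning the comparison into a plain sign test.  */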
13078 /* If we are comparing an ABS_EXPR with a constant, we can
13079 convert all the cases into explicit comparisons, but they may
13080 well not be faster than doing the ABS and one comparison.
13081 But ABS (X) <= C is a range comparison, which becomes a subtraction
13082 and a comparison, and is probably faster. */
13083 if (code == LE_EXPR
13084 && TREE_CODE (arg1) == INTEGER_CST
13085 && TREE_CODE (arg0) == ABS_EXPR
13086 && ! TREE_SIDE_EFFECTS (arg0)
13087 && (0 != (tem = negate_expr (arg1)))
13088 && TREE_CODE (tem) == INTEGER_CST
13089 && !TREE_OVERFLOW (tem))
13090 return fold_build2_loc (loc, TRUTH_ANDIF_EXPR, type,
13091 build2 (GE_EXPR, type,
13092 TREE_OPERAND (arg0, 0), tem),
13093 build2 (LE_EXPR, type,
13094 TREE_OPERAND (arg0, 0), arg1));
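/* For example, "abs (x) <= 5" folds to "x >= -5 && x <= 5", which
   the range-check machinery can later reduce to a single unsigned
   comparison of the form "(unsigned) (x + 5) <= 10".  */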
13096 /* Convert ABS_EXPR<x> >= 0 to true. */
13097 strict_overflow_p = false;
13098 if (code == GE_EXPR
13099 && (integer_zerop (arg1)
13100 || (! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0)))
13101 && real_zerop (arg1)))
13102 && tree_expr_nonnegative_warnv_p (arg0, &strict_overflow_p))
13104 if (strict_overflow_p)
13105 fold_overflow_warning (("assuming signed overflow does not occur "
13106 "when simplifying comparison of "
13107 "absolute value and zero"),
13108 WARN_STRICT_OVERFLOW_CONDITIONAL);
13109 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
13112 /* Convert ABS_EXPR<x> < 0 to false. */
13113 strict_overflow_p = false;
13114 if (code == LT_EXPR
13115 && (integer_zerop (arg1) || real_zerop (arg1))
13116 && tree_expr_nonnegative_warnv_p (arg0, &strict_overflow_p))
13118 if (strict_overflow_p)
13119 fold_overflow_warning (("assuming signed overflow does not occur "
13120 "when simplifying comparison of "
13121 "absolute value and zero"),
13122 WARN_STRICT_OVERFLOW_CONDITIONAL);
13123 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
13126 /* If X is unsigned, convert X < (1 << Y) into X >> Y == 0
13127 and similarly for >= into !=. */
13128 if ((code == LT_EXPR || code == GE_EXPR)
13129 && TYPE_UNSIGNED (TREE_TYPE (arg0))
13130 && TREE_CODE (arg1) == LSHIFT_EXPR
13131 && integer_onep (TREE_OPERAND (arg1, 0)))
13133 tem = build2 (code == LT_EXPR ? EQ_EXPR : NE_EXPR, type,
13134 build2 (RSHIFT_EXPR, TREE_TYPE (arg0), arg0,
13135 TREE_OPERAND (arg1, 1)),
13136 build_int_cst (TREE_TYPE (arg0), 0));
13137 goto fold_binary_exit;
13140 if ((code == LT_EXPR || code == GE_EXPR)
13141 && TYPE_UNSIGNED (TREE_TYPE (arg0))
13142 && CONVERT_EXPR_P (arg1)
13143 && TREE_CODE (TREE_OPERAND (arg1, 0)) == LSHIFT_EXPR
13144 && integer_onep (TREE_OPERAND (TREE_OPERAND (arg1, 0), 0)))
13146 tem = build2 (code == LT_EXPR ? EQ_EXPR : NE_EXPR, type,
13147 fold_convert_loc (loc, TREE_TYPE (arg0),
13148 build2 (RSHIFT_EXPR,
13149 TREE_TYPE (arg0), arg0,
13150 TREE_OPERAND (TREE_OPERAND (arg1, 0),
13151 1))),
13152 build_int_cst (TREE_TYPE (arg0), 0));
13153 goto fold_binary_exit;
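/* For example, for unsigned x, "x < (1 << y)" folds to
   "(x >> y) == 0" and "x >= (1 << y)" to "(x >> y) != 0".  */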
13156 return NULL_TREE;
13158 case UNORDERED_EXPR:
13159 case ORDERED_EXPR:
13160 case UNLT_EXPR:
13161 case UNLE_EXPR:
13162 case UNGT_EXPR:
13163 case UNGE_EXPR:
13164 case UNEQ_EXPR:
13165 case LTGT_EXPR:
13166 if (TREE_CODE (arg0) == REAL_CST && TREE_CODE (arg1) == REAL_CST)
13168 t1 = fold_relational_const (code, type, arg0, arg1);
13169 if (t1 != NULL_TREE)
13170 return t1;
13173 /* If the first operand is NaN, the result is constant. */
13174 if (TREE_CODE (arg0) == REAL_CST
13175 && REAL_VALUE_ISNAN (TREE_REAL_CST (arg0))
13176 && (code != LTGT_EXPR || ! flag_trapping_math))
13178 t1 = (code == ORDERED_EXPR || code == LTGT_EXPR)
13179 ? integer_zero_node
13180 : integer_one_node;
13181 return omit_one_operand_loc (loc, type, t1, arg1);
13184 /* If the second operand is NaN, the result is constant. */
13185 if (TREE_CODE (arg1) == REAL_CST
13186 && REAL_VALUE_ISNAN (TREE_REAL_CST (arg1))
13187 && (code != LTGT_EXPR || ! flag_trapping_math))
13189 t1 = (code == ORDERED_EXPR || code == LTGT_EXPR)
13190 ? integer_zero_node
13191 : integer_one_node;
13192 return omit_one_operand_loc (loc, type, t1, arg0);
13195 /* Simplify unordered comparison of something with itself. */
13196 if ((code == UNLE_EXPR || code == UNGE_EXPR || code == UNEQ_EXPR)
13197 && operand_equal_p (arg0, arg1, 0))
13198 return constant_boolean_node (1, type);
13200 if (code == LTGT_EXPR
13201 && !flag_trapping_math
13202 && operand_equal_p (arg0, arg1, 0))
13203 return constant_boolean_node (0, type);
13205 /* Fold (double)float1 CMP (double)float2 into float1 CMP float2. */
13207 tree targ0 = strip_float_extensions (arg0);
13208 tree targ1 = strip_float_extensions (arg1);
13209 tree newtype = TREE_TYPE (targ0);
13211 if (TYPE_PRECISION (TREE_TYPE (targ1)) > TYPE_PRECISION (newtype))
13212 newtype = TREE_TYPE (targ1);
13214 if (TYPE_PRECISION (newtype) < TYPE_PRECISION (TREE_TYPE (arg0)))
13215 return fold_build2_loc (loc, code, type,
13216 fold_convert_loc (loc, newtype, targ0),
13217 fold_convert_loc (loc, newtype, targ1));
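/* For example, with "float f1, f2", "(double) f1 < (double) f2" is
   compared directly in float, since widening both operands cannot
   change the result of the comparison.  */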
13220 return NULL_TREE;
13222 case COMPOUND_EXPR:
13223 /* When pedantic, a compound expression can be neither an lvalue
13224 nor an integer constant expression. */
13225 if (TREE_SIDE_EFFECTS (arg0) || TREE_CONSTANT (arg1))
13226 return NULL_TREE;
13227 /* Don't let (0, 0) be a null pointer constant. */
13228 tem = integer_zerop (arg1) ? build1 (NOP_EXPR, type, arg1)
13229 : fold_convert_loc (loc, type, arg1);
13230 return pedantic_non_lvalue_loc (loc, tem);
13232 case COMPLEX_EXPR:
13233 if ((TREE_CODE (arg0) == REAL_CST
13234 && TREE_CODE (arg1) == REAL_CST)
13235 || (TREE_CODE (arg0) == INTEGER_CST
13236 && TREE_CODE (arg1) == INTEGER_CST))
13237 return build_complex (type, arg0, arg1);
13238 return NULL_TREE;
13240 case ASSERT_EXPR:
13241 /* An ASSERT_EXPR should never be passed to fold_binary. */
13242 gcc_unreachable ();
13244 default:
13245 return NULL_TREE;
13246 } /* switch (code) */
13247 fold_binary_exit:
13248 protected_set_expr_location (tem, loc);
13249 return tem;
13252 /* Callback for walk_tree, looking for LABEL_EXPR. Return *TP if it is
13253 a LABEL_EXPR; otherwise return NULL_TREE. Do not check the subtrees
13254 of GOTO_EXPR. */
13256 static tree
13257 contains_label_1 (tree *tp, int *walk_subtrees, void *data ATTRIBUTE_UNUSED)
13259 switch (TREE_CODE (*tp))
13261 case LABEL_EXPR:
13262 return *tp;
13264 case GOTO_EXPR:
13265 *walk_subtrees = 0;
13267 /* ... fall through ... */
13269 default:
13270 return NULL_TREE;
13274 /* Return whether the sub-tree ST contains a label which is accessible from
13275 outside the sub-tree. */
13277 static bool
13278 contains_label_p (tree st)
13280 return
13281 (walk_tree_without_duplicates (&st, contains_label_1, NULL) != NULL_TREE);
13284 /* Fold a ternary expression of code CODE and type TYPE with operands
13285 OP0, OP1, and OP2. Return the folded expression if folding is
13286 successful. Otherwise, return NULL_TREE. */
13288 tree
13289 fold_ternary_loc (location_t loc, enum tree_code code, tree type,
13290 tree op0, tree op1, tree op2)
13292 tree tem;
13293 tree arg0 = NULL_TREE, arg1 = NULL_TREE;
13294 enum tree_code_class kind = TREE_CODE_CLASS (code);
13296 gcc_assert (IS_EXPR_CODE_CLASS (kind)
13297 && TREE_CODE_LENGTH (code) == 3);
13299 /* Strip any conversions that don't change the mode. This is safe
13300 for every expression, except for a comparison expression because
13301 its signedness is derived from its operands. So, in the latter
13302 case, only strip conversions that don't change the signedness.
13304 Note that this is done as an internal manipulation within the
13305 constant folder, in order to find the simplest representation of
13306 the arguments so that their form can be studied. In any case,
13307 the appropriate type conversions should be put back in the tree
13308 that will get out of the constant folder. */
13309 if (op0)
13311 arg0 = op0;
13312 STRIP_NOPS (arg0);
13315 if (op1)
13317 arg1 = op1;
13318 STRIP_NOPS (arg1);
13321 switch (code)
13323 case COMPONENT_REF:
13324 if (TREE_CODE (arg0) == CONSTRUCTOR
13325 && ! type_contains_placeholder_p (TREE_TYPE (arg0)))
13327 unsigned HOST_WIDE_INT idx;
13328 tree field, value;
13329 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (arg0), idx, field, value)
13330 if (field == arg1)
13331 return value;
13333 return NULL_TREE;
13335 case COND_EXPR:
13336 /* Pedantic ANSI C says that a conditional expression is never an lvalue,
13337 so all simple results must be passed through pedantic_non_lvalue. */
13338 if (TREE_CODE (arg0) == INTEGER_CST)
13340 tree unused_op = integer_zerop (arg0) ? op1 : op2;
13341 tem = integer_zerop (arg0) ? op2 : op1;
13342 /* Only optimize constant conditions when the selected branch
13343 has the same type as the COND_EXPR. This avoids optimizing
13344 away "c ? x : throw", where the throw has a void type.
13345 Avoid throwing away an operand that contains a label. */
13346 if ((!TREE_SIDE_EFFECTS (unused_op)
13347 || !contains_label_p (unused_op))
13348 && (! VOID_TYPE_P (TREE_TYPE (tem))
13349 || VOID_TYPE_P (type)))
13350 return pedantic_non_lvalue_loc (loc, tem);
13351 return NULL_TREE;
13353 if (operand_equal_p (arg1, op2, 0))
13354 return pedantic_omit_one_operand_loc (loc, type, arg1, arg0);
13356 /* If we have A op B ? A : C, we may be able to convert this to a
13357 simpler expression, depending on the operation and the values
13358 of B and C. Signed zeros prevent all of these transformations,
13359 for reasons given above each one.
13361 Also try swapping the arguments and inverting the conditional. */
13362 if (COMPARISON_CLASS_P (arg0)
13363 && operand_equal_for_comparison_p (TREE_OPERAND (arg0, 0),
13364 arg1, TREE_OPERAND (arg0, 1))
13365 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg1))))
13367 tem = fold_cond_expr_with_comparison (loc, type, arg0, op1, op2);
13368 if (tem)
13369 return tem;
13372 if (COMPARISON_CLASS_P (arg0)
13373 && operand_equal_for_comparison_p (TREE_OPERAND (arg0, 0),
13374 op2,
13375 TREE_OPERAND (arg0, 1))
13376 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (op2))))
13378 tem = fold_truth_not_expr (loc, arg0);
13379 if (tem && COMPARISON_CLASS_P (tem))
13381 tem = fold_cond_expr_with_comparison (loc, type, tem, op2, op1);
13382 if (tem)
13383 return tem;
13387 /* If the second operand is simpler than the third, swap them
13388 since that produces better jump optimization results. */
13389 if (truth_value_p (TREE_CODE (arg0))
13390 && tree_swap_operands_p (op1, op2, false))
13392 /* See if this can be inverted. If it can't, possibly because
13393 it was a floating-point inequality comparison, don't do
13394 anything. */
13395 tem = fold_truth_not_expr (loc, arg0);
13396 if (tem)
13397 return fold_build3_loc (loc, code, type, tem, op2, op1);
13400 /* Convert A ? 1 : 0 to simply A. */
13401 if (integer_onep (op1)
13402 && integer_zerop (op2)
13403 /* If we try to convert OP0 to our type, the
13404 call to fold will try to move the conversion inside
13405 a COND, which will recurse. In that case, the COND_EXPR
13406 is probably the best choice, so leave it alone. */
13407 && type == TREE_TYPE (arg0))
13408 return pedantic_non_lvalue_loc (loc, arg0);
13410 /* Convert A ? 0 : 1 to !A. This prefers the use of NOT_EXPR
13411 over COND_EXPR in cases such as floating point comparisons. */
13412 if (integer_zerop (op1)
13413 && integer_onep (op2)
13414 && truth_value_p (TREE_CODE (arg0)))
13415 return pedantic_non_lvalue_loc (loc,
13416 fold_convert_loc (loc, type,
13417 invert_truthvalue_loc (loc,
13418 arg0)));
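/* For example, "cond ? 1 : 0" becomes plain "cond" and
   "cond ? 0 : 1" becomes "!cond", whenever cond is already a truth
   value of a suitable type.  */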
13420 /* A < 0 ? <sign bit of A> : 0 is simply (A & <sign bit of A>). */
13421 if (TREE_CODE (arg0) == LT_EXPR
13422 && integer_zerop (TREE_OPERAND (arg0, 1))
13423 && integer_zerop (op2)
13424 && (tem = sign_bit_p (TREE_OPERAND (arg0, 0), arg1)))
13426 /* sign_bit_p only checks ARG1 bits within A's precision.
13427 If <sign bit of A> has wider type than A, bits outside
13428 of A's precision in <sign bit of A> need to be checked.
13429 If they are all 0, this optimization needs to be done
13430 in unsigned A's type; if they are all 1, in signed A's type;
13431 otherwise this can't be done. */
13432 if (TYPE_PRECISION (TREE_TYPE (tem))
13433 < TYPE_PRECISION (TREE_TYPE (arg1))
13434 && TYPE_PRECISION (TREE_TYPE (tem))
13435 < TYPE_PRECISION (type))
13437 unsigned HOST_WIDE_INT mask_lo;
13438 HOST_WIDE_INT mask_hi;
13439 int inner_width, outer_width;
13440 tree tem_type;
13442 inner_width = TYPE_PRECISION (TREE_TYPE (tem));
13443 outer_width = TYPE_PRECISION (TREE_TYPE (arg1));
13444 if (outer_width > TYPE_PRECISION (type))
13445 outer_width = TYPE_PRECISION (type);
13447 if (outer_width > HOST_BITS_PER_WIDE_INT)
13449 mask_hi = ((unsigned HOST_WIDE_INT) -1
13450 >> (2 * HOST_BITS_PER_WIDE_INT - outer_width));
13451 mask_lo = -1;
13453 else
13455 mask_hi = 0;
13456 mask_lo = ((unsigned HOST_WIDE_INT) -1
13457 >> (HOST_BITS_PER_WIDE_INT - outer_width));
13459 if (inner_width > HOST_BITS_PER_WIDE_INT)
13461 mask_hi &= ~((unsigned HOST_WIDE_INT) -1
13462 >> (HOST_BITS_PER_WIDE_INT - inner_width));
13463 mask_lo = 0;
13465 else
13466 mask_lo &= ~((unsigned HOST_WIDE_INT) -1
13467 >> (HOST_BITS_PER_WIDE_INT - inner_width));
13469 if ((TREE_INT_CST_HIGH (arg1) & mask_hi) == mask_hi
13470 && (TREE_INT_CST_LOW (arg1) & mask_lo) == mask_lo)
13472 tem_type = signed_type_for (TREE_TYPE (tem));
13473 tem = fold_convert_loc (loc, tem_type, tem);
13475 else if ((TREE_INT_CST_HIGH (arg1) & mask_hi) == 0
13476 && (TREE_INT_CST_LOW (arg1) & mask_lo) == 0)
13478 tem_type = unsigned_type_for (TREE_TYPE (tem));
13479 tem = fold_convert_loc (loc, tem_type, tem);
13481 else
13482 tem = NULL;
13485 if (tem)
13486 return
13487 fold_convert_loc (loc, type,
13488 fold_build2_loc (loc, BIT_AND_EXPR,
13489 TREE_TYPE (tem), tem,
13490 fold_convert_loc (loc,
13491 TREE_TYPE (tem),
13492 arg1)));
13495 /* (A >> N) & 1 ? (1 << N) : 0 is simply A & (1 << N). A & 1 was
13496 already handled above. */
13497 if (TREE_CODE (arg0) == BIT_AND_EXPR
13498 && integer_onep (TREE_OPERAND (arg0, 1))
13499 && integer_zerop (op2)
13500 && integer_pow2p (arg1))
13502 tree tem = TREE_OPERAND (arg0, 0);
13503 STRIP_NOPS (tem);
13504 if (TREE_CODE (tem) == RSHIFT_EXPR
13505 && TREE_CODE (TREE_OPERAND (tem, 1)) == INTEGER_CST
13506 && (unsigned HOST_WIDE_INT) tree_log2 (arg1) ==
13507 TREE_INT_CST_LOW (TREE_OPERAND (tem, 1)))
13508 return fold_build2_loc (loc, BIT_AND_EXPR, type,
13509 TREE_OPERAND (tem, 0), arg1);
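/* For example, "((x >> 3) & 1) ? 8 : 0" folds to "x & 8": the
   selected bit is masked in place instead of being shifted down
   and back up again.  */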
13512 /* A & N ? N : 0 is simply A & N if N is a power of two. This
13513 is probably obsolete because the first operand should be a
13514 truth value (that's why we have the two cases above), but let's
13515 leave it in until we can confirm this for all front-ends. */
13516 if (integer_zerop (op2)
13517 && TREE_CODE (arg0) == NE_EXPR
13518 && integer_zerop (TREE_OPERAND (arg0, 1))
13519 && integer_pow2p (arg1)
13520 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_AND_EXPR
13521 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
13522 arg1, OEP_ONLY_CONST))
13523 return pedantic_non_lvalue_loc (loc,
13524 fold_convert_loc (loc, type,
13525 TREE_OPERAND (arg0, 0)));
13527 /* Convert A ? B : 0 into A && B if A and B are truth values. */
13528 if (integer_zerop (op2)
13529 && truth_value_p (TREE_CODE (arg0))
13530 && truth_value_p (TREE_CODE (arg1)))
13531 return fold_build2_loc (loc, TRUTH_ANDIF_EXPR, type,
13532 fold_convert_loc (loc, type, arg0),
13533 arg1);
13535 /* Convert A ? B : 1 into !A || B if A and B are truth values. */
13536 if (integer_onep (op2)
13537 && truth_value_p (TREE_CODE (arg0))
13538 && truth_value_p (TREE_CODE (arg1)))
13540 /* Only perform transformation if ARG0 is easily inverted. */
13541 tem = fold_truth_not_expr (loc, arg0);
13542 if (tem)
13543 return fold_build2_loc (loc, TRUTH_ORIF_EXPR, type,
13544 fold_convert_loc (loc, type, tem),
13545 arg1);
13548 /* Convert A ? 0 : B into !A && B if A and B are truth values. */
13549 if (integer_zerop (arg1)
13550 && truth_value_p (TREE_CODE (arg0))
13551 && truth_value_p (TREE_CODE (op2)))
13553 /* Only perform transformation if ARG0 is easily inverted. */
13554 tem = fold_truth_not_expr (loc, arg0);
13555 if (tem)
13556 return fold_build2_loc (loc, TRUTH_ANDIF_EXPR, type,
13557 fold_convert_loc (loc, type, tem),
13558 op2);
13561 /* Convert A ? 1 : B into A || B if A and B are truth values. */
13562 if (integer_onep (arg1)
13563 && truth_value_p (TREE_CODE (arg0))
13564 && truth_value_p (TREE_CODE (op2)))
13565 return fold_build2_loc (loc, TRUTH_ORIF_EXPR, type,
13566 fold_convert_loc (loc, type, arg0),
13567 op2);
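/* Summarizing the four rewrites above, for truth values a and b:
     a ? b : 0  ==>  a && b        a ? b : 1  ==>  !a || b
     a ? 0 : b  ==>  !a && b       a ? 1 : b  ==>  a || b  */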
13569 return NULL_TREE;
13571 case CALL_EXPR:
13572 /* CALL_EXPRs used to be ternary exprs. Catch any mistaken uses
13573 of fold_ternary on them. */
13574 gcc_unreachable ();
13576 case BIT_FIELD_REF:
13577 if ((TREE_CODE (arg0) == VECTOR_CST
13578 || (TREE_CODE (arg0) == CONSTRUCTOR && TREE_CONSTANT (arg0)))
13579 && type == TREE_TYPE (TREE_TYPE (arg0)))
13581 unsigned HOST_WIDE_INT width = tree_low_cst (arg1, 1);
13582 unsigned HOST_WIDE_INT idx = tree_low_cst (op2, 1);
13584 if (width != 0
13585 && simple_cst_equal (arg1, TYPE_SIZE (type)) == 1
13586 && (idx % width) == 0
13587 && (idx = idx / width)
13588 < TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0)))
13590 tree elements = NULL_TREE;
13592 if (TREE_CODE (arg0) == VECTOR_CST)
13593 elements = TREE_VECTOR_CST_ELTS (arg0);
13594 else
13596 unsigned HOST_WIDE_INT idx;
13597 tree value;
13599 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (arg0), idx, value)
13600 elements = tree_cons (NULL_TREE, value, elements);
13602 while (idx-- > 0 && elements)
13603 elements = TREE_CHAIN (elements);
13604 if (elements)
13605 return TREE_VALUE (elements);
13606 else
13607 return fold_convert_loc (loc, type, integer_zero_node);
13611 /* A bit-field-ref that referenced the full argument can be stripped. */
13612 if (INTEGRAL_TYPE_P (TREE_TYPE (arg0))
13613 && TYPE_PRECISION (TREE_TYPE (arg0)) == tree_low_cst (arg1, 1)
13614 && integer_zerop (op2))
13615 return fold_convert_loc (loc, type, arg0);
13617 return NULL_TREE;
13619 default:
13620 return NULL_TREE;
13621 } /* switch (code) */
13624 /* Perform constant folding and related simplification of EXPR.
13625 The related simplifications include x*1 => x, x*0 => 0, etc.,
13626 and application of the associative law.
13627 NOP_EXPR conversions may be removed freely (as long as we
13628 are careful not to change the type of the overall expression).
13629 We cannot simplify through a CONVERT_EXPR, FIX_EXPR or FLOAT_EXPR,
13630 but we can constant-fold them if they have constant operands. */
13632 #ifdef ENABLE_FOLD_CHECKING
13633 # define fold(x) fold_1 (x)
13634 static tree fold_1 (tree);
13635 static
13636 #endif
13637 tree
13638 fold (tree expr)
13640 const tree t = expr;
13641 enum tree_code code = TREE_CODE (t);
13642 enum tree_code_class kind = TREE_CODE_CLASS (code);
13643 tree tem;
13644 location_t loc = EXPR_LOCATION (expr);
13646 /* Return right away if a constant. */
13647 if (kind == tcc_constant)
13648 return t;
13650 /* CALL_EXPR-like objects with variable numbers of operands are
13651 treated specially. */
13652 if (kind == tcc_vl_exp)
13654 if (code == CALL_EXPR)
13656 tem = fold_call_expr (loc, expr, false);
13657 return tem ? tem : expr;
13659 return expr;
13662 if (IS_EXPR_CODE_CLASS (kind))
13664 tree type = TREE_TYPE (t);
13665 tree op0, op1, op2;
13667 switch (TREE_CODE_LENGTH (code))
13669 case 1:
13670 op0 = TREE_OPERAND (t, 0);
13671 tem = fold_unary_loc (loc, code, type, op0);
13672 return tem ? tem : expr;
13673 case 2:
13674 op0 = TREE_OPERAND (t, 0);
13675 op1 = TREE_OPERAND (t, 1);
13676 tem = fold_binary_loc (loc, code, type, op0, op1);
13677 return tem ? tem : expr;
13678 case 3:
13679 op0 = TREE_OPERAND (t, 0);
13680 op1 = TREE_OPERAND (t, 1);
13681 op2 = TREE_OPERAND (t, 2);
13682 tem = fold_ternary_loc (loc, code, type, op0, op1, op2);
13683 return tem ? tem : expr;
13684 default:
13685 break;
13689 switch (code)
13691 case ARRAY_REF:
13693 tree op0 = TREE_OPERAND (t, 0);
13694 tree op1 = TREE_OPERAND (t, 1);
13696 if (TREE_CODE (op1) == INTEGER_CST
13697 && TREE_CODE (op0) == CONSTRUCTOR
13698 && ! type_contains_placeholder_p (TREE_TYPE (op0)))
13700 VEC(constructor_elt,gc) *elts = CONSTRUCTOR_ELTS (op0);
13701 unsigned HOST_WIDE_INT end = VEC_length (constructor_elt, elts);
13702 unsigned HOST_WIDE_INT begin = 0;
13704 /* Find a matching index by means of a binary search. */
13705 while (begin != end)
13707 unsigned HOST_WIDE_INT middle = (begin + end) / 2;
13708 tree index = VEC_index (constructor_elt, elts, middle)->index;
13710 if (TREE_CODE (index) == INTEGER_CST
13711 && tree_int_cst_lt (index, op1))
13712 begin = middle + 1;
13713 else if (TREE_CODE (index) == INTEGER_CST
13714 && tree_int_cst_lt (op1, index))
13715 end = middle;
13716 else if (TREE_CODE (index) == RANGE_EXPR
13717 && tree_int_cst_lt (TREE_OPERAND (index, 1), op1))
13718 begin = middle + 1;
13719 else if (TREE_CODE (index) == RANGE_EXPR
13720 && tree_int_cst_lt (op1, TREE_OPERAND (index, 0)))
13721 end = middle;
13722 else
13723 return VEC_index (constructor_elt, elts, middle)->value;
13727 return t;
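/* For example, folding "a[5]" where a is initialized from a
   constant CONSTRUCTOR binary-searches the element list (whose
   indexes are kept sorted) for index 5, treating RANGE_EXPR
   initializers such as "[4 ... 9] = 0" as closed intervals.  */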
13730 case CONST_DECL:
13731 return fold (DECL_INITIAL (t));
13733 default:
13734 return t;
13735 } /* switch (code) */
13738 #ifdef ENABLE_FOLD_CHECKING
13739 #undef fold
13741 static void fold_checksum_tree (const_tree, struct md5_ctx *, htab_t);
13742 static void fold_check_failed (const_tree, const_tree);
13743 void print_fold_checksum (const_tree);
13745 /* When --enable-checking=fold, compute a digest of expr before
13746 and after the actual fold call to verify that fold did not
13747 accidentally change the original expr. */
13749 tree
13750 fold (tree expr)
13752 tree ret;
13753 struct md5_ctx ctx;
13754 unsigned char checksum_before[16], checksum_after[16];
13755 htab_t ht;
13757 ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
13758 md5_init_ctx (&ctx);
13759 fold_checksum_tree (expr, &ctx, ht);
13760 md5_finish_ctx (&ctx, checksum_before);
13761 htab_empty (ht);
13763 ret = fold_1 (expr);
13765 md5_init_ctx (&ctx);
13766 fold_checksum_tree (expr, &ctx, ht);
13767 md5_finish_ctx (&ctx, checksum_after);
13768 htab_delete (ht);
13770 if (memcmp (checksum_before, checksum_after, 16))
13771 fold_check_failed (expr, ret);
13773 return ret;
13776 void
13777 print_fold_checksum (const_tree expr)
13779 struct md5_ctx ctx;
13780 unsigned char checksum[16], cnt;
13781 htab_t ht;
13783 ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
13784 md5_init_ctx (&ctx);
13785 fold_checksum_tree (expr, &ctx, ht);
13786 md5_finish_ctx (&ctx, checksum);
13787 htab_delete (ht);
13788 for (cnt = 0; cnt < 16; ++cnt)
13789 fprintf (stderr, "%02x", checksum[cnt]);
13790 putc ('\n', stderr);
13793 static void
13794 fold_check_failed (const_tree expr ATTRIBUTE_UNUSED, const_tree ret ATTRIBUTE_UNUSED)
13796 internal_error ("fold check: original tree changed by fold");
13799 static void
13800 fold_checksum_tree (const_tree expr, struct md5_ctx *ctx, htab_t ht)
13802 void **slot;
13803 enum tree_code code;
13804 union tree_node buf;
13805 int i, len;
13807 recursive_label:
13809 gcc_assert ((sizeof (struct tree_exp) + 5 * sizeof (tree)
13810 <= sizeof (struct tree_function_decl))
13811 && sizeof (struct tree_type) <= sizeof (struct tree_function_decl));
13812 if (expr == NULL)
13813 return;
13814 slot = (void **) htab_find_slot (ht, expr, INSERT);
13815 if (*slot != NULL)
13816 return;
13817 *slot = CONST_CAST_TREE (expr);
13818 code = TREE_CODE (expr);
13819 if (TREE_CODE_CLASS (code) == tcc_declaration
13820 && DECL_ASSEMBLER_NAME_SET_P (expr))
13822 /* Allow DECL_ASSEMBLER_NAME to be modified. */
13823 memcpy ((char *) &buf, expr, tree_size (expr));
13824 SET_DECL_ASSEMBLER_NAME ((tree)&buf, NULL);
13825 expr = (tree) &buf;
13827 else if (TREE_CODE_CLASS (code) == tcc_type
13828 && (TYPE_POINTER_TO (expr)
13829 || TYPE_REFERENCE_TO (expr)
13830 || TYPE_CACHED_VALUES_P (expr)
13831 || TYPE_CONTAINS_PLACEHOLDER_INTERNAL (expr)
13832 || TYPE_NEXT_VARIANT (expr)))
13834 /* Allow these fields to be modified. */
13835 tree tmp;
13836 memcpy ((char *) &buf, expr, tree_size (expr));
13837 expr = tmp = (tree) &buf;
13838 TYPE_CONTAINS_PLACEHOLDER_INTERNAL (tmp) = 0;
13839 TYPE_POINTER_TO (tmp) = NULL;
13840 TYPE_REFERENCE_TO (tmp) = NULL;
13841 TYPE_NEXT_VARIANT (tmp) = NULL;
13842 if (TYPE_CACHED_VALUES_P (tmp))
13844 TYPE_CACHED_VALUES_P (tmp) = 0;
13845 TYPE_CACHED_VALUES (tmp) = NULL;
13848 md5_process_bytes (expr, tree_size (expr), ctx);
13849 fold_checksum_tree (TREE_TYPE (expr), ctx, ht);
13850 if (TREE_CODE_CLASS (code) != tcc_type
13851 && TREE_CODE_CLASS (code) != tcc_declaration
13852 && code != TREE_LIST
13853 && code != SSA_NAME)
13854 fold_checksum_tree (TREE_CHAIN (expr), ctx, ht);
13855 switch (TREE_CODE_CLASS (code))
13857 case tcc_constant:
13858 switch (code)
13860 case STRING_CST:
13861 md5_process_bytes (TREE_STRING_POINTER (expr),
13862 TREE_STRING_LENGTH (expr), ctx);
13863 break;
13864 case COMPLEX_CST:
13865 fold_checksum_tree (TREE_REALPART (expr), ctx, ht);
13866 fold_checksum_tree (TREE_IMAGPART (expr), ctx, ht);
13867 break;
13868 case VECTOR_CST:
13869 fold_checksum_tree (TREE_VECTOR_CST_ELTS (expr), ctx, ht);
13870 break;
13871 default:
13872 break;
13874 break;
13875 case tcc_exceptional:
13876 switch (code)
13878 case TREE_LIST:
13879 fold_checksum_tree (TREE_PURPOSE (expr), ctx, ht);
13880 fold_checksum_tree (TREE_VALUE (expr), ctx, ht);
13881 expr = TREE_CHAIN (expr);
13882 goto recursive_label;
13883 break;
13884 case TREE_VEC:
13885 for (i = 0; i < TREE_VEC_LENGTH (expr); ++i)
13886 fold_checksum_tree (TREE_VEC_ELT (expr, i), ctx, ht);
13887 break;
13888 default:
13889 break;
13891 break;
13892 case tcc_expression:
13893 case tcc_reference:
13894 case tcc_comparison:
13895 case tcc_unary:
13896 case tcc_binary:
13897 case tcc_statement:
13898 case tcc_vl_exp:
13899 len = TREE_OPERAND_LENGTH (expr);
13900 for (i = 0; i < len; ++i)
13901 fold_checksum_tree (TREE_OPERAND (expr, i), ctx, ht);
13902 break;
13903 case tcc_declaration:
13904 fold_checksum_tree (DECL_NAME (expr), ctx, ht);
13905 fold_checksum_tree (DECL_CONTEXT (expr), ctx, ht);
13906 if (CODE_CONTAINS_STRUCT (TREE_CODE (expr), TS_DECL_COMMON))
13908 fold_checksum_tree (DECL_SIZE (expr), ctx, ht);
13909 fold_checksum_tree (DECL_SIZE_UNIT (expr), ctx, ht);
13910 fold_checksum_tree (DECL_INITIAL (expr), ctx, ht);
13911 fold_checksum_tree (DECL_ABSTRACT_ORIGIN (expr), ctx, ht);
13912 fold_checksum_tree (DECL_ATTRIBUTES (expr), ctx, ht);
13914 if (CODE_CONTAINS_STRUCT (TREE_CODE (expr), TS_DECL_WITH_VIS))
13915 fold_checksum_tree (DECL_SECTION_NAME (expr), ctx, ht);
13917 if (CODE_CONTAINS_STRUCT (TREE_CODE (expr), TS_DECL_NON_COMMON))
13919 fold_checksum_tree (DECL_VINDEX (expr), ctx, ht);
13920 fold_checksum_tree (DECL_RESULT_FLD (expr), ctx, ht);
13921 fold_checksum_tree (DECL_ARGUMENT_FLD (expr), ctx, ht);
13923 break;
13924 case tcc_type:
13925 if (TREE_CODE (expr) == ENUMERAL_TYPE)
13926 fold_checksum_tree (TYPE_VALUES (expr), ctx, ht);
13927 fold_checksum_tree (TYPE_SIZE (expr), ctx, ht);
13928 fold_checksum_tree (TYPE_SIZE_UNIT (expr), ctx, ht);
13929 fold_checksum_tree (TYPE_ATTRIBUTES (expr), ctx, ht);
13930 fold_checksum_tree (TYPE_NAME (expr), ctx, ht);
13931 if (INTEGRAL_TYPE_P (expr)
13932 || SCALAR_FLOAT_TYPE_P (expr))
13934 fold_checksum_tree (TYPE_MIN_VALUE (expr), ctx, ht);
13935 fold_checksum_tree (TYPE_MAX_VALUE (expr), ctx, ht);
13937 fold_checksum_tree (TYPE_MAIN_VARIANT (expr), ctx, ht);
13938 if (TREE_CODE (expr) == RECORD_TYPE
13939 || TREE_CODE (expr) == UNION_TYPE
13940 || TREE_CODE (expr) == QUAL_UNION_TYPE)
13941 fold_checksum_tree (TYPE_BINFO (expr), ctx, ht);
13942 fold_checksum_tree (TYPE_CONTEXT (expr), ctx, ht);
13943 break;
13944 default:
13945 break;
13949 /* Helper function for outputting the checksum of a tree T. When
13950 debugging with gdb, you can "define mynext" to be "next" followed
13951 by "call debug_fold_checksum (op0)", then just trace down till the
13952 outputs differ. */
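/* For illustration, a hypothetical gdb session using this helper
   (the command name and printed bytes are made up):

     (gdb) define mynext
     > next
     > call debug_fold_checksum (op0)
     > end
     (gdb) mynext
     143 7 212 ... 9

   Stepping with "mynext" prints the checksum after every statement,
   so the first step whose output differs pinpoints the statement
   that mutated OP0.  */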
13954 DEBUG_FUNCTION void
13955 debug_fold_checksum (const_tree t)
13957 int i;
13958 unsigned char checksum[16];
13959 struct md5_ctx ctx;
13960 htab_t ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
13962 md5_init_ctx (&ctx);
13963 fold_checksum_tree (t, &ctx, ht);
13964 md5_finish_ctx (&ctx, checksum);
13965 htab_empty (ht);
13967 for (i = 0; i < 16; i++)
13968 fprintf (stderr, "%d ", checksum[i]);
13970 fprintf (stderr, "\n");
13973 #endif
13975 /* Fold a unary tree expression with code CODE of type TYPE with an
13976 operand OP0. LOC is the location of the resulting expression.
13977 Return a folded expression if successful. Otherwise, return a tree
13978 expression with code CODE of type TYPE with an operand OP0. */
13980 tree
13981 fold_build1_stat_loc (location_t loc,
13982 enum tree_code code, tree type, tree op0 MEM_STAT_DECL)
13984 tree tem;
13985 #ifdef ENABLE_FOLD_CHECKING
13986 unsigned char checksum_before[16], checksum_after[16];
13987 struct md5_ctx ctx;
13988 htab_t ht;
13990 ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
13991 md5_init_ctx (&ctx);
13992 fold_checksum_tree (op0, &ctx, ht);
13993 md5_finish_ctx (&ctx, checksum_before);
13994 htab_empty (ht);
13995 #endif
13997 tem = fold_unary_loc (loc, code, type, op0);
13998 if (!tem)
14000 tem = build1_stat (code, type, op0 PASS_MEM_STAT);
14001 SET_EXPR_LOCATION (tem, loc);
14004 #ifdef ENABLE_FOLD_CHECKING
14005 md5_init_ctx (&ctx);
14006 fold_checksum_tree (op0, &ctx, ht);
14007 md5_finish_ctx (&ctx, checksum_after);
14008 htab_delete (ht);
14010 if (memcmp (checksum_before, checksum_after, 16))
14011 fold_check_failed (op0, tem);
14012 #endif
14013 return tem;
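/* A minimal sketch of the checking discipline used above, and repeated
   in the fold_build2/fold_build3 variants below (pseudocode, assuming
   ENABLE_FOLD_CHECKING is defined):

     checksum (op)  ->  C1
     tem = fold_unary_loc (...);    /+ must not modify OP +/
     checksum (op)  ->  C2
     if (C1 != C2)
       fold_check_failed (op, tem);

   The hash table HT is emptied between checksums so that shared
   subtrees are walked identically on each pass.  */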
14016 /* Fold a binary tree expression with code CODE of type TYPE with
14017 operands OP0 and OP1. LOC is the location of the resulting
14018 expression. Return a folded expression if successful. Otherwise,
14019 return a tree expression with code CODE of type TYPE with operands
14020 OP0 and OP1. */
14022 tree
14023 fold_build2_stat_loc (location_t loc,
14024 enum tree_code code, tree type, tree op0, tree op1
14025 MEM_STAT_DECL)
14027 tree tem;
14028 #ifdef ENABLE_FOLD_CHECKING
14029 unsigned char checksum_before_op0[16],
14030 checksum_before_op1[16],
14031 checksum_after_op0[16],
14032 checksum_after_op1[16];
14033 struct md5_ctx ctx;
14034 htab_t ht;
14036 ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
14037 md5_init_ctx (&ctx);
14038 fold_checksum_tree (op0, &ctx, ht);
14039 md5_finish_ctx (&ctx, checksum_before_op0);
14040 htab_empty (ht);
14042 md5_init_ctx (&ctx);
14043 fold_checksum_tree (op1, &ctx, ht);
14044 md5_finish_ctx (&ctx, checksum_before_op1);
14045 htab_empty (ht);
14046 #endif
14048 tem = fold_binary_loc (loc, code, type, op0, op1);
14049 if (!tem)
14051 tem = build2_stat (code, type, op0, op1 PASS_MEM_STAT);
14052 SET_EXPR_LOCATION (tem, loc);
14055 #ifdef ENABLE_FOLD_CHECKING
14056 md5_init_ctx (&ctx);
14057 fold_checksum_tree (op0, &ctx, ht);
14058 md5_finish_ctx (&ctx, checksum_after_op0);
14059 htab_empty (ht);
14061 if (memcmp (checksum_before_op0, checksum_after_op0, 16))
14062 fold_check_failed (op0, tem);
14064 md5_init_ctx (&ctx);
14065 fold_checksum_tree (op1, &ctx, ht);
14066 md5_finish_ctx (&ctx, checksum_after_op1);
14067 htab_delete (ht);
14069 if (memcmp (checksum_before_op1, checksum_after_op1, 16))
14070 fold_check_failed (op1, tem);
14071 #endif
14072 return tem;
14075 /* Fold a ternary tree expression with code CODE of type TYPE with
14076 operands OP0, OP1, and OP2. Return a folded expression if
14077 successful. Otherwise, return a tree expression with code CODE of
14078 type TYPE with operands OP0, OP1, and OP2. */
14080 tree
14081 fold_build3_stat_loc (location_t loc, enum tree_code code, tree type,
14082 tree op0, tree op1, tree op2 MEM_STAT_DECL)
14084 tree tem;
14085 #ifdef ENABLE_FOLD_CHECKING
14086 unsigned char checksum_before_op0[16],
14087 checksum_before_op1[16],
14088 checksum_before_op2[16],
14089 checksum_after_op0[16],
14090 checksum_after_op1[16],
14091 checksum_after_op2[16];
14092 struct md5_ctx ctx;
14093 htab_t ht;
14095 ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
14096 md5_init_ctx (&ctx);
14097 fold_checksum_tree (op0, &ctx, ht);
14098 md5_finish_ctx (&ctx, checksum_before_op0);
14099 htab_empty (ht);
14101 md5_init_ctx (&ctx);
14102 fold_checksum_tree (op1, &ctx, ht);
14103 md5_finish_ctx (&ctx, checksum_before_op1);
14104 htab_empty (ht);
14106 md5_init_ctx (&ctx);
14107 fold_checksum_tree (op2, &ctx, ht);
14108 md5_finish_ctx (&ctx, checksum_before_op2);
14109 htab_empty (ht);
14110 #endif
14112 gcc_assert (TREE_CODE_CLASS (code) != tcc_vl_exp);
14113 tem = fold_ternary_loc (loc, code, type, op0, op1, op2);
14114 if (!tem)
14116 tem = build3_stat (code, type, op0, op1, op2 PASS_MEM_STAT);
14117 SET_EXPR_LOCATION (tem, loc);
14120 #ifdef ENABLE_FOLD_CHECKING
14121 md5_init_ctx (&ctx);
14122 fold_checksum_tree (op0, &ctx, ht);
14123 md5_finish_ctx (&ctx, checksum_after_op0);
14124 htab_empty (ht);
14126 if (memcmp (checksum_before_op0, checksum_after_op0, 16))
14127 fold_check_failed (op0, tem);
14129 md5_init_ctx (&ctx);
14130 fold_checksum_tree (op1, &ctx, ht);
14131 md5_finish_ctx (&ctx, checksum_after_op1);
14132 htab_empty (ht);
14134 if (memcmp (checksum_before_op1, checksum_after_op1, 16))
14135 fold_check_failed (op1, tem);
14137 md5_init_ctx (&ctx);
14138 fold_checksum_tree (op2, &ctx, ht);
14139 md5_finish_ctx (&ctx, checksum_after_op2);
14140 htab_delete (ht);
14142 if (memcmp (checksum_before_op2, checksum_after_op2, 16))
14143 fold_check_failed (op2, tem);
14144 #endif
14145 return tem;
14148 /* Fold a CALL_EXPR expression of type TYPE with function FN, NARGS
14149 arguments in ARGARRAY, and a null static chain.
14150 Return a folded expression if successful. Otherwise, return a CALL_EXPR
14151 of type TYPE from the given operands as constructed by build_call_array. */
14153 tree
14154 fold_build_call_array_loc (location_t loc, tree type, tree fn,
14155 int nargs, tree *argarray)
14157 tree tem;
14158 #ifdef ENABLE_FOLD_CHECKING
14159 unsigned char checksum_before_fn[16],
14160 checksum_before_arglist[16],
14161 checksum_after_fn[16],
14162 checksum_after_arglist[16];
14163 struct md5_ctx ctx;
14164 htab_t ht;
14165 int i;
14167 ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
14168 md5_init_ctx (&ctx);
14169 fold_checksum_tree (fn, &ctx, ht);
14170 md5_finish_ctx (&ctx, checksum_before_fn);
14171 htab_empty (ht);
14173 md5_init_ctx (&ctx);
14174 for (i = 0; i < nargs; i++)
14175 fold_checksum_tree (argarray[i], &ctx, ht);
14176 md5_finish_ctx (&ctx, checksum_before_arglist);
14177 htab_empty (ht);
14178 #endif
14180 tem = fold_builtin_call_array (loc, type, fn, nargs, argarray);
14182 #ifdef ENABLE_FOLD_CHECKING
14183 md5_init_ctx (&ctx);
14184 fold_checksum_tree (fn, &ctx, ht);
14185 md5_finish_ctx (&ctx, checksum_after_fn);
14186 htab_empty (ht);
14188 if (memcmp (checksum_before_fn, checksum_after_fn, 16))
14189 fold_check_failed (fn, tem);
14191 md5_init_ctx (&ctx);
14192 for (i = 0; i < nargs; i++)
14193 fold_checksum_tree (argarray[i], &ctx, ht);
14194 md5_finish_ctx (&ctx, checksum_after_arglist);
14195 htab_delete (ht);
14197 if (memcmp (checksum_before_arglist, checksum_after_arglist, 16))
14198 fold_check_failed (NULL_TREE, tem);
14199 #endif
14200 return tem;
14203 /* Perform constant folding and related simplification of initializer
14204 expression EXPR. These behave identically to "fold_buildN" but ignore
14205 potential run-time traps and exceptions that fold must preserve. */
14207 #define START_FOLD_INIT \
14208 int saved_signaling_nans = flag_signaling_nans;\
14209 int saved_trapping_math = flag_trapping_math;\
14210 int saved_rounding_math = flag_rounding_math;\
14211 int saved_trapv = flag_trapv;\
14212 int saved_folding_initializer = folding_initializer;\
14213 flag_signaling_nans = 0;\
14214 flag_trapping_math = 0;\
14215 flag_rounding_math = 0;\
14216 flag_trapv = 0;\
14217 folding_initializer = 1;
14219 #define END_FOLD_INIT \
14220 flag_signaling_nans = saved_signaling_nans;\
14221 flag_trapping_math = saved_trapping_math;\
14222 flag_rounding_math = saved_rounding_math;\
14223 flag_trapv = saved_trapv;\
14224 folding_initializer = saved_folding_initializer;
14226 tree
14227 fold_build1_initializer_loc (location_t loc, enum tree_code code,
14228 tree type, tree op)
14230 tree result;
14231 START_FOLD_INIT;
14233 result = fold_build1_loc (loc, code, type, op);
14235 END_FOLD_INIT;
14236 return result;
14239 tree
14240 fold_build2_initializer_loc (location_t loc, enum tree_code code,
14241 tree type, tree op0, tree op1)
14243 tree result;
14244 START_FOLD_INIT;
14246 result = fold_build2_loc (loc, code, type, op0, op1);
14248 END_FOLD_INIT;
14249 return result;
14252 tree
14253 fold_build3_initializer_loc (location_t loc, enum tree_code code,
14254 tree type, tree op0, tree op1, tree op2)
14256 tree result;
14257 START_FOLD_INIT;
14259 result = fold_build3_loc (loc, code, type, op0, op1, op2);
14261 END_FOLD_INIT;
14262 return result;
14265 tree
14266 fold_build_call_array_initializer_loc (location_t loc, tree type, tree fn,
14267 int nargs, tree *argarray)
14269 tree result;
14270 START_FOLD_INIT;
14272 result = fold_build_call_array_loc (loc, type, fn, nargs, argarray);
14274 END_FOLD_INIT;
14275 return result;
14278 #undef START_FOLD_INIT
14279 #undef END_FOLD_INIT
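/* For illustration: in initializer context the folders may evaluate
   expressions that fold would otherwise preserve for their run-time
   effects.  A hedged example (ONE, THREE and DBL_TYPE are hypothetical
   REAL_CST and type trees):

     tree t = fold_build2_initializer_loc (loc, RDIV_EXPR, dbl_type,
                                           one, three);

   folds 1.0/3.0 to a REAL_CST even under -frounding-math, because
   START_FOLD_INIT temporarily clears flag_rounding_math.  */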
14281 /* Determine if first argument is a multiple of second argument. Return 0 if
14282 it is not, or we cannot easily determine it to be.
14284 An example of the sort of thing we care about (at this point; this routine
14285 could surely be made more general, and expanded to do what the *_DIV_EXPR's
14286 fold cases do now) is discovering that
14288 SAVE_EXPR (I) * SAVE_EXPR (J * 8)
14290 is a multiple of
14292 SAVE_EXPR (J * 8)
14294 when we know that the two SAVE_EXPR (J * 8) nodes are the same node.
14296 This code also handles discovering that
14298 SAVE_EXPR (I) * SAVE_EXPR (J * 8)
14300 is a multiple of 8 so we don't have to worry about dealing with a
14301 possible remainder.
14303 Note that we *look* inside a SAVE_EXPR only to determine how it was
14304 calculated; it is not safe for fold to do much of anything else with the
14305 internals of a SAVE_EXPR, since it cannot know when it will be evaluated
14306 at run time. For example, the latter example above *cannot* be implemented
14307 as SAVE_EXPR (I) * J or any variant thereof, since the value of J at
14308 evaluation time of the original SAVE_EXPR is not necessarily the same at
14309 the time the new expression is evaluated. The only optimization of this
14310 sort that would be valid is changing
14312 SAVE_EXPR (I) * SAVE_EXPR (SAVE_EXPR (J) * 8)
14314 divided by 8 to
14316 SAVE_EXPR (I) * SAVE_EXPR (J)
14318 (where the same SAVE_EXPR (J) is used in the original and the
14319 transformed version). */
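/* A concrete example (illustrative; X is an arbitrary sizetype tree):

     multiple_of_p (sizetype, X * 8 + 16, build_int_cst (sizetype, 8))

   returns 1: the MULT_EXPR needs only one operand to be a multiple
   (8 divides 8), and the PLUS_EXPR needs both, which holds here
   since 8 also divides 16.  */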
14321 int
14322 multiple_of_p (tree type, const_tree top, const_tree bottom)
14324 if (operand_equal_p (top, bottom, 0))
14325 return 1;
14327 if (TREE_CODE (type) != INTEGER_TYPE)
14328 return 0;
14330 switch (TREE_CODE (top))
14332 case BIT_AND_EXPR:
14333 /* Bitwise and provides a power of two multiple. If the mask is
14334 a multiple of BOTTOM then TOP is a multiple of BOTTOM. */
14335 if (!integer_pow2p (bottom))
14336 return 0;
14337 /* FALLTHRU */
14339 case MULT_EXPR:
14340 return (multiple_of_p (type, TREE_OPERAND (top, 0), bottom)
14341 || multiple_of_p (type, TREE_OPERAND (top, 1), bottom));
14343 case PLUS_EXPR:
14344 case MINUS_EXPR:
14345 return (multiple_of_p (type, TREE_OPERAND (top, 0), bottom)
14346 && multiple_of_p (type, TREE_OPERAND (top, 1), bottom));
14348 case LSHIFT_EXPR:
14349 if (TREE_CODE (TREE_OPERAND (top, 1)) == INTEGER_CST)
14351 tree op1, t1;
14353 op1 = TREE_OPERAND (top, 1);
14354 /* const_binop may not detect overflow correctly,
14355 so check for it explicitly here. */
14356 if (TYPE_PRECISION (TREE_TYPE (size_one_node))
14357 > TREE_INT_CST_LOW (op1)
14358 && TREE_INT_CST_HIGH (op1) == 0
14359 && 0 != (t1 = fold_convert (type,
14360 const_binop (LSHIFT_EXPR,
14361 size_one_node,
14362 op1)))
14363 && !TREE_OVERFLOW (t1))
14364 return multiple_of_p (type, t1, bottom);
14366 return 0;
14368 case NOP_EXPR:
14369 /* Can't handle conversions from non-integral or wider integral type. */
14370 if ((TREE_CODE (TREE_TYPE (TREE_OPERAND (top, 0))) != INTEGER_TYPE)
14371 || (TYPE_PRECISION (type)
14372 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (top, 0)))))
14373 return 0;
14375 /* ... fall through ... */
14377 case SAVE_EXPR:
14378 return multiple_of_p (type, TREE_OPERAND (top, 0), bottom);
14380 case COND_EXPR:
14381 return (multiple_of_p (type, TREE_OPERAND (top, 1), bottom)
14382 && multiple_of_p (type, TREE_OPERAND (top, 2), bottom));
14384 case INTEGER_CST:
14385 if (TREE_CODE (bottom) != INTEGER_CST
14386 || integer_zerop (bottom)
14387 || (TYPE_UNSIGNED (type)
14388 && (tree_int_cst_sgn (top) < 0
14389 || tree_int_cst_sgn (bottom) < 0)))
14390 return 0;
14391 return integer_zerop (int_const_binop (TRUNC_MOD_EXPR,
14392 top, bottom, 0));
14394 default:
14395 return 0;
14399 /* Return true if CODE or TYPE is known to be non-negative. */
14401 static bool
14402 tree_simple_nonnegative_warnv_p (enum tree_code code, tree type)
14404 if ((TYPE_PRECISION (type) != 1 || TYPE_UNSIGNED (type))
14405 && truth_value_p (code))
14406 /* Truth values evaluate to 0 or 1, which is nonnegative unless we
14407 have a signed:1 type (where the values are -1 and 0).  */
14408 return true;
14409 return false;
14412 /* Return true if (CODE OP0) is known to be non-negative. If the return
14413 value is based on the assumption that signed overflow is undefined,
14414 set *STRICT_OVERFLOW_P to true; otherwise, don't change
14415 *STRICT_OVERFLOW_P. */
14417 bool
14418 tree_unary_nonnegative_warnv_p (enum tree_code code, tree type, tree op0,
14419 bool *strict_overflow_p)
14421 if (TYPE_UNSIGNED (type))
14422 return true;
14424 switch (code)
14426 case ABS_EXPR:
14427 /* We can't return 1 if flag_wrapv is set because
14428 ABS_EXPR<INT_MIN> = INT_MIN. */
14429 if (!INTEGRAL_TYPE_P (type))
14430 return true;
14431 if (TYPE_OVERFLOW_UNDEFINED (type))
14433 *strict_overflow_p = true;
14434 return true;
14436 break;
14438 case NON_LVALUE_EXPR:
14439 case FLOAT_EXPR:
14440 case FIX_TRUNC_EXPR:
14441 return tree_expr_nonnegative_warnv_p (op0,
14442 strict_overflow_p);
14444 case NOP_EXPR:
14446 tree inner_type = TREE_TYPE (op0);
14447 tree outer_type = type;
14449 if (TREE_CODE (outer_type) == REAL_TYPE)
14451 if (TREE_CODE (inner_type) == REAL_TYPE)
14452 return tree_expr_nonnegative_warnv_p (op0,
14453 strict_overflow_p);
14454 if (TREE_CODE (inner_type) == INTEGER_TYPE)
14456 if (TYPE_UNSIGNED (inner_type))
14457 return true;
14458 return tree_expr_nonnegative_warnv_p (op0,
14459 strict_overflow_p);
14462 else if (TREE_CODE (outer_type) == INTEGER_TYPE)
14464 if (TREE_CODE (inner_type) == REAL_TYPE)
14465 return tree_expr_nonnegative_warnv_p (op0,
14466 strict_overflow_p);
14467 if (TREE_CODE (inner_type) == INTEGER_TYPE)
14468 return TYPE_PRECISION (inner_type) < TYPE_PRECISION (outer_type)
14469 && TYPE_UNSIGNED (inner_type);
14472 break;
14474 default:
14475 return tree_simple_nonnegative_warnv_p (code, type);
14478 /* We don't know the sign of `t', so be conservative and return false.  */
14479 return false;
14482 /* Return true if (CODE OP0 OP1) is known to be non-negative. If the return
14483 value is based on the assumption that signed overflow is undefined,
14484 set *STRICT_OVERFLOW_P to true; otherwise, don't change
14485 *STRICT_OVERFLOW_P. */
14487 bool
14488 tree_binary_nonnegative_warnv_p (enum tree_code code, tree type, tree op0,
14489 tree op1, bool *strict_overflow_p)
14491 if (TYPE_UNSIGNED (type))
14492 return true;
14494 switch (code)
14496 case POINTER_PLUS_EXPR:
14497 case PLUS_EXPR:
14498 if (FLOAT_TYPE_P (type))
14499 return (tree_expr_nonnegative_warnv_p (op0,
14500 strict_overflow_p)
14501 && tree_expr_nonnegative_warnv_p (op1,
14502 strict_overflow_p));
14504 /* zero_extend(x) + zero_extend(y) is non-negative if x and y are
14505 both unsigned and at least 2 bits shorter than the result. */
14506 if (TREE_CODE (type) == INTEGER_TYPE
14507 && TREE_CODE (op0) == NOP_EXPR
14508 && TREE_CODE (op1) == NOP_EXPR)
14510 tree inner1 = TREE_TYPE (TREE_OPERAND (op0, 0));
14511 tree inner2 = TREE_TYPE (TREE_OPERAND (op1, 0));
14512 if (TREE_CODE (inner1) == INTEGER_TYPE && TYPE_UNSIGNED (inner1)
14513 && TREE_CODE (inner2) == INTEGER_TYPE && TYPE_UNSIGNED (inner2))
14515 unsigned int prec = MAX (TYPE_PRECISION (inner1),
14516 TYPE_PRECISION (inner2)) + 1;
14517 return prec < TYPE_PRECISION (type);
14520 break;
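/* For example (hypothetical types): zero-extending two unsigned char
   values into a 32-bit int gives prec = MAX (8, 8) + 1 = 9 < 32, so
   the sum cannot reach the sign bit and is non-negative.  */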
14522 case MULT_EXPR:
14523 if (FLOAT_TYPE_P (type))
14525 /* x * x for floating point x is always non-negative. */
14526 if (operand_equal_p (op0, op1, 0))
14527 return true;
14528 return (tree_expr_nonnegative_warnv_p (op0,
14529 strict_overflow_p)
14530 && tree_expr_nonnegative_warnv_p (op1,
14531 strict_overflow_p));
14534 /* zero_extend(x) * zero_extend(y) is non-negative if x and y are both
14535 unsigned and the sum of their precisions is less than the result's.  */
14536 if (TREE_CODE (type) == INTEGER_TYPE
14537 && (TREE_CODE (op0) == NOP_EXPR || TREE_CODE (op0) == INTEGER_CST)
14538 && (TREE_CODE (op1) == NOP_EXPR || TREE_CODE (op1) == INTEGER_CST))
14540 tree inner0 = (TREE_CODE (op0) == NOP_EXPR)
14541 ? TREE_TYPE (TREE_OPERAND (op0, 0))
14542 : TREE_TYPE (op0);
14543 tree inner1 = (TREE_CODE (op1) == NOP_EXPR)
14544 ? TREE_TYPE (TREE_OPERAND (op1, 0))
14545 : TREE_TYPE (op1);
14547 bool unsigned0 = TYPE_UNSIGNED (inner0);
14548 bool unsigned1 = TYPE_UNSIGNED (inner1);
14550 if (TREE_CODE (op0) == INTEGER_CST)
14551 unsigned0 = unsigned0 || tree_int_cst_sgn (op0) >= 0;
14553 if (TREE_CODE (op1) == INTEGER_CST)
14554 unsigned1 = unsigned1 || tree_int_cst_sgn (op1) >= 0;
14556 if (TREE_CODE (inner0) == INTEGER_TYPE && unsigned0
14557 && TREE_CODE (inner1) == INTEGER_TYPE && unsigned1)
14559 unsigned int precision0 = (TREE_CODE (op0) == INTEGER_CST)
14560 ? tree_int_cst_min_precision (op0, /*unsignedp=*/true)
14561 : TYPE_PRECISION (inner0);
14563 unsigned int precision1 = (TREE_CODE (op1) == INTEGER_CST)
14564 ? tree_int_cst_min_precision (op1, /*unsignedp=*/true)
14565 : TYPE_PRECISION (inner1);
14567 return precision0 + precision1 < TYPE_PRECISION (type);
14570 return false;
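/* For example (hypothetical types): unsigned char * unsigned char
   widened to a 32-bit int gives precision0 + precision1 = 16 < 32,
   so the product cannot set the sign bit.  */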
14572 case BIT_AND_EXPR:
14573 case MAX_EXPR:
14574 return (tree_expr_nonnegative_warnv_p (op0,
14575 strict_overflow_p)
14576 || tree_expr_nonnegative_warnv_p (op1,
14577 strict_overflow_p));
14579 case BIT_IOR_EXPR:
14580 case BIT_XOR_EXPR:
14581 case MIN_EXPR:
14582 case RDIV_EXPR:
14583 case TRUNC_DIV_EXPR:
14584 case CEIL_DIV_EXPR:
14585 case FLOOR_DIV_EXPR:
14586 case ROUND_DIV_EXPR:
14587 return (tree_expr_nonnegative_warnv_p (op0,
14588 strict_overflow_p)
14589 && tree_expr_nonnegative_warnv_p (op1,
14590 strict_overflow_p));
14592 case TRUNC_MOD_EXPR:
14593 case CEIL_MOD_EXPR:
14594 case FLOOR_MOD_EXPR:
14595 case ROUND_MOD_EXPR:
14596 return tree_expr_nonnegative_warnv_p (op0,
14597 strict_overflow_p);
14598 default:
14599 return tree_simple_nonnegative_warnv_p (code, type);
14602 /* We don't know the sign of `t', so be conservative and return false.  */
14603 return false;
14606 /* Return true if T is known to be non-negative. If the return
14607 value is based on the assumption that signed overflow is undefined,
14608 set *STRICT_OVERFLOW_P to true; otherwise, don't change
14609 *STRICT_OVERFLOW_P. */
14611 bool
14612 tree_single_nonnegative_warnv_p (tree t, bool *strict_overflow_p)
14614 if (TYPE_UNSIGNED (TREE_TYPE (t)))
14615 return true;
14617 switch (TREE_CODE (t))
14619 case INTEGER_CST:
14620 return tree_int_cst_sgn (t) >= 0;
14622 case REAL_CST:
14623 return ! REAL_VALUE_NEGATIVE (TREE_REAL_CST (t));
14625 case FIXED_CST:
14626 return ! FIXED_VALUE_NEGATIVE (TREE_FIXED_CST (t));
14628 case COND_EXPR:
14629 return (tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 1),
14630 strict_overflow_p)
14631 && tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 2),
14632 strict_overflow_p));
14633 default:
14634 return tree_simple_nonnegative_warnv_p (TREE_CODE (t),
14635 TREE_TYPE (t));
14637 /* We don't know the sign of `t', so be conservative and return false.  */
14638 return false;
14641 /* Return true if a call to FNDECL with arguments ARG0 and ARG1 is
14642 known to be non-negative.  If the return value is based on the
14643 assumption that signed overflow is undefined, set *STRICT_OVERFLOW_P
14644 to true; otherwise, don't change *STRICT_OVERFLOW_P.  */
14646 bool
14647 tree_call_nonnegative_warnv_p (tree type, tree fndecl,
14648 tree arg0, tree arg1, bool *strict_overflow_p)
14650 if (fndecl && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL)
14651 switch (DECL_FUNCTION_CODE (fndecl))
14653 CASE_FLT_FN (BUILT_IN_ACOS):
14654 CASE_FLT_FN (BUILT_IN_ACOSH):
14655 CASE_FLT_FN (BUILT_IN_CABS):
14656 CASE_FLT_FN (BUILT_IN_COSH):
14657 CASE_FLT_FN (BUILT_IN_ERFC):
14658 CASE_FLT_FN (BUILT_IN_EXP):
14659 CASE_FLT_FN (BUILT_IN_EXP10):
14660 CASE_FLT_FN (BUILT_IN_EXP2):
14661 CASE_FLT_FN (BUILT_IN_FABS):
14662 CASE_FLT_FN (BUILT_IN_FDIM):
14663 CASE_FLT_FN (BUILT_IN_HYPOT):
14664 CASE_FLT_FN (BUILT_IN_POW10):
14665 CASE_INT_FN (BUILT_IN_FFS):
14666 CASE_INT_FN (BUILT_IN_PARITY):
14667 CASE_INT_FN (BUILT_IN_POPCOUNT):
14668 case BUILT_IN_BSWAP32:
14669 case BUILT_IN_BSWAP64:
14670 /* Always true. */
14671 return true;
14673 CASE_FLT_FN (BUILT_IN_SQRT):
14674 /* sqrt(-0.0) is -0.0. */
14675 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (type)))
14676 return true;
14677 return tree_expr_nonnegative_warnv_p (arg0,
14678 strict_overflow_p);
14680 CASE_FLT_FN (BUILT_IN_ASINH):
14681 CASE_FLT_FN (BUILT_IN_ATAN):
14682 CASE_FLT_FN (BUILT_IN_ATANH):
14683 CASE_FLT_FN (BUILT_IN_CBRT):
14684 CASE_FLT_FN (BUILT_IN_CEIL):
14685 CASE_FLT_FN (BUILT_IN_ERF):
14686 CASE_FLT_FN (BUILT_IN_EXPM1):
14687 CASE_FLT_FN (BUILT_IN_FLOOR):
14688 CASE_FLT_FN (BUILT_IN_FMOD):
14689 CASE_FLT_FN (BUILT_IN_FREXP):
14690 CASE_FLT_FN (BUILT_IN_LCEIL):
14691 CASE_FLT_FN (BUILT_IN_LDEXP):
14692 CASE_FLT_FN (BUILT_IN_LFLOOR):
14693 CASE_FLT_FN (BUILT_IN_LLCEIL):
14694 CASE_FLT_FN (BUILT_IN_LLFLOOR):
14695 CASE_FLT_FN (BUILT_IN_LLRINT):
14696 CASE_FLT_FN (BUILT_IN_LLROUND):
14697 CASE_FLT_FN (BUILT_IN_LRINT):
14698 CASE_FLT_FN (BUILT_IN_LROUND):
14699 CASE_FLT_FN (BUILT_IN_MODF):
14700 CASE_FLT_FN (BUILT_IN_NEARBYINT):
14701 CASE_FLT_FN (BUILT_IN_RINT):
14702 CASE_FLT_FN (BUILT_IN_ROUND):
14703 CASE_FLT_FN (BUILT_IN_SCALB):
14704 CASE_FLT_FN (BUILT_IN_SCALBLN):
14705 CASE_FLT_FN (BUILT_IN_SCALBN):
14706 CASE_FLT_FN (BUILT_IN_SIGNBIT):
14707 CASE_FLT_FN (BUILT_IN_SIGNIFICAND):
14708 CASE_FLT_FN (BUILT_IN_SINH):
14709 CASE_FLT_FN (BUILT_IN_TANH):
14710 CASE_FLT_FN (BUILT_IN_TRUNC):
14711 /* True if the 1st argument is nonnegative. */
14712 return tree_expr_nonnegative_warnv_p (arg0,
14713 strict_overflow_p);
14715 CASE_FLT_FN (BUILT_IN_FMAX):
14716 /* True if the 1st OR 2nd argument is nonnegative.  */
14717 return (tree_expr_nonnegative_warnv_p (arg0,
14718 strict_overflow_p)
14719 || (tree_expr_nonnegative_warnv_p (arg1,
14720 strict_overflow_p)));
14722 CASE_FLT_FN (BUILT_IN_FMIN):
14723 /* True if the 1st AND 2nd arguments are nonnegative. */
14724 return (tree_expr_nonnegative_warnv_p (arg0,
14725 strict_overflow_p)
14726 && (tree_expr_nonnegative_warnv_p (arg1,
14727 strict_overflow_p)));
14729 CASE_FLT_FN (BUILT_IN_COPYSIGN):
14730 /* True if the 2nd argument is nonnegative. */
14731 return tree_expr_nonnegative_warnv_p (arg1,
14732 strict_overflow_p);
14734 CASE_FLT_FN (BUILT_IN_POWI):
14735 /* True if the 1st argument is nonnegative or the second
14736 argument is an even integer. */
14737 if (TREE_CODE (arg1) == INTEGER_CST
14738 && (TREE_INT_CST_LOW (arg1) & 1) == 0)
14739 return true;
14740 return tree_expr_nonnegative_warnv_p (arg0,
14741 strict_overflow_p);
14743 CASE_FLT_FN (BUILT_IN_POW):
14744 /* True if the 1st argument is nonnegative or the second
14745 argument is an even integer-valued real.  */
14746 if (TREE_CODE (arg1) == REAL_CST)
14748 REAL_VALUE_TYPE c;
14749 HOST_WIDE_INT n;
14751 c = TREE_REAL_CST (arg1);
14752 n = real_to_integer (&c);
14753 if ((n & 1) == 0)
14755 REAL_VALUE_TYPE cint;
14756 real_from_integer (&cint, VOIDmode, n,
14757 n < 0 ? -1 : 0, 0);
14758 if (real_identical (&c, &cint))
14759 return true;
14762 return tree_expr_nonnegative_warnv_p (arg0,
14763 strict_overflow_p);
14765 default:
14766 break;
14768 return tree_simple_nonnegative_warnv_p (CALL_EXPR,
14769 type);
14772 /* Return true if T is known to be non-negative. If the return
14773 value is based on the assumption that signed overflow is undefined,
14774 set *STRICT_OVERFLOW_P to true; otherwise, don't change
14775 *STRICT_OVERFLOW_P. */
14777 bool
14778 tree_invalid_nonnegative_warnv_p (tree t, bool *strict_overflow_p)
14780 enum tree_code code = TREE_CODE (t);
14781 if (TYPE_UNSIGNED (TREE_TYPE (t)))
14782 return true;
14784 switch (code)
14786 case TARGET_EXPR:
14788 tree temp = TARGET_EXPR_SLOT (t);
14789 t = TARGET_EXPR_INITIAL (t);
14791 /* If the initializer is non-void, then it's a normal expression
14792 that will be assigned to the slot. */
14793 if (!VOID_TYPE_P (t))
14794 return tree_expr_nonnegative_warnv_p (t, strict_overflow_p);
14796 /* Otherwise, the initializer sets the slot in some way. One common
14797 way is an assignment statement at the end of the initializer. */
14798 while (1)
14800 if (TREE_CODE (t) == BIND_EXPR)
14801 t = expr_last (BIND_EXPR_BODY (t));
14802 else if (TREE_CODE (t) == TRY_FINALLY_EXPR
14803 || TREE_CODE (t) == TRY_CATCH_EXPR)
14804 t = expr_last (TREE_OPERAND (t, 0));
14805 else if (TREE_CODE (t) == STATEMENT_LIST)
14806 t = expr_last (t);
14807 else
14808 break;
14810 if (TREE_CODE (t) == MODIFY_EXPR
14811 && TREE_OPERAND (t, 0) == temp)
14812 return tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 1),
14813 strict_overflow_p);
14815 return false;
14818 case CALL_EXPR:
14820 tree arg0 = call_expr_nargs (t) > 0 ? CALL_EXPR_ARG (t, 0) : NULL_TREE;
14821 tree arg1 = call_expr_nargs (t) > 1 ? CALL_EXPR_ARG (t, 1) : NULL_TREE;
14823 return tree_call_nonnegative_warnv_p (TREE_TYPE (t),
14824 get_callee_fndecl (t),
14825 arg0,
14826 arg1,
14827 strict_overflow_p);
14829 case COMPOUND_EXPR:
14830 case MODIFY_EXPR:
14831 return tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 1),
14832 strict_overflow_p);
14833 case BIND_EXPR:
14834 return tree_expr_nonnegative_warnv_p (expr_last (TREE_OPERAND (t, 1)),
14835 strict_overflow_p);
14836 case SAVE_EXPR:
14837 return tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 0),
14838 strict_overflow_p);
14840 default:
14841 return tree_simple_nonnegative_warnv_p (TREE_CODE (t),
14842 TREE_TYPE (t));
14845 /* We don't know the sign of `t', so be conservative and return false.  */
14846 return false;
14849 /* Return true if T is known to be non-negative. If the return
14850 value is based on the assumption that signed overflow is undefined,
14851 set *STRICT_OVERFLOW_P to true; otherwise, don't change
14852 *STRICT_OVERFLOW_P. */
14854 bool
14855 tree_expr_nonnegative_warnv_p (tree t, bool *strict_overflow_p)
14857 enum tree_code code;
14858 if (t == error_mark_node)
14859 return false;
14861 code = TREE_CODE (t);
14862 switch (TREE_CODE_CLASS (code))
14864 case tcc_binary:
14865 case tcc_comparison:
14866 return tree_binary_nonnegative_warnv_p (TREE_CODE (t),
14867 TREE_TYPE (t),
14868 TREE_OPERAND (t, 0),
14869 TREE_OPERAND (t, 1),
14870 strict_overflow_p);
14872 case tcc_unary:
14873 return tree_unary_nonnegative_warnv_p (TREE_CODE (t),
14874 TREE_TYPE (t),
14875 TREE_OPERAND (t, 0),
14876 strict_overflow_p);
14878 case tcc_constant:
14879 case tcc_declaration:
14880 case tcc_reference:
14881 return tree_single_nonnegative_warnv_p (t, strict_overflow_p);
14883 default:
14884 break;
14887 switch (code)
14889 case TRUTH_AND_EXPR:
14890 case TRUTH_OR_EXPR:
14891 case TRUTH_XOR_EXPR:
14892 return tree_binary_nonnegative_warnv_p (TREE_CODE (t),
14893 TREE_TYPE (t),
14894 TREE_OPERAND (t, 0),
14895 TREE_OPERAND (t, 1),
14896 strict_overflow_p);
14897 case TRUTH_NOT_EXPR:
14898 return tree_unary_nonnegative_warnv_p (TREE_CODE (t),
14899 TREE_TYPE (t),
14900 TREE_OPERAND (t, 0),
14901 strict_overflow_p);
14903 case COND_EXPR:
14904 case CONSTRUCTOR:
14905 case OBJ_TYPE_REF:
14906 case ASSERT_EXPR:
14907 case ADDR_EXPR:
14908 case WITH_SIZE_EXPR:
14909 case SSA_NAME:
14910 return tree_single_nonnegative_warnv_p (t, strict_overflow_p);
14912 default:
14913 return tree_invalid_nonnegative_warnv_p (t, strict_overflow_p);
14917 /* Return true if `t' is known to be non-negative. Handle warnings
14918 about undefined signed overflow. */
14920 bool
14921 tree_expr_nonnegative_p (tree t)
14923 bool ret, strict_overflow_p;
14925 strict_overflow_p = false;
14926 ret = tree_expr_nonnegative_warnv_p (t, &strict_overflow_p);
14927 if (strict_overflow_p)
14928 fold_overflow_warning (("assuming signed overflow does not occur when "
14929 "determining that expression is always "
14930 "non-negative"),
14931 WARN_STRICT_OVERFLOW_MISC);
14932 return ret;
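/* A minimal usage sketch (hypothetical caller): a pass that wants to
   rewrite ABS_EXPR <t> to plain T would guard the transformation with

     if (tree_expr_nonnegative_p (t))
       ...

   and rely on this wrapper to emit the -Wstrict-overflow diagnostic
   whenever the answer depends on signed overflow being undefined.  */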
14936 /* Return true when (CODE OP0) is an address and is known to be nonzero.
14937 For floating point we further ensure that T is not denormal.
14938 Similar logic is present in nonzero_address_p in rtlanal.c.
14940 If the return value is based on the assumption that signed overflow
14941 is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
14942 change *STRICT_OVERFLOW_P. */
14944 bool
14945 tree_unary_nonzero_warnv_p (enum tree_code code, tree type, tree op0,
14946 bool *strict_overflow_p)
14948 switch (code)
14950 case ABS_EXPR:
14951 return tree_expr_nonzero_warnv_p (op0,
14952 strict_overflow_p);
14954 case NOP_EXPR:
14956 tree inner_type = TREE_TYPE (op0);
14957 tree outer_type = type;
14959 return (TYPE_PRECISION (outer_type) >= TYPE_PRECISION (inner_type)
14960 && tree_expr_nonzero_warnv_p (op0,
14961 strict_overflow_p));
14963 break;
14965 case NON_LVALUE_EXPR:
14966 return tree_expr_nonzero_warnv_p (op0,
14967 strict_overflow_p);
14969 default:
14970 break;
14973 return false;
14976 /* Return true when (CODE OP0 OP1) is an address and is known to be nonzero.
14977 For floating point we further ensure that T is not denormal.
14978 Similar logic is present in nonzero_address_p in rtlanal.c.
14980 If the return value is based on the assumption that signed overflow
14981 is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
14982 change *STRICT_OVERFLOW_P. */
14984 bool
14985 tree_binary_nonzero_warnv_p (enum tree_code code,
14986 tree type,
14987 tree op0,
14988 tree op1, bool *strict_overflow_p)
14990 bool sub_strict_overflow_p;
14991 switch (code)
14993 case POINTER_PLUS_EXPR:
14994 case PLUS_EXPR:
14995 if (TYPE_OVERFLOW_UNDEFINED (type))
14997 /* In the presence of negative values it is hard to say
14998 anything definite.  */
14999 sub_strict_overflow_p = false;
15000 if (!tree_expr_nonnegative_warnv_p (op0,
15001 &sub_strict_overflow_p)
15002 || !tree_expr_nonnegative_warnv_p (op1,
15003 &sub_strict_overflow_p))
15004 return false;
15005 /* One of the operands must be positive and the other non-negative.  */
15006 /* We don't set *STRICT_OVERFLOW_P here: even if this value
15007 overflows, on a twos-complement machine the sum of two
15008 nonnegative numbers can never be zero. */
15009 return (tree_expr_nonzero_warnv_p (op0,
15010 strict_overflow_p)
15011 || tree_expr_nonzero_warnv_p (op1,
15012 strict_overflow_p));
15014 break;
15016 case MULT_EXPR:
15017 if (TYPE_OVERFLOW_UNDEFINED (type))
15019 if (tree_expr_nonzero_warnv_p (op0,
15020 strict_overflow_p)
15021 && tree_expr_nonzero_warnv_p (op1,
15022 strict_overflow_p))
15024 *strict_overflow_p = true;
15025 return true;
15028 break;
15030 case MIN_EXPR:
15031 sub_strict_overflow_p = false;
15032 if (tree_expr_nonzero_warnv_p (op0,
15033 &sub_strict_overflow_p)
15034 && tree_expr_nonzero_warnv_p (op1,
15035 &sub_strict_overflow_p))
15037 if (sub_strict_overflow_p)
15038 *strict_overflow_p = true;
15040 break;
15042 case MAX_EXPR:
15043 sub_strict_overflow_p = false;
15044 if (tree_expr_nonzero_warnv_p (op0,
15045 &sub_strict_overflow_p))
15047 if (sub_strict_overflow_p)
15048 *strict_overflow_p = true;
15050 /* When both operands are nonzero, then MAX must be too. */
15051 if (tree_expr_nonzero_warnv_p (op1,
15052 strict_overflow_p))
15053 return true;
15055 /* MAX where operand 0 is positive is positive. */
15056 return tree_expr_nonnegative_warnv_p (op0,
15057 strict_overflow_p);
15059 /* MAX where operand 1 is positive is positive. */
15060 else if (tree_expr_nonzero_warnv_p (op1,
15061 &sub_strict_overflow_p)
15062 && tree_expr_nonnegative_warnv_p (op1,
15063 &sub_strict_overflow_p))
15065 if (sub_strict_overflow_p)
15066 *strict_overflow_p = true;
15067 return true;
15069 break;
15071 case BIT_IOR_EXPR:
15072 return (tree_expr_nonzero_warnv_p (op1,
15073 strict_overflow_p)
15074 || tree_expr_nonzero_warnv_p (op0,
15075 strict_overflow_p));
15077 default:
15078 break;
15081 return false;
15084 /* Return true when T is an address and is known to be nonzero.
15085 For floating point we further ensure that T is not denormal.
15086 Similar logic is present in nonzero_address_p in rtlanal.c.
15088 If the return value is based on the assumption that signed overflow
15089 is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
15090 change *STRICT_OVERFLOW_P. */
15092 bool
15093 tree_single_nonzero_warnv_p (tree t, bool *strict_overflow_p)
15095 bool sub_strict_overflow_p;
15096 switch (TREE_CODE (t))
15098 case INTEGER_CST:
15099 return !integer_zerop (t);
15101 case ADDR_EXPR:
15103 tree base = TREE_OPERAND (t, 0);
15104 if (!DECL_P (base))
15105 base = get_base_address (base);
15107 if (!base)
15108 return false;
15110 /* Weak declarations may link to NULL. Other things may also be NULL
15111 so protect with -fdelete-null-pointer-checks; but not variables
15112 allocated on the stack. */
15113 if (DECL_P (base)
15114 && (flag_delete_null_pointer_checks
15115 || (DECL_CONTEXT (base)
15116 && TREE_CODE (DECL_CONTEXT (base)) == FUNCTION_DECL
15117 && auto_var_in_fn_p (base, DECL_CONTEXT (base)))))
15118 return !VAR_OR_FUNCTION_DECL_P (base) || !DECL_WEAK (base);
15120 /* Constants are never weak. */
15121 if (CONSTANT_CLASS_P (base))
15122 return true;
15124 return false;
15127 case COND_EXPR:
15128 sub_strict_overflow_p = false;
15129 if (tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 1),
15130 &sub_strict_overflow_p)
15131 && tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 2),
15132 &sub_strict_overflow_p))
15134 if (sub_strict_overflow_p)
15135 *strict_overflow_p = true;
15136 return true;
15138 break;
15140 default:
15141 break;
15143 return false;
15146 /* Return true when T is an address and is known to be nonzero.
15147 For floating point we further ensure that T is not denormal.
15148 Similar logic is present in nonzero_address_p in rtlanal.c.
15150 If the return value is based on the assumption that signed overflow
15151 is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
15152 change *STRICT_OVERFLOW_P. */
15154 bool
15155 tree_expr_nonzero_warnv_p (tree t, bool *strict_overflow_p)
15157 tree type = TREE_TYPE (t);
15158 enum tree_code code;
15160 /* Doing something useful for floating point would need more work. */
15161 if (!INTEGRAL_TYPE_P (type) && !POINTER_TYPE_P (type))
15162 return false;
15164 code = TREE_CODE (t);
15165 switch (TREE_CODE_CLASS (code))
15167 case tcc_unary:
15168 return tree_unary_nonzero_warnv_p (code, type, TREE_OPERAND (t, 0),
15169 strict_overflow_p);
15170 case tcc_binary:
15171 case tcc_comparison:
15172 return tree_binary_nonzero_warnv_p (code, type,
15173 TREE_OPERAND (t, 0),
15174 TREE_OPERAND (t, 1),
15175 strict_overflow_p);
15176 case tcc_constant:
15177 case tcc_declaration:
15178 case tcc_reference:
15179 return tree_single_nonzero_warnv_p (t, strict_overflow_p);
15181 default:
15182 break;
15185 switch (code)
15187 case TRUTH_NOT_EXPR:
15188 return tree_unary_nonzero_warnv_p (code, type, TREE_OPERAND (t, 0),
15189 strict_overflow_p);
15191 case TRUTH_AND_EXPR:
15192 case TRUTH_OR_EXPR:
15193 case TRUTH_XOR_EXPR:
15194 return tree_binary_nonzero_warnv_p (code, type,
15195 TREE_OPERAND (t, 0),
15196 TREE_OPERAND (t, 1),
15197 strict_overflow_p);
15199 case COND_EXPR:
15200 case CONSTRUCTOR:
15201 case OBJ_TYPE_REF:
15202 case ASSERT_EXPR:
15203 case ADDR_EXPR:
15204 case WITH_SIZE_EXPR:
15205 case SSA_NAME:
15206 return tree_single_nonzero_warnv_p (t, strict_overflow_p);
15208 case COMPOUND_EXPR:
15209 case MODIFY_EXPR:
15210 case BIND_EXPR:
15211 return tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 1),
15212 strict_overflow_p);
15214 case SAVE_EXPR:
15215 return tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 0),
15216 strict_overflow_p);
15218 case CALL_EXPR:
15219 return alloca_call_p (t);
15221 default:
15222 break;
15224 return false;
15227 /* Return true when T is an address and is known to be nonzero.
15228 Handle warnings about undefined signed overflow. */
15230 bool
15231 tree_expr_nonzero_p (tree t)
15233 bool ret, strict_overflow_p;
15235 strict_overflow_p = false;
15236 ret = tree_expr_nonzero_warnv_p (t, &strict_overflow_p);
15237 if (strict_overflow_p)
15238 fold_overflow_warning (("assuming signed overflow does not occur when "
15239 "determining that expression is always "
15240 "non-zero"),
15241 WARN_STRICT_OVERFLOW_MISC);
15242 return ret;
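/* Likewise, a usage sketch (hypothetical caller): folding a pointer
   comparison P != 0 to true is valid when

     tree_expr_nonzero_p (p)

   holds, e.g. when P is the address of a non-weak local variable.  */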
15245 /* Given the components of a binary expression CODE, TYPE, OP0 and OP1,
15246 attempt to fold the expression to a constant without modifying TYPE,
15247 OP0 or OP1.
15249 If the expression could be simplified to a constant, then return
15250 the constant. If the expression would not be simplified to a
15251 constant, then return NULL_TREE. */
15253 tree
15254 fold_binary_to_constant (enum tree_code code, tree type, tree op0, tree op1)
15256 tree tem = fold_binary (code, type, op0, op1);
15257 return (tem && TREE_CONSTANT (tem)) ? tem : NULL_TREE;
15260 /* Given the components of a unary expression CODE, TYPE and OP0,
15261 attempt to fold the expression to a constant without modifying
15262 TYPE or OP0.
15264 If the expression could be simplified to a constant, then return
15265 the constant. If the expression would not be simplified to a
15266 constant, then return NULL_TREE. */
15268 tree
15269 fold_unary_to_constant (enum tree_code code, tree type, tree op0)
15271 tree tem = fold_unary (code, type, op0);
15272 return (tem && TREE_CONSTANT (tem)) ? tem : NULL_TREE;
15275 /* If EXP represents referencing an element in a constant string
15276 (either via pointer arithmetic or array indexing), return the
15277 tree representing the value accessed, otherwise return NULL. */
15279 tree
15280 fold_read_from_constant_string (tree exp)
15282 if ((TREE_CODE (exp) == INDIRECT_REF
15283 || TREE_CODE (exp) == ARRAY_REF)
15284 && TREE_CODE (TREE_TYPE (exp)) == INTEGER_TYPE)
15286 tree exp1 = TREE_OPERAND (exp, 0);
15287 tree index;
15288 tree string;
15289 location_t loc = EXPR_LOCATION (exp);
15291 if (TREE_CODE (exp) == INDIRECT_REF)
15292 string = string_constant (exp1, &index);
15293 else
15295 tree low_bound = array_ref_low_bound (exp);
15296 index = fold_convert_loc (loc, sizetype, TREE_OPERAND (exp, 1));
15298 /* Optimize the special-case of a zero lower bound.
15300 We convert the low_bound to sizetype to avoid some problems
15301 with constant folding. (E.g. suppose the lower bound is 1,
15302 and its mode is QI.  Without the conversion, (ARRAY
15303 +(INDEX-(unsigned char)1)) becomes ((ARRAY+(-(unsigned char)1))
15304 +INDEX), which becomes (ARRAY+255+INDEX). Oops!) */
15305 if (! integer_zerop (low_bound))
15306 index = size_diffop_loc (loc, index,
15307 fold_convert_loc (loc, sizetype, low_bound));
15309 string = exp1;
15312 if (string
15313 && TYPE_MODE (TREE_TYPE (exp)) == TYPE_MODE (TREE_TYPE (TREE_TYPE (string)))
15314 && TREE_CODE (string) == STRING_CST
15315 && TREE_CODE (index) == INTEGER_CST
15316 && compare_tree_int (index, TREE_STRING_LENGTH (string)) < 0
15317 && (GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_TYPE (string))))
15318 == MODE_INT)
15319 && (GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (TREE_TYPE (string)))) == 1))
15320 return build_int_cst_type (TREE_TYPE (exp),
15321 (TREE_STRING_POINTER (string)
15322 [TREE_INT_CST_LOW (index)]));
15324 return NULL;
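/* For illustration: a read such as "abc"[1] (an ARRAY_REF of a
   STRING_CST with a constant, in-bounds index) folds to the
   INTEGER_CST 'b' of the element type; a variable or out-of-bounds
   index makes the function return NULL.  */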
15327 /* Return the tree for neg (ARG0) when ARG0 is known to be either
15328 an integer constant, real, or fixed-point constant.
15330 TYPE is the type of the result. */
15332 static tree
15333 fold_negate_const (tree arg0, tree type)
15335 tree t = NULL_TREE;
15337 switch (TREE_CODE (arg0))
15339 case INTEGER_CST:
15341 double_int val = tree_to_double_int (arg0);
15342 int overflow = neg_double (val.low, val.high, &val.low, &val.high);
15344 t = force_fit_type_double (type, val, 1,
15345 (overflow | TREE_OVERFLOW (arg0))
15346 && !TYPE_UNSIGNED (type));
15347 break;
15350 case REAL_CST:
15351 t = build_real (type, real_value_negate (&TREE_REAL_CST (arg0)));
15352 break;
15354 case FIXED_CST:
15356 FIXED_VALUE_TYPE f;
15357 bool overflow_p = fixed_arithmetic (&f, NEGATE_EXPR,
15358 &(TREE_FIXED_CST (arg0)), NULL,
15359 TYPE_SATURATING (type));
15360 t = build_fixed (type, f);
15361 /* Propagate overflow flags. */
15362 if (overflow_p | TREE_OVERFLOW (arg0))
15363 TREE_OVERFLOW (t) = 1;
15364 break;
15367 default:
15368 gcc_unreachable ();
15371 return t;
15374 /* Return the tree for abs (ARG0) when ARG0 is known to be either
15375 an integer constant or real constant.
15377 TYPE is the type of the result. */
15379 tree
15380 fold_abs_const (tree arg0, tree type)
15382 tree t = NULL_TREE;
15384 switch (TREE_CODE (arg0))
15386 case INTEGER_CST:
15388 double_int val = tree_to_double_int (arg0);
15390 /* If the value is unsigned or non-negative, then the absolute value
15391 is the same as the ordinary value. */
15392 if (TYPE_UNSIGNED (type)
15393 || !double_int_negative_p (val))
15394 t = arg0;
15396 /* If the value is negative, then the absolute value is
15397 its negation. */
15398 else
15400 int overflow;
15402 overflow = neg_double (val.low, val.high, &val.low, &val.high);
15403 t = force_fit_type_double (type, val, -1,
15404 overflow | TREE_OVERFLOW (arg0));
15407 break;
15409 case REAL_CST:
15410 if (REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg0)))
15411 t = build_real (type, real_value_negate (&TREE_REAL_CST (arg0)));
15412 else
15413 t = arg0;
15414 break;
15416 default:
15417 gcc_unreachable ();
15420 return t;
15423 /* Return the tree for not (ARG0) when ARG0 is known to be an integer
15424 constant. TYPE is the type of the result. */
15426 static tree
15427 fold_not_const (const_tree arg0, tree type)
15429 double_int val;
15431 gcc_assert (TREE_CODE (arg0) == INTEGER_CST);
15433 val = double_int_not (tree_to_double_int (arg0));
15434 return force_fit_type_double (type, val, 0, TREE_OVERFLOW (arg0));
15437 /* Given CODE, a relational operator, the target type, TYPE and two
15438 constant operands OP0 and OP1, return the result of the
15439 relational operation. If the result is not a compile time
15440 constant, then return NULL_TREE. */
15442 static tree
15443 fold_relational_const (enum tree_code code, tree type, tree op0, tree op1)
15445 int result, invert;
15447 /* From here on, the only cases we handle are when the result is
15448 known to be a constant. */
15450 if (TREE_CODE (op0) == REAL_CST && TREE_CODE (op1) == REAL_CST)
15452 const REAL_VALUE_TYPE *c0 = TREE_REAL_CST_PTR (op0);
15453 const REAL_VALUE_TYPE *c1 = TREE_REAL_CST_PTR (op1);
15455 /* Handle the cases where either operand is a NaN. */
15456 if (real_isnan (c0) || real_isnan (c1))
15458 switch (code)
15460 case EQ_EXPR:
15461 case ORDERED_EXPR:
15462 result = 0;
15463 break;
15465 case NE_EXPR:
15466 case UNORDERED_EXPR:
15467 case UNLT_EXPR:
15468 case UNLE_EXPR:
15469 case UNGT_EXPR:
15470 case UNGE_EXPR:
15471 case UNEQ_EXPR:
15472 result = 1;
15473 break;
15475 case LT_EXPR:
15476 case LE_EXPR:
15477 case GT_EXPR:
15478 case GE_EXPR:
15479 case LTGT_EXPR:
15480 if (flag_trapping_math)
15481 return NULL_TREE;
15482 result = 0;
15483 break;
15485 default:
15486 gcc_unreachable ();
15489 return constant_boolean_node (result, type);
15492 return constant_boolean_node (real_compare (code, c0, c1), type);
15495 if (TREE_CODE (op0) == FIXED_CST && TREE_CODE (op1) == FIXED_CST)
15497 const FIXED_VALUE_TYPE *c0 = TREE_FIXED_CST_PTR (op0);
15498 const FIXED_VALUE_TYPE *c1 = TREE_FIXED_CST_PTR (op1);
15499 return constant_boolean_node (fixed_compare (code, c0, c1), type);
15502 /* Handle equality/inequality of complex constants. */
15503 if (TREE_CODE (op0) == COMPLEX_CST && TREE_CODE (op1) == COMPLEX_CST)
15505 tree rcond = fold_relational_const (code, type,
15506 TREE_REALPART (op0),
15507 TREE_REALPART (op1));
15508 tree icond = fold_relational_const (code, type,
15509 TREE_IMAGPART (op0),
15510 TREE_IMAGPART (op1));
15511 if (code == EQ_EXPR)
15512 return fold_build2 (TRUTH_ANDIF_EXPR, type, rcond, icond);
15513 else if (code == NE_EXPR)
15514 return fold_build2 (TRUTH_ORIF_EXPR, type, rcond, icond);
15515 else
15516 return NULL_TREE;
15519 /* From here on we only handle LT, LE, GT, GE, EQ and NE.
15521 To compute GT, swap the arguments and do LT.
15522 To compute GE, do LT and invert the result.
15523 To compute LE, swap the arguments, do LT and invert the result.
15524 To compute NE, do EQ and invert the result.
15526 Therefore, the code below must handle only EQ and LT. */
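/* For example, 5 >= 3 is computed as !(5 < 3): GE becomes LT with
   INVERT set, LT (5, 3) yields 0, and inverting gives 1.  */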
15528 if (code == LE_EXPR || code == GT_EXPR)
15530 tree tem = op0;
15531 op0 = op1;
15532 op1 = tem;
15533 code = swap_tree_comparison (code);
15536 /* Note that it is safe to invert for real values here because we
15537 have already handled the one case where it matters.  */
15539 invert = 0;
15540 if (code == NE_EXPR || code == GE_EXPR)
15542 invert = 1;
15543 code = invert_tree_comparison (code, false);
15546 /* Compute a result for LT or EQ if args permit;
15547 otherwise return NULL_TREE.  */
15548 if (TREE_CODE (op0) == INTEGER_CST && TREE_CODE (op1) == INTEGER_CST)
15550 if (code == EQ_EXPR)
15551 result = tree_int_cst_equal (op0, op1);
15552 else if (TYPE_UNSIGNED (TREE_TYPE (op0)))
15553 result = INT_CST_LT_UNSIGNED (op0, op1);
15554 else
15555 result = INT_CST_LT (op0, op1);
15557 else
15558 return NULL_TREE;
15560 if (invert)
15561 result ^= 1;
15562 return constant_boolean_node (result, type);
15565 /* If necessary, return a CLEANUP_POINT_EXPR for EXPR with the
15566 indicated TYPE. If no CLEANUP_POINT_EXPR is necessary, return EXPR
15567 itself. */
15569 tree
15570 fold_build_cleanup_point_expr (tree type, tree expr)
15572 /* If the expression does not have side effects then we don't have to wrap
15573 it with a cleanup point expression. */
15574 if (!TREE_SIDE_EFFECTS (expr))
15575 return expr;
15577 /* If the expression is a return, check whether the expression inside the
15578 return, or the right-hand side of the modify expression inside the
15579 return, has no side effects.  If either has none, we don't need to
15580 wrap the expression in a cleanup point expression.  Note we don't check
15581 the left-hand side of the modify because it should always be the return decl.  */
15582 if (TREE_CODE (expr) == RETURN_EXPR)
15584 tree op = TREE_OPERAND (expr, 0);
15585 if (!op || !TREE_SIDE_EFFECTS (op))
15586 return expr;
15587 op = TREE_OPERAND (op, 1);
15588 if (!TREE_SIDE_EFFECTS (op))
15589 return expr;
15592 return build1 (CLEANUP_POINT_EXPR, type, expr);
15595 /* Given a pointer value OP0 and a type TYPE, return a simplified version
15596 of an indirection through OP0, or NULL_TREE if no simplification is
15597 possible. */
15599 tree
15600 fold_indirect_ref_1 (location_t loc, tree type, tree op0)
15602 tree sub = op0;
15603 tree subtype;
15605 STRIP_NOPS (sub);
15606 subtype = TREE_TYPE (sub);
15607 if (!POINTER_TYPE_P (subtype))
15608 return NULL_TREE;
15610 if (TREE_CODE (sub) == ADDR_EXPR)
15612 tree op = TREE_OPERAND (sub, 0);
15613 tree optype = TREE_TYPE (op);
15614 /* *&CONST_DECL -> the value of the const decl.  */
15615 if (TREE_CODE (op) == CONST_DECL)
15616 return DECL_INITIAL (op);
15617 /* *&p => p; make sure to handle *&"str"[cst] here. */
15618 if (type == optype)
15620 tree fop = fold_read_from_constant_string (op);
15621 if (fop)
15622 return fop;
15623 else
15624 return op;
15626 /* *(foo *)&fooarray => fooarray[0] */
15627 else if (TREE_CODE (optype) == ARRAY_TYPE
15628 && type == TREE_TYPE (optype))
15630 tree type_domain = TYPE_DOMAIN (optype);
15631 tree min_val = size_zero_node;
15632 if (type_domain && TYPE_MIN_VALUE (type_domain))
15633 min_val = TYPE_MIN_VALUE (type_domain);
15634 op0 = build4 (ARRAY_REF, type, op, min_val, NULL_TREE, NULL_TREE);
15635 SET_EXPR_LOCATION (op0, loc);
15636 return op0;
15638 /* *(foo *)&complexfoo => __real__ complexfoo */
15639 else if (TREE_CODE (optype) == COMPLEX_TYPE
15640 && type == TREE_TYPE (optype))
15641 return fold_build1_loc (loc, REALPART_EXPR, type, op);
15642 /* *(foo *)&vectorfoo => BIT_FIELD_REF<vectorfoo,...> */
15643 else if (TREE_CODE (optype) == VECTOR_TYPE
15644 && type == TREE_TYPE (optype))
15646 tree part_width = TYPE_SIZE (type);
15647 tree index = bitsize_int (0);
15648 return fold_build3_loc (loc, BIT_FIELD_REF, type, op, part_width, index);
15652 /* ((foo*)&vectorfoo)[1] => BIT_FIELD_REF<vectorfoo,...> */
15653 if (TREE_CODE (sub) == POINTER_PLUS_EXPR
15654 && TREE_CODE (TREE_OPERAND (sub, 1)) == INTEGER_CST)
15656 tree op00 = TREE_OPERAND (sub, 0);
15657 tree op01 = TREE_OPERAND (sub, 1);
15658 tree op00type;
15660 STRIP_NOPS (op00);
15661 op00type = TREE_TYPE (op00);
15662 if (TREE_CODE (op00) == ADDR_EXPR
15663 && TREE_CODE (TREE_TYPE (op00type)) == VECTOR_TYPE
15664 && type == TREE_TYPE (TREE_TYPE (op00type)))
15666 HOST_WIDE_INT offset = tree_low_cst (op01, 0);
15667 tree part_width = TYPE_SIZE (type);
15668 unsigned HOST_WIDE_INT part_widthi = tree_low_cst (part_width, 0)/BITS_PER_UNIT;
15669 unsigned HOST_WIDE_INT indexi = offset * BITS_PER_UNIT;
15670 tree index = bitsize_int (indexi);
15672 if (offset/part_widthi <= TYPE_VECTOR_SUBPARTS (TREE_TYPE (op00type)))
15673 return fold_build3_loc (loc,
15674 BIT_FIELD_REF, type, TREE_OPERAND (op00, 0),
15675 part_width, index);
15681 /* ((foo*)&complexfoo)[1] => __imag__ complexfoo */
15682 if (TREE_CODE (sub) == POINTER_PLUS_EXPR
15683 && TREE_CODE (TREE_OPERAND (sub, 1)) == INTEGER_CST)
15685 tree op00 = TREE_OPERAND (sub, 0);
15686 tree op01 = TREE_OPERAND (sub, 1);
15687 tree op00type;
15689 STRIP_NOPS (op00);
15690 op00type = TREE_TYPE (op00);
15691 if (TREE_CODE (op00) == ADDR_EXPR
15692 && TREE_CODE (TREE_TYPE (op00type)) == COMPLEX_TYPE
15693 && type == TREE_TYPE (TREE_TYPE (op00type)))
15695 tree size = TYPE_SIZE_UNIT (type);
15696 if (tree_int_cst_equal (size, op01))
15697 return fold_build1_loc (loc, IMAGPART_EXPR, type,
15698 TREE_OPERAND (op00, 0));
15702 /* *(foo *)fooarrptr => (*fooarrptr)[0] */
15703 if (TREE_CODE (TREE_TYPE (subtype)) == ARRAY_TYPE
15704 && type == TREE_TYPE (TREE_TYPE (subtype)))
15706 tree type_domain;
15707 tree min_val = size_zero_node;
15708 sub = build_fold_indirect_ref_loc (loc, sub);
15709 type_domain = TYPE_DOMAIN (TREE_TYPE (sub));
15710 if (type_domain && TYPE_MIN_VALUE (type_domain))
15711 min_val = TYPE_MIN_VALUE (type_domain);
15712 op0 = build4 (ARRAY_REF, type, sub, min_val, NULL_TREE, NULL_TREE);
15713 SET_EXPR_LOCATION (op0, loc);
15714 return op0;
15717 return NULL_TREE;
15720 /* Builds an expression for an indirection through T, simplifying some
15721 cases. */
15723 tree
15724 build_fold_indirect_ref_loc (location_t loc, tree t)
15726 tree type = TREE_TYPE (TREE_TYPE (t));
15727 tree sub = fold_indirect_ref_1 (loc, type, t);
15729 if (sub)
15730 return sub;
15732 t = build1 (INDIRECT_REF, type, t);
15733 SET_EXPR_LOCATION (t, loc);
15734 return t;
15737 /* Given an INDIRECT_REF T, return either T or a simplified version. */
15739 tree
15740 fold_indirect_ref_loc (location_t loc, tree t)
15742 tree sub = fold_indirect_ref_1 (loc, TREE_TYPE (t), TREE_OPERAND (t, 0));
15744 if (sub)
15745 return sub;
15746 else
15747 return t;
15750 /* Strip non-trapping, non-side-effecting tree nodes from an expression
15751 whose result is ignored. The type of the returned tree need not be
15752 the same as the original expression. */
15754 tree
15755 fold_ignored_result (tree t)
15757 if (!TREE_SIDE_EFFECTS (t))
15758 return integer_zero_node;
15760 for (;;)
15761 switch (TREE_CODE_CLASS (TREE_CODE (t)))
15763 case tcc_unary:
15764 t = TREE_OPERAND (t, 0);
15765 break;
15767 case tcc_binary:
15768 case tcc_comparison:
15769 if (!TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1)))
15770 t = TREE_OPERAND (t, 0);
15771 else if (!TREE_SIDE_EFFECTS (TREE_OPERAND (t, 0)))
15772 t = TREE_OPERAND (t, 1);
15773 else
15774 return t;
15775 break;
15777 case tcc_expression:
15778 switch (TREE_CODE (t))
15780 case COMPOUND_EXPR:
15781 if (TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1)))
15782 return t;
15783 t = TREE_OPERAND (t, 0);
15784 break;
15786 case COND_EXPR:
15787 if (TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1))
15788 || TREE_SIDE_EFFECTS (TREE_OPERAND (t, 2)))
15789 return t;
15790 t = TREE_OPERAND (t, 0);
15791 break;
15793 default:
15794 return t;
15796 break;
15798 default:
15799 return t;
15803 /* Return the value of VALUE, rounded up to a multiple of DIVISOR.
15804 This can only be applied to objects of a sizetype. */
15806 tree
15807 round_up_loc (location_t loc, tree value, int divisor)
15809 tree div = NULL_TREE;
15811 gcc_assert (divisor > 0);
15812 if (divisor == 1)
15813 return value;
15815 /* See if VALUE is already a multiple of DIVISOR.  If so, we don't
15816 have to do anything.  Only do this when VALUE is not a constant,
15817 because for a constant this check is more expensive than simply
15818 performing the rounding.  */
15819 if (TREE_CODE (value) != INTEGER_CST)
15821 div = build_int_cst (TREE_TYPE (value), divisor);
15823 if (multiple_of_p (TREE_TYPE (value), value, div))
15824 return value;
15827 /* If divisor is a power of two, simplify this to bit manipulation. */
15828 if (divisor == (divisor & -divisor))
15830 if (TREE_CODE (value) == INTEGER_CST)
15832 double_int val = tree_to_double_int (value);
15833 bool overflow_p;
15835 if ((val.low & (divisor - 1)) == 0)
15836 return value;
15838 overflow_p = TREE_OVERFLOW (value);
15839 val.low &= ~(divisor - 1);
15840 val.low += divisor;
15841 if (val.low == 0)
15843 val.high++;
15844 if (val.high == 0)
15845 overflow_p = true;
15848 return force_fit_type_double (TREE_TYPE (value), val,
15849 -1, overflow_p);
15851 else
15853 tree t;
15855 t = build_int_cst (TREE_TYPE (value), divisor - 1);
15856 value = size_binop_loc (loc, PLUS_EXPR, value, t);
15857 t = build_int_cst (TREE_TYPE (value), -divisor);
15858 value = size_binop_loc (loc, BIT_AND_EXPR, value, t);
15861 else
15863 if (!div)
15864 div = build_int_cst (TREE_TYPE (value), divisor);
15865 value = size_binop_loc (loc, CEIL_DIV_EXPR, value, div);
15866 value = size_binop_loc (loc, MULT_EXPR, value, div);
15869 return value;
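/* For example, with a power-of-two divisor the function reduces to bit
   manipulation: round_up_loc (loc, 13, 8) computes (13 + 7) & -8 = 16,
   while a non-power divisor such as 6 goes through CEIL_DIV followed
   by MULT: (13 + 5) / 6 * 6 = 18.  */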
15872 /* Likewise, but round down. */
15874 tree
15875 round_down_loc (location_t loc, tree value, int divisor)
15877 tree div = NULL_TREE;
15879 gcc_assert (divisor > 0);
15880 if (divisor == 1)
15881 return value;
15883 /* See if VALUE is already a multiple of DIVISOR.  If so, we don't
15884 have to do anything.  Only do this when VALUE is not a constant,
15885 because for a constant this check is more expensive than simply
15886 performing the rounding.  */
15887 if (TREE_CODE (value) != INTEGER_CST)
15889 div = build_int_cst (TREE_TYPE (value), divisor);
15891 if (multiple_of_p (TREE_TYPE (value), value, div))
15892 return value;
15895 /* If divisor is a power of two, simplify this to bit manipulation. */
15896 if (divisor == (divisor & -divisor))
15898 tree t;
15900 t = build_int_cst (TREE_TYPE (value), -divisor);
15901 value = size_binop_loc (loc, BIT_AND_EXPR, value, t);
15903 else
15905 if (!div)
15906 div = build_int_cst (TREE_TYPE (value), divisor);
15907 value = size_binop_loc (loc, FLOOR_DIV_EXPR, value, div);
15908 value = size_binop_loc (loc, MULT_EXPR, value, div);
15911 return value;
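/* Likewise, round_down_loc (loc, 13, 8) is simply 13 & -8 = 8, and a
   non-power divisor such as 6 yields 13 / 6 * 6 = 12.  */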
15914 /* Returns the pointer to the base of the object addressed by EXP and
15915 extracts the information about the offset of the access, storing it
15916 in PBITPOS and POFFSET.  */
15918 static tree
15919 split_address_to_core_and_offset (tree exp,
15920 HOST_WIDE_INT *pbitpos, tree *poffset)
15922 tree core;
15923 enum machine_mode mode;
15924 int unsignedp, volatilep;
15925 HOST_WIDE_INT bitsize;
15926 location_t loc = EXPR_LOCATION (exp);
15928 if (TREE_CODE (exp) == ADDR_EXPR)
15930 core = get_inner_reference (TREE_OPERAND (exp, 0), &bitsize, pbitpos,
15931 poffset, &mode, &unsignedp, &volatilep,
15932 false);
15933 core = build_fold_addr_expr_loc (loc, core);
15935 else
15937 core = exp;
15938 *pbitpos = 0;
15939 *poffset = NULL_TREE;
15942 return core;
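/* A hypothetical caller illustrating the decomposition (not GCC
   source).  For EXP == "&s.f" where field "f" starts 4 bytes into "s",
   the core is "&s", *PBITPOS becomes 32 and *POFFSET stays NULL_TREE;
   a variable component such as an array index surfaces in *POFFSET
   instead.  Kept out of the build.  */
#if 0
static void
split_address_example (tree exp)
{
  HOST_WIDE_INT bitpos;
  tree offset;
  tree core = split_address_to_core_and_offset (exp, &bitpos, &offset);
  /* core, offset (possibly NULL_TREE) and bitpos / BITS_PER_UNIT
     together re-address the same byte as EXP.  */
  (void) core;
}
#endif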
/* Returns true if addresses of E1 and E2 differ by a constant, false
   otherwise.  If they do, E1 - E2 is stored in *DIFF.  */

bool
ptr_difference_const (tree e1, tree e2, HOST_WIDE_INT *diff)
{
  tree core1, core2;
  HOST_WIDE_INT bitpos1, bitpos2;
  tree toffset1, toffset2, tdiff, type;

  core1 = split_address_to_core_and_offset (e1, &bitpos1, &toffset1);
  core2 = split_address_to_core_and_offset (e2, &bitpos2, &toffset2);

  if (bitpos1 % BITS_PER_UNIT != 0
      || bitpos2 % BITS_PER_UNIT != 0
      || !operand_equal_p (core1, core2, 0))
    return false;

  if (toffset1 && toffset2)
    {
      type = TREE_TYPE (toffset1);
      if (type != TREE_TYPE (toffset2))
        toffset2 = fold_convert (type, toffset2);

      tdiff = fold_build2 (MINUS_EXPR, type, toffset1, toffset2);
      if (!cst_and_fits_in_hwi (tdiff))
        return false;

      *diff = int_cst_value (tdiff);
    }
  else if (toffset1 || toffset2)
    {
      /* If only one of the offsets is non-constant, the difference cannot
         be a constant.  */
      return false;
    }
  else
    *diff = 0;

  *diff += (bitpos1 - bitpos2) / BITS_PER_UNIT;
  return true;
}
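/* Hypothetical usage sketch (not GCC source): for addresses such as
   "&a[3]" and "&a[1]" the cores match, the offsets fold to constants,
   and DIFF receives the byte difference 2 * sizeof (*a).  Kept out of
   the build.  */
#if 0
static bool
ptr_difference_example (tree e1, tree e2)
{
  HOST_WIDE_INT diff;
  if (ptr_difference_const (e1, e2, &diff))
    /* diff now holds E1 - E2 in bytes.  */
    return diff >= 0;
  return false;
}
#endif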
/* Simplify the floating point expression EXP when the sign of the
   result is not significant.  Return NULL_TREE if no simplification
   is possible.  */

tree
fold_strip_sign_ops (tree exp)
{
  tree arg0, arg1;
  location_t loc = EXPR_LOCATION (exp);

  switch (TREE_CODE (exp))
    {
    case ABS_EXPR:
    case NEGATE_EXPR:
      arg0 = fold_strip_sign_ops (TREE_OPERAND (exp, 0));
      return arg0 ? arg0 : TREE_OPERAND (exp, 0);

    case MULT_EXPR:
    case RDIV_EXPR:
      if (HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (TREE_TYPE (exp))))
        return NULL_TREE;
      arg0 = fold_strip_sign_ops (TREE_OPERAND (exp, 0));
      arg1 = fold_strip_sign_ops (TREE_OPERAND (exp, 1));
      if (arg0 != NULL_TREE || arg1 != NULL_TREE)
        return fold_build2_loc (loc, TREE_CODE (exp), TREE_TYPE (exp),
                                arg0 ? arg0 : TREE_OPERAND (exp, 0),
                                arg1 ? arg1 : TREE_OPERAND (exp, 1));
      break;

    case COMPOUND_EXPR:
      arg0 = TREE_OPERAND (exp, 0);
      arg1 = fold_strip_sign_ops (TREE_OPERAND (exp, 1));
      if (arg1)
        return fold_build2_loc (loc, COMPOUND_EXPR, TREE_TYPE (exp), arg0, arg1);
      break;

    case COND_EXPR:
      arg0 = fold_strip_sign_ops (TREE_OPERAND (exp, 1));
      arg1 = fold_strip_sign_ops (TREE_OPERAND (exp, 2));
      if (arg0 || arg1)
        return fold_build3_loc (loc,
                                COND_EXPR, TREE_TYPE (exp), TREE_OPERAND (exp, 0),
                                arg0 ? arg0 : TREE_OPERAND (exp, 1),
                                arg1 ? arg1 : TREE_OPERAND (exp, 2));
      break;

    case CALL_EXPR:
      {
        const enum built_in_function fcode = builtin_mathfn_code (exp);
        switch (fcode)
          {
          CASE_FLT_FN (BUILT_IN_COPYSIGN):
            /* Strip copysign function call, return the 1st argument.  */
            arg0 = CALL_EXPR_ARG (exp, 0);
            arg1 = CALL_EXPR_ARG (exp, 1);
            return omit_one_operand_loc (loc, TREE_TYPE (exp), arg0, arg1);

          default:
            /* Strip sign ops from the argument of "odd" math functions.  */
            if (negate_mathfn_p (fcode))
              {
                arg0 = fold_strip_sign_ops (CALL_EXPR_ARG (exp, 0));
                if (arg0)
                  return build_call_expr_loc (loc, get_callee_fndecl (exp), 1, arg0);
              }
            break;
          }
      }
      break;

    default:
      break;
    }
  return NULL_TREE;
}
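/* A sketch of the simplification above (hypothetical caller, not GCC
   source): when only the magnitude of a result matters, e.g. beneath a
   call to fabs, "-x * fabs (y)" strips to "x * y" and
   "copysign (x, y)" strips to "x" (keeping y's side effects, if any).
   Kept out of the build.  */
#if 0
static tree
strip_sign_example (tree exp)
{
  tree stripped = fold_strip_sign_ops (exp);
  /* NULL_TREE means no sign operation could be removed.  */
  return stripped ? stripped : exp;
}
#endif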