/* Fold a constant sub-tree into a single node for C-compiler
   Copyright (C) 1987, 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998, 1999,
   2000, 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009, 2010
   Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

/*@@ This file should be rewritten to use an arbitrary precision
  @@ representation for "struct tree_int_cst" and "struct tree_real_cst".
  @@ Perhaps the routines could also be used for bc/dc, and made a lib.
  @@ The routines that translate from the ap rep should
  @@ warn if precision et. al. is lost.
  @@ This would also make life easier when this technology is used
  @@ for cross-compilers.  */
/* The entry points in this file are fold, size_int_wide and size_binop.

   fold takes a tree as argument and returns a simplified tree.

   size_binop takes a tree code for an arithmetic operation
   and two operands that are trees, and produces a tree for the
   result, assuming the type comes from `sizetype'.

   size_int takes an integer value, and creates a tree constant
   with type from `sizetype'.

   Note: Since the folders get called on non-gimple code as well as
   gimple code, we need to handle GIMPLE tuples as well as their
   corresponding tree equivalents.  */
#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "flags.h"
#include "tree.h"
#include "realmpfr.h"
#include "rtl.h"
#include "expr.h"
#include "tm_p.h"
#include "target.h"
#include "diagnostic-core.h"
#include "intl.h"
#include "ggc.h"
#include "hashtab.h"
#include "langhooks.h"
#include "md5.h"
#include "gimple.h"
#include "tree-flow.h"
/* Nonzero if we are folding constants inside an initializer; zero
   otherwise.  */
int folding_initializer = 0;
/* The following constants represent a bit based encoding of GCC's
   comparison operators.  This encoding simplifies transformations
   on relational comparison operators, such as AND and OR.  */
enum comparison_code {
  COMPCODE_FALSE = 0,
  COMPCODE_LT = 1,
  COMPCODE_EQ = 2,
  COMPCODE_LE = 3,
  COMPCODE_GT = 4,
  COMPCODE_LTGT = 5,
  COMPCODE_GE = 6,
  COMPCODE_ORD = 7,
  COMPCODE_UNORD = 8,
  COMPCODE_UNLT = 9,
  COMPCODE_UNEQ = 10,
  COMPCODE_UNLE = 11,
  COMPCODE_UNGT = 12,
  COMPCODE_NE = 13,
  COMPCODE_UNGE = 14,
  COMPCODE_TRUE = 15
};
static bool negate_mathfn_p (enum built_in_function);
static bool negate_expr_p (tree);
static tree negate_expr (tree);
static tree split_tree (tree, enum tree_code, tree *, tree *, tree *, int);
static tree associate_trees (location_t, tree, tree, enum tree_code, tree);
static tree const_binop (enum tree_code, tree, tree);
static enum comparison_code comparison_to_compcode (enum tree_code);
static enum tree_code compcode_to_comparison (enum comparison_code);
static int operand_equal_for_comparison_p (tree, tree, tree);
static int twoval_comparison_p (tree, tree *, tree *, int *);
static tree eval_subst (location_t, tree, tree, tree, tree, tree);
static tree pedantic_omit_one_operand_loc (location_t, tree, tree, tree);
static tree distribute_bit_expr (location_t, enum tree_code, tree, tree, tree);
static tree make_bit_field_ref (location_t, tree, tree,
                                HOST_WIDE_INT, HOST_WIDE_INT, int);
static tree optimize_bit_field_compare (location_t, enum tree_code,
                                        tree, tree, tree);
static tree decode_field_reference (location_t, tree, HOST_WIDE_INT *,
                                    HOST_WIDE_INT *,
                                    enum machine_mode *, int *, int *,
                                    tree *, tree *);
static int all_ones_mask_p (const_tree, int);
static tree sign_bit_p (tree, const_tree);
static int simple_operand_p (const_tree);
static tree range_binop (enum tree_code, tree, tree, int, tree, int);
static tree range_predecessor (tree);
static tree range_successor (tree);
extern tree make_range (tree, int *, tree *, tree *, bool *);
extern bool merge_ranges (int *, tree *, tree *, int, tree, tree, int,
                          tree, tree);
static tree fold_range_test (location_t, enum tree_code, tree, tree, tree);
static tree fold_cond_expr_with_comparison (location_t, tree, tree, tree, tree);
static tree unextend (tree, int, int, tree);
static tree fold_truthop (location_t, enum tree_code, tree, tree, tree);
static tree optimize_minmax_comparison (location_t, enum tree_code,
                                        tree, tree, tree);
static tree extract_muldiv (tree, tree, enum tree_code, tree, bool *);
static tree extract_muldiv_1 (tree, tree, enum tree_code, tree, bool *);
static tree fold_binary_op_with_conditional_arg (location_t,
                                                 enum tree_code, tree,
                                                 tree, tree,
                                                 tree, tree, int);
static tree fold_mathfn_compare (location_t,
                                 enum built_in_function, enum tree_code,
                                 tree, tree, tree);
static tree fold_inf_compare (location_t, enum tree_code, tree, tree, tree);
static tree fold_div_compare (location_t, enum tree_code, tree, tree, tree);
static bool reorder_operands_p (const_tree, const_tree);
static tree fold_negate_const (tree, tree);
static tree fold_not_const (const_tree, tree);
static tree fold_relational_const (enum tree_code, tree, tree, tree);
static tree fold_convert_const (enum tree_code, tree, tree);
/* Similar to protected_set_expr_location, but never modifies X in
   place; if the location can and needs to be set, unshare X first.  */

static inline tree
protected_set_expr_location_unshare (tree x, location_t loc)
{
  if (CAN_HAVE_LOCATION_P (x)
      && EXPR_LOCATION (x) != loc
      && !(TREE_CODE (x) == SAVE_EXPR
           || TREE_CODE (x) == TARGET_EXPR
           || TREE_CODE (x) == BIND_EXPR))
    {
      x = copy_node (x);
      SET_EXPR_LOCATION (x, loc);
    }
  return x;
}
/* We know that A1 + B1 = SUM1, using 2's complement arithmetic and ignoring
   overflow.  Suppose A, B and SUM have the same respective signs as A1, B1,
   and SUM1.  Then this yields nonzero if overflow occurred during the
   addition.

   Overflow occurs if A and B have the same sign, but A and SUM differ in
   sign.  Use `^' to test whether signs differ, and `< 0' to isolate the
   sign.  */
#define OVERFLOW_SUM_SIGN(a, b, sum) ((~((a) ^ (b)) & ((a) ^ (sum))) < 0)
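
/* For illustration, with 32-bit HOST_WIDE_INTs: adding a = 0x7fffffff
   and b = 1 wraps around to sum = 0x80000000.  Then ~(a ^ b) has the
   sign bit set (a and b agree in sign) and (a ^ sum) has the sign bit
   set (a and sum disagree), so OVERFLOW_SUM_SIGN (a, b, sum) is
   nonzero and the wraparound is detected.  */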
/* If ARG2 divides ARG1 with zero remainder, carries out the division
   of type CODE and returns the quotient.
   Otherwise returns NULL_TREE.  */

tree
div_if_zero_remainder (enum tree_code code, const_tree arg1, const_tree arg2)
{
  double_int quo, rem;
  int uns;

  /* The signedness of the division follows operand two, which
     does the correct thing for POINTER_PLUS_EXPR where we want
     a signed division.  */
  uns = TYPE_UNSIGNED (TREE_TYPE (arg2));
  if (TREE_CODE (TREE_TYPE (arg2)) == INTEGER_TYPE
      && TYPE_IS_SIZETYPE (TREE_TYPE (arg2)))
    uns = false;

  quo = double_int_divmod (tree_to_double_int (arg1),
                           tree_to_double_int (arg2),
                           uns, code, &rem);

  if (double_int_zero_p (rem))
    return build_int_cst_wide (TREE_TYPE (arg1), quo.low, quo.high);

  return NULL_TREE;
}
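
/* For example, given INTEGER_CSTs for 12 and 4 (e.g. built with
   build_int_cst (integer_type_node, ...)), TRUNC_DIV_EXPR folds
   12 / 4 to the constant 3, while 13 / 4 yields NULL_TREE because
   the remainder is nonzero.  */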
/* This is nonzero if we should defer warnings about undefined
   overflow.  This facility exists because these warnings are a
   special case.  The code to estimate loop iterations does not want
   to issue any warnings, since it works with expressions which do not
   occur in user code.  Various bits of cleanup code call fold(), but
   only use the result if it has certain characteristics (e.g., is a
   constant); that code only wants to issue a warning if the result is
   used.  */

static int fold_deferring_overflow_warnings;

/* If a warning about undefined overflow is deferred, this is the
   warning.  Note that this may cause us to turn two warnings into
   one, but that is fine since it is sufficient to only give one
   warning per expression.  */

static const char* fold_deferred_overflow_warning;

/* If a warning about undefined overflow is deferred, this is the
   level at which the warning should be emitted.  */

static enum warn_strict_overflow_code fold_deferred_overflow_code;
/* Start deferring overflow warnings.  We could use a stack here to
   permit nested calls, but at present it is not necessary.  */

void
fold_defer_overflow_warnings (void)
{
  ++fold_deferring_overflow_warnings;
}
/* Stop deferring overflow warnings.  If there is a pending warning,
   and ISSUE is true, then issue the warning if appropriate.  STMT is
   the statement with which the warning should be associated (used for
   location information); STMT may be NULL.  CODE is the level of the
   warning--a warn_strict_overflow_code value.  This function will use
   the smaller of CODE and the deferred code when deciding whether to
   issue the warning.  CODE may be zero to mean to always use the
   deferred code.  */

void
fold_undefer_overflow_warnings (bool issue, const_gimple stmt, int code)
{
  const char *warnmsg;
  location_t locus;

  gcc_assert (fold_deferring_overflow_warnings > 0);
  --fold_deferring_overflow_warnings;
  if (fold_deferring_overflow_warnings > 0)
    {
      if (fold_deferred_overflow_warning != NULL
          && code != 0
          && code < (int) fold_deferred_overflow_code)
        fold_deferred_overflow_code = (enum warn_strict_overflow_code) code;
      return;
    }

  warnmsg = fold_deferred_overflow_warning;
  fold_deferred_overflow_warning = NULL;

  if (!issue || warnmsg == NULL)
    return;

  if (gimple_no_warning_p (stmt))
    return;

  /* Use the smallest code level when deciding to issue the
     warning.  */
  if (code == 0 || code > (int) fold_deferred_overflow_code)
    code = fold_deferred_overflow_code;

  if (!issue_strict_overflow_warning (code))
    return;

  if (stmt == NULL)
    locus = input_location;
  else
    locus = gimple_location (stmt);
  warning_at (locus, OPT_Wstrict_overflow, "%s", warnmsg);
}
/* Stop deferring overflow warnings, ignoring any deferred
   warnings.  */

void
fold_undefer_and_ignore_overflow_warnings (void)
{
  fold_undefer_overflow_warnings (false, NULL, 0);
}

/* Whether we are deferring overflow warnings.  */

bool
fold_deferring_overflow_warnings_p (void)
{
  return fold_deferring_overflow_warnings > 0;
}
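
/* A typical usage sketch (the actual call sites live elsewhere, e.g.
   in the tree-ssa passes; STMT stands for whatever statement is being
   folded):

     fold_defer_overflow_warnings ();
     tem = fold_binary (code, type, op0, op1);
     fold_undefer_overflow_warnings (tem != NULL_TREE, stmt, 0);

   so the deferred warning is only issued if the fold succeeded and
   its result is kept.  */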
/* This is called when we fold something based on the fact that signed
   overflow is undefined.  */

static void
fold_overflow_warning (const char* gmsgid, enum warn_strict_overflow_code wc)
{
  if (fold_deferring_overflow_warnings > 0)
    {
      if (fold_deferred_overflow_warning == NULL
          || wc < fold_deferred_overflow_code)
        {
          fold_deferred_overflow_warning = gmsgid;
          fold_deferred_overflow_code = wc;
        }
    }
  else if (issue_strict_overflow_warning (wc))
    warning (OPT_Wstrict_overflow, gmsgid);
}
/* Return true if the built-in mathematical function specified by CODE
   is odd, i.e. -f(x) == f(-x).  */

static bool
negate_mathfn_p (enum built_in_function code)
{
  switch (code)
    {
    CASE_FLT_FN (BUILT_IN_ASIN):
    CASE_FLT_FN (BUILT_IN_ASINH):
    CASE_FLT_FN (BUILT_IN_ATAN):
    CASE_FLT_FN (BUILT_IN_ATANH):
    CASE_FLT_FN (BUILT_IN_CASIN):
    CASE_FLT_FN (BUILT_IN_CASINH):
    CASE_FLT_FN (BUILT_IN_CATAN):
    CASE_FLT_FN (BUILT_IN_CATANH):
    CASE_FLT_FN (BUILT_IN_CBRT):
    CASE_FLT_FN (BUILT_IN_CPROJ):
    CASE_FLT_FN (BUILT_IN_CSIN):
    CASE_FLT_FN (BUILT_IN_CSINH):
    CASE_FLT_FN (BUILT_IN_CTAN):
    CASE_FLT_FN (BUILT_IN_CTANH):
    CASE_FLT_FN (BUILT_IN_ERF):
    CASE_FLT_FN (BUILT_IN_LLROUND):
    CASE_FLT_FN (BUILT_IN_LROUND):
    CASE_FLT_FN (BUILT_IN_ROUND):
    CASE_FLT_FN (BUILT_IN_SIN):
    CASE_FLT_FN (BUILT_IN_SINH):
    CASE_FLT_FN (BUILT_IN_TAN):
    CASE_FLT_FN (BUILT_IN_TANH):
    CASE_FLT_FN (BUILT_IN_TRUNC):
      return true;

    CASE_FLT_FN (BUILT_IN_LLRINT):
    CASE_FLT_FN (BUILT_IN_LRINT):
    CASE_FLT_FN (BUILT_IN_NEARBYINT):
    CASE_FLT_FN (BUILT_IN_RINT):
      return !flag_rounding_math;

    default:
      break;
    }
  return false;
}
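
/* For example, sin is odd, so -sin(x) may be folded to sin(-x).  rint
   is only treated as odd when -frounding-math is off, since under a
   directed rounding mode rint(-x) need not equal -rint(x).  */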
/* Check whether we may negate an integer constant T without causing
   overflow.  */

bool
may_negate_without_overflow_p (const_tree t)
{
  unsigned HOST_WIDE_INT val;
  unsigned int prec;
  tree type;

  gcc_assert (TREE_CODE (t) == INTEGER_CST);

  type = TREE_TYPE (t);
  if (TYPE_UNSIGNED (type))
    return false;

  prec = TYPE_PRECISION (type);
  if (prec > HOST_BITS_PER_WIDE_INT)
    {
      if (TREE_INT_CST_LOW (t) != 0)
        return true;
      prec -= HOST_BITS_PER_WIDE_INT;
      val = TREE_INT_CST_HIGH (t);
    }
  else
    val = TREE_INT_CST_LOW (t);
  if (prec < HOST_BITS_PER_WIDE_INT)
    val &= ((unsigned HOST_WIDE_INT) 1 << prec) - 1;
  return val != ((unsigned HOST_WIDE_INT) 1 << (prec - 1));
}
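
/* For a 32-bit signed int, every value except INT_MIN (only the sign
   bit set, 0x80000000) may be negated; -INT_MIN wraps back to INT_MIN
   in two's complement, which is exactly the VAL == 1 << (prec - 1)
   case rejected above.  */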
/* Determine whether an expression T can be cheaply negated using
   the function negate_expr without introducing undefined overflow.  */

static bool
negate_expr_p (tree t)
{
  tree type;

  if (t == 0)
    return false;

  type = TREE_TYPE (t);

  STRIP_SIGN_NOPS (t);
  switch (TREE_CODE (t))
    {
    case INTEGER_CST:
      if (TYPE_OVERFLOW_WRAPS (type))
        return true;

      /* Check that -CST will not overflow type.  */
      return may_negate_without_overflow_p (t);
    case BIT_NOT_EXPR:
      return (INTEGRAL_TYPE_P (type)
              && TYPE_OVERFLOW_WRAPS (type));

    case FIXED_CST:
    case NEGATE_EXPR:
      return true;

    case REAL_CST:
      /* We want to canonicalize to positive real constants.  Pretend
         that only negative ones can be easily negated.  */
      return REAL_VALUE_NEGATIVE (TREE_REAL_CST (t));

    case COMPLEX_CST:
      return negate_expr_p (TREE_REALPART (t))
             && negate_expr_p (TREE_IMAGPART (t));

    case COMPLEX_EXPR:
      return negate_expr_p (TREE_OPERAND (t, 0))
             && negate_expr_p (TREE_OPERAND (t, 1));

    case CONJ_EXPR:
      return negate_expr_p (TREE_OPERAND (t, 0));

    case PLUS_EXPR:
      if (HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type))
          || HONOR_SIGNED_ZEROS (TYPE_MODE (type)))
        return false;
      /* -(A + B) -> (-B) - A.  */
      if (negate_expr_p (TREE_OPERAND (t, 1))
          && reorder_operands_p (TREE_OPERAND (t, 0),
                                 TREE_OPERAND (t, 1)))
        return true;
      /* -(A + B) -> (-A) - B.  */
      return negate_expr_p (TREE_OPERAND (t, 0));

    case MINUS_EXPR:
      /* We can't turn -(A-B) into B-A when we honor signed zeros.  */
      return !HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type))
             && !HONOR_SIGNED_ZEROS (TYPE_MODE (type))
             && reorder_operands_p (TREE_OPERAND (t, 0),
                                    TREE_OPERAND (t, 1));

    case MULT_EXPR:
      if (TYPE_UNSIGNED (TREE_TYPE (t)))
        break;

      /* Fall through.  */

    case RDIV_EXPR:
      if (! HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (TREE_TYPE (t))))
        return negate_expr_p (TREE_OPERAND (t, 1))
               || negate_expr_p (TREE_OPERAND (t, 0));
      break;

    case TRUNC_DIV_EXPR:
    case ROUND_DIV_EXPR:
    case FLOOR_DIV_EXPR:
    case CEIL_DIV_EXPR:
    case EXACT_DIV_EXPR:
      /* In general we can't negate A / B, because if A is INT_MIN and
         B is 1, we may turn this into INT_MIN / -1 which is undefined
         and actually traps on some architectures.  But if overflow is
         undefined, we can negate, because - (INT_MIN / 1) is an
         overflow.  */
      if (INTEGRAL_TYPE_P (TREE_TYPE (t))
          && !TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (t)))
        break;
      return negate_expr_p (TREE_OPERAND (t, 1))
             || negate_expr_p (TREE_OPERAND (t, 0));

    case NOP_EXPR:
      /* Negate -((double)float) as (double)(-float).  */
      if (TREE_CODE (type) == REAL_TYPE)
        {
          tree tem = strip_float_extensions (t);
          if (tem != t)
            return negate_expr_p (tem);
        }
      break;

    case CALL_EXPR:
      /* Negate -f(x) as f(-x).  */
      if (negate_mathfn_p (builtin_mathfn_code (t)))
        return negate_expr_p (CALL_EXPR_ARG (t, 0));
      break;

    case RSHIFT_EXPR:
      /* Optimize -((int)x >> 31) into (unsigned)x >> 31.  */
      if (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST)
        {
          tree op1 = TREE_OPERAND (t, 1);
          if (TREE_INT_CST_HIGH (op1) == 0
              && (unsigned HOST_WIDE_INT) (TYPE_PRECISION (type) - 1)
                 == TREE_INT_CST_LOW (op1))
            return true;
        }
      break;

    default:
      break;
    }
  return false;
}
/* Given T, an expression, return a folded tree for -T or NULL_TREE, if no
   simplification is possible.
   If negate_expr_p would return true for T, NULL_TREE will never be
   returned.  */

static tree
fold_negate_expr (location_t loc, tree t)
{
  tree type = TREE_TYPE (t);
  tree tem;

  switch (TREE_CODE (t))
    {
    /* Convert - (~A) to A + 1.  */
    case BIT_NOT_EXPR:
      if (INTEGRAL_TYPE_P (type))
        return fold_build2_loc (loc, PLUS_EXPR, type, TREE_OPERAND (t, 0),
                                build_int_cst (type, 1));
      break;

    case INTEGER_CST:
      tem = fold_negate_const (t, type);
      if (TREE_OVERFLOW (tem) == TREE_OVERFLOW (t)
          || !TYPE_OVERFLOW_TRAPS (type))
        return tem;
      break;

    case REAL_CST:
      tem = fold_negate_const (t, type);
      /* Two's complement FP formats, such as c4x, may overflow.  */
      if (!TREE_OVERFLOW (tem) || !flag_trapping_math)
        return tem;
      break;

    case FIXED_CST:
      tem = fold_negate_const (t, type);
      return tem;

    case COMPLEX_CST:
      {
        tree rpart = negate_expr (TREE_REALPART (t));
        tree ipart = negate_expr (TREE_IMAGPART (t));

        if ((TREE_CODE (rpart) == REAL_CST
             && TREE_CODE (ipart) == REAL_CST)
            || (TREE_CODE (rpart) == INTEGER_CST
                && TREE_CODE (ipart) == INTEGER_CST))
          return build_complex (type, rpart, ipart);
      }
      break;

    case COMPLEX_EXPR:
      if (negate_expr_p (t))
        return fold_build2_loc (loc, COMPLEX_EXPR, type,
                                fold_negate_expr (loc, TREE_OPERAND (t, 0)),
                                fold_negate_expr (loc, TREE_OPERAND (t, 1)));
      break;

    case CONJ_EXPR:
      if (negate_expr_p (t))
        return fold_build1_loc (loc, CONJ_EXPR, type,
                                fold_negate_expr (loc, TREE_OPERAND (t, 0)));
      break;

    case NEGATE_EXPR:
      return TREE_OPERAND (t, 0);

    case PLUS_EXPR:
      if (!HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type))
          && !HONOR_SIGNED_ZEROS (TYPE_MODE (type)))
        {
          /* -(A + B) -> (-B) - A.  */
          if (negate_expr_p (TREE_OPERAND (t, 1))
              && reorder_operands_p (TREE_OPERAND (t, 0),
                                     TREE_OPERAND (t, 1)))
            {
              tem = negate_expr (TREE_OPERAND (t, 1));
              return fold_build2_loc (loc, MINUS_EXPR, type,
                                      tem, TREE_OPERAND (t, 0));
            }

          /* -(A + B) -> (-A) - B.  */
          if (negate_expr_p (TREE_OPERAND (t, 0)))
            {
              tem = negate_expr (TREE_OPERAND (t, 0));
              return fold_build2_loc (loc, MINUS_EXPR, type,
                                      tem, TREE_OPERAND (t, 1));
            }
        }
      break;

    case MINUS_EXPR:
      /* - (A - B) -> B - A  */
      if (!HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type))
          && !HONOR_SIGNED_ZEROS (TYPE_MODE (type))
          && reorder_operands_p (TREE_OPERAND (t, 0), TREE_OPERAND (t, 1)))
        return fold_build2_loc (loc, MINUS_EXPR, type,
                                TREE_OPERAND (t, 1), TREE_OPERAND (t, 0));
      break;

    case MULT_EXPR:
      if (TYPE_UNSIGNED (type))
        break;

      /* Fall through.  */

    case RDIV_EXPR:
      if (! HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type)))
        {
          tem = TREE_OPERAND (t, 1);
          if (negate_expr_p (tem))
            return fold_build2_loc (loc, TREE_CODE (t), type,
                                    TREE_OPERAND (t, 0), negate_expr (tem));
          tem = TREE_OPERAND (t, 0);
          if (negate_expr_p (tem))
            return fold_build2_loc (loc, TREE_CODE (t), type,
                                    negate_expr (tem), TREE_OPERAND (t, 1));
        }
      break;

    case TRUNC_DIV_EXPR:
    case ROUND_DIV_EXPR:
    case FLOOR_DIV_EXPR:
    case CEIL_DIV_EXPR:
    case EXACT_DIV_EXPR:
      /* In general we can't negate A / B, because if A is INT_MIN and
         B is 1, we may turn this into INT_MIN / -1 which is undefined
         and actually traps on some architectures.  But if overflow is
         undefined, we can negate, because - (INT_MIN / 1) is an
         overflow.  */
      if (!INTEGRAL_TYPE_P (type) || TYPE_OVERFLOW_UNDEFINED (type))
        {
          const char * const warnmsg = G_("assuming signed overflow does not "
                                          "occur when negating a division");
          tem = TREE_OPERAND (t, 1);
          if (negate_expr_p (tem))
            {
              if (INTEGRAL_TYPE_P (type)
                  && (TREE_CODE (tem) != INTEGER_CST
                      || integer_onep (tem)))
                fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_MISC);
              return fold_build2_loc (loc, TREE_CODE (t), type,
                                      TREE_OPERAND (t, 0), negate_expr (tem));
            }
          tem = TREE_OPERAND (t, 0);
          if (negate_expr_p (tem))
            {
              if (INTEGRAL_TYPE_P (type)
                  && (TREE_CODE (tem) != INTEGER_CST
                      || tree_int_cst_equal (tem, TYPE_MIN_VALUE (type))))
                fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_MISC);
              return fold_build2_loc (loc, TREE_CODE (t), type,
                                      negate_expr (tem), TREE_OPERAND (t, 1));
            }
        }
      break;

    case NOP_EXPR:
      /* Convert -((double)float) into (double)(-float).  */
      if (TREE_CODE (type) == REAL_TYPE)
        {
          tem = strip_float_extensions (t);
          if (tem != t && negate_expr_p (tem))
            return fold_convert_loc (loc, type, negate_expr (tem));
        }
      break;

    case CALL_EXPR:
      /* Negate -f(x) as f(-x).  */
      if (negate_mathfn_p (builtin_mathfn_code (t))
          && negate_expr_p (CALL_EXPR_ARG (t, 0)))
        {
          tree fndecl, arg;

          fndecl = get_callee_fndecl (t);
          arg = negate_expr (CALL_EXPR_ARG (t, 0));
          return build_call_expr_loc (loc, fndecl, 1, arg);
        }
      break;

    case RSHIFT_EXPR:
      /* Optimize -((int)x >> 31) into (unsigned)x >> 31.  */
      if (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST)
        {
          tree op1 = TREE_OPERAND (t, 1);
          if (TREE_INT_CST_HIGH (op1) == 0
              && (unsigned HOST_WIDE_INT) (TYPE_PRECISION (type) - 1)
                 == TREE_INT_CST_LOW (op1))
            {
              tree ntype = TYPE_UNSIGNED (type)
                           ? signed_type_for (type)
                           : unsigned_type_for (type);
              tree temp = fold_convert_loc (loc, ntype, TREE_OPERAND (t, 0));
              temp = fold_build2_loc (loc, RSHIFT_EXPR, ntype, temp, op1);
              return fold_convert_loc (loc, type, temp);
            }
        }
      break;

    default:
      break;
    }

  return NULL_TREE;
}
/* Like fold_negate_expr, but return a NEGATE_EXPR tree, if T cannot be
   negated in a simpler way.  Also allow for T to be NULL_TREE, in which case
   return NULL_TREE.  */

static tree
negate_expr (tree t)
{
  tree type, tem;
  location_t loc;

  if (t == NULL_TREE)
    return NULL_TREE;

  loc = EXPR_LOCATION (t);
  type = TREE_TYPE (t);
  STRIP_SIGN_NOPS (t);

  tem = fold_negate_expr (loc, t);
  if (!tem)
    tem = build1_loc (loc, NEGATE_EXPR, TREE_TYPE (t), t);
  return fold_convert_loc (loc, type, tem);
}
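
/* For example, negate_expr applied to the tree for (a - b), in a type
   where signed zeros and sign-dependent rounding are not honored,
   yields the tree for (b - a); applied to a tree it cannot simplify,
   it simply wraps the operand in a NEGATE_EXPR.  */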
/* Split a tree IN into a constant, literal and variable parts that could be
   combined with CODE to make IN.  "constant" means an expression with
   TREE_CONSTANT but that isn't an actual constant.  CODE must be a
   commutative arithmetic operation.  Store the constant part into *CONP,
   the literal in *LITP and return the variable part.  If a part isn't
   present, set it to null.  If the tree does not decompose in this way,
   return the entire tree as the variable part and the other parts as null.

   If CODE is PLUS_EXPR we also split trees that use MINUS_EXPR.  In that
   case, we negate an operand that was subtracted.  Except if it is a
   literal for which we use *MINUS_LITP instead.

   If NEGATE_P is true, we are negating all of IN, again except a literal
   for which we use *MINUS_LITP instead.

   If IN is itself a literal or constant, return it as appropriate.

   Note that we do not guarantee that any of the three values will be the
   same type as IN, but they will have the same signedness and mode.  */

static tree
split_tree (tree in, enum tree_code code, tree *conp, tree *litp,
            tree *minus_litp, int negate_p)
{
  tree var = 0;

  *conp = 0;
  *litp = 0;
  *minus_litp = 0;

  /* Strip any conversions that don't change the machine mode or signedness.  */
  STRIP_SIGN_NOPS (in);

  if (TREE_CODE (in) == INTEGER_CST || TREE_CODE (in) == REAL_CST
      || TREE_CODE (in) == FIXED_CST)
    *litp = in;
  else if (TREE_CODE (in) == code
           || ((! FLOAT_TYPE_P (TREE_TYPE (in)) || flag_associative_math)
               && ! SAT_FIXED_POINT_TYPE_P (TREE_TYPE (in))
               /* We can associate addition and subtraction together (even
                  though the C standard doesn't say so) for integers because
                  the value is not affected.  For reals, the value might be
                  affected, so we can't.  */
               && ((code == PLUS_EXPR && TREE_CODE (in) == MINUS_EXPR)
                   || (code == MINUS_EXPR && TREE_CODE (in) == PLUS_EXPR))))
    {
      tree op0 = TREE_OPERAND (in, 0);
      tree op1 = TREE_OPERAND (in, 1);
      int neg1_p = TREE_CODE (in) == MINUS_EXPR;
      int neg_litp_p = 0, neg_conp_p = 0, neg_var_p = 0;

      /* First see if either of the operands is a literal, then a constant.  */
      if (TREE_CODE (op0) == INTEGER_CST || TREE_CODE (op0) == REAL_CST
          || TREE_CODE (op0) == FIXED_CST)
        *litp = op0, op0 = 0;
      else if (TREE_CODE (op1) == INTEGER_CST || TREE_CODE (op1) == REAL_CST
               || TREE_CODE (op1) == FIXED_CST)
        *litp = op1, neg_litp_p = neg1_p, op1 = 0;

      if (op0 != 0 && TREE_CONSTANT (op0))
        *conp = op0, op0 = 0;
      else if (op1 != 0 && TREE_CONSTANT (op1))
        *conp = op1, neg_conp_p = neg1_p, op1 = 0;

      /* If we haven't dealt with either operand, this is not a case we can
         decompose.  Otherwise, VAR is either of the ones remaining, if any.  */
      if (op0 != 0 && op1 != 0)
        var = in;
      else if (op0 != 0)
        var = op0;
      else
        var = op1, neg_var_p = neg1_p;

      /* Now do any needed negations.  */
      if (neg_litp_p)
        *minus_litp = *litp, *litp = 0;
      if (neg_conp_p)
        *conp = negate_expr (*conp);
      if (neg_var_p)
        var = negate_expr (var);
    }
  else if (TREE_CONSTANT (in))
    *conp = in;
  else
    var = in;

  if (negate_p)
    {
      if (*litp)
        *minus_litp = *litp, *litp = 0;
      else if (*minus_litp)
        *litp = *minus_litp, *minus_litp = 0;
      *conp = negate_expr (*conp);
      var = negate_expr (var);
    }

  return var;
}
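
/* For example, splitting IN = x - 5 with CODE == PLUS_EXPR returns the
   variable part x and stores the literal 5 in *MINUS_LITP (because it
   was subtracted), leaving *LITP and *CONP null.  */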
/* Re-associate trees split by the above function.  T1 and T2 are
   either expressions to associate or null.  Return the new
   expression, if any.  LOC is the location of the new expression.  If
   we build an operation, do it in TYPE and with CODE.  */

static tree
associate_trees (location_t loc, tree t1, tree t2, enum tree_code code, tree type)
{
  if (t1 == 0)
    return t2;
  else if (t2 == 0)
    return t1;

  /* If either input is CODE, a PLUS_EXPR, or a MINUS_EXPR, don't
     try to fold this since we will have infinite recursion.  But do
     deal with any NEGATE_EXPRs.  */
  if (TREE_CODE (t1) == code || TREE_CODE (t2) == code
      || TREE_CODE (t1) == MINUS_EXPR || TREE_CODE (t2) == MINUS_EXPR)
    {
      if (code == PLUS_EXPR)
        {
          if (TREE_CODE (t1) == NEGATE_EXPR)
            return build2_loc (loc, MINUS_EXPR, type,
                               fold_convert_loc (loc, type, t2),
                               fold_convert_loc (loc, type,
                                                 TREE_OPERAND (t1, 0)));
          else if (TREE_CODE (t2) == NEGATE_EXPR)
            return build2_loc (loc, MINUS_EXPR, type,
                               fold_convert_loc (loc, type, t1),
                               fold_convert_loc (loc, type,
                                                 TREE_OPERAND (t2, 0)));
          else if (integer_zerop (t2))
            return fold_convert_loc (loc, type, t1);
        }
      else if (code == MINUS_EXPR)
        {
          if (integer_zerop (t2))
            return fold_convert_loc (loc, type, t1);
        }

      return build2_loc (loc, code, type, fold_convert_loc (loc, type, t1),
                         fold_convert_loc (loc, type, t2));
    }

  return fold_build2_loc (loc, code, type, fold_convert_loc (loc, type, t1),
                          fold_convert_loc (loc, type, t2));
}
/* Check whether TYPE1 and TYPE2 are equivalent integer types, suitable
   for use in int_const_binop, size_binop and size_diffop.  */

static bool
int_binop_types_match_p (enum tree_code code, const_tree type1, const_tree type2)
{
  if (TREE_CODE (type1) != INTEGER_TYPE && !POINTER_TYPE_P (type1))
    return false;
  if (TREE_CODE (type2) != INTEGER_TYPE && !POINTER_TYPE_P (type2))
    return false;

  switch (code)
    {
    case LSHIFT_EXPR:
    case RSHIFT_EXPR:
    case LROTATE_EXPR:
    case RROTATE_EXPR:
      return true;

    default:
      break;
    }

  return TYPE_UNSIGNED (type1) == TYPE_UNSIGNED (type2)
         && TYPE_PRECISION (type1) == TYPE_PRECISION (type2)
         && TYPE_MODE (type1) == TYPE_MODE (type2);
}
/* Combine two integer constants ARG1 and ARG2 under operation CODE
   to produce a new constant.  Return NULL_TREE if we don't know how
   to evaluate CODE at compile-time.

   If NOTRUNC is nonzero, do not truncate the result to fit the data type.  */

tree
int_const_binop (enum tree_code code, const_tree arg1, const_tree arg2, int notrunc)
{
  double_int op1, op2, res, tmp;
  tree t;
  tree type = TREE_TYPE (arg1);
  bool uns = TYPE_UNSIGNED (type);
  bool is_sizetype
    = (TREE_CODE (type) == INTEGER_TYPE && TYPE_IS_SIZETYPE (type));
  bool overflow = false;

  op1 = tree_to_double_int (arg1);
  op2 = tree_to_double_int (arg2);

  switch (code)
    {
    case BIT_IOR_EXPR:
      res = double_int_ior (op1, op2);
      break;

    case BIT_XOR_EXPR:
      res = double_int_xor (op1, op2);
      break;

    case BIT_AND_EXPR:
      res = double_int_and (op1, op2);
      break;

    case RSHIFT_EXPR:
      res = double_int_rshift (op1, double_int_to_shwi (op2),
                               TYPE_PRECISION (type), !uns);
      break;

    case LSHIFT_EXPR:
      /* It's unclear from the C standard whether shifts can overflow.
         The following code ignores overflow; perhaps a C standard
         interpretation ruling is needed.  */
      res = double_int_lshift (op1, double_int_to_shwi (op2),
                               TYPE_PRECISION (type), !uns);
      break;

    case RROTATE_EXPR:
      res = double_int_rrotate (op1, double_int_to_shwi (op2),
                                TYPE_PRECISION (type));
      break;

    case LROTATE_EXPR:
      res = double_int_lrotate (op1, double_int_to_shwi (op2),
                                TYPE_PRECISION (type));
      break;

    case PLUS_EXPR:
      overflow = add_double (op1.low, op1.high, op2.low, op2.high,
                             &res.low, &res.high);
      break;

    case MINUS_EXPR:
      neg_double (op2.low, op2.high, &res.low, &res.high);
      add_double (op1.low, op1.high, res.low, res.high,
                  &res.low, &res.high);
      overflow = OVERFLOW_SUM_SIGN (res.high, op2.high, op1.high);
      break;

    case MULT_EXPR:
      overflow = mul_double (op1.low, op1.high, op2.low, op2.high,
                             &res.low, &res.high);
      break;

    case TRUNC_DIV_EXPR:
    case FLOOR_DIV_EXPR: case CEIL_DIV_EXPR:
    case EXACT_DIV_EXPR:
      /* This is a shortcut for a common special case.  */
      if (op2.high == 0 && (HOST_WIDE_INT) op2.low > 0
          && !TREE_OVERFLOW (arg1)
          && !TREE_OVERFLOW (arg2)
          && op1.high == 0 && (HOST_WIDE_INT) op1.low >= 0)
        {
          if (code == CEIL_DIV_EXPR)
            op1.low += op2.low - 1;

          res.low = op1.low / op2.low, res.high = 0;
          break;
        }

      /* ... fall through ...  */

    case ROUND_DIV_EXPR:
      if (double_int_zero_p (op2))
        return NULL_TREE;
      if (double_int_one_p (op2))
        {
          res = op1;
          break;
        }
      if (double_int_equal_p (op1, op2)
          && ! double_int_zero_p (op1))
        {
          res = double_int_one;
          break;
        }
      overflow = div_and_round_double (code, uns,
                                       op1.low, op1.high, op2.low, op2.high,
                                       &res.low, &res.high,
                                       &tmp.low, &tmp.high);
      break;

    case TRUNC_MOD_EXPR:
    case FLOOR_MOD_EXPR: case CEIL_MOD_EXPR:
      /* This is a shortcut for a common special case.  */
      if (op2.high == 0 && (HOST_WIDE_INT) op2.low > 0
          && !TREE_OVERFLOW (arg1)
          && !TREE_OVERFLOW (arg2)
          && op1.high == 0 && (HOST_WIDE_INT) op1.low >= 0)
        {
          if (code == CEIL_MOD_EXPR)
            op1.low += op2.low - 1;
          res.low = op1.low % op2.low, res.high = 0;
          break;
        }

      /* ... fall through ...  */

    case ROUND_MOD_EXPR:
      if (double_int_zero_p (op2))
        return NULL_TREE;
      overflow = div_and_round_double (code, uns,
                                       op1.low, op1.high, op2.low, op2.high,
                                       &tmp.low, &tmp.high,
                                       &res.low, &res.high);
      break;

    case MIN_EXPR:
      res = double_int_min (op1, op2, uns);
      break;

    case MAX_EXPR:
      res = double_int_max (op1, op2, uns);
      break;

    default:
      return NULL_TREE;
    }

  if (notrunc)
    {
      t = build_int_cst_wide (TREE_TYPE (arg1), res.low, res.high);

      /* Propagate overflow flags ourselves.  */
      if (((!uns || is_sizetype) && overflow)
          | TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2))
        {
          t = copy_node (t);
          TREE_OVERFLOW (t) = 1;
        }
    }
  else
    t = force_fit_type_double (TREE_TYPE (arg1), res, 1,
                               ((!uns || is_sizetype) && overflow)
                               | TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2));

  return t;
}
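
/* For instance, int_const_binop (PLUS_EXPR, <int 2>, <int 3>, 0)
   yields the constant 5, while adding 1 to INT_MAX yields INT_MIN
   with TREE_OVERFLOW set on the result, since force_fit_type_double
   notices that the value no longer fits the type.  */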
/* Combine two constants ARG1 and ARG2 under operation CODE to produce a new
   constant.  We assume ARG1 and ARG2 have the same data type, or at least
   are the same kind of constant and the same machine mode.  Return zero if
   combining the constants is not allowed in the current operating mode.  */

static tree
const_binop (enum tree_code code, tree arg1, tree arg2)
{
  /* Sanity check for the recursive cases.  */
  if (!arg1 || !arg2)
    return NULL_TREE;

  STRIP_NOPS (arg1);
  STRIP_NOPS (arg2);

  if (TREE_CODE (arg1) == INTEGER_CST)
    return int_const_binop (code, arg1, arg2, 0);

  if (TREE_CODE (arg1) == REAL_CST)
    {
      enum machine_mode mode;
      REAL_VALUE_TYPE d1;
      REAL_VALUE_TYPE d2;
      REAL_VALUE_TYPE value;
      REAL_VALUE_TYPE result;
      bool inexact;
      tree t, type;

      /* The following codes are handled by real_arithmetic.  */
      switch (code)
        {
        case PLUS_EXPR:
        case MINUS_EXPR:
        case MULT_EXPR:
        case RDIV_EXPR:
        case MIN_EXPR:
        case MAX_EXPR:
          break;

        default:
          return NULL_TREE;
        }

      d1 = TREE_REAL_CST (arg1);
      d2 = TREE_REAL_CST (arg2);

      type = TREE_TYPE (arg1);
      mode = TYPE_MODE (type);

      /* Don't perform operation if we honor signaling NaNs and
         either operand is a NaN.  */
      if (HONOR_SNANS (mode)
          && (REAL_VALUE_ISNAN (d1) || REAL_VALUE_ISNAN (d2)))
        return NULL_TREE;

      /* Don't perform operation if it would raise a division
         by zero exception.  */
      if (code == RDIV_EXPR
          && REAL_VALUES_EQUAL (d2, dconst0)
          && (flag_trapping_math || ! MODE_HAS_INFINITIES (mode)))
        return NULL_TREE;

      /* If either operand is a NaN, just return it.  Otherwise, set up
         for floating-point trap; we return an overflow.  */
      if (REAL_VALUE_ISNAN (d1))
        return arg1;
      else if (REAL_VALUE_ISNAN (d2))
        return arg2;

      inexact = real_arithmetic (&value, code, &d1, &d2);
      real_convert (&result, mode, &value);

      /* Don't constant fold this floating point operation if
         the result has overflowed and flag_trapping_math.  */
      if (flag_trapping_math
          && MODE_HAS_INFINITIES (mode)
          && REAL_VALUE_ISINF (result)
          && !REAL_VALUE_ISINF (d1)
          && !REAL_VALUE_ISINF (d2))
        return NULL_TREE;

      /* Don't constant fold this floating point operation if the
         result may depend upon the run-time rounding mode and
         flag_rounding_math is set, or if GCC's software emulation
         is unable to accurately represent the result.  */
      if ((flag_rounding_math
           || (MODE_COMPOSITE_P (mode) && !flag_unsafe_math_optimizations))
          && (inexact || !real_identical (&result, &value)))
        return NULL_TREE;

      t = build_real (type, result);

      TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2);
      return t;
    }
  if (TREE_CODE (arg1) == FIXED_CST)
    {
      FIXED_VALUE_TYPE f1;
      FIXED_VALUE_TYPE f2;
      FIXED_VALUE_TYPE result;
      tree t, type;
      int sat_p;
      bool overflow_p;

      /* The following codes are handled by fixed_arithmetic.  */
      switch (code)
        {
        case PLUS_EXPR:
        case MINUS_EXPR:
        case MULT_EXPR:
        case TRUNC_DIV_EXPR:
          f2 = TREE_FIXED_CST (arg2);
          break;

        case LSHIFT_EXPR:
        case RSHIFT_EXPR:
          f2.data.high = TREE_INT_CST_HIGH (arg2);
          f2.data.low = TREE_INT_CST_LOW (arg2);
          f2.mode = SImode;
          break;

        default:
          return NULL_TREE;
        }

      f1 = TREE_FIXED_CST (arg1);
      type = TREE_TYPE (arg1);
      sat_p = TYPE_SATURATING (type);
      overflow_p = fixed_arithmetic (&result, code, &f1, &f2, sat_p);
      t = build_fixed (type, result);
      /* Propagate overflow flags.  */
      if (overflow_p | TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2))
        TREE_OVERFLOW (t) = 1;
      return t;
    }
  if (TREE_CODE (arg1) == COMPLEX_CST)
    {
      tree type = TREE_TYPE (arg1);
      tree r1 = TREE_REALPART (arg1);
      tree i1 = TREE_IMAGPART (arg1);
      tree r2 = TREE_REALPART (arg2);
      tree i2 = TREE_IMAGPART (arg2);
      tree real, imag;

      switch (code)
        {
        case PLUS_EXPR:
        case MINUS_EXPR:
          real = const_binop (code, r1, r2);
          imag = const_binop (code, i1, i2);
          break;

        case MULT_EXPR:
          if (COMPLEX_FLOAT_TYPE_P (type))
            return do_mpc_arg2 (arg1, arg2, type,
                                /* do_nonfinite= */ folding_initializer,
                                mpc_mul);

          real = const_binop (MINUS_EXPR,
                              const_binop (MULT_EXPR, r1, r2),
                              const_binop (MULT_EXPR, i1, i2));
          imag = const_binop (PLUS_EXPR,
                              const_binop (MULT_EXPR, r1, i2),
                              const_binop (MULT_EXPR, i1, r2));
          break;

        case RDIV_EXPR:
          if (COMPLEX_FLOAT_TYPE_P (type))
            return do_mpc_arg2 (arg1, arg2, type,
                                /* do_nonfinite= */ folding_initializer,
                                mpc_div);
          /* Fallthru ...  */
        case TRUNC_DIV_EXPR:
        case CEIL_DIV_EXPR:
        case FLOOR_DIV_EXPR:
        case ROUND_DIV_EXPR:
          if (flag_complex_method == 0)
            {
              /* Keep this algorithm in sync with
                 tree-complex.c:expand_complex_div_straight().

                 Expand complex division to scalars, straightforward algorithm.
                 a / b = ((ar*br + ai*bi)/t) + i((ai*br - ar*bi)/t)
                 t = br*br + bi*bi  */
              tree magsquared
                = const_binop (PLUS_EXPR,
                               const_binop (MULT_EXPR, r2, r2),
                               const_binop (MULT_EXPR, i2, i2));
              tree t1
                = const_binop (PLUS_EXPR,
                               const_binop (MULT_EXPR, r1, r2),
                               const_binop (MULT_EXPR, i1, i2));
              tree t2
                = const_binop (MINUS_EXPR,
                               const_binop (MULT_EXPR, i1, r2),
                               const_binop (MULT_EXPR, r1, i2));

              real = const_binop (code, t1, magsquared);
              imag = const_binop (code, t2, magsquared);
            }
          else
            {
              /* Keep this algorithm in sync with
                 tree-complex.c:expand_complex_div_wide().

                 Expand complex division to scalars, modified algorithm to minimize
                 overflow with wide input ranges.  */
              tree compare = fold_build2 (LT_EXPR, boolean_type_node,
                                          fold_abs_const (r2, TREE_TYPE (type)),
                                          fold_abs_const (i2, TREE_TYPE (type)));

              if (integer_nonzerop (compare))
                {
                  /* In the TRUE branch, we compute
                     ratio = br/bi;
                     div = (br * ratio) + bi;
                     tr = (ar * ratio) + ai;
                     ti = (ai * ratio) - ar;
                     tr = tr / div;
                     ti = ti / div;  */
                  tree ratio = const_binop (code, r2, i2);
                  tree div = const_binop (PLUS_EXPR, i2,
                                          const_binop (MULT_EXPR, r2, ratio));
                  real = const_binop (MULT_EXPR, r1, ratio);
                  real = const_binop (PLUS_EXPR, real, i1);
                  real = const_binop (code, real, div);

                  imag = const_binop (MULT_EXPR, i1, ratio);
                  imag = const_binop (MINUS_EXPR, imag, r1);
                  imag = const_binop (code, imag, div);
                }
              else
                {
                  /* In the FALSE branch, we compute
                     ratio = d/c;
                     divisor = (d * ratio) + c;
                     tr = (b * ratio) + a;
                     ti = b - (a * ratio);
                     tr = tr / div;
                     ti = ti / div;  */
                  tree ratio = const_binop (code, i2, r2);
                  tree div = const_binop (PLUS_EXPR, r2,
                                          const_binop (MULT_EXPR, i2, ratio));

                  real = const_binop (MULT_EXPR, i1, ratio);
                  real = const_binop (PLUS_EXPR, real, r1);
                  real = const_binop (code, real, div);

                  imag = const_binop (MULT_EXPR, r1, ratio);
                  imag = const_binop (MINUS_EXPR, i1, imag);
                  imag = const_binop (code, imag, div);
                }
            }
          break;

        default:
          return NULL_TREE;
        }

      if (real && imag)
        return build_complex (type, real, imag);
    }
  if (TREE_CODE (arg1) == VECTOR_CST)
    {
      tree type = TREE_TYPE (arg1);
      int count = TYPE_VECTOR_SUBPARTS (type), i;
      tree elements1, elements2, list = NULL_TREE;

      if (TREE_CODE (arg2) != VECTOR_CST)
        return NULL_TREE;

      elements1 = TREE_VECTOR_CST_ELTS (arg1);
      elements2 = TREE_VECTOR_CST_ELTS (arg2);

      for (i = 0; i < count; i++)
        {
          tree elem1, elem2, elem;

          /* The trailing elements can be empty and should be treated as 0.  */
          if (!elements1)
            elem1 = fold_convert_const (NOP_EXPR, TREE_TYPE (type),
                                        integer_zero_node);
          else
            {
              elem1 = TREE_VALUE (elements1);
              elements1 = TREE_CHAIN (elements1);
            }

          if (!elements2)
            elem2 = fold_convert_const (NOP_EXPR, TREE_TYPE (type),
                                        integer_zero_node);
          else
            {
              elem2 = TREE_VALUE (elements2);
              elements2 = TREE_CHAIN (elements2);
            }

          elem = const_binop (code, elem1, elem2);

          /* It is possible that const_binop cannot handle the given
             code and returns NULL_TREE.  */
          if (elem == NULL_TREE)
            return NULL_TREE;

          list = tree_cons (NULL_TREE, elem, list);
        }
      return build_vector (type, nreverse (list));
    }
  return NULL_TREE;
}
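
/* A worked instance of the straightforward complex division above:
   (3 + 4i) / (1 + 2i) gives t = 1*1 + 2*2 = 5, real part
   (3*1 + 4*2) / 5 = 2.2 and imaginary part (4*1 - 3*2) / 5 = -0.4,
   i.e. 2.2 - 0.4i.  */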
/* Create a size type INT_CST node with NUMBER sign extended.  KIND
   indicates which particular sizetype to create.  */

tree
size_int_kind (HOST_WIDE_INT number, enum size_type_kind kind)
{
  return build_int_cst (sizetype_tab[(int) kind], number);
}
/* Combine operands OP1 and OP2 with arithmetic operation CODE.  CODE
   is a tree code.  The type of the result is taken from the operands.
   Both must be equivalent integer types, ala int_binop_types_match_p.
   If the operands are constant, so is the result.  */

tree
size_binop_loc (location_t loc, enum tree_code code, tree arg0, tree arg1)
{
  tree type = TREE_TYPE (arg0);

  if (arg0 == error_mark_node || arg1 == error_mark_node)
    return error_mark_node;

  gcc_assert (int_binop_types_match_p (code, TREE_TYPE (arg0),
                                       TREE_TYPE (arg1)));

  /* Handle the special case of two integer constants faster.  */
  if (TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
    {
      /* And some specific cases even faster than that.  */
      if (code == PLUS_EXPR)
        {
          if (integer_zerop (arg0) && !TREE_OVERFLOW (arg0))
            return arg1;
          if (integer_zerop (arg1) && !TREE_OVERFLOW (arg1))
            return arg0;
        }
      else if (code == MINUS_EXPR)
        {
          if (integer_zerop (arg1) && !TREE_OVERFLOW (arg1))
            return arg0;
        }
      else if (code == MULT_EXPR)
        {
          if (integer_onep (arg0) && !TREE_OVERFLOW (arg0))
            return arg1;
        }

      /* Handle general case of two integer constants.  */
      return int_const_binop (code, arg0, arg1, 0);
    }

  return fold_build2_loc (loc, code, type, arg0, arg1);
}
/* Given two values, either both of sizetype or both of bitsizetype,
   compute the difference between the two values.  Return the value
   in signed type corresponding to the type of the operands.  */

tree
size_diffop_loc (location_t loc, tree arg0, tree arg1)
{
  tree type = TREE_TYPE (arg0);
  tree ctype;

  gcc_assert (int_binop_types_match_p (MINUS_EXPR, TREE_TYPE (arg0),
                                       TREE_TYPE (arg1)));

  /* If the type is already signed, just do the simple thing.  */
  if (!TYPE_UNSIGNED (type))
    return size_binop_loc (loc, MINUS_EXPR, arg0, arg1);

  if (type == sizetype)
    ctype = ssizetype;
  else if (type == bitsizetype)
    ctype = sbitsizetype;
  else
    ctype = signed_type_for (type);

  /* If either operand is not a constant, do the conversions to the signed
     type and subtract.  The hardware will do the right thing with any
     overflow in the subtraction.  */
  if (TREE_CODE (arg0) != INTEGER_CST || TREE_CODE (arg1) != INTEGER_CST)
    return size_binop_loc (loc, MINUS_EXPR,
                           fold_convert_loc (loc, ctype, arg0),
                           fold_convert_loc (loc, ctype, arg1));

  /* If ARG0 is larger than ARG1, subtract and return the result in CTYPE.
     Otherwise, subtract the other way, convert to CTYPE (we know that can't
     overflow) and negate (which can't either).  Special-case a result
     of zero while we're here.  */
  if (tree_int_cst_equal (arg0, arg1))
    return build_int_cst (ctype, 0);
  else if (tree_int_cst_lt (arg1, arg0))
    return fold_convert_loc (loc, ctype,
                             size_binop_loc (loc, MINUS_EXPR, arg0, arg1));
  else
    return size_binop_loc (loc, MINUS_EXPR, build_int_cst (ctype, 0),
                           fold_convert_loc (loc, ctype,
                                             size_binop_loc (loc,
                                                             MINUS_EXPR,
                                                             arg1, arg0)));
}
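
/* For example, with sizetype (unsigned) constants ARG0 = 4 and
   ARG1 = 10, the result is the ssizetype constant -6, computed as
   0 - (10 - 4) so that the unsigned subtraction never wraps.  */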
/* A subroutine of fold_convert_const handling conversions of an
   INTEGER_CST to another integer type.  */

static tree
fold_convert_const_int_from_int (tree type, const_tree arg1)
{
  tree t;

  /* Given an integer constant, make new constant with new type,
     appropriately sign-extended or truncated.  */
  t = force_fit_type_double (type, tree_to_double_int (arg1),
                             !POINTER_TYPE_P (TREE_TYPE (arg1)),
                             (TREE_INT_CST_HIGH (arg1) < 0
                              && (TYPE_UNSIGNED (type)
                                  < TYPE_UNSIGNED (TREE_TYPE (arg1))))
                             | TREE_OVERFLOW (arg1));

  return t;
}
/* A subroutine of fold_convert_const handling conversions of a REAL_CST
   to an integer type.  */

static tree
fold_convert_const_int_from_real (enum tree_code code, tree type, const_tree arg1)
{
  int overflow = 0;
  tree t;

  /* The following code implements the floating point to integer
     conversion rules required by the Java Language Specification,
     that IEEE NaNs are mapped to zero and values that overflow
     the target precision saturate, i.e. values greater than
     INT_MAX are mapped to INT_MAX, and values less than INT_MIN
     are mapped to INT_MIN.  These semantics are allowed by the
     C and C++ standards that simply state that the behavior of
     FP-to-integer conversion is unspecified upon overflow.  */

  double_int val;
  REAL_VALUE_TYPE r;
  REAL_VALUE_TYPE x = TREE_REAL_CST (arg1);

  switch (code)
    {
    case FIX_TRUNC_EXPR:
      real_trunc (&r, VOIDmode, &x);
      break;

    default:
      gcc_unreachable ();
    }

  /* If R is NaN, return zero and show we have an overflow.  */
  if (REAL_VALUE_ISNAN (r))
    {
      overflow = 1;
      val = double_int_zero;
    }

  /* See if R is less than the lower bound or greater than the
     upper bound.  */

  if (! overflow)
    {
      tree lt = TYPE_MIN_VALUE (type);
      REAL_VALUE_TYPE l = real_value_from_int_cst (NULL_TREE, lt);
      if (REAL_VALUES_LESS (r, l))
        {
          overflow = 1;
          val = tree_to_double_int (lt);
        }
    }

  if (! overflow)
    {
      tree ut = TYPE_MAX_VALUE (type);
      if (ut)
        {
          REAL_VALUE_TYPE u = real_value_from_int_cst (NULL_TREE, ut);
          if (REAL_VALUES_LESS (u, r))
            {
              overflow = 1;
              val = tree_to_double_int (ut);
            }
        }
    }

  if (! overflow)
    real_to_integer2 ((HOST_WIDE_INT *) &val.low, &val.high, &r);

  t = force_fit_type_double (type, val, -1, overflow | TREE_OVERFLOW (arg1));
  return t;
}
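
/* So, e.g., (int) 3.9 folds to 3 and (int) -3.9 to -3 (truncation
   toward zero), a NaN folds to 0, and (int) 1.0e30 folds to INT_MAX;
   in the NaN and out-of-range cases TREE_OVERFLOW is set on the
   result.  */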
/* A subroutine of fold_convert_const handling conversions of a
   FIXED_CST to an integer type.  */

static tree
fold_convert_const_int_from_fixed (tree type, const_tree arg1)
{
  tree t;
  double_int temp, temp_trunc;
  unsigned int mode;

  /* Right shift FIXED_CST to temp by fbit.  */
  temp = TREE_FIXED_CST (arg1).data;
  mode = TREE_FIXED_CST (arg1).mode;
  if (GET_MODE_FBIT (mode) < 2 * HOST_BITS_PER_WIDE_INT)
    {
      temp = double_int_rshift (temp, GET_MODE_FBIT (mode),
                                HOST_BITS_PER_DOUBLE_INT,
                                SIGNED_FIXED_POINT_MODE_P (mode));

      /* Left shift temp to temp_trunc by fbit.  */
      temp_trunc = double_int_lshift (temp, GET_MODE_FBIT (mode),
                                      HOST_BITS_PER_DOUBLE_INT,
                                      SIGNED_FIXED_POINT_MODE_P (mode));
    }
  else
    {
      temp = double_int_zero;
      temp_trunc = double_int_zero;
    }

  /* If FIXED_CST is negative, we need to round the value toward 0.
     We do so by adding 1 to temp whenever the fractional bits are
     nonzero.  */
  if (SIGNED_FIXED_POINT_MODE_P (mode)
      && double_int_negative_p (temp_trunc)
      && !double_int_equal_p (TREE_FIXED_CST (arg1).data, temp_trunc))
    temp = double_int_add (temp, double_int_one);

  /* Given a fixed-point constant, make new constant with new type,
     appropriately sign-extended or truncated.  */
  t = force_fit_type_double (type, temp, -1,
                             (double_int_negative_p (temp)
                              && (TYPE_UNSIGNED (type)
                                  < TYPE_UNSIGNED (TREE_TYPE (arg1))))
                             | TREE_OVERFLOW (arg1));

  return t;
}
/* A subroutine of fold_convert_const handling conversions of a REAL_CST
   to another floating point type.  */

static tree
fold_convert_const_real_from_real (tree type, const_tree arg1)
{
  REAL_VALUE_TYPE value;
  tree t;

  real_convert (&value, TYPE_MODE (type), &TREE_REAL_CST (arg1));
  t = build_real (type, value);

  /* If converting an infinity or NAN to a representation that doesn't
     have one, set the overflow bit so that we can produce some kind of
     error message at the appropriate point if necessary.  It's not the
     most user-friendly message, but it's better than nothing.  */
  if (REAL_VALUE_ISINF (TREE_REAL_CST (arg1))
      && !MODE_HAS_INFINITIES (TYPE_MODE (type)))
    TREE_OVERFLOW (t) = 1;
  else if (REAL_VALUE_ISNAN (TREE_REAL_CST (arg1))
           && !MODE_HAS_NANS (TYPE_MODE (type)))
    TREE_OVERFLOW (t) = 1;
  /* Regular overflow, conversion produced an infinity in a mode that
     can't represent them.  */
  else if (!MODE_HAS_INFINITIES (TYPE_MODE (type))
           && REAL_VALUE_ISINF (value)
           && !REAL_VALUE_ISINF (TREE_REAL_CST (arg1)))
    TREE_OVERFLOW (t) = 1;
  else
    TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1);
  return t;
}
/* A subroutine of fold_convert_const handling conversions of a FIXED_CST
   to a floating point type.  */

static tree
fold_convert_const_real_from_fixed (tree type, const_tree arg1)
{
  REAL_VALUE_TYPE value;
  tree t;

  real_convert_from_fixed (&value, TYPE_MODE (type), &TREE_FIXED_CST (arg1));
  t = build_real (type, value);

  TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1);
  return t;
}
/* A subroutine of fold_convert_const handling conversions of a FIXED_CST
   to another fixed-point type.  */

static tree
fold_convert_const_fixed_from_fixed (tree type, const_tree arg1)
{
  FIXED_VALUE_TYPE value;
  tree t;
  bool overflow_p;

  overflow_p = fixed_convert (&value, TYPE_MODE (type), &TREE_FIXED_CST (arg1),
                              TYPE_SATURATING (type));
  t = build_fixed (type, value);

  /* Propagate overflow flags.  */
  if (overflow_p | TREE_OVERFLOW (arg1))
    TREE_OVERFLOW (t) = 1;
  return t;
}
/* A subroutine of fold_convert_const handling conversions of an INTEGER_CST
   to a fixed-point type.  */

static tree
fold_convert_const_fixed_from_int (tree type, const_tree arg1)
{
  FIXED_VALUE_TYPE value;
  tree t;
  bool overflow_p;

  overflow_p = fixed_convert_from_int (&value, TYPE_MODE (type),
                                       TREE_INT_CST (arg1),
                                       TYPE_UNSIGNED (TREE_TYPE (arg1)),
                                       TYPE_SATURATING (type));
  t = build_fixed (type, value);

  /* Propagate overflow flags.  */
  if (overflow_p | TREE_OVERFLOW (arg1))
    TREE_OVERFLOW (t) = 1;
  return t;
}
/* A subroutine of fold_convert_const handling conversions of a REAL_CST
   to a fixed-point type.  */

static tree
fold_convert_const_fixed_from_real (tree type, const_tree arg1)
{
  FIXED_VALUE_TYPE value;
  tree t;
  bool overflow_p;

  overflow_p = fixed_convert_from_real (&value, TYPE_MODE (type),
                                        &TREE_REAL_CST (arg1),
                                        TYPE_SATURATING (type));
  t = build_fixed (type, value);

  /* Propagate overflow flags.  */
  if (overflow_p | TREE_OVERFLOW (arg1))
    TREE_OVERFLOW (t) = 1;
  return t;
}
/* Attempt to fold type conversion operation CODE of expression ARG1 to
   type TYPE.  If no simplification can be done return NULL_TREE.  */

static tree
fold_convert_const (enum tree_code code, tree type, tree arg1)
{
  if (TREE_TYPE (arg1) == type)
    return arg1;

  if (POINTER_TYPE_P (type) || INTEGRAL_TYPE_P (type)
      || TREE_CODE (type) == OFFSET_TYPE)
    {
      if (TREE_CODE (arg1) == INTEGER_CST)
        return fold_convert_const_int_from_int (type, arg1);
      else if (TREE_CODE (arg1) == REAL_CST)
        return fold_convert_const_int_from_real (code, type, arg1);
      else if (TREE_CODE (arg1) == FIXED_CST)
        return fold_convert_const_int_from_fixed (type, arg1);
    }
  else if (TREE_CODE (type) == REAL_TYPE)
    {
      if (TREE_CODE (arg1) == INTEGER_CST)
        return build_real_from_int_cst (type, arg1);
      else if (TREE_CODE (arg1) == REAL_CST)
        return fold_convert_const_real_from_real (type, arg1);
      else if (TREE_CODE (arg1) == FIXED_CST)
        return fold_convert_const_real_from_fixed (type, arg1);
    }
  else if (TREE_CODE (type) == FIXED_POINT_TYPE)
    {
      if (TREE_CODE (arg1) == FIXED_CST)
        return fold_convert_const_fixed_from_fixed (type, arg1);
      else if (TREE_CODE (arg1) == INTEGER_CST)
        return fold_convert_const_fixed_from_int (type, arg1);
      else if (TREE_CODE (arg1) == REAL_CST)
        return fold_convert_const_fixed_from_real (type, arg1);
    }
  return NULL_TREE;
}
/* Construct a vector of zero elements of vector type TYPE.  */

static tree
build_zero_vector (tree type)
{
  tree t;

  t = fold_convert_const (NOP_EXPR, TREE_TYPE (type), integer_zero_node);
  return build_vector_from_val (type, t);
}
/* Returns true, if ARG is convertible to TYPE using a NOP_EXPR.  */

bool
fold_convertible_p (const_tree type, const_tree arg)
{
  tree orig = TREE_TYPE (arg);

  if (type == orig)
    return true;

  if (TREE_CODE (arg) == ERROR_MARK
      || TREE_CODE (type) == ERROR_MARK
      || TREE_CODE (orig) == ERROR_MARK)
    return false;

  if (TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (orig))
    return true;

  switch (TREE_CODE (type))
    {
    case INTEGER_TYPE: case ENUMERAL_TYPE: case BOOLEAN_TYPE:
    case POINTER_TYPE: case REFERENCE_TYPE:
    case OFFSET_TYPE:
      if (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig)
          || TREE_CODE (orig) == OFFSET_TYPE)
        return true;
      return (TREE_CODE (orig) == VECTOR_TYPE
              && tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (orig)));

    case REAL_TYPE:
    case FIXED_POINT_TYPE:
    case COMPLEX_TYPE:
    case VECTOR_TYPE:
    case VOID_TYPE:
      return TREE_CODE (type) == TREE_CODE (orig);

    default:
      return false;
    }
}
/* Convert expression ARG to type TYPE.  Used by the middle-end for
   simple conversions in preference to calling the front-end's convert.  */

tree
fold_convert_loc (location_t loc, tree type, tree arg)
{
  tree orig = TREE_TYPE (arg);
  tree tem;

  if (type == orig)
    return arg;

  if (TREE_CODE (arg) == ERROR_MARK
      || TREE_CODE (type) == ERROR_MARK
      || TREE_CODE (orig) == ERROR_MARK)
    return error_mark_node;

  if (TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (orig))
    return fold_build1_loc (loc, NOP_EXPR, type, arg);

  switch (TREE_CODE (type))
    {
    case POINTER_TYPE:
    case REFERENCE_TYPE:
      /* Handle conversions between pointers to different address spaces.  */
      if (POINTER_TYPE_P (orig)
          && (TYPE_ADDR_SPACE (TREE_TYPE (type))
              != TYPE_ADDR_SPACE (TREE_TYPE (orig))))
        return fold_build1_loc (loc, ADDR_SPACE_CONVERT_EXPR, type, arg);
      /* fall through */

    case INTEGER_TYPE: case ENUMERAL_TYPE: case BOOLEAN_TYPE:
    case OFFSET_TYPE:
      if (TREE_CODE (arg) == INTEGER_CST)
        {
          tem = fold_convert_const (NOP_EXPR, type, arg);
          if (tem != NULL_TREE)
            return tem;
        }
      if (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig)
          || TREE_CODE (orig) == OFFSET_TYPE)
        return fold_build1_loc (loc, NOP_EXPR, type, arg);
      if (TREE_CODE (orig) == COMPLEX_TYPE)
        return fold_convert_loc (loc, type,
                                 fold_build1_loc (loc, REALPART_EXPR,
                                                  TREE_TYPE (orig), arg));
      gcc_assert (TREE_CODE (orig) == VECTOR_TYPE
                  && tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (orig)));
      return fold_build1_loc (loc, NOP_EXPR, type, arg);

    case REAL_TYPE:
      if (TREE_CODE (arg) == INTEGER_CST)
        {
          tem = fold_convert_const (FLOAT_EXPR, type, arg);
          if (tem != NULL_TREE)
            return tem;
        }
      else if (TREE_CODE (arg) == REAL_CST)
        {
          tem = fold_convert_const (NOP_EXPR, type, arg);
          if (tem != NULL_TREE)
            return tem;
        }
      else if (TREE_CODE (arg) == FIXED_CST)
        {
          tem = fold_convert_const (FIXED_CONVERT_EXPR, type, arg);
          if (tem != NULL_TREE)
            return tem;
        }

      switch (TREE_CODE (orig))
        {
        case INTEGER_TYPE:
        case BOOLEAN_TYPE: case ENUMERAL_TYPE:
        case POINTER_TYPE: case REFERENCE_TYPE:
          return fold_build1_loc (loc, FLOAT_EXPR, type, arg);

        case REAL_TYPE:
          return fold_build1_loc (loc, NOP_EXPR, type, arg);

        case FIXED_POINT_TYPE:
          return fold_build1_loc (loc, FIXED_CONVERT_EXPR, type, arg);

        case COMPLEX_TYPE:
          tem = fold_build1_loc (loc, REALPART_EXPR, TREE_TYPE (orig), arg);
          return fold_convert_loc (loc, type, tem);

        default:
          gcc_unreachable ();
        }

    case FIXED_POINT_TYPE:
      if (TREE_CODE (arg) == FIXED_CST || TREE_CODE (arg) == INTEGER_CST
          || TREE_CODE (arg) == REAL_CST)
        {
          tem = fold_convert_const (FIXED_CONVERT_EXPR, type, arg);
          if (tem != NULL_TREE)
            goto fold_convert_exit;
        }

      switch (TREE_CODE (orig))
        {
        case FIXED_POINT_TYPE:
        case INTEGER_TYPE:
        case ENUMERAL_TYPE:
        case BOOLEAN_TYPE:
        case REAL_TYPE:
          return fold_build1_loc (loc, FIXED_CONVERT_EXPR, type, arg);

        case COMPLEX_TYPE:
          tem = fold_build1_loc (loc, REALPART_EXPR, TREE_TYPE (orig), arg);
          return fold_convert_loc (loc, type, tem);

        default:
          gcc_unreachable ();
        }

    case COMPLEX_TYPE:
      switch (TREE_CODE (orig))
        {
        case INTEGER_TYPE:
        case BOOLEAN_TYPE: case ENUMERAL_TYPE:
        case POINTER_TYPE: case REFERENCE_TYPE:
        case REAL_TYPE:
        case FIXED_POINT_TYPE:
          return fold_build2_loc (loc, COMPLEX_EXPR, type,
                                  fold_convert_loc (loc, TREE_TYPE (type), arg),
                                  fold_convert_loc (loc, TREE_TYPE (type),
                                                    integer_zero_node));
        case COMPLEX_TYPE:
          {
            tree rpart, ipart;

            if (TREE_CODE (arg) == COMPLEX_EXPR)
              {
                rpart = fold_convert_loc (loc, TREE_TYPE (type),
                                          TREE_OPERAND (arg, 0));
                ipart = fold_convert_loc (loc, TREE_TYPE (type),
                                          TREE_OPERAND (arg, 1));
                return fold_build2_loc (loc, COMPLEX_EXPR, type, rpart, ipart);
              }

            arg = save_expr (arg);
            rpart = fold_build1_loc (loc, REALPART_EXPR, TREE_TYPE (orig), arg);
            ipart = fold_build1_loc (loc, IMAGPART_EXPR, TREE_TYPE (orig), arg);
            rpart = fold_convert_loc (loc, TREE_TYPE (type), rpart);
            ipart = fold_convert_loc (loc, TREE_TYPE (type), ipart);
            return fold_build2_loc (loc, COMPLEX_EXPR, type, rpart, ipart);
          }

        default:
          gcc_unreachable ();
        }

    case VECTOR_TYPE:
      if (integer_zerop (arg))
        return build_zero_vector (type);
      gcc_assert (tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (orig)));
      gcc_assert (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig)
                  || TREE_CODE (orig) == VECTOR_TYPE);
      return fold_build1_loc (loc, VIEW_CONVERT_EXPR, type, arg);

    case VOID_TYPE:
      tem = fold_ignored_result (arg);
      if (TREE_CODE (tem) == MODIFY_EXPR)
        goto fold_convert_exit;
      return fold_build1_loc (loc, NOP_EXPR, type, tem);

    default:
      gcc_unreachable ();
    }
 fold_convert_exit:
  protected_set_expr_location_unshare (tem, loc);
  return tem;
}
2035 /* Return false if expr can be assumed not to be an lvalue, true
2036 otherwise. */
2038 static bool
2039 maybe_lvalue_p (const_tree x)
2041 /* We only need to wrap lvalue tree codes. */
2042 switch (TREE_CODE (x))
2044 case VAR_DECL:
2045 case PARM_DECL:
2046 case RESULT_DECL:
2047 case LABEL_DECL:
2048 case FUNCTION_DECL:
2049 case SSA_NAME:
2051 case COMPONENT_REF:
2052 case MEM_REF:
2053 case INDIRECT_REF:
2054 case ARRAY_REF:
2055 case ARRAY_RANGE_REF:
2056 case BIT_FIELD_REF:
2057 case OBJ_TYPE_REF:
2059 case REALPART_EXPR:
2060 case IMAGPART_EXPR:
2061 case PREINCREMENT_EXPR:
2062 case PREDECREMENT_EXPR:
2063 case SAVE_EXPR:
2064 case TRY_CATCH_EXPR:
2065 case WITH_CLEANUP_EXPR:
2066 case COMPOUND_EXPR:
2067 case MODIFY_EXPR:
2068 case TARGET_EXPR:
2069 case COND_EXPR:
2070 case BIND_EXPR:
2071 break;
2073 default:
2074 /* Assume the worst for front-end tree codes. */
2075 if ((int)TREE_CODE (x) >= NUM_TREE_CODES)
2076 break;
2077 return false;
2080 return true;
2083 /* Return an expr equal to X but certainly not valid as an lvalue. */
2085 tree
2086 non_lvalue_loc (location_t loc, tree x)
2088 /* While we are in GIMPLE, NON_LVALUE_EXPR doesn't mean anything to
2089 us. */
2090 if (in_gimple_form)
2091 return x;
2093 if (! maybe_lvalue_p (x))
2094 return x;
2095 return build1_loc (loc, NON_LVALUE_EXPR, TREE_TYPE (x), x);
2098 /* Nonzero means lvalues are limited to those valid in pedantic ANSI C.
2099 Zero means allow extended lvalues. */
2101 int pedantic_lvalues;
2103 /* When pedantic, return an expr equal to X but certainly not valid as a
2104 pedantic lvalue. Otherwise, return X. */
2106 static tree
2107 pedantic_non_lvalue_loc (location_t loc, tree x)
2109 if (pedantic_lvalues)
2110 return non_lvalue_loc (loc, x);
2112 return protected_set_expr_location_unshare (x, loc);
2115 /* Given a tree comparison code, return the code that is the logical inverse
2116 of the given code. It is not safe to do this for floating-point
2117 comparisons, except for NE_EXPR and EQ_EXPR, so we receive a HONOR_NANS
2118 flag as well: if reversing the comparison is unsafe, return ERROR_MARK. */
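/* For example, with NaNs honored the inverse of LT_EXPR is UNGE_EXPR,
   since !(x < y) must remain true when either operand is a NaN; without
   NaNs it is plain GE_EXPR, as the switch below shows. */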
2120 enum tree_code
2121 invert_tree_comparison (enum tree_code code, bool honor_nans)
2123 if (honor_nans && flag_trapping_math)
2124 return ERROR_MARK;
2126 switch (code)
2128 case EQ_EXPR:
2129 return NE_EXPR;
2130 case NE_EXPR:
2131 return EQ_EXPR;
2132 case GT_EXPR:
2133 return honor_nans ? UNLE_EXPR : LE_EXPR;
2134 case GE_EXPR:
2135 return honor_nans ? UNLT_EXPR : LT_EXPR;
2136 case LT_EXPR:
2137 return honor_nans ? UNGE_EXPR : GE_EXPR;
2138 case LE_EXPR:
2139 return honor_nans ? UNGT_EXPR : GT_EXPR;
2140 case LTGT_EXPR:
2141 return UNEQ_EXPR;
2142 case UNEQ_EXPR:
2143 return LTGT_EXPR;
2144 case UNGT_EXPR:
2145 return LE_EXPR;
2146 case UNGE_EXPR:
2147 return LT_EXPR;
2148 case UNLT_EXPR:
2149 return GE_EXPR;
2150 case UNLE_EXPR:
2151 return GT_EXPR;
2152 case ORDERED_EXPR:
2153 return UNORDERED_EXPR;
2154 case UNORDERED_EXPR:
2155 return ORDERED_EXPR;
2156 default:
2157 gcc_unreachable ();
2161 /* Similar, but return the comparison that results if the operands are
2162 swapped. This is safe for floating-point. */
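/* For example, a < b is equivalent to b > a, so LT_EXPR maps to GT_EXPR,
   while symmetric codes such as EQ_EXPR and UNORDERED_EXPR map to
   themselves. */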
2164 enum tree_code
2165 swap_tree_comparison (enum tree_code code)
2167 switch (code)
2169 case EQ_EXPR:
2170 case NE_EXPR:
2171 case ORDERED_EXPR:
2172 case UNORDERED_EXPR:
2173 case LTGT_EXPR:
2174 case UNEQ_EXPR:
2175 return code;
2176 case GT_EXPR:
2177 return LT_EXPR;
2178 case GE_EXPR:
2179 return LE_EXPR;
2180 case LT_EXPR:
2181 return GT_EXPR;
2182 case LE_EXPR:
2183 return GE_EXPR;
2184 case UNGT_EXPR:
2185 return UNLT_EXPR;
2186 case UNGE_EXPR:
2187 return UNLE_EXPR;
2188 case UNLT_EXPR:
2189 return UNGT_EXPR;
2190 case UNLE_EXPR:
2191 return UNGE_EXPR;
2192 default:
2193 gcc_unreachable ();
2198 /* Convert a comparison tree code from an enum tree_code representation
2199 into a compcode bit-based encoding. This function is the inverse of
2200 compcode_to_comparison. */
2202 static enum comparison_code
2203 comparison_to_compcode (enum tree_code code)
2205 switch (code)
2207 case LT_EXPR:
2208 return COMPCODE_LT;
2209 case EQ_EXPR:
2210 return COMPCODE_EQ;
2211 case LE_EXPR:
2212 return COMPCODE_LE;
2213 case GT_EXPR:
2214 return COMPCODE_GT;
2215 case NE_EXPR:
2216 return COMPCODE_NE;
2217 case GE_EXPR:
2218 return COMPCODE_GE;
2219 case ORDERED_EXPR:
2220 return COMPCODE_ORD;
2221 case UNORDERED_EXPR:
2222 return COMPCODE_UNORD;
2223 case UNLT_EXPR:
2224 return COMPCODE_UNLT;
2225 case UNEQ_EXPR:
2226 return COMPCODE_UNEQ;
2227 case UNLE_EXPR:
2228 return COMPCODE_UNLE;
2229 case UNGT_EXPR:
2230 return COMPCODE_UNGT;
2231 case LTGT_EXPR:
2232 return COMPCODE_LTGT;
2233 case UNGE_EXPR:
2234 return COMPCODE_UNGE;
2235 default:
2236 gcc_unreachable ();
2240 /* Convert a compcode bit-based encoding of a comparison operator back
2241 to GCC's enum tree_code representation. This function is the
2242 inverse of comparison_to_compcode. */
2244 static enum tree_code
2245 compcode_to_comparison (enum comparison_code code)
2247 switch (code)
2249 case COMPCODE_LT:
2250 return LT_EXPR;
2251 case COMPCODE_EQ:
2252 return EQ_EXPR;
2253 case COMPCODE_LE:
2254 return LE_EXPR;
2255 case COMPCODE_GT:
2256 return GT_EXPR;
2257 case COMPCODE_NE:
2258 return NE_EXPR;
2259 case COMPCODE_GE:
2260 return GE_EXPR;
2261 case COMPCODE_ORD:
2262 return ORDERED_EXPR;
2263 case COMPCODE_UNORD:
2264 return UNORDERED_EXPR;
2265 case COMPCODE_UNLT:
2266 return UNLT_EXPR;
2267 case COMPCODE_UNEQ:
2268 return UNEQ_EXPR;
2269 case COMPCODE_UNLE:
2270 return UNLE_EXPR;
2271 case COMPCODE_UNGT:
2272 return UNGT_EXPR;
2273 case COMPCODE_LTGT:
2274 return LTGT_EXPR;
2275 case COMPCODE_UNGE:
2276 return UNGE_EXPR;
2277 default:
2278 gcc_unreachable ();
2282 /* Return a tree for the comparison which is the combination of
2283 doing the AND or OR (depending on CODE) of the two operations LCODE
2284 and RCODE on the identical operands LL_ARG and LR_ARG. Take into account
2285 the possibility of trapping if the mode has NaNs, and return NULL_TREE
2286 if this makes the transformation invalid. */
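/* For instance, folding (a < b) && (a == b) ANDs COMPCODE_LT (1) with
   COMPCODE_EQ (2), giving COMPCODE_FALSE (0), while (a < b) || (a == b)
   ORs them into COMPCODE_LE (3), i.e. a <= b. */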
2288 tree
2289 combine_comparisons (location_t loc,
2290 enum tree_code code, enum tree_code lcode,
2291 enum tree_code rcode, tree truth_type,
2292 tree ll_arg, tree lr_arg)
2294 bool honor_nans = HONOR_NANS (TYPE_MODE (TREE_TYPE (ll_arg)));
2295 enum comparison_code lcompcode = comparison_to_compcode (lcode);
2296 enum comparison_code rcompcode = comparison_to_compcode (rcode);
2297 int compcode;
2299 switch (code)
2301 case TRUTH_AND_EXPR: case TRUTH_ANDIF_EXPR:
2302 compcode = lcompcode & rcompcode;
2303 break;
2305 case TRUTH_OR_EXPR: case TRUTH_ORIF_EXPR:
2306 compcode = lcompcode | rcompcode;
2307 break;
2309 default:
2310 return NULL_TREE;
2313 if (!honor_nans)
2315 /* Eliminate unordered comparisons, as well as LTGT and ORD
2316 which are not used unless the mode has NaNs. */
2317 compcode &= ~COMPCODE_UNORD;
2318 if (compcode == COMPCODE_LTGT)
2319 compcode = COMPCODE_NE;
2320 else if (compcode == COMPCODE_ORD)
2321 compcode = COMPCODE_TRUE;
2323 else if (flag_trapping_math)
2325 /* Check that the original operation and the optimized ones will trap
2326 under the same condition. */
2327 bool ltrap = (lcompcode & COMPCODE_UNORD) == 0
2328 && (lcompcode != COMPCODE_EQ)
2329 && (lcompcode != COMPCODE_ORD);
2330 bool rtrap = (rcompcode & COMPCODE_UNORD) == 0
2331 && (rcompcode != COMPCODE_EQ)
2332 && (rcompcode != COMPCODE_ORD);
2333 bool trap = (compcode & COMPCODE_UNORD) == 0
2334 && (compcode != COMPCODE_EQ)
2335 && (compcode != COMPCODE_ORD);
2337 /* In a short-circuited boolean expression the LHS might be
2338 such that the RHS, if evaluated, will never trap. For
2339 example, in ORD (x, y) && (x < y), we evaluate the RHS only
2340 if neither x nor y is NaN. (This is a mixed blessing: for
2341 example, the expression above will never trap, hence
2342 optimizing it to x < y would be invalid). */
2343 if ((code == TRUTH_ORIF_EXPR && (lcompcode & COMPCODE_UNORD))
2344 || (code == TRUTH_ANDIF_EXPR && !(lcompcode & COMPCODE_UNORD)))
2345 rtrap = false;
2347 /* If the comparison was short-circuited, and only the RHS
2348 trapped, we may now generate a spurious trap. */
2349 if (rtrap && !ltrap
2350 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR))
2351 return NULL_TREE;
2353 /* If we changed the conditions that cause a trap, we lose. */
2354 if ((ltrap || rtrap) != trap)
2355 return NULL_TREE;
2358 if (compcode == COMPCODE_TRUE)
2359 return constant_boolean_node (true, truth_type);
2360 else if (compcode == COMPCODE_FALSE)
2361 return constant_boolean_node (false, truth_type);
2362 else
2364 enum tree_code tcode;
2366 tcode = compcode_to_comparison ((enum comparison_code) compcode);
2367 return fold_build2_loc (loc, tcode, truth_type, ll_arg, lr_arg);
2371 /* Return nonzero if two operands (typically of the same tree node)
2372 are necessarily equal. If either argument has side-effects this
2373 function returns zero. FLAGS modifies behavior as follows:
2375 If OEP_ONLY_CONST is set, only return nonzero for constants.
2376 This function tests whether the operands are indistinguishable;
2377 it does not test whether they are equal using C's == operation.
2378 The distinction is important for IEEE floating point, because
2379 (1) -0.0 and 0.0 are distinguishable, but -0.0==0.0, and
2380 (2) two NaNs may be indistinguishable, but NaN!=NaN.
2382 If OEP_ONLY_CONST is unset, a VAR_DECL is considered equal to itself
2383 even though it may hold multiple values during a function.
2384 This is because a GCC tree node guarantees that nothing else is
2385 executed between the evaluation of its "operands" (which may often
2386 be evaluated in arbitrary order). Hence if the operands themselves
2387 don't side-effect, the VAR_DECLs, PARM_DECLs etc... must hold the
2388 same value in each operand/subexpression. Hence leaving OEP_ONLY_CONST
2389 unset means assuming isochronic (or instantaneous) tree equivalence.
2390 Unless comparing arbitrary expression trees, such as from different
2391 statements, this flag can usually be left unset.
2393 If OEP_PURE_SAME is set, then pure functions with identical arguments
2394 are considered the same. It is used when the caller has other ways
2395 to ensure that global memory is unchanged in between. */
2397 int
2398 operand_equal_p (const_tree arg0, const_tree arg1, unsigned int flags)
2400 /* If either is ERROR_MARK, they aren't equal. */
2401 if (TREE_CODE (arg0) == ERROR_MARK || TREE_CODE (arg1) == ERROR_MARK
2402 || TREE_TYPE (arg0) == error_mark_node
2403 || TREE_TYPE (arg1) == error_mark_node)
2404 return 0;
2406 /* Similar, if either does not have a type (like a released SSA name),
2407 they aren't equal. */
2408 if (!TREE_TYPE (arg0) || !TREE_TYPE (arg1))
2409 return 0;
2411 /* Check equality of integer constants before bailing out due to
2412 precision differences. */
2413 if (TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
2414 return tree_int_cst_equal (arg0, arg1);
2416 /* If both types don't have the same signedness, then we can't consider
2417 them equal. We must check this before the STRIP_NOPS calls
2418 because they may change the signedness of the arguments. As pointers
2419 strictly don't have a signedness, require either two pointers or
2420 two non-pointers as well. */
2421 if (TYPE_UNSIGNED (TREE_TYPE (arg0)) != TYPE_UNSIGNED (TREE_TYPE (arg1))
2422 || POINTER_TYPE_P (TREE_TYPE (arg0)) != POINTER_TYPE_P (TREE_TYPE (arg1)))
2423 return 0;
2425 /* We cannot consider pointers to different address space equal. */
2426 if (POINTER_TYPE_P (TREE_TYPE (arg0)) && POINTER_TYPE_P (TREE_TYPE (arg1))
2427 && (TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (arg0)))
2428 != TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (arg1)))))
2429 return 0;
2431 /* If both types don't have the same precision, then it is not safe
2432 to strip NOPs. */
2433 if (TYPE_PRECISION (TREE_TYPE (arg0)) != TYPE_PRECISION (TREE_TYPE (arg1)))
2434 return 0;
2436 STRIP_NOPS (arg0);
2437 STRIP_NOPS (arg1);
2439 /* In case both args are comparisons but with different comparison
2440 code, try to swap the comparison operands of one arg to produce
2441 a match and compare that variant. */
2442 if (TREE_CODE (arg0) != TREE_CODE (arg1)
2443 && COMPARISON_CLASS_P (arg0)
2444 && COMPARISON_CLASS_P (arg1))
2446 enum tree_code swap_code = swap_tree_comparison (TREE_CODE (arg1));
2448 if (TREE_CODE (arg0) == swap_code)
2449 return operand_equal_p (TREE_OPERAND (arg0, 0),
2450 TREE_OPERAND (arg1, 1), flags)
2451 && operand_equal_p (TREE_OPERAND (arg0, 1),
2452 TREE_OPERAND (arg1, 0), flags);
2455 if (TREE_CODE (arg0) != TREE_CODE (arg1)
2456 /* This is needed for conversions and for COMPONENT_REF.
2457 Might as well play it safe and always test this. */
2458 || TREE_CODE (TREE_TYPE (arg0)) == ERROR_MARK
2459 || TREE_CODE (TREE_TYPE (arg1)) == ERROR_MARK
2460 || TYPE_MODE (TREE_TYPE (arg0)) != TYPE_MODE (TREE_TYPE (arg1)))
2461 return 0;
2463 /* If ARG0 and ARG1 are the same SAVE_EXPR, they are necessarily equal.
2464 We don't care about side effects in that case because the SAVE_EXPR
2465 takes care of that for us. In all other cases, two expressions are
2466 equal if they have no side effects. If we have two identical
2467 expressions with side effects that should be treated the same due
2468 to the only side effects being identical SAVE_EXPR's, that will
2469 be detected in the recursive calls below. */
2470 if (arg0 == arg1 && ! (flags & OEP_ONLY_CONST)
2471 && (TREE_CODE (arg0) == SAVE_EXPR
2472 || (! TREE_SIDE_EFFECTS (arg0) && ! TREE_SIDE_EFFECTS (arg1))))
2473 return 1;
2475 /* Next handle constant cases, those for which we can return 1 even
2476 if ONLY_CONST is set. */
2477 if (TREE_CONSTANT (arg0) && TREE_CONSTANT (arg1))
2478 switch (TREE_CODE (arg0))
2480 case INTEGER_CST:
2481 return tree_int_cst_equal (arg0, arg1);
2483 case FIXED_CST:
2484 return FIXED_VALUES_IDENTICAL (TREE_FIXED_CST (arg0),
2485 TREE_FIXED_CST (arg1));
2487 case REAL_CST:
2488 if (REAL_VALUES_IDENTICAL (TREE_REAL_CST (arg0),
2489 TREE_REAL_CST (arg1)))
2490 return 1;
2493 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0))))
2495 /* If we do not distinguish between signed and unsigned zero,
2496 consider them equal. */
2497 if (real_zerop (arg0) && real_zerop (arg1))
2498 return 1;
2500 return 0;
2502 case VECTOR_CST:
2504 tree v1, v2;
2506 v1 = TREE_VECTOR_CST_ELTS (arg0);
2507 v2 = TREE_VECTOR_CST_ELTS (arg1);
2508 while (v1 && v2)
2510 if (!operand_equal_p (TREE_VALUE (v1), TREE_VALUE (v2),
2511 flags))
2512 return 0;
2513 v1 = TREE_CHAIN (v1);
2514 v2 = TREE_CHAIN (v2);
2517 return v1 == v2;
2520 case COMPLEX_CST:
2521 return (operand_equal_p (TREE_REALPART (arg0), TREE_REALPART (arg1),
2522 flags)
2523 && operand_equal_p (TREE_IMAGPART (arg0), TREE_IMAGPART (arg1),
2524 flags));
2526 case STRING_CST:
2527 return (TREE_STRING_LENGTH (arg0) == TREE_STRING_LENGTH (arg1)
2528 && ! memcmp (TREE_STRING_POINTER (arg0),
2529 TREE_STRING_POINTER (arg1),
2530 TREE_STRING_LENGTH (arg0)));
2532 case ADDR_EXPR:
2533 return operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 0),
2534 0);
2535 default:
2536 break;
2539 if (flags & OEP_ONLY_CONST)
2540 return 0;
2542 /* Define macros to test an operand from arg0 and arg1 for equality and a
2543 variant that allows null and views null as being different from any
2544 non-null value. In the latter case, if either is null, then both
2545 must be; otherwise, do the normal comparison. */
2546 #define OP_SAME(N) operand_equal_p (TREE_OPERAND (arg0, N), \
2547 TREE_OPERAND (arg1, N), flags)
2549 #define OP_SAME_WITH_NULL(N) \
2550 ((!TREE_OPERAND (arg0, N) || !TREE_OPERAND (arg1, N)) \
2551 ? TREE_OPERAND (arg0, N) == TREE_OPERAND (arg1, N) : OP_SAME (N))
2553 switch (TREE_CODE_CLASS (TREE_CODE (arg0)))
2555 case tcc_unary:
2556 /* Two conversions are equal only if signedness and modes match. */
2557 switch (TREE_CODE (arg0))
2559 CASE_CONVERT:
2560 case FIX_TRUNC_EXPR:
2561 if (TYPE_UNSIGNED (TREE_TYPE (arg0))
2562 != TYPE_UNSIGNED (TREE_TYPE (arg1)))
2563 return 0;
2564 break;
2565 default:
2566 break;
2569 return OP_SAME (0);
2572 case tcc_comparison:
2573 case tcc_binary:
2574 if (OP_SAME (0) && OP_SAME (1))
2575 return 1;
2577 /* For commutative ops, allow the other order. */
2578 return (commutative_tree_code (TREE_CODE (arg0))
2579 && operand_equal_p (TREE_OPERAND (arg0, 0),
2580 TREE_OPERAND (arg1, 1), flags)
2581 && operand_equal_p (TREE_OPERAND (arg0, 1),
2582 TREE_OPERAND (arg1, 0), flags));
2584 case tcc_reference:
2585 /* If either of the pointer (or reference) expressions we are
2586 dereferencing contains a side effect, these cannot be equal. */
2587 if (TREE_SIDE_EFFECTS (arg0)
2588 || TREE_SIDE_EFFECTS (arg1))
2589 return 0;
2591 switch (TREE_CODE (arg0))
2593 case INDIRECT_REF:
2594 case REALPART_EXPR:
2595 case IMAGPART_EXPR:
2596 return OP_SAME (0);
2598 case MEM_REF:
2599 /* Require equal access sizes, and similar pointer types.
2600 We can have incomplete types for array references of
2601 variable-sized arrays from the Fortran frontend
2602 though. */
2603 return ((TYPE_SIZE (TREE_TYPE (arg0)) == TYPE_SIZE (TREE_TYPE (arg1))
2604 || (TYPE_SIZE (TREE_TYPE (arg0))
2605 && TYPE_SIZE (TREE_TYPE (arg1))
2606 && operand_equal_p (TYPE_SIZE (TREE_TYPE (arg0)),
2607 TYPE_SIZE (TREE_TYPE (arg1)), flags)))
2608 && (TYPE_MAIN_VARIANT (TREE_TYPE (TREE_OPERAND (arg0, 1)))
2609 == TYPE_MAIN_VARIANT (TREE_TYPE (TREE_OPERAND (arg1, 1))))
2610 && OP_SAME (0) && OP_SAME (1));
2612 case ARRAY_REF:
2613 case ARRAY_RANGE_REF:
2614 /* Operands 2 and 3 may be null.
2615 Compare the array index by value first if it is constant, as the
2616 indices may have different types but the same value here. */
2617 return (OP_SAME (0)
2618 && (tree_int_cst_equal (TREE_OPERAND (arg0, 1),
2619 TREE_OPERAND (arg1, 1))
2620 || OP_SAME (1))
2621 && OP_SAME_WITH_NULL (2)
2622 && OP_SAME_WITH_NULL (3));
2624 case COMPONENT_REF:
2625 /* Handle operand 2 the same as for ARRAY_REF. Operand 0
2626 may be NULL when we're called to compare MEM_EXPRs. */
2627 return OP_SAME_WITH_NULL (0)
2628 && OP_SAME (1)
2629 && OP_SAME_WITH_NULL (2);
2631 case BIT_FIELD_REF:
2632 return OP_SAME (0) && OP_SAME (1) && OP_SAME (2);
2634 default:
2635 return 0;
2638 case tcc_expression:
2639 switch (TREE_CODE (arg0))
2641 case ADDR_EXPR:
2642 case TRUTH_NOT_EXPR:
2643 return OP_SAME (0);
2645 case TRUTH_ANDIF_EXPR:
2646 case TRUTH_ORIF_EXPR:
2647 return OP_SAME (0) && OP_SAME (1);
2649 case FMA_EXPR:
2650 case WIDEN_MULT_PLUS_EXPR:
2651 case WIDEN_MULT_MINUS_EXPR:
2652 if (!OP_SAME (2))
2653 return 0;
2654 /* The multiplication operands are commutative. */
2655 /* FALLTHRU */
2657 case TRUTH_AND_EXPR:
2658 case TRUTH_OR_EXPR:
2659 case TRUTH_XOR_EXPR:
2660 if (OP_SAME (0) && OP_SAME (1))
2661 return 1;
2663 /* Otherwise take into account this is a commutative operation. */
2664 return (operand_equal_p (TREE_OPERAND (arg0, 0),
2665 TREE_OPERAND (arg1, 1), flags)
2666 && operand_equal_p (TREE_OPERAND (arg0, 1),
2667 TREE_OPERAND (arg1, 0), flags));
2669 case COND_EXPR:
2670 case VEC_COND_EXPR:
2671 case DOT_PROD_EXPR:
2672 return OP_SAME (0) && OP_SAME (1) && OP_SAME (2);
2674 default:
2675 return 0;
2678 case tcc_vl_exp:
2679 switch (TREE_CODE (arg0))
2681 case CALL_EXPR:
2682 /* If the CALL_EXPRs call different functions, then they
2683 clearly cannot be equal. */
2684 if (! operand_equal_p (CALL_EXPR_FN (arg0), CALL_EXPR_FN (arg1),
2685 flags))
2686 return 0;
2689 unsigned int cef = call_expr_flags (arg0);
2690 if (flags & OEP_PURE_SAME)
2691 cef &= ECF_CONST | ECF_PURE;
2692 else
2693 cef &= ECF_CONST;
2694 if (!cef)
2695 return 0;
2698 /* Now see if all the arguments are the same. */
2700 const_call_expr_arg_iterator iter0, iter1;
2701 const_tree a0, a1;
2702 for (a0 = first_const_call_expr_arg (arg0, &iter0),
2703 a1 = first_const_call_expr_arg (arg1, &iter1);
2704 a0 && a1;
2705 a0 = next_const_call_expr_arg (&iter0),
2706 a1 = next_const_call_expr_arg (&iter1))
2707 if (! operand_equal_p (a0, a1, flags))
2708 return 0;
2710 /* If we get here and both argument lists are exhausted
2711 then the CALL_EXPRs are equal. */
2712 return ! (a0 || a1);
2714 default:
2715 return 0;
2718 case tcc_declaration:
2719 /* Consider __builtin_sqrt equal to sqrt. */
2720 return (TREE_CODE (arg0) == FUNCTION_DECL
2721 && DECL_BUILT_IN (arg0) && DECL_BUILT_IN (arg1)
2722 && DECL_BUILT_IN_CLASS (arg0) == DECL_BUILT_IN_CLASS (arg1)
2723 && DECL_FUNCTION_CODE (arg0) == DECL_FUNCTION_CODE (arg1));
2725 default:
2726 return 0;
2729 #undef OP_SAME
2730 #undef OP_SAME_WITH_NULL
2733 /* Similar to operand_equal_p, but see if ARG0 might have been made by
2734 shorten_compare from ARG1 when ARG1 was being compared with OTHER.
2736 When in doubt, return 0. */
2738 static int
2739 operand_equal_for_comparison_p (tree arg0, tree arg1, tree other)
2741 int unsignedp1, unsignedpo;
2742 tree primarg0, primarg1, primother;
2743 unsigned int correct_width;
2745 if (operand_equal_p (arg0, arg1, 0))
2746 return 1;
2748 if (! INTEGRAL_TYPE_P (TREE_TYPE (arg0))
2749 || ! INTEGRAL_TYPE_P (TREE_TYPE (arg1)))
2750 return 0;
2752 /* Discard any conversions that don't change the modes of ARG0 and ARG1
2753 and see if the inner values are the same. This removes any
2754 signedness comparison, which doesn't matter here. */
2755 primarg0 = arg0, primarg1 = arg1;
2756 STRIP_NOPS (primarg0);
2757 STRIP_NOPS (primarg1);
2758 if (operand_equal_p (primarg0, primarg1, 0))
2759 return 1;
2761 /* Duplicate what shorten_compare does to ARG1 and see if that gives the
2762 actual comparison operand, ARG0.
2764 First throw away any conversions to wider types
2765 already present in the operands. */
2767 primarg1 = get_narrower (arg1, &unsignedp1);
2768 primother = get_narrower (other, &unsignedpo);
2770 correct_width = TYPE_PRECISION (TREE_TYPE (arg1));
2771 if (unsignedp1 == unsignedpo
2772 && TYPE_PRECISION (TREE_TYPE (primarg1)) < correct_width
2773 && TYPE_PRECISION (TREE_TYPE (primother)) < correct_width)
2775 tree type = TREE_TYPE (arg0);
2777 /* Make sure shorter operand is extended the right way
2778 to match the longer operand. */
2779 primarg1 = fold_convert (signed_or_unsigned_type_for
2780 (unsignedp1, TREE_TYPE (primarg1)), primarg1);
2782 if (operand_equal_p (arg0, fold_convert (type, primarg1), 0))
2783 return 1;
2786 return 0;
2789 /* See if ARG is an expression that is either a comparison or is performing
2790 arithmetic on comparisons. The comparisons must only be comparing
2791 two different values, which will be stored in *CVAL1 and *CVAL2; if
2792 they are nonzero it means that some operands have already been found.
2793 No variables may be used anywhere else in the expression except in the
2794 comparisons. If SAVE_P is true it means we removed a SAVE_EXPR around
2795 the expression and save_expr needs to be called with CVAL1 and CVAL2.
2797 If this is true, return 1. Otherwise, return zero. */
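/* For example, (a == b) | (a < b) qualifies with *CVAL1 == a and
   *CVAL2 == b, whereas (a < b) | (c < d) does not, since it compares
   four distinct values. */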
2799 static int
2800 twoval_comparison_p (tree arg, tree *cval1, tree *cval2, int *save_p)
2802 enum tree_code code = TREE_CODE (arg);
2803 enum tree_code_class tclass = TREE_CODE_CLASS (code);
2805 /* We can handle some of the tcc_expression cases here. */
2806 if (tclass == tcc_expression && code == TRUTH_NOT_EXPR)
2807 tclass = tcc_unary;
2808 else if (tclass == tcc_expression
2809 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR
2810 || code == COMPOUND_EXPR))
2811 tclass = tcc_binary;
2813 else if (tclass == tcc_expression && code == SAVE_EXPR
2814 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (arg, 0)))
2816 /* If we've already found a CVAL1 or CVAL2, this expression is
2817 too complex to handle. */
2818 if (*cval1 || *cval2)
2819 return 0;
2821 tclass = tcc_unary;
2822 *save_p = 1;
2825 switch (tclass)
2827 case tcc_unary:
2828 return twoval_comparison_p (TREE_OPERAND (arg, 0), cval1, cval2, save_p);
2830 case tcc_binary:
2831 return (twoval_comparison_p (TREE_OPERAND (arg, 0), cval1, cval2, save_p)
2832 && twoval_comparison_p (TREE_OPERAND (arg, 1),
2833 cval1, cval2, save_p));
2835 case tcc_constant:
2836 return 1;
2838 case tcc_expression:
2839 if (code == COND_EXPR)
2840 return (twoval_comparison_p (TREE_OPERAND (arg, 0),
2841 cval1, cval2, save_p)
2842 && twoval_comparison_p (TREE_OPERAND (arg, 1),
2843 cval1, cval2, save_p)
2844 && twoval_comparison_p (TREE_OPERAND (arg, 2),
2845 cval1, cval2, save_p));
2846 return 0;
2848 case tcc_comparison:
2849 /* First see if we can handle the first operand, then the second. For
2850 the second operand, we know *CVAL1 can't be zero. It must be that
2851 one side of the comparison is each of the values; test for the
2852 case where this isn't true by failing if the two operands
2853 are the same. */
2855 if (operand_equal_p (TREE_OPERAND (arg, 0),
2856 TREE_OPERAND (arg, 1), 0))
2857 return 0;
2859 if (*cval1 == 0)
2860 *cval1 = TREE_OPERAND (arg, 0);
2861 else if (operand_equal_p (*cval1, TREE_OPERAND (arg, 0), 0))
2862 ;
2863 else if (*cval2 == 0)
2864 *cval2 = TREE_OPERAND (arg, 0);
2865 else if (operand_equal_p (*cval2, TREE_OPERAND (arg, 0), 0))
2866 ;
2867 else
2868 return 0;
2870 if (operand_equal_p (*cval1, TREE_OPERAND (arg, 1), 0))
2871 ;
2872 else if (*cval2 == 0)
2873 *cval2 = TREE_OPERAND (arg, 1);
2874 else if (operand_equal_p (*cval2, TREE_OPERAND (arg, 1), 0))
2875 ;
2876 else
2877 return 0;
2879 return 1;
2881 default:
2882 return 0;
2886 /* ARG is a tree that is known to contain just arithmetic operations and
2887 comparisons. Evaluate the operations in the tree substituting NEW0 for
2888 any occurrence of OLD0 as an operand of a comparison and likewise for
2889 NEW1 and OLD1. */
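/* For example, substituting OLD0 -> NEW0 and OLD1 -> NEW1 in the tree
   (old0 < old1) | (old0 == 4) produces (new0 < new1) | (new0 == 4). */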
2891 static tree
2892 eval_subst (location_t loc, tree arg, tree old0, tree new0,
2893 tree old1, tree new1)
2895 tree type = TREE_TYPE (arg);
2896 enum tree_code code = TREE_CODE (arg);
2897 enum tree_code_class tclass = TREE_CODE_CLASS (code);
2899 /* We can handle some of the tcc_expression cases here. */
2900 if (tclass == tcc_expression && code == TRUTH_NOT_EXPR)
2901 tclass = tcc_unary;
2902 else if (tclass == tcc_expression
2903 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR))
2904 tclass = tcc_binary;
2906 switch (tclass)
2908 case tcc_unary:
2909 return fold_build1_loc (loc, code, type,
2910 eval_subst (loc, TREE_OPERAND (arg, 0),
2911 old0, new0, old1, new1));
2913 case tcc_binary:
2914 return fold_build2_loc (loc, code, type,
2915 eval_subst (loc, TREE_OPERAND (arg, 0),
2916 old0, new0, old1, new1),
2917 eval_subst (loc, TREE_OPERAND (arg, 1),
2918 old0, new0, old1, new1));
2920 case tcc_expression:
2921 switch (code)
2923 case SAVE_EXPR:
2924 return eval_subst (loc, TREE_OPERAND (arg, 0), old0, new0,
2925 old1, new1);
2927 case COMPOUND_EXPR:
2928 return eval_subst (loc, TREE_OPERAND (arg, 1), old0, new0,
2929 old1, new1);
2931 case COND_EXPR:
2932 return fold_build3_loc (loc, code, type,
2933 eval_subst (loc, TREE_OPERAND (arg, 0),
2934 old0, new0, old1, new1),
2935 eval_subst (loc, TREE_OPERAND (arg, 1),
2936 old0, new0, old1, new1),
2937 eval_subst (loc, TREE_OPERAND (arg, 2),
2938 old0, new0, old1, new1));
2939 default:
2940 break;
2942 /* Fall through - ??? */
2944 case tcc_comparison:
2946 tree arg0 = TREE_OPERAND (arg, 0);
2947 tree arg1 = TREE_OPERAND (arg, 1);
2949 /* We need to check both for exact equality and tree equality. The
2950 former will be true if the operand has a side-effect. In that
2951 case, we know the operand occurred exactly once. */
2953 if (arg0 == old0 || operand_equal_p (arg0, old0, 0))
2954 arg0 = new0;
2955 else if (arg0 == old1 || operand_equal_p (arg0, old1, 0))
2956 arg0 = new1;
2958 if (arg1 == old0 || operand_equal_p (arg1, old0, 0))
2959 arg1 = new0;
2960 else if (arg1 == old1 || operand_equal_p (arg1, old1, 0))
2961 arg1 = new1;
2963 return fold_build2_loc (loc, code, type, arg0, arg1);
2966 default:
2967 return arg;
2971 /* Return a tree for the case when the result of an expression is RESULT
2972 converted to TYPE and OMITTED was previously an operand of the expression
2973 but is now not needed (e.g., we folded OMITTED * 0).
2975 If OMITTED has side effects, we must evaluate it. Otherwise, just do
2976 the conversion of RESULT to TYPE. */
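/* For example, when f () * 0 is folded to 0, this is called with RESULT
   0 and OMITTED f (); since the call has side effects, the result is
   the COMPOUND_EXPR (f (), 0) rather than plain 0. */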
2978 tree
2979 omit_one_operand_loc (location_t loc, tree type, tree result, tree omitted)
2981 tree t = fold_convert_loc (loc, type, result);
2983 /* If the resulting operand is an empty statement, just return the omitted
2984 statement cast to void. */
2985 if (IS_EMPTY_STMT (t) && TREE_SIDE_EFFECTS (omitted))
2986 return build1_loc (loc, NOP_EXPR, void_type_node,
2987 fold_ignored_result (omitted));
2989 if (TREE_SIDE_EFFECTS (omitted))
2990 return build2_loc (loc, COMPOUND_EXPR, type,
2991 fold_ignored_result (omitted), t);
2993 return non_lvalue_loc (loc, t);
2996 /* Similar, but call pedantic_non_lvalue instead of non_lvalue. */
2998 static tree
2999 pedantic_omit_one_operand_loc (location_t loc, tree type, tree result,
3000 tree omitted)
3002 tree t = fold_convert_loc (loc, type, result);
3004 /* If the resulting operand is an empty statement, just return the omitted
3005 statement cast to void. */
3006 if (IS_EMPTY_STMT (t) && TREE_SIDE_EFFECTS (omitted))
3007 return build1_loc (loc, NOP_EXPR, void_type_node,
3008 fold_ignored_result (omitted));
3010 if (TREE_SIDE_EFFECTS (omitted))
3011 return build2_loc (loc, COMPOUND_EXPR, type,
3012 fold_ignored_result (omitted), t);
3014 return pedantic_non_lvalue_loc (loc, t);
3017 /* Return a tree for the case when the result of an expression is RESULT
3018 converted to TYPE and OMITTED1 and OMITTED2 were previously operands
3019 of the expression but are now not needed.
3021 If OMITTED1 or OMITTED2 has side effects, they must be evaluated.
3022 If both OMITTED1 and OMITTED2 have side effects, OMITTED1 is
3023 evaluated before OMITTED2. Otherwise, if neither has side effects,
3024 just do the conversion of RESULT to TYPE. */
3026 tree
3027 omit_two_operands_loc (location_t loc, tree type, tree result,
3028 tree omitted1, tree omitted2)
3030 tree t = fold_convert_loc (loc, type, result);
3032 if (TREE_SIDE_EFFECTS (omitted2))
3033 t = build2_loc (loc, COMPOUND_EXPR, type, omitted2, t);
3034 if (TREE_SIDE_EFFECTS (omitted1))
3035 t = build2_loc (loc, COMPOUND_EXPR, type, omitted1, t);
3037 return TREE_CODE (t) != COMPOUND_EXPR ? non_lvalue_loc (loc, t) : t;
3041 /* Return a simplified tree node for the truth-negation of ARG. This
3042 never alters ARG itself. We assume that ARG is an operation that
3043 returns a truth value (0 or 1).
3045 FIXME: one would think we would fold the result, but it causes
3046 problems with the dominator optimizer. */
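/* For example, the negation of (a && b) is built as (!a || !b), and a
   comparison such as a < b is inverted via invert_tree_comparison. */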
3048 tree
3049 fold_truth_not_expr (location_t loc, tree arg)
3051 tree type = TREE_TYPE (arg);
3052 enum tree_code code = TREE_CODE (arg);
3053 location_t loc1, loc2;
3055 /* If this is a comparison, we can simply invert it, except for
3056 floating-point non-equality comparisons, in which case we just
3057 enclose a TRUTH_NOT_EXPR around what we have. */
3059 if (TREE_CODE_CLASS (code) == tcc_comparison)
3061 tree op_type = TREE_TYPE (TREE_OPERAND (arg, 0));
3062 if (FLOAT_TYPE_P (op_type)
3063 && flag_trapping_math
3064 && code != ORDERED_EXPR && code != UNORDERED_EXPR
3065 && code != NE_EXPR && code != EQ_EXPR)
3066 return NULL_TREE;
3068 code = invert_tree_comparison (code, HONOR_NANS (TYPE_MODE (op_type)));
3069 if (code == ERROR_MARK)
3070 return NULL_TREE;
3072 return build2_loc (loc, code, type, TREE_OPERAND (arg, 0),
3073 TREE_OPERAND (arg, 1));
3076 switch (code)
3078 case INTEGER_CST:
3079 return constant_boolean_node (integer_zerop (arg), type);
3081 case TRUTH_AND_EXPR:
3082 loc1 = EXPR_LOCATION (TREE_OPERAND (arg, 0));
3083 loc2 = EXPR_LOCATION (TREE_OPERAND (arg, 1));
3084 if (loc1 == UNKNOWN_LOCATION)
3085 loc1 = loc;
3086 if (loc2 == UNKNOWN_LOCATION)
3087 loc2 = loc;
3088 return build2_loc (loc, TRUTH_OR_EXPR, type,
3089 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)),
3090 invert_truthvalue_loc (loc2, TREE_OPERAND (arg, 1)));
3092 case TRUTH_OR_EXPR:
3093 loc1 = EXPR_LOCATION (TREE_OPERAND (arg, 0));
3094 loc2 = EXPR_LOCATION (TREE_OPERAND (arg, 1));
3095 if (loc1 == UNKNOWN_LOCATION)
3096 loc1 = loc;
3097 if (loc2 == UNKNOWN_LOCATION)
3098 loc2 = loc;
3099 return build2_loc (loc, TRUTH_AND_EXPR, type,
3100 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)),
3101 invert_truthvalue_loc (loc2, TREE_OPERAND (arg, 1)));
3103 case TRUTH_XOR_EXPR:
3104 /* Here we can invert either operand. We invert the first operand
3105 unless the second operand is a TRUTH_NOT_EXPR in which case our
3106 result is the XOR of the first operand with the inside of the
3107 negation of the second operand. */
3109 if (TREE_CODE (TREE_OPERAND (arg, 1)) == TRUTH_NOT_EXPR)
3110 return build2_loc (loc, TRUTH_XOR_EXPR, type, TREE_OPERAND (arg, 0),
3111 TREE_OPERAND (TREE_OPERAND (arg, 1), 0));
3112 else
3113 return build2_loc (loc, TRUTH_XOR_EXPR, type,
3114 invert_truthvalue_loc (loc, TREE_OPERAND (arg, 0)),
3115 TREE_OPERAND (arg, 1));
3117 case TRUTH_ANDIF_EXPR:
3118 loc1 = EXPR_LOCATION (TREE_OPERAND (arg, 0));
3119 loc2 = EXPR_LOCATION (TREE_OPERAND (arg, 1));
3120 if (loc1 == UNKNOWN_LOCATION)
3121 loc1 = loc;
3122 if (loc2 == UNKNOWN_LOCATION)
3123 loc2 = loc;
3124 return build2_loc (loc, TRUTH_ORIF_EXPR, type,
3125 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)),
3126 invert_truthvalue_loc (loc2, TREE_OPERAND (arg, 1)));
3128 case TRUTH_ORIF_EXPR:
3129 loc1 = EXPR_LOCATION (TREE_OPERAND (arg, 0));
3130 loc2 = EXPR_LOCATION (TREE_OPERAND (arg, 1));
3131 if (loc1 == UNKNOWN_LOCATION)
3132 loc1 = loc;
3133 if (loc2 == UNKNOWN_LOCATION)
3134 loc2 = loc;
3135 return build2_loc (loc, TRUTH_ANDIF_EXPR, type,
3136 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)),
3137 invert_truthvalue_loc (loc2, TREE_OPERAND (arg, 1)));
3139 case TRUTH_NOT_EXPR:
3140 return TREE_OPERAND (arg, 0);
3142 case COND_EXPR:
3144 tree arg1 = TREE_OPERAND (arg, 1);
3145 tree arg2 = TREE_OPERAND (arg, 2);
3147 loc1 = EXPR_LOCATION (TREE_OPERAND (arg, 1));
3148 loc2 = EXPR_LOCATION (TREE_OPERAND (arg, 2));
3149 if (loc1 == UNKNOWN_LOCATION)
3150 loc1 = loc;
3151 if (loc2 == UNKNOWN_LOCATION)
3152 loc2 = loc;
3154 /* A COND_EXPR may have a throw as one operand, which
3155 then has void type. Just leave void operands
3156 as they are. */
3157 return build3_loc (loc, COND_EXPR, type, TREE_OPERAND (arg, 0),
3158 VOID_TYPE_P (TREE_TYPE (arg1))
3159 ? arg1 : invert_truthvalue_loc (loc1, arg1),
3160 VOID_TYPE_P (TREE_TYPE (arg2))
3161 ? arg2 : invert_truthvalue_loc (loc2, arg2));
3164 case COMPOUND_EXPR:
3165 loc1 = EXPR_LOCATION (TREE_OPERAND (arg, 1));
3166 if (loc1 == UNKNOWN_LOCATION)
3167 loc1 = loc;
3168 return build2_loc (loc, COMPOUND_EXPR, type,
3169 TREE_OPERAND (arg, 0),
3170 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 1)));
3172 case NON_LVALUE_EXPR:
3173 loc1 = EXPR_LOCATION (TREE_OPERAND (arg, 0));
3174 if (loc1 == UNKNOWN_LOCATION)
3175 loc1 = loc;
3176 return invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0));
3178 CASE_CONVERT:
3179 if (TREE_CODE (TREE_TYPE (arg)) == BOOLEAN_TYPE)
3180 return build1_loc (loc, TRUTH_NOT_EXPR, type, arg);
3182 /* ... fall through ... */
3184 case FLOAT_EXPR:
3185 loc1 = EXPR_LOCATION (TREE_OPERAND (arg, 0));
3186 if (loc1 == UNKNOWN_LOCATION)
3187 loc1 = loc;
3188 return build1_loc (loc, TREE_CODE (arg), type,
3189 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)));
3191 case BIT_AND_EXPR:
3192 if (!integer_onep (TREE_OPERAND (arg, 1)))
3193 return NULL_TREE;
3194 return build2_loc (loc, EQ_EXPR, type, arg, build_int_cst (type, 0));
3196 case SAVE_EXPR:
3197 return build1_loc (loc, TRUTH_NOT_EXPR, type, arg);
3199 case CLEANUP_POINT_EXPR:
3200 loc1 = EXPR_LOCATION (TREE_OPERAND (arg, 0));
3201 if (loc1 == UNKNOWN_LOCATION)
3202 loc1 = loc;
3203 return build1_loc (loc, CLEANUP_POINT_EXPR, type,
3204 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)));
3206 default:
3207 return NULL_TREE;
3211 /* Return a simplified tree node for the truth-negation of ARG. This
3212 never alters ARG itself. We assume that ARG is an operation that
3213 returns a truth value (0 or 1).
3215 FIXME: one would think we would fold the result, but it causes
3216 problems with the dominator optimizer. */
3218 tree
3219 invert_truthvalue_loc (location_t loc, tree arg)
3221 tree tem;
3223 if (TREE_CODE (arg) == ERROR_MARK)
3224 return arg;
3226 tem = fold_truth_not_expr (loc, arg);
3227 if (!tem)
3228 tem = build1_loc (loc, TRUTH_NOT_EXPR, TREE_TYPE (arg), arg);
3230 return tem;
3233 /* Given a bit-wise operation CODE applied to ARG0 and ARG1, see if both
3234 operands are another bit-wise operation with a common input. If so,
3235 distribute the bit operations to save an operation and possibly two if
3236 constants are involved. For example, convert
3237 (A | B) & (A | C) into A | (B & C)
3238 Further simplification will occur if B and C are constants.
3240 If this optimization cannot be done, 0 will be returned. */
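/* For example, (X | 1) & (X | 3) becomes X | (1 & 3), which then folds
   to X | 1, saving one bitwise operation. */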
3242 static tree
3243 distribute_bit_expr (location_t loc, enum tree_code code, tree type,
3244 tree arg0, tree arg1)
3246 tree common;
3247 tree left, right;
3249 if (TREE_CODE (arg0) != TREE_CODE (arg1)
3250 || TREE_CODE (arg0) == code
3251 || (TREE_CODE (arg0) != BIT_AND_EXPR
3252 && TREE_CODE (arg0) != BIT_IOR_EXPR))
3253 return 0;
3255 if (operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 0), 0))
3257 common = TREE_OPERAND (arg0, 0);
3258 left = TREE_OPERAND (arg0, 1);
3259 right = TREE_OPERAND (arg1, 1);
3261 else if (operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 1), 0))
3263 common = TREE_OPERAND (arg0, 0);
3264 left = TREE_OPERAND (arg0, 1);
3265 right = TREE_OPERAND (arg1, 0);
3267 else if (operand_equal_p (TREE_OPERAND (arg0, 1), TREE_OPERAND (arg1, 0), 0))
3269 common = TREE_OPERAND (arg0, 1);
3270 left = TREE_OPERAND (arg0, 0);
3271 right = TREE_OPERAND (arg1, 1);
3273 else if (operand_equal_p (TREE_OPERAND (arg0, 1), TREE_OPERAND (arg1, 1), 0))
3275 common = TREE_OPERAND (arg0, 1);
3276 left = TREE_OPERAND (arg0, 0);
3277 right = TREE_OPERAND (arg1, 0);
3279 else
3280 return 0;
3282 common = fold_convert_loc (loc, type, common);
3283 left = fold_convert_loc (loc, type, left);
3284 right = fold_convert_loc (loc, type, right);
3285 return fold_build2_loc (loc, TREE_CODE (arg0), type, common,
3286 fold_build2_loc (loc, code, type, left, right));
3289 /* Knowing that ARG0 and ARG1 are both RDIV_EXPRs, simplify a binary operation
3290 with code CODE. This optimization is unsafe. */
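/* It is unsafe because, for instance, rewriting A / C + B / C as
   (A + B) / C changes intermediate rounding, and the addition may
   overflow where the original sums two finite quotients. */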
3291 static tree
3292 distribute_real_division (location_t loc, enum tree_code code, tree type,
3293 tree arg0, tree arg1)
3295 bool mul0 = TREE_CODE (arg0) == MULT_EXPR;
3296 bool mul1 = TREE_CODE (arg1) == MULT_EXPR;
3298 /* (A / C) +- (B / C) -> (A +- B) / C. */
3299 if (mul0 == mul1
3300 && operand_equal_p (TREE_OPERAND (arg0, 1),
3301 TREE_OPERAND (arg1, 1), 0))
3302 return fold_build2_loc (loc, mul0 ? MULT_EXPR : RDIV_EXPR, type,
3303 fold_build2_loc (loc, code, type,
3304 TREE_OPERAND (arg0, 0),
3305 TREE_OPERAND (arg1, 0)),
3306 TREE_OPERAND (arg0, 1));
3308 /* (A / C1) +- (A / C2) -> A * (1 / C1 +- 1 / C2). */
3309 if (operand_equal_p (TREE_OPERAND (arg0, 0),
3310 TREE_OPERAND (arg1, 0), 0)
3311 && TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
3312 && TREE_CODE (TREE_OPERAND (arg1, 1)) == REAL_CST)
3314 REAL_VALUE_TYPE r0, r1;
3315 r0 = TREE_REAL_CST (TREE_OPERAND (arg0, 1));
3316 r1 = TREE_REAL_CST (TREE_OPERAND (arg1, 1));
3317 if (!mul0)
3318 real_arithmetic (&r0, RDIV_EXPR, &dconst1, &r0);
3319 if (!mul1)
3320 real_arithmetic (&r1, RDIV_EXPR, &dconst1, &r1);
3321 real_arithmetic (&r0, code, &r0, &r1);
3322 return fold_build2_loc (loc, MULT_EXPR, type,
3323 TREE_OPERAND (arg0, 0),
3324 build_real (type, r0));
3327 return NULL_TREE;
3330 /* Return a BIT_FIELD_REF of type TYPE to refer to BITSIZE bits of INNER
3331 starting at BITPOS. The field is unsigned if UNSIGNEDP is nonzero. */
3333 static tree
3334 make_bit_field_ref (location_t loc, tree inner, tree type,
3335 HOST_WIDE_INT bitsize, HOST_WIDE_INT bitpos, int unsignedp)
3337 tree result, bftype;
3339 if (bitpos == 0)
3341 tree size = TYPE_SIZE (TREE_TYPE (inner));
3342 if ((INTEGRAL_TYPE_P (TREE_TYPE (inner))
3343 || POINTER_TYPE_P (TREE_TYPE (inner)))
3344 && host_integerp (size, 0)
3345 && tree_low_cst (size, 0) == bitsize)
3346 return fold_convert_loc (loc, type, inner);
3349 bftype = type;
3350 if (TYPE_PRECISION (bftype) != bitsize
3351 || TYPE_UNSIGNED (bftype) == !unsignedp)
3352 bftype = build_nonstandard_integer_type (bitsize, 0);
3354 result = build3_loc (loc, BIT_FIELD_REF, bftype, inner,
3355 size_int (bitsize), bitsize_int (bitpos));
3357 if (bftype != type)
3358 result = fold_convert_loc (loc, type, result);
3360 return result;
3363 /* Optimize a bit-field compare.
3365 There are two cases: First is a compare against a constant and the
3366 second is a comparison of two items where the fields are at the same
3367 bit position relative to the start of a chunk (byte, halfword, word)
3368 large enough to contain it. In these cases we can avoid the shift
3369 implicit in bitfield extractions.
3371 For constants, we emit a compare of the shifted constant with the
3372 BIT_AND_EXPR of a mask and a byte, halfword, or word of the operand being
3373 compared. For two fields at the same position, we do the ANDs with the
3374 similar mask and compare the result of the ANDs.
3376 CODE is the comparison code, known to be either NE_EXPR or EQ_EXPR.
3377 COMPARE_TYPE is the type of the comparison, and LHS and RHS
3378 are the left and right operands of the comparison, respectively.
3380 If the optimization described above can be done, we return the resulting
3381 tree. Otherwise we return zero. */
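/* For instance, a test of a 2-bit field at bit position 4 against the
   constant 3 can, on a little-endian target, be rewritten roughly as
   (WORD & 0x30) == 0x30, where WORD stands for the containing memory
   word and 0x30 is both the field mask and the constant 3 shifted into
   place. */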
3383 static tree
3384 optimize_bit_field_compare (location_t loc, enum tree_code code,
3385 tree compare_type, tree lhs, tree rhs)
3387 HOST_WIDE_INT lbitpos, lbitsize, rbitpos, rbitsize, nbitpos, nbitsize;
3388 tree type = TREE_TYPE (lhs);
3389 tree signed_type, unsigned_type;
3390 int const_p = TREE_CODE (rhs) == INTEGER_CST;
3391 enum machine_mode lmode, rmode, nmode;
3392 int lunsignedp, runsignedp;
3393 int lvolatilep = 0, rvolatilep = 0;
3394 tree linner, rinner = NULL_TREE;
3395 tree mask;
3396 tree offset;
3398 /* Get all the information about the extractions being done. If the bit size
3399 is the same as the size of the underlying object, we aren't doing an
3400 extraction at all and so can do nothing. We also don't want to
3401 do anything if the inner expression is a PLACEHOLDER_EXPR since we
3402 then will no longer be able to replace it. */
3403 linner = get_inner_reference (lhs, &lbitsize, &lbitpos, &offset, &lmode,
3404 &lunsignedp, &lvolatilep, false);
3405 if (linner == lhs || lbitsize == GET_MODE_BITSIZE (lmode) || lbitsize < 0
3406 || offset != 0 || TREE_CODE (linner) == PLACEHOLDER_EXPR)
3407 return 0;
3409 if (!const_p)
3411 /* If this is not a constant, we can only do something if bit positions,
3412 sizes, and signedness are the same. */
3413 rinner = get_inner_reference (rhs, &rbitsize, &rbitpos, &offset, &rmode,
3414 &runsignedp, &rvolatilep, false);
3416 if (rinner == rhs || lbitpos != rbitpos || lbitsize != rbitsize
3417 || lunsignedp != runsignedp || offset != 0
3418 || TREE_CODE (rinner) == PLACEHOLDER_EXPR)
3419 return 0;
3422 /* See if we can find a mode to refer to this field. We should be able to,
3423 but fail if we can't. */
3424 if (lvolatilep
3425 && GET_MODE_BITSIZE (lmode) > 0
3426 && flag_strict_volatile_bitfields > 0)
3427 nmode = lmode;
3428 else
3429 nmode = get_best_mode (lbitsize, lbitpos,
3430 const_p ? TYPE_ALIGN (TREE_TYPE (linner))
3431 : MIN (TYPE_ALIGN (TREE_TYPE (linner)),
3432 TYPE_ALIGN (TREE_TYPE (rinner))),
3433 word_mode, lvolatilep || rvolatilep);
3434 if (nmode == VOIDmode)
3435 return 0;
3437 /* Set signed and unsigned types of the precision of this mode for the
3438 shifts below. */
3439 signed_type = lang_hooks.types.type_for_mode (nmode, 0);
3440 unsigned_type = lang_hooks.types.type_for_mode (nmode, 1);
3442 /* Compute the bit position and size for the new reference and our offset
3443 within it. If the new reference is the same size as the original, we
3444 won't optimize anything, so return zero. */
3445 nbitsize = GET_MODE_BITSIZE (nmode);
3446 nbitpos = lbitpos & ~ (nbitsize - 1);
3447 lbitpos -= nbitpos;
3448 if (nbitsize == lbitsize)
3449 return 0;
3451 if (BYTES_BIG_ENDIAN)
3452 lbitpos = nbitsize - lbitsize - lbitpos;
3454 /* Make the mask to be used against the extracted field. */
3455 mask = build_int_cst_type (unsigned_type, -1);
3456 mask = const_binop (LSHIFT_EXPR, mask, size_int (nbitsize - lbitsize));
3457 mask = const_binop (RSHIFT_EXPR, mask,
3458 size_int (nbitsize - lbitsize - lbitpos));
3460 if (! const_p)
3461 /* If not comparing with constant, just rework the comparison
3462 and return. */
3463 return fold_build2_loc (loc, code, compare_type,
3464 fold_build2_loc (loc, BIT_AND_EXPR, unsigned_type,
3465 make_bit_field_ref (loc, linner,
3466 unsigned_type,
3467 nbitsize, nbitpos,
3468 1),
3469 mask),
3470 fold_build2_loc (loc, BIT_AND_EXPR, unsigned_type,
3471 make_bit_field_ref (loc, rinner,
3472 unsigned_type,
3473 nbitsize, nbitpos,
3474 1),
3475 mask));
3477 /* Otherwise, we are handling the constant case. See if the constant is too
3478 big for the field. Warn and return a tree for 0 (false) if so. We do
3479 this not only for its own sake, but to avoid having to test for this
3480 error case below. If we didn't, we might generate wrong code.
3482 For unsigned fields, the constant shifted right by the field length should
3483 be all zero. For signed fields, the high-order bits should agree with
3484 the sign bit. */
3486 if (lunsignedp)
3488 if (! integer_zerop (const_binop (RSHIFT_EXPR,
3489 fold_convert_loc (loc,
3490 unsigned_type, rhs),
3491 size_int (lbitsize))))
3493 warning (0, "comparison is always %d due to width of bit-field",
3494 code == NE_EXPR);
3495 return constant_boolean_node (code == NE_EXPR, compare_type);
3498 else
3500 tree tem = const_binop (RSHIFT_EXPR,
3501 fold_convert_loc (loc, signed_type, rhs),
3502 size_int (lbitsize - 1));
3503 if (! integer_zerop (tem) && ! integer_all_onesp (tem))
3505 warning (0, "comparison is always %d due to width of bit-field",
3506 code == NE_EXPR);
3507 return constant_boolean_node (code == NE_EXPR, compare_type);
3511 /* Single-bit compares should always be against zero. */
3512 if (lbitsize == 1 && ! integer_zerop (rhs))
3514 code = code == EQ_EXPR ? NE_EXPR : EQ_EXPR;
3515 rhs = build_int_cst (type, 0);
3518 /* Make a new bitfield reference, shift the constant over the
3519 appropriate number of bits and mask it with the computed mask
3520 (in case this was a signed field). If we changed it, make a new one. */
3521 lhs = make_bit_field_ref (loc, linner, unsigned_type, nbitsize, nbitpos, 1);
3522 if (lvolatilep)
3524 TREE_SIDE_EFFECTS (lhs) = 1;
3525 TREE_THIS_VOLATILE (lhs) = 1;
3528 rhs = const_binop (BIT_AND_EXPR,
3529 const_binop (LSHIFT_EXPR,
3530 fold_convert_loc (loc, unsigned_type, rhs),
3531 size_int (lbitpos)),
3532 mask);
3534 lhs = build2_loc (loc, code, compare_type,
3535 build2 (BIT_AND_EXPR, unsigned_type, lhs, mask), rhs);
3536 return lhs;
3539 /* Subroutine for fold_truthop: decode a field reference.
3541 If EXP is a comparison reference, we return the innermost reference.
3543 *PBITSIZE is set to the number of bits in the reference, *PBITPOS is
3544 set to the starting bit number.
3546 If the innermost field can be completely contained in a mode-sized
3547 unit, *PMODE is set to that mode. Otherwise, it is set to VOIDmode.
3549 *PVOLATILEP is set to 1 if any expression encountered is volatile;
3550 otherwise it is not changed.
3552 *PUNSIGNEDP is set to the signedness of the field.
3554 *PMASK is set to the mask used. This is either contained in a
3555 BIT_AND_EXPR or derived from the width of the field.
3557 *PAND_MASK is set to the mask found in a BIT_AND_EXPR, if any.
3559 Return 0 if this is not a component reference or is one that we can't
3560 do anything with. */
3562 static tree
3563 decode_field_reference (location_t loc, tree exp, HOST_WIDE_INT *pbitsize,
3564 HOST_WIDE_INT *pbitpos, enum machine_mode *pmode,
3565 int *punsignedp, int *pvolatilep,
3566 tree *pmask, tree *pand_mask)
3568 tree outer_type = 0;
3569 tree and_mask = 0;
3570 tree mask, inner, offset;
3571 tree unsigned_type;
3572 unsigned int precision;
3574 /* All the optimizations using this function assume integer fields.
3575 There are problems with FP fields since the type_for_size call
3576 below can fail for, e.g., XFmode. */
3577 if (! INTEGRAL_TYPE_P (TREE_TYPE (exp)))
3578 return 0;
3580 /* We are interested in the bare arrangement of bits, so strip everything
3581 that doesn't affect the machine mode. However, record the type of the
3582 outermost expression if it may matter below. */
3583 if (CONVERT_EXPR_P (exp)
3584 || TREE_CODE (exp) == NON_LVALUE_EXPR)
3585 outer_type = TREE_TYPE (exp);
3586 STRIP_NOPS (exp);
3588 if (TREE_CODE (exp) == BIT_AND_EXPR)
3590 and_mask = TREE_OPERAND (exp, 1);
3591 exp = TREE_OPERAND (exp, 0);
3592 STRIP_NOPS (exp); STRIP_NOPS (and_mask);
3593 if (TREE_CODE (and_mask) != INTEGER_CST)
3594 return 0;
3597 inner = get_inner_reference (exp, pbitsize, pbitpos, &offset, pmode,
3598 punsignedp, pvolatilep, false);
3599 if ((inner == exp && and_mask == 0)
3600 || *pbitsize < 0 || offset != 0
3601 || TREE_CODE (inner) == PLACEHOLDER_EXPR)
3602 return 0;
3604 /* If the number of bits in the reference is the same as the bitsize of
3605 the outer type, then the outer type gives the signedness. Otherwise
3606 (in case of a small bitfield) the signedness is unchanged. */
3607 if (outer_type && *pbitsize == TYPE_PRECISION (outer_type))
3608 *punsignedp = TYPE_UNSIGNED (outer_type);
3610 /* Compute the mask to access the bitfield. */
3611 unsigned_type = lang_hooks.types.type_for_size (*pbitsize, 1);
3612 precision = TYPE_PRECISION (unsigned_type);
3614 mask = build_int_cst_type (unsigned_type, -1);
3616 mask = const_binop (LSHIFT_EXPR, mask, size_int (precision - *pbitsize));
3617 mask = const_binop (RSHIFT_EXPR, mask, size_int (precision - *pbitsize));
3619 /* Merge it with the mask we found in the BIT_AND_EXPR, if any. */
3620 if (and_mask != 0)
3621 mask = fold_build2_loc (loc, BIT_AND_EXPR, unsigned_type,
3622 fold_convert_loc (loc, unsigned_type, and_mask), mask);
3624 *pmask = mask;
3625 *pand_mask = and_mask;
3626 return inner;
3629 /* Return nonzero if MASK represents a mask of SIZE ones in the low-order
3630 bit positions. */
3632 static int
3633 all_ones_mask_p (const_tree mask, int size)
3635 tree type = TREE_TYPE (mask);
3636 unsigned int precision = TYPE_PRECISION (type);
3637 tree tmask;
3639 tmask = build_int_cst_type (signed_type_for (type), -1);
3641 return
3642 tree_int_cst_equal (mask,
3643 const_binop (RSHIFT_EXPR,
3644 const_binop (LSHIFT_EXPR, tmask,
3645 size_int (precision - size)),
3646 size_int (precision - size)));
3649 /* Subroutine for fold: determine if VAL is the INTEGER_CST that
3650 represents the sign bit of EXP's type. If EXP represents a sign
3651 or zero extension, also test VAL against the unextended type.
3652 The return value is the (sub)expression whose sign bit is VAL,
3653 or NULL_TREE otherwise. */
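/* For instance, for a 32-bit type VAL must be 0x80000000; when EXP is
   a widening conversion from a signed char, 0x80 qualifies as well. */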
3655 static tree
3656 sign_bit_p (tree exp, const_tree val)
3658 unsigned HOST_WIDE_INT mask_lo, lo;
3659 HOST_WIDE_INT mask_hi, hi;
3660 int width;
3661 tree t;
3663 /* Tree EXP must have an integral type. */
3664 t = TREE_TYPE (exp);
3665 if (! INTEGRAL_TYPE_P (t))
3666 return NULL_TREE;
3668 /* Tree VAL must be an integer constant. */
3669 if (TREE_CODE (val) != INTEGER_CST
3670 || TREE_OVERFLOW (val))
3671 return NULL_TREE;
3673 width = TYPE_PRECISION (t);
3674 if (width > HOST_BITS_PER_WIDE_INT)
3676 hi = (unsigned HOST_WIDE_INT) 1 << (width - HOST_BITS_PER_WIDE_INT - 1);
3677 lo = 0;
3679 mask_hi = ((unsigned HOST_WIDE_INT) -1
3680 >> (2 * HOST_BITS_PER_WIDE_INT - width));
3681 mask_lo = -1;
3683 else
3685 hi = 0;
3686 lo = (unsigned HOST_WIDE_INT) 1 << (width - 1);
3688 mask_hi = 0;
3689 mask_lo = ((unsigned HOST_WIDE_INT) -1
3690 >> (HOST_BITS_PER_WIDE_INT - width));
3693 /* We mask off those bits beyond TREE_TYPE (exp) so that we can
3694 treat VAL as if it were unsigned. */
3695 if ((TREE_INT_CST_HIGH (val) & mask_hi) == hi
3696 && (TREE_INT_CST_LOW (val) & mask_lo) == lo)
3697 return exp;
3699 /* Handle extension from a narrower type. */
3700 if (TREE_CODE (exp) == NOP_EXPR
3701 && TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0))) < width)
3702 return sign_bit_p (TREE_OPERAND (exp, 0), val);
3704 return NULL_TREE;
3707 /* Subroutine for fold_truthop: determine if an operand is simple enough
3708 to be evaluated unconditionally. */
3710 static int
3711 simple_operand_p (const_tree exp)
3713 /* Strip any conversions that don't change the machine mode. */
3714 STRIP_NOPS (exp);
3716 return (CONSTANT_CLASS_P (exp)
3717 || TREE_CODE (exp) == SSA_NAME
3718 || (DECL_P (exp)
3719 && ! TREE_ADDRESSABLE (exp)
3720 && ! TREE_THIS_VOLATILE (exp)
3721 && ! DECL_NONLOCAL (exp)
3722 /* Don't regard global variables as simple. They may be
3723 allocated in ways unknown to the compiler (shared memory,
3724 #pragma weak, etc). */
3725 && ! TREE_PUBLIC (exp)
3726 && ! DECL_EXTERNAL (exp)
3727 /* Loading a static variable is unduly expensive, but global
3728 registers aren't expensive. */
3729 && (! TREE_STATIC (exp) || DECL_REGISTER (exp))));
3732 /* The following functions are subroutines to fold_range_test and allow it to
3733 try to change a logical combination of comparisons into a range test.
3735 For example, both
3736 X == 2 || X == 3 || X == 4 || X == 5
3737 and
3738 X >= 2 && X <= 5
3739 are converted to
3740 (unsigned) (X - 2) <= 3
3742 We describe each set of comparisons as being either inside or outside
3743 a range, using a variable named like IN_P, and then describe the
3744 range with a lower and upper bound. If one of the bounds is omitted,
3745 it represents either the highest or lowest value of the type.
3747 In the comments below, we represent a range by two numbers in brackets
3748 preceded by a "+" to designate being inside that range, or a "-" to
3749 designate being outside that range, so the condition can be inverted by
3750 flipping the prefix. An omitted bound is represented by a "-". For
3751 example, "- [-, 10]" means being outside the range starting at the lowest
3752 possible value and ending at 10, in other words, being greater than 10.
3753 The range "+ [-, -]" is always true and hence the range "- [-, -]" is
3754 always false.
3756 We set up things so that the missing bounds are handled in a consistent
3757 manner so neither a missing bound nor "true" and "false" need to be
3758 handled using a special case. */
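/* A minimal stand-alone sketch of the rewrite described above; the
   function name is an illustration only, and the subtraction is done
   in unsigned arithmetic so every step is well defined:  */
static int
range_test_example (int x)
{
  /* X == 2 || X == 3 || X == 4 || X == 5, i.e. X >= 2 && X <= 5,
     collapses to the single unsigned comparison below.  */
  return (unsigned int) x - 2u <= 3u;
}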
3760 /* Return the result of applying CODE to ARG0 and ARG1, but handle the case
3761 of ARG0 and/or ARG1 being omitted, meaning an unlimited range. UPPER0_P
3762 and UPPER1_P are nonzero if the respective argument is an upper bound
3763 and zero for a lower. TYPE, if nonzero, is the type of the result; it
3764 must be specified for a comparison. ARG1 will be converted to ARG0's
3765 type if both are specified. */
3767 static tree
3768 range_binop (enum tree_code code, tree type, tree arg0, int upper0_p,
3769 tree arg1, int upper1_p)
3771 tree tem;
3772 int result;
3773 int sgn0, sgn1;
3775 /* If neither arg represents infinity, do the normal operation.
3776 Else, if not a comparison, return infinity. Else handle the special
3777 comparison rules. Note that most of the cases below won't occur, but
3778 are handled for consistency. */
3780 if (arg0 != 0 && arg1 != 0)
3782 tem = fold_build2 (code, type != 0 ? type : TREE_TYPE (arg0),
3783 arg0, fold_convert (TREE_TYPE (arg0), arg1));
3784 STRIP_NOPS (tem);
3785 return TREE_CODE (tem) == INTEGER_CST ? tem : 0;
3788 if (TREE_CODE_CLASS (code) != tcc_comparison)
3789 return 0;
3791 /* Set SGN[01] to -1 if ARG[01] is a lower bound, 1 for upper, and 0
3792 for neither. In real mathematics, we cannot assume that open-ended
3793 ranges are the same. But this is computer arithmetic, where numbers are
3794 finite. We can therefore give any missing bound the value Z, Z being
3795 greater than any representable number. This permits
3796 us to treat unbounded ranges as equal. */
3797 sgn0 = arg0 != 0 ? 0 : (upper0_p ? 1 : -1);
3798 sgn1 = arg1 != 0 ? 0 : (upper1_p ? 1 : -1);
3799 switch (code)
3801 case EQ_EXPR:
3802 result = sgn0 == sgn1;
3803 break;
3804 case NE_EXPR:
3805 result = sgn0 != sgn1;
3806 break;
3807 case LT_EXPR:
3808 result = sgn0 < sgn1;
3809 break;
3810 case LE_EXPR:
3811 result = sgn0 <= sgn1;
3812 break;
3813 case GT_EXPR:
3814 result = sgn0 > sgn1;
3815 break;
3816 case GE_EXPR:
3817 result = sgn0 >= sgn1;
3818 break;
3819 default:
3820 gcc_unreachable ();
3823 return constant_boolean_node (result, type);
3826 /* Given EXP, a logical expression, set the range it is testing into
3827 variables denoted by PIN_P, PLOW, and PHIGH. Return the expression
3828 actually being tested. *PLOW and *PHIGH will be made of the same
3829 type as the returned expression. If EXP is not a comparison, we
3830 will most likely not be returning a useful value and range. Set
3831 *STRICT_OVERFLOW_P to true if the return value is only valid
3832 because signed overflow is undefined; otherwise, do not change
3833 *STRICT_OVERFLOW_P. */
3835 tree
3836 make_range (tree exp, int *pin_p, tree *plow, tree *phigh,
3837 bool *strict_overflow_p)
3839 enum tree_code code;
3840 tree arg0 = NULL_TREE, arg1 = NULL_TREE;
3841 tree exp_type = NULL_TREE, arg0_type = NULL_TREE;
3842 int in_p, n_in_p;
3843 tree low, high, n_low, n_high;
3844 location_t loc = EXPR_LOCATION (exp);
3846 /* Start with simply saying "EXP != 0" and then look at the code of EXP
3847 and see if we can refine the range. Some of the cases below may not
3848 happen, but it doesn't seem worth worrying about this. We "continue"
3849 the outer loop when we've changed something; otherwise we "break"
3850 the switch, which will "break" the while. */
3852 in_p = 0;
3853 low = high = build_int_cst (TREE_TYPE (exp), 0);
3855 while (1)
3857 code = TREE_CODE (exp);
3858 exp_type = TREE_TYPE (exp);
3860 if (IS_EXPR_CODE_CLASS (TREE_CODE_CLASS (code)))
3862 if (TREE_OPERAND_LENGTH (exp) > 0)
3863 arg0 = TREE_OPERAND (exp, 0);
3864 if (TREE_CODE_CLASS (code) == tcc_comparison
3865 || TREE_CODE_CLASS (code) == tcc_unary
3866 || TREE_CODE_CLASS (code) == tcc_binary)
3867 arg0_type = TREE_TYPE (arg0);
3868 if (TREE_CODE_CLASS (code) == tcc_binary
3869 || TREE_CODE_CLASS (code) == tcc_comparison
3870 || (TREE_CODE_CLASS (code) == tcc_expression
3871 && TREE_OPERAND_LENGTH (exp) > 1))
3872 arg1 = TREE_OPERAND (exp, 1);
3875 switch (code)
3877 case TRUTH_NOT_EXPR:
3878 in_p = ! in_p, exp = arg0;
3879 continue;
3881 case EQ_EXPR: case NE_EXPR:
3882 case LT_EXPR: case LE_EXPR: case GE_EXPR: case GT_EXPR:
3883 /* We can only do something if the range is testing for zero
3884 and if the second operand is an integer constant. Note that
3885 saying something is "in" the range we make is done by
3886 complementing IN_P, since it was set for the initial case of
3887 being not equal to zero; "out" means leaving it alone. */
3888 if (low == 0 || high == 0
3889 || ! integer_zerop (low) || ! integer_zerop (high)
3890 || TREE_CODE (arg1) != INTEGER_CST)
3891 break;
3893 switch (code)
3895 case NE_EXPR: /* - [c, c] */
3896 low = high = arg1;
3897 break;
3898 case EQ_EXPR: /* + [c, c] */
3899 in_p = ! in_p, low = high = arg1;
3900 break;
3901 case GT_EXPR: /* - [-, c] */
3902 low = 0, high = arg1;
3903 break;
3904 case GE_EXPR: /* + [c, -] */
3905 in_p = ! in_p, low = arg1, high = 0;
3906 break;
3907 case LT_EXPR: /* - [c, -] */
3908 low = arg1, high = 0;
3909 break;
3910 case LE_EXPR: /* + [-, c] */
3911 in_p = ! in_p, low = 0, high = arg1;
3912 break;
3913 default:
3914 gcc_unreachable ();
3917 /* If this is an unsigned comparison, we also know that EXP is
3918 greater than or equal to zero. We base the range tests we make
3919 on that fact, so we record it here so we can parse existing
3920 range tests. We test arg0_type since often the return type
3921 of, e.g. EQ_EXPR, is boolean. */
3922 if (TYPE_UNSIGNED (arg0_type) && (low == 0 || high == 0))
3924 if (! merge_ranges (&n_in_p, &n_low, &n_high,
3925 in_p, low, high, 1,
3926 build_int_cst (arg0_type, 0),
3927 NULL_TREE))
3928 break;
3930 in_p = n_in_p, low = n_low, high = n_high;
3932 /* If the high bound is missing, but we have a nonzero low
3933 bound, reverse the range so it goes from zero to the low bound
3934 minus 1. */
3935 if (high == 0 && low && ! integer_zerop (low))
3937 in_p = ! in_p;
3938 high = range_binop (MINUS_EXPR, NULL_TREE, low, 0,
3939 integer_one_node, 0);
3940 low = build_int_cst (arg0_type, 0);
3944 exp = arg0;
3945 continue;
3947 case NEGATE_EXPR:
3948 /* (-x) IN [a,b] -> x in [-b, -a] */
3949 n_low = range_binop (MINUS_EXPR, exp_type,
3950 build_int_cst (exp_type, 0),
3951 0, high, 1);
3952 n_high = range_binop (MINUS_EXPR, exp_type,
3953 build_int_cst (exp_type, 0),
3954 0, low, 0);
3955 if (n_high != 0 && TREE_OVERFLOW (n_high))
3956 break;
3957 goto normalize;
3959 case BIT_NOT_EXPR:
3960 /* ~ X -> -X - 1 */
3961 exp = build2_loc (loc, MINUS_EXPR, exp_type, negate_expr (arg0),
3962 build_int_cst (exp_type, 1));
3963 continue;
3965 case PLUS_EXPR: case MINUS_EXPR:
3966 if (TREE_CODE (arg1) != INTEGER_CST)
3967 break;
3969 /* If flag_wrapv and ARG0_TYPE is signed, then we cannot
3970 move a constant to the other side. */
3971 if (!TYPE_UNSIGNED (arg0_type)
3972 && !TYPE_OVERFLOW_UNDEFINED (arg0_type))
3973 break;
3975 /* If EXP is signed, any overflow in the computation is undefined,
3976 so we don't worry about it so long as our computations on
3977 the bounds don't overflow. For unsigned, overflow is defined
3978 and this is exactly the right thing. */
3979 n_low = range_binop (code == MINUS_EXPR ? PLUS_EXPR : MINUS_EXPR,
3980 arg0_type, low, 0, arg1, 0);
3981 n_high = range_binop (code == MINUS_EXPR ? PLUS_EXPR : MINUS_EXPR,
3982 arg0_type, high, 1, arg1, 0);
3983 if ((n_low != 0 && TREE_OVERFLOW (n_low))
3984 || (n_high != 0 && TREE_OVERFLOW (n_high)))
3985 break;
3987 if (TYPE_OVERFLOW_UNDEFINED (arg0_type))
3988 *strict_overflow_p = true;
3990 normalize:
3991 /* Check for an unsigned range which has wrapped around the maximum
3992 value thus making n_high < n_low, and normalize it. */
3993 if (n_low && n_high && tree_int_cst_lt (n_high, n_low))
3995 low = range_binop (PLUS_EXPR, arg0_type, n_high, 0,
3996 integer_one_node, 0);
3997 high = range_binop (MINUS_EXPR, arg0_type, n_low, 0,
3998 integer_one_node, 0);
4000 /* If the range is of the form +/- [ x+1, x ], we won't
4001 be able to normalize it. But then, it represents the
4002 whole range or the empty set, so make it
4003 +/- [ -, - ]. */
4004 if (tree_int_cst_equal (n_low, low)
4005 && tree_int_cst_equal (n_high, high))
4006 low = high = 0;
4007 else
4008 in_p = ! in_p;
4010 else
4011 low = n_low, high = n_high;
4013 exp = arg0;
4014 continue;
4016 CASE_CONVERT: case NON_LVALUE_EXPR:
4017 if (TYPE_PRECISION (arg0_type) > TYPE_PRECISION (exp_type))
4018 break;
4020 if (! INTEGRAL_TYPE_P (arg0_type)
4021 || (low != 0 && ! int_fits_type_p (low, arg0_type))
4022 || (high != 0 && ! int_fits_type_p (high, arg0_type)))
4023 break;
4025 n_low = low, n_high = high;
4027 if (n_low != 0)
4028 n_low = fold_convert_loc (loc, arg0_type, n_low);
4030 if (n_high != 0)
4031 n_high = fold_convert_loc (loc, arg0_type, n_high);
4034 /* If we're converting arg0 from an unsigned type to exp's
4035 signed type, we will be doing the comparison as unsigned.
4036 The tests above have already verified that LOW and HIGH
4037 are both positive.
4039 So we have to ensure that we will handle large unsigned
4040 values the same way that the current signed bounds treat
4041 negative values. */
4043 if (!TYPE_UNSIGNED (exp_type) && TYPE_UNSIGNED (arg0_type))
4045 tree high_positive;
4046 tree equiv_type;
4047 /* For fixed-point modes, we need to pass the saturating flag
4048 as the 2nd parameter. */
4049 if (ALL_FIXED_POINT_MODE_P (TYPE_MODE (arg0_type)))
4050 equiv_type = lang_hooks.types.type_for_mode
4051 (TYPE_MODE (arg0_type),
4052 TYPE_SATURATING (arg0_type));
4053 else
4054 equiv_type = lang_hooks.types.type_for_mode
4055 (TYPE_MODE (arg0_type), 1);
4057 /* A range without an upper bound is, naturally, unbounded.
4058 Since convert would have cropped a very large value, use
4059 the max value for the destination type. */
4060 high_positive
4061 = TYPE_MAX_VALUE (equiv_type) ? TYPE_MAX_VALUE (equiv_type)
4062 : TYPE_MAX_VALUE (arg0_type);
4064 if (TYPE_PRECISION (exp_type) == TYPE_PRECISION (arg0_type))
4065 high_positive = fold_build2_loc (loc, RSHIFT_EXPR, arg0_type,
4066 fold_convert_loc (loc, arg0_type,
4067 high_positive),
4068 build_int_cst (arg0_type, 1));
4070 /* If the low bound is specified, "and" the range with the
4071 range for which the original unsigned value will be
4072 positive. */
4073 if (low != 0)
4075 if (! merge_ranges (&n_in_p, &n_low, &n_high,
4076 1, n_low, n_high, 1,
4077 fold_convert_loc (loc, arg0_type,
4078 integer_zero_node),
4079 high_positive))
4080 break;
4082 in_p = (n_in_p == in_p);
4084 else
4086 /* Otherwise, "or" the range with the range of the input
4087 that will be interpreted as negative. */
4088 if (! merge_ranges (&n_in_p, &n_low, &n_high,
4089 0, n_low, n_high, 1,
4090 fold_convert_loc (loc, arg0_type,
4091 integer_zero_node),
4092 high_positive))
4093 break;
4095 in_p = (in_p != n_in_p);
4099 exp = arg0;
4100 low = n_low, high = n_high;
4101 continue;
4103 default:
4104 break;
4107 break;
4110 /* If EXP is a constant, we can evaluate whether this is true or false. */
4111 if (TREE_CODE (exp) == INTEGER_CST)
4113 in_p = in_p == (integer_onep (range_binop (GE_EXPR, integer_type_node,
4114 exp, 0, low, 0))
4115 && integer_onep (range_binop (LE_EXPR, integer_type_node,
4116 exp, 1, high, 1)));
4117 low = high = 0;
4118 exp = 0;
4121 *pin_p = in_p, *plow = low, *phigh = high;
4122 return exp;
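/* The PLUS_EXPR/MINUS_EXPR step above slides the range by the
   constant.  A sketch on unsigned ints, where wraparound is defined
   and addition is a bijection; names here are illustrative only.  */
static int
range_shift_example (unsigned int x)
{
  /* x + 3 in [10, 20] holds for exactly the same x as x in [7, 17],
     since the shifted bounds 10 - 3 and 20 - 3 do not wrap.  */
  return x + 3u >= 10u && x + 3u <= 20u;
}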
4125 /* Given a range, LOW, HIGH, and IN_P, an expression, EXP, and a result
4126 type, TYPE, return an expression to test if EXP is in (or out of, depending
4127 on IN_P) the range. Return 0 if the test couldn't be created. */
4129 tree
4130 build_range_check (location_t loc, tree type, tree exp, int in_p,
4131 tree low, tree high)
4133 tree etype = TREE_TYPE (exp), value;
4135 #ifdef HAVE_canonicalize_funcptr_for_compare
4136 /* Disable this optimization for function pointer expressions
4137 on targets that require function pointer canonicalization. */
4138 if (HAVE_canonicalize_funcptr_for_compare
4139 && TREE_CODE (etype) == POINTER_TYPE
4140 && TREE_CODE (TREE_TYPE (etype)) == FUNCTION_TYPE)
4141 return NULL_TREE;
4142 #endif
4144 if (! in_p)
4146 value = build_range_check (loc, type, exp, 1, low, high);
4147 if (value != 0)
4148 return invert_truthvalue_loc (loc, value);
4150 return 0;
4153 if (low == 0 && high == 0)
4154 return build_int_cst (type, 1);
4156 if (low == 0)
4157 return fold_build2_loc (loc, LE_EXPR, type, exp,
4158 fold_convert_loc (loc, etype, high));
4160 if (high == 0)
4161 return fold_build2_loc (loc, GE_EXPR, type, exp,
4162 fold_convert_loc (loc, etype, low));
4164 if (operand_equal_p (low, high, 0))
4165 return fold_build2_loc (loc, EQ_EXPR, type, exp,
4166 fold_convert_loc (loc, etype, low));
4168 if (integer_zerop (low))
4170 if (! TYPE_UNSIGNED (etype))
4172 etype = unsigned_type_for (etype);
4173 high = fold_convert_loc (loc, etype, high);
4174 exp = fold_convert_loc (loc, etype, exp);
4176 return build_range_check (loc, type, exp, 1, 0, high);
4179 /* Optimize (c>=1) && (c<=127) into (signed char)c > 0. */
4180 if (integer_onep (low) && TREE_CODE (high) == INTEGER_CST)
4182 unsigned HOST_WIDE_INT lo;
4183 HOST_WIDE_INT hi;
4184 int prec;
4186 prec = TYPE_PRECISION (etype);
4187 if (prec <= HOST_BITS_PER_WIDE_INT)
4189 hi = 0;
4190 lo = ((unsigned HOST_WIDE_INT) 1 << (prec - 1)) - 1;
4192 else
4194 hi = ((HOST_WIDE_INT) 1 << (prec - HOST_BITS_PER_WIDE_INT - 1)) - 1;
4195 lo = (unsigned HOST_WIDE_INT) -1;
4198 if (TREE_INT_CST_HIGH (high) == hi && TREE_INT_CST_LOW (high) == lo)
4200 if (TYPE_UNSIGNED (etype))
4202 tree signed_etype = signed_type_for (etype);
4203 if (TYPE_PRECISION (signed_etype) != TYPE_PRECISION (etype))
4204 etype
4205 = build_nonstandard_integer_type (TYPE_PRECISION (etype), 0);
4206 else
4207 etype = signed_etype;
4208 exp = fold_convert_loc (loc, etype, exp);
4210 return fold_build2_loc (loc, GT_EXPR, type, exp,
4211 build_int_cst (etype, 0));
4215 /* Optimize (c>=low) && (c<=high) into (c-low>=0) && (c-low<=high-low).
4216 This requires wrap-around arithmetic for the type of the expression.
4217 First make sure that arithmetic in this type is valid, then make sure
4218 that it wraps around. */
4219 if (TREE_CODE (etype) == ENUMERAL_TYPE || TREE_CODE (etype) == BOOLEAN_TYPE)
4220 etype = lang_hooks.types.type_for_size (TYPE_PRECISION (etype),
4221 TYPE_UNSIGNED (etype));
4223 if (TREE_CODE (etype) == INTEGER_TYPE && !TYPE_OVERFLOW_WRAPS (etype))
4225 tree utype, minv, maxv;
4227 /* Check if (unsigned) INT_MAX + 1 == (unsigned) INT_MIN
4228 for the type in question, as we rely on this here. */
4229 utype = unsigned_type_for (etype);
4230 maxv = fold_convert_loc (loc, utype, TYPE_MAX_VALUE (etype));
4231 maxv = range_binop (PLUS_EXPR, NULL_TREE, maxv, 1,
4232 integer_one_node, 1);
4233 minv = fold_convert_loc (loc, utype, TYPE_MIN_VALUE (etype));
4235 if (integer_zerop (range_binop (NE_EXPR, integer_type_node,
4236 minv, 1, maxv, 1)))
4237 etype = utype;
4238 else
4239 return 0;
4242 high = fold_convert_loc (loc, etype, high);
4243 low = fold_convert_loc (loc, etype, low);
4244 exp = fold_convert_loc (loc, etype, exp);
4246 value = const_binop (MINUS_EXPR, high, low);
4249 if (POINTER_TYPE_P (etype))
4251 if (value != 0 && !TREE_OVERFLOW (value))
4253 low = fold_convert_loc (loc, sizetype, low);
4254 low = fold_build1_loc (loc, NEGATE_EXPR, sizetype, low);
4255 return build_range_check (loc, type,
4256 fold_build2_loc (loc, POINTER_PLUS_EXPR,
4257 etype, exp, low),
4258 1, build_int_cst (etype, 0), value);
4260 return 0;
4263 if (value != 0 && !TREE_OVERFLOW (value))
4264 return build_range_check (loc, type,
4265 fold_build2_loc (loc, MINUS_EXPR, etype, exp, low),
4266 1, build_int_cst (etype, 0), value);
4268 return 0;
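/* The (c>=1) && (c<=127) special case above, written out for an
   8-bit unsigned char on the usual two's-complement targets; the
   function name is illustrative only.  */
static int
in_1_to_127 (unsigned char c)
{
  /* c >= 1 && c <= 127 is the same test as a single signed
     comparison against zero:  */
  return (signed char) c > 0;
}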
4271 /* Return the predecessor of VAL in its type, handling the infinite case. */
4273 static tree
4274 range_predecessor (tree val)
4276 tree type = TREE_TYPE (val);
4278 if (INTEGRAL_TYPE_P (type)
4279 && operand_equal_p (val, TYPE_MIN_VALUE (type), 0))
4280 return 0;
4281 else
4282 return range_binop (MINUS_EXPR, NULL_TREE, val, 0, integer_one_node, 0);
4285 /* Return the successor of VAL in its type, handling the infinite case. */
4287 static tree
4288 range_successor (tree val)
4290 tree type = TREE_TYPE (val);
4292 if (INTEGRAL_TYPE_P (type)
4293 && operand_equal_p (val, TYPE_MAX_VALUE (type), 0))
4294 return 0;
4295 else
4296 return range_binop (PLUS_EXPR, NULL_TREE, val, 0, integer_one_node, 0);
4299 /* Given two ranges, see if we can merge them into one. Return 1 if we
4300 can, 0 if we can't. Set the output range into the specified parameters. */
4302 bool
4303 merge_ranges (int *pin_p, tree *plow, tree *phigh, int in0_p, tree low0,
4304 tree high0, int in1_p, tree low1, tree high1)
4306 int no_overlap;
4307 int subset;
4308 int temp;
4309 tree tem;
4310 int in_p;
4311 tree low, high;
4312 int lowequal = ((low0 == 0 && low1 == 0)
4313 || integer_onep (range_binop (EQ_EXPR, integer_type_node,
4314 low0, 0, low1, 0)));
4315 int highequal = ((high0 == 0 && high1 == 0)
4316 || integer_onep (range_binop (EQ_EXPR, integer_type_node,
4317 high0, 1, high1, 1)));
4319 /* Make range 0 be the range that starts first, or ends last if they
4320 start at the same value. Swap them if it isn't. */
4321 if (integer_onep (range_binop (GT_EXPR, integer_type_node,
4322 low0, 0, low1, 0))
4323 || (lowequal
4324 && integer_onep (range_binop (GT_EXPR, integer_type_node,
4325 high1, 1, high0, 1))))
4327 temp = in0_p, in0_p = in1_p, in1_p = temp;
4328 tem = low0, low0 = low1, low1 = tem;
4329 tem = high0, high0 = high1, high1 = tem;
4332 /* Now flag two cases, whether the ranges are disjoint or whether the
4333 second range is totally subsumed in the first. Note that the tests
4334 below are simplified by the ones above. */
4335 no_overlap = integer_onep (range_binop (LT_EXPR, integer_type_node,
4336 high0, 1, low1, 0));
4337 subset = integer_onep (range_binop (LE_EXPR, integer_type_node,
4338 high1, 1, high0, 1));
4340 /* We now have four cases, depending on whether we are including or
4341 excluding the two ranges. */
4342 if (in0_p && in1_p)
4344 /* If they don't overlap, the result is false. If the second range
4345 is a subset it is the result. Otherwise, the range is from the start
4346 of the second to the end of the first. */
4347 if (no_overlap)
4348 in_p = 0, low = high = 0;
4349 else if (subset)
4350 in_p = 1, low = low1, high = high1;
4351 else
4352 in_p = 1, low = low1, high = high0;
4355 else if (in0_p && ! in1_p)
4357 /* If they don't overlap, the result is the first range. If they are
4358 equal, the result is false. If the second range is a subset of the
4359 first, and the ranges begin at the same place, we go from just after
4360 the end of the second range to the end of the first. If the second
4361 range is not a subset of the first, or if it is a subset and both
4362 ranges end at the same place, the range starts at the start of the
4363 first range and ends just before the second range.
4364 Otherwise, we can't describe this as a single range. */
4365 if (no_overlap)
4366 in_p = 1, low = low0, high = high0;
4367 else if (lowequal && highequal)
4368 in_p = 0, low = high = 0;
4369 else if (subset && lowequal)
4371 low = range_successor (high1);
4372 high = high0;
4373 in_p = 1;
4374 if (low == 0)
4376 /* We are in the weird situation where high0 > high1 but
4377 high1 has no successor. Punt. */
4378 return 0;
4381 else if (! subset || highequal)
4383 low = low0;
4384 high = range_predecessor (low1);
4385 in_p = 1;
4386 if (high == 0)
4388 /* low0 < low1 but low1 has no predecessor. Punt. */
4389 return 0;
4392 else
4393 return 0;
4396 else if (! in0_p && in1_p)
4398 /* If they don't overlap, the result is the second range. If the second
4399 is a subset of the first, the result is false. Otherwise,
4400 the range starts just after the first range and ends at the
4401 end of the second. */
4402 if (no_overlap)
4403 in_p = 1, low = low1, high = high1;
4404 else if (subset || highequal)
4405 in_p = 0, low = high = 0;
4406 else
4408 low = range_successor (high0);
4409 high = high1;
4410 in_p = 1;
4411 if (low == 0)
4413 /* high1 > high0 but high0 has no successor. Punt. */
4414 return 0;
4419 else
4421 /* The case where we are excluding both ranges. Here the complex case
4422 is if they don't overlap. In that case, the only time we have a
4423 range is if they are adjacent. If the second is a subset of the
4424 first, the result is the first. Otherwise, the range to exclude
4425 starts at the beginning of the first range and ends at the end of the
4426 second. */
4427 if (no_overlap)
4429 if (integer_onep (range_binop (EQ_EXPR, integer_type_node,
4430 range_successor (high0),
4431 1, low1, 0)))
4432 in_p = 0, low = low0, high = high1;
4433 else
4435 /* Canonicalize - [min, x] into - [-, x]. */
4436 if (low0 && TREE_CODE (low0) == INTEGER_CST)
4437 switch (TREE_CODE (TREE_TYPE (low0)))
4439 case ENUMERAL_TYPE:
4440 if (TYPE_PRECISION (TREE_TYPE (low0))
4441 != GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (low0))))
4442 break;
4443 /* FALLTHROUGH */
4444 case INTEGER_TYPE:
4445 if (tree_int_cst_equal (low0,
4446 TYPE_MIN_VALUE (TREE_TYPE (low0))))
4447 low0 = 0;
4448 break;
4449 case POINTER_TYPE:
4450 if (TYPE_UNSIGNED (TREE_TYPE (low0))
4451 && integer_zerop (low0))
4452 low0 = 0;
4453 break;
4454 default:
4455 break;
4458 /* Canonicalize - [x, max] into - [x, -]. */
4459 if (high1 && TREE_CODE (high1) == INTEGER_CST)
4460 switch (TREE_CODE (TREE_TYPE (high1)))
4462 case ENUMERAL_TYPE:
4463 if (TYPE_PRECISION (TREE_TYPE (high1))
4464 != GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (high1))))
4465 break;
4466 /* FALLTHROUGH */
4467 case INTEGER_TYPE:
4468 if (tree_int_cst_equal (high1,
4469 TYPE_MAX_VALUE (TREE_TYPE (high1))))
4470 high1 = 0;
4471 break;
4472 case POINTER_TYPE:
4473 if (TYPE_UNSIGNED (TREE_TYPE (high1))
4474 && integer_zerop (range_binop (PLUS_EXPR, NULL_TREE,
4475 high1, 1,
4476 integer_one_node, 1)))
4477 high1 = 0;
4478 break;
4479 default:
4480 break;
4483 /* The ranges might be also adjacent between the maximum and
4484 minimum values of the given type. For
4485 - [{min,-}, x] and - [y, {max,-}] ranges where x + 1 < y
4486 return + [x + 1, y - 1]. */
4487 if (low0 == 0 && high1 == 0)
4489 low = range_successor (high0);
4490 high = range_predecessor (low1);
4491 if (low == 0 || high == 0)
4492 return 0;
4494 in_p = 1;
4496 else
4497 return 0;
4500 else if (subset)
4501 in_p = 0, low = low0, high = high0;
4502 else
4503 in_p = 0, low = low0, high = high1;
4506 *pin_p = in_p, *plow = low, *phigh = high;
4507 return 1;
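/* A miniature model of the first case above (both ranges included),
   on plain ints with neither bound missing; names are illustrative
   only.  Intersecting two closed ranges yields either the empty set
   or [max (low0, low1), min (high0, high1)].  */
static int
merge_in_in_example (int low0, int high0, int low1, int high1,
                     int *plow, int *phigh)
{
  int low = low0 > low1 ? low0 : low1;
  int high = high0 < high1 ? high0 : high1;

  if (low > high)
    return 0;                   /* Disjoint: the result is "false".  */
  *plow = low, *phigh = high;
  return 1;
}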
4511 /* Subroutine of fold, looking inside expressions of the form
4512 A op B ? A : C, where ARG0, ARG1 and ARG2 are the three operands
4513 of the COND_EXPR. This function is being used also to optimize
4514 A op B ? C : A, by reversing the comparison first.
4516 Return a folded expression whose code is not a COND_EXPR
4517 anymore, or NULL_TREE if no folding opportunity is found. */
4519 static tree
4520 fold_cond_expr_with_comparison (location_t loc, tree type,
4521 tree arg0, tree arg1, tree arg2)
4523 enum tree_code comp_code = TREE_CODE (arg0);
4524 tree arg00 = TREE_OPERAND (arg0, 0);
4525 tree arg01 = TREE_OPERAND (arg0, 1);
4526 tree arg1_type = TREE_TYPE (arg1);
4527 tree tem;
4529 STRIP_NOPS (arg1);
4530 STRIP_NOPS (arg2);
4532 /* If we have A op 0 ? A : -A, consider applying the following
4533 transformations:
4535 A == 0? A : -A same as -A
4536 A != 0? A : -A same as A
4537 A >= 0? A : -A same as abs (A)
4538 A > 0? A : -A same as abs (A)
4539 A <= 0? A : -A same as -abs (A)
4540 A < 0? A : -A same as -abs (A)
4542 None of these transformations work for modes with signed
4543 zeros. If A is +/-0, the first two transformations will
4544 change the sign of the result (from +0 to -0, or vice
4545 versa). The last four will fix the sign of the result,
4546 even though the original expressions could be positive or
4547 negative, depending on the sign of A.
4549 Note that all these transformations are correct if A is
4550 NaN, since the two alternatives (A and -A) are also NaNs. */
4551 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (type))
4552 && (FLOAT_TYPE_P (TREE_TYPE (arg01))
4553 ? real_zerop (arg01)
4554 : integer_zerop (arg01))
4555 && ((TREE_CODE (arg2) == NEGATE_EXPR
4556 && operand_equal_p (TREE_OPERAND (arg2, 0), arg1, 0))
4557 /* In the case that A is of the form X-Y, '-A' (arg2) may
4558 have already been folded to Y-X, check for that. */
4559 || (TREE_CODE (arg1) == MINUS_EXPR
4560 && TREE_CODE (arg2) == MINUS_EXPR
4561 && operand_equal_p (TREE_OPERAND (arg1, 0),
4562 TREE_OPERAND (arg2, 1), 0)
4563 && operand_equal_p (TREE_OPERAND (arg1, 1),
4564 TREE_OPERAND (arg2, 0), 0))))
4565 switch (comp_code)
4567 case EQ_EXPR:
4568 case UNEQ_EXPR:
4569 tem = fold_convert_loc (loc, arg1_type, arg1);
4570 return pedantic_non_lvalue_loc (loc,
4571 fold_convert_loc (loc, type,
4572 negate_expr (tem)));
4573 case NE_EXPR:
4574 case LTGT_EXPR:
4575 return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
4576 case UNGE_EXPR:
4577 case UNGT_EXPR:
4578 if (flag_trapping_math)
4579 break;
4580 /* Fall through. */
4581 case GE_EXPR:
4582 case GT_EXPR:
4583 if (TYPE_UNSIGNED (TREE_TYPE (arg1)))
4584 arg1 = fold_convert_loc (loc, signed_type_for
4585 (TREE_TYPE (arg1)), arg1);
4586 tem = fold_build1_loc (loc, ABS_EXPR, TREE_TYPE (arg1), arg1);
4587 return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, tem));
4588 case UNLE_EXPR:
4589 case UNLT_EXPR:
4590 if (flag_trapping_math)
4591 break;
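/* Fall through.  */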
4592 case LE_EXPR:
4593 case LT_EXPR:
4594 if (TYPE_UNSIGNED (TREE_TYPE (arg1)))
4595 arg1 = fold_convert_loc (loc, signed_type_for
4596 (TREE_TYPE (arg1)), arg1);
4597 tem = fold_build1_loc (loc, ABS_EXPR, TREE_TYPE (arg1), arg1);
4598 return negate_expr (fold_convert_loc (loc, type, tem));
4599 default:
4600 gcc_assert (TREE_CODE_CLASS (comp_code) == tcc_comparison);
4601 break;
4604 /* A != 0 ? A : 0 is simply A, unless A is -0. Likewise
4605 A == 0 ? A : 0 is always 0 unless A is -0. Note that
4606 both transformations are correct when A is NaN: A != 0
4607 is then true, and A == 0 is false. */
4609 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (type))
4610 && integer_zerop (arg01) && integer_zerop (arg2))
4612 if (comp_code == NE_EXPR)
4613 return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
4614 else if (comp_code == EQ_EXPR)
4615 return build_int_cst (type, 0);
4618 /* Try some transformations of A op B ? A : B.
4620 A == B? A : B same as B
4621 A != B? A : B same as A
4622 A >= B? A : B same as max (A, B)
4623 A > B? A : B same as max (B, A)
4624 A <= B? A : B same as min (A, B)
4625 A < B? A : B same as min (B, A)
4627 As above, these transformations don't work in the presence
4628 of signed zeros. For example, if A and B are zeros of
4629 opposite sign, the first two transformations will change
4630 the sign of the result. In the last four, the original
4631 expressions give different results for (A=+0, B=-0) and
4632 (A=-0, B=+0), but the transformed expressions do not.
4634 The first two transformations are correct if either A or B
4635 is a NaN. In the first transformation, the condition will
4636 be false, and B will indeed be chosen. In the case of the
4637 second transformation, the condition A != B will be true,
4638 and A will be chosen.
4640 The conversions to max() and min() are not correct if B is
4641 a number and A is not. The conditions in the original
4642 expressions will be false, so all four give B. The min()
4643 and max() versions would give a NaN instead. */
4644 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (type))
4645 && operand_equal_for_comparison_p (arg01, arg2, arg00)
4646 /* Avoid these transformations if the COND_EXPR may be used
4647 as an lvalue in the C++ front-end. PR c++/19199. */
4648 && (in_gimple_form
4649 || (strcmp (lang_hooks.name, "GNU C++") != 0
4650 && strcmp (lang_hooks.name, "GNU Objective-C++") != 0)
4651 || ! maybe_lvalue_p (arg1)
4652 || ! maybe_lvalue_p (arg2)))
4654 tree comp_op0 = arg00;
4655 tree comp_op1 = arg01;
4656 tree comp_type = TREE_TYPE (comp_op0);
4658 /* Avoid adding NOP_EXPRs in case this is an lvalue. */
4659 if (TYPE_MAIN_VARIANT (comp_type) == TYPE_MAIN_VARIANT (type))
4661 comp_type = type;
4662 comp_op0 = arg1;
4663 comp_op1 = arg2;
4666 switch (comp_code)
4668 case EQ_EXPR:
4669 return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, arg2));
4670 case NE_EXPR:
4671 return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
4672 case LE_EXPR:
4673 case LT_EXPR:
4674 case UNLE_EXPR:
4675 case UNLT_EXPR:
4676 /* In C++ a ?: expression can be an lvalue, so put the
4677 operand which will be used if they are equal first
4678 so that we can convert this back to the
4679 corresponding COND_EXPR. */
4680 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
4682 comp_op0 = fold_convert_loc (loc, comp_type, comp_op0);
4683 comp_op1 = fold_convert_loc (loc, comp_type, comp_op1);
4684 tem = (comp_code == LE_EXPR || comp_code == UNLE_EXPR)
4685 ? fold_build2_loc (loc, MIN_EXPR, comp_type, comp_op0, comp_op1)
4686 : fold_build2_loc (loc, MIN_EXPR, comp_type,
4687 comp_op1, comp_op0);
4688 return pedantic_non_lvalue_loc (loc,
4689 fold_convert_loc (loc, type, tem));
4691 break;
4692 case GE_EXPR:
4693 case GT_EXPR:
4694 case UNGE_EXPR:
4695 case UNGT_EXPR:
4696 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
4698 comp_op0 = fold_convert_loc (loc, comp_type, comp_op0);
4699 comp_op1 = fold_convert_loc (loc, comp_type, comp_op1);
4700 tem = (comp_code == GE_EXPR || comp_code == UNGE_EXPR)
4701 ? fold_build2_loc (loc, MAX_EXPR, comp_type, comp_op0, comp_op1)
4702 : fold_build2_loc (loc, MAX_EXPR, comp_type,
4703 comp_op1, comp_op0);
4704 return pedantic_non_lvalue_loc (loc,
4705 fold_convert_loc (loc, type, tem));
4707 break;
4708 case UNEQ_EXPR:
4709 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
4710 return pedantic_non_lvalue_loc (loc,
4711 fold_convert_loc (loc, type, arg2));
4712 break;
4713 case LTGT_EXPR:
4714 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
4715 return pedantic_non_lvalue_loc (loc,
4716 fold_convert_loc (loc, type, arg1));
4717 break;
4718 default:
4719 gcc_assert (TREE_CODE_CLASS (comp_code) == tcc_comparison);
4720 break;
4724 /* If this is A op C1 ? A : C2 with C1 and C2 constant integers,
4725 we might still be able to simplify this. For example,
4726 if C1 is one less or one more than C2, this might have started
4727 out as a MIN or MAX and been transformed by this function.
4728 Only good for INTEGER_TYPEs, because we need TYPE_MAX_VALUE. */
4730 if (INTEGRAL_TYPE_P (type)
4731 && TREE_CODE (arg01) == INTEGER_CST
4732 && TREE_CODE (arg2) == INTEGER_CST)
4733 switch (comp_code)
4735 case EQ_EXPR:
4736 if (TREE_CODE (arg1) == INTEGER_CST)
4737 break;
4738 /* We can replace A with C1 in this case. */
4739 arg1 = fold_convert_loc (loc, type, arg01);
4740 return fold_build3_loc (loc, COND_EXPR, type, arg0, arg1, arg2);
4742 case LT_EXPR:
4743 /* If C1 is C2 + 1, this is min(A, C2), but use ARG00's type for
4744 MIN_EXPR, to preserve the signedness of the comparison. */
4745 if (! operand_equal_p (arg2, TYPE_MAX_VALUE (type),
4746 OEP_ONLY_CONST)
4747 && operand_equal_p (arg01,
4748 const_binop (PLUS_EXPR, arg2,
4749 build_int_cst (type, 1)),
4750 OEP_ONLY_CONST))
4752 tem = fold_build2_loc (loc, MIN_EXPR, TREE_TYPE (arg00), arg00,
4753 fold_convert_loc (loc, TREE_TYPE (arg00),
4754 arg2));
4755 return pedantic_non_lvalue_loc (loc,
4756 fold_convert_loc (loc, type, tem));
4758 break;
4760 case LE_EXPR:
4761 /* If C1 is C2 - 1, this is min(A, C2), with the same care
4762 as above. */
4763 if (! operand_equal_p (arg2, TYPE_MIN_VALUE (type),
4764 OEP_ONLY_CONST)
4765 && operand_equal_p (arg01,
4766 const_binop (MINUS_EXPR, arg2,
4767 build_int_cst (type, 1)),
4768 OEP_ONLY_CONST))
4770 tem = fold_build2_loc (loc, MIN_EXPR, TREE_TYPE (arg00), arg00,
4771 fold_convert_loc (loc, TREE_TYPE (arg00),
4772 arg2));
4773 return pedantic_non_lvalue_loc (loc,
4774 fold_convert_loc (loc, type, tem));
4776 break;
4778 case GT_EXPR:
4779 /* If C1 is C2 - 1, this is max(A, C2), but use ARG00's type for
4780 MAX_EXPR, to preserve the signedness of the comparison. */
4781 if (! operand_equal_p (arg2, TYPE_MIN_VALUE (type),
4782 OEP_ONLY_CONST)
4783 && operand_equal_p (arg01,
4784 const_binop (MINUS_EXPR, arg2,
4785 build_int_cst (type, 1)),
4786 OEP_ONLY_CONST))
4788 tem = fold_build2_loc (loc, MAX_EXPR, TREE_TYPE (arg00), arg00,
4789 fold_convert_loc (loc, TREE_TYPE (arg00),
4790 arg2));
4791 return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, tem));
4793 break;
4795 case GE_EXPR:
4796 /* If C1 is C2 + 1, this is max(A, C2), with the same care as above. */
4797 if (! operand_equal_p (arg2, TYPE_MAX_VALUE (type),
4798 OEP_ONLY_CONST)
4799 && operand_equal_p (arg01,
4800 const_binop (PLUS_EXPR, arg2,
4801 build_int_cst (type, 1)),
4802 OEP_ONLY_CONST))
4804 tem = fold_build2_loc (loc, MAX_EXPR, TREE_TYPE (arg00), arg00,
4805 fold_convert_loc (loc, TREE_TYPE (arg00),
4806 arg2));
4807 return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, tem));
4809 break;
4810 case NE_EXPR:
4811 break;
4812 default:
4813 gcc_unreachable ();
4816 return NULL_TREE;
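/* Two rows of the catalog above, checked on ints (INT_MIN aside for
   the ABS row); purely an illustration, not used by the folder.  */
static int
cond_identity_example (int a, int b)
{
  int abs_a = a >= 0 ? a : -a;       /* A >= 0 ? A : -A is abs (A) */
  int min_ab = a <= b ? a : b;       /* A <= B ? A : B is min (A, B) */
  return abs_a == (a < 0 ? -a : a) && min_ab == (b < a ? b : a);
}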
4821 #ifndef LOGICAL_OP_NON_SHORT_CIRCUIT
4822 #define LOGICAL_OP_NON_SHORT_CIRCUIT \
4823 (BRANCH_COST (optimize_function_for_speed_p (cfun), \
4824 false) >= 2)
4825 #endif
4827 /* EXP is some logical combination of boolean tests. See if we can
4828 merge it into some range test. Return the new tree if so. */
4830 static tree
4831 fold_range_test (location_t loc, enum tree_code code, tree type,
4832 tree op0, tree op1)
4834 int or_op = (code == TRUTH_ORIF_EXPR
4835 || code == TRUTH_OR_EXPR);
4836 int in0_p, in1_p, in_p;
4837 tree low0, low1, low, high0, high1, high;
4838 bool strict_overflow_p = false;
4839 tree lhs = make_range (op0, &in0_p, &low0, &high0, &strict_overflow_p);
4840 tree rhs = make_range (op1, &in1_p, &low1, &high1, &strict_overflow_p);
4841 tree tem;
4842 const char * const warnmsg = G_("assuming signed overflow does not occur "
4843 "when simplifying range test");
4845 /* If this is an OR operation, invert both sides; we will invert
4846 again at the end. */
4847 if (or_op)
4848 in0_p = ! in0_p, in1_p = ! in1_p;
4850 /* If both expressions are the same, if we can merge the ranges, and we
4851 can build the range test, return it or its inversion. If one of the
4852 ranges is always true or always false, consider it to be the same
4853 expression as the other. */
4854 if ((lhs == 0 || rhs == 0 || operand_equal_p (lhs, rhs, 0))
4855 && merge_ranges (&in_p, &low, &high, in0_p, low0, high0,
4856 in1_p, low1, high1)
4857 && 0 != (tem = (build_range_check (UNKNOWN_LOCATION, type,
4858 lhs != 0 ? lhs
4859 : rhs != 0 ? rhs : integer_zero_node,
4860 in_p, low, high))))
4862 if (strict_overflow_p)
4863 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_COMPARISON);
4864 return or_op ? invert_truthvalue_loc (loc, tem) : tem;
4867 /* On machines where the branch cost is expensive, if this is a
4868 short-circuited branch and the underlying object on both sides
4869 is the same, make a non-short-circuit operation. */
4870 else if (LOGICAL_OP_NON_SHORT_CIRCUIT
4871 && lhs != 0 && rhs != 0
4872 && (code == TRUTH_ANDIF_EXPR
4873 || code == TRUTH_ORIF_EXPR)
4874 && operand_equal_p (lhs, rhs, 0))
4876 /* If simple enough, just rewrite. Otherwise, make a SAVE_EXPR
4877 unless we are at top level or LHS contains a PLACEHOLDER_EXPR, in
4878 which cases we can't do this. */
4879 if (simple_operand_p (lhs))
4880 return build2_loc (loc, code == TRUTH_ANDIF_EXPR
4881 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR,
4882 type, op0, op1);
4884 else if (lang_hooks.decls.global_bindings_p () == 0
4885 && ! CONTAINS_PLACEHOLDER_P (lhs))
4887 tree common = save_expr (lhs);
4889 if (0 != (lhs = build_range_check (loc, type, common,
4890 or_op ? ! in0_p : in0_p,
4891 low0, high0))
4892 && (0 != (rhs = build_range_check (loc, type, common,
4893 or_op ? ! in1_p : in1_p,
4894 low1, high1))))
4896 if (strict_overflow_p)
4897 fold_overflow_warning (warnmsg,
4898 WARN_STRICT_OVERFLOW_COMPARISON);
4899 return build2_loc (loc, code == TRUTH_ANDIF_EXPR
4900 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR,
4901 type, lhs, rhs);
4906 return 0;
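/* The non-short-circuit rewrite above, shown directly: with simple,
   side-effect-free operands, TRUTH_ANDIF can become TRUTH_AND,
   trading a branch for a bitwise AND of two 0/1 truth values.
   Illustrative only.  */
static int
non_short_circuit_example (int x)
{
  /* (x > 0) && (x < 10) evaluated without a branch:  */
  return (x > 0) & (x < 10);
}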
4909 /* Subroutine for fold_truthop: C is an INTEGER_CST interpreted as a P
4910 bit value. Arrange things so the extra bits will be set to zero if and
4911 only if C is sign-extended to its full width. If MASK is nonzero,
4912 it is an INTEGER_CST that should be AND'ed with the extra bits. */
4914 static tree
4915 unextend (tree c, int p, int unsignedp, tree mask)
4917 tree type = TREE_TYPE (c);
4918 int modesize = GET_MODE_BITSIZE (TYPE_MODE (type));
4919 tree temp;
4921 if (p == modesize || unsignedp)
4922 return c;
4924 /* We work by getting just the sign bit into the low-order bit, then
4925 into the high-order bit, then sign-extend. We then XOR that value
4926 with C. */
4927 temp = const_binop (RSHIFT_EXPR, c, size_int (p - 1));
4928 temp = const_binop (BIT_AND_EXPR, temp, size_int (1));
4930 /* We must use a signed type in order to get an arithmetic right shift.
4931 However, we must also avoid introducing accidental overflows, so that
4932 a subsequent call to integer_zerop will work. Hence we must
4933 do the type conversion here. At this point, the constant is either
4934 zero or one, and the conversion to a signed type can never overflow.
4935 We could get an overflow if this conversion is done anywhere else. */
4936 if (TYPE_UNSIGNED (type))
4937 temp = fold_convert (signed_type_for (type), temp);
4939 temp = const_binop (LSHIFT_EXPR, temp, size_int (modesize - 1));
4940 temp = const_binop (RSHIFT_EXPR, temp, size_int (modesize - p - 1));
4941 if (mask != 0)
4942 temp = const_binop (BIT_AND_EXPR, temp,
4943 fold_convert (TREE_TYPE (c), mask));
4944 /* If necessary, convert the type back to match the type of C. */
4945 if (TYPE_UNSIGNED (type))
4946 temp = fold_convert (type, temp);
4948 return fold_convert (type, const_binop (BIT_XOR_EXPR, c, temp));
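/* The routine above is the tree-level cousin of the classic XOR
   trick for sign extension.  A sketch on a 32-bit unsigned value
   whose bits above P are already zero, on two's-complement targets;
   names are illustrative only.  */
static int
sign_extend_example (unsigned int c, int p)
{
  unsigned int sign = 1u << (p - 1);  /* weight of the sign bit */
  return (int) ((c ^ sign) - sign);   /* replicate bit P-1 upward */
}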
4951 /* For an expression that has the form
4952 (A && B) || ~B
4953 or
4954 (A || B) && ~B,
4955 we can drop one of the inner expressions and simplify to
4956 A || ~B
4957 or
4958 A && ~B
4959 LOC is the location of the resulting expression. OP is the inner
4960 logical operation; the left-hand side in the examples above, while CMPOP
4961 is the right-hand side. RHS_ONLY is used to prevent us from accidentally
4962 removing a condition that guards another, as in
4963 (A != NULL && A->...) || A == NULL
4964 which we must not transform. If RHS_ONLY is true, only eliminate the
4965 right-most operand of the inner logical operation. */
4967 static tree
4968 merge_truthop_with_opposite_arm (location_t loc, tree op, tree cmpop,
4969 bool rhs_only)
4971 tree type = TREE_TYPE (cmpop);
4972 enum tree_code code = TREE_CODE (cmpop);
4973 enum tree_code truthop_code = TREE_CODE (op);
4974 tree lhs = TREE_OPERAND (op, 0);
4975 tree rhs = TREE_OPERAND (op, 1);
4976 tree orig_lhs = lhs, orig_rhs = rhs;
4977 enum tree_code rhs_code = TREE_CODE (rhs);
4978 enum tree_code lhs_code = TREE_CODE (lhs);
4979 enum tree_code inv_code;
4981 if (TREE_SIDE_EFFECTS (op) || TREE_SIDE_EFFECTS (cmpop))
4982 return NULL_TREE;
4984 if (TREE_CODE_CLASS (code) != tcc_comparison)
4985 return NULL_TREE;
4987 if (rhs_code == truthop_code)
4989 tree newrhs = merge_truthop_with_opposite_arm (loc, rhs, cmpop, rhs_only);
4990 if (newrhs != NULL_TREE)
4992 rhs = newrhs;
4993 rhs_code = TREE_CODE (rhs);
4996 if (lhs_code == truthop_code && !rhs_only)
4998 tree newlhs = merge_truthop_with_opposite_arm (loc, lhs, cmpop, false);
4999 if (newlhs != NULL_TREE)
5001 lhs = newlhs;
5002 lhs_code = TREE_CODE (lhs);
5006 inv_code = invert_tree_comparison (code, HONOR_NANS (TYPE_MODE (type)));
5007 if (inv_code == rhs_code
5008 && operand_equal_p (TREE_OPERAND (rhs, 0), TREE_OPERAND (cmpop, 0), 0)
5009 && operand_equal_p (TREE_OPERAND (rhs, 1), TREE_OPERAND (cmpop, 1), 0))
5010 return lhs;
5011 if (!rhs_only && inv_code == lhs_code
5012 && operand_equal_p (TREE_OPERAND (lhs, 0), TREE_OPERAND (cmpop, 0), 0)
5013 && operand_equal_p (TREE_OPERAND (lhs, 1), TREE_OPERAND (cmpop, 1), 0))
5014 return rhs;
5015 if (rhs != orig_rhs || lhs != orig_lhs)
5016 return fold_build2_loc (loc, truthop_code, TREE_TYPE (cmpop),
5017 lhs, rhs);
5018 return NULL_TREE;
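/* The shape of this simplification on plain truth values: when B is
   false the right-hand arm already decides the result, so
   (a && b) || !b has the same truth table as a || !b.  Illustrative
   only.  */
static int
drop_opposite_arm_example (int a, int b)
{
  return a || !b;                /* same as (a && b) || !b */
}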
5021 /* Find ways of folding logical expressions of LHS and RHS:
5022 Try to merge two comparisons to the same innermost item.
5023 Look for range tests like "ch >= '0' && ch <= '9'".
5024 Look for combinations of simple terms on machines with expensive branches
5025 and evaluate the RHS unconditionally.
5027 For example, if we have p->a == 2 && p->b == 4 and we can make an
5028 object large enough to span both A and B, we can do this with a comparison
5029 against the object ANDed with the a mask.
5031 If we have p->a == q->a && p->b == q->b, we may be able to use bit masking
5032 operations to do this with one comparison.
5034 We check for both normal comparisons and the BIT_AND_EXPRs made by this
5035 function and the one above.
5037 CODE is the logical operation being done. It can be TRUTH_ANDIF_EXPR,
5038 TRUTH_AND_EXPR, TRUTH_ORIF_EXPR, or TRUTH_OR_EXPR.
5040 TRUTH_TYPE is the type of the logical operand and LHS and RHS are its
5041 two operands.
5043 We return the simplified tree or 0 if no optimization is possible. */
5045 static tree
5046 fold_truthop (location_t loc, enum tree_code code, tree truth_type,
5047 tree lhs, tree rhs)
5049 /* If this is the "or" of two comparisons, we can do something if
5050 the comparisons are NE_EXPR. If this is the "and", we can do something
5051 if the comparisons are EQ_EXPR. I.e.,
5052 (a->b == 2 && a->c == 4) can become (a->new == NEW).
5054 WANTED_CODE is this operation code. For single bit fields, we can
5055 convert EQ_EXPR to NE_EXPR so we need not reject the "wrong"
5056 comparison for one-bit fields. */
5058 enum tree_code wanted_code;
5059 enum tree_code lcode, rcode;
5060 tree ll_arg, lr_arg, rl_arg, rr_arg;
5061 tree ll_inner, lr_inner, rl_inner, rr_inner;
5062 HOST_WIDE_INT ll_bitsize, ll_bitpos, lr_bitsize, lr_bitpos;
5063 HOST_WIDE_INT rl_bitsize, rl_bitpos, rr_bitsize, rr_bitpos;
5064 HOST_WIDE_INT xll_bitpos, xlr_bitpos, xrl_bitpos, xrr_bitpos;
5065 HOST_WIDE_INT lnbitsize, lnbitpos, rnbitsize, rnbitpos;
5066 int ll_unsignedp, lr_unsignedp, rl_unsignedp, rr_unsignedp;
5067 enum machine_mode ll_mode, lr_mode, rl_mode, rr_mode;
5068 enum machine_mode lnmode, rnmode;
5069 tree ll_mask, lr_mask, rl_mask, rr_mask;
5070 tree ll_and_mask, lr_and_mask, rl_and_mask, rr_and_mask;
5071 tree l_const, r_const;
5072 tree lntype, rntype, result;
5073 HOST_WIDE_INT first_bit, end_bit;
5074 int volatilep;
5075 tree orig_lhs = lhs, orig_rhs = rhs;
5076 enum tree_code orig_code = code;
5078 /* Start by getting the comparison codes. Fail if anything is volatile.
5079 If one operand is a BIT_AND_EXPR with the constant one, treat it as if
5080 it were surrounded with a NE_EXPR. */
5082 if (TREE_SIDE_EFFECTS (lhs) || TREE_SIDE_EFFECTS (rhs))
5083 return 0;
5085 lcode = TREE_CODE (lhs);
5086 rcode = TREE_CODE (rhs);
5088 if (lcode == BIT_AND_EXPR && integer_onep (TREE_OPERAND (lhs, 1)))
5090 lhs = build2 (NE_EXPR, truth_type, lhs,
5091 build_int_cst (TREE_TYPE (lhs), 0));
5092 lcode = NE_EXPR;
5095 if (rcode == BIT_AND_EXPR && integer_onep (TREE_OPERAND (rhs, 1)))
5097 rhs = build2 (NE_EXPR, truth_type, rhs,
5098 build_int_cst (TREE_TYPE (rhs), 0));
5099 rcode = NE_EXPR;
5102 if (TREE_CODE_CLASS (lcode) != tcc_comparison
5103 || TREE_CODE_CLASS (rcode) != tcc_comparison)
5104 return 0;
5106 ll_arg = TREE_OPERAND (lhs, 0);
5107 lr_arg = TREE_OPERAND (lhs, 1);
5108 rl_arg = TREE_OPERAND (rhs, 0);
5109 rr_arg = TREE_OPERAND (rhs, 1);
5111 /* Simplify (x<y) && (x==y) into (x<=y) and related optimizations. */
5112 if (simple_operand_p (ll_arg)
5113 && simple_operand_p (lr_arg))
5115 if (operand_equal_p (ll_arg, rl_arg, 0)
5116 && operand_equal_p (lr_arg, rr_arg, 0))
5118 result = combine_comparisons (loc, code, lcode, rcode,
5119 truth_type, ll_arg, lr_arg);
5120 if (result)
5121 return result;
5123 else if (operand_equal_p (ll_arg, rr_arg, 0)
5124 && operand_equal_p (lr_arg, rl_arg, 0))
5126 result = combine_comparisons (loc, code, lcode,
5127 swap_tree_comparison (rcode),
5128 truth_type, ll_arg, lr_arg);
5129 if (result)
5130 return result;
5134 code = ((code == TRUTH_AND_EXPR || code == TRUTH_ANDIF_EXPR)
5135 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR);
5137 /* If the RHS can be evaluated unconditionally and its operands are
5138 simple, it wins to evaluate the RHS unconditionally on machines
5139 with expensive branches. In this case, this isn't a comparison
5140 that can be merged. Avoid doing this if the RHS is a floating-point
5141 comparison since those can trap. */
5143 if (BRANCH_COST (optimize_function_for_speed_p (cfun),
5144 false) >= 2
5145 && ! FLOAT_TYPE_P (TREE_TYPE (rl_arg))
5146 && simple_operand_p (rl_arg)
5147 && simple_operand_p (rr_arg))
5149 /* Convert (a != 0) || (b != 0) into (a | b) != 0. */
5150 if (code == TRUTH_OR_EXPR
5151 && lcode == NE_EXPR && integer_zerop (lr_arg)
5152 && rcode == NE_EXPR && integer_zerop (rr_arg)
5153 && TREE_TYPE (ll_arg) == TREE_TYPE (rl_arg)
5154 && INTEGRAL_TYPE_P (TREE_TYPE (ll_arg)))
5155 return build2_loc (loc, NE_EXPR, truth_type,
5156 build2 (BIT_IOR_EXPR, TREE_TYPE (ll_arg),
5157 ll_arg, rl_arg),
5158 build_int_cst (TREE_TYPE (ll_arg), 0));
5160 /* Convert (a == 0) && (b == 0) into (a | b) == 0. */
5161 if (code == TRUTH_AND_EXPR
5162 && lcode == EQ_EXPR && integer_zerop (lr_arg)
5163 && rcode == EQ_EXPR && integer_zerop (rr_arg)
5164 && TREE_TYPE (ll_arg) == TREE_TYPE (rl_arg)
5165 && INTEGRAL_TYPE_P (TREE_TYPE (ll_arg)))
5166 return build2_loc (loc, EQ_EXPR, truth_type,
5167 build2 (BIT_IOR_EXPR, TREE_TYPE (ll_arg),
5168 ll_arg, rl_arg),
5169 build_int_cst (TREE_TYPE (ll_arg), 0));
5171 if (LOGICAL_OP_NON_SHORT_CIRCUIT)
5173 if (code != orig_code || lhs != orig_lhs || rhs != orig_rhs)
5174 return build2_loc (loc, code, truth_type, lhs, rhs);
5175 return NULL_TREE;
5179 /* See if the comparisons can be merged. Then get all the parameters for
5180 each side. */
5182 if ((lcode != EQ_EXPR && lcode != NE_EXPR)
5183 || (rcode != EQ_EXPR && rcode != NE_EXPR))
5184 return 0;
5186 volatilep = 0;
5187 ll_inner = decode_field_reference (loc, ll_arg,
5188 &ll_bitsize, &ll_bitpos, &ll_mode,
5189 &ll_unsignedp, &volatilep, &ll_mask,
5190 &ll_and_mask);
5191 lr_inner = decode_field_reference (loc, lr_arg,
5192 &lr_bitsize, &lr_bitpos, &lr_mode,
5193 &lr_unsignedp, &volatilep, &lr_mask,
5194 &lr_and_mask);
5195 rl_inner = decode_field_reference (loc, rl_arg,
5196 &rl_bitsize, &rl_bitpos, &rl_mode,
5197 &rl_unsignedp, &volatilep, &rl_mask,
5198 &rl_and_mask);
5199 rr_inner = decode_field_reference (loc, rr_arg,
5200 &rr_bitsize, &rr_bitpos, &rr_mode,
5201 &rr_unsignedp, &volatilep, &rr_mask,
5202 &rr_and_mask);
5204 /* The inner operation on the lhs of each comparison must be the
5205 same if we are to be able to do anything.
5206 Then see if we have constants. If not, the same must be true for
5207 the rhs's. */
5208 if (volatilep || ll_inner == 0 || rl_inner == 0
5209 || ! operand_equal_p (ll_inner, rl_inner, 0))
5210 return 0;
5212 if (TREE_CODE (lr_arg) == INTEGER_CST
5213 && TREE_CODE (rr_arg) == INTEGER_CST)
5214 l_const = lr_arg, r_const = rr_arg;
5215 else if (lr_inner == 0 || rr_inner == 0
5216 || ! operand_equal_p (lr_inner, rr_inner, 0))
5217 return 0;
5218 else
5219 l_const = r_const = 0;
5221 /* If either comparison code is not correct for our logical operation,
5222 fail. However, we can convert a one-bit comparison against zero into
5223 the opposite comparison against that bit being set in the field. */
5225 wanted_code = (code == TRUTH_AND_EXPR ? EQ_EXPR : NE_EXPR);
5226 if (lcode != wanted_code)
5228 if (l_const && integer_zerop (l_const) && integer_pow2p (ll_mask))
5230 /* Make the left operand unsigned, since we are only interested
5231 in the value of one bit. Otherwise we are doing the wrong
5232 thing below. */
5233 ll_unsignedp = 1;
5234 l_const = ll_mask;
5236 else
5237 return 0;
5240 /* This is analogous to the code for l_const above. */
5241 if (rcode != wanted_code)
5243 if (r_const && integer_zerop (r_const) && integer_pow2p (rl_mask))
5245 rl_unsignedp = 1;
5246 r_const = rl_mask;
5248 else
5249 return 0;
5252 /* See if we can find a mode that contains both fields being compared on
5253 the left. If we can't, fail. Otherwise, update all constants and masks
5254 to be relative to a field of that size. */
5255 first_bit = MIN (ll_bitpos, rl_bitpos);
5256 end_bit = MAX (ll_bitpos + ll_bitsize, rl_bitpos + rl_bitsize);
5257 lnmode = get_best_mode (end_bit - first_bit, first_bit,
5258 TYPE_ALIGN (TREE_TYPE (ll_inner)), word_mode,
5259 volatilep);
5260 if (lnmode == VOIDmode)
5261 return 0;
5263 lnbitsize = GET_MODE_BITSIZE (lnmode);
5264 lnbitpos = first_bit & ~ (lnbitsize - 1);
5265 lntype = lang_hooks.types.type_for_size (lnbitsize, 1);
5266 xll_bitpos = ll_bitpos - lnbitpos, xrl_bitpos = rl_bitpos - lnbitpos;
5268 if (BYTES_BIG_ENDIAN)
5270 xll_bitpos = lnbitsize - xll_bitpos - ll_bitsize;
5271 xrl_bitpos = lnbitsize - xrl_bitpos - rl_bitsize;
5274 ll_mask = const_binop (LSHIFT_EXPR, fold_convert_loc (loc, lntype, ll_mask),
5275 size_int (xll_bitpos));
5276 rl_mask = const_binop (LSHIFT_EXPR, fold_convert_loc (loc, lntype, rl_mask),
5277 size_int (xrl_bitpos));
5279 if (l_const)
5281 l_const = fold_convert_loc (loc, lntype, l_const);
5282 l_const = unextend (l_const, ll_bitsize, ll_unsignedp, ll_and_mask);
5283 l_const = const_binop (LSHIFT_EXPR, l_const, size_int (xll_bitpos));
5284 if (! integer_zerop (const_binop (BIT_AND_EXPR, l_const,
5285 fold_build1_loc (loc, BIT_NOT_EXPR,
5286 lntype, ll_mask))))
5288 warning (0, "comparison is always %d", wanted_code == NE_EXPR);
5290 return constant_boolean_node (wanted_code == NE_EXPR, truth_type);
5293 if (r_const)
5295 r_const = fold_convert_loc (loc, lntype, r_const);
5296 r_const = unextend (r_const, rl_bitsize, rl_unsignedp, rl_and_mask);
5297 r_const = const_binop (LSHIFT_EXPR, r_const, size_int (xrl_bitpos));
5298 if (! integer_zerop (const_binop (BIT_AND_EXPR, r_const,
5299 fold_build1_loc (loc, BIT_NOT_EXPR,
5300 lntype, rl_mask))))
5302 warning (0, "comparison is always %d", wanted_code == NE_EXPR);
5304 return constant_boolean_node (wanted_code == NE_EXPR, truth_type);
5308 /* If the right sides are not constant, do the same for them. Also,
5309 disallow this optimization if a size or signedness mismatch occurs
5310 between the left and right sides. */
5311 if (l_const == 0)
5313 if (ll_bitsize != lr_bitsize || rl_bitsize != rr_bitsize
5314 || ll_unsignedp != lr_unsignedp || rl_unsignedp != rr_unsignedp
5315 /* Make sure the two fields on the right
5316 correspond to the left without being swapped. */
5317 || ll_bitpos - rl_bitpos != lr_bitpos - rr_bitpos)
5318 return 0;
5320 first_bit = MIN (lr_bitpos, rr_bitpos);
5321 end_bit = MAX (lr_bitpos + lr_bitsize, rr_bitpos + rr_bitsize);
5322 rnmode = get_best_mode (end_bit - first_bit, first_bit,
5323 TYPE_ALIGN (TREE_TYPE (lr_inner)), word_mode,
5324 volatilep);
5325 if (rnmode == VOIDmode)
5326 return 0;
5328 rnbitsize = GET_MODE_BITSIZE (rnmode);
5329 rnbitpos = first_bit & ~ (rnbitsize - 1);
5330 rntype = lang_hooks.types.type_for_size (rnbitsize, 1);
5331 xlr_bitpos = lr_bitpos - rnbitpos, xrr_bitpos = rr_bitpos - rnbitpos;
5333 if (BYTES_BIG_ENDIAN)
5335 xlr_bitpos = rnbitsize - xlr_bitpos - lr_bitsize;
5336 xrr_bitpos = rnbitsize - xrr_bitpos - rr_bitsize;
5339 lr_mask = const_binop (LSHIFT_EXPR, fold_convert_loc (loc,
5340 rntype, lr_mask),
5341 size_int (xlr_bitpos));
5342 rr_mask = const_binop (LSHIFT_EXPR, fold_convert_loc (loc,
5343 rntype, rr_mask),
5344 size_int (xrr_bitpos));
5346 /* Make a mask that corresponds to both fields being compared.
5347 Do this for both items being compared. If the operands are the
5348 same size and the bits being compared are in the same position
5349 then we can do this by masking both and comparing the masked
5350 results. */
5351 ll_mask = const_binop (BIT_IOR_EXPR, ll_mask, rl_mask);
5352 lr_mask = const_binop (BIT_IOR_EXPR, lr_mask, rr_mask);
5353 if (lnbitsize == rnbitsize && xll_bitpos == xlr_bitpos)
5355 lhs = make_bit_field_ref (loc, ll_inner, lntype, lnbitsize, lnbitpos,
5356 ll_unsignedp || rl_unsignedp);
5357 if (! all_ones_mask_p (ll_mask, lnbitsize))
5358 lhs = build2 (BIT_AND_EXPR, lntype, lhs, ll_mask);
5360 rhs = make_bit_field_ref (loc, lr_inner, rntype, rnbitsize, rnbitpos,
5361 lr_unsignedp || rr_unsignedp);
5362 if (! all_ones_mask_p (lr_mask, rnbitsize))
5363 rhs = build2 (BIT_AND_EXPR, rntype, rhs, lr_mask);
5365 return build2_loc (loc, wanted_code, truth_type, lhs, rhs);
5368 /* There is still another way we can do something: If both pairs of
5369 fields being compared are adjacent, we may be able to make a wider
5370 field containing them both.
5372 Note that we still must mask the lhs/rhs expressions. Furthermore,
5373 the mask must be shifted to account for the shift done by
5374 make_bit_field_ref. */
5375 if ((ll_bitsize + ll_bitpos == rl_bitpos
5376 && lr_bitsize + lr_bitpos == rr_bitpos)
5377 || (ll_bitpos == rl_bitpos + rl_bitsize
5378 && lr_bitpos == rr_bitpos + rr_bitsize))
5380 tree type;
5382 lhs = make_bit_field_ref (loc, ll_inner, lntype,
5383 ll_bitsize + rl_bitsize,
5384 MIN (ll_bitpos, rl_bitpos), ll_unsignedp);
5385 rhs = make_bit_field_ref (loc, lr_inner, rntype,
5386 lr_bitsize + rr_bitsize,
5387 MIN (lr_bitpos, rr_bitpos), lr_unsignedp);
5389 ll_mask = const_binop (RSHIFT_EXPR, ll_mask,
5390 size_int (MIN (xll_bitpos, xrl_bitpos)));
5391 lr_mask = const_binop (RSHIFT_EXPR, lr_mask,
5392 size_int (MIN (xlr_bitpos, xrr_bitpos)));
5394 /* Convert to the smaller type before masking out unwanted bits. */
5395 type = lntype;
5396 if (lntype != rntype)
5398 if (lnbitsize > rnbitsize)
5400 lhs = fold_convert_loc (loc, rntype, lhs);
5401 ll_mask = fold_convert_loc (loc, rntype, ll_mask);
5402 type = rntype;
5404 else if (lnbitsize < rnbitsize)
5406 rhs = fold_convert_loc (loc, lntype, rhs);
5407 lr_mask = fold_convert_loc (loc, lntype, lr_mask);
5408 type = lntype;
5412 if (! all_ones_mask_p (ll_mask, ll_bitsize + rl_bitsize))
5413 lhs = build2 (BIT_AND_EXPR, type, lhs, ll_mask);
5415 if (! all_ones_mask_p (lr_mask, lr_bitsize + rr_bitsize))
5416 rhs = build2 (BIT_AND_EXPR, type, rhs, lr_mask);
5418 return build2_loc (loc, wanted_code, truth_type, lhs, rhs);
5421 return 0;
5424 /* Handle the case of comparisons with constants. If there is something in
5425 common between the masks, those bits of the constants must be the same.
5426 If not, the condition is always false. Test for this to avoid generating
5427 incorrect code below. */
5428 result = const_binop (BIT_AND_EXPR, ll_mask, rl_mask);
5429 if (! integer_zerop (result)
5430 && simple_cst_equal (const_binop (BIT_AND_EXPR, result, l_const),
5431 const_binop (BIT_AND_EXPR, result, r_const)) != 1)
5433 if (wanted_code == NE_EXPR)
5435 warning (0, "%<or%> of unmatched not-equal tests is always 1");
5436 return constant_boolean_node (true, truth_type);
5438 else
5440 warning (0, "%<and%> of mutually exclusive equal-tests is always 0");
5441 return constant_boolean_node (false, truth_type);
5445 /* Construct the expression we will return. First get the component
5446 reference we will make. Unless the mask is all ones the width of
5447 that field, perform the mask operation. Then compare with the
5448 merged constant. */
5449 result = make_bit_field_ref (loc, ll_inner, lntype, lnbitsize, lnbitpos,
5450 ll_unsignedp || rl_unsignedp);
5452 ll_mask = const_binop (BIT_IOR_EXPR, ll_mask, rl_mask);
5453 if (! all_ones_mask_p (ll_mask, lnbitsize))
5454 result = build2_loc (loc, BIT_AND_EXPR, lntype, result, ll_mask);
5456 return build2_loc (loc, wanted_code, truth_type, result,
5457 const_binop (BIT_IOR_EXPR, l_const, r_const));
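/* The unconditional-evaluation rewrites above, spelled out on ints;
   they are valid because a bitwise OR is zero exactly when both
   operands are zero.  Illustrative only.  */
static int
bitwise_merge_example (int a, int b)
{
  /* (a != 0) || (b != 0) becomes (a | b) != 0, and likewise
     (a == 0) && (b == 0) becomes (a | b) == 0.  */
  return (a | b) != 0;
}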
5460 /* Optimize T, which is a comparison of a MIN_EXPR or MAX_EXPR with a
5461 constant. */
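/* For example, MAX (X, 0) == 0 is rewritten as X <= 0 and
   MIN (X, 0) > -1 as X > -1; see the case analysis below.  */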
5463 static tree
5464 optimize_minmax_comparison (location_t loc, enum tree_code code, tree type,
5465 tree op0, tree op1)
5467 tree arg0 = op0;
5468 enum tree_code op_code;
5469 tree comp_const;
5470 tree minmax_const;
5471 int consts_equal, consts_lt;
5472 tree inner;
5474 STRIP_SIGN_NOPS (arg0);
5476 op_code = TREE_CODE (arg0);
5477 minmax_const = TREE_OPERAND (arg0, 1);
5478 comp_const = fold_convert_loc (loc, TREE_TYPE (arg0), op1);
5479 consts_equal = tree_int_cst_equal (minmax_const, comp_const);
5480 consts_lt = tree_int_cst_lt (minmax_const, comp_const);
5481 inner = TREE_OPERAND (arg0, 0);
5483 /* If something does not permit us to optimize, return NULL_TREE. */
5484 if ((op_code != MIN_EXPR && op_code != MAX_EXPR)
5485 || TREE_CODE (comp_const) != INTEGER_CST
5486 || TREE_OVERFLOW (comp_const)
5487 || TREE_CODE (minmax_const) != INTEGER_CST
5488 || TREE_OVERFLOW (minmax_const))
5489 return NULL_TREE;
5491 /* Now handle all the various comparison codes. We only handle EQ_EXPR
5492 and GT_EXPR, doing the rest with recursive calls using logical
5493 simplifications. */
5494 switch (code)
5496 case NE_EXPR: case LT_EXPR: case LE_EXPR:
5498 tree tem
5499 = optimize_minmax_comparison (loc,
5500 invert_tree_comparison (code, false),
5501 type, op0, op1);
5502 if (tem)
5503 return invert_truthvalue_loc (loc, tem);
5504 return NULL_TREE;
5507 case GE_EXPR:
5508 return
5509 fold_build2_loc (loc, TRUTH_ORIF_EXPR, type,
5510 optimize_minmax_comparison
5511 (loc, EQ_EXPR, type, arg0, comp_const),
5512 optimize_minmax_comparison
5513 (loc, GT_EXPR, type, arg0, comp_const));
5515 case EQ_EXPR:
5516 if (op_code == MAX_EXPR && consts_equal)
5517 /* MAX (X, 0) == 0 -> X <= 0 */
5518 return fold_build2_loc (loc, LE_EXPR, type, inner, comp_const);
5520 else if (op_code == MAX_EXPR && consts_lt)
5521 /* MAX (X, 0) == 5 -> X == 5 */
5522 return fold_build2_loc (loc, EQ_EXPR, type, inner, comp_const);
5524 else if (op_code == MAX_EXPR)
5525 /* MAX (X, 0) == -1 -> false */
5526 return omit_one_operand_loc (loc, type, integer_zero_node, inner);
5528 else if (consts_equal)
5529 /* MIN (X, 0) == 0 -> X >= 0 */
5530 return fold_build2_loc (loc, GE_EXPR, type, inner, comp_const);
5532 else if (consts_lt)
5533 /* MIN (X, 0) == 5 -> false */
5534 return omit_one_operand_loc (loc, type, integer_zero_node, inner);
5536 else
5537 /* MIN (X, 0) == -1 -> X == -1 */
5538 return fold_build2_loc (loc, EQ_EXPR, type, inner, comp_const);
5540 case GT_EXPR:
5541 if (op_code == MAX_EXPR && (consts_equal || consts_lt))
5542 /* MAX (X, 0) > 0 -> X > 0
5543 MAX (X, 0) > 5 -> X > 5 */
5544 return fold_build2_loc (loc, GT_EXPR, type, inner, comp_const);
5546 else if (op_code == MAX_EXPR)
5547 /* MAX (X, 0) > -1 -> true */
5548 return omit_one_operand_loc (loc, type, integer_one_node, inner);
5550 else if (op_code == MIN_EXPR && (consts_equal || consts_lt))
5551 /* MIN (X, 0) > 0 -> false
5552 MIN (X, 0) > 5 -> false */
5553 return omit_one_operand_loc (loc, type, integer_zero_node, inner);
5555 else
5556 /* MIN (X, 0) > -1 -> X > -1 */
5557 return fold_build2_loc (loc, GT_EXPR, type, inner, comp_const);
5559 default:
5560 return NULL_TREE;
5564 /* T is an integer expression that is being multiplied, divided, or taken a
5565 modulus (CODE says which and what kind of divide or modulus) by a
5566 constant C. See if we can eliminate that operation by folding it with
5567 other operations already in T. WIDE_TYPE, if non-null, is a type that
5568 should be used for the computation if wider than our type.
5570 For example, if we are dividing (X * 8) + (Y * 16) by 4, we can return
5571 (X * 2) + (Y * 4). We must, however, be assured that either the original
5572 expression would not overflow or that overflow is undefined for the type
5573 in the language in question.
5575 If we return a non-null expression, it is an equivalent form of the
5576 original computation, but need not be in the original type.
5578 We set *STRICT_OVERFLOW_P to true if the return value depends on
5579 signed overflow being undefined. Otherwise we do not change
5580 *STRICT_OVERFLOW_P. */
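/* For instance, with signed X the expression (X * 8) / 8 simplifies
   to X only because signed overflow is undefined; *STRICT_OVERFLOW_P
   records that dependence.  */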
5582 static tree
5583 extract_muldiv (tree t, tree c, enum tree_code code, tree wide_type,
5584 bool *strict_overflow_p)
5586 /* To avoid exponential search depth, refuse to allow recursion past
5587 three levels. Beyond that (1) it's highly unlikely that we'll find
5588 something interesting and (2) we've probably processed it before
5589 when we built the inner expression. */
5591 static int depth;
5592 tree ret;
5594 if (depth > 3)
5595 return NULL;
5597 depth++;
5598 ret = extract_muldiv_1 (t, c, code, wide_type, strict_overflow_p);
5599 depth--;
5601 return ret;
5604 static tree
5605 extract_muldiv_1 (tree t, tree c, enum tree_code code, tree wide_type,
5606 bool *strict_overflow_p)
5608 tree type = TREE_TYPE (t);
5609 enum tree_code tcode = TREE_CODE (t);
5610 tree ctype = (wide_type != 0 && (GET_MODE_SIZE (TYPE_MODE (wide_type))
5611 > GET_MODE_SIZE (TYPE_MODE (type)))
5612 ? wide_type : type);
5613 tree t1, t2;
5614 int same_p = tcode == code;
5615 tree op0 = NULL_TREE, op1 = NULL_TREE;
5616 bool sub_strict_overflow_p;
5618 /* Don't deal with constants of zero here; they confuse the code below. */
5619 if (integer_zerop (c))
5620 return NULL_TREE;
5622 if (TREE_CODE_CLASS (tcode) == tcc_unary)
5623 op0 = TREE_OPERAND (t, 0);
5625 if (TREE_CODE_CLASS (tcode) == tcc_binary)
5626 op0 = TREE_OPERAND (t, 0), op1 = TREE_OPERAND (t, 1);
5628 /* Note that we need not handle conditional operations here since fold
5629 already handles those cases. So just do arithmetic here. */
5630 switch (tcode)
5632 case INTEGER_CST:
5633 /* For a constant, we can always simplify if we are a multiply
5634 or (for divide and modulus) if it is a multiple of our constant. */
5635 if (code == MULT_EXPR
5636 || integer_zerop (const_binop (TRUNC_MOD_EXPR, t, c)))
5637 return const_binop (code, fold_convert (ctype, t),
5638 fold_convert (ctype, c));
5639 break;
5641 CASE_CONVERT: case NON_LVALUE_EXPR:
5642 /* If op0 is an expression ... */
5643 if ((COMPARISON_CLASS_P (op0)
5644 || UNARY_CLASS_P (op0)
5645 || BINARY_CLASS_P (op0)
5646 || VL_EXP_CLASS_P (op0)
5647 || EXPRESSION_CLASS_P (op0))
5648 /* ... and has wrapping overflow, and its type is smaller
5649 than ctype, then we cannot pass through as widening. */
5650 && ((TYPE_OVERFLOW_WRAPS (TREE_TYPE (op0))
5651 && ! (TREE_CODE (TREE_TYPE (op0)) == INTEGER_TYPE
5652 && TYPE_IS_SIZETYPE (TREE_TYPE (op0)))
5653 && (TYPE_PRECISION (ctype)
5654 > TYPE_PRECISION (TREE_TYPE (op0))))
5655 /* ... or this is a truncation (t is narrower than op0),
5656 then we cannot pass through this narrowing. */
5657 || (TYPE_PRECISION (type)
5658 < TYPE_PRECISION (TREE_TYPE (op0)))
5659 /* ... or signedness changes for division or modulus,
5660 then we cannot pass through this conversion. */
5661 || (code != MULT_EXPR
5662 && (TYPE_UNSIGNED (ctype)
5663 != TYPE_UNSIGNED (TREE_TYPE (op0))))
5664 /* ... or OP0's type has undefined overflow while the type it is
5665 converted to has not, then we cannot do the operation in the
5666 inner type as that would introduce undefined overflow. */
5667 || (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (op0))
5668 && !TYPE_OVERFLOW_UNDEFINED (type))))
5669 break;
5671 /* Pass the constant down and see if we can make a simplification. If
5672 we can, replace this expression with the inner simplification for
5673 possible later conversion to our or some other type. */
5674 if ((t2 = fold_convert (TREE_TYPE (op0), c)) != 0
5675 && TREE_CODE (t2) == INTEGER_CST
5676 && !TREE_OVERFLOW (t2)
5677 && (0 != (t1 = extract_muldiv (op0, t2, code,
5678 code == MULT_EXPR
5679 ? ctype : NULL_TREE,
5680 strict_overflow_p))))
5681 return t1;
5682 break;
5684 case ABS_EXPR:
5685 /* If widening the type changes it from signed to unsigned, then we
5686 must avoid building ABS_EXPR itself as unsigned. */
5687 if (TYPE_UNSIGNED (ctype) && !TYPE_UNSIGNED (type))
5689 tree cstype = (*signed_type_for) (ctype);
5690 if ((t1 = extract_muldiv (op0, c, code, cstype, strict_overflow_p))
5691 != 0)
5693 t1 = fold_build1 (tcode, cstype, fold_convert (cstype, t1));
5694 return fold_convert (ctype, t1);
5696 break;
5698 /* If the constant is negative, we cannot simplify this. */
5699 if (tree_int_cst_sgn (c) == -1)
5700 break;
5701 /* FALLTHROUGH */
5702 case NEGATE_EXPR:
5703 if ((t1 = extract_muldiv (op0, c, code, wide_type, strict_overflow_p))
5704 != 0)
5705 return fold_build1 (tcode, ctype, fold_convert (ctype, t1));
5706 break;
5708 case MIN_EXPR: case MAX_EXPR:
5709 /* If widening the type changes the signedness, then we can't perform
5710 this optimization as that changes the result. */
5711 if (TYPE_UNSIGNED (ctype) != TYPE_UNSIGNED (type))
5712 break;
5714 /* MIN (a, b) / 5 -> MIN (a / 5, b / 5) */
5715 sub_strict_overflow_p = false;
5716 if ((t1 = extract_muldiv (op0, c, code, wide_type,
5717 &sub_strict_overflow_p)) != 0
5718 && (t2 = extract_muldiv (op1, c, code, wide_type,
5719 &sub_strict_overflow_p)) != 0)
5721 if (tree_int_cst_sgn (c) < 0)
5722 tcode = (tcode == MIN_EXPR ? MAX_EXPR : MIN_EXPR);
5723 if (sub_strict_overflow_p)
5724 *strict_overflow_p = true;
5725 return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
5726 fold_convert (ctype, t2));
5728 break;
5730 case LSHIFT_EXPR: case RSHIFT_EXPR:
5731 /* If the second operand is constant, this is a multiplication
5732 or floor division by a power of two, so we can treat it that
5733 way unless the multiplier or divisor overflows. Signed
5734 left-shift overflow is implementation-defined rather than
5735 undefined in C90, so do not convert signed left shift into
5736 multiplication. */
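/* E.g. X >> 2 is re-expressed below as X floor-divided by 4, and an
   unsigned X << 2 as X * 4, before recursing on the new form.  */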
5737 if (TREE_CODE (op1) == INTEGER_CST
5738 && (tcode == RSHIFT_EXPR || TYPE_UNSIGNED (TREE_TYPE (op0)))
5739 /* const_binop may not detect overflow correctly,
5740 so check for it explicitly here. */
5741 && TYPE_PRECISION (TREE_TYPE (size_one_node)) > TREE_INT_CST_LOW (op1)
5742 && TREE_INT_CST_HIGH (op1) == 0
5743 && 0 != (t1 = fold_convert (ctype,
5744 const_binop (LSHIFT_EXPR,
5745 size_one_node,
5746 op1)))
5747 && !TREE_OVERFLOW (t1))
5748 return extract_muldiv (build2 (tcode == LSHIFT_EXPR
5749 ? MULT_EXPR : FLOOR_DIV_EXPR,
5750 ctype,
5751 fold_convert (ctype, op0),
5752 t1),
5753 c, code, wide_type, strict_overflow_p);
5754 break;
5756 case PLUS_EXPR: case MINUS_EXPR:
5757 /* See if we can eliminate the operation on both sides. If we can, we
5758 can return a new PLUS or MINUS. If we can't, the only remaining
5759 cases where we can do anything are if the second operand is a
5760 constant. */
5761 sub_strict_overflow_p = false;
5762 t1 = extract_muldiv (op0, c, code, wide_type, &sub_strict_overflow_p);
5763 t2 = extract_muldiv (op1, c, code, wide_type, &sub_strict_overflow_p);
5764 if (t1 != 0 && t2 != 0
5765 && (code == MULT_EXPR
5766 /* If not multiplication, we can only do this if both operands
5767 are divisible by c. */
5768 || (multiple_of_p (ctype, op0, c)
5769 && multiple_of_p (ctype, op1, c))))
5771 if (sub_strict_overflow_p)
5772 *strict_overflow_p = true;
5773 return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
5774 fold_convert (ctype, t2));
5777 /* If this was a subtraction, negate OP1 and set it to be an addition.
5778 This simplifies the logic below. */
5779 if (tcode == MINUS_EXPR)
5781 tcode = PLUS_EXPR, op1 = negate_expr (op1);
5782 /* If OP1 was not easily negatable, the constant may be OP0. */
5783 if (TREE_CODE (op0) == INTEGER_CST)
5785 tree tem = op0;
5786 op0 = op1;
5787 op1 = tem;
5788 tem = t1;
5789 t1 = t2;
5790 t2 = tem;
5794 if (TREE_CODE (op1) != INTEGER_CST)
5795 break;
5797 /* If either OP1 or C is negative, this optimization is not safe for
5798 some of the division and remainder types while for others we need
5799 to change the code. */
5800 if (tree_int_cst_sgn (op1) < 0 || tree_int_cst_sgn (c) < 0)
5802 if (code == CEIL_DIV_EXPR)
5803 code = FLOOR_DIV_EXPR;
5804 else if (code == FLOOR_DIV_EXPR)
5805 code = CEIL_DIV_EXPR;
5806 else if (code != MULT_EXPR
5807 && code != CEIL_MOD_EXPR && code != FLOOR_MOD_EXPR)
5808 break;
5811 /* If it's a multiply or a division/modulus operation of a multiple
5812 of our constant, do the operation and verify it doesn't overflow. */
5813 if (code == MULT_EXPR
5814 || integer_zerop (const_binop (TRUNC_MOD_EXPR, op1, c)))
5816 op1 = const_binop (code, fold_convert (ctype, op1),
5817 fold_convert (ctype, c));
5818 /* We allow the constant to overflow with wrapping semantics. */
5819 if (op1 == 0
5820 || (TREE_OVERFLOW (op1) && !TYPE_OVERFLOW_WRAPS (ctype)))
5821 break;
5823 else
5824 break;
5826 /* If we have an unsigned type that is not a sizetype, we cannot widen
5827 the operation since it will change the result if the original
5828 computation overflowed. */
5829 if (TYPE_UNSIGNED (ctype)
5830 && ! (TREE_CODE (ctype) == INTEGER_TYPE && TYPE_IS_SIZETYPE (ctype))
5831 && ctype != type)
5832 break;
5834 /* If we were able to eliminate our operation from the first side,
5835 apply our operation to the second side and reform the PLUS. */
5836 if (t1 != 0 && (TREE_CODE (t1) != code || code == MULT_EXPR))
5837 return fold_build2 (tcode, ctype, fold_convert (ctype, t1), op1);
5839 /* The last case is if we are a multiply. In that case, we can
5840 apply the distributive law to commute the multiply and addition
5841 if the multiplication of the constants doesn't overflow. */
5842 if (code == MULT_EXPR)
5843 return fold_build2 (tcode, ctype,
5844 fold_build2 (code, ctype,
5845 fold_convert (ctype, op0),
5846 fold_convert (ctype, c)),
5847 op1);
5849 break;
5851 case MULT_EXPR:
5852 /* We have a special case here if we are doing something like
5853 (C * 8) % 4 since we know that's zero. */
5854 if ((code == TRUNC_MOD_EXPR || code == CEIL_MOD_EXPR
5855 || code == FLOOR_MOD_EXPR || code == ROUND_MOD_EXPR)
5856 /* If the multiplication can overflow we cannot optimize this.
5857 ??? Until we can properly mark individual operations as
5858 not overflowing we need to treat sizetype specially here, as
5859 stor-layout relies on this optimization to make
5860 DECL_FIELD_BIT_OFFSET always a constant. */
5861 && (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (t))
5862 || (TREE_CODE (TREE_TYPE (t)) == INTEGER_TYPE
5863 && TYPE_IS_SIZETYPE (TREE_TYPE (t))))
5864 && TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST
5865 && integer_zerop (const_binop (TRUNC_MOD_EXPR, op1, c)))
5867 *strict_overflow_p = true;
5868 return omit_one_operand (type, integer_zero_node, op0);
5871 /* ... fall through ... */
5873 case TRUNC_DIV_EXPR: case CEIL_DIV_EXPR: case FLOOR_DIV_EXPR:
5874 case ROUND_DIV_EXPR: case EXACT_DIV_EXPR:
5875 /* If we can extract our operation from the LHS, do so and return a
5876 new operation. Likewise for the RHS from a MULT_EXPR. Otherwise,
5877 do something only if the second operand is a constant. */
5878 if (same_p
5879 && (t1 = extract_muldiv (op0, c, code, wide_type,
5880 strict_overflow_p)) != 0)
5881 return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
5882 fold_convert (ctype, op1));
5883 else if (tcode == MULT_EXPR && code == MULT_EXPR
5884 && (t1 = extract_muldiv (op1, c, code, wide_type,
5885 strict_overflow_p)) != 0)
5886 return fold_build2 (tcode, ctype, fold_convert (ctype, op0),
5887 fold_convert (ctype, t1));
5888 else if (TREE_CODE (op1) != INTEGER_CST)
5889 return 0;
5891 /* If these are the same operation types, we can associate them
5892 assuming no overflow. */
5893 if (tcode == code
5894 && 0 != (t1 = int_const_binop (MULT_EXPR,
5895 fold_convert (ctype, op1),
5896 fold_convert (ctype, c), 1))
5897 && 0 != (t1 = force_fit_type_double (ctype, tree_to_double_int (t1),
5898 (TYPE_UNSIGNED (ctype)
5899 && tcode != MULT_EXPR) ? -1 : 1,
5900 TREE_OVERFLOW (t1)))
5901 && !TREE_OVERFLOW (t1))
5902 return fold_build2 (tcode, ctype, fold_convert (ctype, op0), t1);
5904 /* If these operations "cancel" each other, we have the main
5905 optimizations of this pass, which occur when either constant is a
5906 multiple of the other, in which case we replace this with either an
5907 operation of CODE or TCODE.
5909 If we have an unsigned type that is not a sizetype, we cannot do
5910 this since it will change the result if the original computation
5911 overflowed. */
5912 if ((TYPE_OVERFLOW_UNDEFINED (ctype)
5913 || (TREE_CODE (ctype) == INTEGER_TYPE && TYPE_IS_SIZETYPE (ctype)))
5914 && ((code == MULT_EXPR && tcode == EXACT_DIV_EXPR)
5915 || (tcode == MULT_EXPR
5916 && code != TRUNC_MOD_EXPR && code != CEIL_MOD_EXPR
5917 && code != FLOOR_MOD_EXPR && code != ROUND_MOD_EXPR
5918 && code != MULT_EXPR)))
5920 if (integer_zerop (const_binop (TRUNC_MOD_EXPR, op1, c)))
5922 if (TYPE_OVERFLOW_UNDEFINED (ctype))
5923 *strict_overflow_p = true;
5924 return fold_build2 (tcode, ctype, fold_convert (ctype, op0),
5925 fold_convert (ctype,
5926 const_binop (TRUNC_DIV_EXPR,
5927 op1, c)));
5929 else if (integer_zerop (const_binop (TRUNC_MOD_EXPR, c, op1)))
5931 if (TYPE_OVERFLOW_UNDEFINED (ctype))
5932 *strict_overflow_p = true;
5933 return fold_build2 (code, ctype, fold_convert (ctype, op0),
5934 fold_convert (ctype,
5935 const_binop (TRUNC_DIV_EXPR,
5936 c, op1)));
5939 break;
5941 default:
5942 break;
5945 return 0;
5948 /* Return a node which has the indicated constant VALUE (either 0 or
5949 1), and is of the indicated TYPE. */
5951 tree
5952 constant_boolean_node (int value, tree type)
5954 if (type == integer_type_node)
5955 return value ? integer_one_node : integer_zero_node;
5956 else if (type == boolean_type_node)
5957 return value ? boolean_true_node : boolean_false_node;
5958 else
5959 return build_int_cst (type, value);
5963 /* Transform `a + (b ? x : y)' into `b ? (a + x) : (a + y)'.
5964 Transform `a + (x < y)' into `(x < y) ? (a + 1) : (a + 0)'. Here
5965 CODE corresponds to the `+', COND to the `(b ? x : y)' or `(x < y)'
5966 expression, and ARG to `a'. If COND_FIRST_P is nonzero, then the
5967 COND is the first argument to CODE; otherwise (as in the example
5968 given here), it is the second argument. TYPE is the type of the
5969 original expression. Return NULL_TREE if no simplification is
5970 possible. */
5972 static tree
5973 fold_binary_op_with_conditional_arg (location_t loc,
5974 enum tree_code code,
5975 tree type, tree op0, tree op1,
5976 tree cond, tree arg, int cond_first_p)
5978 tree cond_type = cond_first_p ? TREE_TYPE (op0) : TREE_TYPE (op1);
5979 tree arg_type = cond_first_p ? TREE_TYPE (op1) : TREE_TYPE (op0);
5980 tree test, true_value, false_value;
5981 tree lhs = NULL_TREE;
5982 tree rhs = NULL_TREE;
5984 if (TREE_CODE (cond) == COND_EXPR)
5986 test = TREE_OPERAND (cond, 0);
5987 true_value = TREE_OPERAND (cond, 1);
5988 false_value = TREE_OPERAND (cond, 2);
5989 /* If this operand is an expression with void type (such as a
5990 throw expression), it does not make sense to try to perform a
5991 logical or arithmetic operation involving it. */
5992 if (VOID_TYPE_P (TREE_TYPE (true_value)))
5993 lhs = true_value;
5994 if (VOID_TYPE_P (TREE_TYPE (false_value)))
5995 rhs = false_value;
5997 else
5999 tree testtype = TREE_TYPE (cond);
6000 test = cond;
6001 true_value = constant_boolean_node (true, testtype);
6002 false_value = constant_boolean_node (false, testtype);
6005 /* This transformation is only worthwhile if we don't have to wrap ARG
6006 in a SAVE_EXPR and the operation can be simplified on at least one
6007 of the branches once it's pushed inside the COND_EXPR. */
6008 if (!TREE_CONSTANT (arg)
6009 && (TREE_SIDE_EFFECTS (arg)
6010 || TREE_CONSTANT (true_value) || TREE_CONSTANT (false_value)))
6011 return NULL_TREE;
6013 arg = fold_convert_loc (loc, arg_type, arg);
6014 if (lhs == 0)
6016 true_value = fold_convert_loc (loc, cond_type, true_value);
6017 if (cond_first_p)
6018 lhs = fold_build2_loc (loc, code, type, true_value, arg);
6019 else
6020 lhs = fold_build2_loc (loc, code, type, arg, true_value);
6022 if (rhs == 0)
6024 false_value = fold_convert_loc (loc, cond_type, false_value);
6025 if (cond_first_p)
6026 rhs = fold_build2_loc (loc, code, type, false_value, arg);
6027 else
6028 rhs = fold_build2_loc (loc, code, type, arg, false_value);
6031 /* Check that we have simplified at least one of the branches. */
6032 if (!TREE_CONSTANT (arg) && !TREE_CONSTANT (lhs) && !TREE_CONSTANT (rhs))
6033 return NULL_TREE;
6035 return fold_build3_loc (loc, COND_EXPR, type, test, lhs, rhs);
6039 /* Subroutine of fold() that checks for the addition of +/- 0.0.
6041 If !NEGATE, return true if ADDEND is +/-0.0 and, for all X of type
6042 TYPE, X + ADDEND is the same as X. If NEGATE, return true if X -
6043 ADDEND is the same as X.
6045 X + 0 and X - 0 both give X when X is NaN, infinite, or nonzero
6046 and finite. The problematic cases are when X is zero, and its mode
6047 has signed zeros. In the case of rounding towards -infinity,
6048 X - 0 is not the same as X because 0 - 0 is -0. In other rounding
6049 modes, X + 0 is not the same as X because -0 + 0 is 0. */
6051 bool
6052 fold_real_zero_addition_p (const_tree type, const_tree addend, int negate)
6054 if (!real_zerop (addend))
6055 return false;
6057 /* Don't allow the fold with -fsignaling-nans. */
6058 if (HONOR_SNANS (TYPE_MODE (type)))
6059 return false;
6061 /* Allow the fold if zeros aren't signed, or their sign isn't important. */
6062 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (type)))
6063 return true;
6065 /* Treat x + -0 as x - 0 and x - -0 as x + 0. */
6066 if (TREE_CODE (addend) == REAL_CST
6067 && REAL_VALUE_MINUS_ZERO (TREE_REAL_CST (addend)))
6068 negate = !negate;
6070 /* The mode has signed zeros, and we have to honor their sign.
6071 In this situation, there is only one case we can return true for.
6072 X - 0 is the same as X unless rounding towards -infinity is
6073 supported. */
6074 return negate && !HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type));
6077 /* Subroutine of fold() that checks comparisons of built-in math
6078 functions against real constants.
6080 FCODE is the DECL_FUNCTION_CODE of the built-in, CODE is the comparison
6081 operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR, GE_EXPR or LE_EXPR. TYPE
6082 is the type of the result and ARG0 and ARG1 are the operands of the
6083 comparison. ARG1 must be a TREE_REAL_CST.
6085 The function returns the constant folded tree if a simplification
6086 can be made, and NULL_TREE otherwise. */
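/* For example, for a nonnegative constant y whose square is finite,
   sqrt (x) > y is simplified below to x > y*y.  */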
6088 static tree
6089 fold_mathfn_compare (location_t loc,
6090 enum built_in_function fcode, enum tree_code code,
6091 tree type, tree arg0, tree arg1)
6093 REAL_VALUE_TYPE c;
6095 if (BUILTIN_SQRT_P (fcode))
6097 tree arg = CALL_EXPR_ARG (arg0, 0);
6098 enum machine_mode mode = TYPE_MODE (TREE_TYPE (arg0));
6100 c = TREE_REAL_CST (arg1);
6101 if (REAL_VALUE_NEGATIVE (c))
6103 /* sqrt(x) == y, sqrt(x) < y and sqrt(x) <= y are always false if y is negative. */
6104 if (code == EQ_EXPR || code == LT_EXPR || code == LE_EXPR)
6105 return omit_one_operand_loc (loc, type, integer_zero_node, arg);
6107 /* sqrt(x) > y is always true, if y is negative and we
6108 don't care about NaNs, i.e. negative values of x. */
6109 if (code == NE_EXPR || !HONOR_NANS (mode))
6110 return omit_one_operand_loc (loc, type, integer_one_node, arg);
6112 /* sqrt(x) > y is the same as x >= 0, if y is negative. */
6113 return fold_build2_loc (loc, GE_EXPR, type, arg,
6114 build_real (TREE_TYPE (arg), dconst0));
6116 else if (code == GT_EXPR || code == GE_EXPR)
6118 REAL_VALUE_TYPE c2;
6120 REAL_ARITHMETIC (c2, MULT_EXPR, c, c);
6121 real_convert (&c2, mode, &c2);
6123 if (REAL_VALUE_ISINF (c2))
6125 /* sqrt(x) > y is x == +Inf, when y is very large. */
6126 if (HONOR_INFINITIES (mode))
6127 return fold_build2_loc (loc, EQ_EXPR, type, arg,
6128 build_real (TREE_TYPE (arg), c2));
6130 /* sqrt(x) > y is always false, when y is very large
6131 and we don't care about infinities. */
6132 return omit_one_operand_loc (loc, type, integer_zero_node, arg);
6135 /* sqrt(x) > c is the same as x > c*c. */
6136 return fold_build2_loc (loc, code, type, arg,
6137 build_real (TREE_TYPE (arg), c2));
6139 else if (code == LT_EXPR || code == LE_EXPR)
6141 REAL_VALUE_TYPE c2;
6143 REAL_ARITHMETIC (c2, MULT_EXPR, c, c);
6144 real_convert (&c2, mode, &c2);
6146 if (REAL_VALUE_ISINF (c2))
6148 /* sqrt(x) < y is always true, when y is a very large
6149 value and we don't care about NaNs or Infinities. */
6150 if (! HONOR_NANS (mode) && ! HONOR_INFINITIES (mode))
6151 return omit_one_operand_loc (loc, type, integer_one_node, arg);
6153 /* sqrt(x) < y is x != +Inf when y is very large and we
6154 don't care about NaNs. */
6155 if (! HONOR_NANS (mode))
6156 return fold_build2_loc (loc, NE_EXPR, type, arg,
6157 build_real (TREE_TYPE (arg), c2));
6159 /* sqrt(x) < y is x >= 0 when y is very large and we
6160 don't care about Infinities. */
6161 if (! HONOR_INFINITIES (mode))
6162 return fold_build2_loc (loc, GE_EXPR, type, arg,
6163 build_real (TREE_TYPE (arg), dconst0));
6165 /* sqrt(x) < y is x >= 0 && x != +Inf, when y is large. */
6166 if (lang_hooks.decls.global_bindings_p () != 0
6167 || CONTAINS_PLACEHOLDER_P (arg))
6168 return NULL_TREE;
6170 arg = save_expr (arg);
6171 return fold_build2_loc (loc, TRUTH_ANDIF_EXPR, type,
6172 fold_build2_loc (loc, GE_EXPR, type, arg,
6173 build_real (TREE_TYPE (arg),
6174 dconst0)),
6175 fold_build2_loc (loc, NE_EXPR, type, arg,
6176 build_real (TREE_TYPE (arg),
6177 c2)));
6180 /* sqrt(x) < c is the same as x < c*c, if we ignore NaNs. */
6181 if (! HONOR_NANS (mode))
6182 return fold_build2_loc (loc, code, type, arg,
6183 build_real (TREE_TYPE (arg), c2));
6185 /* sqrt(x) < c is the same as x >= 0 && x < c*c. */
6186 if (lang_hooks.decls.global_bindings_p () == 0
6187 && ! CONTAINS_PLACEHOLDER_P (arg))
6189 arg = save_expr (arg);
6190 return fold_build2_loc (loc, TRUTH_ANDIF_EXPR, type,
6191 fold_build2_loc (loc, GE_EXPR, type, arg,
6192 build_real (TREE_TYPE (arg),
6193 dconst0)),
6194 fold_build2_loc (loc, code, type, arg,
6195 build_real (TREE_TYPE (arg),
6196 c2)));
6201 return NULL_TREE;
6204 /* Subroutine of fold() that optimizes comparisons against Infinities,
6205 either +Inf or -Inf.
6207 CODE is the comparison operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR,
6208 GE_EXPR or LE_EXPR. TYPE is the type of the result and ARG0 and ARG1
6209 are the operands of the comparison. ARG1 must be a TREE_REAL_CST.
6211 The function returns the constant folded tree if a simplification
6212 can be made, and NULL_TREE otherwise. */
6214 static tree
6215 fold_inf_compare (location_t loc, enum tree_code code, tree type,
6216 tree arg0, tree arg1)
6218 enum machine_mode mode;
6219 REAL_VALUE_TYPE max;
6220 tree temp;
6221 bool neg;
6223 mode = TYPE_MODE (TREE_TYPE (arg0));
6225 /* For negative infinity swap the sense of the comparison. */
6226 neg = REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg1));
6227 if (neg)
6228 code = swap_tree_comparison (code);
6230 switch (code)
6232 case GT_EXPR:
6233 /* x > +Inf is always false, if we ignore sNaNs. */
6234 if (HONOR_SNANS (mode))
6235 return NULL_TREE;
6236 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
6238 case LE_EXPR:
6239 /* x <= +Inf is always true, if we don't care about NaNs. */
6240 if (! HONOR_NANS (mode))
6241 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
6243 /* x <= +Inf is the same as x == x, i.e. !isnan(x). */
6244 if (lang_hooks.decls.global_bindings_p () == 0
6245 && ! CONTAINS_PLACEHOLDER_P (arg0))
6247 arg0 = save_expr (arg0);
6248 return fold_build2_loc (loc, EQ_EXPR, type, arg0, arg0);
6250 break;
6252 case EQ_EXPR:
6253 case GE_EXPR:
6254 /* x == +Inf and x >= +Inf are always equal to x > DBL_MAX. */
6255 real_maxval (&max, neg, mode);
6256 return fold_build2_loc (loc, neg ? LT_EXPR : GT_EXPR, type,
6257 arg0, build_real (TREE_TYPE (arg0), max));
6259 case LT_EXPR:
6260 /* x < +Inf is always equal to x <= DBL_MAX. */
6261 real_maxval (&max, neg, mode);
6262 return fold_build2_loc (loc, neg ? GE_EXPR : LE_EXPR, type,
6263 arg0, build_real (TREE_TYPE (arg0), max));
6265 case NE_EXPR:
6266 /* x != +Inf is always equal to !(x > DBL_MAX). */
6267 real_maxval (&max, neg, mode);
6268 if (! HONOR_NANS (mode))
6269 return fold_build2_loc (loc, neg ? GE_EXPR : LE_EXPR, type,
6270 arg0, build_real (TREE_TYPE (arg0), max));
6272 temp = fold_build2_loc (loc, neg ? LT_EXPR : GT_EXPR, type,
6273 arg0, build_real (TREE_TYPE (arg0), max));
6274 return fold_build1_loc (loc, TRUTH_NOT_EXPR, type, temp);
6276 default:
6277 break;
6280 return NULL_TREE;
6283 /* Subroutine of fold() that optimizes comparisons of a division by
6284 a nonzero integer constant against an integer constant, i.e.
6285 X/C1 op C2.
6287 CODE is the comparison operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR,
6288 GE_EXPR or LE_EXPR. TYPE is the type of the result and ARG0 and ARG1
6289 are the operands of the comparison. ARG1 must be an INTEGER_CST.
6291 The function returns the constant folded tree if a simplification
6292 can be made, and NULL_TREE otherwise. */
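/* For example, with truncating division the test X/4 == 2 holds
   exactly for X in [8, 11], so it is folded below into the range
   check 8 <= X && X <= 11.  */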
6294 static tree
6295 fold_div_compare (location_t loc,
6296 enum tree_code code, tree type, tree arg0, tree arg1)
6298 tree prod, tmp, hi, lo;
6299 tree arg00 = TREE_OPERAND (arg0, 0);
6300 tree arg01 = TREE_OPERAND (arg0, 1);
6301 double_int val;
6302 bool unsigned_p = TYPE_UNSIGNED (TREE_TYPE (arg0));
6303 bool neg_overflow;
6304 int overflow;
6306 /* We have to do this the hard way to detect unsigned overflow.
6307 prod = int_const_binop (MULT_EXPR, arg01, arg1, 0); */
6308 overflow = mul_double_with_sign (TREE_INT_CST_LOW (arg01),
6309 TREE_INT_CST_HIGH (arg01),
6310 TREE_INT_CST_LOW (arg1),
6311 TREE_INT_CST_HIGH (arg1),
6312 &val.low, &val.high, unsigned_p);
6313 prod = force_fit_type_double (TREE_TYPE (arg00), val, -1, overflow);
6314 neg_overflow = false;
6316 if (unsigned_p)
6318 tmp = int_const_binop (MINUS_EXPR, arg01,
6319 build_int_cst (TREE_TYPE (arg01), 1), 0);
6320 lo = prod;
6322 /* Likewise hi = int_const_binop (PLUS_EXPR, prod, tmp, 0). */
6323 overflow = add_double_with_sign (TREE_INT_CST_LOW (prod),
6324 TREE_INT_CST_HIGH (prod),
6325 TREE_INT_CST_LOW (tmp),
6326 TREE_INT_CST_HIGH (tmp),
6327 &val.low, &val.high, unsigned_p);
6328 hi = force_fit_type_double (TREE_TYPE (arg00), val,
6329 -1, overflow | TREE_OVERFLOW (prod));
6331 else if (tree_int_cst_sgn (arg01) >= 0)
6333 tmp = int_const_binop (MINUS_EXPR, arg01,
6334 build_int_cst (TREE_TYPE (arg01), 1), 0);
6335 switch (tree_int_cst_sgn (arg1))
6337 case -1:
6338 neg_overflow = true;
6339 lo = int_const_binop (MINUS_EXPR, prod, tmp, 0);
6340 hi = prod;
6341 break;
6343 case 0:
6344 lo = fold_negate_const (tmp, TREE_TYPE (arg0));
6345 hi = tmp;
6346 break;
6348 case 1:
6349 hi = int_const_binop (PLUS_EXPR, prod, tmp, 0);
6350 lo = prod;
6351 break;
6353 default:
6354 gcc_unreachable ();
6357 else
6359 /* A negative divisor reverses the relational operators. */
6360 code = swap_tree_comparison (code);
6362 tmp = int_const_binop (PLUS_EXPR, arg01,
6363 build_int_cst (TREE_TYPE (arg01), 1), 0);
6364 switch (tree_int_cst_sgn (arg1))
6366 case -1:
6367 hi = int_const_binop (MINUS_EXPR, prod, tmp, 0);
6368 lo = prod;
6369 break;
6371 case 0:
6372 hi = fold_negate_const (tmp, TREE_TYPE (arg0));
6373 lo = tmp;
6374 break;
6376 case 1:
6377 neg_overflow = true;
6378 lo = int_const_binop (PLUS_EXPR, prod, tmp, 0);
6379 hi = prod;
6380 break;
6382 default:
6383 gcc_unreachable ();
6387 switch (code)
6389 case EQ_EXPR:
6390 if (TREE_OVERFLOW (lo) && TREE_OVERFLOW (hi))
6391 return omit_one_operand_loc (loc, type, integer_zero_node, arg00);
6392 if (TREE_OVERFLOW (hi))
6393 return fold_build2_loc (loc, GE_EXPR, type, arg00, lo);
6394 if (TREE_OVERFLOW (lo))
6395 return fold_build2_loc (loc, LE_EXPR, type, arg00, hi);
6396 return build_range_check (loc, type, arg00, 1, lo, hi);
6398 case NE_EXPR:
6399 if (TREE_OVERFLOW (lo) && TREE_OVERFLOW (hi))
6400 return omit_one_operand_loc (loc, type, integer_one_node, arg00);
6401 if (TREE_OVERFLOW (hi))
6402 return fold_build2_loc (loc, LT_EXPR, type, arg00, lo);
6403 if (TREE_OVERFLOW (lo))
6404 return fold_build2_loc (loc, GT_EXPR, type, arg00, hi);
6405 return build_range_check (loc, type, arg00, 0, lo, hi);
6407 case LT_EXPR:
6408 if (TREE_OVERFLOW (lo))
6410 tmp = neg_overflow ? integer_zero_node : integer_one_node;
6411 return omit_one_operand_loc (loc, type, tmp, arg00);
6413 return fold_build2_loc (loc, LT_EXPR, type, arg00, lo);
6415 case LE_EXPR:
6416 if (TREE_OVERFLOW (hi))
6418 tmp = neg_overflow ? integer_zero_node : integer_one_node;
6419 return omit_one_operand_loc (loc, type, tmp, arg00);
6421 return fold_build2_loc (loc, LE_EXPR, type, arg00, hi);
6423 case GT_EXPR:
6424 if (TREE_OVERFLOW (hi))
6426 tmp = neg_overflow ? integer_one_node : integer_zero_node;
6427 return omit_one_operand_loc (loc, type, tmp, arg00);
6429 return fold_build2_loc (loc, GT_EXPR, type, arg00, hi);
6431 case GE_EXPR:
6432 if (TREE_OVERFLOW (lo))
6434 tmp = neg_overflow ? integer_one_node : integer_zero_node;
6435 return omit_one_operand_loc (loc, type, tmp, arg00);
6437 return fold_build2_loc (loc, GE_EXPR, type, arg00, lo);
6439 default:
6440 break;
6443 return NULL_TREE;
6447 /* If CODE with arguments ARG0 and ARG1 represents a single bit
6448 equality/inequality test, then return a simplified form of the test
6449 using a sign test. Otherwise return NULL. RESULT_TYPE is the
6450 desired result type. */
6452 static tree
6453 fold_single_bit_test_into_sign_test (location_t loc,
6454 enum tree_code code, tree arg0, tree arg1,
6455 tree result_type)
6457 /* If this is testing a single bit, we can optimize the test. */
6458 if ((code == NE_EXPR || code == EQ_EXPR)
6459 && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
6460 && integer_pow2p (TREE_OPERAND (arg0, 1)))
6462 /* If we have (A & C) != 0 where C is the sign bit of A, convert
6463 this into A < 0. Similarly for (A & C) == 0 into A >= 0. */
6464 tree arg00 = sign_bit_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg0, 1));
6466 if (arg00 != NULL_TREE
6467 /* This is only a win if casting to a signed type is cheap,
6468 i.e. when arg00's type is not a partial mode. */
6469 && TYPE_PRECISION (TREE_TYPE (arg00))
6470 == GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (arg00))))
6472 tree stype = signed_type_for (TREE_TYPE (arg00));
6473 return fold_build2_loc (loc, code == EQ_EXPR ? GE_EXPR : LT_EXPR,
6474 result_type,
6475 fold_convert_loc (loc, stype, arg00),
6476 build_int_cst (stype, 0));
6480 return NULL_TREE;
6483 /* If CODE with arguments ARG0 and ARG1 represents a single bit
6484 equality/inequality test, then return a simplified form of
6485 the test using shifts and logical operations. Otherwise return
6486 NULL. RESULT_TYPE is the desired result type. */
6488 tree
6489 fold_single_bit_test (location_t loc, enum tree_code code,
6490 tree arg0, tree arg1, tree result_type)
6492 /* If this is testing a single bit, we can optimize the test. */
6493 if ((code == NE_EXPR || code == EQ_EXPR)
6494 && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
6495 && integer_pow2p (TREE_OPERAND (arg0, 1)))
6497 tree inner = TREE_OPERAND (arg0, 0);
6498 tree type = TREE_TYPE (arg0);
6499 int bitnum = tree_log2 (TREE_OPERAND (arg0, 1));
6500 enum machine_mode operand_mode = TYPE_MODE (type);
6501 int ops_unsigned;
6502 tree signed_type, unsigned_type, intermediate_type;
6503 tree tem, one;
6505 /* First, see if we can fold the single bit test into a sign-bit
6506 test. */
6507 tem = fold_single_bit_test_into_sign_test (loc, code, arg0, arg1,
6508 result_type);
6509 if (tem)
6510 return tem;
6512 /* Otherwise we have (A & C) != 0 where C is a single bit,
6513 convert that into ((A >> C2) & 1), where C2 = log2(C).
6514 Similarly for (A & C) == 0. */
6516 /* If INNER is a right shift of a constant and it plus BITNUM does
6517 not overflow, adjust BITNUM and INNER. */
6518 if (TREE_CODE (inner) == RSHIFT_EXPR
6519 && TREE_CODE (TREE_OPERAND (inner, 1)) == INTEGER_CST
6520 && TREE_INT_CST_HIGH (TREE_OPERAND (inner, 1)) == 0
6521 && bitnum < TYPE_PRECISION (type)
6522 && 0 > compare_tree_int (TREE_OPERAND (inner, 1),
6523 bitnum - TYPE_PRECISION (type)))
6525 bitnum += TREE_INT_CST_LOW (TREE_OPERAND (inner, 1));
6526 inner = TREE_OPERAND (inner, 0);
6529 /* If we are going to be able to omit the AND below, we must do our
6530 operations as unsigned. If we must use the AND, we have a choice.
6531 Normally unsigned is faster, but for some machines signed is. */
6532 #ifdef LOAD_EXTEND_OP
6533 ops_unsigned = (LOAD_EXTEND_OP (operand_mode) == SIGN_EXTEND
6534 && !flag_syntax_only) ? 0 : 1;
6535 #else
6536 ops_unsigned = 1;
6537 #endif
6539 signed_type = lang_hooks.types.type_for_mode (operand_mode, 0);
6540 unsigned_type = lang_hooks.types.type_for_mode (operand_mode, 1);
6541 intermediate_type = ops_unsigned ? unsigned_type : signed_type;
6542 inner = fold_convert_loc (loc, intermediate_type, inner);
6544 if (bitnum != 0)
6545 inner = build2 (RSHIFT_EXPR, intermediate_type,
6546 inner, size_int (bitnum));
6548 one = build_int_cst (intermediate_type, 1);
6550 if (code == EQ_EXPR)
6551 inner = fold_build2_loc (loc, BIT_XOR_EXPR, intermediate_type, inner, one);
6553 /* Put the AND last so it can combine with more things. */
6554 inner = build2 (BIT_AND_EXPR, intermediate_type, inner, one);
6556 /* Make sure to return the proper type. */
6557 inner = fold_convert_loc (loc, result_type, inner);
6559 return inner;
6561 return NULL_TREE;
6564 /* Check whether we are allowed to reorder operands arg0 and arg1,
6565 such that the evaluation of arg1 occurs before arg0. */
6567 static bool
6568 reorder_operands_p (const_tree arg0, const_tree arg1)
6570 if (! flag_evaluation_order)
6571 return true;
6572 if (TREE_CONSTANT (arg0) || TREE_CONSTANT (arg1))
6573 return true;
6574 return ! TREE_SIDE_EFFECTS (arg0)
6575 && ! TREE_SIDE_EFFECTS (arg1);
6578 /* Test whether it is preferable to swap two operands, ARG0 and
6579 ARG1, for example because ARG0 is an integer constant and ARG1
6580 isn't. If REORDER is true, only recommend swapping if we can
6581 evaluate the operands in reverse order. */
6583 bool
6584 tree_swap_operands_p (const_tree arg0, const_tree arg1, bool reorder)
6586 STRIP_SIGN_NOPS (arg0);
6587 STRIP_SIGN_NOPS (arg1);
6589 if (TREE_CODE (arg1) == INTEGER_CST)
6590 return 0;
6591 if (TREE_CODE (arg0) == INTEGER_CST)
6592 return 1;
6594 if (TREE_CODE (arg1) == REAL_CST)
6595 return 0;
6596 if (TREE_CODE (arg0) == REAL_CST)
6597 return 1;
6599 if (TREE_CODE (arg1) == FIXED_CST)
6600 return 0;
6601 if (TREE_CODE (arg0) == FIXED_CST)
6602 return 1;
6604 if (TREE_CODE (arg1) == COMPLEX_CST)
6605 return 0;
6606 if (TREE_CODE (arg0) == COMPLEX_CST)
6607 return 1;
6609 if (TREE_CONSTANT (arg1))
6610 return 0;
6611 if (TREE_CONSTANT (arg0))
6612 return 1;
6614 if (optimize_function_for_size_p (cfun))
6615 return 0;
6617 if (reorder && flag_evaluation_order
6618 && (TREE_SIDE_EFFECTS (arg0) || TREE_SIDE_EFFECTS (arg1)))
6619 return 0;
6621 /* It is preferable to swap two SSA_NAMEs to ensure a canonical form
6622 for commutative and comparison operators. Ensuring a canonical
6623 form allows the optimizers to find additional redundancies without
6624 having to explicitly check for both orderings. */
6625 if (TREE_CODE (arg0) == SSA_NAME
6626 && TREE_CODE (arg1) == SSA_NAME
6627 && SSA_NAME_VERSION (arg0) > SSA_NAME_VERSION (arg1))
6628 return 1;
6630 /* Put SSA_NAMEs last. */
6631 if (TREE_CODE (arg1) == SSA_NAME)
6632 return 0;
6633 if (TREE_CODE (arg0) == SSA_NAME)
6634 return 1;
6636 /* Put variables last. */
6637 if (DECL_P (arg1))
6638 return 0;
6639 if (DECL_P (arg0))
6640 return 1;
6642 return 0;
6645 /* Fold comparison ARG0 CODE ARG1 (with result in TYPE), where
6646 ARG0 is extended to a wider type. */
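/* For example, if C has type signed char, the test (int) C == 1000
   is folded to constant false below, since 1000 does not fit in the
   narrower type.  */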
6648 static tree
6649 fold_widened_comparison (location_t loc, enum tree_code code,
6650 tree type, tree arg0, tree arg1)
6652 tree arg0_unw = get_unwidened (arg0, NULL_TREE);
6653 tree arg1_unw;
6654 tree shorter_type, outer_type;
6655 tree min, max;
6656 bool above, below;
6658 if (arg0_unw == arg0)
6659 return NULL_TREE;
6660 shorter_type = TREE_TYPE (arg0_unw);
6662 #ifdef HAVE_canonicalize_funcptr_for_compare
6663 /* Disable this optimization if we're casting a function pointer
6664 type on targets that require function pointer canonicalization. */
6665 if (HAVE_canonicalize_funcptr_for_compare
6666 && TREE_CODE (shorter_type) == POINTER_TYPE
6667 && TREE_CODE (TREE_TYPE (shorter_type)) == FUNCTION_TYPE)
6668 return NULL_TREE;
6669 #endif
6671 if (TYPE_PRECISION (TREE_TYPE (arg0)) <= TYPE_PRECISION (shorter_type))
6672 return NULL_TREE;
6674 arg1_unw = get_unwidened (arg1, NULL_TREE);
6676 /* If possible, express the comparison in the shorter mode. */
6677 if ((code == EQ_EXPR || code == NE_EXPR
6678 || TYPE_UNSIGNED (TREE_TYPE (arg0)) == TYPE_UNSIGNED (shorter_type))
6679 && (TREE_TYPE (arg1_unw) == shorter_type
6680 || ((TYPE_PRECISION (shorter_type)
6681 >= TYPE_PRECISION (TREE_TYPE (arg1_unw)))
6682 && (TYPE_UNSIGNED (shorter_type)
6683 == TYPE_UNSIGNED (TREE_TYPE (arg1_unw))))
6684 || (TREE_CODE (arg1_unw) == INTEGER_CST
6685 && (TREE_CODE (shorter_type) == INTEGER_TYPE
6686 || TREE_CODE (shorter_type) == BOOLEAN_TYPE)
6687 && int_fits_type_p (arg1_unw, shorter_type))))
6688 return fold_build2_loc (loc, code, type, arg0_unw,
6689 fold_convert_loc (loc, shorter_type, arg1_unw));
6691 if (TREE_CODE (arg1_unw) != INTEGER_CST
6692 || TREE_CODE (shorter_type) != INTEGER_TYPE
6693 || !int_fits_type_p (arg1_unw, shorter_type))
6694 return NULL_TREE;
6696 /* If we are comparing with an integer that does not fit into the range
6697 of the shorter type, the result is known. */
6698 outer_type = TREE_TYPE (arg1_unw);
6699 min = lower_bound_in_type (outer_type, shorter_type);
6700 max = upper_bound_in_type (outer_type, shorter_type);
6702 above = integer_nonzerop (fold_relational_const (LT_EXPR, type,
6703 max, arg1_unw));
6704 below = integer_nonzerop (fold_relational_const (LT_EXPR, type,
6705 arg1_unw, min));
6707 switch (code)
6709 case EQ_EXPR:
6710 if (above || below)
6711 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
6712 break;
6714 case NE_EXPR:
6715 if (above || below)
6716 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
6717 break;
6719 case LT_EXPR:
6720 case LE_EXPR:
6721 if (above)
6722 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
6723 else if (below)
6724 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
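/* ... fall through ... */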
6726 case GT_EXPR:
6727 case GE_EXPR:
6728 if (above)
6729 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
6730 else if (below)
6731 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
6733 default:
6734 break;
6737 return NULL_TREE;
6740 /* Fold comparison ARG0 CODE ARG1 (with result in TYPE), where for
6741 ARG0 just the signedness is changed. */
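/* For example, with I of type int, (unsigned int) I == 5 is reduced
   below to I == 5; the sign change cannot affect equality because
   the precision is unchanged.  */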
6743 static tree
6744 fold_sign_changed_comparison (location_t loc, enum tree_code code, tree type,
6745 tree arg0, tree arg1)
6747 tree arg0_inner;
6748 tree inner_type, outer_type;
6750 if (!CONVERT_EXPR_P (arg0))
6751 return NULL_TREE;
6753 outer_type = TREE_TYPE (arg0);
6754 arg0_inner = TREE_OPERAND (arg0, 0);
6755 inner_type = TREE_TYPE (arg0_inner);
6757 #ifdef HAVE_canonicalize_funcptr_for_compare
6758 /* Disable this optimization if we're casting a function pointer
6759 type on targets that require function pointer canonicalization. */
6760 if (HAVE_canonicalize_funcptr_for_compare
6761 && TREE_CODE (inner_type) == POINTER_TYPE
6762 && TREE_CODE (TREE_TYPE (inner_type)) == FUNCTION_TYPE)
6763 return NULL_TREE;
6764 #endif
6766 if (TYPE_PRECISION (inner_type) != TYPE_PRECISION (outer_type))
6767 return NULL_TREE;
6769 if (TREE_CODE (arg1) != INTEGER_CST
6770 && !(CONVERT_EXPR_P (arg1)
6771 && TREE_TYPE (TREE_OPERAND (arg1, 0)) == inner_type))
6772 return NULL_TREE;
6774 if ((TYPE_UNSIGNED (inner_type) != TYPE_UNSIGNED (outer_type)
6775 || POINTER_TYPE_P (inner_type) != POINTER_TYPE_P (outer_type))
6776 && code != NE_EXPR
6777 && code != EQ_EXPR)
6778 return NULL_TREE;
6780 if (TREE_CODE (arg1) == INTEGER_CST)
6781 arg1 = force_fit_type_double (inner_type, tree_to_double_int (arg1),
6782 0, TREE_OVERFLOW (arg1));
6783 else
6784 arg1 = fold_convert_loc (loc, inner_type, arg1);
6786 return fold_build2_loc (loc, code, type, arg0_inner, arg1);
6789 /* Tries to replace &a[idx] p+ s * delta with &a[idx + delta], if s is
6790 the step of the array. Reconstructs s and delta in the case of s *
6791 delta being an integer constant (and thus already folded). ADDR is
6792 the address. OP1 is the multiplicative expression. If the
6793 function succeeds, the new address expression is returned.
6794 Otherwise NULL_TREE is returned. LOC is the location of the
6795 resulting expression. */
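/* For example, for an array A of 4-byte elements, &A[I] p+ D * 4 is
   rewritten as &A[I + D], since 4 is the step of the array.  */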
6797 static tree
6798 try_move_mult_to_index (location_t loc, tree addr, tree op1)
6800 tree s, delta, step;
6801 tree ref = TREE_OPERAND (addr, 0), pref;
6802 tree ret, pos;
6803 tree itype;
6804 bool mdim = false;
6806 /* Strip the nops that might be added when converting op1 to sizetype. */
6807 STRIP_NOPS (op1);
6809 /* Canonicalize op1 into a possibly non-constant delta
6810 and an INTEGER_CST s. */
6811 if (TREE_CODE (op1) == MULT_EXPR)
6813 tree arg0 = TREE_OPERAND (op1, 0), arg1 = TREE_OPERAND (op1, 1);
6815 STRIP_NOPS (arg0);
6816 STRIP_NOPS (arg1);
6818 if (TREE_CODE (arg0) == INTEGER_CST)
6820 s = arg0;
6821 delta = arg1;
6823 else if (TREE_CODE (arg1) == INTEGER_CST)
6825 s = arg1;
6826 delta = arg0;
6828 else
6829 return NULL_TREE;
6831 else if (TREE_CODE (op1) == INTEGER_CST)
6833 delta = op1;
6834 s = NULL_TREE;
6836 else
6838 /* Act as if op1 were delta * 1. */
6839 delta = op1;
6840 s = integer_one_node;
6843 for (;; ref = TREE_OPERAND (ref, 0))
6845 if (TREE_CODE (ref) == ARRAY_REF)
6847 tree domain;
6849 /* Remember if this was a multi-dimensional array. */
6850 if (TREE_CODE (TREE_OPERAND (ref, 0)) == ARRAY_REF)
6851 mdim = true;
6853 domain = TYPE_DOMAIN (TREE_TYPE (TREE_OPERAND (ref, 0)));
6854 if (! domain)
6855 continue;
6856 itype = TREE_TYPE (domain);
6858 step = array_ref_element_size (ref);
6859 if (TREE_CODE (step) != INTEGER_CST)
6860 continue;
6862 if (s)
6864 if (! tree_int_cst_equal (step, s))
6865 continue;
6867 else
6869 /* Check whether delta is a multiple of step. */
6870 tree tmp = div_if_zero_remainder (EXACT_DIV_EXPR, op1, step);
6871 if (! tmp)
6872 continue;
6873 delta = tmp;
6876 /* Only fold here if we can verify we do not overflow one
6877 dimension of a multi-dimensional array. */
6878 if (mdim)
6880 tree tmp;
6882 if (TREE_CODE (TREE_OPERAND (ref, 1)) != INTEGER_CST
6883 || !TYPE_MAX_VALUE (domain)
6884 || TREE_CODE (TYPE_MAX_VALUE (domain)) != INTEGER_CST)
6885 continue;
6887 tmp = fold_binary_loc (loc, PLUS_EXPR, itype,
6888 fold_convert_loc (loc, itype,
6889 TREE_OPERAND (ref, 1)),
6890 fold_convert_loc (loc, itype, delta));
6891 if (!tmp
6892 || TREE_CODE (tmp) != INTEGER_CST
6893 || tree_int_cst_lt (TYPE_MAX_VALUE (domain), tmp))
6894 continue;
6897 break;
6899 else
6900 mdim = false;
6902 if (!handled_component_p (ref))
6903 return NULL_TREE;
6906 /* We found a suitable array reference. Copy everything up to it,
6907 and replace the index. */
6909 pref = TREE_OPERAND (addr, 0);
6910 ret = copy_node (pref);
6911 SET_EXPR_LOCATION (ret, loc);
6912 pos = ret;
6914 while (pref != ref)
6916 pref = TREE_OPERAND (pref, 0);
6917 TREE_OPERAND (pos, 0) = copy_node (pref);
6918 pos = TREE_OPERAND (pos, 0);
6921 TREE_OPERAND (pos, 1) = fold_build2_loc (loc, PLUS_EXPR, itype,
6922 fold_convert_loc (loc, itype,
6923 TREE_OPERAND (pos, 1)),
6924 fold_convert_loc (loc, itype, delta));
6926 return fold_build1_loc (loc, ADDR_EXPR, TREE_TYPE (addr), ret);
6930 /* Fold A < X && A + 1 > Y to A < X && A >= Y. Normally A + 1 > Y
6931 means A >= Y && A != MAX, but in this case we know that
6932 A < X <= MAX. INEQ is A + 1 > Y, BOUND is A < X. */
6934 static tree
6935 fold_to_nonsharp_ineq_using_bound (location_t loc, tree ineq, tree bound)
6937 tree a, typea, type = TREE_TYPE (ineq), a1, diff, y;
6939 if (TREE_CODE (bound) == LT_EXPR)
6940 a = TREE_OPERAND (bound, 0);
6941 else if (TREE_CODE (bound) == GT_EXPR)
6942 a = TREE_OPERAND (bound, 1);
6943 else
6944 return NULL_TREE;
6946 typea = TREE_TYPE (a);
6947 if (!INTEGRAL_TYPE_P (typea)
6948 && !POINTER_TYPE_P (typea))
6949 return NULL_TREE;
6951 if (TREE_CODE (ineq) == LT_EXPR)
6953 a1 = TREE_OPERAND (ineq, 1);
6954 y = TREE_OPERAND (ineq, 0);
6956 else if (TREE_CODE (ineq) == GT_EXPR)
6958 a1 = TREE_OPERAND (ineq, 0);
6959 y = TREE_OPERAND (ineq, 1);
6961 else
6962 return NULL_TREE;
6964 if (TREE_TYPE (a1) != typea)
6965 return NULL_TREE;
6967 if (POINTER_TYPE_P (typea))
6969 /* Convert the pointers to integers before taking the difference. */
6970 tree ta = fold_convert_loc (loc, ssizetype, a);
6971 tree ta1 = fold_convert_loc (loc, ssizetype, a1);
6972 diff = fold_binary_loc (loc, MINUS_EXPR, ssizetype, ta1, ta);
6974 else
6975 diff = fold_binary_loc (loc, MINUS_EXPR, typea, a1, a);
6977 if (!diff || !integer_onep (diff))
6978 return NULL_TREE;
6980 return fold_build2_loc (loc, GE_EXPR, type, a, y);
6983 /* Fold a sum or difference of at least one multiplication.
6984 Returns the folded tree or NULL if no simplification could be made. */
6986 static tree
6987 fold_plusminus_mult_expr (location_t loc, enum tree_code code, tree type,
6988 tree arg0, tree arg1)
6990 tree arg00, arg01, arg10, arg11;
6991 tree alt0 = NULL_TREE, alt1 = NULL_TREE, same;
6993 /* (A * C) +- (B * C) -> (A+-B) * C.
6994 (A * C) +- A -> A * (C+-1).
6995 We are most concerned about the case where C is a constant,
6996 but other combinations show up during loop reduction. Since
6997 it is not difficult, try all four possibilities. */
6999 if (TREE_CODE (arg0) == MULT_EXPR)
7001 arg00 = TREE_OPERAND (arg0, 0);
7002 arg01 = TREE_OPERAND (arg0, 1);
7004 else if (TREE_CODE (arg0) == INTEGER_CST)
7006 arg00 = build_one_cst (type);
7007 arg01 = arg0;
7009 else
7011 /* We cannot generate constant 1 for fract. */
7012 if (ALL_FRACT_MODE_P (TYPE_MODE (type)))
7013 return NULL_TREE;
7014 arg00 = arg0;
7015 arg01 = build_one_cst (type);
7017 if (TREE_CODE (arg1) == MULT_EXPR)
7019 arg10 = TREE_OPERAND (arg1, 0);
7020 arg11 = TREE_OPERAND (arg1, 1);
7022 else if (TREE_CODE (arg1) == INTEGER_CST)
7024 arg10 = build_one_cst (type);
7025 /* As we canonicalize A - 2 to A + -2, get rid of that sign for
7026 the purpose of this canonicalization. */
7027 if (TREE_INT_CST_HIGH (arg1) == -1
7028 && negate_expr_p (arg1)
7029 && code == PLUS_EXPR)
7031 arg11 = negate_expr (arg1);
7032 code = MINUS_EXPR;
7034 else
7035 arg11 = arg1;
7037 else
7039 /* We cannot generate constant 1 for fract. */
7040 if (ALL_FRACT_MODE_P (TYPE_MODE (type)))
7041 return NULL_TREE;
7042 arg10 = arg1;
7043 arg11 = build_one_cst (type);
7045 same = NULL_TREE;
7047 if (operand_equal_p (arg01, arg11, 0))
7048 same = arg01, alt0 = arg00, alt1 = arg10;
7049 else if (operand_equal_p (arg00, arg10, 0))
7050 same = arg00, alt0 = arg01, alt1 = arg11;
7051 else if (operand_equal_p (arg00, arg11, 0))
7052 same = arg00, alt0 = arg01, alt1 = arg10;
7053 else if (operand_equal_p (arg01, arg10, 0))
7054 same = arg01, alt0 = arg00, alt1 = arg11;
7056 /* No identical multiplicands; see if we can find a common
7057 power-of-two factor in non-power-of-two multiplies. This
7058 can help in multi-dimensional array access. */
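/* E.g. I*12 + J*4 becomes (I*3 + J)*4 here, exposing the common
   power-of-two factor 4.  */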
7059 else if (host_integerp (arg01, 0)
7060 && host_integerp (arg11, 0))
7062 HOST_WIDE_INT int01, int11, tmp;
7063 bool swap = false;
7064 tree maybe_same;
7065 int01 = TREE_INT_CST_LOW (arg01);
7066 int11 = TREE_INT_CST_LOW (arg11);
7068 /* Move min of absolute values to int11. */
7069 if ((int01 >= 0 ? int01 : -int01)
7070 < (int11 >= 0 ? int11 : -int11))
7072 tmp = int01, int01 = int11, int11 = tmp;
7073 alt0 = arg00, arg00 = arg10, arg10 = alt0;
7074 maybe_same = arg01;
7075 swap = true;
7077 else
7078 maybe_same = arg11;
7080 if (exact_log2 (abs (int11)) > 0 && int01 % int11 == 0
7081 /* The remainder should not be a constant, otherwise we
7082 end up folding i * 4 + 2 to (i * 2 + 1) * 2, which would
7083 increase the number of multiplications necessary. */
7084 && TREE_CODE (arg10) != INTEGER_CST)
7086 alt0 = fold_build2_loc (loc, MULT_EXPR, TREE_TYPE (arg00), arg00,
7087 build_int_cst (TREE_TYPE (arg00),
7088 int01 / int11));
7089 alt1 = arg10;
7090 same = maybe_same;
7091 if (swap)
7092 maybe_same = alt0, alt0 = alt1, alt1 = maybe_same;
7096 if (same)
7097 return fold_build2_loc (loc, MULT_EXPR, type,
7098 fold_build2_loc (loc, code, type,
7099 fold_convert_loc (loc, type, alt0),
7100 fold_convert_loc (loc, type, alt1)),
7101 fold_convert_loc (loc, type, same));
7103 return NULL_TREE;
7106 /* Subroutine of native_encode_expr. Encode the INTEGER_CST
7107 specified by EXPR into the buffer PTR of length LEN bytes.
7108 Return the number of bytes placed in the buffer, or zero
7109 upon failure. */
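/* E.g. the 32-bit constant 0x01020304 is emitted as the bytes
   04 03 02 01 on a little-endian target and as 01 02 03 04 on a
   big-endian one.  */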
7111 static int
7112 native_encode_int (const_tree expr, unsigned char *ptr, int len)
7114 tree type = TREE_TYPE (expr);
7115 int total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
7116 int byte, offset, word, words;
7117 unsigned char value;
7119 if (total_bytes > len)
7120 return 0;
7121 words = total_bytes / UNITS_PER_WORD;
7123 for (byte = 0; byte < total_bytes; byte++)
7125 int bitpos = byte * BITS_PER_UNIT;
7126 if (bitpos < HOST_BITS_PER_WIDE_INT)
7127 value = (unsigned char) (TREE_INT_CST_LOW (expr) >> bitpos);
7128 else
7129 value = (unsigned char) (TREE_INT_CST_HIGH (expr)
7130 >> (bitpos - HOST_BITS_PER_WIDE_INT));
7132 if (total_bytes > UNITS_PER_WORD)
7134 word = byte / UNITS_PER_WORD;
7135 if (WORDS_BIG_ENDIAN)
7136 word = (words - 1) - word;
7137 offset = word * UNITS_PER_WORD;
7138 if (BYTES_BIG_ENDIAN)
7139 offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
7140 else
7141 offset += byte % UNITS_PER_WORD;
7143 else
7144 offset = BYTES_BIG_ENDIAN ? (total_bytes - 1) - byte : byte;
7145 ptr[offset] = value;
7147 return total_bytes;
7151 /* Subroutine of native_encode_expr. Encode the REAL_CST
7152 specified by EXPR into the buffer PTR of length LEN bytes.
7153 Return the number of bytes placed in the buffer, or zero
7154 upon failure. */
7156 static int
7157 native_encode_real (const_tree expr, unsigned char *ptr, int len)
7159 tree type = TREE_TYPE (expr);
7160 int total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
7161 int byte, offset, word, words, bitpos;
7162 unsigned char value;
7164 /* There are always 32 bits in each long, no matter the size of
7165 the host's long. We handle floating point representations with
7166 up to 192 bits. */
7167 long tmp[6];
7169 if (total_bytes > len)
7170 return 0;
7171 words = (32 / BITS_PER_UNIT) / UNITS_PER_WORD;
7173 real_to_target (tmp, TREE_REAL_CST_PTR (expr), TYPE_MODE (type));
7175 for (bitpos = 0; bitpos < total_bytes * BITS_PER_UNIT;
7176 bitpos += BITS_PER_UNIT)
7178 byte = (bitpos / BITS_PER_UNIT) & 3;
7179 value = (unsigned char) (tmp[bitpos / 32] >> (bitpos & 31));
7181 if (UNITS_PER_WORD < 4)
7183 word = byte / UNITS_PER_WORD;
7184 if (WORDS_BIG_ENDIAN)
7185 word = (words - 1) - word;
7186 offset = word * UNITS_PER_WORD;
7187 if (BYTES_BIG_ENDIAN)
7188 offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
7189 else
7190 offset += byte % UNITS_PER_WORD;
7192 else
7193 offset = BYTES_BIG_ENDIAN ? 3 - byte : byte;
7194 ptr[offset + ((bitpos / BITS_PER_UNIT) & ~3)] = value;
7196 return total_bytes;
7199 /* Subroutine of native_encode_expr. Encode the COMPLEX_CST
7200 specified by EXPR into the buffer PTR of length LEN bytes.
7201 Return the number of bytes placed in the buffer, or zero
7202 upon failure. */
7204 static int
7205 native_encode_complex (const_tree expr, unsigned char *ptr, int len)
7207 int rsize, isize;
7208 tree part;
7210 part = TREE_REALPART (expr);
7211 rsize = native_encode_expr (part, ptr, len);
7212 if (rsize == 0)
7213 return 0;
7214 part = TREE_IMAGPART (expr);
7215 isize = native_encode_expr (part, ptr+rsize, len-rsize);
7216 if (isize != rsize)
7217 return 0;
7218 return rsize + isize;
7222 /* Subroutine of native_encode_expr. Encode the VECTOR_CST
7223 specified by EXPR into the buffer PTR of length LEN bytes.
7224 Return the number of bytes placed in the buffer, or zero
7225 upon failure. */
7227 static int
7228 native_encode_vector (const_tree expr, unsigned char *ptr, int len)
7230 int i, size, offset, count;
7231 tree itype, elem, elements;
7233 offset = 0;
7234 elements = TREE_VECTOR_CST_ELTS (expr);
7235 count = TYPE_VECTOR_SUBPARTS (TREE_TYPE (expr));
7236 itype = TREE_TYPE (TREE_TYPE (expr));
7237 size = GET_MODE_SIZE (TYPE_MODE (itype));
7238 for (i = 0; i < count; i++)
7240 if (elements)
7242 elem = TREE_VALUE (elements);
7243 elements = TREE_CHAIN (elements);
7245 else
7246 elem = NULL_TREE;
7248 if (elem)
7250 if (native_encode_expr (elem, ptr+offset, len-offset) != size)
7251 return 0;
7253 else
7255 if (offset + size > len)
7256 return 0;
7257 memset (ptr+offset, 0, size);
7259 offset += size;
7261 return offset;
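/* Note that TREE_VECTOR_CST_ELTS may list fewer than
   TYPE_VECTOR_SUBPARTS elements, since trailing zero elements can be
   omitted; the memset above encodes those implicit elements as zero
   bytes so the buffer always covers the whole vector.  */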
7265 /* Subroutine of native_encode_expr. Encode the STRING_CST
7266 specified by EXPR into the buffer PTR of length LEN bytes.
7267 Return the number of bytes placed in the buffer, or zero
7268 upon failure. */
7270 static int
7271 native_encode_string (const_tree expr, unsigned char *ptr, int len)
7273 tree type = TREE_TYPE (expr);
7274 HOST_WIDE_INT total_bytes;
7276 if (TREE_CODE (type) != ARRAY_TYPE
7277 || TREE_CODE (TREE_TYPE (type)) != INTEGER_TYPE
7278 || GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (type))) != BITS_PER_UNIT
7279 || !host_integerp (TYPE_SIZE_UNIT (type), 0))
7280 return 0;
7281 total_bytes = tree_low_cst (TYPE_SIZE_UNIT (type), 0);
7282 if (total_bytes > len)
7283 return 0;
7284 if (TREE_STRING_LENGTH (expr) < total_bytes)
7286 memcpy (ptr, TREE_STRING_POINTER (expr), TREE_STRING_LENGTH (expr));
7287 memset (ptr + TREE_STRING_LENGTH (expr), 0,
7288 total_bytes - TREE_STRING_LENGTH (expr));
7290 else
7291 memcpy (ptr, TREE_STRING_POINTER (expr), total_bytes);
7292 return total_bytes;
7296 /* Subroutine of fold_view_convert_expr. Encode the INTEGER_CST,
7297 REAL_CST, COMPLEX_CST or VECTOR_CST specified by EXPR into the
7298 buffer PTR of length LEN bytes. Return the number of bytes
7299 placed in the buffer, or zero upon failure. */
7301 int
7302 native_encode_expr (const_tree expr, unsigned char *ptr, int len)
7304 switch (TREE_CODE (expr))
7306 case INTEGER_CST:
7307 return native_encode_int (expr, ptr, len);
7309 case REAL_CST:
7310 return native_encode_real (expr, ptr, len);
7312 case COMPLEX_CST:
7313 return native_encode_complex (expr, ptr, len);
7315 case VECTOR_CST:
7316 return native_encode_vector (expr, ptr, len);
7318 case STRING_CST:
7319 return native_encode_string (expr, ptr, len);
7321 default:
7322 return 0;
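/* Illustrative round trip (CST and NEW_TYPE are hypothetical
   placeholders, shown only to sketch how the encode/interpret pair is
   meant to be used):

     unsigned char buf[64];
     int len = native_encode_expr (cst, buf, sizeof buf);
     tree t = len ? native_interpret_expr (new_type, buf, len)
                  : NULL_TREE;

   This is essentially what fold_view_convert_expr below does, after
   checking that the host and target byte sizes agree.  */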
7327 /* Subroutine of native_interpret_expr. Interpret the contents of
7328 the buffer PTR of length LEN as an INTEGER_CST of type TYPE.
7329 If the buffer cannot be interpreted, return NULL_TREE. */
7331 static tree
7332 native_interpret_int (tree type, const unsigned char *ptr, int len)
7334 int total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
7335 int byte, offset, word, words;
7336 unsigned char value;
7337 double_int result;
7339 if (total_bytes > len)
7340 return NULL_TREE;
7341 if (total_bytes * BITS_PER_UNIT > 2 * HOST_BITS_PER_WIDE_INT)
7342 return NULL_TREE;
7344 result = double_int_zero;
7345 words = total_bytes / UNITS_PER_WORD;
7347 for (byte = 0; byte < total_bytes; byte++)
7349 int bitpos = byte * BITS_PER_UNIT;
7350 if (total_bytes > UNITS_PER_WORD)
7352 word = byte / UNITS_PER_WORD;
7353 if (WORDS_BIG_ENDIAN)
7354 word = (words - 1) - word;
7355 offset = word * UNITS_PER_WORD;
7356 if (BYTES_BIG_ENDIAN)
7357 offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
7358 else
7359 offset += byte % UNITS_PER_WORD;
7361 else
7362 offset = BYTES_BIG_ENDIAN ? (total_bytes - 1) - byte : byte;
7363 value = ptr[offset];
7365 if (bitpos < HOST_BITS_PER_WIDE_INT)
7366 result.low |= (unsigned HOST_WIDE_INT) value << bitpos;
7367 else
7368 result.high |= (unsigned HOST_WIDE_INT) value
7369 << (bitpos - HOST_BITS_PER_WIDE_INT);
7372 return double_int_to_tree (type, result);
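/* This is the inverse of native_encode_int: bytes are fetched with the
   same endian shuffle and OR-ed into a double_int, so the little-endian
   buffer { 0x44, 0x33, 0x22, 0x11 } read as a 32-bit integer type
   yields the constant 0x11223344.  */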
7376 /* Subroutine of native_interpret_expr. Interpret the contents of
7377 the buffer PTR of length LEN as a REAL_CST of type TYPE.
7378 If the buffer cannot be interpreted, return NULL_TREE. */
7380 static tree
7381 native_interpret_real (tree type, const unsigned char *ptr, int len)
7383 enum machine_mode mode = TYPE_MODE (type);
7384 int total_bytes = GET_MODE_SIZE (mode);
7385 int byte, offset, word, words, bitpos;
7386 unsigned char value;
7387 /* There are always 32 bits in each long, no matter the size of
7388 the host's long. We handle floating point representations with
7389 up to 192 bits. */
7390 REAL_VALUE_TYPE r;
7391 long tmp[6];
7393 total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
7394 if (total_bytes > len || total_bytes > 24)
7395 return NULL_TREE;
7396 words = (32 / BITS_PER_UNIT) / UNITS_PER_WORD;
7398 memset (tmp, 0, sizeof (tmp));
7399 for (bitpos = 0; bitpos < total_bytes * BITS_PER_UNIT;
7400 bitpos += BITS_PER_UNIT)
7402 byte = (bitpos / BITS_PER_UNIT) & 3;
7403 if (UNITS_PER_WORD < 4)
7405 word = byte / UNITS_PER_WORD;
7406 if (WORDS_BIG_ENDIAN)
7407 word = (words - 1) - word;
7408 offset = word * UNITS_PER_WORD;
7409 if (BYTES_BIG_ENDIAN)
7410 offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
7411 else
7412 offset += byte % UNITS_PER_WORD;
7414 else
7415 offset = BYTES_BIG_ENDIAN ? 3 - byte : byte;
7416 value = ptr[offset + ((bitpos / BITS_PER_UNIT) & ~3)];
7418 tmp[bitpos / 32] |= (unsigned long)value << (bitpos & 31);
7421 real_from_target (&r, tmp, mode);
7422 return build_real (type, r);
7426 /* Subroutine of native_interpret_expr. Interpret the contents of
7427 the buffer PTR of length LEN as a COMPLEX_CST of type TYPE.
7428 If the buffer cannot be interpreted, return NULL_TREE. */
7430 static tree
7431 native_interpret_complex (tree type, const unsigned char *ptr, int len)
7433 tree etype, rpart, ipart;
7434 int size;
7436 etype = TREE_TYPE (type);
7437 size = GET_MODE_SIZE (TYPE_MODE (etype));
7438 if (size * 2 > len)
7439 return NULL_TREE;
7440 rpart = native_interpret_expr (etype, ptr, size);
7441 if (!rpart)
7442 return NULL_TREE;
7443 ipart = native_interpret_expr (etype, ptr+size, size);
7444 if (!ipart)
7445 return NULL_TREE;
7446 return build_complex (type, rpart, ipart);
7450 /* Subroutine of native_interpret_expr. Interpret the contents of
7451 the buffer PTR of length LEN as a VECTOR_CST of type TYPE.
7452 If the buffer cannot be interpreted, return NULL_TREE. */
7454 static tree
7455 native_interpret_vector (tree type, const unsigned char *ptr, int len)
7457 tree etype, elem, elements;
7458 int i, size, count;
7460 etype = TREE_TYPE (type);
7461 size = GET_MODE_SIZE (TYPE_MODE (etype));
7462 count = TYPE_VECTOR_SUBPARTS (type);
7463 if (size * count > len)
7464 return NULL_TREE;
7466 elements = NULL_TREE;
7467 for (i = count - 1; i >= 0; i--)
7469 elem = native_interpret_expr (etype, ptr+(i*size), size);
7470 if (!elem)
7471 return NULL_TREE;
7472 elements = tree_cons (NULL_TREE, elem, elements);
7474 return build_vector (type, elements);
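/* The loop above walks the elements backwards and prepends each one
   with tree_cons, so the finished list is in ascending element order,
   which is what build_vector expects.  */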
7478 /* Subroutine of fold_view_convert_expr. Interpret the contents of
7479 the buffer PTR of length LEN as a constant of type TYPE. For
7480 INTEGRAL_TYPE_P we return an INTEGER_CST, for SCALAR_FLOAT_TYPE_P
7481 we return a REAL_CST, etc... If the buffer cannot be interpreted,
7482 return NULL_TREE. */
7484 tree
7485 native_interpret_expr (tree type, const unsigned char *ptr, int len)
7487 switch (TREE_CODE (type))
7489 case INTEGER_TYPE:
7490 case ENUMERAL_TYPE:
7491 case BOOLEAN_TYPE:
7492 return native_interpret_int (type, ptr, len);
7494 case REAL_TYPE:
7495 return native_interpret_real (type, ptr, len);
7497 case COMPLEX_TYPE:
7498 return native_interpret_complex (type, ptr, len);
7500 case VECTOR_TYPE:
7501 return native_interpret_vector (type, ptr, len);
7503 default:
7504 return NULL_TREE;
7509 /* Fold a VIEW_CONVERT_EXPR of a constant expression EXPR to type
7510 TYPE at compile-time. If we're unable to perform the conversion
7511 return NULL_TREE. */
7513 static tree
7514 fold_view_convert_expr (tree type, tree expr)
7516 /* We support up to 512-bit values (for V8DFmode). */
7517 unsigned char buffer[64];
7518 int len;
7520 /* Check that the host and target are sane. */
7521 if (CHAR_BIT != 8 || BITS_PER_UNIT != 8)
7522 return NULL_TREE;
7524 len = native_encode_expr (expr, buffer, sizeof (buffer));
7525 if (len == 0)
7526 return NULL_TREE;
7528 return native_interpret_expr (type, buffer, len);
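/* For example, VIEW_CONVERT_EXPR<int>(1.0f) folds here to the integer
   constant 0x3f800000 (1065353216) on targets where both types are 32
   bits wide, by encoding the REAL_CST and reinterpreting its bytes.  */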
7531 /* Build an expression for the address of T. Folds away INDIRECT_REF
7532 to avoid confusing the gimplify process. */
7534 tree
7535 build_fold_addr_expr_with_type_loc (location_t loc, tree t, tree ptrtype)
7537 /* The size of the object is not relevant when talking about its address. */
7538 if (TREE_CODE (t) == WITH_SIZE_EXPR)
7539 t = TREE_OPERAND (t, 0);
7541 if (TREE_CODE (t) == INDIRECT_REF)
7543 t = TREE_OPERAND (t, 0);
7545 if (TREE_TYPE (t) != ptrtype)
7546 t = build1_loc (loc, NOP_EXPR, ptrtype, t);
7548 else if (TREE_CODE (t) == MEM_REF
7549 && integer_zerop (TREE_OPERAND (t, 1)))
7550 return TREE_OPERAND (t, 0);
7551 else if (TREE_CODE (t) == VIEW_CONVERT_EXPR)
7553 t = build_fold_addr_expr_loc (loc, TREE_OPERAND (t, 0));
7555 if (TREE_TYPE (t) != ptrtype)
7556 t = fold_convert_loc (loc, ptrtype, t);
7558 else
7559 t = build1_loc (loc, ADDR_EXPR, ptrtype, t);
7561 return t;
7564 /* Build an expression for the address of T. */
7566 tree
7567 build_fold_addr_expr_loc (location_t loc, tree t)
7569 tree ptrtype = build_pointer_type (TREE_TYPE (t));
7571 return build_fold_addr_expr_with_type_loc (loc, t, ptrtype);
7574 /* Fold a unary expression of code CODE and type TYPE with operand
7575 OP0. Return the folded expression if folding is successful.
7576 Otherwise, return NULL_TREE. */
7578 tree
7579 fold_unary_loc (location_t loc, enum tree_code code, tree type, tree op0)
7581 tree tem;
7582 tree arg0;
7583 enum tree_code_class kind = TREE_CODE_CLASS (code);
7585 gcc_assert (IS_EXPR_CODE_CLASS (kind)
7586 && TREE_CODE_LENGTH (code) == 1);
7588 arg0 = op0;
7589 if (arg0)
7591 if (CONVERT_EXPR_CODE_P (code)
7592 || code == FLOAT_EXPR || code == ABS_EXPR)
7594 /* Don't use STRIP_NOPS, because signedness of argument type
7595 matters. */
7596 STRIP_SIGN_NOPS (arg0);
7598 else
7600 /* Strip any conversions that don't change the mode. This
7601 is safe for every expression, except for a comparison
7602 expression because its signedness is derived from its
7603 operands.
7605 Note that this is done as an internal manipulation within
7606 the constant folder, in order to find the simplest
7607 representation of the arguments so that their form can be
7608 studied. In any case, the appropriate type conversions
7609 should be put back in the tree that will get out of the
7610 constant folder. */
7611 STRIP_NOPS (arg0);
7615 if (TREE_CODE_CLASS (code) == tcc_unary)
7617 if (TREE_CODE (arg0) == COMPOUND_EXPR)
7618 return build2 (COMPOUND_EXPR, type, TREE_OPERAND (arg0, 0),
7619 fold_build1_loc (loc, code, type,
7620 fold_convert_loc (loc, TREE_TYPE (op0),
7621 TREE_OPERAND (arg0, 1))));
7622 else if (TREE_CODE (arg0) == COND_EXPR)
7624 tree arg01 = TREE_OPERAND (arg0, 1);
7625 tree arg02 = TREE_OPERAND (arg0, 2);
7626 if (! VOID_TYPE_P (TREE_TYPE (arg01)))
7627 arg01 = fold_build1_loc (loc, code, type,
7628 fold_convert_loc (loc,
7629 TREE_TYPE (op0), arg01));
7630 if (! VOID_TYPE_P (TREE_TYPE (arg02)))
7631 arg02 = fold_build1_loc (loc, code, type,
7632 fold_convert_loc (loc,
7633 TREE_TYPE (op0), arg02));
7634 tem = fold_build3_loc (loc, COND_EXPR, type, TREE_OPERAND (arg0, 0),
7635 arg01, arg02);
7637 /* If this was a conversion, and all we did was to move it
7638 inside the COND_EXPR, bring it back out. But leave it if
7639 it is a conversion from integer to integer and the
7640 result precision is no wider than a word since such a
7641 conversion is cheap and may be optimized away by combine,
7642 while it couldn't if it were outside the COND_EXPR. Then return
7643 so we don't get into an infinite recursion loop taking the
7644 conversion out and then back in. */
7646 if ((CONVERT_EXPR_CODE_P (code)
7647 || code == NON_LVALUE_EXPR)
7648 && TREE_CODE (tem) == COND_EXPR
7649 && TREE_CODE (TREE_OPERAND (tem, 1)) == code
7650 && TREE_CODE (TREE_OPERAND (tem, 2)) == code
7651 && ! VOID_TYPE_P (TREE_OPERAND (tem, 1))
7652 && ! VOID_TYPE_P (TREE_OPERAND (tem, 2))
7653 && (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 1), 0))
7654 == TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 2), 0)))
7655 && (! (INTEGRAL_TYPE_P (TREE_TYPE (tem))
7656 && (INTEGRAL_TYPE_P
7657 (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 1), 0))))
7658 && TYPE_PRECISION (TREE_TYPE (tem)) <= BITS_PER_WORD)
7659 || flag_syntax_only))
7660 tem = build1_loc (loc, code, type,
7661 build3 (COND_EXPR,
7662 TREE_TYPE (TREE_OPERAND
7663 (TREE_OPERAND (tem, 1), 0)),
7664 TREE_OPERAND (tem, 0),
7665 TREE_OPERAND (TREE_OPERAND (tem, 1), 0),
7666 TREE_OPERAND (TREE_OPERAND (tem, 2),
7667 0)));
7668 return tem;
7670 else if (COMPARISON_CLASS_P (arg0))
7672 if (TREE_CODE (type) == BOOLEAN_TYPE)
7674 arg0 = copy_node (arg0);
7675 TREE_TYPE (arg0) = type;
7676 return arg0;
7678 else if (TREE_CODE (type) != INTEGER_TYPE)
7679 return fold_build3_loc (loc, COND_EXPR, type, arg0,
7680 fold_build1_loc (loc, code, type,
7681 integer_one_node),
7682 fold_build1_loc (loc, code, type,
7683 integer_zero_node));
7687 switch (code)
7689 case PAREN_EXPR:
7690 /* Re-association barriers around constants and other re-association
7691 barriers can be removed. */
7692 if (CONSTANT_CLASS_P (op0)
7693 || TREE_CODE (op0) == PAREN_EXPR)
7694 return fold_convert_loc (loc, type, op0);
7695 return NULL_TREE;
7697 CASE_CONVERT:
7698 case FLOAT_EXPR:
7699 case FIX_TRUNC_EXPR:
7700 if (TREE_TYPE (op0) == type)
7701 return op0;
7703 /* If we have (type) (a CMP b) and type is an integral type, return
7704 new expression involving the new type. */
7705 if (COMPARISON_CLASS_P (op0) && INTEGRAL_TYPE_P (type))
7706 return fold_build2_loc (loc, TREE_CODE (op0), type, TREE_OPERAND (op0, 0),
7707 TREE_OPERAND (op0, 1));
7709 /* Handle cases of two conversions in a row. */
7710 if (CONVERT_EXPR_P (op0))
7712 tree inside_type = TREE_TYPE (TREE_OPERAND (op0, 0));
7713 tree inter_type = TREE_TYPE (op0);
7714 int inside_int = INTEGRAL_TYPE_P (inside_type);
7715 int inside_ptr = POINTER_TYPE_P (inside_type);
7716 int inside_float = FLOAT_TYPE_P (inside_type);
7717 int inside_vec = TREE_CODE (inside_type) == VECTOR_TYPE;
7718 unsigned int inside_prec = TYPE_PRECISION (inside_type);
7719 int inside_unsignedp = TYPE_UNSIGNED (inside_type);
7720 int inter_int = INTEGRAL_TYPE_P (inter_type);
7721 int inter_ptr = POINTER_TYPE_P (inter_type);
7722 int inter_float = FLOAT_TYPE_P (inter_type);
7723 int inter_vec = TREE_CODE (inter_type) == VECTOR_TYPE;
7724 unsigned int inter_prec = TYPE_PRECISION (inter_type);
7725 int inter_unsignedp = TYPE_UNSIGNED (inter_type);
7726 int final_int = INTEGRAL_TYPE_P (type);
7727 int final_ptr = POINTER_TYPE_P (type);
7728 int final_float = FLOAT_TYPE_P (type);
7729 int final_vec = TREE_CODE (type) == VECTOR_TYPE;
7730 unsigned int final_prec = TYPE_PRECISION (type);
7731 int final_unsignedp = TYPE_UNSIGNED (type);
7733 /* In addition to the cases of two conversions in a row
7734 handled below, if we are converting something to its own
7735 type via an object of identical or wider precision, neither
7736 conversion is needed. */
7737 if (TYPE_MAIN_VARIANT (inside_type) == TYPE_MAIN_VARIANT (type)
7738 && (((inter_int || inter_ptr) && final_int)
7739 || (inter_float && final_float))
7740 && inter_prec >= final_prec)
7741 return fold_build1_loc (loc, code, type, TREE_OPERAND (op0, 0));
7743 /* Likewise, if the intermediate and initial types are either both
7744 float or both integer, we don't need the middle conversion if the
7745 former is wider than the latter and doesn't change the signedness
7746 (for integers). Avoid this if the final type is a pointer since
7747 then we sometimes need the middle conversion. Likewise if the
7748 final type has a precision not equal to the size of its mode. */
7749 if (((inter_int && inside_int)
7750 || (inter_float && inside_float)
7751 || (inter_vec && inside_vec))
7752 && inter_prec >= inside_prec
7753 && (inter_float || inter_vec
7754 || inter_unsignedp == inside_unsignedp)
7755 && ! (final_prec != GET_MODE_BITSIZE (TYPE_MODE (type))
7756 && TYPE_MODE (type) == TYPE_MODE (inter_type))
7757 && ! final_ptr
7758 && (! final_vec || inter_prec == inside_prec))
7759 return fold_build1_loc (loc, code, type, TREE_OPERAND (op0, 0));
7761 /* If we have a sign-extension of a zero-extended value, we can
7762 replace that by a single zero-extension. */
7763 if (inside_int && inter_int && final_int
7764 && inside_prec < inter_prec && inter_prec < final_prec
7765 && inside_unsignedp && !inter_unsignedp)
7766 return fold_build1_loc (loc, code, type, TREE_OPERAND (op0, 0));
7768 /* Two conversions in a row are not needed unless:
7769 - some conversion is floating-point (overstrict for now), or
7770 - some conversion is a vector (overstrict for now), or
7771 - the intermediate type is narrower than both initial and
7772 final, or
7773 - the intermediate type and innermost type differ in signedness,
7774 and the outermost type is wider than the intermediate, or
7775 - the initial type is a pointer type and the precisions of the
7776 intermediate and final types differ, or
7777 - the final type is a pointer type and the precisions of the
7778 initial and intermediate types differ. */
7779 if (! inside_float && ! inter_float && ! final_float
7780 && ! inside_vec && ! inter_vec && ! final_vec
7781 && (inter_prec >= inside_prec || inter_prec >= final_prec)
7782 && ! (inside_int && inter_int
7783 && inter_unsignedp != inside_unsignedp
7784 && inter_prec < final_prec)
7785 && ((inter_unsignedp && inter_prec > inside_prec)
7786 == (final_unsignedp && final_prec > inter_prec))
7787 && ! (inside_ptr && inter_prec != final_prec)
7788 && ! (final_ptr && inside_prec != inter_prec)
7789 && ! (final_prec != GET_MODE_BITSIZE (TYPE_MODE (type))
7790 && TYPE_MODE (type) == TYPE_MODE (inter_type)))
7791 return fold_build1_loc (loc, code, type, TREE_OPERAND (op0, 0));
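/* For instance, with the usual 16/32/64-bit precisions, the
   sign-extension rule above lets (long)(int)(unsigned short)x drop the
   intermediate cast and fold to (long)(unsigned short)x, a single
   zero-extension.  */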
7794 /* Handle (T *)&A.B.C for A being of type T and B and C
7795 living at offset zero. This occurs frequently in
7796 C++ upcasting and then accessing the base. */
7797 if (TREE_CODE (op0) == ADDR_EXPR
7798 && POINTER_TYPE_P (type)
7799 && handled_component_p (TREE_OPERAND (op0, 0)))
7801 HOST_WIDE_INT bitsize, bitpos;
7802 tree offset;
7803 enum machine_mode mode;
7804 int unsignedp, volatilep;
7805 tree base = TREE_OPERAND (op0, 0);
7806 base = get_inner_reference (base, &bitsize, &bitpos, &offset,
7807 &mode, &unsignedp, &volatilep, false);
7808 /* If the reference was to a (constant) zero offset, we can use
7809 the address of the base if it has the same base type
7810 as the result type and the pointer type is unqualified. */
7811 if (! offset && bitpos == 0
7812 && (TYPE_MAIN_VARIANT (TREE_TYPE (type))
7813 == TYPE_MAIN_VARIANT (TREE_TYPE (base)))
7814 && TYPE_QUALS (type) == TYPE_UNQUALIFIED)
7815 return fold_convert_loc (loc, type,
7816 build_fold_addr_expr_loc (loc, base));
7819 if (TREE_CODE (op0) == MODIFY_EXPR
7820 && TREE_CONSTANT (TREE_OPERAND (op0, 1))
7821 /* Detect assigning a bitfield. */
7822 && !(TREE_CODE (TREE_OPERAND (op0, 0)) == COMPONENT_REF
7823 && DECL_BIT_FIELD
7824 (TREE_OPERAND (TREE_OPERAND (op0, 0), 1))))
7826 /* Don't leave an assignment inside a conversion
7827 unless assigning a bitfield. */
7828 tem = fold_build1_loc (loc, code, type, TREE_OPERAND (op0, 1));
7829 /* First do the assignment, then return converted constant. */
7830 tem = build2_loc (loc, COMPOUND_EXPR, TREE_TYPE (tem), op0, tem);
7831 TREE_NO_WARNING (tem) = 1;
7832 TREE_USED (tem) = 1;
7833 return tem;
7836 /* Convert (T)(x & c) into (T)x & (T)c, if c is an integer
7837 constant (if x has signed type, the sign bit cannot be set
7838 in c). This folds extension into the BIT_AND_EXPR.
7839 ??? We don't do it for BOOLEAN_TYPE or ENUMERAL_TYPE because they
7840 very likely don't have maximal range for their precision and this
7841 transformation effectively doesn't preserve non-maximal ranges. */
7842 if (TREE_CODE (type) == INTEGER_TYPE
7843 && TREE_CODE (op0) == BIT_AND_EXPR
7844 && TREE_CODE (TREE_OPERAND (op0, 1)) == INTEGER_CST)
7846 tree and_expr = op0;
7847 tree and0 = TREE_OPERAND (and_expr, 0);
7848 tree and1 = TREE_OPERAND (and_expr, 1);
7849 int change = 0;
7851 if (TYPE_UNSIGNED (TREE_TYPE (and_expr))
7852 || (TYPE_PRECISION (type)
7853 <= TYPE_PRECISION (TREE_TYPE (and_expr))))
7854 change = 1;
7855 else if (TYPE_PRECISION (TREE_TYPE (and1))
7856 <= HOST_BITS_PER_WIDE_INT
7857 && host_integerp (and1, 1))
7859 unsigned HOST_WIDE_INT cst;
7861 cst = tree_low_cst (and1, 1);
7862 cst &= (HOST_WIDE_INT) -1
7863 << (TYPE_PRECISION (TREE_TYPE (and1)) - 1);
7864 change = (cst == 0);
7865 #ifdef LOAD_EXTEND_OP
7866 if (change
7867 && !flag_syntax_only
7868 && (LOAD_EXTEND_OP (TYPE_MODE (TREE_TYPE (and0)))
7869 == ZERO_EXTEND))
7871 tree uns = unsigned_type_for (TREE_TYPE (and0));
7872 and0 = fold_convert_loc (loc, uns, and0);
7873 and1 = fold_convert_loc (loc, uns, and1);
7875 #endif
7877 if (change)
7879 tem = force_fit_type_double (type, tree_to_double_int (and1),
7880 0, TREE_OVERFLOW (and1));
7881 return fold_build2_loc (loc, BIT_AND_EXPR, type,
7882 fold_convert_loc (loc, type, and0), tem);
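/* E.g. for an int X, (unsigned short)(X & 0xff) becomes
   (unsigned short)X & 0xff here: the mask constant is refitted to the
   narrow type and the conversion is folded into the BIT_AND_EXPR.  */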
7886 /* Convert (T1)(X p+ Y) into ((T1)X p+ Y), for pointer type,
7887 when one of the new casts will fold away. Conservatively we assume
7888 that this happens when X or Y is NOP_EXPR or Y is INTEGER_CST. */
7889 if (POINTER_TYPE_P (type)
7890 && TREE_CODE (arg0) == POINTER_PLUS_EXPR
7891 && (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
7892 || TREE_CODE (TREE_OPERAND (arg0, 0)) == NOP_EXPR
7893 || TREE_CODE (TREE_OPERAND (arg0, 1)) == NOP_EXPR))
7895 tree arg00 = TREE_OPERAND (arg0, 0);
7896 tree arg01 = TREE_OPERAND (arg0, 1);
7898 return fold_build2_loc (loc,
7899 TREE_CODE (arg0), type,
7900 fold_convert_loc (loc, type, arg00),
7901 fold_convert_loc (loc, sizetype, arg01));
7904 /* Convert (T1)(~(T2)X) into ~(T1)X if T1 and T2 are integral types
7905 of the same precision, and X is of an integer type not narrower
7906 than T1 or T2, i.e. the cast (T2)X isn't an extension. */
7907 if (INTEGRAL_TYPE_P (type)
7908 && TREE_CODE (op0) == BIT_NOT_EXPR
7909 && INTEGRAL_TYPE_P (TREE_TYPE (op0))
7910 && CONVERT_EXPR_P (TREE_OPERAND (op0, 0))
7911 && TYPE_PRECISION (type) == TYPE_PRECISION (TREE_TYPE (op0)))
7913 tem = TREE_OPERAND (TREE_OPERAND (op0, 0), 0);
7914 if (INTEGRAL_TYPE_P (TREE_TYPE (tem))
7915 && TYPE_PRECISION (type) <= TYPE_PRECISION (TREE_TYPE (tem)))
7916 return fold_build1_loc (loc, BIT_NOT_EXPR, type,
7917 fold_convert_loc (loc, type, tem));
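/* E.g. for an int X, (int)~(unsigned)X folds to ~X: the two casts
   cancel because int and unsigned have the same precision and X is not
   narrower than either type.  */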
7920 /* Convert (T1)(X * Y) into (T1)X * (T1)Y if T1 is narrower than the
7921 type of X and Y (integer types only). */
7922 if (INTEGRAL_TYPE_P (type)
7923 && TREE_CODE (op0) == MULT_EXPR
7924 && INTEGRAL_TYPE_P (TREE_TYPE (op0))
7925 && TYPE_PRECISION (type) < TYPE_PRECISION (TREE_TYPE (op0)))
7927 /* Be careful not to introduce new overflows. */
7928 tree mult_type;
7929 if (TYPE_OVERFLOW_WRAPS (type))
7930 mult_type = type;
7931 else
7932 mult_type = unsigned_type_for (type);
7934 if (TYPE_PRECISION (mult_type) < TYPE_PRECISION (TREE_TYPE (op0)))
7936 tem = fold_build2_loc (loc, MULT_EXPR, mult_type,
7937 fold_convert_loc (loc, mult_type,
7938 TREE_OPERAND (op0, 0)),
7939 fold_convert_loc (loc, mult_type,
7940 TREE_OPERAND (op0, 1)));
7941 return fold_convert_loc (loc, type, tem);
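/* E.g. (short)(X * Y) with int operands becomes
   (short)((unsigned short)X * (unsigned short)Y) when signed overflow
   is undefined: doing the narrow multiply in the unsigned type cannot
   introduce a new overflow.  */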
7945 tem = fold_convert_const (code, type, op0);
7946 return tem ? tem : NULL_TREE;
7948 case ADDR_SPACE_CONVERT_EXPR:
7949 if (integer_zerop (arg0))
7950 return fold_convert_const (code, type, arg0);
7951 return NULL_TREE;
7953 case FIXED_CONVERT_EXPR:
7954 tem = fold_convert_const (code, type, arg0);
7955 return tem ? tem : NULL_TREE;
7957 case VIEW_CONVERT_EXPR:
7958 if (TREE_TYPE (op0) == type)
7959 return op0;
7960 if (TREE_CODE (op0) == VIEW_CONVERT_EXPR)
7961 return fold_build1_loc (loc, VIEW_CONVERT_EXPR,
7962 type, TREE_OPERAND (op0, 0));
7963 if (TREE_CODE (op0) == MEM_REF)
7964 return fold_build2_loc (loc, MEM_REF, type,
7965 TREE_OPERAND (op0, 0), TREE_OPERAND (op0, 1));
7967 /* For integral conversions with the same precision or pointer
7968 conversions use a NOP_EXPR instead. */
7969 if ((INTEGRAL_TYPE_P (type)
7970 || POINTER_TYPE_P (type))
7971 && (INTEGRAL_TYPE_P (TREE_TYPE (op0))
7972 || POINTER_TYPE_P (TREE_TYPE (op0)))
7973 && TYPE_PRECISION (type) == TYPE_PRECISION (TREE_TYPE (op0)))
7974 return fold_convert_loc (loc, type, op0);
7976 /* Strip inner integral conversions that do not change the precision. */
7977 if (CONVERT_EXPR_P (op0)
7978 && (INTEGRAL_TYPE_P (TREE_TYPE (op0))
7979 || POINTER_TYPE_P (TREE_TYPE (op0)))
7980 && (INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (op0, 0)))
7981 || POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (op0, 0))))
7982 && (TYPE_PRECISION (TREE_TYPE (op0))
7983 == TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (op0, 0)))))
7984 return fold_build1_loc (loc, VIEW_CONVERT_EXPR,
7985 type, TREE_OPERAND (op0, 0));
7987 return fold_view_convert_expr (type, op0);
7989 case NEGATE_EXPR:
7990 tem = fold_negate_expr (loc, arg0);
7991 if (tem)
7992 return fold_convert_loc (loc, type, tem);
7993 return NULL_TREE;
7995 case ABS_EXPR:
7996 if (TREE_CODE (arg0) == INTEGER_CST || TREE_CODE (arg0) == REAL_CST)
7997 return fold_abs_const (arg0, type);
7998 else if (TREE_CODE (arg0) == NEGATE_EXPR)
7999 return fold_build1_loc (loc, ABS_EXPR, type, TREE_OPERAND (arg0, 0));
8000 /* Convert fabs((double)float) into (double)fabsf(float). */
8001 else if (TREE_CODE (arg0) == NOP_EXPR
8002 && TREE_CODE (type) == REAL_TYPE)
8004 tree targ0 = strip_float_extensions (arg0);
8005 if (targ0 != arg0)
8006 return fold_convert_loc (loc, type,
8007 fold_build1_loc (loc, ABS_EXPR,
8008 TREE_TYPE (targ0),
8009 targ0));
8011 /* ABS_EXPR<ABS_EXPR<x>> = ABS_EXPR<x> even if flag_wrapv is on. */
8012 else if (TREE_CODE (arg0) == ABS_EXPR)
8013 return arg0;
8014 else if (tree_expr_nonnegative_p (arg0))
8015 return arg0;
8017 /* Strip sign ops from argument. */
8018 if (TREE_CODE (type) == REAL_TYPE)
8020 tem = fold_strip_sign_ops (arg0);
8021 if (tem)
8022 return fold_build1_loc (loc, ABS_EXPR, type,
8023 fold_convert_loc (loc, type, tem));
8025 return NULL_TREE;
8027 case CONJ_EXPR:
8028 if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
8029 return fold_convert_loc (loc, type, arg0);
8030 if (TREE_CODE (arg0) == COMPLEX_EXPR)
8032 tree itype = TREE_TYPE (type);
8033 tree rpart = fold_convert_loc (loc, itype, TREE_OPERAND (arg0, 0));
8034 tree ipart = fold_convert_loc (loc, itype, TREE_OPERAND (arg0, 1));
8035 return fold_build2_loc (loc, COMPLEX_EXPR, type, rpart,
8036 negate_expr (ipart));
8038 if (TREE_CODE (arg0) == COMPLEX_CST)
8040 tree itype = TREE_TYPE (type);
8041 tree rpart = fold_convert_loc (loc, itype, TREE_REALPART (arg0));
8042 tree ipart = fold_convert_loc (loc, itype, TREE_IMAGPART (arg0));
8043 return build_complex (type, rpart, negate_expr (ipart));
8045 if (TREE_CODE (arg0) == CONJ_EXPR)
8046 return fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
8047 return NULL_TREE;
8049 case BIT_NOT_EXPR:
8050 if (TREE_CODE (arg0) == INTEGER_CST)
8051 return fold_not_const (arg0, type);
8052 else if (TREE_CODE (arg0) == BIT_NOT_EXPR)
8053 return fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
8054 /* Convert ~ (-A) to A - 1. */
8055 else if (INTEGRAL_TYPE_P (type) && TREE_CODE (arg0) == NEGATE_EXPR)
8056 return fold_build2_loc (loc, MINUS_EXPR, type,
8057 fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0)),
8058 build_int_cst (type, 1));
8059 /* Convert ~ (A - 1) or ~ (A + -1) to -A. */
8060 else if (INTEGRAL_TYPE_P (type)
8061 && ((TREE_CODE (arg0) == MINUS_EXPR
8062 && integer_onep (TREE_OPERAND (arg0, 1)))
8063 || (TREE_CODE (arg0) == PLUS_EXPR
8064 && integer_all_onesp (TREE_OPERAND (arg0, 1)))))
8065 return fold_build1_loc (loc, NEGATE_EXPR, type,
8066 fold_convert_loc (loc, type,
8067 TREE_OPERAND (arg0, 0)));
8068 /* Convert ~(X ^ Y) to ~X ^ Y or X ^ ~Y if ~X or ~Y simplify. */
8069 else if (TREE_CODE (arg0) == BIT_XOR_EXPR
8070 && (tem = fold_unary_loc (loc, BIT_NOT_EXPR, type,
8071 fold_convert_loc (loc, type,
8072 TREE_OPERAND (arg0, 0)))))
8073 return fold_build2_loc (loc, BIT_XOR_EXPR, type, tem,
8074 fold_convert_loc (loc, type,
8075 TREE_OPERAND (arg0, 1)));
8076 else if (TREE_CODE (arg0) == BIT_XOR_EXPR
8077 && (tem = fold_unary_loc (loc, BIT_NOT_EXPR, type,
8078 fold_convert_loc (loc, type,
8079 TREE_OPERAND (arg0, 1)))))
8080 return fold_build2_loc (loc, BIT_XOR_EXPR, type,
8081 fold_convert_loc (loc, type,
8082 TREE_OPERAND (arg0, 0)), tem);
8083 /* Perform BIT_NOT_EXPR on each element individually. */
8084 else if (TREE_CODE (arg0) == VECTOR_CST)
8086 tree elements = TREE_VECTOR_CST_ELTS (arg0), elem, list = NULL_TREE;
8087 int count = TYPE_VECTOR_SUBPARTS (type), i;
8089 for (i = 0; i < count; i++)
8091 if (elements)
8093 elem = TREE_VALUE (elements);
8094 elem = fold_unary_loc (loc, BIT_NOT_EXPR, TREE_TYPE (type), elem);
8095 if (elem == NULL_TREE)
8096 break;
8097 elements = TREE_CHAIN (elements);
8099 else
8100 elem = build_int_cst (TREE_TYPE (type), -1);
8101 list = tree_cons (NULL_TREE, elem, list);
8103 if (i == count)
8104 return build_vector (type, nreverse (list));
8107 return NULL_TREE;
8109 case TRUTH_NOT_EXPR:
8110 /* The argument to invert_truthvalue must have Boolean type. */
8111 if (TREE_CODE (TREE_TYPE (arg0)) != BOOLEAN_TYPE)
8112 arg0 = fold_convert_loc (loc, boolean_type_node, arg0);
8114 /* Note that the operand of this must be an int
8115 and its values must be 0 or 1.
8116 ("true" is a fixed value perhaps depending on the language,
8117 but we don't handle values other than 1 correctly yet.) */
8118 tem = fold_truth_not_expr (loc, arg0);
8119 if (!tem)
8120 return NULL_TREE;
8121 return fold_convert_loc (loc, type, tem);
8123 case REALPART_EXPR:
8124 if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
8125 return fold_convert_loc (loc, type, arg0);
8126 if (TREE_CODE (arg0) == COMPLEX_EXPR)
8127 return omit_one_operand_loc (loc, type, TREE_OPERAND (arg0, 0),
8128 TREE_OPERAND (arg0, 1));
8129 if (TREE_CODE (arg0) == COMPLEX_CST)
8130 return fold_convert_loc (loc, type, TREE_REALPART (arg0));
8131 if (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
8133 tree itype = TREE_TYPE (TREE_TYPE (arg0));
8134 tem = fold_build2_loc (loc, TREE_CODE (arg0), itype,
8135 fold_build1_loc (loc, REALPART_EXPR, itype,
8136 TREE_OPERAND (arg0, 0)),
8137 fold_build1_loc (loc, REALPART_EXPR, itype,
8138 TREE_OPERAND (arg0, 1)));
8139 return fold_convert_loc (loc, type, tem);
8141 if (TREE_CODE (arg0) == CONJ_EXPR)
8143 tree itype = TREE_TYPE (TREE_TYPE (arg0));
8144 tem = fold_build1_loc (loc, REALPART_EXPR, itype,
8145 TREE_OPERAND (arg0, 0));
8146 return fold_convert_loc (loc, type, tem);
8148 if (TREE_CODE (arg0) == CALL_EXPR)
8150 tree fn = get_callee_fndecl (arg0);
8151 if (fn && DECL_BUILT_IN_CLASS (fn) == BUILT_IN_NORMAL)
8152 switch (DECL_FUNCTION_CODE (fn))
8154 CASE_FLT_FN (BUILT_IN_CEXPI):
8155 fn = mathfn_built_in (type, BUILT_IN_COS);
8156 if (fn)
8157 return build_call_expr_loc (loc, fn, 1, CALL_EXPR_ARG (arg0, 0));
8158 break;
8160 default:
8161 break;
8164 return NULL_TREE;
8166 case IMAGPART_EXPR:
8167 if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
8168 return build_zero_cst (type);
8169 if (TREE_CODE (arg0) == COMPLEX_EXPR)
8170 return omit_one_operand_loc (loc, type, TREE_OPERAND (arg0, 1),
8171 TREE_OPERAND (arg0, 0));
8172 if (TREE_CODE (arg0) == COMPLEX_CST)
8173 return fold_convert_loc (loc, type, TREE_IMAGPART (arg0));
8174 if (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
8176 tree itype = TREE_TYPE (TREE_TYPE (arg0));
8177 tem = fold_build2_loc (loc, TREE_CODE (arg0), itype,
8178 fold_build1_loc (loc, IMAGPART_EXPR, itype,
8179 TREE_OPERAND (arg0, 0)),
8180 fold_build1_loc (loc, IMAGPART_EXPR, itype,
8181 TREE_OPERAND (arg0, 1)));
8182 return fold_convert_loc (loc, type, tem);
8184 if (TREE_CODE (arg0) == CONJ_EXPR)
8186 tree itype = TREE_TYPE (TREE_TYPE (arg0));
8187 tem = fold_build1_loc (loc, IMAGPART_EXPR, itype, TREE_OPERAND (arg0, 0));
8188 return fold_convert_loc (loc, type, negate_expr (tem));
8190 if (TREE_CODE (arg0) == CALL_EXPR)
8192 tree fn = get_callee_fndecl (arg0);
8193 if (fn && DECL_BUILT_IN_CLASS (fn) == BUILT_IN_NORMAL)
8194 switch (DECL_FUNCTION_CODE (fn))
8196 CASE_FLT_FN (BUILT_IN_CEXPI):
8197 fn = mathfn_built_in (type, BUILT_IN_SIN);
8198 if (fn)
8199 return build_call_expr_loc (loc, fn, 1, CALL_EXPR_ARG (arg0, 0));
8200 break;
8202 default:
8203 break;
8206 return NULL_TREE;
8208 case INDIRECT_REF:
8209 /* Fold *&X to X if X is an lvalue. */
8210 if (TREE_CODE (op0) == ADDR_EXPR)
8212 tree op00 = TREE_OPERAND (op0, 0);
8213 if ((TREE_CODE (op00) == VAR_DECL
8214 || TREE_CODE (op00) == PARM_DECL
8215 || TREE_CODE (op00) == RESULT_DECL)
8216 && !TREE_READONLY (op00))
8217 return op00;
8219 return NULL_TREE;
8221 default:
8222 return NULL_TREE;
8223 } /* switch (code) */
8227 /* If the operation was a conversion do _not_ mark a resulting constant
8228 with TREE_OVERFLOW if the original constant was not. These conversions
8229 have implementation-defined behavior and retaining the TREE_OVERFLOW
8230 flag here would confuse later passes such as VRP. */
8231 tree
8232 fold_unary_ignore_overflow_loc (location_t loc, enum tree_code code,
8233 tree type, tree op0)
8235 tree res = fold_unary_loc (loc, code, type, op0);
8236 if (res
8237 && TREE_CODE (res) == INTEGER_CST
8238 && TREE_CODE (op0) == INTEGER_CST
8239 && CONVERT_EXPR_CODE_P (code))
8240 TREE_OVERFLOW (res) = TREE_OVERFLOW (op0);
8242 return res;
8245 /* Fold a binary expression of code CODE and type TYPE with operands
8246 OP0 and OP1, containing either a MIN-MAX or a MAX-MIN combination.
8247 Return the folded expression if folding is successful. Otherwise,
8248 return NULL_TREE. */
8250 static tree
8251 fold_minmax (location_t loc, enum tree_code code, tree type, tree op0, tree op1)
8253 enum tree_code compl_code;
8255 if (code == MIN_EXPR)
8256 compl_code = MAX_EXPR;
8257 else if (code == MAX_EXPR)
8258 compl_code = MIN_EXPR;
8259 else
8260 gcc_unreachable ();
8262 /* MIN (MAX (a, b), b) == b. */
8263 if (TREE_CODE (op0) == compl_code
8264 && operand_equal_p (TREE_OPERAND (op0, 1), op1, 0))
8265 return omit_one_operand_loc (loc, type, op1, TREE_OPERAND (op0, 0));
8267 /* MIN (MAX (b, a), b) == b. */
8268 if (TREE_CODE (op0) == compl_code
8269 && operand_equal_p (TREE_OPERAND (op0, 0), op1, 0)
8270 && reorder_operands_p (TREE_OPERAND (op0, 1), op1))
8271 return omit_one_operand_loc (loc, type, op1, TREE_OPERAND (op0, 1));
8273 /* MIN (a, MAX (a, b)) == a. */
8274 if (TREE_CODE (op1) == compl_code
8275 && operand_equal_p (op0, TREE_OPERAND (op1, 0), 0)
8276 && reorder_operands_p (op0, TREE_OPERAND (op1, 1)))
8277 return omit_one_operand_loc (loc, type, op0, TREE_OPERAND (op1, 1));
8279 /* MIN (a, MAX (b, a)) == a. */
8280 if (TREE_CODE (op1) == compl_code
8281 && operand_equal_p (op0, TREE_OPERAND (op1, 1), 0)
8282 && reorder_operands_p (op0, TREE_OPERAND (op1, 0)))
8283 return omit_one_operand_loc (loc, type, op0, TREE_OPERAND (op1, 0));
8285 return NULL_TREE;
8288 /* Helper that tries to canonicalize the comparison ARG0 CODE ARG1
8289 by changing CODE to reduce the magnitude of constants involved in
8290 ARG0 of the comparison.
8291 Returns a canonicalized comparison tree if a simplification was
8292 possible, otherwise returns NULL_TREE.
8293 Set *STRICT_OVERFLOW_P to true if the canonicalization is only
8294 valid if signed overflow is undefined. */
8296 static tree
8297 maybe_canonicalize_comparison_1 (location_t loc, enum tree_code code, tree type,
8298 tree arg0, tree arg1,
8299 bool *strict_overflow_p)
8301 enum tree_code code0 = TREE_CODE (arg0);
8302 tree t, cst0 = NULL_TREE;
8303 int sgn0;
8304 bool swap = false;
8306 /* Match A +- CST code arg1 and CST code arg1. We can change the
8307 first form only if overflow is undefined. */
8308 if (!((TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0))
8309 /* In principle pointers also have undefined overflow behavior,
8310 but that causes problems elsewhere. */
8311 && !POINTER_TYPE_P (TREE_TYPE (arg0))
8312 && (code0 == MINUS_EXPR
8313 || code0 == PLUS_EXPR)
8314 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
8315 || code0 == INTEGER_CST))
8316 return NULL_TREE;
8318 /* Identify the constant in arg0 and its sign. */
8319 if (code0 == INTEGER_CST)
8320 cst0 = arg0;
8321 else
8322 cst0 = TREE_OPERAND (arg0, 1);
8323 sgn0 = tree_int_cst_sgn (cst0);
8325 /* Overflowed constants and zero will cause problems. */
8326 if (integer_zerop (cst0)
8327 || TREE_OVERFLOW (cst0))
8328 return NULL_TREE;
8330 /* See if we can reduce the magnitude of the constant in
8331 arg0 by changing the comparison code. */
8332 if (code0 == INTEGER_CST)
8334 /* CST <= arg1 -> CST-1 < arg1. */
8335 if (code == LE_EXPR && sgn0 == 1)
8336 code = LT_EXPR;
8337 /* -CST < arg1 -> -CST-1 <= arg1. */
8338 else if (code == LT_EXPR && sgn0 == -1)
8339 code = LE_EXPR;
8340 /* CST > arg1 -> CST-1 >= arg1. */
8341 else if (code == GT_EXPR && sgn0 == 1)
8342 code = GE_EXPR;
8343 /* -CST >= arg1 -> -CST-1 > arg1. */
8344 else if (code == GE_EXPR && sgn0 == -1)
8345 code = GT_EXPR;
8346 else
8347 return NULL_TREE;
8348 /* arg1 code' CST' might be more canonical. */
8349 swap = true;
8351 else
8353 /* A - CST < arg1 -> A - CST-1 <= arg1. */
8354 if (code == LT_EXPR
8355 && code0 == ((sgn0 == -1) ? PLUS_EXPR : MINUS_EXPR))
8356 code = LE_EXPR;
8357 /* A + CST > arg1 -> A + CST-1 >= arg1. */
8358 else if (code == GT_EXPR
8359 && code0 == ((sgn0 == -1) ? MINUS_EXPR : PLUS_EXPR))
8360 code = GE_EXPR;
8361 /* A + CST <= arg1 -> A + CST-1 < arg1. */
8362 else if (code == LE_EXPR
8363 && code0 == ((sgn0 == -1) ? MINUS_EXPR : PLUS_EXPR))
8364 code = LT_EXPR;
8365 /* A - CST >= arg1 -> A - CST-1 > arg1. */
8366 else if (code == GE_EXPR
8367 && code0 == ((sgn0 == -1) ? PLUS_EXPR : MINUS_EXPR))
8368 code = GT_EXPR;
8369 else
8370 return NULL_TREE;
8371 *strict_overflow_p = true;
8374 /* Now build the constant reduced in magnitude. But not if that
8375 would produce one outside of its type's range. */
8376 if (INTEGRAL_TYPE_P (TREE_TYPE (cst0))
8377 && ((sgn0 == 1
8378 && TYPE_MIN_VALUE (TREE_TYPE (cst0))
8379 && tree_int_cst_equal (cst0, TYPE_MIN_VALUE (TREE_TYPE (cst0))))
8380 || (sgn0 == -1
8381 && TYPE_MAX_VALUE (TREE_TYPE (cst0))
8382 && tree_int_cst_equal (cst0, TYPE_MAX_VALUE (TREE_TYPE (cst0))))))
8383 /* We cannot swap the comparison here as that would cause us to
8384 endlessly recurse. */
8385 return NULL_TREE;
8387 t = int_const_binop (sgn0 == -1 ? PLUS_EXPR : MINUS_EXPR,
8388 cst0, build_int_cst (TREE_TYPE (cst0), 1), 0);
8389 if (code0 != INTEGER_CST)
8390 t = fold_build2_loc (loc, code0, TREE_TYPE (arg0), TREE_OPERAND (arg0, 0), t);
8392 /* If swapping might yield a more canonical form, do so. */
8393 if (swap)
8394 return fold_build2_loc (loc, swap_tree_comparison (code), type, arg1, t);
8395 else
8396 return fold_build2_loc (loc, code, type, t, arg1);
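/* Two examples of the rewrites above: X - 1 < Y becomes X <= Y (the
   constant shrinks to zero and folds away, under the recorded
   assumption that signed overflow is undefined), while 5 <= Y first
   becomes 4 < Y and is then swapped to Y > 4 to put the constant
   last.  */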
8399 /* Further canonicalize the comparison ARG0 CODE ARG1 with type TYPE,
8400 exploiting undefined overflow. Try to decrease the magnitude of constants involved
8401 by changing LE_EXPR and GE_EXPR to LT_EXPR and GT_EXPR or vice versa
8402 and put sole constants at the second argument position.
8403 Returns the canonicalized tree if changed, otherwise NULL_TREE. */
8405 static tree
8406 maybe_canonicalize_comparison (location_t loc, enum tree_code code, tree type,
8407 tree arg0, tree arg1)
8409 tree t;
8410 bool strict_overflow_p;
8411 const char * const warnmsg = G_("assuming signed overflow does not occur "
8412 "when reducing constant in comparison");
8414 /* Try canonicalization by simplifying arg0. */
8415 strict_overflow_p = false;
8416 t = maybe_canonicalize_comparison_1 (loc, code, type, arg0, arg1,
8417 &strict_overflow_p);
8418 if (t)
8420 if (strict_overflow_p)
8421 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_MAGNITUDE);
8422 return t;
8425 /* Try canonicalization by simplifying arg1 using the swapped
8426 comparison. */
8427 code = swap_tree_comparison (code);
8428 strict_overflow_p = false;
8429 t = maybe_canonicalize_comparison_1 (loc, code, type, arg1, arg0,
8430 &strict_overflow_p);
8431 if (t && strict_overflow_p)
8432 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_MAGNITUDE);
8433 return t;
8436 /* Return whether BASE + OFFSET + BITPOS may wrap around the address
8437 space. This is used to avoid issuing overflow warnings for
8438 expressions like &p->x which cannot wrap. */
8440 static bool
8441 pointer_may_wrap_p (tree base, tree offset, HOST_WIDE_INT bitpos)
8443 unsigned HOST_WIDE_INT offset_low, total_low;
8444 HOST_WIDE_INT size, offset_high, total_high;
8446 if (!POINTER_TYPE_P (TREE_TYPE (base)))
8447 return true;
8449 if (bitpos < 0)
8450 return true;
8452 if (offset == NULL_TREE)
8454 offset_low = 0;
8455 offset_high = 0;
8457 else if (TREE_CODE (offset) != INTEGER_CST || TREE_OVERFLOW (offset))
8458 return true;
8459 else
8461 offset_low = TREE_INT_CST_LOW (offset);
8462 offset_high = TREE_INT_CST_HIGH (offset);
8465 if (add_double_with_sign (offset_low, offset_high,
8466 bitpos / BITS_PER_UNIT, 0,
8467 &total_low, &total_high,
8468 true))
8469 return true;
8471 if (total_high != 0)
8472 return true;
8474 size = int_size_in_bytes (TREE_TYPE (TREE_TYPE (base)));
8475 if (size <= 0)
8476 return true;
8478 /* We can do slightly better for SIZE if we have an ADDR_EXPR of an
8479 array. */
8480 if (TREE_CODE (base) == ADDR_EXPR)
8482 HOST_WIDE_INT base_size;
8484 base_size = int_size_in_bytes (TREE_TYPE (TREE_OPERAND (base, 0)));
8485 if (base_size > 0 && size < base_size)
8486 size = base_size;
8489 return total_low > (unsigned HOST_WIDE_INT) size;
8492 /* Subroutine of fold_binary. This routine performs all of the
8493 transformations that are common to the equality/inequality
8494 operators (EQ_EXPR and NE_EXPR) and the ordering operators
8495 (LT_EXPR, LE_EXPR, GE_EXPR and GT_EXPR). Callers other than
8496 fold_binary should call fold_binary, not this routine. Fold a comparison with
8497 tree code CODE and type TYPE with operands OP0 and OP1. Return
8498 the folded comparison or NULL_TREE. */
8500 static tree
8501 fold_comparison (location_t loc, enum tree_code code, tree type,
8502 tree op0, tree op1)
8504 tree arg0, arg1, tem;
8506 arg0 = op0;
8507 arg1 = op1;
8509 STRIP_SIGN_NOPS (arg0);
8510 STRIP_SIGN_NOPS (arg1);
8512 tem = fold_relational_const (code, type, arg0, arg1);
8513 if (tem != NULL_TREE)
8514 return tem;
8516 /* If one arg is a real or integer constant, put it last. */
8517 if (tree_swap_operands_p (arg0, arg1, true))
8518 return fold_build2_loc (loc, swap_tree_comparison (code), type, op1, op0);
8520 /* Transform comparisons of the form X +- C1 CMP C2 to X CMP C2 +- C1. */
8521 if ((TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
8522 && (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
8523 && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1))
8524 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
8525 && (TREE_CODE (arg1) == INTEGER_CST
8526 && !TREE_OVERFLOW (arg1)))
8528 tree const1 = TREE_OPERAND (arg0, 1);
8529 tree const2 = arg1;
8530 tree variable = TREE_OPERAND (arg0, 0);
8531 tree lhs;
8532 int lhs_add;
8533 lhs_add = TREE_CODE (arg0) != PLUS_EXPR;
8535 lhs = fold_build2_loc (loc, lhs_add ? PLUS_EXPR : MINUS_EXPR,
8536 TREE_TYPE (arg1), const2, const1);
8538 /* If the constant operation overflowed this can be
8539 simplified as a comparison against INT_MAX/INT_MIN. */
8540 if (TREE_CODE (lhs) == INTEGER_CST
8541 && TREE_OVERFLOW (lhs))
8543 int const1_sgn = tree_int_cst_sgn (const1);
8544 enum tree_code code2 = code;
8546 /* Get the sign of the constant on the lhs if the
8547 operation were VARIABLE + CONST1. */
8548 if (TREE_CODE (arg0) == MINUS_EXPR)
8549 const1_sgn = -const1_sgn;
8551 /* The sign of the constant determines if we overflowed
8552 INT_MAX (const1_sgn == -1) or INT_MIN (const1_sgn == 1).
8553 Canonicalize to the INT_MIN overflow by swapping the comparison
8554 if necessary. */
8555 if (const1_sgn == -1)
8556 code2 = swap_tree_comparison (code);
8558 /* We now can look at the canonicalized case
8559 VARIABLE + 1 CODE2 INT_MIN
8560 and decide on the result. */
8561 if (code2 == LT_EXPR
8562 || code2 == LE_EXPR
8563 || code2 == EQ_EXPR)
8564 return omit_one_operand_loc (loc, type, boolean_false_node, variable);
8565 else if (code2 == NE_EXPR
8566 || code2 == GE_EXPR
8567 || code2 == GT_EXPR)
8568 return omit_one_operand_loc (loc, type, boolean_true_node, variable);
8571 if (TREE_CODE (lhs) == TREE_CODE (arg1)
8572 && (TREE_CODE (lhs) != INTEGER_CST
8573 || !TREE_OVERFLOW (lhs)))
8575 fold_overflow_warning ("assuming signed overflow does not occur "
8576 "when changing X +- C1 cmp C2 to "
8577 "X cmp C1 +- C2",
8578 WARN_STRICT_OVERFLOW_COMPARISON);
8579 return fold_build2_loc (loc, code, type, variable, lhs);
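/* E.g. X + 10 < 20 becomes X < 10 here, recording the assumption that
   the signed addition cannot wrap so that -Wstrict-overflow can warn
   about it.  */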
8583 /* For comparisons of pointers we can decompose it to a compile time
8584 comparison of the base objects and the offsets into the object.
8585 This requires at least one operand being an ADDR_EXPR or a
8586 POINTER_PLUS_EXPR to do more than the operand_equal_p test below. */
8587 if (POINTER_TYPE_P (TREE_TYPE (arg0))
8588 && (TREE_CODE (arg0) == ADDR_EXPR
8589 || TREE_CODE (arg1) == ADDR_EXPR
8590 || TREE_CODE (arg0) == POINTER_PLUS_EXPR
8591 || TREE_CODE (arg1) == POINTER_PLUS_EXPR))
8593 tree base0, base1, offset0 = NULL_TREE, offset1 = NULL_TREE;
8594 HOST_WIDE_INT bitsize, bitpos0 = 0, bitpos1 = 0;
8595 enum machine_mode mode;
8596 int volatilep, unsignedp;
8597 bool indirect_base0 = false, indirect_base1 = false;
8599 /* Get base and offset for the access. Strip ADDR_EXPR for
8600 get_inner_reference, but put it back by stripping INDIRECT_REF
8601 off the base object if possible. indirect_baseN will be true
8602 if baseN is not an address but refers to the object itself. */
8603 base0 = arg0;
8604 if (TREE_CODE (arg0) == ADDR_EXPR)
8606 base0 = get_inner_reference (TREE_OPERAND (arg0, 0),
8607 &bitsize, &bitpos0, &offset0, &mode,
8608 &unsignedp, &volatilep, false);
8609 if (TREE_CODE (base0) == INDIRECT_REF)
8610 base0 = TREE_OPERAND (base0, 0);
8611 else
8612 indirect_base0 = true;
8614 else if (TREE_CODE (arg0) == POINTER_PLUS_EXPR)
8616 base0 = TREE_OPERAND (arg0, 0);
8617 STRIP_SIGN_NOPS (base0);
8618 if (TREE_CODE (base0) == ADDR_EXPR)
8620 base0 = TREE_OPERAND (base0, 0);
8621 indirect_base0 = true;
8623 offset0 = TREE_OPERAND (arg0, 1);
8626 base1 = arg1;
8627 if (TREE_CODE (arg1) == ADDR_EXPR)
8629 base1 = get_inner_reference (TREE_OPERAND (arg1, 0),
8630 &bitsize, &bitpos1, &offset1, &mode,
8631 &unsignedp, &volatilep, false);
8632 if (TREE_CODE (base1) == INDIRECT_REF)
8633 base1 = TREE_OPERAND (base1, 0);
8634 else
8635 indirect_base1 = true;
8637 else if (TREE_CODE (arg1) == POINTER_PLUS_EXPR)
8639 base1 = TREE_OPERAND (arg1, 0);
8640 STRIP_SIGN_NOPS (base1);
8641 if (TREE_CODE (base1) == ADDR_EXPR)
8643 base1 = TREE_OPERAND (base1, 0);
8644 indirect_base1 = true;
8646 offset1 = TREE_OPERAND (arg1, 1);
8649 /* A local variable can never be pointed to by
8650 the default SSA name of an incoming parameter. */
8651 if ((TREE_CODE (arg0) == ADDR_EXPR
8652 && indirect_base0
8653 && TREE_CODE (base0) == VAR_DECL
8654 && auto_var_in_fn_p (base0, current_function_decl)
8655 && !indirect_base1
8656 && TREE_CODE (base1) == SSA_NAME
8657 && TREE_CODE (SSA_NAME_VAR (base1)) == PARM_DECL
8658 && SSA_NAME_IS_DEFAULT_DEF (base1))
8659 || (TREE_CODE (arg1) == ADDR_EXPR
8660 && indirect_base1
8661 && TREE_CODE (base1) == VAR_DECL
8662 && auto_var_in_fn_p (base1, current_function_decl)
8663 && !indirect_base0
8664 && TREE_CODE (base0) == SSA_NAME
8665 && TREE_CODE (SSA_NAME_VAR (base0)) == PARM_DECL
8666 && SSA_NAME_IS_DEFAULT_DEF (base0)))
8668 if (code == NE_EXPR)
8669 return constant_boolean_node (1, type);
8670 else if (code == EQ_EXPR)
8671 return constant_boolean_node (0, type);
8673 /* If we have equivalent bases we might be able to simplify. */
8674 else if (indirect_base0 == indirect_base1
8675 && operand_equal_p (base0, base1, 0))
8677 /* We can fold this expression to a constant if the non-constant
8678 offset parts are equal. */
8679 if ((offset0 == offset1
8680 || (offset0 && offset1
8681 && operand_equal_p (offset0, offset1, 0)))
8682 && (code == EQ_EXPR
8683 || code == NE_EXPR
8684 || POINTER_TYPE_OVERFLOW_UNDEFINED))
8687 if (code != EQ_EXPR
8688 && code != NE_EXPR
8689 && bitpos0 != bitpos1
8690 && (pointer_may_wrap_p (base0, offset0, bitpos0)
8691 || pointer_may_wrap_p (base1, offset1, bitpos1)))
8692 fold_overflow_warning (("assuming pointer wraparound does not "
8693 "occur when comparing P +- C1 with "
8694 "P +- C2"),
8695 WARN_STRICT_OVERFLOW_CONDITIONAL);
8697 switch (code)
8699 case EQ_EXPR:
8700 return constant_boolean_node (bitpos0 == bitpos1, type);
8701 case NE_EXPR:
8702 return constant_boolean_node (bitpos0 != bitpos1, type);
8703 case LT_EXPR:
8704 return constant_boolean_node (bitpos0 < bitpos1, type);
8705 case LE_EXPR:
8706 return constant_boolean_node (bitpos0 <= bitpos1, type);
8707 case GE_EXPR:
8708 return constant_boolean_node (bitpos0 >= bitpos1, type);
8709 case GT_EXPR:
8710 return constant_boolean_node (bitpos0 > bitpos1, type);
8711 default:;
8714 /* We can simplify the comparison to a comparison of the variable
8715 offset parts if the constant offset parts are equal.
8716 Be careful to use signed size type here because otherwise we
8717 mess with array offsets in the wrong way. This is possible
8718 because pointer arithmetic is required to remain within an
8719 object and overflow on pointer differences is undefined as of
8720 6.5.6/8 and /9 with respect to the signed ptrdiff_t. */
8721 else if (bitpos0 == bitpos1
8722 && ((code == EQ_EXPR || code == NE_EXPR)
8723 || POINTER_TYPE_OVERFLOW_UNDEFINED))
8725 /* By converting to the signed size type we cover middle-end pointer
8726 arithmetic (which operates on unsigned pointer types of size-type
8727 width) and ARRAY_REF offsets (which are properly sign- or
8728 zero-extended from their type when it is narrower than the size
8729 type). */
8730 if (offset0 == NULL_TREE)
8731 offset0 = build_int_cst (ssizetype, 0);
8732 else
8733 offset0 = fold_convert_loc (loc, ssizetype, offset0);
8734 if (offset1 == NULL_TREE)
8735 offset1 = build_int_cst (ssizetype, 0);
8736 else
8737 offset1 = fold_convert_loc (loc, ssizetype, offset1);
8739 if (code != EQ_EXPR
8740 && code != NE_EXPR
8741 && (pointer_may_wrap_p (base0, offset0, bitpos0)
8742 || pointer_may_wrap_p (base1, offset1, bitpos1)))
8743 fold_overflow_warning (("assuming pointer wraparound does not "
8744 "occur when comparing P +- C1 with "
8745 "P +- C2"),
8746 WARN_STRICT_OVERFLOW_COMPARISON);
8748 return fold_build2_loc (loc, code, type, offset0, offset1);
8751 /* For non-equal bases we can simplify if they are addresses
8752 of decls with local binding, or of constants. */
8753 else if (indirect_base0 && indirect_base1
8754 /* We know that !operand_equal_p (base0, base1, 0)
8755 because the if condition was false. But make
8756 sure two decls are not the same. */
8757 && base0 != base1
8758 && TREE_CODE (arg0) == ADDR_EXPR
8759 && TREE_CODE (arg1) == ADDR_EXPR
8760 && (((TREE_CODE (base0) == VAR_DECL
8761 || TREE_CODE (base0) == PARM_DECL)
8762 && (targetm.binds_local_p (base0)
8763 || CONSTANT_CLASS_P (base1)))
8764 || CONSTANT_CLASS_P (base0))
8765 && (((TREE_CODE (base1) == VAR_DECL
8766 || TREE_CODE (base1) == PARM_DECL)
8767 && (targetm.binds_local_p (base1)
8768 || CONSTANT_CLASS_P (base0)))
8769 || CONSTANT_CLASS_P (base1)))
8771 if (code == EQ_EXPR)
8772 return omit_two_operands_loc (loc, type, boolean_false_node,
8773 arg0, arg1);
8774 else if (code == NE_EXPR)
8775 return omit_two_operands_loc (loc, type, boolean_true_node,
8776 arg0, arg1);
8778 /* For equal offsets we can simplify to a comparison of the
8779 base addresses. */
8780 else if (bitpos0 == bitpos1
8781 && (indirect_base0
8782 ? base0 != TREE_OPERAND (arg0, 0) : base0 != arg0)
8783 && (indirect_base1
8784 ? base1 != TREE_OPERAND (arg1, 0) : base1 != arg1)
8785 && ((offset0 == offset1)
8786 || (offset0 && offset1
8787 && operand_equal_p (offset0, offset1, 0))))
8789 if (indirect_base0)
8790 base0 = build_fold_addr_expr_loc (loc, base0);
8791 if (indirect_base1)
8792 base1 = build_fold_addr_expr_loc (loc, base1);
8793 return fold_build2_loc (loc, code, type, base0, base1);
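/* Taken together, the cases above let &s.f1 != &s.f2 fold to a
   constant via the field bit positions, and P + 4 < P + 8 reduce to a
   comparison of the offsets 4 and 8 when pointer overflow is undefined
   (POINTER_TYPE_OVERFLOW_UNDEFINED).  */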
8797 /* Transform comparisons of the form X +- C1 CMP Y +- C2 to
8798 X CMP Y +- C2 +- C1 for signed X, Y. This is valid if
8799 the resulting offset is smaller in absolute value than the
8800 original one. */
8801 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0))
8802 && (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
8803 && (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
8804 && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1)))
8805 && (TREE_CODE (arg1) == PLUS_EXPR || TREE_CODE (arg1) == MINUS_EXPR)
8806 && (TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
8807 && !TREE_OVERFLOW (TREE_OPERAND (arg1, 1))))
8809 tree const1 = TREE_OPERAND (arg0, 1);
8810 tree const2 = TREE_OPERAND (arg1, 1);
8811 tree variable1 = TREE_OPERAND (arg0, 0);
8812 tree variable2 = TREE_OPERAND (arg1, 0);
8813 tree cst;
8814 const char * const warnmsg = G_("assuming signed overflow does not "
8815 "occur when combining constants around "
8816 "a comparison");
8818 /* Put the constant on the side where it doesn't overflow and is
8819 of lower absolute value than before. */
8820 cst = int_const_binop (TREE_CODE (arg0) == TREE_CODE (arg1)
8821 ? MINUS_EXPR : PLUS_EXPR,
8822 const2, const1, 0);
8823 if (!TREE_OVERFLOW (cst)
8824 && tree_int_cst_compare (const2, cst) == tree_int_cst_sgn (const2))
8826 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_COMPARISON);
8827 return fold_build2_loc (loc, code, type,
8828 variable1,
8829 fold_build2_loc (loc,
8830 TREE_CODE (arg1), TREE_TYPE (arg1),
8831 variable2, cst));
8834 cst = int_const_binop (TREE_CODE (arg0) == TREE_CODE (arg1)
8835 ? MINUS_EXPR : PLUS_EXPR,
8836 const1, const2, 0);
8837 if (!TREE_OVERFLOW (cst)
8838 && tree_int_cst_compare (const1, cst) == tree_int_cst_sgn (const1))
8840 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_COMPARISON);
8841 return fold_build2_loc (loc, code, type,
8842 fold_build2_loc (loc, TREE_CODE (arg0), TREE_TYPE (arg0),
8843 variable1, cst),
8844 variable2);
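/* E.g. X + 2 < Y + 5 becomes X < Y + 3: the surviving constant 3 is
   smaller in absolute value than the original 5, so the rewrite makes
   progress and cannot oscillate.  */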
8848 /* Transform comparisons of the form X * C1 CMP 0 to X CMP 0 in the
8849 signed arithmetic case. That form is created by the compiler
8850 often enough for folding it to be of value. One example is in
8851 computing loop trip counts after Operator Strength Reduction. */
8852 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0))
8853 && TREE_CODE (arg0) == MULT_EXPR
8854 && (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
8855 && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1)))
8856 && integer_zerop (arg1))
8858 tree const1 = TREE_OPERAND (arg0, 1);
8859 tree const2 = arg1; /* zero */
8860 tree variable1 = TREE_OPERAND (arg0, 0);
8861 enum tree_code cmp_code = code;
8863 /* Handle unfolded multiplication by zero. */
8864 if (integer_zerop (const1))
8865 return fold_build2_loc (loc, cmp_code, type, const1, const2);
8867 fold_overflow_warning (("assuming signed overflow does not occur when "
8868 "eliminating multiplication in comparison "
8869 "with zero"),
8870 WARN_STRICT_OVERFLOW_COMPARISON);
8872 /* If const1 is negative we swap the sense of the comparison. */
8873 if (tree_int_cst_sgn (const1) < 0)
8874 cmp_code = swap_tree_comparison (cmp_code);
8876 return fold_build2_loc (loc, cmp_code, type, variable1, const2);
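/* E.g. X * 4 > 0 simplifies to X > 0 here, and X * -4 > 0 to X < 0,
   the negative multiplier flipping the comparison.  */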
8879 tem = maybe_canonicalize_comparison (loc, code, type, op0, op1);
8880 if (tem)
8881 return tem;
8883 if (FLOAT_TYPE_P (TREE_TYPE (arg0)))
8885 tree targ0 = strip_float_extensions (arg0);
8886 tree targ1 = strip_float_extensions (arg1);
8887 tree newtype = TREE_TYPE (targ0);
8889 if (TYPE_PRECISION (TREE_TYPE (targ1)) > TYPE_PRECISION (newtype))
8890 newtype = TREE_TYPE (targ1);
8892 /* Fold (double)float1 CMP (double)float2 into float1 CMP float2. */
8893 if (TYPE_PRECISION (newtype) < TYPE_PRECISION (TREE_TYPE (arg0)))
8894 return fold_build2_loc (loc, code, type,
8895 fold_convert_loc (loc, newtype, targ0),
8896 fold_convert_loc (loc, newtype, targ1));
8898 /* (-a) CMP (-b) -> b CMP a */
8899 if (TREE_CODE (arg0) == NEGATE_EXPR
8900 && TREE_CODE (arg1) == NEGATE_EXPR)
8901 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg1, 0),
8902 TREE_OPERAND (arg0, 0));
8904 if (TREE_CODE (arg1) == REAL_CST)
8906 REAL_VALUE_TYPE cst;
8907 cst = TREE_REAL_CST (arg1);
8909 /* (-a) CMP CST -> a swap(CMP) (-CST) */
8910 if (TREE_CODE (arg0) == NEGATE_EXPR)
8911 return fold_build2_loc (loc, swap_tree_comparison (code), type,
8912 TREE_OPERAND (arg0, 0),
8913 build_real (TREE_TYPE (arg1),
8914 real_value_negate (&cst)));
8916 /* IEEE doesn't distinguish +0 and -0 in comparisons. */
8917 /* a CMP (-0) -> a CMP 0 */
8918 if (REAL_VALUE_MINUS_ZERO (cst))
8919 return fold_build2_loc (loc, code, type, arg0,
8920 build_real (TREE_TYPE (arg1), dconst0));
8922 /* x != NaN is always true, other ops are always false. */
8923 if (REAL_VALUE_ISNAN (cst)
8924 && ! HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg1))))
8926 tem = (code == NE_EXPR) ? integer_one_node : integer_zero_node;
8927 return omit_one_operand_loc (loc, type, tem, arg0);
8930 /* Fold comparisons against infinity. */
8931 if (REAL_VALUE_ISINF (cst)
8932 && MODE_HAS_INFINITIES (TYPE_MODE (TREE_TYPE (arg1))))
8934 tem = fold_inf_compare (loc, code, type, arg0, arg1);
8935 if (tem != NULL_TREE)
8936 return tem;
8940 /* If this is a comparison of a real constant with a PLUS_EXPR
8941 or a MINUS_EXPR of a real constant, we can convert it into a
8942 comparison with a revised real constant as long as no overflow
8943 occurs when unsafe_math_optimizations are enabled. */
8944 if (flag_unsafe_math_optimizations
8945 && TREE_CODE (arg1) == REAL_CST
8946 && (TREE_CODE (arg0) == PLUS_EXPR
8947 || TREE_CODE (arg0) == MINUS_EXPR)
8948 && TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
8949 && 0 != (tem = const_binop (TREE_CODE (arg0) == PLUS_EXPR
8950 ? MINUS_EXPR : PLUS_EXPR,
8951 arg1, TREE_OPERAND (arg0, 1)))
8952 && !TREE_OVERFLOW (tem))
8953 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0), tem);
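/* Worked example (added commentary): with -funsafe-math-optimizations,
   "x + 1.0 < 10.0" becomes "x < 9.0", because const_binop computes
   10.0 - 1.0 == 9.0 exactly and no overflow occurs.  */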
8955 /* Likewise, we can simplify a comparison of a real constant with
8956 a MINUS_EXPR whose first operand is also a real constant, i.e.
8957 (c1 - x) < c2 becomes x > c1-c2. Reordering is allowed on
8958 floating-point types only if -fassociative-math is set. */
8959 if (flag_associative_math
8960 && TREE_CODE (arg1) == REAL_CST
8961 && TREE_CODE (arg0) == MINUS_EXPR
8962 && TREE_CODE (TREE_OPERAND (arg0, 0)) == REAL_CST
8963 && 0 != (tem = const_binop (MINUS_EXPR, TREE_OPERAND (arg0, 0),
8964 arg1))
8965 && !TREE_OVERFLOW (tem))
8966 return fold_build2_loc (loc, swap_tree_comparison (code), type,
8967 TREE_OPERAND (arg0, 1), tem);
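/* Worked example (added commentary): with -fassociative-math,
   "(10.0 - x) < 4.0" becomes "x > 6.0"; the comparison is swapped
   because x appears negated on the left-hand side.  */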
8969 /* Fold comparisons against built-in math functions. */
8970 if (TREE_CODE (arg1) == REAL_CST
8971 && flag_unsafe_math_optimizations
8972 && ! flag_errno_math)
8974 enum built_in_function fcode = builtin_mathfn_code (arg0);
8976 if (fcode != END_BUILTINS)
8978 tem = fold_mathfn_compare (loc, fcode, code, type, arg0, arg1);
8979 if (tem != NULL_TREE)
8980 return tem;
8985 if (TREE_CODE (TREE_TYPE (arg0)) == INTEGER_TYPE
8986 && CONVERT_EXPR_P (arg0))
8988 /* If we are widening one operand of an integer comparison,
8989 see if the other operand is similarly being widened. Perhaps we
8990 can do the comparison in the narrower type. */
8991 tem = fold_widened_comparison (loc, code, type, arg0, arg1);
8992 if (tem)
8993 return tem;
8995 /* Or if we are changing signedness. */
8996 tem = fold_sign_changed_comparison (loc, code, type, arg0, arg1);
8997 if (tem)
8998 return tem;
9001 /* If this is comparing a constant with a MIN_EXPR or a MAX_EXPR of a
9002 constant, we can simplify it. */
9003 if (TREE_CODE (arg1) == INTEGER_CST
9004 && (TREE_CODE (arg0) == MIN_EXPR
9005 || TREE_CODE (arg0) == MAX_EXPR)
9006 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
9008 tem = optimize_minmax_comparison (loc, code, type, op0, op1);
9009 if (tem)
9010 return tem;
9013 /* Simplify comparison of something with itself. (For IEEE
9014 floating-point, we can only do some of these simplifications.) */
9015 if (operand_equal_p (arg0, arg1, 0))
9017 switch (code)
9019 case EQ_EXPR:
9020 if (! FLOAT_TYPE_P (TREE_TYPE (arg0))
9021 || ! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
9022 return constant_boolean_node (1, type);
9023 break;
9025 case GE_EXPR:
9026 case LE_EXPR:
9027 if (! FLOAT_TYPE_P (TREE_TYPE (arg0))
9028 || ! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
9029 return constant_boolean_node (1, type);
9030 return fold_build2_loc (loc, EQ_EXPR, type, arg0, arg1);
9032 case NE_EXPR:
9033 /* For NE, we can only do this simplification if integer
9034 or we don't honor IEEE floating point NaNs. */
9035 if (FLOAT_TYPE_P (TREE_TYPE (arg0))
9036 && HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
9037 break;
9038 /* ... fall through ... */
9039 case GT_EXPR:
9040 case LT_EXPR:
9041 return constant_boolean_node (0, type);
9042 default:
9043 gcc_unreachable ();
9047 /* If we are comparing an expression that just has comparisons
9048 of two integer values, arithmetic expressions of those comparisons,
9049 and constants, we can simplify it. There are only three cases
9050 to check: the two values can either be equal, the first can be
9051 greater, or the second can be greater. Fold the expression for
9052 those three values. Since each value must be 0 or 1, we have
9053 eight possibilities, each of which corresponds to the constant 0
9054 or 1 or one of the six possible comparisons.
9056 This handles common cases like (a > b) == 0 but also handles
9057 expressions like ((x > y) - (y > x)) > 0, which supposedly
9058 occur in macroized code. */
9060 if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg0) != INTEGER_CST)
9062 tree cval1 = 0, cval2 = 0;
9063 int save_p = 0;
9065 if (twoval_comparison_p (arg0, &cval1, &cval2, &save_p)
9066 /* Don't handle degenerate cases here; they should already
9067 have been handled anyway. */
9068 && cval1 != 0 && cval2 != 0
9069 && ! (TREE_CONSTANT (cval1) && TREE_CONSTANT (cval2))
9070 && TREE_TYPE (cval1) == TREE_TYPE (cval2)
9071 && INTEGRAL_TYPE_P (TREE_TYPE (cval1))
9072 && TYPE_MAX_VALUE (TREE_TYPE (cval1))
9073 && TYPE_MAX_VALUE (TREE_TYPE (cval2))
9074 && ! operand_equal_p (TYPE_MIN_VALUE (TREE_TYPE (cval1)),
9075 TYPE_MAX_VALUE (TREE_TYPE (cval2)), 0))
9077 tree maxval = TYPE_MAX_VALUE (TREE_TYPE (cval1));
9078 tree minval = TYPE_MIN_VALUE (TREE_TYPE (cval1));
9080 /* We can't just pass T to eval_subst in case cval1 or cval2
9081 was the same as ARG1. */
9083 tree high_result
9084 = fold_build2_loc (loc, code, type,
9085 eval_subst (loc, arg0, cval1, maxval,
9086 cval2, minval),
9087 arg1);
9088 tree equal_result
9089 = fold_build2_loc (loc, code, type,
9090 eval_subst (loc, arg0, cval1, maxval,
9091 cval2, maxval),
9092 arg1);
9093 tree low_result
9094 = fold_build2_loc (loc, code, type,
9095 eval_subst (loc, arg0, cval1, minval,
9096 cval2, maxval),
9097 arg1);
9099 /* All three of these results should be 0 or 1. Confirm they are.
9100 Then use those values to select the proper code to use. */
9102 if (TREE_CODE (high_result) == INTEGER_CST
9103 && TREE_CODE (equal_result) == INTEGER_CST
9104 && TREE_CODE (low_result) == INTEGER_CST)
9106 /* Make a 3-bit mask with the high-order bit being the
9107 value for `>', the next for `=', and the low for `<'. */
9108 switch ((integer_onep (high_result) * 4)
9109 + (integer_onep (equal_result) * 2)
9110 + integer_onep (low_result))
9112 case 0:
9113 /* Always false. */
9114 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
9115 case 1:
9116 code = LT_EXPR;
9117 break;
9118 case 2:
9119 code = EQ_EXPR;
9120 break;
9121 case 3:
9122 code = LE_EXPR;
9123 break;
9124 case 4:
9125 code = GT_EXPR;
9126 break;
9127 case 5:
9128 code = NE_EXPR;
9129 break;
9130 case 6:
9131 code = GE_EXPR;
9132 break;
9133 case 7:
9134 /* Always true. */
9135 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
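/* Worked example of the mask above (added commentary): for
   "((x > y) - (y > x)) != 0" the high/equal/low results are 1/0/1,
   giving mask 101 in binary == 5, which selects NE_EXPR, so the
   whole expression folds to "x != y".  */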
9138 if (save_p)
9140 tem = save_expr (build2 (code, type, cval1, cval2));
9141 SET_EXPR_LOCATION (tem, loc);
9142 return tem;
9144 return fold_build2_loc (loc, code, type, cval1, cval2);
9149 /* We can fold X/C1 op C2 where C1 and C2 are integer constants
9150 into a single range test. */
9151 if ((TREE_CODE (arg0) == TRUNC_DIV_EXPR
9152 || TREE_CODE (arg0) == EXACT_DIV_EXPR)
9153 && TREE_CODE (arg1) == INTEGER_CST
9154 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
9155 && !integer_zerop (TREE_OPERAND (arg0, 1))
9156 && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1))
9157 && !TREE_OVERFLOW (arg1))
9159 tem = fold_div_compare (loc, code, type, arg0, arg1);
9160 if (tem != NULL_TREE)
9161 return tem;
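/* Worked example (added commentary): for "unsigned x;", the test
   "x / 4 == 2" is equivalent to the range test "8 <= x && x <= 11",
   which is the kind of single range check fold_div_compare builds.  */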
9164 /* Fold ~X op ~Y as Y op X. */
9165 if (TREE_CODE (arg0) == BIT_NOT_EXPR
9166 && TREE_CODE (arg1) == BIT_NOT_EXPR)
9168 tree cmp_type = TREE_TYPE (TREE_OPERAND (arg0, 0));
9169 return fold_build2_loc (loc, code, type,
9170 fold_convert_loc (loc, cmp_type,
9171 TREE_OPERAND (arg1, 0)),
9172 TREE_OPERAND (arg0, 0));
9175 /* Fold ~X op C as X op' ~C, where op' is the swapped comparison. */
9176 if (TREE_CODE (arg0) == BIT_NOT_EXPR
9177 && TREE_CODE (arg1) == INTEGER_CST)
9179 tree cmp_type = TREE_TYPE (TREE_OPERAND (arg0, 0));
9180 return fold_build2_loc (loc, swap_tree_comparison (code), type,
9181 TREE_OPERAND (arg0, 0),
9182 fold_build1_loc (loc, BIT_NOT_EXPR, cmp_type,
9183 fold_convert_loc (loc, cmp_type, arg1)));
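/* Worked example (added commentary): "~x < 5" becomes "x > ~5",
   i.e. "x > -6" for signed x, because BIT_NOT_EXPR reverses the
   ordering of its operand.  */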
9186 return NULL_TREE;
9190 /* Subroutine of fold_binary. Optimize complex multiplications of the
9191 form z * conj(z), as pow(realpart(z),2) + pow(imagpart(z),2). The
9192 argument EXPR represents the expression "z" of type TYPE. */
9194 static tree
9195 fold_mult_zconjz (location_t loc, tree type, tree expr)
9197 tree itype = TREE_TYPE (type);
9198 tree rpart, ipart, tem;
9200 if (TREE_CODE (expr) == COMPLEX_EXPR)
9202 rpart = TREE_OPERAND (expr, 0);
9203 ipart = TREE_OPERAND (expr, 1);
9205 else if (TREE_CODE (expr) == COMPLEX_CST)
9207 rpart = TREE_REALPART (expr);
9208 ipart = TREE_IMAGPART (expr);
9210 else
9212 expr = save_expr (expr);
9213 rpart = fold_build1_loc (loc, REALPART_EXPR, itype, expr);
9214 ipart = fold_build1_loc (loc, IMAGPART_EXPR, itype, expr);
9217 rpart = save_expr (rpart);
9218 ipart = save_expr (ipart);
9219 tem = fold_build2_loc (loc, PLUS_EXPR, itype,
9220 fold_build2_loc (loc, MULT_EXPR, itype, rpart, rpart),
9221 fold_build2_loc (loc, MULT_EXPR, itype, ipart, ipart));
9222 return fold_build2_loc (loc, COMPLEX_EXPR, type, tem,
9223 build_zero_cst (itype));
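/* Derivation for the fold above (added commentary): with z == a + bi,
   z * conj(z) == (a + bi) * (a - bi) == a*a + b*b + 0i, which is why
   the result is the complex value (rpart*rpart + ipart*ipart, 0).  */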
9227 /* Subroutine of fold_binary. If P is the value of EXPR, computes
9228 power-of-two M and (arbitrary) N such that M divides (P-N). This condition
9229 guarantees that P and N have the same least significant log2(M) bits.
9230 N is not otherwise constrained. In particular, N is not normalized to
9231 0 <= N < M as is common. In general, the precise value of P is unknown.
9232 M is chosen as large as possible such that constant N can be determined.
9234 Returns M and sets *RESIDUE to N.
9236 If ALLOW_FUNC_ALIGN is true, do take functions' DECL_ALIGN_UNIT into
9237 account. This is not always possible due to PR 35705.
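/* Illustrative example (added commentary, hypothetical declaration):
   for "int a[4] __attribute__ ((aligned (16)));", the address "&a[1]"
   yields modulus 16 and residue 4; the address is known to be equal
   to 4 modulo 16 even though its exact value is unknown.  */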
9240 static unsigned HOST_WIDE_INT
9241 get_pointer_modulus_and_residue (tree expr, unsigned HOST_WIDE_INT *residue,
9242 bool allow_func_align)
9244 enum tree_code code;
9246 *residue = 0;
9248 code = TREE_CODE (expr);
9249 if (code == ADDR_EXPR)
9251 expr = TREE_OPERAND (expr, 0);
9252 if (handled_component_p (expr))
9254 HOST_WIDE_INT bitsize, bitpos;
9255 tree offset;
9256 enum machine_mode mode;
9257 int unsignedp, volatilep;
9259 expr = get_inner_reference (expr, &bitsize, &bitpos, &offset,
9260 &mode, &unsignedp, &volatilep, false);
9261 *residue = bitpos / BITS_PER_UNIT;
9262 if (offset)
9264 if (TREE_CODE (offset) == INTEGER_CST)
9265 *residue += TREE_INT_CST_LOW (offset);
9266 else
9267 /* We don't handle more complicated offset expressions. */
9268 return 1;
9272 if (DECL_P (expr)
9273 && (allow_func_align || TREE_CODE (expr) != FUNCTION_DECL))
9274 return DECL_ALIGN_UNIT (expr);
9276 else if (code == POINTER_PLUS_EXPR)
9278 tree op0, op1;
9279 unsigned HOST_WIDE_INT modulus;
9280 enum tree_code inner_code;
9282 op0 = TREE_OPERAND (expr, 0);
9283 STRIP_NOPS (op0);
9284 modulus = get_pointer_modulus_and_residue (op0, residue,
9285 allow_func_align);
9287 op1 = TREE_OPERAND (expr, 1);
9288 STRIP_NOPS (op1);
9289 inner_code = TREE_CODE (op1);
9290 if (inner_code == INTEGER_CST)
9292 *residue += TREE_INT_CST_LOW (op1);
9293 return modulus;
9295 else if (inner_code == MULT_EXPR)
9297 op1 = TREE_OPERAND (op1, 1);
9298 if (TREE_CODE (op1) == INTEGER_CST)
9300 unsigned HOST_WIDE_INT align;
9302 /* Compute the greatest power-of-2 divisor of op1. */
9303 align = TREE_INT_CST_LOW (op1);
9304 align &= -align;
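/* Example of the trick above (added commentary): for op1 == 24
   (binary 11000), "align & -align" isolates the lowest set bit,
   giving align == 8, the greatest power of 2 dividing 24.  */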
9306 /* If align is non-zero and less than *modulus, replace
9307 *modulus with align. If align is 0, then either op1 is 0
9308 or the greatest power-of-2 divisor of op1 doesn't fit in an
9309 unsigned HOST_WIDE_INT. In either case, no additional
9310 constraint is imposed. */
9311 if (align)
9312 modulus = MIN (modulus, align);
9314 return modulus;
9319 /* If we get here, we were unable to determine anything useful about the
9320 expression. */
9321 return 1;
9325 /* Fold a binary expression of code CODE and type TYPE with operands
9326 OP0 and OP1. LOC is the location of the resulting expression.
9327 Return the folded expression if folding is successful. Otherwise,
9328 return NULL_TREE. */
9330 tree
9331 fold_binary_loc (location_t loc,
9332 enum tree_code code, tree type, tree op0, tree op1)
9334 enum tree_code_class kind = TREE_CODE_CLASS (code);
9335 tree arg0, arg1, tem;
9336 tree t1 = NULL_TREE;
9337 bool strict_overflow_p;
9339 gcc_assert (IS_EXPR_CODE_CLASS (kind)
9340 && TREE_CODE_LENGTH (code) == 2
9341 && op0 != NULL_TREE
9342 && op1 != NULL_TREE);
9344 arg0 = op0;
9345 arg1 = op1;
9347 /* Strip any conversions that don't change the mode. This is
9348 safe for every expression, except for a comparison expression
9349 because its signedness is derived from its operands. So, in
9350 the latter case, only strip conversions that don't change the
9351 signedness. MIN_EXPR/MAX_EXPR also need the signedness of their arguments
9352 preserved.
9354 Note that this is done as an internal manipulation within the
9355 constant folder, in order to find the simplest representation
9356 of the arguments so that their form can be studied. In any
9357 case, the appropriate type conversions should be put back in
9358 the tree that will get out of the constant folder. */
9360 if (kind == tcc_comparison || code == MIN_EXPR || code == MAX_EXPR)
9362 STRIP_SIGN_NOPS (arg0);
9363 STRIP_SIGN_NOPS (arg1);
9365 else
9367 STRIP_NOPS (arg0);
9368 STRIP_NOPS (arg1);
9371 /* Note that TREE_CONSTANT isn't enough: static var addresses are
9372 constant but we can't do arithmetic on them. */
9373 if ((TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
9374 || (TREE_CODE (arg0) == REAL_CST && TREE_CODE (arg1) == REAL_CST)
9375 || (TREE_CODE (arg0) == FIXED_CST && TREE_CODE (arg1) == FIXED_CST)
9376 || (TREE_CODE (arg0) == FIXED_CST && TREE_CODE (arg1) == INTEGER_CST)
9377 || (TREE_CODE (arg0) == COMPLEX_CST && TREE_CODE (arg1) == COMPLEX_CST)
9378 || (TREE_CODE (arg0) == VECTOR_CST && TREE_CODE (arg1) == VECTOR_CST))
9380 if (kind == tcc_binary)
9382 /* Make sure type and arg0 have the same saturating flag. */
9383 gcc_assert (TYPE_SATURATING (type)
9384 == TYPE_SATURATING (TREE_TYPE (arg0)));
9385 tem = const_binop (code, arg0, arg1);
9387 else if (kind == tcc_comparison)
9388 tem = fold_relational_const (code, type, arg0, arg1);
9389 else
9390 tem = NULL_TREE;
9392 if (tem != NULL_TREE)
9394 if (TREE_TYPE (tem) != type)
9395 tem = fold_convert_loc (loc, type, tem);
9396 return tem;
9400 /* If this is a commutative operation, and ARG0 is a constant, move it
9401 to ARG1 to reduce the number of tests below. */
9402 if (commutative_tree_code (code)
9403 && tree_swap_operands_p (arg0, arg1, true))
9404 return fold_build2_loc (loc, code, type, op1, op0);
9406 /* ARG0 is the first operand of EXPR, and ARG1 is the second operand.
9408 First check for cases where an arithmetic operation is applied to a
9409 compound, conditional, or comparison operation. Push the arithmetic
9410 operation inside the compound or conditional to see if any folding
9411 can then be done. Convert comparison to conditional for this purpose.
9412 This also optimizes non-constant cases that used to be done in
9413 expand_expr.
9415 Before we do that, see if this is a BIT_AND_EXPR or a BIT_IOR_EXPR where
9416 one of the operands is a comparison and the other is a comparison, a
9417 BIT_AND_EXPR with the constant 1, or a truth value. In that case, the
9418 code below would make the expression more complex. Change it to a
9419 TRUTH_{AND,OR}_EXPR. Likewise, convert a similar NE_EXPR to
9420 TRUTH_XOR_EXPR and an EQ_EXPR to the inversion of a TRUTH_XOR_EXPR. */
9422 if ((code == BIT_AND_EXPR || code == BIT_IOR_EXPR
9423 || code == EQ_EXPR || code == NE_EXPR)
9424 && ((truth_value_p (TREE_CODE (arg0))
9425 && (truth_value_p (TREE_CODE (arg1))
9426 || (TREE_CODE (arg1) == BIT_AND_EXPR
9427 && integer_onep (TREE_OPERAND (arg1, 1)))))
9428 || (truth_value_p (TREE_CODE (arg1))
9429 && (truth_value_p (TREE_CODE (arg0))
9430 || (TREE_CODE (arg0) == BIT_AND_EXPR
9431 && integer_onep (TREE_OPERAND (arg0, 1)))))))
9433 tem = fold_build2_loc (loc, code == BIT_AND_EXPR ? TRUTH_AND_EXPR
9434 : code == BIT_IOR_EXPR ? TRUTH_OR_EXPR
9435 : TRUTH_XOR_EXPR,
9436 boolean_type_node,
9437 fold_convert_loc (loc, boolean_type_node, arg0),
9438 fold_convert_loc (loc, boolean_type_node, arg1));
9440 if (code == EQ_EXPR)
9441 tem = invert_truthvalue_loc (loc, tem);
9443 return fold_convert_loc (loc, type, tem);
9446 if (TREE_CODE_CLASS (code) == tcc_binary
9447 || TREE_CODE_CLASS (code) == tcc_comparison)
9449 if (TREE_CODE (arg0) == COMPOUND_EXPR)
9451 tem = fold_build2_loc (loc, code, type,
9452 fold_convert_loc (loc, TREE_TYPE (op0),
9453 TREE_OPERAND (arg0, 1)), op1);
9454 return build2_loc (loc, COMPOUND_EXPR, type, TREE_OPERAND (arg0, 0),
9455 tem);
9457 if (TREE_CODE (arg1) == COMPOUND_EXPR
9458 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
9460 tem = fold_build2_loc (loc, code, type, op0,
9461 fold_convert_loc (loc, TREE_TYPE (op1),
9462 TREE_OPERAND (arg1, 1)));
9463 return build2_loc (loc, COMPOUND_EXPR, type, TREE_OPERAND (arg1, 0),
9464 tem);
9467 if (TREE_CODE (arg0) == COND_EXPR || COMPARISON_CLASS_P (arg0))
9469 tem = fold_binary_op_with_conditional_arg (loc, code, type, op0, op1,
9470 arg0, arg1,
9471 /*cond_first_p=*/1);
9472 if (tem != NULL_TREE)
9473 return tem;
9476 if (TREE_CODE (arg1) == COND_EXPR || COMPARISON_CLASS_P (arg1))
9478 tem = fold_binary_op_with_conditional_arg (loc, code, type, op0, op1,
9479 arg1, arg0,
9480 /*cond_first_p=*/0);
9481 if (tem != NULL_TREE)
9482 return tem;
9486 switch (code)
9488 case MEM_REF:
9489 /* MEM[&MEM[p, CST1], CST2] -> MEM[p, CST1 + CST2]. */
9490 if (TREE_CODE (arg0) == ADDR_EXPR
9491 && TREE_CODE (TREE_OPERAND (arg0, 0)) == MEM_REF)
9493 tree iref = TREE_OPERAND (arg0, 0);
9494 return fold_build2 (MEM_REF, type,
9495 TREE_OPERAND (iref, 0),
9496 int_const_binop (PLUS_EXPR, arg1,
9497 TREE_OPERAND (iref, 1), 0));
9500 /* MEM[&a.b, CST2] -> MEM[&a, offsetof (a, b) + CST2]. */
9501 if (TREE_CODE (arg0) == ADDR_EXPR
9502 && handled_component_p (TREE_OPERAND (arg0, 0)))
9504 tree base;
9505 HOST_WIDE_INT coffset;
9506 base = get_addr_base_and_unit_offset (TREE_OPERAND (arg0, 0),
9507 &coffset);
9508 if (!base)
9509 return NULL_TREE;
9510 return fold_build2 (MEM_REF, type,
9511 build_fold_addr_expr (base),
9512 int_const_binop (PLUS_EXPR, arg1,
9513 size_int (coffset), 0));
9516 return NULL_TREE;
9518 case POINTER_PLUS_EXPR:
9519 /* 0 +p index -> (type)index */
9520 if (integer_zerop (arg0))
9521 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
9523 /* PTR +p 0 -> PTR */
9524 if (integer_zerop (arg1))
9525 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
9527 /* INT +p INT -> (PTR)(INT + INT). Stripping types allows for this. */
9528 if (INTEGRAL_TYPE_P (TREE_TYPE (arg1))
9529 && INTEGRAL_TYPE_P (TREE_TYPE (arg0)))
9530 return fold_convert_loc (loc, type,
9531 fold_build2_loc (loc, PLUS_EXPR, sizetype,
9532 fold_convert_loc (loc, sizetype,
9533 arg1),
9534 fold_convert_loc (loc, sizetype,
9535 arg0)));
9537 /* index +p PTR -> PTR +p index */
9538 if (POINTER_TYPE_P (TREE_TYPE (arg1))
9539 && INTEGRAL_TYPE_P (TREE_TYPE (arg0)))
9540 return fold_build2_loc (loc, POINTER_PLUS_EXPR, type,
9541 fold_convert_loc (loc, type, arg1),
9542 fold_convert_loc (loc, sizetype, arg0));
9544 /* (PTR +p B) +p A -> PTR +p (B + A) */
9545 if (TREE_CODE (arg0) == POINTER_PLUS_EXPR)
9547 tree inner;
9548 tree arg01 = fold_convert_loc (loc, sizetype, TREE_OPERAND (arg0, 1));
9549 tree arg00 = TREE_OPERAND (arg0, 0);
9550 inner = fold_build2_loc (loc, PLUS_EXPR, sizetype,
9551 arg01, fold_convert_loc (loc, sizetype, arg1));
9552 return fold_convert_loc (loc, type,
9553 fold_build2_loc (loc, POINTER_PLUS_EXPR,
9554 TREE_TYPE (arg00),
9555 arg00, inner));
9559 /* PTR_CST +p CST -> CST1, i.e. fold the sum to a new constant. */
9559 if (TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
9560 return fold_build2_loc (loc, PLUS_EXPR, type, arg0,
9561 fold_convert_loc (loc, type, arg1));
9563 /* Try replacing &a[i1] +p c * i2 with &a[i1 + i2], if c is the step
9564 of the array. The loop optimizer sometimes produces this type of
9565 expression. */
9566 if (TREE_CODE (arg0) == ADDR_EXPR)
9568 tem = try_move_mult_to_index (loc, arg0,
9569 fold_convert_loc (loc, sizetype, arg1));
9570 if (tem)
9571 return fold_convert_loc (loc, type, tem);
9574 return NULL_TREE;
9576 case PLUS_EXPR:
9577 /* A + (-B) -> A - B */
9578 if (TREE_CODE (arg1) == NEGATE_EXPR)
9579 return fold_build2_loc (loc, MINUS_EXPR, type,
9580 fold_convert_loc (loc, type, arg0),
9581 fold_convert_loc (loc, type,
9582 TREE_OPERAND (arg1, 0)));
9583 /* (-A) + B -> B - A */
9584 if (TREE_CODE (arg0) == NEGATE_EXPR
9585 && reorder_operands_p (TREE_OPERAND (arg0, 0), arg1))
9586 return fold_build2_loc (loc, MINUS_EXPR, type,
9587 fold_convert_loc (loc, type, arg1),
9588 fold_convert_loc (loc, type,
9589 TREE_OPERAND (arg0, 0)));
9591 if (INTEGRAL_TYPE_P (type))
9593 /* Convert ~A + 1 to -A. */
9594 if (TREE_CODE (arg0) == BIT_NOT_EXPR
9595 && integer_onep (arg1))
9596 return fold_build1_loc (loc, NEGATE_EXPR, type,
9597 fold_convert_loc (loc, type,
9598 TREE_OPERAND (arg0, 0)));
9600 /* ~X + X is -1. */
9601 if (TREE_CODE (arg0) == BIT_NOT_EXPR
9602 && !TYPE_OVERFLOW_TRAPS (type))
9604 tree tem = TREE_OPERAND (arg0, 0);
9606 STRIP_NOPS (tem);
9607 if (operand_equal_p (tem, arg1, 0))
9609 t1 = build_int_cst_type (type, -1);
9610 return omit_one_operand_loc (loc, type, t1, arg1);
9614 /* X + ~X is -1. */
9615 if (TREE_CODE (arg1) == BIT_NOT_EXPR
9616 && !TYPE_OVERFLOW_TRAPS (type))
9618 tree tem = TREE_OPERAND (arg1, 0);
9620 STRIP_NOPS (tem);
9621 if (operand_equal_p (arg0, tem, 0))
9623 t1 = build_int_cst_type (type, -1);
9624 return omit_one_operand_loc (loc, type, t1, arg0);
9628 /* X + (X / CST) * -CST is X % CST. */
9629 if (TREE_CODE (arg1) == MULT_EXPR
9630 && TREE_CODE (TREE_OPERAND (arg1, 0)) == TRUNC_DIV_EXPR
9631 && operand_equal_p (arg0,
9632 TREE_OPERAND (TREE_OPERAND (arg1, 0), 0), 0))
9634 tree cst0 = TREE_OPERAND (TREE_OPERAND (arg1, 0), 1);
9635 tree cst1 = TREE_OPERAND (arg1, 1);
9636 tree sum = fold_binary_loc (loc, PLUS_EXPR, TREE_TYPE (cst1),
9637 cst1, cst0);
9638 if (sum && integer_zerop (sum))
9639 return fold_convert_loc (loc, type,
9640 fold_build2_loc (loc, TRUNC_MOD_EXPR,
9641 TREE_TYPE (arg0), arg0,
9642 cst0));
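/* Worked example (added commentary): "x + (x / 16) * -16" folds to
   "x % 16", because cst1 + cst0 == -16 + 16 sums to zero, which is
   exactly the condition tested above.  */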
9646 /* Handle (A1 * C1) + (A2 * C2) with A1, A2 or C1, C2 being the
9647 same or one. Make sure type is not saturating.
9648 fold_plusminus_mult_expr will re-associate. */
9649 if ((TREE_CODE (arg0) == MULT_EXPR
9650 || TREE_CODE (arg1) == MULT_EXPR)
9651 && !TYPE_SATURATING (type)
9652 && (!FLOAT_TYPE_P (type) || flag_associative_math))
9654 tree tem = fold_plusminus_mult_expr (loc, code, type, arg0, arg1);
9655 if (tem)
9656 return tem;
9659 if (! FLOAT_TYPE_P (type))
9661 if (integer_zerop (arg1))
9662 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
9664 /* If we are adding two BIT_AND_EXPR's, both of which are and'ing
9665 with a constant, and the two constants have no bits in common,
9666 we should treat this as a BIT_IOR_EXPR since this may produce more
9667 simplifications. */
9668 if (TREE_CODE (arg0) == BIT_AND_EXPR
9669 && TREE_CODE (arg1) == BIT_AND_EXPR
9670 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
9671 && TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
9672 && integer_zerop (const_binop (BIT_AND_EXPR,
9673 TREE_OPERAND (arg0, 1),
9674 TREE_OPERAND (arg1, 1))))
9676 code = BIT_IOR_EXPR;
9677 goto bit_ior;
9680 /* Reassociate (plus (plus (mult) (foo)) (mult)) as
9681 (plus (plus (mult) (mult)) (foo)) so that we can
9682 take advantage of the factoring cases below. */
9683 if (((TREE_CODE (arg0) == PLUS_EXPR
9684 || TREE_CODE (arg0) == MINUS_EXPR)
9685 && TREE_CODE (arg1) == MULT_EXPR)
9686 || ((TREE_CODE (arg1) == PLUS_EXPR
9687 || TREE_CODE (arg1) == MINUS_EXPR)
9688 && TREE_CODE (arg0) == MULT_EXPR))
9690 tree parg0, parg1, parg, marg;
9691 enum tree_code pcode;
9693 if (TREE_CODE (arg1) == MULT_EXPR)
9694 parg = arg0, marg = arg1;
9695 else
9696 parg = arg1, marg = arg0;
9697 pcode = TREE_CODE (parg);
9698 parg0 = TREE_OPERAND (parg, 0);
9699 parg1 = TREE_OPERAND (parg, 1);
9700 STRIP_NOPS (parg0);
9701 STRIP_NOPS (parg1);
9703 if (TREE_CODE (parg0) == MULT_EXPR
9704 && TREE_CODE (parg1) != MULT_EXPR)
9705 return fold_build2_loc (loc, pcode, type,
9706 fold_build2_loc (loc, PLUS_EXPR, type,
9707 fold_convert_loc (loc, type,
9708 parg0),
9709 fold_convert_loc (loc, type,
9710 marg)),
9711 fold_convert_loc (loc, type, parg1));
9712 if (TREE_CODE (parg0) != MULT_EXPR
9713 && TREE_CODE (parg1) == MULT_EXPR)
9714 return
9715 fold_build2_loc (loc, PLUS_EXPR, type,
9716 fold_convert_loc (loc, type, parg0),
9717 fold_build2_loc (loc, pcode, type,
9718 fold_convert_loc (loc, type, marg),
9719 fold_convert_loc (loc, type,
9720 parg1)));
9723 else
9725 /* See if ARG1 is zero and X + ARG1 reduces to X. */
9726 if (fold_real_zero_addition_p (TREE_TYPE (arg0), arg1, 0))
9727 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
9729 /* Likewise if the operands are reversed. */
9730 if (fold_real_zero_addition_p (TREE_TYPE (arg1), arg0, 0))
9731 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
9733 /* Convert X + -C into X - C. */
9734 if (TREE_CODE (arg1) == REAL_CST
9735 && REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg1)))
9737 tem = fold_negate_const (arg1, type);
9738 if (!TREE_OVERFLOW (arg1) || !flag_trapping_math)
9739 return fold_build2_loc (loc, MINUS_EXPR, type,
9740 fold_convert_loc (loc, type, arg0),
9741 fold_convert_loc (loc, type, tem));
9744 /* Fold __complex__ ( x, 0 ) + __complex__ ( 0, y )
9745 to __complex__ ( x, y ). This is not the same for SNaNs or
9746 if signed zeros are involved. */
9747 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
9748 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
9749 && COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0)))
9751 tree rtype = TREE_TYPE (TREE_TYPE (arg0));
9752 tree arg0r = fold_unary_loc (loc, REALPART_EXPR, rtype, arg0);
9753 tree arg0i = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg0);
9754 bool arg0rz = false, arg0iz = false;
9755 if ((arg0r && (arg0rz = real_zerop (arg0r)))
9756 || (arg0i && (arg0iz = real_zerop (arg0i))))
9758 tree arg1r = fold_unary_loc (loc, REALPART_EXPR, rtype, arg1);
9759 tree arg1i = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg1);
9760 if (arg0rz && arg1i && real_zerop (arg1i))
9762 tree rp = arg1r ? arg1r
9763 : build1 (REALPART_EXPR, rtype, arg1);
9764 tree ip = arg0i ? arg0i
9765 : build1 (IMAGPART_EXPR, rtype, arg0);
9766 return fold_build2_loc (loc, COMPLEX_EXPR, type, rp, ip);
9768 else if (arg0iz && arg1r && real_zerop (arg1r))
9770 tree rp = arg0r ? arg0r
9771 : build1 (REALPART_EXPR, rtype, arg0);
9772 tree ip = arg1i ? arg1i
9773 : build1 (IMAGPART_EXPR, rtype, arg1);
9774 return fold_build2_loc (loc, COMPLEX_EXPR, type, rp, ip);
9779 if (flag_unsafe_math_optimizations
9780 && (TREE_CODE (arg0) == RDIV_EXPR || TREE_CODE (arg0) == MULT_EXPR)
9781 && (TREE_CODE (arg1) == RDIV_EXPR || TREE_CODE (arg1) == MULT_EXPR)
9782 && (tem = distribute_real_division (loc, code, type, arg0, arg1)))
9783 return tem;
9785 /* Convert x+x into x*2.0. */
9786 if (operand_equal_p (arg0, arg1, 0)
9787 && SCALAR_FLOAT_TYPE_P (type))
9788 return fold_build2_loc (loc, MULT_EXPR, type, arg0,
9789 build_real (type, dconst2));
9791 /* Convert a + (b*c + d*e) into (a + b*c) + d*e.
9792 We associate floats only if the user has specified
9793 -fassociative-math. */
9794 if (flag_associative_math
9795 && TREE_CODE (arg1) == PLUS_EXPR
9796 && TREE_CODE (arg0) != MULT_EXPR)
9798 tree tree10 = TREE_OPERAND (arg1, 0);
9799 tree tree11 = TREE_OPERAND (arg1, 1);
9800 if (TREE_CODE (tree11) == MULT_EXPR
9801 && TREE_CODE (tree10) == MULT_EXPR)
9803 tree tree0;
9804 tree0 = fold_build2_loc (loc, PLUS_EXPR, type, arg0, tree10);
9805 return fold_build2_loc (loc, PLUS_EXPR, type, tree0, tree11);
9808 /* Convert (b*c + d*e) + a into b*c + (d*e +a).
9809 We associate floats only if the user has specified
9810 -fassociative-math. */
9811 if (flag_associative_math
9812 && TREE_CODE (arg0) == PLUS_EXPR
9813 && TREE_CODE (arg1) != MULT_EXPR)
9815 tree tree00 = TREE_OPERAND (arg0, 0);
9816 tree tree01 = TREE_OPERAND (arg0, 1);
9817 if (TREE_CODE (tree01) == MULT_EXPR
9818 && TREE_CODE (tree00) == MULT_EXPR)
9820 tree tree0;
9821 tree0 = fold_build2_loc (loc, PLUS_EXPR, type, tree01, arg1);
9822 return fold_build2_loc (loc, PLUS_EXPR, type, tree00, tree0);
9827 bit_rotate:
9828 /* (A << C1) + (A >> C2) if A is unsigned and C1+C2 is the size of A
9829 is a rotate of A by C1 bits. */
9830 /* (A << B) + (A >> (Z - B)) if A is unsigned and Z is the size of A
9831 is a rotate of A by B bits. */
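/* Illustrative example (added commentary): for 32-bit "unsigned x;",
   "(x << 3) + (x >> 29)" matches the first pattern with
   C1 + C2 == 32 and is rewritten as a left rotate of x by 3 bits.  */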
9833 enum tree_code code0, code1;
9834 tree rtype;
9835 code0 = TREE_CODE (arg0);
9836 code1 = TREE_CODE (arg1);
9837 if (((code0 == RSHIFT_EXPR && code1 == LSHIFT_EXPR)
9838 || (code1 == RSHIFT_EXPR && code0 == LSHIFT_EXPR))
9839 && operand_equal_p (TREE_OPERAND (arg0, 0),
9840 TREE_OPERAND (arg1, 0), 0)
9841 && (rtype = TREE_TYPE (TREE_OPERAND (arg0, 0)),
9842 TYPE_UNSIGNED (rtype))
9843 /* Only create rotates in complete modes. Other cases are not
9844 expanded properly. */
9845 && TYPE_PRECISION (rtype) == GET_MODE_PRECISION (TYPE_MODE (rtype)))
9847 tree tree01, tree11;
9848 enum tree_code code01, code11;
9850 tree01 = TREE_OPERAND (arg0, 1);
9851 tree11 = TREE_OPERAND (arg1, 1);
9852 STRIP_NOPS (tree01);
9853 STRIP_NOPS (tree11);
9854 code01 = TREE_CODE (tree01);
9855 code11 = TREE_CODE (tree11);
9856 if (code01 == INTEGER_CST
9857 && code11 == INTEGER_CST
9858 && TREE_INT_CST_HIGH (tree01) == 0
9859 && TREE_INT_CST_HIGH (tree11) == 0
9860 && ((TREE_INT_CST_LOW (tree01) + TREE_INT_CST_LOW (tree11))
9861 == TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg0, 0)))))
9863 tem = build2_loc (loc, LROTATE_EXPR,
9864 TREE_TYPE (TREE_OPERAND (arg0, 0)),
9865 TREE_OPERAND (arg0, 0),
9866 code0 == LSHIFT_EXPR ? tree01 : tree11);
9867 return fold_convert_loc (loc, type, tem);
9869 else if (code11 == MINUS_EXPR)
9871 tree tree110, tree111;
9872 tree110 = TREE_OPERAND (tree11, 0);
9873 tree111 = TREE_OPERAND (tree11, 1);
9874 STRIP_NOPS (tree110);
9875 STRIP_NOPS (tree111);
9876 if (TREE_CODE (tree110) == INTEGER_CST
9877 && 0 == compare_tree_int (tree110,
9878 TYPE_PRECISION
9879 (TREE_TYPE (TREE_OPERAND
9880 (arg0, 0))))
9881 && operand_equal_p (tree01, tree111, 0))
9882 return
9883 fold_convert_loc (loc, type,
9884 build2 ((code0 == LSHIFT_EXPR
9885 ? LROTATE_EXPR
9886 : RROTATE_EXPR),
9887 TREE_TYPE (TREE_OPERAND (arg0, 0)),
9888 TREE_OPERAND (arg0, 0), tree01));
9890 else if (code01 == MINUS_EXPR)
9892 tree tree010, tree011;
9893 tree010 = TREE_OPERAND (tree01, 0);
9894 tree011 = TREE_OPERAND (tree01, 1);
9895 STRIP_NOPS (tree010);
9896 STRIP_NOPS (tree011);
9897 if (TREE_CODE (tree010) == INTEGER_CST
9898 && 0 == compare_tree_int (tree010,
9899 TYPE_PRECISION
9900 (TREE_TYPE (TREE_OPERAND
9901 (arg0, 0))))
9902 && operand_equal_p (tree11, tree011, 0))
9903 return fold_convert_loc
9904 (loc, type,
9905 build2 ((code0 != LSHIFT_EXPR
9906 ? LROTATE_EXPR
9907 : RROTATE_EXPR),
9908 TREE_TYPE (TREE_OPERAND (arg0, 0)),
9909 TREE_OPERAND (arg0, 0), tree11));
9914 associate:
9915 /* In most languages, we can't associate operations on floats through
9916 parentheses. Rather than remember where the parentheses were, we
9917 don't associate floats at all, unless the user has specified
9918 -fassociative-math.
9919 And, we need to make sure type is not saturating. */
9921 if ((! FLOAT_TYPE_P (type) || flag_associative_math)
9922 && !TYPE_SATURATING (type))
9924 tree var0, con0, lit0, minus_lit0;
9925 tree var1, con1, lit1, minus_lit1;
9926 bool ok = true;
9928 /* Split both trees into variables, constants, and literals. Then
9929 associate each group together, the constants with literals,
9930 then the result with variables. This increases the chances of
9931 literals being recombined later and of generating relocatable
9932 expressions for the sum of a constant and literal. */
9933 var0 = split_tree (arg0, code, &con0, &lit0, &minus_lit0, 0);
9934 var1 = split_tree (arg1, code, &con1, &lit1, &minus_lit1,
9935 code == MINUS_EXPR);
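/* Illustrative example (added commentary): for overflow-wrapping
   (e.g. unsigned) x and y, "(x + 3) + (y + 5)" splits into
   var0 == x, lit0 == 3, var1 == y, lit1 == 5; associating the groups
   then produces "(x + y) + 8", recombining the literals.  */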
9937 /* Recombine MINUS_EXPR operands by using PLUS_EXPR. */
9938 if (code == MINUS_EXPR)
9939 code = PLUS_EXPR;
9941 /* With undefined overflow we can only associate constants with one
9942 variable, and constants whose association doesn't overflow. */
9943 if ((POINTER_TYPE_P (type) && POINTER_TYPE_OVERFLOW_UNDEFINED)
9944 || (INTEGRAL_TYPE_P (type) && !TYPE_OVERFLOW_WRAPS (type)))
9946 if (var0 && var1)
9948 tree tmp0 = var0;
9949 tree tmp1 = var1;
9951 if (TREE_CODE (tmp0) == NEGATE_EXPR)
9952 tmp0 = TREE_OPERAND (tmp0, 0);
9953 if (TREE_CODE (tmp1) == NEGATE_EXPR)
9954 tmp1 = TREE_OPERAND (tmp1, 0);
9955 /* The only case we can still associate with two variables
9956 is if they are the same, modulo negation. */
9957 if (!operand_equal_p (tmp0, tmp1, 0))
9958 ok = false;
9961 if (ok && lit0 && lit1)
9963 tree tmp0 = fold_convert (type, lit0);
9964 tree tmp1 = fold_convert (type, lit1);
9966 if (!TREE_OVERFLOW (tmp0) && !TREE_OVERFLOW (tmp1)
9967 && TREE_OVERFLOW (fold_build2 (code, type, tmp0, tmp1)))
9968 ok = false;
9972 /* Only do something if we found more than two objects. Otherwise,
9973 nothing has changed and we risk infinite recursion. */
9974 if (ok
9975 && (2 < ((var0 != 0) + (var1 != 0)
9976 + (con0 != 0) + (con1 != 0)
9977 + (lit0 != 0) + (lit1 != 0)
9978 + (minus_lit0 != 0) + (minus_lit1 != 0))))
9980 var0 = associate_trees (loc, var0, var1, code, type);
9981 con0 = associate_trees (loc, con0, con1, code, type);
9982 lit0 = associate_trees (loc, lit0, lit1, code, type);
9983 minus_lit0 = associate_trees (loc, minus_lit0, minus_lit1, code, type);
9985 /* Preserve the MINUS_EXPR if the negative part of the literal is
9986 greater than the positive part. Otherwise, the multiplicative
9987 folding code (i.e. extract_muldiv) may be fooled in case
9988 unsigned constants are subtracted, like in the following
9989 example: ((X*2 + 4) - 8U)/2. */
9990 if (minus_lit0 && lit0)
9992 if (TREE_CODE (lit0) == INTEGER_CST
9993 && TREE_CODE (minus_lit0) == INTEGER_CST
9994 && tree_int_cst_lt (lit0, minus_lit0))
9996 minus_lit0 = associate_trees (loc, minus_lit0, lit0,
9997 MINUS_EXPR, type);
9998 lit0 = 0;
10000 else
10002 lit0 = associate_trees (loc, lit0, minus_lit0,
10003 MINUS_EXPR, type);
10004 minus_lit0 = 0;
10007 if (minus_lit0)
10009 if (con0 == 0)
10010 return
10011 fold_convert_loc (loc, type,
10012 associate_trees (loc, var0, minus_lit0,
10013 MINUS_EXPR, type));
10014 else
10016 con0 = associate_trees (loc, con0, minus_lit0,
10017 MINUS_EXPR, type);
10018 return
10019 fold_convert_loc (loc, type,
10020 associate_trees (loc, var0, con0,
10021 PLUS_EXPR, type));
10025 con0 = associate_trees (loc, con0, lit0, code, type);
10026 return
10027 fold_convert_loc (loc, type, associate_trees (loc, var0, con0,
10028 code, type));
10032 return NULL_TREE;
10034 case MINUS_EXPR:
10035 /* Pointer simplifications for subtraction, simple reassociations. */
10036 if (POINTER_TYPE_P (TREE_TYPE (arg1)) && POINTER_TYPE_P (TREE_TYPE (arg0)))
10038 /* (PTR0 p+ A) - (PTR1 p+ B) -> (PTR0 - PTR1) + (A - B) */
10039 if (TREE_CODE (arg0) == POINTER_PLUS_EXPR
10040 && TREE_CODE (arg1) == POINTER_PLUS_EXPR)
10042 tree arg00 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
10043 tree arg01 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
10044 tree arg10 = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 0));
10045 tree arg11 = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 1));
10046 return fold_build2_loc (loc, PLUS_EXPR, type,
10047 fold_build2_loc (loc, MINUS_EXPR, type,
10048 arg00, arg10),
10049 fold_build2_loc (loc, MINUS_EXPR, type,
10050 arg01, arg11));
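/* Worked example (added commentary, hypothetical pointers p and q):
   "(p p+ 4) - (q p+ 8)" becomes "(p - q) + (4 - 8)", i.e.
   "(p - q) - 4", exposing the constant part to later folds.  */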
10052 /* (PTR0 p+ A) - PTR1 -> (PTR0 - PTR1) + A, assuming PTR0 - PTR1 simplifies. */
10053 else if (TREE_CODE (arg0) == POINTER_PLUS_EXPR)
10055 tree arg00 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
10056 tree arg01 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
10057 tree tmp = fold_binary_loc (loc, MINUS_EXPR, type, arg00,
10058 fold_convert_loc (loc, type, arg1));
10059 if (tmp)
10060 return fold_build2_loc (loc, PLUS_EXPR, type, tmp, arg01);
10063 /* A - (-B) -> A + B */
10064 if (TREE_CODE (arg1) == NEGATE_EXPR)
10065 return fold_build2_loc (loc, PLUS_EXPR, type, op0,
10066 fold_convert_loc (loc, type,
10067 TREE_OPERAND (arg1, 0)));
10068 /* (-A) - B -> (-B) - A where B is easily negated and we can swap. */
10069 if (TREE_CODE (arg0) == NEGATE_EXPR
10070 && (FLOAT_TYPE_P (type)
10071 || INTEGRAL_TYPE_P (type))
10072 && negate_expr_p (arg1)
10073 && reorder_operands_p (arg0, arg1))
10074 return fold_build2_loc (loc, MINUS_EXPR, type,
10075 fold_convert_loc (loc, type,
10076 negate_expr (arg1)),
10077 fold_convert_loc (loc, type,
10078 TREE_OPERAND (arg0, 0)));
10079 /* Convert -A - 1 to ~A. */
10080 if (INTEGRAL_TYPE_P (type)
10081 && TREE_CODE (arg0) == NEGATE_EXPR
10082 && integer_onep (arg1)
10083 && !TYPE_OVERFLOW_TRAPS (type))
10084 return fold_build1_loc (loc, BIT_NOT_EXPR, type,
10085 fold_convert_loc (loc, type,
10086 TREE_OPERAND (arg0, 0)));
10088 /* Convert -1 - A to ~A. */
10089 if (INTEGRAL_TYPE_P (type)
10090 && integer_all_onesp (arg0))
10091 return fold_build1_loc (loc, BIT_NOT_EXPR, type, op1);
10094 /* X - (X / CST) * CST is X % CST. */
10095 if (INTEGRAL_TYPE_P (type)
10096 && TREE_CODE (arg1) == MULT_EXPR
10097 && TREE_CODE (TREE_OPERAND (arg1, 0)) == TRUNC_DIV_EXPR
10098 && operand_equal_p (arg0,
10099 TREE_OPERAND (TREE_OPERAND (arg1, 0), 0), 0)
10100 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg1, 0), 1),
10101 TREE_OPERAND (arg1, 1), 0))
10102 return
10103 fold_convert_loc (loc, type,
10104 fold_build2_loc (loc, TRUNC_MOD_EXPR, TREE_TYPE (arg0),
10105 arg0, TREE_OPERAND (arg1, 1)));
10107 if (! FLOAT_TYPE_P (type))
10109 if (integer_zerop (arg0))
10110 return negate_expr (fold_convert_loc (loc, type, arg1));
10111 if (integer_zerop (arg1))
10112 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
10114 /* Fold A - (A & B) into ~B & A. */
10115 if (!TREE_SIDE_EFFECTS (arg0)
10116 && TREE_CODE (arg1) == BIT_AND_EXPR)
10118 if (operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0))
10120 tree arg10 = fold_convert_loc (loc, type,
10121 TREE_OPERAND (arg1, 0));
10122 return fold_build2_loc (loc, BIT_AND_EXPR, type,
10123 fold_build1_loc (loc, BIT_NOT_EXPR,
10124 type, arg10),
10125 fold_convert_loc (loc, type, arg0));
10127 if (operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
10129 tree arg11 = fold_convert_loc (loc,
10130 type, TREE_OPERAND (arg1, 1));
10131 return fold_build2_loc (loc, BIT_AND_EXPR, type,
10132 fold_build1_loc (loc, BIT_NOT_EXPR,
10133 type, arg11),
10134 fold_convert_loc (loc, type, arg0));
10138 /* Fold (A & ~B) - (A & B) into (A ^ B) - B, where B is
10139 any power of 2 minus 1. */
10140 if (TREE_CODE (arg0) == BIT_AND_EXPR
10141 && TREE_CODE (arg1) == BIT_AND_EXPR
10142 && operand_equal_p (TREE_OPERAND (arg0, 0),
10143 TREE_OPERAND (arg1, 0), 0))
10145 tree mask0 = TREE_OPERAND (arg0, 1);
10146 tree mask1 = TREE_OPERAND (arg1, 1);
10147 tree tem = fold_build1_loc (loc, BIT_NOT_EXPR, type, mask0);
10149 if (operand_equal_p (tem, mask1, 0))
10151 tem = fold_build2_loc (loc, BIT_XOR_EXPR, type,
10152 TREE_OPERAND (arg0, 0), mask1);
10153 return fold_build2_loc (loc, MINUS_EXPR, type, tem, mask1);
10158 /* See if ARG1 is zero and X - ARG1 reduces to X. */
10159 else if (fold_real_zero_addition_p (TREE_TYPE (arg0), arg1, 1))
10160 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
10162 /* (ARG0 - ARG1) is the same as (-ARG1 + ARG0). So check whether
10163 ARG0 is zero and X + ARG0 reduces to X, since that would mean
10164 (-ARG1 + ARG0) reduces to -ARG1. */
10165 else if (fold_real_zero_addition_p (TREE_TYPE (arg1), arg0, 0))
10166 return negate_expr (fold_convert_loc (loc, type, arg1));
10168 /* Fold __complex__ ( x, 0 ) - __complex__ ( 0, y ) to
10169 __complex__ ( x, -y ). This is not the same for SNaNs or if
10170 signed zeros are involved. */
10171 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
10172 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
10173 && COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0)))
10175 tree rtype = TREE_TYPE (TREE_TYPE (arg0));
10176 tree arg0r = fold_unary_loc (loc, REALPART_EXPR, rtype, arg0);
10177 tree arg0i = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg0);
10178 bool arg0rz = false, arg0iz = false;
10179 if ((arg0r && (arg0rz = real_zerop (arg0r)))
10180 || (arg0i && (arg0iz = real_zerop (arg0i))))
10182 tree arg1r = fold_unary_loc (loc, REALPART_EXPR, rtype, arg1);
10183 tree arg1i = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg1);
10184 if (arg0rz && arg1i && real_zerop (arg1i))
10186 tree rp = fold_build1_loc (loc, NEGATE_EXPR, rtype,
10187 arg1r ? arg1r
10188 : build1 (REALPART_EXPR, rtype, arg1));
10189 tree ip = arg0i ? arg0i
10190 : build1 (IMAGPART_EXPR, rtype, arg0);
10191 return fold_build2_loc (loc, COMPLEX_EXPR, type, rp, ip);
10193 else if (arg0iz && arg1r && real_zerop (arg1r))
10195 tree rp = arg0r ? arg0r
10196 : build1 (REALPART_EXPR, rtype, arg0);
10197 tree ip = fold_build1_loc (loc, NEGATE_EXPR, rtype,
10198 arg1i ? arg1i
10199 : build1 (IMAGPART_EXPR, rtype, arg1));
10200 return fold_build2_loc (loc, COMPLEX_EXPR, type, rp, ip);
10205 /* Fold &x - &x. This can happen from &x.foo - &x.
10206 This is unsafe for certain floats even in non-IEEE formats.
10207 In IEEE, it is unsafe because it gives the wrong result for NaNs.
10208 Also note that operand_equal_p is always false if an operand
10209 is volatile. */
10211 if ((!FLOAT_TYPE_P (type) || !HONOR_NANS (TYPE_MODE (type)))
10212 && operand_equal_p (arg0, arg1, 0))
10213 return build_zero_cst (type);
10215 /* A - B -> A + (-B) if B is easily negatable. */
10216 if (negate_expr_p (arg1)
10217 && ((FLOAT_TYPE_P (type)
10218 /* Avoid this transformation if B is a positive REAL_CST. */
10219 && (TREE_CODE (arg1) != REAL_CST
10220 || REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg1))))
10221 || INTEGRAL_TYPE_P (type)))
10222 return fold_build2_loc (loc, PLUS_EXPR, type,
10223 fold_convert_loc (loc, type, arg0),
10224 fold_convert_loc (loc, type,
10225 negate_expr (arg1)));
10227 /* Try folding difference of addresses. */
10229 HOST_WIDE_INT diff;
10231 if ((TREE_CODE (arg0) == ADDR_EXPR
10232 || TREE_CODE (arg1) == ADDR_EXPR)
10233 && ptr_difference_const (arg0, arg1, &diff))
10234 return build_int_cst_type (type, diff);
10237 /* Fold &a[i] - &a[j] to i-j. */
10238 if (TREE_CODE (arg0) == ADDR_EXPR
10239 && TREE_CODE (TREE_OPERAND (arg0, 0)) == ARRAY_REF
10240 && TREE_CODE (arg1) == ADDR_EXPR
10241 && TREE_CODE (TREE_OPERAND (arg1, 0)) == ARRAY_REF)
10243 tree aref0 = TREE_OPERAND (arg0, 0);
10244 tree aref1 = TREE_OPERAND (arg1, 0);
10245 if (operand_equal_p (TREE_OPERAND (aref0, 0),
10246 TREE_OPERAND (aref1, 0), 0))
10248 tree op0 = fold_convert_loc (loc, type, TREE_OPERAND (aref0, 1));
10249 tree op1 = fold_convert_loc (loc, type, TREE_OPERAND (aref1, 1));
10250 tree esz = array_ref_element_size (aref0);
10251 tree diff = build2 (MINUS_EXPR, type, op0, op1);
10252 return fold_build2_loc (loc, MULT_EXPR, type, diff,
10253 fold_convert_loc (loc, type, esz));
10258 if (FLOAT_TYPE_P (type)
10259 && flag_unsafe_math_optimizations
10260 && (TREE_CODE (arg0) == RDIV_EXPR || TREE_CODE (arg0) == MULT_EXPR)
10261 && (TREE_CODE (arg1) == RDIV_EXPR || TREE_CODE (arg1) == MULT_EXPR)
10262 && (tem = distribute_real_division (loc, code, type, arg0, arg1)))
10263 return tem;
10265 /* Handle (A1 * C1) - (A2 * C2) with A1, A2 or C1, C2 being the
10266 same or one. Make sure type is not saturating.
10267 fold_plusminus_mult_expr will re-associate. */
10268 if ((TREE_CODE (arg0) == MULT_EXPR
10269 || TREE_CODE (arg1) == MULT_EXPR)
10270 && !TYPE_SATURATING (type)
10271 && (!FLOAT_TYPE_P (type) || flag_associative_math))
10273 tree tem = fold_plusminus_mult_expr (loc, code, type, arg0, arg1);
10274 if (tem)
10275 return tem;
10278 goto associate;
10280 case MULT_EXPR:
10281 /* (-A) * (-B) -> A * B */
10282 if (TREE_CODE (arg0) == NEGATE_EXPR && negate_expr_p (arg1))
10283 return fold_build2_loc (loc, MULT_EXPR, type,
10284 fold_convert_loc (loc, type,
10285 TREE_OPERAND (arg0, 0)),
10286 fold_convert_loc (loc, type,
10287 negate_expr (arg1)));
10288 if (TREE_CODE (arg1) == NEGATE_EXPR && negate_expr_p (arg0))
10289 return fold_build2_loc (loc, MULT_EXPR, type,
10290 fold_convert_loc (loc, type,
10291 negate_expr (arg0)),
10292 fold_convert_loc (loc, type,
10293 TREE_OPERAND (arg1, 0)));
10295 if (! FLOAT_TYPE_P (type))
10297 if (integer_zerop (arg1))
10298 return omit_one_operand_loc (loc, type, arg1, arg0);
10299 if (integer_onep (arg1))
10300 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
10301 /* Transform x * -1 into -x. Make sure to do the negation
10302 on the original operand with conversions not stripped
10303 because we can only strip non-sign-changing conversions. */
10304 if (integer_all_onesp (arg1))
10305 return fold_convert_loc (loc, type, negate_expr (op0));
10306 /* Transform x * -C into -x * C if x is easily negatable. */
10307 if (TREE_CODE (arg1) == INTEGER_CST
10308 && tree_int_cst_sgn (arg1) == -1
10309 && negate_expr_p (arg0)
10310 && (tem = negate_expr (arg1)) != arg1
10311 && !TREE_OVERFLOW (tem))
10312 return fold_build2_loc (loc, MULT_EXPR, type,
10313 fold_convert_loc (loc, type,
10314 negate_expr (arg0)),
10315 tem);
10317 /* (a * (1 << b)) is (a << b) */
10318 if (TREE_CODE (arg1) == LSHIFT_EXPR
10319 && integer_onep (TREE_OPERAND (arg1, 0)))
10320 return fold_build2_loc (loc, LSHIFT_EXPR, type, op0,
10321 TREE_OPERAND (arg1, 1));
10322 if (TREE_CODE (arg0) == LSHIFT_EXPR
10323 && integer_onep (TREE_OPERAND (arg0, 0)))
10324 return fold_build2_loc (loc, LSHIFT_EXPR, type, op1,
10325 TREE_OPERAND (arg0, 1));
10327 /* (A + A) * C -> A * 2 * C */
10328 if (TREE_CODE (arg0) == PLUS_EXPR
10329 && TREE_CODE (arg1) == INTEGER_CST
10330 && operand_equal_p (TREE_OPERAND (arg0, 0),
10331 TREE_OPERAND (arg0, 1), 0))
10332 return fold_build2_loc (loc, MULT_EXPR, type,
10333 omit_one_operand_loc (loc, type,
10334 TREE_OPERAND (arg0, 0),
10335 TREE_OPERAND (arg0, 1)),
10336 fold_build2_loc (loc, MULT_EXPR, type,
10337 build_int_cst (type, 2), arg1));
10339 strict_overflow_p = false;
10340 if (TREE_CODE (arg1) == INTEGER_CST
10341 && 0 != (tem = extract_muldiv (op0, arg1, code, NULL_TREE,
10342 &strict_overflow_p)))
10344 if (strict_overflow_p)
10345 fold_overflow_warning (("assuming signed overflow does not "
10346 "occur when simplifying "
10347 "multiplication"),
10348 WARN_STRICT_OVERFLOW_MISC);
10349 return fold_convert_loc (loc, type, tem);
10352 /* Optimize z * conj(z) for integer complex numbers. */
10353 if (TREE_CODE (arg0) == CONJ_EXPR
10354 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
10355 return fold_mult_zconjz (loc, type, arg1);
10356 if (TREE_CODE (arg1) == CONJ_EXPR
10357 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
10358 return fold_mult_zconjz (loc, type, arg0);
10360 else
10362 /* Maybe fold x * 0 to 0. The expressions aren't the same
10363 when x is NaN, since x * 0 is also NaN. Nor are they the
10364 same in modes with signed zeros, since multiplying a
10365 negative value by 0 gives -0, not +0. */
10366 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0)))
10367 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
10368 && real_zerop (arg1))
10369 return omit_one_operand_loc (loc, type, arg1, arg0);
10370 /* In IEEE floating point, x*1 is not equivalent to x for snans.
10371 Likewise for complex arithmetic with signed zeros. */
10372 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
10373 && (!HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
10374 || !COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0)))
10375 && real_onep (arg1))
10376 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
10378 /* Transform x * -1.0 into -x. */
10379 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
10380 && (!HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
10381 || !COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0)))
10382 && real_minus_onep (arg1))
10383 return fold_convert_loc (loc, type, negate_expr (arg0));
10385 /* Convert (C1/X)*C2 into (C1*C2)/X. This transformation may change
10386 the result for floating point types due to rounding so it is applied
10387 only if -fassociative-math was specified. */
10388 if (flag_associative_math
10389 && TREE_CODE (arg0) == RDIV_EXPR
10390 && TREE_CODE (arg1) == REAL_CST
10391 && TREE_CODE (TREE_OPERAND (arg0, 0)) == REAL_CST)
10393 tree tem = const_binop (MULT_EXPR, TREE_OPERAND (arg0, 0),
10394 arg1);
10395 if (tem)
10396 return fold_build2_loc (loc, RDIV_EXPR, type, tem,
10397 TREE_OPERAND (arg0, 1));
10400 /* Strip sign operations from X in X*X, i.e. -Y*-Y -> Y*Y. */
10401 if (operand_equal_p (arg0, arg1, 0))
10403 tree tem = fold_strip_sign_ops (arg0);
10404 if (tem != NULL_TREE)
10406 tem = fold_convert_loc (loc, type, tem);
10407 return fold_build2_loc (loc, MULT_EXPR, type, tem, tem);
10411 /* Fold z * +-I to __complex__ (-+__imag z, +-__real z).
10412 This is not the same for NaNs or if signed zeros are
10413 involved. */
10414 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0)))
10415 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
10416 && COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0))
10417 && TREE_CODE (arg1) == COMPLEX_CST
10418 && real_zerop (TREE_REALPART (arg1)))
10420 tree rtype = TREE_TYPE (TREE_TYPE (arg0));
10421 if (real_onep (TREE_IMAGPART (arg1)))
10422 return
10423 fold_build2_loc (loc, COMPLEX_EXPR, type,
10424 negate_expr (fold_build1_loc (loc, IMAGPART_EXPR,
10425 rtype, arg0)),
10426 fold_build1_loc (loc, REALPART_EXPR, rtype, arg0));
10427 else if (real_minus_onep (TREE_IMAGPART (arg1)))
10428 return
10429 fold_build2_loc (loc, COMPLEX_EXPR, type,
10430 fold_build1_loc (loc, IMAGPART_EXPR, rtype, arg0),
10431 negate_expr (fold_build1_loc (loc, REALPART_EXPR,
10432 rtype, arg0)));
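/* Derivation (added commentary): with z == a + bi,
   z * I == (a + bi) * (0 + 1i) == -b + ai, which is exactly
   __complex__ (-__imag z, __real z) as built above.  */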
10435 /* Optimize z * conj(z) for floating point complex numbers.
10436 Guarded by flag_unsafe_math_optimizations as non-finite
10437 imaginary components don't produce scalar results. */
10438 if (flag_unsafe_math_optimizations
10439 && TREE_CODE (arg0) == CONJ_EXPR
10440 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
10441 return fold_mult_zconjz (loc, type, arg1);
10442 if (flag_unsafe_math_optimizations
10443 && TREE_CODE (arg1) == CONJ_EXPR
10444 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
10445 return fold_mult_zconjz (loc, type, arg0);
10447 if (flag_unsafe_math_optimizations)
10449 enum built_in_function fcode0 = builtin_mathfn_code (arg0);
10450 enum built_in_function fcode1 = builtin_mathfn_code (arg1);
10452 /* Optimizations of root(...)*root(...). */
10453 if (fcode0 == fcode1 && BUILTIN_ROOT_P (fcode0))
10455 tree rootfn, arg;
10456 tree arg00 = CALL_EXPR_ARG (arg0, 0);
10457 tree arg10 = CALL_EXPR_ARG (arg1, 0);
10459 /* Optimize sqrt(x)*sqrt(x) as x. */
10460 if (BUILTIN_SQRT_P (fcode0)
10461 && operand_equal_p (arg00, arg10, 0)
10462 && ! HONOR_SNANS (TYPE_MODE (type)))
10463 return arg00;
10465 /* Optimize root(x)*root(y) as root(x*y). */
10466 rootfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
10467 arg = fold_build2_loc (loc, MULT_EXPR, type, arg00, arg10);
10468 return build_call_expr_loc (loc, rootfn, 1, arg);
10471 /* Optimize expN(x)*expN(y) as expN(x+y). */
10472 if (fcode0 == fcode1 && BUILTIN_EXPONENT_P (fcode0))
10474 tree expfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
10475 tree arg = fold_build2_loc (loc, PLUS_EXPR, type,
10476 CALL_EXPR_ARG (arg0, 0),
10477 CALL_EXPR_ARG (arg1, 0));
10478 return build_call_expr_loc (loc, expfn, 1, arg);
10481 /* Optimizations of pow(...)*pow(...). */
10482 if ((fcode0 == BUILT_IN_POW && fcode1 == BUILT_IN_POW)
10483 || (fcode0 == BUILT_IN_POWF && fcode1 == BUILT_IN_POWF)
10484 || (fcode0 == BUILT_IN_POWL && fcode1 == BUILT_IN_POWL))
10486 tree arg00 = CALL_EXPR_ARG (arg0, 0);
10487 tree arg01 = CALL_EXPR_ARG (arg0, 1);
10488 tree arg10 = CALL_EXPR_ARG (arg1, 0);
10489 tree arg11 = CALL_EXPR_ARG (arg1, 1);
10491 /* Optimize pow(x,y)*pow(z,y) as pow(x*z,y). */
10492 if (operand_equal_p (arg01, arg11, 0))
10494 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
10495 tree arg = fold_build2_loc (loc, MULT_EXPR, type,
10496 arg00, arg10);
10497 return build_call_expr_loc (loc, powfn, 2, arg, arg01);
10500 /* Optimize pow(x,y)*pow(x,z) as pow(x,y+z). */
10501 if (operand_equal_p (arg00, arg10, 0))
10503 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
10504 tree arg = fold_build2_loc (loc, PLUS_EXPR, type,
10505 arg01, arg11);
10506 return build_call_expr_loc (loc, powfn, 2, arg00, arg);
10510 /* Optimize tan(x)*cos(x) as sin(x). */
10511 if (((fcode0 == BUILT_IN_TAN && fcode1 == BUILT_IN_COS)
10512 || (fcode0 == BUILT_IN_TANF && fcode1 == BUILT_IN_COSF)
10513 || (fcode0 == BUILT_IN_TANL && fcode1 == BUILT_IN_COSL)
10514 || (fcode0 == BUILT_IN_COS && fcode1 == BUILT_IN_TAN)
10515 || (fcode0 == BUILT_IN_COSF && fcode1 == BUILT_IN_TANF)
10516 || (fcode0 == BUILT_IN_COSL && fcode1 == BUILT_IN_TANL))
10517 && operand_equal_p (CALL_EXPR_ARG (arg0, 0),
10518 CALL_EXPR_ARG (arg1, 0), 0))
10520 tree sinfn = mathfn_built_in (type, BUILT_IN_SIN);
10522 if (sinfn != NULL_TREE)
10523 return build_call_expr_loc (loc, sinfn, 1,
10524 CALL_EXPR_ARG (arg0, 0));
10527 /* Optimize x*pow(x,c) as pow(x,c+1). */
10528 if (fcode1 == BUILT_IN_POW
10529 || fcode1 == BUILT_IN_POWF
10530 || fcode1 == BUILT_IN_POWL)
10532 tree arg10 = CALL_EXPR_ARG (arg1, 0);
10533 tree arg11 = CALL_EXPR_ARG (arg1, 1);
10534 if (TREE_CODE (arg11) == REAL_CST
10535 && !TREE_OVERFLOW (arg11)
10536 && operand_equal_p (arg0, arg10, 0))
10538 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg1), 0);
10539 REAL_VALUE_TYPE c;
10540 tree arg;
10542 c = TREE_REAL_CST (arg11);
10543 real_arithmetic (&c, PLUS_EXPR, &c, &dconst1);
10544 arg = build_real (type, c);
10545 return build_call_expr_loc (loc, powfn, 2, arg0, arg);
10549 /* Optimize pow(x,c)*x as pow(x,c+1). */
10550 if (fcode0 == BUILT_IN_POW
10551 || fcode0 == BUILT_IN_POWF
10552 || fcode0 == BUILT_IN_POWL)
10554 tree arg00 = CALL_EXPR_ARG (arg0, 0);
10555 tree arg01 = CALL_EXPR_ARG (arg0, 1);
10556 if (TREE_CODE (arg01) == REAL_CST
10557 && !TREE_OVERFLOW (arg01)
10558 && operand_equal_p (arg1, arg00, 0))
10560 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
10561 REAL_VALUE_TYPE c;
10562 tree arg;
10564 c = TREE_REAL_CST (arg01);
10565 real_arithmetic (&c, PLUS_EXPR, &c, &dconst1);
10566 arg = build_real (type, c);
10567 return build_call_expr_loc (loc, powfn, 2, arg1, arg);
10571 /* Optimize x*x as pow(x,2.0), which is expanded as x*x. */
10572 if (optimize_function_for_speed_p (cfun)
10573 && operand_equal_p (arg0, arg1, 0))
10575 tree powfn = mathfn_built_in (type, BUILT_IN_POW);
10577 if (powfn)
10579 tree arg = build_real (type, dconst2);
10580 return build_call_expr_loc (loc, powfn, 2, arg0, arg);
10585 goto associate;
10587 case BIT_IOR_EXPR:
10588 bit_ior:
10589 if (integer_all_onesp (arg1))
10590 return omit_one_operand_loc (loc, type, arg1, arg0);
10591 if (integer_zerop (arg1))
10592 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
10593 if (operand_equal_p (arg0, arg1, 0))
10594 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
10596 /* ~X | X is -1. */
10597 if (TREE_CODE (arg0) == BIT_NOT_EXPR
10598 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
10600 t1 = build_zero_cst (type);
10601 t1 = fold_unary_loc (loc, BIT_NOT_EXPR, type, t1);
10602 return omit_one_operand_loc (loc, type, t1, arg1);
10605 /* X | ~X is -1. */
10606 if (TREE_CODE (arg1) == BIT_NOT_EXPR
10607 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
10609 t1 = build_zero_cst (type);
10610 t1 = fold_unary_loc (loc, BIT_NOT_EXPR, type, t1);
10611 return omit_one_operand_loc (loc, type, t1, arg0);
10614 /* Canonicalize (X & C1) | C2. */
10615 if (TREE_CODE (arg0) == BIT_AND_EXPR
10616 && TREE_CODE (arg1) == INTEGER_CST
10617 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
10619 unsigned HOST_WIDE_INT hi1, lo1, hi2, lo2, hi3, lo3, mlo, mhi;
10620 int width = TYPE_PRECISION (type), w;
10621 hi1 = TREE_INT_CST_HIGH (TREE_OPERAND (arg0, 1));
10622 lo1 = TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1));
10623 hi2 = TREE_INT_CST_HIGH (arg1);
10624 lo2 = TREE_INT_CST_LOW (arg1);
10626 /* If (C1&C2) == C1, then (X&C1)|C2 becomes (X,C2). */
10627 if ((hi1 & hi2) == hi1 && (lo1 & lo2) == lo1)
10628 return omit_one_operand_loc (loc, type, arg1,
10629 TREE_OPERAND (arg0, 0));
10631 if (width > HOST_BITS_PER_WIDE_INT)
10633 mhi = (unsigned HOST_WIDE_INT) -1
10634 >> (2 * HOST_BITS_PER_WIDE_INT - width);
10635 mlo = -1;
10637 else
10639 mhi = 0;
10640 mlo = (unsigned HOST_WIDE_INT) -1
10641 >> (HOST_BITS_PER_WIDE_INT - width);
10644 /* If (C1|C2) == ~0 then (X&C1)|C2 becomes X|C2. */
10645 if ((~(hi1 | hi2) & mhi) == 0 && (~(lo1 | lo2) & mlo) == 0)
10646 return fold_build2_loc (loc, BIT_IOR_EXPR, type,
10647 TREE_OPERAND (arg0, 0), arg1);
10649 /* Minimize the number of bits set in C1, i.e. C1 := C1 & ~C2,
10650 unless (C1 & ~C2) | (C2 & C3) for some C3 is a mask of some
10651 mode which allows further optimizations. */
10652 hi1 &= mhi;
10653 lo1 &= mlo;
10654 hi2 &= mhi;
10655 lo2 &= mlo;
10656 hi3 = hi1 & ~hi2;
10657 lo3 = lo1 & ~lo2;
10658 for (w = BITS_PER_UNIT;
10659 w <= width && w <= HOST_BITS_PER_WIDE_INT;
10660 w <<= 1)
10662 unsigned HOST_WIDE_INT mask
10663 = (unsigned HOST_WIDE_INT) -1 >> (HOST_BITS_PER_WIDE_INT - w);
10664 if (((lo1 | lo2) & mask) == mask
10665 && (lo1 & ~mask) == 0 && hi1 == 0)
10667 hi3 = 0;
10668 lo3 = mask;
10669 break;
10672 if (hi3 != hi1 || lo3 != lo1)
10673 return fold_build2_loc (loc, BIT_IOR_EXPR, type,
10674 fold_build2_loc (loc, BIT_AND_EXPR, type,
10675 TREE_OPERAND (arg0, 0),
10676 build_int_cst_wide (type,
10677 lo3, hi3)),
10678 arg1);
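/* For illustration, two worked instances with hypothetical 32-bit
   constants:
     (X & 0x0f) | 0x06  ==>  (X & 0x09) | 0x06   (C1 &= ~C2)
     (X & 0xf0) | 0x0f  ==>  (X & 0xff) | 0x0f   (C1 | C2 covers a
                                                  byte-sized mask)
   In the first form the AND mask drops the bits C2 forces to one; in
   the second it is widened to a mode-sized mask instead.  */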
10681 /* (X & Y) | Y is (X, Y). */
10682 if (TREE_CODE (arg0) == BIT_AND_EXPR
10683 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
10684 return omit_one_operand_loc (loc, type, arg1, TREE_OPERAND (arg0, 0));
10685 /* (X & Y) | X is (Y, X). */
10686 if (TREE_CODE (arg0) == BIT_AND_EXPR
10687 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
10688 && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
10689 return omit_one_operand_loc (loc, type, arg1, TREE_OPERAND (arg0, 1));
10690 /* X | (X & Y) is (Y, X). */
10691 if (TREE_CODE (arg1) == BIT_AND_EXPR
10692 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0)
10693 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 1)))
10694 return omit_one_operand_loc (loc, type, arg0, TREE_OPERAND (arg1, 1));
10695 /* X | (Y & X) is (Y, X). */
10696 if (TREE_CODE (arg1) == BIT_AND_EXPR
10697 && operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0)
10698 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
10699 return omit_one_operand_loc (loc, type, arg0, TREE_OPERAND (arg1, 0));
10701 t1 = distribute_bit_expr (loc, code, type, arg0, arg1);
10702 if (t1 != NULL_TREE)
10703 return t1;
10705 /* Convert (or (not arg0) (not arg1)) to (not (and (arg0) (arg1))).
10707 This results in more efficient code for machines without a NAND
10708 instruction. Combine will canonicalize to the first form
10709 which will allow use of NAND instructions provided by the
10710 backend if they exist. */
10711 if (TREE_CODE (arg0) == BIT_NOT_EXPR
10712 && TREE_CODE (arg1) == BIT_NOT_EXPR)
10714 return
10715 fold_build1_loc (loc, BIT_NOT_EXPR, type,
10716 build2 (BIT_AND_EXPR, type,
10717 fold_convert_loc (loc, type,
10718 TREE_OPERAND (arg0, 0)),
10719 fold_convert_loc (loc, type,
10720 TREE_OPERAND (arg1, 0))));
10723 /* See if this can be simplified into a rotate first. If that
10724 is unsuccessful continue in the association code. */
10725 goto bit_rotate;
10727 case BIT_XOR_EXPR:
10728 if (integer_zerop (arg1))
10729 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
10730 if (integer_all_onesp (arg1))
10731 return fold_build1_loc (loc, BIT_NOT_EXPR, type, op0);
10732 if (operand_equal_p (arg0, arg1, 0))
10733 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
10735 /* ~X ^ X is -1. */
10736 if (TREE_CODE (arg0) == BIT_NOT_EXPR
10737 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
10739 t1 = build_zero_cst (type);
10740 t1 = fold_unary_loc (loc, BIT_NOT_EXPR, type, t1);
10741 return omit_one_operand_loc (loc, type, t1, arg1);
10744 /* X ^ ~X is -1. */
10745 if (TREE_CODE (arg1) == BIT_NOT_EXPR
10746 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
10748 t1 = build_zero_cst (type);
10749 t1 = fold_unary_loc (loc, BIT_NOT_EXPR, type, t1);
10750 return omit_one_operand_loc (loc, type, t1, arg0);
10753 /* If we are XORing two BIT_AND_EXPR's, both of which are and'ing
10754 with a constant, and the two constants have no bits in common,
10755 we should treat this as a BIT_IOR_EXPR since this may produce more
10756 simplifications. */
10757 if (TREE_CODE (arg0) == BIT_AND_EXPR
10758 && TREE_CODE (arg1) == BIT_AND_EXPR
10759 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
10760 && TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
10761 && integer_zerop (const_binop (BIT_AND_EXPR,
10762 TREE_OPERAND (arg0, 1),
10763 TREE_OPERAND (arg1, 1))))
10765 code = BIT_IOR_EXPR;
10766 goto bit_ior;
10769 /* (X | Y) ^ X -> Y & ~X.  */
10770 if (TREE_CODE (arg0) == BIT_IOR_EXPR
10771 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
10773 tree t2 = TREE_OPERAND (arg0, 1);
10774 t1 = fold_build1_loc (loc, BIT_NOT_EXPR, TREE_TYPE (arg1),
10775 arg1);
10776 t1 = fold_build2_loc (loc, BIT_AND_EXPR, type,
10777 fold_convert_loc (loc, type, t2),
10778 fold_convert_loc (loc, type, t1));
10779 return t1;
10782 /* (Y | X) ^ X -> Y & ~X.  */
10783 if (TREE_CODE (arg0) == BIT_IOR_EXPR
10784 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
10786 tree t2 = TREE_OPERAND (arg0, 0);
10787 t1 = fold_build1_loc (loc, BIT_NOT_EXPR, TREE_TYPE (arg1),
10788 arg1);
10789 t1 = fold_build2_loc (loc, BIT_AND_EXPR, type,
10790 fold_convert_loc (loc, type, t2),
10791 fold_convert_loc (loc, type, t1));
10792 return t1;
10795 /* X ^ (X | Y) -> Y & ~X.  */
10796 if (TREE_CODE (arg1) == BIT_IOR_EXPR
10797 && operand_equal_p (TREE_OPERAND (arg1, 0), arg0, 0))
10799 tree t2 = TREE_OPERAND (arg1, 1);
10800 t1 = fold_build1_loc (loc, BIT_NOT_EXPR, TREE_TYPE (arg0),
10801 arg0);
10802 t1 = fold_build2_loc (loc, BIT_AND_EXPR, type,
10803 fold_convert_loc (loc, type, t2),
10804 fold_convert_loc (loc, type, t1));
10805 return t1;
10808 /* X ^ (Y | X) -> Y & ~X.  */
10809 if (TREE_CODE (arg1) == BIT_IOR_EXPR
10810 && operand_equal_p (TREE_OPERAND (arg1, 1), arg0, 0))
10812 tree t2 = TREE_OPERAND (arg1, 0);
10813 t1 = fold_build1_loc (loc, BIT_NOT_EXPR, TREE_TYPE (arg0),
10814 arg0);
10815 t1 = fold_build2_loc (loc, BIT_AND_EXPR, type,
10816 fold_convert_loc (loc, type, t2),
10817 fold_convert_loc (loc, type, t1));
10818 return t1;
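/* A bitwise check of the identity behind the four cases above: where
   X has a one bit, (X | Y) ^ X and Y & ~X are both zero; where X has
   a zero bit, both reduce to Y.  Hence (X | Y) ^ X == Y & ~X
   regardless of operand order.  */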
10821 /* Convert ~X ^ ~Y to X ^ Y. */
10822 if (TREE_CODE (arg0) == BIT_NOT_EXPR
10823 && TREE_CODE (arg1) == BIT_NOT_EXPR)
10824 return fold_build2_loc (loc, code, type,
10825 fold_convert_loc (loc, type,
10826 TREE_OPERAND (arg0, 0)),
10827 fold_convert_loc (loc, type,
10828 TREE_OPERAND (arg1, 0)));
10830 /* Convert ~X ^ C to X ^ ~C. */
10831 if (TREE_CODE (arg0) == BIT_NOT_EXPR
10832 && TREE_CODE (arg1) == INTEGER_CST)
10833 return fold_build2_loc (loc, code, type,
10834 fold_convert_loc (loc, type,
10835 TREE_OPERAND (arg0, 0)),
10836 fold_build1_loc (loc, BIT_NOT_EXPR, type, arg1));
10838 /* Fold (X & 1) ^ 1 as (X & 1) == 0. */
10839 if (TREE_CODE (arg0) == BIT_AND_EXPR
10840 && integer_onep (TREE_OPERAND (arg0, 1))
10841 && integer_onep (arg1))
10842 return fold_build2_loc (loc, EQ_EXPR, type, arg0,
10843 build_int_cst (TREE_TYPE (arg0), 0));
10845 /* Fold (X & Y) ^ Y as ~X & Y. */
10846 if (TREE_CODE (arg0) == BIT_AND_EXPR
10847 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
10849 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
10850 return fold_build2_loc (loc, BIT_AND_EXPR, type,
10851 fold_build1_loc (loc, BIT_NOT_EXPR, type, tem),
10852 fold_convert_loc (loc, type, arg1));
10854 /* Fold (X & Y) ^ X as ~Y & X. */
10855 if (TREE_CODE (arg0) == BIT_AND_EXPR
10856 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
10857 && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
10859 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
10860 return fold_build2_loc (loc, BIT_AND_EXPR, type,
10861 fold_build1_loc (loc, BIT_NOT_EXPR, type, tem),
10862 fold_convert_loc (loc, type, arg1));
10864 /* Fold X ^ (X & Y) as X & ~Y. */
10865 if (TREE_CODE (arg1) == BIT_AND_EXPR
10866 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
10868 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 1));
10869 return fold_build2_loc (loc, BIT_AND_EXPR, type,
10870 fold_convert_loc (loc, type, arg0),
10871 fold_build1_loc (loc, BIT_NOT_EXPR, type, tem));
10873 /* Fold X ^ (Y & X) as ~Y & X. */
10874 if (TREE_CODE (arg1) == BIT_AND_EXPR
10875 && operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0)
10876 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
10878 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 0));
10879 return fold_build2_loc (loc, BIT_AND_EXPR, type,
10880 fold_build1_loc (loc, BIT_NOT_EXPR, type, tem),
10881 fold_convert_loc (loc, type, arg0));
10884 /* See if this can be simplified into a rotate first. If that
10885 is unsuccessful continue in the association code. */
10886 goto bit_rotate;
10888 case BIT_AND_EXPR:
10889 if (integer_all_onesp (arg1))
10890 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
10891 if (integer_zerop (arg1))
10892 return omit_one_operand_loc (loc, type, arg1, arg0);
10893 if (operand_equal_p (arg0, arg1, 0))
10894 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
10896 /* ~X & X is always zero. */
10897 if (TREE_CODE (arg0) == BIT_NOT_EXPR
10898 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
10899 return omit_one_operand_loc (loc, type, integer_zero_node, arg1);
10901 /* X & ~X is always zero. */
10902 if (TREE_CODE (arg1) == BIT_NOT_EXPR
10903 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
10904 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
10906 /* Canonicalize (X | C1) & C2 as (X & C2) | (C1 & C2). */
10907 if (TREE_CODE (arg0) == BIT_IOR_EXPR
10908 && TREE_CODE (arg1) == INTEGER_CST
10909 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
10911 tree tmp1 = fold_convert_loc (loc, type, arg1);
10912 tree tmp2 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
10913 tree tmp3 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
10914 tmp2 = fold_build2_loc (loc, BIT_AND_EXPR, type, tmp2, tmp1);
10915 tmp3 = fold_build2_loc (loc, BIT_AND_EXPR, type, tmp3, tmp1);
10916 return
10917 fold_convert_loc (loc, type,
10918 fold_build2_loc (loc, BIT_IOR_EXPR,
10919 type, tmp2, tmp3));
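/* For illustration, with the hypothetical constants C1 = 0x30 and
   C2 = 0xf0:
     (X | 0x30) & 0xf0  ==>  (X & 0xf0) | 0x30
   since 0x30 & 0xf0 == 0x30; each half is then refolded and may
   simplify further.  */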
10922 /* (X | Y) & Y is (X, Y). */
10923 if (TREE_CODE (arg0) == BIT_IOR_EXPR
10924 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
10925 return omit_one_operand_loc (loc, type, arg1, TREE_OPERAND (arg0, 0));
10926 /* (X | Y) & X is (Y, X). */
10927 if (TREE_CODE (arg0) == BIT_IOR_EXPR
10928 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
10929 && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
10930 return omit_one_operand_loc (loc, type, arg1, TREE_OPERAND (arg0, 1));
10931 /* X & (X | Y) is (Y, X). */
10932 if (TREE_CODE (arg1) == BIT_IOR_EXPR
10933 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0)
10934 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 1)))
10935 return omit_one_operand_loc (loc, type, arg0, TREE_OPERAND (arg1, 1));
10936 /* X & (Y | X) is (Y, X). */
10937 if (TREE_CODE (arg1) == BIT_IOR_EXPR
10938 && operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0)
10939 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
10940 return omit_one_operand_loc (loc, type, arg0, TREE_OPERAND (arg1, 0));
10942 /* Fold (X ^ 1) & 1 as (X & 1) == 0. */
10943 if (TREE_CODE (arg0) == BIT_XOR_EXPR
10944 && integer_onep (TREE_OPERAND (arg0, 1))
10945 && integer_onep (arg1))
10947 tem = TREE_OPERAND (arg0, 0);
10948 return fold_build2_loc (loc, EQ_EXPR, type,
10949 fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (tem), tem,
10950 build_int_cst (TREE_TYPE (tem), 1)),
10951 build_int_cst (TREE_TYPE (tem), 0));
10953 /* Fold ~X & 1 as (X & 1) == 0. */
10954 if (TREE_CODE (arg0) == BIT_NOT_EXPR
10955 && integer_onep (arg1))
10957 tem = TREE_OPERAND (arg0, 0);
10958 return fold_build2_loc (loc, EQ_EXPR, type,
10959 fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (tem), tem,
10960 build_int_cst (TREE_TYPE (tem), 1)),
10961 build_int_cst (TREE_TYPE (tem), 0));
10964 /* Fold (X ^ Y) & Y as ~X & Y. */
10965 if (TREE_CODE (arg0) == BIT_XOR_EXPR
10966 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
10968 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
10969 return fold_build2_loc (loc, BIT_AND_EXPR, type,
10970 fold_build1_loc (loc, BIT_NOT_EXPR, type, tem),
10971 fold_convert_loc (loc, type, arg1));
10973 /* Fold (X ^ Y) & X as ~Y & X. */
10974 if (TREE_CODE (arg0) == BIT_XOR_EXPR
10975 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
10976 && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
10978 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
10979 return fold_build2_loc (loc, BIT_AND_EXPR, type,
10980 fold_build1_loc (loc, BIT_NOT_EXPR, type, tem),
10981 fold_convert_loc (loc, type, arg1));
10983 /* Fold X & (X ^ Y) as X & ~Y. */
10984 if (TREE_CODE (arg1) == BIT_XOR_EXPR
10985 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
10987 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 1));
10988 return fold_build2_loc (loc, BIT_AND_EXPR, type,
10989 fold_convert_loc (loc, type, arg0),
10990 fold_build1_loc (loc, BIT_NOT_EXPR, type, tem));
10992 /* Fold X & (Y ^ X) as ~Y & X. */
10993 if (TREE_CODE (arg1) == BIT_XOR_EXPR
10994 && operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0)
10995 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
10997 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 0));
10998 return fold_build2_loc (loc, BIT_AND_EXPR, type,
10999 fold_build1_loc (loc, BIT_NOT_EXPR, type, tem),
11000 fold_convert_loc (loc, type, arg0));
11003 /* For constants M and N, if M == (1LL << cst) - 1 && (N & M) == M,
11004 ((A & N) + B) & M -> (A + B) & M
11005 Similarly if (N & M) == 0,
11006 ((A | N) + B) & M -> (A + B) & M
11007 and for - instead of + (or unary - instead of +)
11008 and/or ^ instead of |.
11009 If B is constant and (B & M) == 0, fold into A & M. */
11010 if (host_integerp (arg1, 1))
11012 unsigned HOST_WIDE_INT cst1 = tree_low_cst (arg1, 1);
11013 if (~cst1 && (cst1 & (cst1 + 1)) == 0
11014 && INTEGRAL_TYPE_P (TREE_TYPE (arg0))
11015 && (TREE_CODE (arg0) == PLUS_EXPR
11016 || TREE_CODE (arg0) == MINUS_EXPR
11017 || TREE_CODE (arg0) == NEGATE_EXPR)
11018 && (TYPE_OVERFLOW_WRAPS (TREE_TYPE (arg0))
11019 || TREE_CODE (TREE_TYPE (arg0)) == INTEGER_TYPE))
11021 tree pmop[2];
11022 int which = 0;
11023 unsigned HOST_WIDE_INT cst0;
11025 /* Now we know that arg0 is (C + D) or (C - D) or
11026 -C and arg1 (M) is == (1LL << cst) - 1.
11027 Store C into PMOP[0] and D into PMOP[1]. */
11028 pmop[0] = TREE_OPERAND (arg0, 0);
11029 pmop[1] = NULL;
11030 if (TREE_CODE (arg0) != NEGATE_EXPR)
11032 pmop[1] = TREE_OPERAND (arg0, 1);
11033 which = 1;
11036 if (!host_integerp (TYPE_MAX_VALUE (TREE_TYPE (arg0)), 1)
11037 || (tree_low_cst (TYPE_MAX_VALUE (TREE_TYPE (arg0)), 1)
11038 & cst1) != cst1)
11039 which = -1;
11041 for (; which >= 0; which--)
11042 switch (TREE_CODE (pmop[which]))
11044 case BIT_AND_EXPR:
11045 case BIT_IOR_EXPR:
11046 case BIT_XOR_EXPR:
11047 if (TREE_CODE (TREE_OPERAND (pmop[which], 1))
11048 != INTEGER_CST)
11049 break;
11050 /* tree_low_cst not used, because we don't care about
11051 the upper bits. */
11052 cst0 = TREE_INT_CST_LOW (TREE_OPERAND (pmop[which], 1));
11053 cst0 &= cst1;
11054 if (TREE_CODE (pmop[which]) == BIT_AND_EXPR)
11056 if (cst0 != cst1)
11057 break;
11059 else if (cst0 != 0)
11060 break;
11061 /* If C or D is of the form (A & N) where
11062 (N & M) == M, or of the form (A | N) or
11063 (A ^ N) where (N & M) == 0, replace it with A. */
11064 pmop[which] = TREE_OPERAND (pmop[which], 0);
11065 break;
11066 case INTEGER_CST:
11067 /* If C or D is a N where (N & M) == 0, it can be
11068 omitted (assumed 0). */
11069 if ((TREE_CODE (arg0) == PLUS_EXPR
11070 || (TREE_CODE (arg0) == MINUS_EXPR && which == 0))
11071 && (TREE_INT_CST_LOW (pmop[which]) & cst1) == 0)
11072 pmop[which] = NULL;
11073 break;
11074 default:
11075 break;
11078 /* Only build anything new if we optimized one or both arguments
11079 above. */
11080 if (pmop[0] != TREE_OPERAND (arg0, 0)
11081 || (TREE_CODE (arg0) != NEGATE_EXPR
11082 && pmop[1] != TREE_OPERAND (arg0, 1)))
11084 tree utype = TREE_TYPE (arg0);
11085 if (! TYPE_OVERFLOW_WRAPS (TREE_TYPE (arg0)))
11087 /* Perform the operations in a type that has defined
11088 overflow behavior. */
11089 utype = unsigned_type_for (TREE_TYPE (arg0));
11090 if (pmop[0] != NULL)
11091 pmop[0] = fold_convert_loc (loc, utype, pmop[0]);
11092 if (pmop[1] != NULL)
11093 pmop[1] = fold_convert_loc (loc, utype, pmop[1]);
11096 if (TREE_CODE (arg0) == NEGATE_EXPR)
11097 tem = fold_build1_loc (loc, NEGATE_EXPR, utype, pmop[0]);
11098 else if (TREE_CODE (arg0) == PLUS_EXPR)
11100 if (pmop[0] != NULL && pmop[1] != NULL)
11101 tem = fold_build2_loc (loc, PLUS_EXPR, utype,
11102 pmop[0], pmop[1]);
11103 else if (pmop[0] != NULL)
11104 tem = pmop[0];
11105 else if (pmop[1] != NULL)
11106 tem = pmop[1];
11107 else
11108 return build_int_cst (type, 0);
11110 else if (pmop[0] == NULL)
11111 tem = fold_build1_loc (loc, NEGATE_EXPR, utype, pmop[1]);
11112 else
11113 tem = fold_build2_loc (loc, MINUS_EXPR, utype,
11114 pmop[0], pmop[1]);
11115 /* TEM is now the new binary +, - or unary - replacement. */
11116 tem = fold_build2_loc (loc, BIT_AND_EXPR, utype, tem,
11117 fold_convert_loc (loc, utype, arg1));
11118 return fold_convert_loc (loc, type, tem);
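/* Concrete instances of the rewrite above, with the hypothetical
   mask M = 0xff (so M + 1 is a power of two):
     ((A & 0xfff) + B) & 0xff  ==>  (A + B) & 0xff   (N & M == M)
     ((A | 0xf00) - B) & 0xff  ==>  (A - B) & 0xff   (N & M == 0)
   The low eight bits of a sum or difference depend only on the low
   eight bits of the operands, so the inner masking is dead.  */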
11123 t1 = distribute_bit_expr (loc, code, type, arg0, arg1);
11124 if (t1 != NULL_TREE)
11125 return t1;
11126 /* Simplify ((int)c & 0377) into (int)c, if c is unsigned char. */
11127 if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg0) == NOP_EXPR
11128 && TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg0, 0))))
11130 unsigned int prec
11131 = TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg0, 0)));
11133 if (prec < BITS_PER_WORD && prec < HOST_BITS_PER_WIDE_INT
11134 && (~TREE_INT_CST_LOW (arg1)
11135 & (((HOST_WIDE_INT) 1 << prec) - 1)) == 0)
11136 return
11137 fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
11140 /* Convert (and (not arg0) (not arg1)) to (not (or (arg0) (arg1))).
11142 This results in more efficient code for machines without a NOR
11143 instruction. Combine will canonicalize to the first form
11144 which will allow use of NOR instructions provided by the
11145 backend if they exist. */
11146 if (TREE_CODE (arg0) == BIT_NOT_EXPR
11147 && TREE_CODE (arg1) == BIT_NOT_EXPR)
11149 return fold_build1_loc (loc, BIT_NOT_EXPR, type,
11150 build2 (BIT_IOR_EXPR, type,
11151 fold_convert_loc (loc, type,
11152 TREE_OPERAND (arg0, 0)),
11153 fold_convert_loc (loc, type,
11154 TREE_OPERAND (arg1, 0))));
11157 /* If arg0 is derived from the address of an object or function, we may
11158 be able to fold this expression using the object or function's
11159 alignment. */
11160 if (POINTER_TYPE_P (TREE_TYPE (arg0)) && host_integerp (arg1, 1))
11162 unsigned HOST_WIDE_INT modulus, residue;
11163 unsigned HOST_WIDE_INT low = TREE_INT_CST_LOW (arg1);
11165 modulus = get_pointer_modulus_and_residue (arg0, &residue,
11166 integer_onep (arg1));
11168 /* This works because modulus is a power of 2. If this weren't the
11169 case, we'd have to replace it by its greatest power-of-2
11170 divisor: modulus & -modulus. */
11171 if (low < modulus)
11172 return build_int_cst (type, residue & low);
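/* For example, if ARG0 is the address of an object known to have
   16-byte alignment, modulus is 16 and residue is 0, so a test such
   as "& 7" on that address folds to the constant 0
   (low == 7 < modulus).  */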
11175 /* Fold (X << C1) & C2 into (X << C1) & (C2 | ((1 << C1) - 1))
11176 (X >> C1) & C2 into (X >> C1) & (C2 | ~((type) -1 >> C1))
11177 if the new mask might be further optimized. */
11178 if ((TREE_CODE (arg0) == LSHIFT_EXPR
11179 || TREE_CODE (arg0) == RSHIFT_EXPR)
11180 && host_integerp (TREE_OPERAND (arg0, 1), 1)
11181 && host_integerp (arg1, TYPE_UNSIGNED (TREE_TYPE (arg1)))
11182 && tree_low_cst (TREE_OPERAND (arg0, 1), 1)
11183 < TYPE_PRECISION (TREE_TYPE (arg0))
11184 && TYPE_PRECISION (TREE_TYPE (arg0)) <= HOST_BITS_PER_WIDE_INT
11185 && tree_low_cst (TREE_OPERAND (arg0, 1), 1) > 0)
11187 unsigned int shiftc = tree_low_cst (TREE_OPERAND (arg0, 1), 1);
11188 unsigned HOST_WIDE_INT mask
11189 = tree_low_cst (arg1, TYPE_UNSIGNED (TREE_TYPE (arg1)));
11190 unsigned HOST_WIDE_INT newmask, zerobits = 0;
11191 tree shift_type = TREE_TYPE (arg0);
11193 if (TREE_CODE (arg0) == LSHIFT_EXPR)
11194 zerobits = ((((unsigned HOST_WIDE_INT) 1) << shiftc) - 1);
11195 else if (TREE_CODE (arg0) == RSHIFT_EXPR
11196 && TYPE_PRECISION (TREE_TYPE (arg0))
11197 == GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (arg0))))
11199 unsigned int prec = TYPE_PRECISION (TREE_TYPE (arg0));
11200 tree arg00 = TREE_OPERAND (arg0, 0);
11201 /* See if more bits can be proven as zero because of
11202 zero extension. */
11203 if (TREE_CODE (arg00) == NOP_EXPR
11204 && TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg00, 0))))
11206 tree inner_type = TREE_TYPE (TREE_OPERAND (arg00, 0));
11207 if (TYPE_PRECISION (inner_type)
11208 == GET_MODE_BITSIZE (TYPE_MODE (inner_type))
11209 && TYPE_PRECISION (inner_type) < prec)
11211 prec = TYPE_PRECISION (inner_type);
11212 /* See if we can shorten the right shift. */
11213 if (shiftc < prec)
11214 shift_type = inner_type;
11217 zerobits = ~(unsigned HOST_WIDE_INT) 0;
11218 zerobits >>= HOST_BITS_PER_WIDE_INT - shiftc;
11219 zerobits <<= prec - shiftc;
11220 /* For an arithmetic right shift, if the sign bit could be set,
11221 zerobits may actually contain sign bits, so no transformation
11222 is possible unless MASK masks them all away.  In that case
11223 the shift needs to be converted into a logical shift. */
11224 if (!TYPE_UNSIGNED (TREE_TYPE (arg0))
11225 && prec == TYPE_PRECISION (TREE_TYPE (arg0)))
11227 if ((mask & zerobits) == 0)
11228 shift_type = unsigned_type_for (TREE_TYPE (arg0));
11229 else
11230 zerobits = 0;
11234 /* ((X << 16) & 0xff00) is (X, 0). */
11235 if ((mask & zerobits) == mask)
11236 return omit_one_operand_loc (loc, type,
11237 build_int_cst (type, 0), arg0);
11239 newmask = mask | zerobits;
11240 if (newmask != mask && (newmask & (newmask + 1)) == 0)
11242 unsigned int prec;
11244 /* Only do the transformation if NEWMASK is some integer
11245 mode's mask. */
11246 for (prec = BITS_PER_UNIT;
11247 prec < HOST_BITS_PER_WIDE_INT; prec <<= 1)
11248 if (newmask == (((unsigned HOST_WIDE_INT) 1) << prec) - 1)
11249 break;
11250 if (prec < HOST_BITS_PER_WIDE_INT
11251 || newmask == ~(unsigned HOST_WIDE_INT) 0)
11253 tree newmaskt;
11255 if (shift_type != TREE_TYPE (arg0))
11257 tem = fold_build2_loc (loc, TREE_CODE (arg0), shift_type,
11258 fold_convert_loc (loc, shift_type,
11259 TREE_OPERAND (arg0, 0)),
11260 TREE_OPERAND (arg0, 1));
11261 tem = fold_convert_loc (loc, type, tem);
11263 else
11264 tem = op0;
11265 newmaskt = build_int_cst_type (TREE_TYPE (op1), newmask);
11266 if (!tree_int_cst_equal (newmaskt, arg1))
11267 return fold_build2_loc (loc, BIT_AND_EXPR, type, tem, newmaskt);
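/* A worked case of the mask adjustment above, assuming a 32-bit
   unsigned X: in (X << 4) & 0xfff0 the low four bits of X << 4 are
   known zero (zerobits == 0xf), so newmask becomes 0xffff, a
   HImode-sized mask, and the expression is rewritten as
   (X << 4) & 0xffff, which later narrowing passes handle better.  */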
11272 goto associate;
11274 case RDIV_EXPR:
11275 /* Don't touch a floating-point divide by zero unless the mode
11276 of the constant can represent infinity. */
11277 if (TREE_CODE (arg1) == REAL_CST
11278 && !MODE_HAS_INFINITIES (TYPE_MODE (TREE_TYPE (arg1)))
11279 && real_zerop (arg1))
11280 return NULL_TREE;
11282 /* Optimize A / A to 1.0 if we don't care about
11283 NaNs or Infinities. Skip the transformation
11284 for non-real operands. */
11285 if (SCALAR_FLOAT_TYPE_P (TREE_TYPE (arg0))
11286 && ! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0)))
11287 && ! HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (arg0)))
11288 && operand_equal_p (arg0, arg1, 0))
11290 tree r = build_real (TREE_TYPE (arg0), dconst1);
11292 return omit_two_operands_loc (loc, type, r, arg0, arg1);
11295 /* The complex version of the above A / A optimization. */
11296 if (COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0))
11297 && operand_equal_p (arg0, arg1, 0))
11299 tree elem_type = TREE_TYPE (TREE_TYPE (arg0));
11300 if (! HONOR_NANS (TYPE_MODE (elem_type))
11301 && ! HONOR_INFINITIES (TYPE_MODE (elem_type)))
11303 tree r = build_real (elem_type, dconst1);
11304 /* omit_two_operands will call fold_convert for us. */
11305 return omit_two_operands_loc (loc, type, r, arg0, arg1);
11309 /* (-A) / (-B) -> A / B */
11310 if (TREE_CODE (arg0) == NEGATE_EXPR && negate_expr_p (arg1))
11311 return fold_build2_loc (loc, RDIV_EXPR, type,
11312 TREE_OPERAND (arg0, 0),
11313 negate_expr (arg1));
11314 if (TREE_CODE (arg1) == NEGATE_EXPR && negate_expr_p (arg0))
11315 return fold_build2_loc (loc, RDIV_EXPR, type,
11316 negate_expr (arg0),
11317 TREE_OPERAND (arg1, 0));
11319 /* In IEEE floating point, x/1 is not equivalent to x for snans. */
11320 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
11321 && real_onep (arg1))
11322 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
11324 /* In IEEE floating point, x/-1 is not equivalent to -x for snans. */
11325 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
11326 && real_minus_onep (arg1))
11327 return non_lvalue_loc (loc, fold_convert_loc (loc, type,
11328 negate_expr (arg0)));
11330 /* If ARG1 is a constant, we can convert this to a multiply by the
11331 reciprocal. This does not have the same rounding properties,
11332 so only do this if -freciprocal-math. We can actually
11333 always safely do it if ARG1 is a power of two, but it's hard to
11334 tell if it is or not in a portable manner. */
11335 if (TREE_CODE (arg1) == REAL_CST)
11337 if (flag_reciprocal_math
11338 && 0 != (tem = const_binop (code, build_real (type, dconst1),
11339 arg1)))
11340 return fold_build2_loc (loc, MULT_EXPR, type, arg0, tem);
11341 /* Find the reciprocal if optimizing and the result is exact. */
11342 if (optimize)
11344 REAL_VALUE_TYPE r;
11345 r = TREE_REAL_CST (arg1);
11346 if (exact_real_inverse (TYPE_MODE(TREE_TYPE(arg0)), &r))
11348 tem = build_real (type, r);
11349 return fold_build2_loc (loc, MULT_EXPR, type,
11350 fold_convert_loc (loc, type, arg0), tem);
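/* For illustration: the exact-inverse path above needs no flag beyond
   optimizing, e.g. x / 2.0 becomes x * 0.5 because 0.5 is exactly
   representable, whereas x / 3.0 is turned into x * (1.0 / 3.0) only
   under -freciprocal-math, since that changes rounding.  */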
11354 /* Convert A/B/C to A/(B*C). */
11355 if (flag_reciprocal_math
11356 && TREE_CODE (arg0) == RDIV_EXPR)
11357 return fold_build2_loc (loc, RDIV_EXPR, type, TREE_OPERAND (arg0, 0),
11358 fold_build2_loc (loc, MULT_EXPR, type,
11359 TREE_OPERAND (arg0, 1), arg1));
11361 /* Convert A/(B/C) to (A/B)*C. */
11362 if (flag_reciprocal_math
11363 && TREE_CODE (arg1) == RDIV_EXPR)
11364 return fold_build2_loc (loc, MULT_EXPR, type,
11365 fold_build2_loc (loc, RDIV_EXPR, type, arg0,
11366 TREE_OPERAND (arg1, 0)),
11367 TREE_OPERAND (arg1, 1));
11369 /* Convert C1/(X*C2) into (C1/C2)/X. */
11370 if (flag_reciprocal_math
11371 && TREE_CODE (arg1) == MULT_EXPR
11372 && TREE_CODE (arg0) == REAL_CST
11373 && TREE_CODE (TREE_OPERAND (arg1, 1)) == REAL_CST)
11375 tree tem = const_binop (RDIV_EXPR, arg0,
11376 TREE_OPERAND (arg1, 1));
11377 if (tem)
11378 return fold_build2_loc (loc, RDIV_EXPR, type, tem,
11379 TREE_OPERAND (arg1, 0));
11382 if (flag_unsafe_math_optimizations)
11384 enum built_in_function fcode0 = builtin_mathfn_code (arg0);
11385 enum built_in_function fcode1 = builtin_mathfn_code (arg1);
11387 /* Optimize sin(x)/cos(x) as tan(x). */
11388 if (((fcode0 == BUILT_IN_SIN && fcode1 == BUILT_IN_COS)
11389 || (fcode0 == BUILT_IN_SINF && fcode1 == BUILT_IN_COSF)
11390 || (fcode0 == BUILT_IN_SINL && fcode1 == BUILT_IN_COSL))
11391 && operand_equal_p (CALL_EXPR_ARG (arg0, 0),
11392 CALL_EXPR_ARG (arg1, 0), 0))
11394 tree tanfn = mathfn_built_in (type, BUILT_IN_TAN);
11396 if (tanfn != NULL_TREE)
11397 return build_call_expr_loc (loc, tanfn, 1, CALL_EXPR_ARG (arg0, 0));
11400 /* Optimize cos(x)/sin(x) as 1.0/tan(x). */
11401 if (((fcode0 == BUILT_IN_COS && fcode1 == BUILT_IN_SIN)
11402 || (fcode0 == BUILT_IN_COSF && fcode1 == BUILT_IN_SINF)
11403 || (fcode0 == BUILT_IN_COSL && fcode1 == BUILT_IN_SINL))
11404 && operand_equal_p (CALL_EXPR_ARG (arg0, 0),
11405 CALL_EXPR_ARG (arg1, 0), 0))
11407 tree tanfn = mathfn_built_in (type, BUILT_IN_TAN);
11409 if (tanfn != NULL_TREE)
11411 tree tmp = build_call_expr_loc (loc, tanfn, 1,
11412 CALL_EXPR_ARG (arg0, 0));
11413 return fold_build2_loc (loc, RDIV_EXPR, type,
11414 build_real (type, dconst1), tmp);
11418 /* Optimize sin(x)/tan(x) as cos(x) if we don't care about
11419 NaNs or Infinities. */
11420 if (((fcode0 == BUILT_IN_SIN && fcode1 == BUILT_IN_TAN)
11421 || (fcode0 == BUILT_IN_SINF && fcode1 == BUILT_IN_TANF)
11422 || (fcode0 == BUILT_IN_SINL && fcode1 == BUILT_IN_TANL)))
11424 tree arg00 = CALL_EXPR_ARG (arg0, 0);
11425 tree arg01 = CALL_EXPR_ARG (arg1, 0);
11427 if (! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg00)))
11428 && ! HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (arg00)))
11429 && operand_equal_p (arg00, arg01, 0))
11431 tree cosfn = mathfn_built_in (type, BUILT_IN_COS);
11433 if (cosfn != NULL_TREE)
11434 return build_call_expr_loc (loc, cosfn, 1, arg00);
11438 /* Optimize tan(x)/sin(x) as 1.0/cos(x) if we don't care about
11439 NaNs or Infinities. */
11440 if (((fcode0 == BUILT_IN_TAN && fcode1 == BUILT_IN_SIN)
11441 || (fcode0 == BUILT_IN_TANF && fcode1 == BUILT_IN_SINF)
11442 || (fcode0 == BUILT_IN_TANL && fcode1 == BUILT_IN_SINL)))
11444 tree arg00 = CALL_EXPR_ARG (arg0, 0);
11445 tree arg01 = CALL_EXPR_ARG (arg1, 0);
11447 if (! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg00)))
11448 && ! HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (arg00)))
11449 && operand_equal_p (arg00, arg01, 0))
11451 tree cosfn = mathfn_built_in (type, BUILT_IN_COS);
11453 if (cosfn != NULL_TREE)
11455 tree tmp = build_call_expr_loc (loc, cosfn, 1, arg00);
11456 return fold_build2_loc (loc, RDIV_EXPR, type,
11457 build_real (type, dconst1),
11458 tmp);
11463 /* Optimize pow(x,c)/x as pow(x,c-1). */
11464 if (fcode0 == BUILT_IN_POW
11465 || fcode0 == BUILT_IN_POWF
11466 || fcode0 == BUILT_IN_POWL)
11468 tree arg00 = CALL_EXPR_ARG (arg0, 0);
11469 tree arg01 = CALL_EXPR_ARG (arg0, 1);
11470 if (TREE_CODE (arg01) == REAL_CST
11471 && !TREE_OVERFLOW (arg01)
11472 && operand_equal_p (arg1, arg00, 0))
11474 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
11475 REAL_VALUE_TYPE c;
11476 tree arg;
11478 c = TREE_REAL_CST (arg01);
11479 real_arithmetic (&c, MINUS_EXPR, &c, &dconst1);
11480 arg = build_real (type, c);
11481 return build_call_expr_loc (loc, powfn, 2, arg1, arg);
11485 /* Optimize a/root(b/c) into a*root(c/b). */
11486 if (BUILTIN_ROOT_P (fcode1))
11488 tree rootarg = CALL_EXPR_ARG (arg1, 0);
11490 if (TREE_CODE (rootarg) == RDIV_EXPR)
11492 tree rootfn = TREE_OPERAND (CALL_EXPR_FN (arg1), 0);
11493 tree b = TREE_OPERAND (rootarg, 0);
11494 tree c = TREE_OPERAND (rootarg, 1);
11496 tree tmp = fold_build2_loc (loc, RDIV_EXPR, type, c, b);
11498 tmp = build_call_expr_loc (loc, rootfn, 1, tmp);
11499 return fold_build2_loc (loc, MULT_EXPR, type, arg0, tmp);
11503 /* Optimize x/expN(y) into x*expN(-y). */
11504 if (BUILTIN_EXPONENT_P (fcode1))
11506 tree expfn = TREE_OPERAND (CALL_EXPR_FN (arg1), 0);
11507 tree arg = negate_expr (CALL_EXPR_ARG (arg1, 0));
11508 arg1 = build_call_expr_loc (loc,
11509 expfn, 1,
11510 fold_convert_loc (loc, type, arg));
11511 return fold_build2_loc (loc, MULT_EXPR, type, arg0, arg1);
11514 /* Optimize x/pow(y,z) into x*pow(y,-z). */
11515 if (fcode1 == BUILT_IN_POW
11516 || fcode1 == BUILT_IN_POWF
11517 || fcode1 == BUILT_IN_POWL)
11519 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg1), 0);
11520 tree arg10 = CALL_EXPR_ARG (arg1, 0);
11521 tree arg11 = CALL_EXPR_ARG (arg1, 1);
11522 tree neg11 = fold_convert_loc (loc, type,
11523 negate_expr (arg11));
11524 arg1 = build_call_expr_loc (loc, powfn, 2, arg10, neg11);
11525 return fold_build2_loc (loc, MULT_EXPR, type, arg0, arg1);
11528 return NULL_TREE;
11530 case TRUNC_DIV_EXPR:
11531 /* Optimize (X & (-A)) / A where A is a power of 2,
11532 to X >> log2(A) */
11533 if (TREE_CODE (arg0) == BIT_AND_EXPR
11534 && !TYPE_UNSIGNED (type) && TREE_CODE (arg1) == INTEGER_CST
11535 && integer_pow2p (arg1) && tree_int_cst_sgn (arg1) > 0)
11537 tree sum = fold_binary_loc (loc, PLUS_EXPR, TREE_TYPE (arg1),
11538 arg1, TREE_OPERAND (arg0, 1));
11539 if (sum && integer_zerop (sum)) {
11540 unsigned long pow2;
11542 if (TREE_INT_CST_LOW (arg1))
11543 pow2 = exact_log2 (TREE_INT_CST_LOW (arg1));
11544 else
11545 pow2 = exact_log2 (TREE_INT_CST_HIGH (arg1))
11546 + HOST_BITS_PER_WIDE_INT;
11548 return fold_build2_loc (loc, RSHIFT_EXPR, type,
11549 TREE_OPERAND (arg0, 0),
11550 build_int_cst (NULL_TREE, pow2));
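/* A signed example of the case above: (X & -8) / 8 folds to X >> 3.
   X & -8 is already a multiple of 8, so the truncating division is
   exact and agrees with the arithmetic right shift even for
   negative X.  */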
11554 /* Fall through.  */
11556 case FLOOR_DIV_EXPR:
11557 /* Simplify A / (B << N) where A and B are positive and B is
11558 a power of 2, to A >> (N + log2(B)). */
11559 strict_overflow_p = false;
11560 if (TREE_CODE (arg1) == LSHIFT_EXPR
11561 && (TYPE_UNSIGNED (type)
11562 || tree_expr_nonnegative_warnv_p (op0, &strict_overflow_p)))
11564 tree sval = TREE_OPERAND (arg1, 0);
11565 if (integer_pow2p (sval) && tree_int_cst_sgn (sval) > 0)
11567 tree sh_cnt = TREE_OPERAND (arg1, 1);
11568 unsigned long pow2;
11570 if (TREE_INT_CST_LOW (sval))
11571 pow2 = exact_log2 (TREE_INT_CST_LOW (sval));
11572 else
11573 pow2 = exact_log2 (TREE_INT_CST_HIGH (sval))
11574 + HOST_BITS_PER_WIDE_INT;
11576 if (strict_overflow_p)
11577 fold_overflow_warning (("assuming signed overflow does not "
11578 "occur when simplifying A / (B << N)"),
11579 WARN_STRICT_OVERFLOW_MISC);
11581 sh_cnt = fold_build2_loc (loc, PLUS_EXPR, TREE_TYPE (sh_cnt),
11582 sh_cnt, build_int_cst (NULL_TREE, pow2));
11583 return fold_build2_loc (loc, RSHIFT_EXPR, type,
11584 fold_convert_loc (loc, type, arg0), sh_cnt);
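/* A worked instance, for unsigned A:
     A / (4 << N)  ==>  A >> (N + 2)
   since B == 4 gives log2 (B) == 2 and the shift count folds to
   N + 2.  */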
11588 /* For unsigned integral types, FLOOR_DIV_EXPR is the same as
11589 TRUNC_DIV_EXPR. Rewrite into the latter in this case. */
11590 if (INTEGRAL_TYPE_P (type)
11591 && TYPE_UNSIGNED (type)
11592 && code == FLOOR_DIV_EXPR)
11593 return fold_build2_loc (loc, TRUNC_DIV_EXPR, type, op0, op1);
11595 /* Fall through.  */
11597 case ROUND_DIV_EXPR:
11598 case CEIL_DIV_EXPR:
11599 case EXACT_DIV_EXPR:
11600 if (integer_onep (arg1))
11601 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
11602 if (integer_zerop (arg1))
11603 return NULL_TREE;
11604 /* X / -1 is -X. */
11605 if (!TYPE_UNSIGNED (type)
11606 && TREE_CODE (arg1) == INTEGER_CST
11607 && TREE_INT_CST_LOW (arg1) == (unsigned HOST_WIDE_INT) -1
11608 && TREE_INT_CST_HIGH (arg1) == -1)
11609 return fold_convert_loc (loc, type, negate_expr (arg0));
11611 /* Convert -A / -B to A / B when the type is signed and overflow is
11612 undefined. */
11613 if ((!INTEGRAL_TYPE_P (type) || TYPE_OVERFLOW_UNDEFINED (type))
11614 && TREE_CODE (arg0) == NEGATE_EXPR
11615 && negate_expr_p (arg1))
11617 if (INTEGRAL_TYPE_P (type))
11618 fold_overflow_warning (("assuming signed overflow does not occur "
11619 "when distributing negation across "
11620 "division"),
11621 WARN_STRICT_OVERFLOW_MISC);
11622 return fold_build2_loc (loc, code, type,
11623 fold_convert_loc (loc, type,
11624 TREE_OPERAND (arg0, 0)),
11625 fold_convert_loc (loc, type,
11626 negate_expr (arg1)));
11628 if ((!INTEGRAL_TYPE_P (type) || TYPE_OVERFLOW_UNDEFINED (type))
11629 && TREE_CODE (arg1) == NEGATE_EXPR
11630 && negate_expr_p (arg0))
11632 if (INTEGRAL_TYPE_P (type))
11633 fold_overflow_warning (("assuming signed overflow does not occur "
11634 "when distributing negation across "
11635 "division"),
11636 WARN_STRICT_OVERFLOW_MISC);
11637 return fold_build2_loc (loc, code, type,
11638 fold_convert_loc (loc, type,
11639 negate_expr (arg0)),
11640 fold_convert_loc (loc, type,
11641 TREE_OPERAND (arg1, 0)));
11644 /* If arg0 is a multiple of arg1, then rewrite to the fastest div
11645 operation, EXACT_DIV_EXPR.
11647 Note that only CEIL_DIV_EXPR and FLOOR_DIV_EXPR are rewritten now.
11648 At one time others generated faster code; it's not clear if they do
11649 after the last round of changes to the DIV code in expmed.c. */
11650 if ((code == CEIL_DIV_EXPR || code == FLOOR_DIV_EXPR)
11651 && multiple_of_p (type, arg0, arg1))
11652 return fold_build2_loc (loc, EXACT_DIV_EXPR, type, arg0, arg1);
11654 strict_overflow_p = false;
11655 if (TREE_CODE (arg1) == INTEGER_CST
11656 && 0 != (tem = extract_muldiv (op0, arg1, code, NULL_TREE,
11657 &strict_overflow_p)))
11659 if (strict_overflow_p)
11660 fold_overflow_warning (("assuming signed overflow does not occur "
11661 "when simplifying division"),
11662 WARN_STRICT_OVERFLOW_MISC);
11663 return fold_convert_loc (loc, type, tem);
11666 return NULL_TREE;
11668 case CEIL_MOD_EXPR:
11669 case FLOOR_MOD_EXPR:
11670 case ROUND_MOD_EXPR:
11671 case TRUNC_MOD_EXPR:
11672 /* X % 1 is always zero, but be sure to preserve any side
11673 effects in X. */
11674 if (integer_onep (arg1))
11675 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
11677 /* X % 0, return X % 0 unchanged so that we can get the
11678 proper warnings and errors. */
11679 if (integer_zerop (arg1))
11680 return NULL_TREE;
11682 /* 0 % X is always zero, but be sure to preserve any side
11683 effects in X. Place this after checking for X == 0. */
11684 if (integer_zerop (arg0))
11685 return omit_one_operand_loc (loc, type, integer_zero_node, arg1);
11687 /* X % -1 is zero. */
11688 if (!TYPE_UNSIGNED (type)
11689 && TREE_CODE (arg1) == INTEGER_CST
11690 && TREE_INT_CST_LOW (arg1) == (unsigned HOST_WIDE_INT) -1
11691 && TREE_INT_CST_HIGH (arg1) == -1)
11692 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
11694 /* X % -C is the same as X % C. */
11695 if (code == TRUNC_MOD_EXPR
11696 && !TYPE_UNSIGNED (type)
11697 && TREE_CODE (arg1) == INTEGER_CST
11698 && !TREE_OVERFLOW (arg1)
11699 && TREE_INT_CST_HIGH (arg1) < 0
11700 && !TYPE_OVERFLOW_TRAPS (type)
11701 /* Avoid this transformation if C is INT_MIN, i.e. C == -C. */
11702 && !sign_bit_p (arg1, arg1))
11703 return fold_build2_loc (loc, code, type,
11704 fold_convert_loc (loc, type, arg0),
11705 fold_convert_loc (loc, type,
11706 negate_expr (arg1)));
11708 /* X % -Y is the same as X % Y. */
11709 if (code == TRUNC_MOD_EXPR
11710 && !TYPE_UNSIGNED (type)
11711 && TREE_CODE (arg1) == NEGATE_EXPR
11712 && !TYPE_OVERFLOW_TRAPS (type))
11713 return fold_build2_loc (loc, code, type, fold_convert_loc (loc, type, arg0),
11714 fold_convert_loc (loc, type,
11715 TREE_OPERAND (arg1, 0)));
11717 strict_overflow_p = false;
11718 if (TREE_CODE (arg1) == INTEGER_CST
11719 && 0 != (tem = extract_muldiv (op0, arg1, code, NULL_TREE,
11720 &strict_overflow_p)))
11722 if (strict_overflow_p)
11723 fold_overflow_warning (("assuming signed overflow does not occur "
11724 "when simplifying modulus"),
11725 WARN_STRICT_OVERFLOW_MISC);
11726 return fold_convert_loc (loc, type, tem);
11729 /* Optimize TRUNC_MOD_EXPR by a power of two into a BIT_AND_EXPR,
11730 i.e. "X % C" into "X & (C - 1)", if X and C are positive. */
11731 if ((code == TRUNC_MOD_EXPR || code == FLOOR_MOD_EXPR)
11732 && (TYPE_UNSIGNED (type)
11733 || tree_expr_nonnegative_warnv_p (op0, &strict_overflow_p)))
11735 tree c = arg1;
11736 /* Also optimize A % (C << N) where C is a power of 2,
11737 to A & ((C << N) - 1). */
11738 if (TREE_CODE (arg1) == LSHIFT_EXPR)
11739 c = TREE_OPERAND (arg1, 0);
11741 if (integer_pow2p (c) && tree_int_cst_sgn (c) > 0)
11743 tree mask
11744 = fold_build2_loc (loc, MINUS_EXPR, TREE_TYPE (arg1), arg1,
11745 build_int_cst (TREE_TYPE (arg1), 1));
11746 if (strict_overflow_p)
11747 fold_overflow_warning (("assuming signed overflow does not "
11748 "occur when simplifying "
11749 "X % (power of two)"),
11750 WARN_STRICT_OVERFLOW_MISC);
11751 return fold_build2_loc (loc, BIT_AND_EXPR, type,
11752 fold_convert_loc (loc, type, arg0),
11753 fold_convert_loc (loc, type, mask));
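/* For example, with unsigned X, X % 8 folds to X & 7, and the
   shifted form folds as X % (2 << N)  ==>  X & ((2 << N) - 1).  */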
11757 return NULL_TREE;
11759 case LROTATE_EXPR:
11760 case RROTATE_EXPR:
11761 if (integer_all_onesp (arg0))
11762 return omit_one_operand_loc (loc, type, arg0, arg1);
11763 goto shift;
11765 case RSHIFT_EXPR:
11766 /* Optimize -1 >> x for arithmetic right shifts. */
11767 if (integer_all_onesp (arg0) && !TYPE_UNSIGNED (type)
11768 && tree_expr_nonnegative_p (arg1))
11769 return omit_one_operand_loc (loc, type, arg0, arg1);
11770 /* ... fall through ... */
11772 case LSHIFT_EXPR:
11773 shift:
11774 if (integer_zerop (arg1))
11775 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
11776 if (integer_zerop (arg0))
11777 return omit_one_operand_loc (loc, type, arg0, arg1);
11779 /* Since negative shift count is not well-defined,
11780 don't try to compute it in the compiler. */
11781 if (TREE_CODE (arg1) == INTEGER_CST && tree_int_cst_sgn (arg1) < 0)
11782 return NULL_TREE;
11784 /* Turn (a OP c1) OP c2 into a OP (c1+c2). */
11785 if (TREE_CODE (op0) == code && host_integerp (arg1, false)
11786 && TREE_INT_CST_LOW (arg1) < TYPE_PRECISION (type)
11787 && host_integerp (TREE_OPERAND (arg0, 1), false)
11788 && TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1)) < TYPE_PRECISION (type))
11790 HOST_WIDE_INT low = (TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1))
11791 + TREE_INT_CST_LOW (arg1));
11793 /* Deal with a OP (c1 + c2) being undefined but (a OP c1) OP c2
11794 being well defined. */
11795 if (low >= TYPE_PRECISION (type))
11797 if (code == LROTATE_EXPR || code == RROTATE_EXPR)
11798 low = low % TYPE_PRECISION (type);
11799 else if (TYPE_UNSIGNED (type) || code == LSHIFT_EXPR)
11800 return omit_one_operand_loc (loc, type, build_int_cst (type, 0),
11801 TREE_OPERAND (arg0, 0));
11802 else
11803 low = TYPE_PRECISION (type) - 1;
11806 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0),
11807 build_int_cst (type, low));
11810 /* Transform (x >> c) << c into x & (-1<<c), or transform (x << c) >> c
11811 into x & ((unsigned)-1 >> c) for unsigned types. */
11812 if (((code == LSHIFT_EXPR && TREE_CODE (arg0) == RSHIFT_EXPR)
11813 || (TYPE_UNSIGNED (type)
11814 && code == RSHIFT_EXPR && TREE_CODE (arg0) == LSHIFT_EXPR))
11815 && host_integerp (arg1, false)
11816 && TREE_INT_CST_LOW (arg1) < TYPE_PRECISION (type)
11817 && host_integerp (TREE_OPERAND (arg0, 1), false)
11818 && TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1)) < TYPE_PRECISION (type))
11820 HOST_WIDE_INT low0 = TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1));
11821 HOST_WIDE_INT low1 = TREE_INT_CST_LOW (arg1);
11822 tree lshift;
11823 tree arg00;
11825 if (low0 == low1)
11827 arg00 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
11829 lshift = build_int_cst (type, -1);
11830 lshift = int_const_binop (code, lshift, arg1, 0);
11832 return fold_build2_loc (loc, BIT_AND_EXPR, type, arg00, lshift);
11836 /* Rewrite an LROTATE_EXPR by a constant into an
11837 RROTATE_EXPR by a new constant. */
11838 if (code == LROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST)
11840 tree tem = build_int_cst (TREE_TYPE (arg1),
11841 TYPE_PRECISION (type));
11842 tem = const_binop (MINUS_EXPR, tem, arg1);
11843 return fold_build2_loc (loc, RROTATE_EXPR, type, op0, tem);
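/* For example, on a 32-bit type a rotate left by 3 becomes a rotate
   right by 29, since rotl (x, c) == rotr (x, prec - c); keeping one
   canonical direction lets the rotate folds below apply uniformly.  */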
11846 /* If we have a rotate of a bit operation with the rotate count and
11847 the second operand of the bit operation both constant,
11848 permute the two operations. */
11849 if (code == RROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST
11850 && (TREE_CODE (arg0) == BIT_AND_EXPR
11851 || TREE_CODE (arg0) == BIT_IOR_EXPR
11852 || TREE_CODE (arg0) == BIT_XOR_EXPR)
11853 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
11854 return fold_build2_loc (loc, TREE_CODE (arg0), type,
11855 fold_build2_loc (loc, code, type,
11856 TREE_OPERAND (arg0, 0), arg1),
11857 fold_build2_loc (loc, code, type,
11858 TREE_OPERAND (arg0, 1), arg1));
11860 /* Two consecutive rotates adding up to the precision of the
11861 type can be ignored. */
11862 if (code == RROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST
11863 && TREE_CODE (arg0) == RROTATE_EXPR
11864 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
11865 && TREE_INT_CST_HIGH (arg1) == 0
11866 && TREE_INT_CST_HIGH (TREE_OPERAND (arg0, 1)) == 0
11867 && ((TREE_INT_CST_LOW (arg1)
11868 + TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1)))
11869 == (unsigned int) TYPE_PRECISION (type)))
11870 return TREE_OPERAND (arg0, 0);
11872 /* Fold (X & C2) << C1 into (X << C1) & (C2 << C1)
11873 (X & C2) >> C1 into (X >> C1) & (C2 >> C1)
11874 if the latter can be further optimized. */
11875 if ((code == LSHIFT_EXPR || code == RSHIFT_EXPR)
11876 && TREE_CODE (arg0) == BIT_AND_EXPR
11877 && TREE_CODE (arg1) == INTEGER_CST
11878 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
11880 tree mask = fold_build2_loc (loc, code, type,
11881 fold_convert_loc (loc, type,
11882 TREE_OPERAND (arg0, 1)),
11883 arg1);
11884 tree shift = fold_build2_loc (loc, code, type,
11885 fold_convert_loc (loc, type,
11886 TREE_OPERAND (arg0, 0)),
11887 arg1);
11888 tem = fold_binary_loc (loc, BIT_AND_EXPR, type, shift, mask);
11889 if (tem)
11890 return tem;
11893 return NULL_TREE;
11895 case MIN_EXPR:
11896 if (operand_equal_p (arg0, arg1, 0))
11897 return omit_one_operand_loc (loc, type, arg0, arg1);
11898 if (INTEGRAL_TYPE_P (type)
11899 && operand_equal_p (arg1, TYPE_MIN_VALUE (type), OEP_ONLY_CONST))
11900 return omit_one_operand_loc (loc, type, arg1, arg0);
11901 tem = fold_minmax (loc, MIN_EXPR, type, arg0, arg1);
11902 if (tem)
11903 return tem;
11904 goto associate;
11906 case MAX_EXPR:
11907 if (operand_equal_p (arg0, arg1, 0))
11908 return omit_one_operand_loc (loc, type, arg0, arg1);
11909 if (INTEGRAL_TYPE_P (type)
11910 && TYPE_MAX_VALUE (type)
11911 && operand_equal_p (arg1, TYPE_MAX_VALUE (type), OEP_ONLY_CONST))
11912 return omit_one_operand_loc (loc, type, arg1, arg0);
11913 tem = fold_minmax (loc, MAX_EXPR, type, arg0, arg1);
11914 if (tem)
11915 return tem;
11916 goto associate;
11918 case TRUTH_ANDIF_EXPR:
11919 /* Note that the operands of this must be ints
11920 and their values must be 0 or 1.
11921 ("true" is a fixed value perhaps depending on the language.) */
11922 /* If first arg is constant zero, return it. */
11923 if (integer_zerop (arg0))
11924 return fold_convert_loc (loc, type, arg0);
11925 case TRUTH_AND_EXPR:
11926 /* If either arg is constant true, drop it. */
11927 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
11928 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
11929 if (TREE_CODE (arg1) == INTEGER_CST && ! integer_zerop (arg1)
11930 /* Preserve sequence points. */
11931 && (code != TRUTH_ANDIF_EXPR || ! TREE_SIDE_EFFECTS (arg0)))
11932 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
11933 /* If second arg is constant zero, result is zero, but first arg
11934 must be evaluated. */
11935 if (integer_zerop (arg1))
11936 return omit_one_operand_loc (loc, type, arg1, arg0);
11937 /* Likewise for first arg, but note that only the TRUTH_AND_EXPR
11938 case will be handled here. */
11939 if (integer_zerop (arg0))
11940 return omit_one_operand_loc (loc, type, arg0, arg1);
11942 /* !X && X is always false. */
11943 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
11944 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
11945 return omit_one_operand_loc (loc, type, integer_zero_node, arg1);
11946 /* X && !X is always false. */
11947 if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
11948 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
11949 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
11951 /* A < X && A + 1 > Y ==> A < X && A >= Y. Normally A + 1 > Y
11952 means A >= Y && A != MAX, but in this case we know that
11953 A < X <= MAX. */
11955 if (!TREE_SIDE_EFFECTS (arg0)
11956 && !TREE_SIDE_EFFECTS (arg1))
11958 tem = fold_to_nonsharp_ineq_using_bound (loc, arg0, arg1);
11959 if (tem && !operand_equal_p (tem, arg0, 0))
11960 return fold_build2_loc (loc, code, type, tem, arg1);
11962 tem = fold_to_nonsharp_ineq_using_bound (loc, arg1, arg0);
11963 if (tem && !operand_equal_p (tem, arg1, 0))
11964 return fold_build2_loc (loc, code, type, arg0, tem);
11967 truth_andor:
11968 /* We only do these simplifications if we are optimizing. */
11969 if (!optimize)
11970 return NULL_TREE;
11972 /* Check for things like (A || B) && (A || C). We can convert this
11973 to A || (B && C). Note that either operator can be any of the four
11974 truth and/or operations and the transformation will still be
11975 valid. Also note that we only care about order for the
11976 ANDIF and ORIF operators. If B contains side effects, this
11977 might change the truth-value of A. */
11978 if (TREE_CODE (arg0) == TREE_CODE (arg1)
11979 && (TREE_CODE (arg0) == TRUTH_ANDIF_EXPR
11980 || TREE_CODE (arg0) == TRUTH_ORIF_EXPR
11981 || TREE_CODE (arg0) == TRUTH_AND_EXPR
11982 || TREE_CODE (arg0) == TRUTH_OR_EXPR)
11983 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (arg0, 1)))
11985 tree a00 = TREE_OPERAND (arg0, 0);
11986 tree a01 = TREE_OPERAND (arg0, 1);
11987 tree a10 = TREE_OPERAND (arg1, 0);
11988 tree a11 = TREE_OPERAND (arg1, 1);
11989 int commutative = ((TREE_CODE (arg0) == TRUTH_OR_EXPR
11990 || TREE_CODE (arg0) == TRUTH_AND_EXPR)
11991 && (code == TRUTH_AND_EXPR
11992 || code == TRUTH_OR_EXPR));
11994 if (operand_equal_p (a00, a10, 0))
11995 return fold_build2_loc (loc, TREE_CODE (arg0), type, a00,
11996 fold_build2_loc (loc, code, type, a01, a11));
11997 else if (commutative && operand_equal_p (a00, a11, 0))
11998 return fold_build2_loc (loc, TREE_CODE (arg0), type, a00,
11999 fold_build2_loc (loc, code, type, a01, a10));
12000 else if (commutative && operand_equal_p (a01, a10, 0))
12001 return fold_build2_loc (loc, TREE_CODE (arg0), type, a01,
12002 fold_build2_loc (loc, code, type, a00, a11));
12004 /* This case is tricky because we must either have commutative
12005 operators or else A10 must not have side-effects. */
12007 else if ((commutative || ! TREE_SIDE_EFFECTS (a10))
12008 && operand_equal_p (a01, a11, 0))
12009 return fold_build2_loc (loc, TREE_CODE (arg0), type,
12010 fold_build2_loc (loc, code, type, a00, a10),
12011 a01);
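/* For example, (a || b) && (a || c) becomes a || (b && c).  The
   shared operand keeps its evaluation position, which is why
   operand 1 of ARG0 must be free of side effects before the
   regrouping is allowed.  */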
12014 /* See if we can build a range comparison. */
12015 if (0 != (tem = fold_range_test (loc, code, type, op0, op1)))
12016 return tem;
12018 if ((code == TRUTH_ANDIF_EXPR && TREE_CODE (arg0) == TRUTH_ORIF_EXPR)
12019 || (code == TRUTH_ORIF_EXPR && TREE_CODE (arg0) == TRUTH_ANDIF_EXPR))
12021 tem = merge_truthop_with_opposite_arm (loc, arg0, arg1, true);
12022 if (tem)
12023 return fold_build2_loc (loc, code, type, tem, arg1);
12026 if ((code == TRUTH_ANDIF_EXPR && TREE_CODE (arg1) == TRUTH_ORIF_EXPR)
12027 || (code == TRUTH_ORIF_EXPR && TREE_CODE (arg1) == TRUTH_ANDIF_EXPR))
12029 tem = merge_truthop_with_opposite_arm (loc, arg1, arg0, false);
12030 if (tem)
12031 return fold_build2_loc (loc, code, type, arg0, tem);
12034 /* Check for the possibility of merging component references. If our
12035 lhs is another similar operation, try to merge its rhs with our
12036 rhs. Then try to merge our lhs and rhs. */
12037 if (TREE_CODE (arg0) == code
12038 && 0 != (tem = fold_truthop (loc, code, type,
12039 TREE_OPERAND (arg0, 1), arg1)))
12040 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0), tem);
12042 if ((tem = fold_truthop (loc, code, type, arg0, arg1)) != 0)
12043 return tem;
12045 return NULL_TREE;
12047 case TRUTH_ORIF_EXPR:
12048 /* Note that the operands of this must be ints
12049 and their values must be 0 or true.
12050 ("true" is a fixed value perhaps depending on the language.) */
12051 /* If first arg is constant true, return it. */
12052 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
12053 return fold_convert_loc (loc, type, arg0);
12054 case TRUTH_OR_EXPR:
12055 /* If either arg is constant zero, drop it. */
12056 if (TREE_CODE (arg0) == INTEGER_CST && integer_zerop (arg0))
12057 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
12058 if (TREE_CODE (arg1) == INTEGER_CST && integer_zerop (arg1)
12059 /* Preserve sequence points. */
12060 && (code != TRUTH_ORIF_EXPR || ! TREE_SIDE_EFFECTS (arg0)))
12061 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
12062 /* If second arg is constant true, result is true, but we must
12063 evaluate first arg. */
12064 if (TREE_CODE (arg1) == INTEGER_CST && ! integer_zerop (arg1))
12065 return omit_one_operand_loc (loc, type, arg1, arg0);
12066 /* Likewise for first arg, but note this only occurs here for
12067 TRUTH_OR_EXPR. */
12068 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
12069 return omit_one_operand_loc (loc, type, arg0, arg1);
12071 /* !X || X is always true. */
12072 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
12073 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
12074 return omit_one_operand_loc (loc, type, integer_one_node, arg1);
12075 /* X || !X is always true. */
12076 if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
12077 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
12078 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
12080 goto truth_andor;
12082 case TRUTH_XOR_EXPR:
12083 /* If the second arg is constant zero, drop it. */
12084 if (integer_zerop (arg1))
12085 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
12086 /* If the second arg is constant true, this is a logical inversion. */
12087 if (integer_onep (arg1))
12089 /* Only call invert_truthvalue if operand is a truth value. */
12090 if (TREE_CODE (TREE_TYPE (arg0)) != BOOLEAN_TYPE)
12091 tem = fold_build1_loc (loc, TRUTH_NOT_EXPR, TREE_TYPE (arg0), arg0);
12092 else
12093 tem = invert_truthvalue_loc (loc, arg0);
12094 return non_lvalue_loc (loc, fold_convert_loc (loc, type, tem));
12096 /* Identical arguments cancel to zero. */
12097 if (operand_equal_p (arg0, arg1, 0))
12098 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
12100 /* !X ^ X is always true. */
12101 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
12102 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
12103 return omit_one_operand_loc (loc, type, integer_one_node, arg1);
12105 /* X ^ !X is always true. */
12106 if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
12107 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
12108 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
12110 return NULL_TREE;
12112 case EQ_EXPR:
12113 case NE_EXPR:
12114 tem = fold_comparison (loc, code, type, op0, op1);
12115 if (tem != NULL_TREE)
12116 return tem;
12118 /* bool_var != 0 becomes bool_var. */
12119 if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_zerop (arg1)
12120 && code == NE_EXPR)
12121 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
12123 /* bool_var == 1 becomes bool_var. */
12124 if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_onep (arg1)
12125 && code == EQ_EXPR)
12126 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
12128 /* bool_var != 1 becomes !bool_var. */
12129 if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_onep (arg1)
12130 && code == NE_EXPR)
12131 return fold_build1_loc (loc, TRUTH_NOT_EXPR, type,
12132 fold_convert_loc (loc, type, arg0));
12134 /* bool_var == 0 becomes !bool_var. */
12135 if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_zerop (arg1)
12136 && code == EQ_EXPR)
12137 return fold_build1_loc (loc, TRUTH_NOT_EXPR, type,
12138 fold_convert_loc (loc, type, arg0));
12140 /* !exp != 0 becomes !exp.  */
12141 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR && integer_zerop (arg1)
12142 && code == NE_EXPR)
12143 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
12145 /* If this is an equality comparison of the address of two non-weak,
12146 unaliased symbols neither of which are extern (since we do not
12147 have access to attributes for externs), then we know the result. */
12148 if (TREE_CODE (arg0) == ADDR_EXPR
12149 && VAR_OR_FUNCTION_DECL_P (TREE_OPERAND (arg0, 0))
12150 && ! DECL_WEAK (TREE_OPERAND (arg0, 0))
12151 && ! lookup_attribute ("alias",
12152 DECL_ATTRIBUTES (TREE_OPERAND (arg0, 0)))
12153 && ! DECL_EXTERNAL (TREE_OPERAND (arg0, 0))
12154 && TREE_CODE (arg1) == ADDR_EXPR
12155 && VAR_OR_FUNCTION_DECL_P (TREE_OPERAND (arg1, 0))
12156 && ! DECL_WEAK (TREE_OPERAND (arg1, 0))
12157 && ! lookup_attribute ("alias",
12158 DECL_ATTRIBUTES (TREE_OPERAND (arg1, 0)))
12159 && ! DECL_EXTERNAL (TREE_OPERAND (arg1, 0)))
12161 /* We know that we're looking at the address of two
12162 non-weak, unaliased, static _DECL nodes.
12164 It is both wasteful and incorrect to call operand_equal_p
12165 to compare the two ADDR_EXPR nodes. It is wasteful in that
12166 all we need to do is test pointer equality for the arguments
12167 to the two ADDR_EXPR nodes. It is incorrect to use
12168 operand_equal_p as that function is NOT equivalent to a
12169 C equality test. It can in fact return false for two
12170 objects which would test as equal using the C equality
12171 operator. */
12172 bool equal = TREE_OPERAND (arg0, 0) == TREE_OPERAND (arg1, 0);
12173 return constant_boolean_node (equal
12174 ? code == EQ_EXPR : code != EQ_EXPR,
12175 type);
12178 /* If this is an EQ or NE comparison of a constant with a PLUS_EXPR or
12179 a MINUS_EXPR of a constant, we can convert it into a comparison with
12180 a revised constant as long as no overflow occurs. */
12181 if (TREE_CODE (arg1) == INTEGER_CST
12182 && (TREE_CODE (arg0) == PLUS_EXPR
12183 || TREE_CODE (arg0) == MINUS_EXPR)
12184 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
12185 && 0 != (tem = const_binop (TREE_CODE (arg0) == PLUS_EXPR
12186 ? MINUS_EXPR : PLUS_EXPR,
12187 fold_convert_loc (loc, TREE_TYPE (arg0),
12188 arg1),
12189 TREE_OPERAND (arg0, 1)))
12190 && !TREE_OVERFLOW (tem))
12191 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0), tem);
12193 /* Similarly for a NEGATE_EXPR. */
12194 if (TREE_CODE (arg0) == NEGATE_EXPR
12195 && TREE_CODE (arg1) == INTEGER_CST
12196 && 0 != (tem = negate_expr (arg1))
12197 && TREE_CODE (tem) == INTEGER_CST
12198 && !TREE_OVERFLOW (tem))
12199 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0), tem);
12201 /* Similarly for a BIT_XOR_EXPR; X ^ C1 == C2 is X == (C1 ^ C2). */
12202 if (TREE_CODE (arg0) == BIT_XOR_EXPR
12203 && TREE_CODE (arg1) == INTEGER_CST
12204 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
12205 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0),
12206 fold_build2_loc (loc, BIT_XOR_EXPR, TREE_TYPE (arg0),
12207 fold_convert_loc (loc,
12208 TREE_TYPE (arg0),
12209 arg1),
12210 TREE_OPERAND (arg0, 1)));
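/* A concrete instance of the rewrite above: (x ^ 5) == 3 folds to
   x == (5 ^ 3), i.e. x == 6, removing the XOR from the runtime
   expression entirely. */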
12212 /* Transform comparisons of the form X +- Y CMP X to Y CMP 0. */
12213 if ((TREE_CODE (arg0) == PLUS_EXPR
12214 || TREE_CODE (arg0) == POINTER_PLUS_EXPR
12215 || TREE_CODE (arg0) == MINUS_EXPR)
12216 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
12217 && (INTEGRAL_TYPE_P (TREE_TYPE (arg0))
12218 || POINTER_TYPE_P (TREE_TYPE (arg0))))
12220 tree val = TREE_OPERAND (arg0, 1);
12221 return omit_two_operands_loc (loc, type,
12222 fold_build2_loc (loc, code, type,
12223 val,
12224 build_int_cst (TREE_TYPE (val),
12225 0)),
12226 TREE_OPERAND (arg0, 0), arg1);
12229 /* Transform comparisons of the form C - X CMP X if C % 2 == 1. */
12230 if (TREE_CODE (arg0) == MINUS_EXPR
12231 && TREE_CODE (TREE_OPERAND (arg0, 0)) == INTEGER_CST
12232 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0)
12233 && (TREE_INT_CST_LOW (TREE_OPERAND (arg0, 0)) & 1) == 1)
12235 return omit_two_operands_loc (loc, type,
12236 code == NE_EXPR
12237 ? boolean_true_node : boolean_false_node,
12238 TREE_OPERAND (arg0, 1), arg1);
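/* For example, 7 - x == x would require 2*x == 7; since 2*x is
   always even (also modulo 2**n), no integer x satisfies it, so the
   EQ form folds to false and the NE form to true. */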
12241 /* If we have X - Y == 0, we can convert that to X == Y and similarly
12242 for !=. Don't do this for ordered comparisons due to overflow. */
12243 if (TREE_CODE (arg0) == MINUS_EXPR
12244 && integer_zerop (arg1))
12245 return fold_build2_loc (loc, code, type,
12246 TREE_OPERAND (arg0, 0), TREE_OPERAND (arg0, 1));
12248 /* Convert ABS_EXPR<x> == 0 or ABS_EXPR<x> != 0 to x == 0 or x != 0. */
12249 if (TREE_CODE (arg0) == ABS_EXPR
12250 && (integer_zerop (arg1) || real_zerop (arg1)))
12251 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0), arg1);
12253 /* If this is an EQ or NE comparison with zero and ARG0 is
12254 (1 << foo) & bar, convert it to (bar >> foo) & 1. Both require
12255 two operations, but the latter can be done in one less insn
12256 on machines that have only two-operand insns or on which a
12257 constant cannot be the first operand. */
12258 if (TREE_CODE (arg0) == BIT_AND_EXPR
12259 && integer_zerop (arg1))
12261 tree arg00 = TREE_OPERAND (arg0, 0);
12262 tree arg01 = TREE_OPERAND (arg0, 1);
12263 if (TREE_CODE (arg00) == LSHIFT_EXPR
12264 && integer_onep (TREE_OPERAND (arg00, 0)))
12266 tree tem = fold_build2_loc (loc, RSHIFT_EXPR, TREE_TYPE (arg00),
12267 arg01, TREE_OPERAND (arg00, 1));
12268 tem = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg0), tem,
12269 build_int_cst (TREE_TYPE (arg0), 1));
12270 return fold_build2_loc (loc, code, type,
12271 fold_convert_loc (loc, TREE_TYPE (arg1), tem),
12272 arg1);
12274 else if (TREE_CODE (arg01) == LSHIFT_EXPR
12275 && integer_onep (TREE_OPERAND (arg01, 0)))
12277 tree tem = fold_build2_loc (loc, RSHIFT_EXPR, TREE_TYPE (arg01),
12278 arg00, TREE_OPERAND (arg01, 1));
12279 tem = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg0), tem,
12280 build_int_cst (TREE_TYPE (arg0), 1));
12281 return fold_build2_loc (loc, code, type,
12282 fold_convert_loc (loc, TREE_TYPE (arg1), tem),
12283 arg1);
12287 /* If this is an NE or EQ comparison of zero against the result of a
12288 signed MOD operation whose second operand is a power of 2, make
12289 the MOD operation unsigned since it is simpler and equivalent. */
12290 if (integer_zerop (arg1)
12291 && !TYPE_UNSIGNED (TREE_TYPE (arg0))
12292 && (TREE_CODE (arg0) == TRUNC_MOD_EXPR
12293 || TREE_CODE (arg0) == CEIL_MOD_EXPR
12294 || TREE_CODE (arg0) == FLOOR_MOD_EXPR
12295 || TREE_CODE (arg0) == ROUND_MOD_EXPR)
12296 && integer_pow2p (TREE_OPERAND (arg0, 1)))
12298 tree newtype = unsigned_type_for (TREE_TYPE (arg0));
12299 tree newmod = fold_build2_loc (loc, TREE_CODE (arg0), newtype,
12300 fold_convert_loc (loc, newtype,
12301 TREE_OPERAND (arg0, 0)),
12302 fold_convert_loc (loc, newtype,
12303 TREE_OPERAND (arg0, 1)));
12305 return fold_build2_loc (loc, code, type, newmod,
12306 fold_convert_loc (loc, newtype, arg1));
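/* For instance, with signed x, x % 8 == 0 becomes
   (unsigned) x % 8 == 0: both are zero exactly when the low three
   bits of x are zero, and the unsigned remainder reduces to a
   simple mask. */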
12309 /* Fold ((X >> C1) & C2) == 0 and ((X >> C1) & C2) != 0 where
12310 C1 is a valid shift constant, and C2 is a power of two, i.e.
12311 a single bit. */
12312 if (TREE_CODE (arg0) == BIT_AND_EXPR
12313 && TREE_CODE (TREE_OPERAND (arg0, 0)) == RSHIFT_EXPR
12314 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1))
12315 == INTEGER_CST
12316 && integer_pow2p (TREE_OPERAND (arg0, 1))
12317 && integer_zerop (arg1))
12319 tree itype = TREE_TYPE (arg0);
12320 unsigned HOST_WIDE_INT prec = TYPE_PRECISION (itype);
12321 tree arg001 = TREE_OPERAND (TREE_OPERAND (arg0, 0), 1);
12323 /* Check for a valid shift count. */
12324 if (TREE_INT_CST_HIGH (arg001) == 0
12325 && TREE_INT_CST_LOW (arg001) < prec)
12327 tree arg01 = TREE_OPERAND (arg0, 1);
12328 tree arg000 = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
12329 unsigned HOST_WIDE_INT log2 = tree_log2 (arg01);
12330 /* If (C2 << C1) doesn't overflow, then ((X >> C1) & C2) != 0
12331 can be rewritten as (X & (C2 << C1)) != 0. */
12332 if ((log2 + TREE_INT_CST_LOW (arg001)) < prec)
12334 tem = fold_build2_loc (loc, LSHIFT_EXPR, itype, arg01, arg001);
12335 tem = fold_build2_loc (loc, BIT_AND_EXPR, itype, arg000, tem);
12336 return fold_build2_loc (loc, code, type, tem, arg1);
12338 /* Otherwise, for signed (arithmetic) shifts,
12339 ((X >> C1) & C2) != 0 is rewritten as X < 0, and
12340 ((X >> C1) & C2) == 0 is rewritten as X >= 0. */
12341 else if (!TYPE_UNSIGNED (itype))
12342 return fold_build2_loc (loc, code == EQ_EXPR ? GE_EXPR : LT_EXPR, type,
12343 arg000, build_int_cst (itype, 0));
12344 /* Otherwise, for unsigned (logical) shifts,
12345 ((X >> C1) & C2) != 0 is rewritten as (X,false), and
12346 ((X >> C1) & C2) == 0 is rewritten as (X,true). */
12347 else
12348 return omit_one_operand_loc (loc, type,
12349 code == EQ_EXPR ? integer_one_node
12350 : integer_zero_node,
12351 arg000);
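/* Concretely, for 32-bit x with C1 == 2 and C2 == 4,
   ((x >> 2) & 4) != 0 becomes (x & 16) != 0, since 4 << 2 == 16
   still fits in the precision; once C2 << C1 would overflow, an
   arithmetic shift can only be selecting the sign bit, giving the
   X < 0 / X >= 0 forms above. */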
12355 /* If this is an NE comparison of zero with an AND of one, remove the
12356 comparison since the AND will give the correct value. */
12357 if (code == NE_EXPR
12358 && integer_zerop (arg1)
12359 && TREE_CODE (arg0) == BIT_AND_EXPR
12360 && integer_onep (TREE_OPERAND (arg0, 1)))
12361 return fold_convert_loc (loc, type, arg0);
12363 /* If we have (A & C) == C where C is a power of 2, convert this into
12364 (A & C) != 0. Similarly for NE_EXPR. */
12365 if (TREE_CODE (arg0) == BIT_AND_EXPR
12366 && integer_pow2p (TREE_OPERAND (arg0, 1))
12367 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
12368 return fold_build2_loc (loc, code == EQ_EXPR ? NE_EXPR : EQ_EXPR, type,
12369 arg0, fold_convert_loc (loc, TREE_TYPE (arg0),
12370 integer_zero_node));
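/* E.g. (a & 8) == 8 becomes (a & 8) != 0: the masked value can
   only be 0 or 8, so testing for 8 and testing for nonzero
   coincide. */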
12372 /* If we have (A & C) != 0 or (A & C) == 0 and C is the sign
12373 bit, then fold the expression into A < 0 or A >= 0. */
12374 tem = fold_single_bit_test_into_sign_test (loc, code, arg0, arg1, type);
12375 if (tem)
12376 return tem;
12378 /* If we have (A & C) == D where D & ~C != 0, convert this into 0.
12379 Similarly for NE_EXPR. */
12380 if (TREE_CODE (arg0) == BIT_AND_EXPR
12381 && TREE_CODE (arg1) == INTEGER_CST
12382 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
12384 tree notc = fold_build1_loc (loc, BIT_NOT_EXPR,
12385 TREE_TYPE (TREE_OPERAND (arg0, 1)),
12386 TREE_OPERAND (arg0, 1));
12387 tree dandnotc = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg0),
12388 arg1, notc);
12389 tree rslt = code == EQ_EXPR ? integer_zero_node : integer_one_node;
12390 if (integer_nonzerop (dandnotc))
12391 return omit_one_operand_loc (loc, type, rslt, arg0);
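/* E.g. (a & 3) == 4 folds to false: a & 3 can never have the 4
   bit set, because 4 & ~3 is nonzero. */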
12394 /* If we have (A | C) == D where C & ~D != 0, convert this into 0.
12395 Similarly for NE_EXPR. */
12396 if (TREE_CODE (arg0) == BIT_IOR_EXPR
12397 && TREE_CODE (arg1) == INTEGER_CST
12398 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
12400 tree notd = fold_build1_loc (loc, BIT_NOT_EXPR, TREE_TYPE (arg1), arg1);
12401 tree candnotd = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg0),
12402 TREE_OPERAND (arg0, 1), notd);
12403 tree rslt = code == EQ_EXPR ? integer_zero_node : integer_one_node;
12404 if (integer_nonzerop (candnotd))
12405 return omit_one_operand_loc (loc, type, rslt, arg0);
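/* Likewise (a | 4) == 3 folds to false: a | 4 always has the 4
   bit set, which 3 lacks (4 & ~3 is nonzero). */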
12408 /* If this is a comparison of a field, we may be able to simplify it. */
12409 if ((TREE_CODE (arg0) == COMPONENT_REF
12410 || TREE_CODE (arg0) == BIT_FIELD_REF)
12411 /* Handle the constant case even without -O
12412 to make sure the warnings are given. */
12413 && (optimize || TREE_CODE (arg1) == INTEGER_CST))
12415 t1 = optimize_bit_field_compare (loc, code, type, arg0, arg1);
12416 if (t1)
12417 return t1;
12420 /* Optimize comparisons of strlen vs zero to a compare of the
12421 first character of the string vs zero. To wit,
12422 strlen(ptr) == 0 => *ptr == 0
12423 strlen(ptr) != 0 => *ptr != 0
12424 Other cases should reduce to one of these two (or a constant)
12425 due to the return value of strlen being unsigned. */
12426 if (TREE_CODE (arg0) == CALL_EXPR
12427 && integer_zerop (arg1))
12429 tree fndecl = get_callee_fndecl (arg0);
12431 if (fndecl
12432 && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
12433 && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_STRLEN
12434 && call_expr_nargs (arg0) == 1
12435 && TREE_CODE (TREE_TYPE (CALL_EXPR_ARG (arg0, 0))) == POINTER_TYPE)
12437 tree iref = build_fold_indirect_ref_loc (loc,
12438 CALL_EXPR_ARG (arg0, 0));
12439 return fold_build2_loc (loc, code, type, iref,
12440 build_int_cst (TREE_TYPE (iref), 0));
12444 /* Fold (X >> C) != 0 into X < 0 if C is one less than the width
12445 of X. Similarly fold (X >> C) == 0 into X >= 0. */
12446 if (TREE_CODE (arg0) == RSHIFT_EXPR
12447 && integer_zerop (arg1)
12448 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
12450 tree arg00 = TREE_OPERAND (arg0, 0);
12451 tree arg01 = TREE_OPERAND (arg0, 1);
12452 tree itype = TREE_TYPE (arg00);
12453 if (TREE_INT_CST_HIGH (arg01) == 0
12454 && TREE_INT_CST_LOW (arg01)
12455 == (unsigned HOST_WIDE_INT) (TYPE_PRECISION (itype) - 1))
12457 if (TYPE_UNSIGNED (itype))
12459 itype = signed_type_for (itype);
12460 arg00 = fold_convert_loc (loc, itype, arg00);
12462 return fold_build2_loc (loc, code == EQ_EXPR ? GE_EXPR : LT_EXPR,
12463 type, arg00, build_int_cst (itype, 0));
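/* E.g. for 32-bit x, (x >> 31) != 0 becomes x < 0 (after first
   converting x to its signed type if it was unsigned): a shift by
   precision - 1 isolates the sign bit. */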
12467 /* (X ^ Y) == 0 becomes X == Y, and (X ^ Y) != 0 becomes X != Y. */
12468 if (integer_zerop (arg1)
12469 && TREE_CODE (arg0) == BIT_XOR_EXPR)
12470 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0),
12471 TREE_OPERAND (arg0, 1));
12473 /* (X ^ Y) == Y becomes X == 0. We know that Y has no side-effects. */
12474 if (TREE_CODE (arg0) == BIT_XOR_EXPR
12475 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
12476 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0),
12477 build_int_cst (TREE_TYPE (arg1), 0));
12478 /* Likewise (X ^ Y) == X becomes Y == 0. X has no side-effects. */
12479 if (TREE_CODE (arg0) == BIT_XOR_EXPR
12480 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
12481 && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
12482 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 1),
12483 build_int_cst (TREE_TYPE (arg1), 0));
12485 /* (X ^ C1) op C2 can be rewritten as X op (C1 ^ C2). */
12486 if (TREE_CODE (arg0) == BIT_XOR_EXPR
12487 && TREE_CODE (arg1) == INTEGER_CST
12488 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
12489 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0),
12490 fold_build2_loc (loc, BIT_XOR_EXPR, TREE_TYPE (arg1),
12491 TREE_OPERAND (arg0, 1), arg1));
12493 /* Fold (~X & C) == 0 into (X & C) != 0 and (~X & C) != 0 into
12494 (X & C) == 0 when C is a single bit. */
12495 if (TREE_CODE (arg0) == BIT_AND_EXPR
12496 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_NOT_EXPR
12497 && integer_zerop (arg1)
12498 && integer_pow2p (TREE_OPERAND (arg0, 1)))
12500 tem = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg0),
12501 TREE_OPERAND (TREE_OPERAND (arg0, 0), 0),
12502 TREE_OPERAND (arg0, 1));
12503 return fold_build2_loc (loc, code == EQ_EXPR ? NE_EXPR : EQ_EXPR,
12504 type, tem, arg1);
12507 /* Fold ((X & C) ^ C) eq/ne 0 into (X & C) ne/eq 0, when the
12508 constant C is a power of two, i.e. a single bit. */
12509 if (TREE_CODE (arg0) == BIT_XOR_EXPR
12510 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_AND_EXPR
12511 && integer_zerop (arg1)
12512 && integer_pow2p (TREE_OPERAND (arg0, 1))
12513 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
12514 TREE_OPERAND (arg0, 1), OEP_ONLY_CONST))
12516 tree arg00 = TREE_OPERAND (arg0, 0);
12517 return fold_build2_loc (loc, code == EQ_EXPR ? NE_EXPR : EQ_EXPR, type,
12518 arg00, build_int_cst (TREE_TYPE (arg00), 0));
12521 /* Likewise, fold ((X ^ C) & C) eq/ne 0 into (X & C) ne/eq 0,
12522 when C is a power of two, i.e. a single bit. */
12523 if (TREE_CODE (arg0) == BIT_AND_EXPR
12524 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_XOR_EXPR
12525 && integer_zerop (arg1)
12526 && integer_pow2p (TREE_OPERAND (arg0, 1))
12527 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
12528 TREE_OPERAND (arg0, 1), OEP_ONLY_CONST))
12530 tree arg000 = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
12531 tem = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg000),
12532 arg000, TREE_OPERAND (arg0, 1));
12533 return fold_build2_loc (loc, code == EQ_EXPR ? NE_EXPR : EQ_EXPR, type,
12534 tem, build_int_cst (TREE_TYPE (tem), 0));
12537 if (integer_zerop (arg1)
12538 && tree_expr_nonzero_p (arg0))
12540 tree res = constant_boolean_node (code == NE_EXPR, type);
12541 return omit_one_operand_loc (loc, type, res, arg0);
12544 /* Fold -X op -Y as X op Y, where op is eq/ne. */
12545 if (TREE_CODE (arg0) == NEGATE_EXPR
12546 && TREE_CODE (arg1) == NEGATE_EXPR)
12547 return fold_build2_loc (loc, code, type,
12548 TREE_OPERAND (arg0, 0),
12549 TREE_OPERAND (arg1, 0));
12551 /* Fold (X & C) op (Y & C) as ((X ^ Y) & C) op 0, and symmetries. */
12552 if (TREE_CODE (arg0) == BIT_AND_EXPR
12553 && TREE_CODE (arg1) == BIT_AND_EXPR)
12555 tree arg00 = TREE_OPERAND (arg0, 0);
12556 tree arg01 = TREE_OPERAND (arg0, 1);
12557 tree arg10 = TREE_OPERAND (arg1, 0);
12558 tree arg11 = TREE_OPERAND (arg1, 1);
12559 tree itype = TREE_TYPE (arg0);
12561 if (operand_equal_p (arg01, arg11, 0))
12562 return fold_build2_loc (loc, code, type,
12563 fold_build2_loc (loc, BIT_AND_EXPR, itype,
12564 fold_build2_loc (loc,
12565 BIT_XOR_EXPR, itype,
12566 arg00, arg10),
12567 arg01),
12568 build_int_cst (itype, 0));
12570 if (operand_equal_p (arg01, arg10, 0))
12571 return fold_build2_loc (loc, code, type,
12572 fold_build2_loc (loc, BIT_AND_EXPR, itype,
12573 fold_build2_loc (loc,
12574 BIT_XOR_EXPR, itype,
12575 arg00, arg11),
12576 arg01),
12577 build_int_cst (itype, 0));
12579 if (operand_equal_p (arg00, arg11, 0))
12580 return fold_build2_loc (loc, code, type,
12581 fold_build2_loc (loc, BIT_AND_EXPR, itype,
12582 fold_build2_loc (loc,
12583 BIT_XOR_EXPR, itype,
12584 arg01, arg10),
12585 arg00),
12586 build_int_cst (itype, 0));
12588 if (operand_equal_p (arg00, arg10, 0))
12589 return fold_build2_loc (loc, code, type,
12590 fold_build2_loc (loc, BIT_AND_EXPR, itype,
12591 fold_build2_loc (loc,
12592 BIT_XOR_EXPR, itype,
12593 arg01, arg11),
12594 arg00),
12595 build_int_cst (itype, 0));
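/* In each case the idea is the same: (x & m) == (y & m) holds
   exactly when no bit under the mask differs, i.e. when
   ((x ^ y) & m) == 0. */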
12598 if (TREE_CODE (arg0) == BIT_XOR_EXPR
12599 && TREE_CODE (arg1) == BIT_XOR_EXPR)
12601 tree arg00 = TREE_OPERAND (arg0, 0);
12602 tree arg01 = TREE_OPERAND (arg0, 1);
12603 tree arg10 = TREE_OPERAND (arg1, 0);
12604 tree arg11 = TREE_OPERAND (arg1, 1);
12605 tree itype = TREE_TYPE (arg0);
12607 /* Optimize (X ^ Z) op (Y ^ Z) as X op Y, and symmetries.
12608 operand_equal_p guarantees no side-effects so we don't need
12609 to use omit_one_operand on Z. */
12610 if (operand_equal_p (arg01, arg11, 0))
12611 return fold_build2_loc (loc, code, type, arg00, arg10);
12612 if (operand_equal_p (arg01, arg10, 0))
12613 return fold_build2_loc (loc, code, type, arg00, arg11);
12614 if (operand_equal_p (arg00, arg11, 0))
12615 return fold_build2_loc (loc, code, type, arg01, arg10);
12616 if (operand_equal_p (arg00, arg10, 0))
12617 return fold_build2_loc (loc, code, type, arg01, arg11);
12619 /* Optimize (X ^ C1) op (Y ^ C2) as (X ^ (C1 ^ C2)) op Y. */
12620 if (TREE_CODE (arg01) == INTEGER_CST
12621 && TREE_CODE (arg11) == INTEGER_CST)
12622 return fold_build2_loc (loc, code, type,
12623 fold_build2_loc (loc, BIT_XOR_EXPR, itype, arg00,
12624 fold_build2_loc (loc,
12625 BIT_XOR_EXPR, itype,
12626 arg01, arg11)),
12627 arg10);
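/* E.g. (x ^ 1) == (y ^ 2) becomes (x ^ 3) == y, folding the two
   constants into one (1 ^ 2 == 3). */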
12630 /* Attempt to simplify equality/inequality comparisons of complex
12631 values. Only lower the comparison if the result is known or
12632 can be simplified to a single scalar comparison. */
12633 if ((TREE_CODE (arg0) == COMPLEX_EXPR
12634 || TREE_CODE (arg0) == COMPLEX_CST)
12635 && (TREE_CODE (arg1) == COMPLEX_EXPR
12636 || TREE_CODE (arg1) == COMPLEX_CST))
12638 tree real0, imag0, real1, imag1;
12639 tree rcond, icond;
12641 if (TREE_CODE (arg0) == COMPLEX_EXPR)
12643 real0 = TREE_OPERAND (arg0, 0);
12644 imag0 = TREE_OPERAND (arg0, 1);
12646 else
12648 real0 = TREE_REALPART (arg0);
12649 imag0 = TREE_IMAGPART (arg0);
12652 if (TREE_CODE (arg1) == COMPLEX_EXPR)
12654 real1 = TREE_OPERAND (arg1, 0);
12655 imag1 = TREE_OPERAND (arg1, 1);
12657 else
12659 real1 = TREE_REALPART (arg1);
12660 imag1 = TREE_IMAGPART (arg1);
12663 rcond = fold_binary_loc (loc, code, type, real0, real1);
12664 if (rcond && TREE_CODE (rcond) == INTEGER_CST)
12666 if (integer_zerop (rcond))
12668 if (code == EQ_EXPR)
12669 return omit_two_operands_loc (loc, type, boolean_false_node,
12670 imag0, imag1);
12671 return fold_build2_loc (loc, NE_EXPR, type, imag0, imag1);
12673 else
12675 if (code == NE_EXPR)
12676 return omit_two_operands_loc (loc, type, boolean_true_node,
12677 imag0, imag1);
12678 return fold_build2_loc (loc, EQ_EXPR, type, imag0, imag1);
12682 icond = fold_binary_loc (loc, code, type, imag0, imag1);
12683 if (icond && TREE_CODE (icond) == INTEGER_CST)
12685 if (integer_zerop (icond))
12687 if (code == EQ_EXPR)
12688 return omit_two_operands_loc (loc, type, boolean_false_node,
12689 real0, real1);
12690 return fold_build2_loc (loc, NE_EXPR, type, real0, real1);
12692 else
12694 if (code == NE_EXPR)
12695 return omit_two_operands_loc (loc, type, boolean_true_node,
12696 real0, real1);
12697 return fold_build2_loc (loc, EQ_EXPR, type, real0, real1);
12702 return NULL_TREE;
12704 case LT_EXPR:
12705 case GT_EXPR:
12706 case LE_EXPR:
12707 case GE_EXPR:
12708 tem = fold_comparison (loc, code, type, op0, op1);
12709 if (tem != NULL_TREE)
12710 return tem;
12712 /* Transform comparisons of the form X +- C CMP X. */
12713 if ((TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
12714 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
12715 && ((TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
12716 && !HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0))))
12717 || (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
12718 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))))
12720 tree arg01 = TREE_OPERAND (arg0, 1);
12721 enum tree_code code0 = TREE_CODE (arg0);
12722 int is_positive;
12724 if (TREE_CODE (arg01) == REAL_CST)
12725 is_positive = REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg01)) ? -1 : 1;
12726 else
12727 is_positive = tree_int_cst_sgn (arg01);
12729 /* (X - c) > X becomes false. */
12730 if (code == GT_EXPR
12731 && ((code0 == MINUS_EXPR && is_positive >= 0)
12732 || (code0 == PLUS_EXPR && is_positive <= 0)))
12734 if (TREE_CODE (arg01) == INTEGER_CST
12735 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
12736 fold_overflow_warning (("assuming signed overflow does not "
12737 "occur when assuming that (X - c) > X "
12738 "is always false"),
12739 WARN_STRICT_OVERFLOW_ALL);
12740 return constant_boolean_node (0, type);
12743 /* Likewise (X + c) < X becomes false. */
12744 if (code == LT_EXPR
12745 && ((code0 == PLUS_EXPR && is_positive >= 0)
12746 || (code0 == MINUS_EXPR && is_positive <= 0)))
12748 if (TREE_CODE (arg01) == INTEGER_CST
12749 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
12750 fold_overflow_warning (("assuming signed overflow does not "
12751 "occur when assuming that "
12752 "(X + c) < X is always false"),
12753 WARN_STRICT_OVERFLOW_ALL);
12754 return constant_boolean_node (0, type);
12757 /* Convert (X - c) <= X to true. */
12758 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1)))
12759 && code == LE_EXPR
12760 && ((code0 == MINUS_EXPR && is_positive >= 0)
12761 || (code0 == PLUS_EXPR && is_positive <= 0)))
12763 if (TREE_CODE (arg01) == INTEGER_CST
12764 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
12765 fold_overflow_warning (("assuming signed overflow does not "
12766 "occur when assuming that "
12767 "(X - c) <= X is always true"),
12768 WARN_STRICT_OVERFLOW_ALL);
12769 return constant_boolean_node (1, type);
12772 /* Convert (X + c) >= X to true. */
12773 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1)))
12774 && code == GE_EXPR
12775 && ((code0 == PLUS_EXPR && is_positive >= 0)
12776 || (code0 == MINUS_EXPR && is_positive <= 0)))
12778 if (TREE_CODE (arg01) == INTEGER_CST
12779 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
12780 fold_overflow_warning (("assuming signed overflow does not "
12781 "occur when assuming that "
12782 "(X + c) >= X is always true"),
12783 WARN_STRICT_OVERFLOW_ALL);
12784 return constant_boolean_node (1, type);
12787 if (TREE_CODE (arg01) == INTEGER_CST)
12789 /* Convert X + c > X and X - c < X to true for integers. */
12790 if (code == GT_EXPR
12791 && ((code0 == PLUS_EXPR && is_positive > 0)
12792 || (code0 == MINUS_EXPR && is_positive < 0)))
12794 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
12795 fold_overflow_warning (("assuming signed overflow does "
12796 "not occur when assuming that "
12797 "(X + c) > X is always true"),
12798 WARN_STRICT_OVERFLOW_ALL);
12799 return constant_boolean_node (1, type);
12802 if (code == LT_EXPR
12803 && ((code0 == MINUS_EXPR && is_positive > 0)
12804 || (code0 == PLUS_EXPR && is_positive < 0)))
12806 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
12807 fold_overflow_warning (("assuming signed overflow does "
12808 "not occur when assuming that "
12809 "(X - c) < X is always true"),
12810 WARN_STRICT_OVERFLOW_ALL);
12811 return constant_boolean_node (1, type);
12814 /* Convert X + c <= X and X - c >= X to false for integers. */
12815 if (code == LE_EXPR
12816 && ((code0 == PLUS_EXPR && is_positive > 0)
12817 || (code0 == MINUS_EXPR && is_positive < 0)))
12819 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
12820 fold_overflow_warning (("assuming signed overflow does "
12821 "not occur when assuming that "
12822 "(X + c) <= X is always false"),
12823 WARN_STRICT_OVERFLOW_ALL);
12824 return constant_boolean_node (0, type);
12827 if (code == GE_EXPR
12828 && ((code0 == MINUS_EXPR && is_positive > 0)
12829 || (code0 == PLUS_EXPR && is_positive < 0)))
12831 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
12832 fold_overflow_warning (("assuming signed overflow does "
12833 "not occur when assuming that "
12834 "(X - c) >= X is always false"),
12835 WARN_STRICT_OVERFLOW_ALL);
12836 return constant_boolean_node (0, type);
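/* E.g. x + 1 > x folds to true for signed x, on the assumption
   that signed overflow is undefined; fold_overflow_warning records
   that assumption so -Wstrict-overflow can report it. */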
12841 /* Comparisons with the highest or lowest possible integer of
12842 the specified precision will have known values. */
12844 tree arg1_type = TREE_TYPE (arg1);
12845 unsigned int width = TYPE_PRECISION (arg1_type);
12847 if (TREE_CODE (arg1) == INTEGER_CST
12848 && width <= 2 * HOST_BITS_PER_WIDE_INT
12849 && (INTEGRAL_TYPE_P (arg1_type) || POINTER_TYPE_P (arg1_type)))
12851 HOST_WIDE_INT signed_max_hi;
12852 unsigned HOST_WIDE_INT signed_max_lo;
12853 unsigned HOST_WIDE_INT max_hi, max_lo, min_hi, min_lo;
12855 if (width <= HOST_BITS_PER_WIDE_INT)
12857 signed_max_lo = ((unsigned HOST_WIDE_INT) 1 << (width - 1))
12858 - 1;
12859 signed_max_hi = 0;
12860 max_hi = 0;
12862 if (TYPE_UNSIGNED (arg1_type))
12864 max_lo = ((unsigned HOST_WIDE_INT) 2 << (width - 1)) - 1;
12865 min_lo = 0;
12866 min_hi = 0;
12868 else
12870 max_lo = signed_max_lo;
12871 min_lo = ((unsigned HOST_WIDE_INT) -1 << (width - 1));
12872 min_hi = -1;
12875 else
12877 width -= HOST_BITS_PER_WIDE_INT;
12878 signed_max_lo = -1;
12879 signed_max_hi = ((unsigned HOST_WIDE_INT) 1 << (width - 1))
12880 - 1;
12881 max_lo = -1;
12882 min_lo = 0;
12884 if (TYPE_UNSIGNED (arg1_type))
12886 max_hi = ((unsigned HOST_WIDE_INT) 2 << (width - 1)) - 1;
12887 min_hi = 0;
12889 else
12891 max_hi = signed_max_hi;
12892 min_hi = ((unsigned HOST_WIDE_INT) -1 << (width - 1));
12896 if ((unsigned HOST_WIDE_INT) TREE_INT_CST_HIGH (arg1) == max_hi
12897 && TREE_INT_CST_LOW (arg1) == max_lo)
12898 switch (code)
12900 case GT_EXPR:
12901 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
12903 case GE_EXPR:
12904 return fold_build2_loc (loc, EQ_EXPR, type, op0, op1);
12906 case LE_EXPR:
12907 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
12909 case LT_EXPR:
12910 return fold_build2_loc (loc, NE_EXPR, type, op0, op1);
12912 /* The GE_EXPR and LT_EXPR cases above are not normally
12913 reached because of previous transformations. */
12915 default:
12916 break;
12918 else if ((unsigned HOST_WIDE_INT) TREE_INT_CST_HIGH (arg1)
12919 == max_hi
12920 && TREE_INT_CST_LOW (arg1) == max_lo - 1)
12921 switch (code)
12923 case GT_EXPR:
12924 arg1 = const_binop (PLUS_EXPR, arg1,
12925 build_int_cst (TREE_TYPE (arg1), 1));
12926 return fold_build2_loc (loc, EQ_EXPR, type,
12927 fold_convert_loc (loc,
12928 TREE_TYPE (arg1), arg0),
12929 arg1);
12930 case LE_EXPR:
12931 arg1 = const_binop (PLUS_EXPR, arg1,
12932 build_int_cst (TREE_TYPE (arg1), 1));
12933 return fold_build2_loc (loc, NE_EXPR, type,
12934 fold_convert_loc (loc, TREE_TYPE (arg1),
12935 arg0),
12936 arg1);
12937 default:
12938 break;
12940 else if ((unsigned HOST_WIDE_INT) TREE_INT_CST_HIGH (arg1)
12941 == min_hi
12942 && TREE_INT_CST_LOW (arg1) == min_lo)
12943 switch (code)
12945 case LT_EXPR:
12946 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
12948 case LE_EXPR:
12949 return fold_build2_loc (loc, EQ_EXPR, type, op0, op1);
12951 case GE_EXPR:
12952 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
12954 case GT_EXPR:
12955 return fold_build2_loc (loc, NE_EXPR, type, op0, op1);
12957 default:
12958 break;
12960 else if ((unsigned HOST_WIDE_INT) TREE_INT_CST_HIGH (arg1)
12961 == min_hi
12962 && TREE_INT_CST_LOW (arg1) == min_lo + 1)
12963 switch (code)
12965 case GE_EXPR:
12966 arg1 = const_binop (MINUS_EXPR, arg1, integer_one_node);
12967 return fold_build2_loc (loc, NE_EXPR, type,
12968 fold_convert_loc (loc,
12969 TREE_TYPE (arg1), arg0),
12970 arg1);
12971 case LT_EXPR:
12972 arg1 = const_binop (MINUS_EXPR, arg1, integer_one_node);
12973 return fold_build2_loc (loc, EQ_EXPR, type,
12974 fold_convert_loc (loc, TREE_TYPE (arg1),
12975 arg0),
12976 arg1);
12977 default:
12978 break;
12981 else if (TREE_INT_CST_HIGH (arg1) == signed_max_hi
12982 && TREE_INT_CST_LOW (arg1) == signed_max_lo
12983 && TYPE_UNSIGNED (arg1_type)
12984 /* We will flip the signedness of the comparison operator
12985 associated with the mode of arg1, so the sign bit is
12986 specified by this mode. Check that arg1 is the signed
12987 max associated with this sign bit. */
12988 && width == GET_MODE_BITSIZE (TYPE_MODE (arg1_type))
12989 /* signed_type does not work on pointer types. */
12990 && INTEGRAL_TYPE_P (arg1_type))
12992 /* The following case also applies to X < signed_max+1
12993 and X >= signed_max+1 because of previous transformations. */
12994 if (code == LE_EXPR || code == GT_EXPR)
12996 tree st;
12997 st = signed_type_for (TREE_TYPE (arg1));
12998 return fold_build2_loc (loc,
12999 code == LE_EXPR ? GE_EXPR : LT_EXPR,
13000 type, fold_convert_loc (loc, st, arg0),
13001 build_int_cst (st, 0));
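/* E.g. for unsigned 32-bit x, x <= 0x7fffffff becomes
   (int) x >= 0, and x > 0x7fffffff becomes (int) x < 0: the
   comparison reduces to a sign-bit test in the signed type. */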
13007 /* If we are comparing an ABS_EXPR with a constant, we can
13008 convert all the cases into explicit comparisons, but they may
13009 well not be faster than doing the ABS and one comparison.
13010 But ABS (X) <= C is a range comparison, which becomes a subtraction
13011 and a comparison, and is probably faster. */
13012 if (code == LE_EXPR
13013 && TREE_CODE (arg1) == INTEGER_CST
13014 && TREE_CODE (arg0) == ABS_EXPR
13015 && ! TREE_SIDE_EFFECTS (arg0)
13016 && (0 != (tem = negate_expr (arg1)))
13017 && TREE_CODE (tem) == INTEGER_CST
13018 && !TREE_OVERFLOW (tem))
13019 return fold_build2_loc (loc, TRUTH_ANDIF_EXPR, type,
13020 build2 (GE_EXPR, type,
13021 TREE_OPERAND (arg0, 0), tem),
13022 build2 (LE_EXPR, type,
13023 TREE_OPERAND (arg0, 0), arg1));
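/* E.g. ABS (x) <= 5 becomes x >= -5 && x <= 5, the range
   comparison that, as noted above, later reduces to a subtraction
   and a single compare. */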
13025 /* Convert ABS_EXPR<x> >= 0 to true. */
13026 strict_overflow_p = false;
13027 if (code == GE_EXPR
13028 && (integer_zerop (arg1)
13029 || (! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0)))
13030 && real_zerop (arg1)))
13031 && tree_expr_nonnegative_warnv_p (arg0, &strict_overflow_p))
13033 if (strict_overflow_p)
13034 fold_overflow_warning (("assuming signed overflow does not occur "
13035 "when simplifying comparison of "
13036 "absolute value and zero"),
13037 WARN_STRICT_OVERFLOW_CONDITIONAL);
13038 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
13041 /* Convert ABS_EXPR<x> < 0 to false. */
13042 strict_overflow_p = false;
13043 if (code == LT_EXPR
13044 && (integer_zerop (arg1) || real_zerop (arg1))
13045 && tree_expr_nonnegative_warnv_p (arg0, &strict_overflow_p))
13047 if (strict_overflow_p)
13048 fold_overflow_warning (("assuming signed overflow does not occur "
13049 "when simplifying comparison of "
13050 "absolute value and zero"),
13051 WARN_STRICT_OVERFLOW_CONDITIONAL);
13052 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
13055 /* If X is unsigned, convert X < (1 << Y) into X >> Y == 0
13056 and similarly for >= into !=. */
13057 if ((code == LT_EXPR || code == GE_EXPR)
13058 && TYPE_UNSIGNED (TREE_TYPE (arg0))
13059 && TREE_CODE (arg1) == LSHIFT_EXPR
13060 && integer_onep (TREE_OPERAND (arg1, 0)))
13061 return build2_loc (loc, code == LT_EXPR ? EQ_EXPR : NE_EXPR, type,
13062 build2 (RSHIFT_EXPR, TREE_TYPE (arg0), arg0,
13063 TREE_OPERAND (arg1, 1)),
13064 build_int_cst (TREE_TYPE (arg0), 0));
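/* E.g. for unsigned x, x < (1 << y) becomes (x >> y) == 0: x is
   below 2**y exactly when it has no bits at position y or
   above. */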
13066 if ((code == LT_EXPR || code == GE_EXPR)
13067 && TYPE_UNSIGNED (TREE_TYPE (arg0))
13068 && CONVERT_EXPR_P (arg1)
13069 && TREE_CODE (TREE_OPERAND (arg1, 0)) == LSHIFT_EXPR
13070 && integer_onep (TREE_OPERAND (TREE_OPERAND (arg1, 0), 0)))
13072 tem = build2 (RSHIFT_EXPR, TREE_TYPE (arg0), arg0,
13073 TREE_OPERAND (TREE_OPERAND (arg1, 0), 1));
13074 return build2_loc (loc, code == LT_EXPR ? EQ_EXPR : NE_EXPR, type,
13075 fold_convert_loc (loc, TREE_TYPE (arg0), tem),
13076 build_int_cst (TREE_TYPE (arg0), 0));
13079 return NULL_TREE;
13081 case UNORDERED_EXPR:
13082 case ORDERED_EXPR:
13083 case UNLT_EXPR:
13084 case UNLE_EXPR:
13085 case UNGT_EXPR:
13086 case UNGE_EXPR:
13087 case UNEQ_EXPR:
13088 case LTGT_EXPR:
13089 if (TREE_CODE (arg0) == REAL_CST && TREE_CODE (arg1) == REAL_CST)
13091 t1 = fold_relational_const (code, type, arg0, arg1);
13092 if (t1 != NULL_TREE)
13093 return t1;
13096 /* If the first operand is NaN, the result is constant. */
13097 if (TREE_CODE (arg0) == REAL_CST
13098 && REAL_VALUE_ISNAN (TREE_REAL_CST (arg0))
13099 && (code != LTGT_EXPR || ! flag_trapping_math))
13101 t1 = (code == ORDERED_EXPR || code == LTGT_EXPR)
13102 ? integer_zero_node
13103 : integer_one_node;
13104 return omit_one_operand_loc (loc, type, t1, arg1);
13107 /* If the second operand is NaN, the result is constant. */
13108 if (TREE_CODE (arg1) == REAL_CST
13109 && REAL_VALUE_ISNAN (TREE_REAL_CST (arg1))
13110 && (code != LTGT_EXPR || ! flag_trapping_math))
13112 t1 = (code == ORDERED_EXPR || code == LTGT_EXPR)
13113 ? integer_zero_node
13114 : integer_one_node;
13115 return omit_one_operand_loc (loc, type, t1, arg0);
13118 /* Simplify unordered comparison of something with itself. */
13119 if ((code == UNLE_EXPR || code == UNGE_EXPR || code == UNEQ_EXPR)
13120 && operand_equal_p (arg0, arg1, 0))
13121 return constant_boolean_node (1, type);
13123 if (code == LTGT_EXPR
13124 && !flag_trapping_math
13125 && operand_equal_p (arg0, arg1, 0))
13126 return constant_boolean_node (0, type);
13128 /* Fold (double)float1 CMP (double)float2 into float1 CMP float2. */
13130 tree targ0 = strip_float_extensions (arg0);
13131 tree targ1 = strip_float_extensions (arg1);
13132 tree newtype = TREE_TYPE (targ0);
13134 if (TYPE_PRECISION (TREE_TYPE (targ1)) > TYPE_PRECISION (newtype))
13135 newtype = TREE_TYPE (targ1);
13137 if (TYPE_PRECISION (newtype) < TYPE_PRECISION (TREE_TYPE (arg0)))
13138 return fold_build2_loc (loc, code, type,
13139 fold_convert_loc (loc, newtype, targ0),
13140 fold_convert_loc (loc, newtype, targ1));
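/* E.g. with float f1 and f2, UNLT ((double) f1, (double) f2)
   becomes UNLT (f1, f2): the float-to-double conversions are
   exact, so comparing in the narrower type is equivalent. */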
13143 return NULL_TREE;
13145 case COMPOUND_EXPR:
13146 /* When pedantic, a compound expression can be neither an lvalue
13147 nor an integer constant expression. */
13148 if (TREE_SIDE_EFFECTS (arg0) || TREE_CONSTANT (arg1))
13149 return NULL_TREE;
13150 /* Don't let (0, 0) be a null pointer constant. */
13151 tem = integer_zerop (arg1) ? build1 (NOP_EXPR, type, arg1)
13152 : fold_convert_loc (loc, type, arg1);
13153 return pedantic_non_lvalue_loc (loc, tem);
13155 case COMPLEX_EXPR:
13156 if ((TREE_CODE (arg0) == REAL_CST
13157 && TREE_CODE (arg1) == REAL_CST)
13158 || (TREE_CODE (arg0) == INTEGER_CST
13159 && TREE_CODE (arg1) == INTEGER_CST))
13160 return build_complex (type, arg0, arg1);
13161 return NULL_TREE;
13163 case ASSERT_EXPR:
13164 /* An ASSERT_EXPR should never be passed to fold_binary. */
13165 gcc_unreachable ();
13167 default:
13168 return NULL_TREE;
13169 } /* switch (code) */
13172 /* Callback for walk_tree, looking for LABEL_EXPR. Return *TP if it is
13173 a LABEL_EXPR; otherwise return NULL_TREE. Do not check the subtrees
13174 of GOTO_EXPR. */
13176 static tree
13177 contains_label_1 (tree *tp, int *walk_subtrees, void *data ATTRIBUTE_UNUSED)
13179 switch (TREE_CODE (*tp))
13181 case LABEL_EXPR:
13182 return *tp;
13184 case GOTO_EXPR:
13185 *walk_subtrees = 0;
13187 /* ... fall through ... */
13189 default:
13190 return NULL_TREE;
13194 /* Return whether the sub-tree ST contains a label which is accessible from
13195 outside the sub-tree. */
13197 static bool
13198 contains_label_p (tree st)
13200 return
13201 (walk_tree_without_duplicates (&st, contains_label_1, NULL) != NULL_TREE);
13204 /* Fold a ternary expression of code CODE and type TYPE with operands
13205 OP0, OP1, and OP2. Return the folded expression if folding is
13206 successful. Otherwise, return NULL_TREE. */
13208 tree
13209 fold_ternary_loc (location_t loc, enum tree_code code, tree type,
13210 tree op0, tree op1, tree op2)
13212 tree tem;
13213 tree arg0 = NULL_TREE, arg1 = NULL_TREE, arg2 = NULL_TREE;
13214 enum tree_code_class kind = TREE_CODE_CLASS (code);
13216 gcc_assert (IS_EXPR_CODE_CLASS (kind)
13217 && TREE_CODE_LENGTH (code) == 3);
13219 /* Strip any conversions that don't change the mode. This is safe
13220 for every expression, except for a comparison expression because
13221 its signedness is derived from its operands. So, in the latter
13222 case, only strip conversions that don't change the signedness.
13224 Note that this is done as an internal manipulation within the
13225 constant folder, in order to find the simplest representation of
13226 the arguments so that their form can be studied. In any case,
13227 the appropriate type conversions should be put back in the tree
13228 that will get out of the constant folder. */
13229 if (op0)
13231 arg0 = op0;
13232 STRIP_NOPS (arg0);
13235 if (op1)
13237 arg1 = op1;
13238 STRIP_NOPS (arg1);
13241 if (op2)
13243 arg2 = op2;
13244 STRIP_NOPS (arg2);
13247 switch (code)
13249 case COMPONENT_REF:
13250 if (TREE_CODE (arg0) == CONSTRUCTOR
13251 && ! type_contains_placeholder_p (TREE_TYPE (arg0)))
13253 unsigned HOST_WIDE_INT idx;
13254 tree field, value;
13255 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (arg0), idx, field, value)
13256 if (field == arg1)
13257 return value;
13259 return NULL_TREE;
13261 case COND_EXPR:
13262 /* Pedantic ANSI C says that a conditional expression is never an lvalue,
13263 so all simple results must be passed through pedantic_non_lvalue. */
13264 if (TREE_CODE (arg0) == INTEGER_CST)
13266 tree unused_op = integer_zerop (arg0) ? op1 : op2;
13267 tem = integer_zerop (arg0) ? op2 : op1;
13268 /* Only optimize constant conditions when the selected branch
13269 has the same type as the COND_EXPR. This avoids optimizing
13270 away "c ? x : throw", where the throw has a void type.
13271 Also avoid throwing away an operand that contains a label. */
13272 if ((!TREE_SIDE_EFFECTS (unused_op)
13273 || !contains_label_p (unused_op))
13274 && (! VOID_TYPE_P (TREE_TYPE (tem))
13275 || VOID_TYPE_P (type)))
13276 return pedantic_non_lvalue_loc (loc, tem);
13277 return NULL_TREE;
13279 if (operand_equal_p (arg1, op2, 0))
13280 return pedantic_omit_one_operand_loc (loc, type, arg1, arg0);
13282 /* If we have A op B ? A : C, we may be able to convert this to a
13283 simpler expression, depending on the operation and the values
13284 of B and C. Signed zeros prevent all of these transformations,
13285 for reasons given above each one.
13287 Also try swapping the arguments and inverting the conditional. */
13288 if (COMPARISON_CLASS_P (arg0)
13289 && operand_equal_for_comparison_p (TREE_OPERAND (arg0, 0),
13290 arg1, TREE_OPERAND (arg0, 1))
13291 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg1))))
13293 tem = fold_cond_expr_with_comparison (loc, type, arg0, op1, op2);
13294 if (tem)
13295 return tem;
13298 if (COMPARISON_CLASS_P (arg0)
13299 && operand_equal_for_comparison_p (TREE_OPERAND (arg0, 0),
13300 op2,
13301 TREE_OPERAND (arg0, 1))
13302 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (op2))))
13304 tem = fold_truth_not_expr (loc, arg0);
13305 if (tem && COMPARISON_CLASS_P (tem))
13307 tem = fold_cond_expr_with_comparison (loc, type, tem, op2, op1);
13308 if (tem)
13309 return tem;
13313 /* If the second operand is simpler than the third, swap them
13314 since that produces better jump optimization results. */
13315 if (truth_value_p (TREE_CODE (arg0))
13316 && tree_swap_operands_p (op1, op2, false))
13318 /* See if this can be inverted. If it can't, possibly because
13319 it was a floating-point inequality comparison, don't do
13320 anything. */
13321 tem = fold_truth_not_expr (loc, arg0);
13322 if (tem)
13323 return fold_build3_loc (loc, code, type, tem, op2, op1);
13326 /* Convert A ? 1 : 0 to simply A. */
13327 if (integer_onep (op1)
13328 && integer_zerop (op2)
13329 /* If we try to convert OP0 to our type, the
13330 call to fold will try to move the conversion inside
13331 a COND, which will recurse. In that case, the COND_EXPR
13332 is probably the best choice, so leave it alone. */
13333 && type == TREE_TYPE (arg0))
13334 return pedantic_non_lvalue_loc (loc, arg0);
13336 /* Convert A ? 0 : 1 to !A. This prefers the use of NOT_EXPR
13337 over COND_EXPR in cases such as floating point comparisons. */
13338 if (integer_zerop (op1)
13339 && integer_onep (op2)
13340 && truth_value_p (TREE_CODE (arg0)))
13341 return pedantic_non_lvalue_loc (loc,
13342 fold_convert_loc (loc, type,
13343 invert_truthvalue_loc (loc,
13344 arg0)));
13346 /* A < 0 ? <sign bit of A> : 0 is simply (A & <sign bit of A>). */
13347 if (TREE_CODE (arg0) == LT_EXPR
13348 && integer_zerop (TREE_OPERAND (arg0, 1))
13349 && integer_zerop (op2)
13350 && (tem = sign_bit_p (TREE_OPERAND (arg0, 0), arg1)))
13352 /* sign_bit_p only checks ARG1 bits within A's precision.
13353 If <sign bit of A> has wider type than A, bits outside
13354 of A's precision in <sign bit of A> need to be checked.
13355 If they are all 0, this optimization needs to be done
13356 in A's unsigned type; if they are all 1, in A's signed
13357 type; otherwise it can't be done. */
13358 if (TYPE_PRECISION (TREE_TYPE (tem))
13359 < TYPE_PRECISION (TREE_TYPE (arg1))
13360 && TYPE_PRECISION (TREE_TYPE (tem))
13361 < TYPE_PRECISION (type))
13363 unsigned HOST_WIDE_INT mask_lo;
13364 HOST_WIDE_INT mask_hi;
13365 int inner_width, outer_width;
13366 tree tem_type;
13368 inner_width = TYPE_PRECISION (TREE_TYPE (tem));
13369 outer_width = TYPE_PRECISION (TREE_TYPE (arg1));
13370 if (outer_width > TYPE_PRECISION (type))
13371 outer_width = TYPE_PRECISION (type);
13373 if (outer_width > HOST_BITS_PER_WIDE_INT)
13375 mask_hi = ((unsigned HOST_WIDE_INT) -1
13376 >> (2 * HOST_BITS_PER_WIDE_INT - outer_width));
13377 mask_lo = -1;
13379 else
13381 mask_hi = 0;
13382 mask_lo = ((unsigned HOST_WIDE_INT) -1
13383 >> (HOST_BITS_PER_WIDE_INT - outer_width));
13385 if (inner_width > HOST_BITS_PER_WIDE_INT)
13387 mask_hi &= ~((unsigned HOST_WIDE_INT) -1
13388 >> (HOST_BITS_PER_WIDE_INT - inner_width));
13389 mask_lo = 0;
13391 else
13392 mask_lo &= ~((unsigned HOST_WIDE_INT) -1
13393 >> (HOST_BITS_PER_WIDE_INT - inner_width));
13395 if ((TREE_INT_CST_HIGH (arg1) & mask_hi) == mask_hi
13396 && (TREE_INT_CST_LOW (arg1) & mask_lo) == mask_lo)
13398 tem_type = signed_type_for (TREE_TYPE (tem));
13399 tem = fold_convert_loc (loc, tem_type, tem);
13401 else if ((TREE_INT_CST_HIGH (arg1) & mask_hi) == 0
13402 && (TREE_INT_CST_LOW (arg1) & mask_lo) == 0)
13404 tem_type = unsigned_type_for (TREE_TYPE (tem));
13405 tem = fold_convert_loc (loc, tem_type, tem);
13407 else
13408 tem = NULL;
13411 if (tem)
13412 return
13413 fold_convert_loc (loc, type,
13414 fold_build2_loc (loc, BIT_AND_EXPR,
13415 TREE_TYPE (tem), tem,
13416 fold_convert_loc (loc,
13417 TREE_TYPE (tem),
13418 arg1)));
13421 /* (A >> N) & 1 ? (1 << N) : 0 is simply A & (1 << N). A & 1 was
13422 already handled above. */
13423 if (TREE_CODE (arg0) == BIT_AND_EXPR
13424 && integer_onep (TREE_OPERAND (arg0, 1))
13425 && integer_zerop (op2)
13426 && integer_pow2p (arg1))
13428 tree tem = TREE_OPERAND (arg0, 0);
13429 STRIP_NOPS (tem);
13430 if (TREE_CODE (tem) == RSHIFT_EXPR
13431 && TREE_CODE (TREE_OPERAND (tem, 1)) == INTEGER_CST
13432 && (unsigned HOST_WIDE_INT) tree_log2 (arg1) ==
13433 TREE_INT_CST_LOW (TREE_OPERAND (tem, 1)))
13434 return fold_build2_loc (loc, BIT_AND_EXPR, type,
13435 TREE_OPERAND (tem, 0), arg1);
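/* E.g. ((a >> 3) & 1) ? 8 : 0 becomes a & 8, since the shift
   count matches log2 of the selected constant. */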
13438 /* A & N ? N : 0 is simply A & N if N is a power of two. This
13439 is probably obsolete because the first operand should be a
13440 truth value (that's why we have the two cases above), but let's
13441 leave it in until we can confirm this for all front-ends. */
13442 if (integer_zerop (op2)
13443 && TREE_CODE (arg0) == NE_EXPR
13444 && integer_zerop (TREE_OPERAND (arg0, 1))
13445 && integer_pow2p (arg1)
13446 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_AND_EXPR
13447 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
13448 arg1, OEP_ONLY_CONST))
13449 return pedantic_non_lvalue_loc (loc,
13450 fold_convert_loc (loc, type,
13451 TREE_OPERAND (arg0, 0)));
13453 /* Convert A ? B : 0 into A && B if A and B are truth values. */
13454 if (integer_zerop (op2)
13455 && truth_value_p (TREE_CODE (arg0))
13456 && truth_value_p (TREE_CODE (arg1)))
13457 return fold_build2_loc (loc, TRUTH_ANDIF_EXPR, type,
13458 fold_convert_loc (loc, type, arg0),
13459 arg1);
13461 /* Convert A ? B : 1 into !A || B if A and B are truth values. */
13462 if (integer_onep (op2)
13463 && truth_value_p (TREE_CODE (arg0))
13464 && truth_value_p (TREE_CODE (arg1)))
13466 /* Only perform transformation if ARG0 is easily inverted. */
13467 tem = fold_truth_not_expr (loc, arg0);
13468 if (tem)
13469 return fold_build2_loc (loc, TRUTH_ORIF_EXPR, type,
13470 fold_convert_loc (loc, type, tem),
13471 arg1);
13474 /* Convert A ? 0 : B into !A && B if A and B are truth values. */
13475 if (integer_zerop (arg1)
13476 && truth_value_p (TREE_CODE (arg0))
13477 && truth_value_p (TREE_CODE (op2)))
13479 /* Only perform transformation if ARG0 is easily inverted. */
13480 tem = fold_truth_not_expr (loc, arg0);
13481 if (tem)
13482 return fold_build2_loc (loc, TRUTH_ANDIF_EXPR, type,
13483 fold_convert_loc (loc, type, tem),
13484 op2);
13487 /* Convert A ? 1 : B into A || B if A and B are truth values. */
13488 if (integer_onep (arg1)
13489 && truth_value_p (TREE_CODE (arg0))
13490 && truth_value_p (TREE_CODE (op2)))
13491 return fold_build2_loc (loc, TRUTH_ORIF_EXPR, type,
13492 fold_convert_loc (loc, type, arg0),
13493 op2);
13495 return NULL_TREE;
13497 case CALL_EXPR:
13498 /* CALL_EXPRs used to be ternary exprs. Catch any mistaken uses
13499 of fold_ternary on them. */
13500 gcc_unreachable ();
13502 case BIT_FIELD_REF:
13503 if ((TREE_CODE (arg0) == VECTOR_CST
13504 || (TREE_CODE (arg0) == CONSTRUCTOR && TREE_CONSTANT (arg0)))
13505 && type == TREE_TYPE (TREE_TYPE (arg0)))
13507 unsigned HOST_WIDE_INT width = tree_low_cst (arg1, 1);
13508 unsigned HOST_WIDE_INT idx = tree_low_cst (op2, 1);
13510 if (width != 0
13511 && simple_cst_equal (arg1, TYPE_SIZE (type)) == 1
13512 && (idx % width) == 0
13513 && (idx = idx / width)
13514 < TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0)))
13516 tree elements = NULL_TREE;
13518 if (TREE_CODE (arg0) == VECTOR_CST)
13519 elements = TREE_VECTOR_CST_ELTS (arg0);
13520 else
13522 unsigned HOST_WIDE_INT idx;
13523 tree value;
13525 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (arg0), idx, value)
13526 elements = tree_cons (NULL_TREE, value, elements);
13528 while (idx-- > 0 && elements)
13529 elements = TREE_CHAIN (elements);
13530 if (elements)
13531 return TREE_VALUE (elements);
13532 else
13533 return build_zero_cst (type);
13537 /* A bit-field-ref that referenced the full argument can be stripped. */
13538 if (INTEGRAL_TYPE_P (TREE_TYPE (arg0))
13539 && TYPE_PRECISION (TREE_TYPE (arg0)) == tree_low_cst (arg1, 1)
13540 && integer_zerop (op2))
13541 return fold_convert_loc (loc, type, arg0);
13543 return NULL_TREE;
13545 case FMA_EXPR:
13546 /* For integers we can decompose the FMA if possible. */
13547 if (TREE_CODE (arg0) == INTEGER_CST
13548 && TREE_CODE (arg1) == INTEGER_CST)
13549 return fold_build2_loc (loc, PLUS_EXPR, type,
13550 const_binop (MULT_EXPR, arg0, arg1), arg2);
13551 if (integer_zerop (arg2))
13552 return fold_build2_loc (loc, MULT_EXPR, type, arg0, arg1);
13554 return fold_fma (loc, type, arg0, arg1, arg2);
13556 default:
13557 return NULL_TREE;
13558 } /* switch (code) */
13561 /* Perform constant folding and related simplification of EXPR.
13562 The related simplifications include x*1 => x, x*0 => 0, etc.,
13563 and application of the associative law.
13564 NOP_EXPR conversions may be removed freely (as long as we
13565 are careful not to change the type of the overall expression).
13566 We cannot simplify through a CONVERT_EXPR, FIX_EXPR or FLOAT_EXPR,
13567 but we can constant-fold them if they have constant operands. */
13569 #ifdef ENABLE_FOLD_CHECKING
13570 # define fold(x) fold_1 (x)
13571 static tree fold_1 (tree);
13572 static
13573 #endif
13574 tree
13575 fold (tree expr)
13577 const tree t = expr;
13578 enum tree_code code = TREE_CODE (t);
13579 enum tree_code_class kind = TREE_CODE_CLASS (code);
13580 tree tem;
13581 location_t loc = EXPR_LOCATION (expr);
13583 /* Return right away if a constant. */
13584 if (kind == tcc_constant)
13585 return t;
13587 /* CALL_EXPR-like objects with variable numbers of operands are
13588 treated specially. */
13589 if (kind == tcc_vl_exp)
13591 if (code == CALL_EXPR)
13593 tem = fold_call_expr (loc, expr, false);
13594 return tem ? tem : expr;
13596 return expr;
13599 if (IS_EXPR_CODE_CLASS (kind))
13601 tree type = TREE_TYPE (t);
13602 tree op0, op1, op2;
13604 switch (TREE_CODE_LENGTH (code))
13606 case 1:
13607 op0 = TREE_OPERAND (t, 0);
13608 tem = fold_unary_loc (loc, code, type, op0);
13609 return tem ? tem : expr;
13610 case 2:
13611 op0 = TREE_OPERAND (t, 0);
13612 op1 = TREE_OPERAND (t, 1);
13613 tem = fold_binary_loc (loc, code, type, op0, op1);
13614 return tem ? tem : expr;
13615 case 3:
13616 op0 = TREE_OPERAND (t, 0);
13617 op1 = TREE_OPERAND (t, 1);
13618 op2 = TREE_OPERAND (t, 2);
13619 tem = fold_ternary_loc (loc, code, type, op0, op1, op2);
13620 return tem ? tem : expr;
13621 default:
13622 break;
13626 switch (code)
13628 case ARRAY_REF:
13630 tree op0 = TREE_OPERAND (t, 0);
13631 tree op1 = TREE_OPERAND (t, 1);
13633 if (TREE_CODE (op1) == INTEGER_CST
13634 && TREE_CODE (op0) == CONSTRUCTOR
13635 && ! type_contains_placeholder_p (TREE_TYPE (op0)))
13637 VEC(constructor_elt,gc) *elts = CONSTRUCTOR_ELTS (op0);
13638 unsigned HOST_WIDE_INT end = VEC_length (constructor_elt, elts);
13639 unsigned HOST_WIDE_INT begin = 0;
13641 /* Find a matching index by means of a binary search. */
13642 while (begin != end)
13644 unsigned HOST_WIDE_INT middle = (begin + end) / 2;
13645 tree index = VEC_index (constructor_elt, elts, middle)->index;
13647 if (TREE_CODE (index) == INTEGER_CST
13648 && tree_int_cst_lt (index, op1))
13649 begin = middle + 1;
13650 else if (TREE_CODE (index) == INTEGER_CST
13651 && tree_int_cst_lt (op1, index))
13652 end = middle;
13653 else if (TREE_CODE (index) == RANGE_EXPR
13654 && tree_int_cst_lt (TREE_OPERAND (index, 1), op1))
13655 begin = middle + 1;
13656 else if (TREE_CODE (index) == RANGE_EXPR
13657 && tree_int_cst_lt (op1, TREE_OPERAND (index, 0)))
13658 end = middle;
13659 else
13660 return VEC_index (constructor_elt, elts, middle)->value;
13664 return t;
13667 case CONST_DECL:
13668 return fold (DECL_INITIAL (t));
13670 default:
13671 return t;
13672 } /* switch (code) */
13675 #ifdef ENABLE_FOLD_CHECKING
13676 #undef fold
13678 static void fold_checksum_tree (const_tree, struct md5_ctx *, htab_t);
13679 static void fold_check_failed (const_tree, const_tree);
13680 void print_fold_checksum (const_tree);
13682 /* When --enable-checking=fold, compute a digest of expr before
13683 and after the actual fold call, to verify that fold did not
13684 accidentally change the original expr. */
13686 tree
13687 fold (tree expr)
13689 tree ret;
13690 struct md5_ctx ctx;
13691 unsigned char checksum_before[16], checksum_after[16];
13692 htab_t ht;
13694 ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
13695 md5_init_ctx (&ctx);
13696 fold_checksum_tree (expr, &ctx, ht);
13697 md5_finish_ctx (&ctx, checksum_before);
13698 htab_empty (ht);
13700 ret = fold_1 (expr);
13702 md5_init_ctx (&ctx);
13703 fold_checksum_tree (expr, &ctx, ht);
13704 md5_finish_ctx (&ctx, checksum_after);
13705 htab_delete (ht);
13707 if (memcmp (checksum_before, checksum_after, 16))
13708 fold_check_failed (expr, ret);
13710 return ret;
13713 void
13714 print_fold_checksum (const_tree expr)
13716 struct md5_ctx ctx;
13717 unsigned char checksum[16], cnt;
13718 htab_t ht;
13720 ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
13721 md5_init_ctx (&ctx);
13722 fold_checksum_tree (expr, &ctx, ht);
13723 md5_finish_ctx (&ctx, checksum);
13724 htab_delete (ht);
13725 for (cnt = 0; cnt < 16; ++cnt)
13726 fprintf (stderr, "%02x", checksum[cnt]);
13727 putc ('\n', stderr);
13730 static void
13731 fold_check_failed (const_tree expr ATTRIBUTE_UNUSED, const_tree ret ATTRIBUTE_UNUSED)
13733 internal_error ("fold check: original tree changed by fold");
13736 static void
13737 fold_checksum_tree (const_tree expr, struct md5_ctx *ctx, htab_t ht)
13739 void **slot;
13740 enum tree_code code;
13741 union tree_node buf;
13742 int i, len;
13744 recursive_label:
13746 gcc_assert ((sizeof (struct tree_exp) + 5 * sizeof (tree)
13747 <= sizeof (struct tree_function_decl))
13748 && sizeof (struct tree_type) <= sizeof (struct tree_function_decl));
13749 if (expr == NULL)
13750 return;
13751 slot = (void **) htab_find_slot (ht, expr, INSERT);
13752 if (*slot != NULL)
13753 return;
13754 *slot = CONST_CAST_TREE (expr);
13755 code = TREE_CODE (expr);
13756 if (TREE_CODE_CLASS (code) == tcc_declaration
13757 && DECL_ASSEMBLER_NAME_SET_P (expr))
13759 /* Allow DECL_ASSEMBLER_NAME to be modified. */
13760 memcpy ((char *) &buf, expr, tree_size (expr));
13761 SET_DECL_ASSEMBLER_NAME ((tree)&buf, NULL);
13762 expr = (tree) &buf;
13764 else if (TREE_CODE_CLASS (code) == tcc_type
13765 && (TYPE_POINTER_TO (expr)
13766 || TYPE_REFERENCE_TO (expr)
13767 || TYPE_CACHED_VALUES_P (expr)
13768 || TYPE_CONTAINS_PLACEHOLDER_INTERNAL (expr)
13769 || TYPE_NEXT_VARIANT (expr)))
13771 /* Allow these fields to be modified. */
13772 tree tmp;
13773 memcpy ((char *) &buf, expr, tree_size (expr));
13774 expr = tmp = (tree) &buf;
13775 TYPE_CONTAINS_PLACEHOLDER_INTERNAL (tmp) = 0;
13776 TYPE_POINTER_TO (tmp) = NULL;
13777 TYPE_REFERENCE_TO (tmp) = NULL;
13778 TYPE_NEXT_VARIANT (tmp) = NULL;
13779 if (TYPE_CACHED_VALUES_P (tmp))
13781 TYPE_CACHED_VALUES_P (tmp) = 0;
13782 TYPE_CACHED_VALUES (tmp) = NULL;
13785 md5_process_bytes (expr, tree_size (expr), ctx);
13786 fold_checksum_tree (TREE_TYPE (expr), ctx, ht);
13787 if (TREE_CODE_CLASS (code) != tcc_type
13788 && TREE_CODE_CLASS (code) != tcc_declaration
13789 && code != TREE_LIST
13790 && code != SSA_NAME)
13791 fold_checksum_tree (TREE_CHAIN (expr), ctx, ht);
13792 switch (TREE_CODE_CLASS (code))
13794 case tcc_constant:
13795 switch (code)
13797 case STRING_CST:
13798 md5_process_bytes (TREE_STRING_POINTER (expr),
13799 TREE_STRING_LENGTH (expr), ctx);
13800 break;
13801 case COMPLEX_CST:
13802 fold_checksum_tree (TREE_REALPART (expr), ctx, ht);
13803 fold_checksum_tree (TREE_IMAGPART (expr), ctx, ht);
13804 break;
13805 case VECTOR_CST:
13806 fold_checksum_tree (TREE_VECTOR_CST_ELTS (expr), ctx, ht);
13807 break;
13808 default:
13809 break;
13811 break;
13812 case tcc_exceptional:
13813 switch (code)
13815 case TREE_LIST:
13816 fold_checksum_tree (TREE_PURPOSE (expr), ctx, ht);
13817 fold_checksum_tree (TREE_VALUE (expr), ctx, ht);
13818 expr = TREE_CHAIN (expr);
13819 goto recursive_label;
13820 break;
13821 case TREE_VEC:
13822 for (i = 0; i < TREE_VEC_LENGTH (expr); ++i)
13823 fold_checksum_tree (TREE_VEC_ELT (expr, i), ctx, ht);
13824 break;
13825 default:
13826 break;
13828 break;
13829 case tcc_expression:
13830 case tcc_reference:
13831 case tcc_comparison:
13832 case tcc_unary:
13833 case tcc_binary:
13834 case tcc_statement:
13835 case tcc_vl_exp:
13836 len = TREE_OPERAND_LENGTH (expr);
13837 for (i = 0; i < len; ++i)
13838 fold_checksum_tree (TREE_OPERAND (expr, i), ctx, ht);
13839 break;
13840 case tcc_declaration:
13841 fold_checksum_tree (DECL_NAME (expr), ctx, ht);
13842 fold_checksum_tree (DECL_CONTEXT (expr), ctx, ht);
13843 if (CODE_CONTAINS_STRUCT (TREE_CODE (expr), TS_DECL_COMMON))
13845 fold_checksum_tree (DECL_SIZE (expr), ctx, ht);
13846 fold_checksum_tree (DECL_SIZE_UNIT (expr), ctx, ht);
13847 fold_checksum_tree (DECL_INITIAL (expr), ctx, ht);
13848 fold_checksum_tree (DECL_ABSTRACT_ORIGIN (expr), ctx, ht);
13849 fold_checksum_tree (DECL_ATTRIBUTES (expr), ctx, ht);
13851 if (CODE_CONTAINS_STRUCT (TREE_CODE (expr), TS_DECL_WITH_VIS))
13852 fold_checksum_tree (DECL_SECTION_NAME (expr), ctx, ht);
13854 if (CODE_CONTAINS_STRUCT (TREE_CODE (expr), TS_DECL_NON_COMMON))
13856 fold_checksum_tree (DECL_VINDEX (expr), ctx, ht);
13857 fold_checksum_tree (DECL_RESULT_FLD (expr), ctx, ht);
13858 fold_checksum_tree (DECL_ARGUMENT_FLD (expr), ctx, ht);
13860 break;
13861 case tcc_type:
13862 if (TREE_CODE (expr) == ENUMERAL_TYPE)
13863 fold_checksum_tree (TYPE_VALUES (expr), ctx, ht);
13864 fold_checksum_tree (TYPE_SIZE (expr), ctx, ht);
13865 fold_checksum_tree (TYPE_SIZE_UNIT (expr), ctx, ht);
13866 fold_checksum_tree (TYPE_ATTRIBUTES (expr), ctx, ht);
13867 fold_checksum_tree (TYPE_NAME (expr), ctx, ht);
13868 if (INTEGRAL_TYPE_P (expr)
13869 || SCALAR_FLOAT_TYPE_P (expr))
13871 fold_checksum_tree (TYPE_MIN_VALUE (expr), ctx, ht);
13872 fold_checksum_tree (TYPE_MAX_VALUE (expr), ctx, ht);
13874 fold_checksum_tree (TYPE_MAIN_VARIANT (expr), ctx, ht);
13875 if (TREE_CODE (expr) == RECORD_TYPE
13876 || TREE_CODE (expr) == UNION_TYPE
13877 || TREE_CODE (expr) == QUAL_UNION_TYPE)
13878 fold_checksum_tree (TYPE_BINFO (expr), ctx, ht);
13879 fold_checksum_tree (TYPE_CONTEXT (expr), ctx, ht);
13880 break;
13881 default:
13882 break;
13886 /* Helper function for outputting the checksum of a tree T. When
13887 debugging with gdb, you can "define mynext" to be "next" followed
13888 by "call debug_fold_checksum (op0)", then just trace down till the
13889 outputs differ. */
13891 DEBUG_FUNCTION void
13892 debug_fold_checksum (const_tree t)
13894 int i;
13895 unsigned char checksum[16];
13896 struct md5_ctx ctx;
13897 htab_t ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
13899 md5_init_ctx (&ctx);
13900 fold_checksum_tree (t, &ctx, ht);
13901 md5_finish_ctx (&ctx, checksum);
13902 htab_empty (ht);
13904 for (i = 0; i < 16; i++)
13905 fprintf (stderr, "%d ", checksum[i]);
13907 fprintf (stderr, "\n");
13910 #endif
13912 /* Fold a unary tree expression with code CODE of type TYPE with an
13913 operand OP0. LOC is the location of the resulting expression.
13914 Return a folded expression if successful. Otherwise, return a tree
13915 expression with code CODE of type TYPE with an operand OP0. */
13917 tree
13918 fold_build1_stat_loc (location_t loc,
13919 enum tree_code code, tree type, tree op0 MEM_STAT_DECL)
13921 tree tem;
13922 #ifdef ENABLE_FOLD_CHECKING
13923 unsigned char checksum_before[16], checksum_after[16];
13924 struct md5_ctx ctx;
13925 htab_t ht;
13927 ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
13928 md5_init_ctx (&ctx);
13929 fold_checksum_tree (op0, &ctx, ht);
13930 md5_finish_ctx (&ctx, checksum_before);
13931 htab_empty (ht);
13932 #endif
13934 tem = fold_unary_loc (loc, code, type, op0);
13935 if (!tem)
13936 tem = build1_stat_loc (loc, code, type, op0 PASS_MEM_STAT);
13938 #ifdef ENABLE_FOLD_CHECKING
13939 md5_init_ctx (&ctx);
13940 fold_checksum_tree (op0, &ctx, ht);
13941 md5_finish_ctx (&ctx, checksum_after);
13942 htab_delete (ht);
13944 if (memcmp (checksum_before, checksum_after, 16))
13945 fold_check_failed (op0, tem);
13946 #endif
13947 return tem;
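/* Usage sketch (illustrative; LOC stands for whatever location_t the
   caller has at hand): callers never need to test whether folding
   succeeded, because a plain tree node is built on failure:

       tree five = build_int_cst (integer_type_node, 5);
       tree neg = fold_build1_loc (loc, NEGATE_EXPR,
                                   integer_type_node, five);

   Here NEG is the INTEGER_CST -5; had FIVE been a non-constant
   operand, NEG would instead be a freshly built NEGATE_EXPR.  */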
13950 /* Fold a binary tree expression with code CODE of type TYPE with
13951 operands OP0 and OP1. LOC is the location of the resulting
13952 expression. Return a folded expression if successful. Otherwise,
13953 return a tree expression with code CODE of type TYPE with operands
13954 OP0 and OP1. */
13956 tree
13957 fold_build2_stat_loc (location_t loc,
13958 enum tree_code code, tree type, tree op0, tree op1
13959 MEM_STAT_DECL)
13961 tree tem;
13962 #ifdef ENABLE_FOLD_CHECKING
13963 unsigned char checksum_before_op0[16],
13964 checksum_before_op1[16],
13965 checksum_after_op0[16],
13966 checksum_after_op1[16];
13967 struct md5_ctx ctx;
13968 htab_t ht;
13970 ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
13971 md5_init_ctx (&ctx);
13972 fold_checksum_tree (op0, &ctx, ht);
13973 md5_finish_ctx (&ctx, checksum_before_op0);
13974 htab_empty (ht);
13976 md5_init_ctx (&ctx);
13977 fold_checksum_tree (op1, &ctx, ht);
13978 md5_finish_ctx (&ctx, checksum_before_op1);
13979 htab_empty (ht);
13980 #endif
13982 tem = fold_binary_loc (loc, code, type, op0, op1);
13983 if (!tem)
13984 tem = build2_stat_loc (loc, code, type, op0, op1 PASS_MEM_STAT);
13986 #ifdef ENABLE_FOLD_CHECKING
13987 md5_init_ctx (&ctx);
13988 fold_checksum_tree (op0, &ctx, ht);
13989 md5_finish_ctx (&ctx, checksum_after_op0);
13990 htab_empty (ht);
13992 if (memcmp (checksum_before_op0, checksum_after_op0, 16))
13993 fold_check_failed (op0, tem);
13995 md5_init_ctx (&ctx);
13996 fold_checksum_tree (op1, &ctx, ht);
13997 md5_finish_ctx (&ctx, checksum_after_op1);
13998 htab_delete (ht);
14000 if (memcmp (checksum_before_op1, checksum_after_op1, 16))
14001 fold_check_failed (op1, tem);
14002 #endif
14003 return tem;
14006 /* Fold a ternary tree expression with code CODE of type TYPE with
14007 operands OP0, OP1, and OP2. Return a folded expression if
14008 successful. Otherwise, return a tree expression with code CODE of
14009 type TYPE with operands OP0, OP1, and OP2. */
14011 tree
14012 fold_build3_stat_loc (location_t loc, enum tree_code code, tree type,
14013 tree op0, tree op1, tree op2 MEM_STAT_DECL)
14015 tree tem;
14016 #ifdef ENABLE_FOLD_CHECKING
14017 unsigned char checksum_before_op0[16],
14018 checksum_before_op1[16],
14019 checksum_before_op2[16],
14020 checksum_after_op0[16],
14021 checksum_after_op1[16],
14022 checksum_after_op2[16];
14023 struct md5_ctx ctx;
14024 htab_t ht;
14026 ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
14027 md5_init_ctx (&ctx);
14028 fold_checksum_tree (op0, &ctx, ht);
14029 md5_finish_ctx (&ctx, checksum_before_op0);
14030 htab_empty (ht);
14032 md5_init_ctx (&ctx);
14033 fold_checksum_tree (op1, &ctx, ht);
14034 md5_finish_ctx (&ctx, checksum_before_op1);
14035 htab_empty (ht);
14037 md5_init_ctx (&ctx);
14038 fold_checksum_tree (op2, &ctx, ht);
14039 md5_finish_ctx (&ctx, checksum_before_op2);
14040 htab_empty (ht);
14041 #endif
14043 gcc_assert (TREE_CODE_CLASS (code) != tcc_vl_exp);
14044 tem = fold_ternary_loc (loc, code, type, op0, op1, op2);
14045 if (!tem)
14046 tem = build3_stat_loc (loc, code, type, op0, op1, op2 PASS_MEM_STAT);
14048 #ifdef ENABLE_FOLD_CHECKING
14049 md5_init_ctx (&ctx);
14050 fold_checksum_tree (op0, &ctx, ht);
14051 md5_finish_ctx (&ctx, checksum_after_op0);
14052 htab_empty (ht);
14054 if (memcmp (checksum_before_op0, checksum_after_op0, 16))
14055 fold_check_failed (op0, tem);
14057 md5_init_ctx (&ctx);
14058 fold_checksum_tree (op1, &ctx, ht);
14059 md5_finish_ctx (&ctx, checksum_after_op1);
14060 htab_empty (ht);
14062 if (memcmp (checksum_before_op1, checksum_after_op1, 16))
14063 fold_check_failed (op1, tem);
14065 md5_init_ctx (&ctx);
14066 fold_checksum_tree (op2, &ctx, ht);
14067 md5_finish_ctx (&ctx, checksum_after_op2);
14068 htab_delete (ht);
14070 if (memcmp (checksum_before_op2, checksum_after_op2, 16))
14071 fold_check_failed (op2, tem);
14072 #endif
14073 return tem;
14076 /* Fold a CALL_EXPR expression of type TYPE which calls FN with the
14077 NARGS arguments in ARGARRAY, and a null static chain.
14078 Return a folded expression if successful. Otherwise, return a CALL_EXPR
14079 of type TYPE from the given operands as constructed by build_call_array. */
14081 tree
14082 fold_build_call_array_loc (location_t loc, tree type, tree fn,
14083 int nargs, tree *argarray)
14085 tree tem;
14086 #ifdef ENABLE_FOLD_CHECKING
14087 unsigned char checksum_before_fn[16],
14088 checksum_before_arglist[16],
14089 checksum_after_fn[16],
14090 checksum_after_arglist[16];
14091 struct md5_ctx ctx;
14092 htab_t ht;
14093 int i;
14095 ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
14096 md5_init_ctx (&ctx);
14097 fold_checksum_tree (fn, &ctx, ht);
14098 md5_finish_ctx (&ctx, checksum_before_fn);
14099 htab_empty (ht);
14101 md5_init_ctx (&ctx);
14102 for (i = 0; i < nargs; i++)
14103 fold_checksum_tree (argarray[i], &ctx, ht);
14104 md5_finish_ctx (&ctx, checksum_before_arglist);
14105 htab_empty (ht);
14106 #endif
14108 tem = fold_builtin_call_array (loc, type, fn, nargs, argarray);
14110 #ifdef ENABLE_FOLD_CHECKING
14111 md5_init_ctx (&ctx);
14112 fold_checksum_tree (fn, &ctx, ht);
14113 md5_finish_ctx (&ctx, checksum_after_fn);
14114 htab_empty (ht);
14116 if (memcmp (checksum_before_fn, checksum_after_fn, 16))
14117 fold_check_failed (fn, tem);
14119 md5_init_ctx (&ctx);
14120 for (i = 0; i < nargs; i++)
14121 fold_checksum_tree (argarray[i], &ctx, ht);
14122 md5_finish_ctx (&ctx, checksum_after_arglist);
14123 htab_delete (ht);
14125 if (memcmp (checksum_before_arglist, checksum_after_arglist, 16))
14126 fold_check_failed (NULL_TREE, tem);
14127 #endif
14128 return tem;
14131 /* Perform constant folding and related simplification of initializer
14132 expression EXPR. These behave identically to "fold_buildN" but ignore
14133 potential run-time traps and exceptions that fold must preserve. */
14135 #define START_FOLD_INIT \
14136 int saved_signaling_nans = flag_signaling_nans;\
14137 int saved_trapping_math = flag_trapping_math;\
14138 int saved_rounding_math = flag_rounding_math;\
14139 int saved_trapv = flag_trapv;\
14140 int saved_folding_initializer = folding_initializer;\
14141 flag_signaling_nans = 0;\
14142 flag_trapping_math = 0;\
14143 flag_rounding_math = 0;\
14144 flag_trapv = 0;\
14145 folding_initializer = 1;
14147 #define END_FOLD_INIT \
14148 flag_signaling_nans = saved_signaling_nans;\
14149 flag_trapping_math = saved_trapping_math;\
14150 flag_rounding_math = saved_rounding_math;\
14151 flag_trapv = saved_trapv;\
14152 folding_initializer = saved_folding_initializer;
14154 tree
14155 fold_build1_initializer_loc (location_t loc, enum tree_code code,
14156 tree type, tree op)
14158 tree result;
14159 START_FOLD_INIT;
14161 result = fold_build1_loc (loc, code, type, op);
14163 END_FOLD_INIT;
14164 return result;
14167 tree
14168 fold_build2_initializer_loc (location_t loc, enum tree_code code,
14169 tree type, tree op0, tree op1)
14171 tree result;
14172 START_FOLD_INIT;
14174 result = fold_build2_loc (loc, code, type, op0, op1);
14176 END_FOLD_INIT;
14177 return result;
14180 tree
14181 fold_build3_initializer_loc (location_t loc, enum tree_code code,
14182 tree type, tree op0, tree op1, tree op2)
14184 tree result;
14185 START_FOLD_INIT;
14187 result = fold_build3_loc (loc, code, type, op0, op1, op2);
14189 END_FOLD_INIT;
14190 return result;
14193 tree
14194 fold_build_call_array_initializer_loc (location_t loc, tree type, tree fn,
14195 int nargs, tree *argarray)
14197 tree result;
14198 START_FOLD_INIT;
14200 result = fold_build_call_array_loc (loc, type, fn, nargs, argarray);
14202 END_FOLD_INIT;
14203 return result;
14206 #undef START_FOLD_INIT
14207 #undef END_FOLD_INIT
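/* Usage sketch (illustrative): when folding a static initializer such
   as

       static double d = 1.0 / 3.0;

   a front end can call

       fold_build2_initializer_loc (loc, RDIV_EXPR, double_type_node,
                                    one, three);

   (ONE and THREE being the REAL_CST operands), and the division is
   folded even under -frounding-math or -ftrapping-math, since those
   flags are temporarily cleared by START_FOLD_INIT above.  */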
14209 /* Determine if the first argument is a multiple of the second argument.
14210 Return 0 if it is not, or if we cannot easily determine that it is.
14212 An example of the sort of thing we care about (at this point; this routine
14213 could surely be made more general, and expanded to do what the *_DIV_EXPR's
14214 fold cases do now) is discovering that
14216 SAVE_EXPR (I) * SAVE_EXPR (J * 8)
14218 is a multiple of
14220 SAVE_EXPR (J * 8)
14222 when we know that the two SAVE_EXPR (J * 8) nodes are the same node.
14224 This code also handles discovering that
14226 SAVE_EXPR (I) * SAVE_EXPR (J * 8)
14228 is a multiple of 8 so we don't have to worry about dealing with a
14229 possible remainder.
14231 Note that we *look* inside a SAVE_EXPR only to determine how it was
14232 calculated; it is not safe for fold to do much of anything else with the
14233 internals of a SAVE_EXPR, since it cannot know when it will be evaluated
14234 at run time. For example, the latter example above *cannot* be implemented
14235 as SAVE_EXPR (I) * J or any variant thereof, since the value of J at
14236 evaluation time of the original SAVE_EXPR is not necessarily the same at
14237 the time the new expression is evaluated. The only optimization of this
14238 sort that would be valid is changing
14240 SAVE_EXPR (I) * SAVE_EXPR (SAVE_EXPR (J) * 8)
14242 divided by 8 to
14244 SAVE_EXPR (I) * SAVE_EXPR (J)
14246 (where the same SAVE_EXPR (J) is used in the original and the
14247 transformed version). */
14249 static int
14250 multiple_of_p (tree type, const_tree top, const_tree bottom)
14252 if (operand_equal_p (top, bottom, 0))
14253 return 1;
14255 if (TREE_CODE (type) != INTEGER_TYPE)
14256 return 0;
14258 switch (TREE_CODE (top))
14260 case BIT_AND_EXPR:
14261 /* Bitwise and provides a power of two multiple. If the mask is
14262 a multiple of BOTTOM then TOP is a multiple of BOTTOM. */
14263 if (!integer_pow2p (bottom))
14264 return 0;
14265 /* FALLTHRU */
14267 case MULT_EXPR:
14268 return (multiple_of_p (type, TREE_OPERAND (top, 0), bottom)
14269 || multiple_of_p (type, TREE_OPERAND (top, 1), bottom));
14271 case PLUS_EXPR:
14272 case MINUS_EXPR:
14273 return (multiple_of_p (type, TREE_OPERAND (top, 0), bottom)
14274 && multiple_of_p (type, TREE_OPERAND (top, 1), bottom));
14276 case LSHIFT_EXPR:
14277 if (TREE_CODE (TREE_OPERAND (top, 1)) == INTEGER_CST)
14279 tree op1, t1;
14281 op1 = TREE_OPERAND (top, 1);
14282 /* const_binop may not detect overflow correctly,
14283 so check for it explicitly here. */
14284 if (TYPE_PRECISION (TREE_TYPE (size_one_node))
14285 > TREE_INT_CST_LOW (op1)
14286 && TREE_INT_CST_HIGH (op1) == 0
14287 && 0 != (t1 = fold_convert (type,
14288 const_binop (LSHIFT_EXPR,
14289 size_one_node,
14290 op1)))
14291 && !TREE_OVERFLOW (t1))
14292 return multiple_of_p (type, t1, bottom);
14294 return 0;
14296 case NOP_EXPR:
14297 /* Can't handle conversions from non-integral or wider integral type. */
14298 if ((TREE_CODE (TREE_TYPE (TREE_OPERAND (top, 0))) != INTEGER_TYPE)
14299 || (TYPE_PRECISION (type)
14300 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (top, 0)))))
14301 return 0;
14303 /* ... fall through ...  */
14305 case SAVE_EXPR:
14306 return multiple_of_p (type, TREE_OPERAND (top, 0), bottom);
14308 case COND_EXPR:
14309 return (multiple_of_p (type, TREE_OPERAND (top, 1), bottom)
14310 && multiple_of_p (type, TREE_OPERAND (top, 2), bottom));
14312 case INTEGER_CST:
14313 if (TREE_CODE (bottom) != INTEGER_CST
14314 || integer_zerop (bottom)
14315 || (TYPE_UNSIGNED (type)
14316 && (tree_int_cst_sgn (top) < 0
14317 || tree_int_cst_sgn (bottom) < 0)))
14318 return 0;
14319 return integer_zerop (int_const_binop (TRUNC_MOD_EXPR,
14320 top, bottom, 0));
14322 default:
14323 return 0;
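/* Worked example (illustrative): for TOP = J * 8 + I * 24 and
   BOTTOM = 8, the PLUS_EXPR case requires both addends to be
   multiples of 8; each MULT_EXPR case then succeeds because one
   factor (the INTEGER_CST 8, respectively 24) leaves no remainder
   modulo 8, so multiple_of_p returns 1.  */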
14327 /* Return true if an expression with code CODE and type TYPE is known to be non-negative.  */
14329 static bool
14330 tree_simple_nonnegative_warnv_p (enum tree_code code, tree type)
14332 if ((TYPE_PRECISION (type) != 1 || TYPE_UNSIGNED (type))
14333 && truth_value_p (code))
14334 /* Truth values evaluate to 0 or 1, which is nonnegative unless we
14335 have a signed:1 type (where the values are -1 and 0).  */
14336 return true;
14337 return false;
14340 /* Return true if (CODE OP0) is known to be non-negative. If the return
14341 value is based on the assumption that signed overflow is undefined,
14342 set *STRICT_OVERFLOW_P to true; otherwise, don't change
14343 *STRICT_OVERFLOW_P. */
14345 bool
14346 tree_unary_nonnegative_warnv_p (enum tree_code code, tree type, tree op0,
14347 bool *strict_overflow_p)
14349 if (TYPE_UNSIGNED (type))
14350 return true;
14352 switch (code)
14354 case ABS_EXPR:
14355 /* We can't return 1 if flag_wrapv is set because
14356 ABS_EXPR<INT_MIN> = INT_MIN. */
14357 if (!INTEGRAL_TYPE_P (type))
14358 return true;
14359 if (TYPE_OVERFLOW_UNDEFINED (type))
14361 *strict_overflow_p = true;
14362 return true;
14364 break;
14366 case NON_LVALUE_EXPR:
14367 case FLOAT_EXPR:
14368 case FIX_TRUNC_EXPR:
14369 return tree_expr_nonnegative_warnv_p (op0,
14370 strict_overflow_p);
14372 case NOP_EXPR:
14374 tree inner_type = TREE_TYPE (op0);
14375 tree outer_type = type;
14377 if (TREE_CODE (outer_type) == REAL_TYPE)
14379 if (TREE_CODE (inner_type) == REAL_TYPE)
14380 return tree_expr_nonnegative_warnv_p (op0,
14381 strict_overflow_p);
14382 if (TREE_CODE (inner_type) == INTEGER_TYPE)
14384 if (TYPE_UNSIGNED (inner_type))
14385 return true;
14386 return tree_expr_nonnegative_warnv_p (op0,
14387 strict_overflow_p);
14390 else if (TREE_CODE (outer_type) == INTEGER_TYPE)
14392 if (TREE_CODE (inner_type) == REAL_TYPE)
14393 return tree_expr_nonnegative_warnv_p (op0,
14394 strict_overflow_p);
14395 if (TREE_CODE (inner_type) == INTEGER_TYPE)
14396 return TYPE_PRECISION (inner_type) < TYPE_PRECISION (outer_type)
14397 && TYPE_UNSIGNED (inner_type);
14400 break;
14402 default:
14403 return tree_simple_nonnegative_warnv_p (code, type);
14407 /* We don't know the sign of `t', so be conservative and return false.  */
14407 return false;
14410 /* Return true if (CODE OP0 OP1) is known to be non-negative. If the return
14411 value is based on the assumption that signed overflow is undefined,
14412 set *STRICT_OVERFLOW_P to true; otherwise, don't change
14413 *STRICT_OVERFLOW_P. */
14415 bool
14416 tree_binary_nonnegative_warnv_p (enum tree_code code, tree type, tree op0,
14417 tree op1, bool *strict_overflow_p)
14419 if (TYPE_UNSIGNED (type))
14420 return true;
14422 switch (code)
14424 case POINTER_PLUS_EXPR:
14425 case PLUS_EXPR:
14426 if (FLOAT_TYPE_P (type))
14427 return (tree_expr_nonnegative_warnv_p (op0,
14428 strict_overflow_p)
14429 && tree_expr_nonnegative_warnv_p (op1,
14430 strict_overflow_p));
14432 /* zero_extend(x) + zero_extend(y) is non-negative if x and y are
14433 both unsigned and at least 2 bits shorter than the result. */
14434 if (TREE_CODE (type) == INTEGER_TYPE
14435 && TREE_CODE (op0) == NOP_EXPR
14436 && TREE_CODE (op1) == NOP_EXPR)
14438 tree inner1 = TREE_TYPE (TREE_OPERAND (op0, 0));
14439 tree inner2 = TREE_TYPE (TREE_OPERAND (op1, 0));
14440 if (TREE_CODE (inner1) == INTEGER_TYPE && TYPE_UNSIGNED (inner1)
14441 && TREE_CODE (inner2) == INTEGER_TYPE && TYPE_UNSIGNED (inner2))
14443 unsigned int prec = MAX (TYPE_PRECISION (inner1),
14444 TYPE_PRECISION (inner2)) + 1;
14445 return prec < TYPE_PRECISION (type);
14448 break;
14450 case MULT_EXPR:
14451 if (FLOAT_TYPE_P (type))
14453 /* x * x for floating point x is always non-negative. */
14454 if (operand_equal_p (op0, op1, 0))
14455 return true;
14456 return (tree_expr_nonnegative_warnv_p (op0,
14457 strict_overflow_p)
14458 && tree_expr_nonnegative_warnv_p (op1,
14459 strict_overflow_p));
14462 /* zero_extend(x) * zero_extend(y) is non-negative if x and y are
14463 both unsigned and their combined precision is less than that of the result.  */
14464 if (TREE_CODE (type) == INTEGER_TYPE
14465 && (TREE_CODE (op0) == NOP_EXPR || TREE_CODE (op0) == INTEGER_CST)
14466 && (TREE_CODE (op1) == NOP_EXPR || TREE_CODE (op1) == INTEGER_CST))
14468 tree inner0 = (TREE_CODE (op0) == NOP_EXPR)
14469 ? TREE_TYPE (TREE_OPERAND (op0, 0))
14470 : TREE_TYPE (op0);
14471 tree inner1 = (TREE_CODE (op1) == NOP_EXPR)
14472 ? TREE_TYPE (TREE_OPERAND (op1, 0))
14473 : TREE_TYPE (op1);
14475 bool unsigned0 = TYPE_UNSIGNED (inner0);
14476 bool unsigned1 = TYPE_UNSIGNED (inner1);
14478 if (TREE_CODE (op0) == INTEGER_CST)
14479 unsigned0 = unsigned0 || tree_int_cst_sgn (op0) >= 0;
14481 if (TREE_CODE (op1) == INTEGER_CST)
14482 unsigned1 = unsigned1 || tree_int_cst_sgn (op1) >= 0;
14484 if (TREE_CODE (inner0) == INTEGER_TYPE && unsigned0
14485 && TREE_CODE (inner1) == INTEGER_TYPE && unsigned1)
14487 unsigned int precision0 = (TREE_CODE (op0) == INTEGER_CST)
14488 ? tree_int_cst_min_precision (op0, /*unsignedp=*/true)
14489 : TYPE_PRECISION (inner0);
14491 unsigned int precision1 = (TREE_CODE (op1) == INTEGER_CST)
14492 ? tree_int_cst_min_precision (op1, /*unsignedp=*/true)
14493 : TYPE_PRECISION (inner1);
14495 return precision0 + precision1 < TYPE_PRECISION (type);
14498 return false;
14500 case BIT_AND_EXPR:
14501 case MAX_EXPR:
14502 return (tree_expr_nonnegative_warnv_p (op0,
14503 strict_overflow_p)
14504 || tree_expr_nonnegative_warnv_p (op1,
14505 strict_overflow_p));
14507 case BIT_IOR_EXPR:
14508 case BIT_XOR_EXPR:
14509 case MIN_EXPR:
14510 case RDIV_EXPR:
14511 case TRUNC_DIV_EXPR:
14512 case CEIL_DIV_EXPR:
14513 case FLOOR_DIV_EXPR:
14514 case ROUND_DIV_EXPR:
14515 return (tree_expr_nonnegative_warnv_p (op0,
14516 strict_overflow_p)
14517 && tree_expr_nonnegative_warnv_p (op1,
14518 strict_overflow_p));
14520 case TRUNC_MOD_EXPR:
14521 case CEIL_MOD_EXPR:
14522 case FLOOR_MOD_EXPR:
14523 case ROUND_MOD_EXPR:
14524 return tree_expr_nonnegative_warnv_p (op0,
14525 strict_overflow_p);
14526 default:
14527 return tree_simple_nonnegative_warnv_p (code, type);
14530 /* We don't know the sign of `t', so be conservative and return false.  */
14531 return false;
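/* Worked example (illustrative): for
   (int) (unsigned char) a + (int) (unsigned char) b, both inner
   types have precision 8, so prec = 8 + 1 = 9 < 32 and the PLUS_EXPR
   case above reports the sum non-negative: two zero-extended 8-bit
   values total at most 255 + 255 = 510, well below INT_MAX.  */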
14534 /* Return true if T is known to be non-negative. If the return
14535 value is based on the assumption that signed overflow is undefined,
14536 set *STRICT_OVERFLOW_P to true; otherwise, don't change
14537 *STRICT_OVERFLOW_P. */
14539 bool
14540 tree_single_nonnegative_warnv_p (tree t, bool *strict_overflow_p)
14542 if (TYPE_UNSIGNED (TREE_TYPE (t)))
14543 return true;
14545 switch (TREE_CODE (t))
14547 case INTEGER_CST:
14548 return tree_int_cst_sgn (t) >= 0;
14550 case REAL_CST:
14551 return ! REAL_VALUE_NEGATIVE (TREE_REAL_CST (t));
14553 case FIXED_CST:
14554 return ! FIXED_VALUE_NEGATIVE (TREE_FIXED_CST (t));
14556 case COND_EXPR:
14557 return (tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 1),
14558 strict_overflow_p)
14559 && tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 2),
14560 strict_overflow_p));
14561 default:
14562 return tree_simple_nonnegative_warnv_p (TREE_CODE (t),
14563 TREE_TYPE (t));
14565 /* We don't know the sign of `t', so be conservative and return false.  */
14566 return false;
14569 /* Return true if T is known to be non-negative. If the return
14570 value is based on the assumption that signed overflow is undefined,
14571 set *STRICT_OVERFLOW_P to true; otherwise, don't change
14572 *STRICT_OVERFLOW_P. */
14574 bool
14575 tree_call_nonnegative_warnv_p (tree type, tree fndecl,
14576 tree arg0, tree arg1, bool *strict_overflow_p)
14578 if (fndecl && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL)
14579 switch (DECL_FUNCTION_CODE (fndecl))
14581 CASE_FLT_FN (BUILT_IN_ACOS):
14582 CASE_FLT_FN (BUILT_IN_ACOSH):
14583 CASE_FLT_FN (BUILT_IN_CABS):
14584 CASE_FLT_FN (BUILT_IN_COSH):
14585 CASE_FLT_FN (BUILT_IN_ERFC):
14586 CASE_FLT_FN (BUILT_IN_EXP):
14587 CASE_FLT_FN (BUILT_IN_EXP10):
14588 CASE_FLT_FN (BUILT_IN_EXP2):
14589 CASE_FLT_FN (BUILT_IN_FABS):
14590 CASE_FLT_FN (BUILT_IN_FDIM):
14591 CASE_FLT_FN (BUILT_IN_HYPOT):
14592 CASE_FLT_FN (BUILT_IN_POW10):
14593 CASE_INT_FN (BUILT_IN_FFS):
14594 CASE_INT_FN (BUILT_IN_PARITY):
14595 CASE_INT_FN (BUILT_IN_POPCOUNT):
14596 case BUILT_IN_BSWAP32:
14597 case BUILT_IN_BSWAP64:
14598 /* Always true. */
14599 return true;
14601 CASE_FLT_FN (BUILT_IN_SQRT):
14602 /* sqrt(-0.0) is -0.0. */
14603 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (type)))
14604 return true;
14605 return tree_expr_nonnegative_warnv_p (arg0,
14606 strict_overflow_p);
14608 CASE_FLT_FN (BUILT_IN_ASINH):
14609 CASE_FLT_FN (BUILT_IN_ATAN):
14610 CASE_FLT_FN (BUILT_IN_ATANH):
14611 CASE_FLT_FN (BUILT_IN_CBRT):
14612 CASE_FLT_FN (BUILT_IN_CEIL):
14613 CASE_FLT_FN (BUILT_IN_ERF):
14614 CASE_FLT_FN (BUILT_IN_EXPM1):
14615 CASE_FLT_FN (BUILT_IN_FLOOR):
14616 CASE_FLT_FN (BUILT_IN_FMOD):
14617 CASE_FLT_FN (BUILT_IN_FREXP):
14618 CASE_FLT_FN (BUILT_IN_LCEIL):
14619 CASE_FLT_FN (BUILT_IN_LDEXP):
14620 CASE_FLT_FN (BUILT_IN_LFLOOR):
14621 CASE_FLT_FN (BUILT_IN_LLCEIL):
14622 CASE_FLT_FN (BUILT_IN_LLFLOOR):
14623 CASE_FLT_FN (BUILT_IN_LLRINT):
14624 CASE_FLT_FN (BUILT_IN_LLROUND):
14625 CASE_FLT_FN (BUILT_IN_LRINT):
14626 CASE_FLT_FN (BUILT_IN_LROUND):
14627 CASE_FLT_FN (BUILT_IN_MODF):
14628 CASE_FLT_FN (BUILT_IN_NEARBYINT):
14629 CASE_FLT_FN (BUILT_IN_RINT):
14630 CASE_FLT_FN (BUILT_IN_ROUND):
14631 CASE_FLT_FN (BUILT_IN_SCALB):
14632 CASE_FLT_FN (BUILT_IN_SCALBLN):
14633 CASE_FLT_FN (BUILT_IN_SCALBN):
14634 CASE_FLT_FN (BUILT_IN_SIGNBIT):
14635 CASE_FLT_FN (BUILT_IN_SIGNIFICAND):
14636 CASE_FLT_FN (BUILT_IN_SINH):
14637 CASE_FLT_FN (BUILT_IN_TANH):
14638 CASE_FLT_FN (BUILT_IN_TRUNC):
14639 /* True if the 1st argument is nonnegative. */
14640 return tree_expr_nonnegative_warnv_p (arg0,
14641 strict_overflow_p);
14643 CASE_FLT_FN (BUILT_IN_FMAX):
14644 /* True if the 1st OR 2nd arguments are nonnegative. */
14645 return (tree_expr_nonnegative_warnv_p (arg0,
14646 strict_overflow_p)
14647 || (tree_expr_nonnegative_warnv_p (arg1,
14648 strict_overflow_p)));
14650 CASE_FLT_FN (BUILT_IN_FMIN):
14651 /* True if the 1st AND 2nd arguments are nonnegative. */
14652 return (tree_expr_nonnegative_warnv_p (arg0,
14653 strict_overflow_p)
14654 && (tree_expr_nonnegative_warnv_p (arg1,
14655 strict_overflow_p)));
14657 CASE_FLT_FN (BUILT_IN_COPYSIGN):
14658 /* True if the 2nd argument is nonnegative. */
14659 return tree_expr_nonnegative_warnv_p (arg1,
14660 strict_overflow_p);
14662 CASE_FLT_FN (BUILT_IN_POWI):
14663 /* True if the 1st argument is nonnegative or the second
14664 argument is an even integer. */
14665 if (TREE_CODE (arg1) == INTEGER_CST
14666 && (TREE_INT_CST_LOW (arg1) & 1) == 0)
14667 return true;
14668 return tree_expr_nonnegative_warnv_p (arg0,
14669 strict_overflow_p);
14671 CASE_FLT_FN (BUILT_IN_POW):
14672 /* True if the 1st argument is nonnegative or the second
14673 argument is an even integer valued real. */
14674 if (TREE_CODE (arg1) == REAL_CST)
14676 REAL_VALUE_TYPE c;
14677 HOST_WIDE_INT n;
14679 c = TREE_REAL_CST (arg1);
14680 n = real_to_integer (&c);
14681 if ((n & 1) == 0)
14683 REAL_VALUE_TYPE cint;
14684 real_from_integer (&cint, VOIDmode, n,
14685 n < 0 ? -1 : 0, 0);
14686 if (real_identical (&c, &cint))
14687 return true;
14690 return tree_expr_nonnegative_warnv_p (arg0,
14691 strict_overflow_p);
14693 default:
14694 break;
14696 return tree_simple_nonnegative_warnv_p (CALL_EXPR,
14697 type);
14700 /* Return true if T is known to be non-negative. If the return
14701 value is based on the assumption that signed overflow is undefined,
14702 set *STRICT_OVERFLOW_P to true; otherwise, don't change
14703 *STRICT_OVERFLOW_P. */
14705 bool
14706 tree_invalid_nonnegative_warnv_p (tree t, bool *strict_overflow_p)
14708 enum tree_code code = TREE_CODE (t);
14709 if (TYPE_UNSIGNED (TREE_TYPE (t)))
14710 return true;
14712 switch (code)
14714 case TARGET_EXPR:
14716 tree temp = TARGET_EXPR_SLOT (t);
14717 t = TARGET_EXPR_INITIAL (t);
14719 /* If the initializer is non-void, then it's a normal expression
14720 that will be assigned to the slot. */
14721 if (!VOID_TYPE_P (t))
14722 return tree_expr_nonnegative_warnv_p (t, strict_overflow_p);
14724 /* Otherwise, the initializer sets the slot in some way. One common
14725 way is an assignment statement at the end of the initializer. */
14726 while (1)
14728 if (TREE_CODE (t) == BIND_EXPR)
14729 t = expr_last (BIND_EXPR_BODY (t));
14730 else if (TREE_CODE (t) == TRY_FINALLY_EXPR
14731 || TREE_CODE (t) == TRY_CATCH_EXPR)
14732 t = expr_last (TREE_OPERAND (t, 0));
14733 else if (TREE_CODE (t) == STATEMENT_LIST)
14734 t = expr_last (t);
14735 else
14736 break;
14738 if (TREE_CODE (t) == MODIFY_EXPR
14739 && TREE_OPERAND (t, 0) == temp)
14740 return tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 1),
14741 strict_overflow_p);
14743 return false;
14746 case CALL_EXPR:
14748 tree arg0 = call_expr_nargs (t) > 0 ? CALL_EXPR_ARG (t, 0) : NULL_TREE;
14749 tree arg1 = call_expr_nargs (t) > 1 ? CALL_EXPR_ARG (t, 1) : NULL_TREE;
14751 return tree_call_nonnegative_warnv_p (TREE_TYPE (t),
14752 get_callee_fndecl (t),
14753 arg0,
14754 arg1,
14755 strict_overflow_p);
14757 case COMPOUND_EXPR:
14758 case MODIFY_EXPR:
14759 return tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 1),
14760 strict_overflow_p);
14761 case BIND_EXPR:
14762 return tree_expr_nonnegative_warnv_p (expr_last (TREE_OPERAND (t, 1)),
14763 strict_overflow_p);
14764 case SAVE_EXPR:
14765 return tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 0),
14766 strict_overflow_p);
14768 default:
14769 return tree_simple_nonnegative_warnv_p (TREE_CODE (t),
14770 TREE_TYPE (t));
14773 /* We don't know the sign of `t', so be conservative and return false.  */
14774 return false;
14777 /* Return true if T is known to be non-negative. If the return
14778 value is based on the assumption that signed overflow is undefined,
14779 set *STRICT_OVERFLOW_P to true; otherwise, don't change
14780 *STRICT_OVERFLOW_P. */
14782 bool
14783 tree_expr_nonnegative_warnv_p (tree t, bool *strict_overflow_p)
14785 enum tree_code code;
14786 if (t == error_mark_node)
14787 return false;
14789 code = TREE_CODE (t);
14790 switch (TREE_CODE_CLASS (code))
14792 case tcc_binary:
14793 case tcc_comparison:
14794 return tree_binary_nonnegative_warnv_p (TREE_CODE (t),
14795 TREE_TYPE (t),
14796 TREE_OPERAND (t, 0),
14797 TREE_OPERAND (t, 1),
14798 strict_overflow_p);
14800 case tcc_unary:
14801 return tree_unary_nonnegative_warnv_p (TREE_CODE (t),
14802 TREE_TYPE (t),
14803 TREE_OPERAND (t, 0),
14804 strict_overflow_p);
14806 case tcc_constant:
14807 case tcc_declaration:
14808 case tcc_reference:
14809 return tree_single_nonnegative_warnv_p (t, strict_overflow_p);
14811 default:
14812 break;
14815 switch (code)
14817 case TRUTH_AND_EXPR:
14818 case TRUTH_OR_EXPR:
14819 case TRUTH_XOR_EXPR:
14820 return tree_binary_nonnegative_warnv_p (TREE_CODE (t),
14821 TREE_TYPE (t),
14822 TREE_OPERAND (t, 0),
14823 TREE_OPERAND (t, 1),
14824 strict_overflow_p);
14825 case TRUTH_NOT_EXPR:
14826 return tree_unary_nonnegative_warnv_p (TREE_CODE (t),
14827 TREE_TYPE (t),
14828 TREE_OPERAND (t, 0),
14829 strict_overflow_p);
14831 case COND_EXPR:
14832 case CONSTRUCTOR:
14833 case OBJ_TYPE_REF:
14834 case ASSERT_EXPR:
14835 case ADDR_EXPR:
14836 case WITH_SIZE_EXPR:
14837 case SSA_NAME:
14838 return tree_single_nonnegative_warnv_p (t, strict_overflow_p);
14840 default:
14841 return tree_invalid_nonnegative_warnv_p (t, strict_overflow_p);
14845 /* Return true if `t' is known to be non-negative. Handle warnings
14846 about undefined signed overflow. */
14848 bool
14849 tree_expr_nonnegative_p (tree t)
14851 bool ret, strict_overflow_p;
14853 strict_overflow_p = false;
14854 ret = tree_expr_nonnegative_warnv_p (t, &strict_overflow_p);
14855 if (strict_overflow_p)
14856 fold_overflow_warning (("assuming signed overflow does not occur when "
14857 "determining that expression is always "
14858 "non-negative"),
14859 WARN_STRICT_OVERFLOW_MISC);
14860 return ret;
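/* Usage sketch (illustrative): predicates like this let fold simplify,
   for instance, ABS_EXPR <x> to plain x once x is known non-negative,
   while any reliance on undefined signed overflow is funneled through
   fold_overflow_warning above.  */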
14864 /* Return true when (CODE OP0) is an address and is known to be nonzero.
14865 For floating point we further ensure that T is not denormal.
14866 Similar logic is present in nonzero_address_p in rtlanal.c.
14868 If the return value is based on the assumption that signed overflow
14869 is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
14870 change *STRICT_OVERFLOW_P. */
14872 bool
14873 tree_unary_nonzero_warnv_p (enum tree_code code, tree type, tree op0,
14874 bool *strict_overflow_p)
14876 switch (code)
14878 case ABS_EXPR:
14879 return tree_expr_nonzero_warnv_p (op0,
14880 strict_overflow_p);
14882 case NOP_EXPR:
14884 tree inner_type = TREE_TYPE (op0);
14885 tree outer_type = type;
14887 return (TYPE_PRECISION (outer_type) >= TYPE_PRECISION (inner_type)
14888 && tree_expr_nonzero_warnv_p (op0,
14889 strict_overflow_p));
14891 break;
14893 case NON_LVALUE_EXPR:
14894 return tree_expr_nonzero_warnv_p (op0,
14895 strict_overflow_p);
14897 default:
14898 break;
14901 return false;
14904 /* Return true when (CODE OP0 OP1) is an address and is known to be nonzero.
14905 For floating point we further ensure that T is not denormal.
14906 Similar logic is present in nonzero_address_p in rtlanal.c.
14908 If the return value is based on the assumption that signed overflow
14909 is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
14910 change *STRICT_OVERFLOW_P. */
14912 bool
14913 tree_binary_nonzero_warnv_p (enum tree_code code,
14914 tree type,
14915 tree op0,
14916 tree op1, bool *strict_overflow_p)
14918 bool sub_strict_overflow_p;
14919 switch (code)
14921 case POINTER_PLUS_EXPR:
14922 case PLUS_EXPR:
14923 if (TYPE_OVERFLOW_UNDEFINED (type))
14925 /* With the presence of negative values it is hard
14926 to say something. */
14927 sub_strict_overflow_p = false;
14928 if (!tree_expr_nonnegative_warnv_p (op0,
14929 &sub_strict_overflow_p)
14930 || !tree_expr_nonnegative_warnv_p (op1,
14931 &sub_strict_overflow_p))
14932 return false;
14933 /* One of the operands must be positive and the other non-negative.  */
14934 /* We don't set *STRICT_OVERFLOW_P here: even if this value
14935 overflows, on a twos-complement machine the sum of two
14936 nonnegative numbers can never be zero. */
14937 return (tree_expr_nonzero_warnv_p (op0,
14938 strict_overflow_p)
14939 || tree_expr_nonzero_warnv_p (op1,
14940 strict_overflow_p));
14942 break;
14944 case MULT_EXPR:
14945 if (TYPE_OVERFLOW_UNDEFINED (type))
14947 if (tree_expr_nonzero_warnv_p (op0,
14948 strict_overflow_p)
14949 && tree_expr_nonzero_warnv_p (op1,
14950 strict_overflow_p))
14952 *strict_overflow_p = true;
14953 return true;
14956 break;
14958 case MIN_EXPR:
14959 sub_strict_overflow_p = false;
14960 if (tree_expr_nonzero_warnv_p (op0,
14961 &sub_strict_overflow_p)
14962 && tree_expr_nonzero_warnv_p (op1,
14963 &sub_strict_overflow_p))
14965 if (sub_strict_overflow_p)
14966 *strict_overflow_p = true;
14968 break;
14970 case MAX_EXPR:
14971 sub_strict_overflow_p = false;
14972 if (tree_expr_nonzero_warnv_p (op0,
14973 &sub_strict_overflow_p))
14975 if (sub_strict_overflow_p)
14976 *strict_overflow_p = true;
14978 /* When both operands are nonzero, then MAX must be too. */
14979 if (tree_expr_nonzero_warnv_p (op1,
14980 strict_overflow_p))
14981 return true;
14983 /* MAX where operand 0 is positive is positive. */
14984 return tree_expr_nonnegative_warnv_p (op0,
14985 strict_overflow_p);
14987 /* MAX where operand 1 is positive is positive. */
14988 else if (tree_expr_nonzero_warnv_p (op1,
14989 &sub_strict_overflow_p)
14990 && tree_expr_nonnegative_warnv_p (op1,
14991 &sub_strict_overflow_p))
14993 if (sub_strict_overflow_p)
14994 *strict_overflow_p = true;
14995 return true;
14997 break;
14999 case BIT_IOR_EXPR:
15000 return (tree_expr_nonzero_warnv_p (op1,
15001 strict_overflow_p)
15002 || tree_expr_nonzero_warnv_p (op0,
15003 strict_overflow_p));
15005 default:
15006 break;
15009 return false;
15012 /* Return true when T is an address and is known to be nonzero.
15013 For floating point we further ensure that T is not denormal.
15014 Similar logic is present in nonzero_address_p in rtlanal.c.
15016 If the return value is based on the assumption that signed overflow
15017 is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
15018 change *STRICT_OVERFLOW_P. */
15020 bool
15021 tree_single_nonzero_warnv_p (tree t, bool *strict_overflow_p)
15023 bool sub_strict_overflow_p;
15024 switch (TREE_CODE (t))
15026 case INTEGER_CST:
15027 return !integer_zerop (t);
15029 case ADDR_EXPR:
15031 tree base = TREE_OPERAND (t, 0);
15032 if (!DECL_P (base))
15033 base = get_base_address (base);
15035 if (!base)
15036 return false;
15038 /* Weak declarations may link to NULL. Other things may also be NULL
15039 so protect with -fdelete-null-pointer-checks; but not variables
15040 allocated on the stack. */
15041 if (DECL_P (base)
15042 && (flag_delete_null_pointer_checks
15043 || (DECL_CONTEXT (base)
15044 && TREE_CODE (DECL_CONTEXT (base)) == FUNCTION_DECL
15045 && auto_var_in_fn_p (base, DECL_CONTEXT (base)))))
15046 return !VAR_OR_FUNCTION_DECL_P (base) || !DECL_WEAK (base);
15048 /* Constants are never weak. */
15049 if (CONSTANT_CLASS_P (base))
15050 return true;
15052 return false;
15055 case COND_EXPR:
15056 sub_strict_overflow_p = false;
15057 if (tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 1),
15058 &sub_strict_overflow_p)
15059 && tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 2),
15060 &sub_strict_overflow_p))
15062 if (sub_strict_overflow_p)
15063 *strict_overflow_p = true;
15064 return true;
15066 break;
15068 default:
15069 break;
15071 return false;
15074 /* Return true when T is an address and is known to be nonzero.
15075 For floating point we further ensure that T is not denormal.
15076 Similar logic is present in nonzero_address_p in rtlanal.c.
15078 If the return value is based on the assumption that signed overflow
15079 is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
15080 change *STRICT_OVERFLOW_P. */
15082 bool
15083 tree_expr_nonzero_warnv_p (tree t, bool *strict_overflow_p)
15085 tree type = TREE_TYPE (t);
15086 enum tree_code code;
15088 /* Doing something useful for floating point would need more work. */
15089 if (!INTEGRAL_TYPE_P (type) && !POINTER_TYPE_P (type))
15090 return false;
15092 code = TREE_CODE (t);
15093 switch (TREE_CODE_CLASS (code))
15095 case tcc_unary:
15096 return tree_unary_nonzero_warnv_p (code, type, TREE_OPERAND (t, 0),
15097 strict_overflow_p);
15098 case tcc_binary:
15099 case tcc_comparison:
15100 return tree_binary_nonzero_warnv_p (code, type,
15101 TREE_OPERAND (t, 0),
15102 TREE_OPERAND (t, 1),
15103 strict_overflow_p);
15104 case tcc_constant:
15105 case tcc_declaration:
15106 case tcc_reference:
15107 return tree_single_nonzero_warnv_p (t, strict_overflow_p);
15109 default:
15110 break;
15113 switch (code)
15115 case TRUTH_NOT_EXPR:
15116 return tree_unary_nonzero_warnv_p (code, type, TREE_OPERAND (t, 0),
15117 strict_overflow_p);
15119 case TRUTH_AND_EXPR:
15120 case TRUTH_OR_EXPR:
15121 case TRUTH_XOR_EXPR:
15122 return tree_binary_nonzero_warnv_p (code, type,
15123 TREE_OPERAND (t, 0),
15124 TREE_OPERAND (t, 1),
15125 strict_overflow_p);
15127 case COND_EXPR:
15128 case CONSTRUCTOR:
15129 case OBJ_TYPE_REF:
15130 case ASSERT_EXPR:
15131 case ADDR_EXPR:
15132 case WITH_SIZE_EXPR:
15133 case SSA_NAME:
15134 return tree_single_nonzero_warnv_p (t, strict_overflow_p);
15136 case COMPOUND_EXPR:
15137 case MODIFY_EXPR:
15138 case BIND_EXPR:
15139 return tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 1),
15140 strict_overflow_p);
15142 case SAVE_EXPR:
15143 return tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 0),
15144 strict_overflow_p);
15146 case CALL_EXPR:
15147 return alloca_call_p (t);
15149 default:
15150 break;
15152 return false;
15155 /* Return true when T is an address and is known to be nonzero.
15156 Handle warnings about undefined signed overflow. */
15158 bool
15159 tree_expr_nonzero_p (tree t)
15161 bool ret, strict_overflow_p;
15163 strict_overflow_p = false;
15164 ret = tree_expr_nonzero_warnv_p (t, &strict_overflow_p);
15165 if (strict_overflow_p)
15166 fold_overflow_warning (("assuming signed overflow does not occur when "
15167 "determining that expression is always "
15168 "non-zero"),
15169 WARN_STRICT_OVERFLOW_MISC);
15170 return ret;
15173 /* Given the components of a binary expression CODE, TYPE, OP0 and OP1,
15174 attempt to fold the expression to a constant without modifying TYPE,
15175 OP0 or OP1.
15177 If the expression could be simplified to a constant, then return
15178 the constant. If the expression would not be simplified to a
15179 constant, then return NULL_TREE. */
15181 tree
15182 fold_binary_to_constant (enum tree_code code, tree type, tree op0, tree op1)
15184 tree tem = fold_binary (code, type, op0, op1);
15185 return (tem && TREE_CONSTANT (tem)) ? tem : NULL_TREE;
15188 /* Given the components of a unary expression CODE, TYPE and OP0,
15189 attempt to fold the expression to a constant without modifying
15190 TYPE or OP0.
15192 If the expression could be simplified to a constant, then return
15193 the constant. If the expression would not be simplified to a
15194 constant, then return NULL_TREE. */
15196 tree
15197 fold_unary_to_constant (enum tree_code code, tree type, tree op0)
15199 tree tem = fold_unary (code, type, op0);
15200 return (tem && TREE_CONSTANT (tem)) ? tem : NULL_TREE;
15203 /* If EXP represents referencing an element in a constant string
15204 (either via pointer arithmetic or array indexing), return the
15205 tree representing the value accessed, otherwise return NULL. */
15207 tree
15208 fold_read_from_constant_string (tree exp)
15210 if ((TREE_CODE (exp) == INDIRECT_REF
15211 || TREE_CODE (exp) == ARRAY_REF)
15212 && TREE_CODE (TREE_TYPE (exp)) == INTEGER_TYPE)
15214 tree exp1 = TREE_OPERAND (exp, 0);
15215 tree index;
15216 tree string;
15217 location_t loc = EXPR_LOCATION (exp);
15219 if (TREE_CODE (exp) == INDIRECT_REF)
15220 string = string_constant (exp1, &index);
15221 else
15223 tree low_bound = array_ref_low_bound (exp);
15224 index = fold_convert_loc (loc, sizetype, TREE_OPERAND (exp, 1));
15226 /* Optimize the special-case of a zero lower bound.
15228 We convert the low_bound to sizetype to avoid some problems
15229 with constant folding. (E.g. suppose the lower bound is 1,
15230 and its mode is QI.  Without the conversion, (ARRAY
15231 +(INDEX-(unsigned char)1)) becomes ((ARRAY+(-(unsigned char)1))
15232 +INDEX), which becomes (ARRAY+255+INDEX). Oops!) */
15233 if (! integer_zerop (low_bound))
15234 index = size_diffop_loc (loc, index,
15235 fold_convert_loc (loc, sizetype, low_bound));
15237 string = exp1;
15240 if (string
15241 && TYPE_MODE (TREE_TYPE (exp)) == TYPE_MODE (TREE_TYPE (TREE_TYPE (string)))
15242 && TREE_CODE (string) == STRING_CST
15243 && TREE_CODE (index) == INTEGER_CST
15244 && compare_tree_int (index, TREE_STRING_LENGTH (string)) < 0
15245 && (GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_TYPE (string))))
15246 == MODE_INT)
15247 && (GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (TREE_TYPE (string)))) == 1))
15248 return build_int_cst_type (TREE_TYPE (exp),
15249 (TREE_STRING_POINTER (string)
15250 [TREE_INT_CST_LOW (index)]));
15252 return NULL;
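/* Example (illustrative): given

       static const char s[] = "hi";

   a read of s[1] reaches here as an ARRAY_REF; STRING becomes the
   STRING_CST "hi", INDEX the INTEGER_CST 1, and the function returns
   the character constant 'i' in the element type.  */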
15255 /* Return the tree for neg (ARG0) when ARG0 is known to be either
15256 an integer constant, real, or fixed-point constant.
15258 TYPE is the type of the result. */
15260 static tree
15261 fold_negate_const (tree arg0, tree type)
15263 tree t = NULL_TREE;
15265 switch (TREE_CODE (arg0))
15267 case INTEGER_CST:
15269 double_int val = tree_to_double_int (arg0);
15270 int overflow = neg_double (val.low, val.high, &val.low, &val.high);
15272 t = force_fit_type_double (type, val, 1,
15273 (overflow | TREE_OVERFLOW (arg0))
15274 && !TYPE_UNSIGNED (type));
15275 break;
15278 case REAL_CST:
15279 t = build_real (type, real_value_negate (&TREE_REAL_CST (arg0)));
15280 break;
15282 case FIXED_CST:
15284 FIXED_VALUE_TYPE f;
15285 bool overflow_p = fixed_arithmetic (&f, NEGATE_EXPR,
15286 &(TREE_FIXED_CST (arg0)), NULL,
15287 TYPE_SATURATING (type));
15288 t = build_fixed (type, f);
15289 /* Propagate overflow flags. */
15290 if (overflow_p | TREE_OVERFLOW (arg0))
15291 TREE_OVERFLOW (t) = 1;
15292 break;
15295 default:
15296 gcc_unreachable ();
15299 return t;
15302 /* Return the tree for abs (ARG0) when ARG0 is known to be either
15303 an integer constant or real constant.
15305 TYPE is the type of the result. */
15307 tree
15308 fold_abs_const (tree arg0, tree type)
15310 tree t = NULL_TREE;
15312 switch (TREE_CODE (arg0))
15314 case INTEGER_CST:
15316 double_int val = tree_to_double_int (arg0);
15318 /* If the value is unsigned or non-negative, then the absolute value
15319 is the same as the ordinary value. */
15320 if (TYPE_UNSIGNED (type)
15321 || !double_int_negative_p (val))
15322 t = arg0;
15324 /* If the value is negative, then the absolute value is
15325 its negation. */
15326 else
15328 int overflow;
15330 overflow = neg_double (val.low, val.high, &val.low, &val.high);
15331 t = force_fit_type_double (type, val, -1,
15332 overflow | TREE_OVERFLOW (arg0));
15335 break;
15337 case REAL_CST:
15338 if (REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg0)))
15339 t = build_real (type, real_value_negate (&TREE_REAL_CST (arg0)));
15340 else
15341 t = arg0;
15342 break;
15344 default:
15345 gcc_unreachable ();
15348 return t;
15351 /* Return the tree for not (ARG0) when ARG0 is known to be an integer
15352 constant. TYPE is the type of the result. */
15354 static tree
15355 fold_not_const (const_tree arg0, tree type)
15357 double_int val;
15359 gcc_assert (TREE_CODE (arg0) == INTEGER_CST);
15361 val = double_int_not (tree_to_double_int (arg0));
15362 return force_fit_type_double (type, val, 0, TREE_OVERFLOW (arg0));
15365 /* Given CODE, a relational operator, the target type, TYPE and two
15366 constant operands OP0 and OP1, return the result of the
15367 relational operation. If the result is not a compile time
15368 constant, then return NULL_TREE. */
15370 static tree
15371 fold_relational_const (enum tree_code code, tree type, tree op0, tree op1)
15373 int result, invert;
15375 /* From here on, the only cases we handle are when the result is
15376 known to be a constant. */
15378 if (TREE_CODE (op0) == REAL_CST && TREE_CODE (op1) == REAL_CST)
15380 const REAL_VALUE_TYPE *c0 = TREE_REAL_CST_PTR (op0);
15381 const REAL_VALUE_TYPE *c1 = TREE_REAL_CST_PTR (op1);
15383 /* Handle the cases where either operand is a NaN. */
15384 if (real_isnan (c0) || real_isnan (c1))
15386 switch (code)
15388 case EQ_EXPR:
15389 case ORDERED_EXPR:
15390 result = 0;
15391 break;
15393 case NE_EXPR:
15394 case UNORDERED_EXPR:
15395 case UNLT_EXPR:
15396 case UNLE_EXPR:
15397 case UNGT_EXPR:
15398 case UNGE_EXPR:
15399 case UNEQ_EXPR:
15400 result = 1;
15401 break;
15403 case LT_EXPR:
15404 case LE_EXPR:
15405 case GT_EXPR:
15406 case GE_EXPR:
15407 case LTGT_EXPR:
15408 if (flag_trapping_math)
15409 return NULL_TREE;
15410 result = 0;
15411 break;
15413 default:
15414 gcc_unreachable ();
15417 return constant_boolean_node (result, type);
15420 return constant_boolean_node (real_compare (code, c0, c1), type);
15423 if (TREE_CODE (op0) == FIXED_CST && TREE_CODE (op1) == FIXED_CST)
15425 const FIXED_VALUE_TYPE *c0 = TREE_FIXED_CST_PTR (op0);
15426 const FIXED_VALUE_TYPE *c1 = TREE_FIXED_CST_PTR (op1);
15427 return constant_boolean_node (fixed_compare (code, c0, c1), type);
15430 /* Handle equality/inequality of complex constants. */
15431 if (TREE_CODE (op0) == COMPLEX_CST && TREE_CODE (op1) == COMPLEX_CST)
15433 tree rcond = fold_relational_const (code, type,
15434 TREE_REALPART (op0),
15435 TREE_REALPART (op1));
15436 tree icond = fold_relational_const (code, type,
15437 TREE_IMAGPART (op0),
15438 TREE_IMAGPART (op1));
15439 if (code == EQ_EXPR)
15440 return fold_build2 (TRUTH_ANDIF_EXPR, type, rcond, icond);
15441 else if (code == NE_EXPR)
15442 return fold_build2 (TRUTH_ORIF_EXPR, type, rcond, icond);
15443 else
15444 return NULL_TREE;
15447 /* From here on we only handle LT, LE, GT, GE, EQ and NE.
15449 To compute GT, swap the arguments and do LT.
15450 To compute GE, do LT and invert the result.
15451 To compute LE, swap the arguments, do LT and invert the result.
15452 To compute NE, do EQ and invert the result.
15454 Therefore, the code below must handle only EQ and LT. */
15456 if (code == LE_EXPR || code == GT_EXPR)
15458 tree tem = op0;
15459 op0 = op1;
15460 op1 = tem;
15461 code = swap_tree_comparison (code);
15464 /* Note that it is safe to invert for real values here because we
15465 have already handled the one case where it matters.  */
15467 invert = 0;
15468 if (code == NE_EXPR || code == GE_EXPR)
15470 invert = 1;
15471 code = invert_tree_comparison (code, false);
15474 /* Compute a result for LT or EQ if args permit;
15475 otherwise return NULL_TREE.  */
15476 if (TREE_CODE (op0) == INTEGER_CST && TREE_CODE (op1) == INTEGER_CST)
15478 if (code == EQ_EXPR)
15479 result = tree_int_cst_equal (op0, op1);
15480 else if (TYPE_UNSIGNED (TREE_TYPE (op0)))
15481 result = INT_CST_LT_UNSIGNED (op0, op1);
15482 else
15483 result = INT_CST_LT (op0, op1);
15485 else
15486 return NULL_TREE;
15488 if (invert)
15489 result ^= 1;
15490 return constant_boolean_node (result, type);
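/* Worked example (illustrative): folding 5 > 3 first swaps the
   operands and turns GT_EXPR into LT_EXPR, leaving 3 < 5, which the
   INTEGER_CST case decides via INT_CST_LT; with INVERT still 0 the
   result is the boolean node for true.  */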
15493 /* If necessary, return a CLEANUP_POINT_EXPR for EXPR with the
15494 indicated TYPE. If no CLEANUP_POINT_EXPR is necessary, return EXPR
15495 itself. */
15497 tree
15498 fold_build_cleanup_point_expr (tree type, tree expr)
15500 /* If the expression does not have side effects then we don't have to wrap
15501 it with a cleanup point expression. */
15502 if (!TREE_SIDE_EFFECTS (expr))
15503 return expr;
15505 /* If the expression is a return, check whether the expression inside the
15506 return, or the right-hand side of the MODIFY_EXPR inside the return,
15507 has side effects.  If either of them has none, we don't need to
15508 wrap the expression in a cleanup point expression.  Note we don't check the
15509 left-hand side of the modify because it should always be a return decl.  */
15510 if (TREE_CODE (expr) == RETURN_EXPR)
15512 tree op = TREE_OPERAND (expr, 0);
15513 if (!op || !TREE_SIDE_EFFECTS (op))
15514 return expr;
15515 op = TREE_OPERAND (op, 1);
15516 if (!TREE_SIDE_EFFECTS (op))
15517 return expr;
15520 return build1 (CLEANUP_POINT_EXPR, type, expr);
15523 /* Given a pointer value OP0 and a type TYPE, return a simplified version
15524 of an indirection through OP0, or NULL_TREE if no simplification is
15525 possible. */
15527 tree
15528 fold_indirect_ref_1 (location_t loc, tree type, tree op0)
15530 tree sub = op0;
15531 tree subtype;
15533 STRIP_NOPS (sub);
15534 subtype = TREE_TYPE (sub);
15535 if (!POINTER_TYPE_P (subtype))
15536 return NULL_TREE;
15538 if (TREE_CODE (sub) == ADDR_EXPR)
15540 tree op = TREE_OPERAND (sub, 0);
15541 tree optype = TREE_TYPE (op);
15542 /* *&CONST_DECL -> to the value of the const decl. */
15543 if (TREE_CODE (op) == CONST_DECL)
15544 return DECL_INITIAL (op);
15545 /* *&p => p; make sure to handle *&"str"[cst] here. */
15546 if (type == optype)
15548 tree fop = fold_read_from_constant_string (op);
15549 if (fop)
15550 return fop;
15551 else
15552 return op;
15554 /* *(foo *)&fooarray => fooarray[0] */
15555 else if (TREE_CODE (optype) == ARRAY_TYPE
15556 && type == TREE_TYPE (optype))
15558 tree type_domain = TYPE_DOMAIN (optype);
15559 tree min_val = size_zero_node;
15560 if (type_domain && TYPE_MIN_VALUE (type_domain))
15561 min_val = TYPE_MIN_VALUE (type_domain);
15562 return build4_loc (loc, ARRAY_REF, type, op, min_val,
15563 NULL_TREE, NULL_TREE);
15565 /* *(foo *)&complexfoo => __real__ complexfoo */
15566 else if (TREE_CODE (optype) == COMPLEX_TYPE
15567 && type == TREE_TYPE (optype))
15568 return fold_build1_loc (loc, REALPART_EXPR, type, op);
15569 /* *(foo *)&vectorfoo => BIT_FIELD_REF<vectorfoo,...> */
15570 else if (TREE_CODE (optype) == VECTOR_TYPE
15571 && type == TREE_TYPE (optype))
15573 tree part_width = TYPE_SIZE (type);
15574 tree index = bitsize_int (0);
15575 return fold_build3_loc (loc, BIT_FIELD_REF, type, op, part_width, index);
15579 if (TREE_CODE (sub) == POINTER_PLUS_EXPR
15580 && TREE_CODE (TREE_OPERAND (sub, 1)) == INTEGER_CST)
15582 tree op00 = TREE_OPERAND (sub, 0);
15583 tree op01 = TREE_OPERAND (sub, 1);
15585 STRIP_NOPS (op00);
15586 if (TREE_CODE (op00) == ADDR_EXPR)
15588 tree op00type;
15589 op00 = TREE_OPERAND (op00, 0);
15590 op00type = TREE_TYPE (op00);
15592 /* ((foo*)&vectorfoo)[1] => BIT_FIELD_REF<vectorfoo,...> */
15593 if (TREE_CODE (op00type) == VECTOR_TYPE
15594 && type == TREE_TYPE (op00type))
15596 HOST_WIDE_INT offset = tree_low_cst (op01, 0);
15597 tree part_width = TYPE_SIZE (type);
15598 unsigned HOST_WIDE_INT part_widthi = tree_low_cst (part_width, 0) / BITS_PER_UNIT;
15599 unsigned HOST_WIDE_INT indexi = offset * BITS_PER_UNIT;
15600 tree index = bitsize_int (indexi);
15602 if (offset / part_widthi < TYPE_VECTOR_SUBPARTS (op00type))
15603 return fold_build3_loc (loc,
15604 BIT_FIELD_REF, type, op00,
15605 part_width, index);
15608 /* ((foo*)&complexfoo)[1] => __imag__ complexfoo */
15609 else if (TREE_CODE (op00type) == COMPLEX_TYPE
15610 && type == TREE_TYPE (op00type))
15612 tree size = TYPE_SIZE_UNIT (type);
15613 if (tree_int_cst_equal (size, op01))
15614 return fold_build1_loc (loc, IMAGPART_EXPR, type, op00);
15616 /* ((foo *)&fooarray)[1] => fooarray[1] */
15617 else if (TREE_CODE (op00type) == ARRAY_TYPE
15618 && type == TREE_TYPE (op00type))
15620 tree type_domain = TYPE_DOMAIN (op00type);
15621 tree min_val = size_zero_node;
15622 if (type_domain && TYPE_MIN_VALUE (type_domain))
15623 min_val = TYPE_MIN_VALUE (type_domain);
15624 op01 = size_binop_loc (loc, EXACT_DIV_EXPR, op01,
15625 TYPE_SIZE_UNIT (type));
15626 op01 = size_binop_loc (loc, PLUS_EXPR, op01, min_val);
15627 return build4_loc (loc, ARRAY_REF, type, op00, op01,
15628 NULL_TREE, NULL_TREE);
15633 /* *(foo *)fooarrptr => (*fooarrptr)[0] */
15634 if (TREE_CODE (TREE_TYPE (subtype)) == ARRAY_TYPE
15635 && type == TREE_TYPE (TREE_TYPE (subtype)))
15637 tree type_domain;
15638 tree min_val = size_zero_node;
15639 sub = build_fold_indirect_ref_loc (loc, sub);
15640 type_domain = TYPE_DOMAIN (TREE_TYPE (sub));
15641 if (type_domain && TYPE_MIN_VALUE (type_domain))
15642 min_val = TYPE_MIN_VALUE (type_domain);
15643 return build4_loc (loc, ARRAY_REF, type, sub, min_val, NULL_TREE,
15644 NULL_TREE);
15647 return NULL_TREE;
15650 /* Builds an expression for an indirection through T, simplifying some
15651 cases. */
15653 tree
15654 build_fold_indirect_ref_loc (location_t loc, tree t)
15656 tree type = TREE_TYPE (TREE_TYPE (t));
15657 tree sub = fold_indirect_ref_1 (loc, type, t);
15659 if (sub)
15660 return sub;
15662 return build1_loc (loc, INDIRECT_REF, type, t);
15665 /* Given an INDIRECT_REF T, return either T or a simplified version. */
15667 tree
15668 fold_indirect_ref_loc (location_t loc, tree t)
15670 tree sub = fold_indirect_ref_1 (loc, TREE_TYPE (t), TREE_OPERAND (t, 0));
15672 if (sub)
15673 return sub;
15674 else
15675 return t;
15678 /* Strip non-trapping, non-side-effecting tree nodes from an expression
15679 whose result is ignored. The type of the returned tree need not be
15680 the same as the original expression. */
15682 tree
15683 fold_ignored_result (tree t)
15685 if (!TREE_SIDE_EFFECTS (t))
15686 return integer_zero_node;
15688 for (;;)
15689 switch (TREE_CODE_CLASS (TREE_CODE (t)))
15691 case tcc_unary:
15692 t = TREE_OPERAND (t, 0);
15693 break;
15695 case tcc_binary:
15696 case tcc_comparison:
15697 if (!TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1)))
15698 t = TREE_OPERAND (t, 0);
15699 else if (!TREE_SIDE_EFFECTS (TREE_OPERAND (t, 0)))
15700 t = TREE_OPERAND (t, 1);
15701 else
15702 return t;
15703 break;
15705 case tcc_expression:
15706 switch (TREE_CODE (t))
15708 case COMPOUND_EXPR:
15709 if (TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1)))
15710 return t;
15711 t = TREE_OPERAND (t, 0);
15712 break;
15714 case COND_EXPR:
15715 if (TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1))
15716 || TREE_SIDE_EFFECTS (TREE_OPERAND (t, 2)))
15717 return t;
15718 t = TREE_OPERAND (t, 0);
15719 break;
15721 default:
15722 return t;
15724 break;
15726 default:
15727 return t;
15731 /* Return the value of VALUE, rounded up to a multiple of DIVISOR.
15732 This can only be applied to objects of a sizetype. */
15734 tree
15735 round_up_loc (location_t loc, tree value, int divisor)
15737 tree div = NULL_TREE;
15739 gcc_assert (divisor > 0);
15740 if (divisor == 1)
15741 return value;
15743 /* See if VALUE is already a multiple of DIVISOR. If so, we don't
15744 have to do anything. Only do this when we are not given a const,
15745 because in that case, this check is more expensive than just
15746 doing the rounding.  */
15747 if (TREE_CODE (value) != INTEGER_CST)
15749 div = build_int_cst (TREE_TYPE (value), divisor);
15751 if (multiple_of_p (TREE_TYPE (value), value, div))
15752 return value;
15755 /* If divisor is a power of two, simplify this to bit manipulation. */
15756 if (divisor == (divisor & -divisor))
15758 if (TREE_CODE (value) == INTEGER_CST)
15760 double_int val = tree_to_double_int (value);
15761 bool overflow_p;
15763 if ((val.low & (divisor - 1)) == 0)
15764 return value;
15766 overflow_p = TREE_OVERFLOW (value);
15767 val.low &= ~(divisor - 1);
15768 val.low += divisor;
15769 if (val.low == 0)
15771 val.high++;
15772 if (val.high == 0)
15773 overflow_p = true;
15776 return force_fit_type_double (TREE_TYPE (value), val,
15777 -1, overflow_p);
15779 else
15781 tree t;
15783 t = build_int_cst (TREE_TYPE (value), divisor - 1);
15784 value = size_binop_loc (loc, PLUS_EXPR, value, t);
15785 t = build_int_cst (TREE_TYPE (value), -divisor);
15786 value = size_binop_loc (loc, BIT_AND_EXPR, value, t);
15789 else
15791 if (!div)
15792 div = build_int_cst (TREE_TYPE (value), divisor);
15793 value = size_binop_loc (loc, CEIL_DIV_EXPR, value, div);
15794 value = size_binop_loc (loc, MULT_EXPR, value, div);
15797 return value;
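/* Worked example (illustrative): rounding VALUE = 21 up to a multiple
   of DIVISOR = 8 takes the power-of-two branch, which computes

       (21 + 7) & -8  ==  28 & ~7  ==  24

   (for an INTEGER_CST the same arithmetic is done on the double_int
   representation directly).  */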
15800 /* Likewise, but round down. */
15802 tree
15803 round_down_loc (location_t loc, tree value, int divisor)
15805 tree div = NULL_TREE;
15807 gcc_assert (divisor > 0);
15808 if (divisor == 1)
15809 return value;
15811 /* See if VALUE is already a multiple of DIVISOR. If so, we don't
15812 have to do anything. Only do this when we are not given a const,
15813 because in that case, this check is more expensive than just
15814 doing the rounding.  */
15815 if (TREE_CODE (value) != INTEGER_CST)
15817 div = build_int_cst (TREE_TYPE (value), divisor);
15819 if (multiple_of_p (TREE_TYPE (value), value, div))
15820 return value;
15823 /* If divisor is a power of two, simplify this to bit manipulation. */
15824 if (divisor == (divisor & -divisor))
15826 tree t;
15828 t = build_int_cst (TREE_TYPE (value), -divisor);
15829 value = size_binop_loc (loc, BIT_AND_EXPR, value, t);
15831 else
15833 if (!div)
15834 div = build_int_cst (TREE_TYPE (value), divisor);
15835 value = size_binop_loc (loc, FLOOR_DIV_EXPR, value, div);
15836 value = size_binop_loc (loc, MULT_EXPR, value, div);
15839 return value;
15842 /* Returns the pointer to the base of the object addressed by EXP and
15843 extracts the information about the offset of the access, storing it
15844 in *PBITPOS and *POFFSET.  */
15846 static tree
15847 split_address_to_core_and_offset (tree exp,
15848 HOST_WIDE_INT *pbitpos, tree *poffset)
15850 tree core;
15851 enum machine_mode mode;
15852 int unsignedp, volatilep;
15853 HOST_WIDE_INT bitsize;
15854 location_t loc = EXPR_LOCATION (exp);
15856 if (TREE_CODE (exp) == ADDR_EXPR)
15858 core = get_inner_reference (TREE_OPERAND (exp, 0), &bitsize, pbitpos,
15859 poffset, &mode, &unsignedp, &volatilep,
15860 false);
15861 core = build_fold_addr_expr_loc (loc, core);
15863 else
15865 core = exp;
15866 *pbitpos = 0;
15867 *poffset = NULL_TREE;
15870 return core;
15873 /* Returns true if addresses of E1 and E2 differ by a constant, false
15874 otherwise. If they do, E1 - E2 is stored in *DIFF. */
15876 bool
15877 ptr_difference_const (tree e1, tree e2, HOST_WIDE_INT *diff)
15879 tree core1, core2;
15880 HOST_WIDE_INT bitpos1, bitpos2;
15881 tree toffset1, toffset2, tdiff, type;
15883 core1 = split_address_to_core_and_offset (e1, &bitpos1, &toffset1);
15884 core2 = split_address_to_core_and_offset (e2, &bitpos2, &toffset2);
15886 if (bitpos1 % BITS_PER_UNIT != 0
15887 || bitpos2 % BITS_PER_UNIT != 0
15888 || !operand_equal_p (core1, core2, 0))
15889 return false;
15891 if (toffset1 && toffset2)
15893 type = TREE_TYPE (toffset1);
15894 if (type != TREE_TYPE (toffset2))
15895 toffset2 = fold_convert (type, toffset2);
15897 tdiff = fold_build2 (MINUS_EXPR, type, toffset1, toffset2);
15898 if (!cst_and_fits_in_hwi (tdiff))
15899 return false;
15901 *diff = int_cst_value (tdiff);
15903 else if (toffset1 || toffset2)
15905 /* If only one of the offsets is non-constant, the difference cannot
15906 be a constant. */
15907 return false;
15909 else
15910 *diff = 0;
15912 *diff += (bitpos1 - bitpos2) / BITS_PER_UNIT;
15913 return true;
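/* Usage sketch (illustrative): for E1 = &a[3] and E2 = &a[1] with
   4-byte elements, both addresses split to the core &a with constant
   bit positions 96 and 32, so *DIFF is set to (96 - 32) / 8 = 8 and
   true is returned.  */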
15916 /* Simplify the floating point expression EXP when the sign of the
15917 result is not significant. Return NULL_TREE if no simplification
15918 is possible. */
15920 tree
15921 fold_strip_sign_ops (tree exp)
15923 tree arg0, arg1;
15924 location_t loc = EXPR_LOCATION (exp);
15926 switch (TREE_CODE (exp))
15928 case ABS_EXPR:
15929 case NEGATE_EXPR:
15930 arg0 = fold_strip_sign_ops (TREE_OPERAND (exp, 0));
15931 return arg0 ? arg0 : TREE_OPERAND (exp, 0);
15933 case MULT_EXPR:
15934 case RDIV_EXPR:
15935 if (HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (TREE_TYPE (exp))))
15936 return NULL_TREE;
15937 arg0 = fold_strip_sign_ops (TREE_OPERAND (exp, 0));
15938 arg1 = fold_strip_sign_ops (TREE_OPERAND (exp, 1));
15939 if (arg0 != NULL_TREE || arg1 != NULL_TREE)
15940 return fold_build2_loc (loc, TREE_CODE (exp), TREE_TYPE (exp),
15941 arg0 ? arg0 : TREE_OPERAND (exp, 0),
15942 arg1 ? arg1 : TREE_OPERAND (exp, 1));
15943 break;
15945 case COMPOUND_EXPR:
15946 arg0 = TREE_OPERAND (exp, 0);
15947 arg1 = fold_strip_sign_ops (TREE_OPERAND (exp, 1));
15948 if (arg1)
15949 return fold_build2_loc (loc, COMPOUND_EXPR, TREE_TYPE (exp), arg0, arg1);
15950 break;
15952 case COND_EXPR:
15953 arg0 = fold_strip_sign_ops (TREE_OPERAND (exp, 1));
15954 arg1 = fold_strip_sign_ops (TREE_OPERAND (exp, 2));
15955 if (arg0 || arg1)
15956 return fold_build3_loc (loc,
15957 COND_EXPR, TREE_TYPE (exp), TREE_OPERAND (exp, 0),
15958 arg0 ? arg0 : TREE_OPERAND (exp, 1),
15959 arg1 ? arg1 : TREE_OPERAND (exp, 2));
15960 break;
15962 case CALL_EXPR:
15964 const enum built_in_function fcode = builtin_mathfn_code (exp);
15965 switch (fcode)
15967 CASE_FLT_FN (BUILT_IN_COPYSIGN):
15968 /* Strip copysign function call, return the 1st argument. */
15969 arg0 = CALL_EXPR_ARG (exp, 0);
15970 arg1 = CALL_EXPR_ARG (exp, 1);
15971 return omit_one_operand_loc (loc, TREE_TYPE (exp), arg0, arg1);
15973 default:
15974 /* Strip sign ops from the argument of "odd" math functions. */
15975 if (negate_mathfn_p (fcode))
15977 arg0 = fold_strip_sign_ops (CALL_EXPR_ARG (exp, 0));
15978 if (arg0)
15979 return build_call_expr_loc (loc, get_callee_fndecl (exp), 1, arg0);
15981 break;
15984 break;
15986 default:
15987 break;
15989 return NULL_TREE;
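/* Usage sketch (illustrative): since cos (-x) == cos (x), the builtin
   folders can run fold_strip_sign_ops over the argument of cos and
   thereby compile cos (-a * b) as cos (a * b).  */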