/* Fold a constant sub-tree into a single node for C-compiler
   Copyright (C) 1987, 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998, 1999,
   2000, 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009, 2010, 2011,
   2012 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */
/*@@ This file should be rewritten to use an arbitrary precision
  @@ representation for "struct tree_int_cst" and "struct tree_real_cst".
  @@ Perhaps the routines could also be used for bc/dc, and made a lib.
  @@ The routines that translate from the ap rep should
  @@ warn if precision et al. is lost.
  @@ This would also make life easier when this technology is used
  @@ for cross-compilers.  */
/* The entry points in this file are fold, size_int_wide and size_binop.

   fold takes a tree as argument and returns a simplified tree.

   size_binop takes a tree code for an arithmetic operation
   and two operands that are trees, and produces a tree for the
   result, assuming the type comes from `sizetype'.

   size_int takes an integer value, and creates a tree constant
   with type from `sizetype'.

   Note: Since the folders get called on non-gimple code as well as
   gimple code, we need to handle GIMPLE tuples as well as their
   corresponding tree equivalents.  */
#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "flags.h"
#include "tree.h"
#include "realmpfr.h"
#include "rtl.h"
#include "expr.h"
#include "tm_p.h"
#include "target.h"
#include "diagnostic-core.h"
#include "intl.h"
#include "ggc.h"
#include "hashtab.h"
#include "langhooks.h"
#include "md5.h"
#include "gimple.h"
#include "tree-flow.h"
/* Nonzero if we are folding constants inside an initializer; zero
   otherwise.  */
int folding_initializer = 0;

/* The following constants represent a bit based encoding of GCC's
   comparison operators.  This encoding simplifies transformations
   on relational comparison operators, such as AND and OR.  */
enum comparison_code {
  COMPCODE_FALSE = 0,
  COMPCODE_LT = 1,
  COMPCODE_EQ = 2,
  COMPCODE_LE = 3,
  COMPCODE_GT = 4,
  COMPCODE_LTGT = 5,
  COMPCODE_GE = 6,
  COMPCODE_ORD = 7,
  COMPCODE_UNORD = 8,
  COMPCODE_UNLT = 9,
  COMPCODE_UNEQ = 10,
  COMPCODE_UNLE = 11,
  COMPCODE_UNGT = 12,
  COMPCODE_NE = 13,
  COMPCODE_UNGE = 14,
  COMPCODE_TRUE = 15
};
static bool negate_mathfn_p (enum built_in_function);
static bool negate_expr_p (tree);
static tree negate_expr (tree);
static tree split_tree (tree, enum tree_code, tree *, tree *, tree *, int);
static tree associate_trees (location_t, tree, tree, enum tree_code, tree);
static tree const_binop (enum tree_code, tree, tree);
static enum comparison_code comparison_to_compcode (enum tree_code);
static enum tree_code compcode_to_comparison (enum comparison_code);
static int operand_equal_for_comparison_p (tree, tree, tree);
static int twoval_comparison_p (tree, tree *, tree *, int *);
static tree eval_subst (location_t, tree, tree, tree, tree, tree);
static tree pedantic_omit_one_operand_loc (location_t, tree, tree, tree);
static tree distribute_bit_expr (location_t, enum tree_code, tree, tree, tree);
static tree make_bit_field_ref (location_t, tree, tree,
                                HOST_WIDE_INT, HOST_WIDE_INT, int);
static tree optimize_bit_field_compare (location_t, enum tree_code,
                                        tree, tree, tree);
static tree decode_field_reference (location_t, tree, HOST_WIDE_INT *,
                                    HOST_WIDE_INT *,
                                    enum machine_mode *, int *, int *,
                                    tree *, tree *);
static int all_ones_mask_p (const_tree, int);
static tree sign_bit_p (tree, const_tree);
static int simple_operand_p (const_tree);
static bool simple_operand_p_2 (tree);
static tree range_binop (enum tree_code, tree, tree, int, tree, int);
static tree range_predecessor (tree);
static tree range_successor (tree);
static tree fold_range_test (location_t, enum tree_code, tree, tree, tree);
static tree fold_cond_expr_with_comparison (location_t, tree, tree, tree, tree);
static tree unextend (tree, int, int, tree);
static tree optimize_minmax_comparison (location_t, enum tree_code,
                                        tree, tree, tree);
static tree extract_muldiv (tree, tree, enum tree_code, tree, bool *);
static tree extract_muldiv_1 (tree, tree, enum tree_code, tree, bool *);
static tree fold_binary_op_with_conditional_arg (location_t,
                                                 enum tree_code, tree,
                                                 tree, tree,
                                                 tree, tree, int);
static tree fold_mathfn_compare (location_t,
                                 enum built_in_function, enum tree_code,
                                 tree, tree, tree);
static tree fold_inf_compare (location_t, enum tree_code, tree, tree, tree);
static tree fold_div_compare (location_t, enum tree_code, tree, tree, tree);
static bool reorder_operands_p (const_tree, const_tree);
static tree fold_negate_const (tree, tree);
static tree fold_not_const (const_tree, tree);
static tree fold_relational_const (enum tree_code, tree, tree, tree);
static tree fold_convert_const (enum tree_code, tree, tree);
/* Return EXPR_LOCATION of T if it is not UNKNOWN_LOCATION.
   Otherwise, return LOC.  */

static location_t
expr_location_or (tree t, location_t loc)
{
  location_t tloc = EXPR_LOCATION (t);
  return tloc != UNKNOWN_LOCATION ? tloc : loc;
}
/* Similar to protected_set_expr_location, but never modify X in place;
   if the location can and needs to be set, unshare X first.  */

static inline tree
protected_set_expr_location_unshare (tree x, location_t loc)
{
  if (CAN_HAVE_LOCATION_P (x)
      && EXPR_LOCATION (x) != loc
      && !(TREE_CODE (x) == SAVE_EXPR
           || TREE_CODE (x) == TARGET_EXPR
           || TREE_CODE (x) == BIND_EXPR))
    {
      x = copy_node (x);
      SET_EXPR_LOCATION (x, loc);
    }
  return x;
}
/* We know that A1 + B1 = SUM1, using 2's complement arithmetic and ignoring
   overflow.  Suppose A, B and SUM have the same respective signs as A1, B1,
   and SUM1.  Then this yields nonzero if overflow occurred during the
   addition.

   Overflow occurs if A and B have the same sign, but A and SUM differ in
   sign.  Use `^' to test whether signs differ, and `< 0' to isolate the
   sign.  */
#define OVERFLOW_SUM_SIGN(a, b, sum) ((~((a) ^ (b)) & ((a) ^ (sum))) < 0)
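
/* Illustrative sketch (not part of the original file): with plain
   32-bit ints, adding 0x7fffffff and 1 wraps to the most negative
   value; the operands agree in sign while the sum differs, so the
   macro reports overflow:

     int a = 0x7fffffff, b = 1;
     int sum = a + b;                         // wraps to INT_MIN
     assert (OVERFLOW_SUM_SIGN (a, b, sum));  // nonzero: overflow

   Operands of opposite sign can never overflow on addition; the
   ~((a) ^ (b)) factor masks those cases out.  */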
/* If ARG2 divides ARG1 with zero remainder, carries out the division
   of type CODE and returns the quotient.
   Otherwise returns NULL_TREE.  */

tree
div_if_zero_remainder (enum tree_code code, const_tree arg1, const_tree arg2)
{
  double_int quo, rem;
  int uns;

  /* The sign of the division is according to operand two, that
     does the correct thing for POINTER_PLUS_EXPR where we want
     a signed division.  */
  uns = TYPE_UNSIGNED (TREE_TYPE (arg2));
  if (TREE_CODE (TREE_TYPE (arg2)) == INTEGER_TYPE
      && TYPE_IS_SIZETYPE (TREE_TYPE (arg2)))
    uns = false;

  quo = double_int_divmod (tree_to_double_int (arg1),
                           tree_to_double_int (arg2),
                           uns, code, &rem);

  if (double_int_zero_p (rem))
    return build_int_cst_wide (TREE_TYPE (arg1), quo.low, quo.high);

  return NULL_TREE;
}
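
/* Usage sketch (illustrative, not from the original file):

     tree a = build_int_cst (integer_type_node, 12);
     tree b = build_int_cst (integer_type_node, 4);
     tree q = div_if_zero_remainder (EXACT_DIV_EXPR, a, b);
     // 12 % 4 == 0, so q is the INTEGER_CST 3
     tree r = div_if_zero_remainder (EXACT_DIV_EXPR, a,
                                     build_int_cst (integer_type_node, 5));
     // 12 % 5 != 0, so r is NULL_TREE
*/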
/* This is nonzero if we should defer warnings about undefined
   overflow.  This facility exists because these warnings are a
   special case.  The code to estimate loop iterations does not want
   to issue any warnings, since it works with expressions which do not
   occur in user code.  Various bits of cleanup code call fold(), but
   only use the result if it has certain characteristics (e.g., is a
   constant); that code only wants to issue a warning if the result is
   used.  */

static int fold_deferring_overflow_warnings;

/* If a warning about undefined overflow is deferred, this is the
   warning.  Note that this may cause us to turn two warnings into
   one, but that is fine since it is sufficient to only give one
   warning per expression.  */

static const char* fold_deferred_overflow_warning;

/* If a warning about undefined overflow is deferred, this is the
   level at which the warning should be emitted.  */

static enum warn_strict_overflow_code fold_deferred_overflow_code;

/* Start deferring overflow warnings.  We could use a stack here to
   permit nested calls, but at present it is not necessary.  */

void
fold_defer_overflow_warnings (void)
{
  ++fold_deferring_overflow_warnings;
}
/* Stop deferring overflow warnings.  If there is a pending warning,
   and ISSUE is true, then issue the warning if appropriate.  STMT is
   the statement with which the warning should be associated (used for
   location information); STMT may be NULL.  CODE is the level of the
   warning--a warn_strict_overflow_code value.  This function will use
   the smaller of CODE and the deferred code when deciding whether to
   issue the warning.  CODE may be zero to mean to always use the
   deferred code.  */

void
fold_undefer_overflow_warnings (bool issue, const_gimple stmt, int code)
{
  const char *warnmsg;
  location_t locus;

  gcc_assert (fold_deferring_overflow_warnings > 0);
  --fold_deferring_overflow_warnings;
  if (fold_deferring_overflow_warnings > 0)
    {
      if (fold_deferred_overflow_warning != NULL
          && code != 0
          && code < (int) fold_deferred_overflow_code)
        fold_deferred_overflow_code = (enum warn_strict_overflow_code) code;
      return;
    }

  warnmsg = fold_deferred_overflow_warning;
  fold_deferred_overflow_warning = NULL;

  if (!issue || warnmsg == NULL)
    return;

  if (gimple_no_warning_p (stmt))
    return;

  /* Use the smallest code level when deciding to issue the
     warning.  */
  if (code == 0 || code > (int) fold_deferred_overflow_code)
    code = fold_deferred_overflow_code;

  if (!issue_strict_overflow_warning (code))
    return;

  if (stmt == NULL)
    locus = input_location;
  else
    locus = gimple_location (stmt);
  warning_at (locus, OPT_Wstrict_overflow, "%s", warnmsg);
}
/* Stop deferring overflow warnings, ignoring any deferred
   warnings.  */

void
fold_undefer_and_ignore_overflow_warnings (void)
{
  fold_undefer_overflow_warnings (false, NULL, 0);
}

/* Whether we are deferring overflow warnings.  */

bool
fold_deferring_overflow_warnings_p (void)
{
  return fold_deferring_overflow_warnings > 0;
}
/* This is called when we fold something based on the fact that signed
   overflow is undefined.  */

static void
fold_overflow_warning (const char* gmsgid, enum warn_strict_overflow_code wc)
{
  if (fold_deferring_overflow_warnings > 0)
    {
      if (fold_deferred_overflow_warning == NULL
          || wc < fold_deferred_overflow_code)
        {
          fold_deferred_overflow_warning = gmsgid;
          fold_deferred_overflow_code = wc;
        }
    }
  else if (issue_strict_overflow_warning (wc))
    warning (OPT_Wstrict_overflow, gmsgid);
}
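
/* Illustrative sketch of the deferral protocol (not from the original
   file).  A caller that folds speculatively brackets the work with
   defer/undefer and only lets a queued warning out if the folded
   result is actually used:

     fold_defer_overflow_warnings ();
     tree folded = fold (expr);                   // may queue a warning
     bool used = result_is_interesting (folded);  // hypothetical predicate
     fold_undefer_overflow_warnings (used, NULL, 0);
*/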
/* Return true if the built-in mathematical function specified by CODE
   is odd, i.e. -f(x) == f(-x).  */

static bool
negate_mathfn_p (enum built_in_function code)
{
  switch (code)
    {
    CASE_FLT_FN (BUILT_IN_ASIN):
    CASE_FLT_FN (BUILT_IN_ASINH):
    CASE_FLT_FN (BUILT_IN_ATAN):
    CASE_FLT_FN (BUILT_IN_ATANH):
    CASE_FLT_FN (BUILT_IN_CASIN):
    CASE_FLT_FN (BUILT_IN_CASINH):
    CASE_FLT_FN (BUILT_IN_CATAN):
    CASE_FLT_FN (BUILT_IN_CATANH):
    CASE_FLT_FN (BUILT_IN_CBRT):
    CASE_FLT_FN (BUILT_IN_CPROJ):
    CASE_FLT_FN (BUILT_IN_CSIN):
    CASE_FLT_FN (BUILT_IN_CSINH):
    CASE_FLT_FN (BUILT_IN_CTAN):
    CASE_FLT_FN (BUILT_IN_CTANH):
    CASE_FLT_FN (BUILT_IN_ERF):
    CASE_FLT_FN (BUILT_IN_LLROUND):
    CASE_FLT_FN (BUILT_IN_LROUND):
    CASE_FLT_FN (BUILT_IN_ROUND):
    CASE_FLT_FN (BUILT_IN_SIN):
    CASE_FLT_FN (BUILT_IN_SINH):
    CASE_FLT_FN (BUILT_IN_TAN):
    CASE_FLT_FN (BUILT_IN_TANH):
    CASE_FLT_FN (BUILT_IN_TRUNC):
      return true;

    CASE_FLT_FN (BUILT_IN_LLRINT):
    CASE_FLT_FN (BUILT_IN_LRINT):
    CASE_FLT_FN (BUILT_IN_NEARBYINT):
    CASE_FLT_FN (BUILT_IN_RINT):
      return !flag_rounding_math;

    default:
      break;
    }
  return false;
}
/* Check whether we may negate an integer constant T without causing
   overflow.  */

bool
may_negate_without_overflow_p (const_tree t)
{
  unsigned HOST_WIDE_INT val;
  unsigned int prec;
  tree type;

  gcc_assert (TREE_CODE (t) == INTEGER_CST);

  type = TREE_TYPE (t);
  if (TYPE_UNSIGNED (type))
    return false;

  prec = TYPE_PRECISION (type);
  if (prec > HOST_BITS_PER_WIDE_INT)
    {
      if (TREE_INT_CST_LOW (t) != 0)
        return true;
      prec -= HOST_BITS_PER_WIDE_INT;
      val = TREE_INT_CST_HIGH (t);
    }
  else
    val = TREE_INT_CST_LOW (t);
  if (prec < HOST_BITS_PER_WIDE_INT)
    val &= ((unsigned HOST_WIDE_INT) 1 << prec) - 1;
  return val != ((unsigned HOST_WIDE_INT) 1 << (prec - 1));
}
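
/* Worked example (illustrative): for a 32-bit signed type, PREC is 32
   and the final comparison tests the masked magnitude against 1 << 31.
   The only value that cannot be negated is the type minimum itself:

     t = -2147483647  ->  val == 0x80000001  ->  true  (negatable)
     t = -2147483648  ->  val == 0x80000000  ->  false (-INT_MIN does
                                                        not fit)
*/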
/* Determine whether an expression T can be cheaply negated using
   the function negate_expr without introducing undefined overflow.  */

static bool
negate_expr_p (tree t)
{
  tree type;

  if (t == 0)
    return false;

  type = TREE_TYPE (t);

  STRIP_SIGN_NOPS (t);
  switch (TREE_CODE (t))
    {
    case INTEGER_CST:
      if (TYPE_OVERFLOW_WRAPS (type))
        return true;

      /* Check that -CST will not overflow type.  */
      return may_negate_without_overflow_p (t);
    case BIT_NOT_EXPR:
      return (INTEGRAL_TYPE_P (type)
              && TYPE_OVERFLOW_WRAPS (type));

    case FIXED_CST:
    case NEGATE_EXPR:
      return true;

    case REAL_CST:
      /* We want to canonicalize to positive real constants.  Pretend
         that only negative ones can be easily negated.  */
      return REAL_VALUE_NEGATIVE (TREE_REAL_CST (t));

    case COMPLEX_CST:
      return negate_expr_p (TREE_REALPART (t))
             && negate_expr_p (TREE_IMAGPART (t));

    case COMPLEX_EXPR:
      return negate_expr_p (TREE_OPERAND (t, 0))
             && negate_expr_p (TREE_OPERAND (t, 1));

    case CONJ_EXPR:
      return negate_expr_p (TREE_OPERAND (t, 0));

    case PLUS_EXPR:
      if (HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type))
          || HONOR_SIGNED_ZEROS (TYPE_MODE (type)))
        return false;
      /* -(A + B) -> (-B) - A.  */
      if (negate_expr_p (TREE_OPERAND (t, 1))
          && reorder_operands_p (TREE_OPERAND (t, 0),
                                 TREE_OPERAND (t, 1)))
        return true;
      /* -(A + B) -> (-A) - B.  */
      return negate_expr_p (TREE_OPERAND (t, 0));

    case MINUS_EXPR:
      /* We can't turn -(A-B) into B-A when we honor signed zeros.  */
      return !HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type))
             && !HONOR_SIGNED_ZEROS (TYPE_MODE (type))
             && reorder_operands_p (TREE_OPERAND (t, 0),
                                    TREE_OPERAND (t, 1));

    case MULT_EXPR:
      if (TYPE_UNSIGNED (TREE_TYPE (t)))
        break;

      /* Fall through.  */

    case RDIV_EXPR:
      if (! HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (TREE_TYPE (t))))
        return negate_expr_p (TREE_OPERAND (t, 1))
               || negate_expr_p (TREE_OPERAND (t, 0));
      break;

    case TRUNC_DIV_EXPR:
    case ROUND_DIV_EXPR:
    case FLOOR_DIV_EXPR:
    case CEIL_DIV_EXPR:
    case EXACT_DIV_EXPR:
      /* In general we can't negate A / B, because if A is INT_MIN and
         B is 1, we may turn this into INT_MIN / -1 which is undefined
         and actually traps on some architectures.  But if overflow is
         undefined, we can negate, because - (INT_MIN / 1) is an
         overflow.  */
      if (INTEGRAL_TYPE_P (TREE_TYPE (t))
          && !TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (t)))
        break;
      return negate_expr_p (TREE_OPERAND (t, 1))
             || negate_expr_p (TREE_OPERAND (t, 0));

    case NOP_EXPR:
      /* Negate -((double)float) as (double)(-float).  */
      if (TREE_CODE (type) == REAL_TYPE)
        {
          tree tem = strip_float_extensions (t);
          if (tem != t)
            return negate_expr_p (tem);
        }
      break;

    case CALL_EXPR:
      /* Negate -f(x) as f(-x).  */
      if (negate_mathfn_p (builtin_mathfn_code (t)))
        return negate_expr_p (CALL_EXPR_ARG (t, 0));
      break;

    case RSHIFT_EXPR:
      /* Optimize -((int)x >> 31) into (unsigned)x >> 31.  */
      if (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST)
        {
          tree op1 = TREE_OPERAND (t, 1);
          if (TREE_INT_CST_HIGH (op1) == 0
              && (unsigned HOST_WIDE_INT) (TYPE_PRECISION (type) - 1)
                 == TREE_INT_CST_LOW (op1))
            return true;
        }
      break;

    default:
      break;
    }
  return false;
}
/* Given T, an expression, return a folded tree for -T or NULL_TREE, if no
   simplification is possible.
   If negate_expr_p would return true for T, NULL_TREE will never be
   returned.  */

static tree
fold_negate_expr (location_t loc, tree t)
{
  tree type = TREE_TYPE (t);
  tree tem;

  switch (TREE_CODE (t))
    {
    /* Convert - (~A) to A + 1.  */
    case BIT_NOT_EXPR:
      if (INTEGRAL_TYPE_P (type))
        return fold_build2_loc (loc, PLUS_EXPR, type, TREE_OPERAND (t, 0),
                                build_int_cst (type, 1));
      break;

    case INTEGER_CST:
      tem = fold_negate_const (t, type);
      if (TREE_OVERFLOW (tem) == TREE_OVERFLOW (t)
          || !TYPE_OVERFLOW_TRAPS (type))
        return tem;
      break;

    case REAL_CST:
      tem = fold_negate_const (t, type);
      /* Two's complement FP formats, such as c4x, may overflow.  */
      if (!TREE_OVERFLOW (tem) || !flag_trapping_math)
        return tem;
      break;

    case FIXED_CST:
      tem = fold_negate_const (t, type);
      return tem;

    case COMPLEX_CST:
      {
        tree rpart = negate_expr (TREE_REALPART (t));
        tree ipart = negate_expr (TREE_IMAGPART (t));

        if ((TREE_CODE (rpart) == REAL_CST
             && TREE_CODE (ipart) == REAL_CST)
            || (TREE_CODE (rpart) == INTEGER_CST
                && TREE_CODE (ipart) == INTEGER_CST))
          return build_complex (type, rpart, ipart);
      }
      break;

    case COMPLEX_EXPR:
      if (negate_expr_p (t))
        return fold_build2_loc (loc, COMPLEX_EXPR, type,
                                fold_negate_expr (loc, TREE_OPERAND (t, 0)),
                                fold_negate_expr (loc, TREE_OPERAND (t, 1)));
      break;

    case CONJ_EXPR:
      if (negate_expr_p (t))
        return fold_build1_loc (loc, CONJ_EXPR, type,
                                fold_negate_expr (loc, TREE_OPERAND (t, 0)));
      break;

    case NEGATE_EXPR:
      return TREE_OPERAND (t, 0);

    case PLUS_EXPR:
      if (!HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type))
          && !HONOR_SIGNED_ZEROS (TYPE_MODE (type)))
        {
          /* -(A + B) -> (-B) - A.  */
          if (negate_expr_p (TREE_OPERAND (t, 1))
              && reorder_operands_p (TREE_OPERAND (t, 0),
                                     TREE_OPERAND (t, 1)))
            {
              tem = negate_expr (TREE_OPERAND (t, 1));
              return fold_build2_loc (loc, MINUS_EXPR, type,
                                      tem, TREE_OPERAND (t, 0));
            }

          /* -(A + B) -> (-A) - B.  */
          if (negate_expr_p (TREE_OPERAND (t, 0)))
            {
              tem = negate_expr (TREE_OPERAND (t, 0));
              return fold_build2_loc (loc, MINUS_EXPR, type,
                                      tem, TREE_OPERAND (t, 1));
            }
        }
      break;
    case MINUS_EXPR:
      /* - (A - B) -> B - A  */
      if (!HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type))
          && !HONOR_SIGNED_ZEROS (TYPE_MODE (type))
          && reorder_operands_p (TREE_OPERAND (t, 0), TREE_OPERAND (t, 1)))
        return fold_build2_loc (loc, MINUS_EXPR, type,
                                TREE_OPERAND (t, 1), TREE_OPERAND (t, 0));
      break;

    case MULT_EXPR:
      if (TYPE_UNSIGNED (type))
        break;

      /* Fall through.  */

    case RDIV_EXPR:
      if (! HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type)))
        {
          tem = TREE_OPERAND (t, 1);
          if (negate_expr_p (tem))
            return fold_build2_loc (loc, TREE_CODE (t), type,
                                    TREE_OPERAND (t, 0), negate_expr (tem));
          tem = TREE_OPERAND (t, 0);
          if (negate_expr_p (tem))
            return fold_build2_loc (loc, TREE_CODE (t), type,
                                    negate_expr (tem), TREE_OPERAND (t, 1));
        }
      break;

    case TRUNC_DIV_EXPR:
    case ROUND_DIV_EXPR:
    case FLOOR_DIV_EXPR:
    case CEIL_DIV_EXPR:
    case EXACT_DIV_EXPR:
      /* In general we can't negate A / B, because if A is INT_MIN and
         B is 1, we may turn this into INT_MIN / -1 which is undefined
         and actually traps on some architectures.  But if overflow is
         undefined, we can negate, because - (INT_MIN / 1) is an
         overflow.  */
      if (!INTEGRAL_TYPE_P (type) || TYPE_OVERFLOW_UNDEFINED (type))
        {
          const char * const warnmsg = G_("assuming signed overflow does not "
                                          "occur when negating a division");
          tem = TREE_OPERAND (t, 1);
          if (negate_expr_p (tem))
            {
              if (INTEGRAL_TYPE_P (type)
                  && (TREE_CODE (tem) != INTEGER_CST
                      || integer_onep (tem)))
                fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_MISC);
              return fold_build2_loc (loc, TREE_CODE (t), type,
                                      TREE_OPERAND (t, 0), negate_expr (tem));
            }
          tem = TREE_OPERAND (t, 0);
          if (negate_expr_p (tem))
            {
              if (INTEGRAL_TYPE_P (type)
                  && (TREE_CODE (tem) != INTEGER_CST
                      || tree_int_cst_equal (tem, TYPE_MIN_VALUE (type))))
                fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_MISC);
              return fold_build2_loc (loc, TREE_CODE (t), type,
                                      negate_expr (tem), TREE_OPERAND (t, 1));
            }
        }
      break;

    case NOP_EXPR:
      /* Convert -((double)float) into (double)(-float).  */
      if (TREE_CODE (type) == REAL_TYPE)
        {
          tem = strip_float_extensions (t);
          if (tem != t && negate_expr_p (tem))
            return fold_convert_loc (loc, type, negate_expr (tem));
        }
      break;

    case CALL_EXPR:
      /* Negate -f(x) as f(-x).  */
      if (negate_mathfn_p (builtin_mathfn_code (t))
          && negate_expr_p (CALL_EXPR_ARG (t, 0)))
        {
          tree fndecl, arg;

          fndecl = get_callee_fndecl (t);
          arg = negate_expr (CALL_EXPR_ARG (t, 0));
          return build_call_expr_loc (loc, fndecl, 1, arg);
        }
      break;

    case RSHIFT_EXPR:
      /* Optimize -((int)x >> 31) into (unsigned)x >> 31.  */
      if (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST)
        {
          tree op1 = TREE_OPERAND (t, 1);
          if (TREE_INT_CST_HIGH (op1) == 0
              && (unsigned HOST_WIDE_INT) (TYPE_PRECISION (type) - 1)
                 == TREE_INT_CST_LOW (op1))
            {
              tree ntype = TYPE_UNSIGNED (type)
                           ? signed_type_for (type)
                           : unsigned_type_for (type);
              tree temp = fold_convert_loc (loc, ntype, TREE_OPERAND (t, 0));
              temp = fold_build2_loc (loc, RSHIFT_EXPR, ntype, temp, op1);
              return fold_convert_loc (loc, type, temp);
            }
        }
      break;

    default:
      break;
    }

  return NULL_TREE;
}
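
/* Worked example (illustrative): for a 32-bit int X, the RSHIFT_EXPR
   case above rewrites

     -(X >> 31)  ==>  (int) ((unsigned) X >> 31)

   The arithmetic shift yields 0 or -1 depending on the sign bit, and
   negating that is exactly the logical shift's 0 or 1.  */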
/* Like fold_negate_expr, but return a NEGATE_EXPR tree, if T cannot be
   negated in a simpler way.  Also allow for T to be NULL_TREE, in which
   case return NULL_TREE.  */

static tree
negate_expr (tree t)
{
  tree type, tem;
  location_t loc;

  if (t == NULL_TREE)
    return NULL_TREE;

  loc = EXPR_LOCATION (t);
  type = TREE_TYPE (t);
  STRIP_SIGN_NOPS (t);

  tem = fold_negate_expr (loc, t);
  if (!tem)
    tem = build1_loc (loc, NEGATE_EXPR, TREE_TYPE (t), t);
  return fold_convert_loc (loc, type, tem);
}
/* Split a tree IN into a constant, literal and variable parts that could be
   combined with CODE to make IN.  "constant" means an expression with
   TREE_CONSTANT but that isn't an actual constant.  CODE must be a
   commutative arithmetic operation.  Store the constant part into *CONP,
   the literal in *LITP and return the variable part.  If a part isn't
   present, set it to null.  If the tree does not decompose in this way,
   return the entire tree as the variable part and the other parts as null.

   If CODE is PLUS_EXPR we also split trees that use MINUS_EXPR.  In that
   case, we negate an operand that was subtracted.  Except if it is a
   literal for which we use *MINUS_LITP instead.

   If NEGATE_P is true, we are negating all of IN, again except a literal
   for which we use *MINUS_LITP instead.

   If IN is itself a literal or constant, return it as appropriate.

   Note that we do not guarantee that any of the three values will be the
   same type as IN, but they will have the same signedness and mode.  */

static tree
split_tree (tree in, enum tree_code code, tree *conp, tree *litp,
            tree *minus_litp, int negate_p)
{
  tree var = 0;

  *conp = 0;
  *litp = 0;
  *minus_litp = 0;

  /* Strip any conversions that don't change the machine mode or signedness.  */
  STRIP_SIGN_NOPS (in);

  if (TREE_CODE (in) == INTEGER_CST || TREE_CODE (in) == REAL_CST
      || TREE_CODE (in) == FIXED_CST)
    *litp = in;
  else if (TREE_CODE (in) == code
           || ((! FLOAT_TYPE_P (TREE_TYPE (in)) || flag_associative_math)
               && ! SAT_FIXED_POINT_TYPE_P (TREE_TYPE (in))
               /* We can associate addition and subtraction together (even
                  though the C standard doesn't say so) for integers because
                  the value is not affected.  For reals, the value might be
                  affected, so we can't.  */
               && ((code == PLUS_EXPR && TREE_CODE (in) == MINUS_EXPR)
                   || (code == MINUS_EXPR && TREE_CODE (in) == PLUS_EXPR))))
    {
      tree op0 = TREE_OPERAND (in, 0);
      tree op1 = TREE_OPERAND (in, 1);
      int neg1_p = TREE_CODE (in) == MINUS_EXPR;
      int neg_litp_p = 0, neg_conp_p = 0, neg_var_p = 0;

      /* First see if either of the operands is a literal, then a constant.  */
      if (TREE_CODE (op0) == INTEGER_CST || TREE_CODE (op0) == REAL_CST
          || TREE_CODE (op0) == FIXED_CST)
        *litp = op0, op0 = 0;
      else if (TREE_CODE (op1) == INTEGER_CST || TREE_CODE (op1) == REAL_CST
               || TREE_CODE (op1) == FIXED_CST)
        *litp = op1, neg_litp_p = neg1_p, op1 = 0;

      if (op0 != 0 && TREE_CONSTANT (op0))
        *conp = op0, op0 = 0;
      else if (op1 != 0 && TREE_CONSTANT (op1))
        *conp = op1, neg_conp_p = neg1_p, op1 = 0;

      /* If we haven't dealt with either operand, this is not a case we can
         decompose.  Otherwise, VAR is either of the ones remaining, if any.  */
      if (op0 != 0 && op1 != 0)
        var = in;
      else if (op0 != 0)
        var = op0;
      else
        var = op1, neg_var_p = neg1_p;

      /* Now do any needed negations.  */
      if (neg_litp_p)
        *minus_litp = *litp, *litp = 0;
      if (neg_conp_p)
        *conp = negate_expr (*conp);
      if (neg_var_p)
        var = negate_expr (var);
    }
  else if (TREE_CONSTANT (in))
    *conp = in;
  else
    var = in;

  if (negate_p)
    {
      if (*litp)
        *minus_litp = *litp, *litp = 0;
      else if (*minus_litp)
        *litp = *minus_litp, *minus_litp = 0;
      *conp = negate_expr (*conp);
      var = negate_expr (var);
    }

  return var;
}
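
/* Worked example (illustrative): splitting IN = (x - 5) with
   CODE = PLUS_EXPR and NEGATE_P = 0.  The MINUS_EXPR is accepted
   because PLUS and MINUS associate for integers, and the subtracted
   literal lands in *MINUS_LITP:

     returned var  = x
     *conp         = NULL_TREE
     *litp         = NULL_TREE
     *minus_litp   = 5

   so the caller can rebuild the value as x - 5 after reassociation.  */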
/* Re-associate trees split by the above function.  T1 and T2 are
   either expressions to associate or null.  Return the new
   expression, if any.  LOC is the location of the new expression.  If
   we build an operation, do it in TYPE and with CODE.  */

static tree
associate_trees (location_t loc, tree t1, tree t2, enum tree_code code, tree type)
{
  if (t1 == 0)
    return t2;
  else if (t2 == 0)
    return t1;

  /* If either input is CODE, a PLUS_EXPR, or a MINUS_EXPR, don't
     try to fold this since we will have infinite recursion.  But do
     deal with any NEGATE_EXPRs.  */
  if (TREE_CODE (t1) == code || TREE_CODE (t2) == code
      || TREE_CODE (t1) == MINUS_EXPR || TREE_CODE (t2) == MINUS_EXPR)
    {
      if (code == PLUS_EXPR)
        {
          if (TREE_CODE (t1) == NEGATE_EXPR)
            return build2_loc (loc, MINUS_EXPR, type,
                               fold_convert_loc (loc, type, t2),
                               fold_convert_loc (loc, type,
                                                 TREE_OPERAND (t1, 0)));
          else if (TREE_CODE (t2) == NEGATE_EXPR)
            return build2_loc (loc, MINUS_EXPR, type,
                               fold_convert_loc (loc, type, t1),
                               fold_convert_loc (loc, type,
                                                 TREE_OPERAND (t2, 0)));
          else if (integer_zerop (t2))
            return fold_convert_loc (loc, type, t1);
        }
      else if (code == MINUS_EXPR)
        {
          if (integer_zerop (t2))
            return fold_convert_loc (loc, type, t1);
        }

      return build2_loc (loc, code, type, fold_convert_loc (loc, type, t1),
                         fold_convert_loc (loc, type, t2));
    }

  return fold_build2_loc (loc, code, type, fold_convert_loc (loc, type, t1),
                          fold_convert_loc (loc, type, t2));
}
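
/* Usage sketch (illustrative): recombining pieces from split_tree.
   With T1 = a + b (already a PLUS_EXPR) and T2 = -y (a NEGATE_EXPR),
   asking for PLUS_EXPR builds a subtraction rather than adding a
   negation, and deliberately skips fold to avoid recursing:

     tree sum = associate_trees (loc, t1, t2, PLUS_EXPR, type);
     // sum is MINUS_EXPR <a + b, y>
*/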
/* Check whether TYPE1 and TYPE2 are equivalent integer types, suitable
   for use in int_const_binop, size_binop and size_diffop.  */

static bool
int_binop_types_match_p (enum tree_code code, const_tree type1, const_tree type2)
{
  if (TREE_CODE (type1) != INTEGER_TYPE && !POINTER_TYPE_P (type1))
    return false;
  if (TREE_CODE (type2) != INTEGER_TYPE && !POINTER_TYPE_P (type2))
    return false;

  switch (code)
    {
    case LSHIFT_EXPR:
    case RSHIFT_EXPR:
    case LROTATE_EXPR:
    case RROTATE_EXPR:
      return true;

    default:
      break;
    }

  return TYPE_UNSIGNED (type1) == TYPE_UNSIGNED (type2)
         && TYPE_PRECISION (type1) == TYPE_PRECISION (type2)
         && TYPE_MODE (type1) == TYPE_MODE (type2);
}
/* Combine two integer constants ARG1 and ARG2 under operation CODE
   to produce a new constant.  Return NULL_TREE if we don't know how
   to evaluate CODE at compile-time.  */

tree
int_const_binop (enum tree_code code, const_tree arg1, const_tree arg2)
{
  double_int op1, op2, res, tmp;
  tree t;
  tree type = TREE_TYPE (arg1);
  bool uns = TYPE_UNSIGNED (type);
  bool is_sizetype
    = (TREE_CODE (type) == INTEGER_TYPE && TYPE_IS_SIZETYPE (type));
  bool overflow = false;

  op1 = tree_to_double_int (arg1);
  op2 = tree_to_double_int (arg2);

  switch (code)
    {
    case BIT_IOR_EXPR:
      res = double_int_ior (op1, op2);
      break;

    case BIT_XOR_EXPR:
      res = double_int_xor (op1, op2);
      break;

    case BIT_AND_EXPR:
      res = double_int_and (op1, op2);
      break;

    case RSHIFT_EXPR:
      res = double_int_rshift (op1, double_int_to_shwi (op2),
                               TYPE_PRECISION (type), !uns);
      break;

    case LSHIFT_EXPR:
      /* It's unclear from the C standard whether shifts can overflow.
         The following code ignores overflow; perhaps a C standard
         interpretation ruling is needed.  */
      res = double_int_lshift (op1, double_int_to_shwi (op2),
                               TYPE_PRECISION (type), !uns);
      break;

    case RROTATE_EXPR:
      res = double_int_rrotate (op1, double_int_to_shwi (op2),
                                TYPE_PRECISION (type));
      break;

    case LROTATE_EXPR:
      res = double_int_lrotate (op1, double_int_to_shwi (op2),
                                TYPE_PRECISION (type));
      break;

    case PLUS_EXPR:
      overflow = add_double (op1.low, op1.high, op2.low, op2.high,
                             &res.low, &res.high);
      break;

    case MINUS_EXPR:
      neg_double (op2.low, op2.high, &res.low, &res.high);
      add_double (op1.low, op1.high, res.low, res.high,
                  &res.low, &res.high);
      overflow = OVERFLOW_SUM_SIGN (res.high, op2.high, op1.high);
      break;

    case MULT_EXPR:
      overflow = mul_double (op1.low, op1.high, op2.low, op2.high,
                             &res.low, &res.high);
      break;
    case TRUNC_DIV_EXPR:
    case FLOOR_DIV_EXPR: case CEIL_DIV_EXPR:
    case EXACT_DIV_EXPR:
      /* This is a shortcut for a common special case.  */
      if (op2.high == 0 && (HOST_WIDE_INT) op2.low > 0
          && !TREE_OVERFLOW (arg1)
          && !TREE_OVERFLOW (arg2)
          && op1.high == 0 && (HOST_WIDE_INT) op1.low >= 0)
        {
          if (code == CEIL_DIV_EXPR)
            op1.low += op2.low - 1;

          res.low = op1.low / op2.low, res.high = 0;
          break;
        }

      /* ... fall through ...  */

    case ROUND_DIV_EXPR:
      if (double_int_zero_p (op2))
        return NULL_TREE;
      if (double_int_one_p (op2))
        {
          res = op1;
          break;
        }
      if (double_int_equal_p (op1, op2)
          && ! double_int_zero_p (op1))
        {
          res = double_int_one;
          break;
        }
      overflow = div_and_round_double (code, uns,
                                       op1.low, op1.high, op2.low, op2.high,
                                       &res.low, &res.high,
                                       &tmp.low, &tmp.high);
      break;

    case TRUNC_MOD_EXPR:
    case FLOOR_MOD_EXPR: case CEIL_MOD_EXPR:
      /* This is a shortcut for a common special case.  */
      if (op2.high == 0 && (HOST_WIDE_INT) op2.low > 0
          && !TREE_OVERFLOW (arg1)
          && !TREE_OVERFLOW (arg2)
          && op1.high == 0 && (HOST_WIDE_INT) op1.low >= 0)
        {
          if (code == CEIL_MOD_EXPR)
            op1.low += op2.low - 1;
          res.low = op1.low % op2.low, res.high = 0;
          break;
        }

      /* ... fall through ...  */

    case ROUND_MOD_EXPR:
      if (double_int_zero_p (op2))
        return NULL_TREE;
      overflow = div_and_round_double (code, uns,
                                       op1.low, op1.high, op2.low, op2.high,
                                       &tmp.low, &tmp.high,
                                       &res.low, &res.high);
      break;

    case MIN_EXPR:
      res = double_int_min (op1, op2, uns);
      break;

    case MAX_EXPR:
      res = double_int_max (op1, op2, uns);
      break;

    default:
      return NULL_TREE;
    }

  t = force_fit_type_double (TREE_TYPE (arg1), res, 1,
                             ((!uns || is_sizetype) && overflow)
                             | TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2));

  return t;
}
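
/* Usage sketch (illustrative): constant-folding 7 % 3 at compile time.

     tree a = build_int_cst (integer_type_node, 7);
     tree b = build_int_cst (integer_type_node, 3);
     tree r = int_const_binop (TRUNC_MOD_EXPR, a, b);
     // r is the INTEGER_CST 1.  Division or modulo by a zero constant
     // returns NULL_TREE instead, and a signed overflow comes back
     // with TREE_OVERFLOW set on the result.
*/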
/* Combine two constants ARG1 and ARG2 under operation CODE to produce a new
   constant.  We assume ARG1 and ARG2 have the same data type, or at least
   are the same kind of constant and the same machine mode.  Return zero if
   combining the constants is not allowed in the current operating mode.  */

static tree
const_binop (enum tree_code code, tree arg1, tree arg2)
{
  /* Sanity check for the recursive cases.  */
  if (!arg1 || !arg2)
    return NULL_TREE;

  STRIP_NOPS (arg1);
  STRIP_NOPS (arg2);

  if (TREE_CODE (arg1) == INTEGER_CST)
    return int_const_binop (code, arg1, arg2);
  if (TREE_CODE (arg1) == REAL_CST)
    {
      enum machine_mode mode;
      REAL_VALUE_TYPE d1;
      REAL_VALUE_TYPE d2;
      REAL_VALUE_TYPE value;
      REAL_VALUE_TYPE result;
      bool inexact;
      tree t, type;

      /* The following codes are handled by real_arithmetic.  */
      switch (code)
        {
        case PLUS_EXPR:
        case MINUS_EXPR:
        case MULT_EXPR:
        case RDIV_EXPR:
        case MIN_EXPR:
        case MAX_EXPR:
          break;

        default:
          return NULL_TREE;
        }

      d1 = TREE_REAL_CST (arg1);
      d2 = TREE_REAL_CST (arg2);

      type = TREE_TYPE (arg1);
      mode = TYPE_MODE (type);

      /* Don't perform operation if we honor signaling NaNs and
         either operand is a NaN.  */
      if (HONOR_SNANS (mode)
          && (REAL_VALUE_ISNAN (d1) || REAL_VALUE_ISNAN (d2)))
        return NULL_TREE;

      /* Don't perform operation if it would raise a division
         by zero exception.  */
      if (code == RDIV_EXPR
          && REAL_VALUES_EQUAL (d2, dconst0)
          && (flag_trapping_math || ! MODE_HAS_INFINITIES (mode)))
        return NULL_TREE;

      /* If either operand is a NaN, just return it.  Otherwise, set up
         for floating-point trap; we return an overflow.  */
      if (REAL_VALUE_ISNAN (d1))
        return arg1;
      else if (REAL_VALUE_ISNAN (d2))
        return arg2;

      inexact = real_arithmetic (&value, code, &d1, &d2);
      real_convert (&result, mode, &value);

      /* Don't constant fold this floating point operation if
         the result has overflowed and flag_trapping_math.  */
      if (flag_trapping_math
          && MODE_HAS_INFINITIES (mode)
          && REAL_VALUE_ISINF (result)
          && !REAL_VALUE_ISINF (d1)
          && !REAL_VALUE_ISINF (d2))
        return NULL_TREE;

      /* Don't constant fold this floating point operation if the
         result may depend upon the run-time rounding mode and
         flag_rounding_math is set, or if GCC's software emulation
         is unable to accurately represent the result.  */
      if ((flag_rounding_math
           || (MODE_COMPOSITE_P (mode) && !flag_unsafe_math_optimizations))
          && (inexact || !real_identical (&result, &value)))
        return NULL_TREE;

      t = build_real (type, result);

      TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2);
      return t;
    }
  if (TREE_CODE (arg1) == FIXED_CST)
    {
      FIXED_VALUE_TYPE f1;
      FIXED_VALUE_TYPE f2;
      FIXED_VALUE_TYPE result;
      tree t, type;
      int sat_p;
      bool overflow_p;

      /* The following codes are handled by fixed_arithmetic.  */
      switch (code)
        {
        case PLUS_EXPR:
        case MINUS_EXPR:
        case MULT_EXPR:
        case TRUNC_DIV_EXPR:
          f2 = TREE_FIXED_CST (arg2);
          break;

        case LSHIFT_EXPR:
        case RSHIFT_EXPR:
          f2.data.high = TREE_INT_CST_HIGH (arg2);
          f2.data.low = TREE_INT_CST_LOW (arg2);
          f2.mode = SImode;
          break;

        default:
          return NULL_TREE;
        }

      f1 = TREE_FIXED_CST (arg1);
      type = TREE_TYPE (arg1);
      sat_p = TYPE_SATURATING (type);
      overflow_p = fixed_arithmetic (&result, code, &f1, &f2, sat_p);
      t = build_fixed (type, result);
      /* Propagate overflow flags.  */
      if (overflow_p | TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2))
        TREE_OVERFLOW (t) = 1;
      return t;
    }
  if (TREE_CODE (arg1) == COMPLEX_CST)
    {
      tree type = TREE_TYPE (arg1);
      tree r1 = TREE_REALPART (arg1);
      tree i1 = TREE_IMAGPART (arg1);
      tree r2 = TREE_REALPART (arg2);
      tree i2 = TREE_IMAGPART (arg2);
      tree real, imag;

      switch (code)
        {
        case PLUS_EXPR:
        case MINUS_EXPR:
          real = const_binop (code, r1, r2);
          imag = const_binop (code, i1, i2);
          break;

        case MULT_EXPR:
          if (COMPLEX_FLOAT_TYPE_P (type))
            return do_mpc_arg2 (arg1, arg2, type,
                                /* do_nonfinite= */ folding_initializer,
                                mpc_mul);

          real = const_binop (MINUS_EXPR,
                              const_binop (MULT_EXPR, r1, r2),
                              const_binop (MULT_EXPR, i1, i2));
          imag = const_binop (PLUS_EXPR,
                              const_binop (MULT_EXPR, r1, i2),
                              const_binop (MULT_EXPR, i1, r2));
          break;

        case RDIV_EXPR:
          if (COMPLEX_FLOAT_TYPE_P (type))
            return do_mpc_arg2 (arg1, arg2, type,
                                /* do_nonfinite= */ folding_initializer,
                                mpc_div);
          /* Fallthru ... */
        case TRUNC_DIV_EXPR:
        case CEIL_DIV_EXPR:
        case FLOOR_DIV_EXPR:
        case ROUND_DIV_EXPR:
          if (flag_complex_method == 0)
            {
              /* Keep this algorithm in sync with
                 tree-complex.c:expand_complex_div_straight().

                 Expand complex division to scalars, straightforward algorithm.
                 a / b = ((ar*br + ai*bi)/t) + i((ai*br - ar*bi)/t)
                 t = br*br + bi*bi  */
              tree magsquared
                = const_binop (PLUS_EXPR,
                               const_binop (MULT_EXPR, r2, r2),
                               const_binop (MULT_EXPR, i2, i2));
              tree t1
                = const_binop (PLUS_EXPR,
                               const_binop (MULT_EXPR, r1, r2),
                               const_binop (MULT_EXPR, i1, i2));
              tree t2
                = const_binop (MINUS_EXPR,
                               const_binop (MULT_EXPR, i1, r2),
                               const_binop (MULT_EXPR, r1, i2));

              real = const_binop (code, t1, magsquared);
              imag = const_binop (code, t2, magsquared);
            }
          else
            {
              /* Keep this algorithm in sync with
                 tree-complex.c:expand_complex_div_wide().

                 Expand complex division to scalars, modified algorithm to
                 minimize overflow with wide input ranges.  */
              tree compare = fold_build2 (LT_EXPR, boolean_type_node,
                                          fold_abs_const (r2, TREE_TYPE (type)),
                                          fold_abs_const (i2, TREE_TYPE (type)));
              if (integer_nonzerop (compare))
                {
                  /* In the TRUE branch, we compute
                     ratio = br/bi;
                     div = (br * ratio) + bi;
                     tr = (ar * ratio) + ai;
                     ti = (ai * ratio) - ar;
                     tr = tr / div;
                     ti = ti / div;  */
                  tree ratio = const_binop (code, r2, i2);
                  tree div = const_binop (PLUS_EXPR, i2,
                                          const_binop (MULT_EXPR, r2, ratio));
                  real = const_binop (MULT_EXPR, r1, ratio);
                  real = const_binop (PLUS_EXPR, real, i1);
                  real = const_binop (code, real, div);

                  imag = const_binop (MULT_EXPR, i1, ratio);
                  imag = const_binop (MINUS_EXPR, imag, r1);
                  imag = const_binop (code, imag, div);
                }
              else
                {
                  /* In the FALSE branch, we compute
                     ratio = bi/br;
                     div = (bi * ratio) + br;
                     tr = (ai * ratio) + ar;
                     ti = ai - (ar * ratio);
                     tr = tr / div;
                     ti = ti / div;  */
                  tree ratio = const_binop (code, i2, r2);
                  tree div = const_binop (PLUS_EXPR, r2,
                                          const_binop (MULT_EXPR, i2, ratio));

                  real = const_binop (MULT_EXPR, i1, ratio);
                  real = const_binop (PLUS_EXPR, real, r1);
                  real = const_binop (code, real, div);

                  imag = const_binop (MULT_EXPR, r1, ratio);
                  imag = const_binop (MINUS_EXPR, i1, imag);
                  imag = const_binop (code, imag, div);
                }
            }
          break;

        default:
          return NULL_TREE;
        }

      if (real && imag)
        return build_complex (type, real, imag);
    }
  if (TREE_CODE (arg1) == VECTOR_CST)
    {
      tree type = TREE_TYPE (arg1);
      int count = TYPE_VECTOR_SUBPARTS (type), i;
      tree elements1, elements2, list = NULL_TREE;

      if (TREE_CODE (arg2) != VECTOR_CST)
        return NULL_TREE;

      elements1 = TREE_VECTOR_CST_ELTS (arg1);
      elements2 = TREE_VECTOR_CST_ELTS (arg2);

      for (i = 0; i < count; i++)
        {
          tree elem1, elem2, elem;

          /* The trailing elements can be empty and should be treated as 0.  */
          if (!elements1)
            elem1 = fold_convert_const (NOP_EXPR, TREE_TYPE (type),
                                        integer_zero_node);
          else
            {
              elem1 = TREE_VALUE (elements1);
              elements1 = TREE_CHAIN (elements1);
            }

          if (!elements2)
            elem2 = fold_convert_const (NOP_EXPR, TREE_TYPE (type),
                                        integer_zero_node);
          else
            {
              elem2 = TREE_VALUE (elements2);
              elements2 = TREE_CHAIN (elements2);
            }

          elem = const_binop (code, elem1, elem2);

          /* It is possible that const_binop cannot handle the given
             code and returns NULL_TREE.  */
          if (elem == NULL_TREE)
            return NULL_TREE;

          list = tree_cons (NULL_TREE, elem, list);
        }
      return build_vector (type, nreverse (list));
    }

  return NULL_TREE;
}
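
/* Worked example (illustrative) of the wide-range division branch
   above (flag_complex_method != 0; the method is commonly attributed
   to Smith): (1 + 2i) / (3 + 4i).  Since |br| = 3 < |bi| = 4, the
   TRUE branch runs:

     ratio = 3 / 4                  = 0.75
     div   = 4 + 3 * 0.75           = 6.25
     tr    = (1 * 0.75 + 2) / 6.25  = 0.44
     ti    = (2 * 0.75 - 1) / 6.25  = 0.08

   which matches (1 + 2i)(3 - 4i) / 25 = (11 + 2i) / 25.  Scaling by
   RATIO first keeps the intermediate products small, which is the
   point of the method.  */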
/* Create a size type INT_CST node with NUMBER sign extended.  KIND
   indicates which particular sizetype to create.  */

tree
size_int_kind (HOST_WIDE_INT number, enum size_type_kind kind)
{
  return build_int_cst (sizetype_tab[(int) kind], number);
}
/* Combine operands OP1 and OP2 with arithmetic operation CODE.  CODE
   is a tree code.  The type of the result is taken from the operands.
   Both must be equivalent integer types, ala int_binop_types_match_p.
   If the operands are constant, so is the result.  */

tree
size_binop_loc (location_t loc, enum tree_code code, tree arg0, tree arg1)
{
  tree type = TREE_TYPE (arg0);

  if (arg0 == error_mark_node || arg1 == error_mark_node)
    return error_mark_node;

  gcc_assert (int_binop_types_match_p (code, TREE_TYPE (arg0),
                                       TREE_TYPE (arg1)));

  /* Handle the special case of two integer constants faster.  */
  if (TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
    {
      /* And some specific cases even faster than that.  */
      if (code == PLUS_EXPR)
        {
          if (integer_zerop (arg0) && !TREE_OVERFLOW (arg0))
            return arg1;
          if (integer_zerop (arg1) && !TREE_OVERFLOW (arg1))
            return arg0;
        }
      else if (code == MINUS_EXPR)
        {
          if (integer_zerop (arg1) && !TREE_OVERFLOW (arg1))
            return arg0;
        }
      else if (code == MULT_EXPR)
        {
          if (integer_onep (arg0) && !TREE_OVERFLOW (arg0))
            return arg1;
        }

      /* Handle general case of two integer constants.  */
      return int_const_binop (code, arg0, arg1);
    }

  return fold_build2_loc (loc, code, type, arg0, arg1);
}
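
/* Usage sketch (illustrative): adding two sizetype constants folds on
   the spot via int_const_binop.

     tree s = size_binop (PLUS_EXPR, size_int (4), size_int (8));
     // s is the INTEGER_CST 12 in sizetype
*/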
/* Given two values, either both of sizetype or both of bitsizetype,
   compute the difference between the two values.  Return the value
   in signed type corresponding to the type of the operands.  */

tree
size_diffop_loc (location_t loc, tree arg0, tree arg1)
{
  tree type = TREE_TYPE (arg0);
  tree ctype;

  gcc_assert (int_binop_types_match_p (MINUS_EXPR, TREE_TYPE (arg0),
                                       TREE_TYPE (arg1)));

  /* If the type is already signed, just do the simple thing.  */
  if (!TYPE_UNSIGNED (type))
    return size_binop_loc (loc, MINUS_EXPR, arg0, arg1);

  if (type == sizetype)
    ctype = ssizetype;
  else if (type == bitsizetype)
    ctype = sbitsizetype;
  else
    ctype = signed_type_for (type);

  /* If either operand is not a constant, do the conversions to the signed
     type and subtract.  The hardware will do the right thing with any
     overflow in the subtraction.  */
  if (TREE_CODE (arg0) != INTEGER_CST || TREE_CODE (arg1) != INTEGER_CST)
    return size_binop_loc (loc, MINUS_EXPR,
                           fold_convert_loc (loc, ctype, arg0),
                           fold_convert_loc (loc, ctype, arg1));

  /* If ARG0 is larger than ARG1, subtract and return the result in CTYPE.
     Otherwise, subtract the other way, convert to CTYPE (we know that can't
     overflow) and negate (which can't either).  Special-case a result
     of zero while we're here.  */
  if (tree_int_cst_equal (arg0, arg1))
    return build_int_cst (ctype, 0);
  else if (tree_int_cst_lt (arg1, arg0))
    return fold_convert_loc (loc, ctype,
                             size_binop_loc (loc, MINUS_EXPR, arg0, arg1));
  else
    return size_binop_loc (loc, MINUS_EXPR, build_int_cst (ctype, 0),
                           fold_convert_loc (loc, ctype,
                                             size_binop_loc (loc,
                                                             MINUS_EXPR,
                                                             arg1, arg0)));
}
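
/* Worked example (illustrative): with sizetype constants ARG0 = 2 and
   ARG1 = 5, the unsigned subtraction 2 - 5 would wrap, so the code
   instead computes 5 - 2 = 3, converts to ssizetype, and subtracts
   from zero: the result is the ssizetype constant -3.  */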
/* A subroutine of fold_convert_const handling conversions of an
   INTEGER_CST to another integer type.  */

static tree
fold_convert_const_int_from_int (tree type, const_tree arg1)
{
  tree t;

  /* Given an integer constant, make new constant with new type,
     appropriately sign-extended or truncated.  */
  t = force_fit_type_double (type, tree_to_double_int (arg1),
                             !POINTER_TYPE_P (TREE_TYPE (arg1)),
                             (TREE_INT_CST_HIGH (arg1) < 0
                              && (TYPE_UNSIGNED (type)
                                  < TYPE_UNSIGNED (TREE_TYPE (arg1))))
                             | TREE_OVERFLOW (arg1));

  return t;
}
/* A subroutine of fold_convert_const handling conversions of a REAL_CST
   to an integer type.  */

static tree
fold_convert_const_int_from_real (enum tree_code code, tree type, const_tree arg1)
{
  int overflow = 0;
  tree t;

  /* The following code implements the floating point to integer
     conversion rules required by the Java Language Specification,
     that IEEE NaNs are mapped to zero and values that overflow
     the target precision saturate, i.e. values greater than
     INT_MAX are mapped to INT_MAX, and values less than INT_MIN
     are mapped to INT_MIN.  These semantics are allowed by the
     C and C++ standards that simply state that the behavior of
     FP-to-integer conversion is unspecified upon overflow.  */

  double_int val;
  REAL_VALUE_TYPE r;
  REAL_VALUE_TYPE x = TREE_REAL_CST (arg1);

  switch (code)
    {
    case FIX_TRUNC_EXPR:
      real_trunc (&r, VOIDmode, &x);
      break;

    default:
      gcc_unreachable ();
    }

  /* If R is NaN, return zero and show we have an overflow.  */
  if (REAL_VALUE_ISNAN (r))
    {
      overflow = 1;
      val = double_int_zero;
    }

  /* See if R is less than the lower bound or greater than the
     upper bound.  */

  if (! overflow)
    {
      tree lt = TYPE_MIN_VALUE (type);
      REAL_VALUE_TYPE l = real_value_from_int_cst (NULL_TREE, lt);
      if (REAL_VALUES_LESS (r, l))
        {
          overflow = 1;
          val = tree_to_double_int (lt);
        }
    }

  if (! overflow)
    {
      tree ut = TYPE_MAX_VALUE (type);
      if (ut)
        {
          REAL_VALUE_TYPE u = real_value_from_int_cst (NULL_TREE, ut);
          if (REAL_VALUES_LESS (u, r))
            {
              overflow = 1;
              val = tree_to_double_int (ut);
            }
        }
    }

  if (! overflow)
    real_to_integer2 ((HOST_WIDE_INT *) &val.low, &val.high, &r);

  t = force_fit_type_double (type, val, -1, overflow | TREE_OVERFLOW (arg1));
  return t;
}
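
/* Behavior summary (illustrative), converting a REAL_CST to 32-bit
   signed int with FIX_TRUNC_EXPR:

     3.9    ->  3          (truncate toward zero)
     -3.9   ->  -3
     NaN    ->  0          (overflow flag set)
     1e99   ->  INT_MAX    (saturates; overflow flag set)
     -1e99  ->  INT_MIN    (saturates; overflow flag set)
*/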
/* A subroutine of fold_convert_const handling conversions of a
   FIXED_CST to an integer type.  */

static tree
fold_convert_const_int_from_fixed (tree type, const_tree arg1)
{
  tree t;
  double_int temp, temp_trunc;
  unsigned int mode;

  /* Right shift FIXED_CST to temp by fbit.  */
  temp = TREE_FIXED_CST (arg1).data;
  mode = TREE_FIXED_CST (arg1).mode;
  if (GET_MODE_FBIT (mode) < 2 * HOST_BITS_PER_WIDE_INT)
    {
      temp = double_int_rshift (temp, GET_MODE_FBIT (mode),
                                HOST_BITS_PER_DOUBLE_INT,
                                SIGNED_FIXED_POINT_MODE_P (mode));

      /* Left shift temp to temp_trunc by fbit.  */
      temp_trunc = double_int_lshift (temp, GET_MODE_FBIT (mode),
                                      HOST_BITS_PER_DOUBLE_INT,
                                      SIGNED_FIXED_POINT_MODE_P (mode));
    }
  else
    {
      temp = double_int_zero;
      temp_trunc = double_int_zero;
    }

  /* If FIXED_CST is negative, we need to round the value toward 0:
     if any fractional bits were shifted out, add 1 to TEMP.  */
  if (SIGNED_FIXED_POINT_MODE_P (mode)
      && double_int_negative_p (temp_trunc)
      && !double_int_equal_p (TREE_FIXED_CST (arg1).data, temp_trunc))
    temp = double_int_add (temp, double_int_one);

  /* Given a fixed-point constant, make new constant with new type,
     appropriately sign-extended or truncated.  */
  t = force_fit_type_double (type, temp, -1,
                             (double_int_negative_p (temp)
                              && (TYPE_UNSIGNED (type)
                                  < TYPE_UNSIGNED (TREE_TYPE (arg1))))
                             | TREE_OVERFLOW (arg1));

  return t;
}
/* A subroutine of fold_convert_const handling conversions of a REAL_CST
   to another floating point type.  */

static tree
fold_convert_const_real_from_real (tree type, const_tree arg1)
{
  REAL_VALUE_TYPE value;
  tree t;

  real_convert (&value, TYPE_MODE (type), &TREE_REAL_CST (arg1));
  t = build_real (type, value);

  /* If converting an infinity or NAN to a representation that doesn't
     have one, set the overflow bit so that we can produce some kind of
     error message at the appropriate point if necessary.  It's not the
     most user-friendly message, but it's better than nothing.  */
  if (REAL_VALUE_ISINF (TREE_REAL_CST (arg1))
      && !MODE_HAS_INFINITIES (TYPE_MODE (type)))
    TREE_OVERFLOW (t) = 1;
  else if (REAL_VALUE_ISNAN (TREE_REAL_CST (arg1))
           && !MODE_HAS_NANS (TYPE_MODE (type)))
    TREE_OVERFLOW (t) = 1;
  /* Regular overflow, conversion produced an infinity in a mode that
     can't represent them.  */
  else if (!MODE_HAS_INFINITIES (TYPE_MODE (type))
           && REAL_VALUE_ISINF (value)
           && !REAL_VALUE_ISINF (TREE_REAL_CST (arg1)))
    TREE_OVERFLOW (t) = 1;
  else
    TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1);
  return t;
}
/* A subroutine of fold_convert_const handling conversions of a FIXED_CST
   to a floating point type.  */

static tree
fold_convert_const_real_from_fixed (tree type, const_tree arg1)
{
  REAL_VALUE_TYPE value;
  tree t;

  real_convert_from_fixed (&value, TYPE_MODE (type), &TREE_FIXED_CST (arg1));
  t = build_real (type, value);

  TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1);
  return t;
}
/* A subroutine of fold_convert_const handling conversions of a FIXED_CST
   to another fixed-point type.  */

static tree
fold_convert_const_fixed_from_fixed (tree type, const_tree arg1)
{
  FIXED_VALUE_TYPE value;
  tree t;
  bool overflow_p;

  overflow_p = fixed_convert (&value, TYPE_MODE (type), &TREE_FIXED_CST (arg1),
                              TYPE_SATURATING (type));
  t = build_fixed (type, value);

  /* Propagate overflow flags.  */
  if (overflow_p | TREE_OVERFLOW (arg1))
    TREE_OVERFLOW (t) = 1;
  return t;
}
/* A subroutine of fold_convert_const handling conversions of an
   INTEGER_CST to a fixed-point type.  */

static tree
fold_convert_const_fixed_from_int (tree type, const_tree arg1)
{
  FIXED_VALUE_TYPE value;
  tree t;
  bool overflow_p;

  overflow_p = fixed_convert_from_int (&value, TYPE_MODE (type),
                                       TREE_INT_CST (arg1),
                                       TYPE_UNSIGNED (TREE_TYPE (arg1)),
                                       TYPE_SATURATING (type));
  t = build_fixed (type, value);

  /* Propagate overflow flags.  */
  if (overflow_p | TREE_OVERFLOW (arg1))
    TREE_OVERFLOW (t) = 1;
  return t;
}
/* A subroutine of fold_convert_const handling conversions of a REAL_CST
   to a fixed-point type.  */

static tree
fold_convert_const_fixed_from_real (tree type, const_tree arg1)
{
  FIXED_VALUE_TYPE value;
  tree t;
  bool overflow_p;

  overflow_p = fixed_convert_from_real (&value, TYPE_MODE (type),
                                        &TREE_REAL_CST (arg1),
                                        TYPE_SATURATING (type));
  t = build_fixed (type, value);

  /* Propagate overflow flags.  */
  if (overflow_p | TREE_OVERFLOW (arg1))
    TREE_OVERFLOW (t) = 1;
  return t;
}
/* Attempt to fold type conversion operation CODE of expression ARG1 to
   type TYPE.  If no simplification can be done return NULL_TREE.  */

static tree
fold_convert_const (enum tree_code code, tree type, tree arg1)
{
  if (TREE_TYPE (arg1) == type)
    return arg1;

  if (POINTER_TYPE_P (type) || INTEGRAL_TYPE_P (type)
      || TREE_CODE (type) == OFFSET_TYPE)
    {
      if (TREE_CODE (arg1) == INTEGER_CST)
        return fold_convert_const_int_from_int (type, arg1);
      else if (TREE_CODE (arg1) == REAL_CST)
        return fold_convert_const_int_from_real (code, type, arg1);
      else if (TREE_CODE (arg1) == FIXED_CST)
        return fold_convert_const_int_from_fixed (type, arg1);
    }
  else if (TREE_CODE (type) == REAL_TYPE)
    {
      if (TREE_CODE (arg1) == INTEGER_CST)
        return build_real_from_int_cst (type, arg1);
      else if (TREE_CODE (arg1) == REAL_CST)
        return fold_convert_const_real_from_real (type, arg1);
      else if (TREE_CODE (arg1) == FIXED_CST)
        return fold_convert_const_real_from_fixed (type, arg1);
    }
  else if (TREE_CODE (type) == FIXED_POINT_TYPE)
    {
      if (TREE_CODE (arg1) == FIXED_CST)
        return fold_convert_const_fixed_from_fixed (type, arg1);
      else if (TREE_CODE (arg1) == INTEGER_CST)
        return fold_convert_const_fixed_from_int (type, arg1);
      else if (TREE_CODE (arg1) == REAL_CST)
        return fold_convert_const_fixed_from_real (type, arg1);
    }
  return NULL_TREE;
}
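
/* Usage sketch (illustrative):

     tree two = build_real (double_type_node, dconst2);   // 2.0
     tree i = fold_convert_const (FIX_TRUNC_EXPR,
                                  integer_type_node, two);
     // i is the INTEGER_CST 2; a pairing with no handler above
     // simply returns NULL_TREE.
*/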
/* Construct a vector of zero elements of vector type TYPE.  */

static tree
build_zero_vector (tree type)
{
  tree t;

  t = fold_convert_const (NOP_EXPR, TREE_TYPE (type), integer_zero_node);
  return build_vector_from_val (type, t);
}
/* Returns true if ARG is convertible to TYPE using a NOP_EXPR.  */

bool
fold_convertible_p (const_tree type, const_tree arg)
{
  tree orig = TREE_TYPE (arg);

  if (type == orig)
    return true;

  if (TREE_CODE (arg) == ERROR_MARK
      || TREE_CODE (type) == ERROR_MARK
      || TREE_CODE (orig) == ERROR_MARK)
    return false;

  if (TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (orig))
    return true;

  switch (TREE_CODE (type))
    {
    case INTEGER_TYPE: case ENUMERAL_TYPE: case BOOLEAN_TYPE:
    case POINTER_TYPE: case REFERENCE_TYPE:
    case OFFSET_TYPE:
      if (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig)
          || TREE_CODE (orig) == OFFSET_TYPE)
        return true;
      return (TREE_CODE (orig) == VECTOR_TYPE
              && tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (orig)));

    case REAL_TYPE:
    case FIXED_POINT_TYPE:
    case COMPLEX_TYPE:
    case VECTOR_TYPE:
    case VOID_TYPE:
      return TREE_CODE (type) == TREE_CODE (orig);

    default:
      return false;
    }
}
/* Convert expression ARG to type TYPE.  Used by the middle-end for
   simple conversions in preference to calling the front-end's convert.  */

tree
fold_convert_loc (location_t loc, tree type, tree arg)
{
  tree orig = TREE_TYPE (arg);
  tree tem;

  if (type == orig)
    return arg;

  if (TREE_CODE (arg) == ERROR_MARK
      || TREE_CODE (type) == ERROR_MARK
      || TREE_CODE (orig) == ERROR_MARK)
    return error_mark_node;

  switch (TREE_CODE (type))
    {
    case POINTER_TYPE:
    case REFERENCE_TYPE:
      /* Handle conversions between pointers to different address spaces.  */
      if (POINTER_TYPE_P (orig)
          && (TYPE_ADDR_SPACE (TREE_TYPE (type))
              != TYPE_ADDR_SPACE (TREE_TYPE (orig))))
        return fold_build1_loc (loc, ADDR_SPACE_CONVERT_EXPR, type, arg);
      /* fall through */

    case INTEGER_TYPE: case ENUMERAL_TYPE: case BOOLEAN_TYPE:
    case OFFSET_TYPE:
      if (TREE_CODE (arg) == INTEGER_CST)
        {
          tem = fold_convert_const (NOP_EXPR, type, arg);
          if (tem != NULL_TREE)
            return tem;
        }
      if (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig)
          || TREE_CODE (orig) == OFFSET_TYPE)
        return fold_build1_loc (loc, NOP_EXPR, type, arg);
      if (TREE_CODE (orig) == COMPLEX_TYPE)
        return fold_convert_loc (loc, type,
                                 fold_build1_loc (loc, REALPART_EXPR,
                                                  TREE_TYPE (orig), arg));
      gcc_assert (TREE_CODE (orig) == VECTOR_TYPE
                  && tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (orig)));
      return fold_build1_loc (loc, NOP_EXPR, type, arg);

    case REAL_TYPE:
      if (TREE_CODE (arg) == INTEGER_CST)
        {
          tem = fold_convert_const (FLOAT_EXPR, type, arg);
          if (tem != NULL_TREE)
            return tem;
        }
      else if (TREE_CODE (arg) == REAL_CST)
        {
          tem = fold_convert_const (NOP_EXPR, type, arg);
          if (tem != NULL_TREE)
            return tem;
        }
      else if (TREE_CODE (arg) == FIXED_CST)
        {
          tem = fold_convert_const (FIXED_CONVERT_EXPR, type, arg);
          if (tem != NULL_TREE)
            return tem;
        }

      switch (TREE_CODE (orig))
        {
        case INTEGER_TYPE:
        case BOOLEAN_TYPE: case ENUMERAL_TYPE:
        case POINTER_TYPE: case REFERENCE_TYPE:
          return fold_build1_loc (loc, FLOAT_EXPR, type, arg);

        case REAL_TYPE:
          return fold_build1_loc (loc, NOP_EXPR, type, arg);

        case FIXED_POINT_TYPE:
          return fold_build1_loc (loc, FIXED_CONVERT_EXPR, type, arg);

        case COMPLEX_TYPE:
          tem = fold_build1_loc (loc, REALPART_EXPR, TREE_TYPE (orig), arg);
          return fold_convert_loc (loc, type, tem);

        default:
          gcc_unreachable ();
        }
1938 case FIXED_POINT_TYPE:
1939 if (TREE_CODE (arg) == FIXED_CST || TREE_CODE (arg) == INTEGER_CST
1940 || TREE_CODE (arg) == REAL_CST)
1942 tem = fold_convert_const (FIXED_CONVERT_EXPR, type, arg);
1943 if (tem != NULL_TREE)
1944 goto fold_convert_exit;
1947 switch (TREE_CODE (orig))
1949 case FIXED_POINT_TYPE:
1950 case INTEGER_TYPE:
1951 case ENUMERAL_TYPE:
1952 case BOOLEAN_TYPE:
1953 case REAL_TYPE:
1954 return fold_build1_loc (loc, FIXED_CONVERT_EXPR, type, arg);
1956 case COMPLEX_TYPE:
1957 tem = fold_build1_loc (loc, REALPART_EXPR, TREE_TYPE (orig), arg);
1958 return fold_convert_loc (loc, type, tem);
1960 default:
1961 gcc_unreachable ();
1964 case COMPLEX_TYPE:
1965 switch (TREE_CODE (orig))
1967 case INTEGER_TYPE:
1968 case BOOLEAN_TYPE: case ENUMERAL_TYPE:
1969 case POINTER_TYPE: case REFERENCE_TYPE:
1970 case REAL_TYPE:
1971 case FIXED_POINT_TYPE:
1972 return fold_build2_loc (loc, COMPLEX_EXPR, type,
1973 fold_convert_loc (loc, TREE_TYPE (type), arg),
1974 fold_convert_loc (loc, TREE_TYPE (type),
1975 integer_zero_node));
1976 case COMPLEX_TYPE:
1978 tree rpart, ipart;
1980 if (TREE_CODE (arg) == COMPLEX_EXPR)
1982 rpart = fold_convert_loc (loc, TREE_TYPE (type),
1983 TREE_OPERAND (arg, 0));
1984 ipart = fold_convert_loc (loc, TREE_TYPE (type),
1985 TREE_OPERAND (arg, 1));
1986 return fold_build2_loc (loc, COMPLEX_EXPR, type, rpart, ipart);
1989 arg = save_expr (arg);
1990 rpart = fold_build1_loc (loc, REALPART_EXPR, TREE_TYPE (orig), arg);
1991 ipart = fold_build1_loc (loc, IMAGPART_EXPR, TREE_TYPE (orig), arg);
1992 rpart = fold_convert_loc (loc, TREE_TYPE (type), rpart);
1993 ipart = fold_convert_loc (loc, TREE_TYPE (type), ipart);
1994 return fold_build2_loc (loc, COMPLEX_EXPR, type, rpart, ipart);
1997 default:
1998 gcc_unreachable ();
2001 case VECTOR_TYPE:
2002 if (integer_zerop (arg))
2003 return build_zero_vector (type);
2004 gcc_assert (tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (orig)));
2005 gcc_assert (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig)
2006 || TREE_CODE (orig) == VECTOR_TYPE);
2007 return fold_build1_loc (loc, VIEW_CONVERT_EXPR, type, arg);
2009 case VOID_TYPE:
2010 tem = fold_ignored_result (arg);
2011 return fold_build1_loc (loc, NOP_EXPR, type, tem);
2013 default:
2014 if (TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (orig))
2015 return fold_build1_loc (loc, NOP_EXPR, type, arg);
2016 gcc_unreachable ();
2018 fold_convert_exit:
2019 protected_set_expr_location_unshare (tem, loc);
2020 return tem;
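/* Worked example (illustrative; I and C are hypothetical operands):
   converting an int I to complex double takes the COMPLEX_TYPE arm
   above and builds
     COMPLEX_EXPR <(double) I, 0.0>,
   while converting a complex value C to double takes the REAL_TYPE arm
   and keeps only REALPART_EXPR <C>.  */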
2023 /* Return false if expr can be assumed not to be an lvalue, true
2024 otherwise. */
2026 static bool
2027 maybe_lvalue_p (const_tree x)
2029 /* We only need to wrap lvalue tree codes. */
2030 switch (TREE_CODE (x))
2032 case VAR_DECL:
2033 case PARM_DECL:
2034 case RESULT_DECL:
2035 case LABEL_DECL:
2036 case FUNCTION_DECL:
2037 case SSA_NAME:
2039 case COMPONENT_REF:
2040 case MEM_REF:
2041 case INDIRECT_REF:
2042 case ARRAY_REF:
2043 case ARRAY_RANGE_REF:
2044 case BIT_FIELD_REF:
2045 case OBJ_TYPE_REF:
2047 case REALPART_EXPR:
2048 case IMAGPART_EXPR:
2049 case PREINCREMENT_EXPR:
2050 case PREDECREMENT_EXPR:
2051 case SAVE_EXPR:
2052 case TRY_CATCH_EXPR:
2053 case WITH_CLEANUP_EXPR:
2054 case COMPOUND_EXPR:
2055 case MODIFY_EXPR:
2056 case TARGET_EXPR:
2057 case COND_EXPR:
2058 case BIND_EXPR:
2059 break;
2061 default:
2062 /* Assume the worst for front-end tree codes. */
2063 if ((int)TREE_CODE (x) >= NUM_TREE_CODES)
2064 break;
2065 return false;
2068 return true;
2071 /* Return an expr equal to X but certainly not valid as an lvalue. */
2073 tree
2074 non_lvalue_loc (location_t loc, tree x)
2076 /* While we are in GIMPLE, NON_LVALUE_EXPR doesn't mean anything to
2077 us. */
2078 if (in_gimple_form)
2079 return x;
2081 if (! maybe_lvalue_p (x))
2082 return x;
2083 return build1_loc (loc, NON_LVALUE_EXPR, TREE_TYPE (x), x);
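/* For example (illustrative): when fold simplifies "x + 0" to plain
   "x", the result is wrapped as NON_LVALUE_EXPR <x> so that front ends
   do not treat the folded expression as assignable; for expressions
   that can never be lvalues anyway, such as "x + y", maybe_lvalue_p
   returns false and the wrapper is omitted.  */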
2086 /* Nonzero means lvalues are limited to those valid in pedantic ANSI C.
2087 Zero means allow extended lvalues. */
2089 int pedantic_lvalues;
2091 /* When pedantic, return an expr equal to X but certainly not valid as a
2092 pedantic lvalue. Otherwise, return X. */
2094 static tree
2095 pedantic_non_lvalue_loc (location_t loc, tree x)
2097 if (pedantic_lvalues)
2098 return non_lvalue_loc (loc, x);
2100 return protected_set_expr_location_unshare (x, loc);
2103 /* Given a tree comparison code, return the code that is the logical inverse.
2104 It is generally not safe to do this for floating-point comparisons, except
2105 for EQ_EXPR and NE_EXPR, so we return ERROR_MARK in this case. */
2107 enum tree_code
2108 invert_tree_comparison (enum tree_code code, bool honor_nans)
2110 if (honor_nans && flag_trapping_math && code != EQ_EXPR && code != NE_EXPR)
2111 return ERROR_MARK;
2113 switch (code)
2115 case EQ_EXPR:
2116 return NE_EXPR;
2117 case NE_EXPR:
2118 return EQ_EXPR;
2119 case GT_EXPR:
2120 return honor_nans ? UNLE_EXPR : LE_EXPR;
2121 case GE_EXPR:
2122 return honor_nans ? UNLT_EXPR : LT_EXPR;
2123 case LT_EXPR:
2124 return honor_nans ? UNGE_EXPR : GE_EXPR;
2125 case LE_EXPR:
2126 return honor_nans ? UNGT_EXPR : GT_EXPR;
2127 case LTGT_EXPR:
2128 return UNEQ_EXPR;
2129 case UNEQ_EXPR:
2130 return LTGT_EXPR;
2131 case UNGT_EXPR:
2132 return LE_EXPR;
2133 case UNGE_EXPR:
2134 return LT_EXPR;
2135 case UNLT_EXPR:
2136 return GE_EXPR;
2137 case UNLE_EXPR:
2138 return GT_EXPR;
2139 case ORDERED_EXPR:
2140 return UNORDERED_EXPR;
2141 case UNORDERED_EXPR:
2142 return ORDERED_EXPR;
2143 default:
2144 gcc_unreachable ();
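/* Example (illustrative): with NaNs honored, the inverse of "a < b"
   must be true when a > b, a == b, or a and b are unordered, which is
   exactly UNGE_EXPR; plain GE_EXPR would be false on a NaN operand.
   Under trapping math we give up (ERROR_MARK) because LT_EXPR may
   raise an invalid-operand exception on NaNs while UNGE_EXPR must
   not.  */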
2148 /* Similar, but return the comparison that results if the operands are
2149 swapped. This is safe for floating-point. */
2151 enum tree_code
2152 swap_tree_comparison (enum tree_code code)
2154 switch (code)
2156 case EQ_EXPR:
2157 case NE_EXPR:
2158 case ORDERED_EXPR:
2159 case UNORDERED_EXPR:
2160 case LTGT_EXPR:
2161 case UNEQ_EXPR:
2162 return code;
2163 case GT_EXPR:
2164 return LT_EXPR;
2165 case GE_EXPR:
2166 return LE_EXPR;
2167 case LT_EXPR:
2168 return GT_EXPR;
2169 case LE_EXPR:
2170 return GE_EXPR;
2171 case UNGT_EXPR:
2172 return UNLT_EXPR;
2173 case UNGE_EXPR:
2174 return UNLE_EXPR;
2175 case UNLT_EXPR:
2176 return UNGT_EXPR;
2177 case UNLE_EXPR:
2178 return UNGE_EXPR;
2179 default:
2180 gcc_unreachable ();
2185 /* Convert a comparison tree code from an enum tree_code representation
2186 into a compcode bit-based encoding. This function is the inverse of
2187 compcode_to_comparison. */
2189 static enum comparison_code
2190 comparison_to_compcode (enum tree_code code)
2192 switch (code)
2194 case LT_EXPR:
2195 return COMPCODE_LT;
2196 case EQ_EXPR:
2197 return COMPCODE_EQ;
2198 case LE_EXPR:
2199 return COMPCODE_LE;
2200 case GT_EXPR:
2201 return COMPCODE_GT;
2202 case NE_EXPR:
2203 return COMPCODE_NE;
2204 case GE_EXPR:
2205 return COMPCODE_GE;
2206 case ORDERED_EXPR:
2207 return COMPCODE_ORD;
2208 case UNORDERED_EXPR:
2209 return COMPCODE_UNORD;
2210 case UNLT_EXPR:
2211 return COMPCODE_UNLT;
2212 case UNEQ_EXPR:
2213 return COMPCODE_UNEQ;
2214 case UNLE_EXPR:
2215 return COMPCODE_UNLE;
2216 case UNGT_EXPR:
2217 return COMPCODE_UNGT;
2218 case LTGT_EXPR:
2219 return COMPCODE_LTGT;
2220 case UNGE_EXPR:
2221 return COMPCODE_UNGE;
2222 default:
2223 gcc_unreachable ();
2227 /* Convert a compcode bit-based encoding of a comparison operator back
2228 to GCC's enum tree_code representation. This function is the
2229 inverse of comparison_to_compcode. */
2231 static enum tree_code
2232 compcode_to_comparison (enum comparison_code code)
2234 switch (code)
2236 case COMPCODE_LT:
2237 return LT_EXPR;
2238 case COMPCODE_EQ:
2239 return EQ_EXPR;
2240 case COMPCODE_LE:
2241 return LE_EXPR;
2242 case COMPCODE_GT:
2243 return GT_EXPR;
2244 case COMPCODE_NE:
2245 return NE_EXPR;
2246 case COMPCODE_GE:
2247 return GE_EXPR;
2248 case COMPCODE_ORD:
2249 return ORDERED_EXPR;
2250 case COMPCODE_UNORD:
2251 return UNORDERED_EXPR;
2252 case COMPCODE_UNLT:
2253 return UNLT_EXPR;
2254 case COMPCODE_UNEQ:
2255 return UNEQ_EXPR;
2256 case COMPCODE_UNLE:
2257 return UNLE_EXPR;
2258 case COMPCODE_UNGT:
2259 return UNGT_EXPR;
2260 case COMPCODE_LTGT:
2261 return LTGT_EXPR;
2262 case COMPCODE_UNGE:
2263 return UNGE_EXPR;
2264 default:
2265 gcc_unreachable ();
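/* Worked example of the encoding (illustrative): bit 0 stands for
   "less", bit 1 for "equal", bit 2 for "greater" and bit 3 for
   "unordered", so
     COMPCODE_LE   == COMPCODE_LT | COMPCODE_EQ        (1 | 2 == 3)
     COMPCODE_LTGT == COMPCODE_LT | COMPCODE_GT        (1 | 4 == 5)
     COMPCODE_NE   == COMPCODE_LT | COMPCODE_GT | COMPCODE_UNORD
                                                       (1 | 4 | 8 == 13)
   and ANDing or ORing two codes yields the code of the conjunction or
   disjunction of the two comparisons.  */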
2269 /* Return a tree for the comparison which is the combination of
2270 doing the AND or OR (depending on CODE) of the two operations LCODE
2271 and RCODE on the identical operands LL_ARG and LR_ARG. Take into account
2272 the possibility of trapping if the mode has NaNs, and return NULL_TREE
2273 if this makes the transformation invalid. */
2275 tree
2276 combine_comparisons (location_t loc,
2277 enum tree_code code, enum tree_code lcode,
2278 enum tree_code rcode, tree truth_type,
2279 tree ll_arg, tree lr_arg)
2281 bool honor_nans = HONOR_NANS (TYPE_MODE (TREE_TYPE (ll_arg)));
2282 enum comparison_code lcompcode = comparison_to_compcode (lcode);
2283 enum comparison_code rcompcode = comparison_to_compcode (rcode);
2284 int compcode;
2286 switch (code)
2288 case TRUTH_AND_EXPR: case TRUTH_ANDIF_EXPR:
2289 compcode = lcompcode & rcompcode;
2290 break;
2292 case TRUTH_OR_EXPR: case TRUTH_ORIF_EXPR:
2293 compcode = lcompcode | rcompcode;
2294 break;
2296 default:
2297 return NULL_TREE;
2300 if (!honor_nans)
2302 /* Eliminate unordered comparisons, as well as LTGT and ORD
2303 which are not used unless the mode has NaNs. */
2304 compcode &= ~COMPCODE_UNORD;
2305 if (compcode == COMPCODE_LTGT)
2306 compcode = COMPCODE_NE;
2307 else if (compcode == COMPCODE_ORD)
2308 compcode = COMPCODE_TRUE;
2310 else if (flag_trapping_math)
2312 /* Check that the original operation and the optimized ones will trap
2313 under the same condition. */
2314 bool ltrap = (lcompcode & COMPCODE_UNORD) == 0
2315 && (lcompcode != COMPCODE_EQ)
2316 && (lcompcode != COMPCODE_ORD);
2317 bool rtrap = (rcompcode & COMPCODE_UNORD) == 0
2318 && (rcompcode != COMPCODE_EQ)
2319 && (rcompcode != COMPCODE_ORD);
2320 bool trap = (compcode & COMPCODE_UNORD) == 0
2321 && (compcode != COMPCODE_EQ)
2322 && (compcode != COMPCODE_ORD);
2324 /* In a short-circuited boolean expression the LHS might be
2325 such that the RHS, if evaluated, will never trap. For
2326 example, in ORD (x, y) && (x < y), we evaluate the RHS only
2327 if neither x nor y is NaN. (This is a mixed blessing: for
2328 example, the expression above will never trap, hence
2329 optimizing it to x < y would be invalid). */
2330 if ((code == TRUTH_ORIF_EXPR && (lcompcode & COMPCODE_UNORD))
2331 || (code == TRUTH_ANDIF_EXPR && !(lcompcode & COMPCODE_UNORD)))
2332 rtrap = false;
2334 /* If the comparison was short-circuited, and only the RHS
2335 trapped, we may now generate a spurious trap. */
2336 if (rtrap && !ltrap
2337 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR))
2338 return NULL_TREE;
2340 /* If we changed the conditions that cause a trap, we lose. */
2341 if ((ltrap || rtrap) != trap)
2342 return NULL_TREE;
2345 if (compcode == COMPCODE_TRUE)
2346 return constant_boolean_node (true, truth_type);
2347 else if (compcode == COMPCODE_FALSE)
2348 return constant_boolean_node (false, truth_type);
2349 else
2351 enum tree_code tcode;
2353 tcode = compcode_to_comparison ((enum comparison_code) compcode);
2354 return fold_build2_loc (loc, tcode, truth_type, ll_arg, lr_arg);
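/* Worked example (illustrative; X and Y are hypothetical operands):
   for "X < Y || X == Y" the bitwise OR gives
     COMPCODE_LT | COMPCODE_EQ == COMPCODE_LE,
   so the disjunction folds to "X <= Y"; "X < Y && X > Y" ANDs to
   COMPCODE_FALSE and becomes constant false, provided the NaN and
   trap checks above permit the transformation.  */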
2358 /* Return nonzero if two operands (typically of the same tree node)
2359 are necessarily equal. If either argument has side-effects this
2360 function returns zero. FLAGS modifies behavior as follows:
2362 If OEP_ONLY_CONST is set, only return nonzero for constants.
2363 This function tests whether the operands are indistinguishable;
2364 it does not test whether they are equal using C's == operation.
2365 The distinction is important for IEEE floating point, because
2366 (1) -0.0 and 0.0 are distinguishable, but -0.0==0.0, and
2367 (2) two NaNs may be indistinguishable, but NaN!=NaN.
2369 If OEP_ONLY_CONST is unset, a VAR_DECL is considered equal to itself
2370 even though it may hold multiple values during a function.
2371 This is because a GCC tree node guarantees that nothing else is
2372 executed between the evaluation of its "operands" (which may often
2373 be evaluated in arbitrary order). Hence if the operands themselves
2374 don't side-effect, the VAR_DECLs, PARM_DECLs etc... must hold the
2375 same value in each operand/subexpression. Hence leaving OEP_ONLY_CONST
2376 unset means assuming isochronic (or instantaneous) tree equivalence.
2377 Unless comparing arbitrary expression trees, such as from different
2378 statements, this flag can usually be left unset.
2380 If OEP_PURE_SAME is set, then pure functions with identical arguments
2381 are considered the same. It is used when the caller has other ways
2382 to ensure that global memory is unchanged in between. */
2384 int
2385 operand_equal_p (const_tree arg0, const_tree arg1, unsigned int flags)
2387 /* If either is ERROR_MARK, they aren't equal. */
2388 if (TREE_CODE (arg0) == ERROR_MARK || TREE_CODE (arg1) == ERROR_MARK
2389 || TREE_TYPE (arg0) == error_mark_node
2390 || TREE_TYPE (arg1) == error_mark_node)
2391 return 0;
2393 /* Similarly, if either does not have a type (like a released SSA name),
2394 they aren't equal. */
2395 if (!TREE_TYPE (arg0) || !TREE_TYPE (arg1))
2396 return 0;
2398 /* Check equality of integer constants before bailing out due to
2399 precision differences. */
2400 if (TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
2401 return tree_int_cst_equal (arg0, arg1);
2403 /* If the two types don't have the same signedness, then we can't consider
2404 them equal. We must check this before the STRIP_NOPS calls
2405 because they may change the signedness of the arguments. As pointers
2406 strictly don't have a signedness, require either two pointers or
2407 two non-pointers as well. */
2408 if (TYPE_UNSIGNED (TREE_TYPE (arg0)) != TYPE_UNSIGNED (TREE_TYPE (arg1))
2409 || POINTER_TYPE_P (TREE_TYPE (arg0)) != POINTER_TYPE_P (TREE_TYPE (arg1)))
2410 return 0;
2412 /* We cannot consider pointers to different address spaces equal. */
2413 if (POINTER_TYPE_P (TREE_TYPE (arg0)) && POINTER_TYPE_P (TREE_TYPE (arg1))
2414 && (TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (arg0)))
2415 != TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (arg1)))))
2416 return 0;
2418 /* If the two types don't have the same precision, then it is not safe
2419 to strip NOPs. */
2420 if (TYPE_PRECISION (TREE_TYPE (arg0)) != TYPE_PRECISION (TREE_TYPE (arg1)))
2421 return 0;
2423 STRIP_NOPS (arg0);
2424 STRIP_NOPS (arg1);
2426 /* In case both args are comparisons but with different comparison
2427 code, try to swap the comparison operands of one arg to produce
2428 a match and compare that variant. */
2429 if (TREE_CODE (arg0) != TREE_CODE (arg1)
2430 && COMPARISON_CLASS_P (arg0)
2431 && COMPARISON_CLASS_P (arg1))
2433 enum tree_code swap_code = swap_tree_comparison (TREE_CODE (arg1));
2435 if (TREE_CODE (arg0) == swap_code)
2436 return operand_equal_p (TREE_OPERAND (arg0, 0),
2437 TREE_OPERAND (arg1, 1), flags)
2438 && operand_equal_p (TREE_OPERAND (arg0, 1),
2439 TREE_OPERAND (arg1, 0), flags);
2442 if (TREE_CODE (arg0) != TREE_CODE (arg1)
2443 /* This is needed for conversions and for COMPONENT_REF.
2444 Might as well play it safe and always test this. */
2445 || TREE_CODE (TREE_TYPE (arg0)) == ERROR_MARK
2446 || TREE_CODE (TREE_TYPE (arg1)) == ERROR_MARK
2447 || TYPE_MODE (TREE_TYPE (arg0)) != TYPE_MODE (TREE_TYPE (arg1)))
2448 return 0;
2450 /* If ARG0 and ARG1 are the same SAVE_EXPR, they are necessarily equal.
2451 We don't care about side effects in that case because the SAVE_EXPR
2452 takes care of that for us. In all other cases, two expressions are
2453 equal if they have no side effects. If we have two identical
2454 expressions with side effects that should be treated the same due
2455 to the only side effects being identical SAVE_EXPR's, that will
2456 be detected in the recursive calls below.
2457 If we are taking an invariant address of two identical objects
2458 they are necessarily equal as well. */
2459 if (arg0 == arg1 && ! (flags & OEP_ONLY_CONST)
2460 && (TREE_CODE (arg0) == SAVE_EXPR
2461 || (flags & OEP_CONSTANT_ADDRESS_OF)
2462 || (! TREE_SIDE_EFFECTS (arg0) && ! TREE_SIDE_EFFECTS (arg1))))
2463 return 1;
2465 /* Next handle constant cases, those for which we can return 1 even
2466 if ONLY_CONST is set. */
2467 if (TREE_CONSTANT (arg0) && TREE_CONSTANT (arg1))
2468 switch (TREE_CODE (arg0))
2470 case INTEGER_CST:
2471 return tree_int_cst_equal (arg0, arg1);
2473 case FIXED_CST:
2474 return FIXED_VALUES_IDENTICAL (TREE_FIXED_CST (arg0),
2475 TREE_FIXED_CST (arg1));
2477 case REAL_CST:
2478 if (REAL_VALUES_IDENTICAL (TREE_REAL_CST (arg0),
2479 TREE_REAL_CST (arg1)))
2480 return 1;
2483 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0))))
2485 /* If we do not distinguish between signed and unsigned zero,
2486 consider them equal. */
2487 if (real_zerop (arg0) && real_zerop (arg1))
2488 return 1;
2490 return 0;
2492 case VECTOR_CST:
2494 tree v1, v2;
2496 v1 = TREE_VECTOR_CST_ELTS (arg0);
2497 v2 = TREE_VECTOR_CST_ELTS (arg1);
2498 while (v1 && v2)
2500 if (!operand_equal_p (TREE_VALUE (v1), TREE_VALUE (v2),
2501 flags))
2502 return 0;
2503 v1 = TREE_CHAIN (v1);
2504 v2 = TREE_CHAIN (v2);
2507 return v1 == v2;
2510 case COMPLEX_CST:
2511 return (operand_equal_p (TREE_REALPART (arg0), TREE_REALPART (arg1),
2512 flags)
2513 && operand_equal_p (TREE_IMAGPART (arg0), TREE_IMAGPART (arg1),
2514 flags));
2516 case STRING_CST:
2517 return (TREE_STRING_LENGTH (arg0) == TREE_STRING_LENGTH (arg1)
2518 && ! memcmp (TREE_STRING_POINTER (arg0),
2519 TREE_STRING_POINTER (arg1),
2520 TREE_STRING_LENGTH (arg0)));
2522 case ADDR_EXPR:
2523 return operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 0),
2524 TREE_CONSTANT (arg0) && TREE_CONSTANT (arg1)
2525 ? OEP_CONSTANT_ADDRESS_OF : 0);
2526 default:
2527 break;
2530 if (flags & OEP_ONLY_CONST)
2531 return 0;
2533 /* Define macros to test an operand from arg0 and arg1 for equality and a
2534 variant that allows null and views null as being different from any
2535 non-null value. In the latter case, if either is null, they both
2536 must be; otherwise, do the normal comparison. */
2537 #define OP_SAME(N) operand_equal_p (TREE_OPERAND (arg0, N), \
2538 TREE_OPERAND (arg1, N), flags)
2540 #define OP_SAME_WITH_NULL(N) \
2541 ((!TREE_OPERAND (arg0, N) || !TREE_OPERAND (arg1, N)) \
2542 ? TREE_OPERAND (arg0, N) == TREE_OPERAND (arg1, N) : OP_SAME (N))
2544 switch (TREE_CODE_CLASS (TREE_CODE (arg0)))
2546 case tcc_unary:
2547 /* Two conversions are equal only if signedness and modes match. */
2548 switch (TREE_CODE (arg0))
2550 CASE_CONVERT:
2551 case FIX_TRUNC_EXPR:
2552 if (TYPE_UNSIGNED (TREE_TYPE (arg0))
2553 != TYPE_UNSIGNED (TREE_TYPE (arg1)))
2554 return 0;
2555 break;
2556 default:
2557 break;
2560 return OP_SAME (0);
2563 case tcc_comparison:
2564 case tcc_binary:
2565 if (OP_SAME (0) && OP_SAME (1))
2566 return 1;
2568 /* For commutative ops, allow the other order. */
2569 return (commutative_tree_code (TREE_CODE (arg0))
2570 && operand_equal_p (TREE_OPERAND (arg0, 0),
2571 TREE_OPERAND (arg1, 1), flags)
2572 && operand_equal_p (TREE_OPERAND (arg0, 1),
2573 TREE_OPERAND (arg1, 0), flags));
2575 case tcc_reference:
2576 /* If either of the pointer (or reference) expressions we are
2577 dereferencing contain a side effect, these cannot be equal. */
2578 if (TREE_SIDE_EFFECTS (arg0)
2579 || TREE_SIDE_EFFECTS (arg1))
2580 return 0;
2582 switch (TREE_CODE (arg0))
2584 case INDIRECT_REF:
2585 case REALPART_EXPR:
2586 case IMAGPART_EXPR:
2587 return OP_SAME (0);
2589 case MEM_REF:
2590 /* Require equal access sizes, and similar pointer types.
2591 We can have incomplete types for array references of
2592 variable-sized arrays from the Fortran front end
2593 though. */
2594 return ((TYPE_SIZE (TREE_TYPE (arg0)) == TYPE_SIZE (TREE_TYPE (arg1))
2595 || (TYPE_SIZE (TREE_TYPE (arg0))
2596 && TYPE_SIZE (TREE_TYPE (arg1))
2597 && operand_equal_p (TYPE_SIZE (TREE_TYPE (arg0)),
2598 TYPE_SIZE (TREE_TYPE (arg1)), flags)))
2599 && (TYPE_MAIN_VARIANT (TREE_TYPE (TREE_OPERAND (arg0, 1)))
2600 == TYPE_MAIN_VARIANT (TREE_TYPE (TREE_OPERAND (arg1, 1))))
2601 && OP_SAME (0) && OP_SAME (1));
2603 case ARRAY_REF:
2604 case ARRAY_RANGE_REF:
2605 /* Operands 2 and 3 may be null.
2606 Compare the array index by value first if it is constant, as
2607 we may have different types but the same value here. */
2608 return (OP_SAME (0)
2609 && (tree_int_cst_equal (TREE_OPERAND (arg0, 1),
2610 TREE_OPERAND (arg1, 1))
2611 || OP_SAME (1))
2612 && OP_SAME_WITH_NULL (2)
2613 && OP_SAME_WITH_NULL (3));
2615 case COMPONENT_REF:
2616 /* Handle operand 2 the same as for ARRAY_REF. Operand 0
2617 may be NULL when we're called to compare MEM_EXPRs. */
2618 return OP_SAME_WITH_NULL (0)
2619 && OP_SAME (1)
2620 && OP_SAME_WITH_NULL (2);
2622 case BIT_FIELD_REF:
2623 return OP_SAME (0) && OP_SAME (1) && OP_SAME (2);
2625 default:
2626 return 0;
2629 case tcc_expression:
2630 switch (TREE_CODE (arg0))
2632 case ADDR_EXPR:
2633 case TRUTH_NOT_EXPR:
2634 return OP_SAME (0);
2636 case TRUTH_ANDIF_EXPR:
2637 case TRUTH_ORIF_EXPR:
2638 return OP_SAME (0) && OP_SAME (1);
2640 case FMA_EXPR:
2641 case WIDEN_MULT_PLUS_EXPR:
2642 case WIDEN_MULT_MINUS_EXPR:
2643 if (!OP_SAME (2))
2644 return 0;
2645 /* The multiplication operands are commutative. */
2646 /* FALLTHRU */
2648 case TRUTH_AND_EXPR:
2649 case TRUTH_OR_EXPR:
2650 case TRUTH_XOR_EXPR:
2651 if (OP_SAME (0) && OP_SAME (1))
2652 return 1;
2654 /* Otherwise, take into account that this is a commutative operation. */
2655 return (operand_equal_p (TREE_OPERAND (arg0, 0),
2656 TREE_OPERAND (arg1, 1), flags)
2657 && operand_equal_p (TREE_OPERAND (arg0, 1),
2658 TREE_OPERAND (arg1, 0), flags));
2660 case COND_EXPR:
2661 case VEC_COND_EXPR:
2662 case DOT_PROD_EXPR:
2663 return OP_SAME (0) && OP_SAME (1) && OP_SAME (2);
2665 default:
2666 return 0;
2669 case tcc_vl_exp:
2670 switch (TREE_CODE (arg0))
2672 case CALL_EXPR:
2673 /* If the CALL_EXPRs call different functions, then they
2674 clearly cannot be equal. */
2675 if (! operand_equal_p (CALL_EXPR_FN (arg0), CALL_EXPR_FN (arg1),
2676 flags))
2677 return 0;
2680 unsigned int cef = call_expr_flags (arg0);
2681 if (flags & OEP_PURE_SAME)
2682 cef &= ECF_CONST | ECF_PURE;
2683 else
2684 cef &= ECF_CONST;
2685 if (!cef)
2686 return 0;
2689 /* Now see if all the arguments are the same. */
2691 const_call_expr_arg_iterator iter0, iter1;
2692 const_tree a0, a1;
2693 for (a0 = first_const_call_expr_arg (arg0, &iter0),
2694 a1 = first_const_call_expr_arg (arg1, &iter1);
2695 a0 && a1;
2696 a0 = next_const_call_expr_arg (&iter0),
2697 a1 = next_const_call_expr_arg (&iter1))
2698 if (! operand_equal_p (a0, a1, flags))
2699 return 0;
2701 /* If we get here and both argument lists are exhausted
2702 then the CALL_EXPRs are equal. */
2703 return ! (a0 || a1);
2705 default:
2706 return 0;
2709 case tcc_declaration:
2710 /* Consider __builtin_sqrt equal to sqrt. */
2711 return (TREE_CODE (arg0) == FUNCTION_DECL
2712 && DECL_BUILT_IN (arg0) && DECL_BUILT_IN (arg1)
2713 && DECL_BUILT_IN_CLASS (arg0) == DECL_BUILT_IN_CLASS (arg1)
2714 && DECL_FUNCTION_CODE (arg0) == DECL_FUNCTION_CODE (arg1));
2716 default:
2717 return 0;
2720 #undef OP_SAME
2721 #undef OP_SAME_WITH_NULL
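/* Usage sketch (illustrative; A and B are hypothetical operands):
   "A + B" and "B + A" compare equal through the commutative case of
   the tcc_binary arm, while "A - B" and "B - A" do not.  With
   OEP_ONLY_CONST set, only constant operands such as two equal
   INTEGER_CSTs are accepted; and when signed zeros are honored,
   -0.0 and 0.0 remain distinct per the REAL_CST case above.  */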
2724 /* Similar to operand_equal_p, but see if ARG0 might have been made by
2725 shorten_compare from ARG1 when ARG1 was being compared with OTHER.
2727 When in doubt, return 0. */
2729 static int
2730 operand_equal_for_comparison_p (tree arg0, tree arg1, tree other)
2732 int unsignedp1, unsignedpo;
2733 tree primarg0, primarg1, primother;
2734 unsigned int correct_width;
2736 if (operand_equal_p (arg0, arg1, 0))
2737 return 1;
2739 if (! INTEGRAL_TYPE_P (TREE_TYPE (arg0))
2740 || ! INTEGRAL_TYPE_P (TREE_TYPE (arg1)))
2741 return 0;
2743 /* Discard any conversions that don't change the modes of ARG0 and ARG1
2744 and see if the inner values are the same. This removes any
2745 signedness comparison, which doesn't matter here. */
2746 primarg0 = arg0, primarg1 = arg1;
2747 STRIP_NOPS (primarg0);
2748 STRIP_NOPS (primarg1);
2749 if (operand_equal_p (primarg0, primarg1, 0))
2750 return 1;
2752 /* Duplicate what shorten_compare does to ARG1 and see if that gives the
2753 actual comparison operand, ARG0.
2755 First throw away any conversions to wider types
2756 already present in the operands. */
2758 primarg1 = get_narrower (arg1, &unsignedp1);
2759 primother = get_narrower (other, &unsignedpo);
2761 correct_width = TYPE_PRECISION (TREE_TYPE (arg1));
2762 if (unsignedp1 == unsignedpo
2763 && TYPE_PRECISION (TREE_TYPE (primarg1)) < correct_width
2764 && TYPE_PRECISION (TREE_TYPE (primother)) < correct_width)
2766 tree type = TREE_TYPE (arg0);
2768 /* Make sure shorter operand is extended the right way
2769 to match the longer operand. */
2770 primarg1 = fold_convert (signed_or_unsigned_type_for
2771 (unsignedp1, TREE_TYPE (primarg1)), primarg1);
2773 if (operand_equal_p (arg0, fold_convert (type, primarg1), 0))
2774 return 1;
2777 return 0;
2780 /* See if ARG is an expression that is either a comparison or is performing
2781 arithmetic on comparisons. The comparisons must only be comparing
2782 two different values, which will be stored in *CVAL1 and *CVAL2; if
2783 they are nonzero it means that some operands have already been found.
2784 No variables may be used anywhere else in the expression except in the
2785 comparisons. If SAVE_P is true it means we removed a SAVE_EXPR around
2786 the expression and save_expr needs to be called with CVAL1 and CVAL2.
2788 If this is true, return 1. Otherwise, return zero. */
2790 static int
2791 twoval_comparison_p (tree arg, tree *cval1, tree *cval2, int *save_p)
2793 enum tree_code code = TREE_CODE (arg);
2794 enum tree_code_class tclass = TREE_CODE_CLASS (code);
2796 /* We can handle some of the tcc_expression cases here. */
2797 if (tclass == tcc_expression && code == TRUTH_NOT_EXPR)
2798 tclass = tcc_unary;
2799 else if (tclass == tcc_expression
2800 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR
2801 || code == COMPOUND_EXPR))
2802 tclass = tcc_binary;
2804 else if (tclass == tcc_expression && code == SAVE_EXPR
2805 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (arg, 0)))
2807 /* If we've already found a CVAL1 or CVAL2, this expression is
2808 too complex to handle. */
2809 if (*cval1 || *cval2)
2810 return 0;
2812 tclass = tcc_unary;
2813 *save_p = 1;
2816 switch (tclass)
2818 case tcc_unary:
2819 return twoval_comparison_p (TREE_OPERAND (arg, 0), cval1, cval2, save_p);
2821 case tcc_binary:
2822 return (twoval_comparison_p (TREE_OPERAND (arg, 0), cval1, cval2, save_p)
2823 && twoval_comparison_p (TREE_OPERAND (arg, 1),
2824 cval1, cval2, save_p));
2826 case tcc_constant:
2827 return 1;
2829 case tcc_expression:
2830 if (code == COND_EXPR)
2831 return (twoval_comparison_p (TREE_OPERAND (arg, 0),
2832 cval1, cval2, save_p)
2833 && twoval_comparison_p (TREE_OPERAND (arg, 1),
2834 cval1, cval2, save_p)
2835 && twoval_comparison_p (TREE_OPERAND (arg, 2),
2836 cval1, cval2, save_p));
2837 return 0;
2839 case tcc_comparison:
2840 /* First see if we can handle the first operand, then the second. For
2841 the second operand, we know *CVAL1 can't be zero. It must be that
2842 one side of the comparison is each of the values; test for the
2843 case where this isn't true by failing if the two operands
2844 are the same. */
2846 if (operand_equal_p (TREE_OPERAND (arg, 0),
2847 TREE_OPERAND (arg, 1), 0))
2848 return 0;
2850 if (*cval1 == 0)
2851 *cval1 = TREE_OPERAND (arg, 0);
2852 else if (operand_equal_p (*cval1, TREE_OPERAND (arg, 0), 0))
2853 ;
2854 else if (*cval2 == 0)
2855 *cval2 = TREE_OPERAND (arg, 0);
2856 else if (operand_equal_p (*cval2, TREE_OPERAND (arg, 0), 0))
2857 ;
2858 else
2859 return 0;
2861 if (operand_equal_p (*cval1, TREE_OPERAND (arg, 1), 0))
2862 ;
2863 else if (*cval2 == 0)
2864 *cval2 = TREE_OPERAND (arg, 1);
2865 else if (operand_equal_p (*cval2, TREE_OPERAND (arg, 1), 0))
2866 ;
2867 else
2868 return 0;
2870 return 1;
2872 default:
2873 return 0;
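/* Example (illustrative): for ARG = "(x < y) || (x > y)" the first
   comparison stores *CVAL1 = x and *CVAL2 = y; the operands of the
   second comparison then match the stored values, so the function
   returns 1, telling the caller the whole expression tests only the
   two values x and y.  */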
2877 /* ARG is a tree that is known to contain just arithmetic operations and
2878 comparisons. Evaluate the operations in the tree substituting NEW0 for
2879 any occurrence of OLD0 as an operand of a comparison and likewise for
2880 NEW1 and OLD1. */
2882 static tree
2883 eval_subst (location_t loc, tree arg, tree old0, tree new0,
2884 tree old1, tree new1)
2886 tree type = TREE_TYPE (arg);
2887 enum tree_code code = TREE_CODE (arg);
2888 enum tree_code_class tclass = TREE_CODE_CLASS (code);
2890 /* We can handle some of the tcc_expression cases here. */
2891 if (tclass == tcc_expression && code == TRUTH_NOT_EXPR)
2892 tclass = tcc_unary;
2893 else if (tclass == tcc_expression
2894 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR))
2895 tclass = tcc_binary;
2897 switch (tclass)
2899 case tcc_unary:
2900 return fold_build1_loc (loc, code, type,
2901 eval_subst (loc, TREE_OPERAND (arg, 0),
2902 old0, new0, old1, new1));
2904 case tcc_binary:
2905 return fold_build2_loc (loc, code, type,
2906 eval_subst (loc, TREE_OPERAND (arg, 0),
2907 old0, new0, old1, new1),
2908 eval_subst (loc, TREE_OPERAND (arg, 1),
2909 old0, new0, old1, new1));
2911 case tcc_expression:
2912 switch (code)
2914 case SAVE_EXPR:
2915 return eval_subst (loc, TREE_OPERAND (arg, 0), old0, new0,
2916 old1, new1);
2918 case COMPOUND_EXPR:
2919 return eval_subst (loc, TREE_OPERAND (arg, 1), old0, new0,
2920 old1, new1);
2922 case COND_EXPR:
2923 return fold_build3_loc (loc, code, type,
2924 eval_subst (loc, TREE_OPERAND (arg, 0),
2925 old0, new0, old1, new1),
2926 eval_subst (loc, TREE_OPERAND (arg, 1),
2927 old0, new0, old1, new1),
2928 eval_subst (loc, TREE_OPERAND (arg, 2),
2929 old0, new0, old1, new1));
2930 default:
2931 break;
2933 /* Fall through - ??? */
2935 case tcc_comparison:
2937 tree arg0 = TREE_OPERAND (arg, 0);
2938 tree arg1 = TREE_OPERAND (arg, 1);
2940 /* We need to check both for exact equality and tree equality. The
2941 former will be true if the operand has a side-effect. In that
2942 case, we know the operand occurred exactly once. */
2944 if (arg0 == old0 || operand_equal_p (arg0, old0, 0))
2945 arg0 = new0;
2946 else if (arg0 == old1 || operand_equal_p (arg0, old1, 0))
2947 arg0 = new1;
2949 if (arg1 == old0 || operand_equal_p (arg1, old0, 0))
2950 arg1 = new0;
2951 else if (arg1 == old1 || operand_equal_p (arg1, old1, 0))
2952 arg1 = new1;
2954 return fold_build2_loc (loc, code, type, arg0, arg1);
2957 default:
2958 return arg;
2962 /* Return a tree for the case when the result of an expression is RESULT
2963 converted to TYPE and OMITTED was previously an operand of the expression
2964 but is now not needed (e.g., we folded OMITTED * 0).
2966 If OMITTED has side effects, we must evaluate it. Otherwise, just do
2967 the conversion of RESULT to TYPE. */
2969 tree
2970 omit_one_operand_loc (location_t loc, tree type, tree result, tree omitted)
2972 tree t = fold_convert_loc (loc, type, result);
2974 /* If the resulting operand is an empty statement, just return the omitted
2975 statement cast to void. */
2976 if (IS_EMPTY_STMT (t) && TREE_SIDE_EFFECTS (omitted))
2977 return build1_loc (loc, NOP_EXPR, void_type_node,
2978 fold_ignored_result (omitted));
2980 if (TREE_SIDE_EFFECTS (omitted))
2981 return build2_loc (loc, COMPOUND_EXPR, type,
2982 fold_ignored_result (omitted), t);
2984 return non_lvalue_loc (loc, t);
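/* Example (illustrative; f is a hypothetical function with side
   effects): when fold turns "f () * 0" into 0, the call must still be
   evaluated, so the result is
     COMPOUND_EXPR <f (), 0>,
   i.e. the omitted operand is kept for its side effects and the folded
   value is yielded afterwards.  */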
2987 /* Similar, but call pedantic_non_lvalue instead of non_lvalue. */
2989 static tree
2990 pedantic_omit_one_operand_loc (location_t loc, tree type, tree result,
2991 tree omitted)
2993 tree t = fold_convert_loc (loc, type, result);
2995 /* If the resulting operand is an empty statement, just return the omitted
2996 statement cast to void. */
2997 if (IS_EMPTY_STMT (t) && TREE_SIDE_EFFECTS (omitted))
2998 return build1_loc (loc, NOP_EXPR, void_type_node,
2999 fold_ignored_result (omitted));
3001 if (TREE_SIDE_EFFECTS (omitted))
3002 return build2_loc (loc, COMPOUND_EXPR, type,
3003 fold_ignored_result (omitted), t);
3005 return pedantic_non_lvalue_loc (loc, t);
3008 /* Return a tree for the case when the result of an expression is RESULT
3009 converted to TYPE and OMITTED1 and OMITTED2 were previously operands
3010 of the expression but are now not needed.
3012 If OMITTED1 or OMITTED2 has side effects, they must be evaluated.
3013 If both OMITTED1 and OMITTED2 have side effects, OMITTED1 is
3014 evaluated before OMITTED2. Otherwise, if neither has side effects,
3015 just do the conversion of RESULT to TYPE. */
3017 tree
3018 omit_two_operands_loc (location_t loc, tree type, tree result,
3019 tree omitted1, tree omitted2)
3021 tree t = fold_convert_loc (loc, type, result);
3023 if (TREE_SIDE_EFFECTS (omitted2))
3024 t = build2_loc (loc, COMPOUND_EXPR, type, omitted2, t);
3025 if (TREE_SIDE_EFFECTS (omitted1))
3026 t = build2_loc (loc, COMPOUND_EXPR, type, omitted1, t);
3028 return TREE_CODE (t) != COMPOUND_EXPR ? non_lvalue_loc (loc, t) : t;
3032 /* Return a simplified tree node for the truth-negation of ARG. This
3033 never alters ARG itself. We assume that ARG is an operation that
3034 returns a truth value (0 or 1).
3036 FIXME: one would think we would fold the result, but it causes
3037 problems with the dominator optimizer. */
3039 tree
3040 fold_truth_not_expr (location_t loc, tree arg)
3042 tree type = TREE_TYPE (arg);
3043 enum tree_code code = TREE_CODE (arg);
3044 location_t loc1, loc2;
3046 /* If this is a comparison, we can simply invert it, except for
3047 floating-point non-equality comparisons, in which case we just
3048 enclose a TRUTH_NOT_EXPR around what we have. */
3050 if (TREE_CODE_CLASS (code) == tcc_comparison)
3052 tree op_type = TREE_TYPE (TREE_OPERAND (arg, 0));
3053 if (FLOAT_TYPE_P (op_type)
3054 && flag_trapping_math
3055 && code != ORDERED_EXPR && code != UNORDERED_EXPR
3056 && code != NE_EXPR && code != EQ_EXPR)
3057 return NULL_TREE;
3059 code = invert_tree_comparison (code, HONOR_NANS (TYPE_MODE (op_type)));
3060 if (code == ERROR_MARK)
3061 return NULL_TREE;
3063 return build2_loc (loc, code, type, TREE_OPERAND (arg, 0),
3064 TREE_OPERAND (arg, 1));
3067 switch (code)
3069 case INTEGER_CST:
3070 return constant_boolean_node (integer_zerop (arg), type);
3072 case TRUTH_AND_EXPR:
3073 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
3074 loc2 = expr_location_or (TREE_OPERAND (arg, 1), loc);
3075 return build2_loc (loc, TRUTH_OR_EXPR, type,
3076 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)),
3077 invert_truthvalue_loc (loc2, TREE_OPERAND (arg, 1)));
3079 case TRUTH_OR_EXPR:
3080 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
3081 loc2 = expr_location_or (TREE_OPERAND (arg, 1), loc);
3082 return build2_loc (loc, TRUTH_AND_EXPR, type,
3083 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)),
3084 invert_truthvalue_loc (loc2, TREE_OPERAND (arg, 1)));
3086 case TRUTH_XOR_EXPR:
3087 /* Here we can invert either operand. We invert the first operand
3088 unless the second operand is a TRUTH_NOT_EXPR in which case our
3089 result is the XOR of the first operand with the inside of the
3090 negation of the second operand. */
3092 if (TREE_CODE (TREE_OPERAND (arg, 1)) == TRUTH_NOT_EXPR)
3093 return build2_loc (loc, TRUTH_XOR_EXPR, type, TREE_OPERAND (arg, 0),
3094 TREE_OPERAND (TREE_OPERAND (arg, 1), 0));
3095 else
3096 return build2_loc (loc, TRUTH_XOR_EXPR, type,
3097 invert_truthvalue_loc (loc, TREE_OPERAND (arg, 0)),
3098 TREE_OPERAND (arg, 1));
3100 case TRUTH_ANDIF_EXPR:
3101 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
3102 loc2 = expr_location_or (TREE_OPERAND (arg, 1), loc);
3103 return build2_loc (loc, TRUTH_ORIF_EXPR, type,
3104 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)),
3105 invert_truthvalue_loc (loc2, TREE_OPERAND (arg, 1)));
3107 case TRUTH_ORIF_EXPR:
3108 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
3109 loc2 = expr_location_or (TREE_OPERAND (arg, 1), loc);
3110 return build2_loc (loc, TRUTH_ANDIF_EXPR, type,
3111 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)),
3112 invert_truthvalue_loc (loc2, TREE_OPERAND (arg, 1)));
3114 case TRUTH_NOT_EXPR:
3115 return TREE_OPERAND (arg, 0);
3117 case COND_EXPR:
3119 tree arg1 = TREE_OPERAND (arg, 1);
3120 tree arg2 = TREE_OPERAND (arg, 2);
3122 loc1 = expr_location_or (TREE_OPERAND (arg, 1), loc);
3123 loc2 = expr_location_or (TREE_OPERAND (arg, 2), loc);
3125 /* A COND_EXPR may have a throw as one operand, which
3126 then has void type. Just leave void operands
3127 as they are. */
3128 return build3_loc (loc, COND_EXPR, type, TREE_OPERAND (arg, 0),
3129 VOID_TYPE_P (TREE_TYPE (arg1))
3130 ? arg1 : invert_truthvalue_loc (loc1, arg1),
3131 VOID_TYPE_P (TREE_TYPE (arg2))
3132 ? arg2 : invert_truthvalue_loc (loc2, arg2));
3135 case COMPOUND_EXPR:
3136 loc1 = expr_location_or (TREE_OPERAND (arg, 1), loc);
3137 return build2_loc (loc, COMPOUND_EXPR, type,
3138 TREE_OPERAND (arg, 0),
3139 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 1)));
3141 case NON_LVALUE_EXPR:
3142 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
3143 return invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0));
3145 CASE_CONVERT:
3146 if (TREE_CODE (TREE_TYPE (arg)) == BOOLEAN_TYPE)
3147 return build1_loc (loc, TRUTH_NOT_EXPR, type, arg);
3149 /* ... fall through ... */
3151 case FLOAT_EXPR:
3152 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
3153 return build1_loc (loc, TREE_CODE (arg), type,
3154 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)));
3156 case BIT_AND_EXPR:
3157 if (!integer_onep (TREE_OPERAND (arg, 1)))
3158 return NULL_TREE;
3159 return build2_loc (loc, EQ_EXPR, type, arg, build_int_cst (type, 0));
3161 case SAVE_EXPR:
3162 return build1_loc (loc, TRUTH_NOT_EXPR, type, arg);
3164 case CLEANUP_POINT_EXPR:
3165 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
3166 return build1_loc (loc, CLEANUP_POINT_EXPR, type,
3167 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)));
3169 default:
3170 return NULL_TREE;
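/* Illustrative examples: the TRUTH_AND_EXPR and TRUTH_OR_EXPR arms
   apply De Morgan's laws, so "!(a && b)" becomes "!a || !b"; the
   COND_EXPR arm pushes the negation into both branches, turning
   "!(c ? a : b)" into "c ? !a : !b"; and, when trapping math is off,
   "!(x < y)" on floats with NaNs becomes "x unge y" via
   invert_tree_comparison above.  */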
3174 /* Return a simplified tree node for the truth-negation of ARG. This
3175 never alters ARG itself. We assume that ARG is an operation that
3176 returns a truth value (0 or 1).
3178 FIXME: one would think we would fold the result, but it causes
3179 problems with the dominator optimizer. */
3181 tree
3182 invert_truthvalue_loc (location_t loc, tree arg)
3184 tree tem;
3186 if (TREE_CODE (arg) == ERROR_MARK)
3187 return arg;
3189 tem = fold_truth_not_expr (loc, arg);
3190 if (!tem)
3191 tem = build1_loc (loc, TRUTH_NOT_EXPR, TREE_TYPE (arg), arg);
3193 return tem;
3196 /* Given a bit-wise operation CODE applied to ARG0 and ARG1, see if both
3197 operands are another bit-wise operation with a common input. If so,
3198 distribute the bit operations to save an operation and possibly two if
3199 constants are involved. For example, convert
3200 (A | B) & (A | C) into A | (B & C)
3201 Further simplification will occur if B and C are constants.
3203 If this optimization cannot be done, 0 will be returned. */
3205 static tree
3206 distribute_bit_expr (location_t loc, enum tree_code code, tree type,
3207 tree arg0, tree arg1)
3209 tree common;
3210 tree left, right;
3212 if (TREE_CODE (arg0) != TREE_CODE (arg1)
3213 || TREE_CODE (arg0) == code
3214 || (TREE_CODE (arg0) != BIT_AND_EXPR
3215 && TREE_CODE (arg0) != BIT_IOR_EXPR))
3216 return 0;
3218 if (operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 0), 0))
3220 common = TREE_OPERAND (arg0, 0);
3221 left = TREE_OPERAND (arg0, 1);
3222 right = TREE_OPERAND (arg1, 1);
3224 else if (operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 1), 0))
3226 common = TREE_OPERAND (arg0, 0);
3227 left = TREE_OPERAND (arg0, 1);
3228 right = TREE_OPERAND (arg1, 0);
3230 else if (operand_equal_p (TREE_OPERAND (arg0, 1), TREE_OPERAND (arg1, 0), 0))
3232 common = TREE_OPERAND (arg0, 1);
3233 left = TREE_OPERAND (arg0, 0);
3234 right = TREE_OPERAND (arg1, 1);
3236 else if (operand_equal_p (TREE_OPERAND (arg0, 1), TREE_OPERAND (arg1, 1), 0))
3238 common = TREE_OPERAND (arg0, 1);
3239 left = TREE_OPERAND (arg0, 0);
3240 right = TREE_OPERAND (arg1, 0);
3242 else
3243 return 0;
3245 common = fold_convert_loc (loc, type, common);
3246 left = fold_convert_loc (loc, type, left);
3247 right = fold_convert_loc (loc, type, right);
3248 return fold_build2_loc (loc, TREE_CODE (arg0), type, common,
3249 fold_build2_loc (loc, code, type, left, right));
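/* Worked example (illustrative; X is a hypothetical operand):
     (X | 0xF0) & (X | 0xCC)  ->  X | (0xF0 & 0xCC)  ->  X | 0xC0
   saving one bit operation outright, with the inner operation folding
   away entirely because both of its operands are constants.  */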
3252 /* Knowing that ARG0 and ARG1 are both RDIV_EXPRs, simplify a binary operation
3253 with code CODE. This optimization is unsafe. */
3254 static tree
3255 distribute_real_division (location_t loc, enum tree_code code, tree type,
3256 tree arg0, tree arg1)
3258 bool mul0 = TREE_CODE (arg0) == MULT_EXPR;
3259 bool mul1 = TREE_CODE (arg1) == MULT_EXPR;
3261 /* (A / C) +- (B / C) -> (A +- B) / C. */
3262 if (mul0 == mul1
3263 && operand_equal_p (TREE_OPERAND (arg0, 1),
3264 TREE_OPERAND (arg1, 1), 0))
3265 return fold_build2_loc (loc, mul0 ? MULT_EXPR : RDIV_EXPR, type,
3266 fold_build2_loc (loc, code, type,
3267 TREE_OPERAND (arg0, 0),
3268 TREE_OPERAND (arg1, 0)),
3269 TREE_OPERAND (arg0, 1));
3271 /* (A / C1) +- (A / C2) -> A * (1 / C1 +- 1 / C2). */
3272 if (operand_equal_p (TREE_OPERAND (arg0, 0),
3273 TREE_OPERAND (arg1, 0), 0)
3274 && TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
3275 && TREE_CODE (TREE_OPERAND (arg1, 1)) == REAL_CST)
3277 REAL_VALUE_TYPE r0, r1;
3278 r0 = TREE_REAL_CST (TREE_OPERAND (arg0, 1));
3279 r1 = TREE_REAL_CST (TREE_OPERAND (arg1, 1));
3280 if (!mul0)
3281 real_arithmetic (&r0, RDIV_EXPR, &dconst1, &r0);
3282 if (!mul1)
3283 real_arithmetic (&r1, RDIV_EXPR, &dconst1, &r1);
3284 real_arithmetic (&r0, code, &r0, &r1);
3285 return fold_build2_loc (loc, MULT_EXPR, type,
3286 TREE_OPERAND (arg0, 0),
3287 build_real (type, r0));
3290 return NULL_TREE;
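/* Worked examples (illustrative): "a/c + b/c" becomes "(a + b) / c",
   and "x/2.0 - x/4.0" becomes "x * 0.25" because 1/2.0 - 1/4.0 folds
   to 0.25.  Both forms can round differently from the original, which
   is why the comment above flags the optimization as unsafe.  */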
3293 /* Return a BIT_FIELD_REF of type TYPE to refer to BITSIZE bits of INNER
3294 starting at BITPOS. The field is unsigned if UNSIGNEDP is nonzero. */
3296 static tree
3297 make_bit_field_ref (location_t loc, tree inner, tree type,
3298 HOST_WIDE_INT bitsize, HOST_WIDE_INT bitpos, int unsignedp)
3300 tree result, bftype;
3302 if (bitpos == 0)
3304 tree size = TYPE_SIZE (TREE_TYPE (inner));
3305 if ((INTEGRAL_TYPE_P (TREE_TYPE (inner))
3306 || POINTER_TYPE_P (TREE_TYPE (inner)))
3307 && host_integerp (size, 0)
3308 && tree_low_cst (size, 0) == bitsize)
3309 return fold_convert_loc (loc, type, inner);
3312 bftype = type;
3313 if (TYPE_PRECISION (bftype) != bitsize
3314 || TYPE_UNSIGNED (bftype) == !unsignedp)
3315 bftype = build_nonstandard_integer_type (bitsize, 0);
3317 result = build3_loc (loc, BIT_FIELD_REF, bftype, inner,
3318 size_int (bitsize), bitsize_int (bitpos));
3320 if (bftype != type)
3321 result = fold_convert_loc (loc, type, result);
3323 return result;
3326 /* Optimize a bit-field compare.
3328 There are two cases: First is a compare against a constant and the
3329 second is a comparison of two items where the fields are at the same
3330 bit position relative to the start of a chunk (byte, halfword, word)
3331 large enough to contain it. In these cases we can avoid the shift
3332 implicit in bitfield extractions.
3334 For constants, we emit a compare of the shifted constant with the
3335 BIT_AND_EXPR of a mask and a byte, halfword, or word of the operand being
3336 compared. For two fields at the same position, we do the ANDs with the
3337 similar mask and compare the result of the ANDs.
3339 CODE is the comparison code, known to be either NE_EXPR or EQ_EXPR.
3340 COMPARE_TYPE is the type of the comparison, and LHS and RHS
3341 are the left and right operands of the comparison, respectively.
3343 If the optimization described above can be done, we return the resulting
3344 tree. Otherwise we return zero. */
3346 static tree
3347 optimize_bit_field_compare (location_t loc, enum tree_code code,
3348 tree compare_type, tree lhs, tree rhs)
3350 HOST_WIDE_INT lbitpos, lbitsize, rbitpos, rbitsize, nbitpos, nbitsize;
3351 tree type = TREE_TYPE (lhs);
3352 tree signed_type, unsigned_type;
3353 int const_p = TREE_CODE (rhs) == INTEGER_CST;
3354 enum machine_mode lmode, rmode, nmode;
3355 int lunsignedp, runsignedp;
3356 int lvolatilep = 0, rvolatilep = 0;
3357 tree linner, rinner = NULL_TREE;
3358 tree mask;
3359 tree offset;
3361 /* Get all the information about the extractions being done. If the bit size
3362 is the same as the size of the underlying object, we aren't doing an
3363 extraction at all and so can do nothing. We also don't want to
3364 do anything if the inner expression is a PLACEHOLDER_EXPR since we
3365 then will no longer be able to replace it. */
3366 linner = get_inner_reference (lhs, &lbitsize, &lbitpos, &offset, &lmode,
3367 &lunsignedp, &lvolatilep, false);
3368 if (linner == lhs || lbitsize == GET_MODE_BITSIZE (lmode) || lbitsize < 0
3369 || offset != 0 || TREE_CODE (linner) == PLACEHOLDER_EXPR)
3370 return 0;
3372 if (!const_p)
3374 /* If this is not a constant, we can only do something if bit positions,
3375 sizes, and signedness are the same. */
3376 rinner = get_inner_reference (rhs, &rbitsize, &rbitpos, &offset, &rmode,
3377 &runsignedp, &rvolatilep, false);
3379 if (rinner == rhs || lbitpos != rbitpos || lbitsize != rbitsize
3380 || lunsignedp != runsignedp || offset != 0
3381 || TREE_CODE (rinner) == PLACEHOLDER_EXPR)
3382 return 0;
3385 /* See if we can find a mode to refer to this field. We should be able to,
3386 but fail if we can't. */
3387 if (lvolatilep
3388 && GET_MODE_BITSIZE (lmode) > 0
3389 && flag_strict_volatile_bitfields > 0)
3390 nmode = lmode;
3391 else
3392 nmode = get_best_mode (lbitsize, lbitpos, 0, 0,
3393 const_p ? TYPE_ALIGN (TREE_TYPE (linner))
3394 : MIN (TYPE_ALIGN (TREE_TYPE (linner)),
3395 TYPE_ALIGN (TREE_TYPE (rinner))),
3396 word_mode, lvolatilep || rvolatilep);
3397 if (nmode == VOIDmode)
3398 return 0;
3400 /* Set signed and unsigned types of the precision of this mode for the
3401 shifts below. */
3402 signed_type = lang_hooks.types.type_for_mode (nmode, 0);
3403 unsigned_type = lang_hooks.types.type_for_mode (nmode, 1);
3405 /* Compute the bit position and size for the new reference and our offset
3406 within it. If the new reference is the same size as the original, we
3407 won't optimize anything, so return zero. */
3408 nbitsize = GET_MODE_BITSIZE (nmode);
3409 nbitpos = lbitpos & ~ (nbitsize - 1);
3410 lbitpos -= nbitpos;
3411 if (nbitsize == lbitsize)
3412 return 0;
3414 if (BYTES_BIG_ENDIAN)
3415 lbitpos = nbitsize - lbitsize - lbitpos;
3417 /* Make the mask to be used against the extracted field. */
3418 mask = build_int_cst_type (unsigned_type, -1);
3419 mask = const_binop (LSHIFT_EXPR, mask, size_int (nbitsize - lbitsize));
3420 mask = const_binop (RSHIFT_EXPR, mask,
3421 size_int (nbitsize - lbitsize - lbitpos));
3423 if (! const_p)
3424 /* If not comparing with constant, just rework the comparison
3425 and return. */
3426 return fold_build2_loc (loc, code, compare_type,
3427 fold_build2_loc (loc, BIT_AND_EXPR, unsigned_type,
3428 make_bit_field_ref (loc, linner,
3429 unsigned_type,
3430 nbitsize, nbitpos,
3431 1),
3432 mask),
3433 fold_build2_loc (loc, BIT_AND_EXPR, unsigned_type,
3434 make_bit_field_ref (loc, rinner,
3435 unsigned_type,
3436 nbitsize, nbitpos,
3437 1),
3438 mask));
3440 /* Otherwise, we are handling the constant case. See if the constant is too
3441 big for the field. Warn and return a tree for 0 (false) if so. We do
3442 this not only for its own sake, but to avoid having to test for this
3443 error case below. If we didn't, we might generate wrong code.
3445 For unsigned fields, the constant shifted right by the field length should
3446 be all zero. For signed fields, the high-order bits should agree with
3447 the sign bit. */
3449 if (lunsignedp)
3451 if (! integer_zerop (const_binop (RSHIFT_EXPR,
3452 fold_convert_loc (loc,
3453 unsigned_type, rhs),
3454 size_int (lbitsize))))
3456 warning (0, "comparison is always %d due to width of bit-field",
3457 code == NE_EXPR);
3458 return constant_boolean_node (code == NE_EXPR, compare_type);
3461 else
3463 tree tem = const_binop (RSHIFT_EXPR,
3464 fold_convert_loc (loc, signed_type, rhs),
3465 size_int (lbitsize - 1));
3466 if (! integer_zerop (tem) && ! integer_all_onesp (tem))
3468 warning (0, "comparison is always %d due to width of bit-field",
3469 code == NE_EXPR);
3470 return constant_boolean_node (code == NE_EXPR, compare_type);
3474 /* Single-bit compares should always be against zero. */
3475 if (lbitsize == 1 && ! integer_zerop (rhs))
3477 code = code == EQ_EXPR ? NE_EXPR : EQ_EXPR;
3478 rhs = build_int_cst (type, 0);
3481 /* Make a new bitfield reference, shift the constant over the
3482 appropriate number of bits and mask it with the computed mask
3483 (in case this was a signed field). If we changed it, make a new one. */
3484 lhs = make_bit_field_ref (loc, linner, unsigned_type, nbitsize, nbitpos, 1);
3485 if (lvolatilep)
3487 TREE_SIDE_EFFECTS (lhs) = 1;
3488 TREE_THIS_VOLATILE (lhs) = 1;
3491 rhs = const_binop (BIT_AND_EXPR,
3492 const_binop (LSHIFT_EXPR,
3493 fold_convert_loc (loc, unsigned_type, rhs),
3494 size_int (lbitpos)),
3495 mask);
3497 lhs = build2_loc (loc, code, compare_type,
3498 build2 (BIT_AND_EXPR, unsigned_type, lhs, mask), rhs);
3499 return lhs;
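/* Worked example of the mask arithmetic above (illustrative numbers):
   for a 32-bit chunk (nbitsize == 32) and a 4-bit field at bit
   position 8 on a little-endian target,
     mask == (~0U << 28) >> 20 == 0x00000F00,
   so the constant RHS is shifted left by 8 and masked, and the test
   becomes "(WORD & 0xF00) == ((RHS << 8) & 0xF00)", avoiding any
   extraction shift at run time.  */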
3502 /* Subroutine for fold_truth_andor_1: decode a field reference.
3504 If EXP is a comparison reference, we return the innermost reference.
3506 *PBITSIZE is set to the number of bits in the reference, *PBITPOS is
3507 set to the starting bit number.
3509 If the innermost field can be completely contained in a mode-sized
3510 unit, *PMODE is set to that mode. Otherwise, it is set to VOIDmode.
3512 *PVOLATILEP is set to 1 if any expression encountered is volatile;
3513 otherwise it is not changed.
3515 *PUNSIGNEDP is set to the signedness of the field.
3517 *PMASK is set to the mask used. This is either contained in a
3518 BIT_AND_EXPR or derived from the width of the field.
3520 *PAND_MASK is set to the mask found in a BIT_AND_EXPR, if any.
3522 Return 0 if this is not a component reference or is one that we can't
3523 do anything with. */
3525 static tree
3526 decode_field_reference (location_t loc, tree exp, HOST_WIDE_INT *pbitsize,
3527 HOST_WIDE_INT *pbitpos, enum machine_mode *pmode,
3528 int *punsignedp, int *pvolatilep,
3529 tree *pmask, tree *pand_mask)
3531 tree outer_type = 0;
3532 tree and_mask = 0;
3533 tree mask, inner, offset;
3534 tree unsigned_type;
3535 unsigned int precision;
3537 /* All the optimizations using this function assume integer fields.
3538 There are problems with FP fields since the type_for_size call
3539 below can fail for, e.g., XFmode. */
3540 if (! INTEGRAL_TYPE_P (TREE_TYPE (exp)))
3541 return 0;
3543 /* We are interested in the bare arrangement of bits, so strip everything
3544 that doesn't affect the machine mode. However, record the type of the
3545 outermost expression if it may matter below. */
3546 if (CONVERT_EXPR_P (exp)
3547 || TREE_CODE (exp) == NON_LVALUE_EXPR)
3548 outer_type = TREE_TYPE (exp);
3549 STRIP_NOPS (exp);
3551 if (TREE_CODE (exp) == BIT_AND_EXPR)
3553 and_mask = TREE_OPERAND (exp, 1);
3554 exp = TREE_OPERAND (exp, 0);
3555 STRIP_NOPS (exp); STRIP_NOPS (and_mask);
3556 if (TREE_CODE (and_mask) != INTEGER_CST)
3557 return 0;
3560 inner = get_inner_reference (exp, pbitsize, pbitpos, &offset, pmode,
3561 punsignedp, pvolatilep, false);
3562 if ((inner == exp && and_mask == 0)
3563 || *pbitsize < 0 || offset != 0
3564 || TREE_CODE (inner) == PLACEHOLDER_EXPR)
3565 return 0;
3567 /* If the number of bits in the reference is the same as the bitsize of
3568 the outer type, then the outer type gives the signedness. Otherwise
3569 (in case of a small bitfield) the signedness is unchanged. */
3570 if (outer_type && *pbitsize == TYPE_PRECISION (outer_type))
3571 *punsignedp = TYPE_UNSIGNED (outer_type);
3573 /* Compute the mask to access the bitfield. */
3574 unsigned_type = lang_hooks.types.type_for_size (*pbitsize, 1);
3575 precision = TYPE_PRECISION (unsigned_type);
3577 mask = build_int_cst_type (unsigned_type, -1);
3579 mask = const_binop (LSHIFT_EXPR, mask, size_int (precision - *pbitsize));
3580 mask = const_binop (RSHIFT_EXPR, mask, size_int (precision - *pbitsize));
3582 /* Merge it with the mask we found in the BIT_AND_EXPR, if any. */
3583 if (and_mask != 0)
3584 mask = fold_build2_loc (loc, BIT_AND_EXPR, unsigned_type,
3585 fold_convert_loc (loc, unsigned_type, and_mask), mask);
3587 *pmask = mask;
3588 *pand_mask = and_mask;
3589 return inner;
3592 /* Return nonzero if MASK represents a mask of SIZE ones in the low-order
3593 bit positions. */
3595 static int
3596 all_ones_mask_p (const_tree mask, int size)
3598 tree type = TREE_TYPE (mask);
3599 unsigned int precision = TYPE_PRECISION (type);
3600 tree tmask;
3602 tmask = build_int_cst_type (signed_type_for (type), -1);
3604 return
3605 tree_int_cst_equal (mask,
3606 const_binop (RSHIFT_EXPR,
3607 const_binop (LSHIFT_EXPR, tmask,
3608 size_int (precision - size)),
3609 size_int (precision - size)));
3612 /* Subroutine for fold: determine if VAL is the INTEGER_CST that
3613 represents the sign bit of EXP's type. If EXP represents a sign
3614 or zero extension, also test VAL against the unextended type.
3615 The return value is the (sub)expression whose sign bit is VAL,
3616 or NULL_TREE otherwise. */
3618 static tree
3619 sign_bit_p (tree exp, const_tree val)
3621 unsigned HOST_WIDE_INT mask_lo, lo;
3622 HOST_WIDE_INT mask_hi, hi;
3623 int width;
3624 tree t;
3626 /* Tree EXP must have an integral type. */
3627 t = TREE_TYPE (exp);
3628 if (! INTEGRAL_TYPE_P (t))
3629 return NULL_TREE;
3631 /* Tree VAL must be an integer constant. */
3632 if (TREE_CODE (val) != INTEGER_CST
3633 || TREE_OVERFLOW (val))
3634 return NULL_TREE;
3636 width = TYPE_PRECISION (t);
3637 if (width > HOST_BITS_PER_WIDE_INT)
3639 hi = (unsigned HOST_WIDE_INT) 1 << (width - HOST_BITS_PER_WIDE_INT - 1);
3640 lo = 0;
3642 mask_hi = ((unsigned HOST_WIDE_INT) -1
3643 >> (2 * HOST_BITS_PER_WIDE_INT - width));
3644 mask_lo = -1;
3646 else
3648 hi = 0;
3649 lo = (unsigned HOST_WIDE_INT) 1 << (width - 1);
3651 mask_hi = 0;
3652 mask_lo = ((unsigned HOST_WIDE_INT) -1
3653 >> (HOST_BITS_PER_WIDE_INT - width));
3656 /* We mask off those bits beyond TREE_TYPE (exp) so that we can
3657 treat VAL as if it were unsigned. */
3658 if ((TREE_INT_CST_HIGH (val) & mask_hi) == hi
3659 && (TREE_INT_CST_LOW (val) & mask_lo) == lo)
3660 return exp;
3662 /* Handle extension from a narrower type. */
3663 if (TREE_CODE (exp) == NOP_EXPR
3664 && TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0))) < width)
3665 return sign_bit_p (TREE_OPERAND (exp, 0), val);
3667 return NULL_TREE;
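/* Example (illustrative): for a 32-bit int on a host with 64-bit
   HOST_WIDE_INT, width == 32, so lo == 1 << 31, hi == 0 and
   mask_lo == 0xffffffff; VAL is accepted iff its low 32 bits are
   exactly 0x80000000, i.e. iff it is the sign bit of the type.  */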
3670 /* Subroutine for fold_truth_andor_1: determine if an operand is simple enough
3671 to be evaluated unconditionally. */
3673 static int
3674 simple_operand_p (const_tree exp)
3676 /* Strip any conversions that don't change the machine mode. */
3677 STRIP_NOPS (exp);
3679 return (CONSTANT_CLASS_P (exp)
3680 || TREE_CODE (exp) == SSA_NAME
3681 || (DECL_P (exp)
3682 && ! TREE_ADDRESSABLE (exp)
3683 && ! TREE_THIS_VOLATILE (exp)
3684 && ! DECL_NONLOCAL (exp)
3685 /* Don't regard global variables as simple. They may be
3686 allocated in ways unknown to the compiler (shared memory,
3687 #pragma weak, etc). */
3688 && ! TREE_PUBLIC (exp)
3689 && ! DECL_EXTERNAL (exp)
3690 /* Loading a static variable is unduly expensive, but global
3691 registers aren't expensive. */
3692 && (! TREE_STATIC (exp) || DECL_REGISTER (exp))));
3695 /* Subroutine for fold_truth_andor: determine if an operand is simple enough
3696 to be evaluated unconditionally.
3697 In addition to simple_operand_p, we assume that comparisons, conversions,
3698 and logic-not operations are simple if their operands are simple, too. */
3700 static bool
3701 simple_operand_p_2 (tree exp)
3703 enum tree_code code;
3705 if (TREE_SIDE_EFFECTS (exp)
3706 || tree_could_trap_p (exp))
3707 return false;
3709 while (CONVERT_EXPR_P (exp))
3710 exp = TREE_OPERAND (exp, 0);
3712 code = TREE_CODE (exp);
3714 if (TREE_CODE_CLASS (code) == tcc_comparison)
3715 return (simple_operand_p (TREE_OPERAND (exp, 0))
3716 && simple_operand_p (TREE_OPERAND (exp, 1)));
3718 if (code == TRUTH_NOT_EXPR)
3719 return simple_operand_p_2 (TREE_OPERAND (exp, 0));
3721 return simple_operand_p (exp);
3725 /* The following functions are subroutines to fold_range_test and allow it to
3726 try to change a logical combination of comparisons into a range test.
3728 For example, both
3729 X == 2 || X == 3 || X == 4 || X == 5
3730 and
3731 X >= 2 && X <= 5
3732 are converted to
3733 (unsigned) (X - 2) <= 3
3735 We describe each set of comparisons as being either inside or outside
3736 a range, using a variable named like IN_P, and then describe the
3737 range with a lower and upper bound. If one of the bounds is omitted,
3738 it represents either the highest or lowest value of the type.
3740 In the comments below, we represent a range by two numbers in brackets
3741 preceded by a "+" to designate being inside that range, or a "-" to
3742 designate being outside that range, so the condition can be inverted by
3743 flipping the prefix. An omitted bound is represented by a "-". For
3744 example, "- [-, 10]" means being outside the range starting at the lowest
3745 possible value and ending at 10, in other words, being greater than 10.
3746 The range "+ [-, -]" is always true and hence the range "- [-, -]" is
3747 always false.
3749 We set up things so that the missing bounds are handled in a consistent
3750 manner so neither a missing bound nor "true" and "false" need to be
3751 handled using a special case. */
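/* [Editorial aside -- not part of fold-const.c.  A self-contained
   sketch of the transformation described above; both helper names
   are hypothetical.]  */

#include <assert.h>

static int
orif_form (int x)
{
  return x == 2 || x == 3 || x == 4 || x == 5;
}

static int
range_form (int x)
{
  return (unsigned) (x - 2) <= 3;   /* the range + [2, 5] as one test */
}

int
main (void)
{
  int x;
  for (x = -10; x <= 10; x++)
    assert (orif_form (x) == range_form (x));
  return 0;
}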
3753 /* Return the result of applying CODE to ARG0 and ARG1, but handle the case
3754 of ARG0 and/or ARG1 being omitted, meaning an unlimited range. UPPER0_P
3755 and UPPER1_P are nonzero if the respective argument is an upper bound
3756 and zero for a lower. TYPE, if nonzero, is the type of the result; it
3757 must be specified for a comparison. ARG1 will be converted to ARG0's
3758 type if both are specified. */
3760 static tree
3761 range_binop (enum tree_code code, tree type, tree arg0, int upper0_p,
3762 tree arg1, int upper1_p)
3764 tree tem;
3765 int result;
3766 int sgn0, sgn1;
3768 /* If neither arg represents infinity, do the normal operation.
3769 Else, if not a comparison, return infinity. Else handle the special
3770 comparison rules. Note that most of the cases below won't occur, but
3771 are handled for consistency. */
3773 if (arg0 != 0 && arg1 != 0)
3775 tem = fold_build2 (code, type != 0 ? type : TREE_TYPE (arg0),
3776 arg0, fold_convert (TREE_TYPE (arg0), arg1));
3777 STRIP_NOPS (tem);
3778 return TREE_CODE (tem) == INTEGER_CST ? tem : 0;
3781 if (TREE_CODE_CLASS (code) != tcc_comparison)
3782 return 0;
3784 /* Set SGN[01] to -1 if ARG[01] is a lower bound, 1 for an upper bound,
3785 and 0 for neither. In real mathematics we could not assume open-ended
3786 ranges are the same. But this is computer arithmetic, where numbers are
3787 finite, so we can substitute for any missing bound a value Z greater
3788 than any representable number. This permits us to treat unbounded
3789 ranges as equal. */
3790 sgn0 = arg0 != 0 ? 0 : (upper0_p ? 1 : -1);
3791 sgn1 = arg1 != 0 ? 0 : (upper1_p ? 1 : -1);
3792 switch (code)
3794 case EQ_EXPR:
3795 result = sgn0 == sgn1;
3796 break;
3797 case NE_EXPR:
3798 result = sgn0 != sgn1;
3799 break;
3800 case LT_EXPR:
3801 result = sgn0 < sgn1;
3802 break;
3803 case LE_EXPR:
3804 result = sgn0 <= sgn1;
3805 break;
3806 case GT_EXPR:
3807 result = sgn0 > sgn1;
3808 break;
3809 case GE_EXPR:
3810 result = sgn0 >= sgn1;
3811 break;
3812 default:
3813 gcc_unreachable ();
3816 return constant_boolean_node (result, type);
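/* [Editorial aside -- not part of fold-const.c.  A sketch of the SGN
   encoding above: 0 for a present (finite) bound, -1 for an absent
   lower bound, +1 for an absent upper bound, so absent bounds order
   like -infinity and +infinity and bounds of the same kind compare
   equal.  The helper name is hypothetical.]  */

#include <assert.h>

static int
bound_sgn (int present, int upper_p)
{
  return present ? 0 : (upper_p ? 1 : -1);
}

int
main (void)
{
  assert (bound_sgn (0, 0) < bound_sgn (1, 0));   /* -inf < finite */
  assert (bound_sgn (1, 1) < bound_sgn (0, 1));   /* finite < +inf */
  assert (bound_sgn (0, 1) == bound_sgn (0, 1));  /* +inf == +inf */
  return 0;
}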
3819 /* Helper routine for make_range. Perform one step for it, return
3820 new expression if the loop should continue or NULL_TREE if it should
3821 stop. */
3823 tree
3824 make_range_step (location_t loc, enum tree_code code, tree arg0, tree arg1,
3825 tree exp_type, tree *p_low, tree *p_high, int *p_in_p,
3826 bool *strict_overflow_p)
3828 tree arg0_type = TREE_TYPE (arg0);
3829 tree n_low, n_high, low = *p_low, high = *p_high;
3830 int in_p = *p_in_p, n_in_p;
3832 switch (code)
3834 case TRUTH_NOT_EXPR:
3835 *p_in_p = ! in_p;
3836 return arg0;
3838 case EQ_EXPR: case NE_EXPR:
3839 case LT_EXPR: case LE_EXPR: case GE_EXPR: case GT_EXPR:
3840 /* We can only do something if the range is testing for zero
3841 and if the second operand is an integer constant. Note that
3842 saying something is "in" the range we make is done by
3843 complementing IN_P since it will set in the initial case of
3844 being not equal to zero; "out" is leaving it alone. */
3845 if (low == NULL_TREE || high == NULL_TREE
3846 || ! integer_zerop (low) || ! integer_zerop (high)
3847 || TREE_CODE (arg1) != INTEGER_CST)
3848 return NULL_TREE;
3850 switch (code)
3852 case NE_EXPR: /* - [c, c] */
3853 low = high = arg1;
3854 break;
3855 case EQ_EXPR: /* + [c, c] */
3856 in_p = ! in_p, low = high = arg1;
3857 break;
3858 case GT_EXPR: /* - [-, c] */
3859 low = 0, high = arg1;
3860 break;
3861 case GE_EXPR: /* + [c, -] */
3862 in_p = ! in_p, low = arg1, high = 0;
3863 break;
3864 case LT_EXPR: /* - [c, -] */
3865 low = arg1, high = 0;
3866 break;
3867 case LE_EXPR: /* + [-, c] */
3868 in_p = ! in_p, low = 0, high = arg1;
3869 break;
3870 default:
3871 gcc_unreachable ();
3874 /* If this is an unsigned comparison, we also know that EXP is
3875 greater than or equal to zero. We base the range tests we make
3876 on that fact, so we record it here so we can parse existing
3877 range tests. We test arg0_type since often the return type
3878 of, e.g. EQ_EXPR, is boolean. */
3879 if (TYPE_UNSIGNED (arg0_type) && (low == 0 || high == 0))
3881 if (! merge_ranges (&n_in_p, &n_low, &n_high,
3882 in_p, low, high, 1,
3883 build_int_cst (arg0_type, 0),
3884 NULL_TREE))
3885 return NULL_TREE;
3887 in_p = n_in_p, low = n_low, high = n_high;
3889 /* If the high bound is missing, but we have a nonzero low
3890 bound, reverse the range so it goes from zero to the low bound
3891 minus 1. */
3892 if (high == 0 && low && ! integer_zerop (low))
3894 in_p = ! in_p;
3895 high = range_binop (MINUS_EXPR, NULL_TREE, low, 0,
3896 integer_one_node, 0);
3897 low = build_int_cst (arg0_type, 0);
3901 *p_low = low;
3902 *p_high = high;
3903 *p_in_p = in_p;
3904 return arg0;
3906 case NEGATE_EXPR:
3907 /* (-x) IN [a,b] -> x in [-b, -a] */
3908 n_low = range_binop (MINUS_EXPR, exp_type,
3909 build_int_cst (exp_type, 0),
3910 0, high, 1);
3911 n_high = range_binop (MINUS_EXPR, exp_type,
3912 build_int_cst (exp_type, 0),
3913 0, low, 0);
3914 if (n_high != 0 && TREE_OVERFLOW (n_high))
3915 return NULL_TREE;
3916 goto normalize;
3918 case BIT_NOT_EXPR:
3919 /* ~ X -> -X - 1 */
3920 return build2_loc (loc, MINUS_EXPR, exp_type, negate_expr (arg0),
3921 build_int_cst (exp_type, 1));
3923 case PLUS_EXPR:
3924 case MINUS_EXPR:
3925 if (TREE_CODE (arg1) != INTEGER_CST)
3926 return NULL_TREE;
3928 /* If flag_wrapv and ARG0_TYPE is signed, then we cannot
3929 move a constant to the other side. */
3930 if (!TYPE_UNSIGNED (arg0_type)
3931 && !TYPE_OVERFLOW_UNDEFINED (arg0_type))
3932 return NULL_TREE;
3934 /* If EXP is signed, any overflow in the computation is undefined,
3935 so we don't worry about it so long as our computations on
3936 the bounds don't overflow. For unsigned, overflow is defined
3937 and this is exactly the right thing. */
3938 n_low = range_binop (code == MINUS_EXPR ? PLUS_EXPR : MINUS_EXPR,
3939 arg0_type, low, 0, arg1, 0);
3940 n_high = range_binop (code == MINUS_EXPR ? PLUS_EXPR : MINUS_EXPR,
3941 arg0_type, high, 1, arg1, 0);
3942 if ((n_low != 0 && TREE_OVERFLOW (n_low))
3943 || (n_high != 0 && TREE_OVERFLOW (n_high)))
3944 return NULL_TREE;
3946 if (TYPE_OVERFLOW_UNDEFINED (arg0_type))
3947 *strict_overflow_p = true;
3949 normalize:
3950 /* Check for an unsigned range which has wrapped around the maximum
3951 value thus making n_high < n_low, and normalize it. */
3952 if (n_low && n_high && tree_int_cst_lt (n_high, n_low))
3954 low = range_binop (PLUS_EXPR, arg0_type, n_high, 0,
3955 integer_one_node, 0);
3956 high = range_binop (MINUS_EXPR, arg0_type, n_low, 0,
3957 integer_one_node, 0);
3959 /* If the range is of the form +/- [ x+1, x ], we won't
3960 be able to normalize it. But then, it represents the
3961 whole range or the empty set, so make it
3962 +/- [ -, - ]. */
3963 if (tree_int_cst_equal (n_low, low)
3964 && tree_int_cst_equal (n_high, high))
3965 low = high = 0;
3966 else
3967 in_p = ! in_p;
3969 else
3970 low = n_low, high = n_high;
3972 *p_low = low;
3973 *p_high = high;
3974 *p_in_p = in_p;
3975 return arg0;
3977 CASE_CONVERT:
3978 case NON_LVALUE_EXPR:
3979 if (TYPE_PRECISION (arg0_type) > TYPE_PRECISION (exp_type))
3980 return NULL_TREE;
3982 if (! INTEGRAL_TYPE_P (arg0_type)
3983 || (low != 0 && ! int_fits_type_p (low, arg0_type))
3984 || (high != 0 && ! int_fits_type_p (high, arg0_type)))
3985 return NULL_TREE;
3987 n_low = low, n_high = high;
3989 if (n_low != 0)
3990 n_low = fold_convert_loc (loc, arg0_type, n_low);
3992 if (n_high != 0)
3993 n_high = fold_convert_loc (loc, arg0_type, n_high);
3995 /* If we're converting arg0 from an unsigned type to exp's
3996 signed type, we will be doing the comparison as unsigned.
3997 The tests above have already verified that LOW and HIGH
3998 are both positive.
4000 So we have to ensure that we will handle large unsigned
4001 values the same way that the current signed bounds treat
4002 negative values. */
4004 if (!TYPE_UNSIGNED (exp_type) && TYPE_UNSIGNED (arg0_type))
4006 tree high_positive;
4007 tree equiv_type;
4008 /* For fixed-point modes, we need to pass the saturating flag
4009 as the 2nd parameter. */
4010 if (ALL_FIXED_POINT_MODE_P (TYPE_MODE (arg0_type)))
4011 equiv_type
4012 = lang_hooks.types.type_for_mode (TYPE_MODE (arg0_type),
4013 TYPE_SATURATING (arg0_type));
4014 else
4015 equiv_type
4016 = lang_hooks.types.type_for_mode (TYPE_MODE (arg0_type), 1);
4018 /* A range without an upper bound is, naturally, unbounded.
4019 Since convert would have cropped a very large value, use
4020 the max value for the destination type. */
4021 high_positive
4022 = TYPE_MAX_VALUE (equiv_type) ? TYPE_MAX_VALUE (equiv_type)
4023 : TYPE_MAX_VALUE (arg0_type);
4025 if (TYPE_PRECISION (exp_type) == TYPE_PRECISION (arg0_type))
4026 high_positive = fold_build2_loc (loc, RSHIFT_EXPR, arg0_type,
4027 fold_convert_loc (loc, arg0_type,
4028 high_positive),
4029 build_int_cst (arg0_type, 1));
4031 /* If the low bound is specified, "and" the range with the
4032 range for which the original unsigned value will be
4033 positive. */
4034 if (low != 0)
4036 if (! merge_ranges (&n_in_p, &n_low, &n_high, 1, n_low, n_high,
4037 1, fold_convert_loc (loc, arg0_type,
4038 integer_zero_node),
4039 high_positive))
4040 return NULL_TREE;
4042 in_p = (n_in_p == in_p);
4044 else
4046 /* Otherwise, "or" the range with the range of the input
4047 that will be interpreted as negative. */
4048 if (! merge_ranges (&n_in_p, &n_low, &n_high, 0, n_low, n_high,
4049 1, fold_convert_loc (loc, arg0_type,
4050 integer_zero_node),
4051 high_positive))
4052 return NULL_TREE;
4054 in_p = (in_p != n_in_p);
4058 *p_low = n_low;
4059 *p_high = n_high;
4060 *p_in_p = in_p;
4061 return arg0;
4063 default:
4064 return NULL_TREE;
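/* [Editorial aside -- not part of fold-const.c.  A brute-force sketch
   of the NEGATE_EXPR step above: (-x) IN [a, b] is the same test as
   x IN [-b, -a].  Bounds chosen so no arithmetic overflows.]  */

#include <assert.h>

int
main (void)
{
  int x;
  for (x = -8; x <= 8; x++)
    assert ((-3 <= -x && -x <= 5) == (-5 <= x && x <= 3));
  return 0;
}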
4068 /* Given EXP, a logical expression, set the range it is testing into
4069 variables denoted by PIN_P, PLOW, and PHIGH. Return the expression
4070 actually being tested. *PLOW and *PHIGH will be made of the same
4071 type as the returned expression. If EXP is not a comparison, we
4072 will most likely not be returning a useful value and range. Set
4073 *STRICT_OVERFLOW_P to true if the return value is only valid
4074 because signed overflow is undefined; otherwise, do not change
4075 *STRICT_OVERFLOW_P. */
4077 tree
4078 make_range (tree exp, int *pin_p, tree *plow, tree *phigh,
4079 bool *strict_overflow_p)
4081 enum tree_code code;
4082 tree arg0, arg1 = NULL_TREE;
4083 tree exp_type, nexp;
4084 int in_p;
4085 tree low, high;
4086 location_t loc = EXPR_LOCATION (exp);
4088 /* Start with simply saying "EXP != 0" and then look at the code of EXP
4089 and see if we can refine the range. Some of the cases below may not
4090 happen, but it doesn't seem worth worrying about this. We "continue"
4091 the outer loop when we've changed something; otherwise we "break"
4092 the switch, which will "break" the while. */
4094 in_p = 0;
4095 low = high = build_int_cst (TREE_TYPE (exp), 0);
4097 while (1)
4099 code = TREE_CODE (exp);
4100 exp_type = TREE_TYPE (exp);
4101 arg0 = NULL_TREE;
4103 if (IS_EXPR_CODE_CLASS (TREE_CODE_CLASS (code)))
4105 if (TREE_OPERAND_LENGTH (exp) > 0)
4106 arg0 = TREE_OPERAND (exp, 0);
4107 if (TREE_CODE_CLASS (code) == tcc_binary
4108 || TREE_CODE_CLASS (code) == tcc_comparison
4109 || (TREE_CODE_CLASS (code) == tcc_expression
4110 && TREE_OPERAND_LENGTH (exp) > 1))
4111 arg1 = TREE_OPERAND (exp, 1);
4113 if (arg0 == NULL_TREE)
4114 break;
4116 nexp = make_range_step (loc, code, arg0, arg1, exp_type, &low,
4117 &high, &in_p, strict_overflow_p);
4118 if (nexp == NULL_TREE)
4119 break;
4120 exp = nexp;
4123 /* If EXP is a constant, we can evaluate whether this is true or false. */
4124 if (TREE_CODE (exp) == INTEGER_CST)
4126 in_p = in_p == (integer_onep (range_binop (GE_EXPR, integer_type_node,
4127 exp, 0, low, 0))
4128 && integer_onep (range_binop (LE_EXPR, integer_type_node,
4129 exp, 1, high, 1)));
4130 low = high = 0;
4131 exp = 0;
4134 *pin_p = in_p, *plow = low, *phigh = high;
4135 return exp;
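/* [Editorial aside -- not part of fold-const.c.  A sketch of the kind
   of chain make_range peels, one make_range_step at a time:

     ! (x + 3 > 7)   TRUTH_NOT_EXPR: flip IN_P
       x + 3 > 7     GT_EXPR:        range becomes - [-, 7]
       x + 3         PLUS_EXPR:      shift both bounds by -3
       x             returned; the whole test is x IN + [-, 4]

   A brute-force check of that equivalence, assuming signed overflow
   does not occur in this sample range:]  */

#include <assert.h>

int
main (void)
{
  int x;
  for (x = -100; x <= 100; x++)
    assert ((!(x + 3 > 7)) == (x <= 4));
  return 0;
}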
4138 /* Given a range, LOW, HIGH, and IN_P, an expression, EXP, and a result
4139 type, TYPE, return an expression to test if EXP is in (or out of, depending
4140 on IN_P) the range. Return 0 if the test couldn't be created. */
4142 tree
4143 build_range_check (location_t loc, tree type, tree exp, int in_p,
4144 tree low, tree high)
4146 tree etype = TREE_TYPE (exp), value;
4148 #ifdef HAVE_canonicalize_funcptr_for_compare
4149 /* Disable this optimization for function pointer expressions
4150 on targets that require function pointer canonicalization. */
4151 if (HAVE_canonicalize_funcptr_for_compare
4152 && TREE_CODE (etype) == POINTER_TYPE
4153 && TREE_CODE (TREE_TYPE (etype)) == FUNCTION_TYPE)
4154 return NULL_TREE;
4155 #endif
4157 if (! in_p)
4159 value = build_range_check (loc, type, exp, 1, low, high);
4160 if (value != 0)
4161 return invert_truthvalue_loc (loc, value);
4163 return 0;
4166 if (low == 0 && high == 0)
4167 return build_int_cst (type, 1);
4169 if (low == 0)
4170 return fold_build2_loc (loc, LE_EXPR, type, exp,
4171 fold_convert_loc (loc, etype, high));
4173 if (high == 0)
4174 return fold_build2_loc (loc, GE_EXPR, type, exp,
4175 fold_convert_loc (loc, etype, low));
4177 if (operand_equal_p (low, high, 0))
4178 return fold_build2_loc (loc, EQ_EXPR, type, exp,
4179 fold_convert_loc (loc, etype, low));
4181 if (integer_zerop (low))
4183 if (! TYPE_UNSIGNED (etype))
4185 etype = unsigned_type_for (etype);
4186 high = fold_convert_loc (loc, etype, high);
4187 exp = fold_convert_loc (loc, etype, exp);
4189 return build_range_check (loc, type, exp, 1, 0, high);
4192 /* Optimize (c>=1) && (c<=127) into (signed char)c > 0. */
4193 if (integer_onep (low) && TREE_CODE (high) == INTEGER_CST)
4195 unsigned HOST_WIDE_INT lo;
4196 HOST_WIDE_INT hi;
4197 int prec;
4199 prec = TYPE_PRECISION (etype);
4200 if (prec <= HOST_BITS_PER_WIDE_INT)
4202 hi = 0;
4203 lo = ((unsigned HOST_WIDE_INT) 1 << (prec - 1)) - 1;
4205 else
4207 hi = ((HOST_WIDE_INT) 1 << (prec - HOST_BITS_PER_WIDE_INT - 1)) - 1;
4208 lo = (unsigned HOST_WIDE_INT) -1;
4211 if (TREE_INT_CST_HIGH (high) == hi && TREE_INT_CST_LOW (high) == lo)
4213 if (TYPE_UNSIGNED (etype))
4215 tree signed_etype = signed_type_for (etype);
4216 if (TYPE_PRECISION (signed_etype) != TYPE_PRECISION (etype))
4217 etype
4218 = build_nonstandard_integer_type (TYPE_PRECISION (etype), 0);
4219 else
4220 etype = signed_etype;
4221 exp = fold_convert_loc (loc, etype, exp);
4223 return fold_build2_loc (loc, GT_EXPR, type, exp,
4224 build_int_cst (etype, 0));
4228 /* Optimize (c>=low) && (c<=high) into (c-low>=0) && (c-low<=high-low).
4229 This requires wrap-around arithmetic for the type of the expression.
4230 First make sure that arithmetic in this type is valid, then make sure
4231 that it wraps around. */
4232 if (TREE_CODE (etype) == ENUMERAL_TYPE || TREE_CODE (etype) == BOOLEAN_TYPE)
4233 etype = lang_hooks.types.type_for_size (TYPE_PRECISION (etype),
4234 TYPE_UNSIGNED (etype));
4236 if (TREE_CODE (etype) == INTEGER_TYPE && !TYPE_OVERFLOW_WRAPS (etype))
4238 tree utype, minv, maxv;
4240 /* Check if (unsigned) INT_MAX + 1 == (unsigned) INT_MIN
4241 for the type in question, as we rely on this here. */
4242 utype = unsigned_type_for (etype);
4243 maxv = fold_convert_loc (loc, utype, TYPE_MAX_VALUE (etype));
4244 maxv = range_binop (PLUS_EXPR, NULL_TREE, maxv, 1,
4245 integer_one_node, 1);
4246 minv = fold_convert_loc (loc, utype, TYPE_MIN_VALUE (etype));
4248 if (integer_zerop (range_binop (NE_EXPR, integer_type_node,
4249 minv, 1, maxv, 1)))
4250 etype = utype;
4251 else
4252 return 0;
4255 high = fold_convert_loc (loc, etype, high);
4256 low = fold_convert_loc (loc, etype, low);
4257 exp = fold_convert_loc (loc, etype, exp);
4259 value = const_binop (MINUS_EXPR, high, low);
4262 if (POINTER_TYPE_P (etype))
4264 if (value != 0 && !TREE_OVERFLOW (value))
4266 low = fold_build1_loc (loc, NEGATE_EXPR, TREE_TYPE (low), low);
4267 return build_range_check (loc, type,
4268 fold_build_pointer_plus_loc (loc, exp, low),
4269 1, build_int_cst (etype, 0), value);
4271 return 0;
4274 if (value != 0 && !TREE_OVERFLOW (value))
4275 return build_range_check (loc, type,
4276 fold_build2_loc (loc, MINUS_EXPR, etype, exp, low),
4277 1, build_int_cst (etype, 0), value);
4279 return 0;
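/* [Editorial aside -- not part of fold-const.c.  A check of the
   "(c >= 1) && (c <= 127)  ==>  (signed char) c > 0" case handled
   above, for an 8-bit quantity.  Converting values above 127 to
   signed char is implementation-defined in ISO C; this sketch assumes
   the usual two's-complement wrapping that GCC documents.]  */

#include <assert.h>

int
main (void)
{
  unsigned int c;
  for (c = 0; c < 256; c++)
    assert ((c >= 1 && c <= 127) == ((signed char) c > 0));
  return 0;
}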
4282 /* Return the predecessor of VAL in its type, handling the infinite case. */
4284 static tree
4285 range_predecessor (tree val)
4287 tree type = TREE_TYPE (val);
4289 if (INTEGRAL_TYPE_P (type)
4290 && operand_equal_p (val, TYPE_MIN_VALUE (type), 0))
4291 return 0;
4292 else
4293 return range_binop (MINUS_EXPR, NULL_TREE, val, 0, integer_one_node, 0);
4296 /* Return the successor of VAL in its type, handling the infinite case. */
4298 static tree
4299 range_successor (tree val)
4301 tree type = TREE_TYPE (val);
4303 if (INTEGRAL_TYPE_P (type)
4304 && operand_equal_p (val, TYPE_MAX_VALUE (type), 0))
4305 return 0;
4306 else
4307 return range_binop (PLUS_EXPR, NULL_TREE, val, 0, integer_one_node, 0);
4310 /* Given two ranges, see if we can merge them into one. Return 1 if we
4311 can, 0 if we can't. Set the output range into the specified parameters. */
4313 bool
4314 merge_ranges (int *pin_p, tree *plow, tree *phigh, int in0_p, tree low0,
4315 tree high0, int in1_p, tree low1, tree high1)
4317 int no_overlap;
4318 int subset;
4319 int temp;
4320 tree tem;
4321 int in_p;
4322 tree low, high;
4323 int lowequal = ((low0 == 0 && low1 == 0)
4324 || integer_onep (range_binop (EQ_EXPR, integer_type_node,
4325 low0, 0, low1, 0)));
4326 int highequal = ((high0 == 0 && high1 == 0)
4327 || integer_onep (range_binop (EQ_EXPR, integer_type_node,
4328 high0, 1, high1, 1)));
4330 /* Make range 0 be the range that starts first, or ends last if they
4331 start at the same value. Swap them if it isn't. */
4332 if (integer_onep (range_binop (GT_EXPR, integer_type_node,
4333 low0, 0, low1, 0))
4334 || (lowequal
4335 && integer_onep (range_binop (GT_EXPR, integer_type_node,
4336 high1, 1, high0, 1))))
4338 temp = in0_p, in0_p = in1_p, in1_p = temp;
4339 tem = low0, low0 = low1, low1 = tem;
4340 tem = high0, high0 = high1, high1 = tem;
4343 /* Now flag two cases, whether the ranges are disjoint or whether the
4344 second range is totally subsumed in the first. Note that the tests
4345 below are simplified by the ones above. */
4346 no_overlap = integer_onep (range_binop (LT_EXPR, integer_type_node,
4347 high0, 1, low1, 0));
4348 subset = integer_onep (range_binop (LE_EXPR, integer_type_node,
4349 high1, 1, high0, 1));
4351 /* We now have four cases, depending on whether we are including or
4352 excluding the two ranges. */
4353 if (in0_p && in1_p)
4355 /* If they don't overlap, the result is false. If the second range
4356 is a subset it is the result. Otherwise, the range is from the start
4357 of the second to the end of the first. */
4358 if (no_overlap)
4359 in_p = 0, low = high = 0;
4360 else if (subset)
4361 in_p = 1, low = low1, high = high1;
4362 else
4363 in_p = 1, low = low1, high = high0;
4366 else if (in0_p && ! in1_p)
4368 /* If they don't overlap, the result is the first range. If they are
4369 equal, the result is false. If the second range is a subset of the
4370 first, and the ranges begin at the same place, we go from just after
4371 the end of the second range to the end of the first. If the second
4372 range is not a subset of the first, or if it is a subset and both
4373 ranges end at the same place, the range starts at the start of the
4374 first range and ends just before the second range.
4375 Otherwise, we can't describe this as a single range. */
4376 if (no_overlap)
4377 in_p = 1, low = low0, high = high0;
4378 else if (lowequal && highequal)
4379 in_p = 0, low = high = 0;
4380 else if (subset && lowequal)
4382 low = range_successor (high1);
4383 high = high0;
4384 in_p = 1;
4385 if (low == 0)
4387 /* We are in the weird situation where high0 > high1 but
4388 high1 has no successor. Punt. */
4389 return 0;
4392 else if (! subset || highequal)
4394 low = low0;
4395 high = range_predecessor (low1);
4396 in_p = 1;
4397 if (high == 0)
4399 /* low0 < low1 but low1 has no predecessor. Punt. */
4400 return 0;
4403 else
4404 return 0;
4407 else if (! in0_p && in1_p)
4409 /* If they don't overlap, the result is the second range. If the second
4410 is a subset of the first, the result is false. Otherwise,
4411 the range starts just after the first range and ends at the
4412 end of the second. */
4413 if (no_overlap)
4414 in_p = 1, low = low1, high = high1;
4415 else if (subset || highequal)
4416 in_p = 0, low = high = 0;
4417 else
4419 low = range_successor (high0);
4420 high = high1;
4421 in_p = 1;
4422 if (low == 0)
4424 /* high1 > high0 but high0 has no successor. Punt. */
4425 return 0;
4430 else
4432 /* The case where we are excluding both ranges. Here the complex case
4433 is if they don't overlap. In that case, the only time we have a
4434 range is if they are adjacent. If the second is a subset of the
4435 first, the result is the first. Otherwise, the range to exclude
4436 starts at the beginning of the first range and ends at the end of the
4437 second. */
4438 if (no_overlap)
4440 if (integer_onep (range_binop (EQ_EXPR, integer_type_node,
4441 range_successor (high0),
4442 1, low1, 0)))
4443 in_p = 0, low = low0, high = high1;
4444 else
4446 /* Canonicalize - [min, x] into - [-, x]. */
4447 if (low0 && TREE_CODE (low0) == INTEGER_CST)
4448 switch (TREE_CODE (TREE_TYPE (low0)))
4450 case ENUMERAL_TYPE:
4451 if (TYPE_PRECISION (TREE_TYPE (low0))
4452 != GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (low0))))
4453 break;
4454 /* FALLTHROUGH */
4455 case INTEGER_TYPE:
4456 if (tree_int_cst_equal (low0,
4457 TYPE_MIN_VALUE (TREE_TYPE (low0))))
4458 low0 = 0;
4459 break;
4460 case POINTER_TYPE:
4461 if (TYPE_UNSIGNED (TREE_TYPE (low0))
4462 && integer_zerop (low0))
4463 low0 = 0;
4464 break;
4465 default:
4466 break;
4469 /* Canonicalize - [x, max] into - [x, -]. */
4470 if (high1 && TREE_CODE (high1) == INTEGER_CST)
4471 switch (TREE_CODE (TREE_TYPE (high1)))
4473 case ENUMERAL_TYPE:
4474 if (TYPE_PRECISION (TREE_TYPE (high1))
4475 != GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (high1))))
4476 break;
4477 /* FALLTHROUGH */
4478 case INTEGER_TYPE:
4479 if (tree_int_cst_equal (high1,
4480 TYPE_MAX_VALUE (TREE_TYPE (high1))))
4481 high1 = 0;
4482 break;
4483 case POINTER_TYPE:
4484 if (TYPE_UNSIGNED (TREE_TYPE (high1))
4485 && integer_zerop (range_binop (PLUS_EXPR, NULL_TREE,
4486 high1, 1,
4487 integer_one_node, 1)))
4488 high1 = 0;
4489 break;
4490 default:
4491 break;
4494 /* The ranges might be also adjacent between the maximum and
4495 minimum values of the given type. For
4496 - [{min,-}, x] and - [y, {max,-}] ranges where x + 1 < y
4497 return + [x + 1, y - 1]. */
4498 if (low0 == 0 && high1 == 0)
4500 low = range_successor (high0);
4501 high = range_predecessor (low1);
4502 if (low == 0 || high == 0)
4503 return 0;
4505 in_p = 1;
4507 else
4508 return 0;
4511 else if (subset)
4512 in_p = 0, low = low0, high = high0;
4513 else
4514 in_p = 0, low = low0, high = high1;
4517 *pin_p = in_p, *plow = low, *phigh = high;
4518 return 1;
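/* [Editorial aside -- not part of fold-const.c.  A brute-force sketch
   of the simplest "in0_p && in1_p" case above: intersecting + [2, 9]
   with + [5, 20].  Range 0 starts first, range 1 is neither disjoint
   from it nor a subset, so the result runs from the start of the
   second range to the end of the first: + [5, 9].]  */

#include <assert.h>

int
main (void)
{
  int x;
  for (x = -5; x <= 30; x++)
    assert (((2 <= x && x <= 9) && (5 <= x && x <= 20))
            == (5 <= x && x <= 9));
  return 0;
}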
4522 /* Subroutine of fold, looking inside expressions of the form
4523 A op B ? A : C, where ARG0, ARG1 and ARG2 are the three operands
4524 of the COND_EXPR. This function is being used also to optimize
4525 A op B ? C : A, by reversing the comparison first.
4527 Return a folded expression whose code is not a COND_EXPR
4528 anymore, or NULL_TREE if no folding opportunity is found. */
4530 static tree
4531 fold_cond_expr_with_comparison (location_t loc, tree type,
4532 tree arg0, tree arg1, tree arg2)
4534 enum tree_code comp_code = TREE_CODE (arg0);
4535 tree arg00 = TREE_OPERAND (arg0, 0);
4536 tree arg01 = TREE_OPERAND (arg0, 1);
4537 tree arg1_type = TREE_TYPE (arg1);
4538 tree tem;
4540 STRIP_NOPS (arg1);
4541 STRIP_NOPS (arg2);
4543 /* If we have A op 0 ? A : -A, consider applying the following
4544 transformations:
4546 A == 0? A : -A same as -A
4547 A != 0? A : -A same as A
4548 A >= 0? A : -A same as abs (A)
4549 A > 0? A : -A same as abs (A)
4550 A <= 0? A : -A same as -abs (A)
4551 A < 0? A : -A same as -abs (A)
4553 None of these transformations work for modes with signed
4554 zeros. If A is +/-0, the first two transformations will
4555 change the sign of the result (from +0 to -0, or vice
4556 versa). The last four will fix the sign of the result,
4557 even though the original expressions could be positive or
4558 negative, depending on the sign of A.
4560 Note that all these transformations are correct if A is
4561 NaN, since the two alternatives (A and -A) are also NaNs. */
4562 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (type))
4563 && (FLOAT_TYPE_P (TREE_TYPE (arg01))
4564 ? real_zerop (arg01)
4565 : integer_zerop (arg01))
4566 && ((TREE_CODE (arg2) == NEGATE_EXPR
4567 && operand_equal_p (TREE_OPERAND (arg2, 0), arg1, 0))
4568 /* In the case that A is of the form X-Y, '-A' (arg2) may
4569 have already been folded to Y-X; check for that. */
4570 || (TREE_CODE (arg1) == MINUS_EXPR
4571 && TREE_CODE (arg2) == MINUS_EXPR
4572 && operand_equal_p (TREE_OPERAND (arg1, 0),
4573 TREE_OPERAND (arg2, 1), 0)
4574 && operand_equal_p (TREE_OPERAND (arg1, 1),
4575 TREE_OPERAND (arg2, 0), 0))))
4576 switch (comp_code)
4578 case EQ_EXPR:
4579 case UNEQ_EXPR:
4580 tem = fold_convert_loc (loc, arg1_type, arg1);
4581 return pedantic_non_lvalue_loc (loc,
4582 fold_convert_loc (loc, type,
4583 negate_expr (tem)));
4584 case NE_EXPR:
4585 case LTGT_EXPR:
4586 return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
4587 case UNGE_EXPR:
4588 case UNGT_EXPR:
4589 if (flag_trapping_math)
4590 break;
4591 /* Fall through. */
4592 case GE_EXPR:
4593 case GT_EXPR:
4594 if (TYPE_UNSIGNED (TREE_TYPE (arg1)))
4595 arg1 = fold_convert_loc (loc, signed_type_for
4596 (TREE_TYPE (arg1)), arg1);
4597 tem = fold_build1_loc (loc, ABS_EXPR, TREE_TYPE (arg1), arg1);
4598 return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, tem));
4599 case UNLE_EXPR:
4600 case UNLT_EXPR:
4601 if (flag_trapping_math)
4602 break;
4603 case LE_EXPR:
4604 case LT_EXPR:
4605 if (TYPE_UNSIGNED (TREE_TYPE (arg1)))
4606 arg1 = fold_convert_loc (loc, signed_type_for
4607 (TREE_TYPE (arg1)), arg1);
4608 tem = fold_build1_loc (loc, ABS_EXPR, TREE_TYPE (arg1), arg1);
4609 return negate_expr (fold_convert_loc (loc, type, tem));
4610 default:
4611 gcc_assert (TREE_CODE_CLASS (comp_code) == tcc_comparison);
4612 break;
4615 /* A != 0 ? A : 0 is simply A, unless A is -0. Likewise
4616 A == 0 ? A : 0 is always 0 unless A is -0. Note that
4617 both transformations are correct when A is NaN: A != 0
4618 is then true, and A == 0 is false. */
4620 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (type))
4621 && integer_zerop (arg01) && integer_zerop (arg2))
4623 if (comp_code == NE_EXPR)
4624 return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
4625 else if (comp_code == EQ_EXPR)
4626 return build_int_cst (type, 0);
4629 /* Try some transformations of A op B ? A : B.
4631 A == B? A : B same as B
4632 A != B? A : B same as A
4633 A >= B? A : B same as max (A, B)
4634 A > B? A : B same as max (B, A)
4635 A <= B? A : B same as min (A, B)
4636 A < B? A : B same as min (B, A)
4638 As above, these transformations don't work in the presence
4639 of signed zeros. For example, if A and B are zeros of
4640 opposite sign, the first two transformations will change
4641 the sign of the result. In the last four, the original
4642 expressions give different results for (A=+0, B=-0) and
4643 (A=-0, B=+0), but the transformed expressions do not.
4645 The first two transformations are correct if either A or B
4646 is a NaN. In the first transformation, the condition will
4647 be false, and B will indeed be chosen. In the case of the
4648 second transformation, the condition A != B will be true,
4649 and A will be chosen.
4651 The conversions to max() and min() are not correct if B is
4652 a number and A is not. The conditions in the original
4653 expressions will be false, so all four give B. The min()
4654 and max() versions would give a NaN instead. */
4655 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (type))
4656 && operand_equal_for_comparison_p (arg01, arg2, arg00)
4657 /* Avoid these transformations if the COND_EXPR may be used
4658 as an lvalue in the C++ front-end. PR c++/19199. */
4659 && (in_gimple_form
4660 || (strcmp (lang_hooks.name, "GNU C++") != 0
4661 && strcmp (lang_hooks.name, "GNU Objective-C++") != 0)
4662 || ! maybe_lvalue_p (arg1)
4663 || ! maybe_lvalue_p (arg2)))
4665 tree comp_op0 = arg00;
4666 tree comp_op1 = arg01;
4667 tree comp_type = TREE_TYPE (comp_op0);
4669 /* Avoid adding NOP_EXPRs in case this is an lvalue. */
4670 if (TYPE_MAIN_VARIANT (comp_type) == TYPE_MAIN_VARIANT (type))
4672 comp_type = type;
4673 comp_op0 = arg1;
4674 comp_op1 = arg2;
4677 switch (comp_code)
4679 case EQ_EXPR:
4680 return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, arg2));
4681 case NE_EXPR:
4682 return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
4683 case LE_EXPR:
4684 case LT_EXPR:
4685 case UNLE_EXPR:
4686 case UNLT_EXPR:
4687 /* In C++ a ?: expression can be an lvalue, so put the
4688 operand which will be used if they are equal first
4689 so that we can convert this back to the
4690 corresponding COND_EXPR. */
4691 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
4693 comp_op0 = fold_convert_loc (loc, comp_type, comp_op0);
4694 comp_op1 = fold_convert_loc (loc, comp_type, comp_op1);
4695 tem = (comp_code == LE_EXPR || comp_code == UNLE_EXPR)
4696 ? fold_build2_loc (loc, MIN_EXPR, comp_type, comp_op0, comp_op1)
4697 : fold_build2_loc (loc, MIN_EXPR, comp_type,
4698 comp_op1, comp_op0);
4699 return pedantic_non_lvalue_loc (loc,
4700 fold_convert_loc (loc, type, tem));
4702 break;
4703 case GE_EXPR:
4704 case GT_EXPR:
4705 case UNGE_EXPR:
4706 case UNGT_EXPR:
4707 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
4709 comp_op0 = fold_convert_loc (loc, comp_type, comp_op0);
4710 comp_op1 = fold_convert_loc (loc, comp_type, comp_op1);
4711 tem = (comp_code == GE_EXPR || comp_code == UNGE_EXPR)
4712 ? fold_build2_loc (loc, MAX_EXPR, comp_type, comp_op0, comp_op1)
4713 : fold_build2_loc (loc, MAX_EXPR, comp_type,
4714 comp_op1, comp_op0);
4715 return pedantic_non_lvalue_loc (loc,
4716 fold_convert_loc (loc, type, tem));
4718 break;
4719 case UNEQ_EXPR:
4720 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
4721 return pedantic_non_lvalue_loc (loc,
4722 fold_convert_loc (loc, type, arg2));
4723 break;
4724 case LTGT_EXPR:
4725 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
4726 return pedantic_non_lvalue_loc (loc,
4727 fold_convert_loc (loc, type, arg1));
4728 break;
4729 default:
4730 gcc_assert (TREE_CODE_CLASS (comp_code) == tcc_comparison);
4731 break;
4735 /* If this is A op C1 ? A : C2 with C1 and C2 constant integers,
4736 we might still be able to simplify this. For example,
4737 if C1 is one less or one more than C2, this might have started
4738 out as a MIN or MAX and been transformed by this function.
4739 Only good for INTEGER_TYPEs, because we need TYPE_MAX_VALUE. */
4741 if (INTEGRAL_TYPE_P (type)
4742 && TREE_CODE (arg01) == INTEGER_CST
4743 && TREE_CODE (arg2) == INTEGER_CST)
4744 switch (comp_code)
4746 case EQ_EXPR:
4747 if (TREE_CODE (arg1) == INTEGER_CST)
4748 break;
4749 /* We can replace A with C1 in this case. */
4750 arg1 = fold_convert_loc (loc, type, arg01);
4751 return fold_build3_loc (loc, COND_EXPR, type, arg0, arg1, arg2);
4753 case LT_EXPR:
4754 /* If C1 is C2 + 1, this is min(A, C2), but use ARG00's type for
4755 MIN_EXPR, to preserve the signedness of the comparison. */
4756 if (! operand_equal_p (arg2, TYPE_MAX_VALUE (type),
4757 OEP_ONLY_CONST)
4758 && operand_equal_p (arg01,
4759 const_binop (PLUS_EXPR, arg2,
4760 build_int_cst (type, 1)),
4761 OEP_ONLY_CONST))
4763 tem = fold_build2_loc (loc, MIN_EXPR, TREE_TYPE (arg00), arg00,
4764 fold_convert_loc (loc, TREE_TYPE (arg00),
4765 arg2));
4766 return pedantic_non_lvalue_loc (loc,
4767 fold_convert_loc (loc, type, tem));
4769 break;
4771 case LE_EXPR:
4772 /* If C1 is C2 - 1, this is min(A, C2), with the same care
4773 as above. */
4774 if (! operand_equal_p (arg2, TYPE_MIN_VALUE (type),
4775 OEP_ONLY_CONST)
4776 && operand_equal_p (arg01,
4777 const_binop (MINUS_EXPR, arg2,
4778 build_int_cst (type, 1)),
4779 OEP_ONLY_CONST))
4781 tem = fold_build2_loc (loc, MIN_EXPR, TREE_TYPE (arg00), arg00,
4782 fold_convert_loc (loc, TREE_TYPE (arg00),
4783 arg2));
4784 return pedantic_non_lvalue_loc (loc,
4785 fold_convert_loc (loc, type, tem));
4787 break;
4789 case GT_EXPR:
4790 /* If C1 is C2 - 1, this is max(A, C2), but use ARG00's type for
4791 MAX_EXPR, to preserve the signedness of the comparison. */
4792 if (! operand_equal_p (arg2, TYPE_MIN_VALUE (type),
4793 OEP_ONLY_CONST)
4794 && operand_equal_p (arg01,
4795 const_binop (MINUS_EXPR, arg2,
4796 build_int_cst (type, 1)),
4797 OEP_ONLY_CONST))
4799 tem = fold_build2_loc (loc, MAX_EXPR, TREE_TYPE (arg00), arg00,
4800 fold_convert_loc (loc, TREE_TYPE (arg00),
4801 arg2));
4802 return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, tem));
4804 break;
4806 case GE_EXPR:
4807 /* If C1 is C2 + 1, this is max(A, C2), with the same care as above. */
4808 if (! operand_equal_p (arg2, TYPE_MAX_VALUE (type),
4809 OEP_ONLY_CONST)
4810 && operand_equal_p (arg01,
4811 const_binop (PLUS_EXPR, arg2,
4812 build_int_cst (type, 1)),
4813 OEP_ONLY_CONST))
4815 tem = fold_build2_loc (loc, MAX_EXPR, TREE_TYPE (arg00), arg00,
4816 fold_convert_loc (loc, TREE_TYPE (arg00),
4817 arg2));
4818 return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, tem));
4820 break;
4821 case NE_EXPR:
4822 break;
4823 default:
4824 gcc_unreachable ();
4827 return NULL_TREE;
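/* [Editorial aside -- not part of fold-const.c.  A brute-force sketch
   of two of the "A op 0 ? A : -A" rewrites tabulated above, for ints,
   where there are no signed zeros to invalidate them.]  */

#include <assert.h>
#include <stdlib.h>

int
main (void)
{
  int a;
  for (a = -50; a <= 50; a++)
    {
      assert ((a >= 0 ? a : -a) == abs (a));    /* A >= 0 ? A : -A */
      assert ((a < 0 ? a : -a) == -abs (a));    /* A <  0 ? A : -A */
    }
  return 0;
}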
4832 #ifndef LOGICAL_OP_NON_SHORT_CIRCUIT
4833 #define LOGICAL_OP_NON_SHORT_CIRCUIT \
4834 (BRANCH_COST (optimize_function_for_speed_p (cfun), \
4835 false) >= 2)
4836 #endif
4838 /* EXP is some logical combination of boolean tests. See if we can
4839 merge it into some range test. Return the new tree if so. */
4841 static tree
4842 fold_range_test (location_t loc, enum tree_code code, tree type,
4843 tree op0, tree op1)
4845 int or_op = (code == TRUTH_ORIF_EXPR
4846 || code == TRUTH_OR_EXPR);
4847 int in0_p, in1_p, in_p;
4848 tree low0, low1, low, high0, high1, high;
4849 bool strict_overflow_p = false;
4850 tree lhs = make_range (op0, &in0_p, &low0, &high0, &strict_overflow_p);
4851 tree rhs = make_range (op1, &in1_p, &low1, &high1, &strict_overflow_p);
4852 tree tem;
4853 const char * const warnmsg = G_("assuming signed overflow does not occur "
4854 "when simplifying range test");
4856 /* If this is an OR operation, invert both sides; we will invert
4857 again at the end. */
4858 if (or_op)
4859 in0_p = ! in0_p, in1_p = ! in1_p;
4861 /* If both expressions are the same, if we can merge the ranges, and we
4862 can build the range test, return it or it inverted. If one of the
4863 ranges is always true or always false, consider it to be the same
4864 expression as the other. */
4865 if ((lhs == 0 || rhs == 0 || operand_equal_p (lhs, rhs, 0))
4866 && merge_ranges (&in_p, &low, &high, in0_p, low0, high0,
4867 in1_p, low1, high1)
4868 && 0 != (tem = (build_range_check (loc, type,
4869 lhs != 0 ? lhs
4870 : rhs != 0 ? rhs : integer_zero_node,
4871 in_p, low, high))))
4873 if (strict_overflow_p)
4874 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_COMPARISON);
4875 return or_op ? invert_truthvalue_loc (loc, tem) : tem;
4878 /* On machines where the branch cost is expensive, if this is a
4879 short-circuited branch and the underlying object on both sides
4880 is the same, make a non-short-circuit operation. */
4881 else if (LOGICAL_OP_NON_SHORT_CIRCUIT
4882 && lhs != 0 && rhs != 0
4883 && (code == TRUTH_ANDIF_EXPR
4884 || code == TRUTH_ORIF_EXPR)
4885 && operand_equal_p (lhs, rhs, 0))
4887 /* If simple enough, just rewrite. Otherwise, make a SAVE_EXPR
4888 unless we are at top level or LHS contains a PLACEHOLDER_EXPR, in
4889 which cases we can't do this. */
4890 if (simple_operand_p (lhs))
4891 return build2_loc (loc, code == TRUTH_ANDIF_EXPR
4892 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR,
4893 type, op0, op1);
4895 else if (!lang_hooks.decls.global_bindings_p ()
4896 && !CONTAINS_PLACEHOLDER_P (lhs))
4898 tree common = save_expr (lhs);
4900 if (0 != (lhs = build_range_check (loc, type, common,
4901 or_op ? ! in0_p : in0_p,
4902 low0, high0))
4903 && (0 != (rhs = build_range_check (loc, type, common,
4904 or_op ? ! in1_p : in1_p,
4905 low1, high1))))
4907 if (strict_overflow_p)
4908 fold_overflow_warning (warnmsg,
4909 WARN_STRICT_OVERFLOW_COMPARISON);
4910 return build2_loc (loc, code == TRUTH_ANDIF_EXPR
4911 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR,
4912 type, lhs, rhs);
4917 return 0;
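/* [Editorial aside -- not part of fold-const.c.  A sketch of both
   outcomes of fold_range_test: merging "ch >= '0' && ch <= '9'" into
   a single unsigned range check, and, where merging fails, replacing
   the short-circuit && with a plain bitwise & on simple operands.]  */

#include <assert.h>

int
main (void)
{
  int ch;
  for (ch = 0; ch < 128; ch++)
    {
      int merged = (unsigned) (ch - '0') <= 9;   /* range check */
      int flat = (ch >= '0') & (ch <= '9');      /* non-short-circuit */
      assert (merged == (ch >= '0' && ch <= '9'));
      assert (flat == (ch >= '0' && ch <= '9'));
    }
  return 0;
}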
4920 /* Subroutine for fold_truth_andor_1: C is an INTEGER_CST interpreted as a P
4921 bit value. Arrange things so the extra bits will be set to zero if and
4922 only if C is sign-extended to its full width. If MASK is nonzero,
4923 it is an INTEGER_CST that should be AND'ed with the extra bits. */
4925 static tree
4926 unextend (tree c, int p, int unsignedp, tree mask)
4928 tree type = TREE_TYPE (c);
4929 int modesize = GET_MODE_BITSIZE (TYPE_MODE (type));
4930 tree temp;
4932 if (p == modesize || unsignedp)
4933 return c;
4935 /* We work by getting just the sign bit into the low-order bit, then
4936 into the high-order bit, then sign-extend. We then XOR that value
4937 with C. */
4938 temp = const_binop (RSHIFT_EXPR, c, size_int (p - 1));
4939 temp = const_binop (BIT_AND_EXPR, temp, size_int (1));
4941 /* We must use a signed type in order to get an arithmetic right shift.
4942 However, we must also avoid introducing accidental overflows, so that
4943 a subsequent call to integer_zerop will work. Hence we must
4944 do the type conversion here. At this point, the constant is either
4945 zero or one, and the conversion to a signed type can never overflow.
4946 We could get an overflow if this conversion is done anywhere else. */
4947 if (TYPE_UNSIGNED (type))
4948 temp = fold_convert (signed_type_for (type), temp);
4950 temp = const_binop (LSHIFT_EXPR, temp, size_int (modesize - 1));
4951 temp = const_binop (RSHIFT_EXPR, temp, size_int (modesize - p - 1));
4952 if (mask != 0)
4953 temp = const_binop (BIT_AND_EXPR, temp,
4954 fold_convert (TREE_TYPE (c), mask));
4955 /* If necessary, convert the type back to match the type of C. */
4956 if (TYPE_UNSIGNED (type))
4957 temp = fold_convert (type, temp);
4959 return fold_convert (type, const_binop (BIT_XOR_EXPR, c, temp));
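/* [Editorial aside -- not part of fold-const.c.  The XOR-based
   sign-extension idiom the comment above alludes to, in its common
   standalone form: extend the low P bits of V to a full int.  The
   helper is hypothetical; the final conversion assumes the usual
   two's-complement behavior.]  */

#include <assert.h>

static int
sign_extend (unsigned v, int p)
{
  unsigned m = 1u << (p - 1);    /* the field's sign bit */
  v &= (1u << p) - 1;            /* keep only the low P bits */
  return (int) ((v ^ m) - m);    /* XOR, then subtract, extends */
}

int
main (void)
{
  assert (sign_extend (0x7f, 8) == 127);
  assert (sign_extend (0x80, 8) == -128);
  assert (sign_extend (0xff, 8) == -1);
  return 0;
}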
4962 /* For an expression that has the form
4963 (A && B) || ~B
4964 or
4965 (A || B) && ~B,
4966 we can drop one of the inner expressions and simplify to
4967 A || ~B
4968 or
4969 A && ~B
4970 LOC is the location of the resulting expression. OP is the inner
4971 logical operation; the left-hand side in the examples above, while CMPOP
4972 is the right-hand side. RHS_ONLY is used to prevent us from accidentally
4973 removing a condition that guards another, as in
4974 (A != NULL && A->...) || A == NULL
4975 which we must not transform. If RHS_ONLY is true, only eliminate the
4976 right-most operand of the inner logical operation. */
4978 static tree
4979 merge_truthop_with_opposite_arm (location_t loc, tree op, tree cmpop,
4980 bool rhs_only)
4982 tree type = TREE_TYPE (cmpop);
4983 enum tree_code code = TREE_CODE (cmpop);
4984 enum tree_code truthop_code = TREE_CODE (op);
4985 tree lhs = TREE_OPERAND (op, 0);
4986 tree rhs = TREE_OPERAND (op, 1);
4987 tree orig_lhs = lhs, orig_rhs = rhs;
4988 enum tree_code rhs_code = TREE_CODE (rhs);
4989 enum tree_code lhs_code = TREE_CODE (lhs);
4990 enum tree_code inv_code;
4992 if (TREE_SIDE_EFFECTS (op) || TREE_SIDE_EFFECTS (cmpop))
4993 return NULL_TREE;
4995 if (TREE_CODE_CLASS (code) != tcc_comparison)
4996 return NULL_TREE;
4998 if (rhs_code == truthop_code)
5000 tree newrhs = merge_truthop_with_opposite_arm (loc, rhs, cmpop, rhs_only);
5001 if (newrhs != NULL_TREE)
5003 rhs = newrhs;
5004 rhs_code = TREE_CODE (rhs);
5007 if (lhs_code == truthop_code && !rhs_only)
5009 tree newlhs = merge_truthop_with_opposite_arm (loc, lhs, cmpop, false);
5010 if (newlhs != NULL_TREE)
5012 lhs = newlhs;
5013 lhs_code = TREE_CODE (lhs);
5017 inv_code = invert_tree_comparison (code, HONOR_NANS (TYPE_MODE (type)));
5018 if (inv_code == rhs_code
5019 && operand_equal_p (TREE_OPERAND (rhs, 0), TREE_OPERAND (cmpop, 0), 0)
5020 && operand_equal_p (TREE_OPERAND (rhs, 1), TREE_OPERAND (cmpop, 1), 0))
5021 return lhs;
5022 if (!rhs_only && inv_code == lhs_code
5023 && operand_equal_p (TREE_OPERAND (lhs, 0), TREE_OPERAND (cmpop, 0), 0)
5024 && operand_equal_p (TREE_OPERAND (lhs, 1), TREE_OPERAND (cmpop, 1), 0))
5025 return rhs;
5026 if (rhs != orig_rhs || lhs != orig_lhs)
5027 return fold_build2_loc (loc, truthop_code, TREE_TYPE (cmpop),
5028 lhs, rhs);
5029 return NULL_TREE;
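/* [Editorial aside -- not part of fold-const.c.  A truth-table check
   of the two simplifications described above.]  */

#include <assert.h>

int
main (void)
{
  int a, b;
  for (a = 0; a <= 1; a++)
    for (b = 0; b <= 1; b++)
      {
        assert (((a && b) || !b) == (a || !b));
        assert (((a || b) && !b) == (a && !b));
      }
  return 0;
}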
5032 /* Find ways of folding logical expressions of LHS and RHS:
5033 Try to merge two comparisons to the same innermost item.
5034 Look for range tests like "ch >= '0' && ch <= '9'".
5035 Look for combinations of simple terms on machines with expensive branches
5036 and evaluate the RHS unconditionally.
5038 For example, if we have p->a == 2 && p->b == 4 and we can make an
5039 object large enough to span both A and B, we can do this with a comparison
5040 against the object ANDed with the a mask.
5042 If we have p->a == q->a && p->b == q->b, we may be able to use bit masking
5043 operations to do this with one comparison.
5045 We check for both normal comparisons and the BIT_AND_EXPRs made by this
5046 function and the one above.
5048 CODE is the logical operation being done. It can be TRUTH_ANDIF_EXPR,
5049 TRUTH_AND_EXPR, TRUTH_ORIF_EXPR, or TRUTH_OR_EXPR.
5051 TRUTH_TYPE is the type of the logical operand and LHS and RHS are its
5052 two operands.
5054 We return the simplified tree or 0 if no optimization is possible. */
5056 static tree
5057 fold_truth_andor_1 (location_t loc, enum tree_code code, tree truth_type,
5058 tree lhs, tree rhs)
5060 /* If this is the "or" of two comparisons, we can do something if
5061 the comparisons are NE_EXPR. If this is the "and", we can do something
5062 if the comparisons are EQ_EXPR. I.e.,
5063 (a->b == 2 && a->c == 4) can become (a->new == NEW).
5065 WANTED_CODE is this operation code. For single bit fields, we can
5066 convert EQ_EXPR to NE_EXPR so we need not reject the "wrong"
5067 comparison for one-bit fields. */
5069 enum tree_code wanted_code;
5070 enum tree_code lcode, rcode;
5071 tree ll_arg, lr_arg, rl_arg, rr_arg;
5072 tree ll_inner, lr_inner, rl_inner, rr_inner;
5073 HOST_WIDE_INT ll_bitsize, ll_bitpos, lr_bitsize, lr_bitpos;
5074 HOST_WIDE_INT rl_bitsize, rl_bitpos, rr_bitsize, rr_bitpos;
5075 HOST_WIDE_INT xll_bitpos, xlr_bitpos, xrl_bitpos, xrr_bitpos;
5076 HOST_WIDE_INT lnbitsize, lnbitpos, rnbitsize, rnbitpos;
5077 int ll_unsignedp, lr_unsignedp, rl_unsignedp, rr_unsignedp;
5078 enum machine_mode ll_mode, lr_mode, rl_mode, rr_mode;
5079 enum machine_mode lnmode, rnmode;
5080 tree ll_mask, lr_mask, rl_mask, rr_mask;
5081 tree ll_and_mask, lr_and_mask, rl_and_mask, rr_and_mask;
5082 tree l_const, r_const;
5083 tree lntype, rntype, result;
5084 HOST_WIDE_INT first_bit, end_bit;
5085 int volatilep;
5087 /* Start by getting the comparison codes. Fail if anything is volatile.
5088 If one operand is a BIT_AND_EXPR with the constant one, treat it as if
5089 it were surrounded with a NE_EXPR. */
5091 if (TREE_SIDE_EFFECTS (lhs) || TREE_SIDE_EFFECTS (rhs))
5092 return 0;
5094 lcode = TREE_CODE (lhs);
5095 rcode = TREE_CODE (rhs);
5097 if (lcode == BIT_AND_EXPR && integer_onep (TREE_OPERAND (lhs, 1)))
5099 lhs = build2 (NE_EXPR, truth_type, lhs,
5100 build_int_cst (TREE_TYPE (lhs), 0));
5101 lcode = NE_EXPR;
5104 if (rcode == BIT_AND_EXPR && integer_onep (TREE_OPERAND (rhs, 1)))
5106 rhs = build2 (NE_EXPR, truth_type, rhs,
5107 build_int_cst (TREE_TYPE (rhs), 0));
5108 rcode = NE_EXPR;
5111 if (TREE_CODE_CLASS (lcode) != tcc_comparison
5112 || TREE_CODE_CLASS (rcode) != tcc_comparison)
5113 return 0;
5115 ll_arg = TREE_OPERAND (lhs, 0);
5116 lr_arg = TREE_OPERAND (lhs, 1);
5117 rl_arg = TREE_OPERAND (rhs, 0);
5118 rr_arg = TREE_OPERAND (rhs, 1);
5120 /* Simplify (x<y) && (x==y) into (x<=y) and related optimizations. */
5121 if (simple_operand_p (ll_arg)
5122 && simple_operand_p (lr_arg))
5124 if (operand_equal_p (ll_arg, rl_arg, 0)
5125 && operand_equal_p (lr_arg, rr_arg, 0))
5127 result = combine_comparisons (loc, code, lcode, rcode,
5128 truth_type, ll_arg, lr_arg);
5129 if (result)
5130 return result;
5132 else if (operand_equal_p (ll_arg, rr_arg, 0)
5133 && operand_equal_p (lr_arg, rl_arg, 0))
5135 result = combine_comparisons (loc, code, lcode,
5136 swap_tree_comparison (rcode),
5137 truth_type, ll_arg, lr_arg);
5138 if (result)
5139 return result;
5143 code = ((code == TRUTH_AND_EXPR || code == TRUTH_ANDIF_EXPR)
5144 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR);
5146 /* If the RHS can be evaluated unconditionally and its operands are
5147 simple, it wins to evaluate the RHS unconditionally on machines
5148 with expensive branches. In this case, this isn't a comparison
5149 that can be merged. */
5151 if (BRANCH_COST (optimize_function_for_speed_p (cfun),
5152 false) >= 2
5153 && ! FLOAT_TYPE_P (TREE_TYPE (rl_arg))
5154 && simple_operand_p (rl_arg)
5155 && simple_operand_p (rr_arg))
5157 /* Convert (a != 0) || (b != 0) into (a | b) != 0. */
5158 if (code == TRUTH_OR_EXPR
5159 && lcode == NE_EXPR && integer_zerop (lr_arg)
5160 && rcode == NE_EXPR && integer_zerop (rr_arg)
5161 && TREE_TYPE (ll_arg) == TREE_TYPE (rl_arg)
5162 && INTEGRAL_TYPE_P (TREE_TYPE (ll_arg)))
5163 return build2_loc (loc, NE_EXPR, truth_type,
5164 build2 (BIT_IOR_EXPR, TREE_TYPE (ll_arg),
5165 ll_arg, rl_arg),
5166 build_int_cst (TREE_TYPE (ll_arg), 0));
5168 /* Convert (a == 0) && (b == 0) into (a | b) == 0. */
5169 if (code == TRUTH_AND_EXPR
5170 && lcode == EQ_EXPR && integer_zerop (lr_arg)
5171 && rcode == EQ_EXPR && integer_zerop (rr_arg)
5172 && TREE_TYPE (ll_arg) == TREE_TYPE (rl_arg)
5173 && INTEGRAL_TYPE_P (TREE_TYPE (ll_arg)))
5174 return build2_loc (loc, EQ_EXPR, truth_type,
5175 build2 (BIT_IOR_EXPR, TREE_TYPE (ll_arg),
5176 ll_arg, rl_arg),
5177 build_int_cst (TREE_TYPE (ll_arg), 0));
5180 /* See if the comparisons can be merged. Then get all the parameters for
5181 each side. */
5183 if ((lcode != EQ_EXPR && lcode != NE_EXPR)
5184 || (rcode != EQ_EXPR && rcode != NE_EXPR))
5185 return 0;
5187 volatilep = 0;
5188 ll_inner = decode_field_reference (loc, ll_arg,
5189 &ll_bitsize, &ll_bitpos, &ll_mode,
5190 &ll_unsignedp, &volatilep, &ll_mask,
5191 &ll_and_mask);
5192 lr_inner = decode_field_reference (loc, lr_arg,
5193 &lr_bitsize, &lr_bitpos, &lr_mode,
5194 &lr_unsignedp, &volatilep, &lr_mask,
5195 &lr_and_mask);
5196 rl_inner = decode_field_reference (loc, rl_arg,
5197 &rl_bitsize, &rl_bitpos, &rl_mode,
5198 &rl_unsignedp, &volatilep, &rl_mask,
5199 &rl_and_mask);
5200 rr_inner = decode_field_reference (loc, rr_arg,
5201 &rr_bitsize, &rr_bitpos, &rr_mode,
5202 &rr_unsignedp, &volatilep, &rr_mask,
5203 &rr_and_mask);
5205 /* The inner operation on the lhs of each comparison must be the same
5206 if we are to be able to do anything.
5207 Then see if we have constants. If not, the same must be true for
5208 the rhs's. */
5209 if (volatilep || ll_inner == 0 || rl_inner == 0
5210 || ! operand_equal_p (ll_inner, rl_inner, 0))
5211 return 0;
5213 if (TREE_CODE (lr_arg) == INTEGER_CST
5214 && TREE_CODE (rr_arg) == INTEGER_CST)
5215 l_const = lr_arg, r_const = rr_arg;
5216 else if (lr_inner == 0 || rr_inner == 0
5217 || ! operand_equal_p (lr_inner, rr_inner, 0))
5218 return 0;
5219 else
5220 l_const = r_const = 0;
5222 /* If either comparison code is not correct for our logical operation,
5223 fail. However, we can convert a one-bit comparison against zero into
5224 the opposite comparison against that bit being set in the field. */
5226 wanted_code = (code == TRUTH_AND_EXPR ? EQ_EXPR : NE_EXPR);
5227 if (lcode != wanted_code)
5229 if (l_const && integer_zerop (l_const) && integer_pow2p (ll_mask))
5231 /* Make the left operand unsigned, since we are only interested
5232 in the value of one bit. Otherwise we are doing the wrong
5233 thing below. */
5234 ll_unsignedp = 1;
5235 l_const = ll_mask;
5237 else
5238 return 0;
5241 /* This is analogous to the code for l_const above. */
5242 if (rcode != wanted_code)
5244 if (r_const && integer_zerop (r_const) && integer_pow2p (rl_mask))
5246 rl_unsignedp = 1;
5247 r_const = rl_mask;
5249 else
5250 return 0;
5253 /* See if we can find a mode that contains both fields being compared on
5254 the left. If we can't, fail. Otherwise, update all constants and masks
5255 to be relative to a field of that size. */
5256 first_bit = MIN (ll_bitpos, rl_bitpos);
5257 end_bit = MAX (ll_bitpos + ll_bitsize, rl_bitpos + rl_bitsize);
5258 lnmode = get_best_mode (end_bit - first_bit, first_bit, 0, 0,
5259 TYPE_ALIGN (TREE_TYPE (ll_inner)), word_mode,
5260 volatilep);
5261 if (lnmode == VOIDmode)
5262 return 0;
5264 lnbitsize = GET_MODE_BITSIZE (lnmode);
5265 lnbitpos = first_bit & ~ (lnbitsize - 1);
5266 lntype = lang_hooks.types.type_for_size (lnbitsize, 1);
5267 xll_bitpos = ll_bitpos - lnbitpos, xrl_bitpos = rl_bitpos - lnbitpos;
5269 if (BYTES_BIG_ENDIAN)
5271 xll_bitpos = lnbitsize - xll_bitpos - ll_bitsize;
5272 xrl_bitpos = lnbitsize - xrl_bitpos - rl_bitsize;
5275 ll_mask = const_binop (LSHIFT_EXPR, fold_convert_loc (loc, lntype, ll_mask),
5276 size_int (xll_bitpos));
5277 rl_mask = const_binop (LSHIFT_EXPR, fold_convert_loc (loc, lntype, rl_mask),
5278 size_int (xrl_bitpos));
5280 if (l_const)
5282 l_const = fold_convert_loc (loc, lntype, l_const);
5283 l_const = unextend (l_const, ll_bitsize, ll_unsignedp, ll_and_mask);
5284 l_const = const_binop (LSHIFT_EXPR, l_const, size_int (xll_bitpos));
5285 if (! integer_zerop (const_binop (BIT_AND_EXPR, l_const,
5286 fold_build1_loc (loc, BIT_NOT_EXPR,
5287 lntype, ll_mask))))
5289 warning (0, "comparison is always %d", wanted_code == NE_EXPR);
5291 return constant_boolean_node (wanted_code == NE_EXPR, truth_type);
5294 if (r_const)
5296 r_const = fold_convert_loc (loc, lntype, r_const);
5297 r_const = unextend (r_const, rl_bitsize, rl_unsignedp, rl_and_mask);
5298 r_const = const_binop (LSHIFT_EXPR, r_const, size_int (xrl_bitpos));
5299 if (! integer_zerop (const_binop (BIT_AND_EXPR, r_const,
5300 fold_build1_loc (loc, BIT_NOT_EXPR,
5301 lntype, rl_mask))))
5303 warning (0, "comparison is always %d", wanted_code == NE_EXPR);
5305 return constant_boolean_node (wanted_code == NE_EXPR, truth_type);
5309 /* If the right sides are not constant, do the same for them. Also,
5310 disallow this optimization if a size or signedness mismatch occurs
5311 between the left and right sides. */
5312 if (l_const == 0)
5314 if (ll_bitsize != lr_bitsize || rl_bitsize != rr_bitsize
5315 || ll_unsignedp != lr_unsignedp || rl_unsignedp != rr_unsignedp
5316 /* Make sure the two fields on the right
5317 correspond to the left without being swapped. */
5318 || ll_bitpos - rl_bitpos != lr_bitpos - rr_bitpos)
5319 return 0;
5321 first_bit = MIN (lr_bitpos, rr_bitpos);
5322 end_bit = MAX (lr_bitpos + lr_bitsize, rr_bitpos + rr_bitsize);
5323 rnmode = get_best_mode (end_bit - first_bit, first_bit, 0, 0,
5324 TYPE_ALIGN (TREE_TYPE (lr_inner)), word_mode,
5325 volatilep);
5326 if (rnmode == VOIDmode)
5327 return 0;
5329 rnbitsize = GET_MODE_BITSIZE (rnmode);
5330 rnbitpos = first_bit & ~ (rnbitsize - 1);
5331 rntype = lang_hooks.types.type_for_size (rnbitsize, 1);
5332 xlr_bitpos = lr_bitpos - rnbitpos, xrr_bitpos = rr_bitpos - rnbitpos;
5334 if (BYTES_BIG_ENDIAN)
5336 xlr_bitpos = rnbitsize - xlr_bitpos - lr_bitsize;
5337 xrr_bitpos = rnbitsize - xrr_bitpos - rr_bitsize;
5340 lr_mask = const_binop (LSHIFT_EXPR, fold_convert_loc (loc,
5341 rntype, lr_mask),
5342 size_int (xlr_bitpos));
5343 rr_mask = const_binop (LSHIFT_EXPR, fold_convert_loc (loc,
5344 rntype, rr_mask),
5345 size_int (xrr_bitpos));
5347 /* Make a mask that corresponds to both fields being compared.
5348 Do this for both items being compared. If the operands are the
5349 same size and the bits being compared are in the same position
5350 then we can do this by masking both and comparing the masked
5351 results. */
5352 ll_mask = const_binop (BIT_IOR_EXPR, ll_mask, rl_mask);
5353 lr_mask = const_binop (BIT_IOR_EXPR, lr_mask, rr_mask);
5354 if (lnbitsize == rnbitsize && xll_bitpos == xlr_bitpos)
5356 lhs = make_bit_field_ref (loc, ll_inner, lntype, lnbitsize, lnbitpos,
5357 ll_unsignedp || rl_unsignedp);
5358 if (! all_ones_mask_p (ll_mask, lnbitsize))
5359 lhs = build2 (BIT_AND_EXPR, lntype, lhs, ll_mask);
5361 rhs = make_bit_field_ref (loc, lr_inner, rntype, rnbitsize, rnbitpos,
5362 lr_unsignedp || rr_unsignedp);
5363 if (! all_ones_mask_p (lr_mask, rnbitsize))
5364 rhs = build2 (BIT_AND_EXPR, rntype, rhs, lr_mask);
5366 return build2_loc (loc, wanted_code, truth_type, lhs, rhs);
5369 /* There is still another way we can do something: If both pairs of
5370 fields being compared are adjacent, we may be able to make a wider
5371 field containing them both.
5373 Note that we still must mask the lhs/rhs expressions. Furthermore,
5374 the mask must be shifted to account for the shift done by
5375 make_bit_field_ref. */
5376 if ((ll_bitsize + ll_bitpos == rl_bitpos
5377 && lr_bitsize + lr_bitpos == rr_bitpos)
5378 || (ll_bitpos == rl_bitpos + rl_bitsize
5379 && lr_bitpos == rr_bitpos + rr_bitsize))
5381 tree type;
5383 lhs = make_bit_field_ref (loc, ll_inner, lntype,
5384 ll_bitsize + rl_bitsize,
5385 MIN (ll_bitpos, rl_bitpos), ll_unsignedp);
5386 rhs = make_bit_field_ref (loc, lr_inner, rntype,
5387 lr_bitsize + rr_bitsize,
5388 MIN (lr_bitpos, rr_bitpos), lr_unsignedp);
5390 ll_mask = const_binop (RSHIFT_EXPR, ll_mask,
5391 size_int (MIN (xll_bitpos, xrl_bitpos)));
5392 lr_mask = const_binop (RSHIFT_EXPR, lr_mask,
5393 size_int (MIN (xlr_bitpos, xrr_bitpos)));
5395 /* Convert to the smaller type before masking out unwanted bits. */
5396 type = lntype;
5397 if (lntype != rntype)
5399 if (lnbitsize > rnbitsize)
5401 lhs = fold_convert_loc (loc, rntype, lhs);
5402 ll_mask = fold_convert_loc (loc, rntype, ll_mask);
5403 type = rntype;
5405 else if (lnbitsize < rnbitsize)
5407 rhs = fold_convert_loc (loc, lntype, rhs);
5408 lr_mask = fold_convert_loc (loc, lntype, lr_mask);
5409 type = lntype;
5413 if (! all_ones_mask_p (ll_mask, ll_bitsize + rl_bitsize))
5414 lhs = build2 (BIT_AND_EXPR, type, lhs, ll_mask);
5416 if (! all_ones_mask_p (lr_mask, lr_bitsize + rr_bitsize))
5417 rhs = build2 (BIT_AND_EXPR, type, rhs, lr_mask);
5419 return build2_loc (loc, wanted_code, truth_type, lhs, rhs);
5422 return 0;
5425 /* Handle the case of comparisons with constants. If there is something in
5426 common between the masks, those bits of the constants must be the same.
5427 If not, the condition is always false. Test for this to avoid generating
5428 incorrect code below. */
5429 result = const_binop (BIT_AND_EXPR, ll_mask, rl_mask);
5430 if (! integer_zerop (result)
5431 && simple_cst_equal (const_binop (BIT_AND_EXPR, result, l_const),
5432 const_binop (BIT_AND_EXPR, result, r_const)) != 1)
5434 if (wanted_code == NE_EXPR)
5436 warning (0, "%<or%> of unmatched not-equal tests is always 1");
5437 return constant_boolean_node (true, truth_type);
5439 else
5441 warning (0, "%<and%> of mutually exclusive equal-tests is always 0");
5442 return constant_boolean_node (false, truth_type);
5446 /* Construct the expression we will return. First get the component
5447 reference we will make. Unless the mask is all ones the width of
5448 that field, perform the mask operation. Then compare with the
5449 merged constant. */
5450 result = make_bit_field_ref (loc, ll_inner, lntype, lnbitsize, lnbitpos,
5451 ll_unsignedp || rl_unsignedp);
5453 ll_mask = const_binop (BIT_IOR_EXPR, ll_mask, rl_mask);
5454 if (! all_ones_mask_p (ll_mask, lnbitsize))
5455 result = build2_loc (loc, BIT_AND_EXPR, lntype, result, ll_mask);
5457 return build2_loc (loc, wanted_code, truth_type, result,
5458 const_binop (BIT_IOR_EXPR, l_const, r_const));
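/* Editor's sketch -- standalone illustration, not part of fold-const.c.
   It shows the constant case handled above on a concrete 32-bit word:
   two masked equality tests against the same word merge into a single
   test whose mask and constant are the bitwise ORs of the originals.
   All names below are hypothetical.  */
#include <assert.h>
#include <stdint.h>

static void
check_merge_masked_compares (uint32_t x, uint8_t a, uint8_t b)
{
  int separate = (x & 0xffu) == a
                 && (x & 0xff00u) == ((uint32_t) b << 8);
  int merged = (x & 0xffffu) == (a | ((uint32_t) b << 8));
  assert (separate == merged);
}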
5461 /* Optimize T, which is a comparison of a MIN_EXPR or MAX_EXPR with a
5462 constant. */
5464 static tree
5465 optimize_minmax_comparison (location_t loc, enum tree_code code, tree type,
5466 tree op0, tree op1)
5468 tree arg0 = op0;
5469 enum tree_code op_code;
5470 tree comp_const;
5471 tree minmax_const;
5472 int consts_equal, consts_lt;
5473 tree inner;
5475 STRIP_SIGN_NOPS (arg0);
5477 op_code = TREE_CODE (arg0);
5478 minmax_const = TREE_OPERAND (arg0, 1);
5479 comp_const = fold_convert_loc (loc, TREE_TYPE (arg0), op1);
5480 consts_equal = tree_int_cst_equal (minmax_const, comp_const);
5481 consts_lt = tree_int_cst_lt (minmax_const, comp_const);
5482 inner = TREE_OPERAND (arg0, 0);
5484 /* If something does not permit us to optimize, return NULL_TREE. */
5485 if ((op_code != MIN_EXPR && op_code != MAX_EXPR)
5486 || TREE_CODE (comp_const) != INTEGER_CST
5487 || TREE_OVERFLOW (comp_const)
5488 || TREE_CODE (minmax_const) != INTEGER_CST
5489 || TREE_OVERFLOW (minmax_const))
5490 return NULL_TREE;
5492 /* Now handle all the various comparison codes. We only handle EQ_EXPR
5493 and GT_EXPR, doing the rest with recursive calls using logical
5494 simplifications. */
5495 switch (code)
5497 case NE_EXPR: case LT_EXPR: case LE_EXPR:
5499 tree tem
5500 = optimize_minmax_comparison (loc,
5501 invert_tree_comparison (code, false),
5502 type, op0, op1);
5503 if (tem)
5504 return invert_truthvalue_loc (loc, tem);
5505 return NULL_TREE;
5508 case GE_EXPR:
5509 return
5510 fold_build2_loc (loc, TRUTH_ORIF_EXPR, type,
5511 optimize_minmax_comparison
5512 (loc, EQ_EXPR, type, arg0, comp_const),
5513 optimize_minmax_comparison
5514 (loc, GT_EXPR, type, arg0, comp_const));
5516 case EQ_EXPR:
5517 if (op_code == MAX_EXPR && consts_equal)
5518 /* MAX (X, 0) == 0 -> X <= 0 */
5519 return fold_build2_loc (loc, LE_EXPR, type, inner, comp_const);
5521 else if (op_code == MAX_EXPR && consts_lt)
5522 /* MAX (X, 0) == 5 -> X == 5 */
5523 return fold_build2_loc (loc, EQ_EXPR, type, inner, comp_const);
5525 else if (op_code == MAX_EXPR)
5526 /* MAX (X, 0) == -1 -> false */
5527 return omit_one_operand_loc (loc, type, integer_zero_node, inner);
5529 else if (consts_equal)
5530 /* MIN (X, 0) == 0 -> X >= 0 */
5531 return fold_build2_loc (loc, GE_EXPR, type, inner, comp_const);
5533 else if (consts_lt)
5534 /* MIN (X, 0) == 5 -> false */
5535 return omit_one_operand_loc (loc, type, integer_zero_node, inner);
5537 else
5538 /* MIN (X, 0) == -1 -> X == -1 */
5539 return fold_build2_loc (loc, EQ_EXPR, type, inner, comp_const);
5541 case GT_EXPR:
5542 if (op_code == MAX_EXPR && (consts_equal || consts_lt))
5543 /* MAX (X, 0) > 0 -> X > 0
5544 MAX (X, 0) > 5 -> X > 5 */
5545 return fold_build2_loc (loc, GT_EXPR, type, inner, comp_const);
5547 else if (op_code == MAX_EXPR)
5548 /* MAX (X, 0) > -1 -> true */
5549 return omit_one_operand_loc (loc, type, integer_one_node, inner);
5551 else if (op_code == MIN_EXPR && (consts_equal || consts_lt))
5552 /* MIN (X, 0) > 0 -> false
5553 MIN (X, 0) > 5 -> false */
5554 return omit_one_operand_loc (loc, type, integer_zero_node, inner);
5556 else
5557 /* MIN (X, 0) > -1 -> X > -1 */
5558 return fold_build2_loc (loc, GT_EXPR, type, inner, comp_const);
5560 default:
5561 return NULL_TREE;
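/* Editor's sketch -- standalone illustration, not part of fold-const.c.
   These are the integer identities the MIN/MAX cases above rely on,
   restated as runtime checks; imax/imin are hypothetical stand-ins for
   MAX_EXPR and MIN_EXPR.  */
#include <assert.h>

static int imax (int a, int b) { return a > b ? a : b; }
static int imin (int a, int b) { return a < b ? a : b; }

static void
check_minmax_identities (int x, int c)
{
  assert ((imax (x, c) == c) == (x <= c));  /* MAX (X, C) == C -> X <= C */
  assert ((imin (x, c) == c) == (x >= c));  /* MIN (X, C) == C -> X >= C */
  assert ((imax (x, c) > c) == (x > c));    /* MAX (X, C) > C -> X > C */
  assert (!(imin (x, c) > c));              /* MIN (X, C) > C -> false */
}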
5565 /* T is an integer expression that is being multiplied, divided, or taken a
5566 modulus (CODE says which and what kind of divide or modulus) by a
5567 constant C. See if we can eliminate that operation by folding it with
5568 other operations already in T. WIDE_TYPE, if non-null, is a type that
5569 should be used for the computation if wider than our type.
5571 For example, if we are dividing (X * 8) + (Y * 16) by 4, we can return
5572 (X * 2) + (Y * 4). We must, however, be assured that either the original
5573 expression would not overflow or that overflow is undefined for the type
5574 in the language in question.
5576 If we return a non-null expression, it is an equivalent form of the
5577 original computation, but need not be in the original type.
5579 We set *STRICT_OVERFLOW_P to true if the return value depends on
5580 signed overflow being undefined. Otherwise we do not change
5581 *STRICT_OVERFLOW_P. */
5583 static tree
5584 extract_muldiv (tree t, tree c, enum tree_code code, tree wide_type,
5585 bool *strict_overflow_p)
5587 /* To avoid exponential search depth, refuse to allow recursion past
5588 three levels. Beyond that (1) it's highly unlikely that we'll find
5589 something interesting and (2) we've probably processed it before
5590 when we built the inner expression. */
5592 static int depth;
5593 tree ret;
5595 if (depth > 3)
5596 return NULL;
5598 depth++;
5599 ret = extract_muldiv_1 (t, c, code, wide_type, strict_overflow_p);
5600 depth--;
5602 return ret;
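/* Editor's sketch -- standalone illustration, not part of fold-const.c.
   The wrapper above uses a file-static depth counter to cap mutual
   recursion; the same pattern in isolation, with hypothetical names:  */
static int bounded_search (int n);

static int
search_worker (int n)
{
  return n <= 0 ? 0 : bounded_search (n - 1) + 1;
}

static int
bounded_search (int n)
{
  static int depth;
  int ret;

  if (depth > 3)
    return -1;                 /* Refuse to recurse any deeper.  */

  depth++;
  ret = search_worker (n);     /* May re-enter bounded_search.  */
  depth--;
  return ret;
}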
5605 static tree
5606 extract_muldiv_1 (tree t, tree c, enum tree_code code, tree wide_type,
5607 bool *strict_overflow_p)
5609 tree type = TREE_TYPE (t);
5610 enum tree_code tcode = TREE_CODE (t);
5611 tree ctype = (wide_type != 0 && (GET_MODE_SIZE (TYPE_MODE (wide_type))
5612 > GET_MODE_SIZE (TYPE_MODE (type)))
5613 ? wide_type : type);
5614 tree t1, t2;
5615 int same_p = tcode == code;
5616 tree op0 = NULL_TREE, op1 = NULL_TREE;
5617 bool sub_strict_overflow_p;
5619 /* Don't deal with constants of zero here; they confuse the code below. */
5620 if (integer_zerop (c))
5621 return NULL_TREE;
5623 if (TREE_CODE_CLASS (tcode) == tcc_unary)
5624 op0 = TREE_OPERAND (t, 0);
5626 if (TREE_CODE_CLASS (tcode) == tcc_binary)
5627 op0 = TREE_OPERAND (t, 0), op1 = TREE_OPERAND (t, 1);
5629 /* Note that we need not handle conditional operations here since fold
5630 already handles those cases. So just do arithmetic here. */
5631 switch (tcode)
5633 case INTEGER_CST:
5634 /* For a constant, we can always simplify if we are a multiply
5635 or (for divide and modulus) if it is a multiple of our constant. */
5636 if (code == MULT_EXPR
5637 || integer_zerop (const_binop (TRUNC_MOD_EXPR, t, c)))
5638 return const_binop (code, fold_convert (ctype, t),
5639 fold_convert (ctype, c));
5640 break;
5642 CASE_CONVERT: case NON_LVALUE_EXPR:
5643 /* If op0 is an expression ... */
5644 if ((COMPARISON_CLASS_P (op0)
5645 || UNARY_CLASS_P (op0)
5646 || BINARY_CLASS_P (op0)
5647 || VL_EXP_CLASS_P (op0)
5648 || EXPRESSION_CLASS_P (op0))
5649 /* ... and has wrapping overflow, and its type is smaller
5650 than ctype, then we cannot pass through as widening. */
5651 && ((TYPE_OVERFLOW_WRAPS (TREE_TYPE (op0))
5652 && ! (TREE_CODE (TREE_TYPE (op0)) == INTEGER_TYPE
5653 && TYPE_IS_SIZETYPE (TREE_TYPE (op0)))
5654 && (TYPE_PRECISION (ctype)
5655 > TYPE_PRECISION (TREE_TYPE (op0))))
5656 /* ... or this is a truncation (t is narrower than op0),
5657 then we cannot pass through this narrowing. */
5658 || (TYPE_PRECISION (type)
5659 < TYPE_PRECISION (TREE_TYPE (op0)))
5660 /* ... or signedness changes for division or modulus,
5661 then we cannot pass through this conversion. */
5662 || (code != MULT_EXPR
5663 && (TYPE_UNSIGNED (ctype)
5664 != TYPE_UNSIGNED (TREE_TYPE (op0))))
5665 /* ... or has undefined overflow while the type converted to
5666 does not, in which case we cannot do the operation in the inner
5667 type as that would introduce undefined overflow. */
5668 || (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (op0))
5669 && !TYPE_OVERFLOW_UNDEFINED (type))))
5670 break;
5672 /* Pass the constant down and see if we can make a simplification. If
5673 we can, replace this expression with the inner simplification for
5674 possible later conversion to our or some other type. */
5675 if ((t2 = fold_convert (TREE_TYPE (op0), c)) != 0
5676 && TREE_CODE (t2) == INTEGER_CST
5677 && !TREE_OVERFLOW (t2)
5678 && (0 != (t1 = extract_muldiv (op0, t2, code,
5679 code == MULT_EXPR
5680 ? ctype : NULL_TREE,
5681 strict_overflow_p))))
5682 return t1;
5683 break;
5685 case ABS_EXPR:
5686 /* If widening the type changes it from signed to unsigned, then we
5687 must avoid building ABS_EXPR itself as unsigned. */
5688 if (TYPE_UNSIGNED (ctype) && !TYPE_UNSIGNED (type))
5690 tree cstype = (*signed_type_for) (ctype);
5691 if ((t1 = extract_muldiv (op0, c, code, cstype, strict_overflow_p))
5692 != 0)
5694 t1 = fold_build1 (tcode, cstype, fold_convert (cstype, t1));
5695 return fold_convert (ctype, t1);
5697 break;
5699 /* If the constant is negative, we cannot simplify this. */
5700 if (tree_int_cst_sgn (c) == -1)
5701 break;
5702 /* FALLTHROUGH */
5703 case NEGATE_EXPR:
5704 if ((t1 = extract_muldiv (op0, c, code, wide_type, strict_overflow_p))
5705 != 0)
5706 return fold_build1 (tcode, ctype, fold_convert (ctype, t1));
5707 break;
5709 case MIN_EXPR: case MAX_EXPR:
5710 /* If widening the type changes the signedness, then we can't perform
5711 this optimization as that changes the result. */
5712 if (TYPE_UNSIGNED (ctype) != TYPE_UNSIGNED (type))
5713 break;
5715 /* MIN (a, b) / 5 -> MIN (a / 5, b / 5) */
5716 sub_strict_overflow_p = false;
5717 if ((t1 = extract_muldiv (op0, c, code, wide_type,
5718 &sub_strict_overflow_p)) != 0
5719 && (t2 = extract_muldiv (op1, c, code, wide_type,
5720 &sub_strict_overflow_p)) != 0)
5722 if (tree_int_cst_sgn (c) < 0)
5723 tcode = (tcode == MIN_EXPR ? MAX_EXPR : MIN_EXPR);
5724 if (sub_strict_overflow_p)
5725 *strict_overflow_p = true;
5726 return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
5727 fold_convert (ctype, t2));
5729 break;
5731 case LSHIFT_EXPR: case RSHIFT_EXPR:
5732 /* If the second operand is constant, this is a multiplication
5733 or floor division, by a power of two, so we can treat it that
5734 way unless the multiplier or divisor overflows. Signed
5735 left-shift overflow is implementation-defined rather than
5736 undefined in C90, so do not convert signed left shift into
5737 multiplication. */
5738 if (TREE_CODE (op1) == INTEGER_CST
5739 && (tcode == RSHIFT_EXPR || TYPE_UNSIGNED (TREE_TYPE (op0)))
5740 /* const_binop may not detect overflow correctly,
5741 so check for it explicitly here. */
5742 && TYPE_PRECISION (TREE_TYPE (size_one_node)) > TREE_INT_CST_LOW (op1)
5743 && TREE_INT_CST_HIGH (op1) == 0
5744 && 0 != (t1 = fold_convert (ctype,
5745 const_binop (LSHIFT_EXPR,
5746 size_one_node,
5747 op1)))
5748 && !TREE_OVERFLOW (t1))
5749 return extract_muldiv (build2 (tcode == LSHIFT_EXPR
5750 ? MULT_EXPR : FLOOR_DIV_EXPR,
5751 ctype,
5752 fold_convert (ctype, op0),
5753 t1),
5754 c, code, wide_type, strict_overflow_p);
5755 break;
5757 case PLUS_EXPR: case MINUS_EXPR:
5758 /* See if we can eliminate the operation on both sides. If we can, we
5759 can return a new PLUS or MINUS. If we can't, the only remaining
5760 cases where we can do anything are if the second operand is a
5761 constant. */
5762 sub_strict_overflow_p = false;
5763 t1 = extract_muldiv (op0, c, code, wide_type, &sub_strict_overflow_p);
5764 t2 = extract_muldiv (op1, c, code, wide_type, &sub_strict_overflow_p);
5765 if (t1 != 0 && t2 != 0
5766 && (code == MULT_EXPR
5767 /* If not multiplication, we can only do this if both operands
5768 are divisible by c. */
5769 || (multiple_of_p (ctype, op0, c)
5770 && multiple_of_p (ctype, op1, c))))
5772 if (sub_strict_overflow_p)
5773 *strict_overflow_p = true;
5774 return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
5775 fold_convert (ctype, t2));
5778 /* If this was a subtraction, negate OP1 and set it to be an addition.
5779 This simplifies the logic below. */
5780 if (tcode == MINUS_EXPR)
5782 tcode = PLUS_EXPR, op1 = negate_expr (op1);
5783 /* If OP1 was not easily negatable, the constant may be OP0. */
5784 if (TREE_CODE (op0) == INTEGER_CST)
5786 tree tem = op0;
5787 op0 = op1;
5788 op1 = tem;
5789 tem = t1;
5790 t1 = t2;
5791 t2 = tem;
5795 if (TREE_CODE (op1) != INTEGER_CST)
5796 break;
5798 /* If either OP1 or C is negative, this optimization is not safe for
5799 some of the division and remainder types while for others we need
5800 to change the code. */
5801 if (tree_int_cst_sgn (op1) < 0 || tree_int_cst_sgn (c) < 0)
5803 if (code == CEIL_DIV_EXPR)
5804 code = FLOOR_DIV_EXPR;
5805 else if (code == FLOOR_DIV_EXPR)
5806 code = CEIL_DIV_EXPR;
5807 else if (code != MULT_EXPR
5808 && code != CEIL_MOD_EXPR && code != FLOOR_MOD_EXPR)
5809 break;
5812 /* If it's a multiply or a division/modulus operation of a multiple
5813 of our constant, do the operation and verify it doesn't overflow. */
5814 if (code == MULT_EXPR
5815 || integer_zerop (const_binop (TRUNC_MOD_EXPR, op1, c)))
5817 op1 = const_binop (code, fold_convert (ctype, op1),
5818 fold_convert (ctype, c));
5819 /* We allow the constant to overflow with wrapping semantics. */
5820 if (op1 == 0
5821 || (TREE_OVERFLOW (op1) && !TYPE_OVERFLOW_WRAPS (ctype)))
5822 break;
5824 else
5825 break;
5827 /* If we have an unsigned type that is not a sizetype, we cannot widen
5828 the operation since it will change the result if the original
5829 computation overflowed. */
5830 if (TYPE_UNSIGNED (ctype)
5831 && ! (TREE_CODE (ctype) == INTEGER_TYPE && TYPE_IS_SIZETYPE (ctype))
5832 && ctype != type)
5833 break;
5835 /* If we were able to eliminate our operation from the first side,
5836 apply our operation to the second side and reform the PLUS. */
5837 if (t1 != 0 && (TREE_CODE (t1) != code || code == MULT_EXPR))
5838 return fold_build2 (tcode, ctype, fold_convert (ctype, t1), op1);
5840 /* The last case is if we are a multiply. In that case, we can
5841 apply the distributive law to commute the multiply and addition
5842 if the multiplication of the constants doesn't overflow. */
5843 if (code == MULT_EXPR)
5844 return fold_build2 (tcode, ctype,
5845 fold_build2 (code, ctype,
5846 fold_convert (ctype, op0),
5847 fold_convert (ctype, c)),
5848 op1);
5850 break;
5852 case MULT_EXPR:
5853 /* We have a special case here if we are doing something like
5854 (C * 8) % 4 since we know that's zero. */
5855 if ((code == TRUNC_MOD_EXPR || code == CEIL_MOD_EXPR
5856 || code == FLOOR_MOD_EXPR || code == ROUND_MOD_EXPR)
5857 /* If the multiplication can overflow we cannot optimize this.
5858 ??? Until we can properly mark individual operations as
5859 not overflowing we need to treat sizetype special here as
5860 stor-layout relies on this optimization to make
5861 DECL_FIELD_BIT_OFFSET always a constant. */
5862 && (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (t))
5863 || (TREE_CODE (TREE_TYPE (t)) == INTEGER_TYPE
5864 && TYPE_IS_SIZETYPE (TREE_TYPE (t))))
5865 && TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST
5866 && integer_zerop (const_binop (TRUNC_MOD_EXPR, op1, c)))
5868 *strict_overflow_p = true;
5869 return omit_one_operand (type, integer_zero_node, op0);
5872 /* ... fall through ... */
5874 case TRUNC_DIV_EXPR: case CEIL_DIV_EXPR: case FLOOR_DIV_EXPR:
5875 case ROUND_DIV_EXPR: case EXACT_DIV_EXPR:
5876 /* If we can extract our operation from the LHS, do so and return a
5877 new operation. Likewise for the RHS from a MULT_EXPR. Otherwise,
5878 do something only if the second operand is a constant. */
5879 if (same_p
5880 && (t1 = extract_muldiv (op0, c, code, wide_type,
5881 strict_overflow_p)) != 0)
5882 return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
5883 fold_convert (ctype, op1));
5884 else if (tcode == MULT_EXPR && code == MULT_EXPR
5885 && (t1 = extract_muldiv (op1, c, code, wide_type,
5886 strict_overflow_p)) != 0)
5887 return fold_build2 (tcode, ctype, fold_convert (ctype, op0),
5888 fold_convert (ctype, t1));
5889 else if (TREE_CODE (op1) != INTEGER_CST)
5890 return 0;
5892 /* If these are the same operation types, we can associate them
5893 assuming no overflow. */
5894 if (tcode == code)
5896 double_int mul;
5897 int overflow_p;
5898 mul = double_int_mul_with_sign
5899 (double_int_ext
5900 (tree_to_double_int (op1),
5901 TYPE_PRECISION (ctype), TYPE_UNSIGNED (ctype)),
5902 double_int_ext
5903 (tree_to_double_int (c),
5904 TYPE_PRECISION (ctype), TYPE_UNSIGNED (ctype)),
5905 false, &overflow_p);
5906 overflow_p = (((!TYPE_UNSIGNED (ctype)
5907 || (TREE_CODE (ctype) == INTEGER_TYPE
5908 && TYPE_IS_SIZETYPE (ctype)))
5909 && overflow_p)
5910 | TREE_OVERFLOW (c) | TREE_OVERFLOW (op1));
5911 if (!double_int_fits_to_tree_p (ctype, mul)
5912 && ((TYPE_UNSIGNED (ctype) && tcode != MULT_EXPR)
5913 || !TYPE_UNSIGNED (ctype)
5914 || (TREE_CODE (ctype) == INTEGER_TYPE
5915 && TYPE_IS_SIZETYPE (ctype))))
5916 overflow_p = 1;
5917 if (!overflow_p)
5918 return fold_build2 (tcode, ctype, fold_convert (ctype, op0),
5919 double_int_to_tree (ctype, mul));
5922 /* If these operations "cancel" each other, we have the main
5923 optimizations of this pass, which occur when either constant is a
5924 multiple of the other, in which case we replace this with either an
5925 operation of CODE or TCODE.
5927 If we have an unsigned type that is not a sizetype, we cannot do
5928 this since it will change the result if the original computation
5929 overflowed. */
5930 if ((TYPE_OVERFLOW_UNDEFINED (ctype)
5931 || (TREE_CODE (ctype) == INTEGER_TYPE && TYPE_IS_SIZETYPE (ctype)))
5932 && ((code == MULT_EXPR && tcode == EXACT_DIV_EXPR)
5933 || (tcode == MULT_EXPR
5934 && code != TRUNC_MOD_EXPR && code != CEIL_MOD_EXPR
5935 && code != FLOOR_MOD_EXPR && code != ROUND_MOD_EXPR
5936 && code != MULT_EXPR)))
5938 if (integer_zerop (const_binop (TRUNC_MOD_EXPR, op1, c)))
5940 if (TYPE_OVERFLOW_UNDEFINED (ctype))
5941 *strict_overflow_p = true;
5942 return fold_build2 (tcode, ctype, fold_convert (ctype, op0),
5943 fold_convert (ctype,
5944 const_binop (TRUNC_DIV_EXPR,
5945 op1, c)));
5947 else if (integer_zerop (const_binop (TRUNC_MOD_EXPR, c, op1)))
5949 if (TYPE_OVERFLOW_UNDEFINED (ctype))
5950 *strict_overflow_p = true;
5951 return fold_build2 (code, ctype, fold_convert (ctype, op0),
5952 fold_convert (ctype,
5953 const_binop (TRUNC_DIV_EXPR,
5954 c, op1)));
5957 break;
5959 default:
5960 break;
5963 return 0;
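/* Editor's sketch -- standalone illustration, not part of fold-const.c.
   A concrete instance of the rewrite extract_muldiv performs, valid
   only on the assumption that the original expression does not
   overflow (the condition the code above is careful about).  */
#include <assert.h>

static void
check_extract_muldiv_example (long x, long y)
{
  /* (X * 8 + Y * 16) / 4  ==  X * 2 + Y * 4, assuming no overflow:
     every term of the dividend is an exact multiple of 4.  */
  assert ((x * 8 + y * 16) / 4 == x * 2 + y * 4);
}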
5966 /* Return a node which has the indicated constant VALUE (either 0 or
5967 1 for scalars or {-1,-1,..} or {0,0,...} for vectors),
5968 and is of the indicated TYPE. */
5970 tree
5971 constant_boolean_node (bool value, tree type)
5973 if (type == integer_type_node)
5974 return value ? integer_one_node : integer_zero_node;
5975 else if (type == boolean_type_node)
5976 return value ? boolean_true_node : boolean_false_node;
5977 else if (TREE_CODE (type) == VECTOR_TYPE)
5978 return build_vector_from_val (type,
5979 build_int_cst (TREE_TYPE (type),
5980 value ? -1 : 0));
5981 else
5982 return fold_convert (type, value ? integer_one_node : integer_zero_node);
5986 /* Transform `a + (b ? x : y)' into `b ? (a + x) : (a + y)'.
5987 Transform, `a + (x < y)' into `(x < y) ? (a + 1) : (a + 0)'. Here
5988 CODE corresponds to the `+', COND to the `(b ? x : y)' or `(x < y)'
5989 expression, and ARG to `a'. If COND_FIRST_P is nonzero, then the
5990 COND is the first argument to CODE; otherwise (as in the example
5991 given here), it is the second argument. TYPE is the type of the
5992 original expression. Return NULL_TREE if no simplification is
5993 possible. */
5995 static tree
5996 fold_binary_op_with_conditional_arg (location_t loc,
5997 enum tree_code code,
5998 tree type, tree op0, tree op1,
5999 tree cond, tree arg, int cond_first_p)
6001 tree cond_type = cond_first_p ? TREE_TYPE (op0) : TREE_TYPE (op1);
6002 tree arg_type = cond_first_p ? TREE_TYPE (op1) : TREE_TYPE (op0);
6003 tree test, true_value, false_value;
6004 tree lhs = NULL_TREE;
6005 tree rhs = NULL_TREE;
6007 if (TREE_CODE (cond) == COND_EXPR)
6009 test = TREE_OPERAND (cond, 0);
6010 true_value = TREE_OPERAND (cond, 1);
6011 false_value = TREE_OPERAND (cond, 2);
6012 /* If this operand is an expression that throws (and hence has
6013 void type), then it does not make sense to try to perform a
6014 logical or arithmetic operation involving it. */
6015 if (VOID_TYPE_P (TREE_TYPE (true_value)))
6016 lhs = true_value;
6017 if (VOID_TYPE_P (TREE_TYPE (false_value)))
6018 rhs = false_value;
6020 else
6022 tree testtype = TREE_TYPE (cond);
6023 test = cond;
6024 true_value = constant_boolean_node (true, testtype);
6025 false_value = constant_boolean_node (false, testtype);
6028 /* This transformation is only worthwhile if we don't have to wrap ARG
6029 in a SAVE_EXPR and the operation can be simplified on at least one
6030 of the branches once it is pushed inside the COND_EXPR. */
6031 if (!TREE_CONSTANT (arg)
6032 && (TREE_SIDE_EFFECTS (arg)
6033 || TREE_CONSTANT (true_value) || TREE_CONSTANT (false_value)))
6034 return NULL_TREE;
6036 arg = fold_convert_loc (loc, arg_type, arg);
6037 if (lhs == 0)
6039 true_value = fold_convert_loc (loc, cond_type, true_value);
6040 if (cond_first_p)
6041 lhs = fold_build2_loc (loc, code, type, true_value, arg);
6042 else
6043 lhs = fold_build2_loc (loc, code, type, arg, true_value);
6045 if (rhs == 0)
6047 false_value = fold_convert_loc (loc, cond_type, false_value);
6048 if (cond_first_p)
6049 rhs = fold_build2_loc (loc, code, type, false_value, arg);
6050 else
6051 rhs = fold_build2_loc (loc, code, type, arg, false_value);
6054 /* Check that we have simplified at least one of the branches. */
6055 if (!TREE_CONSTANT (arg) && !TREE_CONSTANT (lhs) && !TREE_CONSTANT (rhs))
6056 return NULL_TREE;
6058 return fold_build3_loc (loc, COND_EXPR, type, test, lhs, rhs);
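/* Editor's sketch -- standalone illustration, not part of fold-const.c.
   The two source-level shapes of the transform above, as runtime
   checks (the function name is hypothetical):  */
#include <assert.h>

static void
check_cond_distribute (int a, int b, int x, int y)
{
  /* a + (b ? x : y)  ==  b ? (a + x) : (a + y)  */
  assert (a + (b ? x : y) == (b ? (a + x) : (a + y)));
  /* a + (x < y)  ==  (x < y) ? (a + 1) : (a + 0)  */
  assert (a + (x < y) == ((x < y) ? (a + 1) : (a + 0)));
}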
6062 /* Subroutine of fold() that checks for the addition of +/- 0.0.
6064 If !NEGATE, return true if ADDEND is +/-0.0 and, for all X of type
6065 TYPE, X + ADDEND is the same as X. If NEGATE, return true if X -
6066 ADDEND is the same as X.
6068 X + 0 and X - 0 both give X when X is NaN, infinite, or nonzero
6069 and finite. The problematic cases are when X is zero, and its mode
6070 has signed zeros. In the case of rounding towards -infinity,
6071 X - 0 is not the same as X because 0 - 0 is -0. In other rounding
6072 modes, X + 0 is not the same as X because -0 + 0 is 0. */
6074 bool
6075 fold_real_zero_addition_p (const_tree type, const_tree addend, int negate)
6077 if (!real_zerop (addend))
6078 return false;
6080 /* Don't allow the fold with -fsignaling-nans. */
6081 if (HONOR_SNANS (TYPE_MODE (type)))
6082 return false;
6084 /* Allow the fold if zeros aren't signed, or their sign isn't important. */
6085 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (type)))
6086 return true;
6088 /* Treat x + -0 as x - 0 and x - -0 as x + 0. */
6089 if (TREE_CODE (addend) == REAL_CST
6090 && REAL_VALUE_MINUS_ZERO (TREE_REAL_CST (addend)))
6091 negate = !negate;
6093 /* The mode has signed zeros, and we have to honor their sign.
6094 In this situation, there is only one case we can return true for.
6095 X - 0 is the same as X unless rounding towards -infinity is
6096 supported. */
6097 return negate && !HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type));
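/* Editor's sketch -- standalone illustration, not part of fold-const.c.
   The signed-zero corner cases the predicate above must respect, under
   the default round-to-nearest mode:  */
#include <assert.h>
#include <math.h>

static void
check_signed_zero_addition (void)
{
  double pz = 0.0, nz = -0.0;

  /* X - 0.0 preserves X's sign even for X == -0.0 ...  */
  assert (signbit (nz - pz));
  /* ... but X + 0.0 does not: -0.0 + 0.0 is +0.0 when rounding to
     nearest, so X + 0.0 is not always the same as X.  */
  assert (!signbit (nz + pz));
}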
6100 /* Subroutine of fold() that checks comparisons of built-in math
6101 functions against real constants.
6103 FCODE is the DECL_FUNCTION_CODE of the built-in, CODE is the comparison
6104 operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR, GE_EXPR or LE_EXPR. TYPE
6105 is the type of the result and ARG0 and ARG1 are the operands of the
6106 comparison. ARG1 must be a TREE_REAL_CST.
6108 The function returns the constant folded tree if a simplification
6109 can be made, and NULL_TREE otherwise. */
6111 static tree
6112 fold_mathfn_compare (location_t loc,
6113 enum built_in_function fcode, enum tree_code code,
6114 tree type, tree arg0, tree arg1)
6116 REAL_VALUE_TYPE c;
6118 if (BUILTIN_SQRT_P (fcode))
6120 tree arg = CALL_EXPR_ARG (arg0, 0);
6121 enum machine_mode mode = TYPE_MODE (TREE_TYPE (arg0));
6123 c = TREE_REAL_CST (arg1);
6124 if (REAL_VALUE_NEGATIVE (c))
6126 /* sqrt(x) < y (likewise <= and ==) is always false if y is negative. */
6127 if (code == EQ_EXPR || code == LT_EXPR || code == LE_EXPR)
6128 return omit_one_operand_loc (loc, type, integer_zero_node, arg);
6130 /* sqrt(x) > y is always true, if y is negative and we
6131 don't care about NaNs, i.e. negative values of x. */
6132 if (code == NE_EXPR || !HONOR_NANS (mode))
6133 return omit_one_operand_loc (loc, type, integer_one_node, arg);
6135 /* sqrt(x) > y is the same as x >= 0, if y is negative. */
6136 return fold_build2_loc (loc, GE_EXPR, type, arg,
6137 build_real (TREE_TYPE (arg), dconst0));
6139 else if (code == GT_EXPR || code == GE_EXPR)
6141 REAL_VALUE_TYPE c2;
6143 REAL_ARITHMETIC (c2, MULT_EXPR, c, c);
6144 real_convert (&c2, mode, &c2);
6146 if (REAL_VALUE_ISINF (c2))
6148 /* sqrt(x) > y is x == +Inf, when y is very large. */
6149 if (HONOR_INFINITIES (mode))
6150 return fold_build2_loc (loc, EQ_EXPR, type, arg,
6151 build_real (TREE_TYPE (arg), c2));
6153 /* sqrt(x) > y is always false, when y is very large
6154 and we don't care about infinities. */
6155 return omit_one_operand_loc (loc, type, integer_zero_node, arg);
6158 /* sqrt(x) > c is the same as x > c*c. */
6159 return fold_build2_loc (loc, code, type, arg,
6160 build_real (TREE_TYPE (arg), c2));
6162 else if (code == LT_EXPR || code == LE_EXPR)
6164 REAL_VALUE_TYPE c2;
6166 REAL_ARITHMETIC (c2, MULT_EXPR, c, c);
6167 real_convert (&c2, mode, &c2);
6169 if (REAL_VALUE_ISINF (c2))
6171 /* sqrt(x) < y is always true, when y is a very large
6172 value and we don't care about NaNs or Infinities. */
6173 if (! HONOR_NANS (mode) && ! HONOR_INFINITIES (mode))
6174 return omit_one_operand_loc (loc, type, integer_one_node, arg);
6176 /* sqrt(x) < y is x != +Inf when y is very large and we
6177 don't care about NaNs. */
6178 if (! HONOR_NANS (mode))
6179 return fold_build2_loc (loc, NE_EXPR, type, arg,
6180 build_real (TREE_TYPE (arg), c2));
6182 /* sqrt(x) < y is x >= 0 when y is very large and we
6183 don't care about Infinities. */
6184 if (! HONOR_INFINITIES (mode))
6185 return fold_build2_loc (loc, GE_EXPR, type, arg,
6186 build_real (TREE_TYPE (arg), dconst0));
6188 /* sqrt(x) < y is x >= 0 && x != +Inf, when y is large. */
6189 arg = save_expr (arg);
6190 return fold_build2_loc (loc, TRUTH_ANDIF_EXPR, type,
6191 fold_build2_loc (loc, GE_EXPR, type, arg,
6192 build_real (TREE_TYPE (arg),
6193 dconst0)),
6194 fold_build2_loc (loc, NE_EXPR, type, arg,
6195 build_real (TREE_TYPE (arg),
6196 c2)));
6199 /* sqrt(x) < c is the same as x < c*c, if we ignore NaNs. */
6200 if (! HONOR_NANS (mode))
6201 return fold_build2_loc (loc, code, type, arg,
6202 build_real (TREE_TYPE (arg), c2));
6204 /* sqrt(x) < c is the same as x >= 0 && x < c*c. */
6205 arg = save_expr (arg);
6206 return fold_build2_loc (loc, TRUTH_ANDIF_EXPR, type,
6207 fold_build2_loc (loc, GE_EXPR, type, arg,
6208 build_real (TREE_TYPE (arg),
6209 dconst0)),
6210 fold_build2_loc (loc, code, type, arg,
6211 build_real (TREE_TYPE (arg),
6212 c2)));
6216 return NULL_TREE;
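/* Editor's sketch -- standalone illustration, not part of fold-const.c.
   The exact part of the sqrt reasoning above: for nonnegative x,
   sqrt(x) is never negative, so comparisons against a negative
   constant collapse to true or false outright.  */
#include <assert.h>
#include <math.h>

static void
check_sqrt_negative_rhs (double x, double c)
{
  if (x >= 0.0 && c < 0.0)
    {
      assert (!(sqrt (x) < c));   /* EQ/LT/LE against negative c: false */
      assert (!(sqrt (x) == c));
      assert (sqrt (x) > c);      /* GT/GE/NE against negative c: true */
    }
}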
6219 /* Subroutine of fold() that optimizes comparisons against Infinities,
6220 either +Inf or -Inf.
6222 CODE is the comparison operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR,
6223 GE_EXPR or LE_EXPR. TYPE is the type of the result and ARG0 and ARG1
6224 are the operands of the comparison. ARG1 must be a TREE_REAL_CST.
6226 The function returns the constant folded tree if a simplification
6227 can be made, and NULL_TREE otherwise. */
6229 static tree
6230 fold_inf_compare (location_t loc, enum tree_code code, tree type,
6231 tree arg0, tree arg1)
6233 enum machine_mode mode;
6234 REAL_VALUE_TYPE max;
6235 tree temp;
6236 bool neg;
6238 mode = TYPE_MODE (TREE_TYPE (arg0));
6240 /* For negative infinity swap the sense of the comparison. */
6241 neg = REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg1));
6242 if (neg)
6243 code = swap_tree_comparison (code);
6245 switch (code)
6247 case GT_EXPR:
6248 /* x > +Inf is always false, if we ignore sNaNs. */
6249 if (HONOR_SNANS (mode))
6250 return NULL_TREE;
6251 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
6253 case LE_EXPR:
6254 /* x <= +Inf is always true, if we don't care about NaNs. */
6255 if (! HONOR_NANS (mode))
6256 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
6258 /* x <= +Inf is the same as x == x, i.e. !isnan(x). */
6259 arg0 = save_expr (arg0);
6260 return fold_build2_loc (loc, EQ_EXPR, type, arg0, arg0);
6262 case EQ_EXPR:
6263 case GE_EXPR:
6264 /* x == +Inf and x >= +Inf are always equal to x > DBL_MAX. */
6265 real_maxval (&max, neg, mode);
6266 return fold_build2_loc (loc, neg ? LT_EXPR : GT_EXPR, type,
6267 arg0, build_real (TREE_TYPE (arg0), max));
6269 case LT_EXPR:
6270 /* x < +Inf is always equal to x <= DBL_MAX. */
6271 real_maxval (&max, neg, mode);
6272 return fold_build2_loc (loc, neg ? GE_EXPR : LE_EXPR, type,
6273 arg0, build_real (TREE_TYPE (arg0), max));
6275 case NE_EXPR:
6276 /* x != +Inf is always equal to !(x > DBL_MAX). */
6277 real_maxval (&max, neg, mode);
6278 if (! HONOR_NANS (mode))
6279 return fold_build2_loc (loc, neg ? GE_EXPR : LE_EXPR, type,
6280 arg0, build_real (TREE_TYPE (arg0), max));
6282 temp = fold_build2_loc (loc, neg ? LT_EXPR : GT_EXPR, type,
6283 arg0, build_real (TREE_TYPE (arg0), max));
6284 return fold_build1_loc (loc, TRUTH_NOT_EXPR, type, temp);
6286 default:
6287 break;
6290 return NULL_TREE;
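/* Editor's sketch -- standalone illustration, not part of fold-const.c.
   The +Inf rewrites above, restated against IEEE semantics (they hold
   for NaN inputs too, since every comparison with NaN is false):  */
#include <assert.h>
#include <float.h>
#include <math.h>

static void
check_inf_compare (double x)
{
  assert ((x == INFINITY) == (x > DBL_MAX));   /* x == +Inf -> x > DBL_MAX */
  assert ((x >= INFINITY) == (x > DBL_MAX));   /* x >= +Inf -> x > DBL_MAX */
  assert ((x < INFINITY) == (x <= DBL_MAX));   /* x < +Inf  -> x <= DBL_MAX */
}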
6293 /* Subroutine of fold() that optimizes comparisons of a division by
6294 a nonzero integer constant against an integer constant, i.e.
6295 X/C1 op C2.
6297 CODE is the comparison operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR,
6298 GE_EXPR or LE_EXPR. TYPE is the type of the result and ARG0 and ARG1
6299 are the operands of the comparison. ARG1 must be an INTEGER_CST.
6301 The function returns the constant folded tree if a simplification
6302 can be made, and NULL_TREE otherwise. */
6304 static tree
6305 fold_div_compare (location_t loc,
6306 enum tree_code code, tree type, tree arg0, tree arg1)
6308 tree prod, tmp, hi, lo;
6309 tree arg00 = TREE_OPERAND (arg0, 0);
6310 tree arg01 = TREE_OPERAND (arg0, 1);
6311 double_int val;
6312 bool unsigned_p = TYPE_UNSIGNED (TREE_TYPE (arg0));
6313 bool neg_overflow;
6314 int overflow;
6316 /* We have to do this the hard way to detect unsigned overflow.
6317 prod = int_const_binop (MULT_EXPR, arg01, arg1); */
6318 overflow = mul_double_with_sign (TREE_INT_CST_LOW (arg01),
6319 TREE_INT_CST_HIGH (arg01),
6320 TREE_INT_CST_LOW (arg1),
6321 TREE_INT_CST_HIGH (arg1),
6322 &val.low, &val.high, unsigned_p);
6323 prod = force_fit_type_double (TREE_TYPE (arg00), val, -1, overflow);
6324 neg_overflow = false;
6326 if (unsigned_p)
6328 tmp = int_const_binop (MINUS_EXPR, arg01,
6329 build_int_cst (TREE_TYPE (arg01), 1));
6330 lo = prod;
6332 /* Likewise hi = int_const_binop (PLUS_EXPR, prod, tmp). */
6333 overflow = add_double_with_sign (TREE_INT_CST_LOW (prod),
6334 TREE_INT_CST_HIGH (prod),
6335 TREE_INT_CST_LOW (tmp),
6336 TREE_INT_CST_HIGH (tmp),
6337 &val.low, &val.high, unsigned_p);
6338 hi = force_fit_type_double (TREE_TYPE (arg00), val,
6339 -1, overflow | TREE_OVERFLOW (prod));
6341 else if (tree_int_cst_sgn (arg01) >= 0)
6343 tmp = int_const_binop (MINUS_EXPR, arg01,
6344 build_int_cst (TREE_TYPE (arg01), 1));
6345 switch (tree_int_cst_sgn (arg1))
6347 case -1:
6348 neg_overflow = true;
6349 lo = int_const_binop (MINUS_EXPR, prod, tmp);
6350 hi = prod;
6351 break;
6353 case 0:
6354 lo = fold_negate_const (tmp, TREE_TYPE (arg0));
6355 hi = tmp;
6356 break;
6358 case 1:
6359 hi = int_const_binop (PLUS_EXPR, prod, tmp);
6360 lo = prod;
6361 break;
6363 default:
6364 gcc_unreachable ();
6367 else
6369 /* A negative divisor reverses the relational operators. */
6370 code = swap_tree_comparison (code);
6372 tmp = int_const_binop (PLUS_EXPR, arg01,
6373 build_int_cst (TREE_TYPE (arg01), 1));
6374 switch (tree_int_cst_sgn (arg1))
6376 case -1:
6377 hi = int_const_binop (MINUS_EXPR, prod, tmp);
6378 lo = prod;
6379 break;
6381 case 0:
6382 hi = fold_negate_const (tmp, TREE_TYPE (arg0));
6383 lo = tmp;
6384 break;
6386 case 1:
6387 neg_overflow = true;
6388 lo = int_const_binop (PLUS_EXPR, prod, tmp);
6389 hi = prod;
6390 break;
6392 default:
6393 gcc_unreachable ();
6397 switch (code)
6399 case EQ_EXPR:
6400 if (TREE_OVERFLOW (lo) && TREE_OVERFLOW (hi))
6401 return omit_one_operand_loc (loc, type, integer_zero_node, arg00);
6402 if (TREE_OVERFLOW (hi))
6403 return fold_build2_loc (loc, GE_EXPR, type, arg00, lo);
6404 if (TREE_OVERFLOW (lo))
6405 return fold_build2_loc (loc, LE_EXPR, type, arg00, hi);
6406 return build_range_check (loc, type, arg00, 1, lo, hi);
6408 case NE_EXPR:
6409 if (TREE_OVERFLOW (lo) && TREE_OVERFLOW (hi))
6410 return omit_one_operand_loc (loc, type, integer_one_node, arg00);
6411 if (TREE_OVERFLOW (hi))
6412 return fold_build2_loc (loc, LT_EXPR, type, arg00, lo);
6413 if (TREE_OVERFLOW (lo))
6414 return fold_build2_loc (loc, GT_EXPR, type, arg00, hi);
6415 return build_range_check (loc, type, arg00, 0, lo, hi);
6417 case LT_EXPR:
6418 if (TREE_OVERFLOW (lo))
6420 tmp = neg_overflow ? integer_zero_node : integer_one_node;
6421 return omit_one_operand_loc (loc, type, tmp, arg00);
6423 return fold_build2_loc (loc, LT_EXPR, type, arg00, lo);
6425 case LE_EXPR:
6426 if (TREE_OVERFLOW (hi))
6428 tmp = neg_overflow ? integer_zero_node : integer_one_node;
6429 return omit_one_operand_loc (loc, type, tmp, arg00);
6431 return fold_build2_loc (loc, LE_EXPR, type, arg00, hi);
6433 case GT_EXPR:
6434 if (TREE_OVERFLOW (hi))
6436 tmp = neg_overflow ? integer_one_node : integer_zero_node;
6437 return omit_one_operand_loc (loc, type, tmp, arg00);
6439 return fold_build2_loc (loc, GT_EXPR, type, arg00, hi);
6441 case GE_EXPR:
6442 if (TREE_OVERFLOW (lo))
6444 tmp = neg_overflow ? integer_one_node : integer_zero_node;
6445 return omit_one_operand_loc (loc, type, tmp, arg00);
6447 return fold_build2_loc (loc, GE_EXPR, type, arg00, lo);
6449 default:
6450 break;
6453 return NULL_TREE;
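/* Editor's sketch -- standalone illustration, not part of fold-const.c.
   The range-check view of X / C1 op C2 used above, shown for positive
   C1 and C2 with C's truncating division, assuming lo and hi can be
   computed without overflow (the code above tracks exactly that).  */
#include <assert.h>

static void
check_div_compare (long x, long c1, long c2)
{
  if (c1 > 0 && c2 > 0)
    {
      long lo = c1 * c2;          /* "prod" in the code above */
      long hi = lo + (c1 - 1);    /* prod + (c1 - 1) */
      /* X / C1 == C2  <->  lo <= X <= hi  */
      assert ((x / c1 == c2) == (x >= lo && x <= hi));
    }
}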
6457 /* If CODE with arguments ARG0 and ARG1 represents a single bit
6458 equality/inequality test, then return a simplified form of the test
6459 using a sign test. Otherwise return NULL. TYPE is the desired
6460 result type. */
6462 static tree
6463 fold_single_bit_test_into_sign_test (location_t loc,
6464 enum tree_code code, tree arg0, tree arg1,
6465 tree result_type)
6467 /* If this is testing a single bit, we can optimize the test. */
6468 if ((code == NE_EXPR || code == EQ_EXPR)
6469 && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
6470 && integer_pow2p (TREE_OPERAND (arg0, 1)))
6472 /* If we have (A & C) != 0 where C is the sign bit of A, convert
6473 this into A < 0. Similarly for (A & C) == 0 into A >= 0. */
6474 tree arg00 = sign_bit_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg0, 1));
6476 if (arg00 != NULL_TREE
6477 /* This is only a win if casting to a signed type is cheap,
6478 i.e. when arg00's type is not a partial mode. */
6479 && TYPE_PRECISION (TREE_TYPE (arg00))
6480 == GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (arg00))))
6482 tree stype = signed_type_for (TREE_TYPE (arg00));
6483 return fold_build2_loc (loc, code == EQ_EXPR ? GE_EXPR : LT_EXPR,
6484 result_type,
6485 fold_convert_loc (loc, stype, arg00),
6486 build_int_cst (stype, 0));
6490 return NULL_TREE;
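/* Editor's sketch -- standalone illustration, not part of fold-const.c.
   The sign-test rewrite above on a concrete 32-bit type, assuming the
   conventional two's-complement unsigned-to-signed conversion.  */
#include <assert.h>
#include <stdint.h>

static void
check_sign_bit_test (uint32_t a)
{
  /* (A & 0x80000000) != 0  <->  (int32_t) A < 0  */
  assert (((a & UINT32_C (0x80000000)) != 0) == ((int32_t) a < 0));
  /* (A & 0x80000000) == 0  <->  (int32_t) A >= 0  */
  assert (((a & UINT32_C (0x80000000)) == 0) == ((int32_t) a >= 0));
}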
6493 /* If CODE with arguments ARG0 and ARG1 represents a single bit
6494 equality/inequality test, then return a simplified form of
6495 the test using shifts and logical operations. Otherwise return
6496 NULL. TYPE is the desired result type. */
6498 tree
6499 fold_single_bit_test (location_t loc, enum tree_code code,
6500 tree arg0, tree arg1, tree result_type)
6502 /* If this is testing a single bit, we can optimize the test. */
6503 if ((code == NE_EXPR || code == EQ_EXPR)
6504 && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
6505 && integer_pow2p (TREE_OPERAND (arg0, 1)))
6507 tree inner = TREE_OPERAND (arg0, 0);
6508 tree type = TREE_TYPE (arg0);
6509 int bitnum = tree_log2 (TREE_OPERAND (arg0, 1));
6510 enum machine_mode operand_mode = TYPE_MODE (type);
6511 int ops_unsigned;
6512 tree signed_type, unsigned_type, intermediate_type;
6513 tree tem, one;
6515 /* First, see if we can fold the single bit test into a sign-bit
6516 test. */
6517 tem = fold_single_bit_test_into_sign_test (loc, code, arg0, arg1,
6518 result_type);
6519 if (tem)
6520 return tem;
6522 /* Otherwise we have (A & C) != 0 where C is a single bit,
6523 convert that into ((A >> C2) & 1), where C2 = log2(C).
6524 Similarly for (A & C) == 0. */
6526 /* If INNER is a right shift of a constant and it plus BITNUM does
6527 not overflow, adjust BITNUM and INNER. */
6528 if (TREE_CODE (inner) == RSHIFT_EXPR
6529 && TREE_CODE (TREE_OPERAND (inner, 1)) == INTEGER_CST
6530 && TREE_INT_CST_HIGH (TREE_OPERAND (inner, 1)) == 0
6531 && bitnum < TYPE_PRECISION (type)
6532 && 0 > compare_tree_int (TREE_OPERAND (inner, 1),
6533 bitnum - TYPE_PRECISION (type)))
6535 bitnum += TREE_INT_CST_LOW (TREE_OPERAND (inner, 1));
6536 inner = TREE_OPERAND (inner, 0);
6539 /* If we are going to be able to omit the AND below, we must do our
6540 operations as unsigned. If we must use the AND, we have a choice.
6541 Normally unsigned is faster, but for some machines signed is. */
6542 #ifdef LOAD_EXTEND_OP
6543 ops_unsigned = (LOAD_EXTEND_OP (operand_mode) == SIGN_EXTEND
6544 && !flag_syntax_only) ? 0 : 1;
6545 #else
6546 ops_unsigned = 1;
6547 #endif
6549 signed_type = lang_hooks.types.type_for_mode (operand_mode, 0);
6550 unsigned_type = lang_hooks.types.type_for_mode (operand_mode, 1);
6551 intermediate_type = ops_unsigned ? unsigned_type : signed_type;
6552 inner = fold_convert_loc (loc, intermediate_type, inner);
6554 if (bitnum != 0)
6555 inner = build2 (RSHIFT_EXPR, intermediate_type,
6556 inner, size_int (bitnum));
6558 one = build_int_cst (intermediate_type, 1);
6560 if (code == EQ_EXPR)
6561 inner = fold_build2_loc (loc, BIT_XOR_EXPR, intermediate_type, inner, one);
6563 /* Put the AND last so it can combine with more things. */
6564 inner = build2 (BIT_AND_EXPR, intermediate_type, inner, one);
6566 /* Make sure to return the proper type. */
6567 inner = fold_convert_loc (loc, result_type, inner);
6569 return inner;
6571 return NULL_TREE;
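/* Editor's sketch -- standalone illustration, not part of fold-const.c.
   The shift-and-mask form produced above, for a single-bit constant
   C == 1 << bitnum with 0 <= bitnum < width of unsigned:  */
#include <assert.h>

static void
check_single_bit_test (unsigned a, unsigned bitnum)
{
  unsigned c = 1u << bitnum;

  /* (A & C) != 0  becomes  (A >> bitnum) & 1  */
  assert (((a & c) != 0) == (((a >> bitnum) & 1u) != 0));
  /* (A & C) == 0  becomes  ((A >> bitnum) ^ 1) & 1, the XOR being the
     one inserted for the EQ_EXPR case above  */
  assert (((a & c) == 0) == ((((a >> bitnum) ^ 1u) & 1u) != 0));
}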
6574 /* Check whether we are allowed to reorder operands arg0 and arg1,
6575 such that the evaluation of arg1 occurs before arg0. */
6577 static bool
6578 reorder_operands_p (const_tree arg0, const_tree arg1)
6580 if (! flag_evaluation_order)
6581 return true;
6582 if (TREE_CONSTANT (arg0) || TREE_CONSTANT (arg1))
6583 return true;
6584 return ! TREE_SIDE_EFFECTS (arg0)
6585 && ! TREE_SIDE_EFFECTS (arg1);
6588 /* Test whether it is preferable to swap two operands, ARG0 and
6589 ARG1, for example because ARG0 is an integer constant and ARG1
6590 isn't. If REORDER is true, only recommend swapping if we can
6591 evaluate the operands in reverse order. */
6593 bool
6594 tree_swap_operands_p (const_tree arg0, const_tree arg1, bool reorder)
6596 STRIP_SIGN_NOPS (arg0);
6597 STRIP_SIGN_NOPS (arg1);
6599 if (TREE_CODE (arg1) == INTEGER_CST)
6600 return 0;
6601 if (TREE_CODE (arg0) == INTEGER_CST)
6602 return 1;
6604 if (TREE_CODE (arg1) == REAL_CST)
6605 return 0;
6606 if (TREE_CODE (arg0) == REAL_CST)
6607 return 1;
6609 if (TREE_CODE (arg1) == FIXED_CST)
6610 return 0;
6611 if (TREE_CODE (arg0) == FIXED_CST)
6612 return 1;
6614 if (TREE_CODE (arg1) == COMPLEX_CST)
6615 return 0;
6616 if (TREE_CODE (arg0) == COMPLEX_CST)
6617 return 1;
6619 if (TREE_CONSTANT (arg1))
6620 return 0;
6621 if (TREE_CONSTANT (arg0))
6622 return 1;
6624 if (optimize_function_for_size_p (cfun))
6625 return 0;
6627 if (reorder && flag_evaluation_order
6628 && (TREE_SIDE_EFFECTS (arg0) || TREE_SIDE_EFFECTS (arg1)))
6629 return 0;
6631 /* It is preferable to swap two SSA_NAMEs to ensure a canonical form
6632 for commutative and comparison operators. Ensuring a canonical
6633 form allows the optimizers to find additional redundancies without
6634 having to explicitly check for both orderings. */
6635 if (TREE_CODE (arg0) == SSA_NAME
6636 && TREE_CODE (arg1) == SSA_NAME
6637 && SSA_NAME_VERSION (arg0) > SSA_NAME_VERSION (arg1))
6638 return 1;
6640 /* Put SSA_NAMEs last. */
6641 if (TREE_CODE (arg1) == SSA_NAME)
6642 return 0;
6643 if (TREE_CODE (arg0) == SSA_NAME)
6644 return 1;
6646 /* Put variables last. */
6647 if (DECL_P (arg1))
6648 return 0;
6649 if (DECL_P (arg0))
6650 return 1;
6652 return 0;
6655 /* Fold comparison ARG0 CODE ARG1 (with result in TYPE), where
6656 ARG0 is extended to a wider type. */
6658 static tree
6659 fold_widened_comparison (location_t loc, enum tree_code code,
6660 tree type, tree arg0, tree arg1)
6662 tree arg0_unw = get_unwidened (arg0, NULL_TREE);
6663 tree arg1_unw;
6664 tree shorter_type, outer_type;
6665 tree min, max;
6666 bool above, below;
6668 if (arg0_unw == arg0)
6669 return NULL_TREE;
6670 shorter_type = TREE_TYPE (arg0_unw);
6672 #ifdef HAVE_canonicalize_funcptr_for_compare
6673 /* Disable this optimization if we're casting a function pointer
6674 type on targets that require function pointer canonicalization. */
6675 if (HAVE_canonicalize_funcptr_for_compare
6676 && TREE_CODE (shorter_type) == POINTER_TYPE
6677 && TREE_CODE (TREE_TYPE (shorter_type)) == FUNCTION_TYPE)
6678 return NULL_TREE;
6679 #endif
6681 if (TYPE_PRECISION (TREE_TYPE (arg0)) <= TYPE_PRECISION (shorter_type))
6682 return NULL_TREE;
6684 arg1_unw = get_unwidened (arg1, NULL_TREE);
6686 /* If possible, express the comparison in the shorter mode. */
6687 if ((code == EQ_EXPR || code == NE_EXPR
6688 || TYPE_UNSIGNED (TREE_TYPE (arg0)) == TYPE_UNSIGNED (shorter_type))
6689 && (TREE_TYPE (arg1_unw) == shorter_type
6690 || ((TYPE_PRECISION (shorter_type)
6691 >= TYPE_PRECISION (TREE_TYPE (arg1_unw)))
6692 && (TYPE_UNSIGNED (shorter_type)
6693 == TYPE_UNSIGNED (TREE_TYPE (arg1_unw))))
6694 || (TREE_CODE (arg1_unw) == INTEGER_CST
6695 && (TREE_CODE (shorter_type) == INTEGER_TYPE
6696 || TREE_CODE (shorter_type) == BOOLEAN_TYPE)
6697 && int_fits_type_p (arg1_unw, shorter_type))))
6698 return fold_build2_loc (loc, code, type, arg0_unw,
6699 fold_convert_loc (loc, shorter_type, arg1_unw));
6701 if (TREE_CODE (arg1_unw) != INTEGER_CST
6702 || TREE_CODE (shorter_type) != INTEGER_TYPE
6703 || !int_fits_type_p (arg1_unw, shorter_type))
6704 return NULL_TREE;
6706 /* If we are comparing with an integer that does not fit into the range
6707 of the shorter type, the result is known. */
6708 outer_type = TREE_TYPE (arg1_unw);
6709 min = lower_bound_in_type (outer_type, shorter_type);
6710 max = upper_bound_in_type (outer_type, shorter_type);
6712 above = integer_nonzerop (fold_relational_const (LT_EXPR, type,
6713 max, arg1_unw));
6714 below = integer_nonzerop (fold_relational_const (LT_EXPR, type,
6715 arg1_unw, min));
6717 switch (code)
6719 case EQ_EXPR:
6720 if (above || below)
6721 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
6722 break;
6724 case NE_EXPR:
6725 if (above || below)
6726 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
6727 break;
6729 case LT_EXPR:
6730 case LE_EXPR:
6731 if (above)
6732 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
6733 else if (below)
6734 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
6736 case GT_EXPR:
6737 case GE_EXPR:
6738 if (above)
6739 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
6740 else if (below)
6741 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
6743 default:
6744 break;
6747 return NULL_TREE;
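/* Editor's sketch -- standalone illustration, not part of fold-const.c.
   The "result is known" case above: once a narrow value is widened,
   any constant outside the narrow type's range settles the comparison
   immediately.  */
#include <assert.h>

static void
check_widened_compare (signed char sc)
{
  int wide = sc;               /* widened; range is [-128, 127] */

  assert (!(wide == 1000));    /* EQ against out-of-range: false */
  assert (wide != 1000);       /* NE against out-of-range: true */
  assert (wide < 1000);        /* constant above range: LT/LE true */
  assert (wide > -1000);       /* constant below range: GT/GE true */
}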
6750 /* Fold comparison ARG0 CODE ARG1 (with result in TYPE), where for
6751 ARG0 just the signedness is changed. */
6753 static tree
6754 fold_sign_changed_comparison (location_t loc, enum tree_code code, tree type,
6755 tree arg0, tree arg1)
6757 tree arg0_inner;
6758 tree inner_type, outer_type;
6760 if (!CONVERT_EXPR_P (arg0))
6761 return NULL_TREE;
6763 outer_type = TREE_TYPE (arg0);
6764 arg0_inner = TREE_OPERAND (arg0, 0);
6765 inner_type = TREE_TYPE (arg0_inner);
6767 #ifdef HAVE_canonicalize_funcptr_for_compare
6768 /* Disable this optimization if we're casting a function pointer
6769 type on targets that require function pointer canonicalization. */
6770 if (HAVE_canonicalize_funcptr_for_compare
6771 && TREE_CODE (inner_type) == POINTER_TYPE
6772 && TREE_CODE (TREE_TYPE (inner_type)) == FUNCTION_TYPE)
6773 return NULL_TREE;
6774 #endif
6776 if (TYPE_PRECISION (inner_type) != TYPE_PRECISION (outer_type))
6777 return NULL_TREE;
6779 if (TREE_CODE (arg1) != INTEGER_CST
6780 && !(CONVERT_EXPR_P (arg1)
6781 && TREE_TYPE (TREE_OPERAND (arg1, 0)) == inner_type))
6782 return NULL_TREE;
6784 if ((TYPE_UNSIGNED (inner_type) != TYPE_UNSIGNED (outer_type)
6785 || POINTER_TYPE_P (inner_type) != POINTER_TYPE_P (outer_type))
6786 && code != NE_EXPR
6787 && code != EQ_EXPR)
6788 return NULL_TREE;
6790 if (TREE_CODE (arg1) == INTEGER_CST)
6791 arg1 = force_fit_type_double (inner_type, tree_to_double_int (arg1),
6792 0, TREE_OVERFLOW (arg1));
6793 else
6794 arg1 = fold_convert_loc (loc, inner_type, arg1);
6796 return fold_build2_loc (loc, code, type, arg0_inner, arg1);
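/* Editor's sketch -- standalone illustration, not part of fold-const.c.
   When only signedness changes and the precision stays the same, the
   conversion is a bijection on the bit pattern, so an equality against
   a constant representable in both types survives the cast.  Assumes
   two's-complement conversion.  */
#include <assert.h>
#include <stdint.h>

static void
check_sign_changed_compare (int32_t x)
{
  /* (uint32_t) x == 5u  <->  x == 5  */
  assert (((uint32_t) x == 5u) == (x == 5));
}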
6799 /* Tries to replace &a[idx] p+ s * delta with &a[idx + delta], if s is
6801 the step of the array. Reconstructs s and delta in the case of s *
6801 delta being an integer constant (and thus already folded). ADDR is
6802 the address. MULT is the multiplicative expression. If the
6803 function succeeds, the new address expression is returned.
6804 Otherwise NULL_TREE is returned. LOC is the location of the
6805 resulting expression. */
6807 static tree
6808 try_move_mult_to_index (location_t loc, tree addr, tree op1)
6810 tree s, delta, step;
6811 tree ref = TREE_OPERAND (addr, 0), pref;
6812 tree ret, pos;
6813 tree itype;
6814 bool mdim = false;
6816 /* Strip the nops that might be added when converting op1 to sizetype. */
6817 STRIP_NOPS (op1);
6819 /* Canonicalize op1 into a possibly non-constant delta
6820 and an INTEGER_CST s. */
6821 if (TREE_CODE (op1) == MULT_EXPR)
6823 tree arg0 = TREE_OPERAND (op1, 0), arg1 = TREE_OPERAND (op1, 1);
6825 STRIP_NOPS (arg0);
6826 STRIP_NOPS (arg1);
6828 if (TREE_CODE (arg0) == INTEGER_CST)
6830 s = arg0;
6831 delta = arg1;
6833 else if (TREE_CODE (arg1) == INTEGER_CST)
6835 s = arg1;
6836 delta = arg0;
6838 else
6839 return NULL_TREE;
6841 else if (TREE_CODE (op1) == INTEGER_CST)
6843 delta = op1;
6844 s = NULL_TREE;
6846 else
6848 /* Act as if op1 were delta * 1. */
6849 delta = op1;
6850 s = integer_one_node;
6853 for (;; ref = TREE_OPERAND (ref, 0))
6855 if (TREE_CODE (ref) == ARRAY_REF)
6857 tree domain;
6859 /* Remember if this was a multi-dimensional array. */
6860 if (TREE_CODE (TREE_OPERAND (ref, 0)) == ARRAY_REF)
6861 mdim = true;
6863 domain = TYPE_DOMAIN (TREE_TYPE (TREE_OPERAND (ref, 0)));
6864 if (! domain)
6865 continue;
6866 itype = TREE_TYPE (domain);
6868 step = array_ref_element_size (ref);
6869 if (TREE_CODE (step) != INTEGER_CST)
6870 continue;
6872 if (s)
6874 if (! tree_int_cst_equal (step, s))
6875 continue;
6877 else
6879 /* Check whether delta is a multiple of step. */
6880 tree tmp = div_if_zero_remainder (EXACT_DIV_EXPR, op1, step);
6881 if (! tmp)
6882 continue;
6883 delta = tmp;
6886 /* Only fold here if we can verify we do not overflow one
6887 dimension of a multi-dimensional array. */
6888 if (mdim)
6890 tree tmp;
6892 if (TREE_CODE (TREE_OPERAND (ref, 1)) != INTEGER_CST
6893 || !TYPE_MAX_VALUE (domain)
6894 || TREE_CODE (TYPE_MAX_VALUE (domain)) != INTEGER_CST)
6895 continue;
6897 tmp = fold_binary_loc (loc, PLUS_EXPR, itype,
6898 fold_convert_loc (loc, itype,
6899 TREE_OPERAND (ref, 1)),
6900 fold_convert_loc (loc, itype, delta));
6901 if (!tmp
6902 || TREE_CODE (tmp) != INTEGER_CST
6903 || tree_int_cst_lt (TYPE_MAX_VALUE (domain), tmp))
6904 continue;
6907 break;
6909 else if (TREE_CODE (ref) == COMPONENT_REF
6910 && TREE_CODE (TREE_TYPE (ref)) == ARRAY_TYPE)
6912 tree domain;
6914 /* Remember if this was a multi-dimensional array. */
6915 if (TREE_CODE (TREE_OPERAND (ref, 0)) == ARRAY_REF)
6916 mdim = true;
6918 domain = TYPE_DOMAIN (TREE_TYPE (ref));
6919 if (! domain)
6920 continue;
6921 itype = TREE_TYPE (domain);
6923 step = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (ref)));
6924 if (TREE_CODE (step) != INTEGER_CST)
6925 continue;
6927 if (s)
6929 if (! tree_int_cst_equal (step, s))
6930 continue;
6932 else
6934 /* Check whether delta is a multiple of step. */
6935 tree tmp = div_if_zero_remainder (EXACT_DIV_EXPR, op1, step);
6936 if (! tmp)
6937 continue;
6938 delta = tmp;
6941 /* Only fold here if we can verify we do not overflow one
6942 dimension of a multi-dimensional array. */
6943 if (mdim)
6945 tree tmp;
6947 if (!TYPE_MIN_VALUE (domain)
6948 || !TYPE_MAX_VALUE (domain)
6949 || TREE_CODE (TYPE_MAX_VALUE (domain)) != INTEGER_CST)
6950 continue;
6952 tmp = fold_binary_loc (loc, PLUS_EXPR, itype,
6953 fold_convert_loc (loc, itype,
6954 TYPE_MIN_VALUE (domain)),
6955 fold_convert_loc (loc, itype, delta));
6956 if (TREE_CODE (tmp) != INTEGER_CST
6957 || tree_int_cst_lt (TYPE_MAX_VALUE (domain), tmp))
6958 continue;
6961 break;
6963 else
6964 mdim = false;
6966 if (!handled_component_p (ref))
6967 return NULL_TREE;
6970 /* We found a suitable array reference. So copy everything up to it,
6971 and replace the index. */
6973 pref = TREE_OPERAND (addr, 0);
6974 ret = copy_node (pref);
6975 SET_EXPR_LOCATION (ret, loc);
6976 pos = ret;
6978 while (pref != ref)
6980 pref = TREE_OPERAND (pref, 0);
6981 TREE_OPERAND (pos, 0) = copy_node (pref);
6982 pos = TREE_OPERAND (pos, 0);
6985 if (TREE_CODE (ref) == ARRAY_REF)
6987 TREE_OPERAND (pos, 1)
6988 = fold_build2_loc (loc, PLUS_EXPR, itype,
6989 fold_convert_loc (loc, itype, TREE_OPERAND (pos, 1)),
6990 fold_convert_loc (loc, itype, delta));
6991 return fold_build1_loc (loc, ADDR_EXPR, TREE_TYPE (addr), ret);
6993 else if (TREE_CODE (ref) == COMPONENT_REF)
6995 gcc_assert (ret == pos);
6996 ret = build4_loc (loc, ARRAY_REF, TREE_TYPE (TREE_TYPE (ref)), ret,
6997 fold_build2_loc
6998 (loc, PLUS_EXPR, itype,
6999 fold_convert_loc (loc, itype,
7000 TYPE_MIN_VALUE
7001 (TYPE_DOMAIN (TREE_TYPE (ref)))),
7002 fold_convert_loc (loc, itype, delta)),
7003 NULL_TREE, NULL_TREE);
7004 return build_fold_addr_expr_loc (loc, ret);
7006 else
7007 gcc_unreachable ();
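/* Editor's sketch -- standalone illustration, not part of fold-const.c.
   At the source level the rewrite above corresponds to this pointer
   identity: a byte offset that is a whole multiple of the element size
   folds into the index.  Both index expressions must stay within the
   same array object for the arithmetic to be defined.  */
#include <assert.h>
#include <stddef.h>

static void
check_move_mult_to_index (int *a, size_t idx, size_t delta)
{
  /* &a[idx] p+ delta * sizeof (int)  ==  &a[idx + delta]  */
  assert ((char *) &a[idx] + delta * sizeof (int)
          == (char *) &a[idx + delta]);
}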
7011 /* Fold A < X && A + 1 > Y to A < X && A >= Y. Normally A + 1 > Y
7012 means A >= Y && A != MAX, but in this case we know that
7013 A < X <= MAX. INEQ is A + 1 > Y, BOUND is A < X. */
7015 static tree
7016 fold_to_nonsharp_ineq_using_bound (location_t loc, tree ineq, tree bound)
7018 tree a, typea, type = TREE_TYPE (ineq), a1, diff, y;
7020 if (TREE_CODE (bound) == LT_EXPR)
7021 a = TREE_OPERAND (bound, 0);
7022 else if (TREE_CODE (bound) == GT_EXPR)
7023 a = TREE_OPERAND (bound, 1);
7024 else
7025 return NULL_TREE;
7027 typea = TREE_TYPE (a);
7028 if (!INTEGRAL_TYPE_P (typea)
7029 && !POINTER_TYPE_P (typea))
7030 return NULL_TREE;
7032 if (TREE_CODE (ineq) == LT_EXPR)
7034 a1 = TREE_OPERAND (ineq, 1);
7035 y = TREE_OPERAND (ineq, 0);
7037 else if (TREE_CODE (ineq) == GT_EXPR)
7039 a1 = TREE_OPERAND (ineq, 0);
7040 y = TREE_OPERAND (ineq, 1);
7042 else
7043 return NULL_TREE;
7045 if (TREE_TYPE (a1) != typea)
7046 return NULL_TREE;
7048 if (POINTER_TYPE_P (typea))
7050 /* Convert the pointer types to integers before taking the difference. */
7051 tree ta = fold_convert_loc (loc, ssizetype, a);
7052 tree ta1 = fold_convert_loc (loc, ssizetype, a1);
7053 diff = fold_binary_loc (loc, MINUS_EXPR, ssizetype, ta1, ta);
7055 else
7056 diff = fold_binary_loc (loc, MINUS_EXPR, typea, a1, a);
7058 if (!diff || !integer_onep (diff))
7059 return NULL_TREE;
7061 return fold_build2_loc (loc, GE_EXPR, type, a, y);
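/* Editor's sketch -- standalone illustration, not part of fold-const.c.
   The identity above on plain ints: given A < X, A + 1 cannot wrap, and
   for integers A + 1 > Y is exactly A >= Y.  */
#include <assert.h>

static void
check_nonsharp_ineq (int a, int x, int y)
{
  if (a < x)
    assert ((a + 1 > y) == (a >= y));
}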
7064 /* Fold a sum or difference of at least one multiplication.
7065 Returns the folded tree or NULL if no simplification could be made. */
7067 static tree
7068 fold_plusminus_mult_expr (location_t loc, enum tree_code code, tree type,
7069 tree arg0, tree arg1)
7071 tree arg00, arg01, arg10, arg11;
7072 tree alt0 = NULL_TREE, alt1 = NULL_TREE, same;
7074 /* (A * C) +- (B * C) -> (A+-B) * C.
7075 (A * C) +- A -> A * (C+-1).
7076 We are most concerned about the case where C is a constant,
7077 but other combinations show up during loop reduction. Since
7078 it is not difficult, try all four possibilities. */
7080 if (TREE_CODE (arg0) == MULT_EXPR)
7082 arg00 = TREE_OPERAND (arg0, 0);
7083 arg01 = TREE_OPERAND (arg0, 1);
7085 else if (TREE_CODE (arg0) == INTEGER_CST)
7087 arg00 = build_one_cst (type);
7088 arg01 = arg0;
7090 else
7092 /* We cannot generate constant 1 for fract. */
7093 if (ALL_FRACT_MODE_P (TYPE_MODE (type)))
7094 return NULL_TREE;
7095 arg00 = arg0;
7096 arg01 = build_one_cst (type);
7098 if (TREE_CODE (arg1) == MULT_EXPR)
7100 arg10 = TREE_OPERAND (arg1, 0);
7101 arg11 = TREE_OPERAND (arg1, 1);
7103 else if (TREE_CODE (arg1) == INTEGER_CST)
7105 arg10 = build_one_cst (type);
7106 /* As we canonicalize A - 2 to A + -2, get rid of that sign for
7107 the purpose of this canonicalization. */
7108 if (TREE_INT_CST_HIGH (arg1) == -1
7109 && negate_expr_p (arg1)
7110 && code == PLUS_EXPR)
7112 arg11 = negate_expr (arg1);
7113 code = MINUS_EXPR;
7115 else
7116 arg11 = arg1;
7118 else
7120 /* We cannot generate constant 1 for fract. */
7121 if (ALL_FRACT_MODE_P (TYPE_MODE (type)))
7122 return NULL_TREE;
7123 arg10 = arg1;
7124 arg11 = build_one_cst (type);
7126 same = NULL_TREE;
7128 if (operand_equal_p (arg01, arg11, 0))
7129 same = arg01, alt0 = arg00, alt1 = arg10;
7130 else if (operand_equal_p (arg00, arg10, 0))
7131 same = arg00, alt0 = arg01, alt1 = arg11;
7132 else if (operand_equal_p (arg00, arg11, 0))
7133 same = arg00, alt0 = arg01, alt1 = arg10;
7134 else if (operand_equal_p (arg01, arg10, 0))
7135 same = arg01, alt0 = arg00, alt1 = arg11;
7137 /* No identical multiplicands; see if we can find a common
7138 power-of-two factor in non-power-of-two multiplies. This
7139 can help in multi-dimensional array access. */
7140 else if (host_integerp (arg01, 0)
7141 && host_integerp (arg11, 0))
7143 HOST_WIDE_INT int01, int11, tmp;
7144 bool swap = false;
7145 tree maybe_same;
7146 int01 = TREE_INT_CST_LOW (arg01);
7147 int11 = TREE_INT_CST_LOW (arg11);
7149 /* Move min of absolute values to int11. */
7150 if (absu_hwi (int01) < absu_hwi (int11))
7152 tmp = int01, int01 = int11, int11 = tmp;
7153 alt0 = arg00, arg00 = arg10, arg10 = alt0;
7154 maybe_same = arg01;
7155 swap = true;
7157 else
7158 maybe_same = arg11;
7160 if (exact_log2 (absu_hwi (int11)) > 0 && int01 % int11 == 0
7161 /* The remainder should not be a constant, otherwise we
7162 end up folding i * 4 + 2 to (i * 2 + 1) * 2, which increases
7163 the number of multiplications necessary. */
7164 && TREE_CODE (arg10) != INTEGER_CST)
7166 alt0 = fold_build2_loc (loc, MULT_EXPR, TREE_TYPE (arg00), arg00,
7167 build_int_cst (TREE_TYPE (arg00),
7168 int01 / int11));
7169 alt1 = arg10;
7170 same = maybe_same;
7171 if (swap)
7172 maybe_same = alt0, alt0 = alt1, alt1 = maybe_same;
7176 if (same)
7177 return fold_build2_loc (loc, MULT_EXPR, type,
7178 fold_build2_loc (loc, code, type,
7179 fold_convert_loc (loc, type, alt0),
7180 fold_convert_loc (loc, type, alt1)),
7181 fold_convert_loc (loc, type, same));
7183 return NULL_TREE;
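/* Editor's sketch -- standalone illustration, not part of fold-const.c.
   Two of the factorings attempted above, valid on the assumption that
   no intermediate product overflows:  */
#include <assert.h>

static void
check_plusminus_mult (long a, long b, long c)
{
  /* (A * C) + (B * C)  ->  (A + B) * C  */
  assert (a * c + b * c == (a + b) * c);
  /* (A * C) - A  ->  A * (C - 1)  */
  assert (a * c - a == a * (c - 1));
}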
7186 /* Subroutine of native_encode_expr. Encode the INTEGER_CST
7187 specified by EXPR into the buffer PTR of length LEN bytes.
7188 Return the number of bytes placed in the buffer, or zero
7189 upon failure. */
7191 static int
7192 native_encode_int (const_tree expr, unsigned char *ptr, int len)
7194 tree type = TREE_TYPE (expr);
7195 int total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
7196 int byte, offset, word, words;
7197 unsigned char value;
7199 if (total_bytes > len)
7200 return 0;
7201 words = total_bytes / UNITS_PER_WORD;
7203 for (byte = 0; byte < total_bytes; byte++)
7205 int bitpos = byte * BITS_PER_UNIT;
7206 if (bitpos < HOST_BITS_PER_WIDE_INT)
7207 value = (unsigned char) (TREE_INT_CST_LOW (expr) >> bitpos);
7208 else
7209 value = (unsigned char) (TREE_INT_CST_HIGH (expr)
7210 >> (bitpos - HOST_BITS_PER_WIDE_INT));
7212 if (total_bytes > UNITS_PER_WORD)
7214 word = byte / UNITS_PER_WORD;
7215 if (WORDS_BIG_ENDIAN)
7216 word = (words - 1) - word;
7217 offset = word * UNITS_PER_WORD;
7218 if (BYTES_BIG_ENDIAN)
7219 offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
7220 else
7221 offset += byte % UNITS_PER_WORD;
7223 else
7224 offset = BYTES_BIG_ENDIAN ? (total_bytes - 1) - byte : byte;
7225 ptr[offset] = value;
7227 return total_bytes;
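/* Editor's sketch -- standalone illustration, not part of fold-const.c.
   The simplest shape of the loop above: one byte per 8-bit step of the
   value, with the target byte order applied when storing.  A fixed
   32-bit value and single-word layout are assumed, so the word
   shuffling done above for multi-word integers is omitted.  */
static int
encode_u32 (unsigned int v, unsigned char *ptr, int len, int big_endian)
{
  int byte;

  if (len < 4)
    return 0;                   /* mirror the "buffer too small" case */
  for (byte = 0; byte < 4; byte++)
    ptr[big_endian ? 3 - byte : byte] = (unsigned char) (v >> (byte * 8));
  return 4;
}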
7231 /* Subroutine of native_encode_expr. Encode the REAL_CST
7232 specified by EXPR into the buffer PTR of length LEN bytes.
7233 Return the number of bytes placed in the buffer, or zero
7234 upon failure. */
7236 static int
7237 native_encode_real (const_tree expr, unsigned char *ptr, int len)
7239 tree type = TREE_TYPE (expr);
7240 int total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
7241 int byte, offset, word, words, bitpos;
7242 unsigned char value;
7244 /* There are always 32 bits in each long, no matter the size of
7245 the host's long. We handle floating point representations with
7246 up to 192 bits. */
7247 long tmp[6];
7249 if (total_bytes > len)
7250 return 0;
7251 words = (32 / BITS_PER_UNIT) / UNITS_PER_WORD;
7253 real_to_target (tmp, TREE_REAL_CST_PTR (expr), TYPE_MODE (type));
7255 for (bitpos = 0; bitpos < total_bytes * BITS_PER_UNIT;
7256 bitpos += BITS_PER_UNIT)
7258 byte = (bitpos / BITS_PER_UNIT) & 3;
7259 value = (unsigned char) (tmp[bitpos / 32] >> (bitpos & 31));
7261 if (UNITS_PER_WORD < 4)
7263 word = byte / UNITS_PER_WORD;
7264 if (WORDS_BIG_ENDIAN)
7265 word = (words - 1) - word;
7266 offset = word * UNITS_PER_WORD;
7267 if (BYTES_BIG_ENDIAN)
7268 offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
7269 else
7270 offset += byte % UNITS_PER_WORD;
7272 else
7273 offset = BYTES_BIG_ENDIAN ? 3 - byte : byte;
7274 ptr[offset + ((bitpos / BITS_PER_UNIT) & ~3)] = value;
7276 return total_bytes;
7279 /* Subroutine of native_encode_expr. Encode the COMPLEX_CST
7280 specified by EXPR into the buffer PTR of length LEN bytes.
7281 Return the number of bytes placed in the buffer, or zero
7282 upon failure. */
7284 static int
7285 native_encode_complex (const_tree expr, unsigned char *ptr, int len)
7287 int rsize, isize;
7288 tree part;
7290 part = TREE_REALPART (expr);
7291 rsize = native_encode_expr (part, ptr, len);
7292 if (rsize == 0)
7293 return 0;
7294 part = TREE_IMAGPART (expr);
7295 isize = native_encode_expr (part, ptr+rsize, len-rsize);
7296 if (isize != rsize)
7297 return 0;
7298 return rsize + isize;
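/* Editorial sketch (not part of GCC): the layout produced above is the
   real part followed immediately by the imaginary part, each in the
   element type's native encoding, matching how a host _Complex double
   is laid out.  Hypothetical example name; assumes the GNU imaginary
   constant extension.  */
#if 0
static void
example_complex_layout (void)
{
  _Complex double z = 1.0 + 2.0i;
  double parts[2];
  __builtin_memcpy (parts, &z, sizeof parts);
  /* parts[0] == 1.0 (real part), parts[1] == 2.0 (imaginary part).  */
}
#endif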
7302 /* Subroutine of native_encode_expr. Encode the VECTOR_CST
7303 specified by EXPR into the buffer PTR of length LEN bytes.
7304 Return the number of bytes placed in the buffer, or zero
7305 upon failure. */
7307 static int
7308 native_encode_vector (const_tree expr, unsigned char *ptr, int len)
7310 int i, size, offset, count;
7311 tree itype, elem, elements;
7313 offset = 0;
7314 elements = TREE_VECTOR_CST_ELTS (expr);
7315 count = TYPE_VECTOR_SUBPARTS (TREE_TYPE (expr));
7316 itype = TREE_TYPE (TREE_TYPE (expr));
7317 size = GET_MODE_SIZE (TYPE_MODE (itype));
7318 for (i = 0; i < count; i++)
7320 if (elements)
7322 elem = TREE_VALUE (elements);
7323 elements = TREE_CHAIN (elements);
7325 else
7326 elem = NULL_TREE;
7328 if (elem)
7330 if (native_encode_expr (elem, ptr+offset, len-offset) != size)
7331 return 0;
7333 else
7335 if (offset + size > len)
7336 return 0;
7337 memset (ptr+offset, 0, size);
7339 offset += size;
7341 return offset;
7345 /* Subroutine of native_encode_expr. Encode the STRING_CST
7346 specified by EXPR into the buffer PTR of length LEN bytes.
7347 Return the number of bytes placed in the buffer, or zero
7348 upon failure. */
7350 static int
7351 native_encode_string (const_tree expr, unsigned char *ptr, int len)
7353 tree type = TREE_TYPE (expr);
7354 HOST_WIDE_INT total_bytes;
7356 if (TREE_CODE (type) != ARRAY_TYPE
7357 || TREE_CODE (TREE_TYPE (type)) != INTEGER_TYPE
7358 || GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (type))) != BITS_PER_UNIT
7359 || !host_integerp (TYPE_SIZE_UNIT (type), 0))
7360 return 0;
7361 total_bytes = tree_low_cst (TYPE_SIZE_UNIT (type), 0);
7362 if (total_bytes > len)
7363 return 0;
7364 if (TREE_STRING_LENGTH (expr) < total_bytes)
7366 memcpy (ptr, TREE_STRING_POINTER (expr), TREE_STRING_LENGTH (expr));
7367 memset (ptr + TREE_STRING_LENGTH (expr), 0,
7368 total_bytes - TREE_STRING_LENGTH (expr));
7370 else
7371 memcpy (ptr, TREE_STRING_POINTER (expr), total_bytes);
7372 return total_bytes;
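/* Editorial sketch (not part of GCC): when the STRING_CST is shorter
   than the array it initializes, the tail is zero-filled, matching C
   initialization semantics.  Hypothetical variable name.  */
#if 0
static const char example_padded[8] = "abc"; /* 61 62 63 00 00 00 00 00 */
#endif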
7376 /* Subroutine of fold_view_convert_expr. Encode the INTEGER_CST,
7377 REAL_CST, COMPLEX_CST, VECTOR_CST or STRING_CST specified by EXPR into the
7378 buffer PTR of length LEN bytes. Return the number of bytes
7379 placed in the buffer, or zero upon failure. */
7381 int
7382 native_encode_expr (const_tree expr, unsigned char *ptr, int len)
7384 switch (TREE_CODE (expr))
7386 case INTEGER_CST:
7387 return native_encode_int (expr, ptr, len);
7389 case REAL_CST:
7390 return native_encode_real (expr, ptr, len);
7392 case COMPLEX_CST:
7393 return native_encode_complex (expr, ptr, len);
7395 case VECTOR_CST:
7396 return native_encode_vector (expr, ptr, len);
7398 case STRING_CST:
7399 return native_encode_string (expr, ptr, len);
7401 default:
7402 return 0;
7407 /* Subroutine of native_interpret_expr. Interpret the contents of
7408 the buffer PTR of length LEN as an INTEGER_CST of type TYPE.
7409 If the buffer cannot be interpreted, return NULL_TREE. */
7411 static tree
7412 native_interpret_int (tree type, const unsigned char *ptr, int len)
7414 int total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
7415 int byte, offset, word, words;
7416 unsigned char value;
7417 double_int result;
7419 if (total_bytes > len)
7420 return NULL_TREE;
7421 if (total_bytes * BITS_PER_UNIT > 2 * HOST_BITS_PER_WIDE_INT)
7422 return NULL_TREE;
7424 result = double_int_zero;
7425 words = total_bytes / UNITS_PER_WORD;
7427 for (byte = 0; byte < total_bytes; byte++)
7429 int bitpos = byte * BITS_PER_UNIT;
7430 if (total_bytes > UNITS_PER_WORD)
7432 word = byte / UNITS_PER_WORD;
7433 if (WORDS_BIG_ENDIAN)
7434 word = (words - 1) - word;
7435 offset = word * UNITS_PER_WORD;
7436 if (BYTES_BIG_ENDIAN)
7437 offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
7438 else
7439 offset += byte % UNITS_PER_WORD;
7441 else
7442 offset = BYTES_BIG_ENDIAN ? (total_bytes - 1) - byte : byte;
7443 value = ptr[offset];
7445 if (bitpos < HOST_BITS_PER_WIDE_INT)
7446 result.low |= (unsigned HOST_WIDE_INT) value << bitpos;
7447 else
7448 result.high |= (unsigned HOST_WIDE_INT) value
7449 << (bitpos - HOST_BITS_PER_WIDE_INT);
7452 return double_int_to_tree (type, result);
7456 /* Subroutine of native_interpret_expr. Interpret the contents of
7457 the buffer PTR of length LEN as a REAL_CST of type TYPE.
7458 If the buffer cannot be interpreted, return NULL_TREE. */
7460 static tree
7461 native_interpret_real (tree type, const unsigned char *ptr, int len)
7463 enum machine_mode mode = TYPE_MODE (type);
7464 int total_bytes = GET_MODE_SIZE (mode);
7465 int byte, offset, word, words, bitpos;
7466 unsigned char value;
7467 /* There are always 32 bits in each long, no matter the size of
7468 the host's long. We handle floating point representations with
7469 up to 192 bits. */
7470 REAL_VALUE_TYPE r;
7471 long tmp[6];
7473 total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
7474 if (total_bytes > len || total_bytes > 24)
7475 return NULL_TREE;
7476 words = (32 / BITS_PER_UNIT) / UNITS_PER_WORD;
7478 memset (tmp, 0, sizeof (tmp));
7479 for (bitpos = 0; bitpos < total_bytes * BITS_PER_UNIT;
7480 bitpos += BITS_PER_UNIT)
7482 byte = (bitpos / BITS_PER_UNIT) & 3;
7483 if (UNITS_PER_WORD < 4)
7485 word = byte / UNITS_PER_WORD;
7486 if (WORDS_BIG_ENDIAN)
7487 word = (words - 1) - word;
7488 offset = word * UNITS_PER_WORD;
7489 if (BYTES_BIG_ENDIAN)
7490 offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
7491 else
7492 offset += byte % UNITS_PER_WORD;
7494 else
7495 offset = BYTES_BIG_ENDIAN ? 3 - byte : byte;
7496 value = ptr[offset + ((bitpos / BITS_PER_UNIT) & ~3)];
7498 tmp[bitpos / 32] |= (unsigned long)value << (bitpos & 31);
7501 real_from_target (&r, tmp, mode);
7502 return build_real (type, r);
7506 /* Subroutine of native_interpret_expr. Interpret the contents of
7507 the buffer PTR of length LEN as a COMPLEX_CST of type TYPE.
7508 If the buffer cannot be interpreted, return NULL_TREE. */
7510 static tree
7511 native_interpret_complex (tree type, const unsigned char *ptr, int len)
7513 tree etype, rpart, ipart;
7514 int size;
7516 etype = TREE_TYPE (type);
7517 size = GET_MODE_SIZE (TYPE_MODE (etype));
7518 if (size * 2 > len)
7519 return NULL_TREE;
7520 rpart = native_interpret_expr (etype, ptr, size);
7521 if (!rpart)
7522 return NULL_TREE;
7523 ipart = native_interpret_expr (etype, ptr+size, size);
7524 if (!ipart)
7525 return NULL_TREE;
7526 return build_complex (type, rpart, ipart);
7530 /* Subroutine of native_interpret_expr. Interpret the contents of
7531 the buffer PTR of length LEN as a VECTOR_CST of type TYPE.
7532 If the buffer cannot be interpreted, return NULL_TREE. */
7534 static tree
7535 native_interpret_vector (tree type, const unsigned char *ptr, int len)
7537 tree etype, elem, elements;
7538 int i, size, count;
7540 etype = TREE_TYPE (type);
7541 size = GET_MODE_SIZE (TYPE_MODE (etype));
7542 count = TYPE_VECTOR_SUBPARTS (type);
7543 if (size * count > len)
7544 return NULL_TREE;
7546 elements = NULL_TREE;
7547 for (i = count - 1; i >= 0; i--)
7549 elem = native_interpret_expr (etype, ptr+(i*size), size);
7550 if (!elem)
7551 return NULL_TREE;
7552 elements = tree_cons (NULL_TREE, elem, elements);
7554 return build_vector (type, elements);
7558 /* Subroutine of fold_view_convert_expr. Interpret the contents of
7559 the buffer PTR of length LEN as a constant of type TYPE. For
7560 INTEGRAL_TYPE_P we return an INTEGER_CST, for SCALAR_FLOAT_TYPE_P
7561 we return a REAL_CST, etc... If the buffer cannot be interpreted,
7562 return NULL_TREE. */
7564 tree
7565 native_interpret_expr (tree type, const unsigned char *ptr, int len)
7567 switch (TREE_CODE (type))
7569 case INTEGER_TYPE:
7570 case ENUMERAL_TYPE:
7571 case BOOLEAN_TYPE:
7572 return native_interpret_int (type, ptr, len);
7574 case REAL_TYPE:
7575 return native_interpret_real (type, ptr, len);
7577 case COMPLEX_TYPE:
7578 return native_interpret_complex (type, ptr, len);
7580 case VECTOR_TYPE:
7581 return native_interpret_vector (type, ptr, len);
7583 default:
7584 return NULL_TREE;
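/* Editorial sketch (not part of GCC): native_interpret_expr is the
   inverse of native_encode_expr, so a successful encode/interpret pair
   round-trips a constant.  The helper name is hypothetical.  */
#if 0
static bool
example_native_roundtrip (tree cst)
{
  unsigned char buf[64];
  int len = native_encode_expr (cst, buf, sizeof (buf));
  if (len == 0)
    return false;
  tree back = native_interpret_expr (TREE_TYPE (cst), buf, len);
  return back != NULL_TREE && operand_equal_p (cst, back, 0);
}
#endif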
7589 /* Fold a VIEW_CONVERT_EXPR of a constant expression EXPR to type
7590 TYPE at compile-time. If we're unable to perform the conversion
7591 return NULL_TREE. */
7593 static tree
7594 fold_view_convert_expr (tree type, tree expr)
7596 /* We support up to 512-bit values (for V8DFmode). */
7597 unsigned char buffer[64];
7598 int len;
7600 /* Check that the host and target are sane. */
7601 if (CHAR_BIT != 8 || BITS_PER_UNIT != 8)
7602 return NULL_TREE;
7604 len = native_encode_expr (expr, buffer, sizeof (buffer));
7605 if (len == 0)
7606 return NULL_TREE;
7608 return native_interpret_expr (type, buffer, len);
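/* Editorial sketch (not part of GCC): this is the kind of fold that
   lets a bytewise reinterpretation of a constant be evaluated at
   compile time; on IEEE-754 targets the bytes of 1.0f are those of the
   integer 0x3f800000.  Hypothetical function name.  */
#if 0
static int
example_view_convert (void)
{
  float f = 1.0f;
  int i;
  __builtin_memcpy (&i, &f, sizeof (i)); /* may fold to 0x3f800000 */
  return i;
}
#endif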
7611 /* Build an expression for the address of T. Folds away INDIRECT_REF
7612 to avoid confusing the gimplify process. */
7614 tree
7615 build_fold_addr_expr_with_type_loc (location_t loc, tree t, tree ptrtype)
7617 /* The size of the object is not relevant when talking about its address. */
7618 if (TREE_CODE (t) == WITH_SIZE_EXPR)
7619 t = TREE_OPERAND (t, 0);
7621 if (TREE_CODE (t) == INDIRECT_REF)
7623 t = TREE_OPERAND (t, 0);
7625 if (TREE_TYPE (t) != ptrtype)
7626 t = build1_loc (loc, NOP_EXPR, ptrtype, t);
7628 else if (TREE_CODE (t) == MEM_REF
7629 && integer_zerop (TREE_OPERAND (t, 1)))
7630 return TREE_OPERAND (t, 0);
7631 else if (TREE_CODE (t) == MEM_REF
7632 && TREE_CODE (TREE_OPERAND (t, 0)) == INTEGER_CST)
7633 return fold_binary (POINTER_PLUS_EXPR, ptrtype,
7634 TREE_OPERAND (t, 0),
7635 convert_to_ptrofftype (TREE_OPERAND (t, 1)));
7636 else if (TREE_CODE (t) == VIEW_CONVERT_EXPR)
7638 t = build_fold_addr_expr_loc (loc, TREE_OPERAND (t, 0));
7640 if (TREE_TYPE (t) != ptrtype)
7641 t = fold_convert_loc (loc, ptrtype, t);
7643 else
7644 t = build1_loc (loc, ADDR_EXPR, ptrtype, t);
7646 return t;
7649 /* Build an expression for the address of T. */
7651 tree
7652 build_fold_addr_expr_loc (location_t loc, tree t)
7654 tree ptrtype = build_pointer_type (TREE_TYPE (t));
7656 return build_fold_addr_expr_with_type_loc (loc, t, ptrtype);
7659 static bool vec_cst_ctor_to_array (tree, tree *);
7661 /* Fold a unary expression of code CODE and type TYPE with operand
7662 OP0. Return the folded expression if folding is successful.
7663 Otherwise, return NULL_TREE. */
7665 tree
7666 fold_unary_loc (location_t loc, enum tree_code code, tree type, tree op0)
7668 tree tem;
7669 tree arg0;
7670 enum tree_code_class kind = TREE_CODE_CLASS (code);
7672 gcc_assert (IS_EXPR_CODE_CLASS (kind)
7673 && TREE_CODE_LENGTH (code) == 1);
7675 arg0 = op0;
7676 if (arg0)
7678 if (CONVERT_EXPR_CODE_P (code)
7679 || code == FLOAT_EXPR || code == ABS_EXPR || code == NEGATE_EXPR)
7681 /* Don't use STRIP_NOPS, because signedness of argument type
7682 matters. */
7683 STRIP_SIGN_NOPS (arg0);
7685 else
7687 /* Strip any conversions that don't change the mode. This
7688 is safe for every expression, except for a comparison
7689 expression because its signedness is derived from its
7690 operands.
7692 Note that this is done as an internal manipulation within
7693 the constant folder, in order to find the simplest
7694 representation of the arguments so that their form can be
7695 studied. In any case, the appropriate type conversions
7696 should be put back in the tree that will get out of the
7697 constant folder. */
7698 STRIP_NOPS (arg0);
7702 if (TREE_CODE_CLASS (code) == tcc_unary)
7704 if (TREE_CODE (arg0) == COMPOUND_EXPR)
7705 return build2 (COMPOUND_EXPR, type, TREE_OPERAND (arg0, 0),
7706 fold_build1_loc (loc, code, type,
7707 fold_convert_loc (loc, TREE_TYPE (op0),
7708 TREE_OPERAND (arg0, 1))));
7709 else if (TREE_CODE (arg0) == COND_EXPR)
7711 tree arg01 = TREE_OPERAND (arg0, 1);
7712 tree arg02 = TREE_OPERAND (arg0, 2);
7713 if (! VOID_TYPE_P (TREE_TYPE (arg01)))
7714 arg01 = fold_build1_loc (loc, code, type,
7715 fold_convert_loc (loc,
7716 TREE_TYPE (op0), arg01));
7717 if (! VOID_TYPE_P (TREE_TYPE (arg02)))
7718 arg02 = fold_build1_loc (loc, code, type,
7719 fold_convert_loc (loc,
7720 TREE_TYPE (op0), arg02));
7721 tem = fold_build3_loc (loc, COND_EXPR, type, TREE_OPERAND (arg0, 0),
7722 arg01, arg02);
7724 /* If this was a conversion, and all we did was to move into
7725 inside the COND_EXPR, bring it back out. But leave it if
7726 it is a conversion from integer to integer and the
7727 result precision is no wider than a word since such a
7728 conversion is cheap and may be optimized away by combine,
7729 while it couldn't if it were outside the COND_EXPR. Then return
7730 so we don't get into an infinite recursion loop taking the
7731 conversion out and then back in. */
7733 if ((CONVERT_EXPR_CODE_P (code)
7734 || code == NON_LVALUE_EXPR)
7735 && TREE_CODE (tem) == COND_EXPR
7736 && TREE_CODE (TREE_OPERAND (tem, 1)) == code
7737 && TREE_CODE (TREE_OPERAND (tem, 2)) == code
7738 && ! VOID_TYPE_P (TREE_OPERAND (tem, 1))
7739 && ! VOID_TYPE_P (TREE_OPERAND (tem, 2))
7740 && (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 1), 0))
7741 == TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 2), 0)))
7742 && (! (INTEGRAL_TYPE_P (TREE_TYPE (tem))
7743 && (INTEGRAL_TYPE_P
7744 (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 1), 0))))
7745 && TYPE_PRECISION (TREE_TYPE (tem)) <= BITS_PER_WORD)
7746 || flag_syntax_only))
7747 tem = build1_loc (loc, code, type,
7748 build3 (COND_EXPR,
7749 TREE_TYPE (TREE_OPERAND
7750 (TREE_OPERAND (tem, 1), 0)),
7751 TREE_OPERAND (tem, 0),
7752 TREE_OPERAND (TREE_OPERAND (tem, 1), 0),
7753 TREE_OPERAND (TREE_OPERAND (tem, 2),
7754 0)));
7755 return tem;
7759 switch (code)
7761 case PAREN_EXPR:
7762 /* Re-association barriers around constants and other re-association
7763 barriers can be removed. */
7764 if (CONSTANT_CLASS_P (op0)
7765 || TREE_CODE (op0) == PAREN_EXPR)
7766 return fold_convert_loc (loc, type, op0);
7767 return NULL_TREE;
7769 CASE_CONVERT:
7770 case FLOAT_EXPR:
7771 case FIX_TRUNC_EXPR:
7772 if (TREE_TYPE (op0) == type)
7773 return op0;
7775 if (COMPARISON_CLASS_P (op0))
7777 /* If we have (type) (a CMP b) and type is an integral type, return
7778 new expression involving the new type. Canonicalize
7779 (type) (a CMP b) to (a CMP b) ? (type) true : (type) false for
7780 non-integral type.
7781 Do not fold the result, as that would not simplify further;
7782 folding again results in infinite recursion. */
7783 if (TREE_CODE (type) == BOOLEAN_TYPE)
7784 return build2_loc (loc, TREE_CODE (op0), type,
7785 TREE_OPERAND (op0, 0),
7786 TREE_OPERAND (op0, 1));
7787 else if (!INTEGRAL_TYPE_P (type))
7788 return build3_loc (loc, COND_EXPR, type, op0,
7789 constant_boolean_node (true, type),
7790 constant_boolean_node (false, type));
7793 /* Handle cases of two conversions in a row. */
7794 if (CONVERT_EXPR_P (op0))
7796 tree inside_type = TREE_TYPE (TREE_OPERAND (op0, 0));
7797 tree inter_type = TREE_TYPE (op0);
7798 int inside_int = INTEGRAL_TYPE_P (inside_type);
7799 int inside_ptr = POINTER_TYPE_P (inside_type);
7800 int inside_float = FLOAT_TYPE_P (inside_type);
7801 int inside_vec = TREE_CODE (inside_type) == VECTOR_TYPE;
7802 unsigned int inside_prec = TYPE_PRECISION (inside_type);
7803 int inside_unsignedp = TYPE_UNSIGNED (inside_type);
7804 int inter_int = INTEGRAL_TYPE_P (inter_type);
7805 int inter_ptr = POINTER_TYPE_P (inter_type);
7806 int inter_float = FLOAT_TYPE_P (inter_type);
7807 int inter_vec = TREE_CODE (inter_type) == VECTOR_TYPE;
7808 unsigned int inter_prec = TYPE_PRECISION (inter_type);
7809 int inter_unsignedp = TYPE_UNSIGNED (inter_type);
7810 int final_int = INTEGRAL_TYPE_P (type);
7811 int final_ptr = POINTER_TYPE_P (type);
7812 int final_float = FLOAT_TYPE_P (type);
7813 int final_vec = TREE_CODE (type) == VECTOR_TYPE;
7814 unsigned int final_prec = TYPE_PRECISION (type);
7815 int final_unsignedp = TYPE_UNSIGNED (type);
7817 /* In addition to the cases of two conversions in a row
7818 handled below, if we are converting something to its own
7819 type via an object of identical or wider precision, neither
7820 conversion is needed. */
7821 if (TYPE_MAIN_VARIANT (inside_type) == TYPE_MAIN_VARIANT (type)
7822 && (((inter_int || inter_ptr) && final_int)
7823 || (inter_float && final_float))
7824 && inter_prec >= final_prec)
7825 return fold_build1_loc (loc, code, type, TREE_OPERAND (op0, 0));
7827 /* Likewise, if the intermediate and initial types are either both
7828 float or both integer, we don't need the middle conversion if the
7829 former is wider than the latter and doesn't change the signedness
7830 (for integers). Avoid this if the final type is a pointer since
7831 then we sometimes need the middle conversion. Likewise if the
7832 final type has a precision not equal to the size of its mode. */
7833 if (((inter_int && inside_int)
7834 || (inter_float && inside_float)
7835 || (inter_vec && inside_vec))
7836 && inter_prec >= inside_prec
7837 && (inter_float || inter_vec
7838 || inter_unsignedp == inside_unsignedp)
7839 && ! (final_prec != GET_MODE_BITSIZE (TYPE_MODE (type))
7840 && TYPE_MODE (type) == TYPE_MODE (inter_type))
7841 && ! final_ptr
7842 && (! final_vec || inter_prec == inside_prec))
7843 return fold_build1_loc (loc, code, type, TREE_OPERAND (op0, 0));
7845 /* If we have a sign-extension of a zero-extended value, we can
7846 replace that by a single zero-extension. */
7847 if (inside_int && inter_int && final_int
7848 && inside_prec < inter_prec && inter_prec < final_prec
7849 && inside_unsignedp && !inter_unsignedp)
7850 return fold_build1_loc (loc, code, type, TREE_OPERAND (op0, 0));
7852 /* Two conversions in a row are not needed unless:
7853 - some conversion is floating-point (overstrict for now), or
7854 - some conversion is a vector (overstrict for now), or
7855 - the intermediate type is narrower than both initial and
7856 final, or
7857 - the intermediate type and innermost type differ in signedness,
7858 and the outermost type is wider than the intermediate, or
7859 - the initial type is a pointer type and the precisions of the
7860 intermediate and final types differ, or
7861 - the final type is a pointer type and the precisions of the
7862 initial and intermediate types differ. */
7863 if (! inside_float && ! inter_float && ! final_float
7864 && ! inside_vec && ! inter_vec && ! final_vec
7865 && (inter_prec >= inside_prec || inter_prec >= final_prec)
7866 && ! (inside_int && inter_int
7867 && inter_unsignedp != inside_unsignedp
7868 && inter_prec < final_prec)
7869 && ((inter_unsignedp && inter_prec > inside_prec)
7870 == (final_unsignedp && final_prec > inter_prec))
7871 && ! (inside_ptr && inter_prec != final_prec)
7872 && ! (final_ptr && inside_prec != inter_prec)
7873 && ! (final_prec != GET_MODE_BITSIZE (TYPE_MODE (type))
7874 && TYPE_MODE (type) == TYPE_MODE (inter_type)))
7875 return fold_build1_loc (loc, code, type, TREE_OPERAND (op0, 0));
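/* Editorial sketch (not part of GCC): a concrete instance of the rules
   above -- the intermediate widening cast is redundant, since long is
   at least as wide as int and the signedness does not change, so the
   whole expression folds to x.  Hypothetical function name.  */
#if 0
static int
example_two_casts (int x)
{
  return (int) (long) x; /* folds to plain x */
}
#endif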
7878 /* Handle (T *)&A.B.C for A being of type T and B and C
7879 living at offset zero. This occurs frequently in
7880 C++ upcasting and then accessing the base. */
7881 if (TREE_CODE (op0) == ADDR_EXPR
7882 && POINTER_TYPE_P (type)
7883 && handled_component_p (TREE_OPERAND (op0, 0)))
7885 HOST_WIDE_INT bitsize, bitpos;
7886 tree offset;
7887 enum machine_mode mode;
7888 int unsignedp, volatilep;
7889 tree base = TREE_OPERAND (op0, 0);
7890 base = get_inner_reference (base, &bitsize, &bitpos, &offset,
7891 &mode, &unsignedp, &volatilep, false);
7892 /* If the reference was to a (constant) zero offset, we can use
7893 the address of the base if it has the same base type
7894 as the result type and the pointer type is unqualified. */
7895 if (! offset && bitpos == 0
7896 && (TYPE_MAIN_VARIANT (TREE_TYPE (type))
7897 == TYPE_MAIN_VARIANT (TREE_TYPE (base)))
7898 && TYPE_QUALS (type) == TYPE_UNQUALIFIED)
7899 return fold_convert_loc (loc, type,
7900 build_fold_addr_expr_loc (loc, base));
7903 if (TREE_CODE (op0) == MODIFY_EXPR
7904 && TREE_CONSTANT (TREE_OPERAND (op0, 1))
7905 /* Detect assigning a bitfield. */
7906 && !(TREE_CODE (TREE_OPERAND (op0, 0)) == COMPONENT_REF
7907 && DECL_BIT_FIELD
7908 (TREE_OPERAND (TREE_OPERAND (op0, 0), 1))))
7910 /* Don't leave an assignment inside a conversion
7911 unless assigning a bitfield. */
7912 tem = fold_build1_loc (loc, code, type, TREE_OPERAND (op0, 1));
7913 /* First do the assignment, then return converted constant. */
7914 tem = build2_loc (loc, COMPOUND_EXPR, TREE_TYPE (tem), op0, tem);
7915 TREE_NO_WARNING (tem) = 1;
7916 TREE_USED (tem) = 1;
7917 return tem;
7920 /* Convert (T)(x & c) into (T)x & (T)c, if c is an integer
7921 constants (if x has signed type, the sign bit cannot be set
7922 in c). This folds extension into the BIT_AND_EXPR.
7923 ??? We don't do it for BOOLEAN_TYPE or ENUMERAL_TYPE because they
7924 very likely don't have maximal range for their precision and this
7925 transformation effectively doesn't preserve non-maximal ranges. */
7926 if (TREE_CODE (type) == INTEGER_TYPE
7927 && TREE_CODE (op0) == BIT_AND_EXPR
7928 && TREE_CODE (TREE_OPERAND (op0, 1)) == INTEGER_CST)
7930 tree and_expr = op0;
7931 tree and0 = TREE_OPERAND (and_expr, 0);
7932 tree and1 = TREE_OPERAND (and_expr, 1);
7933 int change = 0;
7935 if (TYPE_UNSIGNED (TREE_TYPE (and_expr))
7936 || (TYPE_PRECISION (type)
7937 <= TYPE_PRECISION (TREE_TYPE (and_expr))))
7938 change = 1;
7939 else if (TYPE_PRECISION (TREE_TYPE (and1))
7940 <= HOST_BITS_PER_WIDE_INT
7941 && host_integerp (and1, 1))
7943 unsigned HOST_WIDE_INT cst;
7945 cst = tree_low_cst (and1, 1);
7946 cst &= (HOST_WIDE_INT) -1
7947 << (TYPE_PRECISION (TREE_TYPE (and1)) - 1);
7948 change = (cst == 0);
7949 #ifdef LOAD_EXTEND_OP
7950 if (change
7951 && !flag_syntax_only
7952 && (LOAD_EXTEND_OP (TYPE_MODE (TREE_TYPE (and0)))
7953 == ZERO_EXTEND))
7955 tree uns = unsigned_type_for (TREE_TYPE (and0));
7956 and0 = fold_convert_loc (loc, uns, and0);
7957 and1 = fold_convert_loc (loc, uns, and1);
7959 #endif
7961 if (change)
7963 tem = force_fit_type_double (type, tree_to_double_int (and1),
7964 0, TREE_OVERFLOW (and1));
7965 return fold_build2_loc (loc, BIT_AND_EXPR, type,
7966 fold_convert_loc (loc, type, and0), tem);
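/* Editorial sketch (not part of GCC): the conversion is folded into
   the BIT_AND_EXPR, so for int x, (unsigned char)(x & 0x7f) becomes
   (unsigned char)x & 0x7f.  Hypothetical function name.  */
#if 0
static unsigned char
example_and_cast (int x)
{
  return (unsigned char) (x & 0x7f); /* folds to (unsigned char) x & 0x7f */
}
#endif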
7970 /* Convert (T1)(X p+ Y) into ((T1)X p+ Y), for pointer type,
7971 when one of the new casts will fold away. Conservatively we assume
7972 that this happens when X or Y is NOP_EXPR or Y is INTEGER_CST. */
7973 if (POINTER_TYPE_P (type)
7974 && TREE_CODE (arg0) == POINTER_PLUS_EXPR
7975 && (!TYPE_RESTRICT (type) || TYPE_RESTRICT (TREE_TYPE (arg0)))
7976 && (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
7977 || TREE_CODE (TREE_OPERAND (arg0, 0)) == NOP_EXPR
7978 || TREE_CODE (TREE_OPERAND (arg0, 1)) == NOP_EXPR))
7980 tree arg00 = TREE_OPERAND (arg0, 0);
7981 tree arg01 = TREE_OPERAND (arg0, 1);
7983 return fold_build_pointer_plus_loc
7984 (loc, fold_convert_loc (loc, type, arg00), arg01);
7987 /* Convert (T1)(~(T2)X) into ~(T1)X if T1 and T2 are integral types
7988 of the same precision, and X is an integer type not narrower than
7989 types T1 or T2, i.e. the cast (T2)X isn't an extension. */
7990 if (INTEGRAL_TYPE_P (type)
7991 && TREE_CODE (op0) == BIT_NOT_EXPR
7992 && INTEGRAL_TYPE_P (TREE_TYPE (op0))
7993 && CONVERT_EXPR_P (TREE_OPERAND (op0, 0))
7994 && TYPE_PRECISION (type) == TYPE_PRECISION (TREE_TYPE (op0)))
7996 tem = TREE_OPERAND (TREE_OPERAND (op0, 0), 0);
7997 if (INTEGRAL_TYPE_P (TREE_TYPE (tem))
7998 && TYPE_PRECISION (type) <= TYPE_PRECISION (TREE_TYPE (tem)))
7999 return fold_build1_loc (loc, BIT_NOT_EXPR, type,
8000 fold_convert_loc (loc, type, tem));
8003 /* Convert (T1)(X * Y) into (T1)X * (T1)Y if T1 is narrower than the
8004 type of X and Y (integer types only). */
8005 if (INTEGRAL_TYPE_P (type)
8006 && TREE_CODE (op0) == MULT_EXPR
8007 && INTEGRAL_TYPE_P (TREE_TYPE (op0))
8008 && TYPE_PRECISION (type) < TYPE_PRECISION (TREE_TYPE (op0)))
8010 /* Be careful not to introduce new overflows. */
8011 tree mult_type;
8012 if (TYPE_OVERFLOW_WRAPS (type))
8013 mult_type = type;
8014 else
8015 mult_type = unsigned_type_for (type);
8017 if (TYPE_PRECISION (mult_type) < TYPE_PRECISION (TREE_TYPE (op0)))
8019 tem = fold_build2_loc (loc, MULT_EXPR, mult_type,
8020 fold_convert_loc (loc, mult_type,
8021 TREE_OPERAND (op0, 0)),
8022 fold_convert_loc (loc, mult_type,
8023 TREE_OPERAND (op0, 1)));
8024 return fold_convert_loc (loc, type, tem);
8028 tem = fold_convert_const (code, type, op0);
8029 return tem ? tem : NULL_TREE;
8031 case ADDR_SPACE_CONVERT_EXPR:
8032 if (integer_zerop (arg0))
8033 return fold_convert_const (code, type, arg0);
8034 return NULL_TREE;
8036 case FIXED_CONVERT_EXPR:
8037 tem = fold_convert_const (code, type, arg0);
8038 return tem ? tem : NULL_TREE;
8040 case VIEW_CONVERT_EXPR:
8041 if (TREE_TYPE (op0) == type)
8042 return op0;
8043 if (TREE_CODE (op0) == VIEW_CONVERT_EXPR)
8044 return fold_build1_loc (loc, VIEW_CONVERT_EXPR,
8045 type, TREE_OPERAND (op0, 0));
8046 if (TREE_CODE (op0) == MEM_REF)
8047 return fold_build2_loc (loc, MEM_REF, type,
8048 TREE_OPERAND (op0, 0), TREE_OPERAND (op0, 1));
8050 /* For integral conversions with the same precision or pointer
8051 conversions use a NOP_EXPR instead. */
8052 if ((INTEGRAL_TYPE_P (type)
8053 || POINTER_TYPE_P (type))
8054 && (INTEGRAL_TYPE_P (TREE_TYPE (op0))
8055 || POINTER_TYPE_P (TREE_TYPE (op0)))
8056 && TYPE_PRECISION (type) == TYPE_PRECISION (TREE_TYPE (op0)))
8057 return fold_convert_loc (loc, type, op0);
8059 /* Strip inner integral conversions that do not change the precision. */
8060 if (CONVERT_EXPR_P (op0)
8061 && (INTEGRAL_TYPE_P (TREE_TYPE (op0))
8062 || POINTER_TYPE_P (TREE_TYPE (op0)))
8063 && (INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (op0, 0)))
8064 || POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (op0, 0))))
8065 && (TYPE_PRECISION (TREE_TYPE (op0))
8066 == TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (op0, 0)))))
8067 return fold_build1_loc (loc, VIEW_CONVERT_EXPR,
8068 type, TREE_OPERAND (op0, 0));
8070 return fold_view_convert_expr (type, op0);
8072 case NEGATE_EXPR:
8073 tem = fold_negate_expr (loc, arg0);
8074 if (tem)
8075 return fold_convert_loc (loc, type, tem);
8076 return NULL_TREE;
8078 case ABS_EXPR:
8079 if (TREE_CODE (arg0) == INTEGER_CST || TREE_CODE (arg0) == REAL_CST)
8080 return fold_abs_const (arg0, type);
8081 else if (TREE_CODE (arg0) == NEGATE_EXPR)
8082 return fold_build1_loc (loc, ABS_EXPR, type, TREE_OPERAND (arg0, 0));
8083 /* Convert fabs((double)float) into (double)fabsf(float). */
8084 else if (TREE_CODE (arg0) == NOP_EXPR
8085 && TREE_CODE (type) == REAL_TYPE)
8087 tree targ0 = strip_float_extensions (arg0);
8088 if (targ0 != arg0)
8089 return fold_convert_loc (loc, type,
8090 fold_build1_loc (loc, ABS_EXPR,
8091 TREE_TYPE (targ0),
8092 targ0));
8094 /* ABS_EXPR<ABS_EXPR<x>> = ABS_EXPR<x> even if flag_wrapv is on. */
8095 else if (TREE_CODE (arg0) == ABS_EXPR)
8096 return arg0;
8097 else if (tree_expr_nonnegative_p (arg0))
8098 return arg0;
8100 /* Strip sign ops from argument. */
8101 if (TREE_CODE (type) == REAL_TYPE)
8103 tem = fold_strip_sign_ops (arg0);
8104 if (tem)
8105 return fold_build1_loc (loc, ABS_EXPR, type,
8106 fold_convert_loc (loc, type, tem));
8108 return NULL_TREE;
8110 case CONJ_EXPR:
8111 if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
8112 return fold_convert_loc (loc, type, arg0);
8113 if (TREE_CODE (arg0) == COMPLEX_EXPR)
8115 tree itype = TREE_TYPE (type);
8116 tree rpart = fold_convert_loc (loc, itype, TREE_OPERAND (arg0, 0));
8117 tree ipart = fold_convert_loc (loc, itype, TREE_OPERAND (arg0, 1));
8118 return fold_build2_loc (loc, COMPLEX_EXPR, type, rpart,
8119 negate_expr (ipart));
8121 if (TREE_CODE (arg0) == COMPLEX_CST)
8123 tree itype = TREE_TYPE (type);
8124 tree rpart = fold_convert_loc (loc, itype, TREE_REALPART (arg0));
8125 tree ipart = fold_convert_loc (loc, itype, TREE_IMAGPART (arg0));
8126 return build_complex (type, rpart, negate_expr (ipart));
8128 if (TREE_CODE (arg0) == CONJ_EXPR)
8129 return fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
8130 return NULL_TREE;
8132 case BIT_NOT_EXPR:
8133 if (TREE_CODE (arg0) == INTEGER_CST)
8134 return fold_not_const (arg0, type);
8135 else if (TREE_CODE (arg0) == BIT_NOT_EXPR)
8136 return fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
8137 /* Convert ~ (-A) to A - 1. */
8138 else if (INTEGRAL_TYPE_P (type) && TREE_CODE (arg0) == NEGATE_EXPR)
8139 return fold_build2_loc (loc, MINUS_EXPR, type,
8140 fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0)),
8141 build_int_cst (type, 1));
8142 /* Convert ~ (A - 1) or ~ (A + -1) to -A. */
8143 else if (INTEGRAL_TYPE_P (type)
8144 && ((TREE_CODE (arg0) == MINUS_EXPR
8145 && integer_onep (TREE_OPERAND (arg0, 1)))
8146 || (TREE_CODE (arg0) == PLUS_EXPR
8147 && integer_all_onesp (TREE_OPERAND (arg0, 1)))))
8148 return fold_build1_loc (loc, NEGATE_EXPR, type,
8149 fold_convert_loc (loc, type,
8150 TREE_OPERAND (arg0, 0)));
8151 /* Convert ~(X ^ Y) to ~X ^ Y or X ^ ~Y if ~X or ~Y simplify. */
8152 else if (TREE_CODE (arg0) == BIT_XOR_EXPR
8153 && (tem = fold_unary_loc (loc, BIT_NOT_EXPR, type,
8154 fold_convert_loc (loc, type,
8155 TREE_OPERAND (arg0, 0)))))
8156 return fold_build2_loc (loc, BIT_XOR_EXPR, type, tem,
8157 fold_convert_loc (loc, type,
8158 TREE_OPERAND (arg0, 1)));
8159 else if (TREE_CODE (arg0) == BIT_XOR_EXPR
8160 && (tem = fold_unary_loc (loc, BIT_NOT_EXPR, type,
8161 fold_convert_loc (loc, type,
8162 TREE_OPERAND (arg0, 1)))))
8163 return fold_build2_loc (loc, BIT_XOR_EXPR, type,
8164 fold_convert_loc (loc, type,
8165 TREE_OPERAND (arg0, 0)), tem);
8166 /* Perform BIT_NOT_EXPR on each element individually. */
8167 else if (TREE_CODE (arg0) == VECTOR_CST)
8169 tree elements = TREE_VECTOR_CST_ELTS (arg0), elem, list = NULL_TREE;
8170 int count = TYPE_VECTOR_SUBPARTS (type), i;
8172 for (i = 0; i < count; i++)
8174 if (elements)
8176 elem = TREE_VALUE (elements);
8177 elem = fold_unary_loc (loc, BIT_NOT_EXPR, TREE_TYPE (type), elem);
8178 if (elem == NULL_TREE)
8179 break;
8180 elements = TREE_CHAIN (elements);
8182 else
8183 elem = build_int_cst (TREE_TYPE (type), -1);
8184 list = tree_cons (NULL_TREE, elem, list);
8186 if (i == count)
8187 return build_vector (type, nreverse (list));
8190 return NULL_TREE;
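/* Editorial sketch (not part of GCC): the two's-complement identities
   used in the BIT_NOT_EXPR case above, ~x == -x - 1.  Hypothetical
   function name.  */
#if 0
static int
example_bit_not (int a)
{
  int x = ~(-a);    /* folds to a - 1 */
  int y = ~(a - 1); /* folds to -a */
  return x + y;
}
#endif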
8192 case TRUTH_NOT_EXPR:
8193 /* The argument to invert_truthvalue must have Boolean type. */
8194 if (TREE_CODE (TREE_TYPE (arg0)) != BOOLEAN_TYPE)
8195 arg0 = fold_convert_loc (loc, boolean_type_node, arg0);
8197 /* Note that the operand of this must be an int
8198 and its values must be 0 or 1.
8199 ("true" is a fixed value perhaps depending on the language,
8200 but we don't handle values other than 1 correctly yet.) */
8201 tem = fold_truth_not_expr (loc, arg0);
8202 if (!tem)
8203 return NULL_TREE;
8204 return fold_convert_loc (loc, type, tem);
8206 case REALPART_EXPR:
8207 if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
8208 return fold_convert_loc (loc, type, arg0);
8209 if (TREE_CODE (arg0) == COMPLEX_EXPR)
8210 return omit_one_operand_loc (loc, type, TREE_OPERAND (arg0, 0),
8211 TREE_OPERAND (arg0, 1));
8212 if (TREE_CODE (arg0) == COMPLEX_CST)
8213 return fold_convert_loc (loc, type, TREE_REALPART (arg0));
8214 if (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
8216 tree itype = TREE_TYPE (TREE_TYPE (arg0));
8217 tem = fold_build2_loc (loc, TREE_CODE (arg0), itype,
8218 fold_build1_loc (loc, REALPART_EXPR, itype,
8219 TREE_OPERAND (arg0, 0)),
8220 fold_build1_loc (loc, REALPART_EXPR, itype,
8221 TREE_OPERAND (arg0, 1)));
8222 return fold_convert_loc (loc, type, tem);
8224 if (TREE_CODE (arg0) == CONJ_EXPR)
8226 tree itype = TREE_TYPE (TREE_TYPE (arg0));
8227 tem = fold_build1_loc (loc, REALPART_EXPR, itype,
8228 TREE_OPERAND (arg0, 0));
8229 return fold_convert_loc (loc, type, tem);
8231 if (TREE_CODE (arg0) == CALL_EXPR)
8233 tree fn = get_callee_fndecl (arg0);
8234 if (fn && DECL_BUILT_IN_CLASS (fn) == BUILT_IN_NORMAL)
8235 switch (DECL_FUNCTION_CODE (fn))
8237 CASE_FLT_FN (BUILT_IN_CEXPI):
8238 fn = mathfn_built_in (type, BUILT_IN_COS);
8239 if (fn)
8240 return build_call_expr_loc (loc, fn, 1, CALL_EXPR_ARG (arg0, 0));
8241 break;
8243 default:
8244 break;
8247 return NULL_TREE;
8249 case IMAGPART_EXPR:
8250 if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
8251 return build_zero_cst (type);
8252 if (TREE_CODE (arg0) == COMPLEX_EXPR)
8253 return omit_one_operand_loc (loc, type, TREE_OPERAND (arg0, 1),
8254 TREE_OPERAND (arg0, 0));
8255 if (TREE_CODE (arg0) == COMPLEX_CST)
8256 return fold_convert_loc (loc, type, TREE_IMAGPART (arg0));
8257 if (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
8259 tree itype = TREE_TYPE (TREE_TYPE (arg0));
8260 tem = fold_build2_loc (loc, TREE_CODE (arg0), itype,
8261 fold_build1_loc (loc, IMAGPART_EXPR, itype,
8262 TREE_OPERAND (arg0, 0)),
8263 fold_build1_loc (loc, IMAGPART_EXPR, itype,
8264 TREE_OPERAND (arg0, 1)));
8265 return fold_convert_loc (loc, type, tem);
8267 if (TREE_CODE (arg0) == CONJ_EXPR)
8269 tree itype = TREE_TYPE (TREE_TYPE (arg0));
8270 tem = fold_build1_loc (loc, IMAGPART_EXPR, itype, TREE_OPERAND (arg0, 0));
8271 return fold_convert_loc (loc, type, negate_expr (tem));
8273 if (TREE_CODE (arg0) == CALL_EXPR)
8275 tree fn = get_callee_fndecl (arg0);
8276 if (fn && DECL_BUILT_IN_CLASS (fn) == BUILT_IN_NORMAL)
8277 switch (DECL_FUNCTION_CODE (fn))
8279 CASE_FLT_FN (BUILT_IN_CEXPI):
8280 fn = mathfn_built_in (type, BUILT_IN_SIN);
8281 if (fn)
8282 return build_call_expr_loc (loc, fn, 1, CALL_EXPR_ARG (arg0, 0));
8283 break;
8285 default:
8286 break;
8289 return NULL_TREE;
8291 case INDIRECT_REF:
8292 /* Fold *&X to X if X is an lvalue. */
8293 if (TREE_CODE (op0) == ADDR_EXPR)
8295 tree op00 = TREE_OPERAND (op0, 0);
8296 if ((TREE_CODE (op00) == VAR_DECL
8297 || TREE_CODE (op00) == PARM_DECL
8298 || TREE_CODE (op00) == RESULT_DECL)
8299 && !TREE_READONLY (op00))
8300 return op00;
8302 return NULL_TREE;
8304 case VEC_UNPACK_LO_EXPR:
8305 case VEC_UNPACK_HI_EXPR:
8306 case VEC_UNPACK_FLOAT_LO_EXPR:
8307 case VEC_UNPACK_FLOAT_HI_EXPR:
8309 unsigned int nelts = TYPE_VECTOR_SUBPARTS (type), i;
8310 tree *elts, vals = NULL_TREE;
8311 enum tree_code subcode;
8313 gcc_assert (TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0)) == nelts * 2);
8314 if (TREE_CODE (arg0) != VECTOR_CST)
8315 return NULL_TREE;
8317 elts = XALLOCAVEC (tree, nelts * 2);
8318 if (!vec_cst_ctor_to_array (arg0, elts))
8319 return NULL_TREE;
8321 if ((!BYTES_BIG_ENDIAN) ^ (code == VEC_UNPACK_LO_EXPR
8322 || code == VEC_UNPACK_FLOAT_LO_EXPR))
8323 elts += nelts;
8325 if (code == VEC_UNPACK_LO_EXPR || code == VEC_UNPACK_HI_EXPR)
8326 subcode = NOP_EXPR;
8327 else
8328 subcode = FLOAT_EXPR;
8330 for (i = 0; i < nelts; i++)
8332 elts[i] = fold_convert_const (subcode, TREE_TYPE (type), elts[i]);
8333 if (elts[i] == NULL_TREE || !CONSTANT_CLASS_P (elts[i]))
8334 return NULL_TREE;
8337 for (i = 0; i < nelts; i++)
8338 vals = tree_cons (NULL_TREE, elts[nelts - i - 1], vals);
8339 return build_vector (type, vals);
8342 default:
8343 return NULL_TREE;
8344 } /* switch (code) */
8348 /* If the operation was a conversion, do _not_ mark a resulting constant
8349 with TREE_OVERFLOW if the original constant was not. These conversions
8350 have implementation defined behavior and retaining the TREE_OVERFLOW
8351 flag here would confuse later passes such as VRP. */
8352 tree
8353 fold_unary_ignore_overflow_loc (location_t loc, enum tree_code code,
8354 tree type, tree op0)
8356 tree res = fold_unary_loc (loc, code, type, op0);
8357 if (res
8358 && TREE_CODE (res) == INTEGER_CST
8359 && TREE_CODE (op0) == INTEGER_CST
8360 && CONVERT_EXPR_CODE_P (code))
8361 TREE_OVERFLOW (res) = TREE_OVERFLOW (op0);
8363 return res;
8366 /* Fold a binary bitwise/truth expression of code CODE and type TYPE with
8367 operands OP0 and OP1. LOC is the location of the resulting expression.
8368 ARG0 and ARG1 are the NOP-stripped results of OP0 and OP1.
8369 Return the folded expression if folding is successful. Otherwise,
8370 return NULL_TREE. */
8371 static tree
8372 fold_truth_andor (location_t loc, enum tree_code code, tree type,
8373 tree arg0, tree arg1, tree op0, tree op1)
8375 tree tem;
8377 /* We only do these simplifications if we are optimizing. */
8378 if (!optimize)
8379 return NULL_TREE;
8381 /* Check for things like (A || B) && (A || C). We can convert this
8382 to A || (B && C). Note that either operator can be any of the four
8383 truth and/or operations and the transformation will still be
8384 valid. Also note that we only care about order for the
8385 ANDIF and ORIF operators. If B contains side effects, this
8386 might change the truth-value of A. */
8387 if (TREE_CODE (arg0) == TREE_CODE (arg1)
8388 && (TREE_CODE (arg0) == TRUTH_ANDIF_EXPR
8389 || TREE_CODE (arg0) == TRUTH_ORIF_EXPR
8390 || TREE_CODE (arg0) == TRUTH_AND_EXPR
8391 || TREE_CODE (arg0) == TRUTH_OR_EXPR)
8392 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (arg0, 1)))
8394 tree a00 = TREE_OPERAND (arg0, 0);
8395 tree a01 = TREE_OPERAND (arg0, 1);
8396 tree a10 = TREE_OPERAND (arg1, 0);
8397 tree a11 = TREE_OPERAND (arg1, 1);
8398 int commutative = ((TREE_CODE (arg0) == TRUTH_OR_EXPR
8399 || TREE_CODE (arg0) == TRUTH_AND_EXPR)
8400 && (code == TRUTH_AND_EXPR
8401 || code == TRUTH_OR_EXPR));
8403 if (operand_equal_p (a00, a10, 0))
8404 return fold_build2_loc (loc, TREE_CODE (arg0), type, a00,
8405 fold_build2_loc (loc, code, type, a01, a11));
8406 else if (commutative && operand_equal_p (a00, a11, 0))
8407 return fold_build2_loc (loc, TREE_CODE (arg0), type, a00,
8408 fold_build2_loc (loc, code, type, a01, a10));
8409 else if (commutative && operand_equal_p (a01, a10, 0))
8410 return fold_build2_loc (loc, TREE_CODE (arg0), type, a01,
8411 fold_build2_loc (loc, code, type, a00, a11));
8413 /* This case is tricky because we must either have commutative
8414 operators or else A10 must not have side-effects. */
8416 else if ((commutative || ! TREE_SIDE_EFFECTS (a10))
8417 && operand_equal_p (a01, a11, 0))
8418 return fold_build2_loc (loc, TREE_CODE (arg0), type,
8419 fold_build2_loc (loc, code, type, a00, a10),
8420 a01);
8423 /* See if we can build a range comparison. */
8424 if (0 != (tem = fold_range_test (loc, code, type, op0, op1)))
8425 return tem;
8427 if ((code == TRUTH_ANDIF_EXPR && TREE_CODE (arg0) == TRUTH_ORIF_EXPR)
8428 || (code == TRUTH_ORIF_EXPR && TREE_CODE (arg0) == TRUTH_ANDIF_EXPR))
8430 tem = merge_truthop_with_opposite_arm (loc, arg0, arg1, true);
8431 if (tem)
8432 return fold_build2_loc (loc, code, type, tem, arg1);
8435 if ((code == TRUTH_ANDIF_EXPR && TREE_CODE (arg1) == TRUTH_ORIF_EXPR)
8436 || (code == TRUTH_ORIF_EXPR && TREE_CODE (arg1) == TRUTH_ANDIF_EXPR))
8438 tem = merge_truthop_with_opposite_arm (loc, arg1, arg0, false);
8439 if (tem)
8440 return fold_build2_loc (loc, code, type, arg0, tem);
8443 /* Check for the possibility of merging component references. If our
8444 lhs is another similar operation, try to merge its rhs with our
8445 rhs. Then try to merge our lhs and rhs. */
8446 if (TREE_CODE (arg0) == code
8447 && 0 != (tem = fold_truth_andor_1 (loc, code, type,
8448 TREE_OPERAND (arg0, 1), arg1)))
8449 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0), tem);
8451 if ((tem = fold_truth_andor_1 (loc, code, type, arg0, arg1)) != 0)
8452 return tem;
8454 if ((BRANCH_COST (optimize_function_for_speed_p (cfun),
8455 false) >= 2)
8456 && LOGICAL_OP_NON_SHORT_CIRCUIT
8457 && (code == TRUTH_AND_EXPR
8458 || code == TRUTH_ANDIF_EXPR
8459 || code == TRUTH_OR_EXPR
8460 || code == TRUTH_ORIF_EXPR))
8462 enum tree_code ncode, icode;
8464 ncode = (code == TRUTH_ANDIF_EXPR || code == TRUTH_AND_EXPR)
8465 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR;
8466 icode = ncode == TRUTH_AND_EXPR ? TRUTH_ANDIF_EXPR : TRUTH_ORIF_EXPR;
8468 /* Transform ((A AND-IF B) AND[-IF] C) into (A AND-IF (B AND C)),
8469 or ((A OR-IF B) OR[-IF] C) into (A OR-IF (B OR C))
8470 We don't want to pack more than two leaves into a non-IF AND/OR
8471 expression.
8472 If the tree code of the left-hand operand isn't an AND/OR-IF code and
8473 isn't equal to IF-CODE, then we don't want to add the right-hand operand.
8474 If the inner right-hand side of the left-hand operand has
8475 side-effects, or isn't simple, then we can't add to it,
8476 as otherwise we might destroy the if-sequence. */
8477 if (TREE_CODE (arg0) == icode
8478 && simple_operand_p_2 (arg1)
8479 /* Needed for sequence points to handle trapping and
8480 side-effects. */
8481 && simple_operand_p_2 (TREE_OPERAND (arg0, 1)))
8483 tem = fold_build2_loc (loc, ncode, type, TREE_OPERAND (arg0, 1),
8484 arg1);
8485 return fold_build2_loc (loc, icode, type, TREE_OPERAND (arg0, 0),
8486 tem);
8488 /* Same as above but for (A AND[-IF] (B AND-IF C)) -> ((A AND B) AND-IF C),
8489 or (A OR[-IF] (B OR-IF C)) -> ((A OR B) OR-IF C). */
8490 else if (TREE_CODE (arg1) == icode
8491 && simple_operand_p_2 (arg0)
8492 /* Needed for sequence points to handle trapping and
8493 side-effects. */
8494 && simple_operand_p_2 (TREE_OPERAND (arg1, 0)))
8496 tem = fold_build2_loc (loc, ncode, type,
8497 arg0, TREE_OPERAND (arg1, 0));
8498 return fold_build2_loc (loc, icode, type, tem,
8499 TREE_OPERAND (arg1, 1));
8501 /* Transform (A AND-IF B) into (A AND B), or (A OR-IF B)
8502 into (A OR B).
8503 For sequence point consistency, we need to check for trapping,
8504 and side-effects. */
8505 else if (code == icode && simple_operand_p_2 (arg0)
8506 && simple_operand_p_2 (arg1))
8507 return fold_build2_loc (loc, ncode, type, arg0, arg1);
8510 return NULL_TREE;
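/* Editorial sketch (not part of GCC): the distribution performed at
   the top of the function above -- (a || b) && (a || c) becomes
   a || (b && c) when b has no side effects.  Hypothetical name.  */
#if 0
static int
example_andor (int a, int b, int c)
{
  return (a || b) && (a || c); /* folds to a || (b && c) */
}
#endif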
8513 /* Fold a binary expression of code CODE and type TYPE with operands
8514 OP0 and OP1, containing either a MIN-MAX or a MAX-MIN combination.
8515 Return the folded expression if folding is successful. Otherwise,
8516 return NULL_TREE. */
8518 static tree
8519 fold_minmax (location_t loc, enum tree_code code, tree type, tree op0, tree op1)
8521 enum tree_code compl_code;
8523 if (code == MIN_EXPR)
8524 compl_code = MAX_EXPR;
8525 else if (code == MAX_EXPR)
8526 compl_code = MIN_EXPR;
8527 else
8528 gcc_unreachable ();
8530 /* MIN (MAX (a, b), b) == b. */
8531 if (TREE_CODE (op0) == compl_code
8532 && operand_equal_p (TREE_OPERAND (op0, 1), op1, 0))
8533 return omit_one_operand_loc (loc, type, op1, TREE_OPERAND (op0, 0));
8535 /* MIN (MAX (b, a), b) == b. */
8536 if (TREE_CODE (op0) == compl_code
8537 && operand_equal_p (TREE_OPERAND (op0, 0), op1, 0)
8538 && reorder_operands_p (TREE_OPERAND (op0, 1), op1))
8539 return omit_one_operand_loc (loc, type, op1, TREE_OPERAND (op0, 1));
8541 /* MIN (a, MAX (a, b)) == a. */
8542 if (TREE_CODE (op1) == compl_code
8543 && operand_equal_p (op0, TREE_OPERAND (op1, 0), 0)
8544 && reorder_operands_p (op0, TREE_OPERAND (op1, 1)))
8545 return omit_one_operand_loc (loc, type, op0, TREE_OPERAND (op1, 1));
8547 /* MIN (a, MAX (b, a)) == a. */
8548 if (TREE_CODE (op1) == compl_code
8549 && operand_equal_p (op0, TREE_OPERAND (op1, 1), 0)
8550 && reorder_operands_p (op0, TREE_OPERAND (op1, 0)))
8551 return omit_one_operand_loc (loc, type, op0, TREE_OPERAND (op1, 0));
8553 return NULL_TREE;
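/* Editorial sketch (not part of GCC): the first identity above,
   MIN (MAX (a, b), b) == b, at the source level.  Hypothetical
   function name.  */
#if 0
static int
example_minmax (int a, int b)
{
  int t = a > b ? a : b; /* MAX (a, b) */
  return t < b ? t : b;  /* MIN (MAX (a, b), b) folds to b */
}
#endif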
8556 /* Helper that tries to canonicalize the comparison ARG0 CODE ARG1
8557 by changing CODE to reduce the magnitude of constants involved in
8558 ARG0 of the comparison.
8559 Returns a canonicalized comparison tree if a simplification was
8560 possible, otherwise returns NULL_TREE.
8561 Set *STRICT_OVERFLOW_P to true if the canonicalization is only
8562 valid if signed overflow is undefined. */
8564 static tree
8565 maybe_canonicalize_comparison_1 (location_t loc, enum tree_code code, tree type,
8566 tree arg0, tree arg1,
8567 bool *strict_overflow_p)
8569 enum tree_code code0 = TREE_CODE (arg0);
8570 tree t, cst0 = NULL_TREE;
8571 int sgn0;
8572 bool swap = false;
8574 /* Match A +- CST code arg1 and CST code arg1. We can change the
8575 first form only if overflow is undefined. */
8576 if (!((TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0))
8577 /* In principle pointers also have undefined overflow behavior,
8578 but that causes problems elsewhere. */
8579 && !POINTER_TYPE_P (TREE_TYPE (arg0))
8580 && (code0 == MINUS_EXPR
8581 || code0 == PLUS_EXPR)
8582 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
8583 || code0 == INTEGER_CST))
8584 return NULL_TREE;
8586 /* Identify the constant in arg0 and its sign. */
8587 if (code0 == INTEGER_CST)
8588 cst0 = arg0;
8589 else
8590 cst0 = TREE_OPERAND (arg0, 1);
8591 sgn0 = tree_int_cst_sgn (cst0);
8593 /* Overflowed constants and zero will cause problems. */
8594 if (integer_zerop (cst0)
8595 || TREE_OVERFLOW (cst0))
8596 return NULL_TREE;
8598 /* See if we can reduce the magnitude of the constant in
8599 arg0 by changing the comparison code. */
8600 if (code0 == INTEGER_CST)
8602 /* CST <= arg1 -> CST-1 < arg1. */
8603 if (code == LE_EXPR && sgn0 == 1)
8604 code = LT_EXPR;
8605 /* -CST < arg1 -> -CST-1 <= arg1. */
8606 else if (code == LT_EXPR && sgn0 == -1)
8607 code = LE_EXPR;
8608 /* CST > arg1 -> CST-1 >= arg1. */
8609 else if (code == GT_EXPR && sgn0 == 1)
8610 code = GE_EXPR;
8611 /* -CST >= arg1 -> -CST-1 > arg1. */
8612 else if (code == GE_EXPR && sgn0 == -1)
8613 code = GT_EXPR;
8614 else
8615 return NULL_TREE;
8616 /* arg1 code' CST' might be more canonical. */
8617 swap = true;
8619 else
8621 /* A - CST < arg1 -> A - CST-1 <= arg1. */
8622 if (code == LT_EXPR
8623 && code0 == ((sgn0 == -1) ? PLUS_EXPR : MINUS_EXPR))
8624 code = LE_EXPR;
8625 /* A + CST > arg1 -> A + CST-1 >= arg1. */
8626 else if (code == GT_EXPR
8627 && code0 == ((sgn0 == -1) ? MINUS_EXPR : PLUS_EXPR))
8628 code = GE_EXPR;
8629 /* A + CST <= arg1 -> A + CST-1 < arg1. */
8630 else if (code == LE_EXPR
8631 && code0 == ((sgn0 == -1) ? MINUS_EXPR : PLUS_EXPR))
8632 code = LT_EXPR;
8633 /* A - CST >= arg1 -> A - CST-1 > arg1. */
8634 else if (code == GE_EXPR
8635 && code0 == ((sgn0 == -1) ? PLUS_EXPR : MINUS_EXPR))
8636 code = GT_EXPR;
8637 else
8638 return NULL_TREE;
8639 *strict_overflow_p = true;
8642 /* Now build the constant reduced in magnitude. But not if that
8643 would produce one outside of its type's range. */
8644 if (INTEGRAL_TYPE_P (TREE_TYPE (cst0))
8645 && ((sgn0 == 1
8646 && TYPE_MIN_VALUE (TREE_TYPE (cst0))
8647 && tree_int_cst_equal (cst0, TYPE_MIN_VALUE (TREE_TYPE (cst0))))
8648 || (sgn0 == -1
8649 && TYPE_MAX_VALUE (TREE_TYPE (cst0))
8650 && tree_int_cst_equal (cst0, TYPE_MAX_VALUE (TREE_TYPE (cst0))))))
8651 /* We cannot swap the comparison here as that would cause us to
8652 endlessly recurse. */
8653 return NULL_TREE;
8655 t = int_const_binop (sgn0 == -1 ? PLUS_EXPR : MINUS_EXPR,
8656 cst0, build_int_cst (TREE_TYPE (cst0), 1));
8657 if (code0 != INTEGER_CST)
8658 t = fold_build2_loc (loc, code0, TREE_TYPE (arg0), TREE_OPERAND (arg0, 0), t);
8659 t = fold_convert (TREE_TYPE (arg1), t);
8661 /* If swapping might yield a more canonical form, do so. */
8662 if (swap)
8663 return fold_build2_loc (loc, swap_tree_comparison (code), type, arg1, t);
8664 else
8665 return fold_build2_loc (loc, code, type, t, arg1);
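/* Editorial sketch (not part of GCC): examples of the magnitude
   reduction above.  5 <= x becomes 4 < x and is then swapped to x > 4;
   and, where signed overflow is undefined, x + 2 > y becomes
   x + 1 >= y.  Hypothetical function name.  */
#if 0
static int
example_canonical (int x)
{
  return 5 <= x; /* canonicalized to x > 4 */
}
#endif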
8668 /* Canonicalize the comparison ARG0 CODE ARG1 with type TYPE with undefined
8669 overflow further. Try to decrease the magnitude of constants involved
8670 by changing LE_EXPR and GE_EXPR to LT_EXPR and GT_EXPR or vice versa
8671 and put sole constants at the second argument position.
8672 Returns the canonicalized tree if changed, otherwise NULL_TREE. */
8674 static tree
8675 maybe_canonicalize_comparison (location_t loc, enum tree_code code, tree type,
8676 tree arg0, tree arg1)
8678 tree t;
8679 bool strict_overflow_p;
8680 const char * const warnmsg = G_("assuming signed overflow does not occur "
8681 "when reducing constant in comparison");
8683 /* Try canonicalization by simplifying arg0. */
8684 strict_overflow_p = false;
8685 t = maybe_canonicalize_comparison_1 (loc, code, type, arg0, arg1,
8686 &strict_overflow_p);
8687 if (t)
8689 if (strict_overflow_p)
8690 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_MAGNITUDE);
8691 return t;
8694 /* Try canonicalization by simplifying arg1 using the swapped
8695 comparison. */
8696 code = swap_tree_comparison (code);
8697 strict_overflow_p = false;
8698 t = maybe_canonicalize_comparison_1 (loc, code, type, arg1, arg0,
8699 &strict_overflow_p);
8700 if (t && strict_overflow_p)
8701 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_MAGNITUDE);
8702 return t;
8705 /* Return whether BASE + OFFSET + BITPOS may wrap around the address
8706 space. This is used to avoid issuing overflow warnings for
8707 expressions like &p->x which cannot wrap. */
8709 static bool
8710 pointer_may_wrap_p (tree base, tree offset, HOST_WIDE_INT bitpos)
8712 unsigned HOST_WIDE_INT offset_low, total_low;
8713 HOST_WIDE_INT size, offset_high, total_high;
8715 if (!POINTER_TYPE_P (TREE_TYPE (base)))
8716 return true;
8718 if (bitpos < 0)
8719 return true;
8721 if (offset == NULL_TREE)
8723 offset_low = 0;
8724 offset_high = 0;
8726 else if (TREE_CODE (offset) != INTEGER_CST || TREE_OVERFLOW (offset))
8727 return true;
8728 else
8730 offset_low = TREE_INT_CST_LOW (offset);
8731 offset_high = TREE_INT_CST_HIGH (offset);
8734 if (add_double_with_sign (offset_low, offset_high,
8735 bitpos / BITS_PER_UNIT, 0,
8736 &total_low, &total_high,
8737 true))
8738 return true;
8740 if (total_high != 0)
8741 return true;
8743 size = int_size_in_bytes (TREE_TYPE (TREE_TYPE (base)));
8744 if (size <= 0)
8745 return true;
8747 /* We can do slightly better for SIZE if we have an ADDR_EXPR of an
8748 array. */
8749 if (TREE_CODE (base) == ADDR_EXPR)
8751 HOST_WIDE_INT base_size;
8753 base_size = int_size_in_bytes (TREE_TYPE (TREE_OPERAND (base, 0)));
8754 if (base_size > 0 && size < base_size)
8755 size = base_size;
8758 return total_low > (unsigned HOST_WIDE_INT) size;
8761 /* Subroutine of fold_binary. This routine performs all of the
8762 transformations that are common to the equality/inequality
8763 operators (EQ_EXPR and NE_EXPR) and the ordering operators
8764 (LT_EXPR, LE_EXPR, GE_EXPR and GT_EXPR). Callers other than
8765 fold_binary should call fold_binary instead. Fold a comparison with
8766 tree code CODE and type TYPE with operands OP0 and OP1. Return
8767 the folded comparison or NULL_TREE. */
8769 static tree
8770 fold_comparison (location_t loc, enum tree_code code, tree type,
8771 tree op0, tree op1)
8773 tree arg0, arg1, tem;
8775 arg0 = op0;
8776 arg1 = op1;
8778 STRIP_SIGN_NOPS (arg0);
8779 STRIP_SIGN_NOPS (arg1);
8781 tem = fold_relational_const (code, type, arg0, arg1);
8782 if (tem != NULL_TREE)
8783 return tem;
8785 /* If one arg is a real or integer constant, put it last. */
8786 if (tree_swap_operands_p (arg0, arg1, true))
8787 return fold_build2_loc (loc, swap_tree_comparison (code), type, op1, op0);
8789 /* Transform comparisons of the form X +- C1 CMP C2 to X CMP C2 +- C1. */
8790 if ((TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
8791 && (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
8792 && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1))
8793 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
8794 && (TREE_CODE (arg1) == INTEGER_CST
8795 && !TREE_OVERFLOW (arg1)))
8797 tree const1 = TREE_OPERAND (arg0, 1);
8798 tree const2 = arg1;
8799 tree variable = TREE_OPERAND (arg0, 0);
8800 tree lhs;
8801 int lhs_add;
8802 lhs_add = TREE_CODE (arg0) != PLUS_EXPR;
8804 lhs = fold_build2_loc (loc, lhs_add ? PLUS_EXPR : MINUS_EXPR,
8805 TREE_TYPE (arg1), const2, const1);
8807 /* If the constant operation overflowed this can be
8808 simplified as a comparison against INT_MAX/INT_MIN. */
8809 if (TREE_CODE (lhs) == INTEGER_CST
8810 && TREE_OVERFLOW (lhs))
8812 int const1_sgn = tree_int_cst_sgn (const1);
8813 enum tree_code code2 = code;
8815 /* Get the sign of the constant on the lhs if the
8816 operation were VARIABLE + CONST1. */
8817 if (TREE_CODE (arg0) == MINUS_EXPR)
8818 const1_sgn = -const1_sgn;
8820 /* The sign of the constant determines if we overflowed
8821 INT_MAX (const1_sgn == -1) or INT_MIN (const1_sgn == 1).
8822 Canonicalize to the INT_MIN overflow by swapping the comparison
8823 if necessary. */
8824 if (const1_sgn == -1)
8825 code2 = swap_tree_comparison (code);
8827 /* We now can look at the canonicalized case
8828 VARIABLE + 1 CODE2 INT_MIN
8829 and decide on the result. */
8830 if (code2 == LT_EXPR
8831 || code2 == LE_EXPR
8832 || code2 == EQ_EXPR)
8833 return omit_one_operand_loc (loc, type, boolean_false_node, variable);
8834 else if (code2 == NE_EXPR
8835 || code2 == GE_EXPR
8836 || code2 == GT_EXPR)
8837 return omit_one_operand_loc (loc, type, boolean_true_node, variable);
8840 if (TREE_CODE (lhs) == TREE_CODE (arg1)
8841 && (TREE_CODE (lhs) != INTEGER_CST
8842 || !TREE_OVERFLOW (lhs)))
8844 if (code != EQ_EXPR && code != NE_EXPR)
8845 fold_overflow_warning ("assuming signed overflow does not occur "
8846 "when changing X +- C1 cmp C2 to "
8847 "X cmp C1 +- C2",
8848 WARN_STRICT_OVERFLOW_COMPARISON);
8849 return fold_build2_loc (loc, code, type, variable, lhs);
8853 /* For comparisons of pointers we can decompose it to a compile time
8854 comparison of the base objects and the offsets into the object.
8855 This requires at least one operand being an ADDR_EXPR or a
8856 POINTER_PLUS_EXPR to do more than the operand_equal_p test below. */
8857 if (POINTER_TYPE_P (TREE_TYPE (arg0))
8858 && (TREE_CODE (arg0) == ADDR_EXPR
8859 || TREE_CODE (arg1) == ADDR_EXPR
8860 || TREE_CODE (arg0) == POINTER_PLUS_EXPR
8861 || TREE_CODE (arg1) == POINTER_PLUS_EXPR))
8863 tree base0, base1, offset0 = NULL_TREE, offset1 = NULL_TREE;
8864 HOST_WIDE_INT bitsize, bitpos0 = 0, bitpos1 = 0;
8865 enum machine_mode mode;
8866 int volatilep, unsignedp;
8867 bool indirect_base0 = false, indirect_base1 = false;
8869 /* Get base and offset for the access. Strip ADDR_EXPR for
8870 get_inner_reference, but put it back by stripping INDIRECT_REF
8871 off the base object if possible. indirect_baseN will be true
8872 if baseN is not an address but refers to the object itself. */
8873 base0 = arg0;
8874 if (TREE_CODE (arg0) == ADDR_EXPR)
8876 base0 = get_inner_reference (TREE_OPERAND (arg0, 0),
8877 &bitsize, &bitpos0, &offset0, &mode,
8878 &unsignedp, &volatilep, false);
8879 if (TREE_CODE (base0) == INDIRECT_REF)
8880 base0 = TREE_OPERAND (base0, 0);
8881 else
8882 indirect_base0 = true;
8884 else if (TREE_CODE (arg0) == POINTER_PLUS_EXPR)
8886 base0 = TREE_OPERAND (arg0, 0);
8887 STRIP_SIGN_NOPS (base0);
8888 if (TREE_CODE (base0) == ADDR_EXPR)
8890 base0 = TREE_OPERAND (base0, 0);
8891 indirect_base0 = true;
8893 offset0 = TREE_OPERAND (arg0, 1);
8894 if (host_integerp (offset0, 0))
8896 HOST_WIDE_INT off = size_low_cst (offset0);
8897 if ((HOST_WIDE_INT) (((unsigned HOST_WIDE_INT) off)
8898 * BITS_PER_UNIT)
8899 / BITS_PER_UNIT == (HOST_WIDE_INT) off)
8901 bitpos0 = off * BITS_PER_UNIT;
8902 offset0 = NULL_TREE;
8907 base1 = arg1;
8908 if (TREE_CODE (arg1) == ADDR_EXPR)
8910 base1 = get_inner_reference (TREE_OPERAND (arg1, 0),
8911 &bitsize, &bitpos1, &offset1, &mode,
8912 &unsignedp, &volatilep, false);
8913 if (TREE_CODE (base1) == INDIRECT_REF)
8914 base1 = TREE_OPERAND (base1, 0);
8915 else
8916 indirect_base1 = true;
8918 else if (TREE_CODE (arg1) == POINTER_PLUS_EXPR)
8920 base1 = TREE_OPERAND (arg1, 0);
8921 STRIP_SIGN_NOPS (base1);
8922 if (TREE_CODE (base1) == ADDR_EXPR)
8924 base1 = TREE_OPERAND (base1, 0);
8925 indirect_base1 = true;
8927 offset1 = TREE_OPERAND (arg1, 1);
8928 if (host_integerp (offset1, 0))
8930 HOST_WIDE_INT off = size_low_cst (offset1);
8931 if ((HOST_WIDE_INT) (((unsigned HOST_WIDE_INT) off)
8932 * BITS_PER_UNIT)
8933 / BITS_PER_UNIT == (HOST_WIDE_INT) off)
8935 bitpos1 = off * BITS_PER_UNIT;
8936 offset1 = NULL_TREE;
8941 /* A local variable can never be pointed to by
8942 the default SSA name of an incoming parameter. */
8943 if ((TREE_CODE (arg0) == ADDR_EXPR
8944 && indirect_base0
8945 && TREE_CODE (base0) == VAR_DECL
8946 && auto_var_in_fn_p (base0, current_function_decl)
8947 && !indirect_base1
8948 && TREE_CODE (base1) == SSA_NAME
8949 && TREE_CODE (SSA_NAME_VAR (base1)) == PARM_DECL
8950 && SSA_NAME_IS_DEFAULT_DEF (base1))
8951 || (TREE_CODE (arg1) == ADDR_EXPR
8952 && indirect_base1
8953 && TREE_CODE (base1) == VAR_DECL
8954 && auto_var_in_fn_p (base1, current_function_decl)
8955 && !indirect_base0
8956 && TREE_CODE (base0) == SSA_NAME
8957 && TREE_CODE (SSA_NAME_VAR (base0)) == PARM_DECL
8958 && SSA_NAME_IS_DEFAULT_DEF (base0)))
8960 if (code == NE_EXPR)
8961 return constant_boolean_node (1, type);
8962 else if (code == EQ_EXPR)
8963 return constant_boolean_node (0, type);
8965 /* If we have equivalent bases we might be able to simplify. */
8966 else if (indirect_base0 == indirect_base1
8967 && operand_equal_p (base0, base1, 0))
8969 /* We can fold this expression to a constant if the non-constant
8970 offset parts are equal. */
8971 if ((offset0 == offset1
8972 || (offset0 && offset1
8973 && operand_equal_p (offset0, offset1, 0)))
8974 && (code == EQ_EXPR
8975 || code == NE_EXPR
8976 || (indirect_base0 && DECL_P (base0))
8977 || POINTER_TYPE_OVERFLOW_UNDEFINED))
8980 if (code != EQ_EXPR
8981 && code != NE_EXPR
8982 && bitpos0 != bitpos1
8983 && (pointer_may_wrap_p (base0, offset0, bitpos0)
8984 || pointer_may_wrap_p (base1, offset1, bitpos1)))
8985 fold_overflow_warning (("assuming pointer wraparound does not "
8986 "occur when comparing P +- C1 with "
8987 "P +- C2"),
8988 WARN_STRICT_OVERFLOW_CONDITIONAL);
8990 switch (code)
8992 case EQ_EXPR:
8993 return constant_boolean_node (bitpos0 == bitpos1, type);
8994 case NE_EXPR:
8995 return constant_boolean_node (bitpos0 != bitpos1, type);
8996 case LT_EXPR:
8997 return constant_boolean_node (bitpos0 < bitpos1, type);
8998 case LE_EXPR:
8999 return constant_boolean_node (bitpos0 <= bitpos1, type);
9000 case GE_EXPR:
9001 return constant_boolean_node (bitpos0 >= bitpos1, type);
9002 case GT_EXPR:
9003 return constant_boolean_node (bitpos0 > bitpos1, type);
9004 default:;
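/* Worked example (hypothetical declarations): for "int a[10];" with
   32-bit int, "&a[2] < &a[5]" finds equal bases with bitpos0 == 64
   and bitpos1 == 160, so the LT_EXPR case above folds the whole
   comparison to 1.  */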
9007 /* We can simplify the comparison to a comparison of the variable
9008 offset parts if the constant offset parts are equal.
9009 Be careful to use signed size type here because otherwise we
9010 mess with array offsets in the wrong way. This is possible
9011 because pointer arithmetic is restricted to remain within an
9012 object and overflow on pointer differences is undefined as of
9013 6.5.6/8 and /9 with respect to the signed ptrdiff_t. */
9014 else if (bitpos0 == bitpos1
9015 && ((code == EQ_EXPR || code == NE_EXPR)
9016 || (indirect_base0 && DECL_P (base0))
9017 || POINTER_TYPE_OVERFLOW_UNDEFINED))
9019 /* By converting to signed size type we cover middle-end pointer
9020 arithmetic which operates on unsigned pointer types of size
9021 type size and ARRAY_REF offsets which are properly sign or
9022 zero extended from their type in case it is narrower than
9023 size type. */
9024 if (offset0 == NULL_TREE)
9025 offset0 = build_int_cst (ssizetype, 0);
9026 else
9027 offset0 = fold_convert_loc (loc, ssizetype, offset0);
9028 if (offset1 == NULL_TREE)
9029 offset1 = build_int_cst (ssizetype, 0);
9030 else
9031 offset1 = fold_convert_loc (loc, ssizetype, offset1);
9033 if (code != EQ_EXPR
9034 && code != NE_EXPR
9035 && (pointer_may_wrap_p (base0, offset0, bitpos0)
9036 || pointer_may_wrap_p (base1, offset1, bitpos1)))
9037 fold_overflow_warning (("assuming pointer wraparound does not "
9038 "occur when comparing P +- C1 with "
9039 "P +- C2"),
9040 WARN_STRICT_OVERFLOW_COMPARISON);
9042 return fold_build2_loc (loc, code, type, offset0, offset1);
9045 /* For non-equal bases we can simplify if they are addresses
9046 of local binding decls or constants. */
9047 else if (indirect_base0 && indirect_base1
9048 /* We know that !operand_equal_p (base0, base1, 0)
9049 because the if condition was false. But make
9050 sure two decls are not the same. */
9051 && base0 != base1
9052 && TREE_CODE (arg0) == ADDR_EXPR
9053 && TREE_CODE (arg1) == ADDR_EXPR
9054 && (((TREE_CODE (base0) == VAR_DECL
9055 || TREE_CODE (base0) == PARM_DECL)
9056 && (targetm.binds_local_p (base0)
9057 || CONSTANT_CLASS_P (base1)))
9058 || CONSTANT_CLASS_P (base0))
9059 && (((TREE_CODE (base1) == VAR_DECL
9060 || TREE_CODE (base1) == PARM_DECL)
9061 && (targetm.binds_local_p (base1)
9062 || CONSTANT_CLASS_P (base0)))
9063 || CONSTANT_CLASS_P (base1)))
9065 if (code == EQ_EXPR)
9066 return omit_two_operands_loc (loc, type, boolean_false_node,
9067 arg0, arg1);
9068 else if (code == NE_EXPR)
9069 return omit_two_operands_loc (loc, type, boolean_true_node,
9070 arg0, arg1);
9072 /* For equal offsets we can simplify to a comparison of the
9073 base addresses. */
9074 else if (bitpos0 == bitpos1
9075 && (indirect_base0
9076 ? base0 != TREE_OPERAND (arg0, 0) : base0 != arg0)
9077 && (indirect_base1
9078 ? base1 != TREE_OPERAND (arg1, 0) : base1 != arg1)
9079 && ((offset0 == offset1)
9080 || (offset0 && offset1
9081 && operand_equal_p (offset0, offset1, 0))))
9083 if (indirect_base0)
9084 base0 = build_fold_addr_expr_loc (loc, base0);
9085 if (indirect_base1)
9086 base1 = build_fold_addr_expr_loc (loc, base1);
9087 return fold_build2_loc (loc, code, type, base0, base1);
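/* Illustrative example (hypothetical source): "&x.f == &y.f" for two
   distinct structure objects x and y has equal (constant) offsets, so
   it reduces here to a comparison of the rebuilt base addresses
   "&x == &y".  */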
9091 /* Transform comparisons of the form X +- C1 CMP Y +- C2 to
9092 X CMP Y +- C2 +- C1 for signed X, Y. This is valid if
9093 the resulting offset is smaller in absolute value than the
9094 original one. */
9095 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0))
9096 && (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
9097 && (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
9098 && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1)))
9099 && (TREE_CODE (arg1) == PLUS_EXPR || TREE_CODE (arg1) == MINUS_EXPR)
9100 && (TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
9101 && !TREE_OVERFLOW (TREE_OPERAND (arg1, 1))))
9103 tree const1 = TREE_OPERAND (arg0, 1);
9104 tree const2 = TREE_OPERAND (arg1, 1);
9105 tree variable1 = TREE_OPERAND (arg0, 0);
9106 tree variable2 = TREE_OPERAND (arg1, 0);
9107 tree cst;
9108 const char * const warnmsg = G_("assuming signed overflow does not "
9109 "occur when combining constants around "
9110 "a comparison");
9112 /* Put the constant on the side where it doesn't overflow and is
9113 of lower absolute value than before. */
9114 cst = int_const_binop (TREE_CODE (arg0) == TREE_CODE (arg1)
9115 ? MINUS_EXPR : PLUS_EXPR,
9116 const2, const1);
9117 if (!TREE_OVERFLOW (cst)
9118 && tree_int_cst_compare (const2, cst) == tree_int_cst_sgn (const2))
9120 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_COMPARISON);
9121 return fold_build2_loc (loc, code, type,
9122 variable1,
9123 fold_build2_loc (loc,
9124 TREE_CODE (arg1), TREE_TYPE (arg1),
9125 variable2, cst));
9128 cst = int_const_binop (TREE_CODE (arg0) == TREE_CODE (arg1)
9129 ? MINUS_EXPR : PLUS_EXPR,
9130 const1, const2);
9131 if (!TREE_OVERFLOW (cst)
9132 && tree_int_cst_compare (const1, cst) == tree_int_cst_sgn (const1))
9134 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_COMPARISON);
9135 return fold_build2_loc (loc, code, type,
9136 fold_build2_loc (loc, TREE_CODE (arg0), TREE_TYPE (arg0),
9137 variable1, cst),
9138 variable2);
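/* Worked example (hypothetical values): for "x + 100 < y + 101" the
   first attempt computes cst = 101 - 100 = 1, which has the same sign
   as and smaller absolute value than 101, so the comparison folds to
   "x < y + 1" after the strict-overflow warning.  */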
9142 /* Transform comparisons of the form X * C1 CMP 0 to X CMP 0 in the
9143 signed arithmetic case. That form is created by the compiler
9144 often enough for folding it to be of value. One example is in
9145 computing loop trip counts after Operator Strength Reduction. */
9146 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0))
9147 && TREE_CODE (arg0) == MULT_EXPR
9148 && (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
9149 && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1)))
9150 && integer_zerop (arg1))
9152 tree const1 = TREE_OPERAND (arg0, 1);
9153 tree const2 = arg1; /* zero */
9154 tree variable1 = TREE_OPERAND (arg0, 0);
9155 enum tree_code cmp_code = code;
9157 /* Handle unfolded multiplication by zero. */
9158 if (integer_zerop (const1))
9159 return fold_build2_loc (loc, cmp_code, type, const1, const2);
9161 fold_overflow_warning (("assuming signed overflow does not occur when "
9162 "eliminating multiplication in comparison "
9163 "with zero"),
9164 WARN_STRICT_OVERFLOW_COMPARISON);
9166 /* If const1 is negative we swap the sense of the comparison. */
9167 if (tree_int_cst_sgn (const1) < 0)
9168 cmp_code = swap_tree_comparison (cmp_code);
9170 return fold_build2_loc (loc, cmp_code, type, variable1, const2);
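/* Worked example (hypothetical values): with signed overflow assumed
   undefined, "x * 4 > 0" folds to "x > 0", while "x * -2 > 0" swaps
   the comparison sense and folds to "x < 0".  */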
9173 tem = maybe_canonicalize_comparison (loc, code, type, arg0, arg1);
9174 if (tem)
9175 return tem;
9177 if (FLOAT_TYPE_P (TREE_TYPE (arg0)))
9179 tree targ0 = strip_float_extensions (arg0);
9180 tree targ1 = strip_float_extensions (arg1);
9181 tree newtype = TREE_TYPE (targ0);
9183 if (TYPE_PRECISION (TREE_TYPE (targ1)) > TYPE_PRECISION (newtype))
9184 newtype = TREE_TYPE (targ1);
9186 /* Fold (double)float1 CMP (double)float2 into float1 CMP float2. */
9187 if (TYPE_PRECISION (newtype) < TYPE_PRECISION (TREE_TYPE (arg0)))
9188 return fold_build2_loc (loc, code, type,
9189 fold_convert_loc (loc, newtype, targ0),
9190 fold_convert_loc (loc, newtype, targ1));
9192 /* (-a) CMP (-b) -> b CMP a */
9193 if (TREE_CODE (arg0) == NEGATE_EXPR
9194 && TREE_CODE (arg1) == NEGATE_EXPR)
9195 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg1, 0),
9196 TREE_OPERAND (arg0, 0));
9198 if (TREE_CODE (arg1) == REAL_CST)
9200 REAL_VALUE_TYPE cst;
9201 cst = TREE_REAL_CST (arg1);
9203 /* (-a) CMP CST -> a swap(CMP) (-CST) */
9204 if (TREE_CODE (arg0) == NEGATE_EXPR)
9205 return fold_build2_loc (loc, swap_tree_comparison (code), type,
9206 TREE_OPERAND (arg0, 0),
9207 build_real (TREE_TYPE (arg1),
9208 real_value_negate (&cst)));
9210 /* IEEE doesn't distinguish +0 and -0 in comparisons. */
9211 /* a CMP (-0) -> a CMP 0 */
9212 if (REAL_VALUE_MINUS_ZERO (cst))
9213 return fold_build2_loc (loc, code, type, arg0,
9214 build_real (TREE_TYPE (arg1), dconst0));
9216 /* x != NaN is always true, other ops are always false. */
9217 if (REAL_VALUE_ISNAN (cst)
9218 && ! HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg1))))
9220 tem = (code == NE_EXPR) ? integer_one_node : integer_zero_node;
9221 return omit_one_operand_loc (loc, type, tem, arg0);
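/* Illustrative example (hypothetical source): when signaling NaNs need
   not be honored, "x == __builtin_nan ("")" folds to 0 and
   "x != __builtin_nan ("")" folds to 1, keeping x only for its side
   effects.  */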
9224 /* Fold comparisons against infinity. */
9225 if (REAL_VALUE_ISINF (cst)
9226 && MODE_HAS_INFINITIES (TYPE_MODE (TREE_TYPE (arg1))))
9228 tem = fold_inf_compare (loc, code, type, arg0, arg1);
9229 if (tem != NULL_TREE)
9230 return tem;
9234 /* If this is a comparison of a real constant with a PLUS_EXPR
9235 or a MINUS_EXPR of a real constant, we can convert it into a
9236 comparison with a revised real constant, provided that
9237 unsafe_math_optimizations are enabled and no overflow occurs. */
9238 if (flag_unsafe_math_optimizations
9239 && TREE_CODE (arg1) == REAL_CST
9240 && (TREE_CODE (arg0) == PLUS_EXPR
9241 || TREE_CODE (arg0) == MINUS_EXPR)
9242 && TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
9243 && 0 != (tem = const_binop (TREE_CODE (arg0) == PLUS_EXPR
9244 ? MINUS_EXPR : PLUS_EXPR,
9245 arg1, TREE_OPERAND (arg0, 1)))
9246 && !TREE_OVERFLOW (tem))
9247 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0), tem);
9249 /* Likewise, we can simplify a comparison of a real constant with
9250 a MINUS_EXPR whose first operand is also a real constant, i.e.
9251 (c1 - x) < c2 becomes x > c1-c2. Reordering is allowed on
9252 floating-point types only if -fassociative-math is set. */
9253 if (flag_associative_math
9254 && TREE_CODE (arg1) == REAL_CST
9255 && TREE_CODE (arg0) == MINUS_EXPR
9256 && TREE_CODE (TREE_OPERAND (arg0, 0)) == REAL_CST
9257 && 0 != (tem = const_binop (MINUS_EXPR, TREE_OPERAND (arg0, 0),
9258 arg1))
9259 && !TREE_OVERFLOW (tem))
9260 return fold_build2_loc (loc, swap_tree_comparison (code), type,
9261 TREE_OPERAND (arg0, 1), tem);
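/* Worked example (hypothetical values): with -fassociative-math,
   "(10.0 - x) < 4.0" computes tem = 10.0 - 4.0 = 6.0 and folds to the
   swapped comparison "x > 6.0".  */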
9263 /* Fold comparisons against built-in math functions. */
9264 if (TREE_CODE (arg1) == REAL_CST
9265 && flag_unsafe_math_optimizations
9266 && ! flag_errno_math)
9268 enum built_in_function fcode = builtin_mathfn_code (arg0);
9270 if (fcode != END_BUILTINS)
9272 tem = fold_mathfn_compare (loc, fcode, code, type, arg0, arg1);
9273 if (tem != NULL_TREE)
9274 return tem;
9279 if (TREE_CODE (TREE_TYPE (arg0)) == INTEGER_TYPE
9280 && CONVERT_EXPR_P (arg0))
9282 /* If we are widening one operand of an integer comparison,
9283 see if the other operand is similarly being widened. Perhaps we
9284 can do the comparison in the narrower type. */
9285 tem = fold_widened_comparison (loc, code, type, arg0, arg1);
9286 if (tem)
9287 return tem;
9289 /* Or if we are changing signedness. */
9290 tem = fold_sign_changed_comparison (loc, code, type, arg0, arg1);
9291 if (tem)
9292 return tem;
9295 /* If this is comparing a constant with a MIN_EXPR or a MAX_EXPR of a
9296 constant, we can simplify it. */
9297 if (TREE_CODE (arg1) == INTEGER_CST
9298 && (TREE_CODE (arg0) == MIN_EXPR
9299 || TREE_CODE (arg0) == MAX_EXPR)
9300 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
9302 tem = optimize_minmax_comparison (loc, code, type, op0, op1);
9303 if (tem)
9304 return tem;
9307 /* Simplify comparison of something with itself. (For IEEE
9308 floating-point, we can only do some of these simplifications.) */
9309 if (operand_equal_p (arg0, arg1, 0))
9311 switch (code)
9313 case EQ_EXPR:
9314 if (! FLOAT_TYPE_P (TREE_TYPE (arg0))
9315 || ! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
9316 return constant_boolean_node (1, type);
9317 break;
9319 case GE_EXPR:
9320 case LE_EXPR:
9321 if (! FLOAT_TYPE_P (TREE_TYPE (arg0))
9322 || ! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
9323 return constant_boolean_node (1, type);
9324 return fold_build2_loc (loc, EQ_EXPR, type, arg0, arg1);
9326 case NE_EXPR:
9327 /* For NE, we can only do this simplification if the operands are
9328 integral or we don't honor IEEE floating-point NaNs. */
9329 if (FLOAT_TYPE_P (TREE_TYPE (arg0))
9330 && HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
9331 break;
9332 /* ... fall through ... */
9333 case GT_EXPR:
9334 case LT_EXPR:
9335 return constant_boolean_node (0, type);
9336 default:
9337 gcc_unreachable ();
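/* Illustrative example (hypothetical source): for integral x, "x >= x"
   folds directly to 1; for a float honoring NaNs it instead becomes
   "x == x", which is false exactly when x is a NaN.  */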
9341 /* If we are comparing an expression that just has comparisons
9342 of two integer values, arithmetic expressions of those comparisons,
9343 and constants, we can simplify it. There are only three cases
9344 to check: the two values can either be equal, the first can be
9345 greater, or the second can be greater. Fold the expression for
9346 those three values. Since each value must be 0 or 1, we have
9347 eight possibilities, each of which corresponds to the constant 0
9348 or 1 or one of the six possible comparisons.
9350 This handles common cases like (a > b) == 0 but also handles
9351 expressions like ((x > y) - (y > x)) > 0, which supposedly
9352 occur in macroized code. */
9354 if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg0) != INTEGER_CST)
9356 tree cval1 = 0, cval2 = 0;
9357 int save_p = 0;
9359 if (twoval_comparison_p (arg0, &cval1, &cval2, &save_p)
9360 /* Don't handle degenerate cases here; they should already
9361 have been handled anyway. */
9362 && cval1 != 0 && cval2 != 0
9363 && ! (TREE_CONSTANT (cval1) && TREE_CONSTANT (cval2))
9364 && TREE_TYPE (cval1) == TREE_TYPE (cval2)
9365 && INTEGRAL_TYPE_P (TREE_TYPE (cval1))
9366 && TYPE_MAX_VALUE (TREE_TYPE (cval1))
9367 && TYPE_MAX_VALUE (TREE_TYPE (cval2))
9368 && ! operand_equal_p (TYPE_MIN_VALUE (TREE_TYPE (cval1)),
9369 TYPE_MAX_VALUE (TREE_TYPE (cval2)), 0))
9371 tree maxval = TYPE_MAX_VALUE (TREE_TYPE (cval1));
9372 tree minval = TYPE_MIN_VALUE (TREE_TYPE (cval1));
9374 /* We can't just pass T to eval_subst in case cval1 or cval2
9375 was the same as ARG1. */
9377 tree high_result
9378 = fold_build2_loc (loc, code, type,
9379 eval_subst (loc, arg0, cval1, maxval,
9380 cval2, minval),
9381 arg1);
9382 tree equal_result
9383 = fold_build2_loc (loc, code, type,
9384 eval_subst (loc, arg0, cval1, maxval,
9385 cval2, maxval),
9386 arg1);
9387 tree low_result
9388 = fold_build2_loc (loc, code, type,
9389 eval_subst (loc, arg0, cval1, minval,
9390 cval2, maxval),
9391 arg1);
9393 /* All three of these results should be 0 or 1. Confirm they are.
9394 Then use those values to select the proper code to use. */
9396 if (TREE_CODE (high_result) == INTEGER_CST
9397 && TREE_CODE (equal_result) == INTEGER_CST
9398 && TREE_CODE (low_result) == INTEGER_CST)
9400 /* Make a 3-bit mask with the high-order bit being the
9401 value for `>', the next for `=', and the low for `<'. */
9402 switch ((integer_onep (high_result) * 4)
9403 + (integer_onep (equal_result) * 2)
9404 + integer_onep (low_result))
9406 case 0:
9407 /* Always false. */
9408 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
9409 case 1:
9410 code = LT_EXPR;
9411 break;
9412 case 2:
9413 code = EQ_EXPR;
9414 break;
9415 case 3:
9416 code = LE_EXPR;
9417 break;
9418 case 4:
9419 code = GT_EXPR;
9420 break;
9421 case 5:
9422 code = NE_EXPR;
9423 break;
9424 case 6:
9425 code = GE_EXPR;
9426 break;
9427 case 7:
9428 /* Always true. */
9429 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
9432 if (save_p)
9434 tem = save_expr (build2 (code, type, cval1, cval2));
9435 SET_EXPR_LOCATION (tem, loc);
9436 return tem;
9438 return fold_build2_loc (loc, code, type, cval1, cval2);
9443 /* We can fold X/C1 op C2 where C1 and C2 are integer constants
9444 into a single range test. */
9445 if ((TREE_CODE (arg0) == TRUNC_DIV_EXPR
9446 || TREE_CODE (arg0) == EXACT_DIV_EXPR)
9447 && TREE_CODE (arg1) == INTEGER_CST
9448 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
9449 && !integer_zerop (TREE_OPERAND (arg0, 1))
9450 && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1))
9451 && !TREE_OVERFLOW (arg1))
9453 tem = fold_div_compare (loc, code, type, arg0, arg1);
9454 if (tem != NULL_TREE)
9455 return tem;
9458 /* Fold ~X op ~Y as Y op X. */
9459 if (TREE_CODE (arg0) == BIT_NOT_EXPR
9460 && TREE_CODE (arg1) == BIT_NOT_EXPR)
9462 tree cmp_type = TREE_TYPE (TREE_OPERAND (arg0, 0));
9463 return fold_build2_loc (loc, code, type,
9464 fold_convert_loc (loc, cmp_type,
9465 TREE_OPERAND (arg1, 0)),
9466 TREE_OPERAND (arg0, 0));
9469 /* Fold ~X op C as X op' ~C, where op' is the swapped comparison. */
9470 if (TREE_CODE (arg0) == BIT_NOT_EXPR
9471 && TREE_CODE (arg1) == INTEGER_CST)
9473 tree cmp_type = TREE_TYPE (TREE_OPERAND (arg0, 0));
9474 return fold_build2_loc (loc, swap_tree_comparison (code), type,
9475 TREE_OPERAND (arg0, 0),
9476 fold_build1_loc (loc, BIT_NOT_EXPR, cmp_type,
9477 fold_convert_loc (loc, cmp_type, arg1)));
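/* Worked example (hypothetical values): "~x == ~y" becomes "y == x" by
   the first transform, and "~x < 5" becomes "x > ~5", i.e. "x > -6"
   in two's complement, by the second.  */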
9480 return NULL_TREE;
9484 /* Subroutine of fold_binary. Optimize complex multiplications of the
9485 form z * conj(z), as pow(realpart(z),2) + pow(imagpart(z),2). The
9486 argument EXPR represents the expression "z" of type TYPE. */
9488 static tree
9489 fold_mult_zconjz (location_t loc, tree type, tree expr)
9491 tree itype = TREE_TYPE (type);
9492 tree rpart, ipart, tem;
9494 if (TREE_CODE (expr) == COMPLEX_EXPR)
9496 rpart = TREE_OPERAND (expr, 0);
9497 ipart = TREE_OPERAND (expr, 1);
9499 else if (TREE_CODE (expr) == COMPLEX_CST)
9501 rpart = TREE_REALPART (expr);
9502 ipart = TREE_IMAGPART (expr);
9504 else
9506 expr = save_expr (expr);
9507 rpart = fold_build1_loc (loc, REALPART_EXPR, itype, expr);
9508 ipart = fold_build1_loc (loc, IMAGPART_EXPR, itype, expr);
9511 rpart = save_expr (rpart);
9512 ipart = save_expr (ipart);
9513 tem = fold_build2_loc (loc, PLUS_EXPR, itype,
9514 fold_build2_loc (loc, MULT_EXPR, itype, rpart, rpart),
9515 fold_build2_loc (loc, MULT_EXPR, itype, ipart, ipart));
9516 return fold_build2_loc (loc, COMPLEX_EXPR, type, tem,
9517 build_zero_cst (itype));
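/* Illustrative example (hypothetical source): for a complex integer z,
   "z * ~z" (z times its conjugate) is built here as
   COMPLEX_EXPR <r*r + i*i, 0>; e.g. z = 3+4i yields 25+0i.  */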
9521 /* Subroutine of fold_binary. If P is the value of EXPR, computes
9522 power-of-two M and (arbitrary) N such that M divides (P-N). This condition
9523 guarantees that P and N have the same least significant log2(M) bits.
9524 N is not otherwise constrained. In particular, N is not normalized to
9525 0 <= N < M as is common. In general, the precise value of P is unknown.
9526 M is chosen as large as possible such that constant N can be determined.
9528 Returns M and sets *RESIDUE to N.
9530 If ALLOW_FUNC_ALIGN is true, do take functions' DECL_ALIGN_UNIT into
9531 account. This is not always possible due to PR 35705.
9534 static unsigned HOST_WIDE_INT
9535 get_pointer_modulus_and_residue (tree expr, unsigned HOST_WIDE_INT *residue,
9536 bool allow_func_align)
9538 enum tree_code code;
9540 *residue = 0;
9542 code = TREE_CODE (expr);
9543 if (code == ADDR_EXPR)
9545 unsigned int bitalign;
9546 bitalign = get_object_alignment_1 (TREE_OPERAND (expr, 0), residue);
9547 *residue /= BITS_PER_UNIT;
9548 return bitalign / BITS_PER_UNIT;
9550 else if (code == POINTER_PLUS_EXPR)
9552 tree op0, op1;
9553 unsigned HOST_WIDE_INT modulus;
9554 enum tree_code inner_code;
9556 op0 = TREE_OPERAND (expr, 0);
9557 STRIP_NOPS (op0);
9558 modulus = get_pointer_modulus_and_residue (op0, residue,
9559 allow_func_align);
9561 op1 = TREE_OPERAND (expr, 1);
9562 STRIP_NOPS (op1);
9563 inner_code = TREE_CODE (op1);
9564 if (inner_code == INTEGER_CST)
9566 *residue += TREE_INT_CST_LOW (op1);
9567 return modulus;
9569 else if (inner_code == MULT_EXPR)
9571 op1 = TREE_OPERAND (op1, 1);
9572 if (TREE_CODE (op1) == INTEGER_CST)
9574 unsigned HOST_WIDE_INT align;
9576 /* Compute the greatest power-of-2 divisor of op1. */
9577 align = TREE_INT_CST_LOW (op1);
9578 align &= -align;
9580 /* If align is non-zero and less than modulus, replace
9581 modulus with align. If align is 0, then either op1 is 0
9582 or the greatest power-of-2 divisor of op1 doesn't fit in an
9583 unsigned HOST_WIDE_INT. In either case, no additional
9584 constraint is imposed. */
9585 if (align)
9586 modulus = MIN (modulus, align);
9588 return modulus;
9593 /* If we get here, we were unable to determine anything useful about the
9594 expression. */
9595 return 1;
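/* Worked example (hypothetical declarations): for
   "char buf[16] __attribute__ ((aligned (8)));", the address
   "&buf[0] + 3" yields modulus M == 8 and residue N == 3, i.e. the
   pointer value is known to equal 3 modulo 8.  */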
9598 /* Helper function for fold_vec_perm. Store elements of VECTOR_CST or
9599 CONSTRUCTOR ARG into array ELTS and return true if successful. */
9601 static bool
9602 vec_cst_ctor_to_array (tree arg, tree *elts)
9604 unsigned int nelts = TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg)), i;
9606 if (TREE_CODE (arg) == VECTOR_CST)
9608 tree t;
9610 for (i = 0, t = TREE_VECTOR_CST_ELTS (arg);
9611 i < nelts && t; i++, t = TREE_CHAIN (t))
9612 elts[i] = TREE_VALUE (t);
9613 if (t)
9614 return false;
9616 else if (TREE_CODE (arg) == CONSTRUCTOR)
9618 constructor_elt *elt;
9620 FOR_EACH_VEC_ELT (constructor_elt, CONSTRUCTOR_ELTS (arg), i, elt)
9621 if (i >= nelts)
9622 return false;
9623 else
9624 elts[i] = elt->value;
9626 else
9627 return false;
9628 for (; i < nelts; i++)
9629 elts[i]
9630 = fold_convert (TREE_TYPE (TREE_TYPE (arg)), integer_zero_node);
9631 return true;
9634 /* Attempt to fold a vector permutation of vectors ARG0 and ARG1 using the
9635 SEL selector. Return the folded VECTOR_CST or CONSTRUCTOR if successful,
9636 NULL_TREE otherwise. */
9638 static tree
9639 fold_vec_perm (tree type, tree arg0, tree arg1, const unsigned char *sel)
9641 unsigned int nelts = TYPE_VECTOR_SUBPARTS (type), i;
9642 tree *elts;
9643 bool need_ctor = false;
9645 gcc_assert (TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0)) == nelts
9646 && TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg1)) == nelts);
9647 if (TREE_TYPE (TREE_TYPE (arg0)) != TREE_TYPE (type)
9648 || TREE_TYPE (TREE_TYPE (arg1)) != TREE_TYPE (type))
9649 return NULL_TREE;
9651 elts = XALLOCAVEC (tree, nelts * 3);
9652 if (!vec_cst_ctor_to_array (arg0, elts)
9653 || !vec_cst_ctor_to_array (arg1, elts + nelts))
9654 return NULL_TREE;
9656 for (i = 0; i < nelts; i++)
9658 if (!CONSTANT_CLASS_P (elts[sel[i]]))
9659 need_ctor = true;
9660 elts[i + 2 * nelts] = unshare_expr (elts[sel[i]]);
9663 if (need_ctor)
9665 VEC(constructor_elt,gc) *v = VEC_alloc (constructor_elt, gc, nelts);
9666 for (i = 0; i < nelts; i++)
9667 CONSTRUCTOR_APPEND_ELT (v, NULL_TREE, elts[2 * nelts + i]);
9668 return build_constructor (type, v);
9670 else
9672 tree vals = NULL_TREE;
9673 for (i = 0; i < nelts; i++)
9674 vals = tree_cons (NULL_TREE, elts[3 * nelts - i - 1], vals);
9675 return build_vector (type, vals);
9679 /* Try to fold a pointer difference of type TYPE between two address
9680 expressions of array references AREF0 and AREF1 using location LOC. Return a
9681 simplified expression for the difference or NULL_TREE. */
9683 static tree
9684 fold_addr_of_array_ref_difference (location_t loc, tree type,
9685 tree aref0, tree aref1)
9687 tree base0 = TREE_OPERAND (aref0, 0);
9688 tree base1 = TREE_OPERAND (aref1, 0);
9689 tree base_offset = build_int_cst (type, 0);
9691 /* If the bases are array references as well, recurse. If the bases
9692 are pointer indirections, compute the difference of the pointers.
9693 If the bases are equal, we are set. */
9694 if ((TREE_CODE (base0) == ARRAY_REF
9695 && TREE_CODE (base1) == ARRAY_REF
9696 && (base_offset
9697 = fold_addr_of_array_ref_difference (loc, type, base0, base1)))
9698 || (INDIRECT_REF_P (base0)
9699 && INDIRECT_REF_P (base1)
9700 && (base_offset = fold_binary_loc (loc, MINUS_EXPR, type,
9701 TREE_OPERAND (base0, 0),
9702 TREE_OPERAND (base1, 0))))
9703 || operand_equal_p (base0, base1, 0))
9705 tree op0 = fold_convert_loc (loc, type, TREE_OPERAND (aref0, 1));
9706 tree op1 = fold_convert_loc (loc, type, TREE_OPERAND (aref1, 1));
9707 tree esz = fold_convert_loc (loc, type, array_ref_element_size (aref0));
9708 tree diff = build2 (MINUS_EXPR, type, op0, op1);
9709 return fold_build2_loc (loc, PLUS_EXPR, type,
9710 base_offset,
9711 fold_build2_loc (loc, MULT_EXPR, type,
9712 diff, esz));
9714 return NULL_TREE;
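/* Worked example (hypothetical declarations): for "int a[10];" with
   32-bit int, the byte difference of "&a[i]" and "&a[j]" folds here to
   0 + (i - j) * 4, the base offset plus the index difference scaled by
   the element size.  */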
9717 /* Fold a binary expression of code CODE and type TYPE with operands
9718 OP0 and OP1. LOC is the location of the resulting expression.
9719 Return the folded expression if folding is successful. Otherwise,
9720 return NULL_TREE. */
9722 tree
9723 fold_binary_loc (location_t loc,
9724 enum tree_code code, tree type, tree op0, tree op1)
9726 enum tree_code_class kind = TREE_CODE_CLASS (code);
9727 tree arg0, arg1, tem;
9728 tree t1 = NULL_TREE;
9729 bool strict_overflow_p;
9731 gcc_assert (IS_EXPR_CODE_CLASS (kind)
9732 && TREE_CODE_LENGTH (code) == 2
9733 && op0 != NULL_TREE
9734 && op1 != NULL_TREE);
9736 arg0 = op0;
9737 arg1 = op1;
9739 /* Strip any conversions that don't change the mode. This is
9740 safe for every expression, except for a comparison expression
9741 because its signedness is derived from its operands. So, in
9742 the latter case, only strip conversions that don't change the
9743 signedness. MIN_EXPR/MAX_EXPR also need the signedness of their
9744 arguments preserved.
9746 Note that this is done as an internal manipulation within the
9747 constant folder, in order to find the simplest representation
9748 of the arguments so that their form can be studied. In any
9749 case, the appropriate type conversions should be put back in
9750 the tree that will get out of the constant folder. */
9752 if (kind == tcc_comparison || code == MIN_EXPR || code == MAX_EXPR)
9754 STRIP_SIGN_NOPS (arg0);
9755 STRIP_SIGN_NOPS (arg1);
9757 else
9759 STRIP_NOPS (arg0);
9760 STRIP_NOPS (arg1);
9763 /* Note that TREE_CONSTANT isn't enough: static var addresses are
9764 constant but we can't do arithmetic on them. */
9765 if ((TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
9766 || (TREE_CODE (arg0) == REAL_CST && TREE_CODE (arg1) == REAL_CST)
9767 || (TREE_CODE (arg0) == FIXED_CST && TREE_CODE (arg1) == FIXED_CST)
9768 || (TREE_CODE (arg0) == FIXED_CST && TREE_CODE (arg1) == INTEGER_CST)
9769 || (TREE_CODE (arg0) == COMPLEX_CST && TREE_CODE (arg1) == COMPLEX_CST)
9770 || (TREE_CODE (arg0) == VECTOR_CST && TREE_CODE (arg1) == VECTOR_CST))
9772 if (kind == tcc_binary)
9774 /* Make sure type and arg0 have the same saturating flag. */
9775 gcc_assert (TYPE_SATURATING (type)
9776 == TYPE_SATURATING (TREE_TYPE (arg0)));
9777 tem = const_binop (code, arg0, arg1);
9779 else if (kind == tcc_comparison)
9780 tem = fold_relational_const (code, type, arg0, arg1);
9781 else
9782 tem = NULL_TREE;
9784 if (tem != NULL_TREE)
9786 if (TREE_TYPE (tem) != type)
9787 tem = fold_convert_loc (loc, type, tem);
9788 return tem;
9792 /* If this is a commutative operation, and ARG0 is a constant, move it
9793 to ARG1 to reduce the number of tests below. */
9794 if (commutative_tree_code (code)
9795 && tree_swap_operands_p (arg0, arg1, true))
9796 return fold_build2_loc (loc, code, type, op1, op0);
9798 /* ARG0 is the first operand of EXPR, and ARG1 is the second operand.
9800 First check for cases where an arithmetic operation is applied to a
9801 compound, conditional, or comparison operation. Push the arithmetic
9802 operation inside the compound or conditional to see if any folding
9803 can then be done. Convert comparison to conditional for this purpose.
9804 The also optimizes non-constant cases that used to be done in
9805 expand_expr.
9807 Before we do that, see if this is a BIT_AND_EXPR or a BIT_IOR_EXPR,
9808 one of the operands is a comparison and the other is a comparison, a
9809 BIT_AND_EXPR with the constant 1, or a truth value. In that case, the
9810 code below would make the expression more complex. Change it to a
9811 TRUTH_{AND,OR}_EXPR. Likewise, convert a similar NE_EXPR to
9812 TRUTH_XOR_EXPR and an EQ_EXPR to the inversion of a TRUTH_XOR_EXPR. */
9814 if ((code == BIT_AND_EXPR || code == BIT_IOR_EXPR
9815 || code == EQ_EXPR || code == NE_EXPR)
9816 && ((truth_value_p (TREE_CODE (arg0))
9817 && (truth_value_p (TREE_CODE (arg1))
9818 || (TREE_CODE (arg1) == BIT_AND_EXPR
9819 && integer_onep (TREE_OPERAND (arg1, 1)))))
9820 || (truth_value_p (TREE_CODE (arg1))
9821 && (truth_value_p (TREE_CODE (arg0))
9822 || (TREE_CODE (arg0) == BIT_AND_EXPR
9823 && integer_onep (TREE_OPERAND (arg0, 1)))))))
9825 tem = fold_build2_loc (loc, code == BIT_AND_EXPR ? TRUTH_AND_EXPR
9826 : code == BIT_IOR_EXPR ? TRUTH_OR_EXPR
9827 : TRUTH_XOR_EXPR,
9828 boolean_type_node,
9829 fold_convert_loc (loc, boolean_type_node, arg0),
9830 fold_convert_loc (loc, boolean_type_node, arg1));
9832 if (code == EQ_EXPR)
9833 tem = invert_truthvalue_loc (loc, tem);
9835 return fold_convert_loc (loc, type, tem);
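/* Illustrative example (hypothetical source): "(a < b) & (c < d)"
   becomes TRUTH_AND_EXPR of the two comparisons, and
   "(a < b) == (c < d)" becomes the inversion of their
   TRUTH_XOR_EXPR.  */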
9838 if (TREE_CODE_CLASS (code) == tcc_binary
9839 || TREE_CODE_CLASS (code) == tcc_comparison)
9841 if (TREE_CODE (arg0) == COMPOUND_EXPR)
9843 tem = fold_build2_loc (loc, code, type,
9844 fold_convert_loc (loc, TREE_TYPE (op0),
9845 TREE_OPERAND (arg0, 1)), op1);
9846 return build2_loc (loc, COMPOUND_EXPR, type, TREE_OPERAND (arg0, 0),
9847 tem);
9849 if (TREE_CODE (arg1) == COMPOUND_EXPR
9850 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
9852 tem = fold_build2_loc (loc, code, type, op0,
9853 fold_convert_loc (loc, TREE_TYPE (op1),
9854 TREE_OPERAND (arg1, 1)));
9855 return build2_loc (loc, COMPOUND_EXPR, type, TREE_OPERAND (arg1, 0),
9856 tem);
9859 if (TREE_CODE (arg0) == COND_EXPR || COMPARISON_CLASS_P (arg0))
9861 tem = fold_binary_op_with_conditional_arg (loc, code, type, op0, op1,
9862 arg0, arg1,
9863 /*cond_first_p=*/1);
9864 if (tem != NULL_TREE)
9865 return tem;
9868 if (TREE_CODE (arg1) == COND_EXPR || COMPARISON_CLASS_P (arg1))
9870 tem = fold_binary_op_with_conditional_arg (loc, code, type, op0, op1,
9871 arg1, arg0,
9872 /*cond_first_p=*/0);
9873 if (tem != NULL_TREE)
9874 return tem;
9878 switch (code)
9880 case MEM_REF:
9881 /* MEM[&MEM[p, CST1], CST2] -> MEM[p, CST1 + CST2]. */
9882 if (TREE_CODE (arg0) == ADDR_EXPR
9883 && TREE_CODE (TREE_OPERAND (arg0, 0)) == MEM_REF)
9885 tree iref = TREE_OPERAND (arg0, 0);
9886 return fold_build2 (MEM_REF, type,
9887 TREE_OPERAND (iref, 0),
9888 int_const_binop (PLUS_EXPR, arg1,
9889 TREE_OPERAND (iref, 1)));
9892 /* MEM[&a.b, CST2] -> MEM[&a, offsetof (a, b) + CST2]. */
9893 if (TREE_CODE (arg0) == ADDR_EXPR
9894 && handled_component_p (TREE_OPERAND (arg0, 0)))
9896 tree base;
9897 HOST_WIDE_INT coffset;
9898 base = get_addr_base_and_unit_offset (TREE_OPERAND (arg0, 0),
9899 &coffset);
9900 if (!base)
9901 return NULL_TREE;
9902 return fold_build2 (MEM_REF, type,
9903 build_fold_addr_expr (base),
9904 int_const_binop (PLUS_EXPR, arg1,
9905 size_int (coffset)));
9908 return NULL_TREE;
9910 case POINTER_PLUS_EXPR:
9911 /* 0 +p index -> (type)index */
9912 if (integer_zerop (arg0))
9913 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
9915 /* PTR +p 0 -> PTR */
9916 if (integer_zerop (arg1))
9917 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
9919 /* INT +p INT -> (PTR)(INT + INT). Stripping types allows for this. */
9920 if (INTEGRAL_TYPE_P (TREE_TYPE (arg1))
9921 && INTEGRAL_TYPE_P (TREE_TYPE (arg0)))
9922 return fold_convert_loc (loc, type,
9923 fold_build2_loc (loc, PLUS_EXPR, sizetype,
9924 fold_convert_loc (loc, sizetype,
9925 arg1),
9926 fold_convert_loc (loc, sizetype,
9927 arg0)));
9929 /* (PTR +p B) +p A -> PTR +p (B + A) */
9930 if (TREE_CODE (arg0) == POINTER_PLUS_EXPR)
9932 tree inner;
9933 tree arg01 = fold_convert_loc (loc, sizetype, TREE_OPERAND (arg0, 1));
9934 tree arg00 = TREE_OPERAND (arg0, 0);
9935 inner = fold_build2_loc (loc, PLUS_EXPR, sizetype,
9936 arg01, fold_convert_loc (loc, sizetype, arg1));
9937 return fold_convert_loc (loc, type,
9938 fold_build_pointer_plus_loc (loc,
9939 arg00, inner));
9942 /* PTR_CST +p CST -> CST1 */
9943 if (TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
9944 return fold_build2_loc (loc, PLUS_EXPR, type, arg0,
9945 fold_convert_loc (loc, type, arg1));
9947 /* Try replacing &a[i1] +p c * i2 with &a[i1 + i2], if c is the step
9948 of the array. The loop optimizer sometimes produces this type of
9949 expression. */
9950 if (TREE_CODE (arg0) == ADDR_EXPR)
9952 tem = try_move_mult_to_index (loc, arg0,
9953 fold_convert_loc (loc, sizetype, arg1));
9954 if (tem)
9955 return fold_convert_loc (loc, type, tem);
9958 return NULL_TREE;
9960 case PLUS_EXPR:
9961 /* A + (-B) -> A - B */
9962 if (TREE_CODE (arg1) == NEGATE_EXPR)
9963 return fold_build2_loc (loc, MINUS_EXPR, type,
9964 fold_convert_loc (loc, type, arg0),
9965 fold_convert_loc (loc, type,
9966 TREE_OPERAND (arg1, 0)));
9967 /* (-A) + B -> B - A */
9968 if (TREE_CODE (arg0) == NEGATE_EXPR
9969 && reorder_operands_p (TREE_OPERAND (arg0, 0), arg1))
9970 return fold_build2_loc (loc, MINUS_EXPR, type,
9971 fold_convert_loc (loc, type, arg1),
9972 fold_convert_loc (loc, type,
9973 TREE_OPERAND (arg0, 0)));
9975 if (INTEGRAL_TYPE_P (type))
9977 /* Convert ~A + 1 to -A. */
9978 if (TREE_CODE (arg0) == BIT_NOT_EXPR
9979 && integer_onep (arg1))
9980 return fold_build1_loc (loc, NEGATE_EXPR, type,
9981 fold_convert_loc (loc, type,
9982 TREE_OPERAND (arg0, 0)));
9984 /* ~X + X is -1. */
9985 if (TREE_CODE (arg0) == BIT_NOT_EXPR
9986 && !TYPE_OVERFLOW_TRAPS (type))
9988 tree tem = TREE_OPERAND (arg0, 0);
9990 STRIP_NOPS (tem);
9991 if (operand_equal_p (tem, arg1, 0))
9993 t1 = build_int_cst_type (type, -1);
9994 return omit_one_operand_loc (loc, type, t1, arg1);
9998 /* X + ~X is -1. */
9999 if (TREE_CODE (arg1) == BIT_NOT_EXPR
10000 && !TYPE_OVERFLOW_TRAPS (type))
10002 tree tem = TREE_OPERAND (arg1, 0);
10004 STRIP_NOPS (tem);
10005 if (operand_equal_p (arg0, tem, 0))
10007 t1 = build_int_cst_type (type, -1);
10008 return omit_one_operand_loc (loc, type, t1, arg0);
10012 /* X + (X / CST) * -CST is X % CST. */
10013 if (TREE_CODE (arg1) == MULT_EXPR
10014 && TREE_CODE (TREE_OPERAND (arg1, 0)) == TRUNC_DIV_EXPR
10015 && operand_equal_p (arg0,
10016 TREE_OPERAND (TREE_OPERAND (arg1, 0), 0), 0))
10018 tree cst0 = TREE_OPERAND (TREE_OPERAND (arg1, 0), 1);
10019 tree cst1 = TREE_OPERAND (arg1, 1);
10020 tree sum = fold_binary_loc (loc, PLUS_EXPR, TREE_TYPE (cst1),
10021 cst1, cst0);
10022 if (sum && integer_zerop (sum))
10023 return fold_convert_loc (loc, type,
10024 fold_build2_loc (loc, TRUNC_MOD_EXPR,
10025 TREE_TYPE (arg0), arg0,
10026 cst0));
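/* Worked example (hypothetical values): "x + (x / 16) * -16" folds to
   "x % 16", since the constants -16 and 16 sum to zero and
   x % c == x - (x / c) * c holds for truncating division.  */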
10030 /* Handle (A1 * C1) + (A2 * C2) with A1, A2 or C1, C2 being the
10031 same or one. Make sure type is not saturating.
10032 fold_plusminus_mult_expr will re-associate. */
10033 if ((TREE_CODE (arg0) == MULT_EXPR
10034 || TREE_CODE (arg1) == MULT_EXPR)
10035 && !TYPE_SATURATING (type)
10036 && (!FLOAT_TYPE_P (type) || flag_associative_math))
10038 tree tem = fold_plusminus_mult_expr (loc, code, type, arg0, arg1);
10039 if (tem)
10040 return tem;
10043 if (! FLOAT_TYPE_P (type))
10045 if (integer_zerop (arg1))
10046 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
10048 /* If we are adding two BIT_AND_EXPR's, both of which are and'ing
10049 with a constant, and the two constants have no bits in common,
10050 we should treat this as a BIT_IOR_EXPR since this may produce more
10051 simplifications. */
10052 if (TREE_CODE (arg0) == BIT_AND_EXPR
10053 && TREE_CODE (arg1) == BIT_AND_EXPR
10054 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
10055 && TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
10056 && integer_zerop (const_binop (BIT_AND_EXPR,
10057 TREE_OPERAND (arg0, 1),
10058 TREE_OPERAND (arg1, 1))))
10060 code = BIT_IOR_EXPR;
10061 goto bit_ior;
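/* Worked example (hypothetical values): "(x & 0xF0) + (y & 0x0F)" has
   disjoint mask bits, so no carries can propagate and the addition is
   handled as "(x & 0xF0) | (y & 0x0F)".  */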
10064 /* Reassociate (plus (plus (mult) (foo)) (mult)) as
10065 (plus (plus (mult) (mult)) (foo)) so that we can
10066 take advantage of the factoring cases below. */
10067 if (TYPE_OVERFLOW_WRAPS (type)
10068 && (((TREE_CODE (arg0) == PLUS_EXPR
10069 || TREE_CODE (arg0) == MINUS_EXPR)
10070 && TREE_CODE (arg1) == MULT_EXPR)
10071 || ((TREE_CODE (arg1) == PLUS_EXPR
10072 || TREE_CODE (arg1) == MINUS_EXPR)
10073 && TREE_CODE (arg0) == MULT_EXPR)))
10075 tree parg0, parg1, parg, marg;
10076 enum tree_code pcode;
10078 if (TREE_CODE (arg1) == MULT_EXPR)
10079 parg = arg0, marg = arg1;
10080 else
10081 parg = arg1, marg = arg0;
10082 pcode = TREE_CODE (parg);
10083 parg0 = TREE_OPERAND (parg, 0);
10084 parg1 = TREE_OPERAND (parg, 1);
10085 STRIP_NOPS (parg0);
10086 STRIP_NOPS (parg1);
10088 if (TREE_CODE (parg0) == MULT_EXPR
10089 && TREE_CODE (parg1) != MULT_EXPR)
10090 return fold_build2_loc (loc, pcode, type,
10091 fold_build2_loc (loc, PLUS_EXPR, type,
10092 fold_convert_loc (loc, type,
10093 parg0),
10094 fold_convert_loc (loc, type,
10095 marg)),
10096 fold_convert_loc (loc, type, parg1));
10097 if (TREE_CODE (parg0) != MULT_EXPR
10098 && TREE_CODE (parg1) == MULT_EXPR)
10099 return
10100 fold_build2_loc (loc, PLUS_EXPR, type,
10101 fold_convert_loc (loc, type, parg0),
10102 fold_build2_loc (loc, pcode, type,
10103 fold_convert_loc (loc, type, marg),
10104 fold_convert_loc (loc, type,
10105 parg1)));
10108 else
10110 /* See if ARG1 is zero and X + ARG1 reduces to X. */
10111 if (fold_real_zero_addition_p (TREE_TYPE (arg0), arg1, 0))
10112 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
10114 /* Likewise if the operands are reversed. */
10115 if (fold_real_zero_addition_p (TREE_TYPE (arg1), arg0, 0))
10116 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
10118 /* Convert X + -C into X - C. */
10119 if (TREE_CODE (arg1) == REAL_CST
10120 && REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg1)))
10122 tem = fold_negate_const (arg1, type);
10123 if (!TREE_OVERFLOW (arg1) || !flag_trapping_math)
10124 return fold_build2_loc (loc, MINUS_EXPR, type,
10125 fold_convert_loc (loc, type, arg0),
10126 fold_convert_loc (loc, type, tem));
10129 /* Fold __complex__ ( x, 0 ) + __complex__ ( 0, y )
10130 to __complex__ ( x, y ). This is not the same for SNaNs or
10131 if signed zeros are involved. */
10132 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
10133 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
10134 && COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0)))
10136 tree rtype = TREE_TYPE (TREE_TYPE (arg0));
10137 tree arg0r = fold_unary_loc (loc, REALPART_EXPR, rtype, arg0);
10138 tree arg0i = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg0);
10139 bool arg0rz = false, arg0iz = false;
10140 if ((arg0r && (arg0rz = real_zerop (arg0r)))
10141 || (arg0i && (arg0iz = real_zerop (arg0i))))
10143 tree arg1r = fold_unary_loc (loc, REALPART_EXPR, rtype, arg1);
10144 tree arg1i = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg1);
10145 if (arg0rz && arg1i && real_zerop (arg1i))
10147 tree rp = arg1r ? arg1r
10148 : build1 (REALPART_EXPR, rtype, arg1);
10149 tree ip = arg0i ? arg0i
10150 : build1 (IMAGPART_EXPR, rtype, arg0);
10151 return fold_build2_loc (loc, COMPLEX_EXPR, type, rp, ip);
10153 else if (arg0iz && arg1r && real_zerop (arg1r))
10155 tree rp = arg0r ? arg0r
10156 : build1 (REALPART_EXPR, rtype, arg0);
10157 tree ip = arg1i ? arg1i
10158 : build1 (IMAGPART_EXPR, rtype, arg1);
10159 return fold_build2_loc (loc, COMPLEX_EXPR, type, rp, ip);
10164 if (flag_unsafe_math_optimizations
10165 && (TREE_CODE (arg0) == RDIV_EXPR || TREE_CODE (arg0) == MULT_EXPR)
10166 && (TREE_CODE (arg1) == RDIV_EXPR || TREE_CODE (arg1) == MULT_EXPR)
10167 && (tem = distribute_real_division (loc, code, type, arg0, arg1)))
10168 return tem;
10170 /* Convert x+x into x*2.0. */
10171 if (operand_equal_p (arg0, arg1, 0)
10172 && SCALAR_FLOAT_TYPE_P (type))
10173 return fold_build2_loc (loc, MULT_EXPR, type, arg0,
10174 build_real (type, dconst2));
10176 /* Convert a + (b*c + d*e) into (a + b*c) + d*e.
10177 We associate floats only if the user has specified
10178 -fassociative-math. */
10179 if (flag_associative_math
10180 && TREE_CODE (arg1) == PLUS_EXPR
10181 && TREE_CODE (arg0) != MULT_EXPR)
10183 tree tree10 = TREE_OPERAND (arg1, 0);
10184 tree tree11 = TREE_OPERAND (arg1, 1);
10185 if (TREE_CODE (tree11) == MULT_EXPR
10186 && TREE_CODE (tree10) == MULT_EXPR)
10188 tree tree0;
10189 tree0 = fold_build2_loc (loc, PLUS_EXPR, type, arg0, tree10);
10190 return fold_build2_loc (loc, PLUS_EXPR, type, tree0, tree11);
10193 /* Convert (b*c + d*e) + a into b*c + (d*e + a).
10194 We associate floats only if the user has specified
10195 -fassociative-math. */
10196 if (flag_associative_math
10197 && TREE_CODE (arg0) == PLUS_EXPR
10198 && TREE_CODE (arg1) != MULT_EXPR)
10200 tree tree00 = TREE_OPERAND (arg0, 0);
10201 tree tree01 = TREE_OPERAND (arg0, 1);
10202 if (TREE_CODE (tree01) == MULT_EXPR
10203 && TREE_CODE (tree00) == MULT_EXPR)
10205 tree tree0;
10206 tree0 = fold_build2_loc (loc, PLUS_EXPR, type, tree01, arg1);
10207 return fold_build2_loc (loc, PLUS_EXPR, type, tree00, tree0);
10212 bit_rotate:
10213 /* (A << C1) + (A >> C2) if A is unsigned and C1+C2 is the size of A
10214 is a rotate of A by C1 bits. */
10215 /* (A << B) + (A >> (Z - B)) if A is unsigned and Z is the size of A
10216 is a rotate of A by B bits. */
10218 enum tree_code code0, code1;
10219 tree rtype;
10220 code0 = TREE_CODE (arg0);
10221 code1 = TREE_CODE (arg1);
10222 if (((code0 == RSHIFT_EXPR && code1 == LSHIFT_EXPR)
10223 || (code1 == RSHIFT_EXPR && code0 == LSHIFT_EXPR))
10224 && operand_equal_p (TREE_OPERAND (arg0, 0),
10225 TREE_OPERAND (arg1, 0), 0)
10226 && (rtype = TREE_TYPE (TREE_OPERAND (arg0, 0)),
10227 TYPE_UNSIGNED (rtype))
10228 /* Only create rotates in complete modes. Other cases are not
10229 expanded properly. */
10230 && TYPE_PRECISION (rtype) == GET_MODE_PRECISION (TYPE_MODE (rtype)))
10232 tree tree01, tree11;
10233 enum tree_code code01, code11;
10235 tree01 = TREE_OPERAND (arg0, 1);
10236 tree11 = TREE_OPERAND (arg1, 1);
10237 STRIP_NOPS (tree01);
10238 STRIP_NOPS (tree11);
10239 code01 = TREE_CODE (tree01);
10240 code11 = TREE_CODE (tree11);
10241 if (code01 == INTEGER_CST
10242 && code11 == INTEGER_CST
10243 && TREE_INT_CST_HIGH (tree01) == 0
10244 && TREE_INT_CST_HIGH (tree11) == 0
10245 && ((TREE_INT_CST_LOW (tree01) + TREE_INT_CST_LOW (tree11))
10246 == TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg0, 0)))))
10248 tem = build2_loc (loc, LROTATE_EXPR,
10249 TREE_TYPE (TREE_OPERAND (arg0, 0)),
10250 TREE_OPERAND (arg0, 0),
10251 code0 == LSHIFT_EXPR ? tree01 : tree11);
10252 return fold_convert_loc (loc, type, tem);
10254 else if (code11 == MINUS_EXPR)
10256 tree tree110, tree111;
10257 tree110 = TREE_OPERAND (tree11, 0);
10258 tree111 = TREE_OPERAND (tree11, 1);
10259 STRIP_NOPS (tree110);
10260 STRIP_NOPS (tree111);
10261 if (TREE_CODE (tree110) == INTEGER_CST
10262 && 0 == compare_tree_int (tree110,
10263 TYPE_PRECISION
10264 (TREE_TYPE (TREE_OPERAND
10265 (arg0, 0))))
10266 && operand_equal_p (tree01, tree111, 0))
10267 return
10268 fold_convert_loc (loc, type,
10269 build2 ((code0 == LSHIFT_EXPR
10270 ? LROTATE_EXPR
10271 : RROTATE_EXPR),
10272 TREE_TYPE (TREE_OPERAND (arg0, 0)),
10273 TREE_OPERAND (arg0, 0), tree01));
10275 else if (code01 == MINUS_EXPR)
10277 tree tree010, tree011;
10278 tree010 = TREE_OPERAND (tree01, 0);
10279 tree011 = TREE_OPERAND (tree01, 1);
10280 STRIP_NOPS (tree010);
10281 STRIP_NOPS (tree011);
10282 if (TREE_CODE (tree010) == INTEGER_CST
10283 && 0 == compare_tree_int (tree010,
10284 TYPE_PRECISION
10285 (TREE_TYPE (TREE_OPERAND
10286 (arg0, 0))))
10287 && operand_equal_p (tree11, tree011, 0))
10288 return fold_convert_loc
10289 (loc, type,
10290 build2 ((code0 != LSHIFT_EXPR
10291 ? LROTATE_EXPR
10292 : RROTATE_EXPR),
10293 TREE_TYPE (TREE_OPERAND (arg0, 0)),
10294 TREE_OPERAND (arg0, 0), tree11));
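/* Worked example (hypothetical declarations): for 32-bit
   "unsigned int x", both "(x << 3) + (x >> 29)" and
   "(x << n) + (x >> (32 - n))" are recognized here and become a
   rotate left (LROTATE_EXPR) of x.  */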
10299 associate:
10300 /* In most languages, we can't associate operations on floats through
10301 parentheses. Rather than remember where the parentheses were, we
10302 don't associate floats at all, unless the user has specified
10303 -fassociative-math.
10304 And, we need to make sure type is not saturating. */
10306 if ((! FLOAT_TYPE_P (type) || flag_associative_math)
10307 && !TYPE_SATURATING (type))
10309 tree var0, con0, lit0, minus_lit0;
10310 tree var1, con1, lit1, minus_lit1;
10311 bool ok = true;
10313 /* Split both trees into variables, constants, and literals. Then
10314 associate each group together, the constants with literals,
10315 then the result with variables. This increases the chances of
10316 literals being recombined later and of generating relocatable
10317 expressions for the sum of a constant and literal. */
10318 var0 = split_tree (arg0, code, &con0, &lit0, &minus_lit0, 0);
10319 var1 = split_tree (arg1, code, &con1, &lit1, &minus_lit1,
10320 code == MINUS_EXPR);
10322 /* Recombine MINUS_EXPR operands by using PLUS_EXPR. */
10323 if (code == MINUS_EXPR)
10324 code = PLUS_EXPR;
10326 /* With undefined overflow we can only associate constants with one
10327 variable, and constants whose association doesn't overflow. */
10328 if ((POINTER_TYPE_P (type) && POINTER_TYPE_OVERFLOW_UNDEFINED)
10329 || (INTEGRAL_TYPE_P (type) && !TYPE_OVERFLOW_WRAPS (type)))
10331 if (var0 && var1)
10333 tree tmp0 = var0;
10334 tree tmp1 = var1;
10336 if (TREE_CODE (tmp0) == NEGATE_EXPR)
10337 tmp0 = TREE_OPERAND (tmp0, 0);
10338 if (TREE_CODE (tmp1) == NEGATE_EXPR)
10339 tmp1 = TREE_OPERAND (tmp1, 0);
10340 /* The only case we can still associate with two variables
10341 is if they are the same, modulo negation. */
10342 if (!operand_equal_p (tmp0, tmp1, 0))
10343 ok = false;
10346 if (ok && lit0 && lit1)
10348 tree tmp0 = fold_convert (type, lit0);
10349 tree tmp1 = fold_convert (type, lit1);
10351 if (!TREE_OVERFLOW (tmp0) && !TREE_OVERFLOW (tmp1)
10352 && TREE_OVERFLOW (fold_build2 (code, type, tmp0, tmp1)))
10353 ok = false;
10357 /* Only do something if we found more than two objects. Otherwise,
10358 nothing has changed and we risk infinite recursion. */
10359 if (ok
10360 && (2 < ((var0 != 0) + (var1 != 0)
10361 + (con0 != 0) + (con1 != 0)
10362 + (lit0 != 0) + (lit1 != 0)
10363 + (minus_lit0 != 0) + (minus_lit1 != 0))))
10365 var0 = associate_trees (loc, var0, var1, code, type);
10366 con0 = associate_trees (loc, con0, con1, code, type);
10367 lit0 = associate_trees (loc, lit0, lit1, code, type);
10368 minus_lit0 = associate_trees (loc, minus_lit0, minus_lit1, code, type);
10370 /* Preserve the MINUS_EXPR if the negative part of the literal is
10371 greater than the positive part. Otherwise, the multiplicative
10372 folding code (i.e. extract_muldiv) may be fooled when
10373 unsigned constants are subtracted, as in the following
10374 example: ((X*2 + 4) - 8U)/2. */
10375 if (minus_lit0 && lit0)
10377 if (TREE_CODE (lit0) == INTEGER_CST
10378 && TREE_CODE (minus_lit0) == INTEGER_CST
10379 && tree_int_cst_lt (lit0, minus_lit0))
10381 minus_lit0 = associate_trees (loc, minus_lit0, lit0,
10382 MINUS_EXPR, type);
10383 lit0 = 0;
10385 else
10387 lit0 = associate_trees (loc, lit0, minus_lit0,
10388 MINUS_EXPR, type);
10389 minus_lit0 = 0;
10392 if (minus_lit0)
10394 if (con0 == 0)
10395 return
10396 fold_convert_loc (loc, type,
10397 associate_trees (loc, var0, minus_lit0,
10398 MINUS_EXPR, type));
10399 else
10401 con0 = associate_trees (loc, con0, minus_lit0,
10402 MINUS_EXPR, type);
10403 return
10404 fold_convert_loc (loc, type,
10405 associate_trees (loc, var0, con0,
10406 PLUS_EXPR, type));
10410 con0 = associate_trees (loc, con0, lit0, code, type);
10411 return
10412 fold_convert_loc (loc, type, associate_trees (loc, var0, con0,
10413 code, type));
10417 return NULL_TREE;
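/* Illustrative example (hypothetical source): the associate logic above
   splits "(x + 1) + (y + 2)" into variables x, y and literals 1, 2,
   then rebuilds it as "(x + y) + 3" with the constants combined.  */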
10419 case MINUS_EXPR:
10420 /* Pointer simplifications for subtraction, simple reassociations. */
10421 if (POINTER_TYPE_P (TREE_TYPE (arg1)) && POINTER_TYPE_P (TREE_TYPE (arg0)))
10423 /* (PTR0 p+ A) - (PTR1 p+ B) -> (PTR0 - PTR1) + (A - B) */
10424 if (TREE_CODE (arg0) == POINTER_PLUS_EXPR
10425 && TREE_CODE (arg1) == POINTER_PLUS_EXPR)
10427 tree arg00 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
10428 tree arg01 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
10429 tree arg10 = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 0));
10430 tree arg11 = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 1));
10431 return fold_build2_loc (loc, PLUS_EXPR, type,
10432 fold_build2_loc (loc, MINUS_EXPR, type,
10433 arg00, arg10),
10434 fold_build2_loc (loc, MINUS_EXPR, type,
10435 arg01, arg11));
10437 /* (PTR0 p+ A) - PTR1 -> (PTR0 - PTR1) + A, assuming PTR0 - PTR1 simplifies. */
10438 else if (TREE_CODE (arg0) == POINTER_PLUS_EXPR)
10440 tree arg00 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
10441 tree arg01 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
10442 tree tmp = fold_binary_loc (loc, MINUS_EXPR, type, arg00,
10443 fold_convert_loc (loc, type, arg1));
10444 if (tmp)
10445 return fold_build2_loc (loc, PLUS_EXPR, type, tmp, arg01);
10448 /* A - (-B) -> A + B */
10449 if (TREE_CODE (arg1) == NEGATE_EXPR)
10450 return fold_build2_loc (loc, PLUS_EXPR, type, op0,
10451 fold_convert_loc (loc, type,
10452 TREE_OPERAND (arg1, 0)));
10453 /* (-A) - B -> (-B) - A where B is easily negated and we can swap. */
10454 if (TREE_CODE (arg0) == NEGATE_EXPR
10455 && (FLOAT_TYPE_P (type)
10456 || INTEGRAL_TYPE_P (type))
10457 && negate_expr_p (arg1)
10458 && reorder_operands_p (arg0, arg1))
10459 return fold_build2_loc (loc, MINUS_EXPR, type,
10460 fold_convert_loc (loc, type,
10461 negate_expr (arg1)),
10462 fold_convert_loc (loc, type,
10463 TREE_OPERAND (arg0, 0)));
10464 /* Convert -A - 1 to ~A. */
10465 if (INTEGRAL_TYPE_P (type)
10466 && TREE_CODE (arg0) == NEGATE_EXPR
10467 && integer_onep (arg1)
10468 && !TYPE_OVERFLOW_TRAPS (type))
10469 return fold_build1_loc (loc, BIT_NOT_EXPR, type,
10470 fold_convert_loc (loc, type,
10471 TREE_OPERAND (arg0, 0)));
10473 /* Convert -1 - A to ~A. */
10474 if (INTEGRAL_TYPE_P (type)
10475 && integer_all_onesp (arg0))
10476 return fold_build1_loc (loc, BIT_NOT_EXPR, type, op1);
10479 /* X - (X / CST) * CST is X % CST. */
10480 if (INTEGRAL_TYPE_P (type)
10481 && TREE_CODE (arg1) == MULT_EXPR
10482 && TREE_CODE (TREE_OPERAND (arg1, 0)) == TRUNC_DIV_EXPR
10483 && operand_equal_p (arg0,
10484 TREE_OPERAND (TREE_OPERAND (arg1, 0), 0), 0)
10485 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg1, 0), 1),
10486 TREE_OPERAND (arg1, 1), 0))
10487 return
10488 fold_convert_loc (loc, type,
10489 fold_build2_loc (loc, TRUNC_MOD_EXPR, TREE_TYPE (arg0),
10490 arg0, TREE_OPERAND (arg1, 1)));
10492 if (! FLOAT_TYPE_P (type))
10494 if (integer_zerop (arg0))
10495 return negate_expr (fold_convert_loc (loc, type, arg1));
10496 if (integer_zerop (arg1))
10497 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
10499 /* Fold A - (A & B) into ~B & A. */
10500 if (!TREE_SIDE_EFFECTS (arg0)
10501 && TREE_CODE (arg1) == BIT_AND_EXPR)
10503 if (operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0))
10505 tree arg10 = fold_convert_loc (loc, type,
10506 TREE_OPERAND (arg1, 0));
10507 return fold_build2_loc (loc, BIT_AND_EXPR, type,
10508 fold_build1_loc (loc, BIT_NOT_EXPR,
10509 type, arg10),
10510 fold_convert_loc (loc, type, arg0));
10512 if (operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
10514 tree arg11 = fold_convert_loc (loc,
10515 type, TREE_OPERAND (arg1, 1));
10516 return fold_build2_loc (loc, BIT_AND_EXPR, type,
10517 fold_build1_loc (loc, BIT_NOT_EXPR,
10518 type, arg11),
10519 fold_convert_loc (loc, type, arg0));
10523 /* Fold (A & ~B) - (A & B) into (A ^ B) - B, where B is
10524 any power of 2 minus 1. */
10525 if (TREE_CODE (arg0) == BIT_AND_EXPR
10526 && TREE_CODE (arg1) == BIT_AND_EXPR
10527 && operand_equal_p (TREE_OPERAND (arg0, 0),
10528 TREE_OPERAND (arg1, 0), 0))
10530 tree mask0 = TREE_OPERAND (arg0, 1);
10531 tree mask1 = TREE_OPERAND (arg1, 1);
10532 tree tem = fold_build1_loc (loc, BIT_NOT_EXPR, type, mask0);
10534 if (operand_equal_p (tem, mask1, 0))
10536 tem = fold_build2_loc (loc, BIT_XOR_EXPR, type,
10537 TREE_OPERAND (arg0, 0), mask1);
10538 return fold_build2_loc (loc, MINUS_EXPR, type, tem, mask1);
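/* Worked example (hypothetical values): with B == 0x0F (a power of two
   minus one), "(a & ~0x0F) - (a & 0x0F)" folds to
   "(a ^ 0x0F) - 0x0F".  */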
10543 /* See if ARG1 is zero and X - ARG1 reduces to X. */
10544 else if (fold_real_zero_addition_p (TREE_TYPE (arg0), arg1, 1))
10545 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
10547 /* (ARG0 - ARG1) is the same as (-ARG1 + ARG0). So check whether
10548 ARG0 is zero and X + ARG0 reduces to X, since that would mean
10549 (-ARG1 + ARG0) reduces to -ARG1. */
10550 else if (fold_real_zero_addition_p (TREE_TYPE (arg1), arg0, 0))
10551 return negate_expr (fold_convert_loc (loc, type, arg1));
10553 /* Fold __complex__ ( x, 0 ) - __complex__ ( 0, y ) to
10554 __complex__ ( x, -y ). This is not the same for SNaNs or if
10555 signed zeros are involved. */
10556 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
10557 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
10558 && COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0)))
10560 tree rtype = TREE_TYPE (TREE_TYPE (arg0));
10561 tree arg0r = fold_unary_loc (loc, REALPART_EXPR, rtype, arg0);
10562 tree arg0i = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg0);
10563 bool arg0rz = false, arg0iz = false;
10564 if ((arg0r && (arg0rz = real_zerop (arg0r)))
10565 || (arg0i && (arg0iz = real_zerop (arg0i))))
10567 tree arg1r = fold_unary_loc (loc, REALPART_EXPR, rtype, arg1);
10568 tree arg1i = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg1);
10569 if (arg0rz && arg1i && real_zerop (arg1i))
10571 tree rp = fold_build1_loc (loc, NEGATE_EXPR, rtype,
10572 arg1r ? arg1r
10573 : build1 (REALPART_EXPR, rtype, arg1));
10574 tree ip = arg0i ? arg0i
10575 : build1 (IMAGPART_EXPR, rtype, arg0);
10576 return fold_build2_loc (loc, COMPLEX_EXPR, type, rp, ip);
10578 else if (arg0iz && arg1r && real_zerop (arg1r))
10580 tree rp = arg0r ? arg0r
10581 : build1 (REALPART_EXPR, rtype, arg0);
10582 tree ip = fold_build1_loc (loc, NEGATE_EXPR, rtype,
10583 arg1i ? arg1i
10584 : build1 (IMAGPART_EXPR, rtype, arg1));
10585 return fold_build2_loc (loc, COMPLEX_EXPR, type, rp, ip);
10590 /* Fold &x - &x. This can happen from &x.foo - &x.
10591 This is unsafe for certain floats even in non-IEEE formats.
10592 In IEEE, it is unsafe because it gives the wrong result for NaNs.
10593 Also note that operand_equal_p is always false if an operand
10594 is volatile. */
10596 if ((!FLOAT_TYPE_P (type) || !HONOR_NANS (TYPE_MODE (type)))
10597 && operand_equal_p (arg0, arg1, 0))
10598 return build_zero_cst (type);
10600 /* A - B -> A + (-B) if B is easily negatable. */
10601 if (negate_expr_p (arg1)
10602 && ((FLOAT_TYPE_P (type)
10603 /* Avoid this transformation if B is a positive REAL_CST. */
10604 && (TREE_CODE (arg1) != REAL_CST
10605 || REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg1))))
10606 || INTEGRAL_TYPE_P (type)))
10607 return fold_build2_loc (loc, PLUS_EXPR, type,
10608 fold_convert_loc (loc, type, arg0),
10609 fold_convert_loc (loc, type,
10610 negate_expr (arg1)));
10612 /* Try folding difference of addresses. */
10614 HOST_WIDE_INT diff;
10616 if ((TREE_CODE (arg0) == ADDR_EXPR
10617 || TREE_CODE (arg1) == ADDR_EXPR)
10618 && ptr_difference_const (arg0, arg1, &diff))
10619 return build_int_cst_type (type, diff);
10622 /* Fold &a[i] - &a[j] to i-j. */
10623 if (TREE_CODE (arg0) == ADDR_EXPR
10624 && TREE_CODE (TREE_OPERAND (arg0, 0)) == ARRAY_REF
10625 && TREE_CODE (arg1) == ADDR_EXPR
10626 && TREE_CODE (TREE_OPERAND (arg1, 0)) == ARRAY_REF)
10628 tree tem = fold_addr_of_array_ref_difference (loc, type,
10629 TREE_OPERAND (arg0, 0),
10630 TREE_OPERAND (arg1, 0));
10631 if (tem)
10632 return tem;
10635 if (FLOAT_TYPE_P (type)
10636 && flag_unsafe_math_optimizations
10637 && (TREE_CODE (arg0) == RDIV_EXPR || TREE_CODE (arg0) == MULT_EXPR)
10638 && (TREE_CODE (arg1) == RDIV_EXPR || TREE_CODE (arg1) == MULT_EXPR)
10639 && (tem = distribute_real_division (loc, code, type, arg0, arg1)))
10640 return tem;
10642 /* Handle (A1 * C1) - (A2 * C2) with A1, A2 or C1, C2 being the
10643 same or one. Make sure type is not saturating.
10644 fold_plusminus_mult_expr will re-associate. */
10645 if ((TREE_CODE (arg0) == MULT_EXPR
10646 || TREE_CODE (arg1) == MULT_EXPR)
10647 && !TYPE_SATURATING (type)
10648 && (!FLOAT_TYPE_P (type) || flag_associative_math))
10650 tree tem = fold_plusminus_mult_expr (loc, code, type, arg0, arg1);
10651 if (tem)
10652 return tem;
10655 goto associate;
10657 case MULT_EXPR:
10658 /* (-A) * (-B) -> A * B */
10659 if (TREE_CODE (arg0) == NEGATE_EXPR && negate_expr_p (arg1))
10660 return fold_build2_loc (loc, MULT_EXPR, type,
10661 fold_convert_loc (loc, type,
10662 TREE_OPERAND (arg0, 0)),
10663 fold_convert_loc (loc, type,
10664 negate_expr (arg1)));
10665 if (TREE_CODE (arg1) == NEGATE_EXPR && negate_expr_p (arg0))
10666 return fold_build2_loc (loc, MULT_EXPR, type,
10667 fold_convert_loc (loc, type,
10668 negate_expr (arg0)),
10669 fold_convert_loc (loc, type,
10670 TREE_OPERAND (arg1, 0)));
10672 if (! FLOAT_TYPE_P (type))
10674 if (integer_zerop (arg1))
10675 return omit_one_operand_loc (loc, type, arg1, arg0);
10676 if (integer_onep (arg1))
10677 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
10678 /* Transform x * -1 into -x. Make sure to do the negation
10679 on the original operand with conversions not stripped
10680 because we can only strip non-sign-changing conversions. */
10681 if (integer_all_onesp (arg1))
10682 return fold_convert_loc (loc, type, negate_expr (op0));
10683 /* Transform x * -C into -x * C if x is easily negatable. */
10684 if (TREE_CODE (arg1) == INTEGER_CST
10685 && tree_int_cst_sgn (arg1) == -1
10686 && negate_expr_p (arg0)
10687 && (tem = negate_expr (arg1)) != arg1
10688 && !TREE_OVERFLOW (tem))
10689 return fold_build2_loc (loc, MULT_EXPR, type,
10690 fold_convert_loc (loc, type,
10691 negate_expr (arg0)),
10692 tem);
10694 /* (a * (1 << b)) is (a << b) */
10695 if (TREE_CODE (arg1) == LSHIFT_EXPR
10696 && integer_onep (TREE_OPERAND (arg1, 0)))
10697 return fold_build2_loc (loc, LSHIFT_EXPR, type, op0,
10698 TREE_OPERAND (arg1, 1));
10699 if (TREE_CODE (arg0) == LSHIFT_EXPR
10700 && integer_onep (TREE_OPERAND (arg0, 0)))
10701 return fold_build2_loc (loc, LSHIFT_EXPR, type, op1,
10702 TREE_OPERAND (arg0, 1));
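/* Example (illustrative only): a * (1 << b) and (1 << b) * a are both
   rewritten as a << b, turning a multiplication by a power of two into
   a shift. */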
10704 /* (A + A) * C -> A * 2 * C */
10705 if (TREE_CODE (arg0) == PLUS_EXPR
10706 && TREE_CODE (arg1) == INTEGER_CST
10707 && operand_equal_p (TREE_OPERAND (arg0, 0),
10708 TREE_OPERAND (arg0, 1), 0))
10709 return fold_build2_loc (loc, MULT_EXPR, type,
10710 omit_one_operand_loc (loc, type,
10711 TREE_OPERAND (arg0, 0),
10712 TREE_OPERAND (arg0, 1)),
10713 fold_build2_loc (loc, MULT_EXPR, type,
10714 build_int_cst (type, 2), arg1));
10716 strict_overflow_p = false;
10717 if (TREE_CODE (arg1) == INTEGER_CST
10718 && 0 != (tem = extract_muldiv (op0, arg1, code, NULL_TREE,
10719 &strict_overflow_p)))
10721 if (strict_overflow_p)
10722 fold_overflow_warning (("assuming signed overflow does not "
10723 "occur when simplifying "
10724 "multiplication"),
10725 WARN_STRICT_OVERFLOW_MISC);
10726 return fold_convert_loc (loc, type, tem);
10729 /* Optimize z * conj(z) for integer complex numbers. */
10730 if (TREE_CODE (arg0) == CONJ_EXPR
10731 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
10732 return fold_mult_zconjz (loc, type, arg1);
10733 if (TREE_CODE (arg1) == CONJ_EXPR
10734 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
10735 return fold_mult_zconjz (loc, type, arg0);
10737 else
10739 /* Maybe fold x * 0 to 0. The expressions aren't the same
10740 when x is NaN, since x * 0 is also NaN. Nor are they the
10741 same in modes with signed zeros, since multiplying a
10742 negative value by 0 gives -0, not +0. */
10743 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0)))
10744 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
10745 && real_zerop (arg1))
10746 return omit_one_operand_loc (loc, type, arg1, arg0);
10747 /* In IEEE floating point, x*1 is not equivalent to x for snans.
10748 Likewise for complex arithmetic with signed zeros. */
10749 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
10750 && (!HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
10751 || !COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0)))
10752 && real_onep (arg1))
10753 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
10755 /* Transform x * -1.0 into -x. */
10756 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
10757 && (!HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
10758 || !COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0)))
10759 && real_minus_onep (arg1))
10760 return fold_convert_loc (loc, type, negate_expr (arg0));
10762 /* Convert (C1/X)*C2 into (C1*C2)/X. This transformation may change
10763 the result for floating-point types due to rounding, so it is applied
10764 only if -fassociative-math was specified. */
10765 if (flag_associative_math
10766 && TREE_CODE (arg0) == RDIV_EXPR
10767 && TREE_CODE (arg1) == REAL_CST
10768 && TREE_CODE (TREE_OPERAND (arg0, 0)) == REAL_CST)
10770 tree tem = const_binop (MULT_EXPR, TREE_OPERAND (arg0, 0),
10771 arg1);
10772 if (tem)
10773 return fold_build2_loc (loc, RDIV_EXPR, type, tem,
10774 TREE_OPERAND (arg0, 1));
10777 /* Strip sign operations from X in X*X, i.e. -Y*-Y -> Y*Y. */
10778 if (operand_equal_p (arg0, arg1, 0))
10780 tree tem = fold_strip_sign_ops (arg0);
10781 if (tem != NULL_TREE)
10783 tem = fold_convert_loc (loc, type, tem);
10784 return fold_build2_loc (loc, MULT_EXPR, type, tem, tem);
10788 /* Fold z * +-I to __complex__ (-+__imag z, +-__real z).
10789 This is not the same for NaNs or if signed zeros are
10790 involved. */
10791 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0)))
10792 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
10793 && COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0))
10794 && TREE_CODE (arg1) == COMPLEX_CST
10795 && real_zerop (TREE_REALPART (arg1)))
10797 tree rtype = TREE_TYPE (TREE_TYPE (arg0));
10798 if (real_onep (TREE_IMAGPART (arg1)))
10799 return
10800 fold_build2_loc (loc, COMPLEX_EXPR, type,
10801 negate_expr (fold_build1_loc (loc, IMAGPART_EXPR,
10802 rtype, arg0)),
10803 fold_build1_loc (loc, REALPART_EXPR, rtype, arg0));
10804 else if (real_minus_onep (TREE_IMAGPART (arg1)))
10805 return
10806 fold_build2_loc (loc, COMPLEX_EXPR, type,
10807 fold_build1_loc (loc, IMAGPART_EXPR, rtype, arg0),
10808 negate_expr (fold_build1_loc (loc, REALPART_EXPR,
10809 rtype, arg0)));
10812 /* Optimize z * conj(z) for floating point complex numbers.
10813 Guarded by flag_unsafe_math_optimizations as non-finite
10814 imaginary components don't produce scalar results. */
10815 if (flag_unsafe_math_optimizations
10816 && TREE_CODE (arg0) == CONJ_EXPR
10817 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
10818 return fold_mult_zconjz (loc, type, arg1);
10819 if (flag_unsafe_math_optimizations
10820 && TREE_CODE (arg1) == CONJ_EXPR
10821 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
10822 return fold_mult_zconjz (loc, type, arg0);
10824 if (flag_unsafe_math_optimizations)
10826 enum built_in_function fcode0 = builtin_mathfn_code (arg0);
10827 enum built_in_function fcode1 = builtin_mathfn_code (arg1);
10829 /* Optimizations of root(...)*root(...). */
10830 if (fcode0 == fcode1 && BUILTIN_ROOT_P (fcode0))
10832 tree rootfn, arg;
10833 tree arg00 = CALL_EXPR_ARG (arg0, 0);
10834 tree arg10 = CALL_EXPR_ARG (arg1, 0);
10836 /* Optimize sqrt(x)*sqrt(x) as x. */
10837 if (BUILTIN_SQRT_P (fcode0)
10838 && operand_equal_p (arg00, arg10, 0)
10839 && ! HONOR_SNANS (TYPE_MODE (type)))
10840 return arg00;
10842 /* Optimize root(x)*root(y) as root(x*y). */
10843 rootfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
10844 arg = fold_build2_loc (loc, MULT_EXPR, type, arg00, arg10);
10845 return build_call_expr_loc (loc, rootfn, 1, arg);
10848 /* Optimize expN(x)*expN(y) as expN(x+y). */
10849 if (fcode0 == fcode1 && BUILTIN_EXPONENT_P (fcode0))
10851 tree expfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
10852 tree arg = fold_build2_loc (loc, PLUS_EXPR, type,
10853 CALL_EXPR_ARG (arg0, 0),
10854 CALL_EXPR_ARG (arg1, 0));
10855 return build_call_expr_loc (loc, expfn, 1, arg);
10858 /* Optimizations of pow(...)*pow(...). */
10859 if ((fcode0 == BUILT_IN_POW && fcode1 == BUILT_IN_POW)
10860 || (fcode0 == BUILT_IN_POWF && fcode1 == BUILT_IN_POWF)
10861 || (fcode0 == BUILT_IN_POWL && fcode1 == BUILT_IN_POWL))
10863 tree arg00 = CALL_EXPR_ARG (arg0, 0);
10864 tree arg01 = CALL_EXPR_ARG (arg0, 1);
10865 tree arg10 = CALL_EXPR_ARG (arg1, 0);
10866 tree arg11 = CALL_EXPR_ARG (arg1, 1);
10868 /* Optimize pow(x,y)*pow(z,y) as pow(x*z,y). */
10869 if (operand_equal_p (arg01, arg11, 0))
10871 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
10872 tree arg = fold_build2_loc (loc, MULT_EXPR, type,
10873 arg00, arg10);
10874 return build_call_expr_loc (loc, powfn, 2, arg, arg01);
10877 /* Optimize pow(x,y)*pow(x,z) as pow(x,y+z). */
10878 if (operand_equal_p (arg00, arg10, 0))
10880 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
10881 tree arg = fold_build2_loc (loc, PLUS_EXPR, type,
10882 arg01, arg11);
10883 return build_call_expr_loc (loc, powfn, 2, arg00, arg);
10887 /* Optimize tan(x)*cos(x) as sin(x). */
10888 if (((fcode0 == BUILT_IN_TAN && fcode1 == BUILT_IN_COS)
10889 || (fcode0 == BUILT_IN_TANF && fcode1 == BUILT_IN_COSF)
10890 || (fcode0 == BUILT_IN_TANL && fcode1 == BUILT_IN_COSL)
10891 || (fcode0 == BUILT_IN_COS && fcode1 == BUILT_IN_TAN)
10892 || (fcode0 == BUILT_IN_COSF && fcode1 == BUILT_IN_TANF)
10893 || (fcode0 == BUILT_IN_COSL && fcode1 == BUILT_IN_TANL))
10894 && operand_equal_p (CALL_EXPR_ARG (arg0, 0),
10895 CALL_EXPR_ARG (arg1, 0), 0))
10897 tree sinfn = mathfn_built_in (type, BUILT_IN_SIN);
10899 if (sinfn != NULL_TREE)
10900 return build_call_expr_loc (loc, sinfn, 1,
10901 CALL_EXPR_ARG (arg0, 0));
10904 /* Optimize x*pow(x,c) as pow(x,c+1). */
10905 if (fcode1 == BUILT_IN_POW
10906 || fcode1 == BUILT_IN_POWF
10907 || fcode1 == BUILT_IN_POWL)
10909 tree arg10 = CALL_EXPR_ARG (arg1, 0);
10910 tree arg11 = CALL_EXPR_ARG (arg1, 1);
10911 if (TREE_CODE (arg11) == REAL_CST
10912 && !TREE_OVERFLOW (arg11)
10913 && operand_equal_p (arg0, arg10, 0))
10915 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg1), 0);
10916 REAL_VALUE_TYPE c;
10917 tree arg;
10919 c = TREE_REAL_CST (arg11);
10920 real_arithmetic (&c, PLUS_EXPR, &c, &dconst1);
10921 arg = build_real (type, c);
10922 return build_call_expr_loc (loc, powfn, 2, arg0, arg);
10926 /* Optimize pow(x,c)*x as pow(x,c+1). */
10927 if (fcode0 == BUILT_IN_POW
10928 || fcode0 == BUILT_IN_POWF
10929 || fcode0 == BUILT_IN_POWL)
10931 tree arg00 = CALL_EXPR_ARG (arg0, 0);
10932 tree arg01 = CALL_EXPR_ARG (arg0, 1);
10933 if (TREE_CODE (arg01) == REAL_CST
10934 && !TREE_OVERFLOW (arg01)
10935 && operand_equal_p (arg1, arg00, 0))
10937 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
10938 REAL_VALUE_TYPE c;
10939 tree arg;
10941 c = TREE_REAL_CST (arg01);
10942 real_arithmetic (&c, PLUS_EXPR, &c, &dconst1);
10943 arg = build_real (type, c);
10944 return build_call_expr_loc (loc, powfn, 2, arg1, arg);
10948 /* Canonicalize x*x as pow(x,2.0), which is expanded as x*x. */
10949 if (!in_gimple_form
10950 && optimize
10951 && operand_equal_p (arg0, arg1, 0))
10953 tree powfn = mathfn_built_in (type, BUILT_IN_POW);
10955 if (powfn)
10957 tree arg = build_real (type, dconst2);
10958 return build_call_expr_loc (loc, powfn, 2, arg0, arg);
10963 goto associate;
10965 case BIT_IOR_EXPR:
10966 bit_ior:
10967 if (integer_all_onesp (arg1))
10968 return omit_one_operand_loc (loc, type, arg1, arg0);
10969 if (integer_zerop (arg1))
10970 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
10971 if (operand_equal_p (arg0, arg1, 0))
10972 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
10974 /* ~X | X is -1. */
10975 if (TREE_CODE (arg0) == BIT_NOT_EXPR
10976 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
10978 t1 = build_zero_cst (type);
10979 t1 = fold_unary_loc (loc, BIT_NOT_EXPR, type, t1);
10980 return omit_one_operand_loc (loc, type, t1, arg1);
10983 /* X | ~X is -1. */
10984 if (TREE_CODE (arg1) == BIT_NOT_EXPR
10985 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
10987 t1 = build_zero_cst (type);
10988 t1 = fold_unary_loc (loc, BIT_NOT_EXPR, type, t1);
10989 return omit_one_operand_loc (loc, type, t1, arg0);
10992 /* Canonicalize (X & C1) | C2. */
10993 if (TREE_CODE (arg0) == BIT_AND_EXPR
10994 && TREE_CODE (arg1) == INTEGER_CST
10995 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
10997 double_int c1, c2, c3, msk;
10998 int width = TYPE_PRECISION (type), w;
10999 c1 = tree_to_double_int (TREE_OPERAND (arg0, 1));
11000 c2 = tree_to_double_int (arg1);
11002 /* If (C1&C2) == C1, then (X&C1)|C2 becomes (X,C2). */
11003 if (double_int_equal_p (double_int_and (c1, c2), c1))
11004 return omit_one_operand_loc (loc, type, arg1,
11005 TREE_OPERAND (arg0, 0));
11007 msk = double_int_mask (width);
11009 /* If (C1|C2) == ~0 then (X&C1)|C2 becomes X|C2. */
11010 if (double_int_zero_p (double_int_and_not (msk,
11011 double_int_ior (c1, c2))))
11012 return fold_build2_loc (loc, BIT_IOR_EXPR, type,
11013 TREE_OPERAND (arg0, 0), arg1);
11015 /* Minimize the number of bits set in C1, i.e. C1 := C1 & ~C2,
11016 unless (C1 & ~C2) | (C2 & C3) for some C3 is a mask of some
11017 mode which allows further optimizations. */
11018 c1 = double_int_and (c1, msk);
11019 c2 = double_int_and (c2, msk);
11020 c3 = double_int_and_not (c1, c2);
11021 for (w = BITS_PER_UNIT;
11022 w <= width && w <= HOST_BITS_PER_WIDE_INT;
11023 w <<= 1)
11025 unsigned HOST_WIDE_INT mask
11026 = (unsigned HOST_WIDE_INT) -1 >> (HOST_BITS_PER_WIDE_INT - w);
11027 if (((c1.low | c2.low) & mask) == mask
11028 && (c1.low & ~mask) == 0 && c1.high == 0)
11030 c3 = uhwi_to_double_int (mask);
11031 break;
11034 if (!double_int_equal_p (c3, c1))
11035 return fold_build2_loc (loc, BIT_IOR_EXPR, type,
11036 fold_build2_loc (loc, BIT_AND_EXPR, type,
11037 TREE_OPERAND (arg0, 0),
11038 double_int_to_tree (type,
11039 c3)),
11040 arg1);
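/* Worked example with hypothetical constants: in (X & 0x0F) | 0x03,
   C1 & C2 is 0x03, not C1, and C1 | C2 is not all ones, so C1 is
   minimized to C1 & ~C2 == 0x0C, giving (X & 0x0C) | 0x03. The loop
   above instead widens C1 to a whole-mode mask when possible:
   (X & 0xF0) | 0x0F becomes (X & 0xFF) | 0x0F, since a mask like 0xFF
   may enable further folding. */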
11043 /* (X & Y) | Y is (X, Y). */
11044 if (TREE_CODE (arg0) == BIT_AND_EXPR
11045 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
11046 return omit_one_operand_loc (loc, type, arg1, TREE_OPERAND (arg0, 0));
11047 /* (X & Y) | X is (Y, X). */
11048 if (TREE_CODE (arg0) == BIT_AND_EXPR
11049 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
11050 && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
11051 return omit_one_operand_loc (loc, type, arg1, TREE_OPERAND (arg0, 1));
11052 /* X | (X & Y) is (Y, X). */
11053 if (TREE_CODE (arg1) == BIT_AND_EXPR
11054 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0)
11055 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 1)))
11056 return omit_one_operand_loc (loc, type, arg0, TREE_OPERAND (arg1, 1));
11057 /* X | (Y & X) is (Y, X). */
11058 if (TREE_CODE (arg1) == BIT_AND_EXPR
11059 && operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0)
11060 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
11061 return omit_one_operand_loc (loc, type, arg0, TREE_OPERAND (arg1, 0));
11063 /* (X & ~Y) | (~X & Y) is X ^ Y */
11064 if (TREE_CODE (arg0) == BIT_AND_EXPR
11065 && TREE_CODE (arg1) == BIT_AND_EXPR)
11067 tree a0, a1, l0, l1, n0, n1;
11069 a0 = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 0));
11070 a1 = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 1));
11072 l0 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
11073 l1 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
11075 n0 = fold_build1_loc (loc, BIT_NOT_EXPR, type, l0);
11076 n1 = fold_build1_loc (loc, BIT_NOT_EXPR, type, l1);
11078 if ((operand_equal_p (n0, a0, 0)
11079 && operand_equal_p (n1, a1, 0))
11080 || (operand_equal_p (n0, a1, 0)
11081 && operand_equal_p (n1, a0, 0)))
11082 return fold_build2_loc (loc, BIT_XOR_EXPR, type, l0, n1);
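/* Example (illustrative only): both (a & ~b) | (~a & b) and
   (~a & b) | (a & ~b) match here, because each BIT_AND operand is
   checked against the negation of an operand of the other, and the
   whole expression folds to a ^ b. */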
11085 t1 = distribute_bit_expr (loc, code, type, arg0, arg1);
11086 if (t1 != NULL_TREE)
11087 return t1;
11089 /* Convert (or (not arg0) (not arg1)) to (not (and (arg0) (arg1))).
11091 This results in more efficient code for machines without a NAND
11092 instruction. Combine will canonicalize to the first form
11093 which will allow use of NAND instructions provided by the
11094 backend if they exist. */
11095 if (TREE_CODE (arg0) == BIT_NOT_EXPR
11096 && TREE_CODE (arg1) == BIT_NOT_EXPR)
11098 return
11099 fold_build1_loc (loc, BIT_NOT_EXPR, type,
11100 build2 (BIT_AND_EXPR, type,
11101 fold_convert_loc (loc, type,
11102 TREE_OPERAND (arg0, 0)),
11103 fold_convert_loc (loc, type,
11104 TREE_OPERAND (arg1, 0))));
11107 /* See if this can be simplified into a rotate first. If that
11108 is unsuccessful continue in the association code. */
11109 goto bit_rotate;
11111 case BIT_XOR_EXPR:
11112 if (integer_zerop (arg1))
11113 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
11114 if (integer_all_onesp (arg1))
11115 return fold_build1_loc (loc, BIT_NOT_EXPR, type, op0);
11116 if (operand_equal_p (arg0, arg1, 0))
11117 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
11119 /* ~X ^ X is -1. */
11120 if (TREE_CODE (arg0) == BIT_NOT_EXPR
11121 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
11123 t1 = build_zero_cst (type);
11124 t1 = fold_unary_loc (loc, BIT_NOT_EXPR, type, t1);
11125 return omit_one_operand_loc (loc, type, t1, arg1);
11128 /* X ^ ~X is -1. */
11129 if (TREE_CODE (arg1) == BIT_NOT_EXPR
11130 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
11132 t1 = build_zero_cst (type);
11133 t1 = fold_unary_loc (loc, BIT_NOT_EXPR, type, t1);
11134 return omit_one_operand_loc (loc, type, t1, arg0);
11137 /* If we are XORing two BIT_AND_EXPR's, both of which are and'ing
11138 with a constant, and the two constants have no bits in common,
11139 we should treat this as a BIT_IOR_EXPR since this may produce more
11140 simplifications. */
11141 if (TREE_CODE (arg0) == BIT_AND_EXPR
11142 && TREE_CODE (arg1) == BIT_AND_EXPR
11143 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
11144 && TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
11145 && integer_zerop (const_binop (BIT_AND_EXPR,
11146 TREE_OPERAND (arg0, 1),
11147 TREE_OPERAND (arg1, 1))))
11149 code = BIT_IOR_EXPR;
11150 goto bit_ior;
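/* Example (illustrative only): (a & 0x0C) ^ (b & 0x03) has disjoint
   constant masks (0x0C & 0x03 == 0), so no bit can be set on both
   sides and XOR coincides with IOR; the fold is therefore retried as
   (a & 0x0C) | (b & 0x03). */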
11153 /* (X | Y) ^ X -> Y & ~X */
11154 if (TREE_CODE (arg0) == BIT_IOR_EXPR
11155 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
11157 tree t2 = TREE_OPERAND (arg0, 1);
11158 t1 = fold_build1_loc (loc, BIT_NOT_EXPR, TREE_TYPE (arg1),
11159 arg1);
11160 t1 = fold_build2_loc (loc, BIT_AND_EXPR, type,
11161 fold_convert_loc (loc, type, t2),
11162 fold_convert_loc (loc, type, t1));
11163 return t1;
11166 /* (Y | X) ^ X -> Y & ~X */
11167 if (TREE_CODE (arg0) == BIT_IOR_EXPR
11168 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
11170 tree t2 = TREE_OPERAND (arg0, 0);
11171 t1 = fold_build1_loc (loc, BIT_NOT_EXPR, TREE_TYPE (arg1),
11172 arg1);
11173 t1 = fold_build2_loc (loc, BIT_AND_EXPR, type,
11174 fold_convert_loc (loc, type, t2),
11175 fold_convert_loc (loc, type, t1));
11176 return t1;
11179 /* X ^ (X | Y) -> Y & ~X */
11180 if (TREE_CODE (arg1) == BIT_IOR_EXPR
11181 && operand_equal_p (TREE_OPERAND (arg1, 0), arg0, 0))
11183 tree t2 = TREE_OPERAND (arg1, 1);
11184 t1 = fold_build1_loc (loc, BIT_NOT_EXPR, TREE_TYPE (arg0),
11185 arg0);
11186 t1 = fold_build2_loc (loc, BIT_AND_EXPR, type,
11187 fold_convert_loc (loc, type, t2),
11188 fold_convert_loc (loc, type, t1));
11189 return t1;
11192 /* X ^ (Y | X) -> Y & ~X */
11193 if (TREE_CODE (arg1) == BIT_IOR_EXPR
11194 && operand_equal_p (TREE_OPERAND (arg1, 1), arg0, 0))
11196 tree t2 = TREE_OPERAND (arg1, 0);
11197 t1 = fold_build1_loc (loc, BIT_NOT_EXPR, TREE_TYPE (arg0),
11198 arg0);
11199 t1 = fold_build2_loc (loc, BIT_AND_EXPR, type,
11200 fold_convert_loc (loc, type, t2),
11201 fold_convert_loc (loc, type, t1));
11202 return t1;
11205 /* Convert ~X ^ ~Y to X ^ Y. */
11206 if (TREE_CODE (arg0) == BIT_NOT_EXPR
11207 && TREE_CODE (arg1) == BIT_NOT_EXPR)
11208 return fold_build2_loc (loc, code, type,
11209 fold_convert_loc (loc, type,
11210 TREE_OPERAND (arg0, 0)),
11211 fold_convert_loc (loc, type,
11212 TREE_OPERAND (arg1, 0)));
11214 /* Convert ~X ^ C to X ^ ~C. */
11215 if (TREE_CODE (arg0) == BIT_NOT_EXPR
11216 && TREE_CODE (arg1) == INTEGER_CST)
11217 return fold_build2_loc (loc, code, type,
11218 fold_convert_loc (loc, type,
11219 TREE_OPERAND (arg0, 0)),
11220 fold_build1_loc (loc, BIT_NOT_EXPR, type, arg1));
11222 /* Fold (X & 1) ^ 1 as (X & 1) == 0. */
11223 if (TREE_CODE (arg0) == BIT_AND_EXPR
11224 && integer_onep (TREE_OPERAND (arg0, 1))
11225 && integer_onep (arg1))
11226 return fold_build2_loc (loc, EQ_EXPR, type, arg0,
11227 build_int_cst (TREE_TYPE (arg0), 0));
11229 /* Fold (X & Y) ^ Y as ~X & Y. */
11230 if (TREE_CODE (arg0) == BIT_AND_EXPR
11231 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
11233 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
11234 return fold_build2_loc (loc, BIT_AND_EXPR, type,
11235 fold_build1_loc (loc, BIT_NOT_EXPR, type, tem),
11236 fold_convert_loc (loc, type, arg1));
11238 /* Fold (X & Y) ^ X as ~Y & X. */
11239 if (TREE_CODE (arg0) == BIT_AND_EXPR
11240 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
11241 && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
11243 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
11244 return fold_build2_loc (loc, BIT_AND_EXPR, type,
11245 fold_build1_loc (loc, BIT_NOT_EXPR, type, tem),
11246 fold_convert_loc (loc, type, arg1));
11248 /* Fold X ^ (X & Y) as X & ~Y. */
11249 if (TREE_CODE (arg1) == BIT_AND_EXPR
11250 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
11252 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 1));
11253 return fold_build2_loc (loc, BIT_AND_EXPR, type,
11254 fold_convert_loc (loc, type, arg0),
11255 fold_build1_loc (loc, BIT_NOT_EXPR, type, tem));
11257 /* Fold X ^ (Y & X) as ~Y & X. */
11258 if (TREE_CODE (arg1) == BIT_AND_EXPR
11259 && operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0)
11260 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
11262 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 0));
11263 return fold_build2_loc (loc, BIT_AND_EXPR, type,
11264 fold_build1_loc (loc, BIT_NOT_EXPR, type, tem),
11265 fold_convert_loc (loc, type, arg0));
11268 /* See if this can be simplified into a rotate first. If that
11269 is unsuccessful continue in the association code. */
11270 goto bit_rotate;
11272 case BIT_AND_EXPR:
11273 if (integer_all_onesp (arg1))
11274 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
11275 if (integer_zerop (arg1))
11276 return omit_one_operand_loc (loc, type, arg1, arg0);
11277 if (operand_equal_p (arg0, arg1, 0))
11278 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
11280 /* ~X & X, (X == 0) & X, and !X & X are always zero. */
11281 if ((TREE_CODE (arg0) == BIT_NOT_EXPR
11282 || TREE_CODE (arg0) == TRUTH_NOT_EXPR
11283 || (TREE_CODE (arg0) == EQ_EXPR
11284 && integer_zerop (TREE_OPERAND (arg0, 1))))
11285 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
11286 return omit_one_operand_loc (loc, type, integer_zero_node, arg1);
11288 /* X & ~X , X & (X == 0), and X & !X are always zero. */
11289 if ((TREE_CODE (arg1) == BIT_NOT_EXPR
11290 || TREE_CODE (arg1) == TRUTH_NOT_EXPR
11291 || (TREE_CODE (arg1) == EQ_EXPR
11292 && integer_zerop (TREE_OPERAND (arg1, 1))))
11293 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
11294 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
11296 /* Canonicalize (X | C1) & C2 as (X & C2) | (C1 & C2). */
11297 if (TREE_CODE (arg0) == BIT_IOR_EXPR
11298 && TREE_CODE (arg1) == INTEGER_CST
11299 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
11301 tree tmp1 = fold_convert_loc (loc, type, arg1);
11302 tree tmp2 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
11303 tree tmp3 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
11304 tmp2 = fold_build2_loc (loc, BIT_AND_EXPR, type, tmp2, tmp1);
11305 tmp3 = fold_build2_loc (loc, BIT_AND_EXPR, type, tmp3, tmp1);
11306 return
11307 fold_convert_loc (loc, type,
11308 fold_build2_loc (loc, BIT_IOR_EXPR,
11309 type, tmp2, tmp3));
11312 /* (X | Y) & Y is (X, Y). */
11313 if (TREE_CODE (arg0) == BIT_IOR_EXPR
11314 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
11315 return omit_one_operand_loc (loc, type, arg1, TREE_OPERAND (arg0, 0));
11316 /* (X | Y) & X is (Y, X). */
11317 if (TREE_CODE (arg0) == BIT_IOR_EXPR
11318 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
11319 && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
11320 return omit_one_operand_loc (loc, type, arg1, TREE_OPERAND (arg0, 1));
11321 /* X & (X | Y) is (Y, X). */
11322 if (TREE_CODE (arg1) == BIT_IOR_EXPR
11323 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0)
11324 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 1)))
11325 return omit_one_operand_loc (loc, type, arg0, TREE_OPERAND (arg1, 1));
11326 /* X & (Y | X) is (Y, X). */
11327 if (TREE_CODE (arg1) == BIT_IOR_EXPR
11328 && operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0)
11329 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
11330 return omit_one_operand_loc (loc, type, arg0, TREE_OPERAND (arg1, 0));
11332 /* Fold (X ^ 1) & 1 as (X & 1) == 0. */
11333 if (TREE_CODE (arg0) == BIT_XOR_EXPR
11334 && integer_onep (TREE_OPERAND (arg0, 1))
11335 && integer_onep (arg1))
11337 tem = TREE_OPERAND (arg0, 0);
11338 return fold_build2_loc (loc, EQ_EXPR, type,
11339 fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (tem), tem,
11340 build_int_cst (TREE_TYPE (tem), 1)),
11341 build_int_cst (TREE_TYPE (tem), 0));
11343 /* Fold ~X & 1 as (X & 1) == 0. */
11344 if (TREE_CODE (arg0) == BIT_NOT_EXPR
11345 && integer_onep (arg1))
11347 tem = TREE_OPERAND (arg0, 0);
11348 return fold_build2_loc (loc, EQ_EXPR, type,
11349 fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (tem), tem,
11350 build_int_cst (TREE_TYPE (tem), 1)),
11351 build_int_cst (TREE_TYPE (tem), 0));
11353 /* Fold !X & 1 as X == 0. */
11354 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
11355 && integer_onep (arg1))
11357 tem = TREE_OPERAND (arg0, 0);
11358 return fold_build2_loc (loc, EQ_EXPR, type, tem,
11359 build_int_cst (TREE_TYPE (tem), 0));
11362 /* Fold (X ^ Y) & Y as ~X & Y. */
11363 if (TREE_CODE (arg0) == BIT_XOR_EXPR
11364 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
11366 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
11367 return fold_build2_loc (loc, BIT_AND_EXPR, type,
11368 fold_build1_loc (loc, BIT_NOT_EXPR, type, tem),
11369 fold_convert_loc (loc, type, arg1));
11371 /* Fold (X ^ Y) & X as ~Y & X. */
11372 if (TREE_CODE (arg0) == BIT_XOR_EXPR
11373 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
11374 && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
11376 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
11377 return fold_build2_loc (loc, BIT_AND_EXPR, type,
11378 fold_build1_loc (loc, BIT_NOT_EXPR, type, tem),
11379 fold_convert_loc (loc, type, arg1));
11381 /* Fold X & (X ^ Y) as X & ~Y. */
11382 if (TREE_CODE (arg1) == BIT_XOR_EXPR
11383 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
11385 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 1));
11386 return fold_build2_loc (loc, BIT_AND_EXPR, type,
11387 fold_convert_loc (loc, type, arg0),
11388 fold_build1_loc (loc, BIT_NOT_EXPR, type, tem));
11390 /* Fold X & (Y ^ X) as ~Y & X. */
11391 if (TREE_CODE (arg1) == BIT_XOR_EXPR
11392 && operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0)
11393 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
11395 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 0));
11396 return fold_build2_loc (loc, BIT_AND_EXPR, type,
11397 fold_build1_loc (loc, BIT_NOT_EXPR, type, tem),
11398 fold_convert_loc (loc, type, arg0));
11401 /* For constants M and N, if M == (1LL << cst) - 1 && (N & M) == M,
11402 ((A & N) + B) & M -> (A + B) & M
11403 Similarly if (N & M) == 0,
11404 ((A | N) + B) & M -> (A + B) & M
11405 and for - instead of + (or unary - instead of +)
11406 and/or ^ instead of |.
11407 If B is constant and (B & M) == 0, fold into A & M. */
11408 if (host_integerp (arg1, 1))
11410 unsigned HOST_WIDE_INT cst1 = tree_low_cst (arg1, 1);
11411 if (~cst1 && (cst1 & (cst1 + 1)) == 0
11412 && INTEGRAL_TYPE_P (TREE_TYPE (arg0))
11413 && (TREE_CODE (arg0) == PLUS_EXPR
11414 || TREE_CODE (arg0) == MINUS_EXPR
11415 || TREE_CODE (arg0) == NEGATE_EXPR)
11416 && (TYPE_OVERFLOW_WRAPS (TREE_TYPE (arg0))
11417 || TREE_CODE (TREE_TYPE (arg0)) == INTEGER_TYPE))
11419 tree pmop[2];
11420 int which = 0;
11421 unsigned HOST_WIDE_INT cst0;
11423 /* Now we know that arg0 is (C + D) or (C - D) or
11424 -C and arg1 (M) is == (1LL << cst) - 1.
11425 Store C into PMOP[0] and D into PMOP[1]. */
11426 pmop[0] = TREE_OPERAND (arg0, 0);
11427 pmop[1] = NULL;
11428 if (TREE_CODE (arg0) != NEGATE_EXPR)
11430 pmop[1] = TREE_OPERAND (arg0, 1);
11431 which = 1;
11434 if (!host_integerp (TYPE_MAX_VALUE (TREE_TYPE (arg0)), 1)
11435 || (tree_low_cst (TYPE_MAX_VALUE (TREE_TYPE (arg0)), 1)
11436 & cst1) != cst1)
11437 which = -1;
11439 for (; which >= 0; which--)
11440 switch (TREE_CODE (pmop[which]))
11442 case BIT_AND_EXPR:
11443 case BIT_IOR_EXPR:
11444 case BIT_XOR_EXPR:
11445 if (TREE_CODE (TREE_OPERAND (pmop[which], 1))
11446 != INTEGER_CST)
11447 break;
11448 /* tree_low_cst not used, because we don't care about
11449 the upper bits. */
11450 cst0 = TREE_INT_CST_LOW (TREE_OPERAND (pmop[which], 1));
11451 cst0 &= cst1;
11452 if (TREE_CODE (pmop[which]) == BIT_AND_EXPR)
11454 if (cst0 != cst1)
11455 break;
11457 else if (cst0 != 0)
11458 break;
11459 /* If C or D is of the form (A & N) where
11460 (N & M) == M, or of the form (A | N) or
11461 (A ^ N) where (N & M) == 0, replace it with A. */
11462 pmop[which] = TREE_OPERAND (pmop[which], 0);
11463 break;
11464 case INTEGER_CST:
11465 /* If C or D is a N where (N & M) == 0, it can be
11466 omitted (assumed 0). */
11467 if ((TREE_CODE (arg0) == PLUS_EXPR
11468 || (TREE_CODE (arg0) == MINUS_EXPR && which == 0))
11469 && (TREE_INT_CST_LOW (pmop[which]) & cst1) == 0)
11470 pmop[which] = NULL;
11471 break;
11472 default:
11473 break;
11476 /* Only build anything new if we optimized one or both arguments
11477 above. */
11478 if (pmop[0] != TREE_OPERAND (arg0, 0)
11479 || (TREE_CODE (arg0) != NEGATE_EXPR
11480 && pmop[1] != TREE_OPERAND (arg0, 1)))
11482 tree utype = TREE_TYPE (arg0);
11483 if (! TYPE_OVERFLOW_WRAPS (TREE_TYPE (arg0)))
11485 /* Perform the operations in a type that has defined
11486 overflow behavior. */
11487 utype = unsigned_type_for (TREE_TYPE (arg0));
11488 if (pmop[0] != NULL)
11489 pmop[0] = fold_convert_loc (loc, utype, pmop[0]);
11490 if (pmop[1] != NULL)
11491 pmop[1] = fold_convert_loc (loc, utype, pmop[1]);
11494 if (TREE_CODE (arg0) == NEGATE_EXPR)
11495 tem = fold_build1_loc (loc, NEGATE_EXPR, utype, pmop[0]);
11496 else if (TREE_CODE (arg0) == PLUS_EXPR)
11498 if (pmop[0] != NULL && pmop[1] != NULL)
11499 tem = fold_build2_loc (loc, PLUS_EXPR, utype,
11500 pmop[0], pmop[1]);
11501 else if (pmop[0] != NULL)
11502 tem = pmop[0];
11503 else if (pmop[1] != NULL)
11504 tem = pmop[1];
11505 else
11506 return build_int_cst (type, 0);
11508 else if (pmop[0] == NULL)
11509 tem = fold_build1_loc (loc, NEGATE_EXPR, utype, pmop[1]);
11510 else
11511 tem = fold_build2_loc (loc, MINUS_EXPR, utype,
11512 pmop[0], pmop[1]);
11513 /* TEM is now the new binary +, - or unary - replacement. */
11514 tem = fold_build2_loc (loc, BIT_AND_EXPR, utype, tem,
11515 fold_convert_loc (loc, utype, arg1));
11516 return fold_convert_loc (loc, type, tem);
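/* Worked example (a sketch, constants hypothetical): with M == 3,
   i.e. (1 << 2) - 1,

       ((a & 7) + b) & 3  ->  (a + b) & 3   as (7 & 3) == 3
       ((a | 4) - b) & 3  ->  (a - b) & 3   as (4 & 3) == 0
       ((a ^ 4) + 8) & 3  ->  a & 3         as (8 & 3) == 0

   rebuilding the inner arithmetic in an unsigned type when signed
   overflow is not known to wrap. */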
11521 t1 = distribute_bit_expr (loc, code, type, arg0, arg1);
11522 if (t1 != NULL_TREE)
11523 return t1;
11524 /* Simplify ((int)c & 0377) into (int)c, if c is unsigned char. */
11525 if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg0) == NOP_EXPR
11526 && TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg0, 0))))
11528 unsigned int prec
11529 = TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg0, 0)));
11531 if (prec < BITS_PER_WORD && prec < HOST_BITS_PER_WIDE_INT
11532 && (~TREE_INT_CST_LOW (arg1)
11533 & (((HOST_WIDE_INT) 1 << prec) - 1)) == 0)
11534 return
11535 fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
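/* Example (illustrative only): for unsigned char c, ((int) c & 0377)
   folds to (int) c: the zero-extension already clears every bit above
   the low eight, and the 0377 mask keeps all eight of them. */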
11538 /* Convert (and (not arg0) (not arg1)) to (not (or (arg0) (arg1))).
11540 This results in more efficient code for machines without a NOR
11541 instruction. Combine will canonicalize to the first form
11542 which will allow use of NOR instructions provided by the
11543 backend if they exist. */
11544 if (TREE_CODE (arg0) == BIT_NOT_EXPR
11545 && TREE_CODE (arg1) == BIT_NOT_EXPR)
11547 return fold_build1_loc (loc, BIT_NOT_EXPR, type,
11548 build2 (BIT_IOR_EXPR, type,
11549 fold_convert_loc (loc, type,
11550 TREE_OPERAND (arg0, 0)),
11551 fold_convert_loc (loc, type,
11552 TREE_OPERAND (arg1, 0))));
11555 /* If arg0 is derived from the address of an object or function, we may
11556 be able to fold this expression using the object or function's
11557 alignment. */
11558 if (POINTER_TYPE_P (TREE_TYPE (arg0)) && host_integerp (arg1, 1))
11560 unsigned HOST_WIDE_INT modulus, residue;
11561 unsigned HOST_WIDE_INT low = TREE_INT_CST_LOW (arg1);
11563 modulus = get_pointer_modulus_and_residue (arg0, &residue,
11564 integer_onep (arg1));
11566 /* This works because modulus is a power of 2. If this weren't the
11567 case, we'd have to replace it by its greatest power-of-2
11568 divisor: modulus & -modulus. */
11569 if (low < modulus)
11570 return build_int_cst (type, residue & low);
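/* Illustrative sketch (identifiers hypothetical): if arg0 is &buf and
   buf is known to be 8-byte aligned, the helper reports modulus == 8
   and residue == 0, so masking with any constant below 8 -- e.g. the
   alignment test (&buf & 7) at tree level -- folds to 0. */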
11573 /* Fold (X << C1) & C2 into (X << C1) & (C2 | ((1 << C1) - 1))
11574 (X >> C1) & C2 into (X >> C1) & (C2 | ~((type) -1 >> C1))
11575 if the new mask might be further optimized. */
11576 if ((TREE_CODE (arg0) == LSHIFT_EXPR
11577 || TREE_CODE (arg0) == RSHIFT_EXPR)
11578 && host_integerp (TREE_OPERAND (arg0, 1), 1)
11579 && host_integerp (arg1, TYPE_UNSIGNED (TREE_TYPE (arg1)))
11580 && tree_low_cst (TREE_OPERAND (arg0, 1), 1)
11581 < TYPE_PRECISION (TREE_TYPE (arg0))
11582 && TYPE_PRECISION (TREE_TYPE (arg0)) <= HOST_BITS_PER_WIDE_INT
11583 && tree_low_cst (TREE_OPERAND (arg0, 1), 1) > 0)
11585 unsigned int shiftc = tree_low_cst (TREE_OPERAND (arg0, 1), 1);
11586 unsigned HOST_WIDE_INT mask
11587 = tree_low_cst (arg1, TYPE_UNSIGNED (TREE_TYPE (arg1)));
11588 unsigned HOST_WIDE_INT newmask, zerobits = 0;
11589 tree shift_type = TREE_TYPE (arg0);
11591 if (TREE_CODE (arg0) == LSHIFT_EXPR)
11592 zerobits = ((((unsigned HOST_WIDE_INT) 1) << shiftc) - 1);
11593 else if (TREE_CODE (arg0) == RSHIFT_EXPR
11594 && TYPE_PRECISION (TREE_TYPE (arg0))
11595 == GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (arg0))))
11597 unsigned int prec = TYPE_PRECISION (TREE_TYPE (arg0));
11598 tree arg00 = TREE_OPERAND (arg0, 0);
11599 /* See if more bits can be proven as zero because of
11600 zero extension. */
11601 if (TREE_CODE (arg00) == NOP_EXPR
11602 && TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg00, 0))))
11604 tree inner_type = TREE_TYPE (TREE_OPERAND (arg00, 0));
11605 if (TYPE_PRECISION (inner_type)
11606 == GET_MODE_BITSIZE (TYPE_MODE (inner_type))
11607 && TYPE_PRECISION (inner_type) < prec)
11609 prec = TYPE_PRECISION (inner_type);
11610 /* See if we can shorten the right shift. */
11611 if (shiftc < prec)
11612 shift_type = inner_type;
11615 zerobits = ~(unsigned HOST_WIDE_INT) 0;
11616 zerobits >>= HOST_BITS_PER_WIDE_INT - shiftc;
11617 zerobits <<= prec - shiftc;
11618 /* For an arithmetic shift, if the sign bit could be set, zerobits
11619 can actually contain sign bits, so no transformation is
11620 possible unless MASK masks them all away. In that
11621 case the shift needs to be converted into a logical shift. */
11622 if (!TYPE_UNSIGNED (TREE_TYPE (arg0))
11623 && prec == TYPE_PRECISION (TREE_TYPE (arg0)))
11625 if ((mask & zerobits) == 0)
11626 shift_type = unsigned_type_for (TREE_TYPE (arg0));
11627 else
11628 zerobits = 0;
11632 /* ((X << 16) & 0xff00) is (X, 0). */
11633 if ((mask & zerobits) == mask)
11634 return omit_one_operand_loc (loc, type,
11635 build_int_cst (type, 0), arg0);
11637 newmask = mask | zerobits;
11638 if (newmask != mask && (newmask & (newmask + 1)) == 0)
11640 unsigned int prec;
11642 /* Only do the transformation if NEWMASK is some integer
11643 mode's mask. */
11644 for (prec = BITS_PER_UNIT;
11645 prec < HOST_BITS_PER_WIDE_INT; prec <<= 1)
11646 if (newmask == (((unsigned HOST_WIDE_INT) 1) << prec) - 1)
11647 break;
11648 if (prec < HOST_BITS_PER_WIDE_INT
11649 || newmask == ~(unsigned HOST_WIDE_INT) 0)
11651 tree newmaskt;
11653 if (shift_type != TREE_TYPE (arg0))
11655 tem = fold_build2_loc (loc, TREE_CODE (arg0), shift_type,
11656 fold_convert_loc (loc, shift_type,
11657 TREE_OPERAND (arg0, 0)),
11658 TREE_OPERAND (arg0, 1));
11659 tem = fold_convert_loc (loc, type, tem);
11661 else
11662 tem = op0;
11663 newmaskt = build_int_cst_type (TREE_TYPE (op1), newmask);
11664 if (!tree_int_cst_equal (newmaskt, arg1))
11665 return fold_build2_loc (loc, BIT_AND_EXPR, type, tem, newmaskt);
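/* Worked example (constants illustrative): for 32-bit unsigned x,
   (x << 4) & 0xf3 has zerobits == 0xf, so the new mask is
   0xf3 | 0xf == 0xff -- a whole-mode mask -- and the expression is
   rewritten as (x << 4) & 0xff, which later folding may simplify
   further; ((x << 16) & 0xff00) is already known above to be 0. */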
11670 goto associate;
11672 case RDIV_EXPR:
11673 /* Don't touch a floating-point divide by zero unless the mode
11674 of the constant can represent infinity. */
11675 if (TREE_CODE (arg1) == REAL_CST
11676 && !MODE_HAS_INFINITIES (TYPE_MODE (TREE_TYPE (arg1)))
11677 && real_zerop (arg1))
11678 return NULL_TREE;
11680 /* Optimize A / A to 1.0 if we don't care about
11681 NaNs or Infinities. Skip the transformation
11682 for non-real operands. */
11683 if (SCALAR_FLOAT_TYPE_P (TREE_TYPE (arg0))
11684 && ! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0)))
11685 && ! HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (arg0)))
11686 && operand_equal_p (arg0, arg1, 0))
11688 tree r = build_real (TREE_TYPE (arg0), dconst1);
11690 return omit_two_operands_loc (loc, type, r, arg0, arg1);
11693 /* The complex version of the above A / A optimization. */
11694 if (COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0))
11695 && operand_equal_p (arg0, arg1, 0))
11697 tree elem_type = TREE_TYPE (TREE_TYPE (arg0));
11698 if (! HONOR_NANS (TYPE_MODE (elem_type))
11699 && ! HONOR_INFINITIES (TYPE_MODE (elem_type)))
11701 tree r = build_real (elem_type, dconst1);
11702 /* omit_two_operands will call fold_convert for us. */
11703 return omit_two_operands_loc (loc, type, r, arg0, arg1);
11707 /* (-A) / (-B) -> A / B */
11708 if (TREE_CODE (arg0) == NEGATE_EXPR && negate_expr_p (arg1))
11709 return fold_build2_loc (loc, RDIV_EXPR, type,
11710 TREE_OPERAND (arg0, 0),
11711 negate_expr (arg1));
11712 if (TREE_CODE (arg1) == NEGATE_EXPR && negate_expr_p (arg0))
11713 return fold_build2_loc (loc, RDIV_EXPR, type,
11714 negate_expr (arg0),
11715 TREE_OPERAND (arg1, 0));
11717 /* In IEEE floating point, x/1 is not equivalent to x for snans. */
11718 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
11719 && real_onep (arg1))
11720 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
11722 /* In IEEE floating point, x/-1 is not equivalent to -x for snans. */
11723 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
11724 && real_minus_onep (arg1))
11725 return non_lvalue_loc (loc, fold_convert_loc (loc, type,
11726 negate_expr (arg0)));
11728 /* If ARG1 is a constant, we can convert this to a multiply by the
11729 reciprocal. This does not have the same rounding properties,
11730 so only do this if -freciprocal-math. We can actually
11731 always safely do it if ARG1 is a power of two, but it's hard to
11732 tell if it is or not in a portable manner. */
11733 if (TREE_CODE (arg1) == REAL_CST)
11735 if (flag_reciprocal_math
11736 && 0 != (tem = const_binop (code, build_real (type, dconst1),
11737 arg1)))
11738 return fold_build2_loc (loc, MULT_EXPR, type, arg0, tem);
11739 /* Find the reciprocal if optimizing and the result is exact. */
11740 if (optimize)
11742 REAL_VALUE_TYPE r;
11743 r = TREE_REAL_CST (arg1);
11744 if (exact_real_inverse (TYPE_MODE(TREE_TYPE(arg0)), &r))
11746 tem = build_real (type, r);
11747 return fold_build2_loc (loc, MULT_EXPR, type,
11748 fold_convert_loc (loc, type, arg0), tem);
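/* Examples (illustrative only): with -freciprocal-math, x / 5.0
   becomes x * (1.0 / 5.0) even though 0.2 rounds; x / 4.0 becomes
   x * 0.25 under plain optimization, because exact_real_inverse
   confirms that the reciprocal of a power of two is exact. */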
11752 /* Convert A/B/C to A/(B*C). */
11753 if (flag_reciprocal_math
11754 && TREE_CODE (arg0) == RDIV_EXPR)
11755 return fold_build2_loc (loc, RDIV_EXPR, type, TREE_OPERAND (arg0, 0),
11756 fold_build2_loc (loc, MULT_EXPR, type,
11757 TREE_OPERAND (arg0, 1), arg1));
11759 /* Convert A/(B/C) to (A/B)*C. */
11760 if (flag_reciprocal_math
11761 && TREE_CODE (arg1) == RDIV_EXPR)
11762 return fold_build2_loc (loc, MULT_EXPR, type,
11763 fold_build2_loc (loc, RDIV_EXPR, type, arg0,
11764 TREE_OPERAND (arg1, 0)),
11765 TREE_OPERAND (arg1, 1));
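/* Examples (illustrative only): a / b / c is reassociated as
   a / (b * c), and a / (b / c) as (a / b) * c, each trading a division
   for a multiplication; both need -freciprocal-math because the
   intermediate roundings differ. */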
11767 /* Convert C1/(X*C2) into (C1/C2)/X. */
11768 if (flag_reciprocal_math
11769 && TREE_CODE (arg1) == MULT_EXPR
11770 && TREE_CODE (arg0) == REAL_CST
11771 && TREE_CODE (TREE_OPERAND (arg1, 1)) == REAL_CST)
11773 tree tem = const_binop (RDIV_EXPR, arg0,
11774 TREE_OPERAND (arg1, 1));
11775 if (tem)
11776 return fold_build2_loc (loc, RDIV_EXPR, type, tem,
11777 TREE_OPERAND (arg1, 0));
11780 if (flag_unsafe_math_optimizations)
11782 enum built_in_function fcode0 = builtin_mathfn_code (arg0);
11783 enum built_in_function fcode1 = builtin_mathfn_code (arg1);
11785 /* Optimize sin(x)/cos(x) as tan(x). */
11786 if (((fcode0 == BUILT_IN_SIN && fcode1 == BUILT_IN_COS)
11787 || (fcode0 == BUILT_IN_SINF && fcode1 == BUILT_IN_COSF)
11788 || (fcode0 == BUILT_IN_SINL && fcode1 == BUILT_IN_COSL))
11789 && operand_equal_p (CALL_EXPR_ARG (arg0, 0),
11790 CALL_EXPR_ARG (arg1, 0), 0))
11792 tree tanfn = mathfn_built_in (type, BUILT_IN_TAN);
11794 if (tanfn != NULL_TREE)
11795 return build_call_expr_loc (loc, tanfn, 1, CALL_EXPR_ARG (arg0, 0));
11798 /* Optimize cos(x)/sin(x) as 1.0/tan(x). */
11799 if (((fcode0 == BUILT_IN_COS && fcode1 == BUILT_IN_SIN)
11800 || (fcode0 == BUILT_IN_COSF && fcode1 == BUILT_IN_SINF)
11801 || (fcode0 == BUILT_IN_COSL && fcode1 == BUILT_IN_SINL))
11802 && operand_equal_p (CALL_EXPR_ARG (arg0, 0),
11803 CALL_EXPR_ARG (arg1, 0), 0))
11805 tree tanfn = mathfn_built_in (type, BUILT_IN_TAN);
11807 if (tanfn != NULL_TREE)
11809 tree tmp = build_call_expr_loc (loc, tanfn, 1,
11810 CALL_EXPR_ARG (arg0, 0));
11811 return fold_build2_loc (loc, RDIV_EXPR, type,
11812 build_real (type, dconst1), tmp);
11816 /* Optimize sin(x)/tan(x) as cos(x) if we don't care about
11817 NaNs or Infinities. */
11818 if (((fcode0 == BUILT_IN_SIN && fcode1 == BUILT_IN_TAN)
11819 || (fcode0 == BUILT_IN_SINF && fcode1 == BUILT_IN_TANF)
11820 || (fcode0 == BUILT_IN_SINL && fcode1 == BUILT_IN_TANL)))
11822 tree arg00 = CALL_EXPR_ARG (arg0, 0);
11823 tree arg01 = CALL_EXPR_ARG (arg1, 0);
11825 if (! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg00)))
11826 && ! HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (arg00)))
11827 && operand_equal_p (arg00, arg01, 0))
11829 tree cosfn = mathfn_built_in (type, BUILT_IN_COS);
11831 if (cosfn != NULL_TREE)
11832 return build_call_expr_loc (loc, cosfn, 1, arg00);
11836 /* Optimize tan(x)/sin(x) as 1.0/cos(x) if we don't care about
11837 NaNs or Infinities. */
11838 if (((fcode0 == BUILT_IN_TAN && fcode1 == BUILT_IN_SIN)
11839 || (fcode0 == BUILT_IN_TANF && fcode1 == BUILT_IN_SINF)
11840 || (fcode0 == BUILT_IN_TANL && fcode1 == BUILT_IN_SINL)))
11842 tree arg00 = CALL_EXPR_ARG (arg0, 0);
11843 tree arg01 = CALL_EXPR_ARG (arg1, 0);
11845 if (! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg00)))
11846 && ! HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (arg00)))
11847 && operand_equal_p (arg00, arg01, 0))
11849 tree cosfn = mathfn_built_in (type, BUILT_IN_COS);
11851 if (cosfn != NULL_TREE)
11853 tree tmp = build_call_expr_loc (loc, cosfn, 1, arg00);
11854 return fold_build2_loc (loc, RDIV_EXPR, type,
11855 build_real (type, dconst1),
11856 tmp);
11861 /* Optimize pow(x,c)/x as pow(x,c-1). */
11862 if (fcode0 == BUILT_IN_POW
11863 || fcode0 == BUILT_IN_POWF
11864 || fcode0 == BUILT_IN_POWL)
11866 tree arg00 = CALL_EXPR_ARG (arg0, 0);
11867 tree arg01 = CALL_EXPR_ARG (arg0, 1);
11868 if (TREE_CODE (arg01) == REAL_CST
11869 && !TREE_OVERFLOW (arg01)
11870 && operand_equal_p (arg1, arg00, 0))
11872 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
11873 REAL_VALUE_TYPE c;
11874 tree arg;
11876 c = TREE_REAL_CST (arg01);
11877 real_arithmetic (&c, MINUS_EXPR, &c, &dconst1);
11878 arg = build_real (type, c);
11879 return build_call_expr_loc (loc, powfn, 2, arg1, arg);
11883 /* Optimize a/root(b/c) into a*root(c/b). */
11884 if (BUILTIN_ROOT_P (fcode1))
11886 tree rootarg = CALL_EXPR_ARG (arg1, 0);
11888 if (TREE_CODE (rootarg) == RDIV_EXPR)
11890 tree rootfn = TREE_OPERAND (CALL_EXPR_FN (arg1), 0);
11891 tree b = TREE_OPERAND (rootarg, 0);
11892 tree c = TREE_OPERAND (rootarg, 1);
11894 tree tmp = fold_build2_loc (loc, RDIV_EXPR, type, c, b);
11896 tmp = build_call_expr_loc (loc, rootfn, 1, tmp);
11897 return fold_build2_loc (loc, MULT_EXPR, type, arg0, tmp);
11901 /* Optimize x/expN(y) into x*expN(-y). */
11902 if (BUILTIN_EXPONENT_P (fcode1))
11904 tree expfn = TREE_OPERAND (CALL_EXPR_FN (arg1), 0);
11905 tree arg = negate_expr (CALL_EXPR_ARG (arg1, 0));
11906 arg1 = build_call_expr_loc (loc,
11907 expfn, 1,
11908 fold_convert_loc (loc, type, arg));
11909 return fold_build2_loc (loc, MULT_EXPR, type, arg0, arg1);
11912 /* Optimize x/pow(y,z) into x*pow(y,-z). */
11913 if (fcode1 == BUILT_IN_POW
11914 || fcode1 == BUILT_IN_POWF
11915 || fcode1 == BUILT_IN_POWL)
11917 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg1), 0);
11918 tree arg10 = CALL_EXPR_ARG (arg1, 0);
11919 tree arg11 = CALL_EXPR_ARG (arg1, 1);
11920 tree neg11 = fold_convert_loc (loc, type,
11921 negate_expr (arg11));
11922 arg1 = build_call_expr_loc (loc, powfn, 2, arg10, neg11);
11923 return fold_build2_loc (loc, MULT_EXPR, type, arg0, arg1);
11926 return NULL_TREE;
11928 case TRUNC_DIV_EXPR:
11929 /* Optimize (X & (-A)) / A where A is a power of 2,
11930 to X >> log2(A) */
11931 if (TREE_CODE (arg0) == BIT_AND_EXPR
11932 && !TYPE_UNSIGNED (type) && TREE_CODE (arg1) == INTEGER_CST
11933 && integer_pow2p (arg1) && tree_int_cst_sgn (arg1) > 0)
11935 tree sum = fold_binary_loc (loc, PLUS_EXPR, TREE_TYPE (arg1),
11936 arg1, TREE_OPERAND (arg0, 1));
11937 if (sum && integer_zerop (sum)) {
11938 unsigned long pow2;
11940 if (TREE_INT_CST_LOW (arg1))
11941 pow2 = exact_log2 (TREE_INT_CST_LOW (arg1));
11942 else
11943 pow2 = exact_log2 (TREE_INT_CST_HIGH (arg1))
11944 + HOST_BITS_PER_WIDE_INT;
11946 return fold_build2_loc (loc, RSHIFT_EXPR, type,
11947 TREE_OPERAND (arg0, 0),
11948 build_int_cst (integer_type_node, pow2));
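/* Worked example (a sketch): for signed x, (x & -8) / 8 becomes
   x >> 3, since 8 + (-8) sums to zero -- the mask is exactly the
   negated divisor -- and log2 (8) == 3 supplies the shift count. */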
11952 /* Fall thru */
11954 case FLOOR_DIV_EXPR:
11955 /* Simplify A / (B << N) where A and B are positive and B is
11956 a power of 2, to A >> (N + log2(B)). */
11957 strict_overflow_p = false;
11958 if (TREE_CODE (arg1) == LSHIFT_EXPR
11959 && (TYPE_UNSIGNED (type)
11960 || tree_expr_nonnegative_warnv_p (op0, &strict_overflow_p)))
11962 tree sval = TREE_OPERAND (arg1, 0);
11963 if (integer_pow2p (sval) && tree_int_cst_sgn (sval) > 0)
11965 tree sh_cnt = TREE_OPERAND (arg1, 1);
11966 unsigned long pow2;
11968 if (TREE_INT_CST_LOW (sval))
11969 pow2 = exact_log2 (TREE_INT_CST_LOW (sval));
11970 else
11971 pow2 = exact_log2 (TREE_INT_CST_HIGH (sval))
11972 + HOST_BITS_PER_WIDE_INT;
11974 if (strict_overflow_p)
11975 fold_overflow_warning (("assuming signed overflow does not "
11976 "occur when simplifying A / (B << N)"),
11977 WARN_STRICT_OVERFLOW_MISC);
11979 sh_cnt = fold_build2_loc (loc, PLUS_EXPR, TREE_TYPE (sh_cnt),
11980 sh_cnt,
11981 build_int_cst (TREE_TYPE (sh_cnt),
11982 pow2));
11983 return fold_build2_loc (loc, RSHIFT_EXPR, type,
11984 fold_convert_loc (loc, type, arg0), sh_cnt);
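/* Example (illustrative only): for unsigned a, a / (4u << n) becomes
   a >> (n + 2), folding log2 (4) into the shift count instead of
   materializing the shifted divisor. */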
11988 /* For unsigned integral types, FLOOR_DIV_EXPR is the same as
11989 TRUNC_DIV_EXPR. Rewrite into the latter in this case. */
11990 if (INTEGRAL_TYPE_P (type)
11991 && TYPE_UNSIGNED (type)
11992 && code == FLOOR_DIV_EXPR)
11993 return fold_build2_loc (loc, TRUNC_DIV_EXPR, type, op0, op1);
11995 /* Fall thru */
11997 case ROUND_DIV_EXPR:
11998 case CEIL_DIV_EXPR:
11999 case EXACT_DIV_EXPR:
12000 if (integer_onep (arg1))
12001 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
12002 if (integer_zerop (arg1))
12003 return NULL_TREE;
12004 /* X / -1 is -X. */
12005 if (!TYPE_UNSIGNED (type)
12006 && TREE_CODE (arg1) == INTEGER_CST
12007 && TREE_INT_CST_LOW (arg1) == (unsigned HOST_WIDE_INT) -1
12008 && TREE_INT_CST_HIGH (arg1) == -1)
12009 return fold_convert_loc (loc, type, negate_expr (arg0));
12011 /* Convert -A / -B to A / B when the type is signed and overflow is
12012 undefined. */
12013 if ((!INTEGRAL_TYPE_P (type) || TYPE_OVERFLOW_UNDEFINED (type))
12014 && TREE_CODE (arg0) == NEGATE_EXPR
12015 && negate_expr_p (arg1))
12017 if (INTEGRAL_TYPE_P (type))
12018 fold_overflow_warning (("assuming signed overflow does not occur "
12019 "when distributing negation across "
12020 "division"),
12021 WARN_STRICT_OVERFLOW_MISC);
12022 return fold_build2_loc (loc, code, type,
12023 fold_convert_loc (loc, type,
12024 TREE_OPERAND (arg0, 0)),
12025 fold_convert_loc (loc, type,
12026 negate_expr (arg1)));
12028 if ((!INTEGRAL_TYPE_P (type) || TYPE_OVERFLOW_UNDEFINED (type))
12029 && TREE_CODE (arg1) == NEGATE_EXPR
12030 && negate_expr_p (arg0))
12032 if (INTEGRAL_TYPE_P (type))
12033 fold_overflow_warning (("assuming signed overflow does not occur "
12034 "when distributing negation across "
12035 "division"),
12036 WARN_STRICT_OVERFLOW_MISC);
12037 return fold_build2_loc (loc, code, type,
12038 fold_convert_loc (loc, type,
12039 negate_expr (arg0)),
12040 fold_convert_loc (loc, type,
12041 TREE_OPERAND (arg1, 0)));
12044 /* If arg0 is a multiple of arg1, then rewrite to the fastest div
12045 operation, EXACT_DIV_EXPR.
12047 Note that only CEIL_DIV_EXPR and FLOOR_DIV_EXPR are rewritten now.
12048 At one time others generated faster code; it's not clear whether they do
12049 after the last round of changes to the DIV code in expmed.c. */
12050 if ((code == CEIL_DIV_EXPR || code == FLOOR_DIV_EXPR)
12051 && multiple_of_p (type, arg0, arg1))
12052 return fold_build2_loc (loc, EXACT_DIV_EXPR, type, arg0, arg1);
12054 strict_overflow_p = false;
12055 if (TREE_CODE (arg1) == INTEGER_CST
12056 && 0 != (tem = extract_muldiv (op0, arg1, code, NULL_TREE,
12057 &strict_overflow_p)))
12059 if (strict_overflow_p)
12060 fold_overflow_warning (("assuming signed overflow does not occur "
12061 "when simplifying division"),
12062 WARN_STRICT_OVERFLOW_MISC);
12063 return fold_convert_loc (loc, type, tem);
12066 return NULL_TREE;
12068 case CEIL_MOD_EXPR:
12069 case FLOOR_MOD_EXPR:
12070 case ROUND_MOD_EXPR:
12071 case TRUNC_MOD_EXPR:
12072 /* X % 1 is always zero, but be sure to preserve any side
12073 effects in X. */
12074 if (integer_onep (arg1))
12075 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
12077 /* For X % 0, return X % 0 unchanged so that we can get the
12078 proper warnings and errors. */
12079 if (integer_zerop (arg1))
12080 return NULL_TREE;
12082 /* 0 % X is always zero, but be sure to preserve any side
12083 effects in X. Place this after checking for X == 0. */
12084 if (integer_zerop (arg0))
12085 return omit_one_operand_loc (loc, type, integer_zero_node, arg1);
12087 /* X % -1 is zero. */
12088 if (!TYPE_UNSIGNED (type)
12089 && TREE_CODE (arg1) == INTEGER_CST
12090 && TREE_INT_CST_LOW (arg1) == (unsigned HOST_WIDE_INT) -1
12091 && TREE_INT_CST_HIGH (arg1) == -1)
12092 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
12094 /* X % -C is the same as X % C. */
12095 if (code == TRUNC_MOD_EXPR
12096 && !TYPE_UNSIGNED (type)
12097 && TREE_CODE (arg1) == INTEGER_CST
12098 && !TREE_OVERFLOW (arg1)
12099 && TREE_INT_CST_HIGH (arg1) < 0
12100 && !TYPE_OVERFLOW_TRAPS (type)
12101 /* Avoid this transformation if C is INT_MIN, i.e. C == -C. */
12102 && !sign_bit_p (arg1, arg1))
12103 return fold_build2_loc (loc, code, type,
12104 fold_convert_loc (loc, type, arg0),
12105 fold_convert_loc (loc, type,
12106 negate_expr (arg1)));
12108 /* X % -Y is the same as X % Y. */
12109 if (code == TRUNC_MOD_EXPR
12110 && !TYPE_UNSIGNED (type)
12111 && TREE_CODE (arg1) == NEGATE_EXPR
12112 && !TYPE_OVERFLOW_TRAPS (type))
12113 return fold_build2_loc (loc, code, type, fold_convert_loc (loc, type, arg0),
12114 fold_convert_loc (loc, type,
12115 TREE_OPERAND (arg1, 0)));
12117 strict_overflow_p = false;
12118 if (TREE_CODE (arg1) == INTEGER_CST
12119 && 0 != (tem = extract_muldiv (op0, arg1, code, NULL_TREE,
12120 &strict_overflow_p)))
12122 if (strict_overflow_p)
12123 fold_overflow_warning (("assuming signed overflow does not occur "
12124 "when simplifying modulus"),
12125 WARN_STRICT_OVERFLOW_MISC);
12126 return fold_convert_loc (loc, type, tem);
12129 /* Optimize TRUNC_MOD_EXPR by a power of two into a BIT_AND_EXPR,
12130 i.e. "X % C" into "X & (C - 1)", if X and C are positive. */
12131 if ((code == TRUNC_MOD_EXPR || code == FLOOR_MOD_EXPR)
12132 && (TYPE_UNSIGNED (type)
12133 || tree_expr_nonnegative_warnv_p (op0, &strict_overflow_p)))
12135 tree c = arg1;
12136 /* Also optimize A % (C << N) where C is a power of 2,
12137 to A & ((C << N) - 1). */
12138 if (TREE_CODE (arg1) == LSHIFT_EXPR)
12139 c = TREE_OPERAND (arg1, 0);
12141 if (integer_pow2p (c) && tree_int_cst_sgn (c) > 0)
12143 tree mask
12144 = fold_build2_loc (loc, MINUS_EXPR, TREE_TYPE (arg1), arg1,
12145 build_int_cst (TREE_TYPE (arg1), 1));
12146 if (strict_overflow_p)
12147 fold_overflow_warning (("assuming signed overflow does not "
12148 "occur when simplifying "
12149 "X % (power of two)"),
12150 WARN_STRICT_OVERFLOW_MISC);
12151 return fold_build2_loc (loc, BIT_AND_EXPR, type,
12152 fold_convert_loc (loc, type, arg0),
12153 fold_convert_loc (loc, type, mask));
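/* Examples (illustrative only): for unsigned x, x % 16 folds to
   x & 15, and x % (8 << n) to x & ((8 << n) - 1). For signed x the
   fold requires x to be provably non-negative, warning if that proof
   assumed that signed overflow does not occur. */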
12157 return NULL_TREE;
12159 case LROTATE_EXPR:
12160 case RROTATE_EXPR:
12161 if (integer_all_onesp (arg0))
12162 return omit_one_operand_loc (loc, type, arg0, arg1);
12163 goto shift;
12165 case RSHIFT_EXPR:
12166 /* Optimize -1 >> x for arithmetic right shifts. */
12167 if (integer_all_onesp (arg0) && !TYPE_UNSIGNED (type)
12168 && tree_expr_nonnegative_p (arg1))
12169 return omit_one_operand_loc (loc, type, arg0, arg1);
12170 /* ... fall through ... */
12172 case LSHIFT_EXPR:
12173 shift:
12174 if (integer_zerop (arg1))
12175 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
12176 if (integer_zerop (arg0))
12177 return omit_one_operand_loc (loc, type, arg0, arg1);
12179 /* Since negative shift count is not well-defined,
12180 don't try to compute it in the compiler. */
12181 if (TREE_CODE (arg1) == INTEGER_CST && tree_int_cst_sgn (arg1) < 0)
12182 return NULL_TREE;
12184 /* Turn (a OP c1) OP c2 into a OP (c1+c2). */
12185 if (TREE_CODE (op0) == code && host_integerp (arg1, false)
12186 && TREE_INT_CST_LOW (arg1) < TYPE_PRECISION (type)
12187 && host_integerp (TREE_OPERAND (arg0, 1), false)
12188 && TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1)) < TYPE_PRECISION (type))
12190 HOST_WIDE_INT low = (TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1))
12191 + TREE_INT_CST_LOW (arg1));
12193 /* Deal with a OP (c1 + c2) being undefined but (a OP c1) OP c2
12194 being well defined. */
12195 if (low >= TYPE_PRECISION (type))
12197 if (code == LROTATE_EXPR || code == RROTATE_EXPR)
12198 low = low % TYPE_PRECISION (type);
12199 else if (TYPE_UNSIGNED (type) || code == LSHIFT_EXPR)
12200 return omit_one_operand_loc (loc, type, build_int_cst (type, 0),
12201 TREE_OPERAND (arg0, 0));
12202 else
12203 low = TYPE_PRECISION (type) - 1;
12206 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0),
12207 build_int_cst (type, low));
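/* Worked example (a sketch): for 32-bit x, (x << 3) << 5 becomes
   x << 8. If the counts add up past the precision, say
   (x << 20) << 14, the result is the well-defined constant 0 for
   unsigned or left shifts, while rotate counts are reduced modulo the
   precision. */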
12210 /* Transform (x >> c) << c into x & (-1<<c), or transform (x << c) >> c
12211 into x & ((unsigned)-1 >> c) for unsigned types. */
12212 if (((code == LSHIFT_EXPR && TREE_CODE (arg0) == RSHIFT_EXPR)
12213 || (TYPE_UNSIGNED (type)
12214 && code == RSHIFT_EXPR && TREE_CODE (arg0) == LSHIFT_EXPR))
12215 && host_integerp (arg1, false)
12216 && TREE_INT_CST_LOW (arg1) < TYPE_PRECISION (type)
12217 && host_integerp (TREE_OPERAND (arg0, 1), false)
12218 && TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1)) < TYPE_PRECISION (type))
12220 HOST_WIDE_INT low0 = TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1));
12221 HOST_WIDE_INT low1 = TREE_INT_CST_LOW (arg1);
12222 tree lshift;
12223 tree arg00;
12225 if (low0 == low1)
12227 arg00 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
12229 lshift = build_int_cst (type, -1);
12230 lshift = int_const_binop (code, lshift, arg1);
12232 return fold_build2_loc (loc, BIT_AND_EXPR, type, arg00, lshift);
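/* Examples (illustrative only): (x >> 4) << 4 becomes x & (-1 << 4),
   clearing the low four bits; for unsigned x, (x << 4) >> 4 becomes
   x & ((unsigned) -1 >> 4), clearing the high four bits instead. */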
12236 /* Rewrite an LROTATE_EXPR by a constant into an
12237 RROTATE_EXPR by a new constant. */
12238 if (code == LROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST)
12240 tree tem = build_int_cst (TREE_TYPE (arg1),
12241 TYPE_PRECISION (type));
12242 tem = const_binop (MINUS_EXPR, tem, arg1);
12243 return fold_build2_loc (loc, RROTATE_EXPR, type, op0, tem);
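/* For illustration: on a 32-bit type a rotate left by 5 becomes a
   rotate right by 32 - 5 == 27, so later code only needs to handle
   one rotate direction for constant counts. */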
12246 /* If we have a rotate of a bit operation with the rotate count and
12247 the second operand of the bit operation both constant,
12248 permute the two operations. */
12249 if (code == RROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST
12250 && (TREE_CODE (arg0) == BIT_AND_EXPR
12251 || TREE_CODE (arg0) == BIT_IOR_EXPR
12252 || TREE_CODE (arg0) == BIT_XOR_EXPR)
12253 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
12254 return fold_build2_loc (loc, TREE_CODE (arg0), type,
12255 fold_build2_loc (loc, code, type,
12256 TREE_OPERAND (arg0, 0), arg1),
12257 fold_build2_loc (loc, code, type,
12258 TREE_OPERAND (arg0, 1), arg1));
12260 /* Two consecutive rotates adding up to the precision of the
12261 type can be ignored. */
12262 if (code == RROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST
12263 && TREE_CODE (arg0) == RROTATE_EXPR
12264 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
12265 && TREE_INT_CST_HIGH (arg1) == 0
12266 && TREE_INT_CST_HIGH (TREE_OPERAND (arg0, 1)) == 0
12267 && ((TREE_INT_CST_LOW (arg1)
12268 + TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1)))
12269 == (unsigned int) TYPE_PRECISION (type)))
12270 return TREE_OPERAND (arg0, 0);
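/* For illustration: rotating right by 12 and then by 20 on a 32-bit
   type moves every bit 32 positions, i.e. back to its original place,
   so the operand is returned unchanged. */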
12272 /* Fold (X & C2) << C1 into (X << C1) & (C2 << C1)
12273 (X & C2) >> C1 into (X >> C1) & (C2 >> C1)
12274 if the latter can be further optimized. */
12275 if ((code == LSHIFT_EXPR || code == RSHIFT_EXPR)
12276 && TREE_CODE (arg0) == BIT_AND_EXPR
12277 && TREE_CODE (arg1) == INTEGER_CST
12278 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
12280 tree mask = fold_build2_loc (loc, code, type,
12281 fold_convert_loc (loc, type,
12282 TREE_OPERAND (arg0, 1)),
12283 arg1);
12284 tree shift = fold_build2_loc (loc, code, type,
12285 fold_convert_loc (loc, type,
12286 TREE_OPERAND (arg0, 0)),
12287 arg1);
12288 tem = fold_binary_loc (loc, BIT_AND_EXPR, type, shift, mask);
12289 if (tem)
12290 return tem;
12293 return NULL_TREE;
12295 case MIN_EXPR:
12296 if (operand_equal_p (arg0, arg1, 0))
12297 return omit_one_operand_loc (loc, type, arg0, arg1);
12298 if (INTEGRAL_TYPE_P (type)
12299 && operand_equal_p (arg1, TYPE_MIN_VALUE (type), OEP_ONLY_CONST))
12300 return omit_one_operand_loc (loc, type, arg1, arg0);
12301 tem = fold_minmax (loc, MIN_EXPR, type, arg0, arg1);
12302 if (tem)
12303 return tem;
12304 goto associate;
12306 case MAX_EXPR:
12307 if (operand_equal_p (arg0, arg1, 0))
12308 return omit_one_operand_loc (loc, type, arg0, arg1);
12309 if (INTEGRAL_TYPE_P (type)
12310 && TYPE_MAX_VALUE (type)
12311 && operand_equal_p (arg1, TYPE_MAX_VALUE (type), OEP_ONLY_CONST))
12312 return omit_one_operand_loc (loc, type, arg1, arg0);
12313 tem = fold_minmax (loc, MAX_EXPR, type, arg0, arg1);
12314 if (tem)
12315 return tem;
12316 goto associate;
12318 case TRUTH_ANDIF_EXPR:
12319 /* Note that the operands of this must be ints
12320 and their values must be 0 or 1.
12321 ("true" is a fixed value perhaps depending on the language.) */
12322 /* If first arg is constant zero, return it. */
12323 if (integer_zerop (arg0))
12324 return fold_convert_loc (loc, type, arg0);
12325 case TRUTH_AND_EXPR:
12326 /* If either arg is constant true, drop it. */
12327 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
12328 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
12329 if (TREE_CODE (arg1) == INTEGER_CST && ! integer_zerop (arg1)
12330 /* Preserve sequence points. */
12331 && (code != TRUTH_ANDIF_EXPR || ! TREE_SIDE_EFFECTS (arg0)))
12332 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
12333 /* If second arg is constant zero, result is zero, but first arg
12334 must be evaluated. */
12335 if (integer_zerop (arg1))
12336 return omit_one_operand_loc (loc, type, arg1, arg0);
12337 /* Likewise for first arg, but note that only the TRUTH_AND_EXPR
12338 case will be handled here. */
12339 if (integer_zerop (arg0))
12340 return omit_one_operand_loc (loc, type, arg0, arg1);
12342 /* !X && X is always false. */
12343 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
12344 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
12345 return omit_one_operand_loc (loc, type, integer_zero_node, arg1);
12346 /* X && !X is always false. */
12347 if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
12348 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
12349 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
12351 /* A < X && A + 1 > Y ==> A < X && A >= Y. Normally A + 1 > Y
12352 means A >= Y && A != MAX, but in this case we know that
12353 A < X <= MAX. */
12355 if (!TREE_SIDE_EFFECTS (arg0)
12356 && !TREE_SIDE_EFFECTS (arg1))
12358 tem = fold_to_nonsharp_ineq_using_bound (loc, arg0, arg1);
12359 if (tem && !operand_equal_p (tem, arg0, 0))
12360 return fold_build2_loc (loc, code, type, tem, arg1);
12362 tem = fold_to_nonsharp_ineq_using_bound (loc, arg1, arg0);
12363 if (tem && !operand_equal_p (tem, arg1, 0))
12364 return fold_build2_loc (loc, code, type, arg0, tem);
12367 if ((tem = fold_truth_andor (loc, code, type, arg0, arg1, op0, op1))
12368 != NULL_TREE)
12369 return tem;
12371 return NULL_TREE;
12373 case TRUTH_ORIF_EXPR:
12374 /* Note that the operands of this must be ints
12375 and their values must be 0 or 1.
12376 ("true" is a fixed value perhaps depending on the language.) */
12377 /* If first arg is constant true, return it. */
12378 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
12379 return fold_convert_loc (loc, type, arg0);
12380 case TRUTH_OR_EXPR:
12381 /* If either arg is constant zero, drop it. */
12382 if (TREE_CODE (arg0) == INTEGER_CST && integer_zerop (arg0))
12383 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
12384 if (TREE_CODE (arg1) == INTEGER_CST && integer_zerop (arg1)
12385 /* Preserve sequence points. */
12386 && (code != TRUTH_ORIF_EXPR || ! TREE_SIDE_EFFECTS (arg0)))
12387 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
12388 /* If second arg is constant true, result is true, but we must
12389 evaluate first arg. */
12390 if (TREE_CODE (arg1) == INTEGER_CST && ! integer_zerop (arg1))
12391 return omit_one_operand_loc (loc, type, arg1, arg0);
12392 /* Likewise for first arg, but note this only occurs here for
12393 TRUTH_OR_EXPR. */
12394 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
12395 return omit_one_operand_loc (loc, type, arg0, arg1);
12397 /* !X || X is always true. */
12398 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
12399 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
12400 return omit_one_operand_loc (loc, type, integer_one_node, arg1);
12401 /* X || !X is always true. */
12402 if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
12403 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
12404 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
12406 /* (X && !Y) || (!X && Y) is X ^ Y */
12407 if (TREE_CODE (arg0) == TRUTH_AND_EXPR
12408 && TREE_CODE (arg1) == TRUTH_AND_EXPR)
12410 tree a0, a1, l0, l1, n0, n1;
12412 a0 = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 0));
12413 a1 = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 1));
12415 l0 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
12416 l1 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
12418 n0 = fold_build1_loc (loc, TRUTH_NOT_EXPR, type, l0);
12419 n1 = fold_build1_loc (loc, TRUTH_NOT_EXPR, type, l1);
12421 if ((operand_equal_p (n0, a0, 0)
12422 && operand_equal_p (n1, a1, 0))
12423 || (operand_equal_p (n0, a1, 0)
12424 && operand_equal_p (n1, a0, 0)))
12425 return fold_build2_loc (loc, TRUTH_XOR_EXPR, type, l0, n1);
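/* For illustration: (a && !b) || (!a && b) folds to a ^ b, and
   (a && b) || (!a && !b) to a ^ !b, replacing five logical operations
   with one or two. */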
12428 if ((tem = fold_truth_andor (loc, code, type, arg0, arg1, op0, op1))
12429 != NULL_TREE)
12430 return tem;
12432 return NULL_TREE;
12434 case TRUTH_XOR_EXPR:
12435 /* If the second arg is constant zero, drop it. */
12436 if (integer_zerop (arg1))
12437 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
12438 /* If the second arg is constant true, this is a logical inversion. */
12439 if (integer_onep (arg1))
12441 /* Only call invert_truthvalue if operand is a truth value. */
12442 if (TREE_CODE (TREE_TYPE (arg0)) != BOOLEAN_TYPE)
12443 tem = fold_build1_loc (loc, TRUTH_NOT_EXPR, TREE_TYPE (arg0), arg0);
12444 else
12445 tem = invert_truthvalue_loc (loc, arg0);
12446 return non_lvalue_loc (loc, fold_convert_loc (loc, type, tem));
12448 /* Identical arguments cancel to zero. */
12449 if (operand_equal_p (arg0, arg1, 0))
12450 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
12452 /* !X ^ X is always true. */
12453 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
12454 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
12455 return omit_one_operand_loc (loc, type, integer_one_node, arg1);
12457 /* X ^ !X is always true. */
12458 if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
12459 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
12460 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
12462 return NULL_TREE;
12464 case EQ_EXPR:
12465 case NE_EXPR:
12466 STRIP_NOPS (arg0);
12467 STRIP_NOPS (arg1);
12469 tem = fold_comparison (loc, code, type, op0, op1);
12470 if (tem != NULL_TREE)
12471 return tem;
12473 /* bool_var != 0 becomes bool_var. */
12474 if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_zerop (arg1)
12475 && code == NE_EXPR)
12476 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
12478 /* bool_var == 1 becomes bool_var. */
12479 if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_onep (arg1)
12480 && code == EQ_EXPR)
12481 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
12483 /* bool_var != 1 becomes !bool_var. */
12484 if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_onep (arg1)
12485 && code == NE_EXPR)
12486 return fold_convert_loc (loc, type,
12487 fold_build1_loc (loc, TRUTH_NOT_EXPR,
12488 TREE_TYPE (arg0), arg0));
12490 /* bool_var == 0 becomes !bool_var. */
12491 if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_zerop (arg1)
12492 && code == EQ_EXPR)
12493 return fold_convert_loc (loc, type,
12494 fold_build1_loc (loc, TRUTH_NOT_EXPR,
12495 TREE_TYPE (arg0), arg0));
12497 /* !exp != 0 becomes !exp. */
12498 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR && integer_zerop (arg1)
12499 && code == NE_EXPR)
12500 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
12502 /* If this is an equality comparison of the address of two non-weak,
12503 unaliased symbols neither of which are extern (since we do not
12504 have access to attributes for externs), then we know the result. */
12505 if (TREE_CODE (arg0) == ADDR_EXPR
12506 && VAR_OR_FUNCTION_DECL_P (TREE_OPERAND (arg0, 0))
12507 && ! DECL_WEAK (TREE_OPERAND (arg0, 0))
12508 && ! lookup_attribute ("alias",
12509 DECL_ATTRIBUTES (TREE_OPERAND (arg0, 0)))
12510 && ! DECL_EXTERNAL (TREE_OPERAND (arg0, 0))
12511 && TREE_CODE (arg1) == ADDR_EXPR
12512 && VAR_OR_FUNCTION_DECL_P (TREE_OPERAND (arg1, 0))
12513 && ! DECL_WEAK (TREE_OPERAND (arg1, 0))
12514 && ! lookup_attribute ("alias",
12515 DECL_ATTRIBUTES (TREE_OPERAND (arg1, 0)))
12516 && ! DECL_EXTERNAL (TREE_OPERAND (arg1, 0)))
12518 /* We know that we're looking at the address of two
12519 non-weak, unaliased, static _DECL nodes.
12521 It is both wasteful and incorrect to call operand_equal_p
12522 to compare the two ADDR_EXPR nodes. It is wasteful in that
12523 all we need to do is test pointer equality for the arguments
12524 to the two ADDR_EXPR nodes. It is incorrect to use
12525 operand_equal_p as that function is NOT equivalent to a
12526 C equality test. It can in fact return false for two
12527 objects which would test as equal using the C equality
12528 operator. */
12529 bool equal = TREE_OPERAND (arg0, 0) == TREE_OPERAND (arg1, 0);
12530 return constant_boolean_node (equal
12531 ? code == EQ_EXPR : code != EQ_EXPR,
12532 type);
12535 /* If this is an EQ or NE comparison of a constant with a PLUS_EXPR or
12536 a MINUS_EXPR of a constant, we can convert it into a comparison with
12537 a revised constant as long as no overflow occurs. */
12538 if (TREE_CODE (arg1) == INTEGER_CST
12539 && (TREE_CODE (arg0) == PLUS_EXPR
12540 || TREE_CODE (arg0) == MINUS_EXPR)
12541 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
12542 && 0 != (tem = const_binop (TREE_CODE (arg0) == PLUS_EXPR
12543 ? MINUS_EXPR : PLUS_EXPR,
12544 fold_convert_loc (loc, TREE_TYPE (arg0),
12545 arg1),
12546 TREE_OPERAND (arg0, 1)))
12547 && !TREE_OVERFLOW (tem))
12548 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0), tem);
12550 /* Similarly for a NEGATE_EXPR. */
12551 if (TREE_CODE (arg0) == NEGATE_EXPR
12552 && TREE_CODE (arg1) == INTEGER_CST
12553 && 0 != (tem = negate_expr (fold_convert_loc (loc, TREE_TYPE (arg0),
12554 arg1)))
12555 && TREE_CODE (tem) == INTEGER_CST
12556 && !TREE_OVERFLOW (tem))
12557 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0), tem);
12559 /* Similarly for a BIT_XOR_EXPR; X ^ C1 == C2 is X == (C1 ^ C2). */
12560 if (TREE_CODE (arg0) == BIT_XOR_EXPR
12561 && TREE_CODE (arg1) == INTEGER_CST
12562 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
12563 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0),
12564 fold_build2_loc (loc, BIT_XOR_EXPR, TREE_TYPE (arg0),
12565 fold_convert_loc (loc,
12566 TREE_TYPE (arg0),
12567 arg1),
12568 TREE_OPERAND (arg0, 1)));
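/* For illustration: (x ^ 5) == 3 folds to x == 6, because XOR by a
   constant is its own inverse and 5 ^ 3 == 6. */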
12570 /* Transform comparisons of the form X +- Y CMP X to Y CMP 0. */
12571 if ((TREE_CODE (arg0) == PLUS_EXPR
12572 || TREE_CODE (arg0) == POINTER_PLUS_EXPR
12573 || TREE_CODE (arg0) == MINUS_EXPR)
12574 && operand_equal_p (tree_strip_nop_conversions (TREE_OPERAND (arg0,
12575 0)),
12576 arg1, 0)
12577 && (INTEGRAL_TYPE_P (TREE_TYPE (arg0))
12578 || POINTER_TYPE_P (TREE_TYPE (arg0))))
12580 tree val = TREE_OPERAND (arg0, 1);
12581 return omit_two_operands_loc (loc, type,
12582 fold_build2_loc (loc, code, type,
12583 val,
12584 build_int_cst (TREE_TYPE (val),
12585 0)),
12586 TREE_OPERAND (arg0, 0), arg1);
12589 /* Transform comparisons of the form C - X CMP X if C % 2 == 1. */
12590 if (TREE_CODE (arg0) == MINUS_EXPR
12591 && TREE_CODE (TREE_OPERAND (arg0, 0)) == INTEGER_CST
12592 && operand_equal_p (tree_strip_nop_conversions (TREE_OPERAND (arg0,
12593 1)),
12594 arg1, 0)
12595 && (TREE_INT_CST_LOW (TREE_OPERAND (arg0, 0)) & 1) == 1)
12597 return omit_two_operands_loc (loc, type,
12598 code == NE_EXPR
12599 ? boolean_true_node : boolean_false_node,
12600 TREE_OPERAND (arg0, 1), arg1);
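/* For illustration: 7 - x == x would need 2 * x == 7, which no
   integer (even with wraparound) satisfies since 2 * x is always
   even; hence == folds to false and != to true for any odd
   constant. */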
12603 /* If we have X - Y == 0, we can convert that to X == Y and similarly
12604 for !=. Don't do this for ordered comparisons due to overflow. */
12605 if (TREE_CODE (arg0) == MINUS_EXPR
12606 && integer_zerop (arg1))
12607 return fold_build2_loc (loc, code, type,
12608 TREE_OPERAND (arg0, 0), TREE_OPERAND (arg0, 1));
12610 /* Convert ABS_EXPR<x> == 0 or ABS_EXPR<x> != 0 to x == 0 or x != 0. */
12611 if (TREE_CODE (arg0) == ABS_EXPR
12612 && (integer_zerop (arg1) || real_zerop (arg1)))
12613 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0), arg1);
12615 /* If this is an EQ or NE comparison with zero and ARG0 is
12616 (1 << foo) & bar, convert it to (bar >> foo) & 1. Both require
12617 two operations, but the latter can be done in one less insn
12618 on machines that have only two-operand insns or on which a
12619 constant cannot be the first operand. */
12620 if (TREE_CODE (arg0) == BIT_AND_EXPR
12621 && integer_zerop (arg1))
12623 tree arg00 = TREE_OPERAND (arg0, 0);
12624 tree arg01 = TREE_OPERAND (arg0, 1);
12625 if (TREE_CODE (arg00) == LSHIFT_EXPR
12626 && integer_onep (TREE_OPERAND (arg00, 0)))
12628 tree tem = fold_build2_loc (loc, RSHIFT_EXPR, TREE_TYPE (arg00),
12629 arg01, TREE_OPERAND (arg00, 1));
12630 tem = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg0), tem,
12631 build_int_cst (TREE_TYPE (arg0), 1));
12632 return fold_build2_loc (loc, code, type,
12633 fold_convert_loc (loc, TREE_TYPE (arg1), tem),
12634 arg1);
12636 else if (TREE_CODE (arg01) == LSHIFT_EXPR
12637 && integer_onep (TREE_OPERAND (arg01, 0)))
12639 tree tem = fold_build2_loc (loc, RSHIFT_EXPR, TREE_TYPE (arg01),
12640 arg00, TREE_OPERAND (arg01, 1));
12641 tem = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg0), tem,
12642 build_int_cst (TREE_TYPE (arg0), 1));
12643 return fold_build2_loc (loc, code, type,
12644 fold_convert_loc (loc, TREE_TYPE (arg1), tem),
12645 arg1);
12649 /* If this is an NE or EQ comparison of zero against the result of a
12650 signed MOD operation whose second operand is a power of 2, make
12651 the MOD operation unsigned since it is simpler and equivalent. */
12652 if (integer_zerop (arg1)
12653 && !TYPE_UNSIGNED (TREE_TYPE (arg0))
12654 && (TREE_CODE (arg0) == TRUNC_MOD_EXPR
12655 || TREE_CODE (arg0) == CEIL_MOD_EXPR
12656 || TREE_CODE (arg0) == FLOOR_MOD_EXPR
12657 || TREE_CODE (arg0) == ROUND_MOD_EXPR)
12658 && integer_pow2p (TREE_OPERAND (arg0, 1)))
12660 tree newtype = unsigned_type_for (TREE_TYPE (arg0));
12661 tree newmod = fold_build2_loc (loc, TREE_CODE (arg0), newtype,
12662 fold_convert_loc (loc, newtype,
12663 TREE_OPERAND (arg0, 0)),
12664 fold_convert_loc (loc, newtype,
12665 TREE_OPERAND (arg0, 1)));
12667 return fold_build2_loc (loc, code, type, newmod,
12668 fold_convert_loc (loc, newtype, arg1));
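/* For illustration: for signed x, x % 4 == 0 holds exactly when
   (unsigned) x % 4 == 0, and the unsigned form can then fold further
   to ((unsigned) x & 3) == 0. */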
12671 /* Fold ((X >> C1) & C2) == 0 and ((X >> C1) & C2) != 0 where
12672 C1 is a valid shift constant, and C2 is a power of two, i.e.
12673 a single bit. */
12674 if (TREE_CODE (arg0) == BIT_AND_EXPR
12675 && TREE_CODE (TREE_OPERAND (arg0, 0)) == RSHIFT_EXPR
12676 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1))
12677 == INTEGER_CST
12678 && integer_pow2p (TREE_OPERAND (arg0, 1))
12679 && integer_zerop (arg1))
12681 tree itype = TREE_TYPE (arg0);
12682 unsigned HOST_WIDE_INT prec = TYPE_PRECISION (itype);
12683 tree arg001 = TREE_OPERAND (TREE_OPERAND (arg0, 0), 1);
12685 /* Check for a valid shift count. */
12686 if (TREE_INT_CST_HIGH (arg001) == 0
12687 && TREE_INT_CST_LOW (arg001) < prec)
12689 tree arg01 = TREE_OPERAND (arg0, 1);
12690 tree arg000 = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
12691 unsigned HOST_WIDE_INT log2 = tree_log2 (arg01);
12692 /* If (C2 << C1) doesn't overflow, then ((X >> C1) & C2) != 0
12693 can be rewritten as (X & (C2 << C1)) != 0. */
12694 if ((log2 + TREE_INT_CST_LOW (arg001)) < prec)
12696 tem = fold_build2_loc (loc, LSHIFT_EXPR, itype, arg01, arg001);
12697 tem = fold_build2_loc (loc, BIT_AND_EXPR, itype, arg000, tem);
12698 return fold_build2_loc (loc, code, type, tem,
12699 fold_convert_loc (loc, itype, arg1));
12701 /* Otherwise, for signed (arithmetic) shifts,
12702 ((X >> C1) & C2) != 0 is rewritten as X < 0, and
12703 ((X >> C1) & C2) == 0 is rewritten as X >= 0. */
12704 else if (!TYPE_UNSIGNED (itype))
12705 return fold_build2_loc (loc, code == EQ_EXPR ? GE_EXPR : LT_EXPR, type,
12706 arg000, build_int_cst (itype, 0));
12707 /* Otherwise, for unsigned (logical) shifts,
12708 ((X >> C1) & C2) != 0 is rewritten as (X,false), and
12709 ((X >> C1) & C2) == 0 is rewritten as (X,true). */
12710 else
12711 return omit_one_operand_loc (loc, type,
12712 code == EQ_EXPR ? integer_one_node
12713 : integer_zero_node,
12714 arg000);
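/* For illustration, on a 32-bit type: ((x >> 3) & 4) != 0 tests bit 5
   of x and folds to (x & 32) != 0.  If shifting the mask back would
   overflow, e.g. C2 == 0x20000000 with C1 == 3, the test reduces to a
   sign test for arithmetic shifts and to a constant for logical
   shifts. */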
12718 /* If we have (A & C) == C where C is a power of 2, convert this into
12719 (A & C) != 0. Similarly for NE_EXPR. */
12720 if (TREE_CODE (arg0) == BIT_AND_EXPR
12721 && integer_pow2p (TREE_OPERAND (arg0, 1))
12722 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
12723 return fold_build2_loc (loc, code == EQ_EXPR ? NE_EXPR : EQ_EXPR, type,
12724 arg0, fold_convert_loc (loc, TREE_TYPE (arg0),
12725 integer_zero_node));
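/* For illustration: (flags & 8) == 8 tests the same single bit as
   (flags & 8) != 0, and the latter is the form the single-bit
   optimizations below expect. */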
12727 /* If we have (A & C) != 0 or (A & C) == 0 and C is the sign
12728 bit, then fold the expression into A < 0 or A >= 0. */
12729 tem = fold_single_bit_test_into_sign_test (loc, code, arg0, arg1, type);
12730 if (tem)
12731 return tem;
12733 /* If we have (A & C) == D where D & ~C != 0, convert this into 0.
12734 Similarly for NE_EXPR. */
12735 if (TREE_CODE (arg0) == BIT_AND_EXPR
12736 && TREE_CODE (arg1) == INTEGER_CST
12737 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
12739 tree notc = fold_build1_loc (loc, BIT_NOT_EXPR,
12740 TREE_TYPE (TREE_OPERAND (arg0, 1)),
12741 TREE_OPERAND (arg0, 1));
12742 tree dandnotc
12743 = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg0),
12744 fold_convert_loc (loc, TREE_TYPE (arg0), arg1),
12745 notc);
12746 tree rslt = code == EQ_EXPR ? integer_zero_node : integer_one_node;
12747 if (integer_nonzerop (dandnotc))
12748 return omit_one_operand_loc (loc, type, rslt, arg0);
12751 /* If we have (A | C) == D where C & ~D != 0, convert this into 0.
12752 Similarly for NE_EXPR. */
12753 if (TREE_CODE (arg0) == BIT_IOR_EXPR
12754 && TREE_CODE (arg1) == INTEGER_CST
12755 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
12757 tree notd = fold_build1_loc (loc, BIT_NOT_EXPR, TREE_TYPE (arg1), arg1);
12758 tree candnotd
12759 = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg0),
12760 TREE_OPERAND (arg0, 1),
12761 fold_convert_loc (loc, TREE_TYPE (arg0), notd));
12762 tree rslt = code == EQ_EXPR ? integer_zero_node : integer_one_node;
12763 if (integer_nonzerop (candnotd))
12764 return omit_one_operand_loc (loc, type, rslt, arg0);
12767 /* If this is a comparison of a field, we may be able to simplify it. */
12768 if ((TREE_CODE (arg0) == COMPONENT_REF
12769 || TREE_CODE (arg0) == BIT_FIELD_REF)
12770 /* Handle the constant case even without -O
12771 to make sure the warnings are given. */
12772 && (optimize || TREE_CODE (arg1) == INTEGER_CST))
12774 t1 = optimize_bit_field_compare (loc, code, type, arg0, arg1);
12775 if (t1)
12776 return t1;
12779 /* Optimize comparisons of strlen vs zero to a compare of the
12780 first character of the string vs zero. To wit,
12781 strlen(ptr) == 0 => *ptr == 0
12782 strlen(ptr) != 0 => *ptr != 0
12783 Other cases should reduce to one of these two (or a constant)
12784 due to the return value of strlen being unsigned. */
12785 if (TREE_CODE (arg0) == CALL_EXPR
12786 && integer_zerop (arg1))
12788 tree fndecl = get_callee_fndecl (arg0);
12790 if (fndecl
12791 && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
12792 && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_STRLEN
12793 && call_expr_nargs (arg0) == 1
12794 && TREE_CODE (TREE_TYPE (CALL_EXPR_ARG (arg0, 0))) == POINTER_TYPE)
12796 tree iref = build_fold_indirect_ref_loc (loc,
12797 CALL_EXPR_ARG (arg0, 0));
12798 return fold_build2_loc (loc, code, type, iref,
12799 build_int_cst (TREE_TYPE (iref), 0));
12803 /* Fold (X >> C) != 0 into X < 0 if C is one less than the width
12804 of X. Similarly fold (X >> C) == 0 into X >= 0. */
12805 if (TREE_CODE (arg0) == RSHIFT_EXPR
12806 && integer_zerop (arg1)
12807 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
12809 tree arg00 = TREE_OPERAND (arg0, 0);
12810 tree arg01 = TREE_OPERAND (arg0, 1);
12811 tree itype = TREE_TYPE (arg00);
12812 if (TREE_INT_CST_HIGH (arg01) == 0
12813 && TREE_INT_CST_LOW (arg01)
12814 == (unsigned HOST_WIDE_INT) (TYPE_PRECISION (itype) - 1))
12816 if (TYPE_UNSIGNED (itype))
12818 itype = signed_type_for (itype);
12819 arg00 = fold_convert_loc (loc, itype, arg00);
12821 return fold_build2_loc (loc, code == EQ_EXPR ? GE_EXPR : LT_EXPR,
12822 type, arg00, build_int_cst (itype, 0));
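/* For illustration: for 32-bit x, (x >> 31) != 0 keeps only the sign
   bit and so folds to x < 0, converting x to the corresponding signed
   type first when it is unsigned. */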
12826 /* (X ^ Y) == 0 becomes X == Y, and (X ^ Y) != 0 becomes X != Y. */
12827 if (integer_zerop (arg1)
12828 && TREE_CODE (arg0) == BIT_XOR_EXPR)
12829 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0),
12830 TREE_OPERAND (arg0, 1));
12832 /* (X ^ Y) == Y becomes X == 0. We know that Y has no side-effects. */
12833 if (TREE_CODE (arg0) == BIT_XOR_EXPR
12834 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
12835 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0),
12836 build_int_cst (TREE_TYPE (arg0), 0));
12837 /* Likewise (X ^ Y) == X becomes Y == 0. X has no side-effects. */
12838 if (TREE_CODE (arg0) == BIT_XOR_EXPR
12839 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
12840 && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
12841 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 1),
12842 build_int_cst (TREE_TYPE (arg0), 0));
12844 /* (X ^ C1) op C2 can be rewritten as X op (C1 ^ C2). */
12845 if (TREE_CODE (arg0) == BIT_XOR_EXPR
12846 && TREE_CODE (arg1) == INTEGER_CST
12847 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
12848 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0),
12849 fold_build2_loc (loc, BIT_XOR_EXPR, TREE_TYPE (arg1),
12850 TREE_OPERAND (arg0, 1), arg1));
12852 /* Fold (~X & C) == 0 into (X & C) != 0 and (~X & C) != 0 into
12853 (X & C) == 0 when C is a single bit. */
12854 if (TREE_CODE (arg0) == BIT_AND_EXPR
12855 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_NOT_EXPR
12856 && integer_zerop (arg1)
12857 && integer_pow2p (TREE_OPERAND (arg0, 1)))
12859 tem = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg0),
12860 TREE_OPERAND (TREE_OPERAND (arg0, 0), 0),
12861 TREE_OPERAND (arg0, 1));
12862 return fold_build2_loc (loc, code == EQ_EXPR ? NE_EXPR : EQ_EXPR,
12863 type, tem,
12864 fold_convert_loc (loc, TREE_TYPE (arg0),
12865 arg1));
12868 /* Fold ((X & C) ^ C) eq/ne 0 into (X & C) ne/eq 0, when the
12869 constant C is a power of two, i.e. a single bit. */
12870 if (TREE_CODE (arg0) == BIT_XOR_EXPR
12871 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_AND_EXPR
12872 && integer_zerop (arg1)
12873 && integer_pow2p (TREE_OPERAND (arg0, 1))
12874 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
12875 TREE_OPERAND (arg0, 1), OEP_ONLY_CONST))
12877 tree arg00 = TREE_OPERAND (arg0, 0);
12878 return fold_build2_loc (loc, code == EQ_EXPR ? NE_EXPR : EQ_EXPR, type,
12879 arg00, build_int_cst (TREE_TYPE (arg00), 0));
12882 /* Likewise, fold ((X ^ C) & C) eq/ne 0 into (X & C) ne/eq 0,
12883 when C is a power of two, i.e. a single bit. */
12884 if (TREE_CODE (arg0) == BIT_AND_EXPR
12885 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_XOR_EXPR
12886 && integer_zerop (arg1)
12887 && integer_pow2p (TREE_OPERAND (arg0, 1))
12888 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
12889 TREE_OPERAND (arg0, 1), OEP_ONLY_CONST))
12891 tree arg000 = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
12892 tem = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg000),
12893 arg000, TREE_OPERAND (arg0, 1));
12894 return fold_build2_loc (loc, code == EQ_EXPR ? NE_EXPR : EQ_EXPR, type,
12895 tem, build_int_cst (TREE_TYPE (tem), 0));
12898 if (integer_zerop (arg1)
12899 && tree_expr_nonzero_p (arg0))
12901 tree res = constant_boolean_node (code == NE_EXPR, type);
12902 return omit_one_operand_loc (loc, type, res, arg0);
12905 /* Fold -X op -Y as X op Y, where op is eq/ne. */
12906 if (TREE_CODE (arg0) == NEGATE_EXPR
12907 && TREE_CODE (arg1) == NEGATE_EXPR)
12908 return fold_build2_loc (loc, code, type,
12909 TREE_OPERAND (arg0, 0),
12910 fold_convert_loc (loc, TREE_TYPE (arg0),
12911 TREE_OPERAND (arg1, 0)));
12913 /* Fold (X & C) op (Y & C) as ((X ^ Y) & C) op 0, and symmetries. */
12914 if (TREE_CODE (arg0) == BIT_AND_EXPR
12915 && TREE_CODE (arg1) == BIT_AND_EXPR)
12917 tree arg00 = TREE_OPERAND (arg0, 0);
12918 tree arg01 = TREE_OPERAND (arg0, 1);
12919 tree arg10 = TREE_OPERAND (arg1, 0);
12920 tree arg11 = TREE_OPERAND (arg1, 1);
12921 tree itype = TREE_TYPE (arg0);
12923 if (operand_equal_p (arg01, arg11, 0))
12924 return fold_build2_loc (loc, code, type,
12925 fold_build2_loc (loc, BIT_AND_EXPR, itype,
12926 fold_build2_loc (loc,
12927 BIT_XOR_EXPR, itype,
12928 arg00, arg10),
12929 arg01),
12930 build_int_cst (itype, 0));
12932 if (operand_equal_p (arg01, arg10, 0))
12933 return fold_build2_loc (loc, code, type,
12934 fold_build2_loc (loc, BIT_AND_EXPR, itype,
12935 fold_build2_loc (loc,
12936 BIT_XOR_EXPR, itype,
12937 arg00, arg11),
12938 arg01),
12939 build_int_cst (itype, 0));
12941 if (operand_equal_p (arg00, arg11, 0))
12942 return fold_build2_loc (loc, code, type,
12943 fold_build2_loc (loc, BIT_AND_EXPR, itype,
12944 fold_build2_loc (loc,
12945 BIT_XOR_EXPR, itype,
12946 arg01, arg10),
12947 arg00),
12948 build_int_cst (itype, 0));
12950 if (operand_equal_p (arg00, arg10, 0))
12951 return fold_build2_loc (loc, code, type,
12952 fold_build2_loc (loc, BIT_AND_EXPR, itype,
12953 fold_build2_loc (loc,
12954 BIT_XOR_EXPR, itype,
12955 arg01, arg11),
12956 arg00),
12957 build_int_cst (itype, 0));
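/* For illustration: (x & 0xff) == (y & 0xff) folds to
   ((x ^ y) & 0xff) == 0, i.e. "x and y agree in the low byte",
   saving one AND. */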
12960 if (TREE_CODE (arg0) == BIT_XOR_EXPR
12961 && TREE_CODE (arg1) == BIT_XOR_EXPR)
12963 tree arg00 = TREE_OPERAND (arg0, 0);
12964 tree arg01 = TREE_OPERAND (arg0, 1);
12965 tree arg10 = TREE_OPERAND (arg1, 0);
12966 tree arg11 = TREE_OPERAND (arg1, 1);
12967 tree itype = TREE_TYPE (arg0);
12969 /* Optimize (X ^ Z) op (Y ^ Z) as X op Y, and symmetries.
12970 operand_equal_p guarantees no side-effects so we don't need
12971 to use omit_one_operand on Z. */
12972 if (operand_equal_p (arg01, arg11, 0))
12973 return fold_build2_loc (loc, code, type, arg00,
12974 fold_convert_loc (loc, TREE_TYPE (arg00),
12975 arg10));
12976 if (operand_equal_p (arg01, arg10, 0))
12977 return fold_build2_loc (loc, code, type, arg00,
12978 fold_convert_loc (loc, TREE_TYPE (arg00),
12979 arg11));
12980 if (operand_equal_p (arg00, arg11, 0))
12981 return fold_build2_loc (loc, code, type, arg01,
12982 fold_convert_loc (loc, TREE_TYPE (arg01),
12983 arg10));
12984 if (operand_equal_p (arg00, arg10, 0))
12985 return fold_build2_loc (loc, code, type, arg01,
12986 fold_convert_loc (loc, TREE_TYPE (arg01),
12987 arg11));
12989 /* Optimize (X ^ C1) op (Y ^ C2) as (X ^ (C1 ^ C2)) op Y. */
12990 if (TREE_CODE (arg01) == INTEGER_CST
12991 && TREE_CODE (arg11) == INTEGER_CST)
12993 tem = fold_build2_loc (loc, BIT_XOR_EXPR, itype, arg01,
12994 fold_convert_loc (loc, itype, arg11));
12995 tem = fold_build2_loc (loc, BIT_XOR_EXPR, itype, arg00, tem);
12996 return fold_build2_loc (loc, code, type, tem,
12997 fold_convert_loc (loc, itype, arg10));
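/* For illustration: (x ^ 5) == (y ^ 3) folds to (x ^ 6) == y, the two
   constants combining as 5 ^ 3 == 6. */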
13001 /* Attempt to simplify equality/inequality comparisons of complex
13002 values. Only lower the comparison if the result is known or
13003 can be simplified to a single scalar comparison. */
13004 if ((TREE_CODE (arg0) == COMPLEX_EXPR
13005 || TREE_CODE (arg0) == COMPLEX_CST)
13006 && (TREE_CODE (arg1) == COMPLEX_EXPR
13007 || TREE_CODE (arg1) == COMPLEX_CST))
13009 tree real0, imag0, real1, imag1;
13010 tree rcond, icond;
13012 if (TREE_CODE (arg0) == COMPLEX_EXPR)
13014 real0 = TREE_OPERAND (arg0, 0);
13015 imag0 = TREE_OPERAND (arg0, 1);
13017 else
13019 real0 = TREE_REALPART (arg0);
13020 imag0 = TREE_IMAGPART (arg0);
13023 if (TREE_CODE (arg1) == COMPLEX_EXPR)
13025 real1 = TREE_OPERAND (arg1, 0);
13026 imag1 = TREE_OPERAND (arg1, 1);
13028 else
13030 real1 = TREE_REALPART (arg1);
13031 imag1 = TREE_IMAGPART (arg1);
13034 rcond = fold_binary_loc (loc, code, type, real0, real1);
13035 if (rcond && TREE_CODE (rcond) == INTEGER_CST)
13037 if (integer_zerop (rcond))
13039 if (code == EQ_EXPR)
13040 return omit_two_operands_loc (loc, type, boolean_false_node,
13041 imag0, imag1);
13042 return fold_build2_loc (loc, NE_EXPR, type, imag0, imag1);
13044 else
13046 if (code == NE_EXPR)
13047 return omit_two_operands_loc (loc, type, boolean_true_node,
13048 imag0, imag1);
13049 return fold_build2_loc (loc, EQ_EXPR, type, imag0, imag1);
13053 icond = fold_binary_loc (loc, code, type, imag0, imag1);
13054 if (icond && TREE_CODE (icond) == INTEGER_CST)
13056 if (integer_zerop (icond))
13058 if (code == EQ_EXPR)
13059 return omit_two_operands_loc (loc, type, boolean_false_node,
13060 real0, real1);
13061 return fold_build2_loc (loc, NE_EXPR, type, real0, real1);
13063 else
13065 if (code == NE_EXPR)
13066 return omit_two_operands_loc (loc, type, boolean_true_node,
13067 real0, real1);
13068 return fold_build2_loc (loc, EQ_EXPR, type, real0, real1);
13073 return NULL_TREE;
13075 case LT_EXPR:
13076 case GT_EXPR:
13077 case LE_EXPR:
13078 case GE_EXPR:
13079 tem = fold_comparison (loc, code, type, op0, op1);
13080 if (tem != NULL_TREE)
13081 return tem;
13083 /* Transform comparisons of the form X +- C CMP X. */
13084 if ((TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
13085 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
13086 && ((TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
13087 && !HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0))))
13088 || (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
13089 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))))
13091 tree arg01 = TREE_OPERAND (arg0, 1);
13092 enum tree_code code0 = TREE_CODE (arg0);
13093 int is_positive;
13095 if (TREE_CODE (arg01) == REAL_CST)
13096 is_positive = REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg01)) ? -1 : 1;
13097 else
13098 is_positive = tree_int_cst_sgn (arg01);
13100 /* (X - c) > X becomes false. */
13101 if (code == GT_EXPR
13102 && ((code0 == MINUS_EXPR && is_positive >= 0)
13103 || (code0 == PLUS_EXPR && is_positive <= 0)))
13105 if (TREE_CODE (arg01) == INTEGER_CST
13106 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
13107 fold_overflow_warning (("assuming signed overflow does not "
13108 "occur when assuming that (X - c) > X "
13109 "is always false"),
13110 WARN_STRICT_OVERFLOW_ALL);
13111 return constant_boolean_node (0, type);
13114 /* Likewise (X + c) < X becomes false. */
13115 if (code == LT_EXPR
13116 && ((code0 == PLUS_EXPR && is_positive >= 0)
13117 || (code0 == MINUS_EXPR && is_positive <= 0)))
13119 if (TREE_CODE (arg01) == INTEGER_CST
13120 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
13121 fold_overflow_warning (("assuming signed overflow does not "
13122 "occur when assuming that "
13123 "(X + c) < X is always false"),
13124 WARN_STRICT_OVERFLOW_ALL);
13125 return constant_boolean_node (0, type);
13128 /* Convert (X - c) <= X to true. */
13129 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1)))
13130 && code == LE_EXPR
13131 && ((code0 == MINUS_EXPR && is_positive >= 0)
13132 || (code0 == PLUS_EXPR && is_positive <= 0)))
13134 if (TREE_CODE (arg01) == INTEGER_CST
13135 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
13136 fold_overflow_warning (("assuming signed overflow does not "
13137 "occur when assuming that "
13138 "(X - c) <= X is always true"),
13139 WARN_STRICT_OVERFLOW_ALL);
13140 return constant_boolean_node (1, type);
13143 /* Convert (X + c) >= X to true. */
13144 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1)))
13145 && code == GE_EXPR
13146 && ((code0 == PLUS_EXPR && is_positive >= 0)
13147 || (code0 == MINUS_EXPR && is_positive <= 0)))
13149 if (TREE_CODE (arg01) == INTEGER_CST
13150 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
13151 fold_overflow_warning (("assuming signed overflow does not "
13152 "occur when assuming that "
13153 "(X + c) >= X is always true"),
13154 WARN_STRICT_OVERFLOW_ALL);
13155 return constant_boolean_node (1, type);
13158 if (TREE_CODE (arg01) == INTEGER_CST)
13160 /* Convert X + c > X and X - c < X to true for integers. */
13161 if (code == GT_EXPR
13162 && ((code0 == PLUS_EXPR && is_positive > 0)
13163 || (code0 == MINUS_EXPR && is_positive < 0)))
13165 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
13166 fold_overflow_warning (("assuming signed overflow does "
13167 "not occur when assuming that "
13168 "(X + c) > X is always true"),
13169 WARN_STRICT_OVERFLOW_ALL);
13170 return constant_boolean_node (1, type);
13173 if (code == LT_EXPR
13174 && ((code0 == MINUS_EXPR && is_positive > 0)
13175 || (code0 == PLUS_EXPR && is_positive < 0)))
13177 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
13178 fold_overflow_warning (("assuming signed overflow does "
13179 "not occur when assuming that "
13180 "(X - c) < X is always true"),
13181 WARN_STRICT_OVERFLOW_ALL);
13182 return constant_boolean_node (1, type);
13185 /* Convert X + c <= X and X - c >= X to false for integers. */
13186 if (code == LE_EXPR
13187 && ((code0 == PLUS_EXPR && is_positive > 0)
13188 || (code0 == MINUS_EXPR && is_positive < 0)))
13190 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
13191 fold_overflow_warning (("assuming signed overflow does "
13192 "not occur when assuming that "
13193 "(X + c) <= X is always false"),
13194 WARN_STRICT_OVERFLOW_ALL);
13195 return constant_boolean_node (0, type);
13198 if (code == GE_EXPR
13199 && ((code0 == MINUS_EXPR && is_positive > 0)
13200 || (code0 == PLUS_EXPR && is_positive < 0)))
13202 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
13203 fold_overflow_warning (("assuming signed overflow does "
13204 "not occur when assuming that "
13205 "(X - c) >= X is always false"),
13206 WARN_STRICT_OVERFLOW_ALL);
13207 return constant_boolean_node (0, type);
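/* For illustration: with undefined signed overflow, x + 1 > x folds
   to true and x + 1 <= x to false; each fold warns under
   -Wstrict-overflow because with wrapping arithmetic x + 1 > x would
   be false at the maximum value. */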
13212 /* Comparisons with the highest or lowest possible integer of
13213 the specified precision will have known values. */
13215 tree arg1_type = TREE_TYPE (arg1);
13216 unsigned int width = TYPE_PRECISION (arg1_type);
13218 if (TREE_CODE (arg1) == INTEGER_CST
13219 && width <= 2 * HOST_BITS_PER_WIDE_INT
13220 && (INTEGRAL_TYPE_P (arg1_type) || POINTER_TYPE_P (arg1_type)))
13222 HOST_WIDE_INT signed_max_hi;
13223 unsigned HOST_WIDE_INT signed_max_lo;
13224 unsigned HOST_WIDE_INT max_hi, max_lo, min_hi, min_lo;
13226 if (width <= HOST_BITS_PER_WIDE_INT)
13228 signed_max_lo = ((unsigned HOST_WIDE_INT) 1 << (width - 1))
13229 - 1;
13230 signed_max_hi = 0;
13231 max_hi = 0;
13233 if (TYPE_UNSIGNED (arg1_type))
13235 max_lo = ((unsigned HOST_WIDE_INT) 2 << (width - 1)) - 1;
13236 min_lo = 0;
13237 min_hi = 0;
13239 else
13241 max_lo = signed_max_lo;
13242 min_lo = ((unsigned HOST_WIDE_INT) -1 << (width - 1));
13243 min_hi = -1;
13246 else
13248 width -= HOST_BITS_PER_WIDE_INT;
13249 signed_max_lo = -1;
13250 signed_max_hi = ((unsigned HOST_WIDE_INT) 1 << (width - 1))
13251 - 1;
13252 max_lo = -1;
13253 min_lo = 0;
13255 if (TYPE_UNSIGNED (arg1_type))
13257 max_hi = ((unsigned HOST_WIDE_INT) 2 << (width - 1)) - 1;
13258 min_hi = 0;
13260 else
13262 max_hi = signed_max_hi;
13263 min_hi = ((unsigned HOST_WIDE_INT) -1 << (width - 1));
13267 if ((unsigned HOST_WIDE_INT) TREE_INT_CST_HIGH (arg1) == max_hi
13268 && TREE_INT_CST_LOW (arg1) == max_lo)
13269 switch (code)
13271 case GT_EXPR:
13272 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
13274 case GE_EXPR:
13275 return fold_build2_loc (loc, EQ_EXPR, type, op0, op1);
13277 case LE_EXPR:
13278 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
13280 case LT_EXPR:
13281 return fold_build2_loc (loc, NE_EXPR, type, op0, op1);
13283 /* The GE_EXPR and LT_EXPR cases above are not normally
13284 reached because of previous transformations. */
13286 default:
13287 break;
13289 else if ((unsigned HOST_WIDE_INT) TREE_INT_CST_HIGH (arg1)
13290 == max_hi
13291 && TREE_INT_CST_LOW (arg1) == max_lo - 1)
13292 switch (code)
13294 case GT_EXPR:
13295 arg1 = const_binop (PLUS_EXPR, arg1,
13296 build_int_cst (TREE_TYPE (arg1), 1));
13297 return fold_build2_loc (loc, EQ_EXPR, type,
13298 fold_convert_loc (loc,
13299 TREE_TYPE (arg1), arg0),
13300 arg1);
13301 case LE_EXPR:
13302 arg1 = const_binop (PLUS_EXPR, arg1,
13303 build_int_cst (TREE_TYPE (arg1), 1));
13304 return fold_build2_loc (loc, NE_EXPR, type,
13305 fold_convert_loc (loc, TREE_TYPE (arg1),
13306 arg0),
13307 arg1);
13308 default:
13309 break;
13311 else if ((unsigned HOST_WIDE_INT) TREE_INT_CST_HIGH (arg1)
13312 == min_hi
13313 && TREE_INT_CST_LOW (arg1) == min_lo)
13314 switch (code)
13316 case LT_EXPR:
13317 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
13319 case LE_EXPR:
13320 return fold_build2_loc (loc, EQ_EXPR, type, op0, op1);
13322 case GE_EXPR:
13323 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
13325 case GT_EXPR:
13326 return fold_build2_loc (loc, NE_EXPR, type, op0, op1);
13328 default:
13329 break;
13331 else if ((unsigned HOST_WIDE_INT) TREE_INT_CST_HIGH (arg1)
13332 == min_hi
13333 && TREE_INT_CST_LOW (arg1) == min_lo + 1)
13334 switch (code)
13336 case GE_EXPR:
13337 arg1 = const_binop (MINUS_EXPR, arg1, integer_one_node);
13338 return fold_build2_loc (loc, NE_EXPR, type,
13339 fold_convert_loc (loc,
13340 TREE_TYPE (arg1), arg0),
13341 arg1);
13342 case LT_EXPR:
13343 arg1 = const_binop (MINUS_EXPR, arg1, integer_one_node);
13344 return fold_build2_loc (loc, EQ_EXPR, type,
13345 fold_convert_loc (loc, TREE_TYPE (arg1),
13346 arg0),
13347 arg1);
13348 default:
13349 break;
13352 else if (TREE_INT_CST_HIGH (arg1) == signed_max_hi
13353 && TREE_INT_CST_LOW (arg1) == signed_max_lo
13354 && TYPE_UNSIGNED (arg1_type)
13355 /* We will flip the signedness of the comparison operator
13356 associated with the mode of arg1, so the sign bit is
13357 specified by this mode. Check that arg1 is the signed
13358 max associated with this sign bit. */
13359 && width == GET_MODE_BITSIZE (TYPE_MODE (arg1_type))
13360 /* signed_type does not work on pointer types. */
13361 && INTEGRAL_TYPE_P (arg1_type))
13363 /* The following case also applies to X < signed_max+1
13364 and X >= signed_max+1 because of previous transformations. */
13365 if (code == LE_EXPR || code == GT_EXPR)
13367 tree st;
13368 st = signed_type_for (TREE_TYPE (arg1));
13369 return fold_build2_loc (loc,
13370 code == LE_EXPR ? GE_EXPR : LT_EXPR,
13371 type, fold_convert_loc (loc, st, arg0),
13372 build_int_cst (st, 0));
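/* For illustration: for 32-bit unsigned x, x > 0x7fffffff holds
   exactly when the sign bit is set, so it folds to (int) x < 0, and
   x <= 0x7fffffff to (int) x >= 0. */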
13378 /* If we are comparing an ABS_EXPR with a constant, we can
13379 convert all the cases into explicit comparisons, but they may
13380 well not be faster than doing the ABS and one comparison.
13381 But ABS (X) <= C is a range comparison, which becomes a subtraction
13382 and a comparison, and is probably faster. */
13383 if (code == LE_EXPR
13384 && TREE_CODE (arg1) == INTEGER_CST
13385 && TREE_CODE (arg0) == ABS_EXPR
13386 && ! TREE_SIDE_EFFECTS (arg0)
13387 && (0 != (tem = negate_expr (arg1)))
13388 && TREE_CODE (tem) == INTEGER_CST
13389 && !TREE_OVERFLOW (tem))
13390 return fold_build2_loc (loc, TRUTH_ANDIF_EXPR, type,
13391 build2 (GE_EXPR, type,
13392 TREE_OPERAND (arg0, 0), tem),
13393 build2 (LE_EXPR, type,
13394 TREE_OPERAND (arg0, 0), arg1));
13396 /* Convert ABS_EXPR<x> >= 0 to true. */
13397 strict_overflow_p = false;
13398 if (code == GE_EXPR
13399 && (integer_zerop (arg1)
13400 || (! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0)))
13401 && real_zerop (arg1)))
13402 && tree_expr_nonnegative_warnv_p (arg0, &strict_overflow_p))
13404 if (strict_overflow_p)
13405 fold_overflow_warning (("assuming signed overflow does not occur "
13406 "when simplifying comparison of "
13407 "absolute value and zero"),
13408 WARN_STRICT_OVERFLOW_CONDITIONAL);
13409 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
13412 /* Convert ABS_EXPR<x> < 0 to false. */
13413 strict_overflow_p = false;
13414 if (code == LT_EXPR
13415 && (integer_zerop (arg1) || real_zerop (arg1))
13416 && tree_expr_nonnegative_warnv_p (arg0, &strict_overflow_p))
13418 if (strict_overflow_p)
13419 fold_overflow_warning (("assuming signed overflow does not occur "
13420 "when simplifying comparison of "
13421 "absolute value and zero"),
13422 WARN_STRICT_OVERFLOW_CONDITIONAL);
13423 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
13426 /* If X is unsigned, convert X < (1 << Y) into X >> Y == 0
13427 and similarly for >= into !=. */
13428 if ((code == LT_EXPR || code == GE_EXPR)
13429 && TYPE_UNSIGNED (TREE_TYPE (arg0))
13430 && TREE_CODE (arg1) == LSHIFT_EXPR
13431 && integer_onep (TREE_OPERAND (arg1, 0)))
13432 return build2_loc (loc, code == LT_EXPR ? EQ_EXPR : NE_EXPR, type,
13433 build2 (RSHIFT_EXPR, TREE_TYPE (arg0), arg0,
13434 TREE_OPERAND (arg1, 1)),
13435 build_int_cst (TREE_TYPE (arg0), 0));
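/* For illustration: for unsigned x, x < (1 << y) holds exactly when
   no bit at position y or above is set, i.e. (x >> y) == 0; likewise
   x >= (1 << y) becomes (x >> y) != 0. */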
13437 if ((code == LT_EXPR || code == GE_EXPR)
13438 && TYPE_UNSIGNED (TREE_TYPE (arg0))
13439 && CONVERT_EXPR_P (arg1)
13440 && TREE_CODE (TREE_OPERAND (arg1, 0)) == LSHIFT_EXPR
13441 && integer_onep (TREE_OPERAND (TREE_OPERAND (arg1, 0), 0)))
13443 tem = build2 (RSHIFT_EXPR, TREE_TYPE (arg0), arg0,
13444 TREE_OPERAND (TREE_OPERAND (arg1, 0), 1));
13445 return build2_loc (loc, code == LT_EXPR ? EQ_EXPR : NE_EXPR, type,
13446 fold_convert_loc (loc, TREE_TYPE (arg0), tem),
13447 build_int_cst (TREE_TYPE (arg0), 0));
13450 return NULL_TREE;
13452 case UNORDERED_EXPR:
13453 case ORDERED_EXPR:
13454 case UNLT_EXPR:
13455 case UNLE_EXPR:
13456 case UNGT_EXPR:
13457 case UNGE_EXPR:
13458 case UNEQ_EXPR:
13459 case LTGT_EXPR:
13460 if (TREE_CODE (arg0) == REAL_CST && TREE_CODE (arg1) == REAL_CST)
13462 t1 = fold_relational_const (code, type, arg0, arg1);
13463 if (t1 != NULL_TREE)
13464 return t1;
13467 /* If the first operand is NaN, the result is constant. */
13468 if (TREE_CODE (arg0) == REAL_CST
13469 && REAL_VALUE_ISNAN (TREE_REAL_CST (arg0))
13470 && (code != LTGT_EXPR || ! flag_trapping_math))
13472 t1 = (code == ORDERED_EXPR || code == LTGT_EXPR)
13473 ? integer_zero_node
13474 : integer_one_node;
13475 return omit_one_operand_loc (loc, type, t1, arg1);
13478 /* If the second operand is NaN, the result is constant. */
13479 if (TREE_CODE (arg1) == REAL_CST
13480 && REAL_VALUE_ISNAN (TREE_REAL_CST (arg1))
13481 && (code != LTGT_EXPR || ! flag_trapping_math))
13483 t1 = (code == ORDERED_EXPR || code == LTGT_EXPR)
13484 ? integer_zero_node
13485 : integer_one_node;
13486 return omit_one_operand_loc (loc, type, t1, arg0);
13489 /* Simplify unordered comparison of something with itself. */
13490 if ((code == UNLE_EXPR || code == UNGE_EXPR || code == UNEQ_EXPR)
13491 && operand_equal_p (arg0, arg1, 0))
13492 return constant_boolean_node (1, type);
13494 if (code == LTGT_EXPR
13495 && !flag_trapping_math
13496 && operand_equal_p (arg0, arg1, 0))
13497 return constant_boolean_node (0, type);
13499 /* Fold (double)float1 CMP (double)float2 into float1 CMP float2. */
13501 tree targ0 = strip_float_extensions (arg0);
13502 tree targ1 = strip_float_extensions (arg1);
13503 tree newtype = TREE_TYPE (targ0);
13505 if (TYPE_PRECISION (TREE_TYPE (targ1)) > TYPE_PRECISION (newtype))
13506 newtype = TREE_TYPE (targ1);
13508 if (TYPE_PRECISION (newtype) < TYPE_PRECISION (TREE_TYPE (arg0)))
13509 return fold_build2_loc (loc, code, type,
13510 fold_convert_loc (loc, newtype, targ0),
13511 fold_convert_loc (loc, newtype, targ1));
13514 return NULL_TREE;
13516 case COMPOUND_EXPR:
13517 /* When pedantic, a compound expression can be neither an lvalue
13518 nor an integer constant expression. */
13519 if (TREE_SIDE_EFFECTS (arg0) || TREE_CONSTANT (arg1))
13520 return NULL_TREE;
13521 /* Don't let (0, 0) be a null pointer constant. */
13522 tem = integer_zerop (arg1) ? build1 (NOP_EXPR, type, arg1)
13523 : fold_convert_loc (loc, type, arg1);
13524 return pedantic_non_lvalue_loc (loc, tem);
13526 case COMPLEX_EXPR:
13527 if ((TREE_CODE (arg0) == REAL_CST
13528 && TREE_CODE (arg1) == REAL_CST)
13529 || (TREE_CODE (arg0) == INTEGER_CST
13530 && TREE_CODE (arg1) == INTEGER_CST))
13531 return build_complex (type, arg0, arg1);
13532 if (TREE_CODE (arg0) == REALPART_EXPR
13533 && TREE_CODE (arg1) == IMAGPART_EXPR
13534 && TREE_TYPE (TREE_OPERAND (arg0, 0)) == type
13535 && operand_equal_p (TREE_OPERAND (arg0, 0),
13536 TREE_OPERAND (arg1, 0), 0))
13537 return omit_one_operand_loc (loc, type, TREE_OPERAND (arg0, 0),
13538 TREE_OPERAND (arg1, 0));
13539 return NULL_TREE;
13541 case ASSERT_EXPR:
13542 /* An ASSERT_EXPR should never be passed to fold_binary. */
13543 gcc_unreachable ();
13545 case VEC_PACK_TRUNC_EXPR:
13546 case VEC_PACK_FIX_TRUNC_EXPR:
13548 unsigned int nelts = TYPE_VECTOR_SUBPARTS (type), i;
13549 tree *elts, vals = NULL_TREE;
13551 gcc_assert (TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0)) == nelts / 2
13552 && TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg1)) == nelts / 2);
13553 if (TREE_CODE (arg0) != VECTOR_CST || TREE_CODE (arg1) != VECTOR_CST)
13554 return NULL_TREE;
13556 elts = XALLOCAVEC (tree, nelts);
13557 if (!vec_cst_ctor_to_array (arg0, elts)
13558 || !vec_cst_ctor_to_array (arg1, elts + nelts / 2))
13559 return NULL_TREE;
13561 for (i = 0; i < nelts; i++)
13563 elts[i] = fold_convert_const (code == VEC_PACK_TRUNC_EXPR
13564 ? NOP_EXPR : FIX_TRUNC_EXPR,
13565 TREE_TYPE (type), elts[i]);
13566 if (elts[i] == NULL_TREE || !CONSTANT_CLASS_P (elts[i]))
13567 return NULL_TREE;
13570 for (i = 0; i < nelts; i++)
13571 vals = tree_cons (NULL_TREE, elts[nelts - i - 1], vals);
13572 return build_vector (type, vals);
13575 case VEC_WIDEN_MULT_LO_EXPR:
13576 case VEC_WIDEN_MULT_HI_EXPR:
13578 unsigned int nelts = TYPE_VECTOR_SUBPARTS (type), i;
13579 tree *elts, vals = NULL_TREE;
13581 gcc_assert (TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0)) == nelts * 2
13582 && TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg1)) == nelts * 2);
13583 if (TREE_CODE (arg0) != VECTOR_CST || TREE_CODE (arg1) != VECTOR_CST)
13584 return NULL_TREE;
13586 elts = XALLOCAVEC (tree, nelts * 4);
13587 if (!vec_cst_ctor_to_array (arg0, elts)
13588 || !vec_cst_ctor_to_array (arg1, elts + nelts * 2))
13589 return NULL_TREE;
13591 if ((!BYTES_BIG_ENDIAN) ^ (code == VEC_WIDEN_MULT_LO_EXPR))
13592 elts += nelts;
13594 for (i = 0; i < nelts; i++)
13596 elts[i] = fold_convert_const (NOP_EXPR, TREE_TYPE (type), elts[i]);
13597 elts[i + nelts * 2]
13598 = fold_convert_const (NOP_EXPR, TREE_TYPE (type),
13599 elts[i + nelts * 2]);
13600 if (elts[i] == NULL_TREE || elts[i + nelts * 2] == NULL_TREE)
13601 return NULL_TREE;
13602 elts[i] = const_binop (MULT_EXPR, elts[i], elts[i + nelts * 2]);
13603 if (elts[i] == NULL_TREE || !CONSTANT_CLASS_P (elts[i]))
13604 return NULL_TREE;
13607 for (i = 0; i < nelts; i++)
13608 vals = tree_cons (NULL_TREE, elts[nelts - i - 1], vals);
13609 return build_vector (type, vals);
13612 default:
13613 return NULL_TREE;
13614 } /* switch (code) */
13617 /* Callback for walk_tree, looking for LABEL_EXPR. Return *TP if it is
13618 a LABEL_EXPR; otherwise return NULL_TREE. Do not check the subtrees
13619 of GOTO_EXPR. */
13621 static tree
13622 contains_label_1 (tree *tp, int *walk_subtrees, void *data ATTRIBUTE_UNUSED)
13624 switch (TREE_CODE (*tp))
13626 case LABEL_EXPR:
13627 return *tp;
13629 case GOTO_EXPR:
13630 *walk_subtrees = 0;
13632 /* ... fall through ... */
13634 default:
13635 return NULL_TREE;
13639 /* Return whether the sub-tree ST contains a label which is accessible from
13640 outside the sub-tree. */
13642 static bool
13643 contains_label_p (tree st)
13645 return
13646 (walk_tree_without_duplicates (&st, contains_label_1, NULL) != NULL_TREE);
13649 /* Fold a ternary expression of code CODE and type TYPE with operands
13650 OP0, OP1, and OP2. Return the folded expression if folding is
13651 successful. Otherwise, return NULL_TREE. */
13653 tree
13654 fold_ternary_loc (location_t loc, enum tree_code code, tree type,
13655 tree op0, tree op1, tree op2)
13657 tree tem;
13658 tree arg0 = NULL_TREE, arg1 = NULL_TREE, arg2 = NULL_TREE;
13659 enum tree_code_class kind = TREE_CODE_CLASS (code);
13661 gcc_assert (IS_EXPR_CODE_CLASS (kind)
13662 && TREE_CODE_LENGTH (code) == 3);
13664 /* Strip any conversions that don't change the mode. This is safe
13665 for every expression, except for a comparison expression because
13666 its signedness is derived from its operands. So, in the latter
13667 case, only strip conversions that don't change the signedness.
13669 Note that this is done as an internal manipulation within the
13670 constant folder, in order to find the simplest representation of
13671 the arguments so that their form can be studied. In any case,
13672 the appropriate type conversions should be put back in the tree
13673 that will get out of the constant folder. */
13674 if (op0)
13676 arg0 = op0;
13677 STRIP_NOPS (arg0);
13680 if (op1)
13682 arg1 = op1;
13683 STRIP_NOPS (arg1);
13686 if (op2)
13688 arg2 = op2;
13689 STRIP_NOPS (arg2);
13692 switch (code)
13694 case COMPONENT_REF:
13695 if (TREE_CODE (arg0) == CONSTRUCTOR
13696 && ! type_contains_placeholder_p (TREE_TYPE (arg0)))
13698 unsigned HOST_WIDE_INT idx;
13699 tree field, value;
13700 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (arg0), idx, field, value)
13701 if (field == arg1)
13702 return value;
13704 return NULL_TREE;
13706 case COND_EXPR:
13707 /* Pedantic ANSI C says that a conditional expression is never an lvalue,
13708 so all simple results must be passed through pedantic_non_lvalue. */
13709 if (TREE_CODE (arg0) == INTEGER_CST)
13711 tree unused_op = integer_zerop (arg0) ? op1 : op2;
13712 tem = integer_zerop (arg0) ? op2 : op1;
13713 /* Only optimize constant conditions when the selected branch
13714 has the same type as the COND_EXPR. This avoids optimizing
13715 away "c ? x : throw", where the throw has a void type.
13716 Avoid throwing away the operand that contains a label. */
13717 if ((!TREE_SIDE_EFFECTS (unused_op)
13718 || !contains_label_p (unused_op))
13719 && (! VOID_TYPE_P (TREE_TYPE (tem))
13720 || VOID_TYPE_P (type)))
13721 return pedantic_non_lvalue_loc (loc, tem);
13722 return NULL_TREE;
13724 if (operand_equal_p (arg1, op2, 0))
13725 return pedantic_omit_one_operand_loc (loc, type, arg1, arg0);
13727 /* If we have A op B ? A : C, we may be able to convert this to a
13728 simpler expression, depending on the operation and the values
13729 of B and C. Signed zeros prevent all of these transformations,
13730 for reasons given above each one.
13732 Also try swapping the arguments and inverting the conditional. */
13733 if (COMPARISON_CLASS_P (arg0)
13734 && operand_equal_for_comparison_p (TREE_OPERAND (arg0, 0),
13735 arg1, TREE_OPERAND (arg0, 1))
13736 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg1))))
13738 tem = fold_cond_expr_with_comparison (loc, type, arg0, op1, op2);
13739 if (tem)
13740 return tem;
13743 if (COMPARISON_CLASS_P (arg0)
13744 && operand_equal_for_comparison_p (TREE_OPERAND (arg0, 0),
13745 op2,
13746 TREE_OPERAND (arg0, 1))
13747 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (op2))))
13749 location_t loc0 = expr_location_or (arg0, loc);
13750 tem = fold_truth_not_expr (loc0, arg0);
13751 if (tem && COMPARISON_CLASS_P (tem))
13753 tem = fold_cond_expr_with_comparison (loc, type, tem, op2, op1);
13754 if (tem)
13755 return tem;
13759 /* If the second operand is simpler than the third, swap them
13760 since that produces better jump optimization results. */
13761 if (truth_value_p (TREE_CODE (arg0))
13762 && tree_swap_operands_p (op1, op2, false))
13764 location_t loc0 = expr_location_or (arg0, loc);
13765 /* See if this can be inverted. If it can't, possibly because
13766 it was a floating-point inequality comparison, don't do
13767 anything. */
13768 tem = fold_truth_not_expr (loc0, arg0);
13769 if (tem)
13770 return fold_build3_loc (loc, code, type, tem, op2, op1);
13773 /* Convert A ? 1 : 0 to simply A. */
13774 if (integer_onep (op1)
13775 && integer_zerop (op2)
13776 /* If we try to convert OP0 to our type, the
13777 call to fold will try to move the conversion inside
13778 a COND, which will recurse. In that case, the COND_EXPR
13779 is probably the best choice, so leave it alone. */
13780 && type == TREE_TYPE (arg0))
13781 return pedantic_non_lvalue_loc (loc, arg0);
13783 /* Convert A ? 0 : 1 to !A. This prefers the use of NOT_EXPR
13784 over COND_EXPR in cases such as floating point comparisons. */
13785 if (integer_zerop (op1)
13786 && integer_onep (op2)
13787 && truth_value_p (TREE_CODE (arg0)))
13788 return pedantic_non_lvalue_loc (loc,
13789 fold_convert_loc (loc, type,
13790 invert_truthvalue_loc (loc,
13791 arg0)));
13793 /* A < 0 ? <sign bit of A> : 0 is simply (A & <sign bit of A>). */
13794 if (TREE_CODE (arg0) == LT_EXPR
13795 && integer_zerop (TREE_OPERAND (arg0, 1))
13796 && integer_zerop (op2)
13797 && (tem = sign_bit_p (TREE_OPERAND (arg0, 0), arg1)))
13799 /* sign_bit_p only checks ARG1 bits within A's precision.
13800 If <sign bit of A> has wider type than A, bits outside
13801 of A's precision in <sign bit of A> need to be checked.
13802 If they are all 0, this optimization must be done
13803 in A's unsigned type; if they are all 1, in A's signed
13804 type; otherwise it can't be done. */
13805 if (TYPE_PRECISION (TREE_TYPE (tem))
13806 < TYPE_PRECISION (TREE_TYPE (arg1))
13807 && TYPE_PRECISION (TREE_TYPE (tem))
13808 < TYPE_PRECISION (type))
13810 unsigned HOST_WIDE_INT mask_lo;
13811 HOST_WIDE_INT mask_hi;
13812 int inner_width, outer_width;
13813 tree tem_type;
13815 inner_width = TYPE_PRECISION (TREE_TYPE (tem));
13816 outer_width = TYPE_PRECISION (TREE_TYPE (arg1));
13817 if (outer_width > TYPE_PRECISION (type))
13818 outer_width = TYPE_PRECISION (type);
13820 if (outer_width > HOST_BITS_PER_WIDE_INT)
13822 mask_hi = ((unsigned HOST_WIDE_INT) -1
13823 >> (2 * HOST_BITS_PER_WIDE_INT - outer_width));
13824 mask_lo = -1;
13826 else
13828 mask_hi = 0;
13829 mask_lo = ((unsigned HOST_WIDE_INT) -1
13830 >> (HOST_BITS_PER_WIDE_INT - outer_width));
13832 if (inner_width > HOST_BITS_PER_WIDE_INT)
13834 mask_hi &= ~((unsigned HOST_WIDE_INT) -1
13835 >> (HOST_BITS_PER_WIDE_INT - inner_width));
13836 mask_lo = 0;
13838 else
13839 mask_lo &= ~((unsigned HOST_WIDE_INT) -1
13840 >> (HOST_BITS_PER_WIDE_INT - inner_width));
13842 if ((TREE_INT_CST_HIGH (arg1) & mask_hi) == mask_hi
13843 && (TREE_INT_CST_LOW (arg1) & mask_lo) == mask_lo)
13845 tem_type = signed_type_for (TREE_TYPE (tem));
13846 tem = fold_convert_loc (loc, tem_type, tem);
13848 else if ((TREE_INT_CST_HIGH (arg1) & mask_hi) == 0
13849 && (TREE_INT_CST_LOW (arg1) & mask_lo) == 0)
13851 tem_type = unsigned_type_for (TREE_TYPE (tem));
13852 tem = fold_convert_loc (loc, tem_type, tem);
13854 else
13855 tem = NULL;
13858 if (tem)
13859 return
13860 fold_convert_loc (loc, type,
13861 fold_build2_loc (loc, BIT_AND_EXPR,
13862 TREE_TYPE (tem), tem,
13863 fold_convert_loc (loc,
13864 TREE_TYPE (tem),
13865 arg1)));
13868 /* (A >> N) & 1 ? (1 << N) : 0 is simply A & (1 << N). A & 1 was
13869 already handled above. */
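/* For example, ((A >> 3) & 1) ? 8 : 0 folds to A & 8.  */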
13870 if (TREE_CODE (arg0) == BIT_AND_EXPR
13871 && integer_onep (TREE_OPERAND (arg0, 1))
13872 && integer_zerop (op2)
13873 && integer_pow2p (arg1))
13875 tree tem = TREE_OPERAND (arg0, 0);
13876 STRIP_NOPS (tem);
13877 if (TREE_CODE (tem) == RSHIFT_EXPR
13878 && TREE_CODE (TREE_OPERAND (tem, 1)) == INTEGER_CST
13879 && (unsigned HOST_WIDE_INT) tree_log2 (arg1) ==
13880 TREE_INT_CST_LOW (TREE_OPERAND (tem, 1)))
13881 return fold_build2_loc (loc, BIT_AND_EXPR, type,
13882 TREE_OPERAND (tem, 0), arg1);
13885 /* A & N ? N : 0 is simply A & N if N is a power of two. This
13886 is probably obsolete because the first operand should be a
13887 truth value (that's why we have the two cases above), but let's
13888 leave it in until we can confirm this for all front-ends. */
13889 if (integer_zerop (op2)
13890 && TREE_CODE (arg0) == NE_EXPR
13891 && integer_zerop (TREE_OPERAND (arg0, 1))
13892 && integer_pow2p (arg1)
13893 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_AND_EXPR
13894 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
13895 arg1, OEP_ONLY_CONST))
13896 return pedantic_non_lvalue_loc (loc,
13897 fold_convert_loc (loc, type,
13898 TREE_OPERAND (arg0, 0)));
13900 /* Convert A ? B : 0 into A && B if A and B are truth values. */
13901 if (integer_zerop (op2)
13902 && truth_value_p (TREE_CODE (arg0))
13903 && truth_value_p (TREE_CODE (arg1)))
13904 return fold_build2_loc (loc, TRUTH_ANDIF_EXPR, type,
13905 fold_convert_loc (loc, type, arg0),
13906 arg1);
13908 /* Convert A ? B : 1 into !A || B if A and B are truth values. */
13909 if (integer_onep (op2)
13910 && truth_value_p (TREE_CODE (arg0))
13911 && truth_value_p (TREE_CODE (arg1)))
13913 location_t loc0 = expr_location_or (arg0, loc);
13914 /* Only perform transformation if ARG0 is easily inverted. */
13915 tem = fold_truth_not_expr (loc0, arg0);
13916 if (tem)
13917 return fold_build2_loc (loc, TRUTH_ORIF_EXPR, type,
13918 fold_convert_loc (loc, type, tem),
13919 arg1);
13922 /* Convert A ? 0 : B into !A && B if A and B are truth values. */
13923 if (integer_zerop (arg1)
13924 && truth_value_p (TREE_CODE (arg0))
13925 && truth_value_p (TREE_CODE (op2)))
13927 location_t loc0 = expr_location_or (arg0, loc);
13928 /* Only perform transformation if ARG0 is easily inverted. */
13929 tem = fold_truth_not_expr (loc0, arg0);
13930 if (tem)
13931 return fold_build2_loc (loc, TRUTH_ANDIF_EXPR, type,
13932 fold_convert_loc (loc, type, tem),
13933 op2);
13936 /* Convert A ? 1 : B into A || B if A and B are truth values. */
13937 if (integer_onep (arg1)
13938 && truth_value_p (TREE_CODE (arg0))
13939 && truth_value_p (TREE_CODE (op2)))
13940 return fold_build2_loc (loc, TRUTH_ORIF_EXPR, type,
13941 fold_convert_loc (loc, type, arg0),
13942 op2);
13944 return NULL_TREE;
13946 case CALL_EXPR:
13947 /* CALL_EXPRs used to be ternary exprs. Catch any mistaken uses
13948 of fold_ternary on them. */
13949 gcc_unreachable ();
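/* Extracting a single element from a VECTOR_CST or CONSTRUCTOR can be
folded to the element itself when the reference covers exactly one
element and is aligned on an element boundary.  */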
13951 case BIT_FIELD_REF:
13952 if ((TREE_CODE (arg0) == VECTOR_CST
13953 || TREE_CODE (arg0) == CONSTRUCTOR)
13954 && type == TREE_TYPE (TREE_TYPE (arg0)))
13956 unsigned HOST_WIDE_INT width = tree_low_cst (arg1, 1);
13957 unsigned HOST_WIDE_INT idx = tree_low_cst (op2, 1);
13959 if (width != 0
13960 && simple_cst_equal (arg1, TYPE_SIZE (type)) == 1
13961 && (idx % width) == 0
13962 && (idx = idx / width)
13963 < TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0)))
13965 if (TREE_CODE (arg0) == VECTOR_CST)
13967 tree elements = TREE_VECTOR_CST_ELTS (arg0);
13968 while (idx-- > 0 && elements)
13969 elements = TREE_CHAIN (elements);
13970 if (elements)
13971 return TREE_VALUE (elements);
13973 else if (idx < CONSTRUCTOR_NELTS (arg0))
13974 return CONSTRUCTOR_ELT (arg0, idx)->value;
13975 return build_zero_cst (type);
13979 /* A bit-field-ref that referenced the full argument can be stripped. */
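/* For example, BIT_FIELD_REF <X, 32, 0> of a 32-bit integer X reduces
to a conversion of X to TYPE.  */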
13980 if (INTEGRAL_TYPE_P (TREE_TYPE (arg0))
13981 && TYPE_PRECISION (TREE_TYPE (arg0)) == tree_low_cst (arg1, 1)
13982 && integer_zerop (op2))
13983 return fold_convert_loc (loc, type, arg0);
13985 return NULL_TREE;
13987 case FMA_EXPR:
13988 /* For integers we can decompose the FMA if possible. */
13989 if (TREE_CODE (arg0) == INTEGER_CST
13990 && TREE_CODE (arg1) == INTEGER_CST)
13991 return fold_build2_loc (loc, PLUS_EXPR, type,
13992 const_binop (MULT_EXPR, arg0, arg1), arg2);
13993 if (integer_zerop (arg2))
13994 return fold_build2_loc (loc, MULT_EXPR, type, arg0, arg1);
13996 return fold_fma (loc, type, arg0, arg1, arg2);
13998 case VEC_PERM_EXPR:
13999 if (TREE_CODE (arg2) == VECTOR_CST)
14001 unsigned int nelts = TYPE_VECTOR_SUBPARTS (type), i;
14002 unsigned char *sel = XALLOCAVEC (unsigned char, nelts);
14003 tree t;
14004 bool need_mask_canon = false;
14006 gcc_assert (nelts == TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg2)));
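/* Read the selector into SEL, reducing each element modulo 2*NELTS.
Remember whether any element had to be reduced or had nonzero high
bits; if so, the mask must be rebuilt in canonical form below.  */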
14007 for (i = 0, t = TREE_VECTOR_CST_ELTS (arg2);
14008 i < nelts && t; i++, t = TREE_CHAIN (t))
14010 if (TREE_CODE (TREE_VALUE (t)) != INTEGER_CST)
14011 return NULL_TREE;
14013 sel[i] = TREE_INT_CST_LOW (TREE_VALUE (t)) & (2 * nelts - 1);
14014 if (TREE_INT_CST_HIGH (TREE_VALUE (t))
14015 || ((unsigned HOST_WIDE_INT)
14016 TREE_INT_CST_LOW (TREE_VALUE (t)) != sel[i]))
14017 need_mask_canon = true;
14019 if (t)
14020 return NULL_TREE;
14021 for (; i < nelts; i++)
14022 sel[i] = 0;
14024 if ((TREE_CODE (arg0) == VECTOR_CST
14025 || TREE_CODE (arg0) == CONSTRUCTOR)
14026 && (TREE_CODE (arg1) == VECTOR_CST
14027 || TREE_CODE (arg1) == CONSTRUCTOR))
14029 t = fold_vec_perm (type, arg0, arg1, sel);
14030 if (t != NULL_TREE)
14031 return t;
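/* If folding to a constant failed but the mask was not canonical,
rebuild the VEC_PERM_EXPR with the reduced mask.  Note tree_cons
prepends, so the elements are chained in reverse to come out in
order.  */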
14034 if (need_mask_canon && arg2 == op2)
14036 tree list = NULL_TREE, eltype = TREE_TYPE (TREE_TYPE (arg2));
14037 for (i = 0; i < nelts; i++)
14038 list = tree_cons (NULL_TREE,
14039 build_int_cst (eltype, sel[nelts - i - 1]),
14040 list);
14041 t = build_vector (TREE_TYPE (arg2), list);
14042 return build3_loc (loc, VEC_PERM_EXPR, type, op0, op1, t);
14045 return NULL_TREE;
14047 default:
14048 return NULL_TREE;
14049 } /* switch (code) */
14052 /* Perform constant folding and related simplification of EXPR.
14053 The related simplifications include x*1 => x, x*0 => 0, etc.,
14054 and application of the associative law.
14055 NOP_EXPR conversions may be removed freely (as long as we
14056 are careful not to change the type of the overall expression).
14057 We cannot simplify through a CONVERT_EXPR, FIX_EXPR or FLOAT_EXPR,
14058 but we can constant-fold them if they have constant operands. */
14060 #ifdef ENABLE_FOLD_CHECKING
14061 # define fold(x) fold_1 (x)
14062 static tree fold_1 (tree);
14063 static
14064 #endif
14065 tree
14066 fold (tree expr)
14068 const tree t = expr;
14069 enum tree_code code = TREE_CODE (t);
14070 enum tree_code_class kind = TREE_CODE_CLASS (code);
14071 tree tem;
14072 location_t loc = EXPR_LOCATION (expr);
14074 /* Return right away if a constant. */
14075 if (kind == tcc_constant)
14076 return t;
14078 /* CALL_EXPR-like objects with variable numbers of operands are
14079 treated specially. */
14080 if (kind == tcc_vl_exp)
14082 if (code == CALL_EXPR)
14084 tem = fold_call_expr (loc, expr, false);
14085 return tem ? tem : expr;
14087 return expr;
14090 if (IS_EXPR_CODE_CLASS (kind))
14092 tree type = TREE_TYPE (t);
14093 tree op0, op1, op2;
14095 switch (TREE_CODE_LENGTH (code))
14097 case 1:
14098 op0 = TREE_OPERAND (t, 0);
14099 tem = fold_unary_loc (loc, code, type, op0);
14100 return tem ? tem : expr;
14101 case 2:
14102 op0 = TREE_OPERAND (t, 0);
14103 op1 = TREE_OPERAND (t, 1);
14104 tem = fold_binary_loc (loc, code, type, op0, op1);
14105 return tem ? tem : expr;
14106 case 3:
14107 op0 = TREE_OPERAND (t, 0);
14108 op1 = TREE_OPERAND (t, 1);
14109 op2 = TREE_OPERAND (t, 2);
14110 tem = fold_ternary_loc (loc, code, type, op0, op1, op2);
14111 return tem ? tem : expr;
14112 default:
14113 break;
14117 switch (code)
14119 case ARRAY_REF:
14121 tree op0 = TREE_OPERAND (t, 0);
14122 tree op1 = TREE_OPERAND (t, 1);
14124 if (TREE_CODE (op1) == INTEGER_CST
14125 && TREE_CODE (op0) == CONSTRUCTOR
14126 && ! type_contains_placeholder_p (TREE_TYPE (op0)))
14128 VEC(constructor_elt,gc) *elts = CONSTRUCTOR_ELTS (op0);
14129 unsigned HOST_WIDE_INT end = VEC_length (constructor_elt, elts);
14130 unsigned HOST_WIDE_INT begin = 0;
14132 /* Find a matching index by means of a binary search. */
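/* The constructor's indices must be sorted for the search to work;
each index is either a single INTEGER_CST or a RANGE_EXPR, and OP1
matches a RANGE_EXPR when it lies within the range's bounds.  */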
14133 while (begin != end)
14135 unsigned HOST_WIDE_INT middle = (begin + end) / 2;
14136 tree index = VEC_index (constructor_elt, elts, middle)->index;
14138 if (TREE_CODE (index) == INTEGER_CST
14139 && tree_int_cst_lt (index, op1))
14140 begin = middle + 1;
14141 else if (TREE_CODE (index) == INTEGER_CST
14142 && tree_int_cst_lt (op1, index))
14143 end = middle;
14144 else if (TREE_CODE (index) == RANGE_EXPR
14145 && tree_int_cst_lt (TREE_OPERAND (index, 1), op1))
14146 begin = middle + 1;
14147 else if (TREE_CODE (index) == RANGE_EXPR
14148 && tree_int_cst_lt (op1, TREE_OPERAND (index, 0)))
14149 end = middle;
14150 else
14151 return VEC_index (constructor_elt, elts, middle)->value;
14155 return t;
14158 case CONST_DECL:
14159 return fold (DECL_INITIAL (t));
14161 default:
14162 return t;
14163 } /* switch (code) */
14166 #ifdef ENABLE_FOLD_CHECKING
14167 #undef fold
14169 static void fold_checksum_tree (const_tree, struct md5_ctx *, htab_t);
14170 static void fold_check_failed (const_tree, const_tree);
14171 void print_fold_checksum (const_tree);
14173 /* When --enable-checking=fold, compute a digest of expr before
14174 and after actual fold call to see if fold did not accidentally
14175 change original expr. */
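/* The hash table passed to fold_checksum_tree records nodes already
visited, so shared subtrees and cycles in the tree graph are
checksummed only once.  */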
14177 tree
14178 fold (tree expr)
14180 tree ret;
14181 struct md5_ctx ctx;
14182 unsigned char checksum_before[16], checksum_after[16];
14183 htab_t ht;
14185 ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
14186 md5_init_ctx (&ctx);
14187 fold_checksum_tree (expr, &ctx, ht);
14188 md5_finish_ctx (&ctx, checksum_before);
14189 htab_empty (ht);
14191 ret = fold_1 (expr);
14193 md5_init_ctx (&ctx);
14194 fold_checksum_tree (expr, &ctx, ht);
14195 md5_finish_ctx (&ctx, checksum_after);
14196 htab_delete (ht);
14198 if (memcmp (checksum_before, checksum_after, 16))
14199 fold_check_failed (expr, ret);
14201 return ret;
14204 void
14205 print_fold_checksum (const_tree expr)
14207 struct md5_ctx ctx;
14208 unsigned char checksum[16], cnt;
14209 htab_t ht;
14211 ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
14212 md5_init_ctx (&ctx);
14213 fold_checksum_tree (expr, &ctx, ht);
14214 md5_finish_ctx (&ctx, checksum);
14215 htab_delete (ht);
14216 for (cnt = 0; cnt < 16; ++cnt)
14217 fprintf (stderr, "%02x", checksum[cnt]);
14218 putc ('\n', stderr);
14221 static void
14222 fold_check_failed (const_tree expr ATTRIBUTE_UNUSED, const_tree ret ATTRIBUTE_UNUSED)
14224 internal_error ("fold check: original tree changed by fold");
14227 static void
14228 fold_checksum_tree (const_tree expr, struct md5_ctx *ctx, htab_t ht)
14230 void **slot;
14231 enum tree_code code;
14232 union tree_node buf;
14233 int i, len;
14235 recursive_label:
14236 if (expr == NULL)
14237 return;
14238 slot = (void **) htab_find_slot (ht, expr, INSERT);
14239 if (*slot != NULL)
14240 return;
14241 *slot = CONST_CAST_TREE (expr);
14242 code = TREE_CODE (expr);
14243 if (TREE_CODE_CLASS (code) == tcc_declaration
14244 && DECL_ASSEMBLER_NAME_SET_P (expr))
14246 /* Allow DECL_ASSEMBLER_NAME to be modified. */
14247 memcpy ((char *) &buf, expr, tree_size (expr));
14248 SET_DECL_ASSEMBLER_NAME ((tree)&buf, NULL);
14249 expr = (tree) &buf;
14251 else if (TREE_CODE_CLASS (code) == tcc_type
14252 && (TYPE_POINTER_TO (expr)
14253 || TYPE_REFERENCE_TO (expr)
14254 || TYPE_CACHED_VALUES_P (expr)
14255 || TYPE_CONTAINS_PLACEHOLDER_INTERNAL (expr)
14256 || TYPE_NEXT_VARIANT (expr)))
14258 /* Allow these fields to be modified. */
14259 tree tmp;
14260 memcpy ((char *) &buf, expr, tree_size (expr));
14261 expr = tmp = (tree) &buf;
14262 TYPE_CONTAINS_PLACEHOLDER_INTERNAL (tmp) = 0;
14263 TYPE_POINTER_TO (tmp) = NULL;
14264 TYPE_REFERENCE_TO (tmp) = NULL;
14265 TYPE_NEXT_VARIANT (tmp) = NULL;
14266 if (TYPE_CACHED_VALUES_P (tmp))
14268 TYPE_CACHED_VALUES_P (tmp) = 0;
14269 TYPE_CACHED_VALUES (tmp) = NULL;
14272 md5_process_bytes (expr, tree_size (expr), ctx);
14273 if (CODE_CONTAINS_STRUCT (code, TS_TYPED))
14274 fold_checksum_tree (TREE_TYPE (expr), ctx, ht);
14275 if (TREE_CODE_CLASS (code) != tcc_type
14276 && TREE_CODE_CLASS (code) != tcc_declaration
14277 && code != TREE_LIST
14278 && code != SSA_NAME
14279 && CODE_CONTAINS_STRUCT (code, TS_COMMON))
14280 fold_checksum_tree (TREE_CHAIN (expr), ctx, ht);
14281 switch (TREE_CODE_CLASS (code))
14283 case tcc_constant:
14284 switch (code)
14286 case STRING_CST:
14287 md5_process_bytes (TREE_STRING_POINTER (expr),
14288 TREE_STRING_LENGTH (expr), ctx);
14289 break;
14290 case COMPLEX_CST:
14291 fold_checksum_tree (TREE_REALPART (expr), ctx, ht);
14292 fold_checksum_tree (TREE_IMAGPART (expr), ctx, ht);
14293 break;
14294 case VECTOR_CST:
14295 fold_checksum_tree (TREE_VECTOR_CST_ELTS (expr), ctx, ht);
14296 break;
14297 default:
14298 break;
14300 break;
14301 case tcc_exceptional:
14302 switch (code)
14304 case TREE_LIST:
14305 fold_checksum_tree (TREE_PURPOSE (expr), ctx, ht);
14306 fold_checksum_tree (TREE_VALUE (expr), ctx, ht);
14307 expr = TREE_CHAIN (expr);
14308 goto recursive_label;
14309 break;
14310 case TREE_VEC:
14311 for (i = 0; i < TREE_VEC_LENGTH (expr); ++i)
14312 fold_checksum_tree (TREE_VEC_ELT (expr, i), ctx, ht);
14313 break;
14314 default:
14315 break;
14317 break;
14318 case tcc_expression:
14319 case tcc_reference:
14320 case tcc_comparison:
14321 case tcc_unary:
14322 case tcc_binary:
14323 case tcc_statement:
14324 case tcc_vl_exp:
14325 len = TREE_OPERAND_LENGTH (expr);
14326 for (i = 0; i < len; ++i)
14327 fold_checksum_tree (TREE_OPERAND (expr, i), ctx, ht);
14328 break;
14329 case tcc_declaration:
14330 fold_checksum_tree (DECL_NAME (expr), ctx, ht);
14331 fold_checksum_tree (DECL_CONTEXT (expr), ctx, ht);
14332 if (CODE_CONTAINS_STRUCT (TREE_CODE (expr), TS_DECL_COMMON))
14334 fold_checksum_tree (DECL_SIZE (expr), ctx, ht);
14335 fold_checksum_tree (DECL_SIZE_UNIT (expr), ctx, ht);
14336 fold_checksum_tree (DECL_INITIAL (expr), ctx, ht);
14337 fold_checksum_tree (DECL_ABSTRACT_ORIGIN (expr), ctx, ht);
14338 fold_checksum_tree (DECL_ATTRIBUTES (expr), ctx, ht);
14340 if (CODE_CONTAINS_STRUCT (TREE_CODE (expr), TS_DECL_WITH_VIS))
14341 fold_checksum_tree (DECL_SECTION_NAME (expr), ctx, ht);
14343 if (CODE_CONTAINS_STRUCT (TREE_CODE (expr), TS_DECL_NON_COMMON))
14345 fold_checksum_tree (DECL_VINDEX (expr), ctx, ht);
14346 fold_checksum_tree (DECL_RESULT_FLD (expr), ctx, ht);
14347 fold_checksum_tree (DECL_ARGUMENT_FLD (expr), ctx, ht);
14349 break;
14350 case tcc_type:
14351 if (TREE_CODE (expr) == ENUMERAL_TYPE)
14352 fold_checksum_tree (TYPE_VALUES (expr), ctx, ht);
14353 fold_checksum_tree (TYPE_SIZE (expr), ctx, ht);
14354 fold_checksum_tree (TYPE_SIZE_UNIT (expr), ctx, ht);
14355 fold_checksum_tree (TYPE_ATTRIBUTES (expr), ctx, ht);
14356 fold_checksum_tree (TYPE_NAME (expr), ctx, ht);
14357 if (INTEGRAL_TYPE_P (expr)
14358 || SCALAR_FLOAT_TYPE_P (expr))
14360 fold_checksum_tree (TYPE_MIN_VALUE (expr), ctx, ht);
14361 fold_checksum_tree (TYPE_MAX_VALUE (expr), ctx, ht);
14363 fold_checksum_tree (TYPE_MAIN_VARIANT (expr), ctx, ht);
14364 if (TREE_CODE (expr) == RECORD_TYPE
14365 || TREE_CODE (expr) == UNION_TYPE
14366 || TREE_CODE (expr) == QUAL_UNION_TYPE)
14367 fold_checksum_tree (TYPE_BINFO (expr), ctx, ht);
14368 fold_checksum_tree (TYPE_CONTEXT (expr), ctx, ht);
14369 break;
14370 default:
14371 break;
14375 /* Helper function for outputting the checksum of a tree T. When
14376 debugging with gdb, you can "define mynext" to be "next" followed
14377 by "call debug_fold_checksum (op0)", then just trace down till the
14378 outputs differ. */
14380 DEBUG_FUNCTION void
14381 debug_fold_checksum (const_tree t)
14383 int i;
14384 unsigned char checksum[16];
14385 struct md5_ctx ctx;
14386 htab_t ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
14388 md5_init_ctx (&ctx);
14389 fold_checksum_tree (t, &ctx, ht);
14390 md5_finish_ctx (&ctx, checksum);
14391 htab_empty (ht);
14393 for (i = 0; i < 16; i++)
14394 fprintf (stderr, "%d ", checksum[i]);
14396 fprintf (stderr, "\n");
14399 #endif
14401 /* Fold a unary tree expression with code CODE of type TYPE with an
14402 operand OP0. LOC is the location of the resulting expression.
14403 Return a folded expression if successful. Otherwise, return a tree
14404 expression with code CODE of type TYPE with an operand OP0. */
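/* Under ENABLE_FOLD_CHECKING this also verifies, by comparing MD5
digests taken before and after the fold, that folding did not
modify OP0 in place.  */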
14406 tree
14407 fold_build1_stat_loc (location_t loc,
14408 enum tree_code code, tree type, tree op0 MEM_STAT_DECL)
14410 tree tem;
14411 #ifdef ENABLE_FOLD_CHECKING
14412 unsigned char checksum_before[16], checksum_after[16];
14413 struct md5_ctx ctx;
14414 htab_t ht;
14416 ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
14417 md5_init_ctx (&ctx);
14418 fold_checksum_tree (op0, &ctx, ht);
14419 md5_finish_ctx (&ctx, checksum_before);
14420 htab_empty (ht);
14421 #endif
14423 tem = fold_unary_loc (loc, code, type, op0);
14424 if (!tem)
14425 tem = build1_stat_loc (loc, code, type, op0 PASS_MEM_STAT);
14427 #ifdef ENABLE_FOLD_CHECKING
14428 md5_init_ctx (&ctx);
14429 fold_checksum_tree (op0, &ctx, ht);
14430 md5_finish_ctx (&ctx, checksum_after);
14431 htab_delete (ht);
14433 if (memcmp (checksum_before, checksum_after, 16))
14434 fold_check_failed (op0, tem);
14435 #endif
14436 return tem;
14439 /* Fold a binary tree expression with code CODE of type TYPE with
14440 operands OP0 and OP1. LOC is the location of the resulting
14441 expression. Return a folded expression if successful. Otherwise,
14442 return a tree expression with code CODE of type TYPE with operands
14443 OP0 and OP1. */
14445 tree
14446 fold_build2_stat_loc (location_t loc,
14447 enum tree_code code, tree type, tree op0, tree op1
14448 MEM_STAT_DECL)
14450 tree tem;
14451 #ifdef ENABLE_FOLD_CHECKING
14452 unsigned char checksum_before_op0[16],
14453 checksum_before_op1[16],
14454 checksum_after_op0[16],
14455 checksum_after_op1[16];
14456 struct md5_ctx ctx;
14457 htab_t ht;
14459 ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
14460 md5_init_ctx (&ctx);
14461 fold_checksum_tree (op0, &ctx, ht);
14462 md5_finish_ctx (&ctx, checksum_before_op0);
14463 htab_empty (ht);
14465 md5_init_ctx (&ctx);
14466 fold_checksum_tree (op1, &ctx, ht);
14467 md5_finish_ctx (&ctx, checksum_before_op1);
14468 htab_empty (ht);
14469 #endif
14471 tem = fold_binary_loc (loc, code, type, op0, op1);
14472 if (!tem)
14473 tem = build2_stat_loc (loc, code, type, op0, op1 PASS_MEM_STAT);
14475 #ifdef ENABLE_FOLD_CHECKING
14476 md5_init_ctx (&ctx);
14477 fold_checksum_tree (op0, &ctx, ht);
14478 md5_finish_ctx (&ctx, checksum_after_op0);
14479 htab_empty (ht);
14481 if (memcmp (checksum_before_op0, checksum_after_op0, 16))
14482 fold_check_failed (op0, tem);
14484 md5_init_ctx (&ctx);
14485 fold_checksum_tree (op1, &ctx, ht);
14486 md5_finish_ctx (&ctx, checksum_after_op1);
14487 htab_delete (ht);
14489 if (memcmp (checksum_before_op1, checksum_after_op1, 16))
14490 fold_check_failed (op1, tem);
14491 #endif
14492 return tem;
14495 /* Fold a ternary tree expression with code CODE of type TYPE with
14496 operands OP0, OP1, and OP2. Return a folded expression if
14497 successful. Otherwise, return a tree expression with code CODE of
14498 type TYPE with operands OP0, OP1, and OP2. */
14500 tree
14501 fold_build3_stat_loc (location_t loc, enum tree_code code, tree type,
14502 tree op0, tree op1, tree op2 MEM_STAT_DECL)
14504 tree tem;
14505 #ifdef ENABLE_FOLD_CHECKING
14506 unsigned char checksum_before_op0[16],
14507 checksum_before_op1[16],
14508 checksum_before_op2[16],
14509 checksum_after_op0[16],
14510 checksum_after_op1[16],
14511 checksum_after_op2[16];
14512 struct md5_ctx ctx;
14513 htab_t ht;
14515 ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
14516 md5_init_ctx (&ctx);
14517 fold_checksum_tree (op0, &ctx, ht);
14518 md5_finish_ctx (&ctx, checksum_before_op0);
14519 htab_empty (ht);
14521 md5_init_ctx (&ctx);
14522 fold_checksum_tree (op1, &ctx, ht);
14523 md5_finish_ctx (&ctx, checksum_before_op1);
14524 htab_empty (ht);
14526 md5_init_ctx (&ctx);
14527 fold_checksum_tree (op2, &ctx, ht);
14528 md5_finish_ctx (&ctx, checksum_before_op2);
14529 htab_empty (ht);
14530 #endif
14532 gcc_assert (TREE_CODE_CLASS (code) != tcc_vl_exp);
14533 tem = fold_ternary_loc (loc, code, type, op0, op1, op2);
14534 if (!tem)
14535 tem = build3_stat_loc (loc, code, type, op0, op1, op2 PASS_MEM_STAT);
14537 #ifdef ENABLE_FOLD_CHECKING
14538 md5_init_ctx (&ctx);
14539 fold_checksum_tree (op0, &ctx, ht);
14540 md5_finish_ctx (&ctx, checksum_after_op0);
14541 htab_empty (ht);
14543 if (memcmp (checksum_before_op0, checksum_after_op0, 16))
14544 fold_check_failed (op0, tem);
14546 md5_init_ctx (&ctx);
14547 fold_checksum_tree (op1, &ctx, ht);
14548 md5_finish_ctx (&ctx, checksum_after_op1);
14549 htab_empty (ht);
14551 if (memcmp (checksum_before_op1, checksum_after_op1, 16))
14552 fold_check_failed (op1, tem);
14554 md5_init_ctx (&ctx);
14555 fold_checksum_tree (op2, &ctx, ht);
14556 md5_finish_ctx (&ctx, checksum_after_op2);
14557 htab_delete (ht);
14559 if (memcmp (checksum_before_op2, checksum_after_op2, 16))
14560 fold_check_failed (op2, tem);
14561 #endif
14562 return tem;
14565 /* Fold a CALL_EXPR expression of type TYPE with operands FN and NARGS
14566 arguments in ARGARRAY, and a null static chain.
14567 Return a folded expression if successful. Otherwise, return a CALL_EXPR
14568 of type TYPE from the given operands as constructed by build_call_array. */
14570 tree
14571 fold_build_call_array_loc (location_t loc, tree type, tree fn,
14572 int nargs, tree *argarray)
14574 tree tem;
14575 #ifdef ENABLE_FOLD_CHECKING
14576 unsigned char checksum_before_fn[16],
14577 checksum_before_arglist[16],
14578 checksum_after_fn[16],
14579 checksum_after_arglist[16];
14580 struct md5_ctx ctx;
14581 htab_t ht;
14582 int i;
14584 ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
14585 md5_init_ctx (&ctx);
14586 fold_checksum_tree (fn, &ctx, ht);
14587 md5_finish_ctx (&ctx, checksum_before_fn);
14588 htab_empty (ht);
14590 md5_init_ctx (&ctx);
14591 for (i = 0; i < nargs; i++)
14592 fold_checksum_tree (argarray[i], &ctx, ht);
14593 md5_finish_ctx (&ctx, checksum_before_arglist);
14594 htab_empty (ht);
14595 #endif
14597 tem = fold_builtin_call_array (loc, type, fn, nargs, argarray);
14599 #ifdef ENABLE_FOLD_CHECKING
14600 md5_init_ctx (&ctx);
14601 fold_checksum_tree (fn, &ctx, ht);
14602 md5_finish_ctx (&ctx, checksum_after_fn);
14603 htab_empty (ht);
14605 if (memcmp (checksum_before_fn, checksum_after_fn, 16))
14606 fold_check_failed (fn, tem);
14608 md5_init_ctx (&ctx);
14609 for (i = 0; i < nargs; i++)
14610 fold_checksum_tree (argarray[i], &ctx, ht);
14611 md5_finish_ctx (&ctx, checksum_after_arglist);
14612 htab_delete (ht);
14614 if (memcmp (checksum_before_arglist, checksum_after_arglist, 16))
14615 fold_check_failed (NULL_TREE, tem);
14616 #endif
14617 return tem;
14620 /* Perform constant folding and related simplification of initializer
14621 expression EXPR. These behave identically to "fold_buildN" but ignore
14622 potential run-time traps and exceptions that fold must preserve. */
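/* START_FOLD_INIT saves and clears the flags that normally inhibit
folding of trapping operations (signaling NaNs, trapping math,
rounding math, -ftrapv) and records that an initializer is being
folded; END_FOLD_INIT restores them.  */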
14624 #define START_FOLD_INIT \
14625 int saved_signaling_nans = flag_signaling_nans;\
14626 int saved_trapping_math = flag_trapping_math;\
14627 int saved_rounding_math = flag_rounding_math;\
14628 int saved_trapv = flag_trapv;\
14629 int saved_folding_initializer = folding_initializer;\
14630 flag_signaling_nans = 0;\
14631 flag_trapping_math = 0;\
14632 flag_rounding_math = 0;\
14633 flag_trapv = 0;\
14634 folding_initializer = 1;
14636 #define END_FOLD_INIT \
14637 flag_signaling_nans = saved_signaling_nans;\
14638 flag_trapping_math = saved_trapping_math;\
14639 flag_rounding_math = saved_rounding_math;\
14640 flag_trapv = saved_trapv;\
14641 folding_initializer = saved_folding_initializer;
14643 tree
14644 fold_build1_initializer_loc (location_t loc, enum tree_code code,
14645 tree type, tree op)
14647 tree result;
14648 START_FOLD_INIT;
14650 result = fold_build1_loc (loc, code, type, op);
14652 END_FOLD_INIT;
14653 return result;
14656 tree
14657 fold_build2_initializer_loc (location_t loc, enum tree_code code,
14658 tree type, tree op0, tree op1)
14660 tree result;
14661 START_FOLD_INIT;
14663 result = fold_build2_loc (loc, code, type, op0, op1);
14665 END_FOLD_INIT;
14666 return result;
14669 tree
14670 fold_build3_initializer_loc (location_t loc, enum tree_code code,
14671 tree type, tree op0, tree op1, tree op2)
14673 tree result;
14674 START_FOLD_INIT;
14676 result = fold_build3_loc (loc, code, type, op0, op1, op2);
14678 END_FOLD_INIT;
14679 return result;
14682 tree
14683 fold_build_call_array_initializer_loc (location_t loc, tree type, tree fn,
14684 int nargs, tree *argarray)
14686 tree result;
14687 START_FOLD_INIT;
14689 result = fold_build_call_array_loc (loc, type, fn, nargs, argarray);
14691 END_FOLD_INIT;
14692 return result;
14695 #undef START_FOLD_INIT
14696 #undef END_FOLD_INIT
14698 /* Determine if first argument is a multiple of second argument. Return 0 if
14699 it is not, or we cannot easily determine it to be.
14701 An example of the sort of thing we care about (at this point; this routine
14702 could surely be made more general, and expanded to do what the *_DIV_EXPR's
14703 fold cases do now) is discovering that
14705 SAVE_EXPR (I) * SAVE_EXPR (J * 8)
14707 is a multiple of
14709 SAVE_EXPR (J * 8)
14711 when we know that the two SAVE_EXPR (J * 8) nodes are the same node.
14713 This code also handles discovering that
14715 SAVE_EXPR (I) * SAVE_EXPR (J * 8)
14717 is a multiple of 8 so we don't have to worry about dealing with a
14718 possible remainder.
14720 Note that we *look* inside a SAVE_EXPR only to determine how it was
14721 calculated; it is not safe for fold to do much of anything else with the
14722 internals of a SAVE_EXPR, since it cannot know when it will be evaluated
14723 at run time. For example, the latter example above *cannot* be implemented
14724 as SAVE_EXPR (I) * J or any variant thereof, since the value of J at
14725 evaluation time of the original SAVE_EXPR is not necessarily the same at
14726 the time the new expression is evaluated. The only optimization of this
14727 sort that would be valid is changing
14729 SAVE_EXPR (I) * SAVE_EXPR (SAVE_EXPR (J) * 8)
14731 divided by 8 to
14733 SAVE_EXPR (I) * SAVE_EXPR (J)
14735 (where the same SAVE_EXPR (J) is used in the original and the
14736 transformed version). */
14738 int
14739 multiple_of_p (tree type, const_tree top, const_tree bottom)
14741 if (operand_equal_p (top, bottom, 0))
14742 return 1;
14744 if (TREE_CODE (type) != INTEGER_TYPE)
14745 return 0;
14747 switch (TREE_CODE (top))
14749 case BIT_AND_EXPR:
14750 /* Bitwise and provides a power of two multiple. If the mask is
14751 a multiple of BOTTOM then TOP is a multiple of BOTTOM. */
14752 if (!integer_pow2p (bottom))
14753 return 0;
14754 /* FALLTHRU */
14756 case MULT_EXPR:
14757 return (multiple_of_p (type, TREE_OPERAND (top, 0), bottom)
14758 || multiple_of_p (type, TREE_OPERAND (top, 1), bottom));
14760 case PLUS_EXPR:
14761 case MINUS_EXPR:
14762 return (multiple_of_p (type, TREE_OPERAND (top, 0), bottom)
14763 && multiple_of_p (type, TREE_OPERAND (top, 1), bottom));
14765 case LSHIFT_EXPR:
14766 if (TREE_CODE (TREE_OPERAND (top, 1)) == INTEGER_CST)
14768 tree op1, t1;
14770 op1 = TREE_OPERAND (top, 1);
14771 /* const_binop may not detect overflow correctly,
14772 so check for it explicitly here. */
14773 if (TYPE_PRECISION (TREE_TYPE (size_one_node))
14774 > TREE_INT_CST_LOW (op1)
14775 && TREE_INT_CST_HIGH (op1) == 0
14776 && 0 != (t1 = fold_convert (type,
14777 const_binop (LSHIFT_EXPR,
14778 size_one_node,
14779 op1)))
14780 && !TREE_OVERFLOW (t1))
14781 return multiple_of_p (type, t1, bottom);
14783 return 0;
14785 case NOP_EXPR:
14786 /* Can't handle conversions from non-integral or wider integral type. */
14787 if ((TREE_CODE (TREE_TYPE (TREE_OPERAND (top, 0))) != INTEGER_TYPE)
14788 || (TYPE_PRECISION (type)
14789 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (top, 0)))))
14790 return 0;
14792 /* ... fall through ... */
14794 case SAVE_EXPR:
14795 return multiple_of_p (type, TREE_OPERAND (top, 0), bottom);
14797 case COND_EXPR:
14798 return (multiple_of_p (type, TREE_OPERAND (top, 1), bottom)
14799 && multiple_of_p (type, TREE_OPERAND (top, 2), bottom));
14801 case INTEGER_CST:
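/* Only a nonzero constant BOTTOM can be handled, and in an unsigned
TYPE both constants must be nonnegative; then TOP is a multiple of
BOTTOM iff TOP % BOTTOM == 0.  */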
14802 if (TREE_CODE (bottom) != INTEGER_CST
14803 || integer_zerop (bottom)
14804 || (TYPE_UNSIGNED (type)
14805 && (tree_int_cst_sgn (top) < 0
14806 || tree_int_cst_sgn (bottom) < 0)))
14807 return 0;
14808 return integer_zerop (int_const_binop (TRUNC_MOD_EXPR,
14809 top, bottom));
14811 default:
14812 return 0;
14816 /* Return true if CODE or TYPE is known to be non-negative. */
14818 static bool
14819 tree_simple_nonnegative_warnv_p (enum tree_code code, tree type)
14821 if ((TYPE_PRECISION (type) != 1 || TYPE_UNSIGNED (type))
14822 && truth_value_p (code))
14823 /* Truth values evaluate to 0 or 1, which is nonnegative unless we
14824 have a signed:1 type (where the values are -1 and 0). */
14825 return true;
14826 return false;
14829 /* Return true if (CODE OP0) is known to be non-negative. If the return
14830 value is based on the assumption that signed overflow is undefined,
14831 set *STRICT_OVERFLOW_P to true; otherwise, don't change
14832 *STRICT_OVERFLOW_P. */
14834 bool
14835 tree_unary_nonnegative_warnv_p (enum tree_code code, tree type, tree op0,
14836 bool *strict_overflow_p)
14838 if (TYPE_UNSIGNED (type))
14839 return true;
14841 switch (code)
14843 case ABS_EXPR:
14844 /* We can't return 1 if flag_wrapv is set because
14845 ABS_EXPR<INT_MIN> = INT_MIN. */
14846 if (!INTEGRAL_TYPE_P (type))
14847 return true;
14848 if (TYPE_OVERFLOW_UNDEFINED (type))
14850 *strict_overflow_p = true;
14851 return true;
14853 break;
14855 case NON_LVALUE_EXPR:
14856 case FLOAT_EXPR:
14857 case FIX_TRUNC_EXPR:
14858 return tree_expr_nonnegative_warnv_p (op0,
14859 strict_overflow_p);
14861 case NOP_EXPR:
14863 tree inner_type = TREE_TYPE (op0);
14864 tree outer_type = type;
14866 if (TREE_CODE (outer_type) == REAL_TYPE)
14868 if (TREE_CODE (inner_type) == REAL_TYPE)
14869 return tree_expr_nonnegative_warnv_p (op0,
14870 strict_overflow_p);
14871 if (TREE_CODE (inner_type) == INTEGER_TYPE)
14873 if (TYPE_UNSIGNED (inner_type))
14874 return true;
14875 return tree_expr_nonnegative_warnv_p (op0,
14876 strict_overflow_p);
14879 else if (TREE_CODE (outer_type) == INTEGER_TYPE)
14881 if (TREE_CODE (inner_type) == REAL_TYPE)
14882 return tree_expr_nonnegative_warnv_p (op0,
14883 strict_overflow_p);
14884 if (TREE_CODE (inner_type) == INTEGER_TYPE)
14885 return TYPE_PRECISION (inner_type) < TYPE_PRECISION (outer_type)
14886 && TYPE_UNSIGNED (inner_type);
14889 break;
14891 default:
14892 return tree_simple_nonnegative_warnv_p (code, type);
14895 /* We don't know sign of `t', so be conservative and return false. */
14896 return false;
14899 /* Return true if (CODE OP0 OP1) is known to be non-negative. If the return
14900 value is based on the assumption that signed overflow is undefined,
14901 set *STRICT_OVERFLOW_P to true; otherwise, don't change
14902 *STRICT_OVERFLOW_P. */
14904 bool
14905 tree_binary_nonnegative_warnv_p (enum tree_code code, tree type, tree op0,
14906 tree op1, bool *strict_overflow_p)
14908 if (TYPE_UNSIGNED (type))
14909 return true;
14911 switch (code)
14913 case POINTER_PLUS_EXPR:
14914 case PLUS_EXPR:
14915 if (FLOAT_TYPE_P (type))
14916 return (tree_expr_nonnegative_warnv_p (op0,
14917 strict_overflow_p)
14918 && tree_expr_nonnegative_warnv_p (op1,
14919 strict_overflow_p));
14921 /* zero_extend(x) + zero_extend(y) is non-negative if x and y are
14922 both unsigned and at least 2 bits shorter than the result. */
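/* For example, with 32-bit int, (int) (unsigned short) x
+ (int) (unsigned short) y needs at most 17 bits, so the sum
cannot wrap into the sign bit.  */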
14923 if (TREE_CODE (type) == INTEGER_TYPE
14924 && TREE_CODE (op0) == NOP_EXPR
14925 && TREE_CODE (op1) == NOP_EXPR)
14927 tree inner1 = TREE_TYPE (TREE_OPERAND (op0, 0));
14928 tree inner2 = TREE_TYPE (TREE_OPERAND (op1, 0));
14929 if (TREE_CODE (inner1) == INTEGER_TYPE && TYPE_UNSIGNED (inner1)
14930 && TREE_CODE (inner2) == INTEGER_TYPE && TYPE_UNSIGNED (inner2))
14932 unsigned int prec = MAX (TYPE_PRECISION (inner1),
14933 TYPE_PRECISION (inner2)) + 1;
14934 return prec < TYPE_PRECISION (type);
14937 break;
14939 case MULT_EXPR:
14940 if (FLOAT_TYPE_P (type))
14942 /* x * x for floating point x is always non-negative. */
14943 if (operand_equal_p (op0, op1, 0))
14944 return true;
14945 return (tree_expr_nonnegative_warnv_p (op0,
14946 strict_overflow_p)
14947 && tree_expr_nonnegative_warnv_p (op1,
14948 strict_overflow_p));
14951 /* zero_extend(x) * zero_extend(y) is non-negative if x and y are
14952 both unsigned and their combined precision is less than the result's. */
14953 if (TREE_CODE (type) == INTEGER_TYPE
14954 && (TREE_CODE (op0) == NOP_EXPR || TREE_CODE (op0) == INTEGER_CST)
14955 && (TREE_CODE (op1) == NOP_EXPR || TREE_CODE (op1) == INTEGER_CST))
14957 tree inner0 = (TREE_CODE (op0) == NOP_EXPR)
14958 ? TREE_TYPE (TREE_OPERAND (op0, 0))
14959 : TREE_TYPE (op0);
14960 tree inner1 = (TREE_CODE (op1) == NOP_EXPR)
14961 ? TREE_TYPE (TREE_OPERAND (op1, 0))
14962 : TREE_TYPE (op1);
14964 bool unsigned0 = TYPE_UNSIGNED (inner0);
14965 bool unsigned1 = TYPE_UNSIGNED (inner1);
14967 if (TREE_CODE (op0) == INTEGER_CST)
14968 unsigned0 = unsigned0 || tree_int_cst_sgn (op0) >= 0;
14970 if (TREE_CODE (op1) == INTEGER_CST)
14971 unsigned1 = unsigned1 || tree_int_cst_sgn (op1) >= 0;
14973 if (TREE_CODE (inner0) == INTEGER_TYPE && unsigned0
14974 && TREE_CODE (inner1) == INTEGER_TYPE && unsigned1)
14976 unsigned int precision0 = (TREE_CODE (op0) == INTEGER_CST)
14977 ? tree_int_cst_min_precision (op0, /*unsignedp=*/true)
14978 : TYPE_PRECISION (inner0);
14980 unsigned int precision1 = (TREE_CODE (op1) == INTEGER_CST)
14981 ? tree_int_cst_min_precision (op1, /*unsignedp=*/true)
14982 : TYPE_PRECISION (inner1);
14984 return precision0 + precision1 < TYPE_PRECISION (type);
14987 return false;
14989 case BIT_AND_EXPR:
14990 case MAX_EXPR:
14991 return (tree_expr_nonnegative_warnv_p (op0,
14992 strict_overflow_p)
14993 || tree_expr_nonnegative_warnv_p (op1,
14994 strict_overflow_p));
14996 case BIT_IOR_EXPR:
14997 case BIT_XOR_EXPR:
14998 case MIN_EXPR:
14999 case RDIV_EXPR:
15000 case TRUNC_DIV_EXPR:
15001 case CEIL_DIV_EXPR:
15002 case FLOOR_DIV_EXPR:
15003 case ROUND_DIV_EXPR:
15004 return (tree_expr_nonnegative_warnv_p (op0,
15005 strict_overflow_p)
15006 && tree_expr_nonnegative_warnv_p (op1,
15007 strict_overflow_p));
15009 case TRUNC_MOD_EXPR:
15010 case CEIL_MOD_EXPR:
15011 case FLOOR_MOD_EXPR:
15012 case ROUND_MOD_EXPR:
15013 return tree_expr_nonnegative_warnv_p (op0,
15014 strict_overflow_p);
15015 default:
15016 return tree_simple_nonnegative_warnv_p (code, type);
15019 /* We don't know sign of `t', so be conservative and return false. */
15020 return false;
15023 /* Return true if T is known to be non-negative. If the return
15024 value is based on the assumption that signed overflow is undefined,
15025 set *STRICT_OVERFLOW_P to true; otherwise, don't change
15026 *STRICT_OVERFLOW_P. */
15028 bool
15029 tree_single_nonnegative_warnv_p (tree t, bool *strict_overflow_p)
15031 if (TYPE_UNSIGNED (TREE_TYPE (t)))
15032 return true;
15034 switch (TREE_CODE (t))
15036 case INTEGER_CST:
15037 return tree_int_cst_sgn (t) >= 0;
15039 case REAL_CST:
15040 return ! REAL_VALUE_NEGATIVE (TREE_REAL_CST (t));
15042 case FIXED_CST:
15043 return ! FIXED_VALUE_NEGATIVE (TREE_FIXED_CST (t));
15045 case COND_EXPR:
15046 return (tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 1),
15047 strict_overflow_p)
15048 && tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 2),
15049 strict_overflow_p));
15050 default:
15051 return tree_simple_nonnegative_warnv_p (TREE_CODE (t),
15052 TREE_TYPE (t));
15054 /* We don't know sign of `t', so be conservative and return false. */
15055 return false;
15058 /* Return true if T is known to be non-negative. If the return
15059 value is based on the assumption that signed overflow is undefined,
15060 set *STRICT_OVERFLOW_P to true; otherwise, don't change
15061 *STRICT_OVERFLOW_P. */
15063 bool
15064 tree_call_nonnegative_warnv_p (tree type, tree fndecl,
15065 tree arg0, tree arg1, bool *strict_overflow_p)
15067 if (fndecl && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL)
15068 switch (DECL_FUNCTION_CODE (fndecl))
15070 CASE_FLT_FN (BUILT_IN_ACOS):
15071 CASE_FLT_FN (BUILT_IN_ACOSH):
15072 CASE_FLT_FN (BUILT_IN_CABS):
15073 CASE_FLT_FN (BUILT_IN_COSH):
15074 CASE_FLT_FN (BUILT_IN_ERFC):
15075 CASE_FLT_FN (BUILT_IN_EXP):
15076 CASE_FLT_FN (BUILT_IN_EXP10):
15077 CASE_FLT_FN (BUILT_IN_EXP2):
15078 CASE_FLT_FN (BUILT_IN_FABS):
15079 CASE_FLT_FN (BUILT_IN_FDIM):
15080 CASE_FLT_FN (BUILT_IN_HYPOT):
15081 CASE_FLT_FN (BUILT_IN_POW10):
15082 CASE_INT_FN (BUILT_IN_FFS):
15083 CASE_INT_FN (BUILT_IN_PARITY):
15084 CASE_INT_FN (BUILT_IN_POPCOUNT):
15085 case BUILT_IN_BSWAP32:
15086 case BUILT_IN_BSWAP64:
15087 /* Always true. */
15088 return true;
15090 CASE_FLT_FN (BUILT_IN_SQRT):
15091 /* sqrt(-0.0) is -0.0. */
15092 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (type)))
15093 return true;
15094 return tree_expr_nonnegative_warnv_p (arg0,
15095 strict_overflow_p);
15097 CASE_FLT_FN (BUILT_IN_ASINH):
15098 CASE_FLT_FN (BUILT_IN_ATAN):
15099 CASE_FLT_FN (BUILT_IN_ATANH):
15100 CASE_FLT_FN (BUILT_IN_CBRT):
15101 CASE_FLT_FN (BUILT_IN_CEIL):
15102 CASE_FLT_FN (BUILT_IN_ERF):
15103 CASE_FLT_FN (BUILT_IN_EXPM1):
15104 CASE_FLT_FN (BUILT_IN_FLOOR):
15105 CASE_FLT_FN (BUILT_IN_FMOD):
15106 CASE_FLT_FN (BUILT_IN_FREXP):
15107 CASE_FLT_FN (BUILT_IN_ICEIL):
15108 CASE_FLT_FN (BUILT_IN_IFLOOR):
15109 CASE_FLT_FN (BUILT_IN_IRINT):
15110 CASE_FLT_FN (BUILT_IN_IROUND):
15111 CASE_FLT_FN (BUILT_IN_LCEIL):
15112 CASE_FLT_FN (BUILT_IN_LDEXP):
15113 CASE_FLT_FN (BUILT_IN_LFLOOR):
15114 CASE_FLT_FN (BUILT_IN_LLCEIL):
15115 CASE_FLT_FN (BUILT_IN_LLFLOOR):
15116 CASE_FLT_FN (BUILT_IN_LLRINT):
15117 CASE_FLT_FN (BUILT_IN_LLROUND):
15118 CASE_FLT_FN (BUILT_IN_LRINT):
15119 CASE_FLT_FN (BUILT_IN_LROUND):
15120 CASE_FLT_FN (BUILT_IN_MODF):
15121 CASE_FLT_FN (BUILT_IN_NEARBYINT):
15122 CASE_FLT_FN (BUILT_IN_RINT):
15123 CASE_FLT_FN (BUILT_IN_ROUND):
15124 CASE_FLT_FN (BUILT_IN_SCALB):
15125 CASE_FLT_FN (BUILT_IN_SCALBLN):
15126 CASE_FLT_FN (BUILT_IN_SCALBN):
15127 CASE_FLT_FN (BUILT_IN_SIGNBIT):
15128 CASE_FLT_FN (BUILT_IN_SIGNIFICAND):
15129 CASE_FLT_FN (BUILT_IN_SINH):
15130 CASE_FLT_FN (BUILT_IN_TANH):
15131 CASE_FLT_FN (BUILT_IN_TRUNC):
15132 /* True if the 1st argument is nonnegative. */
15133 return tree_expr_nonnegative_warnv_p (arg0,
15134 strict_overflow_p);
15136 CASE_FLT_FN (BUILT_IN_FMAX):
15137 /* True if the 1st OR 2nd argument is nonnegative. */
15138 return (tree_expr_nonnegative_warnv_p (arg0,
15139 strict_overflow_p)
15140 || (tree_expr_nonnegative_warnv_p (arg1,
15141 strict_overflow_p)));
15143 CASE_FLT_FN (BUILT_IN_FMIN):
15144 /* True if the 1st AND 2nd arguments are nonnegative. */
15145 return (tree_expr_nonnegative_warnv_p (arg0,
15146 strict_overflow_p)
15147 && (tree_expr_nonnegative_warnv_p (arg1,
15148 strict_overflow_p)));
15150 CASE_FLT_FN (BUILT_IN_COPYSIGN):
15151 /* True if the 2nd argument is nonnegative. */
15152 return tree_expr_nonnegative_warnv_p (arg1,
15153 strict_overflow_p);
15155 CASE_FLT_FN (BUILT_IN_POWI):
15156 /* True if the 1st argument is nonnegative or the second
15157 argument is an even integer. */
15158 if (TREE_CODE (arg1) == INTEGER_CST
15159 && (TREE_INT_CST_LOW (arg1) & 1) == 0)
15160 return true;
15161 return tree_expr_nonnegative_warnv_p (arg0,
15162 strict_overflow_p);
15164 CASE_FLT_FN (BUILT_IN_POW):
15165 /* True if the 1st argument is nonnegative or the second
15166 argument is an even integer valued real. */
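/* ARG1 is an even integer if N = real_to_integer (ARG1) is even and
converting N back with real_from_integer reproduces ARG1 exactly,
i.e. the value is integral.  */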
15167 if (TREE_CODE (arg1) == REAL_CST)
15169 REAL_VALUE_TYPE c;
15170 HOST_WIDE_INT n;
15172 c = TREE_REAL_CST (arg1);
15173 n = real_to_integer (&c);
15174 if ((n & 1) == 0)
15176 REAL_VALUE_TYPE cint;
15177 real_from_integer (&cint, VOIDmode, n,
15178 n < 0 ? -1 : 0, 0);
15179 if (real_identical (&c, &cint))
15180 return true;
15183 return tree_expr_nonnegative_warnv_p (arg0,
15184 strict_overflow_p);
15186 default:
15187 break;
15189 return tree_simple_nonnegative_warnv_p (CALL_EXPR,
15190 type);
15193 /* Return true if T is known to be non-negative. If the return
15194 value is based on the assumption that signed overflow is undefined,
15195 set *STRICT_OVERFLOW_P to true; otherwise, don't change
15196 *STRICT_OVERFLOW_P. */
15198 bool
15199 tree_invalid_nonnegative_warnv_p (tree t, bool *strict_overflow_p)
15201 enum tree_code code = TREE_CODE (t);
15202 if (TYPE_UNSIGNED (TREE_TYPE (t)))
15203 return true;
15205 switch (code)
15207 case TARGET_EXPR:
15209 tree temp = TARGET_EXPR_SLOT (t);
15210 t = TARGET_EXPR_INITIAL (t);
15212 /* If the initializer is non-void, then it's a normal expression
15213 that will be assigned to the slot. */
15214 if (!VOID_TYPE_P (t))
15215 return tree_expr_nonnegative_warnv_p (t, strict_overflow_p);
15217 /* Otherwise, the initializer sets the slot in some way. One common
15218 way is an assignment statement at the end of the initializer. */
15219 while (1)
15221 if (TREE_CODE (t) == BIND_EXPR)
15222 t = expr_last (BIND_EXPR_BODY (t));
15223 else if (TREE_CODE (t) == TRY_FINALLY_EXPR
15224 || TREE_CODE (t) == TRY_CATCH_EXPR)
15225 t = expr_last (TREE_OPERAND (t, 0));
15226 else if (TREE_CODE (t) == STATEMENT_LIST)
15227 t = expr_last (t);
15228 else
15229 break;
15231 if (TREE_CODE (t) == MODIFY_EXPR
15232 && TREE_OPERAND (t, 0) == temp)
15233 return tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 1),
15234 strict_overflow_p);
15236 return false;
15239 case CALL_EXPR:
15241 tree arg0 = call_expr_nargs (t) > 0 ? CALL_EXPR_ARG (t, 0) : NULL_TREE;
15242 tree arg1 = call_expr_nargs (t) > 1 ? CALL_EXPR_ARG (t, 1) : NULL_TREE;
15244 return tree_call_nonnegative_warnv_p (TREE_TYPE (t),
15245 get_callee_fndecl (t),
15246 arg0,
15247 arg1,
15248 strict_overflow_p);
15250 case COMPOUND_EXPR:
15251 case MODIFY_EXPR:
15252 return tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 1),
15253 strict_overflow_p);
15254 case BIND_EXPR:
15255 return tree_expr_nonnegative_warnv_p (expr_last (TREE_OPERAND (t, 1)),
15256 strict_overflow_p);
15257 case SAVE_EXPR:
15258 return tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 0),
15259 strict_overflow_p);
15261 default:
15262 return tree_simple_nonnegative_warnv_p (TREE_CODE (t),
15263 TREE_TYPE (t));
15266 /* We don't know sign of `t', so be conservative and return false. */
15267 return false;
15270 /* Return true if T is known to be non-negative. If the return
15271 value is based on the assumption that signed overflow is undefined,
15272 set *STRICT_OVERFLOW_P to true; otherwise, don't change
15273 *STRICT_OVERFLOW_P. */
15275 bool
15276 tree_expr_nonnegative_warnv_p (tree t, bool *strict_overflow_p)
15278 enum tree_code code;
15279 if (t == error_mark_node)
15280 return false;
15282 code = TREE_CODE (t);
15283 switch (TREE_CODE_CLASS (code))
15285 case tcc_binary:
15286 case tcc_comparison:
15287 return tree_binary_nonnegative_warnv_p (TREE_CODE (t),
15288 TREE_TYPE (t),
15289 TREE_OPERAND (t, 0),
15290 TREE_OPERAND (t, 1),
15291 strict_overflow_p);
15293 case tcc_unary:
15294 return tree_unary_nonnegative_warnv_p (TREE_CODE (t),
15295 TREE_TYPE (t),
15296 TREE_OPERAND (t, 0),
15297 strict_overflow_p);
15299 case tcc_constant:
15300 case tcc_declaration:
15301 case tcc_reference:
15302 return tree_single_nonnegative_warnv_p (t, strict_overflow_p);
15304 default:
15305 break;
15308 switch (code)
15310 case TRUTH_AND_EXPR:
15311 case TRUTH_OR_EXPR:
15312 case TRUTH_XOR_EXPR:
15313 return tree_binary_nonnegative_warnv_p (TREE_CODE (t),
15314 TREE_TYPE (t),
15315 TREE_OPERAND (t, 0),
15316 TREE_OPERAND (t, 1),
15317 strict_overflow_p);
15318 case TRUTH_NOT_EXPR:
15319 return tree_unary_nonnegative_warnv_p (TREE_CODE (t),
15320 TREE_TYPE (t),
15321 TREE_OPERAND (t, 0),
15322 strict_overflow_p);
15324 case COND_EXPR:
15325 case CONSTRUCTOR:
15326 case OBJ_TYPE_REF:
15327 case ASSERT_EXPR:
15328 case ADDR_EXPR:
15329 case WITH_SIZE_EXPR:
15330 case SSA_NAME:
15331 return tree_single_nonnegative_warnv_p (t, strict_overflow_p);
15333 default:
15334 return tree_invalid_nonnegative_warnv_p (t, strict_overflow_p);
15338 /* Return true if `t' is known to be non-negative. Handle warnings
15339 about undefined signed overflow. */
15341 bool
15342 tree_expr_nonnegative_p (tree t)
15344 bool ret, strict_overflow_p;
15346 strict_overflow_p = false;
15347 ret = tree_expr_nonnegative_warnv_p (t, &strict_overflow_p);
15348 if (strict_overflow_p)
15349 fold_overflow_warning (("assuming signed overflow does not occur when "
15350 "determining that expression is always "
15351 "non-negative"),
15352 WARN_STRICT_OVERFLOW_MISC);
15353 return ret;
15357 /* Return true when (CODE OP0) is an address and is known to be nonzero.
15358 For floating point we further ensure that T is not denormal.
15359 Similar logic is present in nonzero_address in rtlanal.c.
15361 If the return value is based on the assumption that signed overflow
15362 is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
15363 change *STRICT_OVERFLOW_P. */
15365 bool
15366 tree_unary_nonzero_warnv_p (enum tree_code code, tree type, tree op0,
15367 bool *strict_overflow_p)
15369 switch (code)
15371 case ABS_EXPR:
15372 return tree_expr_nonzero_warnv_p (op0,
15373 strict_overflow_p);
15375 case NOP_EXPR:
15377 tree inner_type = TREE_TYPE (op0);
15378 tree outer_type = type;
15380 return (TYPE_PRECISION (outer_type) >= TYPE_PRECISION (inner_type)
15381 && tree_expr_nonzero_warnv_p (op0,
15382 strict_overflow_p));
15384 break;
15386 case NON_LVALUE_EXPR:
15387 return tree_expr_nonzero_warnv_p (op0,
15388 strict_overflow_p);
15390 default:
15391 break;
15394 return false;
15397 /* Return true when (CODE OP0 OP1) is an address and is known to be nonzero.
15398 For floating point we further ensure that T is not denormal.
15399 Similar logic is present in nonzero_address in rtlanal.c.
15401 If the return value is based on the assumption that signed overflow
15402 is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
15403 change *STRICT_OVERFLOW_P. */
15405 bool
15406 tree_binary_nonzero_warnv_p (enum tree_code code,
15407 tree type,
15408 tree op0,
15409 tree op1, bool *strict_overflow_p)
15411 bool sub_strict_overflow_p;
15412 switch (code)
15414 case POINTER_PLUS_EXPR:
15415 case PLUS_EXPR:
15416 if (TYPE_OVERFLOW_UNDEFINED (type))
15418 /* In the presence of negative values it is hard
15419 to say anything definite. */
15420 sub_strict_overflow_p = false;
15421 if (!tree_expr_nonnegative_warnv_p (op0,
15422 &sub_strict_overflow_p)
15423 || !tree_expr_nonnegative_warnv_p (op1,
15424 &sub_strict_overflow_p))
15425 return false;
15426 /* One of the operands must be positive and the other non-negative. */
15427 /* We don't set *STRICT_OVERFLOW_P here: even if this value
15428 overflows, on a twos-complement machine the sum of two
15429 nonnegative numbers can never be zero. */
15430 return (tree_expr_nonzero_warnv_p (op0,
15431 strict_overflow_p)
15432 || tree_expr_nonzero_warnv_p (op1,
15433 strict_overflow_p));
15435 break;
15437 case MULT_EXPR:
15438 if (TYPE_OVERFLOW_UNDEFINED (type))
15440 if (tree_expr_nonzero_warnv_p (op0,
15441 strict_overflow_p)
15442 && tree_expr_nonzero_warnv_p (op1,
15443 strict_overflow_p))
15445 *strict_overflow_p = true;
15446 return true;
15449 break;
15451 case MIN_EXPR:
15452 sub_strict_overflow_p = false;
15453 if (tree_expr_nonzero_warnv_p (op0,
15454 &sub_strict_overflow_p)
15455 && tree_expr_nonzero_warnv_p (op1,
15456 &sub_strict_overflow_p))
15458 if (sub_strict_overflow_p)
15459 *strict_overflow_p = true;
15461 break;
15463 case MAX_EXPR:
15464 sub_strict_overflow_p = false;
15465 if (tree_expr_nonzero_warnv_p (op0,
15466 &sub_strict_overflow_p))
15468 if (sub_strict_overflow_p)
15469 *strict_overflow_p = true;
15471 /* When both operands are nonzero, then MAX must be too. */
15472 if (tree_expr_nonzero_warnv_p (op1,
15473 strict_overflow_p))
15474 return true;
15476 /* MAX where operand 0 is positive is positive. */
15477 return tree_expr_nonnegative_warnv_p (op0,
15478 strict_overflow_p);
15480 /* MAX where operand 1 is positive is positive. */
15481 else if (tree_expr_nonzero_warnv_p (op1,
15482 &sub_strict_overflow_p)
15483 && tree_expr_nonnegative_warnv_p (op1,
15484 &sub_strict_overflow_p))
15486 if (sub_strict_overflow_p)
15487 *strict_overflow_p = true;
15488 return true;
15490 break;
15492 case BIT_IOR_EXPR:
15493 return (tree_expr_nonzero_warnv_p (op1,
15494 strict_overflow_p)
15495 || tree_expr_nonzero_warnv_p (op0,
15496 strict_overflow_p));
15498 default:
15499 break;
15502 return false;
15505 /* Return true when T is an address and is known to be nonzero.
15506 For floating point we further ensure that T is not denormal.
15507 Similar logic is present in nonzero_address in rtlanal.c.
15509 If the return value is based on the assumption that signed overflow
15510 is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
15511 change *STRICT_OVERFLOW_P. */
15513 bool
15514 tree_single_nonzero_warnv_p (tree t, bool *strict_overflow_p)
15516 bool sub_strict_overflow_p;
15517 switch (TREE_CODE (t))
15519 case INTEGER_CST:
15520 return !integer_zerop (t);
15522 case ADDR_EXPR:
15524 tree base = TREE_OPERAND (t, 0);
15525 if (!DECL_P (base))
15526 base = get_base_address (base);
15528 if (!base)
15529 return false;
15531 /* Weak declarations may link to NULL. Other things may also be NULL
15532 so protect with -fdelete-null-pointer-checks; but not variables
15533 allocated on the stack. */
15534 if (DECL_P (base)
15535 && (flag_delete_null_pointer_checks
15536 || (DECL_CONTEXT (base)
15537 && TREE_CODE (DECL_CONTEXT (base)) == FUNCTION_DECL
15538 && auto_var_in_fn_p (base, DECL_CONTEXT (base)))))
15539 return !VAR_OR_FUNCTION_DECL_P (base) || !DECL_WEAK (base);
15541 /* Constants are never weak. */
15542 if (CONSTANT_CLASS_P (base))
15543 return true;
15545 return false;
15548 case COND_EXPR:
15549 sub_strict_overflow_p = false;
15550 if (tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 1),
15551 &sub_strict_overflow_p)
15552 && tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 2),
15553 &sub_strict_overflow_p))
15555 if (sub_strict_overflow_p)
15556 *strict_overflow_p = true;
15557 return true;
15559 break;
15561 default:
15562 break;
15564 return false;
15567 /* Return true when T is an address and is known to be nonzero.
15568 For floating point we further ensure that T is not denormal.
15569 Similar logic is present in nonzero_address in rtlanal.c.
15571 If the return value is based on the assumption that signed overflow
15572 is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
15573 change *STRICT_OVERFLOW_P. */
15575 bool
15576 tree_expr_nonzero_warnv_p (tree t, bool *strict_overflow_p)
15578 tree type = TREE_TYPE (t);
15579 enum tree_code code;
15581 /* Doing something useful for floating point would need more work. */
15582 if (!INTEGRAL_TYPE_P (type) && !POINTER_TYPE_P (type))
15583 return false;
15585 code = TREE_CODE (t);
15586 switch (TREE_CODE_CLASS (code))
15588 case tcc_unary:
15589 return tree_unary_nonzero_warnv_p (code, type, TREE_OPERAND (t, 0),
15590 strict_overflow_p);
15591 case tcc_binary:
15592 case tcc_comparison:
15593 return tree_binary_nonzero_warnv_p (code, type,
15594 TREE_OPERAND (t, 0),
15595 TREE_OPERAND (t, 1),
15596 strict_overflow_p);
15597 case tcc_constant:
15598 case tcc_declaration:
15599 case tcc_reference:
15600 return tree_single_nonzero_warnv_p (t, strict_overflow_p);
15602 default:
15603 break;
15606 switch (code)
15608 case TRUTH_NOT_EXPR:
15609 return tree_unary_nonzero_warnv_p (code, type, TREE_OPERAND (t, 0),
15610 strict_overflow_p);
15612 case TRUTH_AND_EXPR:
15613 case TRUTH_OR_EXPR:
15614 case TRUTH_XOR_EXPR:
15615 return tree_binary_nonzero_warnv_p (code, type,
15616 TREE_OPERAND (t, 0),
15617 TREE_OPERAND (t, 1),
15618 strict_overflow_p);
15620 case COND_EXPR:
15621 case CONSTRUCTOR:
15622 case OBJ_TYPE_REF:
15623 case ASSERT_EXPR:
15624 case ADDR_EXPR:
15625 case WITH_SIZE_EXPR:
15626 case SSA_NAME:
15627 return tree_single_nonzero_warnv_p (t, strict_overflow_p);
15629 case COMPOUND_EXPR:
15630 case MODIFY_EXPR:
15631 case BIND_EXPR:
15632 return tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 1),
15633 strict_overflow_p);
15635 case SAVE_EXPR:
15636 return tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 0),
15637 strict_overflow_p);
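/* alloca never returns a null pointer, so an alloca call is known to
be nonzero; other calls are not handled.  */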
15639 case CALL_EXPR:
15640 return alloca_call_p (t);
15642 default:
15643 break;
15645 return false;
15648 /* Return true when T is an address and is known to be nonzero.
15649 Handle warnings about undefined signed overflow. */
15651 bool
15652 tree_expr_nonzero_p (tree t)
15654 bool ret, strict_overflow_p;
15656 strict_overflow_p = false;
15657 ret = tree_expr_nonzero_warnv_p (t, &strict_overflow_p);
15658 if (strict_overflow_p)
15659 fold_overflow_warning (("assuming signed overflow does not occur when "
15660 "determining that expression is always "
15661 "non-zero"),
15662 WARN_STRICT_OVERFLOW_MISC);
15663 return ret;
15666 /* Given the components of a binary expression CODE, TYPE, OP0 and OP1,
15667 attempt to fold the expression to a constant without modifying TYPE,
15668 OP0 or OP1.
15670 If the expression could be simplified to a constant, then return
15671 the constant. If the expression would not be simplified to a
15672 constant, then return NULL_TREE. */
15674 tree
15675 fold_binary_to_constant (enum tree_code code, tree type, tree op0, tree op1)
15677 tree tem = fold_binary (code, type, op0, op1);
15678 return (tem && TREE_CONSTANT (tem)) ? tem : NULL_TREE;
15681 /* Given the components of a unary expression CODE, TYPE and OP0,
15682 attempt to fold the expression to a constant without modifying
15683 TYPE or OP0.
15685 If the expression could be simplified to a constant, then return
15686 the constant. If the expression would not be simplified to a
15687 constant, then return NULL_TREE. */
15689 tree
15690 fold_unary_to_constant (enum tree_code code, tree type, tree op0)
15692 tree tem = fold_unary (code, type, op0);
15693 return (tem && TREE_CONSTANT (tem)) ? tem : NULL_TREE;
15696 /* If EXP represents referencing an element in a constant string
15697 (either via pointer arithmetic or array indexing), return the
15698 tree representing the value accessed, otherwise return NULL. */
15700 tree
15701 fold_read_from_constant_string (tree exp)
15703 if ((TREE_CODE (exp) == INDIRECT_REF
15704 || TREE_CODE (exp) == ARRAY_REF)
15705 && TREE_CODE (TREE_TYPE (exp)) == INTEGER_TYPE)
15707 tree exp1 = TREE_OPERAND (exp, 0);
15708 tree index;
15709 tree string;
15710 location_t loc = EXPR_LOCATION (exp);
15712 if (TREE_CODE (exp) == INDIRECT_REF)
15713 string = string_constant (exp1, &index);
15714 else
15716 tree low_bound = array_ref_low_bound (exp);
15717 index = fold_convert_loc (loc, sizetype, TREE_OPERAND (exp, 1));
15719 /* Optimize the special case of a zero lower bound.
15721 We convert the low_bound to sizetype to avoid some problems
15722 with constant folding. (E.g. suppose the lower bound is 1,
15723 and its mode is QI. Without the conversion, (ARRAY
15724 +(INDEX-(unsigned char)1)) becomes ((ARRAY+(-(unsigned char)1))
15725 +INDEX), which becomes (ARRAY+255+INDEX). Oops!) */
15726 if (! integer_zerop (low_bound))
15727 index = size_diffop_loc (loc, index,
15728 fold_convert_loc (loc, sizetype, low_bound));
15730 string = exp1;
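/* Fold only if the string really is a STRING_CST, the index is a
known constant within the string's length, and the element type is
a single-byte integer mode matching EXP's type.  */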
15733 if (string
15734 && TYPE_MODE (TREE_TYPE (exp)) == TYPE_MODE (TREE_TYPE (TREE_TYPE (string)))
15735 && TREE_CODE (string) == STRING_CST
15736 && TREE_CODE (index) == INTEGER_CST
15737 && compare_tree_int (index, TREE_STRING_LENGTH (string)) < 0
15738 && (GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_TYPE (string))))
15739 == MODE_INT)
15740 && (GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (TREE_TYPE (string)))) == 1))
15741 return build_int_cst_type (TREE_TYPE (exp),
15742 (TREE_STRING_POINTER (string)
15743 [TREE_INT_CST_LOW (index)]));
15745 return NULL;
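/* Illustrative example (not part of the original source): for a tree
   representing "abc"[1] -- an ARRAY_REF of a STRING_CST with the
   constant index 1 -- this returns the INTEGER_CST 'b' in the element
   type.  A non-constant index, an index at or beyond
   TREE_STRING_LENGTH, or an element mode wider than one byte all fall
   through to the NULL return.  */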
15748 /* Return the tree for neg (ARG0) when ARG0 is known to be either
15749 an integer constant, real, or fixed-point constant.
15751 TYPE is the type of the result. */
15753 static tree
15754 fold_negate_const (tree arg0, tree type)
15756 tree t = NULL_TREE;
15758 switch (TREE_CODE (arg0))
15760 case INTEGER_CST:
15762 double_int val = tree_to_double_int (arg0);
15763 int overflow = neg_double (val.low, val.high, &val.low, &val.high);
15765 t = force_fit_type_double (type, val, 1,
15766 (overflow | TREE_OVERFLOW (arg0))
15767 && !TYPE_UNSIGNED (type));
15768 break;
15771 case REAL_CST:
15772 t = build_real (type, real_value_negate (&TREE_REAL_CST (arg0)));
15773 break;
15775 case FIXED_CST:
15777 FIXED_VALUE_TYPE f;
15778 bool overflow_p = fixed_arithmetic (&f, NEGATE_EXPR,
15779 &(TREE_FIXED_CST (arg0)), NULL,
15780 TYPE_SATURATING (type));
15781 t = build_fixed (type, f);
15782 /* Propagate overflow flags. */
15783 if (overflow_p | TREE_OVERFLOW (arg0))
15784 TREE_OVERFLOW (t) = 1;
15785 break;
15788 default:
15789 gcc_unreachable ();
15792 return t;
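/* Worked example (illustrative): for 32-bit int, negating the
   INTEGER_CST INT_MIN cannot be represented in the type, so
   force_fit_type_double hands back the wrapped value with
   TREE_OVERFLOW set; for unsigned types the !TYPE_UNSIGNED test
   above keeps the overflow bit clear.  */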
15795 /* Return the tree for abs (ARG0) when ARG0 is known to be either
15796 an integer constant or real constant.
15798 TYPE is the type of the result. */
15800 tree
15801 fold_abs_const (tree arg0, tree type)
15803 tree t = NULL_TREE;
15805 switch (TREE_CODE (arg0))
15807 case INTEGER_CST:
15809 double_int val = tree_to_double_int (arg0);
15811 /* If the value is unsigned or non-negative, then the absolute value
15812 is the same as the ordinary value. */
15813 if (TYPE_UNSIGNED (type)
15814 || !double_int_negative_p (val))
15815 t = arg0;
15817 /* If the value is negative, then the absolute value is
15818 its negation. */
15819 else
15821 int overflow;
15823 overflow = neg_double (val.low, val.high, &val.low, &val.high);
15824 t = force_fit_type_double (type, val, -1,
15825 overflow | TREE_OVERFLOW (arg0));
15828 break;
15830 case REAL_CST:
15831 if (REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg0)))
15832 t = build_real (type, real_value_negate (&TREE_REAL_CST (arg0)));
15833 else
15834 t = arg0;
15835 break;
15837 default:
15838 gcc_unreachable ();
15841 return t;
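/* Worked example (illustrative): fold_abs_const on the INTEGER_CST -5
   of type int yields 5; on INT_MIN the negation does not fit, so the
   result carries TREE_OVERFLOW.  For REAL_CSTs only the sign is
   flipped, so abs (-0.0) folds to 0.0 with no rounding involved.  */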
15844 /* Return the tree for not (ARG0) when ARG0 is known to be an integer
15845 constant. TYPE is the type of the result. */
15847 static tree
15848 fold_not_const (const_tree arg0, tree type)
15850 double_int val;
15852 gcc_assert (TREE_CODE (arg0) == INTEGER_CST);
15854 val = double_int_not (tree_to_double_int (arg0));
15855 return force_fit_type_double (type, val, 0, TREE_OVERFLOW (arg0));
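/* Usage sketch (illustrative): one's complement of a constant.

     tree zero = build_int_cst (unsigned_type_node, 0);
     tree allones = fold_not_const (zero, unsigned_type_node);

   ALLONES is the INTEGER_CST with all value bits set (0xffffffff for
   32-bit unsigned int), i.e. ~0 refitted to the target type by
   force_fit_type_double.  */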
15858 /* Given CODE, a relational operator, the target type, TYPE and two
15859 constant operands OP0 and OP1, return the result of the
15860 relational operation. If the result is not a compile time
15861 constant, then return NULL_TREE. */
15863 static tree
15864 fold_relational_const (enum tree_code code, tree type, tree op0, tree op1)
15866 int result, invert;
15868 /* From here on, the only cases we handle are when the result is
15869 known to be a constant. */
15871 if (TREE_CODE (op0) == REAL_CST && TREE_CODE (op1) == REAL_CST)
15873 const REAL_VALUE_TYPE *c0 = TREE_REAL_CST_PTR (op0);
15874 const REAL_VALUE_TYPE *c1 = TREE_REAL_CST_PTR (op1);
15876 /* Handle the cases where either operand is a NaN. */
15877 if (real_isnan (c0) || real_isnan (c1))
15879 switch (code)
15881 case EQ_EXPR:
15882 case ORDERED_EXPR:
15883 result = 0;
15884 break;
15886 case NE_EXPR:
15887 case UNORDERED_EXPR:
15888 case UNLT_EXPR:
15889 case UNLE_EXPR:
15890 case UNGT_EXPR:
15891 case UNGE_EXPR:
15892 case UNEQ_EXPR:
15893 result = 1;
15894 break;
15896 case LT_EXPR:
15897 case LE_EXPR:
15898 case GT_EXPR:
15899 case GE_EXPR:
15900 case LTGT_EXPR:
15901 if (flag_trapping_math)
15902 return NULL_TREE;
15903 result = 0;
15904 break;
15906 default:
15907 gcc_unreachable ();
15910 return constant_boolean_node (result, type);
15913 return constant_boolean_node (real_compare (code, c0, c1), type);
15916 if (TREE_CODE (op0) == FIXED_CST && TREE_CODE (op1) == FIXED_CST)
15918 const FIXED_VALUE_TYPE *c0 = TREE_FIXED_CST_PTR (op0);
15919 const FIXED_VALUE_TYPE *c1 = TREE_FIXED_CST_PTR (op1);
15920 return constant_boolean_node (fixed_compare (code, c0, c1), type);
15923 /* Handle equality/inequality of complex constants. */
15924 if (TREE_CODE (op0) == COMPLEX_CST && TREE_CODE (op1) == COMPLEX_CST)
15926 tree rcond = fold_relational_const (code, type,
15927 TREE_REALPART (op0),
15928 TREE_REALPART (op1));
15929 tree icond = fold_relational_const (code, type,
15930 TREE_IMAGPART (op0),
15931 TREE_IMAGPART (op1));
15932 if (code == EQ_EXPR)
15933 return fold_build2 (TRUTH_ANDIF_EXPR, type, rcond, icond);
15934 else if (code == NE_EXPR)
15935 return fold_build2 (TRUTH_ORIF_EXPR, type, rcond, icond);
15936 else
15937 return NULL_TREE;
15940 /* From here on we only handle LT, LE, GT, GE, EQ and NE.
15942 To compute GT, swap the arguments and do LT.
15943 To compute GE, do LT and invert the result.
15944 To compute LE, swap the arguments, do LT and invert the result.
15945 To compute NE, do EQ and invert the result.
15947 Therefore, the code below must handle only EQ and LT. */
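/* Illustrative trace (not part of the original source): folding 3 > 2
   takes the GT path, swaps the operands to get 2 < 3, and LT yields 1;
   folding 2 >= 3 takes the GE path, sets INVERT, computes 2 < 3 = 1,
   and inverts to 0.  */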
15949 if (code == LE_EXPR || code == GT_EXPR)
15951 tree tem = op0;
15952 op0 = op1;
15953 op1 = tem;
15954 code = swap_tree_comparison (code);
15957 /* Note that it is safe to invert for real values here because we
15958 have already handled the one case where it matters. */
15960 invert = 0;
15961 if (code == NE_EXPR || code == GE_EXPR)
15963 invert = 1;
15964 code = invert_tree_comparison (code, false);
15967 /* Compute a result for LT or EQ if args permit;
15968 otherwise return NULL_TREE. */
15969 if (TREE_CODE (op0) == INTEGER_CST && TREE_CODE (op1) == INTEGER_CST)
15971 if (code == EQ_EXPR)
15972 result = tree_int_cst_equal (op0, op1);
15973 else if (TYPE_UNSIGNED (TREE_TYPE (op0)))
15974 result = INT_CST_LT_UNSIGNED (op0, op1);
15975 else
15976 result = INT_CST_LT (op0, op1);
15978 else
15979 return NULL_TREE;
15981 if (invert)
15982 result ^= 1;
15983 return constant_boolean_node (result, type);
15986 /* If necessary, return a CLEANUP_POINT_EXPR for EXPR with the
15987 indicated TYPE. If no CLEANUP_POINT_EXPR is necessary, return EXPR
15988 itself. */
15990 tree
15991 fold_build_cleanup_point_expr (tree type, tree expr)
15993 /* If the expression does not have side effects then we don't have to wrap
15994 it with a cleanup point expression. */
15995 if (!TREE_SIDE_EFFECTS (expr))
15996 return expr;
15998 /* If the expression is a return, check whether the expression inside
15999 the return, or the right-hand side of the MODIFY_EXPR inside the
16000 return, is free of side effects. If either has none, we don't need
16001 to wrap the expression in a cleanup point expression. Note we don't
16002 check the left-hand side of the MODIFY_EXPR: it should always be the RESULT_DECL. */
16003 if (TREE_CODE (expr) == RETURN_EXPR)
16005 tree op = TREE_OPERAND (expr, 0);
16006 if (!op || !TREE_SIDE_EFFECTS (op))
16007 return expr;
16008 op = TREE_OPERAND (op, 1);
16009 if (!TREE_SIDE_EFFECTS (op))
16010 return expr;
16013 return build1 (CLEANUP_POINT_EXPR, type, expr);
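/* Illustrative examples (not part of the original source): for
   "return x" the right-hand side of the MODIFY_EXPR has no side
   effects, so the RETURN_EXPR is returned as-is, while "return f ()"
   is wrapped in a CLEANUP_POINT_EXPR because the call on the
   right-hand side may create temporaries that need cleaning up.  */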
16016 /* Given a pointer value OP0 and a type TYPE, return a simplified version
16017 of an indirection through OP0, or NULL_TREE if no simplification is
16018 possible. */
16020 tree
16021 fold_indirect_ref_1 (location_t loc, tree type, tree op0)
16023 tree sub = op0;
16024 tree subtype;
16026 STRIP_NOPS (sub);
16027 subtype = TREE_TYPE (sub);
16028 if (!POINTER_TYPE_P (subtype))
16029 return NULL_TREE;
16031 if (TREE_CODE (sub) == ADDR_EXPR)
16033 tree op = TREE_OPERAND (sub, 0);
16034 tree optype = TREE_TYPE (op);
16035 /* *&CONST_DECL -> the value of the const decl. */
16036 if (TREE_CODE (op) == CONST_DECL)
16037 return DECL_INITIAL (op);
16038 /* *&p => p; make sure to handle *&"str"[cst] here. */
16039 if (type == optype)
16041 tree fop = fold_read_from_constant_string (op);
16042 if (fop)
16043 return fop;
16044 else
16045 return op;
16047 /* *(foo *)&fooarray => fooarray[0] */
16048 else if (TREE_CODE (optype) == ARRAY_TYPE
16049 && type == TREE_TYPE (optype)
16050 && (!in_gimple_form
16051 || TREE_CODE (TYPE_SIZE (type)) == INTEGER_CST))
16053 tree type_domain = TYPE_DOMAIN (optype);
16054 tree min_val = size_zero_node;
16055 if (type_domain && TYPE_MIN_VALUE (type_domain))
16056 min_val = TYPE_MIN_VALUE (type_domain);
16057 if (in_gimple_form
16058 && TREE_CODE (min_val) != INTEGER_CST)
16059 return NULL_TREE;
16060 return build4_loc (loc, ARRAY_REF, type, op, min_val,
16061 NULL_TREE, NULL_TREE);
16063 /* *(foo *)&complexfoo => __real__ complexfoo */
16064 else if (TREE_CODE (optype) == COMPLEX_TYPE
16065 && type == TREE_TYPE (optype))
16066 return fold_build1_loc (loc, REALPART_EXPR, type, op);
16067 /* *(foo *)&vectorfoo => BIT_FIELD_REF<vectorfoo,...> */
16068 else if (TREE_CODE (optype) == VECTOR_TYPE
16069 && type == TREE_TYPE (optype))
16071 tree part_width = TYPE_SIZE (type);
16072 tree index = bitsize_int (0);
16073 return fold_build3_loc (loc, BIT_FIELD_REF, type, op, part_width, index);
16077 if (TREE_CODE (sub) == POINTER_PLUS_EXPR
16078 && TREE_CODE (TREE_OPERAND (sub, 1)) == INTEGER_CST)
16080 tree op00 = TREE_OPERAND (sub, 0);
16081 tree op01 = TREE_OPERAND (sub, 1);
16083 STRIP_NOPS (op00);
16084 if (TREE_CODE (op00) == ADDR_EXPR)
16086 tree op00type;
16087 op00 = TREE_OPERAND (op00, 0);
16088 op00type = TREE_TYPE (op00);
16090 /* ((foo*)&vectorfoo)[1] => BIT_FIELD_REF<vectorfoo,...> */
16091 if (TREE_CODE (op00type) == VECTOR_TYPE
16092 && type == TREE_TYPE (op00type))
16094 HOST_WIDE_INT offset = tree_low_cst (op01, 0);
16095 tree part_width = TYPE_SIZE (type);
16096 unsigned HOST_WIDE_INT part_widthi = tree_low_cst (part_width, 0) / BITS_PER_UNIT;
16097 unsigned HOST_WIDE_INT indexi = offset * BITS_PER_UNIT;
16098 tree index = bitsize_int (indexi);
16100 if (offset / part_widthi < TYPE_VECTOR_SUBPARTS (op00type))
16101 return fold_build3_loc (loc,
16102 BIT_FIELD_REF, type, op00,
16103 part_width, index);
16106 /* ((foo*)&complexfoo)[1] => __imag__ complexfoo */
16107 else if (TREE_CODE (op00type) == COMPLEX_TYPE
16108 && type == TREE_TYPE (op00type))
16110 tree size = TYPE_SIZE_UNIT (type);
16111 if (tree_int_cst_equal (size, op01))
16112 return fold_build1_loc (loc, IMAGPART_EXPR, type, op00);
16114 /* ((foo *)&fooarray)[1] => fooarray[1] */
16115 else if (TREE_CODE (op00type) == ARRAY_TYPE
16116 && type == TREE_TYPE (op00type))
16118 tree type_domain = TYPE_DOMAIN (op00type);
16119 tree min_val = size_zero_node;
16120 if (type_domain && TYPE_MIN_VALUE (type_domain))
16121 min_val = TYPE_MIN_VALUE (type_domain);
16122 op01 = size_binop_loc (loc, EXACT_DIV_EXPR, op01,
16123 TYPE_SIZE_UNIT (type));
16124 op01 = size_binop_loc (loc, PLUS_EXPR, op01, min_val);
16125 return build4_loc (loc, ARRAY_REF, type, op00, op01,
16126 NULL_TREE, NULL_TREE);
16131 /* *(foo *)fooarrptr => (*fooarrptr)[0] */
16132 if (TREE_CODE (TREE_TYPE (subtype)) == ARRAY_TYPE
16133 && type == TREE_TYPE (TREE_TYPE (subtype))
16134 && (!in_gimple_form
16135 || TREE_CODE (TYPE_SIZE (type)) == INTEGER_CST))
16137 tree type_domain;
16138 tree min_val = size_zero_node;
16139 sub = build_fold_indirect_ref_loc (loc, sub);
16140 type_domain = TYPE_DOMAIN (TREE_TYPE (sub));
16141 if (type_domain && TYPE_MIN_VALUE (type_domain))
16142 min_val = TYPE_MIN_VALUE (type_domain);
16143 if (in_gimple_form
16144 && TREE_CODE (min_val) != INTEGER_CST)
16145 return NULL_TREE;
16146 return build4_loc (loc, ARRAY_REF, type, sub, min_val, NULL_TREE,
16147 NULL_TREE);
16150 return NULL_TREE;
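/* Summary examples (illustrative), mirroring the comments above:

     *(int *)&i              => i
     *(int *)&iarr           => iarr[0]
     *(float *)&complexf     => __real__ complexf
     ((float *)&complexf)[1] => __imag__ complexf

   the last via the POINTER_PLUS_EXPR case, when the byte offset
   equals TYPE_SIZE_UNIT (float).  Anything else returns NULL_TREE and
   the caller falls back to a plain INDIRECT_REF.  */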
16153 /* Builds an expression for an indirection through T, simplifying some
16154 cases. */
16156 tree
16157 build_fold_indirect_ref_loc (location_t loc, tree t)
16159 tree type = TREE_TYPE (TREE_TYPE (t));
16160 tree sub = fold_indirect_ref_1 (loc, type, t);
16162 if (sub)
16163 return sub;
16165 return build1_loc (loc, INDIRECT_REF, type, t);
16168 /* Given an INDIRECT_REF T, return either T or a simplified version. */
16170 tree
16171 fold_indirect_ref_loc (location_t loc, tree t)
16173 tree sub = fold_indirect_ref_1 (loc, TREE_TYPE (t), TREE_OPERAND (t, 0));
16175 if (sub)
16176 return sub;
16177 else
16178 return t;
16181 /* Strip non-trapping, non-side-effecting tree nodes from an expression
16182 whose result is ignored. The type of the returned tree need not be
16183 the same as the original expression. */
16185 tree
16186 fold_ignored_result (tree t)
16188 if (!TREE_SIDE_EFFECTS (t))
16189 return integer_zero_node;
16191 for (;;)
16192 switch (TREE_CODE_CLASS (TREE_CODE (t)))
16194 case tcc_unary:
16195 t = TREE_OPERAND (t, 0);
16196 break;
16198 case tcc_binary:
16199 case tcc_comparison:
16200 if (!TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1)))
16201 t = TREE_OPERAND (t, 0);
16202 else if (!TREE_SIDE_EFFECTS (TREE_OPERAND (t, 0)))
16203 t = TREE_OPERAND (t, 1);
16204 else
16205 return t;
16206 break;
16208 case tcc_expression:
16209 switch (TREE_CODE (t))
16211 case COMPOUND_EXPR:
16212 if (TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1)))
16213 return t;
16214 t = TREE_OPERAND (t, 0);
16215 break;
16217 case COND_EXPR:
16218 if (TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1))
16219 || TREE_SIDE_EFFECTS (TREE_OPERAND (t, 2)))
16220 return t;
16221 t = TREE_OPERAND (t, 0);
16222 break;
16224 default:
16225 return t;
16227 break;
16229 default:
16230 return t;
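/* Illustrative examples (not part of the original source):
   fold_ignored_result on "x + y" (no side effects) yields
   integer_zero_node; on "f (), x" it strips the COMPOUND_EXPR down to
   the call f (); on "c ? f () : g ()" it returns the COND_EXPR
   unchanged because both arms have side effects.  */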
16234 /* Return the value of VALUE, rounded up to a multiple of DIVISOR.
16235 This can only be applied to objects of a sizetype. */
16237 tree
16238 round_up_loc (location_t loc, tree value, int divisor)
16240 tree div = NULL_TREE;
16242 gcc_assert (divisor > 0);
16243 if (divisor == 1)
16244 return value;
16246 /* See if VALUE is already a multiple of DIVISOR. If so, we don't
16247 have to do anything. Only perform this check when VALUE is not a
16248 constant, because for a constant the check is more expensive than
16249 simply doing the rounding. */
16250 if (TREE_CODE (value) != INTEGER_CST)
16252 div = build_int_cst (TREE_TYPE (value), divisor);
16254 if (multiple_of_p (TREE_TYPE (value), value, div))
16255 return value;
16258 /* If divisor is a power of two, simplify this to bit manipulation. */
16259 if (divisor == (divisor & -divisor))
16261 if (TREE_CODE (value) == INTEGER_CST)
16263 double_int val = tree_to_double_int (value);
16264 bool overflow_p;
16266 if ((val.low & (divisor - 1)) == 0)
16267 return value;
16269 overflow_p = TREE_OVERFLOW (value);
16270 val.low &= ~(divisor - 1);
16271 val.low += divisor;
16272 if (val.low == 0)
16274 val.high++;
16275 if (val.high == 0)
16276 overflow_p = true;
16279 return force_fit_type_double (TREE_TYPE (value), val,
16280 -1, overflow_p);
16282 else
16284 tree t;
16286 t = build_int_cst (TREE_TYPE (value), divisor - 1);
16287 value = size_binop_loc (loc, PLUS_EXPR, value, t);
16288 t = build_int_cst (TREE_TYPE (value), -divisor);
16289 value = size_binop_loc (loc, BIT_AND_EXPR, value, t);
16292 else
16294 if (!div)
16295 div = build_int_cst (TREE_TYPE (value), divisor);
16296 value = size_binop_loc (loc, CEIL_DIV_EXPR, value, div);
16297 value = size_binop_loc (loc, MULT_EXPR, value, div);
16300 return value;
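/* Worked example (illustrative): with DIVISOR 8 (a power of two) and
   the constant VALUE 13, the constant path computes
   (13 & ~7) + 8 = 16; for a non-constant VALUE it emits
   (VALUE + 7) & -8 instead, one addition and one mask rather than a
   CEIL_DIV_EXPR/MULT_EXPR pair.  */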
16303 /* Likewise, but round down. */
16305 tree
16306 round_down_loc (location_t loc, tree value, int divisor)
16308 tree div = NULL_TREE;
16310 gcc_assert (divisor > 0);
16311 if (divisor == 1)
16312 return value;
16314 /* See if VALUE is already a multiple of DIVISOR. If so, we don't
16315 have to do anything. Only perform this check when VALUE is not a
16316 constant, because for a constant the check is more expensive than
16317 simply doing the rounding. */
16318 if (TREE_CODE (value) != INTEGER_CST)
16320 div = build_int_cst (TREE_TYPE (value), divisor);
16322 if (multiple_of_p (TREE_TYPE (value), value, div))
16323 return value;
16326 /* If divisor is a power of two, simplify this to bit manipulation. */
16327 if (divisor == (divisor & -divisor))
16329 tree t;
16331 t = build_int_cst (TREE_TYPE (value), -divisor);
16332 value = size_binop_loc (loc, BIT_AND_EXPR, value, t);
16334 else
16336 if (!div)
16337 div = build_int_cst (TREE_TYPE (value), divisor);
16338 value = size_binop_loc (loc, FLOOR_DIV_EXPR, value, div);
16339 value = size_binop_loc (loc, MULT_EXPR, value, div);
16342 return value;
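/* Worked example (illustrative): for a power-of-two divisor,
   round_down_loc just masks, so 13 rounded down to a multiple of 8 is
   13 & -8 = 8; any other divisor falls back to the
   FLOOR_DIV_EXPR/MULT_EXPR pair.  */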
16345 /* Returns the pointer to the base of the object addressed by EXP and
16346 extracts the information about the offset of the access, storing it
16347 to PBITPOS and POFFSET. */
16349 static tree
16350 split_address_to_core_and_offset (tree exp,
16351 HOST_WIDE_INT *pbitpos, tree *poffset)
16353 tree core;
16354 enum machine_mode mode;
16355 int unsignedp, volatilep;
16356 HOST_WIDE_INT bitsize;
16357 location_t loc = EXPR_LOCATION (exp);
16359 if (TREE_CODE (exp) == ADDR_EXPR)
16361 core = get_inner_reference (TREE_OPERAND (exp, 0), &bitsize, pbitpos,
16362 poffset, &mode, &unsignedp, &volatilep,
16363 false);
16364 core = build_fold_addr_expr_loc (loc, core);
16366 else
16368 core = exp;
16369 *pbitpos = 0;
16370 *poffset = NULL_TREE;
16373 return core;
16376 /* Returns true if addresses of E1 and E2 differ by a constant, false
16377 otherwise. If they do, E1 - E2 is stored in *DIFF. */
16379 bool
16380 ptr_difference_const (tree e1, tree e2, HOST_WIDE_INT *diff)
16382 tree core1, core2;
16383 HOST_WIDE_INT bitpos1, bitpos2;
16384 tree toffset1, toffset2, tdiff, type;
16386 core1 = split_address_to_core_and_offset (e1, &bitpos1, &toffset1);
16387 core2 = split_address_to_core_and_offset (e2, &bitpos2, &toffset2);
16389 if (bitpos1 % BITS_PER_UNIT != 0
16390 || bitpos2 % BITS_PER_UNIT != 0
16391 || !operand_equal_p (core1, core2, 0))
16392 return false;
16394 if (toffset1 && toffset2)
16396 type = TREE_TYPE (toffset1);
16397 if (type != TREE_TYPE (toffset2))
16398 toffset2 = fold_convert (type, toffset2);
16400 tdiff = fold_build2 (MINUS_EXPR, type, toffset1, toffset2);
16401 if (!cst_and_fits_in_hwi (tdiff))
16402 return false;
16404 *diff = int_cst_value (tdiff);
16406 else if (toffset1 || toffset2)
16408 /* If only one of the offsets is non-constant, the difference cannot
16409 be a constant. */
16410 return false;
16412 else
16413 *diff = 0;
16415 *diff += (bitpos1 - bitpos2) / BITS_PER_UNIT;
16416 return true;
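/* Usage sketch (illustrative, names hypothetical): for ADDR_EXPRs
   e1 = &a[3] and e2 = &a[1] over "int a[10]", both split to the same
   core &a, so

     HOST_WIDE_INT diff;
     bool ok = ptr_difference_const (e1, e2, &diff);

   leaves ok true and diff == 2 * sizeof (int).  Mismatched cores, or
   exactly one non-constant offset, make it return false.  */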
16419 /* Simplify the floating point expression EXP when the sign of the
16420 result is not significant. Return NULL_TREE if no simplification
16421 is possible. */
16423 tree
16424 fold_strip_sign_ops (tree exp)
16426 tree arg0, arg1;
16427 location_t loc = EXPR_LOCATION (exp);
16429 switch (TREE_CODE (exp))
16431 case ABS_EXPR:
16432 case NEGATE_EXPR:
16433 arg0 = fold_strip_sign_ops (TREE_OPERAND (exp, 0));
16434 return arg0 ? arg0 : TREE_OPERAND (exp, 0);
16436 case MULT_EXPR:
16437 case RDIV_EXPR:
16438 if (HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (TREE_TYPE (exp))))
16439 return NULL_TREE;
16440 arg0 = fold_strip_sign_ops (TREE_OPERAND (exp, 0));
16441 arg1 = fold_strip_sign_ops (TREE_OPERAND (exp, 1));
16442 if (arg0 != NULL_TREE || arg1 != NULL_TREE)
16443 return fold_build2_loc (loc, TREE_CODE (exp), TREE_TYPE (exp),
16444 arg0 ? arg0 : TREE_OPERAND (exp, 0),
16445 arg1 ? arg1 : TREE_OPERAND (exp, 1));
16446 break;
16448 case COMPOUND_EXPR:
16449 arg0 = TREE_OPERAND (exp, 0);
16450 arg1 = fold_strip_sign_ops (TREE_OPERAND (exp, 1));
16451 if (arg1)
16452 return fold_build2_loc (loc, COMPOUND_EXPR, TREE_TYPE (exp), arg0, arg1);
16453 break;
16455 case COND_EXPR:
16456 arg0 = fold_strip_sign_ops (TREE_OPERAND (exp, 1));
16457 arg1 = fold_strip_sign_ops (TREE_OPERAND (exp, 2));
16458 if (arg0 || arg1)
16459 return fold_build3_loc (loc,
16460 COND_EXPR, TREE_TYPE (exp), TREE_OPERAND (exp, 0),
16461 arg0 ? arg0 : TREE_OPERAND (exp, 1),
16462 arg1 ? arg1 : TREE_OPERAND (exp, 2));
16463 break;
16465 case CALL_EXPR:
16467 const enum built_in_function fcode = builtin_mathfn_code (exp);
16468 switch (fcode)
16470 CASE_FLT_FN (BUILT_IN_COPYSIGN):
16471 /* Strip copysign function call, return the 1st argument. */
16472 arg0 = CALL_EXPR_ARG (exp, 0);
16473 arg1 = CALL_EXPR_ARG (exp, 1);
16474 return omit_one_operand_loc (loc, TREE_TYPE (exp), arg0, arg1);
16476 default:
16477 /* Strip sign ops from the argument of "odd" math functions. */
16478 if (negate_mathfn_p (fcode))
16480 arg0 = fold_strip_sign_ops (CALL_EXPR_ARG (exp, 0));
16481 if (arg0)
16482 return build_call_expr_loc (loc, get_callee_fndecl (exp), 1, arg0);
16484 break;
16487 break;
16489 default:
16490 break;
16492 return NULL_TREE;
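/* Illustrative examples (not part of the original source): when only
   the magnitude of the result matters,

     -x * fabs (y)      strips to  x * y
     copysign (x, y)    strips to  x   (y dropped via omit_one_operand)
     sin (-x)           strips to  sin (x)   since sin is negate_mathfn_p

   but a MULT_EXPR or RDIV_EXPR is left alone when
   HONOR_SIGN_DEPENDENT_ROUNDING holds for the mode, e.g. under
   -frounding-math.  */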