/* Fold a constant sub-tree into a single node for C-compiler
   Copyright (C) 1987, 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998, 1999,
   2000, 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009, 2010, 2011
   Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */
/*@@ This file should be rewritten to use an arbitrary precision
  @@ representation for "struct tree_int_cst" and "struct tree_real_cst".
  @@ Perhaps the routines could also be used for bc/dc, and made a lib.
  @@ The routines that translate from the ap rep should
  @@ warn if precision et al. is lost.
  @@ This would also make life easier when this technology is used
  @@ for cross-compilers.  */
/* The entry points in this file are fold, size_int_wide and size_binop.

   fold takes a tree as argument and returns a simplified tree.

   size_binop takes a tree code for an arithmetic operation
   and two operands that are trees, and produces a tree for the
   result, assuming the type comes from `sizetype'.

   size_int takes an integer value, and creates a tree constant
   with type from `sizetype'.

   Note: Since the folders get called on non-gimple code as well as
   gimple code, we need to handle GIMPLE tuples as well as their
   corresponding tree equivalents.  */
45 #include "config.h"
46 #include "system.h"
47 #include "coretypes.h"
48 #include "tm.h"
49 #include "flags.h"
50 #include "tree.h"
51 #include "realmpfr.h"
52 #include "rtl.h"
53 #include "expr.h"
54 #include "tm_p.h"
55 #include "target.h"
56 #include "diagnostic-core.h"
57 #include "intl.h"
58 #include "ggc.h"
59 #include "hashtab.h"
60 #include "langhooks.h"
61 #include "md5.h"
62 #include "gimple.h"
63 #include "tree-flow.h"
/* Nonzero if we are folding constants inside an initializer; zero
   otherwise.  */
int folding_initializer = 0;
/* The following constants represent a bit based encoding of GCC's
   comparison operators.  This encoding simplifies transformations
   on relational comparison operators, such as AND and OR.  */
enum comparison_code {
  COMPCODE_FALSE = 0,
  COMPCODE_LT = 1,
  COMPCODE_EQ = 2,
  COMPCODE_LE = 3,
  COMPCODE_GT = 4,
  COMPCODE_LTGT = 5,
  COMPCODE_GE = 6,
  COMPCODE_ORD = 7,
  COMPCODE_UNORD = 8,
  COMPCODE_UNLT = 9,
  COMPCODE_UNEQ = 10,
  COMPCODE_UNLE = 11,
  COMPCODE_UNGT = 12,
  COMPCODE_NE = 13,
  COMPCODE_UNGE = 14,
  COMPCODE_TRUE = 15
};
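
/* Illustrative note (not part of the original source): each bit of the
   encoding stands for one elementary outcome -- bit 0 is "less than",
   bit 1 is "equal", bit 2 is "greater than", bit 3 is "unordered".
   Combining comparisons with AND/OR then reduces to bitwise operations
   on the codes, e.g.:

     COMPCODE_LE == (COMPCODE_LT | COMPCODE_EQ)                  -- 1 | 2 == 3
     COMPCODE_NE == (COMPCODE_LT | COMPCODE_GT | COMPCODE_UNORD) -- 1|4|8 == 13
     (a < b) || (a == b)  ~~>  code 1 | 2 == COMPCODE_LE  ~~>  a <= b  */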
static bool negate_mathfn_p (enum built_in_function);
static bool negate_expr_p (tree);
static tree negate_expr (tree);
static tree split_tree (tree, enum tree_code, tree *, tree *, tree *, int);
static tree associate_trees (location_t, tree, tree, enum tree_code, tree);
static tree const_binop (enum tree_code, tree, tree);
static enum comparison_code comparison_to_compcode (enum tree_code);
static enum tree_code compcode_to_comparison (enum comparison_code);
static int operand_equal_for_comparison_p (tree, tree, tree);
static int twoval_comparison_p (tree, tree *, tree *, int *);
static tree eval_subst (location_t, tree, tree, tree, tree, tree);
static tree pedantic_omit_one_operand_loc (location_t, tree, tree, tree);
static tree distribute_bit_expr (location_t, enum tree_code, tree, tree, tree);
static tree make_bit_field_ref (location_t, tree, tree,
                                HOST_WIDE_INT, HOST_WIDE_INT, int);
static tree optimize_bit_field_compare (location_t, enum tree_code,
                                        tree, tree, tree);
static tree decode_field_reference (location_t, tree, HOST_WIDE_INT *,
                                    HOST_WIDE_INT *,
                                    enum machine_mode *, int *, int *,
                                    tree *, tree *);
static int all_ones_mask_p (const_tree, int);
static tree sign_bit_p (tree, const_tree);
static int simple_operand_p (const_tree);
static tree range_binop (enum tree_code, tree, tree, int, tree, int);
static tree range_predecessor (tree);
static tree range_successor (tree);
extern tree make_range (tree, int *, tree *, tree *, bool *);
extern bool merge_ranges (int *, tree *, tree *, int, tree, tree, int,
                          tree, tree);
static tree fold_range_test (location_t, enum tree_code, tree, tree, tree);
static tree fold_cond_expr_with_comparison (location_t, tree, tree, tree, tree);
static tree unextend (tree, int, int, tree);
static tree fold_truthop (location_t, enum tree_code, tree, tree, tree);
static tree optimize_minmax_comparison (location_t, enum tree_code,
                                        tree, tree, tree);
static tree extract_muldiv (tree, tree, enum tree_code, tree, bool *);
static tree extract_muldiv_1 (tree, tree, enum tree_code, tree, bool *);
static tree fold_binary_op_with_conditional_arg (location_t,
                                                 enum tree_code, tree,
                                                 tree, tree,
                                                 tree, tree, int);
static tree fold_mathfn_compare (location_t,
                                 enum built_in_function, enum tree_code,
                                 tree, tree, tree);
static tree fold_inf_compare (location_t, enum tree_code, tree, tree, tree);
static tree fold_div_compare (location_t, enum tree_code, tree, tree, tree);
static bool reorder_operands_p (const_tree, const_tree);
static tree fold_negate_const (tree, tree);
static tree fold_not_const (const_tree, tree);
static tree fold_relational_const (enum tree_code, tree, tree, tree);
static tree fold_convert_const (enum tree_code, tree, tree);
/* Return EXPR_LOCATION of T if it is not UNKNOWN_LOCATION.
   Otherwise, return LOC.  */

static location_t
expr_location_or (tree t, location_t loc)
{
  location_t tloc = EXPR_LOCATION (t);
  return tloc != UNKNOWN_LOCATION ? tloc : loc;
}
/* Similar to protected_set_expr_location, but never modify X in place;
   if the location can and needs to be set, unshare X first.  */

static inline tree
protected_set_expr_location_unshare (tree x, location_t loc)
{
  if (CAN_HAVE_LOCATION_P (x)
      && EXPR_LOCATION (x) != loc
      && !(TREE_CODE (x) == SAVE_EXPR
           || TREE_CODE (x) == TARGET_EXPR
           || TREE_CODE (x) == BIND_EXPR))
    {
      x = copy_node (x);
      SET_EXPR_LOCATION (x, loc);
    }
  return x;
}
/* We know that A1 + B1 = SUM1, using 2's complement arithmetic and ignoring
   overflow.  Suppose A, B and SUM have the same respective signs as A1, B1,
   and SUM1.  Then this yields nonzero if overflow occurred during the
   addition.

   Overflow occurs if A and B have the same sign, but A and SUM differ in
   sign.  Use `^' to test whether signs differ, and `< 0' to isolate the
   sign.  */
#define OVERFLOW_SUM_SIGN(a, b, sum) ((~((a) ^ (b)) & ((a) ^ (sum))) < 0)
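
/* Worked example (illustrative, not part of the original source), with
   8-bit two's complement values: a = 100, b = 100, sum = -56 (the
   wrapped result of 200).  The signs of a and b agree, so ~(a ^ b) has
   the sign bit set; a and sum differ in sign, so (a ^ sum) has the sign
   bit set too.  Their AND is negative, reporting the overflow:

     OVERFLOW_SUM_SIGN (100, 100, -56)  -- nonzero (overflow)
     OVERFLOW_SUM_SIGN (100, -100, 0)   -- zero (no overflow)  */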
/* If ARG2 divides ARG1 with zero remainder, carries out the division
   of type CODE and returns the quotient.
   Otherwise returns NULL_TREE.  */

tree
div_if_zero_remainder (enum tree_code code, const_tree arg1, const_tree arg2)
{
  double_int quo, rem;
  int uns;

  /* The sign of the division is taken from operand two, which does
     the correct thing for POINTER_PLUS_EXPR where we want a signed
     division.  */
  uns = TYPE_UNSIGNED (TREE_TYPE (arg2));
  if (TREE_CODE (TREE_TYPE (arg2)) == INTEGER_TYPE
      && TYPE_IS_SIZETYPE (TREE_TYPE (arg2)))
    uns = false;

  quo = double_int_divmod (tree_to_double_int (arg1),
                           tree_to_double_int (arg2),
                           uns, code, &rem);

  if (double_int_zero_p (rem))
    return build_int_cst_wide (TREE_TYPE (arg1), quo.low, quo.high);

  return NULL_TREE;
}
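
/* Illustrative example (not part of the original source): folding a
   pointer difference such as (&a[6] - &a[0]) / sizeof (int), with
   4-byte int, reaches this with CODE == EXACT_DIV_EXPR, ARG1 == 24 and
   ARG2 == 4; the remainder is zero, so the constant 6 is returned.
   With ARG1 == 25 it would return NULL_TREE instead.  */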
/* This is nonzero if we should defer warnings about undefined
   overflow.  This facility exists because these warnings are a
   special case.  The code to estimate loop iterations does not want
   to issue any warnings, since it works with expressions which do not
   occur in user code.  Various bits of cleanup code call fold(), but
   only use the result if it has certain characteristics (e.g., is a
   constant); that code only wants to issue a warning if the result is
   used.  */

static int fold_deferring_overflow_warnings;

/* If a warning about undefined overflow is deferred, this is the
   warning.  Note that this may cause us to turn two warnings into
   one, but that is fine since it is sufficient to only give one
   warning per expression.  */

static const char* fold_deferred_overflow_warning;

/* If a warning about undefined overflow is deferred, this is the
   level at which the warning should be emitted.  */

static enum warn_strict_overflow_code fold_deferred_overflow_code;

/* Start deferring overflow warnings.  We could use a stack here to
   permit nested calls, but at present it is not necessary.  */

void
fold_defer_overflow_warnings (void)
{
  ++fold_deferring_overflow_warnings;
}
/* Stop deferring overflow warnings.  If there is a pending warning,
   and ISSUE is true, then issue the warning if appropriate.  STMT is
   the statement with which the warning should be associated (used for
   location information); STMT may be NULL.  CODE is the level of the
   warning--a warn_strict_overflow_code value.  This function will use
   the smaller of CODE and the deferred code when deciding whether to
   issue the warning.  CODE may be zero to mean to always use the
   deferred code.  */

void
fold_undefer_overflow_warnings (bool issue, const_gimple stmt, int code)
{
  const char *warnmsg;
  location_t locus;

  gcc_assert (fold_deferring_overflow_warnings > 0);
  --fold_deferring_overflow_warnings;
  if (fold_deferring_overflow_warnings > 0)
    {
      if (fold_deferred_overflow_warning != NULL
          && code != 0
          && code < (int) fold_deferred_overflow_code)
        fold_deferred_overflow_code = (enum warn_strict_overflow_code) code;
      return;
    }

  warnmsg = fold_deferred_overflow_warning;
  fold_deferred_overflow_warning = NULL;

  if (!issue || warnmsg == NULL)
    return;

  if (gimple_no_warning_p (stmt))
    return;

  /* Use the smallest code level when deciding to issue the
     warning.  */
  if (code == 0 || code > (int) fold_deferred_overflow_code)
    code = fold_deferred_overflow_code;

  if (!issue_strict_overflow_warning (code))
    return;

  if (stmt == NULL)
    locus = input_location;
  else
    locus = gimple_location (stmt);
  warning_at (locus, OPT_Wstrict_overflow, "%s", warnmsg);
}
/* Stop deferring overflow warnings, ignoring any deferred
   warnings.  */

void
fold_undefer_and_ignore_overflow_warnings (void)
{
  fold_undefer_overflow_warnings (false, NULL, 0);
}

/* Whether we are deferring overflow warnings.  */

bool
fold_deferring_overflow_warnings_p (void)
{
  return fold_deferring_overflow_warnings > 0;
}
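
/* Sketch of the expected calling pattern (illustrative only; "keep_p"
   and "stmt" are placeholder names, not actual callers): a pass that
   folds speculatively brackets the work with the defer/undefer pair
   and only lets a warning out when it keeps the folded result:

     fold_defer_overflow_warnings ();
     tem = fold (expr);
     fold_undefer_overflow_warnings (keep_p, stmt, 0);

   Passing 0 as the last argument means "use the deferred level".  */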
/* This is called when we fold something based on the fact that signed
   overflow is undefined.  */

static void
fold_overflow_warning (const char* gmsgid, enum warn_strict_overflow_code wc)
{
  if (fold_deferring_overflow_warnings > 0)
    {
      if (fold_deferred_overflow_warning == NULL
          || wc < fold_deferred_overflow_code)
        {
          fold_deferred_overflow_warning = gmsgid;
          fold_deferred_overflow_code = wc;
        }
    }
  else if (issue_strict_overflow_warning (wc))
    warning (OPT_Wstrict_overflow, gmsgid);
}
/* Return true if the built-in mathematical function specified by CODE
   is odd, i.e. -f(x) == f(-x).  */

static bool
negate_mathfn_p (enum built_in_function code)
{
  switch (code)
    {
    CASE_FLT_FN (BUILT_IN_ASIN):
    CASE_FLT_FN (BUILT_IN_ASINH):
    CASE_FLT_FN (BUILT_IN_ATAN):
    CASE_FLT_FN (BUILT_IN_ATANH):
    CASE_FLT_FN (BUILT_IN_CASIN):
    CASE_FLT_FN (BUILT_IN_CASINH):
    CASE_FLT_FN (BUILT_IN_CATAN):
    CASE_FLT_FN (BUILT_IN_CATANH):
    CASE_FLT_FN (BUILT_IN_CBRT):
    CASE_FLT_FN (BUILT_IN_CPROJ):
    CASE_FLT_FN (BUILT_IN_CSIN):
    CASE_FLT_FN (BUILT_IN_CSINH):
    CASE_FLT_FN (BUILT_IN_CTAN):
    CASE_FLT_FN (BUILT_IN_CTANH):
    CASE_FLT_FN (BUILT_IN_ERF):
    CASE_FLT_FN (BUILT_IN_LLROUND):
    CASE_FLT_FN (BUILT_IN_LROUND):
    CASE_FLT_FN (BUILT_IN_ROUND):
    CASE_FLT_FN (BUILT_IN_SIN):
    CASE_FLT_FN (BUILT_IN_SINH):
    CASE_FLT_FN (BUILT_IN_TAN):
    CASE_FLT_FN (BUILT_IN_TANH):
    CASE_FLT_FN (BUILT_IN_TRUNC):
      return true;

    CASE_FLT_FN (BUILT_IN_LLRINT):
    CASE_FLT_FN (BUILT_IN_LRINT):
    CASE_FLT_FN (BUILT_IN_NEARBYINT):
    CASE_FLT_FN (BUILT_IN_RINT):
      return !flag_rounding_math;

    default:
      break;
    }
  return false;
}
/* Check whether we may negate an integer constant T without causing
   overflow.  */

bool
may_negate_without_overflow_p (const_tree t)
{
  unsigned HOST_WIDE_INT val;
  unsigned int prec;
  tree type;

  gcc_assert (TREE_CODE (t) == INTEGER_CST);

  type = TREE_TYPE (t);
  if (TYPE_UNSIGNED (type))
    return false;

  prec = TYPE_PRECISION (type);
  if (prec > HOST_BITS_PER_WIDE_INT)
    {
      if (TREE_INT_CST_LOW (t) != 0)
        return true;
      prec -= HOST_BITS_PER_WIDE_INT;
      val = TREE_INT_CST_HIGH (t);
    }
  else
    val = TREE_INT_CST_LOW (t);
  if (prec < HOST_BITS_PER_WIDE_INT)
    val &= ((unsigned HOST_WIDE_INT) 1 << prec) - 1;
  return val != ((unsigned HOST_WIDE_INT) 1 << (prec - 1));
}
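
/* Illustrative note (not part of the original source): for a signed
   type, the only value whose negation overflows is the most negative
   one, so conceptually for an INTEGER_CST of 32-bit int:

     may_negate_without_overflow_p (-2147483648)  -- false (INT_MIN)
     may_negate_without_overflow_p (-2147483647)  -- true  */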
/* Determine whether an expression T can be cheaply negated using
   the function negate_expr without introducing undefined overflow.  */

static bool
negate_expr_p (tree t)
{
  tree type;

  if (t == 0)
    return false;

  type = TREE_TYPE (t);

  STRIP_SIGN_NOPS (t);
  switch (TREE_CODE (t))
    {
    case INTEGER_CST:
      if (TYPE_OVERFLOW_WRAPS (type))
        return true;

      /* Check that -CST will not overflow type.  */
      return may_negate_without_overflow_p (t);
    case BIT_NOT_EXPR:
      return (INTEGRAL_TYPE_P (type)
              && TYPE_OVERFLOW_WRAPS (type));

    case FIXED_CST:
    case NEGATE_EXPR:
      return true;

    case REAL_CST:
      /* We want to canonicalize to positive real constants.  Pretend
         that only negative ones can be easily negated.  */
      return REAL_VALUE_NEGATIVE (TREE_REAL_CST (t));

    case COMPLEX_CST:
      return negate_expr_p (TREE_REALPART (t))
             && negate_expr_p (TREE_IMAGPART (t));

    case COMPLEX_EXPR:
      return negate_expr_p (TREE_OPERAND (t, 0))
             && negate_expr_p (TREE_OPERAND (t, 1));

    case CONJ_EXPR:
      return negate_expr_p (TREE_OPERAND (t, 0));

    case PLUS_EXPR:
      if (HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type))
          || HONOR_SIGNED_ZEROS (TYPE_MODE (type)))
        return false;
      /* -(A + B) -> (-B) - A.  */
      if (negate_expr_p (TREE_OPERAND (t, 1))
          && reorder_operands_p (TREE_OPERAND (t, 0),
                                 TREE_OPERAND (t, 1)))
        return true;
      /* -(A + B) -> (-A) - B.  */
      return negate_expr_p (TREE_OPERAND (t, 0));

    case MINUS_EXPR:
      /* We can't turn -(A-B) into B-A when we honor signed zeros.  */
      return !HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type))
             && !HONOR_SIGNED_ZEROS (TYPE_MODE (type))
             && reorder_operands_p (TREE_OPERAND (t, 0),
                                    TREE_OPERAND (t, 1));

    case MULT_EXPR:
      if (TYPE_UNSIGNED (TREE_TYPE (t)))
        break;

      /* Fall through.  */

    case RDIV_EXPR:
      if (! HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (TREE_TYPE (t))))
        return negate_expr_p (TREE_OPERAND (t, 1))
               || negate_expr_p (TREE_OPERAND (t, 0));
      break;

    case TRUNC_DIV_EXPR:
    case ROUND_DIV_EXPR:
    case FLOOR_DIV_EXPR:
    case CEIL_DIV_EXPR:
    case EXACT_DIV_EXPR:
      /* In general we can't negate A / B, because if A is INT_MIN and
         B is 1, we may turn this into INT_MIN / -1 which is undefined
         and actually traps on some architectures.  But if overflow is
         undefined, we can negate, because - (INT_MIN / 1) is an
         overflow.  */
      if (INTEGRAL_TYPE_P (TREE_TYPE (t))
          && !TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (t)))
        break;
      return negate_expr_p (TREE_OPERAND (t, 1))
             || negate_expr_p (TREE_OPERAND (t, 0));

    case NOP_EXPR:
      /* Negate -((double)float) as (double)(-float).  */
      if (TREE_CODE (type) == REAL_TYPE)
        {
          tree tem = strip_float_extensions (t);
          if (tem != t)
            return negate_expr_p (tem);
        }
      break;

    case CALL_EXPR:
      /* Negate -f(x) as f(-x).  */
      if (negate_mathfn_p (builtin_mathfn_code (t)))
        return negate_expr_p (CALL_EXPR_ARG (t, 0));
      break;

    case RSHIFT_EXPR:
      /* Optimize -((int)x >> 31) into (unsigned)x >> 31.  */
      if (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST)
        {
          tree op1 = TREE_OPERAND (t, 1);
          if (TREE_INT_CST_HIGH (op1) == 0
              && (unsigned HOST_WIDE_INT) (TYPE_PRECISION (type) - 1)
                 == TREE_INT_CST_LOW (op1))
            return true;
        }
      break;

    default:
      break;
    }
  return false;
}
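
/* Illustrative examples (not part of the original source), for signed
   int X and Y with default flags:

     X - Y   -- negatable: -(X - Y) can be rewritten as Y - X
     3 * X   -- negatable: the constant operand absorbs the sign
     X / Y   -- negatable only when signed overflow is undefined; see
                the TRUNC_DIV_EXPR case above.  */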
/* Given T, an expression, return a folded tree for -T or NULL_TREE, if no
   simplification is possible.
   If negate_expr_p would return true for T, NULL_TREE will never be
   returned.  */

static tree
fold_negate_expr (location_t loc, tree t)
{
  tree type = TREE_TYPE (t);
  tree tem;

  switch (TREE_CODE (t))
    {
    /* Convert - (~A) to A + 1.  */
    case BIT_NOT_EXPR:
      if (INTEGRAL_TYPE_P (type))
        return fold_build2_loc (loc, PLUS_EXPR, type, TREE_OPERAND (t, 0),
                                build_int_cst (type, 1));
      break;

    case INTEGER_CST:
      tem = fold_negate_const (t, type);
      if (TREE_OVERFLOW (tem) == TREE_OVERFLOW (t)
          || !TYPE_OVERFLOW_TRAPS (type))
        return tem;
      break;

    case REAL_CST:
      tem = fold_negate_const (t, type);
      /* Two's complement FP formats, such as c4x, may overflow.  */
      if (!TREE_OVERFLOW (tem) || !flag_trapping_math)
        return tem;
      break;

    case FIXED_CST:
      tem = fold_negate_const (t, type);
      return tem;

    case COMPLEX_CST:
      {
        tree rpart = negate_expr (TREE_REALPART (t));
        tree ipart = negate_expr (TREE_IMAGPART (t));

        if ((TREE_CODE (rpart) == REAL_CST
             && TREE_CODE (ipart) == REAL_CST)
            || (TREE_CODE (rpart) == INTEGER_CST
                && TREE_CODE (ipart) == INTEGER_CST))
          return build_complex (type, rpart, ipart);
      }
      break;

    case COMPLEX_EXPR:
      if (negate_expr_p (t))
        return fold_build2_loc (loc, COMPLEX_EXPR, type,
                                fold_negate_expr (loc, TREE_OPERAND (t, 0)),
                                fold_negate_expr (loc, TREE_OPERAND (t, 1)));
      break;

    case CONJ_EXPR:
      if (negate_expr_p (t))
        return fold_build1_loc (loc, CONJ_EXPR, type,
                                fold_negate_expr (loc, TREE_OPERAND (t, 0)));
      break;

    case NEGATE_EXPR:
      return TREE_OPERAND (t, 0);

    case PLUS_EXPR:
      if (!HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type))
          && !HONOR_SIGNED_ZEROS (TYPE_MODE (type)))
        {
          /* -(A + B) -> (-B) - A.  */
          if (negate_expr_p (TREE_OPERAND (t, 1))
              && reorder_operands_p (TREE_OPERAND (t, 0),
                                     TREE_OPERAND (t, 1)))
            {
              tem = negate_expr (TREE_OPERAND (t, 1));
              return fold_build2_loc (loc, MINUS_EXPR, type,
                                      tem, TREE_OPERAND (t, 0));
            }

          /* -(A + B) -> (-A) - B.  */
          if (negate_expr_p (TREE_OPERAND (t, 0)))
            {
              tem = negate_expr (TREE_OPERAND (t, 0));
              return fold_build2_loc (loc, MINUS_EXPR, type,
                                      tem, TREE_OPERAND (t, 1));
            }
        }
      break;

    case MINUS_EXPR:
      /* - (A - B) -> B - A  */
      if (!HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type))
          && !HONOR_SIGNED_ZEROS (TYPE_MODE (type))
          && reorder_operands_p (TREE_OPERAND (t, 0), TREE_OPERAND (t, 1)))
        return fold_build2_loc (loc, MINUS_EXPR, type,
                                TREE_OPERAND (t, 1), TREE_OPERAND (t, 0));
      break;

    case MULT_EXPR:
      if (TYPE_UNSIGNED (type))
        break;

      /* Fall through.  */

    case RDIV_EXPR:
      if (! HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type)))
        {
          tem = TREE_OPERAND (t, 1);
          if (negate_expr_p (tem))
            return fold_build2_loc (loc, TREE_CODE (t), type,
                                    TREE_OPERAND (t, 0), negate_expr (tem));
          tem = TREE_OPERAND (t, 0);
          if (negate_expr_p (tem))
            return fold_build2_loc (loc, TREE_CODE (t), type,
                                    negate_expr (tem), TREE_OPERAND (t, 1));
        }
      break;

    case TRUNC_DIV_EXPR:
    case ROUND_DIV_EXPR:
    case FLOOR_DIV_EXPR:
    case CEIL_DIV_EXPR:
    case EXACT_DIV_EXPR:
      /* In general we can't negate A / B, because if A is INT_MIN and
         B is 1, we may turn this into INT_MIN / -1 which is undefined
         and actually traps on some architectures.  But if overflow is
         undefined, we can negate, because - (INT_MIN / 1) is an
         overflow.  */
      if (!INTEGRAL_TYPE_P (type) || TYPE_OVERFLOW_UNDEFINED (type))
        {
          const char * const warnmsg = G_("assuming signed overflow does not "
                                          "occur when negating a division");
          tem = TREE_OPERAND (t, 1);
          if (negate_expr_p (tem))
            {
              if (INTEGRAL_TYPE_P (type)
                  && (TREE_CODE (tem) != INTEGER_CST
                      || integer_onep (tem)))
                fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_MISC);
              return fold_build2_loc (loc, TREE_CODE (t), type,
                                      TREE_OPERAND (t, 0), negate_expr (tem));
            }
          tem = TREE_OPERAND (t, 0);
          if (negate_expr_p (tem))
            {
              if (INTEGRAL_TYPE_P (type)
                  && (TREE_CODE (tem) != INTEGER_CST
                      || tree_int_cst_equal (tem, TYPE_MIN_VALUE (type))))
                fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_MISC);
              return fold_build2_loc (loc, TREE_CODE (t), type,
                                      negate_expr (tem), TREE_OPERAND (t, 1));
            }
        }
      break;

    case NOP_EXPR:
      /* Convert -((double)float) into (double)(-float).  */
      if (TREE_CODE (type) == REAL_TYPE)
        {
          tem = strip_float_extensions (t);
          if (tem != t && negate_expr_p (tem))
            return fold_convert_loc (loc, type, negate_expr (tem));
        }
      break;

    case CALL_EXPR:
      /* Negate -f(x) as f(-x).  */
      if (negate_mathfn_p (builtin_mathfn_code (t))
          && negate_expr_p (CALL_EXPR_ARG (t, 0)))
        {
          tree fndecl, arg;

          fndecl = get_callee_fndecl (t);
          arg = negate_expr (CALL_EXPR_ARG (t, 0));
          return build_call_expr_loc (loc, fndecl, 1, arg);
        }
      break;

    case RSHIFT_EXPR:
      /* Optimize -((int)x >> 31) into (unsigned)x >> 31.  */
      if (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST)
        {
          tree op1 = TREE_OPERAND (t, 1);
          if (TREE_INT_CST_HIGH (op1) == 0
              && (unsigned HOST_WIDE_INT) (TYPE_PRECISION (type) - 1)
                 == TREE_INT_CST_LOW (op1))
            {
              tree ntype = TYPE_UNSIGNED (type)
                           ? signed_type_for (type)
                           : unsigned_type_for (type);
              tree temp = fold_convert_loc (loc, ntype, TREE_OPERAND (t, 0));
              temp = fold_build2_loc (loc, RSHIFT_EXPR, ntype, temp, op1);
              return fold_convert_loc (loc, type, temp);
            }
        }
      break;

    default:
      break;
    }

  return NULL_TREE;
}
/* Like fold_negate_expr, but return a NEGATE_EXPR tree, if T cannot be
   negated in a simpler way.  Also allow for T to be NULL_TREE, in which case
   return NULL_TREE.  */

static tree
negate_expr (tree t)
{
  tree type, tem;
  location_t loc;

  if (t == NULL_TREE)
    return NULL_TREE;

  loc = EXPR_LOCATION (t);
  type = TREE_TYPE (t);
  STRIP_SIGN_NOPS (t);

  tem = fold_negate_expr (loc, t);
  if (!tem)
    tem = build1_loc (loc, NEGATE_EXPR, TREE_TYPE (t), t);
  return fold_convert_loc (loc, type, tem);
}
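
/* Illustrative before/after pairs for the negation helpers above (not
   part of the original source):

     - (~a)      ~~>  a + 1                (BIT_NOT_EXPR case)
     - (a - b)   ~~>  b - a                (MINUS_EXPR case)
     - (x / 3)   ~~>  x / -3               (division, overflow undefined)
     - sin (x)   ~~>  sin (-x)             (odd math function)
     - (a >> 31) ~~>  (unsigned) a >> 31   (sign-bit extraction, 32-bit)  */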
/* Split a tree IN into a constant, literal and variable parts that could be
   combined with CODE to make IN.  "constant" means an expression with
   TREE_CONSTANT but that isn't an actual constant.  CODE must be a
   commutative arithmetic operation.  Store the constant part into *CONP,
   the literal in *LITP and return the variable part.  If a part isn't
   present, set it to null.  If the tree does not decompose in this way,
   return the entire tree as the variable part and the other parts as null.

   If CODE is PLUS_EXPR we also split trees that use MINUS_EXPR.  In that
   case, we negate an operand that was subtracted.  Except if it is a
   literal for which we use *MINUS_LITP instead.

   If NEGATE_P is true, we are negating all of IN, again except a literal
   for which we use *MINUS_LITP instead.

   If IN is itself a literal or constant, return it as appropriate.

   Note that we do not guarantee that any of the three values will be the
   same type as IN, but they will have the same signedness and mode.  */

static tree
split_tree (tree in, enum tree_code code, tree *conp, tree *litp,
            tree *minus_litp, int negate_p)
{
  tree var = 0;

  *conp = 0;
  *litp = 0;
  *minus_litp = 0;

  /* Strip any conversions that don't change the machine mode or signedness.  */
  STRIP_SIGN_NOPS (in);

  if (TREE_CODE (in) == INTEGER_CST || TREE_CODE (in) == REAL_CST
      || TREE_CODE (in) == FIXED_CST)
    *litp = in;
  else if (TREE_CODE (in) == code
           || ((! FLOAT_TYPE_P (TREE_TYPE (in)) || flag_associative_math)
               && ! SAT_FIXED_POINT_TYPE_P (TREE_TYPE (in))
               /* We can associate addition and subtraction together (even
                  though the C standard doesn't say so) for integers because
                  the value is not affected.  For reals, the value might be
                  affected, so we can't.  */
               && ((code == PLUS_EXPR && TREE_CODE (in) == MINUS_EXPR)
                   || (code == MINUS_EXPR && TREE_CODE (in) == PLUS_EXPR))))
    {
      tree op0 = TREE_OPERAND (in, 0);
      tree op1 = TREE_OPERAND (in, 1);
      int neg1_p = TREE_CODE (in) == MINUS_EXPR;
      int neg_litp_p = 0, neg_conp_p = 0, neg_var_p = 0;

      /* First see if either of the operands is a literal, then a constant.  */
      if (TREE_CODE (op0) == INTEGER_CST || TREE_CODE (op0) == REAL_CST
          || TREE_CODE (op0) == FIXED_CST)
        *litp = op0, op0 = 0;
      else if (TREE_CODE (op1) == INTEGER_CST || TREE_CODE (op1) == REAL_CST
               || TREE_CODE (op1) == FIXED_CST)
        *litp = op1, neg_litp_p = neg1_p, op1 = 0;

      if (op0 != 0 && TREE_CONSTANT (op0))
        *conp = op0, op0 = 0;
      else if (op1 != 0 && TREE_CONSTANT (op1))
        *conp = op1, neg_conp_p = neg1_p, op1 = 0;

      /* If we haven't dealt with either operand, this is not a case we can
         decompose.  Otherwise, VAR is either of the ones remaining, if any.  */
      if (op0 != 0 && op1 != 0)
        var = in;
      else if (op0 != 0)
        var = op0;
      else
        var = op1, neg_var_p = neg1_p;

      /* Now do any needed negations.  */
      if (neg_litp_p)
        *minus_litp = *litp, *litp = 0;
      if (neg_conp_p)
        *conp = negate_expr (*conp);
      if (neg_var_p)
        var = negate_expr (var);
    }
  else if (TREE_CONSTANT (in))
    *conp = in;
  else
    var = in;

  if (negate_p)
    {
      if (*litp)
        *minus_litp = *litp, *litp = 0;
      else if (*minus_litp)
        *litp = *minus_litp, *minus_litp = 0;
      *conp = negate_expr (*conp);
      var = negate_expr (var);
    }

  return var;
}
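
/* Worked example (illustrative, not part of the original source):
   splitting IN = x - 5 with CODE == PLUS_EXPR and NEGATE_P == 0 takes
   the MINUS_EXPR branch, finds the literal 5 in the subtracted
   operand and therefore stores it in *MINUS_LITP, leaves *LITP and
   *CONP null, and returns x as the variable part; i.e. IN decomposes
   as x + (-5).  */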
/* Re-associate trees split by the above function.  T1 and T2 are
   either expressions to associate or null.  Return the new
   expression, if any.  LOC is the location of the new expression.  If
   we build an operation, do it in TYPE and with CODE.  */

static tree
associate_trees (location_t loc, tree t1, tree t2, enum tree_code code, tree type)
{
  if (t1 == 0)
    return t2;
  else if (t2 == 0)
    return t1;

  /* If either input is CODE, a PLUS_EXPR, or a MINUS_EXPR, don't
     try to fold this since we will have infinite recursion.  But do
     deal with any NEGATE_EXPRs.  */
  if (TREE_CODE (t1) == code || TREE_CODE (t2) == code
      || TREE_CODE (t1) == MINUS_EXPR || TREE_CODE (t2) == MINUS_EXPR)
    {
      if (code == PLUS_EXPR)
        {
          if (TREE_CODE (t1) == NEGATE_EXPR)
            return build2_loc (loc, MINUS_EXPR, type,
                               fold_convert_loc (loc, type, t2),
                               fold_convert_loc (loc, type,
                                                 TREE_OPERAND (t1, 0)));
          else if (TREE_CODE (t2) == NEGATE_EXPR)
            return build2_loc (loc, MINUS_EXPR, type,
                               fold_convert_loc (loc, type, t1),
                               fold_convert_loc (loc, type,
                                                 TREE_OPERAND (t2, 0)));
          else if (integer_zerop (t2))
            return fold_convert_loc (loc, type, t1);
        }
      else if (code == MINUS_EXPR)
        {
          if (integer_zerop (t2))
            return fold_convert_loc (loc, type, t1);
        }

      return build2_loc (loc, code, type, fold_convert_loc (loc, type, t1),
                         fold_convert_loc (loc, type, t2));
    }

  return fold_build2_loc (loc, code, type, fold_convert_loc (loc, type, t1),
                          fold_convert_loc (loc, type, t2));
}
/* Check whether TYPE1 and TYPE2 are equivalent integer types, suitable
   for use in int_const_binop, size_binop and size_diffop.  */

static bool
int_binop_types_match_p (enum tree_code code, const_tree type1, const_tree type2)
{
  if (TREE_CODE (type1) != INTEGER_TYPE && !POINTER_TYPE_P (type1))
    return false;
  if (TREE_CODE (type2) != INTEGER_TYPE && !POINTER_TYPE_P (type2))
    return false;

  switch (code)
    {
    case LSHIFT_EXPR:
    case RSHIFT_EXPR:
    case LROTATE_EXPR:
    case RROTATE_EXPR:
      return true;

    default:
      break;
    }

  return TYPE_UNSIGNED (type1) == TYPE_UNSIGNED (type2)
         && TYPE_PRECISION (type1) == TYPE_PRECISION (type2)
         && TYPE_MODE (type1) == TYPE_MODE (type2);
}
/* Combine two integer constants ARG1 and ARG2 under operation CODE
   to produce a new constant.  Return NULL_TREE if we don't know how
   to evaluate CODE at compile-time.  */

tree
int_const_binop (enum tree_code code, const_tree arg1, const_tree arg2)
{
  double_int op1, op2, res, tmp;
  tree t;
  tree type = TREE_TYPE (arg1);
  bool uns = TYPE_UNSIGNED (type);
  bool is_sizetype
    = (TREE_CODE (type) == INTEGER_TYPE && TYPE_IS_SIZETYPE (type));
  bool overflow = false;

  op1 = tree_to_double_int (arg1);
  op2 = tree_to_double_int (arg2);

  switch (code)
    {
    case BIT_IOR_EXPR:
      res = double_int_ior (op1, op2);
      break;

    case BIT_XOR_EXPR:
      res = double_int_xor (op1, op2);
      break;

    case BIT_AND_EXPR:
      res = double_int_and (op1, op2);
      break;

    case RSHIFT_EXPR:
      res = double_int_rshift (op1, double_int_to_shwi (op2),
                               TYPE_PRECISION (type), !uns);
      break;

    case LSHIFT_EXPR:
      /* It's unclear from the C standard whether shifts can overflow.
         The following code ignores overflow; perhaps a C standard
         interpretation ruling is needed.  */
      res = double_int_lshift (op1, double_int_to_shwi (op2),
                               TYPE_PRECISION (type), !uns);
      break;

    case RROTATE_EXPR:
      res = double_int_rrotate (op1, double_int_to_shwi (op2),
                                TYPE_PRECISION (type));
      break;

    case LROTATE_EXPR:
      res = double_int_lrotate (op1, double_int_to_shwi (op2),
                                TYPE_PRECISION (type));
      break;

    case PLUS_EXPR:
      overflow = add_double (op1.low, op1.high, op2.low, op2.high,
                             &res.low, &res.high);
      break;

    case MINUS_EXPR:
      neg_double (op2.low, op2.high, &res.low, &res.high);
      add_double (op1.low, op1.high, res.low, res.high,
                  &res.low, &res.high);
      overflow = OVERFLOW_SUM_SIGN (res.high, op2.high, op1.high);
      break;

    case MULT_EXPR:
      overflow = mul_double (op1.low, op1.high, op2.low, op2.high,
                             &res.low, &res.high);
      break;

    case TRUNC_DIV_EXPR:
    case FLOOR_DIV_EXPR: case CEIL_DIV_EXPR:
    case EXACT_DIV_EXPR:
      /* This is a shortcut for a common special case.  */
      if (op2.high == 0 && (HOST_WIDE_INT) op2.low > 0
          && !TREE_OVERFLOW (arg1)
          && !TREE_OVERFLOW (arg2)
          && op1.high == 0 && (HOST_WIDE_INT) op1.low >= 0)
        {
          if (code == CEIL_DIV_EXPR)
            op1.low += op2.low - 1;

          res.low = op1.low / op2.low, res.high = 0;
          break;
        }

      /* ... fall through ...  */

    case ROUND_DIV_EXPR:
      if (double_int_zero_p (op2))
        return NULL_TREE;
      if (double_int_one_p (op2))
        {
          res = op1;
          break;
        }
      if (double_int_equal_p (op1, op2)
          && ! double_int_zero_p (op1))
        {
          res = double_int_one;
          break;
        }
      overflow = div_and_round_double (code, uns,
                                       op1.low, op1.high, op2.low, op2.high,
                                       &res.low, &res.high,
                                       &tmp.low, &tmp.high);
      break;

    case TRUNC_MOD_EXPR:
    case FLOOR_MOD_EXPR: case CEIL_MOD_EXPR:
      /* This is a shortcut for a common special case.  */
      if (op2.high == 0 && (HOST_WIDE_INT) op2.low > 0
          && !TREE_OVERFLOW (arg1)
          && !TREE_OVERFLOW (arg2)
          && op1.high == 0 && (HOST_WIDE_INT) op1.low >= 0)
        {
          if (code == CEIL_MOD_EXPR)
            op1.low += op2.low - 1;
          res.low = op1.low % op2.low, res.high = 0;
          break;
        }

      /* ... fall through ...  */

    case ROUND_MOD_EXPR:
      if (double_int_zero_p (op2))
        return NULL_TREE;
      overflow = div_and_round_double (code, uns,
                                       op1.low, op1.high, op2.low, op2.high,
                                       &tmp.low, &tmp.high,
                                       &res.low, &res.high);
      break;

    case MIN_EXPR:
      res = double_int_min (op1, op2, uns);
      break;

    case MAX_EXPR:
      res = double_int_max (op1, op2, uns);
      break;

    default:
      return NULL_TREE;
    }

  t = force_fit_type_double (TREE_TYPE (arg1), res, 1,
                             ((!uns || is_sizetype) && overflow)
                             | TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2));

  return t;
}
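
/* Illustrative example (not part of the original source): folding the
   constant expression 7 / 2 in signed int reaches this with
   CODE == TRUNC_DIV_EXPR; the "common special case" shortcut applies
   (both operands fit in one HOST_WIDE_INT and are non-negative) and
   produces 3.  A wrapping result such as INT_MAX + 1 still yields a
   constant, but force_fit_type_double marks it with TREE_OVERFLOW.  */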
/* Combine two constants ARG1 and ARG2 under operation CODE to produce a new
   constant.  We assume ARG1 and ARG2 have the same data type, or at least
   are the same kind of constant and the same machine mode.  Return zero if
   combining the constants is not allowed in the current operating mode.  */

static tree
const_binop (enum tree_code code, tree arg1, tree arg2)
{
  /* Sanity check for the recursive cases.  */
  if (!arg1 || !arg2)
    return NULL_TREE;

  STRIP_NOPS (arg1);
  STRIP_NOPS (arg2);

  if (TREE_CODE (arg1) == INTEGER_CST)
    return int_const_binop (code, arg1, arg2);

  if (TREE_CODE (arg1) == REAL_CST)
    {
      enum machine_mode mode;
      REAL_VALUE_TYPE d1;
      REAL_VALUE_TYPE d2;
      REAL_VALUE_TYPE value;
      REAL_VALUE_TYPE result;
      bool inexact;
      tree t, type;

      /* The following codes are handled by real_arithmetic.  */
      switch (code)
        {
        case PLUS_EXPR:
        case MINUS_EXPR:
        case MULT_EXPR:
        case RDIV_EXPR:
        case MIN_EXPR:
        case MAX_EXPR:
          break;

        default:
          return NULL_TREE;
        }

      d1 = TREE_REAL_CST (arg1);
      d2 = TREE_REAL_CST (arg2);

      type = TREE_TYPE (arg1);
      mode = TYPE_MODE (type);

      /* Don't perform operation if we honor signaling NaNs and
         either operand is a NaN.  */
      if (HONOR_SNANS (mode)
          && (REAL_VALUE_ISNAN (d1) || REAL_VALUE_ISNAN (d2)))
        return NULL_TREE;

      /* Don't perform operation if it would raise a division
         by zero exception.  */
      if (code == RDIV_EXPR
          && REAL_VALUES_EQUAL (d2, dconst0)
          && (flag_trapping_math || ! MODE_HAS_INFINITIES (mode)))
        return NULL_TREE;

      /* If either operand is a NaN, just return it.  Otherwise, set up
         for floating-point trap; we return an overflow.  */
      if (REAL_VALUE_ISNAN (d1))
        return arg1;
      else if (REAL_VALUE_ISNAN (d2))
        return arg2;

      inexact = real_arithmetic (&value, code, &d1, &d2);
      real_convert (&result, mode, &value);

      /* Don't constant fold this floating point operation if
         the result has overflowed and flag_trapping_math.  */
      if (flag_trapping_math
          && MODE_HAS_INFINITIES (mode)
          && REAL_VALUE_ISINF (result)
          && !REAL_VALUE_ISINF (d1)
          && !REAL_VALUE_ISINF (d2))
        return NULL_TREE;

      /* Don't constant fold this floating point operation if the
         result may depend upon the run-time rounding mode and
         flag_rounding_math is set, or if GCC's software emulation
         is unable to accurately represent the result.  */
      if ((flag_rounding_math
           || (MODE_COMPOSITE_P (mode) && !flag_unsafe_math_optimizations))
          && (inexact || !real_identical (&result, &value)))
        return NULL_TREE;

      t = build_real (type, result);

      TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2);
      return t;
    }

  if (TREE_CODE (arg1) == FIXED_CST)
    {
      FIXED_VALUE_TYPE f1;
      FIXED_VALUE_TYPE f2;
      FIXED_VALUE_TYPE result;
      tree t, type;
      int sat_p;
      bool overflow_p;

      /* The following codes are handled by fixed_arithmetic.  */
      switch (code)
        {
        case PLUS_EXPR:
        case MINUS_EXPR:
        case MULT_EXPR:
        case TRUNC_DIV_EXPR:
          f2 = TREE_FIXED_CST (arg2);
          break;

        case LSHIFT_EXPR:
        case RSHIFT_EXPR:
          f2.data.high = TREE_INT_CST_HIGH (arg2);
          f2.data.low = TREE_INT_CST_LOW (arg2);
          f2.mode = SImode;
          break;

        default:
          return NULL_TREE;
        }

      f1 = TREE_FIXED_CST (arg1);
      type = TREE_TYPE (arg1);
      sat_p = TYPE_SATURATING (type);
      overflow_p = fixed_arithmetic (&result, code, &f1, &f2, sat_p);
      t = build_fixed (type, result);
      /* Propagate overflow flags.  */
      if (overflow_p | TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2))
        TREE_OVERFLOW (t) = 1;
      return t;
    }

  if (TREE_CODE (arg1) == COMPLEX_CST)
    {
      tree type = TREE_TYPE (arg1);
      tree r1 = TREE_REALPART (arg1);
      tree i1 = TREE_IMAGPART (arg1);
      tree r2 = TREE_REALPART (arg2);
      tree i2 = TREE_IMAGPART (arg2);
      tree real, imag;

      switch (code)
        {
        case PLUS_EXPR:
        case MINUS_EXPR:
          real = const_binop (code, r1, r2);
          imag = const_binop (code, i1, i2);
          break;

        case MULT_EXPR:
          if (COMPLEX_FLOAT_TYPE_P (type))
            return do_mpc_arg2 (arg1, arg2, type,
                                /* do_nonfinite= */ folding_initializer,
                                mpc_mul);

          real = const_binop (MINUS_EXPR,
                              const_binop (MULT_EXPR, r1, r2),
                              const_binop (MULT_EXPR, i1, i2));
          imag = const_binop (PLUS_EXPR,
                              const_binop (MULT_EXPR, r1, i2),
                              const_binop (MULT_EXPR, i1, r2));
          break;

        case RDIV_EXPR:
          if (COMPLEX_FLOAT_TYPE_P (type))
            return do_mpc_arg2 (arg1, arg2, type,
                                /* do_nonfinite= */ folding_initializer,
                                mpc_div);
          /* Fallthru ...  */
        case TRUNC_DIV_EXPR:
        case CEIL_DIV_EXPR:
        case FLOOR_DIV_EXPR:
        case ROUND_DIV_EXPR:
          if (flag_complex_method == 0)
            {
              /* Keep this algorithm in sync with
                 tree-complex.c:expand_complex_div_straight().

                 Expand complex division to scalars, straightforward algorithm.
                 a / b = ((ar*br + ai*bi)/t) + i((ai*br - ar*bi)/t)
                 t = br*br + bi*bi  */
              tree magsquared
                = const_binop (PLUS_EXPR,
                               const_binop (MULT_EXPR, r2, r2),
                               const_binop (MULT_EXPR, i2, i2));
              tree t1
                = const_binop (PLUS_EXPR,
                               const_binop (MULT_EXPR, r1, r2),
                               const_binop (MULT_EXPR, i1, i2));
              tree t2
                = const_binop (MINUS_EXPR,
                               const_binop (MULT_EXPR, i1, r2),
                               const_binop (MULT_EXPR, r1, i2));

              real = const_binop (code, t1, magsquared);
              imag = const_binop (code, t2, magsquared);
            }
          else
            {
              /* Keep this algorithm in sync with
                 tree-complex.c:expand_complex_div_wide().

                 Expand complex division to scalars, modified algorithm to
                 minimize overflow with wide input ranges.  */
              tree compare = fold_build2 (LT_EXPR, boolean_type_node,
                                          fold_abs_const (r2, TREE_TYPE (type)),
                                          fold_abs_const (i2, TREE_TYPE (type)));

              if (integer_nonzerop (compare))
                {
                  /* In the TRUE branch, we compute
                     ratio = br/bi;
                     div = (br * ratio) + bi;
                     tr = (ar * ratio) + ai;
                     ti = (ai * ratio) - ar;
                     tr = tr / div;
                     ti = ti / div;  */
                  tree ratio = const_binop (code, r2, i2);
                  tree div = const_binop (PLUS_EXPR, i2,
                                          const_binop (MULT_EXPR, r2, ratio));
                  real = const_binop (MULT_EXPR, r1, ratio);
                  real = const_binop (PLUS_EXPR, real, i1);
                  real = const_binop (code, real, div);

                  imag = const_binop (MULT_EXPR, i1, ratio);
                  imag = const_binop (MINUS_EXPR, imag, r1);
                  imag = const_binop (code, imag, div);
                }
              else
                {
                  /* In the FALSE branch, we compute
                     ratio = d/c;
                     divisor = (d * ratio) + c;
                     tr = (b * ratio) + a;
                     ti = b - (a * ratio);
                     tr = tr / div;
                     ti = ti / div;  */
                  tree ratio = const_binop (code, i2, r2);
                  tree div = const_binop (PLUS_EXPR, r2,
                                          const_binop (MULT_EXPR, i2, ratio));

                  real = const_binop (MULT_EXPR, i1, ratio);
                  real = const_binop (PLUS_EXPR, real, r1);
                  real = const_binop (code, real, div);

                  imag = const_binop (MULT_EXPR, r1, ratio);
                  imag = const_binop (MINUS_EXPR, i1, imag);
                  imag = const_binop (code, imag, div);
                }
            }
          break;

        default:
          return NULL_TREE;
        }

      if (real && imag)
        return build_complex (type, real, imag);
    }

  if (TREE_CODE (arg1) == VECTOR_CST)
    {
      tree type = TREE_TYPE (arg1);
      int count = TYPE_VECTOR_SUBPARTS (type), i;
      tree elements1, elements2, list = NULL_TREE;

      if (TREE_CODE (arg2) != VECTOR_CST)
        return NULL_TREE;

      elements1 = TREE_VECTOR_CST_ELTS (arg1);
      elements2 = TREE_VECTOR_CST_ELTS (arg2);

      for (i = 0; i < count; i++)
        {
          tree elem1, elem2, elem;

          /* The trailing elements can be empty and should be treated as 0.  */
          if (!elements1)
            elem1 = fold_convert_const (NOP_EXPR, TREE_TYPE (type),
                                        integer_zero_node);
          else
            {
              elem1 = TREE_VALUE (elements1);
              elements1 = TREE_CHAIN (elements1);
            }

          if (!elements2)
            elem2 = fold_convert_const (NOP_EXPR, TREE_TYPE (type),
                                        integer_zero_node);
          else
            {
              elem2 = TREE_VALUE (elements2);
              elements2 = TREE_CHAIN (elements2);
            }

          elem = const_binop (code, elem1, elem2);

          /* It is possible that const_binop cannot handle the given
             code and returns NULL_TREE.  */
          if (elem == NULL_TREE)
            return NULL_TREE;

          list = tree_cons (NULL_TREE, elem, list);
        }
      return build_vector (type, nreverse (list));
    }
  return NULL_TREE;
}
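
/* Worked example (illustrative, not part of the original source) of
   the wide-range division above, for (1 + 2i) / (3 + 4i), where
   |br| = 3 < |bi| = 4 selects the TRUE branch:

     ratio = 3 / 4 = 0.75
     div   = 4 + 3 * 0.75 = 6.25
     tr    = (1 * 0.75 + 2) / 6.25 = 0.44
     ti    = (2 * 0.75 - 1) / 6.25 = 0.08

   which matches (1 + 2i)(3 - 4i) / 25 = (11 + 2i) / 25.  */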
/* Create a size type INT_CST node with NUMBER sign extended.  KIND
   indicates which particular sizetype to create.  */

tree
size_int_kind (HOST_WIDE_INT number, enum size_type_kind kind)
{
  return build_int_cst (sizetype_tab[(int) kind], number);
}
/* Combine operands OP1 and OP2 with arithmetic operation CODE.  CODE
   is a tree code.  The type of the result is taken from the operands.
   Both must be equivalent integer types, ala int_binop_types_match_p.
   If the operands are constant, so is the result.  */

tree
size_binop_loc (location_t loc, enum tree_code code, tree arg0, tree arg1)
{
  tree type = TREE_TYPE (arg0);

  if (arg0 == error_mark_node || arg1 == error_mark_node)
    return error_mark_node;

  gcc_assert (int_binop_types_match_p (code, TREE_TYPE (arg0),
                                       TREE_TYPE (arg1)));

  /* Handle the special case of two integer constants faster.  */
  if (TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
    {
      /* And some specific cases even faster than that.  */
      if (code == PLUS_EXPR)
        {
          if (integer_zerop (arg0) && !TREE_OVERFLOW (arg0))
            return arg1;
          if (integer_zerop (arg1) && !TREE_OVERFLOW (arg1))
            return arg0;
        }
      else if (code == MINUS_EXPR)
        {
          if (integer_zerop (arg1) && !TREE_OVERFLOW (arg1))
            return arg0;
        }
      else if (code == MULT_EXPR)
        {
          if (integer_onep (arg0) && !TREE_OVERFLOW (arg0))
            return arg1;
        }

      /* Handle general case of two integer constants.  */
      return int_const_binop (code, arg0, arg1);
    }

  return fold_build2_loc (loc, code, type, arg0, arg1);
}
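
/* Typical (illustrative) use from layout code, computing a byte
   offset -- both operands are sizetype, so the fast constant paths
   above usually apply ("offset" and "bits" are placeholder names,
   not actual callers):

     offset = size_binop (PLUS_EXPR, offset,
                          size_int (bits / BITS_PER_UNIT));
*/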
/* Given two values, either both of sizetype or both of bitsizetype,
   compute the difference between the two values.  Return the value
   in signed type corresponding to the type of the operands.  */

tree
size_diffop_loc (location_t loc, tree arg0, tree arg1)
{
  tree type = TREE_TYPE (arg0);
  tree ctype;

  gcc_assert (int_binop_types_match_p (MINUS_EXPR, TREE_TYPE (arg0),
                                       TREE_TYPE (arg1)));

  /* If the type is already signed, just do the simple thing.  */
  if (!TYPE_UNSIGNED (type))
    return size_binop_loc (loc, MINUS_EXPR, arg0, arg1);

  if (type == sizetype)
    ctype = ssizetype;
  else if (type == bitsizetype)
    ctype = sbitsizetype;
  else
    ctype = signed_type_for (type);

  /* If either operand is not a constant, do the conversions to the signed
     type and subtract.  The hardware will do the right thing with any
     overflow in the subtraction.  */
  if (TREE_CODE (arg0) != INTEGER_CST || TREE_CODE (arg1) != INTEGER_CST)
    return size_binop_loc (loc, MINUS_EXPR,
                           fold_convert_loc (loc, ctype, arg0),
                           fold_convert_loc (loc, ctype, arg1));

  /* If ARG0 is larger than ARG1, subtract and return the result in CTYPE.
     Otherwise, subtract the other way, convert to CTYPE (we know that can't
     overflow) and negate (which can't either).  Special-case a result
     of zero while we're here.  */
  if (tree_int_cst_equal (arg0, arg1))
    return build_int_cst (ctype, 0);
  else if (tree_int_cst_lt (arg1, arg0))
    return fold_convert_loc (loc, ctype,
                             size_binop_loc (loc, MINUS_EXPR, arg0, arg1));
  else
    return size_binop_loc (loc, MINUS_EXPR, build_int_cst (ctype, 0),
                           fold_convert_loc (loc, ctype,
                                             size_binop_loc (loc,
                                                             MINUS_EXPR,
                                                             arg1, arg0)));
}
/* A subroutine of fold_convert_const handling conversions of an
   INTEGER_CST to another integer type.  */

static tree
fold_convert_const_int_from_int (tree type, const_tree arg1)
{
  tree t;

  /* Given an integer constant, make new constant with new type,
     appropriately sign-extended or truncated.  */
  t = force_fit_type_double (type, tree_to_double_int (arg1),
                             !POINTER_TYPE_P (TREE_TYPE (arg1)),
                             (TREE_INT_CST_HIGH (arg1) < 0
                              && (TYPE_UNSIGNED (type)
                                  < TYPE_UNSIGNED (TREE_TYPE (arg1))))
                             | TREE_OVERFLOW (arg1));

  return t;
}
/* A subroutine of fold_convert_const handling conversions of a REAL_CST
   to an integer type.  */

static tree
fold_convert_const_int_from_real (enum tree_code code, tree type, const_tree arg1)
{
  int overflow = 0;
  tree t;

  /* The following code implements the floating point to integer
     conversion rules required by the Java Language Specification,
     that IEEE NaNs are mapped to zero and values that overflow
     the target precision saturate, i.e. values greater than
     INT_MAX are mapped to INT_MAX, and values less than INT_MIN
     are mapped to INT_MIN.  These semantics are allowed by the
     C and C++ standards that simply state that the behavior of
     FP-to-integer conversion is unspecified upon overflow.  */

  double_int val;
  REAL_VALUE_TYPE r;
  REAL_VALUE_TYPE x = TREE_REAL_CST (arg1);

  switch (code)
    {
    case FIX_TRUNC_EXPR:
      real_trunc (&r, VOIDmode, &x);
      break;

    default:
      gcc_unreachable ();
    }

  /* If R is NaN, return zero and show we have an overflow.  */
  if (REAL_VALUE_ISNAN (r))
    {
      overflow = 1;
      val = double_int_zero;
    }

  /* See if R is less than the lower bound or greater than the
     upper bound.  */

  if (! overflow)
    {
      tree lt = TYPE_MIN_VALUE (type);
      REAL_VALUE_TYPE l = real_value_from_int_cst (NULL_TREE, lt);
      if (REAL_VALUES_LESS (r, l))
        {
          overflow = 1;
          val = tree_to_double_int (lt);
        }
    }

  if (! overflow)
    {
      tree ut = TYPE_MAX_VALUE (type);
      if (ut)
        {
          REAL_VALUE_TYPE u = real_value_from_int_cst (NULL_TREE, ut);
          if (REAL_VALUES_LESS (u, r))
            {
              overflow = 1;
              val = tree_to_double_int (ut);
            }
        }
    }

  if (! overflow)
    real_to_integer2 ((HOST_WIDE_INT *) &val.low, &val.high, &r);

  t = force_fit_type_double (type, val, -1, overflow | TREE_OVERFLOW (arg1));
  return t;
}
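
/* Illustrative results of the saturating conversion above for a 32-bit
   signed int target type (not part of the original source):

     (int) 1.0e30   ~~>  2147483647 (INT_MAX), TREE_OVERFLOW set
     (int) -1.0e30  ~~> -2147483648 (INT_MIN), TREE_OVERFLOW set
     (int) NaN      ~~>  0,                    TREE_OVERFLOW set
     (int) 3.75     ~~>  3 (plain truncation, no overflow)  */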
/* A subroutine of fold_convert_const handling conversions of a
   FIXED_CST to an integer type.  */

static tree
fold_convert_const_int_from_fixed (tree type, const_tree arg1)
{
  tree t;
  double_int temp, temp_trunc;
  unsigned int mode;

  /* Right shift FIXED_CST to temp by fbit.  */
  temp = TREE_FIXED_CST (arg1).data;
  mode = TREE_FIXED_CST (arg1).mode;
  if (GET_MODE_FBIT (mode) < 2 * HOST_BITS_PER_WIDE_INT)
    {
      temp = double_int_rshift (temp, GET_MODE_FBIT (mode),
                                HOST_BITS_PER_DOUBLE_INT,
                                SIGNED_FIXED_POINT_MODE_P (mode));

      /* Left shift temp to temp_trunc by fbit.  */
      temp_trunc = double_int_lshift (temp, GET_MODE_FBIT (mode),
                                      HOST_BITS_PER_DOUBLE_INT,
                                      SIGNED_FIXED_POINT_MODE_P (mode));
    }
  else
    {
      temp = double_int_zero;
      temp_trunc = double_int_zero;
    }

  /* If FIXED_CST is negative, we need to round the value toward 0.
     We do that by adding 1 to temp when the fractional bits are not
     all zero.  */
  if (SIGNED_FIXED_POINT_MODE_P (mode)
      && double_int_negative_p (temp_trunc)
      && !double_int_equal_p (TREE_FIXED_CST (arg1).data, temp_trunc))
    temp = double_int_add (temp, double_int_one);

  /* Given a fixed-point constant, make new constant with new type,
     appropriately sign-extended or truncated.  */
  t = force_fit_type_double (type, temp, -1,
                             (double_int_negative_p (temp)
                              && (TYPE_UNSIGNED (type)
                                  < TYPE_UNSIGNED (TREE_TYPE (arg1))))
                             | TREE_OVERFLOW (arg1));

  return t;
}
/* A subroutine of fold_convert_const handling conversions of a REAL_CST
   to another floating point type.  */

static tree
fold_convert_const_real_from_real (tree type, const_tree arg1)
{
  REAL_VALUE_TYPE value;
  tree t;

  real_convert (&value, TYPE_MODE (type), &TREE_REAL_CST (arg1));
  t = build_real (type, value);

  /* If converting an infinity or NAN to a representation that doesn't
     have one, set the overflow bit so that we can produce some kind of
     error message at the appropriate point if necessary.  It's not the
     most user-friendly message, but it's better than nothing.  */
  if (REAL_VALUE_ISINF (TREE_REAL_CST (arg1))
      && !MODE_HAS_INFINITIES (TYPE_MODE (type)))
    TREE_OVERFLOW (t) = 1;
  else if (REAL_VALUE_ISNAN (TREE_REAL_CST (arg1))
           && !MODE_HAS_NANS (TYPE_MODE (type)))
    TREE_OVERFLOW (t) = 1;
  /* Regular overflow, conversion produced an infinity in a mode that
     can't represent them.  */
  else if (!MODE_HAS_INFINITIES (TYPE_MODE (type))
           && REAL_VALUE_ISINF (value)
           && !REAL_VALUE_ISINF (TREE_REAL_CST (arg1)))
    TREE_OVERFLOW (t) = 1;
  else
    TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1);
  return t;
}
/* A subroutine of fold_convert_const handling conversions of a FIXED_CST
   to a floating point type.  */

static tree
fold_convert_const_real_from_fixed (tree type, const_tree arg1)
{
  REAL_VALUE_TYPE value;
  tree t;

  real_convert_from_fixed (&value, TYPE_MODE (type), &TREE_FIXED_CST (arg1));
  t = build_real (type, value);

  TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1);
  return t;
}
/* A subroutine of fold_convert_const handling conversions of a FIXED_CST
   to another fixed-point type.  */

static tree
fold_convert_const_fixed_from_fixed (tree type, const_tree arg1)
{
  FIXED_VALUE_TYPE value;
  tree t;
  bool overflow_p;

  overflow_p = fixed_convert (&value, TYPE_MODE (type), &TREE_FIXED_CST (arg1),
                              TYPE_SATURATING (type));
  t = build_fixed (type, value);

  /* Propagate overflow flags.  */
  if (overflow_p | TREE_OVERFLOW (arg1))
    TREE_OVERFLOW (t) = 1;
  return t;
}
/* A subroutine of fold_convert_const handling conversions of an INTEGER_CST
   to a fixed-point type.  */

static tree
fold_convert_const_fixed_from_int (tree type, const_tree arg1)
{
  FIXED_VALUE_TYPE value;
  tree t;
  bool overflow_p;

  overflow_p = fixed_convert_from_int (&value, TYPE_MODE (type),
                                       TREE_INT_CST (arg1),
                                       TYPE_UNSIGNED (TREE_TYPE (arg1)),
                                       TYPE_SATURATING (type));
  t = build_fixed (type, value);

  /* Propagate overflow flags.  */
  if (overflow_p | TREE_OVERFLOW (arg1))
    TREE_OVERFLOW (t) = 1;
  return t;
}
/* A subroutine of fold_convert_const handling conversions of a REAL_CST
   to a fixed-point type.  */

static tree
fold_convert_const_fixed_from_real (tree type, const_tree arg1)
{
  FIXED_VALUE_TYPE value;
  tree t;
  bool overflow_p;

  overflow_p = fixed_convert_from_real (&value, TYPE_MODE (type),
                                        &TREE_REAL_CST (arg1),
                                        TYPE_SATURATING (type));
  t = build_fixed (type, value);

  /* Propagate overflow flags.  */
  if (overflow_p | TREE_OVERFLOW (arg1))
    TREE_OVERFLOW (t) = 1;
  return t;
}
/* Attempt to fold type conversion operation CODE of expression ARG1 to
   type TYPE.  If no simplification can be done return NULL_TREE.  */

static tree
fold_convert_const (enum tree_code code, tree type, tree arg1)
{
  if (TREE_TYPE (arg1) == type)
    return arg1;

  if (POINTER_TYPE_P (type) || INTEGRAL_TYPE_P (type)
      || TREE_CODE (type) == OFFSET_TYPE)
    {
      if (TREE_CODE (arg1) == INTEGER_CST)
        return fold_convert_const_int_from_int (type, arg1);
      else if (TREE_CODE (arg1) == REAL_CST)
        return fold_convert_const_int_from_real (code, type, arg1);
      else if (TREE_CODE (arg1) == FIXED_CST)
        return fold_convert_const_int_from_fixed (type, arg1);
    }
  else if (TREE_CODE (type) == REAL_TYPE)
    {
      if (TREE_CODE (arg1) == INTEGER_CST)
        return build_real_from_int_cst (type, arg1);
      else if (TREE_CODE (arg1) == REAL_CST)
        return fold_convert_const_real_from_real (type, arg1);
      else if (TREE_CODE (arg1) == FIXED_CST)
        return fold_convert_const_real_from_fixed (type, arg1);
    }
  else if (TREE_CODE (type) == FIXED_POINT_TYPE)
    {
      if (TREE_CODE (arg1) == FIXED_CST)
        return fold_convert_const_fixed_from_fixed (type, arg1);
      else if (TREE_CODE (arg1) == INTEGER_CST)
        return fold_convert_const_fixed_from_int (type, arg1);
      else if (TREE_CODE (arg1) == REAL_CST)
        return fold_convert_const_fixed_from_real (type, arg1);
    }
  return NULL_TREE;
}
/* Construct a vector of zero elements of vector type TYPE.  */

static tree
build_zero_vector (tree type)
{
  tree t;

  t = fold_convert_const (NOP_EXPR, TREE_TYPE (type), integer_zero_node);
  return build_vector_from_val (type, t);
}
/* Returns true, if ARG is convertible to TYPE using a NOP_EXPR.  */

bool
fold_convertible_p (const_tree type, const_tree arg)
{
  tree orig = TREE_TYPE (arg);

  if (type == orig)
    return true;

  if (TREE_CODE (arg) == ERROR_MARK
      || TREE_CODE (type) == ERROR_MARK
      || TREE_CODE (orig) == ERROR_MARK)
    return false;

  if (TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (orig))
    return true;

  switch (TREE_CODE (type))
    {
    case INTEGER_TYPE: case ENUMERAL_TYPE: case BOOLEAN_TYPE:
    case POINTER_TYPE: case REFERENCE_TYPE:
    case OFFSET_TYPE:
      if (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig)
          || TREE_CODE (orig) == OFFSET_TYPE)
        return true;
      return (TREE_CODE (orig) == VECTOR_TYPE
              && tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (orig)));

    case REAL_TYPE:
    case FIXED_POINT_TYPE:
    case COMPLEX_TYPE:
    case VECTOR_TYPE:
    case VOID_TYPE:
      return TREE_CODE (type) == TREE_CODE (orig);

    default:
      return false;
    }
}
/* Convert expression ARG to type TYPE.  Used by the middle-end for
   simple conversions in preference to calling the front-end's convert.  */

tree
fold_convert_loc (location_t loc, tree type, tree arg)
{
  tree orig = TREE_TYPE (arg);
  tree tem;

  if (type == orig)
    return arg;

  if (TREE_CODE (arg) == ERROR_MARK
      || TREE_CODE (type) == ERROR_MARK
      || TREE_CODE (orig) == ERROR_MARK)
    return error_mark_node;

  if (TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (orig))
    return fold_build1_loc (loc, NOP_EXPR, type, arg);

  switch (TREE_CODE (type))
    {
    case POINTER_TYPE:
    case REFERENCE_TYPE:
      /* Handle conversions between pointers to different address spaces.  */
      if (POINTER_TYPE_P (orig)
          && (TYPE_ADDR_SPACE (TREE_TYPE (type))
              != TYPE_ADDR_SPACE (TREE_TYPE (orig))))
        return fold_build1_loc (loc, ADDR_SPACE_CONVERT_EXPR, type, arg);
      /* fall through */

    case INTEGER_TYPE: case ENUMERAL_TYPE: case BOOLEAN_TYPE:
    case OFFSET_TYPE:
      if (TREE_CODE (arg) == INTEGER_CST)
        {
          tem = fold_convert_const (NOP_EXPR, type, arg);
          if (tem != NULL_TREE)
            return tem;
        }
      if (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig)
          || TREE_CODE (orig) == OFFSET_TYPE)
        return fold_build1_loc (loc, NOP_EXPR, type, arg);
      if (TREE_CODE (orig) == COMPLEX_TYPE)
        return fold_convert_loc (loc, type,
                                 fold_build1_loc (loc, REALPART_EXPR,
                                                  TREE_TYPE (orig), arg));
      gcc_assert (TREE_CODE (orig) == VECTOR_TYPE
                  && tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (orig)));
      return fold_build1_loc (loc, NOP_EXPR, type, arg);

    case REAL_TYPE:
      if (TREE_CODE (arg) == INTEGER_CST)
        {
          tem = fold_convert_const (FLOAT_EXPR, type, arg);
          if (tem != NULL_TREE)
            return tem;
        }
      else if (TREE_CODE (arg) == REAL_CST)
        {
          tem = fold_convert_const (NOP_EXPR, type, arg);
          if (tem != NULL_TREE)
            return tem;
        }
      else if (TREE_CODE (arg) == FIXED_CST)
        {
          tem = fold_convert_const (FIXED_CONVERT_EXPR, type, arg);
          if (tem != NULL_TREE)
            return tem;
        }

      switch (TREE_CODE (orig))
        {
        case INTEGER_TYPE:
        case BOOLEAN_TYPE: case ENUMERAL_TYPE:
        case POINTER_TYPE: case REFERENCE_TYPE:
          return fold_build1_loc (loc, FLOAT_EXPR, type, arg);

        case REAL_TYPE:
          return fold_build1_loc (loc, NOP_EXPR, type, arg);

        case FIXED_POINT_TYPE:
          return fold_build1_loc (loc, FIXED_CONVERT_EXPR, type, arg);

        case COMPLEX_TYPE:
          tem = fold_build1_loc (loc, REALPART_EXPR, TREE_TYPE (orig), arg);
          return fold_convert_loc (loc, type, tem);

        default:
          gcc_unreachable ();
        }

    case FIXED_POINT_TYPE:
      if (TREE_CODE (arg) == FIXED_CST || TREE_CODE (arg) == INTEGER_CST
          || TREE_CODE (arg) == REAL_CST)
        {
          tem = fold_convert_const (FIXED_CONVERT_EXPR, type, arg);
          if (tem != NULL_TREE)
            goto fold_convert_exit;
        }

      switch (TREE_CODE (orig))
        {
        case FIXED_POINT_TYPE:
        case INTEGER_TYPE:
        case ENUMERAL_TYPE:
        case BOOLEAN_TYPE:
        case REAL_TYPE:
          return fold_build1_loc (loc, FIXED_CONVERT_EXPR, type, arg);

        case COMPLEX_TYPE:
          tem = fold_build1_loc (loc, REALPART_EXPR, TREE_TYPE (orig), arg);
          return fold_convert_loc (loc, type, tem);

        default:
          gcc_unreachable ();
        }

    case COMPLEX_TYPE:
      switch (TREE_CODE (orig))
        {
        case INTEGER_TYPE:
        case BOOLEAN_TYPE: case ENUMERAL_TYPE:
        case POINTER_TYPE: case REFERENCE_TYPE:
        case REAL_TYPE:
        case FIXED_POINT_TYPE:
          return fold_build2_loc (loc, COMPLEX_EXPR, type,
                                  fold_convert_loc (loc, TREE_TYPE (type), arg),
                                  fold_convert_loc (loc, TREE_TYPE (type),
                                                    integer_zero_node));
        case COMPLEX_TYPE:
          {
            tree rpart, ipart;

            if (TREE_CODE (arg) == COMPLEX_EXPR)
              {
                rpart = fold_convert_loc (loc, TREE_TYPE (type),
                                          TREE_OPERAND (arg, 0));
                ipart = fold_convert_loc (loc, TREE_TYPE (type),
                                          TREE_OPERAND (arg, 1));
                return fold_build2_loc (loc, COMPLEX_EXPR, type, rpart, ipart);
              }

            arg = save_expr (arg);
            rpart = fold_build1_loc (loc, REALPART_EXPR, TREE_TYPE (orig), arg);
            ipart = fold_build1_loc (loc, IMAGPART_EXPR, TREE_TYPE (orig), arg);
            rpart = fold_convert_loc (loc, TREE_TYPE (type), rpart);
            ipart = fold_convert_loc (loc, TREE_TYPE (type), ipart);
            return fold_build2_loc (loc, COMPLEX_EXPR, type, rpart, ipart);
          }

        default:
          gcc_unreachable ();
        }

    case VECTOR_TYPE:
      if (integer_zerop (arg))
        return build_zero_vector (type);
      gcc_assert (tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (orig)));
      gcc_assert (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig)
                  || TREE_CODE (orig) == VECTOR_TYPE);
      return fold_build1_loc (loc, VIEW_CONVERT_EXPR, type, arg);

    case VOID_TYPE:
      tem = fold_ignored_result (arg);
      return fold_build1_loc (loc, NOP_EXPR, type, tem);

    default:
      gcc_unreachable ();
    }
 fold_convert_exit:
  protected_set_expr_location_unshare (tem, loc);
  return tem;
}
2027 /* Return false if expr can be assumed not to be an lvalue, true
2028 otherwise. */
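/* For example, a VAR_DECL or a COMPONENT_REF may be an lvalue and so must
   be wrapped, whereas an arithmetic expression such as a + b can never be
   one; non_lvalue_loc below therefore returns such operands unchanged.  */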
2030 static bool
2031 maybe_lvalue_p (const_tree x)
2033 /* We only need to wrap lvalue tree codes. */
2034 switch (TREE_CODE (x))
2036 case VAR_DECL:
2037 case PARM_DECL:
2038 case RESULT_DECL:
2039 case LABEL_DECL:
2040 case FUNCTION_DECL:
2041 case SSA_NAME:
2043 case COMPONENT_REF:
2044 case MEM_REF:
2045 case INDIRECT_REF:
2046 case ARRAY_REF:
2047 case ARRAY_RANGE_REF:
2048 case BIT_FIELD_REF:
2049 case OBJ_TYPE_REF:
2051 case REALPART_EXPR:
2052 case IMAGPART_EXPR:
2053 case PREINCREMENT_EXPR:
2054 case PREDECREMENT_EXPR:
2055 case SAVE_EXPR:
2056 case TRY_CATCH_EXPR:
2057 case WITH_CLEANUP_EXPR:
2058 case COMPOUND_EXPR:
2059 case MODIFY_EXPR:
2060 case TARGET_EXPR:
2061 case COND_EXPR:
2062 case BIND_EXPR:
2063 break;
2065 default:
2066 /* Assume the worst for front-end tree codes. */
2067 if ((int)TREE_CODE (x) >= NUM_TREE_CODES)
2068 break;
2069 return false;
2072 return true;
2075 /* Return an expr equal to X but certainly not valid as an lvalue. */
2077 tree
2078 non_lvalue_loc (location_t loc, tree x)
2080 /* While we are in GIMPLE, NON_LVALUE_EXPR doesn't mean anything to
2081 us. */
2082 if (in_gimple_form)
2083 return x;
2085 if (! maybe_lvalue_p (x))
2086 return x;
2087 return build1_loc (loc, NON_LVALUE_EXPR, TREE_TYPE (x), x);
2090 /* Nonzero means lvalues are limited to those valid in pedantic ANSI C.
2091 Zero means allow extended lvalues. */
2093 int pedantic_lvalues;
2095 /* When pedantic, return an expr equal to X but certainly not valid as a
2096 pedantic lvalue. Otherwise, return X. */
2098 static tree
2099 pedantic_non_lvalue_loc (location_t loc, tree x)
2101 if (pedantic_lvalues)
2102 return non_lvalue_loc (loc, x);
2104 return protected_set_expr_location_unshare (x, loc);
2107 /* Given a tree comparison code, return the code that is the logical inverse
2108 of the given code. It is not safe to do this for floating-point
2109 comparisons, except for NE_EXPR and EQ_EXPR, so we receive an HONOR_NANS
2110 flag as well: if reversing the comparison is unsafe, return ERROR_MARK. */
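/* For example, when NaNs are honored, LT_EXPR inverts to UNGE_EXPR, so the
   inverted test is also true when either operand is a NaN; without NaNs it
   inverts to the plain GE_EXPR.  With both NaNs and -ftrapping-math in
   effect, no inversion is attempted at all.  */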
2112 enum tree_code
2113 invert_tree_comparison (enum tree_code code, bool honor_nans)
2115 if (honor_nans && flag_trapping_math)
2116 return ERROR_MARK;
2118 switch (code)
2120 case EQ_EXPR:
2121 return NE_EXPR;
2122 case NE_EXPR:
2123 return EQ_EXPR;
2124 case GT_EXPR:
2125 return honor_nans ? UNLE_EXPR : LE_EXPR;
2126 case GE_EXPR:
2127 return honor_nans ? UNLT_EXPR : LT_EXPR;
2128 case LT_EXPR:
2129 return honor_nans ? UNGE_EXPR : GE_EXPR;
2130 case LE_EXPR:
2131 return honor_nans ? UNGT_EXPR : GT_EXPR;
2132 case LTGT_EXPR:
2133 return UNEQ_EXPR;
2134 case UNEQ_EXPR:
2135 return LTGT_EXPR;
2136 case UNGT_EXPR:
2137 return LE_EXPR;
2138 case UNGE_EXPR:
2139 return LT_EXPR;
2140 case UNLT_EXPR:
2141 return GE_EXPR;
2142 case UNLE_EXPR:
2143 return GT_EXPR;
2144 case ORDERED_EXPR:
2145 return UNORDERED_EXPR;
2146 case UNORDERED_EXPR:
2147 return ORDERED_EXPR;
2148 default:
2149 gcc_unreachable ();
2153 /* Similar, but return the comparison that results if the operands are
2154 swapped. This is safe for floating-point. */
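/* For example, a < b tests the same condition as b > a, so LT_EXPR swaps
   to GT_EXPR; symmetric codes such as EQ_EXPR, NE_EXPR and UNORDERED_EXPR
   are returned unchanged.  */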
2156 enum tree_code
2157 swap_tree_comparison (enum tree_code code)
2159 switch (code)
2161 case EQ_EXPR:
2162 case NE_EXPR:
2163 case ORDERED_EXPR:
2164 case UNORDERED_EXPR:
2165 case LTGT_EXPR:
2166 case UNEQ_EXPR:
2167 return code;
2168 case GT_EXPR:
2169 return LT_EXPR;
2170 case GE_EXPR:
2171 return LE_EXPR;
2172 case LT_EXPR:
2173 return GT_EXPR;
2174 case LE_EXPR:
2175 return GE_EXPR;
2176 case UNGT_EXPR:
2177 return UNLT_EXPR;
2178 case UNGE_EXPR:
2179 return UNLE_EXPR;
2180 case UNLT_EXPR:
2181 return UNGT_EXPR;
2182 case UNLE_EXPR:
2183 return UNGE_EXPR;
2184 default:
2185 gcc_unreachable ();
2190 /* Convert a comparison tree code from an enum tree_code representation
2191 into a compcode bit-based encoding. This function is the inverse of
2192 compcode_to_comparison. */
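/* In the bit encoding, bit 0 stands for "less", bit 1 for "equal", bit 2
   for "greater" and bit 3 for "unordered", so e.g. COMPCODE_LE (3) is
   COMPCODE_LT | COMPCODE_EQ and COMPCODE_NE (13) is
   COMPCODE_UNORD | COMPCODE_GT | COMPCODE_LT.  */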
2194 static enum comparison_code
2195 comparison_to_compcode (enum tree_code code)
2197 switch (code)
2199 case LT_EXPR:
2200 return COMPCODE_LT;
2201 case EQ_EXPR:
2202 return COMPCODE_EQ;
2203 case LE_EXPR:
2204 return COMPCODE_LE;
2205 case GT_EXPR:
2206 return COMPCODE_GT;
2207 case NE_EXPR:
2208 return COMPCODE_NE;
2209 case GE_EXPR:
2210 return COMPCODE_GE;
2211 case ORDERED_EXPR:
2212 return COMPCODE_ORD;
2213 case UNORDERED_EXPR:
2214 return COMPCODE_UNORD;
2215 case UNLT_EXPR:
2216 return COMPCODE_UNLT;
2217 case UNEQ_EXPR:
2218 return COMPCODE_UNEQ;
2219 case UNLE_EXPR:
2220 return COMPCODE_UNLE;
2221 case UNGT_EXPR:
2222 return COMPCODE_UNGT;
2223 case LTGT_EXPR:
2224 return COMPCODE_LTGT;
2225 case UNGE_EXPR:
2226 return COMPCODE_UNGE;
2227 default:
2228 gcc_unreachable ();
2232 /* Convert a compcode bit-based encoding of a comparison operator back
2233 to GCC's enum tree_code representation. This function is the
2234 inverse of comparison_to_compcode. */
2236 static enum tree_code
2237 compcode_to_comparison (enum comparison_code code)
2239 switch (code)
2241 case COMPCODE_LT:
2242 return LT_EXPR;
2243 case COMPCODE_EQ:
2244 return EQ_EXPR;
2245 case COMPCODE_LE:
2246 return LE_EXPR;
2247 case COMPCODE_GT:
2248 return GT_EXPR;
2249 case COMPCODE_NE:
2250 return NE_EXPR;
2251 case COMPCODE_GE:
2252 return GE_EXPR;
2253 case COMPCODE_ORD:
2254 return ORDERED_EXPR;
2255 case COMPCODE_UNORD:
2256 return UNORDERED_EXPR;
2257 case COMPCODE_UNLT:
2258 return UNLT_EXPR;
2259 case COMPCODE_UNEQ:
2260 return UNEQ_EXPR;
2261 case COMPCODE_UNLE:
2262 return UNLE_EXPR;
2263 case COMPCODE_UNGT:
2264 return UNGT_EXPR;
2265 case COMPCODE_LTGT:
2266 return LTGT_EXPR;
2267 case COMPCODE_UNGE:
2268 return UNGE_EXPR;
2269 default:
2270 gcc_unreachable ();
2274 /* Return a tree for the comparison which is the combination of
2275 doing the AND or OR (depending on CODE) of the two operations LCODE
2276 and RCODE on the identical operands LL_ARG and LR_ARG. Take into account
2277 the possibility of trapping if the mode has NaNs, and return NULL_TREE
2278 if this makes the transformation invalid. */
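/* For example, combining (a < b) || (a == b) ORs COMPCODE_LT with
   COMPCODE_EQ to obtain COMPCODE_LE, yielding a <= b, while combining
   (a < b) && (a > b) ANDs them to COMPCODE_FALSE, which folds to the
   constant false.  */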
2280 tree
2281 combine_comparisons (location_t loc,
2282 enum tree_code code, enum tree_code lcode,
2283 enum tree_code rcode, tree truth_type,
2284 tree ll_arg, tree lr_arg)
2286 bool honor_nans = HONOR_NANS (TYPE_MODE (TREE_TYPE (ll_arg)));
2287 enum comparison_code lcompcode = comparison_to_compcode (lcode);
2288 enum comparison_code rcompcode = comparison_to_compcode (rcode);
2289 int compcode;
2291 switch (code)
2293 case TRUTH_AND_EXPR: case TRUTH_ANDIF_EXPR:
2294 compcode = lcompcode & rcompcode;
2295 break;
2297 case TRUTH_OR_EXPR: case TRUTH_ORIF_EXPR:
2298 compcode = lcompcode | rcompcode;
2299 break;
2301 default:
2302 return NULL_TREE;
2305 if (!honor_nans)
2307 /* Eliminate unordered comparisons, as well as LTGT and ORD
2308 which are not used unless the mode has NaNs. */
2309 compcode &= ~COMPCODE_UNORD;
2310 if (compcode == COMPCODE_LTGT)
2311 compcode = COMPCODE_NE;
2312 else if (compcode == COMPCODE_ORD)
2313 compcode = COMPCODE_TRUE;
2315 else if (flag_trapping_math)
2317 /* Check that the original operation and the optimized ones will trap
2318 under the same condition. */
2319 bool ltrap = (lcompcode & COMPCODE_UNORD) == 0
2320 && (lcompcode != COMPCODE_EQ)
2321 && (lcompcode != COMPCODE_ORD);
2322 bool rtrap = (rcompcode & COMPCODE_UNORD) == 0
2323 && (rcompcode != COMPCODE_EQ)
2324 && (rcompcode != COMPCODE_ORD);
2325 bool trap = (compcode & COMPCODE_UNORD) == 0
2326 && (compcode != COMPCODE_EQ)
2327 && (compcode != COMPCODE_ORD);
2329 /* In a short-circuited boolean expression the LHS might be
2330 such that the RHS, if evaluated, will never trap. For
2331 example, in ORD (x, y) && (x < y), we evaluate the RHS only
2332 if neither x nor y is NaN. (This is a mixed blessing: for
2333 example, the expression above will never trap, hence
2334 optimizing it to x < y would be invalid). */
2335 if ((code == TRUTH_ORIF_EXPR && (lcompcode & COMPCODE_UNORD))
2336 || (code == TRUTH_ANDIF_EXPR && !(lcompcode & COMPCODE_UNORD)))
2337 rtrap = false;
2339 /* If the comparison was short-circuited, and only the RHS
2340 trapped, we may now generate a spurious trap. */
2341 if (rtrap && !ltrap
2342 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR))
2343 return NULL_TREE;
2345 /* If we changed the conditions that cause a trap, we lose. */
2346 if ((ltrap || rtrap) != trap)
2347 return NULL_TREE;
2350 if (compcode == COMPCODE_TRUE)
2351 return constant_boolean_node (true, truth_type);
2352 else if (compcode == COMPCODE_FALSE)
2353 return constant_boolean_node (false, truth_type);
2354 else
2356 enum tree_code tcode;
2358 tcode = compcode_to_comparison ((enum comparison_code) compcode);
2359 return fold_build2_loc (loc, tcode, truth_type, ll_arg, lr_arg);
2363 /* Return nonzero if two operands (typically of the same tree node)
2364 are necessarily equal. If either argument has side-effects this
2365 function returns zero. FLAGS modifies behavior as follows:
2367 If OEP_ONLY_CONST is set, only return nonzero for constants.
2368 This function tests whether the operands are indistinguishable;
2369 it does not test whether they are equal using C's == operation.
2370 The distinction is important for IEEE floating point, because
2371 (1) -0.0 and 0.0 are distinguishable, but -0.0==0.0, and
2372 (2) two NaNs may be indistinguishable, but NaN!=NaN.
2374 If OEP_ONLY_CONST is unset, a VAR_DECL is considered equal to itself
2375 even though it may hold multiple values during a function.
2376 This is because a GCC tree node guarantees that nothing else is
2377 executed between the evaluation of its "operands" (which may often
2378 be evaluated in arbitrary order). Hence if the operands themselves
2379 don't side-effect, the VAR_DECLs, PARM_DECLs etc... must hold the
2380 same value in each operand/subexpression. Hence leaving OEP_ONLY_CONST
2381 unset means assuming isochronic (or instantaneous) tree equivalence.
2382 Unless comparing arbitrary expression trees, such as from different
2383 statements, this flag can usually be left unset.
2385 If OEP_PURE_SAME is set, then pure functions with identical arguments
2386 are considered the same. It is used when the caller has other ways
2387 to ensure that global memory is unchanged in between. */
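/* For example, the REAL_CSTs 0.0 and -0.0 are not operand_equal_p when
   signed zeros are honored, even though 0.0 == -0.0 holds in C: the test
   here is for indistinguishability, not C equality.  */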
2389 int
2390 operand_equal_p (const_tree arg0, const_tree arg1, unsigned int flags)
2392 /* If either is ERROR_MARK, they aren't equal. */
2393 if (TREE_CODE (arg0) == ERROR_MARK || TREE_CODE (arg1) == ERROR_MARK
2394 || TREE_TYPE (arg0) == error_mark_node
2395 || TREE_TYPE (arg1) == error_mark_node)
2396 return 0;
2398 /* Similarly, if either does not have a type (like a released SSA name),
2399 they aren't equal. */
2400 if (!TREE_TYPE (arg0) || !TREE_TYPE (arg1))
2401 return 0;
2403 /* Check equality of integer constants before bailing out due to
2404 precision differences. */
2405 if (TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
2406 return tree_int_cst_equal (arg0, arg1);
2408 /* If both types don't have the same signedness, then we can't consider
2409 them equal. We must check this before the STRIP_NOPS calls
2410 because they may change the signedness of the arguments. As pointers
2411 strictly don't have a signedness, require either two pointers or
2412 two non-pointers as well. */
2413 if (TYPE_UNSIGNED (TREE_TYPE (arg0)) != TYPE_UNSIGNED (TREE_TYPE (arg1))
2414 || POINTER_TYPE_P (TREE_TYPE (arg0)) != POINTER_TYPE_P (TREE_TYPE (arg1)))
2415 return 0;
2417 /* We cannot consider pointers to different address spaces equal. */
2418 if (POINTER_TYPE_P (TREE_TYPE (arg0)) && POINTER_TYPE_P (TREE_TYPE (arg1))
2419 && (TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (arg0)))
2420 != TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (arg1)))))
2421 return 0;
2423 /* If both types don't have the same precision, then it is not safe
2424 to strip NOPs. */
2425 if (TYPE_PRECISION (TREE_TYPE (arg0)) != TYPE_PRECISION (TREE_TYPE (arg1)))
2426 return 0;
2428 STRIP_NOPS (arg0);
2429 STRIP_NOPS (arg1);
2431 /* In case both args are comparisons but with different comparison
2432 code, try to swap the comparison operands of one arg to produce
2433 a match and compare that variant. */
2434 if (TREE_CODE (arg0) != TREE_CODE (arg1)
2435 && COMPARISON_CLASS_P (arg0)
2436 && COMPARISON_CLASS_P (arg1))
2438 enum tree_code swap_code = swap_tree_comparison (TREE_CODE (arg1));
2440 if (TREE_CODE (arg0) == swap_code)
2441 return operand_equal_p (TREE_OPERAND (arg0, 0),
2442 TREE_OPERAND (arg1, 1), flags)
2443 && operand_equal_p (TREE_OPERAND (arg0, 1),
2444 TREE_OPERAND (arg1, 0), flags);
2447 if (TREE_CODE (arg0) != TREE_CODE (arg1)
2448 /* This is needed for conversions and for COMPONENT_REF.
2449 Might as well play it safe and always test this. */
2450 || TREE_CODE (TREE_TYPE (arg0)) == ERROR_MARK
2451 || TREE_CODE (TREE_TYPE (arg1)) == ERROR_MARK
2452 || TYPE_MODE (TREE_TYPE (arg0)) != TYPE_MODE (TREE_TYPE (arg1)))
2453 return 0;
2455 /* If ARG0 and ARG1 are the same SAVE_EXPR, they are necessarily equal.
2456 We don't care about side effects in that case because the SAVE_EXPR
2457 takes care of that for us. In all other cases, two expressions are
2458 equal if they have no side effects. If we have two identical
2459 expressions with side effects that should be treated the same due
2460 to the only side effects being identical SAVE_EXPR's, that will
2461 be detected in the recursive calls below.
2462 If we are taking an invariant address of two identical objects
2463 they are necessarily equal as well. */
2464 if (arg0 == arg1 && ! (flags & OEP_ONLY_CONST)
2465 && (TREE_CODE (arg0) == SAVE_EXPR
2466 || (flags & OEP_CONSTANT_ADDRESS_OF)
2467 || (! TREE_SIDE_EFFECTS (arg0) && ! TREE_SIDE_EFFECTS (arg1))))
2468 return 1;
2470 /* Next handle constant cases, those for which we can return 1 even
2471 if ONLY_CONST is set. */
2472 if (TREE_CONSTANT (arg0) && TREE_CONSTANT (arg1))
2473 switch (TREE_CODE (arg0))
2475 case INTEGER_CST:
2476 return tree_int_cst_equal (arg0, arg1);
2478 case FIXED_CST:
2479 return FIXED_VALUES_IDENTICAL (TREE_FIXED_CST (arg0),
2480 TREE_FIXED_CST (arg1));
2482 case REAL_CST:
2483 if (REAL_VALUES_IDENTICAL (TREE_REAL_CST (arg0),
2484 TREE_REAL_CST (arg1)))
2485 return 1;
2488 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0))))
2490 /* If we do not distinguish between signed and unsigned zero,
2491 consider them equal. */
2492 if (real_zerop (arg0) && real_zerop (arg1))
2493 return 1;
2495 return 0;
2497 case VECTOR_CST:
2499 tree v1, v2;
2501 v1 = TREE_VECTOR_CST_ELTS (arg0);
2502 v2 = TREE_VECTOR_CST_ELTS (arg1);
2503 while (v1 && v2)
2505 if (!operand_equal_p (TREE_VALUE (v1), TREE_VALUE (v2),
2506 flags))
2507 return 0;
2508 v1 = TREE_CHAIN (v1);
2509 v2 = TREE_CHAIN (v2);
2512 return v1 == v2;
2515 case COMPLEX_CST:
2516 return (operand_equal_p (TREE_REALPART (arg0), TREE_REALPART (arg1),
2517 flags)
2518 && operand_equal_p (TREE_IMAGPART (arg0), TREE_IMAGPART (arg1),
2519 flags));
2521 case STRING_CST:
2522 return (TREE_STRING_LENGTH (arg0) == TREE_STRING_LENGTH (arg1)
2523 && ! memcmp (TREE_STRING_POINTER (arg0),
2524 TREE_STRING_POINTER (arg1),
2525 TREE_STRING_LENGTH (arg0)));
2527 case ADDR_EXPR:
2528 return operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 0),
2529 TREE_CONSTANT (arg0) && TREE_CONSTANT (arg1)
2530 ? OEP_CONSTANT_ADDRESS_OF : 0);
2531 default:
2532 break;
2535 if (flags & OEP_ONLY_CONST)
2536 return 0;
2538 /* Define macros to test an operand from arg0 and arg1 for equality and a
2539 variant that allows null and views null as being different from any
2540 non-null value. In the latter case, if either is null, then both
2541 must be; otherwise, do the normal comparison. */
2542 #define OP_SAME(N) operand_equal_p (TREE_OPERAND (arg0, N), \
2543 TREE_OPERAND (arg1, N), flags)
2545 #define OP_SAME_WITH_NULL(N) \
2546 ((!TREE_OPERAND (arg0, N) || !TREE_OPERAND (arg1, N)) \
2547 ? TREE_OPERAND (arg0, N) == TREE_OPERAND (arg1, N) : OP_SAME (N))
2549 switch (TREE_CODE_CLASS (TREE_CODE (arg0)))
2551 case tcc_unary:
2552 /* Two conversions are equal only if signedness and modes match. */
2553 switch (TREE_CODE (arg0))
2555 CASE_CONVERT:
2556 case FIX_TRUNC_EXPR:
2557 if (TYPE_UNSIGNED (TREE_TYPE (arg0))
2558 != TYPE_UNSIGNED (TREE_TYPE (arg1)))
2559 return 0;
2560 break;
2561 default:
2562 break;
2565 return OP_SAME (0);
2568 case tcc_comparison:
2569 case tcc_binary:
2570 if (OP_SAME (0) && OP_SAME (1))
2571 return 1;
2573 /* For commutative ops, allow the other order. */
2574 return (commutative_tree_code (TREE_CODE (arg0))
2575 && operand_equal_p (TREE_OPERAND (arg0, 0),
2576 TREE_OPERAND (arg1, 1), flags)
2577 && operand_equal_p (TREE_OPERAND (arg0, 1),
2578 TREE_OPERAND (arg1, 0), flags));
2580 case tcc_reference:
2581 /* If either of the pointer (or reference) expressions we are
2582 dereferencing contain a side effect, these cannot be equal. */
2583 if (TREE_SIDE_EFFECTS (arg0)
2584 || TREE_SIDE_EFFECTS (arg1))
2585 return 0;
2587 switch (TREE_CODE (arg0))
2589 case INDIRECT_REF:
2590 case REALPART_EXPR:
2591 case IMAGPART_EXPR:
2592 return OP_SAME (0);
2594 case MEM_REF:
2595 /* Require equal access sizes, and similar pointer types.
2596 We can have incomplete types for array references of
2597 variable-sized arrays from the Fortran frontend
2598 though. */
2599 return ((TYPE_SIZE (TREE_TYPE (arg0)) == TYPE_SIZE (TREE_TYPE (arg1))
2600 || (TYPE_SIZE (TREE_TYPE (arg0))
2601 && TYPE_SIZE (TREE_TYPE (arg1))
2602 && operand_equal_p (TYPE_SIZE (TREE_TYPE (arg0)),
2603 TYPE_SIZE (TREE_TYPE (arg1)), flags)))
2604 && (TYPE_MAIN_VARIANT (TREE_TYPE (TREE_OPERAND (arg0, 1)))
2605 == TYPE_MAIN_VARIANT (TREE_TYPE (TREE_OPERAND (arg1, 1))))
2606 && OP_SAME (0) && OP_SAME (1));
2608 case ARRAY_REF:
2609 case ARRAY_RANGE_REF:
2610 /* Operands 2 and 3 may be null.
2611 Compare the array index by value if it is constant first as we
2612 may have different types but same value here. */
2613 return (OP_SAME (0)
2614 && (tree_int_cst_equal (TREE_OPERAND (arg0, 1),
2615 TREE_OPERAND (arg1, 1))
2616 || OP_SAME (1))
2617 && OP_SAME_WITH_NULL (2)
2618 && OP_SAME_WITH_NULL (3));
2620 case COMPONENT_REF:
2621 /* Handle operand 2 the same as for ARRAY_REF. Operand 0
2622 may be NULL when we're called to compare MEM_EXPRs. */
2623 return OP_SAME_WITH_NULL (0)
2624 && OP_SAME (1)
2625 && OP_SAME_WITH_NULL (2);
2627 case BIT_FIELD_REF:
2628 return OP_SAME (0) && OP_SAME (1) && OP_SAME (2);
2630 default:
2631 return 0;
2634 case tcc_expression:
2635 switch (TREE_CODE (arg0))
2637 case ADDR_EXPR:
2638 case TRUTH_NOT_EXPR:
2639 return OP_SAME (0);
2641 case TRUTH_ANDIF_EXPR:
2642 case TRUTH_ORIF_EXPR:
2643 return OP_SAME (0) && OP_SAME (1);
2645 case FMA_EXPR:
2646 case WIDEN_MULT_PLUS_EXPR:
2647 case WIDEN_MULT_MINUS_EXPR:
2648 if (!OP_SAME (2))
2649 return 0;
2650 /* The multiplication operands are commutative. */
2651 /* FALLTHRU */
2653 case TRUTH_AND_EXPR:
2654 case TRUTH_OR_EXPR:
2655 case TRUTH_XOR_EXPR:
2656 if (OP_SAME (0) && OP_SAME (1))
2657 return 1;
2659 /* Otherwise take into account this is a commutative operation. */
2660 return (operand_equal_p (TREE_OPERAND (arg0, 0),
2661 TREE_OPERAND (arg1, 1), flags)
2662 && operand_equal_p (TREE_OPERAND (arg0, 1),
2663 TREE_OPERAND (arg1, 0), flags));
2665 case COND_EXPR:
2666 case VEC_COND_EXPR:
2667 case DOT_PROD_EXPR:
2668 return OP_SAME (0) && OP_SAME (1) && OP_SAME (2);
2670 default:
2671 return 0;
2674 case tcc_vl_exp:
2675 switch (TREE_CODE (arg0))
2677 case CALL_EXPR:
2678 /* If the CALL_EXPRs call different functions, then they
2679 clearly can not be equal. */
2680 if (! operand_equal_p (CALL_EXPR_FN (arg0), CALL_EXPR_FN (arg1),
2681 flags))
2682 return 0;
2685 unsigned int cef = call_expr_flags (arg0);
2686 if (flags & OEP_PURE_SAME)
2687 cef &= ECF_CONST | ECF_PURE;
2688 else
2689 cef &= ECF_CONST;
2690 if (!cef)
2691 return 0;
2694 /* Now see if all the arguments are the same. */
2696 const_call_expr_arg_iterator iter0, iter1;
2697 const_tree a0, a1;
2698 for (a0 = first_const_call_expr_arg (arg0, &iter0),
2699 a1 = first_const_call_expr_arg (arg1, &iter1);
2700 a0 && a1;
2701 a0 = next_const_call_expr_arg (&iter0),
2702 a1 = next_const_call_expr_arg (&iter1))
2703 if (! operand_equal_p (a0, a1, flags))
2704 return 0;
2706 /* If we get here and both argument lists are exhausted
2707 then the CALL_EXPRs are equal. */
2708 return ! (a0 || a1);
2710 default:
2711 return 0;
2714 case tcc_declaration:
2715 /* Consider __builtin_sqrt equal to sqrt. */
2716 return (TREE_CODE (arg0) == FUNCTION_DECL
2717 && DECL_BUILT_IN (arg0) && DECL_BUILT_IN (arg1)
2718 && DECL_BUILT_IN_CLASS (arg0) == DECL_BUILT_IN_CLASS (arg1)
2719 && DECL_FUNCTION_CODE (arg0) == DECL_FUNCTION_CODE (arg1));
2721 default:
2722 return 0;
2725 #undef OP_SAME
2726 #undef OP_SAME_WITH_NULL
2729 /* Similar to operand_equal_p, but see if ARG0 might have been made by
2730 shorten_compare from ARG1 when ARG1 was being compared with OTHER.
2732 When in doubt, return 0. */
2734 static int
2735 operand_equal_for_comparison_p (tree arg0, tree arg1, tree other)
2737 int unsignedp1, unsignedpo;
2738 tree primarg0, primarg1, primother;
2739 unsigned int correct_width;
2741 if (operand_equal_p (arg0, arg1, 0))
2742 return 1;
2744 if (! INTEGRAL_TYPE_P (TREE_TYPE (arg0))
2745 || ! INTEGRAL_TYPE_P (TREE_TYPE (arg1)))
2746 return 0;
2748 /* Discard any conversions that don't change the modes of ARG0 and ARG1
2749 and see if the inner values are the same. This removes any
2750 signedness comparison, which doesn't matter here. */
2751 primarg0 = arg0, primarg1 = arg1;
2752 STRIP_NOPS (primarg0);
2753 STRIP_NOPS (primarg1);
2754 if (operand_equal_p (primarg0, primarg1, 0))
2755 return 1;
2757 /* Duplicate what shorten_compare does to ARG1 and see if that gives the
2758 actual comparison operand, ARG0.
2760 First throw away any conversions to wider types
2761 already present in the operands. */
2763 primarg1 = get_narrower (arg1, &unsignedp1);
2764 primother = get_narrower (other, &unsignedpo);
2766 correct_width = TYPE_PRECISION (TREE_TYPE (arg1));
2767 if (unsignedp1 == unsignedpo
2768 && TYPE_PRECISION (TREE_TYPE (primarg1)) < correct_width
2769 && TYPE_PRECISION (TREE_TYPE (primother)) < correct_width)
2771 tree type = TREE_TYPE (arg0);
2773 /* Make sure shorter operand is extended the right way
2774 to match the longer operand. */
2775 primarg1 = fold_convert (signed_or_unsigned_type_for
2776 (unsignedp1, TREE_TYPE (primarg1)), primarg1);
2778 if (operand_equal_p (arg0, fold_convert (type, primarg1), 0))
2779 return 1;
2782 return 0;
2785 /* See if ARG is an expression that is either a comparison or is performing
2786 arithmetic on comparisons. The comparisons must only be comparing
2787 two different values, which will be stored in *CVAL1 and *CVAL2; if
2788 they are nonzero it means that some operands have already been found.
2789 No variables may be used anywhere else in the expression except in the
2790 comparisons. If SAVE_P is true it means we removed a SAVE_EXPR around
2791 the expression and save_expr needs to be called with CVAL1 and CVAL2.
2793 If this is true, return 1. Otherwise, return zero. */
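/* For example, for ARG of the form (x < y) || (x == y) this returns 1
   with *CVAL1 set to x and *CVAL2 set to y, whereas (x < y) && (y < z)
   returns 0 because three distinct values take part in the comparisons.  */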
2795 static int
2796 twoval_comparison_p (tree arg, tree *cval1, tree *cval2, int *save_p)
2798 enum tree_code code = TREE_CODE (arg);
2799 enum tree_code_class tclass = TREE_CODE_CLASS (code);
2801 /* We can handle some of the tcc_expression cases here. */
2802 if (tclass == tcc_expression && code == TRUTH_NOT_EXPR)
2803 tclass = tcc_unary;
2804 else if (tclass == tcc_expression
2805 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR
2806 || code == COMPOUND_EXPR))
2807 tclass = tcc_binary;
2809 else if (tclass == tcc_expression && code == SAVE_EXPR
2810 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (arg, 0)))
2812 /* If we've already found a CVAL1 or CVAL2, this expression is
2813 too complex to handle. */
2814 if (*cval1 || *cval2)
2815 return 0;
2817 tclass = tcc_unary;
2818 *save_p = 1;
2821 switch (tclass)
2823 case tcc_unary:
2824 return twoval_comparison_p (TREE_OPERAND (arg, 0), cval1, cval2, save_p);
2826 case tcc_binary:
2827 return (twoval_comparison_p (TREE_OPERAND (arg, 0), cval1, cval2, save_p)
2828 && twoval_comparison_p (TREE_OPERAND (arg, 1),
2829 cval1, cval2, save_p));
2831 case tcc_constant:
2832 return 1;
2834 case tcc_expression:
2835 if (code == COND_EXPR)
2836 return (twoval_comparison_p (TREE_OPERAND (arg, 0),
2837 cval1, cval2, save_p)
2838 && twoval_comparison_p (TREE_OPERAND (arg, 1),
2839 cval1, cval2, save_p)
2840 && twoval_comparison_p (TREE_OPERAND (arg, 2),
2841 cval1, cval2, save_p));
2842 return 0;
2844 case tcc_comparison:
2845 /* First see if we can handle the first operand, then the second. For
2846 the second operand, we know *CVAL1 can't be zero. It must be that
2847 one side of the comparison is each of the values; test for the
2848 case where this isn't true by failing if the two operands
2849 are the same. */
2851 if (operand_equal_p (TREE_OPERAND (arg, 0),
2852 TREE_OPERAND (arg, 1), 0))
2853 return 0;
2855 if (*cval1 == 0)
2856 *cval1 = TREE_OPERAND (arg, 0);
2857 else if (operand_equal_p (*cval1, TREE_OPERAND (arg, 0), 0))
2858 ;
2859 else if (*cval2 == 0)
2860 *cval2 = TREE_OPERAND (arg, 0);
2861 else if (operand_equal_p (*cval2, TREE_OPERAND (arg, 0), 0))
2862 ;
2863 else
2864 return 0;
2866 if (operand_equal_p (*cval1, TREE_OPERAND (arg, 1), 0))
2867 ;
2868 else if (*cval2 == 0)
2869 *cval2 = TREE_OPERAND (arg, 1);
2870 else if (operand_equal_p (*cval2, TREE_OPERAND (arg, 1), 0))
2871 ;
2872 else
2873 return 0;
2875 return 1;
2877 default:
2878 return 0;
2882 /* ARG is a tree that is known to contain just arithmetic operations and
2883 comparisons. Evaluate the operations in the tree substituting NEW0 for
2884 any occurrence of OLD0 as an operand of a comparison and likewise for
2885 NEW1 and OLD1. */
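/* For example, with ARG being (x < y) && (x == y), OLD0 == x, NEW0 == a,
   OLD1 == y and NEW1 == b, the result is (a < b) && (a == b).  */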
2887 static tree
2888 eval_subst (location_t loc, tree arg, tree old0, tree new0,
2889 tree old1, tree new1)
2891 tree type = TREE_TYPE (arg);
2892 enum tree_code code = TREE_CODE (arg);
2893 enum tree_code_class tclass = TREE_CODE_CLASS (code);
2895 /* We can handle some of the tcc_expression cases here. */
2896 if (tclass == tcc_expression && code == TRUTH_NOT_EXPR)
2897 tclass = tcc_unary;
2898 else if (tclass == tcc_expression
2899 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR))
2900 tclass = tcc_binary;
2902 switch (tclass)
2904 case tcc_unary:
2905 return fold_build1_loc (loc, code, type,
2906 eval_subst (loc, TREE_OPERAND (arg, 0),
2907 old0, new0, old1, new1));
2909 case tcc_binary:
2910 return fold_build2_loc (loc, code, type,
2911 eval_subst (loc, TREE_OPERAND (arg, 0),
2912 old0, new0, old1, new1),
2913 eval_subst (loc, TREE_OPERAND (arg, 1),
2914 old0, new0, old1, new1));
2916 case tcc_expression:
2917 switch (code)
2919 case SAVE_EXPR:
2920 return eval_subst (loc, TREE_OPERAND (arg, 0), old0, new0,
2921 old1, new1);
2923 case COMPOUND_EXPR:
2924 return eval_subst (loc, TREE_OPERAND (arg, 1), old0, new0,
2925 old1, new1);
2927 case COND_EXPR:
2928 return fold_build3_loc (loc, code, type,
2929 eval_subst (loc, TREE_OPERAND (arg, 0),
2930 old0, new0, old1, new1),
2931 eval_subst (loc, TREE_OPERAND (arg, 1),
2932 old0, new0, old1, new1),
2933 eval_subst (loc, TREE_OPERAND (arg, 2),
2934 old0, new0, old1, new1));
2935 default:
2936 break;
2938 /* Fall through - ??? */
2940 case tcc_comparison:
2942 tree arg0 = TREE_OPERAND (arg, 0);
2943 tree arg1 = TREE_OPERAND (arg, 1);
2945 /* We need to check both for exact equality and tree equality. The
2946 former will be true if the operand has a side-effect. In that
2947 case, we know the operand occurred exactly once. */
2949 if (arg0 == old0 || operand_equal_p (arg0, old0, 0))
2950 arg0 = new0;
2951 else if (arg0 == old1 || operand_equal_p (arg0, old1, 0))
2952 arg0 = new1;
2954 if (arg1 == old0 || operand_equal_p (arg1, old0, 0))
2955 arg1 = new0;
2956 else if (arg1 == old1 || operand_equal_p (arg1, old1, 0))
2957 arg1 = new1;
2959 return fold_build2_loc (loc, code, type, arg0, arg1);
2962 default:
2963 return arg;
2967 /* Return a tree for the case when the result of an expression is RESULT
2968 converted to TYPE and OMITTED was previously an operand of the expression
2969 but is now not needed (e.g., we folded OMITTED * 0).
2971 If OMITTED has side effects, we must evaluate it. Otherwise, just do
2972 the conversion of RESULT to TYPE. */
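/* For example, when folding f () * 0 a caller can use
   omit_one_operand_loc (loc, type, integer_zero_node, call) to produce
   the equivalent of "f (), 0", preserving the call's side effects.  */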
2974 tree
2975 omit_one_operand_loc (location_t loc, tree type, tree result, tree omitted)
2977 tree t = fold_convert_loc (loc, type, result);
2979 /* If the resulting operand is an empty statement, just return the omitted
2980 statement cast to void. */
2981 if (IS_EMPTY_STMT (t) && TREE_SIDE_EFFECTS (omitted))
2982 return build1_loc (loc, NOP_EXPR, void_type_node,
2983 fold_ignored_result (omitted));
2985 if (TREE_SIDE_EFFECTS (omitted))
2986 return build2_loc (loc, COMPOUND_EXPR, type,
2987 fold_ignored_result (omitted), t);
2989 return non_lvalue_loc (loc, t);
2992 /* Similar, but call pedantic_non_lvalue instead of non_lvalue. */
2994 static tree
2995 pedantic_omit_one_operand_loc (location_t loc, tree type, tree result,
2996 tree omitted)
2998 tree t = fold_convert_loc (loc, type, result);
3000 /* If the resulting operand is an empty statement, just return the omitted
3001 statement cast to void. */
3002 if (IS_EMPTY_STMT (t) && TREE_SIDE_EFFECTS (omitted))
3003 return build1_loc (loc, NOP_EXPR, void_type_node,
3004 fold_ignored_result (omitted));
3006 if (TREE_SIDE_EFFECTS (omitted))
3007 return build2_loc (loc, COMPOUND_EXPR, type,
3008 fold_ignored_result (omitted), t);
3010 return pedantic_non_lvalue_loc (loc, t);
3013 /* Return a tree for the case when the result of an expression is RESULT
3014 converted to TYPE and OMITTED1 and OMITTED2 were previously operands
3015 of the expression but are now not needed.
3017 If OMITTED1 or OMITTED2 has side effects, they must be evaluated.
3018 If both OMITTED1 and OMITTED2 have side effects, OMITTED1 is
3019 evaluated before OMITTED2. Otherwise, if neither has side effects,
3020 just do the conversion of RESULT to TYPE. */
3022 tree
3023 omit_two_operands_loc (location_t loc, tree type, tree result,
3024 tree omitted1, tree omitted2)
3026 tree t = fold_convert_loc (loc, type, result);
3028 if (TREE_SIDE_EFFECTS (omitted2))
3029 t = build2_loc (loc, COMPOUND_EXPR, type, omitted2, t);
3030 if (TREE_SIDE_EFFECTS (omitted1))
3031 t = build2_loc (loc, COMPOUND_EXPR, type, omitted1, t);
3033 return TREE_CODE (t) != COMPOUND_EXPR ? non_lvalue_loc (loc, t) : t;
3037 /* Return a simplified tree node for the truth-negation of ARG. This
3038 never alters ARG itself. We assume that ARG is an operation that
3039 returns a truth value (0 or 1).
3041 FIXME: one would think we would fold the result, but it causes
3042 problems with the dominator optimizer. */
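/* For example, !(a < b) becomes a >= b for integral operands.  For
   floating point with -ftrapping-math the inversion is refused and
   NULL_TREE is returned, so callers fall back to wrapping a
   TRUTH_NOT_EXPR; !(a && b) becomes !a || !b.  */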
3044 tree
3045 fold_truth_not_expr (location_t loc, tree arg)
3047 tree type = TREE_TYPE (arg);
3048 enum tree_code code = TREE_CODE (arg);
3049 location_t loc1, loc2;
3051 /* If this is a comparison, we can simply invert it, except for
3052 floating-point non-equality comparisons, in which case we just
3053 enclose a TRUTH_NOT_EXPR around what we have. */
3055 if (TREE_CODE_CLASS (code) == tcc_comparison)
3057 tree op_type = TREE_TYPE (TREE_OPERAND (arg, 0));
3058 if (FLOAT_TYPE_P (op_type)
3059 && flag_trapping_math
3060 && code != ORDERED_EXPR && code != UNORDERED_EXPR
3061 && code != NE_EXPR && code != EQ_EXPR)
3062 return NULL_TREE;
3064 code = invert_tree_comparison (code, HONOR_NANS (TYPE_MODE (op_type)));
3065 if (code == ERROR_MARK)
3066 return NULL_TREE;
3068 return build2_loc (loc, code, type, TREE_OPERAND (arg, 0),
3069 TREE_OPERAND (arg, 1));
3072 switch (code)
3074 case INTEGER_CST:
3075 return constant_boolean_node (integer_zerop (arg), type);
3077 case TRUTH_AND_EXPR:
3078 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
3079 loc2 = expr_location_or (TREE_OPERAND (arg, 1), loc);
3080 return build2_loc (loc, TRUTH_OR_EXPR, type,
3081 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)),
3082 invert_truthvalue_loc (loc2, TREE_OPERAND (arg, 1)));
3084 case TRUTH_OR_EXPR:
3085 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
3086 loc2 = expr_location_or (TREE_OPERAND (arg, 1), loc);
3087 return build2_loc (loc, TRUTH_AND_EXPR, type,
3088 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)),
3089 invert_truthvalue_loc (loc2, TREE_OPERAND (arg, 1)));
3091 case TRUTH_XOR_EXPR:
3092 /* Here we can invert either operand. We invert the first operand
3093 unless the second operand is a TRUTH_NOT_EXPR in which case our
3094 result is the XOR of the first operand with the inside of the
3095 negation of the second operand. */
3097 if (TREE_CODE (TREE_OPERAND (arg, 1)) == TRUTH_NOT_EXPR)
3098 return build2_loc (loc, TRUTH_XOR_EXPR, type, TREE_OPERAND (arg, 0),
3099 TREE_OPERAND (TREE_OPERAND (arg, 1), 0));
3100 else
3101 return build2_loc (loc, TRUTH_XOR_EXPR, type,
3102 invert_truthvalue_loc (loc, TREE_OPERAND (arg, 0)),
3103 TREE_OPERAND (arg, 1));
3105 case TRUTH_ANDIF_EXPR:
3106 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
3107 loc2 = expr_location_or (TREE_OPERAND (arg, 1), loc);
3108 return build2_loc (loc, TRUTH_ORIF_EXPR, type,
3109 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)),
3110 invert_truthvalue_loc (loc2, TREE_OPERAND (arg, 1)));
3112 case TRUTH_ORIF_EXPR:
3113 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
3114 loc2 = expr_location_or (TREE_OPERAND (arg, 1), loc);
3115 return build2_loc (loc, TRUTH_ANDIF_EXPR, type,
3116 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)),
3117 invert_truthvalue_loc (loc2, TREE_OPERAND (arg, 1)));
3119 case TRUTH_NOT_EXPR:
3120 return TREE_OPERAND (arg, 0);
3122 case COND_EXPR:
3124 tree arg1 = TREE_OPERAND (arg, 1);
3125 tree arg2 = TREE_OPERAND (arg, 2);
3127 loc1 = expr_location_or (TREE_OPERAND (arg, 1), loc);
3128 loc2 = expr_location_or (TREE_OPERAND (arg, 2), loc);
3130 /* A COND_EXPR may have a throw as one operand, which
3131 then has void type. Just leave void operands
3132 as they are. */
3133 return build3_loc (loc, COND_EXPR, type, TREE_OPERAND (arg, 0),
3134 VOID_TYPE_P (TREE_TYPE (arg1))
3135 ? arg1 : invert_truthvalue_loc (loc1, arg1),
3136 VOID_TYPE_P (TREE_TYPE (arg2))
3137 ? arg2 : invert_truthvalue_loc (loc2, arg2));
3140 case COMPOUND_EXPR:
3141 loc1 = expr_location_or (TREE_OPERAND (arg, 1), loc);
3142 return build2_loc (loc, COMPOUND_EXPR, type,
3143 TREE_OPERAND (arg, 0),
3144 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 1)));
3146 case NON_LVALUE_EXPR:
3147 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
3148 return invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0));
3150 CASE_CONVERT:
3151 if (TREE_CODE (TREE_TYPE (arg)) == BOOLEAN_TYPE)
3152 return build1_loc (loc, TRUTH_NOT_EXPR, type, arg);
3154 /* ... fall through ... */
3156 case FLOAT_EXPR:
3157 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
3158 return build1_loc (loc, TREE_CODE (arg), type,
3159 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)));
3161 case BIT_AND_EXPR:
3162 if (!integer_onep (TREE_OPERAND (arg, 1)))
3163 return NULL_TREE;
3164 return build2_loc (loc, EQ_EXPR, type, arg, build_int_cst (type, 0));
3166 case SAVE_EXPR:
3167 return build1_loc (loc, TRUTH_NOT_EXPR, type, arg);
3169 case CLEANUP_POINT_EXPR:
3170 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
3171 return build1_loc (loc, CLEANUP_POINT_EXPR, type,
3172 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)));
3174 default:
3175 return NULL_TREE;
3179 /* Return a simplified tree node for the truth-negation of ARG. This
3180 never alters ARG itself. We assume that ARG is an operation that
3181 returns a truth value (0 or 1).
3183 FIXME: one would think we would fold the result, but it causes
3184 problems with the dominator optimizer. */
3186 tree
3187 invert_truthvalue_loc (location_t loc, tree arg)
3189 tree tem;
3191 if (TREE_CODE (arg) == ERROR_MARK)
3192 return arg;
3194 tem = fold_truth_not_expr (loc, arg);
3195 if (!tem)
3196 tem = build1_loc (loc, TRUTH_NOT_EXPR, TREE_TYPE (arg), arg);
3198 return tem;
3201 /* Given a bit-wise operation CODE applied to ARG0 and ARG1, see if both
3202 operands are another bit-wise operation with a common input. If so,
3203 distribute the bit operations to save an operation and possibly two if
3204 constants are involved. For example, convert
3205 (A | B) & (A | C) into A | (B & C)
3206 Further simplification will occur if B and C are constants.
3208 If this optimization cannot be done, 0 will be returned. */
3210 static tree
3211 distribute_bit_expr (location_t loc, enum tree_code code, tree type,
3212 tree arg0, tree arg1)
3214 tree common;
3215 tree left, right;
3217 if (TREE_CODE (arg0) != TREE_CODE (arg1)
3218 || TREE_CODE (arg0) == code
3219 || (TREE_CODE (arg0) != BIT_AND_EXPR
3220 && TREE_CODE (arg0) != BIT_IOR_EXPR))
3221 return 0;
3223 if (operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 0), 0))
3225 common = TREE_OPERAND (arg0, 0);
3226 left = TREE_OPERAND (arg0, 1);
3227 right = TREE_OPERAND (arg1, 1);
3229 else if (operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 1), 0))
3231 common = TREE_OPERAND (arg0, 0);
3232 left = TREE_OPERAND (arg0, 1);
3233 right = TREE_OPERAND (arg1, 0);
3235 else if (operand_equal_p (TREE_OPERAND (arg0, 1), TREE_OPERAND (arg1, 0), 0))
3237 common = TREE_OPERAND (arg0, 1);
3238 left = TREE_OPERAND (arg0, 0);
3239 right = TREE_OPERAND (arg1, 1);
3241 else if (operand_equal_p (TREE_OPERAND (arg0, 1), TREE_OPERAND (arg1, 1), 0))
3243 common = TREE_OPERAND (arg0, 1);
3244 left = TREE_OPERAND (arg0, 0);
3245 right = TREE_OPERAND (arg1, 0);
3247 else
3248 return 0;
3250 common = fold_convert_loc (loc, type, common);
3251 left = fold_convert_loc (loc, type, left);
3252 right = fold_convert_loc (loc, type, right);
3253 return fold_build2_loc (loc, TREE_CODE (arg0), type, common,
3254 fold_build2_loc (loc, code, type, left, right));
3257 /* Knowing that ARG0 and ARG1 are both RDIV_EXPRs, simplify a binary operation
3258 with code CODE. This optimization is unsafe. */
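/* For example, a/c + b/c becomes (a + b)/c, and a/2.0 - a/4.0 becomes
   a * 0.25; both can change rounding and trap behavior, hence the caveat
   above.  */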
3259 static tree
3260 distribute_real_division (location_t loc, enum tree_code code, tree type,
3261 tree arg0, tree arg1)
3263 bool mul0 = TREE_CODE (arg0) == MULT_EXPR;
3264 bool mul1 = TREE_CODE (arg1) == MULT_EXPR;
3266 /* (A / C) +- (B / C) -> (A +- B) / C. */
3267 if (mul0 == mul1
3268 && operand_equal_p (TREE_OPERAND (arg0, 1),
3269 TREE_OPERAND (arg1, 1), 0))
3270 return fold_build2_loc (loc, mul0 ? MULT_EXPR : RDIV_EXPR, type,
3271 fold_build2_loc (loc, code, type,
3272 TREE_OPERAND (arg0, 0),
3273 TREE_OPERAND (arg1, 0)),
3274 TREE_OPERAND (arg0, 1));
3276 /* (A / C1) +- (A / C2) -> A * (1 / C1 +- 1 / C2). */
3277 if (operand_equal_p (TREE_OPERAND (arg0, 0),
3278 TREE_OPERAND (arg1, 0), 0)
3279 && TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
3280 && TREE_CODE (TREE_OPERAND (arg1, 1)) == REAL_CST)
3282 REAL_VALUE_TYPE r0, r1;
3283 r0 = TREE_REAL_CST (TREE_OPERAND (arg0, 1));
3284 r1 = TREE_REAL_CST (TREE_OPERAND (arg1, 1));
3285 if (!mul0)
3286 real_arithmetic (&r0, RDIV_EXPR, &dconst1, &r0);
3287 if (!mul1)
3288 real_arithmetic (&r1, RDIV_EXPR, &dconst1, &r1);
3289 real_arithmetic (&r0, code, &r0, &r1);
3290 return fold_build2_loc (loc, MULT_EXPR, type,
3291 TREE_OPERAND (arg0, 0),
3292 build_real (type, r0));
3295 return NULL_TREE;
3298 /* Return a BIT_FIELD_REF of type TYPE to refer to BITSIZE bits of INNER
3299 starting at BITPOS. The field is unsigned if UNSIGNEDP is nonzero. */
3301 static tree
3302 make_bit_field_ref (location_t loc, tree inner, tree type,
3303 HOST_WIDE_INT bitsize, HOST_WIDE_INT bitpos, int unsignedp)
3305 tree result, bftype;
3307 if (bitpos == 0)
3309 tree size = TYPE_SIZE (TREE_TYPE (inner));
3310 if ((INTEGRAL_TYPE_P (TREE_TYPE (inner))
3311 || POINTER_TYPE_P (TREE_TYPE (inner)))
3312 && host_integerp (size, 0)
3313 && tree_low_cst (size, 0) == bitsize)
3314 return fold_convert_loc (loc, type, inner);
3317 bftype = type;
3318 if (TYPE_PRECISION (bftype) != bitsize
3319 || TYPE_UNSIGNED (bftype) == !unsignedp)
3320 bftype = build_nonstandard_integer_type (bitsize, 0);
3322 result = build3_loc (loc, BIT_FIELD_REF, bftype, inner,
3323 size_int (bitsize), bitsize_int (bitpos));
3325 if (bftype != type)
3326 result = fold_convert_loc (loc, type, result);
3328 return result;
3331 /* Optimize a bit-field compare.
3333 There are two cases: First is a compare against a constant and the
3334 second is a comparison of two items where the fields are at the same
3335 bit position relative to the start of a chunk (byte, halfword, word)
3336 large enough to contain it. In these cases we can avoid the shift
3337 implicit in bitfield extractions.
3339 For constants, we emit a compare of the shifted constant with the
3340 BIT_AND_EXPR of a mask and a byte, halfword, or word of the operand being
3341 compared. For two fields at the same position, we do the ANDs with the
3342 similar mask and compare the result of the ANDs.
3344 CODE is the comparison code, known to be either NE_EXPR or EQ_EXPR.
3345 COMPARE_TYPE is the type of the comparison, and LHS and RHS
3346 are the left and right operands of the comparison, respectively.
3348 If the optimization described above can be done, we return the resulting
3349 tree. Otherwise we return zero. */
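/* For example, given "struct { unsigned f : 3; } s;", a test such as
   s.f == 2 can become (word & mask) == (2 << shift) on the word
   containing the field, where word, mask and shift stand for the values
   computed below.  */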
3351 static tree
3352 optimize_bit_field_compare (location_t loc, enum tree_code code,
3353 tree compare_type, tree lhs, tree rhs)
3355 HOST_WIDE_INT lbitpos, lbitsize, rbitpos, rbitsize, nbitpos, nbitsize;
3356 tree type = TREE_TYPE (lhs);
3357 tree signed_type, unsigned_type;
3358 int const_p = TREE_CODE (rhs) == INTEGER_CST;
3359 enum machine_mode lmode, rmode, nmode;
3360 int lunsignedp, runsignedp;
3361 int lvolatilep = 0, rvolatilep = 0;
3362 tree linner, rinner = NULL_TREE;
3363 tree mask;
3364 tree offset;
3366 /* Get all the information about the extractions being done. If the bit size
3367 is the same as the size of the underlying object, we aren't doing an
3368 extraction at all and so can do nothing. We also don't want to
3369 do anything if the inner expression is a PLACEHOLDER_EXPR since we
3370 then will no longer be able to replace it. */
3371 linner = get_inner_reference (lhs, &lbitsize, &lbitpos, &offset, &lmode,
3372 &lunsignedp, &lvolatilep, false);
3373 if (linner == lhs || lbitsize == GET_MODE_BITSIZE (lmode) || lbitsize < 0
3374 || offset != 0 || TREE_CODE (linner) == PLACEHOLDER_EXPR)
3375 return 0;
3377 if (!const_p)
3379 /* If this is not a constant, we can only do something if bit positions,
3380 sizes, and signedness are the same. */
3381 rinner = get_inner_reference (rhs, &rbitsize, &rbitpos, &offset, &rmode,
3382 &runsignedp, &rvolatilep, false);
3384 if (rinner == rhs || lbitpos != rbitpos || lbitsize != rbitsize
3385 || lunsignedp != runsignedp || offset != 0
3386 || TREE_CODE (rinner) == PLACEHOLDER_EXPR)
3387 return 0;
3390 /* See if we can find a mode to refer to this field. We should be able to,
3391 but fail if we can't. */
3392 if (lvolatilep
3393 && GET_MODE_BITSIZE (lmode) > 0
3394 && flag_strict_volatile_bitfields > 0)
3395 nmode = lmode;
3396 else
3397 nmode = get_best_mode (lbitsize, lbitpos, 0, 0,
3398 const_p ? TYPE_ALIGN (TREE_TYPE (linner))
3399 : MIN (TYPE_ALIGN (TREE_TYPE (linner)),
3400 TYPE_ALIGN (TREE_TYPE (rinner))),
3401 word_mode, lvolatilep || rvolatilep);
3402 if (nmode == VOIDmode)
3403 return 0;
3405 /* Set signed and unsigned types of the precision of this mode for the
3406 shifts below. */
3407 signed_type = lang_hooks.types.type_for_mode (nmode, 0);
3408 unsigned_type = lang_hooks.types.type_for_mode (nmode, 1);
3410 /* Compute the bit position and size for the new reference and our offset
3411 within it. If the new reference is the same size as the original, we
3412 won't optimize anything, so return zero. */
3413 nbitsize = GET_MODE_BITSIZE (nmode);
3414 nbitpos = lbitpos & ~ (nbitsize - 1);
3415 lbitpos -= nbitpos;
3416 if (nbitsize == lbitsize)
3417 return 0;
3419 if (BYTES_BIG_ENDIAN)
3420 lbitpos = nbitsize - lbitsize - lbitpos;
3422 /* Make the mask to be used against the extracted field. */
3423 mask = build_int_cst_type (unsigned_type, -1);
3424 mask = const_binop (LSHIFT_EXPR, mask, size_int (nbitsize - lbitsize));
3425 mask = const_binop (RSHIFT_EXPR, mask,
3426 size_int (nbitsize - lbitsize - lbitpos));
3428 if (! const_p)
3429 /* If not comparing with constant, just rework the comparison
3430 and return. */
3431 return fold_build2_loc (loc, code, compare_type,
3432 fold_build2_loc (loc, BIT_AND_EXPR, unsigned_type,
3433 make_bit_field_ref (loc, linner,
3434 unsigned_type,
3435 nbitsize, nbitpos,
3436 1),
3437 mask),
3438 fold_build2_loc (loc, BIT_AND_EXPR, unsigned_type,
3439 make_bit_field_ref (loc, rinner,
3440 unsigned_type,
3441 nbitsize, nbitpos,
3442 1),
3443 mask));
3445 /* Otherwise, we are handling the constant case. See if the constant is too
3446 big for the field. Warn and return a tree for 0 (false) if so. We do
3447 this not only for its own sake, but to avoid having to test for this
3448 error case below. If we didn't, we might generate wrong code.
3450 For unsigned fields, the constant shifted right by the field length should
3451 be all zero. For signed fields, the high-order bits should agree with
3452 the sign bit. */
3454 if (lunsignedp)
3456 if (! integer_zerop (const_binop (RSHIFT_EXPR,
3457 fold_convert_loc (loc,
3458 unsigned_type, rhs),
3459 size_int (lbitsize))))
3461 warning (0, "comparison is always %d due to width of bit-field",
3462 code == NE_EXPR);
3463 return constant_boolean_node (code == NE_EXPR, compare_type);
3466 else
3468 tree tem = const_binop (RSHIFT_EXPR,
3469 fold_convert_loc (loc, signed_type, rhs),
3470 size_int (lbitsize - 1));
3471 if (! integer_zerop (tem) && ! integer_all_onesp (tem))
3473 warning (0, "comparison is always %d due to width of bit-field",
3474 code == NE_EXPR);
3475 return constant_boolean_node (code == NE_EXPR, compare_type);
3479 /* Single-bit compares should always be against zero. */
3480 if (lbitsize == 1 && ! integer_zerop (rhs))
3482 code = code == EQ_EXPR ? NE_EXPR : EQ_EXPR;
3483 rhs = build_int_cst (type, 0);
3486 /* Make a new bitfield reference, shift the constant over the
3487 appropriate number of bits and mask it with the computed mask
3488 (in case this was a signed field). If we changed it, make a new one. */
3489 lhs = make_bit_field_ref (loc, linner, unsigned_type, nbitsize, nbitpos, 1);
3490 if (lvolatilep)
3492 TREE_SIDE_EFFECTS (lhs) = 1;
3493 TREE_THIS_VOLATILE (lhs) = 1;
3496 rhs = const_binop (BIT_AND_EXPR,
3497 const_binop (LSHIFT_EXPR,
3498 fold_convert_loc (loc, unsigned_type, rhs),
3499 size_int (lbitpos)),
3500 mask);
3502 lhs = build2_loc (loc, code, compare_type,
3503 build2 (BIT_AND_EXPR, unsigned_type, lhs, mask), rhs);
3504 return lhs;
3507 /* Subroutine for fold_truthop: decode a field reference.
3509 If EXP is a component reference, we return the innermost reference.
3511 *PBITSIZE is set to the number of bits in the reference, *PBITPOS is
3512 set to the starting bit number.
3514 If the innermost field can be completely contained in a mode-sized
3515 unit, *PMODE is set to that mode. Otherwise, it is set to VOIDmode.
3517 *PVOLATILEP is set to 1 if any expression encountered is volatile;
3518 otherwise it is not changed.
3520 *PUNSIGNEDP is set to the signedness of the field.
3522 *PMASK is set to the mask used. This is either contained in a
3523 BIT_AND_EXPR or derived from the width of the field.
3525 *PAND_MASK is set to the mask found in a BIT_AND_EXPR, if any.
3527 Return 0 if this is not a component reference or is one that we can't
3528 do anything with. */
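/* For example, for EXP of the form (unsigned char) (s.f & 0x0F), where s
   and f are illustrative, the return value is the object containing the
   field, *PAND_MASK is 0x0F and *PMASK combines the field width mask with
   that AND mask.  */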
3530 static tree
3531 decode_field_reference (location_t loc, tree exp, HOST_WIDE_INT *pbitsize,
3532 HOST_WIDE_INT *pbitpos, enum machine_mode *pmode,
3533 int *punsignedp, int *pvolatilep,
3534 tree *pmask, tree *pand_mask)
3536 tree outer_type = 0;
3537 tree and_mask = 0;
3538 tree mask, inner, offset;
3539 tree unsigned_type;
3540 unsigned int precision;
3542 /* All the optimizations using this function assume integer fields.
3543 There are problems with FP fields since the type_for_size call
3544 below can fail for, e.g., XFmode. */
3545 if (! INTEGRAL_TYPE_P (TREE_TYPE (exp)))
3546 return 0;
3548 /* We are interested in the bare arrangement of bits, so strip everything
3549 that doesn't affect the machine mode. However, record the type of the
3550 outermost expression if it may matter below. */
3551 if (CONVERT_EXPR_P (exp)
3552 || TREE_CODE (exp) == NON_LVALUE_EXPR)
3553 outer_type = TREE_TYPE (exp);
3554 STRIP_NOPS (exp);
3556 if (TREE_CODE (exp) == BIT_AND_EXPR)
3558 and_mask = TREE_OPERAND (exp, 1);
3559 exp = TREE_OPERAND (exp, 0);
3560 STRIP_NOPS (exp); STRIP_NOPS (and_mask);
3561 if (TREE_CODE (and_mask) != INTEGER_CST)
3562 return 0;
3565 inner = get_inner_reference (exp, pbitsize, pbitpos, &offset, pmode,
3566 punsignedp, pvolatilep, false);
3567 if ((inner == exp && and_mask == 0)
3568 || *pbitsize < 0 || offset != 0
3569 || TREE_CODE (inner) == PLACEHOLDER_EXPR)
3570 return 0;
3572 /* If the number of bits in the reference is the same as the bitsize of
3573 the outer type, then the outer type gives the signedness. Otherwise
3574 (in case of a small bitfield) the signedness is unchanged. */
3575 if (outer_type && *pbitsize == TYPE_PRECISION (outer_type))
3576 *punsignedp = TYPE_UNSIGNED (outer_type);
3578 /* Compute the mask to access the bitfield. */
3579 unsigned_type = lang_hooks.types.type_for_size (*pbitsize, 1);
3580 precision = TYPE_PRECISION (unsigned_type);
3582 mask = build_int_cst_type (unsigned_type, -1);
3584 mask = const_binop (LSHIFT_EXPR, mask, size_int (precision - *pbitsize));
3585 mask = const_binop (RSHIFT_EXPR, mask, size_int (precision - *pbitsize));
3587 /* Merge it with the mask we found in the BIT_AND_EXPR, if any. */
3588 if (and_mask != 0)
3589 mask = fold_build2_loc (loc, BIT_AND_EXPR, unsigned_type,
3590 fold_convert_loc (loc, unsigned_type, and_mask), mask);
3592 *pmask = mask;
3593 *pand_mask = and_mask;
3594 return inner;
3597 /* Return nonzero if MASK represents a mask of SIZE ones in the low-order
3598 bit positions. */
3600 static int
3601 all_ones_mask_p (const_tree mask, int size)
3603 tree type = TREE_TYPE (mask);
3604 unsigned int precision = TYPE_PRECISION (type);
3605 tree tmask;
3607 tmask = build_int_cst_type (signed_type_for (type), -1);
3609 return
3610 tree_int_cst_equal (mask,
3611 const_binop (RSHIFT_EXPR,
3612 const_binop (LSHIFT_EXPR, tmask,
3613 size_int (precision - size)),
3614 size_int (precision - size)));
3617 /* Subroutine for fold: determine if VAL is the INTEGER_CST that
3618 represents the sign bit of EXP's type. If EXP represents a sign
3619 or zero extension, also test VAL against the unextended type.
3620 The return value is the (sub)expression whose sign bit is VAL,
3621 or NULL_TREE otherwise. */
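/* For example, for EXP of 32-bit integral type, VAL matches only when its
   low 32 bits are 0x80000000, the sign bit of that type.  */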
3623 static tree
3624 sign_bit_p (tree exp, const_tree val)
3626 unsigned HOST_WIDE_INT mask_lo, lo;
3627 HOST_WIDE_INT mask_hi, hi;
3628 int width;
3629 tree t;
3631 /* Tree EXP must have an integral type. */
3632 t = TREE_TYPE (exp);
3633 if (! INTEGRAL_TYPE_P (t))
3634 return NULL_TREE;
3636 /* Tree VAL must be an integer constant. */
3637 if (TREE_CODE (val) != INTEGER_CST
3638 || TREE_OVERFLOW (val))
3639 return NULL_TREE;
3641 width = TYPE_PRECISION (t);
3642 if (width > HOST_BITS_PER_WIDE_INT)
3644 hi = (unsigned HOST_WIDE_INT) 1 << (width - HOST_BITS_PER_WIDE_INT - 1);
3645 lo = 0;
3647 mask_hi = ((unsigned HOST_WIDE_INT) -1
3648 >> (2 * HOST_BITS_PER_WIDE_INT - width));
3649 mask_lo = -1;
3651 else
3653 hi = 0;
3654 lo = (unsigned HOST_WIDE_INT) 1 << (width - 1);
3656 mask_hi = 0;
3657 mask_lo = ((unsigned HOST_WIDE_INT) -1
3658 >> (HOST_BITS_PER_WIDE_INT - width));
3661 /* We mask off those bits beyond TREE_TYPE (exp) so that we can
3662 treat VAL as if it were unsigned. */
3663 if ((TREE_INT_CST_HIGH (val) & mask_hi) == hi
3664 && (TREE_INT_CST_LOW (val) & mask_lo) == lo)
3665 return exp;
3667 /* Handle extension from a narrower type. */
3668 if (TREE_CODE (exp) == NOP_EXPR
3669 && TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0))) < width)
3670 return sign_bit_p (TREE_OPERAND (exp, 0), val);
3672 return NULL_TREE;
3675 /* Subroutine for fold_truthop: determine if an operand is simple enough
3676 to be evaluated unconditionally. */
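/* For example, a constant, an SSA_NAME or a non-volatile, non-addressable
   local variable is simple; a global or static variable is not, per the
   conditions spelled out below.  */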
3678 static int
3679 simple_operand_p (const_tree exp)
3681 /* Strip any conversions that don't change the machine mode. */
3682 STRIP_NOPS (exp);
3684 return (CONSTANT_CLASS_P (exp)
3685 || TREE_CODE (exp) == SSA_NAME
3686 || (DECL_P (exp)
3687 && ! TREE_ADDRESSABLE (exp)
3688 && ! TREE_THIS_VOLATILE (exp)
3689 && ! DECL_NONLOCAL (exp)
3690 /* Don't regard global variables as simple. They may be
3691 allocated in ways unknown to the compiler (shared memory,
3692 #pragma weak, etc). */
3693 && ! TREE_PUBLIC (exp)
3694 && ! DECL_EXTERNAL (exp)
3695 /* Loading a static variable is unduly expensive, but global
3696 registers aren't expensive. */
3697 && (! TREE_STATIC (exp) || DECL_REGISTER (exp))));
3700 /* The following functions are subroutines to fold_range_test and allow it to
3701 try to change a logical combination of comparisons into a range test.
3703 For example, both
3704 X == 2 || X == 3 || X == 4 || X == 5
3705 and
3706 X >= 2 && X <= 5
3707 are converted to
3708 (unsigned) (X - 2) <= 3
3710 We describe each set of comparisons as being either inside or outside
3711 a range, using a variable named like IN_P, and then describe the
3712 range with a lower and upper bound. If one of the bounds is omitted,
3713 it represents either the highest or lowest value of the type.
3715 In the comments below, we represent a range by two numbers in brackets
3716 preceded by a "+" to designate being inside that range, or a "-" to
3717 designate being outside that range, so the condition can be inverted by
3718 flipping the prefix. An omitted bound is represented by a "-". For
3719 example, "- [-, 10]" means being outside the range starting at the lowest
3720 possible value and ending at 10, in other words, being greater than 10.
3721 The range "+ [-, -]" is always true and hence the range "- [-, -]" is
3722 always false.
3724 We set up things so that the missing bounds are handled in a consistent
3725 manner, so that neither a missing bound nor "true" and "false" needs to be
3726 handled as a special case. */
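/* As a worked example of the notation (editorial sketch): "X > 10" is
   "- [-, 10]", which is the same test as "+ [11, -]" once the bound is
   adjusted; flipping the prefix inverts the condition, so "+ [-, 10]"
   is "X <= 10".  */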
3728 /* Return the result of applying CODE to ARG0 and ARG1, but handle the case
3729 of ARG0 and/or ARG1 being omitted, meaning an unlimited range. UPPER0_P
3730 and UPPER1_P are nonzero if the respective argument is an upper bound
3731 and zero for a lower. TYPE, if nonzero, is the type of the result; it
3732 must be specified for a comparison. ARG1 will be converted to ARG0's
3733 type if both are specified. */
3735 static tree
3736 range_binop (enum tree_code code, tree type, tree arg0, int upper0_p,
3737 tree arg1, int upper1_p)
3739 tree tem;
3740 int result;
3741 int sgn0, sgn1;
3743 /* If neither arg represents infinity, do the normal operation.
3744 Else, if not a comparison, return infinity. Else handle the special
3745 comparison rules. Note that most of the cases below won't occur, but
3746 are handled for consistency. */
3748 if (arg0 != 0 && arg1 != 0)
3750 tem = fold_build2 (code, type != 0 ? type : TREE_TYPE (arg0),
3751 arg0, fold_convert (TREE_TYPE (arg0), arg1));
3752 STRIP_NOPS (tem);
3753 return TREE_CODE (tem) == INTEGER_CST ? tem : 0;
3756 if (TREE_CODE_CLASS (code) != tcc_comparison)
3757 return 0;
3759 /* Set SGN[01] to -1 if ARG[01] is a lower bound, 1 for upper, and 0
3760 for neither. In real maths, we cannot assume open-ended ranges are
3761 the same. But this is computer arithmetic, where numbers are finite.
3762 We can therefore replace any missing bound with a value Z,
3763 Z being greater than any representable number. This permits
3764 us to treat unbounded ranges as equal. */
3765 sgn0 = arg0 != 0 ? 0 : (upper0_p ? 1 : -1);
3766 sgn1 = arg1 != 0 ? 0 : (upper1_p ? 1 : -1);
3767 switch (code)
3769 case EQ_EXPR:
3770 result = sgn0 == sgn1;
3771 break;
3772 case NE_EXPR:
3773 result = sgn0 != sgn1;
3774 break;
3775 case LT_EXPR:
3776 result = sgn0 < sgn1;
3777 break;
3778 case LE_EXPR:
3779 result = sgn0 <= sgn1;
3780 break;
3781 case GT_EXPR:
3782 result = sgn0 > sgn1;
3783 break;
3784 case GE_EXPR:
3785 result = sgn0 >= sgn1;
3786 break;
3787 default:
3788 gcc_unreachable ();
3791 return constant_boolean_node (result, type);
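/* To illustrate the infinity handling above (editorial sketch): an
   omitted upper bound gets sgn == 1, an omitted lower bound sgn == -1,
   and a finite bound sgn == 0.  So LE_EXPR of a finite bound against
   an omitted upper bound yields 0 <= 1, i.e. true, and EQ_EXPR of two
   omitted lower bounds yields -1 == -1, i.e. true, exactly as the
   switch computes.  */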
3794 /* Given EXP, a logical expression, set the range it is testing into
3795 variables denoted by PIN_P, PLOW, and PHIGH. Return the expression
3796 actually being tested. *PLOW and *PHIGH will be made of the same
3797 type as the returned expression. If EXP is not a comparison, we
3798 will most likely not be returning a useful value and range. Set
3799 *STRICT_OVERFLOW_P to true if the return value is only valid
3800 because signed overflow is undefined; otherwise, do not change
3801 *STRICT_OVERFLOW_P. */
3803 tree
3804 make_range (tree exp, int *pin_p, tree *plow, tree *phigh,
3805 bool *strict_overflow_p)
3807 enum tree_code code;
3808 tree arg0 = NULL_TREE, arg1 = NULL_TREE;
3809 tree exp_type = NULL_TREE, arg0_type = NULL_TREE;
3810 int in_p, n_in_p;
3811 tree low, high, n_low, n_high;
3812 location_t loc = EXPR_LOCATION (exp);
3814 /* Start with simply saying "EXP != 0" and then look at the code of EXP
3815 and see if we can refine the range. Some of the cases below may not
3816 happen, but it doesn't seem worth worrying about this. We "continue"
3817 the outer loop when we've changed something; otherwise we "break"
3818 the switch, which will "break" the while. */
3820 in_p = 0;
3821 low = high = build_int_cst (TREE_TYPE (exp), 0);
3823 while (1)
3825 code = TREE_CODE (exp);
3826 exp_type = TREE_TYPE (exp);
3828 if (IS_EXPR_CODE_CLASS (TREE_CODE_CLASS (code)))
3830 if (TREE_OPERAND_LENGTH (exp) > 0)
3831 arg0 = TREE_OPERAND (exp, 0);
3832 if (TREE_CODE_CLASS (code) == tcc_comparison
3833 || TREE_CODE_CLASS (code) == tcc_unary
3834 || TREE_CODE_CLASS (code) == tcc_binary)
3835 arg0_type = TREE_TYPE (arg0);
3836 if (TREE_CODE_CLASS (code) == tcc_binary
3837 || TREE_CODE_CLASS (code) == tcc_comparison
3838 || (TREE_CODE_CLASS (code) == tcc_expression
3839 && TREE_OPERAND_LENGTH (exp) > 1))
3840 arg1 = TREE_OPERAND (exp, 1);
3843 switch (code)
3845 case TRUTH_NOT_EXPR:
3846 in_p = ! in_p, exp = arg0;
3847 continue;
3849 case EQ_EXPR: case NE_EXPR:
3850 case LT_EXPR: case LE_EXPR: case GE_EXPR: case GT_EXPR:
3851 /* We can only do something if the range is testing for zero
3852 and if the second operand is an integer constant. Note that
3853 saying something is "in" the range we make is done by
3854 complementing IN_P, since IN_P is initially set for the case of
3855 being not equal to zero; "out" is leaving it alone. */
3856 if (low == 0 || high == 0
3857 || ! integer_zerop (low) || ! integer_zerop (high)
3858 || TREE_CODE (arg1) != INTEGER_CST)
3859 break;
3861 switch (code)
3863 case NE_EXPR: /* - [c, c] */
3864 low = high = arg1;
3865 break;
3866 case EQ_EXPR: /* + [c, c] */
3867 in_p = ! in_p, low = high = arg1;
3868 break;
3869 case GT_EXPR: /* - [-, c] */
3870 low = 0, high = arg1;
3871 break;
3872 case GE_EXPR: /* + [c, -] */
3873 in_p = ! in_p, low = arg1, high = 0;
3874 break;
3875 case LT_EXPR: /* - [c, -] */
3876 low = arg1, high = 0;
3877 break;
3878 case LE_EXPR: /* + [-, c] */
3879 in_p = ! in_p, low = 0, high = arg1;
3880 break;
3881 default:
3882 gcc_unreachable ();
3885 /* If this is an unsigned comparison, we also know that EXP is
3886 greater than or equal to zero. We base the range tests we make
3887 on that fact, so we record it here so we can parse existing
3888 range tests. We test arg0_type since often the return type
3889 of, e.g. EQ_EXPR, is boolean. */
3890 if (TYPE_UNSIGNED (arg0_type) && (low == 0 || high == 0))
3892 if (! merge_ranges (&n_in_p, &n_low, &n_high,
3893 in_p, low, high, 1,
3894 build_int_cst (arg0_type, 0),
3895 NULL_TREE))
3896 break;
3898 in_p = n_in_p, low = n_low, high = n_high;
3900 /* If the high bound is missing, but we have a nonzero low
3901 bound, reverse the range so it goes from zero to the low bound
3902 minus 1. */
3903 if (high == 0 && low && ! integer_zerop (low))
3905 in_p = ! in_p;
3906 high = range_binop (MINUS_EXPR, NULL_TREE, low, 0,
3907 integer_one_node, 0);
3908 low = build_int_cst (arg0_type, 0);
3912 exp = arg0;
3913 continue;
3915 case NEGATE_EXPR:
3916 /* (-x) IN [a,b] -> x in [-b, -a] */
3917 n_low = range_binop (MINUS_EXPR, exp_type,
3918 build_int_cst (exp_type, 0),
3919 0, high, 1);
3920 n_high = range_binop (MINUS_EXPR, exp_type,
3921 build_int_cst (exp_type, 0),
3922 0, low, 0);
3923 if (n_high != 0 && TREE_OVERFLOW (n_high))
3924 break;
3925 goto normalize;
3927 case BIT_NOT_EXPR:
3928 /* ~ X -> -X - 1 */
3929 exp = build2_loc (loc, MINUS_EXPR, exp_type, negate_expr (arg0),
3930 build_int_cst (exp_type, 1));
3931 continue;
3933 case PLUS_EXPR: case MINUS_EXPR:
3934 if (TREE_CODE (arg1) != INTEGER_CST)
3935 break;
3937 /* If flag_wrapv and ARG0_TYPE is signed, then we cannot
3938 move a constant to the other side. */
3939 if (!TYPE_UNSIGNED (arg0_type)
3940 && !TYPE_OVERFLOW_UNDEFINED (arg0_type))
3941 break;
3943 /* If EXP is signed, any overflow in the computation is undefined,
3944 so we don't worry about it so long as our computations on
3945 the bounds don't overflow. For unsigned, overflow is defined
3946 and this is exactly the right thing. */
3947 n_low = range_binop (code == MINUS_EXPR ? PLUS_EXPR : MINUS_EXPR,
3948 arg0_type, low, 0, arg1, 0);
3949 n_high = range_binop (code == MINUS_EXPR ? PLUS_EXPR : MINUS_EXPR,
3950 arg0_type, high, 1, arg1, 0);
3951 if ((n_low != 0 && TREE_OVERFLOW (n_low))
3952 || (n_high != 0 && TREE_OVERFLOW (n_high)))
3953 break;
3955 if (TYPE_OVERFLOW_UNDEFINED (arg0_type))
3956 *strict_overflow_p = true;
3958 normalize:
3959 /* Check for an unsigned range which has wrapped around the maximum
3960 value thus making n_high < n_low, and normalize it. */
3961 if (n_low && n_high && tree_int_cst_lt (n_high, n_low))
3963 low = range_binop (PLUS_EXPR, arg0_type, n_high, 0,
3964 integer_one_node, 0);
3965 high = range_binop (MINUS_EXPR, arg0_type, n_low, 0,
3966 integer_one_node, 0);
3968 /* If the range is of the form +/- [ x+1, x ], we won't
3969 be able to normalize it. But then, it represents the
3970 whole range or the empty set, so make it
3971 +/- [ -, - ]. */
3972 if (tree_int_cst_equal (n_low, low)
3973 && tree_int_cst_equal (n_high, high))
3974 low = high = 0;
3975 else
3976 in_p = ! in_p;
3978 else
3979 low = n_low, high = n_high;
3981 exp = arg0;
3982 continue;
3984 CASE_CONVERT: case NON_LVALUE_EXPR:
3985 if (TYPE_PRECISION (arg0_type) > TYPE_PRECISION (exp_type))
3986 break;
3988 if (! INTEGRAL_TYPE_P (arg0_type)
3989 || (low != 0 && ! int_fits_type_p (low, arg0_type))
3990 || (high != 0 && ! int_fits_type_p (high, arg0_type)))
3991 break;
3993 n_low = low, n_high = high;
3995 if (n_low != 0)
3996 n_low = fold_convert_loc (loc, arg0_type, n_low);
3998 if (n_high != 0)
3999 n_high = fold_convert_loc (loc, arg0_type, n_high);
4002 /* If we're converting arg0 from an unsigned type to exp,
4003 a signed type, we will be doing the comparison as unsigned.
4004 The tests above have already verified that LOW and HIGH
4005 are both positive.
4007 So we have to ensure that we will handle large unsigned
4008 values the same way that the current signed bounds treat
4009 negative values. */
4011 if (!TYPE_UNSIGNED (exp_type) && TYPE_UNSIGNED (arg0_type))
4013 tree high_positive;
4014 tree equiv_type;
4015 /* For fixed-point modes, we need to pass the saturating flag
4016 as the 2nd parameter. */
4017 if (ALL_FIXED_POINT_MODE_P (TYPE_MODE (arg0_type)))
4018 equiv_type = lang_hooks.types.type_for_mode
4019 (TYPE_MODE (arg0_type),
4020 TYPE_SATURATING (arg0_type));
4021 else
4022 equiv_type = lang_hooks.types.type_for_mode
4023 (TYPE_MODE (arg0_type), 1);
4025 /* A range without an upper bound is, naturally, unbounded.
4026 Since convert would have cropped a very large value, use
4027 the max value for the destination type. */
4028 high_positive
4029 = TYPE_MAX_VALUE (equiv_type) ? TYPE_MAX_VALUE (equiv_type)
4030 : TYPE_MAX_VALUE (arg0_type);
4032 if (TYPE_PRECISION (exp_type) == TYPE_PRECISION (arg0_type))
4033 high_positive = fold_build2_loc (loc, RSHIFT_EXPR, arg0_type,
4034 fold_convert_loc (loc, arg0_type,
4035 high_positive),
4036 build_int_cst (arg0_type, 1));
4038 /* If the low bound is specified, "and" the range with the
4039 range for which the original unsigned value will be
4040 positive. */
4041 if (low != 0)
4043 if (! merge_ranges (&n_in_p, &n_low, &n_high,
4044 1, n_low, n_high, 1,
4045 fold_convert_loc (loc, arg0_type,
4046 integer_zero_node),
4047 high_positive))
4048 break;
4050 in_p = (n_in_p == in_p);
4052 else
4054 /* Otherwise, "or" the range with the range of the input
4055 that will be interpreted as negative. */
4056 if (! merge_ranges (&n_in_p, &n_low, &n_high,
4057 0, n_low, n_high, 1,
4058 fold_convert_loc (loc, arg0_type,
4059 integer_zero_node),
4060 high_positive))
4061 break;
4063 in_p = (in_p != n_in_p);
4067 exp = arg0;
4068 low = n_low, high = n_high;
4069 continue;
4071 default:
4072 break;
4075 break;
4078 /* If EXP is a constant, we can evaluate whether this is true or false. */
4079 if (TREE_CODE (exp) == INTEGER_CST)
4081 in_p = in_p == (integer_onep (range_binop (GE_EXPR, integer_type_node,
4082 exp, 0, low, 0))
4083 && integer_onep (range_binop (LE_EXPR, integer_type_node,
4084 exp, 1, high, 1)));
4085 low = high = 0;
4086 exp = 0;
4089 *pin_p = in_p, *plow = low, *phigh = high;
4090 return exp;
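/* An editorial walk-through: given EXP == "X > 10" for int X, the
   loop starts from "EXP != 0" (IN_P == 0, LOW == HIGH == 0), sees
   GT_EXPR with an INTEGER_CST operand and records "- [-, 10]", then
   continues with EXP == X, which matches no case.  The result is the
   expression X with *PIN_P == 0, *PLOW == NULL_TREE and
   *PHIGH == 10.  */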
4093 /* Given a range, LOW, HIGH, and IN_P, an expression, EXP, and a result
4094 type, TYPE, return an expression to test if EXP is in (or out of, depending
4095 on IN_P) the range. Return 0 if the test couldn't be created. */
4097 tree
4098 build_range_check (location_t loc, tree type, tree exp, int in_p,
4099 tree low, tree high)
4101 tree etype = TREE_TYPE (exp), value;
4103 #ifdef HAVE_canonicalize_funcptr_for_compare
4104 /* Disable this optimization for function pointer expressions
4105 on targets that require function pointer canonicalization. */
4106 if (HAVE_canonicalize_funcptr_for_compare
4107 && TREE_CODE (etype) == POINTER_TYPE
4108 && TREE_CODE (TREE_TYPE (etype)) == FUNCTION_TYPE)
4109 return NULL_TREE;
4110 #endif
4112 if (! in_p)
4114 value = build_range_check (loc, type, exp, 1, low, high);
4115 if (value != 0)
4116 return invert_truthvalue_loc (loc, value);
4118 return 0;
4121 if (low == 0 && high == 0)
4122 return build_int_cst (type, 1);
4124 if (low == 0)
4125 return fold_build2_loc (loc, LE_EXPR, type, exp,
4126 fold_convert_loc (loc, etype, high));
4128 if (high == 0)
4129 return fold_build2_loc (loc, GE_EXPR, type, exp,
4130 fold_convert_loc (loc, etype, low));
4132 if (operand_equal_p (low, high, 0))
4133 return fold_build2_loc (loc, EQ_EXPR, type, exp,
4134 fold_convert_loc (loc, etype, low));
4136 if (integer_zerop (low))
4138 if (! TYPE_UNSIGNED (etype))
4140 etype = unsigned_type_for (etype);
4141 high = fold_convert_loc (loc, etype, high);
4142 exp = fold_convert_loc (loc, etype, exp);
4144 return build_range_check (loc, type, exp, 1, 0, high);
4147 /* Optimize (c>=1) && (c<=127) into (signed char)c > 0. */
4148 if (integer_onep (low) && TREE_CODE (high) == INTEGER_CST)
4150 unsigned HOST_WIDE_INT lo;
4151 HOST_WIDE_INT hi;
4152 int prec;
4154 prec = TYPE_PRECISION (etype);
4155 if (prec <= HOST_BITS_PER_WIDE_INT)
4157 hi = 0;
4158 lo = ((unsigned HOST_WIDE_INT) 1 << (prec - 1)) - 1;
4160 else
4162 hi = ((HOST_WIDE_INT) 1 << (prec - HOST_BITS_PER_WIDE_INT - 1)) - 1;
4163 lo = (unsigned HOST_WIDE_INT) -1;
4166 if (TREE_INT_CST_HIGH (high) == hi && TREE_INT_CST_LOW (high) == lo)
4168 if (TYPE_UNSIGNED (etype))
4170 tree signed_etype = signed_type_for (etype);
4171 if (TYPE_PRECISION (signed_etype) != TYPE_PRECISION (etype))
4172 etype
4173 = build_nonstandard_integer_type (TYPE_PRECISION (etype), 0);
4174 else
4175 etype = signed_etype;
4176 exp = fold_convert_loc (loc, etype, exp);
4178 return fold_build2_loc (loc, GT_EXPR, type, exp,
4179 build_int_cst (etype, 0));
4183 /* Optimize (c>=low) && (c<=high) into (c-low>=0) && (c-low<=high-low).
4184 This requires wrap-around arithmetic for the type of the expression.
4185 First make sure that arithmetic in this type is valid, then make sure
4186 that it wraps around. */
4187 if (TREE_CODE (etype) == ENUMERAL_TYPE || TREE_CODE (etype) == BOOLEAN_TYPE)
4188 etype = lang_hooks.types.type_for_size (TYPE_PRECISION (etype),
4189 TYPE_UNSIGNED (etype));
4191 if (TREE_CODE (etype) == INTEGER_TYPE && !TYPE_OVERFLOW_WRAPS (etype))
4193 tree utype, minv, maxv;
4195 /* Check if (unsigned) INT_MAX + 1 == (unsigned) INT_MIN
4196 for the type in question, as we rely on this here. */
4197 utype = unsigned_type_for (etype);
4198 maxv = fold_convert_loc (loc, utype, TYPE_MAX_VALUE (etype));
4199 maxv = range_binop (PLUS_EXPR, NULL_TREE, maxv, 1,
4200 integer_one_node, 1);
4201 minv = fold_convert_loc (loc, utype, TYPE_MIN_VALUE (etype));
4203 if (integer_zerop (range_binop (NE_EXPR, integer_type_node,
4204 minv, 1, maxv, 1)))
4205 etype = utype;
4206 else
4207 return 0;
4210 high = fold_convert_loc (loc, etype, high);
4211 low = fold_convert_loc (loc, etype, low);
4212 exp = fold_convert_loc (loc, etype, exp);
4214 value = const_binop (MINUS_EXPR, high, low);
4217 if (POINTER_TYPE_P (etype))
4219 if (value != 0 && !TREE_OVERFLOW (value))
4221 low = fold_convert_loc (loc, sizetype, low);
4222 low = fold_build1_loc (loc, NEGATE_EXPR, sizetype, low);
4223 return build_range_check (loc, type,
4224 fold_build_pointer_plus_loc (loc, exp, low),
4225 1, build_int_cst (etype, 0), value);
4227 return 0;
4230 if (value != 0 && !TREE_OVERFLOW (value))
4231 return build_range_check (loc, type,
4232 fold_build2_loc (loc, MINUS_EXPR, etype, exp, low),
4233 1, build_int_cst (etype, 0), value);
4235 return 0;
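/* An editorial worked example: with IN_P == 1, LOW == 2, HIGH == 5 and
   an int EXP, none of the early special cases apply; the tail converts
   to the unsigned type, subtracts LOW, and recurses into the
   integer_zerop (low) case, producing the canonical form
   (unsigned) (EXP - 2) <= 3.  */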
4238 /* Return the predecessor of VAL in its type, handling the infinite case. */
4240 static tree
4241 range_predecessor (tree val)
4243 tree type = TREE_TYPE (val);
4245 if (INTEGRAL_TYPE_P (type)
4246 && operand_equal_p (val, TYPE_MIN_VALUE (type), 0))
4247 return 0;
4248 else
4249 return range_binop (MINUS_EXPR, NULL_TREE, val, 0, integer_one_node, 0);
4252 /* Return the successor of VAL in its type, handling the infinite case. */
4254 static tree
4255 range_successor (tree val)
4257 tree type = TREE_TYPE (val);
4259 if (INTEGRAL_TYPE_P (type)
4260 && operand_equal_p (val, TYPE_MAX_VALUE (type), 0))
4261 return 0;
4262 else
4263 return range_binop (PLUS_EXPR, NULL_TREE, val, 0, integer_one_node, 0);
4266 /* Given two ranges, see if we can merge them into one. Return 1 if we
4267 can, 0 if we can't. Set the output range into the specified parameters. */
4269 bool
4270 merge_ranges (int *pin_p, tree *plow, tree *phigh, int in0_p, tree low0,
4271 tree high0, int in1_p, tree low1, tree high1)
4273 int no_overlap;
4274 int subset;
4275 int temp;
4276 tree tem;
4277 int in_p;
4278 tree low, high;
4279 int lowequal = ((low0 == 0 && low1 == 0)
4280 || integer_onep (range_binop (EQ_EXPR, integer_type_node,
4281 low0, 0, low1, 0)));
4282 int highequal = ((high0 == 0 && high1 == 0)
4283 || integer_onep (range_binop (EQ_EXPR, integer_type_node,
4284 high0, 1, high1, 1)));
4286 /* Make range 0 be the range that starts first, or ends last if they
4287 start at the same value. Swap them if it isn't. */
4288 if (integer_onep (range_binop (GT_EXPR, integer_type_node,
4289 low0, 0, low1, 0))
4290 || (lowequal
4291 && integer_onep (range_binop (GT_EXPR, integer_type_node,
4292 high1, 1, high0, 1))))
4294 temp = in0_p, in0_p = in1_p, in1_p = temp;
4295 tem = low0, low0 = low1, low1 = tem;
4296 tem = high0, high0 = high1, high1 = tem;
4299 /* Now flag two cases, whether the ranges are disjoint or whether the
4300 second range is totally subsumed in the first. Note that the tests
4301 below are simplified by the ones above. */
4302 no_overlap = integer_onep (range_binop (LT_EXPR, integer_type_node,
4303 high0, 1, low1, 0));
4304 subset = integer_onep (range_binop (LE_EXPR, integer_type_node,
4305 high1, 1, high0, 1));
4307 /* We now have four cases, depending on whether we are including or
4308 excluding the two ranges. */
4309 if (in0_p && in1_p)
4311 /* If they don't overlap, the result is false. If the second range
4312 is a subset it is the result. Otherwise, the range is from the start
4313 of the second to the end of the first. */
4314 if (no_overlap)
4315 in_p = 0, low = high = 0;
4316 else if (subset)
4317 in_p = 1, low = low1, high = high1;
4318 else
4319 in_p = 1, low = low1, high = high0;
4322 else if (in0_p && ! in1_p)
4324 /* If they don't overlap, the result is the first range. If they are
4325 equal, the result is false. If the second range is a subset of the
4326 first, and the ranges begin at the same place, we go from just after
4327 the end of the second range to the end of the first. If the second
4328 range is not a subset of the first, or if it is a subset and both
4329 ranges end at the same place, the range starts at the start of the
4330 first range and ends just before the second range.
4331 Otherwise, we can't describe this as a single range. */
4332 if (no_overlap)
4333 in_p = 1, low = low0, high = high0;
4334 else if (lowequal && highequal)
4335 in_p = 0, low = high = 0;
4336 else if (subset && lowequal)
4338 low = range_successor (high1);
4339 high = high0;
4340 in_p = 1;
4341 if (low == 0)
4343 /* We are in the weird situation where high0 > high1 but
4344 high1 has no successor. Punt. */
4345 return 0;
4348 else if (! subset || highequal)
4350 low = low0;
4351 high = range_predecessor (low1);
4352 in_p = 1;
4353 if (high == 0)
4355 /* low0 < low1 but low1 has no predecessor. Punt. */
4356 return 0;
4359 else
4360 return 0;
4363 else if (! in0_p && in1_p)
4365 /* If they don't overlap, the result is the second range. If the second
4366 is a subset of the first, the result is false. Otherwise,
4367 the range starts just after the first range and ends at the
4368 end of the second. */
4369 if (no_overlap)
4370 in_p = 1, low = low1, high = high1;
4371 else if (subset || highequal)
4372 in_p = 0, low = high = 0;
4373 else
4375 low = range_successor (high0);
4376 high = high1;
4377 in_p = 1;
4378 if (low == 0)
4380 /* high1 > high0 but high0 has no successor. Punt. */
4381 return 0;
4386 else
4388 /* The case where we are excluding both ranges. Here the complex case
4389 is if they don't overlap. In that case, the only time we have a
4390 range is if they are adjacent. If the second is a subset of the
4391 first, the result is the first. Otherwise, the range to exclude
4392 starts at the beginning of the first range and ends at the end of the
4393 second. */
4394 if (no_overlap)
4396 if (integer_onep (range_binop (EQ_EXPR, integer_type_node,
4397 range_successor (high0),
4398 1, low1, 0)))
4399 in_p = 0, low = low0, high = high1;
4400 else
4402 /* Canonicalize - [min, x] into - [-, x]. */
4403 if (low0 && TREE_CODE (low0) == INTEGER_CST)
4404 switch (TREE_CODE (TREE_TYPE (low0)))
4406 case ENUMERAL_TYPE:
4407 if (TYPE_PRECISION (TREE_TYPE (low0))
4408 != GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (low0))))
4409 break;
4410 /* FALLTHROUGH */
4411 case INTEGER_TYPE:
4412 if (tree_int_cst_equal (low0,
4413 TYPE_MIN_VALUE (TREE_TYPE (low0))))
4414 low0 = 0;
4415 break;
4416 case POINTER_TYPE:
4417 if (TYPE_UNSIGNED (TREE_TYPE (low0))
4418 && integer_zerop (low0))
4419 low0 = 0;
4420 break;
4421 default:
4422 break;
4425 /* Canonicalize - [x, max] into - [x, -]. */
4426 if (high1 && TREE_CODE (high1) == INTEGER_CST)
4427 switch (TREE_CODE (TREE_TYPE (high1)))
4429 case ENUMERAL_TYPE:
4430 if (TYPE_PRECISION (TREE_TYPE (high1))
4431 != GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (high1))))
4432 break;
4433 /* FALLTHROUGH */
4434 case INTEGER_TYPE:
4435 if (tree_int_cst_equal (high1,
4436 TYPE_MAX_VALUE (TREE_TYPE (high1))))
4437 high1 = 0;
4438 break;
4439 case POINTER_TYPE:
4440 if (TYPE_UNSIGNED (TREE_TYPE (high1))
4441 && integer_zerop (range_binop (PLUS_EXPR, NULL_TREE,
4442 high1, 1,
4443 integer_one_node, 1)))
4444 high1 = 0;
4445 break;
4446 default:
4447 break;
4450 /* The ranges might be also adjacent between the maximum and
4451 minimum values of the given type. For
4452 - [{min,-}, x] and - [y, {max,-}] ranges where x + 1 < y
4453 return + [x + 1, y - 1]. */
4454 if (low0 == 0 && high1 == 0)
4456 low = range_successor (high0);
4457 high = range_predecessor (low1);
4458 if (low == 0 || high == 0)
4459 return 0;
4461 in_p = 1;
4463 else
4464 return 0;
4467 else if (subset)
4468 in_p = 0, low = low0, high = high0;
4469 else
4470 in_p = 0, low = low0, high = high1;
4473 *pin_p = in_p, *plow = low, *phigh = high;
4474 return 1;
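/* To illustrate (editorial sketch): merging "+ [2, 8]" with
   "+ [5, 12]" (the AND of two inclusive range tests) finds neither
   no_overlap nor subset, so the result is the intersection "+ [5, 8]";
   merging "+ [2, 4]" with "+ [9, 12]" finds no_overlap, so the result
   is the always-false range with IN_P == 0 and both bounds omitted.  */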
4478 /* Subroutine of fold, looking inside expressions of the form
4479 A op B ? A : C, where ARG0, ARG1 and ARG2 are the three operands
4480 of the COND_EXPR. This function is being used also to optimize
4481 A op B ? C : A, by reversing the comparison first.
4483 Return a folded expression whose code is not a COND_EXPR
4484 anymore, or NULL_TREE if no folding opportunity is found. */
4486 static tree
4487 fold_cond_expr_with_comparison (location_t loc, tree type,
4488 tree arg0, tree arg1, tree arg2)
4490 enum tree_code comp_code = TREE_CODE (arg0);
4491 tree arg00 = TREE_OPERAND (arg0, 0);
4492 tree arg01 = TREE_OPERAND (arg0, 1);
4493 tree arg1_type = TREE_TYPE (arg1);
4494 tree tem;
4496 STRIP_NOPS (arg1);
4497 STRIP_NOPS (arg2);
4499 /* If we have A op 0 ? A : -A, consider applying the following
4500 transformations:
4502 A == 0? A : -A same as -A
4503 A != 0? A : -A same as A
4504 A >= 0? A : -A same as abs (A)
4505 A > 0? A : -A same as abs (A)
4506 A <= 0? A : -A same as -abs (A)
4507 A < 0? A : -A same as -abs (A)
4509 None of these transformations work for modes with signed
4510 zeros. If A is +/-0, the first two transformations will
4511 change the sign of the result (from +0 to -0, or vice
4512 versa). The last four will fix the sign of the result,
4513 even though the original expressions could be positive or
4514 negative, depending on the sign of A.
4516 Note that all these transformations are correct if A is
4517 NaN, since the two alternatives (A and -A) are also NaNs. */
4518 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (type))
4519 && (FLOAT_TYPE_P (TREE_TYPE (arg01))
4520 ? real_zerop (arg01)
4521 : integer_zerop (arg01))
4522 && ((TREE_CODE (arg2) == NEGATE_EXPR
4523 && operand_equal_p (TREE_OPERAND (arg2, 0), arg1, 0))
4524 /* In the case that A is of the form X-Y, '-A' (arg2) may
4525 have already been folded to Y-X; check for that. */
4526 || (TREE_CODE (arg1) == MINUS_EXPR
4527 && TREE_CODE (arg2) == MINUS_EXPR
4528 && operand_equal_p (TREE_OPERAND (arg1, 0),
4529 TREE_OPERAND (arg2, 1), 0)
4530 && operand_equal_p (TREE_OPERAND (arg1, 1),
4531 TREE_OPERAND (arg2, 0), 0))))
4532 switch (comp_code)
4534 case EQ_EXPR:
4535 case UNEQ_EXPR:
4536 tem = fold_convert_loc (loc, arg1_type, arg1);
4537 return pedantic_non_lvalue_loc (loc,
4538 fold_convert_loc (loc, type,
4539 negate_expr (tem)));
4540 case NE_EXPR:
4541 case LTGT_EXPR:
4542 return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
4543 case UNGE_EXPR:
4544 case UNGT_EXPR:
4545 if (flag_trapping_math)
4546 break;
4547 /* Fall through. */
4548 case GE_EXPR:
4549 case GT_EXPR:
4550 if (TYPE_UNSIGNED (TREE_TYPE (arg1)))
4551 arg1 = fold_convert_loc (loc, signed_type_for
4552 (TREE_TYPE (arg1)), arg1);
4553 tem = fold_build1_loc (loc, ABS_EXPR, TREE_TYPE (arg1), arg1);
4554 return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, tem));
4555 case UNLE_EXPR:
4556 case UNLT_EXPR:
4557 if (flag_trapping_math)
4558 break;
4559 case LE_EXPR:
4560 case LT_EXPR:
4561 if (TYPE_UNSIGNED (TREE_TYPE (arg1)))
4562 arg1 = fold_convert_loc (loc, signed_type_for
4563 (TREE_TYPE (arg1)), arg1);
4564 tem = fold_build1_loc (loc, ABS_EXPR, TREE_TYPE (arg1), arg1);
4565 return negate_expr (fold_convert_loc (loc, type, tem));
4566 default:
4567 gcc_assert (TREE_CODE_CLASS (comp_code) == tcc_comparison);
4568 break;
4571 /* A != 0 ? A : 0 is simply A, unless A is -0. Likewise
4572 A == 0 ? A : 0 is always 0 unless A is -0. Note that
4573 both transformations are correct when A is NaN: A != 0
4574 is then true, and A == 0 is false. */
4576 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (type))
4577 && integer_zerop (arg01) && integer_zerop (arg2))
4579 if (comp_code == NE_EXPR)
4580 return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
4581 else if (comp_code == EQ_EXPR)
4582 return build_int_cst (type, 0);
4585 /* Try some transformations of A op B ? A : B.
4587 A == B? A : B same as B
4588 A != B? A : B same as A
4589 A >= B? A : B same as max (A, B)
4590 A > B? A : B same as max (B, A)
4591 A <= B? A : B same as min (A, B)
4592 A < B? A : B same as min (B, A)
4594 As above, these transformations don't work in the presence
4595 of signed zeros. For example, if A and B are zeros of
4596 opposite sign, the first two transformations will change
4597 the sign of the result. In the last four, the original
4598 expressions give different results for (A=+0, B=-0) and
4599 (A=-0, B=+0), but the transformed expressions do not.
4601 The first two transformations are correct if either A or B
4602 is a NaN. In the first transformation, the condition will
4603 be false, and B will indeed be chosen. In the case of the
4604 second transformation, the condition A != B will be true,
4605 and A will be chosen.
4607 The conversions to max() and min() are not correct if B is
4608 a number and A is not. The conditions in the original
4609 expressions will be false, so all four give B. The min()
4610 and max() versions would give a NaN instead. */
4611 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (type))
4612 && operand_equal_for_comparison_p (arg01, arg2, arg00)
4613 /* Avoid these transformations if the COND_EXPR may be used
4614 as an lvalue in the C++ front-end. PR c++/19199. */
4615 && (in_gimple_form
4616 || (strcmp (lang_hooks.name, "GNU C++") != 0
4617 && strcmp (lang_hooks.name, "GNU Objective-C++") != 0)
4618 || ! maybe_lvalue_p (arg1)
4619 || ! maybe_lvalue_p (arg2)))
4621 tree comp_op0 = arg00;
4622 tree comp_op1 = arg01;
4623 tree comp_type = TREE_TYPE (comp_op0);
4625 /* Avoid adding NOP_EXPRs in case this is an lvalue. */
4626 if (TYPE_MAIN_VARIANT (comp_type) == TYPE_MAIN_VARIANT (type))
4628 comp_type = type;
4629 comp_op0 = arg1;
4630 comp_op1 = arg2;
4633 switch (comp_code)
4635 case EQ_EXPR:
4636 return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, arg2));
4637 case NE_EXPR:
4638 return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
4639 case LE_EXPR:
4640 case LT_EXPR:
4641 case UNLE_EXPR:
4642 case UNLT_EXPR:
4643 /* In C++ a ?: expression can be an lvalue, so put the
4644 operand which will be used if they are equal first
4645 so that we can convert this back to the
4646 corresponding COND_EXPR. */
4647 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
4649 comp_op0 = fold_convert_loc (loc, comp_type, comp_op0);
4650 comp_op1 = fold_convert_loc (loc, comp_type, comp_op1);
4651 tem = (comp_code == LE_EXPR || comp_code == UNLE_EXPR)
4652 ? fold_build2_loc (loc, MIN_EXPR, comp_type, comp_op0, comp_op1)
4653 : fold_build2_loc (loc, MIN_EXPR, comp_type,
4654 comp_op1, comp_op0);
4655 return pedantic_non_lvalue_loc (loc,
4656 fold_convert_loc (loc, type, tem));
4658 break;
4659 case GE_EXPR:
4660 case GT_EXPR:
4661 case UNGE_EXPR:
4662 case UNGT_EXPR:
4663 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
4665 comp_op0 = fold_convert_loc (loc, comp_type, comp_op0);
4666 comp_op1 = fold_convert_loc (loc, comp_type, comp_op1);
4667 tem = (comp_code == GE_EXPR || comp_code == UNGE_EXPR)
4668 ? fold_build2_loc (loc, MAX_EXPR, comp_type, comp_op0, comp_op1)
4669 : fold_build2_loc (loc, MAX_EXPR, comp_type,
4670 comp_op1, comp_op0);
4671 return pedantic_non_lvalue_loc (loc,
4672 fold_convert_loc (loc, type, tem));
4674 break;
4675 case UNEQ_EXPR:
4676 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
4677 return pedantic_non_lvalue_loc (loc,
4678 fold_convert_loc (loc, type, arg2));
4679 break;
4680 case LTGT_EXPR:
4681 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
4682 return pedantic_non_lvalue_loc (loc,
4683 fold_convert_loc (loc, type, arg1));
4684 break;
4685 default:
4686 gcc_assert (TREE_CODE_CLASS (comp_code) == tcc_comparison);
4687 break;
4691 /* If this is A op C1 ? A : C2 with C1 and C2 constant integers,
4692 we might still be able to simplify this. For example,
4693 if C1 is one less or one more than C2, this might have started
4694 out as a MIN or MAX and been transformed by this function.
4695 Only good for INTEGER_TYPEs, because we need TYPE_MAX_VALUE. */
4697 if (INTEGRAL_TYPE_P (type)
4698 && TREE_CODE (arg01) == INTEGER_CST
4699 && TREE_CODE (arg2) == INTEGER_CST)
4700 switch (comp_code)
4702 case EQ_EXPR:
4703 if (TREE_CODE (arg1) == INTEGER_CST)
4704 break;
4705 /* We can replace A with C1 in this case. */
4706 arg1 = fold_convert_loc (loc, type, arg01);
4707 return fold_build3_loc (loc, COND_EXPR, type, arg0, arg1, arg2);
4709 case LT_EXPR:
4710 /* If C1 is C2 + 1, this is min(A, C2), but use ARG00's type for
4711 MIN_EXPR, to preserve the signedness of the comparison. */
4712 if (! operand_equal_p (arg2, TYPE_MAX_VALUE (type),
4713 OEP_ONLY_CONST)
4714 && operand_equal_p (arg01,
4715 const_binop (PLUS_EXPR, arg2,
4716 build_int_cst (type, 1)),
4717 OEP_ONLY_CONST))
4719 tem = fold_build2_loc (loc, MIN_EXPR, TREE_TYPE (arg00), arg00,
4720 fold_convert_loc (loc, TREE_TYPE (arg00),
4721 arg2));
4722 return pedantic_non_lvalue_loc (loc,
4723 fold_convert_loc (loc, type, tem));
4725 break;
4727 case LE_EXPR:
4728 /* If C1 is C2 - 1, this is min(A, C2), with the same care
4729 as above. */
4730 if (! operand_equal_p (arg2, TYPE_MIN_VALUE (type),
4731 OEP_ONLY_CONST)
4732 && operand_equal_p (arg01,
4733 const_binop (MINUS_EXPR, arg2,
4734 build_int_cst (type, 1)),
4735 OEP_ONLY_CONST))
4737 tem = fold_build2_loc (loc, MIN_EXPR, TREE_TYPE (arg00), arg00,
4738 fold_convert_loc (loc, TREE_TYPE (arg00),
4739 arg2));
4740 return pedantic_non_lvalue_loc (loc,
4741 fold_convert_loc (loc, type, tem));
4743 break;
4745 case GT_EXPR:
4746 /* If C1 is C2 - 1, this is max(A, C2), but use ARG00's type for
4747 MAX_EXPR, to preserve the signedness of the comparison. */
4748 if (! operand_equal_p (arg2, TYPE_MIN_VALUE (type),
4749 OEP_ONLY_CONST)
4750 && operand_equal_p (arg01,
4751 const_binop (MINUS_EXPR, arg2,
4752 build_int_cst (type, 1)),
4753 OEP_ONLY_CONST))
4755 tem = fold_build2_loc (loc, MAX_EXPR, TREE_TYPE (arg00), arg00,
4756 fold_convert_loc (loc, TREE_TYPE (arg00),
4757 arg2));
4758 return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, tem));
4760 break;
4762 case GE_EXPR:
4763 /* If C1 is C2 + 1, this is max(A, C2), with the same care as above. */
4764 if (! operand_equal_p (arg2, TYPE_MAX_VALUE (type),
4765 OEP_ONLY_CONST)
4766 && operand_equal_p (arg01,
4767 const_binop (PLUS_EXPR, arg2,
4768 build_int_cst (type, 1)),
4769 OEP_ONLY_CONST))
4771 tem = fold_build2_loc (loc, MAX_EXPR, TREE_TYPE (arg00), arg00,
4772 fold_convert_loc (loc, TREE_TYPE (arg00),
4773 arg2));
4774 return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, tem));
4776 break;
4777 case NE_EXPR:
4778 break;
4779 default:
4780 gcc_unreachable ();
4783 return NULL_TREE;
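/* Editorial examples of the transformations above: "x >= 0 ? x : -x"
   becomes ABS_EXPR <x>; "x < y ? x : y" becomes MIN_EXPR <x, y> when
   NaNs need not be honored; and "x < 5 ? x : 4" is recognized by the
   constant cases above as MIN_EXPR <x, 4>, since 5 is 4 + 1.  */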
4788 #ifndef LOGICAL_OP_NON_SHORT_CIRCUIT
4789 #define LOGICAL_OP_NON_SHORT_CIRCUIT \
4790 (BRANCH_COST (optimize_function_for_speed_p (cfun), \
4791 false) >= 2)
4792 #endif
4794 /* EXP is some logical combination of boolean tests. See if we can
4795 merge it into some range test. Return the new tree if so. */
4797 static tree
4798 fold_range_test (location_t loc, enum tree_code code, tree type,
4799 tree op0, tree op1)
4801 int or_op = (code == TRUTH_ORIF_EXPR
4802 || code == TRUTH_OR_EXPR);
4803 int in0_p, in1_p, in_p;
4804 tree low0, low1, low, high0, high1, high;
4805 bool strict_overflow_p = false;
4806 tree lhs = make_range (op0, &in0_p, &low0, &high0, &strict_overflow_p);
4807 tree rhs = make_range (op1, &in1_p, &low1, &high1, &strict_overflow_p);
4808 tree tem;
4809 const char * const warnmsg = G_("assuming signed overflow does not occur "
4810 "when simplifying range test");
4812 /* If this is an OR operation, invert both sides; we will invert
4813 again at the end. */
4814 if (or_op)
4815 in0_p = ! in0_p, in1_p = ! in1_p;
4817 /* If both expressions are the same, if we can merge the ranges, and we
4818 can build the range test, return it or it inverted. If one of the
4819 ranges is always true or always false, consider it to be the same
4820 expression as the other. */
4821 if ((lhs == 0 || rhs == 0 || operand_equal_p (lhs, rhs, 0))
4822 && merge_ranges (&in_p, &low, &high, in0_p, low0, high0,
4823 in1_p, low1, high1)
4824 && 0 != (tem = (build_range_check (loc, type,
4825 lhs != 0 ? lhs
4826 : rhs != 0 ? rhs : integer_zero_node,
4827 in_p, low, high))))
4829 if (strict_overflow_p)
4830 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_COMPARISON);
4831 return or_op ? invert_truthvalue_loc (loc, tem) : tem;
4834 /* On machines where branches are expensive, if this is a
4835 short-circuited branch and the underlying object on both sides
4836 is the same, make a non-short-circuit operation. */
4837 else if (LOGICAL_OP_NON_SHORT_CIRCUIT
4838 && lhs != 0 && rhs != 0
4839 && (code == TRUTH_ANDIF_EXPR
4840 || code == TRUTH_ORIF_EXPR)
4841 && operand_equal_p (lhs, rhs, 0))
4843 /* If simple enough, just rewrite. Otherwise, make a SAVE_EXPR
4844 unless we are at top level or LHS contains a PLACEHOLDER_EXPR, in
4845 which cases we can't do this. */
4846 if (simple_operand_p (lhs))
4847 return build2_loc (loc, code == TRUTH_ANDIF_EXPR
4848 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR,
4849 type, op0, op1);
4851 else if (!lang_hooks.decls.global_bindings_p ()
4852 && !CONTAINS_PLACEHOLDER_P (lhs))
4854 tree common = save_expr (lhs);
4856 if (0 != (lhs = build_range_check (loc, type, common,
4857 or_op ? ! in0_p : in0_p,
4858 low0, high0))
4859 && (0 != (rhs = build_range_check (loc, type, common,
4860 or_op ? ! in1_p : in1_p,
4861 low1, high1))))
4863 if (strict_overflow_p)
4864 fold_overflow_warning (warnmsg,
4865 WARN_STRICT_OVERFLOW_COMPARISON);
4866 return build2_loc (loc, code == TRUTH_ANDIF_EXPR
4867 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR,
4868 type, lhs, rhs);
4873 return 0;
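/* An editorial worked example: in "ch >= '0' && ch <= '9'" both
   operands test ranges of CH, merge_ranges combines them into
   "+ ['0', '9']", and build_range_check returns
   (unsigned) (ch - '0') <= 9.  */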
4876 /* Subroutine for fold_truthop: C is an INTEGER_CST interpreted as a P
4877 bit value. Arrange things so the extra bits will be set to zero if and
4878 only if C is sign-extended to its full width. If MASK is nonzero,
4879 it is an INTEGER_CST that should be AND'ed with the extra bits. */
4881 static tree
4882 unextend (tree c, int p, int unsignedp, tree mask)
4884 tree type = TREE_TYPE (c);
4885 int modesize = GET_MODE_BITSIZE (TYPE_MODE (type));
4886 tree temp;
4888 if (p == modesize || unsignedp)
4889 return c;
4891 /* We work by getting just the sign bit into the low-order bit, then
4892 into the high-order bit, then sign-extend. We then XOR that value
4893 with C. */
4894 temp = const_binop (RSHIFT_EXPR, c, size_int (p - 1));
4895 temp = const_binop (BIT_AND_EXPR, temp, size_int (1));
4897 /* We must use a signed type in order to get an arithmetic right shift.
4898 However, we must also avoid introducing accidental overflows, so that
4899 a subsequent call to integer_zerop will work. Hence we must
4900 do the type conversion here. At this point, the constant is either
4901 zero or one, and the conversion to a signed type can never overflow.
4902 We could get an overflow if this conversion is done anywhere else. */
4903 if (TYPE_UNSIGNED (type))
4904 temp = fold_convert (signed_type_for (type), temp);
4906 temp = const_binop (LSHIFT_EXPR, temp, size_int (modesize - 1));
4907 temp = const_binop (RSHIFT_EXPR, temp, size_int (modesize - p - 1));
4908 if (mask != 0)
4909 temp = const_binop (BIT_AND_EXPR, temp,
4910 fold_convert (TREE_TYPE (c), mask));
4911 /* If necessary, convert the type back to match the type of C. */
4912 if (TYPE_UNSIGNED (type))
4913 temp = fold_convert (type, temp);
4915 return fold_convert (type, const_binop (BIT_XOR_EXPR, c, temp));
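/* To illustrate, with P == 8 in a 16-bit mode (editorial sketch): for
   C == 0xff80, which already is the sign extension of its low 8 bits,
   TEMP ends up as 0xff00 and the XOR yields 0x0080, so the extra bits
   are zero; for C == 0x0080 the same TEMP yields 0xff80, with the
   extra bits set, exactly as the comment above requires.  */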
4918 /* For an expression that has the form
4919 (A && B) || ~B
4920 or
4921 (A || B) && ~B,
4922 we can drop one of the inner expressions and simplify to
4923 A || ~B
4924 or
4925 A && ~B
4926 LOC is the location of the resulting expression. OP is the inner
4927 logical operation; the left-hand side in the examples above, while CMPOP
4928 is the right-hand side. RHS_ONLY is used to prevent us from accidentally
4929 removing a condition that guards another, as in
4930 (A != NULL && A->...) || A == NULL
4931 which we must not transform. If RHS_ONLY is true, only eliminate the
4932 right-most operand of the inner logical operation. */
4934 static tree
4935 merge_truthop_with_opposite_arm (location_t loc, tree op, tree cmpop,
4936 bool rhs_only)
4938 tree type = TREE_TYPE (cmpop);
4939 enum tree_code code = TREE_CODE (cmpop);
4940 enum tree_code truthop_code = TREE_CODE (op);
4941 tree lhs = TREE_OPERAND (op, 0);
4942 tree rhs = TREE_OPERAND (op, 1);
4943 tree orig_lhs = lhs, orig_rhs = rhs;
4944 enum tree_code rhs_code = TREE_CODE (rhs);
4945 enum tree_code lhs_code = TREE_CODE (lhs);
4946 enum tree_code inv_code;
4948 if (TREE_SIDE_EFFECTS (op) || TREE_SIDE_EFFECTS (cmpop))
4949 return NULL_TREE;
4951 if (TREE_CODE_CLASS (code) != tcc_comparison)
4952 return NULL_TREE;
4954 if (rhs_code == truthop_code)
4956 tree newrhs = merge_truthop_with_opposite_arm (loc, rhs, cmpop, rhs_only);
4957 if (newrhs != NULL_TREE)
4959 rhs = newrhs;
4960 rhs_code = TREE_CODE (rhs);
4963 if (lhs_code == truthop_code && !rhs_only)
4965 tree newlhs = merge_truthop_with_opposite_arm (loc, lhs, cmpop, false);
4966 if (newlhs != NULL_TREE)
4968 lhs = newlhs;
4969 lhs_code = TREE_CODE (lhs);
4973 inv_code = invert_tree_comparison (code, HONOR_NANS (TYPE_MODE (type)));
4974 if (inv_code == rhs_code
4975 && operand_equal_p (TREE_OPERAND (rhs, 0), TREE_OPERAND (cmpop, 0), 0)
4976 && operand_equal_p (TREE_OPERAND (rhs, 1), TREE_OPERAND (cmpop, 1), 0))
4977 return lhs;
4978 if (!rhs_only && inv_code == lhs_code
4979 && operand_equal_p (TREE_OPERAND (lhs, 0), TREE_OPERAND (cmpop, 0), 0)
4980 && operand_equal_p (TREE_OPERAND (lhs, 1), TREE_OPERAND (cmpop, 1), 0))
4981 return rhs;
4982 if (rhs != orig_rhs || lhs != orig_lhs)
4983 return fold_build2_loc (loc, truthop_code, TREE_TYPE (cmpop),
4984 lhs, rhs);
4985 return NULL_TREE;
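/* An editorial example with illustrative names: for
   OP == "x > 0 && y > 0" and CMPOP == "x <= 0", the inverted
   comparison "x > 0" matches the left arm of OP, so the result is
   "y > 0", letting a caller simplify "(x > 0 && y > 0) || x <= 0" into
   "y > 0 || x <= 0".  With RHS_ONLY true,
   "(a != 0 && a->f != 0) || a == 0" is left alone, since dropping
   "a != 0" would remove the guard on the dereference.  */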
4988 /* Find ways of folding logical expressions of LHS and RHS:
4989 Try to merge two comparisons to the same innermost item.
4990 Look for range tests like "ch >= '0' && ch <= '9'".
4991 Look for combinations of simple terms on machines with expensive branches
4992 and evaluate the RHS unconditionally.
4994 For example, if we have p->a == 2 && p->b == 4 and we can make an
4995 object large enough to span both A and B, we can do this with a comparison
4996 against the object ANDed with a mask.
4998 If we have p->a == q->a && p->b == q->b, we may be able to use bit masking
4999 operations to do this with one comparison.
5001 We check for both normal comparisons and the BIT_AND_EXPRs made by this
5002 function and the one above.
5004 CODE is the logical operation being done. It can be TRUTH_ANDIF_EXPR,
5005 TRUTH_AND_EXPR, TRUTH_ORIF_EXPR, or TRUTH_OR_EXPR.
5007 TRUTH_TYPE is the type of the logical operand and LHS and RHS are its
5008 two operands.
5010 We return the simplified tree or 0 if no optimization is possible. */
5012 static tree
5013 fold_truthop (location_t loc, enum tree_code code, tree truth_type,
5014 tree lhs, tree rhs)
5016 /* If this is the "or" of two comparisons, we can do something if
5017 the comparisons are NE_EXPR. If this is the "and", we can do something
5018 if the comparisons are EQ_EXPR. I.e.,
5019 (a->b == 2 && a->c == 4) can become (a->new == NEW).
5021 WANTED_CODE is this operation code. For single bit fields, we can
5022 convert EQ_EXPR to NE_EXPR so we need not reject the "wrong"
5023 comparison for one-bit fields. */
5025 enum tree_code wanted_code;
5026 enum tree_code lcode, rcode;
5027 tree ll_arg, lr_arg, rl_arg, rr_arg;
5028 tree ll_inner, lr_inner, rl_inner, rr_inner;
5029 HOST_WIDE_INT ll_bitsize, ll_bitpos, lr_bitsize, lr_bitpos;
5030 HOST_WIDE_INT rl_bitsize, rl_bitpos, rr_bitsize, rr_bitpos;
5031 HOST_WIDE_INT xll_bitpos, xlr_bitpos, xrl_bitpos, xrr_bitpos;
5032 HOST_WIDE_INT lnbitsize, lnbitpos, rnbitsize, rnbitpos;
5033 int ll_unsignedp, lr_unsignedp, rl_unsignedp, rr_unsignedp;
5034 enum machine_mode ll_mode, lr_mode, rl_mode, rr_mode;
5035 enum machine_mode lnmode, rnmode;
5036 tree ll_mask, lr_mask, rl_mask, rr_mask;
5037 tree ll_and_mask, lr_and_mask, rl_and_mask, rr_and_mask;
5038 tree l_const, r_const;
5039 tree lntype, rntype, result;
5040 HOST_WIDE_INT first_bit, end_bit;
5041 int volatilep;
5042 tree orig_lhs = lhs, orig_rhs = rhs;
5043 enum tree_code orig_code = code;
5045 /* Start by getting the comparison codes. Fail if anything is volatile.
5046 If one operand is a BIT_AND_EXPR with the constant one, treat it as if
5047 it were surrounded with a NE_EXPR. */
5049 if (TREE_SIDE_EFFECTS (lhs) || TREE_SIDE_EFFECTS (rhs))
5050 return 0;
5052 lcode = TREE_CODE (lhs);
5053 rcode = TREE_CODE (rhs);
5055 if (lcode == BIT_AND_EXPR && integer_onep (TREE_OPERAND (lhs, 1)))
5057 lhs = build2 (NE_EXPR, truth_type, lhs,
5058 build_int_cst (TREE_TYPE (lhs), 0));
5059 lcode = NE_EXPR;
5062 if (rcode == BIT_AND_EXPR && integer_onep (TREE_OPERAND (rhs, 1)))
5064 rhs = build2 (NE_EXPR, truth_type, rhs,
5065 build_int_cst (TREE_TYPE (rhs), 0));
5066 rcode = NE_EXPR;
5069 if (TREE_CODE_CLASS (lcode) != tcc_comparison
5070 || TREE_CODE_CLASS (rcode) != tcc_comparison)
5071 return 0;
5073 ll_arg = TREE_OPERAND (lhs, 0);
5074 lr_arg = TREE_OPERAND (lhs, 1);
5075 rl_arg = TREE_OPERAND (rhs, 0);
5076 rr_arg = TREE_OPERAND (rhs, 1);
5078 /* Simplify (x<y) && (x==y) into (x<=y) and related optimizations. */
5079 if (simple_operand_p (ll_arg)
5080 && simple_operand_p (lr_arg))
5082 if (operand_equal_p (ll_arg, rl_arg, 0)
5083 && operand_equal_p (lr_arg, rr_arg, 0))
5085 result = combine_comparisons (loc, code, lcode, rcode,
5086 truth_type, ll_arg, lr_arg);
5087 if (result)
5088 return result;
5090 else if (operand_equal_p (ll_arg, rr_arg, 0)
5091 && operand_equal_p (lr_arg, rl_arg, 0))
5093 result = combine_comparisons (loc, code, lcode,
5094 swap_tree_comparison (rcode),
5095 truth_type, ll_arg, lr_arg);
5096 if (result)
5097 return result;
5101 code = ((code == TRUTH_AND_EXPR || code == TRUTH_ANDIF_EXPR)
5102 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR);
5104 /* If the RHS can be evaluated unconditionally and its operands are
5105 simple, it wins to evaluate the RHS unconditionally on machines
5106 with expensive branches. In this case, this isn't a comparison
5107 that can be merged. Avoid doing this if the RHS is a floating-point
5108 comparison since those can trap. */
5110 if (BRANCH_COST (optimize_function_for_speed_p (cfun),
5111 false) >= 2
5112 && ! FLOAT_TYPE_P (TREE_TYPE (rl_arg))
5113 && simple_operand_p (rl_arg)
5114 && simple_operand_p (rr_arg))
5116 /* Convert (a != 0) || (b != 0) into (a | b) != 0. */
5117 if (code == TRUTH_OR_EXPR
5118 && lcode == NE_EXPR && integer_zerop (lr_arg)
5119 && rcode == NE_EXPR && integer_zerop (rr_arg)
5120 && TREE_TYPE (ll_arg) == TREE_TYPE (rl_arg)
5121 && INTEGRAL_TYPE_P (TREE_TYPE (ll_arg)))
5122 return build2_loc (loc, NE_EXPR, truth_type,
5123 build2 (BIT_IOR_EXPR, TREE_TYPE (ll_arg),
5124 ll_arg, rl_arg),
5125 build_int_cst (TREE_TYPE (ll_arg), 0));
5127 /* Convert (a == 0) && (b == 0) into (a | b) == 0. */
5128 if (code == TRUTH_AND_EXPR
5129 && lcode == EQ_EXPR && integer_zerop (lr_arg)
5130 && rcode == EQ_EXPR && integer_zerop (rr_arg)
5131 && TREE_TYPE (ll_arg) == TREE_TYPE (rl_arg)
5132 && INTEGRAL_TYPE_P (TREE_TYPE (ll_arg)))
5133 return build2_loc (loc, EQ_EXPR, truth_type,
5134 build2 (BIT_IOR_EXPR, TREE_TYPE (ll_arg),
5135 ll_arg, rl_arg),
5136 build_int_cst (TREE_TYPE (ll_arg), 0));
5138 if (LOGICAL_OP_NON_SHORT_CIRCUIT)
5140 if (code != orig_code || lhs != orig_lhs || rhs != orig_rhs)
5141 return build2_loc (loc, code, truth_type, lhs, rhs);
5142 return NULL_TREE;
5146 /* See if the comparisons can be merged. Then get all the parameters for
5147 each side. */
5149 if ((lcode != EQ_EXPR && lcode != NE_EXPR)
5150 || (rcode != EQ_EXPR && rcode != NE_EXPR))
5151 return 0;
5153 volatilep = 0;
5154 ll_inner = decode_field_reference (loc, ll_arg,
5155 &ll_bitsize, &ll_bitpos, &ll_mode,
5156 &ll_unsignedp, &volatilep, &ll_mask,
5157 &ll_and_mask);
5158 lr_inner = decode_field_reference (loc, lr_arg,
5159 &lr_bitsize, &lr_bitpos, &lr_mode,
5160 &lr_unsignedp, &volatilep, &lr_mask,
5161 &lr_and_mask);
5162 rl_inner = decode_field_reference (loc, rl_arg,
5163 &rl_bitsize, &rl_bitpos, &rl_mode,
5164 &rl_unsignedp, &volatilep, &rl_mask,
5165 &rl_and_mask);
5166 rr_inner = decode_field_reference (loc, rr_arg,
5167 &rr_bitsize, &rr_bitpos, &rr_mode,
5168 &rr_unsignedp, &volatilep, &rr_mask,
5169 &rr_and_mask);
5171 /* The inner operation on the lhs of each
5172 comparison must be the same if we are to be able to do anything.
5173 Then see if we have constants. If not, the same must be true for
5174 the rhs's. */
5175 if (volatilep || ll_inner == 0 || rl_inner == 0
5176 || ! operand_equal_p (ll_inner, rl_inner, 0))
5177 return 0;
5179 if (TREE_CODE (lr_arg) == INTEGER_CST
5180 && TREE_CODE (rr_arg) == INTEGER_CST)
5181 l_const = lr_arg, r_const = rr_arg;
5182 else if (lr_inner == 0 || rr_inner == 0
5183 || ! operand_equal_p (lr_inner, rr_inner, 0))
5184 return 0;
5185 else
5186 l_const = r_const = 0;
5188 /* If either comparison code is not correct for our logical operation,
5189 fail. However, we can convert a one-bit comparison against zero into
5190 the opposite comparison against that bit being set in the field. */
5192 wanted_code = (code == TRUTH_AND_EXPR ? EQ_EXPR : NE_EXPR);
5193 if (lcode != wanted_code)
5195 if (l_const && integer_zerop (l_const) && integer_pow2p (ll_mask))
5197 /* Make the left operand unsigned, since we are only interested
5198 in the value of one bit. Otherwise we are doing the wrong
5199 thing below. */
5200 ll_unsignedp = 1;
5201 l_const = ll_mask;
5203 else
5204 return 0;
5207 /* This is analogous to the code for l_const above. */
5208 if (rcode != wanted_code)
5210 if (r_const && integer_zerop (r_const) && integer_pow2p (rl_mask))
5212 rl_unsignedp = 1;
5213 r_const = rl_mask;
5215 else
5216 return 0;
5219 /* See if we can find a mode that contains both fields being compared on
5220 the left. If we can't, fail. Otherwise, update all constants and masks
5221 to be relative to a field of that size. */
5222 first_bit = MIN (ll_bitpos, rl_bitpos);
5223 end_bit = MAX (ll_bitpos + ll_bitsize, rl_bitpos + rl_bitsize);
5224 lnmode = get_best_mode (end_bit - first_bit, first_bit, 0, 0,
5225 TYPE_ALIGN (TREE_TYPE (ll_inner)), word_mode,
5226 volatilep);
5227 if (lnmode == VOIDmode)
5228 return 0;
5230 lnbitsize = GET_MODE_BITSIZE (lnmode);
5231 lnbitpos = first_bit & ~ (lnbitsize - 1);
5232 lntype = lang_hooks.types.type_for_size (lnbitsize, 1);
5233 xll_bitpos = ll_bitpos - lnbitpos, xrl_bitpos = rl_bitpos - lnbitpos;
5235 if (BYTES_BIG_ENDIAN)
5237 xll_bitpos = lnbitsize - xll_bitpos - ll_bitsize;
5238 xrl_bitpos = lnbitsize - xrl_bitpos - rl_bitsize;
5241 ll_mask = const_binop (LSHIFT_EXPR, fold_convert_loc (loc, lntype, ll_mask),
5242 size_int (xll_bitpos));
5243 rl_mask = const_binop (LSHIFT_EXPR, fold_convert_loc (loc, lntype, rl_mask),
5244 size_int (xrl_bitpos));
5246 if (l_const)
5248 l_const = fold_convert_loc (loc, lntype, l_const);
5249 l_const = unextend (l_const, ll_bitsize, ll_unsignedp, ll_and_mask);
5250 l_const = const_binop (LSHIFT_EXPR, l_const, size_int (xll_bitpos));
5251 if (! integer_zerop (const_binop (BIT_AND_EXPR, l_const,
5252 fold_build1_loc (loc, BIT_NOT_EXPR,
5253 lntype, ll_mask))))
5255 warning (0, "comparison is always %d", wanted_code == NE_EXPR);
5257 return constant_boolean_node (wanted_code == NE_EXPR, truth_type);
5260 if (r_const)
5262 r_const = fold_convert_loc (loc, lntype, r_const);
5263 r_const = unextend (r_const, rl_bitsize, rl_unsignedp, rl_and_mask);
5264 r_const = const_binop (LSHIFT_EXPR, r_const, size_int (xrl_bitpos));
5265 if (! integer_zerop (const_binop (BIT_AND_EXPR, r_const,
5266 fold_build1_loc (loc, BIT_NOT_EXPR,
5267 lntype, rl_mask))))
5269 warning (0, "comparison is always %d", wanted_code == NE_EXPR);
5271 return constant_boolean_node (wanted_code == NE_EXPR, truth_type);
5275 /* If the right sides are not constant, do the same for them. Also,
5276 disallow this optimization if a size or signedness mismatch occurs
5277 between the left and right sides. */
5278 if (l_const == 0)
5280 if (ll_bitsize != lr_bitsize || rl_bitsize != rr_bitsize
5281 || ll_unsignedp != lr_unsignedp || rl_unsignedp != rr_unsignedp
5282 /* Make sure the two fields on the right
5283 correspond to the left without being swapped. */
5284 || ll_bitpos - rl_bitpos != lr_bitpos - rr_bitpos)
5285 return 0;
5287 first_bit = MIN (lr_bitpos, rr_bitpos);
5288 end_bit = MAX (lr_bitpos + lr_bitsize, rr_bitpos + rr_bitsize);
5289 rnmode = get_best_mode (end_bit - first_bit, first_bit, 0, 0,
5290 TYPE_ALIGN (TREE_TYPE (lr_inner)), word_mode,
5291 volatilep);
5292 if (rnmode == VOIDmode)
5293 return 0;
5295 rnbitsize = GET_MODE_BITSIZE (rnmode);
5296 rnbitpos = first_bit & ~ (rnbitsize - 1);
5297 rntype = lang_hooks.types.type_for_size (rnbitsize, 1);
5298 xlr_bitpos = lr_bitpos - rnbitpos, xrr_bitpos = rr_bitpos - rnbitpos;
5300 if (BYTES_BIG_ENDIAN)
5302 xlr_bitpos = rnbitsize - xlr_bitpos - lr_bitsize;
5303 xrr_bitpos = rnbitsize - xrr_bitpos - rr_bitsize;
5306 lr_mask = const_binop (LSHIFT_EXPR, fold_convert_loc (loc,
5307 rntype, lr_mask),
5308 size_int (xlr_bitpos));
5309 rr_mask = const_binop (LSHIFT_EXPR, fold_convert_loc (loc,
5310 rntype, rr_mask),
5311 size_int (xrr_bitpos));
5313 /* Make a mask that corresponds to both fields being compared.
5314 Do this for both items being compared. If the operands are the
5315 same size and the bits being compared are in the same position
5316 then we can do this by masking both and comparing the masked
5317 results. */
5318 ll_mask = const_binop (BIT_IOR_EXPR, ll_mask, rl_mask);
5319 lr_mask = const_binop (BIT_IOR_EXPR, lr_mask, rr_mask);
5320 if (lnbitsize == rnbitsize && xll_bitpos == xlr_bitpos)
5322 lhs = make_bit_field_ref (loc, ll_inner, lntype, lnbitsize, lnbitpos,
5323 ll_unsignedp || rl_unsignedp);
5324 if (! all_ones_mask_p (ll_mask, lnbitsize))
5325 lhs = build2 (BIT_AND_EXPR, lntype, lhs, ll_mask);
5327 rhs = make_bit_field_ref (loc, lr_inner, rntype, rnbitsize, rnbitpos,
5328 lr_unsignedp || rr_unsignedp);
5329 if (! all_ones_mask_p (lr_mask, rnbitsize))
5330 rhs = build2 (BIT_AND_EXPR, rntype, rhs, lr_mask);
5332 return build2_loc (loc, wanted_code, truth_type, lhs, rhs);
5335 /* There is still another way we can do something: If both pairs of
5336 fields being compared are adjacent, we may be able to make a wider
5337 field containing them both.
5339 Note that we still must mask the lhs/rhs expressions. Furthermore,
5340 the mask must be shifted to account for the shift done by
5341 make_bit_field_ref. */
5342 if ((ll_bitsize + ll_bitpos == rl_bitpos
5343 && lr_bitsize + lr_bitpos == rr_bitpos)
5344 || (ll_bitpos == rl_bitpos + rl_bitsize
5345 && lr_bitpos == rr_bitpos + rr_bitsize))
5347 tree type;
5349 lhs = make_bit_field_ref (loc, ll_inner, lntype,
5350 ll_bitsize + rl_bitsize,
5351 MIN (ll_bitpos, rl_bitpos), ll_unsignedp);
5352 rhs = make_bit_field_ref (loc, lr_inner, rntype,
5353 lr_bitsize + rr_bitsize,
5354 MIN (lr_bitpos, rr_bitpos), lr_unsignedp);
5356 ll_mask = const_binop (RSHIFT_EXPR, ll_mask,
5357 size_int (MIN (xll_bitpos, xrl_bitpos)));
5358 lr_mask = const_binop (RSHIFT_EXPR, lr_mask,
5359 size_int (MIN (xlr_bitpos, xrr_bitpos)));
5361 /* Convert to the smaller type before masking out unwanted bits. */
5362 type = lntype;
5363 if (lntype != rntype)
5365 if (lnbitsize > rnbitsize)
5367 lhs = fold_convert_loc (loc, rntype, lhs);
5368 ll_mask = fold_convert_loc (loc, rntype, ll_mask);
5369 type = rntype;
5371 else if (lnbitsize < rnbitsize)
5373 rhs = fold_convert_loc (loc, lntype, rhs);
5374 lr_mask = fold_convert_loc (loc, lntype, lr_mask);
5375 type = lntype;
5379 if (! all_ones_mask_p (ll_mask, ll_bitsize + rl_bitsize))
5380 lhs = build2 (BIT_AND_EXPR, type, lhs, ll_mask);
5382 if (! all_ones_mask_p (lr_mask, lr_bitsize + rr_bitsize))
5383 rhs = build2 (BIT_AND_EXPR, type, rhs, lr_mask);
5385 return build2_loc (loc, wanted_code, truth_type, lhs, rhs);
5388 return 0;
5391 /* Handle the case of comparisons with constants. If there is something in
5392 common between the masks, those bits of the constants must be the same.
5393 If not, the condition is always false. Test for this to avoid generating
5394 incorrect code below. */
5395 result = const_binop (BIT_AND_EXPR, ll_mask, rl_mask);
5396 if (! integer_zerop (result)
5397 && simple_cst_equal (const_binop (BIT_AND_EXPR, result, l_const),
5398 const_binop (BIT_AND_EXPR, result, r_const)) != 1)
5400 if (wanted_code == NE_EXPR)
5402 warning (0, "%<or%> of unmatched not-equal tests is always 1");
5403 return constant_boolean_node (true, truth_type);
5405 else
5407 warning (0, "%<and%> of mutually exclusive equal-tests is always 0");
5408 return constant_boolean_node (false, truth_type);
5412 /* Construct the expression we will return. First get the component
5413 reference we will make. Unless the mask is all ones the width of
5414 that field, perform the mask operation. Then compare with the
5415 merged constant. */
5416 result = make_bit_field_ref (loc, ll_inner, lntype, lnbitsize, lnbitpos,
5417 ll_unsignedp || rl_unsignedp);
5419 ll_mask = const_binop (BIT_IOR_EXPR, ll_mask, rl_mask);
5420 if (! all_ones_mask_p (ll_mask, lnbitsize))
5421 result = build2_loc (loc, BIT_AND_EXPR, lntype, result, ll_mask);
5423 return build2_loc (loc, wanted_code, truth_type, result,
5424 const_binop (BIT_IOR_EXPR, l_const, r_const));
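/* An editorial example on a typical target: given
   struct { unsigned a : 4, b : 4; } *p, the test
   "p->a == 2 && p->b == 4" compares adjacent bit-fields of a single
   byte, so the code above merges it into one masked comparison of
   that byte against the combined constant (whose layout depends on
   BYTES_BIG_ENDIAN); and "(x != 0) || (y != 0)" on simple integral
   operands becomes "(x | y) != 0" by the early conversion above.  */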
5427 /* Optimize T, which is a comparison of a MIN_EXPR or MAX_EXPR with a
5428 constant. */
5430 static tree
5431 optimize_minmax_comparison (location_t loc, enum tree_code code, tree type,
5432 tree op0, tree op1)
5434 tree arg0 = op0;
5435 enum tree_code op_code;
5436 tree comp_const;
5437 tree minmax_const;
5438 int consts_equal, consts_lt;
5439 tree inner;
5441 STRIP_SIGN_NOPS (arg0);
5443 op_code = TREE_CODE (arg0);
5444 minmax_const = TREE_OPERAND (arg0, 1);
5445 comp_const = fold_convert_loc (loc, TREE_TYPE (arg0), op1);
5446 consts_equal = tree_int_cst_equal (minmax_const, comp_const);
5447 consts_lt = tree_int_cst_lt (minmax_const, comp_const);
5448 inner = TREE_OPERAND (arg0, 0);
5450 /* If something does not permit us to optimize, return NULL_TREE. */
5451 if ((op_code != MIN_EXPR && op_code != MAX_EXPR)
5452 || TREE_CODE (comp_const) != INTEGER_CST
5453 || TREE_OVERFLOW (comp_const)
5454 || TREE_CODE (minmax_const) != INTEGER_CST
5455 || TREE_OVERFLOW (minmax_const))
5456 return NULL_TREE;
5458 /* Now handle all the various comparison codes. We only handle EQ_EXPR
5459 and GT_EXPR, doing the rest with recursive calls using logical
5460 simplifications. */
5461 switch (code)
5463 case NE_EXPR: case LT_EXPR: case LE_EXPR:
5465 tree tem
5466 = optimize_minmax_comparison (loc,
5467 invert_tree_comparison (code, false),
5468 type, op0, op1);
5469 if (tem)
5470 return invert_truthvalue_loc (loc, tem);
5471 return NULL_TREE;
5474 case GE_EXPR:
5475 return
5476 fold_build2_loc (loc, TRUTH_ORIF_EXPR, type,
5477 optimize_minmax_comparison
5478 (loc, EQ_EXPR, type, arg0, comp_const),
5479 optimize_minmax_comparison
5480 (loc, GT_EXPR, type, arg0, comp_const));
5482 case EQ_EXPR:
5483 if (op_code == MAX_EXPR && consts_equal)
5484 /* MAX (X, 0) == 0 -> X <= 0 */
5485 return fold_build2_loc (loc, LE_EXPR, type, inner, comp_const);
5487 else if (op_code == MAX_EXPR && consts_lt)
5488 /* MAX (X, 0) == 5 -> X == 5 */
5489 return fold_build2_loc (loc, EQ_EXPR, type, inner, comp_const);
5491 else if (op_code == MAX_EXPR)
5492 /* MAX (X, 0) == -1 -> false */
5493 return omit_one_operand_loc (loc, type, integer_zero_node, inner);
5495 else if (consts_equal)
5496 /* MIN (X, 0) == 0 -> X >= 0 */
5497 return fold_build2_loc (loc, GE_EXPR, type, inner, comp_const);
5499 else if (consts_lt)
5500 /* MIN (X, 0) == 5 -> false */
5501 return omit_one_operand_loc (loc, type, integer_zero_node, inner);
5503 else
5504 /* MIN (X, 0) == -1 -> X == -1 */
5505 return fold_build2_loc (loc, EQ_EXPR, type, inner, comp_const);
5507 case GT_EXPR:
5508 if (op_code == MAX_EXPR && (consts_equal || consts_lt))
5509 /* MAX (X, 0) > 0 -> X > 0
5510 MAX (X, 0) > 5 -> X > 5 */
5511 return fold_build2_loc (loc, GT_EXPR, type, inner, comp_const);
5513 else if (op_code == MAX_EXPR)
5514 /* MAX (X, 0) > -1 -> true */
5515 return omit_one_operand_loc (loc, type, integer_one_node, inner);
5517 else if (op_code == MIN_EXPR && (consts_equal || consts_lt))
5518 /* MIN (X, 0) > 0 -> false
5519 MIN (X, 0) > 5 -> false */
5520 return omit_one_operand_loc (loc, type, integer_zero_node, inner);
5522 else
5523 /* MIN (X, 0) > -1 -> X > -1 */
5524 return fold_build2_loc (loc, GT_EXPR, type, inner, comp_const);
5526 default:
5527 return NULL_TREE;
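
/* Editor's sketch (illustrative only, not part of fold-const.c): the
   source-level effect of the MIN/MAX comparison folds above, written
   out in plain C.  */
static int
minmax_compare_example (int x)
{
  int max_x_0 = x > 0 ? x : 0;          /* MAX (x, 0) */
  /* The folds above turn MAX (x, 0) == 0 into x <= 0, and
     MAX (x, 0) > -1 into the constant 1.  */
  return (max_x_0 == 0) == (x <= 0)     /* always 1 */
         && (max_x_0 > -1) == 1;        /* always 1 */
}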
5531 /* T is an integer expression that is being multiplied, divided, or taken a
5532 modulus (CODE says which and what kind of divide or modulus) by a
5533 constant C. See if we can eliminate that operation by folding it with
5534 other operations already in T. WIDE_TYPE, if non-null, is a type that
5535 should be used for the computation if wider than our type.
5537 For example, if we are dividing (X * 8) + (Y * 16) by 4, we can return
5538 (X * 2) + (Y * 4). We must, however, be assured that either the original
5539 expression would not overflow or that overflow is undefined for the type
5540 in the language in question.
5542 If we return a non-null expression, it is an equivalent form of the
5543 original computation, but need not be in the original type.
5545 We set *STRICT_OVERFLOW_P to true if the return value depends on
5546 signed overflow being undefined. Otherwise we do not change
5547 *STRICT_OVERFLOW_P. */
5549 static tree
5550 extract_muldiv (tree t, tree c, enum tree_code code, tree wide_type,
5551 bool *strict_overflow_p)
5553 /* To avoid exponential search depth, refuse to allow recursion past
5554 three levels. Beyond that (1) it's highly unlikely that we'll find
5555 something interesting and (2) we've probably processed it before
5556 when we built the inner expression. */
5558 static int depth;
5559 tree ret;
5561 if (depth > 3)
5562 return NULL;
5564 depth++;
5565 ret = extract_muldiv_1 (t, c, code, wide_type, strict_overflow_p);
5566 depth--;
5568 return ret;
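
/* Editor's sketch (illustrative only, not part of fold-const.c): the
   running example from the comment above, in plain C.  The fold is
   valid only when the original expression provably does not overflow
   or signed overflow is undefined for the type.  */
static int
extract_muldiv_example (int x, int y)
{
  /* (x * 8 + y * 16) / 4 folds to x * 2 + y * 4.  */
  return x * 2 + y * 4;         /* what (x * 8 + y * 16) / 4 becomes */
}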
5571 static tree
5572 extract_muldiv_1 (tree t, tree c, enum tree_code code, tree wide_type,
5573 bool *strict_overflow_p)
5575 tree type = TREE_TYPE (t);
5576 enum tree_code tcode = TREE_CODE (t);
5577 tree ctype = (wide_type != 0 && (GET_MODE_SIZE (TYPE_MODE (wide_type))
5578 > GET_MODE_SIZE (TYPE_MODE (type)))
5579 ? wide_type : type);
5580 tree t1, t2;
5581 int same_p = tcode == code;
5582 tree op0 = NULL_TREE, op1 = NULL_TREE;
5583 bool sub_strict_overflow_p;
5585 /* Don't deal with constants of zero here; they confuse the code below. */
5586 if (integer_zerop (c))
5587 return NULL_TREE;
5589 if (TREE_CODE_CLASS (tcode) == tcc_unary)
5590 op0 = TREE_OPERAND (t, 0);
5592 if (TREE_CODE_CLASS (tcode) == tcc_binary)
5593 op0 = TREE_OPERAND (t, 0), op1 = TREE_OPERAND (t, 1);
5595 /* Note that we need not handle conditional operations here since fold
5596 already handles those cases. So just do arithmetic here. */
5597 switch (tcode)
5599 case INTEGER_CST:
5600 /* For a constant, we can always simplify if we are a multiply
5601 or (for divide and modulus) if it is a multiple of our constant. */
5602 if (code == MULT_EXPR
5603 || integer_zerop (const_binop (TRUNC_MOD_EXPR, t, c)))
5604 return const_binop (code, fold_convert (ctype, t),
5605 fold_convert (ctype, c));
5606 break;
5608 CASE_CONVERT: case NON_LVALUE_EXPR:
5609 /* If op0 is an expression ... */
5610 if ((COMPARISON_CLASS_P (op0)
5611 || UNARY_CLASS_P (op0)
5612 || BINARY_CLASS_P (op0)
5613 || VL_EXP_CLASS_P (op0)
5614 || EXPRESSION_CLASS_P (op0))
5615 /* ... and has wrapping overflow, and its type is smaller
5616 than ctype, then we cannot pass through as widening. */
5617 && ((TYPE_OVERFLOW_WRAPS (TREE_TYPE (op0))
5618 && ! (TREE_CODE (TREE_TYPE (op0)) == INTEGER_TYPE
5619 && TYPE_IS_SIZETYPE (TREE_TYPE (op0)))
5620 && (TYPE_PRECISION (ctype)
5621 > TYPE_PRECISION (TREE_TYPE (op0))))
5622 /* ... or this is a truncation (t is narrower than op0),
5623 then we cannot pass through this narrowing. */
5624 || (TYPE_PRECISION (type)
5625 < TYPE_PRECISION (TREE_TYPE (op0)))
5626 /* ... or signedness changes for division or modulus,
5627 then we cannot pass through this conversion. */
5628 || (code != MULT_EXPR
5629 && (TYPE_UNSIGNED (ctype)
5630 != TYPE_UNSIGNED (TREE_TYPE (op0))))
5631 /* ... or has undefined overflow while the type converted to
5632 has not, then we cannot do the operation in the inner type
5633 as that would introduce undefined overflow. */
5634 || (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (op0))
5635 && !TYPE_OVERFLOW_UNDEFINED (type))))
5636 break;
5638 /* Pass the constant down and see if we can make a simplification. If
5639 we can, replace this expression with the inner simplification for
5640 possible later conversion to our or some other type. */
5641 if ((t2 = fold_convert (TREE_TYPE (op0), c)) != 0
5642 && TREE_CODE (t2) == INTEGER_CST
5643 && !TREE_OVERFLOW (t2)
5644 && (0 != (t1 = extract_muldiv (op0, t2, code,
5645 code == MULT_EXPR
5646 ? ctype : NULL_TREE,
5647 strict_overflow_p))))
5648 return t1;
5649 break;
5651 case ABS_EXPR:
5652 /* If widening the type changes it from signed to unsigned, then we
5653 must avoid building ABS_EXPR itself as unsigned. */
5654 if (TYPE_UNSIGNED (ctype) && !TYPE_UNSIGNED (type))
5656 tree cstype = (*signed_type_for) (ctype);
5657 if ((t1 = extract_muldiv (op0, c, code, cstype, strict_overflow_p))
5658 != 0)
5660 t1 = fold_build1 (tcode, cstype, fold_convert (cstype, t1));
5661 return fold_convert (ctype, t1);
5663 break;
5665 /* If the constant is negative, we cannot simplify this. */
5666 if (tree_int_cst_sgn (c) == -1)
5667 break;
5668 /* FALLTHROUGH */
5669 case NEGATE_EXPR:
5670 if ((t1 = extract_muldiv (op0, c, code, wide_type, strict_overflow_p))
5671 != 0)
5672 return fold_build1 (tcode, ctype, fold_convert (ctype, t1));
5673 break;
5675 case MIN_EXPR: case MAX_EXPR:
5676 /* If widening the type changes the signedness, then we can't perform
5677 this optimization as that changes the result. */
5678 if (TYPE_UNSIGNED (ctype) != TYPE_UNSIGNED (type))
5679 break;
5681 /* MIN (a, b) / 5 -> MIN (a / 5, b / 5) */
5682 sub_strict_overflow_p = false;
5683 if ((t1 = extract_muldiv (op0, c, code, wide_type,
5684 &sub_strict_overflow_p)) != 0
5685 && (t2 = extract_muldiv (op1, c, code, wide_type,
5686 &sub_strict_overflow_p)) != 0)
5688 if (tree_int_cst_sgn (c) < 0)
5689 tcode = (tcode == MIN_EXPR ? MAX_EXPR : MIN_EXPR);
5690 if (sub_strict_overflow_p)
5691 *strict_overflow_p = true;
5692 return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
5693 fold_convert (ctype, t2));
5695 break;
5697 case LSHIFT_EXPR: case RSHIFT_EXPR:
5698 /* If the second operand is constant, this is a multiplication
5699 or floor division, by a power of two, so we can treat it that
5700 way unless the multiplier or divisor overflows. Signed
5701 left-shift overflow is implementation-defined rather than
5702 undefined in C90, so do not convert signed left shift into
5703 multiplication. */
5704 if (TREE_CODE (op1) == INTEGER_CST
5705 && (tcode == RSHIFT_EXPR || TYPE_UNSIGNED (TREE_TYPE (op0)))
5706 /* const_binop may not detect overflow correctly,
5707 so check for it explicitly here. */
5708 && TYPE_PRECISION (TREE_TYPE (size_one_node)) > TREE_INT_CST_LOW (op1)
5709 && TREE_INT_CST_HIGH (op1) == 0
5710 && 0 != (t1 = fold_convert (ctype,
5711 const_binop (LSHIFT_EXPR,
5712 size_one_node,
5713 op1)))
5714 && !TREE_OVERFLOW (t1))
5715 return extract_muldiv (build2 (tcode == LSHIFT_EXPR
5716 ? MULT_EXPR : FLOOR_DIV_EXPR,
5717 ctype,
5718 fold_convert (ctype, op0),
5719 t1),
5720 c, code, wide_type, strict_overflow_p);
5721 break;
5723 case PLUS_EXPR: case MINUS_EXPR:
5724 /* See if we can eliminate the operation on both sides. If we can, we
5725 can return a new PLUS or MINUS. If we can't, the only remaining
5726 cases where we can do anything are if the second operand is a
5727 constant. */
5728 sub_strict_overflow_p = false;
5729 t1 = extract_muldiv (op0, c, code, wide_type, &sub_strict_overflow_p);
5730 t2 = extract_muldiv (op1, c, code, wide_type, &sub_strict_overflow_p);
5731 if (t1 != 0 && t2 != 0
5732 && (code == MULT_EXPR
5733 /* If not multiplication, we can only do this if both operands
5734 are divisible by c. */
5735 || (multiple_of_p (ctype, op0, c)
5736 && multiple_of_p (ctype, op1, c))))
5738 if (sub_strict_overflow_p)
5739 *strict_overflow_p = true;
5740 return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
5741 fold_convert (ctype, t2));
5744 /* If this was a subtraction, negate OP1 and set it to be an addition.
5745 This simplifies the logic below. */
5746 if (tcode == MINUS_EXPR)
5748 tcode = PLUS_EXPR, op1 = negate_expr (op1);
5749 /* If OP1 was not easily negatable, the constant may be OP0. */
5750 if (TREE_CODE (op0) == INTEGER_CST)
5752 tree tem = op0;
5753 op0 = op1;
5754 op1 = tem;
5755 tem = t1;
5756 t1 = t2;
5757 t2 = tem;
5761 if (TREE_CODE (op1) != INTEGER_CST)
5762 break;
5764 /* If either OP1 or C is negative, this optimization is not safe for
5765 some of the division and remainder types while for others we need
5766 to change the code. */
5767 if (tree_int_cst_sgn (op1) < 0 || tree_int_cst_sgn (c) < 0)
5769 if (code == CEIL_DIV_EXPR)
5770 code = FLOOR_DIV_EXPR;
5771 else if (code == FLOOR_DIV_EXPR)
5772 code = CEIL_DIV_EXPR;
5773 else if (code != MULT_EXPR
5774 && code != CEIL_MOD_EXPR && code != FLOOR_MOD_EXPR)
5775 break;
5778 /* If it's a multiply or a division/modulus operation of a multiple
5779 of our constant, do the operation and verify it doesn't overflow. */
5780 if (code == MULT_EXPR
5781 || integer_zerop (const_binop (TRUNC_MOD_EXPR, op1, c)))
5783 op1 = const_binop (code, fold_convert (ctype, op1),
5784 fold_convert (ctype, c));
5785 /* We allow the constant to overflow with wrapping semantics. */
5786 if (op1 == 0
5787 || (TREE_OVERFLOW (op1) && !TYPE_OVERFLOW_WRAPS (ctype)))
5788 break;
5790 else
5791 break;
5793 /* If we have an unsigned type that is not a sizetype, we cannot widen
5794 the operation since it will change the result if the original
5795 computation overflowed. */
5796 if (TYPE_UNSIGNED (ctype)
5797 && ! (TREE_CODE (ctype) == INTEGER_TYPE && TYPE_IS_SIZETYPE (ctype))
5798 && ctype != type)
5799 break;
5801 /* If we were able to eliminate our operation from the first side,
5802 apply our operation to the second side and reform the PLUS. */
5803 if (t1 != 0 && (TREE_CODE (t1) != code || code == MULT_EXPR))
5804 return fold_build2 (tcode, ctype, fold_convert (ctype, t1), op1);
5806 /* The last case is if we are a multiply. In that case, we can
5807 apply the distributive law to commute the multiply and addition
5808 if the multiplication of the constants doesn't overflow. */
5809 if (code == MULT_EXPR)
5810 return fold_build2 (tcode, ctype,
5811 fold_build2 (code, ctype,
5812 fold_convert (ctype, op0),
5813 fold_convert (ctype, c)),
5814 op1);
5816 break;
5818 case MULT_EXPR:
5819 /* We have a special case here if we are doing something like
5820 (C * 8) % 4 since we know that's zero. */
5821 if ((code == TRUNC_MOD_EXPR || code == CEIL_MOD_EXPR
5822 || code == FLOOR_MOD_EXPR || code == ROUND_MOD_EXPR)
5823 /* If the multiplication can overflow we cannot optimize this.
5824 ??? Until we can properly mark individual operations as
5825 not overflowing, we need to treat sizetype specially here, as
5826 stor-layout relies on this optimization to make
5827 DECL_FIELD_BIT_OFFSET always a constant. */
5828 && (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (t))
5829 || (TREE_CODE (TREE_TYPE (t)) == INTEGER_TYPE
5830 && TYPE_IS_SIZETYPE (TREE_TYPE (t))))
5831 && TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST
5832 && integer_zerop (const_binop (TRUNC_MOD_EXPR, op1, c)))
5834 *strict_overflow_p = true;
5835 return omit_one_operand (type, integer_zero_node, op0);
5838 /* ... fall through ... */
5840 case TRUNC_DIV_EXPR: case CEIL_DIV_EXPR: case FLOOR_DIV_EXPR:
5841 case ROUND_DIV_EXPR: case EXACT_DIV_EXPR:
5842 /* If we can extract our operation from the LHS, do so and return a
5843 new operation. Likewise for the RHS from a MULT_EXPR. Otherwise,
5844 do something only if the second operand is a constant. */
5845 if (same_p
5846 && (t1 = extract_muldiv (op0, c, code, wide_type,
5847 strict_overflow_p)) != 0)
5848 return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
5849 fold_convert (ctype, op1));
5850 else if (tcode == MULT_EXPR && code == MULT_EXPR
5851 && (t1 = extract_muldiv (op1, c, code, wide_type,
5852 strict_overflow_p)) != 0)
5853 return fold_build2 (tcode, ctype, fold_convert (ctype, op0),
5854 fold_convert (ctype, t1));
5855 else if (TREE_CODE (op1) != INTEGER_CST)
5856 return 0;
5858 /* If these are the same operation types, we can associate them
5859 assuming no overflow. */
5860 if (tcode == code)
5862 double_int mul;
5863 int overflow_p;
5864 mul = double_int_mul_with_sign
5865 (double_int_ext
5866 (tree_to_double_int (op1),
5867 TYPE_PRECISION (ctype), TYPE_UNSIGNED (ctype)),
5868 double_int_ext
5869 (tree_to_double_int (c),
5870 TYPE_PRECISION (ctype), TYPE_UNSIGNED (ctype)),
5871 false, &overflow_p);
5872 overflow_p = (((!TYPE_UNSIGNED (ctype)
5873 || (TREE_CODE (ctype) == INTEGER_TYPE
5874 && TYPE_IS_SIZETYPE (ctype)))
5875 && overflow_p)
5876 | TREE_OVERFLOW (c) | TREE_OVERFLOW (op1));
5877 if (!double_int_fits_to_tree_p (ctype, mul)
5878 && ((TYPE_UNSIGNED (ctype) && tcode != MULT_EXPR)
5879 || !TYPE_UNSIGNED (ctype)
5880 || (TREE_CODE (ctype) == INTEGER_TYPE
5881 && TYPE_IS_SIZETYPE (ctype))))
5882 overflow_p = 1;
5883 if (!overflow_p)
5884 return fold_build2 (tcode, ctype, fold_convert (ctype, op0),
5885 double_int_to_tree (ctype, mul));
5888 /* If these operations "cancel" each other, we have the main
5889 optimizations of this pass, which occur when either constant is a
5890 multiple of the other, in which case we replace this with an
5891 operation of either CODE or TCODE.
5893 If we have an unsigned type that is not a sizetype, we cannot do
5894 this since it will change the result if the original computation
5895 overflowed. */
5896 if ((TYPE_OVERFLOW_UNDEFINED (ctype)
5897 || (TREE_CODE (ctype) == INTEGER_TYPE && TYPE_IS_SIZETYPE (ctype)))
5898 && ((code == MULT_EXPR && tcode == EXACT_DIV_EXPR)
5899 || (tcode == MULT_EXPR
5900 && code != TRUNC_MOD_EXPR && code != CEIL_MOD_EXPR
5901 && code != FLOOR_MOD_EXPR && code != ROUND_MOD_EXPR
5902 && code != MULT_EXPR)))
5904 if (integer_zerop (const_binop (TRUNC_MOD_EXPR, op1, c)))
5906 if (TYPE_OVERFLOW_UNDEFINED (ctype))
5907 *strict_overflow_p = true;
5908 return fold_build2 (tcode, ctype, fold_convert (ctype, op0),
5909 fold_convert (ctype,
5910 const_binop (TRUNC_DIV_EXPR,
5911 op1, c)));
5913 else if (integer_zerop (const_binop (TRUNC_MOD_EXPR, c, op1)))
5915 if (TYPE_OVERFLOW_UNDEFINED (ctype))
5916 *strict_overflow_p = true;
5917 return fold_build2 (code, ctype, fold_convert (ctype, op0),
5918 fold_convert (ctype,
5919 const_binop (TRUNC_DIV_EXPR,
5920 c, op1)));
5923 break;
5925 default:
5926 break;
5929 return 0;
5932 /* Return a node which has the indicated constant VALUE (either 0 or
5933 1), and is of the indicated TYPE. */
5935 tree
5936 constant_boolean_node (int value, tree type)
5938 if (type == integer_type_node)
5939 return value ? integer_one_node : integer_zero_node;
5940 else if (type == boolean_type_node)
5941 return value ? boolean_true_node : boolean_false_node;
5942 else
5943 return build_int_cst (type, value);
5947 /* Transform `a + (b ? x : y)' into `b ? (a + x) : (a + y)'.
5948 Transform, `a + (x < y)' into `(x < y) ? (a + 1) : (a + 0)'. Here
5949 CODE corresponds to the `+', COND to the `(b ? x : y)' or `(x < y)'
5950 expression, and ARG to `a'. If COND_FIRST_P is nonzero, then the
5951 COND is the first argument to CODE; otherwise (as in the example
5952 given here), it is the second argument. TYPE is the type of the
5953 original expression. Return NULL_TREE if no simplification is
5954 possible. */
5956 static tree
5957 fold_binary_op_with_conditional_arg (location_t loc,
5958 enum tree_code code,
5959 tree type, tree op0, tree op1,
5960 tree cond, tree arg, int cond_first_p)
5962 tree cond_type = cond_first_p ? TREE_TYPE (op0) : TREE_TYPE (op1);
5963 tree arg_type = cond_first_p ? TREE_TYPE (op1) : TREE_TYPE (op0);
5964 tree test, true_value, false_value;
5965 tree lhs = NULL_TREE;
5966 tree rhs = NULL_TREE;
5968 if (TREE_CODE (cond) == COND_EXPR)
5970 test = TREE_OPERAND (cond, 0);
5971 true_value = TREE_OPERAND (cond, 1);
5972 false_value = TREE_OPERAND (cond, 2);
5973 /* If this operand throws an exception, then it does not make
5974 sense to try to perform a logical or arithmetic operation
5975 involving it. */
5976 if (VOID_TYPE_P (TREE_TYPE (true_value)))
5977 lhs = true_value;
5978 if (VOID_TYPE_P (TREE_TYPE (false_value)))
5979 rhs = false_value;
5981 else
5983 tree testtype = TREE_TYPE (cond);
5984 test = cond;
5985 true_value = constant_boolean_node (true, testtype);
5986 false_value = constant_boolean_node (false, testtype);
5989 /* This transformation is only worthwhile if we don't have to wrap ARG
5990 in a SAVE_EXPR and the operation can be simplified on at least one
5991 of the branches once it's pushed inside the COND_EXPR. */
5992 if (!TREE_CONSTANT (arg)
5993 && (TREE_SIDE_EFFECTS (arg)
5994 || TREE_CONSTANT (true_value) || TREE_CONSTANT (false_value)))
5995 return NULL_TREE;
5997 arg = fold_convert_loc (loc, arg_type, arg);
5998 if (lhs == 0)
6000 true_value = fold_convert_loc (loc, cond_type, true_value);
6001 if (cond_first_p)
6002 lhs = fold_build2_loc (loc, code, type, true_value, arg);
6003 else
6004 lhs = fold_build2_loc (loc, code, type, arg, true_value);
6006 if (rhs == 0)
6008 false_value = fold_convert_loc (loc, cond_type, false_value);
6009 if (cond_first_p)
6010 rhs = fold_build2_loc (loc, code, type, false_value, arg);
6011 else
6012 rhs = fold_build2_loc (loc, code, type, arg, false_value);
6015 /* Check that we have simplified at least one of the branches. */
6016 if (!TREE_CONSTANT (arg) && !TREE_CONSTANT (lhs) && !TREE_CONSTANT (rhs))
6017 return NULL_TREE;
6019 return fold_build3_loc (loc, COND_EXPR, type, test, lhs, rhs);
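
/* Editor's sketch (illustrative only, not part of fold-const.c): the
   transformation performed by the function above, at the source
   level.  */
static int
cond_arg_example (int a, int b, int x, int y)
{
  /* a + (b ? x : y) is rewritten as b ? (a + x) : (a + y); this only
     pays off when at least one branch then simplifies further, e.g.
     when x or y is a constant.  */
  return b ? (a + x) : (a + y);
}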
6023 /* Subroutine of fold() that checks for the addition of +/- 0.0.
6025 If !NEGATE, return true if ADDEND is +/-0.0 and, for all X of type
6026 TYPE, X + ADDEND is the same as X. If NEGATE, return true if X -
6027 ADDEND is the same as X.
6029 X + 0 and X - 0 both give X when X is NaN, infinite, or nonzero
6030 and finite. The problematic cases are when X is zero, and its mode
6031 has signed zeros. In the case of rounding towards -infinity,
6032 X - 0 is not the same as X because 0 - 0 is -0. In other rounding
6033 modes, X + 0 is not the same as X because -0 + 0 is 0. */
6035 bool
6036 fold_real_zero_addition_p (const_tree type, const_tree addend, int negate)
6038 if (!real_zerop (addend))
6039 return false;
6041 /* Don't allow the fold with -fsignaling-nans. */
6042 if (HONOR_SNANS (TYPE_MODE (type)))
6043 return false;
6045 /* Allow the fold if zeros aren't signed, or their sign isn't important. */
6046 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (type)))
6047 return true;
6049 /* Treat x + -0 as x - 0 and x - -0 as x + 0. */
6050 if (TREE_CODE (addend) == REAL_CST
6051 && REAL_VALUE_MINUS_ZERO (TREE_REAL_CST (addend)))
6052 negate = !negate;
6054 /* The mode has signed zeros, and we have to honor their sign.
6055 In this situation, there is only one case we can return true for.
6056 X - 0 is the same as X unless rounding towards -infinity is
6057 supported. */
6058 return negate && !HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type));
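
/* Editor's sketch (illustrative only, not part of fold-const.c): why
   x + 0.0 cannot be folded to x when signed zeros are honored.  Under
   IEEE 754 round-to-nearest, -0.0 + 0.0 is +0.0, so the fold would
   change the sign of a negative zero; x - 0.0 preserves it.  */
static double
signed_zero_example (void)
{
  double x = -0.0;
  return x + 0.0;       /* +0.0, not the original -0.0 */
}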
6061 /* Subroutine of fold() that checks comparisons of built-in math
6062 functions against real constants.
6064 FCODE is the DECL_FUNCTION_CODE of the built-in, CODE is the comparison
6065 operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR, GE_EXPR or LE_EXPR. TYPE
6066 is the type of the result and ARG0 and ARG1 are the operands of the
6067 comparison. ARG1 must be a TREE_REAL_CST.
6069 The function returns the constant folded tree if a simplification
6070 can be made, and NULL_TREE otherwise. */
6072 static tree
6073 fold_mathfn_compare (location_t loc,
6074 enum built_in_function fcode, enum tree_code code,
6075 tree type, tree arg0, tree arg1)
6077 REAL_VALUE_TYPE c;
6079 if (BUILTIN_SQRT_P (fcode))
6081 tree arg = CALL_EXPR_ARG (arg0, 0);
6082 enum machine_mode mode = TYPE_MODE (TREE_TYPE (arg0));
6084 c = TREE_REAL_CST (arg1);
6085 if (REAL_VALUE_NEGATIVE (c))
6087 /* sqrt(x) < y is always false, if y is negative. */
6088 if (code == EQ_EXPR || code == LT_EXPR || code == LE_EXPR)
6089 return omit_one_operand_loc (loc, type, integer_zero_node, arg);
6091 /* sqrt(x) > y is always true, if y is negative and we
6092 don't care about NaNs, i.e. negative values of x. */
6093 if (code == NE_EXPR || !HONOR_NANS (mode))
6094 return omit_one_operand_loc (loc, type, integer_one_node, arg);
6096 /* sqrt(x) > y is the same as x >= 0, if y is negative. */
6097 return fold_build2_loc (loc, GE_EXPR, type, arg,
6098 build_real (TREE_TYPE (arg), dconst0));
6100 else if (code == GT_EXPR || code == GE_EXPR)
6102 REAL_VALUE_TYPE c2;
6104 REAL_ARITHMETIC (c2, MULT_EXPR, c, c);
6105 real_convert (&c2, mode, &c2);
6107 if (REAL_VALUE_ISINF (c2))
6109 /* sqrt(x) > y is x == +Inf, when y is very large. */
6110 if (HONOR_INFINITIES (mode))
6111 return fold_build2_loc (loc, EQ_EXPR, type, arg,
6112 build_real (TREE_TYPE (arg), c2));
6114 /* sqrt(x) > y is always false, when y is very large
6115 and we don't care about infinities. */
6116 return omit_one_operand_loc (loc, type, integer_zero_node, arg);
6119 /* sqrt(x) > c is the same as x > c*c. */
6120 return fold_build2_loc (loc, code, type, arg,
6121 build_real (TREE_TYPE (arg), c2));
6123 else if (code == LT_EXPR || code == LE_EXPR)
6125 REAL_VALUE_TYPE c2;
6127 REAL_ARITHMETIC (c2, MULT_EXPR, c, c);
6128 real_convert (&c2, mode, &c2);
6130 if (REAL_VALUE_ISINF (c2))
6132 /* sqrt(x) < y is always true, when y is a very large
6133 value and we don't care about NaNs or Infinities. */
6134 if (! HONOR_NANS (mode) && ! HONOR_INFINITIES (mode))
6135 return omit_one_operand_loc (loc, type, integer_one_node, arg);
6137 /* sqrt(x) < y is x != +Inf when y is very large and we
6138 don't care about NaNs. */
6139 if (! HONOR_NANS (mode))
6140 return fold_build2_loc (loc, NE_EXPR, type, arg,
6141 build_real (TREE_TYPE (arg), c2));
6143 /* sqrt(x) < y is x >= 0 when y is very large and we
6144 don't care about Infinities. */
6145 if (! HONOR_INFINITIES (mode))
6146 return fold_build2_loc (loc, GE_EXPR, type, arg,
6147 build_real (TREE_TYPE (arg), dconst0));
6149 /* sqrt(x) < y is x >= 0 && x != +Inf, when y is large. */
6150 arg = save_expr (arg);
6151 return fold_build2_loc (loc, TRUTH_ANDIF_EXPR, type,
6152 fold_build2_loc (loc, GE_EXPR, type, arg,
6153 build_real (TREE_TYPE (arg),
6154 dconst0)),
6155 fold_build2_loc (loc, NE_EXPR, type, arg,
6156 build_real (TREE_TYPE (arg),
6157 c2)));
6160 /* sqrt(x) < c is the same as x < c*c, if we ignore NaNs. */
6161 if (! HONOR_NANS (mode))
6162 return fold_build2_loc (loc, code, type, arg,
6163 build_real (TREE_TYPE (arg), c2));
6165 /* sqrt(x) < c is the same as x >= 0 && x < c*c. */
6166 arg = save_expr (arg);
6167 return fold_build2_loc (loc, TRUTH_ANDIF_EXPR, type,
6168 fold_build2_loc (loc, GE_EXPR, type, arg,
6169 build_real (TREE_TYPE (arg),
6170 dconst0)),
6171 fold_build2_loc (loc, code, type, arg,
6172 build_real (TREE_TYPE (arg),
6173 c2)));
6177 return NULL_TREE;
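
/* Editor's sketch (illustrative only, not part of fold-const.c): the
   sqrt comparison fold above for a finite, positive constant.  No
   <math.h> call survives the fold.  */
static int
sqrt_compare_example (double x)
{
  /* sqrt (x) > 3.0 folds to x > 9.0, since c2 = 3.0 * 3.0 is finite;
     for negative x both forms are false (sqrt yields NaN), so the fold
     is safe even when NaNs are honored.  */
  return x > 9.0;       /* what sqrt (x) > 3.0 becomes */
}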
6180 /* Subroutine of fold() that optimizes comparisons against Infinities,
6181 either +Inf or -Inf.
6183 CODE is the comparison operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR,
6184 GE_EXPR or LE_EXPR. TYPE is the type of the result and ARG0 and ARG1
6185 are the operands of the comparison. ARG1 must be a TREE_REAL_CST.
6187 The function returns the constant folded tree if a simplification
6188 can be made, and NULL_TREE otherwise. */
6190 static tree
6191 fold_inf_compare (location_t loc, enum tree_code code, tree type,
6192 tree arg0, tree arg1)
6194 enum machine_mode mode;
6195 REAL_VALUE_TYPE max;
6196 tree temp;
6197 bool neg;
6199 mode = TYPE_MODE (TREE_TYPE (arg0));
6201 /* For negative infinity swap the sense of the comparison. */
6202 neg = REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg1));
6203 if (neg)
6204 code = swap_tree_comparison (code);
6206 switch (code)
6208 case GT_EXPR:
6209 /* x > +Inf is always false, if we ignore sNaNs. */
6210 if (HONOR_SNANS (mode))
6211 return NULL_TREE;
6212 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
6214 case LE_EXPR:
6215 /* x <= +Inf is always true, if we don't care about NaNs. */
6216 if (! HONOR_NANS (mode))
6217 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
6219 /* x <= +Inf is the same as x == x, i.e. isfinite(x). */
6220 arg0 = save_expr (arg0);
6221 return fold_build2_loc (loc, EQ_EXPR, type, arg0, arg0);
6223 case EQ_EXPR:
6224 case GE_EXPR:
6225 /* x == +Inf and x >= +Inf are always equal to x > DBL_MAX. */
6226 real_maxval (&max, neg, mode);
6227 return fold_build2_loc (loc, neg ? LT_EXPR : GT_EXPR, type,
6228 arg0, build_real (TREE_TYPE (arg0), max));
6230 case LT_EXPR:
6231 /* x < +Inf is always equal to x <= DBL_MAX. */
6232 real_maxval (&max, neg, mode);
6233 return fold_build2_loc (loc, neg ? GE_EXPR : LE_EXPR, type,
6234 arg0, build_real (TREE_TYPE (arg0), max));
6236 case NE_EXPR:
6237 /* x != +Inf is always equal to !(x > DBL_MAX). */
6238 real_maxval (&max, neg, mode);
6239 if (! HONOR_NANS (mode))
6240 return fold_build2_loc (loc, neg ? GE_EXPR : LE_EXPR, type,
6241 arg0, build_real (TREE_TYPE (arg0), max));
6243 temp = fold_build2_loc (loc, neg ? LT_EXPR : GT_EXPR, type,
6244 arg0, build_real (TREE_TYPE (arg0), max));
6245 return fold_build1_loc (loc, TRUTH_NOT_EXPR, type, temp);
6247 default:
6248 break;
6251 return NULL_TREE;
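
/* Editor's sketch (illustrative only, not part of fold-const.c): the
   infinity folds above in plain C, assuming <float.h> for DBL_MAX.  */
#include <float.h>
static int
inf_compare_example (double x)
{
  /* x < +Inf folds to x <= DBL_MAX; x <= +Inf folds to x == x when
     NaNs are honored, since x == x is false exactly for NaN.  */
  return x <= DBL_MAX;  /* what x < HUGE_VAL becomes */
}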
6254 /* Subroutine of fold() that optimizes comparisons of a division by
6255 a nonzero integer constant against an integer constant, i.e.
6256 X/C1 op C2.
6258 CODE is the comparison operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR,
6259 GE_EXPR or LE_EXPR. TYPE is the type of the result and ARG0 and ARG1
6260 are the operands of the comparison. ARG1 must be an INTEGER_CST.
6262 The function returns the constant folded tree if a simplification
6263 can be made, and NULL_TREE otherwise. */
6265 static tree
6266 fold_div_compare (location_t loc,
6267 enum tree_code code, tree type, tree arg0, tree arg1)
6269 tree prod, tmp, hi, lo;
6270 tree arg00 = TREE_OPERAND (arg0, 0);
6271 tree arg01 = TREE_OPERAND (arg0, 1);
6272 double_int val;
6273 bool unsigned_p = TYPE_UNSIGNED (TREE_TYPE (arg0));
6274 bool neg_overflow;
6275 int overflow;
6277 /* We have to do this the hard way to detect unsigned overflow.
6278 prod = int_const_binop (MULT_EXPR, arg01, arg1); */
6279 overflow = mul_double_with_sign (TREE_INT_CST_LOW (arg01),
6280 TREE_INT_CST_HIGH (arg01),
6281 TREE_INT_CST_LOW (arg1),
6282 TREE_INT_CST_HIGH (arg1),
6283 &val.low, &val.high, unsigned_p);
6284 prod = force_fit_type_double (TREE_TYPE (arg00), val, -1, overflow);
6285 neg_overflow = false;
6287 if (unsigned_p)
6289 tmp = int_const_binop (MINUS_EXPR, arg01,
6290 build_int_cst (TREE_TYPE (arg01), 1));
6291 lo = prod;
6293 /* Likewise hi = int_const_binop (PLUS_EXPR, prod, tmp). */
6294 overflow = add_double_with_sign (TREE_INT_CST_LOW (prod),
6295 TREE_INT_CST_HIGH (prod),
6296 TREE_INT_CST_LOW (tmp),
6297 TREE_INT_CST_HIGH (tmp),
6298 &val.low, &val.high, unsigned_p);
6299 hi = force_fit_type_double (TREE_TYPE (arg00), val,
6300 -1, overflow | TREE_OVERFLOW (prod));
6302 else if (tree_int_cst_sgn (arg01) >= 0)
6304 tmp = int_const_binop (MINUS_EXPR, arg01,
6305 build_int_cst (TREE_TYPE (arg01), 1));
6306 switch (tree_int_cst_sgn (arg1))
6308 case -1:
6309 neg_overflow = true;
6310 lo = int_const_binop (MINUS_EXPR, prod, tmp);
6311 hi = prod;
6312 break;
6314 case 0:
6315 lo = fold_negate_const (tmp, TREE_TYPE (arg0));
6316 hi = tmp;
6317 break;
6319 case 1:
6320 hi = int_const_binop (PLUS_EXPR, prod, tmp);
6321 lo = prod;
6322 break;
6324 default:
6325 gcc_unreachable ();
6328 else
6330 /* A negative divisor reverses the relational operators. */
6331 code = swap_tree_comparison (code);
6333 tmp = int_const_binop (PLUS_EXPR, arg01,
6334 build_int_cst (TREE_TYPE (arg01), 1));
6335 switch (tree_int_cst_sgn (arg1))
6337 case -1:
6338 hi = int_const_binop (MINUS_EXPR, prod, tmp);
6339 lo = prod;
6340 break;
6342 case 0:
6343 hi = fold_negate_const (tmp, TREE_TYPE (arg0));
6344 lo = tmp;
6345 break;
6347 case 1:
6348 neg_overflow = true;
6349 lo = int_const_binop (PLUS_EXPR, prod, tmp);
6350 hi = prod;
6351 break;
6353 default:
6354 gcc_unreachable ();
6358 switch (code)
6360 case EQ_EXPR:
6361 if (TREE_OVERFLOW (lo) && TREE_OVERFLOW (hi))
6362 return omit_one_operand_loc (loc, type, integer_zero_node, arg00);
6363 if (TREE_OVERFLOW (hi))
6364 return fold_build2_loc (loc, GE_EXPR, type, arg00, lo);
6365 if (TREE_OVERFLOW (lo))
6366 return fold_build2_loc (loc, LE_EXPR, type, arg00, hi);
6367 return build_range_check (loc, type, arg00, 1, lo, hi);
6369 case NE_EXPR:
6370 if (TREE_OVERFLOW (lo) && TREE_OVERFLOW (hi))
6371 return omit_one_operand_loc (loc, type, integer_one_node, arg00);
6372 if (TREE_OVERFLOW (hi))
6373 return fold_build2_loc (loc, LT_EXPR, type, arg00, lo);
6374 if (TREE_OVERFLOW (lo))
6375 return fold_build2_loc (loc, GT_EXPR, type, arg00, hi);
6376 return build_range_check (loc, type, arg00, 0, lo, hi);
6378 case LT_EXPR:
6379 if (TREE_OVERFLOW (lo))
6381 tmp = neg_overflow ? integer_zero_node : integer_one_node;
6382 return omit_one_operand_loc (loc, type, tmp, arg00);
6384 return fold_build2_loc (loc, LT_EXPR, type, arg00, lo);
6386 case LE_EXPR:
6387 if (TREE_OVERFLOW (hi))
6389 tmp = neg_overflow ? integer_zero_node : integer_one_node;
6390 return omit_one_operand_loc (loc, type, tmp, arg00);
6392 return fold_build2_loc (loc, LE_EXPR, type, arg00, hi);
6394 case GT_EXPR:
6395 if (TREE_OVERFLOW (hi))
6397 tmp = neg_overflow ? integer_one_node : integer_zero_node;
6398 return omit_one_operand_loc (loc, type, tmp, arg00);
6400 return fold_build2_loc (loc, GT_EXPR, type, arg00, hi);
6402 case GE_EXPR:
6403 if (TREE_OVERFLOW (lo))
6405 tmp = neg_overflow ? integer_one_node : integer_zero_node;
6406 return omit_one_operand_loc (loc, type, tmp, arg00);
6408 return fold_build2_loc (loc, GE_EXPR, type, arg00, lo);
6410 default:
6411 break;
6414 return NULL_TREE;
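
/* Editor's sketch (illustrative only, not part of fold-const.c): a
   concrete case of the division-comparison fold above.  */
static int
div_compare_example (int x)
{
  /* x / 3 == 2 holds exactly for x in [6, 8] (C division truncates
     toward zero), so the folder emits a range check and the division
     disappears.  */
  return 6 <= x && x <= 8;      /* what x / 3 == 2 becomes */
}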
6418 /* If CODE with arguments ARG0 and ARG1 represents a single bit
6419 equality/inequality test, then return a simplified form of the test
6420 using a sign test. Otherwise return NULL. TYPE is the desired
6421 result type. */
6423 static tree
6424 fold_single_bit_test_into_sign_test (location_t loc,
6425 enum tree_code code, tree arg0, tree arg1,
6426 tree result_type)
6428 /* If this is testing a single bit, we can optimize the test. */
6429 if ((code == NE_EXPR || code == EQ_EXPR)
6430 && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
6431 && integer_pow2p (TREE_OPERAND (arg0, 1)))
6433 /* If we have (A & C) != 0 where C is the sign bit of A, convert
6434 this into A < 0. Similarly for (A & C) == 0 into A >= 0. */
6435 tree arg00 = sign_bit_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg0, 1));
6437 if (arg00 != NULL_TREE
6438 /* This is only a win if casting to a signed type is cheap,
6439 i.e. when arg00's type is not a partial mode. */
6440 && TYPE_PRECISION (TREE_TYPE (arg00))
6441 == GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (arg00))))
6443 tree stype = signed_type_for (TREE_TYPE (arg00));
6444 return fold_build2_loc (loc, code == EQ_EXPR ? GE_EXPR : LT_EXPR,
6445 result_type,
6446 fold_convert_loc (loc, stype, arg00),
6447 build_int_cst (stype, 0));
6451 return NULL_TREE;
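
/* Editor's sketch (illustrative only, not part of fold-const.c): the
   sign-test fold above.  When C is the sign bit of A's type, the AND
   plus comparison collapses into a signed comparison with zero.  */
static int
sign_bit_test_example (int a)
{
  /* (a & INT_MIN) != 0 folds to a < 0 (assuming 32-bit int).  */
  return a < 0;         /* what (a & 0x80000000) != 0 becomes */
}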
6454 /* If CODE with arguments ARG0 and ARG1 represents a single bit
6455 equality/inequality test, then return a simplified form of
6456 the test using shifts and logical operations. Otherwise return
6457 NULL. TYPE is the desired result type. */
6459 tree
6460 fold_single_bit_test (location_t loc, enum tree_code code,
6461 tree arg0, tree arg1, tree result_type)
6463 /* If this is testing a single bit, we can optimize the test. */
6464 if ((code == NE_EXPR || code == EQ_EXPR)
6465 && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
6466 && integer_pow2p (TREE_OPERAND (arg0, 1)))
6468 tree inner = TREE_OPERAND (arg0, 0);
6469 tree type = TREE_TYPE (arg0);
6470 int bitnum = tree_log2 (TREE_OPERAND (arg0, 1));
6471 enum machine_mode operand_mode = TYPE_MODE (type);
6472 int ops_unsigned;
6473 tree signed_type, unsigned_type, intermediate_type;
6474 tree tem, one;
6476 /* First, see if we can fold the single bit test into a sign-bit
6477 test. */
6478 tem = fold_single_bit_test_into_sign_test (loc, code, arg0, arg1,
6479 result_type);
6480 if (tem)
6481 return tem;
6483 /* Otherwise we have (A & C) != 0 where C is a single bit,
6484 convert that into ((A >> C2) & 1), where C2 = log2(C).
6485 Similarly for (A & C) == 0. */
6487 /* If INNER is a right shift of a constant and it plus BITNUM does
6488 not overflow, adjust BITNUM and INNER. */
6489 if (TREE_CODE (inner) == RSHIFT_EXPR
6490 && TREE_CODE (TREE_OPERAND (inner, 1)) == INTEGER_CST
6491 && TREE_INT_CST_HIGH (TREE_OPERAND (inner, 1)) == 0
6492 && bitnum < TYPE_PRECISION (type)
6493 && 0 > compare_tree_int (TREE_OPERAND (inner, 1),
6494 bitnum - TYPE_PRECISION (type)))
6496 bitnum += TREE_INT_CST_LOW (TREE_OPERAND (inner, 1));
6497 inner = TREE_OPERAND (inner, 0);
6500 /* If we are going to be able to omit the AND below, we must do our
6501 operations as unsigned. If we must use the AND, we have a choice.
6502 Normally unsigned is faster, but for some machines signed is. */
6503 #ifdef LOAD_EXTEND_OP
6504 ops_unsigned = (LOAD_EXTEND_OP (operand_mode) == SIGN_EXTEND
6505 && !flag_syntax_only) ? 0 : 1;
6506 #else
6507 ops_unsigned = 1;
6508 #endif
6510 signed_type = lang_hooks.types.type_for_mode (operand_mode, 0);
6511 unsigned_type = lang_hooks.types.type_for_mode (operand_mode, 1);
6512 intermediate_type = ops_unsigned ? unsigned_type : signed_type;
6513 inner = fold_convert_loc (loc, intermediate_type, inner);
6515 if (bitnum != 0)
6516 inner = build2 (RSHIFT_EXPR, intermediate_type,
6517 inner, size_int (bitnum));
6519 one = build_int_cst (intermediate_type, 1);
6521 if (code == EQ_EXPR)
6522 inner = fold_build2_loc (loc, BIT_XOR_EXPR, intermediate_type, inner, one);
6524 /* Put the AND last so it can combine with more things. */
6525 inner = build2 (BIT_AND_EXPR, intermediate_type, inner, one);
6527 /* Make sure to return the proper type. */
6528 inner = fold_convert_loc (loc, result_type, inner);
6530 return inner;
6532 return NULL_TREE;
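
/* Editor's sketch (illustrative only, not part of fold-const.c): the
   shift-and-mask form produced by fold_single_bit_test when the sign
   test above does not apply.  */
static int
single_bit_test_example (unsigned a)
{
  /* (a & 8) != 0 becomes ((a >> 3) & 1): shift the tested bit into
     position zero and mask it; the comparison disappears.  */
  return (a >> 3) & 1;  /* what (a & 8) != 0 becomes */
}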
6535 /* Check whether we are allowed to reorder operands arg0 and arg1,
6536 such that the evaluation of arg1 occurs before arg0. */
6538 static bool
6539 reorder_operands_p (const_tree arg0, const_tree arg1)
6541 if (! flag_evaluation_order)
6542 return true;
6543 if (TREE_CONSTANT (arg0) || TREE_CONSTANT (arg1))
6544 return true;
6545 return ! TREE_SIDE_EFFECTS (arg0)
6546 && ! TREE_SIDE_EFFECTS (arg1);
6549 /* Test whether it is preferable to swap two operands, ARG0 and
6550 ARG1, for example because ARG0 is an integer constant and ARG1
6551 isn't. If REORDER is true, only recommend swapping if we can
6552 evaluate the operands in reverse order. */
6554 bool
6555 tree_swap_operands_p (const_tree arg0, const_tree arg1, bool reorder)
6557 STRIP_SIGN_NOPS (arg0);
6558 STRIP_SIGN_NOPS (arg1);
6560 if (TREE_CODE (arg1) == INTEGER_CST)
6561 return 0;
6562 if (TREE_CODE (arg0) == INTEGER_CST)
6563 return 1;
6565 if (TREE_CODE (arg1) == REAL_CST)
6566 return 0;
6567 if (TREE_CODE (arg0) == REAL_CST)
6568 return 1;
6570 if (TREE_CODE (arg1) == FIXED_CST)
6571 return 0;
6572 if (TREE_CODE (arg0) == FIXED_CST)
6573 return 1;
6575 if (TREE_CODE (arg1) == COMPLEX_CST)
6576 return 0;
6577 if (TREE_CODE (arg0) == COMPLEX_CST)
6578 return 1;
6580 if (TREE_CONSTANT (arg1))
6581 return 0;
6582 if (TREE_CONSTANT (arg0))
6583 return 1;
6585 if (optimize_function_for_size_p (cfun))
6586 return 0;
6588 if (reorder && flag_evaluation_order
6589 && (TREE_SIDE_EFFECTS (arg0) || TREE_SIDE_EFFECTS (arg1)))
6590 return 0;
6592 /* It is preferable to swap two SSA_NAME to ensure a canonical form
6593 for commutative and comparison operators. Ensuring a canonical
6594 form allows the optimizers to find additional redundancies without
6595 having to explicitly check for both orderings. */
6596 if (TREE_CODE (arg0) == SSA_NAME
6597 && TREE_CODE (arg1) == SSA_NAME
6598 && SSA_NAME_VERSION (arg0) > SSA_NAME_VERSION (arg1))
6599 return 1;
6601 /* Put SSA_NAMEs last. */
6602 if (TREE_CODE (arg1) == SSA_NAME)
6603 return 0;
6604 if (TREE_CODE (arg0) == SSA_NAME)
6605 return 1;
6607 /* Put variables last. */
6608 if (DECL_P (arg1))
6609 return 0;
6610 if (DECL_P (arg0))
6611 return 1;
6613 return 0;
6616 /* Fold comparison ARG0 CODE ARG1 (with result in TYPE), where
6617 ARG0 is extended to a wider type. */
6619 static tree
6620 fold_widened_comparison (location_t loc, enum tree_code code,
6621 tree type, tree arg0, tree arg1)
6623 tree arg0_unw = get_unwidened (arg0, NULL_TREE);
6624 tree arg1_unw;
6625 tree shorter_type, outer_type;
6626 tree min, max;
6627 bool above, below;
6629 if (arg0_unw == arg0)
6630 return NULL_TREE;
6631 shorter_type = TREE_TYPE (arg0_unw);
6633 #ifdef HAVE_canonicalize_funcptr_for_compare
6634 /* Disable this optimization if we're casting a function pointer
6635 type on targets that require function pointer canonicalization. */
6636 if (HAVE_canonicalize_funcptr_for_compare
6637 && TREE_CODE (shorter_type) == POINTER_TYPE
6638 && TREE_CODE (TREE_TYPE (shorter_type)) == FUNCTION_TYPE)
6639 return NULL_TREE;
6640 #endif
6642 if (TYPE_PRECISION (TREE_TYPE (arg0)) <= TYPE_PRECISION (shorter_type))
6643 return NULL_TREE;
6645 arg1_unw = get_unwidened (arg1, NULL_TREE);
6647 /* If possible, express the comparison in the shorter mode. */
6648 if ((code == EQ_EXPR || code == NE_EXPR
6649 || TYPE_UNSIGNED (TREE_TYPE (arg0)) == TYPE_UNSIGNED (shorter_type))
6650 && (TREE_TYPE (arg1_unw) == shorter_type
6651 || ((TYPE_PRECISION (shorter_type)
6652 >= TYPE_PRECISION (TREE_TYPE (arg1_unw)))
6653 && (TYPE_UNSIGNED (shorter_type)
6654 == TYPE_UNSIGNED (TREE_TYPE (arg1_unw))))
6655 || (TREE_CODE (arg1_unw) == INTEGER_CST
6656 && (TREE_CODE (shorter_type) == INTEGER_TYPE
6657 || TREE_CODE (shorter_type) == BOOLEAN_TYPE)
6658 && int_fits_type_p (arg1_unw, shorter_type))))
6659 return fold_build2_loc (loc, code, type, arg0_unw,
6660 fold_convert_loc (loc, shorter_type, arg1_unw));
6662 if (TREE_CODE (arg1_unw) != INTEGER_CST
6663 || TREE_CODE (shorter_type) != INTEGER_TYPE
6664 || !int_fits_type_p (arg1_unw, shorter_type))
6665 return NULL_TREE;
6667 /* If we are comparing with an integer that does not fit into the range
6668 of the shorter type, the result is known. */
6669 outer_type = TREE_TYPE (arg1_unw);
6670 min = lower_bound_in_type (outer_type, shorter_type);
6671 max = upper_bound_in_type (outer_type, shorter_type);
6673 above = integer_nonzerop (fold_relational_const (LT_EXPR, type,
6674 max, arg1_unw));
6675 below = integer_nonzerop (fold_relational_const (LT_EXPR, type,
6676 arg1_unw, min));
6678 switch (code)
6680 case EQ_EXPR:
6681 if (above || below)
6682 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
6683 break;
6685 case NE_EXPR:
6686 if (above || below)
6687 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
6688 break;
6690 case LT_EXPR:
6691 case LE_EXPR:
6692 if (above)
6693 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
6694 else if (below)
6695 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
6697 case GT_EXPR:
6698 case GE_EXPR:
6699 if (above)
6700 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
6701 else if (below)
6702 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
6704 default:
6705 break;
6708 return NULL_TREE;
6711 /* Fold comparison ARG0 CODE ARG1 (with result in TYPE), where for
6712 ARG0 just the signedness is changed. */
6714 static tree
6715 fold_sign_changed_comparison (location_t loc, enum tree_code code, tree type,
6716 tree arg0, tree arg1)
6718 tree arg0_inner;
6719 tree inner_type, outer_type;
6721 if (!CONVERT_EXPR_P (arg0))
6722 return NULL_TREE;
6724 outer_type = TREE_TYPE (arg0);
6725 arg0_inner = TREE_OPERAND (arg0, 0);
6726 inner_type = TREE_TYPE (arg0_inner);
6728 #ifdef HAVE_canonicalize_funcptr_for_compare
6729 /* Disable this optimization if we're casting a function pointer
6730 type on targets that require function pointer canonicalization. */
6731 if (HAVE_canonicalize_funcptr_for_compare
6732 && TREE_CODE (inner_type) == POINTER_TYPE
6733 && TREE_CODE (TREE_TYPE (inner_type)) == FUNCTION_TYPE)
6734 return NULL_TREE;
6735 #endif
6737 if (TYPE_PRECISION (inner_type) != TYPE_PRECISION (outer_type))
6738 return NULL_TREE;
6740 if (TREE_CODE (arg1) != INTEGER_CST
6741 && !(CONVERT_EXPR_P (arg1)
6742 && TREE_TYPE (TREE_OPERAND (arg1, 0)) == inner_type))
6743 return NULL_TREE;
6745 if ((TYPE_UNSIGNED (inner_type) != TYPE_UNSIGNED (outer_type)
6746 || POINTER_TYPE_P (inner_type) != POINTER_TYPE_P (outer_type))
6747 && code != NE_EXPR
6748 && code != EQ_EXPR)
6749 return NULL_TREE;
6751 if (TREE_CODE (arg1) == INTEGER_CST)
6752 arg1 = force_fit_type_double (inner_type, tree_to_double_int (arg1),
6753 0, TREE_OVERFLOW (arg1));
6754 else
6755 arg1 = fold_convert_loc (loc, inner_type, arg1);
6757 return fold_build2_loc (loc, code, type, arg0_inner, arg1);
6760 /* Tries to replace &a[idx] p+ s * delta with &a[idx + delta], if s is
6761 the step of the array. Reconstructs s and delta in the case of s *
6762 delta being an integer constant (and thus already folded). ADDR is
6763 the address. OP1 is the multiplicative expression. If the
6764 function succeeds, the new address expression is returned.
6765 Otherwise NULL_TREE is returned. LOC is the location of the
6766 resulting expression. */
6768 static tree
6769 try_move_mult_to_index (location_t loc, tree addr, tree op1)
6771 tree s, delta, step;
6772 tree ref = TREE_OPERAND (addr, 0), pref;
6773 tree ret, pos;
6774 tree itype;
6775 bool mdim = false;
6777 /* Strip the nops that might be added when converting op1 to sizetype. */
6778 STRIP_NOPS (op1);
6780 /* Canonicalize op1 into a possibly non-constant delta
6781 and an INTEGER_CST s. */
6782 if (TREE_CODE (op1) == MULT_EXPR)
6784 tree arg0 = TREE_OPERAND (op1, 0), arg1 = TREE_OPERAND (op1, 1);
6786 STRIP_NOPS (arg0);
6787 STRIP_NOPS (arg1);
6789 if (TREE_CODE (arg0) == INTEGER_CST)
6791 s = arg0;
6792 delta = arg1;
6794 else if (TREE_CODE (arg1) == INTEGER_CST)
6796 s = arg1;
6797 delta = arg0;
6799 else
6800 return NULL_TREE;
6802 else if (TREE_CODE (op1) == INTEGER_CST)
6804 delta = op1;
6805 s = NULL_TREE;
6807 else
6809 /* Pretend op1 is delta * 1. */
6810 delta = op1;
6811 s = integer_one_node;
6814 for (;; ref = TREE_OPERAND (ref, 0))
6816 if (TREE_CODE (ref) == ARRAY_REF)
6818 tree domain;
6820 /* Remember if this was a multi-dimensional array. */
6821 if (TREE_CODE (TREE_OPERAND (ref, 0)) == ARRAY_REF)
6822 mdim = true;
6824 domain = TYPE_DOMAIN (TREE_TYPE (TREE_OPERAND (ref, 0)));
6825 if (! domain)
6826 continue;
6827 itype = TREE_TYPE (domain);
6829 step = array_ref_element_size (ref);
6830 if (TREE_CODE (step) != INTEGER_CST)
6831 continue;
6833 if (s)
6835 if (! tree_int_cst_equal (step, s))
6836 continue;
6838 else
6840 /* Check whether delta is a multiple of step. */
6841 tree tmp = div_if_zero_remainder (EXACT_DIV_EXPR, op1, step);
6842 if (! tmp)
6843 continue;
6844 delta = tmp;
6847 /* Only fold here if we can verify we do not overflow one
6848 dimension of a multi-dimensional array. */
6849 if (mdim)
6851 tree tmp;
6853 if (TREE_CODE (TREE_OPERAND (ref, 1)) != INTEGER_CST
6854 || !TYPE_MAX_VALUE (domain)
6855 || TREE_CODE (TYPE_MAX_VALUE (domain)) != INTEGER_CST)
6856 continue;
6858 tmp = fold_binary_loc (loc, PLUS_EXPR, itype,
6859 fold_convert_loc (loc, itype,
6860 TREE_OPERAND (ref, 1)),
6861 fold_convert_loc (loc, itype, delta));
6862 if (!tmp
6863 || TREE_CODE (tmp) != INTEGER_CST
6864 || tree_int_cst_lt (TYPE_MAX_VALUE (domain), tmp))
6865 continue;
6868 break;
6870 else
6871 mdim = false;
6873 if (!handled_component_p (ref))
6874 return NULL_TREE;
6877 /* We found a suitable array reference. So copy everything up to it,
6878 and replace the index. */
6880 pref = TREE_OPERAND (addr, 0);
6881 ret = copy_node (pref);
6882 SET_EXPR_LOCATION (ret, loc);
6883 pos = ret;
6885 while (pref != ref)
6887 pref = TREE_OPERAND (pref, 0);
6888 TREE_OPERAND (pos, 0) = copy_node (pref);
6889 pos = TREE_OPERAND (pos, 0);
6892 TREE_OPERAND (pos, 1) = fold_build2_loc (loc, PLUS_EXPR, itype,
6893 fold_convert_loc (loc, itype,
6894 TREE_OPERAND (pos, 1)),
6895 fold_convert_loc (loc, itype, delta));
6897 return fold_build1_loc (loc, ADDR_EXPR, TREE_TYPE (addr), ret);
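
/* Editor's sketch (illustrative only, not part of fold-const.c): the
   address fold above at the source level, for an int array whose
   element size (4) matches the scaled step.  */
static int *
move_mult_example (int *a, int idx, int delta)
{
  /* &a[idx] p+ delta * sizeof (int), i.e.
     (int *) ((char *) &a[idx] + delta * 4), folds to
     &a[idx + delta].  */
  return &a[idx + delta];
}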
6901 /* Fold A < X && A + 1 > Y to A < X && A >= Y. Normally A + 1 > Y
6902 means A >= Y && A != MAX, but in this case we know that
6903 A < X <= MAX. INEQ is A + 1 > Y, BOUND is A < X. */
6905 static tree
6906 fold_to_nonsharp_ineq_using_bound (location_t loc, tree ineq, tree bound)
6908 tree a, typea, type = TREE_TYPE (ineq), a1, diff, y;
6910 if (TREE_CODE (bound) == LT_EXPR)
6911 a = TREE_OPERAND (bound, 0);
6912 else if (TREE_CODE (bound) == GT_EXPR)
6913 a = TREE_OPERAND (bound, 1);
6914 else
6915 return NULL_TREE;
6917 typea = TREE_TYPE (a);
6918 if (!INTEGRAL_TYPE_P (typea)
6919 && !POINTER_TYPE_P (typea))
6920 return NULL_TREE;
6922 if (TREE_CODE (ineq) == LT_EXPR)
6924 a1 = TREE_OPERAND (ineq, 1);
6925 y = TREE_OPERAND (ineq, 0);
6927 else if (TREE_CODE (ineq) == GT_EXPR)
6929 a1 = TREE_OPERAND (ineq, 0);
6930 y = TREE_OPERAND (ineq, 1);
6932 else
6933 return NULL_TREE;
6935 if (TREE_TYPE (a1) != typea)
6936 return NULL_TREE;
6938 if (POINTER_TYPE_P (typea))
6940 /* Convert the pointers to integers before taking the difference. */
6941 tree ta = fold_convert_loc (loc, ssizetype, a);
6942 tree ta1 = fold_convert_loc (loc, ssizetype, a1);
6943 diff = fold_binary_loc (loc, MINUS_EXPR, ssizetype, ta1, ta);
6945 else
6946 diff = fold_binary_loc (loc, MINUS_EXPR, typea, a1, a);
6948 if (!diff || !integer_onep (diff))
6949 return NULL_TREE;
6951 return fold_build2_loc (loc, GE_EXPR, type, a, y);
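
/* Editor's sketch (illustrative only, not part of fold-const.c): the
   non-sharp inequality fold above.  The bound a < x rules out
   a == MAX, so a + 1 cannot wrap and a + 1 > y is equivalent to
   a >= y.  */
static int
nonsharp_ineq_example (unsigned a, unsigned x, unsigned y)
{
  /* a < x && a + 1 > y folds to a < x && a >= y.  */
  return a < x && a >= y;       /* what a < x && a + 1 > y becomes */
}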
6954 /* Fold a sum or difference of at least one multiplication.
6955 Returns the folded tree or NULL if no simplification could be made. */
6957 static tree
6958 fold_plusminus_mult_expr (location_t loc, enum tree_code code, tree type,
6959 tree arg0, tree arg1)
6961 tree arg00, arg01, arg10, arg11;
6962 tree alt0 = NULL_TREE, alt1 = NULL_TREE, same;
6964 /* (A * C) +- (B * C) -> (A+-B) * C.
6965 (A * C) +- A -> A * (C+-1).
6966 We are most concerned about the case where C is a constant,
6967 but other combinations show up during loop reduction. Since
6968 it is not difficult, try all four possibilities. */
6970 if (TREE_CODE (arg0) == MULT_EXPR)
6972 arg00 = TREE_OPERAND (arg0, 0);
6973 arg01 = TREE_OPERAND (arg0, 1);
6975 else if (TREE_CODE (arg0) == INTEGER_CST)
6977 arg00 = build_one_cst (type);
6978 arg01 = arg0;
6980 else
6982 /* We cannot generate constant 1 for fract. */
6983 if (ALL_FRACT_MODE_P (TYPE_MODE (type)))
6984 return NULL_TREE;
6985 arg00 = arg0;
6986 arg01 = build_one_cst (type);
6988 if (TREE_CODE (arg1) == MULT_EXPR)
6990 arg10 = TREE_OPERAND (arg1, 0);
6991 arg11 = TREE_OPERAND (arg1, 1);
6993 else if (TREE_CODE (arg1) == INTEGER_CST)
6995 arg10 = build_one_cst (type);
6996 /* As we canonicalize A - 2 to A + -2, get rid of that sign for
6997 the purpose of this canonicalization. */
6998 if (TREE_INT_CST_HIGH (arg1) == -1
6999 && negate_expr_p (arg1)
7000 && code == PLUS_EXPR)
7002 arg11 = negate_expr (arg1);
7003 code = MINUS_EXPR;
7005 else
7006 arg11 = arg1;
7008 else
7010 /* We cannot generate constant 1 for fract. */
7011 if (ALL_FRACT_MODE_P (TYPE_MODE (type)))
7012 return NULL_TREE;
7013 arg10 = arg1;
7014 arg11 = build_one_cst (type);
7016 same = NULL_TREE;
7018 if (operand_equal_p (arg01, arg11, 0))
7019 same = arg01, alt0 = arg00, alt1 = arg10;
7020 else if (operand_equal_p (arg00, arg10, 0))
7021 same = arg00, alt0 = arg01, alt1 = arg11;
7022 else if (operand_equal_p (arg00, arg11, 0))
7023 same = arg00, alt0 = arg01, alt1 = arg10;
7024 else if (operand_equal_p (arg01, arg10, 0))
7025 same = arg01, alt0 = arg00, alt1 = arg11;
7027 /* No identical multiplicands; see if we can find a common
7028 power-of-two factor in non-power-of-two multiplies. This
7029 can help in multi-dimensional array access. */
7030 else if (host_integerp (arg01, 0)
7031 && host_integerp (arg11, 0))
7033 HOST_WIDE_INT int01, int11, tmp;
7034 bool swap = false;
7035 tree maybe_same;
7036 int01 = TREE_INT_CST_LOW (arg01);
7037 int11 = TREE_INT_CST_LOW (arg11);
7039 /* Move min of absolute values to int11. */
7040 if (abs_hwi (int01) < abs_hwi (int11))
7042 tmp = int01, int01 = int11, int11 = tmp;
7043 alt0 = arg00, arg00 = arg10, arg10 = alt0;
7044 maybe_same = arg01;
7045 swap = true;
7047 else
7048 maybe_same = arg11;
7050 if (exact_log2 (abs_hwi (int11)) > 0 && int01 % int11 == 0
7051 /* The remainder should not be a constant, otherwise we
7052 end up folding i * 4 + 2 to (i * 2 + 1) * 2, which would
7053 increase the number of multiplications needed. */
7054 && TREE_CODE (arg10) != INTEGER_CST)
7056 alt0 = fold_build2_loc (loc, MULT_EXPR, TREE_TYPE (arg00), arg00,
7057 build_int_cst (TREE_TYPE (arg00),
7058 int01 / int11));
7059 alt1 = arg10;
7060 same = maybe_same;
7061 if (swap)
7062 maybe_same = alt0, alt0 = alt1, alt1 = maybe_same;
7066 if (same)
7067 return fold_build2_loc (loc, MULT_EXPR, type,
7068 fold_build2_loc (loc, code, type,
7069 fold_convert_loc (loc, type, alt0),
7070 fold_convert_loc (loc, type, alt1)),
7071 fold_convert_loc (loc, type, same));
7073 return NULL_TREE;
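
/* Editor's sketch (illustrative only, not part of fold-const.c): the
   two flavors of the distributive fold above.  */
static int
plusminus_mult_example (int a, int b, int i, int j)
{
  /* (a * 3) + (b * 3) folds to (a + b) * 3 (identical multiplicands),
     and i * 12 + j * 4 folds to (i * 3 + j) * 4 (a common power-of-two
     factor, which helps multi-dimensional array indexing).  */
  return (a + b) * 3 + (i * 3 + j) * 4;
}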
7076 /* Subroutine of native_encode_expr. Encode the INTEGER_CST
7077 specified by EXPR into the buffer PTR of length LEN bytes.
7078 Return the number of bytes placed in the buffer, or zero
7079 upon failure. */
7081 static int
7082 native_encode_int (const_tree expr, unsigned char *ptr, int len)
7084 tree type = TREE_TYPE (expr);
7085 int total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
7086 int byte, offset, word, words;
7087 unsigned char value;
7089 if (total_bytes > len)
7090 return 0;
7091 words = total_bytes / UNITS_PER_WORD;
7093 for (byte = 0; byte < total_bytes; byte++)
7095 int bitpos = byte * BITS_PER_UNIT;
7096 if (bitpos < HOST_BITS_PER_WIDE_INT)
7097 value = (unsigned char) (TREE_INT_CST_LOW (expr) >> bitpos);
7098 else
7099 value = (unsigned char) (TREE_INT_CST_HIGH (expr)
7100 >> (bitpos - HOST_BITS_PER_WIDE_INT));
7102 if (total_bytes > UNITS_PER_WORD)
7104 word = byte / UNITS_PER_WORD;
7105 if (WORDS_BIG_ENDIAN)
7106 word = (words - 1) - word;
7107 offset = word * UNITS_PER_WORD;
7108 if (BYTES_BIG_ENDIAN)
7109 offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
7110 else
7111 offset += byte % UNITS_PER_WORD;
7113 else
7114 offset = BYTES_BIG_ENDIAN ? (total_bytes - 1) - byte : byte;
7115 ptr[offset] = value;
7117 return total_bytes;
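
/* Editor's sketch (illustrative only, not part of fold-const.c): the
   byte layout the encoder above produces on a little-endian host,
   shown with plain integers.  */
static void
encode_int_example (unsigned int v, unsigned char buf[4])
{
  /* Byte 0 holds bits 0-7, byte 1 bits 8-15, and so on; on big-endian
     targets the offsets are mirrored, which is what the WORDS_/BYTES_
     BIG_ENDIAN logic above computes.  */
  buf[0] = (unsigned char) (v >> 0);
  buf[1] = (unsigned char) (v >> 8);
  buf[2] = (unsigned char) (v >> 16);
  buf[3] = (unsigned char) (v >> 24);
}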
7121 /* Subroutine of native_encode_expr. Encode the REAL_CST
7122 specified by EXPR into the buffer PTR of length LEN bytes.
7123 Return the number of bytes placed in the buffer, or zero
7124 upon failure. */
7126 static int
7127 native_encode_real (const_tree expr, unsigned char *ptr, int len)
7129 tree type = TREE_TYPE (expr);
7130 int total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
7131 int byte, offset, word, words, bitpos;
7132 unsigned char value;
7134 /* There are always 32 bits in each long, no matter the size of
7135 the host's long. We handle floating point representations with
7136 up to 192 bits. */
7137 long tmp[6];
7139 if (total_bytes > len)
7140 return 0;
7141 words = (32 / BITS_PER_UNIT) / UNITS_PER_WORD;
7143 real_to_target (tmp, TREE_REAL_CST_PTR (expr), TYPE_MODE (type));
7145 for (bitpos = 0; bitpos < total_bytes * BITS_PER_UNIT;
7146 bitpos += BITS_PER_UNIT)
7148 byte = (bitpos / BITS_PER_UNIT) & 3;
7149 value = (unsigned char) (tmp[bitpos / 32] >> (bitpos & 31));
7151 if (UNITS_PER_WORD < 4)
7153 word = byte / UNITS_PER_WORD;
7154 if (WORDS_BIG_ENDIAN)
7155 word = (words - 1) - word;
7156 offset = word * UNITS_PER_WORD;
7157 if (BYTES_BIG_ENDIAN)
7158 offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
7159 else
7160 offset += byte % UNITS_PER_WORD;
7162 else
7163 offset = BYTES_BIG_ENDIAN ? 3 - byte : byte;
7164 ptr[offset + ((bitpos / BITS_PER_UNIT) & ~3)] = value;
7166 return total_bytes;
7169 /* Subroutine of native_encode_expr. Encode the COMPLEX_CST
7170 specified by EXPR into the buffer PTR of length LEN bytes.
7171 Return the number of bytes placed in the buffer, or zero
7172 upon failure. */
7174 static int
7175 native_encode_complex (const_tree expr, unsigned char *ptr, int len)
7177 int rsize, isize;
7178 tree part;
7180 part = TREE_REALPART (expr);
7181 rsize = native_encode_expr (part, ptr, len);
7182 if (rsize == 0)
7183 return 0;
7184 part = TREE_IMAGPART (expr);
7185 isize = native_encode_expr (part, ptr+rsize, len-rsize);
7186 if (isize != rsize)
7187 return 0;
7188 return rsize + isize;
7192 /* Subroutine of native_encode_expr. Encode the VECTOR_CST
7193 specified by EXPR into the buffer PTR of length LEN bytes.
7194 Return the number of bytes placed in the buffer, or zero
7195 upon failure. */
7197 static int
7198 native_encode_vector (const_tree expr, unsigned char *ptr, int len)
7200 int i, size, offset, count;
7201 tree itype, elem, elements;
7203 offset = 0;
7204 elements = TREE_VECTOR_CST_ELTS (expr);
7205 count = TYPE_VECTOR_SUBPARTS (TREE_TYPE (expr));
7206 itype = TREE_TYPE (TREE_TYPE (expr));
7207 size = GET_MODE_SIZE (TYPE_MODE (itype));
7208 for (i = 0; i < count; i++)
7210 if (elements)
7212 elem = TREE_VALUE (elements);
7213 elements = TREE_CHAIN (elements);
7215 else
7216 elem = NULL_TREE;
7218 if (elem)
7220 if (native_encode_expr (elem, ptr+offset, len-offset) != size)
7221 return 0;
7223 else
7225 if (offset + size > len)
7226 return 0;
7227 memset (ptr+offset, 0, size);
7229 offset += size;
7231 return offset;
7235 /* Subroutine of native_encode_expr. Encode the STRING_CST
7236 specified by EXPR into the buffer PTR of length LEN bytes.
7237 Return the number of bytes placed in the buffer, or zero
7238 upon failure. */
7240 static int
7241 native_encode_string (const_tree expr, unsigned char *ptr, int len)
7243 tree type = TREE_TYPE (expr);
7244 HOST_WIDE_INT total_bytes;
7246 if (TREE_CODE (type) != ARRAY_TYPE
7247 || TREE_CODE (TREE_TYPE (type)) != INTEGER_TYPE
7248 || GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (type))) != BITS_PER_UNIT
7249 || !host_integerp (TYPE_SIZE_UNIT (type), 0))
7250 return 0;
7251 total_bytes = tree_low_cst (TYPE_SIZE_UNIT (type), 0);
7252 if (total_bytes > len)
7253 return 0;
7254 if (TREE_STRING_LENGTH (expr) < total_bytes)
7256 memcpy (ptr, TREE_STRING_POINTER (expr), TREE_STRING_LENGTH (expr));
7257 memset (ptr + TREE_STRING_LENGTH (expr), 0,
7258 total_bytes - TREE_STRING_LENGTH (expr));
7260 else
7261 memcpy (ptr, TREE_STRING_POINTER (expr), total_bytes);
7262 return total_bytes;
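/* [Editor's note: illustrative sketch, not part of the original file.
   Hypothetical helper; str_len plays the role of TREE_STRING_LENGTH.]
   The copy-then-pad logic above behaves like:

     static void
     encode_string (unsigned char *ptr, const char *str,
                    int str_len, int total_bytes)
     {
       if (str_len < total_bytes)
         {
           memcpy (ptr, str, str_len);
           memset (ptr + str_len, 0, total_bytes - str_len);
         }
       else
         memcpy (ptr, str, total_bytes);
     }

   E.g. char buf[8] = "hi" encodes as 'h' 'i' followed by six zero
   bytes, exactly as the initializer behaves at run time.  */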
7266 /* Subroutine of fold_view_convert_expr. Encode the INTEGER_CST,
7267 REAL_CST, COMPLEX_CST or VECTOR_CST specified by EXPR into the
7268 buffer PTR of length LEN bytes. Return the number of bytes
7269 placed in the buffer, or zero upon failure. */
7271 int
7272 native_encode_expr (const_tree expr, unsigned char *ptr, int len)
7274 switch (TREE_CODE (expr))
7276 case INTEGER_CST:
7277 return native_encode_int (expr, ptr, len);
7279 case REAL_CST:
7280 return native_encode_real (expr, ptr, len);
7282 case COMPLEX_CST:
7283 return native_encode_complex (expr, ptr, len);
7285 case VECTOR_CST:
7286 return native_encode_vector (expr, ptr, len);
7288 case STRING_CST:
7289 return native_encode_string (expr, ptr, len);
7291 default:
7292 return 0;
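/* [Editor's note: illustrative sketch, not part of the original file.
   Hypothetical wrapper name.]  A zero return always means "cannot
   encode", never a zero-length success, so callers can use it directly
   as a failure test:

     static tree
     pun_constant (tree cst, tree type)
     {
       unsigned char buf[64];
       int len = native_encode_expr (cst, buf, sizeof (buf));
       if (len == 0)
         return NULL_TREE;       /* Not encodable; give up.  */
       return native_interpret_expr (type, buf, len);
     }

   which is essentially the shape of fold_view_convert_expr below.  */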
7297 /* Subroutine of native_interpret_expr. Interpret the contents of
7298 the buffer PTR of length LEN as an INTEGER_CST of type TYPE.
7299 If the buffer cannot be interpreted, return NULL_TREE. */
7301 static tree
7302 native_interpret_int (tree type, const unsigned char *ptr, int len)
7304 int total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
7305 int byte, offset, word, words;
7306 unsigned char value;
7307 double_int result;
7309 if (total_bytes > len)
7310 return NULL_TREE;
7311 if (total_bytes * BITS_PER_UNIT > 2 * HOST_BITS_PER_WIDE_INT)
7312 return NULL_TREE;
7314 result = double_int_zero;
7315 words = total_bytes / UNITS_PER_WORD;
7317 for (byte = 0; byte < total_bytes; byte++)
7319 int bitpos = byte * BITS_PER_UNIT;
7320 if (total_bytes > UNITS_PER_WORD)
7322 word = byte / UNITS_PER_WORD;
7323 if (WORDS_BIG_ENDIAN)
7324 word = (words - 1) - word;
7325 offset = word * UNITS_PER_WORD;
7326 if (BYTES_BIG_ENDIAN)
7327 offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
7328 else
7329 offset += byte % UNITS_PER_WORD;
7331 else
7332 offset = BYTES_BIG_ENDIAN ? (total_bytes - 1) - byte : byte;
7333 value = ptr[offset];
7335 if (bitpos < HOST_BITS_PER_WIDE_INT)
7336 result.low |= (unsigned HOST_WIDE_INT) value << bitpos;
7337 else
7338 result.high |= (unsigned HOST_WIDE_INT) value
7339 << (bitpos - HOST_BITS_PER_WIDE_INT);
7342 return double_int_to_tree (type, result);
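/* [Editor's note: illustrative sketch, not part of the original file.]
   The accumulation above splits the value across the two
   HOST_WIDE_INT halves of the double_int.  With a 64-bit
   HOST_WIDE_INT, schematically:

     bytes 0..7   ->  result.low  |= value << (8 * byte);
     bytes 8..15  ->  result.high |= value << (8 * byte - 64);

   e.g. interpreting the little-endian buffer 01 00 ... 00 as a
   16-byte integer yields result.low == 1 and result.high == 0.  */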
7346 /* Subroutine of native_interpret_expr. Interpret the contents of
7347 the buffer PTR of length LEN as a REAL_CST of type TYPE.
7348 If the buffer cannot be interpreted, return NULL_TREE. */
7350 static tree
7351 native_interpret_real (tree type, const unsigned char *ptr, int len)
7353 enum machine_mode mode = TYPE_MODE (type);
7354 int total_bytes = GET_MODE_SIZE (mode);
7355 int byte, offset, word, words, bitpos;
7356 unsigned char value;
7357 /* There are always 32 bits in each long, no matter the size of
7358 the host's long. We handle floating point representations with
7359 up to 192 bits. */
7360 REAL_VALUE_TYPE r;
7361 long tmp[6];
7363 total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
7364 if (total_bytes > len || total_bytes > 24)
7365 return NULL_TREE;
7366 words = (32 / BITS_PER_UNIT) / UNITS_PER_WORD;
7368 memset (tmp, 0, sizeof (tmp));
7369 for (bitpos = 0; bitpos < total_bytes * BITS_PER_UNIT;
7370 bitpos += BITS_PER_UNIT)
7372 byte = (bitpos / BITS_PER_UNIT) & 3;
7373 if (UNITS_PER_WORD < 4)
7375 word = byte / UNITS_PER_WORD;
7376 if (WORDS_BIG_ENDIAN)
7377 word = (words - 1) - word;
7378 offset = word * UNITS_PER_WORD;
7379 if (BYTES_BIG_ENDIAN)
7380 offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
7381 else
7382 offset += byte % UNITS_PER_WORD;
7384 else
7385 offset = BYTES_BIG_ENDIAN ? 3 - byte : byte;
7386 value = ptr[offset + ((bitpos / BITS_PER_UNIT) & ~3)];
7388 tmp[bitpos / 32] |= (unsigned long)value << (bitpos & 31);
7391 real_from_target (&r, tmp, mode);
7392 return build_real (type, r);
7396 /* Subroutine of native_interpret_expr. Interpret the contents of
7397 the buffer PTR of length LEN as a COMPLEX_CST of type TYPE.
7398 If the buffer cannot be interpreted, return NULL_TREE. */
7400 static tree
7401 native_interpret_complex (tree type, const unsigned char *ptr, int len)
7403 tree etype, rpart, ipart;
7404 int size;
7406 etype = TREE_TYPE (type);
7407 size = GET_MODE_SIZE (TYPE_MODE (etype));
7408 if (size * 2 > len)
7409 return NULL_TREE;
7410 rpart = native_interpret_expr (etype, ptr, size);
7411 if (!rpart)
7412 return NULL_TREE;
7413 ipart = native_interpret_expr (etype, ptr+size, size);
7414 if (!ipart)
7415 return NULL_TREE;
7416 return build_complex (type, rpart, ipart);
7420 /* Subroutine of native_interpret_expr. Interpret the contents of
7421 the buffer PTR of length LEN as a VECTOR_CST of type TYPE.
7422 If the buffer cannot be interpreted, return NULL_TREE. */
7424 static tree
7425 native_interpret_vector (tree type, const unsigned char *ptr, int len)
7427 tree etype, elem, elements;
7428 int i, size, count;
7430 etype = TREE_TYPE (type);
7431 size = GET_MODE_SIZE (TYPE_MODE (etype));
7432 count = TYPE_VECTOR_SUBPARTS (type);
7433 if (size * count > len)
7434 return NULL_TREE;
7436 elements = NULL_TREE;
7437 for (i = count - 1; i >= 0; i--)
7439 elem = native_interpret_expr (etype, ptr+(i*size), size);
7440 if (!elem)
7441 return NULL_TREE;
7442 elements = tree_cons (NULL_TREE, elem, elements);
7444 return build_vector (type, elements);
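/* [Editor's note: illustrative sketch, not part of the original file;
   elem[i] stands schematically for the value interpreted at index I.]
   The loop walks the elements last-to-first because tree_cons
   prepends, leaving the TREE_LIST in ascending element order:

     list = NULL_TREE;
     for (i = count - 1; i >= 0; i--)
       list = tree_cons (NULL_TREE, elem[i], list);
     /* list is now elem[0], elem[1], ..., elem[count-1].  */
   */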
7448 /* Subroutine of fold_view_convert_expr. Interpret the contents of
7449 the buffer PTR of length LEN as a constant of type TYPE. For
7450 INTEGRAL_TYPE_P we return an INTEGER_CST, for SCALAR_FLOAT_TYPE_P
7451 we return a REAL_CST, etc... If the buffer cannot be interpreted,
7452 return NULL_TREE. */
7454 tree
7455 native_interpret_expr (tree type, const unsigned char *ptr, int len)
7457 switch (TREE_CODE (type))
7459 case INTEGER_TYPE:
7460 case ENUMERAL_TYPE:
7461 case BOOLEAN_TYPE:
7462 return native_interpret_int (type, ptr, len);
7464 case REAL_TYPE:
7465 return native_interpret_real (type, ptr, len);
7467 case COMPLEX_TYPE:
7468 return native_interpret_complex (type, ptr, len);
7470 case VECTOR_TYPE:
7471 return native_interpret_vector (type, ptr, len);
7473 default:
7474 return NULL_TREE;
7479 /* Fold a VIEW_CONVERT_EXPR of a constant expression EXPR to type
7480 TYPE at compile-time. If we're unable to perform the conversion
7481 return NULL_TREE. */
7483 static tree
7484 fold_view_convert_expr (tree type, tree expr)
7486 /* We support up to 512-bit values (for V8DFmode). */
7487 unsigned char buffer[64];
7488 int len;
7490 /* Check that the host and target are sane. */
7491 if (CHAR_BIT != 8 || BITS_PER_UNIT != 8)
7492 return NULL_TREE;
7494 len = native_encode_expr (expr, buffer, sizeof (buffer));
7495 if (len == 0)
7496 return NULL_TREE;
7498 return native_interpret_expr (type, buffer, len);
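/* [Editor's note: illustrative sketch, not part of the original file.]
   The encode/interpret round trip is compile-time type punning, the
   counterpart of this run-time idiom:

     #include <string.h>
     #include <stdint.h>
     float f = 1.0f;
     uint32_t bits;
     memcpy (&bits, &f, sizeof (bits));   /* 0x3f800000 on IEEE hosts */

   so a VIEW_CONVERT_EXPR from float to a 32-bit integer type folds to
   the constant 0x3f800000, provided host and target both use 8-bit
   bytes as checked above.  */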
7501 /* Build an expression for the address of T. Folds away INDIRECT_REF
7502 to avoid confusing the gimplify process. */
7504 tree
7505 build_fold_addr_expr_with_type_loc (location_t loc, tree t, tree ptrtype)
7507 /* The size of the object is not relevant when talking about its address. */
7508 if (TREE_CODE (t) == WITH_SIZE_EXPR)
7509 t = TREE_OPERAND (t, 0);
7511 if (TREE_CODE (t) == INDIRECT_REF)
7513 t = TREE_OPERAND (t, 0);
7515 if (TREE_TYPE (t) != ptrtype)
7516 t = build1_loc (loc, NOP_EXPR, ptrtype, t);
7518 else if (TREE_CODE (t) == MEM_REF
7519 && integer_zerop (TREE_OPERAND (t, 1)))
7520 return TREE_OPERAND (t, 0);
7521 else if (TREE_CODE (t) == VIEW_CONVERT_EXPR)
7523 t = build_fold_addr_expr_loc (loc, TREE_OPERAND (t, 0));
7525 if (TREE_TYPE (t) != ptrtype)
7526 t = fold_convert_loc (loc, ptrtype, t);
7528 else
7529 t = build1_loc (loc, ADDR_EXPR, ptrtype, t);
7531 return t;
7534 /* Build an expression for the address of T. */
7536 tree
7537 build_fold_addr_expr_loc (location_t loc, tree t)
7539 tree ptrtype = build_pointer_type (TREE_TYPE (t));
7541 return build_fold_addr_expr_with_type_loc (loc, t, ptrtype);
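/* [Editor's note: illustrative sketch, not part of the original file.]
   The INDIRECT_REF case corresponds to folding &*p back to p at the
   source level:

     int *p, *q;
     q = &*p;   /* folds to q = p; no dereference is ever emitted */

   which keeps the gimplifier from seeing a spurious load.  */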
7544 /* Fold a unary expression of code CODE and type TYPE with operand
7545 OP0. Return the folded expression if folding is successful.
7546 Otherwise, return NULL_TREE. */
7548 tree
7549 fold_unary_loc (location_t loc, enum tree_code code, tree type, tree op0)
7551 tree tem;
7552 tree arg0;
7553 enum tree_code_class kind = TREE_CODE_CLASS (code);
7555 gcc_assert (IS_EXPR_CODE_CLASS (kind)
7556 && TREE_CODE_LENGTH (code) == 1);
7558 arg0 = op0;
7559 if (arg0)
7561 if (CONVERT_EXPR_CODE_P (code)
7562 || code == FLOAT_EXPR || code == ABS_EXPR || code == NEGATE_EXPR)
7564 /* Don't use STRIP_NOPS, because signedness of argument type
7565 matters. */
7566 STRIP_SIGN_NOPS (arg0);
7568 else
7570 /* Strip any conversions that don't change the mode. This
7571 is safe for every expression, except for a comparison
7572 expression because its signedness is derived from its
7573 operands.
7575 Note that this is done as an internal manipulation within
7576 the constant folder, in order to find the simplest
7577 representation of the arguments so that their form can be
7578 studied. In any case, the appropriate type conversions
7579 should be put back in the tree that will get out of the
7580 constant folder. */
7581 STRIP_NOPS (arg0);
7585 if (TREE_CODE_CLASS (code) == tcc_unary)
7587 if (TREE_CODE (arg0) == COMPOUND_EXPR)
7588 return build2 (COMPOUND_EXPR, type, TREE_OPERAND (arg0, 0),
7589 fold_build1_loc (loc, code, type,
7590 fold_convert_loc (loc, TREE_TYPE (op0),
7591 TREE_OPERAND (arg0, 1))));
7592 else if (TREE_CODE (arg0) == COND_EXPR)
7594 tree arg01 = TREE_OPERAND (arg0, 1);
7595 tree arg02 = TREE_OPERAND (arg0, 2);
7596 if (! VOID_TYPE_P (TREE_TYPE (arg01)))
7597 arg01 = fold_build1_loc (loc, code, type,
7598 fold_convert_loc (loc,
7599 TREE_TYPE (op0), arg01));
7600 if (! VOID_TYPE_P (TREE_TYPE (arg02)))
7601 arg02 = fold_build1_loc (loc, code, type,
7602 fold_convert_loc (loc,
7603 TREE_TYPE (op0), arg02));
7604 tem = fold_build3_loc (loc, COND_EXPR, type, TREE_OPERAND (arg0, 0),
7605 arg01, arg02);
7607 /* If this was a conversion, and all we did was to move into
7608 inside the COND_EXPR, bring it back out. But leave it if
7609 it is a conversion from integer to integer and the
7610 result precision is no wider than a word since such a
7611 conversion is cheap and may be optimized away by combine,
7612 while it couldn't if it were outside the COND_EXPR. Then return
7613 so we don't get into an infinite recursion loop taking the
7614 conversion out and then back in. */
7616 if ((CONVERT_EXPR_CODE_P (code)
7617 || code == NON_LVALUE_EXPR)
7618 && TREE_CODE (tem) == COND_EXPR
7619 && TREE_CODE (TREE_OPERAND (tem, 1)) == code
7620 && TREE_CODE (TREE_OPERAND (tem, 2)) == code
7621 && ! VOID_TYPE_P (TREE_OPERAND (tem, 1))
7622 && ! VOID_TYPE_P (TREE_OPERAND (tem, 2))
7623 && (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 1), 0))
7624 == TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 2), 0)))
7625 && (! (INTEGRAL_TYPE_P (TREE_TYPE (tem))
7626 && (INTEGRAL_TYPE_P
7627 (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 1), 0))))
7628 && TYPE_PRECISION (TREE_TYPE (tem)) <= BITS_PER_WORD)
7629 || flag_syntax_only))
7630 tem = build1_loc (loc, code, type,
7631 build3 (COND_EXPR,
7632 TREE_TYPE (TREE_OPERAND
7633 (TREE_OPERAND (tem, 1), 0)),
7634 TREE_OPERAND (tem, 0),
7635 TREE_OPERAND (TREE_OPERAND (tem, 1), 0),
7636 TREE_OPERAND (TREE_OPERAND (tem, 2),
7637 0)));
7638 return tem;
7642 switch (code)
7644 case PAREN_EXPR:
7645 /* Re-association barriers around constants and other re-association
7646 barriers can be removed. */
7647 if (CONSTANT_CLASS_P (op0)
7648 || TREE_CODE (op0) == PAREN_EXPR)
7649 return fold_convert_loc (loc, type, op0);
7650 return NULL_TREE;
7652 CASE_CONVERT:
7653 case FLOAT_EXPR:
7654 case FIX_TRUNC_EXPR:
7655 if (TREE_TYPE (op0) == type)
7656 return op0;
7658 if (COMPARISON_CLASS_P (op0))
7660 /* If we have (type) (a CMP b) and type is an integral type, return
7661 new expression involving the new type. Canonicalize
7662 (type) (a CMP b) to (a CMP b) ? (type) true : (type) false for
7663 non-integral type.
7664 Do not fold the result as that would not simplify further, also
7665 folding again results in recursions. */
7666 if (TREE_CODE (type) == BOOLEAN_TYPE)
7667 return build2_loc (loc, TREE_CODE (op0), type,
7668 TREE_OPERAND (op0, 0),
7669 TREE_OPERAND (op0, 1));
7670 else if (!INTEGRAL_TYPE_P (type))
7671 return build3_loc (loc, COND_EXPR, type, op0,
7672 fold_convert (type, boolean_true_node),
7673 fold_convert (type, boolean_false_node));
7676 /* Handle cases of two conversions in a row. */
7677 if (CONVERT_EXPR_P (op0))
7679 tree inside_type = TREE_TYPE (TREE_OPERAND (op0, 0));
7680 tree inter_type = TREE_TYPE (op0);
7681 int inside_int = INTEGRAL_TYPE_P (inside_type);
7682 int inside_ptr = POINTER_TYPE_P (inside_type);
7683 int inside_float = FLOAT_TYPE_P (inside_type);
7684 int inside_vec = TREE_CODE (inside_type) == VECTOR_TYPE;
7685 unsigned int inside_prec = TYPE_PRECISION (inside_type);
7686 int inside_unsignedp = TYPE_UNSIGNED (inside_type);
7687 int inter_int = INTEGRAL_TYPE_P (inter_type);
7688 int inter_ptr = POINTER_TYPE_P (inter_type);
7689 int inter_float = FLOAT_TYPE_P (inter_type);
7690 int inter_vec = TREE_CODE (inter_type) == VECTOR_TYPE;
7691 unsigned int inter_prec = TYPE_PRECISION (inter_type);
7692 int inter_unsignedp = TYPE_UNSIGNED (inter_type);
7693 int final_int = INTEGRAL_TYPE_P (type);
7694 int final_ptr = POINTER_TYPE_P (type);
7695 int final_float = FLOAT_TYPE_P (type);
7696 int final_vec = TREE_CODE (type) == VECTOR_TYPE;
7697 unsigned int final_prec = TYPE_PRECISION (type);
7698 int final_unsignedp = TYPE_UNSIGNED (type);
7700 /* In addition to the cases of two conversions in a row
7701 handled below, if we are converting something to its own
7702 type via an object of identical or wider precision, neither
7703 conversion is needed. */
7704 if (TYPE_MAIN_VARIANT (inside_type) == TYPE_MAIN_VARIANT (type)
7705 && (((inter_int || inter_ptr) && final_int)
7706 || (inter_float && final_float))
7707 && inter_prec >= final_prec)
7708 return fold_build1_loc (loc, code, type, TREE_OPERAND (op0, 0));
7710 /* Likewise, if the intermediate and initial types are either both
7711 float or both integer, we don't need the middle conversion if the
7712 former is wider than the latter and doesn't change the signedness
7713 (for integers). Avoid this if the final type is a pointer since
7714 then we sometimes need the middle conversion. Likewise if the
7715 final type has a precision not equal to the size of its mode. */
7716 if (((inter_int && inside_int)
7717 || (inter_float && inside_float)
7718 || (inter_vec && inside_vec))
7719 && inter_prec >= inside_prec
7720 && (inter_float || inter_vec
7721 || inter_unsignedp == inside_unsignedp)
7722 && ! (final_prec != GET_MODE_BITSIZE (TYPE_MODE (type))
7723 && TYPE_MODE (type) == TYPE_MODE (inter_type))
7724 && ! final_ptr
7725 && (! final_vec || inter_prec == inside_prec))
7726 return fold_build1_loc (loc, code, type, TREE_OPERAND (op0, 0));
7728 /* If we have a sign-extension of a zero-extended value, we can
7729 replace that by a single zero-extension. */
7730 if (inside_int && inter_int && final_int
7731 && inside_prec < inter_prec && inter_prec < final_prec
7732 && inside_unsignedp && !inter_unsignedp)
7733 return fold_build1_loc (loc, code, type, TREE_OPERAND (op0, 0));
7735 /* Two conversions in a row are not needed unless:
7736 - some conversion is floating-point (overstrict for now), or
7737 - some conversion is a vector (overstrict for now), or
7738 - the intermediate type is narrower than both initial and
7739 final, or
7740 - the intermediate type and innermost type differ in signedness,
7741 and the outermost type is wider than the intermediate, or
7742 - the initial type is a pointer type and the precisions of the
7743 intermediate and final types differ, or
7744 - the final type is a pointer type and the precisions of the
7745 initial and intermediate types differ. */
7746 if (! inside_float && ! inter_float && ! final_float
7747 && ! inside_vec && ! inter_vec && ! final_vec
7748 && (inter_prec >= inside_prec || inter_prec >= final_prec)
7749 && ! (inside_int && inter_int
7750 && inter_unsignedp != inside_unsignedp
7751 && inter_prec < final_prec)
7752 && ((inter_unsignedp && inter_prec > inside_prec)
7753 == (final_unsignedp && final_prec > inter_prec))
7754 && ! (inside_ptr && inter_prec != final_prec)
7755 && ! (final_ptr && inside_prec != inter_prec)
7756 && ! (final_prec != GET_MODE_BITSIZE (TYPE_MODE (type))
7757 && TYPE_MODE (type) == TYPE_MODE (inter_type)))
7758 return fold_build1_loc (loc, code, type, TREE_OPERAND (op0, 0));
7761 /* Handle (T *)&A.B.C for A being of type T and B and C
7762 living at offset zero. This occurs frequently in
7763 C++ upcasting and then accessing the base. */
7764 if (TREE_CODE (op0) == ADDR_EXPR
7765 && POINTER_TYPE_P (type)
7766 && handled_component_p (TREE_OPERAND (op0, 0)))
7768 HOST_WIDE_INT bitsize, bitpos;
7769 tree offset;
7770 enum machine_mode mode;
7771 int unsignedp, volatilep;
7772 tree base = TREE_OPERAND (op0, 0);
7773 base = get_inner_reference (base, &bitsize, &bitpos, &offset,
7774 &mode, &unsignedp, &volatilep, false);
7775 /* If the reference was to a (constant) zero offset, we can use
7776 the address of the base if it has the same base type
7777 as the result type and the pointer type is unqualified. */
7778 if (! offset && bitpos == 0
7779 && (TYPE_MAIN_VARIANT (TREE_TYPE (type))
7780 == TYPE_MAIN_VARIANT (TREE_TYPE (base)))
7781 && TYPE_QUALS (type) == TYPE_UNQUALIFIED)
7782 return fold_convert_loc (loc, type,
7783 build_fold_addr_expr_loc (loc, base));
7786 if (TREE_CODE (op0) == MODIFY_EXPR
7787 && TREE_CONSTANT (TREE_OPERAND (op0, 1))
7788 /* Detect assigning a bitfield. */
7789 && !(TREE_CODE (TREE_OPERAND (op0, 0)) == COMPONENT_REF
7790 && DECL_BIT_FIELD
7791 (TREE_OPERAND (TREE_OPERAND (op0, 0), 1))))
7793 /* Don't leave an assignment inside a conversion
7794 unless assigning a bitfield. */
7795 tem = fold_build1_loc (loc, code, type, TREE_OPERAND (op0, 1));
7796 /* First do the assignment, then return converted constant. */
7797 tem = build2_loc (loc, COMPOUND_EXPR, TREE_TYPE (tem), op0, tem);
7798 TREE_NO_WARNING (tem) = 1;
7799 TREE_USED (tem) = 1;
7800 return tem;
7803 /* Convert (T)(x & c) into (T)x & (T)c, if c is an integer
7804 constants (if x has signed type, the sign bit cannot be set
7805 in c). This folds extension into the BIT_AND_EXPR.
7806 ??? We don't do it for BOOLEAN_TYPE or ENUMERAL_TYPE because they
7807 very likely don't have maximal range for their precision and this
7808 transformation effectively doesn't preserve non-maximal ranges. */
7809 if (TREE_CODE (type) == INTEGER_TYPE
7810 && TREE_CODE (op0) == BIT_AND_EXPR
7811 && TREE_CODE (TREE_OPERAND (op0, 1)) == INTEGER_CST)
7813 tree and_expr = op0;
7814 tree and0 = TREE_OPERAND (and_expr, 0);
7815 tree and1 = TREE_OPERAND (and_expr, 1);
7816 int change = 0;
7818 if (TYPE_UNSIGNED (TREE_TYPE (and_expr))
7819 || (TYPE_PRECISION (type)
7820 <= TYPE_PRECISION (TREE_TYPE (and_expr))))
7821 change = 1;
7822 else if (TYPE_PRECISION (TREE_TYPE (and1))
7823 <= HOST_BITS_PER_WIDE_INT
7824 && host_integerp (and1, 1))
7826 unsigned HOST_WIDE_INT cst;
7828 cst = tree_low_cst (and1, 1);
7829 cst &= (HOST_WIDE_INT) -1
7830 << (TYPE_PRECISION (TREE_TYPE (and1)) - 1);
7831 change = (cst == 0);
7832 #ifdef LOAD_EXTEND_OP
7833 if (change
7834 && !flag_syntax_only
7835 && (LOAD_EXTEND_OP (TYPE_MODE (TREE_TYPE (and0)))
7836 == ZERO_EXTEND))
7838 tree uns = unsigned_type_for (TREE_TYPE (and0));
7839 and0 = fold_convert_loc (loc, uns, and0);
7840 and1 = fold_convert_loc (loc, uns, and1);
7842 #endif
7844 if (change)
7846 tem = force_fit_type_double (type, tree_to_double_int (and1),
7847 0, TREE_OVERFLOW (and1));
7848 return fold_build2_loc (loc, BIT_AND_EXPR, type,
7849 fold_convert_loc (loc, type, and0), tem);
7853 /* Convert (T1)(X p+ Y) into ((T1)X p+ Y), for pointer type,
7854 when one of the new casts will fold away. Conservatively we assume
7855 that this happens when X or Y is NOP_EXPR or Y is INTEGER_CST. */
7856 if (POINTER_TYPE_P (type)
7857 && TREE_CODE (arg0) == POINTER_PLUS_EXPR
7858 && (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
7859 || TREE_CODE (TREE_OPERAND (arg0, 0)) == NOP_EXPR
7860 || TREE_CODE (TREE_OPERAND (arg0, 1)) == NOP_EXPR))
7862 tree arg00 = TREE_OPERAND (arg0, 0);
7863 tree arg01 = TREE_OPERAND (arg0, 1);
7865 return fold_build2_loc (loc,
7866 TREE_CODE (arg0), type,
7867 fold_convert_loc (loc, type, arg00),
7868 fold_convert_loc (loc, sizetype, arg01));
7871 /* Convert (T1)(~(T2)X) into ~(T1)X if T1 and T2 are integral types
7872 of the same precision, and X is an integer type not narrower than
7873 types T1 or T2, i.e. the cast (T2)X isn't an extension. */
7874 if (INTEGRAL_TYPE_P (type)
7875 && TREE_CODE (op0) == BIT_NOT_EXPR
7876 && INTEGRAL_TYPE_P (TREE_TYPE (op0))
7877 && CONVERT_EXPR_P (TREE_OPERAND (op0, 0))
7878 && TYPE_PRECISION (type) == TYPE_PRECISION (TREE_TYPE (op0)))
7880 tem = TREE_OPERAND (TREE_OPERAND (op0, 0), 0);
7881 if (INTEGRAL_TYPE_P (TREE_TYPE (tem))
7882 && TYPE_PRECISION (type) <= TYPE_PRECISION (TREE_TYPE (tem)))
7883 return fold_build1_loc (loc, BIT_NOT_EXPR, type,
7884 fold_convert_loc (loc, type, tem));
7887 /* Convert (T1)(X * Y) into (T1)X * (T1)Y if T1 is narrower than the
7888 type of X and Y (integer types only). */
7889 if (INTEGRAL_TYPE_P (type)
7890 && TREE_CODE (op0) == MULT_EXPR
7891 && INTEGRAL_TYPE_P (TREE_TYPE (op0))
7892 && TYPE_PRECISION (type) < TYPE_PRECISION (TREE_TYPE (op0)))
7894 /* Be careful not to introduce new overflows. */
7895 tree mult_type;
7896 if (TYPE_OVERFLOW_WRAPS (type))
7897 mult_type = type;
7898 else
7899 mult_type = unsigned_type_for (type);
7901 if (TYPE_PRECISION (mult_type) < TYPE_PRECISION (TREE_TYPE (op0)))
7903 tem = fold_build2_loc (loc, MULT_EXPR, mult_type,
7904 fold_convert_loc (loc, mult_type,
7905 TREE_OPERAND (op0, 0)),
7906 fold_convert_loc (loc, mult_type,
7907 TREE_OPERAND (op0, 1)));
7908 return fold_convert_loc (loc, type, tem);
7912 tem = fold_convert_const (code, type, op0);
7913 return tem ? tem : NULL_TREE;
7915 case ADDR_SPACE_CONVERT_EXPR:
7916 if (integer_zerop (arg0))
7917 return fold_convert_const (code, type, arg0);
7918 return NULL_TREE;
7920 case FIXED_CONVERT_EXPR:
7921 tem = fold_convert_const (code, type, arg0);
7922 return tem ? tem : NULL_TREE;
7924 case VIEW_CONVERT_EXPR:
7925 if (TREE_TYPE (op0) == type)
7926 return op0;
7927 if (TREE_CODE (op0) == VIEW_CONVERT_EXPR)
7928 return fold_build1_loc (loc, VIEW_CONVERT_EXPR,
7929 type, TREE_OPERAND (op0, 0));
7930 if (TREE_CODE (op0) == MEM_REF)
7931 return fold_build2_loc (loc, MEM_REF, type,
7932 TREE_OPERAND (op0, 0), TREE_OPERAND (op0, 1));
7934 /* For integral conversions with the same precision or pointer
7935 conversions use a NOP_EXPR instead. */
7936 if ((INTEGRAL_TYPE_P (type)
7937 || POINTER_TYPE_P (type))
7938 && (INTEGRAL_TYPE_P (TREE_TYPE (op0))
7939 || POINTER_TYPE_P (TREE_TYPE (op0)))
7940 && TYPE_PRECISION (type) == TYPE_PRECISION (TREE_TYPE (op0)))
7941 return fold_convert_loc (loc, type, op0);
7943 /* Strip inner integral conversions that do not change the precision. */
7944 if (CONVERT_EXPR_P (op0)
7945 && (INTEGRAL_TYPE_P (TREE_TYPE (op0))
7946 || POINTER_TYPE_P (TREE_TYPE (op0)))
7947 && (INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (op0, 0)))
7948 || POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (op0, 0))))
7949 && (TYPE_PRECISION (TREE_TYPE (op0))
7950 == TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (op0, 0)))))
7951 return fold_build1_loc (loc, VIEW_CONVERT_EXPR,
7952 type, TREE_OPERAND (op0, 0));
7954 return fold_view_convert_expr (type, op0);
7956 case NEGATE_EXPR:
7957 tem = fold_negate_expr (loc, arg0);
7958 if (tem)
7959 return fold_convert_loc (loc, type, tem);
7960 return NULL_TREE;
7962 case ABS_EXPR:
7963 if (TREE_CODE (arg0) == INTEGER_CST || TREE_CODE (arg0) == REAL_CST)
7964 return fold_abs_const (arg0, type);
7965 else if (TREE_CODE (arg0) == NEGATE_EXPR)
7966 return fold_build1_loc (loc, ABS_EXPR, type, TREE_OPERAND (arg0, 0));
7967 /* Convert fabs((double)float) into (double)fabsf(float). */
7968 else if (TREE_CODE (arg0) == NOP_EXPR
7969 && TREE_CODE (type) == REAL_TYPE)
7971 tree targ0 = strip_float_extensions (arg0);
7972 if (targ0 != arg0)
7973 return fold_convert_loc (loc, type,
7974 fold_build1_loc (loc, ABS_EXPR,
7975 TREE_TYPE (targ0),
7976 targ0));
7978 /* ABS_EXPR<ABS_EXPR<x>> = ABS_EXPR<x> even if flag_wrapv is on. */
7979 else if (TREE_CODE (arg0) == ABS_EXPR)
7980 return arg0;
7981 else if (tree_expr_nonnegative_p (arg0))
7982 return arg0;
7984 /* Strip sign ops from argument. */
7985 if (TREE_CODE (type) == REAL_TYPE)
7987 tem = fold_strip_sign_ops (arg0);
7988 if (tem)
7989 return fold_build1_loc (loc, ABS_EXPR, type,
7990 fold_convert_loc (loc, type, tem));
7992 return NULL_TREE;
7994 case CONJ_EXPR:
7995 if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
7996 return fold_convert_loc (loc, type, arg0);
7997 if (TREE_CODE (arg0) == COMPLEX_EXPR)
7999 tree itype = TREE_TYPE (type);
8000 tree rpart = fold_convert_loc (loc, itype, TREE_OPERAND (arg0, 0));
8001 tree ipart = fold_convert_loc (loc, itype, TREE_OPERAND (arg0, 1));
8002 return fold_build2_loc (loc, COMPLEX_EXPR, type, rpart,
8003 negate_expr (ipart));
8005 if (TREE_CODE (arg0) == COMPLEX_CST)
8007 tree itype = TREE_TYPE (type);
8008 tree rpart = fold_convert_loc (loc, itype, TREE_REALPART (arg0));
8009 tree ipart = fold_convert_loc (loc, itype, TREE_IMAGPART (arg0));
8010 return build_complex (type, rpart, negate_expr (ipart));
8012 if (TREE_CODE (arg0) == CONJ_EXPR)
8013 return fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
8014 return NULL_TREE;
8016 case BIT_NOT_EXPR:
8017 if (TREE_CODE (arg0) == INTEGER_CST)
8018 return fold_not_const (arg0, type);
8019 else if (TREE_CODE (arg0) == BIT_NOT_EXPR)
8020 return fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
8021 /* Convert ~ (-A) to A - 1. */
8022 else if (INTEGRAL_TYPE_P (type) && TREE_CODE (arg0) == NEGATE_EXPR)
8023 return fold_build2_loc (loc, MINUS_EXPR, type,
8024 fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0)),
8025 build_int_cst (type, 1));
8026 /* Convert ~ (A - 1) or ~ (A + -1) to -A. */
8027 else if (INTEGRAL_TYPE_P (type)
8028 && ((TREE_CODE (arg0) == MINUS_EXPR
8029 && integer_onep (TREE_OPERAND (arg0, 1)))
8030 || (TREE_CODE (arg0) == PLUS_EXPR
8031 && integer_all_onesp (TREE_OPERAND (arg0, 1)))))
8032 return fold_build1_loc (loc, NEGATE_EXPR, type,
8033 fold_convert_loc (loc, type,
8034 TREE_OPERAND (arg0, 0)));
8035 /* Convert ~(X ^ Y) to ~X ^ Y or X ^ ~Y if ~X or ~Y simplify. */
8036 else if (TREE_CODE (arg0) == BIT_XOR_EXPR
8037 && (tem = fold_unary_loc (loc, BIT_NOT_EXPR, type,
8038 fold_convert_loc (loc, type,
8039 TREE_OPERAND (arg0, 0)))))
8040 return fold_build2_loc (loc, BIT_XOR_EXPR, type, tem,
8041 fold_convert_loc (loc, type,
8042 TREE_OPERAND (arg0, 1)));
8043 else if (TREE_CODE (arg0) == BIT_XOR_EXPR
8044 && (tem = fold_unary_loc (loc, BIT_NOT_EXPR, type,
8045 fold_convert_loc (loc, type,
8046 TREE_OPERAND (arg0, 1)))))
8047 return fold_build2_loc (loc, BIT_XOR_EXPR, type,
8048 fold_convert_loc (loc, type,
8049 TREE_OPERAND (arg0, 0)), tem);
8050 /* Perform BIT_NOT_EXPR on each element individually. */
8051 else if (TREE_CODE (arg0) == VECTOR_CST)
8053 tree elements = TREE_VECTOR_CST_ELTS (arg0), elem, list = NULL_TREE;
8054 int count = TYPE_VECTOR_SUBPARTS (type), i;
8056 for (i = 0; i < count; i++)
8058 if (elements)
8060 elem = TREE_VALUE (elements);
8061 elem = fold_unary_loc (loc, BIT_NOT_EXPR, TREE_TYPE (type), elem);
8062 if (elem == NULL_TREE)
8063 break;
8064 elements = TREE_CHAIN (elements);
8066 else
8067 elem = build_int_cst (TREE_TYPE (type), -1);
8068 list = tree_cons (NULL_TREE, elem, list);
8070 if (i == count)
8071 return build_vector (type, nreverse (list));
8074 return NULL_TREE;
8076 case TRUTH_NOT_EXPR:
8077 /* The argument to invert_truthvalue must have Boolean type. */
8078 if (TREE_CODE (TREE_TYPE (arg0)) != BOOLEAN_TYPE)
8079 arg0 = fold_convert_loc (loc, boolean_type_node, arg0);
8081 /* Note that the operand of this must be an int
8082 and its values must be 0 or 1.
8083 ("true" is a fixed value perhaps depending on the language,
8084 but we don't handle values other than 1 correctly yet.) */
8085 tem = fold_truth_not_expr (loc, arg0);
8086 if (!tem)
8087 return NULL_TREE;
8088 return fold_convert_loc (loc, type, tem);
8090 case REALPART_EXPR:
8091 if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
8092 return fold_convert_loc (loc, type, arg0);
8093 if (TREE_CODE (arg0) == COMPLEX_EXPR)
8094 return omit_one_operand_loc (loc, type, TREE_OPERAND (arg0, 0),
8095 TREE_OPERAND (arg0, 1));
8096 if (TREE_CODE (arg0) == COMPLEX_CST)
8097 return fold_convert_loc (loc, type, TREE_REALPART (arg0));
8098 if (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
8100 tree itype = TREE_TYPE (TREE_TYPE (arg0));
8101 tem = fold_build2_loc (loc, TREE_CODE (arg0), itype,
8102 fold_build1_loc (loc, REALPART_EXPR, itype,
8103 TREE_OPERAND (arg0, 0)),
8104 fold_build1_loc (loc, REALPART_EXPR, itype,
8105 TREE_OPERAND (arg0, 1)));
8106 return fold_convert_loc (loc, type, tem);
8108 if (TREE_CODE (arg0) == CONJ_EXPR)
8110 tree itype = TREE_TYPE (TREE_TYPE (arg0));
8111 tem = fold_build1_loc (loc, REALPART_EXPR, itype,
8112 TREE_OPERAND (arg0, 0));
8113 return fold_convert_loc (loc, type, tem);
8115 if (TREE_CODE (arg0) == CALL_EXPR)
8117 tree fn = get_callee_fndecl (arg0);
8118 if (fn && DECL_BUILT_IN_CLASS (fn) == BUILT_IN_NORMAL)
8119 switch (DECL_FUNCTION_CODE (fn))
8121 CASE_FLT_FN (BUILT_IN_CEXPI):
8122 fn = mathfn_built_in (type, BUILT_IN_COS);
8123 if (fn)
8124 return build_call_expr_loc (loc, fn, 1, CALL_EXPR_ARG (arg0, 0));
8125 break;
8127 default:
8128 break;
8131 return NULL_TREE;
8133 case IMAGPART_EXPR:
8134 if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
8135 return build_zero_cst (type);
8136 if (TREE_CODE (arg0) == COMPLEX_EXPR)
8137 return omit_one_operand_loc (loc, type, TREE_OPERAND (arg0, 1),
8138 TREE_OPERAND (arg0, 0));
8139 if (TREE_CODE (arg0) == COMPLEX_CST)
8140 return fold_convert_loc (loc, type, TREE_IMAGPART (arg0));
8141 if (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
8143 tree itype = TREE_TYPE (TREE_TYPE (arg0));
8144 tem = fold_build2_loc (loc, TREE_CODE (arg0), itype,
8145 fold_build1_loc (loc, IMAGPART_EXPR, itype,
8146 TREE_OPERAND (arg0, 0)),
8147 fold_build1_loc (loc, IMAGPART_EXPR, itype,
8148 TREE_OPERAND (arg0, 1)));
8149 return fold_convert_loc (loc, type, tem);
8151 if (TREE_CODE (arg0) == CONJ_EXPR)
8153 tree itype = TREE_TYPE (TREE_TYPE (arg0));
8154 tem = fold_build1_loc (loc, IMAGPART_EXPR, itype, TREE_OPERAND (arg0, 0));
8155 return fold_convert_loc (loc, type, negate_expr (tem));
8157 if (TREE_CODE (arg0) == CALL_EXPR)
8159 tree fn = get_callee_fndecl (arg0);
8160 if (fn && DECL_BUILT_IN_CLASS (fn) == BUILT_IN_NORMAL)
8161 switch (DECL_FUNCTION_CODE (fn))
8163 CASE_FLT_FN (BUILT_IN_CEXPI):
8164 fn = mathfn_built_in (type, BUILT_IN_SIN);
8165 if (fn)
8166 return build_call_expr_loc (loc, fn, 1, CALL_EXPR_ARG (arg0, 0));
8167 break;
8169 default:
8170 break;
8173 return NULL_TREE;
8175 case INDIRECT_REF:
8176 /* Fold *&X to X if X is an lvalue. */
8177 if (TREE_CODE (op0) == ADDR_EXPR)
8179 tree op00 = TREE_OPERAND (op0, 0);
8180 if ((TREE_CODE (op00) == VAR_DECL
8181 || TREE_CODE (op00) == PARM_DECL
8182 || TREE_CODE (op00) == RESULT_DECL)
8183 && !TREE_READONLY (op00))
8184 return op00;
8186 return NULL_TREE;
8188 default:
8189 return NULL_TREE;
8190 } /* switch (code) */
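/* [Editor's note: illustrative sketch, not part of the original file;
   x is assumed to be a plain int.]  Two of the conversion folds above,
   written at the source level:

     short s;
     (int)(long) s        /* -> (int) s: the wider intermediate
                             conversion is dropped.  */
     (unsigned)(x & 0xff)
                          /* -> (unsigned) x & 0xffu: the extension is
                             pushed into the BIT_AND_EXPR.  */

   Both are safe because no bit that survives the final conversion or
   mask can be affected by the elided intermediate step.  */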
8194 /* If the operation was a conversion do _not_ mark a resulting constant
8195 with TREE_OVERFLOW if the original constant was not. These conversions
8196 have implementation defined behavior and retaining the TREE_OVERFLOW
8197 flag here would confuse later passes such as VRP. */
8198 tree
8199 fold_unary_ignore_overflow_loc (location_t loc, enum tree_code code,
8200 tree type, tree op0)
8202 tree res = fold_unary_loc (loc, code, type, op0);
8203 if (res
8204 && TREE_CODE (res) == INTEGER_CST
8205 && TREE_CODE (op0) == INTEGER_CST
8206 && CONVERT_EXPR_CODE_P (code))
8207 TREE_OVERFLOW (res) = TREE_OVERFLOW (op0);
8209 return res;
8212 /* Fold a binary bitwise/truth expression of code CODE and type TYPE with
8213 operands OP0 and OP1. LOC is the location of the resulting expression.
8214 ARG0 and ARG1 are the NOP_STRIPed results of OP0 and OP1.
8215 Return the folded expression if folding is successful. Otherwise,
8216 return NULL_TREE. */
8217 static tree
8218 fold_truth_andor (location_t loc, enum tree_code code, tree type,
8219 tree arg0, tree arg1, tree op0, tree op1)
8221 tree tem;
8223 /* We only do these simplifications if we are optimizing. */
8224 if (!optimize)
8225 return NULL_TREE;
8227 /* Check for things like (A || B) && (A || C). We can convert this
8228 to A || (B && C). Note that either operator can be any of the four
8229 truth and/or operations and the transformation will still be
8230 valid. Also note that we only care about order for the
8231 ANDIF and ORIF operators. If B contains side effects, this
8232 might change the truth-value of A. */
8233 if (TREE_CODE (arg0) == TREE_CODE (arg1)
8234 && (TREE_CODE (arg0) == TRUTH_ANDIF_EXPR
8235 || TREE_CODE (arg0) == TRUTH_ORIF_EXPR
8236 || TREE_CODE (arg0) == TRUTH_AND_EXPR
8237 || TREE_CODE (arg0) == TRUTH_OR_EXPR)
8238 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (arg0, 1)))
8240 tree a00 = TREE_OPERAND (arg0, 0);
8241 tree a01 = TREE_OPERAND (arg0, 1);
8242 tree a10 = TREE_OPERAND (arg1, 0);
8243 tree a11 = TREE_OPERAND (arg1, 1);
8244 int commutative = ((TREE_CODE (arg0) == TRUTH_OR_EXPR
8245 || TREE_CODE (arg0) == TRUTH_AND_EXPR)
8246 && (code == TRUTH_AND_EXPR
8247 || code == TRUTH_OR_EXPR));
8249 if (operand_equal_p (a00, a10, 0))
8250 return fold_build2_loc (loc, TREE_CODE (arg0), type, a00,
8251 fold_build2_loc (loc, code, type, a01, a11));
8252 else if (commutative && operand_equal_p (a00, a11, 0))
8253 return fold_build2_loc (loc, TREE_CODE (arg0), type, a00,
8254 fold_build2_loc (loc, code, type, a01, a10));
8255 else if (commutative && operand_equal_p (a01, a10, 0))
8256 return fold_build2_loc (loc, TREE_CODE (arg0), type, a01,
8257 fold_build2_loc (loc, code, type, a00, a11));
8259 /* This case is tricky because we must either have commutative
8260 operators or else A10 must not have side-effects. */
8262 else if ((commutative || ! TREE_SIDE_EFFECTS (a10))
8263 && operand_equal_p (a01, a11, 0))
8264 return fold_build2_loc (loc, TREE_CODE (arg0), type,
8265 fold_build2_loc (loc, code, type, a00, a10),
8266 a01);
8269 /* See if we can build a range comparison. */
8270 if (0 != (tem = fold_range_test (loc, code, type, op0, op1)))
8271 return tem;
8273 if ((code == TRUTH_ANDIF_EXPR && TREE_CODE (arg0) == TRUTH_ORIF_EXPR)
8274 || (code == TRUTH_ORIF_EXPR && TREE_CODE (arg0) == TRUTH_ANDIF_EXPR))
8276 tem = merge_truthop_with_opposite_arm (loc, arg0, arg1, true);
8277 if (tem)
8278 return fold_build2_loc (loc, code, type, tem, arg1);
8281 if ((code == TRUTH_ANDIF_EXPR && TREE_CODE (arg1) == TRUTH_ORIF_EXPR)
8282 || (code == TRUTH_ORIF_EXPR && TREE_CODE (arg1) == TRUTH_ANDIF_EXPR))
8284 tem = merge_truthop_with_opposite_arm (loc, arg1, arg0, false);
8285 if (tem)
8286 return fold_build2_loc (loc, code, type, arg0, tem);
8289 /* Check for the possibility of merging component references. If our
8290 lhs is another similar operation, try to merge its rhs with our
8291 rhs. Then try to merge our lhs and rhs. */
8292 if (TREE_CODE (arg0) == code
8293 && 0 != (tem = fold_truthop (loc, code, type,
8294 TREE_OPERAND (arg0, 1), arg1)))
8295 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0), tem);
8297 if ((tem = fold_truthop (loc, code, type, arg0, arg1)) != 0)
8298 return tem;
8300 return NULL_TREE;
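/* [Editor's note: illustrative sketch, not part of the original file.]
   The component-merging case in ordinary C terms:

     (a || b) && (a || c)   ==   a || (b && c)

   which can be spot-checked exhaustively in any test harness:

     #include <assert.h>
     for (int a = 0; a <= 1; a++)
       for (int b = 0; b <= 1; b++)
         for (int c = 0; c <= 1; c++)
           assert (((a || b) && (a || c)) == (a || (b && c)));

   The !TREE_SIDE_EFFECTS check on B matters because the ANDIF/ORIF
   form evaluates A twice with B in between; a side-effecting B could
   change A's second value.  */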
8303 /* Fold a binary expression of code CODE and type TYPE with operands
8304 OP0 and OP1, containing either a MIN-MAX or a MAX-MIN combination.
8305 Return the folded expression if folding is successful. Otherwise,
8306 return NULL_TREE. */
8308 static tree
8309 fold_minmax (location_t loc, enum tree_code code, tree type, tree op0, tree op1)
8311 enum tree_code compl_code;
8313 if (code == MIN_EXPR)
8314 compl_code = MAX_EXPR;
8315 else if (code == MAX_EXPR)
8316 compl_code = MIN_EXPR;
8317 else
8318 gcc_unreachable ();
8320 /* MIN (MAX (a, b), b) == b. */
8321 if (TREE_CODE (op0) == compl_code
8322 && operand_equal_p (TREE_OPERAND (op0, 1), op1, 0))
8323 return omit_one_operand_loc (loc, type, op1, TREE_OPERAND (op0, 0));
8325 /* MIN (MAX (b, a), b) == b. */
8326 if (TREE_CODE (op0) == compl_code
8327 && operand_equal_p (TREE_OPERAND (op0, 0), op1, 0)
8328 && reorder_operands_p (TREE_OPERAND (op0, 1), op1))
8329 return omit_one_operand_loc (loc, type, op1, TREE_OPERAND (op0, 1));
8331 /* MIN (a, MAX (a, b)) == a. */
8332 if (TREE_CODE (op1) == compl_code
8333 && operand_equal_p (op0, TREE_OPERAND (op1, 0), 0)
8334 && reorder_operands_p (op0, TREE_OPERAND (op1, 1)))
8335 return omit_one_operand_loc (loc, type, op0, TREE_OPERAND (op1, 1));
8337 /* MIN (a, MAX (b, a)) == a. */
8338 if (TREE_CODE (op1) == compl_code
8339 && operand_equal_p (op0, TREE_OPERAND (op1, 1), 0)
8340 && reorder_operands_p (op0, TREE_OPERAND (op1, 0)))
8341 return omit_one_operand_loc (loc, type, op0, TREE_OPERAND (op1, 0));
8343 return NULL_TREE;
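/* [Editor's note: illustrative sketch, not part of the original file.]
   The absorption identities above in plain C:

     #define MIN(x, y) ((x) < (y) ? (x) : (y))
     #define MAX(x, y) ((x) > (y) ? (x) : (y))
     /* MIN (MAX (a, b), b) == b for all a, b:
        MAX (a, b) >= b, so the outer MIN always selects b.  */

   omit_one_operand_loc keeps the discarded operand's side effects,
   so C evaluation rules are preserved.  */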
8346 /* Helper that tries to canonicalize the comparison ARG0 CODE ARG1
8347 by changing CODE to reduce the magnitude of constants involved in
8348 ARG0 of the comparison.
8349 Returns a canonicalized comparison tree if a simplification was
8350 possible, otherwise returns NULL_TREE.
8351 Set *STRICT_OVERFLOW_P to true if the canonicalization is only
8352 valid if signed overflow is undefined. */
8354 static tree
8355 maybe_canonicalize_comparison_1 (location_t loc, enum tree_code code, tree type,
8356 tree arg0, tree arg1,
8357 bool *strict_overflow_p)
8359 enum tree_code code0 = TREE_CODE (arg0);
8360 tree t, cst0 = NULL_TREE;
8361 int sgn0;
8362 bool swap = false;
8364 /* Match A +- CST code arg1 and CST code arg1. We can change the
8365 first form only if overflow is undefined. */
8366 if (!((TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0))
8367 /* In principle pointers also have undefined overflow behavior,
8368 but that causes problems elsewhere. */
8369 && !POINTER_TYPE_P (TREE_TYPE (arg0))
8370 && (code0 == MINUS_EXPR
8371 || code0 == PLUS_EXPR)
8372 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
8373 || code0 == INTEGER_CST))
8374 return NULL_TREE;
8376 /* Identify the constant in arg0 and its sign. */
8377 if (code0 == INTEGER_CST)
8378 cst0 = arg0;
8379 else
8380 cst0 = TREE_OPERAND (arg0, 1);
8381 sgn0 = tree_int_cst_sgn (cst0);
8383 /* Overflowed constants and zero will cause problems. */
8384 if (integer_zerop (cst0)
8385 || TREE_OVERFLOW (cst0))
8386 return NULL_TREE;
8388 /* See if we can reduce the magnitude of the constant in
8389 arg0 by changing the comparison code. */
8390 if (code0 == INTEGER_CST)
8392 /* CST <= arg1 -> CST-1 < arg1. */
8393 if (code == LE_EXPR && sgn0 == 1)
8394 code = LT_EXPR;
8395 /* -CST < arg1 -> -CST-1 <= arg1. */
8396 else if (code == LT_EXPR && sgn0 == -1)
8397 code = LE_EXPR;
8398 /* CST > arg1 -> CST-1 >= arg1. */
8399 else if (code == GT_EXPR && sgn0 == 1)
8400 code = GE_EXPR;
8401 /* -CST >= arg1 -> -CST-1 > arg1. */
8402 else if (code == GE_EXPR && sgn0 == -1)
8403 code = GT_EXPR;
8404 else
8405 return NULL_TREE;
8406 /* arg1 code' CST' might be more canonical. */
8407 swap = true;
8409 else
8411 /* A - CST < arg1 -> A - CST-1 <= arg1. */
8412 if (code == LT_EXPR
8413 && code0 == ((sgn0 == -1) ? PLUS_EXPR : MINUS_EXPR))
8414 code = LE_EXPR;
8415 /* A + CST > arg1 -> A + CST-1 >= arg1. */
8416 else if (code == GT_EXPR
8417 && code0 == ((sgn0 == -1) ? MINUS_EXPR : PLUS_EXPR))
8418 code = GE_EXPR;
8419 /* A + CST <= arg1 -> A + CST-1 < arg1. */
8420 else if (code == LE_EXPR
8421 && code0 == ((sgn0 == -1) ? MINUS_EXPR : PLUS_EXPR))
8422 code = LT_EXPR;
8423 /* A - CST >= arg1 -> A - CST-1 > arg1. */
8424 else if (code == GE_EXPR
8425 && code0 == ((sgn0 == -1) ? PLUS_EXPR : MINUS_EXPR))
8426 code = GT_EXPR;
8427 else
8428 return NULL_TREE;
8429 *strict_overflow_p = true;
8432 /* Now build the constant reduced in magnitude. But not if that
8433 would produce one outside of its types range. */
8434 if (INTEGRAL_TYPE_P (TREE_TYPE (cst0))
8435 && ((sgn0 == 1
8436 && TYPE_MIN_VALUE (TREE_TYPE (cst0))
8437 && tree_int_cst_equal (cst0, TYPE_MIN_VALUE (TREE_TYPE (cst0))))
8438 || (sgn0 == -1
8439 && TYPE_MAX_VALUE (TREE_TYPE (cst0))
8440 && tree_int_cst_equal (cst0, TYPE_MAX_VALUE (TREE_TYPE (cst0))))))
8441 /* We cannot swap the comparison here as that would cause us to
8442 endlessly recurse. */
8443 return NULL_TREE;
8445 t = int_const_binop (sgn0 == -1 ? PLUS_EXPR : MINUS_EXPR,
8446 cst0, build_int_cst (TREE_TYPE (cst0), 1));
8447 if (code0 != INTEGER_CST)
8448 t = fold_build2_loc (loc, code0, TREE_TYPE (arg0), TREE_OPERAND (arg0, 0), t);
8450 /* If swapping might yield a more canonical form, do so. */
8451 if (swap)
8452 return fold_build2_loc (loc, swap_tree_comparison (code), type, arg1, t);
8453 else
8454 return fold_build2_loc (loc, code, type, t, arg1);
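/* [Editor's note: illustrative sketch, not part of the original file.]
   Concretely, for signed integer x:

     x - 10 <  y    becomes    x - 9 <= y
     5 <= y         becomes    4 <  y     (then swapped to  y > 4)

   The first rewrite is only valid when signed overflow is undefined
   (x - 10 can wrap for values of x where x - 9 does not), which is
   why *strict_overflow_p is set for that form but not for the bare
   constant form.  */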
8457 /* Canonicalize the comparison ARG0 CODE ARG1 with type TYPE with undefined
8458 overflow further. Try to decrease the magnitude of constants involved
8459 by changing LE_EXPR and GE_EXPR to LT_EXPR and GT_EXPR or vice versa
8460 and put sole constants at the second argument position.
8461 Returns the canonicalized tree if changed, otherwise NULL_TREE. */
8463 static tree
8464 maybe_canonicalize_comparison (location_t loc, enum tree_code code, tree type,
8465 tree arg0, tree arg1)
8467 tree t;
8468 bool strict_overflow_p;
8469 const char * const warnmsg = G_("assuming signed overflow does not occur "
8470 "when reducing constant in comparison");
8472 /* Try canonicalization by simplifying arg0. */
8473 strict_overflow_p = false;
8474 t = maybe_canonicalize_comparison_1 (loc, code, type, arg0, arg1,
8475 &strict_overflow_p);
8476 if (t)
8478 if (strict_overflow_p)
8479 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_MAGNITUDE);
8480 return t;
8483 /* Try canonicalization by simplifying arg1 using the swapped
8484 comparison. */
8485 code = swap_tree_comparison (code);
8486 strict_overflow_p = false;
8487 t = maybe_canonicalize_comparison_1 (loc, code, type, arg1, arg0,
8488 &strict_overflow_p);
8489 if (t && strict_overflow_p)
8490 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_MAGNITUDE);
8491 return t;
8494 /* Return whether BASE + OFFSET + BITPOS may wrap around the address
8495 space. This is used to avoid issuing overflow warnings for
8496 expressions like &p->x which cannot wrap. */
8498 static bool
8499 pointer_may_wrap_p (tree base, tree offset, HOST_WIDE_INT bitpos)
8501 unsigned HOST_WIDE_INT offset_low, total_low;
8502 HOST_WIDE_INT size, offset_high, total_high;
8504 if (!POINTER_TYPE_P (TREE_TYPE (base)))
8505 return true;
8507 if (bitpos < 0)
8508 return true;
8510 if (offset == NULL_TREE)
8512 offset_low = 0;
8513 offset_high = 0;
8515 else if (TREE_CODE (offset) != INTEGER_CST || TREE_OVERFLOW (offset))
8516 return true;
8517 else
8519 offset_low = TREE_INT_CST_LOW (offset);
8520 offset_high = TREE_INT_CST_HIGH (offset);
8523 if (add_double_with_sign (offset_low, offset_high,
8524 bitpos / BITS_PER_UNIT, 0,
8525 &total_low, &total_high,
8526 true))
8527 return true;
8529 if (total_high != 0)
8530 return true;
8532 size = int_size_in_bytes (TREE_TYPE (TREE_TYPE (base)));
8533 if (size <= 0)
8534 return true;
8536 /* We can do slightly better for SIZE if we have an ADDR_EXPR of an
8537 array. */
8538 if (TREE_CODE (base) == ADDR_EXPR)
8540 HOST_WIDE_INT base_size;
8542 base_size = int_size_in_bytes (TREE_TYPE (TREE_OPERAND (base, 0)));
8543 if (base_size > 0 && size < base_size)
8544 size = base_size;
8547 return total_low > (unsigned HOST_WIDE_INT) size;
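/* [Editor's note: illustrative sketch, not part of the original file;
   offsets shown are typical, not mandated.]  A case that cannot wrap:

     struct s { int a; int b; } v;
     /* &v.b == (char *) &v + 4, and 4 + sizeof (int) <= sizeof v,
        so the address stays inside the object.  */

   Only when offset + bitpos/8 reaches past the pointed-to size does
   the function return true and inhibit the fold.  */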
8550 /* Subroutine of fold_binary. This routine performs all of the
8551 transformations that are common to the equality/inequality
8552 operators (EQ_EXPR and NE_EXPR) and the ordering operators
8553 (LT_EXPR, LE_EXPR, GE_EXPR and GT_EXPR). Callers other than
8554 fold_binary itself should use fold_binary, not this routine. Fold a comparison with
8555 tree code CODE and type TYPE with operands OP0 and OP1. Return
8556 the folded comparison or NULL_TREE. */
8558 static tree
8559 fold_comparison (location_t loc, enum tree_code code, tree type,
8560 tree op0, tree op1)
8562 tree arg0, arg1, tem;
8564 arg0 = op0;
8565 arg1 = op1;
8567 STRIP_SIGN_NOPS (arg0);
8568 STRIP_SIGN_NOPS (arg1);
8570 tem = fold_relational_const (code, type, arg0, arg1);
8571 if (tem != NULL_TREE)
8572 return tem;
8574 /* If one arg is a real or integer constant, put it last. */
8575 if (tree_swap_operands_p (arg0, arg1, true))
8576 return fold_build2_loc (loc, swap_tree_comparison (code), type, op1, op0);
8578 /* Transform comparisons of the form X +- C1 CMP C2 to X CMP C2 +- C1. */
8579 if ((TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
8580 && (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
8581 && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1))
8582 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
8583 && (TREE_CODE (arg1) == INTEGER_CST
8584 && !TREE_OVERFLOW (arg1)))
8586 tree const1 = TREE_OPERAND (arg0, 1);
8587 tree const2 = arg1;
8588 tree variable = TREE_OPERAND (arg0, 0);
8589 tree lhs;
8590 int lhs_add;
8591 lhs_add = TREE_CODE (arg0) != PLUS_EXPR;
8593 lhs = fold_build2_loc (loc, lhs_add ? PLUS_EXPR : MINUS_EXPR,
8594 TREE_TYPE (arg1), const2, const1);
8596 /* If the constant operation overflowed this can be
8597 simplified as a comparison against INT_MAX/INT_MIN. */
8598 if (TREE_CODE (lhs) == INTEGER_CST
8599 && TREE_OVERFLOW (lhs))
8601 int const1_sgn = tree_int_cst_sgn (const1);
8602 enum tree_code code2 = code;
8604 /* Get the sign of the constant on the lhs if the
8605 operation were VARIABLE + CONST1. */
8606 if (TREE_CODE (arg0) == MINUS_EXPR)
8607 const1_sgn = -const1_sgn;
8609 /* The sign of the constant determines if we overflowed
8610 INT_MAX (const1_sgn == -1) or INT_MIN (const1_sgn == 1).
8611 Canonicalize to the INT_MIN overflow by swapping the comparison
8612 if necessary. */
8613 if (const1_sgn == -1)
8614 code2 = swap_tree_comparison (code);
8616 /* We can now look at the canonicalized case
8617 VARIABLE + 1 CODE2 INT_MIN
8618 and decide on the result. */
8619 if (code2 == LT_EXPR
8620 || code2 == LE_EXPR
8621 || code2 == EQ_EXPR)
8622 return omit_one_operand_loc (loc, type, boolean_false_node, variable);
8623 else if (code2 == NE_EXPR
8624 || code2 == GE_EXPR
8625 || code2 == GT_EXPR)
8626 return omit_one_operand_loc (loc, type, boolean_true_node, variable);
8629 if (TREE_CODE (lhs) == TREE_CODE (arg1)
8630 && (TREE_CODE (lhs) != INTEGER_CST
8631 || !TREE_OVERFLOW (lhs)))
8633 if (code != EQ_EXPR && code != NE_EXPR)
8634 fold_overflow_warning ("assuming signed overflow does not occur "
8635 "when changing X +- C1 cmp C2 to "
8636 "X cmp C1 +- C2",
8637 WARN_STRICT_OVERFLOW_COMPARISON);
8638 return fold_build2_loc (loc, code, type, variable, lhs);
8642 /* For comparisons of pointers we can decompose it to a compile time
8643 comparison of the base objects and the offsets into the object.
8644 This requires at least one operand being an ADDR_EXPR or a
8645 POINTER_PLUS_EXPR to do more than the operand_equal_p test below. */
8646 if (POINTER_TYPE_P (TREE_TYPE (arg0))
8647 && (TREE_CODE (arg0) == ADDR_EXPR
8648 || TREE_CODE (arg1) == ADDR_EXPR
8649 || TREE_CODE (arg0) == POINTER_PLUS_EXPR
8650 || TREE_CODE (arg1) == POINTER_PLUS_EXPR))
8652 tree base0, base1, offset0 = NULL_TREE, offset1 = NULL_TREE;
8653 HOST_WIDE_INT bitsize, bitpos0 = 0, bitpos1 = 0;
8654 enum machine_mode mode;
8655 int volatilep, unsignedp;
8656 bool indirect_base0 = false, indirect_base1 = false;
8658 /* Get base and offset for the access. Strip ADDR_EXPR for
8659 get_inner_reference, but put it back by stripping INDIRECT_REF
8660 off the base object if possible. indirect_baseN will be true
8661 if baseN is not an address but refers to the object itself. */
8662 base0 = arg0;
8663 if (TREE_CODE (arg0) == ADDR_EXPR)
8665 base0 = get_inner_reference (TREE_OPERAND (arg0, 0),
8666 &bitsize, &bitpos0, &offset0, &mode,
8667 &unsignedp, &volatilep, false);
8668 if (TREE_CODE (base0) == INDIRECT_REF)
8669 base0 = TREE_OPERAND (base0, 0);
8670 else
8671 indirect_base0 = true;
8673 else if (TREE_CODE (arg0) == POINTER_PLUS_EXPR)
8675 base0 = TREE_OPERAND (arg0, 0);
8676 STRIP_SIGN_NOPS (base0);
8677 if (TREE_CODE (base0) == ADDR_EXPR)
8679 base0 = TREE_OPERAND (base0, 0);
8680 indirect_base0 = true;
8682 offset0 = TREE_OPERAND (arg0, 1);
8685 base1 = arg1;
8686 if (TREE_CODE (arg1) == ADDR_EXPR)
8688 base1 = get_inner_reference (TREE_OPERAND (arg1, 0),
8689 &bitsize, &bitpos1, &offset1, &mode,
8690 &unsignedp, &volatilep, false);
8691 if (TREE_CODE (base1) == INDIRECT_REF)
8692 base1 = TREE_OPERAND (base1, 0);
8693 else
8694 indirect_base1 = true;
8696 else if (TREE_CODE (arg1) == POINTER_PLUS_EXPR)
8698 base1 = TREE_OPERAND (arg1, 0);
8699 STRIP_SIGN_NOPS (base1);
8700 if (TREE_CODE (base1) == ADDR_EXPR)
8702 base1 = TREE_OPERAND (base1, 0);
8703 indirect_base1 = true;
8705 offset1 = TREE_OPERAND (arg1, 1);
8708 /* A local variable can never be pointed to by
8709 the default SSA name of an incoming parameter. */
8710 if ((TREE_CODE (arg0) == ADDR_EXPR
8711 && indirect_base0
8712 && TREE_CODE (base0) == VAR_DECL
8713 && auto_var_in_fn_p (base0, current_function_decl)
8714 && !indirect_base1
8715 && TREE_CODE (base1) == SSA_NAME
8716 && TREE_CODE (SSA_NAME_VAR (base1)) == PARM_DECL
8717 && SSA_NAME_IS_DEFAULT_DEF (base1))
8718 || (TREE_CODE (arg1) == ADDR_EXPR
8719 && indirect_base1
8720 && TREE_CODE (base1) == VAR_DECL
8721 && auto_var_in_fn_p (base1, current_function_decl)
8722 && !indirect_base0
8723 && TREE_CODE (base0) == SSA_NAME
8724 && TREE_CODE (SSA_NAME_VAR (base0)) == PARM_DECL
8725 && SSA_NAME_IS_DEFAULT_DEF (base0)))
8727 if (code == NE_EXPR)
8728 return constant_boolean_node (1, type);
8729 else if (code == EQ_EXPR)
8730 return constant_boolean_node (0, type);
8732 /* If we have equivalent bases we might be able to simplify. */
8733 else if (indirect_base0 == indirect_base1
8734 && operand_equal_p (base0, base1, 0))
8736 /* We can fold this expression to a constant if the non-constant
8737 offset parts are equal. */
8738 if ((offset0 == offset1
8739 || (offset0 && offset1
8740 && operand_equal_p (offset0, offset1, 0)))
8741 && (code == EQ_EXPR
8742 || code == NE_EXPR
8743 || POINTER_TYPE_OVERFLOW_UNDEFINED))
8746 if (code != EQ_EXPR
8747 && code != NE_EXPR
8748 && bitpos0 != bitpos1
8749 && (pointer_may_wrap_p (base0, offset0, bitpos0)
8750 || pointer_may_wrap_p (base1, offset1, bitpos1)))
8751 fold_overflow_warning (("assuming pointer wraparound does not "
8752 "occur when comparing P +- C1 with "
8753 "P +- C2"),
8754 WARN_STRICT_OVERFLOW_CONDITIONAL);
8756 switch (code)
8758 case EQ_EXPR:
8759 return constant_boolean_node (bitpos0 == bitpos1, type);
8760 case NE_EXPR:
8761 return constant_boolean_node (bitpos0 != bitpos1, type);
8762 case LT_EXPR:
8763 return constant_boolean_node (bitpos0 < bitpos1, type);
8764 case LE_EXPR:
8765 return constant_boolean_node (bitpos0 <= bitpos1, type);
8766 case GE_EXPR:
8767 return constant_boolean_node (bitpos0 >= bitpos1, type);
8768 case GT_EXPR:
8769 return constant_boolean_node (bitpos0 > bitpos1, type);
8770 default:;
8773 /* We can simplify the comparison to a comparison of the variable
8774 offset parts if the constant offset parts are equal.
8775 Be careful to use signed size type here because otherwise we
8776 mess with array offsets in the wrong way. This is possible
8777 because pointer arithmetic is required to remain within an
8778 object and overflow on pointer differences is undefined as of
8779 6.5.6/8 and /9 with respect to the signed ptrdiff_t. */
8780 else if (bitpos0 == bitpos1
8781 && ((code == EQ_EXPR || code == NE_EXPR)
8782 || POINTER_TYPE_OVERFLOW_UNDEFINED))
8784 /* By converting to signed size type we cover middle-end pointer
8785 arithmetic which operates on unsigned pointer types of size
8786 type size and ARRAY_REF offsets which are properly sign or
8787 zero extended from their type in case it is narrower than
8788 size type. */
8789 if (offset0 == NULL_TREE)
8790 offset0 = build_int_cst (ssizetype, 0);
8791 else
8792 offset0 = fold_convert_loc (loc, ssizetype, offset0);
8793 if (offset1 == NULL_TREE)
8794 offset1 = build_int_cst (ssizetype, 0);
8795 else
8796 offset1 = fold_convert_loc (loc, ssizetype, offset1);
8798 if (code != EQ_EXPR
8799 && code != NE_EXPR
8800 && (pointer_may_wrap_p (base0, offset0, bitpos0)
8801 || pointer_may_wrap_p (base1, offset1, bitpos1)))
8802 fold_overflow_warning (("assuming pointer wraparound does not "
8803 "occur when comparing P +- C1 with "
8804 "P +- C2"),
8805 WARN_STRICT_OVERFLOW_COMPARISON);
8807 return fold_build2_loc (loc, code, type, offset0, offset1);
8810 /* For non-equal bases we can simplify if they are addresses
8811 of local binding decls or constants. */
8812 else if (indirect_base0 && indirect_base1
8813 /* We know that !operand_equal_p (base0, base1, 0)
8814 because the if condition was false. But make
8815 sure two decls are not the same. */
8816 && base0 != base1
8817 && TREE_CODE (arg0) == ADDR_EXPR
8818 && TREE_CODE (arg1) == ADDR_EXPR
8819 && (((TREE_CODE (base0) == VAR_DECL
8820 || TREE_CODE (base0) == PARM_DECL)
8821 && (targetm.binds_local_p (base0)
8822 || CONSTANT_CLASS_P (base1)))
8823 || CONSTANT_CLASS_P (base0))
8824 && (((TREE_CODE (base1) == VAR_DECL
8825 || TREE_CODE (base1) == PARM_DECL)
8826 && (targetm.binds_local_p (base1)
8827 || CONSTANT_CLASS_P (base0)))
8828 || CONSTANT_CLASS_P (base1)))
8830 if (code == EQ_EXPR)
8831 return omit_two_operands_loc (loc, type, boolean_false_node,
8832 arg0, arg1);
8833 else if (code == NE_EXPR)
8834 return omit_two_operands_loc (loc, type, boolean_true_node,
8835 arg0, arg1);
8837 /* For equal offsets we can simplify to a comparison of the
8838 base addresses. */
8839 else if (bitpos0 == bitpos1
8840 && (indirect_base0
8841 ? base0 != TREE_OPERAND (arg0, 0) : base0 != arg0)
8842 && (indirect_base1
8843 ? base1 != TREE_OPERAND (arg1, 0) : base1 != arg1)
8844 && ((offset0 == offset1)
8845 || (offset0 && offset1
8846 && operand_equal_p (offset0, offset1, 0))))
8848 if (indirect_base0)
8849 base0 = build_fold_addr_expr_loc (loc, base0);
8850 if (indirect_base1)
8851 base1 = build_fold_addr_expr_loc (loc, base1);
8852 return fold_build2_loc (loc, code, type, base0, base1);
8856 /* Transform comparisons of the form X +- C1 CMP Y +- C2 to
8857 X CMP Y +- C2 +- C1 for signed X, Y. This is valid if
8858 the resulting offset is smaller in absolute value than the
8859 original one. */
8860 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0))
8861 && (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
8862 && (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
8863 && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1)))
8864 && (TREE_CODE (arg1) == PLUS_EXPR || TREE_CODE (arg1) == MINUS_EXPR)
8865 && (TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
8866 && !TREE_OVERFLOW (TREE_OPERAND (arg1, 1))))
8868 tree const1 = TREE_OPERAND (arg0, 1);
8869 tree const2 = TREE_OPERAND (arg1, 1);
8870 tree variable1 = TREE_OPERAND (arg0, 0);
8871 tree variable2 = TREE_OPERAND (arg1, 0);
8872 tree cst;
8873 const char * const warnmsg = G_("assuming signed overflow does not "
8874 "occur when combining constants around "
8875 "a comparison");
8877 /* Put the constant on the side where it doesn't overflow and is
8878 of lower absolute value than before. */
8879 cst = int_const_binop (TREE_CODE (arg0) == TREE_CODE (arg1)
8880 ? MINUS_EXPR : PLUS_EXPR,
8881 const2, const1);
8882 if (!TREE_OVERFLOW (cst)
8883 && tree_int_cst_compare (const2, cst) == tree_int_cst_sgn (const2))
8885 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_COMPARISON);
8886 return fold_build2_loc (loc, code, type,
8887 variable1,
8888 fold_build2_loc (loc,
8889 TREE_CODE (arg1), TREE_TYPE (arg1),
8890 variable2, cst));
8893 cst = int_const_binop (TREE_CODE (arg0) == TREE_CODE (arg1)
8894 ? MINUS_EXPR : PLUS_EXPR,
8895 const1, const2);
8896 if (!TREE_OVERFLOW (cst)
8897 && tree_int_cst_compare (const1, cst) == tree_int_cst_sgn (const1))
8899 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_COMPARISON);
8900 return fold_build2_loc (loc, code, type,
8901 fold_build2_loc (loc, TREE_CODE (arg0), TREE_TYPE (arg0),
8902 variable1, cst),
8903 variable2);
8907 /* Transform comparisons of the form X * C1 CMP 0 to X CMP 0 in the
8908 signed arithmetic case. That form is created by the compiler
8909 often enough for folding it to be of value. One example is in
8910 computing loop trip counts after Operator Strength Reduction. */
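/* For example, x * 4 > 0 becomes x > 0, and x * -4 > 0 becomes
   x < 0; both rely on signed overflow being undefined.  */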
8911 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0))
8912 && TREE_CODE (arg0) == MULT_EXPR
8913 && (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
8914 && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1)))
8915 && integer_zerop (arg1))
8917 tree const1 = TREE_OPERAND (arg0, 1);
8918 tree const2 = arg1; /* zero */
8919 tree variable1 = TREE_OPERAND (arg0, 0);
8920 enum tree_code cmp_code = code;
8922 /* Handle unfolded multiplication by zero. */
8923 if (integer_zerop (const1))
8924 return fold_build2_loc (loc, cmp_code, type, const1, const2);
8926 fold_overflow_warning (("assuming signed overflow does not occur when "
8927 "eliminating multiplication in comparison "
8928 "with zero"),
8929 WARN_STRICT_OVERFLOW_COMPARISON);
8931 /* If const1 is negative we swap the sense of the comparison. */
8932 if (tree_int_cst_sgn (const1) < 0)
8933 cmp_code = swap_tree_comparison (cmp_code);
8935 return fold_build2_loc (loc, cmp_code, type, variable1, const2);
8938 tem = maybe_canonicalize_comparison (loc, code, type, op0, op1);
8939 if (tem)
8940 return tem;
8942 if (FLOAT_TYPE_P (TREE_TYPE (arg0)))
8944 tree targ0 = strip_float_extensions (arg0);
8945 tree targ1 = strip_float_extensions (arg1);
8946 tree newtype = TREE_TYPE (targ0);
8948 if (TYPE_PRECISION (TREE_TYPE (targ1)) > TYPE_PRECISION (newtype))
8949 newtype = TREE_TYPE (targ1);
8951 /* Fold (double)float1 CMP (double)float2 into float1 CMP float2. */
8952 if (TYPE_PRECISION (newtype) < TYPE_PRECISION (TREE_TYPE (arg0)))
8953 return fold_build2_loc (loc, code, type,
8954 fold_convert_loc (loc, newtype, targ0),
8955 fold_convert_loc (loc, newtype, targ1));
8957 /* (-a) CMP (-b) -> b CMP a */
8958 if (TREE_CODE (arg0) == NEGATE_EXPR
8959 && TREE_CODE (arg1) == NEGATE_EXPR)
8960 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg1, 0),
8961 TREE_OPERAND (arg0, 0));
8963 if (TREE_CODE (arg1) == REAL_CST)
8965 REAL_VALUE_TYPE cst;
8966 cst = TREE_REAL_CST (arg1);
8968 /* (-a) CMP CST -> a swap(CMP) (-CST) */
8969 if (TREE_CODE (arg0) == NEGATE_EXPR)
8970 return fold_build2_loc (loc, swap_tree_comparison (code), type,
8971 TREE_OPERAND (arg0, 0),
8972 build_real (TREE_TYPE (arg1),
8973 real_value_negate (&cst)));
8975 /* IEEE doesn't distinguish +0 and -0 in comparisons. */
8976 /* a CMP (-0) -> a CMP 0 */
8977 if (REAL_VALUE_MINUS_ZERO (cst))
8978 return fold_build2_loc (loc, code, type, arg0,
8979 build_real (TREE_TYPE (arg1), dconst0));
8981 /* x != NaN is always true, other ops are always false. */
8982 if (REAL_VALUE_ISNAN (cst)
8983 && ! HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg1))))
8985 tem = (code == NE_EXPR) ? integer_one_node : integer_zero_node;
8986 return omit_one_operand_loc (loc, type, tem, arg0);
8989 /* Fold comparisons against infinity. */
8990 if (REAL_VALUE_ISINF (cst)
8991 && MODE_HAS_INFINITIES (TYPE_MODE (TREE_TYPE (arg1))))
8993 tem = fold_inf_compare (loc, code, type, arg0, arg1);
8994 if (tem != NULL_TREE)
8995 return tem;
8999 /* If this is a comparison of a real constant with a PLUS_EXPR
9000 or a MINUS_EXPR of a real constant, we can convert it into a
9001 comparison with a revised real constant as long as no overflow
9002 occurs when unsafe_math_optimizations are enabled. */
9003 if (flag_unsafe_math_optimizations
9004 && TREE_CODE (arg1) == REAL_CST
9005 && (TREE_CODE (arg0) == PLUS_EXPR
9006 || TREE_CODE (arg0) == MINUS_EXPR)
9007 && TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
9008 && 0 != (tem = const_binop (TREE_CODE (arg0) == PLUS_EXPR
9009 ? MINUS_EXPR : PLUS_EXPR,
9010 arg1, TREE_OPERAND (arg0, 1)))
9011 && !TREE_OVERFLOW (tem))
9012 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0), tem);
9014 /* Likewise, we can simplify a comparison of a real constant with
9015 a MINUS_EXPR whose first operand is also a real constant, i.e.
9016 (c1 - x) < c2 becomes x > c1-c2. Reordering is allowed on
9017 floating-point types only if -fassociative-math is set. */
9018 if (flag_associative_math
9019 && TREE_CODE (arg1) == REAL_CST
9020 && TREE_CODE (arg0) == MINUS_EXPR
9021 && TREE_CODE (TREE_OPERAND (arg0, 0)) == REAL_CST
9022 && 0 != (tem = const_binop (MINUS_EXPR, TREE_OPERAND (arg0, 0),
9023 arg1))
9024 && !TREE_OVERFLOW (tem))
9025 return fold_build2_loc (loc, swap_tree_comparison (code), type,
9026 TREE_OPERAND (arg0, 1), tem);
9028 /* Fold comparisons against built-in math functions. */
9029 if (TREE_CODE (arg1) == REAL_CST
9030 && flag_unsafe_math_optimizations
9031 && ! flag_errno_math)
9033 enum built_in_function fcode = builtin_mathfn_code (arg0);
9035 if (fcode != END_BUILTINS)
9037 tem = fold_mathfn_compare (loc, fcode, code, type, arg0, arg1);
9038 if (tem != NULL_TREE)
9039 return tem;
9044 if (TREE_CODE (TREE_TYPE (arg0)) == INTEGER_TYPE
9045 && CONVERT_EXPR_P (arg0))
9047 /* If we are widening one operand of an integer comparison,
9048 see if the other operand is similarly being widened. Perhaps we
9049 can do the comparison in the narrower type. */
9050 tem = fold_widened_comparison (loc, code, type, arg0, arg1);
9051 if (tem)
9052 return tem;
9054 /* Or if we are changing signedness. */
9055 tem = fold_sign_changed_comparison (loc, code, type, arg0, arg1);
9056 if (tem)
9057 return tem;
9060 /* If this is comparing a constant with a MIN_EXPR or a MAX_EXPR of a
9061 constant, we can simplify it. */
9062 if (TREE_CODE (arg1) == INTEGER_CST
9063 && (TREE_CODE (arg0) == MIN_EXPR
9064 || TREE_CODE (arg0) == MAX_EXPR)
9065 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
9067 tem = optimize_minmax_comparison (loc, code, type, op0, op1);
9068 if (tem)
9069 return tem;
9072 /* Simplify comparison of something with itself. (For IEEE
9073 floating-point, we can only do some of these simplifications.) */
9074 if (operand_equal_p (arg0, arg1, 0))
9076 switch (code)
9078 case EQ_EXPR:
9079 if (! FLOAT_TYPE_P (TREE_TYPE (arg0))
9080 || ! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
9081 return constant_boolean_node (1, type);
9082 break;
9084 case GE_EXPR:
9085 case LE_EXPR:
9086 if (! FLOAT_TYPE_P (TREE_TYPE (arg0))
9087 || ! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
9088 return constant_boolean_node (1, type);
9089 return fold_build2_loc (loc, EQ_EXPR, type, arg0, arg1);
9091 case NE_EXPR:
9092 /* For NE, we can only do this simplification if integer
9093 or we don't honor IEEE floating point NaNs. */
9094 if (FLOAT_TYPE_P (TREE_TYPE (arg0))
9095 && HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
9096 break;
9097 /* ... fall through ... */
9098 case GT_EXPR:
9099 case LT_EXPR:
9100 return constant_boolean_node (0, type);
9101 default:
9102 gcc_unreachable ();
9106 /* If we are comparing an expression that just has comparisons
9107 of two integer values, arithmetic expressions of those comparisons,
9108 and constants, we can simplify it. There are only three cases
9109 to check: the two values can either be equal, the first can be
9110 greater, or the second can be greater. Fold the expression for
9111 those three values. Since each value must be 0 or 1, we have
9112 eight possibilities, each of which corresponds to the constant 0
9113 or 1 or one of the six possible comparisons.
9115 This handles common cases like (a > b) == 0 but also handles
9116 expressions like ((x > y) - (y > x)) > 0, which supposedly
9117 occur in macroized code. */
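/* For example, for (a > b) == 0 the three substituted results are
   0, 1 and 1, giving the mask 011 = 3 below, so the whole
   expression folds to a <= b.  */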
9119 if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg0) != INTEGER_CST)
9121 tree cval1 = 0, cval2 = 0;
9122 int save_p = 0;
9124 if (twoval_comparison_p (arg0, &cval1, &cval2, &save_p)
9125 /* Don't handle degenerate cases here; they should already
9126 have been handled anyway. */
9127 && cval1 != 0 && cval2 != 0
9128 && ! (TREE_CONSTANT (cval1) && TREE_CONSTANT (cval2))
9129 && TREE_TYPE (cval1) == TREE_TYPE (cval2)
9130 && INTEGRAL_TYPE_P (TREE_TYPE (cval1))
9131 && TYPE_MAX_VALUE (TREE_TYPE (cval1))
9132 && TYPE_MAX_VALUE (TREE_TYPE (cval2))
9133 && ! operand_equal_p (TYPE_MIN_VALUE (TREE_TYPE (cval1)),
9134 TYPE_MAX_VALUE (TREE_TYPE (cval2)), 0))
9136 tree maxval = TYPE_MAX_VALUE (TREE_TYPE (cval1));
9137 tree minval = TYPE_MIN_VALUE (TREE_TYPE (cval1));
9139 /* We can't just pass T to eval_subst in case cval1 or cval2
9140 was the same as ARG1. */
9142 tree high_result
9143 = fold_build2_loc (loc, code, type,
9144 eval_subst (loc, arg0, cval1, maxval,
9145 cval2, minval),
9146 arg1);
9147 tree equal_result
9148 = fold_build2_loc (loc, code, type,
9149 eval_subst (loc, arg0, cval1, maxval,
9150 cval2, maxval),
9151 arg1);
9152 tree low_result
9153 = fold_build2_loc (loc, code, type,
9154 eval_subst (loc, arg0, cval1, minval,
9155 cval2, maxval),
9156 arg1);
9158 /* All three of these results should be 0 or 1. Confirm they are.
9159 Then use those values to select the proper code to use. */
9161 if (TREE_CODE (high_result) == INTEGER_CST
9162 && TREE_CODE (equal_result) == INTEGER_CST
9163 && TREE_CODE (low_result) == INTEGER_CST)
9165 /* Make a 3-bit mask with the high-order bit being the
9166 value for `>', the next for '=', and the low for '<'. */
9167 switch ((integer_onep (high_result) * 4)
9168 + (integer_onep (equal_result) * 2)
9169 + integer_onep (low_result))
9171 case 0:
9172 /* Always false. */
9173 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
9174 case 1:
9175 code = LT_EXPR;
9176 break;
9177 case 2:
9178 code = EQ_EXPR;
9179 break;
9180 case 3:
9181 code = LE_EXPR;
9182 break;
9183 case 4:
9184 code = GT_EXPR;
9185 break;
9186 case 5:
9187 code = NE_EXPR;
9188 break;
9189 case 6:
9190 code = GE_EXPR;
9191 break;
9192 case 7:
9193 /* Always true. */
9194 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
9197 if (save_p)
9199 tem = save_expr (build2 (code, type, cval1, cval2));
9200 SET_EXPR_LOCATION (tem, loc);
9201 return tem;
9203 return fold_build2_loc (loc, code, type, cval1, cval2);
9208 /* We can fold X/C1 op C2 where C1 and C2 are integer constants
9209 into a single range test. */
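/* For example, for unsigned x, x / 4 == 2 holds exactly when
   8 <= x && x <= 11, which fold_div_compare turns into a range
   test.  */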
9210 if ((TREE_CODE (arg0) == TRUNC_DIV_EXPR
9211 || TREE_CODE (arg0) == EXACT_DIV_EXPR)
9212 && TREE_CODE (arg1) == INTEGER_CST
9213 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
9214 && !integer_zerop (TREE_OPERAND (arg0, 1))
9215 && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1))
9216 && !TREE_OVERFLOW (arg1))
9218 tem = fold_div_compare (loc, code, type, arg0, arg1);
9219 if (tem != NULL_TREE)
9220 return tem;
9223 /* Fold ~X op ~Y as Y op X. */
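/* E.g. ~x < ~y is equivalent to y < x, since ~z == -z - 1 in
   two's complement and negation reverses the ordering.  */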
9224 if (TREE_CODE (arg0) == BIT_NOT_EXPR
9225 && TREE_CODE (arg1) == BIT_NOT_EXPR)
9227 tree cmp_type = TREE_TYPE (TREE_OPERAND (arg0, 0));
9228 return fold_build2_loc (loc, code, type,
9229 fold_convert_loc (loc, cmp_type,
9230 TREE_OPERAND (arg1, 0)),
9231 TREE_OPERAND (arg0, 0));
9234 /* Fold ~X op C as X op' ~C, where op' is the swapped comparison. */
9235 if (TREE_CODE (arg0) == BIT_NOT_EXPR
9236 && TREE_CODE (arg1) == INTEGER_CST)
9238 tree cmp_type = TREE_TYPE (TREE_OPERAND (arg0, 0));
9239 return fold_build2_loc (loc, swap_tree_comparison (code), type,
9240 TREE_OPERAND (arg0, 0),
9241 fold_build1_loc (loc, BIT_NOT_EXPR, cmp_type,
9242 fold_convert_loc (loc, cmp_type, arg1)));
9245 return NULL_TREE;
9249 /* Subroutine of fold_binary. Optimize complex multiplications of the
9250 form z * conj(z), as pow(realpart(z),2) + pow(imagpart(z),2). The
9251 argument EXPR represents the expression "z" of type TYPE. */
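/* For z = a + b*i we have conj(z) = a - b*i, so z * conj(z)
   is a*a + b*b with a zero imaginary part.  */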
9253 static tree
9254 fold_mult_zconjz (location_t loc, tree type, tree expr)
9256 tree itype = TREE_TYPE (type);
9257 tree rpart, ipart, tem;
9259 if (TREE_CODE (expr) == COMPLEX_EXPR)
9261 rpart = TREE_OPERAND (expr, 0);
9262 ipart = TREE_OPERAND (expr, 1);
9264 else if (TREE_CODE (expr) == COMPLEX_CST)
9266 rpart = TREE_REALPART (expr);
9267 ipart = TREE_IMAGPART (expr);
9269 else
9271 expr = save_expr (expr);
9272 rpart = fold_build1_loc (loc, REALPART_EXPR, itype, expr);
9273 ipart = fold_build1_loc (loc, IMAGPART_EXPR, itype, expr);
9276 rpart = save_expr (rpart);
9277 ipart = save_expr (ipart);
9278 tem = fold_build2_loc (loc, PLUS_EXPR, itype,
9279 fold_build2_loc (loc, MULT_EXPR, itype, rpart, rpart),
9280 fold_build2_loc (loc, MULT_EXPR, itype, ipart, ipart));
9281 return fold_build2_loc (loc, COMPLEX_EXPR, type, tem,
9282 build_zero_cst (itype));
9286 /* Subroutine of fold_binary. If P is the value of EXPR, computes
9287 power-of-two M and (arbitrary) N such that M divides (P-N). This condition
9288 guarantees that P and N have the same least significant log2(M) bits.
9289 N is not otherwise constrained. In particular, N is not normalized to
9290 0 <= N < M as is common. In general, the precise value of P is unknown.
9291 M is chosen as large as possible such that constant N can be determined.
9293 Returns M and sets *RESIDUE to N.
9295 If ALLOW_FUNC_ALIGN is true, do take functions' DECL_ALIGN_UNIT into
9296 account. This is not always possible due to PR 35705.
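/* E.g. if EXPR is &buf[3] for a 16-byte-aligned char array buf,
   the result is M == 16 with *RESIDUE == 3: the address is known
   to be congruent to 3 modulo 16.  */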
9299 static unsigned HOST_WIDE_INT
9300 get_pointer_modulus_and_residue (tree expr, unsigned HOST_WIDE_INT *residue,
9301 bool allow_func_align)
9303 enum tree_code code;
9305 *residue = 0;
9307 code = TREE_CODE (expr);
9308 if (code == ADDR_EXPR)
9310 unsigned int bitalign;
9311 bitalign = get_object_alignment_1 (TREE_OPERAND (expr, 0), residue);
9312 *residue /= BITS_PER_UNIT;
9313 return bitalign / BITS_PER_UNIT;
9315 else if (code == POINTER_PLUS_EXPR)
9317 tree op0, op1;
9318 unsigned HOST_WIDE_INT modulus;
9319 enum tree_code inner_code;
9321 op0 = TREE_OPERAND (expr, 0);
9322 STRIP_NOPS (op0);
9323 modulus = get_pointer_modulus_and_residue (op0, residue,
9324 allow_func_align);
9326 op1 = TREE_OPERAND (expr, 1);
9327 STRIP_NOPS (op1);
9328 inner_code = TREE_CODE (op1);
9329 if (inner_code == INTEGER_CST)
9331 *residue += TREE_INT_CST_LOW (op1);
9332 return modulus;
9334 else if (inner_code == MULT_EXPR)
9336 op1 = TREE_OPERAND (op1, 1);
9337 if (TREE_CODE (op1) == INTEGER_CST)
9339 unsigned HOST_WIDE_INT align;
9341 /* Compute the greatest power-of-2 divisor of op1. */
9342 align = TREE_INT_CST_LOW (op1);
9343 align &= -align;
9345 /* If align is non-zero and less than modulus, replace
9346 modulus with align.  If align is 0, then either op1 is 0
9347 or the greatest power-of-2 divisor of op1 doesn't fit in an
9348 unsigned HOST_WIDE_INT. In either case, no additional
9349 constraint is imposed. */
9350 if (align)
9351 modulus = MIN (modulus, align);
9353 return modulus;
9358 /* If we get here, we were unable to determine anything useful about the
9359 expression. */
9360 return 1;
9364 /* Fold a binary expression of code CODE and type TYPE with operands
9365 OP0 and OP1. LOC is the location of the resulting expression.
9366 Return the folded expression if folding is successful. Otherwise,
9367 return NULL_TREE. */
9369 tree
9370 fold_binary_loc (location_t loc,
9371 enum tree_code code, tree type, tree op0, tree op1)
9373 enum tree_code_class kind = TREE_CODE_CLASS (code);
9374 tree arg0, arg1, tem;
9375 tree t1 = NULL_TREE;
9376 bool strict_overflow_p;
9378 gcc_assert (IS_EXPR_CODE_CLASS (kind)
9379 && TREE_CODE_LENGTH (code) == 2
9380 && op0 != NULL_TREE
9381 && op1 != NULL_TREE);
9383 arg0 = op0;
9384 arg1 = op1;
9386 /* Strip any conversions that don't change the mode. This is
9387 safe for every expression, except for a comparison expression
9388 because its signedness is derived from its operands. So, in
9389 the latter case, only strip conversions that don't change the
9390 signedness. MIN_EXPR/MAX_EXPR also need signedness of arguments
9391 preserved.
9393 Note that this is done as an internal manipulation within the
9394 constant folder, in order to find the simplest representation
9395 of the arguments so that their form can be studied. In any
9396 case, the appropriate type conversions should be put back in
9397 the tree that will get out of the constant folder. */
9399 if (kind == tcc_comparison || code == MIN_EXPR || code == MAX_EXPR)
9401 STRIP_SIGN_NOPS (arg0);
9402 STRIP_SIGN_NOPS (arg1);
9404 else
9406 STRIP_NOPS (arg0);
9407 STRIP_NOPS (arg1);
9410 /* Note that TREE_CONSTANT isn't enough: static var addresses are
9411 constant but we can't do arithmetic on them. */
9412 if ((TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
9413 || (TREE_CODE (arg0) == REAL_CST && TREE_CODE (arg1) == REAL_CST)
9414 || (TREE_CODE (arg0) == FIXED_CST && TREE_CODE (arg1) == FIXED_CST)
9415 || (TREE_CODE (arg0) == FIXED_CST && TREE_CODE (arg1) == INTEGER_CST)
9416 || (TREE_CODE (arg0) == COMPLEX_CST && TREE_CODE (arg1) == COMPLEX_CST)
9417 || (TREE_CODE (arg0) == VECTOR_CST && TREE_CODE (arg1) == VECTOR_CST))
9419 if (kind == tcc_binary)
9421 /* Make sure type and arg0 have the same saturating flag. */
9422 gcc_assert (TYPE_SATURATING (type)
9423 == TYPE_SATURATING (TREE_TYPE (arg0)));
9424 tem = const_binop (code, arg0, arg1);
9426 else if (kind == tcc_comparison)
9427 tem = fold_relational_const (code, type, arg0, arg1);
9428 else
9429 tem = NULL_TREE;
9431 if (tem != NULL_TREE)
9433 if (TREE_TYPE (tem) != type)
9434 tem = fold_convert_loc (loc, type, tem);
9435 return tem;
9439 /* If this is a commutative operation, and ARG0 is a constant, move it
9440 to ARG1 to reduce the number of tests below. */
9441 if (commutative_tree_code (code)
9442 && tree_swap_operands_p (arg0, arg1, true))
9443 return fold_build2_loc (loc, code, type, op1, op0);
9445 /* ARG0 is the first operand of EXPR, and ARG1 is the second operand.
9447 First check for cases where an arithmetic operation is applied to a
9448 compound, conditional, or comparison operation. Push the arithmetic
9449 operation inside the compound or conditional to see if any folding
9450 can then be done. Convert comparison to conditional for this purpose.
9451 This also optimizes non-constant cases that used to be done in
9452 expand_expr.
9454 Before we do that, see if this is a BIT_AND_EXPR or a BIT_IOR_EXPR
9455 where one operand is a truth value and the other is a truth value or
9456 a BIT_AND_EXPR with the constant 1.  In that case, the
9457 code below would make the expression more complex. Change it to a
9458 TRUTH_{AND,OR}_EXPR. Likewise, convert a similar NE_EXPR to
9459 TRUTH_XOR_EXPR and an EQ_EXPR to the inversion of a TRUTH_XOR_EXPR. */
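/* E.g. (a < b) & (c < d) becomes the TRUTH_AND_EXPR of the two
   comparisons, and (a < b) == (c < d) becomes the inversion of
   their TRUTH_XOR_EXPR.  */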
9461 if ((code == BIT_AND_EXPR || code == BIT_IOR_EXPR
9462 || code == EQ_EXPR || code == NE_EXPR)
9463 && ((truth_value_p (TREE_CODE (arg0))
9464 && (truth_value_p (TREE_CODE (arg1))
9465 || (TREE_CODE (arg1) == BIT_AND_EXPR
9466 && integer_onep (TREE_OPERAND (arg1, 1)))))
9467 || (truth_value_p (TREE_CODE (arg1))
9468 && (truth_value_p (TREE_CODE (arg0))
9469 || (TREE_CODE (arg0) == BIT_AND_EXPR
9470 && integer_onep (TREE_OPERAND (arg0, 1)))))))
9472 tem = fold_build2_loc (loc, code == BIT_AND_EXPR ? TRUTH_AND_EXPR
9473 : code == BIT_IOR_EXPR ? TRUTH_OR_EXPR
9474 : TRUTH_XOR_EXPR,
9475 boolean_type_node,
9476 fold_convert_loc (loc, boolean_type_node, arg0),
9477 fold_convert_loc (loc, boolean_type_node, arg1));
9479 if (code == EQ_EXPR)
9480 tem = invert_truthvalue_loc (loc, tem);
9482 return fold_convert_loc (loc, type, tem);
9485 if (TREE_CODE_CLASS (code) == tcc_binary
9486 || TREE_CODE_CLASS (code) == tcc_comparison)
9488 if (TREE_CODE (arg0) == COMPOUND_EXPR)
9490 tem = fold_build2_loc (loc, code, type,
9491 fold_convert_loc (loc, TREE_TYPE (op0),
9492 TREE_OPERAND (arg0, 1)), op1);
9493 return build2_loc (loc, COMPOUND_EXPR, type, TREE_OPERAND (arg0, 0),
9494 tem);
9496 if (TREE_CODE (arg1) == COMPOUND_EXPR
9497 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
9499 tem = fold_build2_loc (loc, code, type, op0,
9500 fold_convert_loc (loc, TREE_TYPE (op1),
9501 TREE_OPERAND (arg1, 1)));
9502 return build2_loc (loc, COMPOUND_EXPR, type, TREE_OPERAND (arg1, 0),
9503 tem);
9506 if (TREE_CODE (arg0) == COND_EXPR || COMPARISON_CLASS_P (arg0))
9508 tem = fold_binary_op_with_conditional_arg (loc, code, type, op0, op1,
9509 arg0, arg1,
9510 /*cond_first_p=*/1);
9511 if (tem != NULL_TREE)
9512 return tem;
9515 if (TREE_CODE (arg1) == COND_EXPR || COMPARISON_CLASS_P (arg1))
9517 tem = fold_binary_op_with_conditional_arg (loc, code, type, op0, op1,
9518 arg1, arg0,
9519 /*cond_first_p=*/0);
9520 if (tem != NULL_TREE)
9521 return tem;
9525 switch (code)
9527 case MEM_REF:
9528 /* MEM[&MEM[p, CST1], CST2] -> MEM[p, CST1 + CST2]. */
9529 if (TREE_CODE (arg0) == ADDR_EXPR
9530 && TREE_CODE (TREE_OPERAND (arg0, 0)) == MEM_REF)
9532 tree iref = TREE_OPERAND (arg0, 0);
9533 return fold_build2 (MEM_REF, type,
9534 TREE_OPERAND (iref, 0),
9535 int_const_binop (PLUS_EXPR, arg1,
9536 TREE_OPERAND (iref, 1)));
9539 /* MEM[&a.b, CST2] -> MEM[&a, offsetof (a, b) + CST2]. */
9540 if (TREE_CODE (arg0) == ADDR_EXPR
9541 && handled_component_p (TREE_OPERAND (arg0, 0)))
9543 tree base;
9544 HOST_WIDE_INT coffset;
9545 base = get_addr_base_and_unit_offset (TREE_OPERAND (arg0, 0),
9546 &coffset);
9547 if (!base)
9548 return NULL_TREE;
9549 return fold_build2 (MEM_REF, type,
9550 build_fold_addr_expr (base),
9551 int_const_binop (PLUS_EXPR, arg1,
9552 size_int (coffset)));
9555 return NULL_TREE;
9557 case POINTER_PLUS_EXPR:
9558 /* 0 +p index -> (type)index */
9559 if (integer_zerop (arg0))
9560 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
9562 /* PTR +p 0 -> PTR */
9563 if (integer_zerop (arg1))
9564 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
9566 /* INT +p INT -> (PTR)(INT + INT). Stripping types allows for this. */
9567 if (INTEGRAL_TYPE_P (TREE_TYPE (arg1))
9568 && INTEGRAL_TYPE_P (TREE_TYPE (arg0)))
9569 return fold_convert_loc (loc, type,
9570 fold_build2_loc (loc, PLUS_EXPR, sizetype,
9571 fold_convert_loc (loc, sizetype,
9572 arg1),
9573 fold_convert_loc (loc, sizetype,
9574 arg0)));
9576 /* (PTR +p B) +p A -> PTR +p (B + A) */
9577 if (TREE_CODE (arg0) == POINTER_PLUS_EXPR)
9579 tree inner;
9580 tree arg01 = fold_convert_loc (loc, sizetype, TREE_OPERAND (arg0, 1));
9581 tree arg00 = TREE_OPERAND (arg0, 0);
9582 inner = fold_build2_loc (loc, PLUS_EXPR, sizetype,
9583 arg01, fold_convert_loc (loc, sizetype, arg1));
9584 return fold_convert_loc (loc, type,
9585 fold_build_pointer_plus_loc (loc,
9586 arg00, inner));
9589 /* PTR_CST +p CST -> CST1 */
9590 if (TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
9591 return fold_build2_loc (loc, PLUS_EXPR, type, arg0,
9592 fold_convert_loc (loc, type, arg1));
9594 /* Try replacing &a[i1] +p c * i2 with &a[i1 + i2], if c is the step
9595 of the array.  The loop optimizer sometimes produces this type of
9596 expression.  */
9597 if (TREE_CODE (arg0) == ADDR_EXPR)
9599 tem = try_move_mult_to_index (loc, arg0,
9600 fold_convert_loc (loc, sizetype, arg1));
9601 if (tem)
9602 return fold_convert_loc (loc, type, tem);
9605 return NULL_TREE;
9607 case PLUS_EXPR:
9608 /* A + (-B) -> A - B */
9609 if (TREE_CODE (arg1) == NEGATE_EXPR)
9610 return fold_build2_loc (loc, MINUS_EXPR, type,
9611 fold_convert_loc (loc, type, arg0),
9612 fold_convert_loc (loc, type,
9613 TREE_OPERAND (arg1, 0)));
9614 /* (-A) + B -> B - A */
9615 if (TREE_CODE (arg0) == NEGATE_EXPR
9616 && reorder_operands_p (TREE_OPERAND (arg0, 0), arg1))
9617 return fold_build2_loc (loc, MINUS_EXPR, type,
9618 fold_convert_loc (loc, type, arg1),
9619 fold_convert_loc (loc, type,
9620 TREE_OPERAND (arg0, 0)));
9622 if (INTEGRAL_TYPE_P (type))
9624 /* Convert ~A + 1 to -A. */
9625 if (TREE_CODE (arg0) == BIT_NOT_EXPR
9626 && integer_onep (arg1))
9627 return fold_build1_loc (loc, NEGATE_EXPR, type,
9628 fold_convert_loc (loc, type,
9629 TREE_OPERAND (arg0, 0)));
9631 /* ~X + X is -1. */
9632 if (TREE_CODE (arg0) == BIT_NOT_EXPR
9633 && !TYPE_OVERFLOW_TRAPS (type))
9635 tree tem = TREE_OPERAND (arg0, 0);
9637 STRIP_NOPS (tem);
9638 if (operand_equal_p (tem, arg1, 0))
9640 t1 = build_int_cst_type (type, -1);
9641 return omit_one_operand_loc (loc, type, t1, arg1);
9645 /* X + ~X is -1. */
9646 if (TREE_CODE (arg1) == BIT_NOT_EXPR
9647 && !TYPE_OVERFLOW_TRAPS (type))
9649 tree tem = TREE_OPERAND (arg1, 0);
9651 STRIP_NOPS (tem);
9652 if (operand_equal_p (arg0, tem, 0))
9654 t1 = build_int_cst_type (type, -1);
9655 return omit_one_operand_loc (loc, type, t1, arg0);
9659 /* X + (X / CST) * -CST is X % CST. */
9660 if (TREE_CODE (arg1) == MULT_EXPR
9661 && TREE_CODE (TREE_OPERAND (arg1, 0)) == TRUNC_DIV_EXPR
9662 && operand_equal_p (arg0,
9663 TREE_OPERAND (TREE_OPERAND (arg1, 0), 0), 0))
9665 tree cst0 = TREE_OPERAND (TREE_OPERAND (arg1, 0), 1);
9666 tree cst1 = TREE_OPERAND (arg1, 1);
9667 tree sum = fold_binary_loc (loc, PLUS_EXPR, TREE_TYPE (cst1),
9668 cst1, cst0);
9669 if (sum && integer_zerop (sum))
9670 return fold_convert_loc (loc, type,
9671 fold_build2_loc (loc, TRUNC_MOD_EXPR,
9672 TREE_TYPE (arg0), arg0,
9673 cst0));
9677 /* Handle (A1 * C1) + (A2 * C2) with A1, A2 or C1, C2 being the
9678 same or one. Make sure type is not saturating.
9679 fold_plusminus_mult_expr will re-associate. */
9680 if ((TREE_CODE (arg0) == MULT_EXPR
9681 || TREE_CODE (arg1) == MULT_EXPR)
9682 && !TYPE_SATURATING (type)
9683 && (!FLOAT_TYPE_P (type) || flag_associative_math))
9685 tree tem = fold_plusminus_mult_expr (loc, code, type, arg0, arg1);
9686 if (tem)
9687 return tem;
9690 if (! FLOAT_TYPE_P (type))
9692 if (integer_zerop (arg1))
9693 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
9695 /* If we are adding two BIT_AND_EXPR's, both of which are and'ing
9696 with a constant, and the two constants have no bits in common,
9697 we should treat this as a BIT_IOR_EXPR since this may produce more
9698 simplifications. */
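/* E.g. (x & 0xf0) + (y & 0x0f) is equivalent to
   (x & 0xf0) | (y & 0x0f): the masks share no bits, so the
   addition can never produce a carry.  */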
9699 if (TREE_CODE (arg0) == BIT_AND_EXPR
9700 && TREE_CODE (arg1) == BIT_AND_EXPR
9701 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
9702 && TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
9703 && integer_zerop (const_binop (BIT_AND_EXPR,
9704 TREE_OPERAND (arg0, 1),
9705 TREE_OPERAND (arg1, 1))))
9707 code = BIT_IOR_EXPR;
9708 goto bit_ior;
9711 /* Reassociate (plus (plus (mult) (foo)) (mult)) as
9712 (plus (plus (mult) (mult)) (foo)) so that we can
9713 take advantage of the factoring cases below. */
9714 if (TYPE_OVERFLOW_WRAPS (type)
9715 && (((TREE_CODE (arg0) == PLUS_EXPR
9716 || TREE_CODE (arg0) == MINUS_EXPR)
9717 && TREE_CODE (arg1) == MULT_EXPR)
9718 || ((TREE_CODE (arg1) == PLUS_EXPR
9719 || TREE_CODE (arg1) == MINUS_EXPR)
9720 && TREE_CODE (arg0) == MULT_EXPR)))
9722 tree parg0, parg1, parg, marg;
9723 enum tree_code pcode;
9725 if (TREE_CODE (arg1) == MULT_EXPR)
9726 parg = arg0, marg = arg1;
9727 else
9728 parg = arg1, marg = arg0;
9729 pcode = TREE_CODE (parg);
9730 parg0 = TREE_OPERAND (parg, 0);
9731 parg1 = TREE_OPERAND (parg, 1);
9732 STRIP_NOPS (parg0);
9733 STRIP_NOPS (parg1);
9735 if (TREE_CODE (parg0) == MULT_EXPR
9736 && TREE_CODE (parg1) != MULT_EXPR)
9737 return fold_build2_loc (loc, pcode, type,
9738 fold_build2_loc (loc, PLUS_EXPR, type,
9739 fold_convert_loc (loc, type,
9740 parg0),
9741 fold_convert_loc (loc, type,
9742 marg)),
9743 fold_convert_loc (loc, type, parg1));
9744 if (TREE_CODE (parg0) != MULT_EXPR
9745 && TREE_CODE (parg1) == MULT_EXPR)
9746 return
9747 fold_build2_loc (loc, PLUS_EXPR, type,
9748 fold_convert_loc (loc, type, parg0),
9749 fold_build2_loc (loc, pcode, type,
9750 fold_convert_loc (loc, type, marg),
9751 fold_convert_loc (loc, type,
9752 parg1)));
9755 else
9757 /* See if ARG1 is zero and X + ARG1 reduces to X. */
9758 if (fold_real_zero_addition_p (TREE_TYPE (arg0), arg1, 0))
9759 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
9761 /* Likewise if the operands are reversed. */
9762 if (fold_real_zero_addition_p (TREE_TYPE (arg1), arg0, 0))
9763 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
9765 /* Convert X + -C into X - C. */
9766 if (TREE_CODE (arg1) == REAL_CST
9767 && REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg1)))
9769 tem = fold_negate_const (arg1, type);
9770 if (!TREE_OVERFLOW (arg1) || !flag_trapping_math)
9771 return fold_build2_loc (loc, MINUS_EXPR, type,
9772 fold_convert_loc (loc, type, arg0),
9773 fold_convert_loc (loc, type, tem));
9776 /* Fold __complex__ ( x, 0 ) + __complex__ ( 0, y )
9777 to __complex__ ( x, y ). This is not the same for SNaNs or
9778 if signed zeros are involved. */
9779 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
9780 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
9781 && COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0)))
9783 tree rtype = TREE_TYPE (TREE_TYPE (arg0));
9784 tree arg0r = fold_unary_loc (loc, REALPART_EXPR, rtype, arg0);
9785 tree arg0i = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg0);
9786 bool arg0rz = false, arg0iz = false;
9787 if ((arg0r && (arg0rz = real_zerop (arg0r)))
9788 || (arg0i && (arg0iz = real_zerop (arg0i))))
9790 tree arg1r = fold_unary_loc (loc, REALPART_EXPR, rtype, arg1);
9791 tree arg1i = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg1);
9792 if (arg0rz && arg1i && real_zerop (arg1i))
9794 tree rp = arg1r ? arg1r
9795 : build1 (REALPART_EXPR, rtype, arg1);
9796 tree ip = arg0i ? arg0i
9797 : build1 (IMAGPART_EXPR, rtype, arg0);
9798 return fold_build2_loc (loc, COMPLEX_EXPR, type, rp, ip);
9800 else if (arg0iz && arg1r && real_zerop (arg1r))
9802 tree rp = arg0r ? arg0r
9803 : build1 (REALPART_EXPR, rtype, arg0);
9804 tree ip = arg1i ? arg1i
9805 : build1 (IMAGPART_EXPR, rtype, arg1);
9806 return fold_build2_loc (loc, COMPLEX_EXPR, type, rp, ip);
9811 if (flag_unsafe_math_optimizations
9812 && (TREE_CODE (arg0) == RDIV_EXPR || TREE_CODE (arg0) == MULT_EXPR)
9813 && (TREE_CODE (arg1) == RDIV_EXPR || TREE_CODE (arg1) == MULT_EXPR)
9814 && (tem = distribute_real_division (loc, code, type, arg0, arg1)))
9815 return tem;
9817 /* Convert x+x into x*2.0. */
9818 if (operand_equal_p (arg0, arg1, 0)
9819 && SCALAR_FLOAT_TYPE_P (type))
9820 return fold_build2_loc (loc, MULT_EXPR, type, arg0,
9821 build_real (type, dconst2));
9823 /* Convert a + (b*c + d*e) into (a + b*c) + d*e.
9824 We associate floats only if the user has specified
9825 -fassociative-math. */
9826 if (flag_associative_math
9827 && TREE_CODE (arg1) == PLUS_EXPR
9828 && TREE_CODE (arg0) != MULT_EXPR)
9830 tree tree10 = TREE_OPERAND (arg1, 0);
9831 tree tree11 = TREE_OPERAND (arg1, 1);
9832 if (TREE_CODE (tree11) == MULT_EXPR
9833 && TREE_CODE (tree10) == MULT_EXPR)
9835 tree tree0;
9836 tree0 = fold_build2_loc (loc, PLUS_EXPR, type, arg0, tree10);
9837 return fold_build2_loc (loc, PLUS_EXPR, type, tree0, tree11);
9840 /* Convert (b*c + d*e) + a into b*c + (d*e +a).
9841 We associate floats only if the user has specified
9842 -fassociative-math. */
9843 if (flag_associative_math
9844 && TREE_CODE (arg0) == PLUS_EXPR
9845 && TREE_CODE (arg1) != MULT_EXPR)
9847 tree tree00 = TREE_OPERAND (arg0, 0);
9848 tree tree01 = TREE_OPERAND (arg0, 1);
9849 if (TREE_CODE (tree01) == MULT_EXPR
9850 && TREE_CODE (tree00) == MULT_EXPR)
9852 tree tree0;
9853 tree0 = fold_build2_loc (loc, PLUS_EXPR, type, tree01, arg1);
9854 return fold_build2_loc (loc, PLUS_EXPR, type, tree00, tree0);
9859 bit_rotate:
9860 /* (A << C1) + (A >> C2) if A is unsigned and C1+C2 is the size of A
9861 is a rotate of A by C1 bits. */
9862 /* (A << B) + (A >> (Z - B)) if A is unsigned and Z is the size of A
9863 is a rotate of A by B bits. */
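/* E.g. for a 32-bit unsigned x, (x << 3) + (x >> 29) is x
   rotated left by 3, and (x << n) + (x >> (32 - n)) is a
   rotate of x by n bits.  */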
9865 enum tree_code code0, code1;
9866 tree rtype;
9867 code0 = TREE_CODE (arg0);
9868 code1 = TREE_CODE (arg1);
9869 if (((code0 == RSHIFT_EXPR && code1 == LSHIFT_EXPR)
9870 || (code1 == RSHIFT_EXPR && code0 == LSHIFT_EXPR))
9871 && operand_equal_p (TREE_OPERAND (arg0, 0),
9872 TREE_OPERAND (arg1, 0), 0)
9873 && (rtype = TREE_TYPE (TREE_OPERAND (arg0, 0)),
9874 TYPE_UNSIGNED (rtype))
9875 /* Only create rotates in complete modes. Other cases are not
9876 expanded properly. */
9877 && TYPE_PRECISION (rtype) == GET_MODE_PRECISION (TYPE_MODE (rtype)))
9879 tree tree01, tree11;
9880 enum tree_code code01, code11;
9882 tree01 = TREE_OPERAND (arg0, 1);
9883 tree11 = TREE_OPERAND (arg1, 1);
9884 STRIP_NOPS (tree01);
9885 STRIP_NOPS (tree11);
9886 code01 = TREE_CODE (tree01);
9887 code11 = TREE_CODE (tree11);
9888 if (code01 == INTEGER_CST
9889 && code11 == INTEGER_CST
9890 && TREE_INT_CST_HIGH (tree01) == 0
9891 && TREE_INT_CST_HIGH (tree11) == 0
9892 && ((TREE_INT_CST_LOW (tree01) + TREE_INT_CST_LOW (tree11))
9893 == TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg0, 0)))))
9895 tem = build2_loc (loc, LROTATE_EXPR,
9896 TREE_TYPE (TREE_OPERAND (arg0, 0)),
9897 TREE_OPERAND (arg0, 0),
9898 code0 == LSHIFT_EXPR ? tree01 : tree11);
9899 return fold_convert_loc (loc, type, tem);
9901 else if (code11 == MINUS_EXPR)
9903 tree tree110, tree111;
9904 tree110 = TREE_OPERAND (tree11, 0);
9905 tree111 = TREE_OPERAND (tree11, 1);
9906 STRIP_NOPS (tree110);
9907 STRIP_NOPS (tree111);
9908 if (TREE_CODE (tree110) == INTEGER_CST
9909 && 0 == compare_tree_int (tree110,
9910 TYPE_PRECISION
9911 (TREE_TYPE (TREE_OPERAND
9912 (arg0, 0))))
9913 && operand_equal_p (tree01, tree111, 0))
9914 return
9915 fold_convert_loc (loc, type,
9916 build2 ((code0 == LSHIFT_EXPR
9917 ? LROTATE_EXPR
9918 : RROTATE_EXPR),
9919 TREE_TYPE (TREE_OPERAND (arg0, 0)),
9920 TREE_OPERAND (arg0, 0), tree01));
9922 else if (code01 == MINUS_EXPR)
9924 tree tree010, tree011;
9925 tree010 = TREE_OPERAND (tree01, 0);
9926 tree011 = TREE_OPERAND (tree01, 1);
9927 STRIP_NOPS (tree010);
9928 STRIP_NOPS (tree011);
9929 if (TREE_CODE (tree010) == INTEGER_CST
9930 && 0 == compare_tree_int (tree010,
9931 TYPE_PRECISION
9932 (TREE_TYPE (TREE_OPERAND
9933 (arg0, 0))))
9934 && operand_equal_p (tree11, tree011, 0))
9935 return fold_convert_loc
9936 (loc, type,
9937 build2 ((code0 != LSHIFT_EXPR
9938 ? LROTATE_EXPR
9939 : RROTATE_EXPR),
9940 TREE_TYPE (TREE_OPERAND (arg0, 0)),
9941 TREE_OPERAND (arg0, 0), tree11));
9946 associate:
9947 /* In most languages, we can't associate operations on floats through
9948 parentheses. Rather than remember where the parentheses were, we
9949 don't associate floats at all, unless the user has specified
9950 -fassociative-math.
9951 And, we need to make sure type is not saturating. */
9953 if ((! FLOAT_TYPE_P (type) || flag_associative_math)
9954 && !TYPE_SATURATING (type))
9956 tree var0, con0, lit0, minus_lit0;
9957 tree var1, con1, lit1, minus_lit1;
9958 bool ok = true;
9960 /* Split both trees into variables, constants, and literals. Then
9961 associate each group together, the constants with literals,
9962 then the result with variables. This increases the chances of
9963 literals being recombined later and of generating relocatable
9964 expressions for the sum of a constant and literal. */
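/* E.g. (x + 1) + (y + 2) splits into the variables x, y and the
   literals 1, 2; the literals are combined first, yielding
   (x + y) + 3.  */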
9965 var0 = split_tree (arg0, code, &con0, &lit0, &minus_lit0, 0);
9966 var1 = split_tree (arg1, code, &con1, &lit1, &minus_lit1,
9967 code == MINUS_EXPR);
9969 /* Recombine MINUS_EXPR operands by using PLUS_EXPR. */
9970 if (code == MINUS_EXPR)
9971 code = PLUS_EXPR;
9973 /* With undefined overflow we can only associate constants with one
9974 variable, and constants whose association doesn't overflow. */
9975 if ((POINTER_TYPE_P (type) && POINTER_TYPE_OVERFLOW_UNDEFINED)
9976 || (INTEGRAL_TYPE_P (type) && !TYPE_OVERFLOW_WRAPS (type)))
9978 if (var0 && var1)
9980 tree tmp0 = var0;
9981 tree tmp1 = var1;
9983 if (TREE_CODE (tmp0) == NEGATE_EXPR)
9984 tmp0 = TREE_OPERAND (tmp0, 0);
9985 if (TREE_CODE (tmp1) == NEGATE_EXPR)
9986 tmp1 = TREE_OPERAND (tmp1, 0);
9987 /* The only case we can still associate with two variables
9988 is if they are the same, modulo negation. */
9989 if (!operand_equal_p (tmp0, tmp1, 0))
9990 ok = false;
9993 if (ok && lit0 && lit1)
9995 tree tmp0 = fold_convert (type, lit0);
9996 tree tmp1 = fold_convert (type, lit1);
9998 if (!TREE_OVERFLOW (tmp0) && !TREE_OVERFLOW (tmp1)
9999 && TREE_OVERFLOW (fold_build2 (code, type, tmp0, tmp1)))
10000 ok = false;
10004 /* Only do something if we found more than two objects. Otherwise,
10005 nothing has changed and we risk infinite recursion. */
10006 if (ok
10007 && (2 < ((var0 != 0) + (var1 != 0)
10008 + (con0 != 0) + (con1 != 0)
10009 + (lit0 != 0) + (lit1 != 0)
10010 + (minus_lit0 != 0) + (minus_lit1 != 0))))
10012 var0 = associate_trees (loc, var0, var1, code, type);
10013 con0 = associate_trees (loc, con0, con1, code, type);
10014 lit0 = associate_trees (loc, lit0, lit1, code, type);
10015 minus_lit0 = associate_trees (loc, minus_lit0, minus_lit1, code, type);
10017 /* Preserve the MINUS_EXPR if the negative part of the literal is
10018 greater than the positive part. Otherwise, the multiplicative
10019 folding code (i.e. extract_muldiv) may be fooled in case
10020 unsigned constants are subtracted, like in the following
10021 example: ((X*2 + 4) - 8U)/2. */
10022 if (minus_lit0 && lit0)
10024 if (TREE_CODE (lit0) == INTEGER_CST
10025 && TREE_CODE (minus_lit0) == INTEGER_CST
10026 && tree_int_cst_lt (lit0, minus_lit0))
10028 minus_lit0 = associate_trees (loc, minus_lit0, lit0,
10029 MINUS_EXPR, type);
10030 lit0 = 0;
10032 else
10034 lit0 = associate_trees (loc, lit0, minus_lit0,
10035 MINUS_EXPR, type);
10036 minus_lit0 = 0;
10039 if (minus_lit0)
10041 if (con0 == 0)
10042 return
10043 fold_convert_loc (loc, type,
10044 associate_trees (loc, var0, minus_lit0,
10045 MINUS_EXPR, type));
10046 else
10048 con0 = associate_trees (loc, con0, minus_lit0,
10049 MINUS_EXPR, type);
10050 return
10051 fold_convert_loc (loc, type,
10052 associate_trees (loc, var0, con0,
10053 PLUS_EXPR, type));
10057 con0 = associate_trees (loc, con0, lit0, code, type);
10058 return
10059 fold_convert_loc (loc, type, associate_trees (loc, var0, con0,
10060 code, type));
10064 return NULL_TREE;
10066 case MINUS_EXPR:
10067 /* Pointer simplifications for subtraction, simple reassociations. */
10068 if (POINTER_TYPE_P (TREE_TYPE (arg1)) && POINTER_TYPE_P (TREE_TYPE (arg0)))
10070 /* (PTR0 p+ A) - (PTR1 p+ B) -> (PTR0 - PTR1) + (A - B) */
10071 if (TREE_CODE (arg0) == POINTER_PLUS_EXPR
10072 && TREE_CODE (arg1) == POINTER_PLUS_EXPR)
10074 tree arg00 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
10075 tree arg01 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
10076 tree arg10 = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 0));
10077 tree arg11 = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 1));
10078 return fold_build2_loc (loc, PLUS_EXPR, type,
10079 fold_build2_loc (loc, MINUS_EXPR, type,
10080 arg00, arg10),
10081 fold_build2_loc (loc, MINUS_EXPR, type,
10082 arg01, arg11));
10084 /* (PTR0 p+ A) - PTR1 -> (PTR0 - PTR1) + A, assuming PTR0 - PTR1 simplifies. */
10085 else if (TREE_CODE (arg0) == POINTER_PLUS_EXPR)
10087 tree arg00 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
10088 tree arg01 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
10089 tree tmp = fold_binary_loc (loc, MINUS_EXPR, type, arg00,
10090 fold_convert_loc (loc, type, arg1));
10091 if (tmp)
10092 return fold_build2_loc (loc, PLUS_EXPR, type, tmp, arg01);
10095 /* A - (-B) -> A + B */
10096 if (TREE_CODE (arg1) == NEGATE_EXPR)
10097 return fold_build2_loc (loc, PLUS_EXPR, type, op0,
10098 fold_convert_loc (loc, type,
10099 TREE_OPERAND (arg1, 0)));
10100 /* (-A) - B -> (-B) - A where B is easily negated and we can swap. */
10101 if (TREE_CODE (arg0) == NEGATE_EXPR
10102 && (FLOAT_TYPE_P (type)
10103 || INTEGRAL_TYPE_P (type))
10104 && negate_expr_p (arg1)
10105 && reorder_operands_p (arg0, arg1))
10106 return fold_build2_loc (loc, MINUS_EXPR, type,
10107 fold_convert_loc (loc, type,
10108 negate_expr (arg1)),
10109 fold_convert_loc (loc, type,
10110 TREE_OPERAND (arg0, 0)));
10111 /* Convert -A - 1 to ~A. */
10112 if (INTEGRAL_TYPE_P (type)
10113 && TREE_CODE (arg0) == NEGATE_EXPR
10114 && integer_onep (arg1)
10115 && !TYPE_OVERFLOW_TRAPS (type))
10116 return fold_build1_loc (loc, BIT_NOT_EXPR, type,
10117 fold_convert_loc (loc, type,
10118 TREE_OPERAND (arg0, 0)));
10120 /* Convert -1 - A to ~A. */
10121 if (INTEGRAL_TYPE_P (type)
10122 && integer_all_onesp (arg0))
10123 return fold_build1_loc (loc, BIT_NOT_EXPR, type, op1);
10126 /* X - (X / CST) * CST is X % CST. */
10127 if (INTEGRAL_TYPE_P (type)
10128 && TREE_CODE (arg1) == MULT_EXPR
10129 && TREE_CODE (TREE_OPERAND (arg1, 0)) == TRUNC_DIV_EXPR
10130 && operand_equal_p (arg0,
10131 TREE_OPERAND (TREE_OPERAND (arg1, 0), 0), 0)
10132 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg1, 0), 1),
10133 TREE_OPERAND (arg1, 1), 0))
10134 return
10135 fold_convert_loc (loc, type,
10136 fold_build2_loc (loc, TRUNC_MOD_EXPR, TREE_TYPE (arg0),
10137 arg0, TREE_OPERAND (arg1, 1)));
10139 if (! FLOAT_TYPE_P (type))
10141 if (integer_zerop (arg0))
10142 return negate_expr (fold_convert_loc (loc, type, arg1));
10143 if (integer_zerop (arg1))
10144 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
10146 /* Fold A - (A & B) into ~B & A. */
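/* The bits of A & B are a subset of the bits of A, so the
   subtraction borrows nothing and merely clears those bits,
   which is exactly A & ~B.  */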
10147 if (!TREE_SIDE_EFFECTS (arg0)
10148 && TREE_CODE (arg1) == BIT_AND_EXPR)
10150 if (operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0))
10152 tree arg10 = fold_convert_loc (loc, type,
10153 TREE_OPERAND (arg1, 0));
10154 return fold_build2_loc (loc, BIT_AND_EXPR, type,
10155 fold_build1_loc (loc, BIT_NOT_EXPR,
10156 type, arg10),
10157 fold_convert_loc (loc, type, arg0));
10159 if (operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
10161 tree arg11 = fold_convert_loc (loc,
10162 type, TREE_OPERAND (arg1, 1));
10163 return fold_build2_loc (loc, BIT_AND_EXPR, type,
10164 fold_build1_loc (loc, BIT_NOT_EXPR,
10165 type, arg11),
10166 fold_convert_loc (loc, type, arg0));
10170 /* Fold (A & ~B) - (A & B) into (A ^ B) - B, where B is
10171 any power of 2 minus 1. */
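/* E.g. with B == 0xf: (a & ~0xf) - (a & 0xf) == (a ^ 0xf) - 0xf,
   since XOR with a low-bit mask replaces the low bits of a by
   their complement, 0xf - (a & 0xf).  */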
10172 if (TREE_CODE (arg0) == BIT_AND_EXPR
10173 && TREE_CODE (arg1) == BIT_AND_EXPR
10174 && operand_equal_p (TREE_OPERAND (arg0, 0),
10175 TREE_OPERAND (arg1, 0), 0))
10177 tree mask0 = TREE_OPERAND (arg0, 1);
10178 tree mask1 = TREE_OPERAND (arg1, 1);
10179 tree tem = fold_build1_loc (loc, BIT_NOT_EXPR, type, mask0);
10181 if (operand_equal_p (tem, mask1, 0))
10183 tem = fold_build2_loc (loc, BIT_XOR_EXPR, type,
10184 TREE_OPERAND (arg0, 0), mask1);
10185 return fold_build2_loc (loc, MINUS_EXPR, type, tem, mask1);
10190 /* See if ARG1 is zero and X - ARG1 reduces to X. */
10191 else if (fold_real_zero_addition_p (TREE_TYPE (arg0), arg1, 1))
10192 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
10194 /* (ARG0 - ARG1) is the same as (-ARG1 + ARG0). So check whether
10195 ARG0 is zero and X + ARG0 reduces to X, since that would mean
10196 (-ARG1 + ARG0) reduces to -ARG1. */
10197 else if (fold_real_zero_addition_p (TREE_TYPE (arg1), arg0, 0))
10198 return negate_expr (fold_convert_loc (loc, type, arg1));
10200 /* Fold __complex__ ( x, 0 ) - __complex__ ( 0, y ) to
10201 __complex__ ( x, -y ). This is not the same for SNaNs or if
10202 signed zeros are involved. */
10203 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
10204 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
10205 && COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0)))
10207 tree rtype = TREE_TYPE (TREE_TYPE (arg0));
10208 tree arg0r = fold_unary_loc (loc, REALPART_EXPR, rtype, arg0);
10209 tree arg0i = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg0);
10210 bool arg0rz = false, arg0iz = false;
10211 if ((arg0r && (arg0rz = real_zerop (arg0r)))
10212 || (arg0i && (arg0iz = real_zerop (arg0i))))
10214 tree arg1r = fold_unary_loc (loc, REALPART_EXPR, rtype, arg1);
10215 tree arg1i = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg1);
10216 if (arg0rz && arg1i && real_zerop (arg1i))
10218 tree rp = fold_build1_loc (loc, NEGATE_EXPR, rtype,
10219 arg1r ? arg1r
10220 : build1 (REALPART_EXPR, rtype, arg1));
10221 tree ip = arg0i ? arg0i
10222 : build1 (IMAGPART_EXPR, rtype, arg0);
10223 return fold_build2_loc (loc, COMPLEX_EXPR, type, rp, ip);
10225 else if (arg0iz && arg1r && real_zerop (arg1r))
10227 tree rp = arg0r ? arg0r
10228 : build1 (REALPART_EXPR, rtype, arg0);
10229 tree ip = fold_build1_loc (loc, NEGATE_EXPR, rtype,
10230 arg1i ? arg1i
10231 : build1 (IMAGPART_EXPR, rtype, arg1));
10232 return fold_build2_loc (loc, COMPLEX_EXPR, type, rp, ip);
10237 /* Fold &x - &x. This can happen from &x.foo - &x.
10238 This is unsafe for certain floats even in non-IEEE formats.
10239 In IEEE, it is unsafe because it gives the wrong result for NaNs.
10240 Also note that operand_equal_p is always false if an operand
10241 is volatile. */
10243 if ((!FLOAT_TYPE_P (type) || !HONOR_NANS (TYPE_MODE (type)))
10244 && operand_equal_p (arg0, arg1, 0))
10245 return build_zero_cst (type);
10247 /* A - B -> A + (-B) if B is easily negatable. */
10248 if (negate_expr_p (arg1)
10249 && ((FLOAT_TYPE_P (type)
10250 /* Avoid this transformation if B is a positive REAL_CST. */
10251 && (TREE_CODE (arg1) != REAL_CST
10252 || REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg1))))
10253 || INTEGRAL_TYPE_P (type)))
10254 return fold_build2_loc (loc, PLUS_EXPR, type,
10255 fold_convert_loc (loc, type, arg0),
10256 fold_convert_loc (loc, type,
10257 negate_expr (arg1)));
10259 /* Try folding difference of addresses. */
10261 HOST_WIDE_INT diff;
10263 if ((TREE_CODE (arg0) == ADDR_EXPR
10264 || TREE_CODE (arg1) == ADDR_EXPR)
10265 && ptr_difference_const (arg0, arg1, &diff))
10266 return build_int_cst_type (type, diff);
10269 /* Fold &a[i] - &a[j] to (i-j) scaled by the element size.  */
10270 if (TREE_CODE (arg0) == ADDR_EXPR
10271 && TREE_CODE (TREE_OPERAND (arg0, 0)) == ARRAY_REF
10272 && TREE_CODE (arg1) == ADDR_EXPR
10273 && TREE_CODE (TREE_OPERAND (arg1, 0)) == ARRAY_REF)
10275 tree aref0 = TREE_OPERAND (arg0, 0);
10276 tree aref1 = TREE_OPERAND (arg1, 0);
10277 if (operand_equal_p (TREE_OPERAND (aref0, 0),
10278 TREE_OPERAND (aref1, 0), 0))
10280 tree op0 = fold_convert_loc (loc, type, TREE_OPERAND (aref0, 1));
10281 tree op1 = fold_convert_loc (loc, type, TREE_OPERAND (aref1, 1));
10282 tree esz = array_ref_element_size (aref0);
10283 tree diff = build2 (MINUS_EXPR, type, op0, op1);
10284 return fold_build2_loc (loc, MULT_EXPR, type, diff,
10285 fold_convert_loc (loc, type, esz));
10290 if (FLOAT_TYPE_P (type)
10291 && flag_unsafe_math_optimizations
10292 && (TREE_CODE (arg0) == RDIV_EXPR || TREE_CODE (arg0) == MULT_EXPR)
10293 && (TREE_CODE (arg1) == RDIV_EXPR || TREE_CODE (arg1) == MULT_EXPR)
10294 && (tem = distribute_real_division (loc, code, type, arg0, arg1)))
10295 return tem;
10297 /* Handle (A1 * C1) - (A2 * C2) with A1, A2 or C1, C2 being the
10298 same or one. Make sure type is not saturating.
10299 fold_plusminus_mult_expr will re-associate. */
10300 if ((TREE_CODE (arg0) == MULT_EXPR
10301 || TREE_CODE (arg1) == MULT_EXPR)
10302 && !TYPE_SATURATING (type)
10303 && (!FLOAT_TYPE_P (type) || flag_associative_math))
10305 tree tem = fold_plusminus_mult_expr (loc, code, type, arg0, arg1);
10306 if (tem)
10307 return tem;
10310 goto associate;
10312 case MULT_EXPR:
10313 /* (-A) * (-B) -> A * B */
10314 if (TREE_CODE (arg0) == NEGATE_EXPR && negate_expr_p (arg1))
10315 return fold_build2_loc (loc, MULT_EXPR, type,
10316 fold_convert_loc (loc, type,
10317 TREE_OPERAND (arg0, 0)),
10318 fold_convert_loc (loc, type,
10319 negate_expr (arg1)));
10320 if (TREE_CODE (arg1) == NEGATE_EXPR && negate_expr_p (arg0))
10321 return fold_build2_loc (loc, MULT_EXPR, type,
10322 fold_convert_loc (loc, type,
10323 negate_expr (arg0)),
10324 fold_convert_loc (loc, type,
10325 TREE_OPERAND (arg1, 0)));
10327 if (! FLOAT_TYPE_P (type))
10329 if (integer_zerop (arg1))
10330 return omit_one_operand_loc (loc, type, arg1, arg0);
10331 if (integer_onep (arg1))
10332 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
10333 /* Transform x * -1 into -x. Make sure to do the negation
10334 on the original operand with conversions not stripped
10335 because we can only strip non-sign-changing conversions. */
10336 if (integer_all_onesp (arg1))
10337 return fold_convert_loc (loc, type, negate_expr (op0));
10338 /* Transform x * -C into -x * C if x is easily negatable. */
10339 if (TREE_CODE (arg1) == INTEGER_CST
10340 && tree_int_cst_sgn (arg1) == -1
10341 && negate_expr_p (arg0)
10342 && (tem = negate_expr (arg1)) != arg1
10343 && !TREE_OVERFLOW (tem))
10344 return fold_build2_loc (loc, MULT_EXPR, type,
10345 fold_convert_loc (loc, type,
10346 negate_expr (arg0)),
10347 tem);
10349 /* (a * (1 << b)) is (a << b) */
10350 if (TREE_CODE (arg1) == LSHIFT_EXPR
10351 && integer_onep (TREE_OPERAND (arg1, 0)))
10352 return fold_build2_loc (loc, LSHIFT_EXPR, type, op0,
10353 TREE_OPERAND (arg1, 1));
10354 if (TREE_CODE (arg0) == LSHIFT_EXPR
10355 && integer_onep (TREE_OPERAND (arg0, 0)))
10356 return fold_build2_loc (loc, LSHIFT_EXPR, type, op1,
10357 TREE_OPERAND (arg0, 1));
10359 /* (A + A) * C -> A * 2 * C */
10360 if (TREE_CODE (arg0) == PLUS_EXPR
10361 && TREE_CODE (arg1) == INTEGER_CST
10362 && operand_equal_p (TREE_OPERAND (arg0, 0),
10363 TREE_OPERAND (arg0, 1), 0))
10364 return fold_build2_loc (loc, MULT_EXPR, type,
10365 omit_one_operand_loc (loc, type,
10366 TREE_OPERAND (arg0, 0),
10367 TREE_OPERAND (arg0, 1)),
10368 fold_build2_loc (loc, MULT_EXPR, type,
10369 build_int_cst (type, 2) , arg1));
10371 strict_overflow_p = false;
10372 if (TREE_CODE (arg1) == INTEGER_CST
10373 && 0 != (tem = extract_muldiv (op0, arg1, code, NULL_TREE,
10374 &strict_overflow_p)))
10376 if (strict_overflow_p)
10377 fold_overflow_warning (("assuming signed overflow does not "
10378 "occur when simplifying "
10379 "multiplication"),
10380 WARN_STRICT_OVERFLOW_MISC);
10381 return fold_convert_loc (loc, type, tem);
10384 /* Optimize z * conj(z) for integer complex numbers. */
10385 if (TREE_CODE (arg0) == CONJ_EXPR
10386 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
10387 return fold_mult_zconjz (loc, type, arg1);
10388 if (TREE_CODE (arg1) == CONJ_EXPR
10389 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
10390 return fold_mult_zconjz (loc, type, arg0);
10392 else
10394 /* Maybe fold x * 0 to 0. The expressions aren't the same
10395 when x is NaN, since x * 0 is also NaN. Nor are they the
10396 same in modes with signed zeros, since multiplying a
10397 negative value by 0 gives -0, not +0. */
10398 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0)))
10399 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
10400 && real_zerop (arg1))
10401 return omit_one_operand_loc (loc, type, arg1, arg0);
10402 /* In IEEE floating point, x*1 is not equivalent to x for snans.
10403 Likewise for complex arithmetic with signed zeros. */
10404 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
10405 && (!HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
10406 || !COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0)))
10407 && real_onep (arg1))
10408 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
10410 /* Transform x * -1.0 into -x. */
10411 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
10412 && (!HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
10413 || !COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0)))
10414 && real_minus_onep (arg1))
10415 return fold_convert_loc (loc, type, negate_expr (arg0));
10417 /* Convert (C1/X)*C2 into (C1*C2)/X. This transformation may change
10418 the result for floating point types due to rounding, so it is applied
10419 only if -fassociative-math was specified.  */
10420 if (flag_associative_math
10421 && TREE_CODE (arg0) == RDIV_EXPR
10422 && TREE_CODE (arg1) == REAL_CST
10423 && TREE_CODE (TREE_OPERAND (arg0, 0)) == REAL_CST)
10425 tree tem = const_binop (MULT_EXPR, TREE_OPERAND (arg0, 0),
10426 arg1);
10427 if (tem)
10428 return fold_build2_loc (loc, RDIV_EXPR, type, tem,
10429 TREE_OPERAND (arg0, 1));
10432 /* Strip sign operations from X in X*X, i.e. -Y*-Y -> Y*Y. */
10433 if (operand_equal_p (arg0, arg1, 0))
10435 tree tem = fold_strip_sign_ops (arg0);
10436 if (tem != NULL_TREE)
10438 tem = fold_convert_loc (loc, type, tem);
10439 return fold_build2_loc (loc, MULT_EXPR, type, tem, tem);
10443 /* Fold z * +-I to __complex__ (-+__imag z, +-__real z).
10444 This is not the same for NaNs or if signed zeros are
10445 involved. */
10446 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0)))
10447 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
10448 && COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0))
10449 && TREE_CODE (arg1) == COMPLEX_CST
10450 && real_zerop (TREE_REALPART (arg1)))
10452 tree rtype = TREE_TYPE (TREE_TYPE (arg0));
10453 if (real_onep (TREE_IMAGPART (arg1)))
10454 return
10455 fold_build2_loc (loc, COMPLEX_EXPR, type,
10456 negate_expr (fold_build1_loc (loc, IMAGPART_EXPR,
10457 rtype, arg0)),
10458 fold_build1_loc (loc, REALPART_EXPR, rtype, arg0));
10459 else if (real_minus_onep (TREE_IMAGPART (arg1)))
10460 return
10461 fold_build2_loc (loc, COMPLEX_EXPR, type,
10462 fold_build1_loc (loc, IMAGPART_EXPR, rtype, arg0),
10463 negate_expr (fold_build1_loc (loc, REALPART_EXPR,
10464 rtype, arg0)));
10467 /* Optimize z * conj(z) for floating point complex numbers.
10468 Guarded by flag_unsafe_math_optimizations as non-finite
10469 imaginary components don't produce scalar results. */
10470 if (flag_unsafe_math_optimizations
10471 && TREE_CODE (arg0) == CONJ_EXPR
10472 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
10473 return fold_mult_zconjz (loc, type, arg1);
10474 if (flag_unsafe_math_optimizations
10475 && TREE_CODE (arg1) == CONJ_EXPR
10476 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
10477 return fold_mult_zconjz (loc, type, arg0);
10479 if (flag_unsafe_math_optimizations)
10481 enum built_in_function fcode0 = builtin_mathfn_code (arg0);
10482 enum built_in_function fcode1 = builtin_mathfn_code (arg1);
10484 /* Optimizations of root(...)*root(...). */
10485 if (fcode0 == fcode1 && BUILTIN_ROOT_P (fcode0))
10487 tree rootfn, arg;
10488 tree arg00 = CALL_EXPR_ARG (arg0, 0);
10489 tree arg10 = CALL_EXPR_ARG (arg1, 0);
10491 /* Optimize sqrt(x)*sqrt(x) as x. */
10492 if (BUILTIN_SQRT_P (fcode0)
10493 && operand_equal_p (arg00, arg10, 0)
10494 && ! HONOR_SNANS (TYPE_MODE (type)))
10495 return arg00;
10497 /* Optimize root(x)*root(y) as root(x*y). */
10498 rootfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
10499 arg = fold_build2_loc (loc, MULT_EXPR, type, arg00, arg10);
10500 return build_call_expr_loc (loc, rootfn, 1, arg);
10503 /* Optimize expN(x)*expN(y) as expN(x+y). */
10504 if (fcode0 == fcode1 && BUILTIN_EXPONENT_P (fcode0))
10506 tree expfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
10507 tree arg = fold_build2_loc (loc, PLUS_EXPR, type,
10508 CALL_EXPR_ARG (arg0, 0),
10509 CALL_EXPR_ARG (arg1, 0));
10510 return build_call_expr_loc (loc, expfn, 1, arg);
10513 /* Optimizations of pow(...)*pow(...). */
10514 if ((fcode0 == BUILT_IN_POW && fcode1 == BUILT_IN_POW)
10515 || (fcode0 == BUILT_IN_POWF && fcode1 == BUILT_IN_POWF)
10516 || (fcode0 == BUILT_IN_POWL && fcode1 == BUILT_IN_POWL))
10518 tree arg00 = CALL_EXPR_ARG (arg0, 0);
10519 tree arg01 = CALL_EXPR_ARG (arg0, 1);
10520 tree arg10 = CALL_EXPR_ARG (arg1, 0);
10521 tree arg11 = CALL_EXPR_ARG (arg1, 1);
10523 /* Optimize pow(x,y)*pow(z,y) as pow(x*z,y). */
10524 if (operand_equal_p (arg01, arg11, 0))
10526 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
10527 tree arg = fold_build2_loc (loc, MULT_EXPR, type,
10528 arg00, arg10);
10529 return build_call_expr_loc (loc, powfn, 2, arg, arg01);
10532 /* Optimize pow(x,y)*pow(x,z) as pow(x,y+z). */
10533 if (operand_equal_p (arg00, arg10, 0))
10535 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
10536 tree arg = fold_build2_loc (loc, PLUS_EXPR, type,
10537 arg01, arg11);
10538 return build_call_expr_loc (loc, powfn, 2, arg00, arg);
10542 /* Optimize tan(x)*cos(x) as sin(x). */
10543 if (((fcode0 == BUILT_IN_TAN && fcode1 == BUILT_IN_COS)
10544 || (fcode0 == BUILT_IN_TANF && fcode1 == BUILT_IN_COSF)
10545 || (fcode0 == BUILT_IN_TANL && fcode1 == BUILT_IN_COSL)
10546 || (fcode0 == BUILT_IN_COS && fcode1 == BUILT_IN_TAN)
10547 || (fcode0 == BUILT_IN_COSF && fcode1 == BUILT_IN_TANF)
10548 || (fcode0 == BUILT_IN_COSL && fcode1 == BUILT_IN_TANL))
10549 && operand_equal_p (CALL_EXPR_ARG (arg0, 0),
10550 CALL_EXPR_ARG (arg1, 0), 0))
10552 tree sinfn = mathfn_built_in (type, BUILT_IN_SIN);
10554 if (sinfn != NULL_TREE)
10555 return build_call_expr_loc (loc, sinfn, 1,
10556 CALL_EXPR_ARG (arg0, 0));
10559 /* Optimize x*pow(x,c) as pow(x,c+1). */
10560 if (fcode1 == BUILT_IN_POW
10561 || fcode1 == BUILT_IN_POWF
10562 || fcode1 == BUILT_IN_POWL)
10564 tree arg10 = CALL_EXPR_ARG (arg1, 0);
10565 tree arg11 = CALL_EXPR_ARG (arg1, 1);
10566 if (TREE_CODE (arg11) == REAL_CST
10567 && !TREE_OVERFLOW (arg11)
10568 && operand_equal_p (arg0, arg10, 0))
10570 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg1), 0);
10571 REAL_VALUE_TYPE c;
10572 tree arg;
10574 c = TREE_REAL_CST (arg11);
10575 real_arithmetic (&c, PLUS_EXPR, &c, &dconst1);
10576 arg = build_real (type, c);
10577 return build_call_expr_loc (loc, powfn, 2, arg0, arg);
10581 /* Optimize pow(x,c)*x as pow(x,c+1). */
10582 if (fcode0 == BUILT_IN_POW
10583 || fcode0 == BUILT_IN_POWF
10584 || fcode0 == BUILT_IN_POWL)
10586 tree arg00 = CALL_EXPR_ARG (arg0, 0);
10587 tree arg01 = CALL_EXPR_ARG (arg0, 1);
10588 if (TREE_CODE (arg01) == REAL_CST
10589 && !TREE_OVERFLOW (arg01)
10590 && operand_equal_p (arg1, arg00, 0))
10592 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
10593 REAL_VALUE_TYPE c;
10594 tree arg;
10596 c = TREE_REAL_CST (arg01);
10597 real_arithmetic (&c, PLUS_EXPR, &c, &dconst1);
10598 arg = build_real (type, c);
10599 return build_call_expr_loc (loc, powfn, 2, arg1, arg);
10603 /* Optimize x*x as pow(x,2.0), which is later expanded back into x*x. */
10604 if (!in_gimple_form
10605 && optimize_function_for_speed_p (cfun)
10606 && operand_equal_p (arg0, arg1, 0))
10608 tree powfn = mathfn_built_in (type, BUILT_IN_POW);
10610 if (powfn)
10612 tree arg = build_real (type, dconst2);
10613 return build_call_expr_loc (loc, powfn, 2, arg0, arg);
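/* Worked example (editor's sketch): when optimizing for speed,
   x * x is canonicalized here to pow (x, 2.0); the expander later
   turns that back into x * x, so no libm call is actually emitted,
   but intermediate passes see the canonical pow form.  */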
10618 goto associate;
10620 case BIT_IOR_EXPR:
10621 bit_ior:
10622 if (integer_all_onesp (arg1))
10623 return omit_one_operand_loc (loc, type, arg1, arg0);
10624 if (integer_zerop (arg1))
10625 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
10626 if (operand_equal_p (arg0, arg1, 0))
10627 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
10629 /* ~X | X is -1. */
10630 if (TREE_CODE (arg0) == BIT_NOT_EXPR
10631 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
10633 t1 = build_zero_cst (type);
10634 t1 = fold_unary_loc (loc, BIT_NOT_EXPR, type, t1);
10635 return omit_one_operand_loc (loc, type, t1, arg1);
10638 /* X | ~X is -1. */
10639 if (TREE_CODE (arg1) == BIT_NOT_EXPR
10640 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
10642 t1 = build_zero_cst (type);
10643 t1 = fold_unary_loc (loc, BIT_NOT_EXPR, type, t1);
10644 return omit_one_operand_loc (loc, type, t1, arg0);
10647 /* Canonicalize (X & C1) | C2. */
10648 if (TREE_CODE (arg0) == BIT_AND_EXPR
10649 && TREE_CODE (arg1) == INTEGER_CST
10650 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
10652 unsigned HOST_WIDE_INT hi1, lo1, hi2, lo2, hi3, lo3, mlo, mhi;
10653 int width = TYPE_PRECISION (type), w;
10654 hi1 = TREE_INT_CST_HIGH (TREE_OPERAND (arg0, 1));
10655 lo1 = TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1));
10656 hi2 = TREE_INT_CST_HIGH (arg1);
10657 lo2 = TREE_INT_CST_LOW (arg1);
10659 /* If (C1&C2) == C1, then (X&C1)|C2 becomes (X,C2). */
10660 if ((hi1 & hi2) == hi1 && (lo1 & lo2) == lo1)
10661 return omit_one_operand_loc (loc, type, arg1,
10662 TREE_OPERAND (arg0, 0));
10664 if (width > HOST_BITS_PER_WIDE_INT)
10666 mhi = (unsigned HOST_WIDE_INT) -1
10667 >> (2 * HOST_BITS_PER_WIDE_INT - width);
10668 mlo = -1;
10670 else
10672 mhi = 0;
10673 mlo = (unsigned HOST_WIDE_INT) -1
10674 >> (HOST_BITS_PER_WIDE_INT - width);
10677 /* If (C1|C2) == ~0 then (X&C1)|C2 becomes X|C2. */
10678 if ((~(hi1 | hi2) & mhi) == 0 && (~(lo1 | lo2) & mlo) == 0)
10679 return fold_build2_loc (loc, BIT_IOR_EXPR, type,
10680 TREE_OPERAND (arg0, 0), arg1);
10682 /* Minimize the number of bits set in C1, i.e. C1 := C1 & ~C2,
10683 unless (C1 & ~C2) | (C2 & C3) for some C3 is a mask of some
10684 mode which allows further optimizations. */
10685 hi1 &= mhi;
10686 lo1 &= mlo;
10687 hi2 &= mhi;
10688 lo2 &= mlo;
10689 hi3 = hi1 & ~hi2;
10690 lo3 = lo1 & ~lo2;
10691 for (w = BITS_PER_UNIT;
10692 w <= width && w <= HOST_BITS_PER_WIDE_INT;
10693 w <<= 1)
10695 unsigned HOST_WIDE_INT mask
10696 = (unsigned HOST_WIDE_INT) -1 >> (HOST_BITS_PER_WIDE_INT - w);
10697 if (((lo1 | lo2) & mask) == mask
10698 && (lo1 & ~mask) == 0 && hi1 == 0)
10700 hi3 = 0;
10701 lo3 = mask;
10702 break;
10705 if (hi3 != hi1 || lo3 != lo1)
10706 return fold_build2_loc (loc, BIT_IOR_EXPR, type,
10707 fold_build2_loc (loc, BIT_AND_EXPR, type,
10708 TREE_OPERAND (arg0, 0),
10709 build_int_cst_wide (type,
10710 lo3, hi3)),
10711 arg1);
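/* Worked examples (editor's sketch, assuming 32-bit int x):
   (x & 0x3f) | 0x0f becomes (x & 0x30) | 0x0f, clearing from C1 the
   bits that C2 forces to 1 anyway, while (x & 0xf3) | 0x0f becomes
   (x & 0xff) | 0x0f, since widening C1 to the byte mask 0xff may
   enable further simplification.  */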
10714 /* (X & Y) | Y is (X, Y). */
10715 if (TREE_CODE (arg0) == BIT_AND_EXPR
10716 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
10717 return omit_one_operand_loc (loc, type, arg1, TREE_OPERAND (arg0, 0));
10718 /* (X & Y) | X is (Y, X). */
10719 if (TREE_CODE (arg0) == BIT_AND_EXPR
10720 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
10721 && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
10722 return omit_one_operand_loc (loc, type, arg1, TREE_OPERAND (arg0, 1));
10723 /* X | (X & Y) is (Y, X). */
10724 if (TREE_CODE (arg1) == BIT_AND_EXPR
10725 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0)
10726 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 1)))
10727 return omit_one_operand_loc (loc, type, arg0, TREE_OPERAND (arg1, 1));
10728 /* X | (Y & X) is (Y, X). */
10729 if (TREE_CODE (arg1) == BIT_AND_EXPR
10730 && operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0)
10731 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
10732 return omit_one_operand_loc (loc, type, arg0, TREE_OPERAND (arg1, 0));
10734 /* (X & ~Y) | (~X & Y) is X ^ Y */
10735 if (TREE_CODE (arg0) == BIT_AND_EXPR
10736 && TREE_CODE (arg1) == BIT_AND_EXPR)
10738 tree a0, a1, l0, l1, n0, n1;
10740 a0 = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 0));
10741 a1 = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 1));
10743 l0 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
10744 l1 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
10746 n0 = fold_build1_loc (loc, BIT_NOT_EXPR, type, l0);
10747 n1 = fold_build1_loc (loc, BIT_NOT_EXPR, type, l1);
10749 if ((operand_equal_p (n0, a0, 0)
10750 && operand_equal_p (n1, a1, 0))
10751 || (operand_equal_p (n0, a1, 0)
10752 && operand_equal_p (n1, a0, 0)))
10753 return fold_build2_loc (loc, BIT_XOR_EXPR, type, l0, n1);
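/* Worked example (editor's sketch): (x & ~y) | (~x & y) is
   recognized, in either operand order, and folded to x ^ y.  */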
10756 t1 = distribute_bit_expr (loc, code, type, arg0, arg1);
10757 if (t1 != NULL_TREE)
10758 return t1;
10760 /* Convert (or (not arg0) (not arg1)) to (not (and (arg0) (arg1))).
10762 This results in more efficient code for machines without a NAND
10763 instruction. Combine will canonicalize to the first form
10764 which will allow use of NAND instructions provided by the
10765 backend if they exist. */
10766 if (TREE_CODE (arg0) == BIT_NOT_EXPR
10767 && TREE_CODE (arg1) == BIT_NOT_EXPR)
10769 return
10770 fold_build1_loc (loc, BIT_NOT_EXPR, type,
10771 build2 (BIT_AND_EXPR, type,
10772 fold_convert_loc (loc, type,
10773 TREE_OPERAND (arg0, 0)),
10774 fold_convert_loc (loc, type,
10775 TREE_OPERAND (arg1, 0))));
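/* Worked example (editor's sketch): ~x | ~y becomes ~(x & y), one
   BIT_NOT_EXPR instead of two; a target with NAND then matches the
   combined form in combine.  */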
10778 /* See if this can be simplified into a rotate first. If that
10779 is unsuccessful continue in the association code. */
10780 goto bit_rotate;
10782 case BIT_XOR_EXPR:
10783 if (integer_zerop (arg1))
10784 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
10785 if (integer_all_onesp (arg1))
10786 return fold_build1_loc (loc, BIT_NOT_EXPR, type, op0);
10787 if (operand_equal_p (arg0, arg1, 0))
10788 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
10790 /* ~X ^ X is -1. */
10791 if (TREE_CODE (arg0) == BIT_NOT_EXPR
10792 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
10794 t1 = build_zero_cst (type);
10795 t1 = fold_unary_loc (loc, BIT_NOT_EXPR, type, t1);
10796 return omit_one_operand_loc (loc, type, t1, arg1);
10799 /* X ^ ~X is -1. */
10800 if (TREE_CODE (arg1) == BIT_NOT_EXPR
10801 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
10803 t1 = build_zero_cst (type);
10804 t1 = fold_unary_loc (loc, BIT_NOT_EXPR, type, t1);
10805 return omit_one_operand_loc (loc, type, t1, arg0);
10808 /* If we are XORing two BIT_AND_EXPR's, both of which are and'ing
10809 with a constant, and the two constants have no bits in common,
10810 we should treat this as a BIT_IOR_EXPR since this may produce more
10811 simplifications. */
10812 if (TREE_CODE (arg0) == BIT_AND_EXPR
10813 && TREE_CODE (arg1) == BIT_AND_EXPR
10814 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
10815 && TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
10816 && integer_zerop (const_binop (BIT_AND_EXPR,
10817 TREE_OPERAND (arg0, 1),
10818 TREE_OPERAND (arg1, 1))))
10820 code = BIT_IOR_EXPR;
10821 goto bit_ior;
10824 /* (X | Y) ^ X -> Y & ~X */
10825 if (TREE_CODE (arg0) == BIT_IOR_EXPR
10826 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
10828 tree t2 = TREE_OPERAND (arg0, 1);
10829 t1 = fold_build1_loc (loc, BIT_NOT_EXPR, TREE_TYPE (arg1),
10830 arg1);
10831 t1 = fold_build2_loc (loc, BIT_AND_EXPR, type,
10832 fold_convert_loc (loc, type, t2),
10833 fold_convert_loc (loc, type, t1));
10834 return t1;
10837 /* (Y | X) ^ X -> Y & ~X */
10838 if (TREE_CODE (arg0) == BIT_IOR_EXPR
10839 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
10841 tree t2 = TREE_OPERAND (arg0, 0);
10842 t1 = fold_build1_loc (loc, BIT_NOT_EXPR, TREE_TYPE (arg1),
10843 arg1);
10844 t1 = fold_build2_loc (loc, BIT_AND_EXPR, type,
10845 fold_convert_loc (loc, type, t2),
10846 fold_convert_loc (loc, type, t1));
10847 return t1;
10850 /* X ^ (X | Y) -> Y & ~X */
10851 if (TREE_CODE (arg1) == BIT_IOR_EXPR
10852 && operand_equal_p (TREE_OPERAND (arg1, 0), arg0, 0))
10854 tree t2 = TREE_OPERAND (arg1, 1);
10855 t1 = fold_build1_loc (loc, BIT_NOT_EXPR, TREE_TYPE (arg0),
10856 arg0);
10857 t1 = fold_build2_loc (loc, BIT_AND_EXPR, type,
10858 fold_convert_loc (loc, type, t2),
10859 fold_convert_loc (loc, type, t1));
10860 return t1;
10863 /* X ^ (Y | X) -> Y & ~X */
10864 if (TREE_CODE (arg1) == BIT_IOR_EXPR
10865 && operand_equal_p (TREE_OPERAND (arg1, 1), arg0, 0))
10867 tree t2 = TREE_OPERAND (arg1, 0);
10868 t1 = fold_build1_loc (loc, BIT_NOT_EXPR, TREE_TYPE (arg0),
10869 arg0);
10870 t1 = fold_build2_loc (loc, BIT_AND_EXPR, type,
10871 fold_convert_loc (loc, type, t2),
10872 fold_convert_loc (loc, type, t1));
10873 return t1;
10876 /* Convert ~X ^ ~Y to X ^ Y. */
10877 if (TREE_CODE (arg0) == BIT_NOT_EXPR
10878 && TREE_CODE (arg1) == BIT_NOT_EXPR)
10879 return fold_build2_loc (loc, code, type,
10880 fold_convert_loc (loc, type,
10881 TREE_OPERAND (arg0, 0)),
10882 fold_convert_loc (loc, type,
10883 TREE_OPERAND (arg1, 0)));
10885 /* Convert ~X ^ C to X ^ ~C. */
10886 if (TREE_CODE (arg0) == BIT_NOT_EXPR
10887 && TREE_CODE (arg1) == INTEGER_CST)
10888 return fold_build2_loc (loc, code, type,
10889 fold_convert_loc (loc, type,
10890 TREE_OPERAND (arg0, 0)),
10891 fold_build1_loc (loc, BIT_NOT_EXPR, type, arg1));
10893 /* Fold (X & 1) ^ 1 as (X & 1) == 0. */
10894 if (TREE_CODE (arg0) == BIT_AND_EXPR
10895 && integer_onep (TREE_OPERAND (arg0, 1))
10896 && integer_onep (arg1))
10897 return fold_build2_loc (loc, EQ_EXPR, type, arg0,
10898 build_int_cst (TREE_TYPE (arg0), 0));
10900 /* Fold (X & Y) ^ Y as ~X & Y. */
10901 if (TREE_CODE (arg0) == BIT_AND_EXPR
10902 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
10904 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
10905 return fold_build2_loc (loc, BIT_AND_EXPR, type,
10906 fold_build1_loc (loc, BIT_NOT_EXPR, type, tem),
10907 fold_convert_loc (loc, type, arg1));
10909 /* Fold (X & Y) ^ X as ~Y & X. */
10910 if (TREE_CODE (arg0) == BIT_AND_EXPR
10911 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
10912 && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
10914 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
10915 return fold_build2_loc (loc, BIT_AND_EXPR, type,
10916 fold_build1_loc (loc, BIT_NOT_EXPR, type, tem),
10917 fold_convert_loc (loc, type, arg1));
10919 /* Fold X ^ (X & Y) as X & ~Y. */
10920 if (TREE_CODE (arg1) == BIT_AND_EXPR
10921 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
10923 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 1));
10924 return fold_build2_loc (loc, BIT_AND_EXPR, type,
10925 fold_convert_loc (loc, type, arg0),
10926 fold_build1_loc (loc, BIT_NOT_EXPR, type, tem));
10928 /* Fold X ^ (Y & X) as ~Y & X. */
10929 if (TREE_CODE (arg1) == BIT_AND_EXPR
10930 && operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0)
10931 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
10933 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 0));
10934 return fold_build2_loc (loc, BIT_AND_EXPR, type,
10935 fold_build1_loc (loc, BIT_NOT_EXPR, type, tem),
10936 fold_convert_loc (loc, type, arg0));
10939 /* See if this can be simplified into a rotate first. If that
10940 is unsuccessful continue in the association code. */
10941 goto bit_rotate;
10943 case BIT_AND_EXPR:
10944 if (integer_all_onesp (arg1))
10945 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
10946 if (integer_zerop (arg1))
10947 return omit_one_operand_loc (loc, type, arg1, arg0);
10948 if (operand_equal_p (arg0, arg1, 0))
10949 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
10951 /* ~X & X, (X == 0) & X, and !X & X are always zero. */
10952 if ((TREE_CODE (arg0) == BIT_NOT_EXPR
10953 || TREE_CODE (arg0) == TRUTH_NOT_EXPR
10954 || (TREE_CODE (arg0) == EQ_EXPR
10955 && integer_zerop (TREE_OPERAND (arg0, 1))))
10956 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
10957 return omit_one_operand_loc (loc, type, integer_zero_node, arg1);
10959 /* X & ~X, X & (X == 0), and X & !X are always zero. */
10960 if ((TREE_CODE (arg1) == BIT_NOT_EXPR
10961 || TREE_CODE (arg1) == TRUTH_NOT_EXPR
10962 || (TREE_CODE (arg1) == EQ_EXPR
10963 && integer_zerop (TREE_OPERAND (arg1, 1))))
10964 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
10965 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
10967 /* Canonicalize (X | C1) & C2 as (X & C2) | (C1 & C2). */
10968 if (TREE_CODE (arg0) == BIT_IOR_EXPR
10969 && TREE_CODE (arg1) == INTEGER_CST
10970 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
10972 tree tmp1 = fold_convert_loc (loc, type, arg1);
10973 tree tmp2 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
10974 tree tmp3 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
10975 tmp2 = fold_build2_loc (loc, BIT_AND_EXPR, type, tmp2, tmp1);
10976 tmp3 = fold_build2_loc (loc, BIT_AND_EXPR, type, tmp3, tmp1);
10977 return
10978 fold_convert_loc (loc, type,
10979 fold_build2_loc (loc, BIT_IOR_EXPR,
10980 type, tmp2, tmp3));
10983 /* (X | Y) & Y is (X, Y). */
10984 if (TREE_CODE (arg0) == BIT_IOR_EXPR
10985 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
10986 return omit_one_operand_loc (loc, type, arg1, TREE_OPERAND (arg0, 0));
10987 /* (X | Y) & X is (Y, X). */
10988 if (TREE_CODE (arg0) == BIT_IOR_EXPR
10989 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
10990 && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
10991 return omit_one_operand_loc (loc, type, arg1, TREE_OPERAND (arg0, 1));
10992 /* X & (X | Y) is (Y, X). */
10993 if (TREE_CODE (arg1) == BIT_IOR_EXPR
10994 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0)
10995 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 1)))
10996 return omit_one_operand_loc (loc, type, arg0, TREE_OPERAND (arg1, 1));
10997 /* X & (Y | X) is (Y, X). */
10998 if (TREE_CODE (arg1) == BIT_IOR_EXPR
10999 && operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0)
11000 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
11001 return omit_one_operand_loc (loc, type, arg0, TREE_OPERAND (arg1, 0));
11003 /* Fold (X ^ 1) & 1 as (X & 1) == 0. */
11004 if (TREE_CODE (arg0) == BIT_XOR_EXPR
11005 && integer_onep (TREE_OPERAND (arg0, 1))
11006 && integer_onep (arg1))
11008 tem = TREE_OPERAND (arg0, 0);
11009 return fold_build2_loc (loc, EQ_EXPR, type,
11010 fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (tem), tem,
11011 build_int_cst (TREE_TYPE (tem), 1)),
11012 build_int_cst (TREE_TYPE (tem), 0));
11014 /* Fold ~X & 1 as (X & 1) == 0. */
11015 if (TREE_CODE (arg0) == BIT_NOT_EXPR
11016 && integer_onep (arg1))
11018 tem = TREE_OPERAND (arg0, 0);
11019 return fold_build2_loc (loc, EQ_EXPR, type,
11020 fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (tem), tem,
11021 build_int_cst (TREE_TYPE (tem), 1)),
11022 build_int_cst (TREE_TYPE (tem), 0));
11024 /* Fold !X & 1 as X == 0. */
11025 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
11026 && integer_onep (arg1))
11028 tem = TREE_OPERAND (arg0, 0);
11029 return fold_build2_loc (loc, EQ_EXPR, type, tem,
11030 build_int_cst (TREE_TYPE (tem), 0));
11033 /* Fold (X ^ Y) & Y as ~X & Y. */
11034 if (TREE_CODE (arg0) == BIT_XOR_EXPR
11035 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
11037 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
11038 return fold_build2_loc (loc, BIT_AND_EXPR, type,
11039 fold_build1_loc (loc, BIT_NOT_EXPR, type, tem),
11040 fold_convert_loc (loc, type, arg1));
11042 /* Fold (X ^ Y) & X as ~Y & X. */
11043 if (TREE_CODE (arg0) == BIT_XOR_EXPR
11044 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
11045 && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
11047 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
11048 return fold_build2_loc (loc, BIT_AND_EXPR, type,
11049 fold_build1_loc (loc, BIT_NOT_EXPR, type, tem),
11050 fold_convert_loc (loc, type, arg1));
11052 /* Fold X & (X ^ Y) as X & ~Y. */
11053 if (TREE_CODE (arg1) == BIT_XOR_EXPR
11054 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
11056 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 1));
11057 return fold_build2_loc (loc, BIT_AND_EXPR, type,
11058 fold_convert_loc (loc, type, arg0),
11059 fold_build1_loc (loc, BIT_NOT_EXPR, type, tem));
11061 /* Fold X & (Y ^ X) as ~Y & X. */
11062 if (TREE_CODE (arg1) == BIT_XOR_EXPR
11063 && operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0)
11064 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
11066 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 0));
11067 return fold_build2_loc (loc, BIT_AND_EXPR, type,
11068 fold_build1_loc (loc, BIT_NOT_EXPR, type, tem),
11069 fold_convert_loc (loc, type, arg0));
11072 /* For constants M and N, if M == (1LL << cst) - 1 && (N & M) == M,
11073 ((A & N) + B) & M -> (A + B) & M
11074 Similarly if (N & M) == 0,
11075 ((A | N) + B) & M -> (A + B) & M
11076 and for - instead of + (or unary - instead of +)
11077 and/or ^ instead of |.
11078 If B is constant and (B & M) == 0, fold into A & M. */
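/* Worked example (editor's sketch): with M == 0xff (cst == 8),
   ((a & 0x1ff) + b) & 0xff becomes (a + b) & 0xff, because bits of
   the addends above M cannot affect the low byte of the sum;
   likewise ((a | 0x100) + b) & 0xff, where the OR-ed bits all lie
   outside M.  */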
11079 if (host_integerp (arg1, 1))
11081 unsigned HOST_WIDE_INT cst1 = tree_low_cst (arg1, 1);
11082 if (~cst1 && (cst1 & (cst1 + 1)) == 0
11083 && INTEGRAL_TYPE_P (TREE_TYPE (arg0))
11084 && (TREE_CODE (arg0) == PLUS_EXPR
11085 || TREE_CODE (arg0) == MINUS_EXPR
11086 || TREE_CODE (arg0) == NEGATE_EXPR)
11087 && (TYPE_OVERFLOW_WRAPS (TREE_TYPE (arg0))
11088 || TREE_CODE (TREE_TYPE (arg0)) == INTEGER_TYPE))
11090 tree pmop[2];
11091 int which = 0;
11092 unsigned HOST_WIDE_INT cst0;
11094 /* Now we know that arg0 is (C + D), (C - D) or -C,
11095 and that arg1 (M) == (1LL << cst) - 1.
11096 Store C into PMOP[0] and D into PMOP[1]. */
11097 pmop[0] = TREE_OPERAND (arg0, 0);
11098 pmop[1] = NULL;
11099 if (TREE_CODE (arg0) != NEGATE_EXPR)
11101 pmop[1] = TREE_OPERAND (arg0, 1);
11102 which = 1;
11105 if (!host_integerp (TYPE_MAX_VALUE (TREE_TYPE (arg0)), 1)
11106 || (tree_low_cst (TYPE_MAX_VALUE (TREE_TYPE (arg0)), 1)
11107 & cst1) != cst1)
11108 which = -1;
11110 for (; which >= 0; which--)
11111 switch (TREE_CODE (pmop[which]))
11113 case BIT_AND_EXPR:
11114 case BIT_IOR_EXPR:
11115 case BIT_XOR_EXPR:
11116 if (TREE_CODE (TREE_OPERAND (pmop[which], 1))
11117 != INTEGER_CST)
11118 break;
11119 /* tree_low_cst is not used here because we don't care
11120 about the upper bits. */
11121 cst0 = TREE_INT_CST_LOW (TREE_OPERAND (pmop[which], 1));
11122 cst0 &= cst1;
11123 if (TREE_CODE (pmop[which]) == BIT_AND_EXPR)
11125 if (cst0 != cst1)
11126 break;
11128 else if (cst0 != 0)
11129 break;
11130 /* If C or D is of the form (A & N) where
11131 (N & M) == M, or of the form (A | N) or
11132 (A ^ N) where (N & M) == 0, replace it with A. */
11133 pmop[which] = TREE_OPERAND (pmop[which], 0);
11134 break;
11135 case INTEGER_CST:
11136 /* If C or D is a constant N where (N & M) == 0, it can
11137 be omitted (assumed to be 0). */
11138 if ((TREE_CODE (arg0) == PLUS_EXPR
11139 || (TREE_CODE (arg0) == MINUS_EXPR && which == 0))
11140 && (TREE_INT_CST_LOW (pmop[which]) & cst1) == 0)
11141 pmop[which] = NULL;
11142 break;
11143 default:
11144 break;
11147 /* Only build anything new if we optimized one or both arguments
11148 above. */
11149 if (pmop[0] != TREE_OPERAND (arg0, 0)
11150 || (TREE_CODE (arg0) != NEGATE_EXPR
11151 && pmop[1] != TREE_OPERAND (arg0, 1)))
11153 tree utype = TREE_TYPE (arg0);
11154 if (! TYPE_OVERFLOW_WRAPS (TREE_TYPE (arg0)))
11156 /* Perform the operations in a type that has defined
11157 overflow behavior. */
11158 utype = unsigned_type_for (TREE_TYPE (arg0));
11159 if (pmop[0] != NULL)
11160 pmop[0] = fold_convert_loc (loc, utype, pmop[0]);
11161 if (pmop[1] != NULL)
11162 pmop[1] = fold_convert_loc (loc, utype, pmop[1]);
11165 if (TREE_CODE (arg0) == NEGATE_EXPR)
11166 tem = fold_build1_loc (loc, NEGATE_EXPR, utype, pmop[0]);
11167 else if (TREE_CODE (arg0) == PLUS_EXPR)
11169 if (pmop[0] != NULL && pmop[1] != NULL)
11170 tem = fold_build2_loc (loc, PLUS_EXPR, utype,
11171 pmop[0], pmop[1]);
11172 else if (pmop[0] != NULL)
11173 tem = pmop[0];
11174 else if (pmop[1] != NULL)
11175 tem = pmop[1];
11176 else
11177 return build_int_cst (type, 0);
11179 else if (pmop[0] == NULL)
11180 tem = fold_build1_loc (loc, NEGATE_EXPR, utype, pmop[1]);
11181 else
11182 tem = fold_build2_loc (loc, MINUS_EXPR, utype,
11183 pmop[0], pmop[1]);
11184 /* TEM is now the new binary +, - or unary - replacement. */
11185 tem = fold_build2_loc (loc, BIT_AND_EXPR, utype, tem,
11186 fold_convert_loc (loc, utype, arg1));
11187 return fold_convert_loc (loc, type, tem);
11192 t1 = distribute_bit_expr (loc, code, type, arg0, arg1);
11193 if (t1 != NULL_TREE)
11194 return t1;
11195 /* Simplify ((int)c & 0377) into (int)c, if c is unsigned char. */
11196 if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg0) == NOP_EXPR
11197 && TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg0, 0))))
11199 unsigned int prec
11200 = TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg0, 0)));
11202 if (prec < BITS_PER_WORD && prec < HOST_BITS_PER_WIDE_INT
11203 && (~TREE_INT_CST_LOW (arg1)
11204 & (((HOST_WIDE_INT) 1 << prec) - 1)) == 0)
11205 return
11206 fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
11209 /* Convert (and (not arg0) (not arg1)) to (not (or (arg0) (arg1))).
11211 This results in more efficient code for machines without a NOR
11212 instruction. Combine will canonicalize to the first form
11213 which will allow use of NOR instructions provided by the
11214 backend if they exist. */
11215 if (TREE_CODE (arg0) == BIT_NOT_EXPR
11216 && TREE_CODE (arg1) == BIT_NOT_EXPR)
11218 return fold_build1_loc (loc, BIT_NOT_EXPR, type,
11219 build2 (BIT_IOR_EXPR, type,
11220 fold_convert_loc (loc, type,
11221 TREE_OPERAND (arg0, 0)),
11222 fold_convert_loc (loc, type,
11223 TREE_OPERAND (arg1, 0))));
11226 /* If arg0 is derived from the address of an object or function, we may
11227 be able to fold this expression using the object or function's
11228 alignment. */
11229 if (POINTER_TYPE_P (TREE_TYPE (arg0)) && host_integerp (arg1, 1))
11231 unsigned HOST_WIDE_INT modulus, residue;
11232 unsigned HOST_WIDE_INT low = TREE_INT_CST_LOW (arg1);
11234 modulus = get_pointer_modulus_and_residue (arg0, &residue,
11235 integer_onep (arg1));
11237 /* This works because modulus is a power of 2. If this weren't the
11238 case, we'd have to replace it by its greatest power-of-2
11239 divisor: modulus & -modulus. */
11240 if (low < modulus)
11241 return build_int_cst (type, residue & low);
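/* Worked example (editor's sketch): if arg0 is the address of an
   object known to be 16-byte aligned (modulus 16, residue 0),
   masking it with any constant below 16, e.g. & 15, folds to the
   constant 0.  */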
11244 /* Fold (X << C1) & C2 into (X << C1) & (C2 | ((1 << C1) - 1))
11245 (X >> C1) & C2 into (X >> C1) & (C2 | ~((type) -1 >> C1))
11246 if the new mask might be further optimized. */
11247 if ((TREE_CODE (arg0) == LSHIFT_EXPR
11248 || TREE_CODE (arg0) == RSHIFT_EXPR)
11249 && host_integerp (TREE_OPERAND (arg0, 1), 1)
11250 && host_integerp (arg1, TYPE_UNSIGNED (TREE_TYPE (arg1)))
11251 && tree_low_cst (TREE_OPERAND (arg0, 1), 1)
11252 < TYPE_PRECISION (TREE_TYPE (arg0))
11253 && TYPE_PRECISION (TREE_TYPE (arg0)) <= HOST_BITS_PER_WIDE_INT
11254 && tree_low_cst (TREE_OPERAND (arg0, 1), 1) > 0)
11256 unsigned int shiftc = tree_low_cst (TREE_OPERAND (arg0, 1), 1);
11257 unsigned HOST_WIDE_INT mask
11258 = tree_low_cst (arg1, TYPE_UNSIGNED (TREE_TYPE (arg1)));
11259 unsigned HOST_WIDE_INT newmask, zerobits = 0;
11260 tree shift_type = TREE_TYPE (arg0);
11262 if (TREE_CODE (arg0) == LSHIFT_EXPR)
11263 zerobits = ((((unsigned HOST_WIDE_INT) 1) << shiftc) - 1);
11264 else if (TREE_CODE (arg0) == RSHIFT_EXPR
11265 && TYPE_PRECISION (TREE_TYPE (arg0))
11266 == GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (arg0))))
11268 unsigned int prec = TYPE_PRECISION (TREE_TYPE (arg0));
11269 tree arg00 = TREE_OPERAND (arg0, 0);
11270 /* See if more bits can be proven as zero because of
11271 zero extension. */
11272 if (TREE_CODE (arg00) == NOP_EXPR
11273 && TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg00, 0))))
11275 tree inner_type = TREE_TYPE (TREE_OPERAND (arg00, 0));
11276 if (TYPE_PRECISION (inner_type)
11277 == GET_MODE_BITSIZE (TYPE_MODE (inner_type))
11278 && TYPE_PRECISION (inner_type) < prec)
11280 prec = TYPE_PRECISION (inner_type);
11281 /* See if we can shorten the right shift. */
11282 if (shiftc < prec)
11283 shift_type = inner_type;
11286 zerobits = ~(unsigned HOST_WIDE_INT) 0;
11287 zerobits >>= HOST_BITS_PER_WIDE_INT - shiftc;
11288 zerobits <<= prec - shiftc;
11289 /* For an arithmetic shift, if the sign bit could be set, zerobits
11290 may actually contain sign bits, so no transformation is
11291 possible unless MASK masks them all away. In that
11292 case the shift needs to be converted into a logical shift. */
11293 if (!TYPE_UNSIGNED (TREE_TYPE (arg0))
11294 && prec == TYPE_PRECISION (TREE_TYPE (arg0)))
11296 if ((mask & zerobits) == 0)
11297 shift_type = unsigned_type_for (TREE_TYPE (arg0));
11298 else
11299 zerobits = 0;
11303 /* ((X << 16) & 0xff00) is (X, 0). */
11304 if ((mask & zerobits) == mask)
11305 return omit_one_operand_loc (loc, type,
11306 build_int_cst (type, 0), arg0);
11308 newmask = mask | zerobits;
11309 if (newmask != mask && (newmask & (newmask + 1)) == 0)
11311 unsigned int prec;
11313 /* Only do the transformation if NEWMASK is some integer
11314 mode's mask. */
11315 for (prec = BITS_PER_UNIT;
11316 prec < HOST_BITS_PER_WIDE_INT; prec <<= 1)
11317 if (newmask == (((unsigned HOST_WIDE_INT) 1) << prec) - 1)
11318 break;
11319 if (prec < HOST_BITS_PER_WIDE_INT
11320 || newmask == ~(unsigned HOST_WIDE_INT) 0)
11322 tree newmaskt;
11324 if (shift_type != TREE_TYPE (arg0))
11326 tem = fold_build2_loc (loc, TREE_CODE (arg0), shift_type,
11327 fold_convert_loc (loc, shift_type,
11328 TREE_OPERAND (arg0, 0)),
11329 TREE_OPERAND (arg0, 1));
11330 tem = fold_convert_loc (loc, type, tem);
11332 else
11333 tem = op0;
11334 newmaskt = build_int_cst_type (TREE_TYPE (op1), newmask);
11335 if (!tree_int_cst_equal (newmaskt, arg1))
11336 return fold_build2_loc (loc, BIT_AND_EXPR, type, tem, newmaskt);
11341 goto associate;
11343 case RDIV_EXPR:
11344 /* Don't touch a floating-point divide by zero unless the mode
11345 of the constant can represent infinity. */
11346 if (TREE_CODE (arg1) == REAL_CST
11347 && !MODE_HAS_INFINITIES (TYPE_MODE (TREE_TYPE (arg1)))
11348 && real_zerop (arg1))
11349 return NULL_TREE;
11351 /* Optimize A / A to 1.0 if we don't care about
11352 NaNs or Infinities. Skip the transformation
11353 for non-real operands. */
11354 if (SCALAR_FLOAT_TYPE_P (TREE_TYPE (arg0))
11355 && ! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0)))
11356 && ! HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (arg0)))
11357 && operand_equal_p (arg0, arg1, 0))
11359 tree r = build_real (TREE_TYPE (arg0), dconst1);
11361 return omit_two_operands_loc (loc, type, r, arg0, arg1);
11364 /* The complex version of the above A / A optimization. */
11365 if (COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0))
11366 && operand_equal_p (arg0, arg1, 0))
11368 tree elem_type = TREE_TYPE (TREE_TYPE (arg0));
11369 if (! HONOR_NANS (TYPE_MODE (elem_type))
11370 && ! HONOR_INFINITIES (TYPE_MODE (elem_type)))
11372 tree r = build_real (elem_type, dconst1);
11373 /* omit_two_operands will call fold_convert for us. */
11374 return omit_two_operands_loc (loc, type, r, arg0, arg1);
11378 /* (-A) / (-B) -> A / B */
11379 if (TREE_CODE (arg0) == NEGATE_EXPR && negate_expr_p (arg1))
11380 return fold_build2_loc (loc, RDIV_EXPR, type,
11381 TREE_OPERAND (arg0, 0),
11382 negate_expr (arg1));
11383 if (TREE_CODE (arg1) == NEGATE_EXPR && negate_expr_p (arg0))
11384 return fold_build2_loc (loc, RDIV_EXPR, type,
11385 negate_expr (arg0),
11386 TREE_OPERAND (arg1, 0));
11388 /* In IEEE floating point, x/1 is not equivalent to x for snans. */
11389 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
11390 && real_onep (arg1))
11391 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
11393 /* In IEEE floating point, x/-1 is not equivalent to -x for snans. */
11394 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
11395 && real_minus_onep (arg1))
11396 return non_lvalue_loc (loc, fold_convert_loc (loc, type,
11397 negate_expr (arg0)));
11399 /* If ARG1 is a constant, we can convert this to a multiply by the
11400 reciprocal. This does not have the same rounding properties,
11401 so only do this if -freciprocal-math. We can actually
11402 always safely do it if ARG1 is a power of two, but it's hard to
11403 tell if it is or not in a portable manner. */
11404 if (TREE_CODE (arg1) == REAL_CST)
11406 if (flag_reciprocal_math
11407 && 0 != (tem = const_binop (code, build_real (type, dconst1),
11408 arg1)))
11409 return fold_build2_loc (loc, MULT_EXPR, type, arg0, tem);
11410 /* Find the reciprocal if optimizing and the result is exact. */
11411 if (optimize)
11413 REAL_VALUE_TYPE r;
11414 r = TREE_REAL_CST (arg1);
11415 if (exact_real_inverse (TYPE_MODE (TREE_TYPE (arg0)), &r))
11417 tem = build_real (type, r);
11418 return fold_build2_loc (loc, MULT_EXPR, type,
11419 fold_convert_loc (loc, type, arg0), tem);
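/* Worked example (editor's sketch): x / 2.0 becomes x * 0.5 whenever
   the reciprocal is exactly representable; with -freciprocal-math,
   x / 3.0 is also rewritten as x * (1.0 / 3.0), accepting the
   different rounding.  */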
11423 /* Convert A/B/C to A/(B*C). */
11424 if (flag_reciprocal_math
11425 && TREE_CODE (arg0) == RDIV_EXPR)
11426 return fold_build2_loc (loc, RDIV_EXPR, type, TREE_OPERAND (arg0, 0),
11427 fold_build2_loc (loc, MULT_EXPR, type,
11428 TREE_OPERAND (arg0, 1), arg1));
11430 /* Convert A/(B/C) to (A/B)*C. */
11431 if (flag_reciprocal_math
11432 && TREE_CODE (arg1) == RDIV_EXPR)
11433 return fold_build2_loc (loc, MULT_EXPR, type,
11434 fold_build2_loc (loc, RDIV_EXPR, type, arg0,
11435 TREE_OPERAND (arg1, 0)),
11436 TREE_OPERAND (arg1, 1));
11438 /* Convert C1/(X*C2) into (C1/C2)/X. */
11439 if (flag_reciprocal_math
11440 && TREE_CODE (arg1) == MULT_EXPR
11441 && TREE_CODE (arg0) == REAL_CST
11442 && TREE_CODE (TREE_OPERAND (arg1, 1)) == REAL_CST)
11444 tree tem = const_binop (RDIV_EXPR, arg0,
11445 TREE_OPERAND (arg1, 1));
11446 if (tem)
11447 return fold_build2_loc (loc, RDIV_EXPR, type, tem,
11448 TREE_OPERAND (arg1, 0));
11451 if (flag_unsafe_math_optimizations)
11453 enum built_in_function fcode0 = builtin_mathfn_code (arg0);
11454 enum built_in_function fcode1 = builtin_mathfn_code (arg1);
11456 /* Optimize sin(x)/cos(x) as tan(x). */
11457 if (((fcode0 == BUILT_IN_SIN && fcode1 == BUILT_IN_COS)
11458 || (fcode0 == BUILT_IN_SINF && fcode1 == BUILT_IN_COSF)
11459 || (fcode0 == BUILT_IN_SINL && fcode1 == BUILT_IN_COSL))
11460 && operand_equal_p (CALL_EXPR_ARG (arg0, 0),
11461 CALL_EXPR_ARG (arg1, 0), 0))
11463 tree tanfn = mathfn_built_in (type, BUILT_IN_TAN);
11465 if (tanfn != NULL_TREE)
11466 return build_call_expr_loc (loc, tanfn, 1, CALL_EXPR_ARG (arg0, 0));
11469 /* Optimize cos(x)/sin(x) as 1.0/tan(x). */
11470 if (((fcode0 == BUILT_IN_COS && fcode1 == BUILT_IN_SIN)
11471 || (fcode0 == BUILT_IN_COSF && fcode1 == BUILT_IN_SINF)
11472 || (fcode0 == BUILT_IN_COSL && fcode1 == BUILT_IN_SINL))
11473 && operand_equal_p (CALL_EXPR_ARG (arg0, 0),
11474 CALL_EXPR_ARG (arg1, 0), 0))
11476 tree tanfn = mathfn_built_in (type, BUILT_IN_TAN);
11478 if (tanfn != NULL_TREE)
11480 tree tmp = build_call_expr_loc (loc, tanfn, 1,
11481 CALL_EXPR_ARG (arg0, 0));
11482 return fold_build2_loc (loc, RDIV_EXPR, type,
11483 build_real (type, dconst1), tmp);
11487 /* Optimize sin(x)/tan(x) as cos(x) if we don't care about
11488 NaNs or Infinities. */
11489 if (((fcode0 == BUILT_IN_SIN && fcode1 == BUILT_IN_TAN)
11490 || (fcode0 == BUILT_IN_SINF && fcode1 == BUILT_IN_TANF)
11491 || (fcode0 == BUILT_IN_SINL && fcode1 == BUILT_IN_TANL)))
11493 tree arg00 = CALL_EXPR_ARG (arg0, 0);
11494 tree arg01 = CALL_EXPR_ARG (arg1, 0);
11496 if (! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg00)))
11497 && ! HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (arg00)))
11498 && operand_equal_p (arg00, arg01, 0))
11500 tree cosfn = mathfn_built_in (type, BUILT_IN_COS);
11502 if (cosfn != NULL_TREE)
11503 return build_call_expr_loc (loc, cosfn, 1, arg00);
11507 /* Optimize tan(x)/sin(x) as 1.0/cos(x) if we don't care about
11508 NaNs or Infinities. */
11509 if (((fcode0 == BUILT_IN_TAN && fcode1 == BUILT_IN_SIN)
11510 || (fcode0 == BUILT_IN_TANF && fcode1 == BUILT_IN_SINF)
11511 || (fcode0 == BUILT_IN_TANL && fcode1 == BUILT_IN_SINL)))
11513 tree arg00 = CALL_EXPR_ARG (arg0, 0);
11514 tree arg01 = CALL_EXPR_ARG (arg1, 0);
11516 if (! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg00)))
11517 && ! HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (arg00)))
11518 && operand_equal_p (arg00, arg01, 0))
11520 tree cosfn = mathfn_built_in (type, BUILT_IN_COS);
11522 if (cosfn != NULL_TREE)
11524 tree tmp = build_call_expr_loc (loc, cosfn, 1, arg00);
11525 return fold_build2_loc (loc, RDIV_EXPR, type,
11526 build_real (type, dconst1),
11527 tmp);
11532 /* Optimize pow(x,c)/x as pow(x,c-1). */
11533 if (fcode0 == BUILT_IN_POW
11534 || fcode0 == BUILT_IN_POWF
11535 || fcode0 == BUILT_IN_POWL)
11537 tree arg00 = CALL_EXPR_ARG (arg0, 0);
11538 tree arg01 = CALL_EXPR_ARG (arg0, 1);
11539 if (TREE_CODE (arg01) == REAL_CST
11540 && !TREE_OVERFLOW (arg01)
11541 && operand_equal_p (arg1, arg00, 0))
11543 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
11544 REAL_VALUE_TYPE c;
11545 tree arg;
11547 c = TREE_REAL_CST (arg01);
11548 real_arithmetic (&c, MINUS_EXPR, &c, &dconst1);
11549 arg = build_real (type, c);
11550 return build_call_expr_loc (loc, powfn, 2, arg1, arg);
11554 /* Optimize a/root(b/c) into a*root(c/b). */
11555 if (BUILTIN_ROOT_P (fcode1))
11557 tree rootarg = CALL_EXPR_ARG (arg1, 0);
11559 if (TREE_CODE (rootarg) == RDIV_EXPR)
11561 tree rootfn = TREE_OPERAND (CALL_EXPR_FN (arg1), 0);
11562 tree b = TREE_OPERAND (rootarg, 0);
11563 tree c = TREE_OPERAND (rootarg, 1);
11565 tree tmp = fold_build2_loc (loc, RDIV_EXPR, type, c, b);
11567 tmp = build_call_expr_loc (loc, rootfn, 1, tmp);
11568 return fold_build2_loc (loc, MULT_EXPR, type, arg0, tmp);
11572 /* Optimize x/expN(y) into x*expN(-y). */
11573 if (BUILTIN_EXPONENT_P (fcode1))
11575 tree expfn = TREE_OPERAND (CALL_EXPR_FN (arg1), 0);
11576 tree arg = negate_expr (CALL_EXPR_ARG (arg1, 0));
11577 arg1 = build_call_expr_loc (loc,
11578 expfn, 1,
11579 fold_convert_loc (loc, type, arg));
11580 return fold_build2_loc (loc, MULT_EXPR, type, arg0, arg1);
11583 /* Optimize x/pow(y,z) into x*pow(y,-z). */
11584 if (fcode1 == BUILT_IN_POW
11585 || fcode1 == BUILT_IN_POWF
11586 || fcode1 == BUILT_IN_POWL)
11588 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg1), 0);
11589 tree arg10 = CALL_EXPR_ARG (arg1, 0);
11590 tree arg11 = CALL_EXPR_ARG (arg1, 1);
11591 tree neg11 = fold_convert_loc (loc, type,
11592 negate_expr (arg11));
11593 arg1 = build_call_expr_loc (loc, powfn, 2, arg10, neg11);
11594 return fold_build2_loc (loc, MULT_EXPR, type, arg0, arg1);
11597 return NULL_TREE;
11599 case TRUNC_DIV_EXPR:
11600 /* Optimize (X & (-A)) / A where A is a power of 2,
11601 to X >> log2(A). */
11602 if (TREE_CODE (arg0) == BIT_AND_EXPR
11603 && !TYPE_UNSIGNED (type) && TREE_CODE (arg1) == INTEGER_CST
11604 && integer_pow2p (arg1) && tree_int_cst_sgn (arg1) > 0)
11606 tree sum = fold_binary_loc (loc, PLUS_EXPR, TREE_TYPE (arg1),
11607 arg1, TREE_OPERAND (arg0, 1));
11608 if (sum && integer_zerop (sum)) {
11609 unsigned long pow2;
11611 if (TREE_INT_CST_LOW (arg1))
11612 pow2 = exact_log2 (TREE_INT_CST_LOW (arg1));
11613 else
11614 pow2 = exact_log2 (TREE_INT_CST_HIGH (arg1))
11615 + HOST_BITS_PER_WIDE_INT;
11617 return fold_build2_loc (loc, RSHIFT_EXPR, type,
11618 TREE_OPERAND (arg0, 0),
11619 build_int_cst (integer_type_node, pow2));
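/* Worked example (editor's sketch): for signed x, (x & -16) / 16
   becomes x >> 4; the mask clears exactly the low bits that the
   division would discard, so the shift is safe.  */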
11623 /* Fall through. */
11625 case FLOOR_DIV_EXPR:
11626 /* Simplify A / (B << N) where A and B are positive and B is
11627 a power of 2, to A >> (N + log2(B)). */
11628 strict_overflow_p = false;
11629 if (TREE_CODE (arg1) == LSHIFT_EXPR
11630 && (TYPE_UNSIGNED (type)
11631 || tree_expr_nonnegative_warnv_p (op0, &strict_overflow_p)))
11633 tree sval = TREE_OPERAND (arg1, 0);
11634 if (integer_pow2p (sval) && tree_int_cst_sgn (sval) > 0)
11636 tree sh_cnt = TREE_OPERAND (arg1, 1);
11637 unsigned long pow2;
11639 if (TREE_INT_CST_LOW (sval))
11640 pow2 = exact_log2 (TREE_INT_CST_LOW (sval));
11641 else
11642 pow2 = exact_log2 (TREE_INT_CST_HIGH (sval))
11643 + HOST_BITS_PER_WIDE_INT;
11645 if (strict_overflow_p)
11646 fold_overflow_warning (("assuming signed overflow does not "
11647 "occur when simplifying A / (B << N)"),
11648 WARN_STRICT_OVERFLOW_MISC);
11650 sh_cnt = fold_build2_loc (loc, PLUS_EXPR, TREE_TYPE (sh_cnt),
11651 sh_cnt,
11652 build_int_cst (TREE_TYPE (sh_cnt),
11653 pow2));
11654 return fold_build2_loc (loc, RSHIFT_EXPR, type,
11655 fold_convert_loc (loc, type, arg0), sh_cnt);
11659 /* For unsigned integral types, FLOOR_DIV_EXPR is the same as
11660 TRUNC_DIV_EXPR. Rewrite into the latter in this case. */
11661 if (INTEGRAL_TYPE_P (type)
11662 && TYPE_UNSIGNED (type)
11663 && code == FLOOR_DIV_EXPR)
11664 return fold_build2_loc (loc, TRUNC_DIV_EXPR, type, op0, op1);
11666 /* Fall through. */
11668 case ROUND_DIV_EXPR:
11669 case CEIL_DIV_EXPR:
11670 case EXACT_DIV_EXPR:
11671 if (integer_onep (arg1))
11672 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
11673 if (integer_zerop (arg1))
11674 return NULL_TREE;
11675 /* X / -1 is -X. */
11676 if (!TYPE_UNSIGNED (type)
11677 && TREE_CODE (arg1) == INTEGER_CST
11678 && TREE_INT_CST_LOW (arg1) == (unsigned HOST_WIDE_INT) -1
11679 && TREE_INT_CST_HIGH (arg1) == -1)
11680 return fold_convert_loc (loc, type, negate_expr (arg0));
11682 /* Convert -A / -B to A / B when the type is signed and overflow is
11683 undefined. */
11684 if ((!INTEGRAL_TYPE_P (type) || TYPE_OVERFLOW_UNDEFINED (type))
11685 && TREE_CODE (arg0) == NEGATE_EXPR
11686 && negate_expr_p (arg1))
11688 if (INTEGRAL_TYPE_P (type))
11689 fold_overflow_warning (("assuming signed overflow does not occur "
11690 "when distributing negation across "
11691 "division"),
11692 WARN_STRICT_OVERFLOW_MISC);
11693 return fold_build2_loc (loc, code, type,
11694 fold_convert_loc (loc, type,
11695 TREE_OPERAND (arg0, 0)),
11696 fold_convert_loc (loc, type,
11697 negate_expr (arg1)));
11699 if ((!INTEGRAL_TYPE_P (type) || TYPE_OVERFLOW_UNDEFINED (type))
11700 && TREE_CODE (arg1) == NEGATE_EXPR
11701 && negate_expr_p (arg0))
11703 if (INTEGRAL_TYPE_P (type))
11704 fold_overflow_warning (("assuming signed overflow does not occur "
11705 "when distributing negation across "
11706 "division"),
11707 WARN_STRICT_OVERFLOW_MISC);
11708 return fold_build2_loc (loc, code, type,
11709 fold_convert_loc (loc, type,
11710 negate_expr (arg0)),
11711 fold_convert_loc (loc, type,
11712 TREE_OPERAND (arg1, 0)));
11715 /* If arg0 is a multiple of arg1, then rewrite to the fastest div
11716 operation, EXACT_DIV_EXPR.
11718 Note that only CEIL_DIV_EXPR and FLOOR_DIV_EXPR are rewritten now.
11719 At one time others generated faster code, but it's not clear whether they
11720 still do after the last round of changes to the DIV code in expmed.c. */
11721 if ((code == CEIL_DIV_EXPR || code == FLOOR_DIV_EXPR)
11722 && multiple_of_p (type, arg0, arg1))
11723 return fold_build2_loc (loc, EXACT_DIV_EXPR, type, arg0, arg1);
11725 strict_overflow_p = false;
11726 if (TREE_CODE (arg1) == INTEGER_CST
11727 && 0 != (tem = extract_muldiv (op0, arg1, code, NULL_TREE,
11728 &strict_overflow_p)))
11730 if (strict_overflow_p)
11731 fold_overflow_warning (("assuming signed overflow does not occur "
11732 "when simplifying division"),
11733 WARN_STRICT_OVERFLOW_MISC);
11734 return fold_convert_loc (loc, type, tem);
11737 return NULL_TREE;
11739 case CEIL_MOD_EXPR:
11740 case FLOOR_MOD_EXPR:
11741 case ROUND_MOD_EXPR:
11742 case TRUNC_MOD_EXPR:
11743 /* X % 1 is always zero, but be sure to preserve any side
11744 effects in X. */
11745 if (integer_onep (arg1))
11746 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
11748 /* For X % 0, return X % 0 unchanged so that we get the
11749 proper warnings and errors. */
11750 if (integer_zerop (arg1))
11751 return NULL_TREE;
11753 /* 0 % X is always zero, but be sure to preserve any side
11754 effects in X. Place this after checking for X == 0. */
11755 if (integer_zerop (arg0))
11756 return omit_one_operand_loc (loc, type, integer_zero_node, arg1);
11758 /* X % -1 is zero. */
11759 if (!TYPE_UNSIGNED (type)
11760 && TREE_CODE (arg1) == INTEGER_CST
11761 && TREE_INT_CST_LOW (arg1) == (unsigned HOST_WIDE_INT) -1
11762 && TREE_INT_CST_HIGH (arg1) == -1)
11763 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
11765 /* X % -C is the same as X % C. */
11766 if (code == TRUNC_MOD_EXPR
11767 && !TYPE_UNSIGNED (type)
11768 && TREE_CODE (arg1) == INTEGER_CST
11769 && !TREE_OVERFLOW (arg1)
11770 && TREE_INT_CST_HIGH (arg1) < 0
11771 && !TYPE_OVERFLOW_TRAPS (type)
11772 /* Avoid this transformation if C is INT_MIN, i.e. C == -C. */
11773 && !sign_bit_p (arg1, arg1))
11774 return fold_build2_loc (loc, code, type,
11775 fold_convert_loc (loc, type, arg0),
11776 fold_convert_loc (loc, type,
11777 negate_expr (arg1)));
11779 /* X % -Y is the same as X % Y. */
11780 if (code == TRUNC_MOD_EXPR
11781 && !TYPE_UNSIGNED (type)
11782 && TREE_CODE (arg1) == NEGATE_EXPR
11783 && !TYPE_OVERFLOW_TRAPS (type))
11784 return fold_build2_loc (loc, code, type, fold_convert_loc (loc, type, arg0),
11785 fold_convert_loc (loc, type,
11786 TREE_OPERAND (arg1, 0)));
11788 strict_overflow_p = false;
11789 if (TREE_CODE (arg1) == INTEGER_CST
11790 && 0 != (tem = extract_muldiv (op0, arg1, code, NULL_TREE,
11791 &strict_overflow_p)))
11793 if (strict_overflow_p)
11794 fold_overflow_warning (("assuming signed overflow does not occur "
11795 "when simplifying modulus"),
11796 WARN_STRICT_OVERFLOW_MISC);
11797 return fold_convert_loc (loc, type, tem);
11800 /* Optimize TRUNC_MOD_EXPR by a power of two into a BIT_AND_EXPR,
11801 i.e. "X % C" into "X & (C - 1)", if X and C are positive. */
11802 if ((code == TRUNC_MOD_EXPR || code == FLOOR_MOD_EXPR)
11803 && (TYPE_UNSIGNED (type)
11804 || tree_expr_nonnegative_warnv_p (op0, &strict_overflow_p)))
11806 tree c = arg1;
11807 /* Also optimize A % (C << N) where C is a power of 2,
11808 to A & ((C << N) - 1). */
11809 if (TREE_CODE (arg1) == LSHIFT_EXPR)
11810 c = TREE_OPERAND (arg1, 0);
11812 if (integer_pow2p (c) && tree_int_cst_sgn (c) > 0)
11814 tree mask
11815 = fold_build2_loc (loc, MINUS_EXPR, TREE_TYPE (arg1), arg1,
11816 build_int_cst (TREE_TYPE (arg1), 1));
11817 if (strict_overflow_p)
11818 fold_overflow_warning (("assuming signed overflow does not "
11819 "occur when simplifying "
11820 "X % (power of two)"),
11821 WARN_STRICT_OVERFLOW_MISC);
11822 return fold_build2_loc (loc, BIT_AND_EXPR, type,
11823 fold_convert_loc (loc, type, arg0),
11824 fold_convert_loc (loc, type, mask));
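/* Worked example (editor's sketch): for unsigned x, x % 8 becomes
   x & 7, and x % (4 << n) becomes x & ((4 << n) - 1).  */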
11828 return NULL_TREE;
11830 case LROTATE_EXPR:
11831 case RROTATE_EXPR:
11832 if (integer_all_onesp (arg0))
11833 return omit_one_operand_loc (loc, type, arg0, arg1);
11834 goto shift;
11836 case RSHIFT_EXPR:
11837 /* Optimize -1 >> x for arithmetic right shifts. */
11838 if (integer_all_onesp (arg0) && !TYPE_UNSIGNED (type)
11839 && tree_expr_nonnegative_p (arg1))
11840 return omit_one_operand_loc (loc, type, arg0, arg1);
11841 /* ... fall through ... */
11843 case LSHIFT_EXPR:
11844 shift:
11845 if (integer_zerop (arg1))
11846 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
11847 if (integer_zerop (arg0))
11848 return omit_one_operand_loc (loc, type, arg0, arg1);
11850 /* Since a negative shift count is not well-defined,
11851 don't try to compute it in the compiler. */
11852 if (TREE_CODE (arg1) == INTEGER_CST && tree_int_cst_sgn (arg1) < 0)
11853 return NULL_TREE;
11855 /* Turn (a OP c1) OP c2 into a OP (c1+c2). */
11856 if (TREE_CODE (op0) == code && host_integerp (arg1, false)
11857 && TREE_INT_CST_LOW (arg1) < TYPE_PRECISION (type)
11858 && host_integerp (TREE_OPERAND (arg0, 1), false)
11859 && TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1)) < TYPE_PRECISION (type))
11861 HOST_WIDE_INT low = (TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1))
11862 + TREE_INT_CST_LOW (arg1));
11864 /* Deal with a OP (c1 + c2) being undefined but (a OP c1) OP c2
11865 being well defined. */
11866 if (low >= TYPE_PRECISION (type))
11868 if (code == LROTATE_EXPR || code == RROTATE_EXPR)
11869 low = low % TYPE_PRECISION (type);
11870 else if (TYPE_UNSIGNED (type) || code == LSHIFT_EXPR)
11871 return omit_one_operand_loc (loc, type, build_int_cst (type, 0),
11872 TREE_OPERAND (arg0, 0));
11873 else
11874 low = TYPE_PRECISION (type) - 1;
11877 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0),
11878 build_int_cst (type, low));
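/* Worked example (editor's sketch): (x << 3) << 5 becomes x << 8;
   if the combined count reaches the precision, e.g. (x << 20) << 20
   for a 32-bit x, the whole expression folds to the constant 0.  */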
11881 /* Transform (x >> c) << c into x & (-1<<c), or transform (x << c) >> c
11882 into x & ((unsigned)-1 >> c) for unsigned types. */
11883 if (((code == LSHIFT_EXPR && TREE_CODE (arg0) == RSHIFT_EXPR)
11884 || (TYPE_UNSIGNED (type)
11885 && code == RSHIFT_EXPR && TREE_CODE (arg0) == LSHIFT_EXPR))
11886 && host_integerp (arg1, false)
11887 && TREE_INT_CST_LOW (arg1) < TYPE_PRECISION (type)
11888 && host_integerp (TREE_OPERAND (arg0, 1), false)
11889 && TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1)) < TYPE_PRECISION (type))
11891 HOST_WIDE_INT low0 = TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1));
11892 HOST_WIDE_INT low1 = TREE_INT_CST_LOW (arg1);
11893 tree lshift;
11894 tree arg00;
11896 if (low0 == low1)
11898 arg00 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
11900 lshift = build_int_cst (type, -1);
11901 lshift = int_const_binop (code, lshift, arg1);
11903 return fold_build2_loc (loc, BIT_AND_EXPR, type, arg00, lshift);
11907 /* Rewrite an LROTATE_EXPR by a constant into an
11908 RROTATE_EXPR by a new constant. */
11909 if (code == LROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST)
11911 tree tem = build_int_cst (TREE_TYPE (arg1),
11912 TYPE_PRECISION (type));
11913 tem = const_binop (MINUS_EXPR, tem, arg1);
11914 return fold_build2_loc (loc, RROTATE_EXPR, type, op0, tem);
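/* Worked example (editor's sketch): a rotate-left of a 32-bit value
   by 8 is rewritten as a rotate-right by 24, so later passes only
   ever see one rotate direction for constant counts.  */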
11917 /* If we have a rotate of a bit operation with the rotate count and
11918 the second operand of the bit operation both constant,
11919 permute the two operations. */
11920 if (code == RROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST
11921 && (TREE_CODE (arg0) == BIT_AND_EXPR
11922 || TREE_CODE (arg0) == BIT_IOR_EXPR
11923 || TREE_CODE (arg0) == BIT_XOR_EXPR)
11924 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
11925 return fold_build2_loc (loc, TREE_CODE (arg0), type,
11926 fold_build2_loc (loc, code, type,
11927 TREE_OPERAND (arg0, 0), arg1),
11928 fold_build2_loc (loc, code, type,
11929 TREE_OPERAND (arg0, 1), arg1));
11931 /* Two consecutive rotates adding up to the precision of the
11932 type can be ignored. */
11933 if (code == RROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST
11934 && TREE_CODE (arg0) == RROTATE_EXPR
11935 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
11936 && TREE_INT_CST_HIGH (arg1) == 0
11937 && TREE_INT_CST_HIGH (TREE_OPERAND (arg0, 1)) == 0
11938 && ((TREE_INT_CST_LOW (arg1)
11939 + TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1)))
11940 == (unsigned int) TYPE_PRECISION (type)))
11941 return TREE_OPERAND (arg0, 0);
11943 /* Fold (X & C2) << C1 into (X << C1) & (C2 << C1)
11944 (X & C2) >> C1 into (X >> C1) & (C2 >> C1)
11945 if the latter can be further optimized. */
11946 if ((code == LSHIFT_EXPR || code == RSHIFT_EXPR)
11947 && TREE_CODE (arg0) == BIT_AND_EXPR
11948 && TREE_CODE (arg1) == INTEGER_CST
11949 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
11951 tree mask = fold_build2_loc (loc, code, type,
11952 fold_convert_loc (loc, type,
11953 TREE_OPERAND (arg0, 1)),
11954 arg1);
11955 tree shift = fold_build2_loc (loc, code, type,
11956 fold_convert_loc (loc, type,
11957 TREE_OPERAND (arg0, 0)),
11958 arg1);
11959 tem = fold_binary_loc (loc, BIT_AND_EXPR, type, shift, mask);
11960 if (tem)
11961 return tem;
11964 return NULL_TREE;
11966 case MIN_EXPR:
11967 if (operand_equal_p (arg0, arg1, 0))
11968 return omit_one_operand_loc (loc, type, arg0, arg1);
11969 if (INTEGRAL_TYPE_P (type)
11970 && operand_equal_p (arg1, TYPE_MIN_VALUE (type), OEP_ONLY_CONST))
11971 return omit_one_operand_loc (loc, type, arg1, arg0);
11972 tem = fold_minmax (loc, MIN_EXPR, type, arg0, arg1);
11973 if (tem)
11974 return tem;
11975 goto associate;
11977 case MAX_EXPR:
11978 if (operand_equal_p (arg0, arg1, 0))
11979 return omit_one_operand_loc (loc, type, arg0, arg1);
11980 if (INTEGRAL_TYPE_P (type)
11981 && TYPE_MAX_VALUE (type)
11982 && operand_equal_p (arg1, TYPE_MAX_VALUE (type), OEP_ONLY_CONST))
11983 return omit_one_operand_loc (loc, type, arg1, arg0);
11984 tem = fold_minmax (loc, MAX_EXPR, type, arg0, arg1);
11985 if (tem)
11986 return tem;
11987 goto associate;
11989 case TRUTH_ANDIF_EXPR:
11990 /* Note that the operands of this must be ints
11991 and their values must be 0 or 1.
11992 ("true" is a fixed value perhaps depending on the language.) */
11993 /* If first arg is constant zero, return it. */
11994 if (integer_zerop (arg0))
11995 return fold_convert_loc (loc, type, arg0);
11996 case TRUTH_AND_EXPR:
11997 /* If either arg is constant true, drop it. */
11998 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
11999 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
12000 if (TREE_CODE (arg1) == INTEGER_CST && ! integer_zerop (arg1)
12001 /* Preserve sequence points. */
12002 && (code != TRUTH_ANDIF_EXPR || ! TREE_SIDE_EFFECTS (arg0)))
12003 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
12004 /* If second arg is constant zero, result is zero, but first arg
12005 must be evaluated. */
12006 if (integer_zerop (arg1))
12007 return omit_one_operand_loc (loc, type, arg1, arg0);
12008 /* Likewise for first arg, but note that only the TRUTH_AND_EXPR
12009 case will be handled here. */
12010 if (integer_zerop (arg0))
12011 return omit_one_operand_loc (loc, type, arg0, arg1);
12013 /* !X && X is always false. */
12014 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
12015 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
12016 return omit_one_operand_loc (loc, type, integer_zero_node, arg1);
12017 /* X && !X is always false. */
12018 if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
12019 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
12020 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
12022 /* A < X && A + 1 > Y ==> A < X && A >= Y. Normally A + 1 > Y
12023 means A >= Y && A != MAX, but in this case we know that
12024 A < X <= MAX. */
12026 if (!TREE_SIDE_EFFECTS (arg0)
12027 && !TREE_SIDE_EFFECTS (arg1))
12029 tem = fold_to_nonsharp_ineq_using_bound (loc, arg0, arg1);
12030 if (tem && !operand_equal_p (tem, arg0, 0))
12031 return fold_build2_loc (loc, code, type, tem, arg1);
12033 tem = fold_to_nonsharp_ineq_using_bound (loc, arg1, arg0);
12034 if (tem && !operand_equal_p (tem, arg1, 0))
12035 return fold_build2_loc (loc, code, type, arg0, tem);
12038 if ((tem = fold_truth_andor (loc, code, type, arg0, arg1, op0, op1))
12039 != NULL_TREE)
12040 return tem;
12042 return NULL_TREE;
12044 case TRUTH_ORIF_EXPR:
12045 /* Note that the operands of this must be ints
12046 and their values must be 0 or 1.
12047 ("true" is a fixed value perhaps depending on the language.) */
12048 /* If first arg is constant true, return it. */
12049 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
12050 return fold_convert_loc (loc, type, arg0);
12051 case TRUTH_OR_EXPR:
12052 /* If either arg is constant zero, drop it. */
12053 if (TREE_CODE (arg0) == INTEGER_CST && integer_zerop (arg0))
12054 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
12055 if (TREE_CODE (arg1) == INTEGER_CST && integer_zerop (arg1)
12056 /* Preserve sequence points. */
12057 && (code != TRUTH_ORIF_EXPR || ! TREE_SIDE_EFFECTS (arg0)))
12058 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
12059 /* If second arg is constant true, result is true, but we must
12060 evaluate first arg. */
12061 if (TREE_CODE (arg1) == INTEGER_CST && ! integer_zerop (arg1))
12062 return omit_one_operand_loc (loc, type, arg1, arg0);
12063 /* Likewise for first arg, but note this only occurs here for
12064 TRUTH_OR_EXPR. */
12065 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
12066 return omit_one_operand_loc (loc, type, arg0, arg1);
12068 /* !X || X is always true. */
12069 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
12070 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
12071 return omit_one_operand_loc (loc, type, integer_one_node, arg1);
12072 /* X || !X is always true. */
12073 if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
12074 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
12075 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
12077 /* (X && !Y) || (!X && Y) is X ^ Y */
12078 if (TREE_CODE (arg0) == TRUTH_AND_EXPR
12079 && TREE_CODE (arg1) == TRUTH_AND_EXPR)
12081 tree a0, a1, l0, l1, n0, n1;
12083 a0 = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 0));
12084 a1 = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 1));
12086 l0 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
12087 l1 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
12089 n0 = fold_build1_loc (loc, TRUTH_NOT_EXPR, type, l0);
12090 n1 = fold_build1_loc (loc, TRUTH_NOT_EXPR, type, l1);
12092 if ((operand_equal_p (n0, a0, 0)
12093 && operand_equal_p (n1, a1, 0))
12094 || (operand_equal_p (n0, a1, 0)
12095 && operand_equal_p (n1, a0, 0)))
12096 return fold_build2_loc (loc, TRUTH_XOR_EXPR, type, l0, n1);
12099 if ((tem = fold_truth_andor (loc, code, type, arg0, arg1, op0, op1))
12100 != NULL_TREE)
12101 return tem;
12103 return NULL_TREE;
12105 case TRUTH_XOR_EXPR:
12106 /* If the second arg is constant zero, drop it. */
12107 if (integer_zerop (arg1))
12108 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
12109 /* If the second arg is constant true, this is a logical inversion. */
12110 if (integer_onep (arg1))
12112 /* Only call invert_truthvalue if operand is a truth value. */
12113 if (TREE_CODE (TREE_TYPE (arg0)) != BOOLEAN_TYPE)
12114 tem = fold_build1_loc (loc, TRUTH_NOT_EXPR, TREE_TYPE (arg0), arg0);
12115 else
12116 tem = invert_truthvalue_loc (loc, arg0);
12117 return non_lvalue_loc (loc, fold_convert_loc (loc, type, tem));
12119 /* Identical arguments cancel to zero. */
12120 if (operand_equal_p (arg0, arg1, 0))
12121 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
12123 /* !X ^ X is always true. */
12124 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
12125 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
12126 return omit_one_operand_loc (loc, type, integer_one_node, arg1);
12128 /* X ^ !X is always true. */
12129 if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
12130 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
12131 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
12133 return NULL_TREE;
12135 case EQ_EXPR:
12136 case NE_EXPR:
12137 STRIP_NOPS (arg0);
12138 STRIP_NOPS (arg1);
12140 tem = fold_comparison (loc, code, type, op0, op1);
12141 if (tem != NULL_TREE)
12142 return tem;
12144 /* bool_var != 0 becomes bool_var. */
12145 if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_zerop (arg1)
12146 && code == NE_EXPR)
12147 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
12149 /* bool_var == 1 becomes bool_var. */
12150 if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_onep (arg1)
12151 && code == EQ_EXPR)
12152 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
12154 /* bool_var != 1 becomes !bool_var. */
12155 if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_onep (arg1)
12156 && code == NE_EXPR)
12157 return fold_convert_loc (loc, type,
12158 fold_build1_loc (loc, TRUTH_NOT_EXPR,
12159 TREE_TYPE (arg0), arg0));
12161 /* bool_var == 0 becomes !bool_var. */
12162 if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_zerop (arg1)
12163 && code == EQ_EXPR)
12164 return fold_convert_loc (loc, type,
12165 fold_build1_loc (loc, TRUTH_NOT_EXPR,
12166 TREE_TYPE (arg0), arg0));
12168 /* !exp != 0 becomes !exp. */
12169 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR && integer_zerop (arg1)
12170 && code == NE_EXPR)
12171 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
12173 /* If this is an equality comparison of the address of two non-weak,
12174 unaliased symbols, neither of which is extern (since we do not
12175 have access to attributes for externs), then we know the result. */
12176 if (TREE_CODE (arg0) == ADDR_EXPR
12177 && VAR_OR_FUNCTION_DECL_P (TREE_OPERAND (arg0, 0))
12178 && ! DECL_WEAK (TREE_OPERAND (arg0, 0))
12179 && ! lookup_attribute ("alias",
12180 DECL_ATTRIBUTES (TREE_OPERAND (arg0, 0)))
12181 && ! DECL_EXTERNAL (TREE_OPERAND (arg0, 0))
12182 && TREE_CODE (arg1) == ADDR_EXPR
12183 && VAR_OR_FUNCTION_DECL_P (TREE_OPERAND (arg1, 0))
12184 && ! DECL_WEAK (TREE_OPERAND (arg1, 0))
12185 && ! lookup_attribute ("alias",
12186 DECL_ATTRIBUTES (TREE_OPERAND (arg1, 0)))
12187 && ! DECL_EXTERNAL (TREE_OPERAND (arg1, 0)))
12189 /* We know that we're looking at the address of two
12190 non-weak, unaliased, static _DECL nodes.
12192 It is both wasteful and incorrect to call operand_equal_p
12193 to compare the two ADDR_EXPR nodes. It is wasteful in that
12194 all we need to do is test pointer equality for the arguments
12195 to the two ADDR_EXPR nodes. It is incorrect to use
12196 operand_equal_p as that function is NOT equivalent to a
12197 C equality test. It can in fact return false for two
12198 objects which would test as equal using the C equality
12199 operator. */
12200 bool equal = TREE_OPERAND (arg0, 0) == TREE_OPERAND (arg1, 0);
12201 return constant_boolean_node (equal
12202 ? code == EQ_EXPR : code != EQ_EXPR,
12203 type);
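/* Illustrative example (editor's addition, not part of the
   original source): given "static int x, y;" in one translation
   unit, the fold above reduces "&x == &y" to 0 and "&x == &x"
   to 1, using pointer identity of the two _DECL nodes.  */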
12206 /* If this is an EQ or NE comparison of a constant with a PLUS_EXPR or
12207 a MINUS_EXPR of a constant, we can convert it into a comparison with
12208 a revised constant as long as no overflow occurs. */
12209 if (TREE_CODE (arg1) == INTEGER_CST
12210 && (TREE_CODE (arg0) == PLUS_EXPR
12211 || TREE_CODE (arg0) == MINUS_EXPR)
12212 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
12213 && 0 != (tem = const_binop (TREE_CODE (arg0) == PLUS_EXPR
12214 ? MINUS_EXPR : PLUS_EXPR,
12215 fold_convert_loc (loc, TREE_TYPE (arg0),
12216 arg1),
12217 TREE_OPERAND (arg0, 1)))
12218 && !TREE_OVERFLOW (tem))
12219 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0), tem);
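/* Illustrative example (editor's addition, not part of the
   original source): provided the adjusted constant does not
   overflow, the fold above rewrites
       x + 3 == 7  =>  x == 4
       x - 5 != 2  =>  x != 7.  */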
12221 /* Similarly for a NEGATE_EXPR. */
12222 if (TREE_CODE (arg0) == NEGATE_EXPR
12223 && TREE_CODE (arg1) == INTEGER_CST
12224 && 0 != (tem = negate_expr (fold_convert_loc (loc, TREE_TYPE (arg0),
12225 arg1)))
12226 && TREE_CODE (tem) == INTEGER_CST
12227 && !TREE_OVERFLOW (tem))
12228 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0), tem);
12230 /* Similarly for a BIT_XOR_EXPR; X ^ C1 == C2 is X == (C1 ^ C2). */
12231 if (TREE_CODE (arg0) == BIT_XOR_EXPR
12232 && TREE_CODE (arg1) == INTEGER_CST
12233 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
12234 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0),
12235 fold_build2_loc (loc, BIT_XOR_EXPR, TREE_TYPE (arg0),
12236 fold_convert_loc (loc,
12237 TREE_TYPE (arg0),
12238 arg1),
12239 TREE_OPERAND (arg0, 1)));
12241 /* Transform comparisons of the form X +- Y CMP X to Y CMP 0. */
12242 if ((TREE_CODE (arg0) == PLUS_EXPR
12243 || TREE_CODE (arg0) == POINTER_PLUS_EXPR
12244 || TREE_CODE (arg0) == MINUS_EXPR)
12245 && operand_equal_p (tree_strip_nop_conversions (TREE_OPERAND (arg0,
12246 0)),
12247 arg1, 0)
12248 && (INTEGRAL_TYPE_P (TREE_TYPE (arg0))
12249 || POINTER_TYPE_P (TREE_TYPE (arg0))))
12251 tree val = TREE_OPERAND (arg0, 1);
12252 return omit_two_operands_loc (loc, type,
12253 fold_build2_loc (loc, code, type,
12254 val,
12255 build_int_cst (TREE_TYPE (val),
12256 0)),
12257 TREE_OPERAND (arg0, 0), arg1);
12260 /* Transform comparisons of the form C - X CMP X if C % 2 == 1. */
12261 if (TREE_CODE (arg0) == MINUS_EXPR
12262 && TREE_CODE (TREE_OPERAND (arg0, 0)) == INTEGER_CST
12263 && operand_equal_p (tree_strip_nop_conversions (TREE_OPERAND (arg0,
12264 1)),
12265 arg1, 0)
12266 && (TREE_INT_CST_LOW (TREE_OPERAND (arg0, 0)) & 1) == 1)
12268 return omit_two_operands_loc (loc, type,
12269 code == NE_EXPR
12270 ? boolean_true_node : boolean_false_node,
12271 TREE_OPERAND (arg0, 1), arg1);
12274 /* If we have X - Y == 0, we can convert that to X == Y and similarly
12275 for !=. Don't do this for ordered comparisons due to overflow. */
12276 if (TREE_CODE (arg0) == MINUS_EXPR
12277 && integer_zerop (arg1))
12278 return fold_build2_loc (loc, code, type,
12279 TREE_OPERAND (arg0, 0), TREE_OPERAND (arg0, 1));
12281 /* Convert ABS_EXPR<x> == 0 or ABS_EXPR<x> != 0 to x == 0 or x != 0. */
12282 if (TREE_CODE (arg0) == ABS_EXPR
12283 && (integer_zerop (arg1) || real_zerop (arg1)))
12284 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0), arg1);
12286 /* If this is an EQ or NE comparison with zero and ARG0 is
12287 (1 << foo) & bar, convert it to (bar >> foo) & 1. Both require
12288 two operations, but the latter can be done in one less insn
12289 on machines that have only two-operand insns or on which a
12290 constant cannot be the first operand. */
12291 if (TREE_CODE (arg0) == BIT_AND_EXPR
12292 && integer_zerop (arg1))
12294 tree arg00 = TREE_OPERAND (arg0, 0);
12295 tree arg01 = TREE_OPERAND (arg0, 1);
12296 if (TREE_CODE (arg00) == LSHIFT_EXPR
12297 && integer_onep (TREE_OPERAND (arg00, 0)))
12299 tree tem = fold_build2_loc (loc, RSHIFT_EXPR, TREE_TYPE (arg00),
12300 arg01, TREE_OPERAND (arg00, 1));
12301 tem = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg0), tem,
12302 build_int_cst (TREE_TYPE (arg0), 1));
12303 return fold_build2_loc (loc, code, type,
12304 fold_convert_loc (loc, TREE_TYPE (arg1), tem),
12305 arg1);
12307 else if (TREE_CODE (arg01) == LSHIFT_EXPR
12308 && integer_onep (TREE_OPERAND (arg01, 0)))
12310 tree tem = fold_build2_loc (loc, RSHIFT_EXPR, TREE_TYPE (arg01),
12311 arg00, TREE_OPERAND (arg01, 1));
12312 tem = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg0), tem,
12313 build_int_cst (TREE_TYPE (arg0), 1));
12314 return fold_build2_loc (loc, code, type,
12315 fold_convert_loc (loc, TREE_TYPE (arg1), tem),
12316 arg1);
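/* Illustrative example (editor's addition, not part of the
   original source): both branches above rewrite, e.g.,
       ((1 << n) & x) == 0  =>  ((x >> n) & 1) == 0,
   trading a variable-count left shift of the constant 1 for a
   right shift of the variable operand.  */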
12320 /* If this is an NE or EQ comparison of zero against the result of a
12321 signed MOD operation whose second operand is a power of 2, make
12322 the MOD operation unsigned since it is simpler and equivalent. */
12323 if (integer_zerop (arg1)
12324 && !TYPE_UNSIGNED (TREE_TYPE (arg0))
12325 && (TREE_CODE (arg0) == TRUNC_MOD_EXPR
12326 || TREE_CODE (arg0) == CEIL_MOD_EXPR
12327 || TREE_CODE (arg0) == FLOOR_MOD_EXPR
12328 || TREE_CODE (arg0) == ROUND_MOD_EXPR)
12329 && integer_pow2p (TREE_OPERAND (arg0, 1)))
12331 tree newtype = unsigned_type_for (TREE_TYPE (arg0));
12332 tree newmod = fold_build2_loc (loc, TREE_CODE (arg0), newtype,
12333 fold_convert_loc (loc, newtype,
12334 TREE_OPERAND (arg0, 0)),
12335 fold_convert_loc (loc, newtype,
12336 TREE_OPERAND (arg0, 1)));
12338 return fold_build2_loc (loc, code, type, newmod,
12339 fold_convert_loc (loc, newtype, arg1));
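/* Illustrative example (editor's addition, not part of the
   original source): for signed "int x", the fold above rewrites
       x % 8 == 0  =>  (unsigned int) x % 8 == 0,
   which the expander can then implement as a simple mask test.  */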
12342 /* Fold ((X >> C1) & C2) == 0 and ((X >> C1) & C2) != 0 where
12343 C1 is a valid shift constant, and C2 is a power of two, i.e.
12344 a single bit. */
12345 if (TREE_CODE (arg0) == BIT_AND_EXPR
12346 && TREE_CODE (TREE_OPERAND (arg0, 0)) == RSHIFT_EXPR
12347 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1))
12348 == INTEGER_CST
12349 && integer_pow2p (TREE_OPERAND (arg0, 1))
12350 && integer_zerop (arg1))
12352 tree itype = TREE_TYPE (arg0);
12353 unsigned HOST_WIDE_INT prec = TYPE_PRECISION (itype);
12354 tree arg001 = TREE_OPERAND (TREE_OPERAND (arg0, 0), 1);
12356 /* Check for a valid shift count. */
12357 if (TREE_INT_CST_HIGH (arg001) == 0
12358 && TREE_INT_CST_LOW (arg001) < prec)
12360 tree arg01 = TREE_OPERAND (arg0, 1);
12361 tree arg000 = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
12362 unsigned HOST_WIDE_INT log2 = tree_log2 (arg01);
12363 /* If (C2 << C1) doesn't overflow, then ((X >> C1) & C2) != 0
12364 can be rewritten as (X & (C2 << C1)) != 0. */
12365 if ((log2 + TREE_INT_CST_LOW (arg001)) < prec)
12367 tem = fold_build2_loc (loc, LSHIFT_EXPR, itype, arg01, arg001);
12368 tem = fold_build2_loc (loc, BIT_AND_EXPR, itype, arg000, tem);
12369 return fold_build2_loc (loc, code, type, tem,
12370 fold_convert_loc (loc, itype, arg1));
12372 /* Otherwise, for signed (arithmetic) shifts,
12373 ((X >> C1) & C2) != 0 is rewritten as X < 0, and
12374 ((X >> C1) & C2) == 0 is rewritten as X >= 0. */
12375 else if (!TYPE_UNSIGNED (itype))
12376 return fold_build2_loc (loc, code == EQ_EXPR ? GE_EXPR : LT_EXPR, type,
12377 arg000, build_int_cst (itype, 0));
12378 /* Otherwise, for unsigned (logical) shifts,
12379 ((X >> C1) & C2) != 0 is rewritten as (X,false), and
12380 ((X >> C1) & C2) == 0 is rewritten as (X,true). */
12381 else
12382 return omit_one_operand_loc (loc, type,
12383 code == EQ_EXPR ? integer_one_node
12384 : integer_zero_node,
12385 arg000);
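/* Illustrative examples (editor's addition, not part of the
   original source), assuming 32-bit int: the three branches
   above rewrite
       ((x >> 3) & 16) != 0   =>  (x & (16 << 3)) != 0
       ((x >> 28) & 16) != 0  =>  x < 0     (signed x)
       ((x >> 28) & 16) == 0  =>  (x, 1)    (unsigned x).  */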
12389 /* If we have (A & C) == C where C is a power of 2, convert this into
12390 (A & C) != 0. Similarly for NE_EXPR. */
12391 if (TREE_CODE (arg0) == BIT_AND_EXPR
12392 && integer_pow2p (TREE_OPERAND (arg0, 1))
12393 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
12394 return fold_build2_loc (loc, code == EQ_EXPR ? NE_EXPR : EQ_EXPR, type,
12395 arg0, fold_convert_loc (loc, TREE_TYPE (arg0),
12396 integer_zero_node));
12398 /* If we have (A & C) != 0 or (A & C) == 0 and C is the sign
12399 bit, then fold the expression into A < 0 or A >= 0. */
12400 tem = fold_single_bit_test_into_sign_test (loc, code, arg0, arg1, type);
12401 if (tem)
12402 return tem;
12404 /* If we have (A & C) == D where D & ~C != 0, convert this into 0.
12405 Similarly for NE_EXPR. */
12406 if (TREE_CODE (arg0) == BIT_AND_EXPR
12407 && TREE_CODE (arg1) == INTEGER_CST
12408 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
12410 tree notc = fold_build1_loc (loc, BIT_NOT_EXPR,
12411 TREE_TYPE (TREE_OPERAND (arg0, 1)),
12412 TREE_OPERAND (arg0, 1));
12413 tree dandnotc
12414 = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg0),
12415 fold_convert_loc (loc, TREE_TYPE (arg0), arg1),
12416 notc);
12417 tree rslt = code == EQ_EXPR ? integer_zero_node : integer_one_node;
12418 if (integer_nonzerop (dandnotc))
12419 return omit_one_operand_loc (loc, type, rslt, arg0);
12422 /* If we have (A | C) == D where C & ~D != 0, convert this into 0.
12423 Similarly for NE_EXPR. */
12424 if (TREE_CODE (arg0) == BIT_IOR_EXPR
12425 && TREE_CODE (arg1) == INTEGER_CST
12426 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
12428 tree notd = fold_build1_loc (loc, BIT_NOT_EXPR, TREE_TYPE (arg1), arg1);
12429 tree candnotd
12430 = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg0),
12431 TREE_OPERAND (arg0, 1),
12432 fold_convert_loc (loc, TREE_TYPE (arg0), notd));
12433 tree rslt = code == EQ_EXPR ? integer_zero_node : integer_one_node;
12434 if (integer_nonzerop (candnotd))
12435 return omit_one_operand_loc (loc, type, rslt, arg0);
12438 /* If this is a comparison of a field, we may be able to simplify it. */
12439 if ((TREE_CODE (arg0) == COMPONENT_REF
12440 || TREE_CODE (arg0) == BIT_FIELD_REF)
12441 /* Handle the constant case even without -O
12442 to make sure the warnings are given. */
12443 && (optimize || TREE_CODE (arg1) == INTEGER_CST))
12445 t1 = optimize_bit_field_compare (loc, code, type, arg0, arg1);
12446 if (t1)
12447 return t1;
12450 /* Optimize comparisons of strlen vs zero to a compare of the
12451 first character of the string vs zero. To wit,
12452 strlen(ptr) == 0 => *ptr == 0
12453 strlen(ptr) != 0 => *ptr != 0
12454 Other cases should reduce to one of these two (or a constant)
12455 due to the return value of strlen being unsigned. */
12456 if (TREE_CODE (arg0) == CALL_EXPR
12457 && integer_zerop (arg1))
12459 tree fndecl = get_callee_fndecl (arg0);
12461 if (fndecl
12462 && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
12463 && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_STRLEN
12464 && call_expr_nargs (arg0) == 1
12465 && TREE_CODE (TREE_TYPE (CALL_EXPR_ARG (arg0, 0))) == POINTER_TYPE)
12467 tree iref = build_fold_indirect_ref_loc (loc,
12468 CALL_EXPR_ARG (arg0, 0));
12469 return fold_build2_loc (loc, code, type, iref,
12470 build_int_cst (TREE_TYPE (iref), 0));
12474 /* Fold (X >> C) != 0 into X < 0 if C is one less than the width
12475 of X. Similarly fold (X >> C) == 0 into X >= 0. */
12476 if (TREE_CODE (arg0) == RSHIFT_EXPR
12477 && integer_zerop (arg1)
12478 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
12480 tree arg00 = TREE_OPERAND (arg0, 0);
12481 tree arg01 = TREE_OPERAND (arg0, 1);
12482 tree itype = TREE_TYPE (arg00);
12483 if (TREE_INT_CST_HIGH (arg01) == 0
12484 && TREE_INT_CST_LOW (arg01)
12485 == (unsigned HOST_WIDE_INT) (TYPE_PRECISION (itype) - 1))
12487 if (TYPE_UNSIGNED (itype))
12489 itype = signed_type_for (itype);
12490 arg00 = fold_convert_loc (loc, itype, arg00);
12492 return fold_build2_loc (loc, code == EQ_EXPR ? GE_EXPR : LT_EXPR,
12493 type, arg00, build_int_cst (itype, 0));
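/* Illustrative example (editor's addition, not part of the
   original source), assuming 32-bit int: the fold above rewrites
       (x >> 31) != 0  =>  x < 0
       (x >> 31) == 0  =>  x >= 0,
   converting X to the corresponding signed type first when it
   is unsigned.  */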
12497 /* (X ^ Y) == 0 becomes X == Y, and (X ^ Y) != 0 becomes X != Y. */
12498 if (integer_zerop (arg1)
12499 && TREE_CODE (arg0) == BIT_XOR_EXPR)
12500 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0),
12501 TREE_OPERAND (arg0, 1));
12503 /* (X ^ Y) == Y becomes X == 0. We know that Y has no side-effects. */
12504 if (TREE_CODE (arg0) == BIT_XOR_EXPR
12505 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
12506 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0),
12507 build_int_cst (TREE_TYPE (arg0), 0));
12508 /* Likewise (X ^ Y) == X becomes Y == 0. X has no side-effects. */
12509 if (TREE_CODE (arg0) == BIT_XOR_EXPR
12510 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
12511 && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
12512 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 1),
12513 build_int_cst (TREE_TYPE (arg0), 0));
12515 /* (X ^ C1) op C2 can be rewritten as X op (C1 ^ C2). */
12516 if (TREE_CODE (arg0) == BIT_XOR_EXPR
12517 && TREE_CODE (arg1) == INTEGER_CST
12518 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
12519 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0),
12520 fold_build2_loc (loc, BIT_XOR_EXPR, TREE_TYPE (arg1),
12521 TREE_OPERAND (arg0, 1), arg1));
12523 /* Fold (~X & C) == 0 into (X & C) != 0 and (~X & C) != 0 into
12524 (X & C) == 0 when C is a single bit. */
12525 if (TREE_CODE (arg0) == BIT_AND_EXPR
12526 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_NOT_EXPR
12527 && integer_zerop (arg1)
12528 && integer_pow2p (TREE_OPERAND (arg0, 1)))
12530 tem = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg0),
12531 TREE_OPERAND (TREE_OPERAND (arg0, 0), 0),
12532 TREE_OPERAND (arg0, 1));
12533 return fold_build2_loc (loc, code == EQ_EXPR ? NE_EXPR : EQ_EXPR,
12534 type, tem,
12535 fold_convert_loc (loc, TREE_TYPE (arg0),
12536 arg1));
12539 /* Fold ((X & C) ^ C) eq/ne 0 into (X & C) ne/eq 0, when the
12540 constant C is a power of two, i.e. a single bit. */
12541 if (TREE_CODE (arg0) == BIT_XOR_EXPR
12542 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_AND_EXPR
12543 && integer_zerop (arg1)
12544 && integer_pow2p (TREE_OPERAND (arg0, 1))
12545 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
12546 TREE_OPERAND (arg0, 1), OEP_ONLY_CONST))
12548 tree arg00 = TREE_OPERAND (arg0, 0);
12549 return fold_build2_loc (loc, code == EQ_EXPR ? NE_EXPR : EQ_EXPR, type,
12550 arg00, build_int_cst (TREE_TYPE (arg00), 0));
12553 /* Likewise, fold ((X ^ C) & C) eq/ne 0 into (X & C) ne/eq 0,
12554 when C is a power of two, i.e. a single bit. */
12555 if (TREE_CODE (arg0) == BIT_AND_EXPR
12556 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_XOR_EXPR
12557 && integer_zerop (arg1)
12558 && integer_pow2p (TREE_OPERAND (arg0, 1))
12559 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
12560 TREE_OPERAND (arg0, 1), OEP_ONLY_CONST))
12562 tree arg000 = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
12563 tem = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg000),
12564 arg000, TREE_OPERAND (arg0, 1));
12565 return fold_build2_loc (loc, code == EQ_EXPR ? NE_EXPR : EQ_EXPR, type,
12566 tem, build_int_cst (TREE_TYPE (tem), 0));
12569 if (integer_zerop (arg1)
12570 && tree_expr_nonzero_p (arg0))
12572 tree res = constant_boolean_node (code == NE_EXPR, type);
12573 return omit_one_operand_loc (loc, type, res, arg0);
12576 /* Fold -X op -Y as X op Y, where op is eq/ne. */
12577 if (TREE_CODE (arg0) == NEGATE_EXPR
12578 && TREE_CODE (arg1) == NEGATE_EXPR)
12579 return fold_build2_loc (loc, code, type,
12580 TREE_OPERAND (arg0, 0),
12581 fold_convert_loc (loc, TREE_TYPE (arg0),
12582 TREE_OPERAND (arg1, 0)));
12584 /* Fold (X & C) op (Y & C) as ((X ^ Y) & C) op 0, and symmetries. */
12585 if (TREE_CODE (arg0) == BIT_AND_EXPR
12586 && TREE_CODE (arg1) == BIT_AND_EXPR)
12588 tree arg00 = TREE_OPERAND (arg0, 0);
12589 tree arg01 = TREE_OPERAND (arg0, 1);
12590 tree arg10 = TREE_OPERAND (arg1, 0);
12591 tree arg11 = TREE_OPERAND (arg1, 1);
12592 tree itype = TREE_TYPE (arg0);
12594 if (operand_equal_p (arg01, arg11, 0))
12595 return fold_build2_loc (loc, code, type,
12596 fold_build2_loc (loc, BIT_AND_EXPR, itype,
12597 fold_build2_loc (loc,
12598 BIT_XOR_EXPR, itype,
12599 arg00, arg10),
12600 arg01),
12601 build_int_cst (itype, 0));
12603 if (operand_equal_p (arg01, arg10, 0))
12604 return fold_build2_loc (loc, code, type,
12605 fold_build2_loc (loc, BIT_AND_EXPR, itype,
12606 fold_build2_loc (loc,
12607 BIT_XOR_EXPR, itype,
12608 arg00, arg11),
12609 arg01),
12610 build_int_cst (itype, 0));
12612 if (operand_equal_p (arg00, arg11, 0))
12613 return fold_build2_loc (loc, code, type,
12614 fold_build2_loc (loc, BIT_AND_EXPR, itype,
12615 fold_build2_loc (loc,
12616 BIT_XOR_EXPR, itype,
12617 arg01, arg10),
12618 arg00),
12619 build_int_cst (itype, 0));
12621 if (operand_equal_p (arg00, arg10, 0))
12622 return fold_build2_loc (loc, code, type,
12623 fold_build2_loc (loc, BIT_AND_EXPR, itype,
12624 fold_build2_loc (loc,
12625 BIT_XOR_EXPR, itype,
12626 arg01, arg11),
12627 arg00),
12628 build_int_cst (itype, 0));
12631 if (TREE_CODE (arg0) == BIT_XOR_EXPR
12632 && TREE_CODE (arg1) == BIT_XOR_EXPR)
12634 tree arg00 = TREE_OPERAND (arg0, 0);
12635 tree arg01 = TREE_OPERAND (arg0, 1);
12636 tree arg10 = TREE_OPERAND (arg1, 0);
12637 tree arg11 = TREE_OPERAND (arg1, 1);
12638 tree itype = TREE_TYPE (arg0);
12640 /* Optimize (X ^ Z) op (Y ^ Z) as X op Y, and symmetries.
12641 operand_equal_p guarantees no side-effects so we don't need
12642 to use omit_one_operand on Z. */
12643 if (operand_equal_p (arg01, arg11, 0))
12644 return fold_build2_loc (loc, code, type, arg00,
12645 fold_convert_loc (loc, TREE_TYPE (arg00),
12646 arg10));
12647 if (operand_equal_p (arg01, arg10, 0))
12648 return fold_build2_loc (loc, code, type, arg00,
12649 fold_convert_loc (loc, TREE_TYPE (arg00),
12650 arg11));
12651 if (operand_equal_p (arg00, arg11, 0))
12652 return fold_build2_loc (loc, code, type, arg01,
12653 fold_convert_loc (loc, TREE_TYPE (arg01),
12654 arg10));
12655 if (operand_equal_p (arg00, arg10, 0))
12656 return fold_build2_loc (loc, code, type, arg01,
12657 fold_convert_loc (loc, TREE_TYPE (arg01),
12658 arg11));
12660 /* Optimize (X ^ C1) op (Y ^ C2) as (X ^ (C1 ^ C2)) op Y. */
12661 if (TREE_CODE (arg01) == INTEGER_CST
12662 && TREE_CODE (arg11) == INTEGER_CST)
12664 tem = fold_build2_loc (loc, BIT_XOR_EXPR, itype, arg01,
12665 fold_convert_loc (loc, itype, arg11));
12666 tem = fold_build2_loc (loc, BIT_XOR_EXPR, itype, arg00, tem);
12667 return fold_build2_loc (loc, code, type, tem,
12668 fold_convert_loc (loc, itype, arg10));
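/* Illustrative example (editor's addition, not part of the
   original source): the constant case above rewrites
       (x ^ 3) == (y ^ 5)  =>  (x ^ (3 ^ 5)) == y,
   i.e. (x ^ 6) == y, leaving one XOR against a constant.  */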
12672 /* Attempt to simplify equality/inequality comparisons of complex
12673 values. Only lower the comparison if the result is known or
12674 can be simplified to a single scalar comparison. */
12675 if ((TREE_CODE (arg0) == COMPLEX_EXPR
12676 || TREE_CODE (arg0) == COMPLEX_CST)
12677 && (TREE_CODE (arg1) == COMPLEX_EXPR
12678 || TREE_CODE (arg1) == COMPLEX_CST))
12680 tree real0, imag0, real1, imag1;
12681 tree rcond, icond;
12683 if (TREE_CODE (arg0) == COMPLEX_EXPR)
12685 real0 = TREE_OPERAND (arg0, 0);
12686 imag0 = TREE_OPERAND (arg0, 1);
12688 else
12690 real0 = TREE_REALPART (arg0);
12691 imag0 = TREE_IMAGPART (arg0);
12694 if (TREE_CODE (arg1) == COMPLEX_EXPR)
12696 real1 = TREE_OPERAND (arg1, 0);
12697 imag1 = TREE_OPERAND (arg1, 1);
12699 else
12701 real1 = TREE_REALPART (arg1);
12702 imag1 = TREE_IMAGPART (arg1);
12705 rcond = fold_binary_loc (loc, code, type, real0, real1);
12706 if (rcond && TREE_CODE (rcond) == INTEGER_CST)
12708 if (integer_zerop (rcond))
12710 if (code == EQ_EXPR)
12711 return omit_two_operands_loc (loc, type, boolean_false_node,
12712 imag0, imag1);
12713 return fold_build2_loc (loc, NE_EXPR, type, imag0, imag1);
12715 else
12717 if (code == NE_EXPR)
12718 return omit_two_operands_loc (loc, type, boolean_true_node,
12719 imag0, imag1);
12720 return fold_build2_loc (loc, EQ_EXPR, type, imag0, imag1);
12724 icond = fold_binary_loc (loc, code, type, imag0, imag1);
12725 if (icond && TREE_CODE (icond) == INTEGER_CST)
12727 if (integer_zerop (icond))
12729 if (code == EQ_EXPR)
12730 return omit_two_operands_loc (loc, type, boolean_false_node,
12731 real0, real1);
12732 return fold_build2_loc (loc, NE_EXPR, type, real0, real1);
12734 else
12736 if (code == NE_EXPR)
12737 return omit_two_operands_loc (loc, type, boolean_true_node,
12738 real0, real1);
12739 return fold_build2_loc (loc, EQ_EXPR, type, real0, real1);
12744 return NULL_TREE;
12746 case LT_EXPR:
12747 case GT_EXPR:
12748 case LE_EXPR:
12749 case GE_EXPR:
12750 tem = fold_comparison (loc, code, type, op0, op1);
12751 if (tem != NULL_TREE)
12752 return tem;
12754 /* Transform comparisons of the form X +- C CMP X. */
12755 if ((TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
12756 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
12757 && ((TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
12758 && !HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0))))
12759 || (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
12760 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))))
12762 tree arg01 = TREE_OPERAND (arg0, 1);
12763 enum tree_code code0 = TREE_CODE (arg0);
12764 int is_positive;
12766 if (TREE_CODE (arg01) == REAL_CST)
12767 is_positive = REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg01)) ? -1 : 1;
12768 else
12769 is_positive = tree_int_cst_sgn (arg01);
12771 /* (X - c) > X becomes false. */
12772 if (code == GT_EXPR
12773 && ((code0 == MINUS_EXPR && is_positive >= 0)
12774 || (code0 == PLUS_EXPR && is_positive <= 0)))
12776 if (TREE_CODE (arg01) == INTEGER_CST
12777 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
12778 fold_overflow_warning (("assuming signed overflow does not "
12779 "occur when assuming that (X - c) > X "
12780 "is always false"),
12781 WARN_STRICT_OVERFLOW_ALL);
12782 return constant_boolean_node (0, type);
12785 /* Likewise (X + c) < X becomes false. */
12786 if (code == LT_EXPR
12787 && ((code0 == PLUS_EXPR && is_positive >= 0)
12788 || (code0 == MINUS_EXPR && is_positive <= 0)))
12790 if (TREE_CODE (arg01) == INTEGER_CST
12791 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
12792 fold_overflow_warning (("assuming signed overflow does not "
12793 "occur when assuming that "
12794 "(X + c) < X is always false"),
12795 WARN_STRICT_OVERFLOW_ALL);
12796 return constant_boolean_node (0, type);
12799 /* Convert (X - c) <= X to true. */
12800 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1)))
12801 && code == LE_EXPR
12802 && ((code0 == MINUS_EXPR && is_positive >= 0)
12803 || (code0 == PLUS_EXPR && is_positive <= 0)))
12805 if (TREE_CODE (arg01) == INTEGER_CST
12806 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
12807 fold_overflow_warning (("assuming signed overflow does not "
12808 "occur when assuming that "
12809 "(X - c) <= X is always true"),
12810 WARN_STRICT_OVERFLOW_ALL);
12811 return constant_boolean_node (1, type);
12814 /* Convert (X + c) >= X to true. */
12815 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1)))
12816 && code == GE_EXPR
12817 && ((code0 == PLUS_EXPR && is_positive >= 0)
12818 || (code0 == MINUS_EXPR && is_positive <= 0)))
12820 if (TREE_CODE (arg01) == INTEGER_CST
12821 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
12822 fold_overflow_warning (("assuming signed overflow does not "
12823 "occur when assuming that "
12824 "(X + c) >= X is always true"),
12825 WARN_STRICT_OVERFLOW_ALL);
12826 return constant_boolean_node (1, type);
12829 if (TREE_CODE (arg01) == INTEGER_CST)
12831 /* Convert X + c > X and X - c < X to true for integers. */
12832 if (code == GT_EXPR
12833 && ((code0 == PLUS_EXPR && is_positive > 0)
12834 || (code0 == MINUS_EXPR && is_positive < 0)))
12836 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
12837 fold_overflow_warning (("assuming signed overflow does "
12838 "not occur when assuming that "
12839 "(X + c) > X is always true"),
12840 WARN_STRICT_OVERFLOW_ALL);
12841 return constant_boolean_node (1, type);
12844 if (code == LT_EXPR
12845 && ((code0 == MINUS_EXPR && is_positive > 0)
12846 || (code0 == PLUS_EXPR && is_positive < 0)))
12848 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
12849 fold_overflow_warning (("assuming signed overflow does "
12850 "not occur when assuming that "
12851 "(X - c) < X is always true"),
12852 WARN_STRICT_OVERFLOW_ALL);
12853 return constant_boolean_node (1, type);
12856 /* Convert X + c <= X and X - c >= X to false for integers. */
12857 if (code == LE_EXPR
12858 && ((code0 == PLUS_EXPR && is_positive > 0)
12859 || (code0 == MINUS_EXPR && is_positive < 0)))
12861 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
12862 fold_overflow_warning (("assuming signed overflow does "
12863 "not occur when assuming that "
12864 "(X + c) <= X is always false"),
12865 WARN_STRICT_OVERFLOW_ALL);
12866 return constant_boolean_node (0, type);
12869 if (code == GE_EXPR
12870 && ((code0 == MINUS_EXPR && is_positive > 0)
12871 || (code0 == PLUS_EXPR && is_positive < 0)))
12873 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
12874 fold_overflow_warning (("assuming signed overflow does "
12875 "not occur when assuming that "
12876 "(X - c) >= X is always false"),
12877 WARN_STRICT_OVERFLOW_ALL);
12878 return constant_boolean_node (0, type);
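/* Illustrative examples (editor's addition, not part of the
   original source): for signed "int x", where signed overflow
   is undefined, the folds above reduce
       x + 1 > x   =>  1 (true)      x + 1 <= x  =>  0 (false)
       x - 1 < x   =>  1 (true)      x - 1 >= x  =>  0 (false),
   emitting a -Wstrict-overflow note when that warning is
   enabled.  */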
12883 /* Comparisons with the highest or lowest possible integer of
12884 the specified precision will have known values. */
12886 tree arg1_type = TREE_TYPE (arg1);
12887 unsigned int width = TYPE_PRECISION (arg1_type);
12889 if (TREE_CODE (arg1) == INTEGER_CST
12890 && width <= 2 * HOST_BITS_PER_WIDE_INT
12891 && (INTEGRAL_TYPE_P (arg1_type) || POINTER_TYPE_P (arg1_type)))
12893 HOST_WIDE_INT signed_max_hi;
12894 unsigned HOST_WIDE_INT signed_max_lo;
12895 unsigned HOST_WIDE_INT max_hi, max_lo, min_hi, min_lo;
12897 if (width <= HOST_BITS_PER_WIDE_INT)
12899 signed_max_lo = ((unsigned HOST_WIDE_INT) 1 << (width - 1))
12900 - 1;
12901 signed_max_hi = 0;
12902 max_hi = 0;
12904 if (TYPE_UNSIGNED (arg1_type))
12906 max_lo = ((unsigned HOST_WIDE_INT) 2 << (width - 1)) - 1;
12907 min_lo = 0;
12908 min_hi = 0;
12910 else
12912 max_lo = signed_max_lo;
12913 min_lo = ((unsigned HOST_WIDE_INT) -1 << (width - 1));
12914 min_hi = -1;
12917 else
12919 width -= HOST_BITS_PER_WIDE_INT;
12920 signed_max_lo = -1;
12921 signed_max_hi = ((unsigned HOST_WIDE_INT) 1 << (width - 1))
12922 - 1;
12923 max_lo = -1;
12924 min_lo = 0;
12926 if (TYPE_UNSIGNED (arg1_type))
12928 max_hi = ((unsigned HOST_WIDE_INT) 2 << (width - 1)) - 1;
12929 min_hi = 0;
12931 else
12933 max_hi = signed_max_hi;
12934 min_hi = ((unsigned HOST_WIDE_INT) -1 << (width - 1));
12938 if ((unsigned HOST_WIDE_INT) TREE_INT_CST_HIGH (arg1) == max_hi
12939 && TREE_INT_CST_LOW (arg1) == max_lo)
12940 switch (code)
12942 case GT_EXPR:
12943 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
12945 case GE_EXPR:
12946 return fold_build2_loc (loc, EQ_EXPR, type, op0, op1);
12948 case LE_EXPR:
12949 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
12951 case LT_EXPR:
12952 return fold_build2_loc (loc, NE_EXPR, type, op0, op1);
12954 /* The GE_EXPR and LT_EXPR cases above are not normally
12955 reached because of previous transformations. */
12957 default:
12958 break;
12960 else if ((unsigned HOST_WIDE_INT) TREE_INT_CST_HIGH (arg1)
12961 == max_hi
12962 && TREE_INT_CST_LOW (arg1) == max_lo - 1)
12963 switch (code)
12965 case GT_EXPR:
12966 arg1 = const_binop (PLUS_EXPR, arg1,
12967 build_int_cst (TREE_TYPE (arg1), 1));
12968 return fold_build2_loc (loc, EQ_EXPR, type,
12969 fold_convert_loc (loc,
12970 TREE_TYPE (arg1), arg0),
12971 arg1);
12972 case LE_EXPR:
12973 arg1 = const_binop (PLUS_EXPR, arg1,
12974 build_int_cst (TREE_TYPE (arg1), 1));
12975 return fold_build2_loc (loc, NE_EXPR, type,
12976 fold_convert_loc (loc, TREE_TYPE (arg1),
12977 arg0),
12978 arg1);
12979 default:
12980 break;
12982 else if ((unsigned HOST_WIDE_INT) TREE_INT_CST_HIGH (arg1)
12983 == min_hi
12984 && TREE_INT_CST_LOW (arg1) == min_lo)
12985 switch (code)
12987 case LT_EXPR:
12988 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
12990 case LE_EXPR:
12991 return fold_build2_loc (loc, EQ_EXPR, type, op0, op1);
12993 case GE_EXPR:
12994 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
12996 case GT_EXPR:
12997 return fold_build2_loc (loc, NE_EXPR, type, op0, op1);
12999 default:
13000 break;
13002 else if ((unsigned HOST_WIDE_INT) TREE_INT_CST_HIGH (arg1)
13003 == min_hi
13004 && TREE_INT_CST_LOW (arg1) == min_lo + 1)
13005 switch (code)
13007 case GE_EXPR:
13008 arg1 = const_binop (MINUS_EXPR, arg1, integer_one_node);
13009 return fold_build2_loc (loc, NE_EXPR, type,
13010 fold_convert_loc (loc,
13011 TREE_TYPE (arg1), arg0),
13012 arg1);
13013 case LT_EXPR:
13014 arg1 = const_binop (MINUS_EXPR, arg1, integer_one_node);
13015 return fold_build2_loc (loc, EQ_EXPR, type,
13016 fold_convert_loc (loc, TREE_TYPE (arg1),
13017 arg0),
13018 arg1);
13019 default:
13020 break;
13023 else if (TREE_INT_CST_HIGH (arg1) == signed_max_hi
13024 && TREE_INT_CST_LOW (arg1) == signed_max_lo
13025 && TYPE_UNSIGNED (arg1_type)
13026 /* We will flip the signedness of the comparison operator
13027 associated with the mode of arg1, so the sign bit is
13028 specified by this mode. Check that arg1 is the signed
13029 max associated with this sign bit. */
13030 && width == GET_MODE_BITSIZE (TYPE_MODE (arg1_type))
13031 /* signed_type does not work on pointer types. */
13032 && INTEGRAL_TYPE_P (arg1_type))
13034 /* The following case also applies to X < signed_max+1
13035 and X >= signed_max+1 because of previous transformations. */
13036 if (code == LE_EXPR || code == GT_EXPR)
13038 tree st;
13039 st = signed_type_for (TREE_TYPE (arg1));
13040 return fold_build2_loc (loc,
13041 code == LE_EXPR ? GE_EXPR : LT_EXPR,
13042 type, fold_convert_loc (loc, st, arg0),
13043 build_int_cst (st, 0));
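/* Illustrative example (editor's addition, not part of the
   original source), assuming 32-bit int: for "unsigned int x",
   the signedness flip above rewrites
       x > 0x7fffffffU   =>  (int) x < 0
       x <= 0x7fffffffU  =>  (int) x >= 0.  */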
13049 /* If we are comparing an ABS_EXPR with a constant, we can
13050 convert all the cases into explicit comparisons, but they may
13051 well not be faster than doing the ABS and one comparison.
13052 But ABS (X) <= C is a range comparison, which becomes a subtraction
13053 and a comparison, and is probably faster. */
13054 if (code == LE_EXPR
13055 && TREE_CODE (arg1) == INTEGER_CST
13056 && TREE_CODE (arg0) == ABS_EXPR
13057 && ! TREE_SIDE_EFFECTS (arg0)
13058 && (0 != (tem = negate_expr (arg1)))
13059 && TREE_CODE (tem) == INTEGER_CST
13060 && !TREE_OVERFLOW (tem))
13061 return fold_build2_loc (loc, TRUTH_ANDIF_EXPR, type,
13062 build2 (GE_EXPR, type,
13063 TREE_OPERAND (arg0, 0), tem),
13064 build2 (LE_EXPR, type,
13065 TREE_OPERAND (arg0, 0), arg1));
13067 /* Convert ABS_EXPR<x> >= 0 to true. */
13068 strict_overflow_p = false;
13069 if (code == GE_EXPR
13070 && (integer_zerop (arg1)
13071 || (! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0)))
13072 && real_zerop (arg1)))
13073 && tree_expr_nonnegative_warnv_p (arg0, &strict_overflow_p))
13075 if (strict_overflow_p)
13076 fold_overflow_warning (("assuming signed overflow does not occur "
13077 "when simplifying comparison of "
13078 "absolute value and zero"),
13079 WARN_STRICT_OVERFLOW_CONDITIONAL);
13080 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
13083 /* Convert ABS_EXPR<x> < 0 to false. */
13084 strict_overflow_p = false;
13085 if (code == LT_EXPR
13086 && (integer_zerop (arg1) || real_zerop (arg1))
13087 && tree_expr_nonnegative_warnv_p (arg0, &strict_overflow_p))
13089 if (strict_overflow_p)
13090 fold_overflow_warning (("assuming signed overflow does not occur "
13091 "when simplifying comparison of "
13092 "absolute value and zero"),
13093 WARN_STRICT_OVERFLOW_CONDITIONAL);
13094 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
13097 /* If X is unsigned, convert X < (1 << Y) into X >> Y == 0
13098 and similarly for >= into !=. */
13099 if ((code == LT_EXPR || code == GE_EXPR)
13100 && TYPE_UNSIGNED (TREE_TYPE (arg0))
13101 && TREE_CODE (arg1) == LSHIFT_EXPR
13102 && integer_onep (TREE_OPERAND (arg1, 0)))
13103 return build2_loc (loc, code == LT_EXPR ? EQ_EXPR : NE_EXPR, type,
13104 build2 (RSHIFT_EXPR, TREE_TYPE (arg0), arg0,
13105 TREE_OPERAND (arg1, 1)),
13106 build_int_cst (TREE_TYPE (arg0), 0));
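/* Illustrative example (editor's addition, not part of the
   original source): for unsigned x and a valid shift count y,
   the fold above rewrites
       x < (1 << y)   =>  (x >> y) == 0
       x >= (1 << y)  =>  (x >> y) != 0.  */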
13108 if ((code == LT_EXPR || code == GE_EXPR)
13109 && TYPE_UNSIGNED (TREE_TYPE (arg0))
13110 && CONVERT_EXPR_P (arg1)
13111 && TREE_CODE (TREE_OPERAND (arg1, 0)) == LSHIFT_EXPR
13112 && integer_onep (TREE_OPERAND (TREE_OPERAND (arg1, 0), 0)))
13114 tem = build2 (RSHIFT_EXPR, TREE_TYPE (arg0), arg0,
13115 TREE_OPERAND (TREE_OPERAND (arg1, 0), 1));
13116 return build2_loc (loc, code == LT_EXPR ? EQ_EXPR : NE_EXPR, type,
13117 fold_convert_loc (loc, TREE_TYPE (arg0), tem),
13118 build_int_cst (TREE_TYPE (arg0), 0));
13121 return NULL_TREE;
13123 case UNORDERED_EXPR:
13124 case ORDERED_EXPR:
13125 case UNLT_EXPR:
13126 case UNLE_EXPR:
13127 case UNGT_EXPR:
13128 case UNGE_EXPR:
13129 case UNEQ_EXPR:
13130 case LTGT_EXPR:
13131 if (TREE_CODE (arg0) == REAL_CST && TREE_CODE (arg1) == REAL_CST)
13133 t1 = fold_relational_const (code, type, arg0, arg1);
13134 if (t1 != NULL_TREE)
13135 return t1;
13138 /* If the first operand is NaN, the result is constant. */
13139 if (TREE_CODE (arg0) == REAL_CST
13140 && REAL_VALUE_ISNAN (TREE_REAL_CST (arg0))
13141 && (code != LTGT_EXPR || ! flag_trapping_math))
13143 t1 = (code == ORDERED_EXPR || code == LTGT_EXPR)
13144 ? integer_zero_node
13145 : integer_one_node;
13146 return omit_one_operand_loc (loc, type, t1, arg1);
13149 /* If the second operand is NaN, the result is constant. */
13150 if (TREE_CODE (arg1) == REAL_CST
13151 && REAL_VALUE_ISNAN (TREE_REAL_CST (arg1))
13152 && (code != LTGT_EXPR || ! flag_trapping_math))
13154 t1 = (code == ORDERED_EXPR || code == LTGT_EXPR)
13155 ? integer_zero_node
13156 : integer_one_node;
13157 return omit_one_operand_loc (loc, type, t1, arg0);
13160 /* Simplify unordered comparison of something with itself. */
13161 if ((code == UNLE_EXPR || code == UNGE_EXPR || code == UNEQ_EXPR)
13162 && operand_equal_p (arg0, arg1, 0))
13163 return constant_boolean_node (1, type);
13165 if (code == LTGT_EXPR
13166 && !flag_trapping_math
13167 && operand_equal_p (arg0, arg1, 0))
13168 return constant_boolean_node (0, type);
13170 /* Fold (double)float1 CMP (double)float2 into float1 CMP float2. */
13172 tree targ0 = strip_float_extensions (arg0);
13173 tree targ1 = strip_float_extensions (arg1);
13174 tree newtype = TREE_TYPE (targ0);
13176 if (TYPE_PRECISION (TREE_TYPE (targ1)) > TYPE_PRECISION (newtype))
13177 newtype = TREE_TYPE (targ1);
13179 if (TYPE_PRECISION (newtype) < TYPE_PRECISION (TREE_TYPE (arg0)))
13180 return fold_build2_loc (loc, code, type,
13181 fold_convert_loc (loc, newtype, targ0),
13182 fold_convert_loc (loc, newtype, targ1));
13185 return NULL_TREE;
13187 case COMPOUND_EXPR:
13188 /* When pedantic, a compound expression can be neither an lvalue
13189 nor an integer constant expression. */
13190 if (TREE_SIDE_EFFECTS (arg0) || TREE_CONSTANT (arg1))
13191 return NULL_TREE;
13192 /* Don't let (0, 0) be a null pointer constant. */
13193 tem = integer_zerop (arg1) ? build1 (NOP_EXPR, type, arg1)
13194 : fold_convert_loc (loc, type, arg1);
13195 return pedantic_non_lvalue_loc (loc, tem);
13197 case COMPLEX_EXPR:
13198 if ((TREE_CODE (arg0) == REAL_CST
13199 && TREE_CODE (arg1) == REAL_CST)
13200 || (TREE_CODE (arg0) == INTEGER_CST
13201 && TREE_CODE (arg1) == INTEGER_CST))
13202 return build_complex (type, arg0, arg1);
13203 if (TREE_CODE (arg0) == REALPART_EXPR
13204 && TREE_CODE (arg1) == IMAGPART_EXPR
13205 && (TYPE_MAIN_VARIANT (TREE_TYPE (TREE_OPERAND (arg0, 0)))
13206 == TYPE_MAIN_VARIANT (type))
13207 && operand_equal_p (TREE_OPERAND (arg0, 0),
13208 TREE_OPERAND (arg1, 0), 0))
13209 return omit_one_operand_loc (loc, type, TREE_OPERAND (arg0, 0),
13210 TREE_OPERAND (arg1, 0));
13211 return NULL_TREE;
13213 case ASSERT_EXPR:
13214 /* An ASSERT_EXPR should never be passed to fold_binary. */
13215 gcc_unreachable ();
13217 default:
13218 return NULL_TREE;
13219 } /* switch (code) */
13222 /* Callback for walk_tree, looking for LABEL_EXPR. Return *TP if it is
13223 a LABEL_EXPR; otherwise return NULL_TREE. Do not check the subtrees
13224 of GOTO_EXPR. */
13226 static tree
13227 contains_label_1 (tree *tp, int *walk_subtrees, void *data ATTRIBUTE_UNUSED)
13229 switch (TREE_CODE (*tp))
13231 case LABEL_EXPR:
13232 return *tp;
13234 case GOTO_EXPR:
13235 *walk_subtrees = 0;
13237 /* ... fall through ... */
13239 default:
13240 return NULL_TREE;
13244 /* Return whether the sub-tree ST contains a label which is accessible from
13245 outside the sub-tree. */
13247 static bool
13248 contains_label_p (tree st)
13250 return
13251 (walk_tree_without_duplicates (&st, contains_label_1, NULL) != NULL_TREE);
13254 /* Fold a ternary expression of code CODE and type TYPE with operands
13255 OP0, OP1, and OP2. Return the folded expression if folding is
13256 successful. Otherwise, return NULL_TREE. */
13258 tree
13259 fold_ternary_loc (location_t loc, enum tree_code code, tree type,
13260 tree op0, tree op1, tree op2)
13262 tree tem;
13263 tree arg0 = NULL_TREE, arg1 = NULL_TREE, arg2 = NULL_TREE;
13264 enum tree_code_class kind = TREE_CODE_CLASS (code);
13266 gcc_assert (IS_EXPR_CODE_CLASS (kind)
13267 && TREE_CODE_LENGTH (code) == 3);
13269 /* Strip any conversions that don't change the mode. This is safe
13270 for every expression, except for a comparison expression because
13271 its signedness is derived from its operands. So, in the latter
13272 case, only strip conversions that don't change the signedness.
13274 Note that this is done as an internal manipulation within the
13275 constant folder, in order to find the simplest representation of
13276 the arguments so that their form can be studied. In any case,
13277 the appropriate type conversions should be put back in the tree
13278 that will get out of the constant folder. */
13279 if (op0)
13281 arg0 = op0;
13282 STRIP_NOPS (arg0);
13285 if (op1)
13287 arg1 = op1;
13288 STRIP_NOPS (arg1);
13291 if (op2)
13293 arg2 = op2;
13294 STRIP_NOPS (arg2);
13297 switch (code)
13299 case COMPONENT_REF:
13300 if (TREE_CODE (arg0) == CONSTRUCTOR
13301 && ! type_contains_placeholder_p (TREE_TYPE (arg0)))
13303 unsigned HOST_WIDE_INT idx;
13304 tree field, value;
13305 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (arg0), idx, field, value)
13306 if (field == arg1)
13307 return value;
13309 return NULL_TREE;
13311 case COND_EXPR:
13312 /* Pedantic ANSI C says that a conditional expression is never an lvalue,
13313 so all simple results must be passed through pedantic_non_lvalue. */
13314 if (TREE_CODE (arg0) == INTEGER_CST)
13316 tree unused_op = integer_zerop (arg0) ? op1 : op2;
13317 tem = integer_zerop (arg0) ? op2 : op1;
13318 /* Only optimize constant conditions when the selected branch
13319 has the same type as the COND_EXPR. This avoids optimizing
13320 away "c ? x : throw", where the throw has a void type.
13321 Also avoid throwing away an operand that contains a label. */
13322 if ((!TREE_SIDE_EFFECTS (unused_op)
13323 || !contains_label_p (unused_op))
13324 && (! VOID_TYPE_P (TREE_TYPE (tem))
13325 || VOID_TYPE_P (type)))
13326 return pedantic_non_lvalue_loc (loc, tem);
13327 return NULL_TREE;
13329 if (operand_equal_p (arg1, op2, 0))
13330 return pedantic_omit_one_operand_loc (loc, type, arg1, arg0);
13332 /* If we have A op B ? A : C, we may be able to convert this to a
13333 simpler expression, depending on the operation and the values
13334 of B and C. Signed zeros prevent all of these transformations,
13335 for reasons given above each one.
13337 Also try swapping the arguments and inverting the conditional. */
13338 if (COMPARISON_CLASS_P (arg0)
13339 && operand_equal_for_comparison_p (TREE_OPERAND (arg0, 0),
13340 arg1, TREE_OPERAND (arg0, 1))
13341 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg1))))
13343 tem = fold_cond_expr_with_comparison (loc, type, arg0, op1, op2);
13344 if (tem)
13345 return tem;
13348 if (COMPARISON_CLASS_P (arg0)
13349 && operand_equal_for_comparison_p (TREE_OPERAND (arg0, 0),
13350 op2,
13351 TREE_OPERAND (arg0, 1))
13352 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (op2))))
13354 location_t loc0 = expr_location_or (arg0, loc);
13355 tem = fold_truth_not_expr (loc0, arg0);
13356 if (tem && COMPARISON_CLASS_P (tem))
13358 tem = fold_cond_expr_with_comparison (loc, type, tem, op2, op1);
13359 if (tem)
13360 return tem;
13364 /* If the second operand is simpler than the third, swap them
13365 since that produces better jump optimization results. */
13366 if (truth_value_p (TREE_CODE (arg0))
13367 && tree_swap_operands_p (op1, op2, false))
13369 location_t loc0 = expr_location_or (arg0, loc);
13370 /* See if this can be inverted. If it can't, possibly because
13371 it was a floating-point inequality comparison, don't do
13372 anything. */
13373 tem = fold_truth_not_expr (loc0, arg0);
13374 if (tem)
13375 return fold_build3_loc (loc, code, type, tem, op2, op1);
13378 /* Convert A ? 1 : 0 to simply A. */
13379 if (integer_onep (op1)
13380 && integer_zerop (op2)
13381 /* If we try to convert OP0 to our type, the
13382 call to fold will try to move the conversion inside
13383 a COND, which will recurse. In that case, the COND_EXPR
13384 is probably the best choice, so leave it alone. */
13385 && type == TREE_TYPE (arg0))
13386 return pedantic_non_lvalue_loc (loc, arg0);
13388 /* Convert A ? 0 : 1 to !A. This prefers the use of NOT_EXPR
13389 over COND_EXPR in cases such as floating point comparisons. */
13390 if (integer_zerop (op1)
13391 && integer_onep (op2)
13392 && truth_value_p (TREE_CODE (arg0)))
13393 return pedantic_non_lvalue_loc (loc,
13394 fold_convert_loc (loc, type,
13395 invert_truthvalue_loc (loc,
13396 arg0)));
13398 /* A < 0 ? <sign bit of A> : 0 is simply (A & <sign bit of A>). */
13399 if (TREE_CODE (arg0) == LT_EXPR
13400 && integer_zerop (TREE_OPERAND (arg0, 1))
13401 && integer_zerop (op2)
13402 && (tem = sign_bit_p (TREE_OPERAND (arg0, 0), arg1)))
13404 /* sign_bit_p only checks ARG1 bits within A's precision.
13405 If <sign bit of A> has wider type than A, bits outside
13406 of A's precision in <sign bit of A> need to be checked.
13407 If they are all 0, this optimization needs to be done
13408 in unsigned A's type; if they are all 1, in signed A's type;
13409 otherwise this can't be done. */
13410 if (TYPE_PRECISION (TREE_TYPE (tem))
13411 < TYPE_PRECISION (TREE_TYPE (arg1))
13412 && TYPE_PRECISION (TREE_TYPE (tem))
13413 < TYPE_PRECISION (type))
13415 unsigned HOST_WIDE_INT mask_lo;
13416 HOST_WIDE_INT mask_hi;
13417 int inner_width, outer_width;
13418 tree tem_type;
13420 inner_width = TYPE_PRECISION (TREE_TYPE (tem));
13421 outer_width = TYPE_PRECISION (TREE_TYPE (arg1));
13422 if (outer_width > TYPE_PRECISION (type))
13423 outer_width = TYPE_PRECISION (type);
13425 if (outer_width > HOST_BITS_PER_WIDE_INT)
13427 mask_hi = ((unsigned HOST_WIDE_INT) -1
13428 >> (2 * HOST_BITS_PER_WIDE_INT - outer_width));
13429 mask_lo = -1;
13431 else
13433 mask_hi = 0;
13434 mask_lo = ((unsigned HOST_WIDE_INT) -1
13435 >> (HOST_BITS_PER_WIDE_INT - outer_width));
13437 if (inner_width > HOST_BITS_PER_WIDE_INT)
13439 mask_hi &= ~((unsigned HOST_WIDE_INT) -1
13440 >> (HOST_BITS_PER_WIDE_INT - inner_width));
13441 mask_lo = 0;
13443 else
13444 mask_lo &= ~((unsigned HOST_WIDE_INT) -1
13445 >> (HOST_BITS_PER_WIDE_INT - inner_width));
13447 if ((TREE_INT_CST_HIGH (arg1) & mask_hi) == mask_hi
13448 && (TREE_INT_CST_LOW (arg1) & mask_lo) == mask_lo)
13450 tem_type = signed_type_for (TREE_TYPE (tem));
13451 tem = fold_convert_loc (loc, tem_type, tem);
13453 else if ((TREE_INT_CST_HIGH (arg1) & mask_hi) == 0
13454 && (TREE_INT_CST_LOW (arg1) & mask_lo) == 0)
13456 tem_type = unsigned_type_for (TREE_TYPE (tem));
13457 tem = fold_convert_loc (loc, tem_type, tem);
13459 else
13460 tem = NULL;
13463 if (tem)
13464 return
13465 fold_convert_loc (loc, type,
13466 fold_build2_loc (loc, BIT_AND_EXPR,
13467 TREE_TYPE (tem), tem,
13468 fold_convert_loc (loc,
13469 TREE_TYPE (tem),
13470 arg1)));
13473 /* (A >> N) & 1 ? (1 << N) : 0 is simply A & (1 << N). A & 1 was
13474 already handled above. */
13475 if (TREE_CODE (arg0) == BIT_AND_EXPR
13476 && integer_onep (TREE_OPERAND (arg0, 1))
13477 && integer_zerop (op2)
13478 && integer_pow2p (arg1))
13480 tree tem = TREE_OPERAND (arg0, 0);
13481 STRIP_NOPS (tem);
13482 if (TREE_CODE (tem) == RSHIFT_EXPR
13483 && TREE_CODE (TREE_OPERAND (tem, 1)) == INTEGER_CST
13484 && (unsigned HOST_WIDE_INT) tree_log2 (arg1) ==
13485 TREE_INT_CST_LOW (TREE_OPERAND (tem, 1)))
13486 return fold_build2_loc (loc, BIT_AND_EXPR, type,
13487 TREE_OPERAND (tem, 0), arg1);
13490 /* A & N ? N : 0 is simply A & N if N is a power of two. This
13491 is probably obsolete because the first operand should be a
13492 truth value (that's why we have the two cases above), but let's
13493 leave it in until we can confirm this for all front-ends. */
13494 if (integer_zerop (op2)
13495 && TREE_CODE (arg0) == NE_EXPR
13496 && integer_zerop (TREE_OPERAND (arg0, 1))
13497 && integer_pow2p (arg1)
13498 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_AND_EXPR
13499 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
13500 arg1, OEP_ONLY_CONST))
13501 return pedantic_non_lvalue_loc (loc,
13502 fold_convert_loc (loc, type,
13503 TREE_OPERAND (arg0, 0)));
13505 /* Convert A ? B : 0 into A && B if A and B are truth values. */
13506 if (integer_zerop (op2)
13507 && truth_value_p (TREE_CODE (arg0))
13508 && truth_value_p (TREE_CODE (arg1)))
13509 return fold_build2_loc (loc, TRUTH_ANDIF_EXPR, type,
13510 fold_convert_loc (loc, type, arg0),
13511 arg1);
13513 /* Convert A ? B : 1 into !A || B if A and B are truth values. */
13514 if (integer_onep (op2)
13515 && truth_value_p (TREE_CODE (arg0))
13516 && truth_value_p (TREE_CODE (arg1)))
13518 location_t loc0 = expr_location_or (arg0, loc);
13519 /* Only perform transformation if ARG0 is easily inverted. */
13520 tem = fold_truth_not_expr (loc0, arg0);
13521 if (tem)
13522 return fold_build2_loc (loc, TRUTH_ORIF_EXPR, type,
13523 fold_convert_loc (loc, type, tem),
13524 arg1);
13527 /* Convert A ? 0 : B into !A && B if A and B are truth values. */
13528 if (integer_zerop (arg1)
13529 && truth_value_p (TREE_CODE (arg0))
13530 && truth_value_p (TREE_CODE (op2)))
13532 location_t loc0 = expr_location_or (arg0, loc);
13533 /* Only perform transformation if ARG0 is easily inverted. */
13534 tem = fold_truth_not_expr (loc0, arg0);
13535 if (tem)
13536 return fold_build2_loc (loc, TRUTH_ANDIF_EXPR, type,
13537 fold_convert_loc (loc, type, tem),
13538 op2);
13541 /* Convert A ? 1 : B into A || B if A and B are truth values. */
13542 if (integer_onep (arg1)
13543 && truth_value_p (TREE_CODE (arg0))
13544 && truth_value_p (TREE_CODE (op2)))
13545 return fold_build2_loc (loc, TRUTH_ORIF_EXPR, type,
13546 fold_convert_loc (loc, type, arg0),
13547 op2);
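/* Illustrative examples (editor's addition, not part of the
   original source): taken together, the truth-value cases above
   rewrite
       a ? b : 0  =>  a && b       a ? b : 1  =>  !a || b
       a ? 0 : b  =>  !a && b      a ? 1 : b  =>  a || b,
   using the short-circuiting TRUTH_ANDIF/ORIF forms.  */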
13549 return NULL_TREE;
13551 case CALL_EXPR:
13552 /* CALL_EXPRs used to be ternary exprs. Catch any mistaken uses
13553 of fold_ternary on them. */
13554 gcc_unreachable ();
13556 case BIT_FIELD_REF:
13557 if ((TREE_CODE (arg0) == VECTOR_CST
13558 || (TREE_CODE (arg0) == CONSTRUCTOR && TREE_CONSTANT (arg0)))
13559 && type == TREE_TYPE (TREE_TYPE (arg0)))
13561 unsigned HOST_WIDE_INT width = tree_low_cst (arg1, 1);
13562 unsigned HOST_WIDE_INT idx = tree_low_cst (op2, 1);
13564 if (width != 0
13565 && simple_cst_equal (arg1, TYPE_SIZE (type)) == 1
13566 && (idx % width) == 0
13567 && (idx = idx / width)
13568 < TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0)))
13570 tree elements = NULL_TREE;
13572 if (TREE_CODE (arg0) == VECTOR_CST)
13573 elements = TREE_VECTOR_CST_ELTS (arg0);
13574 else
13576 unsigned HOST_WIDE_INT idx;
13577 tree value;
13579 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (arg0), idx, value)
13580 elements = tree_cons (NULL_TREE, value, elements);
13582 while (idx-- > 0 && elements)
13583 elements = TREE_CHAIN (elements);
13584 if (elements)
13585 return TREE_VALUE (elements);
13586 else
13587 return build_zero_cst (type);
13591 /* A bit-field-ref that referenced the full argument can be stripped. */
13592 if (INTEGRAL_TYPE_P (TREE_TYPE (arg0))
13593 && TYPE_PRECISION (TREE_TYPE (arg0)) == tree_low_cst (arg1, 1)
13594 && integer_zerop (op2))
13595 return fold_convert_loc (loc, type, arg0);
13597 return NULL_TREE;
13599 case FMA_EXPR:
13600 /* For integers we can decompose the FMA if possible. */
13601 if (TREE_CODE (arg0) == INTEGER_CST
13602 && TREE_CODE (arg1) == INTEGER_CST)
13603 return fold_build2_loc (loc, PLUS_EXPR, type,
13604 const_binop (MULT_EXPR, arg0, arg1), arg2);
13605 if (integer_zerop (arg2))
13606 return fold_build2_loc (loc, MULT_EXPR, type, arg0, arg1);
13608 return fold_fma (loc, type, arg0, arg1, arg2);
13610 default:
13611 return NULL_TREE;
13612 } /* switch (code) */
13615 /* Perform constant folding and related simplification of EXPR.
13616 The related simplifications include x*1 => x, x*0 => 0, etc.,
13617 and application of the associative law.
13618 NOP_EXPR conversions may be removed freely (as long as we
13619 are careful not to change the type of the overall expression).
13620 We cannot simplify through a CONVERT_EXPR, FIX_EXPR or FLOAT_EXPR,
13621 but we can constant-fold them if they have constant operands. */
13623 #ifdef ENABLE_FOLD_CHECKING
13624 # define fold(x) fold_1 (x)
13625 static tree fold_1 (tree);
13626 static
13627 #endif
13628 tree
13629 fold (tree expr)
13631 const tree t = expr;
13632 enum tree_code code = TREE_CODE (t);
13633 enum tree_code_class kind = TREE_CODE_CLASS (code);
13634 tree tem;
13635 location_t loc = EXPR_LOCATION (expr);
13637 /* Return right away if a constant. */
13638 if (kind == tcc_constant)
13639 return t;
13641 /* CALL_EXPR-like objects with variable numbers of operands are
13642 treated specially. */
13643 if (kind == tcc_vl_exp)
13645 if (code == CALL_EXPR)
13647 tem = fold_call_expr (loc, expr, false);
13648 return tem ? tem : expr;
13650 return expr;
13653 if (IS_EXPR_CODE_CLASS (kind))
13655 tree type = TREE_TYPE (t);
13656 tree op0, op1, op2;
13658 switch (TREE_CODE_LENGTH (code))
13660 case 1:
13661 op0 = TREE_OPERAND (t, 0);
13662 tem = fold_unary_loc (loc, code, type, op0);
13663 return tem ? tem : expr;
13664 case 2:
13665 op0 = TREE_OPERAND (t, 0);
13666 op1 = TREE_OPERAND (t, 1);
13667 tem = fold_binary_loc (loc, code, type, op0, op1);
13668 return tem ? tem : expr;
13669 case 3:
13670 op0 = TREE_OPERAND (t, 0);
13671 op1 = TREE_OPERAND (t, 1);
13672 op2 = TREE_OPERAND (t, 2);
13673 tem = fold_ternary_loc (loc, code, type, op0, op1, op2);
13674 return tem ? tem : expr;
13675 default:
13676 break;
13680 switch (code)
13682 case ARRAY_REF:
13684 tree op0 = TREE_OPERAND (t, 0);
13685 tree op1 = TREE_OPERAND (t, 1);
13687 if (TREE_CODE (op1) == INTEGER_CST
13688 && TREE_CODE (op0) == CONSTRUCTOR
13689 && ! type_contains_placeholder_p (TREE_TYPE (op0)))
13691 VEC(constructor_elt,gc) *elts = CONSTRUCTOR_ELTS (op0);
13692 unsigned HOST_WIDE_INT end = VEC_length (constructor_elt, elts);
13693 unsigned HOST_WIDE_INT begin = 0;
13695 /* Find a matching index by means of a binary search. */
13696 while (begin != end)
13698 unsigned HOST_WIDE_INT middle = (begin + end) / 2;
13699 tree index = VEC_index (constructor_elt, elts, middle)->index;
13701 if (TREE_CODE (index) == INTEGER_CST
13702 && tree_int_cst_lt (index, op1))
13703 begin = middle + 1;
13704 else if (TREE_CODE (index) == INTEGER_CST
13705 && tree_int_cst_lt (op1, index))
13706 end = middle;
13707 else if (TREE_CODE (index) == RANGE_EXPR
13708 && tree_int_cst_lt (TREE_OPERAND (index, 1), op1))
13709 begin = middle + 1;
13710 else if (TREE_CODE (index) == RANGE_EXPR
13711 && tree_int_cst_lt (op1, TREE_OPERAND (index, 0)))
13712 end = middle;
13713 else
13714 return VEC_index (constructor_elt, elts, middle)->value;
13718 return t;
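/* Illustrative example (editor's addition, not part of the
   original source): when OP0 is the constant CONSTRUCTOR
   { 10, 20, 30 } and OP1 is the index 1, the binary search
   above returns the constant 20.  */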
13721 case CONST_DECL:
13722 return fold (DECL_INITIAL (t));
13724 default:
13725 return t;
13726 } /* switch (code) */
13729 #ifdef ENABLE_FOLD_CHECKING
13730 #undef fold
13732 static void fold_checksum_tree (const_tree, struct md5_ctx *, htab_t);
13733 static void fold_check_failed (const_tree, const_tree);
13734 void print_fold_checksum (const_tree);
13736 /* When --enable-checking=fold, compute a digest of EXPR before
13737 and after the actual fold call, to verify that fold did not
13738 accidentally change the original expr. */
13740 tree
13741 fold (tree expr)
13743 tree ret;
13744 struct md5_ctx ctx;
13745 unsigned char checksum_before[16], checksum_after[16];
13746 htab_t ht;
13748 ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
13749 md5_init_ctx (&ctx);
13750 fold_checksum_tree (expr, &ctx, ht);
13751 md5_finish_ctx (&ctx, checksum_before);
13752 htab_empty (ht);
13754 ret = fold_1 (expr);
13756 md5_init_ctx (&ctx);
13757 fold_checksum_tree (expr, &ctx, ht);
13758 md5_finish_ctx (&ctx, checksum_after);
13759 htab_delete (ht);
13761 if (memcmp (checksum_before, checksum_after, 16))
13762 fold_check_failed (expr, ret);
13764 return ret;
13767 void
13768 print_fold_checksum (const_tree expr)
13770 struct md5_ctx ctx;
13771 unsigned char checksum[16], cnt;
13772 htab_t ht;
13774 ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
13775 md5_init_ctx (&ctx);
13776 fold_checksum_tree (expr, &ctx, ht);
13777 md5_finish_ctx (&ctx, checksum);
13778 htab_delete (ht);
13779 for (cnt = 0; cnt < 16; ++cnt)
13780 fprintf (stderr, "%02x", checksum[cnt]);
13781 putc ('\n', stderr);
13784 static void
13785 fold_check_failed (const_tree expr ATTRIBUTE_UNUSED, const_tree ret ATTRIBUTE_UNUSED)
13787 internal_error ("fold check: original tree changed by fold");
13790 static void
13791 fold_checksum_tree (const_tree expr, struct md5_ctx *ctx, htab_t ht)
13793 void **slot;
13794 enum tree_code code;
13795 union tree_node buf;
13796 int i, len;
13798 recursive_label:
13800 gcc_assert ((sizeof (struct tree_exp) + 5 * sizeof (tree)
13801 <= sizeof (struct tree_function_decl))
13802 && sizeof (struct tree_type) <= sizeof (struct tree_function_decl));
13803 if (expr == NULL)
13804 return;
13805 slot = (void **) htab_find_slot (ht, expr, INSERT);
13806 if (*slot != NULL)
13807 return;
13808 *slot = CONST_CAST_TREE (expr);
13809 code = TREE_CODE (expr);
13810 if (TREE_CODE_CLASS (code) == tcc_declaration
13811 && DECL_ASSEMBLER_NAME_SET_P (expr))
13813 /* Allow DECL_ASSEMBLER_NAME to be modified. */
13814 memcpy ((char *) &buf, expr, tree_size (expr));
13815 SET_DECL_ASSEMBLER_NAME ((tree)&buf, NULL);
13816 expr = (tree) &buf;
13818 else if (TREE_CODE_CLASS (code) == tcc_type
13819 && (TYPE_POINTER_TO (expr)
13820 || TYPE_REFERENCE_TO (expr)
13821 || TYPE_CACHED_VALUES_P (expr)
13822 || TYPE_CONTAINS_PLACEHOLDER_INTERNAL (expr)
13823 || TYPE_NEXT_VARIANT (expr)))
13825 /* Allow these fields to be modified. */
13826 tree tmp;
13827 memcpy ((char *) &buf, expr, tree_size (expr));
13828 expr = tmp = (tree) &buf;
13829 TYPE_CONTAINS_PLACEHOLDER_INTERNAL (tmp) = 0;
13830 TYPE_POINTER_TO (tmp) = NULL;
13831 TYPE_REFERENCE_TO (tmp) = NULL;
13832 TYPE_NEXT_VARIANT (tmp) = NULL;
13833 if (TYPE_CACHED_VALUES_P (tmp))
13835 TYPE_CACHED_VALUES_P (tmp) = 0;
13836 TYPE_CACHED_VALUES (tmp) = NULL;
13839 md5_process_bytes (expr, tree_size (expr), ctx);
13840 fold_checksum_tree (TREE_TYPE (expr), ctx, ht);
13841 if (TREE_CODE_CLASS (code) != tcc_type
13842 && TREE_CODE_CLASS (code) != tcc_declaration
13843 && code != TREE_LIST
13844 && code != SSA_NAME
13845 && CODE_CONTAINS_STRUCT (code, TS_COMMON))
13846 fold_checksum_tree (TREE_CHAIN (expr), ctx, ht);
13847 switch (TREE_CODE_CLASS (code))
13849 case tcc_constant:
13850 switch (code)
13852 case STRING_CST:
13853 md5_process_bytes (TREE_STRING_POINTER (expr),
13854 TREE_STRING_LENGTH (expr), ctx);
13855 break;
13856 case COMPLEX_CST:
13857 fold_checksum_tree (TREE_REALPART (expr), ctx, ht);
13858 fold_checksum_tree (TREE_IMAGPART (expr), ctx, ht);
13859 break;
13860 case VECTOR_CST:
13861 fold_checksum_tree (TREE_VECTOR_CST_ELTS (expr), ctx, ht);
13862 break;
13863 default:
13864 break;
13866 break;
13867 case tcc_exceptional:
13868 switch (code)
13870 case TREE_LIST:
13871 fold_checksum_tree (TREE_PURPOSE (expr), ctx, ht);
13872 fold_checksum_tree (TREE_VALUE (expr), ctx, ht);
13873 expr = TREE_CHAIN (expr);
13874 goto recursive_label;
13875 break;
13876 case TREE_VEC:
13877 for (i = 0; i < TREE_VEC_LENGTH (expr); ++i)
13878 fold_checksum_tree (TREE_VEC_ELT (expr, i), ctx, ht);
13879 break;
13880 default:
13881 break;
13883 break;
13884 case tcc_expression:
13885 case tcc_reference:
13886 case tcc_comparison:
13887 case tcc_unary:
13888 case tcc_binary:
13889 case tcc_statement:
13890 case tcc_vl_exp:
13891 len = TREE_OPERAND_LENGTH (expr);
13892 for (i = 0; i < len; ++i)
13893 fold_checksum_tree (TREE_OPERAND (expr, i), ctx, ht);
13894 break;
13895 case tcc_declaration:
13896 fold_checksum_tree (DECL_NAME (expr), ctx, ht);
13897 fold_checksum_tree (DECL_CONTEXT (expr), ctx, ht);
13898 if (CODE_CONTAINS_STRUCT (TREE_CODE (expr), TS_DECL_COMMON))
13900 fold_checksum_tree (DECL_SIZE (expr), ctx, ht);
13901 fold_checksum_tree (DECL_SIZE_UNIT (expr), ctx, ht);
13902 fold_checksum_tree (DECL_INITIAL (expr), ctx, ht);
13903 fold_checksum_tree (DECL_ABSTRACT_ORIGIN (expr), ctx, ht);
13904 fold_checksum_tree (DECL_ATTRIBUTES (expr), ctx, ht);
13906 if (CODE_CONTAINS_STRUCT (TREE_CODE (expr), TS_DECL_WITH_VIS))
13907 fold_checksum_tree (DECL_SECTION_NAME (expr), ctx, ht);
13909 if (CODE_CONTAINS_STRUCT (TREE_CODE (expr), TS_DECL_NON_COMMON))
13911 fold_checksum_tree (DECL_VINDEX (expr), ctx, ht);
13912 fold_checksum_tree (DECL_RESULT_FLD (expr), ctx, ht);
13913 fold_checksum_tree (DECL_ARGUMENT_FLD (expr), ctx, ht);
13915 break;
13916 case tcc_type:
13917 if (TREE_CODE (expr) == ENUMERAL_TYPE)
13918 fold_checksum_tree (TYPE_VALUES (expr), ctx, ht);
13919 fold_checksum_tree (TYPE_SIZE (expr), ctx, ht);
13920 fold_checksum_tree (TYPE_SIZE_UNIT (expr), ctx, ht);
13921 fold_checksum_tree (TYPE_ATTRIBUTES (expr), ctx, ht);
13922 fold_checksum_tree (TYPE_NAME (expr), ctx, ht);
13923 if (INTEGRAL_TYPE_P (expr)
13924 || SCALAR_FLOAT_TYPE_P (expr))
13926 fold_checksum_tree (TYPE_MIN_VALUE (expr), ctx, ht);
13927 fold_checksum_tree (TYPE_MAX_VALUE (expr), ctx, ht);
13929 fold_checksum_tree (TYPE_MAIN_VARIANT (expr), ctx, ht);
13930 if (TREE_CODE (expr) == RECORD_TYPE
13931 || TREE_CODE (expr) == UNION_TYPE
13932 || TREE_CODE (expr) == QUAL_UNION_TYPE)
13933 fold_checksum_tree (TYPE_BINFO (expr), ctx, ht);
13934 fold_checksum_tree (TYPE_CONTEXT (expr), ctx, ht);
13935 break;
13936 default:
13937 break;
13941 /* Helper function for outputting the checksum of a tree T. When
13942 debugging with gdb, you can "define mynext" to be "next" followed
13943 by "call debug_fold_checksum (op0)", then just trace down till the
13944 outputs differ. */
13946 DEBUG_FUNCTION void
13947 debug_fold_checksum (const_tree t)
13949 int i;
13950 unsigned char checksum[16];
13951 struct md5_ctx ctx;
13952 htab_t ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
13954 md5_init_ctx (&ctx);
13955 fold_checksum_tree (t, &ctx, ht);
13956 md5_finish_ctx (&ctx, checksum);
13957 htab_empty (ht);
13959 for (i = 0; i < 16; i++)
13960 fprintf (stderr, "%d ", checksum[i]);
13962 fprintf (stderr, "\n");
13965 #endif
13967 /* Fold a unary tree expression with code CODE of type TYPE with an
13968 operand OP0. LOC is the location of the resulting expression.
13969 Return a folded expression if successful. Otherwise, return a tree
13970 expression with code CODE of type TYPE with an operand OP0. */
13972 tree
13973 fold_build1_stat_loc (location_t loc,
13974 enum tree_code code, tree type, tree op0 MEM_STAT_DECL)
13976 tree tem;
13977 #ifdef ENABLE_FOLD_CHECKING
13978 unsigned char checksum_before[16], checksum_after[16];
13979 struct md5_ctx ctx;
13980 htab_t ht;
13982 ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
13983 md5_init_ctx (&ctx);
13984 fold_checksum_tree (op0, &ctx, ht);
13985 md5_finish_ctx (&ctx, checksum_before);
13986 htab_empty (ht);
13987 #endif
13989 tem = fold_unary_loc (loc, code, type, op0);
13990 if (!tem)
13991 tem = build1_stat_loc (loc, code, type, op0 PASS_MEM_STAT);
13993 #ifdef ENABLE_FOLD_CHECKING
13994 md5_init_ctx (&ctx);
13995 fold_checksum_tree (op0, &ctx, ht);
13996 md5_finish_ctx (&ctx, checksum_after);
13997 htab_delete (ht);
13999 if (memcmp (checksum_before, checksum_after, 16))
14000 fold_check_failed (op0, tem);
14001 #endif
14002 return tem;
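
/* A standalone sketch of the ENABLE_FOLD_CHECKING guard used above:
   hash the operand before and after the call and fail loudly if the
   folding step mutated its input in place.  toy_hash and toy_fold are
   hypothetical stand-ins for the md5_* routines and fold_unary_loc;
   a plain struct stands in for a tree.  */

#include <assert.h>
#include <stddef.h>
#include <stdint.h>

struct toy_node { int code; long value; };

/* FNV-1a over the raw bytes; a stand-in for the MD5 context calls.  */
static uint64_t
toy_hash (const void *p, size_t len)
{
  const unsigned char *b = (const unsigned char *) p;
  uint64_t h = 14695981039346656037ull;
  size_t i;
  for (i = 0; i < len; i++)
    h = (h ^ b[i]) * 1099511628211ull;
  return h;
}

/* A folding step that builds a new node and must not touch *OP.  */
static struct toy_node
toy_fold (const struct toy_node *op)
{
  struct toy_node r = *op;
  r.value = -r.value;
  return r;
}

int
main (void)
{
  struct toy_node op = { 1, 42 };
  uint64_t before = toy_hash (&op, sizeof op);
  struct toy_node res = toy_fold (&op);
  uint64_t after = toy_hash (&op, sizeof op);
  /* Mirrors: if (memcmp (checksum_before, checksum_after, 16))
	       fold_check_failed (op0, tem);  */
  assert (before == after);
  return res.value == -42 ? 0 : 1;
}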
14005 /* Fold a binary tree expression with code CODE of type TYPE with
14006 operands OP0 and OP1. LOC is the location of the resulting
14007 expression. Return a folded expression if successful. Otherwise,
14008 return a tree expression with code CODE of type TYPE with operands
14009 OP0 and OP1. */
14011 tree
14012 fold_build2_stat_loc (location_t loc,
14013 enum tree_code code, tree type, tree op0, tree op1
14014 MEM_STAT_DECL)
14016 tree tem;
14017 #ifdef ENABLE_FOLD_CHECKING
14018 unsigned char checksum_before_op0[16],
14019 checksum_before_op1[16],
14020 checksum_after_op0[16],
14021 checksum_after_op1[16];
14022 struct md5_ctx ctx;
14023 htab_t ht;
14025 ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
14026 md5_init_ctx (&ctx);
14027 fold_checksum_tree (op0, &ctx, ht);
14028 md5_finish_ctx (&ctx, checksum_before_op0);
14029 htab_empty (ht);
14031 md5_init_ctx (&ctx);
14032 fold_checksum_tree (op1, &ctx, ht);
14033 md5_finish_ctx (&ctx, checksum_before_op1);
14034 htab_empty (ht);
14035 #endif
14037 tem = fold_binary_loc (loc, code, type, op0, op1);
14038 if (!tem)
14039 tem = build2_stat_loc (loc, code, type, op0, op1 PASS_MEM_STAT);
14041 #ifdef ENABLE_FOLD_CHECKING
14042 md5_init_ctx (&ctx);
14043 fold_checksum_tree (op0, &ctx, ht);
14044 md5_finish_ctx (&ctx, checksum_after_op0);
14045 htab_empty (ht);
14047 if (memcmp (checksum_before_op0, checksum_after_op0, 16))
14048 fold_check_failed (op0, tem);
14050 md5_init_ctx (&ctx);
14051 fold_checksum_tree (op1, &ctx, ht);
14052 md5_finish_ctx (&ctx, checksum_after_op1);
14053 htab_delete (ht);
14055 if (memcmp (checksum_before_op1, checksum_after_op1, 16))
14056 fold_check_failed (op1, tem);
14057 #endif
14058 return tem;
14061 /* Fold a ternary tree expression with code CODE of type TYPE with
14062 operands OP0, OP1, and OP2. Return a folded expression if
14063 successful. Otherwise, return a tree expression with code CODE of
14064 type TYPE with operands OP0, OP1, and OP2. */
14066 tree
14067 fold_build3_stat_loc (location_t loc, enum tree_code code, tree type,
14068 tree op0, tree op1, tree op2 MEM_STAT_DECL)
14070 tree tem;
14071 #ifdef ENABLE_FOLD_CHECKING
14072 unsigned char checksum_before_op0[16],
14073 checksum_before_op1[16],
14074 checksum_before_op2[16],
14075 checksum_after_op0[16],
14076 checksum_after_op1[16],
14077 checksum_after_op2[16];
14078 struct md5_ctx ctx;
14079 htab_t ht;
14081 ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
14082 md5_init_ctx (&ctx);
14083 fold_checksum_tree (op0, &ctx, ht);
14084 md5_finish_ctx (&ctx, checksum_before_op0);
14085 htab_empty (ht);
14087 md5_init_ctx (&ctx);
14088 fold_checksum_tree (op1, &ctx, ht);
14089 md5_finish_ctx (&ctx, checksum_before_op1);
14090 htab_empty (ht);
14092 md5_init_ctx (&ctx);
14093 fold_checksum_tree (op2, &ctx, ht);
14094 md5_finish_ctx (&ctx, checksum_before_op2);
14095 htab_empty (ht);
14096 #endif
14098 gcc_assert (TREE_CODE_CLASS (code) != tcc_vl_exp);
14099 tem = fold_ternary_loc (loc, code, type, op0, op1, op2);
14100 if (!tem)
14101 tem = build3_stat_loc (loc, code, type, op0, op1, op2 PASS_MEM_STAT);
14103 #ifdef ENABLE_FOLD_CHECKING
14104 md5_init_ctx (&ctx);
14105 fold_checksum_tree (op0, &ctx, ht);
14106 md5_finish_ctx (&ctx, checksum_after_op0);
14107 htab_empty (ht);
14109 if (memcmp (checksum_before_op0, checksum_after_op0, 16))
14110 fold_check_failed (op0, tem);
14112 md5_init_ctx (&ctx);
14113 fold_checksum_tree (op1, &ctx, ht);
14114 md5_finish_ctx (&ctx, checksum_after_op1);
14115 htab_empty (ht);
14117 if (memcmp (checksum_before_op1, checksum_after_op1, 16))
14118 fold_check_failed (op1, tem);
14120 md5_init_ctx (&ctx);
14121 fold_checksum_tree (op2, &ctx, ht);
14122 md5_finish_ctx (&ctx, checksum_after_op2);
14123 htab_delete (ht);
14125 if (memcmp (checksum_before_op2, checksum_after_op2, 16))
14126 fold_check_failed (op2, tem);
14127 #endif
14128 return tem;
14131 /* Fold a CALL_EXPR expression of type TYPE with operands FN and NARGS
14132 arguments in ARGARRAY, and a null static chain.
14133 Return a folded expression if successful. Otherwise, return a CALL_EXPR
14134 of type TYPE from the given operands as constructed by build_call_array. */
14136 tree
14137 fold_build_call_array_loc (location_t loc, tree type, tree fn,
14138 int nargs, tree *argarray)
14140 tree tem;
14141 #ifdef ENABLE_FOLD_CHECKING
14142 unsigned char checksum_before_fn[16],
14143 checksum_before_arglist[16],
14144 checksum_after_fn[16],
14145 checksum_after_arglist[16];
14146 struct md5_ctx ctx;
14147 htab_t ht;
14148 int i;
14150 ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
14151 md5_init_ctx (&ctx);
14152 fold_checksum_tree (fn, &ctx, ht);
14153 md5_finish_ctx (&ctx, checksum_before_fn);
14154 htab_empty (ht);
14156 md5_init_ctx (&ctx);
14157 for (i = 0; i < nargs; i++)
14158 fold_checksum_tree (argarray[i], &ctx, ht);
14159 md5_finish_ctx (&ctx, checksum_before_arglist);
14160 htab_empty (ht);
14161 #endif
14163 tem = fold_builtin_call_array (loc, type, fn, nargs, argarray);
14165 #ifdef ENABLE_FOLD_CHECKING
14166 md5_init_ctx (&ctx);
14167 fold_checksum_tree (fn, &ctx, ht);
14168 md5_finish_ctx (&ctx, checksum_after_fn);
14169 htab_empty (ht);
14171 if (memcmp (checksum_before_fn, checksum_after_fn, 16))
14172 fold_check_failed (fn, tem);
14174 md5_init_ctx (&ctx);
14175 for (i = 0; i < nargs; i++)
14176 fold_checksum_tree (argarray[i], &ctx, ht);
14177 md5_finish_ctx (&ctx, checksum_after_arglist);
14178 htab_delete (ht);
14180 if (memcmp (checksum_before_arglist, checksum_after_arglist, 16))
14181 fold_check_failed (NULL_TREE, tem);
14182 #endif
14183 return tem;
14186 /* Perform constant folding and related simplification of initializer
14187 expression EXPR. These behave identically to "fold_buildN" but ignore
14188 potential run-time traps and exceptions that fold must preserve. */
14190 #define START_FOLD_INIT \
14191 int saved_signaling_nans = flag_signaling_nans;\
14192 int saved_trapping_math = flag_trapping_math;\
14193 int saved_rounding_math = flag_rounding_math;\
14194 int saved_trapv = flag_trapv;\
14195 int saved_folding_initializer = folding_initializer;\
14196 flag_signaling_nans = 0;\
14197 flag_trapping_math = 0;\
14198 flag_rounding_math = 0;\
14199 flag_trapv = 0;\
14200 folding_initializer = 1;
14202 #define END_FOLD_INIT \
14203 flag_signaling_nans = saved_signaling_nans;\
14204 flag_trapping_math = saved_trapping_math;\
14205 flag_rounding_math = saved_rounding_math;\
14206 flag_trapv = saved_trapv;\
14207 folding_initializer = saved_folding_initializer;
14209 tree
14210 fold_build1_initializer_loc (location_t loc, enum tree_code code,
14211 tree type, tree op)
14213 tree result;
14214 START_FOLD_INIT;
14216 result = fold_build1_loc (loc, code, type, op);
14218 END_FOLD_INIT;
14219 return result;
14222 tree
14223 fold_build2_initializer_loc (location_t loc, enum tree_code code,
14224 tree type, tree op0, tree op1)
14226 tree result;
14227 START_FOLD_INIT;
14229 result = fold_build2_loc (loc, code, type, op0, op1);
14231 END_FOLD_INIT;
14232 return result;
14235 tree
14236 fold_build3_initializer_loc (location_t loc, enum tree_code code,
14237 tree type, tree op0, tree op1, tree op2)
14239 tree result;
14240 START_FOLD_INIT;
14242 result = fold_build3_loc (loc, code, type, op0, op1, op2);
14244 END_FOLD_INIT;
14245 return result;
14248 tree
14249 fold_build_call_array_initializer_loc (location_t loc, tree type, tree fn,
14250 int nargs, tree *argarray)
14252 tree result;
14253 START_FOLD_INIT;
14255 result = fold_build_call_array_loc (loc, type, fn, nargs, argarray);
14257 END_FOLD_INIT;
14258 return result;
14261 #undef START_FOLD_INIT
14262 #undef END_FOLD_INIT
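
/* A minimal standalone sketch of the START_FOLD_INIT/END_FOLD_INIT
   idiom above: paired macros that save a set of global flags, force
   them to a known state for the duration of a computation, and
   restore them afterwards.  The flag names here are hypothetical
   stand-ins for flag_trapping_math and friends.  */

#include <stdio.h>

static int flag_one = 1, flag_two = 1;

#define START_RELAXED \
  int saved_one = flag_one;\
  int saved_two = flag_two;\
  flag_one = 0;\
  flag_two = 0;

#define END_RELAXED \
  flag_one = saved_one;\
  flag_two = saved_two;

static int
compute_relaxed (int x)
{
  int result;
  START_RELAXED;
  result = x * 2;	/* runs with both flags cleared */
  END_RELAXED;
  return result;
}

int
main (void)
{
  printf ("%d, flags restored: %d %d\n",
	  compute_relaxed (21), flag_one, flag_two);
  return 0;
}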
14264 /* Determine if first argument is a multiple of second argument. Return 0 if
14265 it is not, or we cannot easily determine it to be.
14267 An example of the sort of thing we care about (at this point; this routine
14268 could surely be made more general, and expanded to do what the *_DIV_EXPR's
14269 fold cases do now) is discovering that
14271 SAVE_EXPR (I) * SAVE_EXPR (J * 8)
14273 is a multiple of
14275 SAVE_EXPR (J * 8)
14277 when we know that the two SAVE_EXPR (J * 8) nodes are the same node.
14279 This code also handles discovering that
14281 SAVE_EXPR (I) * SAVE_EXPR (J * 8)
14283 is a multiple of 8 so we don't have to worry about dealing with a
14284 possible remainder.
14286 Note that we *look* inside a SAVE_EXPR only to determine how it was
14287 calculated; it is not safe for fold to do much of anything else with the
14288 internals of a SAVE_EXPR, since it cannot know when it will be evaluated
14289 at run time. For example, the latter example above *cannot* be implemented
14290 as SAVE_EXPR (I) * J or any variant thereof, since the value of J at
14291 evaluation time of the original SAVE_EXPR is not necessarily the same at
14292 the time the new expression is evaluated. The only optimization of this
14293 sort that would be valid is changing
14295 SAVE_EXPR (I) * SAVE_EXPR (SAVE_EXPR (J) * 8)
14297 divided by 8 to
14299 SAVE_EXPR (I) * SAVE_EXPR (J)
14301 (where the same SAVE_EXPR (J) is used in the original and the
14302 transformed version). */
14304 int
14305 multiple_of_p (tree type, const_tree top, const_tree bottom)
14307 if (operand_equal_p (top, bottom, 0))
14308 return 1;
14310 if (TREE_CODE (type) != INTEGER_TYPE)
14311 return 0;
14313 switch (TREE_CODE (top))
14315 case BIT_AND_EXPR:
14316 /* Bitwise and provides a power of two multiple. If the mask is
14317 a multiple of BOTTOM then TOP is a multiple of BOTTOM. */
14318 if (!integer_pow2p (bottom))
14319 return 0;
14320 /* FALLTHRU */
14322 case MULT_EXPR:
14323 return (multiple_of_p (type, TREE_OPERAND (top, 0), bottom)
14324 || multiple_of_p (type, TREE_OPERAND (top, 1), bottom));
14326 case PLUS_EXPR:
14327 case MINUS_EXPR:
14328 return (multiple_of_p (type, TREE_OPERAND (top, 0), bottom)
14329 && multiple_of_p (type, TREE_OPERAND (top, 1), bottom));
14331 case LSHIFT_EXPR:
14332 if (TREE_CODE (TREE_OPERAND (top, 1)) == INTEGER_CST)
14334 tree op1, t1;
14336 op1 = TREE_OPERAND (top, 1);
14337 /* const_binop may not detect overflow correctly,
14338 so check for it explicitly here. */
14339 if (TYPE_PRECISION (TREE_TYPE (size_one_node))
14340 > TREE_INT_CST_LOW (op1)
14341 && TREE_INT_CST_HIGH (op1) == 0
14342 && 0 != (t1 = fold_convert (type,
14343 const_binop (LSHIFT_EXPR,
14344 size_one_node,
14345 op1)))
14346 && !TREE_OVERFLOW (t1))
14347 return multiple_of_p (type, t1, bottom);
14349 return 0;
14351 case NOP_EXPR:
14352 /* Can't handle conversions from non-integral or wider integral type. */
14353 if ((TREE_CODE (TREE_TYPE (TREE_OPERAND (top, 0))) != INTEGER_TYPE)
14354 || (TYPE_PRECISION (type)
14355 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (top, 0)))))
14356 return 0;
14358 /* ... fall through ... */
14360 case SAVE_EXPR:
14361 return multiple_of_p (type, TREE_OPERAND (top, 0), bottom);
14363 case COND_EXPR:
14364 return (multiple_of_p (type, TREE_OPERAND (top, 1), bottom)
14365 && multiple_of_p (type, TREE_OPERAND (top, 2), bottom));
14367 case INTEGER_CST:
14368 if (TREE_CODE (bottom) != INTEGER_CST
14369 || integer_zerop (bottom)
14370 || (TYPE_UNSIGNED (type)
14371 && (tree_int_cst_sgn (top) < 0
14372 || tree_int_cst_sgn (bottom) < 0)))
14373 return 0;
14374 return integer_zerop (int_const_binop (TRUNC_MOD_EXPR,
14375 top, bottom));
14377 default:
14378 return 0;
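
/* A self-contained model of the recursion above, on a toy integer AST
   instead of GCC trees: a product is a multiple of BOTTOM if either
   factor is, a sum only if both addends are, and a constant is checked
   directly with %.  The toy_* names are hypothetical; this is not the
   GCC API.  */

#include <stdio.h>

enum toy_code { TOY_CST, TOY_PLUS, TOY_MULT };
struct toy { enum toy_code code; long cst; struct toy *op0, *op1; };

static int
toy_multiple_of_p (const struct toy *top, long bottom)
{
  switch (top->code)
    {
    case TOY_CST:
      return bottom != 0 && top->cst % bottom == 0;
    case TOY_MULT:
      /* One factor being a multiple is enough.  */
      return toy_multiple_of_p (top->op0, bottom)
	     || toy_multiple_of_p (top->op1, bottom);
    case TOY_PLUS:
      /* Both addends must be multiples.  */
      return toy_multiple_of_p (top->op0, bottom)
	     && toy_multiple_of_p (top->op1, bottom);
    }
  return 0;
}

int
main (void)
{
  struct toy i = { TOY_CST, 5, 0, 0 };		/* stands in for I */
  struct toy j8 = { TOY_CST, 24, 0, 0 };	/* stands in for J * 8 */
  struct toy prod = { TOY_MULT, 0, &i, &j8 };
  printf ("I * (J*8) multiple of 8? %d\n", toy_multiple_of_p (&prod, 8));
  return 0;
}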
14382 /* Return true if CODE or TYPE is known to be non-negative. */
14384 static bool
14385 tree_simple_nonnegative_warnv_p (enum tree_code code, tree type)
14387 if ((TYPE_PRECISION (type) != 1 || TYPE_UNSIGNED (type))
14388 && truth_value_p (code))
14389 /* Truth values evaluate to 0 or 1, which is nonnegative unless we
14390 have a signed:1 type (where the values are -1 and 0). */
14391 return true;
14392 return false;
14395 /* Return true if (CODE OP0) is known to be non-negative. If the return
14396 value is based on the assumption that signed overflow is undefined,
14397 set *STRICT_OVERFLOW_P to true; otherwise, don't change
14398 *STRICT_OVERFLOW_P. */
14400 bool
14401 tree_unary_nonnegative_warnv_p (enum tree_code code, tree type, tree op0,
14402 bool *strict_overflow_p)
14404 if (TYPE_UNSIGNED (type))
14405 return true;
14407 switch (code)
14409 case ABS_EXPR:
14410 /* We can't return 1 if flag_wrapv is set because
14411 ABS_EXPR<INT_MIN> = INT_MIN. */
14412 if (!INTEGRAL_TYPE_P (type))
14413 return true;
14414 if (TYPE_OVERFLOW_UNDEFINED (type))
14416 *strict_overflow_p = true;
14417 return true;
14419 break;
14421 case NON_LVALUE_EXPR:
14422 case FLOAT_EXPR:
14423 case FIX_TRUNC_EXPR:
14424 return tree_expr_nonnegative_warnv_p (op0,
14425 strict_overflow_p);
14427 case NOP_EXPR:
14429 tree inner_type = TREE_TYPE (op0);
14430 tree outer_type = type;
14432 if (TREE_CODE (outer_type) == REAL_TYPE)
14434 if (TREE_CODE (inner_type) == REAL_TYPE)
14435 return tree_expr_nonnegative_warnv_p (op0,
14436 strict_overflow_p);
14437 if (TREE_CODE (inner_type) == INTEGER_TYPE)
14439 if (TYPE_UNSIGNED (inner_type))
14440 return true;
14441 return tree_expr_nonnegative_warnv_p (op0,
14442 strict_overflow_p);
14445 else if (TREE_CODE (outer_type) == INTEGER_TYPE)
14447 if (TREE_CODE (inner_type) == REAL_TYPE)
14448 return tree_expr_nonnegative_warnv_p (op0,
14449 strict_overflow_p);
14450 if (TREE_CODE (inner_type) == INTEGER_TYPE)
14451 return TYPE_PRECISION (inner_type) < TYPE_PRECISION (outer_type)
14452 && TYPE_UNSIGNED (inner_type);
14455 break;
14457 default:
14458 return tree_simple_nonnegative_warnv_p (code, type);
14461 /* We don't know the sign of `t', so be conservative and return false. */
14462 return false;
14465 /* Return true if (CODE OP0 OP1) is known to be non-negative. If the return
14466 value is based on the assumption that signed overflow is undefined,
14467 set *STRICT_OVERFLOW_P to true; otherwise, don't change
14468 *STRICT_OVERFLOW_P. */
14470 bool
14471 tree_binary_nonnegative_warnv_p (enum tree_code code, tree type, tree op0,
14472 tree op1, bool *strict_overflow_p)
14474 if (TYPE_UNSIGNED (type))
14475 return true;
14477 switch (code)
14479 case POINTER_PLUS_EXPR:
14480 case PLUS_EXPR:
14481 if (FLOAT_TYPE_P (type))
14482 return (tree_expr_nonnegative_warnv_p (op0,
14483 strict_overflow_p)
14484 && tree_expr_nonnegative_warnv_p (op1,
14485 strict_overflow_p));
14487 /* zero_extend(x) + zero_extend(y) is non-negative if x and y are
14488 both unsigned and at least 2 bits shorter than the result. */
14489 if (TREE_CODE (type) == INTEGER_TYPE
14490 && TREE_CODE (op0) == NOP_EXPR
14491 && TREE_CODE (op1) == NOP_EXPR)
14493 tree inner1 = TREE_TYPE (TREE_OPERAND (op0, 0));
14494 tree inner2 = TREE_TYPE (TREE_OPERAND (op1, 0));
14495 if (TREE_CODE (inner1) == INTEGER_TYPE && TYPE_UNSIGNED (inner1)
14496 && TREE_CODE (inner2) == INTEGER_TYPE && TYPE_UNSIGNED (inner2))
14498 unsigned int prec = MAX (TYPE_PRECISION (inner1),
14499 TYPE_PRECISION (inner2)) + 1;
14500 return prec < TYPE_PRECISION (type);
14503 break;
14505 case MULT_EXPR:
14506 if (FLOAT_TYPE_P (type))
14508 /* x * x for floating point x is always non-negative. */
14509 if (operand_equal_p (op0, op1, 0))
14510 return true;
14511 return (tree_expr_nonnegative_warnv_p (op0,
14512 strict_overflow_p)
14513 && tree_expr_nonnegative_warnv_p (op1,
14514 strict_overflow_p));
14517 /* zero_extend(x) * zero_extend(y) is non-negative if x and y are
14518 both unsigned and their total width is shorter than the result. */
14519 if (TREE_CODE (type) == INTEGER_TYPE
14520 && (TREE_CODE (op0) == NOP_EXPR || TREE_CODE (op0) == INTEGER_CST)
14521 && (TREE_CODE (op1) == NOP_EXPR || TREE_CODE (op1) == INTEGER_CST))
14523 tree inner0 = (TREE_CODE (op0) == NOP_EXPR)
14524 ? TREE_TYPE (TREE_OPERAND (op0, 0))
14525 : TREE_TYPE (op0);
14526 tree inner1 = (TREE_CODE (op1) == NOP_EXPR)
14527 ? TREE_TYPE (TREE_OPERAND (op1, 0))
14528 : TREE_TYPE (op1);
14530 bool unsigned0 = TYPE_UNSIGNED (inner0);
14531 bool unsigned1 = TYPE_UNSIGNED (inner1);
14533 if (TREE_CODE (op0) == INTEGER_CST)
14534 unsigned0 = unsigned0 || tree_int_cst_sgn (op0) >= 0;
14536 if (TREE_CODE (op1) == INTEGER_CST)
14537 unsigned1 = unsigned1 || tree_int_cst_sgn (op1) >= 0;
14539 if (TREE_CODE (inner0) == INTEGER_TYPE && unsigned0
14540 && TREE_CODE (inner1) == INTEGER_TYPE && unsigned1)
14542 unsigned int precision0 = (TREE_CODE (op0) == INTEGER_CST)
14543 ? tree_int_cst_min_precision (op0, /*unsignedp=*/true)
14544 : TYPE_PRECISION (inner0);
14546 unsigned int precision1 = (TREE_CODE (op1) == INTEGER_CST)
14547 ? tree_int_cst_min_precision (op1, /*unsignedp=*/true)
14548 : TYPE_PRECISION (inner1);
14550 return precision0 + precision1 < TYPE_PRECISION (type);
14553 return false;
14555 case BIT_AND_EXPR:
14556 case MAX_EXPR:
14557 return (tree_expr_nonnegative_warnv_p (op0,
14558 strict_overflow_p)
14559 || tree_expr_nonnegative_warnv_p (op1,
14560 strict_overflow_p));
14562 case BIT_IOR_EXPR:
14563 case BIT_XOR_EXPR:
14564 case MIN_EXPR:
14565 case RDIV_EXPR:
14566 case TRUNC_DIV_EXPR:
14567 case CEIL_DIV_EXPR:
14568 case FLOOR_DIV_EXPR:
14569 case ROUND_DIV_EXPR:
14570 return (tree_expr_nonnegative_warnv_p (op0,
14571 strict_overflow_p)
14572 && tree_expr_nonnegative_warnv_p (op1,
14573 strict_overflow_p));
14575 case TRUNC_MOD_EXPR:
14576 case CEIL_MOD_EXPR:
14577 case FLOOR_MOD_EXPR:
14578 case ROUND_MOD_EXPR:
14579 return tree_expr_nonnegative_warnv_p (op0,
14580 strict_overflow_p);
14581 default:
14582 return tree_simple_nonnegative_warnv_p (code, type);
14585 /* We don't know the sign of `t', so be conservative and return false. */
14586 return false;
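
/* A worked check of the two precision rules above, assuming 8-bit
   operands widened to a 32-bit result.  For PLUS the requirement is
   MAX (8, 8) + 1 = 9 < 32, so even 255 + 255 = 510 stays far below the
   sign bit; for MULT it is 8 + 8 = 16 < 32, so 255 * 255 = 65025 does
   too.  Standalone arithmetic sketch, not the GCC API.  */

#include <stdio.h>

int
main (void)
{
  unsigned char x = 255, y = 255;

  int sum = (int) x + (int) y;	/* zero_extend(x) + zero_extend(y) */
  int prod = (int) x * (int) y;	/* zero_extend(x) * zero_extend(y) */

  int plus_rule = 8 + 1 < 32;	/* MAX (prec0, prec1) + 1 < result prec */
  int mult_rule = 8 + 8 < 32;	/* prec0 + prec1 < result prec */

  printf ("sum = %d (nonnegative: %d), prod = %d (nonnegative: %d)\n",
	  sum, plus_rule && sum >= 0, prod, mult_rule && prod >= 0);
  return 0;
}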
14589 /* Return true if T is known to be non-negative. If the return
14590 value is based on the assumption that signed overflow is undefined,
14591 set *STRICT_OVERFLOW_P to true; otherwise, don't change
14592 *STRICT_OVERFLOW_P. */
14594 bool
14595 tree_single_nonnegative_warnv_p (tree t, bool *strict_overflow_p)
14597 if (TYPE_UNSIGNED (TREE_TYPE (t)))
14598 return true;
14600 switch (TREE_CODE (t))
14602 case INTEGER_CST:
14603 return tree_int_cst_sgn (t) >= 0;
14605 case REAL_CST:
14606 return ! REAL_VALUE_NEGATIVE (TREE_REAL_CST (t));
14608 case FIXED_CST:
14609 return ! FIXED_VALUE_NEGATIVE (TREE_FIXED_CST (t));
14611 case COND_EXPR:
14612 return (tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 1),
14613 strict_overflow_p)
14614 && tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 2),
14615 strict_overflow_p));
14616 default:
14617 return tree_simple_nonnegative_warnv_p (TREE_CODE (t),
14618 TREE_TYPE (t));
14620 /* We don't know the sign of `t', so be conservative and return false. */
14621 return false;
14624 /* Return true if T is known to be non-negative. If the return
14625 value is based on the assumption that signed overflow is undefined,
14626 set *STRICT_OVERFLOW_P to true; otherwise, don't change
14627 *STRICT_OVERFLOW_P. */
14629 bool
14630 tree_call_nonnegative_warnv_p (tree type, tree fndecl,
14631 tree arg0, tree arg1, bool *strict_overflow_p)
14633 if (fndecl && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL)
14634 switch (DECL_FUNCTION_CODE (fndecl))
14636 CASE_FLT_FN (BUILT_IN_ACOS):
14637 CASE_FLT_FN (BUILT_IN_ACOSH):
14638 CASE_FLT_FN (BUILT_IN_CABS):
14639 CASE_FLT_FN (BUILT_IN_COSH):
14640 CASE_FLT_FN (BUILT_IN_ERFC):
14641 CASE_FLT_FN (BUILT_IN_EXP):
14642 CASE_FLT_FN (BUILT_IN_EXP10):
14643 CASE_FLT_FN (BUILT_IN_EXP2):
14644 CASE_FLT_FN (BUILT_IN_FABS):
14645 CASE_FLT_FN (BUILT_IN_FDIM):
14646 CASE_FLT_FN (BUILT_IN_HYPOT):
14647 CASE_FLT_FN (BUILT_IN_POW10):
14648 CASE_INT_FN (BUILT_IN_FFS):
14649 CASE_INT_FN (BUILT_IN_PARITY):
14650 CASE_INT_FN (BUILT_IN_POPCOUNT):
14651 case BUILT_IN_BSWAP32:
14652 case BUILT_IN_BSWAP64:
14653 /* Always true. */
14654 return true;
14656 CASE_FLT_FN (BUILT_IN_SQRT):
14657 /* sqrt(-0.0) is -0.0. */
14658 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (type)))
14659 return true;
14660 return tree_expr_nonnegative_warnv_p (arg0,
14661 strict_overflow_p);
14663 CASE_FLT_FN (BUILT_IN_ASINH):
14664 CASE_FLT_FN (BUILT_IN_ATAN):
14665 CASE_FLT_FN (BUILT_IN_ATANH):
14666 CASE_FLT_FN (BUILT_IN_CBRT):
14667 CASE_FLT_FN (BUILT_IN_CEIL):
14668 CASE_FLT_FN (BUILT_IN_ERF):
14669 CASE_FLT_FN (BUILT_IN_EXPM1):
14670 CASE_FLT_FN (BUILT_IN_FLOOR):
14671 CASE_FLT_FN (BUILT_IN_FMOD):
14672 CASE_FLT_FN (BUILT_IN_FREXP):
14673 CASE_FLT_FN (BUILT_IN_LCEIL):
14674 CASE_FLT_FN (BUILT_IN_LDEXP):
14675 CASE_FLT_FN (BUILT_IN_LFLOOR):
14676 CASE_FLT_FN (BUILT_IN_LLCEIL):
14677 CASE_FLT_FN (BUILT_IN_LLFLOOR):
14678 CASE_FLT_FN (BUILT_IN_LLRINT):
14679 CASE_FLT_FN (BUILT_IN_LLROUND):
14680 CASE_FLT_FN (BUILT_IN_LRINT):
14681 CASE_FLT_FN (BUILT_IN_LROUND):
14682 CASE_FLT_FN (BUILT_IN_MODF):
14683 CASE_FLT_FN (BUILT_IN_NEARBYINT):
14684 CASE_FLT_FN (BUILT_IN_RINT):
14685 CASE_FLT_FN (BUILT_IN_ROUND):
14686 CASE_FLT_FN (BUILT_IN_SCALB):
14687 CASE_FLT_FN (BUILT_IN_SCALBLN):
14688 CASE_FLT_FN (BUILT_IN_SCALBN):
14689 CASE_FLT_FN (BUILT_IN_SIGNBIT):
14690 CASE_FLT_FN (BUILT_IN_SIGNIFICAND):
14691 CASE_FLT_FN (BUILT_IN_SINH):
14692 CASE_FLT_FN (BUILT_IN_TANH):
14693 CASE_FLT_FN (BUILT_IN_TRUNC):
14694 /* True if the 1st argument is nonnegative. */
14695 return tree_expr_nonnegative_warnv_p (arg0,
14696 strict_overflow_p);
14698 CASE_FLT_FN (BUILT_IN_FMAX):
14699 /* True if the 1st OR 2nd arguments are nonnegative. */
14700 return (tree_expr_nonnegative_warnv_p (arg0,
14701 strict_overflow_p)
14702 || (tree_expr_nonnegative_warnv_p (arg1,
14703 strict_overflow_p)));
14705 CASE_FLT_FN (BUILT_IN_FMIN):
14706 /* True if the 1st AND 2nd arguments are nonnegative. */
14707 return (tree_expr_nonnegative_warnv_p (arg0,
14708 strict_overflow_p)
14709 && (tree_expr_nonnegative_warnv_p (arg1,
14710 strict_overflow_p)));
14712 CASE_FLT_FN (BUILT_IN_COPYSIGN):
14713 /* True if the 2nd argument is nonnegative. */
14714 return tree_expr_nonnegative_warnv_p (arg1,
14715 strict_overflow_p);
14717 CASE_FLT_FN (BUILT_IN_POWI):
14718 /* True if the 1st argument is nonnegative or the second
14719 argument is an even integer. */
14720 if (TREE_CODE (arg1) == INTEGER_CST
14721 && (TREE_INT_CST_LOW (arg1) & 1) == 0)
14722 return true;
14723 return tree_expr_nonnegative_warnv_p (arg0,
14724 strict_overflow_p);
14726 CASE_FLT_FN (BUILT_IN_POW):
14727 /* True if the 1st argument is nonnegative or the second
14728 argument is an even integer valued real. */
14729 if (TREE_CODE (arg1) == REAL_CST)
14731 REAL_VALUE_TYPE c;
14732 HOST_WIDE_INT n;
14734 c = TREE_REAL_CST (arg1);
14735 n = real_to_integer (&c);
14736 if ((n & 1) == 0)
14738 REAL_VALUE_TYPE cint;
14739 real_from_integer (&cint, VOIDmode, n,
14740 n < 0 ? -1 : 0, 0);
14741 if (real_identical (&c, &cint))
14742 return true;
14745 return tree_expr_nonnegative_warnv_p (arg0,
14746 strict_overflow_p);
14748 default:
14749 break;
14751 return tree_simple_nonnegative_warnv_p (CALL_EXPR,
14752 type);
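
/* A standalone mirror of the "even integer valued real" test above:
   truncate the exponent to an integer (the real_to_integer analogue),
   check the low bit, and verify the round trip reproduces the original
   value (the real_identical analogue), so 2.0 qualifies but 2.5 does
   not.  Plain doubles stand in for REAL_VALUE_TYPE.  */

#include <stdio.h>

static int
even_integer_valued_p (double c)
{
  long long n = (long long) c;		/* truncates toward zero */
  return (n & 1) == 0 && (double) n == c;
}

int
main (void)
{
  printf ("2.0 -> %d, 2.5 -> %d, 3.0 -> %d\n",
	  even_integer_valued_p (2.0),
	  even_integer_valued_p (2.5),
	  even_integer_valued_p (3.0));
  return 0;
}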
14755 /* Return true if T is known to be non-negative. If the return
14756 value is based on the assumption that signed overflow is undefined,
14757 set *STRICT_OVERFLOW_P to true; otherwise, don't change
14758 *STRICT_OVERFLOW_P. */
14760 bool
14761 tree_invalid_nonnegative_warnv_p (tree t, bool *strict_overflow_p)
14763 enum tree_code code = TREE_CODE (t);
14764 if (TYPE_UNSIGNED (TREE_TYPE (t)))
14765 return true;
14767 switch (code)
14769 case TARGET_EXPR:
14771 tree temp = TARGET_EXPR_SLOT (t);
14772 t = TARGET_EXPR_INITIAL (t);
14774 /* If the initializer is non-void, then it's a normal expression
14775 that will be assigned to the slot. */
14776 if (!VOID_TYPE_P (t))
14777 return tree_expr_nonnegative_warnv_p (t, strict_overflow_p);
14779 /* Otherwise, the initializer sets the slot in some way. One common
14780 way is an assignment statement at the end of the initializer. */
14781 while (1)
14783 if (TREE_CODE (t) == BIND_EXPR)
14784 t = expr_last (BIND_EXPR_BODY (t));
14785 else if (TREE_CODE (t) == TRY_FINALLY_EXPR
14786 || TREE_CODE (t) == TRY_CATCH_EXPR)
14787 t = expr_last (TREE_OPERAND (t, 0));
14788 else if (TREE_CODE (t) == STATEMENT_LIST)
14789 t = expr_last (t);
14790 else
14791 break;
14793 if (TREE_CODE (t) == MODIFY_EXPR
14794 && TREE_OPERAND (t, 0) == temp)
14795 return tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 1),
14796 strict_overflow_p);
14798 return false;
14801 case CALL_EXPR:
14803 tree arg0 = call_expr_nargs (t) > 0 ? CALL_EXPR_ARG (t, 0) : NULL_TREE;
14804 tree arg1 = call_expr_nargs (t) > 1 ? CALL_EXPR_ARG (t, 1) : NULL_TREE;
14806 return tree_call_nonnegative_warnv_p (TREE_TYPE (t),
14807 get_callee_fndecl (t),
14808 arg0,
14809 arg1,
14810 strict_overflow_p);
14812 case COMPOUND_EXPR:
14813 case MODIFY_EXPR:
14814 return tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 1),
14815 strict_overflow_p);
14816 case BIND_EXPR:
14817 return tree_expr_nonnegative_warnv_p (expr_last (TREE_OPERAND (t, 1)),
14818 strict_overflow_p);
14819 case SAVE_EXPR:
14820 return tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 0),
14821 strict_overflow_p);
14823 default:
14824 return tree_simple_nonnegative_warnv_p (TREE_CODE (t),
14825 TREE_TYPE (t));
14828 /* We don't know the sign of `t', so be conservative and return false. */
14829 return false;
14832 /* Return true if T is known to be non-negative. If the return
14833 value is based on the assumption that signed overflow is undefined,
14834 set *STRICT_OVERFLOW_P to true; otherwise, don't change
14835 *STRICT_OVERFLOW_P. */
14837 bool
14838 tree_expr_nonnegative_warnv_p (tree t, bool *strict_overflow_p)
14840 enum tree_code code;
14841 if (t == error_mark_node)
14842 return false;
14844 code = TREE_CODE (t);
14845 switch (TREE_CODE_CLASS (code))
14847 case tcc_binary:
14848 case tcc_comparison:
14849 return tree_binary_nonnegative_warnv_p (TREE_CODE (t),
14850 TREE_TYPE (t),
14851 TREE_OPERAND (t, 0),
14852 TREE_OPERAND (t, 1),
14853 strict_overflow_p);
14855 case tcc_unary:
14856 return tree_unary_nonnegative_warnv_p (TREE_CODE (t),
14857 TREE_TYPE (t),
14858 TREE_OPERAND (t, 0),
14859 strict_overflow_p);
14861 case tcc_constant:
14862 case tcc_declaration:
14863 case tcc_reference:
14864 return tree_single_nonnegative_warnv_p (t, strict_overflow_p);
14866 default:
14867 break;
14870 switch (code)
14872 case TRUTH_AND_EXPR:
14873 case TRUTH_OR_EXPR:
14874 case TRUTH_XOR_EXPR:
14875 return tree_binary_nonnegative_warnv_p (TREE_CODE (t),
14876 TREE_TYPE (t),
14877 TREE_OPERAND (t, 0),
14878 TREE_OPERAND (t, 1),
14879 strict_overflow_p);
14880 case TRUTH_NOT_EXPR:
14881 return tree_unary_nonnegative_warnv_p (TREE_CODE (t),
14882 TREE_TYPE (t),
14883 TREE_OPERAND (t, 0),
14884 strict_overflow_p);
14886 case COND_EXPR:
14887 case CONSTRUCTOR:
14888 case OBJ_TYPE_REF:
14889 case ASSERT_EXPR:
14890 case ADDR_EXPR:
14891 case WITH_SIZE_EXPR:
14892 case SSA_NAME:
14893 return tree_single_nonnegative_warnv_p (t, strict_overflow_p);
14895 default:
14896 return tree_invalid_nonnegative_warnv_p (t, strict_overflow_p);
14900 /* Return true if `t' is known to be non-negative. Handle warnings
14901 about undefined signed overflow. */
14903 bool
14904 tree_expr_nonnegative_p (tree t)
14906 bool ret, strict_overflow_p;
14908 strict_overflow_p = false;
14909 ret = tree_expr_nonnegative_warnv_p (t, &strict_overflow_p);
14910 if (strict_overflow_p)
14911 fold_overflow_warning (("assuming signed overflow does not occur when "
14912 "determining that expression is always "
14913 "non-negative"),
14914 WARN_STRICT_OVERFLOW_MISC);
14915 return ret;
14919 /* Return true when (CODE OP0) is an address and is known to be nonzero.
14920 For floating point we further ensure that T is not denormal.
14921 Similar logic is present in nonzero_address in rtlanal.c.
14923 If the return value is based on the assumption that signed overflow
14924 is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
14925 change *STRICT_OVERFLOW_P. */
14927 bool
14928 tree_unary_nonzero_warnv_p (enum tree_code code, tree type, tree op0,
14929 bool *strict_overflow_p)
14931 switch (code)
14933 case ABS_EXPR:
14934 return tree_expr_nonzero_warnv_p (op0,
14935 strict_overflow_p);
14937 case NOP_EXPR:
14939 tree inner_type = TREE_TYPE (op0);
14940 tree outer_type = type;
14942 return (TYPE_PRECISION (outer_type) >= TYPE_PRECISION (inner_type)
14943 && tree_expr_nonzero_warnv_p (op0,
14944 strict_overflow_p));
14946 break;
14948 case NON_LVALUE_EXPR:
14949 return tree_expr_nonzero_warnv_p (op0,
14950 strict_overflow_p);
14952 default:
14953 break;
14956 return false;
14959 /* Return true when (CODE OP0 OP1) is an address and is known to be nonzero.
14960 For floating point we further ensure that T is not denormal.
14961 Similar logic is present in nonzero_address in rtlanal.c.
14963 If the return value is based on the assumption that signed overflow
14964 is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
14965 change *STRICT_OVERFLOW_P. */
14967 bool
14968 tree_binary_nonzero_warnv_p (enum tree_code code,
14969 tree type,
14970 tree op0,
14971 tree op1, bool *strict_overflow_p)
14973 bool sub_strict_overflow_p;
14974 switch (code)
14976 case POINTER_PLUS_EXPR:
14977 case PLUS_EXPR:
14978 if (TYPE_OVERFLOW_UNDEFINED (type))
14980 /* In the presence of negative values it is hard
14981 to say anything. */
14982 sub_strict_overflow_p = false;
14983 if (!tree_expr_nonnegative_warnv_p (op0,
14984 &sub_strict_overflow_p)
14985 || !tree_expr_nonnegative_warnv_p (op1,
14986 &sub_strict_overflow_p))
14987 return false;
14988 /* One of the operands must be positive and the other non-negative. */
14989 /* We don't set *STRICT_OVERFLOW_P here: even if this value
14990 overflows, on a twos-complement machine the sum of two
14991 nonnegative numbers can never be zero. */
14992 return (tree_expr_nonzero_warnv_p (op0,
14993 strict_overflow_p)
14994 || tree_expr_nonzero_warnv_p (op1,
14995 strict_overflow_p));
14997 break;
14999 case MULT_EXPR:
15000 if (TYPE_OVERFLOW_UNDEFINED (type))
15002 if (tree_expr_nonzero_warnv_p (op0,
15003 strict_overflow_p)
15004 && tree_expr_nonzero_warnv_p (op1,
15005 strict_overflow_p))
15007 *strict_overflow_p = true;
15008 return true;
15011 break;
15013 case MIN_EXPR:
15014 sub_strict_overflow_p = false;
15015 if (tree_expr_nonzero_warnv_p (op0,
15016 &sub_strict_overflow_p)
15017 && tree_expr_nonzero_warnv_p (op1,
15018 &sub_strict_overflow_p))
15020 if (sub_strict_overflow_p)
15021 *strict_overflow_p = true;
15023 break;
15025 case MAX_EXPR:
15026 sub_strict_overflow_p = false;
15027 if (tree_expr_nonzero_warnv_p (op0,
15028 &sub_strict_overflow_p))
15030 if (sub_strict_overflow_p)
15031 *strict_overflow_p = true;
15033 /* When both operands are nonzero, MAX must be too. */
15034 if (tree_expr_nonzero_warnv_p (op1,
15035 strict_overflow_p))
15036 return true;
15038 /* MAX where operand 0 is positive is positive. */
15039 return tree_expr_nonnegative_warnv_p (op0,
15040 strict_overflow_p);
15042 /* MAX where operand 1 is positive is positive. */
15043 else if (tree_expr_nonzero_warnv_p (op1,
15044 &sub_strict_overflow_p)
15045 && tree_expr_nonnegative_warnv_p (op1,
15046 &sub_strict_overflow_p))
15048 if (sub_strict_overflow_p)
15049 *strict_overflow_p = true;
15050 return true;
15052 break;
15054 case BIT_IOR_EXPR:
15055 return (tree_expr_nonzero_warnv_p (op1,
15056 strict_overflow_p)
15057 || tree_expr_nonzero_warnv_p (op0,
15058 strict_overflow_p));
15060 default:
15061 break;
15064 return false;
15067 /* Return true when T is an address and is known to be nonzero.
15068 For floating point we further ensure that T is not denormal.
15069 Similar logic is present in nonzero_address in rtlanal.c.
15071 If the return value is based on the assumption that signed overflow
15072 is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
15073 change *STRICT_OVERFLOW_P. */
15075 bool
15076 tree_single_nonzero_warnv_p (tree t, bool *strict_overflow_p)
15078 bool sub_strict_overflow_p;
15079 switch (TREE_CODE (t))
15081 case INTEGER_CST:
15082 return !integer_zerop (t);
15084 case ADDR_EXPR:
15086 tree base = TREE_OPERAND (t, 0);
15087 if (!DECL_P (base))
15088 base = get_base_address (base);
15090 if (!base)
15091 return false;
15093 /* Weak declarations may link to NULL. Other things may also be NULL,
15094 so protect with -fdelete-null-pointer-checks; variables allocated
15095 on the stack, however, can never be NULL. */
15096 if (DECL_P (base)
15097 && (flag_delete_null_pointer_checks
15098 || (DECL_CONTEXT (base)
15099 && TREE_CODE (DECL_CONTEXT (base)) == FUNCTION_DECL
15100 && auto_var_in_fn_p (base, DECL_CONTEXT (base)))))
15101 return !VAR_OR_FUNCTION_DECL_P (base) || !DECL_WEAK (base);
15103 /* Constants are never weak. */
15104 if (CONSTANT_CLASS_P (base))
15105 return true;
15107 return false;
15110 case COND_EXPR:
15111 sub_strict_overflow_p = false;
15112 if (tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 1),
15113 &sub_strict_overflow_p)
15114 && tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 2),
15115 &sub_strict_overflow_p))
15117 if (sub_strict_overflow_p)
15118 *strict_overflow_p = true;
15119 return true;
15121 break;
15123 default:
15124 break;
15126 return false;
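
/* Why the DECL_WEAK check above matters: the address of a weak
   declaration may legitimately be null if no definition is linked in,
   so it must not be folded to "nonzero", while a defined non-weak
   object's address always is.  Standalone illustration for GNU
   toolchains; maybe_missing is a hypothetical symbol name.  */

#include <stdio.h>

extern int maybe_missing __attribute__ ((weak));
int present = 42;

int
main (void)
{
  int *wp = &maybe_missing;	/* null if no definition was linked in */
  int *pp = &present;		/* always nonzero */
  printf ("weak address nonzero? %d, normal address nonzero? %d\n",
	  wp != 0, pp != 0);
  return 0;
}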
15129 /* Return true when T is an address and is known to be nonzero.
15130 For floating point we further ensure that T is not denormal.
15131 Similar logic is present in nonzero_address in rtlanal.c.
15133 If the return value is based on the assumption that signed overflow
15134 is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
15135 change *STRICT_OVERFLOW_P. */
15137 bool
15138 tree_expr_nonzero_warnv_p (tree t, bool *strict_overflow_p)
15140 tree type = TREE_TYPE (t);
15141 enum tree_code code;
15143 /* Doing something useful for floating point would need more work. */
15144 if (!INTEGRAL_TYPE_P (type) && !POINTER_TYPE_P (type))
15145 return false;
15147 code = TREE_CODE (t);
15148 switch (TREE_CODE_CLASS (code))
15150 case tcc_unary:
15151 return tree_unary_nonzero_warnv_p (code, type, TREE_OPERAND (t, 0),
15152 strict_overflow_p);
15153 case tcc_binary:
15154 case tcc_comparison:
15155 return tree_binary_nonzero_warnv_p (code, type,
15156 TREE_OPERAND (t, 0),
15157 TREE_OPERAND (t, 1),
15158 strict_overflow_p);
15159 case tcc_constant:
15160 case tcc_declaration:
15161 case tcc_reference:
15162 return tree_single_nonzero_warnv_p (t, strict_overflow_p);
15164 default:
15165 break;
15168 switch (code)
15170 case TRUTH_NOT_EXPR:
15171 return tree_unary_nonzero_warnv_p (code, type, TREE_OPERAND (t, 0),
15172 strict_overflow_p);
15174 case TRUTH_AND_EXPR:
15175 case TRUTH_OR_EXPR:
15176 case TRUTH_XOR_EXPR:
15177 return tree_binary_nonzero_warnv_p (code, type,
15178 TREE_OPERAND (t, 0),
15179 TREE_OPERAND (t, 1),
15180 strict_overflow_p);
15182 case COND_EXPR:
15183 case CONSTRUCTOR:
15184 case OBJ_TYPE_REF:
15185 case ASSERT_EXPR:
15186 case ADDR_EXPR:
15187 case WITH_SIZE_EXPR:
15188 case SSA_NAME:
15189 return tree_single_nonzero_warnv_p (t, strict_overflow_p);
15191 case COMPOUND_EXPR:
15192 case MODIFY_EXPR:
15193 case BIND_EXPR:
15194 return tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 1),
15195 strict_overflow_p);
15197 case SAVE_EXPR:
15198 return tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 0),
15199 strict_overflow_p);
15201 case CALL_EXPR:
15202 return alloca_call_p (t);
15204 default:
15205 break;
15207 return false;
15210 /* Return true when T is an address and is known to be nonzero.
15211 Handle warnings about undefined signed overflow. */
15213 bool
15214 tree_expr_nonzero_p (tree t)
15216 bool ret, strict_overflow_p;
15218 strict_overflow_p = false;
15219 ret = tree_expr_nonzero_warnv_p (t, &strict_overflow_p);
15220 if (strict_overflow_p)
15221 fold_overflow_warning (("assuming signed overflow does not occur when "
15222 "determining that expression is always "
15223 "non-zero"),
15224 WARN_STRICT_OVERFLOW_MISC);
15225 return ret;
15228 /* Given the components of a binary expression CODE, TYPE, OP0 and OP1,
15229 attempt to fold the expression to a constant without modifying TYPE,
15230 OP0 or OP1.
15232 If the expression could be simplified to a constant, then return
15233 the constant. If the expression would not be simplified to a
15234 constant, then return NULL_TREE. */
15236 tree
15237 fold_binary_to_constant (enum tree_code code, tree type, tree op0, tree op1)
15239 tree tem = fold_binary (code, type, op0, op1);
15240 return (tem && TREE_CONSTANT (tem)) ? tem : NULL_TREE;
15243 /* Given the components of a unary expression CODE, TYPE and OP0,
15244 attempt to fold the expression to a constant without modifying
15245 TYPE or OP0.
15247 If the expression could be simplified to a constant, then return
15248 the constant. If the expression would not be simplified to a
15249 constant, then return NULL_TREE. */
15251 tree
15252 fold_unary_to_constant (enum tree_code code, tree type, tree op0)
15254 tree tem = fold_unary (code, type, op0);
15255 return (tem && TREE_CONSTANT (tem)) ? tem : NULL_TREE;
15258 /* If EXP represents referencing an element in a constant string
15259 (either via pointer arithmetic or array indexing), return the
15260 tree representing the value accessed, otherwise return NULL. */
15262 tree
15263 fold_read_from_constant_string (tree exp)
15265 if ((TREE_CODE (exp) == INDIRECT_REF
15266 || TREE_CODE (exp) == ARRAY_REF)
15267 && TREE_CODE (TREE_TYPE (exp)) == INTEGER_TYPE)
15269 tree exp1 = TREE_OPERAND (exp, 0);
15270 tree index;
15271 tree string;
15272 location_t loc = EXPR_LOCATION (exp);
15274 if (TREE_CODE (exp) == INDIRECT_REF)
15275 string = string_constant (exp1, &index);
15276 else
15278 tree low_bound = array_ref_low_bound (exp);
15279 index = fold_convert_loc (loc, sizetype, TREE_OPERAND (exp, 1));
15281 /* Optimize the special-case of a zero lower bound.
15283 We convert the low_bound to sizetype to avoid some problems
15284 with constant folding. (E.g. suppose the lower bound is 1,
15285 and its mode is QI. Without the conversion, (ARRAY
15286 +(INDEX-(unsigned char)1)) becomes ((ARRAY+(-(unsigned char)1))
15287 +INDEX), which becomes (ARRAY+255+INDEX). Oops!) */
15288 if (! integer_zerop (low_bound))
15289 index = size_diffop_loc (loc, index,
15290 fold_convert_loc (loc, sizetype, low_bound));
15292 string = exp1;
15295 if (string
15296 && TYPE_MODE (TREE_TYPE (exp)) == TYPE_MODE (TREE_TYPE (TREE_TYPE (string)))
15297 && TREE_CODE (string) == STRING_CST
15298 && TREE_CODE (index) == INTEGER_CST
15299 && compare_tree_int (index, TREE_STRING_LENGTH (string)) < 0
15300 && (GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_TYPE (string))))
15301 == MODE_INT)
15302 && (GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (TREE_TYPE (string)))) == 1))
15303 return build_int_cst_type (TREE_TYPE (exp),
15304 (TREE_STRING_POINTER (string)
15305 [TREE_INT_CST_LOW (index)]));
15307 return NULL;
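
/* What the folder above recognizes, at the source level: indexing a
   string literal with a constant index, whether by array indexing or
   by pointer arithmetic, can be folded to a character constant at
   compile time.  Standalone illustration; the folder itself works on
   trees, not C text.  */

#include <stdio.h>

int
main (void)
{
  char a = "hello"[1];		/* array indexing: folds to 'e' */
  char b = *("hello" + 4);	/* pointer arithmetic: folds to 'o' */
  printf ("%c %c\n", a, b);
  return 0;
}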
15310 /* Return the tree for neg (ARG0) when ARG0 is known to be either
15311 an integer constant, real, or fixed-point constant.
15313 TYPE is the type of the result. */
15315 static tree
15316 fold_negate_const (tree arg0, tree type)
15318 tree t = NULL_TREE;
15320 switch (TREE_CODE (arg0))
15322 case INTEGER_CST:
15324 double_int val = tree_to_double_int (arg0);
15325 int overflow = neg_double (val.low, val.high, &val.low, &val.high);
15327 t = force_fit_type_double (type, val, 1,
15328 (overflow | TREE_OVERFLOW (arg0))
15329 && !TYPE_UNSIGNED (type));
15330 break;
15333 case REAL_CST:
15334 t = build_real (type, real_value_negate (&TREE_REAL_CST (arg0)));
15335 break;
15337 case FIXED_CST:
15339 FIXED_VALUE_TYPE f;
15340 bool overflow_p = fixed_arithmetic (&f, NEGATE_EXPR,
15341 &(TREE_FIXED_CST (arg0)), NULL,
15342 TYPE_SATURATING (type));
15343 t = build_fixed (type, f);
15344 /* Propagate overflow flags. */
15345 if (overflow_p | TREE_OVERFLOW (arg0))
15346 TREE_OVERFLOW (t) = 1;
15347 break;
15350 default:
15351 gcc_unreachable ();
15354 return t;
15357 /* Return the tree for abs (ARG0) when ARG0 is known to be either
15358 an integer constant or real constant.
15360 TYPE is the type of the result. */
15362 tree
15363 fold_abs_const (tree arg0, tree type)
15365 tree t = NULL_TREE;
15367 switch (TREE_CODE (arg0))
15369 case INTEGER_CST:
15371 double_int val = tree_to_double_int (arg0);
15373 /* If the value is unsigned or non-negative, then the absolute value
15374 is the same as the ordinary value. */
15375 if (TYPE_UNSIGNED (type)
15376 || !double_int_negative_p (val))
15377 t = arg0;
15379 /* If the value is negative, then the absolute value is
15380 its negation. */
15381 else
15383 int overflow;
15385 overflow = neg_double (val.low, val.high, &val.low, &val.high);
15386 t = force_fit_type_double (type, val, -1,
15387 overflow | TREE_OVERFLOW (arg0));
15390 break;
15392 case REAL_CST:
15393 if (REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg0)))
15394 t = build_real (type, real_value_negate (&TREE_REAL_CST (arg0)));
15395 else
15396 t = arg0;
15397 break;
15399 default:
15400 gcc_unreachable ();
15403 return t;
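
/* The overflow case the INTEGER_CST arms of fold_negate_const and
   fold_abs_const above guard against: negating the most negative value
   of a signed type does not fit, so the folders must raise the
   TREE_OVERFLOW flag rather than pretend the result is representable.
   Standalone sketch using 64-bit arithmetic to expose the 32-bit
   overflow.  */

#include <stdint.h>
#include <stdio.h>

int
main (void)
{
  int32_t min = INT32_MIN;
  int64_t neg = -(int64_t) min;		/* compute -min without wrapping */
  int overflows = neg > INT32_MAX;	/* does it fit back in int32_t? */
  printf ("-INT32_MIN = %lld, overflows int32: %d\n",
	  (long long) neg, overflows);
  return 0;
}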
15406 /* Return the tree for not (ARG0) when ARG0 is known to be an integer
15407 constant. TYPE is the type of the result. */
15409 static tree
15410 fold_not_const (const_tree arg0, tree type)
15412 double_int val;
15414 gcc_assert (TREE_CODE (arg0) == INTEGER_CST);
15416 val = double_int_not (tree_to_double_int (arg0));
15417 return force_fit_type_double (type, val, 0, TREE_OVERFLOW (arg0));
15420 /* Given CODE, a relational operator, the target type, TYPE and two
15421 constant operands OP0 and OP1, return the result of the
15422 relational operation. If the result is not a compile time
15423 constant, then return NULL_TREE. */
15425 static tree
15426 fold_relational_const (enum tree_code code, tree type, tree op0, tree op1)
15428 int result, invert;
15430 /* From here on, the only cases we handle are when the result is
15431 known to be a constant. */
15433 if (TREE_CODE (op0) == REAL_CST && TREE_CODE (op1) == REAL_CST)
15435 const REAL_VALUE_TYPE *c0 = TREE_REAL_CST_PTR (op0);
15436 const REAL_VALUE_TYPE *c1 = TREE_REAL_CST_PTR (op1);
15438 /* Handle the cases where either operand is a NaN. */
15439 if (real_isnan (c0) || real_isnan (c1))
15441 switch (code)
15443 case EQ_EXPR:
15444 case ORDERED_EXPR:
15445 result = 0;
15446 break;
15448 case NE_EXPR:
15449 case UNORDERED_EXPR:
15450 case UNLT_EXPR:
15451 case UNLE_EXPR:
15452 case UNGT_EXPR:
15453 case UNGE_EXPR:
15454 case UNEQ_EXPR:
15455 result = 1;
15456 break;
15458 case LT_EXPR:
15459 case LE_EXPR:
15460 case GT_EXPR:
15461 case GE_EXPR:
15462 case LTGT_EXPR:
15463 if (flag_trapping_math)
15464 return NULL_TREE;
15465 result = 0;
15466 break;
15468 default:
15469 gcc_unreachable ();
15472 return constant_boolean_node (result, type);
15475 return constant_boolean_node (real_compare (code, c0, c1), type);
15478 if (TREE_CODE (op0) == FIXED_CST && TREE_CODE (op1) == FIXED_CST)
15480 const FIXED_VALUE_TYPE *c0 = TREE_FIXED_CST_PTR (op0);
15481 const FIXED_VALUE_TYPE *c1 = TREE_FIXED_CST_PTR (op1);
15482 return constant_boolean_node (fixed_compare (code, c0, c1), type);
15485 /* Handle equality/inequality of complex constants. */
15486 if (TREE_CODE (op0) == COMPLEX_CST && TREE_CODE (op1) == COMPLEX_CST)
15488 tree rcond = fold_relational_const (code, type,
15489 TREE_REALPART (op0),
15490 TREE_REALPART (op1));
15491 tree icond = fold_relational_const (code, type,
15492 TREE_IMAGPART (op0),
15493 TREE_IMAGPART (op1));
15494 if (code == EQ_EXPR)
15495 return fold_build2 (TRUTH_ANDIF_EXPR, type, rcond, icond);
15496 else if (code == NE_EXPR)
15497 return fold_build2 (TRUTH_ORIF_EXPR, type, rcond, icond);
15498 else
15499 return NULL_TREE;
15502 /* From here on we only handle LT, LE, GT, GE, EQ and NE.
15504 To compute GT, swap the arguments and do LT.
15505 To compute GE, do LT and invert the result.
15506 To compute LE, swap the arguments, do LT and invert the result.
15507 To compute NE, do EQ and invert the result.
15509 Therefore, the code below must handle only EQ and LT. */
15511 if (code == LE_EXPR || code == GT_EXPR)
15513 tree tem = op0;
15514 op0 = op1;
15515 op1 = tem;
15516 code = swap_tree_comparison (code);
15519 /* Note that it is safe to invert for real values here because we
15520 have already handled the one case that it matters. */
15522 invert = 0;
15523 if (code == NE_EXPR || code == GE_EXPR)
15525 invert = 1;
15526 code = invert_tree_comparison (code, false);
15529 /* Compute a result for LT or EQ if args permit;
15530 Otherwise return T. */
15531 if (TREE_CODE (op0) == INTEGER_CST && TREE_CODE (op1) == INTEGER_CST)
15533 if (code == EQ_EXPR)
15534 result = tree_int_cst_equal (op0, op1);
15535 else if (TYPE_UNSIGNED (TREE_TYPE (op0)))
15536 result = INT_CST_LT_UNSIGNED (op0, op1);
15537 else
15538 result = INT_CST_LT (op0, op1);
15540 else
15541 return NULL_TREE;
15543 if (invert)
15544 result ^= 1;
15545 return constant_boolean_node (result, type);
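
/* A standalone model of the canonicalization above: every integer
   comparison is answered with only an "lt" and an "eq" primitive.
   GT swaps the operands to become LT; GE is LT inverted; LE swaps and
   inverts; NE is EQ inverted.  Toy enum and long operands, not GCC
   tree codes.  */

#include <stdio.h>

enum cmp { LT, LE, GT, GE, EQ, NE };

static int
fold_cmp (enum cmp code, long op0, long op1)
{
  int invert = 0;
  long tmp;

  if (code == LE || code == GT)		/* swap the arguments */
    {
      tmp = op0; op0 = op1; op1 = tmp;
      code = (code == LE) ? GE : LT;
    }
  if (code == NE || code == GE)		/* compute EQ/LT, then invert */
    {
      invert = 1;
      code = (code == NE) ? EQ : LT;
    }

  int result = (code == EQ) ? (op0 == op1) : (op0 < op1);
  return invert ? !result : result;
}

int
main (void)
{
  printf ("3 <= 3: %d, 4 > 3: %d, 3 >= 4: %d, 3 != 3: %d\n",
	  fold_cmp (LE, 3, 3), fold_cmp (GT, 4, 3),
	  fold_cmp (GE, 3, 4), fold_cmp (NE, 3, 3));
  return 0;
}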
15548 /* If necessary, return a CLEANUP_POINT_EXPR for EXPR with the
15549 indicated TYPE. If no CLEANUP_POINT_EXPR is necessary, return EXPR
15550 itself. */
15552 tree
15553 fold_build_cleanup_point_expr (tree type, tree expr)
15555 /* If the expression does not have side effects then we don't have to wrap
15556 it with a cleanup point expression. */
15557 if (!TREE_SIDE_EFFECTS (expr))
15558 return expr;
15560 /* If the expression is a return, check whether the expression inside
15561 the return, or the right-hand side of the modify expression inside
15562 the return, has no side effects. If either has none, we don't need to
15563 wrap the expression in a cleanup point expression. Note we don't check
15564 the left-hand side of the modify because it should always be a return decl. */
15565 if (TREE_CODE (expr) == RETURN_EXPR)
15567 tree op = TREE_OPERAND (expr, 0);
15568 if (!op || !TREE_SIDE_EFFECTS (op))
15569 return expr;
15570 op = TREE_OPERAND (op, 1);
15571 if (!TREE_SIDE_EFFECTS (op))
15572 return expr;
15575 return build1 (CLEANUP_POINT_EXPR, type, expr);
15578 /* Given a pointer value OP0 and a type TYPE, return a simplified version
15579 of an indirection through OP0, or NULL_TREE if no simplification is
15580 possible. */
15582 tree
15583 fold_indirect_ref_1 (location_t loc, tree type, tree op0)
15585 tree sub = op0;
15586 tree subtype;
15588 STRIP_NOPS (sub);
15589 subtype = TREE_TYPE (sub);
15590 if (!POINTER_TYPE_P (subtype))
15591 return NULL_TREE;
15593 if (TREE_CODE (sub) == ADDR_EXPR)
15595 tree op = TREE_OPERAND (sub, 0);
15596 tree optype = TREE_TYPE (op);
15597 /* *&CONST_DECL -> to the value of the const decl. */
15598 if (TREE_CODE (op) == CONST_DECL)
15599 return DECL_INITIAL (op);
15600 /* *&p => p; make sure to handle *&"str"[cst] here. */
15601 if (type == optype)
15603 tree fop = fold_read_from_constant_string (op);
15604 if (fop)
15605 return fop;
15606 else
15607 return op;
15609 /* *(foo *)&fooarray => fooarray[0] */
15610 else if (TREE_CODE (optype) == ARRAY_TYPE
15611 && type == TREE_TYPE (optype)
15612 && (!in_gimple_form
15613 || TREE_CODE (TYPE_SIZE (type)) == INTEGER_CST))
15615 tree type_domain = TYPE_DOMAIN (optype);
15616 tree min_val = size_zero_node;
15617 if (type_domain && TYPE_MIN_VALUE (type_domain))
15618 min_val = TYPE_MIN_VALUE (type_domain);
15619 if (in_gimple_form
15620 && TREE_CODE (min_val) != INTEGER_CST)
15621 return NULL_TREE;
15622 return build4_loc (loc, ARRAY_REF, type, op, min_val,
15623 NULL_TREE, NULL_TREE);
15625 /* *(foo *)&complexfoo => __real__ complexfoo */
15626 else if (TREE_CODE (optype) == COMPLEX_TYPE
15627 && type == TREE_TYPE (optype))
15628 return fold_build1_loc (loc, REALPART_EXPR, type, op);
15629 /* *(foo *)&vectorfoo => BIT_FIELD_REF<vectorfoo,...> */
15630 else if (TREE_CODE (optype) == VECTOR_TYPE
15631 && type == TREE_TYPE (optype))
15633 tree part_width = TYPE_SIZE (type);
15634 tree index = bitsize_int (0);
15635 return fold_build3_loc (loc, BIT_FIELD_REF, type, op, part_width, index);
15639 if (TREE_CODE (sub) == POINTER_PLUS_EXPR
15640 && TREE_CODE (TREE_OPERAND (sub, 1)) == INTEGER_CST)
15642 tree op00 = TREE_OPERAND (sub, 0);
15643 tree op01 = TREE_OPERAND (sub, 1);
15645 STRIP_NOPS (op00);
15646 if (TREE_CODE (op00) == ADDR_EXPR)
15648 tree op00type;
15649 op00 = TREE_OPERAND (op00, 0);
15650 op00type = TREE_TYPE (op00);
15652 /* ((foo*)&vectorfoo)[1] => BIT_FIELD_REF<vectorfoo,...> */
15653 if (TREE_CODE (op00type) == VECTOR_TYPE
15654 && type == TREE_TYPE (op00type))
15656 HOST_WIDE_INT offset = tree_low_cst (op01, 0);
15657 tree part_width = TYPE_SIZE (type);
15658 unsigned HOST_WIDE_INT part_widthi = tree_low_cst (part_width, 0)/BITS_PER_UNIT;
15659 unsigned HOST_WIDE_INT indexi = offset * BITS_PER_UNIT;
15660 tree index = bitsize_int (indexi);
15662 if (offset / part_widthi < TYPE_VECTOR_SUBPARTS (op00type))
15663 return fold_build3_loc (loc,
15664 BIT_FIELD_REF, type, op00,
15665 part_width, index);
15668 /* ((foo*)&complexfoo)[1] => __imag__ complexfoo */
15669 else if (TREE_CODE (op00type) == COMPLEX_TYPE
15670 && type == TREE_TYPE (op00type))
15672 tree size = TYPE_SIZE_UNIT (type);
15673 if (tree_int_cst_equal (size, op01))
15674 return fold_build1_loc (loc, IMAGPART_EXPR, type, op00);
15676 /* ((foo *)&fooarray)[1] => fooarray[1] */
15677 else if (TREE_CODE (op00type) == ARRAY_TYPE
15678 && type == TREE_TYPE (op00type))
15680 tree type_domain = TYPE_DOMAIN (op00type);
15681 tree min_val = size_zero_node;
15682 if (type_domain && TYPE_MIN_VALUE (type_domain))
15683 min_val = TYPE_MIN_VALUE (type_domain);
15684 op01 = size_binop_loc (loc, EXACT_DIV_EXPR, op01,
15685 TYPE_SIZE_UNIT (type));
15686 op01 = size_binop_loc (loc, PLUS_EXPR, op01, min_val);
15687 return build4_loc (loc, ARRAY_REF, type, op00, op01,
15688 NULL_TREE, NULL_TREE);
15693 /* *(foo *)fooarrptr => (*fooarrptr)[0] */
15694 if (TREE_CODE (TREE_TYPE (subtype)) == ARRAY_TYPE
15695 && type == TREE_TYPE (TREE_TYPE (subtype))
15696 && (!in_gimple_form
15697 || TREE_CODE (TYPE_SIZE (type)) == INTEGER_CST))
15699 tree type_domain;
15700 tree min_val = size_zero_node;
15701 sub = build_fold_indirect_ref_loc (loc, sub);
15702 type_domain = TYPE_DOMAIN (TREE_TYPE (sub));
15703 if (type_domain && TYPE_MIN_VALUE (type_domain))
15704 min_val = TYPE_MIN_VALUE (type_domain);
15705 if (in_gimple_form
15706 && TREE_CODE (min_val) != INTEGER_CST)
15707 return NULL_TREE;
15708 return build4_loc (loc, ARRAY_REF, type, sub, min_val, NULL_TREE,
15709 NULL_TREE);
15712 return NULL_TREE;
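
/* Source-level shapes of the simplifications above: an indirection
   through the address of an array folds to element zero, and through
   the address of a _Complex folds to its real part (C guarantees a
   complex has the layout of a two-element array of the real type).
   Standalone illustration only; the folder works on trees, not C
   text.  */

#include <complex.h>
#include <stdio.h>

int
main (void)
{
  int arr[4] = { 7, 8, 9, 10 };
  double _Complex z = 1.5 + 2.5 * I;

  int first = *(int *) &arr;	/* *(foo *)&fooarray => fooarray[0] */
  double re = *(double *) &z;	/* *(foo *)&complexfoo => __real__ complexfoo */

  printf ("%d %g\n", first, re);
  return 0;
}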
15715 /* Builds an expression for an indirection through T, simplifying some
15716 cases. */
15718 tree
15719 build_fold_indirect_ref_loc (location_t loc, tree t)
15721 tree type = TREE_TYPE (TREE_TYPE (t));
15722 tree sub = fold_indirect_ref_1 (loc, type, t);
15724 if (sub)
15725 return sub;
15727 return build1_loc (loc, INDIRECT_REF, type, t);
15730 /* Given an INDIRECT_REF T, return either T or a simplified version. */
15732 tree
15733 fold_indirect_ref_loc (location_t loc, tree t)
15735 tree sub = fold_indirect_ref_1 (loc, TREE_TYPE (t), TREE_OPERAND (t, 0));
15737 if (sub)
15738 return sub;
15739 else
15740 return t;
15743 /* Strip non-trapping, non-side-effecting tree nodes from an expression
15744 whose result is ignored. The type of the returned tree need not be
15745 the same as that of the original expression. */
15747 tree
15748 fold_ignored_result (tree t)
15750 if (!TREE_SIDE_EFFECTS (t))
15751 return integer_zero_node;
15753 for (;;)
15754 switch (TREE_CODE_CLASS (TREE_CODE (t)))
15756 case tcc_unary:
15757 t = TREE_OPERAND (t, 0);
15758 break;
15760 case tcc_binary:
15761 case tcc_comparison:
15762 if (!TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1)))
15763 t = TREE_OPERAND (t, 0);
15764 else if (!TREE_SIDE_EFFECTS (TREE_OPERAND (t, 0)))
15765 t = TREE_OPERAND (t, 1);
15766 else
15767 return t;
15768 break;
15770 case tcc_expression:
15771 switch (TREE_CODE (t))
15773 case COMPOUND_EXPR:
15774 if (TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1)))
15775 return t;
15776 t = TREE_OPERAND (t, 0);
15777 break;
15779 case COND_EXPR:
15780 if (TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1))
15781 || TREE_SIDE_EFFECTS (TREE_OPERAND (t, 2)))
15782 return t;
15783 t = TREE_OPERAND (t, 0);
15784 break;
15786 default:
15787 return t;
15789 break;
15791 default:
15792 return t;
15796 /* Return the value of VALUE, rounded up to a multiple of DIVISOR.
15797 This can only be applied to objects of a sizetype. */
15799 tree
15800 round_up_loc (location_t loc, tree value, int divisor)
15802 tree div = NULL_TREE;
15804 gcc_assert (divisor > 0);
15805 if (divisor == 1)
15806 return value;
15808 /* See if VALUE is already a multiple of DIVISOR. If so, we don't
15809 have to do anything. Only do this when VALUE is not a constant;
15810 for constants this check is more expensive than simply doing the
15811 rounding. */
15812 if (TREE_CODE (value) != INTEGER_CST)
15814 div = build_int_cst (TREE_TYPE (value), divisor);
15816 if (multiple_of_p (TREE_TYPE (value), value, div))
15817 return value;
15820 /* If divisor is a power of two, simplify this to bit manipulation. */
15821 if (divisor == (divisor & -divisor))
15823 if (TREE_CODE (value) == INTEGER_CST)
15825 double_int val = tree_to_double_int (value);
15826 bool overflow_p;
15828 if ((val.low & (divisor - 1)) == 0)
15829 return value;
15831 overflow_p = TREE_OVERFLOW (value);
15832 val.low &= ~(divisor - 1);
15833 val.low += divisor;
15834 if (val.low == 0)
15836 val.high++;
15837 if (val.high == 0)
15838 overflow_p = true;
15841 return force_fit_type_double (TREE_TYPE (value), val,
15842 -1, overflow_p);
15844 else
15846 tree t;
15848 t = build_int_cst (TREE_TYPE (value), divisor - 1);
15849 value = size_binop_loc (loc, PLUS_EXPR, value, t);
15850 t = build_int_cst (TREE_TYPE (value), -divisor);
15851 value = size_binop_loc (loc, BIT_AND_EXPR, value, t);
15854 else
15856 if (!div)
15857 div = build_int_cst (TREE_TYPE (value), divisor);
15858 value = size_binop_loc (loc, CEIL_DIV_EXPR, value, div);
15859 value = size_binop_loc (loc, MULT_EXPR, value, div);
15862 return value;
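/* Illustrative sketch, not part of the original file: for a power-of-two
   divisor D, the non-constant path above builds (V + (D - 1)) & -D, the
   classic branch-free round-up.  A plain-C check of that identity;
   round_up_p2 is a hypothetical name used only for this demo.  */
#include <assert.h>

static unsigned long
round_up_p2 (unsigned long v, unsigned long d)
{
  /* Valid only when d is a power of two, so that -d == ~(d - 1).  */
  return (v + d - 1) & -d;
}

int
main (void)
{
  assert (round_up_p2 (13, 8) == 16);
  assert (round_up_p2 (16, 8) == 16);  /* exact multiples are unchanged */
  return 0;
}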
/* Likewise, but round down.  */

tree
round_down_loc (location_t loc, tree value, int divisor)
{
  tree div = NULL_TREE;

  gcc_assert (divisor > 0);
  if (divisor == 1)
    return value;

  /* See if VALUE is already a multiple of DIVISOR.  If so, we don't
     have to do anything.  Only do this check for non-constant VALUE,
     because for a constant the check is more expensive than just
     doing the rounding.  */
  if (TREE_CODE (value) != INTEGER_CST)
    {
      div = build_int_cst (TREE_TYPE (value), divisor);

      if (multiple_of_p (TREE_TYPE (value), value, div))
        return value;
    }

  /* If divisor is a power of two, simplify this to bit manipulation.  */
  if (divisor == (divisor & -divisor))
    {
      tree t;

      t = build_int_cst (TREE_TYPE (value), -divisor);
      value = size_binop_loc (loc, BIT_AND_EXPR, value, t);
    }
  else
    {
      if (!div)
        div = build_int_cst (TREE_TYPE (value), divisor);
      value = size_binop_loc (loc, FLOOR_DIV_EXPR, value, div);
      value = size_binop_loc (loc, MULT_EXPR, value, div);
    }

  return value;
}
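/* Illustrative sketch, not part of the original file: rounding down to
   a power-of-two D is a single mask, V & -D.  round_down_p2 is a
   hypothetical name used only for this demo.  */
#include <assert.h>

static unsigned long
round_down_p2 (unsigned long v, unsigned long d)
{
  return v & -d;  /* valid only when d is a power of two */
}

int
main (void)
{
  assert (round_down_p2 (13, 8) == 8);
  assert (round_down_p2 (16, 8) == 16);
  return 0;
}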
/* Returns a pointer to the base of the object addressed by EXP and
   extracts the offset of the access, storing it in *PBITPOS and
   *POFFSET.  */

static tree
split_address_to_core_and_offset (tree exp,
                                  HOST_WIDE_INT *pbitpos, tree *poffset)
{
  tree core;
  enum machine_mode mode;
  int unsignedp, volatilep;
  HOST_WIDE_INT bitsize;
  location_t loc = EXPR_LOCATION (exp);

  if (TREE_CODE (exp) == ADDR_EXPR)
    {
      core = get_inner_reference (TREE_OPERAND (exp, 0), &bitsize, pbitpos,
                                  poffset, &mode, &unsignedp, &volatilep,
                                  false);
      core = build_fold_addr_expr_loc (loc, core);
    }
  else
    {
      core = exp;
      *pbitpos = 0;
      *poffset = NULL_TREE;
    }

  return core;
}
/* Returns true if the addresses of E1 and E2 differ by a constant,
   false otherwise.  If they do, E1 - E2 is stored in *DIFF.  */

bool
ptr_difference_const (tree e1, tree e2, HOST_WIDE_INT *diff)
{
  tree core1, core2;
  HOST_WIDE_INT bitpos1, bitpos2;
  tree toffset1, toffset2, tdiff, type;

  core1 = split_address_to_core_and_offset (e1, &bitpos1, &toffset1);
  core2 = split_address_to_core_and_offset (e2, &bitpos2, &toffset2);

  if (bitpos1 % BITS_PER_UNIT != 0
      || bitpos2 % BITS_PER_UNIT != 0
      || !operand_equal_p (core1, core2, 0))
    return false;

  if (toffset1 && toffset2)
    {
      type = TREE_TYPE (toffset1);
      if (type != TREE_TYPE (toffset2))
        toffset2 = fold_convert (type, toffset2);

      tdiff = fold_build2 (MINUS_EXPR, type, toffset1, toffset2);
      if (!cst_and_fits_in_hwi (tdiff))
        return false;

      *diff = int_cst_value (tdiff);
    }
  else if (toffset1 || toffset2)
    {
      /* If only one of the offsets is non-constant, the difference cannot
         be a constant.  */
      return false;
    }
  else
    *diff = 0;

  *diff += (bitpos1 - bitpos2) / BITS_PER_UNIT;
  return true;
}
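/* Illustrative sketch, not part of the original file: the kind of
   question ptr_difference_const answers, checked in plain C.  Two
   addresses with the same core object differ by a compile-time-constant
   number of bytes.  */
#include <assert.h>
#include <stddef.h>

struct S { char pad[3]; int x; };

int
main (void)
{
  struct S s;
  /* &s.x and &s.pad[1] share the core &s; their byte difference is the
     constant offsetof (struct S, x) - 1.  */
  assert ((char *) &s.x - &s.pad[1]
          == (ptrdiff_t) (offsetof (struct S, x) - 1));
  return 0;
}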
/* Simplify the floating point expression EXP when the sign of the
   result is not significant.  Return NULL_TREE if no simplification
   is possible.  */

tree
fold_strip_sign_ops (tree exp)
{
  tree arg0, arg1;
  location_t loc = EXPR_LOCATION (exp);

  switch (TREE_CODE (exp))
    {
    case ABS_EXPR:
    case NEGATE_EXPR:
      arg0 = fold_strip_sign_ops (TREE_OPERAND (exp, 0));
      return arg0 ? arg0 : TREE_OPERAND (exp, 0);

    case MULT_EXPR:
    case RDIV_EXPR:
      if (HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (TREE_TYPE (exp))))
        return NULL_TREE;
      arg0 = fold_strip_sign_ops (TREE_OPERAND (exp, 0));
      arg1 = fold_strip_sign_ops (TREE_OPERAND (exp, 1));
      if (arg0 != NULL_TREE || arg1 != NULL_TREE)
        return fold_build2_loc (loc, TREE_CODE (exp), TREE_TYPE (exp),
                                arg0 ? arg0 : TREE_OPERAND (exp, 0),
                                arg1 ? arg1 : TREE_OPERAND (exp, 1));
      break;

    case COMPOUND_EXPR:
      arg0 = TREE_OPERAND (exp, 0);
      arg1 = fold_strip_sign_ops (TREE_OPERAND (exp, 1));
      if (arg1)
        return fold_build2_loc (loc, COMPOUND_EXPR, TREE_TYPE (exp), arg0, arg1);
      break;

    case COND_EXPR:
      arg0 = fold_strip_sign_ops (TREE_OPERAND (exp, 1));
      arg1 = fold_strip_sign_ops (TREE_OPERAND (exp, 2));
      if (arg0 || arg1)
        return fold_build3_loc (loc,
                                COND_EXPR, TREE_TYPE (exp), TREE_OPERAND (exp, 0),
                                arg0 ? arg0 : TREE_OPERAND (exp, 1),
                                arg1 ? arg1 : TREE_OPERAND (exp, 2));
      break;

    case CALL_EXPR:
      {
        const enum built_in_function fcode = builtin_mathfn_code (exp);
        switch (fcode)
          {
          CASE_FLT_FN (BUILT_IN_COPYSIGN):
            /* Strip the copysign function call; return the first argument.  */
            arg0 = CALL_EXPR_ARG (exp, 0);
            arg1 = CALL_EXPR_ARG (exp, 1);
            return omit_one_operand_loc (loc, TREE_TYPE (exp), arg0, arg1);

          default:
            /* Strip sign ops from the argument of "odd" math functions.  */
            if (negate_mathfn_p (fcode))
              {
                arg0 = fold_strip_sign_ops (CALL_EXPR_ARG (exp, 0));
                if (arg0)
                  return build_call_expr_loc (loc, get_callee_fndecl (exp), 1, arg0);
              }
            break;
          }
      }
      break;

    default:
      break;
    }
  return NULL_TREE;
}
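/* Illustrative sketch, not part of the original file: when only the
   magnitude of a result matters (here, under fabs), sign-flipping
   subexpressions and copysign calls are irrelevant, which is the
   observation fold_strip_sign_ops exploits.  */
#include <assert.h>
#include <math.h>

int
main (void)
{
  double x = -3.5, y = 2.0;
  /* A negation inside a product does not change the magnitude.  */
  assert (fabs (-x * y) == fabs (x * y));
  /* copysign (x, y) has the magnitude of x, so y can be dropped.  */
  assert (fabs (copysign (x, y)) == fabs (x));
  return 0;
}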