/* Fold a constant sub-tree into a single node for C-compiler
   Copyright (C) 1987, 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998, 1999,
   2000, 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009, 2010, 2011
   Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

/*@@ This file should be rewritten to use an arbitrary precision
  @@ representation for "struct tree_int_cst" and "struct tree_real_cst".
  @@ Perhaps the routines could also be used for bc/dc, and made a lib.
  @@ The routines that translate from the ap rep should
  @@ warn if precision et. al. is lost.
  @@ This would also make life easier when this technology is used
  @@ for cross-compilers.  */
/* The entry points in this file are fold, size_int_wide and size_binop.

   fold takes a tree as argument and returns a simplified tree.

   size_binop takes a tree code for an arithmetic operation
   and two operands that are trees, and produces a tree for the
   result, assuming the type comes from `sizetype'.

   size_int takes an integer value, and creates a tree constant
   with type from `sizetype'.

   Note: Since the folders get called on non-gimple code as well as
   gimple code, we need to handle GIMPLE tuples as well as their
   corresponding tree equivalents.  */
#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "flags.h"
#include "tree.h"
#include "realmpfr.h"
#include "rtl.h"
#include "expr.h"
#include "tm_p.h"
#include "target.h"
#include "diagnostic-core.h"
#include "intl.h"
#include "ggc.h"
#include "hashtab.h"
#include "langhooks.h"
#include "md5.h"
#include "gimple.h"
#include "tree-flow.h"
/* Nonzero if we are folding constants inside an initializer; zero
   otherwise.  */
int folding_initializer = 0;

/* The following constants represent a bit based encoding of GCC's
   comparison operators.  This encoding simplifies transformations
   on relational comparison operators, such as AND and OR.  */
enum comparison_code {
  COMPCODE_FALSE = 0,
  COMPCODE_LT = 1,
  COMPCODE_EQ = 2,
  COMPCODE_LE = 3,
  COMPCODE_GT = 4,
  COMPCODE_LTGT = 5,
  COMPCODE_GE = 6,
  COMPCODE_ORD = 7,
  COMPCODE_UNORD = 8,
  COMPCODE_UNLT = 9,
  COMPCODE_UNEQ = 10,
  COMPCODE_UNLE = 11,
  COMPCODE_UNGT = 12,
  COMPCODE_NE = 13,
  COMPCODE_UNGE = 14,
  COMPCODE_TRUE = 15
};
static bool negate_mathfn_p (enum built_in_function);
static bool negate_expr_p (tree);
static tree negate_expr (tree);
static tree split_tree (tree, enum tree_code, tree *, tree *, tree *, int);
static tree associate_trees (location_t, tree, tree, enum tree_code, tree);
static tree const_binop (enum tree_code, tree, tree);
static enum comparison_code comparison_to_compcode (enum tree_code);
static enum tree_code compcode_to_comparison (enum comparison_code);
static int operand_equal_for_comparison_p (tree, tree, tree);
static int twoval_comparison_p (tree, tree *, tree *, int *);
static tree eval_subst (location_t, tree, tree, tree, tree, tree);
static tree pedantic_omit_one_operand_loc (location_t, tree, tree, tree);
static tree distribute_bit_expr (location_t, enum tree_code, tree, tree, tree);
static tree make_bit_field_ref (location_t, tree, tree,
                                HOST_WIDE_INT, HOST_WIDE_INT, int);
static tree optimize_bit_field_compare (location_t, enum tree_code,
                                        tree, tree, tree);
static tree decode_field_reference (location_t, tree, HOST_WIDE_INT *,
                                    HOST_WIDE_INT *,
                                    enum machine_mode *, int *, int *,
                                    tree *, tree *);
static int all_ones_mask_p (const_tree, int);
static tree sign_bit_p (tree, const_tree);
static int simple_operand_p (const_tree);
static tree range_binop (enum tree_code, tree, tree, int, tree, int);
static tree range_predecessor (tree);
static tree range_successor (tree);
extern tree make_range (tree, int *, tree *, tree *, bool *);
extern bool merge_ranges (int *, tree *, tree *, int, tree, tree, int,
                          tree, tree);
static tree fold_range_test (location_t, enum tree_code, tree, tree, tree);
static tree fold_cond_expr_with_comparison (location_t, tree, tree, tree, tree);
static tree unextend (tree, int, int, tree);
static tree fold_truthop (location_t, enum tree_code, tree, tree, tree);
static tree optimize_minmax_comparison (location_t, enum tree_code,
                                        tree, tree, tree);
static tree extract_muldiv (tree, tree, enum tree_code, tree, bool *);
static tree extract_muldiv_1 (tree, tree, enum tree_code, tree, bool *);
static tree fold_binary_op_with_conditional_arg (location_t,
                                                 enum tree_code, tree,
                                                 tree, tree,
                                                 tree, tree, int);
static tree fold_mathfn_compare (location_t,
                                 enum built_in_function, enum tree_code,
                                 tree, tree, tree);
static tree fold_inf_compare (location_t, enum tree_code, tree, tree, tree);
static tree fold_div_compare (location_t, enum tree_code, tree, tree, tree);
static bool reorder_operands_p (const_tree, const_tree);
static tree fold_negate_const (tree, tree);
static tree fold_not_const (const_tree, tree);
static tree fold_relational_const (enum tree_code, tree, tree, tree);
static tree fold_convert_const (enum tree_code, tree, tree);
/* Return EXPR_LOCATION of T if it is not UNKNOWN_LOCATION.
   Otherwise, return LOC.  */

static location_t
expr_location_or (tree t, location_t loc)
{
  location_t tloc = EXPR_LOCATION (t);
  return tloc != UNKNOWN_LOCATION ? tloc : loc;
}

/* Similar to protected_set_expr_location, but never modify X in place;
   if the location can and needs to be set, unshare X first.  */

static inline tree
protected_set_expr_location_unshare (tree x, location_t loc)
{
  if (CAN_HAVE_LOCATION_P (x)
      && EXPR_LOCATION (x) != loc
      && !(TREE_CODE (x) == SAVE_EXPR
           || TREE_CODE (x) == TARGET_EXPR
           || TREE_CODE (x) == BIND_EXPR))
    {
      x = copy_node (x);
      SET_EXPR_LOCATION (x, loc);
    }
  return x;
}
/* We know that A1 + B1 = SUM1, using 2's complement arithmetic and ignoring
   overflow.  Suppose A, B and SUM have the same respective signs as A1, B1,
   and SUM1.  Then this yields nonzero if overflow occurred during the
   addition.

   Overflow occurs if A and B have the same sign, but A and SUM differ in
   sign.  Use `^' to test whether signs differ, and `< 0' to isolate the
   sign.  */
#define OVERFLOW_SUM_SIGN(a, b, sum) ((~((a) ^ (b)) & ((a) ^ (sum))) < 0)
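
/* Illustrative example (not part of the original source): with 32-bit
   signed operands, a = 0x7fffffff and b = 1 give sum = 0x80000000.
   Here a ^ b has the sign bit clear (the operands have the same sign)
   while a ^ sum has it set, so ~(a ^ b) & (a ^ sum) is negative and
   OVERFLOW_SUM_SIGN (a, b, sum) reports the overflow.  */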
/* If ARG2 divides ARG1 with zero remainder, carries out the division
   of type CODE and returns the quotient.
   Otherwise returns NULL_TREE.  */

tree
div_if_zero_remainder (enum tree_code code, const_tree arg1, const_tree arg2)
{
  double_int quo, rem;
  int uns;

  /* The sign of the division is taken from operand two, which does the
     correct thing for POINTER_PLUS_EXPR, where we want a signed
     division.  */
  uns = TYPE_UNSIGNED (TREE_TYPE (arg2));
  if (TREE_CODE (TREE_TYPE (arg2)) == INTEGER_TYPE
      && TYPE_IS_SIZETYPE (TREE_TYPE (arg2)))
    uns = false;

  quo = double_int_divmod (tree_to_double_int (arg1),
                           tree_to_double_int (arg2),
                           uns, code, &rem);

  if (double_int_zero_p (rem))
    return build_int_cst_wide (TREE_TYPE (arg1), quo.low, quo.high);

  return NULL_TREE;
}
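
/* Usage sketch (illustrative, not from the original source): for
   INTEGER_CST operands 12 and 4, div_if_zero_remainder (EXACT_DIV_EXPR,
   arg1, arg2) returns the constant 3; for 13 and 4 the remainder is
   nonzero and NULL_TREE is returned instead.  */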
/* This is nonzero if we should defer warnings about undefined
   overflow.  This facility exists because these warnings are a
   special case.  The code to estimate loop iterations does not want
   to issue any warnings, since it works with expressions which do not
   occur in user code.  Various bits of cleanup code call fold(), but
   only use the result if it has certain characteristics (e.g., is a
   constant); that code only wants to issue a warning if the result is
   used.  */

static int fold_deferring_overflow_warnings;

/* If a warning about undefined overflow is deferred, this is the
   warning.  Note that this may cause us to turn two warnings into
   one, but that is fine since it is sufficient to only give one
   warning per expression.  */

static const char* fold_deferred_overflow_warning;

/* If a warning about undefined overflow is deferred, this is the
   level at which the warning should be emitted.  */

static enum warn_strict_overflow_code fold_deferred_overflow_code;

/* Start deferring overflow warnings.  We could use a stack here to
   permit nested calls, but at present it is not necessary.  */

void
fold_defer_overflow_warnings (void)
{
  ++fold_deferring_overflow_warnings;
}

/* Stop deferring overflow warnings.  If there is a pending warning,
   and ISSUE is true, then issue the warning if appropriate.  STMT is
   the statement with which the warning should be associated (used for
   location information); STMT may be NULL.  CODE is the level of the
   warning--a warn_strict_overflow_code value.  This function will use
   the smaller of CODE and the deferred code when deciding whether to
   issue the warning.  CODE may be zero to mean to always use the
   deferred code.  */

void
fold_undefer_overflow_warnings (bool issue, const_gimple stmt, int code)
{
  const char *warnmsg;
  location_t locus;

  gcc_assert (fold_deferring_overflow_warnings > 0);
  --fold_deferring_overflow_warnings;
  if (fold_deferring_overflow_warnings > 0)
    {
      if (fold_deferred_overflow_warning != NULL
          && code != 0
          && code < (int) fold_deferred_overflow_code)
        fold_deferred_overflow_code = (enum warn_strict_overflow_code) code;
      return;
    }

  warnmsg = fold_deferred_overflow_warning;
  fold_deferred_overflow_warning = NULL;

  if (!issue || warnmsg == NULL)
    return;

  if (gimple_no_warning_p (stmt))
    return;

  /* Use the smallest code level when deciding to issue the
     warning.  */
  if (code == 0 || code > (int) fold_deferred_overflow_code)
    code = fold_deferred_overflow_code;

  if (!issue_strict_overflow_warning (code))
    return;

  if (stmt == NULL)
    locus = input_location;
  else
    locus = gimple_location (stmt);
  warning_at (locus, OPT_Wstrict_overflow, "%s", warnmsg);
}

/* Stop deferring overflow warnings, ignoring any deferred
   warnings.  */

void
fold_undefer_and_ignore_overflow_warnings (void)
{
  fold_undefer_overflow_warnings (false, NULL, 0);
}

/* Whether we are deferring overflow warnings.  */

bool
fold_deferring_overflow_warnings_p (void)
{
  return fold_deferring_overflow_warnings > 0;
}
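
/* Typical call pattern (an illustrative sketch of how callers elsewhere
   in GCC use this machinery, not a call site in this file):

     fold_defer_overflow_warnings ();
     tem = fold_binary (code, type, op0, op1);
     fold_undefer_overflow_warnings (tem != NULL_TREE, stmt, 0);

   so a -Wstrict-overflow warning is only emitted when the speculatively
   folded result is actually kept.  */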
/* This is called when we fold something based on the fact that signed
   overflow is undefined.  */

static void
fold_overflow_warning (const char* gmsgid, enum warn_strict_overflow_code wc)
{
  if (fold_deferring_overflow_warnings > 0)
    {
      if (fold_deferred_overflow_warning == NULL
          || wc < fold_deferred_overflow_code)
        {
          fold_deferred_overflow_warning = gmsgid;
          fold_deferred_overflow_code = wc;
        }
    }
  else if (issue_strict_overflow_warning (wc))
    warning (OPT_Wstrict_overflow, gmsgid);
}
/* Return true if the built-in mathematical function specified by CODE
   is odd, i.e. -f(x) == f(-x).  */

static bool
negate_mathfn_p (enum built_in_function code)
{
  switch (code)
    {
    CASE_FLT_FN (BUILT_IN_ASIN):
    CASE_FLT_FN (BUILT_IN_ASINH):
    CASE_FLT_FN (BUILT_IN_ATAN):
    CASE_FLT_FN (BUILT_IN_ATANH):
    CASE_FLT_FN (BUILT_IN_CASIN):
    CASE_FLT_FN (BUILT_IN_CASINH):
    CASE_FLT_FN (BUILT_IN_CATAN):
    CASE_FLT_FN (BUILT_IN_CATANH):
    CASE_FLT_FN (BUILT_IN_CBRT):
    CASE_FLT_FN (BUILT_IN_CPROJ):
    CASE_FLT_FN (BUILT_IN_CSIN):
    CASE_FLT_FN (BUILT_IN_CSINH):
    CASE_FLT_FN (BUILT_IN_CTAN):
    CASE_FLT_FN (BUILT_IN_CTANH):
    CASE_FLT_FN (BUILT_IN_ERF):
    CASE_FLT_FN (BUILT_IN_LLROUND):
    CASE_FLT_FN (BUILT_IN_LROUND):
    CASE_FLT_FN (BUILT_IN_ROUND):
    CASE_FLT_FN (BUILT_IN_SIN):
    CASE_FLT_FN (BUILT_IN_SINH):
    CASE_FLT_FN (BUILT_IN_TAN):
    CASE_FLT_FN (BUILT_IN_TANH):
    CASE_FLT_FN (BUILT_IN_TRUNC):
      return true;

    CASE_FLT_FN (BUILT_IN_LLRINT):
    CASE_FLT_FN (BUILT_IN_LRINT):
    CASE_FLT_FN (BUILT_IN_NEARBYINT):
    CASE_FLT_FN (BUILT_IN_RINT):
      return !flag_rounding_math;

    default:
      break;
    }
  return false;
}
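
/* For example, negate_mathfn_p (BUILT_IN_SIN) is true because
   sin(-x) == -sin(x), whereas an even function such as cos does not
   appear above.  The rint-like functions are odd only under the default
   round-to-nearest mode, hence the !flag_rounding_math test.  */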
/* Check whether we may negate an integer constant T without causing
   overflow.  */

bool
may_negate_without_overflow_p (const_tree t)
{
  unsigned HOST_WIDE_INT val;
  unsigned int prec;
  tree type;

  gcc_assert (TREE_CODE (t) == INTEGER_CST);

  type = TREE_TYPE (t);
  if (TYPE_UNSIGNED (type))
    return false;

  prec = TYPE_PRECISION (type);
  if (prec > HOST_BITS_PER_WIDE_INT)
    {
      if (TREE_INT_CST_LOW (t) != 0)
        return true;
      prec -= HOST_BITS_PER_WIDE_INT;
      val = TREE_INT_CST_HIGH (t);
    }
  else
    val = TREE_INT_CST_LOW (t);
  if (prec < HOST_BITS_PER_WIDE_INT)
    val &= ((unsigned HOST_WIDE_INT) 1 << prec) - 1;
  return val != ((unsigned HOST_WIDE_INT) 1 << (prec - 1));
}
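
/* Example (illustrative): for a 32-bit signed type the only value that
   cannot be negated without overflow is -2147483648, whose bit pattern
   is exactly 1 << (prec - 1); the final comparison detects precisely
   that pattern.  */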
/* Determine whether an expression T can be cheaply negated using
   the function negate_expr without introducing undefined overflow.  */

static bool
negate_expr_p (tree t)
{
  tree type;

  if (t == 0)
    return false;

  type = TREE_TYPE (t);

  STRIP_SIGN_NOPS (t);
  switch (TREE_CODE (t))
    {
    case INTEGER_CST:
      if (TYPE_OVERFLOW_WRAPS (type))
        return true;

      /* Check that -CST will not overflow type.  */
      return may_negate_without_overflow_p (t);
    case BIT_NOT_EXPR:
      return (INTEGRAL_TYPE_P (type)
              && TYPE_OVERFLOW_WRAPS (type));

    case FIXED_CST:
    case NEGATE_EXPR:
      return true;

    case REAL_CST:
      /* We want to canonicalize to positive real constants.  Pretend
         that only negative ones can be easily negated.  */
      return REAL_VALUE_NEGATIVE (TREE_REAL_CST (t));

    case COMPLEX_CST:
      return negate_expr_p (TREE_REALPART (t))
             && negate_expr_p (TREE_IMAGPART (t));

    case COMPLEX_EXPR:
      return negate_expr_p (TREE_OPERAND (t, 0))
             && negate_expr_p (TREE_OPERAND (t, 1));

    case CONJ_EXPR:
      return negate_expr_p (TREE_OPERAND (t, 0));

    case PLUS_EXPR:
      if (HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type))
          || HONOR_SIGNED_ZEROS (TYPE_MODE (type)))
        return false;
      /* -(A + B) -> (-B) - A.  */
      if (negate_expr_p (TREE_OPERAND (t, 1))
          && reorder_operands_p (TREE_OPERAND (t, 0),
                                 TREE_OPERAND (t, 1)))
        return true;
      /* -(A + B) -> (-A) - B.  */
      return negate_expr_p (TREE_OPERAND (t, 0));

    case MINUS_EXPR:
      /* We can't turn -(A-B) into B-A when we honor signed zeros.  */
      return !HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type))
             && !HONOR_SIGNED_ZEROS (TYPE_MODE (type))
             && reorder_operands_p (TREE_OPERAND (t, 0),
                                    TREE_OPERAND (t, 1));

    case MULT_EXPR:
      if (TYPE_UNSIGNED (TREE_TYPE (t)))
        break;

      /* Fall through.  */

    case RDIV_EXPR:
      if (! HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (TREE_TYPE (t))))
        return negate_expr_p (TREE_OPERAND (t, 1))
               || negate_expr_p (TREE_OPERAND (t, 0));
      break;

    case TRUNC_DIV_EXPR:
    case ROUND_DIV_EXPR:
    case FLOOR_DIV_EXPR:
    case CEIL_DIV_EXPR:
    case EXACT_DIV_EXPR:
      /* In general we can't negate A / B, because if A is INT_MIN and
         B is 1, we may turn this into INT_MIN / -1 which is undefined
         and actually traps on some architectures.  But if overflow is
         undefined, we can negate, because - (INT_MIN / 1) is an
         overflow.  */
      if (INTEGRAL_TYPE_P (TREE_TYPE (t))
          && !TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (t)))
        break;
      return negate_expr_p (TREE_OPERAND (t, 1))
             || negate_expr_p (TREE_OPERAND (t, 0));

    case NOP_EXPR:
      /* Negate -((double)float) as (double)(-float).  */
      if (TREE_CODE (type) == REAL_TYPE)
        {
          tree tem = strip_float_extensions (t);
          if (tem != t)
            return negate_expr_p (tem);
        }
      break;

    case CALL_EXPR:
      /* Negate -f(x) as f(-x).  */
      if (negate_mathfn_p (builtin_mathfn_code (t)))
        return negate_expr_p (CALL_EXPR_ARG (t, 0));
      break;

    case RSHIFT_EXPR:
      /* Optimize -((int)x >> 31) into (unsigned)x >> 31.  */
      if (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST)
        {
          tree op1 = TREE_OPERAND (t, 1);
          if (TREE_INT_CST_HIGH (op1) == 0
              && (unsigned HOST_WIDE_INT) (TYPE_PRECISION (type) - 1)
                 == TREE_INT_CST_LOW (op1))
            return true;
        }
      break;

    default:
      break;
    }
  return false;
}
/* Given T, an expression, return a folded tree for -T or NULL_TREE, if no
   simplification is possible.
   If negate_expr_p would return true for T, NULL_TREE will never be
   returned.  */

static tree
fold_negate_expr (location_t loc, tree t)
{
  tree type = TREE_TYPE (t);
  tree tem;

  switch (TREE_CODE (t))
    {
    /* Convert - (~A) to A + 1.  */
    case BIT_NOT_EXPR:
      if (INTEGRAL_TYPE_P (type))
        return fold_build2_loc (loc, PLUS_EXPR, type, TREE_OPERAND (t, 0),
                                build_int_cst (type, 1));
      break;

    case INTEGER_CST:
      tem = fold_negate_const (t, type);
      if (TREE_OVERFLOW (tem) == TREE_OVERFLOW (t)
          || !TYPE_OVERFLOW_TRAPS (type))
        return tem;
      break;

    case REAL_CST:
      tem = fold_negate_const (t, type);
      /* Two's complement FP formats, such as c4x, may overflow.  */
      if (!TREE_OVERFLOW (tem) || !flag_trapping_math)
        return tem;
      break;

    case FIXED_CST:
      tem = fold_negate_const (t, type);
      return tem;

    case COMPLEX_CST:
      {
        tree rpart = negate_expr (TREE_REALPART (t));
        tree ipart = negate_expr (TREE_IMAGPART (t));

        if ((TREE_CODE (rpart) == REAL_CST
             && TREE_CODE (ipart) == REAL_CST)
            || (TREE_CODE (rpart) == INTEGER_CST
                && TREE_CODE (ipart) == INTEGER_CST))
          return build_complex (type, rpart, ipart);
      }
      break;

    case COMPLEX_EXPR:
      if (negate_expr_p (t))
        return fold_build2_loc (loc, COMPLEX_EXPR, type,
                                fold_negate_expr (loc, TREE_OPERAND (t, 0)),
                                fold_negate_expr (loc, TREE_OPERAND (t, 1)));
      break;

    case CONJ_EXPR:
      if (negate_expr_p (t))
        return fold_build1_loc (loc, CONJ_EXPR, type,
                                fold_negate_expr (loc, TREE_OPERAND (t, 0)));
      break;

    case NEGATE_EXPR:
      return TREE_OPERAND (t, 0);

    case PLUS_EXPR:
      if (!HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type))
          && !HONOR_SIGNED_ZEROS (TYPE_MODE (type)))
        {
          /* -(A + B) -> (-B) - A.  */
          if (negate_expr_p (TREE_OPERAND (t, 1))
              && reorder_operands_p (TREE_OPERAND (t, 0),
                                     TREE_OPERAND (t, 1)))
            {
              tem = negate_expr (TREE_OPERAND (t, 1));
              return fold_build2_loc (loc, MINUS_EXPR, type,
                                      tem, TREE_OPERAND (t, 0));
            }

          /* -(A + B) -> (-A) - B.  */
          if (negate_expr_p (TREE_OPERAND (t, 0)))
            {
              tem = negate_expr (TREE_OPERAND (t, 0));
              return fold_build2_loc (loc, MINUS_EXPR, type,
                                      tem, TREE_OPERAND (t, 1));
            }
        }
      break;

    case MINUS_EXPR:
      /* - (A - B) -> B - A  */
      if (!HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type))
          && !HONOR_SIGNED_ZEROS (TYPE_MODE (type))
          && reorder_operands_p (TREE_OPERAND (t, 0), TREE_OPERAND (t, 1)))
        return fold_build2_loc (loc, MINUS_EXPR, type,
                                TREE_OPERAND (t, 1), TREE_OPERAND (t, 0));
      break;

    case MULT_EXPR:
      if (TYPE_UNSIGNED (type))
        break;

      /* Fall through.  */

    case RDIV_EXPR:
      if (! HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type)))
        {
          tem = TREE_OPERAND (t, 1);
          if (negate_expr_p (tem))
            return fold_build2_loc (loc, TREE_CODE (t), type,
                                    TREE_OPERAND (t, 0), negate_expr (tem));
          tem = TREE_OPERAND (t, 0);
          if (negate_expr_p (tem))
            return fold_build2_loc (loc, TREE_CODE (t), type,
                                    negate_expr (tem), TREE_OPERAND (t, 1));
        }
      break;

    case TRUNC_DIV_EXPR:
    case ROUND_DIV_EXPR:
    case FLOOR_DIV_EXPR:
    case CEIL_DIV_EXPR:
    case EXACT_DIV_EXPR:
      /* In general we can't negate A / B, because if A is INT_MIN and
         B is 1, we may turn this into INT_MIN / -1 which is undefined
         and actually traps on some architectures.  But if overflow is
         undefined, we can negate, because - (INT_MIN / 1) is an
         overflow.  */
      if (!INTEGRAL_TYPE_P (type) || TYPE_OVERFLOW_UNDEFINED (type))
        {
          const char * const warnmsg = G_("assuming signed overflow does not "
                                          "occur when negating a division");
          tem = TREE_OPERAND (t, 1);
          if (negate_expr_p (tem))
            {
              if (INTEGRAL_TYPE_P (type)
                  && (TREE_CODE (tem) != INTEGER_CST
                      || integer_onep (tem)))
                fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_MISC);
              return fold_build2_loc (loc, TREE_CODE (t), type,
                                      TREE_OPERAND (t, 0), negate_expr (tem));
            }
          tem = TREE_OPERAND (t, 0);
          if (negate_expr_p (tem))
            {
              if (INTEGRAL_TYPE_P (type)
                  && (TREE_CODE (tem) != INTEGER_CST
                      || tree_int_cst_equal (tem, TYPE_MIN_VALUE (type))))
                fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_MISC);
              return fold_build2_loc (loc, TREE_CODE (t), type,
                                      negate_expr (tem), TREE_OPERAND (t, 1));
            }
        }
      break;

    case NOP_EXPR:
      /* Convert -((double)float) into (double)(-float).  */
      if (TREE_CODE (type) == REAL_TYPE)
        {
          tem = strip_float_extensions (t);
          if (tem != t && negate_expr_p (tem))
            return fold_convert_loc (loc, type, negate_expr (tem));
        }
      break;

    case CALL_EXPR:
      /* Negate -f(x) as f(-x).  */
      if (negate_mathfn_p (builtin_mathfn_code (t))
          && negate_expr_p (CALL_EXPR_ARG (t, 0)))
        {
          tree fndecl, arg;

          fndecl = get_callee_fndecl (t);
          arg = negate_expr (CALL_EXPR_ARG (t, 0));
          return build_call_expr_loc (loc, fndecl, 1, arg);
        }
      break;

    case RSHIFT_EXPR:
      /* Optimize -((int)x >> 31) into (unsigned)x >> 31.  */
      if (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST)
        {
          tree op1 = TREE_OPERAND (t, 1);
          if (TREE_INT_CST_HIGH (op1) == 0
              && (unsigned HOST_WIDE_INT) (TYPE_PRECISION (type) - 1)
                 == TREE_INT_CST_LOW (op1))
            {
              tree ntype = TYPE_UNSIGNED (type)
                           ? signed_type_for (type)
                           : unsigned_type_for (type);
              tree temp = fold_convert_loc (loc, ntype, TREE_OPERAND (t, 0));
              temp = fold_build2_loc (loc, RSHIFT_EXPR, ntype, temp, op1);
              return fold_convert_loc (loc, type, temp);
            }
        }
      break;

    default:
      break;
    }

  return NULL_TREE;
}
/* Like fold_negate_expr, but return a NEGATE_EXPR tree, if T cannot be
   negated in a simpler way.  Also allow for T to be NULL_TREE, in which case
   return NULL_TREE.  */

static tree
negate_expr (tree t)
{
  tree type, tem;
  location_t loc;

  if (t == NULL_TREE)
    return NULL_TREE;

  loc = EXPR_LOCATION (t);
  type = TREE_TYPE (t);
  STRIP_SIGN_NOPS (t);

  tem = fold_negate_expr (loc, t);
  if (!tem)
    tem = build1_loc (loc, NEGATE_EXPR, TREE_TYPE (t), t);
  return fold_convert_loc (loc, type, tem);
}
/* Split a tree IN into a constant, literal and variable parts that could be
   combined with CODE to make IN.  "constant" means an expression with
   TREE_CONSTANT but that isn't an actual constant.  CODE must be a
   commutative arithmetic operation.  Store the constant part into *CONP,
   the literal in *LITP and return the variable part.  If a part isn't
   present, set it to null.  If the tree does not decompose in this way,
   return the entire tree as the variable part and the other parts as null.

   If CODE is PLUS_EXPR we also split trees that use MINUS_EXPR.  In that
   case, we negate an operand that was subtracted.  Except if it is a
   literal for which we use *MINUS_LITP instead.

   If NEGATE_P is true, we are negating all of IN, again except a literal
   for which we use *MINUS_LITP instead.

   If IN is itself a literal or constant, return it as appropriate.

   Note that we do not guarantee that any of the three values will be the
   same type as IN, but they will have the same signedness and mode.  */
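
/* Worked example (illustrative only): splitting IN = x - 4 with
   CODE == PLUS_EXPR and NEGATE_P false returns VAR = x and stores the
   literal 4 in *MINUS_LITP (it was subtracted), leaving *LITP and
   *CONP null.  */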
static tree
split_tree (tree in, enum tree_code code, tree *conp, tree *litp,
            tree *minus_litp, int negate_p)
{
  tree var = 0;

  *conp = 0;
  *litp = 0;
  *minus_litp = 0;

  /* Strip any conversions that don't change the machine mode or signedness.  */
  STRIP_SIGN_NOPS (in);

  if (TREE_CODE (in) == INTEGER_CST || TREE_CODE (in) == REAL_CST
      || TREE_CODE (in) == FIXED_CST)
    *litp = in;
  else if (TREE_CODE (in) == code
           || ((! FLOAT_TYPE_P (TREE_TYPE (in)) || flag_associative_math)
               && ! SAT_FIXED_POINT_TYPE_P (TREE_TYPE (in))
               /* We can associate addition and subtraction together (even
                  though the C standard doesn't say so) for integers because
                  the value is not affected.  For reals, the value might be
                  affected, so we can't.  */
               && ((code == PLUS_EXPR && TREE_CODE (in) == MINUS_EXPR)
                   || (code == MINUS_EXPR && TREE_CODE (in) == PLUS_EXPR))))
    {
      tree op0 = TREE_OPERAND (in, 0);
      tree op1 = TREE_OPERAND (in, 1);
      int neg1_p = TREE_CODE (in) == MINUS_EXPR;
      int neg_litp_p = 0, neg_conp_p = 0, neg_var_p = 0;

      /* First see if either of the operands is a literal, then a constant.  */
      if (TREE_CODE (op0) == INTEGER_CST || TREE_CODE (op0) == REAL_CST
          || TREE_CODE (op0) == FIXED_CST)
        *litp = op0, op0 = 0;
      else if (TREE_CODE (op1) == INTEGER_CST || TREE_CODE (op1) == REAL_CST
               || TREE_CODE (op1) == FIXED_CST)
        *litp = op1, neg_litp_p = neg1_p, op1 = 0;

      if (op0 != 0 && TREE_CONSTANT (op0))
        *conp = op0, op0 = 0;
      else if (op1 != 0 && TREE_CONSTANT (op1))
        *conp = op1, neg_conp_p = neg1_p, op1 = 0;

      /* If we haven't dealt with either operand, this is not a case we can
         decompose.  Otherwise, VAR is either of the ones remaining, if any.  */
      if (op0 != 0 && op1 != 0)
        var = in;
      else if (op0 != 0)
        var = op0;
      else
        var = op1, neg_var_p = neg1_p;

      /* Now do any needed negations.  */
      if (neg_litp_p)
        *minus_litp = *litp, *litp = 0;
      if (neg_conp_p)
        *conp = negate_expr (*conp);
      if (neg_var_p)
        var = negate_expr (var);
    }
  else if (TREE_CONSTANT (in))
    *conp = in;
  else
    var = in;

  if (negate_p)
    {
      if (*litp)
        *minus_litp = *litp, *litp = 0;
      else if (*minus_litp)
        *litp = *minus_litp, *minus_litp = 0;
      *conp = negate_expr (*conp);
      var = negate_expr (var);
    }

  return var;
}
/* Re-associate trees split by the above function.  T1 and T2 are
   either expressions to associate or null.  Return the new
   expression, if any.  LOC is the location of the new expression.  If
   we build an operation, do it in TYPE and with CODE.  */

static tree
associate_trees (location_t loc, tree t1, tree t2, enum tree_code code, tree type)
{
  if (t1 == 0)
    return t2;
  else if (t2 == 0)
    return t1;

  /* If either input is CODE, a PLUS_EXPR, or a MINUS_EXPR, don't
     try to fold this since we will have infinite recursion.  But do
     deal with any NEGATE_EXPRs.  */
  if (TREE_CODE (t1) == code || TREE_CODE (t2) == code
      || TREE_CODE (t1) == MINUS_EXPR || TREE_CODE (t2) == MINUS_EXPR)
    {
      if (code == PLUS_EXPR)
        {
          if (TREE_CODE (t1) == NEGATE_EXPR)
            return build2_loc (loc, MINUS_EXPR, type,
                               fold_convert_loc (loc, type, t2),
                               fold_convert_loc (loc, type,
                                                 TREE_OPERAND (t1, 0)));
          else if (TREE_CODE (t2) == NEGATE_EXPR)
            return build2_loc (loc, MINUS_EXPR, type,
                               fold_convert_loc (loc, type, t1),
                               fold_convert_loc (loc, type,
                                                 TREE_OPERAND (t2, 0)));
          else if (integer_zerop (t2))
            return fold_convert_loc (loc, type, t1);
        }
      else if (code == MINUS_EXPR)
        {
          if (integer_zerop (t2))
            return fold_convert_loc (loc, type, t1);
        }

      return build2_loc (loc, code, type, fold_convert_loc (loc, type, t1),
                         fold_convert_loc (loc, type, t2));
    }

  return fold_build2_loc (loc, code, type, fold_convert_loc (loc, type, t1),
                          fold_convert_loc (loc, type, t2));
}
/* Check whether TYPE1 and TYPE2 are equivalent integer types, suitable
   for use in int_const_binop, size_binop and size_diffop.  */

static bool
int_binop_types_match_p (enum tree_code code, const_tree type1, const_tree type2)
{
  if (TREE_CODE (type1) != INTEGER_TYPE && !POINTER_TYPE_P (type1))
    return false;
  if (TREE_CODE (type2) != INTEGER_TYPE && !POINTER_TYPE_P (type2))
    return false;

  switch (code)
    {
    case LSHIFT_EXPR:
    case RSHIFT_EXPR:
    case LROTATE_EXPR:
    case RROTATE_EXPR:
      return true;

    default:
      break;
    }

  return TYPE_UNSIGNED (type1) == TYPE_UNSIGNED (type2)
         && TYPE_PRECISION (type1) == TYPE_PRECISION (type2)
         && TYPE_MODE (type1) == TYPE_MODE (type2);
}
/* Combine two integer constants ARG1 and ARG2 under operation CODE
   to produce a new constant.  Return NULL_TREE if we don't know how
   to evaluate CODE at compile-time.

   If NOTRUNC is nonzero, do not truncate the result to fit the data type.  */

tree
int_const_binop (enum tree_code code, const_tree arg1, const_tree arg2, int notrunc)
{
  double_int op1, op2, res, tmp;
  tree t;
  tree type = TREE_TYPE (arg1);
  bool uns = TYPE_UNSIGNED (type);
  bool is_sizetype
    = (TREE_CODE (type) == INTEGER_TYPE && TYPE_IS_SIZETYPE (type));
  bool overflow = false;

  op1 = tree_to_double_int (arg1);
  op2 = tree_to_double_int (arg2);

  switch (code)
    {
    case BIT_IOR_EXPR:
      res = double_int_ior (op1, op2);
      break;

    case BIT_XOR_EXPR:
      res = double_int_xor (op1, op2);
      break;

    case BIT_AND_EXPR:
      res = double_int_and (op1, op2);
      break;

    case RSHIFT_EXPR:
      res = double_int_rshift (op1, double_int_to_shwi (op2),
                               TYPE_PRECISION (type), !uns);
      break;

    case LSHIFT_EXPR:
      /* It's unclear from the C standard whether shifts can overflow.
         The following code ignores overflow; perhaps a C standard
         interpretation ruling is needed.  */
      res = double_int_lshift (op1, double_int_to_shwi (op2),
                               TYPE_PRECISION (type), !uns);
      break;

    case RROTATE_EXPR:
      res = double_int_rrotate (op1, double_int_to_shwi (op2),
                                TYPE_PRECISION (type));
      break;

    case LROTATE_EXPR:
      res = double_int_lrotate (op1, double_int_to_shwi (op2),
                                TYPE_PRECISION (type));
      break;

    case PLUS_EXPR:
      overflow = add_double (op1.low, op1.high, op2.low, op2.high,
                             &res.low, &res.high);
      break;

    case MINUS_EXPR:
      neg_double (op2.low, op2.high, &res.low, &res.high);
      add_double (op1.low, op1.high, res.low, res.high,
                  &res.low, &res.high);
      overflow = OVERFLOW_SUM_SIGN (res.high, op2.high, op1.high);
      break;

    case MULT_EXPR:
      overflow = mul_double (op1.low, op1.high, op2.low, op2.high,
                             &res.low, &res.high);
      break;

    case TRUNC_DIV_EXPR:
    case FLOOR_DIV_EXPR: case CEIL_DIV_EXPR:
    case EXACT_DIV_EXPR:
      /* This is a shortcut for a common special case.  */
      if (op2.high == 0 && (HOST_WIDE_INT) op2.low > 0
          && !TREE_OVERFLOW (arg1)
          && !TREE_OVERFLOW (arg2)
          && op1.high == 0 && (HOST_WIDE_INT) op1.low >= 0)
        {
          if (code == CEIL_DIV_EXPR)
            op1.low += op2.low - 1;

          res.low = op1.low / op2.low, res.high = 0;
          break;
        }

      /* ... fall through ...  */

    case ROUND_DIV_EXPR:
      if (double_int_zero_p (op2))
        return NULL_TREE;
      if (double_int_one_p (op2))
        {
          res = op1;
          break;
        }
      if (double_int_equal_p (op1, op2)
          && ! double_int_zero_p (op1))
        {
          res = double_int_one;
          break;
        }
      overflow = div_and_round_double (code, uns,
                                       op1.low, op1.high, op2.low, op2.high,
                                       &res.low, &res.high,
                                       &tmp.low, &tmp.high);
      break;

    case TRUNC_MOD_EXPR:
    case FLOOR_MOD_EXPR: case CEIL_MOD_EXPR:
      /* This is a shortcut for a common special case.  */
      if (op2.high == 0 && (HOST_WIDE_INT) op2.low > 0
          && !TREE_OVERFLOW (arg1)
          && !TREE_OVERFLOW (arg2)
          && op1.high == 0 && (HOST_WIDE_INT) op1.low >= 0)
        {
          if (code == CEIL_MOD_EXPR)
            op1.low += op2.low - 1;
          res.low = op1.low % op2.low, res.high = 0;
          break;
        }

      /* ... fall through ...  */

    case ROUND_MOD_EXPR:
      if (double_int_zero_p (op2))
        return NULL_TREE;
      overflow = div_and_round_double (code, uns,
                                       op1.low, op1.high, op2.low, op2.high,
                                       &tmp.low, &tmp.high,
                                       &res.low, &res.high);
      break;

    case MIN_EXPR:
      res = double_int_min (op1, op2, uns);
      break;

    case MAX_EXPR:
      res = double_int_max (op1, op2, uns);
      break;

    default:
      return NULL_TREE;
    }

  if (notrunc)
    {
      t = build_int_cst_wide (TREE_TYPE (arg1), res.low, res.high);

      /* Propagate overflow flags ourselves.  */
      if (((!uns || is_sizetype) && overflow)
          | TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2))
        {
          t = copy_node (t);
          TREE_OVERFLOW (t) = 1;
        }
    }
  else
    t = force_fit_type_double (TREE_TYPE (arg1), res, 1,
                               ((!uns || is_sizetype) && overflow)
                               | TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2));

  return t;
}
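
/* Example (illustrative): int_const_binop (PLUS_EXPR, arg1, arg2, 0) on
   two INTEGER_CSTs of signed int type adds the double_int values with
   add_double, then force_fit_type_double fits the sum to the type's
   precision and sets TREE_OVERFLOW on the result if the signed addition
   wrapped.  */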
/* Combine two constants ARG1 and ARG2 under operation CODE to produce a new
   constant.  We assume ARG1 and ARG2 have the same data type, or at least
   are the same kind of constant and the same machine mode.  Return zero if
   combining the constants is not allowed in the current operating mode.  */

static tree
const_binop (enum tree_code code, tree arg1, tree arg2)
{
  /* Sanity check for the recursive cases.  */
  if (!arg1 || !arg2)
    return NULL_TREE;

  STRIP_NOPS (arg1);
  STRIP_NOPS (arg2);

  if (TREE_CODE (arg1) == INTEGER_CST)
    return int_const_binop (code, arg1, arg2, 0);

  if (TREE_CODE (arg1) == REAL_CST)
    {
      enum machine_mode mode;
      REAL_VALUE_TYPE d1;
      REAL_VALUE_TYPE d2;
      REAL_VALUE_TYPE value;
      REAL_VALUE_TYPE result;
      bool inexact;
      tree t, type;

      /* The following codes are handled by real_arithmetic.  */
      switch (code)
        {
        case PLUS_EXPR:
        case MINUS_EXPR:
        case MULT_EXPR:
        case RDIV_EXPR:
        case MIN_EXPR:
        case MAX_EXPR:
          break;

        default:
          return NULL_TREE;
        }

      d1 = TREE_REAL_CST (arg1);
      d2 = TREE_REAL_CST (arg2);

      type = TREE_TYPE (arg1);
      mode = TYPE_MODE (type);

      /* Don't perform operation if we honor signaling NaNs and
         either operand is a NaN.  */
      if (HONOR_SNANS (mode)
          && (REAL_VALUE_ISNAN (d1) || REAL_VALUE_ISNAN (d2)))
        return NULL_TREE;

      /* Don't perform operation if it would raise a division
         by zero exception.  */
      if (code == RDIV_EXPR
          && REAL_VALUES_EQUAL (d2, dconst0)
          && (flag_trapping_math || ! MODE_HAS_INFINITIES (mode)))
        return NULL_TREE;

      /* If either operand is a NaN, just return it.  Otherwise, set up
         for floating-point trap; we return an overflow.  */
      if (REAL_VALUE_ISNAN (d1))
        return arg1;
      else if (REAL_VALUE_ISNAN (d2))
        return arg2;

      inexact = real_arithmetic (&value, code, &d1, &d2);
      real_convert (&result, mode, &value);

      /* Don't constant fold this floating point operation if
         the result has overflowed and flag_trapping_math.  */
      if (flag_trapping_math
          && MODE_HAS_INFINITIES (mode)
          && REAL_VALUE_ISINF (result)
          && !REAL_VALUE_ISINF (d1)
          && !REAL_VALUE_ISINF (d2))
        return NULL_TREE;

      /* Don't constant fold this floating point operation if the
         result may depend upon the run-time rounding mode and
         flag_rounding_math is set, or if GCC's software emulation
         is unable to accurately represent the result.  */
      if ((flag_rounding_math
           || (MODE_COMPOSITE_P (mode) && !flag_unsafe_math_optimizations))
          && (inexact || !real_identical (&result, &value)))
        return NULL_TREE;

      t = build_real (type, result);

      TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2);
      return t;
    }

  if (TREE_CODE (arg1) == FIXED_CST)
    {
      FIXED_VALUE_TYPE f1;
      FIXED_VALUE_TYPE f2;
      FIXED_VALUE_TYPE result;
      tree t, type;
      int sat_p;
      bool overflow_p;

      /* The following codes are handled by fixed_arithmetic.  */
      switch (code)
        {
        case PLUS_EXPR:
        case MINUS_EXPR:
        case MULT_EXPR:
        case TRUNC_DIV_EXPR:
          f2 = TREE_FIXED_CST (arg2);
          break;

        case LSHIFT_EXPR:
        case RSHIFT_EXPR:
          f2.data.high = TREE_INT_CST_HIGH (arg2);
          f2.data.low = TREE_INT_CST_LOW (arg2);
          f2.mode = SImode;
          break;

        default:
          return NULL_TREE;
        }

      f1 = TREE_FIXED_CST (arg1);
      type = TREE_TYPE (arg1);
      sat_p = TYPE_SATURATING (type);
      overflow_p = fixed_arithmetic (&result, code, &f1, &f2, sat_p);
      t = build_fixed (type, result);
      /* Propagate overflow flags.  */
      if (overflow_p | TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2))
        TREE_OVERFLOW (t) = 1;
      return t;
    }

  if (TREE_CODE (arg1) == COMPLEX_CST)
    {
      tree type = TREE_TYPE (arg1);
      tree r1 = TREE_REALPART (arg1);
      tree i1 = TREE_IMAGPART (arg1);
      tree r2 = TREE_REALPART (arg2);
      tree i2 = TREE_IMAGPART (arg2);
      tree real, imag;

      switch (code)
        {
        case PLUS_EXPR:
        case MINUS_EXPR:
          real = const_binop (code, r1, r2);
          imag = const_binop (code, i1, i2);
          break;

        case MULT_EXPR:
          if (COMPLEX_FLOAT_TYPE_P (type))
            return do_mpc_arg2 (arg1, arg2, type,
                                /* do_nonfinite= */ folding_initializer,
                                mpc_mul);

          real = const_binop (MINUS_EXPR,
                              const_binop (MULT_EXPR, r1, r2),
                              const_binop (MULT_EXPR, i1, i2));
          imag = const_binop (PLUS_EXPR,
                              const_binop (MULT_EXPR, r1, i2),
                              const_binop (MULT_EXPR, i1, r2));
          break;

        case RDIV_EXPR:
          if (COMPLEX_FLOAT_TYPE_P (type))
            return do_mpc_arg2 (arg1, arg2, type,
                                /* do_nonfinite= */ folding_initializer,
                                mpc_div);
          /* Fallthru ... */
        case TRUNC_DIV_EXPR:
        case CEIL_DIV_EXPR:
        case FLOOR_DIV_EXPR:
        case ROUND_DIV_EXPR:
          if (flag_complex_method == 0)
            {
              /* Keep this algorithm in sync with
                 tree-complex.c:expand_complex_div_straight().

                 Expand complex division to scalars, straightforward algorithm.
                 a / b = ((ar*br + ai*bi)/t) + i((ai*br - ar*bi)/t)
                 t = br*br + bi*bi  */
              tree magsquared
                = const_binop (PLUS_EXPR,
                               const_binop (MULT_EXPR, r2, r2),
                               const_binop (MULT_EXPR, i2, i2));
              tree t1
                = const_binop (PLUS_EXPR,
                               const_binop (MULT_EXPR, r1, r2),
                               const_binop (MULT_EXPR, i1, i2));
              tree t2
                = const_binop (MINUS_EXPR,
                               const_binop (MULT_EXPR, i1, r2),
                               const_binop (MULT_EXPR, r1, i2));

              real = const_binop (code, t1, magsquared);
              imag = const_binop (code, t2, magsquared);
            }
          else
            {
              /* Keep this algorithm in sync with
                 tree-complex.c:expand_complex_div_wide().

                 Expand complex division to scalars, modified algorithm to
                 minimize overflow with wide input ranges.  */
              tree compare = fold_build2 (LT_EXPR, boolean_type_node,
                                          fold_abs_const (r2, TREE_TYPE (type)),
                                          fold_abs_const (i2, TREE_TYPE (type)));

              if (integer_nonzerop (compare))
                {
                  /* In the TRUE branch, we compute
                     ratio = br/bi;
                     div = (br * ratio) + bi;
                     tr = (ar * ratio) + ai;
                     ti = (ai * ratio) - ar;
                     tr = tr / div;
                     ti = ti / div;  */
                  tree ratio = const_binop (code, r2, i2);
                  tree div = const_binop (PLUS_EXPR, i2,
                                          const_binop (MULT_EXPR, r2, ratio));
                  real = const_binop (MULT_EXPR, r1, ratio);
                  real = const_binop (PLUS_EXPR, real, i1);
                  real = const_binop (code, real, div);

                  imag = const_binop (MULT_EXPR, i1, ratio);
                  imag = const_binop (MINUS_EXPR, imag, r1);
                  imag = const_binop (code, imag, div);
                }
              else
                {
                  /* In the FALSE branch, we compute
                     ratio = d/c;
                     divisor = (d * ratio) + c;
                     tr = (b * ratio) + a;
                     ti = b - (a * ratio);
                     tr = tr / div;
                     ti = ti / div;  */
                  tree ratio = const_binop (code, i2, r2);
                  tree div = const_binop (PLUS_EXPR, r2,
                                          const_binop (MULT_EXPR, i2, ratio));

                  real = const_binop (MULT_EXPR, i1, ratio);
                  real = const_binop (PLUS_EXPR, real, r1);
                  real = const_binop (code, real, div);

                  imag = const_binop (MULT_EXPR, r1, ratio);
                  imag = const_binop (MINUS_EXPR, i1, imag);
                  imag = const_binop (code, imag, div);
                }
            }
          break;

        default:
          return NULL_TREE;
        }

      if (real && imag)
        return build_complex (type, real, imag);
    }

  if (TREE_CODE (arg1) == VECTOR_CST)
    {
      tree type = TREE_TYPE (arg1);
      int count = TYPE_VECTOR_SUBPARTS (type), i;
      tree elements1, elements2, list = NULL_TREE;

      if (TREE_CODE (arg2) != VECTOR_CST)
        return NULL_TREE;

      elements1 = TREE_VECTOR_CST_ELTS (arg1);
      elements2 = TREE_VECTOR_CST_ELTS (arg2);

      for (i = 0; i < count; i++)
        {
          tree elem1, elem2, elem;

          /* The trailing elements can be empty and should be treated as 0.  */
          if (!elements1)
            elem1 = fold_convert_const (NOP_EXPR, TREE_TYPE (type),
                                        integer_zero_node);
          else
            {
              elem1 = TREE_VALUE (elements1);
              elements1 = TREE_CHAIN (elements1);
            }

          if (!elements2)
            elem2 = fold_convert_const (NOP_EXPR, TREE_TYPE (type),
                                        integer_zero_node);
          else
            {
              elem2 = TREE_VALUE (elements2);
              elements2 = TREE_CHAIN (elements2);
            }

          elem = const_binop (code, elem1, elem2);

          /* It is possible that const_binop cannot handle the given
             code and returns NULL_TREE.  */
          if (elem == NULL_TREE)
            return NULL_TREE;

          list = tree_cons (NULL_TREE, elem, list);
        }
      return build_vector (type, nreverse (list));
    }

  return NULL_TREE;
}
/* Create a size type INT_CST node with NUMBER sign extended.  KIND
   indicates which particular sizetype to create.  */

tree
size_int_kind (HOST_WIDE_INT number, enum size_type_kind kind)
{
  return build_int_cst (sizetype_tab[(int) kind], number);
}
/* Combine operands OP1 and OP2 with arithmetic operation CODE.  CODE
   is a tree code.  The type of the result is taken from the operands.
   Both must be equivalent integer types, ala int_binop_types_match_p.
   If the operands are constant, so is the result.  */

tree
size_binop_loc (location_t loc, enum tree_code code, tree arg0, tree arg1)
{
  tree type = TREE_TYPE (arg0);

  if (arg0 == error_mark_node || arg1 == error_mark_node)
    return error_mark_node;

  gcc_assert (int_binop_types_match_p (code, TREE_TYPE (arg0),
                                       TREE_TYPE (arg1)));

  /* Handle the special case of two integer constants faster.  */
  if (TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
    {
      /* And some specific cases even faster than that.  */
      if (code == PLUS_EXPR)
        {
          if (integer_zerop (arg0) && !TREE_OVERFLOW (arg0))
            return arg1;
          if (integer_zerop (arg1) && !TREE_OVERFLOW (arg1))
            return arg0;
        }
      else if (code == MINUS_EXPR)
        {
          if (integer_zerop (arg1) && !TREE_OVERFLOW (arg1))
            return arg0;
        }
      else if (code == MULT_EXPR)
        {
          if (integer_onep (arg0) && !TREE_OVERFLOW (arg0))
            return arg1;
        }

      /* Handle general case of two integer constants.  */
      return int_const_binop (code, arg0, arg1, 0);
    }

  return fold_build2_loc (loc, code, type, arg0, arg1);
}
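
/* Usage sketch (illustrative): size_binop (PLUS_EXPR, size_int (6),
   size_int (2)), where size_binop is the UNKNOWN_LOCATION wrapper
   around this function, takes the two-INTEGER_CST path above and folds
   directly to the sizetype constant 8.  */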
/* Given two values, either both of sizetype or both of bitsizetype,
   compute the difference between the two values.  Return the value
   in signed type corresponding to the type of the operands.  */

tree
size_diffop_loc (location_t loc, tree arg0, tree arg1)
{
  tree type = TREE_TYPE (arg0);
  tree ctype;

  gcc_assert (int_binop_types_match_p (MINUS_EXPR, TREE_TYPE (arg0),
                                       TREE_TYPE (arg1)));

  /* If the type is already signed, just do the simple thing.  */
  if (!TYPE_UNSIGNED (type))
    return size_binop_loc (loc, MINUS_EXPR, arg0, arg1);

  if (type == sizetype)
    ctype = ssizetype;
  else if (type == bitsizetype)
    ctype = sbitsizetype;
  else
    ctype = signed_type_for (type);

  /* If either operand is not a constant, do the conversions to the signed
     type and subtract.  The hardware will do the right thing with any
     overflow in the subtraction.  */
  if (TREE_CODE (arg0) != INTEGER_CST || TREE_CODE (arg1) != INTEGER_CST)
    return size_binop_loc (loc, MINUS_EXPR,
                           fold_convert_loc (loc, ctype, arg0),
                           fold_convert_loc (loc, ctype, arg1));

  /* If ARG0 is larger than ARG1, subtract and return the result in CTYPE.
     Otherwise, subtract the other way, convert to CTYPE (we know that can't
     overflow) and negate (which can't either).  Special-case a result
     of zero while we're here.  */
  if (tree_int_cst_equal (arg0, arg1))
    return build_int_cst (ctype, 0);
  else if (tree_int_cst_lt (arg1, arg0))
    return fold_convert_loc (loc, ctype,
                             size_binop_loc (loc, MINUS_EXPR, arg0, arg1));
  else
    return size_binop_loc (loc, MINUS_EXPR, build_int_cst (ctype, 0),
                           fold_convert_loc (loc, ctype,
                                             size_binop_loc (loc,
                                                             MINUS_EXPR,
                                                             arg1, arg0)));
}
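
/* Example (illustrative): for sizetype constants ARG0 = 2 and ARG1 = 5,
   the operands are unequal and ARG1 < ARG0 is false, so the final
   branch computes 5 - 2 = 3 in the unsigned type, converts it to
   ssizetype, and subtracts it from zero, yielding -3.  */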
/* A subroutine of fold_convert_const handling conversions of an
   INTEGER_CST to another integer type.  */

static tree
fold_convert_const_int_from_int (tree type, const_tree arg1)
{
  tree t;

  /* Given an integer constant, make new constant with new type,
     appropriately sign-extended or truncated.  */
  t = force_fit_type_double (type, tree_to_double_int (arg1),
                             !POINTER_TYPE_P (TREE_TYPE (arg1)),
                             (TREE_INT_CST_HIGH (arg1) < 0
                              && (TYPE_UNSIGNED (type)
                                  < TYPE_UNSIGNED (TREE_TYPE (arg1))))
                             | TREE_OVERFLOW (arg1));

  return t;
}
/* A subroutine of fold_convert_const handling conversions of a REAL_CST
   to an integer type.  */

static tree
fold_convert_const_int_from_real (enum tree_code code, tree type, const_tree arg1)
{
  int overflow = 0;
  tree t;

  /* The following code implements the floating point to integer
     conversion rules required by the Java Language Specification,
     that IEEE NaNs are mapped to zero and values that overflow
     the target precision saturate, i.e. values greater than
     INT_MAX are mapped to INT_MAX, and values less than INT_MIN
     are mapped to INT_MIN.  These semantics are allowed by the
     C and C++ standards that simply state that the behavior of
     FP-to-integer conversion is unspecified upon overflow.  */
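
  /* Illustrative mapping under these rules: converting the double
     1.0e10 to a 32-bit int yields INT_MAX with TREE_OVERFLOW set,
     -1.0e10 yields INT_MIN, and a NaN yields 0.  */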
  double_int val;
  REAL_VALUE_TYPE r;
  REAL_VALUE_TYPE x = TREE_REAL_CST (arg1);

  switch (code)
    {
    case FIX_TRUNC_EXPR:
      real_trunc (&r, VOIDmode, &x);
      break;

    default:
      gcc_unreachable ();
    }

  /* If R is NaN, return zero and show we have an overflow.  */
  if (REAL_VALUE_ISNAN (r))
    {
      overflow = 1;
      val = double_int_zero;
    }

  /* See if R is less than the lower bound or greater than the
     upper bound.  */

  if (! overflow)
    {
      tree lt = TYPE_MIN_VALUE (type);
      REAL_VALUE_TYPE l = real_value_from_int_cst (NULL_TREE, lt);
      if (REAL_VALUES_LESS (r, l))
        {
          overflow = 1;
          val = tree_to_double_int (lt);
        }
    }

  if (! overflow)
    {
      tree ut = TYPE_MAX_VALUE (type);
      if (ut)
        {
          REAL_VALUE_TYPE u = real_value_from_int_cst (NULL_TREE, ut);
          if (REAL_VALUES_LESS (u, r))
            {
              overflow = 1;
              val = tree_to_double_int (ut);
            }
        }
    }

  if (! overflow)
    real_to_integer2 ((HOST_WIDE_INT *) &val.low, &val.high, &r);

  t = force_fit_type_double (type, val, -1, overflow | TREE_OVERFLOW (arg1));
  return t;
}
/* A subroutine of fold_convert_const handling conversions of a
   FIXED_CST to an integer type.  */

static tree
fold_convert_const_int_from_fixed (tree type, const_tree arg1)
{
  tree t;
  double_int temp, temp_trunc;
  unsigned int mode;

  /* Right shift FIXED_CST to temp by fbit.  */
  temp = TREE_FIXED_CST (arg1).data;
  mode = TREE_FIXED_CST (arg1).mode;
  if (GET_MODE_FBIT (mode) < 2 * HOST_BITS_PER_WIDE_INT)
    {
      temp = double_int_rshift (temp, GET_MODE_FBIT (mode),
                                HOST_BITS_PER_DOUBLE_INT,
                                SIGNED_FIXED_POINT_MODE_P (mode));

      /* Left shift temp to temp_trunc by fbit.  */
      temp_trunc = double_int_lshift (temp, GET_MODE_FBIT (mode),
                                      HOST_BITS_PER_DOUBLE_INT,
                                      SIGNED_FIXED_POINT_MODE_P (mode));
    }
  else
    {
      temp = double_int_zero;
      temp_trunc = double_int_zero;
    }

  /* If FIXED_CST is negative, we need to round the value toward 0;
     we do so by adding 1 to temp when the fractional bits are not
     zero.  */
  if (SIGNED_FIXED_POINT_MODE_P (mode)
      && double_int_negative_p (temp_trunc)
      && !double_int_equal_p (TREE_FIXED_CST (arg1).data, temp_trunc))
    temp = double_int_add (temp, double_int_one);

  /* Given a fixed-point constant, make new constant with new type,
     appropriately sign-extended or truncated.  */
  t = force_fit_type_double (type, temp, -1,
                             (double_int_negative_p (temp)
                              && (TYPE_UNSIGNED (type)
                                  < TYPE_UNSIGNED (TREE_TYPE (arg1))))
                             | TREE_OVERFLOW (arg1));

  return t;
}
/* A subroutine of fold_convert_const handling conversions of a REAL_CST
   to another floating point type.  */

static tree
fold_convert_const_real_from_real (tree type, const_tree arg1)
{
  REAL_VALUE_TYPE value;
  tree t;

  real_convert (&value, TYPE_MODE (type), &TREE_REAL_CST (arg1));
  t = build_real (type, value);

  /* If converting an infinity or NAN to a representation that doesn't
     have one, set the overflow bit so that we can produce some kind of
     error message at the appropriate point if necessary.  It's not the
     most user-friendly message, but it's better than nothing.  */
  if (REAL_VALUE_ISINF (TREE_REAL_CST (arg1))
      && !MODE_HAS_INFINITIES (TYPE_MODE (type)))
    TREE_OVERFLOW (t) = 1;
  else if (REAL_VALUE_ISNAN (TREE_REAL_CST (arg1))
           && !MODE_HAS_NANS (TYPE_MODE (type)))
    TREE_OVERFLOW (t) = 1;
  /* Regular overflow, conversion produced an infinity in a mode that
     can't represent them.  */
  else if (!MODE_HAS_INFINITIES (TYPE_MODE (type))
           && REAL_VALUE_ISINF (value)
           && !REAL_VALUE_ISINF (TREE_REAL_CST (arg1)))
    TREE_OVERFLOW (t) = 1;
  else
    TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1);
  return t;
}
/* A subroutine of fold_convert_const handling conversions of a FIXED_CST
   to a floating point type.  */

static tree
fold_convert_const_real_from_fixed (tree type, const_tree arg1)
{
  REAL_VALUE_TYPE value;
  tree t;

  real_convert_from_fixed (&value, TYPE_MODE (type), &TREE_FIXED_CST (arg1));
  t = build_real (type, value);

  TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1);
  return t;
}
/* A subroutine of fold_convert_const handling conversions of a FIXED_CST
   to another fixed-point type.  */

static tree
fold_convert_const_fixed_from_fixed (tree type, const_tree arg1)
{
  FIXED_VALUE_TYPE value;
  tree t;
  bool overflow_p;

  overflow_p = fixed_convert (&value, TYPE_MODE (type), &TREE_FIXED_CST (arg1),
                              TYPE_SATURATING (type));
  t = build_fixed (type, value);

  /* Propagate overflow flags.  */
  if (overflow_p | TREE_OVERFLOW (arg1))
    TREE_OVERFLOW (t) = 1;
  return t;
}
/* A subroutine of fold_convert_const handling conversions of an INTEGER_CST
   to a fixed-point type.  */

static tree
fold_convert_const_fixed_from_int (tree type, const_tree arg1)
{
  FIXED_VALUE_TYPE value;
  tree t;
  bool overflow_p;

  overflow_p = fixed_convert_from_int (&value, TYPE_MODE (type),
                                       TREE_INT_CST (arg1),
                                       TYPE_UNSIGNED (TREE_TYPE (arg1)),
                                       TYPE_SATURATING (type));
  t = build_fixed (type, value);

  /* Propagate overflow flags.  */
  if (overflow_p | TREE_OVERFLOW (arg1))
    TREE_OVERFLOW (t) = 1;
  return t;
}
/* A subroutine of fold_convert_const handling conversions of a REAL_CST
   to a fixed-point type.  */

static tree
fold_convert_const_fixed_from_real (tree type, const_tree arg1)
{
  FIXED_VALUE_TYPE value;
  tree t;
  bool overflow_p;

  overflow_p = fixed_convert_from_real (&value, TYPE_MODE (type),
                                        &TREE_REAL_CST (arg1),
                                        TYPE_SATURATING (type));
  t = build_fixed (type, value);

  /* Propagate overflow flags.  */
  if (overflow_p | TREE_OVERFLOW (arg1))
    TREE_OVERFLOW (t) = 1;
  return t;
}
/* Attempt to fold type conversion operation CODE of expression ARG1 to
   type TYPE.  If no simplification can be done return NULL_TREE.  */

static tree
fold_convert_const (enum tree_code code, tree type, tree arg1)
{
  if (TREE_TYPE (arg1) == type)
    return arg1;

  if (POINTER_TYPE_P (type) || INTEGRAL_TYPE_P (type)
      || TREE_CODE (type) == OFFSET_TYPE)
    {
      if (TREE_CODE (arg1) == INTEGER_CST)
        return fold_convert_const_int_from_int (type, arg1);
      else if (TREE_CODE (arg1) == REAL_CST)
        return fold_convert_const_int_from_real (code, type, arg1);
      else if (TREE_CODE (arg1) == FIXED_CST)
        return fold_convert_const_int_from_fixed (type, arg1);
    }
  else if (TREE_CODE (type) == REAL_TYPE)
    {
      if (TREE_CODE (arg1) == INTEGER_CST)
        return build_real_from_int_cst (type, arg1);
      else if (TREE_CODE (arg1) == REAL_CST)
        return fold_convert_const_real_from_real (type, arg1);
      else if (TREE_CODE (arg1) == FIXED_CST)
        return fold_convert_const_real_from_fixed (type, arg1);
    }
  else if (TREE_CODE (type) == FIXED_POINT_TYPE)
    {
      if (TREE_CODE (arg1) == FIXED_CST)
        return fold_convert_const_fixed_from_fixed (type, arg1);
      else if (TREE_CODE (arg1) == INTEGER_CST)
        return fold_convert_const_fixed_from_int (type, arg1);
      else if (TREE_CODE (arg1) == REAL_CST)
        return fold_convert_const_fixed_from_real (type, arg1);
    }
  return NULL_TREE;
}
/* Construct a vector of zero elements of vector type TYPE.  */

static tree
build_zero_vector (tree type)
{
  tree t;

  t = fold_convert_const (NOP_EXPR, TREE_TYPE (type), integer_zero_node);
  return build_vector_from_val (type, t);
}
/* Returns true if ARG is convertible to TYPE using a NOP_EXPR.  */

bool
fold_convertible_p (const_tree type, const_tree arg)
{
  tree orig = TREE_TYPE (arg);

  if (type == orig)
    return true;

  if (TREE_CODE (arg) == ERROR_MARK
      || TREE_CODE (type) == ERROR_MARK
      || TREE_CODE (orig) == ERROR_MARK)
    return false;

  if (TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (orig))
    return true;

  switch (TREE_CODE (type))
    {
    case INTEGER_TYPE: case ENUMERAL_TYPE: case BOOLEAN_TYPE:
    case POINTER_TYPE: case REFERENCE_TYPE:
    case OFFSET_TYPE:
      if (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig)
          || TREE_CODE (orig) == OFFSET_TYPE)
        return true;
      return (TREE_CODE (orig) == VECTOR_TYPE
              && tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (orig)));

    case REAL_TYPE:
    case FIXED_POINT_TYPE:
    case COMPLEX_TYPE:
    case VECTOR_TYPE:
    case VOID_TYPE:
      return TREE_CODE (type) == TREE_CODE (orig);

    default:
      return false;
    }
}
/* Convert expression ARG to type TYPE.  Used by the middle-end for
   simple conversions in preference to calling the front-end's convert.  */

tree
fold_convert_loc (location_t loc, tree type, tree arg)
{
  tree orig = TREE_TYPE (arg);
  tree tem;

  if (type == orig)
    return arg;

  if (TREE_CODE (arg) == ERROR_MARK
      || TREE_CODE (type) == ERROR_MARK
      || TREE_CODE (orig) == ERROR_MARK)
    return error_mark_node;

  if (TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (orig))
    return fold_build1_loc (loc, NOP_EXPR, type, arg);

  switch (TREE_CODE (type))
    {
    case POINTER_TYPE:
    case REFERENCE_TYPE:
      /* Handle conversions between pointers to different address spaces.  */
      if (POINTER_TYPE_P (orig)
          && (TYPE_ADDR_SPACE (TREE_TYPE (type))
              != TYPE_ADDR_SPACE (TREE_TYPE (orig))))
        return fold_build1_loc (loc, ADDR_SPACE_CONVERT_EXPR, type, arg);
      /* fall through */

    case INTEGER_TYPE: case ENUMERAL_TYPE: case BOOLEAN_TYPE:
    case OFFSET_TYPE:
      if (TREE_CODE (arg) == INTEGER_CST)
        {
          tem = fold_convert_const (NOP_EXPR, type, arg);
          if (tem != NULL_TREE)
            return tem;
        }
      if (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig)
          || TREE_CODE (orig) == OFFSET_TYPE)
        return fold_build1_loc (loc, NOP_EXPR, type, arg);
      if (TREE_CODE (orig) == COMPLEX_TYPE)
        return fold_convert_loc (loc, type,
                                 fold_build1_loc (loc, REALPART_EXPR,
                                                  TREE_TYPE (orig), arg));
      gcc_assert (TREE_CODE (orig) == VECTOR_TYPE
                  && tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (orig)));
      return fold_build1_loc (loc, NOP_EXPR, type, arg);

    case REAL_TYPE:
      if (TREE_CODE (arg) == INTEGER_CST)
        {
          tem = fold_convert_const (FLOAT_EXPR, type, arg);
          if (tem != NULL_TREE)
            return tem;
        }
      else if (TREE_CODE (arg) == REAL_CST)
        {
          tem = fold_convert_const (NOP_EXPR, type, arg);
          if (tem != NULL_TREE)
            return tem;
        }
      else if (TREE_CODE (arg) == FIXED_CST)
        {
          tem = fold_convert_const (FIXED_CONVERT_EXPR, type, arg);
          if (tem != NULL_TREE)
            return tem;
        }

      switch (TREE_CODE (orig))
        {
        case INTEGER_TYPE:
        case BOOLEAN_TYPE: case ENUMERAL_TYPE:
        case POINTER_TYPE: case REFERENCE_TYPE:
          return fold_build1_loc (loc, FLOAT_EXPR, type, arg);

        case REAL_TYPE:
          return fold_build1_loc (loc, NOP_EXPR, type, arg);

        case FIXED_POINT_TYPE:
          return fold_build1_loc (loc, FIXED_CONVERT_EXPR, type, arg);

        case COMPLEX_TYPE:
          tem = fold_build1_loc (loc, REALPART_EXPR, TREE_TYPE (orig), arg);
          return fold_convert_loc (loc, type, tem);

        default:
          gcc_unreachable ();
        }

    case FIXED_POINT_TYPE:
      if (TREE_CODE (arg) == FIXED_CST || TREE_CODE (arg) == INTEGER_CST
          || TREE_CODE (arg) == REAL_CST)
        {
          tem = fold_convert_const (FIXED_CONVERT_EXPR, type, arg);
          if (tem != NULL_TREE)
            goto fold_convert_exit;
        }

      switch (TREE_CODE (orig))
        {
        case FIXED_POINT_TYPE:
        case INTEGER_TYPE:
        case ENUMERAL_TYPE:
        case BOOLEAN_TYPE:
        case REAL_TYPE:
          return fold_build1_loc (loc, FIXED_CONVERT_EXPR, type, arg);

        case COMPLEX_TYPE:
          tem = fold_build1_loc (loc, REALPART_EXPR, TREE_TYPE (orig), arg);
          return fold_convert_loc (loc, type, tem);

        default:
          gcc_unreachable ();
        }

    case COMPLEX_TYPE:
      switch (TREE_CODE (orig))
        {
        case INTEGER_TYPE:
        case BOOLEAN_TYPE: case ENUMERAL_TYPE:
        case POINTER_TYPE: case REFERENCE_TYPE:
        case REAL_TYPE:
        case FIXED_POINT_TYPE:
          return fold_build2_loc (loc, COMPLEX_EXPR, type,
                                  fold_convert_loc (loc, TREE_TYPE (type), arg),
                                  fold_convert_loc (loc, TREE_TYPE (type),
                                                    integer_zero_node));
        case COMPLEX_TYPE:
          {
            tree rpart, ipart;

            if (TREE_CODE (arg) == COMPLEX_EXPR)
              {
                rpart = fold_convert_loc (loc, TREE_TYPE (type),
                                          TREE_OPERAND (arg, 0));
                ipart = fold_convert_loc (loc, TREE_TYPE (type),
                                          TREE_OPERAND (arg, 1));
                return fold_build2_loc (loc, COMPLEX_EXPR, type, rpart, ipart);
              }

            arg = save_expr (arg);
            rpart = fold_build1_loc (loc, REALPART_EXPR, TREE_TYPE (orig), arg);
            ipart = fold_build1_loc (loc, IMAGPART_EXPR, TREE_TYPE (orig), arg);
            rpart = fold_convert_loc (loc, TREE_TYPE (type), rpart);
            ipart = fold_convert_loc (loc, TREE_TYPE (type), ipart);
            return fold_build2_loc (loc, COMPLEX_EXPR, type, rpart, ipart);
          }

        default:
          gcc_unreachable ();
        }

    case VECTOR_TYPE:
      if (integer_zerop (arg))
        return build_zero_vector (type);
      gcc_assert (tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (orig)));
      gcc_assert (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig)
                  || TREE_CODE (orig) == VECTOR_TYPE);
      return fold_build1_loc (loc, VIEW_CONVERT_EXPR, type, arg);

    case VOID_TYPE:
      tem = fold_ignored_result (arg);
      if (TREE_CODE (tem) == MODIFY_EXPR)
        goto fold_convert_exit;
      return fold_build1_loc (loc, NOP_EXPR, type, tem);

    default:
      gcc_unreachable ();
    }
 fold_convert_exit:
  protected_set_expr_location_unshare (tem, loc);
  return tem;
}
2044 /* Return false if expr can be assumed not to be an lvalue, true
2045 otherwise. */
2047 static bool
2048 maybe_lvalue_p (const_tree x)
2050 /* We only need to wrap lvalue tree codes. */
2051 switch (TREE_CODE (x))
2053 case VAR_DECL:
2054 case PARM_DECL:
2055 case RESULT_DECL:
2056 case LABEL_DECL:
2057 case FUNCTION_DECL:
2058 case SSA_NAME:
2060 case COMPONENT_REF:
2061 case MEM_REF:
2062 case INDIRECT_REF:
2063 case ARRAY_REF:
2064 case ARRAY_RANGE_REF:
2065 case BIT_FIELD_REF:
2066 case OBJ_TYPE_REF:
2068 case REALPART_EXPR:
2069 case IMAGPART_EXPR:
2070 case PREINCREMENT_EXPR:
2071 case PREDECREMENT_EXPR:
2072 case SAVE_EXPR:
2073 case TRY_CATCH_EXPR:
2074 case WITH_CLEANUP_EXPR:
2075 case COMPOUND_EXPR:
2076 case MODIFY_EXPR:
2077 case TARGET_EXPR:
2078 case COND_EXPR:
2079 case BIND_EXPR:
2080 break;
2082 default:
2083 /* Assume the worst for front-end tree codes. */
2084 if ((int)TREE_CODE (x) >= NUM_TREE_CODES)
2085 break;
2086 return false;
2089 return true;
2092 /* Return an expr equal to X but certainly not valid as an lvalue. */
2094 tree
2095 non_lvalue_loc (location_t loc, tree x)
2097 /* While we are in GIMPLE, NON_LVALUE_EXPR doesn't mean anything to
2098 us. */
2099 if (in_gimple_form)
2100 return x;
2102 if (! maybe_lvalue_p (x))
2103 return x;
2104 return build1_loc (loc, NON_LVALUE_EXPR, TREE_TYPE (x), x);
2107 /* Nonzero means lvalues are limited to those valid in pedantic ANSI C.
2108 Zero means allow extended lvalues. */
2110 int pedantic_lvalues;
2112 /* When pedantic, return an expr equal to X but certainly not valid as a
2113 pedantic lvalue. Otherwise, return X. */
2115 static tree
2116 pedantic_non_lvalue_loc (location_t loc, tree x)
2118 if (pedantic_lvalues)
2119 return non_lvalue_loc (loc, x);
2121 return protected_set_expr_location_unshare (x, loc);
2124 /* Given a tree comparison code, return the code that is the logical inverse
2125 of the given code. It is not safe to do this for floating-point
2126 comparisons, except for NE_EXPR and EQ_EXPR, so we receive HONOR_NANS
2127 as well: if reversing the comparison is unsafe, return ERROR_MARK. */
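/* Illustration of the switch below (assuming IEEE semantics): for
   integers, !(a < b) is a >= b.  When NaNs are honored, !(a < b) is
   instead a UNGE b, because a < b is false on unordered operands.  With
   trapping math as well we give up: a < b raises the invalid exception on
   a quiet NaN while the quiet UNGE test would not, so no reversed code
   traps identically.  */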
2129 enum tree_code
2130 invert_tree_comparison (enum tree_code code, bool honor_nans)
2132 if (honor_nans && flag_trapping_math)
2133 return ERROR_MARK;
2135 switch (code)
2137 case EQ_EXPR:
2138 return NE_EXPR;
2139 case NE_EXPR:
2140 return EQ_EXPR;
2141 case GT_EXPR:
2142 return honor_nans ? UNLE_EXPR : LE_EXPR;
2143 case GE_EXPR:
2144 return honor_nans ? UNLT_EXPR : LT_EXPR;
2145 case LT_EXPR:
2146 return honor_nans ? UNGE_EXPR : GE_EXPR;
2147 case LE_EXPR:
2148 return honor_nans ? UNGT_EXPR : GT_EXPR;
2149 case LTGT_EXPR:
2150 return UNEQ_EXPR;
2151 case UNEQ_EXPR:
2152 return LTGT_EXPR;
2153 case UNGT_EXPR:
2154 return LE_EXPR;
2155 case UNGE_EXPR:
2156 return LT_EXPR;
2157 case UNLT_EXPR:
2158 return GE_EXPR;
2159 case UNLE_EXPR:
2160 return GT_EXPR;
2161 case ORDERED_EXPR:
2162 return UNORDERED_EXPR;
2163 case UNORDERED_EXPR:
2164 return ORDERED_EXPR;
2165 default:
2166 gcc_unreachable ();
2170 /* Similar, but return the comparison that results if the operands are
2171 swapped. This is safe for floating-point. */
2173 enum tree_code
2174 swap_tree_comparison (enum tree_code code)
2176 switch (code)
2178 case EQ_EXPR:
2179 case NE_EXPR:
2180 case ORDERED_EXPR:
2181 case UNORDERED_EXPR:
2182 case LTGT_EXPR:
2183 case UNEQ_EXPR:
2184 return code;
2185 case GT_EXPR:
2186 return LT_EXPR;
2187 case GE_EXPR:
2188 return LE_EXPR;
2189 case LT_EXPR:
2190 return GT_EXPR;
2191 case LE_EXPR:
2192 return GE_EXPR;
2193 case UNGT_EXPR:
2194 return UNLT_EXPR;
2195 case UNGE_EXPR:
2196 return UNLE_EXPR;
2197 case UNLT_EXPR:
2198 return UNGT_EXPR;
2199 case UNLE_EXPR:
2200 return UNGE_EXPR;
2201 default:
2202 gcc_unreachable ();
2207 /* Convert a comparison tree code from an enum tree_code representation
2208 into a compcode bit-based encoding. This function is the inverse of
2209 compcode_to_comparison. */
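/* The encoding assigns one bit to each primitive outcome -- COMPCODE_LT,
   COMPCODE_EQ, COMPCODE_GT and COMPCODE_UNORD -- and compound codes are
   their bitwise ORs; e.g. COMPCODE_LE == (COMPCODE_LT | COMPCODE_EQ) and
   COMPCODE_NE == (COMPCODE_LT | COMPCODE_GT | COMPCODE_UNORD).  This is
   what lets combine_comparisons below merge two comparisons with a plain
   bitwise AND or OR of their encodings.  */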
2211 static enum comparison_code
2212 comparison_to_compcode (enum tree_code code)
2214 switch (code)
2216 case LT_EXPR:
2217 return COMPCODE_LT;
2218 case EQ_EXPR:
2219 return COMPCODE_EQ;
2220 case LE_EXPR:
2221 return COMPCODE_LE;
2222 case GT_EXPR:
2223 return COMPCODE_GT;
2224 case NE_EXPR:
2225 return COMPCODE_NE;
2226 case GE_EXPR:
2227 return COMPCODE_GE;
2228 case ORDERED_EXPR:
2229 return COMPCODE_ORD;
2230 case UNORDERED_EXPR:
2231 return COMPCODE_UNORD;
2232 case UNLT_EXPR:
2233 return COMPCODE_UNLT;
2234 case UNEQ_EXPR:
2235 return COMPCODE_UNEQ;
2236 case UNLE_EXPR:
2237 return COMPCODE_UNLE;
2238 case UNGT_EXPR:
2239 return COMPCODE_UNGT;
2240 case LTGT_EXPR:
2241 return COMPCODE_LTGT;
2242 case UNGE_EXPR:
2243 return COMPCODE_UNGE;
2244 default:
2245 gcc_unreachable ();
2249 /* Convert a compcode bit-based encoding of a comparison operator back
2250 to GCC's enum tree_code representation. This function is the
2251 inverse of comparison_to_compcode. */
2253 static enum tree_code
2254 compcode_to_comparison (enum comparison_code code)
2256 switch (code)
2258 case COMPCODE_LT:
2259 return LT_EXPR;
2260 case COMPCODE_EQ:
2261 return EQ_EXPR;
2262 case COMPCODE_LE:
2263 return LE_EXPR;
2264 case COMPCODE_GT:
2265 return GT_EXPR;
2266 case COMPCODE_NE:
2267 return NE_EXPR;
2268 case COMPCODE_GE:
2269 return GE_EXPR;
2270 case COMPCODE_ORD:
2271 return ORDERED_EXPR;
2272 case COMPCODE_UNORD:
2273 return UNORDERED_EXPR;
2274 case COMPCODE_UNLT:
2275 return UNLT_EXPR;
2276 case COMPCODE_UNEQ:
2277 return UNEQ_EXPR;
2278 case COMPCODE_UNLE:
2279 return UNLE_EXPR;
2280 case COMPCODE_UNGT:
2281 return UNGT_EXPR;
2282 case COMPCODE_LTGT:
2283 return LTGT_EXPR;
2284 case COMPCODE_UNGE:
2285 return UNGE_EXPR;
2286 default:
2287 gcc_unreachable ();
2291 /* Return a tree for the comparison which is the combination of
2292 doing the AND or OR (depending on CODE) of the two operations LCODE
2293 and RCODE on the identical operands LL_ARG and LR_ARG. Take into account
2294 the possibility of trapping if the mode has NaNs, and return NULL_TREE
2295 if this makes the transformation invalid. */
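/* Worked example for integer operands: combining (x < y) || (x == y)
   computes COMPCODE_LT | COMPCODE_EQ == COMPCODE_LE, so the result is
   x <= y; combining (x < y) && (x > y) computes COMPCODE_LT & COMPCODE_GT
   == COMPCODE_FALSE, a constant false.  */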
2297 tree
2298 combine_comparisons (location_t loc,
2299 enum tree_code code, enum tree_code lcode,
2300 enum tree_code rcode, tree truth_type,
2301 tree ll_arg, tree lr_arg)
2303 bool honor_nans = HONOR_NANS (TYPE_MODE (TREE_TYPE (ll_arg)));
2304 enum comparison_code lcompcode = comparison_to_compcode (lcode);
2305 enum comparison_code rcompcode = comparison_to_compcode (rcode);
2306 int compcode;
2308 switch (code)
2310 case TRUTH_AND_EXPR: case TRUTH_ANDIF_EXPR:
2311 compcode = lcompcode & rcompcode;
2312 break;
2314 case TRUTH_OR_EXPR: case TRUTH_ORIF_EXPR:
2315 compcode = lcompcode | rcompcode;
2316 break;
2318 default:
2319 return NULL_TREE;
2322 if (!honor_nans)
2324 /* Eliminate unordered comparisons, as well as LTGT and ORD
2325 which are not used unless the mode has NaNs. */
2326 compcode &= ~COMPCODE_UNORD;
2327 if (compcode == COMPCODE_LTGT)
2328 compcode = COMPCODE_NE;
2329 else if (compcode == COMPCODE_ORD)
2330 compcode = COMPCODE_TRUE;
2332 else if (flag_trapping_math)
2334 /* Check that the original operation and the optimized ones will trap
2335 under the same condition. */
2336 bool ltrap = (lcompcode & COMPCODE_UNORD) == 0
2337 && (lcompcode != COMPCODE_EQ)
2338 && (lcompcode != COMPCODE_ORD);
2339 bool rtrap = (rcompcode & COMPCODE_UNORD) == 0
2340 && (rcompcode != COMPCODE_EQ)
2341 && (rcompcode != COMPCODE_ORD);
2342 bool trap = (compcode & COMPCODE_UNORD) == 0
2343 && (compcode != COMPCODE_EQ)
2344 && (compcode != COMPCODE_ORD);
2346 /* In a short-circuited boolean expression the LHS might be
2347 such that the RHS, if evaluated, will never trap. For
2348 example, in ORD (x, y) && (x < y), we evaluate the RHS only
2349 if neither x nor y is NaN. (This is a mixed blessing: for
2350 example, the expression above will never trap, hence
2351 optimizing it to x < y would be invalid). */
2352 if ((code == TRUTH_ORIF_EXPR && (lcompcode & COMPCODE_UNORD))
2353 || (code == TRUTH_ANDIF_EXPR && !(lcompcode & COMPCODE_UNORD)))
2354 rtrap = false;
2356 /* If the comparison was short-circuited, and only the RHS
2357 trapped, we may now generate a spurious trap. */
2358 if (rtrap && !ltrap
2359 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR))
2360 return NULL_TREE;
2362 /* If we changed the conditions that cause a trap, we lose. */
2363 if ((ltrap || rtrap) != trap)
2364 return NULL_TREE;
2367 if (compcode == COMPCODE_TRUE)
2368 return constant_boolean_node (true, truth_type);
2369 else if (compcode == COMPCODE_FALSE)
2370 return constant_boolean_node (false, truth_type);
2371 else
2373 enum tree_code tcode;
2375 tcode = compcode_to_comparison ((enum comparison_code) compcode);
2376 return fold_build2_loc (loc, tcode, truth_type, ll_arg, lr_arg);
2380 /* Return nonzero if two operands (typically of the same tree node)
2381 are necessarily equal. If either argument has side-effects this
2382 function returns zero. FLAGS modifies behavior as follows:
2384 If OEP_ONLY_CONST is set, only return nonzero for constants.
2385 This function tests whether the operands are indistinguishable;
2386 it does not test whether they are equal using C's == operation.
2387 The distinction is important for IEEE floating point, because
2388 (1) -0.0 and 0.0 are distinguishable, but -0.0==0.0, and
2389 (2) two NaNs may be indistinguishable, but NaN!=NaN.
2391 If OEP_ONLY_CONST is unset, a VAR_DECL is considered equal to itself
2392 even though it may hold multiple values during a function.
2393 This is because a GCC tree node guarantees that nothing else is
2394 executed between the evaluation of its "operands" (which may often
2395 be evaluated in arbitrary order). Hence if the operands themselves
2396 don't side-effect, the VAR_DECLs, PARM_DECLs etc... must hold the
2397 same value in each operand/subexpression. Hence leaving OEP_ONLY_CONST
2398 unset means assuming isochronic (or instantaneous) tree equivalence.
2399 Unless comparing arbitrary expression trees, such as from different
2400 statements, this flag can usually be left unset.
2402 If OEP_PURE_SAME is set, then pure functions with identical arguments
2403 are considered the same. It is used when the caller has other ways
2404 to ensure that global memory is unchanged in between. */
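/* For instance, given side-effect-free operands a and b, "a + b" and
   "b + a" are considered equal here because PLUS_EXPR is commutative,
   while "f () + b" and "b + f ()" are not, since the calls have side
   effects.  With OEP_PURE_SAME, two calls of the same pure function with
   equal arguments also compare equal.  */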
2406 int
2407 operand_equal_p (const_tree arg0, const_tree arg1, unsigned int flags)
2409 /* If either is ERROR_MARK, they aren't equal. */
2410 if (TREE_CODE (arg0) == ERROR_MARK || TREE_CODE (arg1) == ERROR_MARK
2411 || TREE_TYPE (arg0) == error_mark_node
2412 || TREE_TYPE (arg1) == error_mark_node)
2413 return 0;
2415 /* Similar, if either does not have a type (like a released SSA name),
2416 they aren't equal. */
2417 if (!TREE_TYPE (arg0) || !TREE_TYPE (arg1))
2418 return 0;
2420 /* Check equality of integer constants before bailing out due to
2421 precision differences. */
2422 if (TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
2423 return tree_int_cst_equal (arg0, arg1);
2425 /* If the two types don't have the same signedness, then we can't consider
2426 them equal. We must check this before the STRIP_NOPS calls
2427 because they may change the signedness of the arguments. As pointers
2428 strictly don't have a signedness, require either two pointers or
2429 two non-pointers as well. */
2430 if (TYPE_UNSIGNED (TREE_TYPE (arg0)) != TYPE_UNSIGNED (TREE_TYPE (arg1))
2431 || POINTER_TYPE_P (TREE_TYPE (arg0)) != POINTER_TYPE_P (TREE_TYPE (arg1)))
2432 return 0;
2434 /* We cannot consider pointers to different address spaces equal. */
2435 if (POINTER_TYPE_P (TREE_TYPE (arg0)) && POINTER_TYPE_P (TREE_TYPE (arg1))
2436 && (TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (arg0)))
2437 != TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (arg1)))))
2438 return 0;
2440 /* If the two types don't have the same precision, then it is not safe
2441 to strip NOPs. */
2442 if (TYPE_PRECISION (TREE_TYPE (arg0)) != TYPE_PRECISION (TREE_TYPE (arg1)))
2443 return 0;
2445 STRIP_NOPS (arg0);
2446 STRIP_NOPS (arg1);
2448 /* In case both args are comparisons but with different comparison
2449 code, try to swap the comparison operands of one arg to produce
2450 a match and compare that variant. */
2451 if (TREE_CODE (arg0) != TREE_CODE (arg1)
2452 && COMPARISON_CLASS_P (arg0)
2453 && COMPARISON_CLASS_P (arg1))
2455 enum tree_code swap_code = swap_tree_comparison (TREE_CODE (arg1));
2457 if (TREE_CODE (arg0) == swap_code)
2458 return operand_equal_p (TREE_OPERAND (arg0, 0),
2459 TREE_OPERAND (arg1, 1), flags)
2460 && operand_equal_p (TREE_OPERAND (arg0, 1),
2461 TREE_OPERAND (arg1, 0), flags);
2464 if (TREE_CODE (arg0) != TREE_CODE (arg1)
2465 /* This is needed for conversions and for COMPONENT_REF.
2466 Might as well play it safe and always test this. */
2467 || TREE_CODE (TREE_TYPE (arg0)) == ERROR_MARK
2468 || TREE_CODE (TREE_TYPE (arg1)) == ERROR_MARK
2469 || TYPE_MODE (TREE_TYPE (arg0)) != TYPE_MODE (TREE_TYPE (arg1)))
2470 return 0;
2472 /* If ARG0 and ARG1 are the same SAVE_EXPR, they are necessarily equal.
2473 We don't care about side effects in that case because the SAVE_EXPR
2474 takes care of that for us. In all other cases, two expressions are
2475 equal if they have no side effects. If we have two identical
2476 expressions with side effects that should be treated the same due
2477 to the only side effects being identical SAVE_EXPR's, that will
2478 be detected in the recursive calls below. */
2479 if (arg0 == arg1 && ! (flags & OEP_ONLY_CONST)
2480 && (TREE_CODE (arg0) == SAVE_EXPR
2481 || (! TREE_SIDE_EFFECTS (arg0) && ! TREE_SIDE_EFFECTS (arg1))))
2482 return 1;
2484 /* Next handle constant cases, those for which we can return 1 even
2485 if ONLY_CONST is set. */
2486 if (TREE_CONSTANT (arg0) && TREE_CONSTANT (arg1))
2487 switch (TREE_CODE (arg0))
2489 case INTEGER_CST:
2490 return tree_int_cst_equal (arg0, arg1);
2492 case FIXED_CST:
2493 return FIXED_VALUES_IDENTICAL (TREE_FIXED_CST (arg0),
2494 TREE_FIXED_CST (arg1));
2496 case REAL_CST:
2497 if (REAL_VALUES_IDENTICAL (TREE_REAL_CST (arg0),
2498 TREE_REAL_CST (arg1)))
2499 return 1;
2502 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0))))
2504 /* If we do not distinguish between signed and unsigned zero,
2505 consider them equal. */
2506 if (real_zerop (arg0) && real_zerop (arg1))
2507 return 1;
2509 return 0;
2511 case VECTOR_CST:
2513 tree v1, v2;
2515 v1 = TREE_VECTOR_CST_ELTS (arg0);
2516 v2 = TREE_VECTOR_CST_ELTS (arg1);
2517 while (v1 && v2)
2519 if (!operand_equal_p (TREE_VALUE (v1), TREE_VALUE (v2),
2520 flags))
2521 return 0;
2522 v1 = TREE_CHAIN (v1);
2523 v2 = TREE_CHAIN (v2);
2526 return v1 == v2;
2529 case COMPLEX_CST:
2530 return (operand_equal_p (TREE_REALPART (arg0), TREE_REALPART (arg1),
2531 flags)
2532 && operand_equal_p (TREE_IMAGPART (arg0), TREE_IMAGPART (arg1),
2533 flags));
2535 case STRING_CST:
2536 return (TREE_STRING_LENGTH (arg0) == TREE_STRING_LENGTH (arg1)
2537 && ! memcmp (TREE_STRING_POINTER (arg0),
2538 TREE_STRING_POINTER (arg1),
2539 TREE_STRING_LENGTH (arg0)));
2541 case ADDR_EXPR:
2542 return operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 0),
2543 0);
2544 default:
2545 break;
2548 if (flags & OEP_ONLY_CONST)
2549 return 0;
2551 /* Define macros to test an operand from arg0 and arg1 for equality and a
2552 variant that allows null and views null as being different from any
2553 non-null value. In the latter case, if either is null, they both
2554 must be; otherwise, do the normal comparison. */
2555 #define OP_SAME(N) operand_equal_p (TREE_OPERAND (arg0, N), \
2556 TREE_OPERAND (arg1, N), flags)
2558 #define OP_SAME_WITH_NULL(N) \
2559 ((!TREE_OPERAND (arg0, N) || !TREE_OPERAND (arg1, N)) \
2560 ? TREE_OPERAND (arg0, N) == TREE_OPERAND (arg1, N) : OP_SAME (N))
2562 switch (TREE_CODE_CLASS (TREE_CODE (arg0)))
2564 case tcc_unary:
2565 /* Two conversions are equal only if signedness and modes match. */
2566 switch (TREE_CODE (arg0))
2568 CASE_CONVERT:
2569 case FIX_TRUNC_EXPR:
2570 if (TYPE_UNSIGNED (TREE_TYPE (arg0))
2571 != TYPE_UNSIGNED (TREE_TYPE (arg1)))
2572 return 0;
2573 break;
2574 default:
2575 break;
2578 return OP_SAME (0);
2581 case tcc_comparison:
2582 case tcc_binary:
2583 if (OP_SAME (0) && OP_SAME (1))
2584 return 1;
2586 /* For commutative ops, allow the other order. */
2587 return (commutative_tree_code (TREE_CODE (arg0))
2588 && operand_equal_p (TREE_OPERAND (arg0, 0),
2589 TREE_OPERAND (arg1, 1), flags)
2590 && operand_equal_p (TREE_OPERAND (arg0, 1),
2591 TREE_OPERAND (arg1, 0), flags));
2593 case tcc_reference:
2594 /* If either of the pointer (or reference) expressions we are
2595 dereferencing contain a side effect, these cannot be equal. */
2596 if (TREE_SIDE_EFFECTS (arg0)
2597 || TREE_SIDE_EFFECTS (arg1))
2598 return 0;
2600 switch (TREE_CODE (arg0))
2602 case INDIRECT_REF:
2603 case REALPART_EXPR:
2604 case IMAGPART_EXPR:
2605 return OP_SAME (0);
2607 case MEM_REF:
2608 /* Require equal access sizes, and similar pointer types.
2609 We can have incomplete types for array references of
2610 variable-sized arrays from the Fortran frontend
2611 though. */
2612 return ((TYPE_SIZE (TREE_TYPE (arg0)) == TYPE_SIZE (TREE_TYPE (arg1))
2613 || (TYPE_SIZE (TREE_TYPE (arg0))
2614 && TYPE_SIZE (TREE_TYPE (arg1))
2615 && operand_equal_p (TYPE_SIZE (TREE_TYPE (arg0)),
2616 TYPE_SIZE (TREE_TYPE (arg1)), flags)))
2617 && (TYPE_MAIN_VARIANT (TREE_TYPE (TREE_OPERAND (arg0, 1)))
2618 == TYPE_MAIN_VARIANT (TREE_TYPE (TREE_OPERAND (arg1, 1))))
2619 && OP_SAME (0) && OP_SAME (1));
2621 case ARRAY_REF:
2622 case ARRAY_RANGE_REF:
2623 /* Operands 2 and 3 may be null.
2624 Compare the array index by value if it is constant first as we
2625 may have different types but same value here. */
2626 return (OP_SAME (0)
2627 && (tree_int_cst_equal (TREE_OPERAND (arg0, 1),
2628 TREE_OPERAND (arg1, 1))
2629 || OP_SAME (1))
2630 && OP_SAME_WITH_NULL (2)
2631 && OP_SAME_WITH_NULL (3));
2633 case COMPONENT_REF:
2634 /* Handle operand 2 the same as for ARRAY_REF. Operand 0
2635 may be NULL when we're called to compare MEM_EXPRs. */
2636 return OP_SAME_WITH_NULL (0)
2637 && OP_SAME (1)
2638 && OP_SAME_WITH_NULL (2);
2640 case BIT_FIELD_REF:
2641 return OP_SAME (0) && OP_SAME (1) && OP_SAME (2);
2643 default:
2644 return 0;
2647 case tcc_expression:
2648 switch (TREE_CODE (arg0))
2650 case ADDR_EXPR:
2651 case TRUTH_NOT_EXPR:
2652 return OP_SAME (0);
2654 case TRUTH_ANDIF_EXPR:
2655 case TRUTH_ORIF_EXPR:
2656 return OP_SAME (0) && OP_SAME (1);
2658 case FMA_EXPR:
2659 case WIDEN_MULT_PLUS_EXPR:
2660 case WIDEN_MULT_MINUS_EXPR:
2661 if (!OP_SAME (2))
2662 return 0;
2663 /* The multiplication operands are commutative. */
2664 /* FALLTHRU */
2666 case TRUTH_AND_EXPR:
2667 case TRUTH_OR_EXPR:
2668 case TRUTH_XOR_EXPR:
2669 if (OP_SAME (0) && OP_SAME (1))
2670 return 1;
2672 /* Otherwise take into account this is a commutative operation. */
2673 return (operand_equal_p (TREE_OPERAND (arg0, 0),
2674 TREE_OPERAND (arg1, 1), flags)
2675 && operand_equal_p (TREE_OPERAND (arg0, 1),
2676 TREE_OPERAND (arg1, 0), flags));
2678 case COND_EXPR:
2679 case VEC_COND_EXPR:
2680 case DOT_PROD_EXPR:
2681 return OP_SAME (0) && OP_SAME (1) && OP_SAME (2);
2683 default:
2684 return 0;
2687 case tcc_vl_exp:
2688 switch (TREE_CODE (arg0))
2690 case CALL_EXPR:
2691 /* If the CALL_EXPRs call different functions, then they
2692 clearly cannot be equal. */
2693 if (! operand_equal_p (CALL_EXPR_FN (arg0), CALL_EXPR_FN (arg1),
2694 flags))
2695 return 0;
2698 unsigned int cef = call_expr_flags (arg0);
2699 if (flags & OEP_PURE_SAME)
2700 cef &= ECF_CONST | ECF_PURE;
2701 else
2702 cef &= ECF_CONST;
2703 if (!cef)
2704 return 0;
2707 /* Now see if all the arguments are the same. */
2709 const_call_expr_arg_iterator iter0, iter1;
2710 const_tree a0, a1;
2711 for (a0 = first_const_call_expr_arg (arg0, &iter0),
2712 a1 = first_const_call_expr_arg (arg1, &iter1);
2713 a0 && a1;
2714 a0 = next_const_call_expr_arg (&iter0),
2715 a1 = next_const_call_expr_arg (&iter1))
2716 if (! operand_equal_p (a0, a1, flags))
2717 return 0;
2719 /* If we get here and both argument lists are exhausted
2720 then the CALL_EXPRs are equal. */
2721 return ! (a0 || a1);
2723 default:
2724 return 0;
2727 case tcc_declaration:
2728 /* Consider __builtin_sqrt equal to sqrt. */
2729 return (TREE_CODE (arg0) == FUNCTION_DECL
2730 && DECL_BUILT_IN (arg0) && DECL_BUILT_IN (arg1)
2731 && DECL_BUILT_IN_CLASS (arg0) == DECL_BUILT_IN_CLASS (arg1)
2732 && DECL_FUNCTION_CODE (arg0) == DECL_FUNCTION_CODE (arg1));
2734 default:
2735 return 0;
2738 #undef OP_SAME
2739 #undef OP_SAME_WITH_NULL
2742 /* Similar to operand_equal_p, but see if ARG0 might have been made by
2743 shorten_compare from ARG1 when ARG1 was being compared with OTHER.
2745 When in doubt, return 0. */
2747 static int
2748 operand_equal_for_comparison_p (tree arg0, tree arg1, tree other)
2750 int unsignedp1, unsignedpo;
2751 tree primarg0, primarg1, primother;
2752 unsigned int correct_width;
2754 if (operand_equal_p (arg0, arg1, 0))
2755 return 1;
2757 if (! INTEGRAL_TYPE_P (TREE_TYPE (arg0))
2758 || ! INTEGRAL_TYPE_P (TREE_TYPE (arg1)))
2759 return 0;
2761 /* Discard any conversions that don't change the modes of ARG0 and ARG1
2762 and see if the inner values are the same. This removes any
2763 signedness comparison, which doesn't matter here. */
2764 primarg0 = arg0, primarg1 = arg1;
2765 STRIP_NOPS (primarg0);
2766 STRIP_NOPS (primarg1);
2767 if (operand_equal_p (primarg0, primarg1, 0))
2768 return 1;
2770 /* Duplicate what shorten_compare does to ARG1 and see if that gives the
2771 actual comparison operand, ARG0.
2773 First throw away any conversions to wider types
2774 already present in the operands. */
2776 primarg1 = get_narrower (arg1, &unsignedp1);
2777 primother = get_narrower (other, &unsignedpo);
2779 correct_width = TYPE_PRECISION (TREE_TYPE (arg1));
2780 if (unsignedp1 == unsignedpo
2781 && TYPE_PRECISION (TREE_TYPE (primarg1)) < correct_width
2782 && TYPE_PRECISION (TREE_TYPE (primother)) < correct_width)
2784 tree type = TREE_TYPE (arg0);
2786 /* Make sure shorter operand is extended the right way
2787 to match the longer operand. */
2788 primarg1 = fold_convert (signed_or_unsigned_type_for
2789 (unsignedp1, TREE_TYPE (primarg1)), primarg1);
2791 if (operand_equal_p (arg0, fold_convert (type, primarg1), 0))
2792 return 1;
2795 return 0;
2798 /* See if ARG is an expression that is either a comparison or is performing
2799 arithmetic on comparisons. The comparisons must only be comparing
2800 two different values, which will be stored in *CVAL1 and *CVAL2; if
2801 they are nonzero it means that some operands have already been found.
2802 No variables may be used anywhere else in the expression except in the
2803 comparisons. If SAVE_P is true it means we removed a SAVE_EXPR around
2804 the expression and save_expr needs to be called with CVAL1 and CVAL2.
2806 If this is true, return 1. Otherwise, return zero. */
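/* For example, for ARG == "(x < y) || (x == y)" this returns 1 with
   *CVAL1 == x and *CVAL2 == y, whereas "(x < y) || (y < z)" fails
   because three distinct values appear in the comparisons.  */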
2808 static int
2809 twoval_comparison_p (tree arg, tree *cval1, tree *cval2, int *save_p)
2811 enum tree_code code = TREE_CODE (arg);
2812 enum tree_code_class tclass = TREE_CODE_CLASS (code);
2814 /* We can handle some of the tcc_expression cases here. */
2815 if (tclass == tcc_expression && code == TRUTH_NOT_EXPR)
2816 tclass = tcc_unary;
2817 else if (tclass == tcc_expression
2818 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR
2819 || code == COMPOUND_EXPR))
2820 tclass = tcc_binary;
2822 else if (tclass == tcc_expression && code == SAVE_EXPR
2823 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (arg, 0)))
2825 /* If we've already found a CVAL1 or CVAL2, this expression is
2826 too complex to handle. */
2827 if (*cval1 || *cval2)
2828 return 0;
2830 tclass = tcc_unary;
2831 *save_p = 1;
2834 switch (tclass)
2836 case tcc_unary:
2837 return twoval_comparison_p (TREE_OPERAND (arg, 0), cval1, cval2, save_p);
2839 case tcc_binary:
2840 return (twoval_comparison_p (TREE_OPERAND (arg, 0), cval1, cval2, save_p)
2841 && twoval_comparison_p (TREE_OPERAND (arg, 1),
2842 cval1, cval2, save_p));
2844 case tcc_constant:
2845 return 1;
2847 case tcc_expression:
2848 if (code == COND_EXPR)
2849 return (twoval_comparison_p (TREE_OPERAND (arg, 0),
2850 cval1, cval2, save_p)
2851 && twoval_comparison_p (TREE_OPERAND (arg, 1),
2852 cval1, cval2, save_p)
2853 && twoval_comparison_p (TREE_OPERAND (arg, 2),
2854 cval1, cval2, save_p));
2855 return 0;
2857 case tcc_comparison:
2858 /* First see if we can handle the first operand, then the second. For
2859 the second operand, we know *CVAL1 can't be zero. It must be that
2860 one side of the comparison is each of the values; test for the
2861 case where this isn't true by failing if the two operands
2862 are the same. */
2864 if (operand_equal_p (TREE_OPERAND (arg, 0),
2865 TREE_OPERAND (arg, 1), 0))
2866 return 0;
2868 if (*cval1 == 0)
2869 *cval1 = TREE_OPERAND (arg, 0);
2870 else if (operand_equal_p (*cval1, TREE_OPERAND (arg, 0), 0))
2871 ;
2872 else if (*cval2 == 0)
2873 *cval2 = TREE_OPERAND (arg, 0);
2874 else if (operand_equal_p (*cval2, TREE_OPERAND (arg, 0), 0))
2875 ;
2876 else
2877 return 0;
2879 if (operand_equal_p (*cval1, TREE_OPERAND (arg, 1), 0))
2880 ;
2881 else if (*cval2 == 0)
2882 *cval2 = TREE_OPERAND (arg, 1);
2883 else if (operand_equal_p (*cval2, TREE_OPERAND (arg, 1), 0))
2884 ;
2885 else
2886 return 0;
2888 return 1;
2890 default:
2891 return 0;
2895 /* ARG is a tree that is known to contain just arithmetic operations and
2896 comparisons. Evaluate the operations in the tree substituting NEW0 for
2897 any occurrence of OLD0 as an operand of a comparison and likewise for
2898 NEW1 and OLD1. */
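/* E.g. with OLD0 == x, NEW0 == 0, OLD1 == y, NEW1 == 1, the expression
   "(x < y) || (x == y)" becomes "(0 < 1) || (0 == 1)", which subsequent
   folding reduces to a constant.  */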
2900 static tree
2901 eval_subst (location_t loc, tree arg, tree old0, tree new0,
2902 tree old1, tree new1)
2904 tree type = TREE_TYPE (arg);
2905 enum tree_code code = TREE_CODE (arg);
2906 enum tree_code_class tclass = TREE_CODE_CLASS (code);
2908 /* We can handle some of the tcc_expression cases here. */
2909 if (tclass == tcc_expression && code == TRUTH_NOT_EXPR)
2910 tclass = tcc_unary;
2911 else if (tclass == tcc_expression
2912 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR))
2913 tclass = tcc_binary;
2915 switch (tclass)
2917 case tcc_unary:
2918 return fold_build1_loc (loc, code, type,
2919 eval_subst (loc, TREE_OPERAND (arg, 0),
2920 old0, new0, old1, new1));
2922 case tcc_binary:
2923 return fold_build2_loc (loc, code, type,
2924 eval_subst (loc, TREE_OPERAND (arg, 0),
2925 old0, new0, old1, new1),
2926 eval_subst (loc, TREE_OPERAND (arg, 1),
2927 old0, new0, old1, new1));
2929 case tcc_expression:
2930 switch (code)
2932 case SAVE_EXPR:
2933 return eval_subst (loc, TREE_OPERAND (arg, 0), old0, new0,
2934 old1, new1);
2936 case COMPOUND_EXPR:
2937 return eval_subst (loc, TREE_OPERAND (arg, 1), old0, new0,
2938 old1, new1);
2940 case COND_EXPR:
2941 return fold_build3_loc (loc, code, type,
2942 eval_subst (loc, TREE_OPERAND (arg, 0),
2943 old0, new0, old1, new1),
2944 eval_subst (loc, TREE_OPERAND (arg, 1),
2945 old0, new0, old1, new1),
2946 eval_subst (loc, TREE_OPERAND (arg, 2),
2947 old0, new0, old1, new1));
2948 default:
2949 break;
2951 /* Fall through - ??? */
2953 case tcc_comparison:
2955 tree arg0 = TREE_OPERAND (arg, 0);
2956 tree arg1 = TREE_OPERAND (arg, 1);
2958 /* We need to check both for exact equality and tree equality. The
2959 former will be true if the operand has a side-effect. In that
2960 case, we know the operand occurred exactly once. */
2962 if (arg0 == old0 || operand_equal_p (arg0, old0, 0))
2963 arg0 = new0;
2964 else if (arg0 == old1 || operand_equal_p (arg0, old1, 0))
2965 arg0 = new1;
2967 if (arg1 == old0 || operand_equal_p (arg1, old0, 0))
2968 arg1 = new0;
2969 else if (arg1 == old1 || operand_equal_p (arg1, old1, 0))
2970 arg1 = new1;
2972 return fold_build2_loc (loc, code, type, arg0, arg1);
2975 default:
2976 return arg;
2980 /* Return a tree for the case when the result of an expression is RESULT
2981 converted to TYPE and OMITTED was previously an operand of the expression
2982 but is now not needed (e.g., we folded OMITTED * 0).
2984 If OMITTED has side effects, we must evaluate it. Otherwise, just do
2985 the conversion of RESULT to TYPE. */
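/* E.g. when "f () * 0" is folded to 0, the call must still be evaluated,
   so the result is the COMPOUND_EXPR "f (), 0": the omitted operand is
   kept for its side effects and the folded value is returned.  */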
2987 tree
2988 omit_one_operand_loc (location_t loc, tree type, tree result, tree omitted)
2990 tree t = fold_convert_loc (loc, type, result);
2992 /* If the resulting operand is an empty statement, just return the omitted
2993 statement cast to void. */
2994 if (IS_EMPTY_STMT (t) && TREE_SIDE_EFFECTS (omitted))
2995 return build1_loc (loc, NOP_EXPR, void_type_node,
2996 fold_ignored_result (omitted));
2998 if (TREE_SIDE_EFFECTS (omitted))
2999 return build2_loc (loc, COMPOUND_EXPR, type,
3000 fold_ignored_result (omitted), t);
3002 return non_lvalue_loc (loc, t);
3005 /* Similar, but call pedantic_non_lvalue instead of non_lvalue. */
3007 static tree
3008 pedantic_omit_one_operand_loc (location_t loc, tree type, tree result,
3009 tree omitted)
3011 tree t = fold_convert_loc (loc, type, result);
3013 /* If the resulting operand is an empty statement, just return the omitted
3014 statement cast to void. */
3015 if (IS_EMPTY_STMT (t) && TREE_SIDE_EFFECTS (omitted))
3016 return build1_loc (loc, NOP_EXPR, void_type_node,
3017 fold_ignored_result (omitted));
3019 if (TREE_SIDE_EFFECTS (omitted))
3020 return build2_loc (loc, COMPOUND_EXPR, type,
3021 fold_ignored_result (omitted), t);
3023 return pedantic_non_lvalue_loc (loc, t);
3026 /* Return a tree for the case when the result of an expression is RESULT
3027 converted to TYPE and OMITTED1 and OMITTED2 were previously operands
3028 of the expression but are now not needed.
3030 If OMITTED1 or OMITTED2 has side effects, they must be evaluated.
3031 If both OMITTED1 and OMITTED2 have side effects, OMITTED1 is
3032 evaluated before OMITTED2. Otherwise, if neither has side effects,
3033 just do the conversion of RESULT to TYPE. */
3035 tree
3036 omit_two_operands_loc (location_t loc, tree type, tree result,
3037 tree omitted1, tree omitted2)
3039 tree t = fold_convert_loc (loc, type, result);
3041 if (TREE_SIDE_EFFECTS (omitted2))
3042 t = build2_loc (loc, COMPOUND_EXPR, type, omitted2, t);
3043 if (TREE_SIDE_EFFECTS (omitted1))
3044 t = build2_loc (loc, COMPOUND_EXPR, type, omitted1, t);
3046 return TREE_CODE (t) != COMPOUND_EXPR ? non_lvalue_loc (loc, t) : t;
3050 /* Return a simplified tree node for the truth-negation of ARG. This
3051 never alters ARG itself. We assume that ARG is an operation that
3052 returns a truth value (0 or 1).
3054 FIXME: one would think we would fold the result, but it causes
3055 problems with the dominator optimizer. */
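/* The cases below invert comparisons directly (e.g. a < b becomes a >= b
   when that is safe) and apply De Morgan's laws to the logical operators:
   !(a && b) becomes !a || !b, and !(a || b) becomes !a && !b.  */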
3057 tree
3058 fold_truth_not_expr (location_t loc, tree arg)
3060 tree type = TREE_TYPE (arg);
3061 enum tree_code code = TREE_CODE (arg);
3062 location_t loc1, loc2;
3064 /* If this is a comparison, we can simply invert it, except for
3065 floating-point non-equality comparisons, in which case we just
3066 enclose a TRUTH_NOT_EXPR around what we have. */
3068 if (TREE_CODE_CLASS (code) == tcc_comparison)
3070 tree op_type = TREE_TYPE (TREE_OPERAND (arg, 0));
3071 if (FLOAT_TYPE_P (op_type)
3072 && flag_trapping_math
3073 && code != ORDERED_EXPR && code != UNORDERED_EXPR
3074 && code != NE_EXPR && code != EQ_EXPR)
3075 return NULL_TREE;
3077 code = invert_tree_comparison (code, HONOR_NANS (TYPE_MODE (op_type)));
3078 if (code == ERROR_MARK)
3079 return NULL_TREE;
3081 return build2_loc (loc, code, type, TREE_OPERAND (arg, 0),
3082 TREE_OPERAND (arg, 1));
3085 switch (code)
3087 case INTEGER_CST:
3088 return constant_boolean_node (integer_zerop (arg), type);
3090 case TRUTH_AND_EXPR:
3091 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
3092 loc2 = expr_location_or (TREE_OPERAND (arg, 1), loc);
3093 return build2_loc (loc, TRUTH_OR_EXPR, type,
3094 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)),
3095 invert_truthvalue_loc (loc2, TREE_OPERAND (arg, 1)));
3097 case TRUTH_OR_EXPR:
3098 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
3099 loc2 = expr_location_or (TREE_OPERAND (arg, 1), loc);
3100 return build2_loc (loc, TRUTH_AND_EXPR, type,
3101 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)),
3102 invert_truthvalue_loc (loc2, TREE_OPERAND (arg, 1)));
3104 case TRUTH_XOR_EXPR:
3105 /* Here we can invert either operand. We invert the first operand
3106 unless the second operand is a TRUTH_NOT_EXPR in which case our
3107 result is the XOR of the first operand with the inside of the
3108 negation of the second operand. */
3110 if (TREE_CODE (TREE_OPERAND (arg, 1)) == TRUTH_NOT_EXPR)
3111 return build2_loc (loc, TRUTH_XOR_EXPR, type, TREE_OPERAND (arg, 0),
3112 TREE_OPERAND (TREE_OPERAND (arg, 1), 0));
3113 else
3114 return build2_loc (loc, TRUTH_XOR_EXPR, type,
3115 invert_truthvalue_loc (loc, TREE_OPERAND (arg, 0)),
3116 TREE_OPERAND (arg, 1));
3118 case TRUTH_ANDIF_EXPR:
3119 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
3120 loc2 = expr_location_or (TREE_OPERAND (arg, 1), loc);
3121 return build2_loc (loc, TRUTH_ORIF_EXPR, type,
3122 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)),
3123 invert_truthvalue_loc (loc2, TREE_OPERAND (arg, 1)));
3125 case TRUTH_ORIF_EXPR:
3126 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
3127 loc2 = expr_location_or (TREE_OPERAND (arg, 1), loc);
3128 return build2_loc (loc, TRUTH_ANDIF_EXPR, type,
3129 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)),
3130 invert_truthvalue_loc (loc2, TREE_OPERAND (arg, 1)));
3132 case TRUTH_NOT_EXPR:
3133 return TREE_OPERAND (arg, 0);
3135 case COND_EXPR:
3137 tree arg1 = TREE_OPERAND (arg, 1);
3138 tree arg2 = TREE_OPERAND (arg, 2);
3140 loc1 = expr_location_or (TREE_OPERAND (arg, 1), loc);
3141 loc2 = expr_location_or (TREE_OPERAND (arg, 2), loc);
3143 /* A COND_EXPR may have a throw as one operand, which
3144 then has void type. Just leave void operands
3145 as they are. */
3146 return build3_loc (loc, COND_EXPR, type, TREE_OPERAND (arg, 0),
3147 VOID_TYPE_P (TREE_TYPE (arg1))
3148 ? arg1 : invert_truthvalue_loc (loc1, arg1),
3149 VOID_TYPE_P (TREE_TYPE (arg2))
3150 ? arg2 : invert_truthvalue_loc (loc2, arg2));
3153 case COMPOUND_EXPR:
3154 loc1 = expr_location_or (TREE_OPERAND (arg, 1), loc);
3155 return build2_loc (loc, COMPOUND_EXPR, type,
3156 TREE_OPERAND (arg, 0),
3157 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 1)));
3159 case NON_LVALUE_EXPR:
3160 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
3161 return invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0));
3163 CASE_CONVERT:
3164 if (TREE_CODE (TREE_TYPE (arg)) == BOOLEAN_TYPE)
3165 return build1_loc (loc, TRUTH_NOT_EXPR, type, arg);
3167 /* ... fall through ... */
3169 case FLOAT_EXPR:
3170 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
3171 return build1_loc (loc, TREE_CODE (arg), type,
3172 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)));
3174 case BIT_AND_EXPR:
3175 if (!integer_onep (TREE_OPERAND (arg, 1)))
3176 return NULL_TREE;
3177 return build2_loc (loc, EQ_EXPR, type, arg, build_int_cst (type, 0));
3179 case SAVE_EXPR:
3180 return build1_loc (loc, TRUTH_NOT_EXPR, type, arg);
3182 case CLEANUP_POINT_EXPR:
3183 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
3184 return build1_loc (loc, CLEANUP_POINT_EXPR, type,
3185 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)));
3187 default:
3188 return NULL_TREE;
3192 /* Return a simplified tree node for the truth-negation of ARG. This
3193 never alters ARG itself. We assume that ARG is an operation that
3194 returns a truth value (0 or 1).
3196 FIXME: one would think we would fold the result, but it causes
3197 problems with the dominator optimizer. */
3199 tree
3200 invert_truthvalue_loc (location_t loc, tree arg)
3202 tree tem;
3204 if (TREE_CODE (arg) == ERROR_MARK)
3205 return arg;
3207 tem = fold_truth_not_expr (loc, arg);
3208 if (!tem)
3209 tem = build1_loc (loc, TRUTH_NOT_EXPR, TREE_TYPE (arg), arg);
3211 return tem;
3214 /* Given a bit-wise operation CODE applied to ARG0 and ARG1, see if both
3215 operands are another bit-wise operation with a common input. If so,
3216 distribute the bit operations to save an operation and possibly two if
3217 constants are involved. For example, convert
3218 (A | B) & (A | C) into A | (B & C)
3219 Further simplification will occur if B and C are constants.
3221 If this optimization cannot be done, 0 will be returned. */
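/* With constants the saving is larger; e.g. (x | 0xF0) & (x | 0x0F)
   becomes x | (0xF0 & 0x0F), which folds to x | 0 and finally to just x.  */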
3223 static tree
3224 distribute_bit_expr (location_t loc, enum tree_code code, tree type,
3225 tree arg0, tree arg1)
3227 tree common;
3228 tree left, right;
3230 if (TREE_CODE (arg0) != TREE_CODE (arg1)
3231 || TREE_CODE (arg0) == code
3232 || (TREE_CODE (arg0) != BIT_AND_EXPR
3233 && TREE_CODE (arg0) != BIT_IOR_EXPR))
3234 return 0;
3236 if (operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 0), 0))
3238 common = TREE_OPERAND (arg0, 0);
3239 left = TREE_OPERAND (arg0, 1);
3240 right = TREE_OPERAND (arg1, 1);
3242 else if (operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 1), 0))
3244 common = TREE_OPERAND (arg0, 0);
3245 left = TREE_OPERAND (arg0, 1);
3246 right = TREE_OPERAND (arg1, 0);
3248 else if (operand_equal_p (TREE_OPERAND (arg0, 1), TREE_OPERAND (arg1, 0), 0))
3250 common = TREE_OPERAND (arg0, 1);
3251 left = TREE_OPERAND (arg0, 0);
3252 right = TREE_OPERAND (arg1, 1);
3254 else if (operand_equal_p (TREE_OPERAND (arg0, 1), TREE_OPERAND (arg1, 1), 0))
3256 common = TREE_OPERAND (arg0, 1);
3257 left = TREE_OPERAND (arg0, 0);
3258 right = TREE_OPERAND (arg1, 0);
3260 else
3261 return 0;
3263 common = fold_convert_loc (loc, type, common);
3264 left = fold_convert_loc (loc, type, left);
3265 right = fold_convert_loc (loc, type, right);
3266 return fold_build2_loc (loc, TREE_CODE (arg0), type, common,
3267 fold_build2_loc (loc, code, type, left, right));
3270 /* Knowing that ARG0 and ARG1 are both RDIV_EXPRs, simplify a binary operation
3271 with code CODE. This optimization is unsafe. */
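/* It is unsafe because floating-point division does not distribute
   exactly: a/c + b/c and (a + b)/c may round differently, and rewriting
   a/c as a * (1/c) changes the result whenever 1/c is inexact.  Hence
   this is only done when unsafe math optimizations are enabled.  */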
3272 static tree
3273 distribute_real_division (location_t loc, enum tree_code code, tree type,
3274 tree arg0, tree arg1)
3276 bool mul0 = TREE_CODE (arg0) == MULT_EXPR;
3277 bool mul1 = TREE_CODE (arg1) == MULT_EXPR;
3279 /* (A / C) +- (B / C) -> (A +- B) / C. */
3280 if (mul0 == mul1
3281 && operand_equal_p (TREE_OPERAND (arg0, 1),
3282 TREE_OPERAND (arg1, 1), 0))
3283 return fold_build2_loc (loc, mul0 ? MULT_EXPR : RDIV_EXPR, type,
3284 fold_build2_loc (loc, code, type,
3285 TREE_OPERAND (arg0, 0),
3286 TREE_OPERAND (arg1, 0)),
3287 TREE_OPERAND (arg0, 1));
3289 /* (A / C1) +- (A / C2) -> A * (1 / C1 +- 1 / C2). */
3290 if (operand_equal_p (TREE_OPERAND (arg0, 0),
3291 TREE_OPERAND (arg1, 0), 0)
3292 && TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
3293 && TREE_CODE (TREE_OPERAND (arg1, 1)) == REAL_CST)
3295 REAL_VALUE_TYPE r0, r1;
3296 r0 = TREE_REAL_CST (TREE_OPERAND (arg0, 1));
3297 r1 = TREE_REAL_CST (TREE_OPERAND (arg1, 1));
3298 if (!mul0)
3299 real_arithmetic (&r0, RDIV_EXPR, &dconst1, &r0);
3300 if (!mul1)
3301 real_arithmetic (&r1, RDIV_EXPR, &dconst1, &r1);
3302 real_arithmetic (&r0, code, &r0, &r1);
3303 return fold_build2_loc (loc, MULT_EXPR, type,
3304 TREE_OPERAND (arg0, 0),
3305 build_real (type, r0));
3308 return NULL_TREE;
3311 /* Return a BIT_FIELD_REF of type TYPE to refer to BITSIZE bits of INNER
3312 starting at BITPOS. The field is unsigned if UNSIGNEDP is nonzero. */
3314 static tree
3315 make_bit_field_ref (location_t loc, tree inner, tree type,
3316 HOST_WIDE_INT bitsize, HOST_WIDE_INT bitpos, int unsignedp)
3318 tree result, bftype;
3320 if (bitpos == 0)
3322 tree size = TYPE_SIZE (TREE_TYPE (inner));
3323 if ((INTEGRAL_TYPE_P (TREE_TYPE (inner))
3324 || POINTER_TYPE_P (TREE_TYPE (inner)))
3325 && host_integerp (size, 0)
3326 && tree_low_cst (size, 0) == bitsize)
3327 return fold_convert_loc (loc, type, inner);
3330 bftype = type;
3331 if (TYPE_PRECISION (bftype) != bitsize
3332 || TYPE_UNSIGNED (bftype) == !unsignedp)
3333 bftype = build_nonstandard_integer_type (bitsize, 0);
3335 result = build3_loc (loc, BIT_FIELD_REF, bftype, inner,
3336 size_int (bitsize), bitsize_int (bitpos));
3338 if (bftype != type)
3339 result = fold_convert_loc (loc, type, result);
3341 return result;
3344 /* Optimize a bit-field compare.
3346 There are two cases: First is a compare against a constant and the
3347 second is a comparison of two items where the fields are at the same
3348 bit position relative to the start of a chunk (byte, halfword, word)
3349 large enough to contain it. In these cases we can avoid the shift
3350 implicit in bitfield extractions.
3352 For constants, we emit a compare of the shifted constant with the
3353 BIT_AND_EXPR of a mask and a byte, halfword, or word of the operand being
3354 compared. For two fields at the same position, we do the ANDs with the
3355 similar mask and compare the result of the ANDs.
3357 CODE is the comparison code, known to be either NE_EXPR or EQ_EXPR.
3358 COMPARE_TYPE is the type of the comparison, and LHS and RHS
3359 are the left and right operands of the comparison, respectively.
3361 If the optimization described above can be done, we return the resulting
3362 tree. Otherwise we return zero. */
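/* Sketch of the constant case with a hypothetical 3-bit field F stored at
   bit offset 2 of a word W: "s.F == 5" becomes "(W & (7 << 2)) == (5 << 2)",
   i.e. we mask the containing word and compare against the shifted
   constant, avoiding the extraction shift entirely.  */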
3364 static tree
3365 optimize_bit_field_compare (location_t loc, enum tree_code code,
3366 tree compare_type, tree lhs, tree rhs)
3368 HOST_WIDE_INT lbitpos, lbitsize, rbitpos, rbitsize, nbitpos, nbitsize;
3369 tree type = TREE_TYPE (lhs);
3370 tree signed_type, unsigned_type;
3371 int const_p = TREE_CODE (rhs) == INTEGER_CST;
3372 enum machine_mode lmode, rmode, nmode;
3373 int lunsignedp, runsignedp;
3374 int lvolatilep = 0, rvolatilep = 0;
3375 tree linner, rinner = NULL_TREE;
3376 tree mask;
3377 tree offset;
3379 /* Get all the information about the extractions being done. If the bit size
3380 is the same as the size of the underlying object, we aren't doing an
3381 extraction at all and so can do nothing. We also don't want to
3382 do anything if the inner expression is a PLACEHOLDER_EXPR since we
3383 then will no longer be able to replace it. */
3384 linner = get_inner_reference (lhs, &lbitsize, &lbitpos, &offset, &lmode,
3385 &lunsignedp, &lvolatilep, false);
3386 if (linner == lhs || lbitsize == GET_MODE_BITSIZE (lmode) || lbitsize < 0
3387 || offset != 0 || TREE_CODE (linner) == PLACEHOLDER_EXPR)
3388 return 0;
3390 if (!const_p)
3392 /* If this is not a constant, we can only do something if bit positions,
3393 sizes, and signedness are the same. */
3394 rinner = get_inner_reference (rhs, &rbitsize, &rbitpos, &offset, &rmode,
3395 &runsignedp, &rvolatilep, false);
3397 if (rinner == rhs || lbitpos != rbitpos || lbitsize != rbitsize
3398 || lunsignedp != runsignedp || offset != 0
3399 || TREE_CODE (rinner) == PLACEHOLDER_EXPR)
3400 return 0;
3403 /* See if we can find a mode to refer to this field. We should be able to,
3404 but fail if we can't. */
3405 if (lvolatilep
3406 && GET_MODE_BITSIZE (lmode) > 0
3407 && flag_strict_volatile_bitfields > 0)
3408 nmode = lmode;
3409 else
3410 nmode = get_best_mode (lbitsize, lbitpos,
3411 const_p ? TYPE_ALIGN (TREE_TYPE (linner))
3412 : MIN (TYPE_ALIGN (TREE_TYPE (linner)),
3413 TYPE_ALIGN (TREE_TYPE (rinner))),
3414 word_mode, lvolatilep || rvolatilep);
3415 if (nmode == VOIDmode)
3416 return 0;
3418 /* Set signed and unsigned types of the precision of this mode for the
3419 shifts below. */
3420 signed_type = lang_hooks.types.type_for_mode (nmode, 0);
3421 unsigned_type = lang_hooks.types.type_for_mode (nmode, 1);
3423 /* Compute the bit position and size for the new reference and our offset
3424 within it. If the new reference is the same size as the original, we
3425 won't optimize anything, so return zero. */
3426 nbitsize = GET_MODE_BITSIZE (nmode);
3427 nbitpos = lbitpos & ~ (nbitsize - 1);
3428 lbitpos -= nbitpos;
3429 if (nbitsize == lbitsize)
3430 return 0;
3432 if (BYTES_BIG_ENDIAN)
3433 lbitpos = nbitsize - lbitsize - lbitpos;
3435 /* Make the mask to be used against the extracted field. */
3436 mask = build_int_cst_type (unsigned_type, -1);
3437 mask = const_binop (LSHIFT_EXPR, mask, size_int (nbitsize - lbitsize));
3438 mask = const_binop (RSHIFT_EXPR, mask,
3439 size_int (nbitsize - lbitsize - lbitpos));
3441 if (! const_p)
3442 /* If not comparing with constant, just rework the comparison
3443 and return. */
3444 return fold_build2_loc (loc, code, compare_type,
3445 fold_build2_loc (loc, BIT_AND_EXPR, unsigned_type,
3446 make_bit_field_ref (loc, linner,
3447 unsigned_type,
3448 nbitsize, nbitpos,
3449 1),
3450 mask),
3451 fold_build2_loc (loc, BIT_AND_EXPR, unsigned_type,
3452 make_bit_field_ref (loc, rinner,
3453 unsigned_type,
3454 nbitsize, nbitpos,
3455 1),
3456 mask));
3458 /* Otherwise, we are handling the constant case. See if the constant is too
3459 big for the field. Warn and return a tree for 0 (false) if so. We do
3460 this not only for its own sake, but to avoid having to test for this
3461 error case below. If we didn't, we might generate wrong code.
3463 For unsigned fields, the constant shifted right by the field length should
3464 be all zero. For signed fields, the high-order bits should agree with
3465 the sign bit. */
3467 if (lunsignedp)
3469 if (! integer_zerop (const_binop (RSHIFT_EXPR,
3470 fold_convert_loc (loc,
3471 unsigned_type, rhs),
3472 size_int (lbitsize))))
3474 warning (0, "comparison is always %d due to width of bit-field",
3475 code == NE_EXPR);
3476 return constant_boolean_node (code == NE_EXPR, compare_type);
3479 else
3481 tree tem = const_binop (RSHIFT_EXPR,
3482 fold_convert_loc (loc, signed_type, rhs),
3483 size_int (lbitsize - 1));
3484 if (! integer_zerop (tem) && ! integer_all_onesp (tem))
3486 warning (0, "comparison is always %d due to width of bit-field",
3487 code == NE_EXPR);
3488 return constant_boolean_node (code == NE_EXPR, compare_type);
3492 /* Single-bit compares should always be against zero. */
3493 if (lbitsize == 1 && ! integer_zerop (rhs))
3495 code = code == EQ_EXPR ? NE_EXPR : EQ_EXPR;
3496 rhs = build_int_cst (type, 0);
3499 /* Make a new bitfield reference, shift the constant over the
3500 appropriate number of bits and mask it with the computed mask
3501 (in case this was a signed field). If we changed it, make a new one. */
3502 lhs = make_bit_field_ref (loc, linner, unsigned_type, nbitsize, nbitpos, 1);
3503 if (lvolatilep)
3505 TREE_SIDE_EFFECTS (lhs) = 1;
3506 TREE_THIS_VOLATILE (lhs) = 1;
3509 rhs = const_binop (BIT_AND_EXPR,
3510 const_binop (LSHIFT_EXPR,
3511 fold_convert_loc (loc, unsigned_type, rhs),
3512 size_int (lbitpos)),
3513 mask);
3515 lhs = build2_loc (loc, code, compare_type,
3516 build2 (BIT_AND_EXPR, unsigned_type, lhs, mask), rhs);
3517 return lhs;
3520 /* Subroutine for fold_truthop: decode a field reference.
3522 If EXP is a comparison reference, we return the innermost reference.
3524 *PBITSIZE is set to the number of bits in the reference, *PBITPOS is
3525 set to the starting bit number.
3527 If the innermost field can be completely contained in a mode-sized
3528 unit, *PMODE is set to that mode. Otherwise, it is set to VOIDmode.
3530 *PVOLATILEP is set to 1 if any expression encountered is volatile;
3531 otherwise it is not changed.
3533 *PUNSIGNEDP is set to the signedness of the field.
3535 *PMASK is set to the mask used. This is either contained in a
3536 BIT_AND_EXPR or derived from the width of the field.
3538 *PAND_MASK is set to the mask found in a BIT_AND_EXPR, if any.
3540 Return 0 if this is not a component reference or is one that we can't
3541 do anything with. */
3543 static tree
3544 decode_field_reference (location_t loc, tree exp, HOST_WIDE_INT *pbitsize,
3545 HOST_WIDE_INT *pbitpos, enum machine_mode *pmode,
3546 int *punsignedp, int *pvolatilep,
3547 tree *pmask, tree *pand_mask)
3549 tree outer_type = 0;
3550 tree and_mask = 0;
3551 tree mask, inner, offset;
3552 tree unsigned_type;
3553 unsigned int precision;
3555 /* All the optimizations using this function assume integer fields.
3556 There are problems with FP fields since the type_for_size call
3557 below can fail for, e.g., XFmode. */
3558 if (! INTEGRAL_TYPE_P (TREE_TYPE (exp)))
3559 return 0;
3561 /* We are interested in the bare arrangement of bits, so strip everything
3562 that doesn't affect the machine mode. However, record the type of the
3563 outermost expression if it may matter below. */
3564 if (CONVERT_EXPR_P (exp)
3565 || TREE_CODE (exp) == NON_LVALUE_EXPR)
3566 outer_type = TREE_TYPE (exp);
3567 STRIP_NOPS (exp);
3569 if (TREE_CODE (exp) == BIT_AND_EXPR)
3571 and_mask = TREE_OPERAND (exp, 1);
3572 exp = TREE_OPERAND (exp, 0);
3573 STRIP_NOPS (exp); STRIP_NOPS (and_mask);
3574 if (TREE_CODE (and_mask) != INTEGER_CST)
3575 return 0;
3578 inner = get_inner_reference (exp, pbitsize, pbitpos, &offset, pmode,
3579 punsignedp, pvolatilep, false);
3580 if ((inner == exp && and_mask == 0)
3581 || *pbitsize < 0 || offset != 0
3582 || TREE_CODE (inner) == PLACEHOLDER_EXPR)
3583 return 0;
3585 /* If the number of bits in the reference is the same as the bitsize of
3586 the outer type, then the outer type gives the signedness. Otherwise
3587 (in case of a small bitfield) the signedness is unchanged. */
3588 if (outer_type && *pbitsize == TYPE_PRECISION (outer_type))
3589 *punsignedp = TYPE_UNSIGNED (outer_type);
3591 /* Compute the mask to access the bitfield. */
3592 unsigned_type = lang_hooks.types.type_for_size (*pbitsize, 1);
3593 precision = TYPE_PRECISION (unsigned_type);
3595 mask = build_int_cst_type (unsigned_type, -1);
3597 mask = const_binop (LSHIFT_EXPR, mask, size_int (precision - *pbitsize));
3598 mask = const_binop (RSHIFT_EXPR, mask, size_int (precision - *pbitsize));
3600 /* Merge it with the mask we found in the BIT_AND_EXPR, if any. */
3601 if (and_mask != 0)
3602 mask = fold_build2_loc (loc, BIT_AND_EXPR, unsigned_type,
3603 fold_convert_loc (loc, unsigned_type, and_mask), mask);
3605 *pmask = mask;
3606 *pand_mask = and_mask;
3607 return inner;
3610 /* Return nonzero if MASK represents a mask of SIZE ones in the low-order
3611 bit positions. */
3613 static int
3614 all_ones_mask_p (const_tree mask, int size)
3616 tree type = TREE_TYPE (mask);
3617 unsigned int precision = TYPE_PRECISION (type);
3618 tree tmask;
3620 tmask = build_int_cst_type (signed_type_for (type), -1);
3622 return
3623 tree_int_cst_equal (mask,
3624 const_binop (RSHIFT_EXPR,
3625 const_binop (LSHIFT_EXPR, tmask,
3626 size_int (precision - size)),
3627 size_int (precision - size)));
3630 /* Subroutine for fold: determine if VAL is the INTEGER_CST that
3631 represents the sign bit of EXP's type. If EXP represents a sign
3632 or zero extension, also test VAL against the unextended type.
3633 The return value is the (sub)expression whose sign bit is VAL,
3634 or NULL_TREE otherwise. */
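/* E.g. for a 32-bit EXP, VAL must be 0x80000000; the masking below makes
   this work whether VAL is that unsigned constant or the equivalent
   signed INT_MIN.  */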
3636 static tree
3637 sign_bit_p (tree exp, const_tree val)
3639 unsigned HOST_WIDE_INT mask_lo, lo;
3640 HOST_WIDE_INT mask_hi, hi;
3641 int width;
3642 tree t;
3644 /* Tree EXP must have an integral type. */
3645 t = TREE_TYPE (exp);
3646 if (! INTEGRAL_TYPE_P (t))
3647 return NULL_TREE;
3649 /* Tree VAL must be an integer constant. */
3650 if (TREE_CODE (val) != INTEGER_CST
3651 || TREE_OVERFLOW (val))
3652 return NULL_TREE;
3654 width = TYPE_PRECISION (t);
3655 if (width > HOST_BITS_PER_WIDE_INT)
3657 hi = (unsigned HOST_WIDE_INT) 1 << (width - HOST_BITS_PER_WIDE_INT - 1);
3658 lo = 0;
3660 mask_hi = ((unsigned HOST_WIDE_INT) -1
3661 >> (2 * HOST_BITS_PER_WIDE_INT - width));
3662 mask_lo = -1;
3664 else
3666 hi = 0;
3667 lo = (unsigned HOST_WIDE_INT) 1 << (width - 1);
3669 mask_hi = 0;
3670 mask_lo = ((unsigned HOST_WIDE_INT) -1
3671 >> (HOST_BITS_PER_WIDE_INT - width));
3674 /* We mask off those bits beyond TREE_TYPE (exp) so that we can
3675 treat VAL as if it were unsigned. */
3676 if ((TREE_INT_CST_HIGH (val) & mask_hi) == hi
3677 && (TREE_INT_CST_LOW (val) & mask_lo) == lo)
3678 return exp;
3680 /* Handle extension from a narrower type. */
3681 if (TREE_CODE (exp) == NOP_EXPR
3682 && TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0))) < width)
3683 return sign_bit_p (TREE_OPERAND (exp, 0), val);
3685 return NULL_TREE;
3688 /* Subroutine for fold_truthop: determine if an operand is simple enough
3689 to be evaluated unconditionally. */
3691 static int
3692 simple_operand_p (const_tree exp)
3694 /* Strip any conversions that don't change the machine mode. */
3695 STRIP_NOPS (exp);
3697 return (CONSTANT_CLASS_P (exp)
3698 || TREE_CODE (exp) == SSA_NAME
3699 || (DECL_P (exp)
3700 && ! TREE_ADDRESSABLE (exp)
3701 && ! TREE_THIS_VOLATILE (exp)
3702 && ! DECL_NONLOCAL (exp)
3703 /* Don't regard global variables as simple. They may be
3704 allocated in ways unknown to the compiler (shared memory,
3705 #pragma weak, etc). */
3706 && ! TREE_PUBLIC (exp)
3707 && ! DECL_EXTERNAL (exp)
3708 /* Loading a static variable is unduly expensive, but global
3709 registers aren't expensive. */
3710 && (! TREE_STATIC (exp) || DECL_REGISTER (exp))));
3713 /* The following functions are subroutines to fold_range_test and allow it to
3714 try to change a logical combination of comparisons into a range test.
3716 For example, both
3717 X == 2 || X == 3 || X == 4 || X == 5
3718 and
3719 X >= 2 && X <= 5
3720 are converted to
3721 (unsigned) (X - 2) <= 3
3723 We describe each set of comparisons as being either inside or outside
3724 a range, using a variable named like IN_P, and then describe the
3725 range with a lower and upper bound. If one of the bounds is omitted,
3726 it represents either the highest or lowest value of the type.
3728 In the comments below, we represent a range by two numbers in brackets
3729 preceded by a "+" to designate being inside that range, or a "-" to
3730 designate being outside that range, so the condition can be inverted by
3731 flipping the prefix. An omitted bound is represented by a "-". For
3732 example, "- [-, 10]" means being outside the range starting at the lowest
3733 possible value and ending at 10, in other words, being greater than 10.
3734 The range "+ [-, -]" is always true and hence the range "- [-, -]" is
3735 always false.
3737 We set up things so that the missing bounds are handled in a consistent
3738 manner so neither a missing bound nor "true" and "false" need to be
3739 handled using a special case. */
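/* A minimal sketch of the conversion described above, assuming a
   32-bit int; the two hypothetical helpers compute the same truth
   value.  The folded form leans on signed overflow being undefined,
   which is what the *STRICT_OVERFLOW_P flag below accounts for.  */
static int
example_in_range_naive (int x)
{
  return x == 2 || x == 3 || x == 4 || x == 5;
}

static int
example_in_range_folded (int x)
{
  /* Subtract the low bound, then one unsigned comparison covers the
     whole interval: values below 2 wrap around to huge ones.  */
  return (unsigned int) (x - 2) <= 3u;
}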
3741 /* Return the result of applying CODE to ARG0 and ARG1, but handle the case
3742 of ARG0 and/or ARG1 being omitted, meaning an unlimited range. UPPER0_P
3743 and UPPER1_P are nonzero if the respective argument is an upper bound
3744 and zero for a lower. TYPE, if nonzero, is the type of the result; it
3745 must be specified for a comparison. ARG1 will be converted to ARG0's
3746 type if both are specified. */
3748 static tree
3749 range_binop (enum tree_code code, tree type, tree arg0, int upper0_p,
3750 tree arg1, int upper1_p)
3752 tree tem;
3753 int result;
3754 int sgn0, sgn1;
3756 /* If neither arg represents infinity, do the normal operation.
3757 Else, if not a comparison, return infinity. Else handle the special
3758 comparison rules. Note that most of the cases below won't occur, but
3759 are handled for consistency. */
3761 if (arg0 != 0 && arg1 != 0)
3763 tem = fold_build2 (code, type != 0 ? type : TREE_TYPE (arg0),
3764 arg0, fold_convert (TREE_TYPE (arg0), arg1));
3765 STRIP_NOPS (tem);
3766 return TREE_CODE (tem) == INTEGER_CST ? tem : 0;
3769 if (TREE_CODE_CLASS (code) != tcc_comparison)
3770 return 0;
3772 /* Set SGN[01] to -1 if ARG[01] is a lower bound, 1 for upper, and 0
3773 for neither. In real maths, we cannot assume open ended ranges are
3774 the same. But, this is computer arithmetic, where numbers are finite.
3775 We can therefore model a missing bound as a value Z that lies
3776 beyond any representable number. This permits
3777 us to treat unbounded ranges as equal. */
3778 sgn0 = arg0 != 0 ? 0 : (upper0_p ? 1 : -1);
3779 sgn1 = arg1 != 0 ? 0 : (upper1_p ? 1 : -1);
3780 switch (code)
3782 case EQ_EXPR:
3783 result = sgn0 == sgn1;
3784 break;
3785 case NE_EXPR:
3786 result = sgn0 != sgn1;
3787 break;
3788 case LT_EXPR:
3789 result = sgn0 < sgn1;
3790 break;
3791 case LE_EXPR:
3792 result = sgn0 <= sgn1;
3793 break;
3794 case GT_EXPR:
3795 result = sgn0 > sgn1;
3796 break;
3797 case GE_EXPR:
3798 result = sgn0 >= sgn1;
3799 break;
3800 default:
3801 gcc_unreachable ();
3804 return constant_boolean_node (result, type);
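/* Sketch of the missing-bound handling above in plain C, with a null
   pointer standing in for an omitted bound and A_UPPER/B_UPPER saying
   whether that bound would be an upper (+Z) or lower (-Z) one.
   Hypothetical helper, LT_EXPR case only.  */
static int
example_bound_lt (const int *a, int a_upper, const int *b, int b_upper)
{
  int sgn0 = a != 0 ? 0 : (a_upper ? 1 : -1);
  int sgn1 = b != 0 ? 0 : (b_upper ? 1 : -1);
  if (a != 0 && b != 0)
    return *a < *b;	/* Both bounds finite: ordinary comparison.  */
  return sgn0 < sgn1;	/* At least one bound is an infinity.  */
}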
3807 /* Given EXP, a logical expression, set the range it is testing into
3808 variables denoted by PIN_P, PLOW, and PHIGH. Return the expression
3809 actually being tested. *PLOW and *PHIGH will be made of the same
3810 type as the returned expression. If EXP is not a comparison, we
3811 will most likely not be returning a useful value and range. Set
3812 *STRICT_OVERFLOW_P to true if the return value is only valid
3813 because signed overflow is undefined; otherwise, do not change
3814 *STRICT_OVERFLOW_P. */
3816 tree
3817 make_range (tree exp, int *pin_p, tree *plow, tree *phigh,
3818 bool *strict_overflow_p)
3820 enum tree_code code;
3821 tree arg0 = NULL_TREE, arg1 = NULL_TREE;
3822 tree exp_type = NULL_TREE, arg0_type = NULL_TREE;
3823 int in_p, n_in_p;
3824 tree low, high, n_low, n_high;
3825 location_t loc = EXPR_LOCATION (exp);
3827 /* Start with simply saying "EXP != 0" and then look at the code of EXP
3828 and see if we can refine the range. Some of the cases below may not
3829 happen, but it doesn't seem worth worrying about this. We "continue"
3830 the outer loop when we've changed something; otherwise we "break"
3831 the switch, which will "break" the while. */
3833 in_p = 0;
3834 low = high = build_int_cst (TREE_TYPE (exp), 0);
3836 while (1)
3838 code = TREE_CODE (exp);
3839 exp_type = TREE_TYPE (exp);
3841 if (IS_EXPR_CODE_CLASS (TREE_CODE_CLASS (code)))
3843 if (TREE_OPERAND_LENGTH (exp) > 0)
3844 arg0 = TREE_OPERAND (exp, 0);
3845 if (TREE_CODE_CLASS (code) == tcc_comparison
3846 || TREE_CODE_CLASS (code) == tcc_unary
3847 || TREE_CODE_CLASS (code) == tcc_binary)
3848 arg0_type = TREE_TYPE (arg0);
3849 if (TREE_CODE_CLASS (code) == tcc_binary
3850 || TREE_CODE_CLASS (code) == tcc_comparison
3851 || (TREE_CODE_CLASS (code) == tcc_expression
3852 && TREE_OPERAND_LENGTH (exp) > 1))
3853 arg1 = TREE_OPERAND (exp, 1);
3856 switch (code)
3858 case TRUTH_NOT_EXPR:
3859 in_p = ! in_p, exp = arg0;
3860 continue;
3862 case EQ_EXPR: case NE_EXPR:
3863 case LT_EXPR: case LE_EXPR: case GE_EXPR: case GT_EXPR:
3864 /* We can only do something if the range is testing for zero
3865 and if the second operand is an integer constant. Note that
3866 saying something is "in" the range we make is done by
3867 complementing IN_P, because the initial state of being not
3868 equal to zero is an "out" range; "out" leaves IN_P alone. */
3869 if (low == 0 || high == 0
3870 || ! integer_zerop (low) || ! integer_zerop (high)
3871 || TREE_CODE (arg1) != INTEGER_CST)
3872 break;
3874 switch (code)
3876 case NE_EXPR: /* - [c, c] */
3877 low = high = arg1;
3878 break;
3879 case EQ_EXPR: /* + [c, c] */
3880 in_p = ! in_p, low = high = arg1;
3881 break;
3882 case GT_EXPR: /* - [-, c] */
3883 low = 0, high = arg1;
3884 break;
3885 case GE_EXPR: /* + [c, -] */
3886 in_p = ! in_p, low = arg1, high = 0;
3887 break;
3888 case LT_EXPR: /* - [c, -] */
3889 low = arg1, high = 0;
3890 break;
3891 case LE_EXPR: /* + [-, c] */
3892 in_p = ! in_p, low = 0, high = arg1;
3893 break;
3894 default:
3895 gcc_unreachable ();
3898 /* If this is an unsigned comparison, we also know that EXP is
3899 greater than or equal to zero. We base the range tests we make
3900 on that fact, so we record it here so we can parse existing
3901 range tests. We test arg0_type since often the return type
3902 of, e.g. EQ_EXPR, is boolean. */
3903 if (TYPE_UNSIGNED (arg0_type) && (low == 0 || high == 0))
3905 if (! merge_ranges (&n_in_p, &n_low, &n_high,
3906 in_p, low, high, 1,
3907 build_int_cst (arg0_type, 0),
3908 NULL_TREE))
3909 break;
3911 in_p = n_in_p, low = n_low, high = n_high;
3913 /* If the high bound is missing, but we have a nonzero low
3914 bound, reverse the range so it goes from zero to the low bound
3915 minus 1. */
3916 if (high == 0 && low && ! integer_zerop (low))
3918 in_p = ! in_p;
3919 high = range_binop (MINUS_EXPR, NULL_TREE, low, 0,
3920 integer_one_node, 0);
3921 low = build_int_cst (arg0_type, 0);
3925 exp = arg0;
3926 continue;
3928 case NEGATE_EXPR:
3929 /* (-x) IN [a,b] -> x in [-b, -a] */
3930 n_low = range_binop (MINUS_EXPR, exp_type,
3931 build_int_cst (exp_type, 0),
3932 0, high, 1);
3933 n_high = range_binop (MINUS_EXPR, exp_type,
3934 build_int_cst (exp_type, 0),
3935 0, low, 0);
3936 if (n_high != 0 && TREE_OVERFLOW (n_high))
3937 break;
3938 goto normalize;
3940 case BIT_NOT_EXPR:
3941 /* ~ X -> -X - 1 */
3942 exp = build2_loc (loc, MINUS_EXPR, exp_type, negate_expr (arg0),
3943 build_int_cst (exp_type, 1));
3944 continue;
3946 case PLUS_EXPR: case MINUS_EXPR:
3947 if (TREE_CODE (arg1) != INTEGER_CST)
3948 break;
3950 /* If flag_wrapv and ARG0_TYPE is signed, then we cannot
3951 move a constant to the other side. */
3952 if (!TYPE_UNSIGNED (arg0_type)
3953 && !TYPE_OVERFLOW_UNDEFINED (arg0_type))
3954 break;
3956 /* If EXP is signed, any overflow in the computation is undefined,
3957 so we don't worry about it so long as our computations on
3958 the bounds don't overflow. For unsigned, overflow is defined
3959 and this is exactly the right thing. */
3960 n_low = range_binop (code == MINUS_EXPR ? PLUS_EXPR : MINUS_EXPR,
3961 arg0_type, low, 0, arg1, 0);
3962 n_high = range_binop (code == MINUS_EXPR ? PLUS_EXPR : MINUS_EXPR,
3963 arg0_type, high, 1, arg1, 0);
3964 if ((n_low != 0 && TREE_OVERFLOW (n_low))
3965 || (n_high != 0 && TREE_OVERFLOW (n_high)))
3966 break;
3968 if (TYPE_OVERFLOW_UNDEFINED (arg0_type))
3969 *strict_overflow_p = true;
3971 normalize:
3972 /* Check for an unsigned range which has wrapped around the maximum
3973 value thus making n_high < n_low, and normalize it. */
3974 if (n_low && n_high && tree_int_cst_lt (n_high, n_low))
3976 low = range_binop (PLUS_EXPR, arg0_type, n_high, 0,
3977 integer_one_node, 0);
3978 high = range_binop (MINUS_EXPR, arg0_type, n_low, 0,
3979 integer_one_node, 0);
3981 /* If the range is of the form +/- [ x+1, x ], we won't
3982 be able to normalize it. But then, it represents the
3983 whole range or the empty set, so make it
3984 +/- [ -, - ]. */
3985 if (tree_int_cst_equal (n_low, low)
3986 && tree_int_cst_equal (n_high, high))
3987 low = high = 0;
3988 else
3989 in_p = ! in_p;
3991 else
3992 low = n_low, high = n_high;
3994 exp = arg0;
3995 continue;
3997 CASE_CONVERT: case NON_LVALUE_EXPR:
3998 if (TYPE_PRECISION (arg0_type) > TYPE_PRECISION (exp_type))
3999 break;
4001 if (! INTEGRAL_TYPE_P (arg0_type)
4002 || (low != 0 && ! int_fits_type_p (low, arg0_type))
4003 || (high != 0 && ! int_fits_type_p (high, arg0_type)))
4004 break;
4006 n_low = low, n_high = high;
4008 if (n_low != 0)
4009 n_low = fold_convert_loc (loc, arg0_type, n_low);
4011 if (n_high != 0)
4012 n_high = fold_convert_loc (loc, arg0_type, n_high);
4015 /* If we're converting arg0 from an unsigned type to the
4016 signed type of exp, we will be doing the comparison as unsigned.
4017 The tests above have already verified that LOW and HIGH
4018 are both positive.
4020 So we have to ensure that we will handle large unsigned
4021 values the same way that the current signed bounds treat
4022 negative values. */
4024 if (!TYPE_UNSIGNED (exp_type) && TYPE_UNSIGNED (arg0_type))
4026 tree high_positive;
4027 tree equiv_type;
4028 /* For fixed-point modes, we need to pass the saturating flag
4029 as the 2nd parameter. */
4030 if (ALL_FIXED_POINT_MODE_P (TYPE_MODE (arg0_type)))
4031 equiv_type = lang_hooks.types.type_for_mode
4032 (TYPE_MODE (arg0_type),
4033 TYPE_SATURATING (arg0_type));
4034 else
4035 equiv_type = lang_hooks.types.type_for_mode
4036 (TYPE_MODE (arg0_type), 1);
4038 /* A range without an upper bound is, naturally, unbounded.
4039 Since convert would have cropped a very large value, use
4040 the max value for the destination type. */
4041 high_positive
4042 = TYPE_MAX_VALUE (equiv_type) ? TYPE_MAX_VALUE (equiv_type)
4043 : TYPE_MAX_VALUE (arg0_type);
4045 if (TYPE_PRECISION (exp_type) == TYPE_PRECISION (arg0_type))
4046 high_positive = fold_build2_loc (loc, RSHIFT_EXPR, arg0_type,
4047 fold_convert_loc (loc, arg0_type,
4048 high_positive),
4049 build_int_cst (arg0_type, 1));
4051 /* If the low bound is specified, "and" the range with the
4052 range for which the original unsigned value will be
4053 positive. */
4054 if (low != 0)
4056 if (! merge_ranges (&n_in_p, &n_low, &n_high,
4057 1, n_low, n_high, 1,
4058 fold_convert_loc (loc, arg0_type,
4059 integer_zero_node),
4060 high_positive))
4061 break;
4063 in_p = (n_in_p == in_p);
4065 else
4067 /* Otherwise, "or" the range with the range of the input
4068 that will be interpreted as negative. */
4069 if (! merge_ranges (&n_in_p, &n_low, &n_high,
4070 0, n_low, n_high, 1,
4071 fold_convert_loc (loc, arg0_type,
4072 integer_zero_node),
4073 high_positive))
4074 break;
4076 in_p = (in_p != n_in_p);
4080 exp = arg0;
4081 low = n_low, high = n_high;
4082 continue;
4084 default:
4085 break;
4088 break;
4091 /* If EXP is a constant, we can evaluate whether this is true or false. */
4092 if (TREE_CODE (exp) == INTEGER_CST)
4094 in_p = in_p == (integer_onep (range_binop (GE_EXPR, integer_type_node,
4095 exp, 0, low, 0))
4096 && integer_onep (range_binop (LE_EXPR, integer_type_node,
4097 exp, 1, high, 1)));
4098 low = high = 0;
4099 exp = 0;
4102 *pin_p = in_p, *plow = low, *phigh = high;
4103 return exp;
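/* A worked instance of the rewriting loop above, assuming a signed
   32-bit x with undefined overflow.  The hypothetical helper returns
   1 for every x, since both forms denote the same range + [8, -].  */
static int
example_make_range_equiv (int x)
{
  /* PLUS_EXPR case: the constant migrates into the bounds.  */
  return (x + 3 > 10) == (x >= 8);
}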
4106 /* Given a range, LOW, HIGH, and IN_P, an expression, EXP, and a result
4107 type, TYPE, return an expression to test if EXP is in (or out of, depending
4108 on IN_P) the range. Return 0 if the test couldn't be created. */
4110 tree
4111 build_range_check (location_t loc, tree type, tree exp, int in_p,
4112 tree low, tree high)
4114 tree etype = TREE_TYPE (exp), value;
4116 #ifdef HAVE_canonicalize_funcptr_for_compare
4117 /* Disable this optimization for function pointer expressions
4118 on targets that require function pointer canonicalization. */
4119 if (HAVE_canonicalize_funcptr_for_compare
4120 && TREE_CODE (etype) == POINTER_TYPE
4121 && TREE_CODE (TREE_TYPE (etype)) == FUNCTION_TYPE)
4122 return NULL_TREE;
4123 #endif
4125 if (! in_p)
4127 value = build_range_check (loc, type, exp, 1, low, high);
4128 if (value != 0)
4129 return invert_truthvalue_loc (loc, value);
4131 return 0;
4134 if (low == 0 && high == 0)
4135 return build_int_cst (type, 1);
4137 if (low == 0)
4138 return fold_build2_loc (loc, LE_EXPR, type, exp,
4139 fold_convert_loc (loc, etype, high));
4141 if (high == 0)
4142 return fold_build2_loc (loc, GE_EXPR, type, exp,
4143 fold_convert_loc (loc, etype, low));
4145 if (operand_equal_p (low, high, 0))
4146 return fold_build2_loc (loc, EQ_EXPR, type, exp,
4147 fold_convert_loc (loc, etype, low));
4149 if (integer_zerop (low))
4151 if (! TYPE_UNSIGNED (etype))
4153 etype = unsigned_type_for (etype);
4154 high = fold_convert_loc (loc, etype, high);
4155 exp = fold_convert_loc (loc, etype, exp);
4157 return build_range_check (loc, type, exp, 1, 0, high);
4160 /* Optimize (c>=1) && (c<=127) into (signed char)c > 0. */
4161 if (integer_onep (low) && TREE_CODE (high) == INTEGER_CST)
4163 unsigned HOST_WIDE_INT lo;
4164 HOST_WIDE_INT hi;
4165 int prec;
4167 prec = TYPE_PRECISION (etype);
4168 if (prec <= HOST_BITS_PER_WIDE_INT)
4170 hi = 0;
4171 lo = ((unsigned HOST_WIDE_INT) 1 << (prec - 1)) - 1;
4173 else
4175 hi = ((HOST_WIDE_INT) 1 << (prec - HOST_BITS_PER_WIDE_INT - 1)) - 1;
4176 lo = (unsigned HOST_WIDE_INT) -1;
4179 if (TREE_INT_CST_HIGH (high) == hi && TREE_INT_CST_LOW (high) == lo)
4181 if (TYPE_UNSIGNED (etype))
4183 tree signed_etype = signed_type_for (etype);
4184 if (TYPE_PRECISION (signed_etype) != TYPE_PRECISION (etype))
4185 etype
4186 = build_nonstandard_integer_type (TYPE_PRECISION (etype), 0);
4187 else
4188 etype = signed_etype;
4189 exp = fold_convert_loc (loc, etype, exp);
4191 return fold_build2_loc (loc, GT_EXPR, type, exp,
4192 build_int_cst (etype, 0));
4196 /* Optimize (c>=low) && (c<=high) into (c-low>=0) && (c-low<=high-low).
4197 This requires wrap-around arithmetic for the type of the expression.
4198 First make sure that arithmetic in this type is valid, then make sure
4199 that it wraps around. */
4200 if (TREE_CODE (etype) == ENUMERAL_TYPE || TREE_CODE (etype) == BOOLEAN_TYPE)
4201 etype = lang_hooks.types.type_for_size (TYPE_PRECISION (etype),
4202 TYPE_UNSIGNED (etype));
4204 if (TREE_CODE (etype) == INTEGER_TYPE && !TYPE_OVERFLOW_WRAPS (etype))
4206 tree utype, minv, maxv;
4208 /* Check if (unsigned) INT_MAX + 1 == (unsigned) INT_MIN
4209 for the type in question, as we rely on this here. */
4210 utype = unsigned_type_for (etype);
4211 maxv = fold_convert_loc (loc, utype, TYPE_MAX_VALUE (etype));
4212 maxv = range_binop (PLUS_EXPR, NULL_TREE, maxv, 1,
4213 integer_one_node, 1);
4214 minv = fold_convert_loc (loc, utype, TYPE_MIN_VALUE (etype));
4216 if (integer_zerop (range_binop (NE_EXPR, integer_type_node,
4217 minv, 1, maxv, 1)))
4218 etype = utype;
4219 else
4220 return 0;
4223 high = fold_convert_loc (loc, etype, high);
4224 low = fold_convert_loc (loc, etype, low);
4225 exp = fold_convert_loc (loc, etype, exp);
4227 value = const_binop (MINUS_EXPR, high, low);
4230 if (POINTER_TYPE_P (etype))
4232 if (value != 0 && !TREE_OVERFLOW (value))
4234 low = fold_convert_loc (loc, sizetype, low);
4235 low = fold_build1_loc (loc, NEGATE_EXPR, sizetype, low);
4236 return build_range_check (loc, type,
4237 fold_build2_loc (loc, POINTER_PLUS_EXPR,
4238 etype, exp, low),
4239 1, build_int_cst (etype, 0), value);
4241 return 0;
4244 if (value != 0 && !TREE_OVERFLOW (value))
4245 return build_range_check (loc, type,
4246 fold_build2_loc (loc, MINUS_EXPR, etype, exp, low),
4247 1, build_int_cst (etype, 0), value);
4249 return 0;
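/* The essence of the test built above, in plain C: an in_p range
   [LOW, HIGH] becomes a single unsigned comparison of EXP - LOW
   against HIGH - LOW.  Hypothetical helper, assuming 32-bit int.  */
static int
example_build_range_check (int c)
{
  /* c >= '0' && c <= '9' folded into one comparison.  */
  return (unsigned int) (c - '0') <= (unsigned int) ('9' - '0');
}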
4252 /* Return the predecessor of VAL in its type, handling the infinite case. */
4254 static tree
4255 range_predecessor (tree val)
4257 tree type = TREE_TYPE (val);
4259 if (INTEGRAL_TYPE_P (type)
4260 && operand_equal_p (val, TYPE_MIN_VALUE (type), 0))
4261 return 0;
4262 else
4263 return range_binop (MINUS_EXPR, NULL_TREE, val, 0, integer_one_node, 0);
4266 /* Return the successor of VAL in its type, handling the infinite case. */
4268 static tree
4269 range_successor (tree val)
4271 tree type = TREE_TYPE (val);
4273 if (INTEGRAL_TYPE_P (type)
4274 && operand_equal_p (val, TYPE_MAX_VALUE (type), 0))
4275 return 0;
4276 else
4277 return range_binop (PLUS_EXPR, NULL_TREE, val, 0, integer_one_node, 0);
4280 /* Given two ranges, see if we can merge them into one. Return 1 if we
4281 can, 0 if we can't. Set the output range into the specified parameters. */
4283 bool
4284 merge_ranges (int *pin_p, tree *plow, tree *phigh, int in0_p, tree low0,
4285 tree high0, int in1_p, tree low1, tree high1)
4287 int no_overlap;
4288 int subset;
4289 int temp;
4290 tree tem;
4291 int in_p;
4292 tree low, high;
4293 int lowequal = ((low0 == 0 && low1 == 0)
4294 || integer_onep (range_binop (EQ_EXPR, integer_type_node,
4295 low0, 0, low1, 0)));
4296 int highequal = ((high0 == 0 && high1 == 0)
4297 || integer_onep (range_binop (EQ_EXPR, integer_type_node,
4298 high0, 1, high1, 1)));
4300 /* Make range 0 be the range that starts first, or ends last if they
4301 start at the same value. Swap them if it isn't. */
4302 if (integer_onep (range_binop (GT_EXPR, integer_type_node,
4303 low0, 0, low1, 0))
4304 || (lowequal
4305 && integer_onep (range_binop (GT_EXPR, integer_type_node,
4306 high1, 1, high0, 1))))
4308 temp = in0_p, in0_p = in1_p, in1_p = temp;
4309 tem = low0, low0 = low1, low1 = tem;
4310 tem = high0, high0 = high1, high1 = tem;
4313 /* Now flag two cases, whether the ranges are disjoint or whether the
4314 second range is totally subsumed in the first. Note that the tests
4315 below are simplified by the ones above. */
4316 no_overlap = integer_onep (range_binop (LT_EXPR, integer_type_node,
4317 high0, 1, low1, 0));
4318 subset = integer_onep (range_binop (LE_EXPR, integer_type_node,
4319 high1, 1, high0, 1));
4321 /* We now have four cases, depending on whether we are including or
4322 excluding the two ranges. */
4323 if (in0_p && in1_p)
4325 /* If they don't overlap, the result is false. If the second range
4326 is a subset it is the result. Otherwise, the range is from the start
4327 of the second to the end of the first. */
4328 if (no_overlap)
4329 in_p = 0, low = high = 0;
4330 else if (subset)
4331 in_p = 1, low = low1, high = high1;
4332 else
4333 in_p = 1, low = low1, high = high0;
4336 else if (in0_p && ! in1_p)
4338 /* If they don't overlap, the result is the first range. If they are
4339 equal, the result is false. If the second range is a subset of the
4340 first, and the ranges begin at the same place, we go from just after
4341 the end of the second range to the end of the first. If the second
4342 range is not a subset of the first, or if it is a subset and both
4343 ranges end at the same place, the range starts at the start of the
4344 first range and ends just before the second range.
4345 Otherwise, we can't describe this as a single range. */
4346 if (no_overlap)
4347 in_p = 1, low = low0, high = high0;
4348 else if (lowequal && highequal)
4349 in_p = 0, low = high = 0;
4350 else if (subset && lowequal)
4352 low = range_successor (high1);
4353 high = high0;
4354 in_p = 1;
4355 if (low == 0)
4357 /* We are in the weird situation where high0 > high1 but
4358 high1 has no successor. Punt. */
4359 return 0;
4362 else if (! subset || highequal)
4364 low = low0;
4365 high = range_predecessor (low1);
4366 in_p = 1;
4367 if (high == 0)
4369 /* low0 < low1 but low1 has no predecessor. Punt. */
4370 return 0;
4373 else
4374 return 0;
4377 else if (! in0_p && in1_p)
4379 /* If they don't overlap, the result is the second range. If the second
4380 is a subset of the first, the result is false. Otherwise,
4381 the range starts just after the first range and ends at the
4382 end of the second. */
4383 if (no_overlap)
4384 in_p = 1, low = low1, high = high1;
4385 else if (subset || highequal)
4386 in_p = 0, low = high = 0;
4387 else
4389 low = range_successor (high0);
4390 high = high1;
4391 in_p = 1;
4392 if (low == 0)
4394 /* high1 > high0 but high0 has no successor. Punt. */
4395 return 0;
4400 else
4402 /* The case where we are excluding both ranges. Here the complex case
4403 is if they don't overlap. In that case, the only time we have a
4404 range is if they are adjacent. If the second is a subset of the
4405 first, the result is the first. Otherwise, the range to exclude
4406 starts at the beginning of the first range and ends at the end of the
4407 second. */
4408 if (no_overlap)
4410 if (integer_onep (range_binop (EQ_EXPR, integer_type_node,
4411 range_successor (high0),
4412 1, low1, 0)))
4413 in_p = 0, low = low0, high = high1;
4414 else
4416 /* Canonicalize - [min, x] into - [-, x]. */
4417 if (low0 && TREE_CODE (low0) == INTEGER_CST)
4418 switch (TREE_CODE (TREE_TYPE (low0)))
4420 case ENUMERAL_TYPE:
4421 if (TYPE_PRECISION (TREE_TYPE (low0))
4422 != GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (low0))))
4423 break;
4424 /* FALLTHROUGH */
4425 case INTEGER_TYPE:
4426 if (tree_int_cst_equal (low0,
4427 TYPE_MIN_VALUE (TREE_TYPE (low0))))
4428 low0 = 0;
4429 break;
4430 case POINTER_TYPE:
4431 if (TYPE_UNSIGNED (TREE_TYPE (low0))
4432 && integer_zerop (low0))
4433 low0 = 0;
4434 break;
4435 default:
4436 break;
4439 /* Canonicalize - [x, max] into - [x, -]. */
4440 if (high1 && TREE_CODE (high1) == INTEGER_CST)
4441 switch (TREE_CODE (TREE_TYPE (high1)))
4443 case ENUMERAL_TYPE:
4444 if (TYPE_PRECISION (TREE_TYPE (high1))
4445 != GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (high1))))
4446 break;
4447 /* FALLTHROUGH */
4448 case INTEGER_TYPE:
4449 if (tree_int_cst_equal (high1,
4450 TYPE_MAX_VALUE (TREE_TYPE (high1))))
4451 high1 = 0;
4452 break;
4453 case POINTER_TYPE:
4454 if (TYPE_UNSIGNED (TREE_TYPE (high1))
4455 && integer_zerop (range_binop (PLUS_EXPR, NULL_TREE,
4456 high1, 1,
4457 integer_one_node, 1)))
4458 high1 = 0;
4459 break;
4460 default:
4461 break;
4464 /* The ranges might also be adjacent between the maximum and
4465 minimum values of the given type. For
4466 - [{min,-}, x] and - [y, {max,-}] ranges where x + 1 < y
4467 return + [x + 1, y - 1]. */
4468 if (low0 == 0 && high1 == 0)
4470 low = range_successor (high0);
4471 high = range_predecessor (low1);
4472 if (low == 0 || high == 0)
4473 return 0;
4475 in_p = 1;
4477 else
4478 return 0;
4481 else if (subset)
4482 in_p = 0, low = low0, high = high0;
4483 else
4484 in_p = 0, low = low0, high = high1;
4487 *pin_p = in_p, *plow = low, *phigh = high;
4488 return 1;
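/* A concrete instance of the merge above, assuming signed 32-bit x:
   ANDing + [2, 10] with + [5, -] yields + [5, 10].  The hypothetical
   helper returns 1 for every x.  */
static int
example_merge_ranges (int x)
{
  return ((x >= 2 && x <= 10) && x >= 5) == (x >= 5 && x <= 10);
}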
4492 /* Subroutine of fold, looking inside expressions of the form
4493 A op B ? A : C, where ARG0, ARG1 and ARG2 are the three operands
4494 of the COND_EXPR. This function is being used also to optimize
4495 A op B ? C : A, by reversing the comparison first.
4497 Return a folded expression whose code is not a COND_EXPR
4498 anymore, or NULL_TREE if no folding opportunity is found. */
4500 static tree
4501 fold_cond_expr_with_comparison (location_t loc, tree type,
4502 tree arg0, tree arg1, tree arg2)
4504 enum tree_code comp_code = TREE_CODE (arg0);
4505 tree arg00 = TREE_OPERAND (arg0, 0);
4506 tree arg01 = TREE_OPERAND (arg0, 1);
4507 tree arg1_type = TREE_TYPE (arg1);
4508 tree tem;
4510 STRIP_NOPS (arg1);
4511 STRIP_NOPS (arg2);
4513 /* If we have A op 0 ? A : -A, consider applying the following
4514 transformations:
4516 A == 0? A : -A same as -A
4517 A != 0? A : -A same as A
4518 A >= 0? A : -A same as abs (A)
4519 A > 0? A : -A same as abs (A)
4520 A <= 0? A : -A same as -abs (A)
4521 A < 0? A : -A same as -abs (A)
4523 None of these transformations work for modes with signed
4524 zeros. If A is +/-0, the first two transformations will
4525 change the sign of the result (from +0 to -0, or vice
4526 versa). The last four will fix the sign of the result,
4527 even though the original expressions could be positive or
4528 negative, depending on the sign of A.
4530 Note that all these transformations are correct if A is
4531 NaN, since the two alternatives (A and -A) are also NaNs. */
4532 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (type))
4533 && (FLOAT_TYPE_P (TREE_TYPE (arg01))
4534 ? real_zerop (arg01)
4535 : integer_zerop (arg01))
4536 && ((TREE_CODE (arg2) == NEGATE_EXPR
4537 && operand_equal_p (TREE_OPERAND (arg2, 0), arg1, 0))
4538 /* In the case that A is of the form X-Y, '-A' (arg2) may
4539 have already been folded to Y-X, check for that. */
4540 || (TREE_CODE (arg1) == MINUS_EXPR
4541 && TREE_CODE (arg2) == MINUS_EXPR
4542 && operand_equal_p (TREE_OPERAND (arg1, 0),
4543 TREE_OPERAND (arg2, 1), 0)
4544 && operand_equal_p (TREE_OPERAND (arg1, 1),
4545 TREE_OPERAND (arg2, 0), 0))))
4546 switch (comp_code)
4548 case EQ_EXPR:
4549 case UNEQ_EXPR:
4550 tem = fold_convert_loc (loc, arg1_type, arg1);
4551 return pedantic_non_lvalue_loc (loc,
4552 fold_convert_loc (loc, type,
4553 negate_expr (tem)));
4554 case NE_EXPR:
4555 case LTGT_EXPR:
4556 return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
4557 case UNGE_EXPR:
4558 case UNGT_EXPR:
4559 if (flag_trapping_math)
4560 break;
4561 /* Fall through. */
4562 case GE_EXPR:
4563 case GT_EXPR:
4564 if (TYPE_UNSIGNED (TREE_TYPE (arg1)))
4565 arg1 = fold_convert_loc (loc, signed_type_for
4566 (TREE_TYPE (arg1)), arg1);
4567 tem = fold_build1_loc (loc, ABS_EXPR, TREE_TYPE (arg1), arg1);
4568 return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, tem));
4569 case UNLE_EXPR:
4570 case UNLT_EXPR:
4571 if (flag_trapping_math)
4572 break;
4573 case LE_EXPR:
4574 case LT_EXPR:
4575 if (TYPE_UNSIGNED (TREE_TYPE (arg1)))
4576 arg1 = fold_convert_loc (loc, signed_type_for
4577 (TREE_TYPE (arg1)), arg1);
4578 tem = fold_build1_loc (loc, ABS_EXPR, TREE_TYPE (arg1), arg1);
4579 return negate_expr (fold_convert_loc (loc, type, tem));
4580 default:
4581 gcc_assert (TREE_CODE_CLASS (comp_code) == tcc_comparison);
4582 break;
4585 /* A != 0 ? A : 0 is simply A, unless A is -0. Likewise
4586 A == 0 ? A : 0 is always 0 unless A is -0. Note that
4587 both transformations are correct when A is NaN: A != 0
4588 is then true, and A == 0 is false. */
4590 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (type))
4591 && integer_zerop (arg01) && integer_zerop (arg2))
4593 if (comp_code == NE_EXPR)
4594 return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
4595 else if (comp_code == EQ_EXPR)
4596 return build_int_cst (type, 0);
4599 /* Try some transformations of A op B ? A : B.
4601 A == B? A : B same as B
4602 A != B? A : B same as A
4603 A >= B? A : B same as max (A, B)
4604 A > B? A : B same as max (B, A)
4605 A <= B? A : B same as min (A, B)
4606 A < B? A : B same as min (B, A)
4608 As above, these transformations don't work in the presence
4609 of signed zeros. For example, if A and B are zeros of
4610 opposite sign, the first two transformations will change
4611 the sign of the result. In the last four, the original
4612 expressions give different results for (A=+0, B=-0) and
4613 (A=-0, B=+0), but the transformed expressions do not.
4615 The first two transformations are correct if either A or B
4616 is a NaN. In the first transformation, the condition will
4617 be false, and B will indeed be chosen. In the case of the
4618 second transformation, the condition A != B will be true,
4619 and A will be chosen.
4621 The conversions to max() and min() are not correct if B is
4622 a number and A is not. The conditions in the original
4623 expressions will be false, so all four give B. The min()
4624 and max() versions would give a NaN instead. */
4625 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (type))
4626 && operand_equal_for_comparison_p (arg01, arg2, arg00)
4627 /* Avoid these transformations if the COND_EXPR may be used
4628 as an lvalue in the C++ front-end. PR c++/19199. */
4629 && (in_gimple_form
4630 || (strcmp (lang_hooks.name, "GNU C++") != 0
4631 && strcmp (lang_hooks.name, "GNU Objective-C++") != 0)
4632 || ! maybe_lvalue_p (arg1)
4633 || ! maybe_lvalue_p (arg2)))
4635 tree comp_op0 = arg00;
4636 tree comp_op1 = arg01;
4637 tree comp_type = TREE_TYPE (comp_op0);
4639 /* Avoid adding NOP_EXPRs in case this is an lvalue. */
4640 if (TYPE_MAIN_VARIANT (comp_type) == TYPE_MAIN_VARIANT (type))
4642 comp_type = type;
4643 comp_op0 = arg1;
4644 comp_op1 = arg2;
4647 switch (comp_code)
4649 case EQ_EXPR:
4650 return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, arg2));
4651 case NE_EXPR:
4652 return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
4653 case LE_EXPR:
4654 case LT_EXPR:
4655 case UNLE_EXPR:
4656 case UNLT_EXPR:
4657 /* In C++ a ?: expression can be an lvalue, so put the
4658 operand which will be used if they are equal first
4659 so that we can convert this back to the
4660 corresponding COND_EXPR. */
4661 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
4663 comp_op0 = fold_convert_loc (loc, comp_type, comp_op0);
4664 comp_op1 = fold_convert_loc (loc, comp_type, comp_op1);
4665 tem = (comp_code == LE_EXPR || comp_code == UNLE_EXPR)
4666 ? fold_build2_loc (loc, MIN_EXPR, comp_type, comp_op0, comp_op1)
4667 : fold_build2_loc (loc, MIN_EXPR, comp_type,
4668 comp_op1, comp_op0);
4669 return pedantic_non_lvalue_loc (loc,
4670 fold_convert_loc (loc, type, tem));
4672 break;
4673 case GE_EXPR:
4674 case GT_EXPR:
4675 case UNGE_EXPR:
4676 case UNGT_EXPR:
4677 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
4679 comp_op0 = fold_convert_loc (loc, comp_type, comp_op0);
4680 comp_op1 = fold_convert_loc (loc, comp_type, comp_op1);
4681 tem = (comp_code == GE_EXPR || comp_code == UNGE_EXPR)
4682 ? fold_build2_loc (loc, MAX_EXPR, comp_type, comp_op0, comp_op1)
4683 : fold_build2_loc (loc, MAX_EXPR, comp_type,
4684 comp_op1, comp_op0);
4685 return pedantic_non_lvalue_loc (loc,
4686 fold_convert_loc (loc, type, tem));
4688 break;
4689 case UNEQ_EXPR:
4690 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
4691 return pedantic_non_lvalue_loc (loc,
4692 fold_convert_loc (loc, type, arg2));
4693 break;
4694 case LTGT_EXPR:
4695 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
4696 return pedantic_non_lvalue_loc (loc,
4697 fold_convert_loc (loc, type, arg1));
4698 break;
4699 default:
4700 gcc_assert (TREE_CODE_CLASS (comp_code) == tcc_comparison);
4701 break;
4705 /* If this is A op C1 ? A : C2 with C1 and C2 constant integers,
4706 we might still be able to simplify this. For example,
4707 if C1 is one less or one more than C2, this might have started
4708 out as a MIN or MAX and been transformed by this function.
4709 Only good for INTEGER_TYPEs, because we need TYPE_MAX_VALUE. */
4711 if (INTEGRAL_TYPE_P (type)
4712 && TREE_CODE (arg01) == INTEGER_CST
4713 && TREE_CODE (arg2) == INTEGER_CST)
4714 switch (comp_code)
4716 case EQ_EXPR:
4717 if (TREE_CODE (arg1) == INTEGER_CST)
4718 break;
4719 /* We can replace A with C1 in this case. */
4720 arg1 = fold_convert_loc (loc, type, arg01);
4721 return fold_build3_loc (loc, COND_EXPR, type, arg0, arg1, arg2);
4723 case LT_EXPR:
4724 /* If C1 is C2 + 1, this is min(A, C2), but use ARG00's type for
4725 MIN_EXPR, to preserve the signedness of the comparison. */
4726 if (! operand_equal_p (arg2, TYPE_MAX_VALUE (type),
4727 OEP_ONLY_CONST)
4728 && operand_equal_p (arg01,
4729 const_binop (PLUS_EXPR, arg2,
4730 build_int_cst (type, 1)),
4731 OEP_ONLY_CONST))
4733 tem = fold_build2_loc (loc, MIN_EXPR, TREE_TYPE (arg00), arg00,
4734 fold_convert_loc (loc, TREE_TYPE (arg00),
4735 arg2));
4736 return pedantic_non_lvalue_loc (loc,
4737 fold_convert_loc (loc, type, tem));
4739 break;
4741 case LE_EXPR:
4742 /* If C1 is C2 - 1, this is min(A, C2), with the same care
4743 as above. */
4744 if (! operand_equal_p (arg2, TYPE_MIN_VALUE (type),
4745 OEP_ONLY_CONST)
4746 && operand_equal_p (arg01,
4747 const_binop (MINUS_EXPR, arg2,
4748 build_int_cst (type, 1)),
4749 OEP_ONLY_CONST))
4751 tem = fold_build2_loc (loc, MIN_EXPR, TREE_TYPE (arg00), arg00,
4752 fold_convert_loc (loc, TREE_TYPE (arg00),
4753 arg2));
4754 return pedantic_non_lvalue_loc (loc,
4755 fold_convert_loc (loc, type, tem));
4757 break;
4759 case GT_EXPR:
4760 /* If C1 is C2 - 1, this is max(A, C2), but use ARG00's type for
4761 MAX_EXPR, to preserve the signedness of the comparison. */
4762 if (! operand_equal_p (arg2, TYPE_MIN_VALUE (type),
4763 OEP_ONLY_CONST)
4764 && operand_equal_p (arg01,
4765 const_binop (MINUS_EXPR, arg2,
4766 build_int_cst (type, 1)),
4767 OEP_ONLY_CONST))
4769 tem = fold_build2_loc (loc, MAX_EXPR, TREE_TYPE (arg00), arg00,
4770 fold_convert_loc (loc, TREE_TYPE (arg00),
4771 arg2));
4772 return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, tem));
4774 break;
4776 case GE_EXPR:
4777 /* If C1 is C2 + 1, this is max(A, C2), with the same care as above. */
4778 if (! operand_equal_p (arg2, TYPE_MAX_VALUE (type),
4779 OEP_ONLY_CONST)
4780 && operand_equal_p (arg01,
4781 const_binop (PLUS_EXPR, arg2,
4782 build_int_cst (type, 1)),
4783 OEP_ONLY_CONST))
4785 tem = fold_build2_loc (loc, MAX_EXPR, TREE_TYPE (arg00), arg00,
4786 fold_convert_loc (loc, TREE_TYPE (arg00),
4787 arg2));
4788 return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, tem));
4790 break;
4791 case NE_EXPR:
4792 break;
4793 default:
4794 gcc_unreachable ();
4797 return NULL_TREE;
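/* Plain-C instances of the transformations above for integer operands,
   where signed zeros and NaNs are not a concern; the helper names are
   hypothetical.  */
static int
example_fold_abs (int a)
{
  return a >= 0 ? a : -a;	/* A >= 0 ? A : -A is abs (A).  */
}

static int
example_fold_min (int a, int b)
{
  return a < b ? a : b;		/* A < B ? A : B is min (B, A).  */
}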
4802 #ifndef LOGICAL_OP_NON_SHORT_CIRCUIT
4803 #define LOGICAL_OP_NON_SHORT_CIRCUIT \
4804 (BRANCH_COST (optimize_function_for_speed_p (cfun), \
4805 false) >= 2)
4806 #endif
4808 /* EXP is some logical combination of boolean tests. See if we can
4809 merge it into some range test. Return the new tree if so. */
4811 static tree
4812 fold_range_test (location_t loc, enum tree_code code, tree type,
4813 tree op0, tree op1)
4815 int or_op = (code == TRUTH_ORIF_EXPR
4816 || code == TRUTH_OR_EXPR);
4817 int in0_p, in1_p, in_p;
4818 tree low0, low1, low, high0, high1, high;
4819 bool strict_overflow_p = false;
4820 tree lhs = make_range (op0, &in0_p, &low0, &high0, &strict_overflow_p);
4821 tree rhs = make_range (op1, &in1_p, &low1, &high1, &strict_overflow_p);
4822 tree tem;
4823 const char * const warnmsg = G_("assuming signed overflow does not occur "
4824 "when simplifying range test");
4826 /* If this is an OR operation, invert both sides; we will invert
4827 again at the end. */
4828 if (or_op)
4829 in0_p = ! in0_p, in1_p = ! in1_p;
4831 /* If both expressions are the same, if we can merge the ranges, and we
4832 can build the range test, return it or it inverted. If one of the
4833 ranges is always true or always false, consider it to be the same
4834 expression as the other. */
4835 if ((lhs == 0 || rhs == 0 || operand_equal_p (lhs, rhs, 0))
4836 && merge_ranges (&in_p, &low, &high, in0_p, low0, high0,
4837 in1_p, low1, high1)
4838 && 0 != (tem = (build_range_check (UNKNOWN_LOCATION, type,
4839 lhs != 0 ? lhs
4840 : rhs != 0 ? rhs : integer_zero_node,
4841 in_p, low, high))))
4843 if (strict_overflow_p)
4844 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_COMPARISON);
4845 return or_op ? invert_truthvalue_loc (loc, tem) : tem;
4848 /* On machines where the branch cost is expensive, if this is a
4849 short-circuited branch and the underlying object on both sides
4850 is the same, make a non-short-circuit operation. */
4851 else if (LOGICAL_OP_NON_SHORT_CIRCUIT
4852 && lhs != 0 && rhs != 0
4853 && (code == TRUTH_ANDIF_EXPR
4854 || code == TRUTH_ORIF_EXPR)
4855 && operand_equal_p (lhs, rhs, 0))
4857 /* If simple enough, just rewrite. Otherwise, make a SAVE_EXPR
4858 unless we are at top level or LHS contains a PLACEHOLDER_EXPR, in
4859 which cases we can't do this. */
4860 if (simple_operand_p (lhs))
4861 return build2_loc (loc, code == TRUTH_ANDIF_EXPR
4862 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR,
4863 type, op0, op1);
4865 else if (lang_hooks.decls.global_bindings_p () == 0
4866 && ! CONTAINS_PLACEHOLDER_P (lhs))
4868 tree common = save_expr (lhs);
4870 if (0 != (lhs = build_range_check (loc, type, common,
4871 or_op ? ! in0_p : in0_p,
4872 low0, high0))
4873 && (0 != (rhs = build_range_check (loc, type, common,
4874 or_op ? ! in1_p : in1_p,
4875 low1, high1))))
4877 if (strict_overflow_p)
4878 fold_overflow_warning (warnmsg,
4879 WARN_STRICT_OVERFLOW_COMPARISON);
4880 return build2_loc (loc, code == TRUTH_ANDIF_EXPR
4881 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR,
4882 type, lhs, rhs);
4887 return 0;
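/* Sketch of the non-short-circuit rewrite above: when both operands
   are simple and free of side effects, a TRUTH_ANDIF_EXPR can become
   a branch-free bitwise AND of the two truth values.  Hypothetical
   helper.  */
static int
example_non_short_circuit (int x)
{
  /* x >= 'a' && x <= 'z' evaluated without a conditional branch.  */
  return (x >= 'a') & (x <= 'z');
}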
4890 /* Subroutine for fold_truthop: C is an INTEGER_CST interpreted as a P
4891 bit value. Arrange things so the extra bits will be set to zero if and
4892 only if C is sign-extended to its full width. If MASK is nonzero,
4893 it is an INTEGER_CST that should be AND'ed with the extra bits. */
4895 static tree
4896 unextend (tree c, int p, int unsignedp, tree mask)
4898 tree type = TREE_TYPE (c);
4899 int modesize = GET_MODE_BITSIZE (TYPE_MODE (type));
4900 tree temp;
4902 if (p == modesize || unsignedp)
4903 return c;
4905 /* We work by getting just the sign bit into the low-order bit, then
4906 into the high-order bit, then sign-extend. We then XOR that value
4907 with C. */
4908 temp = const_binop (RSHIFT_EXPR, c, size_int (p - 1));
4909 temp = const_binop (BIT_AND_EXPR, temp, size_int (1));
4911 /* We must use a signed type in order to get an arithmetic right shift.
4912 However, we must also avoid introducing accidental overflows, so that
4913 a subsequent call to integer_zerop will work. Hence we must
4914 do the type conversion here. At this point, the constant is either
4915 zero or one, and the conversion to a signed type can never overflow.
4916 We could get an overflow if this conversion is done anywhere else. */
4917 if (TYPE_UNSIGNED (type))
4918 temp = fold_convert (signed_type_for (type), temp);
4920 temp = const_binop (LSHIFT_EXPR, temp, size_int (modesize - 1));
4921 temp = const_binop (RSHIFT_EXPR, temp, size_int (modesize - p - 1));
4922 if (mask != 0)
4923 temp = const_binop (BIT_AND_EXPR, temp,
4924 fold_convert (TREE_TYPE (c), mask));
4925 /* If necessary, convert the type back to match the type of C. */
4926 if (TYPE_UNSIGNED (type))
4927 temp = fold_convert (type, temp);
4929 return fold_convert (type, const_binop (BIT_XOR_EXPR, c, temp));
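/* Not the exact constant-folding sequence above, but the standard
   two's-complement idiom for the same sign-extension step, assuming
   0 < p <= 32 and the usual 32-bit int; hypothetical helper.  */
static int
example_sign_extend (unsigned int c, int p)
{
  unsigned int m = 1u << (p - 1);	/* The P-bit sign bit.  */
  return (int) ((c ^ m) - m);		/* Propagate bit P-1 upward.  */
}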
4932 /* For an expression that has the form
4933 (A && B) || ~B
4935 (A || B) && ~B,
4936 we can drop one of the inner expressions and simplify to
4937 A || ~B
4939 A && ~B
4940 LOC is the location of the resulting expression. OP is the inner
4941 logical operation; the left-hand side in the examples above, while CMPOP
4942 is the right-hand side. RHS_ONLY is used to prevent us from accidentally
4943 removing a condition that guards another, as in
4944 (A != NULL && A->...) || A == NULL
4945 which we must not transform. If RHS_ONLY is true, only eliminate the
4946 right-most operand of the inner logical operation. */
4948 static tree
4949 merge_truthop_with_opposite_arm (location_t loc, tree op, tree cmpop,
4950 bool rhs_only)
4952 tree type = TREE_TYPE (cmpop);
4953 enum tree_code code = TREE_CODE (cmpop);
4954 enum tree_code truthop_code = TREE_CODE (op);
4955 tree lhs = TREE_OPERAND (op, 0);
4956 tree rhs = TREE_OPERAND (op, 1);
4957 tree orig_lhs = lhs, orig_rhs = rhs;
4958 enum tree_code rhs_code = TREE_CODE (rhs);
4959 enum tree_code lhs_code = TREE_CODE (lhs);
4960 enum tree_code inv_code;
4962 if (TREE_SIDE_EFFECTS (op) || TREE_SIDE_EFFECTS (cmpop))
4963 return NULL_TREE;
4965 if (TREE_CODE_CLASS (code) != tcc_comparison)
4966 return NULL_TREE;
4968 if (rhs_code == truthop_code)
4970 tree newrhs = merge_truthop_with_opposite_arm (loc, rhs, cmpop, rhs_only);
4971 if (newrhs != NULL_TREE)
4973 rhs = newrhs;
4974 rhs_code = TREE_CODE (rhs);
4977 if (lhs_code == truthop_code && !rhs_only)
4979 tree newlhs = merge_truthop_with_opposite_arm (loc, lhs, cmpop, false);
4980 if (newlhs != NULL_TREE)
4982 lhs = newlhs;
4983 lhs_code = TREE_CODE (lhs);
4987 inv_code = invert_tree_comparison (code, HONOR_NANS (TYPE_MODE (type)));
4988 if (inv_code == rhs_code
4989 && operand_equal_p (TREE_OPERAND (rhs, 0), TREE_OPERAND (cmpop, 0), 0)
4990 && operand_equal_p (TREE_OPERAND (rhs, 1), TREE_OPERAND (cmpop, 1), 0))
4991 return lhs;
4992 if (!rhs_only && inv_code == lhs_code
4993 && operand_equal_p (TREE_OPERAND (lhs, 0), TREE_OPERAND (cmpop, 0), 0)
4994 && operand_equal_p (TREE_OPERAND (lhs, 1), TREE_OPERAND (cmpop, 1), 0))
4995 return rhs;
4996 if (rhs != orig_rhs || lhs != orig_lhs)
4997 return fold_build2_loc (loc, truthop_code, TREE_TYPE (cmpop),
4998 lhs, rhs);
4999 return NULL_TREE;
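/* The rule above in plain C: a >= b is the inverse comparison of
   a < b, so the guarding comparison in the left arm is redundant and
   (a < b && c) || a >= b simplifies to c || a >= b.  The hypothetical
   helper returns 1 for all inputs.  */
static int
example_merge_opposite_arm (int a, int b, int c)
{
  return (((a < b && c != 0) || a >= b) == (c != 0 || a >= b));
}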
5002 /* Find ways of folding logical expressions of LHS and RHS:
5003 Try to merge two comparisons to the same innermost item.
5004 Look for range tests like "ch >= '0' && ch <= '9'".
5005 Look for combinations of simple terms on machines with expensive branches
5006 and evaluate the RHS unconditionally.
5008 For example, if we have p->a == 2 && p->b == 4 and we can make an
5009 object large enough to span both A and B, we can do this with a comparison
5010 against the object ANDed with the a mask.
5012 If we have p->a == q->a && p->b == q->b, we may be able to use bit masking
5013 operations to do this with one comparison.
5015 We check for both normal comparisons and the BIT_AND_EXPRs made by this
5016 function and the one above.
5018 CODE is the logical operation being done. It can be TRUTH_ANDIF_EXPR,
5019 TRUTH_AND_EXPR, TRUTH_ORIF_EXPR, or TRUTH_OR_EXPR.
5021 TRUTH_TYPE is the type of the logical operand and LHS and RHS are its
5022 two operands.
5024 We return the simplified tree or 0 if no optimization is possible. */
5026 static tree
5027 fold_truthop (location_t loc, enum tree_code code, tree truth_type,
5028 tree lhs, tree rhs)
5030 /* If this is the "or" of two comparisons, we can do something if
5031 the comparisons are NE_EXPR. If this is the "and", we can do something
5032 if the comparisons are EQ_EXPR. I.e.,
5033 (a->b == 2 && a->c == 4) can become (a->new == NEW).
5035 WANTED_CODE is this operation code. For single bit fields, we can
5036 convert EQ_EXPR to NE_EXPR so we need not reject the "wrong"
5037 comparison for one-bit fields. */
5039 enum tree_code wanted_code;
5040 enum tree_code lcode, rcode;
5041 tree ll_arg, lr_arg, rl_arg, rr_arg;
5042 tree ll_inner, lr_inner, rl_inner, rr_inner;
5043 HOST_WIDE_INT ll_bitsize, ll_bitpos, lr_bitsize, lr_bitpos;
5044 HOST_WIDE_INT rl_bitsize, rl_bitpos, rr_bitsize, rr_bitpos;
5045 HOST_WIDE_INT xll_bitpos, xlr_bitpos, xrl_bitpos, xrr_bitpos;
5046 HOST_WIDE_INT lnbitsize, lnbitpos, rnbitsize, rnbitpos;
5047 int ll_unsignedp, lr_unsignedp, rl_unsignedp, rr_unsignedp;
5048 enum machine_mode ll_mode, lr_mode, rl_mode, rr_mode;
5049 enum machine_mode lnmode, rnmode;
5050 tree ll_mask, lr_mask, rl_mask, rr_mask;
5051 tree ll_and_mask, lr_and_mask, rl_and_mask, rr_and_mask;
5052 tree l_const, r_const;
5053 tree lntype, rntype, result;
5054 HOST_WIDE_INT first_bit, end_bit;
5055 int volatilep;
5056 tree orig_lhs = lhs, orig_rhs = rhs;
5057 enum tree_code orig_code = code;
5059 /* Start by getting the comparison codes. Fail if anything is volatile.
5060 If one operand is a BIT_AND_EXPR with the constant one, treat it as if
5061 it were surrounded with a NE_EXPR. */
5063 if (TREE_SIDE_EFFECTS (lhs) || TREE_SIDE_EFFECTS (rhs))
5064 return 0;
5066 lcode = TREE_CODE (lhs);
5067 rcode = TREE_CODE (rhs);
5069 if (lcode == BIT_AND_EXPR && integer_onep (TREE_OPERAND (lhs, 1)))
5071 lhs = build2 (NE_EXPR, truth_type, lhs,
5072 build_int_cst (TREE_TYPE (lhs), 0));
5073 lcode = NE_EXPR;
5076 if (rcode == BIT_AND_EXPR && integer_onep (TREE_OPERAND (rhs, 1)))
5078 rhs = build2 (NE_EXPR, truth_type, rhs,
5079 build_int_cst (TREE_TYPE (rhs), 0));
5080 rcode = NE_EXPR;
5083 if (TREE_CODE_CLASS (lcode) != tcc_comparison
5084 || TREE_CODE_CLASS (rcode) != tcc_comparison)
5085 return 0;
5087 ll_arg = TREE_OPERAND (lhs, 0);
5088 lr_arg = TREE_OPERAND (lhs, 1);
5089 rl_arg = TREE_OPERAND (rhs, 0);
5090 rr_arg = TREE_OPERAND (rhs, 1);
5092 /* Simplify (x<y) && (x==y) into (x<=y) and related optimizations. */
5093 if (simple_operand_p (ll_arg)
5094 && simple_operand_p (lr_arg))
5096 if (operand_equal_p (ll_arg, rl_arg, 0)
5097 && operand_equal_p (lr_arg, rr_arg, 0))
5099 result = combine_comparisons (loc, code, lcode, rcode,
5100 truth_type, ll_arg, lr_arg);
5101 if (result)
5102 return result;
5104 else if (operand_equal_p (ll_arg, rr_arg, 0)
5105 && operand_equal_p (lr_arg, rl_arg, 0))
5107 result = combine_comparisons (loc, code, lcode,
5108 swap_tree_comparison (rcode),
5109 truth_type, ll_arg, lr_arg);
5110 if (result)
5111 return result;
5115 code = ((code == TRUTH_AND_EXPR || code == TRUTH_ANDIF_EXPR)
5116 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR);
5118 /* If the RHS can be evaluated unconditionally and its operands are
5119 simple, it wins to evaluate the RHS unconditionally on machines
5120 with expensive branches. In this case, this isn't a comparison
5121 that can be merged. Avoid doing this if the RHS is a floating-point
5122 comparison since those can trap. */
5124 if (BRANCH_COST (optimize_function_for_speed_p (cfun),
5125 false) >= 2
5126 && ! FLOAT_TYPE_P (TREE_TYPE (rl_arg))
5127 && simple_operand_p (rl_arg)
5128 && simple_operand_p (rr_arg))
5130 /* Convert (a != 0) || (b != 0) into (a | b) != 0. */
5131 if (code == TRUTH_OR_EXPR
5132 && lcode == NE_EXPR && integer_zerop (lr_arg)
5133 && rcode == NE_EXPR && integer_zerop (rr_arg)
5134 && TREE_TYPE (ll_arg) == TREE_TYPE (rl_arg)
5135 && INTEGRAL_TYPE_P (TREE_TYPE (ll_arg)))
5136 return build2_loc (loc, NE_EXPR, truth_type,
5137 build2 (BIT_IOR_EXPR, TREE_TYPE (ll_arg),
5138 ll_arg, rl_arg),
5139 build_int_cst (TREE_TYPE (ll_arg), 0));
5141 /* Convert (a == 0) && (b == 0) into (a | b) == 0. */
5142 if (code == TRUTH_AND_EXPR
5143 && lcode == EQ_EXPR && integer_zerop (lr_arg)
5144 && rcode == EQ_EXPR && integer_zerop (rr_arg)
5145 && TREE_TYPE (ll_arg) == TREE_TYPE (rl_arg)
5146 && INTEGRAL_TYPE_P (TREE_TYPE (ll_arg)))
5147 return build2_loc (loc, EQ_EXPR, truth_type,
5148 build2 (BIT_IOR_EXPR, TREE_TYPE (ll_arg),
5149 ll_arg, rl_arg),
5150 build_int_cst (TREE_TYPE (ll_arg), 0));
5152 if (LOGICAL_OP_NON_SHORT_CIRCUIT)
5154 if (code != orig_code || lhs != orig_lhs || rhs != orig_rhs)
5155 return build2_loc (loc, code, truth_type, lhs, rhs);
5156 return NULL_TREE;
5160 /* See if the comparisons can be merged. Then get all the parameters for
5161 each side. */
5163 if ((lcode != EQ_EXPR && lcode != NE_EXPR)
5164 || (rcode != EQ_EXPR && rcode != NE_EXPR))
5165 return 0;
5167 volatilep = 0;
5168 ll_inner = decode_field_reference (loc, ll_arg,
5169 &ll_bitsize, &ll_bitpos, &ll_mode,
5170 &ll_unsignedp, &volatilep, &ll_mask,
5171 &ll_and_mask);
5172 lr_inner = decode_field_reference (loc, lr_arg,
5173 &lr_bitsize, &lr_bitpos, &lr_mode,
5174 &lr_unsignedp, &volatilep, &lr_mask,
5175 &lr_and_mask);
5176 rl_inner = decode_field_reference (loc, rl_arg,
5177 &rl_bitsize, &rl_bitpos, &rl_mode,
5178 &rl_unsignedp, &volatilep, &rl_mask,
5179 &rl_and_mask);
5180 rr_inner = decode_field_reference (loc, rr_arg,
5181 &rr_bitsize, &rr_bitpos, &rr_mode,
5182 &rr_unsignedp, &volatilep, &rr_mask,
5183 &rr_and_mask);
5185 /* It must be true that the inner operation on the lhs of each
5186 comparison must be the same if we are to be able to do anything.
5187 Then see if we have constants. If not, the same must be true for
5188 the rhs's. */
5189 if (volatilep || ll_inner == 0 || rl_inner == 0
5190 || ! operand_equal_p (ll_inner, rl_inner, 0))
5191 return 0;
5193 if (TREE_CODE (lr_arg) == INTEGER_CST
5194 && TREE_CODE (rr_arg) == INTEGER_CST)
5195 l_const = lr_arg, r_const = rr_arg;
5196 else if (lr_inner == 0 || rr_inner == 0
5197 || ! operand_equal_p (lr_inner, rr_inner, 0))
5198 return 0;
5199 else
5200 l_const = r_const = 0;
5202 /* If either comparison code is not correct for our logical operation,
5203 fail. However, we can convert a one-bit comparison against zero into
5204 the opposite comparison against that bit being set in the field. */
5206 wanted_code = (code == TRUTH_AND_EXPR ? EQ_EXPR : NE_EXPR);
5207 if (lcode != wanted_code)
5209 if (l_const && integer_zerop (l_const) && integer_pow2p (ll_mask))
5211 /* Make the left operand unsigned, since we are only interested
5212 in the value of one bit. Otherwise we are doing the wrong
5213 thing below. */
5214 ll_unsignedp = 1;
5215 l_const = ll_mask;
5217 else
5218 return 0;
5221 /* This is analogous to the code for l_const above. */
5222 if (rcode != wanted_code)
5224 if (r_const && integer_zerop (r_const) && integer_pow2p (rl_mask))
5226 rl_unsignedp = 1;
5227 r_const = rl_mask;
5229 else
5230 return 0;
5233 /* See if we can find a mode that contains both fields being compared on
5234 the left. If we can't, fail. Otherwise, update all constants and masks
5235 to be relative to a field of that size. */
5236 first_bit = MIN (ll_bitpos, rl_bitpos);
5237 end_bit = MAX (ll_bitpos + ll_bitsize, rl_bitpos + rl_bitsize);
5238 lnmode = get_best_mode (end_bit - first_bit, first_bit,
5239 TYPE_ALIGN (TREE_TYPE (ll_inner)), word_mode,
5240 volatilep);
5241 if (lnmode == VOIDmode)
5242 return 0;
5244 lnbitsize = GET_MODE_BITSIZE (lnmode);
5245 lnbitpos = first_bit & ~ (lnbitsize - 1);
5246 lntype = lang_hooks.types.type_for_size (lnbitsize, 1);
5247 xll_bitpos = ll_bitpos - lnbitpos, xrl_bitpos = rl_bitpos - lnbitpos;
5249 if (BYTES_BIG_ENDIAN)
5251 xll_bitpos = lnbitsize - xll_bitpos - ll_bitsize;
5252 xrl_bitpos = lnbitsize - xrl_bitpos - rl_bitsize;
5255 ll_mask = const_binop (LSHIFT_EXPR, fold_convert_loc (loc, lntype, ll_mask),
5256 size_int (xll_bitpos));
5257 rl_mask = const_binop (LSHIFT_EXPR, fold_convert_loc (loc, lntype, rl_mask),
5258 size_int (xrl_bitpos));
5260 if (l_const)
5262 l_const = fold_convert_loc (loc, lntype, l_const);
5263 l_const = unextend (l_const, ll_bitsize, ll_unsignedp, ll_and_mask);
5264 l_const = const_binop (LSHIFT_EXPR, l_const, size_int (xll_bitpos));
5265 if (! integer_zerop (const_binop (BIT_AND_EXPR, l_const,
5266 fold_build1_loc (loc, BIT_NOT_EXPR,
5267 lntype, ll_mask))))
5269 warning (0, "comparison is always %d", wanted_code == NE_EXPR);
5271 return constant_boolean_node (wanted_code == NE_EXPR, truth_type);
5274 if (r_const)
5276 r_const = fold_convert_loc (loc, lntype, r_const);
5277 r_const = unextend (r_const, rl_bitsize, rl_unsignedp, rl_and_mask);
5278 r_const = const_binop (LSHIFT_EXPR, r_const, size_int (xrl_bitpos));
5279 if (! integer_zerop (const_binop (BIT_AND_EXPR, r_const,
5280 fold_build1_loc (loc, BIT_NOT_EXPR,
5281 lntype, rl_mask))))
5283 warning (0, "comparison is always %d", wanted_code == NE_EXPR);
5285 return constant_boolean_node (wanted_code == NE_EXPR, truth_type);
5289 /* If the right sides are not constant, do the same for it. Also,
5290 disallow this optimization if a size or signedness mismatch occurs
5291 between the left and right sides. */
5292 if (l_const == 0)
5294 if (ll_bitsize != lr_bitsize || rl_bitsize != rr_bitsize
5295 || ll_unsignedp != lr_unsignedp || rl_unsignedp != rr_unsignedp
5296 /* Make sure the two fields on the right
5297 correspond to the left without being swapped. */
5298 || ll_bitpos - rl_bitpos != lr_bitpos - rr_bitpos)
5299 return 0;
5301 first_bit = MIN (lr_bitpos, rr_bitpos);
5302 end_bit = MAX (lr_bitpos + lr_bitsize, rr_bitpos + rr_bitsize);
5303 rnmode = get_best_mode (end_bit - first_bit, first_bit,
5304 TYPE_ALIGN (TREE_TYPE (lr_inner)), word_mode,
5305 volatilep);
5306 if (rnmode == VOIDmode)
5307 return 0;
5309 rnbitsize = GET_MODE_BITSIZE (rnmode);
5310 rnbitpos = first_bit & ~ (rnbitsize - 1);
5311 rntype = lang_hooks.types.type_for_size (rnbitsize, 1);
5312 xlr_bitpos = lr_bitpos - rnbitpos, xrr_bitpos = rr_bitpos - rnbitpos;
5314 if (BYTES_BIG_ENDIAN)
5316 xlr_bitpos = rnbitsize - xlr_bitpos - lr_bitsize;
5317 xrr_bitpos = rnbitsize - xrr_bitpos - rr_bitsize;
5320 lr_mask = const_binop (LSHIFT_EXPR, fold_convert_loc (loc,
5321 rntype, lr_mask),
5322 size_int (xlr_bitpos));
5323 rr_mask = const_binop (LSHIFT_EXPR, fold_convert_loc (loc,
5324 rntype, rr_mask),
5325 size_int (xrr_bitpos));
5327 /* Make a mask that corresponds to both fields being compared.
5328 Do this for both items being compared. If the operands are the
5329 same size and the bits being compared are in the same position
5330 then we can do this by masking both and comparing the masked
5331 results. */
5332 ll_mask = const_binop (BIT_IOR_EXPR, ll_mask, rl_mask);
5333 lr_mask = const_binop (BIT_IOR_EXPR, lr_mask, rr_mask);
5334 if (lnbitsize == rnbitsize && xll_bitpos == xlr_bitpos)
5336 lhs = make_bit_field_ref (loc, ll_inner, lntype, lnbitsize, lnbitpos,
5337 ll_unsignedp || rl_unsignedp);
5338 if (! all_ones_mask_p (ll_mask, lnbitsize))
5339 lhs = build2 (BIT_AND_EXPR, lntype, lhs, ll_mask);
5341 rhs = make_bit_field_ref (loc, lr_inner, rntype, rnbitsize, rnbitpos,
5342 lr_unsignedp || rr_unsignedp);
5343 if (! all_ones_mask_p (lr_mask, rnbitsize))
5344 rhs = build2 (BIT_AND_EXPR, rntype, rhs, lr_mask);
5346 return build2_loc (loc, wanted_code, truth_type, lhs, rhs);
5349 /* There is still another way we can do something: If both pairs of
5350 fields being compared are adjacent, we may be able to make a wider
5351 field containing them both.
5353 Note that we still must mask the lhs/rhs expressions. Furthermore,
5354 the mask must be shifted to account for the shift done by
5355 make_bit_field_ref. */
5356 if ((ll_bitsize + ll_bitpos == rl_bitpos
5357 && lr_bitsize + lr_bitpos == rr_bitpos)
5358 || (ll_bitpos == rl_bitpos + rl_bitsize
5359 && lr_bitpos == rr_bitpos + rr_bitsize))
5361 tree type;
5363 lhs = make_bit_field_ref (loc, ll_inner, lntype,
5364 ll_bitsize + rl_bitsize,
5365 MIN (ll_bitpos, rl_bitpos), ll_unsignedp);
5366 rhs = make_bit_field_ref (loc, lr_inner, rntype,
5367 lr_bitsize + rr_bitsize,
5368 MIN (lr_bitpos, rr_bitpos), lr_unsignedp);
5370 ll_mask = const_binop (RSHIFT_EXPR, ll_mask,
5371 size_int (MIN (xll_bitpos, xrl_bitpos)));
5372 lr_mask = const_binop (RSHIFT_EXPR, lr_mask,
5373 size_int (MIN (xlr_bitpos, xrr_bitpos)));
5375 /* Convert to the smaller type before masking out unwanted bits. */
5376 type = lntype;
5377 if (lntype != rntype)
5379 if (lnbitsize > rnbitsize)
5381 lhs = fold_convert_loc (loc, rntype, lhs);
5382 ll_mask = fold_convert_loc (loc, rntype, ll_mask);
5383 type = rntype;
5385 else if (lnbitsize < rnbitsize)
5387 rhs = fold_convert_loc (loc, lntype, rhs);
5388 lr_mask = fold_convert_loc (loc, lntype, lr_mask);
5389 type = lntype;
5393 if (! all_ones_mask_p (ll_mask, ll_bitsize + rl_bitsize))
5394 lhs = build2 (BIT_AND_EXPR, type, lhs, ll_mask);
5396 if (! all_ones_mask_p (lr_mask, lr_bitsize + rr_bitsize))
5397 rhs = build2 (BIT_AND_EXPR, type, rhs, lr_mask);
5399 return build2_loc (loc, wanted_code, truth_type, lhs, rhs);
5402 return 0;
5405 /* Handle the case of comparisons with constants. If there is something in
5406 common between the masks, those bits of the constants must be the same.
5407 If not, the condition is always false. Test for this to avoid generating
5408 incorrect code below. */
5409 result = const_binop (BIT_AND_EXPR, ll_mask, rl_mask);
5410 if (! integer_zerop (result)
5411 && simple_cst_equal (const_binop (BIT_AND_EXPR, result, l_const),
5412 const_binop (BIT_AND_EXPR, result, r_const)) != 1)
5414 if (wanted_code == NE_EXPR)
5416 warning (0, "%<or%> of unmatched not-equal tests is always 1");
5417 return constant_boolean_node (true, truth_type);
5419 else
5421 warning (0, "%<and%> of mutually exclusive equal-tests is always 0");
5422 return constant_boolean_node (false, truth_type);
5426 /* Construct the expression we will return. First get the component
5427 reference we will make. Unless the mask is all ones the width of
5428 that field, perform the mask operation. Then compare with the
5429 merged constant. */
5430 result = make_bit_field_ref (loc, ll_inner, lntype, lnbitsize, lnbitpos,
5431 ll_unsignedp || rl_unsignedp);
5433 ll_mask = const_binop (BIT_IOR_EXPR, ll_mask, rl_mask);
5434 if (! all_ones_mask_p (ll_mask, lnbitsize))
5435 result = build2_loc (loc, BIT_AND_EXPR, lntype, result, ll_mask);
5437 return build2_loc (loc, wanted_code, truth_type, result,
5438 const_binop (BIT_IOR_EXPR, l_const, r_const));
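/* Sketch of the merged comparison above, assuming two adjacent 8-bit
   fields that share one 16-bit word; the folder instead emits a single
   wider masked load, and byte order picks the constant.  Struct and
   helper are hypothetical.  */
struct example_pair { unsigned char a, b; };

static int
example_merged_compare (const struct example_pair *p)
{
  /* p->a == 2 && p->b == 4 expressed as one 16-bit comparison.  */
  return (((unsigned int) p->b << 8) | p->a) == 0x0402;
}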
5441 /* Optimize T, which is a comparison of a MIN_EXPR or MAX_EXPR with a
5442 constant. */
5444 static tree
5445 optimize_minmax_comparison (location_t loc, enum tree_code code, tree type,
5446 tree op0, tree op1)
5448 tree arg0 = op0;
5449 enum tree_code op_code;
5450 tree comp_const;
5451 tree minmax_const;
5452 int consts_equal, consts_lt;
5453 tree inner;
5455 STRIP_SIGN_NOPS (arg0);
5457 op_code = TREE_CODE (arg0);
5458 minmax_const = TREE_OPERAND (arg0, 1);
5459 comp_const = fold_convert_loc (loc, TREE_TYPE (arg0), op1);
5460 consts_equal = tree_int_cst_equal (minmax_const, comp_const);
5461 consts_lt = tree_int_cst_lt (minmax_const, comp_const);
5462 inner = TREE_OPERAND (arg0, 0);
5464 /* If something does not permit us to optimize, return NULL_TREE. */
5465 if ((op_code != MIN_EXPR && op_code != MAX_EXPR)
5466 || TREE_CODE (comp_const) != INTEGER_CST
5467 || TREE_OVERFLOW (comp_const)
5468 || TREE_CODE (minmax_const) != INTEGER_CST
5469 || TREE_OVERFLOW (minmax_const))
5470 return NULL_TREE;
5472 /* Now handle all the various comparison codes. We only handle EQ_EXPR
5473 and GT_EXPR, doing the rest with recursive calls using logical
5474 simplifications. */
5475 switch (code)
5477 case NE_EXPR: case LT_EXPR: case LE_EXPR:
5479 tree tem
5480 = optimize_minmax_comparison (loc,
5481 invert_tree_comparison (code, false),
5482 type, op0, op1);
5483 if (tem)
5484 return invert_truthvalue_loc (loc, tem);
5485 return NULL_TREE;
5488 case GE_EXPR:
5489 return
5490 fold_build2_loc (loc, TRUTH_ORIF_EXPR, type,
5491 optimize_minmax_comparison
5492 (loc, EQ_EXPR, type, arg0, comp_const),
5493 optimize_minmax_comparison
5494 (loc, GT_EXPR, type, arg0, comp_const));
5496 case EQ_EXPR:
5497 if (op_code == MAX_EXPR && consts_equal)
5498 /* MAX (X, 0) == 0 -> X <= 0 */
5499 return fold_build2_loc (loc, LE_EXPR, type, inner, comp_const);
5501 else if (op_code == MAX_EXPR && consts_lt)
5502 /* MAX (X, 0) == 5 -> X == 5 */
5503 return fold_build2_loc (loc, EQ_EXPR, type, inner, comp_const);
5505 else if (op_code == MAX_EXPR)
5506 /* MAX (X, 0) == -1 -> false */
5507 return omit_one_operand_loc (loc, type, integer_zero_node, inner);
5509 else if (consts_equal)
5510 /* MIN (X, 0) == 0 -> X >= 0 */
5511 return fold_build2_loc (loc, GE_EXPR, type, inner, comp_const);
5513 else if (consts_lt)
5514 /* MIN (X, 0) == 5 -> false */
5515 return omit_one_operand_loc (loc, type, integer_zero_node, inner);
5517 else
5518 /* MIN (X, 0) == -1 -> X == -1 */
5519 return fold_build2_loc (loc, EQ_EXPR, type, inner, comp_const);
5521 case GT_EXPR:
5522 if (op_code == MAX_EXPR && (consts_equal || consts_lt))
5523 /* MAX (X, 0) > 0 -> X > 0
5524 MAX (X, 0) > 5 -> X > 5 */
5525 return fold_build2_loc (loc, GT_EXPR, type, inner, comp_const);
5527 else if (op_code == MAX_EXPR)
5528 /* MAX (X, 0) > -1 -> true */
5529 return omit_one_operand_loc (loc, type, integer_one_node, inner);
5531 else if (op_code == MIN_EXPR && (consts_equal || consts_lt))
5532 /* MIN (X, 0) > 0 -> false
5533 MIN (X, 0) > 5 -> false */
5534 return omit_one_operand_loc (loc, type, integer_zero_node, inner);
5536 else
5537 /* MIN (X, 0) > -1 -> X > -1 */
5538 return fold_build2_loc (loc, GT_EXPR, type, inner, comp_const);
5540 default:
5541 return NULL_TREE;
5545 /* T is an integer expression that is being multiplied or divided by, or
5546 taken modulo, a constant C (CODE says which operation and what kind of
5547 divide or modulus). See if we can eliminate that operation by folding it with
5548 other operations already in T. WIDE_TYPE, if non-null, is a type that
5549 should be used for the computation if wider than our type.
5551 For example, if we are dividing (X * 8) + (Y * 16) by 4, we can return
5552 (X * 2) + (Y * 4). We must, however, be assured that either the original
5553 expression would not overflow or that overflow is undefined for the type
5554 in the language in question.
5556 If we return a non-null expression, it is an equivalent form of the
5557 original computation, but need not be in the original type.
5559 We set *STRICT_OVERFLOW_P to true if the return value depends on
5560 signed overflow being undefined. Otherwise we do not change
5561 *STRICT_OVERFLOW_P. */
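/* A sketch of the strict-overflow aspect: for a signed X whose type has
   undefined overflow, (X * 4) / 4 simplifies to X and *STRICT_OVERFLOW_P
   is set, because the fold is only valid when X * 4 cannot wrap; for an
   unsigned X the fold is not performed, since the multiplication may
   legitimately wrap.  */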
5563 static tree
5564 extract_muldiv (tree t, tree c, enum tree_code code, tree wide_type,
5565 bool *strict_overflow_p)
5567 /* To avoid exponential search depth, refuse to allow recursion past
5568 three levels. Beyond that (1) it's highly unlikely that we'll find
5569 something interesting and (2) we've probably processed it before
5570 when we built the inner expression. */
5572 static int depth;
5573 tree ret;
5575 if (depth > 3)
5576 return NULL;
5578 depth++;
5579 ret = extract_muldiv_1 (t, c, code, wide_type, strict_overflow_p);
5580 depth--;
5582 return ret;
5585 static tree
5586 extract_muldiv_1 (tree t, tree c, enum tree_code code, tree wide_type,
5587 bool *strict_overflow_p)
5589 tree type = TREE_TYPE (t);
5590 enum tree_code tcode = TREE_CODE (t);
5591 tree ctype = (wide_type != 0 && (GET_MODE_SIZE (TYPE_MODE (wide_type))
5592 > GET_MODE_SIZE (TYPE_MODE (type)))
5593 ? wide_type : type);
5594 tree t1, t2;
5595 int same_p = tcode == code;
5596 tree op0 = NULL_TREE, op1 = NULL_TREE;
5597 bool sub_strict_overflow_p;
5599 /* Don't deal with constants of zero here; they confuse the code below. */
5600 if (integer_zerop (c))
5601 return NULL_TREE;
5603 if (TREE_CODE_CLASS (tcode) == tcc_unary)
5604 op0 = TREE_OPERAND (t, 0);
5606 if (TREE_CODE_CLASS (tcode) == tcc_binary)
5607 op0 = TREE_OPERAND (t, 0), op1 = TREE_OPERAND (t, 1);
5609 /* Note that we need not handle conditional operations here since fold
5610 already handles those cases. So just do arithmetic here. */
5611 switch (tcode)
5613 case INTEGER_CST:
5614 /* For a constant, we can always simplify if we are a multiply
5615 or (for divide and modulus) if it is a multiple of our constant. */
5616 if (code == MULT_EXPR
5617 || integer_zerop (const_binop (TRUNC_MOD_EXPR, t, c)))
5618 return const_binop (code, fold_convert (ctype, t),
5619 fold_convert (ctype, c));
5620 break;
5622 CASE_CONVERT: case NON_LVALUE_EXPR:
5623 /* If op0 is an expression ... */
5624 if ((COMPARISON_CLASS_P (op0)
5625 || UNARY_CLASS_P (op0)
5626 || BINARY_CLASS_P (op0)
5627 || VL_EXP_CLASS_P (op0)
5628 || EXPRESSION_CLASS_P (op0))
5629 /* ... and has wrapping overflow, and its type is smaller
5630 than ctype, then we cannot pass through as widening. */
5631 && ((TYPE_OVERFLOW_WRAPS (TREE_TYPE (op0))
5632 && ! (TREE_CODE (TREE_TYPE (op0)) == INTEGER_TYPE
5633 && TYPE_IS_SIZETYPE (TREE_TYPE (op0)))
5634 && (TYPE_PRECISION (ctype)
5635 > TYPE_PRECISION (TREE_TYPE (op0))))
5636 /* ... or this is a truncation (t is narrower than op0),
5637 then we cannot pass through this narrowing. */
5638 || (TYPE_PRECISION (type)
5639 < TYPE_PRECISION (TREE_TYPE (op0)))
5640 /* ... or signedness changes for division or modulus,
5641 then we cannot pass through this conversion. */
5642 || (code != MULT_EXPR
5643 && (TYPE_UNSIGNED (ctype)
5644 != TYPE_UNSIGNED (TREE_TYPE (op0))))
5645 /* ... or has undefined overflow while the type converted to
5646 has not, then we cannot do the operation in the inner type
5647 as that would introduce undefined overflow. */
5648 || (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (op0))
5649 && !TYPE_OVERFLOW_UNDEFINED (type))))
5650 break;
5652 /* Pass the constant down and see if we can make a simplification. If
5653 we can, replace this expression with the inner simplification for
5654 possible later conversion to our or some other type. */
5655 if ((t2 = fold_convert (TREE_TYPE (op0), c)) != 0
5656 && TREE_CODE (t2) == INTEGER_CST
5657 && !TREE_OVERFLOW (t2)
5658 && (0 != (t1 = extract_muldiv (op0, t2, code,
5659 code == MULT_EXPR
5660 ? ctype : NULL_TREE,
5661 strict_overflow_p))))
5662 return t1;
5663 break;
5665 case ABS_EXPR:
5666 /* If widening the type changes it from signed to unsigned, then we
5667 must avoid building ABS_EXPR itself as unsigned. */
5668 if (TYPE_UNSIGNED (ctype) && !TYPE_UNSIGNED (type))
5670 tree cstype = (*signed_type_for) (ctype);
5671 if ((t1 = extract_muldiv (op0, c, code, cstype, strict_overflow_p))
5672 != 0)
5674 t1 = fold_build1 (tcode, cstype, fold_convert (cstype, t1));
5675 return fold_convert (ctype, t1);
5677 break;
5679 /* If the constant is negative, we cannot simplify this. */
5680 if (tree_int_cst_sgn (c) == -1)
5681 break;
5682 /* FALLTHROUGH */
5683 case NEGATE_EXPR:
5684 if ((t1 = extract_muldiv (op0, c, code, wide_type, strict_overflow_p))
5685 != 0)
5686 return fold_build1 (tcode, ctype, fold_convert (ctype, t1));
5687 break;
5689 case MIN_EXPR: case MAX_EXPR:
5690 /* If widening the type changes the signedness, then we can't perform
5691 this optimization as that changes the result. */
5692 if (TYPE_UNSIGNED (ctype) != TYPE_UNSIGNED (type))
5693 break;
5695 /* MIN (a, b) / 5 -> MIN (a / 5, b / 5) */
5696 sub_strict_overflow_p = false;
5697 if ((t1 = extract_muldiv (op0, c, code, wide_type,
5698 &sub_strict_overflow_p)) != 0
5699 && (t2 = extract_muldiv (op1, c, code, wide_type,
5700 &sub_strict_overflow_p)) != 0)
5702 if (tree_int_cst_sgn (c) < 0)
5703 tcode = (tcode == MIN_EXPR ? MAX_EXPR : MIN_EXPR);
5704 if (sub_strict_overflow_p)
5705 *strict_overflow_p = true;
5706 return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
5707 fold_convert (ctype, t2));
5709 break;
5711 case LSHIFT_EXPR: case RSHIFT_EXPR:
5712 /* If the second operand is constant, this is a multiplication
5713 or floor division, by a power of two, so we can treat it that
5714 way unless the multiplier or divisor overflows. Signed
5715 left-shift overflow is implementation-defined rather than
5716 undefined in C90, so do not convert signed left shift into
5717 multiplication. */
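      /* In other words, X >> 3 is first rewritten as X floor/ 8, and an
	 unsigned X << 3 as X * 8; the rewritten tree is then handed back
	 to extract_muldiv with the original C and CODE to look for
	 further simplifications.  */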
5718 if (TREE_CODE (op1) == INTEGER_CST
5719 && (tcode == RSHIFT_EXPR || TYPE_UNSIGNED (TREE_TYPE (op0)))
5720 /* const_binop may not detect overflow correctly,
5721 so check for it explicitly here. */
5722 && TYPE_PRECISION (TREE_TYPE (size_one_node)) > TREE_INT_CST_LOW (op1)
5723 && TREE_INT_CST_HIGH (op1) == 0
5724 && 0 != (t1 = fold_convert (ctype,
5725 const_binop (LSHIFT_EXPR,
5726 size_one_node,
5727 op1)))
5728 && !TREE_OVERFLOW (t1))
5729 return extract_muldiv (build2 (tcode == LSHIFT_EXPR
5730 ? MULT_EXPR : FLOOR_DIV_EXPR,
5731 ctype,
5732 fold_convert (ctype, op0),
5733 t1),
5734 c, code, wide_type, strict_overflow_p);
5735 break;
5737 case PLUS_EXPR: case MINUS_EXPR:
5738 /* See if we can eliminate the operation on both sides. If we can, we
5739 can return a new PLUS or MINUS. If we can't, the only remaining
5740 cases where we can do anything are if the second operand is a
5741 constant. */
5742 sub_strict_overflow_p = false;
5743 t1 = extract_muldiv (op0, c, code, wide_type, &sub_strict_overflow_p);
5744 t2 = extract_muldiv (op1, c, code, wide_type, &sub_strict_overflow_p);
5745 if (t1 != 0 && t2 != 0
5746 && (code == MULT_EXPR
5747 /* If not multiplication, we can only do this if both operands
5748 are divisible by c. */
5749 || (multiple_of_p (ctype, op0, c)
5750 && multiple_of_p (ctype, op1, c))))
5752 if (sub_strict_overflow_p)
5753 *strict_overflow_p = true;
5754 return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
5755 fold_convert (ctype, t2));
5758 /* If this was a subtraction, negate OP1 and set it to be an addition.
5759 This simplifies the logic below. */
5760 if (tcode == MINUS_EXPR)
5762 tcode = PLUS_EXPR, op1 = negate_expr (op1);
5763 /* If OP1 was not easily negatable, the constant may be OP0. */
5764 if (TREE_CODE (op0) == INTEGER_CST)
5766 tree tem = op0;
5767 op0 = op1;
5768 op1 = tem;
5769 tem = t1;
5770 t1 = t2;
5771 t2 = tem;
5775 if (TREE_CODE (op1) != INTEGER_CST)
5776 break;
5778 /* If either OP1 or C is negative, this optimization is not safe for
5779 some of the division and remainder types while for others we need
5780 to change the code. */
5781 if (tree_int_cst_sgn (op1) < 0 || tree_int_cst_sgn (c) < 0)
5783 if (code == CEIL_DIV_EXPR)
5784 code = FLOOR_DIV_EXPR;
5785 else if (code == FLOOR_DIV_EXPR)
5786 code = CEIL_DIV_EXPR;
5787 else if (code != MULT_EXPR
5788 && code != CEIL_MOD_EXPR && code != FLOOR_MOD_EXPR)
5789 break;
5792 /* If it's a multiply or a division/modulus operation of a multiple
5793 of our constant, do the operation and verify it doesn't overflow. */
5794 if (code == MULT_EXPR
5795 || integer_zerop (const_binop (TRUNC_MOD_EXPR, op1, c)))
5797 op1 = const_binop (code, fold_convert (ctype, op1),
5798 fold_convert (ctype, c));
5799 /* We allow the constant to overflow with wrapping semantics. */
5800 if (op1 == 0
5801 || (TREE_OVERFLOW (op1) && !TYPE_OVERFLOW_WRAPS (ctype)))
5802 break;
5804 else
5805 break;
5807 /* If we have an unsigned type that is not a sizetype, we cannot widen
5808 the operation since it will change the result if the original
5809 computation overflowed. */
5810 if (TYPE_UNSIGNED (ctype)
5811 && ! (TREE_CODE (ctype) == INTEGER_TYPE && TYPE_IS_SIZETYPE (ctype))
5812 && ctype != type)
5813 break;
5815 /* If we were able to eliminate our operation from the first side,
5816 apply our operation to the second side and reform the PLUS. */
5817 if (t1 != 0 && (TREE_CODE (t1) != code || code == MULT_EXPR))
5818 return fold_build2 (tcode, ctype, fold_convert (ctype, t1), op1);
5820 /* The last case is if we are a multiply. In that case, we can
5821 apply the distributive law to commute the multiply and addition
5822 if the multiplication of the constants doesn't overflow. */
5823 if (code == MULT_EXPR)
5824 return fold_build2 (tcode, ctype,
5825 fold_build2 (code, ctype,
5826 fold_convert (ctype, op0),
5827 fold_convert (ctype, c)),
5828 op1);
5830 break;
5832 case MULT_EXPR:
5833 /* We have a special case here if we are doing something like
5834 (C * 8) % 4 since we know that's zero. */
5835 if ((code == TRUNC_MOD_EXPR || code == CEIL_MOD_EXPR
5836 || code == FLOOR_MOD_EXPR || code == ROUND_MOD_EXPR)
5837 /* If the multiplication can overflow we cannot optimize this.
5838 ??? Until we can properly mark individual operations as
5839 not overflowing we need to treat sizetype specially here, as
5840 stor-layout relies on this optimization to make
5841 DECL_FIELD_BIT_OFFSET always a constant. */
5842 && (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (t))
5843 || (TREE_CODE (TREE_TYPE (t)) == INTEGER_TYPE
5844 && TYPE_IS_SIZETYPE (TREE_TYPE (t))))
5845 && TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST
5846 && integer_zerop (const_binop (TRUNC_MOD_EXPR, op1, c)))
5848 *strict_overflow_p = true;
5849 return omit_one_operand (type, integer_zero_node, op0);
5852 /* ... fall through ... */
5854 case TRUNC_DIV_EXPR: case CEIL_DIV_EXPR: case FLOOR_DIV_EXPR:
5855 case ROUND_DIV_EXPR: case EXACT_DIV_EXPR:
5856 /* If we can extract our operation from the LHS, do so and return a
5857 new operation. Likewise for the RHS from a MULT_EXPR. Otherwise,
5858 do something only if the second operand is a constant. */
5859 if (same_p
5860 && (t1 = extract_muldiv (op0, c, code, wide_type,
5861 strict_overflow_p)) != 0)
5862 return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
5863 fold_convert (ctype, op1));
5864 else if (tcode == MULT_EXPR && code == MULT_EXPR
5865 && (t1 = extract_muldiv (op1, c, code, wide_type,
5866 strict_overflow_p)) != 0)
5867 return fold_build2 (tcode, ctype, fold_convert (ctype, op0),
5868 fold_convert (ctype, t1));
5869 else if (TREE_CODE (op1) != INTEGER_CST)
5870 return 0;
5872 /* If these are the same operation types, we can associate them
5873 assuming no overflow. */
5874 if (tcode == code
5875 && 0 != (t1 = int_const_binop (MULT_EXPR,
5876 fold_convert (ctype, op1),
5877 fold_convert (ctype, c), 1))
5878 && 0 != (t1 = force_fit_type_double (ctype, tree_to_double_int (t1),
5879 (TYPE_UNSIGNED (ctype)
5880 && tcode != MULT_EXPR) ? -1 : 1,
5881 TREE_OVERFLOW (t1)))
5882 && !TREE_OVERFLOW (t1))
5883 return fold_build2 (tcode, ctype, fold_convert (ctype, op0), t1);
5885 /* If these operations "cancel" each other, we have the main
5886 optimizations of this pass, which occur when either constant is a
5887 multiple of the other, in which case we replace this with either an
5888 operation of CODE or TCODE.
5890 If we have an unsigned type that is not a sizetype, we cannot do
5891 this since it will change the result if the original computation
5892 overflowed. */
5893 if ((TYPE_OVERFLOW_UNDEFINED (ctype)
5894 || (TREE_CODE (ctype) == INTEGER_TYPE && TYPE_IS_SIZETYPE (ctype)))
5895 && ((code == MULT_EXPR && tcode == EXACT_DIV_EXPR)
5896 || (tcode == MULT_EXPR
5897 && code != TRUNC_MOD_EXPR && code != CEIL_MOD_EXPR
5898 && code != FLOOR_MOD_EXPR && code != ROUND_MOD_EXPR
5899 && code != MULT_EXPR)))
5901 if (integer_zerop (const_binop (TRUNC_MOD_EXPR, op1, c)))
5903 if (TYPE_OVERFLOW_UNDEFINED (ctype))
5904 *strict_overflow_p = true;
5905 return fold_build2 (tcode, ctype, fold_convert (ctype, op0),
5906 fold_convert (ctype,
5907 const_binop (TRUNC_DIV_EXPR,
5908 op1, c)));
5910 else if (integer_zerop (const_binop (TRUNC_MOD_EXPR, c, op1)))
5912 if (TYPE_OVERFLOW_UNDEFINED (ctype))
5913 *strict_overflow_p = true;
5914 return fold_build2 (code, ctype, fold_convert (ctype, op0),
5915 fold_convert (ctype,
5916 const_binop (TRUNC_DIV_EXPR,
5917 c, op1)));
5920 break;
5922 default:
5923 break;
5926 return 0;
5929 /* Return a node which has the indicated constant VALUE (either 0 or
5930 1), and is of the indicated TYPE. */
5932 tree
5933 constant_boolean_node (int value, tree type)
5935 if (type == integer_type_node)
5936 return value ? integer_one_node : integer_zero_node;
5937 else if (type == boolean_type_node)
5938 return value ? boolean_true_node : boolean_false_node;
5939 else
5940 return build_int_cst (type, value);
5944 /* Transform `a + (b ? x : y)' into `b ? (a + x) : (a + y)'.
5945 Transform `a + (x < y)' into `(x < y) ? (a + 1) : (a + 0)'. Here
5946 CODE corresponds to the `+', COND to the `(b ? x : y)' or `(x < y)'
5947 expression, and ARG to `a'. If COND_FIRST_P is nonzero, then the
5948 COND is the first argument to CODE; otherwise (as in the example
5949 given here), it is the second argument. TYPE is the type of the
5950 original expression. Return NULL_TREE if no simplification is
5951 possible. */
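/* For example, when A is the constant 4, 4 + (x < y) becomes
   (x < y) ? 5 : 4: the boolean is rewritten as a COND_EXPR with
   constant arms, and the addition folds in both arms once it is
   pushed inside.  */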
5953 static tree
5954 fold_binary_op_with_conditional_arg (location_t loc,
5955 enum tree_code code,
5956 tree type, tree op0, tree op1,
5957 tree cond, tree arg, int cond_first_p)
5959 tree cond_type = cond_first_p ? TREE_TYPE (op0) : TREE_TYPE (op1);
5960 tree arg_type = cond_first_p ? TREE_TYPE (op1) : TREE_TYPE (op0);
5961 tree test, true_value, false_value;
5962 tree lhs = NULL_TREE;
5963 tree rhs = NULL_TREE;
5965 if (TREE_CODE (cond) == COND_EXPR)
5967 test = TREE_OPERAND (cond, 0);
5968 true_value = TREE_OPERAND (cond, 1);
5969 false_value = TREE_OPERAND (cond, 2);
5970 /* If this operand throws an exception, then it does not make
5971 sense to try to perform a logical or arithmetic operation
5972 involving it. */
5973 if (VOID_TYPE_P (TREE_TYPE (true_value)))
5974 lhs = true_value;
5975 if (VOID_TYPE_P (TREE_TYPE (false_value)))
5976 rhs = false_value;
5978 else
5980 tree testtype = TREE_TYPE (cond);
5981 test = cond;
5982 true_value = constant_boolean_node (true, testtype);
5983 false_value = constant_boolean_node (false, testtype);
5986 /* This transformation is only worthwhile if we don't have to wrap ARG
5987 in a SAVE_EXPR and the operation can be simplified on at least one
5988 of the branches once it's pushed inside the COND_EXPR. */
5989 if (!TREE_CONSTANT (arg)
5990 && (TREE_SIDE_EFFECTS (arg)
5991 || TREE_CONSTANT (true_value) || TREE_CONSTANT (false_value)))
5992 return NULL_TREE;
5994 arg = fold_convert_loc (loc, arg_type, arg);
5995 if (lhs == 0)
5997 true_value = fold_convert_loc (loc, cond_type, true_value);
5998 if (cond_first_p)
5999 lhs = fold_build2_loc (loc, code, type, true_value, arg);
6000 else
6001 lhs = fold_build2_loc (loc, code, type, arg, true_value);
6003 if (rhs == 0)
6005 false_value = fold_convert_loc (loc, cond_type, false_value);
6006 if (cond_first_p)
6007 rhs = fold_build2_loc (loc, code, type, false_value, arg);
6008 else
6009 rhs = fold_build2_loc (loc, code, type, arg, false_value);
6012 /* Check that we have simplified at least one of the branches. */
6013 if (!TREE_CONSTANT (arg) && !TREE_CONSTANT (lhs) && !TREE_CONSTANT (rhs))
6014 return NULL_TREE;
6016 return fold_build3_loc (loc, COND_EXPR, type, test, lhs, rhs);
6020 /* Subroutine of fold() that checks for the addition of +/- 0.0.
6022 If !NEGATE, return true if ADDEND is +/-0.0 and, for all X of type
6023 TYPE, X + ADDEND is the same as X. If NEGATE, return true if X -
6024 ADDEND is the same as X.
6026 X + 0 and X - 0 both give X when X is NaN, infinite, or nonzero
6027 and finite. The problematic cases are when X is zero, and its mode
6028 has signed zeros. In the case of rounding towards -infinity,
6029 X - 0 is not the same as X because 0 - 0 is -0. In other rounding
6030 modes, X + 0 is not the same as X because -0 + 0 is 0. */
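/* Concretely, under the default rounding mode, X - 0.0 may fold to X
   even when signed zeros are honored, but X + 0.0 may not, because
   (-0.0) + 0.0 yields +0.0 rather than -0.0.  */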
6032 bool
6033 fold_real_zero_addition_p (const_tree type, const_tree addend, int negate)
6035 if (!real_zerop (addend))
6036 return false;
6038 /* Don't allow the fold with -fsignaling-nans. */
6039 if (HONOR_SNANS (TYPE_MODE (type)))
6040 return false;
6042 /* Allow the fold if zeros aren't signed, or their sign isn't important. */
6043 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (type)))
6044 return true;
6046 /* Treat x + -0 as x - 0 and x - -0 as x + 0. */
6047 if (TREE_CODE (addend) == REAL_CST
6048 && REAL_VALUE_MINUS_ZERO (TREE_REAL_CST (addend)))
6049 negate = !negate;
6051 /* The mode has signed zeros, and we have to honor their sign.
6052 In this situation, there is only one case we can return true for.
6053 X - 0 is the same as X unless rounding towards -infinity is
6054 supported. */
6055 return negate && !HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type));
6058 /* Subroutine of fold() that checks comparisons of built-in math
6059 functions against real constants.
6061 FCODE is the DECL_FUNCTION_CODE of the built-in, CODE is the comparison
6062 operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR, GE_EXPR or LE_EXPR. TYPE
6063 is the type of the result and ARG0 and ARG1 are the operands of the
6064 comparison. ARG1 must be a TREE_REAL_CST.
6066 The function returns the constant folded tree if a simplification
6067 can be made, and NULL_TREE otherwise. */
6069 static tree
6070 fold_mathfn_compare (location_t loc,
6071 enum built_in_function fcode, enum tree_code code,
6072 tree type, tree arg0, tree arg1)
6074 REAL_VALUE_TYPE c;
6076 if (BUILTIN_SQRT_P (fcode))
6078 tree arg = CALL_EXPR_ARG (arg0, 0);
6079 enum machine_mode mode = TYPE_MODE (TREE_TYPE (arg0));
6081 c = TREE_REAL_CST (arg1);
6082 if (REAL_VALUE_NEGATIVE (c))
6084 /* sqrt(x) < y is always false, if y is negative. */
6085 if (code == EQ_EXPR || code == LT_EXPR || code == LE_EXPR)
6086 return omit_one_operand_loc (loc, type, integer_zero_node, arg);
6088 /* sqrt(x) > y is always true, if y is negative and we
6089 don't care about NaNs, i.e. negative values of x. */
6090 if (code == NE_EXPR || !HONOR_NANS (mode))
6091 return omit_one_operand_loc (loc, type, integer_one_node, arg);
6093 /* sqrt(x) > y is the same as x >= 0, if y is negative. */
6094 return fold_build2_loc (loc, GE_EXPR, type, arg,
6095 build_real (TREE_TYPE (arg), dconst0));
6097 else if (code == GT_EXPR || code == GE_EXPR)
6099 REAL_VALUE_TYPE c2;
6101 REAL_ARITHMETIC (c2, MULT_EXPR, c, c);
6102 real_convert (&c2, mode, &c2);
6104 if (REAL_VALUE_ISINF (c2))
6106 /* sqrt(x) > y is x == +Inf, when y is very large. */
6107 if (HONOR_INFINITIES (mode))
6108 return fold_build2_loc (loc, EQ_EXPR, type, arg,
6109 build_real (TREE_TYPE (arg), c2));
6111 /* sqrt(x) > y is always false, when y is very large
6112 and we don't care about infinities. */
6113 return omit_one_operand_loc (loc, type, integer_zero_node, arg);
6116 /* sqrt(x) > c is the same as x > c*c. */
6117 return fold_build2_loc (loc, code, type, arg,
6118 build_real (TREE_TYPE (arg), c2));
6120 else if (code == LT_EXPR || code == LE_EXPR)
6122 REAL_VALUE_TYPE c2;
6124 REAL_ARITHMETIC (c2, MULT_EXPR, c, c);
6125 real_convert (&c2, mode, &c2);
6127 if (REAL_VALUE_ISINF (c2))
6129 /* sqrt(x) < y is always true, when y is a very large
6130 value and we don't care about NaNs or Infinities. */
6131 if (! HONOR_NANS (mode) && ! HONOR_INFINITIES (mode))
6132 return omit_one_operand_loc (loc, type, integer_one_node, arg);
6134 /* sqrt(x) < y is x != +Inf when y is very large and we
6135 don't care about NaNs. */
6136 if (! HONOR_NANS (mode))
6137 return fold_build2_loc (loc, NE_EXPR, type, arg,
6138 build_real (TREE_TYPE (arg), c2));
6140 /* sqrt(x) < y is x >= 0 when y is very large and we
6141 don't care about Infinities. */
6142 if (! HONOR_INFINITIES (mode))
6143 return fold_build2_loc (loc, GE_EXPR, type, arg,
6144 build_real (TREE_TYPE (arg), dconst0));
6146 /* sqrt(x) < y is x >= 0 && x != +Inf, when y is large. */
6147 if (lang_hooks.decls.global_bindings_p () != 0
6148 || CONTAINS_PLACEHOLDER_P (arg))
6149 return NULL_TREE;
6151 arg = save_expr (arg);
6152 return fold_build2_loc (loc, TRUTH_ANDIF_EXPR, type,
6153 fold_build2_loc (loc, GE_EXPR, type, arg,
6154 build_real (TREE_TYPE (arg),
6155 dconst0)),
6156 fold_build2_loc (loc, NE_EXPR, type, arg,
6157 build_real (TREE_TYPE (arg),
6158 c2)));
6161 /* sqrt(x) < c is the same as x < c*c, if we ignore NaNs. */
6162 if (! HONOR_NANS (mode))
6163 return fold_build2_loc (loc, code, type, arg,
6164 build_real (TREE_TYPE (arg), c2));
6166 /* sqrt(x) < c is the same as x >= 0 && x < c*c. */
6167 if (lang_hooks.decls.global_bindings_p () == 0
6168 && ! CONTAINS_PLACEHOLDER_P (arg))
6170 arg = save_expr (arg);
6171 return fold_build2_loc (loc, TRUTH_ANDIF_EXPR, type,
6172 fold_build2_loc (loc, GE_EXPR, type, arg,
6173 build_real (TREE_TYPE (arg),
6174 dconst0)),
6175 fold_build2_loc (loc, code, type, arg,
6176 build_real (TREE_TYPE (arg),
6177 c2)));
6182 return NULL_TREE;
6185 /* Subroutine of fold() that optimizes comparisons against Infinities,
6186 either +Inf or -Inf.
6188 CODE is the comparison operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR,
6189 GE_EXPR or LE_EXPR. TYPE is the type of the result and ARG0 and ARG1
6190 are the operands of the comparison. ARG1 must be a TREE_REAL_CST.
6192 The function returns the constant folded tree if a simplification
6193 can be made, and NULL_TREE otherwise. */
6195 static tree
6196 fold_inf_compare (location_t loc, enum tree_code code, tree type,
6197 tree arg0, tree arg1)
6199 enum machine_mode mode;
6200 REAL_VALUE_TYPE max;
6201 tree temp;
6202 bool neg;
6204 mode = TYPE_MODE (TREE_TYPE (arg0));
6206 /* For negative infinity swap the sense of the comparison. */
6207 neg = REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg1));
6208 if (neg)
6209 code = swap_tree_comparison (code);
6211 switch (code)
6213 case GT_EXPR:
6214 /* x > +Inf is always false, if we ignore sNaNs. */
6215 if (HONOR_SNANS (mode))
6216 return NULL_TREE;
6217 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
6219 case LE_EXPR:
6220 /* x <= +Inf is always true, if we don't care about NaNs. */
6221 if (! HONOR_NANS (mode))
6222 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
6224 /* x <= +Inf is the same as x == x, i.e. !isnan(x). */
6225 if (lang_hooks.decls.global_bindings_p () == 0
6226 && ! CONTAINS_PLACEHOLDER_P (arg0))
6228 arg0 = save_expr (arg0);
6229 return fold_build2_loc (loc, EQ_EXPR, type, arg0, arg0);
6231 break;
6233 case EQ_EXPR:
6234 case GE_EXPR:
6235 /* x == +Inf and x >= +Inf are always equal to x > DBL_MAX. */
6236 real_maxval (&max, neg, mode);
6237 return fold_build2_loc (loc, neg ? LT_EXPR : GT_EXPR, type,
6238 arg0, build_real (TREE_TYPE (arg0), max));
6240 case LT_EXPR:
6241 /* x < +Inf is always equal to x <= DBL_MAX. */
6242 real_maxval (&max, neg, mode);
6243 return fold_build2_loc (loc, neg ? GE_EXPR : LE_EXPR, type,
6244 arg0, build_real (TREE_TYPE (arg0), max));
6246 case NE_EXPR:
6247 /* x != +Inf is always equal to !(x > DBL_MAX). */
6248 real_maxval (&max, neg, mode);
6249 if (! HONOR_NANS (mode))
6250 return fold_build2_loc (loc, neg ? GE_EXPR : LE_EXPR, type,
6251 arg0, build_real (TREE_TYPE (arg0), max));
6253 temp = fold_build2_loc (loc, neg ? LT_EXPR : GT_EXPR, type,
6254 arg0, build_real (TREE_TYPE (arg0), max));
6255 return fold_build1_loc (loc, TRUTH_NOT_EXPR, type, temp);
6257 default:
6258 break;
6261 return NULL_TREE;
6264 /* Subroutine of fold() that optimizes comparisons of a division by
6265 a nonzero integer constant against an integer constant, i.e.
6266 X/C1 op C2.
6268 CODE is the comparison operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR,
6269 GE_EXPR or LE_EXPR. TYPE is the type of the result and ARG0 and ARG1
6270 are the operands of the comparison. ARG1 must be an INTEGER_CST.
6272 The function returns the constant folded tree if a simplification
6273 can be made, and NULL_TREE otherwise. */
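/* A worked example, for a 32-bit unsigned X: folding X / 3 == 2 gives
   prod = 6 and tmp = 2, hence lo = 6 and hi = 8, and the comparison
   becomes the range check 6 <= X && X <= 8.  */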
6275 static tree
6276 fold_div_compare (location_t loc,
6277 enum tree_code code, tree type, tree arg0, tree arg1)
6279 tree prod, tmp, hi, lo;
6280 tree arg00 = TREE_OPERAND (arg0, 0);
6281 tree arg01 = TREE_OPERAND (arg0, 1);
6282 double_int val;
6283 bool unsigned_p = TYPE_UNSIGNED (TREE_TYPE (arg0));
6284 bool neg_overflow;
6285 int overflow;
6287 /* We have to do this the hard way to detect unsigned overflow.
6288 prod = int_const_binop (MULT_EXPR, arg01, arg1, 0); */
6289 overflow = mul_double_with_sign (TREE_INT_CST_LOW (arg01),
6290 TREE_INT_CST_HIGH (arg01),
6291 TREE_INT_CST_LOW (arg1),
6292 TREE_INT_CST_HIGH (arg1),
6293 &val.low, &val.high, unsigned_p);
6294 prod = force_fit_type_double (TREE_TYPE (arg00), val, -1, overflow);
6295 neg_overflow = false;
6297 if (unsigned_p)
6299 tmp = int_const_binop (MINUS_EXPR, arg01,
6300 build_int_cst (TREE_TYPE (arg01), 1), 0);
6301 lo = prod;
6303 /* Likewise hi = int_const_binop (PLUS_EXPR, prod, tmp, 0). */
6304 overflow = add_double_with_sign (TREE_INT_CST_LOW (prod),
6305 TREE_INT_CST_HIGH (prod),
6306 TREE_INT_CST_LOW (tmp),
6307 TREE_INT_CST_HIGH (tmp),
6308 &val.low, &val.high, unsigned_p);
6309 hi = force_fit_type_double (TREE_TYPE (arg00), val,
6310 -1, overflow | TREE_OVERFLOW (prod));
6312 else if (tree_int_cst_sgn (arg01) >= 0)
6314 tmp = int_const_binop (MINUS_EXPR, arg01,
6315 build_int_cst (TREE_TYPE (arg01), 1), 0);
6316 switch (tree_int_cst_sgn (arg1))
6318 case -1:
6319 neg_overflow = true;
6320 lo = int_const_binop (MINUS_EXPR, prod, tmp, 0);
6321 hi = prod;
6322 break;
6324 case 0:
6325 lo = fold_negate_const (tmp, TREE_TYPE (arg0));
6326 hi = tmp;
6327 break;
6329 case 1:
6330 hi = int_const_binop (PLUS_EXPR, prod, tmp, 0);
6331 lo = prod;
6332 break;
6334 default:
6335 gcc_unreachable ();
6338 else
6340 /* A negative divisor reverses the relational operators. */
6341 code = swap_tree_comparison (code);
6343 tmp = int_const_binop (PLUS_EXPR, arg01,
6344 build_int_cst (TREE_TYPE (arg01), 1), 0);
6345 switch (tree_int_cst_sgn (arg1))
6347 case -1:
6348 hi = int_const_binop (MINUS_EXPR, prod, tmp, 0);
6349 lo = prod;
6350 break;
6352 case 0:
6353 hi = fold_negate_const (tmp, TREE_TYPE (arg0));
6354 lo = tmp;
6355 break;
6357 case 1:
6358 neg_overflow = true;
6359 lo = int_const_binop (PLUS_EXPR, prod, tmp, 0);
6360 hi = prod;
6361 break;
6363 default:
6364 gcc_unreachable ();
6368 switch (code)
6370 case EQ_EXPR:
6371 if (TREE_OVERFLOW (lo) && TREE_OVERFLOW (hi))
6372 return omit_one_operand_loc (loc, type, integer_zero_node, arg00);
6373 if (TREE_OVERFLOW (hi))
6374 return fold_build2_loc (loc, GE_EXPR, type, arg00, lo);
6375 if (TREE_OVERFLOW (lo))
6376 return fold_build2_loc (loc, LE_EXPR, type, arg00, hi);
6377 return build_range_check (loc, type, arg00, 1, lo, hi);
6379 case NE_EXPR:
6380 if (TREE_OVERFLOW (lo) && TREE_OVERFLOW (hi))
6381 return omit_one_operand_loc (loc, type, integer_one_node, arg00);
6382 if (TREE_OVERFLOW (hi))
6383 return fold_build2_loc (loc, LT_EXPR, type, arg00, lo);
6384 if (TREE_OVERFLOW (lo))
6385 return fold_build2_loc (loc, GT_EXPR, type, arg00, hi);
6386 return build_range_check (loc, type, arg00, 0, lo, hi);
6388 case LT_EXPR:
6389 if (TREE_OVERFLOW (lo))
6391 tmp = neg_overflow ? integer_zero_node : integer_one_node;
6392 return omit_one_operand_loc (loc, type, tmp, arg00);
6394 return fold_build2_loc (loc, LT_EXPR, type, arg00, lo);
6396 case LE_EXPR:
6397 if (TREE_OVERFLOW (hi))
6399 tmp = neg_overflow ? integer_zero_node : integer_one_node;
6400 return omit_one_operand_loc (loc, type, tmp, arg00);
6402 return fold_build2_loc (loc, LE_EXPR, type, arg00, hi);
6404 case GT_EXPR:
6405 if (TREE_OVERFLOW (hi))
6407 tmp = neg_overflow ? integer_one_node : integer_zero_node;
6408 return omit_one_operand_loc (loc, type, tmp, arg00);
6410 return fold_build2_loc (loc, GT_EXPR, type, arg00, hi);
6412 case GE_EXPR:
6413 if (TREE_OVERFLOW (lo))
6415 tmp = neg_overflow ? integer_one_node : integer_zero_node;
6416 return omit_one_operand_loc (loc, type, tmp, arg00);
6418 return fold_build2_loc (loc, GE_EXPR, type, arg00, lo);
6420 default:
6421 break;
6424 return NULL_TREE;
6428 /* If CODE with arguments ARG0 and ARG1 represents a single bit
6429 equality/inequality test, then return a simplified form of the test
6430 using a sign test. Otherwise return NULL. TYPE is the desired
6431 result type. */
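/* For example, for a 32-bit int X, (X & 0x80000000) != 0 becomes
   X < 0 and (X & 0x80000000) == 0 becomes X >= 0, since the mask
   tests exactly the sign bit.  */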
6433 static tree
6434 fold_single_bit_test_into_sign_test (location_t loc,
6435 enum tree_code code, tree arg0, tree arg1,
6436 tree result_type)
6438 /* If this is testing a single bit, we can optimize the test. */
6439 if ((code == NE_EXPR || code == EQ_EXPR)
6440 && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
6441 && integer_pow2p (TREE_OPERAND (arg0, 1)))
6443 /* If we have (A & C) != 0 where C is the sign bit of A, convert
6444 this into A < 0. Similarly for (A & C) == 0 into A >= 0. */
6445 tree arg00 = sign_bit_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg0, 1));
6447 if (arg00 != NULL_TREE
6448 /* This is only a win if casting to a signed type is cheap,
6449 i.e. when arg00's type is not a partial mode. */
6450 && TYPE_PRECISION (TREE_TYPE (arg00))
6451 == GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (arg00))))
6453 tree stype = signed_type_for (TREE_TYPE (arg00));
6454 return fold_build2_loc (loc, code == EQ_EXPR ? GE_EXPR : LT_EXPR,
6455 result_type,
6456 fold_convert_loc (loc, stype, arg00),
6457 build_int_cst (stype, 0));
6461 return NULL_TREE;
6464 /* If CODE with arguments ARG0 and ARG1 represents a single bit
6465 equality/inequality test, then return a simplified form of
6466 the test using shifts and logical operations. Otherwise return
6467 NULL. TYPE is the desired result type. */
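/* For example, (X & 8) != 0 becomes ((unsigned) X >> 3) & 1 and
   (X & 8) == 0 becomes (((unsigned) X >> 3) ^ 1) & 1, modulo the
   choice of signed or unsigned intermediate type made below.  */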
6469 tree
6470 fold_single_bit_test (location_t loc, enum tree_code code,
6471 tree arg0, tree arg1, tree result_type)
6473 /* If this is testing a single bit, we can optimize the test. */
6474 if ((code == NE_EXPR || code == EQ_EXPR)
6475 && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
6476 && integer_pow2p (TREE_OPERAND (arg0, 1)))
6478 tree inner = TREE_OPERAND (arg0, 0);
6479 tree type = TREE_TYPE (arg0);
6480 int bitnum = tree_log2 (TREE_OPERAND (arg0, 1));
6481 enum machine_mode operand_mode = TYPE_MODE (type);
6482 int ops_unsigned;
6483 tree signed_type, unsigned_type, intermediate_type;
6484 tree tem, one;
6486 /* First, see if we can fold the single bit test into a sign-bit
6487 test. */
6488 tem = fold_single_bit_test_into_sign_test (loc, code, arg0, arg1,
6489 result_type);
6490 if (tem)
6491 return tem;
6493 /* Otherwise we have (A & C) != 0 where C is a single bit,
6494 convert that into ((A >> C2) & 1), where C2 = log2(C).
6495 Similarly for (A & C) == 0. */
6497 /* If INNER is a right shift by a constant and that shift count plus
6498 BITNUM does not overflow the precision, adjust BITNUM and INNER. */
6499 if (TREE_CODE (inner) == RSHIFT_EXPR
6500 && TREE_CODE (TREE_OPERAND (inner, 1)) == INTEGER_CST
6501 && TREE_INT_CST_HIGH (TREE_OPERAND (inner, 1)) == 0
6502 && bitnum < TYPE_PRECISION (type)
6503 && 0 > compare_tree_int (TREE_OPERAND (inner, 1),
6504 bitnum - TYPE_PRECISION (type)))
6506 bitnum += TREE_INT_CST_LOW (TREE_OPERAND (inner, 1));
6507 inner = TREE_OPERAND (inner, 0);
6510 /* If we are going to be able to omit the AND below, we must do our
6511 operations as unsigned. If we must use the AND, we have a choice.
6512 Normally unsigned is faster, but for some machines signed is. */
6513 #ifdef LOAD_EXTEND_OP
6514 ops_unsigned = (LOAD_EXTEND_OP (operand_mode) == SIGN_EXTEND
6515 && !flag_syntax_only) ? 0 : 1;
6516 #else
6517 ops_unsigned = 1;
6518 #endif
6520 signed_type = lang_hooks.types.type_for_mode (operand_mode, 0);
6521 unsigned_type = lang_hooks.types.type_for_mode (operand_mode, 1);
6522 intermediate_type = ops_unsigned ? unsigned_type : signed_type;
6523 inner = fold_convert_loc (loc, intermediate_type, inner);
6525 if (bitnum != 0)
6526 inner = build2 (RSHIFT_EXPR, intermediate_type,
6527 inner, size_int (bitnum));
6529 one = build_int_cst (intermediate_type, 1);
6531 if (code == EQ_EXPR)
6532 inner = fold_build2_loc (loc, BIT_XOR_EXPR, intermediate_type, inner, one);
6534 /* Put the AND last so it can combine with more things. */
6535 inner = build2 (BIT_AND_EXPR, intermediate_type, inner, one);
6537 /* Make sure to return the proper type. */
6538 inner = fold_convert_loc (loc, result_type, inner);
6540 return inner;
6542 return NULL_TREE;
6545 /* Check whether we are allowed to reorder operands arg0 and arg1,
6546 such that the evaluation of arg1 occurs before arg0. */
6548 static bool
6549 reorder_operands_p (const_tree arg0, const_tree arg1)
6551 if (! flag_evaluation_order)
6552 return true;
6553 if (TREE_CONSTANT (arg0) || TREE_CONSTANT (arg1))
6554 return true;
6555 return ! TREE_SIDE_EFFECTS (arg0)
6556 && ! TREE_SIDE_EFFECTS (arg1);
6559 /* Test whether it is preferable to swap two operands, ARG0 and
6560 ARG1, for example because ARG0 is an integer constant and ARG1
6561 isn't. If REORDER is true, only recommend swapping if we can
6562 evaluate the operands in reverse order. */
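/* For example, this is how fold canonicalizes 5 + X into X + 5:
   with ARG0 an INTEGER_CST and ARG1 not, a swap is recommended, so
   constants end up as the second operand of commutative operators.  */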
6564 bool
6565 tree_swap_operands_p (const_tree arg0, const_tree arg1, bool reorder)
6567 STRIP_SIGN_NOPS (arg0);
6568 STRIP_SIGN_NOPS (arg1);
6570 if (TREE_CODE (arg1) == INTEGER_CST)
6571 return 0;
6572 if (TREE_CODE (arg0) == INTEGER_CST)
6573 return 1;
6575 if (TREE_CODE (arg1) == REAL_CST)
6576 return 0;
6577 if (TREE_CODE (arg0) == REAL_CST)
6578 return 1;
6580 if (TREE_CODE (arg1) == FIXED_CST)
6581 return 0;
6582 if (TREE_CODE (arg0) == FIXED_CST)
6583 return 1;
6585 if (TREE_CODE (arg1) == COMPLEX_CST)
6586 return 0;
6587 if (TREE_CODE (arg0) == COMPLEX_CST)
6588 return 1;
6590 if (TREE_CONSTANT (arg1))
6591 return 0;
6592 if (TREE_CONSTANT (arg0))
6593 return 1;
6595 if (optimize_function_for_size_p (cfun))
6596 return 0;
6598 if (reorder && flag_evaluation_order
6599 && (TREE_SIDE_EFFECTS (arg0) || TREE_SIDE_EFFECTS (arg1)))
6600 return 0;
6602 /* It is preferable to swap two SSA_NAME to ensure a canonical form
6603 for commutative and comparison operators. Ensuring a canonical
6604 form allows the optimizers to find additional redundancies without
6605 having to explicitly check for both orderings. */
6606 if (TREE_CODE (arg0) == SSA_NAME
6607 && TREE_CODE (arg1) == SSA_NAME
6608 && SSA_NAME_VERSION (arg0) > SSA_NAME_VERSION (arg1))
6609 return 1;
6611 /* Put SSA_NAMEs last. */
6612 if (TREE_CODE (arg1) == SSA_NAME)
6613 return 0;
6614 if (TREE_CODE (arg0) == SSA_NAME)
6615 return 1;
6617 /* Put variables last. */
6618 if (DECL_P (arg1))
6619 return 0;
6620 if (DECL_P (arg0))
6621 return 1;
6623 return 0;
6626 /* Fold comparison ARG0 CODE ARG1 (with result in TYPE), where
6627 ARG0 is extended to a wider type. */
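/* For example, if C has type unsigned char, (int) C < 300 folds to
   true: 300 does not fit in the shorter type, every widened value
   lies below it, and so the comparison is known at compile time.  */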
6629 static tree
6630 fold_widened_comparison (location_t loc, enum tree_code code,
6631 tree type, tree arg0, tree arg1)
6633 tree arg0_unw = get_unwidened (arg0, NULL_TREE);
6634 tree arg1_unw;
6635 tree shorter_type, outer_type;
6636 tree min, max;
6637 bool above, below;
6639 if (arg0_unw == arg0)
6640 return NULL_TREE;
6641 shorter_type = TREE_TYPE (arg0_unw);
6643 #ifdef HAVE_canonicalize_funcptr_for_compare
6644 /* Disable this optimization if we're casting a function pointer
6645 type on targets that require function pointer canonicalization. */
6646 if (HAVE_canonicalize_funcptr_for_compare
6647 && TREE_CODE (shorter_type) == POINTER_TYPE
6648 && TREE_CODE (TREE_TYPE (shorter_type)) == FUNCTION_TYPE)
6649 return NULL_TREE;
6650 #endif
6652 if (TYPE_PRECISION (TREE_TYPE (arg0)) <= TYPE_PRECISION (shorter_type))
6653 return NULL_TREE;
6655 arg1_unw = get_unwidened (arg1, NULL_TREE);
6657 /* If possible, express the comparison in the shorter mode. */
6658 if ((code == EQ_EXPR || code == NE_EXPR
6659 || TYPE_UNSIGNED (TREE_TYPE (arg0)) == TYPE_UNSIGNED (shorter_type))
6660 && (TREE_TYPE (arg1_unw) == shorter_type
6661 || ((TYPE_PRECISION (shorter_type)
6662 >= TYPE_PRECISION (TREE_TYPE (arg1_unw)))
6663 && (TYPE_UNSIGNED (shorter_type)
6664 == TYPE_UNSIGNED (TREE_TYPE (arg1_unw))))
6665 || (TREE_CODE (arg1_unw) == INTEGER_CST
6666 && (TREE_CODE (shorter_type) == INTEGER_TYPE
6667 || TREE_CODE (shorter_type) == BOOLEAN_TYPE)
6668 && int_fits_type_p (arg1_unw, shorter_type))))
6669 return fold_build2_loc (loc, code, type, arg0_unw,
6670 fold_convert_loc (loc, shorter_type, arg1_unw));
6672 if (TREE_CODE (arg1_unw) != INTEGER_CST
6673 || TREE_CODE (shorter_type) != INTEGER_TYPE
6674 || !int_fits_type_p (arg1_unw, shorter_type))
6675 return NULL_TREE;
6677 /* If we are comparing with an integer that does not fit into the range
6678 of the shorter type, the result is known. */
6679 outer_type = TREE_TYPE (arg1_unw);
6680 min = lower_bound_in_type (outer_type, shorter_type);
6681 max = upper_bound_in_type (outer_type, shorter_type);
6683 above = integer_nonzerop (fold_relational_const (LT_EXPR, type,
6684 max, arg1_unw));
6685 below = integer_nonzerop (fold_relational_const (LT_EXPR, type,
6686 arg1_unw, min));
6688 switch (code)
6690 case EQ_EXPR:
6691 if (above || below)
6692 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
6693 break;
6695 case NE_EXPR:
6696 if (above || below)
6697 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
6698 break;
6700 case LT_EXPR:
6701 case LE_EXPR:
6702 if (above)
6703 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
6704 else if (below)
6705 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
6707 case GT_EXPR:
6708 case GE_EXPR:
6709 if (above)
6710 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
6711 else if (below)
6712 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
6714 default:
6715 break;
6718 return NULL_TREE;
6721 /* Fold comparison ARG0 CODE ARG1 (with result in TYPE), where for
6722 ARG0 just the signedness is changed. */
6724 static tree
6725 fold_sign_changed_comparison (location_t loc, enum tree_code code, tree type,
6726 tree arg0, tree arg1)
6728 tree arg0_inner;
6729 tree inner_type, outer_type;
6731 if (!CONVERT_EXPR_P (arg0))
6732 return NULL_TREE;
6734 outer_type = TREE_TYPE (arg0);
6735 arg0_inner = TREE_OPERAND (arg0, 0);
6736 inner_type = TREE_TYPE (arg0_inner);
6738 #ifdef HAVE_canonicalize_funcptr_for_compare
6739 /* Disable this optimization if we're casting a function pointer
6740 type on targets that require function pointer canonicalization. */
6741 if (HAVE_canonicalize_funcptr_for_compare
6742 && TREE_CODE (inner_type) == POINTER_TYPE
6743 && TREE_CODE (TREE_TYPE (inner_type)) == FUNCTION_TYPE)
6744 return NULL_TREE;
6745 #endif
6747 if (TYPE_PRECISION (inner_type) != TYPE_PRECISION (outer_type))
6748 return NULL_TREE;
6750 if (TREE_CODE (arg1) != INTEGER_CST
6751 && !(CONVERT_EXPR_P (arg1)
6752 && TREE_TYPE (TREE_OPERAND (arg1, 0)) == inner_type))
6753 return NULL_TREE;
6755 if ((TYPE_UNSIGNED (inner_type) != TYPE_UNSIGNED (outer_type)
6756 || POINTER_TYPE_P (inner_type) != POINTER_TYPE_P (outer_type))
6757 && code != NE_EXPR
6758 && code != EQ_EXPR)
6759 return NULL_TREE;
6761 if (TREE_CODE (arg1) == INTEGER_CST)
6762 arg1 = force_fit_type_double (inner_type, tree_to_double_int (arg1),
6763 0, TREE_OVERFLOW (arg1));
6764 else
6765 arg1 = fold_convert_loc (loc, inner_type, arg1);
6767 return fold_build2_loc (loc, code, type, arg0_inner, arg1);
6770 /* Tries to replace &a[idx] p+ s * delta with &a[idx + delta], if s is
6771 the step of the array. Reconstructs s and delta in the case of s *
6772 delta being an integer constant (and thus already folded). ADDR is
6773 the address. OP1 is the multiplicative expression. If the
6774 function succeeds, the new address expression is returned.
6775 Otherwise NULL_TREE is returned. LOC is the location of the
6776 resulting expression. */
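/* For example, with int a[10] and a 4-byte int, &a[1] p+ 4 * D is
   rewritten as &a[1 + D], and &a[1] p+ 8 as &a[3], since 8 is an
   exact multiple of the element size.  */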
6778 static tree
6779 try_move_mult_to_index (location_t loc, tree addr, tree op1)
6781 tree s, delta, step;
6782 tree ref = TREE_OPERAND (addr, 0), pref;
6783 tree ret, pos;
6784 tree itype;
6785 bool mdim = false;
6787 /* Strip the nops that might be added when converting op1 to sizetype. */
6788 STRIP_NOPS (op1);
6790 /* Canonicalize op1 into a possibly non-constant delta
6791 and an INTEGER_CST s. */
6792 if (TREE_CODE (op1) == MULT_EXPR)
6794 tree arg0 = TREE_OPERAND (op1, 0), arg1 = TREE_OPERAND (op1, 1);
6796 STRIP_NOPS (arg0);
6797 STRIP_NOPS (arg1);
6799 if (TREE_CODE (arg0) == INTEGER_CST)
6801 s = arg0;
6802 delta = arg1;
6804 else if (TREE_CODE (arg1) == INTEGER_CST)
6806 s = arg1;
6807 delta = arg0;
6809 else
6810 return NULL_TREE;
6812 else if (TREE_CODE (op1) == INTEGER_CST)
6814 delta = op1;
6815 s = NULL_TREE;
6817 else
6819 /* Behave as if op1 were delta * 1. */
6820 delta = op1;
6821 s = integer_one_node;
6824 for (;; ref = TREE_OPERAND (ref, 0))
6826 if (TREE_CODE (ref) == ARRAY_REF)
6828 tree domain;
6830 /* Remember if this was a multi-dimensional array. */
6831 if (TREE_CODE (TREE_OPERAND (ref, 0)) == ARRAY_REF)
6832 mdim = true;
6834 domain = TYPE_DOMAIN (TREE_TYPE (TREE_OPERAND (ref, 0)));
6835 if (! domain)
6836 continue;
6837 itype = TREE_TYPE (domain);
6839 step = array_ref_element_size (ref);
6840 if (TREE_CODE (step) != INTEGER_CST)
6841 continue;
6843 if (s)
6845 if (! tree_int_cst_equal (step, s))
6846 continue;
6848 else
6850 /* See whether delta is a multiple of step. */
6851 tree tmp = div_if_zero_remainder (EXACT_DIV_EXPR, op1, step);
6852 if (! tmp)
6853 continue;
6854 delta = tmp;
6857 /* Only fold here if we can verify we do not overflow one
6858 dimension of a multi-dimensional array. */
6859 if (mdim)
6861 tree tmp;
6863 if (TREE_CODE (TREE_OPERAND (ref, 1)) != INTEGER_CST
6864 || !TYPE_MAX_VALUE (domain)
6865 || TREE_CODE (TYPE_MAX_VALUE (domain)) != INTEGER_CST)
6866 continue;
6868 tmp = fold_binary_loc (loc, PLUS_EXPR, itype,
6869 fold_convert_loc (loc, itype,
6870 TREE_OPERAND (ref, 1)),
6871 fold_convert_loc (loc, itype, delta));
6872 if (!tmp
6873 || TREE_CODE (tmp) != INTEGER_CST
6874 || tree_int_cst_lt (TYPE_MAX_VALUE (domain), tmp))
6875 continue;
6878 break;
6880 else
6881 mdim = false;
6883 if (!handled_component_p (ref))
6884 return NULL_TREE;
6887 /* We found a suitable array reference. Copy everything up to it,
6888 and replace the index. */
6890 pref = TREE_OPERAND (addr, 0);
6891 ret = copy_node (pref);
6892 SET_EXPR_LOCATION (ret, loc);
6893 pos = ret;
6895 while (pref != ref)
6897 pref = TREE_OPERAND (pref, 0);
6898 TREE_OPERAND (pos, 0) = copy_node (pref);
6899 pos = TREE_OPERAND (pos, 0);
6902 TREE_OPERAND (pos, 1) = fold_build2_loc (loc, PLUS_EXPR, itype,
6903 fold_convert_loc (loc, itype,
6904 TREE_OPERAND (pos, 1)),
6905 fold_convert_loc (loc, itype, delta));
6907 return fold_build1_loc (loc, ADDR_EXPR, TREE_TYPE (addr), ret);
6911 /* Fold A < X && A + 1 > Y to A < X && A >= Y. Normally A + 1 > Y
6912 means A >= Y && A != MAX, but in this case we know that
6913 A < X <= MAX. INEQ is A + 1 > Y, BOUND is A < X. */
6915 static tree
6916 fold_to_nonsharp_ineq_using_bound (location_t loc, tree ineq, tree bound)
6918 tree a, typea, type = TREE_TYPE (ineq), a1, diff, y;
6920 if (TREE_CODE (bound) == LT_EXPR)
6921 a = TREE_OPERAND (bound, 0);
6922 else if (TREE_CODE (bound) == GT_EXPR)
6923 a = TREE_OPERAND (bound, 1);
6924 else
6925 return NULL_TREE;
6927 typea = TREE_TYPE (a);
6928 if (!INTEGRAL_TYPE_P (typea)
6929 && !POINTER_TYPE_P (typea))
6930 return NULL_TREE;
6932 if (TREE_CODE (ineq) == LT_EXPR)
6934 a1 = TREE_OPERAND (ineq, 1);
6935 y = TREE_OPERAND (ineq, 0);
6937 else if (TREE_CODE (ineq) == GT_EXPR)
6939 a1 = TREE_OPERAND (ineq, 0);
6940 y = TREE_OPERAND (ineq, 1);
6942 else
6943 return NULL_TREE;
6945 if (TREE_TYPE (a1) != typea)
6946 return NULL_TREE;
6948 if (POINTER_TYPE_P (typea))
6951 /* Convert the pointer types into integers before taking the difference. */
6951 tree ta = fold_convert_loc (loc, ssizetype, a);
6952 tree ta1 = fold_convert_loc (loc, ssizetype, a1);
6953 diff = fold_binary_loc (loc, MINUS_EXPR, ssizetype, ta1, ta);
6955 else
6956 diff = fold_binary_loc (loc, MINUS_EXPR, typea, a1, a);
6958 if (!diff || !integer_onep (diff))
6959 return NULL_TREE;
6961 return fold_build2_loc (loc, GE_EXPR, type, a, y);
6964 /* Fold a sum or difference of at least one multiplication.
6965 Returns the folded tree or NULL if no simplification could be made. */
6967 static tree
6968 fold_plusminus_mult_expr (location_t loc, enum tree_code code, tree type,
6969 tree arg0, tree arg1)
6971 tree arg00, arg01, arg10, arg11;
6972 tree alt0 = NULL_TREE, alt1 = NULL_TREE, same;
6974 /* (A * C) +- (B * C) -> (A+-B) * C.
6975 (A * C) +- A -> A * (C+-1).
6976 We are most concerned about the case where C is a constant,
6977 but other combinations show up during loop reduction. Since
6978 it is not difficult, try all four possibilities. */
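  /* For example, X*4 + X*2 folds to X*6 here, and X*4 + X to X*5,
     once the two constants are combined by the fold at the end.  */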
6980 if (TREE_CODE (arg0) == MULT_EXPR)
6982 arg00 = TREE_OPERAND (arg0, 0);
6983 arg01 = TREE_OPERAND (arg0, 1);
6985 else if (TREE_CODE (arg0) == INTEGER_CST)
6987 arg00 = build_one_cst (type);
6988 arg01 = arg0;
6990 else
6992 /* We cannot generate constant 1 for fract. */
6993 if (ALL_FRACT_MODE_P (TYPE_MODE (type)))
6994 return NULL_TREE;
6995 arg00 = arg0;
6996 arg01 = build_one_cst (type);
6998 if (TREE_CODE (arg1) == MULT_EXPR)
7000 arg10 = TREE_OPERAND (arg1, 0);
7001 arg11 = TREE_OPERAND (arg1, 1);
7003 else if (TREE_CODE (arg1) == INTEGER_CST)
7005 arg10 = build_one_cst (type);
7007 /* As we canonicalize A - 2 to A + -2, get rid of that sign for
7007 the purpose of this canonicalization. */
7008 if (TREE_INT_CST_HIGH (arg1) == -1
7009 && negate_expr_p (arg1)
7010 && code == PLUS_EXPR)
7012 arg11 = negate_expr (arg1);
7013 code = MINUS_EXPR;
7015 else
7016 arg11 = arg1;
7018 else
7020 /* We cannot generate constant 1 for fract. */
7021 if (ALL_FRACT_MODE_P (TYPE_MODE (type)))
7022 return NULL_TREE;
7023 arg10 = arg1;
7024 arg11 = build_one_cst (type);
7026 same = NULL_TREE;
7028 if (operand_equal_p (arg01, arg11, 0))
7029 same = arg01, alt0 = arg00, alt1 = arg10;
7030 else if (operand_equal_p (arg00, arg10, 0))
7031 same = arg00, alt0 = arg01, alt1 = arg11;
7032 else if (operand_equal_p (arg00, arg11, 0))
7033 same = arg00, alt0 = arg01, alt1 = arg10;
7034 else if (operand_equal_p (arg01, arg10, 0))
7035 same = arg01, alt0 = arg00, alt1 = arg11;
7037 /* No identical multiplicands; see if we can find a common
7038 power-of-two factor in non-power-of-two multiplies. This
7039 can help in multi-dimensional array access. */
7040 else if (host_integerp (arg01, 0)
7041 && host_integerp (arg11, 0))
7043 HOST_WIDE_INT int01, int11, tmp;
7044 bool swap = false;
7045 tree maybe_same;
7046 int01 = TREE_INT_CST_LOW (arg01);
7047 int11 = TREE_INT_CST_LOW (arg11);
7049 /* Move min of absolute values to int11. */
7050 if ((int01 >= 0 ? int01 : -int01)
7051 < (int11 >= 0 ? int11 : -int11))
7053 tmp = int01, int01 = int11, int11 = tmp;
7054 alt0 = arg00, arg00 = arg10, arg10 = alt0;
7055 maybe_same = arg01;
7056 swap = true;
7058 else
7059 maybe_same = arg11;
7061 if (exact_log2 (abs (int11)) > 0 && int01 % int11 == 0
7062 /* The remainder should not be a constant, otherwise we
7063 end up folding i * 4 + 2 to (i * 2 + 1) * 2, which would
7064 increase the number of multiplications necessary. */
7065 && TREE_CODE (arg10) != INTEGER_CST)
7067 alt0 = fold_build2_loc (loc, MULT_EXPR, TREE_TYPE (arg00), arg00,
7068 build_int_cst (TREE_TYPE (arg00),
7069 int01 / int11));
7070 alt1 = arg10;
7071 same = maybe_same;
7072 if (swap)
7073 maybe_same = alt0, alt0 = alt1, alt1 = maybe_same;
7077 if (same)
7078 return fold_build2_loc (loc, MULT_EXPR, type,
7079 fold_build2_loc (loc, code, type,
7080 fold_convert_loc (loc, type, alt0),
7081 fold_convert_loc (loc, type, alt1)),
7082 fold_convert_loc (loc, type, same));
7084 return NULL_TREE;
7087 /* Subroutine of native_encode_expr. Encode the INTEGER_CST
7088 specified by EXPR into the buffer PTR of length LEN bytes.
7089 Return the number of bytes placed in the buffer, or zero
7090 upon failure. */
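/* For example, encoding the 32-bit constant 0x01020304 stores the
   bytes 04 03 02 01 into PTR on a little-endian target, and
   01 02 03 04 on a big-endian one.  */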
7092 static int
7093 native_encode_int (const_tree expr, unsigned char *ptr, int len)
7095 tree type = TREE_TYPE (expr);
7096 int total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
7097 int byte, offset, word, words;
7098 unsigned char value;
7100 if (total_bytes > len)
7101 return 0;
7102 words = total_bytes / UNITS_PER_WORD;
7104 for (byte = 0; byte < total_bytes; byte++)
7106 int bitpos = byte * BITS_PER_UNIT;
7107 if (bitpos < HOST_BITS_PER_WIDE_INT)
7108 value = (unsigned char) (TREE_INT_CST_LOW (expr) >> bitpos);
7109 else
7110 value = (unsigned char) (TREE_INT_CST_HIGH (expr)
7111 >> (bitpos - HOST_BITS_PER_WIDE_INT));
7113 if (total_bytes > UNITS_PER_WORD)
7115 word = byte / UNITS_PER_WORD;
7116 if (WORDS_BIG_ENDIAN)
7117 word = (words - 1) - word;
7118 offset = word * UNITS_PER_WORD;
7119 if (BYTES_BIG_ENDIAN)
7120 offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
7121 else
7122 offset += byte % UNITS_PER_WORD;
7124 else
7125 offset = BYTES_BIG_ENDIAN ? (total_bytes - 1) - byte : byte;
7126 ptr[offset] = value;
7128 return total_bytes;
7132 /* Subroutine of native_encode_expr. Encode the REAL_CST
7133 specified by EXPR into the buffer PTR of length LEN bytes.
7134 Return the number of bytes placed in the buffer, or zero
7135 upon failure. */
7137 static int
7138 native_encode_real (const_tree expr, unsigned char *ptr, int len)
7140 tree type = TREE_TYPE (expr);
7141 int total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
7142 int byte, offset, word, words, bitpos;
7143 unsigned char value;
7145 /* There are always 32 bits in each long, no matter the size of
7146 the host's long. We handle floating point representations with
7147 up to 192 bits. */
7148 long tmp[6];
7150 if (total_bytes > len)
7151 return 0;
7152 words = (32 / BITS_PER_UNIT) / UNITS_PER_WORD;
7154 real_to_target (tmp, TREE_REAL_CST_PTR (expr), TYPE_MODE (type));
7156 for (bitpos = 0; bitpos < total_bytes * BITS_PER_UNIT;
7157 bitpos += BITS_PER_UNIT)
7159 byte = (bitpos / BITS_PER_UNIT) & 3;
7160 value = (unsigned char) (tmp[bitpos / 32] >> (bitpos & 31));
7162 if (UNITS_PER_WORD < 4)
7164 word = byte / UNITS_PER_WORD;
7165 if (WORDS_BIG_ENDIAN)
7166 word = (words - 1) - word;
7167 offset = word * UNITS_PER_WORD;
7168 if (BYTES_BIG_ENDIAN)
7169 offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
7170 else
7171 offset += byte % UNITS_PER_WORD;
7173 else
7174 offset = BYTES_BIG_ENDIAN ? 3 - byte : byte;
7175 ptr[offset + ((bitpos / BITS_PER_UNIT) & ~3)] = value;
7177 return total_bytes;
7180 /* Subroutine of native_encode_expr. Encode the COMPLEX_CST
7181 specified by EXPR into the buffer PTR of length LEN bytes.
7182 Return the number of bytes placed in the buffer, or zero
7183 upon failure. */
7185 static int
7186 native_encode_complex (const_tree expr, unsigned char *ptr, int len)
7188 int rsize, isize;
7189 tree part;
7191 part = TREE_REALPART (expr);
7192 rsize = native_encode_expr (part, ptr, len);
7193 if (rsize == 0)
7194 return 0;
7195 part = TREE_IMAGPART (expr);
7196 isize = native_encode_expr (part, ptr+rsize, len-rsize);
7197 if (isize != rsize)
7198 return 0;
7199 return rsize + isize;
7203 /* Subroutine of native_encode_expr. Encode the VECTOR_CST
7204 specified by EXPR into the buffer PTR of length LEN bytes.
7205 Return the number of bytes placed in the buffer, or zero
7206 upon failure. */
7208 static int
7209 native_encode_vector (const_tree expr, unsigned char *ptr, int len)
7211 int i, size, offset, count;
7212 tree itype, elem, elements;
7214 offset = 0;
7215 elements = TREE_VECTOR_CST_ELTS (expr);
7216 count = TYPE_VECTOR_SUBPARTS (TREE_TYPE (expr));
7217 itype = TREE_TYPE (TREE_TYPE (expr));
7218 size = GET_MODE_SIZE (TYPE_MODE (itype));
7219 for (i = 0; i < count; i++)
7221 if (elements)
7223 elem = TREE_VALUE (elements);
7224 elements = TREE_CHAIN (elements);
7226 else
7227 elem = NULL_TREE;
7229 if (elem)
7231 if (native_encode_expr (elem, ptr+offset, len-offset) != size)
7232 return 0;
7234 else
7236 if (offset + size > len)
7237 return 0;
7238 memset (ptr+offset, 0, size);
7240 offset += size;
7242 return offset;
7246 /* Subroutine of native_encode_expr. Encode the STRING_CST
7247 specified by EXPR into the buffer PTR of length LEN bytes.
7248 Return the number of bytes placed in the buffer, or zero
7249 upon failure. */
7251 static int
7252 native_encode_string (const_tree expr, unsigned char *ptr, int len)
7254 tree type = TREE_TYPE (expr);
7255 HOST_WIDE_INT total_bytes;
7257 if (TREE_CODE (type) != ARRAY_TYPE
7258 || TREE_CODE (TREE_TYPE (type)) != INTEGER_TYPE
7259 || GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (type))) != BITS_PER_UNIT
7260 || !host_integerp (TYPE_SIZE_UNIT (type), 0))
7261 return 0;
7262 total_bytes = tree_low_cst (TYPE_SIZE_UNIT (type), 0);
7263 if (total_bytes > len)
7264 return 0;
7265 if (TREE_STRING_LENGTH (expr) < total_bytes)
7267 memcpy (ptr, TREE_STRING_POINTER (expr), TREE_STRING_LENGTH (expr));
7268 memset (ptr + TREE_STRING_LENGTH (expr), 0,
7269 total_bytes - TREE_STRING_LENGTH (expr));
7271 else
7272 memcpy (ptr, TREE_STRING_POINTER (expr), total_bytes);
7273 return total_bytes;
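/* For example (hypothetical constant): encoding the STRING_CST "ab"
   for a declared type of char[4] copies the string bytes, zero fills
   the rest, leaving { 'a', 'b', 0, 0 } in PTR, and returns 4.  */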
7277 /* Subroutine of fold_view_convert_expr. Encode the INTEGER_CST,
7278 REAL_CST, COMPLEX_CST, STRING_CST or VECTOR_CST specified by EXPR
7279 into the buffer PTR of length LEN bytes. Return the number of
7280 bytes placed in the buffer, or zero upon failure. */
7282 int
7283 native_encode_expr (const_tree expr, unsigned char *ptr, int len)
7285 switch (TREE_CODE (expr))
7287 case INTEGER_CST:
7288 return native_encode_int (expr, ptr, len);
7290 case REAL_CST:
7291 return native_encode_real (expr, ptr, len);
7293 case COMPLEX_CST:
7294 return native_encode_complex (expr, ptr, len);
7296 case VECTOR_CST:
7297 return native_encode_vector (expr, ptr, len);
7299 case STRING_CST:
7300 return native_encode_string (expr, ptr, len);
7302 default:
7303 return 0;
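/* Typical usage is the round trip done by fold_view_convert_expr
   below; a minimal sketch (hypothetical caller, variable names `cst',
   `othertype' and `result' are invented for exposition):

     unsigned char buf[64];
     int len = native_encode_expr (cst, buf, sizeof buf);
     if (len != 0)
       result = native_interpret_expr (othertype, buf, len);  */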
7308 /* Subroutine of native_interpret_expr. Interpret the contents of
7309 the buffer PTR of length LEN as an INTEGER_CST of type TYPE.
7310 If the buffer cannot be interpreted, return NULL_TREE. */
7312 static tree
7313 native_interpret_int (tree type, const unsigned char *ptr, int len)
7315 int total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
7316 int byte, offset, word, words;
7317 unsigned char value;
7318 double_int result;
7320 if (total_bytes > len)
7321 return NULL_TREE;
7322 if (total_bytes * BITS_PER_UNIT > 2 * HOST_BITS_PER_WIDE_INT)
7323 return NULL_TREE;
7325 result = double_int_zero;
7326 words = total_bytes / UNITS_PER_WORD;
7328 for (byte = 0; byte < total_bytes; byte++)
7330 int bitpos = byte * BITS_PER_UNIT;
7331 if (total_bytes > UNITS_PER_WORD)
7333 word = byte / UNITS_PER_WORD;
7334 if (WORDS_BIG_ENDIAN)
7335 word = (words - 1) - word;
7336 offset = word * UNITS_PER_WORD;
7337 if (BYTES_BIG_ENDIAN)
7338 offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
7339 else
7340 offset += byte % UNITS_PER_WORD;
7342 else
7343 offset = BYTES_BIG_ENDIAN ? (total_bytes - 1) - byte : byte;
7344 value = ptr[offset];
7346 if (bitpos < HOST_BITS_PER_WIDE_INT)
7347 result.low |= (unsigned HOST_WIDE_INT) value << bitpos;
7348 else
7349 result.high |= (unsigned HOST_WIDE_INT) value
7350 << (bitpos - HOST_BITS_PER_WIDE_INT);
7353 return double_int_to_tree (type, result);
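/* Byte order sketch (hypothetical 32-bit example): for a little-endian
   target the buffer { 0x78, 0x56, 0x34, 0x12 } reassembles into the
   INTEGER_CST 0x12345678, while a big-endian target reads the same
   bytes as 0x78563412.  */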
7357 /* Subroutine of native_interpret_expr. Interpret the contents of
7358 the buffer PTR of length LEN as a REAL_CST of type TYPE.
7359 If the buffer cannot be interpreted, return NULL_TREE. */
7361 static tree
7362 native_interpret_real (tree type, const unsigned char *ptr, int len)
7364 enum machine_mode mode = TYPE_MODE (type);
7365 int total_bytes = GET_MODE_SIZE (mode);
7366 int byte, offset, word, words, bitpos;
7367 unsigned char value;
7368 /* There are always 32 bits in each long, no matter the size of
7369 the host's long. We handle floating point representations with
7370 up to 192 bits. */
7371 REAL_VALUE_TYPE r;
7372 long tmp[6];
7374 total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
7375 if (total_bytes > len || total_bytes > 24)
7376 return NULL_TREE;
7377 words = (32 / BITS_PER_UNIT) / UNITS_PER_WORD;
7379 memset (tmp, 0, sizeof (tmp));
7380 for (bitpos = 0; bitpos < total_bytes * BITS_PER_UNIT;
7381 bitpos += BITS_PER_UNIT)
7383 byte = (bitpos / BITS_PER_UNIT) & 3;
7384 if (UNITS_PER_WORD < 4)
7386 word = byte / UNITS_PER_WORD;
7387 if (WORDS_BIG_ENDIAN)
7388 word = (words - 1) - word;
7389 offset = word * UNITS_PER_WORD;
7390 if (BYTES_BIG_ENDIAN)
7391 offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
7392 else
7393 offset += byte % UNITS_PER_WORD;
7395 else
7396 offset = BYTES_BIG_ENDIAN ? 3 - byte : byte;
7397 value = ptr[offset + ((bitpos / BITS_PER_UNIT) & ~3)];
7399 tmp[bitpos / 32] |= (unsigned long)value << (bitpos & 31);
7402 real_from_target (&r, tmp, mode);
7403 return build_real (type, r);
7407 /* Subroutine of native_interpret_expr. Interpret the contents of
7408 the buffer PTR of length LEN as a COMPLEX_CST of type TYPE.
7409 If the buffer cannot be interpreted, return NULL_TREE. */
7411 static tree
7412 native_interpret_complex (tree type, const unsigned char *ptr, int len)
7414 tree etype, rpart, ipart;
7415 int size;
7417 etype = TREE_TYPE (type);
7418 size = GET_MODE_SIZE (TYPE_MODE (etype));
7419 if (size * 2 > len)
7420 return NULL_TREE;
7421 rpart = native_interpret_expr (etype, ptr, size);
7422 if (!rpart)
7423 return NULL_TREE;
7424 ipart = native_interpret_expr (etype, ptr+size, size);
7425 if (!ipart)
7426 return NULL_TREE;
7427 return build_complex (type, rpart, ipart);
7431 /* Subroutine of native_interpret_expr. Interpret the contents of
7432 the buffer PTR of length LEN as a VECTOR_CST of type TYPE.
7433 If the buffer cannot be interpreted, return NULL_TREE. */
7435 static tree
7436 native_interpret_vector (tree type, const unsigned char *ptr, int len)
7438 tree etype, elem, elements;
7439 int i, size, count;
7441 etype = TREE_TYPE (type);
7442 size = GET_MODE_SIZE (TYPE_MODE (etype));
7443 count = TYPE_VECTOR_SUBPARTS (type);
7444 if (size * count > len)
7445 return NULL_TREE;
7447 elements = NULL_TREE;
7448 for (i = count - 1; i >= 0; i--)
7450 elem = native_interpret_expr (etype, ptr+(i*size), size);
7451 if (!elem)
7452 return NULL_TREE;
7453 elements = tree_cons (NULL_TREE, elem, elements);
7455 return build_vector (type, elements);
7459 /* Subroutine of fold_view_convert_expr. Interpret the contents of
7460 the buffer PTR of length LEN as a constant of type TYPE. For
7461 INTEGRAL_TYPE_P we return an INTEGER_CST, for SCALAR_FLOAT_TYPE_P
7462 we return a REAL_CST, etc... If the buffer cannot be interpreted,
7463 return NULL_TREE. */
7465 tree
7466 native_interpret_expr (tree type, const unsigned char *ptr, int len)
7468 switch (TREE_CODE (type))
7470 case INTEGER_TYPE:
7471 case ENUMERAL_TYPE:
7472 case BOOLEAN_TYPE:
7473 return native_interpret_int (type, ptr, len);
7475 case REAL_TYPE:
7476 return native_interpret_real (type, ptr, len);
7478 case COMPLEX_TYPE:
7479 return native_interpret_complex (type, ptr, len);
7481 case VECTOR_TYPE:
7482 return native_interpret_vector (type, ptr, len);
7484 default:
7485 return NULL_TREE;
7490 /* Fold a VIEW_CONVERT_EXPR of a constant expression EXPR to type
7491 TYPE at compile-time. If we're unable to perform the conversion
7492 return NULL_TREE. */
7494 static tree
7495 fold_view_convert_expr (tree type, tree expr)
7497 /* We support up to 512-bit values (for V8DFmode). */
7498 unsigned char buffer[64];
7499 int len;
7501 /* Check that the host and target are sane. */
7502 if (CHAR_BIT != 8 || BITS_PER_UNIT != 8)
7503 return NULL_TREE;
7505 len = native_encode_expr (expr, buffer, sizeof (buffer));
7506 if (len == 0)
7507 return NULL_TREE;
7509 return native_interpret_expr (type, buffer, len);
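/* For instance (an illustrative, hypothetical fragment): a
   VIEW_CONVERT_EXPR reinterpreting the REAL_CST 1.0f as a 32-bit
   unsigned int folds to the INTEGER_CST 0x3f800000 by encoding the
   float's bytes and interpreting them back as an integer.  */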
7512 /* Build an expression for the address of T. Folds away INDIRECT_REF
7513 to avoid confusing the gimplify process. */
7515 tree
7516 build_fold_addr_expr_with_type_loc (location_t loc, tree t, tree ptrtype)
7518 /* The size of the object is not relevant when talking about its address. */
7519 if (TREE_CODE (t) == WITH_SIZE_EXPR)
7520 t = TREE_OPERAND (t, 0);
7522 if (TREE_CODE (t) == INDIRECT_REF)
7524 t = TREE_OPERAND (t, 0);
7526 if (TREE_TYPE (t) != ptrtype)
7527 t = build1_loc (loc, NOP_EXPR, ptrtype, t);
7529 else if (TREE_CODE (t) == MEM_REF
7530 && integer_zerop (TREE_OPERAND (t, 1)))
7531 return TREE_OPERAND (t, 0);
7532 else if (TREE_CODE (t) == VIEW_CONVERT_EXPR)
7534 t = build_fold_addr_expr_loc (loc, TREE_OPERAND (t, 0));
7536 if (TREE_TYPE (t) != ptrtype)
7537 t = fold_convert_loc (loc, ptrtype, t);
7539 else
7540 t = build1_loc (loc, ADDR_EXPR, ptrtype, t);
7542 return t;
7545 /* Build an expression for the address of T. */
7547 tree
7548 build_fold_addr_expr_loc (location_t loc, tree t)
7550 tree ptrtype = build_pointer_type (TREE_TYPE (t));
7552 return build_fold_addr_expr_with_type_loc (loc, t, ptrtype);
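/* Usage sketch (hypothetical trees): for a VAR_DECL `v' of type T this
   yields &v of type T *; for an INDIRECT_REF `*p' it simply hands back
   `p' instead of building &*p, as described above.  */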
7555 /* Fold a unary expression of code CODE and type TYPE with operand
7556 OP0. Return the folded expression if folding is successful.
7557 Otherwise, return NULL_TREE. */
7559 tree
7560 fold_unary_loc (location_t loc, enum tree_code code, tree type, tree op0)
7562 tree tem;
7563 tree arg0;
7564 enum tree_code_class kind = TREE_CODE_CLASS (code);
7566 gcc_assert (IS_EXPR_CODE_CLASS (kind)
7567 && TREE_CODE_LENGTH (code) == 1);
7569 arg0 = op0;
7570 if (arg0)
7572 if (CONVERT_EXPR_CODE_P (code)
7573 || code == FLOAT_EXPR || code == ABS_EXPR)
7575 /* Don't use STRIP_NOPS, because signedness of argument type
7576 matters. */
7577 STRIP_SIGN_NOPS (arg0);
7579 else
7581 /* Strip any conversions that don't change the mode. This
7582 is safe for every expression, except for a comparison
7583 expression because its signedness is derived from its
7584 operands.
7586 Note that this is done as an internal manipulation within
7587 the constant folder, in order to find the simplest
7588 representation of the arguments so that their form can be
7589 studied. In any case, the appropriate type conversions
7590 should be put back in the tree that will get out of the
7591 constant folder. */
7592 STRIP_NOPS (arg0);
7596 if (TREE_CODE_CLASS (code) == tcc_unary)
7598 if (TREE_CODE (arg0) == COMPOUND_EXPR)
7599 return build2 (COMPOUND_EXPR, type, TREE_OPERAND (arg0, 0),
7600 fold_build1_loc (loc, code, type,
7601 fold_convert_loc (loc, TREE_TYPE (op0),
7602 TREE_OPERAND (arg0, 1))));
7603 else if (TREE_CODE (arg0) == COND_EXPR)
7605 tree arg01 = TREE_OPERAND (arg0, 1);
7606 tree arg02 = TREE_OPERAND (arg0, 2);
7607 if (! VOID_TYPE_P (TREE_TYPE (arg01)))
7608 arg01 = fold_build1_loc (loc, code, type,
7609 fold_convert_loc (loc,
7610 TREE_TYPE (op0), arg01));
7611 if (! VOID_TYPE_P (TREE_TYPE (arg02)))
7612 arg02 = fold_build1_loc (loc, code, type,
7613 fold_convert_loc (loc,
7614 TREE_TYPE (op0), arg02));
7615 tem = fold_build3_loc (loc, COND_EXPR, type, TREE_OPERAND (arg0, 0),
7616 arg01, arg02);
7618 /* If this was a conversion, and all we did was to move it
7619 inside the COND_EXPR, bring it back out. But leave it if
7620 it is a conversion from integer to integer and the
7621 result precision is no wider than a word since such a
7622 conversion is cheap and may be optimized away by combine,
7623 while it couldn't if it were outside the COND_EXPR. Then return
7624 so we don't get into an infinite recursion loop taking the
7625 conversion out and then back in. */
7627 if ((CONVERT_EXPR_CODE_P (code)
7628 || code == NON_LVALUE_EXPR)
7629 && TREE_CODE (tem) == COND_EXPR
7630 && TREE_CODE (TREE_OPERAND (tem, 1)) == code
7631 && TREE_CODE (TREE_OPERAND (tem, 2)) == code
7632 && ! VOID_TYPE_P (TREE_OPERAND (tem, 1))
7633 && ! VOID_TYPE_P (TREE_OPERAND (tem, 2))
7634 && (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 1), 0))
7635 == TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 2), 0)))
7636 && (! (INTEGRAL_TYPE_P (TREE_TYPE (tem))
7637 && (INTEGRAL_TYPE_P
7638 (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 1), 0))))
7639 && TYPE_PRECISION (TREE_TYPE (tem)) <= BITS_PER_WORD)
7640 || flag_syntax_only))
7641 tem = build1_loc (loc, code, type,
7642 build3 (COND_EXPR,
7643 TREE_TYPE (TREE_OPERAND
7644 (TREE_OPERAND (tem, 1), 0)),
7645 TREE_OPERAND (tem, 0),
7646 TREE_OPERAND (TREE_OPERAND (tem, 1), 0),
7647 TREE_OPERAND (TREE_OPERAND (tem, 2),
7648 0)));
7649 return tem;
7651 else if (COMPARISON_CLASS_P (arg0))
7653 if (TREE_CODE (type) == BOOLEAN_TYPE)
7655 arg0 = copy_node (arg0);
7656 TREE_TYPE (arg0) = type;
7657 return arg0;
7659 else if (TREE_CODE (type) != INTEGER_TYPE)
7660 return fold_build3_loc (loc, COND_EXPR, type, arg0,
7661 fold_build1_loc (loc, code, type,
7662 integer_one_node),
7663 fold_build1_loc (loc, code, type,
7664 integer_zero_node));
7668 switch (code)
7670 case PAREN_EXPR:
7671 /* Re-association barriers around constants and other re-association
7672 barriers can be removed. */
7673 if (CONSTANT_CLASS_P (op0)
7674 || TREE_CODE (op0) == PAREN_EXPR)
7675 return fold_convert_loc (loc, type, op0);
7676 return NULL_TREE;
7678 CASE_CONVERT:
7679 case FLOAT_EXPR:
7680 case FIX_TRUNC_EXPR:
7681 if (TREE_TYPE (op0) == type)
7682 return op0;
7684 /* If we have (type) (a CMP b) and type is an integral type, return
7685 a new expression involving the new type. */
7686 if (COMPARISON_CLASS_P (op0) && INTEGRAL_TYPE_P (type))
7687 return fold_build2_loc (loc, TREE_CODE (op0), type, TREE_OPERAND (op0, 0),
7688 TREE_OPERAND (op0, 1));
7690 /* Handle cases of two conversions in a row. */
7691 if (CONVERT_EXPR_P (op0))
7693 tree inside_type = TREE_TYPE (TREE_OPERAND (op0, 0));
7694 tree inter_type = TREE_TYPE (op0);
7695 int inside_int = INTEGRAL_TYPE_P (inside_type);
7696 int inside_ptr = POINTER_TYPE_P (inside_type);
7697 int inside_float = FLOAT_TYPE_P (inside_type);
7698 int inside_vec = TREE_CODE (inside_type) == VECTOR_TYPE;
7699 unsigned int inside_prec = TYPE_PRECISION (inside_type);
7700 int inside_unsignedp = TYPE_UNSIGNED (inside_type);
7701 int inter_int = INTEGRAL_TYPE_P (inter_type);
7702 int inter_ptr = POINTER_TYPE_P (inter_type);
7703 int inter_float = FLOAT_TYPE_P (inter_type);
7704 int inter_vec = TREE_CODE (inter_type) == VECTOR_TYPE;
7705 unsigned int inter_prec = TYPE_PRECISION (inter_type);
7706 int inter_unsignedp = TYPE_UNSIGNED (inter_type);
7707 int final_int = INTEGRAL_TYPE_P (type);
7708 int final_ptr = POINTER_TYPE_P (type);
7709 int final_float = FLOAT_TYPE_P (type);
7710 int final_vec = TREE_CODE (type) == VECTOR_TYPE;
7711 unsigned int final_prec = TYPE_PRECISION (type);
7712 int final_unsignedp = TYPE_UNSIGNED (type);
7714 /* In addition to the cases of two conversions in a row
7715 handled below, if we are converting something to its own
7716 type via an object of identical or wider precision, neither
7717 conversion is needed. */
7718 if (TYPE_MAIN_VARIANT (inside_type) == TYPE_MAIN_VARIANT (type)
7719 && (((inter_int || inter_ptr) && final_int)
7720 || (inter_float && final_float))
7721 && inter_prec >= final_prec)
7722 return fold_build1_loc (loc, code, type, TREE_OPERAND (op0, 0));
7724 /* Likewise, if the intermediate and initial types are either both
7725 float or both integer, we don't need the middle conversion if the
7726 former is wider than the latter and doesn't change the signedness
7727 (for integers). Avoid this if the final type is a pointer since
7728 then we sometimes need the middle conversion. Likewise if the
7729 final type has a precision not equal to the size of its mode. */
7730 if (((inter_int && inside_int)
7731 || (inter_float && inside_float)
7732 || (inter_vec && inside_vec))
7733 && inter_prec >= inside_prec
7734 && (inter_float || inter_vec
7735 || inter_unsignedp == inside_unsignedp)
7736 && ! (final_prec != GET_MODE_BITSIZE (TYPE_MODE (type))
7737 && TYPE_MODE (type) == TYPE_MODE (inter_type))
7738 && ! final_ptr
7739 && (! final_vec || inter_prec == inside_prec))
7740 return fold_build1_loc (loc, code, type, TREE_OPERAND (op0, 0));
7742 /* If we have a sign-extension of a zero-extended value, we can
7743 replace that by a single zero-extension. */
7744 if (inside_int && inter_int && final_int
7745 && inside_prec < inter_prec && inter_prec < final_prec
7746 && inside_unsignedp && !inter_unsignedp)
7747 return fold_build1_loc (loc, code, type, TREE_OPERAND (op0, 0));
7749 /* Two conversions in a row are not needed unless:
7750 - some conversion is floating-point (overstrict for now), or
7751 - some conversion is a vector (overstrict for now), or
7752 - the intermediate type is narrower than both initial and
7753 final, or
7754 - the intermediate type and innermost type differ in signedness,
7755 and the outermost type is wider than the intermediate, or
7756 - the initial type is a pointer type and the precisions of the
7757 intermediate and final types differ, or
7758 - the final type is a pointer type and the precisions of the
7759 initial and intermediate types differ. */
7760 if (! inside_float && ! inter_float && ! final_float
7761 && ! inside_vec && ! inter_vec && ! final_vec
7762 && (inter_prec >= inside_prec || inter_prec >= final_prec)
7763 && ! (inside_int && inter_int
7764 && inter_unsignedp != inside_unsignedp
7765 && inter_prec < final_prec)
7766 && ((inter_unsignedp && inter_prec > inside_prec)
7767 == (final_unsignedp && final_prec > inter_prec))
7768 && ! (inside_ptr && inter_prec != final_prec)
7769 && ! (final_ptr && inside_prec != inter_prec)
7770 && ! (final_prec != GET_MODE_BITSIZE (TYPE_MODE (type))
7771 && TYPE_MODE (type) == TYPE_MODE (inter_type)))
7772 return fold_build1_loc (loc, code, type, TREE_OPERAND (op0, 0));
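/* Examples of the rules above (hypothetical C fragment; 32-bit int,
   64-bit long, 16-bit short):

     (int) (char) x   -> kept: the intermediate truncation matters
     (int) (long) i   -> (int) i: widening then narrowing back
     (short) (int) l  -> (short) l: the middle conversion cannot
                         change the final truncated result  */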
7775 /* Handle (T *)&A.B.C for A being of type T and B and C
7776 living at offset zero. This occurs frequently in
7777 C++ upcasting and then accessing the base. */
7778 if (TREE_CODE (op0) == ADDR_EXPR
7779 && POINTER_TYPE_P (type)
7780 && handled_component_p (TREE_OPERAND (op0, 0)))
7782 HOST_WIDE_INT bitsize, bitpos;
7783 tree offset;
7784 enum machine_mode mode;
7785 int unsignedp, volatilep;
7786 tree base = TREE_OPERAND (op0, 0);
7787 base = get_inner_reference (base, &bitsize, &bitpos, &offset,
7788 &mode, &unsignedp, &volatilep, false);
7789 /* If the reference was to a (constant) zero offset, we can use
7790 the address of the base if it has the same base type
7791 as the result type and the pointer type is unqualified. */
7792 if (! offset && bitpos == 0
7793 && (TYPE_MAIN_VARIANT (TREE_TYPE (type))
7794 == TYPE_MAIN_VARIANT (TREE_TYPE (base)))
7795 && TYPE_QUALS (type) == TYPE_UNQUALIFIED)
7796 return fold_convert_loc (loc, type,
7797 build_fold_addr_expr_loc (loc, base));
7800 if (TREE_CODE (op0) == MODIFY_EXPR
7801 && TREE_CONSTANT (TREE_OPERAND (op0, 1))
7802 /* Detect assigning a bitfield. */
7803 && !(TREE_CODE (TREE_OPERAND (op0, 0)) == COMPONENT_REF
7804 && DECL_BIT_FIELD
7805 (TREE_OPERAND (TREE_OPERAND (op0, 0), 1))))
7807 /* Don't leave an assignment inside a conversion
7808 unless assigning a bitfield. */
7809 tem = fold_build1_loc (loc, code, type, TREE_OPERAND (op0, 1));
7810 /* First do the assignment, then return converted constant. */
7811 tem = build2_loc (loc, COMPOUND_EXPR, TREE_TYPE (tem), op0, tem);
7812 TREE_NO_WARNING (tem) = 1;
7813 TREE_USED (tem) = 1;
7814 return tem;
7817 /* Convert (T)(x & c) into (T)x & (T)c, if c is an integer
7818 constant (if x has signed type, the sign bit cannot be set
7819 in c). This folds extension into the BIT_AND_EXPR.
7820 ??? We don't do it for BOOLEAN_TYPE or ENUMERAL_TYPE because they
7821 very likely don't have maximal range for their precision and this
7822 transformation effectively doesn't preserve non-maximal ranges. */
7823 if (TREE_CODE (type) == INTEGER_TYPE
7824 && TREE_CODE (op0) == BIT_AND_EXPR
7825 && TREE_CODE (TREE_OPERAND (op0, 1)) == INTEGER_CST)
7827 tree and_expr = op0;
7828 tree and0 = TREE_OPERAND (and_expr, 0);
7829 tree and1 = TREE_OPERAND (and_expr, 1);
7830 int change = 0;
7832 if (TYPE_UNSIGNED (TREE_TYPE (and_expr))
7833 || (TYPE_PRECISION (type)
7834 <= TYPE_PRECISION (TREE_TYPE (and_expr))))
7835 change = 1;
7836 else if (TYPE_PRECISION (TREE_TYPE (and1))
7837 <= HOST_BITS_PER_WIDE_INT
7838 && host_integerp (and1, 1))
7840 unsigned HOST_WIDE_INT cst;
7842 cst = tree_low_cst (and1, 1);
7843 cst &= (HOST_WIDE_INT) -1
7844 << (TYPE_PRECISION (TREE_TYPE (and1)) - 1);
7845 change = (cst == 0);
7846 #ifdef LOAD_EXTEND_OP
7847 if (change
7848 && !flag_syntax_only
7849 && (LOAD_EXTEND_OP (TYPE_MODE (TREE_TYPE (and0)))
7850 == ZERO_EXTEND))
7852 tree uns = unsigned_type_for (TREE_TYPE (and0));
7853 and0 = fold_convert_loc (loc, uns, and0);
7854 and1 = fold_convert_loc (loc, uns, and1);
7856 #endif
7858 if (change)
7860 tem = force_fit_type_double (type, tree_to_double_int (and1),
7861 0, TREE_OVERFLOW (and1));
7862 return fold_build2_loc (loc, BIT_AND_EXPR, type,
7863 fold_convert_loc (loc, type, and0), tem);
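/* For instance (hypothetical fragment, with int x):

     (long long) (x & 0x7f)  ->  (long long) x & 0x7f

   is safe because the mask clears the sign bit of x, so sign
   extension cannot smuggle set bits into the widened mask.  */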
7867 /* Convert (T1)(X p+ Y) into ((T1)X p+ Y), for pointer type,
7868 when one of the new casts will fold away. Conservatively we assume
7869 that this happens when X or Y is NOP_EXPR or Y is INTEGER_CST. */
7870 if (POINTER_TYPE_P (type)
7871 && TREE_CODE (arg0) == POINTER_PLUS_EXPR
7872 && (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
7873 || TREE_CODE (TREE_OPERAND (arg0, 0)) == NOP_EXPR
7874 || TREE_CODE (TREE_OPERAND (arg0, 1)) == NOP_EXPR))
7876 tree arg00 = TREE_OPERAND (arg0, 0);
7877 tree arg01 = TREE_OPERAND (arg0, 1);
7879 return fold_build2_loc (loc,
7880 TREE_CODE (arg0), type,
7881 fold_convert_loc (loc, type, arg00),
7882 fold_convert_loc (loc, sizetype, arg01));
7885 /* Convert (T1)(~(T2)X) into ~(T1)X if T1 and T2 are integral types
7886 of the same precision, and X has an integer type not narrower than
7887 types T1 or T2, i.e. the cast (T2)X isn't an extension. */
7888 if (INTEGRAL_TYPE_P (type)
7889 && TREE_CODE (op0) == BIT_NOT_EXPR
7890 && INTEGRAL_TYPE_P (TREE_TYPE (op0))
7891 && CONVERT_EXPR_P (TREE_OPERAND (op0, 0))
7892 && TYPE_PRECISION (type) == TYPE_PRECISION (TREE_TYPE (op0)))
7894 tem = TREE_OPERAND (TREE_OPERAND (op0, 0), 0);
7895 if (INTEGRAL_TYPE_P (TREE_TYPE (tem))
7896 && TYPE_PRECISION (type) <= TYPE_PRECISION (TREE_TYPE (tem)))
7897 return fold_build1_loc (loc, BIT_NOT_EXPR, type,
7898 fold_convert_loc (loc, type, tem));
7901 /* Convert (T1)(X * Y) into (T1)X * (T1)Y if T1 is narrower than the
7902 type of X and Y (integer types only). */
7903 if (INTEGRAL_TYPE_P (type)
7904 && TREE_CODE (op0) == MULT_EXPR
7905 && INTEGRAL_TYPE_P (TREE_TYPE (op0))
7906 && TYPE_PRECISION (type) < TYPE_PRECISION (TREE_TYPE (op0)))
7908 /* Be careful not to introduce new overflows. */
7909 tree mult_type;
7910 if (TYPE_OVERFLOW_WRAPS (type))
7911 mult_type = type;
7912 else
7913 mult_type = unsigned_type_for (type);
7915 if (TYPE_PRECISION (mult_type) < TYPE_PRECISION (TREE_TYPE (op0)))
7917 tem = fold_build2_loc (loc, MULT_EXPR, mult_type,
7918 fold_convert_loc (loc, mult_type,
7919 TREE_OPERAND (op0, 0)),
7920 fold_convert_loc (loc, mult_type,
7921 TREE_OPERAND (op0, 1)));
7922 return fold_convert_loc (loc, type, tem);
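/* Sketch (hypothetical 32-bit int, 16-bit short): the narrowing

     (short) (x * y)   with int x, y

   is rewritten to do the multiplication in `unsigned short' when
   short arithmetic does not wrap, so the narrower multiply cannot
   introduce new undefined overflow.  */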
7926 tem = fold_convert_const (code, type, op0);
7927 return tem ? tem : NULL_TREE;
7929 case ADDR_SPACE_CONVERT_EXPR:
7930 if (integer_zerop (arg0))
7931 return fold_convert_const (code, type, arg0);
7932 return NULL_TREE;
7934 case FIXED_CONVERT_EXPR:
7935 tem = fold_convert_const (code, type, arg0);
7936 return tem ? tem : NULL_TREE;
7938 case VIEW_CONVERT_EXPR:
7939 if (TREE_TYPE (op0) == type)
7940 return op0;
7941 if (TREE_CODE (op0) == VIEW_CONVERT_EXPR)
7942 return fold_build1_loc (loc, VIEW_CONVERT_EXPR,
7943 type, TREE_OPERAND (op0, 0));
7944 if (TREE_CODE (op0) == MEM_REF)
7945 return fold_build2_loc (loc, MEM_REF, type,
7946 TREE_OPERAND (op0, 0), TREE_OPERAND (op0, 1));
7948 /* For integral conversions with the same precision or pointer
7949 conversions use a NOP_EXPR instead. */
7950 if ((INTEGRAL_TYPE_P (type)
7951 || POINTER_TYPE_P (type))
7952 && (INTEGRAL_TYPE_P (TREE_TYPE (op0))
7953 || POINTER_TYPE_P (TREE_TYPE (op0)))
7954 && TYPE_PRECISION (type) == TYPE_PRECISION (TREE_TYPE (op0)))
7955 return fold_convert_loc (loc, type, op0);
7957 /* Strip inner integral conversions that do not change the precision. */
7958 if (CONVERT_EXPR_P (op0)
7959 && (INTEGRAL_TYPE_P (TREE_TYPE (op0))
7960 || POINTER_TYPE_P (TREE_TYPE (op0)))
7961 && (INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (op0, 0)))
7962 || POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (op0, 0))))
7963 && (TYPE_PRECISION (TREE_TYPE (op0))
7964 == TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (op0, 0)))))
7965 return fold_build1_loc (loc, VIEW_CONVERT_EXPR,
7966 type, TREE_OPERAND (op0, 0));
7968 return fold_view_convert_expr (type, op0);
7970 case NEGATE_EXPR:
7971 tem = fold_negate_expr (loc, arg0);
7972 if (tem)
7973 return fold_convert_loc (loc, type, tem);
7974 return NULL_TREE;
7976 case ABS_EXPR:
7977 if (TREE_CODE (arg0) == INTEGER_CST || TREE_CODE (arg0) == REAL_CST)
7978 return fold_abs_const (arg0, type);
7979 else if (TREE_CODE (arg0) == NEGATE_EXPR)
7980 return fold_build1_loc (loc, ABS_EXPR, type, TREE_OPERAND (arg0, 0));
7981 /* Convert fabs((double)float) into (double)fabsf(float). */
7982 else if (TREE_CODE (arg0) == NOP_EXPR
7983 && TREE_CODE (type) == REAL_TYPE)
7985 tree targ0 = strip_float_extensions (arg0);
7986 if (targ0 != arg0)
7987 return fold_convert_loc (loc, type,
7988 fold_build1_loc (loc, ABS_EXPR,
7989 TREE_TYPE (targ0),
7990 targ0));
7992 /* ABS_EXPR<ABS_EXPR<x>> = ABS_EXPR<x> even if flag_wrapv is on. */
7993 else if (TREE_CODE (arg0) == ABS_EXPR)
7994 return arg0;
7995 else if (tree_expr_nonnegative_p (arg0))
7996 return arg0;
7998 /* Strip sign ops from argument. */
7999 if (TREE_CODE (type) == REAL_TYPE)
8001 tem = fold_strip_sign_ops (arg0);
8002 if (tem)
8003 return fold_build1_loc (loc, ABS_EXPR, type,
8004 fold_convert_loc (loc, type, tem));
8006 return NULL_TREE;
8008 case CONJ_EXPR:
8009 if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
8010 return fold_convert_loc (loc, type, arg0);
8011 if (TREE_CODE (arg0) == COMPLEX_EXPR)
8013 tree itype = TREE_TYPE (type);
8014 tree rpart = fold_convert_loc (loc, itype, TREE_OPERAND (arg0, 0));
8015 tree ipart = fold_convert_loc (loc, itype, TREE_OPERAND (arg0, 1));
8016 return fold_build2_loc (loc, COMPLEX_EXPR, type, rpart,
8017 negate_expr (ipart));
8019 if (TREE_CODE (arg0) == COMPLEX_CST)
8021 tree itype = TREE_TYPE (type);
8022 tree rpart = fold_convert_loc (loc, itype, TREE_REALPART (arg0));
8023 tree ipart = fold_convert_loc (loc, itype, TREE_IMAGPART (arg0));
8024 return build_complex (type, rpart, negate_expr (ipart));
8026 if (TREE_CODE (arg0) == CONJ_EXPR)
8027 return fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
8028 return NULL_TREE;
8030 case BIT_NOT_EXPR:
8031 if (TREE_CODE (arg0) == INTEGER_CST)
8032 return fold_not_const (arg0, type);
8033 else if (TREE_CODE (arg0) == BIT_NOT_EXPR)
8034 return fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
8035 /* Convert ~ (-A) to A - 1. */
8036 else if (INTEGRAL_TYPE_P (type) && TREE_CODE (arg0) == NEGATE_EXPR)
8037 return fold_build2_loc (loc, MINUS_EXPR, type,
8038 fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0)),
8039 build_int_cst (type, 1));
8040 /* Convert ~ (A - 1) or ~ (A + -1) to -A. */
8041 else if (INTEGRAL_TYPE_P (type)
8042 && ((TREE_CODE (arg0) == MINUS_EXPR
8043 && integer_onep (TREE_OPERAND (arg0, 1)))
8044 || (TREE_CODE (arg0) == PLUS_EXPR
8045 && integer_all_onesp (TREE_OPERAND (arg0, 1)))))
8046 return fold_build1_loc (loc, NEGATE_EXPR, type,
8047 fold_convert_loc (loc, type,
8048 TREE_OPERAND (arg0, 0)));
8049 /* Convert ~(X ^ Y) to ~X ^ Y or X ^ ~Y if ~X or ~Y simplify. */
8050 else if (TREE_CODE (arg0) == BIT_XOR_EXPR
8051 && (tem = fold_unary_loc (loc, BIT_NOT_EXPR, type,
8052 fold_convert_loc (loc, type,
8053 TREE_OPERAND (arg0, 0)))))
8054 return fold_build2_loc (loc, BIT_XOR_EXPR, type, tem,
8055 fold_convert_loc (loc, type,
8056 TREE_OPERAND (arg0, 1)));
8057 else if (TREE_CODE (arg0) == BIT_XOR_EXPR
8058 && (tem = fold_unary_loc (loc, BIT_NOT_EXPR, type,
8059 fold_convert_loc (loc, type,
8060 TREE_OPERAND (arg0, 1)))))
8061 return fold_build2_loc (loc, BIT_XOR_EXPR, type,
8062 fold_convert_loc (loc, type,
8063 TREE_OPERAND (arg0, 0)), tem);
8064 /* Perform BIT_NOT_EXPR on each element individually. */
8065 else if (TREE_CODE (arg0) == VECTOR_CST)
8067 tree elements = TREE_VECTOR_CST_ELTS (arg0), elem, list = NULL_TREE;
8068 int count = TYPE_VECTOR_SUBPARTS (type), i;
8070 for (i = 0; i < count; i++)
8072 if (elements)
8074 elem = TREE_VALUE (elements);
8075 elem = fold_unary_loc (loc, BIT_NOT_EXPR, TREE_TYPE (type), elem);
8076 if (elem == NULL_TREE)
8077 break;
8078 elements = TREE_CHAIN (elements);
8080 else
8081 elem = build_int_cst (TREE_TYPE (type), -1);
8082 list = tree_cons (NULL_TREE, elem, list);
8084 if (i == count)
8085 return build_vector (type, nreverse (list));
8088 return NULL_TREE;
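/* Worked instances of the BIT_NOT_EXPR folds above (hypothetical
   integer operands):

     ~(-x)      ->  x - 1
     ~(x - 1)   ->  -x
     ~(x ^ ~y)  ->  x ^ y, because ~~y simplifies to y  */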
8090 case TRUTH_NOT_EXPR:
8091 /* The argument to invert_truthvalue must have Boolean type. */
8092 if (TREE_CODE (TREE_TYPE (arg0)) != BOOLEAN_TYPE)
8093 arg0 = fold_convert_loc (loc, boolean_type_node, arg0);
8095 /* Note that the operand of this must be an int
8096 and its values must be 0 or 1.
8097 ("true" is a fixed value perhaps depending on the language,
8098 but we don't handle values other than 1 correctly yet.) */
8099 tem = fold_truth_not_expr (loc, arg0);
8100 if (!tem)
8101 return NULL_TREE;
8102 return fold_convert_loc (loc, type, tem);
8104 case REALPART_EXPR:
8105 if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
8106 return fold_convert_loc (loc, type, arg0);
8107 if (TREE_CODE (arg0) == COMPLEX_EXPR)
8108 return omit_one_operand_loc (loc, type, TREE_OPERAND (arg0, 0),
8109 TREE_OPERAND (arg0, 1));
8110 if (TREE_CODE (arg0) == COMPLEX_CST)
8111 return fold_convert_loc (loc, type, TREE_REALPART (arg0));
8112 if (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
8114 tree itype = TREE_TYPE (TREE_TYPE (arg0));
8115 tem = fold_build2_loc (loc, TREE_CODE (arg0), itype,
8116 fold_build1_loc (loc, REALPART_EXPR, itype,
8117 TREE_OPERAND (arg0, 0)),
8118 fold_build1_loc (loc, REALPART_EXPR, itype,
8119 TREE_OPERAND (arg0, 1)));
8120 return fold_convert_loc (loc, type, tem);
8122 if (TREE_CODE (arg0) == CONJ_EXPR)
8124 tree itype = TREE_TYPE (TREE_TYPE (arg0));
8125 tem = fold_build1_loc (loc, REALPART_EXPR, itype,
8126 TREE_OPERAND (arg0, 0));
8127 return fold_convert_loc (loc, type, tem);
8129 if (TREE_CODE (arg0) == CALL_EXPR)
8131 tree fn = get_callee_fndecl (arg0);
8132 if (fn && DECL_BUILT_IN_CLASS (fn) == BUILT_IN_NORMAL)
8133 switch (DECL_FUNCTION_CODE (fn))
8135 CASE_FLT_FN (BUILT_IN_CEXPI):
8136 fn = mathfn_built_in (type, BUILT_IN_COS);
8137 if (fn)
8138 return build_call_expr_loc (loc, fn, 1, CALL_EXPR_ARG (arg0, 0));
8139 break;
8141 default:
8142 break;
8145 return NULL_TREE;
8147 case IMAGPART_EXPR:
8148 if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
8149 return build_zero_cst (type);
8150 if (TREE_CODE (arg0) == COMPLEX_EXPR)
8151 return omit_one_operand_loc (loc, type, TREE_OPERAND (arg0, 1),
8152 TREE_OPERAND (arg0, 0));
8153 if (TREE_CODE (arg0) == COMPLEX_CST)
8154 return fold_convert_loc (loc, type, TREE_IMAGPART (arg0));
8155 if (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
8157 tree itype = TREE_TYPE (TREE_TYPE (arg0));
8158 tem = fold_build2_loc (loc, TREE_CODE (arg0), itype,
8159 fold_build1_loc (loc, IMAGPART_EXPR, itype,
8160 TREE_OPERAND (arg0, 0)),
8161 fold_build1_loc (loc, IMAGPART_EXPR, itype,
8162 TREE_OPERAND (arg0, 1)));
8163 return fold_convert_loc (loc, type, tem);
8165 if (TREE_CODE (arg0) == CONJ_EXPR)
8167 tree itype = TREE_TYPE (TREE_TYPE (arg0));
8168 tem = fold_build1_loc (loc, IMAGPART_EXPR, itype, TREE_OPERAND (arg0, 0));
8169 return fold_convert_loc (loc, type, negate_expr (tem));
8171 if (TREE_CODE (arg0) == CALL_EXPR)
8173 tree fn = get_callee_fndecl (arg0);
8174 if (fn && DECL_BUILT_IN_CLASS (fn) == BUILT_IN_NORMAL)
8175 switch (DECL_FUNCTION_CODE (fn))
8177 CASE_FLT_FN (BUILT_IN_CEXPI):
8178 fn = mathfn_built_in (type, BUILT_IN_SIN);
8179 if (fn)
8180 return build_call_expr_loc (loc, fn, 1, CALL_EXPR_ARG (arg0, 0));
8181 break;
8183 default:
8184 break;
8187 return NULL_TREE;
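/* Taken together, the REALPART/IMAGPART folds give, e.g.
   (hypothetical operands):

     REALPART_EXPR <z + w>     -> REALPART_EXPR <z> + REALPART_EXPR <w>
     IMAGPART_EXPR <conj (z)>  -> -IMAGPART_EXPR <z>
     REALPART_EXPR <cexpi (t)> -> cos (t)
     IMAGPART_EXPR <cexpi (t)> -> sin (t)  */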
8189 case INDIRECT_REF:
8190 /* Fold *&X to X if X is an lvalue. */
8191 if (TREE_CODE (op0) == ADDR_EXPR)
8193 tree op00 = TREE_OPERAND (op0, 0);
8194 if ((TREE_CODE (op00) == VAR_DECL
8195 || TREE_CODE (op00) == PARM_DECL
8196 || TREE_CODE (op00) == RESULT_DECL)
8197 && !TREE_READONLY (op00))
8198 return op00;
8200 return NULL_TREE;
8202 default:
8203 return NULL_TREE;
8204 } /* switch (code) */
8208 /* If the operation was a conversion, do _not_ mark a resulting constant
8209 with TREE_OVERFLOW if the original constant was not. These conversions
8210 have implementation defined behavior and retaining the TREE_OVERFLOW
8211 flag here would confuse later passes such as VRP. */
8212 tree
8213 fold_unary_ignore_overflow_loc (location_t loc, enum tree_code code,
8214 tree type, tree op0)
8216 tree res = fold_unary_loc (loc, code, type, op0);
8217 if (res
8218 && TREE_CODE (res) == INTEGER_CST
8219 && TREE_CODE (op0) == INTEGER_CST
8220 && CONVERT_EXPR_CODE_P (code))
8221 TREE_OVERFLOW (res) = TREE_OVERFLOW (op0);
8223 return res;
8226 /* Fold a binary expression of code CODE and type TYPE with operands
8227 OP0 and OP1, containing either a MIN-MAX or a MAX-MIN combination.
8228 Return the folded expression if folding is successful. Otherwise,
8229 return NULL_TREE. */
8231 static tree
8232 fold_minmax (location_t loc, enum tree_code code, tree type, tree op0, tree op1)
8234 enum tree_code compl_code;
8236 if (code == MIN_EXPR)
8237 compl_code = MAX_EXPR;
8238 else if (code == MAX_EXPR)
8239 compl_code = MIN_EXPR;
8240 else
8241 gcc_unreachable ();
8243 /* MIN (MAX (a, b), b) == b. */
8244 if (TREE_CODE (op0) == compl_code
8245 && operand_equal_p (TREE_OPERAND (op0, 1), op1, 0))
8246 return omit_one_operand_loc (loc, type, op1, TREE_OPERAND (op0, 0));
8248 /* MIN (MAX (b, a), b) == b. */
8249 if (TREE_CODE (op0) == compl_code
8250 && operand_equal_p (TREE_OPERAND (op0, 0), op1, 0)
8251 && reorder_operands_p (TREE_OPERAND (op0, 1), op1))
8252 return omit_one_operand_loc (loc, type, op1, TREE_OPERAND (op0, 1));
8254 /* MIN (a, MAX (a, b)) == a. */
8255 if (TREE_CODE (op1) == compl_code
8256 && operand_equal_p (op0, TREE_OPERAND (op1, 0), 0)
8257 && reorder_operands_p (op0, TREE_OPERAND (op1, 1)))
8258 return omit_one_operand_loc (loc, type, op0, TREE_OPERAND (op1, 1));
8260 /* MIN (a, MAX (b, a)) == a. */
8261 if (TREE_CODE (op1) == compl_code
8262 && operand_equal_p (op0, TREE_OPERAND (op1, 1), 0)
8263 && reorder_operands_p (op0, TREE_OPERAND (op1, 0)))
8264 return omit_one_operand_loc (loc, type, op0, TREE_OPERAND (op1, 0));
8266 return NULL_TREE;
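/* Illustrative sketch (hypothetical source): a redundant clamp such as
   MIN (MAX (a, b), b) therefore folds to plain `b', with `a' retained
   only for its side effects via omit_one_operand_loc.  */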
8269 /* Helper that tries to canonicalize the comparison ARG0 CODE ARG1
8270 by changing CODE to reduce the magnitude of constants involved in
8271 ARG0 of the comparison.
8272 Returns a canonicalized comparison tree if a simplification was
8273 possible, otherwise returns NULL_TREE.
8274 Set *STRICT_OVERFLOW_P to true if the canonicalization is only
8275 valid if signed overflow is undefined. */
8277 static tree
8278 maybe_canonicalize_comparison_1 (location_t loc, enum tree_code code, tree type,
8279 tree arg0, tree arg1,
8280 bool *strict_overflow_p)
8282 enum tree_code code0 = TREE_CODE (arg0);
8283 tree t, cst0 = NULL_TREE;
8284 int sgn0;
8285 bool swap = false;
8287 /* Match A +- CST code arg1 and CST code arg1. We can change the
8288 first form only if overflow is undefined. */
8289 if (!((TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0))
8290 /* In principle pointers also have undefined overflow behavior,
8291 but that causes problems elsewhere. */
8292 && !POINTER_TYPE_P (TREE_TYPE (arg0))
8293 && (code0 == MINUS_EXPR
8294 || code0 == PLUS_EXPR)
8295 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
8296 || code0 == INTEGER_CST))
8297 return NULL_TREE;
8299 /* Identify the constant in arg0 and its sign. */
8300 if (code0 == INTEGER_CST)
8301 cst0 = arg0;
8302 else
8303 cst0 = TREE_OPERAND (arg0, 1);
8304 sgn0 = tree_int_cst_sgn (cst0);
8306 /* Overflowed constants and zero will cause problems. */
8307 if (integer_zerop (cst0)
8308 || TREE_OVERFLOW (cst0))
8309 return NULL_TREE;
8311 /* See if we can reduce the magnitude of the constant in
8312 arg0 by changing the comparison code. */
8313 if (code0 == INTEGER_CST)
8315 /* CST <= arg1 -> CST-1 < arg1. */
8316 if (code == LE_EXPR && sgn0 == 1)
8317 code = LT_EXPR;
8318 /* -CST < arg1 -> -CST-1 <= arg1. */
8319 else if (code == LT_EXPR && sgn0 == -1)
8320 code = LE_EXPR;
8321 /* CST > arg1 -> CST-1 >= arg1. */
8322 else if (code == GT_EXPR && sgn0 == 1)
8323 code = GE_EXPR;
8324 /* -CST >= arg1 -> -CST-1 > arg1. */
8325 else if (code == GE_EXPR && sgn0 == -1)
8326 code = GT_EXPR;
8327 else
8328 return NULL_TREE;
8329 /* arg1 code' CST' might be more canonical. */
8330 swap = true;
8332 else
8334 /* A - CST < arg1 -> A - CST-1 <= arg1. */
8335 if (code == LT_EXPR
8336 && code0 == ((sgn0 == -1) ? PLUS_EXPR : MINUS_EXPR))
8337 code = LE_EXPR;
8338 /* A + CST > arg1 -> A + CST-1 >= arg1. */
8339 else if (code == GT_EXPR
8340 && code0 == ((sgn0 == -1) ? MINUS_EXPR : PLUS_EXPR))
8341 code = GE_EXPR;
8342 /* A + CST <= arg1 -> A + CST-1 < arg1. */
8343 else if (code == LE_EXPR
8344 && code0 == ((sgn0 == -1) ? MINUS_EXPR : PLUS_EXPR))
8345 code = LT_EXPR;
8346 /* A - CST >= arg1 -> A - CST-1 > arg1. */
8347 else if (code == GE_EXPR
8348 && code0 == ((sgn0 == -1) ? PLUS_EXPR : MINUS_EXPR))
8349 code = GT_EXPR;
8350 else
8351 return NULL_TREE;
8352 *strict_overflow_p = true;
8355 /* Now build the constant reduced in magnitude. But not if that
8356 would produce one outside of its type's range. */
8357 if (INTEGRAL_TYPE_P (TREE_TYPE (cst0))
8358 && ((sgn0 == 1
8359 && TYPE_MIN_VALUE (TREE_TYPE (cst0))
8360 && tree_int_cst_equal (cst0, TYPE_MIN_VALUE (TREE_TYPE (cst0))))
8361 || (sgn0 == -1
8362 && TYPE_MAX_VALUE (TREE_TYPE (cst0))
8363 && tree_int_cst_equal (cst0, TYPE_MAX_VALUE (TREE_TYPE (cst0))))))
8364 /* We cannot swap the comparison here as that would cause us to
8365 endlessly recurse. */
8366 return NULL_TREE;
8368 t = int_const_binop (sgn0 == -1 ? PLUS_EXPR : MINUS_EXPR,
8369 cst0, build_int_cst (TREE_TYPE (cst0), 1), 0);
8370 if (code0 != INTEGER_CST)
8371 t = fold_build2_loc (loc, code0, TREE_TYPE (arg0), TREE_OPERAND (arg0, 0), t);
8373 /* If swapping might yield a more canonical form, do so. */
8374 if (swap)
8375 return fold_build2_loc (loc, swap_tree_comparison (code), type, arg1, t);
8376 else
8377 return fold_build2_loc (loc, code, type, t, arg1);
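/* Examples (hypothetical signed operands with undefined overflow):

     x - 5 < y   ->  x - 4 <= y
     7 <= y      ->  6 < y, then swapped to the more canonical y > 6

   each step shrinks the magnitude of the constant by one.  */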
8380 /* Further canonicalize the comparison ARG0 CODE ARG1 with type TYPE
8381 and undefined overflow. Try to decrease the magnitude of constants involved
8382 by changing LE_EXPR and GE_EXPR to LT_EXPR and GT_EXPR or vice versa
8383 and put sole constants at the second argument position.
8384 Returns the canonicalized tree if changed, otherwise NULL_TREE. */
8386 static tree
8387 maybe_canonicalize_comparison (location_t loc, enum tree_code code, tree type,
8388 tree arg0, tree arg1)
8390 tree t;
8391 bool strict_overflow_p;
8392 const char * const warnmsg = G_("assuming signed overflow does not occur "
8393 "when reducing constant in comparison");
8395 /* Try canonicalization by simplifying arg0. */
8396 strict_overflow_p = false;
8397 t = maybe_canonicalize_comparison_1 (loc, code, type, arg0, arg1,
8398 &strict_overflow_p);
8399 if (t)
8401 if (strict_overflow_p)
8402 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_MAGNITUDE);
8403 return t;
8406 /* Try canonicalization by simplifying arg1 using the swapped
8407 comparison. */
8408 code = swap_tree_comparison (code);
8409 strict_overflow_p = false;
8410 t = maybe_canonicalize_comparison_1 (loc, code, type, arg1, arg0,
8411 &strict_overflow_p);
8412 if (t && strict_overflow_p)
8413 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_MAGNITUDE);
8414 return t;
8417 /* Return whether BASE + OFFSET + BITPOS may wrap around the address
8418 space. This is used to avoid issuing overflow warnings for
8419 expressions like &p->x which cannot wrap. */
8421 static bool
8422 pointer_may_wrap_p (tree base, tree offset, HOST_WIDE_INT bitpos)
8424 unsigned HOST_WIDE_INT offset_low, total_low;
8425 HOST_WIDE_INT size, offset_high, total_high;
8427 if (!POINTER_TYPE_P (TREE_TYPE (base)))
8428 return true;
8430 if (bitpos < 0)
8431 return true;
8433 if (offset == NULL_TREE)
8435 offset_low = 0;
8436 offset_high = 0;
8438 else if (TREE_CODE (offset) != INTEGER_CST || TREE_OVERFLOW (offset))
8439 return true;
8440 else
8442 offset_low = TREE_INT_CST_LOW (offset);
8443 offset_high = TREE_INT_CST_HIGH (offset);
8446 if (add_double_with_sign (offset_low, offset_high,
8447 bitpos / BITS_PER_UNIT, 0,
8448 &total_low, &total_high,
8449 true))
8450 return true;
8452 if (total_high != 0)
8453 return true;
8455 size = int_size_in_bytes (TREE_TYPE (TREE_TYPE (base)));
8456 if (size <= 0)
8457 return true;
8459 /* We can do slightly better for SIZE if we have an ADDR_EXPR of an
8460 array. */
8461 if (TREE_CODE (base) == ADDR_EXPR)
8463 HOST_WIDE_INT base_size;
8465 base_size = int_size_in_bytes (TREE_TYPE (TREE_OPERAND (base, 0)));
8466 if (base_size > 0 && size < base_size)
8467 size = base_size;
8470 return total_low > (unsigned HOST_WIDE_INT) size;
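/* For example (hypothetical declarations): given

     struct S { int x; } *p;

   the address computation &p->x has a zero offset into an object of
   positive size, so this returns false and no wraparound warning is
   needed for comparisons involving it.  */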
8473 /* Subroutine of fold_binary. This routine performs all of the
8474 transformations that are common to the equality/inequality
8475 operators (EQ_EXPR and NE_EXPR) and the ordering operators
8476 (LT_EXPR, LE_EXPR, GE_EXPR and GT_EXPR). Callers other than
8477 fold_binary itself should call fold_binary instead. Fold a comparison with
8478 tree code CODE and type TYPE with operands OP0 and OP1. Return
8479 the folded comparison or NULL_TREE. */
8481 static tree
8482 fold_comparison (location_t loc, enum tree_code code, tree type,
8483 tree op0, tree op1)
8485 tree arg0, arg1, tem;
8487 arg0 = op0;
8488 arg1 = op1;
8490 STRIP_SIGN_NOPS (arg0);
8491 STRIP_SIGN_NOPS (arg1);
8493 tem = fold_relational_const (code, type, arg0, arg1);
8494 if (tem != NULL_TREE)
8495 return tem;
8497 /* If one arg is a real or integer constant, put it last. */
8498 if (tree_swap_operands_p (arg0, arg1, true))
8499 return fold_build2_loc (loc, swap_tree_comparison (code), type, op1, op0);
8501 /* Transform comparisons of the form X +- C1 CMP C2 to X CMP C2 +- C1. */
8502 if ((TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
8503 && (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
8504 && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1))
8505 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
8506 && (TREE_CODE (arg1) == INTEGER_CST
8507 && !TREE_OVERFLOW (arg1)))
8509 tree const1 = TREE_OPERAND (arg0, 1);
8510 tree const2 = arg1;
8511 tree variable = TREE_OPERAND (arg0, 0);
8512 tree lhs;
8513 int lhs_add;
8514 lhs_add = TREE_CODE (arg0) != PLUS_EXPR;
8516 lhs = fold_build2_loc (loc, lhs_add ? PLUS_EXPR : MINUS_EXPR,
8517 TREE_TYPE (arg1), const2, const1);
8519 /* If the constant operation overflowed, this can be
8520 simplified as a comparison against INT_MAX/INT_MIN. */
8521 if (TREE_CODE (lhs) == INTEGER_CST
8522 && TREE_OVERFLOW (lhs))
8524 int const1_sgn = tree_int_cst_sgn (const1);
8525 enum tree_code code2 = code;
8527 /* Get the sign of the constant on the lhs if the
8528 operation were VARIABLE + CONST1. */
8529 if (TREE_CODE (arg0) == MINUS_EXPR)
8530 const1_sgn = -const1_sgn;
8532 /* The sign of the constant determines if we overflowed
8533 INT_MAX (const1_sgn == -1) or INT_MIN (const1_sgn == 1).
8534 Canonicalize to the INT_MIN overflow by swapping the comparison
8535 if necessary. */
8536 if (const1_sgn == -1)
8537 code2 = swap_tree_comparison (code);
8539 /* We can now look at the canonicalized case
8540 VARIABLE + 1 CODE2 INT_MIN
8541 and decide on the result. */
8542 if (code2 == LT_EXPR
8543 || code2 == LE_EXPR
8544 || code2 == EQ_EXPR)
8545 return omit_one_operand_loc (loc, type, boolean_false_node, variable);
8546 else if (code2 == NE_EXPR
8547 || code2 == GE_EXPR
8548 || code2 == GT_EXPR)
8549 return omit_one_operand_loc (loc, type, boolean_true_node, variable);
8552 if (TREE_CODE (lhs) == TREE_CODE (arg1)
8553 && (TREE_CODE (lhs) != INTEGER_CST
8554 || !TREE_OVERFLOW (lhs)))
8556 if (code != EQ_EXPR && code != NE_EXPR)
8557 fold_overflow_warning ("assuming signed overflow does not occur "
8558 "when changing X +- C1 cmp C2 to "
8559 "X cmp C1 +- C2",
8560 WARN_STRICT_OVERFLOW_COMPARISON);
8561 return fold_build2_loc (loc, code, type, variable, lhs);
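/* Concrete instances of the transform above (hypothetical signed int x
   with undefined overflow):

     x + 10 < 20       ->  x < 10
     x + 1 <= INT_MIN  ->  false, since x + 1 cannot reach INT_MIN
                           without overflowing  */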
8565 /* For comparisons of pointers we can decompose it to a compile time
8566 comparison of the base objects and the offsets into the object.
8567 This requires at least one operand being an ADDR_EXPR or a
8568 POINTER_PLUS_EXPR to do more than the operand_equal_p test below. */
8569 if (POINTER_TYPE_P (TREE_TYPE (arg0))
8570 && (TREE_CODE (arg0) == ADDR_EXPR
8571 || TREE_CODE (arg1) == ADDR_EXPR
8572 || TREE_CODE (arg0) == POINTER_PLUS_EXPR
8573 || TREE_CODE (arg1) == POINTER_PLUS_EXPR))
8575 tree base0, base1, offset0 = NULL_TREE, offset1 = NULL_TREE;
8576 HOST_WIDE_INT bitsize, bitpos0 = 0, bitpos1 = 0;
8577 enum machine_mode mode;
8578 int volatilep, unsignedp;
8579 bool indirect_base0 = false, indirect_base1 = false;
8581 /* Get base and offset for the access. Strip ADDR_EXPR for
8582 get_inner_reference, but put it back by stripping INDIRECT_REF
8583 off the base object if possible. indirect_baseN will be true
8584 if baseN is not an address but refers to the object itself. */
8585 base0 = arg0;
8586 if (TREE_CODE (arg0) == ADDR_EXPR)
8588 base0 = get_inner_reference (TREE_OPERAND (arg0, 0),
8589 &bitsize, &bitpos0, &offset0, &mode,
8590 &unsignedp, &volatilep, false);
8591 if (TREE_CODE (base0) == INDIRECT_REF)
8592 base0 = TREE_OPERAND (base0, 0);
8593 else
8594 indirect_base0 = true;
8596 else if (TREE_CODE (arg0) == POINTER_PLUS_EXPR)
8598 base0 = TREE_OPERAND (arg0, 0);
8599 STRIP_SIGN_NOPS (base0);
8600 if (TREE_CODE (base0) == ADDR_EXPR)
8602 base0 = TREE_OPERAND (base0, 0);
8603 indirect_base0 = true;
8605 offset0 = TREE_OPERAND (arg0, 1);
8608 base1 = arg1;
8609 if (TREE_CODE (arg1) == ADDR_EXPR)
8611 base1 = get_inner_reference (TREE_OPERAND (arg1, 0),
8612 &bitsize, &bitpos1, &offset1, &mode,
8613 &unsignedp, &volatilep, false);
8614 if (TREE_CODE (base1) == INDIRECT_REF)
8615 base1 = TREE_OPERAND (base1, 0);
8616 else
8617 indirect_base1 = true;
8619 else if (TREE_CODE (arg1) == POINTER_PLUS_EXPR)
8621 base1 = TREE_OPERAND (arg1, 0);
8622 STRIP_SIGN_NOPS (base1);
8623 if (TREE_CODE (base1) == ADDR_EXPR)
8625 base1 = TREE_OPERAND (base1, 0);
8626 indirect_base1 = true;
8628 offset1 = TREE_OPERAND (arg1, 1);
8631 /* A local variable can never be pointed to by
8632 the default SSA name of an incoming parameter. */
8633 if ((TREE_CODE (arg0) == ADDR_EXPR
8634 && indirect_base0
8635 && TREE_CODE (base0) == VAR_DECL
8636 && auto_var_in_fn_p (base0, current_function_decl)
8637 && !indirect_base1
8638 && TREE_CODE (base1) == SSA_NAME
8639 && TREE_CODE (SSA_NAME_VAR (base1)) == PARM_DECL
8640 && SSA_NAME_IS_DEFAULT_DEF (base1))
8641 || (TREE_CODE (arg1) == ADDR_EXPR
8642 && indirect_base1
8643 && TREE_CODE (base1) == VAR_DECL
8644 && auto_var_in_fn_p (base1, current_function_decl)
8645 && !indirect_base0
8646 && TREE_CODE (base0) == SSA_NAME
8647 && TREE_CODE (SSA_NAME_VAR (base0)) == PARM_DECL
8648 && SSA_NAME_IS_DEFAULT_DEF (base0)))
8650 if (code == NE_EXPR)
8651 return constant_boolean_node (1, type);
8652 else if (code == EQ_EXPR)
8653 return constant_boolean_node (0, type);
8655 /* If we have equivalent bases we might be able to simplify. */
8656 else if (indirect_base0 == indirect_base1
8657 && operand_equal_p (base0, base1, 0))
8659 /* We can fold this expression to a constant if the non-constant
8660 offset parts are equal. */
8661 if ((offset0 == offset1
8662 || (offset0 && offset1
8663 && operand_equal_p (offset0, offset1, 0)))
8664 && (code == EQ_EXPR
8665 || code == NE_EXPR
8666 || POINTER_TYPE_OVERFLOW_UNDEFINED))
8669 if (code != EQ_EXPR
8670 && code != NE_EXPR
8671 && bitpos0 != bitpos1
8672 && (pointer_may_wrap_p (base0, offset0, bitpos0)
8673 || pointer_may_wrap_p (base1, offset1, bitpos1)))
8674 fold_overflow_warning (("assuming pointer wraparound does not "
8675 "occur when comparing P +- C1 with "
8676 "P +- C2"),
8677 WARN_STRICT_OVERFLOW_CONDITIONAL);
8679 switch (code)
8681 case EQ_EXPR:
8682 return constant_boolean_node (bitpos0 == bitpos1, type);
8683 case NE_EXPR:
8684 return constant_boolean_node (bitpos0 != bitpos1, type);
8685 case LT_EXPR:
8686 return constant_boolean_node (bitpos0 < bitpos1, type);
8687 case LE_EXPR:
8688 return constant_boolean_node (bitpos0 <= bitpos1, type);
8689 case GE_EXPR:
8690 return constant_boolean_node (bitpos0 >= bitpos1, type);
8691 case GT_EXPR:
8692 return constant_boolean_node (bitpos0 > bitpos1, type);
8693 default:;
8696 /* We can simplify the comparison to a comparison of the variable
8697 offset parts if the constant offset parts are equal.
8698 Be careful to use signed size type here because otherwise we
8699 mess with array offsets in the wrong way. This is possible
8700 because pointer arithmetic is restricted to remain within an
8701 object and overflow on pointer differences is undefined as of
8702 6.5.6/8 and /9 with respect to the signed ptrdiff_t. */
8703 else if (bitpos0 == bitpos1
8704 && ((code == EQ_EXPR || code == NE_EXPR)
8705 || POINTER_TYPE_OVERFLOW_UNDEFINED))
8707 /* By converting to signed size type we cover middle-end pointer
8708 arithmetic which operates on unsigned pointer types of size
8709 type size and ARRAY_REF offsets which are properly sign or
8710 zero extended from their type in case it is narrower than
8711 size type. */
8712 if (offset0 == NULL_TREE)
8713 offset0 = build_int_cst (ssizetype, 0);
8714 else
8715 offset0 = fold_convert_loc (loc, ssizetype, offset0);
8716 if (offset1 == NULL_TREE)
8717 offset1 = build_int_cst (ssizetype, 0);
8718 else
8719 offset1 = fold_convert_loc (loc, ssizetype, offset1);
8721 if (code != EQ_EXPR
8722 && code != NE_EXPR
8723 && (pointer_may_wrap_p (base0, offset0, bitpos0)
8724 || pointer_may_wrap_p (base1, offset1, bitpos1)))
8725 fold_overflow_warning (("assuming pointer wraparound does not "
8726 "occur when comparing P +- C1 with "
8727 "P +- C2"),
8728 WARN_STRICT_OVERFLOW_COMPARISON);
8730 return fold_build2_loc (loc, code, type, offset0, offset1);
8733 /* For non-equal bases we can simplify if they are addresses
8734 of local binding decls or constants. */
8735 else if (indirect_base0 && indirect_base1
8736 /* We know that !operand_equal_p (base0, base1, 0)
8737 because the if condition was false. But make
8738 sure two decls are not the same. */
8739 && base0 != base1
8740 && TREE_CODE (arg0) == ADDR_EXPR
8741 && TREE_CODE (arg1) == ADDR_EXPR
8742 && (((TREE_CODE (base0) == VAR_DECL
8743 || TREE_CODE (base0) == PARM_DECL)
8744 && (targetm.binds_local_p (base0)
8745 || CONSTANT_CLASS_P (base1)))
8746 || CONSTANT_CLASS_P (base0))
8747 && (((TREE_CODE (base1) == VAR_DECL
8748 || TREE_CODE (base1) == PARM_DECL)
8749 && (targetm.binds_local_p (base1)
8750 || CONSTANT_CLASS_P (base0)))
8751 || CONSTANT_CLASS_P (base1)))
8753 if (code == EQ_EXPR)
8754 return omit_two_operands_loc (loc, type, boolean_false_node,
8755 arg0, arg1);
8756 else if (code == NE_EXPR)
8757 return omit_two_operands_loc (loc, type, boolean_true_node,
8758 arg0, arg1);
8760 /* For equal offsets we can simplify to a comparison of the
8761 base addresses. */
8762 else if (bitpos0 == bitpos1
8763 && (indirect_base0
8764 ? base0 != TREE_OPERAND (arg0, 0) : base0 != arg0)
8765 && (indirect_base1
8766 ? base1 != TREE_OPERAND (arg1, 0) : base1 != arg1)
8767 && ((offset0 == offset1)
8768 || (offset0 && offset1
8769 && operand_equal_p (offset0, offset1, 0))))
8771 if (indirect_base0)
8772 base0 = build_fold_addr_expr_loc (loc, base0);
8773 if (indirect_base1)
8774 base1 = build_fold_addr_expr_loc (loc, base1);
8775 return fold_build2_loc (loc, code, type, base0, base1);
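/* E.g. (hypothetical declarations, 32-bit int): with
   `struct S { int a, b; } s;' the comparison &s.a == &s.b folds to 0
   because the bases match but the bit positions (0 and 32) differ,
   while &s.a < &s.b folds to 1 under
   POINTER_TYPE_OVERFLOW_UNDEFINED.  */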
8779 /* Transform comparisons of the form X +- C1 CMP Y +- C2 to
8780 X CMP Y +- C2 +- C1 for signed X, Y. This is valid if
8781 the resulting offset is smaller in absolute value than the
8782 original one. */
8783 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0))
8784 && (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
8785 && (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
8786 && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1)))
8787 && (TREE_CODE (arg1) == PLUS_EXPR || TREE_CODE (arg1) == MINUS_EXPR)
8788 && (TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
8789 && !TREE_OVERFLOW (TREE_OPERAND (arg1, 1))))
8791 tree const1 = TREE_OPERAND (arg0, 1);
8792 tree const2 = TREE_OPERAND (arg1, 1);
8793 tree variable1 = TREE_OPERAND (arg0, 0);
8794 tree variable2 = TREE_OPERAND (arg1, 0);
8795 tree cst;
8796 const char * const warnmsg = G_("assuming signed overflow does not "
8797 "occur when combining constants around "
8798 "a comparison");
8800 /* Put the constant on the side where it doesn't overflow and is
8801 of lower absolute value than before. */
8802 cst = int_const_binop (TREE_CODE (arg0) == TREE_CODE (arg1)
8803 ? MINUS_EXPR : PLUS_EXPR,
8804 const2, const1, 0);
8805 if (!TREE_OVERFLOW (cst)
8806 && tree_int_cst_compare (const2, cst) == tree_int_cst_sgn (const2))
8808 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_COMPARISON);
8809 return fold_build2_loc (loc, code, type,
8810 variable1,
8811 fold_build2_loc (loc,
8812 TREE_CODE (arg1), TREE_TYPE (arg1),
8813 variable2, cst));
8816 cst = int_const_binop (TREE_CODE (arg0) == TREE_CODE (arg1)
8817 ? MINUS_EXPR : PLUS_EXPR,
8818 const1, const2, 0);
8819 if (!TREE_OVERFLOW (cst)
8820 && tree_int_cst_compare (const1, cst) == tree_int_cst_sgn (const1))
8822 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_COMPARISON);
8823 return fold_build2_loc (loc, code, type,
8824 fold_build2_loc (loc, TREE_CODE (arg0), TREE_TYPE (arg0),
8825 variable1, cst),
8826 variable2);
8830 /* Transform comparisons of the form X * C1 CMP 0 to X CMP 0 in the
8831 signed arithmetic case. That form is created by the compiler
8832 often enough for folding it to be of value. One example is in
8833 computing loop trip counts after Operator Strength Reduction. */
8834 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0))
8835 && TREE_CODE (arg0) == MULT_EXPR
8836 && (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
8837 && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1)))
8838 && integer_zerop (arg1))
8840 tree const1 = TREE_OPERAND (arg0, 1);
8841 tree const2 = arg1; /* zero */
8842 tree variable1 = TREE_OPERAND (arg0, 0);
8843 enum tree_code cmp_code = code;
8845 /* Handle unfolded multiplication by zero. */
8846 if (integer_zerop (const1))
8847 return fold_build2_loc (loc, cmp_code, type, const1, const2);
8849 fold_overflow_warning (("assuming signed overflow does not occur when "
8850 "eliminating multiplication in comparison "
8851 "with zero"),
8852 WARN_STRICT_OVERFLOW_COMPARISON);
8854 /* If const1 is negative we swap the sense of the comparison. */
8855 if (tree_int_cst_sgn (const1) < 0)
8856 cmp_code = swap_tree_comparison (cmp_code);
8858 return fold_build2_loc (loc, cmp_code, type, variable1, const2);
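/* For example (hypothetical signed i with undefined overflow):

     i * 4 < 0    ->  i < 0
     i * -4 < 0   ->  i > 0, the comparison sense is swapped because
                      the multiplier is negative  */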
8861 tem = maybe_canonicalize_comparison (loc, code, type, op0, op1);
8862 if (tem)
8863 return tem;
8865 if (FLOAT_TYPE_P (TREE_TYPE (arg0)))
8867 tree targ0 = strip_float_extensions (arg0);
8868 tree targ1 = strip_float_extensions (arg1);
8869 tree newtype = TREE_TYPE (targ0);
8871 if (TYPE_PRECISION (TREE_TYPE (targ1)) > TYPE_PRECISION (newtype))
8872 newtype = TREE_TYPE (targ1);
8874 /* Fold (double)float1 CMP (double)float2 into float1 CMP float2. */
8875 if (TYPE_PRECISION (newtype) < TYPE_PRECISION (TREE_TYPE (arg0)))
8876 return fold_build2_loc (loc, code, type,
8877 fold_convert_loc (loc, newtype, targ0),
8878 fold_convert_loc (loc, newtype, targ1));
8880 /* (-a) CMP (-b) -> b CMP a */
8881 if (TREE_CODE (arg0) == NEGATE_EXPR
8882 && TREE_CODE (arg1) == NEGATE_EXPR)
8883 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg1, 0),
8884 TREE_OPERAND (arg0, 0));
8886 if (TREE_CODE (arg1) == REAL_CST)
8888 REAL_VALUE_TYPE cst;
8889 cst = TREE_REAL_CST (arg1);
8891 /* (-a) CMP CST -> a swap(CMP) (-CST) */
8892 if (TREE_CODE (arg0) == NEGATE_EXPR)
8893 return fold_build2_loc (loc, swap_tree_comparison (code), type,
8894 TREE_OPERAND (arg0, 0),
8895 build_real (TREE_TYPE (arg1),
8896 real_value_negate (&cst)));
8898 /* IEEE doesn't distinguish +0 and -0 in comparisons. */
8899 /* a CMP (-0) -> a CMP 0 */
8900 if (REAL_VALUE_MINUS_ZERO (cst))
8901 return fold_build2_loc (loc, code, type, arg0,
8902 build_real (TREE_TYPE (arg1), dconst0));
8904 /* x != NaN is always true, other ops are always false. */
8905 if (REAL_VALUE_ISNAN (cst)
8906 && ! HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg1))))
8908 tem = (code == NE_EXPR) ? integer_one_node : integer_zero_node;
8909 return omit_one_operand_loc (loc, type, tem, arg0);
8912 /* Fold comparisons against infinity. */
8913 if (REAL_VALUE_ISINF (cst)
8914 && MODE_HAS_INFINITIES (TYPE_MODE (TREE_TYPE (arg1))))
8916 tem = fold_inf_compare (loc, code, type, arg0, arg1);
8917 if (tem != NULL_TREE)
8918 return tem;
8922 /* If this is a comparison of a real constant with a PLUS_EXPR
8923 or a MINUS_EXPR of a real constant, we can convert it into a
8924 comparison with a revised real constant as long as no overflow
8925 occurs when unsafe_math_optimizations are enabled. */
8926 if (flag_unsafe_math_optimizations
8927 && TREE_CODE (arg1) == REAL_CST
8928 && (TREE_CODE (arg0) == PLUS_EXPR
8929 || TREE_CODE (arg0) == MINUS_EXPR)
8930 && TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
8931 && 0 != (tem = const_binop (TREE_CODE (arg0) == PLUS_EXPR
8932 ? MINUS_EXPR : PLUS_EXPR,
8933 arg1, TREE_OPERAND (arg0, 1)))
8934 && !TREE_OVERFLOW (tem))
8935 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0), tem);
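/* E.g. with -funsafe-math-optimizations, "x + 1.0 < 3.0" becomes
   "x < 2.0": the revised constant 3.0 - 1.0 is computed at compile
   time, and the transform is skipped if that constant arithmetic
   overflows (illustrative).  */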
8937 /* Likewise, we can simplify a comparison of a real constant with
8938 a MINUS_EXPR whose first operand is also a real constant, i.e.
8939 (c1 - x) < c2 becomes x > c1-c2. Reordering is allowed on
8940 floating-point types only if -fassociative-math is set. */
8941 if (flag_associative_math
8942 && TREE_CODE (arg1) == REAL_CST
8943 && TREE_CODE (arg0) == MINUS_EXPR
8944 && TREE_CODE (TREE_OPERAND (arg0, 0)) == REAL_CST
8945 && 0 != (tem = const_binop (MINUS_EXPR, TREE_OPERAND (arg0, 0),
8946 arg1))
8947 && !TREE_OVERFLOW (tem))
8948 return fold_build2_loc (loc, swap_tree_comparison (code), type,
8949 TREE_OPERAND (arg0, 1), tem);
8951 /* Fold comparisons against built-in math functions. */
8952 if (TREE_CODE (arg1) == REAL_CST
8953 && flag_unsafe_math_optimizations
8954 && ! flag_errno_math)
8956 enum built_in_function fcode = builtin_mathfn_code (arg0);
8958 if (fcode != END_BUILTINS)
8960 tem = fold_mathfn_compare (loc, fcode, code, type, arg0, arg1);
8961 if (tem != NULL_TREE)
8962 return tem;
8967 if (TREE_CODE (TREE_TYPE (arg0)) == INTEGER_TYPE
8968 && CONVERT_EXPR_P (arg0))
8970 /* If we are widening one operand of an integer comparison,
8971 see if the other operand is similarly being widened. Perhaps we
8972 can do the comparison in the narrower type. */
8973 tem = fold_widened_comparison (loc, code, type, arg0, arg1);
8974 if (tem)
8975 return tem;
8977 /* Or if we are changing signedness. */
8978 tem = fold_sign_changed_comparison (loc, code, type, arg0, arg1);
8979 if (tem)
8980 return tem;
8983 /* If this is comparing a constant with a MIN_EXPR or a MAX_EXPR of a
8984 constant, we can simplify it. */
8985 if (TREE_CODE (arg1) == INTEGER_CST
8986 && (TREE_CODE (arg0) == MIN_EXPR
8987 || TREE_CODE (arg0) == MAX_EXPR)
8988 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
8990 tem = optimize_minmax_comparison (loc, code, type, op0, op1);
8991 if (tem)
8992 return tem;
8995 /* Simplify comparison of something with itself. (For IEEE
8996 floating-point, we can only do some of these simplifications.) */
8997 if (operand_equal_p (arg0, arg1, 0))
8999 switch (code)
9001 case EQ_EXPR:
9002 if (! FLOAT_TYPE_P (TREE_TYPE (arg0))
9003 || ! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
9004 return constant_boolean_node (1, type);
9005 break;
9007 case GE_EXPR:
9008 case LE_EXPR:
9009 if (! FLOAT_TYPE_P (TREE_TYPE (arg0))
9010 || ! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
9011 return constant_boolean_node (1, type);
9012 return fold_build2_loc (loc, EQ_EXPR, type, arg0, arg1);
9014 case NE_EXPR:
9015 /* For NE, we can only do this simplification if the operands
9016 are integral or we don't honor IEEE floating point NaNs. */
9017 if (FLOAT_TYPE_P (TREE_TYPE (arg0))
9018 && HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
9019 break;
9020 /* ... fall through ... */
9021 case GT_EXPR:
9022 case LT_EXPR:
9023 return constant_boolean_node (0, type);
9024 default:
9025 gcc_unreachable ();
9029 /* If we are comparing an expression that just has comparisons
9030 of two integer values, arithmetic expressions of those comparisons,
9031 and constants, we can simplify it. There are only three cases
9032 to check: the two values can either be equal, the first can be
9033 greater, or the second can be greater. Fold the expression for
9034 those three values. Since each value must be 0 or 1, we have
9035 eight possibilities, each of which corresponds to the constant 0
9036 or 1 or one of the six possible comparisons.
9038 This handles common cases like (a > b) == 0 but also handles
9039 expressions like ((x > y) - (y > x)) > 0, which supposedly
9040 occur in macroized code. */
9042 if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg0) != INTEGER_CST)
9044 tree cval1 = 0, cval2 = 0;
9045 int save_p = 0;
9047 if (twoval_comparison_p (arg0, &cval1, &cval2, &save_p)
9048 /* Don't handle degenerate cases here; they should already
9049 have been handled anyway. */
9050 && cval1 != 0 && cval2 != 0
9051 && ! (TREE_CONSTANT (cval1) && TREE_CONSTANT (cval2))
9052 && TREE_TYPE (cval1) == TREE_TYPE (cval2)
9053 && INTEGRAL_TYPE_P (TREE_TYPE (cval1))
9054 && TYPE_MAX_VALUE (TREE_TYPE (cval1))
9055 && TYPE_MAX_VALUE (TREE_TYPE (cval2))
9056 && ! operand_equal_p (TYPE_MIN_VALUE (TREE_TYPE (cval1)),
9057 TYPE_MAX_VALUE (TREE_TYPE (cval2)), 0))
9059 tree maxval = TYPE_MAX_VALUE (TREE_TYPE (cval1));
9060 tree minval = TYPE_MIN_VALUE (TREE_TYPE (cval1));
9062 /* We can't just pass T to eval_subst in case cval1 or cval2
9063 was the same as ARG1. */
9065 tree high_result
9066 = fold_build2_loc (loc, code, type,
9067 eval_subst (loc, arg0, cval1, maxval,
9068 cval2, minval),
9069 arg1);
9070 tree equal_result
9071 = fold_build2_loc (loc, code, type,
9072 eval_subst (loc, arg0, cval1, maxval,
9073 cval2, maxval),
9074 arg1);
9075 tree low_result
9076 = fold_build2_loc (loc, code, type,
9077 eval_subst (loc, arg0, cval1, minval,
9078 cval2, maxval),
9079 arg1);
9081 /* All three of these results should be 0 or 1. Confirm they are.
9082 Then use those values to select the proper code to use. */
9084 if (TREE_CODE (high_result) == INTEGER_CST
9085 && TREE_CODE (equal_result) == INTEGER_CST
9086 && TREE_CODE (low_result) == INTEGER_CST)
9088 /* Make a 3-bit mask with the high-order bit being the
9089 value for `>', the next for '=', and the low for '<'. */
9090 switch ((integer_onep (high_result) * 4)
9091 + (integer_onep (equal_result) * 2)
9092 + integer_onep (low_result))
9094 case 0:
9095 /* Always false. */
9096 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
9097 case 1:
9098 code = LT_EXPR;
9099 break;
9100 case 2:
9101 code = EQ_EXPR;
9102 break;
9103 case 3:
9104 code = LE_EXPR;
9105 break;
9106 case 4:
9107 code = GT_EXPR;
9108 break;
9109 case 5:
9110 code = NE_EXPR;
9111 break;
9112 case 6:
9113 code = GE_EXPR;
9114 break;
9115 case 7:
9116 /* Always true. */
9117 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
9120 if (save_p)
9122 tem = save_expr (build2 (code, type, cval1, cval2));
9123 SET_EXPR_LOCATION (tem, loc);
9124 return tem;
9126 return fold_build2_loc (loc, code, type, cval1, cval2);
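/* Worked example (illustrative): for ((x > y) - (y > x)) > 0 the
   three trial foldings yield high_result = 1, equal_result = 0 and
   low_result = 0, i.e. mask 4, so the whole expression folds to
   x > y.  */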
9131 /* We can fold X/C1 op C2 where C1 and C2 are integer constants
9132 into a single range test. */
9133 if ((TREE_CODE (arg0) == TRUNC_DIV_EXPR
9134 || TREE_CODE (arg0) == EXACT_DIV_EXPR)
9135 && TREE_CODE (arg1) == INTEGER_CST
9136 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
9137 && !integer_zerop (TREE_OPERAND (arg0, 1))
9138 && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1))
9139 && !TREE_OVERFLOW (arg1))
9141 tem = fold_div_compare (loc, code, type, arg0, arg1);
9142 if (tem != NULL_TREE)
9143 return tem;
9146 /* Fold ~X op ~Y as Y op X. */
9147 if (TREE_CODE (arg0) == BIT_NOT_EXPR
9148 && TREE_CODE (arg1) == BIT_NOT_EXPR)
9150 tree cmp_type = TREE_TYPE (TREE_OPERAND (arg0, 0));
9151 return fold_build2_loc (loc, code, type,
9152 fold_convert_loc (loc, cmp_type,
9153 TREE_OPERAND (arg1, 0)),
9154 TREE_OPERAND (arg0, 0));
9157 /* Fold ~X op C as X op' ~C, where op' is the swapped comparison. */
9158 if (TREE_CODE (arg0) == BIT_NOT_EXPR
9159 && TREE_CODE (arg1) == INTEGER_CST)
9161 tree cmp_type = TREE_TYPE (TREE_OPERAND (arg0, 0));
9162 return fold_build2_loc (loc, swap_tree_comparison (code), type,
9163 TREE_OPERAND (arg0, 0),
9164 fold_build1_loc (loc, BIT_NOT_EXPR, cmp_type,
9165 fold_convert_loc (loc, cmp_type, arg1)));
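/* Both transforms rely on BIT_NOT_EXPR reversing the ordering:
   ~x == -x - 1, so ~x < ~y iff y < x, and ~x < C iff x > ~C
   (illustrative identities).  */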
9168 return NULL_TREE;
9172 /* Subroutine of fold_binary. Optimize complex multiplications of the
9173 form z * conj(z), as pow(realpart(z),2) + pow(imagpart(z),2). The
9174 argument EXPR represents the expression "z" of type TYPE. */
9176 static tree
9177 fold_mult_zconjz (location_t loc, tree type, tree expr)
9179 tree itype = TREE_TYPE (type);
9180 tree rpart, ipart, tem;
9182 if (TREE_CODE (expr) == COMPLEX_EXPR)
9184 rpart = TREE_OPERAND (expr, 0);
9185 ipart = TREE_OPERAND (expr, 1);
9187 else if (TREE_CODE (expr) == COMPLEX_CST)
9189 rpart = TREE_REALPART (expr);
9190 ipart = TREE_IMAGPART (expr);
9192 else
9194 expr = save_expr (expr);
9195 rpart = fold_build1_loc (loc, REALPART_EXPR, itype, expr);
9196 ipart = fold_build1_loc (loc, IMAGPART_EXPR, itype, expr);
9199 rpart = save_expr (rpart);
9200 ipart = save_expr (ipart);
9201 tem = fold_build2_loc (loc, PLUS_EXPR, itype,
9202 fold_build2_loc (loc, MULT_EXPR, itype, rpart, rpart),
9203 fold_build2_loc (loc, MULT_EXPR, itype, ipart, ipart));
9204 return fold_build2_loc (loc, COMPLEX_EXPR, type, tem,
9205 build_zero_cst (itype));
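/* For z = a + b*i this builds z * conj(z) = (a + b*i)(a - b*i)
   = a*a + b*b with an explicitly zero imaginary part, matching the
   COMPLEX_EXPR constructed above (illustrative).  */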
9209 /* Subroutine of fold_binary. If P is the value of EXPR, computes
9210 power-of-two M and (arbitrary) N such that M divides (P-N). This condition
9211 guarantees that P and N have the same least significant log2(M) bits.
9212 N is not otherwise constrained. In particular, N is not normalized to
9213 0 <= N < M as is common. In general, the precise value of P is unknown.
9214 M is chosen as large as possible such that constant N can be determined.
9216 Returns M and sets *RESIDUE to N.
9218 If ALLOW_FUNC_ALIGN is true, do take functions' DECL_ALIGN_UNIT into
9219 account. This is not always possible due to PR 35705.
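/* Illustrative example: for &s.f, with s 8-byte aligned and f at
   byte offset 6, this returns M = 8 and sets *RESIDUE to 6, i.e.
   the pointer value is known to be 6 modulo 8.  */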
9222 static unsigned HOST_WIDE_INT
9223 get_pointer_modulus_and_residue (tree expr, unsigned HOST_WIDE_INT *residue,
9224 bool allow_func_align)
9226 enum tree_code code;
9228 *residue = 0;
9230 code = TREE_CODE (expr);
9231 if (code == ADDR_EXPR)
9233 expr = TREE_OPERAND (expr, 0);
9234 if (handled_component_p (expr))
9236 HOST_WIDE_INT bitsize, bitpos;
9237 tree offset;
9238 enum machine_mode mode;
9239 int unsignedp, volatilep;
9241 expr = get_inner_reference (expr, &bitsize, &bitpos, &offset,
9242 &mode, &unsignedp, &volatilep, false);
9243 *residue = bitpos / BITS_PER_UNIT;
9244 if (offset)
9246 if (TREE_CODE (offset) == INTEGER_CST)
9247 *residue += TREE_INT_CST_LOW (offset);
9248 else
9249 /* We don't handle more complicated offset expressions. */
9250 return 1;
9254 if (DECL_P (expr)
9255 && (allow_func_align || TREE_CODE (expr) != FUNCTION_DECL))
9256 return DECL_ALIGN_UNIT (expr);
9258 else if (code == POINTER_PLUS_EXPR)
9260 tree op0, op1;
9261 unsigned HOST_WIDE_INT modulus;
9262 enum tree_code inner_code;
9264 op0 = TREE_OPERAND (expr, 0);
9265 STRIP_NOPS (op0);
9266 modulus = get_pointer_modulus_and_residue (op0, residue,
9267 allow_func_align);
9269 op1 = TREE_OPERAND (expr, 1);
9270 STRIP_NOPS (op1);
9271 inner_code = TREE_CODE (op1);
9272 if (inner_code == INTEGER_CST)
9274 *residue += TREE_INT_CST_LOW (op1);
9275 return modulus;
9277 else if (inner_code == MULT_EXPR)
9279 op1 = TREE_OPERAND (op1, 1);
9280 if (TREE_CODE (op1) == INTEGER_CST)
9282 unsigned HOST_WIDE_INT align;
9284 /* Compute the greatest power-of-2 divisor of op1. */
9285 align = TREE_INT_CST_LOW (op1);
9286 align &= -align;
9288 /* If align is non-zero and less than *modulus, replace
9289 *modulus with align. If align is 0, then either op1 is 0
9290 or the greatest power-of-2 divisor of op1 doesn't fit in an
9291 unsigned HOST_WIDE_INT. In either case, no additional
9292 constraint is imposed. */
9293 if (align)
9294 modulus = MIN (modulus, align);
9296 return modulus;
9301 /* If we get here, we were unable to determine anything useful about the
9302 expression. */
9303 return 1;
9307 /* Fold a binary expression of code CODE and type TYPE with operands
9308 OP0 and OP1. LOC is the location of the resulting expression.
9309 Return the folded expression if folding is successful. Otherwise,
9310 return NULL_TREE. */
9312 tree
9313 fold_binary_loc (location_t loc,
9314 enum tree_code code, tree type, tree op0, tree op1)
9316 enum tree_code_class kind = TREE_CODE_CLASS (code);
9317 tree arg0, arg1, tem;
9318 tree t1 = NULL_TREE;
9319 bool strict_overflow_p;
9321 gcc_assert (IS_EXPR_CODE_CLASS (kind)
9322 && TREE_CODE_LENGTH (code) == 2
9323 && op0 != NULL_TREE
9324 && op1 != NULL_TREE);
9326 arg0 = op0;
9327 arg1 = op1;
9329 /* Strip any conversions that don't change the mode. This is
9330 safe for every expression, except for a comparison expression
9331 because its signedness is derived from its operands. So, in
9332 the latter case, only strip conversions that don't change the
9333 signedness. MIN_EXPR/MAX_EXPR also need signedness of arguments
9334 preserved.
9336 Note that this is done as an internal manipulation within the
9337 constant folder, in order to find the simplest representation
9338 of the arguments so that their form can be studied. In any
9339 case, the appropriate type conversions should be put back in
9340 the tree that will get out of the constant folder. */
9342 if (kind == tcc_comparison || code == MIN_EXPR || code == MAX_EXPR)
9344 STRIP_SIGN_NOPS (arg0);
9345 STRIP_SIGN_NOPS (arg1);
9347 else
9349 STRIP_NOPS (arg0);
9350 STRIP_NOPS (arg1);
9353 /* Note that TREE_CONSTANT isn't enough: static var addresses are
9354 constant but we can't do arithmetic on them. */
9355 if ((TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
9356 || (TREE_CODE (arg0) == REAL_CST && TREE_CODE (arg1) == REAL_CST)
9357 || (TREE_CODE (arg0) == FIXED_CST && TREE_CODE (arg1) == FIXED_CST)
9358 || (TREE_CODE (arg0) == FIXED_CST && TREE_CODE (arg1) == INTEGER_CST)
9359 || (TREE_CODE (arg0) == COMPLEX_CST && TREE_CODE (arg1) == COMPLEX_CST)
9360 || (TREE_CODE (arg0) == VECTOR_CST && TREE_CODE (arg1) == VECTOR_CST))
9362 if (kind == tcc_binary)
9364 /* Make sure type and arg0 have the same saturating flag. */
9365 gcc_assert (TYPE_SATURATING (type)
9366 == TYPE_SATURATING (TREE_TYPE (arg0)));
9367 tem = const_binop (code, arg0, arg1);
9369 else if (kind == tcc_comparison)
9370 tem = fold_relational_const (code, type, arg0, arg1);
9371 else
9372 tem = NULL_TREE;
9374 if (tem != NULL_TREE)
9376 if (TREE_TYPE (tem) != type)
9377 tem = fold_convert_loc (loc, type, tem);
9378 return tem;
9382 /* If this is a commutative operation, and ARG0 is a constant, move it
9383 to ARG1 to reduce the number of tests below. */
9384 if (commutative_tree_code (code)
9385 && tree_swap_operands_p (arg0, arg1, true))
9386 return fold_build2_loc (loc, code, type, op1, op0);
9388 /* ARG0 is the first operand of EXPR, and ARG1 is the second operand.
9390 First check for cases where an arithmetic operation is applied to a
9391 compound, conditional, or comparison operation. Push the arithmetic
9392 operation inside the compound or conditional to see if any folding
9393 can then be done. Convert comparison to conditional for this purpose.
9394 This also optimizes non-constant cases that used to be done in
9395 expand_expr.
9397 Before we do that, see if this is a BIT_AND_EXPR or a BIT_IOR_EXPR where
9398 one of the operands is a comparison and the other is a comparison, a
9399 BIT_AND_EXPR with the constant 1, or a truth value. In that case, the
9400 code below would make the expression more complex. Change it to a
9401 TRUTH_{AND,OR}_EXPR. Likewise, convert a similar NE_EXPR to
9402 TRUTH_XOR_EXPR and an EQ_EXPR to the inversion of a TRUTH_XOR_EXPR. */
9404 if ((code == BIT_AND_EXPR || code == BIT_IOR_EXPR
9405 || code == EQ_EXPR || code == NE_EXPR)
9406 && ((truth_value_p (TREE_CODE (arg0))
9407 && (truth_value_p (TREE_CODE (arg1))
9408 || (TREE_CODE (arg1) == BIT_AND_EXPR
9409 && integer_onep (TREE_OPERAND (arg1, 1)))))
9410 || (truth_value_p (TREE_CODE (arg1))
9411 && (truth_value_p (TREE_CODE (arg0))
9412 || (TREE_CODE (arg0) == BIT_AND_EXPR
9413 && integer_onep (TREE_OPERAND (arg0, 1)))))))
9415 tem = fold_build2_loc (loc, code == BIT_AND_EXPR ? TRUTH_AND_EXPR
9416 : code == BIT_IOR_EXPR ? TRUTH_OR_EXPR
9417 : TRUTH_XOR_EXPR,
9418 boolean_type_node,
9419 fold_convert_loc (loc, boolean_type_node, arg0),
9420 fold_convert_loc (loc, boolean_type_node, arg1));
9422 if (code == EQ_EXPR)
9423 tem = invert_truthvalue_loc (loc, tem);
9425 return fold_convert_loc (loc, type, tem);
9428 if (TREE_CODE_CLASS (code) == tcc_binary
9429 || TREE_CODE_CLASS (code) == tcc_comparison)
9431 if (TREE_CODE (arg0) == COMPOUND_EXPR)
9433 tem = fold_build2_loc (loc, code, type,
9434 fold_convert_loc (loc, TREE_TYPE (op0),
9435 TREE_OPERAND (arg0, 1)), op1);
9436 return build2_loc (loc, COMPOUND_EXPR, type, TREE_OPERAND (arg0, 0),
9437 tem);
9439 if (TREE_CODE (arg1) == COMPOUND_EXPR
9440 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
9442 tem = fold_build2_loc (loc, code, type, op0,
9443 fold_convert_loc (loc, TREE_TYPE (op1),
9444 TREE_OPERAND (arg1, 1)));
9445 return build2_loc (loc, COMPOUND_EXPR, type, TREE_OPERAND (arg1, 0),
9446 tem);
9449 if (TREE_CODE (arg0) == COND_EXPR || COMPARISON_CLASS_P (arg0))
9451 tem = fold_binary_op_with_conditional_arg (loc, code, type, op0, op1,
9452 arg0, arg1,
9453 /*cond_first_p=*/1);
9454 if (tem != NULL_TREE)
9455 return tem;
9458 if (TREE_CODE (arg1) == COND_EXPR || COMPARISON_CLASS_P (arg1))
9460 tem = fold_binary_op_with_conditional_arg (loc, code, type, op0, op1,
9461 arg1, arg0,
9462 /*cond_first_p=*/0);
9463 if (tem != NULL_TREE)
9464 return tem;
9468 switch (code)
9470 case MEM_REF:
9471 /* MEM[&MEM[p, CST1], CST2] -> MEM[p, CST1 + CST2]. */
9472 if (TREE_CODE (arg0) == ADDR_EXPR
9473 && TREE_CODE (TREE_OPERAND (arg0, 0)) == MEM_REF)
9475 tree iref = TREE_OPERAND (arg0, 0);
9476 return fold_build2 (MEM_REF, type,
9477 TREE_OPERAND (iref, 0),
9478 int_const_binop (PLUS_EXPR, arg1,
9479 TREE_OPERAND (iref, 1), 0));
9482 /* MEM[&a.b, CST2] -> MEM[&a, offsetof (a, b) + CST2]. */
9483 if (TREE_CODE (arg0) == ADDR_EXPR
9484 && handled_component_p (TREE_OPERAND (arg0, 0)))
9486 tree base;
9487 HOST_WIDE_INT coffset;
9488 base = get_addr_base_and_unit_offset (TREE_OPERAND (arg0, 0),
9489 &coffset);
9490 if (!base)
9491 return NULL_TREE;
9492 return fold_build2 (MEM_REF, type,
9493 build_fold_addr_expr (base),
9494 int_const_binop (PLUS_EXPR, arg1,
9495 size_int (coffset), 0));
9498 return NULL_TREE;
9500 case POINTER_PLUS_EXPR:
9501 /* 0 +p index -> (type)index */
9502 if (integer_zerop (arg0))
9503 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
9505 /* PTR +p 0 -> PTR */
9506 if (integer_zerop (arg1))
9507 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
9509 /* INT +p INT -> (PTR)(INT + INT). Stripping types allows for this. */
9510 if (INTEGRAL_TYPE_P (TREE_TYPE (arg1))
9511 && INTEGRAL_TYPE_P (TREE_TYPE (arg0)))
9512 return fold_convert_loc (loc, type,
9513 fold_build2_loc (loc, PLUS_EXPR, sizetype,
9514 fold_convert_loc (loc, sizetype,
9515 arg1),
9516 fold_convert_loc (loc, sizetype,
9517 arg0)));
9519 /* index +p PTR -> PTR +p index */
9520 if (POINTER_TYPE_P (TREE_TYPE (arg1))
9521 && INTEGRAL_TYPE_P (TREE_TYPE (arg0)))
9522 return fold_build2_loc (loc, POINTER_PLUS_EXPR, type,
9523 fold_convert_loc (loc, type, arg1),
9524 fold_convert_loc (loc, sizetype, arg0));
9526 /* (PTR +p B) +p A -> PTR +p (B + A) */
9527 if (TREE_CODE (arg0) == POINTER_PLUS_EXPR)
9529 tree inner;
9530 tree arg01 = fold_convert_loc (loc, sizetype, TREE_OPERAND (arg0, 1));
9531 tree arg00 = TREE_OPERAND (arg0, 0);
9532 inner = fold_build2_loc (loc, PLUS_EXPR, sizetype,
9533 arg01, fold_convert_loc (loc, sizetype, arg1));
9534 return fold_convert_loc (loc, type,
9535 fold_build2_loc (loc, POINTER_PLUS_EXPR,
9536 TREE_TYPE (arg00),
9537 arg00, inner));
9540 /* PTR_CST +p CST -> CST1 */
9541 if (TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
9542 return fold_build2_loc (loc, PLUS_EXPR, type, arg0,
9543 fold_convert_loc (loc, type, arg1));
9545 /* Try replacing &a[i1] +p c * i2 with &a[i1 + i2], if c is the step
9546 of the array. The loop optimizer sometimes produces this type of
9547 expression. */
9548 if (TREE_CODE (arg0) == ADDR_EXPR)
9550 tem = try_move_mult_to_index (loc, arg0,
9551 fold_convert_loc (loc, sizetype, arg1));
9552 if (tem)
9553 return fold_convert_loc (loc, type, tem);
9556 return NULL_TREE;
9558 case PLUS_EXPR:
9559 /* A + (-B) -> A - B */
9560 if (TREE_CODE (arg1) == NEGATE_EXPR)
9561 return fold_build2_loc (loc, MINUS_EXPR, type,
9562 fold_convert_loc (loc, type, arg0),
9563 fold_convert_loc (loc, type,
9564 TREE_OPERAND (arg1, 0)));
9565 /* (-A) + B -> B - A */
9566 if (TREE_CODE (arg0) == NEGATE_EXPR
9567 && reorder_operands_p (TREE_OPERAND (arg0, 0), arg1))
9568 return fold_build2_loc (loc, MINUS_EXPR, type,
9569 fold_convert_loc (loc, type, arg1),
9570 fold_convert_loc (loc, type,
9571 TREE_OPERAND (arg0, 0)));
9573 if (INTEGRAL_TYPE_P (type))
9575 /* Convert ~A + 1 to -A. */
9576 if (TREE_CODE (arg0) == BIT_NOT_EXPR
9577 && integer_onep (arg1))
9578 return fold_build1_loc (loc, NEGATE_EXPR, type,
9579 fold_convert_loc (loc, type,
9580 TREE_OPERAND (arg0, 0)));
9582 /* ~X + X is -1. */
9583 if (TREE_CODE (arg0) == BIT_NOT_EXPR
9584 && !TYPE_OVERFLOW_TRAPS (type))
9586 tree tem = TREE_OPERAND (arg0, 0);
9588 STRIP_NOPS (tem);
9589 if (operand_equal_p (tem, arg1, 0))
9591 t1 = build_int_cst_type (type, -1);
9592 return omit_one_operand_loc (loc, type, t1, arg1);
9596 /* X + ~X is -1. */
9597 if (TREE_CODE (arg1) == BIT_NOT_EXPR
9598 && !TYPE_OVERFLOW_TRAPS (type))
9600 tree tem = TREE_OPERAND (arg1, 0);
9602 STRIP_NOPS (tem);
9603 if (operand_equal_p (arg0, tem, 0))
9605 t1 = build_int_cst_type (type, -1);
9606 return omit_one_operand_loc (loc, type, t1, arg0);
9610 /* X + (X / CST) * -CST is X % CST. */
9611 if (TREE_CODE (arg1) == MULT_EXPR
9612 && TREE_CODE (TREE_OPERAND (arg1, 0)) == TRUNC_DIV_EXPR
9613 && operand_equal_p (arg0,
9614 TREE_OPERAND (TREE_OPERAND (arg1, 0), 0), 0))
9616 tree cst0 = TREE_OPERAND (TREE_OPERAND (arg1, 0), 1);
9617 tree cst1 = TREE_OPERAND (arg1, 1);
9618 tree sum = fold_binary_loc (loc, PLUS_EXPR, TREE_TYPE (cst1),
9619 cst1, cst0);
9620 if (sum && integer_zerop (sum))
9621 return fold_convert_loc (loc, type,
9622 fold_build2_loc (loc, TRUNC_MOD_EXPR,
9623 TREE_TYPE (arg0), arg0,
9624 cst0));
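/* E.g. "x + (x / 16) * -16" folds to "x % 16"; the transform fires
   only when the two constants cancel, here -16 + 16 == 0
   (illustrative).  */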
9628 /* Handle (A1 * C1) + (A2 * C2) with A1, A2 or C1, C2 being the
9629 same or one. Make sure type is not saturating.
9630 fold_plusminus_mult_expr will re-associate. */
9631 if ((TREE_CODE (arg0) == MULT_EXPR
9632 || TREE_CODE (arg1) == MULT_EXPR)
9633 && !TYPE_SATURATING (type)
9634 && (!FLOAT_TYPE_P (type) || flag_associative_math))
9636 tree tem = fold_plusminus_mult_expr (loc, code, type, arg0, arg1);
9637 if (tem)
9638 return tem;
9641 if (! FLOAT_TYPE_P (type))
9643 if (integer_zerop (arg1))
9644 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
9646 /* If we are adding two BIT_AND_EXPR's, both of which are and'ing
9647 with a constant, and the two constants have no bits in common,
9648 we should treat this as a BIT_IOR_EXPR since this may produce more
9649 simplifications. */
9650 if (TREE_CODE (arg0) == BIT_AND_EXPR
9651 && TREE_CODE (arg1) == BIT_AND_EXPR
9652 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
9653 && TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
9654 && integer_zerop (const_binop (BIT_AND_EXPR,
9655 TREE_OPERAND (arg0, 1),
9656 TREE_OPERAND (arg1, 1))))
9658 code = BIT_IOR_EXPR;
9659 goto bit_ior;
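/* E.g. "(x & 0xF0) + (y & 0x0F)" cannot carry between the disjoint
   masks, so it is retried as "(x & 0xF0) | (y & 0x0F)"
   (illustrative).  */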
9662 /* Reassociate (plus (plus (mult) (foo)) (mult)) as
9663 (plus (plus (mult) (mult)) (foo)) so that we can
9664 take advantage of the factoring cases below. */
9665 if (((TREE_CODE (arg0) == PLUS_EXPR
9666 || TREE_CODE (arg0) == MINUS_EXPR)
9667 && TREE_CODE (arg1) == MULT_EXPR)
9668 || ((TREE_CODE (arg1) == PLUS_EXPR
9669 || TREE_CODE (arg1) == MINUS_EXPR)
9670 && TREE_CODE (arg0) == MULT_EXPR))
9672 tree parg0, parg1, parg, marg;
9673 enum tree_code pcode;
9675 if (TREE_CODE (arg1) == MULT_EXPR)
9676 parg = arg0, marg = arg1;
9677 else
9678 parg = arg1, marg = arg0;
9679 pcode = TREE_CODE (parg);
9680 parg0 = TREE_OPERAND (parg, 0);
9681 parg1 = TREE_OPERAND (parg, 1);
9682 STRIP_NOPS (parg0);
9683 STRIP_NOPS (parg1);
9685 if (TREE_CODE (parg0) == MULT_EXPR
9686 && TREE_CODE (parg1) != MULT_EXPR)
9687 return fold_build2_loc (loc, pcode, type,
9688 fold_build2_loc (loc, PLUS_EXPR, type,
9689 fold_convert_loc (loc, type,
9690 parg0),
9691 fold_convert_loc (loc, type,
9692 marg)),
9693 fold_convert_loc (loc, type, parg1));
9694 if (TREE_CODE (parg0) != MULT_EXPR
9695 && TREE_CODE (parg1) == MULT_EXPR)
9696 return
9697 fold_build2_loc (loc, PLUS_EXPR, type,
9698 fold_convert_loc (loc, type, parg0),
9699 fold_build2_loc (loc, pcode, type,
9700 fold_convert_loc (loc, type, marg),
9701 fold_convert_loc (loc, type,
9702 parg1)));
9705 else
9707 /* See if ARG1 is zero and X + ARG1 reduces to X. */
9708 if (fold_real_zero_addition_p (TREE_TYPE (arg0), arg1, 0))
9709 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
9711 /* Likewise if the operands are reversed. */
9712 if (fold_real_zero_addition_p (TREE_TYPE (arg1), arg0, 0))
9713 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
9715 /* Convert X + -C into X - C. */
9716 if (TREE_CODE (arg1) == REAL_CST
9717 && REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg1)))
9719 tem = fold_negate_const (arg1, type);
9720 if (!TREE_OVERFLOW (arg1) || !flag_trapping_math)
9721 return fold_build2_loc (loc, MINUS_EXPR, type,
9722 fold_convert_loc (loc, type, arg0),
9723 fold_convert_loc (loc, type, tem));
9726 /* Fold __complex__ ( x, 0 ) + __complex__ ( 0, y )
9727 to __complex__ ( x, y ). This is not the same for SNaNs or
9728 if signed zeros are involved. */
9729 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
9730 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
9731 && COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0)))
9733 tree rtype = TREE_TYPE (TREE_TYPE (arg0));
9734 tree arg0r = fold_unary_loc (loc, REALPART_EXPR, rtype, arg0);
9735 tree arg0i = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg0);
9736 bool arg0rz = false, arg0iz = false;
9737 if ((arg0r && (arg0rz = real_zerop (arg0r)))
9738 || (arg0i && (arg0iz = real_zerop (arg0i))))
9740 tree arg1r = fold_unary_loc (loc, REALPART_EXPR, rtype, arg1);
9741 tree arg1i = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg1);
9742 if (arg0rz && arg1i && real_zerop (arg1i))
9744 tree rp = arg1r ? arg1r
9745 : build1 (REALPART_EXPR, rtype, arg1);
9746 tree ip = arg0i ? arg0i
9747 : build1 (IMAGPART_EXPR, rtype, arg0);
9748 return fold_build2_loc (loc, COMPLEX_EXPR, type, rp, ip);
9750 else if (arg0iz && arg1r && real_zerop (arg1r))
9752 tree rp = arg0r ? arg0r
9753 : build1 (REALPART_EXPR, rtype, arg0);
9754 tree ip = arg1i ? arg1i
9755 : build1 (IMAGPART_EXPR, rtype, arg1);
9756 return fold_build2_loc (loc, COMPLEX_EXPR, type, rp, ip);
9761 if (flag_unsafe_math_optimizations
9762 && (TREE_CODE (arg0) == RDIV_EXPR || TREE_CODE (arg0) == MULT_EXPR)
9763 && (TREE_CODE (arg1) == RDIV_EXPR || TREE_CODE (arg1) == MULT_EXPR)
9764 && (tem = distribute_real_division (loc, code, type, arg0, arg1)))
9765 return tem;
9767 /* Convert x+x into x*2.0. */
9768 if (operand_equal_p (arg0, arg1, 0)
9769 && SCALAR_FLOAT_TYPE_P (type))
9770 return fold_build2_loc (loc, MULT_EXPR, type, arg0,
9771 build_real (type, dconst2));
9773 /* Convert a + (b*c + d*e) into (a + b*c) + d*e.
9774 We associate floats only if the user has specified
9775 -fassociative-math. */
9776 if (flag_associative_math
9777 && TREE_CODE (arg1) == PLUS_EXPR
9778 && TREE_CODE (arg0) != MULT_EXPR)
9780 tree tree10 = TREE_OPERAND (arg1, 0);
9781 tree tree11 = TREE_OPERAND (arg1, 1);
9782 if (TREE_CODE (tree11) == MULT_EXPR
9783 && TREE_CODE (tree10) == MULT_EXPR)
9785 tree tree0;
9786 tree0 = fold_build2_loc (loc, PLUS_EXPR, type, arg0, tree10);
9787 return fold_build2_loc (loc, PLUS_EXPR, type, tree0, tree11);
9790 /* Convert (b*c + d*e) + a into b*c + (d*e + a).
9791 We associate floats only if the user has specified
9792 -fassociative-math. */
9793 if (flag_associative_math
9794 && TREE_CODE (arg0) == PLUS_EXPR
9795 && TREE_CODE (arg1) != MULT_EXPR)
9797 tree tree00 = TREE_OPERAND (arg0, 0);
9798 tree tree01 = TREE_OPERAND (arg0, 1);
9799 if (TREE_CODE (tree01) == MULT_EXPR
9800 && TREE_CODE (tree00) == MULT_EXPR)
9802 tree tree0;
9803 tree0 = fold_build2_loc (loc, PLUS_EXPR, type, tree01, arg1);
9804 return fold_build2_loc (loc, PLUS_EXPR, type, tree00, tree0);
9809 bit_rotate:
9810 /* (A << C1) + (A >> C2) if A is unsigned and C1+C2 is the size of A
9811 is a rotate of A by C1 bits. */
9812 /* (A << B) + (A >> (Z - B)) if A is unsigned and Z is the size of A
9813 is a rotate of A by B bits. */
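/* E.g. for a 32-bit unsigned x, "(x << 3) + (x >> 29)" becomes a
   left rotate of x by 3, and "(x << b) + (x >> (32 - b))" a left
   rotate by b; the BIT_IOR_EXPR forms that jump here fold the same
   way (illustrative).  */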
9815 enum tree_code code0, code1;
9816 tree rtype;
9817 code0 = TREE_CODE (arg0);
9818 code1 = TREE_CODE (arg1);
9819 if (((code0 == RSHIFT_EXPR && code1 == LSHIFT_EXPR)
9820 || (code1 == RSHIFT_EXPR && code0 == LSHIFT_EXPR))
9821 && operand_equal_p (TREE_OPERAND (arg0, 0),
9822 TREE_OPERAND (arg1, 0), 0)
9823 && (rtype = TREE_TYPE (TREE_OPERAND (arg0, 0)),
9824 TYPE_UNSIGNED (rtype))
9825 /* Only create rotates in complete modes. Other cases are not
9826 expanded properly. */
9827 && TYPE_PRECISION (rtype) == GET_MODE_PRECISION (TYPE_MODE (rtype)))
9829 tree tree01, tree11;
9830 enum tree_code code01, code11;
9832 tree01 = TREE_OPERAND (arg0, 1);
9833 tree11 = TREE_OPERAND (arg1, 1);
9834 STRIP_NOPS (tree01);
9835 STRIP_NOPS (tree11);
9836 code01 = TREE_CODE (tree01);
9837 code11 = TREE_CODE (tree11);
9838 if (code01 == INTEGER_CST
9839 && code11 == INTEGER_CST
9840 && TREE_INT_CST_HIGH (tree01) == 0
9841 && TREE_INT_CST_HIGH (tree11) == 0
9842 && ((TREE_INT_CST_LOW (tree01) + TREE_INT_CST_LOW (tree11))
9843 == TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg0, 0)))))
9845 tem = build2_loc (loc, LROTATE_EXPR,
9846 TREE_TYPE (TREE_OPERAND (arg0, 0)),
9847 TREE_OPERAND (arg0, 0),
9848 code0 == LSHIFT_EXPR ? tree01 : tree11);
9849 return fold_convert_loc (loc, type, tem);
9851 else if (code11 == MINUS_EXPR)
9853 tree tree110, tree111;
9854 tree110 = TREE_OPERAND (tree11, 0);
9855 tree111 = TREE_OPERAND (tree11, 1);
9856 STRIP_NOPS (tree110);
9857 STRIP_NOPS (tree111);
9858 if (TREE_CODE (tree110) == INTEGER_CST
9859 && 0 == compare_tree_int (tree110,
9860 TYPE_PRECISION
9861 (TREE_TYPE (TREE_OPERAND
9862 (arg0, 0))))
9863 && operand_equal_p (tree01, tree111, 0))
9864 return
9865 fold_convert_loc (loc, type,
9866 build2 ((code0 == LSHIFT_EXPR
9867 ? LROTATE_EXPR
9868 : RROTATE_EXPR),
9869 TREE_TYPE (TREE_OPERAND (arg0, 0)),
9870 TREE_OPERAND (arg0, 0), tree01));
9872 else if (code01 == MINUS_EXPR)
9874 tree tree010, tree011;
9875 tree010 = TREE_OPERAND (tree01, 0);
9876 tree011 = TREE_OPERAND (tree01, 1);
9877 STRIP_NOPS (tree010);
9878 STRIP_NOPS (tree011);
9879 if (TREE_CODE (tree010) == INTEGER_CST
9880 && 0 == compare_tree_int (tree010,
9881 TYPE_PRECISION
9882 (TREE_TYPE (TREE_OPERAND
9883 (arg0, 0))))
9884 && operand_equal_p (tree11, tree011, 0))
9885 return fold_convert_loc
9886 (loc, type,
9887 build2 ((code0 != LSHIFT_EXPR
9888 ? LROTATE_EXPR
9889 : RROTATE_EXPR),
9890 TREE_TYPE (TREE_OPERAND (arg0, 0)),
9891 TREE_OPERAND (arg0, 0), tree11));
9896 associate:
9897 /* In most languages, we can't associate operations on floats through
9898 parentheses. Rather than remember where the parentheses were, we
9899 don't associate floats at all, unless the user has specified
9900 -fassociative-math.
9901 And, we need to make sure type is not saturating. */
9903 if ((! FLOAT_TYPE_P (type) || flag_associative_math)
9904 && !TYPE_SATURATING (type))
9906 tree var0, con0, lit0, minus_lit0;
9907 tree var1, con1, lit1, minus_lit1;
9908 bool ok = true;
9910 /* Split both trees into variables, constants, and literals. Then
9911 associate each group together, the constants with literals,
9912 then the result with variables. This increases the chances of
9913 literals being recombined later and of generating relocatable
9914 expressions for the sum of a constant and literal. */
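/* E.g. (x + 1) + (y + 2) splits into variables {x, y} and literals
   {1, 2}, which recombine below as (x + y) + 3 (illustrative).  */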
9915 var0 = split_tree (arg0, code, &con0, &lit0, &minus_lit0, 0);
9916 var1 = split_tree (arg1, code, &con1, &lit1, &minus_lit1,
9917 code == MINUS_EXPR);
9919 /* Recombine MINUS_EXPR operands by using PLUS_EXPR. */
9920 if (code == MINUS_EXPR)
9921 code = PLUS_EXPR;
9923 /* With undefined overflow we can only associate constants with one
9924 variable, and constants whose association doesn't overflow. */
9925 if ((POINTER_TYPE_P (type) && POINTER_TYPE_OVERFLOW_UNDEFINED)
9926 || (INTEGRAL_TYPE_P (type) && !TYPE_OVERFLOW_WRAPS (type)))
9928 if (var0 && var1)
9930 tree tmp0 = var0;
9931 tree tmp1 = var1;
9933 if (TREE_CODE (tmp0) == NEGATE_EXPR)
9934 tmp0 = TREE_OPERAND (tmp0, 0);
9935 if (TREE_CODE (tmp1) == NEGATE_EXPR)
9936 tmp1 = TREE_OPERAND (tmp1, 0);
9937 /* The only case we can still associate with two variables
9938 is if they are the same, modulo negation. */
9939 if (!operand_equal_p (tmp0, tmp1, 0))
9940 ok = false;
9943 if (ok && lit0 && lit1)
9945 tree tmp0 = fold_convert (type, lit0);
9946 tree tmp1 = fold_convert (type, lit1);
9948 if (!TREE_OVERFLOW (tmp0) && !TREE_OVERFLOW (tmp1)
9949 && TREE_OVERFLOW (fold_build2 (code, type, tmp0, tmp1)))
9950 ok = false;
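/* E.g. for signed int, (x + INT_MAX) + 1 is left alone: combining
   the literals INT_MAX and 1 would itself overflow (illustrative).  */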
9954 /* Only do something if we found more than two objects. Otherwise,
9955 nothing has changed and we risk infinite recursion. */
9956 if (ok
9957 && (2 < ((var0 != 0) + (var1 != 0)
9958 + (con0 != 0) + (con1 != 0)
9959 + (lit0 != 0) + (lit1 != 0)
9960 + (minus_lit0 != 0) + (minus_lit1 != 0))))
9962 var0 = associate_trees (loc, var0, var1, code, type);
9963 con0 = associate_trees (loc, con0, con1, code, type);
9964 lit0 = associate_trees (loc, lit0, lit1, code, type);
9965 minus_lit0 = associate_trees (loc, minus_lit0, minus_lit1, code, type);
9967 /* Preserve the MINUS_EXPR if the negative part of the literal is
9968 greater than the positive part. Otherwise, the multiplicative
9969 folding code (i.e. extract_muldiv) may be fooled in case
9970 unsigned constants are subtracted, like in the following
9971 example: ((X*2 + 4) - 8U)/2. */
9972 if (minus_lit0 && lit0)
9974 if (TREE_CODE (lit0) == INTEGER_CST
9975 && TREE_CODE (minus_lit0) == INTEGER_CST
9976 && tree_int_cst_lt (lit0, minus_lit0))
9978 minus_lit0 = associate_trees (loc, minus_lit0, lit0,
9979 MINUS_EXPR, type);
9980 lit0 = 0;
9982 else
9984 lit0 = associate_trees (loc, lit0, minus_lit0,
9985 MINUS_EXPR, type);
9986 minus_lit0 = 0;
9989 if (minus_lit0)
9991 if (con0 == 0)
9992 return
9993 fold_convert_loc (loc, type,
9994 associate_trees (loc, var0, minus_lit0,
9995 MINUS_EXPR, type));
9996 else
9998 con0 = associate_trees (loc, con0, minus_lit0,
9999 MINUS_EXPR, type);
10000 return
10001 fold_convert_loc (loc, type,
10002 associate_trees (loc, var0, con0,
10003 PLUS_EXPR, type));
10007 con0 = associate_trees (loc, con0, lit0, code, type);
10008 return
10009 fold_convert_loc (loc, type, associate_trees (loc, var0, con0,
10010 code, type));
10014 return NULL_TREE;
10016 case MINUS_EXPR:
10017 /* Pointer simplifications for subtraction, simple reassociations. */
10018 if (POINTER_TYPE_P (TREE_TYPE (arg1)) && POINTER_TYPE_P (TREE_TYPE (arg0)))
10020 /* (PTR0 p+ A) - (PTR1 p+ B) -> (PTR0 - PTR1) + (A - B) */
10021 if (TREE_CODE (arg0) == POINTER_PLUS_EXPR
10022 && TREE_CODE (arg1) == POINTER_PLUS_EXPR)
10024 tree arg00 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
10025 tree arg01 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
10026 tree arg10 = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 0));
10027 tree arg11 = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 1));
10028 return fold_build2_loc (loc, PLUS_EXPR, type,
10029 fold_build2_loc (loc, MINUS_EXPR, type,
10030 arg00, arg10),
10031 fold_build2_loc (loc, MINUS_EXPR, type,
10032 arg01, arg11));
10034 /* (PTR0 p+ A) - PTR1 -> (PTR0 - PTR1) + A, assuming PTR0 - PTR1 simplifies. */
10035 else if (TREE_CODE (arg0) == POINTER_PLUS_EXPR)
10037 tree arg00 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
10038 tree arg01 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
10039 tree tmp = fold_binary_loc (loc, MINUS_EXPR, type, arg00,
10040 fold_convert_loc (loc, type, arg1));
10041 if (tmp)
10042 return fold_build2_loc (loc, PLUS_EXPR, type, tmp, arg01);
10045 /* A - (-B) -> A + B */
10046 if (TREE_CODE (arg1) == NEGATE_EXPR)
10047 return fold_build2_loc (loc, PLUS_EXPR, type, op0,
10048 fold_convert_loc (loc, type,
10049 TREE_OPERAND (arg1, 0)));
10050 /* (-A) - B -> (-B) - A where B is easily negated and we can swap. */
10051 if (TREE_CODE (arg0) == NEGATE_EXPR
10052 && (FLOAT_TYPE_P (type)
10053 || INTEGRAL_TYPE_P (type))
10054 && negate_expr_p (arg1)
10055 && reorder_operands_p (arg0, arg1))
10056 return fold_build2_loc (loc, MINUS_EXPR, type,
10057 fold_convert_loc (loc, type,
10058 negate_expr (arg1)),
10059 fold_convert_loc (loc, type,
10060 TREE_OPERAND (arg0, 0)));
10061 /* Convert -A - 1 to ~A. */
10062 if (INTEGRAL_TYPE_P (type)
10063 && TREE_CODE (arg0) == NEGATE_EXPR
10064 && integer_onep (arg1)
10065 && !TYPE_OVERFLOW_TRAPS (type))
10066 return fold_build1_loc (loc, BIT_NOT_EXPR, type,
10067 fold_convert_loc (loc, type,
10068 TREE_OPERAND (arg0, 0)));
10070 /* Convert -1 - A to ~A. */
10071 if (INTEGRAL_TYPE_P (type)
10072 && integer_all_onesp (arg0))
10073 return fold_build1_loc (loc, BIT_NOT_EXPR, type, op1);
10076 /* X - (X / CST) * CST is X % CST. */
10077 if (INTEGRAL_TYPE_P (type)
10078 && TREE_CODE (arg1) == MULT_EXPR
10079 && TREE_CODE (TREE_OPERAND (arg1, 0)) == TRUNC_DIV_EXPR
10080 && operand_equal_p (arg0,
10081 TREE_OPERAND (TREE_OPERAND (arg1, 0), 0), 0)
10082 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg1, 0), 1),
10083 TREE_OPERAND (arg1, 1), 0))
10084 return
10085 fold_convert_loc (loc, type,
10086 fold_build2_loc (loc, TRUNC_MOD_EXPR, TREE_TYPE (arg0),
10087 arg0, TREE_OPERAND (arg1, 1)));
10089 if (! FLOAT_TYPE_P (type))
10091 if (integer_zerop (arg0))
10092 return negate_expr (fold_convert_loc (loc, type, arg1));
10093 if (integer_zerop (arg1))
10094 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
10096 /* Fold A - (A & B) into ~B & A. */
10097 if (!TREE_SIDE_EFFECTS (arg0)
10098 && TREE_CODE (arg1) == BIT_AND_EXPR)
10100 if (operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0))
10102 tree arg10 = fold_convert_loc (loc, type,
10103 TREE_OPERAND (arg1, 0));
10104 return fold_build2_loc (loc, BIT_AND_EXPR, type,
10105 fold_build1_loc (loc, BIT_NOT_EXPR,
10106 type, arg10),
10107 fold_convert_loc (loc, type, arg0));
10109 if (operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
10111 tree arg11 = fold_convert_loc (loc,
10112 type, TREE_OPERAND (arg1, 1));
10113 return fold_build2_loc (loc, BIT_AND_EXPR, type,
10114 fold_build1_loc (loc, BIT_NOT_EXPR,
10115 type, arg11),
10116 fold_convert_loc (loc, type, arg0));
10120 /* Fold (A & ~B) - (A & B) into (A ^ B) - B, where B is
10121 any power of 2 minus 1. */
10122 if (TREE_CODE (arg0) == BIT_AND_EXPR
10123 && TREE_CODE (arg1) == BIT_AND_EXPR
10124 && operand_equal_p (TREE_OPERAND (arg0, 0),
10125 TREE_OPERAND (arg1, 0), 0))
10127 tree mask0 = TREE_OPERAND (arg0, 1);
10128 tree mask1 = TREE_OPERAND (arg1, 1);
10129 tree tem = fold_build1_loc (loc, BIT_NOT_EXPR, type, mask0);
10131 if (operand_equal_p (tem, mask1, 0))
10133 tem = fold_build2_loc (loc, BIT_XOR_EXPR, type,
10134 TREE_OPERAND (arg0, 0), mask1);
10135 return fold_build2_loc (loc, MINUS_EXPR, type, tem, mask1);
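/* E.g. with B = 7: (a & ~7) - (a & 7) == (a ^ 7) - 7, since XOR
   with a low-bit mask reflects the low bits around the mask
   (illustrative).  */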
10140 /* See if ARG1 is zero and X - ARG1 reduces to X. */
10141 else if (fold_real_zero_addition_p (TREE_TYPE (arg0), arg1, 1))
10142 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
10144 /* (ARG0 - ARG1) is the same as (-ARG1 + ARG0). So check whether
10145 ARG0 is zero and X + ARG0 reduces to X, since that would mean
10146 (-ARG1 + ARG0) reduces to -ARG1. */
10147 else if (fold_real_zero_addition_p (TREE_TYPE (arg1), arg0, 0))
10148 return negate_expr (fold_convert_loc (loc, type, arg1));
10150 /* Fold __complex__ ( x, 0 ) - __complex__ ( 0, y ) to
10151 __complex__ ( x, -y ). This is not the same for SNaNs or if
10152 signed zeros are involved. */
10153 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
10154 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
10155 && COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0)))
10157 tree rtype = TREE_TYPE (TREE_TYPE (arg0));
10158 tree arg0r = fold_unary_loc (loc, REALPART_EXPR, rtype, arg0);
10159 tree arg0i = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg0);
10160 bool arg0rz = false, arg0iz = false;
10161 if ((arg0r && (arg0rz = real_zerop (arg0r)))
10162 || (arg0i && (arg0iz = real_zerop (arg0i))))
10164 tree arg1r = fold_unary_loc (loc, REALPART_EXPR, rtype, arg1);
10165 tree arg1i = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg1);
10166 if (arg0rz && arg1i && real_zerop (arg1i))
10168 tree rp = fold_build1_loc (loc, NEGATE_EXPR, rtype,
10169 arg1r ? arg1r
10170 : build1 (REALPART_EXPR, rtype, arg1));
10171 tree ip = arg0i ? arg0i
10172 : build1 (IMAGPART_EXPR, rtype, arg0);
10173 return fold_build2_loc (loc, COMPLEX_EXPR, type, rp, ip);
10175 else if (arg0iz && arg1r && real_zerop (arg1r))
10177 tree rp = arg0r ? arg0r
10178 : build1 (REALPART_EXPR, rtype, arg0);
10179 tree ip = fold_build1_loc (loc, NEGATE_EXPR, rtype,
10180 arg1i ? arg1i
10181 : build1 (IMAGPART_EXPR, rtype, arg1));
10182 return fold_build2_loc (loc, COMPLEX_EXPR, type, rp, ip);
10187 /* Fold &x - &x. This can happen from &x.foo - &x.
10188 This is unsafe for certain floats even in non-IEEE formats.
10189 In IEEE, it is unsafe because it does wrong for NaNs.
10190 Also note that operand_equal_p is always false if an operand
10191 is volatile. */
10193 if ((!FLOAT_TYPE_P (type) || !HONOR_NANS (TYPE_MODE (type)))
10194 && operand_equal_p (arg0, arg1, 0))
10195 return build_zero_cst (type);
10197 /* A - B -> A + (-B) if B is easily negatable. */
10198 if (negate_expr_p (arg1)
10199 && ((FLOAT_TYPE_P (type)
10200 /* Avoid this transformation if B is a positive REAL_CST. */
10201 && (TREE_CODE (arg1) != REAL_CST
10202 || REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg1))))
10203 || INTEGRAL_TYPE_P (type)))
10204 return fold_build2_loc (loc, PLUS_EXPR, type,
10205 fold_convert_loc (loc, type, arg0),
10206 fold_convert_loc (loc, type,
10207 negate_expr (arg1)));
10209 /* Try folding difference of addresses. */
10211 HOST_WIDE_INT diff;
10213 if ((TREE_CODE (arg0) == ADDR_EXPR
10214 || TREE_CODE (arg1) == ADDR_EXPR)
10215 && ptr_difference_const (arg0, arg1, &diff))
10216 return build_int_cst_type (type, diff);
10219 /* Fold &a[i] - &a[j] to i-j. */
10220 if (TREE_CODE (arg0) == ADDR_EXPR
10221 && TREE_CODE (TREE_OPERAND (arg0, 0)) == ARRAY_REF
10222 && TREE_CODE (arg1) == ADDR_EXPR
10223 && TREE_CODE (TREE_OPERAND (arg1, 0)) == ARRAY_REF)
10225 tree aref0 = TREE_OPERAND (arg0, 0);
10226 tree aref1 = TREE_OPERAND (arg1, 0);
10227 if (operand_equal_p (TREE_OPERAND (aref0, 0),
10228 TREE_OPERAND (aref1, 0), 0))
10230 tree op0 = fold_convert_loc (loc, type, TREE_OPERAND (aref0, 1));
10231 tree op1 = fold_convert_loc (loc, type, TREE_OPERAND (aref1, 1));
10232 tree esz = array_ref_element_size (aref0);
10233 tree diff = build2 (MINUS_EXPR, type, op0, op1);
10234 return fold_build2_loc (loc, MULT_EXPR, type, diff,
10235 fold_convert_loc (loc, type, esz));
10240 if (FLOAT_TYPE_P (type)
10241 && flag_unsafe_math_optimizations
10242 && (TREE_CODE (arg0) == RDIV_EXPR || TREE_CODE (arg0) == MULT_EXPR)
10243 && (TREE_CODE (arg1) == RDIV_EXPR || TREE_CODE (arg1) == MULT_EXPR)
10244 && (tem = distribute_real_division (loc, code, type, arg0, arg1)))
10245 return tem;
10247 /* Handle (A1 * C1) - (A2 * C2) with A1, A2 or C1, C2 being the
10248 same or one. Make sure type is not saturating.
10249 fold_plusminus_mult_expr will re-associate. */
10250 if ((TREE_CODE (arg0) == MULT_EXPR
10251 || TREE_CODE (arg1) == MULT_EXPR)
10252 && !TYPE_SATURATING (type)
10253 && (!FLOAT_TYPE_P (type) || flag_associative_math))
10255 tree tem = fold_plusminus_mult_expr (loc, code, type, arg0, arg1);
10256 if (tem)
10257 return tem;
10260 goto associate;
10262 case MULT_EXPR:
10263 /* (-A) * (-B) -> A * B */
10264 if (TREE_CODE (arg0) == NEGATE_EXPR && negate_expr_p (arg1))
10265 return fold_build2_loc (loc, MULT_EXPR, type,
10266 fold_convert_loc (loc, type,
10267 TREE_OPERAND (arg0, 0)),
10268 fold_convert_loc (loc, type,
10269 negate_expr (arg1)));
10270 if (TREE_CODE (arg1) == NEGATE_EXPR && negate_expr_p (arg0))
10271 return fold_build2_loc (loc, MULT_EXPR, type,
10272 fold_convert_loc (loc, type,
10273 negate_expr (arg0)),
10274 fold_convert_loc (loc, type,
10275 TREE_OPERAND (arg1, 0)));
10277 if (! FLOAT_TYPE_P (type))
10279 if (integer_zerop (arg1))
10280 return omit_one_operand_loc (loc, type, arg1, arg0);
10281 if (integer_onep (arg1))
10282 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
10283 /* Transform x * -1 into -x. Make sure to do the negation
10284 on the original operand with conversions not stripped
10285 because we can only strip non-sign-changing conversions. */
10286 if (integer_all_onesp (arg1))
10287 return fold_convert_loc (loc, type, negate_expr (op0));
10288 /* Transform x * -C into -x * C if x is easily negatable. */
10289 if (TREE_CODE (arg1) == INTEGER_CST
10290 && tree_int_cst_sgn (arg1) == -1
10291 && negate_expr_p (arg0)
10292 && (tem = negate_expr (arg1)) != arg1
10293 && !TREE_OVERFLOW (tem))
10294 return fold_build2_loc (loc, MULT_EXPR, type,
10295 fold_convert_loc (loc, type,
10296 negate_expr (arg0)),
10297 tem);
10299 /* (a * (1 << b)) is (a << b) */
10300 if (TREE_CODE (arg1) == LSHIFT_EXPR
10301 && integer_onep (TREE_OPERAND (arg1, 0)))
10302 return fold_build2_loc (loc, LSHIFT_EXPR, type, op0,
10303 TREE_OPERAND (arg1, 1));
10304 if (TREE_CODE (arg0) == LSHIFT_EXPR
10305 && integer_onep (TREE_OPERAND (arg0, 0)))
10306 return fold_build2_loc (loc, LSHIFT_EXPR, type, op1,
10307 TREE_OPERAND (arg0, 1));
10309 /* (A + A) * C -> A * 2 * C */
10310 if (TREE_CODE (arg0) == PLUS_EXPR
10311 && TREE_CODE (arg1) == INTEGER_CST
10312 && operand_equal_p (TREE_OPERAND (arg0, 0),
10313 TREE_OPERAND (arg0, 1), 0))
10314 return fold_build2_loc (loc, MULT_EXPR, type,
10315 omit_one_operand_loc (loc, type,
10316 TREE_OPERAND (arg0, 0),
10317 TREE_OPERAND (arg0, 1)),
10318 fold_build2_loc (loc, MULT_EXPR, type,
10319 build_int_cst (type, 2) , arg1));
10321 strict_overflow_p = false;
10322 if (TREE_CODE (arg1) == INTEGER_CST
10323 && 0 != (tem = extract_muldiv (op0, arg1, code, NULL_TREE,
10324 &strict_overflow_p)))
10326 if (strict_overflow_p)
10327 fold_overflow_warning (("assuming signed overflow does not "
10328 "occur when simplifying "
10329 "multiplication"),
10330 WARN_STRICT_OVERFLOW_MISC);
10331 return fold_convert_loc (loc, type, tem);
10334 /* Optimize z * conj(z) for integer complex numbers. */
10335 if (TREE_CODE (arg0) == CONJ_EXPR
10336 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
10337 return fold_mult_zconjz (loc, type, arg1);
10338 if (TREE_CODE (arg1) == CONJ_EXPR
10339 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
10340 return fold_mult_zconjz (loc, type, arg0);
10342 else
10344 /* Maybe fold x * 0 to 0. The expressions aren't the same
10345 when x is NaN, since x * 0 is also NaN. Nor are they the
10346 same in modes with signed zeros, since multiplying a
10347 negative value by 0 gives -0, not +0. */
10348 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0)))
10349 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
10350 && real_zerop (arg1))
10351 return omit_one_operand_loc (loc, type, arg1, arg0);
10352 /* In IEEE floating point, x*1 is not equivalent to x for snans.
10353 Likewise for complex arithmetic with signed zeros. */
10354 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
10355 && (!HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
10356 || !COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0)))
10357 && real_onep (arg1))
10358 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
10360 /* Transform x * -1.0 into -x. */
10361 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
10362 && (!HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
10363 || !COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0)))
10364 && real_minus_onep (arg1))
10365 return fold_convert_loc (loc, type, negate_expr (arg0));
10367 /* Convert (C1/X)*C2 into (C1*C2)/X. This transformation may change
10368 the result for floating point types due to rounding, so it is applied
10369 only if -fassociative-math was specified. */
10370 if (flag_associative_math
10371 && TREE_CODE (arg0) == RDIV_EXPR
10372 && TREE_CODE (arg1) == REAL_CST
10373 && TREE_CODE (TREE_OPERAND (arg0, 0)) == REAL_CST)
10375 tree tem = const_binop (MULT_EXPR, TREE_OPERAND (arg0, 0),
10376 arg1);
10377 if (tem)
10378 return fold_build2_loc (loc, RDIV_EXPR, type, tem,
10379 TREE_OPERAND (arg0, 1));
10382 /* Strip sign operations from X in X*X, i.e. -Y*-Y -> Y*Y. */
10383 if (operand_equal_p (arg0, arg1, 0))
10385 tree tem = fold_strip_sign_ops (arg0);
10386 if (tem != NULL_TREE)
10388 tem = fold_convert_loc (loc, type, tem);
10389 return fold_build2_loc (loc, MULT_EXPR, type, tem, tem);
10393 /* Fold z * +-I to __complex__ (-+__imag z, +-__real z).
10394 This is not the same for NaNs or if signed zeros are
10395 involved. */
10396 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0)))
10397 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
10398 && COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0))
10399 && TREE_CODE (arg1) == COMPLEX_CST
10400 && real_zerop (TREE_REALPART (arg1)))
10402 tree rtype = TREE_TYPE (TREE_TYPE (arg0));
10403 if (real_onep (TREE_IMAGPART (arg1)))
10404 return
10405 fold_build2_loc (loc, COMPLEX_EXPR, type,
10406 negate_expr (fold_build1_loc (loc, IMAGPART_EXPR,
10407 rtype, arg0)),
10408 fold_build1_loc (loc, REALPART_EXPR, rtype, arg0));
10409 else if (real_minus_onep (TREE_IMAGPART (arg1)))
10410 return
10411 fold_build2_loc (loc, COMPLEX_EXPR, type,
10412 fold_build1_loc (loc, IMAGPART_EXPR, rtype, arg0),
10413 negate_expr (fold_build1_loc (loc, REALPART_EXPR,
10414 rtype, arg0)));
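/* E.g. (a + b*i) * i == -b + a*i and (a + b*i) * -i == b - a*i, so
   only the real and imaginary parts are swapped and negated
   (illustrative).  */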
10417 /* Optimize z * conj(z) for floating point complex numbers.
10418 Guarded by flag_unsafe_math_optimizations as non-finite
10419 imaginary components don't produce scalar results. */
10420 if (flag_unsafe_math_optimizations
10421 && TREE_CODE (arg0) == CONJ_EXPR
10422 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
10423 return fold_mult_zconjz (loc, type, arg1);
10424 if (flag_unsafe_math_optimizations
10425 && TREE_CODE (arg1) == CONJ_EXPR
10426 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
10427 return fold_mult_zconjz (loc, type, arg0);
10429 if (flag_unsafe_math_optimizations)
10431 enum built_in_function fcode0 = builtin_mathfn_code (arg0);
10432 enum built_in_function fcode1 = builtin_mathfn_code (arg1);
10434 /* Optimizations of root(...)*root(...). */
10435 if (fcode0 == fcode1 && BUILTIN_ROOT_P (fcode0))
10437 tree rootfn, arg;
10438 tree arg00 = CALL_EXPR_ARG (arg0, 0);
10439 tree arg10 = CALL_EXPR_ARG (arg1, 0);
10441 /* Optimize sqrt(x)*sqrt(x) as x. */
10442 if (BUILTIN_SQRT_P (fcode0)
10443 && operand_equal_p (arg00, arg10, 0)
10444 && ! HONOR_SNANS (TYPE_MODE (type)))
10445 return arg00;
10447 /* Optimize root(x)*root(y) as root(x*y). */
10448 rootfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
10449 arg = fold_build2_loc (loc, MULT_EXPR, type, arg00, arg10);
10450 return build_call_expr_loc (loc, rootfn, 1, arg);
10453 /* Optimize expN(x)*expN(y) as expN(x+y). */
10454 if (fcode0 == fcode1 && BUILTIN_EXPONENT_P (fcode0))
10456 tree expfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
10457 tree arg = fold_build2_loc (loc, PLUS_EXPR, type,
10458 CALL_EXPR_ARG (arg0, 0),
10459 CALL_EXPR_ARG (arg1, 0));
10460 return build_call_expr_loc (loc, expfn, 1, arg);
10463 /* Optimizations of pow(...)*pow(...). */
10464 if ((fcode0 == BUILT_IN_POW && fcode1 == BUILT_IN_POW)
10465 || (fcode0 == BUILT_IN_POWF && fcode1 == BUILT_IN_POWF)
10466 || (fcode0 == BUILT_IN_POWL && fcode1 == BUILT_IN_POWL))
10468 tree arg00 = CALL_EXPR_ARG (arg0, 0);
10469 tree arg01 = CALL_EXPR_ARG (arg0, 1);
10470 tree arg10 = CALL_EXPR_ARG (arg1, 0);
10471 tree arg11 = CALL_EXPR_ARG (arg1, 1);
10473 /* Optimize pow(x,y)*pow(z,y) as pow(x*z,y). */
10474 if (operand_equal_p (arg01, arg11, 0))
10476 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
10477 tree arg = fold_build2_loc (loc, MULT_EXPR, type,
10478 arg00, arg10);
10479 return build_call_expr_loc (loc, powfn, 2, arg, arg01);
10482 /* Optimize pow(x,y)*pow(x,z) as pow(x,y+z). */
10483 if (operand_equal_p (arg00, arg10, 0))
10485 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
10486 tree arg = fold_build2_loc (loc, PLUS_EXPR, type,
10487 arg01, arg11);
10488 return build_call_expr_loc (loc, powfn, 2, arg00, arg);
10492 /* Optimize tan(x)*cos(x) as sin(x). */
10493 if (((fcode0 == BUILT_IN_TAN && fcode1 == BUILT_IN_COS)
10494 || (fcode0 == BUILT_IN_TANF && fcode1 == BUILT_IN_COSF)
10495 || (fcode0 == BUILT_IN_TANL && fcode1 == BUILT_IN_COSL)
10496 || (fcode0 == BUILT_IN_COS && fcode1 == BUILT_IN_TAN)
10497 || (fcode0 == BUILT_IN_COSF && fcode1 == BUILT_IN_TANF)
10498 || (fcode0 == BUILT_IN_COSL && fcode1 == BUILT_IN_TANL))
10499 && operand_equal_p (CALL_EXPR_ARG (arg0, 0),
10500 CALL_EXPR_ARG (arg1, 0), 0))
10502 tree sinfn = mathfn_built_in (type, BUILT_IN_SIN);
10504 if (sinfn != NULL_TREE)
10505 return build_call_expr_loc (loc, sinfn, 1,
10506 CALL_EXPR_ARG (arg0, 0));
10509 /* Optimize x*pow(x,c) as pow(x,c+1). */
10510 if (fcode1 == BUILT_IN_POW
10511 || fcode1 == BUILT_IN_POWF
10512 || fcode1 == BUILT_IN_POWL)
10514 tree arg10 = CALL_EXPR_ARG (arg1, 0);
10515 tree arg11 = CALL_EXPR_ARG (arg1, 1);
10516 if (TREE_CODE (arg11) == REAL_CST
10517 && !TREE_OVERFLOW (arg11)
10518 && operand_equal_p (arg0, arg10, 0))
10520 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg1), 0);
10521 REAL_VALUE_TYPE c;
10522 tree arg;
10524 c = TREE_REAL_CST (arg11);
10525 real_arithmetic (&c, PLUS_EXPR, &c, &dconst1);
10526 arg = build_real (type, c);
10527 return build_call_expr_loc (loc, powfn, 2, arg0, arg);
10531 /* Optimize pow(x,c)*x as pow(x,c+1). */
10532 if (fcode0 == BUILT_IN_POW
10533 || fcode0 == BUILT_IN_POWF
10534 || fcode0 == BUILT_IN_POWL)
10536 tree arg00 = CALL_EXPR_ARG (arg0, 0);
10537 tree arg01 = CALL_EXPR_ARG (arg0, 1);
10538 if (TREE_CODE (arg01) == REAL_CST
10539 && !TREE_OVERFLOW (arg01)
10540 && operand_equal_p (arg1, arg00, 0))
10542 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
10543 REAL_VALUE_TYPE c;
10544 tree arg;
10546 c = TREE_REAL_CST (arg01);
10547 real_arithmetic (&c, PLUS_EXPR, &c, &dconst1);
10548 arg = build_real (type, c);
10549 return build_call_expr_loc (loc, powfn, 2, arg1, arg);
10553 /* Optimize x*x as pow(x,2.0), which is expanded as x*x. */
10554 if (optimize_function_for_speed_p (cfun)
10555 && operand_equal_p (arg0, arg1, 0))
10557 tree powfn = mathfn_built_in (type, BUILT_IN_POW);
10559 if (powfn)
10561 tree arg = build_real (type, dconst2);
10562 return build_call_expr_loc (loc, powfn, 2, arg0, arg);
10567 goto associate;
10569 case BIT_IOR_EXPR:
10570 bit_ior:
10571 if (integer_all_onesp (arg1))
10572 return omit_one_operand_loc (loc, type, arg1, arg0);
10573 if (integer_zerop (arg1))
10574 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
10575 if (operand_equal_p (arg0, arg1, 0))
10576 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
10578 /* ~X | X is -1. */
10579 if (TREE_CODE (arg0) == BIT_NOT_EXPR
10580 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
10582 t1 = build_zero_cst (type);
10583 t1 = fold_unary_loc (loc, BIT_NOT_EXPR, type, t1);
10584 return omit_one_operand_loc (loc, type, t1, arg1);
10587 /* X | ~X is -1. */
10588 if (TREE_CODE (arg1) == BIT_NOT_EXPR
10589 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
10591 t1 = build_zero_cst (type);
10592 t1 = fold_unary_loc (loc, BIT_NOT_EXPR, type, t1);
10593 return omit_one_operand_loc (loc, type, t1, arg0);
10596 /* Canonicalize (X & C1) | C2. */
10597 if (TREE_CODE (arg0) == BIT_AND_EXPR
10598 && TREE_CODE (arg1) == INTEGER_CST
10599 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
10601 unsigned HOST_WIDE_INT hi1, lo1, hi2, lo2, hi3, lo3, mlo, mhi;
10602 int width = TYPE_PRECISION (type), w;
10603 hi1 = TREE_INT_CST_HIGH (TREE_OPERAND (arg0, 1));
10604 lo1 = TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1));
10605 hi2 = TREE_INT_CST_HIGH (arg1);
10606 lo2 = TREE_INT_CST_LOW (arg1);
10608 /* If (C1&C2) == C1, then (X&C1)|C2 becomes (X,C2). */
10609 if ((hi1 & hi2) == hi1 && (lo1 & lo2) == lo1)
10610 return omit_one_operand_loc (loc, type, arg1,
10611 TREE_OPERAND (arg0, 0));
10613 if (width > HOST_BITS_PER_WIDE_INT)
10615 mhi = (unsigned HOST_WIDE_INT) -1
10616 >> (2 * HOST_BITS_PER_WIDE_INT - width);
10617 mlo = -1;
10619 else
10621 mhi = 0;
10622 mlo = (unsigned HOST_WIDE_INT) -1
10623 >> (HOST_BITS_PER_WIDE_INT - width);
10626 /* If (C1|C2) == ~0 then (X&C1)|C2 becomes X|C2. */
10627 if ((~(hi1 | hi2) & mhi) == 0 && (~(lo1 | lo2) & mlo) == 0)
10628 return fold_build2_loc (loc, BIT_IOR_EXPR, type,
10629 TREE_OPERAND (arg0, 0), arg1);
10631 /* Minimize the number of bits set in C1, i.e. C1 := C1 & ~C2,
10632 unless (C1 & ~C2) | (C2 & C3) for some C3 is a mask of some
10633 mode which allows further optimizations. */
10634 hi1 &= mhi;
10635 lo1 &= mlo;
10636 hi2 &= mhi;
10637 lo2 &= mlo;
10638 hi3 = hi1 & ~hi2;
10639 lo3 = lo1 & ~lo2;
10640 for (w = BITS_PER_UNIT;
10641 w <= width && w <= HOST_BITS_PER_WIDE_INT;
10642 w <<= 1)
10644 unsigned HOST_WIDE_INT mask
10645 = (unsigned HOST_WIDE_INT) -1 >> (HOST_BITS_PER_WIDE_INT - w);
10646 if (((lo1 | lo2) & mask) == mask
10647 && (lo1 & ~mask) == 0 && hi1 == 0)
10649 hi3 = 0;
10650 lo3 = mask;
10651 break;
10654 if (hi3 != hi1 || lo3 != lo1)
10655 return fold_build2_loc (loc, BIT_IOR_EXPR, type,
10656 fold_build2_loc (loc, BIT_AND_EXPR, type,
10657 TREE_OPERAND (arg0, 0),
10658 build_int_cst_wide (type,
10659 lo3, hi3)),
10660 arg1);
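/* Editorial illustration, not part of the original source: for a
   32-bit unsigned x, (x & 0x0f0f) | 0x0f00 is canonicalized to
   (x & 0x000f) | 0x0f00, while (x & 0xff) | 0x0f is left alone,
   because shrinking C1 to 0xf0 would destroy the QImode mask 0xff
   that the loop above checks for.  */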
10663 /* (X & Y) | Y is (X, Y). */
10664 if (TREE_CODE (arg0) == BIT_AND_EXPR
10665 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
10666 return omit_one_operand_loc (loc, type, arg1, TREE_OPERAND (arg0, 0));
10667 /* (X & Y) | X is (Y, X). */
10668 if (TREE_CODE (arg0) == BIT_AND_EXPR
10669 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
10670 && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
10671 return omit_one_operand_loc (loc, type, arg1, TREE_OPERAND (arg0, 1));
10672 /* X | (X & Y) is (Y, X). */
10673 if (TREE_CODE (arg1) == BIT_AND_EXPR
10674 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0)
10675 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 1)))
10676 return omit_one_operand_loc (loc, type, arg0, TREE_OPERAND (arg1, 1));
10677 /* X | (Y & X) is (Y, X). */
10678 if (TREE_CODE (arg1) == BIT_AND_EXPR
10679 && operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0)
10680 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
10681 return omit_one_operand_loc (loc, type, arg0, TREE_OPERAND (arg1, 0));
10683 t1 = distribute_bit_expr (loc, code, type, arg0, arg1);
10684 if (t1 != NULL_TREE)
10685 return t1;
10687 /* Convert (or (not arg0) (not arg1)) to (not (and (arg0) (arg1))).
10689 This results in more efficient code for machines without a NAND
10690 instruction. Combine will canonicalize to the first form
10691 which will allow use of NAND instructions provided by the
10692 backend if they exist. */
10693 if (TREE_CODE (arg0) == BIT_NOT_EXPR
10694 && TREE_CODE (arg1) == BIT_NOT_EXPR)
10696 return
10697 fold_build1_loc (loc, BIT_NOT_EXPR, type,
10698 build2 (BIT_AND_EXPR, type,
10699 fold_convert_loc (loc, type,
10700 TREE_OPERAND (arg0, 0)),
10701 fold_convert_loc (loc, type,
10702 TREE_OPERAND (arg1, 0))));
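/* Editorial illustration, not part of the original source: ~a | ~b
   is rebuilt here as ~(a & b), so a target NAND pattern can match
   the single AND feeding the NOT.  */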
10705 /* See if this can be simplified into a rotate first. If that
10706 is unsuccessful, continue in the association code. */
10707 goto bit_rotate;
10709 case BIT_XOR_EXPR:
10710 if (integer_zerop (arg1))
10711 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
10712 if (integer_all_onesp (arg1))
10713 return fold_build1_loc (loc, BIT_NOT_EXPR, type, op0);
10714 if (operand_equal_p (arg0, arg1, 0))
10715 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
10717 /* ~X ^ X is -1. */
10718 if (TREE_CODE (arg0) == BIT_NOT_EXPR
10719 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
10721 t1 = build_zero_cst (type);
10722 t1 = fold_unary_loc (loc, BIT_NOT_EXPR, type, t1);
10723 return omit_one_operand_loc (loc, type, t1, arg1);
10726 /* X ^ ~X is -1. */
10727 if (TREE_CODE (arg1) == BIT_NOT_EXPR
10728 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
10730 t1 = build_zero_cst (type);
10731 t1 = fold_unary_loc (loc, BIT_NOT_EXPR, type, t1);
10732 return omit_one_operand_loc (loc, type, t1, arg0);
10735 /* If we are XORing two BIT_AND_EXPR's, both of which are and'ing
10736 with a constant, and the two constants have no bits in common,
10737 we should treat this as a BIT_IOR_EXPR since this may produce more
10738 simplifications. */
10739 if (TREE_CODE (arg0) == BIT_AND_EXPR
10740 && TREE_CODE (arg1) == BIT_AND_EXPR
10741 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
10742 && TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
10743 && integer_zerop (const_binop (BIT_AND_EXPR,
10744 TREE_OPERAND (arg0, 1),
10745 TREE_OPERAND (arg1, 1))))
10747 code = BIT_IOR_EXPR;
10748 goto bit_ior;
10751 /* (X | Y) ^ X -> Y & ~X.  */
10752 if (TREE_CODE (arg0) == BIT_IOR_EXPR
10753 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
10755 tree t2 = TREE_OPERAND (arg0, 1);
10756 t1 = fold_build1_loc (loc, BIT_NOT_EXPR, TREE_TYPE (arg1),
10757 arg1);
10758 t1 = fold_build2_loc (loc, BIT_AND_EXPR, type,
10759 fold_convert_loc (loc, type, t2),
10760 fold_convert_loc (loc, type, t1));
10761 return t1;
10764 /* (Y | X) ^ X -> Y & ~X.  */
10765 if (TREE_CODE (arg0) == BIT_IOR_EXPR
10766 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
10768 tree t2 = TREE_OPERAND (arg0, 0);
10769 t1 = fold_build1_loc (loc, BIT_NOT_EXPR, TREE_TYPE (arg1),
10770 arg1);
10771 t1 = fold_build2_loc (loc, BIT_AND_EXPR, type,
10772 fold_convert_loc (loc, type, t2),
10773 fold_convert_loc (loc, type, t1));
10774 return t1;
10777 /* X ^ (X | Y) -> Y & ~X.  */
10778 if (TREE_CODE (arg1) == BIT_IOR_EXPR
10779 && operand_equal_p (TREE_OPERAND (arg1, 0), arg0, 0))
10781 tree t2 = TREE_OPERAND (arg1, 1);
10782 t1 = fold_build1_loc (loc, BIT_NOT_EXPR, TREE_TYPE (arg0),
10783 arg0);
10784 t1 = fold_build2_loc (loc, BIT_AND_EXPR, type,
10785 fold_convert_loc (loc, type, t2),
10786 fold_convert_loc (loc, type, t1));
10787 return t1;
10790 /* X ^ (Y | X) -> Y & ~X.  */
10791 if (TREE_CODE (arg1) == BIT_IOR_EXPR
10792 && operand_equal_p (TREE_OPERAND (arg1, 1), arg0, 0))
10794 tree t2 = TREE_OPERAND (arg1, 0);
10795 t1 = fold_build1_loc (loc, BIT_NOT_EXPR, TREE_TYPE (arg0),
10796 arg0);
10797 t1 = fold_build2_loc (loc, BIT_AND_EXPR, type,
10798 fold_convert_loc (loc, type, t2),
10799 fold_convert_loc (loc, type, t1));
10800 return t1;
10803 /* Convert ~X ^ ~Y to X ^ Y. */
10804 if (TREE_CODE (arg0) == BIT_NOT_EXPR
10805 && TREE_CODE (arg1) == BIT_NOT_EXPR)
10806 return fold_build2_loc (loc, code, type,
10807 fold_convert_loc (loc, type,
10808 TREE_OPERAND (arg0, 0)),
10809 fold_convert_loc (loc, type,
10810 TREE_OPERAND (arg1, 0)));
10812 /* Convert ~X ^ C to X ^ ~C. */
10813 if (TREE_CODE (arg0) == BIT_NOT_EXPR
10814 && TREE_CODE (arg1) == INTEGER_CST)
10815 return fold_build2_loc (loc, code, type,
10816 fold_convert_loc (loc, type,
10817 TREE_OPERAND (arg0, 0)),
10818 fold_build1_loc (loc, BIT_NOT_EXPR, type, arg1));
10820 /* Fold (X & 1) ^ 1 as (X & 1) == 0. */
10821 if (TREE_CODE (arg0) == BIT_AND_EXPR
10822 && integer_onep (TREE_OPERAND (arg0, 1))
10823 && integer_onep (arg1))
10824 return fold_build2_loc (loc, EQ_EXPR, type, arg0,
10825 build_int_cst (TREE_TYPE (arg0), 0));
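/* Editorial illustration, not part of the original source: the fold
   above turns the parity flip (x & 1) ^ 1 into the equality test
   (x & 1) == 0.  */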
10827 /* Fold (X & Y) ^ Y as ~X & Y. */
10828 if (TREE_CODE (arg0) == BIT_AND_EXPR
10829 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
10831 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
10832 return fold_build2_loc (loc, BIT_AND_EXPR, type,
10833 fold_build1_loc (loc, BIT_NOT_EXPR, type, tem),
10834 fold_convert_loc (loc, type, arg1));
10836 /* Fold (X & Y) ^ X as ~Y & X. */
10837 if (TREE_CODE (arg0) == BIT_AND_EXPR
10838 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
10839 && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
10841 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
10842 return fold_build2_loc (loc, BIT_AND_EXPR, type,
10843 fold_build1_loc (loc, BIT_NOT_EXPR, type, tem),
10844 fold_convert_loc (loc, type, arg1));
10846 /* Fold X ^ (X & Y) as X & ~Y. */
10847 if (TREE_CODE (arg1) == BIT_AND_EXPR
10848 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
10850 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 1));
10851 return fold_build2_loc (loc, BIT_AND_EXPR, type,
10852 fold_convert_loc (loc, type, arg0),
10853 fold_build1_loc (loc, BIT_NOT_EXPR, type, tem));
10855 /* Fold X ^ (Y & X) as ~Y & X. */
10856 if (TREE_CODE (arg1) == BIT_AND_EXPR
10857 && operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0)
10858 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
10860 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 0));
10861 return fold_build2_loc (loc, BIT_AND_EXPR, type,
10862 fold_build1_loc (loc, BIT_NOT_EXPR, type, tem),
10863 fold_convert_loc (loc, type, arg0));
10866 /* See if this can be simplified into a rotate first. If that
10867 is unsuccessful, continue in the association code. */
10868 goto bit_rotate;
10870 case BIT_AND_EXPR:
10871 if (integer_all_onesp (arg1))
10872 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
10873 if (integer_zerop (arg1))
10874 return omit_one_operand_loc (loc, type, arg1, arg0);
10875 if (operand_equal_p (arg0, arg1, 0))
10876 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
10878 /* ~X & X is always zero. */
10879 if (TREE_CODE (arg0) == BIT_NOT_EXPR
10880 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
10881 return omit_one_operand_loc (loc, type, integer_zero_node, arg1);
10883 /* X & ~X is always zero. */
10884 if (TREE_CODE (arg1) == BIT_NOT_EXPR
10885 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
10886 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
10888 /* Canonicalize (X | C1) & C2 as (X & C2) | (C1 & C2). */
10889 if (TREE_CODE (arg0) == BIT_IOR_EXPR
10890 && TREE_CODE (arg1) == INTEGER_CST
10891 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
10893 tree tmp1 = fold_convert_loc (loc, type, arg1);
10894 tree tmp2 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
10895 tree tmp3 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
10896 tmp2 = fold_build2_loc (loc, BIT_AND_EXPR, type, tmp2, tmp1);
10897 tmp3 = fold_build2_loc (loc, BIT_AND_EXPR, type, tmp3, tmp1);
10898 return
10899 fold_convert_loc (loc, type,
10900 fold_build2_loc (loc, BIT_IOR_EXPR,
10901 type, tmp2, tmp3));
10904 /* (X | Y) & Y is (X, Y). */
10905 if (TREE_CODE (arg0) == BIT_IOR_EXPR
10906 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
10907 return omit_one_operand_loc (loc, type, arg1, TREE_OPERAND (arg0, 0));
10908 /* (X | Y) & X is (Y, X). */
10909 if (TREE_CODE (arg0) == BIT_IOR_EXPR
10910 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
10911 && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
10912 return omit_one_operand_loc (loc, type, arg1, TREE_OPERAND (arg0, 1));
10913 /* X & (X | Y) is (Y, X). */
10914 if (TREE_CODE (arg1) == BIT_IOR_EXPR
10915 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0)
10916 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 1)))
10917 return omit_one_operand_loc (loc, type, arg0, TREE_OPERAND (arg1, 1));
10918 /* X & (Y | X) is (Y, X). */
10919 if (TREE_CODE (arg1) == BIT_IOR_EXPR
10920 && operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0)
10921 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
10922 return omit_one_operand_loc (loc, type, arg0, TREE_OPERAND (arg1, 0));
10924 /* Fold (X ^ 1) & 1 as (X & 1) == 0. */
10925 if (TREE_CODE (arg0) == BIT_XOR_EXPR
10926 && integer_onep (TREE_OPERAND (arg0, 1))
10927 && integer_onep (arg1))
10929 tem = TREE_OPERAND (arg0, 0);
10930 return fold_build2_loc (loc, EQ_EXPR, type,
10931 fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (tem), tem,
10932 build_int_cst (TREE_TYPE (tem), 1)),
10933 build_int_cst (TREE_TYPE (tem), 0));
10935 /* Fold ~X & 1 as (X & 1) == 0. */
10936 if (TREE_CODE (arg0) == BIT_NOT_EXPR
10937 && integer_onep (arg1))
10939 tem = TREE_OPERAND (arg0, 0);
10940 return fold_build2_loc (loc, EQ_EXPR, type,
10941 fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (tem), tem,
10942 build_int_cst (TREE_TYPE (tem), 1)),
10943 build_int_cst (TREE_TYPE (tem), 0));
10946 /* Fold (X ^ Y) & Y as ~X & Y. */
10947 if (TREE_CODE (arg0) == BIT_XOR_EXPR
10948 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
10950 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
10951 return fold_build2_loc (loc, BIT_AND_EXPR, type,
10952 fold_build1_loc (loc, BIT_NOT_EXPR, type, tem),
10953 fold_convert_loc (loc, type, arg1));
10955 /* Fold (X ^ Y) & X as ~Y & X. */
10956 if (TREE_CODE (arg0) == BIT_XOR_EXPR
10957 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
10958 && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
10960 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
10961 return fold_build2_loc (loc, BIT_AND_EXPR, type,
10962 fold_build1_loc (loc, BIT_NOT_EXPR, type, tem),
10963 fold_convert_loc (loc, type, arg1));
10965 /* Fold X & (X ^ Y) as X & ~Y. */
10966 if (TREE_CODE (arg1) == BIT_XOR_EXPR
10967 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
10969 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 1));
10970 return fold_build2_loc (loc, BIT_AND_EXPR, type,
10971 fold_convert_loc (loc, type, arg0),
10972 fold_build1_loc (loc, BIT_NOT_EXPR, type, tem));
10974 /* Fold X & (Y ^ X) as ~Y & X. */
10975 if (TREE_CODE (arg1) == BIT_XOR_EXPR
10976 && operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0)
10977 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
10979 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 0));
10980 return fold_build2_loc (loc, BIT_AND_EXPR, type,
10981 fold_build1_loc (loc, BIT_NOT_EXPR, type, tem),
10982 fold_convert_loc (loc, type, arg0));
10985 /* For constants M and N, if M == (1LL << cst) - 1 && (N & M) == M,
10986 ((A & N) + B) & M -> (A + B) & M
10987 Similarly if (N & M) == 0,
10988 ((A | N) + B) & M -> (A + B) & M
10989 and for - instead of + (or unary - instead of +)
10990 and/or ^ instead of |.
10991 If B is constant and (B & M) == 0, fold into A & M. */
10992 if (host_integerp (arg1, 1))
10994 unsigned HOST_WIDE_INT cst1 = tree_low_cst (arg1, 1);
10995 if (~cst1 && (cst1 & (cst1 + 1)) == 0
10996 && INTEGRAL_TYPE_P (TREE_TYPE (arg0))
10997 && (TREE_CODE (arg0) == PLUS_EXPR
10998 || TREE_CODE (arg0) == MINUS_EXPR
10999 || TREE_CODE (arg0) == NEGATE_EXPR)
11000 && (TYPE_OVERFLOW_WRAPS (TREE_TYPE (arg0))
11001 || TREE_CODE (TREE_TYPE (arg0)) == INTEGER_TYPE))
11003 tree pmop[2];
11004 int which = 0;
11005 unsigned HOST_WIDE_INT cst0;
11007 /* Now we know that arg0 is (C + D) or (C - D) or
11008 -C and arg1 (M) is == (1LL << cst) - 1.
11009 Store C into PMOP[0] and D into PMOP[1]. */
11010 pmop[0] = TREE_OPERAND (arg0, 0);
11011 pmop[1] = NULL;
11012 if (TREE_CODE (arg0) != NEGATE_EXPR)
11014 pmop[1] = TREE_OPERAND (arg0, 1);
11015 which = 1;
11018 if (!host_integerp (TYPE_MAX_VALUE (TREE_TYPE (arg0)), 1)
11019 || (tree_low_cst (TYPE_MAX_VALUE (TREE_TYPE (arg0)), 1)
11020 & cst1) != cst1)
11021 which = -1;
11023 for (; which >= 0; which--)
11024 switch (TREE_CODE (pmop[which]))
11026 case BIT_AND_EXPR:
11027 case BIT_IOR_EXPR:
11028 case BIT_XOR_EXPR:
11029 if (TREE_CODE (TREE_OPERAND (pmop[which], 1))
11030 != INTEGER_CST)
11031 break;
11032 /* tree_low_cst is not used, because we don't care about
11033 the upper bits. */
11034 cst0 = TREE_INT_CST_LOW (TREE_OPERAND (pmop[which], 1));
11035 cst0 &= cst1;
11036 if (TREE_CODE (pmop[which]) == BIT_AND_EXPR)
11038 if (cst0 != cst1)
11039 break;
11041 else if (cst0 != 0)
11042 break;
11043 /* If C or D is of the form (A & N) where
11044 (N & M) == M, or of the form (A | N) or
11045 (A ^ N) where (N & M) == 0, replace it with A. */
11046 pmop[which] = TREE_OPERAND (pmop[which], 0);
11047 break;
11048 case INTEGER_CST:
11049 /* If C or D is a N where (N & M) == 0, it can be
11050 omitted (assumed 0). */
11051 if ((TREE_CODE (arg0) == PLUS_EXPR
11052 || (TREE_CODE (arg0) == MINUS_EXPR && which == 0))
11053 && (TREE_INT_CST_LOW (pmop[which]) & cst1) == 0)
11054 pmop[which] = NULL;
11055 break;
11056 default:
11057 break;
11060 /* Only build anything new if we optimized one or both arguments
11061 above. */
11062 if (pmop[0] != TREE_OPERAND (arg0, 0)
11063 || (TREE_CODE (arg0) != NEGATE_EXPR
11064 && pmop[1] != TREE_OPERAND (arg0, 1)))
11066 tree utype = TREE_TYPE (arg0);
11067 if (! TYPE_OVERFLOW_WRAPS (TREE_TYPE (arg0)))
11069 /* Perform the operations in a type that has defined
11070 overflow behavior. */
11071 utype = unsigned_type_for (TREE_TYPE (arg0));
11072 if (pmop[0] != NULL)
11073 pmop[0] = fold_convert_loc (loc, utype, pmop[0]);
11074 if (pmop[1] != NULL)
11075 pmop[1] = fold_convert_loc (loc, utype, pmop[1]);
11078 if (TREE_CODE (arg0) == NEGATE_EXPR)
11079 tem = fold_build1_loc (loc, NEGATE_EXPR, utype, pmop[0]);
11080 else if (TREE_CODE (arg0) == PLUS_EXPR)
11082 if (pmop[0] != NULL && pmop[1] != NULL)
11083 tem = fold_build2_loc (loc, PLUS_EXPR, utype,
11084 pmop[0], pmop[1]);
11085 else if (pmop[0] != NULL)
11086 tem = pmop[0];
11087 else if (pmop[1] != NULL)
11088 tem = pmop[1];
11089 else
11090 return build_int_cst (type, 0);
11092 else if (pmop[0] == NULL)
11093 tem = fold_build1_loc (loc, NEGATE_EXPR, utype, pmop[1]);
11094 else
11095 tem = fold_build2_loc (loc, MINUS_EXPR, utype,
11096 pmop[0], pmop[1]);
11097 /* TEM is now the new binary +, - or unary - replacement. */
11098 tem = fold_build2_loc (loc, BIT_AND_EXPR, utype, tem,
11099 fold_convert_loc (loc, utype, arg1));
11100 return fold_convert_loc (loc, type, tem);
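/* Editorial illustration, not part of the original source: with
   M == 0xff (so M == (1 << 8) - 1), ((a & 0x1ff) + b) & 0xff folds
   to (a + b) & 0xff because (N & M) == M, and ((a | 0x100) + b) & 0xff
   folds the same way because (N & M) == 0; when signed overflow is
   not wrapping, the rewritten +/- is done in the unsigned type.  */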
11105 t1 = distribute_bit_expr (loc, code, type, arg0, arg1);
11106 if (t1 != NULL_TREE)
11107 return t1;
11108 /* Simplify ((int)c & 0377) into (int)c, if c is unsigned char. */
11109 if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg0) == NOP_EXPR
11110 && TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg0, 0))))
11112 unsigned int prec
11113 = TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg0, 0)));
11115 if (prec < BITS_PER_WORD && prec < HOST_BITS_PER_WIDE_INT
11116 && (~TREE_INT_CST_LOW (arg1)
11117 & (((HOST_WIDE_INT) 1 << prec) - 1)) == 0)
11118 return
11119 fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
11122 /* Convert (and (not arg0) (not arg1)) to (not (or (arg0) (arg1))).
11124 This results in more efficient code for machines without a NOR
11125 instruction. Combine will canonicalize to the first form
11126 which will allow use of NOR instructions provided by the
11127 backend if they exist. */
11128 if (TREE_CODE (arg0) == BIT_NOT_EXPR
11129 && TREE_CODE (arg1) == BIT_NOT_EXPR)
11131 return fold_build1_loc (loc, BIT_NOT_EXPR, type,
11132 build2 (BIT_IOR_EXPR, type,
11133 fold_convert_loc (loc, type,
11134 TREE_OPERAND (arg0, 0)),
11135 fold_convert_loc (loc, type,
11136 TREE_OPERAND (arg1, 0))));
11139 /* If arg0 is derived from the address of an object or function, we may
11140 be able to fold this expression using the object or function's
11141 alignment. */
11142 if (POINTER_TYPE_P (TREE_TYPE (arg0)) && host_integerp (arg1, 1))
11144 unsigned HOST_WIDE_INT modulus, residue;
11145 unsigned HOST_WIDE_INT low = TREE_INT_CST_LOW (arg1);
11147 modulus = get_pointer_modulus_and_residue (arg0, &residue,
11148 integer_onep (arg1));
11150 /* This works because modulus is a power of 2. If this weren't the
11151 case, we'd have to replace it by its greatest power-of-2
11152 divisor: modulus & -modulus. */
11153 if (low < modulus)
11154 return build_int_cst (type, residue & low);
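/* Editorial illustration, not part of the original source: if arg0
   is the address of a static object with known alignment 8, then a
   mask such as 7 satisfies low < modulus and the whole expression
   folds to the constant residue & 7 (0 for a fully aligned
   object).  */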
11157 /* Fold (X << C1) & C2 into (X << C1) & (C2 | ((1 << C1) - 1))
11158 (X >> C1) & C2 into (X >> C1) & (C2 | ~((type) -1 >> C1))
11159 if the new mask might be further optimized. */
11160 if ((TREE_CODE (arg0) == LSHIFT_EXPR
11161 || TREE_CODE (arg0) == RSHIFT_EXPR)
11162 && host_integerp (TREE_OPERAND (arg0, 1), 1)
11163 && host_integerp (arg1, TYPE_UNSIGNED (TREE_TYPE (arg1)))
11164 && tree_low_cst (TREE_OPERAND (arg0, 1), 1)
11165 < TYPE_PRECISION (TREE_TYPE (arg0))
11166 && TYPE_PRECISION (TREE_TYPE (arg0)) <= HOST_BITS_PER_WIDE_INT
11167 && tree_low_cst (TREE_OPERAND (arg0, 1), 1) > 0)
11169 unsigned int shiftc = tree_low_cst (TREE_OPERAND (arg0, 1), 1);
11170 unsigned HOST_WIDE_INT mask
11171 = tree_low_cst (arg1, TYPE_UNSIGNED (TREE_TYPE (arg1)));
11172 unsigned HOST_WIDE_INT newmask, zerobits = 0;
11173 tree shift_type = TREE_TYPE (arg0);
11175 if (TREE_CODE (arg0) == LSHIFT_EXPR)
11176 zerobits = ((((unsigned HOST_WIDE_INT) 1) << shiftc) - 1);
11177 else if (TREE_CODE (arg0) == RSHIFT_EXPR
11178 && TYPE_PRECISION (TREE_TYPE (arg0))
11179 == GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (arg0))))
11181 unsigned int prec = TYPE_PRECISION (TREE_TYPE (arg0));
11182 tree arg00 = TREE_OPERAND (arg0, 0);
11183 /* See if more bits can be proven as zero because of
11184 zero extension. */
11185 if (TREE_CODE (arg00) == NOP_EXPR
11186 && TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg00, 0))))
11188 tree inner_type = TREE_TYPE (TREE_OPERAND (arg00, 0));
11189 if (TYPE_PRECISION (inner_type)
11190 == GET_MODE_BITSIZE (TYPE_MODE (inner_type))
11191 && TYPE_PRECISION (inner_type) < prec)
11193 prec = TYPE_PRECISION (inner_type);
11194 /* See if we can shorten the right shift. */
11195 if (shiftc < prec)
11196 shift_type = inner_type;
11199 zerobits = ~(unsigned HOST_WIDE_INT) 0;
11200 zerobits >>= HOST_BITS_PER_WIDE_INT - shiftc;
11201 zerobits <<= prec - shiftc;
11202 /* For an arithmetic shift, if the sign bit could be set, zerobits
11203 may actually contain sign bits, so no transformation is
11204 possible, unless MASK masks them all away. In that
11205 case the shift needs to be converted into a logical shift. */
11206 if (!TYPE_UNSIGNED (TREE_TYPE (arg0))
11207 && prec == TYPE_PRECISION (TREE_TYPE (arg0)))
11209 if ((mask & zerobits) == 0)
11210 shift_type = unsigned_type_for (TREE_TYPE (arg0));
11211 else
11212 zerobits = 0;
11216 /* ((X << 16) & 0xff00) is (X, 0). */
11217 if ((mask & zerobits) == mask)
11218 return omit_one_operand_loc (loc, type,
11219 build_int_cst (type, 0), arg0);
11221 newmask = mask | zerobits;
11222 if (newmask != mask && (newmask & (newmask + 1)) == 0)
11224 unsigned int prec;
11226 /* Only do the transformation if NEWMASK is some integer
11227 mode's mask. */
11228 for (prec = BITS_PER_UNIT;
11229 prec < HOST_BITS_PER_WIDE_INT; prec <<= 1)
11230 if (newmask == (((unsigned HOST_WIDE_INT) 1) << prec) - 1)
11231 break;
11232 if (prec < HOST_BITS_PER_WIDE_INT
11233 || newmask == ~(unsigned HOST_WIDE_INT) 0)
11235 tree newmaskt;
11237 if (shift_type != TREE_TYPE (arg0))
11239 tem = fold_build2_loc (loc, TREE_CODE (arg0), shift_type,
11240 fold_convert_loc (loc, shift_type,
11241 TREE_OPERAND (arg0, 0)),
11242 TREE_OPERAND (arg0, 1));
11243 tem = fold_convert_loc (loc, type, tem);
11245 else
11246 tem = op0;
11247 newmaskt = build_int_cst_type (TREE_TYPE (op1), newmask);
11248 if (!tree_int_cst_equal (newmaskt, arg1))
11249 return fold_build2_loc (loc, BIT_AND_EXPR, type, tem, newmaskt);
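/* Editorial illustration, not part of the original source: for a
   32-bit unsigned x, all bits of (x >> 16) above bit 15 are known
   zero, so (x >> 16) & 0xffff widens its mask to ~0 and folds to
   plain x >> 16, while ((x << 16) & 0xff00) is known to be 0.  */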
11254 goto associate;
11256 case RDIV_EXPR:
11257 /* Don't touch a floating-point divide by zero unless the mode
11258 of the constant can represent infinity. */
11259 if (TREE_CODE (arg1) == REAL_CST
11260 && !MODE_HAS_INFINITIES (TYPE_MODE (TREE_TYPE (arg1)))
11261 && real_zerop (arg1))
11262 return NULL_TREE;
11264 /* Optimize A / A to 1.0 if we don't care about
11265 NaNs or Infinities. Skip the transformation
11266 for non-real operands. */
11267 if (SCALAR_FLOAT_TYPE_P (TREE_TYPE (arg0))
11268 && ! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0)))
11269 && ! HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (arg0)))
11270 && operand_equal_p (arg0, arg1, 0))
11272 tree r = build_real (TREE_TYPE (arg0), dconst1);
11274 return omit_two_operands_loc (loc, type, r, arg0, arg1);
11277 /* The complex version of the above A / A optimization. */
11278 if (COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0))
11279 && operand_equal_p (arg0, arg1, 0))
11281 tree elem_type = TREE_TYPE (TREE_TYPE (arg0));
11282 if (! HONOR_NANS (TYPE_MODE (elem_type))
11283 && ! HONOR_INFINITIES (TYPE_MODE (elem_type)))
11285 tree r = build_real (elem_type, dconst1);
11286 /* omit_two_operands will call fold_convert for us. */
11287 return omit_two_operands_loc (loc, type, r, arg0, arg1);
11291 /* (-A) / (-B) -> A / B */
11292 if (TREE_CODE (arg0) == NEGATE_EXPR && negate_expr_p (arg1))
11293 return fold_build2_loc (loc, RDIV_EXPR, type,
11294 TREE_OPERAND (arg0, 0),
11295 negate_expr (arg1));
11296 if (TREE_CODE (arg1) == NEGATE_EXPR && negate_expr_p (arg0))
11297 return fold_build2_loc (loc, RDIV_EXPR, type,
11298 negate_expr (arg0),
11299 TREE_OPERAND (arg1, 0));
11301 /* In IEEE floating point, x/1 is not equivalent to x for snans. */
11302 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
11303 && real_onep (arg1))
11304 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
11306 /* In IEEE floating point, x/-1 is not equivalent to -x for snans. */
11307 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
11308 && real_minus_onep (arg1))
11309 return non_lvalue_loc (loc, fold_convert_loc (loc, type,
11310 negate_expr (arg0)));
11312 /* If ARG1 is a constant, we can convert this to a multiply by the
11313 reciprocal. This does not have the same rounding properties,
11314 so only do this if -freciprocal-math. We can actually
11315 always safely do it if ARG1 is a power of two, but it's hard to
11316 tell if it is or not in a portable manner. */
11317 if (TREE_CODE (arg1) == REAL_CST)
11319 if (flag_reciprocal_math
11320 && 0 != (tem = const_binop (code, build_real (type, dconst1),
11321 arg1)))
11322 return fold_build2_loc (loc, MULT_EXPR, type, arg0, tem);
11323 /* Find the reciprocal if optimizing and the result is exact. */
11324 if (optimize)
11326 REAL_VALUE_TYPE r;
11327 r = TREE_REAL_CST (arg1);
11328 if (exact_real_inverse (TYPE_MODE(TREE_TYPE(arg0)), &r))
11330 tem = build_real (type, r);
11331 return fold_build2_loc (loc, MULT_EXPR, type,
11332 fold_convert_loc (loc, type, arg0), tem);
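/* Editorial illustration, not part of the original source: x / 2.0
   becomes x * 0.5 whenever we optimize, because 0.5 is an exact
   reciprocal, whereas x / 3.0 becomes x * (1.0/3.0) only under
   -freciprocal-math, the reciprocal being inexact.  */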
11336 /* Convert A/B/C to A/(B*C). */
11337 if (flag_reciprocal_math
11338 && TREE_CODE (arg0) == RDIV_EXPR)
11339 return fold_build2_loc (loc, RDIV_EXPR, type, TREE_OPERAND (arg0, 0),
11340 fold_build2_loc (loc, MULT_EXPR, type,
11341 TREE_OPERAND (arg0, 1), arg1));
11343 /* Convert A/(B/C) to (A/B)*C. */
11344 if (flag_reciprocal_math
11345 && TREE_CODE (arg1) == RDIV_EXPR)
11346 return fold_build2_loc (loc, MULT_EXPR, type,
11347 fold_build2_loc (loc, RDIV_EXPR, type, arg0,
11348 TREE_OPERAND (arg1, 0)),
11349 TREE_OPERAND (arg1, 1));
11351 /* Convert C1/(X*C2) into (C1/C2)/X. */
11352 if (flag_reciprocal_math
11353 && TREE_CODE (arg1) == MULT_EXPR
11354 && TREE_CODE (arg0) == REAL_CST
11355 && TREE_CODE (TREE_OPERAND (arg1, 1)) == REAL_CST)
11357 tree tem = const_binop (RDIV_EXPR, arg0,
11358 TREE_OPERAND (arg1, 1));
11359 if (tem)
11360 return fold_build2_loc (loc, RDIV_EXPR, type, tem,
11361 TREE_OPERAND (arg1, 0));
11364 if (flag_unsafe_math_optimizations)
11366 enum built_in_function fcode0 = builtin_mathfn_code (arg0);
11367 enum built_in_function fcode1 = builtin_mathfn_code (arg1);
11369 /* Optimize sin(x)/cos(x) as tan(x). */
11370 if (((fcode0 == BUILT_IN_SIN && fcode1 == BUILT_IN_COS)
11371 || (fcode0 == BUILT_IN_SINF && fcode1 == BUILT_IN_COSF)
11372 || (fcode0 == BUILT_IN_SINL && fcode1 == BUILT_IN_COSL))
11373 && operand_equal_p (CALL_EXPR_ARG (arg0, 0),
11374 CALL_EXPR_ARG (arg1, 0), 0))
11376 tree tanfn = mathfn_built_in (type, BUILT_IN_TAN);
11378 if (tanfn != NULL_TREE)
11379 return build_call_expr_loc (loc, tanfn, 1, CALL_EXPR_ARG (arg0, 0));
11382 /* Optimize cos(x)/sin(x) as 1.0/tan(x). */
11383 if (((fcode0 == BUILT_IN_COS && fcode1 == BUILT_IN_SIN)
11384 || (fcode0 == BUILT_IN_COSF && fcode1 == BUILT_IN_SINF)
11385 || (fcode0 == BUILT_IN_COSL && fcode1 == BUILT_IN_SINL))
11386 && operand_equal_p (CALL_EXPR_ARG (arg0, 0),
11387 CALL_EXPR_ARG (arg1, 0), 0))
11389 tree tanfn = mathfn_built_in (type, BUILT_IN_TAN);
11391 if (tanfn != NULL_TREE)
11393 tree tmp = build_call_expr_loc (loc, tanfn, 1,
11394 CALL_EXPR_ARG (arg0, 0));
11395 return fold_build2_loc (loc, RDIV_EXPR, type,
11396 build_real (type, dconst1), tmp);
11400 /* Optimize sin(x)/tan(x) as cos(x) if we don't care about
11401 NaNs or Infinities. */
11402 if (((fcode0 == BUILT_IN_SIN && fcode1 == BUILT_IN_TAN)
11403 || (fcode0 == BUILT_IN_SINF && fcode1 == BUILT_IN_TANF)
11404 || (fcode0 == BUILT_IN_SINL && fcode1 == BUILT_IN_TANL)))
11406 tree arg00 = CALL_EXPR_ARG (arg0, 0);
11407 tree arg01 = CALL_EXPR_ARG (arg1, 0);
11409 if (! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg00)))
11410 && ! HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (arg00)))
11411 && operand_equal_p (arg00, arg01, 0))
11413 tree cosfn = mathfn_built_in (type, BUILT_IN_COS);
11415 if (cosfn != NULL_TREE)
11416 return build_call_expr_loc (loc, cosfn, 1, arg00);
11420 /* Optimize tan(x)/sin(x) as 1.0/cos(x) if we don't care about
11421 NaNs or Infinities. */
11422 if (((fcode0 == BUILT_IN_TAN && fcode1 == BUILT_IN_SIN)
11423 || (fcode0 == BUILT_IN_TANF && fcode1 == BUILT_IN_SINF)
11424 || (fcode0 == BUILT_IN_TANL && fcode1 == BUILT_IN_SINL)))
11426 tree arg00 = CALL_EXPR_ARG (arg0, 0);
11427 tree arg01 = CALL_EXPR_ARG (arg1, 0);
11429 if (! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg00)))
11430 && ! HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (arg00)))
11431 && operand_equal_p (arg00, arg01, 0))
11433 tree cosfn = mathfn_built_in (type, BUILT_IN_COS);
11435 if (cosfn != NULL_TREE)
11437 tree tmp = build_call_expr_loc (loc, cosfn, 1, arg00);
11438 return fold_build2_loc (loc, RDIV_EXPR, type,
11439 build_real (type, dconst1),
11440 tmp);
11445 /* Optimize pow(x,c)/x as pow(x,c-1). */
11446 if (fcode0 == BUILT_IN_POW
11447 || fcode0 == BUILT_IN_POWF
11448 || fcode0 == BUILT_IN_POWL)
11450 tree arg00 = CALL_EXPR_ARG (arg0, 0);
11451 tree arg01 = CALL_EXPR_ARG (arg0, 1);
11452 if (TREE_CODE (arg01) == REAL_CST
11453 && !TREE_OVERFLOW (arg01)
11454 && operand_equal_p (arg1, arg00, 0))
11456 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
11457 REAL_VALUE_TYPE c;
11458 tree arg;
11460 c = TREE_REAL_CST (arg01);
11461 real_arithmetic (&c, MINUS_EXPR, &c, &dconst1);
11462 arg = build_real (type, c);
11463 return build_call_expr_loc (loc, powfn, 2, arg1, arg);
11467 /* Optimize a/root(b/c) into a*root(c/b). */
11468 if (BUILTIN_ROOT_P (fcode1))
11470 tree rootarg = CALL_EXPR_ARG (arg1, 0);
11472 if (TREE_CODE (rootarg) == RDIV_EXPR)
11474 tree rootfn = TREE_OPERAND (CALL_EXPR_FN (arg1), 0);
11475 tree b = TREE_OPERAND (rootarg, 0);
11476 tree c = TREE_OPERAND (rootarg, 1);
11478 tree tmp = fold_build2_loc (loc, RDIV_EXPR, type, c, b);
11480 tmp = build_call_expr_loc (loc, rootfn, 1, tmp);
11481 return fold_build2_loc (loc, MULT_EXPR, type, arg0, tmp);
11485 /* Optimize x/expN(y) into x*expN(-y). */
11486 if (BUILTIN_EXPONENT_P (fcode1))
11488 tree expfn = TREE_OPERAND (CALL_EXPR_FN (arg1), 0);
11489 tree arg = negate_expr (CALL_EXPR_ARG (arg1, 0));
11490 arg1 = build_call_expr_loc (loc,
11491 expfn, 1,
11492 fold_convert_loc (loc, type, arg));
11493 return fold_build2_loc (loc, MULT_EXPR, type, arg0, arg1);
11496 /* Optimize x/pow(y,z) into x*pow(y,-z). */
11497 if (fcode1 == BUILT_IN_POW
11498 || fcode1 == BUILT_IN_POWF
11499 || fcode1 == BUILT_IN_POWL)
11501 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg1), 0);
11502 tree arg10 = CALL_EXPR_ARG (arg1, 0);
11503 tree arg11 = CALL_EXPR_ARG (arg1, 1);
11504 tree neg11 = fold_convert_loc (loc, type,
11505 negate_expr (arg11));
11506 arg1 = build_call_expr_loc (loc, powfn, 2, arg10, neg11);
11507 return fold_build2_loc (loc, MULT_EXPR, type, arg0, arg1);
11510 return NULL_TREE;
11512 case TRUNC_DIV_EXPR:
11513 /* Optimize (X & (-A)) / A where A is a power of 2,
11514 to X >> log2(A) */
11515 if (TREE_CODE (arg0) == BIT_AND_EXPR
11516 && !TYPE_UNSIGNED (type) && TREE_CODE (arg1) == INTEGER_CST
11517 && integer_pow2p (arg1) && tree_int_cst_sgn (arg1) > 0)
11519 tree sum = fold_binary_loc (loc, PLUS_EXPR, TREE_TYPE (arg1),
11520 arg1, TREE_OPERAND (arg0, 1));
11521 if (sum && integer_zerop (sum)) {
11522 unsigned long pow2;
11524 if (TREE_INT_CST_LOW (arg1))
11525 pow2 = exact_log2 (TREE_INT_CST_LOW (arg1));
11526 else
11527 pow2 = exact_log2 (TREE_INT_CST_HIGH (arg1))
11528 + HOST_BITS_PER_WIDE_INT;
11530 return fold_build2_loc (loc, RSHIFT_EXPR, type,
11531 TREE_OPERAND (arg0, 0),
11532 build_int_cst (NULL_TREE, pow2));
11536 /* Fall through.  */
11538 case FLOOR_DIV_EXPR:
11539 /* Simplify A / (B << N) where A and B are positive and B is
11540 a power of 2, to A >> (N + log2(B)). */
11541 strict_overflow_p = false;
11542 if (TREE_CODE (arg1) == LSHIFT_EXPR
11543 && (TYPE_UNSIGNED (type)
11544 || tree_expr_nonnegative_warnv_p (op0, &strict_overflow_p)))
11546 tree sval = TREE_OPERAND (arg1, 0);
11547 if (integer_pow2p (sval) && tree_int_cst_sgn (sval) > 0)
11549 tree sh_cnt = TREE_OPERAND (arg1, 1);
11550 unsigned long pow2;
11552 if (TREE_INT_CST_LOW (sval))
11553 pow2 = exact_log2 (TREE_INT_CST_LOW (sval));
11554 else
11555 pow2 = exact_log2 (TREE_INT_CST_HIGH (sval))
11556 + HOST_BITS_PER_WIDE_INT;
11558 if (strict_overflow_p)
11559 fold_overflow_warning (("assuming signed overflow does not "
11560 "occur when simplifying A / (B << N)"),
11561 WARN_STRICT_OVERFLOW_MISC);
11563 sh_cnt = fold_build2_loc (loc, PLUS_EXPR, TREE_TYPE (sh_cnt),
11564 sh_cnt, build_int_cst (NULL_TREE, pow2));
11565 return fold_build2_loc (loc, RSHIFT_EXPR, type,
11566 fold_convert_loc (loc, type, arg0), sh_cnt);
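/* Editorial illustration, not part of the original source: for an
   unsigned a, a / (4u << n) folds to a >> (n + 2), since B == 4 is
   a power of two with log2(B) == 2.  */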
11570 /* For unsigned integral types, FLOOR_DIV_EXPR is the same as
11571 TRUNC_DIV_EXPR. Rewrite into the latter in this case. */
11572 if (INTEGRAL_TYPE_P (type)
11573 && TYPE_UNSIGNED (type)
11574 && code == FLOOR_DIV_EXPR)
11575 return fold_build2_loc (loc, TRUNC_DIV_EXPR, type, op0, op1);
11577 /* Fall through.  */
11579 case ROUND_DIV_EXPR:
11580 case CEIL_DIV_EXPR:
11581 case EXACT_DIV_EXPR:
11582 if (integer_onep (arg1))
11583 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
11584 if (integer_zerop (arg1))
11585 return NULL_TREE;
11586 /* X / -1 is -X. */
11587 if (!TYPE_UNSIGNED (type)
11588 && TREE_CODE (arg1) == INTEGER_CST
11589 && TREE_INT_CST_LOW (arg1) == (unsigned HOST_WIDE_INT) -1
11590 && TREE_INT_CST_HIGH (arg1) == -1)
11591 return fold_convert_loc (loc, type, negate_expr (arg0));
11593 /* Convert -A / -B to A / B when the type is signed and overflow is
11594 undefined. */
11595 if ((!INTEGRAL_TYPE_P (type) || TYPE_OVERFLOW_UNDEFINED (type))
11596 && TREE_CODE (arg0) == NEGATE_EXPR
11597 && negate_expr_p (arg1))
11599 if (INTEGRAL_TYPE_P (type))
11600 fold_overflow_warning (("assuming signed overflow does not occur "
11601 "when distributing negation across "
11602 "division"),
11603 WARN_STRICT_OVERFLOW_MISC);
11604 return fold_build2_loc (loc, code, type,
11605 fold_convert_loc (loc, type,
11606 TREE_OPERAND (arg0, 0)),
11607 fold_convert_loc (loc, type,
11608 negate_expr (arg1)));
11610 if ((!INTEGRAL_TYPE_P (type) || TYPE_OVERFLOW_UNDEFINED (type))
11611 && TREE_CODE (arg1) == NEGATE_EXPR
11612 && negate_expr_p (arg0))
11614 if (INTEGRAL_TYPE_P (type))
11615 fold_overflow_warning (("assuming signed overflow does not occur "
11616 "when distributing negation across "
11617 "division"),
11618 WARN_STRICT_OVERFLOW_MISC);
11619 return fold_build2_loc (loc, code, type,
11620 fold_convert_loc (loc, type,
11621 negate_expr (arg0)),
11622 fold_convert_loc (loc, type,
11623 TREE_OPERAND (arg1, 0)));
11626 /* If arg0 is a multiple of arg1, then rewrite to the fastest div
11627 operation, EXACT_DIV_EXPR.
11629 Note that only CEIL_DIV_EXPR and FLOOR_DIV_EXPR are rewritten now.
11630 At one time others generated faster code; it's not clear whether they
11631 still do after the last round of changes to the DIV code in expmed.c. */
11632 if ((code == CEIL_DIV_EXPR || code == FLOOR_DIV_EXPR)
11633 && multiple_of_p (type, arg0, arg1))
11634 return fold_build2_loc (loc, EXACT_DIV_EXPR, type, arg0, arg1);
11636 strict_overflow_p = false;
11637 if (TREE_CODE (arg1) == INTEGER_CST
11638 && 0 != (tem = extract_muldiv (op0, arg1, code, NULL_TREE,
11639 &strict_overflow_p)))
11641 if (strict_overflow_p)
11642 fold_overflow_warning (("assuming signed overflow does not occur "
11643 "when simplifying division"),
11644 WARN_STRICT_OVERFLOW_MISC);
11645 return fold_convert_loc (loc, type, tem);
11648 return NULL_TREE;
11650 case CEIL_MOD_EXPR:
11651 case FLOOR_MOD_EXPR:
11652 case ROUND_MOD_EXPR:
11653 case TRUNC_MOD_EXPR:
11654 /* X % 1 is always zero, but be sure to preserve any side
11655 effects in X. */
11656 if (integer_onep (arg1))
11657 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
11659 /* For X % 0, return X % 0 unchanged so that we can get the
11660 proper warnings and errors. */
11661 if (integer_zerop (arg1))
11662 return NULL_TREE;
11664 /* 0 % X is always zero, but be sure to preserve any side
11665 effects in X. Place this after checking for X == 0. */
11666 if (integer_zerop (arg0))
11667 return omit_one_operand_loc (loc, type, integer_zero_node, arg1);
11669 /* X % -1 is zero. */
11670 if (!TYPE_UNSIGNED (type)
11671 && TREE_CODE (arg1) == INTEGER_CST
11672 && TREE_INT_CST_LOW (arg1) == (unsigned HOST_WIDE_INT) -1
11673 && TREE_INT_CST_HIGH (arg1) == -1)
11674 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
11676 /* X % -C is the same as X % C. */
11677 if (code == TRUNC_MOD_EXPR
11678 && !TYPE_UNSIGNED (type)
11679 && TREE_CODE (arg1) == INTEGER_CST
11680 && !TREE_OVERFLOW (arg1)
11681 && TREE_INT_CST_HIGH (arg1) < 0
11682 && !TYPE_OVERFLOW_TRAPS (type)
11683 /* Avoid this transformation if C is INT_MIN, i.e. C == -C. */
11684 && !sign_bit_p (arg1, arg1))
11685 return fold_build2_loc (loc, code, type,
11686 fold_convert_loc (loc, type, arg0),
11687 fold_convert_loc (loc, type,
11688 negate_expr (arg1)));
11690 /* X % -Y is the same as X % Y. */
11691 if (code == TRUNC_MOD_EXPR
11692 && !TYPE_UNSIGNED (type)
11693 && TREE_CODE (arg1) == NEGATE_EXPR
11694 && !TYPE_OVERFLOW_TRAPS (type))
11695 return fold_build2_loc (loc, code, type, fold_convert_loc (loc, type, arg0),
11696 fold_convert_loc (loc, type,
11697 TREE_OPERAND (arg1, 0)));
11699 strict_overflow_p = false;
11700 if (TREE_CODE (arg1) == INTEGER_CST
11701 && 0 != (tem = extract_muldiv (op0, arg1, code, NULL_TREE,
11702 &strict_overflow_p)))
11704 if (strict_overflow_p)
11705 fold_overflow_warning (("assuming signed overflow does not occur "
11706 "when simplifying modulus"),
11707 WARN_STRICT_OVERFLOW_MISC);
11708 return fold_convert_loc (loc, type, tem);
11711 /* Optimize TRUNC_MOD_EXPR by a power of two into a BIT_AND_EXPR,
11712 i.e. "X % C" into "X & (C - 1)", if X and C are positive. */
11713 if ((code == TRUNC_MOD_EXPR || code == FLOOR_MOD_EXPR)
11714 && (TYPE_UNSIGNED (type)
11715 || tree_expr_nonnegative_warnv_p (op0, &strict_overflow_p)))
11717 tree c = arg1;
11718 /* Also optimize A % (C << N) where C is a power of 2,
11719 to A & ((C << N) - 1). */
11720 if (TREE_CODE (arg1) == LSHIFT_EXPR)
11721 c = TREE_OPERAND (arg1, 0);
11723 if (integer_pow2p (c) && tree_int_cst_sgn (c) > 0)
11725 tree mask
11726 = fold_build2_loc (loc, MINUS_EXPR, TREE_TYPE (arg1), arg1,
11727 build_int_cst (TREE_TYPE (arg1), 1));
11728 if (strict_overflow_p)
11729 fold_overflow_warning (("assuming signed overflow does not "
11730 "occur when simplifying "
11731 "X % (power of two)"),
11732 WARN_STRICT_OVERFLOW_MISC);
11733 return fold_build2_loc (loc, BIT_AND_EXPR, type,
11734 fold_convert_loc (loc, type, arg0),
11735 fold_convert_loc (loc, type, mask));
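/* Editorial illustration, not part of the original source: for an
   unsigned x, x % 8u folds to x & 7, and x % (2u << n) folds to
   x & ((2u << n) - 1); for signed x the fold fires only when x can
   be proven nonnegative.  */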
11739 return NULL_TREE;
11741 case LROTATE_EXPR:
11742 case RROTATE_EXPR:
11743 if (integer_all_onesp (arg0))
11744 return omit_one_operand_loc (loc, type, arg0, arg1);
11745 goto shift;
11747 case RSHIFT_EXPR:
11748 /* Optimize -1 >> x for arithmetic right shifts. */
11749 if (integer_all_onesp (arg0) && !TYPE_UNSIGNED (type)
11750 && tree_expr_nonnegative_p (arg1))
11751 return omit_one_operand_loc (loc, type, arg0, arg1);
11752 /* ... fall through ... */
11754 case LSHIFT_EXPR:
11755 shift:
11756 if (integer_zerop (arg1))
11757 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
11758 if (integer_zerop (arg0))
11759 return omit_one_operand_loc (loc, type, arg0, arg1);
11761 /* Since a negative shift count is not well-defined,
11762 don't try to compute it in the compiler. */
11763 if (TREE_CODE (arg1) == INTEGER_CST && tree_int_cst_sgn (arg1) < 0)
11764 return NULL_TREE;
11766 /* Turn (a OP c1) OP c2 into a OP (c1+c2). */
11767 if (TREE_CODE (op0) == code && host_integerp (arg1, false)
11768 && TREE_INT_CST_LOW (arg1) < TYPE_PRECISION (type)
11769 && host_integerp (TREE_OPERAND (arg0, 1), false)
11770 && TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1)) < TYPE_PRECISION (type))
11772 HOST_WIDE_INT low = (TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1))
11773 + TREE_INT_CST_LOW (arg1));
11775 /* Deal with a OP (c1 + c2) being undefined but (a OP c1) OP c2
11776 being well defined. */
11777 if (low >= TYPE_PRECISION (type))
11779 if (code == LROTATE_EXPR || code == RROTATE_EXPR)
11780 low = low % TYPE_PRECISION (type);
11781 else if (TYPE_UNSIGNED (type) || code == LSHIFT_EXPR)
11782 return omit_one_operand_loc (loc, type, build_int_cst (type, 0),
11783 TREE_OPERAND (arg0, 0));
11784 else
11785 low = TYPE_PRECISION (type) - 1;
11788 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0),
11789 build_int_cst (type, low));
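/* Editorial illustration, not part of the original source:
   (x << 3) << 5 folds to x << 8, and if the combined count reaches
   the precision, e.g. (x << 28) << 4 on a 32-bit unsigned x, the
   result folds to 0 instead.  */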
11792 /* Transform (x >> c) << c into x & (-1<<c), or transform (x << c) >> c
11793 into x & ((unsigned)-1 >> c) for unsigned types. */
11794 if (((code == LSHIFT_EXPR && TREE_CODE (arg0) == RSHIFT_EXPR)
11795 || (TYPE_UNSIGNED (type)
11796 && code == RSHIFT_EXPR && TREE_CODE (arg0) == LSHIFT_EXPR))
11797 && host_integerp (arg1, false)
11798 && TREE_INT_CST_LOW (arg1) < TYPE_PRECISION (type)
11799 && host_integerp (TREE_OPERAND (arg0, 1), false)
11800 && TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1)) < TYPE_PRECISION (type))
11802 HOST_WIDE_INT low0 = TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1));
11803 HOST_WIDE_INT low1 = TREE_INT_CST_LOW (arg1);
11804 tree lshift;
11805 tree arg00;
11807 if (low0 == low1)
11809 arg00 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
11811 lshift = build_int_cst (type, -1);
11812 lshift = int_const_binop (code, lshift, arg1, 0);
11814 return fold_build2_loc (loc, BIT_AND_EXPR, type, arg00, lshift);
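/* Editorial illustration, not part of the original source: for a
   32-bit unsigned x, (x >> 4) << 4 folds to x & 0xfffffff0 and
   (x << 4) >> 4 folds to x & 0x0fffffff; a shift pair that merely
   clears bits becomes a single mask.  */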
11818 /* Rewrite an LROTATE_EXPR by a constant into an
11819 RROTATE_EXPR by a new constant. */
11820 if (code == LROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST)
11822 tree tem = build_int_cst (TREE_TYPE (arg1),
11823 TYPE_PRECISION (type));
11824 tem = const_binop (MINUS_EXPR, tem, arg1);
11825 return fold_build2_loc (loc, RROTATE_EXPR, type, op0, tem);
11828 /* If we have a rotate of a bit operation with the rotate count and
11829 the second operand of the bit operation both constant,
11830 permute the two operations. */
11831 if (code == RROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST
11832 && (TREE_CODE (arg0) == BIT_AND_EXPR
11833 || TREE_CODE (arg0) == BIT_IOR_EXPR
11834 || TREE_CODE (arg0) == BIT_XOR_EXPR)
11835 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
11836 return fold_build2_loc (loc, TREE_CODE (arg0), type,
11837 fold_build2_loc (loc, code, type,
11838 TREE_OPERAND (arg0, 0), arg1),
11839 fold_build2_loc (loc, code, type,
11840 TREE_OPERAND (arg0, 1), arg1));
11842 /* Two consecutive rotates adding up to the precision of the
11843 type can be ignored. */
11844 if (code == RROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST
11845 && TREE_CODE (arg0) == RROTATE_EXPR
11846 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
11847 && TREE_INT_CST_HIGH (arg1) == 0
11848 && TREE_INT_CST_HIGH (TREE_OPERAND (arg0, 1)) == 0
11849 && ((TREE_INT_CST_LOW (arg1)
11850 + TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1)))
11851 == (unsigned int) TYPE_PRECISION (type)))
11852 return TREE_OPERAND (arg0, 0);
11854 /* Fold (X & C2) << C1 into (X << C1) & (C2 << C1)
11855 (X & C2) >> C1 into (X >> C1) & (C2 >> C1)
11856 if the latter can be further optimized. */
11857 if ((code == LSHIFT_EXPR || code == RSHIFT_EXPR)
11858 && TREE_CODE (arg0) == BIT_AND_EXPR
11859 && TREE_CODE (arg1) == INTEGER_CST
11860 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
11862 tree mask = fold_build2_loc (loc, code, type,
11863 fold_convert_loc (loc, type,
11864 TREE_OPERAND (arg0, 1)),
11865 arg1);
11866 tree shift = fold_build2_loc (loc, code, type,
11867 fold_convert_loc (loc, type,
11868 TREE_OPERAND (arg0, 0)),
11869 arg1);
11870 tem = fold_binary_loc (loc, BIT_AND_EXPR, type, shift, mask);
11871 if (tem)
11872 return tem;
11875 return NULL_TREE;
11877 case MIN_EXPR:
11878 if (operand_equal_p (arg0, arg1, 0))
11879 return omit_one_operand_loc (loc, type, arg0, arg1);
11880 if (INTEGRAL_TYPE_P (type)
11881 && operand_equal_p (arg1, TYPE_MIN_VALUE (type), OEP_ONLY_CONST))
11882 return omit_one_operand_loc (loc, type, arg1, arg0);
11883 tem = fold_minmax (loc, MIN_EXPR, type, arg0, arg1);
11884 if (tem)
11885 return tem;
11886 goto associate;
11888 case MAX_EXPR:
11889 if (operand_equal_p (arg0, arg1, 0))
11890 return omit_one_operand_loc (loc, type, arg0, arg1);
11891 if (INTEGRAL_TYPE_P (type)
11892 && TYPE_MAX_VALUE (type)
11893 && operand_equal_p (arg1, TYPE_MAX_VALUE (type), OEP_ONLY_CONST))
11894 return omit_one_operand_loc (loc, type, arg1, arg0);
11895 tem = fold_minmax (loc, MAX_EXPR, type, arg0, arg1);
11896 if (tem)
11897 return tem;
11898 goto associate;
11900 case TRUTH_ANDIF_EXPR:
11901 /* Note that the operands of this must be ints
11902 and their values must be 0 or 1.
11903 ("true" is a fixed value perhaps depending on the language.) */
11904 /* If first arg is constant zero, return it. */
11905 if (integer_zerop (arg0))
11906 return fold_convert_loc (loc, type, arg0);
11907 case TRUTH_AND_EXPR:
11908 /* If either arg is constant true, drop it. */
11909 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
11910 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
11911 if (TREE_CODE (arg1) == INTEGER_CST && ! integer_zerop (arg1)
11912 /* Preserve sequence points. */
11913 && (code != TRUTH_ANDIF_EXPR || ! TREE_SIDE_EFFECTS (arg0)))
11914 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
11915 /* If second arg is constant zero, result is zero, but first arg
11916 must be evaluated. */
11917 if (integer_zerop (arg1))
11918 return omit_one_operand_loc (loc, type, arg1, arg0);
11919 /* Likewise for first arg, but note that only the TRUTH_AND_EXPR
11920 case will be handled here. */
11921 if (integer_zerop (arg0))
11922 return omit_one_operand_loc (loc, type, arg0, arg1);
11924 /* !X && X is always false. */
11925 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
11926 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
11927 return omit_one_operand_loc (loc, type, integer_zero_node, arg1);
11928 /* X && !X is always false. */
11929 if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
11930 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
11931 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
11933 /* A < X && A + 1 > Y ==> A < X && A >= Y. Normally A + 1 > Y
11934 means A >= Y && A != MAX, but in this case we know that
11935 A < X <= MAX. */
11937 if (!TREE_SIDE_EFFECTS (arg0)
11938 && !TREE_SIDE_EFFECTS (arg1))
11940 tem = fold_to_nonsharp_ineq_using_bound (loc, arg0, arg1);
11941 if (tem && !operand_equal_p (tem, arg0, 0))
11942 return fold_build2_loc (loc, code, type, tem, arg1);
11944 tem = fold_to_nonsharp_ineq_using_bound (loc, arg1, arg0);
11945 if (tem && !operand_equal_p (tem, arg1, 0))
11946 return fold_build2_loc (loc, code, type, arg0, tem);
11949 truth_andor:
11950 /* We only do these simplifications if we are optimizing. */
11951 if (!optimize)
11952 return NULL_TREE;
11954 /* Check for things like (A || B) && (A || C). We can convert this
11955 to A || (B && C). Note that either operator can be any of the four
11956 truth and/or operations and the transformation will still be
11957 valid. Also note that we only care about order for the
11958 ANDIF and ORIF operators. If B contains side effects, this
11959 might change the truth-value of A. */
11960 if (TREE_CODE (arg0) == TREE_CODE (arg1)
11961 && (TREE_CODE (arg0) == TRUTH_ANDIF_EXPR
11962 || TREE_CODE (arg0) == TRUTH_ORIF_EXPR
11963 || TREE_CODE (arg0) == TRUTH_AND_EXPR
11964 || TREE_CODE (arg0) == TRUTH_OR_EXPR)
11965 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (arg0, 1)))
11967 tree a00 = TREE_OPERAND (arg0, 0);
11968 tree a01 = TREE_OPERAND (arg0, 1);
11969 tree a10 = TREE_OPERAND (arg1, 0);
11970 tree a11 = TREE_OPERAND (arg1, 1);
11971 int commutative = ((TREE_CODE (arg0) == TRUTH_OR_EXPR
11972 || TREE_CODE (arg0) == TRUTH_AND_EXPR)
11973 && (code == TRUTH_AND_EXPR
11974 || code == TRUTH_OR_EXPR));
11976 if (operand_equal_p (a00, a10, 0))
11977 return fold_build2_loc (loc, TREE_CODE (arg0), type, a00,
11978 fold_build2_loc (loc, code, type, a01, a11));
11979 else if (commutative && operand_equal_p (a00, a11, 0))
11980 return fold_build2_loc (loc, TREE_CODE (arg0), type, a00,
11981 fold_build2_loc (loc, code, type, a01, a10));
11982 else if (commutative && operand_equal_p (a01, a10, 0))
11983 return fold_build2_loc (loc, TREE_CODE (arg0), type, a01,
11984 fold_build2_loc (loc, code, type, a00, a11));
11986 /* This case is tricky because we must either have commutative
11987 operators or else A10 must not have side-effects. */
11989 else if ((commutative || ! TREE_SIDE_EFFECTS (a10))
11990 && operand_equal_p (a01, a11, 0))
11991 return fold_build2_loc (loc, TREE_CODE (arg0), type,
11992 fold_build2_loc (loc, code, type, a00, a10),
11993 a01);
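/* Editorial illustration, not part of the original source:
   (a || b) && (a || c) is refactored here into a || (b && c),
   subject to the commutativity and side-effect constraints spelled
   out above.  */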
11996 /* See if we can build a range comparison. */
11997 if (0 != (tem = fold_range_test (loc, code, type, op0, op1)))
11998 return tem;
12000 if ((code == TRUTH_ANDIF_EXPR && TREE_CODE (arg0) == TRUTH_ORIF_EXPR)
12001 || (code == TRUTH_ORIF_EXPR && TREE_CODE (arg0) == TRUTH_ANDIF_EXPR))
12003 tem = merge_truthop_with_opposite_arm (loc, arg0, arg1, true);
12004 if (tem)
12005 return fold_build2_loc (loc, code, type, tem, arg1);
12008 if ((code == TRUTH_ANDIF_EXPR && TREE_CODE (arg1) == TRUTH_ORIF_EXPR)
12009 || (code == TRUTH_ORIF_EXPR && TREE_CODE (arg1) == TRUTH_ANDIF_EXPR))
12011 tem = merge_truthop_with_opposite_arm (loc, arg1, arg0, false);
12012 if (tem)
12013 return fold_build2_loc (loc, code, type, arg0, tem);
12016 /* Check for the possibility of merging component references. If our
12017 lhs is another similar operation, try to merge its rhs with our
12018 rhs. Then try to merge our lhs and rhs. */
12019 if (TREE_CODE (arg0) == code
12020 && 0 != (tem = fold_truthop (loc, code, type,
12021 TREE_OPERAND (arg0, 1), arg1)))
12022 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0), tem);
12024 if ((tem = fold_truthop (loc, code, type, arg0, arg1)) != 0)
12025 return tem;
12027 return NULL_TREE;
12029 case TRUTH_ORIF_EXPR:
12030 /* Note that the operands of this must be ints
12031 and their values must be 0 or true.
12032 ("true" is a fixed value perhaps depending on the language.) */
12033 /* If first arg is constant true, return it. */
12034 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
12035 return fold_convert_loc (loc, type, arg0);
12036 case TRUTH_OR_EXPR:
12037 /* If either arg is constant zero, drop it. */
12038 if (TREE_CODE (arg0) == INTEGER_CST && integer_zerop (arg0))
12039 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
12040 if (TREE_CODE (arg1) == INTEGER_CST && integer_zerop (arg1)
12041 /* Preserve sequence points. */
12042 && (code != TRUTH_ORIF_EXPR || ! TREE_SIDE_EFFECTS (arg0)))
12043 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
12044 /* If second arg is constant true, result is true, but we must
12045 evaluate first arg. */
12046 if (TREE_CODE (arg1) == INTEGER_CST && ! integer_zerop (arg1))
12047 return omit_one_operand_loc (loc, type, arg1, arg0);
12048 /* Likewise for first arg, but note this only occurs here for
12049 TRUTH_OR_EXPR. */
12050 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
12051 return omit_one_operand_loc (loc, type, arg0, arg1);
12053 /* !X || X is always true. */
12054 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
12055 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
12056 return omit_one_operand_loc (loc, type, integer_one_node, arg1);
12057 /* X || !X is always true. */
12058 if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
12059 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
12060 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
12062 goto truth_andor;
12064 case TRUTH_XOR_EXPR:
12065 /* If the second arg is constant zero, drop it. */
12066 if (integer_zerop (arg1))
12067 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
12068 /* If the second arg is constant true, this is a logical inversion. */
12069 if (integer_onep (arg1))
12071 /* Only call invert_truthvalue if operand is a truth value. */
12072 if (TREE_CODE (TREE_TYPE (arg0)) != BOOLEAN_TYPE)
12073 tem = fold_build1_loc (loc, TRUTH_NOT_EXPR, TREE_TYPE (arg0), arg0);
12074 else
12075 tem = invert_truthvalue_loc (loc, arg0);
12076 return non_lvalue_loc (loc, fold_convert_loc (loc, type, tem));
12078 /* Identical arguments cancel to zero. */
12079 if (operand_equal_p (arg0, arg1, 0))
12080 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
12082 /* !X ^ X is always true. */
12083 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
12084 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
12085 return omit_one_operand_loc (loc, type, integer_one_node, arg1);
12087 /* X ^ !X is always true. */
12088 if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
12089 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
12090 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
12092 return NULL_TREE;
12094 case EQ_EXPR:
12095 case NE_EXPR:
12096 STRIP_NOPS (arg0);
12097 STRIP_NOPS (arg1);
12099 tem = fold_comparison (loc, code, type, op0, op1);
12100 if (tem != NULL_TREE)
12101 return tem;
12103 /* bool_var != 0 becomes bool_var. */
12104 if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_zerop (arg1)
12105 && code == NE_EXPR)
12106 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
12108 /* bool_var == 1 becomes bool_var. */
12109 if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_onep (arg1)
12110 && code == EQ_EXPR)
12111 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
12113 /* bool_var != 1 becomes !bool_var. */
12114 if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_onep (arg1)
12115 && code == NE_EXPR)
12116 return fold_build1_loc (loc, TRUTH_NOT_EXPR, type,
12117 fold_convert_loc (loc, type, arg0));
12119 /* bool_var == 0 becomes !bool_var. */
12120 if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_zerop (arg1)
12121 && code == EQ_EXPR)
12122 return fold_build1_loc (loc, TRUTH_NOT_EXPR, type,
12123 fold_convert_loc (loc, type, arg0));
12125 /* !exp != 0 becomes !exp */
12126 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR && integer_zerop (arg1)
12127 && code == NE_EXPR)
12128 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
12130 /* If this is an equality comparison of the address of two non-weak,
12131 unaliased symbols, neither of which is extern (since we do not
12132 have access to attributes for externs), then we know the result. */
12133 if (TREE_CODE (arg0) == ADDR_EXPR
12134 && VAR_OR_FUNCTION_DECL_P (TREE_OPERAND (arg0, 0))
12135 && ! DECL_WEAK (TREE_OPERAND (arg0, 0))
12136 && ! lookup_attribute ("alias",
12137 DECL_ATTRIBUTES (TREE_OPERAND (arg0, 0)))
12138 && ! DECL_EXTERNAL (TREE_OPERAND (arg0, 0))
12139 && TREE_CODE (arg1) == ADDR_EXPR
12140 && VAR_OR_FUNCTION_DECL_P (TREE_OPERAND (arg1, 0))
12141 && ! DECL_WEAK (TREE_OPERAND (arg1, 0))
12142 && ! lookup_attribute ("alias",
12143 DECL_ATTRIBUTES (TREE_OPERAND (arg1, 0)))
12144 && ! DECL_EXTERNAL (TREE_OPERAND (arg1, 0)))
12146 /* We know that we're looking at the address of two
12147 non-weak, unaliased, static _DECL nodes.
12149 It is both wasteful and incorrect to call operand_equal_p
12150 to compare the two ADDR_EXPR nodes. It is wasteful in that
12151 all we need to do is test pointer equality for the arguments
12152 to the two ADDR_EXPR nodes. It is incorrect to use
12153 operand_equal_p as that function is NOT equivalent to a
12154 C equality test. It can in fact return false for two
12155 objects which would test as equal using the C equality
12156 operator. */
12157 bool equal = TREE_OPERAND (arg0, 0) == TREE_OPERAND (arg1, 0);
12158 return constant_boolean_node (equal
12159 ? code == EQ_EXPR : code != EQ_EXPR,
12160 type);
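/* Editorial illustration, not part of the original source: given
   static int a, b; at file scope, &a == &b folds to 0 and
   &a != &b folds to 1 by plain pointer identity of the two
   _DECL nodes.  */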
12163 /* If this is an EQ or NE comparison of a constant with a PLUS_EXPR or
12164 a MINUS_EXPR of a constant, we can convert it into a comparison with
12165 a revised constant as long as no overflow occurs. */
12166 if (TREE_CODE (arg1) == INTEGER_CST
12167 && (TREE_CODE (arg0) == PLUS_EXPR
12168 || TREE_CODE (arg0) == MINUS_EXPR)
12169 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
12170 && 0 != (tem = const_binop (TREE_CODE (arg0) == PLUS_EXPR
12171 ? MINUS_EXPR : PLUS_EXPR,
12172 fold_convert_loc (loc, TREE_TYPE (arg0),
12173 arg1),
12174 TREE_OPERAND (arg0, 1)))
12175 && !TREE_OVERFLOW (tem))
12176 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0), tem);
12178 /* Similarly for a NEGATE_EXPR. */
12179 if (TREE_CODE (arg0) == NEGATE_EXPR
12180 && TREE_CODE (arg1) == INTEGER_CST
12181 && 0 != (tem = negate_expr (fold_convert_loc (loc, TREE_TYPE (arg0),
12182 arg1)))
12183 && TREE_CODE (tem) == INTEGER_CST
12184 && !TREE_OVERFLOW (tem))
12185 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0), tem);
12187 /* Similarly for a BIT_XOR_EXPR; X ^ C1 == C2 is X == (C1 ^ C2). */
12188 if (TREE_CODE (arg0) == BIT_XOR_EXPR
12189 && TREE_CODE (arg1) == INTEGER_CST
12190 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
12191 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0),
12192 fold_build2_loc (loc, BIT_XOR_EXPR, TREE_TYPE (arg0),
12193 fold_convert_loc (loc,
12194 TREE_TYPE (arg0),
12195 arg1),
12196 TREE_OPERAND (arg0, 1)));
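/* For illustration: with int x, (x ^ 3) == 5 folds to
   x == (3 ^ 5), i.e. x == 6, because XOR by a constant is an
   involution that can be moved onto the constant side. */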
12198 /* Transform comparisons of the form X +- Y CMP X to Y CMP 0. */
12199 if ((TREE_CODE (arg0) == PLUS_EXPR
12200 || TREE_CODE (arg0) == POINTER_PLUS_EXPR
12201 || TREE_CODE (arg0) == MINUS_EXPR)
12202 && operand_equal_p (tree_strip_nop_conversions (TREE_OPERAND (arg0,
12203 0)),
12204 arg1, 0)
12205 && (INTEGRAL_TYPE_P (TREE_TYPE (arg0))
12206 || POINTER_TYPE_P (TREE_TYPE (arg0))))
12208 tree val = TREE_OPERAND (arg0, 1);
12209 return omit_two_operands_loc (loc, type,
12210 fold_build2_loc (loc, code, type,
12211 val,
12212 build_int_cst (TREE_TYPE (val),
12213 0)),
12214 TREE_OPERAND (arg0, 0), arg1);
12217 /* Transform comparisons of the form C - X CMP X if C % 2 == 1. */
12218 if (TREE_CODE (arg0) == MINUS_EXPR
12219 && TREE_CODE (TREE_OPERAND (arg0, 0)) == INTEGER_CST
12220 && operand_equal_p (tree_strip_nop_conversions (TREE_OPERAND (arg0,
12221 1)),
12222 arg1, 0)
12223 && (TREE_INT_CST_LOW (TREE_OPERAND (arg0, 0)) & 1) == 1)
12225 return omit_two_operands_loc (loc, type,
12226 code == NE_EXPR
12227 ? boolean_true_node : boolean_false_node,
12228 TREE_OPERAND (arg0, 1), arg1);
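/* A sketch of the reasoning: C - X == X would require C == 2*X,
   which has no solution modulo 2**n when C is odd, so e.g.
   7 - x == x folds to false and 7 - x != x folds to true for
   every integer x. */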
12231 /* If we have X - Y == 0, we can convert that to X == Y and similarly
12232 for !=. Don't do this for ordered comparisons due to overflow. */
12233 if (TREE_CODE (arg0) == MINUS_EXPR
12234 && integer_zerop (arg1))
12235 return fold_build2_loc (loc, code, type,
12236 TREE_OPERAND (arg0, 0), TREE_OPERAND (arg0, 1));
12238 /* Convert ABS_EXPR<x> == 0 or ABS_EXPR<x> != 0 to x == 0 or x != 0. */
12239 if (TREE_CODE (arg0) == ABS_EXPR
12240 && (integer_zerop (arg1) || real_zerop (arg1)))
12241 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0), arg1);
12243 /* If this is an EQ or NE comparison with zero and ARG0 is
12244 (1 << foo) & bar, convert it to (bar >> foo) & 1. Both require
12245 two operations, but the latter can be done in one less insn
12246 on machines that have only two-operand insns or on which a
12247 constant cannot be the first operand. */
12248 if (TREE_CODE (arg0) == BIT_AND_EXPR
12249 && integer_zerop (arg1))
12251 tree arg00 = TREE_OPERAND (arg0, 0);
12252 tree arg01 = TREE_OPERAND (arg0, 1);
12253 if (TREE_CODE (arg00) == LSHIFT_EXPR
12254 && integer_onep (TREE_OPERAND (arg00, 0)))
12256 tree tem = fold_build2_loc (loc, RSHIFT_EXPR, TREE_TYPE (arg00),
12257 arg01, TREE_OPERAND (arg00, 1));
12258 tem = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg0), tem,
12259 build_int_cst (TREE_TYPE (arg0), 1));
12260 return fold_build2_loc (loc, code, type,
12261 fold_convert_loc (loc, TREE_TYPE (arg1), tem),
12262 arg1);
12264 else if (TREE_CODE (arg01) == LSHIFT_EXPR
12265 && integer_onep (TREE_OPERAND (arg01, 0)))
12267 tree tem = fold_build2_loc (loc, RSHIFT_EXPR, TREE_TYPE (arg01),
12268 arg00, TREE_OPERAND (arg01, 1));
12269 tem = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg0), tem,
12270 build_int_cst (TREE_TYPE (arg0), 1));
12271 return fold_build2_loc (loc, code, type,
12272 fold_convert_loc (loc, TREE_TYPE (arg1), tem),
12273 arg1);
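/* For illustration: ((1 << n) & flags) != 0 becomes
   ((flags >> n) & 1) != 0; both test bit n of flags, and the
   rewritten form suits the targets described above. */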
12277 /* If this is an NE or EQ comparison of zero against the result of a
12278 signed MOD operation whose second operand is a power of 2, make
12279 the MOD operation unsigned since it is simpler and equivalent. */
12280 if (integer_zerop (arg1)
12281 && !TYPE_UNSIGNED (TREE_TYPE (arg0))
12282 && (TREE_CODE (arg0) == TRUNC_MOD_EXPR
12283 || TREE_CODE (arg0) == CEIL_MOD_EXPR
12284 || TREE_CODE (arg0) == FLOOR_MOD_EXPR
12285 || TREE_CODE (arg0) == ROUND_MOD_EXPR)
12286 && integer_pow2p (TREE_OPERAND (arg0, 1)))
12288 tree newtype = unsigned_type_for (TREE_TYPE (arg0));
12289 tree newmod = fold_build2_loc (loc, TREE_CODE (arg0), newtype,
12290 fold_convert_loc (loc, newtype,
12291 TREE_OPERAND (arg0, 0)),
12292 fold_convert_loc (loc, newtype,
12293 TREE_OPERAND (arg0, 1)));
12295 return fold_build2_loc (loc, code, type, newmod,
12296 fold_convert_loc (loc, newtype, arg1));
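/* For illustration, assuming 32-bit int: with signed x,
   (x % 4) != 0 becomes ((unsigned) x % 4U) != 0.  The two agree
   because x is a multiple of 4 exactly when (unsigned) x is,
   2**32 itself being a multiple of 4; the integer_zerop (arg1)
   guard keeps the fold limited to comparisons against zero. */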
12299 /* Fold ((X >> C1) & C2) == 0 and ((X >> C1) & C2) != 0 where
12300 C1 is a valid shift constant, and C2 is a power of two, i.e.
12301 a single bit. */
12302 if (TREE_CODE (arg0) == BIT_AND_EXPR
12303 && TREE_CODE (TREE_OPERAND (arg0, 0)) == RSHIFT_EXPR
12304 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1))
12305 == INTEGER_CST
12306 && integer_pow2p (TREE_OPERAND (arg0, 1))
12307 && integer_zerop (arg1))
12309 tree itype = TREE_TYPE (arg0);
12310 unsigned HOST_WIDE_INT prec = TYPE_PRECISION (itype);
12311 tree arg001 = TREE_OPERAND (TREE_OPERAND (arg0, 0), 1);
12313 /* Check for a valid shift count. */
12314 if (TREE_INT_CST_HIGH (arg001) == 0
12315 && TREE_INT_CST_LOW (arg001) < prec)
12317 tree arg01 = TREE_OPERAND (arg0, 1);
12318 tree arg000 = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
12319 unsigned HOST_WIDE_INT log2 = tree_log2 (arg01);
12320 /* If (C2 << C1) doesn't overflow, then ((X >> C1) & C2) != 0
12321 can be rewritten as (X & (C2 << C1)) != 0. */
12322 if ((log2 + TREE_INT_CST_LOW (arg001)) < prec)
12324 tem = fold_build2_loc (loc, LSHIFT_EXPR, itype, arg01, arg001);
12325 tem = fold_build2_loc (loc, BIT_AND_EXPR, itype, arg000, tem);
12326 return fold_build2_loc (loc, code, type, tem,
12327 fold_convert_loc (loc, itype, arg1));
12329 /* Otherwise, for signed (arithmetic) shifts,
12330 ((X >> C1) & C2) != 0 is rewritten as X < 0, and
12331 ((X >> C1) & C2) == 0 is rewritten as X >= 0. */
12332 else if (!TYPE_UNSIGNED (itype))
12333 return fold_build2_loc (loc, code == EQ_EXPR ? GE_EXPR : LT_EXPR, type,
12334 arg000, build_int_cst (itype, 0));
12335 /* Otherwise, for unsigned (logical) shifts,
12336 ((X >> C1) & C2) != 0 is rewritten as (X,false), and
12337 ((X >> C1) & C2) == 0 is rewritten as (X,true). */
12338 else
12339 return omit_one_operand_loc (loc, type,
12340 code == EQ_EXPR ? integer_one_node
12341 : integer_zero_node,
12342 arg000);
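/* For illustration, assuming 32-bit int: ((x >> 2) & 4) != 0
   becomes (x & 16) != 0, since 4 << 2 does not overflow.  With
   signed x, ((x >> 2) & 0x40000000) != 0 instead becomes x < 0:
   the arithmetic shift replicates the sign bit into the tested
   position. */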
12346 /* If this is an NE comparison of zero with an AND of one, remove the
12347 comparison since the AND will give the correct value. */
12348 if (code == NE_EXPR
12349 && integer_zerop (arg1)
12350 && TREE_CODE (arg0) == BIT_AND_EXPR
12351 && integer_onep (TREE_OPERAND (arg0, 1)))
12352 return fold_convert_loc (loc, type, arg0);
12354 /* If we have (A & C) == C where C is a power of 2, convert this into
12355 (A & C) != 0. Similarly for NE_EXPR. */
12356 if (TREE_CODE (arg0) == BIT_AND_EXPR
12357 && integer_pow2p (TREE_OPERAND (arg0, 1))
12358 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
12359 return fold_build2_loc (loc, code == EQ_EXPR ? NE_EXPR : EQ_EXPR, type,
12360 arg0, fold_convert_loc (loc, TREE_TYPE (arg0),
12361 integer_zero_node));
12363 /* If we have (A & C) != 0 or (A & C) == 0 and C is the sign
12364 bit, then fold the expression into A < 0 or A >= 0. */
12365 tem = fold_single_bit_test_into_sign_test (loc, code, arg0, arg1, type);
12366 if (tem)
12367 return tem;
12369 /* If we have (A & C) == D where D & ~C != 0, convert this into 0.
12370 Similarly for NE_EXPR. */
12371 if (TREE_CODE (arg0) == BIT_AND_EXPR
12372 && TREE_CODE (arg1) == INTEGER_CST
12373 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
12375 tree notc = fold_build1_loc (loc, BIT_NOT_EXPR,
12376 TREE_TYPE (TREE_OPERAND (arg0, 1)),
12377 TREE_OPERAND (arg0, 1));
12378 tree dandnotc
12379 = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg0),
12380 fold_convert_loc (loc, TREE_TYPE (arg0), arg1),
12381 notc);
12382 tree rslt = code == EQ_EXPR ? integer_zero_node : integer_one_node;
12383 if (integer_nonzerop (dandnotc))
12384 return omit_one_operand_loc (loc, type, rslt, arg0);
12387 /* If we have (A | C) == D where C & ~D != 0, convert this into 0.
12388 Similarly for NE_EXPR. */
12389 if (TREE_CODE (arg0) == BIT_IOR_EXPR
12390 && TREE_CODE (arg1) == INTEGER_CST
12391 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
12393 tree notd = fold_build1_loc (loc, BIT_NOT_EXPR, TREE_TYPE (arg1), arg1);
12394 tree candnotd
12395 = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg0),
12396 TREE_OPERAND (arg0, 1),
12397 fold_convert_loc (loc, TREE_TYPE (arg0), notd));
12398 tree rslt = code == EQ_EXPR ? integer_zero_node : integer_one_node;
12399 if (integer_nonzerop (candnotd))
12400 return omit_one_operand_loc (loc, type, rslt, arg0);
12403 /* If this is a comparison of a field, we may be able to simplify it. */
12404 if ((TREE_CODE (arg0) == COMPONENT_REF
12405 || TREE_CODE (arg0) == BIT_FIELD_REF)
12406 /* Handle the constant case even without -O
12407 to make sure the warnings are given. */
12408 && (optimize || TREE_CODE (arg1) == INTEGER_CST))
12410 t1 = optimize_bit_field_compare (loc, code, type, arg0, arg1);
12411 if (t1)
12412 return t1;
12415 /* Optimize comparisons of strlen vs zero to a compare of the
12416 first character of the string vs zero. To wit,
12417 strlen(ptr) == 0 => *ptr == 0
12418 strlen(ptr) != 0 => *ptr != 0
12419 Other cases should reduce to one of these two (or a constant)
12420 due to the return value of strlen being unsigned. */
12421 if (TREE_CODE (arg0) == CALL_EXPR
12422 && integer_zerop (arg1))
12424 tree fndecl = get_callee_fndecl (arg0);
12426 if (fndecl
12427 && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
12428 && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_STRLEN
12429 && call_expr_nargs (arg0) == 1
12430 && TREE_CODE (TREE_TYPE (CALL_EXPR_ARG (arg0, 0))) == POINTER_TYPE)
12432 tree iref = build_fold_indirect_ref_loc (loc,
12433 CALL_EXPR_ARG (arg0, 0));
12434 return fold_build2_loc (loc, code, type, iref,
12435 build_int_cst (TREE_TYPE (iref), 0));
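/* For illustration: strlen (p) == 0 folds to *p == 0, dropping
   the library call; strlen returns zero exactly when the first
   character is the terminating NUL. */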
12439 /* Fold (X >> C) != 0 into X < 0 if C is one less than the width
12440 of X. Similarly fold (X >> C) == 0 into X >= 0. */
12441 if (TREE_CODE (arg0) == RSHIFT_EXPR
12442 && integer_zerop (arg1)
12443 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
12445 tree arg00 = TREE_OPERAND (arg0, 0);
12446 tree arg01 = TREE_OPERAND (arg0, 1);
12447 tree itype = TREE_TYPE (arg00);
12448 if (TREE_INT_CST_HIGH (arg01) == 0
12449 && TREE_INT_CST_LOW (arg01)
12450 == (unsigned HOST_WIDE_INT) (TYPE_PRECISION (itype) - 1))
12452 if (TYPE_UNSIGNED (itype))
12454 itype = signed_type_for (itype);
12455 arg00 = fold_convert_loc (loc, itype, arg00);
12457 return fold_build2_loc (loc, code == EQ_EXPR ? GE_EXPR : LT_EXPR,
12458 type, arg00, build_int_cst (itype, 0));
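/* For illustration, assuming 32-bit int: (x >> 31) != 0 folds to
   x < 0 (converting an unsigned x to its signed type first),
   since shifting by precision - 1 leaves only the sign bit. */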
12462 /* (X ^ Y) == 0 becomes X == Y, and (X ^ Y) != 0 becomes X != Y. */
12463 if (integer_zerop (arg1)
12464 && TREE_CODE (arg0) == BIT_XOR_EXPR)
12465 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0),
12466 TREE_OPERAND (arg0, 1));
12468 /* (X ^ Y) == Y becomes X == 0. We know that Y has no side-effects. */
12469 if (TREE_CODE (arg0) == BIT_XOR_EXPR
12470 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
12471 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0),
12472 build_int_cst (TREE_TYPE (arg0), 0));
12473 /* Likewise (X ^ Y) == X becomes Y == 0. X has no side-effects. */
12474 if (TREE_CODE (arg0) == BIT_XOR_EXPR
12475 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
12476 && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
12477 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 1),
12478 build_int_cst (TREE_TYPE (arg0), 0));
12480 /* (X ^ C1) op C2 can be rewritten as X op (C1 ^ C2). */
12481 if (TREE_CODE (arg0) == BIT_XOR_EXPR
12482 && TREE_CODE (arg1) == INTEGER_CST
12483 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
12484 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0),
12485 fold_build2_loc (loc, BIT_XOR_EXPR, TREE_TYPE (arg1),
12486 TREE_OPERAND (arg0, 1), arg1));
12488 /* Fold (~X & C) == 0 into (X & C) != 0 and (~X & C) != 0 into
12489 (X & C) == 0 when C is a single bit. */
12490 if (TREE_CODE (arg0) == BIT_AND_EXPR
12491 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_NOT_EXPR
12492 && integer_zerop (arg1)
12493 && integer_pow2p (TREE_OPERAND (arg0, 1)))
12495 tem = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg0),
12496 TREE_OPERAND (TREE_OPERAND (arg0, 0), 0),
12497 TREE_OPERAND (arg0, 1));
12498 return fold_build2_loc (loc, code == EQ_EXPR ? NE_EXPR : EQ_EXPR,
12499 type, tem,
12500 fold_convert_loc (loc, TREE_TYPE (arg0),
12501 arg1));
12504 /* Fold ((X & C) ^ C) eq/ne 0 into (X & C) ne/eq 0, when the
12505 constant C is a power of two, i.e. a single bit. */
12506 if (TREE_CODE (arg0) == BIT_XOR_EXPR
12507 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_AND_EXPR
12508 && integer_zerop (arg1)
12509 && integer_pow2p (TREE_OPERAND (arg0, 1))
12510 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
12511 TREE_OPERAND (arg0, 1), OEP_ONLY_CONST))
12513 tree arg00 = TREE_OPERAND (arg0, 0);
12514 return fold_build2_loc (loc, code == EQ_EXPR ? NE_EXPR : EQ_EXPR, type,
12515 arg00, build_int_cst (TREE_TYPE (arg00), 0));
12518 /* Likewise, fold ((X ^ C) & C) eq/ne 0 into (X & C) ne/eq 0,
12519 when C is a power of two, i.e. a single bit. */
12520 if (TREE_CODE (arg0) == BIT_AND_EXPR
12521 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_XOR_EXPR
12522 && integer_zerop (arg1)
12523 && integer_pow2p (TREE_OPERAND (arg0, 1))
12524 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
12525 TREE_OPERAND (arg0, 1), OEP_ONLY_CONST))
12527 tree arg000 = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
12528 tem = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg000),
12529 arg000, TREE_OPERAND (arg0, 1));
12530 return fold_build2_loc (loc, code == EQ_EXPR ? NE_EXPR : EQ_EXPR, type,
12531 tem, build_int_cst (TREE_TYPE (tem), 0));
12534 if (integer_zerop (arg1)
12535 && tree_expr_nonzero_p (arg0))
12537 tree res = constant_boolean_node (code == NE_EXPR, type);
12538 return omit_one_operand_loc (loc, type, res, arg0);
12541 /* Fold -X op -Y as X op Y, where op is eq/ne. */
12542 if (TREE_CODE (arg0) == NEGATE_EXPR
12543 && TREE_CODE (arg1) == NEGATE_EXPR)
12544 return fold_build2_loc (loc, code, type,
12545 TREE_OPERAND (arg0, 0),
12546 fold_convert_loc (loc, TREE_TYPE (arg0),
12547 TREE_OPERAND (arg1, 0)));
12549 /* Fold (X & C) op (Y & C) as ((X ^ Y) & C) op 0, and symmetries. */
12550 if (TREE_CODE (arg0) == BIT_AND_EXPR
12551 && TREE_CODE (arg1) == BIT_AND_EXPR)
12553 tree arg00 = TREE_OPERAND (arg0, 0);
12554 tree arg01 = TREE_OPERAND (arg0, 1);
12555 tree arg10 = TREE_OPERAND (arg1, 0);
12556 tree arg11 = TREE_OPERAND (arg1, 1);
12557 tree itype = TREE_TYPE (arg0);
12559 if (operand_equal_p (arg01, arg11, 0))
12560 return fold_build2_loc (loc, code, type,
12561 fold_build2_loc (loc, BIT_AND_EXPR, itype,
12562 fold_build2_loc (loc,
12563 BIT_XOR_EXPR, itype,
12564 arg00, arg10),
12565 arg01),
12566 build_int_cst (itype, 0));
12568 if (operand_equal_p (arg01, arg10, 0))
12569 return fold_build2_loc (loc, code, type,
12570 fold_build2_loc (loc, BIT_AND_EXPR, itype,
12571 fold_build2_loc (loc,
12572 BIT_XOR_EXPR, itype,
12573 arg00, arg11),
12574 arg01),
12575 build_int_cst (itype, 0));
12577 if (operand_equal_p (arg00, arg11, 0))
12578 return fold_build2_loc (loc, code, type,
12579 fold_build2_loc (loc, BIT_AND_EXPR, itype,
12580 fold_build2_loc (loc,
12581 BIT_XOR_EXPR, itype,
12582 arg01, arg10),
12583 arg00),
12584 build_int_cst (itype, 0));
12586 if (operand_equal_p (arg00, arg10, 0))
12587 return fold_build2_loc (loc, code, type,
12588 fold_build2_loc (loc, BIT_AND_EXPR, itype,
12589 fold_build2_loc (loc,
12590 BIT_XOR_EXPR, itype,
12591 arg01, arg11),
12592 arg00),
12593 build_int_cst (itype, 0));
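/* For illustration: (x & 7) == (y & 7) folds to
   ((x ^ y) & 7) == 0; the masked bits agree exactly when their
   XOR has none of the mask bits set.  The four cases above only
   differ in where the common mask C sits among the operands. */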
12596 if (TREE_CODE (arg0) == BIT_XOR_EXPR
12597 && TREE_CODE (arg1) == BIT_XOR_EXPR)
12599 tree arg00 = TREE_OPERAND (arg0, 0);
12600 tree arg01 = TREE_OPERAND (arg0, 1);
12601 tree arg10 = TREE_OPERAND (arg1, 0);
12602 tree arg11 = TREE_OPERAND (arg1, 1);
12603 tree itype = TREE_TYPE (arg0);
12605 /* Optimize (X ^ Z) op (Y ^ Z) as X op Y, and symmetries.
12606 operand_equal_p guarantees no side-effects so we don't need
12607 to use omit_one_operand on Z. */
12608 if (operand_equal_p (arg01, arg11, 0))
12609 return fold_build2_loc (loc, code, type, arg00,
12610 fold_convert_loc (loc, TREE_TYPE (arg00),
12611 arg10));
12612 if (operand_equal_p (arg01, arg10, 0))
12613 return fold_build2_loc (loc, code, type, arg00,
12614 fold_convert_loc (loc, TREE_TYPE (arg00),
12615 arg11));
12616 if (operand_equal_p (arg00, arg11, 0))
12617 return fold_build2_loc (loc, code, type, arg01,
12618 fold_convert_loc (loc, TREE_TYPE (arg01),
12619 arg10));
12620 if (operand_equal_p (arg00, arg10, 0))
12621 return fold_build2_loc (loc, code, type, arg01,
12622 fold_convert_loc (loc, TREE_TYPE (arg01),
12623 arg11));
12625 /* Optimize (X ^ C1) op (Y ^ C2) as (X ^ (C1 ^ C2)) op Y. */
12626 if (TREE_CODE (arg01) == INTEGER_CST
12627 && TREE_CODE (arg11) == INTEGER_CST)
12629 tem = fold_build2_loc (loc, BIT_XOR_EXPR, itype, arg01,
12630 fold_convert_loc (loc, itype, arg11));
12631 tem = fold_build2_loc (loc, BIT_XOR_EXPR, itype, arg00, tem);
12632 return fold_build2_loc (loc, code, type, tem,
12633 fold_convert_loc (loc, itype, arg10));
12637 /* Attempt to simplify equality/inequality comparisons of complex
12638 values. Only lower the comparison if the result is known or
12639 can be simplified to a single scalar comparison. */
12640 if ((TREE_CODE (arg0) == COMPLEX_EXPR
12641 || TREE_CODE (arg0) == COMPLEX_CST)
12642 && (TREE_CODE (arg1) == COMPLEX_EXPR
12643 || TREE_CODE (arg1) == COMPLEX_CST))
12645 tree real0, imag0, real1, imag1;
12646 tree rcond, icond;
12648 if (TREE_CODE (arg0) == COMPLEX_EXPR)
12650 real0 = TREE_OPERAND (arg0, 0);
12651 imag0 = TREE_OPERAND (arg0, 1);
12653 else
12655 real0 = TREE_REALPART (arg0);
12656 imag0 = TREE_IMAGPART (arg0);
12659 if (TREE_CODE (arg1) == COMPLEX_EXPR)
12661 real1 = TREE_OPERAND (arg1, 0);
12662 imag1 = TREE_OPERAND (arg1, 1);
12664 else
12666 real1 = TREE_REALPART (arg1);
12667 imag1 = TREE_IMAGPART (arg1);
12670 rcond = fold_binary_loc (loc, code, type, real0, real1);
12671 if (rcond && TREE_CODE (rcond) == INTEGER_CST)
12673 if (integer_zerop (rcond))
12675 if (code == EQ_EXPR)
12676 return omit_two_operands_loc (loc, type, boolean_false_node,
12677 imag0, imag1);
12678 return fold_build2_loc (loc, NE_EXPR, type, imag0, imag1);
12680 else
12682 if (code == NE_EXPR)
12683 return omit_two_operands_loc (loc, type, boolean_true_node,
12684 imag0, imag1);
12685 return fold_build2_loc (loc, EQ_EXPR, type, imag0, imag1);
12689 icond = fold_binary_loc (loc, code, type, imag0, imag1);
12690 if (icond && TREE_CODE (icond) == INTEGER_CST)
12692 if (integer_zerop (icond))
12694 if (code == EQ_EXPR)
12695 return omit_two_operands_loc (loc, type, boolean_false_node,
12696 real0, real1);
12697 return fold_build2_loc (loc, NE_EXPR, type, real0, real1);
12699 else
12701 if (code == NE_EXPR)
12702 return omit_two_operands_loc (loc, type, boolean_true_node,
12703 real0, real1);
12704 return fold_build2_loc (loc, EQ_EXPR, type, real0, real1);
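/* For illustration: equality on complex values is componentwise,
   so (2.0 + x*I) == (3.0 + y*I) collapses to false (keeping the
   imaginary parts only for their side effects), while
   (2.0 + x*I) == (2.0 + y*I) lowers to the single scalar
   comparison x == y. */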
12709 return NULL_TREE;
12711 case LT_EXPR:
12712 case GT_EXPR:
12713 case LE_EXPR:
12714 case GE_EXPR:
12715 tem = fold_comparison (loc, code, type, op0, op1);
12716 if (tem != NULL_TREE)
12717 return tem;
12719 /* Transform comparisons of the form X +- C CMP X. */
12720 if ((TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
12721 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
12722 && ((TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
12723 && !HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0))))
12724 || (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
12725 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))))
12727 tree arg01 = TREE_OPERAND (arg0, 1);
12728 enum tree_code code0 = TREE_CODE (arg0);
12729 int is_positive;
12731 if (TREE_CODE (arg01) == REAL_CST)
12732 is_positive = REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg01)) ? -1 : 1;
12733 else
12734 is_positive = tree_int_cst_sgn (arg01);
12736 /* (X - c) > X becomes false. */
12737 if (code == GT_EXPR
12738 && ((code0 == MINUS_EXPR && is_positive >= 0)
12739 || (code0 == PLUS_EXPR && is_positive <= 0)))
12741 if (TREE_CODE (arg01) == INTEGER_CST
12742 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
12743 fold_overflow_warning (("assuming signed overflow does not "
12744 "occur when assuming that (X - c) > X "
12745 "is always false"),
12746 WARN_STRICT_OVERFLOW_ALL);
12747 return constant_boolean_node (0, type);
12750 /* Likewise (X + c) < X becomes false. */
12751 if (code == LT_EXPR
12752 && ((code0 == PLUS_EXPR && is_positive >= 0)
12753 || (code0 == MINUS_EXPR && is_positive <= 0)))
12755 if (TREE_CODE (arg01) == INTEGER_CST
12756 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
12757 fold_overflow_warning (("assuming signed overflow does not "
12758 "occur when assuming that "
12759 "(X + c) < X is always false"),
12760 WARN_STRICT_OVERFLOW_ALL);
12761 return constant_boolean_node (0, type);
12764 /* Convert (X - c) <= X to true. */
12765 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1)))
12766 && code == LE_EXPR
12767 && ((code0 == MINUS_EXPR && is_positive >= 0)
12768 || (code0 == PLUS_EXPR && is_positive <= 0)))
12770 if (TREE_CODE (arg01) == INTEGER_CST
12771 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
12772 fold_overflow_warning (("assuming signed overflow does not "
12773 "occur when assuming that "
12774 "(X - c) <= X is always true"),
12775 WARN_STRICT_OVERFLOW_ALL);
12776 return constant_boolean_node (1, type);
12779 /* Convert (X + c) >= X to true. */
12780 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1)))
12781 && code == GE_EXPR
12782 && ((code0 == PLUS_EXPR && is_positive >= 0)
12783 || (code0 == MINUS_EXPR && is_positive <= 0)))
12785 if (TREE_CODE (arg01) == INTEGER_CST
12786 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
12787 fold_overflow_warning (("assuming signed overflow does not "
12788 "occur when assuming that "
12789 "(X + c) >= X is always true"),
12790 WARN_STRICT_OVERFLOW_ALL);
12791 return constant_boolean_node (1, type);
12794 if (TREE_CODE (arg01) == INTEGER_CST)
12796 /* Convert X + c > X and X - c < X to true for integers. */
12797 if (code == GT_EXPR
12798 && ((code0 == PLUS_EXPR && is_positive > 0)
12799 || (code0 == MINUS_EXPR && is_positive < 0)))
12801 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
12802 fold_overflow_warning (("assuming signed overflow does "
12803 "not occur when assuming that "
12804 "(X + c) > X is always true"),
12805 WARN_STRICT_OVERFLOW_ALL);
12806 return constant_boolean_node (1, type);
12809 if (code == LT_EXPR
12810 && ((code0 == MINUS_EXPR && is_positive > 0)
12811 || (code0 == PLUS_EXPR && is_positive < 0)))
12813 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
12814 fold_overflow_warning (("assuming signed overflow does "
12815 "not occur when assuming that "
12816 "(X - c) < X is always true"),
12817 WARN_STRICT_OVERFLOW_ALL);
12818 return constant_boolean_node (1, type);
12821 /* Convert X + c <= X and X - c >= X to false for integers. */
12822 if (code == LE_EXPR
12823 && ((code0 == PLUS_EXPR && is_positive > 0)
12824 || (code0 == MINUS_EXPR && is_positive < 0)))
12826 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
12827 fold_overflow_warning (("assuming signed overflow does "
12828 "not occur when assuming that "
12829 "(X + c) <= X is always false"),
12830 WARN_STRICT_OVERFLOW_ALL);
12831 return constant_boolean_node (0, type);
12834 if (code == GE_EXPR
12835 && ((code0 == MINUS_EXPR && is_positive > 0)
12836 || (code0 == PLUS_EXPR && is_positive < 0)))
12838 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
12839 fold_overflow_warning (("assuming signed overflow does "
12840 "not occur when assuming that "
12841 "(X - c) >= X is always false"),
12842 WARN_STRICT_OVERFLOW_ALL);
12843 return constant_boolean_node (0, type);
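/* For illustration, with signed int x and overflow treated as
   undefined (-fstrict-overflow): x + 1 > x folds to 1 and
   x + 1 <= x folds to 0; the fold_overflow_warning calls above
   let -Wstrict-overflow report that this assumption was used. */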
12848 /* Comparisons with the highest or lowest possible integer of
12849 the specified precision will have known values. */
12851 tree arg1_type = TREE_TYPE (arg1);
12852 unsigned int width = TYPE_PRECISION (arg1_type);
12854 if (TREE_CODE (arg1) == INTEGER_CST
12855 && width <= 2 * HOST_BITS_PER_WIDE_INT
12856 && (INTEGRAL_TYPE_P (arg1_type) || POINTER_TYPE_P (arg1_type)))
12858 HOST_WIDE_INT signed_max_hi;
12859 unsigned HOST_WIDE_INT signed_max_lo;
12860 unsigned HOST_WIDE_INT max_hi, max_lo, min_hi, min_lo;
12862 if (width <= HOST_BITS_PER_WIDE_INT)
12864 signed_max_lo = ((unsigned HOST_WIDE_INT) 1 << (width - 1))
12865 - 1;
12866 signed_max_hi = 0;
12867 max_hi = 0;
12869 if (TYPE_UNSIGNED (arg1_type))
12871 max_lo = ((unsigned HOST_WIDE_INT) 2 << (width - 1)) - 1;
12872 min_lo = 0;
12873 min_hi = 0;
12875 else
12877 max_lo = signed_max_lo;
12878 min_lo = ((unsigned HOST_WIDE_INT) -1 << (width - 1));
12879 min_hi = -1;
12882 else
12884 width -= HOST_BITS_PER_WIDE_INT;
12885 signed_max_lo = -1;
12886 signed_max_hi = ((unsigned HOST_WIDE_INT) 1 << (width - 1))
12887 - 1;
12888 max_lo = -1;
12889 min_lo = 0;
12891 if (TYPE_UNSIGNED (arg1_type))
12893 max_hi = ((unsigned HOST_WIDE_INT) 2 << (width - 1)) - 1;
12894 min_hi = 0;
12896 else
12898 max_hi = signed_max_hi;
12899 min_hi = ((unsigned HOST_WIDE_INT) -1 << (width - 1));
12903 if ((unsigned HOST_WIDE_INT) TREE_INT_CST_HIGH (arg1) == max_hi
12904 && TREE_INT_CST_LOW (arg1) == max_lo)
12905 switch (code)
12907 case GT_EXPR:
12908 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
12910 case GE_EXPR:
12911 return fold_build2_loc (loc, EQ_EXPR, type, op0, op1);
12913 case LE_EXPR:
12914 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
12916 case LT_EXPR:
12917 return fold_build2_loc (loc, NE_EXPR, type, op0, op1);
12919 /* The GE_EXPR and LT_EXPR cases above are not normally
12920 reached because of previous transformations. */
12922 default:
12923 break;
12925 else if ((unsigned HOST_WIDE_INT) TREE_INT_CST_HIGH (arg1)
12926 == max_hi
12927 && TREE_INT_CST_LOW (arg1) == max_lo - 1)
12928 switch (code)
12930 case GT_EXPR:
12931 arg1 = const_binop (PLUS_EXPR, arg1,
12932 build_int_cst (TREE_TYPE (arg1), 1));
12933 return fold_build2_loc (loc, EQ_EXPR, type,
12934 fold_convert_loc (loc,
12935 TREE_TYPE (arg1), arg0),
12936 arg1);
12937 case LE_EXPR:
12938 arg1 = const_binop (PLUS_EXPR, arg1,
12939 build_int_cst (TREE_TYPE (arg1), 1));
12940 return fold_build2_loc (loc, NE_EXPR, type,
12941 fold_convert_loc (loc, TREE_TYPE (arg1),
12942 arg0),
12943 arg1);
12944 default:
12945 break;
12947 else if ((unsigned HOST_WIDE_INT) TREE_INT_CST_HIGH (arg1)
12948 == min_hi
12949 && TREE_INT_CST_LOW (arg1) == min_lo)
12950 switch (code)
12952 case LT_EXPR:
12953 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
12955 case LE_EXPR:
12956 return fold_build2_loc (loc, EQ_EXPR, type, op0, op1);
12958 case GE_EXPR:
12959 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
12961 case GT_EXPR:
12962 return fold_build2_loc (loc, NE_EXPR, type, op0, op1);
12964 default:
12965 break;
12967 else if ((unsigned HOST_WIDE_INT) TREE_INT_CST_HIGH (arg1)
12968 == min_hi
12969 && TREE_INT_CST_LOW (arg1) == min_lo + 1)
12970 switch (code)
12972 case GE_EXPR:
12973 arg1 = const_binop (MINUS_EXPR, arg1, integer_one_node);
12974 return fold_build2_loc (loc, NE_EXPR, type,
12975 fold_convert_loc (loc,
12976 TREE_TYPE (arg1), arg0),
12977 arg1);
12978 case LT_EXPR:
12979 arg1 = const_binop (MINUS_EXPR, arg1, integer_one_node);
12980 return fold_build2_loc (loc, EQ_EXPR, type,
12981 fold_convert_loc (loc, TREE_TYPE (arg1),
12982 arg0),
12983 arg1);
12984 default:
12985 break;
12988 else if (TREE_INT_CST_HIGH (arg1) == signed_max_hi
12989 && TREE_INT_CST_LOW (arg1) == signed_max_lo
12990 && TYPE_UNSIGNED (arg1_type)
12991 /* We will flip the signedness of the comparison operator
12992 associated with the mode of arg1, so the sign bit is
12993 specified by this mode. Check that arg1 is the signed
12994 max associated with this sign bit. */
12995 && width == GET_MODE_BITSIZE (TYPE_MODE (arg1_type))
12996 /* signed_type does not work on pointer types. */
12997 && INTEGRAL_TYPE_P (arg1_type))
12999 /* The following case also applies to X < signed_max+1
13000 and X >= signed_max+1 because of previous transformations. */
13001 if (code == LE_EXPR || code == GT_EXPR)
13003 tree st;
13004 st = signed_type_for (TREE_TYPE (arg1));
13005 return fold_build2_loc (loc,
13006 code == LE_EXPR ? GE_EXPR : LT_EXPR,
13007 type, fold_convert_loc (loc, st, arg0),
13008 build_int_cst (st, 0));
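/* For illustration, assuming 32-bit int: for unsigned x,
   x <= 0x7fffffff folds to (int) x >= 0 and x > 0x7fffffff to
   (int) x < 0, turning a compare against the signed maximum into
   a plain sign test. */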
13014 /* If we are comparing an ABS_EXPR with a constant, we can
13015 convert all the cases into explicit comparisons, but they may
13016 well not be faster than doing the ABS and one comparison.
13017 But ABS (X) <= C is a range comparison, which becomes a subtraction
13018 and a comparison, and is probably faster. */
13019 if (code == LE_EXPR
13020 && TREE_CODE (arg1) == INTEGER_CST
13021 && TREE_CODE (arg0) == ABS_EXPR
13022 && ! TREE_SIDE_EFFECTS (arg0)
13023 && (0 != (tem = negate_expr (arg1)))
13024 && TREE_CODE (tem) == INTEGER_CST
13025 && !TREE_OVERFLOW (tem))
13026 return fold_build2_loc (loc, TRUTH_ANDIF_EXPR, type,
13027 build2 (GE_EXPR, type,
13028 TREE_OPERAND (arg0, 0), tem),
13029 build2 (LE_EXPR, type,
13030 TREE_OPERAND (arg0, 0), arg1));
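/* For illustration: abs (x) <= 5 folds to x >= -5 && x <= 5,
   a range check that never needs the absolute value itself. */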
13032 /* Convert ABS_EXPR<x> >= 0 to true. */
13033 strict_overflow_p = false;
13034 if (code == GE_EXPR
13035 && (integer_zerop (arg1)
13036 || (! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0)))
13037 && real_zerop (arg1)))
13038 && tree_expr_nonnegative_warnv_p (arg0, &strict_overflow_p))
13040 if (strict_overflow_p)
13041 fold_overflow_warning (("assuming signed overflow does not occur "
13042 "when simplifying comparison of "
13043 "absolute value and zero"),
13044 WARN_STRICT_OVERFLOW_CONDITIONAL);
13045 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
13048 /* Convert ABS_EXPR<x> < 0 to false. */
13049 strict_overflow_p = false;
13050 if (code == LT_EXPR
13051 && (integer_zerop (arg1) || real_zerop (arg1))
13052 && tree_expr_nonnegative_warnv_p (arg0, &strict_overflow_p))
13054 if (strict_overflow_p)
13055 fold_overflow_warning (("assuming signed overflow does not occur "
13056 "when simplifying comparison of "
13057 "absolute value and zero"),
13058 WARN_STRICT_OVERFLOW_CONDITIONAL);
13059 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
13062 /* If X is unsigned, convert X < (1 << Y) into X >> Y == 0
13063 and similarly for >= into !=. */
13064 if ((code == LT_EXPR || code == GE_EXPR)
13065 && TYPE_UNSIGNED (TREE_TYPE (arg0))
13066 && TREE_CODE (arg1) == LSHIFT_EXPR
13067 && integer_onep (TREE_OPERAND (arg1, 0)))
13068 return build2_loc (loc, code == LT_EXPR ? EQ_EXPR : NE_EXPR, type,
13069 build2 (RSHIFT_EXPR, TREE_TYPE (arg0), arg0,
13070 TREE_OPERAND (arg1, 1)),
13071 build_int_cst (TREE_TYPE (arg0), 0));
13073 if ((code == LT_EXPR || code == GE_EXPR)
13074 && TYPE_UNSIGNED (TREE_TYPE (arg0))
13075 && CONVERT_EXPR_P (arg1)
13076 && TREE_CODE (TREE_OPERAND (arg1, 0)) == LSHIFT_EXPR
13077 && integer_onep (TREE_OPERAND (TREE_OPERAND (arg1, 0), 0)))
13079 tem = build2 (RSHIFT_EXPR, TREE_TYPE (arg0), arg0,
13080 TREE_OPERAND (TREE_OPERAND (arg1, 0), 1));
13081 return build2_loc (loc, code == LT_EXPR ? EQ_EXPR : NE_EXPR, type,
13082 fold_convert_loc (loc, TREE_TYPE (arg0), tem),
13083 build_int_cst (TREE_TYPE (arg0), 0));
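/* For illustration: with unsigned x, x < (1U << y) folds to
   (x >> y) == 0 and x >= (1U << y) to (x >> y) != 0; x is below
   the single set bit exactly when shifting x right by y leaves
   no bits. */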
13086 return NULL_TREE;
13088 case UNORDERED_EXPR:
13089 case ORDERED_EXPR:
13090 case UNLT_EXPR:
13091 case UNLE_EXPR:
13092 case UNGT_EXPR:
13093 case UNGE_EXPR:
13094 case UNEQ_EXPR:
13095 case LTGT_EXPR:
13096 if (TREE_CODE (arg0) == REAL_CST && TREE_CODE (arg1) == REAL_CST)
13098 t1 = fold_relational_const (code, type, arg0, arg1);
13099 if (t1 != NULL_TREE)
13100 return t1;
13103 /* If the first operand is NaN, the result is constant. */
13104 if (TREE_CODE (arg0) == REAL_CST
13105 && REAL_VALUE_ISNAN (TREE_REAL_CST (arg0))
13106 && (code != LTGT_EXPR || ! flag_trapping_math))
13108 t1 = (code == ORDERED_EXPR || code == LTGT_EXPR)
13109 ? integer_zero_node
13110 : integer_one_node;
13111 return omit_one_operand_loc (loc, type, t1, arg1);
13114 /* If the second operand is NaN, the result is constant. */
13115 if (TREE_CODE (arg1) == REAL_CST
13116 && REAL_VALUE_ISNAN (TREE_REAL_CST (arg1))
13117 && (code != LTGT_EXPR || ! flag_trapping_math))
13119 t1 = (code == ORDERED_EXPR || code == LTGT_EXPR)
13120 ? integer_zero_node
13121 : integer_one_node;
13122 return omit_one_operand_loc (loc, type, t1, arg0);
13125 /* Simplify unordered comparison of something with itself. */
13126 if ((code == UNLE_EXPR || code == UNGE_EXPR || code == UNEQ_EXPR)
13127 && operand_equal_p (arg0, arg1, 0))
13128 return constant_boolean_node (1, type);
13130 if (code == LTGT_EXPR
13131 && !flag_trapping_math
13132 && operand_equal_p (arg0, arg1, 0))
13133 return constant_boolean_node (0, type);
13135 /* Fold (double)float1 CMP (double)float2 into float1 CMP float2. */
13137 tree targ0 = strip_float_extensions (arg0);
13138 tree targ1 = strip_float_extensions (arg1);
13139 tree newtype = TREE_TYPE (targ0);
13141 if (TYPE_PRECISION (TREE_TYPE (targ1)) > TYPE_PRECISION (newtype))
13142 newtype = TREE_TYPE (targ1);
13144 if (TYPE_PRECISION (newtype) < TYPE_PRECISION (TREE_TYPE (arg0)))
13145 return fold_build2_loc (loc, code, type,
13146 fold_convert_loc (loc, newtype, targ0),
13147 fold_convert_loc (loc, newtype, targ1));
13150 return NULL_TREE;
13152 case COMPOUND_EXPR:
13153 /* When pedantic, a compound expression can be neither an lvalue
13154 nor an integer constant expression. */
13155 if (TREE_SIDE_EFFECTS (arg0) || TREE_CONSTANT (arg1))
13156 return NULL_TREE;
13157 /* Don't let (0, 0) be a null pointer constant. */
13158 tem = integer_zerop (arg1) ? build1 (NOP_EXPR, type, arg1)
13159 : fold_convert_loc (loc, type, arg1);
13160 return pedantic_non_lvalue_loc (loc, tem);
13162 case COMPLEX_EXPR:
13163 if ((TREE_CODE (arg0) == REAL_CST
13164 && TREE_CODE (arg1) == REAL_CST)
13165 || (TREE_CODE (arg0) == INTEGER_CST
13166 && TREE_CODE (arg1) == INTEGER_CST))
13167 return build_complex (type, arg0, arg1);
13168 if (TREE_CODE (arg0) == REALPART_EXPR
13169 && TREE_CODE (arg1) == IMAGPART_EXPR
13170 && (TYPE_MAIN_VARIANT (TREE_TYPE (TREE_OPERAND (arg0, 0)))
13171 == TYPE_MAIN_VARIANT (type))
13172 && operand_equal_p (TREE_OPERAND (arg0, 0),
13173 TREE_OPERAND (arg1, 0), 0))
13174 return omit_one_operand_loc (loc, type, TREE_OPERAND (arg0, 0),
13175 TREE_OPERAND (arg1, 0));
13176 return NULL_TREE;
13178 case ASSERT_EXPR:
13179 /* An ASSERT_EXPR should never be passed to fold_binary. */
13180 gcc_unreachable ();
13182 default:
13183 return NULL_TREE;
13184 } /* switch (code) */
13187 /* Callback for walk_tree, looking for LABEL_EXPR. Return *TP if it is
13188 a LABEL_EXPR; otherwise return NULL_TREE. Do not check the subtrees
13189 of GOTO_EXPR. */
13191 static tree
13192 contains_label_1 (tree *tp, int *walk_subtrees, void *data ATTRIBUTE_UNUSED)
13194 switch (TREE_CODE (*tp))
13196 case LABEL_EXPR:
13197 return *tp;
13199 case GOTO_EXPR:
13200 *walk_subtrees = 0;
13202 /* ... fall through ... */
13204 default:
13205 return NULL_TREE;
13209 /* Return whether the sub-tree ST contains a label which is accessible from
13210 outside the sub-tree. */
13212 static bool
13213 contains_label_p (tree st)
13215 return
13216 (walk_tree_without_duplicates (&st, contains_label_1, NULL) != NULL_TREE);
13219 /* Fold a ternary expression of code CODE and type TYPE with operands
13220 OP0, OP1, and OP2. Return the folded expression if folding is
13221 successful. Otherwise, return NULL_TREE. */
13223 tree
13224 fold_ternary_loc (location_t loc, enum tree_code code, tree type,
13225 tree op0, tree op1, tree op2)
13227 tree tem;
13228 tree arg0 = NULL_TREE, arg1 = NULL_TREE, arg2 = NULL_TREE;
13229 enum tree_code_class kind = TREE_CODE_CLASS (code);
13231 gcc_assert (IS_EXPR_CODE_CLASS (kind)
13232 && TREE_CODE_LENGTH (code) == 3);
13234 /* Strip any conversions that don't change the mode. This is safe
13235 for every expression, except for a comparison expression because
13236 its signedness is derived from its operands. So, in the latter
13237 case, only strip conversions that don't change the signedness.
13239 Note that this is done as an internal manipulation within the
13240 constant folder, in order to find the simplest representation of
13241 the arguments so that their form can be studied. In any case,
13242 the appropriate type conversions should be put back in the tree
13243 that will get out of the constant folder. */
13244 if (op0)
13246 arg0 = op0;
13247 STRIP_NOPS (arg0);
13250 if (op1)
13252 arg1 = op1;
13253 STRIP_NOPS (arg1);
13256 if (op2)
13258 arg2 = op2;
13259 STRIP_NOPS (arg2);
13262 switch (code)
13264 case COMPONENT_REF:
13265 if (TREE_CODE (arg0) == CONSTRUCTOR
13266 && ! type_contains_placeholder_p (TREE_TYPE (arg0)))
13268 unsigned HOST_WIDE_INT idx;
13269 tree field, value;
13270 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (arg0), idx, field, value)
13271 if (field == arg1)
13272 return value;
13274 return NULL_TREE;
13276 case COND_EXPR:
13277 /* Pedantic ANSI C says that a conditional expression is never an lvalue,
13278 so all simple results must be passed through pedantic_non_lvalue. */
13279 if (TREE_CODE (arg0) == INTEGER_CST)
13281 tree unused_op = integer_zerop (arg0) ? op1 : op2;
13282 tem = integer_zerop (arg0) ? op2 : op1;
13283 /* Only optimize constant conditions when the selected branch
13284 has the same type as the COND_EXPR. This avoids optimizing
13285 away "c ? x : throw", where the throw has a void type.
13286 Also avoid throwing away an operand that contains a label. */
13287 if ((!TREE_SIDE_EFFECTS (unused_op)
13288 || !contains_label_p (unused_op))
13289 && (! VOID_TYPE_P (TREE_TYPE (tem))
13290 || VOID_TYPE_P (type)))
13291 return pedantic_non_lvalue_loc (loc, tem);
13292 return NULL_TREE;
13294 if (operand_equal_p (arg1, op2, 0))
13295 return pedantic_omit_one_operand_loc (loc, type, arg1, arg0);
13297 /* If we have A op B ? A : C, we may be able to convert this to a
13298 simpler expression, depending on the operation and the values
13299 of B and C. Signed zeros prevent all of these transformations,
13300 for reasons given above each one.
13302 Also try swapping the arguments and inverting the conditional. */
13303 if (COMPARISON_CLASS_P (arg0)
13304 && operand_equal_for_comparison_p (TREE_OPERAND (arg0, 0),
13305 arg1, TREE_OPERAND (arg0, 1))
13306 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg1))))
13308 tem = fold_cond_expr_with_comparison (loc, type, arg0, op1, op2);
13309 if (tem)
13310 return tem;
13313 if (COMPARISON_CLASS_P (arg0)
13314 && operand_equal_for_comparison_p (TREE_OPERAND (arg0, 0),
13315 op2,
13316 TREE_OPERAND (arg0, 1))
13317 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (op2))))
13319 location_t loc0 = EXPR_LOCATION (arg0);
13320 if (loc0 == UNKNOWN_LOCATION)
13321 loc0 = loc;
13322 tem = fold_truth_not_expr (loc0, arg0);
13323 if (tem && COMPARISON_CLASS_P (tem))
13325 tem = fold_cond_expr_with_comparison (loc, type, tem, op2, op1);
13326 if (tem)
13327 return tem;
13331 /* If the second operand is simpler than the third, swap them
13332 since that produces better jump optimization results. */
13333 if (truth_value_p (TREE_CODE (arg0))
13334 && tree_swap_operands_p (op1, op2, false))
13336 location_t loc0 = EXPR_LOCATION (arg0);
13337 if (loc0 == UNKNOWN_LOCATION)
13338 loc0 = loc;
13339 /* See if this can be inverted. If it can't, possibly because
13340 it was a floating-point inequality comparison, don't do
13341 anything. */
13342 tem = fold_truth_not_expr (loc0, arg0);
13343 if (tem)
13344 return fold_build3_loc (loc, code, type, tem, op2, op1);
13347 /* Convert A ? 1 : 0 to simply A. */
13348 if (integer_onep (op1)
13349 && integer_zerop (op2)
13350 /* If we try to convert OP0 to our type, the
13351 call to fold will try to move the conversion inside
13352 a COND, which will recurse. In that case, the COND_EXPR
13353 is probably the best choice, so leave it alone. */
13354 && type == TREE_TYPE (arg0))
13355 return pedantic_non_lvalue_loc (loc, arg0);
13357 /* Convert A ? 0 : 1 to !A. This prefers the use of NOT_EXPR
13358 over COND_EXPR in cases such as floating point comparisons. */
13359 if (integer_zerop (op1)
13360 && integer_onep (op2)
13361 && truth_value_p (TREE_CODE (arg0)))
13362 return pedantic_non_lvalue_loc (loc,
13363 fold_convert_loc (loc, type,
13364 invert_truthvalue_loc (loc,
13365 arg0)));
13367 /* A < 0 ? <sign bit of A> : 0 is simply (A & <sign bit of A>). */
13368 if (TREE_CODE (arg0) == LT_EXPR
13369 && integer_zerop (TREE_OPERAND (arg0, 1))
13370 && integer_zerop (op2)
13371 && (tem = sign_bit_p (TREE_OPERAND (arg0, 0), arg1)))
13373 /* sign_bit_p only checks ARG1 bits within A's precision.
13374 If <sign bit of A> has a wider type than A, bits outside
13375 of A's precision in <sign bit of A> need to be checked.
13376 If they are all 0, this optimization needs to be done
13377 in unsigned A's type; if they are all 1, in signed A's
13378 type; otherwise this can't be done. */
13379 if (TYPE_PRECISION (TREE_TYPE (tem))
13380 < TYPE_PRECISION (TREE_TYPE (arg1))
13381 && TYPE_PRECISION (TREE_TYPE (tem))
13382 < TYPE_PRECISION (type))
13384 unsigned HOST_WIDE_INT mask_lo;
13385 HOST_WIDE_INT mask_hi;
13386 int inner_width, outer_width;
13387 tree tem_type;
13389 inner_width = TYPE_PRECISION (TREE_TYPE (tem));
13390 outer_width = TYPE_PRECISION (TREE_TYPE (arg1));
13391 if (outer_width > TYPE_PRECISION (type))
13392 outer_width = TYPE_PRECISION (type);
13394 if (outer_width > HOST_BITS_PER_WIDE_INT)
13396 mask_hi = ((unsigned HOST_WIDE_INT) -1
13397 >> (2 * HOST_BITS_PER_WIDE_INT - outer_width));
13398 mask_lo = -1;
13400 else
13402 mask_hi = 0;
13403 mask_lo = ((unsigned HOST_WIDE_INT) -1
13404 >> (HOST_BITS_PER_WIDE_INT - outer_width));
13406 if (inner_width > HOST_BITS_PER_WIDE_INT)
13408 mask_hi &= ~((unsigned HOST_WIDE_INT) -1
13409 >> (HOST_BITS_PER_WIDE_INT - inner_width));
13410 mask_lo = 0;
13412 else
13413 mask_lo &= ~((unsigned HOST_WIDE_INT) -1
13414 >> (HOST_BITS_PER_WIDE_INT - inner_width));
13416 if ((TREE_INT_CST_HIGH (arg1) & mask_hi) == mask_hi
13417 && (TREE_INT_CST_LOW (arg1) & mask_lo) == mask_lo)
13419 tem_type = signed_type_for (TREE_TYPE (tem));
13420 tem = fold_convert_loc (loc, tem_type, tem);
13422 else if ((TREE_INT_CST_HIGH (arg1) & mask_hi) == 0
13423 && (TREE_INT_CST_LOW (arg1) & mask_lo) == 0)
13425 tem_type = unsigned_type_for (TREE_TYPE (tem));
13426 tem = fold_convert_loc (loc, tem_type, tem);
13428 else
13429 tem = NULL;
13432 if (tem)
13433 return
13434 fold_convert_loc (loc, type,
13435 fold_build2_loc (loc, BIT_AND_EXPR,
13436 TREE_TYPE (tem), tem,
13437 fold_convert_loc (loc,
13438 TREE_TYPE (tem),
13439 arg1)));
13442 /* (A >> N) & 1 ? (1 << N) : 0 is simply A & (1 << N). A & 1 was
13443 already handled above. */
13444 if (TREE_CODE (arg0) == BIT_AND_EXPR
13445 && integer_onep (TREE_OPERAND (arg0, 1))
13446 && integer_zerop (op2)
13447 && integer_pow2p (arg1))
13449 tree tem = TREE_OPERAND (arg0, 0);
13450 STRIP_NOPS (tem);
13451 if (TREE_CODE (tem) == RSHIFT_EXPR
13452 && TREE_CODE (TREE_OPERAND (tem, 1)) == INTEGER_CST
13453 && (unsigned HOST_WIDE_INT) tree_log2 (arg1) ==
13454 TREE_INT_CST_LOW (TREE_OPERAND (tem, 1)))
13455 return fold_build2_loc (loc, BIT_AND_EXPR, type,
13456 TREE_OPERAND (tem, 0), arg1);
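/* For illustration: ((a >> 3) & 1) ? (1 << 3) : 0 folds to
   a & 8; the conditional merely reproduces the tested bit in
   its original position. */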
13459 /* A & N ? N : 0 is simply A & N if N is a power of two. This
13460 is probably obsolete because the first operand should be a
13461 truth value (that's why we have the two cases above), but let's
13462 leave it in until we can confirm this for all front-ends. */
13463 if (integer_zerop (op2)
13464 && TREE_CODE (arg0) == NE_EXPR
13465 && integer_zerop (TREE_OPERAND (arg0, 1))
13466 && integer_pow2p (arg1)
13467 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_AND_EXPR
13468 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
13469 arg1, OEP_ONLY_CONST))
13470 return pedantic_non_lvalue_loc (loc,
13471 fold_convert_loc (loc, type,
13472 TREE_OPERAND (arg0, 0)));
13474 /* Convert A ? B : 0 into A && B if A and B are truth values. */
13475 if (integer_zerop (op2)
13476 && truth_value_p (TREE_CODE (arg0))
13477 && truth_value_p (TREE_CODE (arg1)))
13478 return fold_build2_loc (loc, TRUTH_ANDIF_EXPR, type,
13479 fold_convert_loc (loc, type, arg0),
13480 arg1);
13482 /* Convert A ? B : 1 into !A || B if A and B are truth values. */
13483 if (integer_onep (op2)
13484 && truth_value_p (TREE_CODE (arg0))
13485 && truth_value_p (TREE_CODE (arg1)))
13487 location_t loc0 = EXPR_LOCATION (arg0);
13488 if (loc0 == UNKNOWN_LOCATION)
13489 loc0 = loc;
13490 /* Only perform transformation if ARG0 is easily inverted. */
13491 tem = fold_truth_not_expr (loc0, arg0);
13492 if (tem)
13493 return fold_build2_loc (loc, TRUTH_ORIF_EXPR, type,
13494 fold_convert_loc (loc, type, tem),
13495 arg1);
13498 /* Convert A ? 0 : B into !A && B if A and B are truth values. */
13499 if (integer_zerop (arg1)
13500 && truth_value_p (TREE_CODE (arg0))
13501 && truth_value_p (TREE_CODE (op2)))
13503 location_t loc0 = EXPR_LOCATION (arg0);
13504 if (loc0 == UNKNOWN_LOCATION)
13505 loc0 = loc;
13506 /* Only perform transformation if ARG0 is easily inverted. */
13507 tem = fold_truth_not_expr (loc0, arg0);
13508 if (tem)
13509 return fold_build2_loc (loc, TRUTH_ANDIF_EXPR, type,
13510 fold_convert_loc (loc, type, tem),
13511 op2);
13514 /* Convert A ? 1 : B into A || B if A and B are truth values. */
13515 if (integer_onep (arg1)
13516 && truth_value_p (TREE_CODE (arg0))
13517 && truth_value_p (TREE_CODE (op2)))
13518 return fold_build2_loc (loc, TRUTH_ORIF_EXPR, type,
13519 fold_convert_loc (loc, type, arg0),
13520 op2);
13522 return NULL_TREE;
13524 case CALL_EXPR:
13525 /* CALL_EXPRs used to be ternary exprs. Catch any mistaken uses
13526 of fold_ternary on them. */
13527 gcc_unreachable ();
13529 case BIT_FIELD_REF:
13530 if ((TREE_CODE (arg0) == VECTOR_CST
13531 || (TREE_CODE (arg0) == CONSTRUCTOR && TREE_CONSTANT (arg0)))
13532 && type == TREE_TYPE (TREE_TYPE (arg0)))
13534 unsigned HOST_WIDE_INT width = tree_low_cst (arg1, 1);
13535 unsigned HOST_WIDE_INT idx = tree_low_cst (op2, 1);
13537 if (width != 0
13538 && simple_cst_equal (arg1, TYPE_SIZE (type)) == 1
13539 && (idx % width) == 0
13540 && (idx = idx / width)
13541 < TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0)))
13543 tree elements = NULL_TREE;
13545 if (TREE_CODE (arg0) == VECTOR_CST)
13546 elements = TREE_VECTOR_CST_ELTS (arg0);
13547 else
13549 unsigned HOST_WIDE_INT idx;
13550 tree value;
13552 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (arg0), idx, value)
13553 elements = tree_cons (NULL_TREE, value, elements);
13555 while (idx-- > 0 && elements)
13556 elements = TREE_CHAIN (elements);
13557 if (elements)
13558 return TREE_VALUE (elements);
13559 else
13560 return build_zero_cst (type);
13564 /* A bit-field-ref that references the full argument can be stripped. */
13565 if (INTEGRAL_TYPE_P (TREE_TYPE (arg0))
13566 && TYPE_PRECISION (TREE_TYPE (arg0)) == tree_low_cst (arg1, 1)
13567 && integer_zerop (op2))
13568 return fold_convert_loc (loc, type, arg0);
13570 return NULL_TREE;
13572 case FMA_EXPR:
13573 /* For integers we can decompose the FMA if possible. */
13574 if (TREE_CODE (arg0) == INTEGER_CST
13575 && TREE_CODE (arg1) == INTEGER_CST)
13576 return fold_build2_loc (loc, PLUS_EXPR, type,
13577 const_binop (MULT_EXPR, arg0, arg1), arg2);
13578 if (integer_zerop (arg2))
13579 return fold_build2_loc (loc, MULT_EXPR, type, arg0, arg1);
13581 return fold_fma (loc, type, arg0, arg1, arg2);
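/* For illustration: FMA_EXPR <4, 5, c> folds to 20 + c and
   FMA_EXPR <a, b, 0> to a * b; integer fused multiply-adds have
   no rounding behaviour to preserve, so decomposing them is
   safe. */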
13583 default:
13584 return NULL_TREE;
13585 } /* switch (code) */
13588 /* Perform constant folding and related simplification of EXPR.
13589 The related simplifications include x*1 => x, x*0 => 0, etc.,
13590 and application of the associative law.
13591 NOP_EXPR conversions may be removed freely (as long as we
13592 are careful not to change the type of the overall expression).
13593 We cannot simplify through a CONVERT_EXPR, FIX_EXPR or FLOAT_EXPR,
13594 but we can constant-fold them if they have constant operands. */
13596 #ifdef ENABLE_FOLD_CHECKING
13597 # define fold(x) fold_1 (x)
13598 static tree fold_1 (tree);
13599 static
13600 #endif
13601 tree
13602 fold (tree expr)
13604 const tree t = expr;
13605 enum tree_code code = TREE_CODE (t);
13606 enum tree_code_class kind = TREE_CODE_CLASS (code);
13607 tree tem;
13608 location_t loc = EXPR_LOCATION (expr);
13610 /* Return right away if a constant. */
13611 if (kind == tcc_constant)
13612 return t;
13614 /* CALL_EXPR-like objects with variable numbers of operands are
13615 treated specially. */
13616 if (kind == tcc_vl_exp)
13618 if (code == CALL_EXPR)
13620 tem = fold_call_expr (loc, expr, false);
13621 return tem ? tem : expr;
13623 return expr;
13626 if (IS_EXPR_CODE_CLASS (kind))
13628 tree type = TREE_TYPE (t);
13629 tree op0, op1, op2;
13631 switch (TREE_CODE_LENGTH (code))
13633 case 1:
13634 op0 = TREE_OPERAND (t, 0);
13635 tem = fold_unary_loc (loc, code, type, op0);
13636 return tem ? tem : expr;
13637 case 2:
13638 op0 = TREE_OPERAND (t, 0);
13639 op1 = TREE_OPERAND (t, 1);
13640 tem = fold_binary_loc (loc, code, type, op0, op1);
13641 return tem ? tem : expr;
13642 case 3:
13643 op0 = TREE_OPERAND (t, 0);
13644 op1 = TREE_OPERAND (t, 1);
13645 op2 = TREE_OPERAND (t, 2);
13646 tem = fold_ternary_loc (loc, code, type, op0, op1, op2);
13647 return tem ? tem : expr;
13648 default:
13649 break;
13653 switch (code)
13655 case ARRAY_REF:
13657 tree op0 = TREE_OPERAND (t, 0);
13658 tree op1 = TREE_OPERAND (t, 1);
13660 if (TREE_CODE (op1) == INTEGER_CST
13661 && TREE_CODE (op0) == CONSTRUCTOR
13662 && ! type_contains_placeholder_p (TREE_TYPE (op0)))
13664 VEC(constructor_elt,gc) *elts = CONSTRUCTOR_ELTS (op0);
13665 unsigned HOST_WIDE_INT end = VEC_length (constructor_elt, elts);
13666 unsigned HOST_WIDE_INT begin = 0;
13668 /* Find a matching index by means of a binary search. */
13669 while (begin != end)
13671 unsigned HOST_WIDE_INT middle = (begin + end) / 2;
13672 tree index = VEC_index (constructor_elt, elts, middle)->index;
13674 if (TREE_CODE (index) == INTEGER_CST
13675 && tree_int_cst_lt (index, op1))
13676 begin = middle + 1;
13677 else if (TREE_CODE (index) == INTEGER_CST
13678 && tree_int_cst_lt (op1, index))
13679 end = middle;
13680 else if (TREE_CODE (index) == RANGE_EXPR
13681 && tree_int_cst_lt (TREE_OPERAND (index, 1), op1))
13682 begin = middle + 1;
13683 else if (TREE_CODE (index) == RANGE_EXPR
13684 && tree_int_cst_lt (op1, TREE_OPERAND (index, 0)))
13685 end = middle;
13686 else
13687 return VEC_index (constructor_elt, elts, middle)->value;
13691 return t;
13694 case CONST_DECL:
13695 return fold (DECL_INITIAL (t));
13697 default:
13698 return t;
13699 } /* switch (code) */
13702 #ifdef ENABLE_FOLD_CHECKING
13703 #undef fold
13705 static void fold_checksum_tree (const_tree, struct md5_ctx *, htab_t);
13706 static void fold_check_failed (const_tree, const_tree);
13707 void print_fold_checksum (const_tree);
13709 /* When --enable-checking=fold, compute a digest of EXPR before
13710 and after the actual fold call to verify that fold did not
13711 accidentally change the original expr. */
13713 tree
13714 fold (tree expr)
13716 tree ret;
13717 struct md5_ctx ctx;
13718 unsigned char checksum_before[16], checksum_after[16];
13719 htab_t ht;
13721 ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
13722 md5_init_ctx (&ctx);
13723 fold_checksum_tree (expr, &ctx, ht);
13724 md5_finish_ctx (&ctx, checksum_before);
13725 htab_empty (ht);
13727 ret = fold_1 (expr);
13729 md5_init_ctx (&ctx);
13730 fold_checksum_tree (expr, &ctx, ht);
13731 md5_finish_ctx (&ctx, checksum_after);
13732 htab_delete (ht);
13734 if (memcmp (checksum_before, checksum_after, 16))
13735 fold_check_failed (expr, ret);
13737 return ret;
13740 void
13741 print_fold_checksum (const_tree expr)
13743 struct md5_ctx ctx;
13744 unsigned char checksum[16], cnt;
13745 htab_t ht;
13747 ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
13748 md5_init_ctx (&ctx);
13749 fold_checksum_tree (expr, &ctx, ht);
13750 md5_finish_ctx (&ctx, checksum);
13751 htab_delete (ht);
13752 for (cnt = 0; cnt < 16; ++cnt)
13753 fprintf (stderr, "%02x", checksum[cnt]);
13754 putc ('\n', stderr);
13757 static void
13758 fold_check_failed (const_tree expr ATTRIBUTE_UNUSED, const_tree ret ATTRIBUTE_UNUSED)
13760 internal_error ("fold check: original tree changed by fold");
13763 static void
13764 fold_checksum_tree (const_tree expr, struct md5_ctx *ctx, htab_t ht)
13766 void **slot;
13767 enum tree_code code;
13768 union tree_node buf;
13769 int i, len;
13771 recursive_label:
13773 gcc_assert ((sizeof (struct tree_exp) + 5 * sizeof (tree)
13774 <= sizeof (struct tree_function_decl))
13775 && sizeof (struct tree_type) <= sizeof (struct tree_function_decl));
13776 if (expr == NULL)
13777 return;
13778 slot = (void **) htab_find_slot (ht, expr, INSERT);
13779 if (*slot != NULL)
13780 return;
13781 *slot = CONST_CAST_TREE (expr);
13782 code = TREE_CODE (expr);
13783 if (TREE_CODE_CLASS (code) == tcc_declaration
13784 && DECL_ASSEMBLER_NAME_SET_P (expr))
13786 /* Allow DECL_ASSEMBLER_NAME to be modified. */
13787 memcpy ((char *) &buf, expr, tree_size (expr));
13788 SET_DECL_ASSEMBLER_NAME ((tree)&buf, NULL);
13789 expr = (tree) &buf;
13791 else if (TREE_CODE_CLASS (code) == tcc_type
13792 && (TYPE_POINTER_TO (expr)
13793 || TYPE_REFERENCE_TO (expr)
13794 || TYPE_CACHED_VALUES_P (expr)
13795 || TYPE_CONTAINS_PLACEHOLDER_INTERNAL (expr)
13796 || TYPE_NEXT_VARIANT (expr)))
13798 /* Allow these fields to be modified. */
13799 tree tmp;
13800 memcpy ((char *) &buf, expr, tree_size (expr));
13801 expr = tmp = (tree) &buf;
13802 TYPE_CONTAINS_PLACEHOLDER_INTERNAL (tmp) = 0;
13803 TYPE_POINTER_TO (tmp) = NULL;
13804 TYPE_REFERENCE_TO (tmp) = NULL;
13805 TYPE_NEXT_VARIANT (tmp) = NULL;
13806 if (TYPE_CACHED_VALUES_P (tmp))
13808 TYPE_CACHED_VALUES_P (tmp) = 0;
13809 TYPE_CACHED_VALUES (tmp) = NULL;
13812 md5_process_bytes (expr, tree_size (expr), ctx);
13813 fold_checksum_tree (TREE_TYPE (expr), ctx, ht);
13814 if (TREE_CODE_CLASS (code) != tcc_type
13815 && TREE_CODE_CLASS (code) != tcc_declaration
13816 && code != TREE_LIST
13817 && code != SSA_NAME)
13818 fold_checksum_tree (TREE_CHAIN (expr), ctx, ht);
13819 switch (TREE_CODE_CLASS (code))
13821 case tcc_constant:
13822 switch (code)
13824 case STRING_CST:
13825 md5_process_bytes (TREE_STRING_POINTER (expr),
13826 TREE_STRING_LENGTH (expr), ctx);
13827 break;
13828 case COMPLEX_CST:
13829 fold_checksum_tree (TREE_REALPART (expr), ctx, ht);
13830 fold_checksum_tree (TREE_IMAGPART (expr), ctx, ht);
13831 break;
13832 case VECTOR_CST:
13833 fold_checksum_tree (TREE_VECTOR_CST_ELTS (expr), ctx, ht);
13834 break;
13835 default:
13836 break;
13838 break;
13839 case tcc_exceptional:
13840 switch (code)
13842 case TREE_LIST:
13843 fold_checksum_tree (TREE_PURPOSE (expr), ctx, ht);
13844 fold_checksum_tree (TREE_VALUE (expr), ctx, ht);
13845 expr = TREE_CHAIN (expr);
13846 goto recursive_label;
13847 break;
13848 case TREE_VEC:
13849 for (i = 0; i < TREE_VEC_LENGTH (expr); ++i)
13850 fold_checksum_tree (TREE_VEC_ELT (expr, i), ctx, ht);
13851 break;
13852 default:
13853 break;
13855 break;
13856 case tcc_expression:
13857 case tcc_reference:
13858 case tcc_comparison:
13859 case tcc_unary:
13860 case tcc_binary:
13861 case tcc_statement:
13862 case tcc_vl_exp:
13863 len = TREE_OPERAND_LENGTH (expr);
13864 for (i = 0; i < len; ++i)
13865 fold_checksum_tree (TREE_OPERAND (expr, i), ctx, ht);
13866 break;
13867 case tcc_declaration:
13868 fold_checksum_tree (DECL_NAME (expr), ctx, ht);
13869 fold_checksum_tree (DECL_CONTEXT (expr), ctx, ht);
13870 if (CODE_CONTAINS_STRUCT (TREE_CODE (expr), TS_DECL_COMMON))
13872 fold_checksum_tree (DECL_SIZE (expr), ctx, ht);
13873 fold_checksum_tree (DECL_SIZE_UNIT (expr), ctx, ht);
13874 fold_checksum_tree (DECL_INITIAL (expr), ctx, ht);
13875 fold_checksum_tree (DECL_ABSTRACT_ORIGIN (expr), ctx, ht);
13876 fold_checksum_tree (DECL_ATTRIBUTES (expr), ctx, ht);
13878 if (CODE_CONTAINS_STRUCT (TREE_CODE (expr), TS_DECL_WITH_VIS))
13879 fold_checksum_tree (DECL_SECTION_NAME (expr), ctx, ht);
13881 if (CODE_CONTAINS_STRUCT (TREE_CODE (expr), TS_DECL_NON_COMMON))
13883 fold_checksum_tree (DECL_VINDEX (expr), ctx, ht);
13884 fold_checksum_tree (DECL_RESULT_FLD (expr), ctx, ht);
13885 fold_checksum_tree (DECL_ARGUMENT_FLD (expr), ctx, ht);
13887 break;
13888 case tcc_type:
13889 if (TREE_CODE (expr) == ENUMERAL_TYPE)
13890 fold_checksum_tree (TYPE_VALUES (expr), ctx, ht);
13891 fold_checksum_tree (TYPE_SIZE (expr), ctx, ht);
13892 fold_checksum_tree (TYPE_SIZE_UNIT (expr), ctx, ht);
13893 fold_checksum_tree (TYPE_ATTRIBUTES (expr), ctx, ht);
13894 fold_checksum_tree (TYPE_NAME (expr), ctx, ht);
13895 if (INTEGRAL_TYPE_P (expr)
13896 || SCALAR_FLOAT_TYPE_P (expr))
13898 fold_checksum_tree (TYPE_MIN_VALUE (expr), ctx, ht);
13899 fold_checksum_tree (TYPE_MAX_VALUE (expr), ctx, ht);
13901 fold_checksum_tree (TYPE_MAIN_VARIANT (expr), ctx, ht);
13902 if (TREE_CODE (expr) == RECORD_TYPE
13903 || TREE_CODE (expr) == UNION_TYPE
13904 || TREE_CODE (expr) == QUAL_UNION_TYPE)
13905 fold_checksum_tree (TYPE_BINFO (expr), ctx, ht);
13906 fold_checksum_tree (TYPE_CONTEXT (expr), ctx, ht);
13907 break;
13908 default:
13909 break;
13913 /* Helper function for outputting the checksum of a tree T. When
13914 debugging with gdb, you can "define mynext" to be "next" followed
13915 by "call debug_fold_checksum (op0)", then just trace down till the
13916 outputs differ. */
13918 DEBUG_FUNCTION void
13919 debug_fold_checksum (const_tree t)
13921 int i;
13922 unsigned char checksum[16];
13923 struct md5_ctx ctx;
13924 htab_t ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
13926 md5_init_ctx (&ctx);
13927 fold_checksum_tree (t, &ctx, ht);
13928 md5_finish_ctx (&ctx, checksum);
13929 htab_empty (ht);
13931 for (i = 0; i < 16; i++)
13932 fprintf (stderr, "%d ", checksum[i]);
13934 fprintf (stderr, "\n");
13937 #endif
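/* A sketch of that gdb recipe (illustrative; any command name works in
   place of "mynext"):

     (gdb) define mynext
     > next
     > call debug_fold_checksum (op0)
     > end
     (gdb) mynext
     ...the sixteen checksum bytes are printed here...

   Repeat "mynext"; the first statement after which the printed bytes
   change is the one that illegally modified OP0.  */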
13939 /* Fold a unary tree expression with code CODE of type TYPE with an
13940 operand OP0. LOC is the location of the resulting expression.
13941 Return a folded expression if successful. Otherwise, return a tree
13942 expression with code CODE of type TYPE with an operand OP0. */
13944 tree
13945 fold_build1_stat_loc (location_t loc,
13946 enum tree_code code, tree type, tree op0 MEM_STAT_DECL)
13948 tree tem;
13949 #ifdef ENABLE_FOLD_CHECKING
13950 unsigned char checksum_before[16], checksum_after[16];
13951 struct md5_ctx ctx;
13952 htab_t ht;
13954 ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
13955 md5_init_ctx (&ctx);
13956 fold_checksum_tree (op0, &ctx, ht);
13957 md5_finish_ctx (&ctx, checksum_before);
13958 htab_empty (ht);
13959 #endif
13961 tem = fold_unary_loc (loc, code, type, op0);
13962 if (!tem)
13963 tem = build1_stat_loc (loc, code, type, op0 PASS_MEM_STAT);
13965 #ifdef ENABLE_FOLD_CHECKING
13966 md5_init_ctx (&ctx);
13967 fold_checksum_tree (op0, &ctx, ht);
13968 md5_finish_ctx (&ctx, checksum_after);
13969 htab_delete (ht);
13971 if (memcmp (checksum_before, checksum_after, 16))
13972 fold_check_failed (op0, tem);
13973 #endif
13974 return tem;
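/* A minimal usage sketch for the fold_buildN entry points (assumes the
   usual global trees such as integer_type_node are initialized):

     tree neg = fold_build1_loc (input_location, NEGATE_EXPR,
                                 integer_type_node, x);

   If fold_unary_loc can simplify (e.g. when X is itself a NEGATE_EXPR
   or an INTEGER_CST), NEG is the simplified tree; otherwise it is a
   freshly built NEGATE_EXPR.  Either way X is left untouched, which is
   exactly what the before/after checksums verify under
   ENABLE_FOLD_CHECKING.  */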
13977 /* Fold a binary tree expression with code CODE of type TYPE with
13978 operands OP0 and OP1. LOC is the location of the resulting
13979 expression. Return a folded expression if successful. Otherwise,
13980 return a tree expression with code CODE of type TYPE with operands
13981 OP0 and OP1. */
13983 tree
13984 fold_build2_stat_loc (location_t loc,
13985 enum tree_code code, tree type, tree op0, tree op1
13986 MEM_STAT_DECL)
13988 tree tem;
13989 #ifdef ENABLE_FOLD_CHECKING
13990 unsigned char checksum_before_op0[16],
13991 checksum_before_op1[16],
13992 checksum_after_op0[16],
13993 checksum_after_op1[16];
13994 struct md5_ctx ctx;
13995 htab_t ht;
13997 ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
13998 md5_init_ctx (&ctx);
13999 fold_checksum_tree (op0, &ctx, ht);
14000 md5_finish_ctx (&ctx, checksum_before_op0);
14001 htab_empty (ht);
14003 md5_init_ctx (&ctx);
14004 fold_checksum_tree (op1, &ctx, ht);
14005 md5_finish_ctx (&ctx, checksum_before_op1);
14006 htab_empty (ht);
14007 #endif
14009 tem = fold_binary_loc (loc, code, type, op0, op1);
14010 if (!tem)
14011 tem = build2_stat_loc (loc, code, type, op0, op1 PASS_MEM_STAT);
14013 #ifdef ENABLE_FOLD_CHECKING
14014 md5_init_ctx (&ctx);
14015 fold_checksum_tree (op0, &ctx, ht);
14016 md5_finish_ctx (&ctx, checksum_after_op0);
14017 htab_empty (ht);
14019 if (memcmp (checksum_before_op0, checksum_after_op0, 16))
14020 fold_check_failed (op0, tem);
14022 md5_init_ctx (&ctx);
14023 fold_checksum_tree (op1, &ctx, ht);
14024 md5_finish_ctx (&ctx, checksum_after_op1);
14025 htab_delete (ht);
14027 if (memcmp (checksum_before_op1, checksum_after_op1, 16))
14028 fold_check_failed (op1, tem);
14029 #endif
14030 return tem;
14033 /* Fold a ternary tree expression with code CODE of type TYPE with
14034 operands OP0, OP1, and OP2. Return a folded expression if
14035 successful. Otherwise, return a tree expression with code CODE of
14036 type TYPE with operands OP0, OP1, and OP2. */
14038 tree
14039 fold_build3_stat_loc (location_t loc, enum tree_code code, tree type,
14040 tree op0, tree op1, tree op2 MEM_STAT_DECL)
14042 tree tem;
14043 #ifdef ENABLE_FOLD_CHECKING
14044 unsigned char checksum_before_op0[16],
14045 checksum_before_op1[16],
14046 checksum_before_op2[16],
14047 checksum_after_op0[16],
14048 checksum_after_op1[16],
14049 checksum_after_op2[16];
14050 struct md5_ctx ctx;
14051 htab_t ht;
14053 ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
14054 md5_init_ctx (&ctx);
14055 fold_checksum_tree (op0, &ctx, ht);
14056 md5_finish_ctx (&ctx, checksum_before_op0);
14057 htab_empty (ht);
14059 md5_init_ctx (&ctx);
14060 fold_checksum_tree (op1, &ctx, ht);
14061 md5_finish_ctx (&ctx, checksum_before_op1);
14062 htab_empty (ht);
14064 md5_init_ctx (&ctx);
14065 fold_checksum_tree (op2, &ctx, ht);
14066 md5_finish_ctx (&ctx, checksum_before_op2);
14067 htab_empty (ht);
14068 #endif
14070 gcc_assert (TREE_CODE_CLASS (code) != tcc_vl_exp);
14071 tem = fold_ternary_loc (loc, code, type, op0, op1, op2);
14072 if (!tem)
14073 tem = build3_stat_loc (loc, code, type, op0, op1, op2 PASS_MEM_STAT);
14075 #ifdef ENABLE_FOLD_CHECKING
14076 md5_init_ctx (&ctx);
14077 fold_checksum_tree (op0, &ctx, ht);
14078 md5_finish_ctx (&ctx, checksum_after_op0);
14079 htab_empty (ht);
14081 if (memcmp (checksum_before_op0, checksum_after_op0, 16))
14082 fold_check_failed (op0, tem);
14084 md5_init_ctx (&ctx);
14085 fold_checksum_tree (op1, &ctx, ht);
14086 md5_finish_ctx (&ctx, checksum_after_op1);
14087 htab_empty (ht);
14089 if (memcmp (checksum_before_op1, checksum_after_op1, 16))
14090 fold_check_failed (op1, tem);
14092 md5_init_ctx (&ctx);
14093 fold_checksum_tree (op2, &ctx, ht);
14094 md5_finish_ctx (&ctx, checksum_after_op2);
14095 htab_delete (ht);
14097 if (memcmp (checksum_before_op2, checksum_after_op2, 16))
14098 fold_check_failed (op2, tem);
14099 #endif
14100 return tem;
14103 /* Fold a CALL_EXPR expression of type TYPE with operands FN and NARGS
14104 arguments in ARGARRAY, and a null static chain.
14105 Return a folded expression if successful. Otherwise, return a CALL_EXPR
14106 of type TYPE from the given operands as constructed by build_call_array. */
14108 tree
14109 fold_build_call_array_loc (location_t loc, tree type, tree fn,
14110 int nargs, tree *argarray)
14112 tree tem;
14113 #ifdef ENABLE_FOLD_CHECKING
14114 unsigned char checksum_before_fn[16],
14115 checksum_before_arglist[16],
14116 checksum_after_fn[16],
14117 checksum_after_arglist[16];
14118 struct md5_ctx ctx;
14119 htab_t ht;
14120 int i;
14122 ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
14123 md5_init_ctx (&ctx);
14124 fold_checksum_tree (fn, &ctx, ht);
14125 md5_finish_ctx (&ctx, checksum_before_fn);
14126 htab_empty (ht);
14128 md5_init_ctx (&ctx);
14129 for (i = 0; i < nargs; i++)
14130 fold_checksum_tree (argarray[i], &ctx, ht);
14131 md5_finish_ctx (&ctx, checksum_before_arglist);
14132 htab_empty (ht);
14133 #endif
14135 tem = fold_builtin_call_array (loc, type, fn, nargs, argarray);
14137 #ifdef ENABLE_FOLD_CHECKING
14138 md5_init_ctx (&ctx);
14139 fold_checksum_tree (fn, &ctx, ht);
14140 md5_finish_ctx (&ctx, checksum_after_fn);
14141 htab_empty (ht);
14143 if (memcmp (checksum_before_fn, checksum_after_fn, 16))
14144 fold_check_failed (fn, tem);
14146 md5_init_ctx (&ctx);
14147 for (i = 0; i < nargs; i++)
14148 fold_checksum_tree (argarray[i], &ctx, ht);
14149 md5_finish_ctx (&ctx, checksum_after_arglist);
14150 htab_delete (ht);
14152 if (memcmp (checksum_before_arglist, checksum_after_arglist, 16))
14153 fold_check_failed (NULL_TREE, tem);
14154 #endif
14155 return tem;
14158 /* Perform constant folding and related simplification of initializer
14159 expression EXPR. These behave identically to "fold_buildN" but ignore
14160 potential run-time traps and exceptions that fold must preserve. */
14162 #define START_FOLD_INIT \
14163 int saved_signaling_nans = flag_signaling_nans;\
14164 int saved_trapping_math = flag_trapping_math;\
14165 int saved_rounding_math = flag_rounding_math;\
14166 int saved_trapv = flag_trapv;\
14167 int saved_folding_initializer = folding_initializer;\
14168 flag_signaling_nans = 0;\
14169 flag_trapping_math = 0;\
14170 flag_rounding_math = 0;\
14171 flag_trapv = 0;\
14172 folding_initializer = 1;
14174 #define END_FOLD_INIT \
14175 flag_signaling_nans = saved_signaling_nans;\
14176 flag_trapping_math = saved_trapping_math;\
14177 flag_rounding_math = saved_rounding_math;\
14178 flag_trapv = saved_trapv;\
14179 folding_initializer = saved_folding_initializer;
14181 tree
14182 fold_build1_initializer_loc (location_t loc, enum tree_code code,
14183 tree type, tree op)
14185 tree result;
14186 START_FOLD_INIT;
14188 result = fold_build1_loc (loc, code, type, op);
14190 END_FOLD_INIT;
14191 return result;
14194 tree
14195 fold_build2_initializer_loc (location_t loc, enum tree_code code,
14196 tree type, tree op0, tree op1)
14198 tree result;
14199 START_FOLD_INIT;
14201 result = fold_build2_loc (loc, code, type, op0, op1);
14203 END_FOLD_INIT;
14204 return result;
14207 tree
14208 fold_build3_initializer_loc (location_t loc, enum tree_code code,
14209 tree type, tree op0, tree op1, tree op2)
14211 tree result;
14212 START_FOLD_INIT;
14214 result = fold_build3_loc (loc, code, type, op0, op1, op2);
14216 END_FOLD_INIT;
14217 return result;
14220 tree
14221 fold_build_call_array_initializer_loc (location_t loc, tree type, tree fn,
14222 int nargs, tree *argarray)
14224 tree result;
14225 START_FOLD_INIT;
14227 result = fold_build_call_array_loc (loc, type, fn, nargs, argarray);
14229 END_FOLD_INIT;
14230 return result;
14233 #undef START_FOLD_INIT
14234 #undef END_FOLD_INIT
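/* Usage sketch (illustrative): to fold a static initializer such as
   "static double d = 1.0 / 3.0;" a front end can call

     tree val = fold_build2_initializer_loc (UNKNOWN_LOCATION, RDIV_EXPR,
                                             double_type_node, one, third);

   where ONE and THIRD are REAL_CST operands.  The division folds even
   under -ftrapping-math or -frounding-math because the macros above
   temporarily clear those flags: an initializer is evaluated at
   translation time, so the run-time trap and rounding concerns that
   plain fold must preserve do not apply.  */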
14236 /* Determine if the first argument is a multiple of the second argument.
14237 Return 0 if it is not, or if we cannot easily determine that it is.
14239 An example of the sort of thing we care about (at this point; this routine
14240 could surely be made more general, and expanded to do what the *_DIV_EXPR's
14241 fold cases do now) is discovering that
14243 SAVE_EXPR (I) * SAVE_EXPR (J * 8)
14245 is a multiple of
14247 SAVE_EXPR (J * 8)
14249 when we know that the two SAVE_EXPR (J * 8) nodes are the same node.
14251 This code also handles discovering that
14253 SAVE_EXPR (I) * SAVE_EXPR (J * 8)
14255 is a multiple of 8 so we don't have to worry about dealing with a
14256 possible remainder.
14258 Note that we *look* inside a SAVE_EXPR only to determine how it was
14259 calculated; it is not safe for fold to do much of anything else with the
14260 internals of a SAVE_EXPR, since it cannot know when it will be evaluated
14261 at run time. For example, the latter example above *cannot* be implemented
14262 as SAVE_EXPR (I) * J or any variant thereof, since the value of J at
14263 evaluation time of the original SAVE_EXPR is not necessarily the same at
14264 the time the new expression is evaluated. The only optimization of this
14265 sort that would be valid is changing
14267 SAVE_EXPR (I) * SAVE_EXPR (SAVE_EXPR (J) * 8)
14269 divided by 8 to
14271 SAVE_EXPR (I) * SAVE_EXPR (J)
14273 (where the same SAVE_EXPR (J) is used in the original and the
14274 transformed version). */
14276 static int
14277 multiple_of_p (tree type, const_tree top, const_tree bottom)
14279 if (operand_equal_p (top, bottom, 0))
14280 return 1;
14282 if (TREE_CODE (type) != INTEGER_TYPE)
14283 return 0;
14285 switch (TREE_CODE (top))
14287 case BIT_AND_EXPR:
14288 /* Bitwise and provides a power of two multiple. If the mask is
14289 a multiple of BOTTOM then TOP is a multiple of BOTTOM. */
14290 if (!integer_pow2p (bottom))
14291 return 0;
14292 /* FALLTHRU */
14294 case MULT_EXPR:
14295 return (multiple_of_p (type, TREE_OPERAND (top, 0), bottom)
14296 || multiple_of_p (type, TREE_OPERAND (top, 1), bottom));
14298 case PLUS_EXPR:
14299 case MINUS_EXPR:
14300 return (multiple_of_p (type, TREE_OPERAND (top, 0), bottom)
14301 && multiple_of_p (type, TREE_OPERAND (top, 1), bottom));
14303 case LSHIFT_EXPR:
14304 if (TREE_CODE (TREE_OPERAND (top, 1)) == INTEGER_CST)
14306 tree op1, t1;
14308 op1 = TREE_OPERAND (top, 1);
14309 /* const_binop may not detect overflow correctly,
14310 so check for it explicitly here. */
14311 if (TYPE_PRECISION (TREE_TYPE (size_one_node))
14312 > TREE_INT_CST_LOW (op1)
14313 && TREE_INT_CST_HIGH (op1) == 0
14314 && 0 != (t1 = fold_convert (type,
14315 const_binop (LSHIFT_EXPR,
14316 size_one_node,
14317 op1)))
14318 && !TREE_OVERFLOW (t1))
14319 return multiple_of_p (type, t1, bottom);
14321 return 0;
14323 case NOP_EXPR:
14324 /* Can't handle conversions from non-integral or wider integral type. */
14325 if ((TREE_CODE (TREE_TYPE (TREE_OPERAND (top, 0))) != INTEGER_TYPE)
14326 || (TYPE_PRECISION (type)
14327 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (top, 0)))))
14328 return 0;
14330 /* ... fall through ... */
14332 case SAVE_EXPR:
14333 return multiple_of_p (type, TREE_OPERAND (top, 0), bottom);
14335 case COND_EXPR:
14336 return (multiple_of_p (type, TREE_OPERAND (top, 1), bottom)
14337 && multiple_of_p (type, TREE_OPERAND (top, 2), bottom));
14339 case INTEGER_CST:
14340 if (TREE_CODE (bottom) != INTEGER_CST
14341 || integer_zerop (bottom)
14342 || (TYPE_UNSIGNED (type)
14343 && (tree_int_cst_sgn (top) < 0
14344 || tree_int_cst_sgn (bottom) < 0)))
14345 return 0;
14346 return integer_zerop (int_const_binop (TRUNC_MOD_EXPR,
14347 top, bottom, 0));
14349 default:
14350 return 0;
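/* Worked example: for TOP = J * 8 + 16 and BOTTOM = 8, the PLUS_EXPR
   case requires both operands to be multiples of 8.  J * 8 succeeds
   through the MULT_EXPR case because its second operand is BOTTOM
   itself, and 16 succeeds through the INTEGER_CST case because
   16 % 8 == 0, so the whole query returns 1.  With TOP = J * 8 + 4
   the constant test fails and the result is a conservative 0.  */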
14354 /* Return true if CODE or TYPE is known to be non-negative. */
14356 static bool
14357 tree_simple_nonnegative_warnv_p (enum tree_code code, tree type)
14359 if ((TYPE_PRECISION (type) != 1 || TYPE_UNSIGNED (type))
14360 && truth_value_p (code))
14361 /* Truth values evaluate to 0 or 1, which is nonnegative unless we
14362 have a signed:1 type (where the values are -1 and 0). */
14363 return true;
14364 return false;
14367 /* Return true if (CODE OP0) is known to be non-negative. If the return
14368 value is based on the assumption that signed overflow is undefined,
14369 set *STRICT_OVERFLOW_P to true; otherwise, don't change
14370 *STRICT_OVERFLOW_P. */
14372 bool
14373 tree_unary_nonnegative_warnv_p (enum tree_code code, tree type, tree op0,
14374 bool *strict_overflow_p)
14376 if (TYPE_UNSIGNED (type))
14377 return true;
14379 switch (code)
14381 case ABS_EXPR:
14382 /* We can't return 1 if flag_wrapv is set because
14383 ABS_EXPR<INT_MIN> = INT_MIN. */
14384 if (!INTEGRAL_TYPE_P (type))
14385 return true;
14386 if (TYPE_OVERFLOW_UNDEFINED (type))
14388 *strict_overflow_p = true;
14389 return true;
14391 break;
14393 case NON_LVALUE_EXPR:
14394 case FLOAT_EXPR:
14395 case FIX_TRUNC_EXPR:
14396 return tree_expr_nonnegative_warnv_p (op0,
14397 strict_overflow_p);
14399 case NOP_EXPR:
14401 tree inner_type = TREE_TYPE (op0);
14402 tree outer_type = type;
14404 if (TREE_CODE (outer_type) == REAL_TYPE)
14406 if (TREE_CODE (inner_type) == REAL_TYPE)
14407 return tree_expr_nonnegative_warnv_p (op0,
14408 strict_overflow_p);
14409 if (TREE_CODE (inner_type) == INTEGER_TYPE)
14411 if (TYPE_UNSIGNED (inner_type))
14412 return true;
14413 return tree_expr_nonnegative_warnv_p (op0,
14414 strict_overflow_p);
14417 else if (TREE_CODE (outer_type) == INTEGER_TYPE)
14419 if (TREE_CODE (inner_type) == REAL_TYPE)
14420 return tree_expr_nonnegative_warnv_p (op0,
14421 strict_overflow_p);
14422 if (TREE_CODE (inner_type) == INTEGER_TYPE)
14423 return TYPE_PRECISION (inner_type) < TYPE_PRECISION (outer_type)
14424 && TYPE_UNSIGNED (inner_type);
14427 break;
14429 default:
14430 return tree_simple_nonnegative_warnv_p (code, type);
14433 /* We don't know sign of `t', so be conservative and return false. */
14434 return false;
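/* Example: the conversion (int) u, with U of type unsigned short, is
   known non-negative by the NOP_EXPR case above (assuming 32-bit int):
   the inner type is unsigned and strictly narrower than the signed
   result, so the value is at most 65535.  By contrast, ABS_EXPR only
   counts when signed overflow is undefined, since with -fwrapv
   ABS_EXPR <INT_MIN> is still INT_MIN.  */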
14437 /* Return true if (CODE OP0 OP1) is known to be non-negative. If the return
14438 value is based on the assumption that signed overflow is undefined,
14439 set *STRICT_OVERFLOW_P to true; otherwise, don't change
14440 *STRICT_OVERFLOW_P. */
14442 bool
14443 tree_binary_nonnegative_warnv_p (enum tree_code code, tree type, tree op0,
14444 tree op1, bool *strict_overflow_p)
14446 if (TYPE_UNSIGNED (type))
14447 return true;
14449 switch (code)
14451 case POINTER_PLUS_EXPR:
14452 case PLUS_EXPR:
14453 if (FLOAT_TYPE_P (type))
14454 return (tree_expr_nonnegative_warnv_p (op0,
14455 strict_overflow_p)
14456 && tree_expr_nonnegative_warnv_p (op1,
14457 strict_overflow_p));
14459 /* zero_extend(x) + zero_extend(y) is non-negative if x and y are
14460 both unsigned and at least 2 bits shorter than the result. */
14461 if (TREE_CODE (type) == INTEGER_TYPE
14462 && TREE_CODE (op0) == NOP_EXPR
14463 && TREE_CODE (op1) == NOP_EXPR)
14465 tree inner1 = TREE_TYPE (TREE_OPERAND (op0, 0));
14466 tree inner2 = TREE_TYPE (TREE_OPERAND (op1, 0));
14467 if (TREE_CODE (inner1) == INTEGER_TYPE && TYPE_UNSIGNED (inner1)
14468 && TREE_CODE (inner2) == INTEGER_TYPE && TYPE_UNSIGNED (inner2))
14470 unsigned int prec = MAX (TYPE_PRECISION (inner1),
14471 TYPE_PRECISION (inner2)) + 1;
14472 return prec < TYPE_PRECISION (type);
14475 break;
14477 case MULT_EXPR:
14478 if (FLOAT_TYPE_P (type))
14480 /* x * x for floating point x is always non-negative. */
14481 if (operand_equal_p (op0, op1, 0))
14482 return true;
14483 return (tree_expr_nonnegative_warnv_p (op0,
14484 strict_overflow_p)
14485 && tree_expr_nonnegative_warnv_p (op1,
14486 strict_overflow_p));
14489 /* zero_extend(x) * zero_extend(y) is non-negative if x and y are
14490 both unsigned and their combined precision is less than that of the result. */
14491 if (TREE_CODE (type) == INTEGER_TYPE
14492 && (TREE_CODE (op0) == NOP_EXPR || TREE_CODE (op0) == INTEGER_CST)
14493 && (TREE_CODE (op1) == NOP_EXPR || TREE_CODE (op1) == INTEGER_CST))
14495 tree inner0 = (TREE_CODE (op0) == NOP_EXPR)
14496 ? TREE_TYPE (TREE_OPERAND (op0, 0))
14497 : TREE_TYPE (op0);
14498 tree inner1 = (TREE_CODE (op1) == NOP_EXPR)
14499 ? TREE_TYPE (TREE_OPERAND (op1, 0))
14500 : TREE_TYPE (op1);
14502 bool unsigned0 = TYPE_UNSIGNED (inner0);
14503 bool unsigned1 = TYPE_UNSIGNED (inner1);
14505 if (TREE_CODE (op0) == INTEGER_CST)
14506 unsigned0 = unsigned0 || tree_int_cst_sgn (op0) >= 0;
14508 if (TREE_CODE (op1) == INTEGER_CST)
14509 unsigned1 = unsigned1 || tree_int_cst_sgn (op1) >= 0;
14511 if (TREE_CODE (inner0) == INTEGER_TYPE && unsigned0
14512 && TREE_CODE (inner1) == INTEGER_TYPE && unsigned1)
14514 unsigned int precision0 = (TREE_CODE (op0) == INTEGER_CST)
14515 ? tree_int_cst_min_precision (op0, /*unsignedp=*/true)
14516 : TYPE_PRECISION (inner0);
14518 unsigned int precision1 = (TREE_CODE (op1) == INTEGER_CST)
14519 ? tree_int_cst_min_precision (op1, /*unsignedp=*/true)
14520 : TYPE_PRECISION (inner1);
14522 return precision0 + precision1 < TYPE_PRECISION (type);
14525 return false;
14527 case BIT_AND_EXPR:
14528 case MAX_EXPR:
14529 return (tree_expr_nonnegative_warnv_p (op0,
14530 strict_overflow_p)
14531 || tree_expr_nonnegative_warnv_p (op1,
14532 strict_overflow_p));
14534 case BIT_IOR_EXPR:
14535 case BIT_XOR_EXPR:
14536 case MIN_EXPR:
14537 case RDIV_EXPR:
14538 case TRUNC_DIV_EXPR:
14539 case CEIL_DIV_EXPR:
14540 case FLOOR_DIV_EXPR:
14541 case ROUND_DIV_EXPR:
14542 return (tree_expr_nonnegative_warnv_p (op0,
14543 strict_overflow_p)
14544 && tree_expr_nonnegative_warnv_p (op1,
14545 strict_overflow_p));
14547 case TRUNC_MOD_EXPR:
14548 case CEIL_MOD_EXPR:
14549 case FLOOR_MOD_EXPR:
14550 case ROUND_MOD_EXPR:
14551 return tree_expr_nonnegative_warnv_p (op0,
14552 strict_overflow_p);
14553 default:
14554 return tree_simple_nonnegative_warnv_p (code, type);
14557 /* We don't know sign of `t', so be conservative and return false. */
14558 return false;
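/* A standalone sketch of the two precision arguments above (plain C,
   not part of GCC; assumes the exact-width types of <stdint.h>):  */
#if 0
#include <stdint.h>
#include <stdio.h>

int
main (void)
{
  /* PLUS: two zero-extended 8-bit values summed into 16 bits.  The
     maximum is 255 + 255 = 510, and MAX (8, 8) + 1 = 9 < 16, so the
     signed 16-bit result can never be negative.  */
  uint8_t a = 255, b = 255;
  int16_t sum = (int16_t) ((int16_t) a + (int16_t) b);

  /* MULT: 255 * 255 = 65025 needs 8 + 8 = 16 unsigned bits, so a
     16-bit signed result could come out negative, but a 32-bit one
     cannot: precision0 + precision1 < TYPE_PRECISION (type).  */
  int32_t prod = (int32_t) a * (int32_t) b;

  printf ("%d %d\n", (int) sum, (int) prod);  /* 510 65025 */
  return 0;
}
#endif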
14561 /* Return true if T is known to be non-negative. If the return
14562 value is based on the assumption that signed overflow is undefined,
14563 set *STRICT_OVERFLOW_P to true; otherwise, don't change
14564 *STRICT_OVERFLOW_P. */
14566 bool
14567 tree_single_nonnegative_warnv_p (tree t, bool *strict_overflow_p)
14569 if (TYPE_UNSIGNED (TREE_TYPE (t)))
14570 return true;
14572 switch (TREE_CODE (t))
14574 case INTEGER_CST:
14575 return tree_int_cst_sgn (t) >= 0;
14577 case REAL_CST:
14578 return ! REAL_VALUE_NEGATIVE (TREE_REAL_CST (t));
14580 case FIXED_CST:
14581 return ! FIXED_VALUE_NEGATIVE (TREE_FIXED_CST (t));
14583 case COND_EXPR:
14584 return (tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 1),
14585 strict_overflow_p)
14586 && tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 2),
14587 strict_overflow_p));
14588 default:
14589 return tree_simple_nonnegative_warnv_p (TREE_CODE (t),
14590 TREE_TYPE (t));
14592 /* We don't know sign of `t', so be conservative and return false. */
14593 return false;
14596 /* Return true if T is known to be non-negative. If the return
14597 value is based on the assumption that signed overflow is undefined,
14598 set *STRICT_OVERFLOW_P to true; otherwise, don't change
14599 *STRICT_OVERFLOW_P. */
14601 bool
14602 tree_call_nonnegative_warnv_p (tree type, tree fndecl,
14603 tree arg0, tree arg1, bool *strict_overflow_p)
14605 if (fndecl && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL)
14606 switch (DECL_FUNCTION_CODE (fndecl))
14608 CASE_FLT_FN (BUILT_IN_ACOS):
14609 CASE_FLT_FN (BUILT_IN_ACOSH):
14610 CASE_FLT_FN (BUILT_IN_CABS):
14611 CASE_FLT_FN (BUILT_IN_COSH):
14612 CASE_FLT_FN (BUILT_IN_ERFC):
14613 CASE_FLT_FN (BUILT_IN_EXP):
14614 CASE_FLT_FN (BUILT_IN_EXP10):
14615 CASE_FLT_FN (BUILT_IN_EXP2):
14616 CASE_FLT_FN (BUILT_IN_FABS):
14617 CASE_FLT_FN (BUILT_IN_FDIM):
14618 CASE_FLT_FN (BUILT_IN_HYPOT):
14619 CASE_FLT_FN (BUILT_IN_POW10):
14620 CASE_INT_FN (BUILT_IN_FFS):
14621 CASE_INT_FN (BUILT_IN_PARITY):
14622 CASE_INT_FN (BUILT_IN_POPCOUNT):
14623 case BUILT_IN_BSWAP32:
14624 case BUILT_IN_BSWAP64:
14625 /* Always true. */
14626 return true;
14628 CASE_FLT_FN (BUILT_IN_SQRT):
14629 /* sqrt(-0.0) is -0.0. */
14630 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (type)))
14631 return true;
14632 return tree_expr_nonnegative_warnv_p (arg0,
14633 strict_overflow_p);
14635 CASE_FLT_FN (BUILT_IN_ASINH):
14636 CASE_FLT_FN (BUILT_IN_ATAN):
14637 CASE_FLT_FN (BUILT_IN_ATANH):
14638 CASE_FLT_FN (BUILT_IN_CBRT):
14639 CASE_FLT_FN (BUILT_IN_CEIL):
14640 CASE_FLT_FN (BUILT_IN_ERF):
14641 CASE_FLT_FN (BUILT_IN_EXPM1):
14642 CASE_FLT_FN (BUILT_IN_FLOOR):
14643 CASE_FLT_FN (BUILT_IN_FMOD):
14644 CASE_FLT_FN (BUILT_IN_FREXP):
14645 CASE_FLT_FN (BUILT_IN_LCEIL):
14646 CASE_FLT_FN (BUILT_IN_LDEXP):
14647 CASE_FLT_FN (BUILT_IN_LFLOOR):
14648 CASE_FLT_FN (BUILT_IN_LLCEIL):
14649 CASE_FLT_FN (BUILT_IN_LLFLOOR):
14650 CASE_FLT_FN (BUILT_IN_LLRINT):
14651 CASE_FLT_FN (BUILT_IN_LLROUND):
14652 CASE_FLT_FN (BUILT_IN_LRINT):
14653 CASE_FLT_FN (BUILT_IN_LROUND):
14654 CASE_FLT_FN (BUILT_IN_MODF):
14655 CASE_FLT_FN (BUILT_IN_NEARBYINT):
14656 CASE_FLT_FN (BUILT_IN_RINT):
14657 CASE_FLT_FN (BUILT_IN_ROUND):
14658 CASE_FLT_FN (BUILT_IN_SCALB):
14659 CASE_FLT_FN (BUILT_IN_SCALBLN):
14660 CASE_FLT_FN (BUILT_IN_SCALBN):
14661 CASE_FLT_FN (BUILT_IN_SIGNBIT):
14662 CASE_FLT_FN (BUILT_IN_SIGNIFICAND):
14663 CASE_FLT_FN (BUILT_IN_SINH):
14664 CASE_FLT_FN (BUILT_IN_TANH):
14665 CASE_FLT_FN (BUILT_IN_TRUNC):
14666 /* True if the 1st argument is nonnegative. */
14667 return tree_expr_nonnegative_warnv_p (arg0,
14668 strict_overflow_p);
14670 CASE_FLT_FN (BUILT_IN_FMAX):
14671 /* True if either the 1st or the 2nd argument is nonnegative. */
14672 return (tree_expr_nonnegative_warnv_p (arg0,
14673 strict_overflow_p)
14674 || (tree_expr_nonnegative_warnv_p (arg1,
14675 strict_overflow_p)));
14677 CASE_FLT_FN (BUILT_IN_FMIN):
14678 /* True if both the 1st and the 2nd arguments are nonnegative. */
14679 return (tree_expr_nonnegative_warnv_p (arg0,
14680 strict_overflow_p)
14681 && (tree_expr_nonnegative_warnv_p (arg1,
14682 strict_overflow_p)));
14684 CASE_FLT_FN (BUILT_IN_COPYSIGN):
14685 /* True if the 2nd argument is nonnegative. */
14686 return tree_expr_nonnegative_warnv_p (arg1,
14687 strict_overflow_p);
14689 CASE_FLT_FN (BUILT_IN_POWI):
14690 /* True if the 1st argument is nonnegative or the second
14691 argument is an even integer. */
14692 if (TREE_CODE (arg1) == INTEGER_CST
14693 && (TREE_INT_CST_LOW (arg1) & 1) == 0)
14694 return true;
14695 return tree_expr_nonnegative_warnv_p (arg0,
14696 strict_overflow_p);
14698 CASE_FLT_FN (BUILT_IN_POW):
14699 /* True if the 1st argument is nonnegative or the second
14700 argument is an even integer valued real. */
14701 if (TREE_CODE (arg1) == REAL_CST)
14703 REAL_VALUE_TYPE c;
14704 HOST_WIDE_INT n;
14706 c = TREE_REAL_CST (arg1);
14707 n = real_to_integer (&c);
14708 if ((n & 1) == 0)
14710 REAL_VALUE_TYPE cint;
14711 real_from_integer (&cint, VOIDmode, n,
14712 n < 0 ? -1 : 0, 0);
14713 if (real_identical (&c, &cint))
14714 return true;
14717 return tree_expr_nonnegative_warnv_p (arg0,
14718 strict_overflow_p);
14720 default:
14721 break;
14723 return tree_simple_nonnegative_warnv_p (CALL_EXPR,
14724 type);
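/* For example, pow (x, 2.0) is known non-negative for any x because
   2.0 is an even integer-valued real; pow (x, 3.0) is non-negative
   only when X itself is, since 3 is odd; and pow (x, 2.5) also falls
   back to the sign of X, because 2.5 truncates to the integer 2 but
   real_identical then rejects the mismatch.  */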
14727 /* Return true if T is known to be non-negative. If the return
14728 value is based on the assumption that signed overflow is undefined,
14729 set *STRICT_OVERFLOW_P to true; otherwise, don't change
14730 *STRICT_OVERFLOW_P. */
14732 bool
14733 tree_invalid_nonnegative_warnv_p (tree t, bool *strict_overflow_p)
14735 enum tree_code code = TREE_CODE (t);
14736 if (TYPE_UNSIGNED (TREE_TYPE (t)))
14737 return true;
14739 switch (code)
14741 case TARGET_EXPR:
14743 tree temp = TARGET_EXPR_SLOT (t);
14744 t = TARGET_EXPR_INITIAL (t);
14746 /* If the initializer is non-void, then it's a normal expression
14747 that will be assigned to the slot. */
14748 if (!VOID_TYPE_P (t))
14749 return tree_expr_nonnegative_warnv_p (t, strict_overflow_p);
14751 /* Otherwise, the initializer sets the slot in some way. One common
14752 way is an assignment statement at the end of the initializer. */
14753 while (1)
14755 if (TREE_CODE (t) == BIND_EXPR)
14756 t = expr_last (BIND_EXPR_BODY (t));
14757 else if (TREE_CODE (t) == TRY_FINALLY_EXPR
14758 || TREE_CODE (t) == TRY_CATCH_EXPR)
14759 t = expr_last (TREE_OPERAND (t, 0));
14760 else if (TREE_CODE (t) == STATEMENT_LIST)
14761 t = expr_last (t);
14762 else
14763 break;
14765 if (TREE_CODE (t) == MODIFY_EXPR
14766 && TREE_OPERAND (t, 0) == temp)
14767 return tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 1),
14768 strict_overflow_p);
14770 return false;
14773 case CALL_EXPR:
14775 tree arg0 = call_expr_nargs (t) > 0 ? CALL_EXPR_ARG (t, 0) : NULL_TREE;
14776 tree arg1 = call_expr_nargs (t) > 1 ? CALL_EXPR_ARG (t, 1) : NULL_TREE;
14778 return tree_call_nonnegative_warnv_p (TREE_TYPE (t),
14779 get_callee_fndecl (t),
14780 arg0,
14781 arg1,
14782 strict_overflow_p);
14784 case COMPOUND_EXPR:
14785 case MODIFY_EXPR:
14786 return tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 1),
14787 strict_overflow_p);
14788 case BIND_EXPR:
14789 return tree_expr_nonnegative_warnv_p (expr_last (TREE_OPERAND (t, 1)),
14790 strict_overflow_p);
14791 case SAVE_EXPR:
14792 return tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 0),
14793 strict_overflow_p);
14795 default:
14796 return tree_simple_nonnegative_warnv_p (TREE_CODE (t),
14797 TREE_TYPE (t));
14800 /* We don't know sign of `t', so be conservative and return false. */
14801 return false;
14804 /* Return true if T is known to be non-negative. If the return
14805 value is based on the assumption that signed overflow is undefined,
14806 set *STRICT_OVERFLOW_P to true; otherwise, don't change
14807 *STRICT_OVERFLOW_P. */
14809 bool
14810 tree_expr_nonnegative_warnv_p (tree t, bool *strict_overflow_p)
14812 enum tree_code code;
14813 if (t == error_mark_node)
14814 return false;
14816 code = TREE_CODE (t);
14817 switch (TREE_CODE_CLASS (code))
14819 case tcc_binary:
14820 case tcc_comparison:
14821 return tree_binary_nonnegative_warnv_p (TREE_CODE (t),
14822 TREE_TYPE (t),
14823 TREE_OPERAND (t, 0),
14824 TREE_OPERAND (t, 1),
14825 strict_overflow_p);
14827 case tcc_unary:
14828 return tree_unary_nonnegative_warnv_p (TREE_CODE (t),
14829 TREE_TYPE (t),
14830 TREE_OPERAND (t, 0),
14831 strict_overflow_p);
14833 case tcc_constant:
14834 case tcc_declaration:
14835 case tcc_reference:
14836 return tree_single_nonnegative_warnv_p (t, strict_overflow_p);
14838 default:
14839 break;
14842 switch (code)
14844 case TRUTH_AND_EXPR:
14845 case TRUTH_OR_EXPR:
14846 case TRUTH_XOR_EXPR:
14847 return tree_binary_nonnegative_warnv_p (TREE_CODE (t),
14848 TREE_TYPE (t),
14849 TREE_OPERAND (t, 0),
14850 TREE_OPERAND (t, 1),
14851 strict_overflow_p);
14852 case TRUTH_NOT_EXPR:
14853 return tree_unary_nonnegative_warnv_p (TREE_CODE (t),
14854 TREE_TYPE (t),
14855 TREE_OPERAND (t, 0),
14856 strict_overflow_p);
14858 case COND_EXPR:
14859 case CONSTRUCTOR:
14860 case OBJ_TYPE_REF:
14861 case ASSERT_EXPR:
14862 case ADDR_EXPR:
14863 case WITH_SIZE_EXPR:
14864 case SSA_NAME:
14865 return tree_single_nonnegative_warnv_p (t, strict_overflow_p);
14867 default:
14868 return tree_invalid_nonnegative_warnv_p (t, strict_overflow_p);
14872 /* Return true if `t' is known to be non-negative. Handle warnings
14873 about undefined signed overflow. */
14875 bool
14876 tree_expr_nonnegative_p (tree t)
14878 bool ret, strict_overflow_p;
14880 strict_overflow_p = false;
14881 ret = tree_expr_nonnegative_warnv_p (t, &strict_overflow_p);
14882 if (strict_overflow_p)
14883 fold_overflow_warning (("assuming signed overflow does not occur when "
14884 "determining that expression is always "
14885 "non-negative"),
14886 WARN_STRICT_OVERFLOW_MISC);
14887 return ret;
14891 /* Return true when (CODE OP0) is an address and is known to be nonzero.
14892 For floating point we further ensure that T is not denormal.
14893 Similar logic is present in nonzero_address in rtlanal.c.
14895 If the return value is based on the assumption that signed overflow
14896 is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
14897 change *STRICT_OVERFLOW_P. */
14899 bool
14900 tree_unary_nonzero_warnv_p (enum tree_code code, tree type, tree op0,
14901 bool *strict_overflow_p)
14903 switch (code)
14905 case ABS_EXPR:
14906 return tree_expr_nonzero_warnv_p (op0,
14907 strict_overflow_p);
14909 case NOP_EXPR:
14911 tree inner_type = TREE_TYPE (op0);
14912 tree outer_type = type;
14914 return (TYPE_PRECISION (outer_type) >= TYPE_PRECISION (inner_type)
14915 && tree_expr_nonzero_warnv_p (op0,
14916 strict_overflow_p));
14918 break;
14920 case NON_LVALUE_EXPR:
14921 return tree_expr_nonzero_warnv_p (op0,
14922 strict_overflow_p);
14924 default:
14925 break;
14928 return false;
14931 /* Return true when (CODE OP0 OP1) is an address and is known to be nonzero.
14932 For floating point we further ensure that T is not denormal.
14933 Similar logic is present in nonzero_address in rtlanal.c.
14935 If the return value is based on the assumption that signed overflow
14936 is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
14937 change *STRICT_OVERFLOW_P. */
14939 bool
14940 tree_binary_nonzero_warnv_p (enum tree_code code,
14941 tree type,
14942 tree op0,
14943 tree op1, bool *strict_overflow_p)
14945 bool sub_strict_overflow_p;
14946 switch (code)
14948 case POINTER_PLUS_EXPR:
14949 case PLUS_EXPR:
14950 if (TYPE_OVERFLOW_UNDEFINED (type))
14952 /* In the presence of negative values it is hard
14953 to say anything definite. */
14954 sub_strict_overflow_p = false;
14955 if (!tree_expr_nonnegative_warnv_p (op0,
14956 &sub_strict_overflow_p)
14957 || !tree_expr_nonnegative_warnv_p (op1,
14958 &sub_strict_overflow_p))
14959 return false;
14960 /* One of the operands must be positive and the other non-negative. */
14961 /* We don't set *STRICT_OVERFLOW_P here: even if this value
14962 overflows, on a twos-complement machine the sum of two
14963 nonnegative numbers can never be zero. */
14964 return (tree_expr_nonzero_warnv_p (op0,
14965 strict_overflow_p)
14966 || tree_expr_nonzero_warnv_p (op1,
14967 strict_overflow_p));
14969 break;
14971 case MULT_EXPR:
14972 if (TYPE_OVERFLOW_UNDEFINED (type))
14974 if (tree_expr_nonzero_warnv_p (op0,
14975 strict_overflow_p)
14976 && tree_expr_nonzero_warnv_p (op1,
14977 strict_overflow_p))
14979 *strict_overflow_p = true;
14980 return true;
14983 break;
14985 case MIN_EXPR:
14986 sub_strict_overflow_p = false;
14987 if (tree_expr_nonzero_warnv_p (op0,
14988 &sub_strict_overflow_p)
14989 && tree_expr_nonzero_warnv_p (op1,
14990 &sub_strict_overflow_p))
14992 if (sub_strict_overflow_p)
14993 *strict_overflow_p = true;
14995 break;
14997 case MAX_EXPR:
14998 sub_strict_overflow_p = false;
14999 if (tree_expr_nonzero_warnv_p (op0,
15000 &sub_strict_overflow_p))
15002 if (sub_strict_overflow_p)
15003 *strict_overflow_p = true;
15005 /* When both operands are nonzero, MAX must be too. */
15006 if (tree_expr_nonzero_warnv_p (op1,
15007 strict_overflow_p))
15008 return true;
15010 /* MAX where operand 0 is positive is positive. */
15011 return tree_expr_nonnegative_warnv_p (op0,
15012 strict_overflow_p);
15014 /* MAX where operand 1 is positive is positive. */
15015 else if (tree_expr_nonzero_warnv_p (op1,
15016 &sub_strict_overflow_p)
15017 && tree_expr_nonnegative_warnv_p (op1,
15018 &sub_strict_overflow_p))
15020 if (sub_strict_overflow_p)
15021 *strict_overflow_p = true;
15022 return true;
15024 break;
15026 case BIT_IOR_EXPR:
15027 return (tree_expr_nonzero_warnv_p (op1,
15028 strict_overflow_p)
15029 || tree_expr_nonzero_warnv_p (op0,
15030 strict_overflow_p));
15032 default:
15033 break;
15036 return false;
15039 /* Return true when T is an address and is known to be nonzero.
15040 For floating point we further ensure that T is not denormal.
15041 Similar logic is present in nonzero_address in rtlanal.c.
15043 If the return value is based on the assumption that signed overflow
15044 is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
15045 change *STRICT_OVERFLOW_P. */
15047 bool
15048 tree_single_nonzero_warnv_p (tree t, bool *strict_overflow_p)
15050 bool sub_strict_overflow_p;
15051 switch (TREE_CODE (t))
15053 case INTEGER_CST:
15054 return !integer_zerop (t);
15056 case ADDR_EXPR:
15058 tree base = TREE_OPERAND (t, 0);
15059 if (!DECL_P (base))
15060 base = get_base_address (base);
15062 if (!base)
15063 return false;
15065 /* Weak declarations may link to NULL. Other things may also be NULL
15066 so protect with -fdelete-null-pointer-checks; but not variables
15067 allocated on the stack. */
15068 if (DECL_P (base)
15069 && (flag_delete_null_pointer_checks
15070 || (DECL_CONTEXT (base)
15071 && TREE_CODE (DECL_CONTEXT (base)) == FUNCTION_DECL
15072 && auto_var_in_fn_p (base, DECL_CONTEXT (base)))))
15073 return !VAR_OR_FUNCTION_DECL_P (base) || !DECL_WEAK (base);
15075 /* Constants are never weak. */
15076 if (CONSTANT_CLASS_P (base))
15077 return true;
15079 return false;
15082 case COND_EXPR:
15083 sub_strict_overflow_p = false;
15084 if (tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 1),
15085 &sub_strict_overflow_p)
15086 && tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 2),
15087 &sub_strict_overflow_p))
15089 if (sub_strict_overflow_p)
15090 *strict_overflow_p = true;
15091 return true;
15093 break;
15095 default:
15096 break;
15098 return false;
15101 /* Return true when T is an address and is known to be nonzero.
15102 For floating point we further ensure that T is not denormal.
15103 Similar logic is present in nonzero_address in rtlanal.c.
15105 If the return value is based on the assumption that signed overflow
15106 is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
15107 change *STRICT_OVERFLOW_P. */
15109 bool
15110 tree_expr_nonzero_warnv_p (tree t, bool *strict_overflow_p)
15112 tree type = TREE_TYPE (t);
15113 enum tree_code code;
15115 /* Doing something useful for floating point would need more work. */
15116 if (!INTEGRAL_TYPE_P (type) && !POINTER_TYPE_P (type))
15117 return false;
15119 code = TREE_CODE (t);
15120 switch (TREE_CODE_CLASS (code))
15122 case tcc_unary:
15123 return tree_unary_nonzero_warnv_p (code, type, TREE_OPERAND (t, 0),
15124 strict_overflow_p);
15125 case tcc_binary:
15126 case tcc_comparison:
15127 return tree_binary_nonzero_warnv_p (code, type,
15128 TREE_OPERAND (t, 0),
15129 TREE_OPERAND (t, 1),
15130 strict_overflow_p);
15131 case tcc_constant:
15132 case tcc_declaration:
15133 case tcc_reference:
15134 return tree_single_nonzero_warnv_p (t, strict_overflow_p);
15136 default:
15137 break;
15140 switch (code)
15142 case TRUTH_NOT_EXPR:
15143 return tree_unary_nonzero_warnv_p (code, type, TREE_OPERAND (t, 0),
15144 strict_overflow_p);
15146 case TRUTH_AND_EXPR:
15147 case TRUTH_OR_EXPR:
15148 case TRUTH_XOR_EXPR:
15149 return tree_binary_nonzero_warnv_p (code, type,
15150 TREE_OPERAND (t, 0),
15151 TREE_OPERAND (t, 1),
15152 strict_overflow_p);
15154 case COND_EXPR:
15155 case CONSTRUCTOR:
15156 case OBJ_TYPE_REF:
15157 case ASSERT_EXPR:
15158 case ADDR_EXPR:
15159 case WITH_SIZE_EXPR:
15160 case SSA_NAME:
15161 return tree_single_nonzero_warnv_p (t, strict_overflow_p);
15163 case COMPOUND_EXPR:
15164 case MODIFY_EXPR:
15165 case BIND_EXPR:
15166 return tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 1),
15167 strict_overflow_p);
15169 case SAVE_EXPR:
15170 return tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 0),
15171 strict_overflow_p);
15173 case CALL_EXPR:
15174 return alloca_call_p (t);
15176 default:
15177 break;
15179 return false;
15182 /* Return true when T is an address and is known to be nonzero.
15183 Handle warnings about undefined signed overflow. */
15185 bool
15186 tree_expr_nonzero_p (tree t)
15188 bool ret, strict_overflow_p;
15190 strict_overflow_p = false;
15191 ret = tree_expr_nonzero_warnv_p (t, &strict_overflow_p);
15192 if (strict_overflow_p)
15193 fold_overflow_warning (("assuming signed overflow does not occur when "
15194 "determining that expression is always "
15195 "non-zero"),
15196 WARN_STRICT_OVERFLOW_MISC);
15197 return ret;
15200 /* Given the components of a binary expression CODE, TYPE, OP0 and OP1,
15201 attempt to fold the expression to a constant without modifying TYPE,
15202 OP0 or OP1.
15204 If the expression could be simplified to a constant, then return
15205 the constant. If the expression would not be simplified to a
15206 constant, then return NULL_TREE. */
15208 tree
15209 fold_binary_to_constant (enum tree_code code, tree type, tree op0, tree op1)
15211 tree tem = fold_binary (code, type, op0, op1);
15212 return (tem && TREE_CONSTANT (tem)) ? tem : NULL_TREE;
15215 /* Given the components of a unary expression CODE, TYPE and OP0,
15216 attempt to fold the expression to a constant without modifying
15217 TYPE or OP0.
15219 If the expression could be simplified to a constant, then return
15220 the constant. If the expression would not be simplified to a
15221 constant, then return NULL_TREE. */
15223 tree
15224 fold_unary_to_constant (enum tree_code code, tree type, tree op0)
15226 tree tem = fold_unary (code, type, op0);
15227 return (tem && TREE_CONSTANT (tem)) ? tem : NULL_TREE;
15230 /* If EXP represents referencing an element in a constant string
15231 (either via pointer arithmetic or array indexing), return the
15232 tree representing the value accessed, otherwise return NULL. */
15234 tree
15235 fold_read_from_constant_string (tree exp)
15237 if ((TREE_CODE (exp) == INDIRECT_REF
15238 || TREE_CODE (exp) == ARRAY_REF)
15239 && TREE_CODE (TREE_TYPE (exp)) == INTEGER_TYPE)
15241 tree exp1 = TREE_OPERAND (exp, 0);
15242 tree index;
15243 tree string;
15244 location_t loc = EXPR_LOCATION (exp);
15246 if (TREE_CODE (exp) == INDIRECT_REF)
15247 string = string_constant (exp1, &index);
15248 else
15250 tree low_bound = array_ref_low_bound (exp);
15251 index = fold_convert_loc (loc, sizetype, TREE_OPERAND (exp, 1));
15253 /* Optimize the special case of a zero lower bound.
15255 We convert the low_bound to sizetype to avoid some problems
15256 with constant folding. (E.g. suppose the lower bound is 1,
15257 and its mode is QI. Without the conversion, (ARRAY
15258 +(INDEX-(unsigned char)1)) becomes ((ARRAY+(-(unsigned char)1))
15259 +INDEX), which becomes (ARRAY+255+INDEX). Oops!) */
15260 if (! integer_zerop (low_bound))
15261 index = size_diffop_loc (loc, index,
15262 fold_convert_loc (loc, sizetype, low_bound));
15264 string = exp1;
15267 if (string
15268 && TYPE_MODE (TREE_TYPE (exp)) == TYPE_MODE (TREE_TYPE (TREE_TYPE (string)))
15269 && TREE_CODE (string) == STRING_CST
15270 && TREE_CODE (index) == INTEGER_CST
15271 && compare_tree_int (index, TREE_STRING_LENGTH (string)) < 0
15272 && (GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_TYPE (string))))
15273 == MODE_INT)
15274 && (GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (TREE_TYPE (string)))) == 1))
15275 return build_int_cst_type (TREE_TYPE (exp),
15276 (TREE_STRING_POINTER (string)
15277 [TREE_INT_CST_LOW (index)]));
15279 return NULL;
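/* Example: for the C expression "abc"[1], EXP is an ARRAY_REF of a
   STRING_CST with index 1, and the result is the INTEGER_CST 98
   ('b').  The pointer form *("abc" + 2) goes through the INDIRECT_REF
   path, where string_constant recovers the STRING_CST and the index,
   yielding 'c'.  */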
15282 /* Return the tree for neg (ARG0) when ARG0 is known to be either
15283 an integer constant, real, or fixed-point constant.
15285 TYPE is the type of the result. */
15287 static tree
15288 fold_negate_const (tree arg0, tree type)
15290 tree t = NULL_TREE;
15292 switch (TREE_CODE (arg0))
15294 case INTEGER_CST:
15296 double_int val = tree_to_double_int (arg0);
15297 int overflow = neg_double (val.low, val.high, &val.low, &val.high);
15299 t = force_fit_type_double (type, val, 1,
15300 (overflow | TREE_OVERFLOW (arg0))
15301 && !TYPE_UNSIGNED (type));
15302 break;
15305 case REAL_CST:
15306 t = build_real (type, real_value_negate (&TREE_REAL_CST (arg0)));
15307 break;
15309 case FIXED_CST:
15311 FIXED_VALUE_TYPE f;
15312 bool overflow_p = fixed_arithmetic (&f, NEGATE_EXPR,
15313 &(TREE_FIXED_CST (arg0)), NULL,
15314 TYPE_SATURATING (type));
15315 t = build_fixed (type, f);
15316 /* Propagate overflow flags. */
15317 if (overflow_p | TREE_OVERFLOW (arg0))
15318 TREE_OVERFLOW (t) = 1;
15319 break;
15322 default:
15323 gcc_unreachable ();
15326 return t;
15329 /* Return the tree for abs (ARG0) when ARG0 is known to be either
15330 an integer constant or real constant.
15332 TYPE is the type of the result. */
15334 tree
15335 fold_abs_const (tree arg0, tree type)
15337 tree t = NULL_TREE;
15339 switch (TREE_CODE (arg0))
15341 case INTEGER_CST:
15343 double_int val = tree_to_double_int (arg0);
15345 /* If the value is unsigned or non-negative, then the absolute value
15346 is the same as the ordinary value. */
15347 if (TYPE_UNSIGNED (type)
15348 || !double_int_negative_p (val))
15349 t = arg0;
15351 /* If the value is negative, then the absolute value is
15352 its negation. */
15353 else
15355 int overflow;
15357 overflow = neg_double (val.low, val.high, &val.low, &val.high);
15358 t = force_fit_type_double (type, val, -1,
15359 overflow | TREE_OVERFLOW (arg0));
15362 break;
15364 case REAL_CST:
15365 if (REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg0)))
15366 t = build_real (type, real_value_negate (&TREE_REAL_CST (arg0)));
15367 else
15368 t = arg0;
15369 break;
15371 default:
15372 gcc_unreachable ();
15375 return t;
15378 /* Return the tree for not (ARG0) when ARG0 is known to be an integer
15379 constant. TYPE is the type of the result. */
15381 static tree
15382 fold_not_const (const_tree arg0, tree type)
15384 double_int val;
15386 gcc_assert (TREE_CODE (arg0) == INTEGER_CST);
15388 val = double_int_not (tree_to_double_int (arg0));
15389 return force_fit_type_double (type, val, 0, TREE_OVERFLOW (arg0));
15392 /* Given CODE, a relational operator, the target type, TYPE and two
15393 constant operands OP0 and OP1, return the result of the
15394 relational operation. If the result is not a compile time
15395 constant, then return NULL_TREE. */
15397 static tree
15398 fold_relational_const (enum tree_code code, tree type, tree op0, tree op1)
15400 int result, invert;
15402 /* From here on, the only cases we handle are when the result is
15403 known to be a constant. */
15405 if (TREE_CODE (op0) == REAL_CST && TREE_CODE (op1) == REAL_CST)
15407 const REAL_VALUE_TYPE *c0 = TREE_REAL_CST_PTR (op0);
15408 const REAL_VALUE_TYPE *c1 = TREE_REAL_CST_PTR (op1);
15410 /* Handle the cases where either operand is a NaN. */
15411 if (real_isnan (c0) || real_isnan (c1))
15413 switch (code)
15415 case EQ_EXPR:
15416 case ORDERED_EXPR:
15417 result = 0;
15418 break;
15420 case NE_EXPR:
15421 case UNORDERED_EXPR:
15422 case UNLT_EXPR:
15423 case UNLE_EXPR:
15424 case UNGT_EXPR:
15425 case UNGE_EXPR:
15426 case UNEQ_EXPR:
15427 result = 1;
15428 break;
15430 case LT_EXPR:
15431 case LE_EXPR:
15432 case GT_EXPR:
15433 case GE_EXPR:
15434 case LTGT_EXPR:
15435 if (flag_trapping_math)
15436 return NULL_TREE;
15437 result = 0;
15438 break;
15440 default:
15441 gcc_unreachable ();
15444 return constant_boolean_node (result, type);
15447 return constant_boolean_node (real_compare (code, c0, c1), type);
15450 if (TREE_CODE (op0) == FIXED_CST && TREE_CODE (op1) == FIXED_CST)
15452 const FIXED_VALUE_TYPE *c0 = TREE_FIXED_CST_PTR (op0);
15453 const FIXED_VALUE_TYPE *c1 = TREE_FIXED_CST_PTR (op1);
15454 return constant_boolean_node (fixed_compare (code, c0, c1), type);
15457 /* Handle equality/inequality of complex constants. */
15458 if (TREE_CODE (op0) == COMPLEX_CST && TREE_CODE (op1) == COMPLEX_CST)
15460 tree rcond = fold_relational_const (code, type,
15461 TREE_REALPART (op0),
15462 TREE_REALPART (op1));
15463 tree icond = fold_relational_const (code, type,
15464 TREE_IMAGPART (op0),
15465 TREE_IMAGPART (op1));
15466 if (code == EQ_EXPR)
15467 return fold_build2 (TRUTH_ANDIF_EXPR, type, rcond, icond);
15468 else if (code == NE_EXPR)
15469 return fold_build2 (TRUTH_ORIF_EXPR, type, rcond, icond);
15470 else
15471 return NULL_TREE;
15474 /* From here on we only handle LT, LE, GT, GE, EQ and NE.
15476 To compute GT, swap the arguments and do LT.
15477 To compute GE, do LT and invert the result.
15478 To compute LE, swap the arguments, do LT and invert the result.
15479 To compute NE, do EQ and invert the result.
15481 Therefore, the code below must handle only EQ and LT. */
15483 if (code == LE_EXPR || code == GT_EXPR)
15485 tree tem = op0;
15486 op0 = op1;
15487 op1 = tem;
15488 code = swap_tree_comparison (code);
15491 /* Note that it is safe to invert for real values here because we
15492 have already handled the one case where it matters. */
15494 invert = 0;
15495 if (code == NE_EXPR || code == GE_EXPR)
15497 invert = 1;
15498 code = invert_tree_comparison (code, false);
15501 /* Compute a result for LT or EQ if args permit;
15502 otherwise return NULL_TREE. */
15503 if (TREE_CODE (op0) == INTEGER_CST && TREE_CODE (op1) == INTEGER_CST)
15505 if (code == EQ_EXPR)
15506 result = tree_int_cst_equal (op0, op1);
15507 else if (TYPE_UNSIGNED (TREE_TYPE (op0)))
15508 result = INT_CST_LT_UNSIGNED (op0, op1);
15509 else
15510 result = INT_CST_LT (op0, op1);
15512 else
15513 return NULL_TREE;
15515 if (invert)
15516 result ^= 1;
15517 return constant_boolean_node (result, type);
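/* Worked example: to fold 3 >= 5 (GE_EXPR) the code above inverts the
   comparison to LT_EXPR and sets INVERT, computes 3 < 5 == 1, and
   flips the result to 0.  For 5 > 3 (GT_EXPR) the operands are
   swapped and LT_EXPR is used directly, giving 3 < 5 == 1.  Only EQ
   and LT ever reach the integer comparison at the bottom.  */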
15520 /* If necessary, return a CLEANUP_POINT_EXPR for EXPR with the
15521 indicated TYPE. If no CLEANUP_POINT_EXPR is necessary, return EXPR
15522 itself. */
15524 tree
15525 fold_build_cleanup_point_expr (tree type, tree expr)
15527 /* If the expression does not have side effects then we don't have to wrap
15528 it with a cleanup point expression. */
15529 if (!TREE_SIDE_EFFECTS (expr))
15530 return expr;
15532 /* If the expression is a return, check whether the expression inside the
15533 return, or the right-hand side of the modify expression inside the
15534 return, has side effects. If either of them doesn't, we don't need to
15535 wrap the expression in a cleanup point expression. Note we don't check
15536 the left-hand side of the modify because it should always be a return decl. */
15537 if (TREE_CODE (expr) == RETURN_EXPR)
15539 tree op = TREE_OPERAND (expr, 0);
15540 if (!op || !TREE_SIDE_EFFECTS (op))
15541 return expr;
15542 op = TREE_OPERAND (op, 1);
15543 if (!TREE_SIDE_EFFECTS (op))
15544 return expr;
15547 return build1 (CLEANUP_POINT_EXPR, type, expr);
15550 /* Given a pointer value OP0 and a type TYPE, return a simplified version
15551 of an indirection through OP0, or NULL_TREE if no simplification is
15552 possible. */
15554 tree
15555 fold_indirect_ref_1 (location_t loc, tree type, tree op0)
15557 tree sub = op0;
15558 tree subtype;
15560 STRIP_NOPS (sub);
15561 subtype = TREE_TYPE (sub);
15562 if (!POINTER_TYPE_P (subtype))
15563 return NULL_TREE;
15565 if (TREE_CODE (sub) == ADDR_EXPR)
15567 tree op = TREE_OPERAND (sub, 0);
15568 tree optype = TREE_TYPE (op);
15569 /* *&CONST_DECL -> the value of the const decl. */
15570 if (TREE_CODE (op) == CONST_DECL)
15571 return DECL_INITIAL (op);
15572 /* *&p => p; make sure to handle *&"str"[cst] here. */
15573 if (type == optype)
15575 tree fop = fold_read_from_constant_string (op);
15576 if (fop)
15577 return fop;
15578 else
15579 return op;
15581 /* *(foo *)&fooarray => fooarray[0] */
15582 else if (TREE_CODE (optype) == ARRAY_TYPE
15583 && type == TREE_TYPE (optype)
15584 && (!in_gimple_form
15585 || TREE_CODE (TYPE_SIZE (type)) == INTEGER_CST))
15587 tree type_domain = TYPE_DOMAIN (optype);
15588 tree min_val = size_zero_node;
15589 if (type_domain && TYPE_MIN_VALUE (type_domain))
15590 min_val = TYPE_MIN_VALUE (type_domain);
15591 if (in_gimple_form
15592 && TREE_CODE (min_val) != INTEGER_CST)
15593 return NULL_TREE;
15594 return build4_loc (loc, ARRAY_REF, type, op, min_val,
15595 NULL_TREE, NULL_TREE);
15597 /* *(foo *)&complexfoo => __real__ complexfoo */
15598 else if (TREE_CODE (optype) == COMPLEX_TYPE
15599 && type == TREE_TYPE (optype))
15600 return fold_build1_loc (loc, REALPART_EXPR, type, op);
15601 /* *(foo *)&vectorfoo => BIT_FIELD_REF<vectorfoo,...> */
15602 else if (TREE_CODE (optype) == VECTOR_TYPE
15603 && type == TREE_TYPE (optype))
15605 tree part_width = TYPE_SIZE (type);
15606 tree index = bitsize_int (0);
15607 return fold_build3_loc (loc, BIT_FIELD_REF, type, op, part_width, index);
15611 if (TREE_CODE (sub) == POINTER_PLUS_EXPR
15612 && TREE_CODE (TREE_OPERAND (sub, 1)) == INTEGER_CST)
15614 tree op00 = TREE_OPERAND (sub, 0);
15615 tree op01 = TREE_OPERAND (sub, 1);
15617 STRIP_NOPS (op00);
15618 if (TREE_CODE (op00) == ADDR_EXPR)
15620 tree op00type;
15621 op00 = TREE_OPERAND (op00, 0);
15622 op00type = TREE_TYPE (op00);
15624 /* ((foo*)&vectorfoo)[1] => BIT_FIELD_REF<vectorfoo,...> */
15625 if (TREE_CODE (op00type) == VECTOR_TYPE
15626 && type == TREE_TYPE (op00type))
15628 HOST_WIDE_INT offset = tree_low_cst (op01, 0);
15629 tree part_width = TYPE_SIZE (type);
15630 unsigned HOST_WIDE_INT part_widthi = tree_low_cst (part_width, 0)/BITS_PER_UNIT;
15631 unsigned HOST_WIDE_INT indexi = offset * BITS_PER_UNIT;
15632 tree index = bitsize_int (indexi);
15634 if (offset/part_widthi < TYPE_VECTOR_SUBPARTS (op00type))
15635 return fold_build3_loc (loc,
15636 BIT_FIELD_REF, type, op00,
15637 part_width, index);
15640 /* ((foo*)&complexfoo)[1] => __imag__ complexfoo */
15641 else if (TREE_CODE (op00type) == COMPLEX_TYPE
15642 && type == TREE_TYPE (op00type))
15644 tree size = TYPE_SIZE_UNIT (type);
15645 if (tree_int_cst_equal (size, op01))
15646 return fold_build1_loc (loc, IMAGPART_EXPR, type, op00);
15648 /* ((foo *)&fooarray)[1] => fooarray[1] */
15649 else if (TREE_CODE (op00type) == ARRAY_TYPE
15650 && type == TREE_TYPE (op00type))
15652 tree type_domain = TYPE_DOMAIN (op00type);
15653 tree min_val = size_zero_node;
15654 if (type_domain && TYPE_MIN_VALUE (type_domain))
15655 min_val = TYPE_MIN_VALUE (type_domain);
15656 op01 = size_binop_loc (loc, EXACT_DIV_EXPR, op01,
15657 TYPE_SIZE_UNIT (type));
15658 op01 = size_binop_loc (loc, PLUS_EXPR, op01, min_val);
15659 return build4_loc (loc, ARRAY_REF, type, op00, op01,
15660 NULL_TREE, NULL_TREE);
15665 /* *(foo *)fooarrptr => (*fooarrptr)[0] */
15666 if (TREE_CODE (TREE_TYPE (subtype)) == ARRAY_TYPE
15667 && type == TREE_TYPE (TREE_TYPE (subtype))
15668 && (!in_gimple_form
15669 || TREE_CODE (TYPE_SIZE (type)) == INTEGER_CST))
15671 tree type_domain;
15672 tree min_val = size_zero_node;
15673 sub = build_fold_indirect_ref_loc (loc, sub);
15674 type_domain = TYPE_DOMAIN (TREE_TYPE (sub));
15675 if (type_domain && TYPE_MIN_VALUE (type_domain))
15676 min_val = TYPE_MIN_VALUE (type_domain);
15677 if (in_gimple_form
15678 && TREE_CODE (min_val) != INTEGER_CST)
15679 return NULL_TREE;
15680 return build4_loc (loc, ARRAY_REF, type, sub, min_val, NULL_TREE,
15681 NULL_TREE);
15684 return NULL_TREE;
15687 /* Builds an expression for an indirection through T, simplifying some
15688 cases. */
15690 tree
15691 build_fold_indirect_ref_loc (location_t loc, tree t)
15693 tree type = TREE_TYPE (TREE_TYPE (t));
15694 tree sub = fold_indirect_ref_1 (loc, type, t);
15696 if (sub)
15697 return sub;
15699 return build1_loc (loc, INDIRECT_REF, type, t);
15702 /* Given an INDIRECT_REF T, return either T or a simplified version. */
15704 tree
15705 fold_indirect_ref_loc (location_t loc, tree t)
15707 tree sub = fold_indirect_ref_1 (loc, TREE_TYPE (t), TREE_OPERAND (t, 0));
15709 if (sub)
15710 return sub;
15711 else
15712 return t;
15715 /* Strip non-trapping, non-side-effecting tree nodes from an expression
15716 whose result is ignored. The type of the returned tree need not be
15717 the same as the original expression. */
15719 tree
15720 fold_ignored_result (tree t)
15722 if (!TREE_SIDE_EFFECTS (t))
15723 return integer_zero_node;
15725 for (;;)
15726 switch (TREE_CODE_CLASS (TREE_CODE (t)))
15728 case tcc_unary:
15729 t = TREE_OPERAND (t, 0);
15730 break;
15732 case tcc_binary:
15733 case tcc_comparison:
15734 if (!TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1)))
15735 t = TREE_OPERAND (t, 0);
15736 else if (!TREE_SIDE_EFFECTS (TREE_OPERAND (t, 0)))
15737 t = TREE_OPERAND (t, 1);
15738 else
15739 return t;
15740 break;
15742 case tcc_expression:
15743 switch (TREE_CODE (t))
15745 case COMPOUND_EXPR:
15746 if (TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1)))
15747 return t;
15748 t = TREE_OPERAND (t, 0);
15749 break;
15751 case COND_EXPR:
15752 if (TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1))
15753 || TREE_SIDE_EFFECTS (TREE_OPERAND (t, 2)))
15754 return t;
15755 t = TREE_OPERAND (t, 0);
15756 break;
15758 default:
15759 return t;
15761 break;
15763 default:
15764 return t;
15768 /* Return the value of VALUE, rounded up to a multiple of DIVISOR.
15769 This can only be applied to objects of a sizetype. */
15771 tree
15772 round_up_loc (location_t loc, tree value, int divisor)
15774 tree div = NULL_TREE;
15776 gcc_assert (divisor > 0);
15777 if (divisor == 1)
15778 return value;
15780 /* See if VALUE is already a multiple of DIVISOR. If so, we don't
15781 have to do anything. Only do this check when we are not given a
15782 constant, because for a constant the check is more expensive than
15783 just doing the rounding. */
15784 if (TREE_CODE (value) != INTEGER_CST)
15786 div = build_int_cst (TREE_TYPE (value), divisor);
15788 if (multiple_of_p (TREE_TYPE (value), value, div))
15789 return value;
15792 /* If divisor is a power of two, simplify this to bit manipulation. */
15793 if (divisor == (divisor & -divisor))
15795 if (TREE_CODE (value) == INTEGER_CST)
15797 double_int val = tree_to_double_int (value);
15798 bool overflow_p;
15800 if ((val.low & (divisor - 1)) == 0)
15801 return value;
15803 overflow_p = TREE_OVERFLOW (value);
15804 val.low &= ~(divisor - 1);
15805 val.low += divisor;
15806 if (val.low == 0)
15808 val.high++;
15809 if (val.high == 0)
15810 overflow_p = true;
15813 return force_fit_type_double (TREE_TYPE (value), val,
15814 -1, overflow_p);
15816 else
15818 tree t;
15820 t = build_int_cst (TREE_TYPE (value), divisor - 1);
15821 value = size_binop_loc (loc, PLUS_EXPR, value, t);
15822 t = build_int_cst (TREE_TYPE (value), -divisor);
15823 value = size_binop_loc (loc, BIT_AND_EXPR, value, t);
15826 else
15828 if (!div)
15829 div = build_int_cst (TREE_TYPE (value), divisor);
15830 value = size_binop_loc (loc, CEIL_DIV_EXPR, value, div);
15831 value = size_binop_loc (loc, MULT_EXPR, value, div);
15834 return value;
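/* The power-of-two path above is the classic mask trick; a standalone
   sketch (plain C, not part of GCC):  */
#if 0
#include <stdio.h>

static unsigned long
round_up_pow2 (unsigned long value, unsigned long divisor)
{
  /* Add DIVISOR - 1, then clear the low bits; DIVISOR must be a
     power of two, so -DIVISOR == ~(DIVISOR - 1).  */
  return (value + divisor - 1) & -divisor;
}

int
main (void)
{
  printf ("%lu %lu %lu\n",
	  round_up_pow2 (15, 8),   /* 16 */
	  round_up_pow2 (16, 8),   /* 16 */
	  round_up_pow2 (17, 8));  /* 24 */
  return 0;
}
#endif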
15837 /* Likewise, but round down. */
15839 tree
15840 round_down_loc (location_t loc, tree value, int divisor)
15842 tree div = NULL_TREE;
15844 gcc_assert (divisor > 0);
15845 if (divisor == 1)
15846 return value;
15848 /* See if VALUE is already a multiple of DIVISOR. If so, we don't
15849 have to do anything. Only do this check when we are not given a
15850 constant, because for a constant the check is more expensive than
15851 just doing the rounding. */
15852 if (TREE_CODE (value) != INTEGER_CST)
15854 div = build_int_cst (TREE_TYPE (value), divisor);
15856 if (multiple_of_p (TREE_TYPE (value), value, div))
15857 return value;
15860 /* If divisor is a power of two, simplify this to bit manipulation. */
15861 if (divisor == (divisor & -divisor))
15863 tree t;
15865 t = build_int_cst (TREE_TYPE (value), -divisor);
15866 value = size_binop_loc (loc, BIT_AND_EXPR, value, t);
15868 else
15870 if (!div)
15871 div = build_int_cst (TREE_TYPE (value), divisor);
15872 value = size_binop_loc (loc, FLOOR_DIV_EXPR, value, div);
15873 value = size_binop_loc (loc, MULT_EXPR, value, div);
15876 return value;
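
/* Worked example (illustrative, not part of the original source):
   round_down_loc (loc, size_int (10), 8) masks with -8, giving
   10 & ~7 == 8; with divisor 12, FLOOR_DIV_EXPR and MULT_EXPR give
   (10 / 12) * 12 == 0.  */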

/* Returns a pointer to the base of the object addressed by EXP and
   extracts the information about the offset of the access, storing it
   in PBITPOS and POFFSET.  */

static tree
split_address_to_core_and_offset (tree exp,
				  HOST_WIDE_INT *pbitpos, tree *poffset)
{
  tree core;
  enum machine_mode mode;
  int unsignedp, volatilep;
  HOST_WIDE_INT bitsize;
  location_t loc = EXPR_LOCATION (exp);

  if (TREE_CODE (exp) == ADDR_EXPR)
    {
      core = get_inner_reference (TREE_OPERAND (exp, 0), &bitsize, pbitpos,
				  poffset, &mode, &unsignedp, &volatilep,
				  false);
      core = build_fold_addr_expr_loc (loc, core);
    }
  else
    {
      core = exp;
      *pbitpos = 0;
      *poffset = NULL_TREE;
    }

  return core;
}
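
/* Illustrative sketch (hypothetical expression, not part of the
   original source): for EXP of the form "&a[i].f",
   get_inner_reference peels the component and array references, so
   the returned core is "&a", *POFFSET holds the variable byte offset
   computed from "i", and *PBITPOS the constant bit offset of field
   "f".  Anything that is not an ADDR_EXPR, e.g. a plain pointer SSA
   name, is itself the core, with zero bit position and no offset.  */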

/* Returns true if the addresses of E1 and E2 differ by a constant,
   false otherwise.  If they do, E1 - E2 is stored in *DIFF.  */

bool
ptr_difference_const (tree e1, tree e2, HOST_WIDE_INT *diff)
{
  tree core1, core2;
  HOST_WIDE_INT bitpos1, bitpos2;
  tree toffset1, toffset2, tdiff, type;

  core1 = split_address_to_core_and_offset (e1, &bitpos1, &toffset1);
  core2 = split_address_to_core_and_offset (e2, &bitpos2, &toffset2);

  if (bitpos1 % BITS_PER_UNIT != 0
      || bitpos2 % BITS_PER_UNIT != 0
      || !operand_equal_p (core1, core2, 0))
    return false;

  if (toffset1 && toffset2)
    {
      type = TREE_TYPE (toffset1);
      if (type != TREE_TYPE (toffset2))
	toffset2 = fold_convert (type, toffset2);

      tdiff = fold_build2 (MINUS_EXPR, type, toffset1, toffset2);
      if (!cst_and_fits_in_hwi (tdiff))
	return false;

      *diff = int_cst_value (tdiff);
    }
  else if (toffset1 || toffset2)
    {
      /* If only one of the offsets is non-constant, the difference cannot
	 be a constant.  */
      return false;
    }
  else
    *diff = 0;

  *diff += (bitpos1 - bitpos2) / BITS_PER_UNIT;
  return true;
}
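
/* Illustrative sketch (hypothetical trees, not part of the original
   source): for E1 == "&a[3]" and E2 == "&a[1]", both addresses split
   to the same core "&a" with constant bit positions, so the call
   stores 2 * sizeof (a[0]) in *DIFF and returns true; if the cores
   differ, or only one of the offsets is non-constant, it returns
   false.  */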

/* Simplify the floating-point expression EXP when the sign of the
   result is not significant.  Return NULL_TREE if no simplification
   is possible.  */

tree
fold_strip_sign_ops (tree exp)
{
  tree arg0, arg1;
  location_t loc = EXPR_LOCATION (exp);

  switch (TREE_CODE (exp))
    {
    case ABS_EXPR:
    case NEGATE_EXPR:
      arg0 = fold_strip_sign_ops (TREE_OPERAND (exp, 0));
      return arg0 ? arg0 : TREE_OPERAND (exp, 0);

    case MULT_EXPR:
    case RDIV_EXPR:
      if (HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (TREE_TYPE (exp))))
	return NULL_TREE;
      arg0 = fold_strip_sign_ops (TREE_OPERAND (exp, 0));
      arg1 = fold_strip_sign_ops (TREE_OPERAND (exp, 1));
      if (arg0 != NULL_TREE || arg1 != NULL_TREE)
	return fold_build2_loc (loc, TREE_CODE (exp), TREE_TYPE (exp),
				arg0 ? arg0 : TREE_OPERAND (exp, 0),
				arg1 ? arg1 : TREE_OPERAND (exp, 1));
      break;

    case COMPOUND_EXPR:
      arg0 = TREE_OPERAND (exp, 0);
      arg1 = fold_strip_sign_ops (TREE_OPERAND (exp, 1));
      if (arg1)
	return fold_build2_loc (loc, COMPOUND_EXPR, TREE_TYPE (exp), arg0, arg1);
      break;

    case COND_EXPR:
      arg0 = fold_strip_sign_ops (TREE_OPERAND (exp, 1));
      arg1 = fold_strip_sign_ops (TREE_OPERAND (exp, 2));
      if (arg0 || arg1)
	return fold_build3_loc (loc,
				COND_EXPR, TREE_TYPE (exp), TREE_OPERAND (exp, 0),
				arg0 ? arg0 : TREE_OPERAND (exp, 1),
				arg1 ? arg1 : TREE_OPERAND (exp, 2));
      break;

    case CALL_EXPR:
      {
	const enum built_in_function fcode = builtin_mathfn_code (exp);
	switch (fcode)
	  {
	  CASE_FLT_FN (BUILT_IN_COPYSIGN):
	    /* Strip a copysign function call and return its first
	       argument.  */
	    arg0 = CALL_EXPR_ARG (exp, 0);
	    arg1 = CALL_EXPR_ARG (exp, 1);
	    return omit_one_operand_loc (loc, TREE_TYPE (exp), arg0, arg1);

	  default:
	    /* Strip sign ops from the argument of "odd" math functions.  */
	    if (negate_mathfn_p (fcode))
	      {
		arg0 = fold_strip_sign_ops (CALL_EXPR_ARG (exp, 0));
		if (arg0)
		  return build_call_expr_loc (loc, get_callee_fndecl (exp), 1, arg0);
	      }
	    break;
	  }
      }
      break;

    default:
      break;
    }
  return NULL_TREE;
}
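
/* Illustrative sketch (hypothetical trees, not part of the original
   source): when only the magnitude of the result matters, e.g. for
   the argument of fabs,

     fold_strip_sign_ops ("-x * y")         yields "x * y",
     fold_strip_sign_ops ("copysign (x, y)") yields "x", keeping any
       side effects of "y" via omit_one_operand_loc, and
     fold_strip_sign_ops ("sin (-x)")        yields "sin (x)", since
       sin is odd per negate_mathfn_p;

   NULL_TREE is returned when nothing can be stripped.  */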