/* Fold a constant sub-tree into a single node for C-compiler
   Copyright (C) 1987, 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998, 1999,
   2000, 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009, 2010, 2011,
   2012 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

/*@@ This file should be rewritten to use an arbitrary precision
  @@ representation for "struct tree_int_cst" and "struct tree_real_cst".
  @@ Perhaps the routines could also be used for bc/dc, and made a lib.
  @@ The routines that translate from the ap rep should
  @@ warn if precision et al. is lost.
  @@ This would also make life easier when this technology is used
  @@ for cross-compilers.  */

/* The entry points in this file are fold, size_int_wide and size_binop.

   fold takes a tree as argument and returns a simplified tree.

   size_binop takes a tree code for an arithmetic operation
   and two operands that are trees, and produces a tree for the
   result, assuming the type comes from `sizetype'.

   size_int takes an integer value, and creates a tree constant
   with type from `sizetype'.

   Note: Since the folders get called on non-gimple code as well as
   gimple code, we need to handle GIMPLE tuples as well as their
   corresponding tree equivalents.  */

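/* As a quick, purely illustrative sketch of the entry points above
   (the variable names here are hypothetical, not part of the
   interface):

     tree four   = size_int (4);
     tree eight  = size_int (8);
     tree twelve = size_binop (PLUS_EXPR, four, eight);

   would yield a sizetype INTEGER_CST of value 12, with the addition
   folded at compile time.  */
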
#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "flags.h"
#include "tree.h"
#include "realmpfr.h"
#include "rtl.h"
#include "expr.h"
#include "tm_p.h"
#include "target.h"
#include "diagnostic-core.h"
#include "intl.h"
#include "ggc.h"
#include "hashtab.h"
#include "langhooks.h"
#include "md5.h"
#include "gimple.h"
#include "tree-flow.h"

/* Nonzero if we are folding constants inside an initializer; zero
   otherwise.  */
int folding_initializer = 0;

/* The following constants represent a bit based encoding of GCC's
   comparison operators.  This encoding simplifies transformations
   on relational comparison operators, such as AND and OR.  */
enum comparison_code {
  COMPCODE_FALSE = 0,
  COMPCODE_LT = 1,
  COMPCODE_EQ = 2,
  COMPCODE_LE = 3,
  COMPCODE_GT = 4,
  COMPCODE_LTGT = 5,
  COMPCODE_GE = 6,
  COMPCODE_ORD = 7,
  COMPCODE_UNORD = 8,
  COMPCODE_UNLT = 9,
  COMPCODE_UNEQ = 10,
  COMPCODE_UNLE = 11,
  COMPCODE_UNGT = 12,
  COMPCODE_NE = 13,
  COMPCODE_UNGE = 14,
  COMPCODE_TRUE = 15
};

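/* In this encoding bit 0 stands for LT, bit 1 for EQ, bit 2 for GT and
   bit 3 for UNORD, so combining comparisons reduces to bitwise
   arithmetic on the codes.  For example, COMPCODE_LT | COMPCODE_EQ
   == 1 | 2 == 3 == COMPCODE_LE, mirroring "a < b || a == b" ->
   "a <= b"; likewise COMPCODE_UNORD | COMPCODE_LT == 8 | 1 == 9
   == COMPCODE_UNLT.  */
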
static bool negate_mathfn_p (enum built_in_function);
static bool negate_expr_p (tree);
static tree negate_expr (tree);
static tree split_tree (tree, enum tree_code, tree *, tree *, tree *, int);
static tree associate_trees (location_t, tree, tree, enum tree_code, tree);
static tree const_binop (enum tree_code, tree, tree);
static enum comparison_code comparison_to_compcode (enum tree_code);
static enum tree_code compcode_to_comparison (enum comparison_code);
static int operand_equal_for_comparison_p (tree, tree, tree);
static int twoval_comparison_p (tree, tree *, tree *, int *);
static tree eval_subst (location_t, tree, tree, tree, tree, tree);
static tree pedantic_omit_one_operand_loc (location_t, tree, tree, tree);
static tree distribute_bit_expr (location_t, enum tree_code, tree, tree, tree);
static tree make_bit_field_ref (location_t, tree, tree,
                                HOST_WIDE_INT, HOST_WIDE_INT, int);
static tree optimize_bit_field_compare (location_t, enum tree_code,
                                        tree, tree, tree);
static tree decode_field_reference (location_t, tree, HOST_WIDE_INT *,
                                    HOST_WIDE_INT *,
                                    enum machine_mode *, int *, int *,
                                    tree *, tree *);
static int all_ones_mask_p (const_tree, int);
static tree sign_bit_p (tree, const_tree);
static int simple_operand_p (const_tree);
static bool simple_operand_p_2 (tree);
static tree range_binop (enum tree_code, tree, tree, int, tree, int);
static tree range_predecessor (tree);
static tree range_successor (tree);
static tree fold_range_test (location_t, enum tree_code, tree, tree, tree);
static tree fold_cond_expr_with_comparison (location_t, tree, tree, tree, tree);
static tree unextend (tree, int, int, tree);
static tree optimize_minmax_comparison (location_t, enum tree_code,
                                        tree, tree, tree);
static tree extract_muldiv (tree, tree, enum tree_code, tree, bool *);
static tree extract_muldiv_1 (tree, tree, enum tree_code, tree, bool *);
static tree fold_binary_op_with_conditional_arg (location_t,
                                                 enum tree_code, tree,
                                                 tree, tree,
                                                 tree, tree, int);
static tree fold_mathfn_compare (location_t,
                                 enum built_in_function, enum tree_code,
                                 tree, tree, tree);
static tree fold_inf_compare (location_t, enum tree_code, tree, tree, tree);
static tree fold_div_compare (location_t, enum tree_code, tree, tree, tree);
static bool reorder_operands_p (const_tree, const_tree);
static tree fold_negate_const (tree, tree);
static tree fold_not_const (const_tree, tree);
static tree fold_relational_const (enum tree_code, tree, tree, tree);
static tree fold_convert_const (enum tree_code, tree, tree);

/* Return EXPR_LOCATION of T if it is not UNKNOWN_LOCATION.
   Otherwise, return LOC.  */

static location_t
expr_location_or (tree t, location_t loc)
{
  location_t tloc = EXPR_LOCATION (t);
  return tloc != UNKNOWN_LOCATION ? tloc : loc;
}

/* Similar to protected_set_expr_location, but never modify X in place;
   if the location can and needs to be set, unshare X first.  */

static inline tree
protected_set_expr_location_unshare (tree x, location_t loc)
{
  if (CAN_HAVE_LOCATION_P (x)
      && EXPR_LOCATION (x) != loc
      && !(TREE_CODE (x) == SAVE_EXPR
           || TREE_CODE (x) == TARGET_EXPR
           || TREE_CODE (x) == BIND_EXPR))
    {
      x = copy_node (x);
      SET_EXPR_LOCATION (x, loc);
    }
  return x;
}

/* We know that A1 + B1 = SUM1, using 2's complement arithmetic and ignoring
   overflow.  Suppose A, B and SUM have the same respective signs as A1, B1,
   and SUM1.  Then this yields nonzero if overflow occurred during the
   addition.

   Overflow occurs if A and B have the same sign, but A and SUM differ in
   sign.  Use `^' to test whether signs differ, and `< 0' to isolate the
   sign.  */
#define OVERFLOW_SUM_SIGN(a, b, sum) ((~((a) ^ (b)) & ((a) ^ (sum))) < 0)

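/* A small worked example of the macro above, using 8-bit values for
   brevity: with a = 0x70 and b = 0x70, the sum wraps to 0xe0, which is
   negative.  Then ~(a ^ b) = 0xff (the operands agree in sign) and
   a ^ sum = 0x90 (A and SUM disagree in sign), so the AND has the sign
   bit set, the whole expression is negative, and overflow is correctly
   reported.  */
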
/* If ARG2 divides ARG1 with zero remainder, carries out the division
   of type CODE and returns the quotient.
   Otherwise returns NULL_TREE.  */

tree
div_if_zero_remainder (enum tree_code code, const_tree arg1, const_tree arg2)
{
  double_int quo, rem;
  int uns;

  /* The sign of the division is according to operand two, which
     does the correct thing for POINTER_PLUS_EXPR where we want
     a signed division.  */
  uns = TYPE_UNSIGNED (TREE_TYPE (arg2));

  quo = double_int_divmod (tree_to_double_int (arg1),
                           tree_to_double_int (arg2),
                           uns, code, &rem);

  if (double_int_zero_p (rem))
    return build_int_cst_wide (TREE_TYPE (arg1), quo.low, quo.high);

  return NULL_TREE;
}

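/* For example (values illustrative only): dividing the INTEGER_CSTs 12
   by 4 with EXACT_DIV_EXPR yields the constant 3, while 12 by 5 leaves
   a remainder of 2 and therefore returns NULL_TREE.  */
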
/* This is nonzero if we should defer warnings about undefined
   overflow.  This facility exists because these warnings are a
   special case.  The code to estimate loop iterations does not want
   to issue any warnings, since it works with expressions which do not
   occur in user code.  Various bits of cleanup code call fold(), but
   only use the result if it has certain characteristics (e.g., is a
   constant); that code only wants to issue a warning if the result is
   used.  */

static int fold_deferring_overflow_warnings;

/* If a warning about undefined overflow is deferred, this is the
   warning.  Note that this may cause us to turn two warnings into
   one, but that is fine since it is sufficient to only give one
   warning per expression.  */

static const char* fold_deferred_overflow_warning;

/* If a warning about undefined overflow is deferred, this is the
   level at which the warning should be emitted.  */

static enum warn_strict_overflow_code fold_deferred_overflow_code;

/* Start deferring overflow warnings.  We could use a stack here to
   permit nested calls, but at present it is not necessary.  */

void
fold_defer_overflow_warnings (void)
{
  ++fold_deferring_overflow_warnings;
}

/* Stop deferring overflow warnings.  If there is a pending warning,
   and ISSUE is true, then issue the warning if appropriate.  STMT is
   the statement with which the warning should be associated (used for
   location information); STMT may be NULL.  CODE is the level of the
   warning--a warn_strict_overflow_code value.  This function will use
   the smaller of CODE and the deferred code when deciding whether to
   issue the warning.  CODE may be zero to mean to always use the
   deferred code.  */

void
fold_undefer_overflow_warnings (bool issue, const_gimple stmt, int code)
{
  const char *warnmsg;
  location_t locus;

  gcc_assert (fold_deferring_overflow_warnings > 0);
  --fold_deferring_overflow_warnings;
  if (fold_deferring_overflow_warnings > 0)
    {
      if (fold_deferred_overflow_warning != NULL
          && code != 0
          && code < (int) fold_deferred_overflow_code)
        fold_deferred_overflow_code = (enum warn_strict_overflow_code) code;
      return;
    }

  warnmsg = fold_deferred_overflow_warning;
  fold_deferred_overflow_warning = NULL;

  if (!issue || warnmsg == NULL)
    return;

  if (gimple_no_warning_p (stmt))
    return;

  /* Use the smallest code level when deciding to issue the
     warning.  */
  if (code == 0 || code > (int) fold_deferred_overflow_code)
    code = fold_deferred_overflow_code;

  if (!issue_strict_overflow_warning (code))
    return;

  if (stmt == NULL)
    locus = input_location;
  else
    locus = gimple_location (stmt);
  warning_at (locus, OPT_Wstrict_overflow, "%s", warnmsg);
}

/* Stop deferring overflow warnings, ignoring any deferred
   warnings.  */

void
fold_undefer_and_ignore_overflow_warnings (void)
{
  fold_undefer_overflow_warnings (false, NULL, 0);
}

/* Whether we are deferring overflow warnings.  */

bool
fold_deferring_overflow_warnings_p (void)
{
  return fold_deferring_overflow_warnings > 0;
}

/* This is called when we fold something based on the fact that signed
   overflow is undefined.  */

static void
fold_overflow_warning (const char* gmsgid, enum warn_strict_overflow_code wc)
{
  if (fold_deferring_overflow_warnings > 0)
    {
      if (fold_deferred_overflow_warning == NULL
          || wc < fold_deferred_overflow_code)
        {
          fold_deferred_overflow_warning = gmsgid;
          fold_deferred_overflow_code = wc;
        }
    }
  else if (issue_strict_overflow_warning (wc))
    warning (OPT_Wstrict_overflow, gmsgid);
}

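/* A sketch of how the deferral machinery above is typically paired up
   by a caller (hypothetical code, for illustration only):

     fold_defer_overflow_warnings ();
     t = fold (expr);
     fold_undefer_overflow_warnings (result_is_used, stmt, 0);

   Any warning recorded while folding is emitted only if
   RESULT_IS_USED is true, with the location taken from STMT.  */
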
/* Return true if the built-in mathematical function specified by CODE
   is odd, i.e. -f(x) == f(-x).  */

static bool
negate_mathfn_p (enum built_in_function code)
{
  switch (code)
    {
    CASE_FLT_FN (BUILT_IN_ASIN):
    CASE_FLT_FN (BUILT_IN_ASINH):
    CASE_FLT_FN (BUILT_IN_ATAN):
    CASE_FLT_FN (BUILT_IN_ATANH):
    CASE_FLT_FN (BUILT_IN_CASIN):
    CASE_FLT_FN (BUILT_IN_CASINH):
    CASE_FLT_FN (BUILT_IN_CATAN):
    CASE_FLT_FN (BUILT_IN_CATANH):
    CASE_FLT_FN (BUILT_IN_CBRT):
    CASE_FLT_FN (BUILT_IN_CPROJ):
    CASE_FLT_FN (BUILT_IN_CSIN):
    CASE_FLT_FN (BUILT_IN_CSINH):
    CASE_FLT_FN (BUILT_IN_CTAN):
    CASE_FLT_FN (BUILT_IN_CTANH):
    CASE_FLT_FN (BUILT_IN_ERF):
    CASE_FLT_FN (BUILT_IN_LLROUND):
    CASE_FLT_FN (BUILT_IN_LROUND):
    CASE_FLT_FN (BUILT_IN_ROUND):
    CASE_FLT_FN (BUILT_IN_SIN):
    CASE_FLT_FN (BUILT_IN_SINH):
    CASE_FLT_FN (BUILT_IN_TAN):
    CASE_FLT_FN (BUILT_IN_TANH):
    CASE_FLT_FN (BUILT_IN_TRUNC):
      return true;

    CASE_FLT_FN (BUILT_IN_LLRINT):
    CASE_FLT_FN (BUILT_IN_LRINT):
    CASE_FLT_FN (BUILT_IN_NEARBYINT):
    CASE_FLT_FN (BUILT_IN_RINT):
      return !flag_rounding_math;

    default:
      break;
    }
  return false;
}

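/* For example, sin is odd (-sin(x) == sin(-x)), so negate_mathfn_p
   returns true for BUILT_IN_SIN and -sin(x) may be folded to sin(-x).
   An even function such as cos (cos(-x) == cos(x)) is deliberately
   absent from the list above.  */
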
/* Check whether we may negate an integer constant T without causing
   overflow.  */

bool
may_negate_without_overflow_p (const_tree t)
{
  unsigned HOST_WIDE_INT val;
  unsigned int prec;
  tree type;

  gcc_assert (TREE_CODE (t) == INTEGER_CST);

  type = TREE_TYPE (t);
  if (TYPE_UNSIGNED (type))
    return false;

  prec = TYPE_PRECISION (type);
  if (prec > HOST_BITS_PER_WIDE_INT)
    {
      if (TREE_INT_CST_LOW (t) != 0)
        return true;
      prec -= HOST_BITS_PER_WIDE_INT;
      val = TREE_INT_CST_HIGH (t);
    }
  else
    val = TREE_INT_CST_LOW (t);
  if (prec < HOST_BITS_PER_WIDE_INT)
    val &= ((unsigned HOST_WIDE_INT) 1 << prec) - 1;
  return val != ((unsigned HOST_WIDE_INT) 1 << (prec - 1));
}

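/* Concretely, for a 32-bit signed type the only value this rejects is
   INT_MIN (0x80000000): the final comparison fails exactly when T is
   the most negative value, whose negation is not representable in
   two's complement.  */
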
/* Determine whether an expression T can be cheaply negated using
   the function negate_expr without introducing undefined overflow.  */

static bool
negate_expr_p (tree t)
{
  tree type;

  if (t == 0)
    return false;

  type = TREE_TYPE (t);

  STRIP_SIGN_NOPS (t);
  switch (TREE_CODE (t))
    {
    case INTEGER_CST:
      if (TYPE_OVERFLOW_WRAPS (type))
        return true;

      /* Check that -CST will not overflow type.  */
      return may_negate_without_overflow_p (t);

    case BIT_NOT_EXPR:
      return (INTEGRAL_TYPE_P (type)
              && TYPE_OVERFLOW_WRAPS (type));

    case FIXED_CST:
    case NEGATE_EXPR:
      return true;

    case REAL_CST:
      /* We want to canonicalize to positive real constants.  Pretend
         that only negative ones can be easily negated.  */
      return REAL_VALUE_NEGATIVE (TREE_REAL_CST (t));

    case COMPLEX_CST:
      return negate_expr_p (TREE_REALPART (t))
             && negate_expr_p (TREE_IMAGPART (t));

    case COMPLEX_EXPR:
      return negate_expr_p (TREE_OPERAND (t, 0))
             && negate_expr_p (TREE_OPERAND (t, 1));

    case CONJ_EXPR:
      return negate_expr_p (TREE_OPERAND (t, 0));

    case PLUS_EXPR:
      if (HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type))
          || HONOR_SIGNED_ZEROS (TYPE_MODE (type)))
        return false;
      /* -(A + B) -> (-B) - A.  */
      if (negate_expr_p (TREE_OPERAND (t, 1))
          && reorder_operands_p (TREE_OPERAND (t, 0),
                                 TREE_OPERAND (t, 1)))
        return true;
      /* -(A + B) -> (-A) - B.  */
      return negate_expr_p (TREE_OPERAND (t, 0));

    case MINUS_EXPR:
      /* We can't turn -(A-B) into B-A when we honor signed zeros.  */
      return !HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type))
             && !HONOR_SIGNED_ZEROS (TYPE_MODE (type))
             && reorder_operands_p (TREE_OPERAND (t, 0),
                                    TREE_OPERAND (t, 1));

    case MULT_EXPR:
      if (TYPE_UNSIGNED (TREE_TYPE (t)))
        break;

      /* Fall through.  */

    case RDIV_EXPR:
      if (! HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (TREE_TYPE (t))))
        return negate_expr_p (TREE_OPERAND (t, 1))
               || negate_expr_p (TREE_OPERAND (t, 0));
      break;

    case TRUNC_DIV_EXPR:
    case ROUND_DIV_EXPR:
    case FLOOR_DIV_EXPR:
    case CEIL_DIV_EXPR:
    case EXACT_DIV_EXPR:
      /* In general we can't negate A / B, because if A is INT_MIN and
         B is 1, we may turn this into INT_MIN / -1 which is undefined
         and actually traps on some architectures.  But if overflow is
         undefined, we can negate, because - (INT_MIN / 1) is an
         overflow.  */
      if (INTEGRAL_TYPE_P (TREE_TYPE (t))
          && !TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (t)))
        break;
      return negate_expr_p (TREE_OPERAND (t, 1))
             || negate_expr_p (TREE_OPERAND (t, 0));

    case NOP_EXPR:
      /* Negate -((double)float) as (double)(-float).  */
      if (TREE_CODE (type) == REAL_TYPE)
        {
          tree tem = strip_float_extensions (t);
          if (tem != t)
            return negate_expr_p (tem);
        }
      break;

    case CALL_EXPR:
      /* Negate -f(x) as f(-x).  */
      if (negate_mathfn_p (builtin_mathfn_code (t)))
        return negate_expr_p (CALL_EXPR_ARG (t, 0));
      break;

    case RSHIFT_EXPR:
      /* Optimize -((int)x >> 31) into (unsigned)x >> 31.  */
      if (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST)
        {
          tree op1 = TREE_OPERAND (t, 1);
          if (TREE_INT_CST_HIGH (op1) == 0
              && (unsigned HOST_WIDE_INT) (TYPE_PRECISION (type) - 1)
                 == TREE_INT_CST_LOW (op1))
            return true;
        }
      break;

    default:
      break;
    }
  return false;
}

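/* As an illustration of the RSHIFT_EXPR case above: for a 32-bit int x,
   (int) x >> 31 evaluates to 0 or -1 depending on the sign bit, so its
   negation is 0 or 1, which is exactly (unsigned) x >> 31.  The same
   reasoning drives the RSHIFT_EXPR case in fold_negate_expr below.  */
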
/* Given T, an expression, return a folded tree for -T or NULL_TREE, if no
   simplification is possible.
   If negate_expr_p would return true for T, NULL_TREE will never be
   returned.  */

static tree
fold_negate_expr (location_t loc, tree t)
{
  tree type = TREE_TYPE (t);
  tree tem;

  switch (TREE_CODE (t))
    {
    /* Convert - (~A) to A + 1.  */
    case BIT_NOT_EXPR:
      if (INTEGRAL_TYPE_P (type))
        return fold_build2_loc (loc, PLUS_EXPR, type, TREE_OPERAND (t, 0),
                                build_int_cst (type, 1));
      break;

    case INTEGER_CST:
      tem = fold_negate_const (t, type);
      if (TREE_OVERFLOW (tem) == TREE_OVERFLOW (t)
          || !TYPE_OVERFLOW_TRAPS (type))
        return tem;
      break;

    case REAL_CST:
      tem = fold_negate_const (t, type);
      /* Two's complement FP formats, such as c4x, may overflow.  */
      if (!TREE_OVERFLOW (tem) || !flag_trapping_math)
        return tem;
      break;

    case FIXED_CST:
      tem = fold_negate_const (t, type);
      return tem;

    case COMPLEX_CST:
      {
        tree rpart = negate_expr (TREE_REALPART (t));
        tree ipart = negate_expr (TREE_IMAGPART (t));

        if ((TREE_CODE (rpart) == REAL_CST
             && TREE_CODE (ipart) == REAL_CST)
            || (TREE_CODE (rpart) == INTEGER_CST
                && TREE_CODE (ipart) == INTEGER_CST))
          return build_complex (type, rpart, ipart);
      }
      break;

    case COMPLEX_EXPR:
      if (negate_expr_p (t))
        return fold_build2_loc (loc, COMPLEX_EXPR, type,
                                fold_negate_expr (loc, TREE_OPERAND (t, 0)),
                                fold_negate_expr (loc, TREE_OPERAND (t, 1)));
      break;

    case CONJ_EXPR:
      if (negate_expr_p (t))
        return fold_build1_loc (loc, CONJ_EXPR, type,
                                fold_negate_expr (loc, TREE_OPERAND (t, 0)));
      break;

    case NEGATE_EXPR:
      return TREE_OPERAND (t, 0);

    case PLUS_EXPR:
      if (!HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type))
          && !HONOR_SIGNED_ZEROS (TYPE_MODE (type)))
        {
          /* -(A + B) -> (-B) - A.  */
          if (negate_expr_p (TREE_OPERAND (t, 1))
              && reorder_operands_p (TREE_OPERAND (t, 0),
                                     TREE_OPERAND (t, 1)))
            {
              tem = negate_expr (TREE_OPERAND (t, 1));
              return fold_build2_loc (loc, MINUS_EXPR, type,
                                      tem, TREE_OPERAND (t, 0));
            }

          /* -(A + B) -> (-A) - B.  */
          if (negate_expr_p (TREE_OPERAND (t, 0)))
            {
              tem = negate_expr (TREE_OPERAND (t, 0));
              return fold_build2_loc (loc, MINUS_EXPR, type,
                                      tem, TREE_OPERAND (t, 1));
            }
        }
      break;

    case MINUS_EXPR:
      /* - (A - B) -> B - A  */
      if (!HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type))
          && !HONOR_SIGNED_ZEROS (TYPE_MODE (type))
          && reorder_operands_p (TREE_OPERAND (t, 0), TREE_OPERAND (t, 1)))
        return fold_build2_loc (loc, MINUS_EXPR, type,
                                TREE_OPERAND (t, 1), TREE_OPERAND (t, 0));
      break;

    case MULT_EXPR:
      if (TYPE_UNSIGNED (type))
        break;

      /* Fall through.  */

    case RDIV_EXPR:
      if (! HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type)))
        {
          tem = TREE_OPERAND (t, 1);
          if (negate_expr_p (tem))
            return fold_build2_loc (loc, TREE_CODE (t), type,
                                    TREE_OPERAND (t, 0), negate_expr (tem));
          tem = TREE_OPERAND (t, 0);
          if (negate_expr_p (tem))
            return fold_build2_loc (loc, TREE_CODE (t), type,
                                    negate_expr (tem), TREE_OPERAND (t, 1));
        }
      break;

    case TRUNC_DIV_EXPR:
    case ROUND_DIV_EXPR:
    case FLOOR_DIV_EXPR:
    case CEIL_DIV_EXPR:
    case EXACT_DIV_EXPR:
      /* In general we can't negate A / B, because if A is INT_MIN and
         B is 1, we may turn this into INT_MIN / -1 which is undefined
         and actually traps on some architectures.  But if overflow is
         undefined, we can negate, because - (INT_MIN / 1) is an
         overflow.  */
      if (!INTEGRAL_TYPE_P (type) || TYPE_OVERFLOW_UNDEFINED (type))
        {
          const char * const warnmsg = G_("assuming signed overflow does not "
                                          "occur when negating a division");
          tem = TREE_OPERAND (t, 1);
          if (negate_expr_p (tem))
            {
              if (INTEGRAL_TYPE_P (type)
                  && (TREE_CODE (tem) != INTEGER_CST
                      || integer_onep (tem)))
                fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_MISC);
              return fold_build2_loc (loc, TREE_CODE (t), type,
                                      TREE_OPERAND (t, 0), negate_expr (tem));
            }
          tem = TREE_OPERAND (t, 0);
          if (negate_expr_p (tem))
            {
              if (INTEGRAL_TYPE_P (type)
                  && (TREE_CODE (tem) != INTEGER_CST
                      || tree_int_cst_equal (tem, TYPE_MIN_VALUE (type))))
                fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_MISC);
              return fold_build2_loc (loc, TREE_CODE (t), type,
                                      negate_expr (tem), TREE_OPERAND (t, 1));
            }
        }
      break;

    case NOP_EXPR:
      /* Convert -((double)float) into (double)(-float).  */
      if (TREE_CODE (type) == REAL_TYPE)
        {
          tem = strip_float_extensions (t);
          if (tem != t && negate_expr_p (tem))
            return fold_convert_loc (loc, type, negate_expr (tem));
        }
      break;

    case CALL_EXPR:
      /* Negate -f(x) as f(-x).  */
      if (negate_mathfn_p (builtin_mathfn_code (t))
          && negate_expr_p (CALL_EXPR_ARG (t, 0)))
        {
          tree fndecl, arg;

          fndecl = get_callee_fndecl (t);
          arg = negate_expr (CALL_EXPR_ARG (t, 0));
          return build_call_expr_loc (loc, fndecl, 1, arg);
        }
      break;

    case RSHIFT_EXPR:
      /* Optimize -((int)x >> 31) into (unsigned)x >> 31.  */
      if (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST)
        {
          tree op1 = TREE_OPERAND (t, 1);
          if (TREE_INT_CST_HIGH (op1) == 0
              && (unsigned HOST_WIDE_INT) (TYPE_PRECISION (type) - 1)
                 == TREE_INT_CST_LOW (op1))
            {
              tree ntype = TYPE_UNSIGNED (type)
                           ? signed_type_for (type)
                           : unsigned_type_for (type);
              tree temp = fold_convert_loc (loc, ntype, TREE_OPERAND (t, 0));
              temp = fold_build2_loc (loc, RSHIFT_EXPR, ntype, temp, op1);
              return fold_convert_loc (loc, type, temp);
            }
        }
      break;

    default:
      break;
    }

  return NULL_TREE;
}

/* Like fold_negate_expr, but return a NEGATE_EXPR tree, if T cannot be
   negated in a simpler way.  Also allow for T to be NULL_TREE, in which case
   return NULL_TREE.  */

static tree
negate_expr (tree t)
{
  tree type, tem;
  location_t loc;

  if (t == NULL_TREE)
    return NULL_TREE;

  loc = EXPR_LOCATION (t);
  type = TREE_TYPE (t);
  STRIP_SIGN_NOPS (t);

  tem = fold_negate_expr (loc, t);
  if (!tem)
    tem = build1_loc (loc, NEGATE_EXPR, TREE_TYPE (t), t);
  return fold_convert_loc (loc, type, tem);
}

/* Split a tree IN into a constant, literal and variable parts that could be
   combined with CODE to make IN.  "constant" means an expression with
   TREE_CONSTANT but that isn't an actual constant.  CODE must be a
   commutative arithmetic operation.  Store the constant part into *CONP,
   the literal in *LITP and return the variable part.  If a part isn't
   present, set it to null.  If the tree does not decompose in this way,
   return the entire tree as the variable part and the other parts as null.

   If CODE is PLUS_EXPR we also split trees that use MINUS_EXPR.  In that
   case, we negate an operand that was subtracted.  Except if it is a
   literal for which we use *MINUS_LITP instead.

   If NEGATE_P is true, we are negating all of IN, again except a literal
   for which we use *MINUS_LITP instead.

   If IN is itself a literal or constant, return it as appropriate.

   Note that we do not guarantee that any of the three values will be the
   same type as IN, but they will have the same signedness and mode.  */

static tree
split_tree (tree in, enum tree_code code, tree *conp, tree *litp,
            tree *minus_litp, int negate_p)
{
  tree var = 0;

  *conp = 0;
  *litp = 0;
  *minus_litp = 0;

  /* Strip any conversions that don't change the machine mode or signedness.  */
  STRIP_SIGN_NOPS (in);

  if (TREE_CODE (in) == INTEGER_CST || TREE_CODE (in) == REAL_CST
      || TREE_CODE (in) == FIXED_CST)
    *litp = in;
  else if (TREE_CODE (in) == code
           || ((! FLOAT_TYPE_P (TREE_TYPE (in)) || flag_associative_math)
               && ! SAT_FIXED_POINT_TYPE_P (TREE_TYPE (in))
               /* We can associate addition and subtraction together (even
                  though the C standard doesn't say so) for integers because
                  the value is not affected.  For reals, the value might be
                  affected, so we can't.  */
               && ((code == PLUS_EXPR && TREE_CODE (in) == MINUS_EXPR)
                   || (code == MINUS_EXPR && TREE_CODE (in) == PLUS_EXPR))))
    {
      tree op0 = TREE_OPERAND (in, 0);
      tree op1 = TREE_OPERAND (in, 1);
      int neg1_p = TREE_CODE (in) == MINUS_EXPR;
      int neg_litp_p = 0, neg_conp_p = 0, neg_var_p = 0;

      /* First see if either of the operands is a literal, then a constant.  */
      if (TREE_CODE (op0) == INTEGER_CST || TREE_CODE (op0) == REAL_CST
          || TREE_CODE (op0) == FIXED_CST)
        *litp = op0, op0 = 0;
      else if (TREE_CODE (op1) == INTEGER_CST || TREE_CODE (op1) == REAL_CST
               || TREE_CODE (op1) == FIXED_CST)
        *litp = op1, neg_litp_p = neg1_p, op1 = 0;

      if (op0 != 0 && TREE_CONSTANT (op0))
        *conp = op0, op0 = 0;
      else if (op1 != 0 && TREE_CONSTANT (op1))
        *conp = op1, neg_conp_p = neg1_p, op1 = 0;

      /* If we haven't dealt with either operand, this is not a case we can
         decompose.  Otherwise, VAR is either of the ones remaining, if any.  */
      if (op0 != 0 && op1 != 0)
        var = in;
      else if (op0 != 0)
        var = op0;
      else
        var = op1, neg_var_p = neg1_p;

      /* Now do any needed negations.  */
      if (neg_litp_p)
        *minus_litp = *litp, *litp = 0;
      if (neg_conp_p)
        *conp = negate_expr (*conp);
      if (neg_var_p)
        var = negate_expr (var);
    }
  else if (TREE_CONSTANT (in))
    *conp = in;
  else
    var = in;

  if (negate_p)
    {
      if (*litp)
        *minus_litp = *litp, *litp = 0;
      else if (*minus_litp)
        *litp = *minus_litp, *minus_litp = 0;
      *conp = negate_expr (*conp);
      var = negate_expr (var);
    }

  return var;
}

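/* For instance, splitting IN = "x - 4" with CODE == PLUS_EXPR and
   NEGATE_P == 0 stores the literal 4 in *MINUS_LITP (since it was
   subtracted), leaves *CONP and *LITP null, and returns the variable
   part "x".  */
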
/* Re-associate trees split by the above function.  T1 and T2 are
   either expressions to associate or null.  Return the new
   expression, if any.  LOC is the location of the new expression.  If
   we build an operation, do it in TYPE and with CODE.  */

static tree
associate_trees (location_t loc, tree t1, tree t2, enum tree_code code, tree type)
{
  if (t1 == 0)
    return t2;
  else if (t2 == 0)
    return t1;

  /* If either input is CODE, a PLUS_EXPR, or a MINUS_EXPR, don't
     try to fold this since we will have infinite recursion.  But do
     deal with any NEGATE_EXPRs.  */
  if (TREE_CODE (t1) == code || TREE_CODE (t2) == code
      || TREE_CODE (t1) == MINUS_EXPR || TREE_CODE (t2) == MINUS_EXPR)
    {
      if (code == PLUS_EXPR)
        {
          if (TREE_CODE (t1) == NEGATE_EXPR)
            return build2_loc (loc, MINUS_EXPR, type,
                               fold_convert_loc (loc, type, t2),
                               fold_convert_loc (loc, type,
                                                 TREE_OPERAND (t1, 0)));
          else if (TREE_CODE (t2) == NEGATE_EXPR)
            return build2_loc (loc, MINUS_EXPR, type,
                               fold_convert_loc (loc, type, t1),
                               fold_convert_loc (loc, type,
                                                 TREE_OPERAND (t2, 0)));
          else if (integer_zerop (t2))
            return fold_convert_loc (loc, type, t1);
        }
      else if (code == MINUS_EXPR)
        {
          if (integer_zerop (t2))
            return fold_convert_loc (loc, type, t1);
        }

      return build2_loc (loc, code, type, fold_convert_loc (loc, type, t1),
                         fold_convert_loc (loc, type, t2));
    }

  return fold_build2_loc (loc, code, type, fold_convert_loc (loc, type, t1),
                          fold_convert_loc (loc, type, t2));
}

/* Check whether TYPE1 and TYPE2 are equivalent integer types, suitable
   for use in int_const_binop, size_binop and size_diffop.  */

static bool
int_binop_types_match_p (enum tree_code code, const_tree type1, const_tree type2)
{
  if (TREE_CODE (type1) != INTEGER_TYPE && !POINTER_TYPE_P (type1))
    return false;
  if (TREE_CODE (type2) != INTEGER_TYPE && !POINTER_TYPE_P (type2))
    return false;

  switch (code)
    {
    case LSHIFT_EXPR:
    case RSHIFT_EXPR:
    case LROTATE_EXPR:
    case RROTATE_EXPR:
      return true;

    default:
      break;
    }

  return TYPE_UNSIGNED (type1) == TYPE_UNSIGNED (type2)
         && TYPE_PRECISION (type1) == TYPE_PRECISION (type2)
         && TYPE_MODE (type1) == TYPE_MODE (type2);
}

/* Combine two integer constants ARG1 and ARG2 under operation CODE
   to produce a new constant.  Return NULL_TREE if we don't know how
   to evaluate CODE at compile-time.  */

static tree
int_const_binop_1 (enum tree_code code, const_tree arg1, const_tree arg2,
                   int overflowable)
{
  double_int op1, op2, res, tmp;
  tree t;
  tree type = TREE_TYPE (arg1);
  bool uns = TYPE_UNSIGNED (type);
  bool overflow = false;

  op1 = tree_to_double_int (arg1);
  op2 = tree_to_double_int (arg2);

  switch (code)
    {
    case BIT_IOR_EXPR:
      res = double_int_ior (op1, op2);
      break;

    case BIT_XOR_EXPR:
      res = double_int_xor (op1, op2);
      break;

    case BIT_AND_EXPR:
      res = double_int_and (op1, op2);
      break;

    case RSHIFT_EXPR:
      res = double_int_rshift (op1, double_int_to_shwi (op2),
                               TYPE_PRECISION (type), !uns);
      break;

    case LSHIFT_EXPR:
      /* It's unclear from the C standard whether shifts can overflow.
         The following code ignores overflow; perhaps a C standard
         interpretation ruling is needed.  */
      res = double_int_lshift (op1, double_int_to_shwi (op2),
                               TYPE_PRECISION (type), !uns);
      break;

    case RROTATE_EXPR:
      res = double_int_rrotate (op1, double_int_to_shwi (op2),
                                TYPE_PRECISION (type));
      break;

    case LROTATE_EXPR:
      res = double_int_lrotate (op1, double_int_to_shwi (op2),
                                TYPE_PRECISION (type));
      break;

    case PLUS_EXPR:
      overflow = add_double (op1.low, op1.high, op2.low, op2.high,
                             &res.low, &res.high);
      break;

    case MINUS_EXPR:
      neg_double (op2.low, op2.high, &res.low, &res.high);
      add_double (op1.low, op1.high, res.low, res.high,
                  &res.low, &res.high);
      overflow = OVERFLOW_SUM_SIGN (res.high, op2.high, op1.high);
      break;

    case MULT_EXPR:
      overflow = mul_double (op1.low, op1.high, op2.low, op2.high,
                             &res.low, &res.high);
      break;

    case MULT_HIGHPART_EXPR:
      /* ??? Need quad precision, or an additional shift operand
         to the multiply primitive, to handle very large highparts.  */
      if (TYPE_PRECISION (type) > HOST_BITS_PER_WIDE_INT)
        return NULL_TREE;
      tmp = double_int_mul (op1, op2);
      res = double_int_rshift (tmp, TYPE_PRECISION (type),
                               TYPE_PRECISION (type), !uns);
      break;

    case TRUNC_DIV_EXPR:
    case FLOOR_DIV_EXPR: case CEIL_DIV_EXPR:
    case EXACT_DIV_EXPR:
      /* This is a shortcut for a common special case.  */
      if (op2.high == 0 && (HOST_WIDE_INT) op2.low > 0
          && !TREE_OVERFLOW (arg1)
          && !TREE_OVERFLOW (arg2)
          && op1.high == 0 && (HOST_WIDE_INT) op1.low >= 0)
        {
          if (code == CEIL_DIV_EXPR)
            op1.low += op2.low - 1;

          res.low = op1.low / op2.low, res.high = 0;
          break;
        }

      /* ... fall through ...  */

    case ROUND_DIV_EXPR:
      if (double_int_zero_p (op2))
        return NULL_TREE;
      if (double_int_one_p (op2))
        {
          res = op1;
          break;
        }
      if (double_int_equal_p (op1, op2)
          && ! double_int_zero_p (op1))
        {
          res = double_int_one;
          break;
        }
      overflow = div_and_round_double (code, uns,
                                       op1.low, op1.high, op2.low, op2.high,
                                       &res.low, &res.high,
                                       &tmp.low, &tmp.high);
      break;

    case TRUNC_MOD_EXPR:
    case FLOOR_MOD_EXPR: case CEIL_MOD_EXPR:
      /* This is a shortcut for a common special case.  */
      if (op2.high == 0 && (HOST_WIDE_INT) op2.low > 0
          && !TREE_OVERFLOW (arg1)
          && !TREE_OVERFLOW (arg2)
          && op1.high == 0 && (HOST_WIDE_INT) op1.low >= 0)
        {
          if (code == CEIL_MOD_EXPR)
            op1.low += op2.low - 1;
          res.low = op1.low % op2.low, res.high = 0;
          break;
        }

      /* ... fall through ...  */

    case ROUND_MOD_EXPR:
      if (double_int_zero_p (op2))
        return NULL_TREE;
      overflow = div_and_round_double (code, uns,
                                       op1.low, op1.high, op2.low, op2.high,
                                       &tmp.low, &tmp.high,
                                       &res.low, &res.high);
      break;

    case MIN_EXPR:
      res = double_int_min (op1, op2, uns);
      break;

    case MAX_EXPR:
      res = double_int_max (op1, op2, uns);
      break;

    default:
      return NULL_TREE;
    }

  t = force_fit_type_double (TREE_TYPE (arg1), res, overflowable,
                             (!uns && overflow)
                             | TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2));

  return t;
}

tree
int_const_binop (enum tree_code code, const_tree arg1, const_tree arg2)
{
  return int_const_binop_1 (code, arg1, arg2, 1);
}

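/* For example, int_const_binop (PLUS_EXPR, c2, c3), where C2 and C3
   are hypothetical INTEGER_CSTs holding 2 and 3, yields an INTEGER_CST
   holding 5; if the addition wraps in a signed type, the TREE_OVERFLOW
   flag is set on the result via force_fit_type_double above.  */
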
/* Combine two constants ARG1 and ARG2 under operation CODE to produce a new
   constant.  We assume ARG1 and ARG2 have the same data type, or at least
   are the same kind of constant and the same machine mode.  Return zero if
   combining the constants is not allowed in the current operating mode.  */

static tree
const_binop (enum tree_code code, tree arg1, tree arg2)
{
  /* Sanity check for the recursive cases.  */
  if (!arg1 || !arg2)
    return NULL_TREE;

  STRIP_NOPS (arg1);
  STRIP_NOPS (arg2);

  if (TREE_CODE (arg1) == INTEGER_CST)
    return int_const_binop (code, arg1, arg2);

  if (TREE_CODE (arg1) == REAL_CST)
    {
      enum machine_mode mode;
      REAL_VALUE_TYPE d1;
      REAL_VALUE_TYPE d2;
      REAL_VALUE_TYPE value;
      REAL_VALUE_TYPE result;
      bool inexact;
      tree t, type;

      /* The following codes are handled by real_arithmetic.  */
      switch (code)
        {
        case PLUS_EXPR:
        case MINUS_EXPR:
        case MULT_EXPR:
        case RDIV_EXPR:
        case MIN_EXPR:
        case MAX_EXPR:
          break;

        default:
          return NULL_TREE;
        }

      d1 = TREE_REAL_CST (arg1);
      d2 = TREE_REAL_CST (arg2);

      type = TREE_TYPE (arg1);
      mode = TYPE_MODE (type);

      /* Don't perform operation if we honor signaling NaNs and
         either operand is a NaN.  */
      if (HONOR_SNANS (mode)
          && (REAL_VALUE_ISNAN (d1) || REAL_VALUE_ISNAN (d2)))
        return NULL_TREE;

      /* Don't perform operation if it would raise a division
         by zero exception.  */
      if (code == RDIV_EXPR
          && REAL_VALUES_EQUAL (d2, dconst0)
          && (flag_trapping_math || ! MODE_HAS_INFINITIES (mode)))
        return NULL_TREE;

      /* If either operand is a NaN, just return it.  Otherwise, set up
         for floating-point trap; we return an overflow.  */
      if (REAL_VALUE_ISNAN (d1))
        return arg1;
      else if (REAL_VALUE_ISNAN (d2))
        return arg2;

      inexact = real_arithmetic (&value, code, &d1, &d2);
      real_convert (&result, mode, &value);

      /* Don't constant fold this floating point operation if
         the result has overflowed and flag_trapping_math.  */
      if (flag_trapping_math
          && MODE_HAS_INFINITIES (mode)
          && REAL_VALUE_ISINF (result)
          && !REAL_VALUE_ISINF (d1)
          && !REAL_VALUE_ISINF (d2))
        return NULL_TREE;

      /* Don't constant fold this floating point operation if the
         result may depend upon the run-time rounding mode and
         flag_rounding_math is set, or if GCC's software emulation
         is unable to accurately represent the result.  */
      if ((flag_rounding_math
           || (MODE_COMPOSITE_P (mode) && !flag_unsafe_math_optimizations))
          && (inexact || !real_identical (&result, &value)))
        return NULL_TREE;

      t = build_real (type, result);

      TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2);
      return t;
    }

  if (TREE_CODE (arg1) == FIXED_CST)
    {
      FIXED_VALUE_TYPE f1;
      FIXED_VALUE_TYPE f2;
      FIXED_VALUE_TYPE result;
      tree t, type;
      int sat_p;
      bool overflow_p;

      /* The following codes are handled by fixed_arithmetic.  */
      switch (code)
        {
        case PLUS_EXPR:
        case MINUS_EXPR:
        case MULT_EXPR:
        case TRUNC_DIV_EXPR:
          f2 = TREE_FIXED_CST (arg2);
          break;

        case LSHIFT_EXPR:
        case RSHIFT_EXPR:
          f2.data.high = TREE_INT_CST_HIGH (arg2);
          f2.data.low = TREE_INT_CST_LOW (arg2);
          f2.mode = SImode;
          break;

        default:
          return NULL_TREE;
        }

      f1 = TREE_FIXED_CST (arg1);
      type = TREE_TYPE (arg1);
      sat_p = TYPE_SATURATING (type);
      overflow_p = fixed_arithmetic (&result, code, &f1, &f2, sat_p);
      t = build_fixed (type, result);
      /* Propagate overflow flags.  */
      if (overflow_p | TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2))
        TREE_OVERFLOW (t) = 1;
      return t;
    }

  if (TREE_CODE (arg1) == COMPLEX_CST)
    {
      tree type = TREE_TYPE (arg1);
      tree r1 = TREE_REALPART (arg1);
      tree i1 = TREE_IMAGPART (arg1);
      tree r2 = TREE_REALPART (arg2);
      tree i2 = TREE_IMAGPART (arg2);
      tree real, imag;

      switch (code)
        {
        case PLUS_EXPR:
        case MINUS_EXPR:
          real = const_binop (code, r1, r2);
          imag = const_binop (code, i1, i2);
          break;

        case MULT_EXPR:
          if (COMPLEX_FLOAT_TYPE_P (type))
            return do_mpc_arg2 (arg1, arg2, type,
                                /* do_nonfinite= */ folding_initializer,
                                mpc_mul);

          real = const_binop (MINUS_EXPR,
                              const_binop (MULT_EXPR, r1, r2),
                              const_binop (MULT_EXPR, i1, i2));
          imag = const_binop (PLUS_EXPR,
                              const_binop (MULT_EXPR, r1, i2),
                              const_binop (MULT_EXPR, i1, r2));
          break;

        case RDIV_EXPR:
          if (COMPLEX_FLOAT_TYPE_P (type))
            return do_mpc_arg2 (arg1, arg2, type,
                                /* do_nonfinite= */ folding_initializer,
                                mpc_div);
          /* Fallthru ...  */
        case TRUNC_DIV_EXPR:
        case CEIL_DIV_EXPR:
        case FLOOR_DIV_EXPR:
        case ROUND_DIV_EXPR:
          if (flag_complex_method == 0)
            {
              /* Keep this algorithm in sync with
                 tree-complex.c:expand_complex_div_straight().

                 Expand complex division to scalars, straightforward algorithm.
                 a / b = ((ar*br + ai*bi)/t) + i((ai*br - ar*bi)/t)
                 t = br*br + bi*bi  */
              tree magsquared
                = const_binop (PLUS_EXPR,
                               const_binop (MULT_EXPR, r2, r2),
                               const_binop (MULT_EXPR, i2, i2));
              tree t1
                = const_binop (PLUS_EXPR,
                               const_binop (MULT_EXPR, r1, r2),
                               const_binop (MULT_EXPR, i1, i2));
              tree t2
                = const_binop (MINUS_EXPR,
                               const_binop (MULT_EXPR, i1, r2),
                               const_binop (MULT_EXPR, r1, i2));

              real = const_binop (code, t1, magsquared);
              imag = const_binop (code, t2, magsquared);
            }
          else
            {
              /* Keep this algorithm in sync with
                 tree-complex.c:expand_complex_div_wide().

                 Expand complex division to scalars, modified algorithm to
                 minimize overflow with wide input ranges.  */
              tree compare = fold_build2 (LT_EXPR, boolean_type_node,
                                          fold_abs_const (r2, TREE_TYPE (type)),
                                          fold_abs_const (i2, TREE_TYPE (type)));

              if (integer_nonzerop (compare))
                {
                  /* In the TRUE branch, we compute
                     ratio = br/bi;
                     div = (br * ratio) + bi;
                     tr = (ar * ratio) + ai;
                     ti = (ai * ratio) - ar;
                     tr = tr / div;
                     ti = ti / div;  */
                  tree ratio = const_binop (code, r2, i2);
                  tree div = const_binop (PLUS_EXPR, i2,
                                          const_binop (MULT_EXPR, r2, ratio));
                  real = const_binop (MULT_EXPR, r1, ratio);
                  real = const_binop (PLUS_EXPR, real, i1);
                  real = const_binop (code, real, div);

                  imag = const_binop (MULT_EXPR, i1, ratio);
                  imag = const_binop (MINUS_EXPR, imag, r1);
                  imag = const_binop (code, imag, div);
                }
              else
                {
                  /* In the FALSE branch, we compute
                     ratio = d/c;
                     divisor = (d * ratio) + c;
                     tr = (b * ratio) + a;
                     ti = b - (a * ratio);
                     tr = tr / div;
                     ti = ti / div;  */
                  tree ratio = const_binop (code, i2, r2);
                  tree div = const_binop (PLUS_EXPR, r2,
                                          const_binop (MULT_EXPR, i2, ratio));

                  real = const_binop (MULT_EXPR, i1, ratio);
                  real = const_binop (PLUS_EXPR, real, r1);
                  real = const_binop (code, real, div);

                  imag = const_binop (MULT_EXPR, r1, ratio);
                  imag = const_binop (MINUS_EXPR, i1, imag);
                  imag = const_binop (code, imag, div);
                }
            }
          break;

        default:
          return NULL_TREE;
        }

      if (real && imag)
        return build_complex (type, real, imag);
    }

  if (TREE_CODE (arg1) == VECTOR_CST
      && TREE_CODE (arg2) == VECTOR_CST)
    {
      tree type = TREE_TYPE (arg1);
      int count = TYPE_VECTOR_SUBPARTS (type), i;
      tree *elts = XALLOCAVEC (tree, count);

      for (i = 0; i < count; i++)
        {
          tree elem1 = VECTOR_CST_ELT (arg1, i);
          tree elem2 = VECTOR_CST_ELT (arg2, i);

          elts[i] = const_binop (code, elem1, elem2);

          /* It is possible that const_binop cannot handle the given
             code and returns NULL_TREE.  */
          if (elts[i] == NULL_TREE)
            return NULL_TREE;
        }

      return build_vector (type, elts);
    }

  return NULL_TREE;
}

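/* As a worked instance of the straightforward complex multiplication
   above: (1 + 2i) * (3 + 4i) folds with real = 1*3 - 2*4 = -5 and
   imag = 1*4 + 2*3 = 10, i.e. the constant -5 + 10i.  */
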
/* Create a size type INT_CST node with NUMBER sign extended.  KIND
   indicates which particular sizetype to create.  */

tree
size_int_kind (HOST_WIDE_INT number, enum size_type_kind kind)
{
  return build_int_cst (sizetype_tab[(int) kind], number);
}

/* Combine operands OP1 and OP2 with arithmetic operation CODE.  CODE
   is a tree code.  The type of the result is taken from the operands.
   Both must be equivalent integer types, ala int_binop_types_match_p.
   If the operands are constant, so is the result.  */

tree
size_binop_loc (location_t loc, enum tree_code code, tree arg0, tree arg1)
{
  tree type = TREE_TYPE (arg0);

  if (arg0 == error_mark_node || arg1 == error_mark_node)
    return error_mark_node;

  gcc_assert (int_binop_types_match_p (code, TREE_TYPE (arg0),
                                       TREE_TYPE (arg1)));

  /* Handle the special case of two integer constants faster.  */
  if (TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
    {
      /* And some specific cases even faster than that.  */
      if (code == PLUS_EXPR)
        {
          if (integer_zerop (arg0) && !TREE_OVERFLOW (arg0))
            return arg1;
          if (integer_zerop (arg1) && !TREE_OVERFLOW (arg1))
            return arg0;
        }
      else if (code == MINUS_EXPR)
        {
          if (integer_zerop (arg1) && !TREE_OVERFLOW (arg1))
            return arg0;
        }
      else if (code == MULT_EXPR)
        {
          if (integer_onep (arg0) && !TREE_OVERFLOW (arg0))
            return arg1;
        }

      /* Handle general case of two integer constants.  For sizetype
         constant calculations we always want to know about overflow,
         even in the unsigned case.  */
      return int_const_binop_1 (code, arg0, arg1, -1);
    }

  return fold_build2_loc (loc, code, type, arg0, arg1);
}

/* Given two values, either both of sizetype or both of bitsizetype,
   compute the difference between the two values.  Return the value
   in signed type corresponding to the type of the operands.  */

tree
size_diffop_loc (location_t loc, tree arg0, tree arg1)
{
  tree type = TREE_TYPE (arg0);
  tree ctype;

  gcc_assert (int_binop_types_match_p (MINUS_EXPR, TREE_TYPE (arg0),
                                       TREE_TYPE (arg1)));

  /* If the type is already signed, just do the simple thing.  */
  if (!TYPE_UNSIGNED (type))
    return size_binop_loc (loc, MINUS_EXPR, arg0, arg1);

  if (type == sizetype)
    ctype = ssizetype;
  else if (type == bitsizetype)
    ctype = sbitsizetype;
  else
    ctype = signed_type_for (type);

  /* If either operand is not a constant, do the conversions to the signed
     type and subtract.  The hardware will do the right thing with any
     overflow in the subtraction.  */
  if (TREE_CODE (arg0) != INTEGER_CST || TREE_CODE (arg1) != INTEGER_CST)
    return size_binop_loc (loc, MINUS_EXPR,
                           fold_convert_loc (loc, ctype, arg0),
                           fold_convert_loc (loc, ctype, arg1));

  /* If ARG0 is larger than ARG1, subtract and return the result in CTYPE.
     Otherwise, subtract the other way, convert to CTYPE (we know that can't
     overflow) and negate (which can't either).  Special-case a result
     of zero while we're here.  */
  if (tree_int_cst_equal (arg0, arg1))
    return build_int_cst (ctype, 0);
  else if (tree_int_cst_lt (arg1, arg0))
    return fold_convert_loc (loc, ctype,
                             size_binop_loc (loc, MINUS_EXPR, arg0, arg1));
  else
    return size_binop_loc (loc, MINUS_EXPR, build_int_cst (ctype, 0),
                           fold_convert_loc (loc, ctype,
                                             size_binop_loc (loc,
                                                             MINUS_EXPR,
                                                             arg1, arg0)));
}

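/* For example, with sizetype constants ARG0 = 2 and ARG1 = 5, the
   constant path above computes 5 - 2 = 3 in the unsigned type (which
   cannot overflow), converts it to ssizetype, and negates, yielding
   the ssizetype constant -3 rather than a huge wrapped value.  */
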
/* A subroutine of fold_convert_const handling conversions of an
   INTEGER_CST to another integer type.  */

static tree
fold_convert_const_int_from_int (tree type, const_tree arg1)
{
  tree t;

  /* Given an integer constant, make new constant with new type,
     appropriately sign-extended or truncated.  */
  t = force_fit_type_double (type, tree_to_double_int (arg1),
                             !POINTER_TYPE_P (TREE_TYPE (arg1)),
                             (TREE_INT_CST_HIGH (arg1) < 0
                              && (TYPE_UNSIGNED (type)
                                  < TYPE_UNSIGNED (TREE_TYPE (arg1))))
                             | TREE_OVERFLOW (arg1));

  return t;
}

/* A subroutine of fold_convert_const handling conversions of a REAL_CST
   to an integer type.  */

static tree
fold_convert_const_int_from_real (enum tree_code code, tree type, const_tree arg1)
{
  int overflow = 0;
  tree t;

  /* The following code implements the floating point to integer
     conversion rules required by the Java Language Specification,
     that IEEE NaNs are mapped to zero and values that overflow
     the target precision saturate, i.e. values greater than
     INT_MAX are mapped to INT_MAX, and values less than INT_MIN
     are mapped to INT_MIN.  These semantics are allowed by the
     C and C++ standards that simply state that the behavior of
     FP-to-integer conversion is unspecified upon overflow.  */

  double_int val;
  REAL_VALUE_TYPE r;
  REAL_VALUE_TYPE x = TREE_REAL_CST (arg1);

  switch (code)
    {
    case FIX_TRUNC_EXPR:
      real_trunc (&r, VOIDmode, &x);
      break;

    default:
      gcc_unreachable ();
    }

  /* If R is NaN, return zero and show we have an overflow.  */
  if (REAL_VALUE_ISNAN (r))
    {
      overflow = 1;
      val = double_int_zero;
    }

  /* See if R is less than the lower bound or greater than the
     upper bound.  */

  if (! overflow)
    {
      tree lt = TYPE_MIN_VALUE (type);
      REAL_VALUE_TYPE l = real_value_from_int_cst (NULL_TREE, lt);
      if (REAL_VALUES_LESS (r, l))
        {
          overflow = 1;
          val = tree_to_double_int (lt);
        }
    }

  if (! overflow)
    {
      tree ut = TYPE_MAX_VALUE (type);
      if (ut)
        {
          REAL_VALUE_TYPE u = real_value_from_int_cst (NULL_TREE, ut);
          if (REAL_VALUES_LESS (u, r))
            {
              overflow = 1;
              val = tree_to_double_int (ut);
            }
        }
    }

  if (! overflow)
    real_to_integer2 ((HOST_WIDE_INT *) &val.low, &val.high, &r);

  t = force_fit_type_double (type, val, -1, overflow | TREE_OVERFLOW (arg1));
  return t;
}

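/* Under these rules, converting the REAL_CST 3.7 to a 32-bit int with
   FIX_TRUNC_EXPR yields 3; converting 1e10 saturates to INT_MAX and
   converting a NaN yields 0, the latter two with TREE_OVERFLOW set on
   the result.  */
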
/* A subroutine of fold_convert_const handling conversions of a
   FIXED_CST to an integer type.  */

static tree
fold_convert_const_int_from_fixed (tree type, const_tree arg1)
{
  tree t;
  double_int temp, temp_trunc;
  unsigned int mode;

  /* Right shift FIXED_CST to temp by fbit.  */
  temp = TREE_FIXED_CST (arg1).data;
  mode = TREE_FIXED_CST (arg1).mode;
  if (GET_MODE_FBIT (mode) < HOST_BITS_PER_DOUBLE_INT)
    {
      temp = double_int_rshift (temp, GET_MODE_FBIT (mode),
                                HOST_BITS_PER_DOUBLE_INT,
                                SIGNED_FIXED_POINT_MODE_P (mode));

      /* Left shift temp to temp_trunc by fbit.  */
      temp_trunc = double_int_lshift (temp, GET_MODE_FBIT (mode),
                                      HOST_BITS_PER_DOUBLE_INT,
                                      SIGNED_FIXED_POINT_MODE_P (mode));
    }
  else
    {
      temp = double_int_zero;
      temp_trunc = double_int_zero;
    }

  /* If FIXED_CST is negative, we need to round the value toward 0.
     We do this by adding 1 to TEMP whenever the fractional bits are
     not all zero.  */
  if (SIGNED_FIXED_POINT_MODE_P (mode)
      && double_int_negative_p (temp_trunc)
      && !double_int_equal_p (TREE_FIXED_CST (arg1).data, temp_trunc))
    temp = double_int_add (temp, double_int_one);

  /* Given a fixed-point constant, make new constant with new type,
     appropriately sign-extended or truncated.  */
  t = force_fit_type_double (type, temp, -1,
                             (double_int_negative_p (temp)
                              && (TYPE_UNSIGNED (type)
                                  < TYPE_UNSIGNED (TREE_TYPE (arg1))))
                             | TREE_OVERFLOW (arg1));

  return t;
}

/* A subroutine of fold_convert_const handling conversions of a REAL_CST
   to another floating point type.  */

static tree
fold_convert_const_real_from_real (tree type, const_tree arg1)
{
  REAL_VALUE_TYPE value;
  tree t;

  real_convert (&value, TYPE_MODE (type), &TREE_REAL_CST (arg1));
  t = build_real (type, value);

  /* If converting an infinity or NAN to a representation that doesn't
     have one, set the overflow bit so that we can produce some kind of
     error message at the appropriate point if necessary.  It's not the
     most user-friendly message, but it's better than nothing.  */
  if (REAL_VALUE_ISINF (TREE_REAL_CST (arg1))
      && !MODE_HAS_INFINITIES (TYPE_MODE (type)))
    TREE_OVERFLOW (t) = 1;
  else if (REAL_VALUE_ISNAN (TREE_REAL_CST (arg1))
           && !MODE_HAS_NANS (TYPE_MODE (type)))
    TREE_OVERFLOW (t) = 1;
  /* Regular overflow, conversion produced an infinity in a mode that
     can't represent them.  */
  else if (!MODE_HAS_INFINITIES (TYPE_MODE (type))
           && REAL_VALUE_ISINF (value)
           && !REAL_VALUE_ISINF (TREE_REAL_CST (arg1)))
    TREE_OVERFLOW (t) = 1;
  else
    TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1);
  return t;
}

/* A subroutine of fold_convert_const handling conversions of a FIXED_CST
   to a floating point type.  */

static tree
fold_convert_const_real_from_fixed (tree type, const_tree arg1)
{
  REAL_VALUE_TYPE value;
  tree t;

  real_convert_from_fixed (&value, TYPE_MODE (type), &TREE_FIXED_CST (arg1));
  t = build_real (type, value);

  TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1);
  return t;
}

/* A subroutine of fold_convert_const handling conversions of a FIXED_CST
   to another fixed-point type.  */

static tree
fold_convert_const_fixed_from_fixed (tree type, const_tree arg1)
{
  FIXED_VALUE_TYPE value;
  tree t;
  bool overflow_p;

  overflow_p = fixed_convert (&value, TYPE_MODE (type), &TREE_FIXED_CST (arg1),
                              TYPE_SATURATING (type));
  t = build_fixed (type, value);

  /* Propagate overflow flags.  */
  if (overflow_p | TREE_OVERFLOW (arg1))
    TREE_OVERFLOW (t) = 1;
  return t;
}

/* A subroutine of fold_convert_const handling conversions of an INTEGER_CST
   to a fixed-point type.  */

static tree
fold_convert_const_fixed_from_int (tree type, const_tree arg1)
{
  FIXED_VALUE_TYPE value;
  tree t;
  bool overflow_p;

  overflow_p = fixed_convert_from_int (&value, TYPE_MODE (type),
                                       TREE_INT_CST (arg1),
                                       TYPE_UNSIGNED (TREE_TYPE (arg1)),
                                       TYPE_SATURATING (type));
  t = build_fixed (type, value);

  /* Propagate overflow flags.  */
  if (overflow_p | TREE_OVERFLOW (arg1))
    TREE_OVERFLOW (t) = 1;
  return t;
}

/* A subroutine of fold_convert_const handling conversions of a REAL_CST
   to a fixed-point type.  */

static tree
fold_convert_const_fixed_from_real (tree type, const_tree arg1)
{
  FIXED_VALUE_TYPE value;
  tree t;
  bool overflow_p;

  overflow_p = fixed_convert_from_real (&value, TYPE_MODE (type),
                                        &TREE_REAL_CST (arg1),
                                        TYPE_SATURATING (type));
  t = build_fixed (type, value);

  /* Propagate overflow flags.  */
  if (overflow_p | TREE_OVERFLOW (arg1))
    TREE_OVERFLOW (t) = 1;
  return t;
}

/* Attempt to fold type conversion operation CODE of expression ARG1 to
   type TYPE.  If no simplification can be done return NULL_TREE.  */

static tree
fold_convert_const (enum tree_code code, tree type, tree arg1)
{
  if (TREE_TYPE (arg1) == type)
    return arg1;

  if (POINTER_TYPE_P (type) || INTEGRAL_TYPE_P (type)
      || TREE_CODE (type) == OFFSET_TYPE)
    {
      if (TREE_CODE (arg1) == INTEGER_CST)
        return fold_convert_const_int_from_int (type, arg1);
      else if (TREE_CODE (arg1) == REAL_CST)
        return fold_convert_const_int_from_real (code, type, arg1);
      else if (TREE_CODE (arg1) == FIXED_CST)
        return fold_convert_const_int_from_fixed (type, arg1);
    }
  else if (TREE_CODE (type) == REAL_TYPE)
    {
      if (TREE_CODE (arg1) == INTEGER_CST)
        return build_real_from_int_cst (type, arg1);
      else if (TREE_CODE (arg1) == REAL_CST)
        return fold_convert_const_real_from_real (type, arg1);
      else if (TREE_CODE (arg1) == FIXED_CST)
        return fold_convert_const_real_from_fixed (type, arg1);
    }
  else if (TREE_CODE (type) == FIXED_POINT_TYPE)
    {
      if (TREE_CODE (arg1) == FIXED_CST)
        return fold_convert_const_fixed_from_fixed (type, arg1);
      else if (TREE_CODE (arg1) == INTEGER_CST)
        return fold_convert_const_fixed_from_int (type, arg1);
      else if (TREE_CODE (arg1) == REAL_CST)
        return fold_convert_const_fixed_from_real (type, arg1);
    }
  return NULL_TREE;
}

/* Construct a vector of zero elements of vector type TYPE.  */

static tree
build_zero_vector (tree type)
{
  tree t;

  t = fold_convert_const (NOP_EXPR, TREE_TYPE (type), integer_zero_node);
  return build_vector_from_val (type, t);
}

/* Returns true, if ARG is convertible to TYPE using a NOP_EXPR.  */

bool
fold_convertible_p (const_tree type, const_tree arg)
{
  tree orig = TREE_TYPE (arg);

  if (type == orig)
    return true;

  if (TREE_CODE (arg) == ERROR_MARK
      || TREE_CODE (type) == ERROR_MARK
      || TREE_CODE (orig) == ERROR_MARK)
    return false;

  if (TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (orig))
    return true;

  switch (TREE_CODE (type))
    {
    case INTEGER_TYPE: case ENUMERAL_TYPE: case BOOLEAN_TYPE:
    case POINTER_TYPE: case REFERENCE_TYPE:
    case OFFSET_TYPE:
      if (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig)
          || TREE_CODE (orig) == OFFSET_TYPE)
        return true;
      return (TREE_CODE (orig) == VECTOR_TYPE
              && tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (orig)));

    case REAL_TYPE:
    case FIXED_POINT_TYPE:
    case COMPLEX_TYPE:
    case VECTOR_TYPE:
    case VOID_TYPE:
      return TREE_CODE (type) == TREE_CODE (orig);

    default:
      return false;
    }
}

1842 /* Convert expression ARG to type TYPE. Used by the middle-end for
1843 simple conversions in preference to calling the front-end's convert. */
1845 tree
1846 fold_convert_loc (location_t loc, tree type, tree arg)
1848 tree orig = TREE_TYPE (arg);
1849 tree tem;
1851 if (type == orig)
1852 return arg;
1854 if (TREE_CODE (arg) == ERROR_MARK
1855 || TREE_CODE (type) == ERROR_MARK
1856 || TREE_CODE (orig) == ERROR_MARK)
1857 return error_mark_node;
1859 switch (TREE_CODE (type))
1861 case POINTER_TYPE:
1862 case REFERENCE_TYPE:
1863 /* Handle conversions between pointers to different address spaces. */
1864 if (POINTER_TYPE_P (orig)
1865 && (TYPE_ADDR_SPACE (TREE_TYPE (type))
1866 != TYPE_ADDR_SPACE (TREE_TYPE (orig))))
1867 return fold_build1_loc (loc, ADDR_SPACE_CONVERT_EXPR, type, arg);
1868 /* fall through */
1870 case INTEGER_TYPE: case ENUMERAL_TYPE: case BOOLEAN_TYPE:
1871 case OFFSET_TYPE:
1872 if (TREE_CODE (arg) == INTEGER_CST)
1874 tem = fold_convert_const (NOP_EXPR, type, arg);
1875 if (tem != NULL_TREE)
1876 return tem;
1878 if (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig)
1879 || TREE_CODE (orig) == OFFSET_TYPE)
1880 return fold_build1_loc (loc, NOP_EXPR, type, arg);
1881 if (TREE_CODE (orig) == COMPLEX_TYPE)
1882 return fold_convert_loc (loc, type,
1883 fold_build1_loc (loc, REALPART_EXPR,
1884 TREE_TYPE (orig), arg));
1885 gcc_assert (TREE_CODE (orig) == VECTOR_TYPE
1886 && tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (orig)));
1887 return fold_build1_loc (loc, NOP_EXPR, type, arg);
1889 case REAL_TYPE:
1890 if (TREE_CODE (arg) == INTEGER_CST)
1892 tem = fold_convert_const (FLOAT_EXPR, type, arg);
1893 if (tem != NULL_TREE)
1894 return tem;
1896 else if (TREE_CODE (arg) == REAL_CST)
1898 tem = fold_convert_const (NOP_EXPR, type, arg);
1899 if (tem != NULL_TREE)
1900 return tem;
1902 else if (TREE_CODE (arg) == FIXED_CST)
1904 tem = fold_convert_const (FIXED_CONVERT_EXPR, type, arg);
1905 if (tem != NULL_TREE)
1906 return tem;
1909 switch (TREE_CODE (orig))
1911 case INTEGER_TYPE:
1912 case BOOLEAN_TYPE: case ENUMERAL_TYPE:
1913 case POINTER_TYPE: case REFERENCE_TYPE:
1914 return fold_build1_loc (loc, FLOAT_EXPR, type, arg);
1916 case REAL_TYPE:
1917 return fold_build1_loc (loc, NOP_EXPR, type, arg);
1919 case FIXED_POINT_TYPE:
1920 return fold_build1_loc (loc, FIXED_CONVERT_EXPR, type, arg);
1922 case COMPLEX_TYPE:
1923 tem = fold_build1_loc (loc, REALPART_EXPR, TREE_TYPE (orig), arg);
1924 return fold_convert_loc (loc, type, tem);
1926 default:
1927 gcc_unreachable ();
1930 case FIXED_POINT_TYPE:
1931 if (TREE_CODE (arg) == FIXED_CST || TREE_CODE (arg) == INTEGER_CST
1932 || TREE_CODE (arg) == REAL_CST)
1934 tem = fold_convert_const (FIXED_CONVERT_EXPR, type, arg);
1935 if (tem != NULL_TREE)
1936 goto fold_convert_exit;
1939 switch (TREE_CODE (orig))
1941 case FIXED_POINT_TYPE:
1942 case INTEGER_TYPE:
1943 case ENUMERAL_TYPE:
1944 case BOOLEAN_TYPE:
1945 case REAL_TYPE:
1946 return fold_build1_loc (loc, FIXED_CONVERT_EXPR, type, arg);
1948 case COMPLEX_TYPE:
1949 tem = fold_build1_loc (loc, REALPART_EXPR, TREE_TYPE (orig), arg);
1950 return fold_convert_loc (loc, type, tem);
1952 default:
1953 gcc_unreachable ();
1956 case COMPLEX_TYPE:
1957 switch (TREE_CODE (orig))
1959 case INTEGER_TYPE:
1960 case BOOLEAN_TYPE: case ENUMERAL_TYPE:
1961 case POINTER_TYPE: case REFERENCE_TYPE:
1962 case REAL_TYPE:
1963 case FIXED_POINT_TYPE:
1964 return fold_build2_loc (loc, COMPLEX_EXPR, type,
1965 fold_convert_loc (loc, TREE_TYPE (type), arg),
1966 fold_convert_loc (loc, TREE_TYPE (type),
1967 integer_zero_node));
1968 case COMPLEX_TYPE:
1970 tree rpart, ipart;
1972 if (TREE_CODE (arg) == COMPLEX_EXPR)
1974 rpart = fold_convert_loc (loc, TREE_TYPE (type),
1975 TREE_OPERAND (arg, 0));
1976 ipart = fold_convert_loc (loc, TREE_TYPE (type),
1977 TREE_OPERAND (arg, 1));
1978 return fold_build2_loc (loc, COMPLEX_EXPR, type, rpart, ipart);
1981 arg = save_expr (arg);
1982 rpart = fold_build1_loc (loc, REALPART_EXPR, TREE_TYPE (orig), arg);
1983 ipart = fold_build1_loc (loc, IMAGPART_EXPR, TREE_TYPE (orig), arg);
1984 rpart = fold_convert_loc (loc, TREE_TYPE (type), rpart);
1985 ipart = fold_convert_loc (loc, TREE_TYPE (type), ipart);
1986 return fold_build2_loc (loc, COMPLEX_EXPR, type, rpart, ipart);
1989 default:
1990 gcc_unreachable ();
1993 case VECTOR_TYPE:
1994 if (integer_zerop (arg))
1995 return build_zero_vector (type);
1996 gcc_assert (tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (orig)));
1997 gcc_assert (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig)
1998 || TREE_CODE (orig) == VECTOR_TYPE);
1999 return fold_build1_loc (loc, VIEW_CONVERT_EXPR, type, arg);
2001 case VOID_TYPE:
2002 tem = fold_ignored_result (arg);
2003 return fold_build1_loc (loc, NOP_EXPR, type, tem);
2005 default:
2006 if (TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (orig))
2007 return fold_build1_loc (loc, NOP_EXPR, type, arg);
2008 gcc_unreachable ();
2010 fold_convert_exit:
2011 protected_set_expr_location_unshare (tem, loc);
2012 return tem;
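
For example, converting an INTEGER_CST to a floating type takes the
FLOAT_EXPR arm above and folds immediately; a small sketch, assuming the
standard global type nodes:

  tree three = build_int_cst (integer_type_node, 3);
  tree t = fold_convert_loc (UNKNOWN_LOCATION, double_type_node, three);
  /* t is a REAL_CST holding 3.0, not a NOP_EXPR around the constant.  */
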
2015 /* Return false if expr can be assumed not to be an lvalue, true
2016 otherwise. */
2018 static bool
2019 maybe_lvalue_p (const_tree x)
2021 /* We only need to wrap lvalue tree codes. */
2022 switch (TREE_CODE (x))
2024 case VAR_DECL:
2025 case PARM_DECL:
2026 case RESULT_DECL:
2027 case LABEL_DECL:
2028 case FUNCTION_DECL:
2029 case SSA_NAME:
2031 case COMPONENT_REF:
2032 case MEM_REF:
2033 case INDIRECT_REF:
2034 case ARRAY_REF:
2035 case ARRAY_RANGE_REF:
2036 case BIT_FIELD_REF:
2037 case OBJ_TYPE_REF:
2039 case REALPART_EXPR:
2040 case IMAGPART_EXPR:
2041 case PREINCREMENT_EXPR:
2042 case PREDECREMENT_EXPR:
2043 case SAVE_EXPR:
2044 case TRY_CATCH_EXPR:
2045 case WITH_CLEANUP_EXPR:
2046 case COMPOUND_EXPR:
2047 case MODIFY_EXPR:
2048 case TARGET_EXPR:
2049 case COND_EXPR:
2050 case BIND_EXPR:
2051 break;
2053 default:
2054 /* Assume the worst for front-end tree codes. */
2055 if ((int)TREE_CODE (x) >= NUM_TREE_CODES)
2056 break;
2057 return false;
2060 return true;
2063 /* Return an expr equal to X but certainly not valid as an lvalue. */
2065 tree
2066 non_lvalue_loc (location_t loc, tree x)
2068 /* While we are in GIMPLE, NON_LVALUE_EXPR doesn't mean anything to
2069 us. */
2070 if (in_gimple_form)
2071 return x;
2073 if (! maybe_lvalue_p (x))
2074 return x;
2075 return build1_loc (loc, NON_LVALUE_EXPR, TREE_TYPE (x), x);
2078 /* Nonzero means lvalues are limited to those valid in pedantic ANSI C.
2079 Zero means allow extended lvalues. */
2081 int pedantic_lvalues;
2083 /* When pedantic, return an expr equal to X but certainly not valid as a
2084 pedantic lvalue. Otherwise, return X. */
2086 static tree
2087 pedantic_non_lvalue_loc (location_t loc, tree x)
2089 if (pedantic_lvalues)
2090 return non_lvalue_loc (loc, x);
2092 return protected_set_expr_location_unshare (x, loc);
2095 /* Given a tree comparison code, return the code that is the logical inverse.
2096 It is generally not safe to do this for floating-point comparisons, except
2097 for EQ_EXPR, NE_EXPR, ORDERED_EXPR and UNORDERED_EXPR, so we return
2098 ERROR_MARK in this case. */
2100 enum tree_code
2101 invert_tree_comparison (enum tree_code code, bool honor_nans)
2103 if (honor_nans && flag_trapping_math && code != EQ_EXPR && code != NE_EXPR
2104 && code != ORDERED_EXPR && code != UNORDERED_EXPR)
2105 return ERROR_MARK;
2107 switch (code)
2109 case EQ_EXPR:
2110 return NE_EXPR;
2111 case NE_EXPR:
2112 return EQ_EXPR;
2113 case GT_EXPR:
2114 return honor_nans ? UNLE_EXPR : LE_EXPR;
2115 case GE_EXPR:
2116 return honor_nans ? UNLT_EXPR : LT_EXPR;
2117 case LT_EXPR:
2118 return honor_nans ? UNGE_EXPR : GE_EXPR;
2119 case LE_EXPR:
2120 return honor_nans ? UNGT_EXPR : GT_EXPR;
2121 case LTGT_EXPR:
2122 return UNEQ_EXPR;
2123 case UNEQ_EXPR:
2124 return LTGT_EXPR;
2125 case UNGT_EXPR:
2126 return LE_EXPR;
2127 case UNGE_EXPR:
2128 return LT_EXPR;
2129 case UNLT_EXPR:
2130 return GE_EXPR;
2131 case UNLE_EXPR:
2132 return GT_EXPR;
2133 case ORDERED_EXPR:
2134 return UNORDERED_EXPR;
2135 case UNORDERED_EXPR:
2136 return ORDERED_EXPR;
2137 default:
2138 gcc_unreachable ();
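
A standalone illustration of the NaN subtlety handled above: with a NaN
operand, x < y and x >= y are both false, so the honest inverse of
LT_EXPR is UNGE_EXPR (">= or unordered"), not GE_EXPR.

  #include <math.h>

  int lt_inverse_demo (void)
  {
    double x = NAN, y = 1.0;
    return !(x < y) && !(x >= y);   /* 1: plain >= is not the inverse */
  }
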
2142 /* Similar, but return the comparison that results if the operands are
2143 swapped. This is safe for floating-point. */
2145 enum tree_code
2146 swap_tree_comparison (enum tree_code code)
2148 switch (code)
2150 case EQ_EXPR:
2151 case NE_EXPR:
2152 case ORDERED_EXPR:
2153 case UNORDERED_EXPR:
2154 case LTGT_EXPR:
2155 case UNEQ_EXPR:
2156 return code;
2157 case GT_EXPR:
2158 return LT_EXPR;
2159 case GE_EXPR:
2160 return LE_EXPR;
2161 case LT_EXPR:
2162 return GT_EXPR;
2163 case LE_EXPR:
2164 return GE_EXPR;
2165 case UNGT_EXPR:
2166 return UNLT_EXPR;
2167 case UNGE_EXPR:
2168 return UNLE_EXPR;
2169 case UNLT_EXPR:
2170 return UNGT_EXPR;
2171 case UNLE_EXPR:
2172 return UNGE_EXPR;
2173 default:
2174 gcc_unreachable ();
2179 /* Convert a comparison tree code from an enum tree_code representation
2180 into a compcode bit-based encoding. This function is the inverse of
2181 compcode_to_comparison. */
2183 static enum comparison_code
2184 comparison_to_compcode (enum tree_code code)
2186 switch (code)
2188 case LT_EXPR:
2189 return COMPCODE_LT;
2190 case EQ_EXPR:
2191 return COMPCODE_EQ;
2192 case LE_EXPR:
2193 return COMPCODE_LE;
2194 case GT_EXPR:
2195 return COMPCODE_GT;
2196 case NE_EXPR:
2197 return COMPCODE_NE;
2198 case GE_EXPR:
2199 return COMPCODE_GE;
2200 case ORDERED_EXPR:
2201 return COMPCODE_ORD;
2202 case UNORDERED_EXPR:
2203 return COMPCODE_UNORD;
2204 case UNLT_EXPR:
2205 return COMPCODE_UNLT;
2206 case UNEQ_EXPR:
2207 return COMPCODE_UNEQ;
2208 case UNLE_EXPR:
2209 return COMPCODE_UNLE;
2210 case UNGT_EXPR:
2211 return COMPCODE_UNGT;
2212 case LTGT_EXPR:
2213 return COMPCODE_LTGT;
2214 case UNGE_EXPR:
2215 return COMPCODE_UNGE;
2216 default:
2217 gcc_unreachable ();
2221 /* Convert a compcode bit-based encoding of a comparison operator back
2222 to GCC's enum tree_code representation. This function is the
2223 inverse of comparison_to_compcode. */
2225 static enum tree_code
2226 compcode_to_comparison (enum comparison_code code)
2228 switch (code)
2230 case COMPCODE_LT:
2231 return LT_EXPR;
2232 case COMPCODE_EQ:
2233 return EQ_EXPR;
2234 case COMPCODE_LE:
2235 return LE_EXPR;
2236 case COMPCODE_GT:
2237 return GT_EXPR;
2238 case COMPCODE_NE:
2239 return NE_EXPR;
2240 case COMPCODE_GE:
2241 return GE_EXPR;
2242 case COMPCODE_ORD:
2243 return ORDERED_EXPR;
2244 case COMPCODE_UNORD:
2245 return UNORDERED_EXPR;
2246 case COMPCODE_UNLT:
2247 return UNLT_EXPR;
2248 case COMPCODE_UNEQ:
2249 return UNEQ_EXPR;
2250 case COMPCODE_UNLE:
2251 return UNLE_EXPR;
2252 case COMPCODE_UNGT:
2253 return UNGT_EXPR;
2254 case COMPCODE_LTGT:
2255 return LTGT_EXPR;
2256 case COMPCODE_UNGE:
2257 return UNGE_EXPR;
2258 default:
2259 gcc_unreachable ();
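
The encoding gives LT, EQ, GT and UNORD one bit each, so every composite
code is a bitwise union of those, and combining two comparisons reduces
to an integer AND or OR; a small self-contained check of the arithmetic:

  enum { LT = 1, EQ = 2, GT = 4, UNORD = 8 };

  int compcode_demo (void)
  {
    int le = LT | EQ;            /* 3, COMPCODE_LE */
    int never = LT & EQ;         /* 0, COMPCODE_FALSE */
    int ne = LT | GT | UNORD;    /* 13, COMPCODE_NE */
    return le == 3 && never == 0 && ne == 13;
  }
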
2263 /* Return a tree for the comparison which is the combination of
2264 doing the AND or OR (depending on CODE) of the two operations LCODE
2265 and RCODE on the identical operands LL_ARG and LR_ARG. Take into account
2266 the possibility of trapping if the mode has NaNs, and return NULL_TREE
2267 if this makes the transformation invalid. */
2269 tree
2270 combine_comparisons (location_t loc,
2271 enum tree_code code, enum tree_code lcode,
2272 enum tree_code rcode, tree truth_type,
2273 tree ll_arg, tree lr_arg)
2275 bool honor_nans = HONOR_NANS (TYPE_MODE (TREE_TYPE (ll_arg)));
2276 enum comparison_code lcompcode = comparison_to_compcode (lcode);
2277 enum comparison_code rcompcode = comparison_to_compcode (rcode);
2278 int compcode;
2280 switch (code)
2282 case TRUTH_AND_EXPR: case TRUTH_ANDIF_EXPR:
2283 compcode = lcompcode & rcompcode;
2284 break;
2286 case TRUTH_OR_EXPR: case TRUTH_ORIF_EXPR:
2287 compcode = lcompcode | rcompcode;
2288 break;
2290 default:
2291 return NULL_TREE;
2294 if (!honor_nans)
2296 /* Eliminate unordered comparisons, as well as LTGT and ORD
2297 which are not used unless the mode has NaNs. */
2298 compcode &= ~COMPCODE_UNORD;
2299 if (compcode == COMPCODE_LTGT)
2300 compcode = COMPCODE_NE;
2301 else if (compcode == COMPCODE_ORD)
2302 compcode = COMPCODE_TRUE;
2304 else if (flag_trapping_math)
2306 /* Check that the original operation and the optimized ones will trap
2307 under the same condition. */
2308 bool ltrap = (lcompcode & COMPCODE_UNORD) == 0
2309 && (lcompcode != COMPCODE_EQ)
2310 && (lcompcode != COMPCODE_ORD);
2311 bool rtrap = (rcompcode & COMPCODE_UNORD) == 0
2312 && (rcompcode != COMPCODE_EQ)
2313 && (rcompcode != COMPCODE_ORD);
2314 bool trap = (compcode & COMPCODE_UNORD) == 0
2315 && (compcode != COMPCODE_EQ)
2316 && (compcode != COMPCODE_ORD);
2318 /* In a short-circuited boolean expression the LHS might be
2319 such that the RHS, if evaluated, will never trap. For
2320 example, in ORD (x, y) && (x < y), we evaluate the RHS only
2321 if neither x nor y is NaN. (This is a mixed blessing: for
2322 example, the expression above will never trap, hence
2323 optimizing it to x < y would be invalid). */
2324 if ((code == TRUTH_ORIF_EXPR && (lcompcode & COMPCODE_UNORD))
2325 || (code == TRUTH_ANDIF_EXPR && !(lcompcode & COMPCODE_UNORD)))
2326 rtrap = false;
2328 /* If the comparison was short-circuited, and only the RHS
2329 trapped, we may now generate a spurious trap. */
2330 if (rtrap && !ltrap
2331 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR))
2332 return NULL_TREE;
2334 /* If we changed the conditions that cause a trap, we lose. */
2335 if ((ltrap || rtrap) != trap)
2336 return NULL_TREE;
2339 if (compcode == COMPCODE_TRUE)
2340 return constant_boolean_node (true, truth_type);
2341 else if (compcode == COMPCODE_FALSE)
2342 return constant_boolean_node (false, truth_type);
2343 else
2345 enum tree_code tcode;
2347 tcode = compcode_to_comparison ((enum comparison_code) compcode);
2348 return fold_build2_loc (loc, tcode, truth_type, ll_arg, lr_arg);
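
A hedged usage sketch (a and b stand for hypothetical integer-typed
operand trees): OR-ing LT and EQ on the same operands yields
COMPCODE_LE, so the whole disjunction folds to a single comparison.

  /* (a < b) || (a == b)  =>  a <= b  */
  tree t = combine_comparisons (UNKNOWN_LOCATION, TRUTH_ORIF_EXPR,
                                LT_EXPR, EQ_EXPR, boolean_type_node,
                                a, b);
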
2352 /* Return nonzero if two operands (typically of the same tree node)
2353 are necessarily equal. If either argument has side-effects this
2354 function returns zero. FLAGS modifies behavior as follows:
2356 If OEP_ONLY_CONST is set, only return nonzero for constants.
2357 This function tests whether the operands are indistinguishable;
2358 it does not test whether they are equal using C's == operation.
2359 The distinction is important for IEEE floating point, because
2360 (1) -0.0 and 0.0 are distinguishable, but -0.0==0.0, and
2361 (2) two NaNs may be indistinguishable, but NaN!=NaN.
2363 If OEP_ONLY_CONST is unset, a VAR_DECL is considered equal to itself
2364 even though it may hold multiple values during a function.
2365 This is because a GCC tree node guarantees that nothing else is
2366 executed between the evaluation of its "operands" (which may often
2367 be evaluated in arbitrary order). Hence if the operands themselves
2368 don't side-effect, the VAR_DECLs, PARM_DECLs etc... must hold the
2369 same value in each operand/subexpression. Hence leaving OEP_ONLY_CONST
2370 unset means assuming isochronic (or instantaneous) tree equivalence.
2371 Unless comparing arbitrary expression trees, such as from different
2372 statements, this flag can usually be left unset.
2374 If OEP_PURE_SAME is set, then pure functions with identical arguments
2375 are considered the same. It is used when the caller has other ways
2376 to ensure that global memory is unchanged in between. */
2378 int
2379 operand_equal_p (const_tree arg0, const_tree arg1, unsigned int flags)
2381 /* If either is ERROR_MARK, they aren't equal. */
2382 if (TREE_CODE (arg0) == ERROR_MARK || TREE_CODE (arg1) == ERROR_MARK
2383 || TREE_TYPE (arg0) == error_mark_node
2384 || TREE_TYPE (arg1) == error_mark_node)
2385 return 0;
2387 /* Similarly, if either does not have a type (like a released SSA name),
2388 they aren't equal. */
2389 if (!TREE_TYPE (arg0) || !TREE_TYPE (arg1))
2390 return 0;
2392 /* Check equality of integer constants before bailing out due to
2393 precision differences. */
2394 if (TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
2395 return tree_int_cst_equal (arg0, arg1);
2397 /* If both types don't have the same signedness, then we can't consider
2398 them equal. We must check this before the STRIP_NOPS calls
2399 because they may change the signedness of the arguments. As pointers
2400 strictly don't have a signedness, require either two pointers or
2401 two non-pointers as well. */
2402 if (TYPE_UNSIGNED (TREE_TYPE (arg0)) != TYPE_UNSIGNED (TREE_TYPE (arg1))
2403 || POINTER_TYPE_P (TREE_TYPE (arg0)) != POINTER_TYPE_P (TREE_TYPE (arg1)))
2404 return 0;
2406 /* We cannot consider pointers to different address spaces equal. */
2407 if (POINTER_TYPE_P (TREE_TYPE (arg0)) && POINTER_TYPE_P (TREE_TYPE (arg1))
2408 && (TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (arg0)))
2409 != TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (arg1)))))
2410 return 0;
2412 /* If both types don't have the same precision, then it is not safe
2413 to strip NOPs. */
2414 if (TYPE_PRECISION (TREE_TYPE (arg0)) != TYPE_PRECISION (TREE_TYPE (arg1)))
2415 return 0;
2417 STRIP_NOPS (arg0);
2418 STRIP_NOPS (arg1);
2420 /* In case both args are comparisons but with different comparison
2421 code, try to swap the comparison operands of one arg to produce
2422 a match and compare that variant. */
2423 if (TREE_CODE (arg0) != TREE_CODE (arg1)
2424 && COMPARISON_CLASS_P (arg0)
2425 && COMPARISON_CLASS_P (arg1))
2427 enum tree_code swap_code = swap_tree_comparison (TREE_CODE (arg1));
2429 if (TREE_CODE (arg0) == swap_code)
2430 return operand_equal_p (TREE_OPERAND (arg0, 0),
2431 TREE_OPERAND (arg1, 1), flags)
2432 && operand_equal_p (TREE_OPERAND (arg0, 1),
2433 TREE_OPERAND (arg1, 0), flags);
2436 if (TREE_CODE (arg0) != TREE_CODE (arg1)
2437 /* This is needed for conversions and for COMPONENT_REF.
2438 Might as well play it safe and always test this. */
2439 || TREE_CODE (TREE_TYPE (arg0)) == ERROR_MARK
2440 || TREE_CODE (TREE_TYPE (arg1)) == ERROR_MARK
2441 || TYPE_MODE (TREE_TYPE (arg0)) != TYPE_MODE (TREE_TYPE (arg1)))
2442 return 0;
2444 /* If ARG0 and ARG1 are the same SAVE_EXPR, they are necessarily equal.
2445 We don't care about side effects in that case because the SAVE_EXPR
2446 takes care of that for us. In all other cases, two expressions are
2447 equal if they have no side effects. If we have two identical
2448 expressions with side effects that should be treated the same due
2449 to the only side effects being identical SAVE_EXPR's, that will
2450 be detected in the recursive calls below.
2451 If we are taking an invariant address of two identical objects
2452 they are necessarily equal as well. */
2453 if (arg0 == arg1 && ! (flags & OEP_ONLY_CONST)
2454 && (TREE_CODE (arg0) == SAVE_EXPR
2455 || (flags & OEP_CONSTANT_ADDRESS_OF)
2456 || (! TREE_SIDE_EFFECTS (arg0) && ! TREE_SIDE_EFFECTS (arg1))))
2457 return 1;
2459 /* Next handle constant cases, those for which we can return 1 even
2460 if ONLY_CONST is set. */
2461 if (TREE_CONSTANT (arg0) && TREE_CONSTANT (arg1))
2462 switch (TREE_CODE (arg0))
2464 case INTEGER_CST:
2465 return tree_int_cst_equal (arg0, arg1);
2467 case FIXED_CST:
2468 return FIXED_VALUES_IDENTICAL (TREE_FIXED_CST (arg0),
2469 TREE_FIXED_CST (arg1));
2471 case REAL_CST:
2472 if (REAL_VALUES_IDENTICAL (TREE_REAL_CST (arg0),
2473 TREE_REAL_CST (arg1)))
2474 return 1;
2477 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0))))
2479 /* If we do not distinguish between signed and unsigned zero,
2480 consider them equal. */
2481 if (real_zerop (arg0) && real_zerop (arg1))
2482 return 1;
2484 return 0;
2486 case VECTOR_CST:
2488 unsigned i;
2490 if (VECTOR_CST_NELTS (arg0) != VECTOR_CST_NELTS (arg1))
2491 return 0;
2493 for (i = 0; i < VECTOR_CST_NELTS (arg0); ++i)
2495 if (!operand_equal_p (VECTOR_CST_ELT (arg0, i),
2496 VECTOR_CST_ELT (arg1, i), flags))
2497 return 0;
2499 return 1;
2502 case COMPLEX_CST:
2503 return (operand_equal_p (TREE_REALPART (arg0), TREE_REALPART (arg1),
2504 flags)
2505 && operand_equal_p (TREE_IMAGPART (arg0), TREE_IMAGPART (arg1),
2506 flags));
2508 case STRING_CST:
2509 return (TREE_STRING_LENGTH (arg0) == TREE_STRING_LENGTH (arg1)
2510 && ! memcmp (TREE_STRING_POINTER (arg0),
2511 TREE_STRING_POINTER (arg1),
2512 TREE_STRING_LENGTH (arg0)));
2514 case ADDR_EXPR:
2515 return operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 0),
2516 TREE_CONSTANT (arg0) && TREE_CONSTANT (arg1)
2517 ? OEP_CONSTANT_ADDRESS_OF : 0);
2518 default:
2519 break;
2522 if (flags & OEP_ONLY_CONST)
2523 return 0;
2525 /* Define macros to test an operand from arg0 and arg1 for equality and a
2526 variant that allows null and views null as being different from any
2527 non-null value. In the latter case, if either is null, then both
2528 must be; otherwise, do the normal comparison. */
2529 #define OP_SAME(N) operand_equal_p (TREE_OPERAND (arg0, N), \
2530 TREE_OPERAND (arg1, N), flags)
2532 #define OP_SAME_WITH_NULL(N) \
2533 ((!TREE_OPERAND (arg0, N) || !TREE_OPERAND (arg1, N)) \
2534 ? TREE_OPERAND (arg0, N) == TREE_OPERAND (arg1, N) : OP_SAME (N))
2536 switch (TREE_CODE_CLASS (TREE_CODE (arg0)))
2538 case tcc_unary:
2539 /* Two conversions are equal only if signedness and modes match. */
2540 switch (TREE_CODE (arg0))
2542 CASE_CONVERT:
2543 case FIX_TRUNC_EXPR:
2544 if (TYPE_UNSIGNED (TREE_TYPE (arg0))
2545 != TYPE_UNSIGNED (TREE_TYPE (arg1)))
2546 return 0;
2547 break;
2548 default:
2549 break;
2552 return OP_SAME (0);
2555 case tcc_comparison:
2556 case tcc_binary:
2557 if (OP_SAME (0) && OP_SAME (1))
2558 return 1;
2560 /* For commutative ops, allow the other order. */
2561 return (commutative_tree_code (TREE_CODE (arg0))
2562 && operand_equal_p (TREE_OPERAND (arg0, 0),
2563 TREE_OPERAND (arg1, 1), flags)
2564 && operand_equal_p (TREE_OPERAND (arg0, 1),
2565 TREE_OPERAND (arg1, 0), flags));
2567 case tcc_reference:
2568 /* If either of the pointer (or reference) expressions we are
2569 dereferencing contain a side effect, these cannot be equal. */
2570 if (TREE_SIDE_EFFECTS (arg0)
2571 || TREE_SIDE_EFFECTS (arg1))
2572 return 0;
2574 switch (TREE_CODE (arg0))
2576 case INDIRECT_REF:
2577 case REALPART_EXPR:
2578 case IMAGPART_EXPR:
2579 return OP_SAME (0);
2581 case TARGET_MEM_REF:
2582 /* Require equal extra operands and then fall through to MEM_REF
2583 handling of the two common operands. */
2584 if (!OP_SAME_WITH_NULL (2)
2585 || !OP_SAME_WITH_NULL (3)
2586 || !OP_SAME_WITH_NULL (4))
2587 return 0;
2588 /* Fallthru. */
2589 case MEM_REF:
2590 /* Require equal access sizes, and similar pointer types.
2591 We can have incomplete types for array references of
2592 variable-sized arrays from the Fortran frontend
2593 though. */
2594 return ((TYPE_SIZE (TREE_TYPE (arg0)) == TYPE_SIZE (TREE_TYPE (arg1))
2595 || (TYPE_SIZE (TREE_TYPE (arg0))
2596 && TYPE_SIZE (TREE_TYPE (arg1))
2597 && operand_equal_p (TYPE_SIZE (TREE_TYPE (arg0)),
2598 TYPE_SIZE (TREE_TYPE (arg1)), flags)))
2599 && (TYPE_MAIN_VARIANT (TREE_TYPE (TREE_OPERAND (arg0, 1)))
2600 == TYPE_MAIN_VARIANT (TREE_TYPE (TREE_OPERAND (arg1, 1))))
2601 && OP_SAME (0) && OP_SAME (1));
2603 case ARRAY_REF:
2604 case ARRAY_RANGE_REF:
2605 /* Operands 2 and 3 may be null.
2606 Compare the array index by value first if it is constant, as we
2607 may have different types but the same value here. */
2608 return (OP_SAME (0)
2609 && (tree_int_cst_equal (TREE_OPERAND (arg0, 1),
2610 TREE_OPERAND (arg1, 1))
2611 || OP_SAME (1))
2612 && OP_SAME_WITH_NULL (2)
2613 && OP_SAME_WITH_NULL (3));
2615 case COMPONENT_REF:
2616 /* Handle operand 2 the same as for ARRAY_REF. Operand 0
2617 may be NULL when we're called to compare MEM_EXPRs. */
2618 return OP_SAME_WITH_NULL (0)
2619 && OP_SAME (1)
2620 && OP_SAME_WITH_NULL (2);
2622 case BIT_FIELD_REF:
2623 return OP_SAME (0) && OP_SAME (1) && OP_SAME (2);
2625 default:
2626 return 0;
2629 case tcc_expression:
2630 switch (TREE_CODE (arg0))
2632 case ADDR_EXPR:
2633 case TRUTH_NOT_EXPR:
2634 return OP_SAME (0);
2636 case TRUTH_ANDIF_EXPR:
2637 case TRUTH_ORIF_EXPR:
2638 return OP_SAME (0) && OP_SAME (1);
2640 case FMA_EXPR:
2641 case WIDEN_MULT_PLUS_EXPR:
2642 case WIDEN_MULT_MINUS_EXPR:
2643 if (!OP_SAME (2))
2644 return 0;
2645 /* The multiplication operands are commutative. */
2646 /* FALLTHRU */
2648 case TRUTH_AND_EXPR:
2649 case TRUTH_OR_EXPR:
2650 case TRUTH_XOR_EXPR:
2651 if (OP_SAME (0) && OP_SAME (1))
2652 return 1;
2654 /* Otherwise take into account this is a commutative operation. */
2655 return (operand_equal_p (TREE_OPERAND (arg0, 0),
2656 TREE_OPERAND (arg1, 1), flags)
2657 && operand_equal_p (TREE_OPERAND (arg0, 1),
2658 TREE_OPERAND (arg1, 0), flags));
2660 case COND_EXPR:
2661 case VEC_COND_EXPR:
2662 case DOT_PROD_EXPR:
2663 return OP_SAME (0) && OP_SAME (1) && OP_SAME (2);
2665 default:
2666 return 0;
2669 case tcc_vl_exp:
2670 switch (TREE_CODE (arg0))
2672 case CALL_EXPR:
2673 /* If the CALL_EXPRs call different functions, then they
2674 clearly cannot be equal. */
2675 if (! operand_equal_p (CALL_EXPR_FN (arg0), CALL_EXPR_FN (arg1),
2676 flags))
2677 return 0;
2680 unsigned int cef = call_expr_flags (arg0);
2681 if (flags & OEP_PURE_SAME)
2682 cef &= ECF_CONST | ECF_PURE;
2683 else
2684 cef &= ECF_CONST;
2685 if (!cef)
2686 return 0;
2689 /* Now see if all the arguments are the same. */
2691 const_call_expr_arg_iterator iter0, iter1;
2692 const_tree a0, a1;
2693 for (a0 = first_const_call_expr_arg (arg0, &iter0),
2694 a1 = first_const_call_expr_arg (arg1, &iter1);
2695 a0 && a1;
2696 a0 = next_const_call_expr_arg (&iter0),
2697 a1 = next_const_call_expr_arg (&iter1))
2698 if (! operand_equal_p (a0, a1, flags))
2699 return 0;
2701 /* If we get here and both argument lists are exhausted
2702 then the CALL_EXPRs are equal. */
2703 return ! (a0 || a1);
2705 default:
2706 return 0;
2709 case tcc_declaration:
2710 /* Consider __builtin_sqrt equal to sqrt. */
2711 return (TREE_CODE (arg0) == FUNCTION_DECL
2712 && DECL_BUILT_IN (arg0) && DECL_BUILT_IN (arg1)
2713 && DECL_BUILT_IN_CLASS (arg0) == DECL_BUILT_IN_CLASS (arg1)
2714 && DECL_FUNCTION_CODE (arg0) == DECL_FUNCTION_CODE (arg1));
2716 default:
2717 return 0;
2720 #undef OP_SAME
2721 #undef OP_SAME_WITH_NULL
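
A usage sketch with integer constants (OEP_ONLY_CONST restricts the
answer to constants, which these are, so both calls return 1):

  tree x = build_int_cst (integer_type_node, 42);
  tree y = build_int_cst (integer_type_node, 42);
  int eq = operand_equal_p (x, y, 0);                /* 1 */
  int eqc = operand_equal_p (x, y, OEP_ONLY_CONST);  /* 1 */
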
2724 /* Similar to operand_equal_p, but see if ARG0 might have been made by
2725 shorten_compare from ARG1 when ARG1 was being compared with OTHER.
2727 When in doubt, return 0. */
2729 static int
2730 operand_equal_for_comparison_p (tree arg0, tree arg1, tree other)
2732 int unsignedp1, unsignedpo;
2733 tree primarg0, primarg1, primother;
2734 unsigned int correct_width;
2736 if (operand_equal_p (arg0, arg1, 0))
2737 return 1;
2739 if (! INTEGRAL_TYPE_P (TREE_TYPE (arg0))
2740 || ! INTEGRAL_TYPE_P (TREE_TYPE (arg1)))
2741 return 0;
2743 /* Discard any conversions that don't change the modes of ARG0 and ARG1
2744 and see if the inner values are the same. This removes any
2745 signedness comparison, which doesn't matter here. */
2746 primarg0 = arg0, primarg1 = arg1;
2747 STRIP_NOPS (primarg0);
2748 STRIP_NOPS (primarg1);
2749 if (operand_equal_p (primarg0, primarg1, 0))
2750 return 1;
2752 /* Duplicate what shorten_compare does to ARG1 and see if that gives the
2753 actual comparison operand, ARG0.
2755 First throw away any conversions to wider types
2756 already present in the operands. */
2758 primarg1 = get_narrower (arg1, &unsignedp1);
2759 primother = get_narrower (other, &unsignedpo);
2761 correct_width = TYPE_PRECISION (TREE_TYPE (arg1));
2762 if (unsignedp1 == unsignedpo
2763 && TYPE_PRECISION (TREE_TYPE (primarg1)) < correct_width
2764 && TYPE_PRECISION (TREE_TYPE (primother)) < correct_width)
2766 tree type = TREE_TYPE (arg0);
2768 /* Make sure shorter operand is extended the right way
2769 to match the longer operand. */
2770 primarg1 = fold_convert (signed_or_unsigned_type_for
2771 (unsignedp1, TREE_TYPE (primarg1)), primarg1);
2773 if (operand_equal_p (arg0, fold_convert (type, primarg1), 0))
2774 return 1;
2777 return 0;
2780 /* See if ARG is an expression that is either a comparison or is performing
2781 arithmetic on comparisons. The comparisons must only be comparing
2782 two different values, which will be stored in *CVAL1 and *CVAL2; if
2783 they are nonzero it means that some operands have already been found.
2784 No variables may be used anywhere else in the expression except in the
2785 comparisons. If SAVE_P is true it means we removed a SAVE_EXPR around
2786 the expression and save_expr needs to be called with CVAL1 and CVAL2.
2788 If this is true, return 1. Otherwise, return zero. */
2790 static int
2791 twoval_comparison_p (tree arg, tree *cval1, tree *cval2, int *save_p)
2793 enum tree_code code = TREE_CODE (arg);
2794 enum tree_code_class tclass = TREE_CODE_CLASS (code);
2796 /* We can handle some of the tcc_expression cases here. */
2797 if (tclass == tcc_expression && code == TRUTH_NOT_EXPR)
2798 tclass = tcc_unary;
2799 else if (tclass == tcc_expression
2800 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR
2801 || code == COMPOUND_EXPR))
2802 tclass = tcc_binary;
2804 else if (tclass == tcc_expression && code == SAVE_EXPR
2805 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (arg, 0)))
2807 /* If we've already found a CVAL1 or CVAL2, this expression is
2808 too complex to handle. */
2809 if (*cval1 || *cval2)
2810 return 0;
2812 tclass = tcc_unary;
2813 *save_p = 1;
2816 switch (tclass)
2818 case tcc_unary:
2819 return twoval_comparison_p (TREE_OPERAND (arg, 0), cval1, cval2, save_p);
2821 case tcc_binary:
2822 return (twoval_comparison_p (TREE_OPERAND (arg, 0), cval1, cval2, save_p)
2823 && twoval_comparison_p (TREE_OPERAND (arg, 1),
2824 cval1, cval2, save_p));
2826 case tcc_constant:
2827 return 1;
2829 case tcc_expression:
2830 if (code == COND_EXPR)
2831 return (twoval_comparison_p (TREE_OPERAND (arg, 0),
2832 cval1, cval2, save_p)
2833 && twoval_comparison_p (TREE_OPERAND (arg, 1),
2834 cval1, cval2, save_p)
2835 && twoval_comparison_p (TREE_OPERAND (arg, 2),
2836 cval1, cval2, save_p));
2837 return 0;
2839 case tcc_comparison:
2840 /* First see if we can handle the first operand, then the second. For
2841 the second operand, we know *CVAL1 can't be zero. It must be that
2842 one side of the comparison is each of the values; test for the
2843 case where this isn't true by failing if the two operands
2844 are the same. */
2846 if (operand_equal_p (TREE_OPERAND (arg, 0),
2847 TREE_OPERAND (arg, 1), 0))
2848 return 0;
2850 if (*cval1 == 0)
2851 *cval1 = TREE_OPERAND (arg, 0);
2852 else if (operand_equal_p (*cval1, TREE_OPERAND (arg, 0), 0))
2854 else if (*cval2 == 0)
2855 *cval2 = TREE_OPERAND (arg, 0);
2856 else if (operand_equal_p (*cval2, TREE_OPERAND (arg, 0), 0))
2858 else
2859 return 0;
2861 if (operand_equal_p (*cval1, TREE_OPERAND (arg, 1), 0))
2863 else if (*cval2 == 0)
2864 *cval2 = TREE_OPERAND (arg, 1);
2865 else if (operand_equal_p (*cval2, TREE_OPERAND (arg, 1), 0))
2867 else
2868 return 0;
2870 return 1;
2872 default:
2873 return 0;
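
A sketch of the contract (cond stands for a hypothetical expression
tree): for (a < b) || (a == b) the walk records the two values and
succeeds, while (a < b) || (c < d) fails because more than two distinct
values appear.

  tree cval1 = NULL_TREE, cval2 = NULL_TREE;
  int save_p = 0;
  int ok = twoval_comparison_p (cond, &cval1, &cval2, &save_p);
  /* On success for (a < b) || (a == b): ok == 1, cval1 == a, cval2 == b.  */
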
2877 /* ARG is a tree that is known to contain just arithmetic operations and
2878 comparisons. Evaluate the operations in the tree substituting NEW0 for
2879 any occurrence of OLD0 as an operand of a comparison and likewise for
2880 NEW1 and OLD1. */
2882 static tree
2883 eval_subst (location_t loc, tree arg, tree old0, tree new0,
2884 tree old1, tree new1)
2886 tree type = TREE_TYPE (arg);
2887 enum tree_code code = TREE_CODE (arg);
2888 enum tree_code_class tclass = TREE_CODE_CLASS (code);
2890 /* We can handle some of the tcc_expression cases here. */
2891 if (tclass == tcc_expression && code == TRUTH_NOT_EXPR)
2892 tclass = tcc_unary;
2893 else if (tclass == tcc_expression
2894 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR))
2895 tclass = tcc_binary;
2897 switch (tclass)
2899 case tcc_unary:
2900 return fold_build1_loc (loc, code, type,
2901 eval_subst (loc, TREE_OPERAND (arg, 0),
2902 old0, new0, old1, new1));
2904 case tcc_binary:
2905 return fold_build2_loc (loc, code, type,
2906 eval_subst (loc, TREE_OPERAND (arg, 0),
2907 old0, new0, old1, new1),
2908 eval_subst (loc, TREE_OPERAND (arg, 1),
2909 old0, new0, old1, new1));
2911 case tcc_expression:
2912 switch (code)
2914 case SAVE_EXPR:
2915 return eval_subst (loc, TREE_OPERAND (arg, 0), old0, new0,
2916 old1, new1);
2918 case COMPOUND_EXPR:
2919 return eval_subst (loc, TREE_OPERAND (arg, 1), old0, new0,
2920 old1, new1);
2922 case COND_EXPR:
2923 return fold_build3_loc (loc, code, type,
2924 eval_subst (loc, TREE_OPERAND (arg, 0),
2925 old0, new0, old1, new1),
2926 eval_subst (loc, TREE_OPERAND (arg, 1),
2927 old0, new0, old1, new1),
2928 eval_subst (loc, TREE_OPERAND (arg, 2),
2929 old0, new0, old1, new1));
2930 default:
2931 break;
2933 /* Fall through - ??? */
2935 case tcc_comparison:
2937 tree arg0 = TREE_OPERAND (arg, 0);
2938 tree arg1 = TREE_OPERAND (arg, 1);
2940 /* We need to check both for exact equality and tree equality. The
2941 former will be true if the operand has a side-effect. In that
2942 case, we know the operand occurred exactly once. */
2944 if (arg0 == old0 || operand_equal_p (arg0, old0, 0))
2945 arg0 = new0;
2946 else if (arg0 == old1 || operand_equal_p (arg0, old1, 0))
2947 arg0 = new1;
2949 if (arg1 == old0 || operand_equal_p (arg1, old0, 0))
2950 arg1 = new0;
2951 else if (arg1 == old1 || operand_equal_p (arg1, old1, 0))
2952 arg1 = new1;
2954 return fold_build2_loc (loc, code, type, arg0, arg1);
2957 default:
2958 return arg;
2962 /* Return a tree for the case when the result of an expression is RESULT
2963 converted to TYPE and OMITTED was previously an operand of the expression
2964 but is now not needed (e.g., we folded OMITTED * 0).
2966 If OMITTED has side effects, we must evaluate it. Otherwise, just do
2967 the conversion of RESULT to TYPE. */
2969 tree
2970 omit_one_operand_loc (location_t loc, tree type, tree result, tree omitted)
2972 tree t = fold_convert_loc (loc, type, result);
2974 /* If the resulting operand is an empty statement, just return the omitted
2975 statement cast to void. */
2976 if (IS_EMPTY_STMT (t) && TREE_SIDE_EFFECTS (omitted))
2977 return build1_loc (loc, NOP_EXPR, void_type_node,
2978 fold_ignored_result (omitted));
2980 if (TREE_SIDE_EFFECTS (omitted))
2981 return build2_loc (loc, COMPOUND_EXPR, type,
2982 fold_ignored_result (omitted), t);
2984 return non_lvalue_loc (loc, t);
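
A sketch of the side-effect case (call_to_f stands for a hypothetical
CALL_EXPR with side effects): folding call_to_f () * 0 has value 0, but
the call must still run, so the result is the COMPOUND_EXPR
(call_to_f (), 0).

  tree t = omit_one_operand_loc (loc, integer_type_node,
                                 integer_zero_node, call_to_f);
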
2987 /* Similar, but call pedantic_non_lvalue instead of non_lvalue. */
2989 static tree
2990 pedantic_omit_one_operand_loc (location_t loc, tree type, tree result,
2991 tree omitted)
2993 tree t = fold_convert_loc (loc, type, result);
2995 /* If the resulting operand is an empty statement, just return the omitted
2996 statement cast to void. */
2997 if (IS_EMPTY_STMT (t) && TREE_SIDE_EFFECTS (omitted))
2998 return build1_loc (loc, NOP_EXPR, void_type_node,
2999 fold_ignored_result (omitted));
3001 if (TREE_SIDE_EFFECTS (omitted))
3002 return build2_loc (loc, COMPOUND_EXPR, type,
3003 fold_ignored_result (omitted), t);
3005 return pedantic_non_lvalue_loc (loc, t);
3008 /* Return a tree for the case when the result of an expression is RESULT
3009 converted to TYPE and OMITTED1 and OMITTED2 were previously operands
3010 of the expression but are now not needed.
3012 If OMITTED1 or OMITTED2 has side effects, they must be evaluated.
3013 If both OMITTED1 and OMITTED2 have side effects, OMITTED1 is
3014 evaluated before OMITTED2. Otherwise, if neither has side effects,
3015 just do the conversion of RESULT to TYPE. */
3017 tree
3018 omit_two_operands_loc (location_t loc, tree type, tree result,
3019 tree omitted1, tree omitted2)
3021 tree t = fold_convert_loc (loc, type, result);
3023 if (TREE_SIDE_EFFECTS (omitted2))
3024 t = build2_loc (loc, COMPOUND_EXPR, type, omitted2, t);
3025 if (TREE_SIDE_EFFECTS (omitted1))
3026 t = build2_loc (loc, COMPOUND_EXPR, type, omitted1, t);
3028 return TREE_CODE (t) != COMPOUND_EXPR ? non_lvalue_loc (loc, t) : t;
3032 /* Return a simplified tree node for the truth-negation of ARG. This
3033 never alters ARG itself. We assume that ARG is an operation that
3034 returns a truth value (0 or 1).
3036 FIXME: one would think we would fold the result, but it causes
3037 problems with the dominator optimizer. */
3039 tree
3040 fold_truth_not_expr (location_t loc, tree arg)
3042 tree type = TREE_TYPE (arg);
3043 enum tree_code code = TREE_CODE (arg);
3044 location_t loc1, loc2;
3046 /* If this is a comparison, we can simply invert it, except for
3047 floating-point non-equality comparisons, in which case we just
3048 enclose a TRUTH_NOT_EXPR around what we have. */
3050 if (TREE_CODE_CLASS (code) == tcc_comparison)
3052 tree op_type = TREE_TYPE (TREE_OPERAND (arg, 0));
3053 if (FLOAT_TYPE_P (op_type)
3054 && flag_trapping_math
3055 && code != ORDERED_EXPR && code != UNORDERED_EXPR
3056 && code != NE_EXPR && code != EQ_EXPR)
3057 return NULL_TREE;
3059 code = invert_tree_comparison (code, HONOR_NANS (TYPE_MODE (op_type)));
3060 if (code == ERROR_MARK)
3061 return NULL_TREE;
3063 return build2_loc (loc, code, type, TREE_OPERAND (arg, 0),
3064 TREE_OPERAND (arg, 1));
3067 switch (code)
3069 case INTEGER_CST:
3070 return constant_boolean_node (integer_zerop (arg), type);
3072 case TRUTH_AND_EXPR:
3073 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
3074 loc2 = expr_location_or (TREE_OPERAND (arg, 1), loc);
3075 return build2_loc (loc, TRUTH_OR_EXPR, type,
3076 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)),
3077 invert_truthvalue_loc (loc2, TREE_OPERAND (arg, 1)));
3079 case TRUTH_OR_EXPR:
3080 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
3081 loc2 = expr_location_or (TREE_OPERAND (arg, 1), loc);
3082 return build2_loc (loc, TRUTH_AND_EXPR, type,
3083 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)),
3084 invert_truthvalue_loc (loc2, TREE_OPERAND (arg, 1)));
3086 case TRUTH_XOR_EXPR:
3087 /* Here we can invert either operand. We invert the first operand
3088 unless the second operand is a TRUTH_NOT_EXPR in which case our
3089 result is the XOR of the first operand with the inside of the
3090 negation of the second operand. */
3092 if (TREE_CODE (TREE_OPERAND (arg, 1)) == TRUTH_NOT_EXPR)
3093 return build2_loc (loc, TRUTH_XOR_EXPR, type, TREE_OPERAND (arg, 0),
3094 TREE_OPERAND (TREE_OPERAND (arg, 1), 0));
3095 else
3096 return build2_loc (loc, TRUTH_XOR_EXPR, type,
3097 invert_truthvalue_loc (loc, TREE_OPERAND (arg, 0)),
3098 TREE_OPERAND (arg, 1));
3100 case TRUTH_ANDIF_EXPR:
3101 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
3102 loc2 = expr_location_or (TREE_OPERAND (arg, 1), loc);
3103 return build2_loc (loc, TRUTH_ORIF_EXPR, type,
3104 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)),
3105 invert_truthvalue_loc (loc2, TREE_OPERAND (arg, 1)));
3107 case TRUTH_ORIF_EXPR:
3108 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
3109 loc2 = expr_location_or (TREE_OPERAND (arg, 1), loc);
3110 return build2_loc (loc, TRUTH_ANDIF_EXPR, type,
3111 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)),
3112 invert_truthvalue_loc (loc2, TREE_OPERAND (arg, 1)));
3114 case TRUTH_NOT_EXPR:
3115 return TREE_OPERAND (arg, 0);
3117 case COND_EXPR:
3119 tree arg1 = TREE_OPERAND (arg, 1);
3120 tree arg2 = TREE_OPERAND (arg, 2);
3122 loc1 = expr_location_or (TREE_OPERAND (arg, 1), loc);
3123 loc2 = expr_location_or (TREE_OPERAND (arg, 2), loc);
3125 /* A COND_EXPR may have a throw as one operand, which
3126 then has void type. Just leave void operands
3127 as they are. */
3128 return build3_loc (loc, COND_EXPR, type, TREE_OPERAND (arg, 0),
3129 VOID_TYPE_P (TREE_TYPE (arg1))
3130 ? arg1 : invert_truthvalue_loc (loc1, arg1),
3131 VOID_TYPE_P (TREE_TYPE (arg2))
3132 ? arg2 : invert_truthvalue_loc (loc2, arg2));
3135 case COMPOUND_EXPR:
3136 loc1 = expr_location_or (TREE_OPERAND (arg, 1), loc);
3137 return build2_loc (loc, COMPOUND_EXPR, type,
3138 TREE_OPERAND (arg, 0),
3139 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 1)));
3141 case NON_LVALUE_EXPR:
3142 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
3143 return invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0));
3145 CASE_CONVERT:
3146 if (TREE_CODE (TREE_TYPE (arg)) == BOOLEAN_TYPE)
3147 return build1_loc (loc, TRUTH_NOT_EXPR, type, arg);
3149 /* ... fall through ... */
3151 case FLOAT_EXPR:
3152 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
3153 return build1_loc (loc, TREE_CODE (arg), type,
3154 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)));
3156 case BIT_AND_EXPR:
3157 if (!integer_onep (TREE_OPERAND (arg, 1)))
3158 return NULL_TREE;
3159 return build2_loc (loc, EQ_EXPR, type, arg, build_int_cst (type, 0));
3161 case SAVE_EXPR:
3162 return build1_loc (loc, TRUTH_NOT_EXPR, type, arg);
3164 case CLEANUP_POINT_EXPR:
3165 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
3166 return build1_loc (loc, CLEANUP_POINT_EXPR, type,
3167 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)));
3169 default:
3170 return NULL_TREE;
3174 /* Return a simplified tree node for the truth-negation of ARG. This
3175 never alters ARG itself. We assume that ARG is an operation that
3176 returns a truth value (0 or 1).
3178 FIXME: one would think we would fold the result, but it causes
3179 problems with the dominator optimizer. */
3181 tree
3182 invert_truthvalue_loc (location_t loc, tree arg)
3184 tree tem;
3186 if (TREE_CODE (arg) == ERROR_MARK)
3187 return arg;
3189 tem = fold_truth_not_expr (loc, arg);
3190 if (!tem)
3191 tem = build1_loc (loc, TRUTH_NOT_EXPR, TREE_TYPE (arg), arg);
3193 return tem;
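
The TRUTH_AND_EXPR and TRUTH_OR_EXPR arms in fold_truth_not_expr are De
Morgan's laws; a quick standalone check:

  int demorgan_demo (int a, int b)
  {
    return !(a && b) == (!a || !b)
           && !(a || b) == (!a && !b);   /* always 1 */
  }
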
3196 /* Given a bit-wise operation CODE applied to ARG0 and ARG1, see if both
3197 operands are another bit-wise operation with a common input. If so,
3198 distribute the bit operations to save an operation and possibly two if
3199 constants are involved. For example, convert
3200 (A | B) & (A | C) into A | (B & C)
3201 Further simplification will occur if B and C are constants.
3203 If this optimization cannot be done, 0 will be returned. */
3205 static tree
3206 distribute_bit_expr (location_t loc, enum tree_code code, tree type,
3207 tree arg0, tree arg1)
3209 tree common;
3210 tree left, right;
3212 if (TREE_CODE (arg0) != TREE_CODE (arg1)
3213 || TREE_CODE (arg0) == code
3214 || (TREE_CODE (arg0) != BIT_AND_EXPR
3215 && TREE_CODE (arg0) != BIT_IOR_EXPR))
3216 return 0;
3218 if (operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 0), 0))
3220 common = TREE_OPERAND (arg0, 0);
3221 left = TREE_OPERAND (arg0, 1);
3222 right = TREE_OPERAND (arg1, 1);
3224 else if (operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 1), 0))
3226 common = TREE_OPERAND (arg0, 0);
3227 left = TREE_OPERAND (arg0, 1);
3228 right = TREE_OPERAND (arg1, 0);
3230 else if (operand_equal_p (TREE_OPERAND (arg0, 1), TREE_OPERAND (arg1, 0), 0))
3232 common = TREE_OPERAND (arg0, 1);
3233 left = TREE_OPERAND (arg0, 0);
3234 right = TREE_OPERAND (arg1, 1);
3236 else if (operand_equal_p (TREE_OPERAND (arg0, 1), TREE_OPERAND (arg1, 1), 0))
3238 common = TREE_OPERAND (arg0, 1);
3239 left = TREE_OPERAND (arg0, 0);
3240 right = TREE_OPERAND (arg1, 0);
3242 else
3243 return 0;
3245 common = fold_convert_loc (loc, type, common);
3246 left = fold_convert_loc (loc, type, left);
3247 right = fold_convert_loc (loc, type, right);
3248 return fold_build2_loc (loc, TREE_CODE (arg0), type, common,
3249 fold_build2_loc (loc, code, type, left, right));
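
The identity behind the transformation, checked bitwise in plain C (the
dual form (A & B) | (A & C) == A & (B | C) holds the same way):

  int distribute_demo (unsigned a, unsigned b, unsigned c)
  {
    return ((a | b) & (a | c)) == (a | (b & c));   /* always 1 */
  }
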
3252 /* Knowing that ARG0 and ARG1 are both RDIV_EXPRs, simplify a binary operation
3253 with code CODE. This optimization is unsafe. */
3254 static tree
3255 distribute_real_division (location_t loc, enum tree_code code, tree type,
3256 tree arg0, tree arg1)
3258 bool mul0 = TREE_CODE (arg0) == MULT_EXPR;
3259 bool mul1 = TREE_CODE (arg1) == MULT_EXPR;
3261 /* (A / C) +- (B / C) -> (A +- B) / C. */
3262 if (mul0 == mul1
3263 && operand_equal_p (TREE_OPERAND (arg0, 1),
3264 TREE_OPERAND (arg1, 1), 0))
3265 return fold_build2_loc (loc, mul0 ? MULT_EXPR : RDIV_EXPR, type,
3266 fold_build2_loc (loc, code, type,
3267 TREE_OPERAND (arg0, 0),
3268 TREE_OPERAND (arg1, 0)),
3269 TREE_OPERAND (arg0, 1));
3271 /* (A / C1) +- (A / C2) -> A * (1 / C1 +- 1 / C2). */
3272 if (operand_equal_p (TREE_OPERAND (arg0, 0),
3273 TREE_OPERAND (arg1, 0), 0)
3274 && TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
3275 && TREE_CODE (TREE_OPERAND (arg1, 1)) == REAL_CST)
3277 REAL_VALUE_TYPE r0, r1;
3278 r0 = TREE_REAL_CST (TREE_OPERAND (arg0, 1));
3279 r1 = TREE_REAL_CST (TREE_OPERAND (arg1, 1));
3280 if (!mul0)
3281 real_arithmetic (&r0, RDIV_EXPR, &dconst1, &r0);
3282 if (!mul1)
3283 real_arithmetic (&r1, RDIV_EXPR, &dconst1, &r1);
3284 real_arithmetic (&r0, code, &r0, &r1);
3285 return fold_build2_loc (loc, MULT_EXPR, type,
3286 TREE_OPERAND (arg0, 0),
3287 build_real (type, r0));
3290 return NULL_TREE;
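
Why this is flagged unsafe: the two shapes round differently, so the
results can disagree in the last ulp, and A + B may overflow where the
divided terms do not. A sketch of the two shapes:

  double divide_then_sum (double a, double b, double c)
  { return a / c + b / c; }
  double sum_then_divide (double a, double b, double c)
  { return (a + b) / c; }   /* not always bit-identical to the above */
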
3293 /* Return a BIT_FIELD_REF of type TYPE to refer to BITSIZE bits of INNER
3294 starting at BITPOS. The field is unsigned if UNSIGNEDP is nonzero. */
3296 static tree
3297 make_bit_field_ref (location_t loc, tree inner, tree type,
3298 HOST_WIDE_INT bitsize, HOST_WIDE_INT bitpos, int unsignedp)
3300 tree result, bftype;
3302 if (bitpos == 0)
3304 tree size = TYPE_SIZE (TREE_TYPE (inner));
3305 if ((INTEGRAL_TYPE_P (TREE_TYPE (inner))
3306 || POINTER_TYPE_P (TREE_TYPE (inner)))
3307 && host_integerp (size, 0)
3308 && tree_low_cst (size, 0) == bitsize)
3309 return fold_convert_loc (loc, type, inner);
3312 bftype = type;
3313 if (TYPE_PRECISION (bftype) != bitsize
3314 || TYPE_UNSIGNED (bftype) == !unsignedp)
3315 bftype = build_nonstandard_integer_type (bitsize, 0);
3317 result = build3_loc (loc, BIT_FIELD_REF, bftype, inner,
3318 size_int (bitsize), bitsize_int (bitpos));
3320 if (bftype != type)
3321 result = fold_convert_loc (loc, type, result);
3323 return result;
3326 /* Optimize a bit-field compare.
3328 There are two cases: First is a compare against a constant and the
3329 second is a comparison of two items where the fields are at the same
3330 bit position relative to the start of a chunk (byte, halfword, word)
3331 large enough to contain it. In these cases we can avoid the shift
3332 implicit in bitfield extractions.
3334 For constants, we emit a compare of the shifted constant with the
3335 BIT_AND_EXPR of a mask and a byte, halfword, or word of the operand being
3336 compared. For two fields at the same position, we do the ANDs with the
3337 similar mask and compare the result of the ANDs.
3339 CODE is the comparison code, known to be either NE_EXPR or EQ_EXPR.
3340 COMPARE_TYPE is the type of the comparison, and LHS and RHS
3341 are the left and right operands of the comparison, respectively.
3343 If the optimization described above can be done, we return the resulting
3344 tree. Otherwise we return zero. */
3346 static tree
3347 optimize_bit_field_compare (location_t loc, enum tree_code code,
3348 tree compare_type, tree lhs, tree rhs)
3350 HOST_WIDE_INT lbitpos, lbitsize, rbitpos, rbitsize, nbitpos, nbitsize;
3351 tree type = TREE_TYPE (lhs);
3352 tree signed_type, unsigned_type;
3353 int const_p = TREE_CODE (rhs) == INTEGER_CST;
3354 enum machine_mode lmode, rmode, nmode;
3355 int lunsignedp, runsignedp;
3356 int lvolatilep = 0, rvolatilep = 0;
3357 tree linner, rinner = NULL_TREE;
3358 tree mask;
3359 tree offset;
3361 /* In the strict volatile bitfields case, doing code changes here may prevent
3362 other optimizations, in particular in a SLOW_BYTE_ACCESS setting. */
3363 if (flag_strict_volatile_bitfields > 0)
3364 return 0;
3366 /* Get all the information about the extractions being done. If the bit size
3367 is the same as the size of the underlying object, we aren't doing an
3368 extraction at all and so can do nothing. We also don't want to
3369 do anything if the inner expression is a PLACEHOLDER_EXPR since we
3370 then will no longer be able to replace it. */
3371 linner = get_inner_reference (lhs, &lbitsize, &lbitpos, &offset, &lmode,
3372 &lunsignedp, &lvolatilep, false);
3373 if (linner == lhs || lbitsize == GET_MODE_BITSIZE (lmode) || lbitsize < 0
3374 || offset != 0 || TREE_CODE (linner) == PLACEHOLDER_EXPR)
3375 return 0;
3377 if (!const_p)
3379 /* If this is not a constant, we can only do something if bit positions,
3380 sizes, and signedness are the same. */
3381 rinner = get_inner_reference (rhs, &rbitsize, &rbitpos, &offset, &rmode,
3382 &runsignedp, &rvolatilep, false);
3384 if (rinner == rhs || lbitpos != rbitpos || lbitsize != rbitsize
3385 || lunsignedp != runsignedp || offset != 0
3386 || TREE_CODE (rinner) == PLACEHOLDER_EXPR)
3387 return 0;
3390 /* See if we can find a mode to refer to this field. We should be able to,
3391 but fail if we can't. */
3392 if (lvolatilep
3393 && GET_MODE_BITSIZE (lmode) > 0
3394 && flag_strict_volatile_bitfields > 0)
3395 nmode = lmode;
3396 else
3397 nmode = get_best_mode (lbitsize, lbitpos, 0, 0,
3398 const_p ? TYPE_ALIGN (TREE_TYPE (linner))
3399 : MIN (TYPE_ALIGN (TREE_TYPE (linner)),
3400 TYPE_ALIGN (TREE_TYPE (rinner))),
3401 word_mode, lvolatilep || rvolatilep);
3402 if (nmode == VOIDmode)
3403 return 0;
3405 /* Set signed and unsigned types of the precision of this mode for the
3406 shifts below. */
3407 signed_type = lang_hooks.types.type_for_mode (nmode, 0);
3408 unsigned_type = lang_hooks.types.type_for_mode (nmode, 1);
3410 /* Compute the bit position and size for the new reference and our offset
3411 within it. If the new reference is the same size as the original, we
3412 won't optimize anything, so return zero. */
3413 nbitsize = GET_MODE_BITSIZE (nmode);
3414 nbitpos = lbitpos & ~ (nbitsize - 1);
3415 lbitpos -= nbitpos;
3416 if (nbitsize == lbitsize)
3417 return 0;
3419 if (BYTES_BIG_ENDIAN)
3420 lbitpos = nbitsize - lbitsize - lbitpos;
3422 /* Make the mask to be used against the extracted field. */
3423 mask = build_int_cst_type (unsigned_type, -1);
3424 mask = const_binop (LSHIFT_EXPR, mask, size_int (nbitsize - lbitsize));
3425 mask = const_binop (RSHIFT_EXPR, mask,
3426 size_int (nbitsize - lbitsize - lbitpos));
3428 if (! const_p)
3429 /* If not comparing with constant, just rework the comparison
3430 and return. */
3431 return fold_build2_loc (loc, code, compare_type,
3432 fold_build2_loc (loc, BIT_AND_EXPR, unsigned_type,
3433 make_bit_field_ref (loc, linner,
3434 unsigned_type,
3435 nbitsize, nbitpos,
3436 1),
3437 mask),
3438 fold_build2_loc (loc, BIT_AND_EXPR, unsigned_type,
3439 make_bit_field_ref (loc, rinner,
3440 unsigned_type,
3441 nbitsize, nbitpos,
3442 1),
3443 mask));
3445 /* Otherwise, we are handling the constant case. See if the constant is too
3446 big for the field. Warn and return a tree for 0 (false) if so. We do
3447 this not only for its own sake, but to avoid having to test for this
3448 error case below. If we didn't, we might generate wrong code.
3450 For unsigned fields, the constant shifted right by the field length should
3451 be all zero. For signed fields, the high-order bits should agree with
3452 the sign bit. */
3454 if (lunsignedp)
3456 if (! integer_zerop (const_binop (RSHIFT_EXPR,
3457 fold_convert_loc (loc,
3458 unsigned_type, rhs),
3459 size_int (lbitsize))))
3461 warning (0, "comparison is always %d due to width of bit-field",
3462 code == NE_EXPR);
3463 return constant_boolean_node (code == NE_EXPR, compare_type);
3466 else
3468 tree tem = const_binop (RSHIFT_EXPR,
3469 fold_convert_loc (loc, signed_type, rhs),
3470 size_int (lbitsize - 1));
3471 if (! integer_zerop (tem) && ! integer_all_onesp (tem))
3473 warning (0, "comparison is always %d due to width of bit-field",
3474 code == NE_EXPR);
3475 return constant_boolean_node (code == NE_EXPR, compare_type);
3479 /* Single-bit compares should always be against zero. */
3480 if (lbitsize == 1 && ! integer_zerop (rhs))
3482 code = code == EQ_EXPR ? NE_EXPR : EQ_EXPR;
3483 rhs = build_int_cst (type, 0);
3486 /* Make a new bitfield reference, shift the constant over the
3487 appropriate number of bits and mask it with the computed mask
3488 (in case this was a signed field). If we changed it, make a new one. */
3489 lhs = make_bit_field_ref (loc, linner, unsigned_type, nbitsize, nbitpos, 1);
3490 if (lvolatilep)
3492 TREE_SIDE_EFFECTS (lhs) = 1;
3493 TREE_THIS_VOLATILE (lhs) = 1;
3496 rhs = const_binop (BIT_AND_EXPR,
3497 const_binop (LSHIFT_EXPR,
3498 fold_convert_loc (loc, unsigned_type, rhs),
3499 size_int (lbitpos)),
3500 mask);
3502 lhs = build2_loc (loc, code, compare_type,
3503 build2 (BIT_AND_EXPR, unsigned_type, lhs, mask), rhs);
3504 return lhs;
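
The shape of the emitted comparison, in plain C: testing a 3-bit field
at bit 4 of a word against the constant 5 becomes a masked word compare
with no extraction shift (field width, position and constant here are
illustrative):

  int field_equals_5 (unsigned word)
  {
    return (word & (7u << 4)) == (5u << 4);
  }
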
3507 /* Subroutine for fold_truth_andor_1: decode a field reference.
3509 If EXP is a comparison reference, we return the innermost reference.
3511 *PBITSIZE is set to the number of bits in the reference, *PBITPOS is
3512 set to the starting bit number.
3514 If the innermost field can be completely contained in a mode-sized
3515 unit, *PMODE is set to that mode. Otherwise, it is set to VOIDmode.
3517 *PVOLATILEP is set to 1 if any expression encountered is volatile;
3518 otherwise it is not changed.
3520 *PUNSIGNEDP is set to the signedness of the field.
3522 *PMASK is set to the mask used. This is either contained in a
3523 BIT_AND_EXPR or derived from the width of the field.
3525 *PAND_MASK is set to the mask found in a BIT_AND_EXPR, if any.
3527 Return 0 if this is not a component reference or is one that we can't
3528 do anything with. */
3530 static tree
3531 decode_field_reference (location_t loc, tree exp, HOST_WIDE_INT *pbitsize,
3532 HOST_WIDE_INT *pbitpos, enum machine_mode *pmode,
3533 int *punsignedp, int *pvolatilep,
3534 tree *pmask, tree *pand_mask)
3536 tree outer_type = 0;
3537 tree and_mask = 0;
3538 tree mask, inner, offset;
3539 tree unsigned_type;
3540 unsigned int precision;
3542 /* All the optimizations using this function assume integer fields.
3543 There are problems with FP fields since the type_for_size call
3544 below can fail for, e.g., XFmode. */
3545 if (! INTEGRAL_TYPE_P (TREE_TYPE (exp)))
3546 return 0;
3548 /* We are interested in the bare arrangement of bits, so strip everything
3549 that doesn't affect the machine mode. However, record the type of the
3550 outermost expression if it may matter below. */
3551 if (CONVERT_EXPR_P (exp)
3552 || TREE_CODE (exp) == NON_LVALUE_EXPR)
3553 outer_type = TREE_TYPE (exp);
3554 STRIP_NOPS (exp);
3556 if (TREE_CODE (exp) == BIT_AND_EXPR)
3558 and_mask = TREE_OPERAND (exp, 1);
3559 exp = TREE_OPERAND (exp, 0);
3560 STRIP_NOPS (exp); STRIP_NOPS (and_mask);
3561 if (TREE_CODE (and_mask) != INTEGER_CST)
3562 return 0;
3565 inner = get_inner_reference (exp, pbitsize, pbitpos, &offset, pmode,
3566 punsignedp, pvolatilep, false);
3567 if ((inner == exp && and_mask == 0)
3568 || *pbitsize < 0 || offset != 0
3569 || TREE_CODE (inner) == PLACEHOLDER_EXPR)
3570 return 0;
3572 /* If the number of bits in the reference is the same as the bitsize of
3573 the outer type, then the outer type gives the signedness. Otherwise
3574 (in case of a small bitfield) the signedness is unchanged. */
3575 if (outer_type && *pbitsize == TYPE_PRECISION (outer_type))
3576 *punsignedp = TYPE_UNSIGNED (outer_type);
3578 /* Compute the mask to access the bitfield. */
3579 unsigned_type = lang_hooks.types.type_for_size (*pbitsize, 1);
3580 precision = TYPE_PRECISION (unsigned_type);
3582 mask = build_int_cst_type (unsigned_type, -1);
3584 mask = const_binop (LSHIFT_EXPR, mask, size_int (precision - *pbitsize));
3585 mask = const_binop (RSHIFT_EXPR, mask, size_int (precision - *pbitsize));
3587 /* Merge it with the mask we found in the BIT_AND_EXPR, if any. */
3588 if (and_mask != 0)
3589 mask = fold_build2_loc (loc, BIT_AND_EXPR, unsigned_type,
3590 fold_convert_loc (loc, unsigned_type, and_mask), mask);
3592 *pmask = mask;
3593 *pand_mask = and_mask;
3594 return inner;
3597 /* Return nonzero if MASK represents a mask of SIZE ones in the low-order
3598 bit positions. */
3600 static int
3601 all_ones_mask_p (const_tree mask, int size)
3603 tree type = TREE_TYPE (mask);
3604 unsigned int precision = TYPE_PRECISION (type);
3605 tree tmask;
3607 tmask = build_int_cst_type (signed_type_for (type), -1);
3609 return
3610 tree_int_cst_equal (mask,
3611 const_binop (RSHIFT_EXPR,
3612 const_binop (LSHIFT_EXPR, tmask,
3613 size_int (precision - size)),
3614 size_int (precision - size)));
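
The same predicate for an ordinary 32-bit value, as a sketch: SIZE
low-order ones, with care when SIZE equals the type width:

  int all_ones_mask_demo (unsigned mask, int size)
  {
    unsigned want = size >= 32 ? ~0u : (1u << size) - 1;
    return mask == want;
  }
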
3617 /* Subroutine for fold: determine if VAL is the INTEGER_CST that
3618 represents the sign bit of EXP's type. If EXP represents a sign
3619 or zero extension, also test VAL against the unextended type.
3620 The return value is the (sub)expression whose sign bit is VAL,
3621 or NULL_TREE otherwise. */
3623 static tree
3624 sign_bit_p (tree exp, const_tree val)
3626 unsigned HOST_WIDE_INT mask_lo, lo;
3627 HOST_WIDE_INT mask_hi, hi;
3628 int width;
3629 tree t;
3631 /* Tree EXP must have an integral type. */
3632 t = TREE_TYPE (exp);
3633 if (! INTEGRAL_TYPE_P (t))
3634 return NULL_TREE;
3636 /* Tree VAL must be an integer constant. */
3637 if (TREE_CODE (val) != INTEGER_CST
3638 || TREE_OVERFLOW (val))
3639 return NULL_TREE;
3641 width = TYPE_PRECISION (t);
3642 if (width > HOST_BITS_PER_WIDE_INT)
3644 hi = (unsigned HOST_WIDE_INT) 1 << (width - HOST_BITS_PER_WIDE_INT - 1);
3645 lo = 0;
3647 mask_hi = ((unsigned HOST_WIDE_INT) -1
3648 >> (HOST_BITS_PER_DOUBLE_INT - width));
3649 mask_lo = -1;
3651 else
3653 hi = 0;
3654 lo = (unsigned HOST_WIDE_INT) 1 << (width - 1);
3656 mask_hi = 0;
3657 mask_lo = ((unsigned HOST_WIDE_INT) -1
3658 >> (HOST_BITS_PER_WIDE_INT - width));
3661 /* We mask off those bits beyond TREE_TYPE (exp) so that we can
3662 treat VAL as if it were unsigned. */
3663 if ((TREE_INT_CST_HIGH (val) & mask_hi) == hi
3664 && (TREE_INT_CST_LOW (val) & mask_lo) == lo)
3665 return exp;
3667 /* Handle extension from a narrower type. */
3668 if (TREE_CODE (exp) == NOP_EXPR
3669 && TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0))) < width)
3670 return sign_bit_p (TREE_OPERAND (exp, 0), val);
3672 return NULL_TREE;
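
The constant being matched, for a WIDTH-bit type narrow enough to fit
in unsigned (the function itself also handles double-word widths, and
looks through a widening NOP_EXPR so that the sign bit of the narrower
operand matches too):

  unsigned sign_bit_value (int width)
  {
    return 1u << (width - 1);   /* e.g. 0x80000000 for width == 32 */
  }
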
3675 /* Subroutine for fold_truth_andor_1: determine if an operand is simple enough
3676 to be evaluated unconditionally. */
3678 static int
3679 simple_operand_p (const_tree exp)
3681 /* Strip any conversions that don't change the machine mode. */
3682 STRIP_NOPS (exp);
3684 return (CONSTANT_CLASS_P (exp)
3685 || TREE_CODE (exp) == SSA_NAME
3686 || (DECL_P (exp)
3687 && ! TREE_ADDRESSABLE (exp)
3688 && ! TREE_THIS_VOLATILE (exp)
3689 && ! DECL_NONLOCAL (exp)
3690 /* Don't regard global variables as simple. They may be
3691 allocated in ways unknown to the compiler (shared memory,
3692 #pragma weak, etc). */
3693 && ! TREE_PUBLIC (exp)
3694 && ! DECL_EXTERNAL (exp)
3695 /* Loading a static variable is unduly expensive, but global
3696 registers aren't expensive. */
3697 && (! TREE_STATIC (exp) || DECL_REGISTER (exp))));
3700 /* Subroutine for fold_truth_andor: determine if an operand is simple enough
3701 to be evaluated unconditionally.
3702 In addition to simple_operand_p, we assume that comparisons, conversions,
3703 and logic-not operations are simple, if their operands are simple, too. */
3705 static bool
3706 simple_operand_p_2 (tree exp)
3708 enum tree_code code;
3710 if (TREE_SIDE_EFFECTS (exp)
3711 || tree_could_trap_p (exp))
3712 return false;
3714 while (CONVERT_EXPR_P (exp))
3715 exp = TREE_OPERAND (exp, 0);
3717 code = TREE_CODE (exp);
3719 if (TREE_CODE_CLASS (code) == tcc_comparison)
3720 return (simple_operand_p (TREE_OPERAND (exp, 0))
3721 && simple_operand_p (TREE_OPERAND (exp, 1)));
3723 if (code == TRUTH_NOT_EXPR)
3724 return simple_operand_p_2 (TREE_OPERAND (exp, 0));
3726 return simple_operand_p (exp);
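/* Editorial examples (assumed, not from the original sources): under
   these predicates "a < b" on local, non-volatile, non-addressable
   scalars is simple, and so is "!(a < b)"; "f () < 3" is not (side
   effects), "*p < 3" is not (the load could trap), and a comparison
   involving a global is rejected because the global itself is not a
   simple operand.  */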
3730 /* The following functions are subroutines to fold_range_test and allow it to
3731 try to change a logical combination of comparisons into a range test.
3733 For example, both
3734 X == 2 || X == 3 || X == 4 || X == 5
3735 and
3736 X >= 2 && X <= 5
3737 are converted to
3738 (unsigned) (X - 2) <= 3
3740 We describe each set of comparisons as being either inside or outside
3741 a range, using a variable named like IN_P, and then describe the
3742 range with a lower and upper bound. If one of the bounds is omitted,
3743 it represents either the highest or lowest value of the type.
3745 In the comments below, we represent a range by two numbers in brackets
3746 preceded by a "+" to designate being inside that range, or a "-" to
3747 designate being outside that range, so the condition can be inverted by
3748 flipping the prefix. An omitted bound is represented by a "-". For
3749 example, "- [-, 10]" means being outside the range starting at the lowest
3750 possible value and ending at 10, in other words, being greater than 10.
3751 The range "+ [-, -]" is always true and hence the range "- [-, -]" is
3752 always false.
3754 We set up things so that the missing bounds are handled in a consistent
3755 manner so neither a missing bound nor "true" and "false" need to be
3756 handled using a special case. */
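/* Worked example (editorial): "X == 2 || X == 3 || X == 4 || X == 5"
   is first described as the union of the ranges + [2, 2], + [3, 3],
   + [4, 4] and + [5, 5]; merge_ranges combines those into + [2, 5],
   and build_range_check then emits the single unsigned comparison
   (unsigned) (X - 2) <= 3.  */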
3758 /* Return the result of applying CODE to ARG0 and ARG1, but handle the case
3759 of ARG0 and/or ARG1 being omitted, meaning an unlimited range. UPPER0_P
3760 and UPPER1_P are nonzero if the respective argument is an upper bound
3761 and zero for a lower. TYPE, if nonzero, is the type of the result; it
3762 must be specified for a comparison. ARG1 will be converted to ARG0's
3763 type if both are specified. */
3765 static tree
3766 range_binop (enum tree_code code, tree type, tree arg0, int upper0_p,
3767 tree arg1, int upper1_p)
3769 tree tem;
3770 int result;
3771 int sgn0, sgn1;
3773 /* If neither arg represents infinity, do the normal operation.
3774 Else, if not a comparison, return infinity. Else handle the special
3775 comparison rules. Note that most of the cases below won't occur, but
3776 are handled for consistency. */
3778 if (arg0 != 0 && arg1 != 0)
3780 tem = fold_build2 (code, type != 0 ? type : TREE_TYPE (arg0),
3781 arg0, fold_convert (TREE_TYPE (arg0), arg1));
3782 STRIP_NOPS (tem);
3783 return TREE_CODE (tem) == INTEGER_CST ? tem : 0;
3786 if (TREE_CODE_CLASS (code) != tcc_comparison)
3787 return 0;
3789 /* Set SGN[01] to -1 if ARG[01] is a lower bound, 1 for upper, and 0
3790 for neither. In real mathematics, we cannot assume open-ended ranges are
3791 comparable. But this is computer arithmetic, where numbers are finite.
3792 We can therefore represent any omitted bound by a value Z lying beyond
3793 every representable number. This permits
3794 us to treat unbounded ranges as equal. */
3795 sgn0 = arg0 != 0 ? 0 : (upper0_p ? 1 : -1);
3796 sgn1 = arg1 != 0 ? 0 : (upper1_p ? 1 : -1);
3797 switch (code)
3799 case EQ_EXPR:
3800 result = sgn0 == sgn1;
3801 break;
3802 case NE_EXPR:
3803 result = sgn0 != sgn1;
3804 break;
3805 case LT_EXPR:
3806 result = sgn0 < sgn1;
3807 break;
3808 case LE_EXPR:
3809 result = sgn0 <= sgn1;
3810 break;
3811 case GT_EXPR:
3812 result = sgn0 > sgn1;
3813 break;
3814 case GE_EXPR:
3815 result = sgn0 >= sgn1;
3816 break;
3817 default:
3818 gcc_unreachable ();
3821 return constant_boolean_node (result, type);
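/* Editorial note: with the Z convention above, an omitted lower bound
   (SGN == -1) compared against a finite bound (SGN == 0) behaves like
   -infinity: LT_EXPR yields -1 < 0, i.e. true, while two omitted upper
   bounds (SGN == 1 on both sides) compare equal under EQ_EXPR.  */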
3824 /* Helper routine for make_range. Perform one step for it, return
3825 new expression if the loop should continue or NULL_TREE if it should
3826 stop. */
3828 tree
3829 make_range_step (location_t loc, enum tree_code code, tree arg0, tree arg1,
3830 tree exp_type, tree *p_low, tree *p_high, int *p_in_p,
3831 bool *strict_overflow_p)
3833 tree arg0_type = TREE_TYPE (arg0);
3834 tree n_low, n_high, low = *p_low, high = *p_high;
3835 int in_p = *p_in_p, n_in_p;
3837 switch (code)
3839 case TRUTH_NOT_EXPR:
3840 *p_in_p = ! in_p;
3841 return arg0;
3843 case EQ_EXPR: case NE_EXPR:
3844 case LT_EXPR: case LE_EXPR: case GE_EXPR: case GT_EXPR:
3845 /* We can only do something if the range is testing for zero
3846 and if the second operand is an integer constant. Note that
3847 saying something is "in" the range we make is done by
3848 complementing IN_P, since IN_P is set for the initial case of
3849 being not equal to zero; "out" means leaving it alone. */
3850 if (low == NULL_TREE || high == NULL_TREE
3851 || ! integer_zerop (low) || ! integer_zerop (high)
3852 || TREE_CODE (arg1) != INTEGER_CST)
3853 return NULL_TREE;
3855 switch (code)
3857 case NE_EXPR: /* - [c, c] */
3858 low = high = arg1;
3859 break;
3860 case EQ_EXPR: /* + [c, c] */
3861 in_p = ! in_p, low = high = arg1;
3862 break;
3863 case GT_EXPR: /* - [-, c] */
3864 low = 0, high = arg1;
3865 break;
3866 case GE_EXPR: /* + [c, -] */
3867 in_p = ! in_p, low = arg1, high = 0;
3868 break;
3869 case LT_EXPR: /* - [c, -] */
3870 low = arg1, high = 0;
3871 break;
3872 case LE_EXPR: /* + [-, c] */
3873 in_p = ! in_p, low = 0, high = arg1;
3874 break;
3875 default:
3876 gcc_unreachable ();
3879 /* If this is an unsigned comparison, we also know that EXP is
3880 greater than or equal to zero. We base the range tests we make
3881 on that fact, so we record it here so we can parse existing
3882 range tests. We test arg0_type since often the return type
3883 of, e.g. EQ_EXPR, is boolean. */
3884 if (TYPE_UNSIGNED (arg0_type) && (low == 0 || high == 0))
3886 if (! merge_ranges (&n_in_p, &n_low, &n_high,
3887 in_p, low, high, 1,
3888 build_int_cst (arg0_type, 0),
3889 NULL_TREE))
3890 return NULL_TREE;
3892 in_p = n_in_p, low = n_low, high = n_high;
3894 /* If the high bound is missing, but we have a nonzero low
3895 bound, reverse the range so it goes from zero to the low bound
3896 minus 1. */
3897 if (high == 0 && low && ! integer_zerop (low))
3899 in_p = ! in_p;
3900 high = range_binop (MINUS_EXPR, NULL_TREE, low, 0,
3901 integer_one_node, 0);
3902 low = build_int_cst (arg0_type, 0);
3906 *p_low = low;
3907 *p_high = high;
3908 *p_in_p = in_p;
3909 return arg0;
3911 case NEGATE_EXPR:
3912 /* (-x) IN [a,b] -> x in [-b, -a] */
3913 n_low = range_binop (MINUS_EXPR, exp_type,
3914 build_int_cst (exp_type, 0),
3915 0, high, 1);
3916 n_high = range_binop (MINUS_EXPR, exp_type,
3917 build_int_cst (exp_type, 0),
3918 0, low, 0);
3919 if (n_high != 0 && TREE_OVERFLOW (n_high))
3920 return NULL_TREE;
3921 goto normalize;
3923 case BIT_NOT_EXPR:
3924 /* ~ X -> -X - 1 */
3925 return build2_loc (loc, MINUS_EXPR, exp_type, negate_expr (arg0),
3926 build_int_cst (exp_type, 1));
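/* Editorial derivation: since ~X == -X - 1, the rewritten MINUS_EXPR
   above lets the NEGATE_EXPR and PLUS_EXPR/MINUS_EXPR cases finish the
   job on later iterations; e.g. "~X in [a, b]" ultimately becomes
   "X in [-b - 1, -a - 1]".  */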
3928 case PLUS_EXPR:
3929 case MINUS_EXPR:
3930 if (TREE_CODE (arg1) != INTEGER_CST)
3931 return NULL_TREE;
3933 /* If flag_wrapv and ARG0_TYPE is signed, then we cannot
3934 move a constant to the other side. */
3935 if (!TYPE_UNSIGNED (arg0_type)
3936 && !TYPE_OVERFLOW_UNDEFINED (arg0_type))
3937 return NULL_TREE;
3939 /* If EXP is signed, any overflow in the computation is undefined,
3940 so we don't worry about it so long as our computations on
3941 the bounds don't overflow. For unsigned, overflow is defined
3942 and this is exactly the right thing. */
3943 n_low = range_binop (code == MINUS_EXPR ? PLUS_EXPR : MINUS_EXPR,
3944 arg0_type, low, 0, arg1, 0);
3945 n_high = range_binop (code == MINUS_EXPR ? PLUS_EXPR : MINUS_EXPR,
3946 arg0_type, high, 1, arg1, 0);
3947 if ((n_low != 0 && TREE_OVERFLOW (n_low))
3948 || (n_high != 0 && TREE_OVERFLOW (n_high)))
3949 return NULL_TREE;
3951 if (TYPE_OVERFLOW_UNDEFINED (arg0_type))
3952 *strict_overflow_p = true;
3954 normalize:
3955 /* Check for an unsigned range which has wrapped around the maximum
3956 value thus making n_high < n_low, and normalize it. */
3957 if (n_low && n_high && tree_int_cst_lt (n_high, n_low))
3959 low = range_binop (PLUS_EXPR, arg0_type, n_high, 0,
3960 integer_one_node, 0);
3961 high = range_binop (MINUS_EXPR, arg0_type, n_low, 0,
3962 integer_one_node, 0);
3964 /* If the range is of the form +/- [ x+1, x ], we won't
3965 be able to normalize it. But then, it represents the
3966 whole range or the empty set, so make it
3967 +/- [ -, - ]. */
3968 if (tree_int_cst_equal (n_low, low)
3969 && tree_int_cst_equal (n_high, high))
3970 low = high = 0;
3971 else
3972 in_p = ! in_p;
3974 else
3975 low = n_low, high = n_high;
3977 *p_low = low;
3978 *p_high = high;
3979 *p_in_p = in_p;
3980 return arg0;
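/* Worked example (editorial, unsigned 8-bit X): for "X + 2 <= 5" the
   range + [0, 5] of X + 2 moves to n_low = 0 - 2 = 254 and
   n_high = 5 - 2 = 3, which has wrapped; the normalization above turns
   it into the inverted range - [4, 253], i.e. X <= 3 || X >= 254,
   exactly matching the wrapping addition.  */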
3982 CASE_CONVERT:
3983 case NON_LVALUE_EXPR:
3984 if (TYPE_PRECISION (arg0_type) > TYPE_PRECISION (exp_type))
3985 return NULL_TREE;
3987 if (! INTEGRAL_TYPE_P (arg0_type)
3988 || (low != 0 && ! int_fits_type_p (low, arg0_type))
3989 || (high != 0 && ! int_fits_type_p (high, arg0_type)))
3990 return NULL_TREE;
3992 n_low = low, n_high = high;
3994 if (n_low != 0)
3995 n_low = fold_convert_loc (loc, arg0_type, n_low);
3997 if (n_high != 0)
3998 n_high = fold_convert_loc (loc, arg0_type, n_high);
4000 /* If we're converting arg0 from an unsigned type to exp's
4001 signed type, we will be doing the comparison as unsigned.
4002 The tests above have already verified that LOW and HIGH
4003 are both positive.
4005 So we have to ensure that we will handle large unsigned
4006 values the same way that the current signed bounds treat
4007 negative values. */
4009 if (!TYPE_UNSIGNED (exp_type) && TYPE_UNSIGNED (arg0_type))
4011 tree high_positive;
4012 tree equiv_type;
4013 /* For fixed-point modes, we need to pass the saturating flag
4014 as the 2nd parameter. */
4015 if (ALL_FIXED_POINT_MODE_P (TYPE_MODE (arg0_type)))
4016 equiv_type
4017 = lang_hooks.types.type_for_mode (TYPE_MODE (arg0_type),
4018 TYPE_SATURATING (arg0_type));
4019 else
4020 equiv_type
4021 = lang_hooks.types.type_for_mode (TYPE_MODE (arg0_type), 1);
4023 /* A range without an upper bound is, naturally, unbounded.
4024 Since convert would have cropped a very large value, use
4025 the max value for the destination type. */
4026 high_positive
4027 = TYPE_MAX_VALUE (equiv_type) ? TYPE_MAX_VALUE (equiv_type)
4028 : TYPE_MAX_VALUE (arg0_type);
4030 if (TYPE_PRECISION (exp_type) == TYPE_PRECISION (arg0_type))
4031 high_positive = fold_build2_loc (loc, RSHIFT_EXPR, arg0_type,
4032 fold_convert_loc (loc, arg0_type,
4033 high_positive),
4034 build_int_cst (arg0_type, 1));
4036 /* If the low bound is specified, "and" the range with the
4037 range for which the original unsigned value will be
4038 positive. */
4039 if (low != 0)
4041 if (! merge_ranges (&n_in_p, &n_low, &n_high, 1, n_low, n_high,
4042 1, fold_convert_loc (loc, arg0_type,
4043 integer_zero_node),
4044 high_positive))
4045 return NULL_TREE;
4047 in_p = (n_in_p == in_p);
4049 else
4051 /* Otherwise, "or" the range with the range of the input
4052 that will be interpreted as negative. */
4053 if (! merge_ranges (&n_in_p, &n_low, &n_high, 0, n_low, n_high,
4054 1, fold_convert_loc (loc, arg0_type,
4055 integer_zero_node),
4056 high_positive))
4057 return NULL_TREE;
4059 in_p = (in_p != n_in_p);
4063 *p_low = n_low;
4064 *p_high = n_high;
4065 *p_in_p = in_p;
4066 return arg0;
4068 default:
4069 return NULL_TREE;
4073 /* Given EXP, a logical expression, set the range it is testing into
4074 variables denoted by PIN_P, PLOW, and PHIGH. Return the expression
4075 actually being tested. *PLOW and *PHIGH will be made of the same
4076 type as the returned expression. If EXP is not a comparison, we
4077 will most likely not be returning a useful value and range. Set
4078 *STRICT_OVERFLOW_P to true if the return value is only valid
4079 because signed overflow is undefined; otherwise, do not change
4080 *STRICT_OVERFLOW_P. */
4082 tree
4083 make_range (tree exp, int *pin_p, tree *plow, tree *phigh,
4084 bool *strict_overflow_p)
4086 enum tree_code code;
4087 tree arg0, arg1 = NULL_TREE;
4088 tree exp_type, nexp;
4089 int in_p;
4090 tree low, high;
4091 location_t loc = EXPR_LOCATION (exp);
4093 /* Start with simply saying "EXP != 0" and then look at the code of EXP
4094 and see if we can refine the range. Some of the cases below may not
4095 happen, but it doesn't seem worth worrying about this. We keep
4096 iterating while make_range_step refines the range; once it
4097 returns NULL_TREE we stop. */
4099 in_p = 0;
4100 low = high = build_int_cst (TREE_TYPE (exp), 0);
4102 while (1)
4104 code = TREE_CODE (exp);
4105 exp_type = TREE_TYPE (exp);
4106 arg0 = NULL_TREE;
4108 if (IS_EXPR_CODE_CLASS (TREE_CODE_CLASS (code)))
4110 if (TREE_OPERAND_LENGTH (exp) > 0)
4111 arg0 = TREE_OPERAND (exp, 0);
4112 if (TREE_CODE_CLASS (code) == tcc_binary
4113 || TREE_CODE_CLASS (code) == tcc_comparison
4114 || (TREE_CODE_CLASS (code) == tcc_expression
4115 && TREE_OPERAND_LENGTH (exp) > 1))
4116 arg1 = TREE_OPERAND (exp, 1);
4118 if (arg0 == NULL_TREE)
4119 break;
4121 nexp = make_range_step (loc, code, arg0, arg1, exp_type, &low,
4122 &high, &in_p, strict_overflow_p);
4123 if (nexp == NULL_TREE)
4124 break;
4125 exp = nexp;
4128 /* If EXP is a constant, we can evaluate whether this is true or false. */
4129 if (TREE_CODE (exp) == INTEGER_CST)
4131 in_p = in_p == (integer_onep (range_binop (GE_EXPR, integer_type_node,
4132 exp, 0, low, 0))
4133 && integer_onep (range_binop (LE_EXPR, integer_type_node,
4134 exp, 1, high, 1)));
4135 low = high = 0;
4136 exp = 0;
4139 *pin_p = in_p, *plow = low, *phigh = high;
4140 return exp;
4143 /* Given a range, LOW, HIGH, and IN_P, an expression, EXP, and a result
4144 type, TYPE, return an expression to test if EXP is in (or out of, depending
4145 on IN_P) the range. Return 0 if the test couldn't be created. */
4147 tree
4148 build_range_check (location_t loc, tree type, tree exp, int in_p,
4149 tree low, tree high)
4151 tree etype = TREE_TYPE (exp), value;
4153 #ifdef HAVE_canonicalize_funcptr_for_compare
4154 /* Disable this optimization for function pointer expressions
4155 on targets that require function pointer canonicalization. */
4156 if (HAVE_canonicalize_funcptr_for_compare
4157 && TREE_CODE (etype) == POINTER_TYPE
4158 && TREE_CODE (TREE_TYPE (etype)) == FUNCTION_TYPE)
4159 return NULL_TREE;
4160 #endif
4162 if (! in_p)
4164 value = build_range_check (loc, type, exp, 1, low, high);
4165 if (value != 0)
4166 return invert_truthvalue_loc (loc, value);
4168 return 0;
4171 if (low == 0 && high == 0)
4172 return build_int_cst (type, 1);
4174 if (low == 0)
4175 return fold_build2_loc (loc, LE_EXPR, type, exp,
4176 fold_convert_loc (loc, etype, high));
4178 if (high == 0)
4179 return fold_build2_loc (loc, GE_EXPR, type, exp,
4180 fold_convert_loc (loc, etype, low));
4182 if (operand_equal_p (low, high, 0))
4183 return fold_build2_loc (loc, EQ_EXPR, type, exp,
4184 fold_convert_loc (loc, etype, low));
4186 if (integer_zerop (low))
4188 if (! TYPE_UNSIGNED (etype))
4190 etype = unsigned_type_for (etype);
4191 high = fold_convert_loc (loc, etype, high);
4192 exp = fold_convert_loc (loc, etype, exp);
4194 return build_range_check (loc, type, exp, 1, 0, high);
4197 /* Optimize (c>=1) && (c<=127) into (signed char)c > 0. */
4198 if (integer_onep (low) && TREE_CODE (high) == INTEGER_CST)
4200 unsigned HOST_WIDE_INT lo;
4201 HOST_WIDE_INT hi;
4202 int prec;
4204 prec = TYPE_PRECISION (etype);
4205 if (prec <= HOST_BITS_PER_WIDE_INT)
4207 hi = 0;
4208 lo = ((unsigned HOST_WIDE_INT) 1 << (prec - 1)) - 1;
4210 else
4212 hi = ((HOST_WIDE_INT) 1 << (prec - HOST_BITS_PER_WIDE_INT - 1)) - 1;
4213 lo = (unsigned HOST_WIDE_INT) -1;
4216 if (TREE_INT_CST_HIGH (high) == hi && TREE_INT_CST_LOW (high) == lo)
4218 if (TYPE_UNSIGNED (etype))
4220 tree signed_etype = signed_type_for (etype);
4221 if (TYPE_PRECISION (signed_etype) != TYPE_PRECISION (etype))
4222 etype
4223 = build_nonstandard_integer_type (TYPE_PRECISION (etype), 0);
4224 else
4225 etype = signed_etype;
4226 exp = fold_convert_loc (loc, etype, exp);
4228 return fold_build2_loc (loc, GT_EXPR, type, exp,
4229 build_int_cst (etype, 0));
4233 /* Optimize (c>=low) && (c<=high) into (c-low>=0) && (c-low<=high-low).
4234 This requires wrap-around arithmetic for the type of the expression.
4235 First make sure that arithmetic in this type is valid, then make sure
4236 that it wraps around. */
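/* Editorial example: with wrap-around arithmetic, "c >= 'A' && c <= 'Z'"
   becomes "(unsigned char) (c - 'A') <= 25"; if c is below 'A' the
   subtraction wraps to a value far above 25 and the single comparison
   still fails, which is why the type must wrap rather than trap.  */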
4237 if (TREE_CODE (etype) == ENUMERAL_TYPE || TREE_CODE (etype) == BOOLEAN_TYPE)
4238 etype = lang_hooks.types.type_for_size (TYPE_PRECISION (etype),
4239 TYPE_UNSIGNED (etype));
4241 if (TREE_CODE (etype) == INTEGER_TYPE && !TYPE_OVERFLOW_WRAPS (etype))
4243 tree utype, minv, maxv;
4245 /* Check if (unsigned) INT_MAX + 1 == (unsigned) INT_MIN
4246 for the type in question, as we rely on this here. */
4247 utype = unsigned_type_for (etype);
4248 maxv = fold_convert_loc (loc, utype, TYPE_MAX_VALUE (etype));
4249 maxv = range_binop (PLUS_EXPR, NULL_TREE, maxv, 1,
4250 integer_one_node, 1);
4251 minv = fold_convert_loc (loc, utype, TYPE_MIN_VALUE (etype));
4253 if (integer_zerop (range_binop (NE_EXPR, integer_type_node,
4254 minv, 1, maxv, 1)))
4255 etype = utype;
4256 else
4257 return 0;
4260 high = fold_convert_loc (loc, etype, high);
4261 low = fold_convert_loc (loc, etype, low);
4262 exp = fold_convert_loc (loc, etype, exp);
4264 value = const_binop (MINUS_EXPR, high, low);
4267 if (POINTER_TYPE_P (etype))
4269 if (value != 0 && !TREE_OVERFLOW (value))
4271 low = fold_build1_loc (loc, NEGATE_EXPR, TREE_TYPE (low), low);
4272 return build_range_check (loc, type,
4273 fold_build_pointer_plus_loc (loc, exp, low),
4274 1, build_int_cst (etype, 0), value);
4276 return 0;
4279 if (value != 0 && !TREE_OVERFLOW (value))
4280 return build_range_check (loc, type,
4281 fold_build2_loc (loc, MINUS_EXPR, etype, exp, low),
4282 1, build_int_cst (etype, 0), value);
4284 return 0;
4287 /* Return the predecessor of VAL in its type, handling the infinite case. */
4289 static tree
4290 range_predecessor (tree val)
4292 tree type = TREE_TYPE (val);
4294 if (INTEGRAL_TYPE_P (type)
4295 && operand_equal_p (val, TYPE_MIN_VALUE (type), 0))
4296 return 0;
4297 else
4298 return range_binop (MINUS_EXPR, NULL_TREE, val, 0, integer_one_node, 0);
4301 /* Return the successor of VAL in its type, handling the infinite case. */
4303 static tree
4304 range_successor (tree val)
4306 tree type = TREE_TYPE (val);
4308 if (INTEGRAL_TYPE_P (type)
4309 && operand_equal_p (val, TYPE_MAX_VALUE (type), 0))
4310 return 0;
4311 else
4312 return range_binop (PLUS_EXPR, NULL_TREE, val, 0, integer_one_node, 0);
4315 /* Given two ranges, see if we can merge them into one. Return 1 if we
4316 can, 0 if we can't. Set the output range into the specified parameters. */
4318 bool
4319 merge_ranges (int *pin_p, tree *plow, tree *phigh, int in0_p, tree low0,
4320 tree high0, int in1_p, tree low1, tree high1)
4322 int no_overlap;
4323 int subset;
4324 int temp;
4325 tree tem;
4326 int in_p;
4327 tree low, high;
4328 int lowequal = ((low0 == 0 && low1 == 0)
4329 || integer_onep (range_binop (EQ_EXPR, integer_type_node,
4330 low0, 0, low1, 0)));
4331 int highequal = ((high0 == 0 && high1 == 0)
4332 || integer_onep (range_binop (EQ_EXPR, integer_type_node,
4333 high0, 1, high1, 1)));
4335 /* Make range 0 be the range that starts first, or ends last if they
4336 start at the same value. Swap them if that is not the case. */
4337 if (integer_onep (range_binop (GT_EXPR, integer_type_node,
4338 low0, 0, low1, 0))
4339 || (lowequal
4340 && integer_onep (range_binop (GT_EXPR, integer_type_node,
4341 high1, 1, high0, 1))))
4343 temp = in0_p, in0_p = in1_p, in1_p = temp;
4344 tem = low0, low0 = low1, low1 = tem;
4345 tem = high0, high0 = high1, high1 = tem;
4348 /* Now flag two cases, whether the ranges are disjoint or whether the
4349 second range is totally subsumed in the first. Note that the tests
4350 below are simplified by the ones above. */
4351 no_overlap = integer_onep (range_binop (LT_EXPR, integer_type_node,
4352 high0, 1, low1, 0));
4353 subset = integer_onep (range_binop (LE_EXPR, integer_type_node,
4354 high1, 1, high0, 1));
4356 /* We now have four cases, depending on whether we are including or
4357 excluding the two ranges. */
4358 if (in0_p && in1_p)
4360 /* If they don't overlap, the result is false. If the second range
4361 is a subset it is the result. Otherwise, the range is from the start
4362 of the second to the end of the first. */
4363 if (no_overlap)
4364 in_p = 0, low = high = 0;
4365 else if (subset)
4366 in_p = 1, low = low1, high = high1;
4367 else
4368 in_p = 1, low = low1, high = high0;
4371 else if (in0_p && ! in1_p)
4373 /* If they don't overlap, the result is the first range. If they are
4374 equal, the result is false. If the second range is a subset of the
4375 first, and the ranges begin at the same place, we go from just after
4376 the end of the second range to the end of the first. If the second
4377 range is not a subset of the first, or if it is a subset and both
4378 ranges end at the same place, the range starts at the start of the
4379 first range and ends just before the second range.
4380 Otherwise, we can't describe this as a single range. */
4381 if (no_overlap)
4382 in_p = 1, low = low0, high = high0;
4383 else if (lowequal && highequal)
4384 in_p = 0, low = high = 0;
4385 else if (subset && lowequal)
4387 low = range_successor (high1);
4388 high = high0;
4389 in_p = 1;
4390 if (low == 0)
4392 /* We are in the weird situation where high0 > high1 but
4393 high1 has no successor. Punt. */
4394 return 0;
4397 else if (! subset || highequal)
4399 low = low0;
4400 high = range_predecessor (low1);
4401 in_p = 1;
4402 if (high == 0)
4404 /* low0 < low1 but low1 has no predecessor. Punt. */
4405 return 0;
4408 else
4409 return 0;
4412 else if (! in0_p && in1_p)
4414 /* If they don't overlap, the result is the second range. If the second
4415 is a subset of the first, the result is false. Otherwise,
4416 the range starts just after the first range and ends at the
4417 end of the second. */
4418 if (no_overlap)
4419 in_p = 1, low = low1, high = high1;
4420 else if (subset || highequal)
4421 in_p = 0, low = high = 0;
4422 else
4424 low = range_successor (high0);
4425 high = high1;
4426 in_p = 1;
4427 if (low == 0)
4429 /* high1 > high0 but high0 has no successor. Punt. */
4430 return 0;
4435 else
4437 /* The case where we are excluding both ranges. Here the complex case
4438 is if they don't overlap. In that case, the only time we have a
4439 range is if they are adjacent. If the second is a subset of the
4440 first, the result is the first. Otherwise, the range to exclude
4441 starts at the beginning of the first range and ends at the end of the
4442 second. */
4443 if (no_overlap)
4445 if (integer_onep (range_binop (EQ_EXPR, integer_type_node,
4446 range_successor (high0),
4447 1, low1, 0)))
4448 in_p = 0, low = low0, high = high1;
4449 else
4451 /* Canonicalize - [min, x] into - [-, x]. */
4452 if (low0 && TREE_CODE (low0) == INTEGER_CST)
4453 switch (TREE_CODE (TREE_TYPE (low0)))
4455 case ENUMERAL_TYPE:
4456 if (TYPE_PRECISION (TREE_TYPE (low0))
4457 != GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (low0))))
4458 break;
4459 /* FALLTHROUGH */
4460 case INTEGER_TYPE:
4461 if (tree_int_cst_equal (low0,
4462 TYPE_MIN_VALUE (TREE_TYPE (low0))))
4463 low0 = 0;
4464 break;
4465 case POINTER_TYPE:
4466 if (TYPE_UNSIGNED (TREE_TYPE (low0))
4467 && integer_zerop (low0))
4468 low0 = 0;
4469 break;
4470 default:
4471 break;
4474 /* Canonicalize - [x, max] into - [x, -]. */
4475 if (high1 && TREE_CODE (high1) == INTEGER_CST)
4476 switch (TREE_CODE (TREE_TYPE (high1)))
4478 case ENUMERAL_TYPE:
4479 if (TYPE_PRECISION (TREE_TYPE (high1))
4480 != GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (high1))))
4481 break;
4482 /* FALLTHROUGH */
4483 case INTEGER_TYPE:
4484 if (tree_int_cst_equal (high1,
4485 TYPE_MAX_VALUE (TREE_TYPE (high1))))
4486 high1 = 0;
4487 break;
4488 case POINTER_TYPE:
4489 if (TYPE_UNSIGNED (TREE_TYPE (high1))
4490 && integer_zerop (range_binop (PLUS_EXPR, NULL_TREE,
4491 high1, 1,
4492 integer_one_node, 1)))
4493 high1 = 0;
4494 break;
4495 default:
4496 break;
4499 /* The ranges might also be adjacent between the maximum and
4500 minimum values of the given type. For
4501 - [{min,-}, x] and - [y, {max,-}] ranges where x + 1 < y
4502 return + [x + 1, y - 1]. */
4503 if (low0 == 0 && high1 == 0)
4505 low = range_successor (high0);
4506 high = range_predecessor (low1);
4507 if (low == 0 || high == 0)
4508 return 0;
4510 in_p = 1;
4512 else
4513 return 0;
4516 else if (subset)
4517 in_p = 0, low = low0, high = high0;
4518 else
4519 in_p = 0, low = low0, high = high1;
4522 *pin_p = in_p, *plow = low, *phigh = high;
4523 return 1;
4527 /* Subroutine of fold, looking inside expressions of the form
4528 A op B ? A : C, where ARG0, ARG1 and ARG2 are the three operands
4529 of the COND_EXPR. This function is being used also to optimize
4530 A op B ? C : A, by reversing the comparison first.
4532 Return a folded expression whose code is not a COND_EXPR
4533 anymore, or NULL_TREE if no folding opportunity is found. */
4535 static tree
4536 fold_cond_expr_with_comparison (location_t loc, tree type,
4537 tree arg0, tree arg1, tree arg2)
4539 enum tree_code comp_code = TREE_CODE (arg0);
4540 tree arg00 = TREE_OPERAND (arg0, 0);
4541 tree arg01 = TREE_OPERAND (arg0, 1);
4542 tree arg1_type = TREE_TYPE (arg1);
4543 tree tem;
4545 STRIP_NOPS (arg1);
4546 STRIP_NOPS (arg2);
4548 /* If we have A op 0 ? A : -A, consider applying the following
4549 transformations:
4551 A == 0? A : -A same as -A
4552 A != 0? A : -A same as A
4553 A >= 0? A : -A same as abs (A)
4554 A > 0? A : -A same as abs (A)
4555 A <= 0? A : -A same as -abs (A)
4556 A < 0? A : -A same as -abs (A)
4558 None of these transformations work for modes with signed
4559 zeros. If A is +/-0, the first two transformations will
4560 change the sign of the result (from +0 to -0, or vice
4561 versa). The last four will fix the sign of the result,
4562 even though the original expressions could be positive or
4563 negative, depending on the sign of A.
4565 Note that all these transformations are correct if A is
4566 NaN, since the two alternatives (A and -A) are also NaNs. */
4567 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (type))
4568 && (FLOAT_TYPE_P (TREE_TYPE (arg01))
4569 ? real_zerop (arg01)
4570 : integer_zerop (arg01))
4571 && ((TREE_CODE (arg2) == NEGATE_EXPR
4572 && operand_equal_p (TREE_OPERAND (arg2, 0), arg1, 0))
4573 /* In the case that A is of the form X-Y, '-A' (arg2) may
4574 have already been folded to Y-X, check for that. */
4575 || (TREE_CODE (arg1) == MINUS_EXPR
4576 && TREE_CODE (arg2) == MINUS_EXPR
4577 && operand_equal_p (TREE_OPERAND (arg1, 0),
4578 TREE_OPERAND (arg2, 1), 0)
4579 && operand_equal_p (TREE_OPERAND (arg1, 1),
4580 TREE_OPERAND (arg2, 0), 0))))
4581 switch (comp_code)
4583 case EQ_EXPR:
4584 case UNEQ_EXPR:
4585 tem = fold_convert_loc (loc, arg1_type, arg1);
4586 return pedantic_non_lvalue_loc (loc,
4587 fold_convert_loc (loc, type,
4588 negate_expr (tem)));
4589 case NE_EXPR:
4590 case LTGT_EXPR:
4591 return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
4592 case UNGE_EXPR:
4593 case UNGT_EXPR:
4594 if (flag_trapping_math)
4595 break;
4596 /* Fall through. */
4597 case GE_EXPR:
4598 case GT_EXPR:
4599 if (TYPE_UNSIGNED (TREE_TYPE (arg1)))
4600 arg1 = fold_convert_loc (loc, signed_type_for
4601 (TREE_TYPE (arg1)), arg1);
4602 tem = fold_build1_loc (loc, ABS_EXPR, TREE_TYPE (arg1), arg1);
4603 return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, tem));
4604 case UNLE_EXPR:
4605 case UNLT_EXPR:
4606 if (flag_trapping_math)
4607 break;
4608 case LE_EXPR:
4609 case LT_EXPR:
4610 if (TYPE_UNSIGNED (TREE_TYPE (arg1)))
4611 arg1 = fold_convert_loc (loc, signed_type_for
4612 (TREE_TYPE (arg1)), arg1);
4613 tem = fold_build1_loc (loc, ABS_EXPR, TREE_TYPE (arg1), arg1);
4614 return negate_expr (fold_convert_loc (loc, type, tem));
4615 default:
4616 gcc_assert (TREE_CODE_CLASS (comp_code) == tcc_comparison);
4617 break;
4620 /* A != 0 ? A : 0 is simply A, unless A is -0. Likewise
4621 A == 0 ? A : 0 is always 0 unless A is -0. Note that
4622 both transformations are correct when A is NaN: A != 0
4623 is then true, and A == 0 is false. */
4625 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (type))
4626 && integer_zerop (arg01) && integer_zerop (arg2))
4628 if (comp_code == NE_EXPR)
4629 return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
4630 else if (comp_code == EQ_EXPR)
4631 return build_int_cst (type, 0);
4634 /* Try some transformations of A op B ? A : B.
4636 A == B? A : B same as B
4637 A != B? A : B same as A
4638 A >= B? A : B same as max (A, B)
4639 A > B? A : B same as max (B, A)
4640 A <= B? A : B same as min (A, B)
4641 A < B? A : B same as min (B, A)
4643 As above, these transformations don't work in the presence
4644 of signed zeros. For example, if A and B are zeros of
4645 opposite sign, the first two transformations will change
4646 the sign of the result. In the last four, the original
4647 expressions give different results for (A=+0, B=-0) and
4648 (A=-0, B=+0), but the transformed expressions do not.
4650 The first two transformations are correct if either A or B
4651 is a NaN. In the first transformation, the condition will
4652 be false, and B will indeed be chosen. In the case of the
4653 second transformation, the condition A != B will be true,
4654 and A will be chosen.
4656 The conversions to max() and min() are not correct if B is
4657 a number and A is not. The conditions in the original
4658 expressions will be false, so all four give B. The min()
4659 and max() versions would give a NaN instead. */
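/* Editorial instance of the signed-zero hazard: with A = +0.0 and
   B = -0.0, "A == B ? A : B" evaluates to +0.0 because +0.0 == -0.0,
   yet the proposed replacement B is -0.0; hence the rewrite is only
   done when signed zeros are not honored.  */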
4660 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (type))
4661 && operand_equal_for_comparison_p (arg01, arg2, arg00)
4662 /* Avoid these transformations if the COND_EXPR may be used
4663 as an lvalue in the C++ front-end. PR c++/19199. */
4664 && (in_gimple_form
4665 || (strcmp (lang_hooks.name, "GNU C++") != 0
4666 && strcmp (lang_hooks.name, "GNU Objective-C++") != 0)
4667 || ! maybe_lvalue_p (arg1)
4668 || ! maybe_lvalue_p (arg2)))
4670 tree comp_op0 = arg00;
4671 tree comp_op1 = arg01;
4672 tree comp_type = TREE_TYPE (comp_op0);
4674 /* Avoid adding NOP_EXPRs in case this is an lvalue. */
4675 if (TYPE_MAIN_VARIANT (comp_type) == TYPE_MAIN_VARIANT (type))
4677 comp_type = type;
4678 comp_op0 = arg1;
4679 comp_op1 = arg2;
4682 switch (comp_code)
4684 case EQ_EXPR:
4685 return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, arg2));
4686 case NE_EXPR:
4687 return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
4688 case LE_EXPR:
4689 case LT_EXPR:
4690 case UNLE_EXPR:
4691 case UNLT_EXPR:
4692 /* In C++ a ?: expression can be an lvalue, so put the
4693 operand which will be used if they are equal first
4694 so that we can convert this back to the
4695 corresponding COND_EXPR. */
4696 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
4698 comp_op0 = fold_convert_loc (loc, comp_type, comp_op0);
4699 comp_op1 = fold_convert_loc (loc, comp_type, comp_op1);
4700 tem = (comp_code == LE_EXPR || comp_code == UNLE_EXPR)
4701 ? fold_build2_loc (loc, MIN_EXPR, comp_type, comp_op0, comp_op1)
4702 : fold_build2_loc (loc, MIN_EXPR, comp_type,
4703 comp_op1, comp_op0);
4704 return pedantic_non_lvalue_loc (loc,
4705 fold_convert_loc (loc, type, tem));
4707 break;
4708 case GE_EXPR:
4709 case GT_EXPR:
4710 case UNGE_EXPR:
4711 case UNGT_EXPR:
4712 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
4714 comp_op0 = fold_convert_loc (loc, comp_type, comp_op0);
4715 comp_op1 = fold_convert_loc (loc, comp_type, comp_op1);
4716 tem = (comp_code == GE_EXPR || comp_code == UNGE_EXPR)
4717 ? fold_build2_loc (loc, MAX_EXPR, comp_type, comp_op0, comp_op1)
4718 : fold_build2_loc (loc, MAX_EXPR, comp_type,
4719 comp_op1, comp_op0);
4720 return pedantic_non_lvalue_loc (loc,
4721 fold_convert_loc (loc, type, tem));
4723 break;
4724 case UNEQ_EXPR:
4725 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
4726 return pedantic_non_lvalue_loc (loc,
4727 fold_convert_loc (loc, type, arg2));
4728 break;
4729 case LTGT_EXPR:
4730 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
4731 return pedantic_non_lvalue_loc (loc,
4732 fold_convert_loc (loc, type, arg1));
4733 break;
4734 default:
4735 gcc_assert (TREE_CODE_CLASS (comp_code) == tcc_comparison);
4736 break;
4740 /* If this is A op C1 ? A : C2 with C1 and C2 constant integers,
4741 we might still be able to simplify this. For example,
4742 if C1 is one less or one more than C2, this might have started
4743 out as a MIN or MAX and been transformed by this function.
4744 Only good for INTEGER_TYPEs, because we need TYPE_MAX_VALUE. */
4746 if (INTEGRAL_TYPE_P (type)
4747 && TREE_CODE (arg01) == INTEGER_CST
4748 && TREE_CODE (arg2) == INTEGER_CST)
4749 switch (comp_code)
4751 case EQ_EXPR:
4752 if (TREE_CODE (arg1) == INTEGER_CST)
4753 break;
4754 /* We can replace A with C1 in this case. */
4755 arg1 = fold_convert_loc (loc, type, arg01);
4756 return fold_build3_loc (loc, COND_EXPR, type, arg0, arg1, arg2);
4758 case LT_EXPR:
4759 /* If C1 is C2 + 1, this is min(A, C2), but use ARG00's type for
4760 MIN_EXPR, to preserve the signedness of the comparison. */
4761 if (! operand_equal_p (arg2, TYPE_MAX_VALUE (type),
4762 OEP_ONLY_CONST)
4763 && operand_equal_p (arg01,
4764 const_binop (PLUS_EXPR, arg2,
4765 build_int_cst (type, 1)),
4766 OEP_ONLY_CONST))
4768 tem = fold_build2_loc (loc, MIN_EXPR, TREE_TYPE (arg00), arg00,
4769 fold_convert_loc (loc, TREE_TYPE (arg00),
4770 arg2));
4771 return pedantic_non_lvalue_loc (loc,
4772 fold_convert_loc (loc, type, tem));
4774 break;
4776 case LE_EXPR:
4777 /* If C1 is C2 - 1, this is min(A, C2), with the same care
4778 as above. */
4779 if (! operand_equal_p (arg2, TYPE_MIN_VALUE (type),
4780 OEP_ONLY_CONST)
4781 && operand_equal_p (arg01,
4782 const_binop (MINUS_EXPR, arg2,
4783 build_int_cst (type, 1)),
4784 OEP_ONLY_CONST))
4786 tem = fold_build2_loc (loc, MIN_EXPR, TREE_TYPE (arg00), arg00,
4787 fold_convert_loc (loc, TREE_TYPE (arg00),
4788 arg2));
4789 return pedantic_non_lvalue_loc (loc,
4790 fold_convert_loc (loc, type, tem));
4792 break;
4794 case GT_EXPR:
4795 /* If C1 is C2 - 1, this is max(A, C2), but use ARG00's type for
4796 MAX_EXPR, to preserve the signedness of the comparison. */
4797 if (! operand_equal_p (arg2, TYPE_MIN_VALUE (type),
4798 OEP_ONLY_CONST)
4799 && operand_equal_p (arg01,
4800 const_binop (MINUS_EXPR, arg2,
4801 build_int_cst (type, 1)),
4802 OEP_ONLY_CONST))
4804 tem = fold_build2_loc (loc, MAX_EXPR, TREE_TYPE (arg00), arg00,
4805 fold_convert_loc (loc, TREE_TYPE (arg00),
4806 arg2));
4807 return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, tem));
4809 break;
4811 case GE_EXPR:
4812 /* If C1 is C2 + 1, this is max(A, C2), with the same care as above. */
4813 if (! operand_equal_p (arg2, TYPE_MAX_VALUE (type),
4814 OEP_ONLY_CONST)
4815 && operand_equal_p (arg01,
4816 const_binop (PLUS_EXPR, arg2,
4817 build_int_cst (type, 1)),
4818 OEP_ONLY_CONST))
4820 tem = fold_build2_loc (loc, MAX_EXPR, TREE_TYPE (arg00), arg00,
4821 fold_convert_loc (loc, TREE_TYPE (arg00),
4822 arg2));
4823 return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, tem));
4825 break;
4826 case NE_EXPR:
4827 break;
4828 default:
4829 gcc_unreachable ();
4832 return NULL_TREE;
4837 #ifndef LOGICAL_OP_NON_SHORT_CIRCUIT
4838 #define LOGICAL_OP_NON_SHORT_CIRCUIT \
4839 (BRANCH_COST (optimize_function_for_speed_p (cfun), \
4840 false) >= 2)
4841 #endif
4843 /* EXP is some logical combination of boolean tests. See if we can
4844 merge it into some range test. Return the new tree if so. */
4846 static tree
4847 fold_range_test (location_t loc, enum tree_code code, tree type,
4848 tree op0, tree op1)
4850 int or_op = (code == TRUTH_ORIF_EXPR
4851 || code == TRUTH_OR_EXPR);
4852 int in0_p, in1_p, in_p;
4853 tree low0, low1, low, high0, high1, high;
4854 bool strict_overflow_p = false;
4855 tree lhs = make_range (op0, &in0_p, &low0, &high0, &strict_overflow_p);
4856 tree rhs = make_range (op1, &in1_p, &low1, &high1, &strict_overflow_p);
4857 tree tem;
4858 const char * const warnmsg = G_("assuming signed overflow does not occur "
4859 "when simplifying range test");
4861 /* If this is an OR operation, invert both sides; we will invert
4862 again at the end. */
4863 if (or_op)
4864 in0_p = ! in0_p, in1_p = ! in1_p;
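/* Editorial note: this is De Morgan's law at the range level:
   "a || b" == "!(!a && !b)", so an OR of ranges is handled by
   intersecting the complemented ranges and complementing the result
   again at the end.  */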
4866 /* If both expressions are the same, if we can merge the ranges, and we
4867 can build the range test, return it or it inverted. If one of the
4868 ranges is always true or always false, consider it to be the same
4869 expression as the other. */
4870 if ((lhs == 0 || rhs == 0 || operand_equal_p (lhs, rhs, 0))
4871 && merge_ranges (&in_p, &low, &high, in0_p, low0, high0,
4872 in1_p, low1, high1)
4873 && 0 != (tem = (build_range_check (loc, type,
4874 lhs != 0 ? lhs
4875 : rhs != 0 ? rhs : integer_zero_node,
4876 in_p, low, high))))
4878 if (strict_overflow_p)
4879 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_COMPARISON);
4880 return or_op ? invert_truthvalue_loc (loc, tem) : tem;
4883 /* On machines where the branch cost is expensive, if this is a
4884 short-circuited branch and the underlying object on both sides
4885 is the same, make a non-short-circuit operation. */
4886 else if (LOGICAL_OP_NON_SHORT_CIRCUIT
4887 && lhs != 0 && rhs != 0
4888 && (code == TRUTH_ANDIF_EXPR
4889 || code == TRUTH_ORIF_EXPR)
4890 && operand_equal_p (lhs, rhs, 0))
4892 /* If simple enough, just rewrite. Otherwise, make a SAVE_EXPR
4893 unless we are at top level or LHS contains a PLACEHOLDER_EXPR, in
4894 which cases we can't do this. */
4895 if (simple_operand_p (lhs))
4896 return build2_loc (loc, code == TRUTH_ANDIF_EXPR
4897 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR,
4898 type, op0, op1);
4900 else if (!lang_hooks.decls.global_bindings_p ()
4901 && !CONTAINS_PLACEHOLDER_P (lhs))
4903 tree common = save_expr (lhs);
4905 if (0 != (lhs = build_range_check (loc, type, common,
4906 or_op ? ! in0_p : in0_p,
4907 low0, high0))
4908 && (0 != (rhs = build_range_check (loc, type, common,
4909 or_op ? ! in1_p : in1_p,
4910 low1, high1))))
4912 if (strict_overflow_p)
4913 fold_overflow_warning (warnmsg,
4914 WARN_STRICT_OVERFLOW_COMPARISON);
4915 return build2_loc (loc, code == TRUTH_ANDIF_EXPR
4916 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR,
4917 type, lhs, rhs);
4922 return 0;
4925 /* Subroutine for fold_truth_andor_1: C is an INTEGER_CST interpreted as a
4926 P-bit value. Arrange things so the extra bits will be set to zero if and
4927 only if C is sign-extended to its full width. If MASK is nonzero,
4928 it is an INTEGER_CST that should be AND'ed with the extra bits. */
4930 static tree
4931 unextend (tree c, int p, int unsignedp, tree mask)
4933 tree type = TREE_TYPE (c);
4934 int modesize = GET_MODE_BITSIZE (TYPE_MODE (type));
4935 tree temp;
4937 if (p == modesize || unsignedp)
4938 return c;
4940 /* We work by getting just the sign bit into the low-order bit, then
4941 into the high-order bit, then sign-extend. We then XOR that value
4942 with C. */
4943 temp = const_binop (RSHIFT_EXPR, c, size_int (p - 1));
4944 temp = const_binop (BIT_AND_EXPR, temp, size_int (1));
4946 /* We must use a signed type in order to get an arithmetic right shift.
4947 However, we must also avoid introducing accidental overflows, so that
4948 a subsequent call to integer_zerop will work. Hence we must
4949 do the type conversion here. At this point, the constant is either
4950 zero or one, and the conversion to a signed type can never overflow.
4951 We could get an overflow if this conversion is done anywhere else. */
4952 if (TYPE_UNSIGNED (type))
4953 temp = fold_convert (signed_type_for (type), temp);
4955 temp = const_binop (LSHIFT_EXPR, temp, size_int (modesize - 1));
4956 temp = const_binop (RSHIFT_EXPR, temp, size_int (modesize - p - 1));
4957 if (mask != 0)
4958 temp = const_binop (BIT_AND_EXPR, temp,
4959 fold_convert (TREE_TYPE (c), mask));
4960 /* If necessary, convert the type back to match the type of C. */
4961 if (TYPE_UNSIGNED (type))
4962 temp = fold_convert (type, temp);
4964 return fold_convert (type, const_binop (BIT_XOR_EXPR, c, temp));
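/* Worked example (editorial, P == 4, 8-bit mode, ignoring MASK): for
   C == 0b00001010 the right shift and AND isolate the sign bit (1),
   the signed left/right shift pair smears it into 0b11110000, and the
   final XOR yields 0b11111010 -- C with its extra bits nonzero,
   precisely because C was not sign-extended.  Had C been the
   sign-extended 0b11111010, the XOR would give back 0b00001010.  */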
4967 /* For an expression that has the form
4968 (A && B) || ~B
4969 or
4970 (A || B) && ~B,
4971 we can drop one of the inner expressions and simplify to
4972 A || ~B
4973 or
4974 A && ~B
4975 LOC is the location of the resulting expression. OP is the inner
4976 logical operation; the left-hand side in the examples above, while CMPOP
4977 is the right-hand side. RHS_ONLY is used to prevent us from accidentally
4978 removing a condition that guards another, as in
4979 (A != NULL && A->...) || A == NULL
4980 which we must not transform. If RHS_ONLY is true, only eliminate the
4981 right-most operand of the inner logical operation. */
4983 static tree
4984 merge_truthop_with_opposite_arm (location_t loc, tree op, tree cmpop,
4985 bool rhs_only)
4987 tree type = TREE_TYPE (cmpop);
4988 enum tree_code code = TREE_CODE (cmpop);
4989 enum tree_code truthop_code = TREE_CODE (op);
4990 tree lhs = TREE_OPERAND (op, 0);
4991 tree rhs = TREE_OPERAND (op, 1);
4992 tree orig_lhs = lhs, orig_rhs = rhs;
4993 enum tree_code rhs_code = TREE_CODE (rhs);
4994 enum tree_code lhs_code = TREE_CODE (lhs);
4995 enum tree_code inv_code;
4997 if (TREE_SIDE_EFFECTS (op) || TREE_SIDE_EFFECTS (cmpop))
4998 return NULL_TREE;
5000 if (TREE_CODE_CLASS (code) != tcc_comparison)
5001 return NULL_TREE;
5003 if (rhs_code == truthop_code)
5005 tree newrhs = merge_truthop_with_opposite_arm (loc, rhs, cmpop, rhs_only);
5006 if (newrhs != NULL_TREE)
5008 rhs = newrhs;
5009 rhs_code = TREE_CODE (rhs);
5012 if (lhs_code == truthop_code && !rhs_only)
5014 tree newlhs = merge_truthop_with_opposite_arm (loc, lhs, cmpop, false);
5015 if (newlhs != NULL_TREE)
5017 lhs = newlhs;
5018 lhs_code = TREE_CODE (lhs);
5022 inv_code = invert_tree_comparison (code, HONOR_NANS (TYPE_MODE (type)));
5023 if (inv_code == rhs_code
5024 && operand_equal_p (TREE_OPERAND (rhs, 0), TREE_OPERAND (cmpop, 0), 0)
5025 && operand_equal_p (TREE_OPERAND (rhs, 1), TREE_OPERAND (cmpop, 1), 0))
5026 return lhs;
5027 if (!rhs_only && inv_code == lhs_code
5028 && operand_equal_p (TREE_OPERAND (lhs, 0), TREE_OPERAND (cmpop, 0), 0)
5029 && operand_equal_p (TREE_OPERAND (lhs, 1), TREE_OPERAND (cmpop, 1), 0))
5030 return rhs;
5031 if (rhs != orig_rhs || lhs != orig_lhs)
5032 return fold_build2_loc (loc, truthop_code, TREE_TYPE (cmpop),
5033 lhs, rhs);
5034 return NULL_TREE;
5037 /* Find ways of folding logical expressions of LHS and RHS:
5038 Try to merge two comparisons to the same innermost item.
5039 Look for range tests like "ch >= '0' && ch <= '9'".
5040 Look for combinations of simple terms on machines with expensive branches
5041 and evaluate the RHS unconditionally.
5043 For example, if we have p->a == 2 && p->b == 4 and we can make an
5044 object large enough to span both A and B, we can do this with a comparison
5045 against the object ANDed with a mask.
5047 If we have p->a == q->a && p->b == q->b, we may be able to use bit masking
5048 operations to do this with one comparison.
5050 We check for both normal comparisons and the BIT_AND_EXPRs made by this
5051 function and the one above.
5053 CODE is the logical operation being done. It can be TRUTH_ANDIF_EXPR,
5054 TRUTH_AND_EXPR, TRUTH_ORIF_EXPR, or TRUTH_OR_EXPR.
5056 TRUTH_TYPE is the type of the logical operand and LHS and RHS are its
5057 two operands.
5059 We return the simplified tree or 0 if no optimization is possible. */
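/* Editorial sketch of the merged form, assuming two adjacent 8-bit
   fields at bit positions 0 and 8 of the same word: "p->a == 2 &&
   p->b == 4" can be checked in one go, roughly as

   (WORD (p) & 0xffff) == (2 | (4 << 8))

   on a little-endian target, where WORD (p) stands for a single wider
   load covering both fields (hypothetical notation).  */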
5061 static tree
5062 fold_truth_andor_1 (location_t loc, enum tree_code code, tree truth_type,
5063 tree lhs, tree rhs)
5065 /* If this is the "or" of two comparisons, we can do something if
5066 the comparisons are NE_EXPR. If this is the "and", we can do something
5067 if the comparisons are EQ_EXPR. I.e.,
5068 (a->b == 2 && a->c == 4) can become (a->new == NEW).
5070 WANTED_CODE is this operation code. For single bit fields, we can
5071 convert EQ_EXPR to NE_EXPR so we need not reject the "wrong"
5072 comparison for one-bit fields. */
5074 enum tree_code wanted_code;
5075 enum tree_code lcode, rcode;
5076 tree ll_arg, lr_arg, rl_arg, rr_arg;
5077 tree ll_inner, lr_inner, rl_inner, rr_inner;
5078 HOST_WIDE_INT ll_bitsize, ll_bitpos, lr_bitsize, lr_bitpos;
5079 HOST_WIDE_INT rl_bitsize, rl_bitpos, rr_bitsize, rr_bitpos;
5080 HOST_WIDE_INT xll_bitpos, xlr_bitpos, xrl_bitpos, xrr_bitpos;
5081 HOST_WIDE_INT lnbitsize, lnbitpos, rnbitsize, rnbitpos;
5082 int ll_unsignedp, lr_unsignedp, rl_unsignedp, rr_unsignedp;
5083 enum machine_mode ll_mode, lr_mode, rl_mode, rr_mode;
5084 enum machine_mode lnmode, rnmode;
5085 tree ll_mask, lr_mask, rl_mask, rr_mask;
5086 tree ll_and_mask, lr_and_mask, rl_and_mask, rr_and_mask;
5087 tree l_const, r_const;
5088 tree lntype, rntype, result;
5089 HOST_WIDE_INT first_bit, end_bit;
5090 int volatilep;
5092 /* Start by getting the comparison codes. Fail if anything is volatile.
5093 If one operand is a BIT_AND_EXPR with the constant one, treat it as if
5094 it were surrounded with a NE_EXPR. */
5096 if (TREE_SIDE_EFFECTS (lhs) || TREE_SIDE_EFFECTS (rhs))
5097 return 0;
5099 lcode = TREE_CODE (lhs);
5100 rcode = TREE_CODE (rhs);
5102 if (lcode == BIT_AND_EXPR && integer_onep (TREE_OPERAND (lhs, 1)))
5104 lhs = build2 (NE_EXPR, truth_type, lhs,
5105 build_int_cst (TREE_TYPE (lhs), 0));
5106 lcode = NE_EXPR;
5109 if (rcode == BIT_AND_EXPR && integer_onep (TREE_OPERAND (rhs, 1)))
5111 rhs = build2 (NE_EXPR, truth_type, rhs,
5112 build_int_cst (TREE_TYPE (rhs), 0));
5113 rcode = NE_EXPR;
5116 if (TREE_CODE_CLASS (lcode) != tcc_comparison
5117 || TREE_CODE_CLASS (rcode) != tcc_comparison)
5118 return 0;
5120 ll_arg = TREE_OPERAND (lhs, 0);
5121 lr_arg = TREE_OPERAND (lhs, 1);
5122 rl_arg = TREE_OPERAND (rhs, 0);
5123 rr_arg = TREE_OPERAND (rhs, 1);
5125 /* Simplify (x<y) && (x==y) into (x<=y) and related optimizations. */
5126 if (simple_operand_p (ll_arg)
5127 && simple_operand_p (lr_arg))
5129 if (operand_equal_p (ll_arg, rl_arg, 0)
5130 && operand_equal_p (lr_arg, rr_arg, 0))
5132 result = combine_comparisons (loc, code, lcode, rcode,
5133 truth_type, ll_arg, lr_arg);
5134 if (result)
5135 return result;
5137 else if (operand_equal_p (ll_arg, rr_arg, 0)
5138 && operand_equal_p (lr_arg, rl_arg, 0))
5140 result = combine_comparisons (loc, code, lcode,
5141 swap_tree_comparison (rcode),
5142 truth_type, ll_arg, lr_arg);
5143 if (result)
5144 return result;
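/* Editorial example: here "x < y || x == y" merges into "x <= y", and
   in the swapped-operand branch below "x < y && y > x" first has the
   second comparison rewritten as "x < y" before being combined into a
   single "x < y".  */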
5148 code = ((code == TRUTH_AND_EXPR || code == TRUTH_ANDIF_EXPR)
5149 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR);
5151 /* If the RHS can be evaluated unconditionally and its operands are
5152 simple, it wins to evaluate the RHS unconditionally on machines
5153 with expensive branches. In this case, this isn't a comparison
5154 that can be merged. */
5156 if (BRANCH_COST (optimize_function_for_speed_p (cfun),
5157 false) >= 2
5158 && ! FLOAT_TYPE_P (TREE_TYPE (rl_arg))
5159 && simple_operand_p (rl_arg)
5160 && simple_operand_p (rr_arg))
5162 /* Convert (a != 0) || (b != 0) into (a | b) != 0. */
5163 if (code == TRUTH_OR_EXPR
5164 && lcode == NE_EXPR && integer_zerop (lr_arg)
5165 && rcode == NE_EXPR && integer_zerop (rr_arg)
5166 && TREE_TYPE (ll_arg) == TREE_TYPE (rl_arg)
5167 && INTEGRAL_TYPE_P (TREE_TYPE (ll_arg)))
5168 return build2_loc (loc, NE_EXPR, truth_type,
5169 build2 (BIT_IOR_EXPR, TREE_TYPE (ll_arg),
5170 ll_arg, rl_arg),
5171 build_int_cst (TREE_TYPE (ll_arg), 0));
5173 /* Convert (a == 0) && (b == 0) into (a | b) == 0. */
5174 if (code == TRUTH_AND_EXPR
5175 && lcode == EQ_EXPR && integer_zerop (lr_arg)
5176 && rcode == EQ_EXPR && integer_zerop (rr_arg)
5177 && TREE_TYPE (ll_arg) == TREE_TYPE (rl_arg)
5178 && INTEGRAL_TYPE_P (TREE_TYPE (ll_arg)))
5179 return build2_loc (loc, EQ_EXPR, truth_type,
5180 build2 (BIT_IOR_EXPR, TREE_TYPE (ll_arg),
5181 ll_arg, rl_arg),
5182 build_int_cst (TREE_TYPE (ll_arg), 0));
5185 /* See if the comparisons can be merged. Then get all the parameters for
5186 each side. */
5188 if ((lcode != EQ_EXPR && lcode != NE_EXPR)
5189 || (rcode != EQ_EXPR && rcode != NE_EXPR))
5190 return 0;
5192 volatilep = 0;
5193 ll_inner = decode_field_reference (loc, ll_arg,
5194 &ll_bitsize, &ll_bitpos, &ll_mode,
5195 &ll_unsignedp, &volatilep, &ll_mask,
5196 &ll_and_mask);
5197 lr_inner = decode_field_reference (loc, lr_arg,
5198 &lr_bitsize, &lr_bitpos, &lr_mode,
5199 &lr_unsignedp, &volatilep, &lr_mask,
5200 &lr_and_mask);
5201 rl_inner = decode_field_reference (loc, rl_arg,
5202 &rl_bitsize, &rl_bitpos, &rl_mode,
5203 &rl_unsignedp, &volatilep, &rl_mask,
5204 &rl_and_mask);
5205 rr_inner = decode_field_reference (loc, rr_arg,
5206 &rr_bitsize, &rr_bitpos, &rr_mode,
5207 &rr_unsignedp, &volatilep, &rr_mask,
5208 &rr_and_mask);
5210 /* The inner operation on the lhs of each
5211 comparison must be the same if we are to be able to do anything.
5212 Then see if we have constants. If not, the same must be true for
5213 the rhs's. */
5214 if (volatilep || ll_inner == 0 || rl_inner == 0
5215 || ! operand_equal_p (ll_inner, rl_inner, 0))
5216 return 0;
5218 if (TREE_CODE (lr_arg) == INTEGER_CST
5219 && TREE_CODE (rr_arg) == INTEGER_CST)
5220 l_const = lr_arg, r_const = rr_arg;
5221 else if (lr_inner == 0 || rr_inner == 0
5222 || ! operand_equal_p (lr_inner, rr_inner, 0))
5223 return 0;
5224 else
5225 l_const = r_const = 0;
5227 /* If either comparison code is not correct for our logical operation,
5228 fail. However, we can convert a one-bit comparison against zero into
5229 the opposite comparison against that bit being set in the field. */
5231 wanted_code = (code == TRUTH_AND_EXPR ? EQ_EXPR : NE_EXPR);
5232 if (lcode != wanted_code)
5234 if (l_const && integer_zerop (l_const) && integer_pow2p (ll_mask))
5236 /* Make the left operand unsigned, since we are only interested
5237 in the value of one bit. Otherwise we are doing the wrong
5238 thing below. */
5239 ll_unsignedp = 1;
5240 l_const = ll_mask;
5242 else
5243 return 0;
5246 /* This is analogous to the code for l_const above. */
5247 if (rcode != wanted_code)
5249 if (r_const && integer_zerop (r_const) && integer_pow2p (rl_mask))
5251 rl_unsignedp = 1;
5252 r_const = rl_mask;
5254 else
5255 return 0;
5258 /* See if we can find a mode that contains both fields being compared on
5259 the left. If we can't, fail. Otherwise, update all constants and masks
5260 to be relative to a field of that size. */
5261 first_bit = MIN (ll_bitpos, rl_bitpos);
5262 end_bit = MAX (ll_bitpos + ll_bitsize, rl_bitpos + rl_bitsize);
5263 lnmode = get_best_mode (end_bit - first_bit, first_bit, 0, 0,
5264 TYPE_ALIGN (TREE_TYPE (ll_inner)), word_mode,
5265 volatilep);
5266 if (lnmode == VOIDmode)
5267 return 0;
5269 lnbitsize = GET_MODE_BITSIZE (lnmode);
5270 lnbitpos = first_bit & ~ (lnbitsize - 1);
5271 lntype = lang_hooks.types.type_for_size (lnbitsize, 1);
5272 xll_bitpos = ll_bitpos - lnbitpos, xrl_bitpos = rl_bitpos - lnbitpos;
5274 if (BYTES_BIG_ENDIAN)
5276 xll_bitpos = lnbitsize - xll_bitpos - ll_bitsize;
5277 xrl_bitpos = lnbitsize - xrl_bitpos - rl_bitsize;
5280 ll_mask = const_binop (LSHIFT_EXPR, fold_convert_loc (loc, lntype, ll_mask),
5281 size_int (xll_bitpos));
5282 rl_mask = const_binop (LSHIFT_EXPR, fold_convert_loc (loc, lntype, rl_mask),
5283 size_int (xrl_bitpos));
5285 if (l_const)
5287 l_const = fold_convert_loc (loc, lntype, l_const);
5288 l_const = unextend (l_const, ll_bitsize, ll_unsignedp, ll_and_mask);
5289 l_const = const_binop (LSHIFT_EXPR, l_const, size_int (xll_bitpos));
5290 if (! integer_zerop (const_binop (BIT_AND_EXPR, l_const,
5291 fold_build1_loc (loc, BIT_NOT_EXPR,
5292 lntype, ll_mask))))
5294 warning (0, "comparison is always %d", wanted_code == NE_EXPR);
5296 return constant_boolean_node (wanted_code == NE_EXPR, truth_type);
5299 if (r_const)
5301 r_const = fold_convert_loc (loc, lntype, r_const);
5302 r_const = unextend (r_const, rl_bitsize, rl_unsignedp, rl_and_mask);
5303 r_const = const_binop (LSHIFT_EXPR, r_const, size_int (xrl_bitpos));
5304 if (! integer_zerop (const_binop (BIT_AND_EXPR, r_const,
5305 fold_build1_loc (loc, BIT_NOT_EXPR,
5306 lntype, rl_mask))))
5308 warning (0, "comparison is always %d", wanted_code == NE_EXPR);
5310 return constant_boolean_node (wanted_code == NE_EXPR, truth_type);
5314 /* If the right sides are not constant, do the same for them. Also,
5315 disallow this optimization if a size or signedness mismatch occurs
5316 between the left and right sides. */
5317 if (l_const == 0)
5319 if (ll_bitsize != lr_bitsize || rl_bitsize != rr_bitsize
5320 || ll_unsignedp != lr_unsignedp || rl_unsignedp != rr_unsignedp
5321 /* Make sure the two fields on the right
5322 correspond to the left without being swapped. */
5323 || ll_bitpos - rl_bitpos != lr_bitpos - rr_bitpos)
5324 return 0;
5326 first_bit = MIN (lr_bitpos, rr_bitpos);
5327 end_bit = MAX (lr_bitpos + lr_bitsize, rr_bitpos + rr_bitsize);
5328 rnmode = get_best_mode (end_bit - first_bit, first_bit, 0, 0,
5329 TYPE_ALIGN (TREE_TYPE (lr_inner)), word_mode,
5330 volatilep);
5331 if (rnmode == VOIDmode)
5332 return 0;
5334 rnbitsize = GET_MODE_BITSIZE (rnmode);
5335 rnbitpos = first_bit & ~ (rnbitsize - 1);
5336 rntype = lang_hooks.types.type_for_size (rnbitsize, 1);
5337 xlr_bitpos = lr_bitpos - rnbitpos, xrr_bitpos = rr_bitpos - rnbitpos;
5339 if (BYTES_BIG_ENDIAN)
5341 xlr_bitpos = rnbitsize - xlr_bitpos - lr_bitsize;
5342 xrr_bitpos = rnbitsize - xrr_bitpos - rr_bitsize;
5345 lr_mask = const_binop (LSHIFT_EXPR, fold_convert_loc (loc,
5346 rntype, lr_mask),
5347 size_int (xlr_bitpos));
5348 rr_mask = const_binop (LSHIFT_EXPR, fold_convert_loc (loc,
5349 rntype, rr_mask),
5350 size_int (xrr_bitpos));
5352 /* Make a mask that corresponds to both fields being compared.
5353 Do this for both items being compared. If the operands are the
5354 same size and the bits being compared are in the same position
5355 then we can do this by masking both and comparing the masked
5356 results. */
5357 ll_mask = const_binop (BIT_IOR_EXPR, ll_mask, rl_mask);
5358 lr_mask = const_binop (BIT_IOR_EXPR, lr_mask, rr_mask);
5359 if (lnbitsize == rnbitsize && xll_bitpos == xlr_bitpos)
5361 lhs = make_bit_field_ref (loc, ll_inner, lntype, lnbitsize, lnbitpos,
5362 ll_unsignedp || rl_unsignedp);
5363 if (! all_ones_mask_p (ll_mask, lnbitsize))
5364 lhs = build2 (BIT_AND_EXPR, lntype, lhs, ll_mask);
5366 rhs = make_bit_field_ref (loc, lr_inner, rntype, rnbitsize, rnbitpos,
5367 lr_unsignedp || rr_unsignedp);
5368 if (! all_ones_mask_p (lr_mask, rnbitsize))
5369 rhs = build2 (BIT_AND_EXPR, rntype, rhs, lr_mask);
5371 return build2_loc (loc, wanted_code, truth_type, lhs, rhs);
5374 /* There is still another way we can do something: If both pairs of
5375 fields being compared are adjacent, we may be able to make a wider
5376 field containing them both.
5378 Note that we still must mask the lhs/rhs expressions. Furthermore,
5379 the mask must be shifted to account for the shift done by
5380 make_bit_field_ref. */
5381 if ((ll_bitsize + ll_bitpos == rl_bitpos
5382 && lr_bitsize + lr_bitpos == rr_bitpos)
5383 || (ll_bitpos == rl_bitpos + rl_bitsize
5384 && lr_bitpos == rr_bitpos + rr_bitsize))
5386 tree type;
5388 lhs = make_bit_field_ref (loc, ll_inner, lntype,
5389 ll_bitsize + rl_bitsize,
5390 MIN (ll_bitpos, rl_bitpos), ll_unsignedp);
5391 rhs = make_bit_field_ref (loc, lr_inner, rntype,
5392 lr_bitsize + rr_bitsize,
5393 MIN (lr_bitpos, rr_bitpos), lr_unsignedp);
5395 ll_mask = const_binop (RSHIFT_EXPR, ll_mask,
5396 size_int (MIN (xll_bitpos, xrl_bitpos)));
5397 lr_mask = const_binop (RSHIFT_EXPR, lr_mask,
5398 size_int (MIN (xlr_bitpos, xrr_bitpos)));
5400 /* Convert to the smaller type before masking out unwanted bits. */
5401 type = lntype;
5402 if (lntype != rntype)
5404 if (lnbitsize > rnbitsize)
5406 lhs = fold_convert_loc (loc, rntype, lhs);
5407 ll_mask = fold_convert_loc (loc, rntype, ll_mask);
5408 type = rntype;
5410 else if (lnbitsize < rnbitsize)
5412 rhs = fold_convert_loc (loc, lntype, rhs);
5413 lr_mask = fold_convert_loc (loc, lntype, lr_mask);
5414 type = lntype;
5418 if (! all_ones_mask_p (ll_mask, ll_bitsize + rl_bitsize))
5419 lhs = build2 (BIT_AND_EXPR, type, lhs, ll_mask);
5421 if (! all_ones_mask_p (lr_mask, lr_bitsize + rr_bitsize))
5422 rhs = build2 (BIT_AND_EXPR, type, rhs, lr_mask);
5424 return build2_loc (loc, wanted_code, truth_type, lhs, rhs);
5427 return 0;
5430 /* Handle the case of comparisons with constants. If there is something in
5431 common between the masks, those bits of the constants must be the same.
5432 If not, the condition is always false. Test for this to avoid generating
5433 incorrect code below. */
5434 result = const_binop (BIT_AND_EXPR, ll_mask, rl_mask);
5435 if (! integer_zerop (result)
5436 && simple_cst_equal (const_binop (BIT_AND_EXPR, result, l_const),
5437 const_binop (BIT_AND_EXPR, result, r_const)) != 1)
5439 if (wanted_code == NE_EXPR)
5441 warning (0, "%<or%> of unmatched not-equal tests is always 1");
5442 return constant_boolean_node (true, truth_type);
5444 else
5446 warning (0, "%<and%> of mutually exclusive equal-tests is always 0");
5447 return constant_boolean_node (false, truth_type);
5451 /* Construct the expression we will return. First get the component
5452 reference we will make. Unless the mask is all ones for the width of
5453 that field, perform the mask operation. Then compare with the
5454 merged constant. */
5455 result = make_bit_field_ref (loc, ll_inner, lntype, lnbitsize, lnbitpos,
5456 ll_unsignedp || rl_unsignedp);
5458 ll_mask = const_binop (BIT_IOR_EXPR, ll_mask, rl_mask);
5459 if (! all_ones_mask_p (ll_mask, lnbitsize))
5460 result = build2_loc (loc, BIT_AND_EXPR, lntype, result, ll_mask);
5462 return build2_loc (loc, wanted_code, truth_type, result,
5463 const_binop (BIT_IOR_EXPR, l_const, r_const));
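/* A sketch of the constant case above, assuming a little-endian bit
   layout where both bit-fields share one byte: for

       struct s { unsigned a : 4; unsigned b : 4; } x;
       ... if (x.a == 3 && x.b == 5) ...

   the two bit-field loads are merged into a single byte load that is
   masked and compared against the merged constant (3 | 5 << 4),
   i.e. 0x53.  */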
5466 /* Optimize T, which is a comparison of a MIN_EXPR or MAX_EXPR with a
5467 constant. */
5469 static tree
5470 optimize_minmax_comparison (location_t loc, enum tree_code code, tree type,
5471 tree op0, tree op1)
5473 tree arg0 = op0;
5474 enum tree_code op_code;
5475 tree comp_const;
5476 tree minmax_const;
5477 int consts_equal, consts_lt;
5478 tree inner;
5480 STRIP_SIGN_NOPS (arg0);
5482 op_code = TREE_CODE (arg0);
5483 minmax_const = TREE_OPERAND (arg0, 1);
5484 comp_const = fold_convert_loc (loc, TREE_TYPE (arg0), op1);
5485 consts_equal = tree_int_cst_equal (minmax_const, comp_const);
5486 consts_lt = tree_int_cst_lt (minmax_const, comp_const);
5487 inner = TREE_OPERAND (arg0, 0);
5489 /* If something does not permit us to optimize, return NULL_TREE. */
5490 if ((op_code != MIN_EXPR && op_code != MAX_EXPR)
5491 || TREE_CODE (comp_const) != INTEGER_CST
5492 || TREE_OVERFLOW (comp_const)
5493 || TREE_CODE (minmax_const) != INTEGER_CST
5494 || TREE_OVERFLOW (minmax_const))
5495 return NULL_TREE;
5497 /* Now handle all the various comparison codes. We only handle EQ_EXPR
5498 and GT_EXPR, doing the rest with recursive calls using logical
5499 simplifications. */
5500 switch (code)
5502 case NE_EXPR: case LT_EXPR: case LE_EXPR:
5504 tree tem
5505 = optimize_minmax_comparison (loc,
5506 invert_tree_comparison (code, false),
5507 type, op0, op1);
5508 if (tem)
5509 return invert_truthvalue_loc (loc, tem);
5510 return NULL_TREE;
5513 case GE_EXPR:
5514 return
5515 fold_build2_loc (loc, TRUTH_ORIF_EXPR, type,
5516 optimize_minmax_comparison
5517 (loc, EQ_EXPR, type, arg0, comp_const),
5518 optimize_minmax_comparison
5519 (loc, GT_EXPR, type, arg0, comp_const));
5521 case EQ_EXPR:
5522 if (op_code == MAX_EXPR && consts_equal)
5523 /* MAX (X, 0) == 0 -> X <= 0 */
5524 return fold_build2_loc (loc, LE_EXPR, type, inner, comp_const);
5526 else if (op_code == MAX_EXPR && consts_lt)
5527 /* MAX (X, 0) == 5 -> X == 5 */
5528 return fold_build2_loc (loc, EQ_EXPR, type, inner, comp_const);
5530 else if (op_code == MAX_EXPR)
5531 /* MAX (X, 0) == -1 -> false */
5532 return omit_one_operand_loc (loc, type, integer_zero_node, inner);
5534 else if (consts_equal)
5535 /* MIN (X, 0) == 0 -> X >= 0 */
5536 return fold_build2_loc (loc, GE_EXPR, type, inner, comp_const);
5538 else if (consts_lt)
5539 /* MIN (X, 0) == 5 -> false */
5540 return omit_one_operand_loc (loc, type, integer_zero_node, inner);
5542 else
5543 /* MIN (X, 0) == -1 -> X == -1 */
5544 return fold_build2_loc (loc, EQ_EXPR, type, inner, comp_const);
5546 case GT_EXPR:
5547 if (op_code == MAX_EXPR && (consts_equal || consts_lt))
5548 /* MAX (X, 0) > 0 -> X > 0
5549 MAX (X, 0) > 5 -> X > 5 */
5550 return fold_build2_loc (loc, GT_EXPR, type, inner, comp_const);
5552 else if (op_code == MAX_EXPR)
5553 /* MAX (X, 0) > -1 -> true */
5554 return omit_one_operand_loc (loc, type, integer_one_node, inner);
5556 else if (op_code == MIN_EXPR && (consts_equal || consts_lt))
5557 /* MIN (X, 0) > 0 -> false
5558 MIN (X, 0) > 5 -> false */
5559 return omit_one_operand_loc (loc, type, integer_zero_node, inner);
5561 else
5562 /* MIN (X, 0) > -1 -> X > -1 */
5563 return fold_build2_loc (loc, GT_EXPR, type, inner, comp_const);
5565 default:
5566 return NULL_TREE;
5570 /* T is an integer expression that is being multiplied or divided by a
5571 constant C, or reduced modulo C (CODE says which operation and what
5572 kind of divide or modulus). See if we can eliminate that operation by folding it with
5573 other operations already in T. WIDE_TYPE, if non-null, is a type that
5574 should be used for the computation if wider than our type.
5576 For example, if we are dividing (X * 8) + (Y * 16) by 4, we can return
5577 (X * 2) + (Y * 4). We must, however, be assured that either the original
5578 expression would not overflow or that overflow is undefined for the type
5579 in the language in question.
5581 If we return a non-null expression, it is an equivalent form of the
5582 original computation, but need not be in the original type.
5584 We set *STRICT_OVERFLOW_P to true if the return value depends on
5585 signed overflow being undefined. Otherwise we do not change
5586 *STRICT_OVERFLOW_P. */
5588 static tree
5589 extract_muldiv (tree t, tree c, enum tree_code code, tree wide_type,
5590 bool *strict_overflow_p)
5592 /* To avoid exponential search depth, refuse to allow recursion past
5593 three levels. Beyond that (1) it's highly unlikely that we'll find
5594 something interesting and (2) we've probably processed it before
5595 when we built the inner expression. */
5597 static int depth;
5598 tree ret;
5600 if (depth > 3)
5601 return NULL;
5603 depth++;
5604 ret = extract_muldiv_1 (t, c, code, wide_type, strict_overflow_p);
5605 depth--;
5607 return ret;
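/* A sketch of what extract_muldiv can do, assuming signed overflow
   is treated as undefined in the type of X and Y:

       (x * 4 + 8) / 4 becomes x + 2
       (x * 8 + y * 16) / 4 becomes x * 2 + y * 4

   *STRICT_OVERFLOW_P is set in these cases, since the folds would be
   invalid if x * 4 or x * 8 could wrap.  */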
5610 static tree
5611 extract_muldiv_1 (tree t, tree c, enum tree_code code, tree wide_type,
5612 bool *strict_overflow_p)
5614 tree type = TREE_TYPE (t);
5615 enum tree_code tcode = TREE_CODE (t);
5616 tree ctype = (wide_type != 0 && (GET_MODE_SIZE (TYPE_MODE (wide_type))
5617 > GET_MODE_SIZE (TYPE_MODE (type)))
5618 ? wide_type : type);
5619 tree t1, t2;
5620 int same_p = tcode == code;
5621 tree op0 = NULL_TREE, op1 = NULL_TREE;
5622 bool sub_strict_overflow_p;
5624 /* Don't deal with constants of zero here; they confuse the code below. */
5625 if (integer_zerop (c))
5626 return NULL_TREE;
5628 if (TREE_CODE_CLASS (tcode) == tcc_unary)
5629 op0 = TREE_OPERAND (t, 0);
5631 if (TREE_CODE_CLASS (tcode) == tcc_binary)
5632 op0 = TREE_OPERAND (t, 0), op1 = TREE_OPERAND (t, 1);
5634 /* Note that we need not handle conditional operations here since fold
5635 already handles those cases. So just do arithmetic here. */
5636 switch (tcode)
5638 case INTEGER_CST:
5639 /* For a constant, we can always simplify if we are a multiply
5640 or (for divide and modulus) if it is a multiple of our constant. */
5641 if (code == MULT_EXPR
5642 || integer_zerop (const_binop (TRUNC_MOD_EXPR, t, c)))
5643 return const_binop (code, fold_convert (ctype, t),
5644 fold_convert (ctype, c));
5645 break;
5647 CASE_CONVERT: case NON_LVALUE_EXPR:
5648 /* If op0 is an expression ... */
5649 if ((COMPARISON_CLASS_P (op0)
5650 || UNARY_CLASS_P (op0)
5651 || BINARY_CLASS_P (op0)
5652 || VL_EXP_CLASS_P (op0)
5653 || EXPRESSION_CLASS_P (op0))
5654 /* ... and has wrapping overflow, and its type is smaller
5655 than ctype, then we cannot pass through as widening. */
5656 && ((TYPE_OVERFLOW_WRAPS (TREE_TYPE (op0))
5657 && (TYPE_PRECISION (ctype)
5658 > TYPE_PRECISION (TREE_TYPE (op0))))
5659 /* ... or this is a truncation (t is narrower than op0),
5660 then we cannot pass through this narrowing. */
5661 || (TYPE_PRECISION (type)
5662 < TYPE_PRECISION (TREE_TYPE (op0)))
5663 /* ... or signedness changes for division or modulus,
5664 then we cannot pass through this conversion. */
5665 || (code != MULT_EXPR
5666 && (TYPE_UNSIGNED (ctype)
5667 != TYPE_UNSIGNED (TREE_TYPE (op0))))
5668 /* ... or the inner type has undefined overflow while the type
5669 converted to does not, in which case we cannot do the operation
5670 in the inner type, as that would introduce undefined overflow. */
5671 || (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (op0))
5672 && !TYPE_OVERFLOW_UNDEFINED (type))))
5673 break;
5675 /* Pass the constant down and see if we can make a simplification. If
5676 we can, replace this expression with the inner simplification for
5677 possible later conversion to our or some other type. */
5678 if ((t2 = fold_convert (TREE_TYPE (op0), c)) != 0
5679 && TREE_CODE (t2) == INTEGER_CST
5680 && !TREE_OVERFLOW (t2)
5681 && (0 != (t1 = extract_muldiv (op0, t2, code,
5682 code == MULT_EXPR
5683 ? ctype : NULL_TREE,
5684 strict_overflow_p))))
5685 return t1;
5686 break;
5688 case ABS_EXPR:
5689 /* If widening the type changes it from signed to unsigned, then we
5690 must avoid building ABS_EXPR itself as unsigned. */
5691 if (TYPE_UNSIGNED (ctype) && !TYPE_UNSIGNED (type))
5693 tree cstype = (*signed_type_for) (ctype);
5694 if ((t1 = extract_muldiv (op0, c, code, cstype, strict_overflow_p))
5695 != 0)
5697 t1 = fold_build1 (tcode, cstype, fold_convert (cstype, t1));
5698 return fold_convert (ctype, t1);
5700 break;
5702 /* If the constant is negative, we cannot simplify this. */
5703 if (tree_int_cst_sgn (c) == -1)
5704 break;
5705 /* FALLTHROUGH */
5706 case NEGATE_EXPR:
5707 if ((t1 = extract_muldiv (op0, c, code, wide_type, strict_overflow_p))
5708 != 0)
5709 return fold_build1 (tcode, ctype, fold_convert (ctype, t1));
5710 break;
5712 case MIN_EXPR: case MAX_EXPR:
5713 /* If widening the type changes the signedness, then we can't perform
5714 this optimization as that changes the result. */
5715 if (TYPE_UNSIGNED (ctype) != TYPE_UNSIGNED (type))
5716 break;
5718 /* MIN (a, b) / 5 -> MIN (a / 5, b / 5) */
5719 sub_strict_overflow_p = false;
5720 if ((t1 = extract_muldiv (op0, c, code, wide_type,
5721 &sub_strict_overflow_p)) != 0
5722 && (t2 = extract_muldiv (op1, c, code, wide_type,
5723 &sub_strict_overflow_p)) != 0)
5725 if (tree_int_cst_sgn (c) < 0)
5726 tcode = (tcode == MIN_EXPR ? MAX_EXPR : MIN_EXPR);
5727 if (sub_strict_overflow_p)
5728 *strict_overflow_p = true;
5729 return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
5730 fold_convert (ctype, t2));
5732 break;
5734 case LSHIFT_EXPR: case RSHIFT_EXPR:
5735 /* If the second operand is constant, this is a multiplication
5736 or floor division by a power of two, so we can treat it that
5737 way unless the multiplier or divisor overflows. Signed
5738 left-shift overflow is implementation-defined rather than
5739 undefined in C90, so do not convert signed left shift into
5740 multiplication. */
5741 if (TREE_CODE (op1) == INTEGER_CST
5742 && (tcode == RSHIFT_EXPR || TYPE_UNSIGNED (TREE_TYPE (op0)))
5743 /* const_binop may not detect overflow correctly,
5744 so check for it explicitly here. */
5745 && TYPE_PRECISION (TREE_TYPE (size_one_node)) > TREE_INT_CST_LOW (op1)
5746 && TREE_INT_CST_HIGH (op1) == 0
5747 && 0 != (t1 = fold_convert (ctype,
5748 const_binop (LSHIFT_EXPR,
5749 size_one_node,
5750 op1)))
5751 && !TREE_OVERFLOW (t1))
5752 return extract_muldiv (build2 (tcode == LSHIFT_EXPR
5753 ? MULT_EXPR : FLOOR_DIV_EXPR,
5754 ctype,
5755 fold_convert (ctype, op0),
5756 t1),
5757 c, code, wide_type, strict_overflow_p);
5758 break;
5760 case PLUS_EXPR: case MINUS_EXPR:
5761 /* See if we can eliminate the operation on both sides. If we can, we
5762 can return a new PLUS or MINUS. If we can't, the only remaining
5763 cases where we can do anything are if the second operand is a
5764 constant. */
5765 sub_strict_overflow_p = false;
5766 t1 = extract_muldiv (op0, c, code, wide_type, &sub_strict_overflow_p);
5767 t2 = extract_muldiv (op1, c, code, wide_type, &sub_strict_overflow_p);
5768 if (t1 != 0 && t2 != 0
5769 && (code == MULT_EXPR
5770 /* If not multiplication, we can only do this if both operands
5771 are divisible by c. */
5772 || (multiple_of_p (ctype, op0, c)
5773 && multiple_of_p (ctype, op1, c))))
5775 if (sub_strict_overflow_p)
5776 *strict_overflow_p = true;
5777 return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
5778 fold_convert (ctype, t2));
5781 /* If this was a subtraction, negate OP1 and set it to be an addition.
5782 This simplifies the logic below. */
5783 if (tcode == MINUS_EXPR)
5785 tcode = PLUS_EXPR, op1 = negate_expr (op1);
5786 /* If OP1 was not easily negatable, the constant may be OP0. */
5787 if (TREE_CODE (op0) == INTEGER_CST)
5789 tree tem = op0;
5790 op0 = op1;
5791 op1 = tem;
5792 tem = t1;
5793 t1 = t2;
5794 t2 = tem;
5798 if (TREE_CODE (op1) != INTEGER_CST)
5799 break;
5801 /* If either OP1 or C is negative, this optimization is not safe for
5802 some of the division and remainder types while for others we need
5803 to change the code. */
5804 if (tree_int_cst_sgn (op1) < 0 || tree_int_cst_sgn (c) < 0)
5806 if (code == CEIL_DIV_EXPR)
5807 code = FLOOR_DIV_EXPR;
5808 else if (code == FLOOR_DIV_EXPR)
5809 code = CEIL_DIV_EXPR;
5810 else if (code != MULT_EXPR
5811 && code != CEIL_MOD_EXPR && code != FLOOR_MOD_EXPR)
5812 break;
5815 /* If it's a multiply or a division/modulus operation of a multiple
5816 of our constant, do the operation and verify it doesn't overflow. */
5817 if (code == MULT_EXPR
5818 || integer_zerop (const_binop (TRUNC_MOD_EXPR, op1, c)))
5820 op1 = const_binop (code, fold_convert (ctype, op1),
5821 fold_convert (ctype, c));
5822 /* We allow the constant to overflow with wrapping semantics. */
5823 if (op1 == 0
5824 || (TREE_OVERFLOW (op1) && !TYPE_OVERFLOW_WRAPS (ctype)))
5825 break;
5827 else
5828 break;
5830 /* If we have an unsigned type that is not a sizetype, we cannot widen
5831 the operation since it will change the result if the original
5832 computation overflowed. */
5833 if (TYPE_UNSIGNED (ctype)
5834 && ctype != type)
5835 break;
5837 /* If we were able to eliminate our operation from the first side,
5838 apply our operation to the second side and reform the PLUS. */
5839 if (t1 != 0 && (TREE_CODE (t1) != code || code == MULT_EXPR))
5840 return fold_build2 (tcode, ctype, fold_convert (ctype, t1), op1);
5842 /* The last case is when this is a multiplication. In that case, we can
5843 apply the distributive law to commute the multiply and addition
5844 if the multiplication of the constants doesn't overflow. */
5845 if (code == MULT_EXPR)
5846 return fold_build2 (tcode, ctype,
5847 fold_build2 (code, ctype,
5848 fold_convert (ctype, op0),
5849 fold_convert (ctype, c)),
5850 op1);
5852 break;
5854 case MULT_EXPR:
5855 /* We have a special case here if we are doing something like
5856 (C * 8) % 4 since we know that's zero. */
5857 if ((code == TRUNC_MOD_EXPR || code == CEIL_MOD_EXPR
5858 || code == FLOOR_MOD_EXPR || code == ROUND_MOD_EXPR)
5859 /* If the multiplication can overflow we cannot optimize this. */
5860 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (t))
5861 && TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST
5862 && integer_zerop (const_binop (TRUNC_MOD_EXPR, op1, c)))
5864 *strict_overflow_p = true;
5865 return omit_one_operand (type, integer_zero_node, op0);
5868 /* ... fall through ... */
5870 case TRUNC_DIV_EXPR: case CEIL_DIV_EXPR: case FLOOR_DIV_EXPR:
5871 case ROUND_DIV_EXPR: case EXACT_DIV_EXPR:
5872 /* If we can extract our operation from the LHS, do so and return a
5873 new operation. Likewise for the RHS from a MULT_EXPR. Otherwise,
5874 do something only if the second operand is a constant. */
5875 if (same_p
5876 && (t1 = extract_muldiv (op0, c, code, wide_type,
5877 strict_overflow_p)) != 0)
5878 return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
5879 fold_convert (ctype, op1));
5880 else if (tcode == MULT_EXPR && code == MULT_EXPR
5881 && (t1 = extract_muldiv (op1, c, code, wide_type,
5882 strict_overflow_p)) != 0)
5883 return fold_build2 (tcode, ctype, fold_convert (ctype, op0),
5884 fold_convert (ctype, t1));
5885 else if (TREE_CODE (op1) != INTEGER_CST)
5886 return 0;
5888 /* If these are the same operation types, we can associate them
5889 assuming no overflow. */
5890 if (tcode == code)
5892 double_int mul;
5893 int overflow_p;
5894 mul = double_int_mul_with_sign
5895 (double_int_ext
5896 (tree_to_double_int (op1),
5897 TYPE_PRECISION (ctype), TYPE_UNSIGNED (ctype)),
5898 double_int_ext
5899 (tree_to_double_int (c),
5900 TYPE_PRECISION (ctype), TYPE_UNSIGNED (ctype)),
5901 false, &overflow_p);
5902 overflow_p = ((!TYPE_UNSIGNED (ctype) && overflow_p)
5903 | TREE_OVERFLOW (c) | TREE_OVERFLOW (op1));
5904 if (!double_int_fits_to_tree_p (ctype, mul)
5905 && ((TYPE_UNSIGNED (ctype) && tcode != MULT_EXPR)
5906 || !TYPE_UNSIGNED (ctype)))
5907 overflow_p = 1;
5908 if (!overflow_p)
5909 return fold_build2 (tcode, ctype, fold_convert (ctype, op0),
5910 double_int_to_tree (ctype, mul));
5913 /* If these operations "cancel" each other, we have the main
5914 optimizations of this pass, which occur when either constant is a
5915 multiple of the other, in which case we replace this with either an
5916 operation of CODE or TCODE.
5918 If we have an unsigned type, we cannot do this since it will change
5919 the result if the original computation overflowed. */
5920 if (TYPE_OVERFLOW_UNDEFINED (ctype)
5921 && ((code == MULT_EXPR && tcode == EXACT_DIV_EXPR)
5922 || (tcode == MULT_EXPR
5923 && code != TRUNC_MOD_EXPR && code != CEIL_MOD_EXPR
5924 && code != FLOOR_MOD_EXPR && code != ROUND_MOD_EXPR
5925 && code != MULT_EXPR)))
5927 if (integer_zerop (const_binop (TRUNC_MOD_EXPR, op1, c)))
5929 if (TYPE_OVERFLOW_UNDEFINED (ctype))
5930 *strict_overflow_p = true;
5931 return fold_build2 (tcode, ctype, fold_convert (ctype, op0),
5932 fold_convert (ctype,
5933 const_binop (TRUNC_DIV_EXPR,
5934 op1, c)));
5936 else if (integer_zerop (const_binop (TRUNC_MOD_EXPR, c, op1)))
5938 if (TYPE_OVERFLOW_UNDEFINED (ctype))
5939 *strict_overflow_p = true;
5940 return fold_build2 (code, ctype, fold_convert (ctype, op0),
5941 fold_convert (ctype,
5942 const_binop (TRUNC_DIV_EXPR,
5943 c, op1)));
5946 break;
5948 default:
5949 break;
5952 return 0;
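/* A sketch of the "cancel" case above, again assuming signed
   overflow is undefined in CTYPE:

       (x * 12) / 4 becomes x * 3 (op1 is a multiple of c)
       (x * 4) / 12 becomes x / 3 (c is a multiple of op1)

   Both folds set *STRICT_OVERFLOW_P, since they are invalid when the
   inner multiplication wraps.  */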
5955 /* Return a node which has the indicated constant VALUE (either 0 or
5956 1 for scalars or {-1,-1,..} or {0,0,...} for vectors),
5957 and is of the indicated TYPE. */
5959 tree
5960 constant_boolean_node (bool value, tree type)
5962 if (type == integer_type_node)
5963 return value ? integer_one_node : integer_zero_node;
5964 else if (type == boolean_type_node)
5965 return value ? boolean_true_node : boolean_false_node;
5966 else if (TREE_CODE (type) == VECTOR_TYPE)
5967 return build_vector_from_val (type,
5968 build_int_cst (TREE_TYPE (type),
5969 value ? -1 : 0));
5970 else
5971 return fold_convert (type, value ? integer_one_node : integer_zero_node);
5975 /* Transform `a + (b ? x : y)' into `b ? (a + x) : (a + y)'.
5976 Transform `a + (x < y)' into `(x < y) ? (a + 1) : (a + 0)'. Here
5977 CODE corresponds to the `+', COND to the `(b ? x : y)' or `(x < y)'
5978 expression, and ARG to `a'. If COND_FIRST_P is nonzero, then the
5979 COND is the first argument to CODE; otherwise (as in the example
5980 given here), it is the second argument. TYPE is the type of the
5981 original expression. Return NULL_TREE if no simplification is
5982 possible. */
5984 static tree
5985 fold_binary_op_with_conditional_arg (location_t loc,
5986 enum tree_code code,
5987 tree type, tree op0, tree op1,
5988 tree cond, tree arg, int cond_first_p)
5990 tree cond_type = cond_first_p ? TREE_TYPE (op0) : TREE_TYPE (op1);
5991 tree arg_type = cond_first_p ? TREE_TYPE (op1) : TREE_TYPE (op0);
5992 tree test, true_value, false_value;
5993 tree lhs = NULL_TREE;
5994 tree rhs = NULL_TREE;
5996 if (TREE_CODE (cond) == COND_EXPR)
5998 test = TREE_OPERAND (cond, 0);
5999 true_value = TREE_OPERAND (cond, 1);
6000 false_value = TREE_OPERAND (cond, 2);
6001 /* If this operand is a throw expression, then it does not make
6002 sense to try to perform a logical or arithmetic operation
6003 involving it. */
6004 if (VOID_TYPE_P (TREE_TYPE (true_value)))
6005 lhs = true_value;
6006 if (VOID_TYPE_P (TREE_TYPE (false_value)))
6007 rhs = false_value;
6009 else
6011 tree testtype = TREE_TYPE (cond);
6012 test = cond;
6013 true_value = constant_boolean_node (true, testtype);
6014 false_value = constant_boolean_node (false, testtype);
6017 /* This transformation is only worthwhile if we don't have to wrap ARG
6018 in a SAVE_EXPR and the operation can be simplified on at least one
6019 of the branches once it's pushed inside the COND_EXPR. */
6020 if (!TREE_CONSTANT (arg)
6021 && (TREE_SIDE_EFFECTS (arg)
6022 || TREE_CONSTANT (true_value) || TREE_CONSTANT (false_value)))
6023 return NULL_TREE;
6025 arg = fold_convert_loc (loc, arg_type, arg);
6026 if (lhs == 0)
6028 true_value = fold_convert_loc (loc, cond_type, true_value);
6029 if (cond_first_p)
6030 lhs = fold_build2_loc (loc, code, type, true_value, arg);
6031 else
6032 lhs = fold_build2_loc (loc, code, type, arg, true_value);
6034 if (rhs == 0)
6036 false_value = fold_convert_loc (loc, cond_type, false_value);
6037 if (cond_first_p)
6038 rhs = fold_build2_loc (loc, code, type, false_value, arg);
6039 else
6040 rhs = fold_build2_loc (loc, code, type, arg, false_value);
6043 /* Check that we have simplified at least one of the branches. */
6044 if (!TREE_CONSTANT (arg) && !TREE_CONSTANT (lhs) && !TREE_CONSTANT (rhs))
6045 return NULL_TREE;
6047 return fold_build3_loc (loc, COND_EXPR, type, test, lhs, rhs);
6051 /* Subroutine of fold() that checks for the addition of +/- 0.0.
6053 If !NEGATE, return true if ADDEND is +/-0.0 and, for all X of type
6054 TYPE, X + ADDEND is the same as X. If NEGATE, return true if X -
6055 ADDEND is the same as X.
6057 X + 0 and X - 0 both give X when X is NaN, infinite, or nonzero
6058 and finite. The problematic cases are when X is zero, and its mode
6059 has signed zeros. In the case of rounding towards -infinity,
6060 X - 0 is not the same as X because 0 - 0 is -0. In other rounding
6061 modes, X + 0 is not the same as X because -0 + 0 is 0. */
6063 bool
6064 fold_real_zero_addition_p (const_tree type, const_tree addend, int negate)
6066 if (!real_zerop (addend))
6067 return false;
6069 /* Don't allow the fold with -fsignaling-nans. */
6070 if (HONOR_SNANS (TYPE_MODE (type)))
6071 return false;
6073 /* Allow the fold if zeros aren't signed, or their sign isn't important. */
6074 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (type)))
6075 return true;
6077 /* Treat x + -0 as x - 0 and x - -0 as x + 0. */
6078 if (TREE_CODE (addend) == REAL_CST
6079 && REAL_VALUE_MINUS_ZERO (TREE_REAL_CST (addend)))
6080 negate = !negate;
6082 /* The mode has signed zeros, and we have to honor their sign.
6083 In this situation, there is only one case we can return true for.
6084 X - 0 is the same as X unless rounding towards -infinity is
6085 supported. */
6086 return negate && !HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type));
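/* A sketch of the resulting behaviour, assuming default rounding:
   with signed zeros honored, only x - 0.0 folds to x (x + 0.0 must
   be kept, since -0.0 + 0.0 is +0.0), while under -fno-signed-zeros
   both x + 0.0 and x - 0.0 fold to x.  */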
6089 /* Subroutine of fold() that checks comparisons of built-in math
6090 functions against real constants.
6092 FCODE is the DECL_FUNCTION_CODE of the built-in, CODE is the comparison
6093 operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR, GE_EXPR or LE_EXPR. TYPE
6094 is the type of the result and ARG0 and ARG1 are the operands of the
6095 comparison. ARG1 must be a TREE_REAL_CST.
6097 The function returns the constant folded tree if a simplification
6098 can be made, and NULL_TREE otherwise. */
6100 static tree
6101 fold_mathfn_compare (location_t loc,
6102 enum built_in_function fcode, enum tree_code code,
6103 tree type, tree arg0, tree arg1)
6105 REAL_VALUE_TYPE c;
6107 if (BUILTIN_SQRT_P (fcode))
6109 tree arg = CALL_EXPR_ARG (arg0, 0);
6110 enum machine_mode mode = TYPE_MODE (TREE_TYPE (arg0));
6112 c = TREE_REAL_CST (arg1);
6113 if (REAL_VALUE_NEGATIVE (c))
6115 /* sqrt(x) < y is always false, if y is negative. */
6116 if (code == EQ_EXPR || code == LT_EXPR || code == LE_EXPR)
6117 return omit_one_operand_loc (loc, type, integer_zero_node, arg);
6119 /* sqrt(x) > y is always true, if y is negative and we
6120 don't care about NaNs, i.e. negative values of x. */
6121 if (code == NE_EXPR || !HONOR_NANS (mode))
6122 return omit_one_operand_loc (loc, type, integer_one_node, arg);
6124 /* sqrt(x) > y is the same as x >= 0, if y is negative. */
6125 return fold_build2_loc (loc, GE_EXPR, type, arg,
6126 build_real (TREE_TYPE (arg), dconst0));
6128 else if (code == GT_EXPR || code == GE_EXPR)
6130 REAL_VALUE_TYPE c2;
6132 REAL_ARITHMETIC (c2, MULT_EXPR, c, c);
6133 real_convert (&c2, mode, &c2);
6135 if (REAL_VALUE_ISINF (c2))
6137 /* sqrt(x) > y is x == +Inf, when y is very large. */
6138 if (HONOR_INFINITIES (mode))
6139 return fold_build2_loc (loc, EQ_EXPR, type, arg,
6140 build_real (TREE_TYPE (arg), c2));
6142 /* sqrt(x) > y is always false, when y is very large
6143 and we don't care about infinities. */
6144 return omit_one_operand_loc (loc, type, integer_zero_node, arg);
6147 /* sqrt(x) > c is the same as x > c*c. */
6148 return fold_build2_loc (loc, code, type, arg,
6149 build_real (TREE_TYPE (arg), c2));
6151 else if (code == LT_EXPR || code == LE_EXPR)
6153 REAL_VALUE_TYPE c2;
6155 REAL_ARITHMETIC (c2, MULT_EXPR, c, c);
6156 real_convert (&c2, mode, &c2);
6158 if (REAL_VALUE_ISINF (c2))
6160 /* sqrt(x) < y is always true, when y is a very large
6161 value and we don't care about NaNs or Infinities. */
6162 if (! HONOR_NANS (mode) && ! HONOR_INFINITIES (mode))
6163 return omit_one_operand_loc (loc, type, integer_one_node, arg);
6165 /* sqrt(x) < y is x != +Inf when y is very large and we
6166 don't care about NaNs. */
6167 if (! HONOR_NANS (mode))
6168 return fold_build2_loc (loc, NE_EXPR, type, arg,
6169 build_real (TREE_TYPE (arg), c2));
6171 /* sqrt(x) < y is x >= 0 when y is very large and we
6172 don't care about Infinities. */
6173 if (! HONOR_INFINITIES (mode))
6174 return fold_build2_loc (loc, GE_EXPR, type, arg,
6175 build_real (TREE_TYPE (arg), dconst0));
6177 /* sqrt(x) < y is x >= 0 && x != +Inf, when y is large. */
6178 arg = save_expr (arg);
6179 return fold_build2_loc (loc, TRUTH_ANDIF_EXPR, type,
6180 fold_build2_loc (loc, GE_EXPR, type, arg,
6181 build_real (TREE_TYPE (arg),
6182 dconst0)),
6183 fold_build2_loc (loc, NE_EXPR, type, arg,
6184 build_real (TREE_TYPE (arg),
6185 c2)));
6188 /* sqrt(x) < c is the same as x < c*c, if we ignore NaNs. */
6189 if (! HONOR_NANS (mode))
6190 return fold_build2_loc (loc, code, type, arg,
6191 build_real (TREE_TYPE (arg), c2));
6193 /* sqrt(x) < c is the same as x >= 0 && x < c*c. */
6194 arg = save_expr (arg);
6195 return fold_build2_loc (loc, TRUTH_ANDIF_EXPR, type,
6196 fold_build2_loc (loc, GE_EXPR, type, arg,
6197 build_real (TREE_TYPE (arg),
6198 dconst0)),
6199 fold_build2_loc (loc, code, type, arg,
6200 build_real (TREE_TYPE (arg),
6201 c2)));
6205 return NULL_TREE;
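/* A sketch of the sqrt cases above, for double x:

       sqrt (x) > 2.0 becomes x > 4.0
       sqrt (x) < 2.0 becomes x >= 0.0 && x < 4.0
                      (just x < 4.0 if NaNs need not be honored)
       sqrt (x) > -1.0 becomes x >= 0.0 when NaNs are honored  */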
6208 /* Subroutine of fold() that optimizes comparisons against Infinities,
6209 either +Inf or -Inf.
6211 CODE is the comparison operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR,
6212 GE_EXPR or LE_EXPR. TYPE is the type of the result and ARG0 and ARG1
6213 are the operands of the comparison. ARG1 must be a TREE_REAL_CST.
6215 The function returns the constant folded tree if a simplification
6216 can be made, and NULL_TREE otherwise. */
6218 static tree
6219 fold_inf_compare (location_t loc, enum tree_code code, tree type,
6220 tree arg0, tree arg1)
6222 enum machine_mode mode;
6223 REAL_VALUE_TYPE max;
6224 tree temp;
6225 bool neg;
6227 mode = TYPE_MODE (TREE_TYPE (arg0));
6229 /* For negative infinity swap the sense of the comparison. */
6230 neg = REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg1));
6231 if (neg)
6232 code = swap_tree_comparison (code);
6234 switch (code)
6236 case GT_EXPR:
6237 /* x > +Inf is always false, if we ignore sNaNs. */
6238 if (HONOR_SNANS (mode))
6239 return NULL_TREE;
6240 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
6242 case LE_EXPR:
6243 /* x <= +Inf is always true, if we don't care about NaNs. */
6244 if (! HONOR_NANS (mode))
6245 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
6247 /* x <= +Inf is the same as x == x, i.e. !isnan(x). */
6248 arg0 = save_expr (arg0);
6249 return fold_build2_loc (loc, EQ_EXPR, type, arg0, arg0);
6251 case EQ_EXPR:
6252 case GE_EXPR:
6253 /* x == +Inf and x >= +Inf are always equal to x > DBL_MAX. */
6254 real_maxval (&max, neg, mode);
6255 return fold_build2_loc (loc, neg ? LT_EXPR : GT_EXPR, type,
6256 arg0, build_real (TREE_TYPE (arg0), max));
6258 case LT_EXPR:
6259 /* x < +Inf is always equal to x <= DBL_MAX. */
6260 real_maxval (&max, neg, mode);
6261 return fold_build2_loc (loc, neg ? GE_EXPR : LE_EXPR, type,
6262 arg0, build_real (TREE_TYPE (arg0), max));
6264 case NE_EXPR:
6265 /* x != +Inf is always equal to !(x > DBL_MAX). */
6266 real_maxval (&max, neg, mode);
6267 if (! HONOR_NANS (mode))
6268 return fold_build2_loc (loc, neg ? GE_EXPR : LE_EXPR, type,
6269 arg0, build_real (TREE_TYPE (arg0), max));
6271 temp = fold_build2_loc (loc, neg ? LT_EXPR : GT_EXPR, type,
6272 arg0, build_real (TREE_TYPE (arg0), max));
6273 return fold_build1_loc (loc, TRUTH_NOT_EXPR, type, temp);
6275 default:
6276 break;
6279 return NULL_TREE;
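/* A sketch for double x, DBL_MAX being the largest finite value:

       x < +Inf becomes x <= DBL_MAX
       x >= +Inf becomes x > DBL_MAX
       x > +Inf becomes false (unless sNaNs must be honored)

   Comparisons against -Inf are handled by first swapping the sense
   of the comparison.  */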
6282 /* Subroutine of fold() that optimizes comparisons of a division by
6283 a nonzero integer constant against an integer constant, i.e.
6284 X/C1 op C2.
6286 CODE is the comparison operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR,
6287 GE_EXPR or LE_EXPR. TYPE is the type of the result and ARG0 and ARG1
6288 are the operands of the comparison. ARG1 must be an INTEGER_CST.
6290 The function returns the constant folded tree if a simplification
6291 can be made, and NULL_TREE otherwise. */
6293 static tree
6294 fold_div_compare (location_t loc,
6295 enum tree_code code, tree type, tree arg0, tree arg1)
6297 tree prod, tmp, hi, lo;
6298 tree arg00 = TREE_OPERAND (arg0, 0);
6299 tree arg01 = TREE_OPERAND (arg0, 1);
6300 double_int val;
6301 bool unsigned_p = TYPE_UNSIGNED (TREE_TYPE (arg0));
6302 bool neg_overflow;
6303 int overflow;
6305 /* We have to do this the hard way to detect unsigned overflow.
6306 prod = int_const_binop (MULT_EXPR, arg01, arg1); */
6307 overflow = mul_double_with_sign (TREE_INT_CST_LOW (arg01),
6308 TREE_INT_CST_HIGH (arg01),
6309 TREE_INT_CST_LOW (arg1),
6310 TREE_INT_CST_HIGH (arg1),
6311 &val.low, &val.high, unsigned_p);
6312 prod = force_fit_type_double (TREE_TYPE (arg00), val, -1, overflow);
6313 neg_overflow = false;
6315 if (unsigned_p)
6317 tmp = int_const_binop (MINUS_EXPR, arg01,
6318 build_int_cst (TREE_TYPE (arg01), 1));
6319 lo = prod;
6321 /* Likewise hi = int_const_binop (PLUS_EXPR, prod, tmp). */
6322 overflow = add_double_with_sign (TREE_INT_CST_LOW (prod),
6323 TREE_INT_CST_HIGH (prod),
6324 TREE_INT_CST_LOW (tmp),
6325 TREE_INT_CST_HIGH (tmp),
6326 &val.low, &val.high, unsigned_p);
6327 hi = force_fit_type_double (TREE_TYPE (arg00), val,
6328 -1, overflow | TREE_OVERFLOW (prod));
6330 else if (tree_int_cst_sgn (arg01) >= 0)
6332 tmp = int_const_binop (MINUS_EXPR, arg01,
6333 build_int_cst (TREE_TYPE (arg01), 1));
6334 switch (tree_int_cst_sgn (arg1))
6336 case -1:
6337 neg_overflow = true;
6338 lo = int_const_binop (MINUS_EXPR, prod, tmp);
6339 hi = prod;
6340 break;
6342 case 0:
6343 lo = fold_negate_const (tmp, TREE_TYPE (arg0));
6344 hi = tmp;
6345 break;
6347 case 1:
6348 hi = int_const_binop (PLUS_EXPR, prod, tmp);
6349 lo = prod;
6350 break;
6352 default:
6353 gcc_unreachable ();
6356 else
6358 /* A negative divisor reverses the relational operators. */
6359 code = swap_tree_comparison (code);
6361 tmp = int_const_binop (PLUS_EXPR, arg01,
6362 build_int_cst (TREE_TYPE (arg01), 1));
6363 switch (tree_int_cst_sgn (arg1))
6365 case -1:
6366 hi = int_const_binop (MINUS_EXPR, prod, tmp);
6367 lo = prod;
6368 break;
6370 case 0:
6371 hi = fold_negate_const (tmp, TREE_TYPE (arg0));
6372 lo = tmp;
6373 break;
6375 case 1:
6376 neg_overflow = true;
6377 lo = int_const_binop (PLUS_EXPR, prod, tmp);
6378 hi = prod;
6379 break;
6381 default:
6382 gcc_unreachable ();
6386 switch (code)
6388 case EQ_EXPR:
6389 if (TREE_OVERFLOW (lo) && TREE_OVERFLOW (hi))
6390 return omit_one_operand_loc (loc, type, integer_zero_node, arg00);
6391 if (TREE_OVERFLOW (hi))
6392 return fold_build2_loc (loc, GE_EXPR, type, arg00, lo);
6393 if (TREE_OVERFLOW (lo))
6394 return fold_build2_loc (loc, LE_EXPR, type, arg00, hi);
6395 return build_range_check (loc, type, arg00, 1, lo, hi);
6397 case NE_EXPR:
6398 if (TREE_OVERFLOW (lo) && TREE_OVERFLOW (hi))
6399 return omit_one_operand_loc (loc, type, integer_one_node, arg00);
6400 if (TREE_OVERFLOW (hi))
6401 return fold_build2_loc (loc, LT_EXPR, type, arg00, lo);
6402 if (TREE_OVERFLOW (lo))
6403 return fold_build2_loc (loc, GT_EXPR, type, arg00, hi);
6404 return build_range_check (loc, type, arg00, 0, lo, hi);
6406 case LT_EXPR:
6407 if (TREE_OVERFLOW (lo))
6409 tmp = neg_overflow ? integer_zero_node : integer_one_node;
6410 return omit_one_operand_loc (loc, type, tmp, arg00);
6412 return fold_build2_loc (loc, LT_EXPR, type, arg00, lo);
6414 case LE_EXPR:
6415 if (TREE_OVERFLOW (hi))
6417 tmp = neg_overflow ? integer_zero_node : integer_one_node;
6418 return omit_one_operand_loc (loc, type, tmp, arg00);
6420 return fold_build2_loc (loc, LE_EXPR, type, arg00, hi);
6422 case GT_EXPR:
6423 if (TREE_OVERFLOW (hi))
6425 tmp = neg_overflow ? integer_one_node : integer_zero_node;
6426 return omit_one_operand_loc (loc, type, tmp, arg00);
6428 return fold_build2_loc (loc, GT_EXPR, type, arg00, hi);
6430 case GE_EXPR:
6431 if (TREE_OVERFLOW (lo))
6433 tmp = neg_overflow ? integer_one_node : integer_zero_node;
6434 return omit_one_operand_loc (loc, type, tmp, arg00);
6436 return fold_build2_loc (loc, GE_EXPR, type, arg00, lo);
6438 default:
6439 break;
6442 return NULL_TREE;
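/* A sketch for signed x with C1 = 3 and C2 = 2: x / 3 == 2 holds
   exactly for x in [6, 8], so it becomes the range check
   6 <= x && x <= 8, while x / 3 < 2 becomes x < 6.  */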
6446 /* If CODE with arguments ARG0 and ARG1 represents a single bit
6447 equality/inequality test, then return a simplified form of the test
6448 using a sign test. Otherwise return NULL. RESULT_TYPE is the desired
6449 result type. */
6451 static tree
6452 fold_single_bit_test_into_sign_test (location_t loc,
6453 enum tree_code code, tree arg0, tree arg1,
6454 tree result_type)
6456 /* If this is testing a single bit, we can optimize the test. */
6457 if ((code == NE_EXPR || code == EQ_EXPR)
6458 && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
6459 && integer_pow2p (TREE_OPERAND (arg0, 1)))
6461 /* If we have (A & C) != 0 where C is the sign bit of A, convert
6462 this into A < 0. Similarly for (A & C) == 0 into A >= 0. */
6463 tree arg00 = sign_bit_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg0, 1));
6465 if (arg00 != NULL_TREE
6466 /* This is only a win if casting to a signed type is cheap,
6467 i.e. when arg00's type is not a partial mode. */
6468 && TYPE_PRECISION (TREE_TYPE (arg00))
6469 == GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (arg00))))
6471 tree stype = signed_type_for (TREE_TYPE (arg00));
6472 return fold_build2_loc (loc, code == EQ_EXPR ? GE_EXPR : LT_EXPR,
6473 result_type,
6474 fold_convert_loc (loc, stype, arg00),
6475 build_int_cst (stype, 0));
6479 return NULL_TREE;
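/* A sketch, assuming a 32-bit unsigned x: (x & 0x80000000) != 0
   tests the sign bit, so it becomes (int) x < 0, and
   (x & 0x80000000) == 0 becomes (int) x >= 0.  */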
6482 /* If CODE with arguments ARG0 and ARG1 represents a single bit
6483 equality/inequality test, then return a simplified form of
6484 the test using shifts and logical operations. Otherwise return
6485 NULL. RESULT_TYPE is the desired result type. */
6487 tree
6488 fold_single_bit_test (location_t loc, enum tree_code code,
6489 tree arg0, tree arg1, tree result_type)
6491 /* If this is testing a single bit, we can optimize the test. */
6492 if ((code == NE_EXPR || code == EQ_EXPR)
6493 && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
6494 && integer_pow2p (TREE_OPERAND (arg0, 1)))
6496 tree inner = TREE_OPERAND (arg0, 0);
6497 tree type = TREE_TYPE (arg0);
6498 int bitnum = tree_log2 (TREE_OPERAND (arg0, 1));
6499 enum machine_mode operand_mode = TYPE_MODE (type);
6500 int ops_unsigned;
6501 tree signed_type, unsigned_type, intermediate_type;
6502 tree tem, one;
6504 /* First, see if we can fold the single bit test into a sign-bit
6505 test. */
6506 tem = fold_single_bit_test_into_sign_test (loc, code, arg0, arg1,
6507 result_type);
6508 if (tem)
6509 return tem;
6511 /* Otherwise we have (A & C) != 0 where C is a single bit,
6512 convert that into ((A >> C2) & 1), where C2 = log2(C).
6513 Similarly for (A & C) == 0. */
6515 /* If INNER is a right shift of a constant and it plus BITNUM does
6516 not overflow, adjust BITNUM and INNER. */
6517 if (TREE_CODE (inner) == RSHIFT_EXPR
6518 && TREE_CODE (TREE_OPERAND (inner, 1)) == INTEGER_CST
6519 && TREE_INT_CST_HIGH (TREE_OPERAND (inner, 1)) == 0
6520 && bitnum < TYPE_PRECISION (type)
6521 && 0 > compare_tree_int (TREE_OPERAND (inner, 1),
6522 bitnum - TYPE_PRECISION (type)))
6524 bitnum += TREE_INT_CST_LOW (TREE_OPERAND (inner, 1));
6525 inner = TREE_OPERAND (inner, 0);
6528 /* If we are going to be able to omit the AND below, we must do our
6529 operations as unsigned. If we must use the AND, we have a choice.
6530 Normally unsigned is faster, but for some machines signed is. */
6531 #ifdef LOAD_EXTEND_OP
6532 ops_unsigned = (LOAD_EXTEND_OP (operand_mode) == SIGN_EXTEND
6533 && !flag_syntax_only) ? 0 : 1;
6534 #else
6535 ops_unsigned = 1;
6536 #endif
6538 signed_type = lang_hooks.types.type_for_mode (operand_mode, 0);
6539 unsigned_type = lang_hooks.types.type_for_mode (operand_mode, 1);
6540 intermediate_type = ops_unsigned ? unsigned_type : signed_type;
6541 inner = fold_convert_loc (loc, intermediate_type, inner);
6543 if (bitnum != 0)
6544 inner = build2 (RSHIFT_EXPR, intermediate_type,
6545 inner, size_int (bitnum));
6547 one = build_int_cst (intermediate_type, 1);
6549 if (code == EQ_EXPR)
6550 inner = fold_build2_loc (loc, BIT_XOR_EXPR, intermediate_type, inner, one);
6552 /* Put the AND last so it can combine with more things. */
6553 inner = build2 (BIT_AND_EXPR, intermediate_type, inner, one);
6555 /* Make sure to return the proper type. */
6556 inner = fold_convert_loc (loc, result_type, inner);
6558 return inner;
6560 return NULL_TREE;
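/* A sketch of the generic case, for unsigned x: (x & 8) != 0
   becomes (x >> 3) & 1, and (x & 8) == 0 becomes
   ((x >> 3) ^ 1) & 1, with the AND kept outermost so that later
   folds can combine it.  */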
6563 /* Check whether we are allowed to reorder operands arg0 and arg1,
6564 such that the evaluation of arg1 occurs before arg0. */
6566 static bool
6567 reorder_operands_p (const_tree arg0, const_tree arg1)
6569 if (! flag_evaluation_order)
6570 return true;
6571 if (TREE_CONSTANT (arg0) || TREE_CONSTANT (arg1))
6572 return true;
6573 return ! TREE_SIDE_EFFECTS (arg0)
6574 && ! TREE_SIDE_EFFECTS (arg1);
6577 /* Test whether it is preferable to swap two operands, ARG0 and
6578 ARG1, for example because ARG0 is an integer constant and ARG1
6579 isn't. If REORDER is true, only recommend swapping if we can
6580 evaluate the operands in reverse order. */
6582 bool
6583 tree_swap_operands_p (const_tree arg0, const_tree arg1, bool reorder)
6585 STRIP_SIGN_NOPS (arg0);
6586 STRIP_SIGN_NOPS (arg1);
6588 if (TREE_CODE (arg1) == INTEGER_CST)
6589 return 0;
6590 if (TREE_CODE (arg0) == INTEGER_CST)
6591 return 1;
6593 if (TREE_CODE (arg1) == REAL_CST)
6594 return 0;
6595 if (TREE_CODE (arg0) == REAL_CST)
6596 return 1;
6598 if (TREE_CODE (arg1) == FIXED_CST)
6599 return 0;
6600 if (TREE_CODE (arg0) == FIXED_CST)
6601 return 1;
6603 if (TREE_CODE (arg1) == COMPLEX_CST)
6604 return 0;
6605 if (TREE_CODE (arg0) == COMPLEX_CST)
6606 return 1;
6608 if (TREE_CONSTANT (arg1))
6609 return 0;
6610 if (TREE_CONSTANT (arg0))
6611 return 1;
6613 if (optimize_function_for_size_p (cfun))
6614 return 0;
6616 if (reorder && flag_evaluation_order
6617 && (TREE_SIDE_EFFECTS (arg0) || TREE_SIDE_EFFECTS (arg1)))
6618 return 0;
6620 /* It is preferable to swap two SSA_NAMEs to ensure a canonical form
6621 for commutative and comparison operators. Ensuring a canonical
6622 form allows the optimizers to find additional redundancies without
6623 having to explicitly check for both orderings. */
6624 if (TREE_CODE (arg0) == SSA_NAME
6625 && TREE_CODE (arg1) == SSA_NAME
6626 && SSA_NAME_VERSION (arg0) > SSA_NAME_VERSION (arg1))
6627 return 1;
6629 /* Put SSA_NAMEs last. */
6630 if (TREE_CODE (arg1) == SSA_NAME)
6631 return 0;
6632 if (TREE_CODE (arg0) == SSA_NAME)
6633 return 1;
6635 /* Put variables last. */
6636 if (DECL_P (arg1))
6637 return 0;
6638 if (DECL_P (arg0))
6639 return 1;
6641 return 0;
6644 /* Fold comparison ARG0 CODE ARG1 (with result in TYPE), where
6645 ARG0 is extended to a wider type. */
6647 static tree
6648 fold_widened_comparison (location_t loc, enum tree_code code,
6649 tree type, tree arg0, tree arg1)
6651 tree arg0_unw = get_unwidened (arg0, NULL_TREE);
6652 tree arg1_unw;
6653 tree shorter_type, outer_type;
6654 tree min, max;
6655 bool above, below;
6657 if (arg0_unw == arg0)
6658 return NULL_TREE;
6659 shorter_type = TREE_TYPE (arg0_unw);
6661 #ifdef HAVE_canonicalize_funcptr_for_compare
6662 /* Disable this optimization if we're casting a function pointer
6663 type on targets that require function pointer canonicalization. */
6664 if (HAVE_canonicalize_funcptr_for_compare
6665 && TREE_CODE (shorter_type) == POINTER_TYPE
6666 && TREE_CODE (TREE_TYPE (shorter_type)) == FUNCTION_TYPE)
6667 return NULL_TREE;
6668 #endif
6670 if (TYPE_PRECISION (TREE_TYPE (arg0)) <= TYPE_PRECISION (shorter_type))
6671 return NULL_TREE;
6673 arg1_unw = get_unwidened (arg1, NULL_TREE);
6675 /* If possible, express the comparison in the shorter mode. */
6676 if ((code == EQ_EXPR || code == NE_EXPR
6677 || TYPE_UNSIGNED (TREE_TYPE (arg0)) == TYPE_UNSIGNED (shorter_type))
6678 && (TREE_TYPE (arg1_unw) == shorter_type
6679 || ((TYPE_PRECISION (shorter_type)
6680 >= TYPE_PRECISION (TREE_TYPE (arg1_unw)))
6681 && (TYPE_UNSIGNED (shorter_type)
6682 == TYPE_UNSIGNED (TREE_TYPE (arg1_unw))))
6683 || (TREE_CODE (arg1_unw) == INTEGER_CST
6684 && (TREE_CODE (shorter_type) == INTEGER_TYPE
6685 || TREE_CODE (shorter_type) == BOOLEAN_TYPE)
6686 && int_fits_type_p (arg1_unw, shorter_type))))
6687 return fold_build2_loc (loc, code, type, arg0_unw,
6688 fold_convert_loc (loc, shorter_type, arg1_unw));
6690 if (TREE_CODE (arg1_unw) != INTEGER_CST
6691 || TREE_CODE (shorter_type) != INTEGER_TYPE
6692 || !int_fits_type_p (arg1_unw, shorter_type))
6693 return NULL_TREE;
6695 /* If we are comparing with an integer that does not fit into the range
6696 of the shorter type, the result is known. */
6697 outer_type = TREE_TYPE (arg1_unw);
6698 min = lower_bound_in_type (outer_type, shorter_type);
6699 max = upper_bound_in_type (outer_type, shorter_type);
6701 above = integer_nonzerop (fold_relational_const (LT_EXPR, type,
6702 max, arg1_unw));
6703 below = integer_nonzerop (fold_relational_const (LT_EXPR, type,
6704 arg1_unw, min));
6706 switch (code)
6708 case EQ_EXPR:
6709 if (above || below)
6710 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
6711 break;
6713 case NE_EXPR:
6714 if (above || below)
6715 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
6716 break;
6718 case LT_EXPR:
6719 case LE_EXPR:
6720 if (above)
6721 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
6722 else if (below)
6723 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
6725 case GT_EXPR:
6726 case GE_EXPR:
6727 if (above)
6728 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
6729 else if (below)
6730 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
6732 default:
6733 break;
6736 return NULL_TREE;
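/* A sketch, assuming unsigned char c widened to int: (int) c == 1000
   is known false, since 1000 lies above the range [0, 255] of the
   shorter type, and (int) c < 300 is known true; (int) c == 200 is
   instead folded to a comparison done in the narrower type.  */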
6739 /* Fold comparison ARG0 CODE ARG1 (with result in TYPE), where for
6740 ARG0 just the signedness is changed. */
6742 static tree
6743 fold_sign_changed_comparison (location_t loc, enum tree_code code, tree type,
6744 tree arg0, tree arg1)
6746 tree arg0_inner;
6747 tree inner_type, outer_type;
6749 if (!CONVERT_EXPR_P (arg0))
6750 return NULL_TREE;
6752 outer_type = TREE_TYPE (arg0);
6753 arg0_inner = TREE_OPERAND (arg0, 0);
6754 inner_type = TREE_TYPE (arg0_inner);
6756 #ifdef HAVE_canonicalize_funcptr_for_compare
6757 /* Disable this optimization if we're casting a function pointer
6758 type on targets that require function pointer canonicalization. */
6759 if (HAVE_canonicalize_funcptr_for_compare
6760 && TREE_CODE (inner_type) == POINTER_TYPE
6761 && TREE_CODE (TREE_TYPE (inner_type)) == FUNCTION_TYPE)
6762 return NULL_TREE;
6763 #endif
6765 if (TYPE_PRECISION (inner_type) != TYPE_PRECISION (outer_type))
6766 return NULL_TREE;
6768 if (TREE_CODE (arg1) != INTEGER_CST
6769 && !(CONVERT_EXPR_P (arg1)
6770 && TREE_TYPE (TREE_OPERAND (arg1, 0)) == inner_type))
6771 return NULL_TREE;
6773 if ((TYPE_UNSIGNED (inner_type) != TYPE_UNSIGNED (outer_type)
6774 || POINTER_TYPE_P (inner_type) != POINTER_TYPE_P (outer_type))
6775 && code != NE_EXPR
6776 && code != EQ_EXPR)
6777 return NULL_TREE;
6779 if (TREE_CODE (arg1) == INTEGER_CST)
6780 arg1 = force_fit_type_double (inner_type, tree_to_double_int (arg1),
6781 0, TREE_OVERFLOW (arg1));
6782 else
6783 arg1 = fold_convert_loc (loc, inner_type, arg1);
6785 return fold_build2_loc (loc, code, type, arg0_inner, arg1);
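/* A sketch, for unsigned int u: (int) u == 5 becomes u == 5U, since
   only the signedness changes and equality is unaffected, whereas
   (int) u < 5 is left alone, as order comparisons do depend on the
   sign.  */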
6788 /* Tries to replace &a[idx] p+ s * delta with &a[idx + delta], if s is
6789 the step of the array. Reconstructs s and delta in the case of s *
6790 delta being an integer constant (and thus already folded). ADDR is
6791 the address. OP1 is the multiplicative expression. If the
6792 function succeeds, the new address expression is returned.
6793 Otherwise NULL_TREE is returned. LOC is the location of the
6794 resulting expression. */
6796 static tree
6797 try_move_mult_to_index (location_t loc, tree addr, tree op1)
6799 tree s, delta, step;
6800 tree ref = TREE_OPERAND (addr, 0), pref;
6801 tree ret, pos;
6802 tree itype;
6803 bool mdim = false;
6805 /* Strip the nops that might be added when converting op1 to sizetype. */
6806 STRIP_NOPS (op1);
6808 /* Canonicalize op1 into a possibly non-constant delta
6809 and an INTEGER_CST s. */
6810 if (TREE_CODE (op1) == MULT_EXPR)
6812 tree arg0 = TREE_OPERAND (op1, 0), arg1 = TREE_OPERAND (op1, 1);
6814 STRIP_NOPS (arg0);
6815 STRIP_NOPS (arg1);
6817 if (TREE_CODE (arg0) == INTEGER_CST)
6819 s = arg0;
6820 delta = arg1;
6822 else if (TREE_CODE (arg1) == INTEGER_CST)
6824 s = arg1;
6825 delta = arg0;
6827 else
6828 return NULL_TREE;
6830 else if (TREE_CODE (op1) == INTEGER_CST)
6832 delta = op1;
6833 s = NULL_TREE;
6835 else
6837 /* Treat op1 as delta * 1. */
6838 delta = op1;
6839 s = integer_one_node;
6842 /* Handle &x.array the same as we would handle &x.array[0]. */
6843 if (TREE_CODE (ref) == COMPONENT_REF
6844 && TREE_CODE (TREE_TYPE (ref)) == ARRAY_TYPE)
6846 tree domain;
6848 /* Remember if this was a multi-dimensional array. */
6849 if (TREE_CODE (TREE_OPERAND (ref, 0)) == ARRAY_REF)
6850 mdim = true;
6852 domain = TYPE_DOMAIN (TREE_TYPE (ref));
6853 if (! domain)
6854 goto cont;
6855 itype = TREE_TYPE (domain);
6857 step = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (ref)));
6858 if (TREE_CODE (step) != INTEGER_CST)
6859 goto cont;
6861 if (s)
6863 if (! tree_int_cst_equal (step, s))
6864 goto cont;
6866 else
6868 /* Check whether delta is a multiple of step. */
6869 tree tmp = div_if_zero_remainder (EXACT_DIV_EXPR, op1, step);
6870 if (! tmp)
6871 goto cont;
6872 delta = tmp;
6875 /* Only fold here if we can verify we do not overflow one
6876 dimension of a multi-dimensional array. */
6877 if (mdim)
6879 tree tmp;
6881 if (!TYPE_MIN_VALUE (domain)
6882 || !TYPE_MAX_VALUE (domain)
6883 || TREE_CODE (TYPE_MAX_VALUE (domain)) != INTEGER_CST)
6884 goto cont;
6886 tmp = fold_binary_loc (loc, PLUS_EXPR, itype,
6887 fold_convert_loc (loc, itype,
6888 TYPE_MIN_VALUE (domain)),
6889 fold_convert_loc (loc, itype, delta));
6890 if (TREE_CODE (tmp) != INTEGER_CST
6891 || tree_int_cst_lt (TYPE_MAX_VALUE (domain), tmp))
6892 goto cont;
6895 /* We found a suitable component reference. */
6897 pref = TREE_OPERAND (addr, 0);
6898 ret = copy_node (pref);
6899 SET_EXPR_LOCATION (ret, loc);
6901 ret = build4_loc (loc, ARRAY_REF, TREE_TYPE (TREE_TYPE (ref)), ret,
6902 fold_build2_loc
6903 (loc, PLUS_EXPR, itype,
6904 fold_convert_loc (loc, itype,
6905 TYPE_MIN_VALUE
6906 (TYPE_DOMAIN (TREE_TYPE (ref)))),
6907 fold_convert_loc (loc, itype, delta)),
6908 NULL_TREE, NULL_TREE);
6909 return build_fold_addr_expr_loc (loc, ret);
6912 cont:
6914 for (;; ref = TREE_OPERAND (ref, 0))
6916 if (TREE_CODE (ref) == ARRAY_REF)
6918 tree domain;
6920 /* Remember if this was a multi-dimensional array. */
6921 if (TREE_CODE (TREE_OPERAND (ref, 0)) == ARRAY_REF)
6922 mdim = true;
6924 domain = TYPE_DOMAIN (TREE_TYPE (TREE_OPERAND (ref, 0)));
6925 if (! domain)
6926 continue;
6927 itype = TREE_TYPE (domain);
6929 step = array_ref_element_size (ref);
6930 if (TREE_CODE (step) != INTEGER_CST)
6931 continue;
6933 if (s)
6935 if (! tree_int_cst_equal (step, s))
6936 continue;
6938 else
6940 /* Check whether delta is a multiple of step. */
6941 tree tmp = div_if_zero_remainder (EXACT_DIV_EXPR, op1, step);
6942 if (! tmp)
6943 continue;
6944 delta = tmp;
6947 /* Only fold here if we can verify we do not overflow one
6948 dimension of a multi-dimensional array. */
6949 if (mdim)
6951 tree tmp;
6953 if (TREE_CODE (TREE_OPERAND (ref, 1)) != INTEGER_CST
6954 || !TYPE_MAX_VALUE (domain)
6955 || TREE_CODE (TYPE_MAX_VALUE (domain)) != INTEGER_CST)
6956 continue;
6958 tmp = fold_binary_loc (loc, PLUS_EXPR, itype,
6959 fold_convert_loc (loc, itype,
6960 TREE_OPERAND (ref, 1)),
6961 fold_convert_loc (loc, itype, delta));
6962 if (!tmp
6963 || TREE_CODE (tmp) != INTEGER_CST
6964 || tree_int_cst_lt (TYPE_MAX_VALUE (domain), tmp))
6965 continue;
6968 break;
6970 else
6971 mdim = false;
6973 if (!handled_component_p (ref))
6974 return NULL_TREE;
6977 /* We found a suitable array reference. Copy everything up to it,
6978 and replace the index. */
6980 pref = TREE_OPERAND (addr, 0);
6981 ret = copy_node (pref);
6982 SET_EXPR_LOCATION (ret, loc);
6983 pos = ret;
6985 while (pref != ref)
6987 pref = TREE_OPERAND (pref, 0);
6988 TREE_OPERAND (pos, 0) = copy_node (pref);
6989 pos = TREE_OPERAND (pos, 0);
6992 TREE_OPERAND (pos, 1)
6993 = fold_build2_loc (loc, PLUS_EXPR, itype,
6994 fold_convert_loc (loc, itype, TREE_OPERAND (pos, 1)),
6995 fold_convert_loc (loc, itype, delta));
6996 return fold_build1_loc (loc, ADDR_EXPR, TREE_TYPE (addr), ret);
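/* A sketch, assuming int a[10] (array step 4): the pointer
   arithmetic &a[1] p+ i * 4, where s = 4 matches the step, becomes
   &a[1 + i]; a plain constant offset such as &a[1] p+ 8 likewise
   becomes &a[3].  */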
7000 /* Fold A < X && A + 1 > Y to A < X && A >= Y. Normally A + 1 > Y
7001 means A >= Y && A != MAX, but in this case we know that
7002 A < X <= MAX. INEQ is A + 1 > Y, BOUND is A < X. */
7004 static tree
7005 fold_to_nonsharp_ineq_using_bound (location_t loc, tree ineq, tree bound)
7007 tree a, typea, type = TREE_TYPE (ineq), a1, diff, y;
7009 if (TREE_CODE (bound) == LT_EXPR)
7010 a = TREE_OPERAND (bound, 0);
7011 else if (TREE_CODE (bound) == GT_EXPR)
7012 a = TREE_OPERAND (bound, 1);
7013 else
7014 return NULL_TREE;
7016 typea = TREE_TYPE (a);
7017 if (!INTEGRAL_TYPE_P (typea)
7018 && !POINTER_TYPE_P (typea))
7019 return NULL_TREE;
7021 if (TREE_CODE (ineq) == LT_EXPR)
7023 a1 = TREE_OPERAND (ineq, 1);
7024 y = TREE_OPERAND (ineq, 0);
7026 else if (TREE_CODE (ineq) == GT_EXPR)
7028 a1 = TREE_OPERAND (ineq, 0);
7029 y = TREE_OPERAND (ineq, 1);
7031 else
7032 return NULL_TREE;
7034 if (TREE_TYPE (a1) != typea)
7035 return NULL_TREE;
7037 if (POINTER_TYPE_P (typea))
7039 /* Convert the pointer types into integers before taking the difference. */
7040 tree ta = fold_convert_loc (loc, ssizetype, a);
7041 tree ta1 = fold_convert_loc (loc, ssizetype, a1);
7042 diff = fold_binary_loc (loc, MINUS_EXPR, ssizetype, ta1, ta);
7044 else
7045 diff = fold_binary_loc (loc, MINUS_EXPR, typea, a1, a);
7047 if (!diff || !integer_onep (diff))
7048 return NULL_TREE;
7050 return fold_build2_loc (loc, GE_EXPR, type, a, y);
7053 /* Fold a sum or difference of at least one multiplication.
7054 Returns the folded tree or NULL if no simplification could be made. */
7056 static tree
7057 fold_plusminus_mult_expr (location_t loc, enum tree_code code, tree type,
7058 tree arg0, tree arg1)
7060 tree arg00, arg01, arg10, arg11;
7061 tree alt0 = NULL_TREE, alt1 = NULL_TREE, same;
7063 /* (A * C) +- (B * C) -> (A+-B) * C.
7064 (A * C) +- A -> A * (C+-1).
7065 We are most concerned about the case where C is a constant,
7066 but other combinations show up during loop reduction. Since
7067 it is not difficult, try all four possibilities. */
7069 if (TREE_CODE (arg0) == MULT_EXPR)
7071 arg00 = TREE_OPERAND (arg0, 0);
7072 arg01 = TREE_OPERAND (arg0, 1);
7074 else if (TREE_CODE (arg0) == INTEGER_CST)
7076 arg00 = build_one_cst (type);
7077 arg01 = arg0;
7079 else
7081 /* We cannot generate constant 1 for fract. */
7082 if (ALL_FRACT_MODE_P (TYPE_MODE (type)))
7083 return NULL_TREE;
7084 arg00 = arg0;
7085 arg01 = build_one_cst (type);
7087 if (TREE_CODE (arg1) == MULT_EXPR)
7089 arg10 = TREE_OPERAND (arg1, 0);
7090 arg11 = TREE_OPERAND (arg1, 1);
7092 else if (TREE_CODE (arg1) == INTEGER_CST)
7094 arg10 = build_one_cst (type);
7095 /* As we canonicalize A - 2 to A + -2, get rid of that sign for
7096 the purpose of this canonicalization. */
7097 if (TREE_INT_CST_HIGH (arg1) == -1
7098 && negate_expr_p (arg1)
7099 && code == PLUS_EXPR)
7101 arg11 = negate_expr (arg1);
7102 code = MINUS_EXPR;
7104 else
7105 arg11 = arg1;
7107 else
7109 /* We cannot generate constant 1 for fract. */
7110 if (ALL_FRACT_MODE_P (TYPE_MODE (type)))
7111 return NULL_TREE;
7112 arg10 = arg1;
7113 arg11 = build_one_cst (type);
7115 same = NULL_TREE;
7117 if (operand_equal_p (arg01, arg11, 0))
7118 same = arg01, alt0 = arg00, alt1 = arg10;
7119 else if (operand_equal_p (arg00, arg10, 0))
7120 same = arg00, alt0 = arg01, alt1 = arg11;
7121 else if (operand_equal_p (arg00, arg11, 0))
7122 same = arg00, alt0 = arg01, alt1 = arg10;
7123 else if (operand_equal_p (arg01, arg10, 0))
7124 same = arg01, alt0 = arg00, alt1 = arg11;
7126 /* No identical multiplicands; see if we can find a common
7127 power-of-two factor in non-power-of-two multiplies. This
7128 can help in multi-dimensional array access. */
7129 else if (host_integerp (arg01, 0)
7130 && host_integerp (arg11, 0))
7132 HOST_WIDE_INT int01, int11, tmp;
7133 bool swap = false;
7134 tree maybe_same;
7135 int01 = TREE_INT_CST_LOW (arg01);
7136 int11 = TREE_INT_CST_LOW (arg11);
7138 /* Move min of absolute values to int11. */
7139 if (absu_hwi (int01) < absu_hwi (int11))
7141 tmp = int01, int01 = int11, int11 = tmp;
7142 alt0 = arg00, arg00 = arg10, arg10 = alt0;
7143 maybe_same = arg01;
7144 swap = true;
7146 else
7147 maybe_same = arg11;
7149 if (exact_log2 (absu_hwi (int11)) > 0 && int01 % int11 == 0
7150 /* The remainder should not be a constant, otherwise we
7151 end up folding i * 4 + 2 to (i * 2 + 1) * 2, which would
7152 increase the number of multiplications necessary. */
7153 && TREE_CODE (arg10) != INTEGER_CST)
7155 alt0 = fold_build2_loc (loc, MULT_EXPR, TREE_TYPE (arg00), arg00,
7156 build_int_cst (TREE_TYPE (arg00),
7157 int01 / int11));
7158 alt1 = arg10;
7159 same = maybe_same;
7160 if (swap)
7161 maybe_same = alt0, alt0 = alt1, alt1 = maybe_same;
7165 if (same)
7166 return fold_build2_loc (loc, MULT_EXPR, type,
7167 fold_build2_loc (loc, code, type,
7168 fold_convert_loc (loc, type, alt0),
7169 fold_convert_loc (loc, type, alt1)),
7170 fold_convert_loc (loc, type, same));
7172 return NULL_TREE;
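/* A sketch of both paths above: x * 3 + x * 5 shares the
   multiplicand x and becomes x * (3 + 5), while i * 12 + j * 4
   shares the power-of-two factor 4 and becomes (i * 3 + j) * 4.  */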
7175 /* Subroutine of native_encode_expr. Encode the INTEGER_CST
7176 specified by EXPR into the buffer PTR of length LEN bytes.
7177 Return the number of bytes placed in the buffer, or zero
7178 upon failure. */
7180 static int
7181 native_encode_int (const_tree expr, unsigned char *ptr, int len)
7183 tree type = TREE_TYPE (expr);
7184 int total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
7185 int byte, offset, word, words;
7186 unsigned char value;
7188 if (total_bytes > len)
7189 return 0;
7190 words = total_bytes / UNITS_PER_WORD;
7192 for (byte = 0; byte < total_bytes; byte++)
7194 int bitpos = byte * BITS_PER_UNIT;
7195 if (bitpos < HOST_BITS_PER_WIDE_INT)
7196 value = (unsigned char) (TREE_INT_CST_LOW (expr) >> bitpos);
7197 else
7198 value = (unsigned char) (TREE_INT_CST_HIGH (expr)
7199 >> (bitpos - HOST_BITS_PER_WIDE_INT));
7201 if (total_bytes > UNITS_PER_WORD)
7203 word = byte / UNITS_PER_WORD;
7204 if (WORDS_BIG_ENDIAN)
7205 word = (words - 1) - word;
7206 offset = word * UNITS_PER_WORD;
7207 if (BYTES_BIG_ENDIAN)
7208 offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
7209 else
7210 offset += byte % UNITS_PER_WORD;
7212 else
7213 offset = BYTES_BIG_ENDIAN ? (total_bytes - 1) - byte : byte;
7214 ptr[offset] = value;
7216 return total_bytes;
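/* A sketch, assuming a 32-bit int with 8-bit bytes: the constant
   0x01020304 is encoded as the bytes { 0x04, 0x03, 0x02, 0x01 } on
   a little-endian target and { 0x01, 0x02, 0x03, 0x04 } on a
   big-endian one.  */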
7220 /* Subroutine of native_encode_expr. Encode the REAL_CST
7221 specified by EXPR into the buffer PTR of length LEN bytes.
7222 Return the number of bytes placed in the buffer, or zero
7223 upon failure. */
7225 static int
7226 native_encode_real (const_tree expr, unsigned char *ptr, int len)
7228 tree type = TREE_TYPE (expr);
7229 int total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
7230 int byte, offset, word, words, bitpos;
7231 unsigned char value;
7233 /* real_to_target puts 32 bits in each long, no matter the size of
7234 the host's long. We handle floating point representations with
7235 up to 192 bits. */
7236 long tmp[6];
7238 if (total_bytes > len)
7239 return 0;
7240 words = (32 / BITS_PER_UNIT) / UNITS_PER_WORD;
7242 real_to_target (tmp, TREE_REAL_CST_PTR (expr), TYPE_MODE (type));
7244 for (bitpos = 0; bitpos < total_bytes * BITS_PER_UNIT;
7245 bitpos += BITS_PER_UNIT)
7247 byte = (bitpos / BITS_PER_UNIT) & 3;
7248 value = (unsigned char) (tmp[bitpos / 32] >> (bitpos & 31));
7250 if (UNITS_PER_WORD < 4)
7252 word = byte / UNITS_PER_WORD;
7253 if (WORDS_BIG_ENDIAN)
7254 word = (words - 1) - word;
7255 offset = word * UNITS_PER_WORD;
7256 if (BYTES_BIG_ENDIAN)
7257 offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
7258 else
7259 offset += byte % UNITS_PER_WORD;
7261 else
7262 offset = BYTES_BIG_ENDIAN ? 3 - byte : byte;
7263 ptr[offset + ((bitpos / BITS_PER_UNIT) & ~3)] = value;
7265 return total_bytes;
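/* Editorial sketch, not part of GCC: real_to_target delivers the
   image as 32-bit groups, one group per long, so the loop above
   fetches the byte at bit position bitpos out of group bitpos/32.
   Standalone form of that extraction: */
static unsigned char
sketch_byte_of_group (const long *groups, int bitpos)
{
  /* bitpos & 31 selects the bit offset within the 32-bit group.  */
  return (unsigned char) (groups[bitpos / 32] >> (bitpos & 31));
}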
7268 /* Subroutine of native_encode_expr. Encode the COMPLEX_CST
7269 specified by EXPR into the buffer PTR of length LEN bytes.
7270 Return the number of bytes placed in the buffer, or zero
7271 upon failure. */
7273 static int
7274 native_encode_complex (const_tree expr, unsigned char *ptr, int len)
7276 int rsize, isize;
7277 tree part;
7279 part = TREE_REALPART (expr);
7280 rsize = native_encode_expr (part, ptr, len);
7281 if (rsize == 0)
7282 return 0;
7283 part = TREE_IMAGPART (expr);
7284 isize = native_encode_expr (part, ptr+rsize, len-rsize);
7285 if (isize != rsize)
7286 return 0;
7287 return rsize + isize;
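/* Editorial sketch, not part of GCC: a COMPLEX_CST is laid out as
   the real part immediately followed by an equally sized imaginary
   part.  Run-time analogue for a _Complex double, assuming the host
   already matches the target byte order.  */
#include <string.h>

static int
sketch_encode_complex (double re, double im, unsigned char *ptr, int len)
{
  int part = (int) sizeof (double);

  if (2 * part > len)
    return 0;
  memcpy (ptr, &re, part);        /* bytes [0, part) */
  memcpy (ptr + part, &im, part); /* bytes [part, 2*part) */
  return 2 * part;
}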
7291 /* Subroutine of native_encode_expr. Encode the VECTOR_CST
7292 specified by EXPR into the buffer PTR of length LEN bytes.
7293 Return the number of bytes placed in the buffer, or zero
7294 upon failure. */
7296 static int
7297 native_encode_vector (const_tree expr, unsigned char *ptr, int len)
7299 unsigned i, count;
7300 int size, offset;
7301 tree itype, elem;
7303 offset = 0;
7304 count = VECTOR_CST_NELTS (expr);
7305 itype = TREE_TYPE (TREE_TYPE (expr));
7306 size = GET_MODE_SIZE (TYPE_MODE (itype));
7307 for (i = 0; i < count; i++)
7309 elem = VECTOR_CST_ELT (expr, i);
7310 if (native_encode_expr (elem, ptr+offset, len-offset) != size)
7311 return 0;
7312 offset += size;
7314 return offset;
7318 /* Subroutine of native_encode_expr. Encode the STRING_CST
7319 specified by EXPR into the buffer PTR of length LEN bytes.
7320 Return the number of bytes placed in the buffer, or zero
7321 upon failure. */
7323 static int
7324 native_encode_string (const_tree expr, unsigned char *ptr, int len)
7326 tree type = TREE_TYPE (expr);
7327 HOST_WIDE_INT total_bytes;
7329 if (TREE_CODE (type) != ARRAY_TYPE
7330 || TREE_CODE (TREE_TYPE (type)) != INTEGER_TYPE
7331 || GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (type))) != BITS_PER_UNIT
7332 || !host_integerp (TYPE_SIZE_UNIT (type), 0))
7333 return 0;
7334 total_bytes = tree_low_cst (TYPE_SIZE_UNIT (type), 0);
7335 if (total_bytes > len)
7336 return 0;
7337 if (TREE_STRING_LENGTH (expr) < total_bytes)
7339 memcpy (ptr, TREE_STRING_POINTER (expr), TREE_STRING_LENGTH (expr));
7340 memset (ptr + TREE_STRING_LENGTH (expr), 0,
7341 total_bytes - TREE_STRING_LENGTH (expr));
7343 else
7344 memcpy (ptr, TREE_STRING_POINTER (expr), total_bytes);
7345 return total_bytes;
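/* Editorial sketch, not part of GCC: the copy above zero-pads when
   the STRING_CST carries fewer bytes than its array type, as in
   char s[8] = "ab".  Standalone equivalent: */
#include <string.h>

static void
sketch_copy_padded (unsigned char *dst, const char *src, int src_len,
                    int total_bytes)
{
  if (src_len < total_bytes)
    {
      memcpy (dst, src, src_len);
      memset (dst + src_len, 0, total_bytes - src_len);
    }
  else
    memcpy (dst, src, total_bytes);
}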
7349 /* Subroutine of fold_view_convert_expr. Encode the INTEGER_CST,
7350 REAL_CST, COMPLEX_CST or VECTOR_CST specified by EXPR into the
7351 buffer PTR of length LEN bytes. Return the number of bytes
7352 placed in the buffer, or zero upon failure. */
7354 int
7355 native_encode_expr (const_tree expr, unsigned char *ptr, int len)
7357 switch (TREE_CODE (expr))
7359 case INTEGER_CST:
7360 return native_encode_int (expr, ptr, len);
7362 case REAL_CST:
7363 return native_encode_real (expr, ptr, len);
7365 case COMPLEX_CST:
7366 return native_encode_complex (expr, ptr, len);
7368 case VECTOR_CST:
7369 return native_encode_vector (expr, ptr, len);
7371 case STRING_CST:
7372 return native_encode_string (expr, ptr, len);
7374 default:
7375 return 0;
7380 /* Subroutine of native_interpret_expr. Interpret the contents of
7381 the buffer PTR of length LEN as an INTEGER_CST of type TYPE.
7382 If the buffer cannot be interpreted, return NULL_TREE. */
7384 static tree
7385 native_interpret_int (tree type, const unsigned char *ptr, int len)
7387 int total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
7388 int byte, offset, word, words;
7389 unsigned char value;
7390 double_int result;
7392 if (total_bytes > len)
7393 return NULL_TREE;
7394 if (total_bytes * BITS_PER_UNIT > HOST_BITS_PER_DOUBLE_INT)
7395 return NULL_TREE;
7397 result = double_int_zero;
7398 words = total_bytes / UNITS_PER_WORD;
7400 for (byte = 0; byte < total_bytes; byte++)
7402 int bitpos = byte * BITS_PER_UNIT;
7403 if (total_bytes > UNITS_PER_WORD)
7405 word = byte / UNITS_PER_WORD;
7406 if (WORDS_BIG_ENDIAN)
7407 word = (words - 1) - word;
7408 offset = word * UNITS_PER_WORD;
7409 if (BYTES_BIG_ENDIAN)
7410 offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
7411 else
7412 offset += byte % UNITS_PER_WORD;
7414 else
7415 offset = BYTES_BIG_ENDIAN ? (total_bytes - 1) - byte : byte;
7416 value = ptr[offset];
7418 if (bitpos < HOST_BITS_PER_WIDE_INT)
7419 result.low |= (unsigned HOST_WIDE_INT) value << bitpos;
7420 else
7421 result.high |= (unsigned HOST_WIDE_INT) value
7422 << (bitpos - HOST_BITS_PER_WIDE_INT);
7425 return double_int_to_tree (type, result);
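/* Editorial sketch, not part of GCC: the inverse of the byte-order
   encoding shown earlier, rebuilding a 64-bit value from
   target-ordered bytes.  As before, bytes_big_endian stands in for
   the target macro and word-level shuffling is omitted.  */
#include <stdint.h>

static uint64_t
sketch_decode_u64 (const unsigned char *ptr, int bytes_big_endian)
{
  uint64_t result = 0;
  int total_bytes = 8;
  int byte;

  for (byte = 0; byte < total_bytes; byte++)
    {
      int offset = bytes_big_endian ? (total_bytes - 1) - byte : byte;

      result |= (uint64_t) ptr[offset] << (byte * 8);
    }
  return result;
}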
7429 /* Subroutine of native_interpret_expr. Interpret the contents of
7430 the buffer PTR of length LEN as a REAL_CST of type TYPE.
7431 If the buffer cannot be interpreted, return NULL_TREE. */
7433 static tree
7434 native_interpret_real (tree type, const unsigned char *ptr, int len)
7436 enum machine_mode mode = TYPE_MODE (type);
7437 int total_bytes = GET_MODE_SIZE (mode);
7438 int byte, offset, word, words, bitpos;
7439 unsigned char value;
7440 /* There are always 32 bits in each long, no matter the size of
7441 the host's long. We handle floating point representations with
7442 up to 192 bits. */
7443 REAL_VALUE_TYPE r;
7444 long tmp[6];
7446 total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
7447 if (total_bytes > len || total_bytes > 24)
7448 return NULL_TREE;
7449 words = (32 / BITS_PER_UNIT) / UNITS_PER_WORD;
7451 memset (tmp, 0, sizeof (tmp));
7452 for (bitpos = 0; bitpos < total_bytes * BITS_PER_UNIT;
7453 bitpos += BITS_PER_UNIT)
7455 byte = (bitpos / BITS_PER_UNIT) & 3;
7456 if (UNITS_PER_WORD < 4)
7458 word = byte / UNITS_PER_WORD;
7459 if (WORDS_BIG_ENDIAN)
7460 word = (words - 1) - word;
7461 offset = word * UNITS_PER_WORD;
7462 if (BYTES_BIG_ENDIAN)
7463 offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
7464 else
7465 offset += byte % UNITS_PER_WORD;
7467 else
7468 offset = BYTES_BIG_ENDIAN ? 3 - byte : byte;
7469 value = ptr[offset + ((bitpos / BITS_PER_UNIT) & ~3)];
7471 tmp[bitpos / 32] |= (unsigned long)value << (bitpos & 31);
7474 real_from_target (&r, tmp, mode);
7475 return build_real (type, r);
7479 /* Subroutine of native_interpret_expr. Interpret the contents of
7480 the buffer PTR of length LEN as a COMPLEX_CST of type TYPE.
7481 If the buffer cannot be interpreted, return NULL_TREE. */
7483 static tree
7484 native_interpret_complex (tree type, const unsigned char *ptr, int len)
7486 tree etype, rpart, ipart;
7487 int size;
7489 etype = TREE_TYPE (type);
7490 size = GET_MODE_SIZE (TYPE_MODE (etype));
7491 if (size * 2 > len)
7492 return NULL_TREE;
7493 rpart = native_interpret_expr (etype, ptr, size);
7494 if (!rpart)
7495 return NULL_TREE;
7496 ipart = native_interpret_expr (etype, ptr+size, size);
7497 if (!ipart)
7498 return NULL_TREE;
7499 return build_complex (type, rpart, ipart);
7503 /* Subroutine of native_interpret_expr. Interpret the contents of
7504 the buffer PTR of length LEN as a VECTOR_CST of type TYPE.
7505 If the buffer cannot be interpreted, return NULL_TREE. */
7507 static tree
7508 native_interpret_vector (tree type, const unsigned char *ptr, int len)
7510 tree etype, elem;
7511 int i, size, count;
7512 tree *elements;
7514 etype = TREE_TYPE (type);
7515 size = GET_MODE_SIZE (TYPE_MODE (etype));
7516 count = TYPE_VECTOR_SUBPARTS (type);
7517 if (size * count > len)
7518 return NULL_TREE;
7520 elements = XALLOCAVEC (tree, count);
7521 for (i = count - 1; i >= 0; i--)
7523 elem = native_interpret_expr (etype, ptr+(i*size), size);
7524 if (!elem)
7525 return NULL_TREE;
7526 elements[i] = elem;
7528 return build_vector (type, elements);
7532 /* Subroutine of fold_view_convert_expr. Interpret the contents of
7533 the buffer PTR of length LEN as a constant of type TYPE. For
7534 INTEGRAL_TYPE_P we return an INTEGER_CST, for SCALAR_FLOAT_TYPE_P
7535 we return a REAL_CST, etc... If the buffer cannot be interpreted,
7536 return NULL_TREE. */
7538 tree
7539 native_interpret_expr (tree type, const unsigned char *ptr, int len)
7541 switch (TREE_CODE (type))
7543 case INTEGER_TYPE:
7544 case ENUMERAL_TYPE:
7545 case BOOLEAN_TYPE:
7546 case POINTER_TYPE:
7547 case REFERENCE_TYPE:
7548 return native_interpret_int (type, ptr, len);
7550 case REAL_TYPE:
7551 return native_interpret_real (type, ptr, len);
7553 case COMPLEX_TYPE:
7554 return native_interpret_complex (type, ptr, len);
7556 case VECTOR_TYPE:
7557 return native_interpret_vector (type, ptr, len);
7559 default:
7560 return NULL_TREE;
7564 /* Returns true if we can interpret the contents of a native encoding
7565 as TYPE. */
7567 static bool
7568 can_native_interpret_type_p (tree type)
7570 switch (TREE_CODE (type))
7572 case INTEGER_TYPE:
7573 case ENUMERAL_TYPE:
7574 case BOOLEAN_TYPE:
7575 case POINTER_TYPE:
7576 case REFERENCE_TYPE:
7577 case REAL_TYPE:
7578 case COMPLEX_TYPE:
7579 case VECTOR_TYPE:
7580 return true;
7581 default:
7582 return false;
7586 /* Fold a VIEW_CONVERT_EXPR of a constant expression EXPR to type
7587 TYPE at compile-time. If we're unable to perform the conversion
7588 return NULL_TREE. */
7590 static tree
7591 fold_view_convert_expr (tree type, tree expr)
7593 /* We support up to 512-bit values (for V8DFmode). */
7594 unsigned char buffer[64];
7595 int len;
7597 /* Check that the host and target are sane. */
7598 if (CHAR_BIT != 8 || BITS_PER_UNIT != 8)
7599 return NULL_TREE;
7601 len = native_encode_expr (expr, buffer, sizeof (buffer));
7602 if (len == 0)
7603 return NULL_TREE;
7605 return native_interpret_expr (type, buffer, len);
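/* Editorial sketch, not part of GCC: what fold_view_convert_expr
   computes at compile time corresponds to this run-time type pun on
   the host, assuming a 4-byte float.  On a little-endian IEEE host,
   1.0f encodes to the bytes 00 00 80 3f, which reinterpret as
   0x3f800000.  */
#include <stdio.h>
#include <stdint.h>
#include <string.h>

int
main (void)
{
  float f = 1.0f;
  uint32_t u;

  /* Encode the float's bytes, then interpret them as an integer,
     the way the native_encode_expr / native_interpret_expr pair is
     used above.  */
  memcpy (&u, &f, sizeof u);
  printf ("0x%08x\n", (unsigned) u); /* 0x3f800000 on IEEE hosts.  */
  return 0;
}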
7608 /* Build an expression for the address of T. Folds away INDIRECT_REF
7609 to avoid confusing the gimplify process. */
7611 tree
7612 build_fold_addr_expr_with_type_loc (location_t loc, tree t, tree ptrtype)
7614 /* The size of the object is not relevant when talking about its address. */
7615 if (TREE_CODE (t) == WITH_SIZE_EXPR)
7616 t = TREE_OPERAND (t, 0);
7618 if (TREE_CODE (t) == INDIRECT_REF)
7620 t = TREE_OPERAND (t, 0);
7622 if (TREE_TYPE (t) != ptrtype)
7623 t = build1_loc (loc, NOP_EXPR, ptrtype, t);
7625 else if (TREE_CODE (t) == MEM_REF
7626 && integer_zerop (TREE_OPERAND (t, 1)))
7627 return TREE_OPERAND (t, 0);
7628 else if (TREE_CODE (t) == MEM_REF
7629 && TREE_CODE (TREE_OPERAND (t, 0)) == INTEGER_CST)
7630 return fold_binary (POINTER_PLUS_EXPR, ptrtype,
7631 TREE_OPERAND (t, 0),
7632 convert_to_ptrofftype (TREE_OPERAND (t, 1)));
7633 else if (TREE_CODE (t) == VIEW_CONVERT_EXPR)
7635 t = build_fold_addr_expr_loc (loc, TREE_OPERAND (t, 0));
7637 if (TREE_TYPE (t) != ptrtype)
7638 t = fold_convert_loc (loc, ptrtype, t);
7640 else
7641 t = build1_loc (loc, ADDR_EXPR, ptrtype, t);
7643 return t;
7646 /* Build an expression for the address of T. */
7648 tree
7649 build_fold_addr_expr_loc (location_t loc, tree t)
7651 tree ptrtype = build_pointer_type (TREE_TYPE (t));
7653 return build_fold_addr_expr_with_type_loc (loc, t, ptrtype);
7656 static bool vec_cst_ctor_to_array (tree, tree *);
7658 /* Fold a unary expression of code CODE and type TYPE with operand
7659 OP0. Return the folded expression if folding is successful.
7660 Otherwise, return NULL_TREE. */
7662 tree
7663 fold_unary_loc (location_t loc, enum tree_code code, tree type, tree op0)
7665 tree tem;
7666 tree arg0;
7667 enum tree_code_class kind = TREE_CODE_CLASS (code);
7669 gcc_assert (IS_EXPR_CODE_CLASS (kind)
7670 && TREE_CODE_LENGTH (code) == 1);
7672 arg0 = op0;
7673 if (arg0)
7675 if (CONVERT_EXPR_CODE_P (code)
7676 || code == FLOAT_EXPR || code == ABS_EXPR || code == NEGATE_EXPR)
7678 /* Don't use STRIP_NOPS, because signedness of argument type
7679 matters. */
7680 STRIP_SIGN_NOPS (arg0);
7682 else
7684 /* Strip any conversions that don't change the mode. This
7685 is safe for every expression, except for a comparison
7686 expression because its signedness is derived from its
7687 operands.
7689 Note that this is done as an internal manipulation within
7690 the constant folder, in order to find the simplest
7691 representation of the arguments so that their form can be
7692 studied. In any case, the appropriate type conversions
7693 should be put back in the tree that will get out of the
7694 constant folder. */
7695 STRIP_NOPS (arg0);
7699 if (TREE_CODE_CLASS (code) == tcc_unary)
7701 if (TREE_CODE (arg0) == COMPOUND_EXPR)
7702 return build2 (COMPOUND_EXPR, type, TREE_OPERAND (arg0, 0),
7703 fold_build1_loc (loc, code, type,
7704 fold_convert_loc (loc, TREE_TYPE (op0),
7705 TREE_OPERAND (arg0, 1))));
7706 else if (TREE_CODE (arg0) == COND_EXPR)
7708 tree arg01 = TREE_OPERAND (arg0, 1);
7709 tree arg02 = TREE_OPERAND (arg0, 2);
7710 if (! VOID_TYPE_P (TREE_TYPE (arg01)))
7711 arg01 = fold_build1_loc (loc, code, type,
7712 fold_convert_loc (loc,
7713 TREE_TYPE (op0), arg01));
7714 if (! VOID_TYPE_P (TREE_TYPE (arg02)))
7715 arg02 = fold_build1_loc (loc, code, type,
7716 fold_convert_loc (loc,
7717 TREE_TYPE (op0), arg02));
7718 tem = fold_build3_loc (loc, COND_EXPR, type, TREE_OPERAND (arg0, 0),
7719 arg01, arg02);
7721 /* If this was a conversion, and all we did was to move it
7722 inside the COND_EXPR, bring it back out. But leave it if
7723 it is a conversion from integer to integer and the
7724 result precision is no wider than a word since such a
7725 conversion is cheap and may be optimized away by combine,
7726 while it couldn't if it were outside the COND_EXPR. Then return
7727 so we don't get into an infinite recursion loop taking the
7728 conversion out and then back in. */
7730 if ((CONVERT_EXPR_CODE_P (code)
7731 || code == NON_LVALUE_EXPR)
7732 && TREE_CODE (tem) == COND_EXPR
7733 && TREE_CODE (TREE_OPERAND (tem, 1)) == code
7734 && TREE_CODE (TREE_OPERAND (tem, 2)) == code
7735 && ! VOID_TYPE_P (TREE_OPERAND (tem, 1))
7736 && ! VOID_TYPE_P (TREE_OPERAND (tem, 2))
7737 && (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 1), 0))
7738 == TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 2), 0)))
7739 && (! (INTEGRAL_TYPE_P (TREE_TYPE (tem))
7740 && (INTEGRAL_TYPE_P
7741 (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 1), 0))))
7742 && TYPE_PRECISION (TREE_TYPE (tem)) <= BITS_PER_WORD)
7743 || flag_syntax_only))
7744 tem = build1_loc (loc, code, type,
7745 build3 (COND_EXPR,
7746 TREE_TYPE (TREE_OPERAND
7747 (TREE_OPERAND (tem, 1), 0)),
7748 TREE_OPERAND (tem, 0),
7749 TREE_OPERAND (TREE_OPERAND (tem, 1), 0),
7750 TREE_OPERAND (TREE_OPERAND (tem, 2),
7751 0)));
7752 return tem;
7756 switch (code)
7758 case PAREN_EXPR:
7759 /* Re-association barriers around constants and other re-association
7760 barriers can be removed. */
7761 if (CONSTANT_CLASS_P (op0)
7762 || TREE_CODE (op0) == PAREN_EXPR)
7763 return fold_convert_loc (loc, type, op0);
7764 return NULL_TREE;
7766 CASE_CONVERT:
7767 case FLOAT_EXPR:
7768 case FIX_TRUNC_EXPR:
7769 if (TREE_TYPE (op0) == type)
7770 return op0;
7772 if (COMPARISON_CLASS_P (op0))
7774 /* If we have (type) (a CMP b) and type is an integral type, return
7775 a new expression involving the new type. Canonicalize
7776 (type) (a CMP b) to (a CMP b) ? (type) true : (type) false for
7777 a non-integral type.
7778 Do not fold the result, as that would not simplify further;
7779 folding again would also lead to infinite recursion. */
7780 if (TREE_CODE (type) == BOOLEAN_TYPE)
7781 return build2_loc (loc, TREE_CODE (op0), type,
7782 TREE_OPERAND (op0, 0),
7783 TREE_OPERAND (op0, 1));
7784 else if (!INTEGRAL_TYPE_P (type))
7785 return build3_loc (loc, COND_EXPR, type, op0,
7786 constant_boolean_node (true, type),
7787 constant_boolean_node (false, type));
7790 /* Handle cases of two conversions in a row. */
7791 if (CONVERT_EXPR_P (op0))
7793 tree inside_type = TREE_TYPE (TREE_OPERAND (op0, 0));
7794 tree inter_type = TREE_TYPE (op0);
7795 int inside_int = INTEGRAL_TYPE_P (inside_type);
7796 int inside_ptr = POINTER_TYPE_P (inside_type);
7797 int inside_float = FLOAT_TYPE_P (inside_type);
7798 int inside_vec = TREE_CODE (inside_type) == VECTOR_TYPE;
7799 unsigned int inside_prec = TYPE_PRECISION (inside_type);
7800 int inside_unsignedp = TYPE_UNSIGNED (inside_type);
7801 int inter_int = INTEGRAL_TYPE_P (inter_type);
7802 int inter_ptr = POINTER_TYPE_P (inter_type);
7803 int inter_float = FLOAT_TYPE_P (inter_type);
7804 int inter_vec = TREE_CODE (inter_type) == VECTOR_TYPE;
7805 unsigned int inter_prec = TYPE_PRECISION (inter_type);
7806 int inter_unsignedp = TYPE_UNSIGNED (inter_type);
7807 int final_int = INTEGRAL_TYPE_P (type);
7808 int final_ptr = POINTER_TYPE_P (type);
7809 int final_float = FLOAT_TYPE_P (type);
7810 int final_vec = TREE_CODE (type) == VECTOR_TYPE;
7811 unsigned int final_prec = TYPE_PRECISION (type);
7812 int final_unsignedp = TYPE_UNSIGNED (type);
7814 /* In addition to the cases of two conversions in a row
7815 handled below, if we are converting something to its own
7816 type via an object of identical or wider precision, neither
7817 conversion is needed. */
7818 if (TYPE_MAIN_VARIANT (inside_type) == TYPE_MAIN_VARIANT (type)
7819 && (((inter_int || inter_ptr) && final_int)
7820 || (inter_float && final_float))
7821 && inter_prec >= final_prec)
7822 return fold_build1_loc (loc, code, type, TREE_OPERAND (op0, 0));
7824 /* Likewise, if the intermediate and initial types are either both
7825 float or both integer, we don't need the middle conversion if the
7826 former is wider than the latter and doesn't change the signedness
7827 (for integers). Avoid this if the final type is a pointer since
7828 then we sometimes need the middle conversion. Likewise if the
7829 final type has a precision not equal to the size of its mode. */
7830 if (((inter_int && inside_int)
7831 || (inter_float && inside_float)
7832 || (inter_vec && inside_vec))
7833 && inter_prec >= inside_prec
7834 && (inter_float || inter_vec
7835 || inter_unsignedp == inside_unsignedp)
7836 && ! (final_prec != GET_MODE_PRECISION (TYPE_MODE (type))
7837 && TYPE_MODE (type) == TYPE_MODE (inter_type))
7838 && ! final_ptr
7839 && (! final_vec || inter_prec == inside_prec))
7840 return fold_build1_loc (loc, code, type, TREE_OPERAND (op0, 0));
7842 /* If we have a sign-extension of a zero-extended value, we can
7843 replace that by a single zero-extension. Likewise if the
7844 final conversion does not change precision we can drop the
7845 intermediate conversion. */
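/* For example (editorial note): (int) (short) (unsigned char) c
   zero-extends to short and then sign-extends to int; the
   intermediate value is known non-negative, so the pair collapses
   to a single zero-extension from unsigned char to int.  */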
7846 if (inside_int && inter_int && final_int
7847 && ((inside_prec < inter_prec && inter_prec < final_prec
7848 && inside_unsignedp && !inter_unsignedp)
7849 || final_prec == inter_prec))
7850 return fold_build1_loc (loc, code, type, TREE_OPERAND (op0, 0));
7852 /* Two conversions in a row are not needed unless:
7853 - some conversion is floating-point (overstrict for now), or
7854 - some conversion is a vector (overstrict for now), or
7855 - the intermediate type is narrower than both initial and
7856 final, or
7857 - the intermediate type and innermost type differ in signedness,
7858 and the outermost type is wider than the intermediate, or
7859 - the initial type is a pointer type and the precisions of the
7860 intermediate and final types differ, or
7861 - the final type is a pointer type and the precisions of the
7862 initial and intermediate types differ. */
7863 if (! inside_float && ! inter_float && ! final_float
7864 && ! inside_vec && ! inter_vec && ! final_vec
7865 && (inter_prec >= inside_prec || inter_prec >= final_prec)
7866 && ! (inside_int && inter_int
7867 && inter_unsignedp != inside_unsignedp
7868 && inter_prec < final_prec)
7869 && ((inter_unsignedp && inter_prec > inside_prec)
7870 == (final_unsignedp && final_prec > inter_prec))
7871 && ! (inside_ptr && inter_prec != final_prec)
7872 && ! (final_ptr && inside_prec != inter_prec)
7873 && ! (final_prec != GET_MODE_PRECISION (TYPE_MODE (type))
7874 && TYPE_MODE (type) == TYPE_MODE (inter_type)))
7875 return fold_build1_loc (loc, code, type, TREE_OPERAND (op0, 0));
7878 /* Handle (T *)&A.B.C for A being of type T and B and C
7879 living at offset zero. This occurs frequently in
7880 C++ upcasting and then accessing the base. */
7881 if (TREE_CODE (op0) == ADDR_EXPR
7882 && POINTER_TYPE_P (type)
7883 && handled_component_p (TREE_OPERAND (op0, 0)))
7885 HOST_WIDE_INT bitsize, bitpos;
7886 tree offset;
7887 enum machine_mode mode;
7888 int unsignedp, volatilep;
7889 tree base = TREE_OPERAND (op0, 0);
7890 base = get_inner_reference (base, &bitsize, &bitpos, &offset,
7891 &mode, &unsignedp, &volatilep, false);
7892 /* If the reference was to a (constant) zero offset, we can use
7893 the address of the base if it has the same base type
7894 as the result type and the pointer type is unqualified. */
7895 if (! offset && bitpos == 0
7896 && (TYPE_MAIN_VARIANT (TREE_TYPE (type))
7897 == TYPE_MAIN_VARIANT (TREE_TYPE (base)))
7898 && TYPE_QUALS (type) == TYPE_UNQUALIFIED)
7899 return fold_convert_loc (loc, type,
7900 build_fold_addr_expr_loc (loc, base));
7903 if (TREE_CODE (op0) == MODIFY_EXPR
7904 && TREE_CONSTANT (TREE_OPERAND (op0, 1))
7905 /* Detect assigning a bitfield. */
7906 && !(TREE_CODE (TREE_OPERAND (op0, 0)) == COMPONENT_REF
7907 && DECL_BIT_FIELD
7908 (TREE_OPERAND (TREE_OPERAND (op0, 0), 1))))
7910 /* Don't leave an assignment inside a conversion
7911 unless assigning a bitfield. */
7912 tem = fold_build1_loc (loc, code, type, TREE_OPERAND (op0, 1));
7913 /* First do the assignment, then return converted constant. */
7914 tem = build2_loc (loc, COMPOUND_EXPR, TREE_TYPE (tem), op0, tem);
7915 TREE_NO_WARNING (tem) = 1;
7916 TREE_USED (tem) = 1;
7917 return tem;
7920 /* Convert (T)(x & c) into (T)x & (T)c, if c is an integer
7921 constant (if x has signed type, the sign bit cannot be set
7922 in c). This folds extension into the BIT_AND_EXPR.
7923 ??? We don't do it for BOOLEAN_TYPE or ENUMERAL_TYPE because they
7924 very likely don't have maximal range for their precision and this
7925 transformation effectively doesn't preserve non-maximal ranges. */
7926 if (TREE_CODE (type) == INTEGER_TYPE
7927 && TREE_CODE (op0) == BIT_AND_EXPR
7928 && TREE_CODE (TREE_OPERAND (op0, 1)) == INTEGER_CST)
7930 tree and_expr = op0;
7931 tree and0 = TREE_OPERAND (and_expr, 0);
7932 tree and1 = TREE_OPERAND (and_expr, 1);
7933 int change = 0;
7935 if (TYPE_UNSIGNED (TREE_TYPE (and_expr))
7936 || (TYPE_PRECISION (type)
7937 <= TYPE_PRECISION (TREE_TYPE (and_expr))))
7938 change = 1;
7939 else if (TYPE_PRECISION (TREE_TYPE (and1))
7940 <= HOST_BITS_PER_WIDE_INT
7941 && host_integerp (and1, 1))
7943 unsigned HOST_WIDE_INT cst;
7945 cst = tree_low_cst (and1, 1);
7946 cst &= (HOST_WIDE_INT) -1
7947 << (TYPE_PRECISION (TREE_TYPE (and1)) - 1);
7948 change = (cst == 0);
7949 #ifdef LOAD_EXTEND_OP
7950 if (change
7951 && !flag_syntax_only
7952 && (LOAD_EXTEND_OP (TYPE_MODE (TREE_TYPE (and0)))
7953 == ZERO_EXTEND))
7955 tree uns = unsigned_type_for (TREE_TYPE (and0));
7956 and0 = fold_convert_loc (loc, uns, and0);
7957 and1 = fold_convert_loc (loc, uns, and1);
7959 #endif
7961 if (change)
7963 tem = force_fit_type_double (type, tree_to_double_int (and1),
7964 0, TREE_OVERFLOW (and1));
7965 return fold_build2_loc (loc, BIT_AND_EXPR, type,
7966 fold_convert_loc (loc, type, and0), tem);
7970 /* Convert (T1)(X p+ Y) into ((T1)X p+ Y), for pointer type,
7971 when one of the new casts will fold away. Conservatively we assume
7972 that this happens when X or Y is NOP_EXPR or Y is INTEGER_CST. */
7973 if (POINTER_TYPE_P (type)
7974 && TREE_CODE (arg0) == POINTER_PLUS_EXPR
7975 && (!TYPE_RESTRICT (type) || TYPE_RESTRICT (TREE_TYPE (arg0)))
7976 && (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
7977 || TREE_CODE (TREE_OPERAND (arg0, 0)) == NOP_EXPR
7978 || TREE_CODE (TREE_OPERAND (arg0, 1)) == NOP_EXPR))
7980 tree arg00 = TREE_OPERAND (arg0, 0);
7981 tree arg01 = TREE_OPERAND (arg0, 1);
7983 return fold_build_pointer_plus_loc
7984 (loc, fold_convert_loc (loc, type, arg00), arg01);
7987 /* Convert (T1)(~(T2)X) into ~(T1)X if T1 and T2 are integral types
7988 of the same precision, and X is an integer type not narrower than
7989 types T1 or T2, i.e. the cast (T2)X isn't an extension. */
7990 if (INTEGRAL_TYPE_P (type)
7991 && TREE_CODE (op0) == BIT_NOT_EXPR
7992 && INTEGRAL_TYPE_P (TREE_TYPE (op0))
7993 && CONVERT_EXPR_P (TREE_OPERAND (op0, 0))
7994 && TYPE_PRECISION (type) == TYPE_PRECISION (TREE_TYPE (op0)))
7996 tem = TREE_OPERAND (TREE_OPERAND (op0, 0), 0);
7997 if (INTEGRAL_TYPE_P (TREE_TYPE (tem))
7998 && TYPE_PRECISION (type) <= TYPE_PRECISION (TREE_TYPE (tem)))
7999 return fold_build1_loc (loc, BIT_NOT_EXPR, type,
8000 fold_convert_loc (loc, type, tem));
8003 /* Convert (T1)(X * Y) into (T1)X * (T1)Y if T1 is narrower than the
8004 type of X and Y (integer types only). */
8005 if (INTEGRAL_TYPE_P (type)
8006 && TREE_CODE (op0) == MULT_EXPR
8007 && INTEGRAL_TYPE_P (TREE_TYPE (op0))
8008 && TYPE_PRECISION (type) < TYPE_PRECISION (TREE_TYPE (op0)))
8010 /* Be careful not to introduce new overflows. */
8011 tree mult_type;
8012 if (TYPE_OVERFLOW_WRAPS (type))
8013 mult_type = type;
8014 else
8015 mult_type = unsigned_type_for (type);
8017 if (TYPE_PRECISION (mult_type) < TYPE_PRECISION (TREE_TYPE (op0)))
8019 tem = fold_build2_loc (loc, MULT_EXPR, mult_type,
8020 fold_convert_loc (loc, mult_type,
8021 TREE_OPERAND (op0, 0)),
8022 fold_convert_loc (loc, mult_type,
8023 TREE_OPERAND (op0, 1)));
8024 return fold_convert_loc (loc, type, tem);
8028 tem = fold_convert_const (code, type, op0);
8029 return tem ? tem : NULL_TREE;
8031 case ADDR_SPACE_CONVERT_EXPR:
8032 if (integer_zerop (arg0))
8033 return fold_convert_const (code, type, arg0);
8034 return NULL_TREE;
8036 case FIXED_CONVERT_EXPR:
8037 tem = fold_convert_const (code, type, arg0);
8038 return tem ? tem : NULL_TREE;
8040 case VIEW_CONVERT_EXPR:
8041 if (TREE_TYPE (op0) == type)
8042 return op0;
8043 if (TREE_CODE (op0) == VIEW_CONVERT_EXPR)
8044 return fold_build1_loc (loc, VIEW_CONVERT_EXPR,
8045 type, TREE_OPERAND (op0, 0));
8046 if (TREE_CODE (op0) == MEM_REF)
8047 return fold_build2_loc (loc, MEM_REF, type,
8048 TREE_OPERAND (op0, 0), TREE_OPERAND (op0, 1));
8050 /* For integral conversions with the same precision or pointer
8051 conversions use a NOP_EXPR instead. */
8052 if ((INTEGRAL_TYPE_P (type)
8053 || POINTER_TYPE_P (type))
8054 && (INTEGRAL_TYPE_P (TREE_TYPE (op0))
8055 || POINTER_TYPE_P (TREE_TYPE (op0)))
8056 && TYPE_PRECISION (type) == TYPE_PRECISION (TREE_TYPE (op0)))
8057 return fold_convert_loc (loc, type, op0);
8059 /* Strip inner integral conversions that do not change the precision. */
8060 if (CONVERT_EXPR_P (op0)
8061 && (INTEGRAL_TYPE_P (TREE_TYPE (op0))
8062 || POINTER_TYPE_P (TREE_TYPE (op0)))
8063 && (INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (op0, 0)))
8064 || POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (op0, 0))))
8065 && (TYPE_PRECISION (TREE_TYPE (op0))
8066 == TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (op0, 0)))))
8067 return fold_build1_loc (loc, VIEW_CONVERT_EXPR,
8068 type, TREE_OPERAND (op0, 0));
8070 return fold_view_convert_expr (type, op0);
8072 case NEGATE_EXPR:
8073 tem = fold_negate_expr (loc, arg0);
8074 if (tem)
8075 return fold_convert_loc (loc, type, tem);
8076 return NULL_TREE;
8078 case ABS_EXPR:
8079 if (TREE_CODE (arg0) == INTEGER_CST || TREE_CODE (arg0) == REAL_CST)
8080 return fold_abs_const (arg0, type);
8081 else if (TREE_CODE (arg0) == NEGATE_EXPR)
8082 return fold_build1_loc (loc, ABS_EXPR, type, TREE_OPERAND (arg0, 0));
8083 /* Convert fabs((double)float) into (double)fabsf(float). */
8084 else if (TREE_CODE (arg0) == NOP_EXPR
8085 && TREE_CODE (type) == REAL_TYPE)
8087 tree targ0 = strip_float_extensions (arg0);
8088 if (targ0 != arg0)
8089 return fold_convert_loc (loc, type,
8090 fold_build1_loc (loc, ABS_EXPR,
8091 TREE_TYPE (targ0),
8092 targ0));
8094 /* ABS_EXPR<ABS_EXPR<x>> = ABS_EXPR<x> even if flag_wrapv is on. */
8095 else if (TREE_CODE (arg0) == ABS_EXPR)
8096 return arg0;
8097 else if (tree_expr_nonnegative_p (arg0))
8098 return arg0;
8100 /* Strip sign ops from argument. */
8101 if (TREE_CODE (type) == REAL_TYPE)
8103 tem = fold_strip_sign_ops (arg0);
8104 if (tem)
8105 return fold_build1_loc (loc, ABS_EXPR, type,
8106 fold_convert_loc (loc, type, tem));
8108 return NULL_TREE;
8110 case CONJ_EXPR:
8111 if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
8112 return fold_convert_loc (loc, type, arg0);
8113 if (TREE_CODE (arg0) == COMPLEX_EXPR)
8115 tree itype = TREE_TYPE (type);
8116 tree rpart = fold_convert_loc (loc, itype, TREE_OPERAND (arg0, 0));
8117 tree ipart = fold_convert_loc (loc, itype, TREE_OPERAND (arg0, 1));
8118 return fold_build2_loc (loc, COMPLEX_EXPR, type, rpart,
8119 negate_expr (ipart));
8121 if (TREE_CODE (arg0) == COMPLEX_CST)
8123 tree itype = TREE_TYPE (type);
8124 tree rpart = fold_convert_loc (loc, itype, TREE_REALPART (arg0));
8125 tree ipart = fold_convert_loc (loc, itype, TREE_IMAGPART (arg0));
8126 return build_complex (type, rpart, negate_expr (ipart));
8128 if (TREE_CODE (arg0) == CONJ_EXPR)
8129 return fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
8130 return NULL_TREE;
8132 case BIT_NOT_EXPR:
8133 if (TREE_CODE (arg0) == INTEGER_CST)
8134 return fold_not_const (arg0, type);
8135 else if (TREE_CODE (arg0) == BIT_NOT_EXPR)
8136 return fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
8137 /* Convert ~ (-A) to A - 1. */
8138 else if (INTEGRAL_TYPE_P (type) && TREE_CODE (arg0) == NEGATE_EXPR)
8139 return fold_build2_loc (loc, MINUS_EXPR, type,
8140 fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0)),
8141 build_int_cst (type, 1));
8142 /* Convert ~ (A - 1) or ~ (A + -1) to -A. */
8143 else if (INTEGRAL_TYPE_P (type)
8144 && ((TREE_CODE (arg0) == MINUS_EXPR
8145 && integer_onep (TREE_OPERAND (arg0, 1)))
8146 || (TREE_CODE (arg0) == PLUS_EXPR
8147 && integer_all_onesp (TREE_OPERAND (arg0, 1)))))
8148 return fold_build1_loc (loc, NEGATE_EXPR, type,
8149 fold_convert_loc (loc, type,
8150 TREE_OPERAND (arg0, 0)));
8151 /* Convert ~(X ^ Y) to ~X ^ Y or X ^ ~Y if ~X or ~Y simplify. */
8152 else if (TREE_CODE (arg0) == BIT_XOR_EXPR
8153 && (tem = fold_unary_loc (loc, BIT_NOT_EXPR, type,
8154 fold_convert_loc (loc, type,
8155 TREE_OPERAND (arg0, 0)))))
8156 return fold_build2_loc (loc, BIT_XOR_EXPR, type, tem,
8157 fold_convert_loc (loc, type,
8158 TREE_OPERAND (arg0, 1)));
8159 else if (TREE_CODE (arg0) == BIT_XOR_EXPR
8160 && (tem = fold_unary_loc (loc, BIT_NOT_EXPR, type,
8161 fold_convert_loc (loc, type,
8162 TREE_OPERAND (arg0, 1)))))
8163 return fold_build2_loc (loc, BIT_XOR_EXPR, type,
8164 fold_convert_loc (loc, type,
8165 TREE_OPERAND (arg0, 0)), tem);
8166 /* Perform BIT_NOT_EXPR on each element individually. */
8167 else if (TREE_CODE (arg0) == VECTOR_CST)
8169 tree *elements;
8170 tree elem;
8171 unsigned count = VECTOR_CST_NELTS (arg0), i;
8173 elements = XALLOCAVEC (tree, count);
8174 for (i = 0; i < count; i++)
8176 elem = VECTOR_CST_ELT (arg0, i);
8177 elem = fold_unary_loc (loc, BIT_NOT_EXPR, TREE_TYPE (type), elem);
8178 if (elem == NULL_TREE)
8179 break;
8180 elements[i] = elem;
8182 if (i == count)
8183 return build_vector (type, elements);
8186 return NULL_TREE;
8188 case TRUTH_NOT_EXPR:
8189 /* The argument to invert_truthvalue must have Boolean type. */
8190 if (TREE_CODE (TREE_TYPE (arg0)) != BOOLEAN_TYPE)
8191 arg0 = fold_convert_loc (loc, boolean_type_node, arg0);
8193 /* Note that the operand of this must be an int
8194 and its values must be 0 or 1.
8195 ("true" is a fixed value perhaps depending on the language,
8196 but we don't handle values other than 1 correctly yet.) */
8197 tem = fold_truth_not_expr (loc, arg0);
8198 if (!tem)
8199 return NULL_TREE;
8200 return fold_convert_loc (loc, type, tem);
8202 case REALPART_EXPR:
8203 if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
8204 return fold_convert_loc (loc, type, arg0);
8205 if (TREE_CODE (arg0) == COMPLEX_EXPR)
8206 return omit_one_operand_loc (loc, type, TREE_OPERAND (arg0, 0),
8207 TREE_OPERAND (arg0, 1));
8208 if (TREE_CODE (arg0) == COMPLEX_CST)
8209 return fold_convert_loc (loc, type, TREE_REALPART (arg0));
8210 if (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
8212 tree itype = TREE_TYPE (TREE_TYPE (arg0));
8213 tem = fold_build2_loc (loc, TREE_CODE (arg0), itype,
8214 fold_build1_loc (loc, REALPART_EXPR, itype,
8215 TREE_OPERAND (arg0, 0)),
8216 fold_build1_loc (loc, REALPART_EXPR, itype,
8217 TREE_OPERAND (arg0, 1)));
8218 return fold_convert_loc (loc, type, tem);
8220 if (TREE_CODE (arg0) == CONJ_EXPR)
8222 tree itype = TREE_TYPE (TREE_TYPE (arg0));
8223 tem = fold_build1_loc (loc, REALPART_EXPR, itype,
8224 TREE_OPERAND (arg0, 0));
8225 return fold_convert_loc (loc, type, tem);
8227 if (TREE_CODE (arg0) == CALL_EXPR)
8229 tree fn = get_callee_fndecl (arg0);
8230 if (fn && DECL_BUILT_IN_CLASS (fn) == BUILT_IN_NORMAL)
8231 switch (DECL_FUNCTION_CODE (fn))
8233 CASE_FLT_FN (BUILT_IN_CEXPI):
8234 fn = mathfn_built_in (type, BUILT_IN_COS);
8235 if (fn)
8236 return build_call_expr_loc (loc, fn, 1, CALL_EXPR_ARG (arg0, 0));
8237 break;
8239 default:
8240 break;
8243 return NULL_TREE;
8245 case IMAGPART_EXPR:
8246 if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
8247 return build_zero_cst (type);
8248 if (TREE_CODE (arg0) == COMPLEX_EXPR)
8249 return omit_one_operand_loc (loc, type, TREE_OPERAND (arg0, 1),
8250 TREE_OPERAND (arg0, 0));
8251 if (TREE_CODE (arg0) == COMPLEX_CST)
8252 return fold_convert_loc (loc, type, TREE_IMAGPART (arg0));
8253 if (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
8255 tree itype = TREE_TYPE (TREE_TYPE (arg0));
8256 tem = fold_build2_loc (loc, TREE_CODE (arg0), itype,
8257 fold_build1_loc (loc, IMAGPART_EXPR, itype,
8258 TREE_OPERAND (arg0, 0)),
8259 fold_build1_loc (loc, IMAGPART_EXPR, itype,
8260 TREE_OPERAND (arg0, 1)));
8261 return fold_convert_loc (loc, type, tem);
8263 if (TREE_CODE (arg0) == CONJ_EXPR)
8265 tree itype = TREE_TYPE (TREE_TYPE (arg0));
8266 tem = fold_build1_loc (loc, IMAGPART_EXPR, itype, TREE_OPERAND (arg0, 0));
8267 return fold_convert_loc (loc, type, negate_expr (tem));
8269 if (TREE_CODE (arg0) == CALL_EXPR)
8271 tree fn = get_callee_fndecl (arg0);
8272 if (fn && DECL_BUILT_IN_CLASS (fn) == BUILT_IN_NORMAL)
8273 switch (DECL_FUNCTION_CODE (fn))
8275 CASE_FLT_FN (BUILT_IN_CEXPI):
8276 fn = mathfn_built_in (type, BUILT_IN_SIN);
8277 if (fn)
8278 return build_call_expr_loc (loc, fn, 1, CALL_EXPR_ARG (arg0, 0));
8279 break;
8281 default:
8282 break;
8285 return NULL_TREE;
8287 case INDIRECT_REF:
8288 /* Fold *&X to X if X is an lvalue. */
8289 if (TREE_CODE (op0) == ADDR_EXPR)
8291 tree op00 = TREE_OPERAND (op0, 0);
8292 if ((TREE_CODE (op00) == VAR_DECL
8293 || TREE_CODE (op00) == PARM_DECL
8294 || TREE_CODE (op00) == RESULT_DECL)
8295 && !TREE_READONLY (op00))
8296 return op00;
8298 return NULL_TREE;
8300 case VEC_UNPACK_LO_EXPR:
8301 case VEC_UNPACK_HI_EXPR:
8302 case VEC_UNPACK_FLOAT_LO_EXPR:
8303 case VEC_UNPACK_FLOAT_HI_EXPR:
8305 unsigned int nelts = TYPE_VECTOR_SUBPARTS (type), i;
8306 tree *elts;
8307 enum tree_code subcode;
8309 gcc_assert (TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0)) == nelts * 2);
8310 if (TREE_CODE (arg0) != VECTOR_CST)
8311 return NULL_TREE;
8313 elts = XALLOCAVEC (tree, nelts * 2);
8314 if (!vec_cst_ctor_to_array (arg0, elts))
8315 return NULL_TREE;
8317 if ((!BYTES_BIG_ENDIAN) ^ (code == VEC_UNPACK_LO_EXPR
8318 || code == VEC_UNPACK_FLOAT_LO_EXPR))
8319 elts += nelts;
8321 if (code == VEC_UNPACK_LO_EXPR || code == VEC_UNPACK_HI_EXPR)
8322 subcode = NOP_EXPR;
8323 else
8324 subcode = FLOAT_EXPR;
8326 for (i = 0; i < nelts; i++)
8328 elts[i] = fold_convert_const (subcode, TREE_TYPE (type), elts[i]);
8329 if (elts[i] == NULL_TREE || !CONSTANT_CLASS_P (elts[i]))
8330 return NULL_TREE;
8333 return build_vector (type, elts);
8336 default:
8337 return NULL_TREE;
8338 } /* switch (code) */
8342 /* If the operation was a conversion do _not_ mark a resulting constant
8343 with TREE_OVERFLOW if the original constant was not. These conversions
8344 have implementation defined behavior and retaining the TREE_OVERFLOW
8345 flag here would confuse later passes such as VRP. */
8346 tree
8347 fold_unary_ignore_overflow_loc (location_t loc, enum tree_code code,
8348 tree type, tree op0)
8350 tree res = fold_unary_loc (loc, code, type, op0);
8351 if (res
8352 && TREE_CODE (res) == INTEGER_CST
8353 && TREE_CODE (op0) == INTEGER_CST
8354 && CONVERT_EXPR_CODE_P (code))
8355 TREE_OVERFLOW (res) = TREE_OVERFLOW (op0);
8357 return res;
8360 /* Fold a binary bitwise/truth expression of code CODE and type TYPE with
8361 operands OP0 and OP1. LOC is the location of the resulting expression.
8362 ARG0 and ARG1 are the NOP-stripped results of OP0 and OP1.
8363 Return the folded expression if folding is successful. Otherwise,
8364 return NULL_TREE. */
8365 static tree
8366 fold_truth_andor (location_t loc, enum tree_code code, tree type,
8367 tree arg0, tree arg1, tree op0, tree op1)
8369 tree tem;
8371 /* We only do these simplifications if we are optimizing. */
8372 if (!optimize)
8373 return NULL_TREE;
8375 /* Check for things like (A || B) && (A || C). We can convert this
8376 to A || (B && C). Note that either operator can be any of the four
8377 truth and/or operations and the transformation will still be
8378 valid. Also note that we only care about order for the
8379 ANDIF and ORIF operators. If B contains side effects, this
8380 might change the truth-value of A. */
8381 if (TREE_CODE (arg0) == TREE_CODE (arg1)
8382 && (TREE_CODE (arg0) == TRUTH_ANDIF_EXPR
8383 || TREE_CODE (arg0) == TRUTH_ORIF_EXPR
8384 || TREE_CODE (arg0) == TRUTH_AND_EXPR
8385 || TREE_CODE (arg0) == TRUTH_OR_EXPR)
8386 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (arg0, 1)))
8388 tree a00 = TREE_OPERAND (arg0, 0);
8389 tree a01 = TREE_OPERAND (arg0, 1);
8390 tree a10 = TREE_OPERAND (arg1, 0);
8391 tree a11 = TREE_OPERAND (arg1, 1);
8392 int commutative = ((TREE_CODE (arg0) == TRUTH_OR_EXPR
8393 || TREE_CODE (arg0) == TRUTH_AND_EXPR)
8394 && (code == TRUTH_AND_EXPR
8395 || code == TRUTH_OR_EXPR));
8397 if (operand_equal_p (a00, a10, 0))
8398 return fold_build2_loc (loc, TREE_CODE (arg0), type, a00,
8399 fold_build2_loc (loc, code, type, a01, a11));
8400 else if (commutative && operand_equal_p (a00, a11, 0))
8401 return fold_build2_loc (loc, TREE_CODE (arg0), type, a00,
8402 fold_build2_loc (loc, code, type, a01, a10));
8403 else if (commutative && operand_equal_p (a01, a10, 0))
8404 return fold_build2_loc (loc, TREE_CODE (arg0), type, a01,
8405 fold_build2_loc (loc, code, type, a00, a11));
8407 /* This case is tricky because we must either have commutative
8408 operators or else A10 must not have side-effects. */
8410 else if ((commutative || ! TREE_SIDE_EFFECTS (a10))
8411 && operand_equal_p (a01, a11, 0))
8412 return fold_build2_loc (loc, TREE_CODE (arg0), type,
8413 fold_build2_loc (loc, code, type, a00, a10),
8414 a01);
8417 /* See if we can build a range comparison. */
8418 if (0 != (tem = fold_range_test (loc, code, type, op0, op1)))
8419 return tem;
8421 if ((code == TRUTH_ANDIF_EXPR && TREE_CODE (arg0) == TRUTH_ORIF_EXPR)
8422 || (code == TRUTH_ORIF_EXPR && TREE_CODE (arg0) == TRUTH_ANDIF_EXPR))
8424 tem = merge_truthop_with_opposite_arm (loc, arg0, arg1, true);
8425 if (tem)
8426 return fold_build2_loc (loc, code, type, tem, arg1);
8429 if ((code == TRUTH_ANDIF_EXPR && TREE_CODE (arg1) == TRUTH_ORIF_EXPR)
8430 || (code == TRUTH_ORIF_EXPR && TREE_CODE (arg1) == TRUTH_ANDIF_EXPR))
8432 tem = merge_truthop_with_opposite_arm (loc, arg1, arg0, false);
8433 if (tem)
8434 return fold_build2_loc (loc, code, type, arg0, tem);
8437 /* Check for the possibility of merging component references. If our
8438 lhs is another similar operation, try to merge its rhs with our
8439 rhs. Then try to merge our lhs and rhs. */
8440 if (TREE_CODE (arg0) == code
8441 && 0 != (tem = fold_truth_andor_1 (loc, code, type,
8442 TREE_OPERAND (arg0, 1), arg1)))
8443 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0), tem);
8445 if ((tem = fold_truth_andor_1 (loc, code, type, arg0, arg1)) != 0)
8446 return tem;
8448 if ((BRANCH_COST (optimize_function_for_speed_p (cfun),
8449 false) >= 2)
8450 && LOGICAL_OP_NON_SHORT_CIRCUIT
8451 && (code == TRUTH_AND_EXPR
8452 || code == TRUTH_ANDIF_EXPR
8453 || code == TRUTH_OR_EXPR
8454 || code == TRUTH_ORIF_EXPR))
8456 enum tree_code ncode, icode;
8458 ncode = (code == TRUTH_ANDIF_EXPR || code == TRUTH_AND_EXPR)
8459 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR;
8460 icode = ncode == TRUTH_AND_EXPR ? TRUTH_ANDIF_EXPR : TRUTH_ORIF_EXPR;
8462 /* Transform ((A AND-IF B) AND[-IF] C) into (A AND-IF (B AND C)),
8463 or ((A OR-IF B) OR[-IF] C) into (A OR-IF (B OR C))
8464 We don't want to pack more than two leaves into a non-IF AND/OR
8465 expression.
8466 If the tree code of the left-hand operand isn't an AND/OR-IF code
8467 and isn't equal to IF-CODE, then we don't want to add the
8468 right-hand operand.  If the inner right-hand side of the
8469 left-hand operand has side-effects, or isn't simple, then we
8470 can't add to it, as otherwise we might destroy the if-sequence. */
8471 if (TREE_CODE (arg0) == icode
8472 && simple_operand_p_2 (arg1)
8473 /* Needed for sequence points, to handle trapping operations
8474 and side-effects. */
8475 && simple_operand_p_2 (TREE_OPERAND (arg0, 1)))
8477 tem = fold_build2_loc (loc, ncode, type, TREE_OPERAND (arg0, 1),
8478 arg1);
8479 return fold_build2_loc (loc, icode, type, TREE_OPERAND (arg0, 0),
8480 tem);
8482 /* Same as above but for (A AND[-IF] (B AND-IF C)) -> ((A AND B) AND-IF C),
8483 or (A OR[-IF] (B OR-IF C)) -> ((A OR B) OR-IF C). */
8484 else if (TREE_CODE (arg1) == icode
8485 && simple_operand_p_2 (arg0)
8486 /* Needed for sequence points, to handle trapping operations
8487 and side-effects. */
8488 && simple_operand_p_2 (TREE_OPERAND (arg1, 0)))
8490 tem = fold_build2_loc (loc, ncode, type,
8491 arg0, TREE_OPERAND (arg1, 0));
8492 return fold_build2_loc (loc, icode, type, tem,
8493 TREE_OPERAND (arg1, 1));
8495 /* Transform (A AND-IF B) into (A AND B), or (A OR-IF B)
8496 into (A OR B).
8497 For sequence point consistency, we need to check for trapping
8498 and side-effects. */
8499 else if (code == icode && simple_operand_p_2 (arg0)
8500 && simple_operand_p_2 (arg1))
8501 return fold_build2_loc (loc, ncode, type, arg0, arg1);
8504 return NULL_TREE;
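/* Editorial sketch, not part of GCC: the first rewrite above on
   plain ints.  (a || b) && (a || c) equals a || (b && c) as long as
   b has no side effects, which is what the TREE_SIDE_EFFECTS check
   guards.  */
static int
sketch_or_and_or (int a, int b, int c)
{
  /* Equivalent to (a || b) && (a || c) for side-effect-free b.  */
  return a || (b && c);
}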
8507 /* Fold a binary expression of code CODE and type TYPE with operands
8508 OP0 and OP1, containing either a MIN-MAX or a MAX-MIN combination.
8509 Return the folded expression if folding is successful. Otherwise,
8510 return NULL_TREE. */
8512 static tree
8513 fold_minmax (location_t loc, enum tree_code code, tree type, tree op0, tree op1)
8515 enum tree_code compl_code;
8517 if (code == MIN_EXPR)
8518 compl_code = MAX_EXPR;
8519 else if (code == MAX_EXPR)
8520 compl_code = MIN_EXPR;
8521 else
8522 gcc_unreachable ();
8524 /* MIN (MAX (a, b), b) == b. */
8525 if (TREE_CODE (op0) == compl_code
8526 && operand_equal_p (TREE_OPERAND (op0, 1), op1, 0))
8527 return omit_one_operand_loc (loc, type, op1, TREE_OPERAND (op0, 0));
8529 /* MIN (MAX (b, a), b) == b. */
8530 if (TREE_CODE (op0) == compl_code
8531 && operand_equal_p (TREE_OPERAND (op0, 0), op1, 0)
8532 && reorder_operands_p (TREE_OPERAND (op0, 1), op1))
8533 return omit_one_operand_loc (loc, type, op1, TREE_OPERAND (op0, 1));
8535 /* MIN (a, MAX (a, b)) == a. */
8536 if (TREE_CODE (op1) == compl_code
8537 && operand_equal_p (op0, TREE_OPERAND (op1, 0), 0)
8538 && reorder_operands_p (op0, TREE_OPERAND (op1, 1)))
8539 return omit_one_operand_loc (loc, type, op0, TREE_OPERAND (op1, 1));
8541 /* MIN (a, MAX (b, a)) == a. */
8542 if (TREE_CODE (op1) == compl_code
8543 && operand_equal_p (op0, TREE_OPERAND (op1, 1), 0)
8544 && reorder_operands_p (op0, TREE_OPERAND (op1, 0)))
8545 return omit_one_operand_loc (loc, type, op0, TREE_OPERAND (op1, 0));
8547 return NULL_TREE;
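/* Editorial sketch, not part of GCC: the identities above on plain
   integers.  Since max (a, b) >= b, taking the min of that with b
   always yields b; the other three cases are symmetric.  */
static int
sketch_min (int x, int y)
{
  return x < y ? x : y;
}

static int
sketch_max (int x, int y)
{
  return x > y ? x : y;
}

/* For all a, b: sketch_min (sketch_max (a, b), b) == b and
   sketch_min (a, sketch_max (a, b)) == a.  */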
8550 /* Helper that tries to canonicalize the comparison ARG0 CODE ARG1
8551 by changing CODE to reduce the magnitude of constants involved in
8552 ARG0 of the comparison.
8553 Returns a canonicalized comparison tree if a simplification was
8554 possible, otherwise returns NULL_TREE.
8555 Set *STRICT_OVERFLOW_P to true if the canonicalization is only
8556 valid if signed overflow is undefined. */
8558 static tree
8559 maybe_canonicalize_comparison_1 (location_t loc, enum tree_code code, tree type,
8560 tree arg0, tree arg1,
8561 bool *strict_overflow_p)
8563 enum tree_code code0 = TREE_CODE (arg0);
8564 tree t, cst0 = NULL_TREE;
8565 int sgn0;
8566 bool swap = false;
8568 /* Match A +- CST code arg1 and CST code arg1. We can change the
8569 first form only if overflow is undefined. */
8570 if (!((TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0))
8571 /* In principle pointers also have undefined overflow behavior,
8572 but that causes problems elsewhere. */
8573 && !POINTER_TYPE_P (TREE_TYPE (arg0))
8574 && (code0 == MINUS_EXPR
8575 || code0 == PLUS_EXPR)
8576 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
8577 || code0 == INTEGER_CST))
8578 return NULL_TREE;
8580 /* Identify the constant in arg0 and its sign. */
8581 if (code0 == INTEGER_CST)
8582 cst0 = arg0;
8583 else
8584 cst0 = TREE_OPERAND (arg0, 1);
8585 sgn0 = tree_int_cst_sgn (cst0);
8587 /* Overflowed constants and zero will cause problems. */
8588 if (integer_zerop (cst0)
8589 || TREE_OVERFLOW (cst0))
8590 return NULL_TREE;
8592 /* See if we can reduce the magnitude of the constant in
8593 arg0 by changing the comparison code. */
8594 if (code0 == INTEGER_CST)
8596 /* CST <= arg1 -> CST-1 < arg1. */
8597 if (code == LE_EXPR && sgn0 == 1)
8598 code = LT_EXPR;
8599 /* -CST < arg1 -> -CST-1 <= arg1. */
8600 else if (code == LT_EXPR && sgn0 == -1)
8601 code = LE_EXPR;
8602 /* CST > arg1 -> CST-1 >= arg1. */
8603 else if (code == GT_EXPR && sgn0 == 1)
8604 code = GE_EXPR;
8605 /* -CST >= arg1 -> -CST-1 > arg1. */
8606 else if (code == GE_EXPR && sgn0 == -1)
8607 code = GT_EXPR;
8608 else
8609 return NULL_TREE;
8610 /* arg1 code' CST' might be more canonical. */
8611 swap = true;
8613 else
8615 /* A - CST < arg1 -> A - CST-1 <= arg1. */
8616 if (code == LT_EXPR
8617 && code0 == ((sgn0 == -1) ? PLUS_EXPR : MINUS_EXPR))
8618 code = LE_EXPR;
8619 /* A + CST > arg1 -> A + CST-1 >= arg1. */
8620 else if (code == GT_EXPR
8621 && code0 == ((sgn0 == -1) ? MINUS_EXPR : PLUS_EXPR))
8622 code = GE_EXPR;
8623 /* A + CST <= arg1 -> A + CST-1 < arg1. */
8624 else if (code == LE_EXPR
8625 && code0 == ((sgn0 == -1) ? MINUS_EXPR : PLUS_EXPR))
8626 code = LT_EXPR;
8627 /* A - CST >= arg1 -> A - CST-1 > arg1. */
8628 else if (code == GE_EXPR
8629 && code0 == ((sgn0 == -1) ? PLUS_EXPR : MINUS_EXPR))
8630 code = GT_EXPR;
8631 else
8632 return NULL_TREE;
8633 *strict_overflow_p = true;
8636 /* Now build the constant reduced in magnitude. But not if that
8637 would produce one outside of its type's range. */
8638 if (INTEGRAL_TYPE_P (TREE_TYPE (cst0))
8639 && ((sgn0 == 1
8640 && TYPE_MIN_VALUE (TREE_TYPE (cst0))
8641 && tree_int_cst_equal (cst0, TYPE_MIN_VALUE (TREE_TYPE (cst0))))
8642 || (sgn0 == -1
8643 && TYPE_MAX_VALUE (TREE_TYPE (cst0))
8644 && tree_int_cst_equal (cst0, TYPE_MAX_VALUE (TREE_TYPE (cst0))))))
8645 /* We cannot swap the comparison here as that would cause us to
8646 endlessly recurse. */
8647 return NULL_TREE;
8649 t = int_const_binop (sgn0 == -1 ? PLUS_EXPR : MINUS_EXPR,
8650 cst0, build_int_cst (TREE_TYPE (cst0), 1));
8651 if (code0 != INTEGER_CST)
8652 t = fold_build2_loc (loc, code0, TREE_TYPE (arg0), TREE_OPERAND (arg0, 0), t);
8653 t = fold_convert (TREE_TYPE (arg1), t);
8655 /* If swapping would yield a more canonical form, do so. */
8656 if (swap)
8657 return fold_build2_loc (loc, swap_tree_comparison (code), type, arg1, t);
8658 else
8659 return fold_build2_loc (loc, code, type, t, arg1);
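/* Editorial sketch, not part of GCC: one instance of the rewrite
   above.  For integers, x - 5 < y holds exactly when x - 4 <= y, so
   the constant's magnitude drops by one.  This is valid only when
   the subtractions cannot overflow, which is what
   *strict_overflow_p records for the caller's warning.  */
#include <assert.h>

static void
sketch_check_reduction (long x, long y)
{
  assert ((x - 5 < y) == (x - 4 <= y));
}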
8662 /* Canonicalize the comparison ARG0 CODE ARG1 with type TYPE with undefined
8663 overflow further. Try to decrease the magnitude of constants involved
8664 by changing LE_EXPR and GE_EXPR to LT_EXPR and GT_EXPR or vice versa
8665 and put sole constants at the second argument position.
8666 Returns the canonicalized tree if changed, otherwise NULL_TREE. */
8668 static tree
8669 maybe_canonicalize_comparison (location_t loc, enum tree_code code, tree type,
8670 tree arg0, tree arg1)
8672 tree t;
8673 bool strict_overflow_p;
8674 const char * const warnmsg = G_("assuming signed overflow does not occur "
8675 "when reducing constant in comparison");
8677 /* Try canonicalization by simplifying arg0. */
8678 strict_overflow_p = false;
8679 t = maybe_canonicalize_comparison_1 (loc, code, type, arg0, arg1,
8680 &strict_overflow_p);
8681 if (t)
8683 if (strict_overflow_p)
8684 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_MAGNITUDE);
8685 return t;
8688 /* Try canonicalization by simplifying arg1 using the swapped
8689 comparison. */
8690 code = swap_tree_comparison (code);
8691 strict_overflow_p = false;
8692 t = maybe_canonicalize_comparison_1 (loc, code, type, arg1, arg0,
8693 &strict_overflow_p);
8694 if (t && strict_overflow_p)
8695 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_MAGNITUDE);
8696 return t;
8699 /* Return whether BASE + OFFSET + BITPOS may wrap around the address
8700 space. This is used to avoid issuing overflow warnings for
8701 expressions like &p->x which cannot wrap. */
8703 static bool
8704 pointer_may_wrap_p (tree base, tree offset, HOST_WIDE_INT bitpos)
8706 unsigned HOST_WIDE_INT offset_low, total_low;
8707 HOST_WIDE_INT size, offset_high, total_high;
8709 if (!POINTER_TYPE_P (TREE_TYPE (base)))
8710 return true;
8712 if (bitpos < 0)
8713 return true;
8715 if (offset == NULL_TREE)
8717 offset_low = 0;
8718 offset_high = 0;
8720 else if (TREE_CODE (offset) != INTEGER_CST || TREE_OVERFLOW (offset))
8721 return true;
8722 else
8724 offset_low = TREE_INT_CST_LOW (offset);
8725 offset_high = TREE_INT_CST_HIGH (offset);
8728 if (add_double_with_sign (offset_low, offset_high,
8729 bitpos / BITS_PER_UNIT, 0,
8730 &total_low, &total_high,
8731 true))
8732 return true;
8734 if (total_high != 0)
8735 return true;
8737 size = int_size_in_bytes (TREE_TYPE (TREE_TYPE (base)));
8738 if (size <= 0)
8739 return true;
8741 /* We can do slightly better for SIZE if we have an ADDR_EXPR of an
8742 array. */
8743 if (TREE_CODE (base) == ADDR_EXPR)
8745 HOST_WIDE_INT base_size;
8747 base_size = int_size_in_bytes (TREE_TYPE (TREE_OPERAND (base, 0)));
8748 if (base_size > 0 && size < base_size)
8749 size = base_size;
8752 return total_low > (unsigned HOST_WIDE_INT) size;
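/* Editorial sketch, not part of GCC: the final test above in
   isolation.  A non-negative byte offset cannot wrap the address
   space while it stays within the pointed-to object's size.  */
static int
sketch_offset_wraps (unsigned long total_offset, long object_size)
{
  /* Mirrors the "size <= 0" and "total_low > size" checks above.  */
  return object_size <= 0 || total_offset > (unsigned long) object_size;
}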
8755 /* Subroutine of fold_binary. This routine performs all of the
8756 transformations that are common to the equality/inequality
8757 operators (EQ_EXPR and NE_EXPR) and the ordering operators
8758 (LT_EXPR, LE_EXPR, GE_EXPR and GT_EXPR). Callers other than
8759 fold_binary itself should go through fold_binary. Fold a comparison with
8760 tree code CODE and type TYPE with operands OP0 and OP1. Return
8761 the folded comparison or NULL_TREE. */
8763 static tree
8764 fold_comparison (location_t loc, enum tree_code code, tree type,
8765 tree op0, tree op1)
8767 tree arg0, arg1, tem;
8769 arg0 = op0;
8770 arg1 = op1;
8772 STRIP_SIGN_NOPS (arg0);
8773 STRIP_SIGN_NOPS (arg1);
8775 tem = fold_relational_const (code, type, arg0, arg1);
8776 if (tem != NULL_TREE)
8777 return tem;
8779 /* If one arg is a real or integer constant, put it last. */
8780 if (tree_swap_operands_p (arg0, arg1, true))
8781 return fold_build2_loc (loc, swap_tree_comparison (code), type, op1, op0);
8783 /* Transform comparisons of the form X +- C1 CMP C2 to X CMP C2 +- C1. */
8784 if ((TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
8785 && (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
8786 && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1))
8787 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
8788 && (TREE_CODE (arg1) == INTEGER_CST
8789 && !TREE_OVERFLOW (arg1)))
8791 tree const1 = TREE_OPERAND (arg0, 1);
8792 tree const2 = arg1;
8793 tree variable = TREE_OPERAND (arg0, 0);
8794 tree lhs;
8795 int lhs_add;
8796 lhs_add = TREE_CODE (arg0) != PLUS_EXPR;
8798 lhs = fold_build2_loc (loc, lhs_add ? PLUS_EXPR : MINUS_EXPR,
8799 TREE_TYPE (arg1), const2, const1);
8801 /* If the constant operation overflowed this can be
8802 simplified as a comparison against INT_MAX/INT_MIN. */
8803 if (TREE_CODE (lhs) == INTEGER_CST
8804 && TREE_OVERFLOW (lhs))
8806 int const1_sgn = tree_int_cst_sgn (const1);
8807 enum tree_code code2 = code;
8809 /* Get the sign of the constant on the lhs if the
8810 operation were VARIABLE + CONST1. */
8811 if (TREE_CODE (arg0) == MINUS_EXPR)
8812 const1_sgn = -const1_sgn;
8814 /* The sign of the constant determines if we overflowed
8815 INT_MAX (const1_sgn == -1) or INT_MIN (const1_sgn == 1).
8816 Canonicalize to the INT_MIN overflow by swapping the comparison
8817 if necessary. */
8818 if (const1_sgn == -1)
8819 code2 = swap_tree_comparison (code);
8821 /* We now can look at the canonicalized case
8822 VARIABLE + 1 CODE2 INT_MIN
8823 and decide on the result. */
8824 if (code2 == LT_EXPR
8825 || code2 == LE_EXPR
8826 || code2 == EQ_EXPR)
8827 return omit_one_operand_loc (loc, type, boolean_false_node, variable);
8828 else if (code2 == NE_EXPR
8829 || code2 == GE_EXPR
8830 || code2 == GT_EXPR)
8831 return omit_one_operand_loc (loc, type, boolean_true_node, variable);
8834 if (TREE_CODE (lhs) == TREE_CODE (arg1)
8835 && (TREE_CODE (lhs) != INTEGER_CST
8836 || !TREE_OVERFLOW (lhs)))
8838 if (code != EQ_EXPR && code != NE_EXPR)
8839 fold_overflow_warning ("assuming signed overflow does not occur "
8840 "when changing X +- C1 cmp C2 to "
8841 "X cmp C1 +- C2",
8842 WARN_STRICT_OVERFLOW_COMPARISON);
8843 return fold_build2_loc (loc, code, type, variable, lhs);
8847 /* For comparisons of pointers we can decompose them into a compile-time
8848 comparison of the base objects and the offsets into the object.
8849 This requires at least one operand being an ADDR_EXPR or a
8850 POINTER_PLUS_EXPR to do more than the operand_equal_p test below. */
8851 if (POINTER_TYPE_P (TREE_TYPE (arg0))
8852 && (TREE_CODE (arg0) == ADDR_EXPR
8853 || TREE_CODE (arg1) == ADDR_EXPR
8854 || TREE_CODE (arg0) == POINTER_PLUS_EXPR
8855 || TREE_CODE (arg1) == POINTER_PLUS_EXPR))
8857 tree base0, base1, offset0 = NULL_TREE, offset1 = NULL_TREE;
8858 HOST_WIDE_INT bitsize, bitpos0 = 0, bitpos1 = 0;
8859 enum machine_mode mode;
8860 int volatilep, unsignedp;
8861 bool indirect_base0 = false, indirect_base1 = false;
8863 /* Get base and offset for the access. Strip ADDR_EXPR for
8864 get_inner_reference, but put it back by stripping INDIRECT_REF
8865 off the base object if possible. indirect_baseN will be true
8866 if baseN is not an address but refers to the object itself. */
8867 base0 = arg0;
8868 if (TREE_CODE (arg0) == ADDR_EXPR)
8870 base0 = get_inner_reference (TREE_OPERAND (arg0, 0),
8871 &bitsize, &bitpos0, &offset0, &mode,
8872 &unsignedp, &volatilep, false);
8873 if (TREE_CODE (base0) == INDIRECT_REF)
8874 base0 = TREE_OPERAND (base0, 0);
8875 else
8876 indirect_base0 = true;
8878 else if (TREE_CODE (arg0) == POINTER_PLUS_EXPR)
8880 base0 = TREE_OPERAND (arg0, 0);
8881 STRIP_SIGN_NOPS (base0);
8882 if (TREE_CODE (base0) == ADDR_EXPR)
8884 base0 = TREE_OPERAND (base0, 0);
8885 indirect_base0 = true;
8887 offset0 = TREE_OPERAND (arg0, 1);
8888 if (host_integerp (offset0, 0))
8890 HOST_WIDE_INT off = size_low_cst (offset0);
8891 if ((HOST_WIDE_INT) (((unsigned HOST_WIDE_INT) off)
8892 * BITS_PER_UNIT)
8893 / BITS_PER_UNIT == (HOST_WIDE_INT) off)
8895 bitpos0 = off * BITS_PER_UNIT;
8896 offset0 = NULL_TREE;
8901 base1 = arg1;
8902 if (TREE_CODE (arg1) == ADDR_EXPR)
8904 base1 = get_inner_reference (TREE_OPERAND (arg1, 0),
8905 &bitsize, &bitpos1, &offset1, &mode,
8906 &unsignedp, &volatilep, false);
8907 if (TREE_CODE (base1) == INDIRECT_REF)
8908 base1 = TREE_OPERAND (base1, 0);
8909 else
8910 indirect_base1 = true;
8912 else if (TREE_CODE (arg1) == POINTER_PLUS_EXPR)
8914 base1 = TREE_OPERAND (arg1, 0);
8915 STRIP_SIGN_NOPS (base1);
8916 if (TREE_CODE (base1) == ADDR_EXPR)
8918 base1 = TREE_OPERAND (base1, 0);
8919 indirect_base1 = true;
8921 offset1 = TREE_OPERAND (arg1, 1);
8922 if (host_integerp (offset1, 0))
8924 HOST_WIDE_INT off = size_low_cst (offset1);
8925 if ((HOST_WIDE_INT) (((unsigned HOST_WIDE_INT) off)
8926 * BITS_PER_UNIT)
8927 / BITS_PER_UNIT == (HOST_WIDE_INT) off)
8929 bitpos1 = off * BITS_PER_UNIT;
8930 offset1 = NULL_TREE;
8935 /* A local variable can never be pointed to by
8936 the default SSA name of an incoming parameter. */
8937 if ((TREE_CODE (arg0) == ADDR_EXPR
8938 && indirect_base0
8939 && TREE_CODE (base0) == VAR_DECL
8940 && auto_var_in_fn_p (base0, current_function_decl)
8941 && !indirect_base1
8942 && TREE_CODE (base1) == SSA_NAME
8943 && SSA_NAME_IS_DEFAULT_DEF (base1)
8944 && TREE_CODE (SSA_NAME_VAR (base1)) == PARM_DECL)
8945 || (TREE_CODE (arg1) == ADDR_EXPR
8946 && indirect_base1
8947 && TREE_CODE (base1) == VAR_DECL
8948 && auto_var_in_fn_p (base1, current_function_decl)
8949 && !indirect_base0
8950 && TREE_CODE (base0) == SSA_NAME
8951 && SSA_NAME_IS_DEFAULT_DEF (base0)
8952 && TREE_CODE (SSA_NAME_VAR (base0)) == PARM_DECL))
8954 if (code == NE_EXPR)
8955 return constant_boolean_node (1, type);
8956 else if (code == EQ_EXPR)
8957 return constant_boolean_node (0, type);
8959 /* If we have equivalent bases we might be able to simplify. */
8960 else if (indirect_base0 == indirect_base1
8961 && operand_equal_p (base0, base1, 0))
8963 /* We can fold this expression to a constant if the non-constant
8964 offset parts are equal. */
8965 if ((offset0 == offset1
8966 || (offset0 && offset1
8967 && operand_equal_p (offset0, offset1, 0)))
8968 && (code == EQ_EXPR
8969 || code == NE_EXPR
8970 || (indirect_base0 && DECL_P (base0))
8971 || POINTER_TYPE_OVERFLOW_UNDEFINED))
8974 if (code != EQ_EXPR
8975 && code != NE_EXPR
8976 && bitpos0 != bitpos1
8977 && (pointer_may_wrap_p (base0, offset0, bitpos0)
8978 || pointer_may_wrap_p (base1, offset1, bitpos1)))
8979 fold_overflow_warning (("assuming pointer wraparound does not "
8980 "occur when comparing P +- C1 with "
8981 "P +- C2"),
8982 WARN_STRICT_OVERFLOW_CONDITIONAL);
8984 switch (code)
8986 case EQ_EXPR:
8987 return constant_boolean_node (bitpos0 == bitpos1, type);
8988 case NE_EXPR:
8989 return constant_boolean_node (bitpos0 != bitpos1, type);
8990 case LT_EXPR:
8991 return constant_boolean_node (bitpos0 < bitpos1, type);
8992 case LE_EXPR:
8993 return constant_boolean_node (bitpos0 <= bitpos1, type);
8994 case GE_EXPR:
8995 return constant_boolean_node (bitpos0 >= bitpos1, type);
8996 case GT_EXPR:
8997 return constant_boolean_node (bitpos0 > bitpos1, type);
8998 default:;
9001 /* We can simplify the comparison to a comparison of the variable
9002 offset parts if the constant offset parts are equal.
9003 Be careful to use signed size type here because otherwise we
9004 mess with array offsets in the wrong way. This is possible
9005 because pointer arithmetic is restricted to remain within an
9006 object and overflow on pointer differences is undefined as of
9007 6.5.6/8 and /9 with respect to the signed ptrdiff_t. */
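/* E.g. "&p->a[i] < &p->a[j]" has equal bases and equal constant
   offsets, so it can fold to a comparison of the variable offset
   parts (the scaled indexes) converted to ssizetype.  */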
9008 else if (bitpos0 == bitpos1
9009 && ((code == EQ_EXPR || code == NE_EXPR)
9010 || (indirect_base0 && DECL_P (base0))
9011 || POINTER_TYPE_OVERFLOW_UNDEFINED))
9013 /* By converting to signed size type we cover middle-end pointer
9014 arithmetic which operates on unsigned pointer types of size
9015 type size and ARRAY_REF offsets which are properly sign or
9016 zero extended from their type in case it is narrower than
9017 size type. */
9018 if (offset0 == NULL_TREE)
9019 offset0 = build_int_cst (ssizetype, 0);
9020 else
9021 offset0 = fold_convert_loc (loc, ssizetype, offset0);
9022 if (offset1 == NULL_TREE)
9023 offset1 = build_int_cst (ssizetype, 0);
9024 else
9025 offset1 = fold_convert_loc (loc, ssizetype, offset1);
9027 if (code != EQ_EXPR
9028 && code != NE_EXPR
9029 && (pointer_may_wrap_p (base0, offset0, bitpos0)
9030 || pointer_may_wrap_p (base1, offset1, bitpos1)))
9031 fold_overflow_warning (("assuming pointer wraparound does not "
9032 "occur when comparing P +- C1 with "
9033 "P +- C2"),
9034 WARN_STRICT_OVERFLOW_COMPARISON);
9036 return fold_build2_loc (loc, code, type, offset0, offset1);
9039 /* For non-equal bases we can simplify if they are addresses
9040 of local binding decls or constants. */
9041 else if (indirect_base0 && indirect_base1
9042 /* We know that !operand_equal_p (base0, base1, 0)
9043 because the if condition was false. But make
9044 sure two decls are not the same. */
9045 && base0 != base1
9046 && TREE_CODE (arg0) == ADDR_EXPR
9047 && TREE_CODE (arg1) == ADDR_EXPR
9048 && (((TREE_CODE (base0) == VAR_DECL
9049 || TREE_CODE (base0) == PARM_DECL)
9050 && (targetm.binds_local_p (base0)
9051 || CONSTANT_CLASS_P (base1)))
9052 || CONSTANT_CLASS_P (base0))
9053 && (((TREE_CODE (base1) == VAR_DECL
9054 || TREE_CODE (base1) == PARM_DECL)
9055 && (targetm.binds_local_p (base1)
9056 || CONSTANT_CLASS_P (base0)))
9057 || CONSTANT_CLASS_P (base1)))
9059 if (code == EQ_EXPR)
9060 return omit_two_operands_loc (loc, type, boolean_false_node,
9061 arg0, arg1);
9062 else if (code == NE_EXPR)
9063 return omit_two_operands_loc (loc, type, boolean_true_node,
9064 arg0, arg1);
9066 /* For equal offsets we can simplify to a comparison of the
9067 base addresses. */
9068 else if (bitpos0 == bitpos1
9069 && (indirect_base0
9070 ? base0 != TREE_OPERAND (arg0, 0) : base0 != arg0)
9071 && (indirect_base1
9072 ? base1 != TREE_OPERAND (arg1, 0) : base1 != arg1)
9073 && ((offset0 == offset1)
9074 || (offset0 && offset1
9075 && operand_equal_p (offset0, offset1, 0))))
9077 if (indirect_base0)
9078 base0 = build_fold_addr_expr_loc (loc, base0);
9079 if (indirect_base1)
9080 base1 = build_fold_addr_expr_loc (loc, base1);
9081 return fold_build2_loc (loc, code, type, base0, base1);
9085 /* Transform comparisons of the form X +- C1 CMP Y +- C2 to
9086 X CMP Y +- C2 +- C1 for signed X, Y. This is valid if
9087 the resulting offset is smaller in absolute value than the
9088 original one. */
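/* For example, for signed x and y, "x + 10 < y + 12" can become
   "x < y + 2"; the combined constant 2 is smaller in absolute
   value than the original 12, so no new overflow is introduced.  */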
9089 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0))
9090 && (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
9091 && (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
9092 && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1)))
9093 && (TREE_CODE (arg1) == PLUS_EXPR || TREE_CODE (arg1) == MINUS_EXPR)
9094 && (TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
9095 && !TREE_OVERFLOW (TREE_OPERAND (arg1, 1))))
9097 tree const1 = TREE_OPERAND (arg0, 1);
9098 tree const2 = TREE_OPERAND (arg1, 1);
9099 tree variable1 = TREE_OPERAND (arg0, 0);
9100 tree variable2 = TREE_OPERAND (arg1, 0);
9101 tree cst;
9102 const char * const warnmsg = G_("assuming signed overflow does not "
9103 "occur when combining constants around "
9104 "a comparison");
9106 /* Put the constant on the side where it doesn't overflow and is
9107 of lower absolute value than before. */
9108 cst = int_const_binop (TREE_CODE (arg0) == TREE_CODE (arg1)
9109 ? MINUS_EXPR : PLUS_EXPR,
9110 const2, const1);
9111 if (!TREE_OVERFLOW (cst)
9112 && tree_int_cst_compare (const2, cst) == tree_int_cst_sgn (const2))
9114 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_COMPARISON);
9115 return fold_build2_loc (loc, code, type,
9116 variable1,
9117 fold_build2_loc (loc,
9118 TREE_CODE (arg1), TREE_TYPE (arg1),
9119 variable2, cst));
9122 cst = int_const_binop (TREE_CODE (arg0) == TREE_CODE (arg1)
9123 ? MINUS_EXPR : PLUS_EXPR,
9124 const1, const2);
9125 if (!TREE_OVERFLOW (cst)
9126 && tree_int_cst_compare (const1, cst) == tree_int_cst_sgn (const1))
9128 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_COMPARISON);
9129 return fold_build2_loc (loc, code, type,
9130 fold_build2_loc (loc, TREE_CODE (arg0), TREE_TYPE (arg0),
9131 variable1, cst),
9132 variable2);
9136 /* Transform comparisons of the form X * C1 CMP 0 to X CMP 0 in the
9137 signed arithmetic case. That form is created by the compiler
9138 often enough for folding it to be of value. One example is in
9139 computing loop trip counts after Operator Strength Reduction. */
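/* E.g. for signed x, "x * 4 > 0" folds to "x > 0" and "x * -4 > 0"
   folds to "x < 0"; the sense of the comparison is swapped for a
   negative multiplier, relying on signed overflow being undefined.  */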
9140 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0))
9141 && TREE_CODE (arg0) == MULT_EXPR
9142 && (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
9143 && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1)))
9144 && integer_zerop (arg1))
9146 tree const1 = TREE_OPERAND (arg0, 1);
9147 tree const2 = arg1; /* zero */
9148 tree variable1 = TREE_OPERAND (arg0, 0);
9149 enum tree_code cmp_code = code;
9151 /* Handle unfolded multiplication by zero. */
9152 if (integer_zerop (const1))
9153 return fold_build2_loc (loc, cmp_code, type, const1, const2);
9155 fold_overflow_warning (("assuming signed overflow does not occur when "
9156 "eliminating multiplication in comparison "
9157 "with zero"),
9158 WARN_STRICT_OVERFLOW_COMPARISON);
9160 /* If const1 is negative we swap the sense of the comparison. */
9161 if (tree_int_cst_sgn (const1) < 0)
9162 cmp_code = swap_tree_comparison (cmp_code);
9164 return fold_build2_loc (loc, cmp_code, type, variable1, const2);
9167 tem = maybe_canonicalize_comparison (loc, code, type, arg0, arg1);
9168 if (tem)
9169 return tem;
9171 if (FLOAT_TYPE_P (TREE_TYPE (arg0)))
9173 tree targ0 = strip_float_extensions (arg0);
9174 tree targ1 = strip_float_extensions (arg1);
9175 tree newtype = TREE_TYPE (targ0);
9177 if (TYPE_PRECISION (TREE_TYPE (targ1)) > TYPE_PRECISION (newtype))
9178 newtype = TREE_TYPE (targ1);
9180 /* Fold (double)float1 CMP (double)float2 into float1 CMP float2. */
9181 if (TYPE_PRECISION (newtype) < TYPE_PRECISION (TREE_TYPE (arg0)))
9182 return fold_build2_loc (loc, code, type,
9183 fold_convert_loc (loc, newtype, targ0),
9184 fold_convert_loc (loc, newtype, targ1));
9186 /* (-a) CMP (-b) -> b CMP a */
9187 if (TREE_CODE (arg0) == NEGATE_EXPR
9188 && TREE_CODE (arg1) == NEGATE_EXPR)
9189 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg1, 0),
9190 TREE_OPERAND (arg0, 0));
9192 if (TREE_CODE (arg1) == REAL_CST)
9194 REAL_VALUE_TYPE cst;
9195 cst = TREE_REAL_CST (arg1);
9197 /* (-a) CMP CST -> a swap(CMP) (-CST) */
9198 if (TREE_CODE (arg0) == NEGATE_EXPR)
9199 return fold_build2_loc (loc, swap_tree_comparison (code), type,
9200 TREE_OPERAND (arg0, 0),
9201 build_real (TREE_TYPE (arg1),
9202 real_value_negate (&cst)));
9204 /* IEEE doesn't distinguish +0 and -0 in comparisons. */
9205 /* a CMP (-0) -> a CMP 0 */
9206 if (REAL_VALUE_MINUS_ZERO (cst))
9207 return fold_build2_loc (loc, code, type, arg0,
9208 build_real (TREE_TYPE (arg1), dconst0));
9210 /* x != NaN is always true, other ops are always false. */
9211 if (REAL_VALUE_ISNAN (cst)
9212 && ! HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg1))))
9214 tem = (code == NE_EXPR) ? integer_one_node : integer_zero_node;
9215 return omit_one_operand_loc (loc, type, tem, arg0);
9218 /* Fold comparisons against infinity. */
9219 if (REAL_VALUE_ISINF (cst)
9220 && MODE_HAS_INFINITIES (TYPE_MODE (TREE_TYPE (arg1))))
9222 tem = fold_inf_compare (loc, code, type, arg0, arg1);
9223 if (tem != NULL_TREE)
9224 return tem;
9228 /* If this is a comparison of a real constant with a PLUS_EXPR
9229 or a MINUS_EXPR of a real constant, we can convert it into a
9230 comparison with a revised real constant as long as no overflow
9231 occurs when unsafe_math_optimizations are enabled. */
9232 if (flag_unsafe_math_optimizations
9233 && TREE_CODE (arg1) == REAL_CST
9234 && (TREE_CODE (arg0) == PLUS_EXPR
9235 || TREE_CODE (arg0) == MINUS_EXPR)
9236 && TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
9237 && 0 != (tem = const_binop (TREE_CODE (arg0) == PLUS_EXPR
9238 ? MINUS_EXPR : PLUS_EXPR,
9239 arg1, TREE_OPERAND (arg0, 1)))
9240 && !TREE_OVERFLOW (tem))
9241 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0), tem);
9243 /* Likewise, we can simplify a comparison of a real constant with
9244 a MINUS_EXPR whose first operand is also a real constant, i.e.
9245 (c1 - x) < c2 becomes x > c1-c2. Reordering is allowed on
9246 floating-point types only if -fassociative-math is set. */
9247 if (flag_associative_math
9248 && TREE_CODE (arg1) == REAL_CST
9249 && TREE_CODE (arg0) == MINUS_EXPR
9250 && TREE_CODE (TREE_OPERAND (arg0, 0)) == REAL_CST
9251 && 0 != (tem = const_binop (MINUS_EXPR, TREE_OPERAND (arg0, 0),
9252 arg1))
9253 && !TREE_OVERFLOW (tem))
9254 return fold_build2_loc (loc, swap_tree_comparison (code), type,
9255 TREE_OPERAND (arg0, 1), tem);
9257 /* Fold comparisons against built-in math functions. */
9258 if (TREE_CODE (arg1) == REAL_CST
9259 && flag_unsafe_math_optimizations
9260 && ! flag_errno_math)
9262 enum built_in_function fcode = builtin_mathfn_code (arg0);
9264 if (fcode != END_BUILTINS)
9266 tem = fold_mathfn_compare (loc, fcode, code, type, arg0, arg1);
9267 if (tem != NULL_TREE)
9268 return tem;
9273 if (TREE_CODE (TREE_TYPE (arg0)) == INTEGER_TYPE
9274 && CONVERT_EXPR_P (arg0))
9276 /* If we are widening one operand of an integer comparison,
9277 see if the other operand is similarly being widened. Perhaps we
9278 can do the comparison in the narrower type. */
9279 tem = fold_widened_comparison (loc, code, type, arg0, arg1);
9280 if (tem)
9281 return tem;
9283 /* Or if we are changing signedness. */
9284 tem = fold_sign_changed_comparison (loc, code, type, arg0, arg1);
9285 if (tem)
9286 return tem;
9289 /* If this is comparing a constant with a MIN_EXPR or a MAX_EXPR of a
9290 constant, we can simplify it. */
9291 if (TREE_CODE (arg1) == INTEGER_CST
9292 && (TREE_CODE (arg0) == MIN_EXPR
9293 || TREE_CODE (arg0) == MAX_EXPR)
9294 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
9296 tem = optimize_minmax_comparison (loc, code, type, op0, op1);
9297 if (tem)
9298 return tem;
9301 /* Simplify comparison of something with itself. (For IEEE
9302 floating-point, we can only do some of these simplifications.) */
9303 if (operand_equal_p (arg0, arg1, 0))
9305 switch (code)
9307 case EQ_EXPR:
9308 if (! FLOAT_TYPE_P (TREE_TYPE (arg0))
9309 || ! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
9310 return constant_boolean_node (1, type);
9311 break;
9313 case GE_EXPR:
9314 case LE_EXPR:
9315 if (! FLOAT_TYPE_P (TREE_TYPE (arg0))
9316 || ! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
9317 return constant_boolean_node (1, type);
9318 return fold_build2_loc (loc, EQ_EXPR, type, arg0, arg1);
9320 case NE_EXPR:
9321 /* For NE, we can only do this simplification if the type is
9322 integer or we don't honor IEEE floating-point NaNs. */
9323 if (FLOAT_TYPE_P (TREE_TYPE (arg0))
9324 && HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
9325 break;
9326 /* ... fall through ... */
9327 case GT_EXPR:
9328 case LT_EXPR:
9329 return constant_boolean_node (0, type);
9330 default:
9331 gcc_unreachable ();
9335 /* If we are comparing an expression that just has comparisons
9336 of two integer values, arithmetic expressions of those comparisons,
9337 and constants, we can simplify it. There are only three cases
9338 to check: the two values can either be equal, the first can be
9339 greater, or the second can be greater. Fold the expression for
9340 those three values. Since each value must be 0 or 1, we have
9341 eight possibilities, each of which corresponds to the constant 0
9342 or 1 or one of the six possible comparisons.
9344 This handles common cases like (a > b) == 0 but also handles
9345 expressions like ((x > y) - (y > x)) > 0, which supposedly
9346 occur in macroized code. */
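/* E.g. "(a > b) == 0" yields 0 when a is greater, 1 when both are
   equal and 1 when a is less, i.e. mask 011 below, which selects
   LE_EXPR, so the whole expression folds to "a <= b".  */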
9348 if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg0) != INTEGER_CST)
9350 tree cval1 = 0, cval2 = 0;
9351 int save_p = 0;
9353 if (twoval_comparison_p (arg0, &cval1, &cval2, &save_p)
9354 /* Don't handle degenerate cases here; they should already
9355 have been handled anyway. */
9356 && cval1 != 0 && cval2 != 0
9357 && ! (TREE_CONSTANT (cval1) && TREE_CONSTANT (cval2))
9358 && TREE_TYPE (cval1) == TREE_TYPE (cval2)
9359 && INTEGRAL_TYPE_P (TREE_TYPE (cval1))
9360 && TYPE_MAX_VALUE (TREE_TYPE (cval1))
9361 && TYPE_MAX_VALUE (TREE_TYPE (cval2))
9362 && ! operand_equal_p (TYPE_MIN_VALUE (TREE_TYPE (cval1)),
9363 TYPE_MAX_VALUE (TREE_TYPE (cval2)), 0))
9365 tree maxval = TYPE_MAX_VALUE (TREE_TYPE (cval1));
9366 tree minval = TYPE_MIN_VALUE (TREE_TYPE (cval1));
9368 /* We can't just pass T to eval_subst in case cval1 or cval2
9369 was the same as ARG1. */
9371 tree high_result
9372 = fold_build2_loc (loc, code, type,
9373 eval_subst (loc, arg0, cval1, maxval,
9374 cval2, minval),
9375 arg1);
9376 tree equal_result
9377 = fold_build2_loc (loc, code, type,
9378 eval_subst (loc, arg0, cval1, maxval,
9379 cval2, maxval),
9380 arg1);
9381 tree low_result
9382 = fold_build2_loc (loc, code, type,
9383 eval_subst (loc, arg0, cval1, minval,
9384 cval2, maxval),
9385 arg1);
9387 /* All three of these results should be 0 or 1. Confirm they are.
9388 Then use those values to select the proper code to use. */
9390 if (TREE_CODE (high_result) == INTEGER_CST
9391 && TREE_CODE (equal_result) == INTEGER_CST
9392 && TREE_CODE (low_result) == INTEGER_CST)
9394 /* Make a 3-bit mask with the high-order bit being the
9395 value for `>', the next for '=', and the low for '<'. */
9396 switch ((integer_onep (high_result) * 4)
9397 + (integer_onep (equal_result) * 2)
9398 + integer_onep (low_result))
9400 case 0:
9401 /* Always false. */
9402 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
9403 case 1:
9404 code = LT_EXPR;
9405 break;
9406 case 2:
9407 code = EQ_EXPR;
9408 break;
9409 case 3:
9410 code = LE_EXPR;
9411 break;
9412 case 4:
9413 code = GT_EXPR;
9414 break;
9415 case 5:
9416 code = NE_EXPR;
9417 break;
9418 case 6:
9419 code = GE_EXPR;
9420 break;
9421 case 7:
9422 /* Always true. */
9423 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
9426 if (save_p)
9428 tem = save_expr (build2 (code, type, cval1, cval2));
9429 SET_EXPR_LOCATION (tem, loc);
9430 return tem;
9432 return fold_build2_loc (loc, code, type, cval1, cval2);
9437 /* We can fold X/C1 op C2 where C1 and C2 are integer constants
9438 into a single range test. */
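/* E.g. for unsigned x, "x / 4 == 2" holds exactly for x in
   [8, 11], so fold_div_compare can rewrite it as a range test
   such as "x - 8 <= 3" in the unsigned type.  */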
9439 if ((TREE_CODE (arg0) == TRUNC_DIV_EXPR
9440 || TREE_CODE (arg0) == EXACT_DIV_EXPR)
9441 && TREE_CODE (arg1) == INTEGER_CST
9442 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
9443 && !integer_zerop (TREE_OPERAND (arg0, 1))
9444 && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1))
9445 && !TREE_OVERFLOW (arg1))
9447 tem = fold_div_compare (loc, code, type, arg0, arg1);
9448 if (tem != NULL_TREE)
9449 return tem;
9452 /* Fold ~X op ~Y as Y op X. */
9453 if (TREE_CODE (arg0) == BIT_NOT_EXPR
9454 && TREE_CODE (arg1) == BIT_NOT_EXPR)
9456 tree cmp_type = TREE_TYPE (TREE_OPERAND (arg0, 0));
9457 return fold_build2_loc (loc, code, type,
9458 fold_convert_loc (loc, cmp_type,
9459 TREE_OPERAND (arg1, 0)),
9460 TREE_OPERAND (arg0, 0));
9463 /* Fold ~X op C as X op' ~C, where op' is the swapped comparison. */
9464 if (TREE_CODE (arg0) == BIT_NOT_EXPR
9465 && TREE_CODE (arg1) == INTEGER_CST)
9467 tree cmp_type = TREE_TYPE (TREE_OPERAND (arg0, 0));
9468 return fold_build2_loc (loc, swap_tree_comparison (code), type,
9469 TREE_OPERAND (arg0, 0),
9470 fold_build1_loc (loc, BIT_NOT_EXPR, cmp_type,
9471 fold_convert_loc (loc, cmp_type, arg1)));
9474 return NULL_TREE;
9478 /* Subroutine of fold_binary. Optimize complex multiplications of the
9479 form z * conj(z), as pow(realpart(z),2) + pow(imagpart(z),2). The
9480 argument EXPR represents the expression "z" of type TYPE. */
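/* This is the identity (a + bi) * (a - bi) = a*a + b*b; the result
   has a zero imaginary part, which is what is built below.  */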
9482 static tree
9483 fold_mult_zconjz (location_t loc, tree type, tree expr)
9485 tree itype = TREE_TYPE (type);
9486 tree rpart, ipart, tem;
9488 if (TREE_CODE (expr) == COMPLEX_EXPR)
9490 rpart = TREE_OPERAND (expr, 0);
9491 ipart = TREE_OPERAND (expr, 1);
9493 else if (TREE_CODE (expr) == COMPLEX_CST)
9495 rpart = TREE_REALPART (expr);
9496 ipart = TREE_IMAGPART (expr);
9498 else
9500 expr = save_expr (expr);
9501 rpart = fold_build1_loc (loc, REALPART_EXPR, itype, expr);
9502 ipart = fold_build1_loc (loc, IMAGPART_EXPR, itype, expr);
9505 rpart = save_expr (rpart);
9506 ipart = save_expr (ipart);
9507 tem = fold_build2_loc (loc, PLUS_EXPR, itype,
9508 fold_build2_loc (loc, MULT_EXPR, itype, rpart, rpart),
9509 fold_build2_loc (loc, MULT_EXPR, itype, ipart, ipart));
9510 return fold_build2_loc (loc, COMPLEX_EXPR, type, tem,
9511 build_zero_cst (itype));
9515 /* Subroutine of fold_binary. If P is the value of EXPR, computes
9516 power-of-two M and (arbitrary) N such that M divides (P-N). This condition
9517 guarantees that P and N have the same least significant log2(M) bits.
9518 N is not otherwise constrained. In particular, N is not normalized to
9519 0 <= N < M as is common. In general, the precise value of P is unknown.
9520 M is chosen as large as possible such that constant N can be determined.
9522 Returns M and sets *RESIDUE to N.
9524 If ALLOW_FUNC_ALIGN is true, do take functions' DECL_ALIGN_UNIT into
9525 account. This is not always possible due to PR 35705.
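/* Illustrative example: for EXPR "(char *) &v + 4 * n + 2" with v
   8-byte aligned, the recursion computes M = 4 (the MIN of the
   alignment 8 and the power-of-2 factor 4 of the multiplication)
   and *RESIDUE = 2, i.e. P mod 4 == 2 for any n.  */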
9528 static unsigned HOST_WIDE_INT
9529 get_pointer_modulus_and_residue (tree expr, unsigned HOST_WIDE_INT *residue,
9530 bool allow_func_align)
9532 enum tree_code code;
9534 *residue = 0;
9536 code = TREE_CODE (expr);
9537 if (code == ADDR_EXPR)
9539 unsigned int bitalign;
9540 get_object_alignment_1 (TREE_OPERAND (expr, 0), &bitalign, residue);
9541 *residue /= BITS_PER_UNIT;
9542 return bitalign / BITS_PER_UNIT;
9544 else if (code == POINTER_PLUS_EXPR)
9546 tree op0, op1;
9547 unsigned HOST_WIDE_INT modulus;
9548 enum tree_code inner_code;
9550 op0 = TREE_OPERAND (expr, 0);
9551 STRIP_NOPS (op0);
9552 modulus = get_pointer_modulus_and_residue (op0, residue,
9553 allow_func_align);
9555 op1 = TREE_OPERAND (expr, 1);
9556 STRIP_NOPS (op1);
9557 inner_code = TREE_CODE (op1);
9558 if (inner_code == INTEGER_CST)
9560 *residue += TREE_INT_CST_LOW (op1);
9561 return modulus;
9563 else if (inner_code == MULT_EXPR)
9565 op1 = TREE_OPERAND (op1, 1);
9566 if (TREE_CODE (op1) == INTEGER_CST)
9568 unsigned HOST_WIDE_INT align;
9570 /* Compute the greatest power-of-2 divisor of op1. */
9571 align = TREE_INT_CST_LOW (op1);
9572 align &= -align;
9574 /* If align is non-zero and less than *modulus, replace
9575 *modulus with align. If align is 0, then either op1 is 0
9576 or the greatest power-of-2 divisor of op1 doesn't fit in an
9577 unsigned HOST_WIDE_INT. In either case, no additional
9578 constraint is imposed. */
9579 if (align)
9580 modulus = MIN (modulus, align);
9582 return modulus;
9587 /* If we get here, we were unable to determine anything useful about the
9588 expression. */
9589 return 1;
9592 /* Helper function for fold_vec_perm. Store elements of VECTOR_CST or
9593 CONSTRUCTOR ARG into array ELTS and return true if successful. */
9595 static bool
9596 vec_cst_ctor_to_array (tree arg, tree *elts)
9598 unsigned int nelts = TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg)), i;
9600 if (TREE_CODE (arg) == VECTOR_CST)
9602 for (i = 0; i < VECTOR_CST_NELTS (arg); ++i)
9603 elts[i] = VECTOR_CST_ELT (arg, i);
9605 else if (TREE_CODE (arg) == CONSTRUCTOR)
9607 constructor_elt *elt;
9609 FOR_EACH_VEC_ELT (constructor_elt, CONSTRUCTOR_ELTS (arg), i, elt)
9610 if (i >= nelts)
9611 return false;
9612 else
9613 elts[i] = elt->value;
9615 else
9616 return false;
9617 for (; i < nelts; i++)
9618 elts[i]
9619 = fold_convert (TREE_TYPE (TREE_TYPE (arg)), integer_zero_node);
9620 return true;
9623 /* Attempt to fold vector permutation of ARG0 and ARG1 vectors using SEL
9624 selector. Return the folded VECTOR_CST or CONSTRUCTOR if successful,
9625 NULL_TREE otherwise. */
9627 static tree
9628 fold_vec_perm (tree type, tree arg0, tree arg1, const unsigned char *sel)
9630 unsigned int nelts = TYPE_VECTOR_SUBPARTS (type), i;
9631 tree *elts;
9632 bool need_ctor = false;
9634 gcc_assert (TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0)) == nelts
9635 && TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg1)) == nelts);
9636 if (TREE_TYPE (TREE_TYPE (arg0)) != TREE_TYPE (type)
9637 || TREE_TYPE (TREE_TYPE (arg1)) != TREE_TYPE (type))
9638 return NULL_TREE;
9640 elts = XALLOCAVEC (tree, nelts * 3);
9641 if (!vec_cst_ctor_to_array (arg0, elts)
9642 || !vec_cst_ctor_to_array (arg1, elts + nelts))
9643 return NULL_TREE;
9645 for (i = 0; i < nelts; i++)
9647 if (!CONSTANT_CLASS_P (elts[sel[i]]))
9648 need_ctor = true;
9649 elts[i + 2 * nelts] = unshare_expr (elts[sel[i]]);
9652 if (need_ctor)
9654 VEC(constructor_elt,gc) *v = VEC_alloc (constructor_elt, gc, nelts);
9655 for (i = 0; i < nelts; i++)
9656 CONSTRUCTOR_APPEND_ELT (v, NULL_TREE, elts[2 * nelts + i]);
9657 return build_constructor (type, v);
9659 else
9660 return build_vector (type, &elts[2 * nelts]);
9663 /* Try to fold a pointer difference of type TYPE between two address
9664 expressions of array references AREF0 and AREF1 using location LOC. Return a
9665 simplified expression for the difference or NULL_TREE. */
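/* E.g. "&a[i] - &a[j]" can fold to "(i - j) * sizeof (a[0])"
   computed in TYPE; nested ARRAY_REFs are handled by recursing
   into the bases.  */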
9667 static tree
9668 fold_addr_of_array_ref_difference (location_t loc, tree type,
9669 tree aref0, tree aref1)
9671 tree base0 = TREE_OPERAND (aref0, 0);
9672 tree base1 = TREE_OPERAND (aref1, 0);
9673 tree base_offset = build_int_cst (type, 0);
9675 /* If the bases are array references as well, recurse. If the bases
9676 are pointer indirections compute the difference of the pointers.
9677 If the bases are equal, we are set. */
9678 if ((TREE_CODE (base0) == ARRAY_REF
9679 && TREE_CODE (base1) == ARRAY_REF
9680 && (base_offset
9681 = fold_addr_of_array_ref_difference (loc, type, base0, base1)))
9682 || (INDIRECT_REF_P (base0)
9683 && INDIRECT_REF_P (base1)
9684 && (base_offset = fold_binary_loc (loc, MINUS_EXPR, type,
9685 TREE_OPERAND (base0, 0),
9686 TREE_OPERAND (base1, 0))))
9687 || operand_equal_p (base0, base1, 0))
9689 tree op0 = fold_convert_loc (loc, type, TREE_OPERAND (aref0, 1));
9690 tree op1 = fold_convert_loc (loc, type, TREE_OPERAND (aref1, 1));
9691 tree esz = fold_convert_loc (loc, type, array_ref_element_size (aref0));
9692 tree diff = build2 (MINUS_EXPR, type, op0, op1);
9693 return fold_build2_loc (loc, PLUS_EXPR, type,
9694 base_offset,
9695 fold_build2_loc (loc, MULT_EXPR, type,
9696 diff, esz));
9698 return NULL_TREE;
9701 /* If the real or vector real constant CST of type TYPE has an exact
9702 inverse, return it, else return NULL. */
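/* E.g. 4.0 has the exact inverse 0.25, whereas 3.0 has none
   (1/3 is not exactly representable in binary floating point),
   so the latter yields NULL.  */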
9704 static tree
9705 exact_inverse (tree type, tree cst)
9707 REAL_VALUE_TYPE r;
9708 tree unit_type, *elts;
9709 enum machine_mode mode;
9710 unsigned vec_nelts, i;
9712 switch (TREE_CODE (cst))
9714 case REAL_CST:
9715 r = TREE_REAL_CST (cst);
9717 if (exact_real_inverse (TYPE_MODE (type), &r))
9718 return build_real (type, r);
9720 return NULL_TREE;
9722 case VECTOR_CST:
9723 vec_nelts = VECTOR_CST_NELTS (cst);
9724 elts = XALLOCAVEC (tree, vec_nelts);
9725 unit_type = TREE_TYPE (type);
9726 mode = TYPE_MODE (unit_type);
9728 for (i = 0; i < vec_nelts; i++)
9730 r = TREE_REAL_CST (VECTOR_CST_ELT (cst, i));
9731 if (!exact_real_inverse (mode, &r))
9732 return NULL_TREE;
9733 elts[i] = build_real (unit_type, r);
9736 return build_vector (type, elts);
9738 default:
9739 return NULL_TREE;
9743 /* Fold a binary expression of code CODE and type TYPE with operands
9744 OP0 and OP1. LOC is the location of the resulting expression.
9745 Return the folded expression if folding is successful. Otherwise,
9746 return NULL_TREE. */
9748 tree
9749 fold_binary_loc (location_t loc,
9750 enum tree_code code, tree type, tree op0, tree op1)
9752 enum tree_code_class kind = TREE_CODE_CLASS (code);
9753 tree arg0, arg1, tem;
9754 tree t1 = NULL_TREE;
9755 bool strict_overflow_p;
9757 gcc_assert (IS_EXPR_CODE_CLASS (kind)
9758 && TREE_CODE_LENGTH (code) == 2
9759 && op0 != NULL_TREE
9760 && op1 != NULL_TREE);
9762 arg0 = op0;
9763 arg1 = op1;
9765 /* Strip any conversions that don't change the mode. This is
9766 safe for every expression, except for a comparison expression
9767 because its signedness is derived from its operands. So, in
9768 the latter case, only strip conversions that don't change the
9769 signedness. MIN_EXPR/MAX_EXPR also need signedness of arguments
9770 preserved.
9772 Note that this is done as an internal manipulation within the
9773 constant folder, in order to find the simplest representation
9774 of the arguments so that their form can be studied. In any
9775 case, the appropriate type conversions should be put back in
9776 the tree that will get out of the constant folder. */
9778 if (kind == tcc_comparison || code == MIN_EXPR || code == MAX_EXPR)
9780 STRIP_SIGN_NOPS (arg0);
9781 STRIP_SIGN_NOPS (arg1);
9783 else
9785 STRIP_NOPS (arg0);
9786 STRIP_NOPS (arg1);
9789 /* Note that TREE_CONSTANT isn't enough: static var addresses are
9790 constant but we can't do arithmetic on them. */
9791 if ((TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
9792 || (TREE_CODE (arg0) == REAL_CST && TREE_CODE (arg1) == REAL_CST)
9793 || (TREE_CODE (arg0) == FIXED_CST && TREE_CODE (arg1) == FIXED_CST)
9794 || (TREE_CODE (arg0) == FIXED_CST && TREE_CODE (arg1) == INTEGER_CST)
9795 || (TREE_CODE (arg0) == COMPLEX_CST && TREE_CODE (arg1) == COMPLEX_CST)
9796 || (TREE_CODE (arg0) == VECTOR_CST && TREE_CODE (arg1) == VECTOR_CST))
9798 if (kind == tcc_binary)
9800 /* Make sure type and arg0 have the same saturating flag. */
9801 gcc_assert (TYPE_SATURATING (type)
9802 == TYPE_SATURATING (TREE_TYPE (arg0)));
9803 tem = const_binop (code, arg0, arg1);
9805 else if (kind == tcc_comparison)
9806 tem = fold_relational_const (code, type, arg0, arg1);
9807 else
9808 tem = NULL_TREE;
9810 if (tem != NULL_TREE)
9812 if (TREE_TYPE (tem) != type)
9813 tem = fold_convert_loc (loc, type, tem);
9814 return tem;
9818 /* If this is a commutative operation, and ARG0 is a constant, move it
9819 to ARG1 to reduce the number of tests below. */
9820 if (commutative_tree_code (code)
9821 && tree_swap_operands_p (arg0, arg1, true))
9822 return fold_build2_loc (loc, code, type, op1, op0);
9824 /* ARG0 is the first operand of EXPR, and ARG1 is the second operand.
9826 First check for cases where an arithmetic operation is applied to a
9827 compound, conditional, or comparison operation. Push the arithmetic
9828 operation inside the compound or conditional to see if any folding
9829 can then be done. Convert comparison to conditional for this purpose.
9830 This also optimizes non-constant cases that used to be done in
9831 expand_expr.
9833 Before we do that, see if this is a BIT_AND_EXPR or a BIT_IOR_EXPR,
9834 where one of the operands is a comparison and the other is a comparison, a
9835 BIT_AND_EXPR with the constant 1, or a truth value. In that case, the
9836 code below would make the expression more complex. Change it to a
9837 TRUTH_{AND,OR}_EXPR. Likewise, convert a similar NE_EXPR to
9838 TRUTH_XOR_EXPR and an EQ_EXPR to the inversion of a TRUTH_XOR_EXPR. */
9840 if ((code == BIT_AND_EXPR || code == BIT_IOR_EXPR
9841 || code == EQ_EXPR || code == NE_EXPR)
9842 && ((truth_value_p (TREE_CODE (arg0))
9843 && (truth_value_p (TREE_CODE (arg1))
9844 || (TREE_CODE (arg1) == BIT_AND_EXPR
9845 && integer_onep (TREE_OPERAND (arg1, 1)))))
9846 || (truth_value_p (TREE_CODE (arg1))
9847 && (truth_value_p (TREE_CODE (arg0))
9848 || (TREE_CODE (arg0) == BIT_AND_EXPR
9849 && integer_onep (TREE_OPERAND (arg0, 1)))))))
9851 tem = fold_build2_loc (loc, code == BIT_AND_EXPR ? TRUTH_AND_EXPR
9852 : code == BIT_IOR_EXPR ? TRUTH_OR_EXPR
9853 : TRUTH_XOR_EXPR,
9854 boolean_type_node,
9855 fold_convert_loc (loc, boolean_type_node, arg0),
9856 fold_convert_loc (loc, boolean_type_node, arg1));
9858 if (code == EQ_EXPR)
9859 tem = invert_truthvalue_loc (loc, tem);
9861 return fold_convert_loc (loc, type, tem);
9864 if (TREE_CODE_CLASS (code) == tcc_binary
9865 || TREE_CODE_CLASS (code) == tcc_comparison)
9867 if (TREE_CODE (arg0) == COMPOUND_EXPR)
9869 tem = fold_build2_loc (loc, code, type,
9870 fold_convert_loc (loc, TREE_TYPE (op0),
9871 TREE_OPERAND (arg0, 1)), op1);
9872 return build2_loc (loc, COMPOUND_EXPR, type, TREE_OPERAND (arg0, 0),
9873 tem);
9875 if (TREE_CODE (arg1) == COMPOUND_EXPR
9876 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
9878 tem = fold_build2_loc (loc, code, type, op0,
9879 fold_convert_loc (loc, TREE_TYPE (op1),
9880 TREE_OPERAND (arg1, 1)));
9881 return build2_loc (loc, COMPOUND_EXPR, type, TREE_OPERAND (arg1, 0),
9882 tem);
9885 if (TREE_CODE (arg0) == COND_EXPR || COMPARISON_CLASS_P (arg0))
9887 tem = fold_binary_op_with_conditional_arg (loc, code, type, op0, op1,
9888 arg0, arg1,
9889 /*cond_first_p=*/1);
9890 if (tem != NULL_TREE)
9891 return tem;
9894 if (TREE_CODE (arg1) == COND_EXPR || COMPARISON_CLASS_P (arg1))
9896 tem = fold_binary_op_with_conditional_arg (loc, code, type, op0, op1,
9897 arg1, arg0,
9898 /*cond_first_p=*/0);
9899 if (tem != NULL_TREE)
9900 return tem;
9904 switch (code)
9906 case MEM_REF:
9907 /* MEM[&MEM[p, CST1], CST2] -> MEM[p, CST1 + CST2]. */
9908 if (TREE_CODE (arg0) == ADDR_EXPR
9909 && TREE_CODE (TREE_OPERAND (arg0, 0)) == MEM_REF)
9911 tree iref = TREE_OPERAND (arg0, 0);
9912 return fold_build2 (MEM_REF, type,
9913 TREE_OPERAND (iref, 0),
9914 int_const_binop (PLUS_EXPR, arg1,
9915 TREE_OPERAND (iref, 1)));
9918 /* MEM[&a.b, CST2] -> MEM[&a, offsetof (a, b) + CST2]. */
9919 if (TREE_CODE (arg0) == ADDR_EXPR
9920 && handled_component_p (TREE_OPERAND (arg0, 0)))
9922 tree base;
9923 HOST_WIDE_INT coffset;
9924 base = get_addr_base_and_unit_offset (TREE_OPERAND (arg0, 0),
9925 &coffset);
9926 if (!base)
9927 return NULL_TREE;
9928 return fold_build2 (MEM_REF, type,
9929 build_fold_addr_expr (base),
9930 int_const_binop (PLUS_EXPR, arg1,
9931 size_int (coffset)));
9934 return NULL_TREE;
9936 case POINTER_PLUS_EXPR:
9937 /* 0 +p index -> (type)index */
9938 if (integer_zerop (arg0))
9939 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
9941 /* PTR +p 0 -> PTR */
9942 if (integer_zerop (arg1))
9943 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
9945 /* INT +p INT -> (PTR)(INT + INT). Stripping types allows for this. */
9946 if (INTEGRAL_TYPE_P (TREE_TYPE (arg1))
9947 && INTEGRAL_TYPE_P (TREE_TYPE (arg0)))
9948 return fold_convert_loc (loc, type,
9949 fold_build2_loc (loc, PLUS_EXPR, sizetype,
9950 fold_convert_loc (loc, sizetype,
9951 arg1),
9952 fold_convert_loc (loc, sizetype,
9953 arg0)));
9955 /* (PTR +p B) +p A -> PTR +p (B + A) */
9956 if (TREE_CODE (arg0) == POINTER_PLUS_EXPR)
9958 tree inner;
9959 tree arg01 = fold_convert_loc (loc, sizetype, TREE_OPERAND (arg0, 1));
9960 tree arg00 = TREE_OPERAND (arg0, 0);
9961 inner = fold_build2_loc (loc, PLUS_EXPR, sizetype,
9962 arg01, fold_convert_loc (loc, sizetype, arg1));
9963 return fold_convert_loc (loc, type,
9964 fold_build_pointer_plus_loc (loc,
9965 arg00, inner));
9968 /* PTR_CST +p CST -> CST1, i.e. fold the two constants into one. */
9969 if (TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
9970 return fold_build2_loc (loc, PLUS_EXPR, type, arg0,
9971 fold_convert_loc (loc, type, arg1));
9973 /* Try replacing &a[i1] +p c * i2 with &a[i1 + i2], if c is the step
9974 of the array. The loop optimizer sometimes produces this type of
9975 expression. */
9976 if (TREE_CODE (arg0) == ADDR_EXPR)
9978 tem = try_move_mult_to_index (loc, arg0,
9979 fold_convert_loc (loc,
9980 ssizetype, arg1));
9981 if (tem)
9982 return fold_convert_loc (loc, type, tem);
9985 return NULL_TREE;
9987 case PLUS_EXPR:
9988 /* A + (-B) -> A - B */
9989 if (TREE_CODE (arg1) == NEGATE_EXPR)
9990 return fold_build2_loc (loc, MINUS_EXPR, type,
9991 fold_convert_loc (loc, type, arg0),
9992 fold_convert_loc (loc, type,
9993 TREE_OPERAND (arg1, 0)));
9994 /* (-A) + B -> B - A */
9995 if (TREE_CODE (arg0) == NEGATE_EXPR
9996 && reorder_operands_p (TREE_OPERAND (arg0, 0), arg1))
9997 return fold_build2_loc (loc, MINUS_EXPR, type,
9998 fold_convert_loc (loc, type, arg1),
9999 fold_convert_loc (loc, type,
10000 TREE_OPERAND (arg0, 0)));
10002 if (INTEGRAL_TYPE_P (type))
10004 /* Convert ~A + 1 to -A. */
10005 if (TREE_CODE (arg0) == BIT_NOT_EXPR
10006 && integer_onep (arg1))
10007 return fold_build1_loc (loc, NEGATE_EXPR, type,
10008 fold_convert_loc (loc, type,
10009 TREE_OPERAND (arg0, 0)));
10011 /* ~X + X is -1. */
10012 if (TREE_CODE (arg0) == BIT_NOT_EXPR
10013 && !TYPE_OVERFLOW_TRAPS (type))
10015 tree tem = TREE_OPERAND (arg0, 0);
10017 STRIP_NOPS (tem);
10018 if (operand_equal_p (tem, arg1, 0))
10020 t1 = build_int_cst_type (type, -1);
10021 return omit_one_operand_loc (loc, type, t1, arg1);
10025 /* X + ~X is -1. */
10026 if (TREE_CODE (arg1) == BIT_NOT_EXPR
10027 && !TYPE_OVERFLOW_TRAPS (type))
10029 tree tem = TREE_OPERAND (arg1, 0);
10031 STRIP_NOPS (tem);
10032 if (operand_equal_p (arg0, tem, 0))
10034 t1 = build_int_cst_type (type, -1);
10035 return omit_one_operand_loc (loc, type, t1, arg0);
10039 /* X + (X / CST) * -CST is X % CST. */
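/* This uses the identity X == (X / C) * C + X % C of truncating
   division: e.g. "x + (x / 8) * -8" is "x % 8".  */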
10040 if (TREE_CODE (arg1) == MULT_EXPR
10041 && TREE_CODE (TREE_OPERAND (arg1, 0)) == TRUNC_DIV_EXPR
10042 && operand_equal_p (arg0,
10043 TREE_OPERAND (TREE_OPERAND (arg1, 0), 0), 0))
10045 tree cst0 = TREE_OPERAND (TREE_OPERAND (arg1, 0), 1);
10046 tree cst1 = TREE_OPERAND (arg1, 1);
10047 tree sum = fold_binary_loc (loc, PLUS_EXPR, TREE_TYPE (cst1),
10048 cst1, cst0);
10049 if (sum && integer_zerop (sum))
10050 return fold_convert_loc (loc, type,
10051 fold_build2_loc (loc, TRUNC_MOD_EXPR,
10052 TREE_TYPE (arg0), arg0,
10053 cst0));
10057 /* Handle (A1 * C1) + (A2 * C2) with A1, A2 or C1, C2 being the same or
10058 one. Make sure the type is not saturating and has the signedness of
10059 the stripped operands, as fold_plusminus_mult_expr will re-associate.
10060 ??? The latter condition should use TYPE_OVERFLOW_* flags instead. */
10061 if ((TREE_CODE (arg0) == MULT_EXPR
10062 || TREE_CODE (arg1) == MULT_EXPR)
10063 && !TYPE_SATURATING (type)
10064 && TYPE_UNSIGNED (type) == TYPE_UNSIGNED (TREE_TYPE (arg0))
10065 && TYPE_UNSIGNED (type) == TYPE_UNSIGNED (TREE_TYPE (arg1))
10066 && (!FLOAT_TYPE_P (type) || flag_associative_math))
10068 tree tem = fold_plusminus_mult_expr (loc, code, type, arg0, arg1);
10069 if (tem)
10070 return tem;
10073 if (! FLOAT_TYPE_P (type))
10075 if (integer_zerop (arg1))
10076 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
10078 /* If we are adding two BIT_AND_EXPR's, both of which are and'ing
10079 with a constant, and the two constants have no bits in common,
10080 we should treat this as a BIT_IOR_EXPR since this may produce more
10081 simplifications. */
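/* E.g. in "(x & 0xF0) + (y & 0x0F)" the two masks share no bits,
   so no carry can propagate between the terms and the addition
   is equivalent to "(x & 0xF0) | (y & 0x0F)".  */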
10082 if (TREE_CODE (arg0) == BIT_AND_EXPR
10083 && TREE_CODE (arg1) == BIT_AND_EXPR
10084 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
10085 && TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
10086 && integer_zerop (const_binop (BIT_AND_EXPR,
10087 TREE_OPERAND (arg0, 1),
10088 TREE_OPERAND (arg1, 1))))
10090 code = BIT_IOR_EXPR;
10091 goto bit_ior;
10094 /* Reassociate (plus (plus (mult) (foo)) (mult)) as
10095 (plus (plus (mult) (mult)) (foo)) so that we can
10096 take advantage of the factoring cases below. */
10097 if (TYPE_OVERFLOW_WRAPS (type)
10098 && (((TREE_CODE (arg0) == PLUS_EXPR
10099 || TREE_CODE (arg0) == MINUS_EXPR)
10100 && TREE_CODE (arg1) == MULT_EXPR)
10101 || ((TREE_CODE (arg1) == PLUS_EXPR
10102 || TREE_CODE (arg1) == MINUS_EXPR)
10103 && TREE_CODE (arg0) == MULT_EXPR)))
10105 tree parg0, parg1, parg, marg;
10106 enum tree_code pcode;
10108 if (TREE_CODE (arg1) == MULT_EXPR)
10109 parg = arg0, marg = arg1;
10110 else
10111 parg = arg1, marg = arg0;
10112 pcode = TREE_CODE (parg);
10113 parg0 = TREE_OPERAND (parg, 0);
10114 parg1 = TREE_OPERAND (parg, 1);
10115 STRIP_NOPS (parg0);
10116 STRIP_NOPS (parg1);
10118 if (TREE_CODE (parg0) == MULT_EXPR
10119 && TREE_CODE (parg1) != MULT_EXPR)
10120 return fold_build2_loc (loc, pcode, type,
10121 fold_build2_loc (loc, PLUS_EXPR, type,
10122 fold_convert_loc (loc, type,
10123 parg0),
10124 fold_convert_loc (loc, type,
10125 marg)),
10126 fold_convert_loc (loc, type, parg1));
10127 if (TREE_CODE (parg0) != MULT_EXPR
10128 && TREE_CODE (parg1) == MULT_EXPR)
10129 return
10130 fold_build2_loc (loc, PLUS_EXPR, type,
10131 fold_convert_loc (loc, type, parg0),
10132 fold_build2_loc (loc, pcode, type,
10133 fold_convert_loc (loc, type, marg),
10134 fold_convert_loc (loc, type,
10135 parg1)));
10138 else
10140 /* See if ARG1 is zero and X + ARG1 reduces to X. */
10141 if (fold_real_zero_addition_p (TREE_TYPE (arg0), arg1, 0))
10142 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
10144 /* Likewise if the operands are reversed. */
10145 if (fold_real_zero_addition_p (TREE_TYPE (arg1), arg0, 0))
10146 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
10148 /* Convert X + -C into X - C. */
10149 if (TREE_CODE (arg1) == REAL_CST
10150 && REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg1)))
10152 tem = fold_negate_const (arg1, type);
10153 if (!TREE_OVERFLOW (arg1) || !flag_trapping_math)
10154 return fold_build2_loc (loc, MINUS_EXPR, type,
10155 fold_convert_loc (loc, type, arg0),
10156 fold_convert_loc (loc, type, tem));
10159 /* Fold __complex__ ( x, 0 ) + __complex__ ( 0, y )
10160 to __complex__ ( x, y ). This is not the same for SNaNs or
10161 if signed zeros are involved. */
10162 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
10163 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
10164 && COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0)))
10166 tree rtype = TREE_TYPE (TREE_TYPE (arg0));
10167 tree arg0r = fold_unary_loc (loc, REALPART_EXPR, rtype, arg0);
10168 tree arg0i = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg0);
10169 bool arg0rz = false, arg0iz = false;
10170 if ((arg0r && (arg0rz = real_zerop (arg0r)))
10171 || (arg0i && (arg0iz = real_zerop (arg0i))))
10173 tree arg1r = fold_unary_loc (loc, REALPART_EXPR, rtype, arg1);
10174 tree arg1i = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg1);
10175 if (arg0rz && arg1i && real_zerop (arg1i))
10177 tree rp = arg1r ? arg1r
10178 : build1 (REALPART_EXPR, rtype, arg1);
10179 tree ip = arg0i ? arg0i
10180 : build1 (IMAGPART_EXPR, rtype, arg0);
10181 return fold_build2_loc (loc, COMPLEX_EXPR, type, rp, ip);
10183 else if (arg0iz && arg1r && real_zerop (arg1r))
10185 tree rp = arg0r ? arg0r
10186 : build1 (REALPART_EXPR, rtype, arg0);
10187 tree ip = arg1i ? arg1i
10188 : build1 (IMAGPART_EXPR, rtype, arg1);
10189 return fold_build2_loc (loc, COMPLEX_EXPR, type, rp, ip);
10194 if (flag_unsafe_math_optimizations
10195 && (TREE_CODE (arg0) == RDIV_EXPR || TREE_CODE (arg0) == MULT_EXPR)
10196 && (TREE_CODE (arg1) == RDIV_EXPR || TREE_CODE (arg1) == MULT_EXPR)
10197 && (tem = distribute_real_division (loc, code, type, arg0, arg1)))
10198 return tem;
10200 /* Convert x+x into x*2.0. */
10201 if (operand_equal_p (arg0, arg1, 0)
10202 && SCALAR_FLOAT_TYPE_P (type))
10203 return fold_build2_loc (loc, MULT_EXPR, type, arg0,
10204 build_real (type, dconst2));
10206 /* Convert a + (b*c + d*e) into (a + b*c) + d*e.
10207 We associate floats only if the user has specified
10208 -fassociative-math. */
10209 if (flag_associative_math
10210 && TREE_CODE (arg1) == PLUS_EXPR
10211 && TREE_CODE (arg0) != MULT_EXPR)
10213 tree tree10 = TREE_OPERAND (arg1, 0);
10214 tree tree11 = TREE_OPERAND (arg1, 1);
10215 if (TREE_CODE (tree11) == MULT_EXPR
10216 && TREE_CODE (tree10) == MULT_EXPR)
10218 tree tree0;
10219 tree0 = fold_build2_loc (loc, PLUS_EXPR, type, arg0, tree10);
10220 return fold_build2_loc (loc, PLUS_EXPR, type, tree0, tree11);
10223 /* Convert (b*c + d*e) + a into b*c + (d*e +a).
10224 We associate floats only if the user has specified
10225 -fassociative-math. */
10226 if (flag_associative_math
10227 && TREE_CODE (arg0) == PLUS_EXPR
10228 && TREE_CODE (arg1) != MULT_EXPR)
10230 tree tree00 = TREE_OPERAND (arg0, 0);
10231 tree tree01 = TREE_OPERAND (arg0, 1);
10232 if (TREE_CODE (tree01) == MULT_EXPR
10233 && TREE_CODE (tree00) == MULT_EXPR)
10235 tree tree0;
10236 tree0 = fold_build2_loc (loc, PLUS_EXPR, type, tree01, arg1);
10237 return fold_build2_loc (loc, PLUS_EXPR, type, tree00, tree0);
10242 bit_rotate:
10243 /* (A << C1) + (A >> C2) if A is unsigned and C1+C2 is the size of A
10244 is a rotate of A by C1 bits. */
10245 /* (A << B) + (A >> (Z - B)) if A is unsigned and Z is the size of A
10246 is a rotate of A by B bits. */
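/* E.g. for 32-bit unsigned x, "(x << 3) + (x >> 29)" is a left
   rotate of x by 3, and "(x << b) + (x >> (32 - b))" is a left
   rotate by b.  */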
10248 enum tree_code code0, code1;
10249 tree rtype;
10250 code0 = TREE_CODE (arg0);
10251 code1 = TREE_CODE (arg1);
10252 if (((code0 == RSHIFT_EXPR && code1 == LSHIFT_EXPR)
10253 || (code1 == RSHIFT_EXPR && code0 == LSHIFT_EXPR))
10254 && operand_equal_p (TREE_OPERAND (arg0, 0),
10255 TREE_OPERAND (arg1, 0), 0)
10256 && (rtype = TREE_TYPE (TREE_OPERAND (arg0, 0)),
10257 TYPE_UNSIGNED (rtype))
10258 /* Only create rotates in complete modes. Other cases are not
10259 expanded properly. */
10260 && TYPE_PRECISION (rtype) == GET_MODE_PRECISION (TYPE_MODE (rtype)))
10262 tree tree01, tree11;
10263 enum tree_code code01, code11;
10265 tree01 = TREE_OPERAND (arg0, 1);
10266 tree11 = TREE_OPERAND (arg1, 1);
10267 STRIP_NOPS (tree01);
10268 STRIP_NOPS (tree11);
10269 code01 = TREE_CODE (tree01);
10270 code11 = TREE_CODE (tree11);
10271 if (code01 == INTEGER_CST
10272 && code11 == INTEGER_CST
10273 && TREE_INT_CST_HIGH (tree01) == 0
10274 && TREE_INT_CST_HIGH (tree11) == 0
10275 && ((TREE_INT_CST_LOW (tree01) + TREE_INT_CST_LOW (tree11))
10276 == TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg0, 0)))))
10278 tem = build2_loc (loc, LROTATE_EXPR,
10279 TREE_TYPE (TREE_OPERAND (arg0, 0)),
10280 TREE_OPERAND (arg0, 0),
10281 code0 == LSHIFT_EXPR ? tree01 : tree11);
10282 return fold_convert_loc (loc, type, tem);
10284 else if (code11 == MINUS_EXPR)
10286 tree tree110, tree111;
10287 tree110 = TREE_OPERAND (tree11, 0);
10288 tree111 = TREE_OPERAND (tree11, 1);
10289 STRIP_NOPS (tree110);
10290 STRIP_NOPS (tree111);
10291 if (TREE_CODE (tree110) == INTEGER_CST
10292 && 0 == compare_tree_int (tree110,
10293 TYPE_PRECISION
10294 (TREE_TYPE (TREE_OPERAND
10295 (arg0, 0))))
10296 && operand_equal_p (tree01, tree111, 0))
10297 return
10298 fold_convert_loc (loc, type,
10299 build2 ((code0 == LSHIFT_EXPR
10300 ? LROTATE_EXPR
10301 : RROTATE_EXPR),
10302 TREE_TYPE (TREE_OPERAND (arg0, 0)),
10303 TREE_OPERAND (arg0, 0), tree01));
10305 else if (code01 == MINUS_EXPR)
10307 tree tree010, tree011;
10308 tree010 = TREE_OPERAND (tree01, 0);
10309 tree011 = TREE_OPERAND (tree01, 1);
10310 STRIP_NOPS (tree010);
10311 STRIP_NOPS (tree011);
10312 if (TREE_CODE (tree010) == INTEGER_CST
10313 && 0 == compare_tree_int (tree010,
10314 TYPE_PRECISION
10315 (TREE_TYPE (TREE_OPERAND
10316 (arg0, 0))))
10317 && operand_equal_p (tree11, tree011, 0))
10318 return fold_convert_loc
10319 (loc, type,
10320 build2 ((code0 != LSHIFT_EXPR
10321 ? LROTATE_EXPR
10322 : RROTATE_EXPR),
10323 TREE_TYPE (TREE_OPERAND (arg0, 0)),
10324 TREE_OPERAND (arg0, 0), tree11));
10329 associate:
10330 /* In most languages, we can't associate operations on floats through
10331 parentheses. Rather than remember where the parentheses were, we
10332 don't associate floats at all, unless the user has specified
10333 -fassociative-math.
10334 And, we need to make sure type is not saturating. */
10336 if ((! FLOAT_TYPE_P (type) || flag_associative_math)
10337 && !TYPE_SATURATING (type))
10339 tree var0, con0, lit0, minus_lit0;
10340 tree var1, con1, lit1, minus_lit1;
10341 bool ok = true;
10343 /* Split both trees into variables, constants, and literals. Then
10344 associate each group together, the constants with literals,
10345 then the result with variables. This increases the chances of
10346 literals being recombined later and of generating relocatable
10347 expressions for the sum of a constant and literal. */
10348 var0 = split_tree (arg0, code, &con0, &lit0, &minus_lit0, 0);
10349 var1 = split_tree (arg1, code, &con1, &lit1, &minus_lit1,
10350 code == MINUS_EXPR);
10352 /* Recombine MINUS_EXPR operands by using PLUS_EXPR. */
10353 if (code == MINUS_EXPR)
10354 code = PLUS_EXPR;
10356 /* With undefined overflow we can only associate constants with one
10357 variable, and constants whose association doesn't overflow. */
10358 if ((POINTER_TYPE_P (type) && POINTER_TYPE_OVERFLOW_UNDEFINED)
10359 || (INTEGRAL_TYPE_P (type) && !TYPE_OVERFLOW_WRAPS (type)))
10361 if (var0 && var1)
10363 tree tmp0 = var0;
10364 tree tmp1 = var1;
10366 if (TREE_CODE (tmp0) == NEGATE_EXPR)
10367 tmp0 = TREE_OPERAND (tmp0, 0);
10368 if (CONVERT_EXPR_P (tmp0)
10369 && INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (tmp0, 0)))
10370 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (tmp0, 0)))
10371 <= TYPE_PRECISION (type)))
10372 tmp0 = TREE_OPERAND (tmp0, 0);
10373 if (TREE_CODE (tmp1) == NEGATE_EXPR)
10374 tmp1 = TREE_OPERAND (tmp1, 0);
10375 if (CONVERT_EXPR_P (tmp1)
10376 && INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (tmp1, 0)))
10377 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (tmp1, 0)))
10378 <= TYPE_PRECISION (type)))
10379 tmp1 = TREE_OPERAND (tmp1, 0);
10380 /* The only case we can still associate with two variables
10381 is if they are the same, modulo negation and bit-pattern
10382 preserving conversions. */
10383 if (!operand_equal_p (tmp0, tmp1, 0))
10384 ok = false;
10387 if (ok && lit0 && lit1)
10389 tree tmp0 = fold_convert (type, lit0);
10390 tree tmp1 = fold_convert (type, lit1);
10392 if (!TREE_OVERFLOW (tmp0) && !TREE_OVERFLOW (tmp1)
10393 && TREE_OVERFLOW (fold_build2 (code, type, tmp0, tmp1)))
10394 ok = false;
10398 /* Only do something if we found more than two objects. Otherwise,
10399 nothing has changed and we risk infinite recursion. */
10400 if (ok
10401 && (2 < ((var0 != 0) + (var1 != 0)
10402 + (con0 != 0) + (con1 != 0)
10403 + (lit0 != 0) + (lit1 != 0)
10404 + (minus_lit0 != 0) + (minus_lit1 != 0))))
10406 var0 = associate_trees (loc, var0, var1, code, type);
10407 con0 = associate_trees (loc, con0, con1, code, type);
10408 lit0 = associate_trees (loc, lit0, lit1, code, type);
10409 minus_lit0 = associate_trees (loc, minus_lit0, minus_lit1, code, type);
10411 /* Preserve the MINUS_EXPR if the negative part of the literal is
10412 greater than the positive part. Otherwise, the multiplicative
10413 folding code (i.e. extract_muldiv) may be fooled in case
10414 unsigned constants are subtracted, like in the following
10415 example: ((X*2 + 4) - 8U)/2. */
10416 if (minus_lit0 && lit0)
10418 if (TREE_CODE (lit0) == INTEGER_CST
10419 && TREE_CODE (minus_lit0) == INTEGER_CST
10420 && tree_int_cst_lt (lit0, minus_lit0))
10422 minus_lit0 = associate_trees (loc, minus_lit0, lit0,
10423 MINUS_EXPR, type);
10424 lit0 = 0;
10426 else
10428 lit0 = associate_trees (loc, lit0, minus_lit0,
10429 MINUS_EXPR, type);
10430 minus_lit0 = 0;
10433 if (minus_lit0)
10435 if (con0 == 0)
10436 return
10437 fold_convert_loc (loc, type,
10438 associate_trees (loc, var0, minus_lit0,
10439 MINUS_EXPR, type));
10440 else
10442 con0 = associate_trees (loc, con0, minus_lit0,
10443 MINUS_EXPR, type);
10444 return
10445 fold_convert_loc (loc, type,
10446 associate_trees (loc, var0, con0,
10447 PLUS_EXPR, type));
10451 con0 = associate_trees (loc, con0, lit0, code, type);
10452 return
10453 fold_convert_loc (loc, type, associate_trees (loc, var0, con0,
10454 code, type));
10458 return NULL_TREE;
10460 case MINUS_EXPR:
10461 /* Pointer simplifications for subtraction, simple reassociations. */
10462 if (POINTER_TYPE_P (TREE_TYPE (arg1)) && POINTER_TYPE_P (TREE_TYPE (arg0)))
10464 /* (PTR0 p+ A) - (PTR1 p+ B) -> (PTR0 - PTR1) + (A - B) */
10465 if (TREE_CODE (arg0) == POINTER_PLUS_EXPR
10466 && TREE_CODE (arg1) == POINTER_PLUS_EXPR)
10468 tree arg00 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
10469 tree arg01 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
10470 tree arg10 = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 0));
10471 tree arg11 = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 1));
10472 return fold_build2_loc (loc, PLUS_EXPR, type,
10473 fold_build2_loc (loc, MINUS_EXPR, type,
10474 arg00, arg10),
10475 fold_build2_loc (loc, MINUS_EXPR, type,
10476 arg01, arg11));
10478 /* (PTR0 p+ A) - PTR1 -> (PTR0 - PTR1) + A, assuming PTR0 - PTR1 simplifies. */
10479 else if (TREE_CODE (arg0) == POINTER_PLUS_EXPR)
10481 tree arg00 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
10482 tree arg01 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
10483 tree tmp = fold_binary_loc (loc, MINUS_EXPR, type, arg00,
10484 fold_convert_loc (loc, type, arg1));
10485 if (tmp)
10486 return fold_build2_loc (loc, PLUS_EXPR, type, tmp, arg01);
10489 /* A - (-B) -> A + B */
10490 if (TREE_CODE (arg1) == NEGATE_EXPR)
10491 return fold_build2_loc (loc, PLUS_EXPR, type, op0,
10492 fold_convert_loc (loc, type,
10493 TREE_OPERAND (arg1, 0)));
10494 /* (-A) - B -> (-B) - A where B is easily negated and we can swap. */
10495 if (TREE_CODE (arg0) == NEGATE_EXPR
10496 && (FLOAT_TYPE_P (type)
10497 || INTEGRAL_TYPE_P (type))
10498 && negate_expr_p (arg1)
10499 && reorder_operands_p (arg0, arg1))
10500 return fold_build2_loc (loc, MINUS_EXPR, type,
10501 fold_convert_loc (loc, type,
10502 negate_expr (arg1)),
10503 fold_convert_loc (loc, type,
10504 TREE_OPERAND (arg0, 0)));
10505 /* Convert -A - 1 to ~A. */
10506 if (INTEGRAL_TYPE_P (type)
10507 && TREE_CODE (arg0) == NEGATE_EXPR
10508 && integer_onep (arg1)
10509 && !TYPE_OVERFLOW_TRAPS (type))
10510 return fold_build1_loc (loc, BIT_NOT_EXPR, type,
10511 fold_convert_loc (loc, type,
10512 TREE_OPERAND (arg0, 0)));
10514 /* Convert -1 - A to ~A. */
10515 if (INTEGRAL_TYPE_P (type)
10516 && integer_all_onesp (arg0))
10517 return fold_build1_loc (loc, BIT_NOT_EXPR, type, op1);
10520 /* X - (X / CST) * CST is X % CST. */
10521 if (INTEGRAL_TYPE_P (type)
10522 && TREE_CODE (arg1) == MULT_EXPR
10523 && TREE_CODE (TREE_OPERAND (arg1, 0)) == TRUNC_DIV_EXPR
10524 && operand_equal_p (arg0,
10525 TREE_OPERAND (TREE_OPERAND (arg1, 0), 0), 0)
10526 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg1, 0), 1),
10527 TREE_OPERAND (arg1, 1), 0))
10528 return
10529 fold_convert_loc (loc, type,
10530 fold_build2_loc (loc, TRUNC_MOD_EXPR, TREE_TYPE (arg0),
10531 arg0, TREE_OPERAND (arg1, 1)));
10533 if (! FLOAT_TYPE_P (type))
10535 if (integer_zerop (arg0))
10536 return negate_expr (fold_convert_loc (loc, type, arg1));
10537 if (integer_zerop (arg1))
10538 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
10540 /* Fold A - (A & B) into ~B & A. */
10541 if (!TREE_SIDE_EFFECTS (arg0)
10542 && TREE_CODE (arg1) == BIT_AND_EXPR)
10544 if (operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0))
10546 tree arg10 = fold_convert_loc (loc, type,
10547 TREE_OPERAND (arg1, 0));
10548 return fold_build2_loc (loc, BIT_AND_EXPR, type,
10549 fold_build1_loc (loc, BIT_NOT_EXPR,
10550 type, arg10),
10551 fold_convert_loc (loc, type, arg0));
10553 if (operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
10555 tree arg11 = fold_convert_loc (loc,
10556 type, TREE_OPERAND (arg1, 1));
10557 return fold_build2_loc (loc, BIT_AND_EXPR, type,
10558 fold_build1_loc (loc, BIT_NOT_EXPR,
10559 type, arg11),
10560 fold_convert_loc (loc, type, arg0));
10564 /* Fold (A & ~B) - (A & B) into (A ^ B) - B, where B is
10565 any power of 2 minus 1. */
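/* With B = 2**k - 1, split A into high part Ah = A & ~B and low
   part Al = A & B. Then (A & ~B) - (A & B) = Ah - Al, and
   (A ^ B) - B = Ah + (B - Al) - B = Ah - Al as well, since XOR
   with an all-ones mask complements the low k bits.  */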
10566 if (TREE_CODE (arg0) == BIT_AND_EXPR
10567 && TREE_CODE (arg1) == BIT_AND_EXPR
10568 && operand_equal_p (TREE_OPERAND (arg0, 0),
10569 TREE_OPERAND (arg1, 0), 0))
10571 tree mask0 = TREE_OPERAND (arg0, 1);
10572 tree mask1 = TREE_OPERAND (arg1, 1);
10573 tree tem = fold_build1_loc (loc, BIT_NOT_EXPR, type, mask0);
10575 if (operand_equal_p (tem, mask1, 0))
10577 tem = fold_build2_loc (loc, BIT_XOR_EXPR, type,
10578 TREE_OPERAND (arg0, 0), mask1);
10579 return fold_build2_loc (loc, MINUS_EXPR, type, tem, mask1);
10584 /* See if ARG1 is zero and X - ARG1 reduces to X. */
10585 else if (fold_real_zero_addition_p (TREE_TYPE (arg0), arg1, 1))
10586 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
10588 /* (ARG0 - ARG1) is the same as (-ARG1 + ARG0). So check whether
10589 ARG0 is zero and X + ARG0 reduces to X, since that would mean
10590 (-ARG1 + ARG0) reduces to -ARG1. */
10591 else if (fold_real_zero_addition_p (TREE_TYPE (arg1), arg0, 0))
10592 return negate_expr (fold_convert_loc (loc, type, arg1));
10594 /* Fold __complex__ ( x, 0 ) - __complex__ ( 0, y ) to
10595 __complex__ ( x, -y ). This is not the same for SNaNs or if
10596 signed zeros are involved. */
10597 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
10598 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
10599 && COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0)))
10601 tree rtype = TREE_TYPE (TREE_TYPE (arg0));
10602 tree arg0r = fold_unary_loc (loc, REALPART_EXPR, rtype, arg0);
10603 tree arg0i = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg0);
10604 bool arg0rz = false, arg0iz = false;
10605 if ((arg0r && (arg0rz = real_zerop (arg0r)))
10606 || (arg0i && (arg0iz = real_zerop (arg0i))))
10608 tree arg1r = fold_unary_loc (loc, REALPART_EXPR, rtype, arg1);
10609 tree arg1i = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg1);
10610 if (arg0rz && arg1i && real_zerop (arg1i))
10612 tree rp = fold_build1_loc (loc, NEGATE_EXPR, rtype,
10613 arg1r ? arg1r
10614 : build1 (REALPART_EXPR, rtype, arg1));
10615 tree ip = arg0i ? arg0i
10616 : build1 (IMAGPART_EXPR, rtype, arg0);
10617 return fold_build2_loc (loc, COMPLEX_EXPR, type, rp, ip);
10619 else if (arg0iz && arg1r && real_zerop (arg1r))
10621 tree rp = arg0r ? arg0r
10622 : build1 (REALPART_EXPR, rtype, arg0);
10623 tree ip = fold_build1_loc (loc, NEGATE_EXPR, rtype,
10624 arg1i ? arg1i
10625 : build1 (IMAGPART_EXPR, rtype, arg1));
10626 return fold_build2_loc (loc, COMPLEX_EXPR, type, rp, ip);
10631 /* Fold &x - &x. This can happen from &x.foo - &x.
10632 This is unsafe for certain floats even in non-IEEE formats.
10633 In IEEE, it is unsafe because it gives the wrong result for NaNs.
10634 Also note that operand_equal_p is always false if an operand
10635 is volatile. */
10637 if ((!FLOAT_TYPE_P (type) || !HONOR_NANS (TYPE_MODE (type)))
10638 && operand_equal_p (arg0, arg1, 0))
10639 return build_zero_cst (type);
10641 /* A - B -> A + (-B) if B is easily negatable. */
10642 if (negate_expr_p (arg1)
10643 && ((FLOAT_TYPE_P (type)
10644 /* Avoid this transformation if B is a positive REAL_CST. */
10645 && (TREE_CODE (arg1) != REAL_CST
10646 || REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg1))))
10647 || INTEGRAL_TYPE_P (type)))
10648 return fold_build2_loc (loc, PLUS_EXPR, type,
10649 fold_convert_loc (loc, type, arg0),
10650 fold_convert_loc (loc, type,
10651 negate_expr (arg1)));
10653 /* Try folding difference of addresses. */
10655 HOST_WIDE_INT diff;
10657 if ((TREE_CODE (arg0) == ADDR_EXPR
10658 || TREE_CODE (arg1) == ADDR_EXPR)
10659 && ptr_difference_const (arg0, arg1, &diff))
10660 return build_int_cst_type (type, diff);
10663 /* Fold &a[i] - &a[j] to i-j. */
10664 if (TREE_CODE (arg0) == ADDR_EXPR
10665 && TREE_CODE (TREE_OPERAND (arg0, 0)) == ARRAY_REF
10666 && TREE_CODE (arg1) == ADDR_EXPR
10667 && TREE_CODE (TREE_OPERAND (arg1, 0)) == ARRAY_REF)
10669 tree tem = fold_addr_of_array_ref_difference (loc, type,
10670 TREE_OPERAND (arg0, 0),
10671 TREE_OPERAND (arg1, 0));
10672 if (tem)
10673 return tem;
10676 if (FLOAT_TYPE_P (type)
10677 && flag_unsafe_math_optimizations
10678 && (TREE_CODE (arg0) == RDIV_EXPR || TREE_CODE (arg0) == MULT_EXPR)
10679 && (TREE_CODE (arg1) == RDIV_EXPR || TREE_CODE (arg1) == MULT_EXPR)
10680 && (tem = distribute_real_division (loc, code, type, arg0, arg1)))
10681 return tem;
10683 /* Handle (A1 * C1) - (A2 * C2) with A1, A2 or C1, C2 being the same, or
10684 one of the factors being 1. Make sure the type is not saturating and has
10685 the signedness of the stripped operands, as fold_plusminus_mult_expr will re-associate.
10686 ??? The latter condition should use TYPE_OVERFLOW_* flags instead. */
10687 if ((TREE_CODE (arg0) == MULT_EXPR
10688 || TREE_CODE (arg1) == MULT_EXPR)
10689 && !TYPE_SATURATING (type)
10690 && TYPE_UNSIGNED (type) == TYPE_UNSIGNED (TREE_TYPE (arg0))
10691 && TYPE_UNSIGNED (type) == TYPE_UNSIGNED (TREE_TYPE (arg1))
10692 && (!FLOAT_TYPE_P (type) || flag_associative_math))
10694 tree tem = fold_plusminus_mult_expr (loc, code, type, arg0, arg1);
10695 if (tem)
10696 return tem;
10699 goto associate;
10701 case MULT_EXPR:
10702 /* (-A) * (-B) -> A * B */
10703 if (TREE_CODE (arg0) == NEGATE_EXPR && negate_expr_p (arg1))
10704 return fold_build2_loc (loc, MULT_EXPR, type,
10705 fold_convert_loc (loc, type,
10706 TREE_OPERAND (arg0, 0)),
10707 fold_convert_loc (loc, type,
10708 negate_expr (arg1)));
10709 if (TREE_CODE (arg1) == NEGATE_EXPR && negate_expr_p (arg0))
10710 return fold_build2_loc (loc, MULT_EXPR, type,
10711 fold_convert_loc (loc, type,
10712 negate_expr (arg0)),
10713 fold_convert_loc (loc, type,
10714 TREE_OPERAND (arg1, 0)));
10716 if (! FLOAT_TYPE_P (type))
10718 if (integer_zerop (arg1))
10719 return omit_one_operand_loc (loc, type, arg1, arg0);
10720 if (integer_onep (arg1))
10721 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
10722 /* Transform x * -1 into -x. Make sure to do the negation
10723 on the original operand with conversions not stripped
10724 because we can only strip non-sign-changing conversions. */
10725 if (integer_all_onesp (arg1))
10726 return fold_convert_loc (loc, type, negate_expr (op0));
10727 /* Transform x * -C into -x * C if x is easily negatable. */
10728 if (TREE_CODE (arg1) == INTEGER_CST
10729 && tree_int_cst_sgn (arg1) == -1
10730 && negate_expr_p (arg0)
10731 && (tem = negate_expr (arg1)) != arg1
10732 && !TREE_OVERFLOW (tem))
10733 return fold_build2_loc (loc, MULT_EXPR, type,
10734 fold_convert_loc (loc, type,
10735 negate_expr (arg0)),
10736 tem);
10738 /* (a * (1 << b)) is (a << b).  */
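/* For example, a * (1 << 3) is a * 8, i.e. a << 3.  */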
10739 if (TREE_CODE (arg1) == LSHIFT_EXPR
10740 && integer_onep (TREE_OPERAND (arg1, 0)))
10741 return fold_build2_loc (loc, LSHIFT_EXPR, type, op0,
10742 TREE_OPERAND (arg1, 1));
10743 if (TREE_CODE (arg0) == LSHIFT_EXPR
10744 && integer_onep (TREE_OPERAND (arg0, 0)))
10745 return fold_build2_loc (loc, LSHIFT_EXPR, type, op1,
10746 TREE_OPERAND (arg0, 1));
10748 /* (A + A) * C -> A * 2 * C.  */
10749 if (TREE_CODE (arg0) == PLUS_EXPR
10750 && TREE_CODE (arg1) == INTEGER_CST
10751 && operand_equal_p (TREE_OPERAND (arg0, 0),
10752 TREE_OPERAND (arg0, 1), 0))
10753 return fold_build2_loc (loc, MULT_EXPR, type,
10754 omit_one_operand_loc (loc, type,
10755 TREE_OPERAND (arg0, 0),
10756 TREE_OPERAND (arg0, 1)),
10757 fold_build2_loc (loc, MULT_EXPR, type,
10758 build_int_cst (type, 2), arg1));
10760 strict_overflow_p = false;
10761 if (TREE_CODE (arg1) == INTEGER_CST
10762 && 0 != (tem = extract_muldiv (op0, arg1, code, NULL_TREE,
10763 &strict_overflow_p)))
10765 if (strict_overflow_p)
10766 fold_overflow_warning (("assuming signed overflow does not "
10767 "occur when simplifying "
10768 "multiplication"),
10769 WARN_STRICT_OVERFLOW_MISC);
10770 return fold_convert_loc (loc, type, tem);
10773 /* Optimize z * conj(z) for integer complex numbers. */
10774 if (TREE_CODE (arg0) == CONJ_EXPR
10775 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
10776 return fold_mult_zconjz (loc, type, arg1);
10777 if (TREE_CODE (arg1) == CONJ_EXPR
10778 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
10779 return fold_mult_zconjz (loc, type, arg0);
10781 else
10783 /* Maybe fold x * 0 to 0. The expressions aren't the same
10784 when x is NaN, since x * 0 is also NaN. Nor are they the
10785 same in modes with signed zeros, since multiplying a
10786 negative value by 0 gives -0, not +0. */
10787 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0)))
10788 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
10789 && real_zerop (arg1))
10790 return omit_one_operand_loc (loc, type, arg1, arg0);
10791 /* In IEEE floating point, x*1 is not equivalent to x for SNaNs.
10792 Likewise for complex arithmetic with signed zeros. */
10793 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
10794 && (!HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
10795 || !COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0)))
10796 && real_onep (arg1))
10797 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
10799 /* Transform x * -1.0 into -x. */
10800 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
10801 && (!HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
10802 || !COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0)))
10803 && real_minus_onep (arg1))
10804 return fold_convert_loc (loc, type, negate_expr (arg0));
10806 /* Convert (C1/X)*C2 into (C1*C2)/X. This transformation may change
10807 the result for floating point types due to rounding, so it is applied
10808 only if -fassociative-math was specified. */
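/* For example, (1.0/X)*3.0 becomes 3.0/X; the two forms can round
   differently, hence the guard above.  */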
10809 if (flag_associative_math
10810 && TREE_CODE (arg0) == RDIV_EXPR
10811 && TREE_CODE (arg1) == REAL_CST
10812 && TREE_CODE (TREE_OPERAND (arg0, 0)) == REAL_CST)
10814 tree tem = const_binop (MULT_EXPR, TREE_OPERAND (arg0, 0),
10815 arg1);
10816 if (tem)
10817 return fold_build2_loc (loc, RDIV_EXPR, type, tem,
10818 TREE_OPERAND (arg0, 1));
10821 /* Strip sign operations from X in X*X, i.e. -Y*-Y -> Y*Y. */
10822 if (operand_equal_p (arg0, arg1, 0))
10824 tree tem = fold_strip_sign_ops (arg0);
10825 if (tem != NULL_TREE)
10827 tem = fold_convert_loc (loc, type, tem);
10828 return fold_build2_loc (loc, MULT_EXPR, type, tem, tem);
10832 /* Fold z * +-I to __complex__ (-+__imag z, +-__real z).
10833 This is not the same for NaNs or if signed zeros are
10834 involved. */
10835 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0)))
10836 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
10837 && COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0))
10838 && TREE_CODE (arg1) == COMPLEX_CST
10839 && real_zerop (TREE_REALPART (arg1)))
10841 tree rtype = TREE_TYPE (TREE_TYPE (arg0));
10842 if (real_onep (TREE_IMAGPART (arg1)))
10843 return
10844 fold_build2_loc (loc, COMPLEX_EXPR, type,
10845 negate_expr (fold_build1_loc (loc, IMAGPART_EXPR,
10846 rtype, arg0)),
10847 fold_build1_loc (loc, REALPART_EXPR, rtype, arg0));
10848 else if (real_minus_onep (TREE_IMAGPART (arg1)))
10849 return
10850 fold_build2_loc (loc, COMPLEX_EXPR, type,
10851 fold_build1_loc (loc, IMAGPART_EXPR, rtype, arg0),
10852 negate_expr (fold_build1_loc (loc, REALPART_EXPR,
10853 rtype, arg0)));
10856 /* Optimize z * conj(z) for floating point complex numbers.
10857 Guarded by flag_unsafe_math_optimizations as non-finite
10858 imaginary components don't produce scalar results. */
10859 if (flag_unsafe_math_optimizations
10860 && TREE_CODE (arg0) == CONJ_EXPR
10861 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
10862 return fold_mult_zconjz (loc, type, arg1);
10863 if (flag_unsafe_math_optimizations
10864 && TREE_CODE (arg1) == CONJ_EXPR
10865 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
10866 return fold_mult_zconjz (loc, type, arg0);
10868 if (flag_unsafe_math_optimizations)
10870 enum built_in_function fcode0 = builtin_mathfn_code (arg0);
10871 enum built_in_function fcode1 = builtin_mathfn_code (arg1);
10873 /* Optimizations of root(...)*root(...). */
10874 if (fcode0 == fcode1 && BUILTIN_ROOT_P (fcode0))
10876 tree rootfn, arg;
10877 tree arg00 = CALL_EXPR_ARG (arg0, 0);
10878 tree arg10 = CALL_EXPR_ARG (arg1, 0);
10880 /* Optimize sqrt(x)*sqrt(x) as x. */
10881 if (BUILTIN_SQRT_P (fcode0)
10882 && operand_equal_p (arg00, arg10, 0)
10883 && ! HONOR_SNANS (TYPE_MODE (type)))
10884 return arg00;
10886 /* Optimize root(x)*root(y) as root(x*y). */
10887 rootfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
10888 arg = fold_build2_loc (loc, MULT_EXPR, type, arg00, arg10);
10889 return build_call_expr_loc (loc, rootfn, 1, arg);
10892 /* Optimize expN(x)*expN(y) as expN(x+y). */
10893 if (fcode0 == fcode1 && BUILTIN_EXPONENT_P (fcode0))
10895 tree expfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
10896 tree arg = fold_build2_loc (loc, PLUS_EXPR, type,
10897 CALL_EXPR_ARG (arg0, 0),
10898 CALL_EXPR_ARG (arg1, 0));
10899 return build_call_expr_loc (loc, expfn, 1, arg);
10902 /* Optimizations of pow(...)*pow(...). */
10903 if ((fcode0 == BUILT_IN_POW && fcode1 == BUILT_IN_POW)
10904 || (fcode0 == BUILT_IN_POWF && fcode1 == BUILT_IN_POWF)
10905 || (fcode0 == BUILT_IN_POWL && fcode1 == BUILT_IN_POWL))
10907 tree arg00 = CALL_EXPR_ARG (arg0, 0);
10908 tree arg01 = CALL_EXPR_ARG (arg0, 1);
10909 tree arg10 = CALL_EXPR_ARG (arg1, 0);
10910 tree arg11 = CALL_EXPR_ARG (arg1, 1);
10912 /* Optimize pow(x,y)*pow(z,y) as pow(x*z,y). */
10913 if (operand_equal_p (arg01, arg11, 0))
10915 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
10916 tree arg = fold_build2_loc (loc, MULT_EXPR, type,
10917 arg00, arg10);
10918 return build_call_expr_loc (loc, powfn, 2, arg, arg01);
10921 /* Optimize pow(x,y)*pow(x,z) as pow(x,y+z). */
10922 if (operand_equal_p (arg00, arg10, 0))
10924 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
10925 tree arg = fold_build2_loc (loc, PLUS_EXPR, type,
10926 arg01, arg11);
10927 return build_call_expr_loc (loc, powfn, 2, arg00, arg);
10931 /* Optimize tan(x)*cos(x) as sin(x). */
10932 if (((fcode0 == BUILT_IN_TAN && fcode1 == BUILT_IN_COS)
10933 || (fcode0 == BUILT_IN_TANF && fcode1 == BUILT_IN_COSF)
10934 || (fcode0 == BUILT_IN_TANL && fcode1 == BUILT_IN_COSL)
10935 || (fcode0 == BUILT_IN_COS && fcode1 == BUILT_IN_TAN)
10936 || (fcode0 == BUILT_IN_COSF && fcode1 == BUILT_IN_TANF)
10937 || (fcode0 == BUILT_IN_COSL && fcode1 == BUILT_IN_TANL))
10938 && operand_equal_p (CALL_EXPR_ARG (arg0, 0),
10939 CALL_EXPR_ARG (arg1, 0), 0))
10941 tree sinfn = mathfn_built_in (type, BUILT_IN_SIN);
10943 if (sinfn != NULL_TREE)
10944 return build_call_expr_loc (loc, sinfn, 1,
10945 CALL_EXPR_ARG (arg0, 0));
10948 /* Optimize x*pow(x,c) as pow(x,c+1). */
10949 if (fcode1 == BUILT_IN_POW
10950 || fcode1 == BUILT_IN_POWF
10951 || fcode1 == BUILT_IN_POWL)
10953 tree arg10 = CALL_EXPR_ARG (arg1, 0);
10954 tree arg11 = CALL_EXPR_ARG (arg1, 1);
10955 if (TREE_CODE (arg11) == REAL_CST
10956 && !TREE_OVERFLOW (arg11)
10957 && operand_equal_p (arg0, arg10, 0))
10959 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg1), 0);
10960 REAL_VALUE_TYPE c;
10961 tree arg;
10963 c = TREE_REAL_CST (arg11);
10964 real_arithmetic (&c, PLUS_EXPR, &c, &dconst1);
10965 arg = build_real (type, c);
10966 return build_call_expr_loc (loc, powfn, 2, arg0, arg);
10970 /* Optimize pow(x,c)*x as pow(x,c+1). */
10971 if (fcode0 == BUILT_IN_POW
10972 || fcode0 == BUILT_IN_POWF
10973 || fcode0 == BUILT_IN_POWL)
10975 tree arg00 = CALL_EXPR_ARG (arg0, 0);
10976 tree arg01 = CALL_EXPR_ARG (arg0, 1);
10977 if (TREE_CODE (arg01) == REAL_CST
10978 && !TREE_OVERFLOW (arg01)
10979 && operand_equal_p (arg1, arg00, 0))
10981 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
10982 REAL_VALUE_TYPE c;
10983 tree arg;
10985 c = TREE_REAL_CST (arg01);
10986 real_arithmetic (&c, PLUS_EXPR, &c, &dconst1);
10987 arg = build_real (type, c);
10988 return build_call_expr_loc (loc, powfn, 2, arg1, arg);
10992 /* Canonicalize x*x as pow(x,2.0), which is expanded as x*x. */
10993 if (!in_gimple_form
10994 && optimize
10995 && operand_equal_p (arg0, arg1, 0))
10997 tree powfn = mathfn_built_in (type, BUILT_IN_POW);
10999 if (powfn)
11001 tree arg = build_real (type, dconst2);
11002 return build_call_expr_loc (loc, powfn, 2, arg0, arg);
11007 goto associate;
11009 case BIT_IOR_EXPR:
11010 bit_ior:
11011 if (integer_all_onesp (arg1))
11012 return omit_one_operand_loc (loc, type, arg1, arg0);
11013 if (integer_zerop (arg1))
11014 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
11015 if (operand_equal_p (arg0, arg1, 0))
11016 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
11018 /* ~X | X is -1. */
11019 if (TREE_CODE (arg0) == BIT_NOT_EXPR
11020 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
11022 t1 = build_zero_cst (type);
11023 t1 = fold_unary_loc (loc, BIT_NOT_EXPR, type, t1);
11024 return omit_one_operand_loc (loc, type, t1, arg1);
11027 /* X | ~X is -1. */
11028 if (TREE_CODE (arg1) == BIT_NOT_EXPR
11029 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
11031 t1 = build_zero_cst (type);
11032 t1 = fold_unary_loc (loc, BIT_NOT_EXPR, type, t1);
11033 return omit_one_operand_loc (loc, type, t1, arg0);
11036 /* Canonicalize (X & C1) | C2. */
11037 if (TREE_CODE (arg0) == BIT_AND_EXPR
11038 && TREE_CODE (arg1) == INTEGER_CST
11039 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
11041 double_int c1, c2, c3, msk;
11042 int width = TYPE_PRECISION (type), w;
11043 c1 = tree_to_double_int (TREE_OPERAND (arg0, 1));
11044 c2 = tree_to_double_int (arg1);
11046 /* If (C1&C2) == C1, then (X&C1)|C2 becomes (X,C2). */
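/* For example, with C1 = 0x0f and C2 = 0xff, every bit the AND can
   keep is set by the IOR anyway, so the result is simply C2 and X is
   retained only for its side effects.  */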
11047 if (double_int_equal_p (double_int_and (c1, c2), c1))
11048 return omit_one_operand_loc (loc, type, arg1,
11049 TREE_OPERAND (arg0, 0));
11051 msk = double_int_mask (width);
11053 /* If (C1|C2) == ~0 then (X&C1)|C2 becomes X|C2. */
11054 if (double_int_zero_p (double_int_and_not (msk,
11055 double_int_ior (c1, c2))))
11056 return fold_build2_loc (loc, BIT_IOR_EXPR, type,
11057 TREE_OPERAND (arg0, 0), arg1);
11059 /* Minimize the number of bits set in C1, i.e. C1 := C1 & ~C2,
11060 unless (C1 & ~C2) | (C2 & C3) for some C3 is a mask of some
11061 mode which allows further optimizations. */
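/* For example, (X & 0xff) | 0xf0 becomes (X & 0x0f) | 0xf0: the bits
   of C1 that C2 sets anyway are dropped from the AND mask, unless the
   loop below finds a wider byte-granular mask worth keeping.  */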
11062 c1 = double_int_and (c1, msk);
11063 c2 = double_int_and (c2, msk);
11064 c3 = double_int_and_not (c1, c2);
11065 for (w = BITS_PER_UNIT;
11066 w <= width && w <= HOST_BITS_PER_WIDE_INT;
11067 w <<= 1)
11069 unsigned HOST_WIDE_INT mask
11070 = (unsigned HOST_WIDE_INT) -1 >> (HOST_BITS_PER_WIDE_INT - w);
11071 if (((c1.low | c2.low) & mask) == mask
11072 && (c1.low & ~mask) == 0 && c1.high == 0)
11074 c3 = uhwi_to_double_int (mask);
11075 break;
11078 if (!double_int_equal_p (c3, c1))
11079 return fold_build2_loc (loc, BIT_IOR_EXPR, type,
11080 fold_build2_loc (loc, BIT_AND_EXPR, type,
11081 TREE_OPERAND (arg0, 0),
11082 double_int_to_tree (type,
11083 c3)),
11084 arg1);
11087 /* (X & Y) | Y is (X, Y). */
11088 if (TREE_CODE (arg0) == BIT_AND_EXPR
11089 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
11090 return omit_one_operand_loc (loc, type, arg1, TREE_OPERAND (arg0, 0));
11091 /* (X & Y) | X is (Y, X). */
11092 if (TREE_CODE (arg0) == BIT_AND_EXPR
11093 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
11094 && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
11095 return omit_one_operand_loc (loc, type, arg1, TREE_OPERAND (arg0, 1));
11096 /* X | (X & Y) is (Y, X). */
11097 if (TREE_CODE (arg1) == BIT_AND_EXPR
11098 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0)
11099 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 1)))
11100 return omit_one_operand_loc (loc, type, arg0, TREE_OPERAND (arg1, 1));
11101 /* X | (Y & X) is (Y, X). */
11102 if (TREE_CODE (arg1) == BIT_AND_EXPR
11103 && operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0)
11104 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
11105 return omit_one_operand_loc (loc, type, arg0, TREE_OPERAND (arg1, 0));
11107 /* (X & ~Y) | (~X & Y) is X ^ Y.  */
11108 if (TREE_CODE (arg0) == BIT_AND_EXPR
11109 && TREE_CODE (arg1) == BIT_AND_EXPR)
11111 tree a0, a1, l0, l1, n0, n1;
11113 a0 = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 0));
11114 a1 = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 1));
11116 l0 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
11117 l1 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
11119 n0 = fold_build1_loc (loc, BIT_NOT_EXPR, type, l0);
11120 n1 = fold_build1_loc (loc, BIT_NOT_EXPR, type, l1);
11122 if ((operand_equal_p (n0, a0, 0)
11123 && operand_equal_p (n1, a1, 0))
11124 || (operand_equal_p (n0, a1, 0)
11125 && operand_equal_p (n1, a0, 0)))
11126 return fold_build2_loc (loc, BIT_XOR_EXPR, type, l0, n1);
11129 t1 = distribute_bit_expr (loc, code, type, arg0, arg1);
11130 if (t1 != NULL_TREE)
11131 return t1;
11133 /* Convert (or (not arg0) (not arg1)) to (not (and (arg0) (arg1))).
11135 This results in more efficient code for machines without a NAND
11136 instruction. Combine will canonicalize to the first form
11137 which will allow use of NAND instructions provided by the
11138 backend if they exist. */
11139 if (TREE_CODE (arg0) == BIT_NOT_EXPR
11140 && TREE_CODE (arg1) == BIT_NOT_EXPR)
11142 return
11143 fold_build1_loc (loc, BIT_NOT_EXPR, type,
11144 build2 (BIT_AND_EXPR, type,
11145 fold_convert_loc (loc, type,
11146 TREE_OPERAND (arg0, 0)),
11147 fold_convert_loc (loc, type,
11148 TREE_OPERAND (arg1, 0))));
11151 /* See if this can be simplified into a rotate first. If that
11152 is unsuccessful continue in the association code. */
11153 goto bit_rotate;
11155 case BIT_XOR_EXPR:
11156 if (integer_zerop (arg1))
11157 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
11158 if (integer_all_onesp (arg1))
11159 return fold_build1_loc (loc, BIT_NOT_EXPR, type, op0);
11160 if (operand_equal_p (arg0, arg1, 0))
11161 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
11163 /* ~X ^ X is -1. */
11164 if (TREE_CODE (arg0) == BIT_NOT_EXPR
11165 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
11167 t1 = build_zero_cst (type);
11168 t1 = fold_unary_loc (loc, BIT_NOT_EXPR, type, t1);
11169 return omit_one_operand_loc (loc, type, t1, arg1);
11172 /* X ^ ~X is -1. */
11173 if (TREE_CODE (arg1) == BIT_NOT_EXPR
11174 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
11176 t1 = build_zero_cst (type);
11177 t1 = fold_unary_loc (loc, BIT_NOT_EXPR, type, t1);
11178 return omit_one_operand_loc (loc, type, t1, arg0);
11181 /* If we are XORing two BIT_AND_EXPR's, both of which are and'ing
11182 with a constant, and the two constants have no bits in common,
11183 we should treat this as a BIT_IOR_EXPR since this may produce more
11184 simplifications. */
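/* For example, (X & 0x0f) ^ (Y & 0xf0): the two operands can have no
   set bits in common, so the XOR acts exactly like an IOR and is
   retried as one.  */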
11185 if (TREE_CODE (arg0) == BIT_AND_EXPR
11186 && TREE_CODE (arg1) == BIT_AND_EXPR
11187 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
11188 && TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
11189 && integer_zerop (const_binop (BIT_AND_EXPR,
11190 TREE_OPERAND (arg0, 1),
11191 TREE_OPERAND (arg1, 1))))
11193 code = BIT_IOR_EXPR;
11194 goto bit_ior;
11197 /* (X | Y) ^ X -> Y & ~X.  */
11198 if (TREE_CODE (arg0) == BIT_IOR_EXPR
11199 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
11201 tree t2 = TREE_OPERAND (arg0, 1);
11202 t1 = fold_build1_loc (loc, BIT_NOT_EXPR, TREE_TYPE (arg1),
11203 arg1);
11204 t1 = fold_build2_loc (loc, BIT_AND_EXPR, type,
11205 fold_convert_loc (loc, type, t2),
11206 fold_convert_loc (loc, type, t1));
11207 return t1;
11210 /* (Y | X) ^ X -> Y & ~X.  */
11211 if (TREE_CODE (arg0) == BIT_IOR_EXPR
11212 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
11214 tree t2 = TREE_OPERAND (arg0, 0);
11215 t1 = fold_build1_loc (loc, BIT_NOT_EXPR, TREE_TYPE (arg1),
11216 arg1);
11217 t1 = fold_build2_loc (loc, BIT_AND_EXPR, type,
11218 fold_convert_loc (loc, type, t2),
11219 fold_convert_loc (loc, type, t1));
11220 return t1;
11223 /* X ^ (X | Y) -> Y & ~X.  */
11224 if (TREE_CODE (arg1) == BIT_IOR_EXPR
11225 && operand_equal_p (TREE_OPERAND (arg1, 0), arg0, 0))
11227 tree t2 = TREE_OPERAND (arg1, 1);
11228 t1 = fold_build1_loc (loc, BIT_NOT_EXPR, TREE_TYPE (arg0),
11229 arg0);
11230 t1 = fold_build2_loc (loc, BIT_AND_EXPR, type,
11231 fold_convert_loc (loc, type, t2),
11232 fold_convert_loc (loc, type, t1));
11233 return t1;
11236 /* X ^ (Y | X) -> Y & ~X.  */
11237 if (TREE_CODE (arg1) == BIT_IOR_EXPR
11238 && operand_equal_p (TREE_OPERAND (arg1, 1), arg0, 0))
11240 tree t2 = TREE_OPERAND (arg1, 0);
11241 t1 = fold_build1_loc (loc, BIT_NOT_EXPR, TREE_TYPE (arg0),
11242 arg0);
11243 t1 = fold_build2_loc (loc, BIT_AND_EXPR, type,
11244 fold_convert_loc (loc, type, t2),
11245 fold_convert_loc (loc, type, t1));
11246 return t1;
11249 /* Convert ~X ^ ~Y to X ^ Y. */
11250 if (TREE_CODE (arg0) == BIT_NOT_EXPR
11251 && TREE_CODE (arg1) == BIT_NOT_EXPR)
11252 return fold_build2_loc (loc, code, type,
11253 fold_convert_loc (loc, type,
11254 TREE_OPERAND (arg0, 0)),
11255 fold_convert_loc (loc, type,
11256 TREE_OPERAND (arg1, 0)));
11258 /* Convert ~X ^ C to X ^ ~C. */
11259 if (TREE_CODE (arg0) == BIT_NOT_EXPR
11260 && TREE_CODE (arg1) == INTEGER_CST)
11261 return fold_build2_loc (loc, code, type,
11262 fold_convert_loc (loc, type,
11263 TREE_OPERAND (arg0, 0)),
11264 fold_build1_loc (loc, BIT_NOT_EXPR, type, arg1));
11266 /* Fold (X & 1) ^ 1 as (X & 1) == 0. */
11267 if (TREE_CODE (arg0) == BIT_AND_EXPR
11268 && integer_onep (TREE_OPERAND (arg0, 1))
11269 && integer_onep (arg1))
11270 return fold_build2_loc (loc, EQ_EXPR, type, arg0,
11271 build_zero_cst (TREE_TYPE (arg0)));
11273 /* Fold (X & Y) ^ Y as ~X & Y. */
11274 if (TREE_CODE (arg0) == BIT_AND_EXPR
11275 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
11277 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
11278 return fold_build2_loc (loc, BIT_AND_EXPR, type,
11279 fold_build1_loc (loc, BIT_NOT_EXPR, type, tem),
11280 fold_convert_loc (loc, type, arg1));
11282 /* Fold (X & Y) ^ X as ~Y & X. */
11283 if (TREE_CODE (arg0) == BIT_AND_EXPR
11284 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
11285 && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
11287 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
11288 return fold_build2_loc (loc, BIT_AND_EXPR, type,
11289 fold_build1_loc (loc, BIT_NOT_EXPR, type, tem),
11290 fold_convert_loc (loc, type, arg1));
11292 /* Fold X ^ (X & Y) as X & ~Y. */
11293 if (TREE_CODE (arg1) == BIT_AND_EXPR
11294 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
11296 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 1));
11297 return fold_build2_loc (loc, BIT_AND_EXPR, type,
11298 fold_convert_loc (loc, type, arg0),
11299 fold_build1_loc (loc, BIT_NOT_EXPR, type, tem));
11301 /* Fold X ^ (Y & X) as ~Y & X. */
11302 if (TREE_CODE (arg1) == BIT_AND_EXPR
11303 && operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0)
11304 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
11306 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 0));
11307 return fold_build2_loc (loc, BIT_AND_EXPR, type,
11308 fold_build1_loc (loc, BIT_NOT_EXPR, type, tem),
11309 fold_convert_loc (loc, type, arg0));
11312 /* See if this can be simplified into a rotate first. If that
11313 is unsuccessful continue in the association code. */
11314 goto bit_rotate;
11316 case BIT_AND_EXPR:
11317 if (integer_all_onesp (arg1))
11318 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
11319 if (integer_zerop (arg1))
11320 return omit_one_operand_loc (loc, type, arg1, arg0);
11321 if (operand_equal_p (arg0, arg1, 0))
11322 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
11324 /* ~X & X, (X == 0) & X, and !X & X are always zero. */
11325 if ((TREE_CODE (arg0) == BIT_NOT_EXPR
11326 || TREE_CODE (arg0) == TRUTH_NOT_EXPR
11327 || (TREE_CODE (arg0) == EQ_EXPR
11328 && integer_zerop (TREE_OPERAND (arg0, 1))))
11329 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
11330 return omit_one_operand_loc (loc, type, integer_zero_node, arg1);
11332 /* X & ~X , X & (X == 0), and X & !X are always zero. */
11333 if ((TREE_CODE (arg1) == BIT_NOT_EXPR
11334 || TREE_CODE (arg1) == TRUTH_NOT_EXPR
11335 || (TREE_CODE (arg1) == EQ_EXPR
11336 && integer_zerop (TREE_OPERAND (arg1, 1))))
11337 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
11338 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
11340 /* Canonicalize (X | C1) & C2 as (X & C2) | (C1 & C2). */
11341 if (TREE_CODE (arg0) == BIT_IOR_EXPR
11342 && TREE_CODE (arg1) == INTEGER_CST
11343 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
11345 tree tmp1 = fold_convert_loc (loc, type, arg1);
11346 tree tmp2 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
11347 tree tmp3 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
11348 tmp2 = fold_build2_loc (loc, BIT_AND_EXPR, type, tmp2, tmp1);
11349 tmp3 = fold_build2_loc (loc, BIT_AND_EXPR, type, tmp3, tmp1);
11350 return
11351 fold_convert_loc (loc, type,
11352 fold_build2_loc (loc, BIT_IOR_EXPR,
11353 type, tmp2, tmp3));
11356 /* (X | Y) & Y is (X, Y). */
11357 if (TREE_CODE (arg0) == BIT_IOR_EXPR
11358 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
11359 return omit_one_operand_loc (loc, type, arg1, TREE_OPERAND (arg0, 0));
11360 /* (X | Y) & X is (Y, X). */
11361 if (TREE_CODE (arg0) == BIT_IOR_EXPR
11362 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
11363 && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
11364 return omit_one_operand_loc (loc, type, arg1, TREE_OPERAND (arg0, 1));
11365 /* X & (X | Y) is (Y, X). */
11366 if (TREE_CODE (arg1) == BIT_IOR_EXPR
11367 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0)
11368 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 1)))
11369 return omit_one_operand_loc (loc, type, arg0, TREE_OPERAND (arg1, 1));
11370 /* X & (Y | X) is (Y, X). */
11371 if (TREE_CODE (arg1) == BIT_IOR_EXPR
11372 && operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0)
11373 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
11374 return omit_one_operand_loc (loc, type, arg0, TREE_OPERAND (arg1, 0));
11376 /* Fold (X ^ 1) & 1 as (X & 1) == 0. */
11377 if (TREE_CODE (arg0) == BIT_XOR_EXPR
11378 && integer_onep (TREE_OPERAND (arg0, 1))
11379 && integer_onep (arg1))
11381 tree tem2;
11382 tem = TREE_OPERAND (arg0, 0);
11383 tem2 = fold_convert_loc (loc, TREE_TYPE (tem), arg1);
11384 tem2 = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (tem),
11385 tem, tem2);
11386 return fold_build2_loc (loc, EQ_EXPR, type, tem2,
11387 build_zero_cst (TREE_TYPE (tem)));
11389 /* Fold ~X & 1 as (X & 1) == 0. */
11390 if (TREE_CODE (arg0) == BIT_NOT_EXPR
11391 && integer_onep (arg1))
11393 tree tem2;
11394 tem = TREE_OPERAND (arg0, 0);
11395 tem2 = fold_convert_loc (loc, TREE_TYPE (tem), arg1);
11396 tem2 = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (tem),
11397 tem, tem2);
11398 return fold_build2_loc (loc, EQ_EXPR, type, tem2,
11399 build_zero_cst (TREE_TYPE (tem)));
11401 /* Fold !X & 1 as X == 0. */
11402 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
11403 && integer_onep (arg1))
11405 tem = TREE_OPERAND (arg0, 0);
11406 return fold_build2_loc (loc, EQ_EXPR, type, tem,
11407 build_zero_cst (TREE_TYPE (tem)));
11410 /* Fold (X ^ Y) & Y as ~X & Y. */
11411 if (TREE_CODE (arg0) == BIT_XOR_EXPR
11412 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
11414 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
11415 return fold_build2_loc (loc, BIT_AND_EXPR, type,
11416 fold_build1_loc (loc, BIT_NOT_EXPR, type, tem),
11417 fold_convert_loc (loc, type, arg1));
11419 /* Fold (X ^ Y) & X as ~Y & X. */
11420 if (TREE_CODE (arg0) == BIT_XOR_EXPR
11421 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
11422 && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
11424 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
11425 return fold_build2_loc (loc, BIT_AND_EXPR, type,
11426 fold_build1_loc (loc, BIT_NOT_EXPR, type, tem),
11427 fold_convert_loc (loc, type, arg1));
11429 /* Fold X & (X ^ Y) as X & ~Y. */
11430 if (TREE_CODE (arg1) == BIT_XOR_EXPR
11431 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
11433 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 1));
11434 return fold_build2_loc (loc, BIT_AND_EXPR, type,
11435 fold_convert_loc (loc, type, arg0),
11436 fold_build1_loc (loc, BIT_NOT_EXPR, type, tem));
11438 /* Fold X & (Y ^ X) as ~Y & X. */
11439 if (TREE_CODE (arg1) == BIT_XOR_EXPR
11440 && operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0)
11441 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
11443 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 0));
11444 return fold_build2_loc (loc, BIT_AND_EXPR, type,
11445 fold_build1_loc (loc, BIT_NOT_EXPR, type, tem),
11446 fold_convert_loc (loc, type, arg0));
11449 /* Fold (X * Y) & -(1 << CST) to X * Y if Y is a constant
11450 multiple of 1 << CST. */
11451 if (TREE_CODE (arg1) == INTEGER_CST)
11453 double_int cst1 = tree_to_double_int (arg1);
11454 double_int ncst1 = double_int_ext (double_int_neg (cst1),
11455 TYPE_PRECISION (TREE_TYPE (arg1)),
11456 TYPE_UNSIGNED (TREE_TYPE (arg1)));
11457 if (double_int_equal_p (double_int_and (cst1, ncst1), ncst1)
11458 && multiple_of_p (type, arg0,
11459 double_int_to_tree (TREE_TYPE (arg1), ncst1)))
11460 return fold_convert_loc (loc, type, arg0);
11463 /* Fold (X * CST1) & CST2 to zero if we can, or drop known zero
11464 bits from CST2. */
11465 if (TREE_CODE (arg1) == INTEGER_CST
11466 && TREE_CODE (arg0) == MULT_EXPR
11467 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
11469 int arg1tz
11470 = double_int_ctz (tree_to_double_int (TREE_OPERAND (arg0, 1)));
11471 if (arg1tz > 0)
11473 double_int arg1mask, masked;
11474 arg1mask = double_int_not (double_int_mask (arg1tz));
11475 arg1mask = double_int_ext (arg1mask, TYPE_PRECISION (type),
11476 TYPE_UNSIGNED (type));
11477 masked = double_int_and (arg1mask, tree_to_double_int (arg1));
11478 if (double_int_zero_p (masked))
11479 return omit_two_operands_loc (loc, type, build_zero_cst (type),
11480 arg0, arg1);
11481 else if (!double_int_equal_p (masked, tree_to_double_int (arg1)))
11482 return fold_build2_loc (loc, code, type, op0,
11483 double_int_to_tree (type, masked));
11487 /* For constants M and N, if M == (1LL << cst) - 1 && (N & M) == M,
11488 ((A & N) + B) & M -> (A + B) & M
11489 Similarly if (N & M) == 0,
11490 ((A | N) + B) & M -> (A + B) & M
11491 and for - instead of + (or unary - instead of +)
11492 and/or ^ instead of |.
11493 If B is constant and (B & M) == 0, fold into A & M. */
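/* For example, with M = 15 and N = 0xff (so (N & M) == M),
   ((A & 0xff) + B) & 15 equals (A + B) & 15: masking A with 0xff
   changes it by a multiple of 256, which cannot affect the low four
   bits of the sum.  */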
11494 if (host_integerp (arg1, 1))
11496 unsigned HOST_WIDE_INT cst1 = tree_low_cst (arg1, 1);
11497 if (~cst1 && (cst1 & (cst1 + 1)) == 0
11498 && INTEGRAL_TYPE_P (TREE_TYPE (arg0))
11499 && (TREE_CODE (arg0) == PLUS_EXPR
11500 || TREE_CODE (arg0) == MINUS_EXPR
11501 || TREE_CODE (arg0) == NEGATE_EXPR)
11502 && (TYPE_OVERFLOW_WRAPS (TREE_TYPE (arg0))
11503 || TREE_CODE (TREE_TYPE (arg0)) == INTEGER_TYPE))
11505 tree pmop[2];
11506 int which = 0;
11507 unsigned HOST_WIDE_INT cst0;
11509 /* Now we know that arg0 is (C + D) or (C - D) or
11510 -C and arg1 (M) is == (1LL << cst) - 1.
11511 Store C into PMOP[0] and D into PMOP[1]. */
11512 pmop[0] = TREE_OPERAND (arg0, 0);
11513 pmop[1] = NULL;
11514 if (TREE_CODE (arg0) != NEGATE_EXPR)
11516 pmop[1] = TREE_OPERAND (arg0, 1);
11517 which = 1;
11520 if (!host_integerp (TYPE_MAX_VALUE (TREE_TYPE (arg0)), 1)
11521 || (tree_low_cst (TYPE_MAX_VALUE (TREE_TYPE (arg0)), 1)
11522 & cst1) != cst1)
11523 which = -1;
11525 for (; which >= 0; which--)
11526 switch (TREE_CODE (pmop[which]))
11528 case BIT_AND_EXPR:
11529 case BIT_IOR_EXPR:
11530 case BIT_XOR_EXPR:
11531 if (TREE_CODE (TREE_OPERAND (pmop[which], 1))
11532 != INTEGER_CST)
11533 break;
11534 /* tree_low_cst not used, because we don't care about
11535 the upper bits. */
11536 cst0 = TREE_INT_CST_LOW (TREE_OPERAND (pmop[which], 1));
11537 cst0 &= cst1;
11538 if (TREE_CODE (pmop[which]) == BIT_AND_EXPR)
11540 if (cst0 != cst1)
11541 break;
11543 else if (cst0 != 0)
11544 break;
11545 /* If C or D is of the form (A & N) where
11546 (N & M) == M, or of the form (A | N) or
11547 (A ^ N) where (N & M) == 0, replace it with A. */
11548 pmop[which] = TREE_OPERAND (pmop[which], 0);
11549 break;
11550 case INTEGER_CST:
11551 /* If C or D is a N where (N & M) == 0, it can be
11552 omitted (assumed 0). */
11553 if ((TREE_CODE (arg0) == PLUS_EXPR
11554 || (TREE_CODE (arg0) == MINUS_EXPR && which == 0))
11555 && (TREE_INT_CST_LOW (pmop[which]) & cst1) == 0)
11556 pmop[which] = NULL;
11557 break;
11558 default:
11559 break;
11562 /* Only build anything new if we optimized one or both arguments
11563 above. */
11564 if (pmop[0] != TREE_OPERAND (arg0, 0)
11565 || (TREE_CODE (arg0) != NEGATE_EXPR
11566 && pmop[1] != TREE_OPERAND (arg0, 1)))
11568 tree utype = TREE_TYPE (arg0);
11569 if (! TYPE_OVERFLOW_WRAPS (TREE_TYPE (arg0)))
11571 /* Perform the operations in a type that has defined
11572 overflow behavior. */
11573 utype = unsigned_type_for (TREE_TYPE (arg0));
11574 if (pmop[0] != NULL)
11575 pmop[0] = fold_convert_loc (loc, utype, pmop[0]);
11576 if (pmop[1] != NULL)
11577 pmop[1] = fold_convert_loc (loc, utype, pmop[1]);
11580 if (TREE_CODE (arg0) == NEGATE_EXPR)
11581 tem = fold_build1_loc (loc, NEGATE_EXPR, utype, pmop[0]);
11582 else if (TREE_CODE (arg0) == PLUS_EXPR)
11584 if (pmop[0] != NULL && pmop[1] != NULL)
11585 tem = fold_build2_loc (loc, PLUS_EXPR, utype,
11586 pmop[0], pmop[1]);
11587 else if (pmop[0] != NULL)
11588 tem = pmop[0];
11589 else if (pmop[1] != NULL)
11590 tem = pmop[1];
11591 else
11592 return build_int_cst (type, 0);
11594 else if (pmop[0] == NULL)
11595 tem = fold_build1_loc (loc, NEGATE_EXPR, utype, pmop[1]);
11596 else
11597 tem = fold_build2_loc (loc, MINUS_EXPR, utype,
11598 pmop[0], pmop[1]);
11599 /* TEM is now the new binary +, - or unary - replacement. */
11600 tem = fold_build2_loc (loc, BIT_AND_EXPR, utype, tem,
11601 fold_convert_loc (loc, utype, arg1));
11602 return fold_convert_loc (loc, type, tem);
11607 t1 = distribute_bit_expr (loc, code, type, arg0, arg1);
11608 if (t1 != NULL_TREE)
11609 return t1;
11610 /* Simplify ((int)c & 0377) into (int)c, if c is unsigned char. */
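/* The mask must cover the whole precision of the inner operand,
   e.g. an 8-bit unsigned char against 0377 (0xff): then the AND
   keeps every bit and can be dropped.  */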
11611 if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg0) == NOP_EXPR
11612 && TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg0, 0))))
11614 unsigned int prec
11615 = TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg0, 0)));
11617 if (prec < BITS_PER_WORD && prec < HOST_BITS_PER_WIDE_INT
11618 && (~TREE_INT_CST_LOW (arg1)
11619 & (((HOST_WIDE_INT) 1 << prec) - 1)) == 0)
11620 return
11621 fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
11624 /* Convert (and (not arg0) (not arg1)) to (not (or (arg0) (arg1))).
11626 This results in more efficient code for machines without a NOR
11627 instruction. Combine will canonicalize to the first form
11628 which will allow use of NOR instructions provided by the
11629 backend if they exist. */
11630 if (TREE_CODE (arg0) == BIT_NOT_EXPR
11631 && TREE_CODE (arg1) == BIT_NOT_EXPR)
11633 return fold_build1_loc (loc, BIT_NOT_EXPR, type,
11634 build2 (BIT_IOR_EXPR, type,
11635 fold_convert_loc (loc, type,
11636 TREE_OPERAND (arg0, 0)),
11637 fold_convert_loc (loc, type,
11638 TREE_OPERAND (arg1, 0))));
11641 /* If arg0 is derived from the address of an object or function, we may
11642 be able to fold this expression using the object or function's
11643 alignment. */
11644 if (POINTER_TYPE_P (TREE_TYPE (arg0)) && host_integerp (arg1, 1))
11646 unsigned HOST_WIDE_INT modulus, residue;
11647 unsigned HOST_WIDE_INT low = TREE_INT_CST_LOW (arg1);
11649 modulus = get_pointer_modulus_and_residue (arg0, &residue,
11650 integer_onep (arg1));
11652 /* This works because modulus is a power of 2. If this weren't the
11653 case, we'd have to replace it by its greatest power-of-2
11654 divisor: modulus & -modulus. */
11655 if (low < modulus)
11656 return build_int_cst (type, residue & low);
11659 /* Fold (X << C1) & C2 into (X << C1) & (C2 | ((1 << C1) - 1))
11660 (X >> C1) & C2 into (X >> C1) & (C2 | ~((type) -1 >> C1))
11661 if the new mask might be further optimized. */
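/* For example, (X << 8) & 0xff00 can become (X << 8) & 0xffff, since
   the low eight bits of X << 8 are zero anyway; 0xffff is the mask of
   a 16-bit mode, which later simplifications may exploit.  */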
11662 if ((TREE_CODE (arg0) == LSHIFT_EXPR
11663 || TREE_CODE (arg0) == RSHIFT_EXPR)
11664 && host_integerp (TREE_OPERAND (arg0, 1), 1)
11665 && host_integerp (arg1, TYPE_UNSIGNED (TREE_TYPE (arg1)))
11666 && tree_low_cst (TREE_OPERAND (arg0, 1), 1)
11667 < TYPE_PRECISION (TREE_TYPE (arg0))
11668 && TYPE_PRECISION (TREE_TYPE (arg0)) <= HOST_BITS_PER_WIDE_INT
11669 && tree_low_cst (TREE_OPERAND (arg0, 1), 1) > 0)
11671 unsigned int shiftc = tree_low_cst (TREE_OPERAND (arg0, 1), 1);
11672 unsigned HOST_WIDE_INT mask
11673 = tree_low_cst (arg1, TYPE_UNSIGNED (TREE_TYPE (arg1)));
11674 unsigned HOST_WIDE_INT newmask, zerobits = 0;
11675 tree shift_type = TREE_TYPE (arg0);
11677 if (TREE_CODE (arg0) == LSHIFT_EXPR)
11678 zerobits = ((((unsigned HOST_WIDE_INT) 1) << shiftc) - 1);
11679 else if (TREE_CODE (arg0) == RSHIFT_EXPR
11680 && TYPE_PRECISION (TREE_TYPE (arg0))
11681 == GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (arg0))))
11683 unsigned int prec = TYPE_PRECISION (TREE_TYPE (arg0));
11684 tree arg00 = TREE_OPERAND (arg0, 0);
11685 /* See if more bits can be proven as zero because of
11686 zero extension. */
11687 if (TREE_CODE (arg00) == NOP_EXPR
11688 && TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg00, 0))))
11690 tree inner_type = TREE_TYPE (TREE_OPERAND (arg00, 0));
11691 if (TYPE_PRECISION (inner_type)
11692 == GET_MODE_BITSIZE (TYPE_MODE (inner_type))
11693 && TYPE_PRECISION (inner_type) < prec)
11695 prec = TYPE_PRECISION (inner_type);
11696 /* See if we can shorten the right shift. */
11697 if (shiftc < prec)
11698 shift_type = inner_type;
11701 zerobits = ~(unsigned HOST_WIDE_INT) 0;
11702 zerobits >>= HOST_BITS_PER_WIDE_INT - shiftc;
11703 zerobits <<= prec - shiftc;
11704 /* For an arithmetic shift, if the sign bit could be set, zerobits
11705 can actually contain sign bits, so no transformation is
11706 possible, unless MASK masks them all away. In that
11707 case the shift needs to be converted into a logical shift. */
11708 if (!TYPE_UNSIGNED (TREE_TYPE (arg0))
11709 && prec == TYPE_PRECISION (TREE_TYPE (arg0)))
11711 if ((mask & zerobits) == 0)
11712 shift_type = unsigned_type_for (TREE_TYPE (arg0));
11713 else
11714 zerobits = 0;
11718 /* ((X << 16) & 0xff00) is (X, 0). */
11719 if ((mask & zerobits) == mask)
11720 return omit_one_operand_loc (loc, type,
11721 build_int_cst (type, 0), arg0);
11723 newmask = mask | zerobits;
11724 if (newmask != mask && (newmask & (newmask + 1)) == 0)
11726 unsigned int prec;
11728 /* Only do the transformation if NEWMASK is some integer
11729 mode's mask. */
11730 for (prec = BITS_PER_UNIT;
11731 prec < HOST_BITS_PER_WIDE_INT; prec <<= 1)
11732 if (newmask == (((unsigned HOST_WIDE_INT) 1) << prec) - 1)
11733 break;
11734 if (prec < HOST_BITS_PER_WIDE_INT
11735 || newmask == ~(unsigned HOST_WIDE_INT) 0)
11737 tree newmaskt;
11739 if (shift_type != TREE_TYPE (arg0))
11741 tem = fold_build2_loc (loc, TREE_CODE (arg0), shift_type,
11742 fold_convert_loc (loc, shift_type,
11743 TREE_OPERAND (arg0, 0)),
11744 TREE_OPERAND (arg0, 1));
11745 tem = fold_convert_loc (loc, type, tem);
11747 else
11748 tem = op0;
11749 newmaskt = build_int_cst_type (TREE_TYPE (op1), newmask);
11750 if (!tree_int_cst_equal (newmaskt, arg1))
11751 return fold_build2_loc (loc, BIT_AND_EXPR, type, tem, newmaskt);
11756 goto associate;
11758 case RDIV_EXPR:
11759 /* Don't touch a floating-point divide by zero unless the mode
11760 of the constant can represent infinity. */
11761 if (TREE_CODE (arg1) == REAL_CST
11762 && !MODE_HAS_INFINITIES (TYPE_MODE (TREE_TYPE (arg1)))
11763 && real_zerop (arg1))
11764 return NULL_TREE;
11766 /* Optimize A / A to 1.0 if we don't care about
11767 NaNs or Infinities. Skip the transformation
11768 for non-real operands. */
11769 if (SCALAR_FLOAT_TYPE_P (TREE_TYPE (arg0))
11770 && ! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0)))
11771 && ! HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (arg0)))
11772 && operand_equal_p (arg0, arg1, 0))
11774 tree r = build_real (TREE_TYPE (arg0), dconst1);
11776 return omit_two_operands_loc (loc, type, r, arg0, arg1);
11779 /* The complex version of the above A / A optimization. */
11780 if (COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0))
11781 && operand_equal_p (arg0, arg1, 0))
11783 tree elem_type = TREE_TYPE (TREE_TYPE (arg0));
11784 if (! HONOR_NANS (TYPE_MODE (elem_type))
11785 && ! HONOR_INFINITIES (TYPE_MODE (elem_type)))
11787 tree r = build_real (elem_type, dconst1);
11788 /* omit_two_operands will call fold_convert for us. */
11789 return omit_two_operands_loc (loc, type, r, arg0, arg1);
11793 /* (-A) / (-B) -> A / B */
11794 if (TREE_CODE (arg0) == NEGATE_EXPR && negate_expr_p (arg1))
11795 return fold_build2_loc (loc, RDIV_EXPR, type,
11796 TREE_OPERAND (arg0, 0),
11797 negate_expr (arg1));
11798 if (TREE_CODE (arg1) == NEGATE_EXPR && negate_expr_p (arg0))
11799 return fold_build2_loc (loc, RDIV_EXPR, type,
11800 negate_expr (arg0),
11801 TREE_OPERAND (arg1, 0));
11803 /* In IEEE floating point, x/1 is not equivalent to x for SNaNs. */
11804 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
11805 && real_onep (arg1))
11806 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
11808 /* In IEEE floating point, x/-1 is not equivalent to -x for SNaNs. */
11809 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
11810 && real_minus_onep (arg1))
11811 return non_lvalue_loc (loc, fold_convert_loc (loc, type,
11812 negate_expr (arg0)));
11814 /* If ARG1 is a constant, we can convert this to a multiply by the
11815 reciprocal. This does not have the same rounding properties,
11816 so only do this if -freciprocal-math. We can actually
11817 always safely do it if ARG1 is a power of two, but it's hard to
11818 tell if it is or not in a portable manner. */
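/* For example, X / 4.0 always becomes X * 0.25, since 0.25 is exact;
   X / 3.0 becomes X * (1.0/3.0) only under -freciprocal-math, because
   1.0/3.0 rounds.  */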
11819 if (optimize
11820 && (TREE_CODE (arg1) == REAL_CST
11821 || (TREE_CODE (arg1) == COMPLEX_CST
11822 && COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg1)))
11823 || (TREE_CODE (arg1) == VECTOR_CST
11824 && VECTOR_FLOAT_TYPE_P (TREE_TYPE (arg1)))))
11826 if (flag_reciprocal_math
11827 && 0 != (tem = const_binop (code, build_one_cst (type), arg1)))
11828 return fold_build2_loc (loc, MULT_EXPR, type, arg0, tem);
11829 /* Find the reciprocal if optimizing and the result is exact.
11830 TODO: Complex reciprocal not implemented. */
11831 if (TREE_CODE (arg1) != COMPLEX_CST)
11833 tree inverse = exact_inverse (TREE_TYPE (arg0), arg1);
11835 if (inverse)
11836 return fold_build2_loc (loc, MULT_EXPR, type, arg0, inverse);
11839 /* Convert A/B/C to A/(B*C). */
11840 if (flag_reciprocal_math
11841 && TREE_CODE (arg0) == RDIV_EXPR)
11842 return fold_build2_loc (loc, RDIV_EXPR, type, TREE_OPERAND (arg0, 0),
11843 fold_build2_loc (loc, MULT_EXPR, type,
11844 TREE_OPERAND (arg0, 1), arg1));
11846 /* Convert A/(B/C) to (A/B)*C. */
11847 if (flag_reciprocal_math
11848 && TREE_CODE (arg1) == RDIV_EXPR)
11849 return fold_build2_loc (loc, MULT_EXPR, type,
11850 fold_build2_loc (loc, RDIV_EXPR, type, arg0,
11851 TREE_OPERAND (arg1, 0)),
11852 TREE_OPERAND (arg1, 1));
11854 /* Convert C1/(X*C2) into (C1/C2)/X. */
11855 if (flag_reciprocal_math
11856 && TREE_CODE (arg1) == MULT_EXPR
11857 && TREE_CODE (arg0) == REAL_CST
11858 && TREE_CODE (TREE_OPERAND (arg1, 1)) == REAL_CST)
11860 tree tem = const_binop (RDIV_EXPR, arg0,
11861 TREE_OPERAND (arg1, 1));
11862 if (tem)
11863 return fold_build2_loc (loc, RDIV_EXPR, type, tem,
11864 TREE_OPERAND (arg1, 0));
11867 if (flag_unsafe_math_optimizations)
11869 enum built_in_function fcode0 = builtin_mathfn_code (arg0);
11870 enum built_in_function fcode1 = builtin_mathfn_code (arg1);
11872 /* Optimize sin(x)/cos(x) as tan(x). */
11873 if (((fcode0 == BUILT_IN_SIN && fcode1 == BUILT_IN_COS)
11874 || (fcode0 == BUILT_IN_SINF && fcode1 == BUILT_IN_COSF)
11875 || (fcode0 == BUILT_IN_SINL && fcode1 == BUILT_IN_COSL))
11876 && operand_equal_p (CALL_EXPR_ARG (arg0, 0),
11877 CALL_EXPR_ARG (arg1, 0), 0))
11879 tree tanfn = mathfn_built_in (type, BUILT_IN_TAN);
11881 if (tanfn != NULL_TREE)
11882 return build_call_expr_loc (loc, tanfn, 1, CALL_EXPR_ARG (arg0, 0));
11885 /* Optimize cos(x)/sin(x) as 1.0/tan(x). */
11886 if (((fcode0 == BUILT_IN_COS && fcode1 == BUILT_IN_SIN)
11887 || (fcode0 == BUILT_IN_COSF && fcode1 == BUILT_IN_SINF)
11888 || (fcode0 == BUILT_IN_COSL && fcode1 == BUILT_IN_SINL))
11889 && operand_equal_p (CALL_EXPR_ARG (arg0, 0),
11890 CALL_EXPR_ARG (arg1, 0), 0))
11892 tree tanfn = mathfn_built_in (type, BUILT_IN_TAN);
11894 if (tanfn != NULL_TREE)
11896 tree tmp = build_call_expr_loc (loc, tanfn, 1,
11897 CALL_EXPR_ARG (arg0, 0));
11898 return fold_build2_loc (loc, RDIV_EXPR, type,
11899 build_real (type, dconst1), tmp);
11903 /* Optimize sin(x)/tan(x) as cos(x) if we don't care about
11904 NaNs or Infinities. */
11905 if (((fcode0 == BUILT_IN_SIN && fcode1 == BUILT_IN_TAN)
11906 || (fcode0 == BUILT_IN_SINF && fcode1 == BUILT_IN_TANF)
11907 || (fcode0 == BUILT_IN_SINL && fcode1 == BUILT_IN_TANL)))
11909 tree arg00 = CALL_EXPR_ARG (arg0, 0);
11910 tree arg01 = CALL_EXPR_ARG (arg1, 0);
11912 if (! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg00)))
11913 && ! HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (arg00)))
11914 && operand_equal_p (arg00, arg01, 0))
11916 tree cosfn = mathfn_built_in (type, BUILT_IN_COS);
11918 if (cosfn != NULL_TREE)
11919 return build_call_expr_loc (loc, cosfn, 1, arg00);
11923 /* Optimize tan(x)/sin(x) as 1.0/cos(x) if we don't care about
11924 NaNs or Infinities. */
11925 if (((fcode0 == BUILT_IN_TAN && fcode1 == BUILT_IN_SIN)
11926 || (fcode0 == BUILT_IN_TANF && fcode1 == BUILT_IN_SINF)
11927 || (fcode0 == BUILT_IN_TANL && fcode1 == BUILT_IN_SINL)))
11929 tree arg00 = CALL_EXPR_ARG (arg0, 0);
11930 tree arg01 = CALL_EXPR_ARG (arg1, 0);
11932 if (! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg00)))
11933 && ! HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (arg00)))
11934 && operand_equal_p (arg00, arg01, 0))
11936 tree cosfn = mathfn_built_in (type, BUILT_IN_COS);
11938 if (cosfn != NULL_TREE)
11940 tree tmp = build_call_expr_loc (loc, cosfn, 1, arg00);
11941 return fold_build2_loc (loc, RDIV_EXPR, type,
11942 build_real (type, dconst1),
11943 tmp);
11948 /* Optimize pow(x,c)/x as pow(x,c-1). */
11949 if (fcode0 == BUILT_IN_POW
11950 || fcode0 == BUILT_IN_POWF
11951 || fcode0 == BUILT_IN_POWL)
11953 tree arg00 = CALL_EXPR_ARG (arg0, 0);
11954 tree arg01 = CALL_EXPR_ARG (arg0, 1);
11955 if (TREE_CODE (arg01) == REAL_CST
11956 && !TREE_OVERFLOW (arg01)
11957 && operand_equal_p (arg1, arg00, 0))
11959 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
11960 REAL_VALUE_TYPE c;
11961 tree arg;
11963 c = TREE_REAL_CST (arg01);
11964 real_arithmetic (&c, MINUS_EXPR, &c, &dconst1);
11965 arg = build_real (type, c);
11966 return build_call_expr_loc (loc, powfn, 2, arg1, arg);
11970 /* Optimize a/root(b/c) into a*root(c/b). */
11971 if (BUILTIN_ROOT_P (fcode1))
11973 tree rootarg = CALL_EXPR_ARG (arg1, 0);
11975 if (TREE_CODE (rootarg) == RDIV_EXPR)
11977 tree rootfn = TREE_OPERAND (CALL_EXPR_FN (arg1), 0);
11978 tree b = TREE_OPERAND (rootarg, 0);
11979 tree c = TREE_OPERAND (rootarg, 1);
11981 tree tmp = fold_build2_loc (loc, RDIV_EXPR, type, c, b);
11983 tmp = build_call_expr_loc (loc, rootfn, 1, tmp);
11984 return fold_build2_loc (loc, MULT_EXPR, type, arg0, tmp);
11988 /* Optimize x/expN(y) into x*expN(-y). */
11989 if (BUILTIN_EXPONENT_P (fcode1))
11991 tree expfn = TREE_OPERAND (CALL_EXPR_FN (arg1), 0);
11992 tree arg = negate_expr (CALL_EXPR_ARG (arg1, 0));
11993 arg1 = build_call_expr_loc (loc,
11994 expfn, 1,
11995 fold_convert_loc (loc, type, arg));
11996 return fold_build2_loc (loc, MULT_EXPR, type, arg0, arg1);
11999 /* Optimize x/pow(y,z) into x*pow(y,-z). */
12000 if (fcode1 == BUILT_IN_POW
12001 || fcode1 == BUILT_IN_POWF
12002 || fcode1 == BUILT_IN_POWL)
12004 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg1), 0);
12005 tree arg10 = CALL_EXPR_ARG (arg1, 0);
12006 tree arg11 = CALL_EXPR_ARG (arg1, 1);
12007 tree neg11 = fold_convert_loc (loc, type,
12008 negate_expr (arg11));
12009 arg1 = build_call_expr_loc (loc, powfn, 2, arg10, neg11);
12010 return fold_build2_loc (loc, MULT_EXPR, type, arg0, arg1);
12013 return NULL_TREE;
12015 case TRUNC_DIV_EXPR:
12016 /* Optimize (X & (-A)) / A where A is a power of 2,
12017 to X >> log2(A).  */
12018 if (TREE_CODE (arg0) == BIT_AND_EXPR
12019 && !TYPE_UNSIGNED (type) && TREE_CODE (arg1) == INTEGER_CST
12020 && integer_pow2p (arg1) && tree_int_cst_sgn (arg1) > 0)
12022 tree sum = fold_binary_loc (loc, PLUS_EXPR, TREE_TYPE (arg1),
12023 arg1, TREE_OPERAND (arg0, 1));
12024 if (sum && integer_zerop (sum)) {
12025 unsigned long pow2;
12027 if (TREE_INT_CST_LOW (arg1))
12028 pow2 = exact_log2 (TREE_INT_CST_LOW (arg1));
12029 else
12030 pow2 = exact_log2 (TREE_INT_CST_HIGH (arg1))
12031 + HOST_BITS_PER_WIDE_INT;
12033 return fold_build2_loc (loc, RSHIFT_EXPR, type,
12034 TREE_OPERAND (arg0, 0),
12035 build_int_cst (integer_type_node, pow2));
12039 /* Fall through */
12041 case FLOOR_DIV_EXPR:
12042 /* Simplify A / (B << N) where A and B are positive and B is
12043 a power of 2, to A >> (N + log2(B)). */
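/* For example, A / (4 << N) becomes A >> (N + 2), since log2(4) == 2.  */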
12044 strict_overflow_p = false;
12045 if (TREE_CODE (arg1) == LSHIFT_EXPR
12046 && (TYPE_UNSIGNED (type)
12047 || tree_expr_nonnegative_warnv_p (op0, &strict_overflow_p)))
12049 tree sval = TREE_OPERAND (arg1, 0);
12050 if (integer_pow2p (sval) && tree_int_cst_sgn (sval) > 0)
12052 tree sh_cnt = TREE_OPERAND (arg1, 1);
12053 unsigned long pow2;
12055 if (TREE_INT_CST_LOW (sval))
12056 pow2 = exact_log2 (TREE_INT_CST_LOW (sval));
12057 else
12058 pow2 = exact_log2 (TREE_INT_CST_HIGH (sval))
12059 + HOST_BITS_PER_WIDE_INT;
12061 if (strict_overflow_p)
12062 fold_overflow_warning (("assuming signed overflow does not "
12063 "occur when simplifying A / (B << N)"),
12064 WARN_STRICT_OVERFLOW_MISC);
12066 sh_cnt = fold_build2_loc (loc, PLUS_EXPR, TREE_TYPE (sh_cnt),
12067 sh_cnt,
12068 build_int_cst (TREE_TYPE (sh_cnt),
12069 pow2));
12070 return fold_build2_loc (loc, RSHIFT_EXPR, type,
12071 fold_convert_loc (loc, type, arg0), sh_cnt);
12075 /* For unsigned integral types, FLOOR_DIV_EXPR is the same as
12076 TRUNC_DIV_EXPR. Rewrite into the latter in this case. */
12077 if (INTEGRAL_TYPE_P (type)
12078 && TYPE_UNSIGNED (type)
12079 && code == FLOOR_DIV_EXPR)
12080 return fold_build2_loc (loc, TRUNC_DIV_EXPR, type, op0, op1);
12082 /* Fall through */
12084 case ROUND_DIV_EXPR:
12085 case CEIL_DIV_EXPR:
12086 case EXACT_DIV_EXPR:
12087 if (integer_onep (arg1))
12088 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
12089 if (integer_zerop (arg1))
12090 return NULL_TREE;
12091 /* X / -1 is -X. */
12092 if (!TYPE_UNSIGNED (type)
12093 && TREE_CODE (arg1) == INTEGER_CST
12094 && TREE_INT_CST_LOW (arg1) == (unsigned HOST_WIDE_INT) -1
12095 && TREE_INT_CST_HIGH (arg1) == -1)
12096 return fold_convert_loc (loc, type, negate_expr (arg0));
12098 /* Convert -A / -B to A / B when the type is signed and overflow is
12099 undefined. */
12100 if ((!INTEGRAL_TYPE_P (type) || TYPE_OVERFLOW_UNDEFINED (type))
12101 && TREE_CODE (arg0) == NEGATE_EXPR
12102 && negate_expr_p (arg1))
12104 if (INTEGRAL_TYPE_P (type))
12105 fold_overflow_warning (("assuming signed overflow does not occur "
12106 "when distributing negation across "
12107 "division"),
12108 WARN_STRICT_OVERFLOW_MISC);
12109 return fold_build2_loc (loc, code, type,
12110 fold_convert_loc (loc, type,
12111 TREE_OPERAND (arg0, 0)),
12112 fold_convert_loc (loc, type,
12113 negate_expr (arg1)));
12115 if ((!INTEGRAL_TYPE_P (type) || TYPE_OVERFLOW_UNDEFINED (type))
12116 && TREE_CODE (arg1) == NEGATE_EXPR
12117 && negate_expr_p (arg0))
12119 if (INTEGRAL_TYPE_P (type))
12120 fold_overflow_warning (("assuming signed overflow does not occur "
12121 "when distributing negation across "
12122 "division"),
12123 WARN_STRICT_OVERFLOW_MISC);
12124 return fold_build2_loc (loc, code, type,
12125 fold_convert_loc (loc, type,
12126 negate_expr (arg0)),
12127 fold_convert_loc (loc, type,
12128 TREE_OPERAND (arg1, 0)));
12131 /* If arg0 is a multiple of arg1, then rewrite to the fastest div
12132 operation, EXACT_DIV_EXPR.
12134 Note that only CEIL_DIV_EXPR and FLOOR_DIV_EXPR are rewritten now.
12135 At one time others generated faster code; it's not clear if they do
12136 after the last round of changes to the DIV code in expmed.c. */
12137 if ((code == CEIL_DIV_EXPR || code == FLOOR_DIV_EXPR)
12138 && multiple_of_p (type, arg0, arg1))
12139 return fold_build2_loc (loc, EXACT_DIV_EXPR, type, arg0, arg1);
12141 strict_overflow_p = false;
12142 if (TREE_CODE (arg1) == INTEGER_CST
12143 && 0 != (tem = extract_muldiv (op0, arg1, code, NULL_TREE,
12144 &strict_overflow_p)))
12146 if (strict_overflow_p)
12147 fold_overflow_warning (("assuming signed overflow does not occur "
12148 "when simplifying division"),
12149 WARN_STRICT_OVERFLOW_MISC);
12150 return fold_convert_loc (loc, type, tem);
12153 return NULL_TREE;
12155 case CEIL_MOD_EXPR:
12156 case FLOOR_MOD_EXPR:
12157 case ROUND_MOD_EXPR:
12158 case TRUNC_MOD_EXPR:
12159 /* X % 1 is always zero, but be sure to preserve any side
12160 effects in X. */
12161 if (integer_onep (arg1))
12162 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
12164 /* For X % 0, return X % 0 unchanged so that we can get the
12165 proper warnings and errors. */
12166 if (integer_zerop (arg1))
12167 return NULL_TREE;
12169 /* 0 % X is always zero, but be sure to preserve any side
12170 effects in X. Place this after checking for X == 0. */
12171 if (integer_zerop (arg0))
12172 return omit_one_operand_loc (loc, type, integer_zero_node, arg1);
12174 /* X % -1 is zero. */
12175 if (!TYPE_UNSIGNED (type)
12176 && TREE_CODE (arg1) == INTEGER_CST
12177 && TREE_INT_CST_LOW (arg1) == (unsigned HOST_WIDE_INT) -1
12178 && TREE_INT_CST_HIGH (arg1) == -1)
12179 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
12181 /* X % -C is the same as X % C. */
12182 if (code == TRUNC_MOD_EXPR
12183 && !TYPE_UNSIGNED (type)
12184 && TREE_CODE (arg1) == INTEGER_CST
12185 && !TREE_OVERFLOW (arg1)
12186 && TREE_INT_CST_HIGH (arg1) < 0
12187 && !TYPE_OVERFLOW_TRAPS (type)
12188 /* Avoid this transformation if C is INT_MIN, i.e. C == -C. */
12189 && !sign_bit_p (arg1, arg1))
12190 return fold_build2_loc (loc, code, type,
12191 fold_convert_loc (loc, type, arg0),
12192 fold_convert_loc (loc, type,
12193 negate_expr (arg1)));
12195 /* X % -Y is the same as X % Y. */
12196 if (code == TRUNC_MOD_EXPR
12197 && !TYPE_UNSIGNED (type)
12198 && TREE_CODE (arg1) == NEGATE_EXPR
12199 && !TYPE_OVERFLOW_TRAPS (type))
12200 return fold_build2_loc (loc, code, type, fold_convert_loc (loc, type, arg0),
12201 fold_convert_loc (loc, type,
12202 TREE_OPERAND (arg1, 0)));
12204 strict_overflow_p = false;
12205 if (TREE_CODE (arg1) == INTEGER_CST
12206 && 0 != (tem = extract_muldiv (op0, arg1, code, NULL_TREE,
12207 &strict_overflow_p)))
12209 if (strict_overflow_p)
12210 fold_overflow_warning (("assuming signed overflow does not occur "
12211 "when simplifying modulus"),
12212 WARN_STRICT_OVERFLOW_MISC);
12213 return fold_convert_loc (loc, type, tem);
12216 /* Optimize TRUNC_MOD_EXPR by a power of two into a BIT_AND_EXPR,
12217 i.e. "X % C" into "X & (C - 1)", if X and C are positive. */
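/* For example, X % 8 is X & 7 for non-negative X. For negative X the
   two differ: with truncating division, -1 % 8 is -1, while -1 & 7
   is 7, hence the nonnegativity requirement.  */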
12218 if ((code == TRUNC_MOD_EXPR || code == FLOOR_MOD_EXPR)
12219 && (TYPE_UNSIGNED (type)
12220 || tree_expr_nonnegative_warnv_p (op0, &strict_overflow_p)))
12222 tree c = arg1;
12223 /* Also optimize A % (C << N) where C is a power of 2,
12224 to A & ((C << N) - 1). */
12225 if (TREE_CODE (arg1) == LSHIFT_EXPR)
12226 c = TREE_OPERAND (arg1, 0);
12228 if (integer_pow2p (c) && tree_int_cst_sgn (c) > 0)
12230 tree mask
12231 = fold_build2_loc (loc, MINUS_EXPR, TREE_TYPE (arg1), arg1,
12232 build_int_cst (TREE_TYPE (arg1), 1));
12233 if (strict_overflow_p)
12234 fold_overflow_warning (("assuming signed overflow does not "
12235 "occur when simplifying "
12236 "X % (power of two)"),
12237 WARN_STRICT_OVERFLOW_MISC);
12238 return fold_build2_loc (loc, BIT_AND_EXPR, type,
12239 fold_convert_loc (loc, type, arg0),
12240 fold_convert_loc (loc, type, mask));
12244 return NULL_TREE;
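/* Illustrative, standalone sketch (not part of fold-const.c): for a
   nonnegative X and a power-of-two divisor C, X % C equals
   X & (C - 1), which is the BIT_AND_EXPR rewrite performed above; the
   second assertion covers the A % (C << N) variant.  */
#include <assert.h>

static void
mod_pow2_demo (void)
{
  unsigned x = 12345u;
  unsigned c = 1u << 4;                           /* C is a power of two.  */
  assert (x % c == (x & (c - 1)));
  assert (x % (c << 3) == (x & ((c << 3) - 1)));  /* A % (C << N) case.  */
}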
12246 case LROTATE_EXPR:
12247 case RROTATE_EXPR:
12248 if (integer_all_onesp (arg0))
12249 return omit_one_operand_loc (loc, type, arg0, arg1);
12250 goto shift;
12252 case RSHIFT_EXPR:
12253 /* Optimize -1 >> x for arithmetic right shifts. */
12254 if (integer_all_onesp (arg0) && !TYPE_UNSIGNED (type)
12255 && tree_expr_nonnegative_p (arg1))
12256 return omit_one_operand_loc (loc, type, arg0, arg1);
12257 /* ... fall through ... */
12259 case LSHIFT_EXPR:
12260 shift:
12261 if (integer_zerop (arg1))
12262 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
12263 if (integer_zerop (arg0))
12264 return omit_one_operand_loc (loc, type, arg0, arg1);
12266 /* Since a negative shift count is not well-defined,
12267 don't try to compute it in the compiler. */
12268 if (TREE_CODE (arg1) == INTEGER_CST && tree_int_cst_sgn (arg1) < 0)
12269 return NULL_TREE;
12271 /* Turn (a OP c1) OP c2 into a OP (c1+c2). */
12272 if (TREE_CODE (op0) == code && host_integerp (arg1, false)
12273 && TREE_INT_CST_LOW (arg1) < TYPE_PRECISION (type)
12274 && host_integerp (TREE_OPERAND (arg0, 1), false)
12275 && TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1)) < TYPE_PRECISION (type))
12277 HOST_WIDE_INT low = (TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1))
12278 + TREE_INT_CST_LOW (arg1));
12280 /* Deal with a OP (c1 + c2) being undefined but (a OP c1) OP c2
12281 being well defined. */
12282 if (low >= TYPE_PRECISION (type))
12284 if (code == LROTATE_EXPR || code == RROTATE_EXPR)
12285 low = low % TYPE_PRECISION (type);
12286 else if (TYPE_UNSIGNED (type) || code == LSHIFT_EXPR)
12287 return omit_one_operand_loc (loc, type, build_int_cst (type, 0),
12288 TREE_OPERAND (arg0, 0));
12289 else
12290 low = TYPE_PRECISION (type) - 1;
12293 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0),
12294 build_int_cst (type, low));
12297 /* Transform (x >> c) << c into x & (-1<<c), or transform (x << c) >> c
12298 into x & ((unsigned)-1 >> c) for unsigned types. */
12299 if (((code == LSHIFT_EXPR && TREE_CODE (arg0) == RSHIFT_EXPR)
12300 || (TYPE_UNSIGNED (type)
12301 && code == RSHIFT_EXPR && TREE_CODE (arg0) == LSHIFT_EXPR))
12302 && host_integerp (arg1, false)
12303 && TREE_INT_CST_LOW (arg1) < TYPE_PRECISION (type)
12304 && host_integerp (TREE_OPERAND (arg0, 1), false)
12305 && TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1)) < TYPE_PRECISION (type))
12307 HOST_WIDE_INT low0 = TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1));
12308 HOST_WIDE_INT low1 = TREE_INT_CST_LOW (arg1);
12309 tree lshift;
12310 tree arg00;
12312 if (low0 == low1)
12314 arg00 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
12316 lshift = build_int_cst (type, -1);
12317 lshift = int_const_binop (code, lshift, arg1);
12319 return fold_build2_loc (loc, BIT_AND_EXPR, type, arg00, lshift);
12323 /* Rewrite an LROTATE_EXPR by a constant into an
12324 RROTATE_EXPR by a new constant. */
12325 if (code == LROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST)
12327 tree tem = build_int_cst (TREE_TYPE (arg1),
12328 TYPE_PRECISION (type));
12329 tem = const_binop (MINUS_EXPR, tem, arg1);
12330 return fold_build2_loc (loc, RROTATE_EXPR, type, op0, tem);
12333 /* If we have a rotate of a bit operation with the rotate count and
12334 the second operand of the bit operation both constant,
12335 permute the two operations. */
12336 if (code == RROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST
12337 && (TREE_CODE (arg0) == BIT_AND_EXPR
12338 || TREE_CODE (arg0) == BIT_IOR_EXPR
12339 || TREE_CODE (arg0) == BIT_XOR_EXPR)
12340 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
12341 return fold_build2_loc (loc, TREE_CODE (arg0), type,
12342 fold_build2_loc (loc, code, type,
12343 TREE_OPERAND (arg0, 0), arg1),
12344 fold_build2_loc (loc, code, type,
12345 TREE_OPERAND (arg0, 1), arg1));
12347 /* Two consecutive rotates adding up to the precision of the
12348 type can be ignored. */
12349 if (code == RROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST
12350 && TREE_CODE (arg0) == RROTATE_EXPR
12351 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
12352 && TREE_INT_CST_HIGH (arg1) == 0
12353 && TREE_INT_CST_HIGH (TREE_OPERAND (arg0, 1)) == 0
12354 && ((TREE_INT_CST_LOW (arg1)
12355 + TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1)))
12356 == (unsigned int) TYPE_PRECISION (type)))
12357 return TREE_OPERAND (arg0, 0);
12359 /* Fold (X & C2) << C1 into (X << C1) & (C2 << C1)
12360 (X & C2) >> C1 into (X >> C1) & (C2 >> C1)
12361 if the latter can be further optimized. */
12362 if ((code == LSHIFT_EXPR || code == RSHIFT_EXPR)
12363 && TREE_CODE (arg0) == BIT_AND_EXPR
12364 && TREE_CODE (arg1) == INTEGER_CST
12365 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
12367 tree mask = fold_build2_loc (loc, code, type,
12368 fold_convert_loc (loc, type,
12369 TREE_OPERAND (arg0, 1)),
12370 arg1);
12371 tree shift = fold_build2_loc (loc, code, type,
12372 fold_convert_loc (loc, type,
12373 TREE_OPERAND (arg0, 0)),
12374 arg1);
12375 tem = fold_binary_loc (loc, BIT_AND_EXPR, type, shift, mask);
12376 if (tem)
12377 return tem;
12380 return NULL_TREE;
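/* Illustrative, standalone sketch (not part of fold-const.c; rotl and
   rotr are our own helpers): a left rotation by C equals a right
   rotation by PREC - C, and for unsigned types (X >> C) << C equals
   X & (-1 << C) -- the two rewrites used in the shift/rotate cases
   above.  The demo keeps 0 < c < PREC to avoid undefined shifts.  */
#include <assert.h>
#include <limits.h>

#define PREC (sizeof (unsigned) * CHAR_BIT)

static unsigned
rotl (unsigned x, unsigned c) { return (x << c) | (x >> (PREC - c)); }

static unsigned
rotr (unsigned x, unsigned c) { return (x >> c) | (x << (PREC - c)); }

static void
shift_rotate_demo (void)
{
  unsigned x = 0xDEADBEEFu, c = 5;
  assert (rotl (x, c) == rotr (x, PREC - c));    /* LROTATE -> RROTATE.  */
  assert (((x >> c) << c) == (x & (~0u << c)));  /* (X>>C)<<C == X & (-1<<C).  */
}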
12382 case MIN_EXPR:
12383 if (operand_equal_p (arg0, arg1, 0))
12384 return omit_one_operand_loc (loc, type, arg0, arg1);
12385 if (INTEGRAL_TYPE_P (type)
12386 && operand_equal_p (arg1, TYPE_MIN_VALUE (type), OEP_ONLY_CONST))
12387 return omit_one_operand_loc (loc, type, arg1, arg0);
12388 tem = fold_minmax (loc, MIN_EXPR, type, arg0, arg1);
12389 if (tem)
12390 return tem;
12391 goto associate;
12393 case MAX_EXPR:
12394 if (operand_equal_p (arg0, arg1, 0))
12395 return omit_one_operand_loc (loc, type, arg0, arg1);
12396 if (INTEGRAL_TYPE_P (type)
12397 && TYPE_MAX_VALUE (type)
12398 && operand_equal_p (arg1, TYPE_MAX_VALUE (type), OEP_ONLY_CONST))
12399 return omit_one_operand_loc (loc, type, arg1, arg0);
12400 tem = fold_minmax (loc, MAX_EXPR, type, arg0, arg1);
12401 if (tem)
12402 return tem;
12403 goto associate;
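/* Illustrative, standalone sketch (not part of fold-const.c): MIN of
   any X with the type's minimum value is that minimum, and MAX with the
   type's maximum is that maximum, which is what the omit_one_operand
   cases for TYPE_MIN_VALUE/TYPE_MAX_VALUE above rely on.  */
#include <assert.h>
#include <limits.h>

static int min_int (int a, int b) { return a < b ? a : b; }
static int max_int (int a, int b) { return a > b ? a : b; }

static void
minmax_demo (void)
{
  int x = 42;
  assert (min_int (x, INT_MIN) == INT_MIN);
  assert (max_int (x, INT_MAX) == INT_MAX);
}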
12405 case TRUTH_ANDIF_EXPR:
12406 /* Note that the operands of this must be ints
12407 and their values must be 0 or 1.
12408 ("true" is a fixed value perhaps depending on the language.) */
12409 /* If first arg is constant zero, return it. */
12410 if (integer_zerop (arg0))
12411 return fold_convert_loc (loc, type, arg0);
12412 case TRUTH_AND_EXPR:
12413 /* If either arg is constant true, drop it. */
12414 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
12415 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
12416 if (TREE_CODE (arg1) == INTEGER_CST && ! integer_zerop (arg1)
12417 /* Preserve sequence points. */
12418 && (code != TRUTH_ANDIF_EXPR || ! TREE_SIDE_EFFECTS (arg0)))
12419 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
12420 /* If second arg is constant zero, result is zero, but first arg
12421 must be evaluated. */
12422 if (integer_zerop (arg1))
12423 return omit_one_operand_loc (loc, type, arg1, arg0);
12424 /* Likewise for first arg, but note that only the TRUTH_AND_EXPR
12425 case will be handled here. */
12426 if (integer_zerop (arg0))
12427 return omit_one_operand_loc (loc, type, arg0, arg1);
12429 /* !X && X is always false. */
12430 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
12431 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
12432 return omit_one_operand_loc (loc, type, integer_zero_node, arg1);
12433 /* X && !X is always false. */
12434 if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
12435 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
12436 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
12438 /* A < X && A + 1 > Y ==> A < X && A >= Y. Normally A + 1 > Y
12439 means A >= Y && A != MAX, but in this case we know that
12440 A < X <= MAX. */
12442 if (!TREE_SIDE_EFFECTS (arg0)
12443 && !TREE_SIDE_EFFECTS (arg1))
12445 tem = fold_to_nonsharp_ineq_using_bound (loc, arg0, arg1);
12446 if (tem && !operand_equal_p (tem, arg0, 0))
12447 return fold_build2_loc (loc, code, type, tem, arg1);
12449 tem = fold_to_nonsharp_ineq_using_bound (loc, arg1, arg0);
12450 if (tem && !operand_equal_p (tem, arg1, 0))
12451 return fold_build2_loc (loc, code, type, arg0, tem);
12454 if ((tem = fold_truth_andor (loc, code, type, arg0, arg1, op0, op1))
12455 != NULL_TREE)
12456 return tem;
12458 return NULL_TREE;
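/* Illustrative, standalone sketch (not part of fold-const.c): over the
   integers, as long as A + 1 does not overflow, A + 1 > Y is the same
   test as A >= Y.  That is the core identity behind the
   fold_to_nonsharp_ineq_using_bound calls above; the small ranges below
   keep the demo clear of INT_MAX.  */
#include <assert.h>

static void
nonsharp_ineq_demo (void)
{
  for (int a = -3; a <= 3; a++)
    for (int y = -3; y <= 3; y++)
      assert ((a + 1 > y) == (a >= y));
}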
12460 case TRUTH_ORIF_EXPR:
12461 /* Note that the operands of this must be ints
12462 and their values must be 0 or true.
12463 ("true" is a fixed value perhaps depending on the language.) */
12464 /* If first arg is constant true, return it. */
12465 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
12466 return fold_convert_loc (loc, type, arg0);
12467 case TRUTH_OR_EXPR:
12468 /* If either arg is constant zero, drop it. */
12469 if (TREE_CODE (arg0) == INTEGER_CST && integer_zerop (arg0))
12470 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
12471 if (TREE_CODE (arg1) == INTEGER_CST && integer_zerop (arg1)
12472 /* Preserve sequence points. */
12473 && (code != TRUTH_ORIF_EXPR || ! TREE_SIDE_EFFECTS (arg0)))
12474 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
12475 /* If second arg is constant true, result is true, but we must
12476 evaluate first arg. */
12477 if (TREE_CODE (arg1) == INTEGER_CST && ! integer_zerop (arg1))
12478 return omit_one_operand_loc (loc, type, arg1, arg0);
12479 /* Likewise for first arg, but note this only occurs here for
12480 TRUTH_OR_EXPR. */
12481 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
12482 return omit_one_operand_loc (loc, type, arg0, arg1);
12484 /* !X || X is always true. */
12485 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
12486 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
12487 return omit_one_operand_loc (loc, type, integer_one_node, arg1);
12488 /* X || !X is always true. */
12489 if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
12490 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
12491 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
12493 /* (X && !Y) || (!X && Y) is X ^ Y */
12494 if (TREE_CODE (arg0) == TRUTH_AND_EXPR
12495 && TREE_CODE (arg1) == TRUTH_AND_EXPR)
12497 tree a0, a1, l0, l1, n0, n1;
12499 a0 = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 0));
12500 a1 = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 1));
12502 l0 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
12503 l1 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
12505 n0 = fold_build1_loc (loc, TRUTH_NOT_EXPR, type, l0);
12506 n1 = fold_build1_loc (loc, TRUTH_NOT_EXPR, type, l1);
12508 if ((operand_equal_p (n0, a0, 0)
12509 && operand_equal_p (n1, a1, 0))
12510 || (operand_equal_p (n0, a1, 0)
12511 && operand_equal_p (n1, a0, 0)))
12512 return fold_build2_loc (loc, TRUTH_XOR_EXPR, type, l0, n1);
12515 if ((tem = fold_truth_andor (loc, code, type, arg0, arg1, op0, op1))
12516 != NULL_TREE)
12517 return tem;
12519 return NULL_TREE;
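/* Illustrative, standalone sketch (not part of fold-const.c): for
   truth values, (X && !Y) || (!X && Y) is exactly X ^ Y, the
   TRUTH_XOR_EXPR rewrite performed above.  Exhaustive over the four
   boolean combinations.  */
#include <assert.h>

static void
or_of_ands_is_xor_demo (void)
{
  for (int x = 0; x <= 1; x++)
    for (int y = 0; y <= 1; y++)
      assert (((x && !y) || (!x && y)) == (x ^ y));
}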
12521 case TRUTH_XOR_EXPR:
12522 /* If the second arg is constant zero, drop it. */
12523 if (integer_zerop (arg1))
12524 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
12525 /* If the second arg is constant true, this is a logical inversion. */
12526 if (integer_onep (arg1))
12528 /* Only call invert_truthvalue if operand is a truth value. */
12529 if (TREE_CODE (TREE_TYPE (arg0)) != BOOLEAN_TYPE)
12530 tem = fold_build1_loc (loc, TRUTH_NOT_EXPR, TREE_TYPE (arg0), arg0);
12531 else
12532 tem = invert_truthvalue_loc (loc, arg0);
12533 return non_lvalue_loc (loc, fold_convert_loc (loc, type, tem));
12535 /* Identical arguments cancel to zero. */
12536 if (operand_equal_p (arg0, arg1, 0))
12537 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
12539 /* !X ^ X is always true. */
12540 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
12541 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
12542 return omit_one_operand_loc (loc, type, integer_one_node, arg1);
12544 /* X ^ !X is always true. */
12545 if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
12546 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
12547 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
12549 return NULL_TREE;
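/* Illustrative, standalone sketch (not part of fold-const.c): XOR with
   constant true inverts a truth value, and X ^ X cancels to false,
   matching the TRUTH_XOR_EXPR folds above.  */
#include <assert.h>

static void
truth_xor_demo (void)
{
  for (int x = 0; x <= 1; x++)
    {
      assert ((x ^ 1) == !x);  /* X ^ true is logical inversion.  */
      assert ((x ^ x) == 0);   /* identical arguments cancel.  */
    }
}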
12551 case EQ_EXPR:
12552 case NE_EXPR:
12553 STRIP_NOPS (arg0);
12554 STRIP_NOPS (arg1);
12556 tem = fold_comparison (loc, code, type, op0, op1);
12557 if (tem != NULL_TREE)
12558 return tem;
12560 /* bool_var != 0 becomes bool_var. */
12561 if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_zerop (arg1)
12562 && code == NE_EXPR)
12563 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
12565 /* bool_var == 1 becomes bool_var. */
12566 if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_onep (arg1)
12567 && code == EQ_EXPR)
12568 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
12570 /* bool_var != 1 becomes !bool_var. */
12571 if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_onep (arg1)
12572 && code == NE_EXPR)
12573 return fold_convert_loc (loc, type,
12574 fold_build1_loc (loc, TRUTH_NOT_EXPR,
12575 TREE_TYPE (arg0), arg0));
12577 /* bool_var == 0 becomes !bool_var. */
12578 if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_zerop (arg1)
12579 && code == EQ_EXPR)
12580 return fold_convert_loc (loc, type,
12581 fold_build1_loc (loc, TRUTH_NOT_EXPR,
12582 TREE_TYPE (arg0), arg0));
12584 /* !exp != 0 becomes !exp */
12585 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR && integer_zerop (arg1)
12586 && code == NE_EXPR)
12587 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
12589 /* If this is an equality comparison of the address of two non-weak,
12590 unaliased symbols, neither of which is extern (since we do not
12591 have access to attributes for externs), then we know the result. */
12592 if (TREE_CODE (arg0) == ADDR_EXPR
12593 && VAR_OR_FUNCTION_DECL_P (TREE_OPERAND (arg0, 0))
12594 && ! DECL_WEAK (TREE_OPERAND (arg0, 0))
12595 && ! lookup_attribute ("alias",
12596 DECL_ATTRIBUTES (TREE_OPERAND (arg0, 0)))
12597 && ! DECL_EXTERNAL (TREE_OPERAND (arg0, 0))
12598 && TREE_CODE (arg1) == ADDR_EXPR
12599 && VAR_OR_FUNCTION_DECL_P (TREE_OPERAND (arg1, 0))
12600 && ! DECL_WEAK (TREE_OPERAND (arg1, 0))
12601 && ! lookup_attribute ("alias",
12602 DECL_ATTRIBUTES (TREE_OPERAND (arg1, 0)))
12603 && ! DECL_EXTERNAL (TREE_OPERAND (arg1, 0)))
12605 /* We know that we're looking at the address of two
12606 non-weak, unaliased, static _DECL nodes.
12608 It is both wasteful and incorrect to call operand_equal_p
12609 to compare the two ADDR_EXPR nodes. It is wasteful in that
12610 all we need to do is test pointer equality for the arguments
12611 to the two ADDR_EXPR nodes. It is incorrect to use
12612 operand_equal_p as that function is NOT equivalent to a
12613 C equality test. It can in fact return false for two
12614 objects which would test as equal using the C equality
12615 operator. */
12616 bool equal = TREE_OPERAND (arg0, 0) == TREE_OPERAND (arg1, 0);
12617 return constant_boolean_node (equal
12618 ? code == EQ_EXPR : code != EQ_EXPR,
12619 type);
12622 /* If this is an EQ or NE comparison of a constant with a PLUS_EXPR or
12623 a MINUS_EXPR of a constant, we can convert it into a comparison with
12624 a revised constant as long as no overflow occurs. */
12625 if (TREE_CODE (arg1) == INTEGER_CST
12626 && (TREE_CODE (arg0) == PLUS_EXPR
12627 || TREE_CODE (arg0) == MINUS_EXPR)
12628 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
12629 && 0 != (tem = const_binop (TREE_CODE (arg0) == PLUS_EXPR
12630 ? MINUS_EXPR : PLUS_EXPR,
12631 fold_convert_loc (loc, TREE_TYPE (arg0),
12632 arg1),
12633 TREE_OPERAND (arg0, 1)))
12634 && !TREE_OVERFLOW (tem))
12635 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0), tem);
12637 /* Similarly for a NEGATE_EXPR. */
12638 if (TREE_CODE (arg0) == NEGATE_EXPR
12639 && TREE_CODE (arg1) == INTEGER_CST
12640 && 0 != (tem = negate_expr (fold_convert_loc (loc, TREE_TYPE (arg0),
12641 arg1)))
12642 && TREE_CODE (tem) == INTEGER_CST
12643 && !TREE_OVERFLOW (tem))
12644 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0), tem);
12646 /* Similarly for a BIT_XOR_EXPR; X ^ C1 == C2 is X == (C1 ^ C2). */
12647 if (TREE_CODE (arg0) == BIT_XOR_EXPR
12648 && TREE_CODE (arg1) == INTEGER_CST
12649 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
12650 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0),
12651 fold_build2_loc (loc, BIT_XOR_EXPR, TREE_TYPE (arg0),
12652 fold_convert_loc (loc,
12653 TREE_TYPE (arg0),
12654 arg1),
12655 TREE_OPERAND (arg0, 1)));
12657 /* Transform comparisons of the form X +- Y CMP X to Y CMP 0. */
12658 if ((TREE_CODE (arg0) == PLUS_EXPR
12659 || TREE_CODE (arg0) == POINTER_PLUS_EXPR
12660 || TREE_CODE (arg0) == MINUS_EXPR)
12661 && operand_equal_p (tree_strip_nop_conversions (TREE_OPERAND (arg0,
12662 0)),
12663 arg1, 0)
12664 && (INTEGRAL_TYPE_P (TREE_TYPE (arg0))
12665 || POINTER_TYPE_P (TREE_TYPE (arg0))))
12667 tree val = TREE_OPERAND (arg0, 1);
12668 return omit_two_operands_loc (loc, type,
12669 fold_build2_loc (loc, code, type,
12670 val,
12671 build_int_cst (TREE_TYPE (val),
12672 0)),
12673 TREE_OPERAND (arg0, 0), arg1);
12676 /* Transform comparisons of the form C - X CMP X if C % 2 == 1. */
12677 if (TREE_CODE (arg0) == MINUS_EXPR
12678 && TREE_CODE (TREE_OPERAND (arg0, 0)) == INTEGER_CST
12679 && operand_equal_p (tree_strip_nop_conversions (TREE_OPERAND (arg0,
12680 1)),
12681 arg1, 0)
12682 && (TREE_INT_CST_LOW (TREE_OPERAND (arg0, 0)) & 1) == 1)
12684 return omit_two_operands_loc (loc, type,
12685 code == NE_EXPR
12686 ? boolean_true_node : boolean_false_node,
12687 TREE_OPERAND (arg0, 1), arg1);
12690 /* If we have X - Y == 0, we can convert that to X == Y and similarly
12691 for !=. Don't do this for ordered comparisons due to overflow. */
12692 if (TREE_CODE (arg0) == MINUS_EXPR
12693 && integer_zerop (arg1))
12694 return fold_build2_loc (loc, code, type,
12695 TREE_OPERAND (arg0, 0), TREE_OPERAND (arg0, 1));
12697 /* Convert ABS_EXPR<x> == 0 or ABS_EXPR<x> != 0 to x == 0 or x != 0. */
12698 if (TREE_CODE (arg0) == ABS_EXPR
12699 && (integer_zerop (arg1) || real_zerop (arg1)))
12700 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0), arg1);
12702 /* If this is an EQ or NE comparison with zero and ARG0 is
12703 (1 << foo) & bar, convert it to (bar >> foo) & 1. Both require
12704 two operations, but the latter can be done in one less insn
12705 on machines that have only two-operand insns or on which a
12706 constant cannot be the first operand. */
12707 if (TREE_CODE (arg0) == BIT_AND_EXPR
12708 && integer_zerop (arg1))
12710 tree arg00 = TREE_OPERAND (arg0, 0);
12711 tree arg01 = TREE_OPERAND (arg0, 1);
12712 if (TREE_CODE (arg00) == LSHIFT_EXPR
12713 && integer_onep (TREE_OPERAND (arg00, 0)))
12715 tree tem = fold_build2_loc (loc, RSHIFT_EXPR, TREE_TYPE (arg00),
12716 arg01, TREE_OPERAND (arg00, 1));
12717 tem = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg0), tem,
12718 build_int_cst (TREE_TYPE (arg0), 1));
12719 return fold_build2_loc (loc, code, type,
12720 fold_convert_loc (loc, TREE_TYPE (arg1), tem),
12721 arg1);
12723 else if (TREE_CODE (arg01) == LSHIFT_EXPR
12724 && integer_onep (TREE_OPERAND (arg01, 0)))
12726 tree tem = fold_build2_loc (loc, RSHIFT_EXPR, TREE_TYPE (arg01),
12727 arg00, TREE_OPERAND (arg01, 1));
12728 tem = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg0), tem,
12729 build_int_cst (TREE_TYPE (arg0), 1));
12730 return fold_build2_loc (loc, code, type,
12731 fold_convert_loc (loc, TREE_TYPE (arg1), tem),
12732 arg1);
12736 /* If this is an NE or EQ comparison of zero against the result of a
12737 signed MOD operation whose second operand is a power of 2, make
12738 the MOD operation unsigned since it is simpler and equivalent. */
12739 if (integer_zerop (arg1)
12740 && !TYPE_UNSIGNED (TREE_TYPE (arg0))
12741 && (TREE_CODE (arg0) == TRUNC_MOD_EXPR
12742 || TREE_CODE (arg0) == CEIL_MOD_EXPR
12743 || TREE_CODE (arg0) == FLOOR_MOD_EXPR
12744 || TREE_CODE (arg0) == ROUND_MOD_EXPR)
12745 && integer_pow2p (TREE_OPERAND (arg0, 1)))
12747 tree newtype = unsigned_type_for (TREE_TYPE (arg0));
12748 tree newmod = fold_build2_loc (loc, TREE_CODE (arg0), newtype,
12749 fold_convert_loc (loc, newtype,
12750 TREE_OPERAND (arg0, 0)),
12751 fold_convert_loc (loc, newtype,
12752 TREE_OPERAND (arg0, 1)));
12754 return fold_build2_loc (loc, code, type, newmod,
12755 fold_convert_loc (loc, newtype, arg1));
12758 /* Fold ((X >> C1) & C2) == 0 and ((X >> C1) & C2) != 0 where
12759 C1 is a valid shift constant, and C2 is a power of two, i.e.
12760 a single bit. */
12761 if (TREE_CODE (arg0) == BIT_AND_EXPR
12762 && TREE_CODE (TREE_OPERAND (arg0, 0)) == RSHIFT_EXPR
12763 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1))
12764 == INTEGER_CST
12765 && integer_pow2p (TREE_OPERAND (arg0, 1))
12766 && integer_zerop (arg1))
12768 tree itype = TREE_TYPE (arg0);
12769 unsigned HOST_WIDE_INT prec = TYPE_PRECISION (itype);
12770 tree arg001 = TREE_OPERAND (TREE_OPERAND (arg0, 0), 1);
12772 /* Check for a valid shift count. */
12773 if (TREE_INT_CST_HIGH (arg001) == 0
12774 && TREE_INT_CST_LOW (arg001) < prec)
12776 tree arg01 = TREE_OPERAND (arg0, 1);
12777 tree arg000 = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
12778 unsigned HOST_WIDE_INT log2 = tree_log2 (arg01);
12779 /* If (C2 << C1) doesn't overflow, then ((X >> C1) & C2) != 0
12780 can be rewritten as (X & (C2 << C1)) != 0. */
12781 if ((log2 + TREE_INT_CST_LOW (arg001)) < prec)
12783 tem = fold_build2_loc (loc, LSHIFT_EXPR, itype, arg01, arg001);
12784 tem = fold_build2_loc (loc, BIT_AND_EXPR, itype, arg000, tem);
12785 return fold_build2_loc (loc, code, type, tem,
12786 fold_convert_loc (loc, itype, arg1));
12788 /* Otherwise, for signed (arithmetic) shifts,
12789 ((X >> C1) & C2) != 0 is rewritten as X < 0, and
12790 ((X >> C1) & C2) == 0 is rewritten as X >= 0. */
12791 else if (!TYPE_UNSIGNED (itype))
12792 return fold_build2_loc (loc, code == EQ_EXPR ? GE_EXPR : LT_EXPR, type,
12793 arg000, build_int_cst (itype, 0));
12794 /* Otherwise, for unsigned (logical) shifts,
12795 ((X >> C1) & C2) != 0 is rewritten as (X,false), and
12796 ((X >> C1) & C2) == 0 is rewritten as (X,true). */
12797 else
12798 return omit_one_operand_loc (loc, type,
12799 code == EQ_EXPR ? integer_one_node
12800 : integer_zero_node,
12801 arg000);
12805 /* If we have (A & C) == C where C is a power of 2, convert this into
12806 (A & C) != 0. Similarly for NE_EXPR. */
12807 if (TREE_CODE (arg0) == BIT_AND_EXPR
12808 && integer_pow2p (TREE_OPERAND (arg0, 1))
12809 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
12810 return fold_build2_loc (loc, code == EQ_EXPR ? NE_EXPR : EQ_EXPR, type,
12811 arg0, fold_convert_loc (loc, TREE_TYPE (arg0),
12812 integer_zero_node));
12814 /* If we have (A & C) != 0 or (A & C) == 0 and C is the sign
12815 bit, then fold the expression into A < 0 or A >= 0. */
12816 tem = fold_single_bit_test_into_sign_test (loc, code, arg0, arg1, type);
12817 if (tem)
12818 return tem;
12820 /* If we have (A & C) == D where D & ~C != 0, convert this into 0.
12821 Similarly for NE_EXPR. */
12822 if (TREE_CODE (arg0) == BIT_AND_EXPR
12823 && TREE_CODE (arg1) == INTEGER_CST
12824 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
12826 tree notc = fold_build1_loc (loc, BIT_NOT_EXPR,
12827 TREE_TYPE (TREE_OPERAND (arg0, 1)),
12828 TREE_OPERAND (arg0, 1));
12829 tree dandnotc
12830 = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg0),
12831 fold_convert_loc (loc, TREE_TYPE (arg0), arg1),
12832 notc);
12833 tree rslt = code == EQ_EXPR ? integer_zero_node : integer_one_node;
12834 if (integer_nonzerop (dandnotc))
12835 return omit_one_operand_loc (loc, type, rslt, arg0);
12838 /* If we have (A | C) == D where C & ~D != 0, convert this into 0.
12839 Similarly for NE_EXPR. */
12840 if (TREE_CODE (arg0) == BIT_IOR_EXPR
12841 && TREE_CODE (arg1) == INTEGER_CST
12842 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
12844 tree notd = fold_build1_loc (loc, BIT_NOT_EXPR, TREE_TYPE (arg1), arg1);
12845 tree candnotd
12846 = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg0),
12847 TREE_OPERAND (arg0, 1),
12848 fold_convert_loc (loc, TREE_TYPE (arg0), notd));
12849 tree rslt = code == EQ_EXPR ? integer_zero_node : integer_one_node;
12850 if (integer_nonzerop (candnotd))
12851 return omit_one_operand_loc (loc, type, rslt, arg0);
12854 /* If this is a comparison of a field, we may be able to simplify it. */
12855 if ((TREE_CODE (arg0) == COMPONENT_REF
12856 || TREE_CODE (arg0) == BIT_FIELD_REF)
12857 /* Handle the constant case even without -O
12858 to make sure the warnings are given. */
12859 && (optimize || TREE_CODE (arg1) == INTEGER_CST))
12861 t1 = optimize_bit_field_compare (loc, code, type, arg0, arg1);
12862 if (t1)
12863 return t1;
12866 /* Optimize comparisons of strlen vs zero to a compare of the
12867 first character of the string vs zero. To wit,
12868 strlen(ptr) == 0 => *ptr == 0
12869 strlen(ptr) != 0 => *ptr != 0
12870 Other cases should reduce to one of these two (or a constant)
12871 due to the return value of strlen being unsigned. */
12872 if (TREE_CODE (arg0) == CALL_EXPR
12873 && integer_zerop (arg1))
12875 tree fndecl = get_callee_fndecl (arg0);
12877 if (fndecl
12878 && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
12879 && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_STRLEN
12880 && call_expr_nargs (arg0) == 1
12881 && TREE_CODE (TREE_TYPE (CALL_EXPR_ARG (arg0, 0))) == POINTER_TYPE)
12883 tree iref = build_fold_indirect_ref_loc (loc,
12884 CALL_EXPR_ARG (arg0, 0));
12885 return fold_build2_loc (loc, code, type, iref,
12886 build_int_cst (TREE_TYPE (iref), 0));
12890 /* Fold (X >> C) != 0 into X < 0 if C is one less than the width
12891 of X. Similarly fold (X >> C) == 0 into X >= 0. */
12892 if (TREE_CODE (arg0) == RSHIFT_EXPR
12893 && integer_zerop (arg1)
12894 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
12896 tree arg00 = TREE_OPERAND (arg0, 0);
12897 tree arg01 = TREE_OPERAND (arg0, 1);
12898 tree itype = TREE_TYPE (arg00);
12899 if (TREE_INT_CST_HIGH (arg01) == 0
12900 && TREE_INT_CST_LOW (arg01)
12901 == (unsigned HOST_WIDE_INT) (TYPE_PRECISION (itype) - 1))
12903 if (TYPE_UNSIGNED (itype))
12905 itype = signed_type_for (itype);
12906 arg00 = fold_convert_loc (loc, itype, arg00);
12908 return fold_build2_loc (loc, code == EQ_EXPR ? GE_EXPR : LT_EXPR,
12909 type, arg00, build_int_cst (itype, 0));
12913 /* (X ^ Y) == 0 becomes X == Y, and (X ^ Y) != 0 becomes X != Y. */
12914 if (integer_zerop (arg1)
12915 && TREE_CODE (arg0) == BIT_XOR_EXPR)
12916 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0),
12917 TREE_OPERAND (arg0, 1));
12919 /* (X ^ Y) == Y becomes X == 0. We know that Y has no side-effects. */
12920 if (TREE_CODE (arg0) == BIT_XOR_EXPR
12921 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
12922 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0),
12923 build_zero_cst (TREE_TYPE (arg0)));
12924 /* Likewise (X ^ Y) == X becomes Y == 0. X has no side-effects. */
12925 if (TREE_CODE (arg0) == BIT_XOR_EXPR
12926 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
12927 && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
12928 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 1),
12929 build_zero_cst (TREE_TYPE (arg0)));
12931 /* (X ^ C1) op C2 can be rewritten as X op (C1 ^ C2). */
12932 if (TREE_CODE (arg0) == BIT_XOR_EXPR
12933 && TREE_CODE (arg1) == INTEGER_CST
12934 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
12935 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0),
12936 fold_build2_loc (loc, BIT_XOR_EXPR, TREE_TYPE (arg1),
12937 TREE_OPERAND (arg0, 1), arg1));
12939 /* Fold (~X & C) == 0 into (X & C) != 0 and (~X & C) != 0 into
12940 (X & C) == 0 when C is a single bit. */
12941 if (TREE_CODE (arg0) == BIT_AND_EXPR
12942 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_NOT_EXPR
12943 && integer_zerop (arg1)
12944 && integer_pow2p (TREE_OPERAND (arg0, 1)))
12946 tem = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg0),
12947 TREE_OPERAND (TREE_OPERAND (arg0, 0), 0),
12948 TREE_OPERAND (arg0, 1));
12949 return fold_build2_loc (loc, code == EQ_EXPR ? NE_EXPR : EQ_EXPR,
12950 type, tem,
12951 fold_convert_loc (loc, TREE_TYPE (arg0),
12952 arg1));
12955 /* Fold ((X & C) ^ C) eq/ne 0 into (X & C) ne/eq 0, when the
12956 constant C is a power of two, i.e. a single bit. */
12957 if (TREE_CODE (arg0) == BIT_XOR_EXPR
12958 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_AND_EXPR
12959 && integer_zerop (arg1)
12960 && integer_pow2p (TREE_OPERAND (arg0, 1))
12961 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
12962 TREE_OPERAND (arg0, 1), OEP_ONLY_CONST))
12964 tree arg00 = TREE_OPERAND (arg0, 0);
12965 return fold_build2_loc (loc, code == EQ_EXPR ? NE_EXPR : EQ_EXPR, type,
12966 arg00, build_int_cst (TREE_TYPE (arg00), 0));
12969 /* Likewise, fold ((X ^ C) & C) eq/ne 0 into (X & C) ne/eq 0,
12970 when C is a power of two, i.e. a single bit. */
12971 if (TREE_CODE (arg0) == BIT_AND_EXPR
12972 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_XOR_EXPR
12973 && integer_zerop (arg1)
12974 && integer_pow2p (TREE_OPERAND (arg0, 1))
12975 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
12976 TREE_OPERAND (arg0, 1), OEP_ONLY_CONST))
12978 tree arg000 = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
12979 tem = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg000),
12980 arg000, TREE_OPERAND (arg0, 1));
12981 return fold_build2_loc (loc, code == EQ_EXPR ? NE_EXPR : EQ_EXPR, type,
12982 tem, build_int_cst (TREE_TYPE (tem), 0));
12985 if (integer_zerop (arg1)
12986 && tree_expr_nonzero_p (arg0))
12988 tree res = constant_boolean_node (code == NE_EXPR, type);
12989 return omit_one_operand_loc (loc, type, res, arg0);
12992 /* Fold -X op -Y as X op Y, where op is eq/ne. */
12993 if (TREE_CODE (arg0) == NEGATE_EXPR
12994 && TREE_CODE (arg1) == NEGATE_EXPR)
12995 return fold_build2_loc (loc, code, type,
12996 TREE_OPERAND (arg0, 0),
12997 fold_convert_loc (loc, TREE_TYPE (arg0),
12998 TREE_OPERAND (arg1, 0)));
13000 /* Fold (X & C) op (Y & C) as (X ^ Y) & C op 0, and symmetries. */
13001 if (TREE_CODE (arg0) == BIT_AND_EXPR
13002 && TREE_CODE (arg1) == BIT_AND_EXPR)
13004 tree arg00 = TREE_OPERAND (arg0, 0);
13005 tree arg01 = TREE_OPERAND (arg0, 1);
13006 tree arg10 = TREE_OPERAND (arg1, 0);
13007 tree arg11 = TREE_OPERAND (arg1, 1);
13008 tree itype = TREE_TYPE (arg0);
13010 if (operand_equal_p (arg01, arg11, 0))
13011 return fold_build2_loc (loc, code, type,
13012 fold_build2_loc (loc, BIT_AND_EXPR, itype,
13013 fold_build2_loc (loc,
13014 BIT_XOR_EXPR, itype,
13015 arg00, arg10),
13016 arg01),
13017 build_zero_cst (itype));
13019 if (operand_equal_p (arg01, arg10, 0))
13020 return fold_build2_loc (loc, code, type,
13021 fold_build2_loc (loc, BIT_AND_EXPR, itype,
13022 fold_build2_loc (loc,
13023 BIT_XOR_EXPR, itype,
13024 arg00, arg11),
13025 arg01),
13026 build_zero_cst (itype));
13028 if (operand_equal_p (arg00, arg11, 0))
13029 return fold_build2_loc (loc, code, type,
13030 fold_build2_loc (loc, BIT_AND_EXPR, itype,
13031 fold_build2_loc (loc,
13032 BIT_XOR_EXPR, itype,
13033 arg01, arg10),
13034 arg00),
13035 build_zero_cst (itype));
13037 if (operand_equal_p (arg00, arg10, 0))
13038 return fold_build2_loc (loc, code, type,
13039 fold_build2_loc (loc, BIT_AND_EXPR, itype,
13040 fold_build2_loc (loc,
13041 BIT_XOR_EXPR, itype,
13042 arg01, arg11),
13043 arg00),
13044 build_zero_cst (itype));
13047 if (TREE_CODE (arg0) == BIT_XOR_EXPR
13048 && TREE_CODE (arg1) == BIT_XOR_EXPR)
13050 tree arg00 = TREE_OPERAND (arg0, 0);
13051 tree arg01 = TREE_OPERAND (arg0, 1);
13052 tree arg10 = TREE_OPERAND (arg1, 0);
13053 tree arg11 = TREE_OPERAND (arg1, 1);
13054 tree itype = TREE_TYPE (arg0);
13056 /* Optimize (X ^ Z) op (Y ^ Z) as X op Y, and symmetries.
13057 operand_equal_p guarantees no side-effects so we don't need
13058 to use omit_one_operand on Z. */
13059 if (operand_equal_p (arg01, arg11, 0))
13060 return fold_build2_loc (loc, code, type, arg00,
13061 fold_convert_loc (loc, TREE_TYPE (arg00),
13062 arg10));
13063 if (operand_equal_p (arg01, arg10, 0))
13064 return fold_build2_loc (loc, code, type, arg00,
13065 fold_convert_loc (loc, TREE_TYPE (arg00),
13066 arg11));
13067 if (operand_equal_p (arg00, arg11, 0))
13068 return fold_build2_loc (loc, code, type, arg01,
13069 fold_convert_loc (loc, TREE_TYPE (arg01),
13070 arg10));
13071 if (operand_equal_p (arg00, arg10, 0))
13072 return fold_build2_loc (loc, code, type, arg01,
13073 fold_convert_loc (loc, TREE_TYPE (arg01),
13074 arg11));
13076 /* Optimize (X ^ C1) op (Y ^ C2) as (X ^ (C1 ^ C2)) op Y. */
13077 if (TREE_CODE (arg01) == INTEGER_CST
13078 && TREE_CODE (arg11) == INTEGER_CST)
13080 tem = fold_build2_loc (loc, BIT_XOR_EXPR, itype, arg01,
13081 fold_convert_loc (loc, itype, arg11));
13082 tem = fold_build2_loc (loc, BIT_XOR_EXPR, itype, arg00, tem);
13083 return fold_build2_loc (loc, code, type, tem,
13084 fold_convert_loc (loc, itype, arg10));
13088 /* Attempt to simplify equality/inequality comparisons of complex
13089 values. Only lower the comparison if the result is known or
13090 can be simplified to a single scalar comparison. */
13091 if ((TREE_CODE (arg0) == COMPLEX_EXPR
13092 || TREE_CODE (arg0) == COMPLEX_CST)
13093 && (TREE_CODE (arg1) == COMPLEX_EXPR
13094 || TREE_CODE (arg1) == COMPLEX_CST))
13096 tree real0, imag0, real1, imag1;
13097 tree rcond, icond;
13099 if (TREE_CODE (arg0) == COMPLEX_EXPR)
13101 real0 = TREE_OPERAND (arg0, 0);
13102 imag0 = TREE_OPERAND (arg0, 1);
13104 else
13106 real0 = TREE_REALPART (arg0);
13107 imag0 = TREE_IMAGPART (arg0);
13110 if (TREE_CODE (arg1) == COMPLEX_EXPR)
13112 real1 = TREE_OPERAND (arg1, 0);
13113 imag1 = TREE_OPERAND (arg1, 1);
13115 else
13117 real1 = TREE_REALPART (arg1);
13118 imag1 = TREE_IMAGPART (arg1);
13121 rcond = fold_binary_loc (loc, code, type, real0, real1);
13122 if (rcond && TREE_CODE (rcond) == INTEGER_CST)
13124 if (integer_zerop (rcond))
13126 if (code == EQ_EXPR)
13127 return omit_two_operands_loc (loc, type, boolean_false_node,
13128 imag0, imag1);
13129 return fold_build2_loc (loc, NE_EXPR, type, imag0, imag1);
13131 else
13133 if (code == NE_EXPR)
13134 return omit_two_operands_loc (loc, type, boolean_true_node,
13135 imag0, imag1);
13136 return fold_build2_loc (loc, EQ_EXPR, type, imag0, imag1);
13140 icond = fold_binary_loc (loc, code, type, imag0, imag1);
13141 if (icond && TREE_CODE (icond) == INTEGER_CST)
13143 if (integer_zerop (icond))
13145 if (code == EQ_EXPR)
13146 return omit_two_operands_loc (loc, type, boolean_false_node,
13147 real0, real1);
13148 return fold_build2_loc (loc, NE_EXPR, type, real0, real1);
13150 else
13152 if (code == NE_EXPR)
13153 return omit_two_operands_loc (loc, type, boolean_true_node,
13154 real0, real1);
13155 return fold_build2_loc (loc, EQ_EXPR, type, real0, real1);
13160 return NULL_TREE;
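/* Illustrative, standalone sketch (not part of fold-const.c): testing
   a signed modulus by a power of two against zero gives the same
   answer as the unsigned modulus, which justifies the
   unsigned_type_for rewrite above.  C99's "%" truncates toward zero,
   so this demo only exercises the TRUNC_MOD_EXPR case of the fold.  */
#include <assert.h>

static void
signed_mod_pow2_demo (void)
{
  for (int x = -8; x <= 8; x++)
    assert ((x % 4 == 0) == ((unsigned) x % 4u == 0));
}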
13162 case LT_EXPR:
13163 case GT_EXPR:
13164 case LE_EXPR:
13165 case GE_EXPR:
13166 tem = fold_comparison (loc, code, type, op0, op1);
13167 if (tem != NULL_TREE)
13168 return tem;
13170 /* Transform comparisons of the form X +- C CMP X. */
13171 if ((TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
13172 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
13173 && ((TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
13174 && !HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0))))
13175 || (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
13176 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))))
13178 tree arg01 = TREE_OPERAND (arg0, 1);
13179 enum tree_code code0 = TREE_CODE (arg0);
13180 int is_positive;
13182 if (TREE_CODE (arg01) == REAL_CST)
13183 is_positive = REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg01)) ? -1 : 1;
13184 else
13185 is_positive = tree_int_cst_sgn (arg01);
13187 /* (X - c) > X becomes false. */
13188 if (code == GT_EXPR
13189 && ((code0 == MINUS_EXPR && is_positive >= 0)
13190 || (code0 == PLUS_EXPR && is_positive <= 0)))
13192 if (TREE_CODE (arg01) == INTEGER_CST
13193 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
13194 fold_overflow_warning (("assuming signed overflow does not "
13195 "occur when assuming that (X - c) > X "
13196 "is always false"),
13197 WARN_STRICT_OVERFLOW_ALL);
13198 return constant_boolean_node (0, type);
13201 /* Likewise (X + c) < X becomes false. */
13202 if (code == LT_EXPR
13203 && ((code0 == PLUS_EXPR && is_positive >= 0)
13204 || (code0 == MINUS_EXPR && is_positive <= 0)))
13206 if (TREE_CODE (arg01) == INTEGER_CST
13207 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
13208 fold_overflow_warning (("assuming signed overflow does not "
13209 "occur when assuming that "
13210 "(X + c) < X is always false"),
13211 WARN_STRICT_OVERFLOW_ALL);
13212 return constant_boolean_node (0, type);
13215 /* Convert (X - c) <= X to true. */
13216 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1)))
13217 && code == LE_EXPR
13218 && ((code0 == MINUS_EXPR && is_positive >= 0)
13219 || (code0 == PLUS_EXPR && is_positive <= 0)))
13221 if (TREE_CODE (arg01) == INTEGER_CST
13222 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
13223 fold_overflow_warning (("assuming signed overflow does not "
13224 "occur when assuming that "
13225 "(X - c) <= X is always true"),
13226 WARN_STRICT_OVERFLOW_ALL);
13227 return constant_boolean_node (1, type);
13230 /* Convert (X + c) >= X to true. */
13231 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1)))
13232 && code == GE_EXPR
13233 && ((code0 == PLUS_EXPR && is_positive >= 0)
13234 || (code0 == MINUS_EXPR && is_positive <= 0)))
13236 if (TREE_CODE (arg01) == INTEGER_CST
13237 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
13238 fold_overflow_warning (("assuming signed overflow does not "
13239 "occur when assuming that "
13240 "(X + c) >= X is always true"),
13241 WARN_STRICT_OVERFLOW_ALL);
13242 return constant_boolean_node (1, type);
13245 if (TREE_CODE (arg01) == INTEGER_CST)
13247 /* Convert X + c > X and X - c < X to true for integers. */
13248 if (code == GT_EXPR
13249 && ((code0 == PLUS_EXPR && is_positive > 0)
13250 || (code0 == MINUS_EXPR && is_positive < 0)))
13252 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
13253 fold_overflow_warning (("assuming signed overflow does "
13254 "not occur when assuming that "
13255 "(X + c) > X is always true"),
13256 WARN_STRICT_OVERFLOW_ALL);
13257 return constant_boolean_node (1, type);
13260 if (code == LT_EXPR
13261 && ((code0 == MINUS_EXPR && is_positive > 0)
13262 || (code0 == PLUS_EXPR && is_positive < 0)))
13264 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
13265 fold_overflow_warning (("assuming signed overflow does "
13266 "not occur when assuming that "
13267 "(X - c) < X is always true"),
13268 WARN_STRICT_OVERFLOW_ALL);
13269 return constant_boolean_node (1, type);
13272 /* Convert X + c <= X and X - c >= X to false for integers. */
13273 if (code == LE_EXPR
13274 && ((code0 == PLUS_EXPR && is_positive > 0)
13275 || (code0 == MINUS_EXPR && is_positive < 0)))
13277 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
13278 fold_overflow_warning (("assuming signed overflow does "
13279 "not occur when assuming that "
13280 "(X + c) <= X is always false"),
13281 WARN_STRICT_OVERFLOW_ALL);
13282 return constant_boolean_node (0, type);
13285 if (code == GE_EXPR
13286 && ((code0 == MINUS_EXPR && is_positive > 0)
13287 || (code0 == PLUS_EXPR && is_positive < 0)))
13289 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
13290 fold_overflow_warning (("assuming signed overflow does "
13291 "not occur when assuming that "
13292 "(X - c) >= X is always false"),
13293 WARN_STRICT_OVERFLOW_ALL);
13294 return constant_boolean_node (0, type);
13299 /* Comparisons with the highest or lowest possible integer of
13300 the specified precision will have known values. */
13302 tree arg1_type = TREE_TYPE (arg1);
13303 unsigned int width = TYPE_PRECISION (arg1_type);
13305 if (TREE_CODE (arg1) == INTEGER_CST
13306 && width <= HOST_BITS_PER_DOUBLE_INT
13307 && (INTEGRAL_TYPE_P (arg1_type) || POINTER_TYPE_P (arg1_type)))
13309 HOST_WIDE_INT signed_max_hi;
13310 unsigned HOST_WIDE_INT signed_max_lo;
13311 unsigned HOST_WIDE_INT max_hi, max_lo, min_hi, min_lo;
13313 if (width <= HOST_BITS_PER_WIDE_INT)
13315 signed_max_lo = ((unsigned HOST_WIDE_INT) 1 << (width - 1))
13316 - 1;
13317 signed_max_hi = 0;
13318 max_hi = 0;
13320 if (TYPE_UNSIGNED (arg1_type))
13322 max_lo = ((unsigned HOST_WIDE_INT) 2 << (width - 1)) - 1;
13323 min_lo = 0;
13324 min_hi = 0;
13326 else
13328 max_lo = signed_max_lo;
13329 min_lo = ((unsigned HOST_WIDE_INT) -1 << (width - 1));
13330 min_hi = -1;
13333 else
13335 width -= HOST_BITS_PER_WIDE_INT;
13336 signed_max_lo = -1;
13337 signed_max_hi = ((unsigned HOST_WIDE_INT) 1 << (width - 1))
13338 - 1;
13339 max_lo = -1;
13340 min_lo = 0;
13342 if (TYPE_UNSIGNED (arg1_type))
13344 max_hi = ((unsigned HOST_WIDE_INT) 2 << (width - 1)) - 1;
13345 min_hi = 0;
13347 else
13349 max_hi = signed_max_hi;
13350 min_hi = ((unsigned HOST_WIDE_INT) -1 << (width - 1));
13354 if ((unsigned HOST_WIDE_INT) TREE_INT_CST_HIGH (arg1) == max_hi
13355 && TREE_INT_CST_LOW (arg1) == max_lo)
13356 switch (code)
13358 case GT_EXPR:
13359 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
13361 case GE_EXPR:
13362 return fold_build2_loc (loc, EQ_EXPR, type, op0, op1);
13364 case LE_EXPR:
13365 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
13367 case LT_EXPR:
13368 return fold_build2_loc (loc, NE_EXPR, type, op0, op1);
13370 /* The GE_EXPR and LT_EXPR cases above are not normally
13371 reached because of previous transformations. */
13373 default:
13374 break;
13376 else if ((unsigned HOST_WIDE_INT) TREE_INT_CST_HIGH (arg1)
13377 == max_hi
13378 && TREE_INT_CST_LOW (arg1) == max_lo - 1)
13379 switch (code)
13381 case GT_EXPR:
13382 arg1 = const_binop (PLUS_EXPR, arg1,
13383 build_int_cst (TREE_TYPE (arg1), 1));
13384 return fold_build2_loc (loc, EQ_EXPR, type,
13385 fold_convert_loc (loc,
13386 TREE_TYPE (arg1), arg0),
13387 arg1);
13388 case LE_EXPR:
13389 arg1 = const_binop (PLUS_EXPR, arg1,
13390 build_int_cst (TREE_TYPE (arg1), 1));
13391 return fold_build2_loc (loc, NE_EXPR, type,
13392 fold_convert_loc (loc, TREE_TYPE (arg1),
13393 arg0),
13394 arg1);
13395 default:
13396 break;
13398 else if ((unsigned HOST_WIDE_INT) TREE_INT_CST_HIGH (arg1)
13399 == min_hi
13400 && TREE_INT_CST_LOW (arg1) == min_lo)
13401 switch (code)
13403 case LT_EXPR:
13404 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
13406 case LE_EXPR:
13407 return fold_build2_loc (loc, EQ_EXPR, type, op0, op1);
13409 case GE_EXPR:
13410 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
13412 case GT_EXPR:
13413 return fold_build2_loc (loc, NE_EXPR, type, op0, op1);
13415 default:
13416 break;
13418 else if ((unsigned HOST_WIDE_INT) TREE_INT_CST_HIGH (arg1)
13419 == min_hi
13420 && TREE_INT_CST_LOW (arg1) == min_lo + 1)
13421 switch (code)
13423 case GE_EXPR:
13424 arg1 = const_binop (MINUS_EXPR, arg1, integer_one_node);
13425 return fold_build2_loc (loc, NE_EXPR, type,
13426 fold_convert_loc (loc,
13427 TREE_TYPE (arg1), arg0),
13428 arg1);
13429 case LT_EXPR:
13430 arg1 = const_binop (MINUS_EXPR, arg1, integer_one_node);
13431 return fold_build2_loc (loc, EQ_EXPR, type,
13432 fold_convert_loc (loc, TREE_TYPE (arg1),
13433 arg0),
13434 arg1);
13435 default:
13436 break;
13439 else if (TREE_INT_CST_HIGH (arg1) == signed_max_hi
13440 && TREE_INT_CST_LOW (arg1) == signed_max_lo
13441 && TYPE_UNSIGNED (arg1_type)
13442 /* We will flip the signedness of the comparison operator
13443 associated with the mode of arg1, so the sign bit is
13444 specified by this mode. Check that arg1 is the signed
13445 max associated with this sign bit. */
13446 && width == GET_MODE_BITSIZE (TYPE_MODE (arg1_type))
13447 /* signed_type does not work on pointer types. */
13448 && INTEGRAL_TYPE_P (arg1_type))
13450 /* The following case also applies to X < signed_max+1
13451 and X >= signed_max+1 because of previous transformations. */
13452 if (code == LE_EXPR || code == GT_EXPR)
13454 tree st;
13455 st = signed_type_for (TREE_TYPE (arg1));
13456 return fold_build2_loc (loc,
13457 code == LE_EXPR ? GE_EXPR : LT_EXPR,
13458 type, fold_convert_loc (loc, st, arg0),
13459 build_int_cst (st, 0));
13465 /* If we are comparing an ABS_EXPR with a constant, we can
13466 convert all the cases into explicit comparisons, but they may
13467 well not be faster than doing the ABS and one comparison.
13468 But ABS (X) <= C is a range comparison, which becomes a subtraction
13469 and a comparison, and is probably faster. */
13470 if (code == LE_EXPR
13471 && TREE_CODE (arg1) == INTEGER_CST
13472 && TREE_CODE (arg0) == ABS_EXPR
13473 && ! TREE_SIDE_EFFECTS (arg0)
13474 && (0 != (tem = negate_expr (arg1)))
13475 && TREE_CODE (tem) == INTEGER_CST
13476 && !TREE_OVERFLOW (tem))
13477 return fold_build2_loc (loc, TRUTH_ANDIF_EXPR, type,
13478 build2 (GE_EXPR, type,
13479 TREE_OPERAND (arg0, 0), tem),
13480 build2 (LE_EXPR, type,
13481 TREE_OPERAND (arg0, 0), arg1));
13483 /* Convert ABS_EXPR<x> >= 0 to true. */
13484 strict_overflow_p = false;
13485 if (code == GE_EXPR
13486 && (integer_zerop (arg1)
13487 || (! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0)))
13488 && real_zerop (arg1)))
13489 && tree_expr_nonnegative_warnv_p (arg0, &strict_overflow_p))
13491 if (strict_overflow_p)
13492 fold_overflow_warning (("assuming signed overflow does not occur "
13493 "when simplifying comparison of "
13494 "absolute value and zero"),
13495 WARN_STRICT_OVERFLOW_CONDITIONAL);
13496 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
13499 /* Convert ABS_EXPR<x> < 0 to false. */
13500 strict_overflow_p = false;
13501 if (code == LT_EXPR
13502 && (integer_zerop (arg1) || real_zerop (arg1))
13503 && tree_expr_nonnegative_warnv_p (arg0, &strict_overflow_p))
13505 if (strict_overflow_p)
13506 fold_overflow_warning (("assuming signed overflow does not occur "
13507 "when simplifying comparison of "
13508 "absolute value and zero"),
13509 WARN_STRICT_OVERFLOW_CONDITIONAL);
13510 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
13513 /* If X is unsigned, convert X < (1 << Y) into X >> Y == 0
13514 and similarly for >= into !=. */
13515 if ((code == LT_EXPR || code == GE_EXPR)
13516 && TYPE_UNSIGNED (TREE_TYPE (arg0))
13517 && TREE_CODE (arg1) == LSHIFT_EXPR
13518 && integer_onep (TREE_OPERAND (arg1, 0)))
13519 return build2_loc (loc, code == LT_EXPR ? EQ_EXPR : NE_EXPR, type,
13520 build2 (RSHIFT_EXPR, TREE_TYPE (arg0), arg0,
13521 TREE_OPERAND (arg1, 1)),
13522 build_zero_cst (TREE_TYPE (arg0)));
13524 if ((code == LT_EXPR || code == GE_EXPR)
13525 && TYPE_UNSIGNED (TREE_TYPE (arg0))
13526 && CONVERT_EXPR_P (arg1)
13527 && TREE_CODE (TREE_OPERAND (arg1, 0)) == LSHIFT_EXPR
13528 && integer_onep (TREE_OPERAND (TREE_OPERAND (arg1, 0), 0)))
13530 tem = build2 (RSHIFT_EXPR, TREE_TYPE (arg0), arg0,
13531 TREE_OPERAND (TREE_OPERAND (arg1, 0), 1));
13532 return build2_loc (loc, code == LT_EXPR ? EQ_EXPR : NE_EXPR, type,
13533 fold_convert_loc (loc, TREE_TYPE (arg0), tem),
13534 build_zero_cst (TREE_TYPE (arg0)));
13537 return NULL_TREE;
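/* Illustrative, standalone sketch (not part of fold-const.c): for
   unsigned X, X < (1 << Y) holds exactly when X >> Y == 0, the rewrite
   used for the LT/GE cases above.  Exhaustive over a small range.  */
#include <assert.h>

static void
lt_pow2_demo (void)
{
  for (unsigned x = 0; x < 64; x++)
    for (unsigned y = 0; y < 6; y++)
      assert ((x < (1u << y)) == ((x >> y) == 0));
}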
13539 case UNORDERED_EXPR:
13540 case ORDERED_EXPR:
13541 case UNLT_EXPR:
13542 case UNLE_EXPR:
13543 case UNGT_EXPR:
13544 case UNGE_EXPR:
13545 case UNEQ_EXPR:
13546 case LTGT_EXPR:
13547 if (TREE_CODE (arg0) == REAL_CST && TREE_CODE (arg1) == REAL_CST)
13549 t1 = fold_relational_const (code, type, arg0, arg1);
13550 if (t1 != NULL_TREE)
13551 return t1;
13554 /* If the first operand is NaN, the result is constant. */
13555 if (TREE_CODE (arg0) == REAL_CST
13556 && REAL_VALUE_ISNAN (TREE_REAL_CST (arg0))
13557 && (code != LTGT_EXPR || ! flag_trapping_math))
13559 t1 = (code == ORDERED_EXPR || code == LTGT_EXPR)
13560 ? integer_zero_node
13561 : integer_one_node;
13562 return omit_one_operand_loc (loc, type, t1, arg1);
13565 /* If the second operand is NaN, the result is constant. */
13566 if (TREE_CODE (arg1) == REAL_CST
13567 && REAL_VALUE_ISNAN (TREE_REAL_CST (arg1))
13568 && (code != LTGT_EXPR || ! flag_trapping_math))
13570 t1 = (code == ORDERED_EXPR || code == LTGT_EXPR)
13571 ? integer_zero_node
13572 : integer_one_node;
13573 return omit_one_operand_loc (loc, type, t1, arg0);
13576 /* Simplify unordered comparison of something with itself. */
13577 if ((code == UNLE_EXPR || code == UNGE_EXPR || code == UNEQ_EXPR)
13578 && operand_equal_p (arg0, arg1, 0))
13579 return constant_boolean_node (1, type);
13581 if (code == LTGT_EXPR
13582 && !flag_trapping_math
13583 && operand_equal_p (arg0, arg1, 0))
13584 return constant_boolean_node (0, type);
13586 /* Fold (double)float1 CMP (double)float2 into float1 CMP float2. */
13588 tree targ0 = strip_float_extensions (arg0);
13589 tree targ1 = strip_float_extensions (arg1);
13590 tree newtype = TREE_TYPE (targ0);
13592 if (TYPE_PRECISION (TREE_TYPE (targ1)) > TYPE_PRECISION (newtype))
13593 newtype = TREE_TYPE (targ1);
13595 if (TYPE_PRECISION (newtype) < TYPE_PRECISION (TREE_TYPE (arg0)))
13596 return fold_build2_loc (loc, code, type,
13597 fold_convert_loc (loc, newtype, targ0),
13598 fold_convert_loc (loc, newtype, targ1));
13601 return NULL_TREE;
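/* Illustrative, standalone sketch (not part of fold-const.c): once
   either operand is a NaN, every ordered comparison is false and the
   unordered predicates are true, so the result no longer depends on
   the other operand -- the constant-folding done above.  */
#include <assert.h>
#include <math.h>

static void
nan_compare_demo (void)
{
  double x = 1.0, n = nan ("");
  assert (!(n < x) && !(n >= x) && !(n == x));  /* ordered: all false.  */
  assert (isunordered (n, x));                  /* UNORDERED: true.  */
}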
13603 case COMPOUND_EXPR:
13604 /* When pedantic, a compound expression can be neither an lvalue
13605 nor an integer constant expression. */
13606 if (TREE_SIDE_EFFECTS (arg0) || TREE_CONSTANT (arg1))
13607 return NULL_TREE;
13608 /* Don't let (0, 0) be a null pointer constant. */
13609 tem = integer_zerop (arg1) ? build1 (NOP_EXPR, type, arg1)
13610 : fold_convert_loc (loc, type, arg1);
13611 return pedantic_non_lvalue_loc (loc, tem);
13613 case COMPLEX_EXPR:
13614 if ((TREE_CODE (arg0) == REAL_CST
13615 && TREE_CODE (arg1) == REAL_CST)
13616 || (TREE_CODE (arg0) == INTEGER_CST
13617 && TREE_CODE (arg1) == INTEGER_CST))
13618 return build_complex (type, arg0, arg1);
13619 if (TREE_CODE (arg0) == REALPART_EXPR
13620 && TREE_CODE (arg1) == IMAGPART_EXPR
13621 && TREE_TYPE (TREE_OPERAND (arg0, 0)) == type
13622 && operand_equal_p (TREE_OPERAND (arg0, 0),
13623 TREE_OPERAND (arg1, 0), 0))
13624 return omit_one_operand_loc (loc, type, TREE_OPERAND (arg0, 0),
13625 TREE_OPERAND (arg1, 0));
13626 return NULL_TREE;
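/* Illustrative, standalone sketch (not part of fold-const.c):
   reassembling a complex value from its own real and imaginary parts
   yields the value itself, which is why COMPLEX_EXPR <REALPART_EXPR z,
   IMAGPART_EXPR z> folds back to z above.  */
#include <assert.h>
#include <complex.h>

static void
complex_reassemble_demo (void)
{
  double complex z = 3.0 + 4.0 * I;
  double complex w = creal (z) + cimag (z) * I;
  assert (w == z);
}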
13628 case ASSERT_EXPR:
13629 /* An ASSERT_EXPR should never be passed to fold_binary. */
13630 gcc_unreachable ();
13632 case VEC_PACK_TRUNC_EXPR:
13633 case VEC_PACK_FIX_TRUNC_EXPR:
13635 unsigned int nelts = TYPE_VECTOR_SUBPARTS (type), i;
13636 tree *elts;
13638 gcc_assert (TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0)) == nelts / 2
13639 && TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg1)) == nelts / 2);
13640 if (TREE_CODE (arg0) != VECTOR_CST || TREE_CODE (arg1) != VECTOR_CST)
13641 return NULL_TREE;
13643 elts = XALLOCAVEC (tree, nelts);
13644 if (!vec_cst_ctor_to_array (arg0, elts)
13645 || !vec_cst_ctor_to_array (arg1, elts + nelts / 2))
13646 return NULL_TREE;
13648 for (i = 0; i < nelts; i++)
13650 elts[i] = fold_convert_const (code == VEC_PACK_TRUNC_EXPR
13651 ? NOP_EXPR : FIX_TRUNC_EXPR,
13652 TREE_TYPE (type), elts[i]);
13653 if (elts[i] == NULL_TREE || !CONSTANT_CLASS_P (elts[i]))
13654 return NULL_TREE;
13657 return build_vector (type, elts);
13660 case VEC_WIDEN_MULT_LO_EXPR:
13661 case VEC_WIDEN_MULT_HI_EXPR:
13662 case VEC_WIDEN_MULT_EVEN_EXPR:
13663 case VEC_WIDEN_MULT_ODD_EXPR:
13665 unsigned int nelts = TYPE_VECTOR_SUBPARTS (type);
13666 unsigned int out, ofs, scale;
13667 tree *elts;
13669 gcc_assert (TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0)) == nelts * 2
13670 && TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg1)) == nelts * 2);
13671 if (TREE_CODE (arg0) != VECTOR_CST || TREE_CODE (arg1) != VECTOR_CST)
13672 return NULL_TREE;
13674 elts = XALLOCAVEC (tree, nelts * 4);
13675 if (!vec_cst_ctor_to_array (arg0, elts)
13676 || !vec_cst_ctor_to_array (arg1, elts + nelts * 2))
13677 return NULL_TREE;
13679 if (code == VEC_WIDEN_MULT_LO_EXPR)
13680 scale = 0, ofs = BYTES_BIG_ENDIAN ? nelts : 0;
13681 else if (code == VEC_WIDEN_MULT_HI_EXPR)
13682 scale = 0, ofs = BYTES_BIG_ENDIAN ? 0 : nelts;
13683 else if (code == VEC_WIDEN_MULT_EVEN_EXPR)
13684 scale = 1, ofs = 0;
13685 else /* if (code == VEC_WIDEN_MULT_ODD_EXPR) */
13686 scale = 1, ofs = 1;
13688 for (out = 0; out < nelts; out++)
13690 unsigned int in1 = (out << scale) + ofs;
13691 unsigned int in2 = in1 + nelts * 2;
13692 tree t1, t2;
13694 t1 = fold_convert_const (NOP_EXPR, TREE_TYPE (type), elts[in1]);
13695 t2 = fold_convert_const (NOP_EXPR, TREE_TYPE (type), elts[in2]);
13697 if (t1 == NULL_TREE || t2 == NULL_TREE)
13698 return NULL_TREE;
13699 elts[out] = const_binop (MULT_EXPR, t1, t2);
13700 if (elts[out] == NULL_TREE || !CONSTANT_CLASS_P (elts[out]))
13701 return NULL_TREE;
13704 return build_vector (type, elts);
13707 default:
13708 return NULL_TREE;
13709 } /* switch (code) */
13712 /* Callback for walk_tree, looking for LABEL_EXPR. Return *TP if it is
13713 a LABEL_EXPR; otherwise return NULL_TREE. Do not check the subtrees
13714 of GOTO_EXPR. */
13716 static tree
13717 contains_label_1 (tree *tp, int *walk_subtrees, void *data ATTRIBUTE_UNUSED)
13719 switch (TREE_CODE (*tp))
13721 case LABEL_EXPR:
13722 return *tp;
13724 case GOTO_EXPR:
13725 *walk_subtrees = 0;
13727 /* ... fall through ... */
13729 default:
13730 return NULL_TREE;
13734 /* Return whether the sub-tree ST contains a label which is accessible from
13735 outside the sub-tree. */
13737 static bool
13738 contains_label_p (tree st)
13740 return
13741 (walk_tree_without_duplicates (&st, contains_label_1, NULL) != NULL_TREE);
13744 /* Fold a ternary expression of code CODE and type TYPE with operands
13745 OP0, OP1, and OP2. Return the folded expression if folding is
13746 successful. Otherwise, return NULL_TREE. */
13748 tree
13749 fold_ternary_loc (location_t loc, enum tree_code code, tree type,
13750 tree op0, tree op1, tree op2)
13752 tree tem;
13753 tree arg0 = NULL_TREE, arg1 = NULL_TREE, arg2 = NULL_TREE;
13754 enum tree_code_class kind = TREE_CODE_CLASS (code);
13756 gcc_assert (IS_EXPR_CODE_CLASS (kind)
13757 && TREE_CODE_LENGTH (code) == 3);
13759 /* Strip any conversions that don't change the mode. This is safe
13760 for every expression, except for a comparison expression because
13761 its signedness is derived from its operands. So, in the latter
13762 case, only strip conversions that don't change the signedness.
13764 Note that this is done as an internal manipulation within the
13765 constant folder, in order to find the simplest representation of
13766 the arguments so that their form can be studied. In any case,
13767 the appropriate type conversions should be put back in the tree
13768 that will get out of the constant folder. */
13769 if (op0)
13771 arg0 = op0;
13772 STRIP_NOPS (arg0);
13775 if (op1)
13777 arg1 = op1;
13778 STRIP_NOPS (arg1);
13781 if (op2)
13783 arg2 = op2;
13784 STRIP_NOPS (arg2);
13787 switch (code)
13789 case COMPONENT_REF:
13790 if (TREE_CODE (arg0) == CONSTRUCTOR
13791 && ! type_contains_placeholder_p (TREE_TYPE (arg0)))
13793 unsigned HOST_WIDE_INT idx;
13794 tree field, value;
13795 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (arg0), idx, field, value)
13796 if (field == arg1)
13797 return value;
13799 return NULL_TREE;
13801 case COND_EXPR:
13802 /* Pedantic ANSI C says that a conditional expression is never an lvalue,
13803 so all simple results must be passed through pedantic_non_lvalue. */
13804 if (TREE_CODE (arg0) == INTEGER_CST)
13806 tree unused_op = integer_zerop (arg0) ? op1 : op2;
13807 tem = integer_zerop (arg0) ? op2 : op1;
13808 /* Only optimize constant conditions when the selected branch
13809 has the same type as the COND_EXPR. This avoids optimizing
13810 away "c ? x : throw", where the throw has a void type.
13811 Avoid throwing away the operand that contains a label. */
13812 if ((!TREE_SIDE_EFFECTS (unused_op)
13813 || !contains_label_p (unused_op))
13814 && (! VOID_TYPE_P (TREE_TYPE (tem))
13815 || VOID_TYPE_P (type)))
13816 return pedantic_non_lvalue_loc (loc, tem);
13817 return NULL_TREE;
13819 if (operand_equal_p (arg1, op2, 0))
13820 return pedantic_omit_one_operand_loc (loc, type, arg1, arg0);
13822 /* If we have A op B ? A : C, we may be able to convert this to a
13823 simpler expression, depending on the operation and the values
13824 of B and C. Signed zeros prevent all of these transformations,
13825 for reasons given above each one.
13827 Also try swapping the arguments and inverting the conditional. */
13828 if (COMPARISON_CLASS_P (arg0)
13829 && operand_equal_for_comparison_p (TREE_OPERAND (arg0, 0),
13830 arg1, TREE_OPERAND (arg0, 1))
13831 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg1))))
13833 tem = fold_cond_expr_with_comparison (loc, type, arg0, op1, op2);
13834 if (tem)
13835 return tem;
13838 if (COMPARISON_CLASS_P (arg0)
13839 && operand_equal_for_comparison_p (TREE_OPERAND (arg0, 0),
13840 op2,
13841 TREE_OPERAND (arg0, 1))
13842 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (op2))))
13844 location_t loc0 = expr_location_or (arg0, loc);
13845 tem = fold_truth_not_expr (loc0, arg0);
13846 if (tem && COMPARISON_CLASS_P (tem))
13848 tem = fold_cond_expr_with_comparison (loc, type, tem, op2, op1);
13849 if (tem)
13850 return tem;
13854 /* If the second operand is simpler than the third, swap them
13855 since that produces better jump optimization results. */
13856 if (truth_value_p (TREE_CODE (arg0))
13857 && tree_swap_operands_p (op1, op2, false))
13859 location_t loc0 = expr_location_or (arg0, loc);
13860 /* See if this can be inverted. If it can't, possibly because
13861 it was a floating-point inequality comparison, don't do
13862 anything. */
13863 tem = fold_truth_not_expr (loc0, arg0);
13864 if (tem)
13865 return fold_build3_loc (loc, code, type, tem, op2, op1);
13868 /* Convert A ? 1 : 0 to simply A. */
13869 if (integer_onep (op1)
13870 && integer_zerop (op2)
13871 /* If we try to convert OP0 to our type, the
13872 call to fold will try to move the conversion inside
13873 a COND, which will recurse. In that case, the COND_EXPR
13874 is probably the best choice, so leave it alone. */
13875 && type == TREE_TYPE (arg0))
13876 return pedantic_non_lvalue_loc (loc, arg0);
13878 /* Convert A ? 0 : 1 to !A. This prefers the use of NOT_EXPR
13879 over COND_EXPR in cases such as floating point comparisons. */
13880 if (integer_zerop (op1)
13881 && integer_onep (op2)
13882 && truth_value_p (TREE_CODE (arg0)))
13883 return pedantic_non_lvalue_loc (loc,
13884 fold_convert_loc (loc, type,
13885 invert_truthvalue_loc (loc,
13886 arg0)));
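/* Illustration: for integer X and Y, "X < Y ? 0 : 1" becomes
   "(int) (X >= Y)" once the inverted comparison is folded -- a plain
   truth value rather than a conditional (hypothetical operands).  */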
13888 /* A < 0 ? <sign bit of A> : 0 is simply (A & <sign bit of A>). */
13889 if (TREE_CODE (arg0) == LT_EXPR
13890 && integer_zerop (TREE_OPERAND (arg0, 1))
13891 && integer_zerop (op2)
13892 && (tem = sign_bit_p (TREE_OPERAND (arg0, 0), arg1)))
13894 /* sign_bit_p only checks ARG1 bits within A's precision.
13895 If <sign bit of A> has wider type than A, bits outside
13896 of A's precision in <sign bit of A> need to be checked.
13897 If they are all 0, this optimization needs to be done
13898 in unsigned A's type; if they are all 1, in signed A's type;
13899 otherwise the transformation can't be done. */
13900 if (TYPE_PRECISION (TREE_TYPE (tem))
13901 < TYPE_PRECISION (TREE_TYPE (arg1))
13902 && TYPE_PRECISION (TREE_TYPE (tem))
13903 < TYPE_PRECISION (type))
13905 unsigned HOST_WIDE_INT mask_lo;
13906 HOST_WIDE_INT mask_hi;
13907 int inner_width, outer_width;
13908 tree tem_type;
13910 inner_width = TYPE_PRECISION (TREE_TYPE (tem));
13911 outer_width = TYPE_PRECISION (TREE_TYPE (arg1));
13912 if (outer_width > TYPE_PRECISION (type))
13913 outer_width = TYPE_PRECISION (type);
13915 if (outer_width > HOST_BITS_PER_WIDE_INT)
13917 mask_hi = ((unsigned HOST_WIDE_INT) -1
13918 >> (HOST_BITS_PER_DOUBLE_INT - outer_width));
13919 mask_lo = -1;
13921 else
13923 mask_hi = 0;
13924 mask_lo = ((unsigned HOST_WIDE_INT) -1
13925 >> (HOST_BITS_PER_WIDE_INT - outer_width));
13927 if (inner_width > HOST_BITS_PER_WIDE_INT)
13929 mask_hi &= ~((unsigned HOST_WIDE_INT) -1
13930 >> (HOST_BITS_PER_WIDE_INT - inner_width));
13931 mask_lo = 0;
13933 else
13934 mask_lo &= ~((unsigned HOST_WIDE_INT) -1
13935 >> (HOST_BITS_PER_WIDE_INT - inner_width));
13937 if ((TREE_INT_CST_HIGH (arg1) & mask_hi) == mask_hi
13938 && (TREE_INT_CST_LOW (arg1) & mask_lo) == mask_lo)
13940 tem_type = signed_type_for (TREE_TYPE (tem));
13941 tem = fold_convert_loc (loc, tem_type, tem);
13943 else if ((TREE_INT_CST_HIGH (arg1) & mask_hi) == 0
13944 && (TREE_INT_CST_LOW (arg1) & mask_lo) == 0)
13946 tem_type = unsigned_type_for (TREE_TYPE (tem));
13947 tem = fold_convert_loc (loc, tem_type, tem);
13949 else
13950 tem = NULL;
13953 if (tem)
13954 return
13955 fold_convert_loc (loc, type,
13956 fold_build2_loc (loc, BIT_AND_EXPR,
13957 TREE_TYPE (tem), tem,
13958 fold_convert_loc (loc,
13959 TREE_TYPE (tem),
13960 arg1)));
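/* Illustration, assuming 32-bit int: "x < 0 ? 0x80000000u : 0u" has
   <sign bit of A> == 0x80000000, so the rule above folds it to
   "(unsigned) x & 0x80000000u" -- one AND instead of a compare and
   select (constants are hypothetical, chosen to show the mask).  */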
13963 /* (A >> N) & 1 ? (1 << N) : 0 is simply A & (1 << N). A & 1 was
13964 already handled above. */
13965 if (TREE_CODE (arg0) == BIT_AND_EXPR
13966 && integer_onep (TREE_OPERAND (arg0, 1))
13967 && integer_zerop (op2)
13968 && integer_pow2p (arg1))
13970 tree tem = TREE_OPERAND (arg0, 0);
13971 STRIP_NOPS (tem);
13972 if (TREE_CODE (tem) == RSHIFT_EXPR
13973 && TREE_CODE (TREE_OPERAND (tem, 1)) == INTEGER_CST
13974 && ((unsigned HOST_WIDE_INT) tree_log2 (arg1)
13975 == TREE_INT_CST_LOW (TREE_OPERAND (tem, 1))))
13976 return fold_build2_loc (loc, BIT_AND_EXPR, type,
13977 TREE_OPERAND (tem, 0), arg1);
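/* Illustration: "((x >> 3) & 1) ? 8 : 0" has N == 3 and
   1 << N == 8, so it folds to "x & 8" (hypothetical input).  */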
13980 /* A & N ? N : 0 is simply A & N if N is a power of two. This
13981 is probably obsolete because the first operand should be a
13982 truth value (that's why we have the two cases above), but let's
13983 leave it in until we can confirm this for all front-ends. */
13984 if (integer_zerop (op2)
13985 && TREE_CODE (arg0) == NE_EXPR
13986 && integer_zerop (TREE_OPERAND (arg0, 1))
13987 && integer_pow2p (arg1)
13988 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_AND_EXPR
13989 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
13990 arg1, OEP_ONLY_CONST))
13991 return pedantic_non_lvalue_loc (loc,
13992 fold_convert_loc (loc, type,
13993 TREE_OPERAND (arg0, 0)));
13995 /* Convert A ? B : 0 into A && B if A and B are truth values. */
13996 if (integer_zerop (op2)
13997 && truth_value_p (TREE_CODE (arg0))
13998 && truth_value_p (TREE_CODE (arg1)))
13999 return fold_build2_loc (loc, TRUTH_ANDIF_EXPR, type,
14000 fold_convert_loc (loc, type, arg0),
14001 arg1);
14003 /* Convert A ? B : 1 into !A || B if A and B are truth values. */
14004 if (integer_onep (op2)
14005 && truth_value_p (TREE_CODE (arg0))
14006 && truth_value_p (TREE_CODE (arg1)))
14008 location_t loc0 = expr_location_or (arg0, loc);
14009 /* Only perform transformation if ARG0 is easily inverted. */
14010 tem = fold_truth_not_expr (loc0, arg0);
14011 if (tem)
14012 return fold_build2_loc (loc, TRUTH_ORIF_EXPR, type,
14013 fold_convert_loc (loc, type, tem),
14014 arg1);
14017 /* Convert A ? 0 : B into !A && B if A and B are truth values. */
14018 if (integer_zerop (arg1)
14019 && truth_value_p (TREE_CODE (arg0))
14020 && truth_value_p (TREE_CODE (op2)))
14022 location_t loc0 = expr_location_or (arg0, loc);
14023 /* Only perform transformation if ARG0 is easily inverted. */
14024 tem = fold_truth_not_expr (loc0, arg0);
14025 if (tem)
14026 return fold_build2_loc (loc, TRUTH_ANDIF_EXPR, type,
14027 fold_convert_loc (loc, type, tem),
14028 op2);
14031 /* Convert A ? 1 : B into A || B if A and B are truth values. */
14032 if (integer_onep (arg1)
14033 && truth_value_p (TREE_CODE (arg0))
14034 && truth_value_p (TREE_CODE (op2)))
14035 return fold_build2_loc (loc, TRUTH_ORIF_EXPR, type,
14036 fold_convert_loc (loc, type, arg0),
14037 op2);
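/* Taken together, the four truth-value rewrites above give, for
   truth-valued A and B (a summary sketch):
     A ? B : 0  ->   A && B
     A ? B : 1  ->  !A || B
     A ? 0 : B  ->  !A && B
     A ? 1 : B  ->   A || B  */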
14039 return NULL_TREE;
14041 case CALL_EXPR:
14042 /* CALL_EXPRs used to be ternary exprs. Catch any mistaken uses
14043 of fold_ternary on them. */
14044 gcc_unreachable ();
14046 case BIT_FIELD_REF:
14047 if ((TREE_CODE (arg0) == VECTOR_CST
14048 || (TREE_CODE (arg0) == CONSTRUCTOR
14049 && TREE_CODE (TREE_TYPE (arg0)) == VECTOR_TYPE))
14050 && (type == TREE_TYPE (TREE_TYPE (arg0))
14051 || (TREE_CODE (type) == VECTOR_TYPE
14052 && TREE_TYPE (type) == TREE_TYPE (TREE_TYPE (arg0)))))
14054 tree eltype = TREE_TYPE (TREE_TYPE (arg0));
14055 unsigned HOST_WIDE_INT width = tree_low_cst (TYPE_SIZE (eltype), 1);
14056 unsigned HOST_WIDE_INT n = tree_low_cst (arg1, 1);
14057 unsigned HOST_WIDE_INT idx = tree_low_cst (op2, 1);
14059 if (n != 0
14060 && (idx % width) == 0
14061 && (n % width) == 0
14062 && ((idx + n) / width) <= TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0)))
14064 idx = idx / width;
14065 n = n / width;
14066 if (TREE_CODE (type) == VECTOR_TYPE)
14068 if (TREE_CODE (arg0) == VECTOR_CST)
14070 tree *vals = XALLOCAVEC (tree, n);
14071 unsigned i;
14072 for (i = 0; i < n; ++i)
14073 vals[i] = VECTOR_CST_ELT (arg0, idx + i);
14074 return build_vector (type, vals);
14076 else
14078 VEC(constructor_elt, gc) *vals;
14079 unsigned i;
14080 if (CONSTRUCTOR_NELTS (arg0) == 0)
14081 return build_constructor (type, NULL);
14082 vals = VEC_alloc (constructor_elt, gc, n);
14083 for (i = 0; i < n && idx + i < CONSTRUCTOR_NELTS (arg0);
14084 ++i)
14085 CONSTRUCTOR_APPEND_ELT (vals, NULL_TREE,
14086 CONSTRUCTOR_ELT
14087 (arg0, idx + i)->value);
14088 return build_constructor (type, vals);
14091 else if (n == 1)
14093 if (TREE_CODE (arg0) == VECTOR_CST)
14094 return VECTOR_CST_ELT (arg0, idx);
14095 else if (idx < CONSTRUCTOR_NELTS (arg0))
14096 return CONSTRUCTOR_ELT (arg0, idx)->value;
14097 return build_zero_cst (type);
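/* Illustration: for a constant V4SI vector v = {1, 2, 3, 4},
   BIT_FIELD_REF <v, 32, 64> -- 32 bits starting at bit 64, assuming
   32-bit elements -- lands on element 2 and folds to the constant 3
   (hypothetical vector).  */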
14102 /* A bit-field-ref that referenced the full argument can be stripped. */
14103 if (INTEGRAL_TYPE_P (TREE_TYPE (arg0))
14104 && TYPE_PRECISION (TREE_TYPE (arg0)) == tree_low_cst (arg1, 1)
14105 && integer_zerop (op2))
14106 return fold_convert_loc (loc, type, arg0);
14108 /* On constants we can use native encode/interpret to constant
14109 fold (nearly) all BIT_FIELD_REFs. */
14110 if (CONSTANT_CLASS_P (arg0)
14111 && can_native_interpret_type_p (type)
14112 && host_integerp (TYPE_SIZE_UNIT (TREE_TYPE (arg0)), 1)
14113 /* This limitation should not be necessary; we just need to
14114 round this up to mode size. */
14115 && tree_low_cst (op1, 1) % BITS_PER_UNIT == 0
14116 /* Need bit-shifting of the buffer to relax the following. */
14117 && tree_low_cst (op2, 1) % BITS_PER_UNIT == 0)
14119 unsigned HOST_WIDE_INT bitpos = tree_low_cst (op2, 1);
14120 unsigned HOST_WIDE_INT bitsize = tree_low_cst (op1, 1);
14121 unsigned HOST_WIDE_INT clen;
14122 clen = tree_low_cst (TYPE_SIZE_UNIT (TREE_TYPE (arg0)), 1);
14123 /* ??? We cannot tell native_encode_expr to start at
14124 an arbitrary byte offset, so limit ourselves to a
14125 reasonable amount of work. */
14126 if (clen <= 4096)
14128 unsigned char *b = XALLOCAVEC (unsigned char, clen);
14129 unsigned HOST_WIDE_INT len = native_encode_expr (arg0, b, clen);
14130 if (len > 0
14131 && len * BITS_PER_UNIT >= bitpos + bitsize)
14133 tree v = native_interpret_expr (type,
14134 b + bitpos / BITS_PER_UNIT,
14135 bitsize / BITS_PER_UNIT);
14136 if (v)
14137 return v;
14142 return NULL_TREE;
14144 case FMA_EXPR:
14145 /* For integer operands we can decompose the FMA where possible. */
14146 if (TREE_CODE (arg0) == INTEGER_CST
14147 && TREE_CODE (arg1) == INTEGER_CST)
14148 return fold_build2_loc (loc, PLUS_EXPR, type,
14149 const_binop (MULT_EXPR, arg0, arg1), arg2);
14150 if (integer_zerop (arg2))
14151 return fold_build2_loc (loc, MULT_EXPR, type, arg0, arg1);
14153 return fold_fma (loc, type, arg0, arg1, arg2);
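/* Illustration: FMA_EXPR <4, 5, z> with integer operands decomposes
   to 4 * 5 + z and folds to 20 + z, and FMA_EXPR <x, y, 0> becomes
   the plain product x * y (hypothetical operands).  */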
14155 case VEC_PERM_EXPR:
14156 if (TREE_CODE (arg2) == VECTOR_CST)
14158 unsigned int nelts = TYPE_VECTOR_SUBPARTS (type), i;
14159 unsigned char *sel = XALLOCAVEC (unsigned char, nelts);
14160 tree t;
14161 bool need_mask_canon = false;
14163 gcc_assert (nelts == VECTOR_CST_NELTS (arg2));
14164 for (i = 0; i < nelts; i++)
14166 tree val = VECTOR_CST_ELT (arg2, i);
14167 if (TREE_CODE (val) != INTEGER_CST)
14168 return NULL_TREE;
14170 sel[i] = TREE_INT_CST_LOW (val) & (2 * nelts - 1);
14171 if (TREE_INT_CST_HIGH (val)
14172 || ((unsigned HOST_WIDE_INT)
14173 TREE_INT_CST_LOW (val) != sel[i]))
14174 need_mask_canon = true;
14177 if ((TREE_CODE (arg0) == VECTOR_CST
14178 || TREE_CODE (arg0) == CONSTRUCTOR)
14179 && (TREE_CODE (arg1) == VECTOR_CST
14180 || TREE_CODE (arg1) == CONSTRUCTOR))
14182 t = fold_vec_perm (type, arg0, arg1, sel);
14183 if (t != NULL_TREE)
14184 return t;
14187 if (need_mask_canon && arg2 == op2)
14189 tree *tsel = XALLOCAVEC (tree, nelts);
14190 tree eltype = TREE_TYPE (TREE_TYPE (arg2));
14191 for (i = 0; i < nelts; i++)
14192 tsel[i] = build_int_cst (eltype, sel[i]);
14193 t = build_vector (TREE_TYPE (arg2), tsel);
14194 return build3_loc (loc, VEC_PERM_EXPR, type, op0, op1, t);
14197 return NULL_TREE;
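/* Illustration: with constant V4SI inputs {10,11,12,13} and
   {20,21,22,23}, the selector {0,5,2,7} indexes the concatenation of
   the two input vectors, so the VEC_PERM_EXPR folds to the constant
   {10,21,12,23} (hypothetical vectors).  */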
14199 default:
14200 return NULL_TREE;
14201 } /* switch (code) */
14204 /* Perform constant folding and related simplification of EXPR.
14205 The related simplifications include x*1 => x, x*0 => 0, etc.,
14206 and application of the associative law.
14207 NOP_EXPR conversions may be removed freely (as long as we
14208 are careful not to change the type of the overall expression).
14209 We cannot simplify through a CONVERT_EXPR, FIX_EXPR or FLOAT_EXPR,
14210 but we can constant-fold them if they have constant operands. */
14212 #ifdef ENABLE_FOLD_CHECKING
14213 # define fold(x) fold_1 (x)
14214 static tree fold_1 (tree);
14215 static
14216 #endif
14217 tree
14218 fold (tree expr)
14220 const tree t = expr;
14221 enum tree_code code = TREE_CODE (t);
14222 enum tree_code_class kind = TREE_CODE_CLASS (code);
14223 tree tem;
14224 location_t loc = EXPR_LOCATION (expr);
14226 /* Return right away if a constant. */
14227 if (kind == tcc_constant)
14228 return t;
14230 /* CALL_EXPR-like objects with variable numbers of operands are
14231 treated specially. */
14232 if (kind == tcc_vl_exp)
14234 if (code == CALL_EXPR)
14236 tem = fold_call_expr (loc, expr, false);
14237 return tem ? tem : expr;
14239 return expr;
14242 if (IS_EXPR_CODE_CLASS (kind))
14244 tree type = TREE_TYPE (t);
14245 tree op0, op1, op2;
14247 switch (TREE_CODE_LENGTH (code))
14249 case 1:
14250 op0 = TREE_OPERAND (t, 0);
14251 tem = fold_unary_loc (loc, code, type, op0);
14252 return tem ? tem : expr;
14253 case 2:
14254 op0 = TREE_OPERAND (t, 0);
14255 op1 = TREE_OPERAND (t, 1);
14256 tem = fold_binary_loc (loc, code, type, op0, op1);
14257 return tem ? tem : expr;
14258 case 3:
14259 op0 = TREE_OPERAND (t, 0);
14260 op1 = TREE_OPERAND (t, 1);
14261 op2 = TREE_OPERAND (t, 2);
14262 tem = fold_ternary_loc (loc, code, type, op0, op1, op2);
14263 return tem ? tem : expr;
14264 default:
14265 break;
14269 switch (code)
14271 case ARRAY_REF:
14273 tree op0 = TREE_OPERAND (t, 0);
14274 tree op1 = TREE_OPERAND (t, 1);
14276 if (TREE_CODE (op1) == INTEGER_CST
14277 && TREE_CODE (op0) == CONSTRUCTOR
14278 && ! type_contains_placeholder_p (TREE_TYPE (op0)))
14280 VEC(constructor_elt,gc) *elts = CONSTRUCTOR_ELTS (op0);
14281 unsigned HOST_WIDE_INT end = VEC_length (constructor_elt, elts);
14282 unsigned HOST_WIDE_INT begin = 0;
14284 /* Find a matching index by means of a binary search. */
14285 while (begin != end)
14287 unsigned HOST_WIDE_INT middle = (begin + end) / 2;
14288 tree index = VEC_index (constructor_elt, elts, middle).index;
14290 if (TREE_CODE (index) == INTEGER_CST
14291 && tree_int_cst_lt (index, op1))
14292 begin = middle + 1;
14293 else if (TREE_CODE (index) == INTEGER_CST
14294 && tree_int_cst_lt (op1, index))
14295 end = middle;
14296 else if (TREE_CODE (index) == RANGE_EXPR
14297 && tree_int_cst_lt (TREE_OPERAND (index, 1), op1))
14298 begin = middle + 1;
14299 else if (TREE_CODE (index) == RANGE_EXPR
14300 && tree_int_cst_lt (op1, TREE_OPERAND (index, 0)))
14301 end = middle;
14302 else
14303 return VEC_index (constructor_elt, elts, middle).value;
14307 return t;
14310 case CONST_DECL:
14311 return fold (DECL_INITIAL (t));
14313 default:
14314 return t;
14315 } /* switch (code) */
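/* A hypothetical caller, as a sketch: building and folding in two
   steps,

     tree t = fold (build2 (PLUS_EXPR, integer_type_node,
                            x, integer_zero_node));

   simplifies X + 0 to X (x being a hypothetical operand tree); new
   code normally uses fold_build2 (...), defined below, which fuses
   the two steps and avoids creating the unsimplified node.  */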
14318 #ifdef ENABLE_FOLD_CHECKING
14319 #undef fold
14321 static void fold_checksum_tree (const_tree, struct md5_ctx *, htab_t);
14322 static void fold_check_failed (const_tree, const_tree);
14323 void print_fold_checksum (const_tree);
14325 /* When GCC is configured with --enable-checking=fold, compute a digest
14326 of EXPR before and after the actual fold call, to verify that fold
14327 did not accidentally change the original expression. */
14329 tree
14330 fold (tree expr)
14332 tree ret;
14333 struct md5_ctx ctx;
14334 unsigned char checksum_before[16], checksum_after[16];
14335 htab_t ht;
14337 ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
14338 md5_init_ctx (&ctx);
14339 fold_checksum_tree (expr, &ctx, ht);
14340 md5_finish_ctx (&ctx, checksum_before);
14341 htab_empty (ht);
14343 ret = fold_1 (expr);
14345 md5_init_ctx (&ctx);
14346 fold_checksum_tree (expr, &ctx, ht);
14347 md5_finish_ctx (&ctx, checksum_after);
14348 htab_delete (ht);
14350 if (memcmp (checksum_before, checksum_after, 16))
14351 fold_check_failed (expr, ret);
14353 return ret;
14356 void
14357 print_fold_checksum (const_tree expr)
14359 struct md5_ctx ctx;
14360 unsigned char checksum[16], cnt;
14361 htab_t ht;
14363 ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
14364 md5_init_ctx (&ctx);
14365 fold_checksum_tree (expr, &ctx, ht);
14366 md5_finish_ctx (&ctx, checksum);
14367 htab_delete (ht);
14368 for (cnt = 0; cnt < 16; ++cnt)
14369 fprintf (stderr, "%02x", checksum[cnt]);
14370 putc ('\n', stderr);
14373 static void
14374 fold_check_failed (const_tree expr ATTRIBUTE_UNUSED, const_tree ret ATTRIBUTE_UNUSED)
14376 internal_error ("fold check: original tree changed by fold");
14379 static void
14380 fold_checksum_tree (const_tree expr, struct md5_ctx *ctx, htab_t ht)
14382 void **slot;
14383 enum tree_code code;
14384 union tree_node buf;
14385 int i, len;
14387 recursive_label:
14388 if (expr == NULL)
14389 return;
14390 slot = (void **) htab_find_slot (ht, expr, INSERT);
14391 if (*slot != NULL)
14392 return;
14393 *slot = CONST_CAST_TREE (expr);
14394 code = TREE_CODE (expr);
14395 if (TREE_CODE_CLASS (code) == tcc_declaration
14396 && DECL_ASSEMBLER_NAME_SET_P (expr))
14398 /* Allow DECL_ASSEMBLER_NAME to be modified. */
14399 memcpy ((char *) &buf, expr, tree_size (expr));
14400 SET_DECL_ASSEMBLER_NAME ((tree)&buf, NULL);
14401 expr = (tree) &buf;
14403 else if (TREE_CODE_CLASS (code) == tcc_type
14404 && (TYPE_POINTER_TO (expr)
14405 || TYPE_REFERENCE_TO (expr)
14406 || TYPE_CACHED_VALUES_P (expr)
14407 || TYPE_CONTAINS_PLACEHOLDER_INTERNAL (expr)
14408 || TYPE_NEXT_VARIANT (expr)))
14410 /* Allow these fields to be modified. */
14411 tree tmp;
14412 memcpy ((char *) &buf, expr, tree_size (expr));
14413 expr = tmp = (tree) &buf;
14414 TYPE_CONTAINS_PLACEHOLDER_INTERNAL (tmp) = 0;
14415 TYPE_POINTER_TO (tmp) = NULL;
14416 TYPE_REFERENCE_TO (tmp) = NULL;
14417 TYPE_NEXT_VARIANT (tmp) = NULL;
14418 if (TYPE_CACHED_VALUES_P (tmp))
14420 TYPE_CACHED_VALUES_P (tmp) = 0;
14421 TYPE_CACHED_VALUES (tmp) = NULL;
14424 md5_process_bytes (expr, tree_size (expr), ctx);
14425 if (CODE_CONTAINS_STRUCT (code, TS_TYPED))
14426 fold_checksum_tree (TREE_TYPE (expr), ctx, ht);
14427 if (TREE_CODE_CLASS (code) != tcc_type
14428 && TREE_CODE_CLASS (code) != tcc_declaration
14429 && code != TREE_LIST
14430 && code != SSA_NAME
14431 && CODE_CONTAINS_STRUCT (code, TS_COMMON))
14432 fold_checksum_tree (TREE_CHAIN (expr), ctx, ht);
14433 switch (TREE_CODE_CLASS (code))
14435 case tcc_constant:
14436 switch (code)
14438 case STRING_CST:
14439 md5_process_bytes (TREE_STRING_POINTER (expr),
14440 TREE_STRING_LENGTH (expr), ctx);
14441 break;
14442 case COMPLEX_CST:
14443 fold_checksum_tree (TREE_REALPART (expr), ctx, ht);
14444 fold_checksum_tree (TREE_IMAGPART (expr), ctx, ht);
14445 break;
14446 case VECTOR_CST:
14447 for (i = 0; i < (int) VECTOR_CST_NELTS (expr); ++i)
14448 fold_checksum_tree (VECTOR_CST_ELT (expr, i), ctx, ht);
14449 break;
14450 default:
14451 break;
14453 break;
14454 case tcc_exceptional:
14455 switch (code)
14457 case TREE_LIST:
14458 fold_checksum_tree (TREE_PURPOSE (expr), ctx, ht);
14459 fold_checksum_tree (TREE_VALUE (expr), ctx, ht);
14460 expr = TREE_CHAIN (expr);
14461 goto recursive_label;
14462 break;
14463 case TREE_VEC:
14464 for (i = 0; i < TREE_VEC_LENGTH (expr); ++i)
14465 fold_checksum_tree (TREE_VEC_ELT (expr, i), ctx, ht);
14466 break;
14467 default:
14468 break;
14470 break;
14471 case tcc_expression:
14472 case tcc_reference:
14473 case tcc_comparison:
14474 case tcc_unary:
14475 case tcc_binary:
14476 case tcc_statement:
14477 case tcc_vl_exp:
14478 len = TREE_OPERAND_LENGTH (expr);
14479 for (i = 0; i < len; ++i)
14480 fold_checksum_tree (TREE_OPERAND (expr, i), ctx, ht);
14481 break;
14482 case tcc_declaration:
14483 fold_checksum_tree (DECL_NAME (expr), ctx, ht);
14484 fold_checksum_tree (DECL_CONTEXT (expr), ctx, ht);
14485 if (CODE_CONTAINS_STRUCT (TREE_CODE (expr), TS_DECL_COMMON))
14487 fold_checksum_tree (DECL_SIZE (expr), ctx, ht);
14488 fold_checksum_tree (DECL_SIZE_UNIT (expr), ctx, ht);
14489 fold_checksum_tree (DECL_INITIAL (expr), ctx, ht);
14490 fold_checksum_tree (DECL_ABSTRACT_ORIGIN (expr), ctx, ht);
14491 fold_checksum_tree (DECL_ATTRIBUTES (expr), ctx, ht);
14493 if (CODE_CONTAINS_STRUCT (TREE_CODE (expr), TS_DECL_WITH_VIS))
14494 fold_checksum_tree (DECL_SECTION_NAME (expr), ctx, ht);
14496 if (CODE_CONTAINS_STRUCT (TREE_CODE (expr), TS_DECL_NON_COMMON))
14498 fold_checksum_tree (DECL_VINDEX (expr), ctx, ht);
14499 fold_checksum_tree (DECL_RESULT_FLD (expr), ctx, ht);
14500 fold_checksum_tree (DECL_ARGUMENT_FLD (expr), ctx, ht);
14502 break;
14503 case tcc_type:
14504 if (TREE_CODE (expr) == ENUMERAL_TYPE)
14505 fold_checksum_tree (TYPE_VALUES (expr), ctx, ht);
14506 fold_checksum_tree (TYPE_SIZE (expr), ctx, ht);
14507 fold_checksum_tree (TYPE_SIZE_UNIT (expr), ctx, ht);
14508 fold_checksum_tree (TYPE_ATTRIBUTES (expr), ctx, ht);
14509 fold_checksum_tree (TYPE_NAME (expr), ctx, ht);
14510 if (INTEGRAL_TYPE_P (expr)
14511 || SCALAR_FLOAT_TYPE_P (expr))
14513 fold_checksum_tree (TYPE_MIN_VALUE (expr), ctx, ht);
14514 fold_checksum_tree (TYPE_MAX_VALUE (expr), ctx, ht);
14516 fold_checksum_tree (TYPE_MAIN_VARIANT (expr), ctx, ht);
14517 if (TREE_CODE (expr) == RECORD_TYPE
14518 || TREE_CODE (expr) == UNION_TYPE
14519 || TREE_CODE (expr) == QUAL_UNION_TYPE)
14520 fold_checksum_tree (TYPE_BINFO (expr), ctx, ht);
14521 fold_checksum_tree (TYPE_CONTEXT (expr), ctx, ht);
14522 break;
14523 default:
14524 break;
14528 /* Helper function for outputting the checksum of a tree T. When
14529 debugging with gdb, you can "define mynext" to be "next" followed
14530 by "call debug_fold_checksum (op0)", then just trace down until the
14531 outputs differ. */
14533 DEBUG_FUNCTION void
14534 debug_fold_checksum (const_tree t)
14536 int i;
14537 unsigned char checksum[16];
14538 struct md5_ctx ctx;
14539 htab_t ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
14541 md5_init_ctx (&ctx);
14542 fold_checksum_tree (t, &ctx, ht);
14543 md5_finish_ctx (&ctx, checksum);
14544 htab_empty (ht);
14546 for (i = 0; i < 16; i++)
14547 fprintf (stderr, "%d ", checksum[i]);
14549 fprintf (stderr, "\n");
14552 #endif
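/* All of the ENABLE_FOLD_CHECKING machinery above is compiled in only
   when GCC itself is configured with --enable-checking=fold; on a
   checksum mismatch it aborts via fold_check_failed with "fold check:
   original tree changed by fold".  */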
14554 /* Fold a unary tree expression with code CODE of type TYPE with an
14555 operand OP0. LOC is the location of the resulting expression.
14556 Return a folded expression if successful. Otherwise, return a tree
14557 expression with code CODE of type TYPE with an operand OP0. */
14559 tree
14560 fold_build1_stat_loc (location_t loc,
14561 enum tree_code code, tree type, tree op0 MEM_STAT_DECL)
14563 tree tem;
14564 #ifdef ENABLE_FOLD_CHECKING
14565 unsigned char checksum_before[16], checksum_after[16];
14566 struct md5_ctx ctx;
14567 htab_t ht;
14569 ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
14570 md5_init_ctx (&ctx);
14571 fold_checksum_tree (op0, &ctx, ht);
14572 md5_finish_ctx (&ctx, checksum_before);
14573 htab_empty (ht);
14574 #endif
14576 tem = fold_unary_loc (loc, code, type, op0);
14577 if (!tem)
14578 tem = build1_stat_loc (loc, code, type, op0 PASS_MEM_STAT);
14580 #ifdef ENABLE_FOLD_CHECKING
14581 md5_init_ctx (&ctx);
14582 fold_checksum_tree (op0, &ctx, ht);
14583 md5_finish_ctx (&ctx, checksum_after);
14584 htab_delete (ht);
14586 if (memcmp (checksum_before, checksum_after, 16))
14587 fold_check_failed (op0, tem);
14588 #endif
14589 return tem;
14592 /* Fold a binary tree expression with code CODE of type TYPE with
14593 operands OP0 and OP1. LOC is the location of the resulting
14594 expression. Return a folded expression if successful. Otherwise,
14595 return a tree expression with code CODE of type TYPE with operands
14596 OP0 and OP1. */
14598 tree
14599 fold_build2_stat_loc (location_t loc,
14600 enum tree_code code, tree type, tree op0, tree op1
14601 MEM_STAT_DECL)
14603 tree tem;
14604 #ifdef ENABLE_FOLD_CHECKING
14605 unsigned char checksum_before_op0[16],
14606 checksum_before_op1[16],
14607 checksum_after_op0[16],
14608 checksum_after_op1[16];
14609 struct md5_ctx ctx;
14610 htab_t ht;
14612 ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
14613 md5_init_ctx (&ctx);
14614 fold_checksum_tree (op0, &ctx, ht);
14615 md5_finish_ctx (&ctx, checksum_before_op0);
14616 htab_empty (ht);
14618 md5_init_ctx (&ctx);
14619 fold_checksum_tree (op1, &ctx, ht);
14620 md5_finish_ctx (&ctx, checksum_before_op1);
14621 htab_empty (ht);
14622 #endif
14624 tem = fold_binary_loc (loc, code, type, op0, op1);
14625 if (!tem)
14626 tem = build2_stat_loc (loc, code, type, op0, op1 PASS_MEM_STAT);
14628 #ifdef ENABLE_FOLD_CHECKING
14629 md5_init_ctx (&ctx);
14630 fold_checksum_tree (op0, &ctx, ht);
14631 md5_finish_ctx (&ctx, checksum_after_op0);
14632 htab_empty (ht);
14634 if (memcmp (checksum_before_op0, checksum_after_op0, 16))
14635 fold_check_failed (op0, tem);
14637 md5_init_ctx (&ctx);
14638 fold_checksum_tree (op1, &ctx, ht);
14639 md5_finish_ctx (&ctx, checksum_after_op1);
14640 htab_delete (ht);
14642 if (memcmp (checksum_before_op1, checksum_after_op1, 16))
14643 fold_check_failed (op1, tem);
14644 #endif
14645 return tem;
14648 /* Fold a ternary tree expression with code CODE of type TYPE with
14649 operands OP0, OP1, and OP2. Return a folded expression if
14650 successful. Otherwise, return a tree expression with code CODE of
14651 type TYPE with operands OP0, OP1, and OP2. */
14653 tree
14654 fold_build3_stat_loc (location_t loc, enum tree_code code, tree type,
14655 tree op0, tree op1, tree op2 MEM_STAT_DECL)
14657 tree tem;
14658 #ifdef ENABLE_FOLD_CHECKING
14659 unsigned char checksum_before_op0[16],
14660 checksum_before_op1[16],
14661 checksum_before_op2[16],
14662 checksum_after_op0[16],
14663 checksum_after_op1[16],
14664 checksum_after_op2[16];
14665 struct md5_ctx ctx;
14666 htab_t ht;
14668 ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
14669 md5_init_ctx (&ctx);
14670 fold_checksum_tree (op0, &ctx, ht);
14671 md5_finish_ctx (&ctx, checksum_before_op0);
14672 htab_empty (ht);
14674 md5_init_ctx (&ctx);
14675 fold_checksum_tree (op1, &ctx, ht);
14676 md5_finish_ctx (&ctx, checksum_before_op1);
14677 htab_empty (ht);
14679 md5_init_ctx (&ctx);
14680 fold_checksum_tree (op2, &ctx, ht);
14681 md5_finish_ctx (&ctx, checksum_before_op2);
14682 htab_empty (ht);
14683 #endif
14685 gcc_assert (TREE_CODE_CLASS (code) != tcc_vl_exp);
14686 tem = fold_ternary_loc (loc, code, type, op0, op1, op2);
14687 if (!tem)
14688 tem = build3_stat_loc (loc, code, type, op0, op1, op2 PASS_MEM_STAT);
14690 #ifdef ENABLE_FOLD_CHECKING
14691 md5_init_ctx (&ctx);
14692 fold_checksum_tree (op0, &ctx, ht);
14693 md5_finish_ctx (&ctx, checksum_after_op0);
14694 htab_empty (ht);
14696 if (memcmp (checksum_before_op0, checksum_after_op0, 16))
14697 fold_check_failed (op0, tem);
14699 md5_init_ctx (&ctx);
14700 fold_checksum_tree (op1, &ctx, ht);
14701 md5_finish_ctx (&ctx, checksum_after_op1);
14702 htab_empty (ht);
14704 if (memcmp (checksum_before_op1, checksum_after_op1, 16))
14705 fold_check_failed (op1, tem);
14707 md5_init_ctx (&ctx);
14708 fold_checksum_tree (op2, &ctx, ht);
14709 md5_finish_ctx (&ctx, checksum_after_op2);
14710 htab_delete (ht);
14712 if (memcmp (checksum_before_op2, checksum_after_op2, 16))
14713 fold_check_failed (op2, tem);
14714 #endif
14715 return tem;
14718 /* Fold a CALL_EXPR expression of type TYPE, with function FN, the
14719 NARGS arguments in ARGARRAY, and a null static chain.
14720 Return a folded expression if successful. Otherwise, return a CALL_EXPR
14721 of type TYPE from the given operands as constructed by build_call_array. */
14723 tree
14724 fold_build_call_array_loc (location_t loc, tree type, tree fn,
14725 int nargs, tree *argarray)
14727 tree tem;
14728 #ifdef ENABLE_FOLD_CHECKING
14729 unsigned char checksum_before_fn[16],
14730 checksum_before_arglist[16],
14731 checksum_after_fn[16],
14732 checksum_after_arglist[16];
14733 struct md5_ctx ctx;
14734 htab_t ht;
14735 int i;
14737 ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
14738 md5_init_ctx (&ctx);
14739 fold_checksum_tree (fn, &ctx, ht);
14740 md5_finish_ctx (&ctx, checksum_before_fn);
14741 htab_empty (ht);
14743 md5_init_ctx (&ctx);
14744 for (i = 0; i < nargs; i++)
14745 fold_checksum_tree (argarray[i], &ctx, ht);
14746 md5_finish_ctx (&ctx, checksum_before_arglist);
14747 htab_empty (ht);
14748 #endif
14750 tem = fold_builtin_call_array (loc, type, fn, nargs, argarray);
14752 #ifdef ENABLE_FOLD_CHECKING
14753 md5_init_ctx (&ctx);
14754 fold_checksum_tree (fn, &ctx, ht);
14755 md5_finish_ctx (&ctx, checksum_after_fn);
14756 htab_empty (ht);
14758 if (memcmp (checksum_before_fn, checksum_after_fn, 16))
14759 fold_check_failed (fn, tem);
14761 md5_init_ctx (&ctx);
14762 for (i = 0; i < nargs; i++)
14763 fold_checksum_tree (argarray[i], &ctx, ht);
14764 md5_finish_ctx (&ctx, checksum_after_arglist);
14765 htab_delete (ht);
14767 if (memcmp (checksum_before_arglist, checksum_after_arglist, 16))
14768 fold_check_failed (NULL_TREE, tem);
14769 #endif
14770 return tem;
14773 /* Perform constant folding and related simplification of initializer
14774 expression EXPR. These behave identically to "fold_buildN" but ignore
14775 potential run-time traps and exceptions that fold must preserve. */
14777 #define START_FOLD_INIT \
14778 int saved_signaling_nans = flag_signaling_nans;\
14779 int saved_trapping_math = flag_trapping_math;\
14780 int saved_rounding_math = flag_rounding_math;\
14781 int saved_trapv = flag_trapv;\
14782 int saved_folding_initializer = folding_initializer;\
14783 flag_signaling_nans = 0;\
14784 flag_trapping_math = 0;\
14785 flag_rounding_math = 0;\
14786 flag_trapv = 0;\
14787 folding_initializer = 1;
14789 #define END_FOLD_INIT \
14790 flag_signaling_nans = saved_signaling_nans;\
14791 flag_trapping_math = saved_trapping_math;\
14792 flag_rounding_math = saved_rounding_math;\
14793 flag_trapv = saved_trapv;\
14794 folding_initializer = saved_folding_initializer;
14796 tree
14797 fold_build1_initializer_loc (location_t loc, enum tree_code code,
14798 tree type, tree op)
14800 tree result;
14801 START_FOLD_INIT;
14803 result = fold_build1_loc (loc, code, type, op);
14805 END_FOLD_INIT;
14806 return result;
14809 tree
14810 fold_build2_initializer_loc (location_t loc, enum tree_code code,
14811 tree type, tree op0, tree op1)
14813 tree result;
14814 START_FOLD_INIT;
14816 result = fold_build2_loc (loc, code, type, op0, op1);
14818 END_FOLD_INIT;
14819 return result;
14822 tree
14823 fold_build3_initializer_loc (location_t loc, enum tree_code code,
14824 tree type, tree op0, tree op1, tree op2)
14826 tree result;
14827 START_FOLD_INIT;
14829 result = fold_build3_loc (loc, code, type, op0, op1, op2);
14831 END_FOLD_INIT;
14832 return result;
14835 tree
14836 fold_build_call_array_initializer_loc (location_t loc, tree type, tree fn,
14837 int nargs, tree *argarray)
14839 tree result;
14840 START_FOLD_INIT;
14842 result = fold_build_call_array_loc (loc, type, fn, nargs, argarray);
14844 END_FOLD_INIT;
14845 return result;
14848 #undef START_FOLD_INIT
14849 #undef END_FOLD_INIT
14851 /* Determine whether the first argument is a multiple of the second.
14852 Return 0 if it is not, or if we cannot easily determine that it is.
14854 An example of the sort of thing we care about (at this point; this routine
14855 could surely be made more general, and expanded to do what the *_DIV_EXPR's
14856 fold cases do now) is discovering that
14858 SAVE_EXPR (I) * SAVE_EXPR (J * 8)
14860 is a multiple of
14862 SAVE_EXPR (J * 8)
14864 when we know that the two SAVE_EXPR (J * 8) nodes are the same node.
14866 This code also handles discovering that
14868 SAVE_EXPR (I) * SAVE_EXPR (J * 8)
14870 is a multiple of 8 so we don't have to worry about dealing with a
14871 possible remainder.
14873 Note that we *look* inside a SAVE_EXPR only to determine how it was
14874 calculated; it is not safe for fold to do much of anything else with the
14875 internals of a SAVE_EXPR, since it cannot know when it will be evaluated
14876 at run time. For instance, the latter example above *cannot* be implemented
14877 as SAVE_EXPR (I) * J or any variant thereof, since the value of J at
14878 evaluation time of the original SAVE_EXPR is not necessarily the same at
14879 the time the new expression is evaluated. The only optimization of this
14880 sort that would be valid is changing
14882 SAVE_EXPR (I) * SAVE_EXPR (SAVE_EXPR (J) * 8)
14884 divided by 8 to
14886 SAVE_EXPR (I) * SAVE_EXPR (J)
14888 (where the same SAVE_EXPR (J) is used in the original and the
14889 transformed version). */
14891 int
14892 multiple_of_p (tree type, const_tree top, const_tree bottom)
14894 if (operand_equal_p (top, bottom, 0))
14895 return 1;
14897 if (TREE_CODE (type) != INTEGER_TYPE)
14898 return 0;
14900 switch (TREE_CODE (top))
14902 case BIT_AND_EXPR:
14903 /* Bitwise and provides a power of two multiple. If the mask is
14904 a multiple of BOTTOM then TOP is a multiple of BOTTOM. */
14905 if (!integer_pow2p (bottom))
14906 return 0;
14907 /* FALLTHRU */
14909 case MULT_EXPR:
14910 return (multiple_of_p (type, TREE_OPERAND (top, 0), bottom)
14911 || multiple_of_p (type, TREE_OPERAND (top, 1), bottom));
14913 case PLUS_EXPR:
14914 case MINUS_EXPR:
14915 return (multiple_of_p (type, TREE_OPERAND (top, 0), bottom)
14916 && multiple_of_p (type, TREE_OPERAND (top, 1), bottom));
14918 case LSHIFT_EXPR:
14919 if (TREE_CODE (TREE_OPERAND (top, 1)) == INTEGER_CST)
14921 tree op1, t1;
14923 op1 = TREE_OPERAND (top, 1);
14924 /* const_binop may not detect overflow correctly,
14925 so check for it explicitly here. */
14926 if (TYPE_PRECISION (TREE_TYPE (size_one_node))
14927 > TREE_INT_CST_LOW (op1)
14928 && TREE_INT_CST_HIGH (op1) == 0
14929 && 0 != (t1 = fold_convert (type,
14930 const_binop (LSHIFT_EXPR,
14931 size_one_node,
14932 op1)))
14933 && !TREE_OVERFLOW (t1))
14934 return multiple_of_p (type, t1, bottom);
14936 return 0;
14938 case NOP_EXPR:
14939 /* Can't handle conversions from non-integral or wider integral type. */
14940 if ((TREE_CODE (TREE_TYPE (TREE_OPERAND (top, 0))) != INTEGER_TYPE)
14941 || (TYPE_PRECISION (type)
14942 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (top, 0)))))
14943 return 0;
14945 /* ... fall through ... */
14947 case SAVE_EXPR:
14948 return multiple_of_p (type, TREE_OPERAND (top, 0), bottom);
14950 case COND_EXPR:
14951 return (multiple_of_p (type, TREE_OPERAND (top, 1), bottom)
14952 && multiple_of_p (type, TREE_OPERAND (top, 2), bottom));
14954 case INTEGER_CST:
14955 if (TREE_CODE (bottom) != INTEGER_CST
14956 || integer_zerop (bottom)
14957 || (TYPE_UNSIGNED (type)
14958 && (tree_int_cst_sgn (top) < 0
14959 || tree_int_cst_sgn (bottom) < 0)))
14960 return 0;
14961 return integer_zerop (int_const_binop (TRUNC_MOD_EXPR,
14962 top, bottom));
14964 default:
14965 return 0;
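/* Illustration: multiple_of_p (sizetype, J * 8 + 16, 8) returns 1
   through the PLUS_EXPR and MULT_EXPR cases, while
   multiple_of_p (sizetype, I * J, 8) returns 0 because neither
   factor is known to be a multiple of 8 (hypothetical operands).  */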
14969 /* Return true if an expression of code CODE and type TYPE is known to be non-negative. */
14971 static bool
14972 tree_simple_nonnegative_warnv_p (enum tree_code code, tree type)
14974 if ((TYPE_PRECISION (type) != 1 || TYPE_UNSIGNED (type))
14975 && truth_value_p (code))
14976 /* Truth values evaluate to 0 or 1, which is nonnegative unless we
14977 have a signed:1 type (where the values are -1 and 0). */
14978 return true;
14979 return false;
14982 /* Return true if (CODE OP0) is known to be non-negative. If the return
14983 value is based on the assumption that signed overflow is undefined,
14984 set *STRICT_OVERFLOW_P to true; otherwise, don't change
14985 *STRICT_OVERFLOW_P. */
14987 bool
14988 tree_unary_nonnegative_warnv_p (enum tree_code code, tree type, tree op0,
14989 bool *strict_overflow_p)
14991 if (TYPE_UNSIGNED (type))
14992 return true;
14994 switch (code)
14996 case ABS_EXPR:
14997 /* We can't return true when signed overflow wraps, because
14998 ABS_EXPR<INT_MIN> == INT_MIN. */
14999 if (!INTEGRAL_TYPE_P (type))
15000 return true;
15001 if (TYPE_OVERFLOW_UNDEFINED (type))
15003 *strict_overflow_p = true;
15004 return true;
15006 break;
15008 case NON_LVALUE_EXPR:
15009 case FLOAT_EXPR:
15010 case FIX_TRUNC_EXPR:
15011 return tree_expr_nonnegative_warnv_p (op0,
15012 strict_overflow_p);
15014 case NOP_EXPR:
15016 tree inner_type = TREE_TYPE (op0);
15017 tree outer_type = type;
15019 if (TREE_CODE (outer_type) == REAL_TYPE)
15021 if (TREE_CODE (inner_type) == REAL_TYPE)
15022 return tree_expr_nonnegative_warnv_p (op0,
15023 strict_overflow_p);
15024 if (TREE_CODE (inner_type) == INTEGER_TYPE)
15026 if (TYPE_UNSIGNED (inner_type))
15027 return true;
15028 return tree_expr_nonnegative_warnv_p (op0,
15029 strict_overflow_p);
15032 else if (TREE_CODE (outer_type) == INTEGER_TYPE)
15034 if (TREE_CODE (inner_type) == REAL_TYPE)
15035 return tree_expr_nonnegative_warnv_p (op0,
15036 strict_overflow_p);
15037 if (TREE_CODE (inner_type) == INTEGER_TYPE)
15038 return TYPE_PRECISION (inner_type) < TYPE_PRECISION (outer_type)
15039 && TYPE_UNSIGNED (inner_type);
15042 break;
15044 default:
15045 return tree_simple_nonnegative_warnv_p (code, type);
15048 /* We don't know the sign of `t', so be conservative and return false. */
15049 return false;
15052 /* Return true if (CODE OP0 OP1) is known to be non-negative. If the return
15053 value is based on the assumption that signed overflow is undefined,
15054 set *STRICT_OVERFLOW_P to true; otherwise, don't change
15055 *STRICT_OVERFLOW_P. */
15057 bool
15058 tree_binary_nonnegative_warnv_p (enum tree_code code, tree type, tree op0,
15059 tree op1, bool *strict_overflow_p)
15061 if (TYPE_UNSIGNED (type))
15062 return true;
15064 switch (code)
15066 case POINTER_PLUS_EXPR:
15067 case PLUS_EXPR:
15068 if (FLOAT_TYPE_P (type))
15069 return (tree_expr_nonnegative_warnv_p (op0,
15070 strict_overflow_p)
15071 && tree_expr_nonnegative_warnv_p (op1,
15072 strict_overflow_p));
15074 /* zero_extend(x) + zero_extend(y) is non-negative if x and y are
15075 both unsigned and at least 2 bits shorter than the result. */
15076 if (TREE_CODE (type) == INTEGER_TYPE
15077 && TREE_CODE (op0) == NOP_EXPR
15078 && TREE_CODE (op1) == NOP_EXPR)
15080 tree inner1 = TREE_TYPE (TREE_OPERAND (op0, 0));
15081 tree inner2 = TREE_TYPE (TREE_OPERAND (op1, 0));
15082 if (TREE_CODE (inner1) == INTEGER_TYPE && TYPE_UNSIGNED (inner1)
15083 && TREE_CODE (inner2) == INTEGER_TYPE && TYPE_UNSIGNED (inner2))
15085 unsigned int prec = MAX (TYPE_PRECISION (inner1),
15086 TYPE_PRECISION (inner2)) + 1;
15087 return prec < TYPE_PRECISION (type);
15090 break;
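/* Illustration of the bound above, assuming 8-bit unsigned char and
   32-bit int: (int) uc1 + (int) uc2 is at most 255 + 255 == 510, and
   prec == MAX (8, 8) + 1 == 9 < 32, so the sum is known non-negative
   (hypothetical operands uc1, uc2).  */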
15092 case MULT_EXPR:
15093 if (FLOAT_TYPE_P (type))
15095 /* x * x for floating point x is always non-negative. */
15096 if (operand_equal_p (op0, op1, 0))
15097 return true;
15098 return (tree_expr_nonnegative_warnv_p (op0,
15099 strict_overflow_p)
15100 && tree_expr_nonnegative_warnv_p (op1,
15101 strict_overflow_p));
15104 /* zero_extend(x) * zero_extend(y) is non-negative if x and y are
15105 both unsigned and their combined precision is smaller than that of the result. */
15106 if (TREE_CODE (type) == INTEGER_TYPE
15107 && (TREE_CODE (op0) == NOP_EXPR || TREE_CODE (op0) == INTEGER_CST)
15108 && (TREE_CODE (op1) == NOP_EXPR || TREE_CODE (op1) == INTEGER_CST))
15110 tree inner0 = (TREE_CODE (op0) == NOP_EXPR)
15111 ? TREE_TYPE (TREE_OPERAND (op0, 0))
15112 : TREE_TYPE (op0);
15113 tree inner1 = (TREE_CODE (op1) == NOP_EXPR)
15114 ? TREE_TYPE (TREE_OPERAND (op1, 0))
15115 : TREE_TYPE (op1);
15117 bool unsigned0 = TYPE_UNSIGNED (inner0);
15118 bool unsigned1 = TYPE_UNSIGNED (inner1);
15120 if (TREE_CODE (op0) == INTEGER_CST)
15121 unsigned0 = unsigned0 || tree_int_cst_sgn (op0) >= 0;
15123 if (TREE_CODE (op1) == INTEGER_CST)
15124 unsigned1 = unsigned1 || tree_int_cst_sgn (op1) >= 0;
15126 if (TREE_CODE (inner0) == INTEGER_TYPE && unsigned0
15127 && TREE_CODE (inner1) == INTEGER_TYPE && unsigned1)
15129 unsigned int precision0 = (TREE_CODE (op0) == INTEGER_CST)
15130 ? tree_int_cst_min_precision (op0, /*unsignedp=*/true)
15131 : TYPE_PRECISION (inner0);
15133 unsigned int precision1 = (TREE_CODE (op1) == INTEGER_CST)
15134 ? tree_int_cst_min_precision (op1, /*unsignedp=*/true)
15135 : TYPE_PRECISION (inner1);
15137 return precision0 + precision1 < TYPE_PRECISION (type);
15140 return false;
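/* Illustration, assuming 16-bit unsigned short and 32-bit int:
   (int) us1 * (int) us2 can need 16 + 16 == 32 unsigned bits, which
   is not < 32, so the product may wrap negative and we answer false;
   with 8-bit operands, 8 + 8 == 16 < 32 and we answer true
   (hypothetical operands).  */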
15142 case BIT_AND_EXPR:
15143 case MAX_EXPR:
15144 return (tree_expr_nonnegative_warnv_p (op0,
15145 strict_overflow_p)
15146 || tree_expr_nonnegative_warnv_p (op1,
15147 strict_overflow_p));
15149 case BIT_IOR_EXPR:
15150 case BIT_XOR_EXPR:
15151 case MIN_EXPR:
15152 case RDIV_EXPR:
15153 case TRUNC_DIV_EXPR:
15154 case CEIL_DIV_EXPR:
15155 case FLOOR_DIV_EXPR:
15156 case ROUND_DIV_EXPR:
15157 return (tree_expr_nonnegative_warnv_p (op0,
15158 strict_overflow_p)
15159 && tree_expr_nonnegative_warnv_p (op1,
15160 strict_overflow_p));
15162 case TRUNC_MOD_EXPR:
15163 case CEIL_MOD_EXPR:
15164 case FLOOR_MOD_EXPR:
15165 case ROUND_MOD_EXPR:
15166 return tree_expr_nonnegative_warnv_p (op0,
15167 strict_overflow_p);
15168 default:
15169 return tree_simple_nonnegative_warnv_p (code, type);
15172 /* We don't know the sign of `t', so be conservative and return false. */
15173 return false;
15176 /* Return true if T is known to be non-negative. If the return
15177 value is based on the assumption that signed overflow is undefined,
15178 set *STRICT_OVERFLOW_P to true; otherwise, don't change
15179 *STRICT_OVERFLOW_P. */
15181 bool
15182 tree_single_nonnegative_warnv_p (tree t, bool *strict_overflow_p)
15184 if (TYPE_UNSIGNED (TREE_TYPE (t)))
15185 return true;
15187 switch (TREE_CODE (t))
15189 case INTEGER_CST:
15190 return tree_int_cst_sgn (t) >= 0;
15192 case REAL_CST:
15193 return ! REAL_VALUE_NEGATIVE (TREE_REAL_CST (t));
15195 case FIXED_CST:
15196 return ! FIXED_VALUE_NEGATIVE (TREE_FIXED_CST (t));
15198 case COND_EXPR:
15199 return (tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 1),
15200 strict_overflow_p)
15201 && tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 2),
15202 strict_overflow_p));
15203 default:
15204 return tree_simple_nonnegative_warnv_p (TREE_CODE (t),
15205 TREE_TYPE (t));
15207 /* We don't know the sign of `t', so be conservative and return false. */
15208 return false;
15211 /* Return true if T is known to be non-negative. If the return
15212 value is based on the assumption that signed overflow is undefined,
15213 set *STRICT_OVERFLOW_P to true; otherwise, don't change
15214 *STRICT_OVERFLOW_P. */
15216 bool
15217 tree_call_nonnegative_warnv_p (tree type, tree fndecl,
15218 tree arg0, tree arg1, bool *strict_overflow_p)
15220 if (fndecl && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL)
15221 switch (DECL_FUNCTION_CODE (fndecl))
15223 CASE_FLT_FN (BUILT_IN_ACOS):
15224 CASE_FLT_FN (BUILT_IN_ACOSH):
15225 CASE_FLT_FN (BUILT_IN_CABS):
15226 CASE_FLT_FN (BUILT_IN_COSH):
15227 CASE_FLT_FN (BUILT_IN_ERFC):
15228 CASE_FLT_FN (BUILT_IN_EXP):
15229 CASE_FLT_FN (BUILT_IN_EXP10):
15230 CASE_FLT_FN (BUILT_IN_EXP2):
15231 CASE_FLT_FN (BUILT_IN_FABS):
15232 CASE_FLT_FN (BUILT_IN_FDIM):
15233 CASE_FLT_FN (BUILT_IN_HYPOT):
15234 CASE_FLT_FN (BUILT_IN_POW10):
15235 CASE_INT_FN (BUILT_IN_FFS):
15236 CASE_INT_FN (BUILT_IN_PARITY):
15237 CASE_INT_FN (BUILT_IN_POPCOUNT):
15238 case BUILT_IN_BSWAP32:
15239 case BUILT_IN_BSWAP64:
15240 /* Always true. */
15241 return true;
15243 CASE_FLT_FN (BUILT_IN_SQRT):
15244 /* sqrt(-0.0) is -0.0. */
15245 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (type)))
15246 return true;
15247 return tree_expr_nonnegative_warnv_p (arg0,
15248 strict_overflow_p);
15250 CASE_FLT_FN (BUILT_IN_ASINH):
15251 CASE_FLT_FN (BUILT_IN_ATAN):
15252 CASE_FLT_FN (BUILT_IN_ATANH):
15253 CASE_FLT_FN (BUILT_IN_CBRT):
15254 CASE_FLT_FN (BUILT_IN_CEIL):
15255 CASE_FLT_FN (BUILT_IN_ERF):
15256 CASE_FLT_FN (BUILT_IN_EXPM1):
15257 CASE_FLT_FN (BUILT_IN_FLOOR):
15258 CASE_FLT_FN (BUILT_IN_FMOD):
15259 CASE_FLT_FN (BUILT_IN_FREXP):
15260 CASE_FLT_FN (BUILT_IN_ICEIL):
15261 CASE_FLT_FN (BUILT_IN_IFLOOR):
15262 CASE_FLT_FN (BUILT_IN_IRINT):
15263 CASE_FLT_FN (BUILT_IN_IROUND):
15264 CASE_FLT_FN (BUILT_IN_LCEIL):
15265 CASE_FLT_FN (BUILT_IN_LDEXP):
15266 CASE_FLT_FN (BUILT_IN_LFLOOR):
15267 CASE_FLT_FN (BUILT_IN_LLCEIL):
15268 CASE_FLT_FN (BUILT_IN_LLFLOOR):
15269 CASE_FLT_FN (BUILT_IN_LLRINT):
15270 CASE_FLT_FN (BUILT_IN_LLROUND):
15271 CASE_FLT_FN (BUILT_IN_LRINT):
15272 CASE_FLT_FN (BUILT_IN_LROUND):
15273 CASE_FLT_FN (BUILT_IN_MODF):
15274 CASE_FLT_FN (BUILT_IN_NEARBYINT):
15275 CASE_FLT_FN (BUILT_IN_RINT):
15276 CASE_FLT_FN (BUILT_IN_ROUND):
15277 CASE_FLT_FN (BUILT_IN_SCALB):
15278 CASE_FLT_FN (BUILT_IN_SCALBLN):
15279 CASE_FLT_FN (BUILT_IN_SCALBN):
15280 CASE_FLT_FN (BUILT_IN_SIGNBIT):
15281 CASE_FLT_FN (BUILT_IN_SIGNIFICAND):
15282 CASE_FLT_FN (BUILT_IN_SINH):
15283 CASE_FLT_FN (BUILT_IN_TANH):
15284 CASE_FLT_FN (BUILT_IN_TRUNC):
15285 /* True if the 1st argument is nonnegative. */
15286 return tree_expr_nonnegative_warnv_p (arg0,
15287 strict_overflow_p);
15289 CASE_FLT_FN (BUILT_IN_FMAX):
15290 /* True if the 1st OR the 2nd argument is nonnegative. */
15291 return (tree_expr_nonnegative_warnv_p (arg0,
15292 strict_overflow_p)
15293 || (tree_expr_nonnegative_warnv_p (arg1,
15294 strict_overflow_p)));
15296 CASE_FLT_FN (BUILT_IN_FMIN):
15297 /* True if the 1st AND 2nd arguments are nonnegative. */
15298 return (tree_expr_nonnegative_warnv_p (arg0,
15299 strict_overflow_p)
15300 && (tree_expr_nonnegative_warnv_p (arg1,
15301 strict_overflow_p)));
15303 CASE_FLT_FN (BUILT_IN_COPYSIGN):
15304 /* True if the 2nd argument is nonnegative. */
15305 return tree_expr_nonnegative_warnv_p (arg1,
15306 strict_overflow_p);
15308 CASE_FLT_FN (BUILT_IN_POWI):
15309 /* True if the 1st argument is nonnegative or the second
15310 argument is an even integer. */
15311 if (TREE_CODE (arg1) == INTEGER_CST
15312 && (TREE_INT_CST_LOW (arg1) & 1) == 0)
15313 return true;
15314 return tree_expr_nonnegative_warnv_p (arg0,
15315 strict_overflow_p);
15317 CASE_FLT_FN (BUILT_IN_POW):
15318 /* True if the 1st argument is nonnegative or the second
15319 argument is an even integer-valued real. */
15320 if (TREE_CODE (arg1) == REAL_CST)
15322 REAL_VALUE_TYPE c;
15323 HOST_WIDE_INT n;
15325 c = TREE_REAL_CST (arg1);
15326 n = real_to_integer (&c);
15327 if ((n & 1) == 0)
15329 REAL_VALUE_TYPE cint;
15330 real_from_integer (&cint, VOIDmode, n,
15331 n < 0 ? -1 : 0, 0);
15332 if (real_identical (&c, &cint))
15333 return true;
15336 return tree_expr_nonnegative_warnv_p (arg0,
15337 strict_overflow_p);
15339 default:
15340 break;
15342 return tree_simple_nonnegative_warnv_p (CALL_EXPR,
15343 type);
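/* Illustration: pow (x, 2.0) is recognized as non-negative here
   because 2.0 is an even integer-valued REAL_CST, whereas
   powi (x, 3) is non-negative only if x itself is
   (hypothetical calls).  */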
15346 /* Return true if T is known to be non-negative. If the return
15347 value is based on the assumption that signed overflow is undefined,
15348 set *STRICT_OVERFLOW_P to true; otherwise, don't change
15349 *STRICT_OVERFLOW_P. */
15351 bool
15352 tree_invalid_nonnegative_warnv_p (tree t, bool *strict_overflow_p)
15354 enum tree_code code = TREE_CODE (t);
15355 if (TYPE_UNSIGNED (TREE_TYPE (t)))
15356 return true;
15358 switch (code)
15360 case TARGET_EXPR:
15362 tree temp = TARGET_EXPR_SLOT (t);
15363 t = TARGET_EXPR_INITIAL (t);
15365 /* If the initializer is non-void, then it's a normal expression
15366 that will be assigned to the slot. */
15367 if (!VOID_TYPE_P (t))
15368 return tree_expr_nonnegative_warnv_p (t, strict_overflow_p);
15370 /* Otherwise, the initializer sets the slot in some way. One common
15371 way is an assignment statement at the end of the initializer. */
15372 while (1)
15374 if (TREE_CODE (t) == BIND_EXPR)
15375 t = expr_last (BIND_EXPR_BODY (t));
15376 else if (TREE_CODE (t) == TRY_FINALLY_EXPR
15377 || TREE_CODE (t) == TRY_CATCH_EXPR)
15378 t = expr_last (TREE_OPERAND (t, 0));
15379 else if (TREE_CODE (t) == STATEMENT_LIST)
15380 t = expr_last (t);
15381 else
15382 break;
15384 if (TREE_CODE (t) == MODIFY_EXPR
15385 && TREE_OPERAND (t, 0) == temp)
15386 return tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 1),
15387 strict_overflow_p);
15389 return false;
15392 case CALL_EXPR:
15394 tree arg0 = call_expr_nargs (t) > 0 ? CALL_EXPR_ARG (t, 0) : NULL_TREE;
15395 tree arg1 = call_expr_nargs (t) > 1 ? CALL_EXPR_ARG (t, 1) : NULL_TREE;
15397 return tree_call_nonnegative_warnv_p (TREE_TYPE (t),
15398 get_callee_fndecl (t),
15399 arg0,
15400 arg1,
15401 strict_overflow_p);
15403 case COMPOUND_EXPR:
15404 case MODIFY_EXPR:
15405 return tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 1),
15406 strict_overflow_p);
15407 case BIND_EXPR:
15408 return tree_expr_nonnegative_warnv_p (expr_last (TREE_OPERAND (t, 1)),
15409 strict_overflow_p);
15410 case SAVE_EXPR:
15411 return tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 0),
15412 strict_overflow_p);
15414 default:
15415 return tree_simple_nonnegative_warnv_p (TREE_CODE (t),
15416 TREE_TYPE (t));
15419 /* We don't know the sign of `t', so be conservative and return false. */
15420 return false;
15423 /* Return true if T is known to be non-negative. If the return
15424 value is based on the assumption that signed overflow is undefined,
15425 set *STRICT_OVERFLOW_P to true; otherwise, don't change
15426 *STRICT_OVERFLOW_P. */
15428 bool
15429 tree_expr_nonnegative_warnv_p (tree t, bool *strict_overflow_p)
15431 enum tree_code code;
15432 if (t == error_mark_node)
15433 return false;
15435 code = TREE_CODE (t);
15436 switch (TREE_CODE_CLASS (code))
15438 case tcc_binary:
15439 case tcc_comparison:
15440 return tree_binary_nonnegative_warnv_p (TREE_CODE (t),
15441 TREE_TYPE (t),
15442 TREE_OPERAND (t, 0),
15443 TREE_OPERAND (t, 1),
15444 strict_overflow_p);
15446 case tcc_unary:
15447 return tree_unary_nonnegative_warnv_p (TREE_CODE (t),
15448 TREE_TYPE (t),
15449 TREE_OPERAND (t, 0),
15450 strict_overflow_p);
15452 case tcc_constant:
15453 case tcc_declaration:
15454 case tcc_reference:
15455 return tree_single_nonnegative_warnv_p (t, strict_overflow_p);
15457 default:
15458 break;
15461 switch (code)
15463 case TRUTH_AND_EXPR:
15464 case TRUTH_OR_EXPR:
15465 case TRUTH_XOR_EXPR:
15466 return tree_binary_nonnegative_warnv_p (TREE_CODE (t),
15467 TREE_TYPE (t),
15468 TREE_OPERAND (t, 0),
15469 TREE_OPERAND (t, 1),
15470 strict_overflow_p);
15471 case TRUTH_NOT_EXPR:
15472 return tree_unary_nonnegative_warnv_p (TREE_CODE (t),
15473 TREE_TYPE (t),
15474 TREE_OPERAND (t, 0),
15475 strict_overflow_p);
15477 case COND_EXPR:
15478 case CONSTRUCTOR:
15479 case OBJ_TYPE_REF:
15480 case ASSERT_EXPR:
15481 case ADDR_EXPR:
15482 case WITH_SIZE_EXPR:
15483 case SSA_NAME:
15484 return tree_single_nonnegative_warnv_p (t, strict_overflow_p);
15486 default:
15487 return tree_invalid_nonnegative_warnv_p (t, strict_overflow_p);
15491 /* Return true if `t' is known to be non-negative. Handle warnings
15492 about undefined signed overflow. */
15494 bool
15495 tree_expr_nonnegative_p (tree t)
15497 bool ret, strict_overflow_p;
15499 strict_overflow_p = false;
15500 ret = tree_expr_nonnegative_warnv_p (t, &strict_overflow_p);
15501 if (strict_overflow_p)
15502 fold_overflow_warning (("assuming signed overflow does not occur when "
15503 "determining that expression is always "
15504 "non-negative"),
15505 WARN_STRICT_OVERFLOW_MISC);
15506 return ret;
15510 /* Return true when (CODE OP0) is known to be nonzero. For floating
15511 point we further ensure that the value is not denormal.
15512 Similar logic is present in nonzero_address in rtlanal.c.
15514 If the return value is based on the assumption that signed overflow
15515 is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
15516 change *STRICT_OVERFLOW_P. */
15518 bool
15519 tree_unary_nonzero_warnv_p (enum tree_code code, tree type, tree op0,
15520 bool *strict_overflow_p)
15522 switch (code)
15524 case ABS_EXPR:
15525 return tree_expr_nonzero_warnv_p (op0,
15526 strict_overflow_p);
15528 case NOP_EXPR:
15530 tree inner_type = TREE_TYPE (op0);
15531 tree outer_type = type;
15533 return (TYPE_PRECISION (outer_type) >= TYPE_PRECISION (inner_type)
15534 && tree_expr_nonzero_warnv_p (op0,
15535 strict_overflow_p));
15537 break;
15539 case NON_LVALUE_EXPR:
15540 return tree_expr_nonzero_warnv_p (op0,
15541 strict_overflow_p);
15543 default:
15544 break;
15547 return false;
15550 /* Return true when (CODE OP0 OP1) is known to be nonzero. For
15551 floating point we further ensure that the value is not denormal.
15552 Similar logic is present in nonzero_address in rtlanal.c.
15554 If the return value is based on the assumption that signed overflow
15555 is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
15556 change *STRICT_OVERFLOW_P. */
15558 bool
15559 tree_binary_nonzero_warnv_p (enum tree_code code,
15560 tree type,
15561 tree op0,
15562 tree op1, bool *strict_overflow_p)
15564 bool sub_strict_overflow_p;
15565 switch (code)
15567 case POINTER_PLUS_EXPR:
15568 case PLUS_EXPR:
15569 if (TYPE_OVERFLOW_UNDEFINED (type))
15571 /* In the presence of negative values it is hard
15572 to say anything definite. */
15573 sub_strict_overflow_p = false;
15574 if (!tree_expr_nonnegative_warnv_p (op0,
15575 &sub_strict_overflow_p)
15576 || !tree_expr_nonnegative_warnv_p (op1,
15577 &sub_strict_overflow_p))
15578 return false;
15579 /* One of the operands must be positive and the other non-negative. */
15580 /* We don't set *STRICT_OVERFLOW_P here: even if this value
15581 overflows, on a twos-complement machine the sum of two
15582 nonnegative numbers can never be zero. */
15583 return (tree_expr_nonzero_warnv_p (op0,
15584 strict_overflow_p)
15585 || tree_expr_nonzero_warnv_p (op1,
15586 strict_overflow_p));
15588 break;
15590 case MULT_EXPR:
15591 if (TYPE_OVERFLOW_UNDEFINED (type))
15593 if (tree_expr_nonzero_warnv_p (op0,
15594 strict_overflow_p)
15595 && tree_expr_nonzero_warnv_p (op1,
15596 strict_overflow_p))
15598 *strict_overflow_p = true;
15599 return true;
15602 break;
15604 case MIN_EXPR:
15605 sub_strict_overflow_p = false;
15606 if (tree_expr_nonzero_warnv_p (op0,
15607 &sub_strict_overflow_p)
15608 && tree_expr_nonzero_warnv_p (op1,
15609 &sub_strict_overflow_p))
15611 if (sub_strict_overflow_p)
15612 *strict_overflow_p = true;
15614 break;
15616 case MAX_EXPR:
15617 sub_strict_overflow_p = false;
15618 if (tree_expr_nonzero_warnv_p (op0,
15619 &sub_strict_overflow_p))
15621 if (sub_strict_overflow_p)
15622 *strict_overflow_p = true;
15624 /* When both operands are nonzero, MAX must be too. */
15625 if (tree_expr_nonzero_warnv_p (op1,
15626 strict_overflow_p))
15627 return true;
15629 /* MAX where operand 0 is positive is positive. */
15630 return tree_expr_nonnegative_warnv_p (op0,
15631 strict_overflow_p);
15633 /* MAX where operand 1 is positive is positive. */
15634 else if (tree_expr_nonzero_warnv_p (op1,
15635 &sub_strict_overflow_p)
15636 && tree_expr_nonnegative_warnv_p (op1,
15637 &sub_strict_overflow_p))
15639 if (sub_strict_overflow_p)
15640 *strict_overflow_p = true;
15641 return true;
15643 break;
15645 case BIT_IOR_EXPR:
15646 return (tree_expr_nonzero_warnv_p (op1,
15647 strict_overflow_p)
15648 || tree_expr_nonzero_warnv_p (op0,
15649 strict_overflow_p));
15651 default:
15652 break;
15655 return false;
15658 /* Return true when T is an address and is known to be nonzero.
15659 For floating point we further ensure that T is not denormal.
15660 Similar logic is present in nonzero_address in rtlanal.c.
15662 If the return value is based on the assumption that signed overflow
15663 is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
15664 change *STRICT_OVERFLOW_P. */
15666 bool
15667 tree_single_nonzero_warnv_p (tree t, bool *strict_overflow_p)
15669 bool sub_strict_overflow_p;
15670 switch (TREE_CODE (t))
15672 case INTEGER_CST:
15673 return !integer_zerop (t);
15675 case ADDR_EXPR:
15677 tree base = TREE_OPERAND (t, 0);
15678 if (!DECL_P (base))
15679 base = get_base_address (base);
15681 if (!base)
15682 return false;
15684 /* Weak declarations may link to NULL. Other things may also be NULL,
15685 so guard this with -fdelete-null-pointer-checks; variables
15686 allocated on the stack, however, are never NULL. */
15687 if (DECL_P (base)
15688 && (flag_delete_null_pointer_checks
15689 || (DECL_CONTEXT (base)
15690 && TREE_CODE (DECL_CONTEXT (base)) == FUNCTION_DECL
15691 && auto_var_in_fn_p (base, DECL_CONTEXT (base)))))
15692 return !VAR_OR_FUNCTION_DECL_P (base) || !DECL_WEAK (base);
15694 /* Constants are never weak. */
15695 if (CONSTANT_CLASS_P (base))
15696 return true;
15698 return false;
15701 case COND_EXPR:
15702 sub_strict_overflow_p = false;
15703 if (tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 1),
15704 &sub_strict_overflow_p)
15705 && tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 2),
15706 &sub_strict_overflow_p))
15708 if (sub_strict_overflow_p)
15709 *strict_overflow_p = true;
15710 return true;
15712 break;
15714 default:
15715 break;
15717 return false;
15720 /* Return true when the expression T is known to be nonzero. For
15721 floating point we further ensure that T is not denormal.
15722 Similar logic is present in nonzero_address in rtlanal.c.
15724 If the return value is based on the assumption that signed overflow
15725 is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
15726 change *STRICT_OVERFLOW_P. */
15728 bool
15729 tree_expr_nonzero_warnv_p (tree t, bool *strict_overflow_p)
15731 tree type = TREE_TYPE (t);
15732 enum tree_code code;
15734 /* Doing something useful for floating point would need more work. */
15735 if (!INTEGRAL_TYPE_P (type) && !POINTER_TYPE_P (type))
15736 return false;
15738 code = TREE_CODE (t);
15739 switch (TREE_CODE_CLASS (code))
15741 case tcc_unary:
15742 return tree_unary_nonzero_warnv_p (code, type, TREE_OPERAND (t, 0),
15743 strict_overflow_p);
15744 case tcc_binary:
15745 case tcc_comparison:
15746 return tree_binary_nonzero_warnv_p (code, type,
15747 TREE_OPERAND (t, 0),
15748 TREE_OPERAND (t, 1),
15749 strict_overflow_p);
15750 case tcc_constant:
15751 case tcc_declaration:
15752 case tcc_reference:
15753 return tree_single_nonzero_warnv_p (t, strict_overflow_p);
15755 default:
15756 break;
15759 switch (code)
15761 case TRUTH_NOT_EXPR:
15762 return tree_unary_nonzero_warnv_p (code, type, TREE_OPERAND (t, 0),
15763 strict_overflow_p);
15765 case TRUTH_AND_EXPR:
15766 case TRUTH_OR_EXPR:
15767 case TRUTH_XOR_EXPR:
15768 return tree_binary_nonzero_warnv_p (code, type,
15769 TREE_OPERAND (t, 0),
15770 TREE_OPERAND (t, 1),
15771 strict_overflow_p);
15773 case COND_EXPR:
15774 case CONSTRUCTOR:
15775 case OBJ_TYPE_REF:
15776 case ASSERT_EXPR:
15777 case ADDR_EXPR:
15778 case WITH_SIZE_EXPR:
15779 case SSA_NAME:
15780 return tree_single_nonzero_warnv_p (t, strict_overflow_p);
15782 case COMPOUND_EXPR:
15783 case MODIFY_EXPR:
15784 case BIND_EXPR:
15785 return tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 1),
15786 strict_overflow_p);
15788 case SAVE_EXPR:
15789 return tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 0),
15790 strict_overflow_p);
15792 case CALL_EXPR:
15793 return alloca_call_p (t);
15795 default:
15796 break;
15798 return false;
15801 /* Return true when the expression T is known to be nonzero. Handle
15802 warnings about undefined signed overflow. */
15804 bool
15805 tree_expr_nonzero_p (tree t)
15807 bool ret, strict_overflow_p;
15809 strict_overflow_p = false;
15810 ret = tree_expr_nonzero_warnv_p (t, &strict_overflow_p);
15811 if (strict_overflow_p)
15812 fold_overflow_warning (("assuming signed overflow does not occur when "
15813 "determining that expression is always "
15814 "non-zero"),
15815 WARN_STRICT_OVERFLOW_MISC);
15816 return ret;
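/* A minimal caller sketch (CODE, TYPE and OP are hypothetical
   placeholders): when folding a comparison of OP against zero,

     if (tree_expr_nonzero_p (op))
       return constant_boolean_node (code == NE_EXPR, type);

   resolves OP != 0 to true and OP == 0 to false, with any
   strict-overflow warning already emitted by the call itself.  */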
15819 /* Given the components of a binary expression CODE, TYPE, OP0 and OP1,
15820 attempt to fold the expression to a constant without modifying TYPE,
15821 OP0 or OP1.
15823 If the expression can be simplified to a constant, then return
15824 the constant. Otherwise, return NULL_TREE. */
15827 tree
15828 fold_binary_to_constant (enum tree_code code, tree type, tree op0, tree op1)
15830 tree tem = fold_binary (code, type, op0, op1);
15831 return (tem && TREE_CONSTANT (tem)) ? tem : NULL_TREE;
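/* A sketch of the constant case:

     tree sum = fold_binary_to_constant (PLUS_EXPR, integer_type_node,
                                         build_int_cst (integer_type_node, 2),
                                         build_int_cst (integer_type_node, 3));

   leaves SUM as the INTEGER_CST 5, whereas any operand that does not
   fold to a constant makes the call return NULL_TREE.  */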
15834 /* Given the components of a unary expression CODE, TYPE and OP0,
15835 attempt to fold the expression to a constant without modifying
15836 TYPE or OP0.
15838 If the expression can be simplified to a constant, then return
15839 the constant. Otherwise, return NULL_TREE. */
15842 tree
15843 fold_unary_to_constant (enum tree_code code, tree type, tree op0)
15845 tree tem = fold_unary (code, type, op0);
15846 return (tem && TREE_CONSTANT (tem)) ? tem : NULL_TREE;
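/* Likewise for the unary case, a sketch:

     tree neg = fold_unary_to_constant (NEGATE_EXPR, integer_type_node,
                                        build_int_cst (integer_type_node, 7));

   leaves NEG as the INTEGER_CST -7, and the call returns NULL_TREE
   whenever fold_unary cannot reduce the expression to a constant.  */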
15849 /* If EXP represents referencing an element in a constant string
15850 (either via pointer arithmetic or array indexing), return the
15851 tree representing the value accessed, otherwise return NULL. */
15853 tree
15854 fold_read_from_constant_string (tree exp)
15855 {
15856 if ((TREE_CODE (exp) == INDIRECT_REF
15857 || TREE_CODE (exp) == ARRAY_REF)
15858 && TREE_CODE (TREE_TYPE (exp)) == INTEGER_TYPE)
15859 {
15860 tree exp1 = TREE_OPERAND (exp, 0);
15861 tree index;
15862 tree string;
15863 location_t loc = EXPR_LOCATION (exp);
15865 if (TREE_CODE (exp) == INDIRECT_REF)
15866 string = string_constant (exp1, &index);
15867 else
15868 {
15869 tree low_bound = array_ref_low_bound (exp);
15870 index = fold_convert_loc (loc, sizetype, TREE_OPERAND (exp, 1));
15872 /* Optimize the special case of a zero lower bound.
15874 We convert the low_bound to sizetype to avoid some problems
15875 with constant folding. (E.g. suppose the lower bound is 1,
15876 and its mode is QI. Without the conversion, (ARRAY
15877 +(INDEX-(unsigned char)1)) becomes ((ARRAY+(-(unsigned char)1))
15878 +INDEX), which becomes (ARRAY+255+INDEX). Oops!) */
15879 if (! integer_zerop (low_bound))
15880 index = size_diffop_loc (loc, index,
15881 fold_convert_loc (loc, sizetype, low_bound));
15883 string = exp1;
15884 }
15886 if (string
15887 && TYPE_MODE (TREE_TYPE (exp)) == TYPE_MODE (TREE_TYPE (TREE_TYPE (string)))
15888 && TREE_CODE (string) == STRING_CST
15889 && TREE_CODE (index) == INTEGER_CST
15890 && compare_tree_int (index, TREE_STRING_LENGTH (string)) < 0
15891 && (GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_TYPE (string))))
15892 == MODE_INT)
15893 && (GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (TREE_TYPE (string)))) == 1))
15894 return build_int_cst_type (TREE_TYPE (exp),
15895 (TREE_STRING_POINTER (string)
15896 [TREE_INT_CST_LOW (index)]));
15897 }
15898 return NULL;
15899 }
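/* For example, for EXP representing "abc"[1] -- an ARRAY_REF of a
   STRING_CST with INTEGER_CST index 1, a zero lower bound and a
   one-byte integer element mode -- the function returns an
   INTEGER_CST holding 'b'; an INDIRECT_REF such as *("abc" + 2)
   goes through string_constant instead and yields 'c'.  */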
15901 /* Return the tree for neg (ARG0) when ARG0 is known to be either
15902 an integer constant, real, or fixed-point constant.
15904 TYPE is the type of the result. */
15906 static tree
15907 fold_negate_const (tree arg0, tree type)
15909 tree t = NULL_TREE;
15911 switch (TREE_CODE (arg0))
15913 case INTEGER_CST:
15914 {
15915 double_int val = tree_to_double_int (arg0);
15916 int overflow = neg_double (val.low, val.high, &val.low, &val.high);
15918 t = force_fit_type_double (type, val, 1,
15919 (overflow | TREE_OVERFLOW (arg0))
15920 && !TYPE_UNSIGNED (type));
15921 break;
15922 }
15924 case REAL_CST:
15925 t = build_real (type, real_value_negate (&TREE_REAL_CST (arg0)));
15926 break;
15928 case FIXED_CST:
15929 {
15930 FIXED_VALUE_TYPE f;
15931 bool overflow_p = fixed_arithmetic (&f, NEGATE_EXPR,
15932 &(TREE_FIXED_CST (arg0)), NULL,
15933 TYPE_SATURATING (type));
15934 t = build_fixed (type, f);
15935 /* Propagate overflow flags. */
15936 if (overflow_p | TREE_OVERFLOW (arg0))
15937 TREE_OVERFLOW (t) = 1;
15938 break;
15939 }
15941 default:
15942 gcc_unreachable ();
15945 return t;
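/* A sketch of the INTEGER_CST case:

     tree neg = fold_negate_const (build_int_cst (integer_type_node, 5),
                                   integer_type_node);

   yields the INTEGER_CST -5 with no overflow bit set, while negating
   the most negative value of a signed type wraps onto itself, so
   neg_double reports overflow and TREE_OVERFLOW is set on the
   result.  */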
15948 /* Return the tree for abs (ARG0) when ARG0 is known to be either
15949 an integer constant or real constant.
15951 TYPE is the type of the result. */
15953 tree
15954 fold_abs_const (tree arg0, tree type)
15956 tree t = NULL_TREE;
15958 switch (TREE_CODE (arg0))
15960 case INTEGER_CST:
15961 {
15962 double_int val = tree_to_double_int (arg0);
15964 /* If the value is unsigned or non-negative, then the absolute value
15965 is the same as the ordinary value. */
15966 if (TYPE_UNSIGNED (type)
15967 || !double_int_negative_p (val))
15968 t = arg0;
15970 /* If the value is negative, then the absolute value is
15971 its negation. */
15972 else
15973 {
15974 int overflow;
15976 overflow = neg_double (val.low, val.high, &val.low, &val.high);
15977 t = force_fit_type_double (type, val, -1,
15978 overflow | TREE_OVERFLOW (arg0));
15979 }
15980 }
15981 break;
15983 case REAL_CST:
15984 if (REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg0)))
15985 t = build_real (type, real_value_negate (&TREE_REAL_CST (arg0)));
15986 else
15987 t = arg0;
15988 break;
15990 default:
15991 gcc_unreachable ();
15994 return t;
15997 /* Return the tree for not (ARG0) when ARG0 is known to be an integer
15998 constant. TYPE is the type of the result. */
16000 static tree
16001 fold_not_const (const_tree arg0, tree type)
16003 double_int val;
16005 gcc_assert (TREE_CODE (arg0) == INTEGER_CST);
16007 val = double_int_not (tree_to_double_int (arg0));
16008 return force_fit_type_double (type, val, 0, TREE_OVERFLOW (arg0));
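/* A sketch: the result is the bitwise complement of the operand, so

     tree t = fold_not_const (build_int_cst (integer_type_node, 5),
                              integer_type_node);

   gives the INTEGER_CST -6 (~5 in two's complement).  */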
16011 /* Given CODE, a relational operator, the target type, TYPE and two
16012 constant operands OP0 and OP1, return the result of the
16013 relational operation. If the result is not a compile time
16014 constant, then return NULL_TREE. */
16016 static tree
16017 fold_relational_const (enum tree_code code, tree type, tree op0, tree op1)
16018 {
16019 int result, invert;
16021 /* From here on, the only cases we handle are when the result is
16022 known to be a constant. */
16024 if (TREE_CODE (op0) == REAL_CST && TREE_CODE (op1) == REAL_CST)
16025 {
16026 const REAL_VALUE_TYPE *c0 = TREE_REAL_CST_PTR (op0);
16027 const REAL_VALUE_TYPE *c1 = TREE_REAL_CST_PTR (op1);
16029 /* Handle the cases where either operand is a NaN. */
16030 if (real_isnan (c0) || real_isnan (c1))
16031 {
16032 switch (code)
16033 {
16034 case EQ_EXPR:
16035 case ORDERED_EXPR:
16036 result = 0;
16037 break;
16039 case NE_EXPR:
16040 case UNORDERED_EXPR:
16041 case UNLT_EXPR:
16042 case UNLE_EXPR:
16043 case UNGT_EXPR:
16044 case UNGE_EXPR:
16045 case UNEQ_EXPR:
16046 result = 1;
16047 break;
16049 case LT_EXPR:
16050 case LE_EXPR:
16051 case GT_EXPR:
16052 case GE_EXPR:
16053 case LTGT_EXPR:
16054 if (flag_trapping_math)
16055 return NULL_TREE;
16056 result = 0;
16057 break;
16059 default:
16060 gcc_unreachable ();
16061 }
16063 return constant_boolean_node (result, type);
16064 }
16066 return constant_boolean_node (real_compare (code, c0, c1), type);
16067 }
16069 if (TREE_CODE (op0) == FIXED_CST && TREE_CODE (op1) == FIXED_CST)
16070 {
16071 const FIXED_VALUE_TYPE *c0 = TREE_FIXED_CST_PTR (op0);
16072 const FIXED_VALUE_TYPE *c1 = TREE_FIXED_CST_PTR (op1);
16073 return constant_boolean_node (fixed_compare (code, c0, c1), type);
16074 }
16076 /* Handle equality/inequality of complex constants. */
16077 if (TREE_CODE (op0) == COMPLEX_CST && TREE_CODE (op1) == COMPLEX_CST)
16078 {
16079 tree rcond = fold_relational_const (code, type,
16080 TREE_REALPART (op0),
16081 TREE_REALPART (op1));
16082 tree icond = fold_relational_const (code, type,
16083 TREE_IMAGPART (op0),
16084 TREE_IMAGPART (op1));
16085 if (code == EQ_EXPR)
16086 return fold_build2 (TRUTH_ANDIF_EXPR, type, rcond, icond);
16087 else if (code == NE_EXPR)
16088 return fold_build2 (TRUTH_ORIF_EXPR, type, rcond, icond);
16089 else
16090 return NULL_TREE;
16091 }
16093 /* From here on we only handle LT, LE, GT, GE, EQ and NE.
16095 To compute GT, swap the arguments and do LT.
16096 To compute GE, do LT and invert the result.
16097 To compute LE, swap the arguments, do LT and invert the result.
16098 To compute NE, do EQ and invert the result.
16100 Therefore, the code below must handle only EQ and LT. */
16102 if (code == LE_EXPR || code == GT_EXPR)
16103 {
16104 tree tem = op0;
16105 op0 = op1;
16106 op1 = tem;
16107 code = swap_tree_comparison (code);
16108 }
16110 /* Note that it is safe to invert for real values here because we
16111 have already handled the one case where it matters. */
16113 invert = 0;
16114 if (code == NE_EXPR || code == GE_EXPR)
16115 {
16116 invert = 1;
16117 code = invert_tree_comparison (code, false);
16118 }
16120 /* Compute a result for LT or EQ if args permit;
16121 otherwise return NULL_TREE. */
16122 if (TREE_CODE (op0) == INTEGER_CST && TREE_CODE (op1) == INTEGER_CST)
16123 {
16124 if (code == EQ_EXPR)
16125 result = tree_int_cst_equal (op0, op1);
16126 else if (TYPE_UNSIGNED (TREE_TYPE (op0)))
16127 result = INT_CST_LT_UNSIGNED (op0, op1);
16128 else
16129 result = INT_CST_LT (op0, op1);
16130 }
16131 else
16132 return NULL_TREE;
16134 if (invert)
16135 result ^= 1;
16136 return constant_boolean_node (result, type);
16137 }
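/* For example, when either REAL_CST operand is a NaN, EQ_EXPR folds
   to false and NE_EXPR or UNORDERED_EXPR fold to true, while LT_EXPR
   is left unfolded (NULL_TREE) under -ftrapping-math because
   evaluating it at run time would raise an invalid-operand exception.
   An integer sketch:

     fold_relational_const (LT_EXPR, boolean_type_node,
                            build_int_cst (integer_type_node, 1),
                            build_int_cst (integer_type_node, 2));

   returns boolean_true_node.  */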
16139 /* If necessary, return a CLEANUP_POINT_EXPR for EXPR with the
16140 indicated TYPE. If no CLEANUP_POINT_EXPR is necessary, return EXPR
16141 itself. */
16143 tree
16144 fold_build_cleanup_point_expr (tree type, tree expr)
16146 /* If the expression does not have side effects then we don't have to wrap
16147 it with a cleanup point expression. */
16148 if (!TREE_SIDE_EFFECTS (expr))
16149 return expr;
16151 /* If the expression is a return, check whether the expression inside the
16152 return, or the right-hand side of the modify expression inside the
16153 return, has side effects. If either does not, we don't need to
16154 wrap the expression in a cleanup point expression. Note we don't check the
16155 left-hand side of the modify because it should always be a return decl. */
16156 if (TREE_CODE (expr) == RETURN_EXPR)
16157 {
16158 tree op = TREE_OPERAND (expr, 0);
16159 if (!op || !TREE_SIDE_EFFECTS (op))
16160 return expr;
16161 op = TREE_OPERAND (op, 1);
16162 if (!TREE_SIDE_EFFECTS (op))
16163 return expr;
16164 }
16166 return build1 (CLEANUP_POINT_EXPR, type, expr);
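/* For example, a side-effect-free EXPR such as an INTEGER_CST or a
   plain variable read is returned unchanged, while a CALL_EXPR with
   side effects comes back as CLEANUP_POINT_EXPR <CALL_EXPR ...>, so
   that temporaries created while evaluating it are cleaned up at the
   end of the full expression.  */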
16169 /* Given a pointer value OP0 and a type TYPE, return a simplified version
16170 of an indirection through OP0, or NULL_TREE if no simplification is
16171 possible. */
16173 tree
16174 fold_indirect_ref_1 (location_t loc, tree type, tree op0)
16176 tree sub = op0;
16177 tree subtype;
16179 STRIP_NOPS (sub);
16180 subtype = TREE_TYPE (sub);
16181 if (!POINTER_TYPE_P (subtype))
16182 return NULL_TREE;
16184 if (TREE_CODE (sub) == ADDR_EXPR)
16186 tree op = TREE_OPERAND (sub, 0);
16187 tree optype = TREE_TYPE (op);
16188 /* *&CONST_DECL -> to the value of the const decl. */
16189 if (TREE_CODE (op) == CONST_DECL)
16190 return DECL_INITIAL (op);
16191 /* *&p => p; make sure to handle *&"str"[cst] here. */
16192 if (type == optype)
16194 tree fop = fold_read_from_constant_string (op);
16195 if (fop)
16196 return fop;
16197 else
16198 return op;
16200 /* *(foo *)&fooarray => fooarray[0] */
16201 else if (TREE_CODE (optype) == ARRAY_TYPE
16202 && type == TREE_TYPE (optype)
16203 && (!in_gimple_form
16204 || TREE_CODE (TYPE_SIZE (type)) == INTEGER_CST))
16206 tree type_domain = TYPE_DOMAIN (optype);
16207 tree min_val = size_zero_node;
16208 if (type_domain && TYPE_MIN_VALUE (type_domain))
16209 min_val = TYPE_MIN_VALUE (type_domain);
16210 if (in_gimple_form
16211 && TREE_CODE (min_val) != INTEGER_CST)
16212 return NULL_TREE;
16213 return build4_loc (loc, ARRAY_REF, type, op, min_val,
16214 NULL_TREE, NULL_TREE);
16216 /* *(foo *)&complexfoo => __real__ complexfoo */
16217 else if (TREE_CODE (optype) == COMPLEX_TYPE
16218 && type == TREE_TYPE (optype))
16219 return fold_build1_loc (loc, REALPART_EXPR, type, op);
16220 /* *(foo *)&vectorfoo => BIT_FIELD_REF<vectorfoo,...> */
16221 else if (TREE_CODE (optype) == VECTOR_TYPE
16222 && type == TREE_TYPE (optype))
16224 tree part_width = TYPE_SIZE (type);
16225 tree index = bitsize_int (0);
16226 return fold_build3_loc (loc, BIT_FIELD_REF, type, op, part_width, index);
16230 if (TREE_CODE (sub) == POINTER_PLUS_EXPR
16231 && TREE_CODE (TREE_OPERAND (sub, 1)) == INTEGER_CST)
16233 tree op00 = TREE_OPERAND (sub, 0);
16234 tree op01 = TREE_OPERAND (sub, 1);
16236 STRIP_NOPS (op00);
16237 if (TREE_CODE (op00) == ADDR_EXPR)
16239 tree op00type;
16240 op00 = TREE_OPERAND (op00, 0);
16241 op00type = TREE_TYPE (op00);
16243 /* ((foo*)&vectorfoo)[1] => BIT_FIELD_REF<vectorfoo,...> */
16244 if (TREE_CODE (op00type) == VECTOR_TYPE
16245 && type == TREE_TYPE (op00type))
16247 HOST_WIDE_INT offset = tree_low_cst (op01, 0);
16248 tree part_width = TYPE_SIZE (type);
16249 unsigned HOST_WIDE_INT part_widthi = tree_low_cst (part_width, 0)/BITS_PER_UNIT;
16250 unsigned HOST_WIDE_INT indexi = offset * BITS_PER_UNIT;
16251 tree index = bitsize_int (indexi);
16253 if (offset/part_widthi <= TYPE_VECTOR_SUBPARTS (op00type))
16254 return fold_build3_loc (loc,
16255 BIT_FIELD_REF, type, op00,
16256 part_width, index);
16259 /* ((foo*)&complexfoo)[1] => __imag__ complexfoo */
16260 else if (TREE_CODE (op00type) == COMPLEX_TYPE
16261 && type == TREE_TYPE (op00type))
16263 tree size = TYPE_SIZE_UNIT (type);
16264 if (tree_int_cst_equal (size, op01))
16265 return fold_build1_loc (loc, IMAGPART_EXPR, type, op00);
16267 /* ((foo *)&fooarray)[1] => fooarray[1] */
16268 else if (TREE_CODE (op00type) == ARRAY_TYPE
16269 && type == TREE_TYPE (op00type))
16271 tree type_domain = TYPE_DOMAIN (op00type);
16272 tree min_val = size_zero_node;
16273 if (type_domain && TYPE_MIN_VALUE (type_domain))
16274 min_val = TYPE_MIN_VALUE (type_domain);
16275 op01 = size_binop_loc (loc, EXACT_DIV_EXPR, op01,
16276 TYPE_SIZE_UNIT (type));
16277 op01 = size_binop_loc (loc, PLUS_EXPR, op01, min_val);
16278 return build4_loc (loc, ARRAY_REF, type, op00, op01,
16279 NULL_TREE, NULL_TREE);
16284 /* *(foo *)fooarrptr => (*fooarrptr)[0] */
16285 if (TREE_CODE (TREE_TYPE (subtype)) == ARRAY_TYPE
16286 && type == TREE_TYPE (TREE_TYPE (subtype))
16287 && (!in_gimple_form
16288 || TREE_CODE (TYPE_SIZE (type)) == INTEGER_CST))
16290 tree type_domain;
16291 tree min_val = size_zero_node;
16292 sub = build_fold_indirect_ref_loc (loc, sub);
16293 type_domain = TYPE_DOMAIN (TREE_TYPE (sub));
16294 if (type_domain && TYPE_MIN_VALUE (type_domain))
16295 min_val = TYPE_MIN_VALUE (type_domain);
16296 if (in_gimple_form
16297 && TREE_CODE (min_val) != INTEGER_CST)
16298 return NULL_TREE;
16299 return build4_loc (loc, ARRAY_REF, type, sub, min_val, NULL_TREE,
16300 NULL_TREE);
16303 return NULL_TREE;
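/* A worked instance of the POINTER_PLUS_EXPR case above: for a v4si
   vector V and the access *(int *)(&V + 4), the byte offset 4 maps to
   bit index 32 and part_width is 32 bits, so the result is
   BIT_FIELD_REF <V, 32, 32>, i.e. element 1 of the vector.  */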
16306 /* Builds an expression for an indirection through T, simplifying some
16307 cases. */
16309 tree
16310 build_fold_indirect_ref_loc (location_t loc, tree t)
16312 tree type = TREE_TYPE (TREE_TYPE (t));
16313 tree sub = fold_indirect_ref_1 (loc, type, t);
16315 if (sub)
16316 return sub;
16318 return build1_loc (loc, INDIRECT_REF, type, t);
16321 /* Given an INDIRECT_REF T, return either T or a simplified version. */
16323 tree
16324 fold_indirect_ref_loc (location_t loc, tree t)
16326 tree sub = fold_indirect_ref_1 (loc, TREE_TYPE (t), TREE_OPERAND (t, 0));
16328 if (sub)
16329 return sub;
16330 else
16331 return t;
16334 /* Strip non-trapping, non-side-effecting tree nodes from an expression
16335 whose result is ignored. The type of the returned tree need not be
16336 the same as that of the original expression. */
16338 tree
16339 fold_ignored_result (tree t)
16341 if (!TREE_SIDE_EFFECTS (t))
16342 return integer_zero_node;
16344 for (;;)
16345 switch (TREE_CODE_CLASS (TREE_CODE (t)))
16347 case tcc_unary:
16348 t = TREE_OPERAND (t, 0);
16349 break;
16351 case tcc_binary:
16352 case tcc_comparison:
16353 if (!TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1)))
16354 t = TREE_OPERAND (t, 0);
16355 else if (!TREE_SIDE_EFFECTS (TREE_OPERAND (t, 0)))
16356 t = TREE_OPERAND (t, 1);
16357 else
16358 return t;
16359 break;
16361 case tcc_expression:
16362 switch (TREE_CODE (t))
16364 case COMPOUND_EXPR:
16365 if (TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1)))
16366 return t;
16367 t = TREE_OPERAND (t, 0);
16368 break;
16370 case COND_EXPR:
16371 if (TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1))
16372 || TREE_SIDE_EFFECTS (TREE_OPERAND (t, 2)))
16373 return t;
16374 t = TREE_OPERAND (t, 0);
16375 break;
16377 default:
16378 return t;
16380 break;
16382 default:
16383 return t;
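/* For example, with T = x + f () whose value is unused, the
   tcc_binary case keeps only the operand with side effects and
   reduces T to the call f (), while an entirely side-effect-free T
   is replaced by integer_zero_node at the top of the function.  */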
16387 /* Return the value of VALUE, rounded up to a multiple of DIVISOR.
16388 This can only be applied to objects of a sizetype. */
16390 tree
16391 round_up_loc (location_t loc, tree value, int divisor)
16393 tree div = NULL_TREE;
16395 gcc_assert (divisor > 0);
16396 if (divisor == 1)
16397 return value;
16399 /* See if VALUE is already a multiple of DIVISOR. If so, we don't
16400 have to do anything. Only do this when VALUE is not a constant,
16401 because for a constant this check is more expensive than simply
16402 performing the rounding. */
16403 if (TREE_CODE (value) != INTEGER_CST)
16405 div = build_int_cst (TREE_TYPE (value), divisor);
16407 if (multiple_of_p (TREE_TYPE (value), value, div))
16408 return value;
16411 /* If divisor is a power of two, simplify this to bit manipulation. */
16412 if (divisor == (divisor & -divisor))
16414 if (TREE_CODE (value) == INTEGER_CST)
16416 double_int val = tree_to_double_int (value);
16417 bool overflow_p;
16419 if ((val.low & (divisor - 1)) == 0)
16420 return value;
16422 overflow_p = TREE_OVERFLOW (value);
16423 val.low &= ~(divisor - 1);
16424 val.low += divisor;
16425 if (val.low == 0)
16427 val.high++;
16428 if (val.high == 0)
16429 overflow_p = true;
16432 return force_fit_type_double (TREE_TYPE (value), val,
16433 -1, overflow_p);
16435 else
16437 tree t;
16439 t = build_int_cst (TREE_TYPE (value), divisor - 1);
16440 value = size_binop_loc (loc, PLUS_EXPR, value, t);
16441 t = build_int_cst (TREE_TYPE (value), -divisor);
16442 value = size_binop_loc (loc, BIT_AND_EXPR, value, t);
16445 else
16447 if (!div)
16448 div = build_int_cst (TREE_TYPE (value), divisor);
16449 value = size_binop_loc (loc, CEIL_DIV_EXPR, value, div);
16450 value = size_binop_loc (loc, MULT_EXPR, value, div);
16453 return value;
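/* A worked instance of the power-of-two path: VALUE = 13, DIVISOR = 8
   gives (13 + 7) & -8 = 16, and an already-aligned VALUE such as 16
   is returned unchanged (by the low-bits test for constants, or by
   multiple_of_p otherwise).  A non-power-of-two DIVISOR such as 12
   takes the CEIL_DIV_EXPR/MULT_EXPR path instead.  */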
16456 /* Likewise, but round down. */
16458 tree
16459 round_down_loc (location_t loc, tree value, int divisor)
16461 tree div = NULL_TREE;
16463 gcc_assert (divisor > 0);
16464 if (divisor == 1)
16465 return value;
16467 /* See if VALUE is already a multiple of DIVISOR. If so, we don't
16468 have to do anything. Only do this when VALUE is not a constant,
16469 because for a constant this check is more expensive than simply
16470 performing the rounding. */
16471 if (TREE_CODE (value) != INTEGER_CST)
16473 div = build_int_cst (TREE_TYPE (value), divisor);
16475 if (multiple_of_p (TREE_TYPE (value), value, div))
16476 return value;
16479 /* If divisor is a power of two, simplify this to bit manipulation. */
16480 if (divisor == (divisor & -divisor))
16482 tree t;
16484 t = build_int_cst (TREE_TYPE (value), -divisor);
16485 value = size_binop_loc (loc, BIT_AND_EXPR, value, t);
16487 else
16489 if (!div)
16490 div = build_int_cst (TREE_TYPE (value), divisor);
16491 value = size_binop_loc (loc, FLOOR_DIV_EXPR, value, div);
16492 value = size_binop_loc (loc, MULT_EXPR, value, div);
16495 return value;
16498 /* Returns a pointer to the base of the object addressed by EXP and
16499 extracts the information about the offset of the access, storing it
16500 in PBITPOS and POFFSET. */
16502 static tree
16503 split_address_to_core_and_offset (tree exp,
16504 HOST_WIDE_INT *pbitpos, tree *poffset)
16506 tree core;
16507 enum machine_mode mode;
16508 int unsignedp, volatilep;
16509 HOST_WIDE_INT bitsize;
16510 location_t loc = EXPR_LOCATION (exp);
16512 if (TREE_CODE (exp) == ADDR_EXPR)
16514 core = get_inner_reference (TREE_OPERAND (exp, 0), &bitsize, pbitpos,
16515 poffset, &mode, &unsignedp, &volatilep,
16516 false);
16517 core = build_fold_addr_expr_loc (loc, core);
16519 else
16521 core = exp;
16522 *pbitpos = 0;
16523 *poffset = NULL_TREE;
16526 return core;
16529 /* Returns true if addresses of E1 and E2 differ by a constant, false
16530 otherwise. If they do, E1 - E2 is stored in *DIFF. */
16532 bool
16533 ptr_difference_const (tree e1, tree e2, HOST_WIDE_INT *diff)
16535 tree core1, core2;
16536 HOST_WIDE_INT bitpos1, bitpos2;
16537 tree toffset1, toffset2, tdiff, type;
16539 core1 = split_address_to_core_and_offset (e1, &bitpos1, &toffset1);
16540 core2 = split_address_to_core_and_offset (e2, &bitpos2, &toffset2);
16542 if (bitpos1 % BITS_PER_UNIT != 0
16543 || bitpos2 % BITS_PER_UNIT != 0
16544 || !operand_equal_p (core1, core2, 0))
16545 return false;
16547 if (toffset1 && toffset2)
16549 type = TREE_TYPE (toffset1);
16550 if (type != TREE_TYPE (toffset2))
16551 toffset2 = fold_convert (type, toffset2);
16553 tdiff = fold_build2 (MINUS_EXPR, type, toffset1, toffset2);
16554 if (!cst_and_fits_in_hwi (tdiff))
16555 return false;
16557 *diff = int_cst_value (tdiff);
16559 else if (toffset1 || toffset2)
16561 /* If only one of the offsets is non-constant, the difference cannot
16562 be a constant. */
16563 return false;
16565 else
16566 *diff = 0;
16568 *diff += (bitpos1 - bitpos2) / BITS_PER_UNIT;
16569 return true;
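/* For example, with an array of 4-byte ints A, E1 = &A[3] and
   E2 = &A[1] share the core &A and carry constant bit positions 96
   and 32, so 8 is stored in *DIFF and true is returned.  With
   E1 = &A[i] for variable I, only one address has a non-constant
   offset, so the function returns false.  */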
16572 /* Simplify the floating point expression EXP when the sign of the
16573 result is not significant. Return NULL_TREE if no simplification
16574 is possible. */
16576 tree
16577 fold_strip_sign_ops (tree exp)
16579 tree arg0, arg1;
16580 location_t loc = EXPR_LOCATION (exp);
16582 switch (TREE_CODE (exp))
16584 case ABS_EXPR:
16585 case NEGATE_EXPR:
16586 arg0 = fold_strip_sign_ops (TREE_OPERAND (exp, 0));
16587 return arg0 ? arg0 : TREE_OPERAND (exp, 0);
16589 case MULT_EXPR:
16590 case RDIV_EXPR:
16591 if (HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (TREE_TYPE (exp))))
16592 return NULL_TREE;
16593 arg0 = fold_strip_sign_ops (TREE_OPERAND (exp, 0));
16594 arg1 = fold_strip_sign_ops (TREE_OPERAND (exp, 1));
16595 if (arg0 != NULL_TREE || arg1 != NULL_TREE)
16596 return fold_build2_loc (loc, TREE_CODE (exp), TREE_TYPE (exp),
16597 arg0 ? arg0 : TREE_OPERAND (exp, 0),
16598 arg1 ? arg1 : TREE_OPERAND (exp, 1));
16599 break;
16601 case COMPOUND_EXPR:
16602 arg0 = TREE_OPERAND (exp, 0);
16603 arg1 = fold_strip_sign_ops (TREE_OPERAND (exp, 1));
16604 if (arg1)
16605 return fold_build2_loc (loc, COMPOUND_EXPR, TREE_TYPE (exp), arg0, arg1);
16606 break;
16608 case COND_EXPR:
16609 arg0 = fold_strip_sign_ops (TREE_OPERAND (exp, 1));
16610 arg1 = fold_strip_sign_ops (TREE_OPERAND (exp, 2));
16611 if (arg0 || arg1)
16612 return fold_build3_loc (loc,
16613 COND_EXPR, TREE_TYPE (exp), TREE_OPERAND (exp, 0),
16614 arg0 ? arg0 : TREE_OPERAND (exp, 1),
16615 arg1 ? arg1 : TREE_OPERAND (exp, 2));
16616 break;
16618 case CALL_EXPR:
16620 const enum built_in_function fcode = builtin_mathfn_code (exp);
16621 switch (fcode)
16623 CASE_FLT_FN (BUILT_IN_COPYSIGN):
16624 /* Strip copysign function call, return the 1st argument. */
16625 arg0 = CALL_EXPR_ARG (exp, 0);
16626 arg1 = CALL_EXPR_ARG (exp, 1);
16627 return omit_one_operand_loc (loc, TREE_TYPE (exp), arg0, arg1);
16629 default:
16630 /* Strip sign ops from the argument of "odd" math functions. */
16631 if (negate_mathfn_p (fcode))
16633 arg0 = fold_strip_sign_ops (CALL_EXPR_ARG (exp, 0));
16634 if (arg0)
16635 return build_call_expr_loc (loc, get_callee_fndecl (exp), 1, arg0);
16637 break;
16640 break;
16642 default:
16643 break;
16645 return NULL_TREE;
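/* For example, stripping (-x) * ABS_EXPR <y> yields x * y, and
   copysign (x, z) reduces to its first argument via
   omit_one_operand_loc, which preserves any side effects of Z.
   Builtin folders that only need the magnitude of the result (such
   as the hypot folder in builtins.c) call this before simplifying
   further.  */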