/* Fold a constant sub-tree into a single node for C-compiler
   Copyright (C) 1987, 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998, 1999,
   2000, 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009, 2010, 2011,
   2012 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

/*@@ This file should be rewritten to use an arbitrary precision
  @@ representation for "struct tree_int_cst" and "struct tree_real_cst".
  @@ Perhaps the routines could also be used for bc/dc, and made a lib.
  @@ The routines that translate from the ap rep should
  @@ warn if precision et al. is lost.
  @@ This would also make life easier when this technology is used
  @@ for cross-compilers.  */

/* The entry points in this file are fold, size_int_wide and size_binop.

   fold takes a tree as argument and returns a simplified tree.

   size_binop takes a tree code for an arithmetic operation
   and two operands that are trees, and produces a tree for the
   result, assuming the type comes from `sizetype'.

   size_int takes an integer value, and creates a tree constant
   with type from `sizetype'.

   Note: Since the folders get called on non-gimple code as well as
   gimple code, we need to handle GIMPLE tuples as well as their
   corresponding tree equivalents.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "flags.h"
#include "tree.h"
#include "realmpfr.h"
#include "rtl.h"
#include "expr.h"
#include "tm_p.h"
#include "target.h"
#include "diagnostic-core.h"
#include "intl.h"
#include "ggc.h"
#include "hashtab.h"
#include "langhooks.h"
#include "md5.h"
#include "gimple.h"
#include "tree-flow.h"

/* Nonzero if we are folding constants inside an initializer; zero
   otherwise.  */
int folding_initializer = 0;

/* The following constants represent a bit-based encoding of GCC's
   comparison operators.  This encoding simplifies transformations
   on relational comparison operators, such as AND and OR.  */
enum comparison_code {
  COMPCODE_FALSE = 0,
  COMPCODE_LT = 1,
  COMPCODE_EQ = 2,
  COMPCODE_LE = 3,
  COMPCODE_GT = 4,
  COMPCODE_LTGT = 5,
  COMPCODE_GE = 6,
  COMPCODE_ORD = 7,
  COMPCODE_UNORD = 8,
  COMPCODE_UNLT = 9,
  COMPCODE_UNEQ = 10,
  COMPCODE_UNLE = 11,
  COMPCODE_UNGT = 12,
  COMPCODE_NE = 13,
  COMPCODE_UNGE = 14,
  COMPCODE_TRUE = 15
};

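/* As a worked example of the encoding: bit 0 means "less than", bit 1
   "equal", bit 2 "greater than" and bit 3 "unordered", so OR-ing two
   codes yields the code of the disjunction of the two predicates:

     COMPCODE_LE   == (COMPCODE_LT | COMPCODE_EQ)            3 == (1 | 2)
     COMPCODE_NE   == (COMPCODE_UNORD | COMPCODE_LT | COMPCODE_GT)
     COMPCODE_TRUE == (COMPCODE_ORD | COMPCODE_UNORD)       15 == (7 | 8)

   This is what makes folding AND/OR of comparisons a simple bitwise
   operation on the codes.  */
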
static bool negate_mathfn_p (enum built_in_function);
static bool negate_expr_p (tree);
static tree negate_expr (tree);
static tree split_tree (tree, enum tree_code, tree *, tree *, tree *, int);
static tree associate_trees (location_t, tree, tree, enum tree_code, tree);
static tree const_binop (enum tree_code, tree, tree);
static enum comparison_code comparison_to_compcode (enum tree_code);
static enum tree_code compcode_to_comparison (enum comparison_code);
static int operand_equal_for_comparison_p (tree, tree, tree);
static int twoval_comparison_p (tree, tree *, tree *, int *);
static tree eval_subst (location_t, tree, tree, tree, tree, tree);
static tree pedantic_omit_one_operand_loc (location_t, tree, tree, tree);
static tree distribute_bit_expr (location_t, enum tree_code, tree, tree, tree);
static tree make_bit_field_ref (location_t, tree, tree,
				HOST_WIDE_INT, HOST_WIDE_INT, int);
static tree optimize_bit_field_compare (location_t, enum tree_code,
					tree, tree, tree);
static tree decode_field_reference (location_t, tree, HOST_WIDE_INT *,
				    HOST_WIDE_INT *,
				    enum machine_mode *, int *, int *,
				    tree *, tree *);
static int all_ones_mask_p (const_tree, int);
static tree sign_bit_p (tree, const_tree);
static int simple_operand_p (const_tree);
static bool simple_operand_p_2 (tree);
static tree range_binop (enum tree_code, tree, tree, int, tree, int);
static tree range_predecessor (tree);
static tree range_successor (tree);
static tree fold_range_test (location_t, enum tree_code, tree, tree, tree);
static tree fold_cond_expr_with_comparison (location_t, tree, tree, tree, tree);
static tree unextend (tree, int, int, tree);
static tree optimize_minmax_comparison (location_t, enum tree_code,
					tree, tree, tree);
static tree extract_muldiv (tree, tree, enum tree_code, tree, bool *);
static tree extract_muldiv_1 (tree, tree, enum tree_code, tree, bool *);
static tree fold_binary_op_with_conditional_arg (location_t,
						 enum tree_code, tree,
						 tree, tree,
						 tree, tree, int);
static tree fold_mathfn_compare (location_t,
				 enum built_in_function, enum tree_code,
				 tree, tree, tree);
static tree fold_inf_compare (location_t, enum tree_code, tree, tree, tree);
static tree fold_div_compare (location_t, enum tree_code, tree, tree, tree);
static bool reorder_operands_p (const_tree, const_tree);
static tree fold_negate_const (tree, tree);
static tree fold_not_const (const_tree, tree);
static tree fold_relational_const (enum tree_code, tree, tree, tree);
static tree fold_convert_const (enum tree_code, tree, tree);

/* Return EXPR_LOCATION of T if it is not UNKNOWN_LOCATION.
   Otherwise, return LOC.  */

static location_t
expr_location_or (tree t, location_t loc)
{
  location_t tloc = EXPR_LOCATION (t);
  return tloc != UNKNOWN_LOCATION ? tloc : loc;
}

/* Similar to protected_set_expr_location, but never modify X in place;
   if the location can and needs to be set, unshare X first.  */

static inline tree
protected_set_expr_location_unshare (tree x, location_t loc)
{
  if (CAN_HAVE_LOCATION_P (x)
      && EXPR_LOCATION (x) != loc
      && !(TREE_CODE (x) == SAVE_EXPR
	   || TREE_CODE (x) == TARGET_EXPR
	   || TREE_CODE (x) == BIND_EXPR))
    {
      x = copy_node (x);
      SET_EXPR_LOCATION (x, loc);
    }
  return x;
}

/* We know that A1 + B1 = SUM1, using 2's complement arithmetic and ignoring
   overflow.  Suppose A, B and SUM have the same respective signs as A1, B1,
   and SUM1.  Then this yields nonzero if overflow occurred during the
   addition.

   Overflow occurs if A and B have the same sign, but A and SUM differ in
   sign.  Use `^' to test whether signs differ, and `< 0' to isolate the
   sign.  */
#define OVERFLOW_SUM_SIGN(a, b, sum) ((~((a) ^ (b)) & ((a) ^ (sum))) < 0)

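/* For example, with 8-bit operands a = 0x70 and b = 0x70, the wrapped
   sum is 0xE0: ~(a ^ b) is 0xFF (same input signs) and (a ^ sum) is
   0x90 (sign flipped), so the AND has its sign bit set, is negative,
   and the macro reports overflow.  With a = 0x70 and b = 0x90 the
   input signs differ, ~(a ^ b) has a clear sign bit, and no overflow
   is reported.  */
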
/* If ARG2 divides ARG1 with zero remainder, carries out the division
   of type CODE and returns the quotient.
   Otherwise returns NULL_TREE.  */

tree
div_if_zero_remainder (enum tree_code code, const_tree arg1, const_tree arg2)
{
  double_int quo, rem;
  int uns;

  /* The sign of the division is taken from operand two; that does the
     right thing for POINTER_PLUS_EXPR, where we want a signed
     division.  */
  uns = TYPE_UNSIGNED (TREE_TYPE (arg2));

  quo = double_int_divmod (tree_to_double_int (arg1),
			   tree_to_double_int (arg2),
			   uns, code, &rem);

  if (double_int_zero_p (rem))
    return build_int_cst_wide (TREE_TYPE (arg1), quo.low, quo.high);

  return NULL_TREE;
}

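/* An illustrative use, assuming INTEGER_CST operands of the same
   integer type: div_if_zero_remainder (TRUNC_DIV_EXPR, eight, four)
   folds to the constant 2, while
   div_if_zero_remainder (TRUNC_DIV_EXPR, eight, three) returns
   NULL_TREE because the remainder is nonzero.  */
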
/* This is nonzero if we should defer warnings about undefined
   overflow.  This facility exists because these warnings are a
   special case.  The code to estimate loop iterations does not want
   to issue any warnings, since it works with expressions which do not
   occur in user code.  Various bits of cleanup code call fold(), but
   only use the result if it has certain characteristics (e.g., is a
   constant); that code only wants to issue a warning if the result is
   used.  */

static int fold_deferring_overflow_warnings;

/* If a warning about undefined overflow is deferred, this is the
   warning.  Note that this may cause us to turn two warnings into
   one, but that is fine since it is sufficient to only give one
   warning per expression.  */

static const char* fold_deferred_overflow_warning;

/* If a warning about undefined overflow is deferred, this is the
   level at which the warning should be emitted.  */

static enum warn_strict_overflow_code fold_deferred_overflow_code;

/* Start deferring overflow warnings.  We could use a stack here to
   permit nested calls, but at present it is not necessary.  */

void
fold_defer_overflow_warnings (void)
{
  ++fold_deferring_overflow_warnings;
}

/* Stop deferring overflow warnings.  If there is a pending warning,
   and ISSUE is true, then issue the warning if appropriate.  STMT is
   the statement with which the warning should be associated (used for
   location information); STMT may be NULL.  CODE is the level of the
   warning--a warn_strict_overflow_code value.  This function will use
   the smaller of CODE and the deferred code when deciding whether to
   issue the warning.  CODE may be zero to mean to always use the
   deferred code.  */

void
fold_undefer_overflow_warnings (bool issue, const_gimple stmt, int code)
{
  const char *warnmsg;
  location_t locus;

  gcc_assert (fold_deferring_overflow_warnings > 0);
  --fold_deferring_overflow_warnings;
  if (fold_deferring_overflow_warnings > 0)
    {
      if (fold_deferred_overflow_warning != NULL
	  && code != 0
	  && code < (int) fold_deferred_overflow_code)
	fold_deferred_overflow_code = (enum warn_strict_overflow_code) code;
      return;
    }

  warnmsg = fold_deferred_overflow_warning;
  fold_deferred_overflow_warning = NULL;

  if (!issue || warnmsg == NULL)
    return;

  if (gimple_no_warning_p (stmt))
    return;

  /* Use the smallest code level when deciding to issue the
     warning.  */
  if (code == 0 || code > (int) fold_deferred_overflow_code)
    code = fold_deferred_overflow_code;

  if (!issue_strict_overflow_warning (code))
    return;

  if (stmt == NULL)
    locus = input_location;
  else
    locus = gimple_location (stmt);
  warning_at (locus, OPT_Wstrict_overflow, "%s", warnmsg);
}

/* Stop deferring overflow warnings, ignoring any deferred
   warnings.  */

void
fold_undefer_and_ignore_overflow_warnings (void)
{
  fold_undefer_overflow_warnings (false, NULL, 0);
}

/* Whether we are deferring overflow warnings.  */

bool
fold_deferring_overflow_warnings_p (void)
{
  return fold_deferring_overflow_warnings > 0;
}

/* This is called when we fold something based on the fact that signed
   overflow is undefined.  */

static void
fold_overflow_warning (const char* gmsgid, enum warn_strict_overflow_code wc)
{
  if (fold_deferring_overflow_warnings > 0)
    {
      if (fold_deferred_overflow_warning == NULL
	  || wc < fold_deferred_overflow_code)
	{
	  fold_deferred_overflow_warning = gmsgid;
	  fold_deferred_overflow_code = wc;
	}
    }
  else if (issue_strict_overflow_warning (wc))
    warning (OPT_Wstrict_overflow, gmsgid);
}

/* Return true if the built-in mathematical function specified by CODE
   is odd, i.e. -f(x) == f(-x).  */

static bool
negate_mathfn_p (enum built_in_function code)
{
  switch (code)
    {
    CASE_FLT_FN (BUILT_IN_ASIN):
    CASE_FLT_FN (BUILT_IN_ASINH):
    CASE_FLT_FN (BUILT_IN_ATAN):
    CASE_FLT_FN (BUILT_IN_ATANH):
    CASE_FLT_FN (BUILT_IN_CASIN):
    CASE_FLT_FN (BUILT_IN_CASINH):
    CASE_FLT_FN (BUILT_IN_CATAN):
    CASE_FLT_FN (BUILT_IN_CATANH):
    CASE_FLT_FN (BUILT_IN_CBRT):
    CASE_FLT_FN (BUILT_IN_CPROJ):
    CASE_FLT_FN (BUILT_IN_CSIN):
    CASE_FLT_FN (BUILT_IN_CSINH):
    CASE_FLT_FN (BUILT_IN_CTAN):
    CASE_FLT_FN (BUILT_IN_CTANH):
    CASE_FLT_FN (BUILT_IN_ERF):
    CASE_FLT_FN (BUILT_IN_LLROUND):
    CASE_FLT_FN (BUILT_IN_LROUND):
    CASE_FLT_FN (BUILT_IN_ROUND):
    CASE_FLT_FN (BUILT_IN_SIN):
    CASE_FLT_FN (BUILT_IN_SINH):
    CASE_FLT_FN (BUILT_IN_TAN):
    CASE_FLT_FN (BUILT_IN_TANH):
    CASE_FLT_FN (BUILT_IN_TRUNC):
      return true;

    CASE_FLT_FN (BUILT_IN_LLRINT):
    CASE_FLT_FN (BUILT_IN_LRINT):
    CASE_FLT_FN (BUILT_IN_NEARBYINT):
    CASE_FLT_FN (BUILT_IN_RINT):
      return !flag_rounding_math;

    default:
      break;
    }

  return false;
}

/* Check whether we may negate an integer constant T without causing
   overflow.  */

bool
may_negate_without_overflow_p (const_tree t)
{
  unsigned HOST_WIDE_INT val;
  unsigned int prec;
  tree type;

  gcc_assert (TREE_CODE (t) == INTEGER_CST);

  type = TREE_TYPE (t);
  if (TYPE_UNSIGNED (type))
    return false;

  prec = TYPE_PRECISION (type);
  if (prec > HOST_BITS_PER_WIDE_INT)
    {
      if (TREE_INT_CST_LOW (t) != 0)
	return true;
      prec -= HOST_BITS_PER_WIDE_INT;
      val = TREE_INT_CST_HIGH (t);
    }
  else
    val = TREE_INT_CST_LOW (t);
  if (prec < HOST_BITS_PER_WIDE_INT)
    val &= ((unsigned HOST_WIDE_INT) 1 << prec) - 1;
  return val != ((unsigned HOST_WIDE_INT) 1 << (prec - 1));
}

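/* A concrete case: for a 32-bit signed type, the only value the test
   above rejects is 0x80000000 == INT_MIN, whose negation is not
   representable in two's complement; every other value, including
   INT_MAX, negates without overflow.  Unsigned types are rejected
   outright above.  */
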
/* Determine whether an expression T can be cheaply negated using
   the function negate_expr without introducing undefined overflow.  */

static bool
negate_expr_p (tree t)
{
  tree type;

  if (t == 0)
    return false;

  type = TREE_TYPE (t);

  STRIP_SIGN_NOPS (t);
  switch (TREE_CODE (t))
    {
    case INTEGER_CST:
      if (TYPE_OVERFLOW_WRAPS (type))
	return true;

      /* Check that -CST will not overflow type.  */
      return may_negate_without_overflow_p (t);
    case BIT_NOT_EXPR:
      return (INTEGRAL_TYPE_P (type)
	      && TYPE_OVERFLOW_WRAPS (type));

    case FIXED_CST:
    case NEGATE_EXPR:
      return true;

    case REAL_CST:
      /* We want to canonicalize to positive real constants.  Pretend
	 that only negative ones can be easily negated.  */
      return REAL_VALUE_NEGATIVE (TREE_REAL_CST (t));

    case COMPLEX_CST:
      return negate_expr_p (TREE_REALPART (t))
	     && negate_expr_p (TREE_IMAGPART (t));

    case COMPLEX_EXPR:
      return negate_expr_p (TREE_OPERAND (t, 0))
	     && negate_expr_p (TREE_OPERAND (t, 1));

    case CONJ_EXPR:
      return negate_expr_p (TREE_OPERAND (t, 0));

    case PLUS_EXPR:
      if (HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type))
	  || HONOR_SIGNED_ZEROS (TYPE_MODE (type)))
	return false;
      /* -(A + B) -> (-B) - A.  */
      if (negate_expr_p (TREE_OPERAND (t, 1))
	  && reorder_operands_p (TREE_OPERAND (t, 0),
				 TREE_OPERAND (t, 1)))
	return true;
      /* -(A + B) -> (-A) - B.  */
      return negate_expr_p (TREE_OPERAND (t, 0));

    case MINUS_EXPR:
      /* We can't turn -(A-B) into B-A when we honor signed zeros.  */
      return !HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type))
	     && !HONOR_SIGNED_ZEROS (TYPE_MODE (type))
	     && reorder_operands_p (TREE_OPERAND (t, 0),
				    TREE_OPERAND (t, 1));

    case MULT_EXPR:
      if (TYPE_UNSIGNED (TREE_TYPE (t)))
	break;

      /* Fall through.  */

    case RDIV_EXPR:
      if (! HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (TREE_TYPE (t))))
	return negate_expr_p (TREE_OPERAND (t, 1))
	       || negate_expr_p (TREE_OPERAND (t, 0));
      break;

    case TRUNC_DIV_EXPR:
    case ROUND_DIV_EXPR:
    case FLOOR_DIV_EXPR:
    case CEIL_DIV_EXPR:
    case EXACT_DIV_EXPR:
      /* In general we can't negate A / B, because if A is INT_MIN and
	 B is 1, we may turn this into INT_MIN / -1 which is undefined
	 and actually traps on some architectures.  But if overflow is
	 undefined, we can negate, because - (INT_MIN / 1) is an
	 overflow.  */
      if (INTEGRAL_TYPE_P (TREE_TYPE (t))
	  && !TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (t)))
	break;
      return negate_expr_p (TREE_OPERAND (t, 1))
	     || negate_expr_p (TREE_OPERAND (t, 0));

    case NOP_EXPR:
      /* Negate -((double)float) as (double)(-float).  */
      if (TREE_CODE (type) == REAL_TYPE)
	{
	  tree tem = strip_float_extensions (t);
	  if (tem != t)
	    return negate_expr_p (tem);
	}
      break;

    case CALL_EXPR:
      /* Negate -f(x) as f(-x).  */
      if (negate_mathfn_p (builtin_mathfn_code (t)))
	return negate_expr_p (CALL_EXPR_ARG (t, 0));
      break;

    case RSHIFT_EXPR:
      /* Optimize -((int)x >> 31) into (unsigned)x >> 31.  */
      if (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST)
	{
	  tree op1 = TREE_OPERAND (t, 1);
	  if (TREE_INT_CST_HIGH (op1) == 0
	      && (unsigned HOST_WIDE_INT) (TYPE_PRECISION (type) - 1)
		 == TREE_INT_CST_LOW (op1))
	    return true;
	}
      break;

    default:
      break;
    }
  return false;
}

/* Given T, an expression, return a folded tree for -T, or NULL_TREE if
   no simplification is possible.  If negate_expr_p would return true
   for T, NULL_TREE will never be returned.  */

static tree
fold_negate_expr (location_t loc, tree t)
{
  tree type = TREE_TYPE (t);
  tree tem;

  switch (TREE_CODE (t))
    {
    /* Convert - (~A) to A + 1.  */
    case BIT_NOT_EXPR:
      if (INTEGRAL_TYPE_P (type))
	return fold_build2_loc (loc, PLUS_EXPR, type, TREE_OPERAND (t, 0),
				build_int_cst (type, 1));
      break;

    case INTEGER_CST:
      tem = fold_negate_const (t, type);
      if (TREE_OVERFLOW (tem) == TREE_OVERFLOW (t)
	  || !TYPE_OVERFLOW_TRAPS (type))
	return tem;
      break;

    case REAL_CST:
      tem = fold_negate_const (t, type);
      /* Two's complement FP formats, such as c4x, may overflow.  */
      if (!TREE_OVERFLOW (tem) || !flag_trapping_math)
	return tem;
      break;

    case FIXED_CST:
      tem = fold_negate_const (t, type);
      return tem;

    case COMPLEX_CST:
      {
	tree rpart = negate_expr (TREE_REALPART (t));
	tree ipart = negate_expr (TREE_IMAGPART (t));

	if ((TREE_CODE (rpart) == REAL_CST
	     && TREE_CODE (ipart) == REAL_CST)
	    || (TREE_CODE (rpart) == INTEGER_CST
		&& TREE_CODE (ipart) == INTEGER_CST))
	  return build_complex (type, rpart, ipart);
      }
      break;

    case COMPLEX_EXPR:
      if (negate_expr_p (t))
	return fold_build2_loc (loc, COMPLEX_EXPR, type,
				fold_negate_expr (loc, TREE_OPERAND (t, 0)),
				fold_negate_expr (loc, TREE_OPERAND (t, 1)));
      break;

    case CONJ_EXPR:
      if (negate_expr_p (t))
	return fold_build1_loc (loc, CONJ_EXPR, type,
				fold_negate_expr (loc, TREE_OPERAND (t, 0)));
      break;

    case NEGATE_EXPR:
      return TREE_OPERAND (t, 0);

    case PLUS_EXPR:
      if (!HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type))
	  && !HONOR_SIGNED_ZEROS (TYPE_MODE (type)))
	{
	  /* -(A + B) -> (-B) - A.  */
	  if (negate_expr_p (TREE_OPERAND (t, 1))
	      && reorder_operands_p (TREE_OPERAND (t, 0),
				     TREE_OPERAND (t, 1)))
	    {
	      tem = negate_expr (TREE_OPERAND (t, 1));
	      return fold_build2_loc (loc, MINUS_EXPR, type,
				      tem, TREE_OPERAND (t, 0));
	    }

	  /* -(A + B) -> (-A) - B.  */
	  if (negate_expr_p (TREE_OPERAND (t, 0)))
	    {
	      tem = negate_expr (TREE_OPERAND (t, 0));
	      return fold_build2_loc (loc, MINUS_EXPR, type,
				      tem, TREE_OPERAND (t, 1));
	    }
	}
      break;

    case MINUS_EXPR:
      /* - (A - B) -> B - A  */
      if (!HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type))
	  && !HONOR_SIGNED_ZEROS (TYPE_MODE (type))
	  && reorder_operands_p (TREE_OPERAND (t, 0), TREE_OPERAND (t, 1)))
	return fold_build2_loc (loc, MINUS_EXPR, type,
				TREE_OPERAND (t, 1), TREE_OPERAND (t, 0));
      break;

    case MULT_EXPR:
      if (TYPE_UNSIGNED (type))
	break;

      /* Fall through.  */

    case RDIV_EXPR:
      if (! HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type)))
	{
	  tem = TREE_OPERAND (t, 1);
	  if (negate_expr_p (tem))
	    return fold_build2_loc (loc, TREE_CODE (t), type,
				    TREE_OPERAND (t, 0), negate_expr (tem));
	  tem = TREE_OPERAND (t, 0);
	  if (negate_expr_p (tem))
	    return fold_build2_loc (loc, TREE_CODE (t), type,
				    negate_expr (tem), TREE_OPERAND (t, 1));
	}
      break;

    case TRUNC_DIV_EXPR:
    case ROUND_DIV_EXPR:
    case FLOOR_DIV_EXPR:
    case CEIL_DIV_EXPR:
    case EXACT_DIV_EXPR:
      /* In general we can't negate A / B, because if A is INT_MIN and
	 B is 1, we may turn this into INT_MIN / -1 which is undefined
	 and actually traps on some architectures.  But if overflow is
	 undefined, we can negate, because - (INT_MIN / 1) is an
	 overflow.  */
      if (!INTEGRAL_TYPE_P (type) || TYPE_OVERFLOW_UNDEFINED (type))
	{
	  const char * const warnmsg = G_("assuming signed overflow does not "
					  "occur when negating a division");
	  tem = TREE_OPERAND (t, 1);
	  if (negate_expr_p (tem))
	    {
	      if (INTEGRAL_TYPE_P (type)
		  && (TREE_CODE (tem) != INTEGER_CST
		      || integer_onep (tem)))
		fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_MISC);
	      return fold_build2_loc (loc, TREE_CODE (t), type,
				      TREE_OPERAND (t, 0), negate_expr (tem));
	    }
	  tem = TREE_OPERAND (t, 0);
	  if (negate_expr_p (tem))
	    {
	      if (INTEGRAL_TYPE_P (type)
		  && (TREE_CODE (tem) != INTEGER_CST
		      || tree_int_cst_equal (tem, TYPE_MIN_VALUE (type))))
		fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_MISC);
	      return fold_build2_loc (loc, TREE_CODE (t), type,
				      negate_expr (tem), TREE_OPERAND (t, 1));
	    }
	}
      break;

    case NOP_EXPR:
      /* Convert -((double)float) into (double)(-float).  */
      if (TREE_CODE (type) == REAL_TYPE)
	{
	  tem = strip_float_extensions (t);
	  if (tem != t && negate_expr_p (tem))
	    return fold_convert_loc (loc, type, negate_expr (tem));
	}
      break;

    case CALL_EXPR:
      /* Negate -f(x) as f(-x).  */
      if (negate_mathfn_p (builtin_mathfn_code (t))
	  && negate_expr_p (CALL_EXPR_ARG (t, 0)))
	{
	  tree fndecl, arg;

	  fndecl = get_callee_fndecl (t);
	  arg = negate_expr (CALL_EXPR_ARG (t, 0));
	  return build_call_expr_loc (loc, fndecl, 1, arg);
	}
      break;

    case RSHIFT_EXPR:
      /* Optimize -((int)x >> 31) into (unsigned)x >> 31.  */
      if (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST)
	{
	  tree op1 = TREE_OPERAND (t, 1);
	  if (TREE_INT_CST_HIGH (op1) == 0
	      && (unsigned HOST_WIDE_INT) (TYPE_PRECISION (type) - 1)
		 == TREE_INT_CST_LOW (op1))
	    {
	      tree ntype = TYPE_UNSIGNED (type)
			   ? signed_type_for (type)
			   : unsigned_type_for (type);
	      tree temp = fold_convert_loc (loc, ntype, TREE_OPERAND (t, 0));
	      temp = fold_build2_loc (loc, RSHIFT_EXPR, ntype, temp, op1);
	      return fold_convert_loc (loc, type, temp);
	    }
	}
      break;

    default:
      break;
    }

  return NULL_TREE;
}

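/* For instance, given int operands, fold_negate_expr rewrites
   - (a - b) as b - a, - (~a) as a + 1, and - ((int) x >> 31) as
   (int) ((unsigned) x >> 31); each rewrite avoids introducing a
   separate NEGATE_EXPR node.  */
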
/* Like fold_negate_expr, but return a NEGATE_EXPR tree if T cannot be
   negated in a simpler way.  Also allow for T to be NULL_TREE, in which
   case return NULL_TREE.  */

static tree
negate_expr (tree t)
{
  tree type, tem;
  location_t loc;

  if (t == NULL_TREE)
    return NULL_TREE;

  loc = EXPR_LOCATION (t);
  type = TREE_TYPE (t);
  STRIP_SIGN_NOPS (t);

  tem = fold_negate_expr (loc, t);
  if (!tem)
    tem = build1_loc (loc, NEGATE_EXPR, TREE_TYPE (t), t);
  return fold_convert_loc (loc, type, tem);
}

/* Split a tree IN into constant, literal and variable parts that could be
   combined with CODE to make IN.  "constant" means an expression with
   TREE_CONSTANT but that isn't an actual constant.  CODE must be a
   commutative arithmetic operation.  Store the constant part into *CONP,
   the literal in *LITP and return the variable part.  If a part isn't
   present, set it to null.  If the tree does not decompose in this way,
   return the entire tree as the variable part and the other parts as null.

   If CODE is PLUS_EXPR we also split trees that use MINUS_EXPR.  In that
   case, we negate an operand that was subtracted.  Except if it is a
   literal for which we use *MINUS_LITP instead.

   If NEGATE_P is true, we are negating all of IN, again except a literal
   for which we use *MINUS_LITP instead.

   If IN is itself a literal or constant, return it as appropriate.

   Note that we do not guarantee that any of the three values will be the
   same type as IN, but they will have the same signedness and mode.  */

static tree
split_tree (tree in, enum tree_code code, tree *conp, tree *litp,
	    tree *minus_litp, int negate_p)
{
  tree var = 0;

  *conp = 0;
  *litp = 0;
  *minus_litp = 0;

  /* Strip any conversions that don't change the machine mode or signedness.  */
  STRIP_SIGN_NOPS (in);

  if (TREE_CODE (in) == INTEGER_CST || TREE_CODE (in) == REAL_CST
      || TREE_CODE (in) == FIXED_CST)
    *litp = in;
  else if (TREE_CODE (in) == code
	   || ((! FLOAT_TYPE_P (TREE_TYPE (in)) || flag_associative_math)
	       && ! SAT_FIXED_POINT_TYPE_P (TREE_TYPE (in))
	       /* We can associate addition and subtraction together (even
		  though the C standard doesn't say so) for integers because
		  the value is not affected.  For reals, the value might be
		  affected, so we can't.  */
	       && ((code == PLUS_EXPR && TREE_CODE (in) == MINUS_EXPR)
		   || (code == MINUS_EXPR && TREE_CODE (in) == PLUS_EXPR))))
    {
      tree op0 = TREE_OPERAND (in, 0);
      tree op1 = TREE_OPERAND (in, 1);
      int neg1_p = TREE_CODE (in) == MINUS_EXPR;
      int neg_litp_p = 0, neg_conp_p = 0, neg_var_p = 0;

      /* First see if either of the operands is a literal, then a constant.  */
      if (TREE_CODE (op0) == INTEGER_CST || TREE_CODE (op0) == REAL_CST
	  || TREE_CODE (op0) == FIXED_CST)
	*litp = op0, op0 = 0;
      else if (TREE_CODE (op1) == INTEGER_CST || TREE_CODE (op1) == REAL_CST
	       || TREE_CODE (op1) == FIXED_CST)
	*litp = op1, neg_litp_p = neg1_p, op1 = 0;

      if (op0 != 0 && TREE_CONSTANT (op0))
	*conp = op0, op0 = 0;
      else if (op1 != 0 && TREE_CONSTANT (op1))
	*conp = op1, neg_conp_p = neg1_p, op1 = 0;

      /* If we haven't dealt with either operand, this is not a case we can
	 decompose.  Otherwise, VAR is either of the ones remaining, if any.  */
      if (op0 != 0 && op1 != 0)
	var = in;
      else if (op0 != 0)
	var = op0;
      else
	var = op1, neg_var_p = neg1_p;

      /* Now do any needed negations.  */
      if (neg_litp_p)
	*minus_litp = *litp, *litp = 0;
      if (neg_conp_p)
	*conp = negate_expr (*conp);
      if (neg_var_p)
	var = negate_expr (var);
    }
  else if (TREE_CONSTANT (in))
    *conp = in;
  else
    var = in;

  if (negate_p)
    {
      if (*litp)
	*minus_litp = *litp, *litp = 0;
      else if (*minus_litp)
	*litp = *minus_litp, *minus_litp = 0;
      *conp = negate_expr (*conp);
      var = negate_expr (var);
    }

  return var;
}

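/* For example, splitting x - 3 with CODE == PLUS_EXPR and NEGATE_P == 0
   returns x as the variable part, sets *MINUS_LITP to 3, and leaves
   *CONP and *LITP null; associate_trees below can then recombine the
   pieces after other folding has taken place.  */
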
/* Re-associate trees split by the above function.  T1 and T2 are
   either expressions to associate or null.  Return the new
   expression, if any.  LOC is the location of the new expression.  If
   we build an operation, do it in TYPE and with CODE.  */

static tree
associate_trees (location_t loc, tree t1, tree t2, enum tree_code code, tree type)
{
  if (t1 == 0)
    return t2;
  else if (t2 == 0)
    return t1;

  /* If either input is CODE, a PLUS_EXPR, or a MINUS_EXPR, don't
     try to fold this since we will have infinite recursion.  But do
     deal with any NEGATE_EXPRs.  */
  if (TREE_CODE (t1) == code || TREE_CODE (t2) == code
      || TREE_CODE (t1) == MINUS_EXPR || TREE_CODE (t2) == MINUS_EXPR)
    {
      if (code == PLUS_EXPR)
	{
	  if (TREE_CODE (t1) == NEGATE_EXPR)
	    return build2_loc (loc, MINUS_EXPR, type,
			       fold_convert_loc (loc, type, t2),
			       fold_convert_loc (loc, type,
						 TREE_OPERAND (t1, 0)));
	  else if (TREE_CODE (t2) == NEGATE_EXPR)
	    return build2_loc (loc, MINUS_EXPR, type,
			       fold_convert_loc (loc, type, t1),
			       fold_convert_loc (loc, type,
						 TREE_OPERAND (t2, 0)));
	  else if (integer_zerop (t2))
	    return fold_convert_loc (loc, type, t1);
	}
      else if (code == MINUS_EXPR)
	{
	  if (integer_zerop (t2))
	    return fold_convert_loc (loc, type, t1);
	}

      return build2_loc (loc, code, type, fold_convert_loc (loc, type, t1),
			 fold_convert_loc (loc, type, t2));
    }

  return fold_build2_loc (loc, code, type, fold_convert_loc (loc, type, t1),
			  fold_convert_loc (loc, type, t2));
}

/* Check whether TYPE1 and TYPE2 are equivalent integer types, suitable
   for use in int_const_binop, size_binop and size_diffop.  */

static bool
int_binop_types_match_p (enum tree_code code, const_tree type1, const_tree type2)
{
  if (TREE_CODE (type1) != INTEGER_TYPE && !POINTER_TYPE_P (type1))
    return false;
  if (TREE_CODE (type2) != INTEGER_TYPE && !POINTER_TYPE_P (type2))
    return false;

  switch (code)
    {
    case LSHIFT_EXPR:
    case RSHIFT_EXPR:
    case LROTATE_EXPR:
    case RROTATE_EXPR:
      return true;

    default:
      break;
    }

  return TYPE_UNSIGNED (type1) == TYPE_UNSIGNED (type2)
	 && TYPE_PRECISION (type1) == TYPE_PRECISION (type2)
	 && TYPE_MODE (type1) == TYPE_MODE (type2);
}

/* Combine two integer constants ARG1 and ARG2 under operation CODE
   to produce a new constant.  Return NULL_TREE if we don't know how
   to evaluate CODE at compile-time.  */

static tree
int_const_binop_1 (enum tree_code code, const_tree arg1, const_tree arg2,
		   int overflowable)
{
  double_int op1, op2, res, tmp;
  tree t;
  tree type = TREE_TYPE (arg1);
  bool uns = TYPE_UNSIGNED (type);
  bool overflow = false;

  op1 = tree_to_double_int (arg1);
  op2 = tree_to_double_int (arg2);

  switch (code)
    {
    case BIT_IOR_EXPR:
      res = double_int_ior (op1, op2);
      break;

    case BIT_XOR_EXPR:
      res = double_int_xor (op1, op2);
      break;

    case BIT_AND_EXPR:
      res = double_int_and (op1, op2);
      break;

    case RSHIFT_EXPR:
      res = double_int_rshift (op1, double_int_to_shwi (op2),
			       TYPE_PRECISION (type), !uns);
      break;

    case LSHIFT_EXPR:
      /* It's unclear from the C standard whether shifts can overflow.
	 The following code ignores overflow; perhaps a C standard
	 interpretation ruling is needed.  */
      res = double_int_lshift (op1, double_int_to_shwi (op2),
			       TYPE_PRECISION (type), !uns);
      break;

    case RROTATE_EXPR:
      res = double_int_rrotate (op1, double_int_to_shwi (op2),
				TYPE_PRECISION (type));
      break;

    case LROTATE_EXPR:
      res = double_int_lrotate (op1, double_int_to_shwi (op2),
				TYPE_PRECISION (type));
      break;

    case PLUS_EXPR:
      overflow = add_double (op1.low, op1.high, op2.low, op2.high,
			     &res.low, &res.high);
      break;

    case MINUS_EXPR:
      neg_double (op2.low, op2.high, &res.low, &res.high);
      add_double (op1.low, op1.high, res.low, res.high,
		  &res.low, &res.high);
      overflow = OVERFLOW_SUM_SIGN (res.high, op2.high, op1.high);
      break;

    case MULT_EXPR:
      overflow = mul_double (op1.low, op1.high, op2.low, op2.high,
			     &res.low, &res.high);
      break;

    case MULT_HIGHPART_EXPR:
      /* ??? Need quad precision, or an additional shift operand
	 to the multiply primitive, to handle very large highparts.  */
      if (TYPE_PRECISION (type) > HOST_BITS_PER_WIDE_INT)
	return NULL_TREE;
      tmp = double_int_mul (op1, op2);
      res = double_int_rshift (tmp, TYPE_PRECISION (type),
			       TYPE_PRECISION (type), !uns);
      break;

    case TRUNC_DIV_EXPR:
    case FLOOR_DIV_EXPR: case CEIL_DIV_EXPR:
    case EXACT_DIV_EXPR:
      /* This is a shortcut for a common special case.  */
      if (op2.high == 0 && (HOST_WIDE_INT) op2.low > 0
	  && !TREE_OVERFLOW (arg1)
	  && !TREE_OVERFLOW (arg2)
	  && op1.high == 0 && (HOST_WIDE_INT) op1.low >= 0)
	{
	  if (code == CEIL_DIV_EXPR)
	    op1.low += op2.low - 1;

	  res.low = op1.low / op2.low, res.high = 0;
	  break;
	}

      /* ... fall through ...  */

    case ROUND_DIV_EXPR:
      if (double_int_zero_p (op2))
	return NULL_TREE;
      if (double_int_one_p (op2))
	{
	  res = op1;
	  break;
	}
      if (double_int_equal_p (op1, op2)
	  && ! double_int_zero_p (op1))
	{
	  res = double_int_one;
	  break;
	}
      overflow = div_and_round_double (code, uns,
				       op1.low, op1.high, op2.low, op2.high,
				       &res.low, &res.high,
				       &tmp.low, &tmp.high);
      break;

    case TRUNC_MOD_EXPR:
    case FLOOR_MOD_EXPR: case CEIL_MOD_EXPR:
      /* This is a shortcut for a common special case.  */
      if (op2.high == 0 && (HOST_WIDE_INT) op2.low > 0
	  && !TREE_OVERFLOW (arg1)
	  && !TREE_OVERFLOW (arg2)
	  && op1.high == 0 && (HOST_WIDE_INT) op1.low >= 0)
	{
	  if (code == CEIL_MOD_EXPR)
	    op1.low += op2.low - 1;
	  res.low = op1.low % op2.low, res.high = 0;
	  break;
	}

      /* ... fall through ...  */

    case ROUND_MOD_EXPR:
      if (double_int_zero_p (op2))
	return NULL_TREE;
      overflow = div_and_round_double (code, uns,
				       op1.low, op1.high, op2.low, op2.high,
				       &tmp.low, &tmp.high,
				       &res.low, &res.high);
      break;

    case MIN_EXPR:
      res = double_int_min (op1, op2, uns);
      break;

    case MAX_EXPR:
      res = double_int_max (op1, op2, uns);
      break;

    default:
      return NULL_TREE;
    }

  t = force_fit_type_double (TREE_TYPE (arg1), res, overflowable,
			     (!uns && overflow)
			     | TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2));

  return t;
}

tree
int_const_binop (enum tree_code code, const_tree arg1, const_tree arg2)
{
  return int_const_binop_1 (code, arg1, arg2, 1);
}

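/* A minimal usage sketch, assuming the operands are INTEGER_CSTs of
   the same type:

     tree four = build_int_cst (integer_type_node, 4);
     tree five = build_int_cst (integer_type_node, 5);
     tree nine = int_const_binop (PLUS_EXPR, four, five);

   NINE is then an INTEGER_CST with TREE_INT_CST_LOW (nine) == 9.  When
   a signed operation wraps, force_fit_type_double instead marks the
   result with TREE_OVERFLOW.  */
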
/* Combine two constants ARG1 and ARG2 under operation CODE to produce a new
   constant.  We assume ARG1 and ARG2 have the same data type, or at least
   are the same kind of constant and the same machine mode.  Return zero if
   combining the constants is not allowed in the current operating mode.  */

static tree
const_binop (enum tree_code code, tree arg1, tree arg2)
{
  /* Sanity check for the recursive cases.  */
  if (!arg1 || !arg2)
    return NULL_TREE;

  STRIP_NOPS (arg1);
  STRIP_NOPS (arg2);

  if (TREE_CODE (arg1) == INTEGER_CST)
    return int_const_binop (code, arg1, arg2);

  if (TREE_CODE (arg1) == REAL_CST)
    {
      enum machine_mode mode;
      REAL_VALUE_TYPE d1;
      REAL_VALUE_TYPE d2;
      REAL_VALUE_TYPE value;
      REAL_VALUE_TYPE result;
      bool inexact;
      tree t, type;

      /* The following codes are handled by real_arithmetic.  */
      switch (code)
	{
	case PLUS_EXPR:
	case MINUS_EXPR:
	case MULT_EXPR:
	case RDIV_EXPR:
	case MIN_EXPR:
	case MAX_EXPR:
	  break;

	default:
	  return NULL_TREE;
	}

      d1 = TREE_REAL_CST (arg1);
      d2 = TREE_REAL_CST (arg2);

      type = TREE_TYPE (arg1);
      mode = TYPE_MODE (type);

      /* Don't perform operation if we honor signaling NaNs and
	 either operand is a NaN.  */
      if (HONOR_SNANS (mode)
	  && (REAL_VALUE_ISNAN (d1) || REAL_VALUE_ISNAN (d2)))
	return NULL_TREE;

      /* Don't perform operation if it would raise a division
	 by zero exception.  */
      if (code == RDIV_EXPR
	  && REAL_VALUES_EQUAL (d2, dconst0)
	  && (flag_trapping_math || ! MODE_HAS_INFINITIES (mode)))
	return NULL_TREE;

      /* If either operand is a NaN, just return it.  Otherwise, set up
	 for floating-point trap; we return an overflow.  */
      if (REAL_VALUE_ISNAN (d1))
	return arg1;
      else if (REAL_VALUE_ISNAN (d2))
	return arg2;

      inexact = real_arithmetic (&value, code, &d1, &d2);
      real_convert (&result, mode, &value);

      /* Don't constant fold this floating point operation if
	 the result has overflowed and flag_trapping_math.  */
      if (flag_trapping_math
	  && MODE_HAS_INFINITIES (mode)
	  && REAL_VALUE_ISINF (result)
	  && !REAL_VALUE_ISINF (d1)
	  && !REAL_VALUE_ISINF (d2))
	return NULL_TREE;

      /* Don't constant fold this floating point operation if the
	 result may depend upon the run-time rounding mode and
	 flag_rounding_math is set, or if GCC's software emulation
	 is unable to accurately represent the result.  */
      if ((flag_rounding_math
	   || (MODE_COMPOSITE_P (mode) && !flag_unsafe_math_optimizations))
	  && (inexact || !real_identical (&result, &value)))
	return NULL_TREE;

      t = build_real (type, result);

      TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2);
      return t;
    }

  if (TREE_CODE (arg1) == FIXED_CST)
    {
      FIXED_VALUE_TYPE f1;
      FIXED_VALUE_TYPE f2;
      FIXED_VALUE_TYPE result;
      tree t, type;
      int sat_p;
      bool overflow_p;

      /* The following codes are handled by fixed_arithmetic.  */
      switch (code)
	{
	case PLUS_EXPR:
	case MINUS_EXPR:
	case MULT_EXPR:
	case TRUNC_DIV_EXPR:
	  f2 = TREE_FIXED_CST (arg2);
	  break;

	case LSHIFT_EXPR:
	case RSHIFT_EXPR:
	  f2.data.high = TREE_INT_CST_HIGH (arg2);
	  f2.data.low = TREE_INT_CST_LOW (arg2);
	  f2.mode = SImode;
	  break;

	default:
	  return NULL_TREE;
	}

      f1 = TREE_FIXED_CST (arg1);
      type = TREE_TYPE (arg1);
      sat_p = TYPE_SATURATING (type);
      overflow_p = fixed_arithmetic (&result, code, &f1, &f2, sat_p);
      t = build_fixed (type, result);
      /* Propagate overflow flags.  */
      if (overflow_p | TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2))
	TREE_OVERFLOW (t) = 1;
      return t;
    }

  if (TREE_CODE (arg1) == COMPLEX_CST)
    {
      tree type = TREE_TYPE (arg1);
      tree r1 = TREE_REALPART (arg1);
      tree i1 = TREE_IMAGPART (arg1);
      tree r2 = TREE_REALPART (arg2);
      tree i2 = TREE_IMAGPART (arg2);
      tree real, imag;

      switch (code)
	{
	case PLUS_EXPR:
	case MINUS_EXPR:
	  real = const_binop (code, r1, r2);
	  imag = const_binop (code, i1, i2);
	  break;

	case MULT_EXPR:
	  if (COMPLEX_FLOAT_TYPE_P (type))
	    return do_mpc_arg2 (arg1, arg2, type,
				/* do_nonfinite= */ folding_initializer,
				mpc_mul);

	  real = const_binop (MINUS_EXPR,
			      const_binop (MULT_EXPR, r1, r2),
			      const_binop (MULT_EXPR, i1, i2));
	  imag = const_binop (PLUS_EXPR,
			      const_binop (MULT_EXPR, r1, i2),
			      const_binop (MULT_EXPR, i1, r2));
	  break;

	case RDIV_EXPR:
	  if (COMPLEX_FLOAT_TYPE_P (type))
	    return do_mpc_arg2 (arg1, arg2, type,
				/* do_nonfinite= */ folding_initializer,
				mpc_div);
	  /* Fallthru ... */
	case TRUNC_DIV_EXPR:
	case CEIL_DIV_EXPR:
	case FLOOR_DIV_EXPR:
	case ROUND_DIV_EXPR:
	  if (flag_complex_method == 0)
	    {
	      /* Keep this algorithm in sync with
		 tree-complex.c:expand_complex_div_straight().

		 Expand complex division to scalars, straightforward algorithm.
		 a / b = ((ar*br + ai*bi)/t) + i((ai*br - ar*bi)/t)
		 t = br*br + bi*bi  */
	      tree magsquared
		= const_binop (PLUS_EXPR,
			       const_binop (MULT_EXPR, r2, r2),
			       const_binop (MULT_EXPR, i2, i2));
	      tree t1
		= const_binop (PLUS_EXPR,
			       const_binop (MULT_EXPR, r1, r2),
			       const_binop (MULT_EXPR, i1, i2));
	      tree t2
		= const_binop (MINUS_EXPR,
			       const_binop (MULT_EXPR, i1, r2),
			       const_binop (MULT_EXPR, r1, i2));

	      real = const_binop (code, t1, magsquared);
	      imag = const_binop (code, t2, magsquared);
	    }
	  else
	    {
	      /* Keep this algorithm in sync with
		 tree-complex.c:expand_complex_div_wide().

		 Expand complex division to scalars, modified algorithm to
		 minimize overflow with wide input ranges.  */
	      tree compare = fold_build2 (LT_EXPR, boolean_type_node,
					  fold_abs_const (r2, TREE_TYPE (type)),
					  fold_abs_const (i2, TREE_TYPE (type)));

	      if (integer_nonzerop (compare))
		{
		  /* In the TRUE branch, we compute
		     ratio = br/bi;
		     div = (br * ratio) + bi;
		     tr = (ar * ratio) + ai;
		     ti = (ai * ratio) - ar;
		     tr = tr / div;
		     ti = ti / div;  */
		  tree ratio = const_binop (code, r2, i2);
		  tree div = const_binop (PLUS_EXPR, i2,
					  const_binop (MULT_EXPR, r2, ratio));
		  real = const_binop (MULT_EXPR, r1, ratio);
		  real = const_binop (PLUS_EXPR, real, i1);
		  real = const_binop (code, real, div);

		  imag = const_binop (MULT_EXPR, i1, ratio);
		  imag = const_binop (MINUS_EXPR, imag, r1);
		  imag = const_binop (code, imag, div);
		}
	      else
		{
		  /* In the FALSE branch, we compute
		     ratio = d/c;
		     divisor = (d * ratio) + c;
		     tr = (b * ratio) + a;
		     ti = b - (a * ratio);
		     tr = tr / div;
		     ti = ti / div;  */
		  tree ratio = const_binop (code, i2, r2);
		  tree div = const_binop (PLUS_EXPR, r2,
					  const_binop (MULT_EXPR, i2, ratio));

		  real = const_binop (MULT_EXPR, i1, ratio);
		  real = const_binop (PLUS_EXPR, real, r1);
		  real = const_binop (code, real, div);

		  imag = const_binop (MULT_EXPR, r1, ratio);
		  imag = const_binop (MINUS_EXPR, i1, imag);
		  imag = const_binop (code, imag, div);
		}
	    }
	  break;

	default:
	  return NULL_TREE;
	}

      if (real && imag)
	return build_complex (type, real, imag);
    }

  if (TREE_CODE (arg1) == VECTOR_CST
      && TREE_CODE (arg2) == VECTOR_CST)
    {
      tree type = TREE_TYPE (arg1);
      int count = TYPE_VECTOR_SUBPARTS (type), i;
      tree *elts = XALLOCAVEC (tree, count);

      for (i = 0; i < count; i++)
	{
	  tree elem1 = VECTOR_CST_ELT (arg1, i);
	  tree elem2 = VECTOR_CST_ELT (arg2, i);

	  elts[i] = const_binop (code, elem1, elem2);

	  /* It is possible that const_binop cannot handle the given
	     code and returns NULL_TREE.  */
	  if (elts[i] == NULL_TREE)
	    return NULL_TREE;
	}

      return build_vector (type, elts);
    }

  return NULL_TREE;
}

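/* As a check on the component formulas above: for integral complex
   constants, (1 + 2i) * (3 + 4i) folds via
     real = 1*3 - 2*4 = -5
     imag = 1*4 + 2*3 = 10
   to the COMPLEX_CST -5 + 10i.  Floating-point complex constants take
   the do_mpc_arg2 path instead.  */
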
/* Create a size type INT_CST node with NUMBER sign extended.  KIND
   indicates which particular sizetype to create.  */

tree
size_int_kind (HOST_WIDE_INT number, enum size_type_kind kind)
{
  return build_int_cst (sizetype_tab[(int) kind], number);
}

/* Combine operands ARG0 and ARG1 with arithmetic operation CODE.  CODE
   is a tree code.  The type of the result is taken from the operands.
   Both must be equivalent integer types, a la int_binop_types_match_p.
   If the operands are constant, so is the result.  */

tree
size_binop_loc (location_t loc, enum tree_code code, tree arg0, tree arg1)
{
  tree type = TREE_TYPE (arg0);

  if (arg0 == error_mark_node || arg1 == error_mark_node)
    return error_mark_node;

  gcc_assert (int_binop_types_match_p (code, TREE_TYPE (arg0),
				       TREE_TYPE (arg1)));

  /* Handle the special case of two integer constants faster.  */
  if (TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
    {
      /* And some specific cases even faster than that.  */
      if (code == PLUS_EXPR)
	{
	  if (integer_zerop (arg0) && !TREE_OVERFLOW (arg0))
	    return arg1;
	  if (integer_zerop (arg1) && !TREE_OVERFLOW (arg1))
	    return arg0;
	}
      else if (code == MINUS_EXPR)
	{
	  if (integer_zerop (arg1) && !TREE_OVERFLOW (arg1))
	    return arg0;
	}
      else if (code == MULT_EXPR)
	{
	  if (integer_onep (arg0) && !TREE_OVERFLOW (arg0))
	    return arg1;
	}

      /* Handle general case of two integer constants.  For sizetype
	 constant calculations we always want to know about overflow,
	 even in the unsigned case.  */
      return int_const_binop_1 (code, arg0, arg1, -1);
    }

  return fold_build2_loc (loc, code, type, arg0, arg1);
}

/* Given two values, either both of sizetype or both of bitsizetype,
   compute the difference between the two values.  Return the value
   in signed type corresponding to the type of the operands.  */

tree
size_diffop_loc (location_t loc, tree arg0, tree arg1)
{
  tree type = TREE_TYPE (arg0);
  tree ctype;

  gcc_assert (int_binop_types_match_p (MINUS_EXPR, TREE_TYPE (arg0),
				       TREE_TYPE (arg1)));

  /* If the type is already signed, just do the simple thing.  */
  if (!TYPE_UNSIGNED (type))
    return size_binop_loc (loc, MINUS_EXPR, arg0, arg1);

  if (type == sizetype)
    ctype = ssizetype;
  else if (type == bitsizetype)
    ctype = sbitsizetype;
  else
    ctype = signed_type_for (type);

  /* If either operand is not a constant, do the conversions to the signed
     type and subtract.  The hardware will do the right thing with any
     overflow in the subtraction.  */
  if (TREE_CODE (arg0) != INTEGER_CST || TREE_CODE (arg1) != INTEGER_CST)
    return size_binop_loc (loc, MINUS_EXPR,
			   fold_convert_loc (loc, ctype, arg0),
			   fold_convert_loc (loc, ctype, arg1));

  /* If ARG0 is larger than ARG1, subtract and return the result in CTYPE.
     Otherwise, subtract the other way, convert to CTYPE (we know that can't
     overflow) and negate (which can't either).  Special-case a result
     of zero while we're here.  */
  if (tree_int_cst_equal (arg0, arg1))
    return build_int_cst (ctype, 0);
  else if (tree_int_cst_lt (arg1, arg0))
    return fold_convert_loc (loc, ctype,
			     size_binop_loc (loc, MINUS_EXPR, arg0, arg1));
  else
    return size_binop_loc (loc, MINUS_EXPR, build_int_cst (ctype, 0),
			   fold_convert_loc (loc, ctype,
					     size_binop_loc (loc,
							     MINUS_EXPR,
							     arg1, arg0)));
}

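/* For example, with sizetype constants 5 and 7, the final branch above
   computes 0 - (ssizetype) (7 - 5) and yields the ssizetype constant
   -2; subtracting in the unsigned sizetype first keeps the
   intermediate difference from wrapping before the negation.  */
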
/* A subroutine of fold_convert_const handling conversions of an
   INTEGER_CST to another integer type.  */

static tree
fold_convert_const_int_from_int (tree type, const_tree arg1)
{
  tree t;

  /* Given an integer constant, make new constant with new type,
     appropriately sign-extended or truncated.  */
  t = force_fit_type_double (type, tree_to_double_int (arg1),
			     !POINTER_TYPE_P (TREE_TYPE (arg1)),
			     (TREE_INT_CST_HIGH (arg1) < 0
			      && (TYPE_UNSIGNED (type)
				  < TYPE_UNSIGNED (TREE_TYPE (arg1))))
			     | TREE_OVERFLOW (arg1));

  return t;
}

/* A subroutine of fold_convert_const handling conversions of a REAL_CST
   to an integer type.  */

static tree
fold_convert_const_int_from_real (enum tree_code code, tree type, const_tree arg1)
{
  int overflow = 0;
  tree t;

  /* The following code implements the floating point to integer
     conversion rules required by the Java Language Specification,
     that IEEE NaNs are mapped to zero and values that overflow
     the target precision saturate, i.e. values greater than
     INT_MAX are mapped to INT_MAX, and values less than INT_MIN
     are mapped to INT_MIN.  These semantics are allowed by the
     C and C++ standards that simply state that the behavior of
     FP-to-integer conversion is unspecified upon overflow.  */

  double_int val;
  REAL_VALUE_TYPE r;
  REAL_VALUE_TYPE x = TREE_REAL_CST (arg1);

  switch (code)
    {
    case FIX_TRUNC_EXPR:
      real_trunc (&r, VOIDmode, &x);
      break;

    default:
      gcc_unreachable ();
    }

  /* If R is NaN, return zero and show we have an overflow.  */
  if (REAL_VALUE_ISNAN (r))
    {
      overflow = 1;
      val = double_int_zero;
    }

  /* See if R is less than the lower bound or greater than the
     upper bound.  */

  if (! overflow)
    {
      tree lt = TYPE_MIN_VALUE (type);
      REAL_VALUE_TYPE l = real_value_from_int_cst (NULL_TREE, lt);
      if (REAL_VALUES_LESS (r, l))
	{
	  overflow = 1;
	  val = tree_to_double_int (lt);
	}
    }

  if (! overflow)
    {
      tree ut = TYPE_MAX_VALUE (type);
      if (ut)
	{
	  REAL_VALUE_TYPE u = real_value_from_int_cst (NULL_TREE, ut);
	  if (REAL_VALUES_LESS (u, r))
	    {
	      overflow = 1;
	      val = tree_to_double_int (ut);
	    }
	}
    }

  if (! overflow)
    real_to_integer2 ((HOST_WIDE_INT *) &val.low, &val.high, &r);

  t = force_fit_type_double (type, val, -1, overflow | TREE_OVERFLOW (arg1));
  return t;
}

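/* Concretely, converting the REAL_CST 1e30 to a 32-bit int saturates
   at INT_MAX and sets TREE_OVERFLOW on the result, converting -1e30
   saturates at INT_MIN, and converting a NaN yields zero; in each case
   the overflow flag lets callers decide whether to warn.  */
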
/* A subroutine of fold_convert_const handling conversions of a
   FIXED_CST to an integer type.  */

static tree
fold_convert_const_int_from_fixed (tree type, const_tree arg1)
{
  tree t;
  double_int temp, temp_trunc;
  unsigned int mode;

  /* Right shift FIXED_CST to temp by fbit.  */
  temp = TREE_FIXED_CST (arg1).data;
  mode = TREE_FIXED_CST (arg1).mode;
  if (GET_MODE_FBIT (mode) < HOST_BITS_PER_DOUBLE_INT)
    {
      temp = double_int_rshift (temp, GET_MODE_FBIT (mode),
				HOST_BITS_PER_DOUBLE_INT,
				SIGNED_FIXED_POINT_MODE_P (mode));

      /* Left shift temp to temp_trunc by fbit.  */
      temp_trunc = double_int_lshift (temp, GET_MODE_FBIT (mode),
				      HOST_BITS_PER_DOUBLE_INT,
				      SIGNED_FIXED_POINT_MODE_P (mode));
    }
  else
    {
      temp = double_int_zero;
      temp_trunc = double_int_zero;
    }

  /* If FIXED_CST is negative, we need to round the value toward 0:
     if any fractional bits were shifted out, add 1 to TEMP.  */
  if (SIGNED_FIXED_POINT_MODE_P (mode)
      && double_int_negative_p (temp_trunc)
      && !double_int_equal_p (TREE_FIXED_CST (arg1).data, temp_trunc))
    temp = double_int_add (temp, double_int_one);

  /* Given a fixed-point constant, make new constant with new type,
     appropriately sign-extended or truncated.  */
  t = force_fit_type_double (type, temp, -1,
			     (double_int_negative_p (temp)
			      && (TYPE_UNSIGNED (type)
				  < TYPE_UNSIGNED (TREE_TYPE (arg1))))
			     | TREE_OVERFLOW (arg1));

  return t;
}

/* A subroutine of fold_convert_const handling conversions of a REAL_CST
   to another floating point type.  */

static tree
fold_convert_const_real_from_real (tree type, const_tree arg1)
{
  REAL_VALUE_TYPE value;
  tree t;

  real_convert (&value, TYPE_MODE (type), &TREE_REAL_CST (arg1));
  t = build_real (type, value);

  /* If converting an infinity or NAN to a representation that doesn't
     have one, set the overflow bit so that we can produce some kind of
     error message at the appropriate point if necessary.  It's not the
     most user-friendly message, but it's better than nothing.  */
  if (REAL_VALUE_ISINF (TREE_REAL_CST (arg1))
      && !MODE_HAS_INFINITIES (TYPE_MODE (type)))
    TREE_OVERFLOW (t) = 1;
  else if (REAL_VALUE_ISNAN (TREE_REAL_CST (arg1))
	   && !MODE_HAS_NANS (TYPE_MODE (type)))
    TREE_OVERFLOW (t) = 1;
  /* Regular overflow: the conversion produced an infinity in a mode that
     can't represent one.  */
  else if (!MODE_HAS_INFINITIES (TYPE_MODE (type))
	   && REAL_VALUE_ISINF (value)
	   && !REAL_VALUE_ISINF (TREE_REAL_CST (arg1)))
    TREE_OVERFLOW (t) = 1;
  else
    TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1);
  return t;
}

/* A subroutine of fold_convert_const handling conversions of a FIXED_CST
   to a floating point type.  */

static tree
fold_convert_const_real_from_fixed (tree type, const_tree arg1)
{
  REAL_VALUE_TYPE value;
  tree t;

  real_convert_from_fixed (&value, TYPE_MODE (type), &TREE_FIXED_CST (arg1));
  t = build_real (type, value);

  TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1);
  return t;
}

/* A subroutine of fold_convert_const handling conversions of a FIXED_CST
   to another fixed-point type.  */

static tree
fold_convert_const_fixed_from_fixed (tree type, const_tree arg1)
{
  FIXED_VALUE_TYPE value;
  tree t;
  bool overflow_p;

  overflow_p = fixed_convert (&value, TYPE_MODE (type), &TREE_FIXED_CST (arg1),
			      TYPE_SATURATING (type));
  t = build_fixed (type, value);

  /* Propagate overflow flags.  */
  if (overflow_p | TREE_OVERFLOW (arg1))
    TREE_OVERFLOW (t) = 1;
  return t;
}

/* A subroutine of fold_convert_const handling conversions of an INTEGER_CST
   to a fixed-point type.  */

static tree
fold_convert_const_fixed_from_int (tree type, const_tree arg1)
{
  FIXED_VALUE_TYPE value;
  tree t;
  bool overflow_p;

  overflow_p = fixed_convert_from_int (&value, TYPE_MODE (type),
				       TREE_INT_CST (arg1),
				       TYPE_UNSIGNED (TREE_TYPE (arg1)),
				       TYPE_SATURATING (type));
  t = build_fixed (type, value);

  /* Propagate overflow flags.  */
  if (overflow_p | TREE_OVERFLOW (arg1))
    TREE_OVERFLOW (t) = 1;
  return t;
}

/* A subroutine of fold_convert_const handling conversions of a REAL_CST
   to a fixed-point type.  */

static tree
fold_convert_const_fixed_from_real (tree type, const_tree arg1)
{
  FIXED_VALUE_TYPE value;
  tree t;
  bool overflow_p;

  overflow_p = fixed_convert_from_real (&value, TYPE_MODE (type),
					&TREE_REAL_CST (arg1),
					TYPE_SATURATING (type));
  t = build_fixed (type, value);

  /* Propagate overflow flags.  */
  if (overflow_p | TREE_OVERFLOW (arg1))
    TREE_OVERFLOW (t) = 1;
  return t;
}

/* Attempt to fold type conversion operation CODE of expression ARG1 to
   type TYPE.  If no simplification can be done return NULL_TREE.  */

static tree
fold_convert_const (enum tree_code code, tree type, tree arg1)
{
  if (TREE_TYPE (arg1) == type)
    return arg1;

  if (POINTER_TYPE_P (type) || INTEGRAL_TYPE_P (type)
      || TREE_CODE (type) == OFFSET_TYPE)
    {
      if (TREE_CODE (arg1) == INTEGER_CST)
	return fold_convert_const_int_from_int (type, arg1);
      else if (TREE_CODE (arg1) == REAL_CST)
	return fold_convert_const_int_from_real (code, type, arg1);
      else if (TREE_CODE (arg1) == FIXED_CST)
	return fold_convert_const_int_from_fixed (type, arg1);
    }
  else if (TREE_CODE (type) == REAL_TYPE)
    {
      if (TREE_CODE (arg1) == INTEGER_CST)
	return build_real_from_int_cst (type, arg1);
      else if (TREE_CODE (arg1) == REAL_CST)
	return fold_convert_const_real_from_real (type, arg1);
      else if (TREE_CODE (arg1) == FIXED_CST)
	return fold_convert_const_real_from_fixed (type, arg1);
    }
  else if (TREE_CODE (type) == FIXED_POINT_TYPE)
    {
      if (TREE_CODE (arg1) == FIXED_CST)
	return fold_convert_const_fixed_from_fixed (type, arg1);
      else if (TREE_CODE (arg1) == INTEGER_CST)
	return fold_convert_const_fixed_from_int (type, arg1);
      else if (TREE_CODE (arg1) == REAL_CST)
	return fold_convert_const_fixed_from_real (type, arg1);
    }
  return NULL_TREE;
}

/* Construct a vector of zero elements of vector type TYPE.  */

static tree
build_zero_vector (tree type)
{
  tree t;

  t = fold_convert_const (NOP_EXPR, TREE_TYPE (type), integer_zero_node);
  return build_vector_from_val (type, t);
}

/* Returns true if ARG is convertible to TYPE using a NOP_EXPR.  */

bool
fold_convertible_p (const_tree type, const_tree arg)
{
  tree orig = TREE_TYPE (arg);

  if (type == orig)
    return true;

  if (TREE_CODE (arg) == ERROR_MARK
      || TREE_CODE (type) == ERROR_MARK
      || TREE_CODE (orig) == ERROR_MARK)
    return false;

  if (TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (orig))
    return true;

  switch (TREE_CODE (type))
    {
    case INTEGER_TYPE: case ENUMERAL_TYPE: case BOOLEAN_TYPE:
    case POINTER_TYPE: case REFERENCE_TYPE:
    case OFFSET_TYPE:
      if (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig)
	  || TREE_CODE (orig) == OFFSET_TYPE)
	return true;
      return (TREE_CODE (orig) == VECTOR_TYPE
	      && tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (orig)));

    case REAL_TYPE:
    case FIXED_POINT_TYPE:
    case COMPLEX_TYPE:
    case VECTOR_TYPE:
    case VOID_TYPE:
      return TREE_CODE (type) == TREE_CODE (orig);

    default:
      return false;
    }
}

1842 /* Convert expression ARG to type TYPE. Used by the middle-end for
1843 simple conversions in preference to calling the front-end's convert. */
1845 tree
1846 fold_convert_loc (location_t loc, tree type, tree arg)
1848 tree orig = TREE_TYPE (arg);
1849 tree tem;
1851 if (type == orig)
1852 return arg;
1854 if (TREE_CODE (arg) == ERROR_MARK
1855 || TREE_CODE (type) == ERROR_MARK
1856 || TREE_CODE (orig) == ERROR_MARK)
1857 return error_mark_node;
1859 switch (TREE_CODE (type))
1861 case POINTER_TYPE:
1862 case REFERENCE_TYPE:
1863 /* Handle conversions between pointers to different address spaces. */
1864 if (POINTER_TYPE_P (orig)
1865 && (TYPE_ADDR_SPACE (TREE_TYPE (type))
1866 != TYPE_ADDR_SPACE (TREE_TYPE (orig))))
1867 return fold_build1_loc (loc, ADDR_SPACE_CONVERT_EXPR, type, arg);
1868 /* fall through */
1870 case INTEGER_TYPE: case ENUMERAL_TYPE: case BOOLEAN_TYPE:
1871 case OFFSET_TYPE:
1872 if (TREE_CODE (arg) == INTEGER_CST)
1874 tem = fold_convert_const (NOP_EXPR, type, arg);
1875 if (tem != NULL_TREE)
1876 return tem;
1878 if (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig)
1879 || TREE_CODE (orig) == OFFSET_TYPE)
1880 return fold_build1_loc (loc, NOP_EXPR, type, arg);
1881 if (TREE_CODE (orig) == COMPLEX_TYPE)
1882 return fold_convert_loc (loc, type,
1883 fold_build1_loc (loc, REALPART_EXPR,
1884 TREE_TYPE (orig), arg));
1885 gcc_assert (TREE_CODE (orig) == VECTOR_TYPE
1886 && tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (orig)));
1887 return fold_build1_loc (loc, NOP_EXPR, type, arg);
1889 case REAL_TYPE:
1890 if (TREE_CODE (arg) == INTEGER_CST)
1892 tem = fold_convert_const (FLOAT_EXPR, type, arg);
1893 if (tem != NULL_TREE)
1894 return tem;
1896 else if (TREE_CODE (arg) == REAL_CST)
1898 tem = fold_convert_const (NOP_EXPR, type, arg);
1899 if (tem != NULL_TREE)
1900 return tem;
1902 else if (TREE_CODE (arg) == FIXED_CST)
1904 tem = fold_convert_const (FIXED_CONVERT_EXPR, type, arg);
1905 if (tem != NULL_TREE)
1906 return tem;
1909 switch (TREE_CODE (orig))
1911 case INTEGER_TYPE:
1912 case BOOLEAN_TYPE: case ENUMERAL_TYPE:
1913 case POINTER_TYPE: case REFERENCE_TYPE:
1914 return fold_build1_loc (loc, FLOAT_EXPR, type, arg);
1916 case REAL_TYPE:
1917 return fold_build1_loc (loc, NOP_EXPR, type, arg);
1919 case FIXED_POINT_TYPE:
1920 return fold_build1_loc (loc, FIXED_CONVERT_EXPR, type, arg);
1922 case COMPLEX_TYPE:
1923 tem = fold_build1_loc (loc, REALPART_EXPR, TREE_TYPE (orig), arg);
1924 return fold_convert_loc (loc, type, tem);
1926 default:
1927 gcc_unreachable ();
1930 case FIXED_POINT_TYPE:
1931 if (TREE_CODE (arg) == FIXED_CST || TREE_CODE (arg) == INTEGER_CST
1932 || TREE_CODE (arg) == REAL_CST)
1934 tem = fold_convert_const (FIXED_CONVERT_EXPR, type, arg);
1935 if (tem != NULL_TREE)
1936 goto fold_convert_exit;
1939 switch (TREE_CODE (orig))
1941 case FIXED_POINT_TYPE:
1942 case INTEGER_TYPE:
1943 case ENUMERAL_TYPE:
1944 case BOOLEAN_TYPE:
1945 case REAL_TYPE:
1946 return fold_build1_loc (loc, FIXED_CONVERT_EXPR, type, arg);
1948 case COMPLEX_TYPE:
1949 tem = fold_build1_loc (loc, REALPART_EXPR, TREE_TYPE (orig), arg);
1950 return fold_convert_loc (loc, type, tem);
1952 default:
1953 gcc_unreachable ();
1956 case COMPLEX_TYPE:
1957 switch (TREE_CODE (orig))
1959 case INTEGER_TYPE:
1960 case BOOLEAN_TYPE: case ENUMERAL_TYPE:
1961 case POINTER_TYPE: case REFERENCE_TYPE:
1962 case REAL_TYPE:
1963 case FIXED_POINT_TYPE:
1964 return fold_build2_loc (loc, COMPLEX_EXPR, type,
1965 fold_convert_loc (loc, TREE_TYPE (type), arg),
1966 fold_convert_loc (loc, TREE_TYPE (type),
1967 integer_zero_node));
1968 case COMPLEX_TYPE:
1970 tree rpart, ipart;
1972 if (TREE_CODE (arg) == COMPLEX_EXPR)
1974 rpart = fold_convert_loc (loc, TREE_TYPE (type),
1975 TREE_OPERAND (arg, 0));
1976 ipart = fold_convert_loc (loc, TREE_TYPE (type),
1977 TREE_OPERAND (arg, 1));
1978 return fold_build2_loc (loc, COMPLEX_EXPR, type, rpart, ipart);
1981 arg = save_expr (arg);
1982 rpart = fold_build1_loc (loc, REALPART_EXPR, TREE_TYPE (orig), arg);
1983 ipart = fold_build1_loc (loc, IMAGPART_EXPR, TREE_TYPE (orig), arg);
1984 rpart = fold_convert_loc (loc, TREE_TYPE (type), rpart);
1985 ipart = fold_convert_loc (loc, TREE_TYPE (type), ipart);
1986 return fold_build2_loc (loc, COMPLEX_EXPR, type, rpart, ipart);
1989 default:
1990 gcc_unreachable ();
1993 case VECTOR_TYPE:
1994 if (integer_zerop (arg))
1995 return build_zero_vector (type);
1996 gcc_assert (tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (orig)));
1997 gcc_assert (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig)
1998 || TREE_CODE (orig) == VECTOR_TYPE);
1999 return fold_build1_loc (loc, VIEW_CONVERT_EXPR, type, arg);
2001 case VOID_TYPE:
2002 tem = fold_ignored_result (arg);
2003 return fold_build1_loc (loc, NOP_EXPR, type, tem);
2005 default:
2006 if (TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (orig))
2007 return fold_build1_loc (loc, NOP_EXPR, type, arg);
2008 gcc_unreachable ();
2010 fold_convert_exit:
2011 protected_set_expr_location_unshare (tem, loc);
2012 return tem;
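/* For example, converting an int expression I to complex double
   builds COMPLEX_EXPR <(double) I, 0.0>, and converting a complex
   value to a real type first extracts its REALPART_EXPR and then
   converts that. */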
2015 /* Return false if expr can be assumed not to be an lvalue, true
2016 otherwise. */
2018 static bool
2019 maybe_lvalue_p (const_tree x)
2021 /* We only need to wrap lvalue tree codes. */
2022 switch (TREE_CODE (x))
2024 case VAR_DECL:
2025 case PARM_DECL:
2026 case RESULT_DECL:
2027 case LABEL_DECL:
2028 case FUNCTION_DECL:
2029 case SSA_NAME:
2031 case COMPONENT_REF:
2032 case MEM_REF:
2033 case INDIRECT_REF:
2034 case ARRAY_REF:
2035 case ARRAY_RANGE_REF:
2036 case BIT_FIELD_REF:
2037 case OBJ_TYPE_REF:
2039 case REALPART_EXPR:
2040 case IMAGPART_EXPR:
2041 case PREINCREMENT_EXPR:
2042 case PREDECREMENT_EXPR:
2043 case SAVE_EXPR:
2044 case TRY_CATCH_EXPR:
2045 case WITH_CLEANUP_EXPR:
2046 case COMPOUND_EXPR:
2047 case MODIFY_EXPR:
2048 case TARGET_EXPR:
2049 case COND_EXPR:
2050 case BIND_EXPR:
2051 break;
2053 default:
2054 /* Assume the worst for front-end tree codes. */
2055 if ((int)TREE_CODE (x) >= NUM_TREE_CODES)
2056 break;
2057 return false;
2060 return true;
2063 /* Return an expr equal to X but certainly not valid as an lvalue. */
2065 tree
2066 non_lvalue_loc (location_t loc, tree x)
2068 /* While we are in GIMPLE, NON_LVALUE_EXPR doesn't mean anything to
2069 us. */
2070 if (in_gimple_form)
2071 return x;
2073 if (! maybe_lvalue_p (x))
2074 return x;
2075 return build1_loc (loc, NON_LVALUE_EXPR, TREE_TYPE (x), x);
2078 /* Nonzero means lvalues are limited to those valid in pedantic ANSI C.
2079 Zero means allow extended lvalues. */
2081 int pedantic_lvalues;
2083 /* When pedantic, return an expr equal to X but certainly not valid as a
2084 pedantic lvalue. Otherwise, return X. */
2086 static tree
2087 pedantic_non_lvalue_loc (location_t loc, tree x)
2089 if (pedantic_lvalues)
2090 return non_lvalue_loc (loc, x);
2092 return protected_set_expr_location_unshare (x, loc);
2095 /* Given a tree comparison code, return the code that is the logical inverse.
2096 It is generally not safe to do this for floating-point comparisons, except
2097 for EQ_EXPR and NE_EXPR, so we return ERROR_MARK in this case. */
2099 enum tree_code
2100 invert_tree_comparison (enum tree_code code, bool honor_nans)
2102 if (honor_nans && flag_trapping_math && code != EQ_EXPR && code != NE_EXPR)
2103 return ERROR_MARK;
2105 switch (code)
2107 case EQ_EXPR:
2108 return NE_EXPR;
2109 case NE_EXPR:
2110 return EQ_EXPR;
2111 case GT_EXPR:
2112 return honor_nans ? UNLE_EXPR : LE_EXPR;
2113 case GE_EXPR:
2114 return honor_nans ? UNLT_EXPR : LT_EXPR;
2115 case LT_EXPR:
2116 return honor_nans ? UNGE_EXPR : GE_EXPR;
2117 case LE_EXPR:
2118 return honor_nans ? UNGT_EXPR : GT_EXPR;
2119 case LTGT_EXPR:
2120 return UNEQ_EXPR;
2121 case UNEQ_EXPR:
2122 return LTGT_EXPR;
2123 case UNGT_EXPR:
2124 return LE_EXPR;
2125 case UNGE_EXPR:
2126 return LT_EXPR;
2127 case UNLT_EXPR:
2128 return GE_EXPR;
2129 case UNLE_EXPR:
2130 return GT_EXPR;
2131 case ORDERED_EXPR:
2132 return UNORDERED_EXPR;
2133 case UNORDERED_EXPR:
2134 return ORDERED_EXPR;
2135 default:
2136 gcc_unreachable ();
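/* For example, when NaNs need not be honored (e.g. with
   -ffinite-math-only) the inverse of "x < y" is "x >= y".  When
   they must be honored it is UNGE_EXPR instead: if either operand
   is a NaN, "x < y" is false, so the inverse must be true, which
   "x >= y" would not be.  And if such a comparison may also trap
   (flag_trapping_math), no inversion is attempted and ERROR_MARK
   is returned, as noted above. */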
2140 /* Similar, but return the comparison that results if the operands are
2141 swapped. This is safe for floating-point. */
2143 enum tree_code
2144 swap_tree_comparison (enum tree_code code)
2146 switch (code)
2148 case EQ_EXPR:
2149 case NE_EXPR:
2150 case ORDERED_EXPR:
2151 case UNORDERED_EXPR:
2152 case LTGT_EXPR:
2153 case UNEQ_EXPR:
2154 return code;
2155 case GT_EXPR:
2156 return LT_EXPR;
2157 case GE_EXPR:
2158 return LE_EXPR;
2159 case LT_EXPR:
2160 return GT_EXPR;
2161 case LE_EXPR:
2162 return GE_EXPR;
2163 case UNGT_EXPR:
2164 return UNLT_EXPR;
2165 case UNGE_EXPR:
2166 return UNLE_EXPR;
2167 case UNLT_EXPR:
2168 return UNGT_EXPR;
2169 case UNLE_EXPR:
2170 return UNGE_EXPR;
2171 default:
2172 gcc_unreachable ();
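/* For example, "x < y" and "y > x" are the same test even for IEEE
   values: swapping the operands changes neither the result nor
   which operand pairs are unordered, so unlike inversion this needs
   no honor_nans argument. */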
2177 /* Convert a comparison tree code from an enum tree_code representation
2178 into a compcode bit-based encoding. This function is the inverse of
2179 compcode_to_comparison. */
2181 static enum comparison_code
2182 comparison_to_compcode (enum tree_code code)
2184 switch (code)
2186 case LT_EXPR:
2187 return COMPCODE_LT;
2188 case EQ_EXPR:
2189 return COMPCODE_EQ;
2190 case LE_EXPR:
2191 return COMPCODE_LE;
2192 case GT_EXPR:
2193 return COMPCODE_GT;
2194 case NE_EXPR:
2195 return COMPCODE_NE;
2196 case GE_EXPR:
2197 return COMPCODE_GE;
2198 case ORDERED_EXPR:
2199 return COMPCODE_ORD;
2200 case UNORDERED_EXPR:
2201 return COMPCODE_UNORD;
2202 case UNLT_EXPR:
2203 return COMPCODE_UNLT;
2204 case UNEQ_EXPR:
2205 return COMPCODE_UNEQ;
2206 case UNLE_EXPR:
2207 return COMPCODE_UNLE;
2208 case UNGT_EXPR:
2209 return COMPCODE_UNGT;
2210 case LTGT_EXPR:
2211 return COMPCODE_LTGT;
2212 case UNGE_EXPR:
2213 return COMPCODE_UNGE;
2214 default:
2215 gcc_unreachable ();
2219 /* Convert a compcode bit-based encoding of a comparison operator back
2220 to GCC's enum tree_code representation. This function is the
2221 inverse of comparison_to_compcode. */
2223 static enum tree_code
2224 compcode_to_comparison (enum comparison_code code)
2226 switch (code)
2228 case COMPCODE_LT:
2229 return LT_EXPR;
2230 case COMPCODE_EQ:
2231 return EQ_EXPR;
2232 case COMPCODE_LE:
2233 return LE_EXPR;
2234 case COMPCODE_GT:
2235 return GT_EXPR;
2236 case COMPCODE_NE:
2237 return NE_EXPR;
2238 case COMPCODE_GE:
2239 return GE_EXPR;
2240 case COMPCODE_ORD:
2241 return ORDERED_EXPR;
2242 case COMPCODE_UNORD:
2243 return UNORDERED_EXPR;
2244 case COMPCODE_UNLT:
2245 return UNLT_EXPR;
2246 case COMPCODE_UNEQ:
2247 return UNEQ_EXPR;
2248 case COMPCODE_UNLE:
2249 return UNLE_EXPR;
2250 case COMPCODE_UNGT:
2251 return UNGT_EXPR;
2252 case COMPCODE_LTGT:
2253 return LTGT_EXPR;
2254 case COMPCODE_UNGE:
2255 return UNGE_EXPR;
2256 default:
2257 gcc_unreachable ();
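/* The encoding gives each of the four possible outcomes LT, EQ, GT
   and UNORD its own bit, so a compcode is simply the set of
   outcomes for which the comparison holds: COMPCODE_LE (3) is
   COMPCODE_LT | COMPCODE_EQ, COMPCODE_NE (13) is
   COMPCODE_LT | COMPCODE_GT | COMPCODE_UNORD, and COMPCODE_TRUE
   (15) has all four bits set. */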
2261 /* Return a tree for the comparison which is the combination of
2262 doing the AND or OR (depending on CODE) of the two operations LCODE
2263 and RCODE on the identical operands LL_ARG and LR_ARG. Take into account
2264 the possibility of trapping if the mode has NaNs, and return NULL_TREE
2265 if this makes the transformation invalid. */
2267 tree
2268 combine_comparisons (location_t loc,
2269 enum tree_code code, enum tree_code lcode,
2270 enum tree_code rcode, tree truth_type,
2271 tree ll_arg, tree lr_arg)
2273 bool honor_nans = HONOR_NANS (TYPE_MODE (TREE_TYPE (ll_arg)));
2274 enum comparison_code lcompcode = comparison_to_compcode (lcode);
2275 enum comparison_code rcompcode = comparison_to_compcode (rcode);
2276 int compcode;
2278 switch (code)
2280 case TRUTH_AND_EXPR: case TRUTH_ANDIF_EXPR:
2281 compcode = lcompcode & rcompcode;
2282 break;
2284 case TRUTH_OR_EXPR: case TRUTH_ORIF_EXPR:
2285 compcode = lcompcode | rcompcode;
2286 break;
2288 default:
2289 return NULL_TREE;
2292 if (!honor_nans)
2294 /* Eliminate unordered comparisons, as well as LTGT and ORD
2295 which are not used unless the mode has NaNs. */
2296 compcode &= ~COMPCODE_UNORD;
2297 if (compcode == COMPCODE_LTGT)
2298 compcode = COMPCODE_NE;
2299 else if (compcode == COMPCODE_ORD)
2300 compcode = COMPCODE_TRUE;
2302 else if (flag_trapping_math)
2304 /* Check that the original operation and the optimized ones will trap
2305 under the same condition. */
2306 bool ltrap = (lcompcode & COMPCODE_UNORD) == 0
2307 && (lcompcode != COMPCODE_EQ)
2308 && (lcompcode != COMPCODE_ORD);
2309 bool rtrap = (rcompcode & COMPCODE_UNORD) == 0
2310 && (rcompcode != COMPCODE_EQ)
2311 && (rcompcode != COMPCODE_ORD);
2312 bool trap = (compcode & COMPCODE_UNORD) == 0
2313 && (compcode != COMPCODE_EQ)
2314 && (compcode != COMPCODE_ORD);
2316 /* In a short-circuited boolean expression the LHS might be
2317 such that the RHS, if evaluated, will never trap. For
2318 example, in ORD (x, y) && (x < y), we evaluate the RHS only
2319 if neither x nor y is NaN. (This is a mixed blessing: for
2320 example, the expression above will never trap, hence
2321 optimizing it to x < y would be invalid). */
2322 if ((code == TRUTH_ORIF_EXPR && (lcompcode & COMPCODE_UNORD))
2323 || (code == TRUTH_ANDIF_EXPR && !(lcompcode & COMPCODE_UNORD)))
2324 rtrap = false;
2326 /* If the comparison was short-circuited, and only the RHS
2327 trapped, we may now generate a spurious trap. */
2328 if (rtrap && !ltrap
2329 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR))
2330 return NULL_TREE;
2332 /* If we changed the conditions that cause a trap, we lose. */
2333 if ((ltrap || rtrap) != trap)
2334 return NULL_TREE;
2337 if (compcode == COMPCODE_TRUE)
2338 return constant_boolean_node (true, truth_type);
2339 else if (compcode == COMPCODE_FALSE)
2340 return constant_boolean_node (false, truth_type);
2341 else
2343 enum tree_code tcode;
2345 tcode = compcode_to_comparison ((enum comparison_code) compcode);
2346 return fold_build2_loc (loc, tcode, truth_type, ll_arg, lr_arg);
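/* As a worked example, for "x < y || x == y" the codes are ORed:
   COMPCODE_LT | COMPCODE_EQ == 1 | 2 == COMPCODE_LE, so the result
   is "x <= y".  For "x <= y && x >= y" they are ANDed:
   COMPCODE_LE & COMPCODE_GE == 3 & 6 == COMPCODE_EQ, giving
   "x == y", and "x < y && x > y" collapses to COMPCODE_FALSE and
   hence a constant false. */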
2350 /* Return nonzero if two operands (typically of the same tree node)
2351 are necessarily equal. If either argument has side-effects this
2352 function returns zero. FLAGS modifies behavior as follows:
2354 If OEP_ONLY_CONST is set, only return nonzero for constants.
2355 This function tests whether the operands are indistinguishable;
2356 it does not test whether they are equal using C's == operation.
2357 The distinction is important for IEEE floating point, because
2358 (1) -0.0 and 0.0 are distinguishable, but -0.0==0.0, and
2359 (2) two NaNs may be indistinguishable, but NaN!=NaN.
2361 If OEP_ONLY_CONST is unset, a VAR_DECL is considered equal to itself
2362 even though it may hold multiple values during a function.
2363 This is because a GCC tree node guarantees that nothing else is
2364 executed between the evaluation of its "operands" (which may often
2365 be evaluated in arbitrary order). Hence if the operands themselves
2366 don't side-effect, the VAR_DECLs, PARM_DECLs etc... must hold the
2367 same value in each operand/subexpression. Hence leaving OEP_ONLY_CONST
2368 unset means assuming isochronic (or instantaneous) tree equivalence.
2369 Unless comparing arbitrary expression trees, such as from different
2370 statements, this flag can usually be left unset.
2372 If OEP_PURE_SAME is set, then pure functions with identical arguments
2373 are considered the same. It is used when the caller has other ways
2374 to ensure that global memory is unchanged in between. */
2377 operand_equal_p (const_tree arg0, const_tree arg1, unsigned int flags)
2379 /* If either is ERROR_MARK, they aren't equal. */
2380 if (TREE_CODE (arg0) == ERROR_MARK || TREE_CODE (arg1) == ERROR_MARK
2381 || TREE_TYPE (arg0) == error_mark_node
2382 || TREE_TYPE (arg1) == error_mark_node)
2383 return 0;
2385 /* Similar, if either does not have a type (like a released SSA name),
2386 they aren't equal. */
2387 if (!TREE_TYPE (arg0) || !TREE_TYPE (arg1))
2388 return 0;
2390 /* Check equality of integer constants before bailing out due to
2391 precision differences. */
2392 if (TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
2393 return tree_int_cst_equal (arg0, arg1);
2395 /* If both types don't have the same signedness, then we can't consider
2396 them equal. We must check this before the STRIP_NOPS calls
2397 because they may change the signedness of the arguments. As pointers
2398 strictly don't have a signedness, require either two pointers or
2399 two non-pointers as well. */
2400 if (TYPE_UNSIGNED (TREE_TYPE (arg0)) != TYPE_UNSIGNED (TREE_TYPE (arg1))
2401 || POINTER_TYPE_P (TREE_TYPE (arg0)) != POINTER_TYPE_P (TREE_TYPE (arg1)))
2402 return 0;
2404 /* We cannot consider pointers to different address spaces equal. */
2405 if (POINTER_TYPE_P (TREE_TYPE (arg0)) && POINTER_TYPE_P (TREE_TYPE (arg1))
2406 && (TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (arg0)))
2407 != TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (arg1)))))
2408 return 0;
2410 /* If both types don't have the same precision, then it is not safe
2411 to strip NOPs. */
2412 if (TYPE_PRECISION (TREE_TYPE (arg0)) != TYPE_PRECISION (TREE_TYPE (arg1)))
2413 return 0;
2415 STRIP_NOPS (arg0);
2416 STRIP_NOPS (arg1);
2418 /* In case both args are comparisons but with different comparison
2419 code, try to swap the comparison operands of one arg to produce
2420 a match and compare that variant. */
2421 if (TREE_CODE (arg0) != TREE_CODE (arg1)
2422 && COMPARISON_CLASS_P (arg0)
2423 && COMPARISON_CLASS_P (arg1))
2425 enum tree_code swap_code = swap_tree_comparison (TREE_CODE (arg1));
2427 if (TREE_CODE (arg0) == swap_code)
2428 return operand_equal_p (TREE_OPERAND (arg0, 0),
2429 TREE_OPERAND (arg1, 1), flags)
2430 && operand_equal_p (TREE_OPERAND (arg0, 1),
2431 TREE_OPERAND (arg1, 0), flags);
2434 if (TREE_CODE (arg0) != TREE_CODE (arg1)
2435 /* This is needed for conversions and for COMPONENT_REF.
2436 Might as well play it safe and always test this. */
2437 || TREE_CODE (TREE_TYPE (arg0)) == ERROR_MARK
2438 || TREE_CODE (TREE_TYPE (arg1)) == ERROR_MARK
2439 || TYPE_MODE (TREE_TYPE (arg0)) != TYPE_MODE (TREE_TYPE (arg1)))
2440 return 0;
2442 /* If ARG0 and ARG1 are the same SAVE_EXPR, they are necessarily equal.
2443 We don't care about side effects in that case because the SAVE_EXPR
2444 takes care of that for us. In all other cases, two expressions are
2445 equal if they have no side effects. If we have two identical
2446 expressions with side effects that should be treated the same due
2447 to the only side effects being identical SAVE_EXPR's, that will
2448 be detected in the recursive calls below.
2449 If we are taking an invariant address of two identical objects
2450 they are necessarily equal as well. */
2451 if (arg0 == arg1 && ! (flags & OEP_ONLY_CONST)
2452 && (TREE_CODE (arg0) == SAVE_EXPR
2453 || (flags & OEP_CONSTANT_ADDRESS_OF)
2454 || (! TREE_SIDE_EFFECTS (arg0) && ! TREE_SIDE_EFFECTS (arg1))))
2455 return 1;
2457 /* Next handle constant cases, those for which we can return 1 even
2458 if ONLY_CONST is set. */
2459 if (TREE_CONSTANT (arg0) && TREE_CONSTANT (arg1))
2460 switch (TREE_CODE (arg0))
2462 case INTEGER_CST:
2463 return tree_int_cst_equal (arg0, arg1);
2465 case FIXED_CST:
2466 return FIXED_VALUES_IDENTICAL (TREE_FIXED_CST (arg0),
2467 TREE_FIXED_CST (arg1));
2469 case REAL_CST:
2470 if (REAL_VALUES_IDENTICAL (TREE_REAL_CST (arg0),
2471 TREE_REAL_CST (arg1)))
2472 return 1;
2475 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0))))
2477 /* If we do not distinguish between signed and unsigned zero,
2478 consider them equal. */
2479 if (real_zerop (arg0) && real_zerop (arg1))
2480 return 1;
2482 return 0;
2484 case VECTOR_CST:
2486 unsigned i;
2488 if (VECTOR_CST_NELTS (arg0) != VECTOR_CST_NELTS (arg1))
2489 return 0;
2491 for (i = 0; i < VECTOR_CST_NELTS (arg0); ++i)
2493 if (!operand_equal_p (VECTOR_CST_ELT (arg0, i),
2494 VECTOR_CST_ELT (arg1, i), flags))
2495 return 0;
2497 return 1;
2500 case COMPLEX_CST:
2501 return (operand_equal_p (TREE_REALPART (arg0), TREE_REALPART (arg1),
2502 flags)
2503 && operand_equal_p (TREE_IMAGPART (arg0), TREE_IMAGPART (arg1),
2504 flags));
2506 case STRING_CST:
2507 return (TREE_STRING_LENGTH (arg0) == TREE_STRING_LENGTH (arg1)
2508 && ! memcmp (TREE_STRING_POINTER (arg0),
2509 TREE_STRING_POINTER (arg1),
2510 TREE_STRING_LENGTH (arg0)));
2512 case ADDR_EXPR:
2513 return operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 0),
2514 TREE_CONSTANT (arg0) && TREE_CONSTANT (arg1)
2515 ? OEP_CONSTANT_ADDRESS_OF : 0);
2516 default:
2517 break;
2520 if (flags & OEP_ONLY_CONST)
2521 return 0;
2523 /* Define macros to test an operand from arg0 and arg1 for equality and a
2524 variant that allows null and views null as being different from any
2525 non-null value. In the latter case, if either is null, then both
2526 must be; otherwise, do the normal comparison. */
2527 #define OP_SAME(N) operand_equal_p (TREE_OPERAND (arg0, N), \
2528 TREE_OPERAND (arg1, N), flags)
2530 #define OP_SAME_WITH_NULL(N) \
2531 ((!TREE_OPERAND (arg0, N) || !TREE_OPERAND (arg1, N)) \
2532 ? TREE_OPERAND (arg0, N) == TREE_OPERAND (arg1, N) : OP_SAME (N))
2534 switch (TREE_CODE_CLASS (TREE_CODE (arg0)))
2536 case tcc_unary:
2537 /* Two conversions are equal only if signedness and modes match. */
2538 switch (TREE_CODE (arg0))
2540 CASE_CONVERT:
2541 case FIX_TRUNC_EXPR:
2542 if (TYPE_UNSIGNED (TREE_TYPE (arg0))
2543 != TYPE_UNSIGNED (TREE_TYPE (arg1)))
2544 return 0;
2545 break;
2546 default:
2547 break;
2550 return OP_SAME (0);
2553 case tcc_comparison:
2554 case tcc_binary:
2555 if (OP_SAME (0) && OP_SAME (1))
2556 return 1;
2558 /* For commutative ops, allow the other order. */
2559 return (commutative_tree_code (TREE_CODE (arg0))
2560 && operand_equal_p (TREE_OPERAND (arg0, 0),
2561 TREE_OPERAND (arg1, 1), flags)
2562 && operand_equal_p (TREE_OPERAND (arg0, 1),
2563 TREE_OPERAND (arg1, 0), flags));
2565 case tcc_reference:
2566 /* If either of the pointer (or reference) expressions we are
2567 dereferencing contain a side effect, these cannot be equal. */
2568 if (TREE_SIDE_EFFECTS (arg0)
2569 || TREE_SIDE_EFFECTS (arg1))
2570 return 0;
2572 switch (TREE_CODE (arg0))
2574 case INDIRECT_REF:
2575 case REALPART_EXPR:
2576 case IMAGPART_EXPR:
2577 return OP_SAME (0);
2579 case TARGET_MEM_REF:
2580 /* Require equal extra operands and then fall through to MEM_REF
2581 handling of the two common operands. */
2582 if (!OP_SAME_WITH_NULL (2)
2583 || !OP_SAME_WITH_NULL (3)
2584 || !OP_SAME_WITH_NULL (4))
2585 return 0;
2586 /* Fallthru. */
2587 case MEM_REF:
2588 /* Require equal access sizes, and similar pointer types.
2589 We can have incomplete types for array references of
2590 variable-sized arrays from the Fortran frontend
2591 though. */
2592 return ((TYPE_SIZE (TREE_TYPE (arg0)) == TYPE_SIZE (TREE_TYPE (arg1))
2593 || (TYPE_SIZE (TREE_TYPE (arg0))
2594 && TYPE_SIZE (TREE_TYPE (arg1))
2595 && operand_equal_p (TYPE_SIZE (TREE_TYPE (arg0)),
2596 TYPE_SIZE (TREE_TYPE (arg1)), flags)))
2597 && (TYPE_MAIN_VARIANT (TREE_TYPE (TREE_OPERAND (arg0, 1)))
2598 == TYPE_MAIN_VARIANT (TREE_TYPE (TREE_OPERAND (arg1, 1))))
2599 && OP_SAME (0) && OP_SAME (1));
2601 case ARRAY_REF:
2602 case ARRAY_RANGE_REF:
2603 /* Operands 2 and 3 may be null.
2604 Compare the array index by value if it is constant first as we
2605 may have different types but same value here. */
2606 return (OP_SAME (0)
2607 && (tree_int_cst_equal (TREE_OPERAND (arg0, 1),
2608 TREE_OPERAND (arg1, 1))
2609 || OP_SAME (1))
2610 && OP_SAME_WITH_NULL (2)
2611 && OP_SAME_WITH_NULL (3));
2613 case COMPONENT_REF:
2614 /* Handle operand 2 the same as for ARRAY_REF. Operand 0
2615 may be NULL when we're called to compare MEM_EXPRs. */
2616 return OP_SAME_WITH_NULL (0)
2617 && OP_SAME (1)
2618 && OP_SAME_WITH_NULL (2);
2620 case BIT_FIELD_REF:
2621 return OP_SAME (0) && OP_SAME (1) && OP_SAME (2);
2623 default:
2624 return 0;
2627 case tcc_expression:
2628 switch (TREE_CODE (arg0))
2630 case ADDR_EXPR:
2631 case TRUTH_NOT_EXPR:
2632 return OP_SAME (0);
2634 case TRUTH_ANDIF_EXPR:
2635 case TRUTH_ORIF_EXPR:
2636 return OP_SAME (0) && OP_SAME (1);
2638 case FMA_EXPR:
2639 case WIDEN_MULT_PLUS_EXPR:
2640 case WIDEN_MULT_MINUS_EXPR:
2641 if (!OP_SAME (2))
2642 return 0;
2643 /* The multiplication operands are commutative. */
2644 /* FALLTHRU */
2646 case TRUTH_AND_EXPR:
2647 case TRUTH_OR_EXPR:
2648 case TRUTH_XOR_EXPR:
2649 if (OP_SAME (0) && OP_SAME (1))
2650 return 1;
2652 /* Otherwise take into account that this is a commutative operation. */
2653 return (operand_equal_p (TREE_OPERAND (arg0, 0),
2654 TREE_OPERAND (arg1, 1), flags)
2655 && operand_equal_p (TREE_OPERAND (arg0, 1),
2656 TREE_OPERAND (arg1, 0), flags));
2658 case COND_EXPR:
2659 case VEC_COND_EXPR:
2660 case DOT_PROD_EXPR:
2661 return OP_SAME (0) && OP_SAME (1) && OP_SAME (2);
2663 default:
2664 return 0;
2667 case tcc_vl_exp:
2668 switch (TREE_CODE (arg0))
2670 case CALL_EXPR:
2671 /* If the CALL_EXPRs call different functions, then they
2672 clearly cannot be equal. */
2673 if (! operand_equal_p (CALL_EXPR_FN (arg0), CALL_EXPR_FN (arg1),
2674 flags))
2675 return 0;
2678 unsigned int cef = call_expr_flags (arg0);
2679 if (flags & OEP_PURE_SAME)
2680 cef &= ECF_CONST | ECF_PURE;
2681 else
2682 cef &= ECF_CONST;
2683 if (!cef)
2684 return 0;
2687 /* Now see if all the arguments are the same. */
2689 const_call_expr_arg_iterator iter0, iter1;
2690 const_tree a0, a1;
2691 for (a0 = first_const_call_expr_arg (arg0, &iter0),
2692 a1 = first_const_call_expr_arg (arg1, &iter1);
2693 a0 && a1;
2694 a0 = next_const_call_expr_arg (&iter0),
2695 a1 = next_const_call_expr_arg (&iter1))
2696 if (! operand_equal_p (a0, a1, flags))
2697 return 0;
2699 /* If we get here and both argument lists are exhausted
2700 then the CALL_EXPRs are equal. */
2701 return ! (a0 || a1);
2703 default:
2704 return 0;
2707 case tcc_declaration:
2708 /* Consider __builtin_sqrt equal to sqrt. */
2709 return (TREE_CODE (arg0) == FUNCTION_DECL
2710 && DECL_BUILT_IN (arg0) && DECL_BUILT_IN (arg1)
2711 && DECL_BUILT_IN_CLASS (arg0) == DECL_BUILT_IN_CLASS (arg1)
2712 && DECL_FUNCTION_CODE (arg0) == DECL_FUNCTION_CODE (arg1));
2714 default:
2715 return 0;
2718 #undef OP_SAME
2719 #undef OP_SAME_WITH_NULL
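/* For example, "a + b" and "b + a" compare equal here because
   PLUS_EXPR is commutative, while "f (x)" equals "f (x)" only when
   the call is ECF_CONST (or ECF_PURE under OEP_PURE_SAME), since
   otherwise two executions of the call need not produce the same
   value. */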
2722 /* Similar to operand_equal_p, but see if ARG0 might have been made by
2723 shorten_compare from ARG1 when ARG1 was being compared with OTHER.
2725 When in doubt, return 0. */
2727 static int
2728 operand_equal_for_comparison_p (tree arg0, tree arg1, tree other)
2730 int unsignedp1, unsignedpo;
2731 tree primarg0, primarg1, primother;
2732 unsigned int correct_width;
2734 if (operand_equal_p (arg0, arg1, 0))
2735 return 1;
2737 if (! INTEGRAL_TYPE_P (TREE_TYPE (arg0))
2738 || ! INTEGRAL_TYPE_P (TREE_TYPE (arg1)))
2739 return 0;
2741 /* Discard any conversions that don't change the modes of ARG0 and ARG1
2742 and see if the inner values are the same. This removes any
2743 signedness comparison, which doesn't matter here. */
2744 primarg0 = arg0, primarg1 = arg1;
2745 STRIP_NOPS (primarg0);
2746 STRIP_NOPS (primarg1);
2747 if (operand_equal_p (primarg0, primarg1, 0))
2748 return 1;
2750 /* Duplicate what shorten_compare does to ARG1 and see if that gives the
2751 actual comparison operand, ARG0.
2753 First throw away any conversions to wider types
2754 already present in the operands. */
2756 primarg1 = get_narrower (arg1, &unsignedp1);
2757 primother = get_narrower (other, &unsignedpo);
2759 correct_width = TYPE_PRECISION (TREE_TYPE (arg1));
2760 if (unsignedp1 == unsignedpo
2761 && TYPE_PRECISION (TREE_TYPE (primarg1)) < correct_width
2762 && TYPE_PRECISION (TREE_TYPE (primother)) < correct_width)
2764 tree type = TREE_TYPE (arg0);
2766 /* Make sure shorter operand is extended the right way
2767 to match the longer operand. */
2768 primarg1 = fold_convert (signed_or_unsigned_type_for
2769 (unsignedp1, TREE_TYPE (primarg1)), primarg1);
2771 if (operand_equal_p (arg0, fold_convert (type, primarg1), 0))
2772 return 1;
2775 return 0;
2778 /* See if ARG is an expression that is either a comparison or is performing
2779 arithmetic on comparisons. The comparisons must only be comparing
2780 two different values, which will be stored in *CVAL1 and *CVAL2; if
2781 they are nonzero it means that some operands have already been found.
2782 No variables may be used anywhere else in the expression except in the
2783 comparisons. If SAVE_P is true it means we removed a SAVE_EXPR around
2784 the expression and save_expr needs to be called with CVAL1 and CVAL2.
2786 If this is true, return 1. Otherwise, return zero. */
2788 static int
2789 twoval_comparison_p (tree arg, tree *cval1, tree *cval2, int *save_p)
2791 enum tree_code code = TREE_CODE (arg);
2792 enum tree_code_class tclass = TREE_CODE_CLASS (code);
2794 /* We can handle some of the tcc_expression cases here. */
2795 if (tclass == tcc_expression && code == TRUTH_NOT_EXPR)
2796 tclass = tcc_unary;
2797 else if (tclass == tcc_expression
2798 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR
2799 || code == COMPOUND_EXPR))
2800 tclass = tcc_binary;
2802 else if (tclass == tcc_expression && code == SAVE_EXPR
2803 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (arg, 0)))
2805 /* If we've already found a CVAL1 or CVAL2, this expression is
2806 too complex to handle. */
2807 if (*cval1 || *cval2)
2808 return 0;
2810 tclass = tcc_unary;
2811 *save_p = 1;
2814 switch (tclass)
2816 case tcc_unary:
2817 return twoval_comparison_p (TREE_OPERAND (arg, 0), cval1, cval2, save_p);
2819 case tcc_binary:
2820 return (twoval_comparison_p (TREE_OPERAND (arg, 0), cval1, cval2, save_p)
2821 && twoval_comparison_p (TREE_OPERAND (arg, 1),
2822 cval1, cval2, save_p));
2824 case tcc_constant:
2825 return 1;
2827 case tcc_expression:
2828 if (code == COND_EXPR)
2829 return (twoval_comparison_p (TREE_OPERAND (arg, 0),
2830 cval1, cval2, save_p)
2831 && twoval_comparison_p (TREE_OPERAND (arg, 1),
2832 cval1, cval2, save_p)
2833 && twoval_comparison_p (TREE_OPERAND (arg, 2),
2834 cval1, cval2, save_p));
2835 return 0;
2837 case tcc_comparison:
2838 /* First see if we can handle the first operand, then the second. For
2839 the second operand, we know *CVAL1 can't be zero. It must be that
2840 one side of the comparison is each of the values; test for the
2841 case where this isn't true by failing if the two operands
2842 are the same. */
2844 if (operand_equal_p (TREE_OPERAND (arg, 0),
2845 TREE_OPERAND (arg, 1), 0))
2846 return 0;
2848 if (*cval1 == 0)
2849 *cval1 = TREE_OPERAND (arg, 0);
2850 else if (operand_equal_p (*cval1, TREE_OPERAND (arg, 0), 0))
2852 else if (*cval2 == 0)
2853 *cval2 = TREE_OPERAND (arg, 0);
2854 else if (operand_equal_p (*cval2, TREE_OPERAND (arg, 0), 0))
2856 else
2857 return 0;
2859 if (operand_equal_p (*cval1, TREE_OPERAND (arg, 1), 0))
2861 else if (*cval2 == 0)
2862 *cval2 = TREE_OPERAND (arg, 1);
2863 else if (operand_equal_p (*cval2, TREE_OPERAND (arg, 1), 0))
2865 else
2866 return 0;
2868 return 1;
2870 default:
2871 return 0;
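/* For example, "x < y || x == y" succeeds with *CVAL1 == x and
   *CVAL2 == y, since every comparison involves just those two
   values, whereas "x < y || x < z" fails because a third value
   takes part. */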
2875 /* ARG is a tree that is known to contain just arithmetic operations and
2876 comparisons. Evaluate the operations in the tree substituting NEW0 for
2877 any occurrence of OLD0 as an operand of a comparison and likewise for
2878 NEW1 and OLD1. */
2880 static tree
2881 eval_subst (location_t loc, tree arg, tree old0, tree new0,
2882 tree old1, tree new1)
2884 tree type = TREE_TYPE (arg);
2885 enum tree_code code = TREE_CODE (arg);
2886 enum tree_code_class tclass = TREE_CODE_CLASS (code);
2888 /* We can handle some of the tcc_expression cases here. */
2889 if (tclass == tcc_expression && code == TRUTH_NOT_EXPR)
2890 tclass = tcc_unary;
2891 else if (tclass == tcc_expression
2892 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR))
2893 tclass = tcc_binary;
2895 switch (tclass)
2897 case tcc_unary:
2898 return fold_build1_loc (loc, code, type,
2899 eval_subst (loc, TREE_OPERAND (arg, 0),
2900 old0, new0, old1, new1));
2902 case tcc_binary:
2903 return fold_build2_loc (loc, code, type,
2904 eval_subst (loc, TREE_OPERAND (arg, 0),
2905 old0, new0, old1, new1),
2906 eval_subst (loc, TREE_OPERAND (arg, 1),
2907 old0, new0, old1, new1));
2909 case tcc_expression:
2910 switch (code)
2912 case SAVE_EXPR:
2913 return eval_subst (loc, TREE_OPERAND (arg, 0), old0, new0,
2914 old1, new1);
2916 case COMPOUND_EXPR:
2917 return eval_subst (loc, TREE_OPERAND (arg, 1), old0, new0,
2918 old1, new1);
2920 case COND_EXPR:
2921 return fold_build3_loc (loc, code, type,
2922 eval_subst (loc, TREE_OPERAND (arg, 0),
2923 old0, new0, old1, new1),
2924 eval_subst (loc, TREE_OPERAND (arg, 1),
2925 old0, new0, old1, new1),
2926 eval_subst (loc, TREE_OPERAND (arg, 2),
2927 old0, new0, old1, new1));
2928 default:
2929 break;
2931 /* Fall through - ??? */
2933 case tcc_comparison:
2935 tree arg0 = TREE_OPERAND (arg, 0);
2936 tree arg1 = TREE_OPERAND (arg, 1);
2938 /* We need to check both for exact equality and tree equality. The
2939 former will be true if the operand has a side-effect. In that
2940 case, we know the operand occurred exactly once. */
2942 if (arg0 == old0 || operand_equal_p (arg0, old0, 0))
2943 arg0 = new0;
2944 else if (arg0 == old1 || operand_equal_p (arg0, old1, 0))
2945 arg0 = new1;
2947 if (arg1 == old0 || operand_equal_p (arg1, old0, 0))
2948 arg1 = new0;
2949 else if (arg1 == old1 || operand_equal_p (arg1, old1, 0))
2950 arg1 = new1;
2952 return fold_build2_loc (loc, code, type, arg0, arg1);
2955 default:
2956 return arg;
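/* For example, eval_subst on "x < y || x == y" with OLD0 == x,
   NEW0 == 0, OLD1 == y, NEW1 == 1 produces "0 < 1 || 0 == 1",
   which folds to a constant.  The COND_EXPR folding code uses this
   together with twoval_comparison_p, substituting extreme constants
   for the two compared values, to see whether a conditional
   collapses to a single comparison. */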
2960 /* Return a tree for the case when the result of an expression is RESULT
2961 converted to TYPE and OMITTED was previously an operand of the expression
2962 but is now not needed (e.g., we folded OMITTED * 0).
2964 If OMITTED has side effects, we must evaluate it. Otherwise, just do
2965 the conversion of RESULT to TYPE. */
2967 tree
2968 omit_one_operand_loc (location_t loc, tree type, tree result, tree omitted)
2970 tree t = fold_convert_loc (loc, type, result);
2972 /* If the resulting operand is an empty statement, just return the omitted
2973 statement cast to void. */
2974 if (IS_EMPTY_STMT (t) && TREE_SIDE_EFFECTS (omitted))
2975 return build1_loc (loc, NOP_EXPR, void_type_node,
2976 fold_ignored_result (omitted));
2978 if (TREE_SIDE_EFFECTS (omitted))
2979 return build2_loc (loc, COMPOUND_EXPR, type,
2980 fold_ignored_result (omitted), t);
2982 return non_lvalue_loc (loc, t);
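/* For example, when "f () * 0" is folded to 0 the call cannot
   simply be dropped; the result here is the COMPOUND_EXPR
   "(f (), 0)", which still evaluates f for its side effects. */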
2985 /* Similar, but call pedantic_non_lvalue instead of non_lvalue. */
2987 static tree
2988 pedantic_omit_one_operand_loc (location_t loc, tree type, tree result,
2989 tree omitted)
2991 tree t = fold_convert_loc (loc, type, result);
2993 /* If the resulting operand is an empty statement, just return the omitted
2994 statement cast to void. */
2995 if (IS_EMPTY_STMT (t) && TREE_SIDE_EFFECTS (omitted))
2996 return build1_loc (loc, NOP_EXPR, void_type_node,
2997 fold_ignored_result (omitted));
2999 if (TREE_SIDE_EFFECTS (omitted))
3000 return build2_loc (loc, COMPOUND_EXPR, type,
3001 fold_ignored_result (omitted), t);
3003 return pedantic_non_lvalue_loc (loc, t);
3006 /* Return a tree for the case when the result of an expression is RESULT
3007 converted to TYPE and OMITTED1 and OMITTED2 were previously operands
3008 of the expression but are now not needed.
3010 If OMITTED1 or OMITTED2 has side effects, they must be evaluated.
3011 If both OMITTED1 and OMITTED2 have side effects, OMITTED1 is
3012 evaluated before OMITTED2. Otherwise, if neither has side effects,
3013 just do the conversion of RESULT to TYPE. */
3015 tree
3016 omit_two_operands_loc (location_t loc, tree type, tree result,
3017 tree omitted1, tree omitted2)
3019 tree t = fold_convert_loc (loc, type, result);
3021 if (TREE_SIDE_EFFECTS (omitted2))
3022 t = build2_loc (loc, COMPOUND_EXPR, type, omitted2, t);
3023 if (TREE_SIDE_EFFECTS (omitted1))
3024 t = build2_loc (loc, COMPOUND_EXPR, type, omitted1, t);
3026 return TREE_CODE (t) != COMPOUND_EXPR ? non_lvalue_loc (loc, t) : t;
3030 /* Return a simplified tree node for the truth-negation of ARG. This
3031 never alters ARG itself. We assume that ARG is an operation that
3032 returns a truth value (0 or 1).
3034 FIXME: one would think we would fold the result, but it causes
3035 problems with the dominator optimizer. */
3037 tree
3038 fold_truth_not_expr (location_t loc, tree arg)
3040 tree type = TREE_TYPE (arg);
3041 enum tree_code code = TREE_CODE (arg);
3042 location_t loc1, loc2;
3044 /* If this is a comparison, we can simply invert it, except for
3045 floating-point non-equality comparisons, in which case we just
3046 enclose a TRUTH_NOT_EXPR around what we have. */
3048 if (TREE_CODE_CLASS (code) == tcc_comparison)
3050 tree op_type = TREE_TYPE (TREE_OPERAND (arg, 0));
3051 if (FLOAT_TYPE_P (op_type)
3052 && flag_trapping_math
3053 && code != ORDERED_EXPR && code != UNORDERED_EXPR
3054 && code != NE_EXPR && code != EQ_EXPR)
3055 return NULL_TREE;
3057 code = invert_tree_comparison (code, HONOR_NANS (TYPE_MODE (op_type)));
3058 if (code == ERROR_MARK)
3059 return NULL_TREE;
3061 return build2_loc (loc, code, type, TREE_OPERAND (arg, 0),
3062 TREE_OPERAND (arg, 1));
3065 switch (code)
3067 case INTEGER_CST:
3068 return constant_boolean_node (integer_zerop (arg), type);
3070 case TRUTH_AND_EXPR:
3071 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
3072 loc2 = expr_location_or (TREE_OPERAND (arg, 1), loc);
3073 return build2_loc (loc, TRUTH_OR_EXPR, type,
3074 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)),
3075 invert_truthvalue_loc (loc2, TREE_OPERAND (arg, 1)));
3077 case TRUTH_OR_EXPR:
3078 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
3079 loc2 = expr_location_or (TREE_OPERAND (arg, 1), loc);
3080 return build2_loc (loc, TRUTH_AND_EXPR, type,
3081 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)),
3082 invert_truthvalue_loc (loc2, TREE_OPERAND (arg, 1)));
3084 case TRUTH_XOR_EXPR:
3085 /* Here we can invert either operand. We invert the first operand
3086 unless the second operand is a TRUTH_NOT_EXPR in which case our
3087 result is the XOR of the first operand with the inside of the
3088 negation of the second operand. */
3090 if (TREE_CODE (TREE_OPERAND (arg, 1)) == TRUTH_NOT_EXPR)
3091 return build2_loc (loc, TRUTH_XOR_EXPR, type, TREE_OPERAND (arg, 0),
3092 TREE_OPERAND (TREE_OPERAND (arg, 1), 0));
3093 else
3094 return build2_loc (loc, TRUTH_XOR_EXPR, type,
3095 invert_truthvalue_loc (loc, TREE_OPERAND (arg, 0)),
3096 TREE_OPERAND (arg, 1));
3098 case TRUTH_ANDIF_EXPR:
3099 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
3100 loc2 = expr_location_or (TREE_OPERAND (arg, 1), loc);
3101 return build2_loc (loc, TRUTH_ORIF_EXPR, type,
3102 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)),
3103 invert_truthvalue_loc (loc2, TREE_OPERAND (arg, 1)));
3105 case TRUTH_ORIF_EXPR:
3106 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
3107 loc2 = expr_location_or (TREE_OPERAND (arg, 1), loc);
3108 return build2_loc (loc, TRUTH_ANDIF_EXPR, type,
3109 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)),
3110 invert_truthvalue_loc (loc2, TREE_OPERAND (arg, 1)));
3112 case TRUTH_NOT_EXPR:
3113 return TREE_OPERAND (arg, 0);
3115 case COND_EXPR:
3117 tree arg1 = TREE_OPERAND (arg, 1);
3118 tree arg2 = TREE_OPERAND (arg, 2);
3120 loc1 = expr_location_or (TREE_OPERAND (arg, 1), loc);
3121 loc2 = expr_location_or (TREE_OPERAND (arg, 2), loc);
3123 /* A COND_EXPR may have a throw as one operand, which
3124 then has void type. Just leave void operands
3125 as they are. */
3126 return build3_loc (loc, COND_EXPR, type, TREE_OPERAND (arg, 0),
3127 VOID_TYPE_P (TREE_TYPE (arg1))
3128 ? arg1 : invert_truthvalue_loc (loc1, arg1),
3129 VOID_TYPE_P (TREE_TYPE (arg2))
3130 ? arg2 : invert_truthvalue_loc (loc2, arg2));
3133 case COMPOUND_EXPR:
3134 loc1 = expr_location_or (TREE_OPERAND (arg, 1), loc);
3135 return build2_loc (loc, COMPOUND_EXPR, type,
3136 TREE_OPERAND (arg, 0),
3137 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 1)));
3139 case NON_LVALUE_EXPR:
3140 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
3141 return invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0));
3143 CASE_CONVERT:
3144 if (TREE_CODE (TREE_TYPE (arg)) == BOOLEAN_TYPE)
3145 return build1_loc (loc, TRUTH_NOT_EXPR, type, arg);
3147 /* ... fall through ... */
3149 case FLOAT_EXPR:
3150 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
3151 return build1_loc (loc, TREE_CODE (arg), type,
3152 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)));
3154 case BIT_AND_EXPR:
3155 if (!integer_onep (TREE_OPERAND (arg, 1)))
3156 return NULL_TREE;
3157 return build2_loc (loc, EQ_EXPR, type, arg, build_int_cst (type, 0));
3159 case SAVE_EXPR:
3160 return build1_loc (loc, TRUTH_NOT_EXPR, type, arg);
3162 case CLEANUP_POINT_EXPR:
3163 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
3164 return build1_loc (loc, CLEANUP_POINT_EXPR, type,
3165 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)));
3167 default:
3168 return NULL_TREE;
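/* Examples of the cases above: "!(a && b)" becomes "!a || !b"
   (De Morgan), "!(a ? b : c)" becomes "a ? !b : !c", and
   "!(x & 1)" becomes "(x & 1) == 0". */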
3172 /* Return a simplified tree node for the truth-negation of ARG. This
3173 never alters ARG itself. We assume that ARG is an operation that
3174 returns a truth value (0 or 1).
3176 FIXME: one would think we would fold the result, but it causes
3177 problems with the dominator optimizer. */
3179 tree
3180 invert_truthvalue_loc (location_t loc, tree arg)
3182 tree tem;
3184 if (TREE_CODE (arg) == ERROR_MARK)
3185 return arg;
3187 tem = fold_truth_not_expr (loc, arg);
3188 if (!tem)
3189 tem = build1_loc (loc, TRUTH_NOT_EXPR, TREE_TYPE (arg), arg);
3191 return tem;
3194 /* Given a bit-wise operation CODE applied to ARG0 and ARG1, see if both
3195 operands are another bit-wise operation with a common input. If so,
3196 distribute the bit operations to save an operation and possibly two if
3197 constants are involved. For example, convert
3198 (A | B) & (A | C) into A | (B & C)
3199 Further simplification will occur if B and C are constants.
3201 If this optimization cannot be done, 0 will be returned. */
3203 static tree
3204 distribute_bit_expr (location_t loc, enum tree_code code, tree type,
3205 tree arg0, tree arg1)
3207 tree common;
3208 tree left, right;
3210 if (TREE_CODE (arg0) != TREE_CODE (arg1)
3211 || TREE_CODE (arg0) == code
3212 || (TREE_CODE (arg0) != BIT_AND_EXPR
3213 && TREE_CODE (arg0) != BIT_IOR_EXPR))
3214 return 0;
3216 if (operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 0), 0))
3218 common = TREE_OPERAND (arg0, 0);
3219 left = TREE_OPERAND (arg0, 1);
3220 right = TREE_OPERAND (arg1, 1);
3222 else if (operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 1), 0))
3224 common = TREE_OPERAND (arg0, 0);
3225 left = TREE_OPERAND (arg0, 1);
3226 right = TREE_OPERAND (arg1, 0);
3228 else if (operand_equal_p (TREE_OPERAND (arg0, 1), TREE_OPERAND (arg1, 0), 0))
3230 common = TREE_OPERAND (arg0, 1);
3231 left = TREE_OPERAND (arg0, 0);
3232 right = TREE_OPERAND (arg1, 1);
3234 else if (operand_equal_p (TREE_OPERAND (arg0, 1), TREE_OPERAND (arg1, 1), 0))
3236 common = TREE_OPERAND (arg0, 1);
3237 left = TREE_OPERAND (arg0, 0);
3238 right = TREE_OPERAND (arg1, 0);
3240 else
3241 return 0;
3243 common = fold_convert_loc (loc, type, common);
3244 left = fold_convert_loc (loc, type, left);
3245 right = fold_convert_loc (loc, type, right);
3246 return fold_build2_loc (loc, TREE_CODE (arg0), type, common,
3247 fold_build2_loc (loc, code, type, left, right));
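/* For example, "(x | 3) & (x | 5)" becomes "x | (3 & 5)", i.e.
   "x | 1": one bit operation is saved and the constant part folds
   away completely. */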
3250 /* Knowing that ARG0 and ARG1 are each either an RDIV_EXPR or a MULT_EXPR,
3251 simplify a binary operation with code CODE. This optimization is unsafe. */
3252 static tree
3253 distribute_real_division (location_t loc, enum tree_code code, tree type,
3254 tree arg0, tree arg1)
3256 bool mul0 = TREE_CODE (arg0) == MULT_EXPR;
3257 bool mul1 = TREE_CODE (arg1) == MULT_EXPR;
3259 /* (A / C) +- (B / C) -> (A +- B) / C. */
3260 if (mul0 == mul1
3261 && operand_equal_p (TREE_OPERAND (arg0, 1),
3262 TREE_OPERAND (arg1, 1), 0))
3263 return fold_build2_loc (loc, mul0 ? MULT_EXPR : RDIV_EXPR, type,
3264 fold_build2_loc (loc, code, type,
3265 TREE_OPERAND (arg0, 0),
3266 TREE_OPERAND (arg1, 0)),
3267 TREE_OPERAND (arg0, 1));
3269 /* (A / C1) +- (A / C2) -> A * (1 / C1 +- 1 / C2). */
3270 if (operand_equal_p (TREE_OPERAND (arg0, 0),
3271 TREE_OPERAND (arg1, 0), 0)
3272 && TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
3273 && TREE_CODE (TREE_OPERAND (arg1, 1)) == REAL_CST)
3275 REAL_VALUE_TYPE r0, r1;
3276 r0 = TREE_REAL_CST (TREE_OPERAND (arg0, 1));
3277 r1 = TREE_REAL_CST (TREE_OPERAND (arg1, 1));
3278 if (!mul0)
3279 real_arithmetic (&r0, RDIV_EXPR, &dconst1, &r0);
3280 if (!mul1)
3281 real_arithmetic (&r1, RDIV_EXPR, &dconst1, &r1);
3282 real_arithmetic (&r0, code, &r0, &r1);
3283 return fold_build2_loc (loc, MULT_EXPR, type,
3284 TREE_OPERAND (arg0, 0),
3285 build_real (type, r0));
3288 return NULL_TREE;
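/* For example, "a / c + b / c" becomes "(a + b) / c", saving a
   division, and "x / 2.0 - x / 4.0" becomes "x * (0.5 - 0.25)",
   i.e. "x * 0.25".  Either can change rounding, and hence the
   result, in IEEE arithmetic, which is why this is reserved for
   unsafe math optimizations. */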
3291 /* Return a BIT_FIELD_REF of type TYPE to refer to BITSIZE bits of INNER
3292 starting at BITPOS. The field is unsigned if UNSIGNEDP is nonzero. */
3294 static tree
3295 make_bit_field_ref (location_t loc, tree inner, tree type,
3296 HOST_WIDE_INT bitsize, HOST_WIDE_INT bitpos, int unsignedp)
3298 tree result, bftype;
3300 if (bitpos == 0)
3302 tree size = TYPE_SIZE (TREE_TYPE (inner));
3303 if ((INTEGRAL_TYPE_P (TREE_TYPE (inner))
3304 || POINTER_TYPE_P (TREE_TYPE (inner)))
3305 && host_integerp (size, 0)
3306 && tree_low_cst (size, 0) == bitsize)
3307 return fold_convert_loc (loc, type, inner);
3310 bftype = type;
3311 if (TYPE_PRECISION (bftype) != bitsize
3312 || TYPE_UNSIGNED (bftype) == !unsignedp)
3313 bftype = build_nonstandard_integer_type (bitsize, 0);
3315 result = build3_loc (loc, BIT_FIELD_REF, bftype, inner,
3316 size_int (bitsize), bitsize_int (bitpos));
3318 if (bftype != type)
3319 result = fold_convert_loc (loc, type, result);
3321 return result;
3324 /* Optimize a bit-field compare.
3326 There are two cases: First is a compare against a constant and the
3327 second is a comparison of two items where the fields are at the same
3328 bit position relative to the start of a chunk (byte, halfword, word)
3329 large enough to contain it. In these cases we can avoid the shift
3330 implicit in bitfield extractions.
3332 For constants, we emit a compare of the shifted constant with the
3333 BIT_AND_EXPR of a mask and a byte, halfword, or word of the operand being
3334 compared. For two fields at the same position, we do the ANDs with the
3335 similar mask and compare the result of the ANDs.
3337 CODE is the comparison code, known to be either NE_EXPR or EQ_EXPR.
3338 COMPARE_TYPE is the type of the comparison, and LHS and RHS
3339 are the left and right operands of the comparison, respectively.
3341 If the optimization described above can be done, we return the resulting
3342 tree. Otherwise we return zero. */
3344 static tree
3345 optimize_bit_field_compare (location_t loc, enum tree_code code,
3346 tree compare_type, tree lhs, tree rhs)
3348 HOST_WIDE_INT lbitpos, lbitsize, rbitpos, rbitsize, nbitpos, nbitsize;
3349 tree type = TREE_TYPE (lhs);
3350 tree signed_type, unsigned_type;
3351 int const_p = TREE_CODE (rhs) == INTEGER_CST;
3352 enum machine_mode lmode, rmode, nmode;
3353 int lunsignedp, runsignedp;
3354 int lvolatilep = 0, rvolatilep = 0;
3355 tree linner, rinner = NULL_TREE;
3356 tree mask;
3357 tree offset;
3359 /* In the strict volatile bitfields case, doing code changes here may prevent
3360 other optimizations, in particular in a SLOW_BYTE_ACCESS setting. */
3361 if (flag_strict_volatile_bitfields > 0)
3362 return 0;
3364 /* Get all the information about the extractions being done. If the bit size
3365 is the same as the size of the underlying object, we aren't doing an
3366 extraction at all and so can do nothing. We also don't want to
3367 do anything if the inner expression is a PLACEHOLDER_EXPR since we
3368 then will no longer be able to replace it. */
3369 linner = get_inner_reference (lhs, &lbitsize, &lbitpos, &offset, &lmode,
3370 &lunsignedp, &lvolatilep, false);
3371 if (linner == lhs || lbitsize == GET_MODE_BITSIZE (lmode) || lbitsize < 0
3372 || offset != 0 || TREE_CODE (linner) == PLACEHOLDER_EXPR)
3373 return 0;
3375 if (!const_p)
3377 /* If this is not a constant, we can only do something if bit positions,
3378 sizes, and signedness are the same. */
3379 rinner = get_inner_reference (rhs, &rbitsize, &rbitpos, &offset, &rmode,
3380 &runsignedp, &rvolatilep, false);
3382 if (rinner == rhs || lbitpos != rbitpos || lbitsize != rbitsize
3383 || lunsignedp != runsignedp || offset != 0
3384 || TREE_CODE (rinner) == PLACEHOLDER_EXPR)
3385 return 0;
3388 /* See if we can find a mode to refer to this field. We should be able to,
3389 but fail if we can't. */
3390 if (lvolatilep
3391 && GET_MODE_BITSIZE (lmode) > 0
3392 && flag_strict_volatile_bitfields > 0)
3393 nmode = lmode;
3394 else
3395 nmode = get_best_mode (lbitsize, lbitpos, 0, 0,
3396 const_p ? TYPE_ALIGN (TREE_TYPE (linner))
3397 : MIN (TYPE_ALIGN (TREE_TYPE (linner)),
3398 TYPE_ALIGN (TREE_TYPE (rinner))),
3399 word_mode, lvolatilep || rvolatilep);
3400 if (nmode == VOIDmode)
3401 return 0;
3403 /* Set signed and unsigned types of the precision of this mode for the
3404 shifts below. */
3405 signed_type = lang_hooks.types.type_for_mode (nmode, 0);
3406 unsigned_type = lang_hooks.types.type_for_mode (nmode, 1);
3408 /* Compute the bit position and size for the new reference and our offset
3409 within it. If the new reference is the same size as the original, we
3410 won't optimize anything, so return zero. */
3411 nbitsize = GET_MODE_BITSIZE (nmode);
3412 nbitpos = lbitpos & ~ (nbitsize - 1);
3413 lbitpos -= nbitpos;
3414 if (nbitsize == lbitsize)
3415 return 0;
3417 if (BYTES_BIG_ENDIAN)
3418 lbitpos = nbitsize - lbitsize - lbitpos;
3420 /* Make the mask to be used against the extracted field. */
3421 mask = build_int_cst_type (unsigned_type, -1);
3422 mask = const_binop (LSHIFT_EXPR, mask, size_int (nbitsize - lbitsize));
3423 mask = const_binop (RSHIFT_EXPR, mask,
3424 size_int (nbitsize - lbitsize - lbitpos));
3426 if (! const_p)
3427 /* If not comparing with constant, just rework the comparison
3428 and return. */
3429 return fold_build2_loc (loc, code, compare_type,
3430 fold_build2_loc (loc, BIT_AND_EXPR, unsigned_type,
3431 make_bit_field_ref (loc, linner,
3432 unsigned_type,
3433 nbitsize, nbitpos,
3434 1),
3435 mask),
3436 fold_build2_loc (loc, BIT_AND_EXPR, unsigned_type,
3437 make_bit_field_ref (loc, rinner,
3438 unsigned_type,
3439 nbitsize, nbitpos,
3440 1),
3441 mask));
3443 /* Otherwise, we are handling the constant case. See if the constant is too
3444 big for the field. Warn and return a tree for 0 (false) if so. We do
3445 this not only for its own sake, but to avoid having to test for this
3446 error case below. If we didn't, we might generate wrong code.
3448 For unsigned fields, the constant shifted right by the field length should
3449 be all zero. For signed fields, the high-order bits should agree with
3450 the sign bit. */
3452 if (lunsignedp)
3454 if (! integer_zerop (const_binop (RSHIFT_EXPR,
3455 fold_convert_loc (loc,
3456 unsigned_type, rhs),
3457 size_int (lbitsize))))
3459 warning (0, "comparison is always %d due to width of bit-field",
3460 code == NE_EXPR);
3461 return constant_boolean_node (code == NE_EXPR, compare_type);
3464 else
3466 tree tem = const_binop (RSHIFT_EXPR,
3467 fold_convert_loc (loc, signed_type, rhs),
3468 size_int (lbitsize - 1));
3469 if (! integer_zerop (tem) && ! integer_all_onesp (tem))
3471 warning (0, "comparison is always %d due to width of bit-field",
3472 code == NE_EXPR);
3473 return constant_boolean_node (code == NE_EXPR, compare_type);
3477 /* Single-bit compares should always be against zero. */
3478 if (lbitsize == 1 && ! integer_zerop (rhs))
3480 code = code == EQ_EXPR ? NE_EXPR : EQ_EXPR;
3481 rhs = build_int_cst (type, 0);
3484 /* Make a new bitfield reference, shift the constant over the
3485 appropriate number of bits and mask it with the computed mask
3486 (in case this was a signed field). If we changed it, make a new one. */
3487 lhs = make_bit_field_ref (loc, linner, unsigned_type, nbitsize, nbitpos, 1);
3488 if (lvolatilep)
3490 TREE_SIDE_EFFECTS (lhs) = 1;
3491 TREE_THIS_VOLATILE (lhs) = 1;
3494 rhs = const_binop (BIT_AND_EXPR,
3495 const_binop (LSHIFT_EXPR,
3496 fold_convert_loc (loc, unsigned_type, rhs),
3497 size_int (lbitpos)),
3498 mask);
3500 lhs = build2_loc (loc, code, compare_type,
3501 build2 (BIT_AND_EXPR, unsigned_type, lhs, mask), rhs);
3502 return lhs;
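/* As a worked example, take a 3-bit unsigned field F occupying bits
   4..6 of its containing unit W (one possible little-endian layout)
   and the test "F == 5".  The mask computed above is 0x70, the
   shifted constant is 5 << 4 == 0x50, and the result is
   "(W & 0x70) == 0x50": W is compared directly, with no shift to
   extract the field. */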
3505 /* Subroutine for fold_truth_andor_1: decode a field reference.
3507 If EXP is a comparison reference, we return the innermost reference.
3509 *PBITSIZE is set to the number of bits in the reference, *PBITPOS is
3510 set to the starting bit number.
3512 If the innermost field can be completely contained in a mode-sized
3513 unit, *PMODE is set to that mode. Otherwise, it is set to VOIDmode.
3515 *PVOLATILEP is set to 1 if any expression encountered is volatile;
3516 otherwise it is not changed.
3518 *PUNSIGNEDP is set to the signedness of the field.
3520 *PMASK is set to the mask used. This is either contained in a
3521 BIT_AND_EXPR or derived from the width of the field.
3523 *PAND_MASK is set to the mask found in a BIT_AND_EXPR, if any.
3525 Return 0 if this is not a component reference or is one that we can't
3526 do anything with. */
3528 static tree
3529 decode_field_reference (location_t loc, tree exp, HOST_WIDE_INT *pbitsize,
3530 HOST_WIDE_INT *pbitpos, enum machine_mode *pmode,
3531 int *punsignedp, int *pvolatilep,
3532 tree *pmask, tree *pand_mask)
3534 tree outer_type = 0;
3535 tree and_mask = 0;
3536 tree mask, inner, offset;
3537 tree unsigned_type;
3538 unsigned int precision;
3540 /* All the optimizations using this function assume integer fields.
3541 There are problems with FP fields since the type_for_size call
3542 below can fail for, e.g., XFmode. */
3543 if (! INTEGRAL_TYPE_P (TREE_TYPE (exp)))
3544 return 0;
3546 /* We are interested in the bare arrangement of bits, so strip everything
3547 that doesn't affect the machine mode. However, record the type of the
3548 outermost expression if it may matter below. */
3549 if (CONVERT_EXPR_P (exp)
3550 || TREE_CODE (exp) == NON_LVALUE_EXPR)
3551 outer_type = TREE_TYPE (exp);
3552 STRIP_NOPS (exp);
3554 if (TREE_CODE (exp) == BIT_AND_EXPR)
3556 and_mask = TREE_OPERAND (exp, 1);
3557 exp = TREE_OPERAND (exp, 0);
3558 STRIP_NOPS (exp); STRIP_NOPS (and_mask);
3559 if (TREE_CODE (and_mask) != INTEGER_CST)
3560 return 0;
3563 inner = get_inner_reference (exp, pbitsize, pbitpos, &offset, pmode,
3564 punsignedp, pvolatilep, false);
3565 if ((inner == exp && and_mask == 0)
3566 || *pbitsize < 0 || offset != 0
3567 || TREE_CODE (inner) == PLACEHOLDER_EXPR)
3568 return 0;
3570 /* If the number of bits in the reference is the same as the bitsize of
3571 the outer type, then the outer type gives the signedness. Otherwise
3572 (in case of a small bitfield) the signedness is unchanged. */
3573 if (outer_type && *pbitsize == TYPE_PRECISION (outer_type))
3574 *punsignedp = TYPE_UNSIGNED (outer_type);
3576 /* Compute the mask to access the bitfield. */
3577 unsigned_type = lang_hooks.types.type_for_size (*pbitsize, 1);
3578 precision = TYPE_PRECISION (unsigned_type);
3580 mask = build_int_cst_type (unsigned_type, -1);
3582 mask = const_binop (LSHIFT_EXPR, mask, size_int (precision - *pbitsize));
3583 mask = const_binop (RSHIFT_EXPR, mask, size_int (precision - *pbitsize));
3585 /* Merge it with the mask we found in the BIT_AND_EXPR, if any. */
3586 if (and_mask != 0)
3587 mask = fold_build2_loc (loc, BIT_AND_EXPR, unsigned_type,
3588 fold_convert_loc (loc, unsigned_type, and_mask), mask);
3590 *pmask = mask;
3591 *pand_mask = and_mask;
3592 return inner;
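/* The mask construction above amounts to, sketched in plain C for a
   32-bit unsigned type:

     uint32_t mask = (uint32_t) -1;    0xffffffff
     mask <<= 32 - pbitsize;           e.g. pbitsize == 3: 0xe0000000
     mask >>= 32 - pbitsize;           0x00000007

   i.e. *PBITSIZE ones in the low-order bit positions, subsequently
   ANDed with any mask taken from a BIT_AND_EXPR. */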
3595 /* Return nonzero if MASK represents a mask of SIZE ones in the low-order
3596 bit positions. */
3598 static int
3599 all_ones_mask_p (const_tree mask, int size)
3601 tree type = TREE_TYPE (mask);
3602 unsigned int precision = TYPE_PRECISION (type);
3603 tree tmask;
3605 tmask = build_int_cst_type (signed_type_for (type), -1);
3607 return
3608 tree_int_cst_equal (mask,
3609 const_binop (RSHIFT_EXPR,
3610 const_binop (LSHIFT_EXPR, tmask,
3611 size_int (precision - size)),
3612 size_int (precision - size)));
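/* E.g. for SIZE 8 and a 32-bit mask type, only the value 0xff
   passes.  fold_truth_andor_1 uses this to detect when a merged
   field covers every bit of the word being compared, so the
   masking can be omitted altogether. */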
3615 /* Subroutine for fold: determine if VAL is the INTEGER_CONST that
3616 represents the sign bit of EXP's type. If EXP represents a sign
3617 or zero extension, also test VAL against the unextended type.
3618 The return value is the (sub)expression whose sign bit is VAL,
3619 or NULL_TREE otherwise. */
3621 static tree
3622 sign_bit_p (tree exp, const_tree val)
3624 unsigned HOST_WIDE_INT mask_lo, lo;
3625 HOST_WIDE_INT mask_hi, hi;
3626 int width;
3627 tree t;
3629 /* Tree EXP must have an integral type. */
3630 t = TREE_TYPE (exp);
3631 if (! INTEGRAL_TYPE_P (t))
3632 return NULL_TREE;
3634 /* Tree VAL must be an integer constant. */
3635 if (TREE_CODE (val) != INTEGER_CST
3636 || TREE_OVERFLOW (val))
3637 return NULL_TREE;
3639 width = TYPE_PRECISION (t);
3640 if (width > HOST_BITS_PER_WIDE_INT)
3642 hi = (unsigned HOST_WIDE_INT) 1 << (width - HOST_BITS_PER_WIDE_INT - 1);
3643 lo = 0;
3645 mask_hi = ((unsigned HOST_WIDE_INT) -1
3646 >> (HOST_BITS_PER_DOUBLE_INT - width));
3647 mask_lo = -1;
3649 else
3651 hi = 0;
3652 lo = (unsigned HOST_WIDE_INT) 1 << (width - 1);
3654 mask_hi = 0;
3655 mask_lo = ((unsigned HOST_WIDE_INT) -1
3656 >> (HOST_BITS_PER_WIDE_INT - width));
3659 /* We mask off those bits beyond TREE_TYPE (exp) so that we can
3660 treat VAL as if it were unsigned. */
3661 if ((TREE_INT_CST_HIGH (val) & mask_hi) == hi
3662 && (TREE_INT_CST_LOW (val) & mask_lo) == lo)
3663 return exp;
3665 /* Handle extension from a narrower type. */
3666 if (TREE_CODE (exp) == NOP_EXPR
3667 && TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0))) < width)
3668 return sign_bit_p (TREE_OPERAND (exp, 0), val);
3670 return NULL_TREE;
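/* For example, in a 32-bit type the sign bit is 1 << 31, so only
   VAL == 0x80000000 matches; and for "(int) c" with C a signed
   char, VAL == 0x80 is accepted too, via the narrower-type case
   just above. */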
3673 /* Subroutine for fold_truth_andor_1: determine if an operand is simple enough
3674 to be evaluated unconditionally. */
3676 static int
3677 simple_operand_p (const_tree exp)
3679 /* Strip any conversions that don't change the machine mode. */
3680 STRIP_NOPS (exp);
3682 return (CONSTANT_CLASS_P (exp)
3683 || TREE_CODE (exp) == SSA_NAME
3684 || (DECL_P (exp)
3685 && ! TREE_ADDRESSABLE (exp)
3686 && ! TREE_THIS_VOLATILE (exp)
3687 && ! DECL_NONLOCAL (exp)
3688 /* Don't regard global variables as simple. They may be
3689 allocated in ways unknown to the compiler (shared memory,
3690 #pragma weak, etc). */
3691 && ! TREE_PUBLIC (exp)
3692 && ! DECL_EXTERNAL (exp)
3693 /* Loading a static variable is unduly expensive, but global
3694 registers aren't expensive. */
3695 && (! TREE_STATIC (exp) || DECL_REGISTER (exp))));
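/* For example, a local scalar variable, a constant, or an SSA name
   is simple enough to evaluate unconditionally, while volatile,
   addressable, global, or external declarations are rejected for
   the reasons given in the comments above. */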
3698 /* Subroutine for fold_truth_andor: determine if an operand is simple enough
3699 to be evaluated unconditionally.
3700 In addition to simple_operand_p, we assume that comparisons, conversions,
3701 and logic-not operations are simple if their operands are simple, too. */
3703 static bool
3704 simple_operand_p_2 (tree exp)
3706 enum tree_code code;
3708 if (TREE_SIDE_EFFECTS (exp)
3709 || tree_could_trap_p (exp))
3710 return false;
3712 while (CONVERT_EXPR_P (exp))
3713 exp = TREE_OPERAND (exp, 0);
3715 code = TREE_CODE (exp);
3717 if (TREE_CODE_CLASS (code) == tcc_comparison)
3718 return (simple_operand_p (TREE_OPERAND (exp, 0))
3719 && simple_operand_p (TREE_OPERAND (exp, 1)));
3721 if (code == TRUTH_NOT_EXPR)
3722 return simple_operand_p_2 (TREE_OPERAND (exp, 0));
3724 return simple_operand_p (exp);
3728 /* The following functions are subroutines to fold_range_test and allow it to
3729 try to change a logical combination of comparisons into a range test.
3731 For example, both
3732 X == 2 || X == 3 || X == 4 || X == 5
3733 and
3734 X >= 2 && X <= 5
3735 are converted to
3736 (unsigned) (X - 2) <= 3
3738 We describe each set of comparisons as being either inside or outside
3739 a range, using a variable named like IN_P, and then describe the
3740 range with a lower and upper bound. If one of the bounds is omitted,
3741 it represents either the highest or lowest value of the type.
3743 In the comments below, we represent a range by two numbers in brackets
3744 preceded by a "+" to designate being inside that range, or a "-" to
3745 designate being outside that range, so the condition can be inverted by
3746 flipping the prefix. An omitted bound is represented by a "-". For
3747 example, "- [-, 10]" means being outside the range starting at the lowest
3748 possible value and ending at 10, in other words, being greater than 10.
3749 The range "+ [-, -]" is always true and hence the range "- [-, -]" is
3750 always false.
3752 We set up things so that the missing bounds are handled in a consistent
3753 manner so neither a missing bound nor "true" and "false" need to be
3754 handled using a special case. */
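/* Editor's sketch of the conversion described above (illustration only,
   not original source).  Both forms test membership in [2, 5]; the
   folded form needs one subtraction and one unsigned comparison, since
   values below 2 wrap around to very large unsigned numbers: */

static int
in_range_naive (int x)
{
  return x == 2 || x == 3 || x == 4 || x == 5;
}

static int
in_range_folded (int x)
{
  return (unsigned) (x - 2) <= 3u;
}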
3756 /* Return the result of applying CODE to ARG0 and ARG1, but handle the case
3757 of ARG0 and/or ARG1 being omitted, meaning an unlimited range. UPPER0_P
3758 and UPPER1_P are nonzero if the respective argument is an upper bound
3759 and zero for a lower. TYPE, if nonzero, is the type of the result; it
3760 must be specified for a comparison. ARG1 will be converted to ARG0's
3761 type if both are specified. */
3763 static tree
3764 range_binop (enum tree_code code, tree type, tree arg0, int upper0_p,
3765 tree arg1, int upper1_p)
3767 tree tem;
3768 int result;
3769 int sgn0, sgn1;
3771 /* If neither arg represents infinity, do the normal operation.
3772 Else, if not a comparison, return infinity. Else handle the special
3773 comparison rules. Note that most of the cases below won't occur, but
3774 are handled for consistency. */
3776 if (arg0 != 0 && arg1 != 0)
3778 tem = fold_build2 (code, type != 0 ? type : TREE_TYPE (arg0),
3779 arg0, fold_convert (TREE_TYPE (arg0), arg1));
3780 STRIP_NOPS (tem);
3781 return TREE_CODE (tem) == INTEGER_CST ? tem : 0;
3784 if (TREE_CODE_CLASS (code) != tcc_comparison)
3785 return 0;
3787 /* Set SGN[01] to -1 if ARG[01] is a lower bound, 1 for upper, and 0
3788 for neither. In real maths, we cannot assume open ended ranges are
3789 the same. But, this is computer arithmetic, where numbers are finite.
3790 We can therefore make the transformation of any unbounded range with
3791 the value Z, Z being greater than any representable number. This permits
3792 us to treat unbounded ranges as equal. */
3793 sgn0 = arg0 != 0 ? 0 : (upper0_p ? 1 : -1);
3794 sgn1 = arg1 != 0 ? 0 : (upper1_p ? 1 : -1);
3795 switch (code)
3797 case EQ_EXPR:
3798 result = sgn0 == sgn1;
3799 break;
3800 case NE_EXPR:
3801 result = sgn0 != sgn1;
3802 break;
3803 case LT_EXPR:
3804 result = sgn0 < sgn1;
3805 break;
3806 case LE_EXPR:
3807 result = sgn0 <= sgn1;
3808 break;
3809 case GT_EXPR:
3810 result = sgn0 > sgn1;
3811 break;
3812 case GE_EXPR:
3813 result = sgn0 >= sgn1;
3814 break;
3815 default:
3816 gcc_unreachable ();
3819 return constant_boolean_node (result, type);
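/* Editor's sketch of the infinity rule above (illustration only): a
   missing bound is modelled as -infinity when it is a lower bound and
   +infinity when it is an upper bound, so two missing bounds on the
   same side compare equal: */

static int
bound_lt (const long *a, int a_upper, const long *b, int b_upper)
{
  int sa = a ? 0 : (a_upper ? 1 : -1);   /* mirrors sgn0 */
  int sb = b ? 0 : (b_upper ? 1 : -1);   /* mirrors sgn1 */
  if (a && b)
    return *a < *b;    /* both finite: ordinary comparison */
  return sa < sb;      /* at least one infinity: compare signs */
}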
3822 /* Helper routine for make_range. Perform one step for it, return
3823 new expression if the loop should continue or NULL_TREE if it should
3824 stop. */
3826 tree
3827 make_range_step (location_t loc, enum tree_code code, tree arg0, tree arg1,
3828 tree exp_type, tree *p_low, tree *p_high, int *p_in_p,
3829 bool *strict_overflow_p)
3831 tree arg0_type = TREE_TYPE (arg0);
3832 tree n_low, n_high, low = *p_low, high = *p_high;
3833 int in_p = *p_in_p, n_in_p;
3835 switch (code)
3837 case TRUTH_NOT_EXPR:
3838 *p_in_p = ! in_p;
3839 return arg0;
3841 case EQ_EXPR: case NE_EXPR:
3842 case LT_EXPR: case LE_EXPR: case GE_EXPR: case GT_EXPR:
3843 /* We can only do something if the range is testing for zero
3844 and if the second operand is an integer constant. Note that
3845 saying something is "in" the range we make is done by
3846 complementing IN_P, since it is set in the initial case of
3847 being not equal to zero; "out" is leaving it alone. */
3848 if (low == NULL_TREE || high == NULL_TREE
3849 || ! integer_zerop (low) || ! integer_zerop (high)
3850 || TREE_CODE (arg1) != INTEGER_CST)
3851 return NULL_TREE;
3853 switch (code)
3855 case NE_EXPR: /* - [c, c] */
3856 low = high = arg1;
3857 break;
3858 case EQ_EXPR: /* + [c, c] */
3859 in_p = ! in_p, low = high = arg1;
3860 break;
3861 case GT_EXPR: /* - [-, c] */
3862 low = 0, high = arg1;
3863 break;
3864 case GE_EXPR: /* + [c, -] */
3865 in_p = ! in_p, low = arg1, high = 0;
3866 break;
3867 case LT_EXPR: /* - [c, -] */
3868 low = arg1, high = 0;
3869 break;
3870 case LE_EXPR: /* + [-, c] */
3871 in_p = ! in_p, low = 0, high = arg1;
3872 break;
3873 default:
3874 gcc_unreachable ();
3877 /* If this is an unsigned comparison, we also know that EXP is
3878 greater than or equal to zero. We base the range tests we make
3879 on that fact, so we record it here so we can parse existing
3880 range tests. We test arg0_type since often the return type
3881 of, e.g. EQ_EXPR, is boolean. */
3882 if (TYPE_UNSIGNED (arg0_type) && (low == 0 || high == 0))
3884 if (! merge_ranges (&n_in_p, &n_low, &n_high,
3885 in_p, low, high, 1,
3886 build_int_cst (arg0_type, 0),
3887 NULL_TREE))
3888 return NULL_TREE;
3890 in_p = n_in_p, low = n_low, high = n_high;
3892 /* If the high bound is missing, but we have a nonzero low
3893 bound, reverse the range so it goes from zero to the low bound
3894 minus 1. */
3895 if (high == 0 && low && ! integer_zerop (low))
3897 in_p = ! in_p;
3898 high = range_binop (MINUS_EXPR, NULL_TREE, low, 0,
3899 integer_one_node, 0);
3900 low = build_int_cst (arg0_type, 0);
3904 *p_low = low;
3905 *p_high = high;
3906 *p_in_p = in_p;
3907 return arg0;
3909 case NEGATE_EXPR:
3910 /* (-x) IN [a,b] -> x in [-b, -a] */
3911 n_low = range_binop (MINUS_EXPR, exp_type,
3912 build_int_cst (exp_type, 0),
3913 0, high, 1);
3914 n_high = range_binop (MINUS_EXPR, exp_type,
3915 build_int_cst (exp_type, 0),
3916 0, low, 0);
3917 if (n_high != 0 && TREE_OVERFLOW (n_high))
3918 return NULL_TREE;
3919 goto normalize;
3921 case BIT_NOT_EXPR:
3922 /* ~ X -> -X - 1 */
3923 return build2_loc (loc, MINUS_EXPR, exp_type, negate_expr (arg0),
3924 build_int_cst (exp_type, 1));
3926 case PLUS_EXPR:
3927 case MINUS_EXPR:
3928 if (TREE_CODE (arg1) != INTEGER_CST)
3929 return NULL_TREE;
3931 /* If flag_wrapv and ARG0_TYPE is signed, then we cannot
3932 move a constant to the other side. */
3933 if (!TYPE_UNSIGNED (arg0_type)
3934 && !TYPE_OVERFLOW_UNDEFINED (arg0_type))
3935 return NULL_TREE;
3937 /* If EXP is signed, any overflow in the computation is undefined,
3938 so we don't worry about it so long as our computations on
3939 the bounds don't overflow. For unsigned, overflow is defined
3940 and this is exactly the right thing. */
3941 n_low = range_binop (code == MINUS_EXPR ? PLUS_EXPR : MINUS_EXPR,
3942 arg0_type, low, 0, arg1, 0);
3943 n_high = range_binop (code == MINUS_EXPR ? PLUS_EXPR : MINUS_EXPR,
3944 arg0_type, high, 1, arg1, 0);
3945 if ((n_low != 0 && TREE_OVERFLOW (n_low))
3946 || (n_high != 0 && TREE_OVERFLOW (n_high)))
3947 return NULL_TREE;
3949 if (TYPE_OVERFLOW_UNDEFINED (arg0_type))
3950 *strict_overflow_p = true;
3952 normalize:
3953 /* Check for an unsigned range which has wrapped around the maximum
3954 value thus making n_high < n_low, and normalize it. */
3955 if (n_low && n_high && tree_int_cst_lt (n_high, n_low))
3957 low = range_binop (PLUS_EXPR, arg0_type, n_high, 0,
3958 integer_one_node, 0);
3959 high = range_binop (MINUS_EXPR, arg0_type, n_low, 0,
3960 integer_one_node, 0);
3962 /* If the range is of the form +/- [ x+1, x ], we won't
3963 be able to normalize it. But then, it represents the
3964 whole range or the empty set, so make it
3965 +/- [ -, - ]. */
3966 if (tree_int_cst_equal (n_low, low)
3967 && tree_int_cst_equal (n_high, high))
3968 low = high = 0;
3969 else
3970 in_p = ! in_p;
3972 else
3973 low = n_low, high = n_high;
3975 *p_low = low;
3976 *p_high = high;
3977 *p_in_p = in_p;
3978 return arg0;
3980 CASE_CONVERT:
3981 case NON_LVALUE_EXPR:
3982 if (TYPE_PRECISION (arg0_type) > TYPE_PRECISION (exp_type))
3983 return NULL_TREE;
3985 if (! INTEGRAL_TYPE_P (arg0_type)
3986 || (low != 0 && ! int_fits_type_p (low, arg0_type))
3987 || (high != 0 && ! int_fits_type_p (high, arg0_type)))
3988 return NULL_TREE;
3990 n_low = low, n_high = high;
3992 if (n_low != 0)
3993 n_low = fold_convert_loc (loc, arg0_type, n_low);
3995 if (n_high != 0)
3996 n_high = fold_convert_loc (loc, arg0_type, n_high);
3998 /* If we're converting arg0 from an unsigned type to exp's
3999 signed type, we will be doing the comparison as unsigned.
4000 The tests above have already verified that LOW and HIGH
4001 are both positive.
4003 So we have to ensure that we will handle large unsigned
4004 values the same way that the current signed bounds treat
4005 negative values. */
4007 if (!TYPE_UNSIGNED (exp_type) && TYPE_UNSIGNED (arg0_type))
4009 tree high_positive;
4010 tree equiv_type;
4011 /* For fixed-point modes, we need to pass the saturating flag
4012 as the 2nd parameter. */
4013 if (ALL_FIXED_POINT_MODE_P (TYPE_MODE (arg0_type)))
4014 equiv_type
4015 = lang_hooks.types.type_for_mode (TYPE_MODE (arg0_type),
4016 TYPE_SATURATING (arg0_type));
4017 else
4018 equiv_type
4019 = lang_hooks.types.type_for_mode (TYPE_MODE (arg0_type), 1);
4021 /* A range without an upper bound is, naturally, unbounded.
4022 Since convert would have cropped a very large value, use
4023 the max value for the destination type. */
4024 high_positive
4025 = TYPE_MAX_VALUE (equiv_type) ? TYPE_MAX_VALUE (equiv_type)
4026 : TYPE_MAX_VALUE (arg0_type);
4028 if (TYPE_PRECISION (exp_type) == TYPE_PRECISION (arg0_type))
4029 high_positive = fold_build2_loc (loc, RSHIFT_EXPR, arg0_type,
4030 fold_convert_loc (loc, arg0_type,
4031 high_positive),
4032 build_int_cst (arg0_type, 1));
4034 /* If the low bound is specified, "and" the range with the
4035 range for which the original unsigned value will be
4036 positive. */
4037 if (low != 0)
4039 if (! merge_ranges (&n_in_p, &n_low, &n_high, 1, n_low, n_high,
4040 1, fold_convert_loc (loc, arg0_type,
4041 integer_zero_node),
4042 high_positive))
4043 return NULL_TREE;
4045 in_p = (n_in_p == in_p);
4047 else
4049 /* Otherwise, "or" the range with the range of the input
4050 that will be interpreted as negative. */
4051 if (! merge_ranges (&n_in_p, &n_low, &n_high, 0, n_low, n_high,
4052 1, fold_convert_loc (loc, arg0_type,
4053 integer_zero_node),
4054 high_positive))
4055 return NULL_TREE;
4057 in_p = (in_p != n_in_p);
4061 *p_low = n_low;
4062 *p_high = n_high;
4063 *p_in_p = in_p;
4064 return arg0;
4066 default:
4067 return NULL_TREE;
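/* Editor's worked example of the PLUS_EXPR/normalize steps above (not
   original code).  For unsigned char X, the test X + 5 <= 10 gives the
   wrapped range [-5, 5] == [251, 5]; since the high bound is now below
   the low bound, it is rewritten as the complement of [6, 250]: */

static int
wrapped_naive (unsigned char x)
{
  return (unsigned char) (x + 5) <= 10;
}

static int
wrapped_normalized (unsigned char x)
{
  /* X is in {0..5} or {251..255}.  */
  return ! (x >= 6 && x <= 250);
}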
4071 /* Given EXP, a logical expression, set the range it is testing into
4072 variables denoted by PIN_P, PLOW, and PHIGH. Return the expression
4073 actually being tested. *PLOW and *PHIGH will be made of the same
4074 type as the returned expression. If EXP is not a comparison, we
4075 will most likely not be returning a useful value and range. Set
4076 *STRICT_OVERFLOW_P to true if the return value is only valid
4077 because signed overflow is undefined; otherwise, do not change
4078 *STRICT_OVERFLOW_P. */
4080 tree
4081 make_range (tree exp, int *pin_p, tree *plow, tree *phigh,
4082 bool *strict_overflow_p)
4084 enum tree_code code;
4085 tree arg0, arg1 = NULL_TREE;
4086 tree exp_type, nexp;
4087 int in_p;
4088 tree low, high;
4089 location_t loc = EXPR_LOCATION (exp);
4091 /* Start with simply saying "EXP != 0" and then look at the code of EXP
4092 and see if we can refine the range. Some of the cases below may not
4093 happen, but it doesn't seem worth worrying about this. We "continue"
4094 the outer loop when we've changed something; otherwise we "break"
4095 the switch, which will "break" the while. */
4097 in_p = 0;
4098 low = high = build_int_cst (TREE_TYPE (exp), 0);
4100 while (1)
4102 code = TREE_CODE (exp);
4103 exp_type = TREE_TYPE (exp);
4104 arg0 = NULL_TREE;
4106 if (IS_EXPR_CODE_CLASS (TREE_CODE_CLASS (code)))
4108 if (TREE_OPERAND_LENGTH (exp) > 0)
4109 arg0 = TREE_OPERAND (exp, 0);
4110 if (TREE_CODE_CLASS (code) == tcc_binary
4111 || TREE_CODE_CLASS (code) == tcc_comparison
4112 || (TREE_CODE_CLASS (code) == tcc_expression
4113 && TREE_OPERAND_LENGTH (exp) > 1))
4114 arg1 = TREE_OPERAND (exp, 1);
4116 if (arg0 == NULL_TREE)
4117 break;
4119 nexp = make_range_step (loc, code, arg0, arg1, exp_type, &low,
4120 &high, &in_p, strict_overflow_p);
4121 if (nexp == NULL_TREE)
4122 break;
4123 exp = nexp;
4126 /* If EXP is a constant, we can evaluate whether this is true or false. */
4127 if (TREE_CODE (exp) == INTEGER_CST)
4129 in_p = in_p == (integer_onep (range_binop (GE_EXPR, integer_type_node,
4130 exp, 0, low, 0))
4131 && integer_onep (range_binop (LE_EXPR, integer_type_node,
4132 exp, 1, high, 1)));
4133 low = high = 0;
4134 exp = 0;
4137 *pin_p = in_p, *plow = low, *phigh = high;
4138 return exp;
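/* Editor's trace (illustration only): for EXP = x + 1 > 10 with signed
   x, the GT_EXPR step records "outside [-inf, 10]" for x + 1, and the
   PLUS_EXPR step moves the constant across to give "outside [-inf, 9]"
   for x itself.  Because this relies on signed overflow being
   undefined, *STRICT_OVERFLOW_P is set: */

static int range_naive (int x)  { return x + 1 > 10; }
static int range_folded (int x) { return x > 9; }  /* assumes no overflow */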
4141 /* Given a range, LOW, HIGH, and IN_P, an expression, EXP, and a result
4142 type, TYPE, return an expression to test if EXP is in (or out of, depending
4143 on IN_P) the range. Return 0 if the test couldn't be created. */
4145 tree
4146 build_range_check (location_t loc, tree type, tree exp, int in_p,
4147 tree low, tree high)
4149 tree etype = TREE_TYPE (exp), value;
4151 #ifdef HAVE_canonicalize_funcptr_for_compare
4152 /* Disable this optimization for function pointer expressions
4153 on targets that require function pointer canonicalization. */
4154 if (HAVE_canonicalize_funcptr_for_compare
4155 && TREE_CODE (etype) == POINTER_TYPE
4156 && TREE_CODE (TREE_TYPE (etype)) == FUNCTION_TYPE)
4157 return NULL_TREE;
4158 #endif
4160 if (! in_p)
4162 value = build_range_check (loc, type, exp, 1, low, high);
4163 if (value != 0)
4164 return invert_truthvalue_loc (loc, value);
4166 return 0;
4169 if (low == 0 && high == 0)
4170 return build_int_cst (type, 1);
4172 if (low == 0)
4173 return fold_build2_loc (loc, LE_EXPR, type, exp,
4174 fold_convert_loc (loc, etype, high));
4176 if (high == 0)
4177 return fold_build2_loc (loc, GE_EXPR, type, exp,
4178 fold_convert_loc (loc, etype, low));
4180 if (operand_equal_p (low, high, 0))
4181 return fold_build2_loc (loc, EQ_EXPR, type, exp,
4182 fold_convert_loc (loc, etype, low));
4184 if (integer_zerop (low))
4186 if (! TYPE_UNSIGNED (etype))
4188 etype = unsigned_type_for (etype);
4189 high = fold_convert_loc (loc, etype, high);
4190 exp = fold_convert_loc (loc, etype, exp);
4192 return build_range_check (loc, type, exp, 1, 0, high);
4195 /* Optimize (c>=1) && (c<=127) into (signed char)c > 0. */
4196 if (integer_onep (low) && TREE_CODE (high) == INTEGER_CST)
4198 unsigned HOST_WIDE_INT lo;
4199 HOST_WIDE_INT hi;
4200 int prec;
4202 prec = TYPE_PRECISION (etype);
4203 if (prec <= HOST_BITS_PER_WIDE_INT)
4205 hi = 0;
4206 lo = ((unsigned HOST_WIDE_INT) 1 << (prec - 1)) - 1;
4208 else
4210 hi = ((HOST_WIDE_INT) 1 << (prec - HOST_BITS_PER_WIDE_INT - 1)) - 1;
4211 lo = (unsigned HOST_WIDE_INT) -1;
4214 if (TREE_INT_CST_HIGH (high) == hi && TREE_INT_CST_LOW (high) == lo)
4216 if (TYPE_UNSIGNED (etype))
4218 tree signed_etype = signed_type_for (etype);
4219 if (TYPE_PRECISION (signed_etype) != TYPE_PRECISION (etype))
4220 etype
4221 = build_nonstandard_integer_type (TYPE_PRECISION (etype), 0);
4222 else
4223 etype = signed_etype;
4224 exp = fold_convert_loc (loc, etype, exp);
4226 return fold_build2_loc (loc, GT_EXPR, type, exp,
4227 build_int_cst (etype, 0));
4231 /* Optimize (c>=low) && (c<=high) into (c-low>=0) && (c-low<=high-low).
4232 This requires wrap-around arithmetic for the type of the expression.
4233 First make sure that arithmetic in this type is valid, then make sure
4234 that it wraps around. */
4235 if (TREE_CODE (etype) == ENUMERAL_TYPE || TREE_CODE (etype) == BOOLEAN_TYPE)
4236 etype = lang_hooks.types.type_for_size (TYPE_PRECISION (etype),
4237 TYPE_UNSIGNED (etype));
4239 if (TREE_CODE (etype) == INTEGER_TYPE && !TYPE_OVERFLOW_WRAPS (etype))
4241 tree utype, minv, maxv;
4243 /* Check if (unsigned) INT_MAX + 1 == (unsigned) INT_MIN
4244 for the type in question, as we rely on this here. */
4245 utype = unsigned_type_for (etype);
4246 maxv = fold_convert_loc (loc, utype, TYPE_MAX_VALUE (etype));
4247 maxv = range_binop (PLUS_EXPR, NULL_TREE, maxv, 1,
4248 integer_one_node, 1);
4249 minv = fold_convert_loc (loc, utype, TYPE_MIN_VALUE (etype));
4251 if (integer_zerop (range_binop (NE_EXPR, integer_type_node,
4252 minv, 1, maxv, 1)))
4253 etype = utype;
4254 else
4255 return 0;
4258 high = fold_convert_loc (loc, etype, high);
4259 low = fold_convert_loc (loc, etype, low);
4260 exp = fold_convert_loc (loc, etype, exp);
4262 value = const_binop (MINUS_EXPR, high, low);
4265 if (POINTER_TYPE_P (etype))
4267 if (value != 0 && !TREE_OVERFLOW (value))
4269 low = fold_build1_loc (loc, NEGATE_EXPR, TREE_TYPE (low), low);
4270 return build_range_check (loc, type,
4271 fold_build_pointer_plus_loc (loc, exp, low),
4272 1, build_int_cst (etype, 0), value);
4274 return 0;
4277 if (value != 0 && !TREE_OVERFLOW (value))
4278 return build_range_check (loc, type,
4279 fold_build2_loc (loc, MINUS_EXPR, etype, exp, low),
4280 1, build_int_cst (etype, 0), value);
4282 return 0;
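/* Editor's sketch of two of the folds above (illustration only,
   assuming 8-bit chars).  A zero-based range becomes one unsigned
   compare, and [1, 127] becomes a sign test in the signed type: */

static int
digit_naive (unsigned char c)
{
  return c >= '0' && c <= '9';
}

static int
digit_folded (unsigned char c)
{
  /* (c >= low && c <= high) --> (unsigned) (c - low) <= high - low.  */
  return (unsigned char) (c - '0') <= 9;
}

static int
low7_folded (unsigned char c)
{
  /* (c >= 1 && c <= 127) --> (signed char) c > 0.  */
  return (signed char) c > 0;
}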
4285 /* Return the predecessor of VAL in its type, handling the infinite case. */
4287 static tree
4288 range_predecessor (tree val)
4290 tree type = TREE_TYPE (val);
4292 if (INTEGRAL_TYPE_P (type)
4293 && operand_equal_p (val, TYPE_MIN_VALUE (type), 0))
4294 return 0;
4295 else
4296 return range_binop (MINUS_EXPR, NULL_TREE, val, 0, integer_one_node, 0);
4299 /* Return the successor of VAL in its type, handling the infinite case. */
4301 static tree
4302 range_successor (tree val)
4304 tree type = TREE_TYPE (val);
4306 if (INTEGRAL_TYPE_P (type)
4307 && operand_equal_p (val, TYPE_MAX_VALUE (type), 0))
4308 return 0;
4309 else
4310 return range_binop (PLUS_EXPR, NULL_TREE, val, 0, integer_one_node, 0);
4313 /* Given two ranges, see if we can merge them into one. Return 1 if we
4314 can, 0 if we can't. Set the output range into the specified parameters. */
4316 bool
4317 merge_ranges (int *pin_p, tree *plow, tree *phigh, int in0_p, tree low0,
4318 tree high0, int in1_p, tree low1, tree high1)
4320 int no_overlap;
4321 int subset;
4322 int temp;
4323 tree tem;
4324 int in_p;
4325 tree low, high;
4326 int lowequal = ((low0 == 0 && low1 == 0)
4327 || integer_onep (range_binop (EQ_EXPR, integer_type_node,
4328 low0, 0, low1, 0)));
4329 int highequal = ((high0 == 0 && high1 == 0)
4330 || integer_onep (range_binop (EQ_EXPR, integer_type_node,
4331 high0, 1, high1, 1)));
4333 /* Make range 0 be the range that starts first, or ends last if they
4334 start at the same value. Swap them if it isn't. */
4335 if (integer_onep (range_binop (GT_EXPR, integer_type_node,
4336 low0, 0, low1, 0))
4337 || (lowequal
4338 && integer_onep (range_binop (GT_EXPR, integer_type_node,
4339 high1, 1, high0, 1))))
4341 temp = in0_p, in0_p = in1_p, in1_p = temp;
4342 tem = low0, low0 = low1, low1 = tem;
4343 tem = high0, high0 = high1, high1 = tem;
4346 /* Now flag two cases, whether the ranges are disjoint or whether the
4347 second range is totally subsumed in the first. Note that the tests
4348 below are simplified by the ones above. */
4349 no_overlap = integer_onep (range_binop (LT_EXPR, integer_type_node,
4350 high0, 1, low1, 0));
4351 subset = integer_onep (range_binop (LE_EXPR, integer_type_node,
4352 high1, 1, high0, 1));
4354 /* We now have four cases, depending on whether we are including or
4355 excluding the two ranges. */
4356 if (in0_p && in1_p)
4358 /* If they don't overlap, the result is false. If the second range
4359 is a subset it is the result. Otherwise, the range is from the start
4360 of the second to the end of the first. */
4361 if (no_overlap)
4362 in_p = 0, low = high = 0;
4363 else if (subset)
4364 in_p = 1, low = low1, high = high1;
4365 else
4366 in_p = 1, low = low1, high = high0;
4369 else if (in0_p && ! in1_p)
4371 /* If they don't overlap, the result is the first range. If they are
4372 equal, the result is false. If the second range is a subset of the
4373 first, and the ranges begin at the same place, we go from just after
4374 the end of the second range to the end of the first. If the second
4375 range is not a subset of the first, or if it is a subset and both
4376 ranges end at the same place, the range starts at the start of the
4377 first range and ends just before the second range.
4378 Otherwise, we can't describe this as a single range. */
4379 if (no_overlap)
4380 in_p = 1, low = low0, high = high0;
4381 else if (lowequal && highequal)
4382 in_p = 0, low = high = 0;
4383 else if (subset && lowequal)
4385 low = range_successor (high1);
4386 high = high0;
4387 in_p = 1;
4388 if (low == 0)
4390 /* We are in the weird situation where high0 > high1 but
4391 high1 has no successor. Punt. */
4392 return 0;
4395 else if (! subset || highequal)
4397 low = low0;
4398 high = range_predecessor (low1);
4399 in_p = 1;
4400 if (high == 0)
4402 /* low0 < low1 but low1 has no predecessor. Punt. */
4403 return 0;
4406 else
4407 return 0;
4410 else if (! in0_p && in1_p)
4412 /* If they don't overlap, the result is the second range. If the second
4413 is a subset of the first, the result is false. Otherwise,
4414 the range starts just after the first range and ends at the
4415 end of the second. */
4416 if (no_overlap)
4417 in_p = 1, low = low1, high = high1;
4418 else if (subset || highequal)
4419 in_p = 0, low = high = 0;
4420 else
4422 low = range_successor (high0);
4423 high = high1;
4424 in_p = 1;
4425 if (low == 0)
4427 /* high1 > high0 but high0 has no successor. Punt. */
4428 return 0;
4433 else
4435 /* The case where we are excluding both ranges. Here the complex case
4436 is if they don't overlap. In that case, the only time we have a
4437 range is if they are adjacent. If the second is a subset of the
4438 first, the result is the first. Otherwise, the range to exclude
4439 starts at the beginning of the first range and ends at the end of the
4440 second. */
4441 if (no_overlap)
4443 if (integer_onep (range_binop (EQ_EXPR, integer_type_node,
4444 range_successor (high0),
4445 1, low1, 0)))
4446 in_p = 0, low = low0, high = high1;
4447 else
4449 /* Canonicalize - [min, x] into - [-, x]. */
4450 if (low0 && TREE_CODE (low0) == INTEGER_CST)
4451 switch (TREE_CODE (TREE_TYPE (low0)))
4453 case ENUMERAL_TYPE:
4454 if (TYPE_PRECISION (TREE_TYPE (low0))
4455 != GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (low0))))
4456 break;
4457 /* FALLTHROUGH */
4458 case INTEGER_TYPE:
4459 if (tree_int_cst_equal (low0,
4460 TYPE_MIN_VALUE (TREE_TYPE (low0))))
4461 low0 = 0;
4462 break;
4463 case POINTER_TYPE:
4464 if (TYPE_UNSIGNED (TREE_TYPE (low0))
4465 && integer_zerop (low0))
4466 low0 = 0;
4467 break;
4468 default:
4469 break;
4472 /* Canonicalize - [x, max] into - [x, -]. */
4473 if (high1 && TREE_CODE (high1) == INTEGER_CST)
4474 switch (TREE_CODE (TREE_TYPE (high1)))
4476 case ENUMERAL_TYPE:
4477 if (TYPE_PRECISION (TREE_TYPE (high1))
4478 != GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (high1))))
4479 break;
4480 /* FALLTHROUGH */
4481 case INTEGER_TYPE:
4482 if (tree_int_cst_equal (high1,
4483 TYPE_MAX_VALUE (TREE_TYPE (high1))))
4484 high1 = 0;
4485 break;
4486 case POINTER_TYPE:
4487 if (TYPE_UNSIGNED (TREE_TYPE (high1))
4488 && integer_zerop (range_binop (PLUS_EXPR, NULL_TREE,
4489 high1, 1,
4490 integer_one_node, 1)))
4491 high1 = 0;
4492 break;
4493 default:
4494 break;
4497 /* The ranges might be also adjacent between the maximum and
4498 minimum values of the given type. For
4499 - [{min,-}, x] and - [y, {max,-}] ranges where x + 1 < y
4500 return + [x + 1, y - 1]. */
4501 if (low0 == 0 && high1 == 0)
4503 low = range_successor (high0);
4504 high = range_predecessor (low1);
4505 if (low == 0 || high == 0)
4506 return 0;
4508 in_p = 1;
4510 else
4511 return 0;
4514 else if (subset)
4515 in_p = 0, low = low0, high = high0;
4516 else
4517 in_p = 0, low = low0, high = high1;
4520 *pin_p = in_p, *plow = low, *phigh = high;
4521 return 1;
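/* Editor's worked example (not original code): ANDing two "in" ranges
   that overlap without nesting.  With low0=2, high0=10, low1=5,
   high1=20, neither no_overlap nor subset holds, so the result is
   + [low1, high0] = [5, 10]: */

static int
merged_naive (int x)
{
  return (x >= 2 && x <= 10) && (x >= 5 && x <= 20);
}

static int
merged_folded (int x)
{
  return x >= 5 && x <= 10;
}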
4525 /* Subroutine of fold, looking inside expressions of the form
4526 A op B ? A : C, where ARG0, ARG1 and ARG2 are the three operands
4527 of the COND_EXPR. This function is being used also to optimize
4528 A op B ? C : A, by reversing the comparison first.
4530 Return a folded expression whose code is not a COND_EXPR
4531 anymore, or NULL_TREE if no folding opportunity is found. */
4533 static tree
4534 fold_cond_expr_with_comparison (location_t loc, tree type,
4535 tree arg0, tree arg1, tree arg2)
4537 enum tree_code comp_code = TREE_CODE (arg0);
4538 tree arg00 = TREE_OPERAND (arg0, 0);
4539 tree arg01 = TREE_OPERAND (arg0, 1);
4540 tree arg1_type = TREE_TYPE (arg1);
4541 tree tem;
4543 STRIP_NOPS (arg1);
4544 STRIP_NOPS (arg2);
4546 /* If we have A op 0 ? A : -A, consider applying the following
4547 transformations:
4549 A == 0? A : -A same as -A
4550 A != 0? A : -A same as A
4551 A >= 0? A : -A same as abs (A)
4552 A > 0? A : -A same as abs (A)
4553 A <= 0? A : -A same as -abs (A)
4554 A < 0? A : -A same as -abs (A)
4556 None of these transformations work for modes with signed
4557 zeros. If A is +/-0, the first two transformations will
4558 change the sign of the result (from +0 to -0, or vice
4559 versa). The last four will fix the sign of the result,
4560 even though the original expressions could be positive or
4561 negative, depending on the sign of A.
4563 Note that all these transformations are correct if A is
4564 NaN, since the two alternatives (A and -A) are also NaNs. */
4565 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (type))
4566 && (FLOAT_TYPE_P (TREE_TYPE (arg01))
4567 ? real_zerop (arg01)
4568 : integer_zerop (arg01))
4569 && ((TREE_CODE (arg2) == NEGATE_EXPR
4570 && operand_equal_p (TREE_OPERAND (arg2, 0), arg1, 0))
4571 /* In the case that A is of the form X-Y, '-A' (arg2) may
4572 have already been folded to Y-X, check for that. */
4573 || (TREE_CODE (arg1) == MINUS_EXPR
4574 && TREE_CODE (arg2) == MINUS_EXPR
4575 && operand_equal_p (TREE_OPERAND (arg1, 0),
4576 TREE_OPERAND (arg2, 1), 0)
4577 && operand_equal_p (TREE_OPERAND (arg1, 1),
4578 TREE_OPERAND (arg2, 0), 0))))
4579 switch (comp_code)
4581 case EQ_EXPR:
4582 case UNEQ_EXPR:
4583 tem = fold_convert_loc (loc, arg1_type, arg1);
4584 return pedantic_non_lvalue_loc (loc,
4585 fold_convert_loc (loc, type,
4586 negate_expr (tem)));
4587 case NE_EXPR:
4588 case LTGT_EXPR:
4589 return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
4590 case UNGE_EXPR:
4591 case UNGT_EXPR:
4592 if (flag_trapping_math)
4593 break;
4594 /* Fall through. */
4595 case GE_EXPR:
4596 case GT_EXPR:
4597 if (TYPE_UNSIGNED (TREE_TYPE (arg1)))
4598 arg1 = fold_convert_loc (loc, signed_type_for
4599 (TREE_TYPE (arg1)), arg1);
4600 tem = fold_build1_loc (loc, ABS_EXPR, TREE_TYPE (arg1), arg1);
4601 return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, tem));
4602 case UNLE_EXPR:
4603 case UNLT_EXPR:
4604 if (flag_trapping_math)
4605 break;
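/* Fall through. */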
4606 case LE_EXPR:
4607 case LT_EXPR:
4608 if (TYPE_UNSIGNED (TREE_TYPE (arg1)))
4609 arg1 = fold_convert_loc (loc, signed_type_for
4610 (TREE_TYPE (arg1)), arg1);
4611 tem = fold_build1_loc (loc, ABS_EXPR, TREE_TYPE (arg1), arg1);
4612 return negate_expr (fold_convert_loc (loc, type, tem));
4613 default:
4614 gcc_assert (TREE_CODE_CLASS (comp_code) == tcc_comparison);
4615 break;
4618 /* A != 0 ? A : 0 is simply A, unless A is -0. Likewise
4619 A == 0 ? A : 0 is always 0 unless A is -0. Note that
4620 both transformations are correct when A is NaN: A != 0
4621 is then true, and A == 0 is false. */
4623 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (type))
4624 && integer_zerop (arg01) && integer_zerop (arg2))
4626 if (comp_code == NE_EXPR)
4627 return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
4628 else if (comp_code == EQ_EXPR)
4629 return build_int_cst (type, 0);
4632 /* Try some transformations of A op B ? A : B.
4634 A == B? A : B same as B
4635 A != B? A : B same as A
4636 A >= B? A : B same as max (A, B)
4637 A > B? A : B same as max (B, A)
4638 A <= B? A : B same as min (A, B)
4639 A < B? A : B same as min (B, A)
4641 As above, these transformations don't work in the presence
4642 of signed zeros. For example, if A and B are zeros of
4643 opposite sign, the first two transformations will change
4644 the sign of the result. In the last four, the original
4645 expressions give different results for (A=+0, B=-0) and
4646 (A=-0, B=+0), but the transformed expressions do not.
4648 The first two transformations are correct if either A or B
4649 is a NaN. In the first transformation, the condition will
4650 be false, and B will indeed be chosen. In the case of the
4651 second transformation, the condition A != B will be true,
4652 and A will be chosen.
4654 The conversions to max() and min() are not correct if B is
4655 a number and A is not. The conditions in the original
4656 expressions will be false, so all four give B. The min()
4657 and max() versions would give a NaN instead. */
4658 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (type))
4659 && operand_equal_for_comparison_p (arg01, arg2, arg00)
4660 /* Avoid these transformations if the COND_EXPR may be used
4661 as an lvalue in the C++ front-end. PR c++/19199. */
4662 && (in_gimple_form
4663 || (strcmp (lang_hooks.name, "GNU C++") != 0
4664 && strcmp (lang_hooks.name, "GNU Objective-C++") != 0)
4665 || ! maybe_lvalue_p (arg1)
4666 || ! maybe_lvalue_p (arg2)))
4668 tree comp_op0 = arg00;
4669 tree comp_op1 = arg01;
4670 tree comp_type = TREE_TYPE (comp_op0);
4672 /* Avoid adding NOP_EXPRs in case this is an lvalue. */
4673 if (TYPE_MAIN_VARIANT (comp_type) == TYPE_MAIN_VARIANT (type))
4675 comp_type = type;
4676 comp_op0 = arg1;
4677 comp_op1 = arg2;
4680 switch (comp_code)
4682 case EQ_EXPR:
4683 return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, arg2));
4684 case NE_EXPR:
4685 return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
4686 case LE_EXPR:
4687 case LT_EXPR:
4688 case UNLE_EXPR:
4689 case UNLT_EXPR:
4690 /* In C++ a ?: expression can be an lvalue, so put the
4691 operand which will be used if they are equal first
4692 so that we can convert this back to the
4693 corresponding COND_EXPR. */
4694 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
4696 comp_op0 = fold_convert_loc (loc, comp_type, comp_op0);
4697 comp_op1 = fold_convert_loc (loc, comp_type, comp_op1);
4698 tem = (comp_code == LE_EXPR || comp_code == UNLE_EXPR)
4699 ? fold_build2_loc (loc, MIN_EXPR, comp_type, comp_op0, comp_op1)
4700 : fold_build2_loc (loc, MIN_EXPR, comp_type,
4701 comp_op1, comp_op0);
4702 return pedantic_non_lvalue_loc (loc,
4703 fold_convert_loc (loc, type, tem));
4705 break;
4706 case GE_EXPR:
4707 case GT_EXPR:
4708 case UNGE_EXPR:
4709 case UNGT_EXPR:
4710 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
4712 comp_op0 = fold_convert_loc (loc, comp_type, comp_op0);
4713 comp_op1 = fold_convert_loc (loc, comp_type, comp_op1);
4714 tem = (comp_code == GE_EXPR || comp_code == UNGE_EXPR)
4715 ? fold_build2_loc (loc, MAX_EXPR, comp_type, comp_op0, comp_op1)
4716 : fold_build2_loc (loc, MAX_EXPR, comp_type,
4717 comp_op1, comp_op0);
4718 return pedantic_non_lvalue_loc (loc,
4719 fold_convert_loc (loc, type, tem));
4721 break;
4722 case UNEQ_EXPR:
4723 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
4724 return pedantic_non_lvalue_loc (loc,
4725 fold_convert_loc (loc, type, arg2));
4726 break;
4727 case LTGT_EXPR:
4728 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
4729 return pedantic_non_lvalue_loc (loc,
4730 fold_convert_loc (loc, type, arg1));
4731 break;
4732 default:
4733 gcc_assert (TREE_CODE_CLASS (comp_code) == tcc_comparison);
4734 break;
4738 /* If this is A op C1 ? A : C2 with C1 and C2 constant integers,
4739 we might still be able to simplify this. For example,
4740 if C1 is one less or one more than C2, this might have started
4741 out as a MIN or MAX and been transformed by this function.
4742 Only good for INTEGER_TYPEs, because we need TYPE_MAX_VALUE. */
4744 if (INTEGRAL_TYPE_P (type)
4745 && TREE_CODE (arg01) == INTEGER_CST
4746 && TREE_CODE (arg2) == INTEGER_CST)
4747 switch (comp_code)
4749 case EQ_EXPR:
4750 if (TREE_CODE (arg1) == INTEGER_CST)
4751 break;
4752 /* We can replace A with C1 in this case. */
4753 arg1 = fold_convert_loc (loc, type, arg01);
4754 return fold_build3_loc (loc, COND_EXPR, type, arg0, arg1, arg2);
4756 case LT_EXPR:
4757 /* If C1 is C2 + 1, this is min(A, C2), but use ARG00's type for
4758 MIN_EXPR, to preserve the signedness of the comparison. */
4759 if (! operand_equal_p (arg2, TYPE_MAX_VALUE (type),
4760 OEP_ONLY_CONST)
4761 && operand_equal_p (arg01,
4762 const_binop (PLUS_EXPR, arg2,
4763 build_int_cst (type, 1)),
4764 OEP_ONLY_CONST))
4766 tem = fold_build2_loc (loc, MIN_EXPR, TREE_TYPE (arg00), arg00,
4767 fold_convert_loc (loc, TREE_TYPE (arg00),
4768 arg2));
4769 return pedantic_non_lvalue_loc (loc,
4770 fold_convert_loc (loc, type, tem));
4772 break;
4774 case LE_EXPR:
4775 /* If C1 is C2 - 1, this is min(A, C2), with the same care
4776 as above. */
4777 if (! operand_equal_p (arg2, TYPE_MIN_VALUE (type),
4778 OEP_ONLY_CONST)
4779 && operand_equal_p (arg01,
4780 const_binop (MINUS_EXPR, arg2,
4781 build_int_cst (type, 1)),
4782 OEP_ONLY_CONST))
4784 tem = fold_build2_loc (loc, MIN_EXPR, TREE_TYPE (arg00), arg00,
4785 fold_convert_loc (loc, TREE_TYPE (arg00),
4786 arg2));
4787 return pedantic_non_lvalue_loc (loc,
4788 fold_convert_loc (loc, type, tem));
4790 break;
4792 case GT_EXPR:
4793 /* If C1 is C2 - 1, this is max(A, C2), but use ARG00's type for
4794 MAX_EXPR, to preserve the signedness of the comparison. */
4795 if (! operand_equal_p (arg2, TYPE_MIN_VALUE (type),
4796 OEP_ONLY_CONST)
4797 && operand_equal_p (arg01,
4798 const_binop (MINUS_EXPR, arg2,
4799 build_int_cst (type, 1)),
4800 OEP_ONLY_CONST))
4802 tem = fold_build2_loc (loc, MAX_EXPR, TREE_TYPE (arg00), arg00,
4803 fold_convert_loc (loc, TREE_TYPE (arg00),
4804 arg2));
4805 return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, tem));
4807 break;
4809 case GE_EXPR:
4810 /* If C1 is C2 + 1, this is max(A, C2), with the same care as above. */
4811 if (! operand_equal_p (arg2, TYPE_MAX_VALUE (type),
4812 OEP_ONLY_CONST)
4813 && operand_equal_p (arg01,
4814 const_binop (PLUS_EXPR, arg2,
4815 build_int_cst (type, 1)),
4816 OEP_ONLY_CONST))
4818 tem = fold_build2_loc (loc, MAX_EXPR, TREE_TYPE (arg00), arg00,
4819 fold_convert_loc (loc, TREE_TYPE (arg00),
4820 arg2));
4821 return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, tem));
4823 break;
4824 case NE_EXPR:
4825 break;
4826 default:
4827 gcc_unreachable ();
4830 return NULL_TREE;
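/* Editor's sketch of the A op 0 ? A : -A family above (illustration
   only; abs is from <stdlib.h>).  For integers there are no signed
   zeros, so the fold is unconditional: */

#include <stdlib.h>

static int
cond_naive (int a)
{
  return a >= 0 ? a : -a;
}

static int
cond_folded (int a)
{
  return abs (a);   /* A >= 0 ? A : -A same as abs (A) */
}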
4835 #ifndef LOGICAL_OP_NON_SHORT_CIRCUIT
4836 #define LOGICAL_OP_NON_SHORT_CIRCUIT \
4837 (BRANCH_COST (optimize_function_for_speed_p (cfun), \
4838 false) >= 2)
4839 #endif
4841 /* EXP is some logical combination of boolean tests. See if we can
4842 merge it into some range test. Return the new tree if so. */
4844 static tree
4845 fold_range_test (location_t loc, enum tree_code code, tree type,
4846 tree op0, tree op1)
4848 int or_op = (code == TRUTH_ORIF_EXPR
4849 || code == TRUTH_OR_EXPR);
4850 int in0_p, in1_p, in_p;
4851 tree low0, low1, low, high0, high1, high;
4852 bool strict_overflow_p = false;
4853 tree lhs = make_range (op0, &in0_p, &low0, &high0, &strict_overflow_p);
4854 tree rhs = make_range (op1, &in1_p, &low1, &high1, &strict_overflow_p);
4855 tree tem;
4856 const char * const warnmsg = G_("assuming signed overflow does not occur "
4857 "when simplifying range test");
4859 /* If this is an OR operation, invert both sides; we will invert
4860 again at the end. */
4861 if (or_op)
4862 in0_p = ! in0_p, in1_p = ! in1_p;
4864 /* If both expressions are the same, if we can merge the ranges, and we
4865 can build the range test, return it or it inverted. If one of the
4866 ranges is always true or always false, consider it to be the same
4867 expression as the other. */
4868 if ((lhs == 0 || rhs == 0 || operand_equal_p (lhs, rhs, 0))
4869 && merge_ranges (&in_p, &low, &high, in0_p, low0, high0,
4870 in1_p, low1, high1)
4871 && 0 != (tem = (build_range_check (loc, type,
4872 lhs != 0 ? lhs
4873 : rhs != 0 ? rhs : integer_zero_node,
4874 in_p, low, high))))
4876 if (strict_overflow_p)
4877 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_COMPARISON);
4878 return or_op ? invert_truthvalue_loc (loc, tem) : tem;
4881 /* On machines where the branch cost is expensive, if this is a
4882 short-circuited branch and the underlying object on both sides
4883 is the same, make a non-short-circuit operation. */
4884 else if (LOGICAL_OP_NON_SHORT_CIRCUIT
4885 && lhs != 0 && rhs != 0
4886 && (code == TRUTH_ANDIF_EXPR
4887 || code == TRUTH_ORIF_EXPR)
4888 && operand_equal_p (lhs, rhs, 0))
4890 /* If simple enough, just rewrite. Otherwise, make a SAVE_EXPR
4891 unless we are at top level or LHS contains a PLACEHOLDER_EXPR, in
4892 which cases we can't do this. */
4893 if (simple_operand_p (lhs))
4894 return build2_loc (loc, code == TRUTH_ANDIF_EXPR
4895 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR,
4896 type, op0, op1);
4898 else if (!lang_hooks.decls.global_bindings_p ()
4899 && !CONTAINS_PLACEHOLDER_P (lhs))
4901 tree common = save_expr (lhs);
4903 if (0 != (lhs = build_range_check (loc, type, common,
4904 or_op ? ! in0_p : in0_p,
4905 low0, high0))
4906 && (0 != (rhs = build_range_check (loc, type, common,
4907 or_op ? ! in1_p : in1_p,
4908 low1, high1))))
4910 if (strict_overflow_p)
4911 fold_overflow_warning (warnmsg,
4912 WARN_STRICT_OVERFLOW_COMPARISON);
4913 return build2_loc (loc, code == TRUTH_ANDIF_EXPR
4914 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR,
4915 type, lhs, rhs);
4920 return 0;
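/* Editor's illustration (not original code): x == 2 || x == 5 makes two
   ranges that cannot be merged, but both tests are over the same simple
   operand, so on targets with expensive branches the short-circuit ||
   may be rewritten as a plain TRUTH_OR_EXPR: */

static int
same_var_or (int x)
{
  /* One branch instead of two; safe because both operands are simple.  */
  return (x == 2) | (x == 5);
}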
4923 /* Subroutine for fold_truth_andor_1: C is an INTEGER_CST interpreted as a P
4924 bit value. Arrange things so the extra bits will be set to zero if and
4925 only if C is signed-extended to its full width. If MASK is nonzero,
4926 it is an INTEGER_CST that should be AND'ed with the extra bits. */
4928 static tree
4929 unextend (tree c, int p, int unsignedp, tree mask)
4931 tree type = TREE_TYPE (c);
4932 int modesize = GET_MODE_BITSIZE (TYPE_MODE (type));
4933 tree temp;
4935 if (p == modesize || unsignedp)
4936 return c;
4938 /* We work by getting just the sign bit into the low-order bit, then
4939 into the high-order bit, then sign-extend. We then XOR that value
4940 with C. */
4941 temp = const_binop (RSHIFT_EXPR, c, size_int (p - 1));
4942 temp = const_binop (BIT_AND_EXPR, temp, size_int (1));
4944 /* We must use a signed type in order to get an arithmetic right shift.
4945 However, we must also avoid introducing accidental overflows, so that
4946 a subsequent call to integer_zerop will work. Hence we must
4947 do the type conversion here. At this point, the constant is either
4948 zero or one, and the conversion to a signed type can never overflow.
4949 We could get an overflow if this conversion is done anywhere else. */
4950 if (TYPE_UNSIGNED (type))
4951 temp = fold_convert (signed_type_for (type), temp);
4953 temp = const_binop (LSHIFT_EXPR, temp, size_int (modesize - 1));
4954 temp = const_binop (RSHIFT_EXPR, temp, size_int (modesize - p - 1));
4955 if (mask != 0)
4956 temp = const_binop (BIT_AND_EXPR, temp,
4957 fold_convert (TREE_TYPE (c), mask));
4958 /* If necessary, convert the type back to match the type of C. */
4959 if (TYPE_UNSIGNED (type))
4960 temp = fold_convert (type, temp);
4962 return fold_convert (type, const_binop (BIT_XOR_EXPR, c, temp));
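/* Editor's worked example for P = 8 bits in a 32-bit mode (illustration
   only; assumes two's complement and arithmetic right shift of signed
   ints).  The sign bit of C is isolated, moved to the top, shifted back
   down to cover bits 8..31, then XORed with C; the high bits of the
   result are zero exactly when C was correctly sign-extended: */

static unsigned
unextend_8_in_32 (unsigned c)
{
  unsigned t = (c >> 7) & 1u;                /* sign bit into bit 0 */
  int s = (int) (t << 31) >> (31 - 8);       /* replicate into bits 8..31 */
  return c ^ (unsigned) s;
}

/* unextend_8_in_32 (0xffffff80) == 0x80, while
   unextend_8_in_32 (0x00000080) == 0xffffff80, flagging the missing
   sign extension.  */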
4965 /* For an expression that has the form
4966 (A && B) || ~B
4967 or
4968 (A || B) && ~B,
4969 we can drop one of the inner expressions and simplify to
4970 A || ~B
4971 or
4972 A && ~B
4973 LOC is the location of the resulting expression. OP is the inner
4974 logical operation; the left-hand side in the examples above, while CMPOP
4975 is the right-hand side. RHS_ONLY is used to prevent us from accidentally
4976 removing a condition that guards another, as in
4977 (A != NULL && A->...) || A == NULL
4978 which we must not transform. If RHS_ONLY is true, only eliminate the
4979 right-most operand of the inner logical operation. */
4981 static tree
4982 merge_truthop_with_opposite_arm (location_t loc, tree op, tree cmpop,
4983 bool rhs_only)
4985 tree type = TREE_TYPE (cmpop);
4986 enum tree_code code = TREE_CODE (cmpop);
4987 enum tree_code truthop_code = TREE_CODE (op);
4988 tree lhs = TREE_OPERAND (op, 0);
4989 tree rhs = TREE_OPERAND (op, 1);
4990 tree orig_lhs = lhs, orig_rhs = rhs;
4991 enum tree_code rhs_code = TREE_CODE (rhs);
4992 enum tree_code lhs_code = TREE_CODE (lhs);
4993 enum tree_code inv_code;
4995 if (TREE_SIDE_EFFECTS (op) || TREE_SIDE_EFFECTS (cmpop))
4996 return NULL_TREE;
4998 if (TREE_CODE_CLASS (code) != tcc_comparison)
4999 return NULL_TREE;
5001 if (rhs_code == truthop_code)
5003 tree newrhs = merge_truthop_with_opposite_arm (loc, rhs, cmpop, rhs_only);
5004 if (newrhs != NULL_TREE)
5006 rhs = newrhs;
5007 rhs_code = TREE_CODE (rhs);
5010 if (lhs_code == truthop_code && !rhs_only)
5012 tree newlhs = merge_truthop_with_opposite_arm (loc, lhs, cmpop, false);
5013 if (newlhs != NULL_TREE)
5015 lhs = newlhs;
5016 lhs_code = TREE_CODE (lhs);
5020 inv_code = invert_tree_comparison (code, HONOR_NANS (TYPE_MODE (type)));
5021 if (inv_code == rhs_code
5022 && operand_equal_p (TREE_OPERAND (rhs, 0), TREE_OPERAND (cmpop, 0), 0)
5023 && operand_equal_p (TREE_OPERAND (rhs, 1), TREE_OPERAND (cmpop, 1), 0))
5024 return lhs;
5025 if (!rhs_only && inv_code == lhs_code
5026 && operand_equal_p (TREE_OPERAND (lhs, 0), TREE_OPERAND (cmpop, 0), 0)
5027 && operand_equal_p (TREE_OPERAND (lhs, 1), TREE_OPERAND (cmpop, 1), 0))
5028 return rhs;
5029 if (rhs != orig_rhs || lhs != orig_lhs)
5030 return fold_build2_loc (loc, truthop_code, TREE_TYPE (cmpop),
5031 lhs, rhs);
5032 return NULL_TREE;
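/* Editor's sketch (not original code): the inner y > 0 is the exact
   inverse of the || arm y <= 0, so it can be dropped.  The RHS_ONLY
   guard exists because e.g. (p != NULL && p->x) || p == NULL must keep
   the inner test that guards the dereference: */

static int
guard_naive (int x, int y)
{
  return (x > 0 && y > 0) || y <= 0;
}

static int
guard_folded (int x, int y)
{
  return x > 0 || y <= 0;
}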
5035 /* Find ways of folding logical expressions of LHS and RHS:
5036 Try to merge two comparisons to the same innermost item.
5037 Look for range tests like "ch >= '0' && ch <= '9'".
5038 Look for combinations of simple terms on machines with expensive branches
5039 and evaluate the RHS unconditionally.
5041 For example, if we have p->a == 2 && p->b == 4 and we can make an
5042 object large enough to span both A and B, we can do this with a comparison
5043 against the object ANDed with the a mask.
5045 If we have p->a == q->a && p->b == q->b, we may be able to use bit masking
5046 operations to do this with one comparison.
5048 We check for both normal comparisons and the BIT_AND_EXPRs made by this
5049 function and the one above.
5051 CODE is the logical operation being done. It can be TRUTH_ANDIF_EXPR,
5052 TRUTH_AND_EXPR, TRUTH_ORIF_EXPR, or TRUTH_OR_EXPR.
5054 TRUTH_TYPE is the type of the logical operand and LHS and RHS are its
5055 two operands.
5057 We return the simplified tree or 0 if no optimization is possible. */
5059 static tree
5060 fold_truth_andor_1 (location_t loc, enum tree_code code, tree truth_type,
5061 tree lhs, tree rhs)
5063 /* If this is the "or" of two comparisons, we can do something if
5064 the comparisons are NE_EXPR. If this is the "and", we can do something
5065 if the comparisons are EQ_EXPR. I.e.,
5066 (a->b == 2 && a->c == 4) can become (a->new == NEW).
5068 WANTED_CODE is this operation code. For single bit fields, we can
5069 convert EQ_EXPR to NE_EXPR so we need not reject the "wrong"
5070 comparison for one-bit fields. */
5072 enum tree_code wanted_code;
5073 enum tree_code lcode, rcode;
5074 tree ll_arg, lr_arg, rl_arg, rr_arg;
5075 tree ll_inner, lr_inner, rl_inner, rr_inner;
5076 HOST_WIDE_INT ll_bitsize, ll_bitpos, lr_bitsize, lr_bitpos;
5077 HOST_WIDE_INT rl_bitsize, rl_bitpos, rr_bitsize, rr_bitpos;
5078 HOST_WIDE_INT xll_bitpos, xlr_bitpos, xrl_bitpos, xrr_bitpos;
5079 HOST_WIDE_INT lnbitsize, lnbitpos, rnbitsize, rnbitpos;
5080 int ll_unsignedp, lr_unsignedp, rl_unsignedp, rr_unsignedp;
5081 enum machine_mode ll_mode, lr_mode, rl_mode, rr_mode;
5082 enum machine_mode lnmode, rnmode;
5083 tree ll_mask, lr_mask, rl_mask, rr_mask;
5084 tree ll_and_mask, lr_and_mask, rl_and_mask, rr_and_mask;
5085 tree l_const, r_const;
5086 tree lntype, rntype, result;
5087 HOST_WIDE_INT first_bit, end_bit;
5088 int volatilep;
5090 /* Start by getting the comparison codes. Fail if anything is volatile.
5091 If one operand is a BIT_AND_EXPR with the constant one, treat it as if
5092 it were surrounded with a NE_EXPR. */
5094 if (TREE_SIDE_EFFECTS (lhs) || TREE_SIDE_EFFECTS (rhs))
5095 return 0;
5097 lcode = TREE_CODE (lhs);
5098 rcode = TREE_CODE (rhs);
5100 if (lcode == BIT_AND_EXPR && integer_onep (TREE_OPERAND (lhs, 1)))
5102 lhs = build2 (NE_EXPR, truth_type, lhs,
5103 build_int_cst (TREE_TYPE (lhs), 0));
5104 lcode = NE_EXPR;
5107 if (rcode == BIT_AND_EXPR && integer_onep (TREE_OPERAND (rhs, 1)))
5109 rhs = build2 (NE_EXPR, truth_type, rhs,
5110 build_int_cst (TREE_TYPE (rhs), 0));
5111 rcode = NE_EXPR;
5114 if (TREE_CODE_CLASS (lcode) != tcc_comparison
5115 || TREE_CODE_CLASS (rcode) != tcc_comparison)
5116 return 0;
5118 ll_arg = TREE_OPERAND (lhs, 0);
5119 lr_arg = TREE_OPERAND (lhs, 1);
5120 rl_arg = TREE_OPERAND (rhs, 0);
5121 rr_arg = TREE_OPERAND (rhs, 1);
5123 /* Simplify (x<y) && (x==y) into (x<=y) and related optimizations. */
5124 if (simple_operand_p (ll_arg)
5125 && simple_operand_p (lr_arg))
5127 if (operand_equal_p (ll_arg, rl_arg, 0)
5128 && operand_equal_p (lr_arg, rr_arg, 0))
5130 result = combine_comparisons (loc, code, lcode, rcode,
5131 truth_type, ll_arg, lr_arg);
5132 if (result)
5133 return result;
5135 else if (operand_equal_p (ll_arg, rr_arg, 0)
5136 && operand_equal_p (lr_arg, rl_arg, 0))
5138 result = combine_comparisons (loc, code, lcode,
5139 swap_tree_comparison (rcode),
5140 truth_type, ll_arg, lr_arg);
5141 if (result)
5142 return result;
5146 code = ((code == TRUTH_AND_EXPR || code == TRUTH_ANDIF_EXPR)
5147 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR);
5149 /* If the RHS can be evaluated unconditionally and its operands are
5150 simple, it wins to evaluate the RHS unconditionally on machines
5151 with expensive branches. In this case, this isn't a comparison
5152 that can be merged. */
5154 if (BRANCH_COST (optimize_function_for_speed_p (cfun),
5155 false) >= 2
5156 && ! FLOAT_TYPE_P (TREE_TYPE (rl_arg))
5157 && simple_operand_p (rl_arg)
5158 && simple_operand_p (rr_arg))
5160 /* Convert (a != 0) || (b != 0) into (a | b) != 0. */
5161 if (code == TRUTH_OR_EXPR
5162 && lcode == NE_EXPR && integer_zerop (lr_arg)
5163 && rcode == NE_EXPR && integer_zerop (rr_arg)
5164 && TREE_TYPE (ll_arg) == TREE_TYPE (rl_arg)
5165 && INTEGRAL_TYPE_P (TREE_TYPE (ll_arg)))
5166 return build2_loc (loc, NE_EXPR, truth_type,
5167 build2 (BIT_IOR_EXPR, TREE_TYPE (ll_arg),
5168 ll_arg, rl_arg),
5169 build_int_cst (TREE_TYPE (ll_arg), 0));
5171 /* Convert (a == 0) && (b == 0) into (a | b) == 0. */
5172 if (code == TRUTH_AND_EXPR
5173 && lcode == EQ_EXPR && integer_zerop (lr_arg)
5174 && rcode == EQ_EXPR && integer_zerop (rr_arg)
5175 && TREE_TYPE (ll_arg) == TREE_TYPE (rl_arg)
5176 && INTEGRAL_TYPE_P (TREE_TYPE (ll_arg)))
5177 return build2_loc (loc, EQ_EXPR, truth_type,
5178 build2 (BIT_IOR_EXPR, TREE_TYPE (ll_arg),
5179 ll_arg, rl_arg),
5180 build_int_cst (TREE_TYPE (ll_arg), 0));
5183 /* See if the comparisons can be merged. Then get all the parameters for
5184 each side. */
5186 if ((lcode != EQ_EXPR && lcode != NE_EXPR)
5187 || (rcode != EQ_EXPR && rcode != NE_EXPR))
5188 return 0;
5190 volatilep = 0;
5191 ll_inner = decode_field_reference (loc, ll_arg,
5192 &ll_bitsize, &ll_bitpos, &ll_mode,
5193 &ll_unsignedp, &volatilep, &ll_mask,
5194 &ll_and_mask);
5195 lr_inner = decode_field_reference (loc, lr_arg,
5196 &lr_bitsize, &lr_bitpos, &lr_mode,
5197 &lr_unsignedp, &volatilep, &lr_mask,
5198 &lr_and_mask);
5199 rl_inner = decode_field_reference (loc, rl_arg,
5200 &rl_bitsize, &rl_bitpos, &rl_mode,
5201 &rl_unsignedp, &volatilep, &rl_mask,
5202 &rl_and_mask);
5203 rr_inner = decode_field_reference (loc, rr_arg,
5204 &rr_bitsize, &rr_bitpos, &rr_mode,
5205 &rr_unsignedp, &volatilep, &rr_mask,
5206 &rr_and_mask);
5208 /* The inner operation on the lhs of each comparison must be
5209 the same if we are to be able to do anything.
5210 Then see if we have constants. If not, the same must be true for
5211 the rhs's. */
5212 if (volatilep || ll_inner == 0 || rl_inner == 0
5213 || ! operand_equal_p (ll_inner, rl_inner, 0))
5214 return 0;
5216 if (TREE_CODE (lr_arg) == INTEGER_CST
5217 && TREE_CODE (rr_arg) == INTEGER_CST)
5218 l_const = lr_arg, r_const = rr_arg;
5219 else if (lr_inner == 0 || rr_inner == 0
5220 || ! operand_equal_p (lr_inner, rr_inner, 0))
5221 return 0;
5222 else
5223 l_const = r_const = 0;
5225 /* If either comparison code is not correct for our logical operation,
5226 fail. However, we can convert a one-bit comparison against zero into
5227 the opposite comparison against that bit being set in the field. */
5229 wanted_code = (code == TRUTH_AND_EXPR ? EQ_EXPR : NE_EXPR);
5230 if (lcode != wanted_code)
5232 if (l_const && integer_zerop (l_const) && integer_pow2p (ll_mask))
5234 /* Make the left operand unsigned, since we are only interested
5235 in the value of one bit. Otherwise we are doing the wrong
5236 thing below. */
5237 ll_unsignedp = 1;
5238 l_const = ll_mask;
5240 else
5241 return 0;
5244 /* This is analogous to the code for l_const above. */
5245 if (rcode != wanted_code)
5247 if (r_const && integer_zerop (r_const) && integer_pow2p (rl_mask))
5249 rl_unsignedp = 1;
5250 r_const = rl_mask;
5252 else
5253 return 0;
5256 /* See if we can find a mode that contains both fields being compared on
5257 the left. If we can't, fail. Otherwise, update all constants and masks
5258 to be relative to a field of that size. */
5259 first_bit = MIN (ll_bitpos, rl_bitpos);
5260 end_bit = MAX (ll_bitpos + ll_bitsize, rl_bitpos + rl_bitsize);
5261 lnmode = get_best_mode (end_bit - first_bit, first_bit, 0, 0,
5262 TYPE_ALIGN (TREE_TYPE (ll_inner)), word_mode,
5263 volatilep);
5264 if (lnmode == VOIDmode)
5265 return 0;
5267 lnbitsize = GET_MODE_BITSIZE (lnmode);
5268 lnbitpos = first_bit & ~ (lnbitsize - 1);
5269 lntype = lang_hooks.types.type_for_size (lnbitsize, 1);
5270 xll_bitpos = ll_bitpos - lnbitpos, xrl_bitpos = rl_bitpos - lnbitpos;
5272 if (BYTES_BIG_ENDIAN)
5274 xll_bitpos = lnbitsize - xll_bitpos - ll_bitsize;
5275 xrl_bitpos = lnbitsize - xrl_bitpos - rl_bitsize;
5278 ll_mask = const_binop (LSHIFT_EXPR, fold_convert_loc (loc, lntype, ll_mask),
5279 size_int (xll_bitpos));
5280 rl_mask = const_binop (LSHIFT_EXPR, fold_convert_loc (loc, lntype, rl_mask),
5281 size_int (xrl_bitpos));
5283 if (l_const)
5285 l_const = fold_convert_loc (loc, lntype, l_const);
5286 l_const = unextend (l_const, ll_bitsize, ll_unsignedp, ll_and_mask);
5287 l_const = const_binop (LSHIFT_EXPR, l_const, size_int (xll_bitpos));
5288 if (! integer_zerop (const_binop (BIT_AND_EXPR, l_const,
5289 fold_build1_loc (loc, BIT_NOT_EXPR,
5290 lntype, ll_mask))))
5292 warning (0, "comparison is always %d", wanted_code == NE_EXPR);
5294 return constant_boolean_node (wanted_code == NE_EXPR, truth_type);
5297 if (r_const)
5299 r_const = fold_convert_loc (loc, lntype, r_const);
5300 r_const = unextend (r_const, rl_bitsize, rl_unsignedp, rl_and_mask);
5301 r_const = const_binop (LSHIFT_EXPR, r_const, size_int (xrl_bitpos));
5302 if (! integer_zerop (const_binop (BIT_AND_EXPR, r_const,
5303 fold_build1_loc (loc, BIT_NOT_EXPR,
5304 lntype, rl_mask))))
5306 warning (0, "comparison is always %d", wanted_code == NE_EXPR);
5308 return constant_boolean_node (wanted_code == NE_EXPR, truth_type);
5312 /* If the right sides are not constant, do the same for it. Also,
5313 disallow this optimization if a size or signedness mismatch occurs
5314 between the left and right sides. */
5315 if (l_const == 0)
5317 if (ll_bitsize != lr_bitsize || rl_bitsize != rr_bitsize
5318 || ll_unsignedp != lr_unsignedp || rl_unsignedp != rr_unsignedp
5319 /* Make sure the two fields on the right
5320 correspond to the left without being swapped. */
5321 || ll_bitpos - rl_bitpos != lr_bitpos - rr_bitpos)
5322 return 0;
5324 first_bit = MIN (lr_bitpos, rr_bitpos);
5325 end_bit = MAX (lr_bitpos + lr_bitsize, rr_bitpos + rr_bitsize);
5326 rnmode = get_best_mode (end_bit - first_bit, first_bit, 0, 0,
5327 TYPE_ALIGN (TREE_TYPE (lr_inner)), word_mode,
5328 volatilep);
5329 if (rnmode == VOIDmode)
5330 return 0;
5332 rnbitsize = GET_MODE_BITSIZE (rnmode);
5333 rnbitpos = first_bit & ~ (rnbitsize - 1);
5334 rntype = lang_hooks.types.type_for_size (rnbitsize, 1);
5335 xlr_bitpos = lr_bitpos - rnbitpos, xrr_bitpos = rr_bitpos - rnbitpos;
5337 if (BYTES_BIG_ENDIAN)
5339 xlr_bitpos = rnbitsize - xlr_bitpos - lr_bitsize;
5340 xrr_bitpos = rnbitsize - xrr_bitpos - rr_bitsize;
5343 lr_mask = const_binop (LSHIFT_EXPR, fold_convert_loc (loc,
5344 rntype, lr_mask),
5345 size_int (xlr_bitpos));
5346 rr_mask = const_binop (LSHIFT_EXPR, fold_convert_loc (loc,
5347 rntype, rr_mask),
5348 size_int (xrr_bitpos));
5350 /* Make a mask that corresponds to both fields being compared.
5351 Do this for both items being compared. If the operands are the
5352 same size and the bits being compared are in the same position
5353 then we can do this by masking both and comparing the masked
5354 results. */
5355 ll_mask = const_binop (BIT_IOR_EXPR, ll_mask, rl_mask);
5356 lr_mask = const_binop (BIT_IOR_EXPR, lr_mask, rr_mask);
5357 if (lnbitsize == rnbitsize && xll_bitpos == xlr_bitpos)
5359 lhs = make_bit_field_ref (loc, ll_inner, lntype, lnbitsize, lnbitpos,
5360 ll_unsignedp || rl_unsignedp);
5361 if (! all_ones_mask_p (ll_mask, lnbitsize))
5362 lhs = build2 (BIT_AND_EXPR, lntype, lhs, ll_mask);
5364 rhs = make_bit_field_ref (loc, lr_inner, rntype, rnbitsize, rnbitpos,
5365 lr_unsignedp || rr_unsignedp);
5366 if (! all_ones_mask_p (lr_mask, rnbitsize))
5367 rhs = build2 (BIT_AND_EXPR, rntype, rhs, lr_mask);
5369 return build2_loc (loc, wanted_code, truth_type, lhs, rhs);
5372 /* There is still another way we can do something: If both pairs of
5373 fields being compared are adjacent, we may be able to make a wider
5374 field containing them both.
5376 Note that we still must mask the lhs/rhs expressions. Furthermore,
5377 the mask must be shifted to account for the shift done by
5378 make_bit_field_ref. */
5379 if ((ll_bitsize + ll_bitpos == rl_bitpos
5380 && lr_bitsize + lr_bitpos == rr_bitpos)
5381 || (ll_bitpos == rl_bitpos + rl_bitsize
5382 && lr_bitpos == rr_bitpos + rr_bitsize))
5384 tree type;
5386 lhs = make_bit_field_ref (loc, ll_inner, lntype,
5387 ll_bitsize + rl_bitsize,
5388 MIN (ll_bitpos, rl_bitpos), ll_unsignedp);
5389 rhs = make_bit_field_ref (loc, lr_inner, rntype,
5390 lr_bitsize + rr_bitsize,
5391 MIN (lr_bitpos, rr_bitpos), lr_unsignedp);
5393 ll_mask = const_binop (RSHIFT_EXPR, ll_mask,
5394 size_int (MIN (xll_bitpos, xrl_bitpos)));
5395 lr_mask = const_binop (RSHIFT_EXPR, lr_mask,
5396 size_int (MIN (xlr_bitpos, xrr_bitpos)));
5398 /* Convert to the smaller type before masking out unwanted bits. */
5399 type = lntype;
5400 if (lntype != rntype)
5402 if (lnbitsize > rnbitsize)
5404 lhs = fold_convert_loc (loc, rntype, lhs);
5405 ll_mask = fold_convert_loc (loc, rntype, ll_mask);
5406 type = rntype;
5408 else if (lnbitsize < rnbitsize)
5410 rhs = fold_convert_loc (loc, lntype, rhs);
5411 lr_mask = fold_convert_loc (loc, lntype, lr_mask);
5412 type = lntype;
5416 if (! all_ones_mask_p (ll_mask, ll_bitsize + rl_bitsize))
5417 lhs = build2 (BIT_AND_EXPR, type, lhs, ll_mask);
5419 if (! all_ones_mask_p (lr_mask, lr_bitsize + rr_bitsize))
5420 rhs = build2 (BIT_AND_EXPR, type, rhs, lr_mask);
5422 return build2_loc (loc, wanted_code, truth_type, lhs, rhs);
5425 return 0;
5428 /* Handle the case of comparisons with constants. If there is something in
5429 common between the masks, those bits of the constants must be the same.
5430 If not, the condition is always false. Test for this to avoid generating
5431 incorrect code below. */
5432 result = const_binop (BIT_AND_EXPR, ll_mask, rl_mask);
5433 if (! integer_zerop (result)
5434 && simple_cst_equal (const_binop (BIT_AND_EXPR, result, l_const),
5435 const_binop (BIT_AND_EXPR, result, r_const)) != 1)
5437 if (wanted_code == NE_EXPR)
5439 warning (0, "%<or%> of unmatched not-equal tests is always 1");
5440 return constant_boolean_node (true, truth_type);
5442 else
5444 warning (0, "%<and%> of mutually exclusive equal-tests is always 0");
5445 return constant_boolean_node (false, truth_type);
5449 /* Construct the expression we will return. First get the component
5450 reference we will make. Unless the mask is all ones the width of
5451 that field, perform the mask operation. Then compare with the
5452 merged constant. */
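/* For example, (x & 3) == 1 && (x & 12) == 4 uses disjoint masks and
   is folded into the single test (x & 15) == 5. */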
5453 result = make_bit_field_ref (loc, ll_inner, lntype, lnbitsize, lnbitpos,
5454 ll_unsignedp || rl_unsignedp);
5456 ll_mask = const_binop (BIT_IOR_EXPR, ll_mask, rl_mask);
5457 if (! all_ones_mask_p (ll_mask, lnbitsize))
5458 result = build2_loc (loc, BIT_AND_EXPR, lntype, result, ll_mask);
5460 return build2_loc (loc, wanted_code, truth_type, result,
5461 const_binop (BIT_IOR_EXPR, l_const, r_const));
5464 /* Optimize T, which is a comparison of a MIN_EXPR or MAX_EXPR with a
5465 constant. */
5467 static tree
5468 optimize_minmax_comparison (location_t loc, enum tree_code code, tree type,
5469 tree op0, tree op1)
5471 tree arg0 = op0;
5472 enum tree_code op_code;
5473 tree comp_const;
5474 tree minmax_const;
5475 int consts_equal, consts_lt;
5476 tree inner;
5478 STRIP_SIGN_NOPS (arg0);
5480 op_code = TREE_CODE (arg0);
5481 minmax_const = TREE_OPERAND (arg0, 1);
5482 comp_const = fold_convert_loc (loc, TREE_TYPE (arg0), op1);
5483 consts_equal = tree_int_cst_equal (minmax_const, comp_const);
5484 consts_lt = tree_int_cst_lt (minmax_const, comp_const);
5485 inner = TREE_OPERAND (arg0, 0);
5487 /* If something does not permit us to optimize, return NULL_TREE. */
5488 if ((op_code != MIN_EXPR && op_code != MAX_EXPR)
5489 || TREE_CODE (comp_const) != INTEGER_CST
5490 || TREE_OVERFLOW (comp_const)
5491 || TREE_CODE (minmax_const) != INTEGER_CST
5492 || TREE_OVERFLOW (minmax_const))
5493 return NULL_TREE;
5495 /* Now handle all the various comparison codes. We only handle EQ_EXPR
5496 and GT_EXPR, doing the rest with recursive calls using logical
5497 simplifications. */
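/* For example, MAX (X, 0) < 5 is handled by inverting the result of
   folding MAX (X, 0) >= 5. */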
5498 switch (code)
5500 case NE_EXPR: case LT_EXPR: case LE_EXPR:
5502 tree tem
5503 = optimize_minmax_comparison (loc,
5504 invert_tree_comparison (code, false),
5505 type, op0, op1);
5506 if (tem)
5507 return invert_truthvalue_loc (loc, tem);
5508 return NULL_TREE;
5511 case GE_EXPR:
5512 return
5513 fold_build2_loc (loc, TRUTH_ORIF_EXPR, type,
5514 optimize_minmax_comparison
5515 (loc, EQ_EXPR, type, arg0, comp_const),
5516 optimize_minmax_comparison
5517 (loc, GT_EXPR, type, arg0, comp_const));
5519 case EQ_EXPR:
5520 if (op_code == MAX_EXPR && consts_equal)
5521 /* MAX (X, 0) == 0 -> X <= 0 */
5522 return fold_build2_loc (loc, LE_EXPR, type, inner, comp_const);
5524 else if (op_code == MAX_EXPR && consts_lt)
5525 /* MAX (X, 0) == 5 -> X == 5 */
5526 return fold_build2_loc (loc, EQ_EXPR, type, inner, comp_const);
5528 else if (op_code == MAX_EXPR)
5529 /* MAX (X, 0) == -1 -> false */
5530 return omit_one_operand_loc (loc, type, integer_zero_node, inner);
5532 else if (consts_equal)
5533 /* MIN (X, 0) == 0 -> X >= 0 */
5534 return fold_build2_loc (loc, GE_EXPR, type, inner, comp_const);
5536 else if (consts_lt)
5537 /* MIN (X, 0) == 5 -> false */
5538 return omit_one_operand_loc (loc, type, integer_zero_node, inner);
5540 else
5541 /* MIN (X, 0) == -1 -> X == -1 */
5542 return fold_build2_loc (loc, EQ_EXPR, type, inner, comp_const);
5544 case GT_EXPR:
5545 if (op_code == MAX_EXPR && (consts_equal || consts_lt))
5546 /* MAX (X, 0) > 0 -> X > 0
5547 MAX (X, 0) > 5 -> X > 5 */
5548 return fold_build2_loc (loc, GT_EXPR, type, inner, comp_const);
5550 else if (op_code == MAX_EXPR)
5551 /* MAX (X, 0) > -1 -> true */
5552 return omit_one_operand_loc (loc, type, integer_one_node, inner);
5554 else if (op_code == MIN_EXPR && (consts_equal || consts_lt))
5555 /* MIN (X, 0) > 0 -> false
5556 MIN (X, 0) > 5 -> false */
5557 return omit_one_operand_loc (loc, type, integer_zero_node, inner);
5559 else
5560 /* MIN (X, 0) > -1 -> X > -1 */
5561 return fold_build2_loc (loc, GT_EXPR, type, inner, comp_const);
5563 default:
5564 return NULL_TREE;
5568 /* T is an integer expression that is being multiplied, divided, or taken
5569 modulo a constant C (CODE says which operation and what kind of divide
5570 or modulus). See if we can eliminate that operation by folding it with
5571 other operations already in T. WIDE_TYPE, if non-null, is a type that
5572 should be used for the computation if wider than our type.
5574 For example, if we are dividing (X * 8) + (Y * 16) by 4, we can return
5575 (X * 2) + (Y * 4). We must, however, be assured that either the original
5576 expression would not overflow or that overflow is undefined for the type
5577 in the language in question.
5579 If we return a non-null expression, it is an equivalent form of the
5580 original computation, but need not be in the original type.
5582 We set *STRICT_OVERFLOW_P to true if the return value depends on
5583 signed overflow being undefined. Otherwise we do not change
5584 *STRICT_OVERFLOW_P. */
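/* For example, with signed X and signed overflow treated as undefined,
   extract_muldiv of T = (X * 6) + 10 by C = 2 for TRUNC_DIV_EXPR
   returns (X * 3) + 5, since both the multiplier and the addend are
   divisible by 2. */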
5586 static tree
5587 extract_muldiv (tree t, tree c, enum tree_code code, tree wide_type,
5588 bool *strict_overflow_p)
5590 /* To avoid exponential search depth, refuse to allow recursion past
5591 three levels. Beyond that (1) it's highly unlikely that we'll find
5592 something interesting and (2) we've probably processed it before
5593 when we built the inner expression. */
5595 static int depth;
5596 tree ret;
5598 if (depth > 3)
5599 return NULL;
5601 depth++;
5602 ret = extract_muldiv_1 (t, c, code, wide_type, strict_overflow_p);
5603 depth--;
5605 return ret;
5608 static tree
5609 extract_muldiv_1 (tree t, tree c, enum tree_code code, tree wide_type,
5610 bool *strict_overflow_p)
5612 tree type = TREE_TYPE (t);
5613 enum tree_code tcode = TREE_CODE (t);
5614 tree ctype = (wide_type != 0 && (GET_MODE_SIZE (TYPE_MODE (wide_type))
5615 > GET_MODE_SIZE (TYPE_MODE (type)))
5616 ? wide_type : type);
5617 tree t1, t2;
5618 int same_p = tcode == code;
5619 tree op0 = NULL_TREE, op1 = NULL_TREE;
5620 bool sub_strict_overflow_p;
5622 /* Don't deal with constants of zero here; they confuse the code below. */
5623 if (integer_zerop (c))
5624 return NULL_TREE;
5626 if (TREE_CODE_CLASS (tcode) == tcc_unary)
5627 op0 = TREE_OPERAND (t, 0);
5629 if (TREE_CODE_CLASS (tcode) == tcc_binary)
5630 op0 = TREE_OPERAND (t, 0), op1 = TREE_OPERAND (t, 1);
5632 /* Note that we need not handle conditional operations here since fold
5633 already handles those cases. So just do arithmetic here. */
5634 switch (tcode)
5636 case INTEGER_CST:
5637 /* For a constant, we can always simplify if we are a multiply
5638 or (for divide and modulus) if it is a multiple of our constant. */
5639 if (code == MULT_EXPR
5640 || integer_zerop (const_binop (TRUNC_MOD_EXPR, t, c)))
5641 return const_binop (code, fold_convert (ctype, t),
5642 fold_convert (ctype, c));
5643 break;
5645 CASE_CONVERT: case NON_LVALUE_EXPR:
5646 /* If op0 is an expression ... */
5647 if ((COMPARISON_CLASS_P (op0)
5648 || UNARY_CLASS_P (op0)
5649 || BINARY_CLASS_P (op0)
5650 || VL_EXP_CLASS_P (op0)
5651 || EXPRESSION_CLASS_P (op0))
5652 /* ... and has wrapping overflow, and its type is smaller
5653 than ctype, then we cannot pass through as widening. */
5654 && ((TYPE_OVERFLOW_WRAPS (TREE_TYPE (op0))
5655 && (TYPE_PRECISION (ctype)
5656 > TYPE_PRECISION (TREE_TYPE (op0))))
5657 /* ... or this is a truncation (t is narrower than op0),
5658 then we cannot pass through this narrowing. */
5659 || (TYPE_PRECISION (type)
5660 < TYPE_PRECISION (TREE_TYPE (op0)))
5661 /* ... or signedness changes for division or modulus,
5662 then we cannot pass through this conversion. */
5663 || (code != MULT_EXPR
5664 && (TYPE_UNSIGNED (ctype)
5665 != TYPE_UNSIGNED (TREE_TYPE (op0))))
5666 /* ... or has undefined overflow while the converted to
5667 type has not, we cannot do the operation in the inner type
5668 as that would introduce undefined overflow. */
5669 || (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (op0))
5670 && !TYPE_OVERFLOW_UNDEFINED (type))))
5671 break;
5673 /* Pass the constant down and see if we can make a simplification. If
5674 we can, replace this expression with the inner simplification for
5675 possible later conversion to our or some other type. */
5676 if ((t2 = fold_convert (TREE_TYPE (op0), c)) != 0
5677 && TREE_CODE (t2) == INTEGER_CST
5678 && !TREE_OVERFLOW (t2)
5679 && (0 != (t1 = extract_muldiv (op0, t2, code,
5680 code == MULT_EXPR
5681 ? ctype : NULL_TREE,
5682 strict_overflow_p))))
5683 return t1;
5684 break;
5686 case ABS_EXPR:
5687 /* If widening the type changes it from signed to unsigned, then we
5688 must avoid building ABS_EXPR itself as unsigned. */
5689 if (TYPE_UNSIGNED (ctype) && !TYPE_UNSIGNED (type))
5691 tree cstype = (*signed_type_for) (ctype);
5692 if ((t1 = extract_muldiv (op0, c, code, cstype, strict_overflow_p))
5693 != 0)
5695 t1 = fold_build1 (tcode, cstype, fold_convert (cstype, t1));
5696 return fold_convert (ctype, t1);
5698 break;
5700 /* If the constant is negative, we cannot simplify this. */
5701 if (tree_int_cst_sgn (c) == -1)
5702 break;
5703 /* FALLTHROUGH */
5704 case NEGATE_EXPR:
5705 if ((t1 = extract_muldiv (op0, c, code, wide_type, strict_overflow_p))
5706 != 0)
5707 return fold_build1 (tcode, ctype, fold_convert (ctype, t1));
5708 break;
5710 case MIN_EXPR: case MAX_EXPR:
5711 /* If widening the type changes the signedness, then we can't perform
5712 this optimization as that changes the result. */
5713 if (TYPE_UNSIGNED (ctype) != TYPE_UNSIGNED (type))
5714 break;
5716 /* MIN (a, b) / 5 -> MIN (a / 5, b / 5) */
5717 sub_strict_overflow_p = false;
5718 if ((t1 = extract_muldiv (op0, c, code, wide_type,
5719 &sub_strict_overflow_p)) != 0
5720 && (t2 = extract_muldiv (op1, c, code, wide_type,
5721 &sub_strict_overflow_p)) != 0)
5723 if (tree_int_cst_sgn (c) < 0)
5724 tcode = (tcode == MIN_EXPR ? MAX_EXPR : MIN_EXPR);
5725 if (sub_strict_overflow_p)
5726 *strict_overflow_p = true;
5727 return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
5728 fold_convert (ctype, t2));
5730 break;
5732 case LSHIFT_EXPR: case RSHIFT_EXPR:
5733 /* If the second operand is constant, this is a multiplication
5734 or floor division by a power of two, so we can treat it that
5735 way unless the multiplier or divisor overflows. Signed
5736 left-shift overflow is implementation-defined rather than
5737 undefined in C90, so do not convert signed left shift into
5738 multiplication. */
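/* For example, with unsigned X, (X << 3) * 2 is rewritten here as
   (X * 8) * 2 and then folded to X * 16 by the MULT_EXPR case
   below. */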
5739 if (TREE_CODE (op1) == INTEGER_CST
5740 && (tcode == RSHIFT_EXPR || TYPE_UNSIGNED (TREE_TYPE (op0)))
5741 /* const_binop may not detect overflow correctly,
5742 so check for it explicitly here. */
5743 && TYPE_PRECISION (TREE_TYPE (size_one_node)) > TREE_INT_CST_LOW (op1)
5744 && TREE_INT_CST_HIGH (op1) == 0
5745 && 0 != (t1 = fold_convert (ctype,
5746 const_binop (LSHIFT_EXPR,
5747 size_one_node,
5748 op1)))
5749 && !TREE_OVERFLOW (t1))
5750 return extract_muldiv (build2 (tcode == LSHIFT_EXPR
5751 ? MULT_EXPR : FLOOR_DIV_EXPR,
5752 ctype,
5753 fold_convert (ctype, op0),
5754 t1),
5755 c, code, wide_type, strict_overflow_p);
5756 break;
5758 case PLUS_EXPR: case MINUS_EXPR:
5759 /* See if we can eliminate the operation on both sides. If we can, we
5760 can return a new PLUS or MINUS. If we can't, the only remaining
5761 cases where we can do anything are when the second operand is a
5762 constant. */
5763 sub_strict_overflow_p = false;
5764 t1 = extract_muldiv (op0, c, code, wide_type, &sub_strict_overflow_p);
5765 t2 = extract_muldiv (op1, c, code, wide_type, &sub_strict_overflow_p);
5766 if (t1 != 0 && t2 != 0
5767 && (code == MULT_EXPR
5768 /* If not multiplication, we can only do this if both operands
5769 are divisible by c. */
5770 || (multiple_of_p (ctype, op0, c)
5771 && multiple_of_p (ctype, op1, c))))
5773 if (sub_strict_overflow_p)
5774 *strict_overflow_p = true;
5775 return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
5776 fold_convert (ctype, t2));
5779 /* If this was a subtraction, negate OP1 and set it to be an addition.
5780 This simplifies the logic below. */
5781 if (tcode == MINUS_EXPR)
5783 tcode = PLUS_EXPR, op1 = negate_expr (op1);
5784 /* If OP1 was not easily negatable, the constant may be OP0. */
5785 if (TREE_CODE (op0) == INTEGER_CST)
5787 tree tem = op0;
5788 op0 = op1;
5789 op1 = tem;
5790 tem = t1;
5791 t1 = t2;
5792 t2 = tem;
5796 if (TREE_CODE (op1) != INTEGER_CST)
5797 break;
5799 /* If either OP1 or C is negative, this optimization is not safe for
5800 some of the division and remainder types while for others we need
5801 to change the code. */
5802 if (tree_int_cst_sgn (op1) < 0 || tree_int_cst_sgn (c) < 0)
5804 if (code == CEIL_DIV_EXPR)
5805 code = FLOOR_DIV_EXPR;
5806 else if (code == FLOOR_DIV_EXPR)
5807 code = CEIL_DIV_EXPR;
5808 else if (code != MULT_EXPR
5809 && code != CEIL_MOD_EXPR && code != FLOOR_MOD_EXPR)
5810 break;
5813 /* If it's a multiply or a division/modulus operation of a multiple
5814 of our constant, do the operation and verify it doesn't overflow. */
5815 if (code == MULT_EXPR
5816 || integer_zerop (const_binop (TRUNC_MOD_EXPR, op1, c)))
5818 op1 = const_binop (code, fold_convert (ctype, op1),
5819 fold_convert (ctype, c));
5820 /* We allow the constant to overflow with wrapping semantics. */
5821 if (op1 == 0
5822 || (TREE_OVERFLOW (op1) && !TYPE_OVERFLOW_WRAPS (ctype)))
5823 break;
5825 else
5826 break;
5828 /* If we have an unsigned type that is not a sizetype, we cannot widen
5829 the operation since it will change the result if the original
5830 computation overflowed. */
5831 if (TYPE_UNSIGNED (ctype)
5832 && ctype != type)
5833 break;
5835 /* If we were able to eliminate our operation from the first side,
5836 apply our operation to the second side and reform the PLUS. */
5837 if (t1 != 0 && (TREE_CODE (t1) != code || code == MULT_EXPR))
5838 return fold_build2 (tcode, ctype, fold_convert (ctype, t1), op1);
5840 /* The last case is if we are a multiply. In that case, we can
5841 apply the distributive law to commute the multiply and addition
5842 if the multiplication of the constants doesn't overflow. */
5843 if (code == MULT_EXPR)
5844 return fold_build2 (tcode, ctype,
5845 fold_build2 (code, ctype,
5846 fold_convert (ctype, op0),
5847 fold_convert (ctype, c)),
5848 op1);
5850 break;
5852 case MULT_EXPR:
5853 /* We have a special case here if we are doing something like
5854 (C * 8) % 4 since we know that's zero. */
5855 if ((code == TRUNC_MOD_EXPR || code == CEIL_MOD_EXPR
5856 || code == FLOOR_MOD_EXPR || code == ROUND_MOD_EXPR)
5857 /* If the multiplication can overflow we cannot optimize this. */
5858 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (t))
5859 && TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST
5860 && integer_zerop (const_binop (TRUNC_MOD_EXPR, op1, c)))
5862 *strict_overflow_p = true;
5863 return omit_one_operand (type, integer_zero_node, op0);
5866 /* ... fall through ... */
5868 case TRUNC_DIV_EXPR: case CEIL_DIV_EXPR: case FLOOR_DIV_EXPR:
5869 case ROUND_DIV_EXPR: case EXACT_DIV_EXPR:
5870 /* If we can extract our operation from the LHS, do so and return a
5871 new operation. Likewise for the RHS from a MULT_EXPR. Otherwise,
5872 do something only if the second operand is a constant. */
5873 if (same_p
5874 && (t1 = extract_muldiv (op0, c, code, wide_type,
5875 strict_overflow_p)) != 0)
5876 return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
5877 fold_convert (ctype, op1));
5878 else if (tcode == MULT_EXPR && code == MULT_EXPR
5879 && (t1 = extract_muldiv (op1, c, code, wide_type,
5880 strict_overflow_p)) != 0)
5881 return fold_build2 (tcode, ctype, fold_convert (ctype, op0),
5882 fold_convert (ctype, t1));
5883 else if (TREE_CODE (op1) != INTEGER_CST)
5884 return 0;
5886 /* If these are the same operation types, we can associate them
5887 assuming no overflow. */
5888 if (tcode == code)
5890 double_int mul;
5891 int overflow_p;
5892 mul = double_int_mul_with_sign
5893 (double_int_ext
5894 (tree_to_double_int (op1),
5895 TYPE_PRECISION (ctype), TYPE_UNSIGNED (ctype)),
5896 double_int_ext
5897 (tree_to_double_int (c),
5898 TYPE_PRECISION (ctype), TYPE_UNSIGNED (ctype)),
5899 false, &overflow_p);
5900 overflow_p = ((!TYPE_UNSIGNED (ctype) && overflow_p)
5901 | TREE_OVERFLOW (c) | TREE_OVERFLOW (op1));
5902 if (!double_int_fits_to_tree_p (ctype, mul)
5903 && ((TYPE_UNSIGNED (ctype) && tcode != MULT_EXPR)
5904 || !TYPE_UNSIGNED (ctype)))
5905 overflow_p = 1;
5906 if (!overflow_p)
5907 return fold_build2 (tcode, ctype, fold_convert (ctype, op0),
5908 double_int_to_tree (ctype, mul));
5911 /* If these operations "cancel" each other, we have the main
5912 optimizations of this pass, which occur when either constant is a
5913 multiple of the other, in which case we replace this with either an
5914 operation of CODE or TCODE.
5916 If we have an unsigned type, we cannot do this since it will change
5917 the result if the original computation overflowed. */
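/* For example, (X * 12) / 4 becomes X * 3 and (X * 4) / 12 becomes
   X / 3, both only when signed overflow is undefined, hence the
   *strict_overflow_p note. */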
5918 if (TYPE_OVERFLOW_UNDEFINED (ctype)
5919 && ((code == MULT_EXPR && tcode == EXACT_DIV_EXPR)
5920 || (tcode == MULT_EXPR
5921 && code != TRUNC_MOD_EXPR && code != CEIL_MOD_EXPR
5922 && code != FLOOR_MOD_EXPR && code != ROUND_MOD_EXPR
5923 && code != MULT_EXPR)))
5925 if (integer_zerop (const_binop (TRUNC_MOD_EXPR, op1, c)))
5927 if (TYPE_OVERFLOW_UNDEFINED (ctype))
5928 *strict_overflow_p = true;
5929 return fold_build2 (tcode, ctype, fold_convert (ctype, op0),
5930 fold_convert (ctype,
5931 const_binop (TRUNC_DIV_EXPR,
5932 op1, c)));
5934 else if (integer_zerop (const_binop (TRUNC_MOD_EXPR, c, op1)))
5936 if (TYPE_OVERFLOW_UNDEFINED (ctype))
5937 *strict_overflow_p = true;
5938 return fold_build2 (code, ctype, fold_convert (ctype, op0),
5939 fold_convert (ctype,
5940 const_binop (TRUNC_DIV_EXPR,
5941 c, op1)));
5944 break;
5946 default:
5947 break;
5950 return 0;
5953 /* Return a node which has the indicated constant VALUE (either 0 or
5954 1 for scalars or {-1,-1,..} or {0,0,...} for vectors),
5955 and is of the indicated TYPE. */
5957 tree
5958 constant_boolean_node (bool value, tree type)
5960 if (type == integer_type_node)
5961 return value ? integer_one_node : integer_zero_node;
5962 else if (type == boolean_type_node)
5963 return value ? boolean_true_node : boolean_false_node;
5964 else if (TREE_CODE (type) == VECTOR_TYPE)
5965 return build_vector_from_val (type,
5966 build_int_cst (TREE_TYPE (type),
5967 value ? -1 : 0));
5968 else
5969 return fold_convert (type, value ? integer_one_node : integer_zero_node);
5973 /* Transform `a + (b ? x : y)' into `b ? (a + x) : (a + y)'.
5974 Transform `a + (x < y)' into `(x < y) ? (a + 1) : (a + 0)'. Here
5975 CODE corresponds to the `+', COND to the `(b ? x : y)' or `(x < y)'
5976 expression, and ARG to `a'. If COND_FIRST_P is nonzero, then the
5977 COND is the first argument to CODE; otherwise (as in the example
5978 given here), it is the second argument. TYPE is the type of the
5979 original expression. Return NULL_TREE if no simplification is
5980 possible. */
5982 static tree
5983 fold_binary_op_with_conditional_arg (location_t loc,
5984 enum tree_code code,
5985 tree type, tree op0, tree op1,
5986 tree cond, tree arg, int cond_first_p)
5988 tree cond_type = cond_first_p ? TREE_TYPE (op0) : TREE_TYPE (op1);
5989 tree arg_type = cond_first_p ? TREE_TYPE (op1) : TREE_TYPE (op0);
5990 tree test, true_value, false_value;
5991 tree lhs = NULL_TREE;
5992 tree rhs = NULL_TREE;
5994 if (TREE_CODE (cond) == COND_EXPR)
5996 test = TREE_OPERAND (cond, 0);
5997 true_value = TREE_OPERAND (cond, 1);
5998 false_value = TREE_OPERAND (cond, 2);
5999 /* If this operand is a void-typed expression, such as a throw, it
6000 does not make sense to try to perform a logical or arithmetic
6001 operation involving it. */
6002 if (VOID_TYPE_P (TREE_TYPE (true_value)))
6003 lhs = true_value;
6004 if (VOID_TYPE_P (TREE_TYPE (false_value)))
6005 rhs = false_value;
6007 else
6009 tree testtype = TREE_TYPE (cond);
6010 test = cond;
6011 true_value = constant_boolean_node (true, testtype);
6012 false_value = constant_boolean_node (false, testtype);
6015 /* This transformation is only worthwhile if we don't have to wrap ARG
6016 in a SAVE_EXPR and the operation can be simplified on at least one
6017 of the branches once it's pushed inside the COND_EXPR. */
6018 if (!TREE_CONSTANT (arg)
6019 && (TREE_SIDE_EFFECTS (arg)
6020 || TREE_CONSTANT (true_value) || TREE_CONSTANT (false_value)))
6021 return NULL_TREE;
6023 arg = fold_convert_loc (loc, arg_type, arg);
6024 if (lhs == 0)
6026 true_value = fold_convert_loc (loc, cond_type, true_value);
6027 if (cond_first_p)
6028 lhs = fold_build2_loc (loc, code, type, true_value, arg);
6029 else
6030 lhs = fold_build2_loc (loc, code, type, arg, true_value);
6032 if (rhs == 0)
6034 false_value = fold_convert_loc (loc, cond_type, false_value);
6035 if (cond_first_p)
6036 rhs = fold_build2_loc (loc, code, type, false_value, arg);
6037 else
6038 rhs = fold_build2_loc (loc, code, type, arg, false_value);
6041 /* Check that we have simplified at least one of the branches. */
6042 if (!TREE_CONSTANT (arg) && !TREE_CONSTANT (lhs) && !TREE_CONSTANT (rhs))
6043 return NULL_TREE;
6045 return fold_build3_loc (loc, COND_EXPR, type, test, lhs, rhs);
6049 /* Subroutine of fold() that checks for the addition of +/- 0.0.
6051 If !NEGATE, return true if ADDEND is +/-0.0 and, for all X of type
6052 TYPE, X + ADDEND is the same as X. If NEGATE, return true if X -
6053 ADDEND is the same as X.
6055 X + 0 and X - 0 both give X when X is NaN, infinite, or nonzero
6056 and finite. The problematic cases are when X is zero, and its mode
6057 has signed zeros. In the case of rounding towards -infinity,
6058 X - 0 is not the same as X because 0 - 0 is -0. In other rounding
6059 modes, X + 0 is not the same as X because -0 + 0 is 0. */
6061 bool
6062 fold_real_zero_addition_p (const_tree type, const_tree addend, int negate)
6064 if (!real_zerop (addend))
6065 return false;
6067 /* Don't allow the fold with -fsignaling-nans. */
6068 if (HONOR_SNANS (TYPE_MODE (type)))
6069 return false;
6071 /* Allow the fold if zeros aren't signed, or their sign isn't important. */
6072 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (type)))
6073 return true;
6075 /* Treat x + -0 as x - 0 and x - -0 as x + 0. */
6076 if (TREE_CODE (addend) == REAL_CST
6077 && REAL_VALUE_MINUS_ZERO (TREE_REAL_CST (addend)))
6078 negate = !negate;
6080 /* The mode has signed zeros, and we have to honor their sign.
6081 In this situation, there is only one case we can return true for.
6082 X - 0 is the same as X unless rounding towards -infinity is
6083 supported. */
6084 return negate && !HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type));
6087 /* Subroutine of fold() that checks comparisons of built-in math
6088 functions against real constants.
6090 FCODE is the DECL_FUNCTION_CODE of the built-in, CODE is the comparison
6091 operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR, GE_EXPR or LE_EXPR. TYPE
6092 is the type of the result and ARG0 and ARG1 are the operands of the
6093 comparison. ARG1 must be a TREE_REAL_CST.
6095 The function returns the constant folded tree if a simplification
6096 can be made, and NULL_TREE otherwise. */
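/* For example, sqrt(x) < -1.0 folds to 0, and sqrt(x) > 2.0 folds to
   x > 4.0. */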
6098 static tree
6099 fold_mathfn_compare (location_t loc,
6100 enum built_in_function fcode, enum tree_code code,
6101 tree type, tree arg0, tree arg1)
6103 REAL_VALUE_TYPE c;
6105 if (BUILTIN_SQRT_P (fcode))
6107 tree arg = CALL_EXPR_ARG (arg0, 0);
6108 enum machine_mode mode = TYPE_MODE (TREE_TYPE (arg0));
6110 c = TREE_REAL_CST (arg1);
6111 if (REAL_VALUE_NEGATIVE (c))
6113 /* sqrt(x) < y is always false, if y is negative. */
6114 if (code == EQ_EXPR || code == LT_EXPR || code == LE_EXPR)
6115 return omit_one_operand_loc (loc, type, integer_zero_node, arg);
6117 /* sqrt(x) > y is always true, if y is negative and we
6118 don't care about NaNs, i.e. negative values of x. */
6119 if (code == NE_EXPR || !HONOR_NANS (mode))
6120 return omit_one_operand_loc (loc, type, integer_one_node, arg);
6122 /* sqrt(x) > y is the same as x >= 0, if y is negative. */
6123 return fold_build2_loc (loc, GE_EXPR, type, arg,
6124 build_real (TREE_TYPE (arg), dconst0));
6126 else if (code == GT_EXPR || code == GE_EXPR)
6128 REAL_VALUE_TYPE c2;
6130 REAL_ARITHMETIC (c2, MULT_EXPR, c, c);
6131 real_convert (&c2, mode, &c2);
6133 if (REAL_VALUE_ISINF (c2))
6135 /* sqrt(x) > y is x == +Inf, when y is very large. */
6136 if (HONOR_INFINITIES (mode))
6137 return fold_build2_loc (loc, EQ_EXPR, type, arg,
6138 build_real (TREE_TYPE (arg), c2));
6140 /* sqrt(x) > y is always false, when y is very large
6141 and we don't care about infinities. */
6142 return omit_one_operand_loc (loc, type, integer_zero_node, arg);
6145 /* sqrt(x) > c is the same as x > c*c. */
6146 return fold_build2_loc (loc, code, type, arg,
6147 build_real (TREE_TYPE (arg), c2));
6149 else if (code == LT_EXPR || code == LE_EXPR)
6151 REAL_VALUE_TYPE c2;
6153 REAL_ARITHMETIC (c2, MULT_EXPR, c, c);
6154 real_convert (&c2, mode, &c2);
6156 if (REAL_VALUE_ISINF (c2))
6158 /* sqrt(x) < y is always true, when y is a very large
6159 value and we don't care about NaNs or Infinities. */
6160 if (! HONOR_NANS (mode) && ! HONOR_INFINITIES (mode))
6161 return omit_one_operand_loc (loc, type, integer_one_node, arg);
6163 /* sqrt(x) < y is x != +Inf when y is very large and we
6164 don't care about NaNs. */
6165 if (! HONOR_NANS (mode))
6166 return fold_build2_loc (loc, NE_EXPR, type, arg,
6167 build_real (TREE_TYPE (arg), c2));
6169 /* sqrt(x) < y is x >= 0 when y is very large and we
6170 don't care about Infinities. */
6171 if (! HONOR_INFINITIES (mode))
6172 return fold_build2_loc (loc, GE_EXPR, type, arg,
6173 build_real (TREE_TYPE (arg), dconst0));
6175 /* sqrt(x) < y is x >= 0 && x != +Inf, when y is large. */
6176 arg = save_expr (arg);
6177 return fold_build2_loc (loc, TRUTH_ANDIF_EXPR, type,
6178 fold_build2_loc (loc, GE_EXPR, type, arg,
6179 build_real (TREE_TYPE (arg),
6180 dconst0)),
6181 fold_build2_loc (loc, NE_EXPR, type, arg,
6182 build_real (TREE_TYPE (arg),
6183 c2)));
6186 /* sqrt(x) < c is the same as x < c*c, if we ignore NaNs. */
6187 if (! HONOR_NANS (mode))
6188 return fold_build2_loc (loc, code, type, arg,
6189 build_real (TREE_TYPE (arg), c2));
6191 /* sqrt(x) < c is the same as x >= 0 && x < c*c. */
6192 arg = save_expr (arg);
6193 return fold_build2_loc (loc, TRUTH_ANDIF_EXPR, type,
6194 fold_build2_loc (loc, GE_EXPR, type, arg,
6195 build_real (TREE_TYPE (arg),
6196 dconst0)),
6197 fold_build2_loc (loc, code, type, arg,
6198 build_real (TREE_TYPE (arg),
6199 c2)));
6203 return NULL_TREE;
6206 /* Subroutine of fold() that optimizes comparisons against Infinities,
6207 either +Inf or -Inf.
6209 CODE is the comparison operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR,
6210 GE_EXPR or LE_EXPR. TYPE is the type of the result and ARG0 and ARG1
6211 are the operands of the comparison. ARG1 must be a TREE_REAL_CST.
6213 The function returns the constant folded tree if a simplification
6214 can be made, and NULL_TREE otherwise. */
6216 static tree
6217 fold_inf_compare (location_t loc, enum tree_code code, tree type,
6218 tree arg0, tree arg1)
6220 enum machine_mode mode;
6221 REAL_VALUE_TYPE max;
6222 tree temp;
6223 bool neg;
6225 mode = TYPE_MODE (TREE_TYPE (arg0));
6227 /* For negative infinity swap the sense of the comparison. */
6228 neg = REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg1));
6229 if (neg)
6230 code = swap_tree_comparison (code);
6232 switch (code)
6234 case GT_EXPR:
6235 /* x > +Inf is always false, if we ignore sNaNs. */
6236 if (HONOR_SNANS (mode))
6237 return NULL_TREE;
6238 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
6240 case LE_EXPR:
6241 /* x <= +Inf is always true, if we don't care about NaNs. */
6242 if (! HONOR_NANS (mode))
6243 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
6245 /* x <= +Inf is the same as x == x, i.e. isfinite(x). */
6246 arg0 = save_expr (arg0);
6247 return fold_build2_loc (loc, EQ_EXPR, type, arg0, arg0);
6249 case EQ_EXPR:
6250 case GE_EXPR:
6251 /* x == +Inf and x >= +Inf are always equal to x > DBL_MAX. */
6252 real_maxval (&max, neg, mode);
6253 return fold_build2_loc (loc, neg ? LT_EXPR : GT_EXPR, type,
6254 arg0, build_real (TREE_TYPE (arg0), max));
6256 case LT_EXPR:
6257 /* x < +Inf is always equal to x <= DBL_MAX. */
6258 real_maxval (&max, neg, mode);
6259 return fold_build2_loc (loc, neg ? GE_EXPR : LE_EXPR, type,
6260 arg0, build_real (TREE_TYPE (arg0), max));
6262 case NE_EXPR:
6263 /* x != +Inf is always equal to !(x > DBL_MAX). */
6264 real_maxval (&max, neg, mode);
6265 if (! HONOR_NANS (mode))
6266 return fold_build2_loc (loc, neg ? GE_EXPR : LE_EXPR, type,
6267 arg0, build_real (TREE_TYPE (arg0), max));
6269 temp = fold_build2_loc (loc, neg ? LT_EXPR : GT_EXPR, type,
6270 arg0, build_real (TREE_TYPE (arg0), max));
6271 return fold_build1_loc (loc, TRUTH_NOT_EXPR, type, temp);
6273 default:
6274 break;
6277 return NULL_TREE;
6280 /* Subroutine of fold() that optimizes comparisons of a division by
6281 a nonzero integer constant against an integer constant, i.e.
6282 X/C1 op C2.
6284 CODE is the comparison operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR,
6285 GE_EXPR or LE_EXPR. TYPE is the type of the result and ARG0 and ARG1
6286 are the operands of the comparison. ARG1 must be an INTEGER_CST.
6288 The function returns the constant folded tree if a simplification
6289 can be made, and NULL_TREE otherwise. */
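/* For example, with unsigned X, X / 4 == 2 holds exactly for
   8 <= X && X <= 11 and is folded into that range check, while
   X / 4 < 3 becomes X < 12. */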
6291 static tree
6292 fold_div_compare (location_t loc,
6293 enum tree_code code, tree type, tree arg0, tree arg1)
6295 tree prod, tmp, hi, lo;
6296 tree arg00 = TREE_OPERAND (arg0, 0);
6297 tree arg01 = TREE_OPERAND (arg0, 1);
6298 double_int val;
6299 bool unsigned_p = TYPE_UNSIGNED (TREE_TYPE (arg0));
6300 bool neg_overflow;
6301 int overflow;
6303 /* We have to do this the hard way to detect unsigned overflow.
6304 prod = int_const_binop (MULT_EXPR, arg01, arg1); */
6305 overflow = mul_double_with_sign (TREE_INT_CST_LOW (arg01),
6306 TREE_INT_CST_HIGH (arg01),
6307 TREE_INT_CST_LOW (arg1),
6308 TREE_INT_CST_HIGH (arg1),
6309 &val.low, &val.high, unsigned_p);
6310 prod = force_fit_type_double (TREE_TYPE (arg00), val, -1, overflow);
6311 neg_overflow = false;
6313 if (unsigned_p)
6315 tmp = int_const_binop (MINUS_EXPR, arg01,
6316 build_int_cst (TREE_TYPE (arg01), 1));
6317 lo = prod;
6319 /* Likewise hi = int_const_binop (PLUS_EXPR, prod, tmp). */
6320 overflow = add_double_with_sign (TREE_INT_CST_LOW (prod),
6321 TREE_INT_CST_HIGH (prod),
6322 TREE_INT_CST_LOW (tmp),
6323 TREE_INT_CST_HIGH (tmp),
6324 &val.low, &val.high, unsigned_p);
6325 hi = force_fit_type_double (TREE_TYPE (arg00), val,
6326 -1, overflow | TREE_OVERFLOW (prod));
6328 else if (tree_int_cst_sgn (arg01) >= 0)
6330 tmp = int_const_binop (MINUS_EXPR, arg01,
6331 build_int_cst (TREE_TYPE (arg01), 1));
6332 switch (tree_int_cst_sgn (arg1))
6334 case -1:
6335 neg_overflow = true;
6336 lo = int_const_binop (MINUS_EXPR, prod, tmp);
6337 hi = prod;
6338 break;
6340 case 0:
6341 lo = fold_negate_const (tmp, TREE_TYPE (arg0));
6342 hi = tmp;
6343 break;
6345 case 1:
6346 hi = int_const_binop (PLUS_EXPR, prod, tmp);
6347 lo = prod;
6348 break;
6350 default:
6351 gcc_unreachable ();
6354 else
6356 /* A negative divisor reverses the relational operators. */
6357 code = swap_tree_comparison (code);
6359 tmp = int_const_binop (PLUS_EXPR, arg01,
6360 build_int_cst (TREE_TYPE (arg01), 1));
6361 switch (tree_int_cst_sgn (arg1))
6363 case -1:
6364 hi = int_const_binop (MINUS_EXPR, prod, tmp);
6365 lo = prod;
6366 break;
6368 case 0:
6369 hi = fold_negate_const (tmp, TREE_TYPE (arg0));
6370 lo = tmp;
6371 break;
6373 case 1:
6374 neg_overflow = true;
6375 lo = int_const_binop (PLUS_EXPR, prod, tmp);
6376 hi = prod;
6377 break;
6379 default:
6380 gcc_unreachable ();
6384 switch (code)
6386 case EQ_EXPR:
6387 if (TREE_OVERFLOW (lo) && TREE_OVERFLOW (hi))
6388 return omit_one_operand_loc (loc, type, integer_zero_node, arg00);
6389 if (TREE_OVERFLOW (hi))
6390 return fold_build2_loc (loc, GE_EXPR, type, arg00, lo);
6391 if (TREE_OVERFLOW (lo))
6392 return fold_build2_loc (loc, LE_EXPR, type, arg00, hi);
6393 return build_range_check (loc, type, arg00, 1, lo, hi);
6395 case NE_EXPR:
6396 if (TREE_OVERFLOW (lo) && TREE_OVERFLOW (hi))
6397 return omit_one_operand_loc (loc, type, integer_one_node, arg00);
6398 if (TREE_OVERFLOW (hi))
6399 return fold_build2_loc (loc, LT_EXPR, type, arg00, lo);
6400 if (TREE_OVERFLOW (lo))
6401 return fold_build2_loc (loc, GT_EXPR, type, arg00, hi);
6402 return build_range_check (loc, type, arg00, 0, lo, hi);
6404 case LT_EXPR:
6405 if (TREE_OVERFLOW (lo))
6407 tmp = neg_overflow ? integer_zero_node : integer_one_node;
6408 return omit_one_operand_loc (loc, type, tmp, arg00);
6410 return fold_build2_loc (loc, LT_EXPR, type, arg00, lo);
6412 case LE_EXPR:
6413 if (TREE_OVERFLOW (hi))
6415 tmp = neg_overflow ? integer_zero_node : integer_one_node;
6416 return omit_one_operand_loc (loc, type, tmp, arg00);
6418 return fold_build2_loc (loc, LE_EXPR, type, arg00, hi);
6420 case GT_EXPR:
6421 if (TREE_OVERFLOW (hi))
6423 tmp = neg_overflow ? integer_one_node : integer_zero_node;
6424 return omit_one_operand_loc (loc, type, tmp, arg00);
6426 return fold_build2_loc (loc, GT_EXPR, type, arg00, hi);
6428 case GE_EXPR:
6429 if (TREE_OVERFLOW (lo))
6431 tmp = neg_overflow ? integer_one_node : integer_zero_node;
6432 return omit_one_operand_loc (loc, type, tmp, arg00);
6434 return fold_build2_loc (loc, GE_EXPR, type, arg00, lo);
6436 default:
6437 break;
6440 return NULL_TREE;
6444 /* If CODE with arguments ARG0 and ARG1 represents a single bit
6445 equality/inequality test, then return a simplified form of the test
6446 using a sign test. Otherwise return NULL. TYPE is the desired
6447 result type. */
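/* For example, with 32-bit int x, (x & 0x80000000) != 0 tests the
   sign bit and becomes x < 0 once x is viewed as signed. */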
6449 static tree
6450 fold_single_bit_test_into_sign_test (location_t loc,
6451 enum tree_code code, tree arg0, tree arg1,
6452 tree result_type)
6454 /* If this is testing a single bit, we can optimize the test. */
6455 if ((code == NE_EXPR || code == EQ_EXPR)
6456 && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
6457 && integer_pow2p (TREE_OPERAND (arg0, 1)))
6459 /* If we have (A & C) != 0 where C is the sign bit of A, convert
6460 this into A < 0. Similarly for (A & C) == 0 into A >= 0. */
6461 tree arg00 = sign_bit_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg0, 1));
6463 if (arg00 != NULL_TREE
6464 /* This is only a win if casting to a signed type is cheap,
6465 i.e. when arg00's type is not a partial mode. */
6466 && TYPE_PRECISION (TREE_TYPE (arg00))
6467 == GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (arg00))))
6469 tree stype = signed_type_for (TREE_TYPE (arg00));
6470 return fold_build2_loc (loc, code == EQ_EXPR ? GE_EXPR : LT_EXPR,
6471 result_type,
6472 fold_convert_loc (loc, stype, arg00),
6473 build_int_cst (stype, 0));
6477 return NULL_TREE;
6480 /* If CODE with arguments ARG0 and ARG1 represents a single bit
6481 equality/inequality test, then return a simplified form of
6482 the test using shifts and logical operations. Otherwise return
6483 NULL. TYPE is the desired result type. */
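/* For example, (x & 8) != 0 becomes ((x >> 3) & 1), and (x & 8) == 0
   becomes (((x >> 3) ^ 1) & 1). */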
6485 tree
6486 fold_single_bit_test (location_t loc, enum tree_code code,
6487 tree arg0, tree arg1, tree result_type)
6489 /* If this is testing a single bit, we can optimize the test. */
6490 if ((code == NE_EXPR || code == EQ_EXPR)
6491 && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
6492 && integer_pow2p (TREE_OPERAND (arg0, 1)))
6494 tree inner = TREE_OPERAND (arg0, 0);
6495 tree type = TREE_TYPE (arg0);
6496 int bitnum = tree_log2 (TREE_OPERAND (arg0, 1));
6497 enum machine_mode operand_mode = TYPE_MODE (type);
6498 int ops_unsigned;
6499 tree signed_type, unsigned_type, intermediate_type;
6500 tree tem, one;
6502 /* First, see if we can fold the single bit test into a sign-bit
6503 test. */
6504 tem = fold_single_bit_test_into_sign_test (loc, code, arg0, arg1,
6505 result_type);
6506 if (tem)
6507 return tem;
6509 /* Otherwise we have (A & C) != 0 where C is a single bit,
6510 convert that into ((A >> C2) & 1). Where C2 = log2(C).
6511 Similarly for (A & C) == 0. */
6513 /* If INNER is a right shift of a constant and it plus BITNUM does
6514 not overflow, adjust BITNUM and INNER. */
6515 if (TREE_CODE (inner) == RSHIFT_EXPR
6516 && TREE_CODE (TREE_OPERAND (inner, 1)) == INTEGER_CST
6517 && TREE_INT_CST_HIGH (TREE_OPERAND (inner, 1)) == 0
6518 && bitnum < TYPE_PRECISION (type)
6519 && 0 > compare_tree_int (TREE_OPERAND (inner, 1),
6520 bitnum - TYPE_PRECISION (type)))
6522 bitnum += TREE_INT_CST_LOW (TREE_OPERAND (inner, 1));
6523 inner = TREE_OPERAND (inner, 0);
6526 /* If we are going to be able to omit the AND below, we must do our
6527 operations as unsigned. If we must use the AND, we have a choice.
6528 Normally unsigned is faster, but for some machines signed is. */
6529 #ifdef LOAD_EXTEND_OP
6530 ops_unsigned = (LOAD_EXTEND_OP (operand_mode) == SIGN_EXTEND
6531 && !flag_syntax_only) ? 0 : 1;
6532 #else
6533 ops_unsigned = 1;
6534 #endif
6536 signed_type = lang_hooks.types.type_for_mode (operand_mode, 0);
6537 unsigned_type = lang_hooks.types.type_for_mode (operand_mode, 1);
6538 intermediate_type = ops_unsigned ? unsigned_type : signed_type;
6539 inner = fold_convert_loc (loc, intermediate_type, inner);
6541 if (bitnum != 0)
6542 inner = build2 (RSHIFT_EXPR, intermediate_type,
6543 inner, size_int (bitnum));
6545 one = build_int_cst (intermediate_type, 1);
6547 if (code == EQ_EXPR)
6548 inner = fold_build2_loc (loc, BIT_XOR_EXPR, intermediate_type, inner, one);
6550 /* Put the AND last so it can combine with more things. */
6551 inner = build2 (BIT_AND_EXPR, intermediate_type, inner, one);
6553 /* Make sure to return the proper type. */
6554 inner = fold_convert_loc (loc, result_type, inner);
6556 return inner;
6558 return NULL_TREE;
6561 /* Check whether we are allowed to reorder operands arg0 and arg1,
6562 such that the evaluation of arg1 occurs before arg0. */
6564 static bool
6565 reorder_operands_p (const_tree arg0, const_tree arg1)
6567 if (! flag_evaluation_order)
6568 return true;
6569 if (TREE_CONSTANT (arg0) || TREE_CONSTANT (arg1))
6570 return true;
6571 return ! TREE_SIDE_EFFECTS (arg0)
6572 && ! TREE_SIDE_EFFECTS (arg1);
6575 /* Test whether it is preferable to swap two operands, ARG0 and
6576 ARG1, for example because ARG0 is an integer constant and ARG1
6577 isn't. If REORDER is true, only recommend swapping if we can
6578 evaluate the operands in reverse order. */
6580 bool
6581 tree_swap_operands_p (const_tree arg0, const_tree arg1, bool reorder)
6583 STRIP_SIGN_NOPS (arg0);
6584 STRIP_SIGN_NOPS (arg1);
6586 if (TREE_CODE (arg1) == INTEGER_CST)
6587 return 0;
6588 if (TREE_CODE (arg0) == INTEGER_CST)
6589 return 1;
6591 if (TREE_CODE (arg1) == REAL_CST)
6592 return 0;
6593 if (TREE_CODE (arg0) == REAL_CST)
6594 return 1;
6596 if (TREE_CODE (arg1) == FIXED_CST)
6597 return 0;
6598 if (TREE_CODE (arg0) == FIXED_CST)
6599 return 1;
6601 if (TREE_CODE (arg1) == COMPLEX_CST)
6602 return 0;
6603 if (TREE_CODE (arg0) == COMPLEX_CST)
6604 return 1;
6606 if (TREE_CONSTANT (arg1))
6607 return 0;
6608 if (TREE_CONSTANT (arg0))
6609 return 1;
6611 if (optimize_function_for_size_p (cfun))
6612 return 0;
6614 if (reorder && flag_evaluation_order
6615 && (TREE_SIDE_EFFECTS (arg0) || TREE_SIDE_EFFECTS (arg1)))
6616 return 0;
6618 /* It is preferable to swap two SSA_NAMEs to ensure a canonical form
6619 for commutative and comparison operators. Ensuring a canonical
6620 form allows the optimizers to find additional redundancies without
6621 having to explicitly check for both orderings. */
6622 if (TREE_CODE (arg0) == SSA_NAME
6623 && TREE_CODE (arg1) == SSA_NAME
6624 && SSA_NAME_VERSION (arg0) > SSA_NAME_VERSION (arg1))
6625 return 1;
6627 /* Put SSA_NAMEs last. */
6628 if (TREE_CODE (arg1) == SSA_NAME)
6629 return 0;
6630 if (TREE_CODE (arg0) == SSA_NAME)
6631 return 1;
6633 /* Put variables last. */
6634 if (DECL_P (arg1))
6635 return 0;
6636 if (DECL_P (arg0))
6637 return 1;
6639 return 0;
6642 /* Fold comparison ARG0 CODE ARG1 (with result in TYPE), where
6643 ARG0 is extended to a wider type. */
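/* For example, if c has type unsigned char, (int) c == 500 is known
   to be false because 500 is outside [0, 255], while (int) c == 50
   can be done as the narrower comparison c == 50. */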
6645 static tree
6646 fold_widened_comparison (location_t loc, enum tree_code code,
6647 tree type, tree arg0, tree arg1)
6649 tree arg0_unw = get_unwidened (arg0, NULL_TREE);
6650 tree arg1_unw;
6651 tree shorter_type, outer_type;
6652 tree min, max;
6653 bool above, below;
6655 if (arg0_unw == arg0)
6656 return NULL_TREE;
6657 shorter_type = TREE_TYPE (arg0_unw);
6659 #ifdef HAVE_canonicalize_funcptr_for_compare
6660 /* Disable this optimization if we're casting a function pointer
6661 type on targets that require function pointer canonicalization. */
6662 if (HAVE_canonicalize_funcptr_for_compare
6663 && TREE_CODE (shorter_type) == POINTER_TYPE
6664 && TREE_CODE (TREE_TYPE (shorter_type)) == FUNCTION_TYPE)
6665 return NULL_TREE;
6666 #endif
6668 if (TYPE_PRECISION (TREE_TYPE (arg0)) <= TYPE_PRECISION (shorter_type))
6669 return NULL_TREE;
6671 arg1_unw = get_unwidened (arg1, NULL_TREE);
6673 /* If possible, express the comparison in the shorter mode. */
6674 if ((code == EQ_EXPR || code == NE_EXPR
6675 || TYPE_UNSIGNED (TREE_TYPE (arg0)) == TYPE_UNSIGNED (shorter_type))
6676 && (TREE_TYPE (arg1_unw) == shorter_type
6677 || ((TYPE_PRECISION (shorter_type)
6678 >= TYPE_PRECISION (TREE_TYPE (arg1_unw)))
6679 && (TYPE_UNSIGNED (shorter_type)
6680 == TYPE_UNSIGNED (TREE_TYPE (arg1_unw))))
6681 || (TREE_CODE (arg1_unw) == INTEGER_CST
6682 && (TREE_CODE (shorter_type) == INTEGER_TYPE
6683 || TREE_CODE (shorter_type) == BOOLEAN_TYPE)
6684 && int_fits_type_p (arg1_unw, shorter_type))))
6685 return fold_build2_loc (loc, code, type, arg0_unw,
6686 fold_convert_loc (loc, shorter_type, arg1_unw));
6688 if (TREE_CODE (arg1_unw) != INTEGER_CST
6689 || TREE_CODE (shorter_type) != INTEGER_TYPE
6690 || !int_fits_type_p (arg1_unw, TREE_TYPE (arg0)))
6691 return NULL_TREE;
6693 /* If we are comparing with an integer that does not fit into the range
6694 of the shorter type, the result is known. */
6695 outer_type = TREE_TYPE (arg1_unw);
6696 min = lower_bound_in_type (outer_type, shorter_type);
6697 max = upper_bound_in_type (outer_type, shorter_type);
6699 above = integer_nonzerop (fold_relational_const (LT_EXPR, type,
6700 max, arg1_unw));
6701 below = integer_nonzerop (fold_relational_const (LT_EXPR, type,
6702 arg1_unw, min));
6704 switch (code)
6706 case EQ_EXPR:
6707 if (above || below)
6708 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
6709 break;
6711 case NE_EXPR:
6712 if (above || below)
6713 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
6714 break;
6716 case LT_EXPR:
6717 case LE_EXPR:
6718 if (above)
6719 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
6720 else if (below)
6721 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
6723 case GT_EXPR:
6724 case GE_EXPR:
6725 if (above)
6726 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
6727 else if (below)
6728 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
6730 default:
6731 break;
6734 return NULL_TREE;
6737 /* Fold comparison ARG0 CODE ARG1 (with result in TYPE), where for
6738 ARG0 just the signedness is changed. */
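/* For example, with signed int i, (unsigned int) i == 5U folds to
   i == 5, since a sign change of equal precision cannot affect
   equality. */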
6740 static tree
6741 fold_sign_changed_comparison (location_t loc, enum tree_code code, tree type,
6742 tree arg0, tree arg1)
6744 tree arg0_inner;
6745 tree inner_type, outer_type;
6747 if (!CONVERT_EXPR_P (arg0))
6748 return NULL_TREE;
6750 outer_type = TREE_TYPE (arg0);
6751 arg0_inner = TREE_OPERAND (arg0, 0);
6752 inner_type = TREE_TYPE (arg0_inner);
6754 #ifdef HAVE_canonicalize_funcptr_for_compare
6755 /* Disable this optimization if we're casting a function pointer
6756 type on targets that require function pointer canonicalization. */
6757 if (HAVE_canonicalize_funcptr_for_compare
6758 && TREE_CODE (inner_type) == POINTER_TYPE
6759 && TREE_CODE (TREE_TYPE (inner_type)) == FUNCTION_TYPE)
6760 return NULL_TREE;
6761 #endif
6763 if (TYPE_PRECISION (inner_type) != TYPE_PRECISION (outer_type))
6764 return NULL_TREE;
6766 if (TREE_CODE (arg1) != INTEGER_CST
6767 && !(CONVERT_EXPR_P (arg1)
6768 && TREE_TYPE (TREE_OPERAND (arg1, 0)) == inner_type))
6769 return NULL_TREE;
6771 if ((TYPE_UNSIGNED (inner_type) != TYPE_UNSIGNED (outer_type)
6772 || POINTER_TYPE_P (inner_type) != POINTER_TYPE_P (outer_type))
6773 && code != NE_EXPR
6774 && code != EQ_EXPR)
6775 return NULL_TREE;
6777 if (TREE_CODE (arg1) == INTEGER_CST)
6778 arg1 = force_fit_type_double (inner_type, tree_to_double_int (arg1),
6779 0, TREE_OVERFLOW (arg1));
6780 else
6781 arg1 = fold_convert_loc (loc, inner_type, arg1);
6783 return fold_build2_loc (loc, code, type, arg0_inner, arg1);
6786 /* Tries to replace &a[idx] p+ s * delta with &a[idx + delta], if s is
6787 step of the array. Reconstructs s and delta in the case of s *
6788 delta being an integer constant (and thus already folded). ADDR is
6789 the address. MULT is the multiplicative expression. If the
6790 function succeeds, the new address expression is returned.
6791 Otherwise NULL_TREE is returned. LOC is the location of the
6792 resulting expression. */
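/* For example, &a[i] p+ 4 * j becomes &a[i + j] when a's elements are
   4 bytes wide and the resulting index provably stays within the
   array's domain. */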
6794 static tree
6795 try_move_mult_to_index (location_t loc, tree addr, tree op1)
6797 tree s, delta, step;
6798 tree ref = TREE_OPERAND (addr, 0), pref;
6799 tree ret, pos;
6800 tree itype;
6801 bool mdim = false;
6803 /* Strip the nops that might be added when converting op1 to sizetype. */
6804 STRIP_NOPS (op1);
6806 /* Canonicalize op1 into a possibly non-constant delta
6807 and an INTEGER_CST s. */
6808 if (TREE_CODE (op1) == MULT_EXPR)
6810 tree arg0 = TREE_OPERAND (op1, 0), arg1 = TREE_OPERAND (op1, 1);
6812 STRIP_NOPS (arg0);
6813 STRIP_NOPS (arg1);
6815 if (TREE_CODE (arg0) == INTEGER_CST)
6817 s = arg0;
6818 delta = arg1;
6820 else if (TREE_CODE (arg1) == INTEGER_CST)
6822 s = arg1;
6823 delta = arg0;
6825 else
6826 return NULL_TREE;
6828 else if (TREE_CODE (op1) == INTEGER_CST)
6830 delta = op1;
6831 s = NULL_TREE;
6833 else
6835 /* Treat op1 as delta * 1. */
6836 delta = op1;
6837 s = integer_one_node;
6840 /* Handle &x.array the same as we would handle &x.array[0]. */
6841 if (TREE_CODE (ref) == COMPONENT_REF
6842 && TREE_CODE (TREE_TYPE (ref)) == ARRAY_TYPE)
6844 tree domain;
6846 /* Remember if this was a multi-dimensional array. */
6847 if (TREE_CODE (TREE_OPERAND (ref, 0)) == ARRAY_REF)
6848 mdim = true;
6850 domain = TYPE_DOMAIN (TREE_TYPE (ref));
6851 if (! domain)
6852 goto cont;
6853 itype = TREE_TYPE (domain);
6855 step = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (ref)));
6856 if (TREE_CODE (step) != INTEGER_CST)
6857 goto cont;
6859 if (s)
6861 if (! tree_int_cst_equal (step, s))
6862 goto cont;
6864 else
6866 /* See if delta is a multiple of step. */
6867 tree tmp = div_if_zero_remainder (EXACT_DIV_EXPR, op1, step);
6868 if (! tmp)
6869 goto cont;
6870 delta = tmp;
6873 /* Only fold here if we can verify we do not overflow one
6874 dimension of a multi-dimensional array. */
6875 if (mdim)
6877 tree tmp;
6879 if (!TYPE_MIN_VALUE (domain)
6880 || !TYPE_MAX_VALUE (domain)
6881 || TREE_CODE (TYPE_MAX_VALUE (domain)) != INTEGER_CST)
6882 goto cont;
6884 tmp = fold_binary_loc (loc, PLUS_EXPR, itype,
6885 fold_convert_loc (loc, itype,
6886 TYPE_MIN_VALUE (domain)),
6887 fold_convert_loc (loc, itype, delta));
6888 if (TREE_CODE (tmp) != INTEGER_CST
6889 || tree_int_cst_lt (TYPE_MAX_VALUE (domain), tmp))
6890 goto cont;
6893 /* We found a suitable component reference. */
6895 pref = TREE_OPERAND (addr, 0);
6896 ret = copy_node (pref);
6897 SET_EXPR_LOCATION (ret, loc);
6899 ret = build4_loc (loc, ARRAY_REF, TREE_TYPE (TREE_TYPE (ref)), ret,
6900 fold_build2_loc
6901 (loc, PLUS_EXPR, itype,
6902 fold_convert_loc (loc, itype,
6903 TYPE_MIN_VALUE
6904 (TYPE_DOMAIN (TREE_TYPE (ref)))),
6905 fold_convert_loc (loc, itype, delta)),
6906 NULL_TREE, NULL_TREE);
6907 return build_fold_addr_expr_loc (loc, ret);
6910 cont:
6912 for (;; ref = TREE_OPERAND (ref, 0))
6914 if (TREE_CODE (ref) == ARRAY_REF)
6916 tree domain;
6918 /* Remember if this was a multi-dimensional array. */
6919 if (TREE_CODE (TREE_OPERAND (ref, 0)) == ARRAY_REF)
6920 mdim = true;
6922 domain = TYPE_DOMAIN (TREE_TYPE (TREE_OPERAND (ref, 0)));
6923 if (! domain)
6924 continue;
6925 itype = TREE_TYPE (domain);
6927 step = array_ref_element_size (ref);
6928 if (TREE_CODE (step) != INTEGER_CST)
6929 continue;
6931 if (s)
6933 if (! tree_int_cst_equal (step, s))
6934 continue;
6936 else
6938 /* See if delta is a multiple of step. */
6939 tree tmp = div_if_zero_remainder (EXACT_DIV_EXPR, op1, step);
6940 if (! tmp)
6941 continue;
6942 delta = tmp;
6945 /* Only fold here if we can verify we do not overflow one
6946 dimension of a multi-dimensional array. */
6947 if (mdim)
6949 tree tmp;
6951 if (TREE_CODE (TREE_OPERAND (ref, 1)) != INTEGER_CST
6952 || !TYPE_MAX_VALUE (domain)
6953 || TREE_CODE (TYPE_MAX_VALUE (domain)) != INTEGER_CST)
6954 continue;
6956 tmp = fold_binary_loc (loc, PLUS_EXPR, itype,
6957 fold_convert_loc (loc, itype,
6958 TREE_OPERAND (ref, 1)),
6959 fold_convert_loc (loc, itype, delta));
6960 if (!tmp
6961 || TREE_CODE (tmp) != INTEGER_CST
6962 || tree_int_cst_lt (TYPE_MAX_VALUE (domain), tmp))
6963 continue;
6966 break;
6968 else
6969 mdim = false;
6971 if (!handled_component_p (ref))
6972 return NULL_TREE;
6975 /* We found the suitable array reference. So copy everything up to it,
6976 and replace the index. */
6978 pref = TREE_OPERAND (addr, 0);
6979 ret = copy_node (pref);
6980 SET_EXPR_LOCATION (ret, loc);
6981 pos = ret;
6983 while (pref != ref)
6985 pref = TREE_OPERAND (pref, 0);
6986 TREE_OPERAND (pos, 0) = copy_node (pref);
6987 pos = TREE_OPERAND (pos, 0);
6990 TREE_OPERAND (pos, 1)
6991 = fold_build2_loc (loc, PLUS_EXPR, itype,
6992 fold_convert_loc (loc, itype, TREE_OPERAND (pos, 1)),
6993 fold_convert_loc (loc, itype, delta));
6994 return fold_build1_loc (loc, ADDR_EXPR, TREE_TYPE (addr), ret);
6998 /* Fold A < X && A + 1 > Y to A < X && A >= Y. Normally A + 1 > Y
6999 means A >= Y && A != MAX, but in this case we know that
7000 A < X <= MAX. INEQ is A + 1 > Y, BOUND is A < X. */
7002 static tree
7003 fold_to_nonsharp_ineq_using_bound (location_t loc, tree ineq, tree bound)
7005 tree a, typea, type = TREE_TYPE (ineq), a1, diff, y;
7007 if (TREE_CODE (bound) == LT_EXPR)
7008 a = TREE_OPERAND (bound, 0);
7009 else if (TREE_CODE (bound) == GT_EXPR)
7010 a = TREE_OPERAND (bound, 1);
7011 else
7012 return NULL_TREE;
7014 typea = TREE_TYPE (a);
7015 if (!INTEGRAL_TYPE_P (typea)
7016 && !POINTER_TYPE_P (typea))
7017 return NULL_TREE;
7019 if (TREE_CODE (ineq) == LT_EXPR)
7021 a1 = TREE_OPERAND (ineq, 1);
7022 y = TREE_OPERAND (ineq, 0);
7024 else if (TREE_CODE (ineq) == GT_EXPR)
7026 a1 = TREE_OPERAND (ineq, 0);
7027 y = TREE_OPERAND (ineq, 1);
7029 else
7030 return NULL_TREE;
7032 if (TREE_TYPE (a1) != typea)
7033 return NULL_TREE;
7035 if (POINTER_TYPE_P (typea))
7037 /* Convert the pointer types into integer before taking the difference. */
7038 tree ta = fold_convert_loc (loc, ssizetype, a);
7039 tree ta1 = fold_convert_loc (loc, ssizetype, a1);
7040 diff = fold_binary_loc (loc, MINUS_EXPR, ssizetype, ta1, ta);
7042 else
7043 diff = fold_binary_loc (loc, MINUS_EXPR, typea, a1, a);
7045 if (!diff || !integer_onep (diff))
7046 return NULL_TREE;
7048 return fold_build2_loc (loc, GE_EXPR, type, a, y);
7051 /* Fold a sum or difference of at least one multiplication.
7052 Returns the folded tree or NULL if no simplification could be made. */
7054 static tree
7055 fold_plusminus_mult_expr (location_t loc, enum tree_code code, tree type,
7056 tree arg0, tree arg1)
7058 tree arg00, arg01, arg10, arg11;
7059 tree alt0 = NULL_TREE, alt1 = NULL_TREE, same;
7061 /* (A * C) +- (B * C) -> (A+-B) * C.
7062 (A * C) +- A -> A * (C+-1).
7063 We are most concerned about the case where C is a constant,
7064 but other combinations show up during loop reduction. Since
7065 it is not difficult, try all four possibilities. */
7067 if (TREE_CODE (arg0) == MULT_EXPR)
7069 arg00 = TREE_OPERAND (arg0, 0);
7070 arg01 = TREE_OPERAND (arg0, 1);
7072 else if (TREE_CODE (arg0) == INTEGER_CST)
7074 arg00 = build_one_cst (type);
7075 arg01 = arg0;
7077 else
7079 /* We cannot generate constant 1 for fract. */
7080 if (ALL_FRACT_MODE_P (TYPE_MODE (type)))
7081 return NULL_TREE;
7082 arg00 = arg0;
7083 arg01 = build_one_cst (type);
7085 if (TREE_CODE (arg1) == MULT_EXPR)
7087 arg10 = TREE_OPERAND (arg1, 0);
7088 arg11 = TREE_OPERAND (arg1, 1);
7090 else if (TREE_CODE (arg1) == INTEGER_CST)
7092 arg10 = build_one_cst (type);
7093 /* As we canonicalize A - 2 to A + -2, get rid of that sign for
7094 the purpose of this canonicalization. */
7095 if (TREE_INT_CST_HIGH (arg1) == -1
7096 && negate_expr_p (arg1)
7097 && code == PLUS_EXPR)
7099 arg11 = negate_expr (arg1);
7100 code = MINUS_EXPR;
7102 else
7103 arg11 = arg1;
7105 else
7107 /* We cannot generate constant 1 for fract. */
7108 if (ALL_FRACT_MODE_P (TYPE_MODE (type)))
7109 return NULL_TREE;
7110 arg10 = arg1;
7111 arg11 = build_one_cst (type);
7113 same = NULL_TREE;
7115 if (operand_equal_p (arg01, arg11, 0))
7116 same = arg01, alt0 = arg00, alt1 = arg10;
7117 else if (operand_equal_p (arg00, arg10, 0))
7118 same = arg00, alt0 = arg01, alt1 = arg11;
7119 else if (operand_equal_p (arg00, arg11, 0))
7120 same = arg00, alt0 = arg01, alt1 = arg10;
7121 else if (operand_equal_p (arg01, arg10, 0))
7122 same = arg01, alt0 = arg00, alt1 = arg11;
7124 /* No identical multiplicands; see if we can find a common
7125 power-of-two factor in non-power-of-two multiplies. This
7126 can help in multi-dimensional array access. */
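/* For example, i * 12 + j * 4 shares the power-of-two factor 4 and is
   rewritten as (i * 3 + j) * 4. */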
7127 else if (host_integerp (arg01, 0)
7128 && host_integerp (arg11, 0))
7130 HOST_WIDE_INT int01, int11, tmp;
7131 bool swap = false;
7132 tree maybe_same;
7133 int01 = TREE_INT_CST_LOW (arg01);
7134 int11 = TREE_INT_CST_LOW (arg11);
7136 /* Move min of absolute values to int11. */
7137 if (absu_hwi (int01) < absu_hwi (int11))
7139 tmp = int01, int01 = int11, int11 = tmp;
7140 alt0 = arg00, arg00 = arg10, arg10 = alt0;
7141 maybe_same = arg01;
7142 swap = true;
7144 else
7145 maybe_same = arg11;
7147 if (exact_log2 (absu_hwi (int11)) > 0 && int01 % int11 == 0
7148 /* The remainder should not be a constant, otherwise we
7149 end up folding i * 4 + 2 to (i * 2 + 1) * 2, which would
7150 increase the number of multiplications necessary. */
7151 && TREE_CODE (arg10) != INTEGER_CST)
7153 alt0 = fold_build2_loc (loc, MULT_EXPR, TREE_TYPE (arg00), arg00,
7154 build_int_cst (TREE_TYPE (arg00),
7155 int01 / int11));
7156 alt1 = arg10;
7157 same = maybe_same;
7158 if (swap)
7159 maybe_same = alt0, alt0 = alt1, alt1 = maybe_same;
7163 if (same)
7164 return fold_build2_loc (loc, MULT_EXPR, type,
7165 fold_build2_loc (loc, code, type,
7166 fold_convert_loc (loc, type, alt0),
7167 fold_convert_loc (loc, type, alt1)),
7168 fold_convert_loc (loc, type, same));
7170 return NULL_TREE;
7173 /* Subroutine of native_encode_expr. Encode the INTEGER_CST
7174 specified by EXPR into the buffer PTR of length LEN bytes.
7175 Return the number of bytes placed in the buffer, or zero
7176 upon failure. */
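/* For example, the 32-bit constant 0x11223344 is encoded on a
   little-endian target as the bytes { 0x44, 0x33, 0x22, 0x11 },
   and in the reverse order on a big-endian target. */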
7178 static int
7179 native_encode_int (const_tree expr, unsigned char *ptr, int len)
7181 tree type = TREE_TYPE (expr);
7182 int total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
7183 int byte, offset, word, words;
7184 unsigned char value;
7186 if (total_bytes > len)
7187 return 0;
7188 words = total_bytes / UNITS_PER_WORD;
7190 for (byte = 0; byte < total_bytes; byte++)
7192 int bitpos = byte * BITS_PER_UNIT;
7193 if (bitpos < HOST_BITS_PER_WIDE_INT)
7194 value = (unsigned char) (TREE_INT_CST_LOW (expr) >> bitpos);
7195 else
7196 value = (unsigned char) (TREE_INT_CST_HIGH (expr)
7197 >> (bitpos - HOST_BITS_PER_WIDE_INT));
7199 if (total_bytes > UNITS_PER_WORD)
7201 word = byte / UNITS_PER_WORD;
7202 if (WORDS_BIG_ENDIAN)
7203 word = (words - 1) - word;
7204 offset = word * UNITS_PER_WORD;
7205 if (BYTES_BIG_ENDIAN)
7206 offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
7207 else
7208 offset += byte % UNITS_PER_WORD;
7210 else
7211 offset = BYTES_BIG_ENDIAN ? (total_bytes - 1) - byte : byte;
7212 ptr[offset] = value;
7214 return total_bytes;
7218 /* Subroutine of native_encode_expr. Encode the REAL_CST
7219 specified by EXPR into the buffer PTR of length LEN bytes.
7220 Return the number of bytes placed in the buffer, or zero
7221 upon failure. */
7223 static int
7224 native_encode_real (const_tree expr, unsigned char *ptr, int len)
7226 tree type = TREE_TYPE (expr);
7227 int total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
7228 int byte, offset, word, words, bitpos;
7229 unsigned char value;
7231 /* There are always 32 bits in each long, no matter the size of
7232 the host's long. We handle floating point representations with
7233 up to 192 bits. */
7234 long tmp[6];
7236 if (total_bytes > len)
7237 return 0;
7238 words = (32 / BITS_PER_UNIT) / UNITS_PER_WORD;
7240 real_to_target (tmp, TREE_REAL_CST_PTR (expr), TYPE_MODE (type));
7242 for (bitpos = 0; bitpos < total_bytes * BITS_PER_UNIT;
7243 bitpos += BITS_PER_UNIT)
7245 byte = (bitpos / BITS_PER_UNIT) & 3;
7246 value = (unsigned char) (tmp[bitpos / 32] >> (bitpos & 31));
7248 if (UNITS_PER_WORD < 4)
7250 word = byte / UNITS_PER_WORD;
7251 if (WORDS_BIG_ENDIAN)
7252 word = (words - 1) - word;
7253 offset = word * UNITS_PER_WORD;
7254 if (BYTES_BIG_ENDIAN)
7255 offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
7256 else
7257 offset += byte % UNITS_PER_WORD;
7259 else
7260 offset = BYTES_BIG_ENDIAN ? 3 - byte : byte;
7261 ptr[offset + ((bitpos / BITS_PER_UNIT) & ~3)] = value;
7263 return total_bytes;
7266 /* Subroutine of native_encode_expr. Encode the COMPLEX_CST
7267 specified by EXPR into the buffer PTR of length LEN bytes.
7268 Return the number of bytes placed in the buffer, or zero
7269 upon failure. */
7271 static int
7272 native_encode_complex (const_tree expr, unsigned char *ptr, int len)
7274 int rsize, isize;
7275 tree part;
7277 part = TREE_REALPART (expr);
7278 rsize = native_encode_expr (part, ptr, len);
7279 if (rsize == 0)
7280 return 0;
7281 part = TREE_IMAGPART (expr);
7282 isize = native_encode_expr (part, ptr+rsize, len-rsize);
7283 if (isize != rsize)
7284 return 0;
7285 return rsize + isize;
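/* Illustrative sketch (not part of the original source): a complex
   constant is encoded as its real part immediately followed by its
   imaginary part, and the encoding fails unless both halves occupy the
   same number of bytes.  ENCODE_PART is a hypothetical callback
   (0 selects the real part, 1 the imaginary part):  */
static int
example_encode_pair (int (*encode_part) (int, unsigned char *, int),
                     unsigned char *ptr, int len)
{
  int rsize, isize;
  rsize = encode_part (0, ptr, len);
  if (rsize == 0)
    return 0;
  isize = encode_part (1, ptr + rsize, len - rsize);
  if (isize != rsize)
    return 0;
  return rsize + isize;
}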
7289 /* Subroutine of native_encode_expr. Encode the VECTOR_CST
7290 specified by EXPR into the buffer PTR of length LEN bytes.
7291 Return the number of bytes placed in the buffer, or zero
7292 upon failure. */
7294 static int
7295 native_encode_vector (const_tree expr, unsigned char *ptr, int len)
7297 unsigned i, count;
7298 int size, offset;
7299 tree itype, elem;
7301 offset = 0;
7302 count = VECTOR_CST_NELTS (expr);
7303 itype = TREE_TYPE (TREE_TYPE (expr));
7304 size = GET_MODE_SIZE (TYPE_MODE (itype));
7305 for (i = 0; i < count; i++)
7307 elem = VECTOR_CST_ELT (expr, i);
7308 if (native_encode_expr (elem, ptr+offset, len-offset) != size)
7309 return 0;
7310 offset += size;
7312 return offset;
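/* Illustrative sketch (not part of the original source): vector
   encoding is the element encoder applied COUNT times at a fixed
   SIZE-byte stride, failing if any element does not fill exactly SIZE
   bytes.  ENCODE_ELEM is a hypothetical per-element callback:  */
static int
example_encode_elems (int (*encode_elem) (unsigned, unsigned char *, int),
                      unsigned count, int size, unsigned char *ptr, int len)
{
  unsigned i;
  int offset = 0;
  for (i = 0; i < count; i++)
    {
      if (encode_elem (i, ptr + offset, len - offset) != size)
        return 0;
      offset += size;
    }
  return offset;
}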
7316 /* Subroutine of native_encode_expr. Encode the STRING_CST
7317 specified by EXPR into the buffer PTR of length LEN bytes.
7318 Return the number of bytes placed in the buffer, or zero
7319 upon failure. */
7321 static int
7322 native_encode_string (const_tree expr, unsigned char *ptr, int len)
7324 tree type = TREE_TYPE (expr);
7325 HOST_WIDE_INT total_bytes;
7327 if (TREE_CODE (type) != ARRAY_TYPE
7328 || TREE_CODE (TREE_TYPE (type)) != INTEGER_TYPE
7329 || GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (type))) != BITS_PER_UNIT
7330 || !host_integerp (TYPE_SIZE_UNIT (type), 0))
7331 return 0;
7332 total_bytes = tree_low_cst (TYPE_SIZE_UNIT (type), 0);
7333 if (total_bytes > len)
7334 return 0;
7335 if (TREE_STRING_LENGTH (expr) < total_bytes)
7337 memcpy (ptr, TREE_STRING_POINTER (expr), TREE_STRING_LENGTH (expr));
7338 memset (ptr + TREE_STRING_LENGTH (expr), 0,
7339 total_bytes - TREE_STRING_LENGTH (expr));
7341 else
7342 memcpy (ptr, TREE_STRING_POINTER (expr), total_bytes);
7343 return total_bytes;
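/* Illustrative sketch (not part of the original source): when the
   STRING_CST is shorter than its array type, the tail is zero-padded,
   which is why e.g. char buf[8] = "abc" encodes as
   61 62 63 00 00 00 00 00.  Assuming memcpy/memset are available:  */
static int
example_encode_padded (const char *str, int str_len, unsigned char *ptr,
                       int total_bytes, int len)
{
  if (total_bytes > len)
    return 0;
  if (str_len < total_bytes)
    {
      memcpy (ptr, str, str_len);
      memset (ptr + str_len, 0, total_bytes - str_len);
    }
  else
    memcpy (ptr, str, total_bytes);
  return total_bytes;
}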
7347 /* Subroutine of fold_view_convert_expr. Encode the INTEGER_CST,
7348 REAL_CST, COMPLEX_CST or VECTOR_CST specified by EXPR into the
7349 buffer PTR of length LEN bytes. Return the number of bytes
7350 placed in the buffer, or zero upon failure. */
7352 int
7353 native_encode_expr (const_tree expr, unsigned char *ptr, int len)
7355 switch (TREE_CODE (expr))
7357 case INTEGER_CST:
7358 return native_encode_int (expr, ptr, len);
7360 case REAL_CST:
7361 return native_encode_real (expr, ptr, len);
7363 case COMPLEX_CST:
7364 return native_encode_complex (expr, ptr, len);
7366 case VECTOR_CST:
7367 return native_encode_vector (expr, ptr, len);
7369 case STRING_CST:
7370 return native_encode_string (expr, ptr, len);
7372 default:
7373 return 0;
7378 /* Subroutine of native_interpret_expr. Interpret the contents of
7379 the buffer PTR of length LEN as an INTEGER_CST of type TYPE.
7380 If the buffer cannot be interpreted, return NULL_TREE. */
7382 static tree
7383 native_interpret_int (tree type, const unsigned char *ptr, int len)
7385 int total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
7386 int byte, offset, word, words;
7387 unsigned char value;
7388 double_int result;
7390 if (total_bytes > len)
7391 return NULL_TREE;
7392 if (total_bytes * BITS_PER_UNIT > HOST_BITS_PER_DOUBLE_INT)
7393 return NULL_TREE;
7395 result = double_int_zero;
7396 words = total_bytes / UNITS_PER_WORD;
7398 for (byte = 0; byte < total_bytes; byte++)
7400 int bitpos = byte * BITS_PER_UNIT;
7401 if (total_bytes > UNITS_PER_WORD)
7403 word = byte / UNITS_PER_WORD;
7404 if (WORDS_BIG_ENDIAN)
7405 word = (words - 1) - word;
7406 offset = word * UNITS_PER_WORD;
7407 if (BYTES_BIG_ENDIAN)
7408 offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
7409 else
7410 offset += byte % UNITS_PER_WORD;
7412 else
7413 offset = BYTES_BIG_ENDIAN ? (total_bytes - 1) - byte : byte;
7414 value = ptr[offset];
7416 if (bitpos < HOST_BITS_PER_WIDE_INT)
7417 result.low |= (unsigned HOST_WIDE_INT) value << bitpos;
7418 else
7419 result.high |= (unsigned HOST_WIDE_INT) value
7420 << (bitpos - HOST_BITS_PER_WIDE_INT);
7423 return double_int_to_tree (type, result);
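/* Illustrative sketch (not part of the original source): interpretation
   inverts native_encode_int; each byte is shifted back to its bit
   position and OR-ed into the result.  For a hypothetical 32-bit value,
   with BIG_ENDIAN_P standing in for the target's BYTES_BIG_ENDIAN:  */
static unsigned int
example_interpret_u32 (const unsigned char *ptr, int big_endian_p)
{
  unsigned int result = 0;
  int byte;
  for (byte = 0; byte < 4; byte++)
    result |= (unsigned int) ptr[big_endian_p ? 3 - byte : byte]
              << (byte * 8);
  return result;
}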
7427 /* Subroutine of native_interpret_expr. Interpret the contents of
7428 the buffer PTR of length LEN as a REAL_CST of type TYPE.
7429 If the buffer cannot be interpreted, return NULL_TREE. */
7431 static tree
7432 native_interpret_real (tree type, const unsigned char *ptr, int len)
7434 enum machine_mode mode = TYPE_MODE (type);
7435 int total_bytes = GET_MODE_SIZE (mode);
7436 int byte, offset, word, words, bitpos;
7437 unsigned char value;
7438 /* There are always 32 bits in each long, no matter the size of
7439 the host's long. We handle floating point representations with
7440 up to 192 bits. */
7441 REAL_VALUE_TYPE r;
7442 long tmp[6];
7444 total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
7445 if (total_bytes > len || total_bytes > 24)
7446 return NULL_TREE;
7447 words = (32 / BITS_PER_UNIT) / UNITS_PER_WORD;
7449 memset (tmp, 0, sizeof (tmp));
7450 for (bitpos = 0; bitpos < total_bytes * BITS_PER_UNIT;
7451 bitpos += BITS_PER_UNIT)
7453 byte = (bitpos / BITS_PER_UNIT) & 3;
7454 if (UNITS_PER_WORD < 4)
7456 word = byte / UNITS_PER_WORD;
7457 if (WORDS_BIG_ENDIAN)
7458 word = (words - 1) - word;
7459 offset = word * UNITS_PER_WORD;
7460 if (BYTES_BIG_ENDIAN)
7461 offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
7462 else
7463 offset += byte % UNITS_PER_WORD;
7465 else
7466 offset = BYTES_BIG_ENDIAN ? 3 - byte : byte;
7467 value = ptr[offset + ((bitpos / BITS_PER_UNIT) & ~3)];
7469 tmp[bitpos / 32] |= (unsigned long)value << (bitpos & 31);
7472 real_from_target (&r, tmp, mode);
7473 return build_real (type, r);
7477 /* Subroutine of native_interpret_expr. Interpret the contents of
7478 the buffer PTR of length LEN as a COMPLEX_CST of type TYPE.
7479 If the buffer cannot be interpreted, return NULL_TREE. */
7481 static tree
7482 native_interpret_complex (tree type, const unsigned char *ptr, int len)
7484 tree etype, rpart, ipart;
7485 int size;
7487 etype = TREE_TYPE (type);
7488 size = GET_MODE_SIZE (TYPE_MODE (etype));
7489 if (size * 2 > len)
7490 return NULL_TREE;
7491 rpart = native_interpret_expr (etype, ptr, size);
7492 if (!rpart)
7493 return NULL_TREE;
7494 ipart = native_interpret_expr (etype, ptr+size, size);
7495 if (!ipart)
7496 return NULL_TREE;
7497 return build_complex (type, rpart, ipart);
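/* Illustrative sketch (not part of the original source): decoding a
   complex value splits the buffer into two equal element-sized halves
   and fails if either half cannot be interpreted.  INTERPRET_PART is a
   hypothetical callback returning nonzero on success:  */
static int
example_interpret_pair (int (*interpret_part) (const unsigned char *, int),
                        const unsigned char *ptr, int size, int len)
{
  if (size * 2 > len)
    return 0;
  return interpret_part (ptr, size)             /* real part */
         && interpret_part (ptr + size, size);  /* imaginary part */
}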
7501 /* Subroutine of native_interpret_expr. Interpret the contents of
7502 the buffer PTR of length LEN as a VECTOR_CST of type TYPE.
7503 If the buffer cannot be interpreted, return NULL_TREE. */
7505 static tree
7506 native_interpret_vector (tree type, const unsigned char *ptr, int len)
7508 tree etype, elem;
7509 int i, size, count;
7510 tree *elements;
7512 etype = TREE_TYPE (type);
7513 size = GET_MODE_SIZE (TYPE_MODE (etype));
7514 count = TYPE_VECTOR_SUBPARTS (type);
7515 if (size * count > len)
7516 return NULL_TREE;
7518 elements = XALLOCAVEC (tree, count);
7519 for (i = count - 1; i >= 0; i--)
7521 elem = native_interpret_expr (etype, ptr+(i*size), size);
7522 if (!elem)
7523 return NULL_TREE;
7524 elements[i] = elem;
7526 return build_vector (type, elements);
7530 /* Subroutine of fold_view_convert_expr. Interpret the contents of
7531 the buffer PTR of length LEN as a constant of type TYPE. For
7532 INTEGRAL_TYPE_P we return an INTEGER_CST, for SCALAR_FLOAT_TYPE_P
7533 we return a REAL_CST, etc... If the buffer cannot be interpreted,
7534 return NULL_TREE. */
7536 tree
7537 native_interpret_expr (tree type, const unsigned char *ptr, int len)
7539 switch (TREE_CODE (type))
7541 case INTEGER_TYPE:
7542 case ENUMERAL_TYPE:
7543 case BOOLEAN_TYPE:
7544 case POINTER_TYPE:
7545 case REFERENCE_TYPE:
7546 return native_interpret_int (type, ptr, len);
7548 case REAL_TYPE:
7549 return native_interpret_real (type, ptr, len);
7551 case COMPLEX_TYPE:
7552 return native_interpret_complex (type, ptr, len);
7554 case VECTOR_TYPE:
7555 return native_interpret_vector (type, ptr, len);
7557 default:
7558 return NULL_TREE;
7562 /* Returns true if we can interpret the contents of a native encoding
7563 as TYPE. */
7565 static bool
7566 can_native_interpret_type_p (tree type)
7568 switch (TREE_CODE (type))
7570 case INTEGER_TYPE:
7571 case ENUMERAL_TYPE:
7572 case BOOLEAN_TYPE:
7573 case POINTER_TYPE:
7574 case REFERENCE_TYPE:
7575 case REAL_TYPE:
7576 case COMPLEX_TYPE:
7577 case VECTOR_TYPE:
7578 return true;
7579 default:
7580 return false;
7584 /* Fold a VIEW_CONVERT_EXPR of a constant expression EXPR to type
7585 TYPE at compile-time. If we're unable to perform the conversion
7586 return NULL_TREE. */
7588 static tree
7589 fold_view_convert_expr (tree type, tree expr)
7591 /* We support up to 512-bit values (for V8DFmode). */
7592 unsigned char buffer[64];
7593 int len;
7595 /* Check that the host and target are sane. */
7596 if (CHAR_BIT != 8 || BITS_PER_UNIT != 8)
7597 return NULL_TREE;
7599 len = native_encode_expr (expr, buffer, sizeof (buffer));
7600 if (len == 0)
7601 return NULL_TREE;
7603 return native_interpret_expr (type, buffer, len);
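/* Illustrative sketch (not part of the original source): folding a
   VIEW_CONVERT_EXPR amounts to serializing the constant into a byte
   buffer and reading the bytes back as the new type.  The same round
   trip for a host float, assuming sizeof (float) == sizeof (unsigned
   int) and that memcpy is available:  */
static unsigned int
example_view_convert (float f)
{
  unsigned char buffer[sizeof (float)];
  unsigned int u;
  memcpy (buffer, &f, sizeof (f));      /* cf. native_encode_expr */
  memcpy (&u, buffer, sizeof (u));      /* cf. native_interpret_expr */
  return u;
}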
7606 /* Build an expression for the address of T. Folds away INDIRECT_REF
7607 to avoid confusing the gimplify process. */
7609 tree
7610 build_fold_addr_expr_with_type_loc (location_t loc, tree t, tree ptrtype)
7612 /* The size of the object is not relevant when talking about its address. */
7613 if (TREE_CODE (t) == WITH_SIZE_EXPR)
7614 t = TREE_OPERAND (t, 0);
7616 if (TREE_CODE (t) == INDIRECT_REF)
7618 t = TREE_OPERAND (t, 0);
7620 if (TREE_TYPE (t) != ptrtype)
7621 t = build1_loc (loc, NOP_EXPR, ptrtype, t);
7623 else if (TREE_CODE (t) == MEM_REF
7624 && integer_zerop (TREE_OPERAND (t, 1)))
7625 return TREE_OPERAND (t, 0);
7626 else if (TREE_CODE (t) == MEM_REF
7627 && TREE_CODE (TREE_OPERAND (t, 0)) == INTEGER_CST)
7628 return fold_binary (POINTER_PLUS_EXPR, ptrtype,
7629 TREE_OPERAND (t, 0),
7630 convert_to_ptrofftype (TREE_OPERAND (t, 1)));
7631 else if (TREE_CODE (t) == VIEW_CONVERT_EXPR)
7633 t = build_fold_addr_expr_loc (loc, TREE_OPERAND (t, 0));
7635 if (TREE_TYPE (t) != ptrtype)
7636 t = fold_convert_loc (loc, ptrtype, t);
7638 else
7639 t = build1_loc (loc, ADDR_EXPR, ptrtype, t);
7641 return t;
7644 /* Build an expression for the address of T. */
7646 tree
7647 build_fold_addr_expr_loc (location_t loc, tree t)
7649 tree ptrtype = build_pointer_type (TREE_TYPE (t));
7651 return build_fold_addr_expr_with_type_loc (loc, t, ptrtype);
7654 static bool vec_cst_ctor_to_array (tree, tree *);
7656 /* Fold a unary expression of code CODE and type TYPE with operand
7657 OP0. Return the folded expression if folding is successful.
7658 Otherwise, return NULL_TREE. */
7660 tree
7661 fold_unary_loc (location_t loc, enum tree_code code, tree type, tree op0)
7663 tree tem;
7664 tree arg0;
7665 enum tree_code_class kind = TREE_CODE_CLASS (code);
7667 gcc_assert (IS_EXPR_CODE_CLASS (kind)
7668 && TREE_CODE_LENGTH (code) == 1);
7670 arg0 = op0;
7671 if (arg0)
7673 if (CONVERT_EXPR_CODE_P (code)
7674 || code == FLOAT_EXPR || code == ABS_EXPR || code == NEGATE_EXPR)
7676 /* Don't use STRIP_NOPS, because signedness of argument type
7677 matters. */
7678 STRIP_SIGN_NOPS (arg0);
7680 else
7682 /* Strip any conversions that don't change the mode. This
7683 is safe for every expression, except for a comparison
7684 expression because its signedness is derived from its
7685 operands.
7687 Note that this is done as an internal manipulation within
7688 the constant folder, in order to find the simplest
7689 representation of the arguments so that their form can be
7690 studied. In any case, the appropriate type conversions
7691 should be put back in the tree that will get out of the
7692 constant folder. */
7693 STRIP_NOPS (arg0);
7697 if (TREE_CODE_CLASS (code) == tcc_unary)
7699 if (TREE_CODE (arg0) == COMPOUND_EXPR)
7700 return build2 (COMPOUND_EXPR, type, TREE_OPERAND (arg0, 0),
7701 fold_build1_loc (loc, code, type,
7702 fold_convert_loc (loc, TREE_TYPE (op0),
7703 TREE_OPERAND (arg0, 1))));
7704 else if (TREE_CODE (arg0) == COND_EXPR)
7706 tree arg01 = TREE_OPERAND (arg0, 1);
7707 tree arg02 = TREE_OPERAND (arg0, 2);
7708 if (! VOID_TYPE_P (TREE_TYPE (arg01)))
7709 arg01 = fold_build1_loc (loc, code, type,
7710 fold_convert_loc (loc,
7711 TREE_TYPE (op0), arg01));
7712 if (! VOID_TYPE_P (TREE_TYPE (arg02)))
7713 arg02 = fold_build1_loc (loc, code, type,
7714 fold_convert_loc (loc,
7715 TREE_TYPE (op0), arg02));
7716 tem = fold_build3_loc (loc, COND_EXPR, type, TREE_OPERAND (arg0, 0),
7717 arg01, arg02);
7719 /* If this was a conversion, and all we did was to move it
7720 inside the COND_EXPR, bring it back out. But leave it if
7721 it is a conversion from integer to integer and the
7722 result precision is no wider than a word since such a
7723 conversion is cheap and may be optimized away by combine,
7724 while it couldn't if it were outside the COND_EXPR. Then return
7725 so we don't get into an infinite recursion loop taking the
7726 conversion out and then back in. */
7728 if ((CONVERT_EXPR_CODE_P (code)
7729 || code == NON_LVALUE_EXPR)
7730 && TREE_CODE (tem) == COND_EXPR
7731 && TREE_CODE (TREE_OPERAND (tem, 1)) == code
7732 && TREE_CODE (TREE_OPERAND (tem, 2)) == code
7733 && ! VOID_TYPE_P (TREE_OPERAND (tem, 1))
7734 && ! VOID_TYPE_P (TREE_OPERAND (tem, 2))
7735 && (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 1), 0))
7736 == TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 2), 0)))
7737 && (! (INTEGRAL_TYPE_P (TREE_TYPE (tem))
7738 && (INTEGRAL_TYPE_P
7739 (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 1), 0))))
7740 && TYPE_PRECISION (TREE_TYPE (tem)) <= BITS_PER_WORD)
7741 || flag_syntax_only))
7742 tem = build1_loc (loc, code, type,
7743 build3 (COND_EXPR,
7744 TREE_TYPE (TREE_OPERAND
7745 (TREE_OPERAND (tem, 1), 0)),
7746 TREE_OPERAND (tem, 0),
7747 TREE_OPERAND (TREE_OPERAND (tem, 1), 0),
7748 TREE_OPERAND (TREE_OPERAND (tem, 2),
7749 0)));
7750 return tem;
7754 switch (code)
7756 case PAREN_EXPR:
7757 /* Re-association barriers around constants and other re-association
7758 barriers can be removed. */
7759 if (CONSTANT_CLASS_P (op0)
7760 || TREE_CODE (op0) == PAREN_EXPR)
7761 return fold_convert_loc (loc, type, op0);
7762 return NULL_TREE;
7764 CASE_CONVERT:
7765 case FLOAT_EXPR:
7766 case FIX_TRUNC_EXPR:
7767 if (TREE_TYPE (op0) == type)
7768 return op0;
7770 if (COMPARISON_CLASS_P (op0))
7772 /* If we have (type) (a CMP b) and type is an integral type, return
7773 new expression involving the new type. Canonicalize
7774 (type) (a CMP b) to (a CMP b) ? (type) true : (type) false for
7775 non-integral type.
7776 Do not fold the result, as that would not simplify further;
7777 folding again results in recursion. */
7778 if (TREE_CODE (type) == BOOLEAN_TYPE)
7779 return build2_loc (loc, TREE_CODE (op0), type,
7780 TREE_OPERAND (op0, 0),
7781 TREE_OPERAND (op0, 1));
7782 else if (!INTEGRAL_TYPE_P (type))
7783 return build3_loc (loc, COND_EXPR, type, op0,
7784 constant_boolean_node (true, type),
7785 constant_boolean_node (false, type));
7788 /* Handle cases of two conversions in a row. */
7789 if (CONVERT_EXPR_P (op0))
7791 tree inside_type = TREE_TYPE (TREE_OPERAND (op0, 0));
7792 tree inter_type = TREE_TYPE (op0);
7793 int inside_int = INTEGRAL_TYPE_P (inside_type);
7794 int inside_ptr = POINTER_TYPE_P (inside_type);
7795 int inside_float = FLOAT_TYPE_P (inside_type);
7796 int inside_vec = TREE_CODE (inside_type) == VECTOR_TYPE;
7797 unsigned int inside_prec = TYPE_PRECISION (inside_type);
7798 int inside_unsignedp = TYPE_UNSIGNED (inside_type);
7799 int inter_int = INTEGRAL_TYPE_P (inter_type);
7800 int inter_ptr = POINTER_TYPE_P (inter_type);
7801 int inter_float = FLOAT_TYPE_P (inter_type);
7802 int inter_vec = TREE_CODE (inter_type) == VECTOR_TYPE;
7803 unsigned int inter_prec = TYPE_PRECISION (inter_type);
7804 int inter_unsignedp = TYPE_UNSIGNED (inter_type);
7805 int final_int = INTEGRAL_TYPE_P (type);
7806 int final_ptr = POINTER_TYPE_P (type);
7807 int final_float = FLOAT_TYPE_P (type);
7808 int final_vec = TREE_CODE (type) == VECTOR_TYPE;
7809 unsigned int final_prec = TYPE_PRECISION (type);
7810 int final_unsignedp = TYPE_UNSIGNED (type);
7812 /* In addition to the cases of two conversions in a row
7813 handled below, if we are converting something to its own
7814 type via an object of identical or wider precision, neither
7815 conversion is needed. */
7816 if (TYPE_MAIN_VARIANT (inside_type) == TYPE_MAIN_VARIANT (type)
7817 && (((inter_int || inter_ptr) && final_int)
7818 || (inter_float && final_float))
7819 && inter_prec >= final_prec)
7820 return fold_build1_loc (loc, code, type, TREE_OPERAND (op0, 0));
7822 /* Likewise, if the intermediate and initial types are either both
7823 float or both integer, we don't need the middle conversion if the
7824 former is wider than the latter and doesn't change the signedness
7825 (for integers). Avoid this if the final type is a pointer since
7826 then we sometimes need the middle conversion. Likewise if the
7827 final type has a precision not equal to the size of its mode. */
7828 if (((inter_int && inside_int)
7829 || (inter_float && inside_float)
7830 || (inter_vec && inside_vec))
7831 && inter_prec >= inside_prec
7832 && (inter_float || inter_vec
7833 || inter_unsignedp == inside_unsignedp)
7834 && ! (final_prec != GET_MODE_PRECISION (TYPE_MODE (type))
7835 && TYPE_MODE (type) == TYPE_MODE (inter_type))
7836 && ! final_ptr
7837 && (! final_vec || inter_prec == inside_prec))
7838 return fold_build1_loc (loc, code, type, TREE_OPERAND (op0, 0));
7840 /* If we have a sign-extension of a zero-extended value, we can
7841 replace that by a single zero-extension. Likewise if the
7842 final conversion does not change precision we can drop the
7843 intermediate conversion. */
7844 if (inside_int && inter_int && final_int
7845 && ((inside_prec < inter_prec && inter_prec < final_prec
7846 && inside_unsignedp && !inter_unsignedp)
7847 || final_prec == inter_prec))
7848 return fold_build1_loc (loc, code, type, TREE_OPERAND (op0, 0));
7850 /* Two conversions in a row are not needed unless:
7851 - some conversion is floating-point (overstrict for now), or
7852 - some conversion is a vector (overstrict for now), or
7853 - the intermediate type is narrower than both initial and
7854 final, or
7855 - the intermediate type and innermost type differ in signedness,
7856 and the outermost type is wider than the intermediate, or
7857 - the initial type is a pointer type and the precisions of the
7858 intermediate and final types differ, or
7859 - the final type is a pointer type and the precisions of the
7860 initial and intermediate types differ. */
7861 if (! inside_float && ! inter_float && ! final_float
7862 && ! inside_vec && ! inter_vec && ! final_vec
7863 && (inter_prec >= inside_prec || inter_prec >= final_prec)
7864 && ! (inside_int && inter_int
7865 && inter_unsignedp != inside_unsignedp
7866 && inter_prec < final_prec)
7867 && ((inter_unsignedp && inter_prec > inside_prec)
7868 == (final_unsignedp && final_prec > inter_prec))
7869 && ! (inside_ptr && inter_prec != final_prec)
7870 && ! (final_ptr && inside_prec != inter_prec)
7871 && ! (final_prec != GET_MODE_PRECISION (TYPE_MODE (type))
7872 && TYPE_MODE (type) == TYPE_MODE (inter_type)))
7873 return fold_build1_loc (loc, code, type, TREE_OPERAND (op0, 0));
7876 /* Handle (T *)&A.B.C for A being of type T and B and C
7877 living at offset zero. This occurs frequently in
7878 C++ upcasting and then accessing the base. */
7879 if (TREE_CODE (op0) == ADDR_EXPR
7880 && POINTER_TYPE_P (type)
7881 && handled_component_p (TREE_OPERAND (op0, 0)))
7883 HOST_WIDE_INT bitsize, bitpos;
7884 tree offset;
7885 enum machine_mode mode;
7886 int unsignedp, volatilep;
7887 tree base = TREE_OPERAND (op0, 0);
7888 base = get_inner_reference (base, &bitsize, &bitpos, &offset,
7889 &mode, &unsignedp, &volatilep, false);
7890 /* If the reference was to a (constant) zero offset, we can use
7891 the address of the base if it has the same base type
7892 as the result type and the pointer type is unqualified. */
7893 if (! offset && bitpos == 0
7894 && (TYPE_MAIN_VARIANT (TREE_TYPE (type))
7895 == TYPE_MAIN_VARIANT (TREE_TYPE (base)))
7896 && TYPE_QUALS (type) == TYPE_UNQUALIFIED)
7897 return fold_convert_loc (loc, type,
7898 build_fold_addr_expr_loc (loc, base));
7901 if (TREE_CODE (op0) == MODIFY_EXPR
7902 && TREE_CONSTANT (TREE_OPERAND (op0, 1))
7903 /* Detect assigning a bitfield. */
7904 && !(TREE_CODE (TREE_OPERAND (op0, 0)) == COMPONENT_REF
7905 && DECL_BIT_FIELD
7906 (TREE_OPERAND (TREE_OPERAND (op0, 0), 1))))
7908 /* Don't leave an assignment inside a conversion
7909 unless assigning a bitfield. */
7910 tem = fold_build1_loc (loc, code, type, TREE_OPERAND (op0, 1));
7911 /* First do the assignment, then return converted constant. */
7912 tem = build2_loc (loc, COMPOUND_EXPR, TREE_TYPE (tem), op0, tem);
7913 TREE_NO_WARNING (tem) = 1;
7914 TREE_USED (tem) = 1;
7915 return tem;
7918 /* Convert (T)(x & c) into (T)x & (T)c, if c is an integer
7919 constants (if x has signed type, the sign bit cannot be set
7920 in c). This folds extension into the BIT_AND_EXPR.
7921 ??? We don't do it for BOOLEAN_TYPE or ENUMERAL_TYPE because they
7922 very likely don't have maximal range for their precision and this
7923 transformation effectively doesn't preserve non-maximal ranges. */
7924 if (TREE_CODE (type) == INTEGER_TYPE
7925 && TREE_CODE (op0) == BIT_AND_EXPR
7926 && TREE_CODE (TREE_OPERAND (op0, 1)) == INTEGER_CST)
7928 tree and_expr = op0;
7929 tree and0 = TREE_OPERAND (and_expr, 0);
7930 tree and1 = TREE_OPERAND (and_expr, 1);
7931 int change = 0;
7933 if (TYPE_UNSIGNED (TREE_TYPE (and_expr))
7934 || (TYPE_PRECISION (type)
7935 <= TYPE_PRECISION (TREE_TYPE (and_expr))))
7936 change = 1;
7937 else if (TYPE_PRECISION (TREE_TYPE (and1))
7938 <= HOST_BITS_PER_WIDE_INT
7939 && host_integerp (and1, 1))
7941 unsigned HOST_WIDE_INT cst;
7943 cst = tree_low_cst (and1, 1);
7944 cst &= (HOST_WIDE_INT) -1
7945 << (TYPE_PRECISION (TREE_TYPE (and1)) - 1);
7946 change = (cst == 0);
7947 #ifdef LOAD_EXTEND_OP
7948 if (change
7949 && !flag_syntax_only
7950 && (LOAD_EXTEND_OP (TYPE_MODE (TREE_TYPE (and0)))
7951 == ZERO_EXTEND))
7953 tree uns = unsigned_type_for (TREE_TYPE (and0));
7954 and0 = fold_convert_loc (loc, uns, and0);
7955 and1 = fold_convert_loc (loc, uns, and1);
7957 #endif
7959 if (change)
7961 tem = force_fit_type_double (type, tree_to_double_int (and1),
7962 0, TREE_OVERFLOW (and1));
7963 return fold_build2_loc (loc, BIT_AND_EXPR, type,
7964 fold_convert_loc (loc, type, and0), tem);
7968 /* Convert (T1)(X p+ Y) into ((T1)X p+ Y), for pointer type,
7969 when one of the new casts will fold away. Conservatively we assume
7970 that this happens when X or Y is NOP_EXPR or Y is INTEGER_CST. */
7971 if (POINTER_TYPE_P (type)
7972 && TREE_CODE (arg0) == POINTER_PLUS_EXPR
7973 && (!TYPE_RESTRICT (type) || TYPE_RESTRICT (TREE_TYPE (arg0)))
7974 && (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
7975 || TREE_CODE (TREE_OPERAND (arg0, 0)) == NOP_EXPR
7976 || TREE_CODE (TREE_OPERAND (arg0, 1)) == NOP_EXPR))
7978 tree arg00 = TREE_OPERAND (arg0, 0);
7979 tree arg01 = TREE_OPERAND (arg0, 1);
7981 return fold_build_pointer_plus_loc
7982 (loc, fold_convert_loc (loc, type, arg00), arg01);
7985 /* Convert (T1)(~(T2)X) into ~(T1)X if T1 and T2 are integral types
7986 of the same precision, and X is an integer type not narrower than
7987 types T1 or T2, i.e. the cast (T2)X isn't an extension. */
7988 if (INTEGRAL_TYPE_P (type)
7989 && TREE_CODE (op0) == BIT_NOT_EXPR
7990 && INTEGRAL_TYPE_P (TREE_TYPE (op0))
7991 && CONVERT_EXPR_P (TREE_OPERAND (op0, 0))
7992 && TYPE_PRECISION (type) == TYPE_PRECISION (TREE_TYPE (op0)))
7994 tem = TREE_OPERAND (TREE_OPERAND (op0, 0), 0);
7995 if (INTEGRAL_TYPE_P (TREE_TYPE (tem))
7996 && TYPE_PRECISION (type) <= TYPE_PRECISION (TREE_TYPE (tem)))
7997 return fold_build1_loc (loc, BIT_NOT_EXPR, type,
7998 fold_convert_loc (loc, type, tem));
8001 /* Convert (T1)(X * Y) into (T1)X * (T1)Y if T1 is narrower than the
8002 type of X and Y (integer types only). */
8003 if (INTEGRAL_TYPE_P (type)
8004 && TREE_CODE (op0) == MULT_EXPR
8005 && INTEGRAL_TYPE_P (TREE_TYPE (op0))
8006 && TYPE_PRECISION (type) < TYPE_PRECISION (TREE_TYPE (op0)))
8008 /* Be careful not to introduce new overflows. */
8009 tree mult_type;
8010 if (TYPE_OVERFLOW_WRAPS (type))
8011 mult_type = type;
8012 else
8013 mult_type = unsigned_type_for (type);
8015 if (TYPE_PRECISION (mult_type) < TYPE_PRECISION (TREE_TYPE (op0)))
8017 tem = fold_build2_loc (loc, MULT_EXPR, mult_type,
8018 fold_convert_loc (loc, mult_type,
8019 TREE_OPERAND (op0, 0)),
8020 fold_convert_loc (loc, mult_type,
8021 TREE_OPERAND (op0, 1)));
8022 return fold_convert_loc (loc, type, tem);
8026 tem = fold_convert_const (code, type, op0);
8027 return tem ? tem : NULL_TREE;
8029 case ADDR_SPACE_CONVERT_EXPR:
8030 if (integer_zerop (arg0))
8031 return fold_convert_const (code, type, arg0);
8032 return NULL_TREE;
8034 case FIXED_CONVERT_EXPR:
8035 tem = fold_convert_const (code, type, arg0);
8036 return tem ? tem : NULL_TREE;
8038 case VIEW_CONVERT_EXPR:
8039 if (TREE_TYPE (op0) == type)
8040 return op0;
8041 if (TREE_CODE (op0) == VIEW_CONVERT_EXPR)
8042 return fold_build1_loc (loc, VIEW_CONVERT_EXPR,
8043 type, TREE_OPERAND (op0, 0));
8044 if (TREE_CODE (op0) == MEM_REF)
8045 return fold_build2_loc (loc, MEM_REF, type,
8046 TREE_OPERAND (op0, 0), TREE_OPERAND (op0, 1));
8048 /* For integral conversions with the same precision or pointer
8049 conversions use a NOP_EXPR instead. */
8050 if ((INTEGRAL_TYPE_P (type)
8051 || POINTER_TYPE_P (type))
8052 && (INTEGRAL_TYPE_P (TREE_TYPE (op0))
8053 || POINTER_TYPE_P (TREE_TYPE (op0)))
8054 && TYPE_PRECISION (type) == TYPE_PRECISION (TREE_TYPE (op0)))
8055 return fold_convert_loc (loc, type, op0);
8057 /* Strip inner integral conversions that do not change the precision. */
8058 if (CONVERT_EXPR_P (op0)
8059 && (INTEGRAL_TYPE_P (TREE_TYPE (op0))
8060 || POINTER_TYPE_P (TREE_TYPE (op0)))
8061 && (INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (op0, 0)))
8062 || POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (op0, 0))))
8063 && (TYPE_PRECISION (TREE_TYPE (op0))
8064 == TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (op0, 0)))))
8065 return fold_build1_loc (loc, VIEW_CONVERT_EXPR,
8066 type, TREE_OPERAND (op0, 0));
8068 return fold_view_convert_expr (type, op0);
8070 case NEGATE_EXPR:
8071 tem = fold_negate_expr (loc, arg0);
8072 if (tem)
8073 return fold_convert_loc (loc, type, tem);
8074 return NULL_TREE;
8076 case ABS_EXPR:
8077 if (TREE_CODE (arg0) == INTEGER_CST || TREE_CODE (arg0) == REAL_CST)
8078 return fold_abs_const (arg0, type);
8079 else if (TREE_CODE (arg0) == NEGATE_EXPR)
8080 return fold_build1_loc (loc, ABS_EXPR, type, TREE_OPERAND (arg0, 0));
8081 /* Convert fabs((double)float) into (double)fabsf(float). */
8082 else if (TREE_CODE (arg0) == NOP_EXPR
8083 && TREE_CODE (type) == REAL_TYPE)
8085 tree targ0 = strip_float_extensions (arg0);
8086 if (targ0 != arg0)
8087 return fold_convert_loc (loc, type,
8088 fold_build1_loc (loc, ABS_EXPR,
8089 TREE_TYPE (targ0),
8090 targ0));
8092 /* ABS_EXPR<ABS_EXPR<x>> = ABS_EXPR<x> even if flag_wrapv is on. */
8093 else if (TREE_CODE (arg0) == ABS_EXPR)
8094 return arg0;
8095 else if (tree_expr_nonnegative_p (arg0))
8096 return arg0;
8098 /* Strip sign ops from argument. */
8099 if (TREE_CODE (type) == REAL_TYPE)
8101 tem = fold_strip_sign_ops (arg0);
8102 if (tem)
8103 return fold_build1_loc (loc, ABS_EXPR, type,
8104 fold_convert_loc (loc, type, tem));
8106 return NULL_TREE;
8108 case CONJ_EXPR:
8109 if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
8110 return fold_convert_loc (loc, type, arg0);
8111 if (TREE_CODE (arg0) == COMPLEX_EXPR)
8113 tree itype = TREE_TYPE (type);
8114 tree rpart = fold_convert_loc (loc, itype, TREE_OPERAND (arg0, 0));
8115 tree ipart = fold_convert_loc (loc, itype, TREE_OPERAND (arg0, 1));
8116 return fold_build2_loc (loc, COMPLEX_EXPR, type, rpart,
8117 negate_expr (ipart));
8119 if (TREE_CODE (arg0) == COMPLEX_CST)
8121 tree itype = TREE_TYPE (type);
8122 tree rpart = fold_convert_loc (loc, itype, TREE_REALPART (arg0));
8123 tree ipart = fold_convert_loc (loc, itype, TREE_IMAGPART (arg0));
8124 return build_complex (type, rpart, negate_expr (ipart));
8126 if (TREE_CODE (arg0) == CONJ_EXPR)
8127 return fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
8128 return NULL_TREE;
8130 case BIT_NOT_EXPR:
8131 if (TREE_CODE (arg0) == INTEGER_CST)
8132 return fold_not_const (arg0, type);
8133 else if (TREE_CODE (arg0) == BIT_NOT_EXPR)
8134 return fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
8135 /* Convert ~ (-A) to A - 1. */
8136 else if (INTEGRAL_TYPE_P (type) && TREE_CODE (arg0) == NEGATE_EXPR)
8137 return fold_build2_loc (loc, MINUS_EXPR, type,
8138 fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0)),
8139 build_int_cst (type, 1));
8140 /* Convert ~ (A - 1) or ~ (A + -1) to -A. */
8141 else if (INTEGRAL_TYPE_P (type)
8142 && ((TREE_CODE (arg0) == MINUS_EXPR
8143 && integer_onep (TREE_OPERAND (arg0, 1)))
8144 || (TREE_CODE (arg0) == PLUS_EXPR
8145 && integer_all_onesp (TREE_OPERAND (arg0, 1)))))
8146 return fold_build1_loc (loc, NEGATE_EXPR, type,
8147 fold_convert_loc (loc, type,
8148 TREE_OPERAND (arg0, 0)));
8149 /* Convert ~(X ^ Y) to ~X ^ Y or X ^ ~Y if ~X or ~Y simplify. */
8150 else if (TREE_CODE (arg0) == BIT_XOR_EXPR
8151 && (tem = fold_unary_loc (loc, BIT_NOT_EXPR, type,
8152 fold_convert_loc (loc, type,
8153 TREE_OPERAND (arg0, 0)))))
8154 return fold_build2_loc (loc, BIT_XOR_EXPR, type, tem,
8155 fold_convert_loc (loc, type,
8156 TREE_OPERAND (arg0, 1)));
8157 else if (TREE_CODE (arg0) == BIT_XOR_EXPR
8158 && (tem = fold_unary_loc (loc, BIT_NOT_EXPR, type,
8159 fold_convert_loc (loc, type,
8160 TREE_OPERAND (arg0, 1)))))
8161 return fold_build2_loc (loc, BIT_XOR_EXPR, type,
8162 fold_convert_loc (loc, type,
8163 TREE_OPERAND (arg0, 0)), tem);
8164 /* Perform BIT_NOT_EXPR on each element individually. */
8165 else if (TREE_CODE (arg0) == VECTOR_CST)
8167 tree *elements;
8168 tree elem;
8169 unsigned count = VECTOR_CST_NELTS (arg0), i;
8171 elements = XALLOCAVEC (tree, count);
8172 for (i = 0; i < count; i++)
8174 elem = VECTOR_CST_ELT (arg0, i);
8175 elem = fold_unary_loc (loc, BIT_NOT_EXPR, TREE_TYPE (type), elem);
8176 if (elem == NULL_TREE)
8177 break;
8178 elements[i] = elem;
8180 if (i == count)
8181 return build_vector (type, elements);
8184 return NULL_TREE;
8186 case TRUTH_NOT_EXPR:
8187 /* The argument to invert_truthvalue must have Boolean type. */
8188 if (TREE_CODE (TREE_TYPE (arg0)) != BOOLEAN_TYPE)
8189 arg0 = fold_convert_loc (loc, boolean_type_node, arg0);
8191 /* Note that the operand of this must be an int
8192 and its values must be 0 or 1.
8193 ("true" is a fixed value perhaps depending on the language,
8194 but we don't handle values other than 1 correctly yet.) */
8195 tem = fold_truth_not_expr (loc, arg0);
8196 if (!tem)
8197 return NULL_TREE;
8198 return fold_convert_loc (loc, type, tem);
8200 case REALPART_EXPR:
8201 if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
8202 return fold_convert_loc (loc, type, arg0);
8203 if (TREE_CODE (arg0) == COMPLEX_EXPR)
8204 return omit_one_operand_loc (loc, type, TREE_OPERAND (arg0, 0),
8205 TREE_OPERAND (arg0, 1));
8206 if (TREE_CODE (arg0) == COMPLEX_CST)
8207 return fold_convert_loc (loc, type, TREE_REALPART (arg0));
8208 if (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
8210 tree itype = TREE_TYPE (TREE_TYPE (arg0));
8211 tem = fold_build2_loc (loc, TREE_CODE (arg0), itype,
8212 fold_build1_loc (loc, REALPART_EXPR, itype,
8213 TREE_OPERAND (arg0, 0)),
8214 fold_build1_loc (loc, REALPART_EXPR, itype,
8215 TREE_OPERAND (arg0, 1)));
8216 return fold_convert_loc (loc, type, tem);
8218 if (TREE_CODE (arg0) == CONJ_EXPR)
8220 tree itype = TREE_TYPE (TREE_TYPE (arg0));
8221 tem = fold_build1_loc (loc, REALPART_EXPR, itype,
8222 TREE_OPERAND (arg0, 0));
8223 return fold_convert_loc (loc, type, tem);
8225 if (TREE_CODE (arg0) == CALL_EXPR)
8227 tree fn = get_callee_fndecl (arg0);
8228 if (fn && DECL_BUILT_IN_CLASS (fn) == BUILT_IN_NORMAL)
8229 switch (DECL_FUNCTION_CODE (fn))
8231 CASE_FLT_FN (BUILT_IN_CEXPI):
8232 fn = mathfn_built_in (type, BUILT_IN_COS);
8233 if (fn)
8234 return build_call_expr_loc (loc, fn, 1, CALL_EXPR_ARG (arg0, 0));
8235 break;
8237 default:
8238 break;
8241 return NULL_TREE;
8243 case IMAGPART_EXPR:
8244 if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
8245 return build_zero_cst (type);
8246 if (TREE_CODE (arg0) == COMPLEX_EXPR)
8247 return omit_one_operand_loc (loc, type, TREE_OPERAND (arg0, 1),
8248 TREE_OPERAND (arg0, 0));
8249 if (TREE_CODE (arg0) == COMPLEX_CST)
8250 return fold_convert_loc (loc, type, TREE_IMAGPART (arg0));
8251 if (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
8253 tree itype = TREE_TYPE (TREE_TYPE (arg0));
8254 tem = fold_build2_loc (loc, TREE_CODE (arg0), itype,
8255 fold_build1_loc (loc, IMAGPART_EXPR, itype,
8256 TREE_OPERAND (arg0, 0)),
8257 fold_build1_loc (loc, IMAGPART_EXPR, itype,
8258 TREE_OPERAND (arg0, 1)));
8259 return fold_convert_loc (loc, type, tem);
8261 if (TREE_CODE (arg0) == CONJ_EXPR)
8263 tree itype = TREE_TYPE (TREE_TYPE (arg0));
8264 tem = fold_build1_loc (loc, IMAGPART_EXPR, itype, TREE_OPERAND (arg0, 0));
8265 return fold_convert_loc (loc, type, negate_expr (tem));
8267 if (TREE_CODE (arg0) == CALL_EXPR)
8269 tree fn = get_callee_fndecl (arg0);
8270 if (fn && DECL_BUILT_IN_CLASS (fn) == BUILT_IN_NORMAL)
8271 switch (DECL_FUNCTION_CODE (fn))
8273 CASE_FLT_FN (BUILT_IN_CEXPI):
8274 fn = mathfn_built_in (type, BUILT_IN_SIN);
8275 if (fn)
8276 return build_call_expr_loc (loc, fn, 1, CALL_EXPR_ARG (arg0, 0));
8277 break;
8279 default:
8280 break;
8283 return NULL_TREE;
8285 case INDIRECT_REF:
8286 /* Fold *&X to X if X is an lvalue. */
8287 if (TREE_CODE (op0) == ADDR_EXPR)
8289 tree op00 = TREE_OPERAND (op0, 0);
8290 if ((TREE_CODE (op00) == VAR_DECL
8291 || TREE_CODE (op00) == PARM_DECL
8292 || TREE_CODE (op00) == RESULT_DECL)
8293 && !TREE_READONLY (op00))
8294 return op00;
8296 return NULL_TREE;
8298 case VEC_UNPACK_LO_EXPR:
8299 case VEC_UNPACK_HI_EXPR:
8300 case VEC_UNPACK_FLOAT_LO_EXPR:
8301 case VEC_UNPACK_FLOAT_HI_EXPR:
8303 unsigned int nelts = TYPE_VECTOR_SUBPARTS (type), i;
8304 tree *elts;
8305 enum tree_code subcode;
8307 gcc_assert (TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0)) == nelts * 2);
8308 if (TREE_CODE (arg0) != VECTOR_CST)
8309 return NULL_TREE;
8311 elts = XALLOCAVEC (tree, nelts * 2);
8312 if (!vec_cst_ctor_to_array (arg0, elts))
8313 return NULL_TREE;
8315 if ((!BYTES_BIG_ENDIAN) ^ (code == VEC_UNPACK_LO_EXPR
8316 || code == VEC_UNPACK_FLOAT_LO_EXPR))
8317 elts += nelts;
8319 if (code == VEC_UNPACK_LO_EXPR || code == VEC_UNPACK_HI_EXPR)
8320 subcode = NOP_EXPR;
8321 else
8322 subcode = FLOAT_EXPR;
8324 for (i = 0; i < nelts; i++)
8326 elts[i] = fold_convert_const (subcode, TREE_TYPE (type), elts[i]);
8327 if (elts[i] == NULL_TREE || !CONSTANT_CLASS_P (elts[i]))
8328 return NULL_TREE;
8331 return build_vector (type, elts);
8334 default:
8335 return NULL_TREE;
8336 } /* switch (code) */
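/* Illustrative sketch (not part of the original source): two of the
   BIT_NOT_EXPR identities used above, ~(-A) -> A - 1 and
   ~(A - 1) -> -A, hold in two's complement arithmetic.  Assuming none
   of the intermediate values overflow:  */
static int
example_check_bitnot (int a)
{
  return ~(-a) == a - 1 && ~(a - 1) == -a;
}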
8340 /* If the operation was a conversion do _not_ mark a resulting constant
8341 with TREE_OVERFLOW if the original constant was not. These conversions
8342 have implementation defined behavior and retaining the TREE_OVERFLOW
8343 flag here would confuse later passes such as VRP. */
8344 tree
8345 fold_unary_ignore_overflow_loc (location_t loc, enum tree_code code,
8346 tree type, tree op0)
8348 tree res = fold_unary_loc (loc, code, type, op0);
8349 if (res
8350 && TREE_CODE (res) == INTEGER_CST
8351 && TREE_CODE (op0) == INTEGER_CST
8352 && CONVERT_EXPR_CODE_P (code))
8353 TREE_OVERFLOW (res) = TREE_OVERFLOW (op0);
8355 return res;
8358 /* Fold a binary bitwise/truth expression of code CODE and type TYPE with
8359 operands OP0 and OP1. LOC is the location of the resulting expression.
8360 ARG0 and ARG1 are the NOP-stripped results of OP0 and OP1.
8361 Return the folded expression if folding is successful. Otherwise,
8362 return NULL_TREE. */
8363 static tree
8364 fold_truth_andor (location_t loc, enum tree_code code, tree type,
8365 tree arg0, tree arg1, tree op0, tree op1)
8367 tree tem;
8369 /* We only do these simplifications if we are optimizing. */
8370 if (!optimize)
8371 return NULL_TREE;
8373 /* Check for things like (A || B) && (A || C). We can convert this
8374 to A || (B && C). Note that either operator can be any of the four
8375 truth and/or operations and the transformation will still be
8376 valid. Also note that we only care about order for the
8377 ANDIF and ORIF operators. If B contains side effects, this
8378 might change the truth-value of A. */
8379 if (TREE_CODE (arg0) == TREE_CODE (arg1)
8380 && (TREE_CODE (arg0) == TRUTH_ANDIF_EXPR
8381 || TREE_CODE (arg0) == TRUTH_ORIF_EXPR
8382 || TREE_CODE (arg0) == TRUTH_AND_EXPR
8383 || TREE_CODE (arg0) == TRUTH_OR_EXPR)
8384 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (arg0, 1)))
8386 tree a00 = TREE_OPERAND (arg0, 0);
8387 tree a01 = TREE_OPERAND (arg0, 1);
8388 tree a10 = TREE_OPERAND (arg1, 0);
8389 tree a11 = TREE_OPERAND (arg1, 1);
8390 int commutative = ((TREE_CODE (arg0) == TRUTH_OR_EXPR
8391 || TREE_CODE (arg0) == TRUTH_AND_EXPR)
8392 && (code == TRUTH_AND_EXPR
8393 || code == TRUTH_OR_EXPR));
8395 if (operand_equal_p (a00, a10, 0))
8396 return fold_build2_loc (loc, TREE_CODE (arg0), type, a00,
8397 fold_build2_loc (loc, code, type, a01, a11));
8398 else if (commutative && operand_equal_p (a00, a11, 0))
8399 return fold_build2_loc (loc, TREE_CODE (arg0), type, a00,
8400 fold_build2_loc (loc, code, type, a01, a10));
8401 else if (commutative && operand_equal_p (a01, a10, 0))
8402 return fold_build2_loc (loc, TREE_CODE (arg0), type, a01,
8403 fold_build2_loc (loc, code, type, a00, a11));
8405 /* This case is tricky because we must either have commutative
8406 operators or else A10 must not have side-effects. */
8408 else if ((commutative || ! TREE_SIDE_EFFECTS (a10))
8409 && operand_equal_p (a01, a11, 0))
8410 return fold_build2_loc (loc, TREE_CODE (arg0), type,
8411 fold_build2_loc (loc, code, type, a00, a10),
8412 a01);
8415 /* See if we can build a range comparison. */
8416 if (0 != (tem = fold_range_test (loc, code, type, op0, op1)))
8417 return tem;
8419 if ((code == TRUTH_ANDIF_EXPR && TREE_CODE (arg0) == TRUTH_ORIF_EXPR)
8420 || (code == TRUTH_ORIF_EXPR && TREE_CODE (arg0) == TRUTH_ANDIF_EXPR))
8422 tem = merge_truthop_with_opposite_arm (loc, arg0, arg1, true);
8423 if (tem)
8424 return fold_build2_loc (loc, code, type, tem, arg1);
8427 if ((code == TRUTH_ANDIF_EXPR && TREE_CODE (arg1) == TRUTH_ORIF_EXPR)
8428 || (code == TRUTH_ORIF_EXPR && TREE_CODE (arg1) == TRUTH_ANDIF_EXPR))
8430 tem = merge_truthop_with_opposite_arm (loc, arg1, arg0, false);
8431 if (tem)
8432 return fold_build2_loc (loc, code, type, arg0, tem);
8435 /* Check for the possibility of merging component references. If our
8436 lhs is another similar operation, try to merge its rhs with our
8437 rhs. Then try to merge our lhs and rhs. */
8438 if (TREE_CODE (arg0) == code
8439 && 0 != (tem = fold_truth_andor_1 (loc, code, type,
8440 TREE_OPERAND (arg0, 1), arg1)))
8441 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0), tem);
8443 if ((tem = fold_truth_andor_1 (loc, code, type, arg0, arg1)) != 0)
8444 return tem;
8446 if ((BRANCH_COST (optimize_function_for_speed_p (cfun),
8447 false) >= 2)
8448 && LOGICAL_OP_NON_SHORT_CIRCUIT
8449 && (code == TRUTH_AND_EXPR
8450 || code == TRUTH_ANDIF_EXPR
8451 || code == TRUTH_OR_EXPR
8452 || code == TRUTH_ORIF_EXPR))
8454 enum tree_code ncode, icode;
8456 ncode = (code == TRUTH_ANDIF_EXPR || code == TRUTH_AND_EXPR)
8457 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR;
8458 icode = ncode == TRUTH_AND_EXPR ? TRUTH_ANDIF_EXPR : TRUTH_ORIF_EXPR;
8460 /* Transform ((A AND-IF B) AND[-IF] C) into (A AND-IF (B AND C)),
8461 or ((A OR-IF B) OR[-IF] C) into (A OR-IF (B OR C)).
8462 We don't want to pack more than two leaves into a non-IF AND/OR
8463 expression.
8464 If the tree-code of the left-hand operand isn't an AND/OR-IF code
8465 and isn't equal to IF-CODE, then we don't want to add the
8466 right-hand operand. If the inner right-hand side of the
8467 left-hand operand has side-effects, or isn't simple, then we
8468 can't add to it, as otherwise we might destroy the if-sequence. */
8469 if (TREE_CODE (arg0) == icode
8470 && simple_operand_p_2 (arg1)
8471 /* Needed for sequence points to handle trapping and
8472 side-effects. */
8473 && simple_operand_p_2 (TREE_OPERAND (arg0, 1)))
8475 tem = fold_build2_loc (loc, ncode, type, TREE_OPERAND (arg0, 1),
8476 arg1);
8477 return fold_build2_loc (loc, icode, type, TREE_OPERAND (arg0, 0),
8478 tem);
8480 /* Same as above but for (A AND[-IF] (B AND-IF C)) -> ((A AND B) AND-IF C),
8481 or (A OR[-IF] (B OR-IF C) -> ((A OR B) OR-IF C). */
8482 else if (TREE_CODE (arg1) == icode
8483 && simple_operand_p_2 (arg0)
8484 /* Needed for sequence points to handle trapping and
8485 side-effects. */
8486 && simple_operand_p_2 (TREE_OPERAND (arg1, 0)))
8488 tem = fold_build2_loc (loc, ncode, type,
8489 arg0, TREE_OPERAND (arg1, 0));
8490 return fold_build2_loc (loc, icode, type, tem,
8491 TREE_OPERAND (arg1, 1));
8493 /* Transform (A AND-IF B) into (A AND B), or (A OR-IF B)
8494 into (A OR B).
8495 For sequence point consistency, we need to check for trapping
8496 and side-effects. */
8497 else if (code == icode && simple_operand_p_2 (arg0)
8498 && simple_operand_p_2 (arg1))
8499 return fold_build2_loc (loc, ncode, type, arg0, arg1);
8502 return NULL_TREE;
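/* Illustrative sketch (not part of the original source): the factoring
   rewrite above, (A || B) && (A || C) -> A || (B && C), is a pure value
   identity (the side-effect and ordering caveats in the comment still
   apply) and can be checked over all eight boolean assignments:  */
static int
example_check_factoring (void)
{
  int a, b, c;
  for (a = 0; a <= 1; a++)
    for (b = 0; b <= 1; b++)
      for (c = 0; c <= 1; c++)
        if (((a || b) && (a || c)) != (a || (b && c)))
          return 0;
  return 1;
}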
8505 /* Fold a binary expression of code CODE and type TYPE with operands
8506 OP0 and OP1, containing either a MIN-MAX or a MAX-MIN combination.
8507 Return the folded expression if folding is successful. Otherwise,
8508 return NULL_TREE. */
8510 static tree
8511 fold_minmax (location_t loc, enum tree_code code, tree type, tree op0, tree op1)
8513 enum tree_code compl_code;
8515 if (code == MIN_EXPR)
8516 compl_code = MAX_EXPR;
8517 else if (code == MAX_EXPR)
8518 compl_code = MIN_EXPR;
8519 else
8520 gcc_unreachable ();
8522 /* MIN (MAX (a, b), b) == b. */
8523 if (TREE_CODE (op0) == compl_code
8524 && operand_equal_p (TREE_OPERAND (op0, 1), op1, 0))
8525 return omit_one_operand_loc (loc, type, op1, TREE_OPERAND (op0, 0));
8527 /* MIN (MAX (b, a), b) == b. */
8528 if (TREE_CODE (op0) == compl_code
8529 && operand_equal_p (TREE_OPERAND (op0, 0), op1, 0)
8530 && reorder_operands_p (TREE_OPERAND (op0, 1), op1))
8531 return omit_one_operand_loc (loc, type, op1, TREE_OPERAND (op0, 1));
8533 /* MIN (a, MAX (a, b)) == a. */
8534 if (TREE_CODE (op1) == compl_code
8535 && operand_equal_p (op0, TREE_OPERAND (op1, 0), 0)
8536 && reorder_operands_p (op0, TREE_OPERAND (op1, 1)))
8537 return omit_one_operand_loc (loc, type, op0, TREE_OPERAND (op1, 1));
8539 /* MIN (a, MAX (b, a)) == a. */
8540 if (TREE_CODE (op1) == compl_code
8541 && operand_equal_p (op0, TREE_OPERAND (op1, 1), 0)
8542 && reorder_operands_p (op0, TREE_OPERAND (op1, 0)))
8543 return omit_one_operand_loc (loc, type, op0, TREE_OPERAND (op1, 0));
8545 return NULL_TREE;
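/* Illustrative sketch (not part of the original source): the MIN/MAX
   absorption laws used above, in plain integer form, e.g.
   MIN (MAX (a, b), b) == b and MIN (a, MAX (a, b)) == a:  */
static int
example_min (int x, int y)
{
  return x < y ? x : y;
}
static int
example_max (int x, int y)
{
  return x > y ? x : y;
}
static int
example_check_minmax (int a, int b)
{
  return example_min (example_max (a, b), b) == b
         && example_min (a, example_max (a, b)) == a;
}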
8548 /* Helper that tries to canonicalize the comparison ARG0 CODE ARG1
8549 by changing CODE to reduce the magnitude of constants involved in
8550 ARG0 of the comparison.
8551 Returns a canonicalized comparison tree if a simplification was
8552 possible, otherwise returns NULL_TREE.
8553 Set *STRICT_OVERFLOW_P to true if the canonicalization is only
8554 valid if signed overflow is undefined. */
8556 static tree
8557 maybe_canonicalize_comparison_1 (location_t loc, enum tree_code code, tree type,
8558 tree arg0, tree arg1,
8559 bool *strict_overflow_p)
8561 enum tree_code code0 = TREE_CODE (arg0);
8562 tree t, cst0 = NULL_TREE;
8563 int sgn0;
8564 bool swap = false;
8566 /* Match A +- CST code arg1 and CST code arg1. We can change the
8567 first form only if overflow is undefined. */
8568 if (!((TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0))
8569 /* In principle pointers also have undefined overflow behavior,
8570 but that causes problems elsewhere. */
8571 && !POINTER_TYPE_P (TREE_TYPE (arg0))
8572 && (code0 == MINUS_EXPR
8573 || code0 == PLUS_EXPR)
8574 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
8575 || code0 == INTEGER_CST))
8576 return NULL_TREE;
8578 /* Identify the constant in arg0 and its sign. */
8579 if (code0 == INTEGER_CST)
8580 cst0 = arg0;
8581 else
8582 cst0 = TREE_OPERAND (arg0, 1);
8583 sgn0 = tree_int_cst_sgn (cst0);
8585 /* Overflowed constants and zero will cause problems. */
8586 if (integer_zerop (cst0)
8587 || TREE_OVERFLOW (cst0))
8588 return NULL_TREE;
8590 /* See if we can reduce the magnitude of the constant in
8591 arg0 by changing the comparison code. */
8592 if (code0 == INTEGER_CST)
8594 /* CST <= arg1 -> CST-1 < arg1. */
8595 if (code == LE_EXPR && sgn0 == 1)
8596 code = LT_EXPR;
8597 /* -CST < arg1 -> -CST-1 <= arg1. */
8598 else if (code == LT_EXPR && sgn0 == -1)
8599 code = LE_EXPR;
8600 /* CST > arg1 -> CST-1 >= arg1. */
8601 else if (code == GT_EXPR && sgn0 == 1)
8602 code = GE_EXPR;
8603 /* -CST >= arg1 -> -CST-1 > arg1. */
8604 else if (code == GE_EXPR && sgn0 == -1)
8605 code = GT_EXPR;
8606 else
8607 return NULL_TREE;
8608 /* arg1 code' CST' might be more canonical. */
8609 swap = true;
8611 else
8613 /* A - CST < arg1 -> A - CST-1 <= arg1. */
8614 if (code == LT_EXPR
8615 && code0 == ((sgn0 == -1) ? PLUS_EXPR : MINUS_EXPR))
8616 code = LE_EXPR;
8617 /* A + CST > arg1 -> A + CST-1 >= arg1. */
8618 else if (code == GT_EXPR
8619 && code0 == ((sgn0 == -1) ? MINUS_EXPR : PLUS_EXPR))
8620 code = GE_EXPR;
8621 /* A + CST <= arg1 -> A + CST-1 < arg1. */
8622 else if (code == LE_EXPR
8623 && code0 == ((sgn0 == -1) ? MINUS_EXPR : PLUS_EXPR))
8624 code = LT_EXPR;
8625 /* A - CST >= arg1 -> A - CST-1 > arg1. */
8626 else if (code == GE_EXPR
8627 && code0 == ((sgn0 == -1) ? PLUS_EXPR : MINUS_EXPR))
8628 code = GT_EXPR;
8629 else
8630 return NULL_TREE;
8631 *strict_overflow_p = true;
8634 /* Now build the constant reduced in magnitude. But not if that
8635 would produce one outside of its type's range. */
8636 if (INTEGRAL_TYPE_P (TREE_TYPE (cst0))
8637 && ((sgn0 == 1
8638 && TYPE_MIN_VALUE (TREE_TYPE (cst0))
8639 && tree_int_cst_equal (cst0, TYPE_MIN_VALUE (TREE_TYPE (cst0))))
8640 || (sgn0 == -1
8641 && TYPE_MAX_VALUE (TREE_TYPE (cst0))
8642 && tree_int_cst_equal (cst0, TYPE_MAX_VALUE (TREE_TYPE (cst0))))))
8643 /* We cannot swap the comparison here as that would cause us to
8644 endlessly recurse. */
8645 return NULL_TREE;
8647 t = int_const_binop (sgn0 == -1 ? PLUS_EXPR : MINUS_EXPR,
8648 cst0, build_int_cst (TREE_TYPE (cst0), 1));
8649 if (code0 != INTEGER_CST)
8650 t = fold_build2_loc (loc, code0, TREE_TYPE (arg0), TREE_OPERAND (arg0, 0), t);
8651 t = fold_convert (TREE_TYPE (arg1), t);
8653 /* If swapping might yield a more canonical form, do so. */
8654 if (swap)
8655 return fold_build2_loc (loc, swap_tree_comparison (code), type, arg1, t);
8656 else
8657 return fold_build2_loc (loc, code, type, t, arg1);
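/* Illustrative sketch (not part of the original source): the magnitude
   reduction above turns e.g. X + 4 <= Y into X + 3 < Y, which is only
   valid because signed overflow is assumed undefined, so X + 4 cannot
   wrap.  For inputs where neither sum overflows, the two forms agree:  */
static int
example_check_reduction (int x, int y)
{
  return (x + 4 <= y) == (x + 3 < y);
}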
8660 /* Canonicalize the comparison ARG0 CODE ARG1 with type TYPE with undefined
8661 overflow further. Try to decrease the magnitude of constants involved
8662 by changing LE_EXPR and GE_EXPR to LT_EXPR and GT_EXPR or vice versa
8663 and put sole constants at the second argument position.
8664 Returns the canonicalized tree if changed, otherwise NULL_TREE. */
8666 static tree
8667 maybe_canonicalize_comparison (location_t loc, enum tree_code code, tree type,
8668 tree arg0, tree arg1)
8670 tree t;
8671 bool strict_overflow_p;
8672 const char * const warnmsg = G_("assuming signed overflow does not occur "
8673 "when reducing constant in comparison");
8675 /* Try canonicalization by simplifying arg0. */
8676 strict_overflow_p = false;
8677 t = maybe_canonicalize_comparison_1 (loc, code, type, arg0, arg1,
8678 &strict_overflow_p);
8679 if (t)
8681 if (strict_overflow_p)
8682 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_MAGNITUDE);
8683 return t;
8686 /* Try canonicalization by simplifying arg1 using the swapped
8687 comparison. */
8688 code = swap_tree_comparison (code);
8689 strict_overflow_p = false;
8690 t = maybe_canonicalize_comparison_1 (loc, code, type, arg1, arg0,
8691 &strict_overflow_p);
8692 if (t && strict_overflow_p)
8693 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_MAGNITUDE);
8694 return t;
8697 /* Return whether BASE + OFFSET + BITPOS may wrap around the address
8698 space. This is used to avoid issuing overflow warnings for
8699 expressions like &p->x which cannot wrap. */
8701 static bool
8702 pointer_may_wrap_p (tree base, tree offset, HOST_WIDE_INT bitpos)
8704 unsigned HOST_WIDE_INT offset_low, total_low;
8705 HOST_WIDE_INT size, offset_high, total_high;
8707 if (!POINTER_TYPE_P (TREE_TYPE (base)))
8708 return true;
8710 if (bitpos < 0)
8711 return true;
8713 if (offset == NULL_TREE)
8715 offset_low = 0;
8716 offset_high = 0;
8718 else if (TREE_CODE (offset) != INTEGER_CST || TREE_OVERFLOW (offset))
8719 return true;
8720 else
8722 offset_low = TREE_INT_CST_LOW (offset);
8723 offset_high = TREE_INT_CST_HIGH (offset);
8726 if (add_double_with_sign (offset_low, offset_high,
8727 bitpos / BITS_PER_UNIT, 0,
8728 &total_low, &total_high,
8729 true))
8730 return true;
8732 if (total_high != 0)
8733 return true;
8735 size = int_size_in_bytes (TREE_TYPE (TREE_TYPE (base)));
8736 if (size <= 0)
8737 return true;
8739 /* We can do slightly better for SIZE if we have an ADDR_EXPR of an
8740 array. */
8741 if (TREE_CODE (base) == ADDR_EXPR)
8743 HOST_WIDE_INT base_size;
8745 base_size = int_size_in_bytes (TREE_TYPE (TREE_OPERAND (base, 0)));
8746 if (base_size > 0 && size < base_size)
8747 size = base_size;
8750 return total_low > (unsigned HOST_WIDE_INT) size;
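/* Illustrative sketch (not part of the original source): the essence of
   the test above, with plain unsigned arithmetic and an explicit carry
   check; a sum that wrapped, or that lands beyond SIZE, means the
   pointer expression may wrap around the address space:  */
static int
example_offset_wraps (unsigned long offset, unsigned long pos,
                      unsigned long size)
{
  unsigned long total = offset + pos;
  if (total < offset)   /* the unsigned addition wrapped */
    return 1;
  return total > size;
}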
8753 /* Subroutine of fold_binary. This routine performs all of the
8754 transformations that are common to the equality/inequality
8755 operators (EQ_EXPR and NE_EXPR) and the ordering operators
8756 (LT_EXPR, LE_EXPR, GE_EXPR and GT_EXPR). Callers other than
8757 fold_binary should call fold_binary. Fold a comparison with
8758 tree code CODE and type TYPE with operands OP0 and OP1. Return
8759 the folded comparison or NULL_TREE. */
8761 static tree
8762 fold_comparison (location_t loc, enum tree_code code, tree type,
8763 tree op0, tree op1)
8765 tree arg0, arg1, tem;
8767 arg0 = op0;
8768 arg1 = op1;
8770 STRIP_SIGN_NOPS (arg0);
8771 STRIP_SIGN_NOPS (arg1);
8773 tem = fold_relational_const (code, type, arg0, arg1);
8774 if (tem != NULL_TREE)
8775 return tem;
8777 /* If one arg is a real or integer constant, put it last. */
8778 if (tree_swap_operands_p (arg0, arg1, true))
8779 return fold_build2_loc (loc, swap_tree_comparison (code), type, op1, op0);
8781 /* Transform comparisons of the form X +- C1 CMP C2 to X CMP C2 +- C1. */
8782 if ((TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
8783 && (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
8784 && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1))
8785 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
8786 && (TREE_CODE (arg1) == INTEGER_CST
8787 && !TREE_OVERFLOW (arg1)))
8789 tree const1 = TREE_OPERAND (arg0, 1);
8790 tree const2 = arg1;
8791 tree variable = TREE_OPERAND (arg0, 0);
8792 tree lhs;
8793 int lhs_add;
8794 lhs_add = TREE_CODE (arg0) != PLUS_EXPR;
8796 lhs = fold_build2_loc (loc, lhs_add ? PLUS_EXPR : MINUS_EXPR,
8797 TREE_TYPE (arg1), const2, const1);
8799 /* If the constant operation overflowed this can be
8800 simplified as a comparison against INT_MAX/INT_MIN. */
8801 if (TREE_CODE (lhs) == INTEGER_CST
8802 && TREE_OVERFLOW (lhs))
8804 int const1_sgn = tree_int_cst_sgn (const1);
8805 enum tree_code code2 = code;
8807 /* Get the sign of the constant on the lhs if the
8808 operation were VARIABLE + CONST1. */
8809 if (TREE_CODE (arg0) == MINUS_EXPR)
8810 const1_sgn = -const1_sgn;
8812 /* The sign of the constant determines if we overflowed
8813 INT_MAX (const1_sgn == -1) or INT_MIN (const1_sgn == 1).
8814 Canonicalize to the INT_MIN overflow by swapping the comparison
8815 if necessary. */
8816 if (const1_sgn == -1)
8817 code2 = swap_tree_comparison (code);
8819 /* We now can look at the canonicalized case
8820 VARIABLE + 1 CODE2 INT_MIN
8821 and decide on the result. */
8822 if (code2 == LT_EXPR
8823 || code2 == LE_EXPR
8824 || code2 == EQ_EXPR)
8825 return omit_one_operand_loc (loc, type, boolean_false_node, variable);
8826 else if (code2 == NE_EXPR
8827 || code2 == GE_EXPR
8828 || code2 == GT_EXPR)
8829 return omit_one_operand_loc (loc, type, boolean_true_node, variable);
8832 if (TREE_CODE (lhs) == TREE_CODE (arg1)
8833 && (TREE_CODE (lhs) != INTEGER_CST
8834 || !TREE_OVERFLOW (lhs)))
8836 if (code != EQ_EXPR && code != NE_EXPR)
8837 fold_overflow_warning ("assuming signed overflow does not occur "
8838 "when changing X +- C1 cmp C2 to "
8839 "X cmp C1 +- C2",
8840 WARN_STRICT_OVERFLOW_COMPARISON);
8841 return fold_build2_loc (loc, code, type, variable, lhs);
8845 /* For comparisons of pointers we can decompose it to a compile time
8846 comparison of the base objects and the offsets into the object.
8847 This requires at least one operand being an ADDR_EXPR or a
8848 POINTER_PLUS_EXPR to do more than the operand_equal_p test below. */
8849 if (POINTER_TYPE_P (TREE_TYPE (arg0))
8850 && (TREE_CODE (arg0) == ADDR_EXPR
8851 || TREE_CODE (arg1) == ADDR_EXPR
8852 || TREE_CODE (arg0) == POINTER_PLUS_EXPR
8853 || TREE_CODE (arg1) == POINTER_PLUS_EXPR))
8855 tree base0, base1, offset0 = NULL_TREE, offset1 = NULL_TREE;
8856 HOST_WIDE_INT bitsize, bitpos0 = 0, bitpos1 = 0;
8857 enum machine_mode mode;
8858 int volatilep, unsignedp;
8859 bool indirect_base0 = false, indirect_base1 = false;
8861 /* Get base and offset for the access. Strip ADDR_EXPR for
8862 get_inner_reference, but put it back by stripping INDIRECT_REF
8863 off the base object if possible. indirect_baseN will be true
8864 if baseN is not an address but refers to the object itself. */
8865 base0 = arg0;
8866 if (TREE_CODE (arg0) == ADDR_EXPR)
8868 base0 = get_inner_reference (TREE_OPERAND (arg0, 0),
8869 &bitsize, &bitpos0, &offset0, &mode,
8870 &unsignedp, &volatilep, false);
8871 if (TREE_CODE (base0) == INDIRECT_REF)
8872 base0 = TREE_OPERAND (base0, 0);
8873 else
8874 indirect_base0 = true;
8876 else if (TREE_CODE (arg0) == POINTER_PLUS_EXPR)
8878 base0 = TREE_OPERAND (arg0, 0);
8879 STRIP_SIGN_NOPS (base0);
8880 if (TREE_CODE (base0) == ADDR_EXPR)
8882 base0 = TREE_OPERAND (base0, 0);
8883 indirect_base0 = true;
8885 offset0 = TREE_OPERAND (arg0, 1);
8886 if (host_integerp (offset0, 0))
8888 HOST_WIDE_INT off = size_low_cst (offset0);
8889 if ((HOST_WIDE_INT) (((unsigned HOST_WIDE_INT) off)
8890 * BITS_PER_UNIT)
8891 / BITS_PER_UNIT == (HOST_WIDE_INT) off)
8893 bitpos0 = off * BITS_PER_UNIT;
8894 offset0 = NULL_TREE;
8899 base1 = arg1;
8900 if (TREE_CODE (arg1) == ADDR_EXPR)
8902 base1 = get_inner_reference (TREE_OPERAND (arg1, 0),
8903 &bitsize, &bitpos1, &offset1, &mode,
8904 &unsignedp, &volatilep, false);
8905 if (TREE_CODE (base1) == INDIRECT_REF)
8906 base1 = TREE_OPERAND (base1, 0);
8907 else
8908 indirect_base1 = true;
8910 else if (TREE_CODE (arg1) == POINTER_PLUS_EXPR)
8912 base1 = TREE_OPERAND (arg1, 0);
8913 STRIP_SIGN_NOPS (base1);
8914 if (TREE_CODE (base1) == ADDR_EXPR)
8916 base1 = TREE_OPERAND (base1, 0);
8917 indirect_base1 = true;
8919 offset1 = TREE_OPERAND (arg1, 1);
8920 if (host_integerp (offset1, 0))
8922 HOST_WIDE_INT off = size_low_cst (offset1);
8923 if ((HOST_WIDE_INT) (((unsigned HOST_WIDE_INT) off)
8924 * BITS_PER_UNIT)
8925 / BITS_PER_UNIT == (HOST_WIDE_INT) off)
8927 bitpos1 = off * BITS_PER_UNIT;
8928 offset1 = NULL_TREE;
8933 /* A local variable can never be pointed to by
8934 the default SSA name of an incoming parameter. */
8935 if ((TREE_CODE (arg0) == ADDR_EXPR
8936 && indirect_base0
8937 && TREE_CODE (base0) == VAR_DECL
8938 && auto_var_in_fn_p (base0, current_function_decl)
8939 && !indirect_base1
8940 && TREE_CODE (base1) == SSA_NAME
8941 && TREE_CODE (SSA_NAME_VAR (base1)) == PARM_DECL
8942 && SSA_NAME_IS_DEFAULT_DEF (base1))
8943 || (TREE_CODE (arg1) == ADDR_EXPR
8944 && indirect_base1
8945 && TREE_CODE (base1) == VAR_DECL
8946 && auto_var_in_fn_p (base1, current_function_decl)
8947 && !indirect_base0
8948 && TREE_CODE (base0) == SSA_NAME
8949 && TREE_CODE (SSA_NAME_VAR (base0)) == PARM_DECL
8950 && SSA_NAME_IS_DEFAULT_DEF (base0)))
8952 if (code == NE_EXPR)
8953 return constant_boolean_node (1, type);
8954 else if (code == EQ_EXPR)
8955 return constant_boolean_node (0, type);
8957 /* If we have equivalent bases we might be able to simplify. */
8958 else if (indirect_base0 == indirect_base1
8959 && operand_equal_p (base0, base1, 0))
8961 /* We can fold this expression to a constant if the non-constant
8962 offset parts are equal. */
8963 if ((offset0 == offset1
8964 || (offset0 && offset1
8965 && operand_equal_p (offset0, offset1, 0)))
8966 && (code == EQ_EXPR
8967 || code == NE_EXPR
8968 || (indirect_base0 && DECL_P (base0))
8969 || POINTER_TYPE_OVERFLOW_UNDEFINED))
8972 if (code != EQ_EXPR
8973 && code != NE_EXPR
8974 && bitpos0 != bitpos1
8975 && (pointer_may_wrap_p (base0, offset0, bitpos0)
8976 || pointer_may_wrap_p (base1, offset1, bitpos1)))
8977 fold_overflow_warning (("assuming pointer wraparound does not "
8978 "occur when comparing P +- C1 with "
8979 "P +- C2"),
8980 WARN_STRICT_OVERFLOW_CONDITIONAL);
8982 switch (code)
8984 case EQ_EXPR:
8985 return constant_boolean_node (bitpos0 == bitpos1, type);
8986 case NE_EXPR:
8987 return constant_boolean_node (bitpos0 != bitpos1, type);
8988 case LT_EXPR:
8989 return constant_boolean_node (bitpos0 < bitpos1, type);
8990 case LE_EXPR:
8991 return constant_boolean_node (bitpos0 <= bitpos1, type);
8992 case GE_EXPR:
8993 return constant_boolean_node (bitpos0 >= bitpos1, type);
8994 case GT_EXPR:
8995 return constant_boolean_node (bitpos0 > bitpos1, type);
8996 default:;
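/* Editorial example (not in the original source): given
   struct s { int a; int b; } x;, the operands of &x.b > &x.a both
   decompose to base x, with bit positions 32 and 0 assuming a
   32-bit int, so the switch above folds the comparison to constant
   true.  */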
8999 /* We can simplify the comparison to a comparison of the variable
9000 offset parts if the constant offset parts are equal.
9001 Be careful to use signed size type here because otherwise we
9002 mess with array offsets in the wrong way. This is possible
9003 because pointer arithmetic is restricted to remain within an
9004 object and overflow on pointer differences is undefined per
9005 C99 6.5.6/8 and /9 with respect to the signed ptrdiff_t. */
9006 else if (bitpos0 == bitpos1
9007 && ((code == EQ_EXPR || code == NE_EXPR)
9008 || (indirect_base0 && DECL_P (base0))
9009 || POINTER_TYPE_OVERFLOW_UNDEFINED))
9011 /* By converting to signed size type we cover middle-end pointer
9012 arithmetic which operates on unsigned pointer types of size
9013 type size and ARRAY_REF offsets which are properly sign or
9014 zero extended from their type in case it is narrower than
9015 size type. */
9016 if (offset0 == NULL_TREE)
9017 offset0 = build_int_cst (ssizetype, 0);
9018 else
9019 offset0 = fold_convert_loc (loc, ssizetype, offset0);
9020 if (offset1 == NULL_TREE)
9021 offset1 = build_int_cst (ssizetype, 0);
9022 else
9023 offset1 = fold_convert_loc (loc, ssizetype, offset1);
9025 if (code != EQ_EXPR
9026 && code != NE_EXPR
9027 && (pointer_may_wrap_p (base0, offset0, bitpos0)
9028 || pointer_may_wrap_p (base1, offset1, bitpos1)))
9029 fold_overflow_warning (("assuming pointer wraparound does not "
9030 "occur when comparing P +- C1 with "
9031 "P +- C2"),
9032 WARN_STRICT_OVERFLOW_COMPARISON);
9034 return fold_build2_loc (loc, code, type, offset0, offset1);
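/* Editorial example (not in the original source): for p + i and
   p + j with equal constant parts, the comparison reduces to
   (ssizetype) i CMP (ssizetype) j, built by the return just
   above.  */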
9037 /* For non-equal bases we can simplify if they are addresses
9038 of local binding decls or constants. */
9039 else if (indirect_base0 && indirect_base1
9040 /* We know that !operand_equal_p (base0, base1, 0)
9041 because the if condition was false. But make
9042 sure two decls are not the same. */
9043 && base0 != base1
9044 && TREE_CODE (arg0) == ADDR_EXPR
9045 && TREE_CODE (arg1) == ADDR_EXPR
9046 && (((TREE_CODE (base0) == VAR_DECL
9047 || TREE_CODE (base0) == PARM_DECL)
9048 && (targetm.binds_local_p (base0)
9049 || CONSTANT_CLASS_P (base1)))
9050 || CONSTANT_CLASS_P (base0))
9051 && (((TREE_CODE (base1) == VAR_DECL
9052 || TREE_CODE (base1) == PARM_DECL)
9053 && (targetm.binds_local_p (base1)
9054 || CONSTANT_CLASS_P (base0)))
9055 || CONSTANT_CLASS_P (base1)))
9057 if (code == EQ_EXPR)
9058 return omit_two_operands_loc (loc, type, boolean_false_node,
9059 arg0, arg1);
9060 else if (code == NE_EXPR)
9061 return omit_two_operands_loc (loc, type, boolean_true_node,
9062 arg0, arg1);
9064 /* For equal offsets we can simplify to a comparison of the
9065 base addresses. */
9066 else if (bitpos0 == bitpos1
9067 && (indirect_base0
9068 ? base0 != TREE_OPERAND (arg0, 0) : base0 != arg0)
9069 && (indirect_base1
9070 ? base1 != TREE_OPERAND (arg1, 0) : base1 != arg1)
9071 && ((offset0 == offset1)
9072 || (offset0 && offset1
9073 && operand_equal_p (offset0, offset1, 0))))
9075 if (indirect_base0)
9076 base0 = build_fold_addr_expr_loc (loc, base0);
9077 if (indirect_base1)
9078 base1 = build_fold_addr_expr_loc (loc, base1);
9079 return fold_build2_loc (loc, code, type, base0, base1);
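/* Editorial example (not in the original source): with equal field
   offsets, &p->f == &q->f reduces to a comparison of the rebuilt
   base addresses, i.e. p == q; and the preceding case folds
   &x == &y for two distinct locally-bound decls straight to
   false.  */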
9083 /* Transform comparisons of the form X +- C1 CMP Y +- C2 to
9084 X CMP Y +- C2 +- C1 for signed X, Y. This is valid if
9085 the resulting offset is smaller in absolute value than the
9086 original one. */
9087 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0))
9088 && (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
9089 && (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
9090 && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1)))
9091 && (TREE_CODE (arg1) == PLUS_EXPR || TREE_CODE (arg1) == MINUS_EXPR)
9092 && (TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
9093 && !TREE_OVERFLOW (TREE_OPERAND (arg1, 1))))
9095 tree const1 = TREE_OPERAND (arg0, 1);
9096 tree const2 = TREE_OPERAND (arg1, 1);
9097 tree variable1 = TREE_OPERAND (arg0, 0);
9098 tree variable2 = TREE_OPERAND (arg1, 0);
9099 tree cst;
9100 const char * const warnmsg = G_("assuming signed overflow does not "
9101 "occur when combining constants around "
9102 "a comparison");
9104 /* Put the constant on the side where it doesn't overflow and is
9105 of lower absolute value than before. */
9106 cst = int_const_binop (TREE_CODE (arg0) == TREE_CODE (arg1)
9107 ? MINUS_EXPR : PLUS_EXPR,
9108 const2, const1);
9109 if (!TREE_OVERFLOW (cst)
9110 && tree_int_cst_compare (const2, cst) == tree_int_cst_sgn (const2))
9112 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_COMPARISON);
9113 return fold_build2_loc (loc, code, type,
9114 variable1,
9115 fold_build2_loc (loc,
9116 TREE_CODE (arg1), TREE_TYPE (arg1),
9117 variable2, cst));
9120 cst = int_const_binop (TREE_CODE (arg0) == TREE_CODE (arg1)
9121 ? MINUS_EXPR : PLUS_EXPR,
9122 const1, const2);
9123 if (!TREE_OVERFLOW (cst)
9124 && tree_int_cst_compare (const1, cst) == tree_int_cst_sgn (const1))
9126 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_COMPARISON);
9127 return fold_build2_loc (loc, code, type,
9128 fold_build2_loc (loc, TREE_CODE (arg0), TREE_TYPE (arg0),
9129 variable1, cst),
9130 variable2);
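/* Editorial example (not in the original source): for signed x and
   y, (x + 2) < (y + 5) is rewritten as x < y + 3 by the first
   branch above, since the combined constant 3 is smaller in
   absolute value than 5 and does not overflow.  */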
9134 /* Transform comparisons of the form X * C1 CMP 0 to X CMP 0 in the
9135 signed arithmetic case. That form is created by the compiler
9136 often enough for folding it to be of value. One example is in
9137 computing loop trip counts after Operator Strength Reduction. */
9138 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0))
9139 && TREE_CODE (arg0) == MULT_EXPR
9140 && (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
9141 && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1)))
9142 && integer_zerop (arg1))
9144 tree const1 = TREE_OPERAND (arg0, 1);
9145 tree const2 = arg1; /* zero */
9146 tree variable1 = TREE_OPERAND (arg0, 0);
9147 enum tree_code cmp_code = code;
9149 /* Handle unfolded multiplication by zero. */
9150 if (integer_zerop (const1))
9151 return fold_build2_loc (loc, cmp_code, type, const1, const2);
9153 fold_overflow_warning (("assuming signed overflow does not occur when "
9154 "eliminating multiplication in comparison "
9155 "with zero"),
9156 WARN_STRICT_OVERFLOW_COMPARISON);
9158 /* If const1 is negative we swap the sense of the comparison. */
9159 if (tree_int_cst_sgn (const1) < 0)
9160 cmp_code = swap_tree_comparison (cmp_code);
9162 return fold_build2_loc (loc, cmp_code, type, variable1, const2);
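/* Editorial examples (not in the original source): with signed x,
   x * 4 < 0 folds to x < 0, and x * -4 < 0 folds to x > 0 via the
   sign swap just above; both rely on signed overflow being
   undefined.  */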
9165 tem = maybe_canonicalize_comparison (loc, code, type, arg0, arg1);
9166 if (tem)
9167 return tem;
9169 if (FLOAT_TYPE_P (TREE_TYPE (arg0)))
9171 tree targ0 = strip_float_extensions (arg0);
9172 tree targ1 = strip_float_extensions (arg1);
9173 tree newtype = TREE_TYPE (targ0);
9175 if (TYPE_PRECISION (TREE_TYPE (targ1)) > TYPE_PRECISION (newtype))
9176 newtype = TREE_TYPE (targ1);
9178 /* Fold (double)float1 CMP (double)float2 into float1 CMP float2. */
9179 if (TYPE_PRECISION (newtype) < TYPE_PRECISION (TREE_TYPE (arg0)))
9180 return fold_build2_loc (loc, code, type,
9181 fold_convert_loc (loc, newtype, targ0),
9182 fold_convert_loc (loc, newtype, targ1));
9184 /* (-a) CMP (-b) -> b CMP a */
9185 if (TREE_CODE (arg0) == NEGATE_EXPR
9186 && TREE_CODE (arg1) == NEGATE_EXPR)
9187 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg1, 0),
9188 TREE_OPERAND (arg0, 0));
9190 if (TREE_CODE (arg1) == REAL_CST)
9192 REAL_VALUE_TYPE cst;
9193 cst = TREE_REAL_CST (arg1);
9195 /* (-a) CMP CST -> a swap(CMP) (-CST) */
9196 if (TREE_CODE (arg0) == NEGATE_EXPR)
9197 return fold_build2_loc (loc, swap_tree_comparison (code), type,
9198 TREE_OPERAND (arg0, 0),
9199 build_real (TREE_TYPE (arg1),
9200 real_value_negate (&cst)));
9202 /* IEEE doesn't distinguish +0 and -0 in comparisons. */
9203 /* a CMP (-0) -> a CMP 0 */
9204 if (REAL_VALUE_MINUS_ZERO (cst))
9205 return fold_build2_loc (loc, code, type, arg0,
9206 build_real (TREE_TYPE (arg1), dconst0));
9208 /* x != NaN is always true, other ops are always false. */
9209 if (REAL_VALUE_ISNAN (cst)
9210 && ! HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg1))))
9212 tem = (code == NE_EXPR) ? integer_one_node : integer_zero_node;
9213 return omit_one_operand_loc (loc, type, tem, arg0);
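/* Editorial example (not in the original source): with cst a quiet
   NaN, x != NAN folds to true and x < NAN folds to false, while
   omit_one_operand_loc preserves any side effects of the other
   operand.  */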
9216 /* Fold comparisons against infinity. */
9217 if (REAL_VALUE_ISINF (cst)
9218 && MODE_HAS_INFINITIES (TYPE_MODE (TREE_TYPE (arg1))))
9220 tem = fold_inf_compare (loc, code, type, arg0, arg1);
9221 if (tem != NULL_TREE)
9222 return tem;
9226 /* If this is a comparison of a real constant with a PLUS_EXPR
9227 or a MINUS_EXPR of a real constant, we can convert it into a
9228 comparison with a revised real constant, provided that
9229 unsafe_math_optimizations are enabled and no overflow occurs. */
9230 if (flag_unsafe_math_optimizations
9231 && TREE_CODE (arg1) == REAL_CST
9232 && (TREE_CODE (arg0) == PLUS_EXPR
9233 || TREE_CODE (arg0) == MINUS_EXPR)
9234 && TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
9235 && 0 != (tem = const_binop (TREE_CODE (arg0) == PLUS_EXPR
9236 ? MINUS_EXPR : PLUS_EXPR,
9237 arg1, TREE_OPERAND (arg0, 1)))
9238 && !TREE_OVERFLOW (tem))
9239 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0), tem);
9241 /* Likewise, we can simplify a comparison of a real constant with
9242 a MINUS_EXPR whose first operand is also a real constant, i.e.
9243 (c1 - x) < c2 becomes x > c1-c2. Reordering is allowed on
9244 floating-point types only if -fassociative-math is set. */
9245 if (flag_associative_math
9246 && TREE_CODE (arg1) == REAL_CST
9247 && TREE_CODE (arg0) == MINUS_EXPR
9248 && TREE_CODE (TREE_OPERAND (arg0, 0)) == REAL_CST
9249 && 0 != (tem = const_binop (MINUS_EXPR, TREE_OPERAND (arg0, 0),
9250 arg1))
9251 && !TREE_OVERFLOW (tem))
9252 return fold_build2_loc (loc, swap_tree_comparison (code), type,
9253 TREE_OPERAND (arg0, 1), tem);
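/* Editorial example (not in the original source): with
   -fassociative-math, (10.0 - x) < 2.0 becomes x > 8.0 via the
   swapped comparison above.  */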
9255 /* Fold comparisons against built-in math functions. */
9256 if (TREE_CODE (arg1) == REAL_CST
9257 && flag_unsafe_math_optimizations
9258 && ! flag_errno_math)
9260 enum built_in_function fcode = builtin_mathfn_code (arg0);
9262 if (fcode != END_BUILTINS)
9264 tem = fold_mathfn_compare (loc, fcode, code, type, arg0, arg1);
9265 if (tem != NULL_TREE)
9266 return tem;
9271 if (TREE_CODE (TREE_TYPE (arg0)) == INTEGER_TYPE
9272 && CONVERT_EXPR_P (arg0))
9274 /* If we are widening one operand of an integer comparison,
9275 see if the other operand is similarly being widened. Perhaps we
9276 can do the comparison in the narrower type. */
9277 tem = fold_widened_comparison (loc, code, type, arg0, arg1);
9278 if (tem)
9279 return tem;
9281 /* Or if we are changing signedness. */
9282 tem = fold_sign_changed_comparison (loc, code, type, arg0, arg1);
9283 if (tem)
9284 return tem;
9287 /* If this is comparing a constant with a MIN_EXPR or a MAX_EXPR of a
9288 constant, we can simplify it. */
9289 if (TREE_CODE (arg1) == INTEGER_CST
9290 && (TREE_CODE (arg0) == MIN_EXPR
9291 || TREE_CODE (arg0) == MAX_EXPR)
9292 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
9294 tem = optimize_minmax_comparison (loc, code, type, op0, op1);
9295 if (tem)
9296 return tem;
9299 /* Simplify comparison of something with itself. (For IEEE
9300 floating-point, we can only do some of these simplifications.) */
9301 if (operand_equal_p (arg0, arg1, 0))
9303 switch (code)
9305 case EQ_EXPR:
9306 if (! FLOAT_TYPE_P (TREE_TYPE (arg0))
9307 || ! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
9308 return constant_boolean_node (1, type);
9309 break;
9311 case GE_EXPR:
9312 case LE_EXPR:
9313 if (! FLOAT_TYPE_P (TREE_TYPE (arg0))
9314 || ! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
9315 return constant_boolean_node (1, type);
9316 return fold_build2_loc (loc, EQ_EXPR, type, arg0, arg1);
9318 case NE_EXPR:
9319 /* For NE, we can only do this simplification if the type is
9320 integral or we don't honor IEEE floating-point NaNs. */
9321 if (FLOAT_TYPE_P (TREE_TYPE (arg0))
9322 && HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
9323 break;
9324 /* ... fall through ... */
9325 case GT_EXPR:
9326 case LT_EXPR:
9327 return constant_boolean_node (0, type);
9328 default:
9329 gcc_unreachable ();
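/* Editorial note (not in the original source): for integral x this
   folds x == x and x <= x to true and x < x to false; for IEEE
   floats x <= x becomes x == x and x != x is left alone, since a
   NaN compares unequal even to itself.  */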
9333 /* If we are comparing an expression that just has comparisons
9334 of two integer values, arithmetic expressions of those comparisons,
9335 and constants, we can simplify it. There are only three cases
9336 to check: the two values can either be equal, the first can be
9337 greater, or the second can be greater. Fold the expression for
9338 those three values. Since each value must be 0 or 1, we have
9339 eight possibilities, each of which corresponds to the constant 0
9340 or 1 or one of the six possible comparisons.
9342 This handles common cases like (a > b) == 0 but also handles
9343 expressions like ((x > y) - (y > x)) > 0, which supposedly
9344 occur in macroized code. */
9346 if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg0) != INTEGER_CST)
9348 tree cval1 = 0, cval2 = 0;
9349 int save_p = 0;
9351 if (twoval_comparison_p (arg0, &cval1, &cval2, &save_p)
9352 /* Don't handle degenerate cases here; they should already
9353 have been handled anyway. */
9354 && cval1 != 0 && cval2 != 0
9355 && ! (TREE_CONSTANT (cval1) && TREE_CONSTANT (cval2))
9356 && TREE_TYPE (cval1) == TREE_TYPE (cval2)
9357 && INTEGRAL_TYPE_P (TREE_TYPE (cval1))
9358 && TYPE_MAX_VALUE (TREE_TYPE (cval1))
9359 && TYPE_MAX_VALUE (TREE_TYPE (cval2))
9360 && ! operand_equal_p (TYPE_MIN_VALUE (TREE_TYPE (cval1)),
9361 TYPE_MAX_VALUE (TREE_TYPE (cval2)), 0))
9363 tree maxval = TYPE_MAX_VALUE (TREE_TYPE (cval1));
9364 tree minval = TYPE_MIN_VALUE (TREE_TYPE (cval1));
9366 /* We can't just pass T to eval_subst in case cval1 or cval2
9367 was the same as ARG1. */
9369 tree high_result
9370 = fold_build2_loc (loc, code, type,
9371 eval_subst (loc, arg0, cval1, maxval,
9372 cval2, minval),
9373 arg1);
9374 tree equal_result
9375 = fold_build2_loc (loc, code, type,
9376 eval_subst (loc, arg0, cval1, maxval,
9377 cval2, maxval),
9378 arg1);
9379 tree low_result
9380 = fold_build2_loc (loc, code, type,
9381 eval_subst (loc, arg0, cval1, minval,
9382 cval2, maxval),
9383 arg1);
9385 /* All three of these results should be 0 or 1. Confirm they are.
9386 Then use those values to select the proper code to use. */
9388 if (TREE_CODE (high_result) == INTEGER_CST
9389 && TREE_CODE (equal_result) == INTEGER_CST
9390 && TREE_CODE (low_result) == INTEGER_CST)
9392 /* Make a 3-bit mask with the high-order bit being the
9393 value for `>', the next for '=', and the low for '<'. */
9394 switch ((integer_onep (high_result) * 4)
9395 + (integer_onep (equal_result) * 2)
9396 + integer_onep (low_result))
9398 case 0:
9399 /* Always false. */
9400 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
9401 case 1:
9402 code = LT_EXPR;
9403 break;
9404 case 2:
9405 code = EQ_EXPR;
9406 break;
9407 case 3:
9408 code = LE_EXPR;
9409 break;
9410 case 4:
9411 code = GT_EXPR;
9412 break;
9413 case 5:
9414 code = NE_EXPR;
9415 break;
9416 case 6:
9417 code = GE_EXPR;
9418 break;
9419 case 7:
9420 /* Always true. */
9421 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
9424 if (save_p)
9426 tem = save_expr (build2 (code, type, cval1, cval2));
9427 SET_EXPR_LOCATION (tem, loc);
9428 return tem;
9430 return fold_build2_loc (loc, code, type, cval1, cval2);
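/* Editorial example (not in the original source): for
   ((x > y) - (y > x)) > 0 the three substitutions yield 1, 0 and 0,
   giving mask value 4 in the switch above, so the whole expression
   folds to x > y.  */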
9435 /* We can fold X/C1 op C2 where C1 and C2 are integer constants
9436 into a single range test. */
9437 if ((TREE_CODE (arg0) == TRUNC_DIV_EXPR
9438 || TREE_CODE (arg0) == EXACT_DIV_EXPR)
9439 && TREE_CODE (arg1) == INTEGER_CST
9440 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
9441 && !integer_zerop (TREE_OPERAND (arg0, 1))
9442 && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1))
9443 && !TREE_OVERFLOW (arg1))
9445 tem = fold_div_compare (loc, code, type, arg0, arg1);
9446 if (tem != NULL_TREE)
9447 return tem;
9450 /* Fold ~X op ~Y as Y op X. */
9451 if (TREE_CODE (arg0) == BIT_NOT_EXPR
9452 && TREE_CODE (arg1) == BIT_NOT_EXPR)
9454 tree cmp_type = TREE_TYPE (TREE_OPERAND (arg0, 0));
9455 return fold_build2_loc (loc, code, type,
9456 fold_convert_loc (loc, cmp_type,
9457 TREE_OPERAND (arg1, 0)),
9458 TREE_OPERAND (arg0, 0));
9461 /* Fold ~X op C as X op' ~C, where op' is the swapped comparison. */
9462 if (TREE_CODE (arg0) == BIT_NOT_EXPR
9463 && TREE_CODE (arg1) == INTEGER_CST)
9465 tree cmp_type = TREE_TYPE (TREE_OPERAND (arg0, 0));
9466 return fold_build2_loc (loc, swap_tree_comparison (code), type,
9467 TREE_OPERAND (arg0, 0),
9468 fold_build1_loc (loc, BIT_NOT_EXPR, cmp_type,
9469 fold_convert_loc (loc, cmp_type, arg1)));
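/* Editorial examples (not in the original source): ~x < ~y becomes
   y < x, and ~x < 5 becomes x > ~5, i.e. x > -6 in two's
   complement.  */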
9472 return NULL_TREE;
9476 /* Subroutine of fold_binary. Optimize complex multiplications of the
9477 form z * conj(z), as pow(realpart(z),2) + pow(imagpart(z),2). The
9478 argument EXPR represents the expression "z" of type TYPE. */
9480 static tree
9481 fold_mult_zconjz (location_t loc, tree type, tree expr)
9483 tree itype = TREE_TYPE (type);
9484 tree rpart, ipart, tem;
9486 if (TREE_CODE (expr) == COMPLEX_EXPR)
9488 rpart = TREE_OPERAND (expr, 0);
9489 ipart = TREE_OPERAND (expr, 1);
9491 else if (TREE_CODE (expr) == COMPLEX_CST)
9493 rpart = TREE_REALPART (expr);
9494 ipart = TREE_IMAGPART (expr);
9496 else
9498 expr = save_expr (expr);
9499 rpart = fold_build1_loc (loc, REALPART_EXPR, itype, expr);
9500 ipart = fold_build1_loc (loc, IMAGPART_EXPR, itype, expr);
9503 rpart = save_expr (rpart);
9504 ipart = save_expr (ipart);
9505 tem = fold_build2_loc (loc, PLUS_EXPR, itype,
9506 fold_build2_loc (loc, MULT_EXPR, itype, rpart, rpart),
9507 fold_build2_loc (loc, MULT_EXPR, itype, ipart, ipart));
9508 return fold_build2_loc (loc, COMPLEX_EXPR, type, tem,
9509 build_zero_cst (itype));
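/* Editorial note (not in the original source): for z = r + i*I this
   produces (r*r + i*i) + 0*I, matching the identity
   z * conj(z) == |z|^2.  */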
9513 /* Subroutine of fold_binary. If P is the value of EXPR, computes
9514 power-of-two M and (arbitrary) N such that M divides (P-N). This condition
9515 guarantees that P and N have the same least significant log2(M) bits.
9516 N is not otherwise constrained. In particular, N is not normalized to
9517 0 <= N < M as is common. In general, the precise value of P is unknown.
9518 M is chosen as large as possible such that constant N can be determined.
9520 Returns M and sets *RESIDUE to N.
9522 If ALLOW_FUNC_ALIGN is true, do take functions' DECL_ALIGN_UNIT into
9523 account. This is not always possible due to PR 35705.
9526 static unsigned HOST_WIDE_INT
9527 get_pointer_modulus_and_residue (tree expr, unsigned HOST_WIDE_INT *residue,
9528 bool allow_func_align)
9530 enum tree_code code;
9532 *residue = 0;
9534 code = TREE_CODE (expr);
9535 if (code == ADDR_EXPR)
9537 unsigned int bitalign;
9538 get_object_alignment_1 (TREE_OPERAND (expr, 0), &bitalign, residue);
9539 *residue /= BITS_PER_UNIT;
9540 return bitalign / BITS_PER_UNIT;
9542 else if (code == POINTER_PLUS_EXPR)
9544 tree op0, op1;
9545 unsigned HOST_WIDE_INT modulus;
9546 enum tree_code inner_code;
9548 op0 = TREE_OPERAND (expr, 0);
9549 STRIP_NOPS (op0);
9550 modulus = get_pointer_modulus_and_residue (op0, residue,
9551 allow_func_align);
9553 op1 = TREE_OPERAND (expr, 1);
9554 STRIP_NOPS (op1);
9555 inner_code = TREE_CODE (op1);
9556 if (inner_code == INTEGER_CST)
9558 *residue += TREE_INT_CST_LOW (op1);
9559 return modulus;
9561 else if (inner_code == MULT_EXPR)
9563 op1 = TREE_OPERAND (op1, 1);
9564 if (TREE_CODE (op1) == INTEGER_CST)
9566 unsigned HOST_WIDE_INT align;
9568 /* Compute the greatest power-of-2 divisor of op1. */
9569 align = TREE_INT_CST_LOW (op1);
9570 align &= -align;
9572 /* If align is non-zero and less than modulus, replace
9573 modulus with align. If align is 0, then either op1 is 0
9574 or the greatest power-of-2 divisor of op1 doesn't fit in an
9575 unsigned HOST_WIDE_INT. In either case, no additional
9576 constraint is imposed. */
9577 if (align)
9578 modulus = MIN (modulus, align);
9580 return modulus;
9585 /* If we get here, we were unable to determine anything useful about the
9586 expression. */
9587 return 1;
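/* Editorial example (not in the original source): if EXPR is
   &a p+ i*4 and a is a 16-byte-aligned object, this returns M == 4
   with *RESIDUE == 0, i.e. the pointer value is known to be a
   multiple of 4.  */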
9590 /* Helper function for fold_vec_perm. Store elements of VECTOR_CST or
9591 CONSTRUCTOR ARG into array ELTS and return true if successful. */
9593 static bool
9594 vec_cst_ctor_to_array (tree arg, tree *elts)
9596 unsigned int nelts = TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg)), i;
9598 if (TREE_CODE (arg) == VECTOR_CST)
9600 for (i = 0; i < VECTOR_CST_NELTS (arg); ++i)
9601 elts[i] = VECTOR_CST_ELT (arg, i);
9603 else if (TREE_CODE (arg) == CONSTRUCTOR)
9605 constructor_elt *elt;
9607 FOR_EACH_VEC_ELT (constructor_elt, CONSTRUCTOR_ELTS (arg), i, elt)
9608 if (i >= nelts)
9609 return false;
9610 else
9611 elts[i] = elt->value;
9613 else
9614 return false;
9615 for (; i < nelts; i++)
9616 elts[i]
9617 = fold_convert (TREE_TYPE (TREE_TYPE (arg)), integer_zero_node);
9618 return true;
9621 /* Attempt to fold vector permutation of ARG0 and ARG1 vectors using SEL
9622 selector. Return the folded VECTOR_CST or CONSTRUCTOR if successful,
9623 NULL_TREE otherwise. */
9625 static tree
9626 fold_vec_perm (tree type, tree arg0, tree arg1, const unsigned char *sel)
9628 unsigned int nelts = TYPE_VECTOR_SUBPARTS (type), i;
9629 tree *elts;
9630 bool need_ctor = false;
9632 gcc_assert (TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0)) == nelts
9633 && TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg1)) == nelts);
9634 if (TREE_TYPE (TREE_TYPE (arg0)) != TREE_TYPE (type)
9635 || TREE_TYPE (TREE_TYPE (arg1)) != TREE_TYPE (type))
9636 return NULL_TREE;
9638 elts = XALLOCAVEC (tree, nelts * 3);
9639 if (!vec_cst_ctor_to_array (arg0, elts)
9640 || !vec_cst_ctor_to_array (arg1, elts + nelts))
9641 return NULL_TREE;
9643 for (i = 0; i < nelts; i++)
9645 if (!CONSTANT_CLASS_P (elts[sel[i]]))
9646 need_ctor = true;
9647 elts[i + 2 * nelts] = unshare_expr (elts[sel[i]]);
9650 if (need_ctor)
9652 VEC(constructor_elt,gc) *v = VEC_alloc (constructor_elt, gc, nelts);
9653 for (i = 0; i < nelts; i++)
9654 CONSTRUCTOR_APPEND_ELT (v, NULL_TREE, elts[2 * nelts + i]);
9655 return build_constructor (type, v);
9657 else
9658 return build_vector (type, &elts[2 * nelts]);
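/* Editorial example (not in the original source): for two V4SI
   constants and sel = {0, 5, 2, 7}, elements 0 and 2 are taken from
   ARG0 and elements 1 and 3 from ARG1, yielding a new
   VECTOR_CST.  */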
9661 /* Try to fold a pointer difference of type TYPE between two address expressions of
9662 array references AREF0 and AREF1 using location LOC. Return a
9663 simplified expression for the difference or NULL_TREE. */
9665 static tree
9666 fold_addr_of_array_ref_difference (location_t loc, tree type,
9667 tree aref0, tree aref1)
9669 tree base0 = TREE_OPERAND (aref0, 0);
9670 tree base1 = TREE_OPERAND (aref1, 0);
9671 tree base_offset = build_int_cst (type, 0);
9673 /* If the bases are array references as well, recurse. If the bases
9674 are pointer indirections compute the difference of the pointers.
9675 If the bases are equal, we are set. */
9676 if ((TREE_CODE (base0) == ARRAY_REF
9677 && TREE_CODE (base1) == ARRAY_REF
9678 && (base_offset
9679 = fold_addr_of_array_ref_difference (loc, type, base0, base1)))
9680 || (INDIRECT_REF_P (base0)
9681 && INDIRECT_REF_P (base1)
9682 && (base_offset = fold_binary_loc (loc, MINUS_EXPR, type,
9683 TREE_OPERAND (base0, 0),
9684 TREE_OPERAND (base1, 0))))
9685 || operand_equal_p (base0, base1, 0))
9687 tree op0 = fold_convert_loc (loc, type, TREE_OPERAND (aref0, 1));
9688 tree op1 = fold_convert_loc (loc, type, TREE_OPERAND (aref1, 1));
9689 tree esz = fold_convert_loc (loc, type, array_ref_element_size (aref0));
9690 tree diff = build2 (MINUS_EXPR, type, op0, op1);
9691 return fold_build2_loc (loc, PLUS_EXPR, type,
9692 base_offset,
9693 fold_build2_loc (loc, MULT_EXPR, type,
9694 diff, esz));
9696 return NULL_TREE;
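/* Editorial example (not in the original source): &a[i] - &a[j]
   reaches the operand_equal_p arm with base a for both sides, so
   the result is 0 + (i - j) * sizeof (a[0]).  */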
9699 /* If the real or vector real constant CST of type TYPE has an exact
9700 inverse, return it, else return NULL. */
9702 static tree
9703 exact_inverse (tree type, tree cst)
9705 REAL_VALUE_TYPE r;
9706 tree unit_type, *elts;
9707 enum machine_mode mode;
9708 unsigned vec_nelts, i;
9710 switch (TREE_CODE (cst))
9712 case REAL_CST:
9713 r = TREE_REAL_CST (cst);
9715 if (exact_real_inverse (TYPE_MODE (type), &r))
9716 return build_real (type, r);
9718 return NULL_TREE;
9720 case VECTOR_CST:
9721 vec_nelts = VECTOR_CST_NELTS (cst);
9722 elts = XALLOCAVEC (tree, vec_nelts);
9723 unit_type = TREE_TYPE (type);
9724 mode = TYPE_MODE (unit_type);
9726 for (i = 0; i < vec_nelts; i++)
9728 r = TREE_REAL_CST (VECTOR_CST_ELT (cst, i));
9729 if (!exact_real_inverse (mode, &r))
9730 return NULL_TREE;
9731 elts[i] = build_real (unit_type, r);
9734 return build_vector (type, elts);
9736 default:
9737 return NULL_TREE;
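/* Editorial example (not in the original source): 4.0 yields 0.25,
   an exact power of two, while 3.0 returns NULL_TREE because 1/3 is
   not exactly representable in binary floating point.  */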
9741 /* Fold a binary expression of code CODE and type TYPE with operands
9742 OP0 and OP1. LOC is the location of the resulting expression.
9743 Return the folded expression if folding is successful. Otherwise,
9744 return NULL_TREE. */
9746 tree
9747 fold_binary_loc (location_t loc,
9748 enum tree_code code, tree type, tree op0, tree op1)
9750 enum tree_code_class kind = TREE_CODE_CLASS (code);
9751 tree arg0, arg1, tem;
9752 tree t1 = NULL_TREE;
9753 bool strict_overflow_p;
9755 gcc_assert (IS_EXPR_CODE_CLASS (kind)
9756 && TREE_CODE_LENGTH (code) == 2
9757 && op0 != NULL_TREE
9758 && op1 != NULL_TREE);
9760 arg0 = op0;
9761 arg1 = op1;
9763 /* Strip any conversions that don't change the mode. This is
9764 safe for every expression, except for a comparison expression
9765 because its signedness is derived from its operands. So, in
9766 the latter case, only strip conversions that don't change the
9767 signedness. MIN_EXPR/MAX_EXPR also need signedness of arguments
9768 preserved.
9770 Note that this is done as an internal manipulation within the
9771 constant folder, in order to find the simplest representation
9772 of the arguments so that their form can be studied. In any
9773 cases, the appropriate type conversions should be put back in
9774 the tree that will get out of the constant folder. */
9776 if (kind == tcc_comparison || code == MIN_EXPR || code == MAX_EXPR)
9778 STRIP_SIGN_NOPS (arg0);
9779 STRIP_SIGN_NOPS (arg1);
9781 else
9783 STRIP_NOPS (arg0);
9784 STRIP_NOPS (arg1);
9787 /* Note that TREE_CONSTANT isn't enough: static var addresses are
9788 constant but we can't do arithmetic on them. */
9789 if ((TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
9790 || (TREE_CODE (arg0) == REAL_CST && TREE_CODE (arg1) == REAL_CST)
9791 || (TREE_CODE (arg0) == FIXED_CST && TREE_CODE (arg1) == FIXED_CST)
9792 || (TREE_CODE (arg0) == FIXED_CST && TREE_CODE (arg1) == INTEGER_CST)
9793 || (TREE_CODE (arg0) == COMPLEX_CST && TREE_CODE (arg1) == COMPLEX_CST)
9794 || (TREE_CODE (arg0) == VECTOR_CST && TREE_CODE (arg1) == VECTOR_CST))
9796 if (kind == tcc_binary)
9798 /* Make sure type and arg0 have the same saturating flag. */
9799 gcc_assert (TYPE_SATURATING (type)
9800 == TYPE_SATURATING (TREE_TYPE (arg0)));
9801 tem = const_binop (code, arg0, arg1);
9803 else if (kind == tcc_comparison)
9804 tem = fold_relational_const (code, type, arg0, arg1);
9805 else
9806 tem = NULL_TREE;
9808 if (tem != NULL_TREE)
9810 if (TREE_TYPE (tem) != type)
9811 tem = fold_convert_loc (loc, type, tem);
9812 return tem;
9816 /* If this is a commutative operation, and ARG0 is a constant, move it
9817 to ARG1 to reduce the number of tests below. */
9818 if (commutative_tree_code (code)
9819 && tree_swap_operands_p (arg0, arg1, true))
9820 return fold_build2_loc (loc, code, type, op1, op0);
9822 /* ARG0 is the first operand of EXPR, and ARG1 is the second operand.
9824 First check for cases where an arithmetic operation is applied to a
9825 compound, conditional, or comparison operation. Push the arithmetic
9826 operation inside the compound or conditional to see if any folding
9827 can then be done. Convert comparison to conditional for this purpose.
9828 This also optimizes non-constant cases that used to be done in
9829 expand_expr.
9831 Before we do that, see if this is a BIT_AND_EXPR or a BIT_IOR_EXPR
9832 where one of the operands is a comparison and the other is a comparison, a
9833 BIT_AND_EXPR with the constant 1, or a truth value. In that case, the
9834 code below would make the expression more complex. Change it to a
9835 TRUTH_{AND,OR}_EXPR. Likewise, convert a similar NE_EXPR to
9836 TRUTH_XOR_EXPR and an EQ_EXPR to the inversion of a TRUTH_XOR_EXPR. */
9838 if ((code == BIT_AND_EXPR || code == BIT_IOR_EXPR
9839 || code == EQ_EXPR || code == NE_EXPR)
9840 && ((truth_value_p (TREE_CODE (arg0))
9841 && (truth_value_p (TREE_CODE (arg1))
9842 || (TREE_CODE (arg1) == BIT_AND_EXPR
9843 && integer_onep (TREE_OPERAND (arg1, 1)))))
9844 || (truth_value_p (TREE_CODE (arg1))
9845 && (truth_value_p (TREE_CODE (arg0))
9846 || (TREE_CODE (arg0) == BIT_AND_EXPR
9847 && integer_onep (TREE_OPERAND (arg0, 1)))))))
9849 tem = fold_build2_loc (loc, code == BIT_AND_EXPR ? TRUTH_AND_EXPR
9850 : code == BIT_IOR_EXPR ? TRUTH_OR_EXPR
9851 : TRUTH_XOR_EXPR,
9852 boolean_type_node,
9853 fold_convert_loc (loc, boolean_type_node, arg0),
9854 fold_convert_loc (loc, boolean_type_node, arg1));
9856 if (code == EQ_EXPR)
9857 tem = invert_truthvalue_loc (loc, tem);
9859 return fold_convert_loc (loc, type, tem);
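/* Editorial examples (not in the original source): (a < b) & (c < d)
   becomes TRUTH_AND_EXPR, and (a < b) == (c < d) becomes the
   inversion of a TRUTH_XOR_EXPR on the two comparisons.  */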
9862 if (TREE_CODE_CLASS (code) == tcc_binary
9863 || TREE_CODE_CLASS (code) == tcc_comparison)
9865 if (TREE_CODE (arg0) == COMPOUND_EXPR)
9867 tem = fold_build2_loc (loc, code, type,
9868 fold_convert_loc (loc, TREE_TYPE (op0),
9869 TREE_OPERAND (arg0, 1)), op1);
9870 return build2_loc (loc, COMPOUND_EXPR, type, TREE_OPERAND (arg0, 0),
9871 tem);
9873 if (TREE_CODE (arg1) == COMPOUND_EXPR
9874 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
9876 tem = fold_build2_loc (loc, code, type, op0,
9877 fold_convert_loc (loc, TREE_TYPE (op1),
9878 TREE_OPERAND (arg1, 1)));
9879 return build2_loc (loc, COMPOUND_EXPR, type, TREE_OPERAND (arg1, 0),
9880 tem);
9883 if (TREE_CODE (arg0) == COND_EXPR || COMPARISON_CLASS_P (arg0))
9885 tem = fold_binary_op_with_conditional_arg (loc, code, type, op0, op1,
9886 arg0, arg1,
9887 /*cond_first_p=*/1);
9888 if (tem != NULL_TREE)
9889 return tem;
9892 if (TREE_CODE (arg1) == COND_EXPR || COMPARISON_CLASS_P (arg1))
9894 tem = fold_binary_op_with_conditional_arg (loc, code, type, op0, op1,
9895 arg1, arg0,
9896 /*cond_first_p=*/0);
9897 if (tem != NULL_TREE)
9898 return tem;
9902 switch (code)
9904 case MEM_REF:
9905 /* MEM[&MEM[p, CST1], CST2] -> MEM[p, CST1 + CST2]. */
9906 if (TREE_CODE (arg0) == ADDR_EXPR
9907 && TREE_CODE (TREE_OPERAND (arg0, 0)) == MEM_REF)
9909 tree iref = TREE_OPERAND (arg0, 0);
9910 return fold_build2 (MEM_REF, type,
9911 TREE_OPERAND (iref, 0),
9912 int_const_binop (PLUS_EXPR, arg1,
9913 TREE_OPERAND (iref, 1)));
9916 /* MEM[&a.b, CST2] -> MEM[&a, offsetof (a, b) + CST2]. */
9917 if (TREE_CODE (arg0) == ADDR_EXPR
9918 && handled_component_p (TREE_OPERAND (arg0, 0)))
9920 tree base;
9921 HOST_WIDE_INT coffset;
9922 base = get_addr_base_and_unit_offset (TREE_OPERAND (arg0, 0),
9923 &coffset);
9924 if (!base)
9925 return NULL_TREE;
9926 return fold_build2 (MEM_REF, type,
9927 build_fold_addr_expr (base),
9928 int_const_binop (PLUS_EXPR, arg1,
9929 size_int (coffset)));
9932 return NULL_TREE;
9934 case POINTER_PLUS_EXPR:
9935 /* 0 +p index -> (type)index */
9936 if (integer_zerop (arg0))
9937 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
9939 /* PTR +p 0 -> PTR */
9940 if (integer_zerop (arg1))
9941 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
9943 /* INT +p INT -> (PTR)(INT + INT). Stripping types allows for this. */
9944 if (INTEGRAL_TYPE_P (TREE_TYPE (arg1))
9945 && INTEGRAL_TYPE_P (TREE_TYPE (arg0)))
9946 return fold_convert_loc (loc, type,
9947 fold_build2_loc (loc, PLUS_EXPR, sizetype,
9948 fold_convert_loc (loc, sizetype,
9949 arg1),
9950 fold_convert_loc (loc, sizetype,
9951 arg0)));
9953 /* (PTR +p B) +p A -> PTR +p (B + A) */
9954 if (TREE_CODE (arg0) == POINTER_PLUS_EXPR)
9956 tree inner;
9957 tree arg01 = fold_convert_loc (loc, sizetype, TREE_OPERAND (arg0, 1));
9958 tree arg00 = TREE_OPERAND (arg0, 0);
9959 inner = fold_build2_loc (loc, PLUS_EXPR, sizetype,
9960 arg01, fold_convert_loc (loc, sizetype, arg1));
9961 return fold_convert_loc (loc, type,
9962 fold_build_pointer_plus_loc (loc,
9963 arg00, inner));
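/* Editorial example (not in the original source): (p p+ 4) p+ 8
   reassociates to p p+ 12 once the inner sum of offsets is
   folded.  */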
9966 /* PTR_CST +p CST -> CST1, i.e. fold the two constants into one. */
9967 if (TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
9968 return fold_build2_loc (loc, PLUS_EXPR, type, arg0,
9969 fold_convert_loc (loc, type, arg1));
9971 /* Try replacing &a[i1] +p c * i2 with &a[i1 + i2], if c is the step
9972 of the array. The loop optimizer sometimes produces this type of
9973 expression. */
9974 if (TREE_CODE (arg0) == ADDR_EXPR)
9976 tem = try_move_mult_to_index (loc, arg0,
9977 fold_convert_loc (loc,
9978 ssizetype, arg1));
9979 if (tem)
9980 return fold_convert_loc (loc, type, tem);
9983 return NULL_TREE;
9985 case PLUS_EXPR:
9986 /* A + (-B) -> A - B */
9987 if (TREE_CODE (arg1) == NEGATE_EXPR)
9988 return fold_build2_loc (loc, MINUS_EXPR, type,
9989 fold_convert_loc (loc, type, arg0),
9990 fold_convert_loc (loc, type,
9991 TREE_OPERAND (arg1, 0)));
9992 /* (-A) + B -> B - A */
9993 if (TREE_CODE (arg0) == NEGATE_EXPR
9994 && reorder_operands_p (TREE_OPERAND (arg0, 0), arg1))
9995 return fold_build2_loc (loc, MINUS_EXPR, type,
9996 fold_convert_loc (loc, type, arg1),
9997 fold_convert_loc (loc, type,
9998 TREE_OPERAND (arg0, 0)));
10000 if (INTEGRAL_TYPE_P (type))
10002 /* Convert ~A + 1 to -A. */
10003 if (TREE_CODE (arg0) == BIT_NOT_EXPR
10004 && integer_onep (arg1))
10005 return fold_build1_loc (loc, NEGATE_EXPR, type,
10006 fold_convert_loc (loc, type,
10007 TREE_OPERAND (arg0, 0)));
10009 /* ~X + X is -1. */
10010 if (TREE_CODE (arg0) == BIT_NOT_EXPR
10011 && !TYPE_OVERFLOW_TRAPS (type))
10013 tree tem = TREE_OPERAND (arg0, 0);
10015 STRIP_NOPS (tem);
10016 if (operand_equal_p (tem, arg1, 0))
10018 t1 = build_int_cst_type (type, -1);
10019 return omit_one_operand_loc (loc, type, t1, arg1);
10023 /* X + ~X is -1. */
10024 if (TREE_CODE (arg1) == BIT_NOT_EXPR
10025 && !TYPE_OVERFLOW_TRAPS (type))
10027 tree tem = TREE_OPERAND (arg1, 0);
10029 STRIP_NOPS (tem);
10030 if (operand_equal_p (arg0, tem, 0))
10032 t1 = build_int_cst_type (type, -1);
10033 return omit_one_operand_loc (loc, type, t1, arg0);
10037 /* X + (X / CST) * -CST is X % CST. */
10038 if (TREE_CODE (arg1) == MULT_EXPR
10039 && TREE_CODE (TREE_OPERAND (arg1, 0)) == TRUNC_DIV_EXPR
10040 && operand_equal_p (arg0,
10041 TREE_OPERAND (TREE_OPERAND (arg1, 0), 0), 0))
10043 tree cst0 = TREE_OPERAND (TREE_OPERAND (arg1, 0), 1);
10044 tree cst1 = TREE_OPERAND (arg1, 1);
10045 tree sum = fold_binary_loc (loc, PLUS_EXPR, TREE_TYPE (cst1),
10046 cst1, cst0);
10047 if (sum && integer_zerop (sum))
10048 return fold_convert_loc (loc, type,
10049 fold_build2_loc (loc, TRUNC_MOD_EXPR,
10050 TREE_TYPE (arg0), arg0,
10051 cst0));
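/* Editorial example (not in the original source): x + (x / 16) * -16
   matches here with cst0 == 16 and cst1 == -16, whose sum is zero,
   so the whole expression folds to x % 16.  */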
10055 /* Handle (A1 * C1) + (A2 * C2) with A1, A2 or C1, C2 being the same or
10056 one. Make sure the type is not saturating and has the signedness of
10057 the stripped operands, as fold_plusminus_mult_expr will re-associate.
10058 ??? The latter condition should use TYPE_OVERFLOW_* flags instead. */
10059 if ((TREE_CODE (arg0) == MULT_EXPR
10060 || TREE_CODE (arg1) == MULT_EXPR)
10061 && !TYPE_SATURATING (type)
10062 && TYPE_UNSIGNED (type) == TYPE_UNSIGNED (TREE_TYPE (arg0))
10063 && TYPE_UNSIGNED (type) == TYPE_UNSIGNED (TREE_TYPE (arg1))
10064 && (!FLOAT_TYPE_P (type) || flag_associative_math))
10066 tree tem = fold_plusminus_mult_expr (loc, code, type, arg0, arg1);
10067 if (tem)
10068 return tem;
10071 if (! FLOAT_TYPE_P (type))
10073 if (integer_zerop (arg1))
10074 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
10076 /* If we are adding two BIT_AND_EXPR's, both of which are and'ing
10077 with a constant, and the two constants have no bits in common,
10078 we should treat this as a BIT_IOR_EXPR since this may produce more
10079 simplifications. */
10080 if (TREE_CODE (arg0) == BIT_AND_EXPR
10081 && TREE_CODE (arg1) == BIT_AND_EXPR
10082 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
10083 && TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
10084 && integer_zerop (const_binop (BIT_AND_EXPR,
10085 TREE_OPERAND (arg0, 1),
10086 TREE_OPERAND (arg1, 1))))
10088 code = BIT_IOR_EXPR;
10089 goto bit_ior;
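/* Editorial example (not in the original source): (a & 0xF0) + (b & 0x0F)
   has disjoint masks, so it is re-dispatched above as
   (a & 0xF0) | (b & 0x0F).  */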
10092 /* Reassociate (plus (plus (mult) (foo)) (mult)) as
10093 (plus (plus (mult) (mult)) (foo)) so that we can
10094 take advantage of the factoring cases below. */
10095 if (TYPE_OVERFLOW_WRAPS (type)
10096 && (((TREE_CODE (arg0) == PLUS_EXPR
10097 || TREE_CODE (arg0) == MINUS_EXPR)
10098 && TREE_CODE (arg1) == MULT_EXPR)
10099 || ((TREE_CODE (arg1) == PLUS_EXPR
10100 || TREE_CODE (arg1) == MINUS_EXPR)
10101 && TREE_CODE (arg0) == MULT_EXPR)))
10103 tree parg0, parg1, parg, marg;
10104 enum tree_code pcode;
10106 if (TREE_CODE (arg1) == MULT_EXPR)
10107 parg = arg0, marg = arg1;
10108 else
10109 parg = arg1, marg = arg0;
10110 pcode = TREE_CODE (parg);
10111 parg0 = TREE_OPERAND (parg, 0);
10112 parg1 = TREE_OPERAND (parg, 1);
10113 STRIP_NOPS (parg0);
10114 STRIP_NOPS (parg1);
10116 if (TREE_CODE (parg0) == MULT_EXPR
10117 && TREE_CODE (parg1) != MULT_EXPR)
10118 return fold_build2_loc (loc, pcode, type,
10119 fold_build2_loc (loc, PLUS_EXPR, type,
10120 fold_convert_loc (loc, type,
10121 parg0),
10122 fold_convert_loc (loc, type,
10123 marg)),
10124 fold_convert_loc (loc, type, parg1));
10125 if (TREE_CODE (parg0) != MULT_EXPR
10126 && TREE_CODE (parg1) == MULT_EXPR)
10127 return
10128 fold_build2_loc (loc, PLUS_EXPR, type,
10129 fold_convert_loc (loc, type, parg0),
10130 fold_build2_loc (loc, pcode, type,
10131 fold_convert_loc (loc, type, marg),
10132 fold_convert_loc (loc, type,
10133 parg1)));
10136 else
10138 /* See if ARG1 is zero and X + ARG1 reduces to X. */
10139 if (fold_real_zero_addition_p (TREE_TYPE (arg0), arg1, 0))
10140 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
10142 /* Likewise if the operands are reversed. */
10143 if (fold_real_zero_addition_p (TREE_TYPE (arg1), arg0, 0))
10144 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
10146 /* Convert X + -C into X - C. */
10147 if (TREE_CODE (arg1) == REAL_CST
10148 && REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg1)))
10150 tem = fold_negate_const (arg1, type);
10151 if (!TREE_OVERFLOW (arg1) || !flag_trapping_math)
10152 return fold_build2_loc (loc, MINUS_EXPR, type,
10153 fold_convert_loc (loc, type, arg0),
10154 fold_convert_loc (loc, type, tem));
10157 /* Fold __complex__ ( x, 0 ) + __complex__ ( 0, y )
10158 to __complex__ ( x, y ). This is not the same for SNaNs or
10159 if signed zeros are involved. */
10160 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
10161 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
10162 && COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0)))
10164 tree rtype = TREE_TYPE (TREE_TYPE (arg0));
10165 tree arg0r = fold_unary_loc (loc, REALPART_EXPR, rtype, arg0);
10166 tree arg0i = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg0);
10167 bool arg0rz = false, arg0iz = false;
10168 if ((arg0r && (arg0rz = real_zerop (arg0r)))
10169 || (arg0i && (arg0iz = real_zerop (arg0i))))
10171 tree arg1r = fold_unary_loc (loc, REALPART_EXPR, rtype, arg1);
10172 tree arg1i = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg1);
10173 if (arg0rz && arg1i && real_zerop (arg1i))
10175 tree rp = arg1r ? arg1r
10176 : build1 (REALPART_EXPR, rtype, arg1);
10177 tree ip = arg0i ? arg0i
10178 : build1 (IMAGPART_EXPR, rtype, arg0);
10179 return fold_build2_loc (loc, COMPLEX_EXPR, type, rp, ip);
10181 else if (arg0iz && arg1r && real_zerop (arg1r))
10183 tree rp = arg0r ? arg0r
10184 : build1 (REALPART_EXPR, rtype, arg0);
10185 tree ip = arg1i ? arg1i
10186 : build1 (IMAGPART_EXPR, rtype, arg1);
10187 return fold_build2_loc (loc, COMPLEX_EXPR, type, rp, ip);
10192 if (flag_unsafe_math_optimizations
10193 && (TREE_CODE (arg0) == RDIV_EXPR || TREE_CODE (arg0) == MULT_EXPR)
10194 && (TREE_CODE (arg1) == RDIV_EXPR || TREE_CODE (arg1) == MULT_EXPR)
10195 && (tem = distribute_real_division (loc, code, type, arg0, arg1)))
10196 return tem;
10198 /* Convert x+x into x*2.0. */
10199 if (operand_equal_p (arg0, arg1, 0)
10200 && SCALAR_FLOAT_TYPE_P (type))
10201 return fold_build2_loc (loc, MULT_EXPR, type, arg0,
10202 build_real (type, dconst2));
10204 /* Convert a + (b*c + d*e) into (a + b*c) + d*e.
10205 We associate floats only if the user has specified
10206 -fassociative-math. */
10207 if (flag_associative_math
10208 && TREE_CODE (arg1) == PLUS_EXPR
10209 && TREE_CODE (arg0) != MULT_EXPR)
10211 tree tree10 = TREE_OPERAND (arg1, 0);
10212 tree tree11 = TREE_OPERAND (arg1, 1);
10213 if (TREE_CODE (tree11) == MULT_EXPR
10214 && TREE_CODE (tree10) == MULT_EXPR)
10216 tree tree0;
10217 tree0 = fold_build2_loc (loc, PLUS_EXPR, type, arg0, tree10);
10218 return fold_build2_loc (loc, PLUS_EXPR, type, tree0, tree11);
10221 /* Convert (b*c + d*e) + a into b*c + (d*e +a).
10222 We associate floats only if the user has specified
10223 -fassociative-math. */
10224 if (flag_associative_math
10225 && TREE_CODE (arg0) == PLUS_EXPR
10226 && TREE_CODE (arg1) != MULT_EXPR)
10228 tree tree00 = TREE_OPERAND (arg0, 0);
10229 tree tree01 = TREE_OPERAND (arg0, 1);
10230 if (TREE_CODE (tree01) == MULT_EXPR
10231 && TREE_CODE (tree00) == MULT_EXPR)
10233 tree tree0;
10234 tree0 = fold_build2_loc (loc, PLUS_EXPR, type, tree01, arg1);
10235 return fold_build2_loc (loc, PLUS_EXPR, type, tree00, tree0);
10240 bit_rotate:
10241 /* (A << C1) + (A >> C2) if A is unsigned and C1+C2 is the size of A
10242 is a rotate of A by C1 bits. */
10243 /* (A << B) + (A >> (Z - B)) if A is unsigned and Z is the size of A
10244 is a rotate of A by B bits. */
10246 enum tree_code code0, code1;
10247 tree rtype;
10248 code0 = TREE_CODE (arg0);
10249 code1 = TREE_CODE (arg1);
10250 if (((code0 == RSHIFT_EXPR && code1 == LSHIFT_EXPR)
10251 || (code1 == RSHIFT_EXPR && code0 == LSHIFT_EXPR))
10252 && operand_equal_p (TREE_OPERAND (arg0, 0),
10253 TREE_OPERAND (arg1, 0), 0)
10254 && (rtype = TREE_TYPE (TREE_OPERAND (arg0, 0)),
10255 TYPE_UNSIGNED (rtype))
10256 /* Only create rotates in complete modes. Other cases are not
10257 expanded properly. */
10258 && TYPE_PRECISION (rtype) == GET_MODE_PRECISION (TYPE_MODE (rtype)))
10260 tree tree01, tree11;
10261 enum tree_code code01, code11;
10263 tree01 = TREE_OPERAND (arg0, 1);
10264 tree11 = TREE_OPERAND (arg1, 1);
10265 STRIP_NOPS (tree01);
10266 STRIP_NOPS (tree11);
10267 code01 = TREE_CODE (tree01);
10268 code11 = TREE_CODE (tree11);
10269 if (code01 == INTEGER_CST
10270 && code11 == INTEGER_CST
10271 && TREE_INT_CST_HIGH (tree01) == 0
10272 && TREE_INT_CST_HIGH (tree11) == 0
10273 && ((TREE_INT_CST_LOW (tree01) + TREE_INT_CST_LOW (tree11))
10274 == TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg0, 0)))))
10276 tem = build2_loc (loc, LROTATE_EXPR,
10277 TREE_TYPE (TREE_OPERAND (arg0, 0)),
10278 TREE_OPERAND (arg0, 0),
10279 code0 == LSHIFT_EXPR ? tree01 : tree11);
10280 return fold_convert_loc (loc, type, tem);
10282 else if (code11 == MINUS_EXPR)
10284 tree tree110, tree111;
10285 tree110 = TREE_OPERAND (tree11, 0);
10286 tree111 = TREE_OPERAND (tree11, 1);
10287 STRIP_NOPS (tree110);
10288 STRIP_NOPS (tree111);
10289 if (TREE_CODE (tree110) == INTEGER_CST
10290 && 0 == compare_tree_int (tree110,
10291 TYPE_PRECISION
10292 (TREE_TYPE (TREE_OPERAND
10293 (arg0, 0))))
10294 && operand_equal_p (tree01, tree111, 0))
10295 return
10296 fold_convert_loc (loc, type,
10297 build2 ((code0 == LSHIFT_EXPR
10298 ? LROTATE_EXPR
10299 : RROTATE_EXPR),
10300 TREE_TYPE (TREE_OPERAND (arg0, 0)),
10301 TREE_OPERAND (arg0, 0), tree01));
10303 else if (code01 == MINUS_EXPR)
10305 tree tree010, tree011;
10306 tree010 = TREE_OPERAND (tree01, 0);
10307 tree011 = TREE_OPERAND (tree01, 1);
10308 STRIP_NOPS (tree010);
10309 STRIP_NOPS (tree011);
10310 if (TREE_CODE (tree010) == INTEGER_CST
10311 && 0 == compare_tree_int (tree010,
10312 TYPE_PRECISION
10313 (TREE_TYPE (TREE_OPERAND
10314 (arg0, 0))))
10315 && operand_equal_p (tree11, tree011, 0))
10316 return fold_convert_loc
10317 (loc, type,
10318 build2 ((code0 != LSHIFT_EXPR
10319 ? LROTATE_EXPR
10320 : RROTATE_EXPR),
10321 TREE_TYPE (TREE_OPERAND (arg0, 0)),
10322 TREE_OPERAND (arg0, 0), tree11));
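/* Editorial example (not in the original source): for unsigned
   32-bit x, (x << n) + (x >> (32 - n)) matches the MINUS_EXPR arm
   above and becomes a left rotate of x by n bits.  */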
10327 associate:
10328 /* In most languages, we can't associate operations on floats through
10329 parentheses. Rather than remember where the parentheses were, we
10330 don't associate floats at all, unless the user has specified
10331 -fassociative-math.
10332 And, we need to make sure type is not saturating. */
10334 if ((! FLOAT_TYPE_P (type) || flag_associative_math)
10335 && !TYPE_SATURATING (type))
10337 tree var0, con0, lit0, minus_lit0;
10338 tree var1, con1, lit1, minus_lit1;
10339 bool ok = true;
10341 /* Split both trees into variables, constants, and literals. Then
10342 associate each group together, the constants with literals,
10343 then the result with variables. This increases the chances of
10344 literals being recombined later and of generating relocatable
10345 expressions for the sum of a constant and literal. */
10346 var0 = split_tree (arg0, code, &con0, &lit0, &minus_lit0, 0);
10347 var1 = split_tree (arg1, code, &con1, &lit1, &minus_lit1,
10348 code == MINUS_EXPR);
10350 /* Recombine MINUS_EXPR operands by using PLUS_EXPR. */
10351 if (code == MINUS_EXPR)
10352 code = PLUS_EXPR;
10354 /* With undefined overflow we can only associate constants with one
10355 variable, and constants whose association doesn't overflow. */
10356 if ((POINTER_TYPE_P (type) && POINTER_TYPE_OVERFLOW_UNDEFINED)
10357 || (INTEGRAL_TYPE_P (type) && !TYPE_OVERFLOW_WRAPS (type)))
10359 if (var0 && var1)
10361 tree tmp0 = var0;
10362 tree tmp1 = var1;
10364 if (TREE_CODE (tmp0) == NEGATE_EXPR)
10365 tmp0 = TREE_OPERAND (tmp0, 0);
10366 if (CONVERT_EXPR_P (tmp0)
10367 && INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (tmp0, 0)))
10368 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (tmp0, 0)))
10369 <= TYPE_PRECISION (type)))
10370 tmp0 = TREE_OPERAND (tmp0, 0);
10371 if (TREE_CODE (tmp1) == NEGATE_EXPR)
10372 tmp1 = TREE_OPERAND (tmp1, 0);
10373 if (CONVERT_EXPR_P (tmp1)
10374 && INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (tmp1, 0)))
10375 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (tmp1, 0)))
10376 <= TYPE_PRECISION (type)))
10377 tmp1 = TREE_OPERAND (tmp1, 0);
10378 /* The only case we can still associate with two variables
10379 is if they are the same, modulo negation and bit-pattern
10380 preserving conversions. */
10381 if (!operand_equal_p (tmp0, tmp1, 0))
10382 ok = false;
10385 if (ok && lit0 && lit1)
10387 tree tmp0 = fold_convert (type, lit0);
10388 tree tmp1 = fold_convert (type, lit1);
10390 if (!TREE_OVERFLOW (tmp0) && !TREE_OVERFLOW (tmp1)
10391 && TREE_OVERFLOW (fold_build2 (code, type, tmp0, tmp1)))
10392 ok = false;
10396 /* Only do something if we found more than two objects. Otherwise,
10397 nothing has changed and we risk infinite recursion. */
10398 if (ok
10399 && (2 < ((var0 != 0) + (var1 != 0)
10400 + (con0 != 0) + (con1 != 0)
10401 + (lit0 != 0) + (lit1 != 0)
10402 + (minus_lit0 != 0) + (minus_lit1 != 0))))
10404 var0 = associate_trees (loc, var0, var1, code, type);
10405 con0 = associate_trees (loc, con0, con1, code, type);
10406 lit0 = associate_trees (loc, lit0, lit1, code, type);
10407 minus_lit0 = associate_trees (loc, minus_lit0, minus_lit1, code, type);
10409 /* Preserve the MINUS_EXPR if the negative part of the literal is
10410 greater than the positive part. Otherwise, the multiplicative
10411 folding code (i.e. extract_muldiv) may be fooled when
10412 unsigned constants are subtracted, as in the following
10413 example: ((X*2 + 4) - 8U)/2. */
10414 if (minus_lit0 && lit0)
10416 if (TREE_CODE (lit0) == INTEGER_CST
10417 && TREE_CODE (minus_lit0) == INTEGER_CST
10418 && tree_int_cst_lt (lit0, minus_lit0))
10420 minus_lit0 = associate_trees (loc, minus_lit0, lit0,
10421 MINUS_EXPR, type);
10422 lit0 = 0;
10424 else
10426 lit0 = associate_trees (loc, lit0, minus_lit0,
10427 MINUS_EXPR, type);
10428 minus_lit0 = 0;
10431 if (minus_lit0)
10433 if (con0 == 0)
10434 return
10435 fold_convert_loc (loc, type,
10436 associate_trees (loc, var0, minus_lit0,
10437 MINUS_EXPR, type));
10438 else
10440 con0 = associate_trees (loc, con0, minus_lit0,
10441 MINUS_EXPR, type);
10442 return
10443 fold_convert_loc (loc, type,
10444 associate_trees (loc, var0, con0,
10445 PLUS_EXPR, type));
10449 con0 = associate_trees (loc, con0, lit0, code, type);
10450 return
10451 fold_convert_loc (loc, type, associate_trees (loc, var0, con0,
10452 code, type));
10456 return NULL_TREE;
10458 case MINUS_EXPR:
10459 /* Pointer simplifications for subtraction, simple reassociations. */
10460 if (POINTER_TYPE_P (TREE_TYPE (arg1)) && POINTER_TYPE_P (TREE_TYPE (arg0)))
10462 /* (PTR0 p+ A) - (PTR1 p+ B) -> (PTR0 - PTR1) + (A - B) */
10463 if (TREE_CODE (arg0) == POINTER_PLUS_EXPR
10464 && TREE_CODE (arg1) == POINTER_PLUS_EXPR)
10466 tree arg00 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
10467 tree arg01 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
10468 tree arg10 = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 0));
10469 tree arg11 = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 1));
10470 return fold_build2_loc (loc, PLUS_EXPR, type,
10471 fold_build2_loc (loc, MINUS_EXPR, type,
10472 arg00, arg10),
10473 fold_build2_loc (loc, MINUS_EXPR, type,
10474 arg01, arg11));
10476 /* (PTR0 p+ A) - PTR1 -> (PTR0 - PTR1) + A, assuming PTR0 - PTR1 simplifies. */
10477 else if (TREE_CODE (arg0) == POINTER_PLUS_EXPR)
10479 tree arg00 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
10480 tree arg01 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
10481 tree tmp = fold_binary_loc (loc, MINUS_EXPR, type, arg00,
10482 fold_convert_loc (loc, type, arg1));
10483 if (tmp)
10484 return fold_build2_loc (loc, PLUS_EXPR, type, tmp, arg01);
10487 /* A - (-B) -> A + B */
10488 if (TREE_CODE (arg1) == NEGATE_EXPR)
10489 return fold_build2_loc (loc, PLUS_EXPR, type, op0,
10490 fold_convert_loc (loc, type,
10491 TREE_OPERAND (arg1, 0)));
10492 /* (-A) - B -> (-B) - A where B is easily negated and we can swap. */
10493 if (TREE_CODE (arg0) == NEGATE_EXPR
10494 && (FLOAT_TYPE_P (type)
10495 || INTEGRAL_TYPE_P (type))
10496 && negate_expr_p (arg1)
10497 && reorder_operands_p (arg0, arg1))
10498 return fold_build2_loc (loc, MINUS_EXPR, type,
10499 fold_convert_loc (loc, type,
10500 negate_expr (arg1)),
10501 fold_convert_loc (loc, type,
10502 TREE_OPERAND (arg0, 0)));
10503 /* Convert -A - 1 to ~A. */
10504 if (INTEGRAL_TYPE_P (type)
10505 && TREE_CODE (arg0) == NEGATE_EXPR
10506 && integer_onep (arg1)
10507 && !TYPE_OVERFLOW_TRAPS (type))
10508 return fold_build1_loc (loc, BIT_NOT_EXPR, type,
10509 fold_convert_loc (loc, type,
10510 TREE_OPERAND (arg0, 0)));
10512 /* Convert -1 - A to ~A. */
10513 if (INTEGRAL_TYPE_P (type)
10514 && integer_all_onesp (arg0))
10515 return fold_build1_loc (loc, BIT_NOT_EXPR, type, op1);
10518 /* X - (X / CST) * CST is X % CST. */
10519 if (INTEGRAL_TYPE_P (type)
10520 && TREE_CODE (arg1) == MULT_EXPR
10521 && TREE_CODE (TREE_OPERAND (arg1, 0)) == TRUNC_DIV_EXPR
10522 && operand_equal_p (arg0,
10523 TREE_OPERAND (TREE_OPERAND (arg1, 0), 0), 0)
10524 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg1, 0), 1),
10525 TREE_OPERAND (arg1, 1), 0))
10526 return
10527 fold_convert_loc (loc, type,
10528 fold_build2_loc (loc, TRUNC_MOD_EXPR, TREE_TYPE (arg0),
10529 arg0, TREE_OPERAND (arg1, 1)));
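/* Editorial example (not in the original source): the MINUS_EXPR
   analogue of the PLUS_EXPR case earlier: x - (x / 8) * 8 folds to
   x % 8.  */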
10531 if (! FLOAT_TYPE_P (type))
10533 if (integer_zerop (arg0))
10534 return negate_expr (fold_convert_loc (loc, type, arg1));
10535 if (integer_zerop (arg1))
10536 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
10538 /* Fold A - (A & B) into ~B & A. */
10539 if (!TREE_SIDE_EFFECTS (arg0)
10540 && TREE_CODE (arg1) == BIT_AND_EXPR)
10542 if (operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0))
10544 tree arg10 = fold_convert_loc (loc, type,
10545 TREE_OPERAND (arg1, 0));
10546 return fold_build2_loc (loc, BIT_AND_EXPR, type,
10547 fold_build1_loc (loc, BIT_NOT_EXPR,
10548 type, arg10),
10549 fold_convert_loc (loc, type, arg0));
10551 if (operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
10553 tree arg11 = fold_convert_loc (loc,
10554 type, TREE_OPERAND (arg1, 1));
10555 return fold_build2_loc (loc, BIT_AND_EXPR, type,
10556 fold_build1_loc (loc, BIT_NOT_EXPR,
10557 type, arg11),
10558 fold_convert_loc (loc, type, arg0));
10562 /* Fold (A & ~B) - (A & B) into (A ^ B) - B, where B is
10563 any power of 2 minus 1. */
10564 if (TREE_CODE (arg0) == BIT_AND_EXPR
10565 && TREE_CODE (arg1) == BIT_AND_EXPR
10566 && operand_equal_p (TREE_OPERAND (arg0, 0),
10567 TREE_OPERAND (arg1, 0), 0))
10569 tree mask0 = TREE_OPERAND (arg0, 1);
10570 tree mask1 = TREE_OPERAND (arg1, 1);
10571 tree tem = fold_build1_loc (loc, BIT_NOT_EXPR, type, mask0);
10573 if (operand_equal_p (tem, mask1, 0))
10575 tem = fold_build2_loc (loc, BIT_XOR_EXPR, type,
10576 TREE_OPERAND (arg0, 0), mask1);
10577 return fold_build2_loc (loc, MINUS_EXPR, type, tem, mask1);
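/* Editorial example (not in the original source): with B == 15,
   (a & ~15) - (a & 15) becomes (a ^ 15) - 15.  */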
10582 /* See if ARG1 is zero and X - ARG1 reduces to X. */
10583 else if (fold_real_zero_addition_p (TREE_TYPE (arg0), arg1, 1))
10584 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
10586 /* (ARG0 - ARG1) is the same as (-ARG1 + ARG0). So check whether
10587 ARG0 is zero and X + ARG0 reduces to X, since that would mean
10588 (-ARG1 + ARG0) reduces to -ARG1. */
10589 else if (fold_real_zero_addition_p (TREE_TYPE (arg1), arg0, 0))
10590 return negate_expr (fold_convert_loc (loc, type, arg1));
10592 /* Fold __complex__ ( x, 0 ) - __complex__ ( 0, y ) to
10593 __complex__ ( x, -y ). This is not the same for SNaNs or if
10594 signed zeros are involved. */
10595 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
10596 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
10597 && COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0)))
10599 tree rtype = TREE_TYPE (TREE_TYPE (arg0));
10600 tree arg0r = fold_unary_loc (loc, REALPART_EXPR, rtype, arg0);
10601 tree arg0i = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg0);
10602 bool arg0rz = false, arg0iz = false;
10603 if ((arg0r && (arg0rz = real_zerop (arg0r)))
10604 || (arg0i && (arg0iz = real_zerop (arg0i))))
10606 tree arg1r = fold_unary_loc (loc, REALPART_EXPR, rtype, arg1);
10607 tree arg1i = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg1);
10608 if (arg0rz && arg1i && real_zerop (arg1i))
10610 tree rp = fold_build1_loc (loc, NEGATE_EXPR, rtype,
10611 arg1r ? arg1r
10612 : build1 (REALPART_EXPR, rtype, arg1));
10613 tree ip = arg0i ? arg0i
10614 : build1 (IMAGPART_EXPR, rtype, arg0);
10615 return fold_build2_loc (loc, COMPLEX_EXPR, type, rp, ip);
10617 else if (arg0iz && arg1r && real_zerop (arg1r))
10619 tree rp = arg0r ? arg0r
10620 : build1 (REALPART_EXPR, rtype, arg0);
10621 tree ip = fold_build1_loc (loc, NEGATE_EXPR, rtype,
10622 arg1i ? arg1i
10623 : build1 (IMAGPART_EXPR, rtype, arg1));
10624 return fold_build2_loc (loc, COMPLEX_EXPR, type, rp, ip);
10629 /* Fold &x - &x. This can happen from &x.foo - &x.
10630 This is unsafe for certain floats even in non-IEEE formats.
10631 In IEEE, it is unsafe because it gives the wrong result for NaNs.
10632 Also note that operand_equal_p is always false if an operand
10633 is volatile. */
10635 if ((!FLOAT_TYPE_P (type) || !HONOR_NANS (TYPE_MODE (type)))
10636 && operand_equal_p (arg0, arg1, 0))
10637 return build_zero_cst (type);
10639 /* A - B -> A + (-B) if B is easily negatable. */
10640 if (negate_expr_p (arg1)
10641 && ((FLOAT_TYPE_P (type)
10642 /* Avoid this transformation if B is a positive REAL_CST. */
10643 && (TREE_CODE (arg1) != REAL_CST
10644 || REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg1))))
10645 || INTEGRAL_TYPE_P (type)))
10646 return fold_build2_loc (loc, PLUS_EXPR, type,
10647 fold_convert_loc (loc, type, arg0),
10648 fold_convert_loc (loc, type,
10649 negate_expr (arg1)));
10651 /* Try folding difference of addresses. */
10653 HOST_WIDE_INT diff;
10655 if ((TREE_CODE (arg0) == ADDR_EXPR
10656 || TREE_CODE (arg1) == ADDR_EXPR)
10657 && ptr_difference_const (arg0, arg1, &diff))
10658 return build_int_cst_type (type, diff);
10661 /* Fold &a[i] - &a[j] to i-j. */
10662 if (TREE_CODE (arg0) == ADDR_EXPR
10663 && TREE_CODE (TREE_OPERAND (arg0, 0)) == ARRAY_REF
10664 && TREE_CODE (arg1) == ADDR_EXPR
10665 && TREE_CODE (TREE_OPERAND (arg1, 0)) == ARRAY_REF)
10667 tree tem = fold_addr_of_array_ref_difference (loc, type,
10668 TREE_OPERAND (arg0, 0),
10669 TREE_OPERAND (arg1, 0));
10670 if (tem)
10671 return tem;
10674 if (FLOAT_TYPE_P (type)
10675 && flag_unsafe_math_optimizations
10676 && (TREE_CODE (arg0) == RDIV_EXPR || TREE_CODE (arg0) == MULT_EXPR)
10677 && (TREE_CODE (arg1) == RDIV_EXPR || TREE_CODE (arg1) == MULT_EXPR)
10678 && (tem = distribute_real_division (loc, code, type, arg0, arg1)))
10679 return tem;
10681 /* Handle (A1 * C1) - (A2 * C2) with A1, A2 or C1, C2 being the same or
10682 one. Make sure the type is not saturating and has the signedness of
10683 the stripped operands, as fold_plusminus_mult_expr will re-associate.
10684 ??? The latter condition should use TYPE_OVERFLOW_* flags instead. */
10685 if ((TREE_CODE (arg0) == MULT_EXPR
10686 || TREE_CODE (arg1) == MULT_EXPR)
10687 && !TYPE_SATURATING (type)
10688 && TYPE_UNSIGNED (type) == TYPE_UNSIGNED (TREE_TYPE (arg0))
10689 && TYPE_UNSIGNED (type) == TYPE_UNSIGNED (TREE_TYPE (arg1))
10690 && (!FLOAT_TYPE_P (type) || flag_associative_math))
10692 tree tem = fold_plusminus_mult_expr (loc, code, type, arg0, arg1);
10693 if (tem)
10694 return tem;
10697 goto associate;
10699 case MULT_EXPR:
10700 /* (-A) * (-B) -> A * B */
10701 if (TREE_CODE (arg0) == NEGATE_EXPR && negate_expr_p (arg1))
10702 return fold_build2_loc (loc, MULT_EXPR, type,
10703 fold_convert_loc (loc, type,
10704 TREE_OPERAND (arg0, 0)),
10705 fold_convert_loc (loc, type,
10706 negate_expr (arg1)));
10707 if (TREE_CODE (arg1) == NEGATE_EXPR && negate_expr_p (arg0))
10708 return fold_build2_loc (loc, MULT_EXPR, type,
10709 fold_convert_loc (loc, type,
10710 negate_expr (arg0)),
10711 fold_convert_loc (loc, type,
10712 TREE_OPERAND (arg1, 0)));
10714 if (! FLOAT_TYPE_P (type))
10716 if (integer_zerop (arg1))
10717 return omit_one_operand_loc (loc, type, arg1, arg0);
10718 if (integer_onep (arg1))
10719 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
10720 /* Transform x * -1 into -x. Make sure to do the negation
10721 on the original operand with conversions not stripped
10722 because we can only strip non-sign-changing conversions. */
10723 if (integer_all_onesp (arg1))
10724 return fold_convert_loc (loc, type, negate_expr (op0));
10725 /* Transform x * -C into -x * C if x is easily negatable. */
10726 if (TREE_CODE (arg1) == INTEGER_CST
10727 && tree_int_cst_sgn (arg1) == -1
10728 && negate_expr_p (arg0)
10729 && (tem = negate_expr (arg1)) != arg1
10730 && !TREE_OVERFLOW (tem))
10731 return fold_build2_loc (loc, MULT_EXPR, type,
10732 fold_convert_loc (loc, type,
10733 negate_expr (arg0)),
10734 tem);
10736 /* (a * (1 << b)) is (a << b) */
10737 if (TREE_CODE (arg1) == LSHIFT_EXPR
10738 && integer_onep (TREE_OPERAND (arg1, 0)))
10739 return fold_build2_loc (loc, LSHIFT_EXPR, type, op0,
10740 TREE_OPERAND (arg1, 1));
10741 if (TREE_CODE (arg0) == LSHIFT_EXPR
10742 && integer_onep (TREE_OPERAND (arg0, 0)))
10743 return fold_build2_loc (loc, LSHIFT_EXPR, type, op1,
10744 TREE_OPERAND (arg0, 1));
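      /* E.g. a * (1 << 3) == a << 3 == a * 8: multiplying by a power of
         two that is itself built by shifting a one is just a shift of A
         by the same amount (illustrative note; no overflow concerns
         beyond those of the original multiply).  */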
10746 /* (A + A) * C -> A * 2 * C */
10747 if (TREE_CODE (arg0) == PLUS_EXPR
10748 && TREE_CODE (arg1) == INTEGER_CST
10749 && operand_equal_p (TREE_OPERAND (arg0, 0),
10750 TREE_OPERAND (arg0, 1), 0))
10751 return fold_build2_loc (loc, MULT_EXPR, type,
10752 omit_one_operand_loc (loc, type,
10753 TREE_OPERAND (arg0, 0),
10754 TREE_OPERAND (arg0, 1)),
10755 fold_build2_loc (loc, MULT_EXPR, type,
10756 build_int_cst (type, 2), arg1));
10758 strict_overflow_p = false;
10759 if (TREE_CODE (arg1) == INTEGER_CST
10760 && 0 != (tem = extract_muldiv (op0, arg1, code, NULL_TREE,
10761 &strict_overflow_p)))
10763 if (strict_overflow_p)
10764 fold_overflow_warning (("assuming signed overflow does not "
10765 "occur when simplifying "
10766 "multiplication"),
10767 WARN_STRICT_OVERFLOW_MISC);
10768 return fold_convert_loc (loc, type, tem);
10771 /* Optimize z * conj(z) for integer complex numbers. */
10772 if (TREE_CODE (arg0) == CONJ_EXPR
10773 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
10774 return fold_mult_zconjz (loc, type, arg1);
10775 if (TREE_CODE (arg1) == CONJ_EXPR
10776 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
10777 return fold_mult_zconjz (loc, type, arg0);
10779 else
10781 /* Maybe fold x * 0 to 0. The expressions aren't the same
10782 when x is NaN, since x * 0 is also NaN. Nor are they the
10783 same in modes with signed zeros, since multiplying a
10784 negative value by 0 gives -0, not +0. */
10785 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0)))
10786 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
10787 && real_zerop (arg1))
10788 return omit_one_operand_loc (loc, type, arg1, arg0);
10789 /* In IEEE floating point, x*1 is not equivalent to x for snans.
10790 Likewise for complex arithmetic with signed zeros. */
10791 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
10792 && (!HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
10793 || !COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0)))
10794 && real_onep (arg1))
10795 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
10797 /* Transform x * -1.0 into -x. */
10798 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
10799 && (!HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
10800 || !COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0)))
10801 && real_minus_onep (arg1))
10802 return fold_convert_loc (loc, type, negate_expr (arg0));
10804 /* Convert (C1/X)*C2 into (C1*C2)/X. This transformation may change
10805 the result for floating-point types due to rounding, so it is applied
10806 only if -fassociative-math was specified. */
10807 if (flag_associative_math
10808 && TREE_CODE (arg0) == RDIV_EXPR
10809 && TREE_CODE (arg1) == REAL_CST
10810 && TREE_CODE (TREE_OPERAND (arg0, 0)) == REAL_CST)
10812 tree tem = const_binop (MULT_EXPR, TREE_OPERAND (arg0, 0),
10813 arg1);
10814 if (tem)
10815 return fold_build2_loc (loc, RDIV_EXPR, type, tem,
10816 TREE_OPERAND (arg0, 1));
10819 /* Strip sign operations from X in X*X, i.e. -Y*-Y -> Y*Y. */
10820 if (operand_equal_p (arg0, arg1, 0))
10822 tree tem = fold_strip_sign_ops (arg0);
10823 if (tem != NULL_TREE)
10825 tem = fold_convert_loc (loc, type, tem);
10826 return fold_build2_loc (loc, MULT_EXPR, type, tem, tem);
10830 /* Fold z * +-I to __complex__ (-+__imag z, +-__real z).
10831 This is not the same for NaNs or if signed zeros are
10832 involved. */
10833 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0)))
10834 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
10835 && COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0))
10836 && TREE_CODE (arg1) == COMPLEX_CST
10837 && real_zerop (TREE_REALPART (arg1)))
10839 tree rtype = TREE_TYPE (TREE_TYPE (arg0));
10840 if (real_onep (TREE_IMAGPART (arg1)))
10841 return
10842 fold_build2_loc (loc, COMPLEX_EXPR, type,
10843 negate_expr (fold_build1_loc (loc, IMAGPART_EXPR,
10844 rtype, arg0)),
10845 fold_build1_loc (loc, REALPART_EXPR, rtype, arg0));
10846 else if (real_minus_onep (TREE_IMAGPART (arg1)))
10847 return
10848 fold_build2_loc (loc, COMPLEX_EXPR, type,
10849 fold_build1_loc (loc, IMAGPART_EXPR, rtype, arg0),
10850 negate_expr (fold_build1_loc (loc, REALPART_EXPR,
10851 rtype, arg0)));
10854 /* Optimize z * conj(z) for floating point complex numbers.
10855 Guarded by flag_unsafe_math_optimizations as non-finite
10856 imaginary components don't produce scalar results. */
10857 if (flag_unsafe_math_optimizations
10858 && TREE_CODE (arg0) == CONJ_EXPR
10859 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
10860 return fold_mult_zconjz (loc, type, arg1);
10861 if (flag_unsafe_math_optimizations
10862 && TREE_CODE (arg1) == CONJ_EXPR
10863 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
10864 return fold_mult_zconjz (loc, type, arg0);
10866 if (flag_unsafe_math_optimizations)
10868 enum built_in_function fcode0 = builtin_mathfn_code (arg0);
10869 enum built_in_function fcode1 = builtin_mathfn_code (arg1);
10871 /* Optimizations of root(...)*root(...). */
10872 if (fcode0 == fcode1 && BUILTIN_ROOT_P (fcode0))
10874 tree rootfn, arg;
10875 tree arg00 = CALL_EXPR_ARG (arg0, 0);
10876 tree arg10 = CALL_EXPR_ARG (arg1, 0);
10878 /* Optimize sqrt(x)*sqrt(x) as x. */
10879 if (BUILTIN_SQRT_P (fcode0)
10880 && operand_equal_p (arg00, arg10, 0)
10881 && ! HONOR_SNANS (TYPE_MODE (type)))
10882 return arg00;
10884 /* Optimize root(x)*root(y) as root(x*y). */
10885 rootfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
10886 arg = fold_build2_loc (loc, MULT_EXPR, type, arg00, arg10);
10887 return build_call_expr_loc (loc, rootfn, 1, arg);
10890 /* Optimize expN(x)*expN(y) as expN(x+y). */
10891 if (fcode0 == fcode1 && BUILTIN_EXPONENT_P (fcode0))
10893 tree expfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
10894 tree arg = fold_build2_loc (loc, PLUS_EXPR, type,
10895 CALL_EXPR_ARG (arg0, 0),
10896 CALL_EXPR_ARG (arg1, 0));
10897 return build_call_expr_loc (loc, expfn, 1, arg);
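      /* Sketch of the identity used above: expN(x) * expN(y) == expN(x + y),
         e.g. exp2(3.0) * exp2(4.0) == 8.0 * 16.0 == exp2(7.0) == 128.0.
         The two forms can round differently in general, which is why this
         lives under flag_unsafe_math_optimizations.  */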
10900 /* Optimizations of pow(...)*pow(...). */
10901 if ((fcode0 == BUILT_IN_POW && fcode1 == BUILT_IN_POW)
10902 || (fcode0 == BUILT_IN_POWF && fcode1 == BUILT_IN_POWF)
10903 || (fcode0 == BUILT_IN_POWL && fcode1 == BUILT_IN_POWL))
10905 tree arg00 = CALL_EXPR_ARG (arg0, 0);
10906 tree arg01 = CALL_EXPR_ARG (arg0, 1);
10907 tree arg10 = CALL_EXPR_ARG (arg1, 0);
10908 tree arg11 = CALL_EXPR_ARG (arg1, 1);
10910 /* Optimize pow(x,y)*pow(z,y) as pow(x*z,y). */
10911 if (operand_equal_p (arg01, arg11, 0))
10913 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
10914 tree arg = fold_build2_loc (loc, MULT_EXPR, type,
10915 arg00, arg10);
10916 return build_call_expr_loc (loc, powfn, 2, arg, arg01);
10919 /* Optimize pow(x,y)*pow(x,z) as pow(x,y+z). */
10920 if (operand_equal_p (arg00, arg10, 0))
10922 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
10923 tree arg = fold_build2_loc (loc, PLUS_EXPR, type,
10924 arg01, arg11);
10925 return build_call_expr_loc (loc, powfn, 2, arg00, arg);
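      /* Quick numeric checks of the two pow identities above (illustrative
         only):
           pow (2, 3) * pow (5, 3) == 8 * 125 == 1000 == pow (10, 3)
           pow (2, 3) * pow (2, 4) == 8 * 16  == 128  == pow (2, 7).  */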
10929 /* Optimize tan(x)*cos(x) as sin(x). */
10930 if (((fcode0 == BUILT_IN_TAN && fcode1 == BUILT_IN_COS)
10931 || (fcode0 == BUILT_IN_TANF && fcode1 == BUILT_IN_COSF)
10932 || (fcode0 == BUILT_IN_TANL && fcode1 == BUILT_IN_COSL)
10933 || (fcode0 == BUILT_IN_COS && fcode1 == BUILT_IN_TAN)
10934 || (fcode0 == BUILT_IN_COSF && fcode1 == BUILT_IN_TANF)
10935 || (fcode0 == BUILT_IN_COSL && fcode1 == BUILT_IN_TANL))
10936 && operand_equal_p (CALL_EXPR_ARG (arg0, 0),
10937 CALL_EXPR_ARG (arg1, 0), 0))
10939 tree sinfn = mathfn_built_in (type, BUILT_IN_SIN);
10941 if (sinfn != NULL_TREE)
10942 return build_call_expr_loc (loc, sinfn, 1,
10943 CALL_EXPR_ARG (arg0, 0));
10946 /* Optimize x*pow(x,c) as pow(x,c+1). */
10947 if (fcode1 == BUILT_IN_POW
10948 || fcode1 == BUILT_IN_POWF
10949 || fcode1 == BUILT_IN_POWL)
10951 tree arg10 = CALL_EXPR_ARG (arg1, 0);
10952 tree arg11 = CALL_EXPR_ARG (arg1, 1);
10953 if (TREE_CODE (arg11) == REAL_CST
10954 && !TREE_OVERFLOW (arg11)
10955 && operand_equal_p (arg0, arg10, 0))
10957 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg1), 0);
10958 REAL_VALUE_TYPE c;
10959 tree arg;
10961 c = TREE_REAL_CST (arg11);
10962 real_arithmetic (&c, PLUS_EXPR, &c, &dconst1);
10963 arg = build_real (type, c);
10964 return build_call_expr_loc (loc, powfn, 2, arg0, arg);
10968 /* Optimize pow(x,c)*x as pow(x,c+1). */
10969 if (fcode0 == BUILT_IN_POW
10970 || fcode0 == BUILT_IN_POWF
10971 || fcode0 == BUILT_IN_POWL)
10973 tree arg00 = CALL_EXPR_ARG (arg0, 0);
10974 tree arg01 = CALL_EXPR_ARG (arg0, 1);
10975 if (TREE_CODE (arg01) == REAL_CST
10976 && !TREE_OVERFLOW (arg01)
10977 && operand_equal_p (arg1, arg00, 0))
10979 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
10980 REAL_VALUE_TYPE c;
10981 tree arg;
10983 c = TREE_REAL_CST (arg01);
10984 real_arithmetic (&c, PLUS_EXPR, &c, &dconst1);
10985 arg = build_real (type, c);
10986 return build_call_expr_loc (loc, powfn, 2, arg1, arg);
10990 /* Canonicalize x*x as pow(x,2.0), which is expanded as x*x. */
10991 if (!in_gimple_form
10992 && optimize
10993 && operand_equal_p (arg0, arg1, 0))
10995 tree powfn = mathfn_built_in (type, BUILT_IN_POW);
10997 if (powfn)
10999 tree arg = build_real (type, dconst2);
11000 return build_call_expr_loc (loc, powfn, 2, arg0, arg);
11005 goto associate;
11007 case BIT_IOR_EXPR:
11008 bit_ior:
11009 if (integer_all_onesp (arg1))
11010 return omit_one_operand_loc (loc, type, arg1, arg0);
11011 if (integer_zerop (arg1))
11012 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
11013 if (operand_equal_p (arg0, arg1, 0))
11014 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
11016 /* ~X | X is -1. */
11017 if (TREE_CODE (arg0) == BIT_NOT_EXPR
11018 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
11020 t1 = build_zero_cst (type);
11021 t1 = fold_unary_loc (loc, BIT_NOT_EXPR, type, t1);
11022 return omit_one_operand_loc (loc, type, t1, arg1);
11025 /* X | ~X is -1. */
11026 if (TREE_CODE (arg1) == BIT_NOT_EXPR
11027 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
11029 t1 = build_zero_cst (type);
11030 t1 = fold_unary_loc (loc, BIT_NOT_EXPR, type, t1);
11031 return omit_one_operand_loc (loc, type, t1, arg0);
11034 /* Canonicalize (X & C1) | C2. */
11035 if (TREE_CODE (arg0) == BIT_AND_EXPR
11036 && TREE_CODE (arg1) == INTEGER_CST
11037 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
11039 double_int c1, c2, c3, msk;
11040 int width = TYPE_PRECISION (type), w;
11041 c1 = tree_to_double_int (TREE_OPERAND (arg0, 1));
11042 c2 = tree_to_double_int (arg1);
11044 /* If (C1&C2) == C1, then (X&C1)|C2 becomes (X,C2). */
11045 if (double_int_equal_p (double_int_and (c1, c2), c1))
11046 return omit_one_operand_loc (loc, type, arg1,
11047 TREE_OPERAND (arg0, 0));
11049 msk = double_int_mask (width);
11051 /* If (C1|C2) == ~0 then (X&C1)|C2 becomes X|C2. */
11052 if (double_int_zero_p (double_int_and_not (msk,
11053 double_int_ior (c1, c2))))
11054 return fold_build2_loc (loc, BIT_IOR_EXPR, type,
11055 TREE_OPERAND (arg0, 0), arg1);
11057 /* Minimize the number of bits set in C1, i.e. C1 := C1 & ~C2,
11058 unless (C1 & ~C2) | (C2 & C3) for some C3 is a mask of some
11059 mode which allows further optimizations. */
11060 c1 = double_int_and (c1, msk);
11061 c2 = double_int_and (c2, msk);
11062 c3 = double_int_and_not (c1, c2);
11063 for (w = BITS_PER_UNIT;
11064 w <= width && w <= HOST_BITS_PER_WIDE_INT;
11065 w <<= 1)
11067 unsigned HOST_WIDE_INT mask
11068 = (unsigned HOST_WIDE_INT) -1 >> (HOST_BITS_PER_WIDE_INT - w);
11069 if (((c1.low | c2.low) & mask) == mask
11070 && (c1.low & ~mask) == 0 && c1.high == 0)
11072 c3 = uhwi_to_double_int (mask);
11073 break;
11076 if (!double_int_equal_p (c3, c1))
11077 return fold_build2_loc (loc, BIT_IOR_EXPR, type,
11078 fold_build2_loc (loc, BIT_AND_EXPR, type,
11079 TREE_OPERAND (arg0, 0),
11080 double_int_to_tree (type,
11081 c3)),
11082 arg1);
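      /* Example of the canonicalization above (illustrative): in
         (X & 0xF0) | 0x3C, the bits 0x30 of C1 are already forced on by C2,
         so C1 shrinks to C1 & ~C2 == 0xC0, giving (X & 0xC0) | 0x3C.  The
         loop keeps a wider C1 only when widening it back to a whole
         byte-multiple mask could enable further mode-sized-mask
         optimizations.  */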
11085 /* (X & Y) | Y is (X, Y). */
11086 if (TREE_CODE (arg0) == BIT_AND_EXPR
11087 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
11088 return omit_one_operand_loc (loc, type, arg1, TREE_OPERAND (arg0, 0));
11089 /* (X & Y) | X is (Y, X). */
11090 if (TREE_CODE (arg0) == BIT_AND_EXPR
11091 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
11092 && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
11093 return omit_one_operand_loc (loc, type, arg1, TREE_OPERAND (arg0, 1));
11094 /* X | (X & Y) is (Y, X). */
11095 if (TREE_CODE (arg1) == BIT_AND_EXPR
11096 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0)
11097 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 1)))
11098 return omit_one_operand_loc (loc, type, arg0, TREE_OPERAND (arg1, 1));
11099 /* X | (Y & X) is (Y, X). */
11100 if (TREE_CODE (arg1) == BIT_AND_EXPR
11101 && operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0)
11102 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
11103 return omit_one_operand_loc (loc, type, arg0, TREE_OPERAND (arg1, 0));
11105 /* (X & ~Y) | (~X & Y) is X ^ Y */
11106 if (TREE_CODE (arg0) == BIT_AND_EXPR
11107 && TREE_CODE (arg1) == BIT_AND_EXPR)
11109 tree a0, a1, l0, l1, n0, n1;
11111 a0 = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 0));
11112 a1 = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 1));
11114 l0 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
11115 l1 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
11117 n0 = fold_build1_loc (loc, BIT_NOT_EXPR, type, l0);
11118 n1 = fold_build1_loc (loc, BIT_NOT_EXPR, type, l1);
11120 if ((operand_equal_p (n0, a0, 0)
11121 && operand_equal_p (n1, a1, 0))
11122 || (operand_equal_p (n0, a1, 0)
11123 && operand_equal_p (n1, a0, 0)))
11124 return fold_build2_loc (loc, BIT_XOR_EXPR, type, l0, n1);
11127 t1 = distribute_bit_expr (loc, code, type, arg0, arg1);
11128 if (t1 != NULL_TREE)
11129 return t1;
11131 /* Convert (or (not arg0) (not arg1)) to (not (and (arg0) (arg1))).
11133 This results in more efficient code for machines without a NAND
11134 instruction. Combine will canonicalize to the first form
11135 which will allow use of NAND instructions provided by the
11136 backend if they exist. */
11137 if (TREE_CODE (arg0) == BIT_NOT_EXPR
11138 && TREE_CODE (arg1) == BIT_NOT_EXPR)
11140 return
11141 fold_build1_loc (loc, BIT_NOT_EXPR, type,
11142 build2 (BIT_AND_EXPR, type,
11143 fold_convert_loc (loc, type,
11144 TREE_OPERAND (arg0, 0)),
11145 fold_convert_loc (loc, type,
11146 TREE_OPERAND (arg1, 0))));
11149 /* See if this can be simplified into a rotate first. If that
11150 is unsuccessful continue in the association code. */
11151 goto bit_rotate;
11153 case BIT_XOR_EXPR:
11154 if (integer_zerop (arg1))
11155 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
11156 if (integer_all_onesp (arg1))
11157 return fold_build1_loc (loc, BIT_NOT_EXPR, type, op0);
11158 if (operand_equal_p (arg0, arg1, 0))
11159 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
11161 /* ~X ^ X is -1. */
11162 if (TREE_CODE (arg0) == BIT_NOT_EXPR
11163 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
11165 t1 = build_zero_cst (type);
11166 t1 = fold_unary_loc (loc, BIT_NOT_EXPR, type, t1);
11167 return omit_one_operand_loc (loc, type, t1, arg1);
11170 /* X ^ ~X is -1. */
11171 if (TREE_CODE (arg1) == BIT_NOT_EXPR
11172 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
11174 t1 = build_zero_cst (type);
11175 t1 = fold_unary_loc (loc, BIT_NOT_EXPR, type, t1);
11176 return omit_one_operand_loc (loc, type, t1, arg0);
11179 /* If we are XORing two BIT_AND_EXPR's, both of which are and'ing
11180 with a constant, and the two constants have no bits in common,
11181 we should treat this as a BIT_IOR_EXPR since this may produce more
11182 simplifications. */
11183 if (TREE_CODE (arg0) == BIT_AND_EXPR
11184 && TREE_CODE (arg1) == BIT_AND_EXPR
11185 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
11186 && TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
11187 && integer_zerop (const_binop (BIT_AND_EXPR,
11188 TREE_OPERAND (arg0, 1),
11189 TREE_OPERAND (arg1, 1))))
11191 code = BIT_IOR_EXPR;
11192 goto bit_ior;
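      /* E.g. (X & 0x0F) ^ (Y & 0xF0) == (X & 0x0F) | (Y & 0xF0): when the
         two AND masks share no bits, each result bit comes from at most
         one operand, so XOR and IOR agree (illustrative note).  */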
11195 /* (X | Y) ^ X -> Y & ~X. */
11196 if (TREE_CODE (arg0) == BIT_IOR_EXPR
11197 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
11199 tree t2 = TREE_OPERAND (arg0, 1);
11200 t1 = fold_build1_loc (loc, BIT_NOT_EXPR, TREE_TYPE (arg1),
11201 arg1);
11202 t1 = fold_build2_loc (loc, BIT_AND_EXPR, type,
11203 fold_convert_loc (loc, type, t2),
11204 fold_convert_loc (loc, type, t1));
11205 return t1;
11208 /* (Y | X) ^ X -> Y & ~X. */
11209 if (TREE_CODE (arg0) == BIT_IOR_EXPR
11210 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
11212 tree t2 = TREE_OPERAND (arg0, 0);
11213 t1 = fold_build1_loc (loc, BIT_NOT_EXPR, TREE_TYPE (arg1),
11214 arg1);
11215 t1 = fold_build2_loc (loc, BIT_AND_EXPR, type,
11216 fold_convert_loc (loc, type, t2),
11217 fold_convert_loc (loc, type, t1));
11218 return t1;
11221 /* X ^ (X | Y) -> Y & ~X. */
11222 if (TREE_CODE (arg1) == BIT_IOR_EXPR
11223 && operand_equal_p (TREE_OPERAND (arg1, 0), arg0, 0))
11225 tree t2 = TREE_OPERAND (arg1, 1);
11226 t1 = fold_build1_loc (loc, BIT_NOT_EXPR, TREE_TYPE (arg0),
11227 arg0);
11228 t1 = fold_build2_loc (loc, BIT_AND_EXPR, type,
11229 fold_convert_loc (loc, type, t2),
11230 fold_convert_loc (loc, type, t1));
11231 return t1;
11234 /* X ^ (Y | X) -> Y & ~X. */
11235 if (TREE_CODE (arg1) == BIT_IOR_EXPR
11236 && operand_equal_p (TREE_OPERAND (arg1, 1), arg0, 0))
11238 tree t2 = TREE_OPERAND (arg1, 0);
11239 t1 = fold_build1_loc (loc, BIT_NOT_EXPR, TREE_TYPE (arg0),
11240 arg0);
11241 t1 = fold_build2_loc (loc, BIT_AND_EXPR, type,
11242 fold_convert_loc (loc, type, t2),
11243 fold_convert_loc (loc, type, t1));
11244 return t1;
11247 /* Convert ~X ^ ~Y to X ^ Y. */
11248 if (TREE_CODE (arg0) == BIT_NOT_EXPR
11249 && TREE_CODE (arg1) == BIT_NOT_EXPR)
11250 return fold_build2_loc (loc, code, type,
11251 fold_convert_loc (loc, type,
11252 TREE_OPERAND (arg0, 0)),
11253 fold_convert_loc (loc, type,
11254 TREE_OPERAND (arg1, 0)));
11256 /* Convert ~X ^ C to X ^ ~C. */
11257 if (TREE_CODE (arg0) == BIT_NOT_EXPR
11258 && TREE_CODE (arg1) == INTEGER_CST)
11259 return fold_build2_loc (loc, code, type,
11260 fold_convert_loc (loc, type,
11261 TREE_OPERAND (arg0, 0)),
11262 fold_build1_loc (loc, BIT_NOT_EXPR, type, arg1));
11264 /* Fold (X & 1) ^ 1 as (X & 1) == 0. */
11265 if (TREE_CODE (arg0) == BIT_AND_EXPR
11266 && integer_onep (TREE_OPERAND (arg0, 1))
11267 && integer_onep (arg1))
11268 return fold_build2_loc (loc, EQ_EXPR, type, arg0,
11269 build_zero_cst (TREE_TYPE (arg0)));
11271 /* Fold (X & Y) ^ Y as ~X & Y. */
11272 if (TREE_CODE (arg0) == BIT_AND_EXPR
11273 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
11275 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
11276 return fold_build2_loc (loc, BIT_AND_EXPR, type,
11277 fold_build1_loc (loc, BIT_NOT_EXPR, type, tem),
11278 fold_convert_loc (loc, type, arg1));
11280 /* Fold (X & Y) ^ X as ~Y & X. */
11281 if (TREE_CODE (arg0) == BIT_AND_EXPR
11282 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
11283 && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
11285 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
11286 return fold_build2_loc (loc, BIT_AND_EXPR, type,
11287 fold_build1_loc (loc, BIT_NOT_EXPR, type, tem),
11288 fold_convert_loc (loc, type, arg1));
11290 /* Fold X ^ (X & Y) as X & ~Y. */
11291 if (TREE_CODE (arg1) == BIT_AND_EXPR
11292 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
11294 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 1));
11295 return fold_build2_loc (loc, BIT_AND_EXPR, type,
11296 fold_convert_loc (loc, type, arg0),
11297 fold_build1_loc (loc, BIT_NOT_EXPR, type, tem));
11299 /* Fold X ^ (Y & X) as ~Y & X. */
11300 if (TREE_CODE (arg1) == BIT_AND_EXPR
11301 && operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0)
11302 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
11304 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 0));
11305 return fold_build2_loc (loc, BIT_AND_EXPR, type,
11306 fold_build1_loc (loc, BIT_NOT_EXPR, type, tem),
11307 fold_convert_loc (loc, type, arg0));
11310 /* See if this can be simplified into a rotate first. If that
11311 is unsuccessful continue in the association code. */
11312 goto bit_rotate;
11314 case BIT_AND_EXPR:
11315 if (integer_all_onesp (arg1))
11316 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
11317 if (integer_zerop (arg1))
11318 return omit_one_operand_loc (loc, type, arg1, arg0);
11319 if (operand_equal_p (arg0, arg1, 0))
11320 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
11322 /* ~X & X, (X == 0) & X, and !X & X are always zero. */
11323 if ((TREE_CODE (arg0) == BIT_NOT_EXPR
11324 || TREE_CODE (arg0) == TRUTH_NOT_EXPR
11325 || (TREE_CODE (arg0) == EQ_EXPR
11326 && integer_zerop (TREE_OPERAND (arg0, 1))))
11327 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
11328 return omit_one_operand_loc (loc, type, integer_zero_node, arg1);
11330 /* X & ~X , X & (X == 0), and X & !X are always zero. */
11331 if ((TREE_CODE (arg1) == BIT_NOT_EXPR
11332 || TREE_CODE (arg1) == TRUTH_NOT_EXPR
11333 || (TREE_CODE (arg1) == EQ_EXPR
11334 && integer_zerop (TREE_OPERAND (arg1, 1))))
11335 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
11336 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
11338 /* Canonicalize (X | C1) & C2 as (X & C2) | (C1 & C2). */
11339 if (TREE_CODE (arg0) == BIT_IOR_EXPR
11340 && TREE_CODE (arg1) == INTEGER_CST
11341 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
11343 tree tmp1 = fold_convert_loc (loc, type, arg1);
11344 tree tmp2 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
11345 tree tmp3 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
11346 tmp2 = fold_build2_loc (loc, BIT_AND_EXPR, type, tmp2, tmp1);
11347 tmp3 = fold_build2_loc (loc, BIT_AND_EXPR, type, tmp3, tmp1);
11348 return
11349 fold_convert_loc (loc, type,
11350 fold_build2_loc (loc, BIT_IOR_EXPR,
11351 type, tmp2, tmp3));
11354 /* (X | Y) & Y is (X, Y). */
11355 if (TREE_CODE (arg0) == BIT_IOR_EXPR
11356 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
11357 return omit_one_operand_loc (loc, type, arg1, TREE_OPERAND (arg0, 0));
11358 /* (X | Y) & X is (Y, X). */
11359 if (TREE_CODE (arg0) == BIT_IOR_EXPR
11360 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
11361 && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
11362 return omit_one_operand_loc (loc, type, arg1, TREE_OPERAND (arg0, 1));
11363 /* X & (X | Y) is (Y, X). */
11364 if (TREE_CODE (arg1) == BIT_IOR_EXPR
11365 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0)
11366 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 1)))
11367 return omit_one_operand_loc (loc, type, arg0, TREE_OPERAND (arg1, 1));
11368 /* X & (Y | X) is (Y, X). */
11369 if (TREE_CODE (arg1) == BIT_IOR_EXPR
11370 && operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0)
11371 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
11372 return omit_one_operand_loc (loc, type, arg0, TREE_OPERAND (arg1, 0));
11374 /* Fold (X ^ 1) & 1 as (X & 1) == 0. */
11375 if (TREE_CODE (arg0) == BIT_XOR_EXPR
11376 && integer_onep (TREE_OPERAND (arg0, 1))
11377 && integer_onep (arg1))
11379 tree tem2;
11380 tem = TREE_OPERAND (arg0, 0);
11381 tem2 = fold_convert_loc (loc, TREE_TYPE (tem), arg1);
11382 tem2 = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (tem),
11383 tem, tem2);
11384 return fold_build2_loc (loc, EQ_EXPR, type, tem2,
11385 build_zero_cst (TREE_TYPE (tem)));
11387 /* Fold ~X & 1 as (X & 1) == 0. */
11388 if (TREE_CODE (arg0) == BIT_NOT_EXPR
11389 && integer_onep (arg1))
11391 tree tem2;
11392 tem = TREE_OPERAND (arg0, 0);
11393 tem2 = fold_convert_loc (loc, TREE_TYPE (tem), arg1);
11394 tem2 = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (tem),
11395 tem, tem2);
11396 return fold_build2_loc (loc, EQ_EXPR, type, tem2,
11397 build_zero_cst (TREE_TYPE (tem)));
11399 /* Fold !X & 1 as X == 0. */
11400 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
11401 && integer_onep (arg1))
11403 tem = TREE_OPERAND (arg0, 0);
11404 return fold_build2_loc (loc, EQ_EXPR, type, tem,
11405 build_zero_cst (TREE_TYPE (tem)));
11408 /* Fold (X ^ Y) & Y as ~X & Y. */
11409 if (TREE_CODE (arg0) == BIT_XOR_EXPR
11410 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
11412 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
11413 return fold_build2_loc (loc, BIT_AND_EXPR, type,
11414 fold_build1_loc (loc, BIT_NOT_EXPR, type, tem),
11415 fold_convert_loc (loc, type, arg1));
11417 /* Fold (X ^ Y) & X as ~Y & X. */
11418 if (TREE_CODE (arg0) == BIT_XOR_EXPR
11419 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
11420 && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
11422 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
11423 return fold_build2_loc (loc, BIT_AND_EXPR, type,
11424 fold_build1_loc (loc, BIT_NOT_EXPR, type, tem),
11425 fold_convert_loc (loc, type, arg1));
11427 /* Fold X & (X ^ Y) as X & ~Y. */
11428 if (TREE_CODE (arg1) == BIT_XOR_EXPR
11429 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
11431 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 1));
11432 return fold_build2_loc (loc, BIT_AND_EXPR, type,
11433 fold_convert_loc (loc, type, arg0),
11434 fold_build1_loc (loc, BIT_NOT_EXPR, type, tem));
11436 /* Fold X & (Y ^ X) as ~Y & X. */
11437 if (TREE_CODE (arg1) == BIT_XOR_EXPR
11438 && operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0)
11439 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
11441 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 0));
11442 return fold_build2_loc (loc, BIT_AND_EXPR, type,
11443 fold_build1_loc (loc, BIT_NOT_EXPR, type, tem),
11444 fold_convert_loc (loc, type, arg0));
11447 /* Fold (X * Y) & -(1 << CST) to X * Y if Y is a constant
11448 multiple of 1 << CST. */
11449 if (TREE_CODE (arg1) == INTEGER_CST)
11451 double_int cst1 = tree_to_double_int (arg1);
11452 double_int ncst1 = double_int_ext (double_int_neg (cst1),
11453 TYPE_PRECISION (TREE_TYPE (arg1)),
11454 TYPE_UNSIGNED (TREE_TYPE (arg1)));
11455 if (double_int_equal_p (double_int_and (cst1, ncst1), ncst1)
11456 && multiple_of_p (type, arg0,
11457 double_int_to_tree (TREE_TYPE (arg1), ncst1)))
11458 return fold_convert_loc (loc, type, arg0);
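      /* Illustrative instance of the fold above: (X * 8) & -8 == X * 8,
         since any multiple of 8 already has its low three bits clear and
         -8 == ~7 only clears those bits.  */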
11461 /* Fold (X * CST1) & CST2 to zero if we can, or drop known zero
11462 bits from CST2. */
11463 if (TREE_CODE (arg1) == INTEGER_CST
11464 && TREE_CODE (arg0) == MULT_EXPR
11465 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
11467 int arg1tz
11468 = double_int_ctz (tree_to_double_int (TREE_OPERAND (arg0, 1)));
11469 if (arg1tz > 0)
11471 double_int arg1mask, masked;
11472 arg1mask = double_int_not (double_int_mask (arg1tz));
11473 arg1mask = double_int_ext (arg1mask, TYPE_PRECISION (type),
11474 TYPE_UNSIGNED (type));
11475 masked = double_int_and (arg1mask, tree_to_double_int (arg1));
11476 if (double_int_zero_p (masked))
11477 return omit_two_operands_loc (loc, type, build_zero_cst (type),
11478 arg0, arg1);
11479 else if (!double_int_equal_p (masked, tree_to_double_int (arg1)))
11480 return fold_build2_loc (loc, code, type, op0,
11481 double_int_to_tree (type, masked));
11485 /* For constants M and N, if M == (1LL << cst) - 1 && (N & M) == M,
11486 ((A & N) + B) & M -> (A + B) & M
11487 Similarly if (N & M) == 0,
11488 ((A | N) + B) & M -> (A + B) & M
11489 and for - instead of + (or unary - instead of +)
11490 and/or ^ instead of |.
11491 If B is constant and (B & M) == 0, fold into A & M. */
11492 if (host_integerp (arg1, 1))
11494 unsigned HOST_WIDE_INT cst1 = tree_low_cst (arg1, 1);
11495 if (~cst1 && (cst1 & (cst1 + 1)) == 0
11496 && INTEGRAL_TYPE_P (TREE_TYPE (arg0))
11497 && (TREE_CODE (arg0) == PLUS_EXPR
11498 || TREE_CODE (arg0) == MINUS_EXPR
11499 || TREE_CODE (arg0) == NEGATE_EXPR)
11500 && (TYPE_OVERFLOW_WRAPS (TREE_TYPE (arg0))
11501 || TREE_CODE (TREE_TYPE (arg0)) == INTEGER_TYPE))
11503 tree pmop[2];
11504 int which = 0;
11505 unsigned HOST_WIDE_INT cst0;
11507 /* Now we know that arg0 is (C + D) or (C - D) or
11508 -C, and that arg1 (M) == (1LL << cst) - 1.
11509 Store C into PMOP[0] and D into PMOP[1]. */
11510 pmop[0] = TREE_OPERAND (arg0, 0);
11511 pmop[1] = NULL;
11512 if (TREE_CODE (arg0) != NEGATE_EXPR)
11514 pmop[1] = TREE_OPERAND (arg0, 1);
11515 which = 1;
11518 if (!host_integerp (TYPE_MAX_VALUE (TREE_TYPE (arg0)), 1)
11519 || (tree_low_cst (TYPE_MAX_VALUE (TREE_TYPE (arg0)), 1)
11520 & cst1) != cst1)
11521 which = -1;
11523 for (; which >= 0; which--)
11524 switch (TREE_CODE (pmop[which]))
11526 case BIT_AND_EXPR:
11527 case BIT_IOR_EXPR:
11528 case BIT_XOR_EXPR:
11529 if (TREE_CODE (TREE_OPERAND (pmop[which], 1))
11530 != INTEGER_CST)
11531 break;
11532 /* tree_low_cst not used, because we don't care about
11533 the upper bits. */
11534 cst0 = TREE_INT_CST_LOW (TREE_OPERAND (pmop[which], 1));
11535 cst0 &= cst1;
11536 if (TREE_CODE (pmop[which]) == BIT_AND_EXPR)
11538 if (cst0 != cst1)
11539 break;
11541 else if (cst0 != 0)
11542 break;
11543 /* If C or D is of the form (A & N) where
11544 (N & M) == M, or of the form (A | N) or
11545 (A ^ N) where (N & M) == 0, replace it with A. */
11546 pmop[which] = TREE_OPERAND (pmop[which], 0);
11547 break;
11548 case INTEGER_CST:
11549 /* If C or D is an N where (N & M) == 0, it can be
11550 omitted (assumed 0). */
11551 if ((TREE_CODE (arg0) == PLUS_EXPR
11552 || (TREE_CODE (arg0) == MINUS_EXPR && which == 0))
11553 && (TREE_INT_CST_LOW (pmop[which]) & cst1) == 0)
11554 pmop[which] = NULL;
11555 break;
11556 default:
11557 break;
11560 /* Only build anything new if we optimized one or both arguments
11561 above. */
11562 if (pmop[0] != TREE_OPERAND (arg0, 0)
11563 || (TREE_CODE (arg0) != NEGATE_EXPR
11564 && pmop[1] != TREE_OPERAND (arg0, 1)))
11566 tree utype = TREE_TYPE (arg0);
11567 if (! TYPE_OVERFLOW_WRAPS (TREE_TYPE (arg0)))
11569 /* Perform the operations in a type that has defined
11570 overflow behavior. */
11571 utype = unsigned_type_for (TREE_TYPE (arg0));
11572 if (pmop[0] != NULL)
11573 pmop[0] = fold_convert_loc (loc, utype, pmop[0]);
11574 if (pmop[1] != NULL)
11575 pmop[1] = fold_convert_loc (loc, utype, pmop[1]);
11578 if (TREE_CODE (arg0) == NEGATE_EXPR)
11579 tem = fold_build1_loc (loc, NEGATE_EXPR, utype, pmop[0]);
11580 else if (TREE_CODE (arg0) == PLUS_EXPR)
11582 if (pmop[0] != NULL && pmop[1] != NULL)
11583 tem = fold_build2_loc (loc, PLUS_EXPR, utype,
11584 pmop[0], pmop[1]);
11585 else if (pmop[0] != NULL)
11586 tem = pmop[0];
11587 else if (pmop[1] != NULL)
11588 tem = pmop[1];
11589 else
11590 return build_int_cst (type, 0);
11592 else if (pmop[0] == NULL)
11593 tem = fold_build1_loc (loc, NEGATE_EXPR, utype, pmop[1]);
11594 else
11595 tem = fold_build2_loc (loc, MINUS_EXPR, utype,
11596 pmop[0], pmop[1]);
11597 /* TEM is now the new binary +, - or unary - replacement. */
11598 tem = fold_build2_loc (loc, BIT_AND_EXPR, utype, tem,
11599 fold_convert_loc (loc, utype, arg1));
11600 return fold_convert_loc (loc, type, tem);
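      /* Intuition for the rewrite above (illustrative): carries in an
         addition only propagate upward, so the low bits kept by M depend
         only on the low bits of the addends.  With M == 7,
           ((A & 0xFF) + B) & 7 == (A + B) & 7   (0xFF covers all of M)
           ((A | 8) + B) & 7    == (A + B) & 7   (8 & 7 == 0).  */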
11605 t1 = distribute_bit_expr (loc, code, type, arg0, arg1);
11606 if (t1 != NULL_TREE)
11607 return t1;
11608 /* Simplify ((int)c & 0377) into (int)c, if c is unsigned char. */
11609 if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg0) == NOP_EXPR
11610 && TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg0, 0))))
11612 unsigned int prec
11613 = TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg0, 0)));
11615 if (prec < BITS_PER_WORD && prec < HOST_BITS_PER_WIDE_INT
11616 && (~TREE_INT_CST_LOW (arg1)
11617 & (((HOST_WIDE_INT) 1 << prec) - 1)) == 0)
11618 return
11619 fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
11622 /* Convert (and (not arg0) (not arg1)) to (not (or (arg0) (arg1))).
11624 This results in more efficient code for machines without a NOR
11625 instruction. Combine will canonicalize to the first form
11626 which will allow use of NOR instructions provided by the
11627 backend if they exist. */
11628 if (TREE_CODE (arg0) == BIT_NOT_EXPR
11629 && TREE_CODE (arg1) == BIT_NOT_EXPR)
11631 return fold_build1_loc (loc, BIT_NOT_EXPR, type,
11632 build2 (BIT_IOR_EXPR, type,
11633 fold_convert_loc (loc, type,
11634 TREE_OPERAND (arg0, 0)),
11635 fold_convert_loc (loc, type,
11636 TREE_OPERAND (arg1, 0))));
11639 /* If arg0 is derived from the address of an object or function, we may
11640 be able to fold this expression using the object or function's
11641 alignment. */
11642 if (POINTER_TYPE_P (TREE_TYPE (arg0)) && host_integerp (arg1, 1))
11644 unsigned HOST_WIDE_INT modulus, residue;
11645 unsigned HOST_WIDE_INT low = TREE_INT_CST_LOW (arg1);
11647 modulus = get_pointer_modulus_and_residue (arg0, &residue,
11648 integer_onep (arg1));
11650 /* This works because modulus is a power of 2. If this weren't the
11651 case, we'd have to replace it by its greatest power-of-2
11652 divisor: modulus & -modulus. */
11653 if (low < modulus)
11654 return build_int_cst (type, residue & low);
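      /* Illustrative case of the alignment fold above: if ARG0 is the
         address of an object known to be 8-byte aligned, then
         modulus == 8 and residue == 0, so ARG0 & 7 folds to the constant
         0, and ARG0 & 3 folds the same way since low < modulus.  */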
11657 /* Fold (X << C1) & C2 into (X << C1) & (C2 | ((1 << C1) - 1))
11658 (X >> C1) & C2 into (X >> C1) & (C2 | ~((type) -1 >> C1))
11659 if the new mask might be further optimized. */
11660 if ((TREE_CODE (arg0) == LSHIFT_EXPR
11661 || TREE_CODE (arg0) == RSHIFT_EXPR)
11662 && host_integerp (TREE_OPERAND (arg0, 1), 1)
11663 && host_integerp (arg1, TYPE_UNSIGNED (TREE_TYPE (arg1)))
11664 && tree_low_cst (TREE_OPERAND (arg0, 1), 1)
11665 < TYPE_PRECISION (TREE_TYPE (arg0))
11666 && TYPE_PRECISION (TREE_TYPE (arg0)) <= HOST_BITS_PER_WIDE_INT
11667 && tree_low_cst (TREE_OPERAND (arg0, 1), 1) > 0)
11669 unsigned int shiftc = tree_low_cst (TREE_OPERAND (arg0, 1), 1);
11670 unsigned HOST_WIDE_INT mask
11671 = tree_low_cst (arg1, TYPE_UNSIGNED (TREE_TYPE (arg1)));
11672 unsigned HOST_WIDE_INT newmask, zerobits = 0;
11673 tree shift_type = TREE_TYPE (arg0);
11675 if (TREE_CODE (arg0) == LSHIFT_EXPR)
11676 zerobits = ((((unsigned HOST_WIDE_INT) 1) << shiftc) - 1);
11677 else if (TREE_CODE (arg0) == RSHIFT_EXPR
11678 && TYPE_PRECISION (TREE_TYPE (arg0))
11679 == GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (arg0))))
11681 unsigned int prec = TYPE_PRECISION (TREE_TYPE (arg0));
11682 tree arg00 = TREE_OPERAND (arg0, 0);
11683 /* See if more bits can be proven as zero because of
11684 zero extension. */
11685 if (TREE_CODE (arg00) == NOP_EXPR
11686 && TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg00, 0))))
11688 tree inner_type = TREE_TYPE (TREE_OPERAND (arg00, 0));
11689 if (TYPE_PRECISION (inner_type)
11690 == GET_MODE_BITSIZE (TYPE_MODE (inner_type))
11691 && TYPE_PRECISION (inner_type) < prec)
11693 prec = TYPE_PRECISION (inner_type);
11694 /* See if we can shorten the right shift. */
11695 if (shiftc < prec)
11696 shift_type = inner_type;
11699 zerobits = ~(unsigned HOST_WIDE_INT) 0;
11700 zerobits >>= HOST_BITS_PER_WIDE_INT - shiftc;
11701 zerobits <<= prec - shiftc;
11702 /* For an arithmetic shift, if the sign bit could be set, zerobits
11703 may actually contain sign bits, so no transformation is
11704 possible, unless MASK masks them all away. In that
11705 case the shift needs to be converted into a logical shift. */
11706 if (!TYPE_UNSIGNED (TREE_TYPE (arg0))
11707 && prec == TYPE_PRECISION (TREE_TYPE (arg0)))
11709 if ((mask & zerobits) == 0)
11710 shift_type = unsigned_type_for (TREE_TYPE (arg0));
11711 else
11712 zerobits = 0;
11716 /* ((X << 16) & 0xff00) is (X, 0). */
11717 if ((mask & zerobits) == mask)
11718 return omit_one_operand_loc (loc, type,
11719 build_int_cst (type, 0), arg0);
11721 newmask = mask | zerobits;
11722 if (newmask != mask && (newmask & (newmask + 1)) == 0)
11724 unsigned int prec;
11726 /* Only do the transformation if NEWMASK is some integer
11727 mode's mask. */
11728 for (prec = BITS_PER_UNIT;
11729 prec < HOST_BITS_PER_WIDE_INT; prec <<= 1)
11730 if (newmask == (((unsigned HOST_WIDE_INT) 1) << prec) - 1)
11731 break;
11732 if (prec < HOST_BITS_PER_WIDE_INT
11733 || newmask == ~(unsigned HOST_WIDE_INT) 0)
11735 tree newmaskt;
11737 if (shift_type != TREE_TYPE (arg0))
11739 tem = fold_build2_loc (loc, TREE_CODE (arg0), shift_type,
11740 fold_convert_loc (loc, shift_type,
11741 TREE_OPERAND (arg0, 0)),
11742 TREE_OPERAND (arg0, 1));
11743 tem = fold_convert_loc (loc, type, tem);
11745 else
11746 tem = op0;
11747 newmaskt = build_int_cst_type (TREE_TYPE (op1), newmask);
11748 if (!tree_int_cst_equal (newmaskt, arg1))
11749 return fold_build2_loc (loc, BIT_AND_EXPR, type, tem, newmaskt);
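      /* Example of the mask widening above (illustrative): for a 32-bit
         unsigned X, (X << 4) & 0xFFF0 has zerobits == 0xF, so
         newmask == 0xFFFF, a whole-halfword mask; rewriting to
         (X << 4) & 0xFFFF may later match a zero-extension pattern.  */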
11754 goto associate;
11756 case RDIV_EXPR:
11757 /* Don't touch a floating-point divide by zero unless the mode
11758 of the constant can represent infinity. */
11759 if (TREE_CODE (arg1) == REAL_CST
11760 && !MODE_HAS_INFINITIES (TYPE_MODE (TREE_TYPE (arg1)))
11761 && real_zerop (arg1))
11762 return NULL_TREE;
11764 /* Optimize A / A to 1.0 if we don't care about
11765 NaNs or Infinities. Skip the transformation
11766 for non-real operands. */
11767 if (SCALAR_FLOAT_TYPE_P (TREE_TYPE (arg0))
11768 && ! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0)))
11769 && ! HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (arg0)))
11770 && operand_equal_p (arg0, arg1, 0))
11772 tree r = build_real (TREE_TYPE (arg0), dconst1);
11774 return omit_two_operands_loc (loc, type, r, arg0, arg1);
11777 /* The complex version of the above A / A optimization. */
11778 if (COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0))
11779 && operand_equal_p (arg0, arg1, 0))
11781 tree elem_type = TREE_TYPE (TREE_TYPE (arg0));
11782 if (! HONOR_NANS (TYPE_MODE (elem_type))
11783 && ! HONOR_INFINITIES (TYPE_MODE (elem_type)))
11785 tree r = build_real (elem_type, dconst1);
11786 /* omit_two_operands will call fold_convert for us. */
11787 return omit_two_operands_loc (loc, type, r, arg0, arg1);
11791 /* (-A) / (-B) -> A / B */
11792 if (TREE_CODE (arg0) == NEGATE_EXPR && negate_expr_p (arg1))
11793 return fold_build2_loc (loc, RDIV_EXPR, type,
11794 TREE_OPERAND (arg0, 0),
11795 negate_expr (arg1));
11796 if (TREE_CODE (arg1) == NEGATE_EXPR && negate_expr_p (arg0))
11797 return fold_build2_loc (loc, RDIV_EXPR, type,
11798 negate_expr (arg0),
11799 TREE_OPERAND (arg1, 0));
11801 /* In IEEE floating point, x/1 is not equivalent to x for snans. */
11802 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
11803 && real_onep (arg1))
11804 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
11806 /* In IEEE floating point, x/-1 is not equivalent to -x for snans. */
11807 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
11808 && real_minus_onep (arg1))
11809 return non_lvalue_loc (loc, fold_convert_loc (loc, type,
11810 negate_expr (arg0)));
11812 /* If ARG1 is a constant, we can convert this to a multiply by the
11813 reciprocal. This does not have the same rounding properties,
11814 so only do this if -freciprocal-math. We can actually
11815 always safely do it if ARG1 is a power of two, but it's hard to
11816 tell if it is or not in a portable manner. */
11817 if (optimize
11818 && (TREE_CODE (arg1) == REAL_CST
11819 || (TREE_CODE (arg1) == COMPLEX_CST
11820 && COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg1)))
11821 || (TREE_CODE (arg1) == VECTOR_CST
11822 && VECTOR_FLOAT_TYPE_P (TREE_TYPE (arg1)))))
11824 if (flag_reciprocal_math
11825 && 0 != (tem = const_binop (code, build_one_cst (type), arg1)))
11826 return fold_build2_loc (loc, MULT_EXPR, type, arg0, tem);
11827 /* Find the reciprocal if optimizing and the result is exact.
11828 TODO: Complex reciprocal not implemented. */
11829 if (TREE_CODE (arg1) != COMPLEX_CST)
11831 tree inverse = exact_inverse (TREE_TYPE (arg0), arg1);
11833 if (inverse)
11834 return fold_build2_loc (loc, MULT_EXPR, type, arg0, inverse);
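      /* Sketch of the two reciprocal paths above: X / 4.0 can become
         X * 0.25 even without -freciprocal-math, because 0.25 is an exact
         inverse in binary floating point; X / 5.0 becomes X * 0.2 only
         under -freciprocal-math, since 0.2 rounds and the results may
         differ in the last bit.  */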
11837 /* Convert A/B/C to A/(B*C). */
11838 if (flag_reciprocal_math
11839 && TREE_CODE (arg0) == RDIV_EXPR)
11840 return fold_build2_loc (loc, RDIV_EXPR, type, TREE_OPERAND (arg0, 0),
11841 fold_build2_loc (loc, MULT_EXPR, type,
11842 TREE_OPERAND (arg0, 1), arg1));
11844 /* Convert A/(B/C) to (A/B)*C. */
11845 if (flag_reciprocal_math
11846 && TREE_CODE (arg1) == RDIV_EXPR)
11847 return fold_build2_loc (loc, MULT_EXPR, type,
11848 fold_build2_loc (loc, RDIV_EXPR, type, arg0,
11849 TREE_OPERAND (arg1, 0)),
11850 TREE_OPERAND (arg1, 1));
11852 /* Convert C1/(X*C2) into (C1/C2)/X. */
11853 if (flag_reciprocal_math
11854 && TREE_CODE (arg1) == MULT_EXPR
11855 && TREE_CODE (arg0) == REAL_CST
11856 && TREE_CODE (TREE_OPERAND (arg1, 1)) == REAL_CST)
11858 tree tem = const_binop (RDIV_EXPR, arg0,
11859 TREE_OPERAND (arg1, 1));
11860 if (tem)
11861 return fold_build2_loc (loc, RDIV_EXPR, type, tem,
11862 TREE_OPERAND (arg1, 0));
11865 if (flag_unsafe_math_optimizations)
11867 enum built_in_function fcode0 = builtin_mathfn_code (arg0);
11868 enum built_in_function fcode1 = builtin_mathfn_code (arg1);
11870 /* Optimize sin(x)/cos(x) as tan(x). */
11871 if (((fcode0 == BUILT_IN_SIN && fcode1 == BUILT_IN_COS)
11872 || (fcode0 == BUILT_IN_SINF && fcode1 == BUILT_IN_COSF)
11873 || (fcode0 == BUILT_IN_SINL && fcode1 == BUILT_IN_COSL))
11874 && operand_equal_p (CALL_EXPR_ARG (arg0, 0),
11875 CALL_EXPR_ARG (arg1, 0), 0))
11877 tree tanfn = mathfn_built_in (type, BUILT_IN_TAN);
11879 if (tanfn != NULL_TREE)
11880 return build_call_expr_loc (loc, tanfn, 1, CALL_EXPR_ARG (arg0, 0));
11883 /* Optimize cos(x)/sin(x) as 1.0/tan(x). */
11884 if (((fcode0 == BUILT_IN_COS && fcode1 == BUILT_IN_SIN)
11885 || (fcode0 == BUILT_IN_COSF && fcode1 == BUILT_IN_SINF)
11886 || (fcode0 == BUILT_IN_COSL && fcode1 == BUILT_IN_SINL))
11887 && operand_equal_p (CALL_EXPR_ARG (arg0, 0),
11888 CALL_EXPR_ARG (arg1, 0), 0))
11890 tree tanfn = mathfn_built_in (type, BUILT_IN_TAN);
11892 if (tanfn != NULL_TREE)
11894 tree tmp = build_call_expr_loc (loc, tanfn, 1,
11895 CALL_EXPR_ARG (arg0, 0));
11896 return fold_build2_loc (loc, RDIV_EXPR, type,
11897 build_real (type, dconst1), tmp);
11901 /* Optimize sin(x)/tan(x) as cos(x) if we don't care about
11902 NaNs or Infinities. */
11903 if (((fcode0 == BUILT_IN_SIN && fcode1 == BUILT_IN_TAN)
11904 || (fcode0 == BUILT_IN_SINF && fcode1 == BUILT_IN_TANF)
11905 || (fcode0 == BUILT_IN_SINL && fcode1 == BUILT_IN_TANL)))
11907 tree arg00 = CALL_EXPR_ARG (arg0, 0);
11908 tree arg01 = CALL_EXPR_ARG (arg1, 0);
11910 if (! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg00)))
11911 && ! HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (arg00)))
11912 && operand_equal_p (arg00, arg01, 0))
11914 tree cosfn = mathfn_built_in (type, BUILT_IN_COS);
11916 if (cosfn != NULL_TREE)
11917 return build_call_expr_loc (loc, cosfn, 1, arg00);
11921 /* Optimize tan(x)/sin(x) as 1.0/cos(x) if we don't care about
11922 NaNs or Infinities. */
11923 if (((fcode0 == BUILT_IN_TAN && fcode1 == BUILT_IN_SIN)
11924 || (fcode0 == BUILT_IN_TANF && fcode1 == BUILT_IN_SINF)
11925 || (fcode0 == BUILT_IN_TANL && fcode1 == BUILT_IN_SINL)))
11927 tree arg00 = CALL_EXPR_ARG (arg0, 0);
11928 tree arg01 = CALL_EXPR_ARG (arg1, 0);
11930 if (! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg00)))
11931 && ! HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (arg00)))
11932 && operand_equal_p (arg00, arg01, 0))
11934 tree cosfn = mathfn_built_in (type, BUILT_IN_COS);
11936 if (cosfn != NULL_TREE)
11938 tree tmp = build_call_expr_loc (loc, cosfn, 1, arg00);
11939 return fold_build2_loc (loc, RDIV_EXPR, type,
11940 build_real (type, dconst1),
11941 tmp);
11946 /* Optimize pow(x,c)/x as pow(x,c-1). */
11947 if (fcode0 == BUILT_IN_POW
11948 || fcode0 == BUILT_IN_POWF
11949 || fcode0 == BUILT_IN_POWL)
11951 tree arg00 = CALL_EXPR_ARG (arg0, 0);
11952 tree arg01 = CALL_EXPR_ARG (arg0, 1);
11953 if (TREE_CODE (arg01) == REAL_CST
11954 && !TREE_OVERFLOW (arg01)
11955 && operand_equal_p (arg1, arg00, 0))
11957 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
11958 REAL_VALUE_TYPE c;
11959 tree arg;
11961 c = TREE_REAL_CST (arg01);
11962 real_arithmetic (&c, MINUS_EXPR, &c, &dconst1);
11963 arg = build_real (type, c);
11964 return build_call_expr_loc (loc, powfn, 2, arg1, arg);
11968 /* Optimize a/root(b/c) into a*root(c/b). */
11969 if (BUILTIN_ROOT_P (fcode1))
11971 tree rootarg = CALL_EXPR_ARG (arg1, 0);
11973 if (TREE_CODE (rootarg) == RDIV_EXPR)
11975 tree rootfn = TREE_OPERAND (CALL_EXPR_FN (arg1), 0);
11976 tree b = TREE_OPERAND (rootarg, 0);
11977 tree c = TREE_OPERAND (rootarg, 1);
11979 tree tmp = fold_build2_loc (loc, RDIV_EXPR, type, c, b);
11981 tmp = build_call_expr_loc (loc, rootfn, 1, tmp);
11982 return fold_build2_loc (loc, MULT_EXPR, type, arg0, tmp);
11986 /* Optimize x/expN(y) into x*expN(-y). */
11987 if (BUILTIN_EXPONENT_P (fcode1))
11989 tree expfn = TREE_OPERAND (CALL_EXPR_FN (arg1), 0);
11990 tree arg = negate_expr (CALL_EXPR_ARG (arg1, 0));
11991 arg1 = build_call_expr_loc (loc,
11992 expfn, 1,
11993 fold_convert_loc (loc, type, arg));
11994 return fold_build2_loc (loc, MULT_EXPR, type, arg0, arg1);
11997 /* Optimize x/pow(y,z) into x*pow(y,-z). */
11998 if (fcode1 == BUILT_IN_POW
11999 || fcode1 == BUILT_IN_POWF
12000 || fcode1 == BUILT_IN_POWL)
12002 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg1), 0);
12003 tree arg10 = CALL_EXPR_ARG (arg1, 0);
12004 tree arg11 = CALL_EXPR_ARG (arg1, 1);
12005 tree neg11 = fold_convert_loc (loc, type,
12006 negate_expr (arg11));
12007 arg1 = build_call_expr_loc (loc, powfn, 2, arg10, neg11);
12008 return fold_build2_loc (loc, MULT_EXPR, type, arg0, arg1);
12011 return NULL_TREE;
12013 case TRUNC_DIV_EXPR:
12014 /* Optimize (X & (-A)) / A where A is a power of 2,
12015 to X >> log2(A). */
12016 if (TREE_CODE (arg0) == BIT_AND_EXPR
12017 && !TYPE_UNSIGNED (type) && TREE_CODE (arg1) == INTEGER_CST
12018 && integer_pow2p (arg1) && tree_int_cst_sgn (arg1) > 0)
12020 tree sum = fold_binary_loc (loc, PLUS_EXPR, TREE_TYPE (arg1),
12021 arg1, TREE_OPERAND (arg0, 1));
12022 if (sum && integer_zerop (sum)) {
12023 unsigned long pow2;
12025 if (TREE_INT_CST_LOW (arg1))
12026 pow2 = exact_log2 (TREE_INT_CST_LOW (arg1));
12027 else
12028 pow2 = exact_log2 (TREE_INT_CST_HIGH (arg1))
12029 + HOST_BITS_PER_WIDE_INT;
12031 return fold_build2_loc (loc, RSHIFT_EXPR, type,
12032 TREE_OPERAND (arg0, 0),
12033 build_int_cst (integer_type_node, pow2));
12037 /* Fall through */
12039 case FLOOR_DIV_EXPR:
12040 /* Simplify A / (B << N) where A and B are positive and B is
12041 a power of 2, to A >> (N + log2(B)). */
12042 strict_overflow_p = false;
12043 if (TREE_CODE (arg1) == LSHIFT_EXPR
12044 && (TYPE_UNSIGNED (type)
12045 || tree_expr_nonnegative_warnv_p (op0, &strict_overflow_p)))
12047 tree sval = TREE_OPERAND (arg1, 0);
12048 if (integer_pow2p (sval) && tree_int_cst_sgn (sval) > 0)
12050 tree sh_cnt = TREE_OPERAND (arg1, 1);
12051 unsigned long pow2;
12053 if (TREE_INT_CST_LOW (sval))
12054 pow2 = exact_log2 (TREE_INT_CST_LOW (sval));
12055 else
12056 pow2 = exact_log2 (TREE_INT_CST_HIGH (sval))
12057 + HOST_BITS_PER_WIDE_INT;
12059 if (strict_overflow_p)
12060 fold_overflow_warning (("assuming signed overflow does not "
12061 "occur when simplifying A / (B << N)"),
12062 WARN_STRICT_OVERFLOW_MISC);
12064 sh_cnt = fold_build2_loc (loc, PLUS_EXPR, TREE_TYPE (sh_cnt),
12065 sh_cnt,
12066 build_int_cst (TREE_TYPE (sh_cnt),
12067 pow2));
12068 return fold_build2_loc (loc, RSHIFT_EXPR, type,
12069 fold_convert_loc (loc, type, arg0), sh_cnt);
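      /* Worked instance of the fold above (illustrative): for unsigned A,
         A / (4 << N) == A >> (N + 2), e.g.
         64 / (4 << 1) == 64 >> 3 == 8, since log2 (4) == 2.  */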
12073 /* For unsigned integral types, FLOOR_DIV_EXPR is the same as
12074 TRUNC_DIV_EXPR. Rewrite into the latter in this case. */
12075 if (INTEGRAL_TYPE_P (type)
12076 && TYPE_UNSIGNED (type)
12077 && code == FLOOR_DIV_EXPR)
12078 return fold_build2_loc (loc, TRUNC_DIV_EXPR, type, op0, op1);
12080 /* Fall through */
12082 case ROUND_DIV_EXPR:
12083 case CEIL_DIV_EXPR:
12084 case EXACT_DIV_EXPR:
12085 if (integer_onep (arg1))
12086 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
12087 if (integer_zerop (arg1))
12088 return NULL_TREE;
12089 /* X / -1 is -X. */
12090 if (!TYPE_UNSIGNED (type)
12091 && TREE_CODE (arg1) == INTEGER_CST
12092 && TREE_INT_CST_LOW (arg1) == (unsigned HOST_WIDE_INT) -1
12093 && TREE_INT_CST_HIGH (arg1) == -1)
12094 return fold_convert_loc (loc, type, negate_expr (arg0));
12096 /* Convert -A / -B to A / B when the type is signed and overflow is
12097 undefined. */
12098 if ((!INTEGRAL_TYPE_P (type) || TYPE_OVERFLOW_UNDEFINED (type))
12099 && TREE_CODE (arg0) == NEGATE_EXPR
12100 && negate_expr_p (arg1))
12102 if (INTEGRAL_TYPE_P (type))
12103 fold_overflow_warning (("assuming signed overflow does not occur "
12104 "when distributing negation across "
12105 "division"),
12106 WARN_STRICT_OVERFLOW_MISC);
12107 return fold_build2_loc (loc, code, type,
12108 fold_convert_loc (loc, type,
12109 TREE_OPERAND (arg0, 0)),
12110 fold_convert_loc (loc, type,
12111 negate_expr (arg1)));
12113 if ((!INTEGRAL_TYPE_P (type) || TYPE_OVERFLOW_UNDEFINED (type))
12114 && TREE_CODE (arg1) == NEGATE_EXPR
12115 && negate_expr_p (arg0))
12117 if (INTEGRAL_TYPE_P (type))
12118 fold_overflow_warning (("assuming signed overflow does not occur "
12119 "when distributing negation across "
12120 "division"),
12121 WARN_STRICT_OVERFLOW_MISC);
12122 return fold_build2_loc (loc, code, type,
12123 fold_convert_loc (loc, type,
12124 negate_expr (arg0)),
12125 fold_convert_loc (loc, type,
12126 TREE_OPERAND (arg1, 0)));
12129 /* If arg0 is a multiple of arg1, then rewrite to the fastest div
12130 operation, EXACT_DIV_EXPR.
12132 Note that only CEIL_DIV_EXPR and FLOOR_DIV_EXPR are rewritten now.
12133 At one time others generated faster code; it's not clear if they do
12134 after the last round of changes to the DIV code in expmed.c. */
12135 if ((code == CEIL_DIV_EXPR || code == FLOOR_DIV_EXPR)
12136 && multiple_of_p (type, arg0, arg1))
12137 return fold_build2_loc (loc, EXACT_DIV_EXPR, type, arg0, arg1);
12139 strict_overflow_p = false;
12140 if (TREE_CODE (arg1) == INTEGER_CST
12141 && 0 != (tem = extract_muldiv (op0, arg1, code, NULL_TREE,
12142 &strict_overflow_p)))
12144 if (strict_overflow_p)
12145 fold_overflow_warning (("assuming signed overflow does not occur "
12146 "when simplifying division"),
12147 WARN_STRICT_OVERFLOW_MISC);
12148 return fold_convert_loc (loc, type, tem);
12151 return NULL_TREE;
12153 case CEIL_MOD_EXPR:
12154 case FLOOR_MOD_EXPR:
12155 case ROUND_MOD_EXPR:
12156 case TRUNC_MOD_EXPR:
12157 /* X % 1 is always zero, but be sure to preserve any side
12158 effects in X. */
12159 if (integer_onep (arg1))
12160 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
12162 /* For X % 0, return the expression unchanged so that we can emit
12163 the proper warnings and errors. */
12164 if (integer_zerop (arg1))
12165 return NULL_TREE;
12167 /* 0 % X is always zero, but be sure to preserve any side
12168 effects in X. Place this after checking for X == 0. */
12169 if (integer_zerop (arg0))
12170 return omit_one_operand_loc (loc, type, integer_zero_node, arg1);
12172 /* X % -1 is zero. */
12173 if (!TYPE_UNSIGNED (type)
12174 && TREE_CODE (arg1) == INTEGER_CST
12175 && TREE_INT_CST_LOW (arg1) == (unsigned HOST_WIDE_INT) -1
12176 && TREE_INT_CST_HIGH (arg1) == -1)
12177 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
12179 /* X % -C is the same as X % C. */
12180 if (code == TRUNC_MOD_EXPR
12181 && !TYPE_UNSIGNED (type)
12182 && TREE_CODE (arg1) == INTEGER_CST
12183 && !TREE_OVERFLOW (arg1)
12184 && TREE_INT_CST_HIGH (arg1) < 0
12185 && !TYPE_OVERFLOW_TRAPS (type)
12186 /* Avoid this transformation if C is INT_MIN, i.e. C == -C. */
12187 && !sign_bit_p (arg1, arg1))
12188 return fold_build2_loc (loc, code, type,
12189 fold_convert_loc (loc, type, arg0),
12190 fold_convert_loc (loc, type,
12191 negate_expr (arg1)));
12193 /* X % -Y is the same as X % Y. */
12194 if (code == TRUNC_MOD_EXPR
12195 && !TYPE_UNSIGNED (type)
12196 && TREE_CODE (arg1) == NEGATE_EXPR
12197 && !TYPE_OVERFLOW_TRAPS (type))
12198 return fold_build2_loc (loc, code, type, fold_convert_loc (loc, type, arg0),
12199 fold_convert_loc (loc, type,
12200 TREE_OPERAND (arg1, 0)));
12202 strict_overflow_p = false;
12203 if (TREE_CODE (arg1) == INTEGER_CST
12204 && 0 != (tem = extract_muldiv (op0, arg1, code, NULL_TREE,
12205 &strict_overflow_p)))
12207 if (strict_overflow_p)
12208 fold_overflow_warning (("assuming signed overflow does not occur "
12209 "when simplifying modulus"),
12210 WARN_STRICT_OVERFLOW_MISC);
12211 return fold_convert_loc (loc, type, tem);
12214 /* Optimize TRUNC_MOD_EXPR by a power of two into a BIT_AND_EXPR,
12215 i.e. "X % C" into "X & (C - 1)", if X and C are positive. */
12216 if ((code == TRUNC_MOD_EXPR || code == FLOOR_MOD_EXPR)
12217 && (TYPE_UNSIGNED (type)
12218 || tree_expr_nonnegative_warnv_p (op0, &strict_overflow_p)))
12220 tree c = arg1;
12221 /* Also optimize A % (C << N) where C is a power of 2,
12222 to A & ((C << N) - 1). */
12223 if (TREE_CODE (arg1) == LSHIFT_EXPR)
12224 c = TREE_OPERAND (arg1, 0);
12226 if (integer_pow2p (c) && tree_int_cst_sgn (c) > 0)
12228 tree mask
12229 = fold_build2_loc (loc, MINUS_EXPR, TREE_TYPE (arg1), arg1,
12230 build_int_cst (TREE_TYPE (arg1), 1));
12231 if (strict_overflow_p)
12232 fold_overflow_warning (("assuming signed overflow does not "
12233 "occur when simplifying "
12234 "X % (power of two)"),
12235 WARN_STRICT_OVERFLOW_MISC);
12236 return fold_build2_loc (loc, BIT_AND_EXPR, type,
12237 fold_convert_loc (loc, type, arg0),
12238 fold_convert_loc (loc, type, mask));
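	    /* For example, with unsigned x and a (illustrative names):
	         x % 8          folds to   x & 7
	         a % (4 << n)   folds to   a & ((4 << n) - 1)
	       the mask being the MINUS_EXPR built above.  */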
12242 return NULL_TREE;
12244 case LROTATE_EXPR:
12245 case RROTATE_EXPR:
12246 if (integer_all_onesp (arg0))
12247 return omit_one_operand_loc (loc, type, arg0, arg1);
12248 goto shift;
12250 case RSHIFT_EXPR:
12251 /* Optimize -1 >> x for arithmetic right shifts. */
12252 if (integer_all_onesp (arg0) && !TYPE_UNSIGNED (type)
12253 && tree_expr_nonnegative_p (arg1))
12254 return omit_one_operand_loc (loc, type, arg0, arg1);
12255 /* ... fall through ... */
12257 case LSHIFT_EXPR:
12258 shift:
12259 if (integer_zerop (arg1))
12260 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
12261 if (integer_zerop (arg0))
12262 return omit_one_operand_loc (loc, type, arg0, arg1);
12264	      /* Since a negative shift count is not well-defined,
12265 don't try to compute it in the compiler. */
12266 if (TREE_CODE (arg1) == INTEGER_CST && tree_int_cst_sgn (arg1) < 0)
12267 return NULL_TREE;
12269 /* Turn (a OP c1) OP c2 into a OP (c1+c2). */
12270 if (TREE_CODE (op0) == code && host_integerp (arg1, false)
12271 && TREE_INT_CST_LOW (arg1) < TYPE_PRECISION (type)
12272 && host_integerp (TREE_OPERAND (arg0, 1), false)
12273 && TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1)) < TYPE_PRECISION (type))
12275 HOST_WIDE_INT low = (TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1))
12276 + TREE_INT_CST_LOW (arg1));
12278 /* Deal with a OP (c1 + c2) being undefined but (a OP c1) OP c2
12279 being well defined. */
12280 if (low >= TYPE_PRECISION (type))
12282 if (code == LROTATE_EXPR || code == RROTATE_EXPR)
12283 low = low % TYPE_PRECISION (type);
12284 else if (TYPE_UNSIGNED (type) || code == LSHIFT_EXPR)
12285 return omit_one_operand_loc (loc, type, build_int_cst (type, 0),
12286 TREE_OPERAND (arg0, 0));
12287 else
12288 low = TYPE_PRECISION (type) - 1;
12291 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0),
12292 build_int_cst (type, low));
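	    /* For example, "(x >> 3) >> 5" folds to "x >> 8".  When the
	       combined count reaches the precision, e.g. "(x << 20) << 20"
	       on a 32-bit type, the result folds to 0 for left and
	       unsigned right shifts, is clamped to precision - 1 for
	       arithmetic right shifts, and is reduced modulo the
	       precision for rotates.  */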
12295 /* Transform (x >> c) << c into x & (-1<<c), or transform (x << c) >> c
12296 into x & ((unsigned)-1 >> c) for unsigned types. */
12297 if (((code == LSHIFT_EXPR && TREE_CODE (arg0) == RSHIFT_EXPR)
12298 || (TYPE_UNSIGNED (type)
12299 && code == RSHIFT_EXPR && TREE_CODE (arg0) == LSHIFT_EXPR))
12300 && host_integerp (arg1, false)
12301 && TREE_INT_CST_LOW (arg1) < TYPE_PRECISION (type)
12302 && host_integerp (TREE_OPERAND (arg0, 1), false)
12303 && TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1)) < TYPE_PRECISION (type))
12305 HOST_WIDE_INT low0 = TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1));
12306 HOST_WIDE_INT low1 = TREE_INT_CST_LOW (arg1);
12307 tree lshift;
12308 tree arg00;
12310 if (low0 == low1)
12312 arg00 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
12314 lshift = build_int_cst (type, -1);
12315 lshift = int_const_binop (code, lshift, arg1);
12317 return fold_build2_loc (loc, BIT_AND_EXPR, type, arg00, lshift);
12321 /* Rewrite an LROTATE_EXPR by a constant into an
12322 RROTATE_EXPR by a new constant. */
12323 if (code == LROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST)
12325 tree tem = build_int_cst (TREE_TYPE (arg1),
12326 TYPE_PRECISION (type));
12327 tem = const_binop (MINUS_EXPR, tem, arg1);
12328 return fold_build2_loc (loc, RROTATE_EXPR, type, op0, tem);
12331 /* If we have a rotate of a bit operation with the rotate count and
12332 the second operand of the bit operation both constant,
12333 permute the two operations. */
12334 if (code == RROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST
12335 && (TREE_CODE (arg0) == BIT_AND_EXPR
12336 || TREE_CODE (arg0) == BIT_IOR_EXPR
12337 || TREE_CODE (arg0) == BIT_XOR_EXPR)
12338 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
12339 return fold_build2_loc (loc, TREE_CODE (arg0), type,
12340 fold_build2_loc (loc, code, type,
12341 TREE_OPERAND (arg0, 0), arg1),
12342 fold_build2_loc (loc, code, type,
12343 TREE_OPERAND (arg0, 1), arg1));
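	/* For example, with 32-bit x, "(x & 0xff00) rotated right by 8"
	   folds to "(x rotated right by 8) & 0xff"; the constant mask is
	   rotated along with the variable operand.  */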
12345 /* Two consecutive rotates adding up to the precision of the
12346 type can be ignored. */
12347 if (code == RROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST
12348 && TREE_CODE (arg0) == RROTATE_EXPR
12349 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
12350 && TREE_INT_CST_HIGH (arg1) == 0
12351 && TREE_INT_CST_HIGH (TREE_OPERAND (arg0, 1)) == 0
12352 && ((TREE_INT_CST_LOW (arg1)
12353 + TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1)))
12354 == (unsigned int) TYPE_PRECISION (type)))
12355 return TREE_OPERAND (arg0, 0);
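	/* For example, on a 32-bit type, a right rotate by 12 of a right
	   rotate by 20 of X folds back to plain X, because the two counts
	   sum to the precision.  */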
12357 /* Fold (X & C2) << C1 into (X << C1) & (C2 << C1)
12358 (X & C2) >> C1 into (X >> C1) & (C2 >> C1)
12359 if the latter can be further optimized. */
12360 if ((code == LSHIFT_EXPR || code == RSHIFT_EXPR)
12361 && TREE_CODE (arg0) == BIT_AND_EXPR
12362 && TREE_CODE (arg1) == INTEGER_CST
12363 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
12365 tree mask = fold_build2_loc (loc, code, type,
12366 fold_convert_loc (loc, type,
12367 TREE_OPERAND (arg0, 1)),
12368 arg1);
12369 tree shift = fold_build2_loc (loc, code, type,
12370 fold_convert_loc (loc, type,
12371 TREE_OPERAND (arg0, 0)),
12372 arg1);
12373 tem = fold_binary_loc (loc, BIT_AND_EXPR, type, shift, mask);
12374 if (tem)
12375 return tem;
12378 return NULL_TREE;
12380 case MIN_EXPR:
12381 if (operand_equal_p (arg0, arg1, 0))
12382 return omit_one_operand_loc (loc, type, arg0, arg1);
12383 if (INTEGRAL_TYPE_P (type)
12384 && operand_equal_p (arg1, TYPE_MIN_VALUE (type), OEP_ONLY_CONST))
12385 return omit_one_operand_loc (loc, type, arg1, arg0);
12386 tem = fold_minmax (loc, MIN_EXPR, type, arg0, arg1);
12387 if (tem)
12388 return tem;
12389 goto associate;
12391 case MAX_EXPR:
12392 if (operand_equal_p (arg0, arg1, 0))
12393 return omit_one_operand_loc (loc, type, arg0, arg1);
12394 if (INTEGRAL_TYPE_P (type)
12395 && TYPE_MAX_VALUE (type)
12396 && operand_equal_p (arg1, TYPE_MAX_VALUE (type), OEP_ONLY_CONST))
12397 return omit_one_operand_loc (loc, type, arg1, arg0);
12398 tem = fold_minmax (loc, MAX_EXPR, type, arg0, arg1);
12399 if (tem)
12400 return tem;
12401 goto associate;
12403 case TRUTH_ANDIF_EXPR:
12404 /* Note that the operands of this must be ints
12405 and their values must be 0 or 1.
12406 ("true" is a fixed value perhaps depending on the language.) */
12407 /* If first arg is constant zero, return it. */
12408 if (integer_zerop (arg0))
12409 return fold_convert_loc (loc, type, arg0);
12410 case TRUTH_AND_EXPR:
12411 /* If either arg is constant true, drop it. */
12412 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
12413 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
12414 if (TREE_CODE (arg1) == INTEGER_CST && ! integer_zerop (arg1)
12415 /* Preserve sequence points. */
12416 && (code != TRUTH_ANDIF_EXPR || ! TREE_SIDE_EFFECTS (arg0)))
12417 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
12418 /* If second arg is constant zero, result is zero, but first arg
12419 must be evaluated. */
12420 if (integer_zerop (arg1))
12421 return omit_one_operand_loc (loc, type, arg1, arg0);
12422 /* Likewise for first arg, but note that only the TRUTH_AND_EXPR
12423 case will be handled here. */
12424 if (integer_zerop (arg0))
12425 return omit_one_operand_loc (loc, type, arg0, arg1);
12427 /* !X && X is always false. */
12428 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
12429 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
12430 return omit_one_operand_loc (loc, type, integer_zero_node, arg1);
12431 /* X && !X is always false. */
12432 if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
12433 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
12434 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
12436 /* A < X && A + 1 > Y ==> A < X && A >= Y. Normally A + 1 > Y
12437 means A >= Y && A != MAX, but in this case we know that
12438 A < X <= MAX. */
12440 if (!TREE_SIDE_EFFECTS (arg0)
12441 && !TREE_SIDE_EFFECTS (arg1))
12443 tem = fold_to_nonsharp_ineq_using_bound (loc, arg0, arg1);
12444 if (tem && !operand_equal_p (tem, arg0, 0))
12445 return fold_build2_loc (loc, code, type, tem, arg1);
12447 tem = fold_to_nonsharp_ineq_using_bound (loc, arg1, arg0);
12448 if (tem && !operand_equal_p (tem, arg1, 0))
12449 return fold_build2_loc (loc, code, type, arg0, tem);
12452 if ((tem = fold_truth_andor (loc, code, type, arg0, arg1, op0, op1))
12453 != NULL_TREE)
12454 return tem;
12456 return NULL_TREE;
12458 case TRUTH_ORIF_EXPR:
12459 /* Note that the operands of this must be ints
12460 and their values must be 0 or true.
12461 ("true" is a fixed value perhaps depending on the language.) */
12462 /* If first arg is constant true, return it. */
12463 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
12464 return fold_convert_loc (loc, type, arg0);
12465 case TRUTH_OR_EXPR:
12466 /* If either arg is constant zero, drop it. */
12467 if (TREE_CODE (arg0) == INTEGER_CST && integer_zerop (arg0))
12468 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
12469 if (TREE_CODE (arg1) == INTEGER_CST && integer_zerop (arg1)
12470 /* Preserve sequence points. */
12471 && (code != TRUTH_ORIF_EXPR || ! TREE_SIDE_EFFECTS (arg0)))
12472 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
12473 /* If second arg is constant true, result is true, but we must
12474 evaluate first arg. */
12475 if (TREE_CODE (arg1) == INTEGER_CST && ! integer_zerop (arg1))
12476 return omit_one_operand_loc (loc, type, arg1, arg0);
12477 /* Likewise for first arg, but note this only occurs here for
12478 TRUTH_OR_EXPR. */
12479 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
12480 return omit_one_operand_loc (loc, type, arg0, arg1);
12482 /* !X || X is always true. */
12483 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
12484 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
12485 return omit_one_operand_loc (loc, type, integer_one_node, arg1);
12486 /* X || !X is always true. */
12487 if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
12488 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
12489 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
12491 /* (X && !Y) || (!X && Y) is X ^ Y */
12492 if (TREE_CODE (arg0) == TRUTH_AND_EXPR
12493 && TREE_CODE (arg1) == TRUTH_AND_EXPR)
12495 tree a0, a1, l0, l1, n0, n1;
12497 a0 = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 0));
12498 a1 = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 1));
12500 l0 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
12501 l1 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
12503 n0 = fold_build1_loc (loc, TRUTH_NOT_EXPR, type, l0);
12504 n1 = fold_build1_loc (loc, TRUTH_NOT_EXPR, type, l1);
12506 if ((operand_equal_p (n0, a0, 0)
12507 && operand_equal_p (n1, a1, 0))
12508 || (operand_equal_p (n0, a1, 0)
12509 && operand_equal_p (n1, a0, 0)))
12510 return fold_build2_loc (loc, TRUTH_XOR_EXPR, type, l0, n1);
12513 if ((tem = fold_truth_andor (loc, code, type, arg0, arg1, op0, op1))
12514 != NULL_TREE)
12515 return tem;
12517 return NULL_TREE;
12519 case TRUTH_XOR_EXPR:
12520 /* If the second arg is constant zero, drop it. */
12521 if (integer_zerop (arg1))
12522 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
12523 /* If the second arg is constant true, this is a logical inversion. */
12524 if (integer_onep (arg1))
12526 /* Only call invert_truthvalue if operand is a truth value. */
12527 if (TREE_CODE (TREE_TYPE (arg0)) != BOOLEAN_TYPE)
12528 tem = fold_build1_loc (loc, TRUTH_NOT_EXPR, TREE_TYPE (arg0), arg0);
12529 else
12530 tem = invert_truthvalue_loc (loc, arg0);
12531 return non_lvalue_loc (loc, fold_convert_loc (loc, type, tem));
12533 /* Identical arguments cancel to zero. */
12534 if (operand_equal_p (arg0, arg1, 0))
12535 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
12537 /* !X ^ X is always true. */
12538 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
12539 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
12540 return omit_one_operand_loc (loc, type, integer_one_node, arg1);
12542 /* X ^ !X is always true. */
12543 if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
12544 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
12545 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
12547 return NULL_TREE;
12549 case EQ_EXPR:
12550 case NE_EXPR:
12551 STRIP_NOPS (arg0);
12552 STRIP_NOPS (arg1);
12554 tem = fold_comparison (loc, code, type, op0, op1);
12555 if (tem != NULL_TREE)
12556 return tem;
12558 /* bool_var != 0 becomes bool_var. */
12559 if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_zerop (arg1)
12560 && code == NE_EXPR)
12561 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
12563 /* bool_var == 1 becomes bool_var. */
12564 if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_onep (arg1)
12565 && code == EQ_EXPR)
12566 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
12568 /* bool_var != 1 becomes !bool_var. */
12569 if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_onep (arg1)
12570 && code == NE_EXPR)
12571 return fold_convert_loc (loc, type,
12572 fold_build1_loc (loc, TRUTH_NOT_EXPR,
12573 TREE_TYPE (arg0), arg0));
12575 /* bool_var == 0 becomes !bool_var. */
12576 if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_zerop (arg1)
12577 && code == EQ_EXPR)
12578 return fold_convert_loc (loc, type,
12579 fold_build1_loc (loc, TRUTH_NOT_EXPR,
12580 TREE_TYPE (arg0), arg0));
12582 /* !exp != 0 becomes !exp */
12583 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR && integer_zerop (arg1)
12584 && code == NE_EXPR)
12585 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
12587 /* If this is an equality comparison of the address of two non-weak,
12588	 unaliased symbols, neither of which is extern (since we do not
12589 have access to attributes for externs), then we know the result. */
12590 if (TREE_CODE (arg0) == ADDR_EXPR
12591 && VAR_OR_FUNCTION_DECL_P (TREE_OPERAND (arg0, 0))
12592 && ! DECL_WEAK (TREE_OPERAND (arg0, 0))
12593 && ! lookup_attribute ("alias",
12594 DECL_ATTRIBUTES (TREE_OPERAND (arg0, 0)))
12595 && ! DECL_EXTERNAL (TREE_OPERAND (arg0, 0))
12596 && TREE_CODE (arg1) == ADDR_EXPR
12597 && VAR_OR_FUNCTION_DECL_P (TREE_OPERAND (arg1, 0))
12598 && ! DECL_WEAK (TREE_OPERAND (arg1, 0))
12599 && ! lookup_attribute ("alias",
12600 DECL_ATTRIBUTES (TREE_OPERAND (arg1, 0)))
12601 && ! DECL_EXTERNAL (TREE_OPERAND (arg1, 0)))
12603 /* We know that we're looking at the address of two
12604 non-weak, unaliased, static _DECL nodes.
12606 It is both wasteful and incorrect to call operand_equal_p
12607 to compare the two ADDR_EXPR nodes. It is wasteful in that
12608 all we need to do is test pointer equality for the arguments
12609 to the two ADDR_EXPR nodes. It is incorrect to use
12610 operand_equal_p as that function is NOT equivalent to a
12611 C equality test. It can in fact return false for two
12612 objects which would test as equal using the C equality
12613 operator. */
12614 bool equal = TREE_OPERAND (arg0, 0) == TREE_OPERAND (arg1, 0);
12615 return constant_boolean_node (equal
12616 ? code == EQ_EXPR : code != EQ_EXPR,
12617 type);
12620 /* If this is an EQ or NE comparison of a constant with a PLUS_EXPR or
12621 a MINUS_EXPR of a constant, we can convert it into a comparison with
12622 a revised constant as long as no overflow occurs. */
12623 if (TREE_CODE (arg1) == INTEGER_CST
12624 && (TREE_CODE (arg0) == PLUS_EXPR
12625 || TREE_CODE (arg0) == MINUS_EXPR)
12626 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
12627 && 0 != (tem = const_binop (TREE_CODE (arg0) == PLUS_EXPR
12628 ? MINUS_EXPR : PLUS_EXPR,
12629 fold_convert_loc (loc, TREE_TYPE (arg0),
12630 arg1),
12631 TREE_OPERAND (arg0, 1)))
12632 && !TREE_OVERFLOW (tem))
12633 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0), tem);
12635 /* Similarly for a NEGATE_EXPR. */
12636 if (TREE_CODE (arg0) == NEGATE_EXPR
12637 && TREE_CODE (arg1) == INTEGER_CST
12638 && 0 != (tem = negate_expr (fold_convert_loc (loc, TREE_TYPE (arg0),
12639 arg1)))
12640 && TREE_CODE (tem) == INTEGER_CST
12641 && !TREE_OVERFLOW (tem))
12642 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0), tem);
12644 /* Similarly for a BIT_XOR_EXPR; X ^ C1 == C2 is X == (C1 ^ C2). */
12645 if (TREE_CODE (arg0) == BIT_XOR_EXPR
12646 && TREE_CODE (arg1) == INTEGER_CST
12647 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
12648 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0),
12649 fold_build2_loc (loc, BIT_XOR_EXPR, TREE_TYPE (arg0),
12650 fold_convert_loc (loc,
12651 TREE_TYPE (arg0),
12652 arg1),
12653 TREE_OPERAND (arg0, 1)));
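	/* For example, "(x ^ 4) == 5" folds to "x == 1", the new constant
	   being 4 ^ 5.  */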
12655 /* Transform comparisons of the form X +- Y CMP X to Y CMP 0. */
12656 if ((TREE_CODE (arg0) == PLUS_EXPR
12657 || TREE_CODE (arg0) == POINTER_PLUS_EXPR
12658 || TREE_CODE (arg0) == MINUS_EXPR)
12659 && operand_equal_p (tree_strip_nop_conversions (TREE_OPERAND (arg0,
12660 0)),
12661 arg1, 0)
12662 && (INTEGRAL_TYPE_P (TREE_TYPE (arg0))
12663 || POINTER_TYPE_P (TREE_TYPE (arg0))))
12665 tree val = TREE_OPERAND (arg0, 1);
12666 return omit_two_operands_loc (loc, type,
12667 fold_build2_loc (loc, code, type,
12668 val,
12669 build_int_cst (TREE_TYPE (val),
12670 0)),
12671 TREE_OPERAND (arg0, 0), arg1);
12674 /* Transform comparisons of the form C - X CMP X if C % 2 == 1. */
12675 if (TREE_CODE (arg0) == MINUS_EXPR
12676 && TREE_CODE (TREE_OPERAND (arg0, 0)) == INTEGER_CST
12677 && operand_equal_p (tree_strip_nop_conversions (TREE_OPERAND (arg0,
12678 1)),
12679 arg1, 0)
12680 && (TREE_INT_CST_LOW (TREE_OPERAND (arg0, 0)) & 1) == 1)
12682 return omit_two_operands_loc (loc, type,
12683 code == NE_EXPR
12684 ? boolean_true_node : boolean_false_node,
12685 TREE_OPERAND (arg0, 1), arg1);
12688 /* If we have X - Y == 0, we can convert that to X == Y and similarly
12689 for !=. Don't do this for ordered comparisons due to overflow. */
12690 if (TREE_CODE (arg0) == MINUS_EXPR
12691 && integer_zerop (arg1))
12692 return fold_build2_loc (loc, code, type,
12693 TREE_OPERAND (arg0, 0), TREE_OPERAND (arg0, 1));
12695 /* Convert ABS_EXPR<x> == 0 or ABS_EXPR<x> != 0 to x == 0 or x != 0. */
12696 if (TREE_CODE (arg0) == ABS_EXPR
12697 && (integer_zerop (arg1) || real_zerop (arg1)))
12698 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0), arg1);
12700 /* If this is an EQ or NE comparison with zero and ARG0 is
12701 (1 << foo) & bar, convert it to (bar >> foo) & 1. Both require
12702 two operations, but the latter can be done in one less insn
12703 on machines that have only two-operand insns or on which a
12704 constant cannot be the first operand. */
12705 if (TREE_CODE (arg0) == BIT_AND_EXPR
12706 && integer_zerop (arg1))
12708 tree arg00 = TREE_OPERAND (arg0, 0);
12709 tree arg01 = TREE_OPERAND (arg0, 1);
12710 if (TREE_CODE (arg00) == LSHIFT_EXPR
12711 && integer_onep (TREE_OPERAND (arg00, 0)))
12713 tree tem = fold_build2_loc (loc, RSHIFT_EXPR, TREE_TYPE (arg00),
12714 arg01, TREE_OPERAND (arg00, 1));
12715 tem = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg0), tem,
12716 build_int_cst (TREE_TYPE (arg0), 1));
12717 return fold_build2_loc (loc, code, type,
12718 fold_convert_loc (loc, TREE_TYPE (arg1), tem),
12719 arg1);
12721 else if (TREE_CODE (arg01) == LSHIFT_EXPR
12722 && integer_onep (TREE_OPERAND (arg01, 0)))
12724 tree tem = fold_build2_loc (loc, RSHIFT_EXPR, TREE_TYPE (arg01),
12725 arg00, TREE_OPERAND (arg01, 1));
12726 tem = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg0), tem,
12727 build_int_cst (TREE_TYPE (arg0), 1));
12728 return fold_build2_loc (loc, code, type,
12729 fold_convert_loc (loc, TREE_TYPE (arg1), tem),
12730 arg1);
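	/* For example, "((1 << n) & flags) == 0" folds to
	   "((flags >> n) & 1) == 0" ("flags" and "n" are illustrative
	   names); the same rewrite applies when the shift appears as the
	   second operand of the BIT_AND_EXPR.  */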
12734 /* If this is an NE or EQ comparison of zero against the result of a
12735 signed MOD operation whose second operand is a power of 2, make
12736 the MOD operation unsigned since it is simpler and equivalent. */
12737 if (integer_zerop (arg1)
12738 && !TYPE_UNSIGNED (TREE_TYPE (arg0))
12739 && (TREE_CODE (arg0) == TRUNC_MOD_EXPR
12740 || TREE_CODE (arg0) == CEIL_MOD_EXPR
12741 || TREE_CODE (arg0) == FLOOR_MOD_EXPR
12742 || TREE_CODE (arg0) == ROUND_MOD_EXPR)
12743 && integer_pow2p (TREE_OPERAND (arg0, 1)))
12745 tree newtype = unsigned_type_for (TREE_TYPE (arg0));
12746 tree newmod = fold_build2_loc (loc, TREE_CODE (arg0), newtype,
12747 fold_convert_loc (loc, newtype,
12748 TREE_OPERAND (arg0, 0)),
12749 fold_convert_loc (loc, newtype,
12750 TREE_OPERAND (arg0, 1)));
12752 return fold_build2_loc (loc, code, type, newmod,
12753 fold_convert_loc (loc, newtype, arg1));
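	/* For example, with signed int x, "x % 16 == 0" folds to
	   "(unsigned int) x % 16U == 0"; the unsigned modulus by a power
	   of two can then become a simple mask.  */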
12756 /* Fold ((X >> C1) & C2) == 0 and ((X >> C1) & C2) != 0 where
12757 C1 is a valid shift constant, and C2 is a power of two, i.e.
12758 a single bit. */
12759 if (TREE_CODE (arg0) == BIT_AND_EXPR
12760 && TREE_CODE (TREE_OPERAND (arg0, 0)) == RSHIFT_EXPR
12761 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1))
12762 == INTEGER_CST
12763 && integer_pow2p (TREE_OPERAND (arg0, 1))
12764 && integer_zerop (arg1))
12766 tree itype = TREE_TYPE (arg0);
12767 unsigned HOST_WIDE_INT prec = TYPE_PRECISION (itype);
12768 tree arg001 = TREE_OPERAND (TREE_OPERAND (arg0, 0), 1);
12770 /* Check for a valid shift count. */
12771 if (TREE_INT_CST_HIGH (arg001) == 0
12772 && TREE_INT_CST_LOW (arg001) < prec)
12774 tree arg01 = TREE_OPERAND (arg0, 1);
12775 tree arg000 = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
12776 unsigned HOST_WIDE_INT log2 = tree_log2 (arg01);
12777 /* If (C2 << C1) doesn't overflow, then ((X >> C1) & C2) != 0
12778 can be rewritten as (X & (C2 << C1)) != 0. */
12779 if ((log2 + TREE_INT_CST_LOW (arg001)) < prec)
12781 tem = fold_build2_loc (loc, LSHIFT_EXPR, itype, arg01, arg001);
12782 tem = fold_build2_loc (loc, BIT_AND_EXPR, itype, arg000, tem);
12783 return fold_build2_loc (loc, code, type, tem,
12784 fold_convert_loc (loc, itype, arg1));
12786 /* Otherwise, for signed (arithmetic) shifts,
12787 ((X >> C1) & C2) != 0 is rewritten as X < 0, and
12788 ((X >> C1) & C2) == 0 is rewritten as X >= 0. */
12789 else if (!TYPE_UNSIGNED (itype))
12790 return fold_build2_loc (loc, code == EQ_EXPR ? GE_EXPR : LT_EXPR, type,
12791 arg000, build_int_cst (itype, 0));
12792	      /* Otherwise, for unsigned (logical) shifts,
12793 ((X >> C1) & C2) != 0 is rewritten as (X,false), and
12794 ((X >> C1) & C2) == 0 is rewritten as (X,true). */
12795 else
12796 return omit_one_operand_loc (loc, type,
12797 code == EQ_EXPR ? integer_one_node
12798 : integer_zero_node,
12799 arg000);
12803 /* If we have (A & C) == C where C is a power of 2, convert this into
12804 (A & C) != 0. Similarly for NE_EXPR. */
12805 if (TREE_CODE (arg0) == BIT_AND_EXPR
12806 && integer_pow2p (TREE_OPERAND (arg0, 1))
12807 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
12808 return fold_build2_loc (loc, code == EQ_EXPR ? NE_EXPR : EQ_EXPR, type,
12809 arg0, fold_convert_loc (loc, TREE_TYPE (arg0),
12810 integer_zero_node));
12812 /* If we have (A & C) != 0 or (A & C) == 0 and C is the sign
12813 bit, then fold the expression into A < 0 or A >= 0. */
12814 tem = fold_single_bit_test_into_sign_test (loc, code, arg0, arg1, type);
12815 if (tem)
12816 return tem;
12818 /* If we have (A & C) == D where D & ~C != 0, convert this into 0.
12819 Similarly for NE_EXPR. */
12820 if (TREE_CODE (arg0) == BIT_AND_EXPR
12821 && TREE_CODE (arg1) == INTEGER_CST
12822 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
12824 tree notc = fold_build1_loc (loc, BIT_NOT_EXPR,
12825 TREE_TYPE (TREE_OPERAND (arg0, 1)),
12826 TREE_OPERAND (arg0, 1));
12827 tree dandnotc
12828 = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg0),
12829 fold_convert_loc (loc, TREE_TYPE (arg0), arg1),
12830 notc);
12831 tree rslt = code == EQ_EXPR ? integer_zero_node : integer_one_node;
12832 if (integer_nonzerop (dandnotc))
12833 return omit_one_operand_loc (loc, type, rslt, arg0);
12836 /* If we have (A | C) == D where C & ~D != 0, convert this into 0.
12837 Similarly for NE_EXPR. */
12838 if (TREE_CODE (arg0) == BIT_IOR_EXPR
12839 && TREE_CODE (arg1) == INTEGER_CST
12840 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
12842 tree notd = fold_build1_loc (loc, BIT_NOT_EXPR, TREE_TYPE (arg1), arg1);
12843 tree candnotd
12844 = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg0),
12845 TREE_OPERAND (arg0, 1),
12846 fold_convert_loc (loc, TREE_TYPE (arg0), notd));
12847 tree rslt = code == EQ_EXPR ? integer_zero_node : integer_one_node;
12848 if (integer_nonzerop (candnotd))
12849 return omit_one_operand_loc (loc, type, rslt, arg0);
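	/* For example, "(x | 4) == 3" folds to 0 (and "(x | 4) != 3" to
	   1), since bit 2 is always set on the left-hand side.  */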
12852 /* If this is a comparison of a field, we may be able to simplify it. */
12853 if ((TREE_CODE (arg0) == COMPONENT_REF
12854 || TREE_CODE (arg0) == BIT_FIELD_REF)
12855 /* Handle the constant case even without -O
12856 to make sure the warnings are given. */
12857 && (optimize || TREE_CODE (arg1) == INTEGER_CST))
12859 t1 = optimize_bit_field_compare (loc, code, type, arg0, arg1);
12860 if (t1)
12861 return t1;
12864 /* Optimize comparisons of strlen vs zero to a compare of the
12865 first character of the string vs zero. To wit,
12866 strlen(ptr) == 0 => *ptr == 0
12867 strlen(ptr) != 0 => *ptr != 0
12868 Other cases should reduce to one of these two (or a constant)
12869 due to the return value of strlen being unsigned. */
12870 if (TREE_CODE (arg0) == CALL_EXPR
12871 && integer_zerop (arg1))
12873 tree fndecl = get_callee_fndecl (arg0);
12875 if (fndecl
12876 && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
12877 && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_STRLEN
12878 && call_expr_nargs (arg0) == 1
12879 && TREE_CODE (TREE_TYPE (CALL_EXPR_ARG (arg0, 0))) == POINTER_TYPE)
12881 tree iref = build_fold_indirect_ref_loc (loc,
12882 CALL_EXPR_ARG (arg0, 0));
12883 return fold_build2_loc (loc, code, type, iref,
12884 build_int_cst (TREE_TYPE (iref), 0));
12888 /* Fold (X >> C) != 0 into X < 0 if C is one less than the width
12889 of X. Similarly fold (X >> C) == 0 into X >= 0. */
12890 if (TREE_CODE (arg0) == RSHIFT_EXPR
12891 && integer_zerop (arg1)
12892 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
12894 tree arg00 = TREE_OPERAND (arg0, 0);
12895 tree arg01 = TREE_OPERAND (arg0, 1);
12896 tree itype = TREE_TYPE (arg00);
12897 if (TREE_INT_CST_HIGH (arg01) == 0
12898 && TREE_INT_CST_LOW (arg01)
12899 == (unsigned HOST_WIDE_INT) (TYPE_PRECISION (itype) - 1))
12901 if (TYPE_UNSIGNED (itype))
12903 itype = signed_type_for (itype);
12904 arg00 = fold_convert_loc (loc, itype, arg00);
12906 return fold_build2_loc (loc, code == EQ_EXPR ? GE_EXPR : LT_EXPR,
12907 type, arg00, build_int_cst (itype, 0));
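	/* For example, with 32-bit unsigned x, "(x >> 31) != 0" folds to
	   "(int) x < 0", and "(x >> 31) == 0" to "(int) x >= 0".  */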
12911 /* (X ^ Y) == 0 becomes X == Y, and (X ^ Y) != 0 becomes X != Y. */
12912 if (integer_zerop (arg1)
12913 && TREE_CODE (arg0) == BIT_XOR_EXPR)
12914 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0),
12915 TREE_OPERAND (arg0, 1));
12917 /* (X ^ Y) == Y becomes X == 0. We know that Y has no side-effects. */
12918 if (TREE_CODE (arg0) == BIT_XOR_EXPR
12919 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
12920 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0),
12921 build_zero_cst (TREE_TYPE (arg0)));
12922 /* Likewise (X ^ Y) == X becomes Y == 0. X has no side-effects. */
12923 if (TREE_CODE (arg0) == BIT_XOR_EXPR
12924 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
12925 && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
12926 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 1),
12927 build_zero_cst (TREE_TYPE (arg0)));
12929 /* (X ^ C1) op C2 can be rewritten as X op (C1 ^ C2). */
12930 if (TREE_CODE (arg0) == BIT_XOR_EXPR
12931 && TREE_CODE (arg1) == INTEGER_CST
12932 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
12933 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0),
12934 fold_build2_loc (loc, BIT_XOR_EXPR, TREE_TYPE (arg1),
12935 TREE_OPERAND (arg0, 1), arg1));
12937 /* Fold (~X & C) == 0 into (X & C) != 0 and (~X & C) != 0 into
12938 (X & C) == 0 when C is a single bit. */
12939 if (TREE_CODE (arg0) == BIT_AND_EXPR
12940 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_NOT_EXPR
12941 && integer_zerop (arg1)
12942 && integer_pow2p (TREE_OPERAND (arg0, 1)))
12944 tem = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg0),
12945 TREE_OPERAND (TREE_OPERAND (arg0, 0), 0),
12946 TREE_OPERAND (arg0, 1));
12947 return fold_build2_loc (loc, code == EQ_EXPR ? NE_EXPR : EQ_EXPR,
12948 type, tem,
12949 fold_convert_loc (loc, TREE_TYPE (arg0),
12950 arg1));
12953 /* Fold ((X & C) ^ C) eq/ne 0 into (X & C) ne/eq 0, when the
12954 constant C is a power of two, i.e. a single bit. */
12955 if (TREE_CODE (arg0) == BIT_XOR_EXPR
12956 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_AND_EXPR
12957 && integer_zerop (arg1)
12958 && integer_pow2p (TREE_OPERAND (arg0, 1))
12959 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
12960 TREE_OPERAND (arg0, 1), OEP_ONLY_CONST))
12962 tree arg00 = TREE_OPERAND (arg0, 0);
12963 return fold_build2_loc (loc, code == EQ_EXPR ? NE_EXPR : EQ_EXPR, type,
12964 arg00, build_int_cst (TREE_TYPE (arg00), 0));
12967 /* Likewise, fold ((X ^ C) & C) eq/ne 0 into (X & C) ne/eq 0,
12968	 when C is a power of two, i.e. a single bit. */
12969 if (TREE_CODE (arg0) == BIT_AND_EXPR
12970 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_XOR_EXPR
12971 && integer_zerop (arg1)
12972 && integer_pow2p (TREE_OPERAND (arg0, 1))
12973 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
12974 TREE_OPERAND (arg0, 1), OEP_ONLY_CONST))
12976 tree arg000 = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
12977 tem = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg000),
12978 arg000, TREE_OPERAND (arg0, 1));
12979 return fold_build2_loc (loc, code == EQ_EXPR ? NE_EXPR : EQ_EXPR, type,
12980 tem, build_int_cst (TREE_TYPE (tem), 0));
12983 if (integer_zerop (arg1)
12984 && tree_expr_nonzero_p (arg0))
12986 tree res = constant_boolean_node (code==NE_EXPR, type);
12987 return omit_one_operand_loc (loc, type, res, arg0);
12990 /* Fold -X op -Y as X op Y, where op is eq/ne. */
12991 if (TREE_CODE (arg0) == NEGATE_EXPR
12992 && TREE_CODE (arg1) == NEGATE_EXPR)
12993 return fold_build2_loc (loc, code, type,
12994 TREE_OPERAND (arg0, 0),
12995 fold_convert_loc (loc, TREE_TYPE (arg0),
12996 TREE_OPERAND (arg1, 0)));
12998	 /* Fold (X & C) op (Y & C) as "(X ^ Y) & C op 0", and symmetries. */
12999 if (TREE_CODE (arg0) == BIT_AND_EXPR
13000 && TREE_CODE (arg1) == BIT_AND_EXPR)
13002 tree arg00 = TREE_OPERAND (arg0, 0);
13003 tree arg01 = TREE_OPERAND (arg0, 1);
13004 tree arg10 = TREE_OPERAND (arg1, 0);
13005 tree arg11 = TREE_OPERAND (arg1, 1);
13006 tree itype = TREE_TYPE (arg0);
13008 if (operand_equal_p (arg01, arg11, 0))
13009 return fold_build2_loc (loc, code, type,
13010 fold_build2_loc (loc, BIT_AND_EXPR, itype,
13011 fold_build2_loc (loc,
13012 BIT_XOR_EXPR, itype,
13013 arg00, arg10),
13014 arg01),
13015 build_zero_cst (itype));
13017 if (operand_equal_p (arg01, arg10, 0))
13018 return fold_build2_loc (loc, code, type,
13019 fold_build2_loc (loc, BIT_AND_EXPR, itype,
13020 fold_build2_loc (loc,
13021 BIT_XOR_EXPR, itype,
13022 arg00, arg11),
13023 arg01),
13024 build_zero_cst (itype));
13026 if (operand_equal_p (arg00, arg11, 0))
13027 return fold_build2_loc (loc, code, type,
13028 fold_build2_loc (loc, BIT_AND_EXPR, itype,
13029 fold_build2_loc (loc,
13030 BIT_XOR_EXPR, itype,
13031 arg01, arg10),
13032 arg00),
13033 build_zero_cst (itype));
13035 if (operand_equal_p (arg00, arg10, 0))
13036 return fold_build2_loc (loc, code, type,
13037 fold_build2_loc (loc, BIT_AND_EXPR, itype,
13038 fold_build2_loc (loc,
13039 BIT_XOR_EXPR, itype,
13040 arg01, arg11),
13041 arg00),
13042 build_zero_cst (itype));
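	/* For example, "(x & m) == (y & m)" folds to
	   "((x ^ y) & m) == 0"; the four cases above cover the mask
	   appearing as either operand of either BIT_AND_EXPR.  */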
13045 if (TREE_CODE (arg0) == BIT_XOR_EXPR
13046 && TREE_CODE (arg1) == BIT_XOR_EXPR)
13048 tree arg00 = TREE_OPERAND (arg0, 0);
13049 tree arg01 = TREE_OPERAND (arg0, 1);
13050 tree arg10 = TREE_OPERAND (arg1, 0);
13051 tree arg11 = TREE_OPERAND (arg1, 1);
13052 tree itype = TREE_TYPE (arg0);
13054 /* Optimize (X ^ Z) op (Y ^ Z) as X op Y, and symmetries.
13055 operand_equal_p guarantees no side-effects so we don't need
13056 to use omit_one_operand on Z. */
13057 if (operand_equal_p (arg01, arg11, 0))
13058 return fold_build2_loc (loc, code, type, arg00,
13059 fold_convert_loc (loc, TREE_TYPE (arg00),
13060 arg10));
13061 if (operand_equal_p (arg01, arg10, 0))
13062 return fold_build2_loc (loc, code, type, arg00,
13063 fold_convert_loc (loc, TREE_TYPE (arg00),
13064 arg11));
13065 if (operand_equal_p (arg00, arg11, 0))
13066 return fold_build2_loc (loc, code, type, arg01,
13067 fold_convert_loc (loc, TREE_TYPE (arg01),
13068 arg10));
13069 if (operand_equal_p (arg00, arg10, 0))
13070 return fold_build2_loc (loc, code, type, arg01,
13071 fold_convert_loc (loc, TREE_TYPE (arg01),
13072 arg11));
13074 /* Optimize (X ^ C1) op (Y ^ C2) as (X ^ (C1 ^ C2)) op Y. */
13075 if (TREE_CODE (arg01) == INTEGER_CST
13076 && TREE_CODE (arg11) == INTEGER_CST)
13078 tem = fold_build2_loc (loc, BIT_XOR_EXPR, itype, arg01,
13079 fold_convert_loc (loc, itype, arg11));
13080 tem = fold_build2_loc (loc, BIT_XOR_EXPR, itype, arg00, tem);
13081 return fold_build2_loc (loc, code, type, tem,
13082 fold_convert_loc (loc, itype, arg10));
13086 /* Attempt to simplify equality/inequality comparisons of complex
13087 values. Only lower the comparison if the result is known or
13088 can be simplified to a single scalar comparison. */
13089 if ((TREE_CODE (arg0) == COMPLEX_EXPR
13090 || TREE_CODE (arg0) == COMPLEX_CST)
13091 && (TREE_CODE (arg1) == COMPLEX_EXPR
13092 || TREE_CODE (arg1) == COMPLEX_CST))
13094 tree real0, imag0, real1, imag1;
13095 tree rcond, icond;
13097 if (TREE_CODE (arg0) == COMPLEX_EXPR)
13099 real0 = TREE_OPERAND (arg0, 0);
13100 imag0 = TREE_OPERAND (arg0, 1);
13102 else
13104 real0 = TREE_REALPART (arg0);
13105 imag0 = TREE_IMAGPART (arg0);
13108 if (TREE_CODE (arg1) == COMPLEX_EXPR)
13110 real1 = TREE_OPERAND (arg1, 0);
13111 imag1 = TREE_OPERAND (arg1, 1);
13113 else
13115 real1 = TREE_REALPART (arg1);
13116 imag1 = TREE_IMAGPART (arg1);
13119 rcond = fold_binary_loc (loc, code, type, real0, real1);
13120 if (rcond && TREE_CODE (rcond) == INTEGER_CST)
13122 if (integer_zerop (rcond))
13124 if (code == EQ_EXPR)
13125 return omit_two_operands_loc (loc, type, boolean_false_node,
13126 imag0, imag1);
13127 return fold_build2_loc (loc, NE_EXPR, type, imag0, imag1);
13129 else
13131 if (code == NE_EXPR)
13132 return omit_two_operands_loc (loc, type, boolean_true_node,
13133 imag0, imag1);
13134 return fold_build2_loc (loc, EQ_EXPR, type, imag0, imag1);
13138 icond = fold_binary_loc (loc, code, type, imag0, imag1);
13139 if (icond && TREE_CODE (icond) == INTEGER_CST)
13141 if (integer_zerop (icond))
13143 if (code == EQ_EXPR)
13144 return omit_two_operands_loc (loc, type, boolean_false_node,
13145 real0, real1);
13146 return fold_build2_loc (loc, NE_EXPR, type, real0, real1);
13148 else
13150 if (code == NE_EXPR)
13151 return omit_two_operands_loc (loc, type, boolean_true_node,
13152 real0, real1);
13153 return fold_build2_loc (loc, EQ_EXPR, type, real0, real1);
13158 return NULL_TREE;
13160 case LT_EXPR:
13161 case GT_EXPR:
13162 case LE_EXPR:
13163 case GE_EXPR:
13164 tem = fold_comparison (loc, code, type, op0, op1);
13165 if (tem != NULL_TREE)
13166 return tem;
13168 /* Transform comparisons of the form X +- C CMP X. */
13169 if ((TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
13170 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
13171 && ((TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
13172 && !HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0))))
13173 || (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
13174 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))))
13176 tree arg01 = TREE_OPERAND (arg0, 1);
13177 enum tree_code code0 = TREE_CODE (arg0);
13178 int is_positive;
13180 if (TREE_CODE (arg01) == REAL_CST)
13181 is_positive = REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg01)) ? -1 : 1;
13182 else
13183 is_positive = tree_int_cst_sgn (arg01);
13185 /* (X - c) > X becomes false. */
13186 if (code == GT_EXPR
13187 && ((code0 == MINUS_EXPR && is_positive >= 0)
13188 || (code0 == PLUS_EXPR && is_positive <= 0)))
13190 if (TREE_CODE (arg01) == INTEGER_CST
13191 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
13192 fold_overflow_warning (("assuming signed overflow does not "
13193 "occur when assuming that (X - c) > X "
13194 "is always false"),
13195 WARN_STRICT_OVERFLOW_ALL);
13196 return constant_boolean_node (0, type);
13199 /* Likewise (X + c) < X becomes false. */
13200 if (code == LT_EXPR
13201 && ((code0 == PLUS_EXPR && is_positive >= 0)
13202 || (code0 == MINUS_EXPR && is_positive <= 0)))
13204 if (TREE_CODE (arg01) == INTEGER_CST
13205 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
13206 fold_overflow_warning (("assuming signed overflow does not "
13207 "occur when assuming that "
13208 "(X + c) < X is always false"),
13209 WARN_STRICT_OVERFLOW_ALL);
13210 return constant_boolean_node (0, type);
13213 /* Convert (X - c) <= X to true. */
13214 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1)))
13215 && code == LE_EXPR
13216 && ((code0 == MINUS_EXPR && is_positive >= 0)
13217 || (code0 == PLUS_EXPR && is_positive <= 0)))
13219 if (TREE_CODE (arg01) == INTEGER_CST
13220 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
13221 fold_overflow_warning (("assuming signed overflow does not "
13222 "occur when assuming that "
13223 "(X - c) <= X is always true"),
13224 WARN_STRICT_OVERFLOW_ALL);
13225 return constant_boolean_node (1, type);
13228 /* Convert (X + c) >= X to true. */
13229 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1)))
13230 && code == GE_EXPR
13231 && ((code0 == PLUS_EXPR && is_positive >= 0)
13232 || (code0 == MINUS_EXPR && is_positive <= 0)))
13234 if (TREE_CODE (arg01) == INTEGER_CST
13235 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
13236 fold_overflow_warning (("assuming signed overflow does not "
13237 "occur when assuming that "
13238 "(X + c) >= X is always true"),
13239 WARN_STRICT_OVERFLOW_ALL);
13240 return constant_boolean_node (1, type);
13243 if (TREE_CODE (arg01) == INTEGER_CST)
13245 /* Convert X + c > X and X - c < X to true for integers. */
13246 if (code == GT_EXPR
13247 && ((code0 == PLUS_EXPR && is_positive > 0)
13248 || (code0 == MINUS_EXPR && is_positive < 0)))
13250 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
13251 fold_overflow_warning (("assuming signed overflow does "
13252 "not occur when assuming that "
13253 "(X + c) > X is always true"),
13254 WARN_STRICT_OVERFLOW_ALL);
13255 return constant_boolean_node (1, type);
13258 if (code == LT_EXPR
13259 && ((code0 == MINUS_EXPR && is_positive > 0)
13260 || (code0 == PLUS_EXPR && is_positive < 0)))
13262 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
13263 fold_overflow_warning (("assuming signed overflow does "
13264 "not occur when assuming that "
13265 "(X - c) < X is always true"),
13266 WARN_STRICT_OVERFLOW_ALL);
13267 return constant_boolean_node (1, type);
13270 /* Convert X + c <= X and X - c >= X to false for integers. */
13271 if (code == LE_EXPR
13272 && ((code0 == PLUS_EXPR && is_positive > 0)
13273 || (code0 == MINUS_EXPR && is_positive < 0)))
13275 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
13276 fold_overflow_warning (("assuming signed overflow does "
13277 "not occur when assuming that "
13278 "(X + c) <= X is always false"),
13279 WARN_STRICT_OVERFLOW_ALL);
13280 return constant_boolean_node (0, type);
13283 if (code == GE_EXPR
13284 && ((code0 == MINUS_EXPR && is_positive > 0)
13285 || (code0 == PLUS_EXPR && is_positive < 0)))
13287 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
13288 fold_overflow_warning (("assuming signed overflow does "
13289 "not occur when assuming that "
13290 "(X - c) >= X is always false"),
13291 WARN_STRICT_OVERFLOW_ALL);
13292 return constant_boolean_node (0, type);
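	/* For example, with signed int x, "x + 1 > x" folds to true and
	   "x - 1 >= x" folds to false, each with a -Wstrict-overflow
	   note, because signed overflow is assumed not to occur.  The
	   transformation is not applied to unsigned types, where
	   wraparound is well defined.  */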
13297 /* Comparisons with the highest or lowest possible integer of
13298 the specified precision will have known values. */
13300 tree arg1_type = TREE_TYPE (arg1);
13301 unsigned int width = TYPE_PRECISION (arg1_type);
13303 if (TREE_CODE (arg1) == INTEGER_CST
13304 && width <= HOST_BITS_PER_DOUBLE_INT
13305 && (INTEGRAL_TYPE_P (arg1_type) || POINTER_TYPE_P (arg1_type)))
13307 HOST_WIDE_INT signed_max_hi;
13308 unsigned HOST_WIDE_INT signed_max_lo;
13309 unsigned HOST_WIDE_INT max_hi, max_lo, min_hi, min_lo;
13311 if (width <= HOST_BITS_PER_WIDE_INT)
13313 signed_max_lo = ((unsigned HOST_WIDE_INT) 1 << (width - 1))
13314 - 1;
13315 signed_max_hi = 0;
13316 max_hi = 0;
13318 if (TYPE_UNSIGNED (arg1_type))
13320 max_lo = ((unsigned HOST_WIDE_INT) 2 << (width - 1)) - 1;
13321 min_lo = 0;
13322 min_hi = 0;
13324 else
13326 max_lo = signed_max_lo;
13327 min_lo = ((unsigned HOST_WIDE_INT) -1 << (width - 1));
13328 min_hi = -1;
13331 else
13333 width -= HOST_BITS_PER_WIDE_INT;
13334 signed_max_lo = -1;
13335 signed_max_hi = ((unsigned HOST_WIDE_INT) 1 << (width - 1))
13336 - 1;
13337 max_lo = -1;
13338 min_lo = 0;
13340 if (TYPE_UNSIGNED (arg1_type))
13342 max_hi = ((unsigned HOST_WIDE_INT) 2 << (width - 1)) - 1;
13343 min_hi = 0;
13345 else
13347 max_hi = signed_max_hi;
13348 min_hi = ((unsigned HOST_WIDE_INT) -1 << (width - 1));
13352 if ((unsigned HOST_WIDE_INT) TREE_INT_CST_HIGH (arg1) == max_hi
13353 && TREE_INT_CST_LOW (arg1) == max_lo)
13354 switch (code)
13356 case GT_EXPR:
13357 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
13359 case GE_EXPR:
13360 return fold_build2_loc (loc, EQ_EXPR, type, op0, op1);
13362 case LE_EXPR:
13363 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
13365 case LT_EXPR:
13366 return fold_build2_loc (loc, NE_EXPR, type, op0, op1);
13368 /* The GE_EXPR and LT_EXPR cases above are not normally
13369 reached because of previous transformations. */
13371 default:
13372 break;
13374 else if ((unsigned HOST_WIDE_INT) TREE_INT_CST_HIGH (arg1)
13375 == max_hi
13376 && TREE_INT_CST_LOW (arg1) == max_lo - 1)
13377 switch (code)
13379 case GT_EXPR:
13380 arg1 = const_binop (PLUS_EXPR, arg1,
13381 build_int_cst (TREE_TYPE (arg1), 1));
13382 return fold_build2_loc (loc, EQ_EXPR, type,
13383 fold_convert_loc (loc,
13384 TREE_TYPE (arg1), arg0),
13385 arg1);
13386 case LE_EXPR:
13387 arg1 = const_binop (PLUS_EXPR, arg1,
13388 build_int_cst (TREE_TYPE (arg1), 1));
13389 return fold_build2_loc (loc, NE_EXPR, type,
13390 fold_convert_loc (loc, TREE_TYPE (arg1),
13391 arg0),
13392 arg1);
13393 default:
13394 break;
13396 else if ((unsigned HOST_WIDE_INT) TREE_INT_CST_HIGH (arg1)
13397 == min_hi
13398 && TREE_INT_CST_LOW (arg1) == min_lo)
13399 switch (code)
13401 case LT_EXPR:
13402 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
13404 case LE_EXPR:
13405 return fold_build2_loc (loc, EQ_EXPR, type, op0, op1);
13407 case GE_EXPR:
13408 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
13410 case GT_EXPR:
13411 return fold_build2_loc (loc, NE_EXPR, type, op0, op1);
13413 default:
13414 break;
13416 else if ((unsigned HOST_WIDE_INT) TREE_INT_CST_HIGH (arg1)
13417 == min_hi
13418 && TREE_INT_CST_LOW (arg1) == min_lo + 1)
13419 switch (code)
13421 case GE_EXPR:
13422 arg1 = const_binop (MINUS_EXPR, arg1, integer_one_node);
13423 return fold_build2_loc (loc, NE_EXPR, type,
13424 fold_convert_loc (loc,
13425 TREE_TYPE (arg1), arg0),
13426 arg1);
13427 case LT_EXPR:
13428 arg1 = const_binop (MINUS_EXPR, arg1, integer_one_node);
13429 return fold_build2_loc (loc, EQ_EXPR, type,
13430 fold_convert_loc (loc, TREE_TYPE (arg1),
13431 arg0),
13432 arg1);
13433 default:
13434 break;
13437 else if (TREE_INT_CST_HIGH (arg1) == signed_max_hi
13438 && TREE_INT_CST_LOW (arg1) == signed_max_lo
13439 && TYPE_UNSIGNED (arg1_type)
13440 /* We will flip the signedness of the comparison operator
13441 associated with the mode of arg1, so the sign bit is
13442 specified by this mode. Check that arg1 is the signed
13443 max associated with this sign bit. */
13444 && width == GET_MODE_BITSIZE (TYPE_MODE (arg1_type))
13445 /* signed_type does not work on pointer types. */
13446 && INTEGRAL_TYPE_P (arg1_type))
13448 /* The following case also applies to X < signed_max+1
13449	 and X >= signed_max+1 because of previous transformations. */
13450 if (code == LE_EXPR || code == GT_EXPR)
13452 tree st;
13453 st = signed_type_for (TREE_TYPE (arg1));
13454 return fold_build2_loc (loc,
13455 code == LE_EXPR ? GE_EXPR : LT_EXPR,
13456 type, fold_convert_loc (loc, st, arg0),
13457 build_int_cst (st, 0));
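	/* For example, with 32-bit unsigned x, "x > 2147483647" folds to
	   "(int) x < 0" and "x <= 2147483647" to "(int) x >= 0", turning
	   the comparison into a sign-bit test.  */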
13463 /* If we are comparing an ABS_EXPR with a constant, we can
13464 convert all the cases into explicit comparisons, but they may
13465 well not be faster than doing the ABS and one comparison.
13466 But ABS (X) <= C is a range comparison, which becomes a subtraction
13467 and a comparison, and is probably faster. */
13468 if (code == LE_EXPR
13469 && TREE_CODE (arg1) == INTEGER_CST
13470 && TREE_CODE (arg0) == ABS_EXPR
13471 && ! TREE_SIDE_EFFECTS (arg0)
13472 && (0 != (tem = negate_expr (arg1)))
13473 && TREE_CODE (tem) == INTEGER_CST
13474 && !TREE_OVERFLOW (tem))
13475 return fold_build2_loc (loc, TRUTH_ANDIF_EXPR, type,
13476 build2 (GE_EXPR, type,
13477 TREE_OPERAND (arg0, 0), tem),
13478 build2 (LE_EXPR, type,
13479 TREE_OPERAND (arg0, 0), arg1));
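	/* For example, "ABS_EXPR <x> <= 7" folds to the range check
	   "x >= -7 && x <= 7", which avoids computing the absolute
	   value.  */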
13481 /* Convert ABS_EXPR<x> >= 0 to true. */
13482 strict_overflow_p = false;
13483 if (code == GE_EXPR
13484 && (integer_zerop (arg1)
13485 || (! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0)))
13486 && real_zerop (arg1)))
13487 && tree_expr_nonnegative_warnv_p (arg0, &strict_overflow_p))
13489 if (strict_overflow_p)
13490 fold_overflow_warning (("assuming signed overflow does not occur "
13491 "when simplifying comparison of "
13492 "absolute value and zero"),
13493 WARN_STRICT_OVERFLOW_CONDITIONAL);
13494 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
13497 /* Convert ABS_EXPR<x> < 0 to false. */
13498 strict_overflow_p = false;
13499 if (code == LT_EXPR
13500 && (integer_zerop (arg1) || real_zerop (arg1))
13501 && tree_expr_nonnegative_warnv_p (arg0, &strict_overflow_p))
13503 if (strict_overflow_p)
13504 fold_overflow_warning (("assuming signed overflow does not occur "
13505 "when simplifying comparison of "
13506 "absolute value and zero"),
13507 WARN_STRICT_OVERFLOW_CONDITIONAL);
13508 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
13511 /* If X is unsigned, convert X < (1 << Y) into X >> Y == 0
13512 and similarly for >= into !=. */
13513 if ((code == LT_EXPR || code == GE_EXPR)
13514 && TYPE_UNSIGNED (TREE_TYPE (arg0))
13515 && TREE_CODE (arg1) == LSHIFT_EXPR
13516 && integer_onep (TREE_OPERAND (arg1, 0)))
13517 return build2_loc (loc, code == LT_EXPR ? EQ_EXPR : NE_EXPR, type,
13518 build2 (RSHIFT_EXPR, TREE_TYPE (arg0), arg0,
13519 TREE_OPERAND (arg1, 1)),
13520 build_zero_cst (TREE_TYPE (arg0)));
13522 if ((code == LT_EXPR || code == GE_EXPR)
13523 && TYPE_UNSIGNED (TREE_TYPE (arg0))
13524 && CONVERT_EXPR_P (arg1)
13525 && TREE_CODE (TREE_OPERAND (arg1, 0)) == LSHIFT_EXPR
13526 && integer_onep (TREE_OPERAND (TREE_OPERAND (arg1, 0), 0)))
13528 tem = build2 (RSHIFT_EXPR, TREE_TYPE (arg0), arg0,
13529 TREE_OPERAND (TREE_OPERAND (arg1, 0), 1));
13530 return build2_loc (loc, code == LT_EXPR ? EQ_EXPR : NE_EXPR, type,
13531 fold_convert_loc (loc, TREE_TYPE (arg0), tem),
13532 build_zero_cst (TREE_TYPE (arg0)));
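	/* For example, with unsigned x, "x < (1 << n)" folds to
	   "(x >> n) == 0" and "x >= (1 << n)" to "(x >> n) != 0",
	   whether or not the shift is wrapped in a conversion.  */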
13535 return NULL_TREE;
13537 case UNORDERED_EXPR:
13538 case ORDERED_EXPR:
13539 case UNLT_EXPR:
13540 case UNLE_EXPR:
13541 case UNGT_EXPR:
13542 case UNGE_EXPR:
13543 case UNEQ_EXPR:
13544 case LTGT_EXPR:
13545 if (TREE_CODE (arg0) == REAL_CST && TREE_CODE (arg1) == REAL_CST)
13547 t1 = fold_relational_const (code, type, arg0, arg1);
13548 if (t1 != NULL_TREE)
13549 return t1;
13552 /* If the first operand is NaN, the result is constant. */
13553 if (TREE_CODE (arg0) == REAL_CST
13554 && REAL_VALUE_ISNAN (TREE_REAL_CST (arg0))
13555 && (code != LTGT_EXPR || ! flag_trapping_math))
13557 t1 = (code == ORDERED_EXPR || code == LTGT_EXPR)
13558 ? integer_zero_node
13559 : integer_one_node;
13560 return omit_one_operand_loc (loc, type, t1, arg1);
13563 /* If the second operand is NaN, the result is constant. */
13564 if (TREE_CODE (arg1) == REAL_CST
13565 && REAL_VALUE_ISNAN (TREE_REAL_CST (arg1))
13566 && (code != LTGT_EXPR || ! flag_trapping_math))
13568 t1 = (code == ORDERED_EXPR || code == LTGT_EXPR)
13569 ? integer_zero_node
13570 : integer_one_node;
13571 return omit_one_operand_loc (loc, type, t1, arg0);
13574 /* Simplify unordered comparison of something with itself. */
13575 if ((code == UNLE_EXPR || code == UNGE_EXPR || code == UNEQ_EXPR)
13576 && operand_equal_p (arg0, arg1, 0))
13577 return constant_boolean_node (1, type);
13579 if (code == LTGT_EXPR
13580 && !flag_trapping_math
13581 && operand_equal_p (arg0, arg1, 0))
13582 return constant_boolean_node (0, type);
13584 /* Fold (double)float1 CMP (double)float2 into float1 CMP float2. */
13586 tree targ0 = strip_float_extensions (arg0);
13587 tree targ1 = strip_float_extensions (arg1);
13588 tree newtype = TREE_TYPE (targ0);
13590 if (TYPE_PRECISION (TREE_TYPE (targ1)) > TYPE_PRECISION (newtype))
13591 newtype = TREE_TYPE (targ1);
13593 if (TYPE_PRECISION (newtype) < TYPE_PRECISION (TREE_TYPE (arg0)))
13594 return fold_build2_loc (loc, code, type,
13595 fold_convert_loc (loc, newtype, targ0),
13596 fold_convert_loc (loc, newtype, targ1));
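	/* For example, with float f1 and f2, "(double) f1 < (double) f2"
	   folds to "f1 < f2", since widening both operands cannot change
	   the result of the comparison.  */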
13599 return NULL_TREE;
13601 case COMPOUND_EXPR:
13602 /* When pedantic, a compound expression can be neither an lvalue
13603 nor an integer constant expression. */
13604 if (TREE_SIDE_EFFECTS (arg0) || TREE_CONSTANT (arg1))
13605 return NULL_TREE;
13606	      /* Don't let (0, 0) be a null pointer constant. */
13607 tem = integer_zerop (arg1) ? build1 (NOP_EXPR, type, arg1)
13608 : fold_convert_loc (loc, type, arg1);
13609 return pedantic_non_lvalue_loc (loc, tem);
13611 case COMPLEX_EXPR:
13612 if ((TREE_CODE (arg0) == REAL_CST
13613 && TREE_CODE (arg1) == REAL_CST)
13614 || (TREE_CODE (arg0) == INTEGER_CST
13615 && TREE_CODE (arg1) == INTEGER_CST))
13616 return build_complex (type, arg0, arg1);
13617 if (TREE_CODE (arg0) == REALPART_EXPR
13618 && TREE_CODE (arg1) == IMAGPART_EXPR
13619 && TREE_TYPE (TREE_OPERAND (arg0, 0)) == type
13620 && operand_equal_p (TREE_OPERAND (arg0, 0),
13621 TREE_OPERAND (arg1, 0), 0))
13622 return omit_one_operand_loc (loc, type, TREE_OPERAND (arg0, 0),
13623 TREE_OPERAND (arg1, 0));
13624 return NULL_TREE;
13626 case ASSERT_EXPR:
13627 /* An ASSERT_EXPR should never be passed to fold_binary. */
13628 gcc_unreachable ();
13630 case VEC_PACK_TRUNC_EXPR:
13631 case VEC_PACK_FIX_TRUNC_EXPR:
13633 unsigned int nelts = TYPE_VECTOR_SUBPARTS (type), i;
13634 tree *elts;
13636 gcc_assert (TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0)) == nelts / 2
13637 && TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg1)) == nelts / 2);
13638 if (TREE_CODE (arg0) != VECTOR_CST || TREE_CODE (arg1) != VECTOR_CST)
13639 return NULL_TREE;
13641 elts = XALLOCAVEC (tree, nelts);
13642 if (!vec_cst_ctor_to_array (arg0, elts)
13643 || !vec_cst_ctor_to_array (arg1, elts + nelts / 2))
13644 return NULL_TREE;
13646 for (i = 0; i < nelts; i++)
13648 elts[i] = fold_convert_const (code == VEC_PACK_TRUNC_EXPR
13649 ? NOP_EXPR : FIX_TRUNC_EXPR,
13650 TREE_TYPE (type), elts[i]);
13651 if (elts[i] == NULL_TREE || !CONSTANT_CLASS_P (elts[i]))
13652 return NULL_TREE;
13655 return build_vector (type, elts);
13658 case VEC_WIDEN_MULT_LO_EXPR:
13659 case VEC_WIDEN_MULT_HI_EXPR:
13661 unsigned int nelts = TYPE_VECTOR_SUBPARTS (type), i;
13662 tree *elts;
13664 gcc_assert (TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0)) == nelts * 2
13665 && TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg1)) == nelts * 2);
13666 if (TREE_CODE (arg0) != VECTOR_CST || TREE_CODE (arg1) != VECTOR_CST)
13667 return NULL_TREE;
13669 elts = XALLOCAVEC (tree, nelts * 4);
13670 if (!vec_cst_ctor_to_array (arg0, elts)
13671 || !vec_cst_ctor_to_array (arg1, elts + nelts * 2))
13672 return NULL_TREE;
13674 if ((!BYTES_BIG_ENDIAN) ^ (code == VEC_WIDEN_MULT_LO_EXPR))
13675 elts += nelts;
13677 for (i = 0; i < nelts; i++)
13679 elts[i] = fold_convert_const (NOP_EXPR, TREE_TYPE (type), elts[i]);
13680 elts[i + nelts * 2]
13681 = fold_convert_const (NOP_EXPR, TREE_TYPE (type),
13682 elts[i + nelts * 2]);
13683 if (elts[i] == NULL_TREE || elts[i + nelts * 2] == NULL_TREE)
13684 return NULL_TREE;
13685 elts[i] = const_binop (MULT_EXPR, elts[i], elts[i + nelts * 2]);
13686 if (elts[i] == NULL_TREE || !CONSTANT_CLASS_P (elts[i]))
13687 return NULL_TREE;
13690 return build_vector (type, elts);
13693 default:
13694 return NULL_TREE;
13695 } /* switch (code) */
13698 /* Callback for walk_tree, looking for LABEL_EXPR. Return *TP if it is
13699 a LABEL_EXPR; otherwise return NULL_TREE. Do not check the subtrees
13700 of GOTO_EXPR. */
13702 static tree
13703 contains_label_1 (tree *tp, int *walk_subtrees, void *data ATTRIBUTE_UNUSED)
13705 switch (TREE_CODE (*tp))
13707 case LABEL_EXPR:
13708 return *tp;
13710 case GOTO_EXPR:
13711 *walk_subtrees = 0;
13713 /* ... fall through ... */
13715 default:
13716 return NULL_TREE;
13720 /* Return whether the sub-tree ST contains a label which is accessible from
13721 outside the sub-tree. */
13723 static bool
13724 contains_label_p (tree st)
13726 return
13727 (walk_tree_without_duplicates (&st, contains_label_1 , NULL) != NULL_TREE);
13730 /* Fold a ternary expression of code CODE and type TYPE with operands
13731 OP0, OP1, and OP2. Return the folded expression if folding is
13732 successful. Otherwise, return NULL_TREE. */
13734 tree
13735 fold_ternary_loc (location_t loc, enum tree_code code, tree type,
13736 tree op0, tree op1, tree op2)
13738 tree tem;
13739 tree arg0 = NULL_TREE, arg1 = NULL_TREE, arg2 = NULL_TREE;
13740 enum tree_code_class kind = TREE_CODE_CLASS (code);
13742 gcc_assert (IS_EXPR_CODE_CLASS (kind)
13743 && TREE_CODE_LENGTH (code) == 3);
13745 /* Strip any conversions that don't change the mode. This is safe
13746 for every expression, except for a comparison expression because
13747 its signedness is derived from its operands. So, in the latter
13748 case, only strip conversions that don't change the signedness.
13750 Note that this is done as an internal manipulation within the
13751 constant folder, in order to find the simplest representation of
13752	 the arguments so that their form can be studied. In any case,
13753 the appropriate type conversions should be put back in the tree
13754 that will get out of the constant folder. */
13755 if (op0)
13757 arg0 = op0;
13758 STRIP_NOPS (arg0);
13761 if (op1)
13763 arg1 = op1;
13764 STRIP_NOPS (arg1);
13767 if (op2)
13769 arg2 = op2;
13770 STRIP_NOPS (arg2);
13773 switch (code)
13775 case COMPONENT_REF:
13776 if (TREE_CODE (arg0) == CONSTRUCTOR
13777 && ! type_contains_placeholder_p (TREE_TYPE (arg0)))
13779 unsigned HOST_WIDE_INT idx;
13780 tree field, value;
13781 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (arg0), idx, field, value)
13782 if (field == arg1)
13783 return value;
13785 return NULL_TREE;
13787 case COND_EXPR:
13788 /* Pedantic ANSI C says that a conditional expression is never an lvalue,
13789 so all simple results must be passed through pedantic_non_lvalue. */
13790 if (TREE_CODE (arg0) == INTEGER_CST)
13792 tree unused_op = integer_zerop (arg0) ? op1 : op2;
13793 tem = integer_zerop (arg0) ? op2 : op1;
13794 /* Only optimize constant conditions when the selected branch
13795 has the same type as the COND_EXPR. This avoids optimizing
13796 away "c ? x : throw", where the throw has a void type.
13797	 Avoid throwing away an operand that contains a label. */
13798 if ((!TREE_SIDE_EFFECTS (unused_op)
13799 || !contains_label_p (unused_op))
13800 && (! VOID_TYPE_P (TREE_TYPE (tem))
13801 || VOID_TYPE_P (type)))
13802 return pedantic_non_lvalue_loc (loc, tem);
13803 return NULL_TREE;
13805 if (operand_equal_p (arg1, op2, 0))
13806 return pedantic_omit_one_operand_loc (loc, type, arg1, arg0);
13808 /* If we have A op B ? A : C, we may be able to convert this to a
13809 simpler expression, depending on the operation and the values
13810 of B and C. Signed zeros prevent all of these transformations,
13811 for reasons given above each one.
13813 Also try swapping the arguments and inverting the conditional. */
13814 if (COMPARISON_CLASS_P (arg0)
13815 && operand_equal_for_comparison_p (TREE_OPERAND (arg0, 0),
13816 arg1, TREE_OPERAND (arg0, 1))
13817 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg1))))
13819 tem = fold_cond_expr_with_comparison (loc, type, arg0, op1, op2);
13820 if (tem)
13821 return tem;
13824 if (COMPARISON_CLASS_P (arg0)
13825 && operand_equal_for_comparison_p (TREE_OPERAND (arg0, 0),
13826 op2,
13827 TREE_OPERAND (arg0, 1))
13828 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (op2))))
13830 location_t loc0 = expr_location_or (arg0, loc);
13831 tem = fold_truth_not_expr (loc0, arg0);
13832 if (tem && COMPARISON_CLASS_P (tem))
13834 tem = fold_cond_expr_with_comparison (loc, type, tem, op2, op1);
13835 if (tem)
13836 return tem;
13840 /* If the second operand is simpler than the third, swap them
13841 since that produces better jump optimization results. */
13842 if (truth_value_p (TREE_CODE (arg0))
13843 && tree_swap_operands_p (op1, op2, false))
13845 location_t loc0 = expr_location_or (arg0, loc);
13846 /* See if this can be inverted. If it can't, possibly because
13847 it was a floating-point inequality comparison, don't do
13848 anything. */
13849 tem = fold_truth_not_expr (loc0, arg0);
13850 if (tem)
13851 return fold_build3_loc (loc, code, type, tem, op2, op1);
13854 /* Convert A ? 1 : 0 to simply A. */
13855 if (integer_onep (op1)
13856 && integer_zerop (op2)
13857 /* If we try to convert OP0 to our type, the
13858 call to fold will try to move the conversion inside
13859 a COND, which will recurse. In that case, the COND_EXPR
13860 is probably the best choice, so leave it alone. */
13861 && type == TREE_TYPE (arg0))
13862 return pedantic_non_lvalue_loc (loc, arg0);
13864 /* Convert A ? 0 : 1 to !A. This prefers the use of NOT_EXPR
13865 over COND_EXPR in cases such as floating point comparisons. */
13866 if (integer_zerop (op1)
13867 && integer_onep (op2)
13868 && truth_value_p (TREE_CODE (arg0)))
13869 return pedantic_non_lvalue_loc (loc,
13870 fold_convert_loc (loc, type,
13871 invert_truthvalue_loc (loc,
13872 arg0)));
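/* For example (illustrative, a and b doubles): "(a < b) ? 1 : 0"
   folds to the comparison itself when the types already agree, and
   "(a < b) ? 0 : 1" to "!(a < b)" converted to TYPE.  Keeping the
   negation as a TRUTH_NOT_EXPR preserves the unordered semantics of
   the floating-point comparison, which rewriting to "a >= b" would
   not.  */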
13874 /* A < 0 ? <sign bit of A> : 0 is simply (A & <sign bit of A>). */
13875 if (TREE_CODE (arg0) == LT_EXPR
13876 && integer_zerop (TREE_OPERAND (arg0, 1))
13877 && integer_zerop (op2)
13878 && (tem = sign_bit_p (TREE_OPERAND (arg0, 0), arg1)))
13880 /* sign_bit_p only checks ARG1 bits within A's precision.
13881 If <sign bit of A> has wider type than A, bits outside
13882 of A's precision in <sign bit of A> need to be checked.
13883 If they are all 0, this optimization needs to be done
13884 in A's unsigned type; if they are all 1, in A's signed
13885 type; otherwise it can't be done. */
13886 if (TYPE_PRECISION (TREE_TYPE (tem))
13887 < TYPE_PRECISION (TREE_TYPE (arg1))
13888 && TYPE_PRECISION (TREE_TYPE (tem))
13889 < TYPE_PRECISION (type))
13891 unsigned HOST_WIDE_INT mask_lo;
13892 HOST_WIDE_INT mask_hi;
13893 int inner_width, outer_width;
13894 tree tem_type;
13896 inner_width = TYPE_PRECISION (TREE_TYPE (tem));
13897 outer_width = TYPE_PRECISION (TREE_TYPE (arg1));
13898 if (outer_width > TYPE_PRECISION (type))
13899 outer_width = TYPE_PRECISION (type);
13901 if (outer_width > HOST_BITS_PER_WIDE_INT)
13903 mask_hi = ((unsigned HOST_WIDE_INT) -1
13904 >> (HOST_BITS_PER_DOUBLE_INT - outer_width));
13905 mask_lo = -1;
13907 else
13909 mask_hi = 0;
13910 mask_lo = ((unsigned HOST_WIDE_INT) -1
13911 >> (HOST_BITS_PER_WIDE_INT - outer_width));
13913 if (inner_width > HOST_BITS_PER_WIDE_INT)
13915 mask_hi &= ~((unsigned HOST_WIDE_INT) -1
13916 >> (HOST_BITS_PER_DOUBLE_INT - inner_width));
13917 mask_lo = 0;
13919 else
13920 mask_lo &= ~((unsigned HOST_WIDE_INT) -1
13921 >> (HOST_BITS_PER_WIDE_INT - inner_width));
13923 if ((TREE_INT_CST_HIGH (arg1) & mask_hi) == mask_hi
13924 && (TREE_INT_CST_LOW (arg1) & mask_lo) == mask_lo)
13926 tem_type = signed_type_for (TREE_TYPE (tem));
13927 tem = fold_convert_loc (loc, tem_type, tem);
13929 else if ((TREE_INT_CST_HIGH (arg1) & mask_hi) == 0
13930 && (TREE_INT_CST_LOW (arg1) & mask_lo) == 0)
13932 tem_type = unsigned_type_for (TREE_TYPE (tem));
13933 tem = fold_convert_loc (loc, tem_type, tem);
13935 else
13936 tem = NULL;
13939 if (tem)
13940 return
13941 fold_convert_loc (loc, type,
13942 fold_build2_loc (loc, BIT_AND_EXPR,
13943 TREE_TYPE (tem), tem,
13944 fold_convert_loc (loc,
13945 TREE_TYPE (tem),
13946 arg1)));
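/* Worked example (illustrative types only): if A is a signed char and
   the mask is the 32-bit constant 0x80, mask bits 8..31 are all zero,
   so "A < 0 ? 0x80 : 0" is rewritten as
   "(int) ((unsigned char) A & 0x80)".  With a mask of 0xffffff80 the
   high bits are all ones instead, and A is converted to its signed
   variant so that sign extension recreates them.  */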
13949 /* (A >> N) & 1 ? (1 << N) : 0 is simply A & (1 << N). A & 1 was
13950 already handled above. */
13951 if (TREE_CODE (arg0) == BIT_AND_EXPR
13952 && integer_onep (TREE_OPERAND (arg0, 1))
13953 && integer_zerop (op2)
13954 && integer_pow2p (arg1))
13956 tree tem = TREE_OPERAND (arg0, 0);
13957 STRIP_NOPS (tem);
13958 if (TREE_CODE (tem) == RSHIFT_EXPR
13959 && TREE_CODE (TREE_OPERAND (tem, 1)) == INTEGER_CST
13960 && (unsigned HOST_WIDE_INT) tree_log2 (arg1) ==
13961 TREE_INT_CST_LOW (TREE_OPERAND (tem, 1)))
13962 return fold_build2_loc (loc, BIT_AND_EXPR, type,
13963 TREE_OPERAND (tem, 0), arg1);
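/* E.g. with N == 3 (illustrative): "((a >> 3) & 1) ? 8 : 0" folds to
   "a & 8", since the condition tests exactly bit 3 of "a".  */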
13966 /* A & N ? N : 0 is simply A & N if N is a power of two. This
13967 is probably obsolete because the first operand should be a
13968 truth value (that's why we have the two cases above), but let's
13969 leave it in until we can confirm this for all front-ends. */
13970 if (integer_zerop (op2)
13971 && TREE_CODE (arg0) == NE_EXPR
13972 && integer_zerop (TREE_OPERAND (arg0, 1))
13973 && integer_pow2p (arg1)
13974 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_AND_EXPR
13975 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
13976 arg1, OEP_ONLY_CONST))
13977 return pedantic_non_lvalue_loc (loc,
13978 fold_convert_loc (loc, type,
13979 TREE_OPERAND (arg0, 0)));
13981 /* Convert A ? B : 0 into A && B if A and B are truth values. */
13982 if (integer_zerop (op2)
13983 && truth_value_p (TREE_CODE (arg0))
13984 && truth_value_p (TREE_CODE (arg1)))
13985 return fold_build2_loc (loc, TRUTH_ANDIF_EXPR, type,
13986 fold_convert_loc (loc, type, arg0),
13987 arg1);
13989 /* Convert A ? B : 1 into !A || B if A and B are truth values. */
13990 if (integer_onep (op2)
13991 && truth_value_p (TREE_CODE (arg0))
13992 && truth_value_p (TREE_CODE (arg1)))
13994 location_t loc0 = expr_location_or (arg0, loc);
13995 /* Only perform transformation if ARG0 is easily inverted. */
13996 tem = fold_truth_not_expr (loc0, arg0);
13997 if (tem)
13998 return fold_build2_loc (loc, TRUTH_ORIF_EXPR, type,
13999 fold_convert_loc (loc, type, tem),
14000 arg1);
14003 /* Convert A ? 0 : B into !A && B if A and B are truth values. */
14004 if (integer_zerop (arg1)
14005 && truth_value_p (TREE_CODE (arg0))
14006 && truth_value_p (TREE_CODE (op2)))
14008 location_t loc0 = expr_location_or (arg0, loc);
14009 /* Only perform transformation if ARG0 is easily inverted. */
14010 tem = fold_truth_not_expr (loc0, arg0);
14011 if (tem)
14012 return fold_build2_loc (loc, TRUTH_ANDIF_EXPR, type,
14013 fold_convert_loc (loc, type, tem),
14014 op2);
14017 /* Convert A ? 1 : B into A || B if A and B are truth values. */
14018 if (integer_onep (arg1)
14019 && truth_value_p (TREE_CODE (arg0))
14020 && truth_value_p (TREE_CODE (op2)))
14021 return fold_build2_loc (loc, TRUTH_ORIF_EXPR, type,
14022 fold_convert_loc (loc, type, arg0),
14023 op2);
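/* Taken together, the four conversions above are, for truth-valued
   A and B:
     A ? B : 0  =>  A && B        A ? 0 : B  =>  !A && B
     A ? 1 : B  =>  A || B        A ? B : 1  =>  !A || B
   where the forms needing !A are only produced when ARG0 can be
   inverted cheaply.  */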
14025 return NULL_TREE;
14027 case CALL_EXPR:
14028 /* CALL_EXPRs used to be ternary exprs. Catch any mistaken uses
14029 of fold_ternary on them. */
14030 gcc_unreachable ();
14032 case BIT_FIELD_REF:
14033 if ((TREE_CODE (arg0) == VECTOR_CST
14034 || TREE_CODE (arg0) == CONSTRUCTOR)
14035 && (type == TREE_TYPE (TREE_TYPE (arg0))
14036 || (TREE_CODE (type) == VECTOR_TYPE
14037 && TREE_TYPE (type) == TREE_TYPE (TREE_TYPE (arg0)))))
14039 tree eltype = TREE_TYPE (TREE_TYPE (arg0));
14040 unsigned HOST_WIDE_INT width = tree_low_cst (TYPE_SIZE (eltype), 1);
14041 unsigned HOST_WIDE_INT n = tree_low_cst (arg1, 1);
14042 unsigned HOST_WIDE_INT idx = tree_low_cst (op2, 1);
14044 if (n != 0
14045 && (idx % width) == 0
14046 && (n % width) == 0
14047 && ((idx + n) / width) <= TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0)))
14049 idx = idx / width;
14050 n = n / width;
14051 if (TREE_CODE (type) == VECTOR_TYPE)
14053 if (TREE_CODE (arg0) == VECTOR_CST)
14055 tree *vals = XALLOCAVEC (tree, n);
14056 unsigned i;
14057 for (i = 0; i < n; ++i)
14058 vals[i] = VECTOR_CST_ELT (arg0, idx + i);
14059 return build_vector (type, vals);
14061 else
14063 VEC(constructor_elt, gc) *vals;
14064 unsigned i;
14065 if (CONSTRUCTOR_NELTS (arg0) == 0)
14066 return build_constructor (type, NULL);
14067 vals = VEC_alloc (constructor_elt, gc, n);
14068 for (i = 0; i < n && idx + i < CONSTRUCTOR_NELTS (arg0);
14069 ++i)
14070 CONSTRUCTOR_APPEND_ELT (vals, NULL_TREE,
14071 CONSTRUCTOR_ELT
14072 (arg0, idx + i)->value);
14073 return build_constructor (type, vals);
14076 else if (n == 1)
14078 if (TREE_CODE (arg0) == VECTOR_CST)
14079 return VECTOR_CST_ELT (arg0, idx);
14080 else if (idx < CONSTRUCTOR_NELTS (arg0))
14081 return CONSTRUCTOR_ELT (arg0, idx)->value;
14082 return build_zero_cst (type);
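/* Illustration (hypothetical V4SI constant v = {10, 20, 30, 40}):
   BIT_FIELD_REF <v, 32, 64> selects element 2 and folds to 30, while
   BIT_FIELD_REF <v, 64, 0> with a V2SI result type folds to the
   constant vector {10, 20}.  */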
14087 /* A bit-field-ref that references the full argument can be stripped. */
14088 if (INTEGRAL_TYPE_P (TREE_TYPE (arg0))
14089 && TYPE_PRECISION (TREE_TYPE (arg0)) == tree_low_cst (arg1, 1)
14090 && integer_zerop (op2))
14091 return fold_convert_loc (loc, type, arg0);
14093 /* On constants we can use native encode/interpret to constant
14094 fold (nearly) all BIT_FIELD_REFs. */
14095 if (CONSTANT_CLASS_P (arg0)
14096 && can_native_interpret_type_p (type)
14097 && host_integerp (TYPE_SIZE_UNIT (TREE_TYPE (arg0)), 1)
14098 /* This limitation should not be necessary; we just need to
14099 round this up to mode size. */
14100 && tree_low_cst (op1, 1) % BITS_PER_UNIT == 0
14101 /* Need bit-shifting of the buffer to relax the following. */
14102 && tree_low_cst (op2, 1) % BITS_PER_UNIT == 0)
14104 unsigned HOST_WIDE_INT bitpos = tree_low_cst (op2, 1);
14105 unsigned HOST_WIDE_INT bitsize = tree_low_cst (op1, 1);
14106 unsigned HOST_WIDE_INT clen;
14107 clen = tree_low_cst (TYPE_SIZE_UNIT (TREE_TYPE (arg0)), 1);
14108 /* ??? We cannot tell native_encode_expr to start at
14109 some random byte only. So limit us to a reasonable amount
14110 of work. */
14111 if (clen <= 4096)
14113 unsigned char *b = XALLOCAVEC (unsigned char, clen);
14114 unsigned HOST_WIDE_INT len = native_encode_expr (arg0, b, clen);
14115 if (len > 0
14116 && len * BITS_PER_UNIT >= bitpos + bitsize)
14118 tree v = native_interpret_expr (type,
14119 b + bitpos / BITS_PER_UNIT,
14120 bitsize / BITS_PER_UNIT);
14121 if (v)
14122 return v;
14127 return NULL_TREE;
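/* Sketch of the mechanism (illustrative): for a 64-bit integer
   constant C, BIT_FIELD_REF <C, 32, 32> is folded by serializing C
   into an 8-byte buffer via native_encode_expr and re-reading bytes
   4..7 with native_interpret_expr, so the result honors the target's
   byte order.  */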
14129 case FMA_EXPR:
14130 /* For integers we can decompose the FMA if possible. */
14131 if (TREE_CODE (arg0) == INTEGER_CST
14132 && TREE_CODE (arg1) == INTEGER_CST)
14133 return fold_build2_loc (loc, PLUS_EXPR, type,
14134 const_binop (MULT_EXPR, arg0, arg1), arg2);
14135 if (integer_zerop (arg2))
14136 return fold_build2_loc (loc, MULT_EXPR, type, arg0, arg1);
14138 return fold_fma (loc, type, arg0, arg1, arg2);
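/* E.g. FMA_EXPR <4, 5, x> folds to "20 + x" and FMA_EXPR <a, b, 0>
   to "a * b"; other cases are left to fold_fma.  */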
14140 case VEC_PERM_EXPR:
14141 if (TREE_CODE (arg2) == VECTOR_CST)
14143 unsigned int nelts = TYPE_VECTOR_SUBPARTS (type), i;
14144 unsigned char *sel = XALLOCAVEC (unsigned char, nelts);
14145 tree t;
14146 bool need_mask_canon = false;
14148 gcc_assert (nelts == VECTOR_CST_NELTS (arg2));
14149 for (i = 0; i < nelts; i++)
14151 tree val = VECTOR_CST_ELT (arg2, i);
14152 if (TREE_CODE (val) != INTEGER_CST)
14153 return NULL_TREE;
14155 sel[i] = TREE_INT_CST_LOW (val) & (2 * nelts - 1);
14156 if (TREE_INT_CST_HIGH (val)
14157 || ((unsigned HOST_WIDE_INT)
14158 TREE_INT_CST_LOW (val) != sel[i]))
14159 need_mask_canon = true;
14162 if ((TREE_CODE (arg0) == VECTOR_CST
14163 || TREE_CODE (arg0) == CONSTRUCTOR)
14164 && (TREE_CODE (arg1) == VECTOR_CST
14165 || TREE_CODE (arg1) == CONSTRUCTOR))
14167 t = fold_vec_perm (type, arg0, arg1, sel);
14168 if (t != NULL_TREE)
14169 return t;
14172 if (need_mask_canon && arg2 == op2)
14174 tree *tsel = XALLOCAVEC (tree, nelts);
14175 tree eltype = TREE_TYPE (TREE_TYPE (arg2));
14176 for (i = 0; i < nelts; i++)
14177 tsel[i] = build_int_cst (eltype, sel[i]);
14178 t = build_vector (TREE_TYPE (arg2), tsel);
14179 return build3_loc (loc, VEC_PERM_EXPR, type, op0, op1, t);
14182 return NULL_TREE;
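/* Illustration (hypothetical V4SI operands):
     VEC_PERM_EXPR <{0,1,2,3}, {4,5,6,7}, {0,4,1,5}>
   folds to the constant {0, 4, 1, 5}.  A mask element such as 12 is
   first reduced modulo 2*nelts to 4, and an out-of-range mask is
   rewritten in that canonical form even when the data operands are
   not constant.  */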
14184 default:
14185 return NULL_TREE;
14186 } /* switch (code) */
14189 /* Perform constant folding and related simplification of EXPR.
14190 The related simplifications include x*1 => x, x*0 => 0, etc.,
14191 and application of the associative law.
14192 NOP_EXPR conversions may be removed freely (as long as we
14193 are careful not to change the type of the overall expression).
14194 We cannot simplify through a CONVERT_EXPR, FIX_EXPR or FLOAT_EXPR,
14195 but we can constant-fold them if they have constant operands. */
14197 #ifdef ENABLE_FOLD_CHECKING
14198 # define fold(x) fold_1 (x)
14199 static tree fold_1 (tree);
14200 static
14201 #endif
14202 tree
14203 fold (tree expr)
14205 const tree t = expr;
14206 enum tree_code code = TREE_CODE (t);
14207 enum tree_code_class kind = TREE_CODE_CLASS (code);
14208 tree tem;
14209 location_t loc = EXPR_LOCATION (expr);
14211 /* Return right away if a constant. */
14212 if (kind == tcc_constant)
14213 return t;
14215 /* CALL_EXPR-like objects with variable numbers of operands are
14216 treated specially. */
14217 if (kind == tcc_vl_exp)
14219 if (code == CALL_EXPR)
14221 tem = fold_call_expr (loc, expr, false);
14222 return tem ? tem : expr;
14224 return expr;
14227 if (IS_EXPR_CODE_CLASS (kind))
14229 tree type = TREE_TYPE (t);
14230 tree op0, op1, op2;
14232 switch (TREE_CODE_LENGTH (code))
14234 case 1:
14235 op0 = TREE_OPERAND (t, 0);
14236 tem = fold_unary_loc (loc, code, type, op0);
14237 return tem ? tem : expr;
14238 case 2:
14239 op0 = TREE_OPERAND (t, 0);
14240 op1 = TREE_OPERAND (t, 1);
14241 tem = fold_binary_loc (loc, code, type, op0, op1);
14242 return tem ? tem : expr;
14243 case 3:
14244 op0 = TREE_OPERAND (t, 0);
14245 op1 = TREE_OPERAND (t, 1);
14246 op2 = TREE_OPERAND (t, 2);
14247 tem = fold_ternary_loc (loc, code, type, op0, op1, op2);
14248 return tem ? tem : expr;
14249 default:
14250 break;
14254 switch (code)
14256 case ARRAY_REF:
14258 tree op0 = TREE_OPERAND (t, 0);
14259 tree op1 = TREE_OPERAND (t, 1);
14261 if (TREE_CODE (op1) == INTEGER_CST
14262 && TREE_CODE (op0) == CONSTRUCTOR
14263 && ! type_contains_placeholder_p (TREE_TYPE (op0)))
14265 VEC(constructor_elt,gc) *elts = CONSTRUCTOR_ELTS (op0);
14266 unsigned HOST_WIDE_INT end = VEC_length (constructor_elt, elts);
14267 unsigned HOST_WIDE_INT begin = 0;
14269 /* Find a matching index by means of a binary search. */
14270 while (begin != end)
14272 unsigned HOST_WIDE_INT middle = (begin + end) / 2;
14273 tree index = VEC_index (constructor_elt, elts, middle)->index;
14275 if (TREE_CODE (index) == INTEGER_CST
14276 && tree_int_cst_lt (index, op1))
14277 begin = middle + 1;
14278 else if (TREE_CODE (index) == INTEGER_CST
14279 && tree_int_cst_lt (op1, index))
14280 end = middle;
14281 else if (TREE_CODE (index) == RANGE_EXPR
14282 && tree_int_cst_lt (TREE_OPERAND (index, 1), op1))
14283 begin = middle + 1;
14284 else if (TREE_CODE (index) == RANGE_EXPR
14285 && tree_int_cst_lt (op1, TREE_OPERAND (index, 0)))
14286 end = middle;
14287 else
14288 return VEC_index (constructor_elt, elts, middle)->value;
14292 return t;
14295 case CONST_DECL:
14296 return fold (DECL_INITIAL (t));
14298 default:
14299 return t;
14300 } /* switch (code) */
14303 #ifdef ENABLE_FOLD_CHECKING
14304 #undef fold
14306 static void fold_checksum_tree (const_tree, struct md5_ctx *, htab_t);
14307 static void fold_check_failed (const_tree, const_tree);
14308 void print_fold_checksum (const_tree);
14310 /* When --enable-checking=fold, compute a digest of expr before
14311 and after actual fold call to see if fold did not accidentally
14312 change original expr. */
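/* The pattern, in outline (assuming ENABLE_FOLD_CHECKING):

     md5 (expr) -> before;
     ret = fold_1 (expr);
     md5 (expr) -> after;
     if (before != after)
       fold_check_failed (expr, ret);  */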
14314 tree
14315 fold (tree expr)
14317 tree ret;
14318 struct md5_ctx ctx;
14319 unsigned char checksum_before[16], checksum_after[16];
14320 htab_t ht;
14322 ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
14323 md5_init_ctx (&ctx);
14324 fold_checksum_tree (expr, &ctx, ht);
14325 md5_finish_ctx (&ctx, checksum_before);
14326 htab_empty (ht);
14328 ret = fold_1 (expr);
14330 md5_init_ctx (&ctx);
14331 fold_checksum_tree (expr, &ctx, ht);
14332 md5_finish_ctx (&ctx, checksum_after);
14333 htab_delete (ht);
14335 if (memcmp (checksum_before, checksum_after, 16))
14336 fold_check_failed (expr, ret);
14338 return ret;
14341 void
14342 print_fold_checksum (const_tree expr)
14344 struct md5_ctx ctx;
14345 unsigned char checksum[16], cnt;
14346 htab_t ht;
14348 ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
14349 md5_init_ctx (&ctx);
14350 fold_checksum_tree (expr, &ctx, ht);
14351 md5_finish_ctx (&ctx, checksum);
14352 htab_delete (ht);
14353 for (cnt = 0; cnt < 16; ++cnt)
14354 fprintf (stderr, "%02x", checksum[cnt]);
14355 putc ('\n', stderr);
14358 static void
14359 fold_check_failed (const_tree expr ATTRIBUTE_UNUSED, const_tree ret ATTRIBUTE_UNUSED)
14361 internal_error ("fold check: original tree changed by fold");
14364 static void
14365 fold_checksum_tree (const_tree expr, struct md5_ctx *ctx, htab_t ht)
14367 void **slot;
14368 enum tree_code code;
14369 union tree_node buf;
14370 int i, len;
14372 recursive_label:
14373 if (expr == NULL)
14374 return;
14375 slot = (void **) htab_find_slot (ht, expr, INSERT);
14376 if (*slot != NULL)
14377 return;
14378 *slot = CONST_CAST_TREE (expr);
14379 code = TREE_CODE (expr);
14380 if (TREE_CODE_CLASS (code) == tcc_declaration
14381 && DECL_ASSEMBLER_NAME_SET_P (expr))
14383 /* Allow DECL_ASSEMBLER_NAME to be modified. */
14384 memcpy ((char *) &buf, expr, tree_size (expr));
14385 SET_DECL_ASSEMBLER_NAME ((tree)&buf, NULL);
14386 expr = (tree) &buf;
14388 else if (TREE_CODE_CLASS (code) == tcc_type
14389 && (TYPE_POINTER_TO (expr)
14390 || TYPE_REFERENCE_TO (expr)
14391 || TYPE_CACHED_VALUES_P (expr)
14392 || TYPE_CONTAINS_PLACEHOLDER_INTERNAL (expr)
14393 || TYPE_NEXT_VARIANT (expr)))
14395 /* Allow these fields to be modified. */
14396 tree tmp;
14397 memcpy ((char *) &buf, expr, tree_size (expr));
14398 expr = tmp = (tree) &buf;
14399 TYPE_CONTAINS_PLACEHOLDER_INTERNAL (tmp) = 0;
14400 TYPE_POINTER_TO (tmp) = NULL;
14401 TYPE_REFERENCE_TO (tmp) = NULL;
14402 TYPE_NEXT_VARIANT (tmp) = NULL;
14403 if (TYPE_CACHED_VALUES_P (tmp))
14405 TYPE_CACHED_VALUES_P (tmp) = 0;
14406 TYPE_CACHED_VALUES (tmp) = NULL;
14409 md5_process_bytes (expr, tree_size (expr), ctx);
14410 if (CODE_CONTAINS_STRUCT (code, TS_TYPED))
14411 fold_checksum_tree (TREE_TYPE (expr), ctx, ht);
14412 if (TREE_CODE_CLASS (code) != tcc_type
14413 && TREE_CODE_CLASS (code) != tcc_declaration
14414 && code != TREE_LIST
14415 && code != SSA_NAME
14416 && CODE_CONTAINS_STRUCT (code, TS_COMMON))
14417 fold_checksum_tree (TREE_CHAIN (expr), ctx, ht);
14418 switch (TREE_CODE_CLASS (code))
14420 case tcc_constant:
14421 switch (code)
14423 case STRING_CST:
14424 md5_process_bytes (TREE_STRING_POINTER (expr),
14425 TREE_STRING_LENGTH (expr), ctx);
14426 break;
14427 case COMPLEX_CST:
14428 fold_checksum_tree (TREE_REALPART (expr), ctx, ht);
14429 fold_checksum_tree (TREE_IMAGPART (expr), ctx, ht);
14430 break;
14431 case VECTOR_CST:
14432 for (i = 0; i < (int) VECTOR_CST_NELTS (expr); ++i)
14433 fold_checksum_tree (VECTOR_CST_ELT (expr, i), ctx, ht);
14434 break;
14435 default:
14436 break;
14438 break;
14439 case tcc_exceptional:
14440 switch (code)
14442 case TREE_LIST:
14443 fold_checksum_tree (TREE_PURPOSE (expr), ctx, ht);
14444 fold_checksum_tree (TREE_VALUE (expr), ctx, ht);
14445 expr = TREE_CHAIN (expr);
14446 goto recursive_label;
14447 break;
14448 case TREE_VEC:
14449 for (i = 0; i < TREE_VEC_LENGTH (expr); ++i)
14450 fold_checksum_tree (TREE_VEC_ELT (expr, i), ctx, ht);
14451 break;
14452 default:
14453 break;
14455 break;
14456 case tcc_expression:
14457 case tcc_reference:
14458 case tcc_comparison:
14459 case tcc_unary:
14460 case tcc_binary:
14461 case tcc_statement:
14462 case tcc_vl_exp:
14463 len = TREE_OPERAND_LENGTH (expr);
14464 for (i = 0; i < len; ++i)
14465 fold_checksum_tree (TREE_OPERAND (expr, i), ctx, ht);
14466 break;
14467 case tcc_declaration:
14468 fold_checksum_tree (DECL_NAME (expr), ctx, ht);
14469 fold_checksum_tree (DECL_CONTEXT (expr), ctx, ht);
14470 if (CODE_CONTAINS_STRUCT (TREE_CODE (expr), TS_DECL_COMMON))
14472 fold_checksum_tree (DECL_SIZE (expr), ctx, ht);
14473 fold_checksum_tree (DECL_SIZE_UNIT (expr), ctx, ht);
14474 fold_checksum_tree (DECL_INITIAL (expr), ctx, ht);
14475 fold_checksum_tree (DECL_ABSTRACT_ORIGIN (expr), ctx, ht);
14476 fold_checksum_tree (DECL_ATTRIBUTES (expr), ctx, ht);
14478 if (CODE_CONTAINS_STRUCT (TREE_CODE (expr), TS_DECL_WITH_VIS))
14479 fold_checksum_tree (DECL_SECTION_NAME (expr), ctx, ht);
14481 if (CODE_CONTAINS_STRUCT (TREE_CODE (expr), TS_DECL_NON_COMMON))
14483 fold_checksum_tree (DECL_VINDEX (expr), ctx, ht);
14484 fold_checksum_tree (DECL_RESULT_FLD (expr), ctx, ht);
14485 fold_checksum_tree (DECL_ARGUMENT_FLD (expr), ctx, ht);
14487 break;
14488 case tcc_type:
14489 if (TREE_CODE (expr) == ENUMERAL_TYPE)
14490 fold_checksum_tree (TYPE_VALUES (expr), ctx, ht);
14491 fold_checksum_tree (TYPE_SIZE (expr), ctx, ht);
14492 fold_checksum_tree (TYPE_SIZE_UNIT (expr), ctx, ht);
14493 fold_checksum_tree (TYPE_ATTRIBUTES (expr), ctx, ht);
14494 fold_checksum_tree (TYPE_NAME (expr), ctx, ht);
14495 if (INTEGRAL_TYPE_P (expr)
14496 || SCALAR_FLOAT_TYPE_P (expr))
14498 fold_checksum_tree (TYPE_MIN_VALUE (expr), ctx, ht);
14499 fold_checksum_tree (TYPE_MAX_VALUE (expr), ctx, ht);
14501 fold_checksum_tree (TYPE_MAIN_VARIANT (expr), ctx, ht);
14502 if (TREE_CODE (expr) == RECORD_TYPE
14503 || TREE_CODE (expr) == UNION_TYPE
14504 || TREE_CODE (expr) == QUAL_UNION_TYPE)
14505 fold_checksum_tree (TYPE_BINFO (expr), ctx, ht);
14506 fold_checksum_tree (TYPE_CONTEXT (expr), ctx, ht);
14507 break;
14508 default:
14509 break;
14513 /* Helper function for outputting the checksum of a tree T. When
14514 debugging with gdb, you can "define mynext" to be "next" followed
14515 by "call debug_fold_checksum (op0)", then just trace down till the
14516 outputs differ. */
14518 DEBUG_FUNCTION void
14519 debug_fold_checksum (const_tree t)
14521 int i;
14522 unsigned char checksum[16];
14523 struct md5_ctx ctx;
14524 htab_t ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
14526 md5_init_ctx (&ctx);
14527 fold_checksum_tree (t, &ctx, ht);
14528 md5_finish_ctx (&ctx, checksum);
14529 htab_empty (ht);
14531 for (i = 0; i < 16; i++)
14532 fprintf (stderr, "%d ", checksum[i]);
14534 fprintf (stderr, "\n");
14537 #endif
14539 /* Fold a unary tree expression with code CODE of type TYPE with an
14540 operand OP0. LOC is the location of the resulting expression.
14541 Return a folded expression if successful. Otherwise, return a tree
14542 expression with code CODE of type TYPE with an operand OP0. */
14544 tree
14545 fold_build1_stat_loc (location_t loc,
14546 enum tree_code code, tree type, tree op0 MEM_STAT_DECL)
14548 tree tem;
14549 #ifdef ENABLE_FOLD_CHECKING
14550 unsigned char checksum_before[16], checksum_after[16];
14551 struct md5_ctx ctx;
14552 htab_t ht;
14554 ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
14555 md5_init_ctx (&ctx);
14556 fold_checksum_tree (op0, &ctx, ht);
14557 md5_finish_ctx (&ctx, checksum_before);
14558 htab_empty (ht);
14559 #endif
14561 tem = fold_unary_loc (loc, code, type, op0);
14562 if (!tem)
14563 tem = build1_stat_loc (loc, code, type, op0 PASS_MEM_STAT);
14565 #ifdef ENABLE_FOLD_CHECKING
14566 md5_init_ctx (&ctx);
14567 fold_checksum_tree (op0, &ctx, ht);
14568 md5_finish_ctx (&ctx, checksum_after);
14569 htab_delete (ht);
14571 if (memcmp (checksum_before, checksum_after, 16))
14572 fold_check_failed (op0, tem);
14573 #endif
14574 return tem;
14577 /* Fold a binary tree expression with code CODE of type TYPE with
14578 operands OP0 and OP1. LOC is the location of the resulting
14579 expression. Return a folded expression if successful. Otherwise,
14580 return a tree expression with code CODE of type TYPE with operands
14581 OP0 and OP1. */
14583 tree
14584 fold_build2_stat_loc (location_t loc,
14585 enum tree_code code, tree type, tree op0, tree op1
14586 MEM_STAT_DECL)
14588 tree tem;
14589 #ifdef ENABLE_FOLD_CHECKING
14590 unsigned char checksum_before_op0[16],
14591 checksum_before_op1[16],
14592 checksum_after_op0[16],
14593 checksum_after_op1[16];
14594 struct md5_ctx ctx;
14595 htab_t ht;
14597 ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
14598 md5_init_ctx (&ctx);
14599 fold_checksum_tree (op0, &ctx, ht);
14600 md5_finish_ctx (&ctx, checksum_before_op0);
14601 htab_empty (ht);
14603 md5_init_ctx (&ctx);
14604 fold_checksum_tree (op1, &ctx, ht);
14605 md5_finish_ctx (&ctx, checksum_before_op1);
14606 htab_empty (ht);
14607 #endif
14609 tem = fold_binary_loc (loc, code, type, op0, op1);
14610 if (!tem)
14611 tem = build2_stat_loc (loc, code, type, op0, op1 PASS_MEM_STAT);
14613 #ifdef ENABLE_FOLD_CHECKING
14614 md5_init_ctx (&ctx);
14615 fold_checksum_tree (op0, &ctx, ht);
14616 md5_finish_ctx (&ctx, checksum_after_op0);
14617 htab_empty (ht);
14619 if (memcmp (checksum_before_op0, checksum_after_op0, 16))
14620 fold_check_failed (op0, tem);
14622 md5_init_ctx (&ctx);
14623 fold_checksum_tree (op1, &ctx, ht);
14624 md5_finish_ctx (&ctx, checksum_after_op1);
14625 htab_delete (ht);
14627 if (memcmp (checksum_before_op1, checksum_after_op1, 16))
14628 fold_check_failed (op1, tem);
14629 #endif
14630 return tem;
14633 /* Fold a ternary tree expression with code CODE of type TYPE with
14634 operands OP0, OP1, and OP2. Return a folded expression if
14635 successful. Otherwise, return a tree expression with code CODE of
14636 type TYPE with operands OP0, OP1, and OP2. */
14638 tree
14639 fold_build3_stat_loc (location_t loc, enum tree_code code, tree type,
14640 tree op0, tree op1, tree op2 MEM_STAT_DECL)
14642 tree tem;
14643 #ifdef ENABLE_FOLD_CHECKING
14644 unsigned char checksum_before_op0[16],
14645 checksum_before_op1[16],
14646 checksum_before_op2[16],
14647 checksum_after_op0[16],
14648 checksum_after_op1[16],
14649 checksum_after_op2[16];
14650 struct md5_ctx ctx;
14651 htab_t ht;
14653 ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
14654 md5_init_ctx (&ctx);
14655 fold_checksum_tree (op0, &ctx, ht);
14656 md5_finish_ctx (&ctx, checksum_before_op0);
14657 htab_empty (ht);
14659 md5_init_ctx (&ctx);
14660 fold_checksum_tree (op1, &ctx, ht);
14661 md5_finish_ctx (&ctx, checksum_before_op1);
14662 htab_empty (ht);
14664 md5_init_ctx (&ctx);
14665 fold_checksum_tree (op2, &ctx, ht);
14666 md5_finish_ctx (&ctx, checksum_before_op2);
14667 htab_empty (ht);
14668 #endif
14670 gcc_assert (TREE_CODE_CLASS (code) != tcc_vl_exp);
14671 tem = fold_ternary_loc (loc, code, type, op0, op1, op2);
14672 if (!tem)
14673 tem = build3_stat_loc (loc, code, type, op0, op1, op2 PASS_MEM_STAT);
14675 #ifdef ENABLE_FOLD_CHECKING
14676 md5_init_ctx (&ctx);
14677 fold_checksum_tree (op0, &ctx, ht);
14678 md5_finish_ctx (&ctx, checksum_after_op0);
14679 htab_empty (ht);
14681 if (memcmp (checksum_before_op0, checksum_after_op0, 16))
14682 fold_check_failed (op0, tem);
14684 md5_init_ctx (&ctx);
14685 fold_checksum_tree (op1, &ctx, ht);
14686 md5_finish_ctx (&ctx, checksum_after_op1);
14687 htab_empty (ht);
14689 if (memcmp (checksum_before_op1, checksum_after_op1, 16))
14690 fold_check_failed (op1, tem);
14692 md5_init_ctx (&ctx);
14693 fold_checksum_tree (op2, &ctx, ht);
14694 md5_finish_ctx (&ctx, checksum_after_op2);
14695 htab_delete (ht);
14697 if (memcmp (checksum_before_op2, checksum_after_op2, 16))
14698 fold_check_failed (op2, tem);
14699 #endif
14700 return tem;
14703 /* Fold a CALL_EXPR expression of type TYPE with operands FN and NARGS
14704 arguments in ARGARRAY, and a null static chain.
14705 Return a folded expression if successful. Otherwise, return a CALL_EXPR
14706 of type TYPE from the given operands as constructed by build_call_array. */
14708 tree
14709 fold_build_call_array_loc (location_t loc, tree type, tree fn,
14710 int nargs, tree *argarray)
14712 tree tem;
14713 #ifdef ENABLE_FOLD_CHECKING
14714 unsigned char checksum_before_fn[16],
14715 checksum_before_arglist[16],
14716 checksum_after_fn[16],
14717 checksum_after_arglist[16];
14718 struct md5_ctx ctx;
14719 htab_t ht;
14720 int i;
14722 ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
14723 md5_init_ctx (&ctx);
14724 fold_checksum_tree (fn, &ctx, ht);
14725 md5_finish_ctx (&ctx, checksum_before_fn);
14726 htab_empty (ht);
14728 md5_init_ctx (&ctx);
14729 for (i = 0; i < nargs; i++)
14730 fold_checksum_tree (argarray[i], &ctx, ht);
14731 md5_finish_ctx (&ctx, checksum_before_arglist);
14732 htab_empty (ht);
14733 #endif
14735 tem = fold_builtin_call_array (loc, type, fn, nargs, argarray);
14737 #ifdef ENABLE_FOLD_CHECKING
14738 md5_init_ctx (&ctx);
14739 fold_checksum_tree (fn, &ctx, ht);
14740 md5_finish_ctx (&ctx, checksum_after_fn);
14741 htab_empty (ht);
14743 if (memcmp (checksum_before_fn, checksum_after_fn, 16))
14744 fold_check_failed (fn, tem);
14746 md5_init_ctx (&ctx);
14747 for (i = 0; i < nargs; i++)
14748 fold_checksum_tree (argarray[i], &ctx, ht);
14749 md5_finish_ctx (&ctx, checksum_after_arglist);
14750 htab_delete (ht);
14752 if (memcmp (checksum_before_arglist, checksum_after_arglist, 16))
14753 fold_check_failed (NULL_TREE, tem);
14754 #endif
14755 return tem;
14758 /* Perform constant folding and related simplification of initializer
14759 expression EXPR. These behave identically to "fold_buildN" but ignore
14760 potential run-time traps and exceptions that fold must preserve. */
14762 #define START_FOLD_INIT \
14763 int saved_signaling_nans = flag_signaling_nans;\
14764 int saved_trapping_math = flag_trapping_math;\
14765 int saved_rounding_math = flag_rounding_math;\
14766 int saved_trapv = flag_trapv;\
14767 int saved_folding_initializer = folding_initializer;\
14768 flag_signaling_nans = 0;\
14769 flag_trapping_math = 0;\
14770 flag_rounding_math = 0;\
14771 flag_trapv = 0;\
14772 folding_initializer = 1;
14774 #define END_FOLD_INIT \
14775 flag_signaling_nans = saved_signaling_nans;\
14776 flag_trapping_math = saved_trapping_math;\
14777 flag_rounding_math = saved_rounding_math;\
14778 flag_trapv = saved_trapv;\
14779 folding_initializer = saved_folding_initializer;
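/* For instance, a static initializer such as "1.0 / 3.0" can be
   folded by fold_build2_initializer_loc even under -frounding-math,
   because the flags above are cleared for the duration of the
   call.  */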
14781 tree
14782 fold_build1_initializer_loc (location_t loc, enum tree_code code,
14783 tree type, tree op)
14785 tree result;
14786 START_FOLD_INIT;
14788 result = fold_build1_loc (loc, code, type, op);
14790 END_FOLD_INIT;
14791 return result;
14794 tree
14795 fold_build2_initializer_loc (location_t loc, enum tree_code code,
14796 tree type, tree op0, tree op1)
14798 tree result;
14799 START_FOLD_INIT;
14801 result = fold_build2_loc (loc, code, type, op0, op1);
14803 END_FOLD_INIT;
14804 return result;
14807 tree
14808 fold_build3_initializer_loc (location_t loc, enum tree_code code,
14809 tree type, tree op0, tree op1, tree op2)
14811 tree result;
14812 START_FOLD_INIT;
14814 result = fold_build3_loc (loc, code, type, op0, op1, op2);
14816 END_FOLD_INIT;
14817 return result;
14820 tree
14821 fold_build_call_array_initializer_loc (location_t loc, tree type, tree fn,
14822 int nargs, tree *argarray)
14824 tree result;
14825 START_FOLD_INIT;
14827 result = fold_build_call_array_loc (loc, type, fn, nargs, argarray);
14829 END_FOLD_INIT;
14830 return result;
14833 #undef START_FOLD_INIT
14834 #undef END_FOLD_INIT
14836 /* Determine if first argument is a multiple of second argument. Return 0 if
14837 it is not, or we cannot easily determine that it is.
14839 An example of the sort of thing we care about (at this point; this routine
14840 could surely be made more general, and expanded to do what the *_DIV_EXPR's
14841 fold cases do now) is discovering that
14843 SAVE_EXPR (I) * SAVE_EXPR (J * 8)
14845 is a multiple of
14847 SAVE_EXPR (J * 8)
14849 when we know that the two SAVE_EXPR (J * 8) nodes are the same node.
14851 This code also handles discovering that
14853 SAVE_EXPR (I) * SAVE_EXPR (J * 8)
14855 is a multiple of 8 so we don't have to worry about dealing with a
14856 possible remainder.
14858 Note that we *look* inside a SAVE_EXPR only to determine how it was
14859 calculated; it is not safe for fold to do much of anything else with the
14860 internals of a SAVE_EXPR, since it cannot know when it will be evaluated
14861 at run time. For example, the latter example above *cannot* be implemented
14862 as SAVE_EXPR (I) * J or any variant thereof, since the value of J at
14863 evaluation time of the original SAVE_EXPR is not necessarily the same at
14864 the time the new expression is evaluated. The only optimization of this
14865 sort that would be valid is changing
14867 SAVE_EXPR (I) * SAVE_EXPR (SAVE_EXPR (J) * 8)
14869 divided by 8 to
14871 SAVE_EXPR (I) * SAVE_EXPR (J)
14873 (where the same SAVE_EXPR (J) is used in the original and the
14874 transformed version). */
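/* A simpler concrete case (illustrative, integer-typed J):
   multiple_of_p returns 1 for TOP = J * 8 and BOTTOM = 4, through the
   MULT_EXPR case and then the INTEGER_CST case on the constant 8.  */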
14876 int
14877 multiple_of_p (tree type, const_tree top, const_tree bottom)
14879 if (operand_equal_p (top, bottom, 0))
14880 return 1;
14882 if (TREE_CODE (type) != INTEGER_TYPE)
14883 return 0;
14885 switch (TREE_CODE (top))
14887 case BIT_AND_EXPR:
14888 /* Bitwise and provides a power of two multiple. If the mask is
14889 a multiple of BOTTOM then TOP is a multiple of BOTTOM. */
14890 if (!integer_pow2p (bottom))
14891 return 0;
14892 /* FALLTHRU */
14894 case MULT_EXPR:
14895 return (multiple_of_p (type, TREE_OPERAND (top, 0), bottom)
14896 || multiple_of_p (type, TREE_OPERAND (top, 1), bottom));
14898 case PLUS_EXPR:
14899 case MINUS_EXPR:
14900 return (multiple_of_p (type, TREE_OPERAND (top, 0), bottom)
14901 && multiple_of_p (type, TREE_OPERAND (top, 1), bottom));
14903 case LSHIFT_EXPR:
14904 if (TREE_CODE (TREE_OPERAND (top, 1)) == INTEGER_CST)
14906 tree op1, t1;
14908 op1 = TREE_OPERAND (top, 1);
14909 /* const_binop may not detect overflow correctly,
14910 so check for it explicitly here. */
14911 if (TYPE_PRECISION (TREE_TYPE (size_one_node))
14912 > TREE_INT_CST_LOW (op1)
14913 && TREE_INT_CST_HIGH (op1) == 0
14914 && 0 != (t1 = fold_convert (type,
14915 const_binop (LSHIFT_EXPR,
14916 size_one_node,
14917 op1)))
14918 && !TREE_OVERFLOW (t1))
14919 return multiple_of_p (type, t1, bottom);
14921 return 0;
14923 case NOP_EXPR:
14924 /* Can't handle conversions from non-integral or wider integral type. */
14925 if ((TREE_CODE (TREE_TYPE (TREE_OPERAND (top, 0))) != INTEGER_TYPE)
14926 || (TYPE_PRECISION (type)
14927 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (top, 0)))))
14928 return 0;
14930 /* .. fall through ... */
14932 case SAVE_EXPR:
14933 return multiple_of_p (type, TREE_OPERAND (top, 0), bottom);
14935 case COND_EXPR:
14936 return (multiple_of_p (type, TREE_OPERAND (top, 1), bottom)
14937 && multiple_of_p (type, TREE_OPERAND (top, 2), bottom));
14939 case INTEGER_CST:
14940 if (TREE_CODE (bottom) != INTEGER_CST
14941 || integer_zerop (bottom)
14942 || (TYPE_UNSIGNED (type)
14943 && (tree_int_cst_sgn (top) < 0
14944 || tree_int_cst_sgn (bottom) < 0)))
14945 return 0;
14946 return integer_zerop (int_const_binop (TRUNC_MOD_EXPR,
14947 top, bottom));
14949 default:
14950 return 0;
14954 /* Return true if CODE or TYPE is known to be non-negative. */
14956 static bool
14957 tree_simple_nonnegative_warnv_p (enum tree_code code, tree type)
14959 if ((TYPE_PRECISION (type) != 1 || TYPE_UNSIGNED (type))
14960 && truth_value_p (code))
14961 /* Truth values evaluate to 0 or 1, which is nonnegative unless we
14962 have a signed:1 type (where the value is -1 and 0). */
14963 return true;
14964 return false;
14967 /* Return true if (CODE OP0) is known to be non-negative. If the return
14968 value is based on the assumption that signed overflow is undefined,
14969 set *STRICT_OVERFLOW_P to true; otherwise, don't change
14970 *STRICT_OVERFLOW_P. */
14972 bool
14973 tree_unary_nonnegative_warnv_p (enum tree_code code, tree type, tree op0,
14974 bool *strict_overflow_p)
14976 if (TYPE_UNSIGNED (type))
14977 return true;
14979 switch (code)
14981 case ABS_EXPR:
14982 /* We can't return 1 if flag_wrapv is set because
14983 ABS_EXPR<INT_MIN> = INT_MIN. */
14984 if (!INTEGRAL_TYPE_P (type))
14985 return true;
14986 if (TYPE_OVERFLOW_UNDEFINED (type))
14988 *strict_overflow_p = true;
14989 return true;
14991 break;
14993 case NON_LVALUE_EXPR:
14994 case FLOAT_EXPR:
14995 case FIX_TRUNC_EXPR:
14996 return tree_expr_nonnegative_warnv_p (op0,
14997 strict_overflow_p);
14999 case NOP_EXPR:
15001 tree inner_type = TREE_TYPE (op0);
15002 tree outer_type = type;
15004 if (TREE_CODE (outer_type) == REAL_TYPE)
15006 if (TREE_CODE (inner_type) == REAL_TYPE)
15007 return tree_expr_nonnegative_warnv_p (op0,
15008 strict_overflow_p);
15009 if (TREE_CODE (inner_type) == INTEGER_TYPE)
15011 if (TYPE_UNSIGNED (inner_type))
15012 return true;
15013 return tree_expr_nonnegative_warnv_p (op0,
15014 strict_overflow_p);
15017 else if (TREE_CODE (outer_type) == INTEGER_TYPE)
15019 if (TREE_CODE (inner_type) == REAL_TYPE)
15020 return tree_expr_nonnegative_warnv_p (op0,
15021 strict_overflow_p);
15022 if (TREE_CODE (inner_type) == INTEGER_TYPE)
15023 return TYPE_PRECISION (inner_type) < TYPE_PRECISION (outer_type)
15024 && TYPE_UNSIGNED (inner_type);
15027 break;
15029 default:
15030 return tree_simple_nonnegative_warnv_p (code, type);
15033 /* We don't know sign of `t', so be conservative and return false. */
15034 return false;
15037 /* Return true if (CODE OP0 OP1) is known to be non-negative. If the return
15038 value is based on the assumption that signed overflow is undefined,
15039 set *STRICT_OVERFLOW_P to true; otherwise, don't change
15040 *STRICT_OVERFLOW_P. */
15042 bool
15043 tree_binary_nonnegative_warnv_p (enum tree_code code, tree type, tree op0,
15044 tree op1, bool *strict_overflow_p)
15046 if (TYPE_UNSIGNED (type))
15047 return true;
15049 switch (code)
15051 case POINTER_PLUS_EXPR:
15052 case PLUS_EXPR:
15053 if (FLOAT_TYPE_P (type))
15054 return (tree_expr_nonnegative_warnv_p (op0,
15055 strict_overflow_p)
15056 && tree_expr_nonnegative_warnv_p (op1,
15057 strict_overflow_p));
15059 /* zero_extend(x) + zero_extend(y) is non-negative if x and y are
15060 both unsigned and at least 2 bits shorter than the result. */
15061 if (TREE_CODE (type) == INTEGER_TYPE
15062 && TREE_CODE (op0) == NOP_EXPR
15063 && TREE_CODE (op1) == NOP_EXPR)
15065 tree inner1 = TREE_TYPE (TREE_OPERAND (op0, 0));
15066 tree inner2 = TREE_TYPE (TREE_OPERAND (op1, 0));
15067 if (TREE_CODE (inner1) == INTEGER_TYPE && TYPE_UNSIGNED (inner1)
15068 && TREE_CODE (inner2) == INTEGER_TYPE && TYPE_UNSIGNED (inner2))
15070 unsigned int prec = MAX (TYPE_PRECISION (inner1),
15071 TYPE_PRECISION (inner2)) + 1;
15072 return prec < TYPE_PRECISION (type);
15075 break;
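/* E.g. "(int) (unsigned char) x + (int) (unsigned char) y": both
   inputs fit in 8 bits, their sum in 9, and 9 < 32, so the result is
   known non-negative.  */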
15077 case MULT_EXPR:
15078 if (FLOAT_TYPE_P (type))
15080 /* x * x for floating point x is always non-negative. */
15081 if (operand_equal_p (op0, op1, 0))
15082 return true;
15083 return (tree_expr_nonnegative_warnv_p (op0,
15084 strict_overflow_p)
15085 && tree_expr_nonnegative_warnv_p (op1,
15086 strict_overflow_p));
15089 /* zero_extend(x) * zero_extend(y) is non-negative if x and y are
15090 both unsigned and their combined precision is less than that of the result. */
15091 if (TREE_CODE (type) == INTEGER_TYPE
15092 && (TREE_CODE (op0) == NOP_EXPR || TREE_CODE (op0) == INTEGER_CST)
15093 && (TREE_CODE (op1) == NOP_EXPR || TREE_CODE (op1) == INTEGER_CST))
15095 tree inner0 = (TREE_CODE (op0) == NOP_EXPR)
15096 ? TREE_TYPE (TREE_OPERAND (op0, 0))
15097 : TREE_TYPE (op0);
15098 tree inner1 = (TREE_CODE (op1) == NOP_EXPR)
15099 ? TREE_TYPE (TREE_OPERAND (op1, 0))
15100 : TREE_TYPE (op1);
15102 bool unsigned0 = TYPE_UNSIGNED (inner0);
15103 bool unsigned1 = TYPE_UNSIGNED (inner1);
15105 if (TREE_CODE (op0) == INTEGER_CST)
15106 unsigned0 = unsigned0 || tree_int_cst_sgn (op0) >= 0;
15108 if (TREE_CODE (op1) == INTEGER_CST)
15109 unsigned1 = unsigned1 || tree_int_cst_sgn (op1) >= 0;
15111 if (TREE_CODE (inner0) == INTEGER_TYPE && unsigned0
15112 && TREE_CODE (inner1) == INTEGER_TYPE && unsigned1)
15114 unsigned int precision0 = (TREE_CODE (op0) == INTEGER_CST)
15115 ? tree_int_cst_min_precision (op0, /*unsignedp=*/true)
15116 : TYPE_PRECISION (inner0);
15118 unsigned int precision1 = (TREE_CODE (op1) == INTEGER_CST)
15119 ? tree_int_cst_min_precision (op1, /*unsignedp=*/true)
15120 : TYPE_PRECISION (inner1);
15122 return precision0 + precision1 < TYPE_PRECISION (type);
15125 return false;
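/* E.g. "(long) (unsigned short) x * (long) (unsigned short) y" on a
   target with 16-bit short and 64-bit long: the product needs at most
   16 + 16 = 32 bits, and 32 < 64, so the result cannot be
   negative.  */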
15127 case BIT_AND_EXPR:
15128 case MAX_EXPR:
15129 return (tree_expr_nonnegative_warnv_p (op0,
15130 strict_overflow_p)
15131 || tree_expr_nonnegative_warnv_p (op1,
15132 strict_overflow_p));
15134 case BIT_IOR_EXPR:
15135 case BIT_XOR_EXPR:
15136 case MIN_EXPR:
15137 case RDIV_EXPR:
15138 case TRUNC_DIV_EXPR:
15139 case CEIL_DIV_EXPR:
15140 case FLOOR_DIV_EXPR:
15141 case ROUND_DIV_EXPR:
15142 return (tree_expr_nonnegative_warnv_p (op0,
15143 strict_overflow_p)
15144 && tree_expr_nonnegative_warnv_p (op1,
15145 strict_overflow_p));
15147 case TRUNC_MOD_EXPR:
15148 case CEIL_MOD_EXPR:
15149 case FLOOR_MOD_EXPR:
15150 case ROUND_MOD_EXPR:
15151 return tree_expr_nonnegative_warnv_p (op0,
15152 strict_overflow_p);
15153 default:
15154 return tree_simple_nonnegative_warnv_p (code, type);
15157 /* We don't know sign of `t', so be conservative and return false. */
15158 return false;
15161 /* Return true if T is known to be non-negative. If the return
15162 value is based on the assumption that signed overflow is undefined,
15163 set *STRICT_OVERFLOW_P to true; otherwise, don't change
15164 *STRICT_OVERFLOW_P. */
15166 bool
15167 tree_single_nonnegative_warnv_p (tree t, bool *strict_overflow_p)
15169 if (TYPE_UNSIGNED (TREE_TYPE (t)))
15170 return true;
15172 switch (TREE_CODE (t))
15174 case INTEGER_CST:
15175 return tree_int_cst_sgn (t) >= 0;
15177 case REAL_CST:
15178 return ! REAL_VALUE_NEGATIVE (TREE_REAL_CST (t));
15180 case FIXED_CST:
15181 return ! FIXED_VALUE_NEGATIVE (TREE_FIXED_CST (t));
15183 case COND_EXPR:
15184 return (tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 1),
15185 strict_overflow_p)
15186 && tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 2),
15187 strict_overflow_p));
15188 default:
15189 return tree_simple_nonnegative_warnv_p (TREE_CODE (t),
15190 TREE_TYPE (t));
15192 /* We don't know sign of `t', so be conservative and return false. */
15193 return false;
15196 /* Return true if T is known to be non-negative. If the return
15197 value is based on the assumption that signed overflow is undefined,
15198 set *STRICT_OVERFLOW_P to true; otherwise, don't change
15199 *STRICT_OVERFLOW_P. */
15201 bool
15202 tree_call_nonnegative_warnv_p (tree type, tree fndecl,
15203 tree arg0, tree arg1, bool *strict_overflow_p)
15205 if (fndecl && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL)
15206 switch (DECL_FUNCTION_CODE (fndecl))
15208 CASE_FLT_FN (BUILT_IN_ACOS):
15209 CASE_FLT_FN (BUILT_IN_ACOSH):
15210 CASE_FLT_FN (BUILT_IN_CABS):
15211 CASE_FLT_FN (BUILT_IN_COSH):
15212 CASE_FLT_FN (BUILT_IN_ERFC):
15213 CASE_FLT_FN (BUILT_IN_EXP):
15214 CASE_FLT_FN (BUILT_IN_EXP10):
15215 CASE_FLT_FN (BUILT_IN_EXP2):
15216 CASE_FLT_FN (BUILT_IN_FABS):
15217 CASE_FLT_FN (BUILT_IN_FDIM):
15218 CASE_FLT_FN (BUILT_IN_HYPOT):
15219 CASE_FLT_FN (BUILT_IN_POW10):
15220 CASE_INT_FN (BUILT_IN_FFS):
15221 CASE_INT_FN (BUILT_IN_PARITY):
15222 CASE_INT_FN (BUILT_IN_POPCOUNT):
15223 case BUILT_IN_BSWAP32:
15224 case BUILT_IN_BSWAP64:
15225 /* Always true. */
15226 return true;
15228 CASE_FLT_FN (BUILT_IN_SQRT):
15229 /* sqrt(-0.0) is -0.0. */
15230 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (type)))
15231 return true;
15232 return tree_expr_nonnegative_warnv_p (arg0,
15233 strict_overflow_p);
15235 CASE_FLT_FN (BUILT_IN_ASINH):
15236 CASE_FLT_FN (BUILT_IN_ATAN):
15237 CASE_FLT_FN (BUILT_IN_ATANH):
15238 CASE_FLT_FN (BUILT_IN_CBRT):
15239 CASE_FLT_FN (BUILT_IN_CEIL):
15240 CASE_FLT_FN (BUILT_IN_ERF):
15241 CASE_FLT_FN (BUILT_IN_EXPM1):
15242 CASE_FLT_FN (BUILT_IN_FLOOR):
15243 CASE_FLT_FN (BUILT_IN_FMOD):
15244 CASE_FLT_FN (BUILT_IN_FREXP):
15245 CASE_FLT_FN (BUILT_IN_ICEIL):
15246 CASE_FLT_FN (BUILT_IN_IFLOOR):
15247 CASE_FLT_FN (BUILT_IN_IRINT):
15248 CASE_FLT_FN (BUILT_IN_IROUND):
15249 CASE_FLT_FN (BUILT_IN_LCEIL):
15250 CASE_FLT_FN (BUILT_IN_LDEXP):
15251 CASE_FLT_FN (BUILT_IN_LFLOOR):
15252 CASE_FLT_FN (BUILT_IN_LLCEIL):
15253 CASE_FLT_FN (BUILT_IN_LLFLOOR):
15254 CASE_FLT_FN (BUILT_IN_LLRINT):
15255 CASE_FLT_FN (BUILT_IN_LLROUND):
15256 CASE_FLT_FN (BUILT_IN_LRINT):
15257 CASE_FLT_FN (BUILT_IN_LROUND):
15258 CASE_FLT_FN (BUILT_IN_MODF):
15259 CASE_FLT_FN (BUILT_IN_NEARBYINT):
15260 CASE_FLT_FN (BUILT_IN_RINT):
15261 CASE_FLT_FN (BUILT_IN_ROUND):
15262 CASE_FLT_FN (BUILT_IN_SCALB):
15263 CASE_FLT_FN (BUILT_IN_SCALBLN):
15264 CASE_FLT_FN (BUILT_IN_SCALBN):
15265 CASE_FLT_FN (BUILT_IN_SIGNBIT):
15266 CASE_FLT_FN (BUILT_IN_SIGNIFICAND):
15267 CASE_FLT_FN (BUILT_IN_SINH):
15268 CASE_FLT_FN (BUILT_IN_TANH):
15269 CASE_FLT_FN (BUILT_IN_TRUNC):
15270 /* True if the 1st argument is nonnegative. */
15271 return tree_expr_nonnegative_warnv_p (arg0,
15272 strict_overflow_p);
15274 CASE_FLT_FN (BUILT_IN_FMAX):
15275 /* True if the 1st OR 2nd arguments are nonnegative. */
15276 return (tree_expr_nonnegative_warnv_p (arg0,
15277 strict_overflow_p)
15278 || (tree_expr_nonnegative_warnv_p (arg1,
15279 strict_overflow_p)));
15281 CASE_FLT_FN (BUILT_IN_FMIN):
15282 /* True if the 1st AND 2nd arguments are nonnegative. */
15283 return (tree_expr_nonnegative_warnv_p (arg0,
15284 strict_overflow_p)
15285 && (tree_expr_nonnegative_warnv_p (arg1,
15286 strict_overflow_p)));
15288 CASE_FLT_FN (BUILT_IN_COPYSIGN):
15289 /* True if the 2nd argument is nonnegative. */
15290 return tree_expr_nonnegative_warnv_p (arg1,
15291 strict_overflow_p);
15293 CASE_FLT_FN (BUILT_IN_POWI):
15294 /* True if the 1st argument is nonnegative or the second
15295 argument is an even integer. */
15296 if (TREE_CODE (arg1) == INTEGER_CST
15297 && (TREE_INT_CST_LOW (arg1) & 1) == 0)
15298 return true;
15299 return tree_expr_nonnegative_warnv_p (arg0,
15300 strict_overflow_p);
15302 CASE_FLT_FN (BUILT_IN_POW):
15303 /* True if the 1st argument is nonnegative or the second
15304 argument is an even integer valued real. */
15305 if (TREE_CODE (arg1) == REAL_CST)
15307 REAL_VALUE_TYPE c;
15308 HOST_WIDE_INT n;
15310 c = TREE_REAL_CST (arg1);
15311 n = real_to_integer (&c);
15312 if ((n & 1) == 0)
15314 REAL_VALUE_TYPE cint;
15315 real_from_integer (&cint, VOIDmode, n,
15316 n < 0 ? -1 : 0, 0);
15317 if (real_identical (&c, &cint))
15318 return true;
15321 return tree_expr_nonnegative_warnv_p (arg0,
15322 strict_overflow_p);
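/* So pow (x, 2.0) is known non-negative for any x, while
   pow (x, 3.0) is non-negative only when x is.  */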
15324 default:
15325 break;
15327 return tree_simple_nonnegative_warnv_p (CALL_EXPR,
15328 type);
15331 /* Return true if T is known to be non-negative. If the return
15332 value is based on the assumption that signed overflow is undefined,
15333 set *STRICT_OVERFLOW_P to true; otherwise, don't change
15334 *STRICT_OVERFLOW_P. */
15336 bool
15337 tree_invalid_nonnegative_warnv_p (tree t, bool *strict_overflow_p)
15339 enum tree_code code = TREE_CODE (t);
15340 if (TYPE_UNSIGNED (TREE_TYPE (t)))
15341 return true;
15343 switch (code)
15345 case TARGET_EXPR:
15347 tree temp = TARGET_EXPR_SLOT (t);
15348 t = TARGET_EXPR_INITIAL (t);
15350 /* If the initializer is non-void, then it's a normal expression
15351 that will be assigned to the slot. */
15352 if (!VOID_TYPE_P (t))
15353 return tree_expr_nonnegative_warnv_p (t, strict_overflow_p);
15355 /* Otherwise, the initializer sets the slot in some way. One common
15356 way is an assignment statement at the end of the initializer. */
15357 while (1)
15359 if (TREE_CODE (t) == BIND_EXPR)
15360 t = expr_last (BIND_EXPR_BODY (t));
15361 else if (TREE_CODE (t) == TRY_FINALLY_EXPR
15362 || TREE_CODE (t) == TRY_CATCH_EXPR)
15363 t = expr_last (TREE_OPERAND (t, 0));
15364 else if (TREE_CODE (t) == STATEMENT_LIST)
15365 t = expr_last (t);
15366 else
15367 break;
15369 if (TREE_CODE (t) == MODIFY_EXPR
15370 && TREE_OPERAND (t, 0) == temp)
15371 return tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 1),
15372 strict_overflow_p);
15374 return false;
15377 case CALL_EXPR:
15379 tree arg0 = call_expr_nargs (t) > 0 ? CALL_EXPR_ARG (t, 0) : NULL_TREE;
15380 tree arg1 = call_expr_nargs (t) > 1 ? CALL_EXPR_ARG (t, 1) : NULL_TREE;
15382 return tree_call_nonnegative_warnv_p (TREE_TYPE (t),
15383 get_callee_fndecl (t),
15384 arg0,
15385 arg1,
15386 strict_overflow_p);
15388 case COMPOUND_EXPR:
15389 case MODIFY_EXPR:
15390 return tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 1),
15391 strict_overflow_p);
15392 case BIND_EXPR:
15393 return tree_expr_nonnegative_warnv_p (expr_last (TREE_OPERAND (t, 1)),
15394 strict_overflow_p);
15395 case SAVE_EXPR:
15396 return tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 0),
15397 strict_overflow_p);
15399 default:
15400 return tree_simple_nonnegative_warnv_p (TREE_CODE (t),
15401 TREE_TYPE (t));
15404 /* We don't know sign of `t', so be conservative and return false. */
15405 return false;
15408 /* Return true if T is known to be non-negative. If the return
15409 value is based on the assumption that signed overflow is undefined,
15410 set *STRICT_OVERFLOW_P to true; otherwise, don't change
15411 *STRICT_OVERFLOW_P. */
15413 bool
15414 tree_expr_nonnegative_warnv_p (tree t, bool *strict_overflow_p)
15416 enum tree_code code;
15417 if (t == error_mark_node)
15418 return false;
15420 code = TREE_CODE (t);
15421 switch (TREE_CODE_CLASS (code))
15423 case tcc_binary:
15424 case tcc_comparison:
15425 return tree_binary_nonnegative_warnv_p (TREE_CODE (t),
15426 TREE_TYPE (t),
15427 TREE_OPERAND (t, 0),
15428 TREE_OPERAND (t, 1),
15429 strict_overflow_p);
15431 case tcc_unary:
15432 return tree_unary_nonnegative_warnv_p (TREE_CODE (t),
15433 TREE_TYPE (t),
15434 TREE_OPERAND (t, 0),
15435 strict_overflow_p);
15437 case tcc_constant:
15438 case tcc_declaration:
15439 case tcc_reference:
15440 return tree_single_nonnegative_warnv_p (t, strict_overflow_p);
15442 default:
15443 break;
15446 switch (code)
15448 case TRUTH_AND_EXPR:
15449 case TRUTH_OR_EXPR:
15450 case TRUTH_XOR_EXPR:
15451 return tree_binary_nonnegative_warnv_p (TREE_CODE (t),
15452 TREE_TYPE (t),
15453 TREE_OPERAND (t, 0),
15454 TREE_OPERAND (t, 1),
15455 strict_overflow_p);
15456 case TRUTH_NOT_EXPR:
15457 return tree_unary_nonnegative_warnv_p (TREE_CODE (t),
15458 TREE_TYPE (t),
15459 TREE_OPERAND (t, 0),
15460 strict_overflow_p);
15462 case COND_EXPR:
15463 case CONSTRUCTOR:
15464 case OBJ_TYPE_REF:
15465 case ASSERT_EXPR:
15466 case ADDR_EXPR:
15467 case WITH_SIZE_EXPR:
15468 case SSA_NAME:
15469 return tree_single_nonnegative_warnv_p (t, strict_overflow_p);
15471 default:
15472 return tree_invalid_nonnegative_warnv_p (t, strict_overflow_p);
15476 /* Return true if `t' is known to be non-negative. Handle warnings
15477 about undefined signed overflow. */
15479 bool
15480 tree_expr_nonnegative_p (tree t)
15482 bool ret, strict_overflow_p;
15484 strict_overflow_p = false;
15485 ret = tree_expr_nonnegative_warnv_p (t, &strict_overflow_p);
15486 if (strict_overflow_p)
15487 fold_overflow_warning (("assuming signed overflow does not occur when "
15488 "determining that expression is always "
15489 "non-negative"),
15490 WARN_STRICT_OVERFLOW_MISC);
15491 return ret;
15495 /* Return true when (CODE OP0) is an address and is known to be nonzero.
15496 For floating point we further ensure that T is not denormal.
15497 Similar logic is present in nonzero_address in rtlanal.c.
15499 If the return value is based on the assumption that signed overflow
15500 is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
15501 change *STRICT_OVERFLOW_P. */
15503 bool
15504 tree_unary_nonzero_warnv_p (enum tree_code code, tree type, tree op0,
15505 bool *strict_overflow_p)
15507 switch (code)
15509 case ABS_EXPR:
15510 return tree_expr_nonzero_warnv_p (op0,
15511 strict_overflow_p);
15513 case NOP_EXPR:
15515 tree inner_type = TREE_TYPE (op0);
15516 tree outer_type = type;
15518 return (TYPE_PRECISION (outer_type) >= TYPE_PRECISION (inner_type)
15519 && tree_expr_nonzero_warnv_p (op0,
15520 strict_overflow_p));
15522 break;
15524 case NON_LVALUE_EXPR:
15525 return tree_expr_nonzero_warnv_p (op0,
15526 strict_overflow_p);
15528 default:
15529 break;
15532 return false;
15535 /* Return true when (CODE OP0 OP1) is an address and is known to be nonzero.
15536 For floating point we further ensure that T is not denormal.
15537 Similar logic is present in nonzero_address in rtlanal.c.
15539 If the return value is based on the assumption that signed overflow
15540 is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
15541 change *STRICT_OVERFLOW_P. */
15543 bool
15544 tree_binary_nonzero_warnv_p (enum tree_code code,
15545 tree type,
15546 tree op0,
15547 tree op1, bool *strict_overflow_p)
15549 bool sub_strict_overflow_p;
15550 switch (code)
15552 case POINTER_PLUS_EXPR:
15553 case PLUS_EXPR:
15554 if (TYPE_OVERFLOW_UNDEFINED (type))
15556 /* In the presence of negative values it is hard
15557 to say anything definite. */
15558 sub_strict_overflow_p = false;
15559 if (!tree_expr_nonnegative_warnv_p (op0,
15560 &sub_strict_overflow_p)
15561 || !tree_expr_nonnegative_warnv_p (op1,
15562 &sub_strict_overflow_p))
15563 return false;
15564 /* One of the operands must be positive and the other non-negative. */
15565 /* We don't set *STRICT_OVERFLOW_P here: even if this value
15566 overflows, on a two's-complement machine the sum of a
15567 positive and a nonnegative number can never be zero. */
15568 return (tree_expr_nonzero_warnv_p (op0,
15569 strict_overflow_p)
15570 || tree_expr_nonzero_warnv_p (op1,
15571 strict_overflow_p));
15573 break;
15575 case MULT_EXPR:
15576 if (TYPE_OVERFLOW_UNDEFINED (type))
15578 if (tree_expr_nonzero_warnv_p (op0,
15579 strict_overflow_p)
15580 && tree_expr_nonzero_warnv_p (op1,
15581 strict_overflow_p))
15583 *strict_overflow_p = true;
15584 return true;
15587 break;
15589 case MIN_EXPR:
15590 sub_strict_overflow_p = false;
15591 if (tree_expr_nonzero_warnv_p (op0,
15592 &sub_strict_overflow_p)
15593 && tree_expr_nonzero_warnv_p (op1,
15594 &sub_strict_overflow_p))
15596 if (sub_strict_overflow_p)
15597 *strict_overflow_p = true;
15599 break;
15601 case MAX_EXPR:
15602 sub_strict_overflow_p = false;
15603 if (tree_expr_nonzero_warnv_p (op0,
15604 &sub_strict_overflow_p))
15606 if (sub_strict_overflow_p)
15607 *strict_overflow_p = true;
15609 /* When both operands are nonzero, then MAX must be too. */
15610 if (tree_expr_nonzero_warnv_p (op1,
15611 strict_overflow_p))
15612 return true;
15614 /* MAX where operand 0 is positive is positive. */
15615 return tree_expr_nonnegative_warnv_p (op0,
15616 strict_overflow_p);
15618 /* MAX where operand 1 is positive is positive. */
15619 else if (tree_expr_nonzero_warnv_p (op1,
15620 &sub_strict_overflow_p)
15621 && tree_expr_nonnegative_warnv_p (op1,
15622 &sub_strict_overflow_p))
15624 if (sub_strict_overflow_p)
15625 *strict_overflow_p = true;
15626 return true;
15628 break;
15630 case BIT_IOR_EXPR:
15631 return (tree_expr_nonzero_warnv_p (op1,
15632 strict_overflow_p)
15633 || tree_expr_nonzero_warnv_p (op0,
15634 strict_overflow_p));
15636 default:
15637 break;
15640 return false;
15643 /* Return true when T is an address and is known to be nonzero.
15644 For floating point we further ensure that T is not denormal.
15645 Similar logic is present in nonzero_address in rtlanal.c.
15647 If the return value is based on the assumption that signed overflow
15648 is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
15649 change *STRICT_OVERFLOW_P. */
15651 bool
15652 tree_single_nonzero_warnv_p (tree t, bool *strict_overflow_p)
15654 bool sub_strict_overflow_p;
15655 switch (TREE_CODE (t))
15657 case INTEGER_CST:
15658 return !integer_zerop (t);
15660 case ADDR_EXPR:
15662 tree base = TREE_OPERAND (t, 0);
15663 if (!DECL_P (base))
15664 base = get_base_address (base);
15666 if (!base)
15667 return false;
15669 /* Weak declarations may link to NULL. Other things may also be NULL
15670 so protect with -fdelete-null-pointer-checks; but not variables
15671 allocated on the stack. */
15672 if (DECL_P (base)
15673 && (flag_delete_null_pointer_checks
15674 || (DECL_CONTEXT (base)
15675 && TREE_CODE (DECL_CONTEXT (base)) == FUNCTION_DECL
15676 && auto_var_in_fn_p (base, DECL_CONTEXT (base)))))
15677 return !VAR_OR_FUNCTION_DECL_P (base) || !DECL_WEAK (base);
15679 /* Constants are never weak. */
15680 if (CONSTANT_CLASS_P (base))
15681 return true;
15683 return false;
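/* E.g. "&local" is always nonzero, since stack variables cannot be
   weak; "&global" is only known nonzero when the declaration is not
   weak and null pointer checks may be deleted.  */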
15686 case COND_EXPR:
15687 sub_strict_overflow_p = false;
15688 if (tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 1),
15689 &sub_strict_overflow_p)
15690 && tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 2),
15691 &sub_strict_overflow_p))
15693 if (sub_strict_overflow_p)
15694 *strict_overflow_p = true;
15695 return true;
15697 break;
15699 default:
15700 break;
15702 return false;
15705 /* Return true when T is an address and is known to be nonzero.
15706 For floating point we further ensure that T is not denormal.
15707 Similar logic is present in nonzero_address in rtlanal.c.
15709 If the return value is based on the assumption that signed overflow
15710 is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
15711 change *STRICT_OVERFLOW_P. */
15713 bool
15714 tree_expr_nonzero_warnv_p (tree t, bool *strict_overflow_p)
15716 tree type = TREE_TYPE (t);
15717 enum tree_code code;
15719 /* Doing something useful for floating point would need more work. */
15720 if (!INTEGRAL_TYPE_P (type) && !POINTER_TYPE_P (type))
15721 return false;
15723 code = TREE_CODE (t);
15724 switch (TREE_CODE_CLASS (code))
15726 case tcc_unary:
15727 return tree_unary_nonzero_warnv_p (code, type, TREE_OPERAND (t, 0),
15728 strict_overflow_p);
15729 case tcc_binary:
15730 case tcc_comparison:
15731 return tree_binary_nonzero_warnv_p (code, type,
15732 TREE_OPERAND (t, 0),
15733 TREE_OPERAND (t, 1),
15734 strict_overflow_p);
15735 case tcc_constant:
15736 case tcc_declaration:
15737 case tcc_reference:
15738 return tree_single_nonzero_warnv_p (t, strict_overflow_p);
15740 default:
15741 break;
15744 switch (code)
15746 case TRUTH_NOT_EXPR:
15747 return tree_unary_nonzero_warnv_p (code, type, TREE_OPERAND (t, 0),
15748 strict_overflow_p);
15750 case TRUTH_AND_EXPR:
15751 case TRUTH_OR_EXPR:
15752 case TRUTH_XOR_EXPR:
15753 return tree_binary_nonzero_warnv_p (code, type,
15754 TREE_OPERAND (t, 0),
15755 TREE_OPERAND (t, 1),
15756 strict_overflow_p);
15758 case COND_EXPR:
15759 case CONSTRUCTOR:
15760 case OBJ_TYPE_REF:
15761 case ASSERT_EXPR:
15762 case ADDR_EXPR:
15763 case WITH_SIZE_EXPR:
15764 case SSA_NAME:
15765 return tree_single_nonzero_warnv_p (t, strict_overflow_p);
15767 case COMPOUND_EXPR:
15768 case MODIFY_EXPR:
15769 case BIND_EXPR:
15770 return tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 1),
15771 strict_overflow_p);
15773 case SAVE_EXPR:
15774 return tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 0),
15775 strict_overflow_p);
15777 case CALL_EXPR:
15778 return alloca_call_p (t);
15780 default:
15781 break;
15783 return false;
15786 /* Return true when T is known to be nonzero. If that determination
15787 assumed that signed overflow is undefined, emit a -Wstrict-overflow warning. */
15789 bool
15790 tree_expr_nonzero_p (tree t)
15792 bool ret, strict_overflow_p;
15794 strict_overflow_p = false;
15795 ret = tree_expr_nonzero_warnv_p (t, &strict_overflow_p);
15796 if (strict_overflow_p)
15797 fold_overflow_warning (("assuming signed overflow does not occur when "
15798 "determining that expression is always "
15799 "non-zero"),
15800 WARN_STRICT_OVERFLOW_MISC);
15801 return ret;
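/* Editorial sketch (hypothetical helper, not a GCC entry point): a
   minimal use of tree_expr_nonzero_p.  An INTEGER_CST dispatches
   through the tcc_constant class to tree_single_nonzero_warnv_p, so
   no signed-overflow assumption is made and no warning is issued.  */

static bool
example_nonzero_constant (void)
{
  tree five = build_int_cst (integer_type_node, 5);
  return tree_expr_nonzero_p (five);  /* true: !integer_zerop (5).  */
}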
15804 /* Given the components of a binary expression CODE, TYPE, OP0 and OP1,
15805 attempt to fold the expression to a constant without modifying TYPE,
15806 OP0 or OP1.
15808 If the expression can be simplified to a constant, return
15809 the constant; otherwise return NULL_TREE. */
15812 tree
15813 fold_binary_to_constant (enum tree_code code, tree type, tree op0, tree op1)
15815 tree tem = fold_binary (code, type, op0, op1);
15816 return (tem && TREE_CONSTANT (tem)) ? tem : NULL_TREE;
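/* Editorial sketch (hypothetical helper, not part of the original
   file): folding 2 + 3.  fold_binary yields the INTEGER_CST 5, which
   is TREE_CONSTANT and therefore returned; had an operand been, say,
   a VAR_DECL, the result would be NULL_TREE.  */

static tree
example_fold_two_plus_three (void)
{
  return fold_binary_to_constant (PLUS_EXPR, integer_type_node,
                                  build_int_cst (integer_type_node, 2),
                                  build_int_cst (integer_type_node, 3));
}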
15819 /* Given the components of a unary expression CODE, TYPE and OP0,
15820 attempt to fold the expression to a constant without modifying
15821 TYPE or OP0.
15823 If the expression can be simplified to a constant, return
15824 the constant; otherwise return NULL_TREE. */
15827 tree
15828 fold_unary_to_constant (enum tree_code code, tree type, tree op0)
15830 tree tem = fold_unary (code, type, op0);
15831 return (tem && TREE_CONSTANT (tem)) ? tem : NULL_TREE;
15834 /* If EXP represents referencing an element in a constant string
15835 (either via pointer arithmetic or array indexing), return the
15836 tree representing the value accessed, otherwise return NULL. */
15838 tree
15839 fold_read_from_constant_string (tree exp)
15841 if ((TREE_CODE (exp) == INDIRECT_REF
15842 || TREE_CODE (exp) == ARRAY_REF)
15843 && TREE_CODE (TREE_TYPE (exp)) == INTEGER_TYPE)
15845 tree exp1 = TREE_OPERAND (exp, 0);
15846 tree index;
15847 tree string;
15848 location_t loc = EXPR_LOCATION (exp);
15850 if (TREE_CODE (exp) == INDIRECT_REF)
15851 string = string_constant (exp1, &index);
15852 else
15854 tree low_bound = array_ref_low_bound (exp);
15855 index = fold_convert_loc (loc, sizetype, TREE_OPERAND (exp, 1));
15857 /* Optimize the special case of a zero lower bound.
15859 We convert the low_bound to sizetype to avoid some problems
15860 with constant folding. (E.g. suppose the lower bound is 1,
15861 and its mode is QI. Without the conversion, (ARRAY
15862 +(INDEX-(unsigned char)1)) becomes ((ARRAY+(-(unsigned char)1))
15863 +INDEX), which becomes (ARRAY+255+INDEX). Oops!) */
15864 if (! integer_zerop (low_bound))
15865 index = size_diffop_loc (loc, index,
15866 fold_convert_loc (loc, sizetype, low_bound));
15868 string = exp1;
15871 if (string
15872 && TYPE_MODE (TREE_TYPE (exp)) == TYPE_MODE (TREE_TYPE (TREE_TYPE (string)))
15873 && TREE_CODE (string) == STRING_CST
15874 && TREE_CODE (index) == INTEGER_CST
15875 && compare_tree_int (index, TREE_STRING_LENGTH (string)) < 0
15876 && (GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_TYPE (string))))
15877 == MODE_INT)
15878 && (GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (TREE_TYPE (string)))) == 1))
15879 return build_int_cst_type (TREE_TYPE (exp),
15880 (TREE_STRING_POINTER (string)
15881 [TREE_INT_CST_LOW (index)]));
15883 return NULL;
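/* Editorial illustration (not part of the original source): for the
   expression "abc"[1], an ARRAY_REF whose first operand is the
   STRING_CST "abc" and whose index is the INTEGER_CST 1, every check
   above passes (byte-sized integer element type, index below
   TREE_STRING_LENGTH), and the result is the character constant
   'b'.  */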
15886 /* Return the tree for neg (ARG0) when ARG0 is known to be either
15887 an integer, real, or fixed-point constant.
15889 TYPE is the type of the result. */
15891 static tree
15892 fold_negate_const (tree arg0, tree type)
15894 tree t = NULL_TREE;
15896 switch (TREE_CODE (arg0))
15898 case INTEGER_CST:
15900 double_int val = tree_to_double_int (arg0);
15901 int overflow = neg_double (val.low, val.high, &val.low, &val.high);
15903 t = force_fit_type_double (type, val, 1,
15904 (overflow | TREE_OVERFLOW (arg0))
15905 && !TYPE_UNSIGNED (type));
15906 break;
15909 case REAL_CST:
15910 t = build_real (type, real_value_negate (&TREE_REAL_CST (arg0)));
15911 break;
15913 case FIXED_CST:
15915 FIXED_VALUE_TYPE f;
15916 bool overflow_p = fixed_arithmetic (&f, NEGATE_EXPR,
15917 &(TREE_FIXED_CST (arg0)), NULL,
15918 TYPE_SATURATING (type));
15919 t = build_fixed (type, f);
15920 /* Propagate overflow flags. */
15921 if (overflow_p | TREE_OVERFLOW (arg0))
15922 TREE_OVERFLOW (t) = 1;
15923 break;
15926 default:
15927 gcc_unreachable ();
15930 return t;
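/* Editorial sketch (hypothetical helper): negating the most negative
   integer wraps, which the INTEGER_CST case above records.  */

static tree
example_negate_int_min (void)
{
  /* -INT_MIN is not representable, so neg_double reports overflow
     and the returned INTEGER_CST has TREE_OVERFLOW set.  */
  return fold_negate_const (TYPE_MIN_VALUE (integer_type_node),
                            integer_type_node);
}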
15933 /* Return the tree for abs (ARG0) when ARG0 is known to be either
15934 an integer constant or real constant.
15936 TYPE is the type of the result. */
15938 tree
15939 fold_abs_const (tree arg0, tree type)
15941 tree t = NULL_TREE;
15943 switch (TREE_CODE (arg0))
15945 case INTEGER_CST:
15947 double_int val = tree_to_double_int (arg0);
15949 /* If the value is unsigned or non-negative, then the absolute value
15950 is the same as the ordinary value. */
15951 if (TYPE_UNSIGNED (type)
15952 || !double_int_negative_p (val))
15953 t = arg0;
15955 /* If the value is negative, then the absolute value is
15956 its negation. */
15957 else
15959 int overflow;
15961 overflow = neg_double (val.low, val.high, &val.low, &val.high);
15962 t = force_fit_type_double (type, val, -1,
15963 overflow | TREE_OVERFLOW (arg0));
15966 break;
15968 case REAL_CST:
15969 if (REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg0)))
15970 t = build_real (type, real_value_negate (&TREE_REAL_CST (arg0)));
15971 else
15972 t = arg0;
15973 break;
15975 default:
15976 gcc_unreachable ();
15979 return t;
15982 /* Return the tree for not (ARG0) when ARG0 is known to be an integer
15983 constant. TYPE is the type of the result. */
15985 static tree
15986 fold_not_const (const_tree arg0, tree type)
15988 double_int val;
15990 gcc_assert (TREE_CODE (arg0) == INTEGER_CST);
15992 val = double_int_not (tree_to_double_int (arg0));
15993 return force_fit_type_double (type, val, 0, TREE_OVERFLOW (arg0));
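/* Editorial sketch (hypothetical helper): bitwise NOT of a signed
   constant; ~5 folds to -6 in integer_type_node.  */

static tree
example_fold_not_five (void)
{
  return fold_not_const (build_int_cst (integer_type_node, 5),
                         integer_type_node);
}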
15996 /* Given CODE, a relational operator, the target type, TYPE and two
15997 constant operands OP0 and OP1, return the result of the
15998 relational operation. If the result is not a compile time
15999 constant, then return NULL_TREE. */
16001 static tree
16002 fold_relational_const (enum tree_code code, tree type, tree op0, tree op1)
16004 int result, invert;
16006 /* From here on, the only cases we handle are when the result is
16007 known to be a constant. */
16009 if (TREE_CODE (op0) == REAL_CST && TREE_CODE (op1) == REAL_CST)
16011 const REAL_VALUE_TYPE *c0 = TREE_REAL_CST_PTR (op0);
16012 const REAL_VALUE_TYPE *c1 = TREE_REAL_CST_PTR (op1);
16014 /* Handle the cases where either operand is a NaN. */
16015 if (real_isnan (c0) || real_isnan (c1))
16017 switch (code)
16019 case EQ_EXPR:
16020 case ORDERED_EXPR:
16021 result = 0;
16022 break;
16024 case NE_EXPR:
16025 case UNORDERED_EXPR:
16026 case UNLT_EXPR:
16027 case UNLE_EXPR:
16028 case UNGT_EXPR:
16029 case UNGE_EXPR:
16030 case UNEQ_EXPR:
16031 result = 1;
16032 break;
16034 case LT_EXPR:
16035 case LE_EXPR:
16036 case GT_EXPR:
16037 case GE_EXPR:
16038 case LTGT_EXPR:
16039 if (flag_trapping_math)
16040 return NULL_TREE;
16041 result = 0;
16042 break;
16044 default:
16045 gcc_unreachable ();
16048 return constant_boolean_node (result, type);
16051 return constant_boolean_node (real_compare (code, c0, c1), type);
16054 if (TREE_CODE (op0) == FIXED_CST && TREE_CODE (op1) == FIXED_CST)
16056 const FIXED_VALUE_TYPE *c0 = TREE_FIXED_CST_PTR (op0);
16057 const FIXED_VALUE_TYPE *c1 = TREE_FIXED_CST_PTR (op1);
16058 return constant_boolean_node (fixed_compare (code, c0, c1), type);
16061 /* Handle equality/inequality of complex constants. */
16062 if (TREE_CODE (op0) == COMPLEX_CST && TREE_CODE (op1) == COMPLEX_CST)
16064 tree rcond = fold_relational_const (code, type,
16065 TREE_REALPART (op0),
16066 TREE_REALPART (op1));
16067 tree icond = fold_relational_const (code, type,
16068 TREE_IMAGPART (op0),
16069 TREE_IMAGPART (op1));
16070 if (code == EQ_EXPR)
16071 return fold_build2 (TRUTH_ANDIF_EXPR, type, rcond, icond);
16072 else if (code == NE_EXPR)
16073 return fold_build2 (TRUTH_ORIF_EXPR, type, rcond, icond);
16074 else
16075 return NULL_TREE;
16078 /* From here on we only handle LT, LE, GT, GE, EQ and NE.
16080 To compute GT, swap the arguments and do LT.
16081 To compute GE, do LT and invert the result.
16082 To compute LE, swap the arguments, do LT and invert the result.
16083 To compute NE, do EQ and invert the result.
16085 Therefore, the code below must handle only EQ and LT. */
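/* Editorial worked example (not part of the original source): to
   fold 3 >= 5, GE is inverted to LT; INT_CST_LT (3, 5) yields 1,
   and 1 ^ 1 == 0, so the result is boolean false.  */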
16087 if (code == LE_EXPR || code == GT_EXPR)
16089 tree tem = op0;
16090 op0 = op1;
16091 op1 = tem;
16092 code = swap_tree_comparison (code);
16095 /* Note that it is safe to invert for real values here because we
16096 have already handled the one case where it matters. */
16098 invert = 0;
16099 if (code == NE_EXPR || code == GE_EXPR)
16101 invert = 1;
16102 code = invert_tree_comparison (code, false);
16105 /* Compute a result for LT or EQ if args permit;
16106 otherwise return NULL_TREE. */
16107 if (TREE_CODE (op0) == INTEGER_CST && TREE_CODE (op1) == INTEGER_CST)
16109 if (code == EQ_EXPR)
16110 result = tree_int_cst_equal (op0, op1);
16111 else if (TYPE_UNSIGNED (TREE_TYPE (op0)))
16112 result = INT_CST_LT_UNSIGNED (op0, op1);
16113 else
16114 result = INT_CST_LT (op0, op1);
16116 else
16117 return NULL_TREE;
16119 if (invert)
16120 result ^= 1;
16121 return constant_boolean_node (result, type);
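/* Editorial illustration: when either operand is a NaN, an ordered
   comparison such as LT could raise an invalid-operand exception, so
   under flag_trapping_math the code above refuses to fold it
   (NULL_TREE); the unordered forms (UNLT and friends) and the quiet
   EQ/NE fold unconditionally.  */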
16124 /* If necessary, return a CLEANUP_POINT_EXPR for EXPR with the
16125 indicated TYPE. If no CLEANUP_POINT_EXPR is necessary, return EXPR
16126 itself. */
16128 tree
16129 fold_build_cleanup_point_expr (tree type, tree expr)
16131 /* If the expression does not have side effects then we don't have to wrap
16132 it with a cleanup point expression. */
16133 if (!TREE_SIDE_EFFECTS (expr))
16134 return expr;
16136 /* If the expression is a return, check whether the expression inside
16137 the return, or the right-hand side of the MODIFY_EXPR it contains,
16138 is free of side effects. If either has none, we don't need to
16139 wrap the expression in a cleanup point expression. Note we don't check the
16140 left-hand side of the modify because it should always be the return decl. */
16141 if (TREE_CODE (expr) == RETURN_EXPR)
16143 tree op = TREE_OPERAND (expr, 0);
16144 if (!op || !TREE_SIDE_EFFECTS (op))
16145 return expr;
16146 op = TREE_OPERAND (op, 1);
16147 if (!TREE_SIDE_EFFECTS (op))
16148 return expr;
16151 return build1 (CLEANUP_POINT_EXPR, type, expr);
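/* Editorial illustration: "x + 1" has no side effects and is
   returned unchanged, whereas a call "f ()", which may create
   temporaries requiring destruction, is wrapped as
   CLEANUP_POINT_EXPR <f ()>.  */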
16154 /* Given a pointer value OP0 and a type TYPE, return a simplified version
16155 of an indirection through OP0, or NULL_TREE if no simplification is
16156 possible. */
16158 tree
16159 fold_indirect_ref_1 (location_t loc, tree type, tree op0)
16161 tree sub = op0;
16162 tree subtype;
16164 STRIP_NOPS (sub);
16165 subtype = TREE_TYPE (sub);
16166 if (!POINTER_TYPE_P (subtype))
16167 return NULL_TREE;
16169 if (TREE_CODE (sub) == ADDR_EXPR)
16171 tree op = TREE_OPERAND (sub, 0);
16172 tree optype = TREE_TYPE (op);
16173 /* *&CONST_DECL -> to the value of the const decl. */
16174 if (TREE_CODE (op) == CONST_DECL)
16175 return DECL_INITIAL (op);
16176 /* *&p => p; make sure to handle *&"str"[cst] here. */
16177 if (type == optype)
16179 tree fop = fold_read_from_constant_string (op);
16180 if (fop)
16181 return fop;
16182 else
16183 return op;
16185 /* *(foo *)&fooarray => fooarray[0] */
16186 else if (TREE_CODE (optype) == ARRAY_TYPE
16187 && type == TREE_TYPE (optype)
16188 && (!in_gimple_form
16189 || TREE_CODE (TYPE_SIZE (type)) == INTEGER_CST))
16191 tree type_domain = TYPE_DOMAIN (optype);
16192 tree min_val = size_zero_node;
16193 if (type_domain && TYPE_MIN_VALUE (type_domain))
16194 min_val = TYPE_MIN_VALUE (type_domain);
16195 if (in_gimple_form
16196 && TREE_CODE (min_val) != INTEGER_CST)
16197 return NULL_TREE;
16198 return build4_loc (loc, ARRAY_REF, type, op, min_val,
16199 NULL_TREE, NULL_TREE);
16201 /* *(foo *)&complexfoo => __real__ complexfoo */
16202 else if (TREE_CODE (optype) == COMPLEX_TYPE
16203 && type == TREE_TYPE (optype))
16204 return fold_build1_loc (loc, REALPART_EXPR, type, op);
16205 /* *(foo *)&vectorfoo => BIT_FIELD_REF<vectorfoo,...> */
16206 else if (TREE_CODE (optype) == VECTOR_TYPE
16207 && type == TREE_TYPE (optype))
16209 tree part_width = TYPE_SIZE (type);
16210 tree index = bitsize_int (0);
16211 return fold_build3_loc (loc, BIT_FIELD_REF, type, op, part_width, index);
16215 if (TREE_CODE (sub) == POINTER_PLUS_EXPR
16216 && TREE_CODE (TREE_OPERAND (sub, 1)) == INTEGER_CST)
16218 tree op00 = TREE_OPERAND (sub, 0);
16219 tree op01 = TREE_OPERAND (sub, 1);
16221 STRIP_NOPS (op00);
16222 if (TREE_CODE (op00) == ADDR_EXPR)
16224 tree op00type;
16225 op00 = TREE_OPERAND (op00, 0);
16226 op00type = TREE_TYPE (op00);
16228 /* ((foo*)&vectorfoo)[1] => BIT_FIELD_REF<vectorfoo,...> */
16229 if (TREE_CODE (op00type) == VECTOR_TYPE
16230 && type == TREE_TYPE (op00type))
16232 HOST_WIDE_INT offset = tree_low_cst (op01, 0);
16233 tree part_width = TYPE_SIZE (type);
16234 unsigned HOST_WIDE_INT part_widthi = tree_low_cst (part_width, 0)/BITS_PER_UNIT;
16235 unsigned HOST_WIDE_INT indexi = offset * BITS_PER_UNIT;
16236 tree index = bitsize_int (indexi);
16238 if (offset/part_widthi <= TYPE_VECTOR_SUBPARTS (op00type))
16239 return fold_build3_loc (loc,
16240 BIT_FIELD_REF, type, op00,
16241 part_width, index);
16244 /* ((foo*)&complexfoo)[1] => __imag__ complexfoo */
16245 else if (TREE_CODE (op00type) == COMPLEX_TYPE
16246 && type == TREE_TYPE (op00type))
16248 tree size = TYPE_SIZE_UNIT (type);
16249 if (tree_int_cst_equal (size, op01))
16250 return fold_build1_loc (loc, IMAGPART_EXPR, type, op00);
16252 /* ((foo *)&fooarray)[1] => fooarray[1] */
16253 else if (TREE_CODE (op00type) == ARRAY_TYPE
16254 && type == TREE_TYPE (op00type))
16256 tree type_domain = TYPE_DOMAIN (op00type);
16257 tree min_val = size_zero_node;
16258 if (type_domain && TYPE_MIN_VALUE (type_domain))
16259 min_val = TYPE_MIN_VALUE (type_domain);
16260 op01 = size_binop_loc (loc, EXACT_DIV_EXPR, op01,
16261 TYPE_SIZE_UNIT (type));
16262 op01 = size_binop_loc (loc, PLUS_EXPR, op01, min_val);
16263 return build4_loc (loc, ARRAY_REF, type, op00, op01,
16264 NULL_TREE, NULL_TREE);
16269 /* *(foo *)fooarrptr => (*fooarrptr)[0] */
16270 if (TREE_CODE (TREE_TYPE (subtype)) == ARRAY_TYPE
16271 && type == TREE_TYPE (TREE_TYPE (subtype))
16272 && (!in_gimple_form
16273 || TREE_CODE (TYPE_SIZE (type)) == INTEGER_CST))
16275 tree type_domain;
16276 tree min_val = size_zero_node;
16277 sub = build_fold_indirect_ref_loc (loc, sub);
16278 type_domain = TYPE_DOMAIN (TREE_TYPE (sub));
16279 if (type_domain && TYPE_MIN_VALUE (type_domain))
16280 min_val = TYPE_MIN_VALUE (type_domain);
16281 if (in_gimple_form
16282 && TREE_CODE (min_val) != INTEGER_CST)
16283 return NULL_TREE;
16284 return build4_loc (loc, ARRAY_REF, type, sub, min_val, NULL_TREE,
16285 NULL_TREE);
16288 return NULL_TREE;
16291 /* Builds an expression for an indirection through T, simplifying some
16292 cases. */
16294 tree
16295 build_fold_indirect_ref_loc (location_t loc, tree t)
16297 tree type = TREE_TYPE (TREE_TYPE (t));
16298 tree sub = fold_indirect_ref_1 (loc, type, t);
16300 if (sub)
16301 return sub;
16303 return build1_loc (loc, INDIRECT_REF, type, t);
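/* Editorial illustration: building an indirection through "&v" for
   an int v hits the ADDR_EXPR case of fold_indirect_ref_1 with
   matching types, so "v" itself is returned and no INDIRECT_REF is
   built.  */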
16306 /* Given an INDIRECT_REF T, return either T or a simplified version. */
16308 tree
16309 fold_indirect_ref_loc (location_t loc, tree t)
16311 tree sub = fold_indirect_ref_1 (loc, TREE_TYPE (t), TREE_OPERAND (t, 0));
16313 if (sub)
16314 return sub;
16315 else
16316 return t;
16319 /* Strip non-trapping, non-side-effecting tree nodes from an expression
16320 whose result is ignored. The type of the returned tree need not be
16321 the same as the original expression. */
16323 tree
16324 fold_ignored_result (tree t)
16326 if (!TREE_SIDE_EFFECTS (t))
16327 return integer_zero_node;
16329 for (;;)
16330 switch (TREE_CODE_CLASS (TREE_CODE (t)))
16332 case tcc_unary:
16333 t = TREE_OPERAND (t, 0);
16334 break;
16336 case tcc_binary:
16337 case tcc_comparison:
16338 if (!TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1)))
16339 t = TREE_OPERAND (t, 0);
16340 else if (!TREE_SIDE_EFFECTS (TREE_OPERAND (t, 0)))
16341 t = TREE_OPERAND (t, 1);
16342 else
16343 return t;
16344 break;
16346 case tcc_expression:
16347 switch (TREE_CODE (t))
16349 case COMPOUND_EXPR:
16350 if (TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1)))
16351 return t;
16352 t = TREE_OPERAND (t, 0);
16353 break;
16355 case COND_EXPR:
16356 if (TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1))
16357 || TREE_SIDE_EFFECTS (TREE_OPERAND (t, 2)))
16358 return t;
16359 t = TREE_OPERAND (t, 0);
16360 break;
16362 default:
16363 return t;
16365 break;
16367 default:
16368 return t;
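/* Editorial illustration: when the value of "f () + 3" is ignored,
   the tcc_binary arm above drops the side-effect-free operand 3;
   the loop then stops at the CALL_EXPR (default case) and returns
   just "f ()".  */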
16372 /* Return the value of VALUE, rounded up to a multiple of DIVISOR.
16373 This can only be applied to objects of a sizetype. */
16375 tree
16376 round_up_loc (location_t loc, tree value, int divisor)
16378 tree div = NULL_TREE;
16380 gcc_assert (divisor > 0);
16381 if (divisor == 1)
16382 return value;
16384 /* See if VALUE is already a multiple of DIVISOR. If so, we don't
16385 have to do anything. Only do this when we are not given a constant,
16386 because for a constant this check is more expensive than simply
16387 performing the rounding below. */
16388 if (TREE_CODE (value) != INTEGER_CST)
16390 div = build_int_cst (TREE_TYPE (value), divisor);
16392 if (multiple_of_p (TREE_TYPE (value), value, div))
16393 return value;
16396 /* If divisor is a power of two, simplify this to bit manipulation. */
16397 if (divisor == (divisor & -divisor))
16399 if (TREE_CODE (value) == INTEGER_CST)
16401 double_int val = tree_to_double_int (value);
16402 bool overflow_p;
16404 if ((val.low & (divisor - 1)) == 0)
16405 return value;
16407 overflow_p = TREE_OVERFLOW (value);
16408 val.low &= ~(divisor - 1);
16409 val.low += divisor;
16410 if (val.low == 0)
16412 val.high++;
16413 if (val.high == 0)
16414 overflow_p = true;
16417 return force_fit_type_double (TREE_TYPE (value), val,
16418 -1, overflow_p);
16420 else
16422 tree t;
16424 t = build_int_cst (TREE_TYPE (value), divisor - 1);
16425 value = size_binop_loc (loc, PLUS_EXPR, value, t);
16426 t = build_int_cst (TREE_TYPE (value), -divisor);
16427 value = size_binop_loc (loc, BIT_AND_EXPR, value, t);
16430 else
16432 if (!div)
16433 div = build_int_cst (TREE_TYPE (value), divisor);
16434 value = size_binop_loc (loc, CEIL_DIV_EXPR, value, div);
16435 value = size_binop_loc (loc, MULT_EXPR, value, div);
16438 return value;
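/* Editorial sketch (hypothetical helper): the power-of-two branch
   above expressed in plain integer arithmetic; add DIVISOR - 1, then
   clear the low bits with the mask -DIVISOR.  */

static unsigned HOST_WIDE_INT
example_round_up_pow2 (unsigned HOST_WIDE_INT value,
                       unsigned HOST_WIDE_INT divisor)
{
  /* E.g. value 13, divisor 8: (13 + 7) & -8 == 16.  */
  return (value + divisor - 1) & -divisor;
}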
16441 /* Likewise, but round down. */
16443 tree
16444 round_down_loc (location_t loc, tree value, int divisor)
16446 tree div = NULL_TREE;
16448 gcc_assert (divisor > 0);
16449 if (divisor == 1)
16450 return value;
16452 /* See if VALUE is already a multiple of DIVISOR. If so, we don't
16453 have to do anything. Only do this when we are not given a constant,
16454 because for a constant this check is more expensive than simply
16455 performing the rounding below. */
16456 if (TREE_CODE (value) != INTEGER_CST)
16458 div = build_int_cst (TREE_TYPE (value), divisor);
16460 if (multiple_of_p (TREE_TYPE (value), value, div))
16461 return value;
16464 /* If divisor is a power of two, simplify this to bit manipulation. */
16465 if (divisor == (divisor & -divisor))
16467 tree t;
16469 t = build_int_cst (TREE_TYPE (value), -divisor);
16470 value = size_binop_loc (loc, BIT_AND_EXPR, value, t);
16472 else
16474 if (!div)
16475 div = build_int_cst (TREE_TYPE (value), divisor);
16476 value = size_binop_loc (loc, FLOOR_DIV_EXPR, value, div);
16477 value = size_binop_loc (loc, MULT_EXPR, value, div);
16480 return value;
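/* Editorial illustration: for a power-of-two divisor, rounding down
   is a single mask; e.g. 13 & -8 == 8.  */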
16483 /* Returns the pointer to the base of the object addressed by EXP and
16484 extracts the information about the offset of the access, storing it
16485 in *PBITPOS and *POFFSET. */
16487 static tree
16488 split_address_to_core_and_offset (tree exp,
16489 HOST_WIDE_INT *pbitpos, tree *poffset)
16491 tree core;
16492 enum machine_mode mode;
16493 int unsignedp, volatilep;
16494 HOST_WIDE_INT bitsize;
16495 location_t loc = EXPR_LOCATION (exp);
16497 if (TREE_CODE (exp) == ADDR_EXPR)
16499 core = get_inner_reference (TREE_OPERAND (exp, 0), &bitsize, pbitpos,
16500 poffset, &mode, &unsignedp, &volatilep,
16501 false);
16502 core = build_fold_addr_expr_loc (loc, core);
16504 else
16506 core = exp;
16507 *pbitpos = 0;
16508 *poffset = NULL_TREE;
16511 return core;
16514 /* Returns true if addresses of E1 and E2 differ by a constant, false
16515 otherwise. If they do, E1 - E2 is stored in *DIFF. */
16517 bool
16518 ptr_difference_const (tree e1, tree e2, HOST_WIDE_INT *diff)
16520 tree core1, core2;
16521 HOST_WIDE_INT bitpos1, bitpos2;
16522 tree toffset1, toffset2, tdiff, type;
16524 core1 = split_address_to_core_and_offset (e1, &bitpos1, &toffset1);
16525 core2 = split_address_to_core_and_offset (e2, &bitpos2, &toffset2);
16527 if (bitpos1 % BITS_PER_UNIT != 0
16528 || bitpos2 % BITS_PER_UNIT != 0
16529 || !operand_equal_p (core1, core2, 0))
16530 return false;
16532 if (toffset1 && toffset2)
16534 type = TREE_TYPE (toffset1);
16535 if (type != TREE_TYPE (toffset2))
16536 toffset2 = fold_convert (type, toffset2);
16538 tdiff = fold_build2 (MINUS_EXPR, type, toffset1, toffset2);
16539 if (!cst_and_fits_in_hwi (tdiff))
16540 return false;
16542 *diff = int_cst_value (tdiff);
16544 else if (toffset1 || toffset2)
16546 /* If only one of the offsets is non-constant, the difference cannot
16547 be a constant. */
16548 return false;
16550 else
16551 *diff = 0;
16553 *diff += (bitpos1 - bitpos2) / BITS_PER_UNIT;
16554 return true;
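/* Editorial illustration (hypothetical input): given "int a[10];",
   the addresses &a[4] and &a[1] split to the same core &a with
   constant bit positions and no variable offsets, so, assuming a
   32-bit int, *diff is set to 12 == 3 * sizeof (int) and the
   function returns true.  */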
16557 /* Simplify the floating point expression EXP when the sign of the
16558 result is not significant. Return NULL_TREE if no simplification
16559 is possible. */
16561 tree
16562 fold_strip_sign_ops (tree exp)
16564 tree arg0, arg1;
16565 location_t loc = EXPR_LOCATION (exp);
16567 switch (TREE_CODE (exp))
16569 case ABS_EXPR:
16570 case NEGATE_EXPR:
16571 arg0 = fold_strip_sign_ops (TREE_OPERAND (exp, 0));
16572 return arg0 ? arg0 : TREE_OPERAND (exp, 0);
16574 case MULT_EXPR:
16575 case RDIV_EXPR:
16576 if (HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (TREE_TYPE (exp))))
16577 return NULL_TREE;
16578 arg0 = fold_strip_sign_ops (TREE_OPERAND (exp, 0));
16579 arg1 = fold_strip_sign_ops (TREE_OPERAND (exp, 1));
16580 if (arg0 != NULL_TREE || arg1 != NULL_TREE)
16581 return fold_build2_loc (loc, TREE_CODE (exp), TREE_TYPE (exp),
16582 arg0 ? arg0 : TREE_OPERAND (exp, 0),
16583 arg1 ? arg1 : TREE_OPERAND (exp, 1));
16584 break;
16586 case COMPOUND_EXPR:
16587 arg0 = TREE_OPERAND (exp, 0);
16588 arg1 = fold_strip_sign_ops (TREE_OPERAND (exp, 1));
16589 if (arg1)
16590 return fold_build2_loc (loc, COMPOUND_EXPR, TREE_TYPE (exp), arg0, arg1);
16591 break;
16593 case COND_EXPR:
16594 arg0 = fold_strip_sign_ops (TREE_OPERAND (exp, 1));
16595 arg1 = fold_strip_sign_ops (TREE_OPERAND (exp, 2));
16596 if (arg0 || arg1)
16597 return fold_build3_loc (loc,
16598 COND_EXPR, TREE_TYPE (exp), TREE_OPERAND (exp, 0),
16599 arg0 ? arg0 : TREE_OPERAND (exp, 1),
16600 arg1 ? arg1 : TREE_OPERAND (exp, 2));
16601 break;
16603 case CALL_EXPR:
16605 const enum built_in_function fcode = builtin_mathfn_code (exp);
16606 switch (fcode)
16608 CASE_FLT_FN (BUILT_IN_COPYSIGN):
16609 /* Strip copysign function call, return the 1st argument. */
16610 arg0 = CALL_EXPR_ARG (exp, 0);
16611 arg1 = CALL_EXPR_ARG (exp, 1);
16612 return omit_one_operand_loc (loc, TREE_TYPE (exp), arg0, arg1);
16614 default:
16615 /* Strip sign ops from the argument of "odd" math functions. */
16616 if (negate_mathfn_p (fcode))
16618 arg0 = fold_strip_sign_ops (CALL_EXPR_ARG (exp, 0));
16619 if (arg0)
16620 return build_call_expr_loc (loc, get_callee_fndecl (exp), 1, arg0);
16622 break;
16625 break;
16627 default:
16628 break;
16630 return NULL_TREE;
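/* Editorial illustration: a caller that knows the sign of EXP does
   not matter, e.g. the cos () folder (cos (-x) == cos (x)), can pass
   the NEGATE_EXPR "-x" here and get back plain "x": the
   NEGATE_EXPR/ABS_EXPR case at the top of the switch strips the sign
   operation outright.  */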