IVOPT performance tuning patch. The main problem is a variant of maximal weight
[official-gcc.git] / gcc / fold-const.c
blob9ca5eff554e209d91b7b6a228a1ddd5f91ac27a0
1 /* Fold a constant sub-tree into a single node for C-compiler
2 Copyright (C) 1987, 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998, 1999,
3 2000, 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009, 2010
4 Free Software Foundation, Inc.
6 This file is part of GCC.
8 GCC is free software; you can redistribute it and/or modify it under
9 the terms of the GNU General Public License as published by the Free
10 Software Foundation; either version 3, or (at your option) any later
11 version.
13 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
14 WARRANTY; without even the implied warranty of MERCHANTABILITY or
15 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
16 for more details.
18 You should have received a copy of the GNU General Public License
19 along with GCC; see the file COPYING3. If not see
20 <http://www.gnu.org/licenses/>. */
22 /*@@ This file should be rewritten to use an arbitrary precision
23 @@ representation for "struct tree_int_cst" and "struct tree_real_cst".
24 @@ Perhaps the routines could also be used for bc/dc, and made a lib.
25 @@ The routines that translate from the ap rep should
26 @@ warn if precision et. al. is lost.
27 @@ This would also make life easier when this technology is used
28 @@ for cross-compilers. */
30 /* The entry points in this file are fold, size_int_wide and size_binop.
32 fold takes a tree as argument and returns a simplified tree.
34 size_binop takes a tree code for an arithmetic operation
35 and two operands that are trees, and produces a tree for the
36 result, assuming the type comes from `sizetype'.
38 size_int takes an integer value, and creates a tree constant
39 with type from `sizetype'.
41 Note: Since the folders get called on non-gimple code as well as
42 gimple code, we need to handle GIMPLE tuples as well as their
43 corresponding tree equivalents. */
45 #include "config.h"
46 #include "system.h"
47 #include "coretypes.h"
48 #include "tm.h"
49 #include "flags.h"
50 #include "tree.h"
51 #include "realmpfr.h"
52 #include "rtl.h"
53 #include "expr.h"
54 #include "tm_p.h"
55 #include "target.h"
56 #include "diagnostic-core.h"
57 #include "toplev.h"
58 #include "intl.h"
59 #include "ggc.h"
60 #include "hashtab.h"
61 #include "langhooks.h"
62 #include "md5.h"
63 #include "gimple.h"
64 #include "tree-flow.h"
66 /* Nonzero if we are folding constants inside an initializer; zero
67 otherwise. */
68 int folding_initializer = 0;
70 /* The following constants represent a bit based encoding of GCC's
71 comparison operators. This encoding simplifies transformations
72 on relational comparison operators, such as AND and OR. */
73 enum comparison_code {
74 COMPCODE_FALSE = 0,
75 COMPCODE_LT = 1,
76 COMPCODE_EQ = 2,
77 COMPCODE_LE = 3,
78 COMPCODE_GT = 4,
79 COMPCODE_LTGT = 5,
80 COMPCODE_GE = 6,
81 COMPCODE_ORD = 7,
82 COMPCODE_UNORD = 8,
83 COMPCODE_UNLT = 9,
84 COMPCODE_UNEQ = 10,
85 COMPCODE_UNLE = 11,
86 COMPCODE_UNGT = 12,
87 COMPCODE_NE = 13,
88 COMPCODE_UNGE = 14,
89 COMPCODE_TRUE = 15
92 static bool negate_mathfn_p (enum built_in_function);
93 static bool negate_expr_p (tree);
94 static tree negate_expr (tree);
95 static tree split_tree (tree, enum tree_code, tree *, tree *, tree *, int);
96 static tree associate_trees (location_t, tree, tree, enum tree_code, tree);
97 static tree const_binop (enum tree_code, tree, tree);
98 static enum comparison_code comparison_to_compcode (enum tree_code);
99 static enum tree_code compcode_to_comparison (enum comparison_code);
100 static int operand_equal_for_comparison_p (tree, tree, tree);
101 static int twoval_comparison_p (tree, tree *, tree *, int *);
102 static tree eval_subst (location_t, tree, tree, tree, tree, tree);
103 static tree pedantic_omit_one_operand_loc (location_t, tree, tree, tree);
104 static tree distribute_bit_expr (location_t, enum tree_code, tree, tree, tree);
105 static tree make_bit_field_ref (location_t, tree, tree,
106 HOST_WIDE_INT, HOST_WIDE_INT, int);
107 static tree optimize_bit_field_compare (location_t, enum tree_code,
108 tree, tree, tree);
109 static tree decode_field_reference (location_t, tree, HOST_WIDE_INT *,
110 HOST_WIDE_INT *,
111 enum machine_mode *, int *, int *,
112 tree *, tree *);
113 static int all_ones_mask_p (const_tree, int);
114 static tree sign_bit_p (tree, const_tree);
115 static int simple_operand_p (const_tree);
116 static tree range_binop (enum tree_code, tree, tree, int, tree, int);
117 static tree range_predecessor (tree);
118 static tree range_successor (tree);
119 extern tree make_range (tree, int *, tree *, tree *, bool *);
120 extern bool merge_ranges (int *, tree *, tree *, int, tree, tree, int,
121 tree, tree);
122 static tree fold_range_test (location_t, enum tree_code, tree, tree, tree);
123 static tree fold_cond_expr_with_comparison (location_t, tree, tree, tree, tree);
124 static tree unextend (tree, int, int, tree);
125 static tree fold_truthop (location_t, enum tree_code, tree, tree, tree);
126 static tree optimize_minmax_comparison (location_t, enum tree_code,
127 tree, tree, tree);
128 static tree extract_muldiv (tree, tree, enum tree_code, tree, bool *);
129 static tree extract_muldiv_1 (tree, tree, enum tree_code, tree, bool *);
130 static tree fold_binary_op_with_conditional_arg (location_t,
131 enum tree_code, tree,
132 tree, tree,
133 tree, tree, int);
134 static tree fold_mathfn_compare (location_t,
135 enum built_in_function, enum tree_code,
136 tree, tree, tree);
137 static tree fold_inf_compare (location_t, enum tree_code, tree, tree, tree);
138 static tree fold_div_compare (location_t, enum tree_code, tree, tree, tree);
139 static bool reorder_operands_p (const_tree, const_tree);
140 static tree fold_negate_const (tree, tree);
141 static tree fold_not_const (const_tree, tree);
142 static tree fold_relational_const (enum tree_code, tree, tree, tree);
143 static tree fold_convert_const (enum tree_code, tree, tree);
146 /* We know that A1 + B1 = SUM1, using 2's complement arithmetic and ignoring
147 overflow. Suppose A, B and SUM have the same respective signs as A1, B1,
148 and SUM1. Then this yields nonzero if overflow occurred during the
149 addition.
151 Overflow occurs if A and B have the same sign, but A and SUM differ in
152 sign. Use `^' to test whether signs differ, and `< 0' to isolate the
153 sign. */
154 #define OVERFLOW_SUM_SIGN(a, b, sum) ((~((a) ^ (b)) & ((a) ^ (sum))) < 0)
156 /* If ARG2 divides ARG1 with zero remainder, carries out the division
157 of type CODE and returns the quotient.
158 Otherwise returns NULL_TREE. */
160 tree
161 div_if_zero_remainder (enum tree_code code, const_tree arg1, const_tree arg2)
163 double_int quo, rem;
164 int uns;
166 /* The sign of the division is according to operand two, that
167 does the correct thing for POINTER_PLUS_EXPR where we want
168 a signed division. */
169 uns = TYPE_UNSIGNED (TREE_TYPE (arg2));
170 if (TREE_CODE (TREE_TYPE (arg2)) == INTEGER_TYPE
171 && TYPE_IS_SIZETYPE (TREE_TYPE (arg2)))
172 uns = false;
174 quo = double_int_divmod (tree_to_double_int (arg1),
175 tree_to_double_int (arg2),
176 uns, code, &rem);
178 if (double_int_zero_p (rem))
179 return build_int_cst_wide (TREE_TYPE (arg1), quo.low, quo.high);
181 return NULL_TREE;
184 /* This is nonzero if we should defer warnings about undefined
185 overflow. This facility exists because these warnings are a
186 special case. The code to estimate loop iterations does not want
187 to issue any warnings, since it works with expressions which do not
188 occur in user code. Various bits of cleanup code call fold(), but
189 only use the result if it has certain characteristics (e.g., is a
190 constant); that code only wants to issue a warning if the result is
191 used. */
193 static int fold_deferring_overflow_warnings;
195 /* If a warning about undefined overflow is deferred, this is the
196 warning. Note that this may cause us to turn two warnings into
197 one, but that is fine since it is sufficient to only give one
198 warning per expression. */
200 static const char* fold_deferred_overflow_warning;
202 /* If a warning about undefined overflow is deferred, this is the
203 level at which the warning should be emitted. */
205 static enum warn_strict_overflow_code fold_deferred_overflow_code;
207 /* Start deferring overflow warnings. We could use a stack here to
208 permit nested calls, but at present it is not necessary. */
210 void
211 fold_defer_overflow_warnings (void)
213 ++fold_deferring_overflow_warnings;
216 /* Stop deferring overflow warnings. If there is a pending warning,
217 and ISSUE is true, then issue the warning if appropriate. STMT is
218 the statement with which the warning should be associated (used for
219 location information); STMT may be NULL. CODE is the level of the
220 warning--a warn_strict_overflow_code value. This function will use
221 the smaller of CODE and the deferred code when deciding whether to
222 issue the warning. CODE may be zero to mean to always use the
223 deferred code. */
225 void
226 fold_undefer_overflow_warnings (bool issue, const_gimple stmt, int code)
228 const char *warnmsg;
229 location_t locus;
231 gcc_assert (fold_deferring_overflow_warnings > 0);
232 --fold_deferring_overflow_warnings;
233 if (fold_deferring_overflow_warnings > 0)
235 if (fold_deferred_overflow_warning != NULL
236 && code != 0
237 && code < (int) fold_deferred_overflow_code)
238 fold_deferred_overflow_code = (enum warn_strict_overflow_code) code;
239 return;
242 warnmsg = fold_deferred_overflow_warning;
243 fold_deferred_overflow_warning = NULL;
245 if (!issue || warnmsg == NULL)
246 return;
248 if (gimple_no_warning_p (stmt))
249 return;
251 /* Use the smallest code level when deciding to issue the
252 warning. */
253 if (code == 0 || code > (int) fold_deferred_overflow_code)
254 code = fold_deferred_overflow_code;
256 if (!issue_strict_overflow_warning (code))
257 return;
259 if (stmt == NULL)
260 locus = input_location;
261 else
262 locus = gimple_location (stmt);
263 warning_at (locus, OPT_Wstrict_overflow, "%s", warnmsg);
266 /* Stop deferring overflow warnings, ignoring any deferred
267 warnings. */
269 void
270 fold_undefer_and_ignore_overflow_warnings (void)
272 fold_undefer_overflow_warnings (false, NULL, 0);
275 /* Whether we are deferring overflow warnings. */
277 bool
278 fold_deferring_overflow_warnings_p (void)
280 return fold_deferring_overflow_warnings > 0;
283 /* This is called when we fold something based on the fact that signed
284 overflow is undefined. */
286 static void
287 fold_overflow_warning (const char* gmsgid, enum warn_strict_overflow_code wc)
289 if (fold_deferring_overflow_warnings > 0)
291 if (fold_deferred_overflow_warning == NULL
292 || wc < fold_deferred_overflow_code)
294 fold_deferred_overflow_warning = gmsgid;
295 fold_deferred_overflow_code = wc;
298 else if (issue_strict_overflow_warning (wc))
299 warning (OPT_Wstrict_overflow, gmsgid);
302 /* Return true if the built-in mathematical function specified by CODE
303 is odd, i.e. -f(x) == f(-x). */
305 static bool
306 negate_mathfn_p (enum built_in_function code)
308 switch (code)
310 CASE_FLT_FN (BUILT_IN_ASIN):
311 CASE_FLT_FN (BUILT_IN_ASINH):
312 CASE_FLT_FN (BUILT_IN_ATAN):
313 CASE_FLT_FN (BUILT_IN_ATANH):
314 CASE_FLT_FN (BUILT_IN_CASIN):
315 CASE_FLT_FN (BUILT_IN_CASINH):
316 CASE_FLT_FN (BUILT_IN_CATAN):
317 CASE_FLT_FN (BUILT_IN_CATANH):
318 CASE_FLT_FN (BUILT_IN_CBRT):
319 CASE_FLT_FN (BUILT_IN_CPROJ):
320 CASE_FLT_FN (BUILT_IN_CSIN):
321 CASE_FLT_FN (BUILT_IN_CSINH):
322 CASE_FLT_FN (BUILT_IN_CTAN):
323 CASE_FLT_FN (BUILT_IN_CTANH):
324 CASE_FLT_FN (BUILT_IN_ERF):
325 CASE_FLT_FN (BUILT_IN_LLROUND):
326 CASE_FLT_FN (BUILT_IN_LROUND):
327 CASE_FLT_FN (BUILT_IN_ROUND):
328 CASE_FLT_FN (BUILT_IN_SIN):
329 CASE_FLT_FN (BUILT_IN_SINH):
330 CASE_FLT_FN (BUILT_IN_TAN):
331 CASE_FLT_FN (BUILT_IN_TANH):
332 CASE_FLT_FN (BUILT_IN_TRUNC):
333 return true;
335 CASE_FLT_FN (BUILT_IN_LLRINT):
336 CASE_FLT_FN (BUILT_IN_LRINT):
337 CASE_FLT_FN (BUILT_IN_NEARBYINT):
338 CASE_FLT_FN (BUILT_IN_RINT):
339 return !flag_rounding_math;
341 default:
342 break;
344 return false;
347 /* Check whether we may negate an integer constant T without causing
348 overflow. */
350 bool
351 may_negate_without_overflow_p (const_tree t)
353 unsigned HOST_WIDE_INT val;
354 unsigned int prec;
355 tree type;
357 gcc_assert (TREE_CODE (t) == INTEGER_CST);
359 type = TREE_TYPE (t);
360 if (TYPE_UNSIGNED (type))
361 return false;
363 prec = TYPE_PRECISION (type);
364 if (prec > HOST_BITS_PER_WIDE_INT)
366 if (TREE_INT_CST_LOW (t) != 0)
367 return true;
368 prec -= HOST_BITS_PER_WIDE_INT;
369 val = TREE_INT_CST_HIGH (t);
371 else
372 val = TREE_INT_CST_LOW (t);
373 if (prec < HOST_BITS_PER_WIDE_INT)
374 val &= ((unsigned HOST_WIDE_INT) 1 << prec) - 1;
375 return val != ((unsigned HOST_WIDE_INT) 1 << (prec - 1));
378 /* Determine whether an expression T can be cheaply negated using
379 the function negate_expr without introducing undefined overflow. */
381 static bool
382 negate_expr_p (tree t)
384 tree type;
386 if (t == 0)
387 return false;
389 type = TREE_TYPE (t);
391 STRIP_SIGN_NOPS (t);
392 switch (TREE_CODE (t))
394 case INTEGER_CST:
395 if (TYPE_OVERFLOW_WRAPS (type))
396 return true;
398 /* Check that -CST will not overflow type. */
399 return may_negate_without_overflow_p (t);
400 case BIT_NOT_EXPR:
401 return (INTEGRAL_TYPE_P (type)
402 && TYPE_OVERFLOW_WRAPS (type));
404 case FIXED_CST:
405 case NEGATE_EXPR:
406 return true;
408 case REAL_CST:
409 /* We want to canonicalize to positive real constants. Pretend
410 that only negative ones can be easily negated. */
411 return REAL_VALUE_NEGATIVE (TREE_REAL_CST (t));
413 case COMPLEX_CST:
414 return negate_expr_p (TREE_REALPART (t))
415 && negate_expr_p (TREE_IMAGPART (t));
417 case COMPLEX_EXPR:
418 return negate_expr_p (TREE_OPERAND (t, 0))
419 && negate_expr_p (TREE_OPERAND (t, 1));
421 case CONJ_EXPR:
422 return negate_expr_p (TREE_OPERAND (t, 0));
424 case PLUS_EXPR:
425 if (HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type))
426 || HONOR_SIGNED_ZEROS (TYPE_MODE (type)))
427 return false;
428 /* -(A + B) -> (-B) - A. */
429 if (negate_expr_p (TREE_OPERAND (t, 1))
430 && reorder_operands_p (TREE_OPERAND (t, 0),
431 TREE_OPERAND (t, 1)))
432 return true;
433 /* -(A + B) -> (-A) - B. */
434 return negate_expr_p (TREE_OPERAND (t, 0));
436 case MINUS_EXPR:
437 /* We can't turn -(A-B) into B-A when we honor signed zeros. */
438 return !HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type))
439 && !HONOR_SIGNED_ZEROS (TYPE_MODE (type))
440 && reorder_operands_p (TREE_OPERAND (t, 0),
441 TREE_OPERAND (t, 1));
443 case MULT_EXPR:
444 if (TYPE_UNSIGNED (TREE_TYPE (t)))
445 break;
447 /* Fall through. */
449 case RDIV_EXPR:
450 if (! HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (TREE_TYPE (t))))
451 return negate_expr_p (TREE_OPERAND (t, 1))
452 || negate_expr_p (TREE_OPERAND (t, 0));
453 break;
455 case TRUNC_DIV_EXPR:
456 case ROUND_DIV_EXPR:
457 case FLOOR_DIV_EXPR:
458 case CEIL_DIV_EXPR:
459 case EXACT_DIV_EXPR:
460 /* In general we can't negate A / B, because if A is INT_MIN and
461 B is 1, we may turn this into INT_MIN / -1 which is undefined
462 and actually traps on some architectures. But if overflow is
463 undefined, we can negate, because - (INT_MIN / 1) is an
464 overflow. */
465 if (INTEGRAL_TYPE_P (TREE_TYPE (t))
466 && !TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (t)))
467 break;
468 return negate_expr_p (TREE_OPERAND (t, 1))
469 || negate_expr_p (TREE_OPERAND (t, 0));
471 case NOP_EXPR:
472 /* Negate -((double)float) as (double)(-float). */
473 if (TREE_CODE (type) == REAL_TYPE)
475 tree tem = strip_float_extensions (t);
476 if (tem != t)
477 return negate_expr_p (tem);
479 break;
481 case CALL_EXPR:
482 /* Negate -f(x) as f(-x). */
483 if (negate_mathfn_p (builtin_mathfn_code (t)))
484 return negate_expr_p (CALL_EXPR_ARG (t, 0));
485 break;
487 case RSHIFT_EXPR:
488 /* Optimize -((int)x >> 31) into (unsigned)x >> 31. */
489 if (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST)
491 tree op1 = TREE_OPERAND (t, 1);
492 if (TREE_INT_CST_HIGH (op1) == 0
493 && (unsigned HOST_WIDE_INT) (TYPE_PRECISION (type) - 1)
494 == TREE_INT_CST_LOW (op1))
495 return true;
497 break;
499 default:
500 break;
502 return false;
505 /* Given T, an expression, return a folded tree for -T or NULL_TREE, if no
506 simplification is possible.
507 If negate_expr_p would return true for T, NULL_TREE will never be
508 returned. */
510 static tree
511 fold_negate_expr (location_t loc, tree t)
513 tree type = TREE_TYPE (t);
514 tree tem;
516 switch (TREE_CODE (t))
518 /* Convert - (~A) to A + 1. */
519 case BIT_NOT_EXPR:
520 if (INTEGRAL_TYPE_P (type))
521 return fold_build2_loc (loc, PLUS_EXPR, type, TREE_OPERAND (t, 0),
522 build_int_cst (type, 1));
523 break;
525 case INTEGER_CST:
526 tem = fold_negate_const (t, type);
527 if (TREE_OVERFLOW (tem) == TREE_OVERFLOW (t)
528 || !TYPE_OVERFLOW_TRAPS (type))
529 return tem;
530 break;
532 case REAL_CST:
533 tem = fold_negate_const (t, type);
534 /* Two's complement FP formats, such as c4x, may overflow. */
535 if (!TREE_OVERFLOW (tem) || !flag_trapping_math)
536 return tem;
537 break;
539 case FIXED_CST:
540 tem = fold_negate_const (t, type);
541 return tem;
543 case COMPLEX_CST:
545 tree rpart = negate_expr (TREE_REALPART (t));
546 tree ipart = negate_expr (TREE_IMAGPART (t));
548 if ((TREE_CODE (rpart) == REAL_CST
549 && TREE_CODE (ipart) == REAL_CST)
550 || (TREE_CODE (rpart) == INTEGER_CST
551 && TREE_CODE (ipart) == INTEGER_CST))
552 return build_complex (type, rpart, ipart);
554 break;
556 case COMPLEX_EXPR:
557 if (negate_expr_p (t))
558 return fold_build2_loc (loc, COMPLEX_EXPR, type,
559 fold_negate_expr (loc, TREE_OPERAND (t, 0)),
560 fold_negate_expr (loc, TREE_OPERAND (t, 1)));
561 break;
563 case CONJ_EXPR:
564 if (negate_expr_p (t))
565 return fold_build1_loc (loc, CONJ_EXPR, type,
566 fold_negate_expr (loc, TREE_OPERAND (t, 0)));
567 break;
569 case NEGATE_EXPR:
570 return TREE_OPERAND (t, 0);
572 case PLUS_EXPR:
573 if (!HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type))
574 && !HONOR_SIGNED_ZEROS (TYPE_MODE (type)))
576 /* -(A + B) -> (-B) - A. */
577 if (negate_expr_p (TREE_OPERAND (t, 1))
578 && reorder_operands_p (TREE_OPERAND (t, 0),
579 TREE_OPERAND (t, 1)))
581 tem = negate_expr (TREE_OPERAND (t, 1));
582 return fold_build2_loc (loc, MINUS_EXPR, type,
583 tem, TREE_OPERAND (t, 0));
586 /* -(A + B) -> (-A) - B. */
587 if (negate_expr_p (TREE_OPERAND (t, 0)))
589 tem = negate_expr (TREE_OPERAND (t, 0));
590 return fold_build2_loc (loc, MINUS_EXPR, type,
591 tem, TREE_OPERAND (t, 1));
594 break;
596 case MINUS_EXPR:
597 /* - (A - B) -> B - A */
598 if (!HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type))
599 && !HONOR_SIGNED_ZEROS (TYPE_MODE (type))
600 && reorder_operands_p (TREE_OPERAND (t, 0), TREE_OPERAND (t, 1)))
601 return fold_build2_loc (loc, MINUS_EXPR, type,
602 TREE_OPERAND (t, 1), TREE_OPERAND (t, 0));
603 break;
605 case MULT_EXPR:
606 if (TYPE_UNSIGNED (type))
607 break;
609 /* Fall through. */
611 case RDIV_EXPR:
612 if (! HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type)))
614 tem = TREE_OPERAND (t, 1);
615 if (negate_expr_p (tem))
616 return fold_build2_loc (loc, TREE_CODE (t), type,
617 TREE_OPERAND (t, 0), negate_expr (tem));
618 tem = TREE_OPERAND (t, 0);
619 if (negate_expr_p (tem))
620 return fold_build2_loc (loc, TREE_CODE (t), type,
621 negate_expr (tem), TREE_OPERAND (t, 1));
623 break;
625 case TRUNC_DIV_EXPR:
626 case ROUND_DIV_EXPR:
627 case FLOOR_DIV_EXPR:
628 case CEIL_DIV_EXPR:
629 case EXACT_DIV_EXPR:
630 /* In general we can't negate A / B, because if A is INT_MIN and
631 B is 1, we may turn this into INT_MIN / -1 which is undefined
632 and actually traps on some architectures. But if overflow is
633 undefined, we can negate, because - (INT_MIN / 1) is an
634 overflow. */
635 if (!INTEGRAL_TYPE_P (type) || TYPE_OVERFLOW_UNDEFINED (type))
637 const char * const warnmsg = G_("assuming signed overflow does not "
638 "occur when negating a division");
639 tem = TREE_OPERAND (t, 1);
640 if (negate_expr_p (tem))
642 if (INTEGRAL_TYPE_P (type)
643 && (TREE_CODE (tem) != INTEGER_CST
644 || integer_onep (tem)))
645 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_MISC);
646 return fold_build2_loc (loc, TREE_CODE (t), type,
647 TREE_OPERAND (t, 0), negate_expr (tem));
649 tem = TREE_OPERAND (t, 0);
650 if (negate_expr_p (tem))
652 if (INTEGRAL_TYPE_P (type)
653 && (TREE_CODE (tem) != INTEGER_CST
654 || tree_int_cst_equal (tem, TYPE_MIN_VALUE (type))))
655 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_MISC);
656 return fold_build2_loc (loc, TREE_CODE (t), type,
657 negate_expr (tem), TREE_OPERAND (t, 1));
660 break;
662 case NOP_EXPR:
663 /* Convert -((double)float) into (double)(-float). */
664 if (TREE_CODE (type) == REAL_TYPE)
666 tem = strip_float_extensions (t);
667 if (tem != t && negate_expr_p (tem))
668 return fold_convert_loc (loc, type, negate_expr (tem));
670 break;
672 case CALL_EXPR:
673 /* Negate -f(x) as f(-x). */
674 if (negate_mathfn_p (builtin_mathfn_code (t))
675 && negate_expr_p (CALL_EXPR_ARG (t, 0)))
677 tree fndecl, arg;
679 fndecl = get_callee_fndecl (t);
680 arg = negate_expr (CALL_EXPR_ARG (t, 0));
681 return build_call_expr_loc (loc, fndecl, 1, arg);
683 break;
685 case RSHIFT_EXPR:
686 /* Optimize -((int)x >> 31) into (unsigned)x >> 31. */
687 if (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST)
689 tree op1 = TREE_OPERAND (t, 1);
690 if (TREE_INT_CST_HIGH (op1) == 0
691 && (unsigned HOST_WIDE_INT) (TYPE_PRECISION (type) - 1)
692 == TREE_INT_CST_LOW (op1))
694 tree ntype = TYPE_UNSIGNED (type)
695 ? signed_type_for (type)
696 : unsigned_type_for (type);
697 tree temp = fold_convert_loc (loc, ntype, TREE_OPERAND (t, 0));
698 temp = fold_build2_loc (loc, RSHIFT_EXPR, ntype, temp, op1);
699 return fold_convert_loc (loc, type, temp);
702 break;
704 default:
705 break;
708 return NULL_TREE;
711 /* Like fold_negate_expr, but return a NEGATE_EXPR tree, if T can not be
712 negated in a simpler way. Also allow for T to be NULL_TREE, in which case
713 return NULL_TREE. */
715 static tree
716 negate_expr (tree t)
718 tree type, tem;
719 location_t loc;
721 if (t == NULL_TREE)
722 return NULL_TREE;
724 loc = EXPR_LOCATION (t);
725 type = TREE_TYPE (t);
726 STRIP_SIGN_NOPS (t);
728 tem = fold_negate_expr (loc, t);
729 if (!tem)
731 tem = build1 (NEGATE_EXPR, TREE_TYPE (t), t);
732 SET_EXPR_LOCATION (tem, loc);
734 return fold_convert_loc (loc, type, tem);
737 /* Split a tree IN into a constant, literal and variable parts that could be
738 combined with CODE to make IN. "constant" means an expression with
739 TREE_CONSTANT but that isn't an actual constant. CODE must be a
740 commutative arithmetic operation. Store the constant part into *CONP,
741 the literal in *LITP and return the variable part. If a part isn't
742 present, set it to null. If the tree does not decompose in this way,
743 return the entire tree as the variable part and the other parts as null.
745 If CODE is PLUS_EXPR we also split trees that use MINUS_EXPR. In that
746 case, we negate an operand that was subtracted. Except if it is a
747 literal for which we use *MINUS_LITP instead.
749 If NEGATE_P is true, we are negating all of IN, again except a literal
750 for which we use *MINUS_LITP instead.
752 If IN is itself a literal or constant, return it as appropriate.
754 Note that we do not guarantee that any of the three values will be the
755 same type as IN, but they will have the same signedness and mode. */
757 static tree
758 split_tree (tree in, enum tree_code code, tree *conp, tree *litp,
759 tree *minus_litp, int negate_p)
761 tree var = 0;
763 *conp = 0;
764 *litp = 0;
765 *minus_litp = 0;
767 /* Strip any conversions that don't change the machine mode or signedness. */
768 STRIP_SIGN_NOPS (in);
770 if (TREE_CODE (in) == INTEGER_CST || TREE_CODE (in) == REAL_CST
771 || TREE_CODE (in) == FIXED_CST)
772 *litp = in;
773 else if (TREE_CODE (in) == code
774 || ((! FLOAT_TYPE_P (TREE_TYPE (in)) || flag_associative_math)
775 && ! SAT_FIXED_POINT_TYPE_P (TREE_TYPE (in))
776 /* We can associate addition and subtraction together (even
777 though the C standard doesn't say so) for integers because
778 the value is not affected. For reals, the value might be
779 affected, so we can't. */
780 && ((code == PLUS_EXPR && TREE_CODE (in) == MINUS_EXPR)
781 || (code == MINUS_EXPR && TREE_CODE (in) == PLUS_EXPR))))
783 tree op0 = TREE_OPERAND (in, 0);
784 tree op1 = TREE_OPERAND (in, 1);
785 int neg1_p = TREE_CODE (in) == MINUS_EXPR;
786 int neg_litp_p = 0, neg_conp_p = 0, neg_var_p = 0;
788 /* First see if either of the operands is a literal, then a constant. */
789 if (TREE_CODE (op0) == INTEGER_CST || TREE_CODE (op0) == REAL_CST
790 || TREE_CODE (op0) == FIXED_CST)
791 *litp = op0, op0 = 0;
792 else if (TREE_CODE (op1) == INTEGER_CST || TREE_CODE (op1) == REAL_CST
793 || TREE_CODE (op1) == FIXED_CST)
794 *litp = op1, neg_litp_p = neg1_p, op1 = 0;
796 if (op0 != 0 && TREE_CONSTANT (op0))
797 *conp = op0, op0 = 0;
798 else if (op1 != 0 && TREE_CONSTANT (op1))
799 *conp = op1, neg_conp_p = neg1_p, op1 = 0;
801 /* If we haven't dealt with either operand, this is not a case we can
802 decompose. Otherwise, VAR is either of the ones remaining, if any. */
803 if (op0 != 0 && op1 != 0)
804 var = in;
805 else if (op0 != 0)
806 var = op0;
807 else
808 var = op1, neg_var_p = neg1_p;
810 /* Now do any needed negations. */
811 if (neg_litp_p)
812 *minus_litp = *litp, *litp = 0;
813 if (neg_conp_p)
814 *conp = negate_expr (*conp);
815 if (neg_var_p)
816 var = negate_expr (var);
818 else if (TREE_CONSTANT (in))
819 *conp = in;
820 else
821 var = in;
823 if (negate_p)
825 if (*litp)
826 *minus_litp = *litp, *litp = 0;
827 else if (*minus_litp)
828 *litp = *minus_litp, *minus_litp = 0;
829 *conp = negate_expr (*conp);
830 var = negate_expr (var);
833 return var;
836 /* Re-associate trees split by the above function. T1 and T2 are
837 either expressions to associate or null. Return the new
838 expression, if any. LOC is the location of the new expression. If
839 we build an operation, do it in TYPE and with CODE. */
841 static tree
842 associate_trees (location_t loc, tree t1, tree t2, enum tree_code code, tree type)
844 tree tem;
846 if (t1 == 0)
847 return t2;
848 else if (t2 == 0)
849 return t1;
851 /* If either input is CODE, a PLUS_EXPR, or a MINUS_EXPR, don't
852 try to fold this since we will have infinite recursion. But do
853 deal with any NEGATE_EXPRs. */
854 if (TREE_CODE (t1) == code || TREE_CODE (t2) == code
855 || TREE_CODE (t1) == MINUS_EXPR || TREE_CODE (t2) == MINUS_EXPR)
857 if (code == PLUS_EXPR)
859 if (TREE_CODE (t1) == NEGATE_EXPR)
860 tem = build2 (MINUS_EXPR, type, fold_convert_loc (loc, type, t2),
861 fold_convert_loc (loc, type, TREE_OPERAND (t1, 0)));
862 else if (TREE_CODE (t2) == NEGATE_EXPR)
863 tem = build2 (MINUS_EXPR, type, fold_convert_loc (loc, type, t1),
864 fold_convert_loc (loc, type, TREE_OPERAND (t2, 0)));
865 else if (integer_zerop (t2))
866 return fold_convert_loc (loc, type, t1);
868 else if (code == MINUS_EXPR)
870 if (integer_zerop (t2))
871 return fold_convert_loc (loc, type, t1);
874 tem = build2 (code, type, fold_convert_loc (loc, type, t1),
875 fold_convert_loc (loc, type, t2));
876 goto associate_trees_exit;
879 return fold_build2_loc (loc, code, type, fold_convert_loc (loc, type, t1),
880 fold_convert_loc (loc, type, t2));
881 associate_trees_exit:
882 protected_set_expr_location (tem, loc);
883 return tem;
886 /* Check whether TYPE1 and TYPE2 are equivalent integer types, suitable
887 for use in int_const_binop, size_binop and size_diffop. */
889 static bool
890 int_binop_types_match_p (enum tree_code code, const_tree type1, const_tree type2)
892 if (TREE_CODE (type1) != INTEGER_TYPE && !POINTER_TYPE_P (type1))
893 return false;
894 if (TREE_CODE (type2) != INTEGER_TYPE && !POINTER_TYPE_P (type2))
895 return false;
897 switch (code)
899 case LSHIFT_EXPR:
900 case RSHIFT_EXPR:
901 case LROTATE_EXPR:
902 case RROTATE_EXPR:
903 return true;
905 default:
906 break;
909 return TYPE_UNSIGNED (type1) == TYPE_UNSIGNED (type2)
910 && TYPE_PRECISION (type1) == TYPE_PRECISION (type2)
911 && TYPE_MODE (type1) == TYPE_MODE (type2);
915 /* Combine two integer constants ARG1 and ARG2 under operation CODE
916 to produce a new constant. Return NULL_TREE if we don't know how
917 to evaluate CODE at compile-time.
919 If NOTRUNC is nonzero, do not truncate the result to fit the data type. */
921 tree
922 int_const_binop (enum tree_code code, const_tree arg1, const_tree arg2, int notrunc)
924 double_int op1, op2, res, tmp;
925 tree t;
926 tree type = TREE_TYPE (arg1);
927 bool uns = TYPE_UNSIGNED (type);
928 bool is_sizetype
929 = (TREE_CODE (type) == INTEGER_TYPE && TYPE_IS_SIZETYPE (type));
930 bool overflow = false;
932 op1 = tree_to_double_int (arg1);
933 op2 = tree_to_double_int (arg2);
935 switch (code)
937 case BIT_IOR_EXPR:
938 res = double_int_ior (op1, op2);
939 break;
941 case BIT_XOR_EXPR:
942 res = double_int_xor (op1, op2);
943 break;
945 case BIT_AND_EXPR:
946 res = double_int_and (op1, op2);
947 break;
949 case RSHIFT_EXPR:
950 res = double_int_rshift (op1, double_int_to_shwi (op2),
951 TYPE_PRECISION (type), !uns);
952 break;
954 case LSHIFT_EXPR:
955 /* It's unclear from the C standard whether shifts can overflow.
956 The following code ignores overflow; perhaps a C standard
957 interpretation ruling is needed. */
958 res = double_int_lshift (op1, double_int_to_shwi (op2),
959 TYPE_PRECISION (type), !uns);
960 break;
962 case RROTATE_EXPR:
963 res = double_int_rrotate (op1, double_int_to_shwi (op2),
964 TYPE_PRECISION (type));
965 break;
967 case LROTATE_EXPR:
968 res = double_int_lrotate (op1, double_int_to_shwi (op2),
969 TYPE_PRECISION (type));
970 break;
972 case PLUS_EXPR:
973 overflow = add_double (op1.low, op1.high, op2.low, op2.high,
974 &res.low, &res.high);
975 break;
977 case MINUS_EXPR:
978 neg_double (op2.low, op2.high, &res.low, &res.high);
979 add_double (op1.low, op1.high, res.low, res.high,
980 &res.low, &res.high);
981 overflow = OVERFLOW_SUM_SIGN (res.high, op2.high, op1.high);
982 break;
984 case MULT_EXPR:
985 overflow = mul_double (op1.low, op1.high, op2.low, op2.high,
986 &res.low, &res.high);
987 break;
989 case TRUNC_DIV_EXPR:
990 case FLOOR_DIV_EXPR: case CEIL_DIV_EXPR:
991 case EXACT_DIV_EXPR:
992 /* This is a shortcut for a common special case. */
993 if (op2.high == 0 && (HOST_WIDE_INT) op2.low > 0
994 && !TREE_OVERFLOW (arg1)
995 && !TREE_OVERFLOW (arg2)
996 && op1.high == 0 && (HOST_WIDE_INT) op1.low >= 0)
998 if (code == CEIL_DIV_EXPR)
999 op1.low += op2.low - 1;
1001 res.low = op1.low / op2.low, res.high = 0;
1002 break;
1005 /* ... fall through ... */
1007 case ROUND_DIV_EXPR:
1008 if (double_int_zero_p (op2))
1009 return NULL_TREE;
1010 if (double_int_one_p (op2))
1012 res = op1;
1013 break;
1015 if (double_int_equal_p (op1, op2)
1016 && ! double_int_zero_p (op1))
1018 res = double_int_one;
1019 break;
1021 overflow = div_and_round_double (code, uns,
1022 op1.low, op1.high, op2.low, op2.high,
1023 &res.low, &res.high,
1024 &tmp.low, &tmp.high);
1025 break;
1027 case TRUNC_MOD_EXPR:
1028 case FLOOR_MOD_EXPR: case CEIL_MOD_EXPR:
1029 /* This is a shortcut for a common special case. */
1030 if (op2.high == 0 && (HOST_WIDE_INT) op2.low > 0
1031 && !TREE_OVERFLOW (arg1)
1032 && !TREE_OVERFLOW (arg2)
1033 && op1.high == 0 && (HOST_WIDE_INT) op1.low >= 0)
1035 if (code == CEIL_MOD_EXPR)
1036 op1.low += op2.low - 1;
1037 res.low = op1.low % op2.low, res.high = 0;
1038 break;
1041 /* ... fall through ... */
1043 case ROUND_MOD_EXPR:
1044 if (double_int_zero_p (op2))
1045 return NULL_TREE;
1046 overflow = div_and_round_double (code, uns,
1047 op1.low, op1.high, op2.low, op2.high,
1048 &tmp.low, &tmp.high,
1049 &res.low, &res.high);
1050 break;
1052 case MIN_EXPR:
1053 res = double_int_min (op1, op2, uns);
1054 break;
1056 case MAX_EXPR:
1057 res = double_int_max (op1, op2, uns);
1058 break;
1060 default:
1061 return NULL_TREE;
1064 if (notrunc)
1066 t = build_int_cst_wide (TREE_TYPE (arg1), res.low, res.high);
1068 /* Propagate overflow flags ourselves. */
1069 if (((!uns || is_sizetype) && overflow)
1070 | TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2))
1072 t = copy_node (t);
1073 TREE_OVERFLOW (t) = 1;
1076 else
1077 t = force_fit_type_double (TREE_TYPE (arg1), res, 1,
1078 ((!uns || is_sizetype) && overflow)
1079 | TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2));
1081 return t;
1084 /* Combine two constants ARG1 and ARG2 under operation CODE to produce a new
1085 constant. We assume ARG1 and ARG2 have the same data type, or at least
1086 are the same kind of constant and the same machine mode. Return zero if
1087 combining the constants is not allowed in the current operating mode. */
1089 static tree
1090 const_binop (enum tree_code code, tree arg1, tree arg2)
1092 /* Sanity check for the recursive cases. */
1093 if (!arg1 || !arg2)
1094 return NULL_TREE;
1096 STRIP_NOPS (arg1);
1097 STRIP_NOPS (arg2);
1099 if (TREE_CODE (arg1) == INTEGER_CST)
1100 return int_const_binop (code, arg1, arg2, 0);
1102 if (TREE_CODE (arg1) == REAL_CST)
1104 enum machine_mode mode;
1105 REAL_VALUE_TYPE d1;
1106 REAL_VALUE_TYPE d2;
1107 REAL_VALUE_TYPE value;
1108 REAL_VALUE_TYPE result;
1109 bool inexact;
1110 tree t, type;
1112 /* The following codes are handled by real_arithmetic. */
1113 switch (code)
1115 case PLUS_EXPR:
1116 case MINUS_EXPR:
1117 case MULT_EXPR:
1118 case RDIV_EXPR:
1119 case MIN_EXPR:
1120 case MAX_EXPR:
1121 break;
1123 default:
1124 return NULL_TREE;
1127 d1 = TREE_REAL_CST (arg1);
1128 d2 = TREE_REAL_CST (arg2);
1130 type = TREE_TYPE (arg1);
1131 mode = TYPE_MODE (type);
1133 /* Don't perform operation if we honor signaling NaNs and
1134 either operand is a NaN. */
1135 if (HONOR_SNANS (mode)
1136 && (REAL_VALUE_ISNAN (d1) || REAL_VALUE_ISNAN (d2)))
1137 return NULL_TREE;
1139 /* Don't perform operation if it would raise a division
1140 by zero exception. */
1141 if (code == RDIV_EXPR
1142 && REAL_VALUES_EQUAL (d2, dconst0)
1143 && (flag_trapping_math || ! MODE_HAS_INFINITIES (mode)))
1144 return NULL_TREE;
1146 /* If either operand is a NaN, just return it. Otherwise, set up
1147 for floating-point trap; we return an overflow. */
1148 if (REAL_VALUE_ISNAN (d1))
1149 return arg1;
1150 else if (REAL_VALUE_ISNAN (d2))
1151 return arg2;
1153 inexact = real_arithmetic (&value, code, &d1, &d2);
1154 real_convert (&result, mode, &value);
1156 /* Don't constant fold this floating point operation if
1157 the result has overflowed and flag_trapping_math. */
1158 if (flag_trapping_math
1159 && MODE_HAS_INFINITIES (mode)
1160 && REAL_VALUE_ISINF (result)
1161 && !REAL_VALUE_ISINF (d1)
1162 && !REAL_VALUE_ISINF (d2))
1163 return NULL_TREE;
1165 /* Don't constant fold this floating point operation if the
1166 result may dependent upon the run-time rounding mode and
1167 flag_rounding_math is set, or if GCC's software emulation
1168 is unable to accurately represent the result. */
1169 if ((flag_rounding_math
1170 || (MODE_COMPOSITE_P (mode) && !flag_unsafe_math_optimizations))
1171 && (inexact || !real_identical (&result, &value)))
1172 return NULL_TREE;
1174 t = build_real (type, result);
1176 TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2);
1177 return t;
1180 if (TREE_CODE (arg1) == FIXED_CST)
1182 FIXED_VALUE_TYPE f1;
1183 FIXED_VALUE_TYPE f2;
1184 FIXED_VALUE_TYPE result;
1185 tree t, type;
1186 int sat_p;
1187 bool overflow_p;
1189 /* The following codes are handled by fixed_arithmetic. */
1190 switch (code)
1192 case PLUS_EXPR:
1193 case MINUS_EXPR:
1194 case MULT_EXPR:
1195 case TRUNC_DIV_EXPR:
1196 f2 = TREE_FIXED_CST (arg2);
1197 break;
1199 case LSHIFT_EXPR:
1200 case RSHIFT_EXPR:
1201 f2.data.high = TREE_INT_CST_HIGH (arg2);
1202 f2.data.low = TREE_INT_CST_LOW (arg2);
1203 f2.mode = SImode;
1204 break;
1206 default:
1207 return NULL_TREE;
1210 f1 = TREE_FIXED_CST (arg1);
1211 type = TREE_TYPE (arg1);
1212 sat_p = TYPE_SATURATING (type);
1213 overflow_p = fixed_arithmetic (&result, code, &f1, &f2, sat_p);
1214 t = build_fixed (type, result);
1215 /* Propagate overflow flags. */
1216 if (overflow_p | TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2))
1217 TREE_OVERFLOW (t) = 1;
1218 return t;
1221 if (TREE_CODE (arg1) == COMPLEX_CST)
1223 tree type = TREE_TYPE (arg1);
1224 tree r1 = TREE_REALPART (arg1);
1225 tree i1 = TREE_IMAGPART (arg1);
1226 tree r2 = TREE_REALPART (arg2);
1227 tree i2 = TREE_IMAGPART (arg2);
1228 tree real, imag;
1230 switch (code)
1232 case PLUS_EXPR:
1233 case MINUS_EXPR:
1234 real = const_binop (code, r1, r2);
1235 imag = const_binop (code, i1, i2);
1236 break;
1238 case MULT_EXPR:
1239 if (COMPLEX_FLOAT_TYPE_P (type))
1240 return do_mpc_arg2 (arg1, arg2, type,
1241 /* do_nonfinite= */ folding_initializer,
1242 mpc_mul);
1244 real = const_binop (MINUS_EXPR,
1245 const_binop (MULT_EXPR, r1, r2),
1246 const_binop (MULT_EXPR, i1, i2));
1247 imag = const_binop (PLUS_EXPR,
1248 const_binop (MULT_EXPR, r1, i2),
1249 const_binop (MULT_EXPR, i1, r2));
1250 break;
1252 case RDIV_EXPR:
1253 if (COMPLEX_FLOAT_TYPE_P (type))
1254 return do_mpc_arg2 (arg1, arg2, type,
1255 /* do_nonfinite= */ folding_initializer,
1256 mpc_div);
1257 /* Fallthru ... */
1258 case TRUNC_DIV_EXPR:
1259 case CEIL_DIV_EXPR:
1260 case FLOOR_DIV_EXPR:
1261 case ROUND_DIV_EXPR:
1262 if (flag_complex_method == 0)
1264 /* Keep this algorithm in sync with
1265 tree-complex.c:expand_complex_div_straight().
1267 Expand complex division to scalars, straightforward algorithm.
1268 a / b = ((ar*br + ai*bi)/t) + i((ai*br - ar*bi)/t)
1269 t = br*br + bi*bi
1271 tree magsquared
1272 = const_binop (PLUS_EXPR,
1273 const_binop (MULT_EXPR, r2, r2),
1274 const_binop (MULT_EXPR, i2, i2));
1275 tree t1
1276 = const_binop (PLUS_EXPR,
1277 const_binop (MULT_EXPR, r1, r2),
1278 const_binop (MULT_EXPR, i1, i2));
1279 tree t2
1280 = const_binop (MINUS_EXPR,
1281 const_binop (MULT_EXPR, i1, r2),
1282 const_binop (MULT_EXPR, r1, i2));
1284 real = const_binop (code, t1, magsquared);
1285 imag = const_binop (code, t2, magsquared);
1287 else
1289 /* Keep this algorithm in sync with
1290 tree-complex.c:expand_complex_div_wide().
1292 Expand complex division to scalars, modified algorithm to minimize
1293 overflow with wide input ranges. */
1294 tree compare = fold_build2 (LT_EXPR, boolean_type_node,
1295 fold_abs_const (r2, TREE_TYPE (type)),
1296 fold_abs_const (i2, TREE_TYPE (type)));
1298 if (integer_nonzerop (compare))
1300 /* In the TRUE branch, we compute
1301 ratio = br/bi;
1302 div = (br * ratio) + bi;
1303 tr = (ar * ratio) + ai;
1304 ti = (ai * ratio) - ar;
1305 tr = tr / div;
1306 ti = ti / div; */
1307 tree ratio = const_binop (code, r2, i2);
1308 tree div = const_binop (PLUS_EXPR, i2,
1309 const_binop (MULT_EXPR, r2, ratio));
1310 real = const_binop (MULT_EXPR, r1, ratio);
1311 real = const_binop (PLUS_EXPR, real, i1);
1312 real = const_binop (code, real, div);
1314 imag = const_binop (MULT_EXPR, i1, ratio);
1315 imag = const_binop (MINUS_EXPR, imag, r1);
1316 imag = const_binop (code, imag, div);
1318 else
1320 /* In the FALSE branch, we compute
1321 ratio = d/c;
1322 divisor = (d * ratio) + c;
1323 tr = (b * ratio) + a;
1324 ti = b - (a * ratio);
1325 tr = tr / div;
1326 ti = ti / div; */
1327 tree ratio = const_binop (code, i2, r2);
1328 tree div = const_binop (PLUS_EXPR, r2,
1329 const_binop (MULT_EXPR, i2, ratio));
1331 real = const_binop (MULT_EXPR, i1, ratio);
1332 real = const_binop (PLUS_EXPR, real, r1);
1333 real = const_binop (code, real, div);
1335 imag = const_binop (MULT_EXPR, r1, ratio);
1336 imag = const_binop (MINUS_EXPR, i1, imag);
1337 imag = const_binop (code, imag, div);
1340 break;
1342 default:
1343 return NULL_TREE;
1346 if (real && imag)
1347 return build_complex (type, real, imag);
1350 if (TREE_CODE (arg1) == VECTOR_CST)
1352 tree type = TREE_TYPE(arg1);
1353 int count = TYPE_VECTOR_SUBPARTS (type), i;
1354 tree elements1, elements2, list = NULL_TREE;
1356 if(TREE_CODE(arg2) != VECTOR_CST)
1357 return NULL_TREE;
1359 elements1 = TREE_VECTOR_CST_ELTS (arg1);
1360 elements2 = TREE_VECTOR_CST_ELTS (arg2);
1362 for (i = 0; i < count; i++)
1364 tree elem1, elem2, elem;
1366 /* The trailing elements can be empty and should be treated as 0 */
1367 if(!elements1)
1368 elem1 = fold_convert_const (NOP_EXPR, TREE_TYPE (type), integer_zero_node);
1369 else
1371 elem1 = TREE_VALUE(elements1);
1372 elements1 = TREE_CHAIN (elements1);
1375 if(!elements2)
1376 elem2 = fold_convert_const (NOP_EXPR, TREE_TYPE (type), integer_zero_node);
1377 else
1379 elem2 = TREE_VALUE(elements2);
1380 elements2 = TREE_CHAIN (elements2);
1383 elem = const_binop (code, elem1, elem2);
1385 /* It is possible that const_binop cannot handle the given
1386 code and return NULL_TREE */
1387 if(elem == NULL_TREE)
1388 return NULL_TREE;
1390 list = tree_cons (NULL_TREE, elem, list);
1392 return build_vector(type, nreverse(list));
1394 return NULL_TREE;
1397 /* Create a size type INT_CST node with NUMBER sign extended. KIND
1398 indicates which particular sizetype to create. */
1400 tree
1401 size_int_kind (HOST_WIDE_INT number, enum size_type_kind kind)
1403 return build_int_cst (sizetype_tab[(int) kind], number);
1406 /* Combine operands OP1 and OP2 with arithmetic operation CODE. CODE
1407 is a tree code. The type of the result is taken from the operands.
1408 Both must be equivalent integer types, ala int_binop_types_match_p.
1409 If the operands are constant, so is the result. */
1411 tree
1412 size_binop_loc (location_t loc, enum tree_code code, tree arg0, tree arg1)
1414 tree type = TREE_TYPE (arg0);
1416 if (arg0 == error_mark_node || arg1 == error_mark_node)
1417 return error_mark_node;
1419 gcc_assert (int_binop_types_match_p (code, TREE_TYPE (arg0),
1420 TREE_TYPE (arg1)));
1422 /* Handle the special case of two integer constants faster. */
1423 if (TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
1425 /* And some specific cases even faster than that. */
1426 if (code == PLUS_EXPR)
1428 if (integer_zerop (arg0) && !TREE_OVERFLOW (arg0))
1429 return arg1;
1430 if (integer_zerop (arg1) && !TREE_OVERFLOW (arg1))
1431 return arg0;
1433 else if (code == MINUS_EXPR)
1435 if (integer_zerop (arg1) && !TREE_OVERFLOW (arg1))
1436 return arg0;
1438 else if (code == MULT_EXPR)
1440 if (integer_onep (arg0) && !TREE_OVERFLOW (arg0))
1441 return arg1;
1444 /* Handle general case of two integer constants. */
1445 return int_const_binop (code, arg0, arg1, 0);
1448 return fold_build2_loc (loc, code, type, arg0, arg1);
1451 /* Given two values, either both of sizetype or both of bitsizetype,
1452 compute the difference between the two values. Return the value
1453 in signed type corresponding to the type of the operands. */
1455 tree
1456 size_diffop_loc (location_t loc, tree arg0, tree arg1)
1458 tree type = TREE_TYPE (arg0);
1459 tree ctype;
1461 gcc_assert (int_binop_types_match_p (MINUS_EXPR, TREE_TYPE (arg0),
1462 TREE_TYPE (arg1)));
1464 /* If the type is already signed, just do the simple thing. */
1465 if (!TYPE_UNSIGNED (type))
1466 return size_binop_loc (loc, MINUS_EXPR, arg0, arg1);
1468 if (type == sizetype)
1469 ctype = ssizetype;
1470 else if (type == bitsizetype)
1471 ctype = sbitsizetype;
1472 else
1473 ctype = signed_type_for (type);
1475 /* If either operand is not a constant, do the conversions to the signed
1476 type and subtract. The hardware will do the right thing with any
1477 overflow in the subtraction. */
1478 if (TREE_CODE (arg0) != INTEGER_CST || TREE_CODE (arg1) != INTEGER_CST)
1479 return size_binop_loc (loc, MINUS_EXPR,
1480 fold_convert_loc (loc, ctype, arg0),
1481 fold_convert_loc (loc, ctype, arg1));
1483 /* If ARG0 is larger than ARG1, subtract and return the result in CTYPE.
1484 Otherwise, subtract the other way, convert to CTYPE (we know that can't
1485 overflow) and negate (which can't either). Special-case a result
1486 of zero while we're here. */
1487 if (tree_int_cst_equal (arg0, arg1))
1488 return build_int_cst (ctype, 0);
1489 else if (tree_int_cst_lt (arg1, arg0))
1490 return fold_convert_loc (loc, ctype,
1491 size_binop_loc (loc, MINUS_EXPR, arg0, arg1));
1492 else
1493 return size_binop_loc (loc, MINUS_EXPR, build_int_cst (ctype, 0),
1494 fold_convert_loc (loc, ctype,
1495 size_binop_loc (loc,
1496 MINUS_EXPR,
1497 arg1, arg0)));
1500 /* A subroutine of fold_convert_const handling conversions of an
1501 INTEGER_CST to another integer type. */
1503 static tree
1504 fold_convert_const_int_from_int (tree type, const_tree arg1)
1506 tree t;
1508 /* Given an integer constant, make new constant with new type,
1509 appropriately sign-extended or truncated. */
1510 t = force_fit_type_double (type, tree_to_double_int (arg1),
1511 !POINTER_TYPE_P (TREE_TYPE (arg1)),
1512 (TREE_INT_CST_HIGH (arg1) < 0
1513 && (TYPE_UNSIGNED (type)
1514 < TYPE_UNSIGNED (TREE_TYPE (arg1))))
1515 | TREE_OVERFLOW (arg1));
1517 return t;
1520 /* A subroutine of fold_convert_const handling conversions a REAL_CST
1521 to an integer type. */
1523 static tree
1524 fold_convert_const_int_from_real (enum tree_code code, tree type, const_tree arg1)
1526 int overflow = 0;
1527 tree t;
1529 /* The following code implements the floating point to integer
1530 conversion rules required by the Java Language Specification,
1531 that IEEE NaNs are mapped to zero and values that overflow
1532 the target precision saturate, i.e. values greater than
1533 INT_MAX are mapped to INT_MAX, and values less than INT_MIN
1534 are mapped to INT_MIN. These semantics are allowed by the
1535 C and C++ standards that simply state that the behavior of
1536 FP-to-integer conversion is unspecified upon overflow. */
1538 double_int val;
1539 REAL_VALUE_TYPE r;
1540 REAL_VALUE_TYPE x = TREE_REAL_CST (arg1);
1542 switch (code)
1544 case FIX_TRUNC_EXPR:
1545 real_trunc (&r, VOIDmode, &x);
1546 break;
1548 default:
1549 gcc_unreachable ();
1552 /* If R is NaN, return zero and show we have an overflow. */
1553 if (REAL_VALUE_ISNAN (r))
1555 overflow = 1;
1556 val = double_int_zero;
1559 /* See if R is less than the lower bound or greater than the
1560 upper bound. */
1562 if (! overflow)
1564 tree lt = TYPE_MIN_VALUE (type);
1565 REAL_VALUE_TYPE l = real_value_from_int_cst (NULL_TREE, lt);
1566 if (REAL_VALUES_LESS (r, l))
1568 overflow = 1;
1569 val = tree_to_double_int (lt);
1573 if (! overflow)
1575 tree ut = TYPE_MAX_VALUE (type);
1576 if (ut)
1578 REAL_VALUE_TYPE u = real_value_from_int_cst (NULL_TREE, ut);
1579 if (REAL_VALUES_LESS (u, r))
1581 overflow = 1;
1582 val = tree_to_double_int (ut);
1587 if (! overflow)
1588 real_to_integer2 ((HOST_WIDE_INT *) &val.low, &val.high, &r);
1590 t = force_fit_type_double (type, val, -1, overflow | TREE_OVERFLOW (arg1));
1591 return t;
1594 /* A subroutine of fold_convert_const handling conversions of a
1595 FIXED_CST to an integer type. */
1597 static tree
1598 fold_convert_const_int_from_fixed (tree type, const_tree arg1)
1600 tree t;
1601 double_int temp, temp_trunc;
1602 unsigned int mode;
1604 /* Right shift FIXED_CST to temp by fbit. */
1605 temp = TREE_FIXED_CST (arg1).data;
1606 mode = TREE_FIXED_CST (arg1).mode;
1607 if (GET_MODE_FBIT (mode) < 2 * HOST_BITS_PER_WIDE_INT)
1609 temp = double_int_rshift (temp, GET_MODE_FBIT (mode),
1610 HOST_BITS_PER_DOUBLE_INT,
1611 SIGNED_FIXED_POINT_MODE_P (mode));
1613 /* Left shift temp to temp_trunc by fbit. */
1614 temp_trunc = double_int_lshift (temp, GET_MODE_FBIT (mode),
1615 HOST_BITS_PER_DOUBLE_INT,
1616 SIGNED_FIXED_POINT_MODE_P (mode));
1618 else
1620 temp = double_int_zero;
1621 temp_trunc = double_int_zero;
1624 /* If FIXED_CST is negative, we need to round the value toward 0.
1625 By checking if the fractional bits are not zero to add 1 to temp. */
1626 if (SIGNED_FIXED_POINT_MODE_P (mode)
1627 && double_int_negative_p (temp_trunc)
1628 && !double_int_equal_p (TREE_FIXED_CST (arg1).data, temp_trunc))
1629 temp = double_int_add (temp, double_int_one);
1631 /* Given a fixed-point constant, make new constant with new type,
1632 appropriately sign-extended or truncated. */
1633 t = force_fit_type_double (type, temp, -1,
1634 (double_int_negative_p (temp)
1635 && (TYPE_UNSIGNED (type)
1636 < TYPE_UNSIGNED (TREE_TYPE (arg1))))
1637 | TREE_OVERFLOW (arg1));
1639 return t;
1642 /* A subroutine of fold_convert_const handling conversions a REAL_CST
1643 to another floating point type. */
1645 static tree
1646 fold_convert_const_real_from_real (tree type, const_tree arg1)
1648 REAL_VALUE_TYPE value;
1649 tree t;
1651 real_convert (&value, TYPE_MODE (type), &TREE_REAL_CST (arg1));
1652 t = build_real (type, value);
1654 /* If converting an infinity or NAN to a representation that doesn't
1655 have one, set the overflow bit so that we can produce some kind of
1656 error message at the appropriate point if necessary. It's not the
1657 most user-friendly message, but it's better than nothing. */
1658 if (REAL_VALUE_ISINF (TREE_REAL_CST (arg1))
1659 && !MODE_HAS_INFINITIES (TYPE_MODE (type)))
1660 TREE_OVERFLOW (t) = 1;
1661 else if (REAL_VALUE_ISNAN (TREE_REAL_CST (arg1))
1662 && !MODE_HAS_NANS (TYPE_MODE (type)))
1663 TREE_OVERFLOW (t) = 1;
1664 /* Regular overflow, conversion produced an infinity in a mode that
1665 can't represent them. */
1666 else if (!MODE_HAS_INFINITIES (TYPE_MODE (type))
1667 && REAL_VALUE_ISINF (value)
1668 && !REAL_VALUE_ISINF (TREE_REAL_CST (arg1)))
1669 TREE_OVERFLOW (t) = 1;
1670 else
1671 TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1);
1672 return t;
1675 /* A subroutine of fold_convert_const handling conversions a FIXED_CST
1676 to a floating point type. */
1678 static tree
1679 fold_convert_const_real_from_fixed (tree type, const_tree arg1)
1681 REAL_VALUE_TYPE value;
1682 tree t;
1684 real_convert_from_fixed (&value, TYPE_MODE (type), &TREE_FIXED_CST (arg1));
1685 t = build_real (type, value);
1687 TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1);
1688 return t;
1691 /* A subroutine of fold_convert_const handling conversions a FIXED_CST
1692 to another fixed-point type. */
1694 static tree
1695 fold_convert_const_fixed_from_fixed (tree type, const_tree arg1)
1697 FIXED_VALUE_TYPE value;
1698 tree t;
1699 bool overflow_p;
1701 overflow_p = fixed_convert (&value, TYPE_MODE (type), &TREE_FIXED_CST (arg1),
1702 TYPE_SATURATING (type));
1703 t = build_fixed (type, value);
1705 /* Propagate overflow flags. */
1706 if (overflow_p | TREE_OVERFLOW (arg1))
1707 TREE_OVERFLOW (t) = 1;
1708 return t;
1711 /* A subroutine of fold_convert_const handling conversions an INTEGER_CST
1712 to a fixed-point type. */
1714 static tree
1715 fold_convert_const_fixed_from_int (tree type, const_tree arg1)
1717 FIXED_VALUE_TYPE value;
1718 tree t;
1719 bool overflow_p;
1721 overflow_p = fixed_convert_from_int (&value, TYPE_MODE (type),
1722 TREE_INT_CST (arg1),
1723 TYPE_UNSIGNED (TREE_TYPE (arg1)),
1724 TYPE_SATURATING (type));
1725 t = build_fixed (type, value);
1727 /* Propagate overflow flags. */
1728 if (overflow_p | TREE_OVERFLOW (arg1))
1729 TREE_OVERFLOW (t) = 1;
1730 return t;
1733 /* A subroutine of fold_convert_const handling conversions a REAL_CST
1734 to a fixed-point type. */
1736 static tree
1737 fold_convert_const_fixed_from_real (tree type, const_tree arg1)
1739 FIXED_VALUE_TYPE value;
1740 tree t;
1741 bool overflow_p;
1743 overflow_p = fixed_convert_from_real (&value, TYPE_MODE (type),
1744 &TREE_REAL_CST (arg1),
1745 TYPE_SATURATING (type));
1746 t = build_fixed (type, value);
1748 /* Propagate overflow flags. */
1749 if (overflow_p | TREE_OVERFLOW (arg1))
1750 TREE_OVERFLOW (t) = 1;
1751 return t;
1754 /* Attempt to fold type conversion operation CODE of expression ARG1 to
1755 type TYPE. If no simplification can be done return NULL_TREE. */
1757 static tree
1758 fold_convert_const (enum tree_code code, tree type, tree arg1)
1760 if (TREE_TYPE (arg1) == type)
1761 return arg1;
1763 if (POINTER_TYPE_P (type) || INTEGRAL_TYPE_P (type)
1764 || TREE_CODE (type) == OFFSET_TYPE)
1766 if (TREE_CODE (arg1) == INTEGER_CST)
1767 return fold_convert_const_int_from_int (type, arg1);
1768 else if (TREE_CODE (arg1) == REAL_CST)
1769 return fold_convert_const_int_from_real (code, type, arg1);
1770 else if (TREE_CODE (arg1) == FIXED_CST)
1771 return fold_convert_const_int_from_fixed (type, arg1);
1773 else if (TREE_CODE (type) == REAL_TYPE)
1775 if (TREE_CODE (arg1) == INTEGER_CST)
1776 return build_real_from_int_cst (type, arg1);
1777 else if (TREE_CODE (arg1) == REAL_CST)
1778 return fold_convert_const_real_from_real (type, arg1);
1779 else if (TREE_CODE (arg1) == FIXED_CST)
1780 return fold_convert_const_real_from_fixed (type, arg1);
1782 else if (TREE_CODE (type) == FIXED_POINT_TYPE)
1784 if (TREE_CODE (arg1) == FIXED_CST)
1785 return fold_convert_const_fixed_from_fixed (type, arg1);
1786 else if (TREE_CODE (arg1) == INTEGER_CST)
1787 return fold_convert_const_fixed_from_int (type, arg1);
1788 else if (TREE_CODE (arg1) == REAL_CST)
1789 return fold_convert_const_fixed_from_real (type, arg1);
1791 return NULL_TREE;
1794 /* Construct a vector of zero elements of vector type TYPE. */
1796 static tree
1797 build_zero_vector (tree type)
1799 tree elem, list;
1800 int i, units;
1802 elem = fold_convert_const (NOP_EXPR, TREE_TYPE (type), integer_zero_node);
1803 units = TYPE_VECTOR_SUBPARTS (type);
1805 list = NULL_TREE;
1806 for (i = 0; i < units; i++)
1807 list = tree_cons (NULL_TREE, elem, list);
1808 return build_vector (type, list);
1811 /* Returns true, if ARG is convertible to TYPE using a NOP_EXPR. */
1813 bool
1814 fold_convertible_p (const_tree type, const_tree arg)
1816 tree orig = TREE_TYPE (arg);
1818 if (type == orig)
1819 return true;
1821 if (TREE_CODE (arg) == ERROR_MARK
1822 || TREE_CODE (type) == ERROR_MARK
1823 || TREE_CODE (orig) == ERROR_MARK)
1824 return false;
1826 if (TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (orig))
1827 return true;
1829 switch (TREE_CODE (type))
1831 case INTEGER_TYPE: case ENUMERAL_TYPE: case BOOLEAN_TYPE:
1832 case POINTER_TYPE: case REFERENCE_TYPE:
1833 case OFFSET_TYPE:
1834 if (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig)
1835 || TREE_CODE (orig) == OFFSET_TYPE)
1836 return true;
1837 return (TREE_CODE (orig) == VECTOR_TYPE
1838 && tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (orig)));
1840 case REAL_TYPE:
1841 case FIXED_POINT_TYPE:
1842 case COMPLEX_TYPE:
1843 case VECTOR_TYPE:
1844 case VOID_TYPE:
1845 return TREE_CODE (type) == TREE_CODE (orig);
1847 default:
1848 return false;
1852 /* Convert expression ARG to type TYPE. Used by the middle-end for
1853 simple conversions in preference to calling the front-end's convert. */
1855 tree
1856 fold_convert_loc (location_t loc, tree type, tree arg)
1858 tree orig = TREE_TYPE (arg);
1859 tree tem;
1861 if (type == orig)
1862 return arg;
1864 if (TREE_CODE (arg) == ERROR_MARK
1865 || TREE_CODE (type) == ERROR_MARK
1866 || TREE_CODE (orig) == ERROR_MARK)
1867 return error_mark_node;
1869 if (TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (orig))
1870 return fold_build1_loc (loc, NOP_EXPR, type, arg);
1872 switch (TREE_CODE (type))
1874 case POINTER_TYPE:
1875 case REFERENCE_TYPE:
1876 /* Handle conversions between pointers to different address spaces. */
1877 if (POINTER_TYPE_P (orig)
1878 && (TYPE_ADDR_SPACE (TREE_TYPE (type))
1879 != TYPE_ADDR_SPACE (TREE_TYPE (orig))))
1880 return fold_build1_loc (loc, ADDR_SPACE_CONVERT_EXPR, type, arg);
1881 /* fall through */
1883 case INTEGER_TYPE: case ENUMERAL_TYPE: case BOOLEAN_TYPE:
1884 case OFFSET_TYPE:
1885 if (TREE_CODE (arg) == INTEGER_CST)
1887 tem = fold_convert_const (NOP_EXPR, type, arg);
1888 if (tem != NULL_TREE)
1889 return tem;
1891 if (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig)
1892 || TREE_CODE (orig) == OFFSET_TYPE)
1893 return fold_build1_loc (loc, NOP_EXPR, type, arg);
1894 if (TREE_CODE (orig) == COMPLEX_TYPE)
1895 return fold_convert_loc (loc, type,
1896 fold_build1_loc (loc, REALPART_EXPR,
1897 TREE_TYPE (orig), arg));
1898 gcc_assert (TREE_CODE (orig) == VECTOR_TYPE
1899 && tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (orig)));
1900 return fold_build1_loc (loc, NOP_EXPR, type, arg);
1902 case REAL_TYPE:
1903 if (TREE_CODE (arg) == INTEGER_CST)
1905 tem = fold_convert_const (FLOAT_EXPR, type, arg);
1906 if (tem != NULL_TREE)
1907 return tem;
1909 else if (TREE_CODE (arg) == REAL_CST)
1911 tem = fold_convert_const (NOP_EXPR, type, arg);
1912 if (tem != NULL_TREE)
1913 return tem;
1915 else if (TREE_CODE (arg) == FIXED_CST)
1917 tem = fold_convert_const (FIXED_CONVERT_EXPR, type, arg);
1918 if (tem != NULL_TREE)
1919 return tem;
1922 switch (TREE_CODE (orig))
1924 case INTEGER_TYPE:
1925 case BOOLEAN_TYPE: case ENUMERAL_TYPE:
1926 case POINTER_TYPE: case REFERENCE_TYPE:
1927 return fold_build1_loc (loc, FLOAT_EXPR, type, arg);
1929 case REAL_TYPE:
1930 return fold_build1_loc (loc, NOP_EXPR, type, arg);
1932 case FIXED_POINT_TYPE:
1933 return fold_build1_loc (loc, FIXED_CONVERT_EXPR, type, arg);
1935 case COMPLEX_TYPE:
1936 tem = fold_build1_loc (loc, REALPART_EXPR, TREE_TYPE (orig), arg);
1937 return fold_convert_loc (loc, type, tem);
1939 default:
1940 gcc_unreachable ();
1943 case FIXED_POINT_TYPE:
1944 if (TREE_CODE (arg) == FIXED_CST || TREE_CODE (arg) == INTEGER_CST
1945 || TREE_CODE (arg) == REAL_CST)
1947 tem = fold_convert_const (FIXED_CONVERT_EXPR, type, arg);
1948 if (tem != NULL_TREE)
1949 goto fold_convert_exit;
1952 switch (TREE_CODE (orig))
1954 case FIXED_POINT_TYPE:
1955 case INTEGER_TYPE:
1956 case ENUMERAL_TYPE:
1957 case BOOLEAN_TYPE:
1958 case REAL_TYPE:
1959 return fold_build1_loc (loc, FIXED_CONVERT_EXPR, type, arg);
1961 case COMPLEX_TYPE:
1962 tem = fold_build1_loc (loc, REALPART_EXPR, TREE_TYPE (orig), arg);
1963 return fold_convert_loc (loc, type, tem);
1965 default:
1966 gcc_unreachable ();
1969 case COMPLEX_TYPE:
1970 switch (TREE_CODE (orig))
1972 case INTEGER_TYPE:
1973 case BOOLEAN_TYPE: case ENUMERAL_TYPE:
1974 case POINTER_TYPE: case REFERENCE_TYPE:
1975 case REAL_TYPE:
1976 case FIXED_POINT_TYPE:
1977 return fold_build2_loc (loc, COMPLEX_EXPR, type,
1978 fold_convert_loc (loc, TREE_TYPE (type), arg),
1979 fold_convert_loc (loc, TREE_TYPE (type),
1980 integer_zero_node));
1981 case COMPLEX_TYPE:
1983 tree rpart, ipart;
1985 if (TREE_CODE (arg) == COMPLEX_EXPR)
1987 rpart = fold_convert_loc (loc, TREE_TYPE (type),
1988 TREE_OPERAND (arg, 0));
1989 ipart = fold_convert_loc (loc, TREE_TYPE (type),
1990 TREE_OPERAND (arg, 1));
1991 return fold_build2_loc (loc, COMPLEX_EXPR, type, rpart, ipart);
1994 arg = save_expr (arg);
1995 rpart = fold_build1_loc (loc, REALPART_EXPR, TREE_TYPE (orig), arg);
1996 ipart = fold_build1_loc (loc, IMAGPART_EXPR, TREE_TYPE (orig), arg);
1997 rpart = fold_convert_loc (loc, TREE_TYPE (type), rpart);
1998 ipart = fold_convert_loc (loc, TREE_TYPE (type), ipart);
1999 return fold_build2_loc (loc, COMPLEX_EXPR, type, rpart, ipart);
2002 default:
2003 gcc_unreachable ();
2006 case VECTOR_TYPE:
2007 if (integer_zerop (arg))
2008 return build_zero_vector (type);
2009 gcc_assert (tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (orig)));
2010 gcc_assert (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig)
2011 || TREE_CODE (orig) == VECTOR_TYPE);
2012 return fold_build1_loc (loc, VIEW_CONVERT_EXPR, type, arg);
2014 case VOID_TYPE:
2015 tem = fold_ignored_result (arg);
2016 if (TREE_CODE (tem) == MODIFY_EXPR)
2017 goto fold_convert_exit;
2018 return fold_build1_loc (loc, NOP_EXPR, type, tem);
2020 default:
2021 gcc_unreachable ();
2023 fold_convert_exit:
2024 protected_set_expr_location (tem, loc);
2025 return tem;
2028 /* Return false if expr can be assumed not to be an lvalue, true
2029 otherwise. */
2031 static bool
2032 maybe_lvalue_p (const_tree x)
2034 /* We only need to wrap lvalue tree codes. */
2035 switch (TREE_CODE (x))
2037 case VAR_DECL:
2038 case PARM_DECL:
2039 case RESULT_DECL:
2040 case LABEL_DECL:
2041 case FUNCTION_DECL:
2042 case SSA_NAME:
2044 case COMPONENT_REF:
2045 case MEM_REF:
2046 case INDIRECT_REF:
2047 case MISALIGNED_INDIRECT_REF:
2048 case ARRAY_REF:
2049 case ARRAY_RANGE_REF:
2050 case BIT_FIELD_REF:
2051 case OBJ_TYPE_REF:
2053 case REALPART_EXPR:
2054 case IMAGPART_EXPR:
2055 case PREINCREMENT_EXPR:
2056 case PREDECREMENT_EXPR:
2057 case SAVE_EXPR:
2058 case TRY_CATCH_EXPR:
2059 case WITH_CLEANUP_EXPR:
2060 case COMPOUND_EXPR:
2061 case MODIFY_EXPR:
2062 case TARGET_EXPR:
2063 case COND_EXPR:
2064 case BIND_EXPR:
2065 break;
2067 default:
2068 /* Assume the worst for front-end tree codes. */
2069 if ((int)TREE_CODE (x) >= NUM_TREE_CODES)
2070 break;
2071 return false;
2074 return true;
2077 /* Return an expr equal to X but certainly not valid as an lvalue. */
2079 tree
2080 non_lvalue_loc (location_t loc, tree x)
2082 /* While we are in GIMPLE, NON_LVALUE_EXPR doesn't mean anything to
2083 us. */
2084 if (in_gimple_form)
2085 return x;
2087 if (! maybe_lvalue_p (x))
2088 return x;
2089 x = build1 (NON_LVALUE_EXPR, TREE_TYPE (x), x);
2090 SET_EXPR_LOCATION (x, loc);
2091 return x;
2094 /* Nonzero means lvalues are limited to those valid in pedantic ANSI C.
2095 Zero means allow extended lvalues. */
2097 int pedantic_lvalues;
2099 /* When pedantic, return an expr equal to X but certainly not valid as a
2100 pedantic lvalue. Otherwise, return X. */
2102 static tree
2103 pedantic_non_lvalue_loc (location_t loc, tree x)
2105 if (pedantic_lvalues)
2106 return non_lvalue_loc (loc, x);
2107 protected_set_expr_location (x, loc);
2108 return x;
2111 /* Given a tree comparison code, return the code that is the logical inverse
2112 of the given code. It is not safe to do this for floating-point
2113 comparisons, except for NE_EXPR and EQ_EXPR, so we receive a machine mode
2114 as well: if reversing the comparison is unsafe, return ERROR_MARK. */
2116 enum tree_code
2117 invert_tree_comparison (enum tree_code code, bool honor_nans)
2119 if (honor_nans && flag_trapping_math)
2120 return ERROR_MARK;
2122 switch (code)
2124 case EQ_EXPR:
2125 return NE_EXPR;
2126 case NE_EXPR:
2127 return EQ_EXPR;
2128 case GT_EXPR:
2129 return honor_nans ? UNLE_EXPR : LE_EXPR;
2130 case GE_EXPR:
2131 return honor_nans ? UNLT_EXPR : LT_EXPR;
2132 case LT_EXPR:
2133 return honor_nans ? UNGE_EXPR : GE_EXPR;
2134 case LE_EXPR:
2135 return honor_nans ? UNGT_EXPR : GT_EXPR;
2136 case LTGT_EXPR:
2137 return UNEQ_EXPR;
2138 case UNEQ_EXPR:
2139 return LTGT_EXPR;
2140 case UNGT_EXPR:
2141 return LE_EXPR;
2142 case UNGE_EXPR:
2143 return LT_EXPR;
2144 case UNLT_EXPR:
2145 return GE_EXPR;
2146 case UNLE_EXPR:
2147 return GT_EXPR;
2148 case ORDERED_EXPR:
2149 return UNORDERED_EXPR;
2150 case UNORDERED_EXPR:
2151 return ORDERED_EXPR;
2152 default:
2153 gcc_unreachable ();
2157 /* Similar, but return the comparison that results if the operands are
2158 swapped. This is safe for floating-point. */
2160 enum tree_code
2161 swap_tree_comparison (enum tree_code code)
2163 switch (code)
2165 case EQ_EXPR:
2166 case NE_EXPR:
2167 case ORDERED_EXPR:
2168 case UNORDERED_EXPR:
2169 case LTGT_EXPR:
2170 case UNEQ_EXPR:
2171 return code;
2172 case GT_EXPR:
2173 return LT_EXPR;
2174 case GE_EXPR:
2175 return LE_EXPR;
2176 case LT_EXPR:
2177 return GT_EXPR;
2178 case LE_EXPR:
2179 return GE_EXPR;
2180 case UNGT_EXPR:
2181 return UNLT_EXPR;
2182 case UNGE_EXPR:
2183 return UNLE_EXPR;
2184 case UNLT_EXPR:
2185 return UNGT_EXPR;
2186 case UNLE_EXPR:
2187 return UNGE_EXPR;
2188 default:
2189 gcc_unreachable ();
2194 /* Convert a comparison tree code from an enum tree_code representation
2195 into a compcode bit-based encoding. This function is the inverse of
2196 compcode_to_comparison. */
2198 static enum comparison_code
2199 comparison_to_compcode (enum tree_code code)
2201 switch (code)
2203 case LT_EXPR:
2204 return COMPCODE_LT;
2205 case EQ_EXPR:
2206 return COMPCODE_EQ;
2207 case LE_EXPR:
2208 return COMPCODE_LE;
2209 case GT_EXPR:
2210 return COMPCODE_GT;
2211 case NE_EXPR:
2212 return COMPCODE_NE;
2213 case GE_EXPR:
2214 return COMPCODE_GE;
2215 case ORDERED_EXPR:
2216 return COMPCODE_ORD;
2217 case UNORDERED_EXPR:
2218 return COMPCODE_UNORD;
2219 case UNLT_EXPR:
2220 return COMPCODE_UNLT;
2221 case UNEQ_EXPR:
2222 return COMPCODE_UNEQ;
2223 case UNLE_EXPR:
2224 return COMPCODE_UNLE;
2225 case UNGT_EXPR:
2226 return COMPCODE_UNGT;
2227 case LTGT_EXPR:
2228 return COMPCODE_LTGT;
2229 case UNGE_EXPR:
2230 return COMPCODE_UNGE;
2231 default:
2232 gcc_unreachable ();
2236 /* Convert a compcode bit-based encoding of a comparison operator back
2237 to GCC's enum tree_code representation. This function is the
2238 inverse of comparison_to_compcode. */
2240 static enum tree_code
2241 compcode_to_comparison (enum comparison_code code)
2243 switch (code)
2245 case COMPCODE_LT:
2246 return LT_EXPR;
2247 case COMPCODE_EQ:
2248 return EQ_EXPR;
2249 case COMPCODE_LE:
2250 return LE_EXPR;
2251 case COMPCODE_GT:
2252 return GT_EXPR;
2253 case COMPCODE_NE:
2254 return NE_EXPR;
2255 case COMPCODE_GE:
2256 return GE_EXPR;
2257 case COMPCODE_ORD:
2258 return ORDERED_EXPR;
2259 case COMPCODE_UNORD:
2260 return UNORDERED_EXPR;
2261 case COMPCODE_UNLT:
2262 return UNLT_EXPR;
2263 case COMPCODE_UNEQ:
2264 return UNEQ_EXPR;
2265 case COMPCODE_UNLE:
2266 return UNLE_EXPR;
2267 case COMPCODE_UNGT:
2268 return UNGT_EXPR;
2269 case COMPCODE_LTGT:
2270 return LTGT_EXPR;
2271 case COMPCODE_UNGE:
2272 return UNGE_EXPR;
2273 default:
2274 gcc_unreachable ();
2278 /* Return a tree for the comparison which is the combination of
2279 doing the AND or OR (depending on CODE) of the two operations LCODE
2280 and RCODE on the identical operands LL_ARG and LR_ARG. Take into account
2281 the possibility of trapping if the mode has NaNs, and return NULL_TREE
2282 if this makes the transformation invalid. */
2284 tree
2285 combine_comparisons (location_t loc,
2286 enum tree_code code, enum tree_code lcode,
2287 enum tree_code rcode, tree truth_type,
2288 tree ll_arg, tree lr_arg)
2290 bool honor_nans = HONOR_NANS (TYPE_MODE (TREE_TYPE (ll_arg)));
2291 enum comparison_code lcompcode = comparison_to_compcode (lcode);
2292 enum comparison_code rcompcode = comparison_to_compcode (rcode);
2293 int compcode;
2295 switch (code)
2297 case TRUTH_AND_EXPR: case TRUTH_ANDIF_EXPR:
2298 compcode = lcompcode & rcompcode;
2299 break;
2301 case TRUTH_OR_EXPR: case TRUTH_ORIF_EXPR:
2302 compcode = lcompcode | rcompcode;
2303 break;
2305 default:
2306 return NULL_TREE;
2309 if (!honor_nans)
2311 /* Eliminate unordered comparisons, as well as LTGT and ORD
2312 which are not used unless the mode has NaNs. */
2313 compcode &= ~COMPCODE_UNORD;
2314 if (compcode == COMPCODE_LTGT)
2315 compcode = COMPCODE_NE;
2316 else if (compcode == COMPCODE_ORD)
2317 compcode = COMPCODE_TRUE;
2319 else if (flag_trapping_math)
2321 /* Check that the original operation and the optimized ones will trap
2322 under the same condition. */
2323 bool ltrap = (lcompcode & COMPCODE_UNORD) == 0
2324 && (lcompcode != COMPCODE_EQ)
2325 && (lcompcode != COMPCODE_ORD);
2326 bool rtrap = (rcompcode & COMPCODE_UNORD) == 0
2327 && (rcompcode != COMPCODE_EQ)
2328 && (rcompcode != COMPCODE_ORD);
2329 bool trap = (compcode & COMPCODE_UNORD) == 0
2330 && (compcode != COMPCODE_EQ)
2331 && (compcode != COMPCODE_ORD);
2333 /* In a short-circuited boolean expression the LHS might be
2334 such that the RHS, if evaluated, will never trap. For
2335 example, in ORD (x, y) && (x < y), we evaluate the RHS only
2336 if neither x nor y is NaN. (This is a mixed blessing: for
2337 example, the expression above will never trap, hence
2338 optimizing it to x < y would be invalid). */
2339 if ((code == TRUTH_ORIF_EXPR && (lcompcode & COMPCODE_UNORD))
2340 || (code == TRUTH_ANDIF_EXPR && !(lcompcode & COMPCODE_UNORD)))
2341 rtrap = false;
2343 /* If the comparison was short-circuited, and only the RHS
2344 trapped, we may now generate a spurious trap. */
2345 if (rtrap && !ltrap
2346 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR))
2347 return NULL_TREE;
2349 /* If we changed the conditions that cause a trap, we lose. */
2350 if ((ltrap || rtrap) != trap)
2351 return NULL_TREE;
2354 if (compcode == COMPCODE_TRUE)
2355 return constant_boolean_node (true, truth_type);
2356 else if (compcode == COMPCODE_FALSE)
2357 return constant_boolean_node (false, truth_type);
2358 else
2360 enum tree_code tcode;
2362 tcode = compcode_to_comparison ((enum comparison_code) compcode);
2363 return fold_build2_loc (loc, tcode, truth_type, ll_arg, lr_arg);
2367 /* Return nonzero if two operands (typically of the same tree node)
2368 are necessarily equal. If either argument has side-effects this
2369 function returns zero. FLAGS modifies behavior as follows:
2371 If OEP_ONLY_CONST is set, only return nonzero for constants.
2372 This function tests whether the operands are indistinguishable;
2373 it does not test whether they are equal using C's == operation.
2374 The distinction is important for IEEE floating point, because
2375 (1) -0.0 and 0.0 are distinguishable, but -0.0==0.0, and
2376 (2) two NaNs may be indistinguishable, but NaN!=NaN.
2378 If OEP_ONLY_CONST is unset, a VAR_DECL is considered equal to itself
2379 even though it may hold multiple values during a function.
2380 This is because a GCC tree node guarantees that nothing else is
2381 executed between the evaluation of its "operands" (which may often
2382 be evaluated in arbitrary order). Hence if the operands themselves
2383 don't side-effect, the VAR_DECLs, PARM_DECLs etc... must hold the
2384 same value in each operand/subexpression. Hence leaving OEP_ONLY_CONST
2385 unset means assuming isochronic (or instantaneous) tree equivalence.
2386 Unless comparing arbitrary expression trees, such as from different
2387 statements, this flag can usually be left unset.
2389 If OEP_PURE_SAME is set, then pure functions with identical arguments
2390 are considered the same. It is used when the caller has other ways
2391 to ensure that global memory is unchanged in between. */
2394 operand_equal_p (const_tree arg0, const_tree arg1, unsigned int flags)
2396 /* If either is ERROR_MARK, they aren't equal. */
2397 if (TREE_CODE (arg0) == ERROR_MARK || TREE_CODE (arg1) == ERROR_MARK
2398 || TREE_TYPE (arg0) == error_mark_node
2399 || TREE_TYPE (arg1) == error_mark_node)
2400 return 0;
2402 /* Similar, if either does not have a type (like a released SSA name),
2403 they aren't equal. */
2404 if (!TREE_TYPE (arg0) || !TREE_TYPE (arg1))
2405 return 0;
2407 /* Check equality of integer constants before bailing out due to
2408 precision differences. */
2409 if (TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
2410 return tree_int_cst_equal (arg0, arg1);
2412 /* If both types don't have the same signedness, then we can't consider
2413 them equal. We must check this before the STRIP_NOPS calls
2414 because they may change the signedness of the arguments. As pointers
2415 strictly don't have a signedness, require either two pointers or
2416 two non-pointers as well. */
2417 if (TYPE_UNSIGNED (TREE_TYPE (arg0)) != TYPE_UNSIGNED (TREE_TYPE (arg1))
2418 || POINTER_TYPE_P (TREE_TYPE (arg0)) != POINTER_TYPE_P (TREE_TYPE (arg1)))
2419 return 0;
2421 /* We cannot consider pointers to different address space equal. */
2422 if (POINTER_TYPE_P (TREE_TYPE (arg0)) && POINTER_TYPE_P (TREE_TYPE (arg1))
2423 && (TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (arg0)))
2424 != TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (arg1)))))
2425 return 0;
2427 /* If both types don't have the same precision, then it is not safe
2428 to strip NOPs. */
2429 if (TYPE_PRECISION (TREE_TYPE (arg0)) != TYPE_PRECISION (TREE_TYPE (arg1)))
2430 return 0;
2432 STRIP_NOPS (arg0);
2433 STRIP_NOPS (arg1);
2435 /* In case both args are comparisons but with different comparison
2436 code, try to swap the comparison operands of one arg to produce
2437 a match and compare that variant. */
2438 if (TREE_CODE (arg0) != TREE_CODE (arg1)
2439 && COMPARISON_CLASS_P (arg0)
2440 && COMPARISON_CLASS_P (arg1))
2442 enum tree_code swap_code = swap_tree_comparison (TREE_CODE (arg1));
2444 if (TREE_CODE (arg0) == swap_code)
2445 return operand_equal_p (TREE_OPERAND (arg0, 0),
2446 TREE_OPERAND (arg1, 1), flags)
2447 && operand_equal_p (TREE_OPERAND (arg0, 1),
2448 TREE_OPERAND (arg1, 0), flags);
2451 if (TREE_CODE (arg0) != TREE_CODE (arg1)
2452 /* This is needed for conversions and for COMPONENT_REF.
2453 Might as well play it safe and always test this. */
2454 || TREE_CODE (TREE_TYPE (arg0)) == ERROR_MARK
2455 || TREE_CODE (TREE_TYPE (arg1)) == ERROR_MARK
2456 || TYPE_MODE (TREE_TYPE (arg0)) != TYPE_MODE (TREE_TYPE (arg1)))
2457 return 0;
2459 /* If ARG0 and ARG1 are the same SAVE_EXPR, they are necessarily equal.
2460 We don't care about side effects in that case because the SAVE_EXPR
2461 takes care of that for us. In all other cases, two expressions are
2462 equal if they have no side effects. If we have two identical
2463 expressions with side effects that should be treated the same due
2464 to the only side effects being identical SAVE_EXPR's, that will
2465 be detected in the recursive calls below. */
2466 if (arg0 == arg1 && ! (flags & OEP_ONLY_CONST)
2467 && (TREE_CODE (arg0) == SAVE_EXPR
2468 || (! TREE_SIDE_EFFECTS (arg0) && ! TREE_SIDE_EFFECTS (arg1))))
2469 return 1;
2471 /* Next handle constant cases, those for which we can return 1 even
2472 if ONLY_CONST is set. */
2473 if (TREE_CONSTANT (arg0) && TREE_CONSTANT (arg1))
2474 switch (TREE_CODE (arg0))
2476 case INTEGER_CST:
2477 return tree_int_cst_equal (arg0, arg1);
2479 case FIXED_CST:
2480 return FIXED_VALUES_IDENTICAL (TREE_FIXED_CST (arg0),
2481 TREE_FIXED_CST (arg1));
2483 case REAL_CST:
2484 if (REAL_VALUES_IDENTICAL (TREE_REAL_CST (arg0),
2485 TREE_REAL_CST (arg1)))
2486 return 1;
2489 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0))))
2491 /* If we do not distinguish between signed and unsigned zero,
2492 consider them equal. */
2493 if (real_zerop (arg0) && real_zerop (arg1))
2494 return 1;
2496 return 0;
2498 case VECTOR_CST:
2500 tree v1, v2;
2502 v1 = TREE_VECTOR_CST_ELTS (arg0);
2503 v2 = TREE_VECTOR_CST_ELTS (arg1);
2504 while (v1 && v2)
2506 if (!operand_equal_p (TREE_VALUE (v1), TREE_VALUE (v2),
2507 flags))
2508 return 0;
2509 v1 = TREE_CHAIN (v1);
2510 v2 = TREE_CHAIN (v2);
2513 return v1 == v2;
2516 case COMPLEX_CST:
2517 return (operand_equal_p (TREE_REALPART (arg0), TREE_REALPART (arg1),
2518 flags)
2519 && operand_equal_p (TREE_IMAGPART (arg0), TREE_IMAGPART (arg1),
2520 flags));
2522 case STRING_CST:
2523 return (TREE_STRING_LENGTH (arg0) == TREE_STRING_LENGTH (arg1)
2524 && ! memcmp (TREE_STRING_POINTER (arg0),
2525 TREE_STRING_POINTER (arg1),
2526 TREE_STRING_LENGTH (arg0)));
2528 case ADDR_EXPR:
2529 return operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 0),
2531 default:
2532 break;
2535 if (flags & OEP_ONLY_CONST)
2536 return 0;
2538 /* Define macros to test an operand from arg0 and arg1 for equality and a
2539 variant that allows null and views null as being different from any
2540 non-null value. In the latter case, if either is null, the both
2541 must be; otherwise, do the normal comparison. */
2542 #define OP_SAME(N) operand_equal_p (TREE_OPERAND (arg0, N), \
2543 TREE_OPERAND (arg1, N), flags)
2545 #define OP_SAME_WITH_NULL(N) \
2546 ((!TREE_OPERAND (arg0, N) || !TREE_OPERAND (arg1, N)) \
2547 ? TREE_OPERAND (arg0, N) == TREE_OPERAND (arg1, N) : OP_SAME (N))
2549 switch (TREE_CODE_CLASS (TREE_CODE (arg0)))
2551 case tcc_unary:
2552 /* Two conversions are equal only if signedness and modes match. */
2553 switch (TREE_CODE (arg0))
2555 CASE_CONVERT:
2556 case FIX_TRUNC_EXPR:
2557 if (TYPE_UNSIGNED (TREE_TYPE (arg0))
2558 != TYPE_UNSIGNED (TREE_TYPE (arg1)))
2559 return 0;
2560 break;
2561 default:
2562 break;
2565 return OP_SAME (0);
2568 case tcc_comparison:
2569 case tcc_binary:
2570 if (OP_SAME (0) && OP_SAME (1))
2571 return 1;
2573 /* For commutative ops, allow the other order. */
2574 return (commutative_tree_code (TREE_CODE (arg0))
2575 && operand_equal_p (TREE_OPERAND (arg0, 0),
2576 TREE_OPERAND (arg1, 1), flags)
2577 && operand_equal_p (TREE_OPERAND (arg0, 1),
2578 TREE_OPERAND (arg1, 0), flags));
2580 case tcc_reference:
2581 /* If either of the pointer (or reference) expressions we are
2582 dereferencing contain a side effect, these cannot be equal. */
2583 if (TREE_SIDE_EFFECTS (arg0)
2584 || TREE_SIDE_EFFECTS (arg1))
2585 return 0;
2587 switch (TREE_CODE (arg0))
2589 case INDIRECT_REF:
2590 case MISALIGNED_INDIRECT_REF:
2591 case REALPART_EXPR:
2592 case IMAGPART_EXPR:
2593 return OP_SAME (0);
2595 case MEM_REF:
2596 /* Require equal access sizes. We can have incomplete types
2597 for array references of variable-sized arrays from the
2598 Fortran frontent though. */
2599 return ((TYPE_SIZE (TREE_TYPE (arg0)) == TYPE_SIZE (TREE_TYPE (arg1))
2600 || (TYPE_SIZE (TREE_TYPE (arg0))
2601 && TYPE_SIZE (TREE_TYPE (arg1))
2602 && operand_equal_p (TYPE_SIZE (TREE_TYPE (arg0)),
2603 TYPE_SIZE (TREE_TYPE (arg1)), flags)))
2604 && OP_SAME (0) && OP_SAME (1));
2606 case ARRAY_REF:
2607 case ARRAY_RANGE_REF:
2608 /* Operands 2 and 3 may be null.
2609 Compare the array index by value if it is constant first as we
2610 may have different types but same value here. */
2611 return (OP_SAME (0)
2612 && (tree_int_cst_equal (TREE_OPERAND (arg0, 1),
2613 TREE_OPERAND (arg1, 1))
2614 || OP_SAME (1))
2615 && OP_SAME_WITH_NULL (2)
2616 && OP_SAME_WITH_NULL (3));
2618 case COMPONENT_REF:
2619 /* Handle operand 2 the same as for ARRAY_REF. Operand 0
2620 may be NULL when we're called to compare MEM_EXPRs. */
2621 return OP_SAME_WITH_NULL (0)
2622 && OP_SAME (1)
2623 && OP_SAME_WITH_NULL (2);
2625 case BIT_FIELD_REF:
2626 return OP_SAME (0) && OP_SAME (1) && OP_SAME (2);
2628 default:
2629 return 0;
2632 case tcc_expression:
2633 switch (TREE_CODE (arg0))
2635 case ADDR_EXPR:
2636 case TRUTH_NOT_EXPR:
2637 return OP_SAME (0);
2639 case TRUTH_ANDIF_EXPR:
2640 case TRUTH_ORIF_EXPR:
2641 return OP_SAME (0) && OP_SAME (1);
2643 case TRUTH_AND_EXPR:
2644 case TRUTH_OR_EXPR:
2645 case TRUTH_XOR_EXPR:
2646 if (OP_SAME (0) && OP_SAME (1))
2647 return 1;
2649 /* Otherwise take into account this is a commutative operation. */
2650 return (operand_equal_p (TREE_OPERAND (arg0, 0),
2651 TREE_OPERAND (arg1, 1), flags)
2652 && operand_equal_p (TREE_OPERAND (arg0, 1),
2653 TREE_OPERAND (arg1, 0), flags));
2655 case COND_EXPR:
2656 return OP_SAME (0) && OP_SAME (1) && OP_SAME (2);
2658 default:
2659 return 0;
2662 case tcc_vl_exp:
2663 switch (TREE_CODE (arg0))
2665 case CALL_EXPR:
2666 /* If the CALL_EXPRs call different functions, then they
2667 clearly can not be equal. */
2668 if (! operand_equal_p (CALL_EXPR_FN (arg0), CALL_EXPR_FN (arg1),
2669 flags))
2670 return 0;
2673 unsigned int cef = call_expr_flags (arg0);
2674 if (flags & OEP_PURE_SAME)
2675 cef &= ECF_CONST | ECF_PURE;
2676 else
2677 cef &= ECF_CONST;
2678 if (!cef)
2679 return 0;
2682 /* Now see if all the arguments are the same. */
2684 const_call_expr_arg_iterator iter0, iter1;
2685 const_tree a0, a1;
2686 for (a0 = first_const_call_expr_arg (arg0, &iter0),
2687 a1 = first_const_call_expr_arg (arg1, &iter1);
2688 a0 && a1;
2689 a0 = next_const_call_expr_arg (&iter0),
2690 a1 = next_const_call_expr_arg (&iter1))
2691 if (! operand_equal_p (a0, a1, flags))
2692 return 0;
2694 /* If we get here and both argument lists are exhausted
2695 then the CALL_EXPRs are equal. */
2696 return ! (a0 || a1);
2698 default:
2699 return 0;
2702 case tcc_declaration:
2703 /* Consider __builtin_sqrt equal to sqrt. */
2704 return (TREE_CODE (arg0) == FUNCTION_DECL
2705 && DECL_BUILT_IN (arg0) && DECL_BUILT_IN (arg1)
2706 && DECL_BUILT_IN_CLASS (arg0) == DECL_BUILT_IN_CLASS (arg1)
2707 && DECL_FUNCTION_CODE (arg0) == DECL_FUNCTION_CODE (arg1));
2709 default:
2710 return 0;
2713 #undef OP_SAME
2714 #undef OP_SAME_WITH_NULL
2717 /* Similar to operand_equal_p, but see if ARG0 might have been made by
2718 shorten_compare from ARG1 when ARG1 was being compared with OTHER.
2720 When in doubt, return 0. */
2722 static int
2723 operand_equal_for_comparison_p (tree arg0, tree arg1, tree other)
2725 int unsignedp1, unsignedpo;
2726 tree primarg0, primarg1, primother;
2727 unsigned int correct_width;
2729 if (operand_equal_p (arg0, arg1, 0))
2730 return 1;
2732 if (! INTEGRAL_TYPE_P (TREE_TYPE (arg0))
2733 || ! INTEGRAL_TYPE_P (TREE_TYPE (arg1)))
2734 return 0;
2736 /* Discard any conversions that don't change the modes of ARG0 and ARG1
2737 and see if the inner values are the same. This removes any
2738 signedness comparison, which doesn't matter here. */
2739 primarg0 = arg0, primarg1 = arg1;
2740 STRIP_NOPS (primarg0);
2741 STRIP_NOPS (primarg1);
2742 if (operand_equal_p (primarg0, primarg1, 0))
2743 return 1;
2745 /* Duplicate what shorten_compare does to ARG1 and see if that gives the
2746 actual comparison operand, ARG0.
2748 First throw away any conversions to wider types
2749 already present in the operands. */
2751 primarg1 = get_narrower (arg1, &unsignedp1);
2752 primother = get_narrower (other, &unsignedpo);
2754 correct_width = TYPE_PRECISION (TREE_TYPE (arg1));
2755 if (unsignedp1 == unsignedpo
2756 && TYPE_PRECISION (TREE_TYPE (primarg1)) < correct_width
2757 && TYPE_PRECISION (TREE_TYPE (primother)) < correct_width)
2759 tree type = TREE_TYPE (arg0);
2761 /* Make sure shorter operand is extended the right way
2762 to match the longer operand. */
2763 primarg1 = fold_convert (signed_or_unsigned_type_for
2764 (unsignedp1, TREE_TYPE (primarg1)), primarg1);
2766 if (operand_equal_p (arg0, fold_convert (type, primarg1), 0))
2767 return 1;
2770 return 0;
2773 /* See if ARG is an expression that is either a comparison or is performing
2774 arithmetic on comparisons. The comparisons must only be comparing
2775 two different values, which will be stored in *CVAL1 and *CVAL2; if
2776 they are nonzero it means that some operands have already been found.
2777 No variables may be used anywhere else in the expression except in the
2778 comparisons. If SAVE_P is true it means we removed a SAVE_EXPR around
2779 the expression and save_expr needs to be called with CVAL1 and CVAL2.
2781 If this is true, return 1. Otherwise, return zero. */
2783 static int
2784 twoval_comparison_p (tree arg, tree *cval1, tree *cval2, int *save_p)
2786 enum tree_code code = TREE_CODE (arg);
2787 enum tree_code_class tclass = TREE_CODE_CLASS (code);
2789 /* We can handle some of the tcc_expression cases here. */
2790 if (tclass == tcc_expression && code == TRUTH_NOT_EXPR)
2791 tclass = tcc_unary;
2792 else if (tclass == tcc_expression
2793 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR
2794 || code == COMPOUND_EXPR))
2795 tclass = tcc_binary;
2797 else if (tclass == tcc_expression && code == SAVE_EXPR
2798 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (arg, 0)))
2800 /* If we've already found a CVAL1 or CVAL2, this expression is
2801 two complex to handle. */
2802 if (*cval1 || *cval2)
2803 return 0;
2805 tclass = tcc_unary;
2806 *save_p = 1;
2809 switch (tclass)
2811 case tcc_unary:
2812 return twoval_comparison_p (TREE_OPERAND (arg, 0), cval1, cval2, save_p);
2814 case tcc_binary:
2815 return (twoval_comparison_p (TREE_OPERAND (arg, 0), cval1, cval2, save_p)
2816 && twoval_comparison_p (TREE_OPERAND (arg, 1),
2817 cval1, cval2, save_p));
2819 case tcc_constant:
2820 return 1;
2822 case tcc_expression:
2823 if (code == COND_EXPR)
2824 return (twoval_comparison_p (TREE_OPERAND (arg, 0),
2825 cval1, cval2, save_p)
2826 && twoval_comparison_p (TREE_OPERAND (arg, 1),
2827 cval1, cval2, save_p)
2828 && twoval_comparison_p (TREE_OPERAND (arg, 2),
2829 cval1, cval2, save_p));
2830 return 0;
2832 case tcc_comparison:
2833 /* First see if we can handle the first operand, then the second. For
2834 the second operand, we know *CVAL1 can't be zero. It must be that
2835 one side of the comparison is each of the values; test for the
2836 case where this isn't true by failing if the two operands
2837 are the same. */
2839 if (operand_equal_p (TREE_OPERAND (arg, 0),
2840 TREE_OPERAND (arg, 1), 0))
2841 return 0;
2843 if (*cval1 == 0)
2844 *cval1 = TREE_OPERAND (arg, 0);
2845 else if (operand_equal_p (*cval1, TREE_OPERAND (arg, 0), 0))
2847 else if (*cval2 == 0)
2848 *cval2 = TREE_OPERAND (arg, 0);
2849 else if (operand_equal_p (*cval2, TREE_OPERAND (arg, 0), 0))
2851 else
2852 return 0;
2854 if (operand_equal_p (*cval1, TREE_OPERAND (arg, 1), 0))
2856 else if (*cval2 == 0)
2857 *cval2 = TREE_OPERAND (arg, 1);
2858 else if (operand_equal_p (*cval2, TREE_OPERAND (arg, 1), 0))
2860 else
2861 return 0;
2863 return 1;
2865 default:
2866 return 0;
2870 /* ARG is a tree that is known to contain just arithmetic operations and
2871 comparisons. Evaluate the operations in the tree substituting NEW0 for
2872 any occurrence of OLD0 as an operand of a comparison and likewise for
2873 NEW1 and OLD1. */
2875 static tree
2876 eval_subst (location_t loc, tree arg, tree old0, tree new0,
2877 tree old1, tree new1)
2879 tree type = TREE_TYPE (arg);
2880 enum tree_code code = TREE_CODE (arg);
2881 enum tree_code_class tclass = TREE_CODE_CLASS (code);
2883 /* We can handle some of the tcc_expression cases here. */
2884 if (tclass == tcc_expression && code == TRUTH_NOT_EXPR)
2885 tclass = tcc_unary;
2886 else if (tclass == tcc_expression
2887 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR))
2888 tclass = tcc_binary;
2890 switch (tclass)
2892 case tcc_unary:
2893 return fold_build1_loc (loc, code, type,
2894 eval_subst (loc, TREE_OPERAND (arg, 0),
2895 old0, new0, old1, new1));
2897 case tcc_binary:
2898 return fold_build2_loc (loc, code, type,
2899 eval_subst (loc, TREE_OPERAND (arg, 0),
2900 old0, new0, old1, new1),
2901 eval_subst (loc, TREE_OPERAND (arg, 1),
2902 old0, new0, old1, new1));
2904 case tcc_expression:
2905 switch (code)
2907 case SAVE_EXPR:
2908 return eval_subst (loc, TREE_OPERAND (arg, 0), old0, new0,
2909 old1, new1);
2911 case COMPOUND_EXPR:
2912 return eval_subst (loc, TREE_OPERAND (arg, 1), old0, new0,
2913 old1, new1);
2915 case COND_EXPR:
2916 return fold_build3_loc (loc, code, type,
2917 eval_subst (loc, TREE_OPERAND (arg, 0),
2918 old0, new0, old1, new1),
2919 eval_subst (loc, TREE_OPERAND (arg, 1),
2920 old0, new0, old1, new1),
2921 eval_subst (loc, TREE_OPERAND (arg, 2),
2922 old0, new0, old1, new1));
2923 default:
2924 break;
2926 /* Fall through - ??? */
2928 case tcc_comparison:
2930 tree arg0 = TREE_OPERAND (arg, 0);
2931 tree arg1 = TREE_OPERAND (arg, 1);
2933 /* We need to check both for exact equality and tree equality. The
2934 former will be true if the operand has a side-effect. In that
2935 case, we know the operand occurred exactly once. */
2937 if (arg0 == old0 || operand_equal_p (arg0, old0, 0))
2938 arg0 = new0;
2939 else if (arg0 == old1 || operand_equal_p (arg0, old1, 0))
2940 arg0 = new1;
2942 if (arg1 == old0 || operand_equal_p (arg1, old0, 0))
2943 arg1 = new0;
2944 else if (arg1 == old1 || operand_equal_p (arg1, old1, 0))
2945 arg1 = new1;
2947 return fold_build2_loc (loc, code, type, arg0, arg1);
2950 default:
2951 return arg;
2955 /* Return a tree for the case when the result of an expression is RESULT
2956 converted to TYPE and OMITTED was previously an operand of the expression
2957 but is now not needed (e.g., we folded OMITTED * 0).
2959 If OMITTED has side effects, we must evaluate it. Otherwise, just do
2960 the conversion of RESULT to TYPE. */
2962 tree
2963 omit_one_operand_loc (location_t loc, tree type, tree result, tree omitted)
2965 tree t = fold_convert_loc (loc, type, result);
2967 /* If the resulting operand is an empty statement, just return the omitted
2968 statement casted to void. */
2969 if (IS_EMPTY_STMT (t) && TREE_SIDE_EFFECTS (omitted))
2971 t = build1 (NOP_EXPR, void_type_node, fold_ignored_result (omitted));
2972 goto omit_one_operand_exit;
2975 if (TREE_SIDE_EFFECTS (omitted))
2977 t = build2 (COMPOUND_EXPR, type, fold_ignored_result (omitted), t);
2978 goto omit_one_operand_exit;
2981 return non_lvalue_loc (loc, t);
2983 omit_one_operand_exit:
2984 protected_set_expr_location (t, loc);
2985 return t;
2988 /* Similar, but call pedantic_non_lvalue instead of non_lvalue. */
2990 static tree
2991 pedantic_omit_one_operand_loc (location_t loc, tree type, tree result,
2992 tree omitted)
2994 tree t = fold_convert_loc (loc, type, result);
2996 /* If the resulting operand is an empty statement, just return the omitted
2997 statement casted to void. */
2998 if (IS_EMPTY_STMT (t) && TREE_SIDE_EFFECTS (omitted))
3000 t = build1 (NOP_EXPR, void_type_node, fold_ignored_result (omitted));
3001 goto pedantic_omit_one_operand_exit;
3004 if (TREE_SIDE_EFFECTS (omitted))
3006 t = build2 (COMPOUND_EXPR, type, fold_ignored_result (omitted), t);
3007 goto pedantic_omit_one_operand_exit;
3010 return pedantic_non_lvalue_loc (loc, t);
3012 pedantic_omit_one_operand_exit:
3013 protected_set_expr_location (t, loc);
3014 return t;
3017 /* Return a tree for the case when the result of an expression is RESULT
3018 converted to TYPE and OMITTED1 and OMITTED2 were previously operands
3019 of the expression but are now not needed.
3021 If OMITTED1 or OMITTED2 has side effects, they must be evaluated.
3022 If both OMITTED1 and OMITTED2 have side effects, OMITTED1 is
3023 evaluated before OMITTED2. Otherwise, if neither has side effects,
3024 just do the conversion of RESULT to TYPE. */
3026 tree
3027 omit_two_operands_loc (location_t loc, tree type, tree result,
3028 tree omitted1, tree omitted2)
3030 tree t = fold_convert_loc (loc, type, result);
3032 if (TREE_SIDE_EFFECTS (omitted2))
3034 t = build2 (COMPOUND_EXPR, type, omitted2, t);
3035 SET_EXPR_LOCATION (t, loc);
3037 if (TREE_SIDE_EFFECTS (omitted1))
3039 t = build2 (COMPOUND_EXPR, type, omitted1, t);
3040 SET_EXPR_LOCATION (t, loc);
3043 return TREE_CODE (t) != COMPOUND_EXPR ? non_lvalue_loc (loc, t) : t;
3047 /* Return a simplified tree node for the truth-negation of ARG. This
3048 never alters ARG itself. We assume that ARG is an operation that
3049 returns a truth value (0 or 1).
3051 FIXME: one would think we would fold the result, but it causes
3052 problems with the dominator optimizer. */
3054 tree
3055 fold_truth_not_expr (location_t loc, tree arg)
3057 tree t, type = TREE_TYPE (arg);
3058 enum tree_code code = TREE_CODE (arg);
3059 location_t loc1, loc2;
3061 /* If this is a comparison, we can simply invert it, except for
3062 floating-point non-equality comparisons, in which case we just
3063 enclose a TRUTH_NOT_EXPR around what we have. */
3065 if (TREE_CODE_CLASS (code) == tcc_comparison)
3067 tree op_type = TREE_TYPE (TREE_OPERAND (arg, 0));
3068 if (FLOAT_TYPE_P (op_type)
3069 && flag_trapping_math
3070 && code != ORDERED_EXPR && code != UNORDERED_EXPR
3071 && code != NE_EXPR && code != EQ_EXPR)
3072 return NULL_TREE;
3074 code = invert_tree_comparison (code, HONOR_NANS (TYPE_MODE (op_type)));
3075 if (code == ERROR_MARK)
3076 return NULL_TREE;
3078 t = build2 (code, type, TREE_OPERAND (arg, 0), TREE_OPERAND (arg, 1));
3079 SET_EXPR_LOCATION (t, loc);
3080 return t;
3083 switch (code)
3085 case INTEGER_CST:
3086 return constant_boolean_node (integer_zerop (arg), type);
3088 case TRUTH_AND_EXPR:
3089 loc1 = EXPR_LOCATION (TREE_OPERAND (arg, 0));
3090 loc2 = EXPR_LOCATION (TREE_OPERAND (arg, 1));
3091 if (loc1 == UNKNOWN_LOCATION)
3092 loc1 = loc;
3093 if (loc2 == UNKNOWN_LOCATION)
3094 loc2 = loc;
3095 t = build2 (TRUTH_OR_EXPR, type,
3096 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)),
3097 invert_truthvalue_loc (loc2, TREE_OPERAND (arg, 1)));
3098 break;
3100 case TRUTH_OR_EXPR:
3101 loc1 = EXPR_LOCATION (TREE_OPERAND (arg, 0));
3102 loc2 = EXPR_LOCATION (TREE_OPERAND (arg, 1));
3103 if (loc1 == UNKNOWN_LOCATION)
3104 loc1 = loc;
3105 if (loc2 == UNKNOWN_LOCATION)
3106 loc2 = loc;
3107 t = build2 (TRUTH_AND_EXPR, type,
3108 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)),
3109 invert_truthvalue_loc (loc2, TREE_OPERAND (arg, 1)));
3110 break;
3112 case TRUTH_XOR_EXPR:
3113 /* Here we can invert either operand. We invert the first operand
3114 unless the second operand is a TRUTH_NOT_EXPR in which case our
3115 result is the XOR of the first operand with the inside of the
3116 negation of the second operand. */
3118 if (TREE_CODE (TREE_OPERAND (arg, 1)) == TRUTH_NOT_EXPR)
3119 t = build2 (TRUTH_XOR_EXPR, type, TREE_OPERAND (arg, 0),
3120 TREE_OPERAND (TREE_OPERAND (arg, 1), 0));
3121 else
3122 t = build2 (TRUTH_XOR_EXPR, type,
3123 invert_truthvalue_loc (loc, TREE_OPERAND (arg, 0)),
3124 TREE_OPERAND (arg, 1));
3125 break;
3127 case TRUTH_ANDIF_EXPR:
3128 loc1 = EXPR_LOCATION (TREE_OPERAND (arg, 0));
3129 loc2 = EXPR_LOCATION (TREE_OPERAND (arg, 1));
3130 if (loc1 == UNKNOWN_LOCATION)
3131 loc1 = loc;
3132 if (loc2 == UNKNOWN_LOCATION)
3133 loc2 = loc;
3134 t = build2 (TRUTH_ORIF_EXPR, type,
3135 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)),
3136 invert_truthvalue_loc (loc2, TREE_OPERAND (arg, 1)));
3137 break;
3139 case TRUTH_ORIF_EXPR:
3140 loc1 = EXPR_LOCATION (TREE_OPERAND (arg, 0));
3141 loc2 = EXPR_LOCATION (TREE_OPERAND (arg, 1));
3142 if (loc1 == UNKNOWN_LOCATION)
3143 loc1 = loc;
3144 if (loc2 == UNKNOWN_LOCATION)
3145 loc2 = loc;
3146 t = build2 (TRUTH_ANDIF_EXPR, type,
3147 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)),
3148 invert_truthvalue_loc (loc2, TREE_OPERAND (arg, 1)));
3149 break;
3151 case TRUTH_NOT_EXPR:
3152 return TREE_OPERAND (arg, 0);
3154 case COND_EXPR:
3156 tree arg1 = TREE_OPERAND (arg, 1);
3157 tree arg2 = TREE_OPERAND (arg, 2);
3159 loc1 = EXPR_LOCATION (TREE_OPERAND (arg, 1));
3160 loc2 = EXPR_LOCATION (TREE_OPERAND (arg, 2));
3161 if (loc1 == UNKNOWN_LOCATION)
3162 loc1 = loc;
3163 if (loc2 == UNKNOWN_LOCATION)
3164 loc2 = loc;
3166 /* A COND_EXPR may have a throw as one operand, which
3167 then has void type. Just leave void operands
3168 as they are. */
3169 t = build3 (COND_EXPR, type, TREE_OPERAND (arg, 0),
3170 VOID_TYPE_P (TREE_TYPE (arg1))
3171 ? arg1 : invert_truthvalue_loc (loc1, arg1),
3172 VOID_TYPE_P (TREE_TYPE (arg2))
3173 ? arg2 : invert_truthvalue_loc (loc2, arg2));
3174 break;
3177 case COMPOUND_EXPR:
3178 loc1 = EXPR_LOCATION (TREE_OPERAND (arg, 1));
3179 if (loc1 == UNKNOWN_LOCATION)
3180 loc1 = loc;
3181 t = build2 (COMPOUND_EXPR, type,
3182 TREE_OPERAND (arg, 0),
3183 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 1)));
3184 break;
3186 case NON_LVALUE_EXPR:
3187 loc1 = EXPR_LOCATION (TREE_OPERAND (arg, 0));
3188 if (loc1 == UNKNOWN_LOCATION)
3189 loc1 = loc;
3190 return invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0));
3192 CASE_CONVERT:
3193 if (TREE_CODE (TREE_TYPE (arg)) == BOOLEAN_TYPE)
3195 t = build1 (TRUTH_NOT_EXPR, type, arg);
3196 break;
3199 /* ... fall through ... */
3201 case FLOAT_EXPR:
3202 loc1 = EXPR_LOCATION (TREE_OPERAND (arg, 0));
3203 if (loc1 == UNKNOWN_LOCATION)
3204 loc1 = loc;
3205 t = build1 (TREE_CODE (arg), type,
3206 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)));
3207 break;
3209 case BIT_AND_EXPR:
3210 if (!integer_onep (TREE_OPERAND (arg, 1)))
3211 return NULL_TREE;
3212 t = build2 (EQ_EXPR, type, arg, build_int_cst (type, 0));
3213 break;
3215 case SAVE_EXPR:
3216 t = build1 (TRUTH_NOT_EXPR, type, arg);
3217 break;
3219 case CLEANUP_POINT_EXPR:
3220 loc1 = EXPR_LOCATION (TREE_OPERAND (arg, 0));
3221 if (loc1 == UNKNOWN_LOCATION)
3222 loc1 = loc;
3223 t = build1 (CLEANUP_POINT_EXPR, type,
3224 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)));
3225 break;
3227 default:
3228 t = NULL_TREE;
3229 break;
3232 if (t)
3233 SET_EXPR_LOCATION (t, loc);
3235 return t;
3238 /* Return a simplified tree node for the truth-negation of ARG. This
3239 never alters ARG itself. We assume that ARG is an operation that
3240 returns a truth value (0 or 1).
3242 FIXME: one would think we would fold the result, but it causes
3243 problems with the dominator optimizer. */
3245 tree
3246 invert_truthvalue_loc (location_t loc, tree arg)
3248 tree tem;
3250 if (TREE_CODE (arg) == ERROR_MARK)
3251 return arg;
3253 tem = fold_truth_not_expr (loc, arg);
3254 if (!tem)
3256 tem = build1 (TRUTH_NOT_EXPR, TREE_TYPE (arg), arg);
3257 SET_EXPR_LOCATION (tem, loc);
3260 return tem;
3263 /* Given a bit-wise operation CODE applied to ARG0 and ARG1, see if both
3264 operands are another bit-wise operation with a common input. If so,
3265 distribute the bit operations to save an operation and possibly two if
3266 constants are involved. For example, convert
3267 (A | B) & (A | C) into A | (B & C)
3268 Further simplification will occur if B and C are constants.
3270 If this optimization cannot be done, 0 will be returned. */
3272 static tree
3273 distribute_bit_expr (location_t loc, enum tree_code code, tree type,
3274 tree arg0, tree arg1)
3276 tree common;
3277 tree left, right;
3279 if (TREE_CODE (arg0) != TREE_CODE (arg1)
3280 || TREE_CODE (arg0) == code
3281 || (TREE_CODE (arg0) != BIT_AND_EXPR
3282 && TREE_CODE (arg0) != BIT_IOR_EXPR))
3283 return 0;
3285 if (operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 0), 0))
3287 common = TREE_OPERAND (arg0, 0);
3288 left = TREE_OPERAND (arg0, 1);
3289 right = TREE_OPERAND (arg1, 1);
3291 else if (operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 1), 0))
3293 common = TREE_OPERAND (arg0, 0);
3294 left = TREE_OPERAND (arg0, 1);
3295 right = TREE_OPERAND (arg1, 0);
3297 else if (operand_equal_p (TREE_OPERAND (arg0, 1), TREE_OPERAND (arg1, 0), 0))
3299 common = TREE_OPERAND (arg0, 1);
3300 left = TREE_OPERAND (arg0, 0);
3301 right = TREE_OPERAND (arg1, 1);
3303 else if (operand_equal_p (TREE_OPERAND (arg0, 1), TREE_OPERAND (arg1, 1), 0))
3305 common = TREE_OPERAND (arg0, 1);
3306 left = TREE_OPERAND (arg0, 0);
3307 right = TREE_OPERAND (arg1, 0);
3309 else
3310 return 0;
3312 common = fold_convert_loc (loc, type, common);
3313 left = fold_convert_loc (loc, type, left);
3314 right = fold_convert_loc (loc, type, right);
3315 return fold_build2_loc (loc, TREE_CODE (arg0), type, common,
3316 fold_build2_loc (loc, code, type, left, right));
3319 /* Knowing that ARG0 and ARG1 are both RDIV_EXPRs, simplify a binary operation
3320 with code CODE. This optimization is unsafe. */
3321 static tree
3322 distribute_real_division (location_t loc, enum tree_code code, tree type,
3323 tree arg0, tree arg1)
3325 bool mul0 = TREE_CODE (arg0) == MULT_EXPR;
3326 bool mul1 = TREE_CODE (arg1) == MULT_EXPR;
3328 /* (A / C) +- (B / C) -> (A +- B) / C. */
3329 if (mul0 == mul1
3330 && operand_equal_p (TREE_OPERAND (arg0, 1),
3331 TREE_OPERAND (arg1, 1), 0))
3332 return fold_build2_loc (loc, mul0 ? MULT_EXPR : RDIV_EXPR, type,
3333 fold_build2_loc (loc, code, type,
3334 TREE_OPERAND (arg0, 0),
3335 TREE_OPERAND (arg1, 0)),
3336 TREE_OPERAND (arg0, 1));
3338 /* (A / C1) +- (A / C2) -> A * (1 / C1 +- 1 / C2). */
3339 if (operand_equal_p (TREE_OPERAND (arg0, 0),
3340 TREE_OPERAND (arg1, 0), 0)
3341 && TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
3342 && TREE_CODE (TREE_OPERAND (arg1, 1)) == REAL_CST)
3344 REAL_VALUE_TYPE r0, r1;
3345 r0 = TREE_REAL_CST (TREE_OPERAND (arg0, 1));
3346 r1 = TREE_REAL_CST (TREE_OPERAND (arg1, 1));
3347 if (!mul0)
3348 real_arithmetic (&r0, RDIV_EXPR, &dconst1, &r0);
3349 if (!mul1)
3350 real_arithmetic (&r1, RDIV_EXPR, &dconst1, &r1);
3351 real_arithmetic (&r0, code, &r0, &r1);
3352 return fold_build2_loc (loc, MULT_EXPR, type,
3353 TREE_OPERAND (arg0, 0),
3354 build_real (type, r0));
3357 return NULL_TREE;
3360 /* Return a BIT_FIELD_REF of type TYPE to refer to BITSIZE bits of INNER
3361 starting at BITPOS. The field is unsigned if UNSIGNEDP is nonzero. */
3363 static tree
3364 make_bit_field_ref (location_t loc, tree inner, tree type,
3365 HOST_WIDE_INT bitsize, HOST_WIDE_INT bitpos, int unsignedp)
3367 tree result, bftype;
3369 if (bitpos == 0)
3371 tree size = TYPE_SIZE (TREE_TYPE (inner));
3372 if ((INTEGRAL_TYPE_P (TREE_TYPE (inner))
3373 || POINTER_TYPE_P (TREE_TYPE (inner)))
3374 && host_integerp (size, 0)
3375 && tree_low_cst (size, 0) == bitsize)
3376 return fold_convert_loc (loc, type, inner);
3379 bftype = type;
3380 if (TYPE_PRECISION (bftype) != bitsize
3381 || TYPE_UNSIGNED (bftype) == !unsignedp)
3382 bftype = build_nonstandard_integer_type (bitsize, 0);
3384 result = build3 (BIT_FIELD_REF, bftype, inner,
3385 size_int (bitsize), bitsize_int (bitpos));
3386 SET_EXPR_LOCATION (result, loc);
3388 if (bftype != type)
3389 result = fold_convert_loc (loc, type, result);
3391 return result;
3394 /* Optimize a bit-field compare.
3396 There are two cases: First is a compare against a constant and the
3397 second is a comparison of two items where the fields are at the same
3398 bit position relative to the start of a chunk (byte, halfword, word)
3399 large enough to contain it. In these cases we can avoid the shift
3400 implicit in bitfield extractions.
3402 For constants, we emit a compare of the shifted constant with the
3403 BIT_AND_EXPR of a mask and a byte, halfword, or word of the operand being
3404 compared. For two fields at the same position, we do the ANDs with the
3405 similar mask and compare the result of the ANDs.
3407 CODE is the comparison code, known to be either NE_EXPR or EQ_EXPR.
3408 COMPARE_TYPE is the type of the comparison, and LHS and RHS
3409 are the left and right operands of the comparison, respectively.
3411 If the optimization described above can be done, we return the resulting
3412 tree. Otherwise we return zero. */
3414 static tree
3415 optimize_bit_field_compare (location_t loc, enum tree_code code,
3416 tree compare_type, tree lhs, tree rhs)
3418 HOST_WIDE_INT lbitpos, lbitsize, rbitpos, rbitsize, nbitpos, nbitsize;
3419 tree type = TREE_TYPE (lhs);
3420 tree signed_type, unsigned_type;
3421 int const_p = TREE_CODE (rhs) == INTEGER_CST;
3422 enum machine_mode lmode, rmode, nmode;
3423 int lunsignedp, runsignedp;
3424 int lvolatilep = 0, rvolatilep = 0;
3425 tree linner, rinner = NULL_TREE;
3426 tree mask;
3427 tree offset;
3429 /* Get all the information about the extractions being done. If the bit size
3430 if the same as the size of the underlying object, we aren't doing an
3431 extraction at all and so can do nothing. We also don't want to
3432 do anything if the inner expression is a PLACEHOLDER_EXPR since we
3433 then will no longer be able to replace it. */
3434 linner = get_inner_reference (lhs, &lbitsize, &lbitpos, &offset, &lmode,
3435 &lunsignedp, &lvolatilep, false);
3436 if (linner == lhs || lbitsize == GET_MODE_BITSIZE (lmode) || lbitsize < 0
3437 || offset != 0 || TREE_CODE (linner) == PLACEHOLDER_EXPR)
3438 return 0;
3440 if (!const_p)
3442 /* If this is not a constant, we can only do something if bit positions,
3443 sizes, and signedness are the same. */
3444 rinner = get_inner_reference (rhs, &rbitsize, &rbitpos, &offset, &rmode,
3445 &runsignedp, &rvolatilep, false);
3447 if (rinner == rhs || lbitpos != rbitpos || lbitsize != rbitsize
3448 || lunsignedp != runsignedp || offset != 0
3449 || TREE_CODE (rinner) == PLACEHOLDER_EXPR)
3450 return 0;
3453 /* See if we can find a mode to refer to this field. We should be able to,
3454 but fail if we can't. */
3455 if (lvolatilep
3456 && GET_MODE_BITSIZE (lmode) > 0
3457 && flag_strict_volatile_bitfields > 0)
3458 nmode = lmode;
3459 else
3460 nmode = get_best_mode (lbitsize, lbitpos,
3461 const_p ? TYPE_ALIGN (TREE_TYPE (linner))
3462 : MIN (TYPE_ALIGN (TREE_TYPE (linner)),
3463 TYPE_ALIGN (TREE_TYPE (rinner))),
3464 word_mode, lvolatilep || rvolatilep);
3465 if (nmode == VOIDmode)
3466 return 0;
3468 /* Set signed and unsigned types of the precision of this mode for the
3469 shifts below. */
3470 signed_type = lang_hooks.types.type_for_mode (nmode, 0);
3471 unsigned_type = lang_hooks.types.type_for_mode (nmode, 1);
3473 /* Compute the bit position and size for the new reference and our offset
3474 within it. If the new reference is the same size as the original, we
3475 won't optimize anything, so return zero. */
3476 nbitsize = GET_MODE_BITSIZE (nmode);
3477 nbitpos = lbitpos & ~ (nbitsize - 1);
3478 lbitpos -= nbitpos;
3479 if (nbitsize == lbitsize)
3480 return 0;
3482 if (BYTES_BIG_ENDIAN)
3483 lbitpos = nbitsize - lbitsize - lbitpos;
3485 /* Make the mask to be used against the extracted field. */
3486 mask = build_int_cst_type (unsigned_type, -1);
3487 mask = const_binop (LSHIFT_EXPR, mask, size_int (nbitsize - lbitsize));
3488 mask = const_binop (RSHIFT_EXPR, mask,
3489 size_int (nbitsize - lbitsize - lbitpos));
3491 if (! const_p)
3492 /* If not comparing with constant, just rework the comparison
3493 and return. */
3494 return fold_build2_loc (loc, code, compare_type,
3495 fold_build2_loc (loc, BIT_AND_EXPR, unsigned_type,
3496 make_bit_field_ref (loc, linner,
3497 unsigned_type,
3498 nbitsize, nbitpos,
3500 mask),
3501 fold_build2_loc (loc, BIT_AND_EXPR, unsigned_type,
3502 make_bit_field_ref (loc, rinner,
3503 unsigned_type,
3504 nbitsize, nbitpos,
3506 mask));
3508 /* Otherwise, we are handling the constant case. See if the constant is too
3509 big for the field. Warn and return a tree of for 0 (false) if so. We do
3510 this not only for its own sake, but to avoid having to test for this
3511 error case below. If we didn't, we might generate wrong code.
3513 For unsigned fields, the constant shifted right by the field length should
3514 be all zero. For signed fields, the high-order bits should agree with
3515 the sign bit. */
3517 if (lunsignedp)
3519 if (! integer_zerop (const_binop (RSHIFT_EXPR,
3520 fold_convert_loc (loc,
3521 unsigned_type, rhs),
3522 size_int (lbitsize))))
3524 warning (0, "comparison is always %d due to width of bit-field",
3525 code == NE_EXPR);
3526 return constant_boolean_node (code == NE_EXPR, compare_type);
3529 else
3531 tree tem = const_binop (RSHIFT_EXPR,
3532 fold_convert_loc (loc, signed_type, rhs),
3533 size_int (lbitsize - 1));
3534 if (! integer_zerop (tem) && ! integer_all_onesp (tem))
3536 warning (0, "comparison is always %d due to width of bit-field",
3537 code == NE_EXPR);
3538 return constant_boolean_node (code == NE_EXPR, compare_type);
3542 /* Single-bit compares should always be against zero. */
3543 if (lbitsize == 1 && ! integer_zerop (rhs))
3545 code = code == EQ_EXPR ? NE_EXPR : EQ_EXPR;
3546 rhs = build_int_cst (type, 0);
3549 /* Make a new bitfield reference, shift the constant over the
3550 appropriate number of bits and mask it with the computed mask
3551 (in case this was a signed field). If we changed it, make a new one. */
3552 lhs = make_bit_field_ref (loc, linner, unsigned_type, nbitsize, nbitpos, 1);
3553 if (lvolatilep)
3555 TREE_SIDE_EFFECTS (lhs) = 1;
3556 TREE_THIS_VOLATILE (lhs) = 1;
3559 rhs = const_binop (BIT_AND_EXPR,
3560 const_binop (LSHIFT_EXPR,
3561 fold_convert_loc (loc, unsigned_type, rhs),
3562 size_int (lbitpos)),
3563 mask);
3565 lhs = build2 (code, compare_type,
3566 build2 (BIT_AND_EXPR, unsigned_type, lhs, mask),
3567 rhs);
3568 SET_EXPR_LOCATION (lhs, loc);
3569 return lhs;
3572 /* Subroutine for fold_truthop: decode a field reference.
3574 If EXP is a comparison reference, we return the innermost reference.
3576 *PBITSIZE is set to the number of bits in the reference, *PBITPOS is
3577 set to the starting bit number.
3579 If the innermost field can be completely contained in a mode-sized
3580 unit, *PMODE is set to that mode. Otherwise, it is set to VOIDmode.
3582 *PVOLATILEP is set to 1 if the any expression encountered is volatile;
3583 otherwise it is not changed.
3585 *PUNSIGNEDP is set to the signedness of the field.
3587 *PMASK is set to the mask used. This is either contained in a
3588 BIT_AND_EXPR or derived from the width of the field.
3590 *PAND_MASK is set to the mask found in a BIT_AND_EXPR, if any.
3592 Return 0 if this is not a component reference or is one that we can't
3593 do anything with. */
3595 static tree
3596 decode_field_reference (location_t loc, tree exp, HOST_WIDE_INT *pbitsize,
3597 HOST_WIDE_INT *pbitpos, enum machine_mode *pmode,
3598 int *punsignedp, int *pvolatilep,
3599 tree *pmask, tree *pand_mask)
3601 tree outer_type = 0;
3602 tree and_mask = 0;
3603 tree mask, inner, offset;
3604 tree unsigned_type;
3605 unsigned int precision;
3607 /* All the optimizations using this function assume integer fields.
3608 There are problems with FP fields since the type_for_size call
3609 below can fail for, e.g., XFmode. */
3610 if (! INTEGRAL_TYPE_P (TREE_TYPE (exp)))
3611 return 0;
3613 /* We are interested in the bare arrangement of bits, so strip everything
3614 that doesn't affect the machine mode. However, record the type of the
3615 outermost expression if it may matter below. */
3616 if (CONVERT_EXPR_P (exp)
3617 || TREE_CODE (exp) == NON_LVALUE_EXPR)
3618 outer_type = TREE_TYPE (exp);
3619 STRIP_NOPS (exp);
3621 if (TREE_CODE (exp) == BIT_AND_EXPR)
3623 and_mask = TREE_OPERAND (exp, 1);
3624 exp = TREE_OPERAND (exp, 0);
3625 STRIP_NOPS (exp); STRIP_NOPS (and_mask);
3626 if (TREE_CODE (and_mask) != INTEGER_CST)
3627 return 0;
3630 inner = get_inner_reference (exp, pbitsize, pbitpos, &offset, pmode,
3631 punsignedp, pvolatilep, false);
3632 if ((inner == exp && and_mask == 0)
3633 || *pbitsize < 0 || offset != 0
3634 || TREE_CODE (inner) == PLACEHOLDER_EXPR)
3635 return 0;
3637 /* If the number of bits in the reference is the same as the bitsize of
3638 the outer type, then the outer type gives the signedness. Otherwise
3639 (in case of a small bitfield) the signedness is unchanged. */
3640 if (outer_type && *pbitsize == TYPE_PRECISION (outer_type))
3641 *punsignedp = TYPE_UNSIGNED (outer_type);
3643 /* Compute the mask to access the bitfield. */
3644 unsigned_type = lang_hooks.types.type_for_size (*pbitsize, 1);
3645 precision = TYPE_PRECISION (unsigned_type);
3647 mask = build_int_cst_type (unsigned_type, -1);
3649 mask = const_binop (LSHIFT_EXPR, mask, size_int (precision - *pbitsize));
3650 mask = const_binop (RSHIFT_EXPR, mask, size_int (precision - *pbitsize));
3652 /* Merge it with the mask we found in the BIT_AND_EXPR, if any. */
3653 if (and_mask != 0)
3654 mask = fold_build2_loc (loc, BIT_AND_EXPR, unsigned_type,
3655 fold_convert_loc (loc, unsigned_type, and_mask), mask);
3657 *pmask = mask;
3658 *pand_mask = and_mask;
3659 return inner;
3662 /* Return nonzero if MASK represents a mask of SIZE ones in the low-order
3663 bit positions. */
3665 static int
3666 all_ones_mask_p (const_tree mask, int size)
3668 tree type = TREE_TYPE (mask);
3669 unsigned int precision = TYPE_PRECISION (type);
3670 tree tmask;
3672 tmask = build_int_cst_type (signed_type_for (type), -1);
3674 return
3675 tree_int_cst_equal (mask,
3676 const_binop (RSHIFT_EXPR,
3677 const_binop (LSHIFT_EXPR, tmask,
3678 size_int (precision - size)),
3679 size_int (precision - size)));
3682 /* Subroutine for fold: determine if VAL is the INTEGER_CONST that
3683 represents the sign bit of EXP's type. If EXP represents a sign
3684 or zero extension, also test VAL against the unextended type.
3685 The return value is the (sub)expression whose sign bit is VAL,
3686 or NULL_TREE otherwise. */
3688 static tree
3689 sign_bit_p (tree exp, const_tree val)
3691 unsigned HOST_WIDE_INT mask_lo, lo;
3692 HOST_WIDE_INT mask_hi, hi;
3693 int width;
3694 tree t;
3696 /* Tree EXP must have an integral type. */
3697 t = TREE_TYPE (exp);
3698 if (! INTEGRAL_TYPE_P (t))
3699 return NULL_TREE;
3701 /* Tree VAL must be an integer constant. */
3702 if (TREE_CODE (val) != INTEGER_CST
3703 || TREE_OVERFLOW (val))
3704 return NULL_TREE;
3706 width = TYPE_PRECISION (t);
3707 if (width > HOST_BITS_PER_WIDE_INT)
3709 hi = (unsigned HOST_WIDE_INT) 1 << (width - HOST_BITS_PER_WIDE_INT - 1);
3710 lo = 0;
3712 mask_hi = ((unsigned HOST_WIDE_INT) -1
3713 >> (2 * HOST_BITS_PER_WIDE_INT - width));
3714 mask_lo = -1;
3716 else
3718 hi = 0;
3719 lo = (unsigned HOST_WIDE_INT) 1 << (width - 1);
3721 mask_hi = 0;
3722 mask_lo = ((unsigned HOST_WIDE_INT) -1
3723 >> (HOST_BITS_PER_WIDE_INT - width));
3726 /* We mask off those bits beyond TREE_TYPE (exp) so that we can
3727 treat VAL as if it were unsigned. */
3728 if ((TREE_INT_CST_HIGH (val) & mask_hi) == hi
3729 && (TREE_INT_CST_LOW (val) & mask_lo) == lo)
3730 return exp;
3732 /* Handle extension from a narrower type. */
3733 if (TREE_CODE (exp) == NOP_EXPR
3734 && TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0))) < width)
3735 return sign_bit_p (TREE_OPERAND (exp, 0), val);
3737 return NULL_TREE;
3740 /* Subroutine for fold_truthop: determine if an operand is simple enough
3741 to be evaluated unconditionally. */
3743 static int
3744 simple_operand_p (const_tree exp)
3746 /* Strip any conversions that don't change the machine mode. */
3747 STRIP_NOPS (exp);
3749 return (CONSTANT_CLASS_P (exp)
3750 || TREE_CODE (exp) == SSA_NAME
3751 || (DECL_P (exp)
3752 && ! TREE_ADDRESSABLE (exp)
3753 && ! TREE_THIS_VOLATILE (exp)
3754 && ! DECL_NONLOCAL (exp)
3755 /* Don't regard global variables as simple. They may be
3756 allocated in ways unknown to the compiler (shared memory,
3757 #pragma weak, etc). */
3758 && ! TREE_PUBLIC (exp)
3759 && ! DECL_EXTERNAL (exp)
3760 /* Loading a static variable is unduly expensive, but global
3761 registers aren't expensive. */
3762 && (! TREE_STATIC (exp) || DECL_REGISTER (exp))));
3765 /* The following functions are subroutines to fold_range_test and allow it to
3766 try to change a logical combination of comparisons into a range test.
3768 For example, both
3769 X == 2 || X == 3 || X == 4 || X == 5
3771 X >= 2 && X <= 5
3772 are converted to
3773 (unsigned) (X - 2) <= 3
3775 We describe each set of comparisons as being either inside or outside
3776 a range, using a variable named like IN_P, and then describe the
3777 range with a lower and upper bound. If one of the bounds is omitted,
3778 it represents either the highest or lowest value of the type.
3780 In the comments below, we represent a range by two numbers in brackets
3781 preceded by a "+" to designate being inside that range, or a "-" to
3782 designate being outside that range, so the condition can be inverted by
3783 flipping the prefix. An omitted bound is represented by a "-". For
3784 example, "- [-, 10]" means being outside the range starting at the lowest
3785 possible value and ending at 10, in other words, being greater than 10.
3786 The range "+ [-, -]" is always true and hence the range "- [-, -]" is
3787 always false.
3789 We set up things so that the missing bounds are handled in a consistent
3790 manner so neither a missing bound nor "true" and "false" need to be
3791 handled using a special case. */
3793 /* Return the result of applying CODE to ARG0 and ARG1, but handle the case
3794 of ARG0 and/or ARG1 being omitted, meaning an unlimited range. UPPER0_P
3795 and UPPER1_P are nonzero if the respective argument is an upper bound
3796 and zero for a lower. TYPE, if nonzero, is the type of the result; it
3797 must be specified for a comparison. ARG1 will be converted to ARG0's
3798 type if both are specified. */
3800 static tree
3801 range_binop (enum tree_code code, tree type, tree arg0, int upper0_p,
3802 tree arg1, int upper1_p)
3804 tree tem;
3805 int result;
3806 int sgn0, sgn1;
3808 /* If neither arg represents infinity, do the normal operation.
3809 Else, if not a comparison, return infinity. Else handle the special
3810 comparison rules. Note that most of the cases below won't occur, but
3811 are handled for consistency. */
3813 if (arg0 != 0 && arg1 != 0)
3815 tem = fold_build2 (code, type != 0 ? type : TREE_TYPE (arg0),
3816 arg0, fold_convert (TREE_TYPE (arg0), arg1));
3817 STRIP_NOPS (tem);
3818 return TREE_CODE (tem) == INTEGER_CST ? tem : 0;
3821 if (TREE_CODE_CLASS (code) != tcc_comparison)
3822 return 0;
3824 /* Set SGN[01] to -1 if ARG[01] is a lower bound, 1 for upper, and 0
3825 for neither. In real maths, we cannot assume open ended ranges are
3826 the same. But, this is computer arithmetic, where numbers are finite.
3827 We can therefore make the transformation of any unbounded range with
3828 the value Z, Z being greater than any representable number. This permits
3829 us to treat unbounded ranges as equal. */
3830 sgn0 = arg0 != 0 ? 0 : (upper0_p ? 1 : -1);
3831 sgn1 = arg1 != 0 ? 0 : (upper1_p ? 1 : -1);
3832 switch (code)
3834 case EQ_EXPR:
3835 result = sgn0 == sgn1;
3836 break;
3837 case NE_EXPR:
3838 result = sgn0 != sgn1;
3839 break;
3840 case LT_EXPR:
3841 result = sgn0 < sgn1;
3842 break;
3843 case LE_EXPR:
3844 result = sgn0 <= sgn1;
3845 break;
3846 case GT_EXPR:
3847 result = sgn0 > sgn1;
3848 break;
3849 case GE_EXPR:
3850 result = sgn0 >= sgn1;
3851 break;
3852 default:
3853 gcc_unreachable ();
3856 return constant_boolean_node (result, type);
3859 /* Given EXP, a logical expression, set the range it is testing into
3860 variables denoted by PIN_P, PLOW, and PHIGH. Return the expression
3861 actually being tested. *PLOW and *PHIGH will be made of the same
3862 type as the returned expression. If EXP is not a comparison, we
3863 will most likely not be returning a useful value and range. Set
3864 *STRICT_OVERFLOW_P to true if the return value is only valid
3865 because signed overflow is undefined; otherwise, do not change
3866 *STRICT_OVERFLOW_P. */
3868 tree
3869 make_range (tree exp, int *pin_p, tree *plow, tree *phigh,
3870 bool *strict_overflow_p)
3872 enum tree_code code;
3873 tree arg0 = NULL_TREE, arg1 = NULL_TREE;
3874 tree exp_type = NULL_TREE, arg0_type = NULL_TREE;
3875 int in_p, n_in_p;
3876 tree low, high, n_low, n_high;
3877 location_t loc = EXPR_LOCATION (exp);
3879 /* Start with simply saying "EXP != 0" and then look at the code of EXP
3880 and see if we can refine the range. Some of the cases below may not
3881 happen, but it doesn't seem worth worrying about this. We "continue"
3882 the outer loop when we've changed something; otherwise we "break"
3883 the switch, which will "break" the while. */
3885 in_p = 0;
3886 low = high = build_int_cst (TREE_TYPE (exp), 0);
3888 while (1)
3890 code = TREE_CODE (exp);
3891 exp_type = TREE_TYPE (exp);
3893 if (IS_EXPR_CODE_CLASS (TREE_CODE_CLASS (code)))
3895 if (TREE_OPERAND_LENGTH (exp) > 0)
3896 arg0 = TREE_OPERAND (exp, 0);
3897 if (TREE_CODE_CLASS (code) == tcc_comparison
3898 || TREE_CODE_CLASS (code) == tcc_unary
3899 || TREE_CODE_CLASS (code) == tcc_binary)
3900 arg0_type = TREE_TYPE (arg0);
3901 if (TREE_CODE_CLASS (code) == tcc_binary
3902 || TREE_CODE_CLASS (code) == tcc_comparison
3903 || (TREE_CODE_CLASS (code) == tcc_expression
3904 && TREE_OPERAND_LENGTH (exp) > 1))
3905 arg1 = TREE_OPERAND (exp, 1);
3908 switch (code)
3910 case TRUTH_NOT_EXPR:
3911 in_p = ! in_p, exp = arg0;
3912 continue;
3914 case EQ_EXPR: case NE_EXPR:
3915 case LT_EXPR: case LE_EXPR: case GE_EXPR: case GT_EXPR:
3916 /* We can only do something if the range is testing for zero
3917 and if the second operand is an integer constant. Note that
3918 saying something is "in" the range we make is done by
3919 complementing IN_P since it will set in the initial case of
3920 being not equal to zero; "out" is leaving it alone. */
3921 if (low == 0 || high == 0
3922 || ! integer_zerop (low) || ! integer_zerop (high)
3923 || TREE_CODE (arg1) != INTEGER_CST)
3924 break;
3926 switch (code)
3928 case NE_EXPR: /* - [c, c] */
3929 low = high = arg1;
3930 break;
3931 case EQ_EXPR: /* + [c, c] */
3932 in_p = ! in_p, low = high = arg1;
3933 break;
3934 case GT_EXPR: /* - [-, c] */
3935 low = 0, high = arg1;
3936 break;
3937 case GE_EXPR: /* + [c, -] */
3938 in_p = ! in_p, low = arg1, high = 0;
3939 break;
3940 case LT_EXPR: /* - [c, -] */
3941 low = arg1, high = 0;
3942 break;
3943 case LE_EXPR: /* + [-, c] */
3944 in_p = ! in_p, low = 0, high = arg1;
3945 break;
3946 default:
3947 gcc_unreachable ();
3950 /* If this is an unsigned comparison, we also know that EXP is
3951 greater than or equal to zero. We base the range tests we make
3952 on that fact, so we record it here so we can parse existing
3953 range tests. We test arg0_type since often the return type
3954 of, e.g. EQ_EXPR, is boolean. */
3955 if (TYPE_UNSIGNED (arg0_type) && (low == 0 || high == 0))
3957 if (! merge_ranges (&n_in_p, &n_low, &n_high,
3958 in_p, low, high, 1,
3959 build_int_cst (arg0_type, 0),
3960 NULL_TREE))
3961 break;
3963 in_p = n_in_p, low = n_low, high = n_high;
3965 /* If the high bound is missing, but we have a nonzero low
3966 bound, reverse the range so it goes from zero to the low bound
3967 minus 1. */
3968 if (high == 0 && low && ! integer_zerop (low))
3970 in_p = ! in_p;
3971 high = range_binop (MINUS_EXPR, NULL_TREE, low, 0,
3972 integer_one_node, 0);
3973 low = build_int_cst (arg0_type, 0);
3977 exp = arg0;
3978 continue;
3980 case NEGATE_EXPR:
3981 /* (-x) IN [a,b] -> x in [-b, -a] */
3982 n_low = range_binop (MINUS_EXPR, exp_type,
3983 build_int_cst (exp_type, 0),
3984 0, high, 1);
3985 n_high = range_binop (MINUS_EXPR, exp_type,
3986 build_int_cst (exp_type, 0),
3987 0, low, 0);
3988 low = n_low, high = n_high;
3989 exp = arg0;
3990 continue;
3992 case BIT_NOT_EXPR:
3993 /* ~ X -> -X - 1 */
3994 exp = build2 (MINUS_EXPR, exp_type, negate_expr (arg0),
3995 build_int_cst (exp_type, 1));
3996 SET_EXPR_LOCATION (exp, loc);
3997 continue;
3999 case PLUS_EXPR: case MINUS_EXPR:
4000 if (TREE_CODE (arg1) != INTEGER_CST)
4001 break;
4003 /* If flag_wrapv and ARG0_TYPE is signed, then we cannot
4004 move a constant to the other side. */
4005 if (!TYPE_UNSIGNED (arg0_type)
4006 && !TYPE_OVERFLOW_UNDEFINED (arg0_type))
4007 break;
4009 /* If EXP is signed, any overflow in the computation is undefined,
4010 so we don't worry about it so long as our computations on
4011 the bounds don't overflow. For unsigned, overflow is defined
4012 and this is exactly the right thing. */
4013 n_low = range_binop (code == MINUS_EXPR ? PLUS_EXPR : MINUS_EXPR,
4014 arg0_type, low, 0, arg1, 0);
4015 n_high = range_binop (code == MINUS_EXPR ? PLUS_EXPR : MINUS_EXPR,
4016 arg0_type, high, 1, arg1, 0);
4017 if ((n_low != 0 && TREE_OVERFLOW (n_low))
4018 || (n_high != 0 && TREE_OVERFLOW (n_high)))
4019 break;
4021 if (TYPE_OVERFLOW_UNDEFINED (arg0_type))
4022 *strict_overflow_p = true;
4024 /* Check for an unsigned range which has wrapped around the maximum
4025 value thus making n_high < n_low, and normalize it. */
4026 if (n_low && n_high && tree_int_cst_lt (n_high, n_low))
4028 low = range_binop (PLUS_EXPR, arg0_type, n_high, 0,
4029 integer_one_node, 0);
4030 high = range_binop (MINUS_EXPR, arg0_type, n_low, 0,
4031 integer_one_node, 0);
4033 /* If the range is of the form +/- [ x+1, x ], we won't
4034 be able to normalize it. But then, it represents the
4035 whole range or the empty set, so make it
4036 +/- [ -, - ]. */
4037 if (tree_int_cst_equal (n_low, low)
4038 && tree_int_cst_equal (n_high, high))
4039 low = high = 0;
4040 else
4041 in_p = ! in_p;
4043 else
4044 low = n_low, high = n_high;
4046 exp = arg0;
4047 continue;
4049 CASE_CONVERT: case NON_LVALUE_EXPR:
4050 if (TYPE_PRECISION (arg0_type) > TYPE_PRECISION (exp_type))
4051 break;
4053 if (! INTEGRAL_TYPE_P (arg0_type)
4054 || (low != 0 && ! int_fits_type_p (low, arg0_type))
4055 || (high != 0 && ! int_fits_type_p (high, arg0_type)))
4056 break;
4058 n_low = low, n_high = high;
4060 if (n_low != 0)
4061 n_low = fold_convert_loc (loc, arg0_type, n_low);
4063 if (n_high != 0)
4064 n_high = fold_convert_loc (loc, arg0_type, n_high);
4067 /* If we're converting arg0 from an unsigned type, to exp,
4068 a signed type, we will be doing the comparison as unsigned.
4069 The tests above have already verified that LOW and HIGH
4070 are both positive.
4072 So we have to ensure that we will handle large unsigned
4073 values the same way that the current signed bounds treat
4074 negative values. */
4076 if (!TYPE_UNSIGNED (exp_type) && TYPE_UNSIGNED (arg0_type))
4078 tree high_positive;
4079 tree equiv_type;
4080 /* For fixed-point modes, we need to pass the saturating flag
4081 as the 2nd parameter. */
4082 if (ALL_FIXED_POINT_MODE_P (TYPE_MODE (arg0_type)))
4083 equiv_type = lang_hooks.types.type_for_mode
4084 (TYPE_MODE (arg0_type),
4085 TYPE_SATURATING (arg0_type));
4086 else
4087 equiv_type = lang_hooks.types.type_for_mode
4088 (TYPE_MODE (arg0_type), 1);
4090 /* A range without an upper bound is, naturally, unbounded.
4091 Since convert would have cropped a very large value, use
4092 the max value for the destination type. */
4093 high_positive
4094 = TYPE_MAX_VALUE (equiv_type) ? TYPE_MAX_VALUE (equiv_type)
4095 : TYPE_MAX_VALUE (arg0_type);
4097 if (TYPE_PRECISION (exp_type) == TYPE_PRECISION (arg0_type))
4098 high_positive = fold_build2_loc (loc, RSHIFT_EXPR, arg0_type,
4099 fold_convert_loc (loc, arg0_type,
4100 high_positive),
4101 build_int_cst (arg0_type, 1));
4103 /* If the low bound is specified, "and" the range with the
4104 range for which the original unsigned value will be
4105 positive. */
4106 if (low != 0)
4108 if (! merge_ranges (&n_in_p, &n_low, &n_high,
4109 1, n_low, n_high, 1,
4110 fold_convert_loc (loc, arg0_type,
4111 integer_zero_node),
4112 high_positive))
4113 break;
4115 in_p = (n_in_p == in_p);
4117 else
4119 /* Otherwise, "or" the range with the range of the input
4120 that will be interpreted as negative. */
4121 if (! merge_ranges (&n_in_p, &n_low, &n_high,
4122 0, n_low, n_high, 1,
4123 fold_convert_loc (loc, arg0_type,
4124 integer_zero_node),
4125 high_positive))
4126 break;
4128 in_p = (in_p != n_in_p);
4132 exp = arg0;
4133 low = n_low, high = n_high;
4134 continue;
4136 default:
4137 break;
4140 break;
4143 /* If EXP is a constant, we can evaluate whether this is true or false. */
4144 if (TREE_CODE (exp) == INTEGER_CST)
4146 in_p = in_p == (integer_onep (range_binop (GE_EXPR, integer_type_node,
4147 exp, 0, low, 0))
4148 && integer_onep (range_binop (LE_EXPR, integer_type_node,
4149 exp, 1, high, 1)));
4150 low = high = 0;
4151 exp = 0;
4154 *pin_p = in_p, *plow = low, *phigh = high;
4155 return exp;
4158 /* Given a range, LOW, HIGH, and IN_P, an expression, EXP, and a result
4159 type, TYPE, return an expression to test if EXP is in (or out of, depending
4160 on IN_P) the range. Return 0 if the test couldn't be created. */
4162 tree
4163 build_range_check (location_t loc, tree type, tree exp, int in_p,
4164 tree low, tree high)
4166 tree etype = TREE_TYPE (exp), value;
4168 #ifdef HAVE_canonicalize_funcptr_for_compare
4169 /* Disable this optimization for function pointer expressions
4170 on targets that require function pointer canonicalization. */
4171 if (HAVE_canonicalize_funcptr_for_compare
4172 && TREE_CODE (etype) == POINTER_TYPE
4173 && TREE_CODE (TREE_TYPE (etype)) == FUNCTION_TYPE)
4174 return NULL_TREE;
4175 #endif
4177 if (! in_p)
4179 value = build_range_check (loc, type, exp, 1, low, high);
4180 if (value != 0)
4181 return invert_truthvalue_loc (loc, value);
4183 return 0;
4186 if (low == 0 && high == 0)
4187 return build_int_cst (type, 1);
4189 if (low == 0)
4190 return fold_build2_loc (loc, LE_EXPR, type, exp,
4191 fold_convert_loc (loc, etype, high));
4193 if (high == 0)
4194 return fold_build2_loc (loc, GE_EXPR, type, exp,
4195 fold_convert_loc (loc, etype, low));
4197 if (operand_equal_p (low, high, 0))
4198 return fold_build2_loc (loc, EQ_EXPR, type, exp,
4199 fold_convert_loc (loc, etype, low));
4201 if (integer_zerop (low))
4203 if (! TYPE_UNSIGNED (etype))
4205 etype = unsigned_type_for (etype);
4206 high = fold_convert_loc (loc, etype, high);
4207 exp = fold_convert_loc (loc, etype, exp);
4209 return build_range_check (loc, type, exp, 1, 0, high);
4212 /* Optimize (c>=1) && (c<=127) into (signed char)c > 0. */
4213 if (integer_onep (low) && TREE_CODE (high) == INTEGER_CST)
4215 unsigned HOST_WIDE_INT lo;
4216 HOST_WIDE_INT hi;
4217 int prec;
4219 prec = TYPE_PRECISION (etype);
4220 if (prec <= HOST_BITS_PER_WIDE_INT)
4222 hi = 0;
4223 lo = ((unsigned HOST_WIDE_INT) 1 << (prec - 1)) - 1;
4225 else
4227 hi = ((HOST_WIDE_INT) 1 << (prec - HOST_BITS_PER_WIDE_INT - 1)) - 1;
4228 lo = (unsigned HOST_WIDE_INT) -1;
4231 if (TREE_INT_CST_HIGH (high) == hi && TREE_INT_CST_LOW (high) == lo)
4233 if (TYPE_UNSIGNED (etype))
4235 tree signed_etype = signed_type_for (etype);
4236 if (TYPE_PRECISION (signed_etype) != TYPE_PRECISION (etype))
4237 etype
4238 = build_nonstandard_integer_type (TYPE_PRECISION (etype), 0);
4239 else
4240 etype = signed_etype;
4241 exp = fold_convert_loc (loc, etype, exp);
4243 return fold_build2_loc (loc, GT_EXPR, type, exp,
4244 build_int_cst (etype, 0));
4248 /* Optimize (c>=low) && (c<=high) into (c-low>=0) && (c-low<=high-low).
4249 This requires wrap-around arithmetics for the type of the expression.
4250 First make sure that arithmetics in this type is valid, then make sure
4251 that it wraps around. */
4252 if (TREE_CODE (etype) == ENUMERAL_TYPE || TREE_CODE (etype) == BOOLEAN_TYPE)
4253 etype = lang_hooks.types.type_for_size (TYPE_PRECISION (etype),
4254 TYPE_UNSIGNED (etype));
4256 if (TREE_CODE (etype) == INTEGER_TYPE && !TYPE_OVERFLOW_WRAPS (etype))
4258 tree utype, minv, maxv;
4260 /* Check if (unsigned) INT_MAX + 1 == (unsigned) INT_MIN
4261 for the type in question, as we rely on this here. */
4262 utype = unsigned_type_for (etype);
4263 maxv = fold_convert_loc (loc, utype, TYPE_MAX_VALUE (etype));
4264 maxv = range_binop (PLUS_EXPR, NULL_TREE, maxv, 1,
4265 integer_one_node, 1);
4266 minv = fold_convert_loc (loc, utype, TYPE_MIN_VALUE (etype));
4268 if (integer_zerop (range_binop (NE_EXPR, integer_type_node,
4269 minv, 1, maxv, 1)))
4270 etype = utype;
4271 else
4272 return 0;
4275 high = fold_convert_loc (loc, etype, high);
4276 low = fold_convert_loc (loc, etype, low);
4277 exp = fold_convert_loc (loc, etype, exp);
4279 value = const_binop (MINUS_EXPR, high, low);
4282 if (POINTER_TYPE_P (etype))
4284 if (value != 0 && !TREE_OVERFLOW (value))
4286 low = fold_convert_loc (loc, sizetype, low);
4287 low = fold_build1_loc (loc, NEGATE_EXPR, sizetype, low);
4288 return build_range_check (loc, type,
4289 fold_build2_loc (loc, POINTER_PLUS_EXPR,
4290 etype, exp, low),
4291 1, build_int_cst (etype, 0), value);
4293 return 0;
4296 if (value != 0 && !TREE_OVERFLOW (value))
4297 return build_range_check (loc, type,
4298 fold_build2_loc (loc, MINUS_EXPR, etype, exp, low),
4299 1, build_int_cst (etype, 0), value);
4301 return 0;
4304 /* Return the predecessor of VAL in its type, handling the infinite case. */
4306 static tree
4307 range_predecessor (tree val)
4309 tree type = TREE_TYPE (val);
4311 if (INTEGRAL_TYPE_P (type)
4312 && operand_equal_p (val, TYPE_MIN_VALUE (type), 0))
4313 return 0;
4314 else
4315 return range_binop (MINUS_EXPR, NULL_TREE, val, 0, integer_one_node, 0);
4318 /* Return the successor of VAL in its type, handling the infinite case. */
4320 static tree
4321 range_successor (tree val)
4323 tree type = TREE_TYPE (val);
4325 if (INTEGRAL_TYPE_P (type)
4326 && operand_equal_p (val, TYPE_MAX_VALUE (type), 0))
4327 return 0;
4328 else
4329 return range_binop (PLUS_EXPR, NULL_TREE, val, 0, integer_one_node, 0);
4332 /* Given two ranges, see if we can merge them into one. Return 1 if we
4333 can, 0 if we can't. Set the output range into the specified parameters. */
4335 bool
4336 merge_ranges (int *pin_p, tree *plow, tree *phigh, int in0_p, tree low0,
4337 tree high0, int in1_p, tree low1, tree high1)
4339 int no_overlap;
4340 int subset;
4341 int temp;
4342 tree tem;
4343 int in_p;
4344 tree low, high;
4345 int lowequal = ((low0 == 0 && low1 == 0)
4346 || integer_onep (range_binop (EQ_EXPR, integer_type_node,
4347 low0, 0, low1, 0)));
4348 int highequal = ((high0 == 0 && high1 == 0)
4349 || integer_onep (range_binop (EQ_EXPR, integer_type_node,
4350 high0, 1, high1, 1)));
4352 /* Make range 0 be the range that starts first, or ends last if they
4353 start at the same value. Swap them if it isn't. */
4354 if (integer_onep (range_binop (GT_EXPR, integer_type_node,
4355 low0, 0, low1, 0))
4356 || (lowequal
4357 && integer_onep (range_binop (GT_EXPR, integer_type_node,
4358 high1, 1, high0, 1))))
4360 temp = in0_p, in0_p = in1_p, in1_p = temp;
4361 tem = low0, low0 = low1, low1 = tem;
4362 tem = high0, high0 = high1, high1 = tem;
4365 /* Now flag two cases, whether the ranges are disjoint or whether the
4366 second range is totally subsumed in the first. Note that the tests
4367 below are simplified by the ones above. */
4368 no_overlap = integer_onep (range_binop (LT_EXPR, integer_type_node,
4369 high0, 1, low1, 0));
4370 subset = integer_onep (range_binop (LE_EXPR, integer_type_node,
4371 high1, 1, high0, 1));
4373 /* We now have four cases, depending on whether we are including or
4374 excluding the two ranges. */
4375 if (in0_p && in1_p)
4377 /* If they don't overlap, the result is false. If the second range
4378 is a subset it is the result. Otherwise, the range is from the start
4379 of the second to the end of the first. */
4380 if (no_overlap)
4381 in_p = 0, low = high = 0;
4382 else if (subset)
4383 in_p = 1, low = low1, high = high1;
4384 else
4385 in_p = 1, low = low1, high = high0;
4388 else if (in0_p && ! in1_p)
4390 /* If they don't overlap, the result is the first range. If they are
4391 equal, the result is false. If the second range is a subset of the
4392 first, and the ranges begin at the same place, we go from just after
4393 the end of the second range to the end of the first. If the second
4394 range is not a subset of the first, or if it is a subset and both
4395 ranges end at the same place, the range starts at the start of the
4396 first range and ends just before the second range.
4397 Otherwise, we can't describe this as a single range. */
4398 if (no_overlap)
4399 in_p = 1, low = low0, high = high0;
4400 else if (lowequal && highequal)
4401 in_p = 0, low = high = 0;
4402 else if (subset && lowequal)
4404 low = range_successor (high1);
4405 high = high0;
4406 in_p = 1;
4407 if (low == 0)
4409 /* We are in the weird situation where high0 > high1 but
4410 high1 has no successor. Punt. */
4411 return 0;
4414 else if (! subset || highequal)
4416 low = low0;
4417 high = range_predecessor (low1);
4418 in_p = 1;
4419 if (high == 0)
4421 /* low0 < low1 but low1 has no predecessor. Punt. */
4422 return 0;
4425 else
4426 return 0;
4429 else if (! in0_p && in1_p)
4431 /* If they don't overlap, the result is the second range. If the second
4432 is a subset of the first, the result is false. Otherwise,
4433 the range starts just after the first range and ends at the
4434 end of the second. */
4435 if (no_overlap)
4436 in_p = 1, low = low1, high = high1;
4437 else if (subset || highequal)
4438 in_p = 0, low = high = 0;
4439 else
4441 low = range_successor (high0);
4442 high = high1;
4443 in_p = 1;
4444 if (low == 0)
4446 /* high1 > high0 but high0 has no successor. Punt. */
4447 return 0;
4452 else
4454 /* The case where we are excluding both ranges. Here the complex case
4455 is if they don't overlap. In that case, the only time we have a
4456 range is if they are adjacent. If the second is a subset of the
4457 first, the result is the first. Otherwise, the range to exclude
4458 starts at the beginning of the first range and ends at the end of the
4459 second. */
4460 if (no_overlap)
4462 if (integer_onep (range_binop (EQ_EXPR, integer_type_node,
4463 range_successor (high0),
4464 1, low1, 0)))
4465 in_p = 0, low = low0, high = high1;
4466 else
4468 /* Canonicalize - [min, x] into - [-, x]. */
4469 if (low0 && TREE_CODE (low0) == INTEGER_CST)
4470 switch (TREE_CODE (TREE_TYPE (low0)))
4472 case ENUMERAL_TYPE:
4473 if (TYPE_PRECISION (TREE_TYPE (low0))
4474 != GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (low0))))
4475 break;
4476 /* FALLTHROUGH */
4477 case INTEGER_TYPE:
4478 if (tree_int_cst_equal (low0,
4479 TYPE_MIN_VALUE (TREE_TYPE (low0))))
4480 low0 = 0;
4481 break;
4482 case POINTER_TYPE:
4483 if (TYPE_UNSIGNED (TREE_TYPE (low0))
4484 && integer_zerop (low0))
4485 low0 = 0;
4486 break;
4487 default:
4488 break;
4491 /* Canonicalize - [x, max] into - [x, -]. */
4492 if (high1 && TREE_CODE (high1) == INTEGER_CST)
4493 switch (TREE_CODE (TREE_TYPE (high1)))
4495 case ENUMERAL_TYPE:
4496 if (TYPE_PRECISION (TREE_TYPE (high1))
4497 != GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (high1))))
4498 break;
4499 /* FALLTHROUGH */
4500 case INTEGER_TYPE:
4501 if (tree_int_cst_equal (high1,
4502 TYPE_MAX_VALUE (TREE_TYPE (high1))))
4503 high1 = 0;
4504 break;
4505 case POINTER_TYPE:
4506 if (TYPE_UNSIGNED (TREE_TYPE (high1))
4507 && integer_zerop (range_binop (PLUS_EXPR, NULL_TREE,
4508 high1, 1,
4509 integer_one_node, 1)))
4510 high1 = 0;
4511 break;
4512 default:
4513 break;
4516 /* The ranges might be also adjacent between the maximum and
4517 minimum values of the given type. For
4518 - [{min,-}, x] and - [y, {max,-}] ranges where x + 1 < y
4519 return + [x + 1, y - 1]. */
4520 if (low0 == 0 && high1 == 0)
4522 low = range_successor (high0);
4523 high = range_predecessor (low1);
4524 if (low == 0 || high == 0)
4525 return 0;
4527 in_p = 1;
4529 else
4530 return 0;
4533 else if (subset)
4534 in_p = 0, low = low0, high = high0;
4535 else
4536 in_p = 0, low = low0, high = high1;
4539 *pin_p = in_p, *plow = low, *phigh = high;
4540 return 1;
4544 /* Subroutine of fold, looking inside expressions of the form
4545 A op B ? A : C, where ARG0, ARG1 and ARG2 are the three operands
4546 of the COND_EXPR. This function is being used also to optimize
4547 A op B ? C : A, by reversing the comparison first.
4549 Return a folded expression whose code is not a COND_EXPR
4550 anymore, or NULL_TREE if no folding opportunity is found. */
4552 static tree
4553 fold_cond_expr_with_comparison (location_t loc, tree type,
4554 tree arg0, tree arg1, tree arg2)
4556 enum tree_code comp_code = TREE_CODE (arg0);
4557 tree arg00 = TREE_OPERAND (arg0, 0);
4558 tree arg01 = TREE_OPERAND (arg0, 1);
4559 tree arg1_type = TREE_TYPE (arg1);
4560 tree tem;
4562 STRIP_NOPS (arg1);
4563 STRIP_NOPS (arg2);
4565 /* If we have A op 0 ? A : -A, consider applying the following
4566 transformations:
4568 A == 0? A : -A same as -A
4569 A != 0? A : -A same as A
4570 A >= 0? A : -A same as abs (A)
4571 A > 0? A : -A same as abs (A)
4572 A <= 0? A : -A same as -abs (A)
4573 A < 0? A : -A same as -abs (A)
4575 None of these transformations work for modes with signed
4576 zeros. If A is +/-0, the first two transformations will
4577 change the sign of the result (from +0 to -0, or vice
4578 versa). The last four will fix the sign of the result,
4579 even though the original expressions could be positive or
4580 negative, depending on the sign of A.
4582 Note that all these transformations are correct if A is
4583 NaN, since the two alternatives (A and -A) are also NaNs. */
4584 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (type))
4585 && (FLOAT_TYPE_P (TREE_TYPE (arg01))
4586 ? real_zerop (arg01)
4587 : integer_zerop (arg01))
4588 && ((TREE_CODE (arg2) == NEGATE_EXPR
4589 && operand_equal_p (TREE_OPERAND (arg2, 0), arg1, 0))
4590 /* In the case that A is of the form X-Y, '-A' (arg2) may
4591 have already been folded to Y-X, check for that. */
4592 || (TREE_CODE (arg1) == MINUS_EXPR
4593 && TREE_CODE (arg2) == MINUS_EXPR
4594 && operand_equal_p (TREE_OPERAND (arg1, 0),
4595 TREE_OPERAND (arg2, 1), 0)
4596 && operand_equal_p (TREE_OPERAND (arg1, 1),
4597 TREE_OPERAND (arg2, 0), 0))))
4598 switch (comp_code)
4600 case EQ_EXPR:
4601 case UNEQ_EXPR:
4602 tem = fold_convert_loc (loc, arg1_type, arg1);
4603 return pedantic_non_lvalue_loc (loc,
4604 fold_convert_loc (loc, type,
4605 negate_expr (tem)));
4606 case NE_EXPR:
4607 case LTGT_EXPR:
4608 return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
4609 case UNGE_EXPR:
4610 case UNGT_EXPR:
4611 if (flag_trapping_math)
4612 break;
4613 /* Fall through. */
4614 case GE_EXPR:
4615 case GT_EXPR:
4616 if (TYPE_UNSIGNED (TREE_TYPE (arg1)))
4617 arg1 = fold_convert_loc (loc, signed_type_for
4618 (TREE_TYPE (arg1)), arg1);
4619 tem = fold_build1_loc (loc, ABS_EXPR, TREE_TYPE (arg1), arg1);
4620 return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, tem));
4621 case UNLE_EXPR:
4622 case UNLT_EXPR:
4623 if (flag_trapping_math)
4624 break;
4625 case LE_EXPR:
4626 case LT_EXPR:
4627 if (TYPE_UNSIGNED (TREE_TYPE (arg1)))
4628 arg1 = fold_convert_loc (loc, signed_type_for
4629 (TREE_TYPE (arg1)), arg1);
4630 tem = fold_build1_loc (loc, ABS_EXPR, TREE_TYPE (arg1), arg1);
4631 return negate_expr (fold_convert_loc (loc, type, tem));
4632 default:
4633 gcc_assert (TREE_CODE_CLASS (comp_code) == tcc_comparison);
4634 break;
4637 /* A != 0 ? A : 0 is simply A, unless A is -0. Likewise
4638 A == 0 ? A : 0 is always 0 unless A is -0. Note that
4639 both transformations are correct when A is NaN: A != 0
4640 is then true, and A == 0 is false. */
4642 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (type))
4643 && integer_zerop (arg01) && integer_zerop (arg2))
4645 if (comp_code == NE_EXPR)
4646 return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
4647 else if (comp_code == EQ_EXPR)
4648 return build_int_cst (type, 0);
4651 /* Try some transformations of A op B ? A : B.
4653 A == B? A : B same as B
4654 A != B? A : B same as A
4655 A >= B? A : B same as max (A, B)
4656 A > B? A : B same as max (B, A)
4657 A <= B? A : B same as min (A, B)
4658 A < B? A : B same as min (B, A)
4660 As above, these transformations don't work in the presence
4661 of signed zeros. For example, if A and B are zeros of
4662 opposite sign, the first two transformations will change
4663 the sign of the result. In the last four, the original
4664 expressions give different results for (A=+0, B=-0) and
4665 (A=-0, B=+0), but the transformed expressions do not.
4667 The first two transformations are correct if either A or B
4668 is a NaN. In the first transformation, the condition will
4669 be false, and B will indeed be chosen. In the case of the
4670 second transformation, the condition A != B will be true,
4671 and A will be chosen.
4673 The conversions to max() and min() are not correct if B is
4674 a number and A is not. The conditions in the original
4675 expressions will be false, so all four give B. The min()
4676 and max() versions would give a NaN instead. */
4677 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (type))
4678 && operand_equal_for_comparison_p (arg01, arg2, arg00)
4679 /* Avoid these transformations if the COND_EXPR may be used
4680 as an lvalue in the C++ front-end. PR c++/19199. */
4681 && (in_gimple_form
4682 || (strcmp (lang_hooks.name, "GNU C++") != 0
4683 && strcmp (lang_hooks.name, "GNU Objective-C++") != 0)
4684 || ! maybe_lvalue_p (arg1)
4685 || ! maybe_lvalue_p (arg2)))
4687 tree comp_op0 = arg00;
4688 tree comp_op1 = arg01;
4689 tree comp_type = TREE_TYPE (comp_op0);
4691 /* Avoid adding NOP_EXPRs in case this is an lvalue. */
4692 if (TYPE_MAIN_VARIANT (comp_type) == TYPE_MAIN_VARIANT (type))
4694 comp_type = type;
4695 comp_op0 = arg1;
4696 comp_op1 = arg2;
4699 switch (comp_code)
4701 case EQ_EXPR:
4702 return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, arg2));
4703 case NE_EXPR:
4704 return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
4705 case LE_EXPR:
4706 case LT_EXPR:
4707 case UNLE_EXPR:
4708 case UNLT_EXPR:
4709 /* In C++ a ?: expression can be an lvalue, so put the
4710 operand which will be used if they are equal first
4711 so that we can convert this back to the
4712 corresponding COND_EXPR. */
4713 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
4715 comp_op0 = fold_convert_loc (loc, comp_type, comp_op0);
4716 comp_op1 = fold_convert_loc (loc, comp_type, comp_op1);
4717 tem = (comp_code == LE_EXPR || comp_code == UNLE_EXPR)
4718 ? fold_build2_loc (loc, MIN_EXPR, comp_type, comp_op0, comp_op1)
4719 : fold_build2_loc (loc, MIN_EXPR, comp_type,
4720 comp_op1, comp_op0);
4721 return pedantic_non_lvalue_loc (loc,
4722 fold_convert_loc (loc, type, tem));
4724 break;
4725 case GE_EXPR:
4726 case GT_EXPR:
4727 case UNGE_EXPR:
4728 case UNGT_EXPR:
4729 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
4731 comp_op0 = fold_convert_loc (loc, comp_type, comp_op0);
4732 comp_op1 = fold_convert_loc (loc, comp_type, comp_op1);
4733 tem = (comp_code == GE_EXPR || comp_code == UNGE_EXPR)
4734 ? fold_build2_loc (loc, MAX_EXPR, comp_type, comp_op0, comp_op1)
4735 : fold_build2_loc (loc, MAX_EXPR, comp_type,
4736 comp_op1, comp_op0);
4737 return pedantic_non_lvalue_loc (loc,
4738 fold_convert_loc (loc, type, tem));
4740 break;
4741 case UNEQ_EXPR:
4742 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
4743 return pedantic_non_lvalue_loc (loc,
4744 fold_convert_loc (loc, type, arg2));
4745 break;
4746 case LTGT_EXPR:
4747 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
4748 return pedantic_non_lvalue_loc (loc,
4749 fold_convert_loc (loc, type, arg1));
4750 break;
4751 default:
4752 gcc_assert (TREE_CODE_CLASS (comp_code) == tcc_comparison);
4753 break;
4757 /* If this is A op C1 ? A : C2 with C1 and C2 constant integers,
4758 we might still be able to simplify this. For example,
4759 if C1 is one less or one more than C2, this might have started
4760 out as a MIN or MAX and been transformed by this function.
4761 Only good for INTEGER_TYPEs, because we need TYPE_MAX_VALUE. */
4763 if (INTEGRAL_TYPE_P (type)
4764 && TREE_CODE (arg01) == INTEGER_CST
4765 && TREE_CODE (arg2) == INTEGER_CST)
4766 switch (comp_code)
4768 case EQ_EXPR:
4769 if (TREE_CODE (arg1) == INTEGER_CST)
4770 break;
4771 /* We can replace A with C1 in this case. */
4772 arg1 = fold_convert_loc (loc, type, arg01);
4773 return fold_build3_loc (loc, COND_EXPR, type, arg0, arg1, arg2);
4775 case LT_EXPR:
4776 /* If C1 is C2 + 1, this is min(A, C2), but use ARG00's type for
4777 MIN_EXPR, to preserve the signedness of the comparison. */
4778 if (! operand_equal_p (arg2, TYPE_MAX_VALUE (type),
4779 OEP_ONLY_CONST)
4780 && operand_equal_p (arg01,
4781 const_binop (PLUS_EXPR, arg2,
4782 build_int_cst (type, 1)),
4783 OEP_ONLY_CONST))
4785 tem = fold_build2_loc (loc, MIN_EXPR, TREE_TYPE (arg00), arg00,
4786 fold_convert_loc (loc, TREE_TYPE (arg00),
4787 arg2));
4788 return pedantic_non_lvalue_loc (loc,
4789 fold_convert_loc (loc, type, tem));
4791 break;
4793 case LE_EXPR:
4794 /* If C1 is C2 - 1, this is min(A, C2), with the same care
4795 as above. */
4796 if (! operand_equal_p (arg2, TYPE_MIN_VALUE (type),
4797 OEP_ONLY_CONST)
4798 && operand_equal_p (arg01,
4799 const_binop (MINUS_EXPR, arg2,
4800 build_int_cst (type, 1)),
4801 OEP_ONLY_CONST))
4803 tem = fold_build2_loc (loc, MIN_EXPR, TREE_TYPE (arg00), arg00,
4804 fold_convert_loc (loc, TREE_TYPE (arg00),
4805 arg2));
4806 return pedantic_non_lvalue_loc (loc,
4807 fold_convert_loc (loc, type, tem));
4809 break;
4811 case GT_EXPR:
4812 /* If C1 is C2 - 1, this is max(A, C2), but use ARG00's type for
4813 MAX_EXPR, to preserve the signedness of the comparison. */
4814 if (! operand_equal_p (arg2, TYPE_MIN_VALUE (type),
4815 OEP_ONLY_CONST)
4816 && operand_equal_p (arg01,
4817 const_binop (MINUS_EXPR, arg2,
4818 build_int_cst (type, 1)),
4819 OEP_ONLY_CONST))
4821 tem = fold_build2_loc (loc, MAX_EXPR, TREE_TYPE (arg00), arg00,
4822 fold_convert_loc (loc, TREE_TYPE (arg00),
4823 arg2));
4824 return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, tem));
4826 break;
4828 case GE_EXPR:
4829 /* If C1 is C2 + 1, this is max(A, C2), with the same care as above. */
4830 if (! operand_equal_p (arg2, TYPE_MAX_VALUE (type),
4831 OEP_ONLY_CONST)
4832 && operand_equal_p (arg01,
4833 const_binop (PLUS_EXPR, arg2,
4834 build_int_cst (type, 1)),
4835 OEP_ONLY_CONST))
4837 tem = fold_build2_loc (loc, MAX_EXPR, TREE_TYPE (arg00), arg00,
4838 fold_convert_loc (loc, TREE_TYPE (arg00),
4839 arg2));
4840 return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, tem));
4842 break;
4843 case NE_EXPR:
4844 break;
4845 default:
4846 gcc_unreachable ();
4849 return NULL_TREE;
4854 #ifndef LOGICAL_OP_NON_SHORT_CIRCUIT
4855 #define LOGICAL_OP_NON_SHORT_CIRCUIT \
4856 (BRANCH_COST (optimize_function_for_speed_p (cfun), \
4857 false) >= 2)
4858 #endif
4860 /* EXP is some logical combination of boolean tests. See if we can
4861 merge it into some range test. Return the new tree if so. */
4863 static tree
4864 fold_range_test (location_t loc, enum tree_code code, tree type,
4865 tree op0, tree op1)
4867 int or_op = (code == TRUTH_ORIF_EXPR
4868 || code == TRUTH_OR_EXPR);
4869 int in0_p, in1_p, in_p;
4870 tree low0, low1, low, high0, high1, high;
4871 bool strict_overflow_p = false;
4872 tree lhs = make_range (op0, &in0_p, &low0, &high0, &strict_overflow_p);
4873 tree rhs = make_range (op1, &in1_p, &low1, &high1, &strict_overflow_p);
4874 tree tem;
4875 const char * const warnmsg = G_("assuming signed overflow does not occur "
4876 "when simplifying range test");
4878 /* If this is an OR operation, invert both sides; we will invert
4879 again at the end. */
4880 if (or_op)
4881 in0_p = ! in0_p, in1_p = ! in1_p;
4883 /* If both expressions are the same, if we can merge the ranges, and we
4884 can build the range test, return it or it inverted. If one of the
4885 ranges is always true or always false, consider it to be the same
4886 expression as the other. */
4887 if ((lhs == 0 || rhs == 0 || operand_equal_p (lhs, rhs, 0))
4888 && merge_ranges (&in_p, &low, &high, in0_p, low0, high0,
4889 in1_p, low1, high1)
4890 && 0 != (tem = (build_range_check (UNKNOWN_LOCATION, type,
4891 lhs != 0 ? lhs
4892 : rhs != 0 ? rhs : integer_zero_node,
4893 in_p, low, high))))
4895 if (strict_overflow_p)
4896 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_COMPARISON);
4897 return or_op ? invert_truthvalue_loc (loc, tem) : tem;
4900 /* On machines where the branch cost is expensive, if this is a
4901 short-circuited branch and the underlying object on both sides
4902 is the same, make a non-short-circuit operation. */
4903 else if (LOGICAL_OP_NON_SHORT_CIRCUIT
4904 && lhs != 0 && rhs != 0
4905 && (code == TRUTH_ANDIF_EXPR
4906 || code == TRUTH_ORIF_EXPR)
4907 && operand_equal_p (lhs, rhs, 0))
4909 /* If simple enough, just rewrite. Otherwise, make a SAVE_EXPR
4910 unless we are at top level or LHS contains a PLACEHOLDER_EXPR, in
4911 which cases we can't do this. */
4912 if (simple_operand_p (lhs))
4914 tem = build2 (code == TRUTH_ANDIF_EXPR
4915 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR,
4916 type, op0, op1);
4917 SET_EXPR_LOCATION (tem, loc);
4918 return tem;
4921 else if (lang_hooks.decls.global_bindings_p () == 0
4922 && ! CONTAINS_PLACEHOLDER_P (lhs))
4924 tree common = save_expr (lhs);
4926 if (0 != (lhs = build_range_check (loc, type, common,
4927 or_op ? ! in0_p : in0_p,
4928 low0, high0))
4929 && (0 != (rhs = build_range_check (loc, type, common,
4930 or_op ? ! in1_p : in1_p,
4931 low1, high1))))
4933 if (strict_overflow_p)
4934 fold_overflow_warning (warnmsg,
4935 WARN_STRICT_OVERFLOW_COMPARISON);
4936 tem = build2 (code == TRUTH_ANDIF_EXPR
4937 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR,
4938 type, lhs, rhs);
4939 SET_EXPR_LOCATION (tem, loc);
4940 return tem;
4945 return 0;
4948 /* Subroutine for fold_truthop: C is an INTEGER_CST interpreted as a P
4949 bit value. Arrange things so the extra bits will be set to zero if and
4950 only if C is signed-extended to its full width. If MASK is nonzero,
4951 it is an INTEGER_CST that should be AND'ed with the extra bits. */
4953 static tree
4954 unextend (tree c, int p, int unsignedp, tree mask)
4956 tree type = TREE_TYPE (c);
4957 int modesize = GET_MODE_BITSIZE (TYPE_MODE (type));
4958 tree temp;
4960 if (p == modesize || unsignedp)
4961 return c;
4963 /* We work by getting just the sign bit into the low-order bit, then
4964 into the high-order bit, then sign-extend. We then XOR that value
4965 with C. */
4966 temp = const_binop (RSHIFT_EXPR, c, size_int (p - 1));
4967 temp = const_binop (BIT_AND_EXPR, temp, size_int (1));
4969 /* We must use a signed type in order to get an arithmetic right shift.
4970 However, we must also avoid introducing accidental overflows, so that
4971 a subsequent call to integer_zerop will work. Hence we must
4972 do the type conversion here. At this point, the constant is either
4973 zero or one, and the conversion to a signed type can never overflow.
4974 We could get an overflow if this conversion is done anywhere else. */
4975 if (TYPE_UNSIGNED (type))
4976 temp = fold_convert (signed_type_for (type), temp);
4978 temp = const_binop (LSHIFT_EXPR, temp, size_int (modesize - 1));
4979 temp = const_binop (RSHIFT_EXPR, temp, size_int (modesize - p - 1));
4980 if (mask != 0)
4981 temp = const_binop (BIT_AND_EXPR, temp,
4982 fold_convert (TREE_TYPE (c), mask));
4983 /* If necessary, convert the type back to match the type of C. */
4984 if (TYPE_UNSIGNED (type))
4985 temp = fold_convert (type, temp);
4987 return fold_convert (type, const_binop (BIT_XOR_EXPR, c, temp));
4990 /* For an expression that has the form
4991 (A && B) || ~B
4993 (A || B) && ~B,
4994 we can drop one of the inner expressions and simplify to
4995 A || ~B
4997 A && ~B
4998 LOC is the location of the resulting expression. OP is the inner
4999 logical operation; the left-hand side in the examples above, while CMPOP
5000 is the right-hand side. RHS_ONLY is used to prevent us from accidentally
5001 removing a condition that guards another, as in
5002 (A != NULL && A->...) || A == NULL
5003 which we must not transform. If RHS_ONLY is true, only eliminate the
5004 right-most operand of the inner logical operation. */
5006 static tree
5007 merge_truthop_with_opposite_arm (location_t loc, tree op, tree cmpop,
5008 bool rhs_only)
5010 tree type = TREE_TYPE (cmpop);
5011 enum tree_code code = TREE_CODE (cmpop);
5012 enum tree_code truthop_code = TREE_CODE (op);
5013 tree lhs = TREE_OPERAND (op, 0);
5014 tree rhs = TREE_OPERAND (op, 1);
5015 tree orig_lhs = lhs, orig_rhs = rhs;
5016 enum tree_code rhs_code = TREE_CODE (rhs);
5017 enum tree_code lhs_code = TREE_CODE (lhs);
5018 enum tree_code inv_code;
5020 if (TREE_SIDE_EFFECTS (op) || TREE_SIDE_EFFECTS (cmpop))
5021 return NULL_TREE;
5023 if (TREE_CODE_CLASS (code) != tcc_comparison)
5024 return NULL_TREE;
5026 if (rhs_code == truthop_code)
5028 tree newrhs = merge_truthop_with_opposite_arm (loc, rhs, cmpop, rhs_only);
5029 if (newrhs != NULL_TREE)
5031 rhs = newrhs;
5032 rhs_code = TREE_CODE (rhs);
5035 if (lhs_code == truthop_code && !rhs_only)
5037 tree newlhs = merge_truthop_with_opposite_arm (loc, lhs, cmpop, false);
5038 if (newlhs != NULL_TREE)
5040 lhs = newlhs;
5041 lhs_code = TREE_CODE (lhs);
5045 inv_code = invert_tree_comparison (code, HONOR_NANS (TYPE_MODE (type)));
5046 if (inv_code == rhs_code
5047 && operand_equal_p (TREE_OPERAND (rhs, 0), TREE_OPERAND (cmpop, 0), 0)
5048 && operand_equal_p (TREE_OPERAND (rhs, 1), TREE_OPERAND (cmpop, 1), 0))
5049 return lhs;
5050 if (!rhs_only && inv_code == lhs_code
5051 && operand_equal_p (TREE_OPERAND (lhs, 0), TREE_OPERAND (cmpop, 0), 0)
5052 && operand_equal_p (TREE_OPERAND (lhs, 1), TREE_OPERAND (cmpop, 1), 0))
5053 return rhs;
5054 if (rhs != orig_rhs || lhs != orig_lhs)
5055 return fold_build2_loc (loc, truthop_code, TREE_TYPE (cmpop),
5056 lhs, rhs);
5057 return NULL_TREE;
5060 /* Find ways of folding logical expressions of LHS and RHS:
5061 Try to merge two comparisons to the same innermost item.
5062 Look for range tests like "ch >= '0' && ch <= '9'".
5063 Look for combinations of simple terms on machines with expensive branches
5064 and evaluate the RHS unconditionally.
5066 For example, if we have p->a == 2 && p->b == 4 and we can make an
5067 object large enough to span both A and B, we can do this with a comparison
5068 against the object ANDed with the a mask.
5070 If we have p->a == q->a && p->b == q->b, we may be able to use bit masking
5071 operations to do this with one comparison.
5073 We check for both normal comparisons and the BIT_AND_EXPRs made this by
5074 function and the one above.
5076 CODE is the logical operation being done. It can be TRUTH_ANDIF_EXPR,
5077 TRUTH_AND_EXPR, TRUTH_ORIF_EXPR, or TRUTH_OR_EXPR.
5079 TRUTH_TYPE is the type of the logical operand and LHS and RHS are its
5080 two operands.
5082 We return the simplified tree or 0 if no optimization is possible. */
5084 static tree
5085 fold_truthop (location_t loc, enum tree_code code, tree truth_type,
5086 tree lhs, tree rhs)
5088 /* If this is the "or" of two comparisons, we can do something if
5089 the comparisons are NE_EXPR. If this is the "and", we can do something
5090 if the comparisons are EQ_EXPR. I.e.,
5091 (a->b == 2 && a->c == 4) can become (a->new == NEW).
5093 WANTED_CODE is this operation code. For single bit fields, we can
5094 convert EQ_EXPR to NE_EXPR so we need not reject the "wrong"
5095 comparison for one-bit fields. */
5097 enum tree_code wanted_code;
5098 enum tree_code lcode, rcode;
5099 tree ll_arg, lr_arg, rl_arg, rr_arg;
5100 tree ll_inner, lr_inner, rl_inner, rr_inner;
5101 HOST_WIDE_INT ll_bitsize, ll_bitpos, lr_bitsize, lr_bitpos;
5102 HOST_WIDE_INT rl_bitsize, rl_bitpos, rr_bitsize, rr_bitpos;
5103 HOST_WIDE_INT xll_bitpos, xlr_bitpos, xrl_bitpos, xrr_bitpos;
5104 HOST_WIDE_INT lnbitsize, lnbitpos, rnbitsize, rnbitpos;
5105 int ll_unsignedp, lr_unsignedp, rl_unsignedp, rr_unsignedp;
5106 enum machine_mode ll_mode, lr_mode, rl_mode, rr_mode;
5107 enum machine_mode lnmode, rnmode;
5108 tree ll_mask, lr_mask, rl_mask, rr_mask;
5109 tree ll_and_mask, lr_and_mask, rl_and_mask, rr_and_mask;
5110 tree l_const, r_const;
5111 tree lntype, rntype, result;
5112 HOST_WIDE_INT first_bit, end_bit;
5113 int volatilep;
5114 tree orig_lhs = lhs, orig_rhs = rhs;
5115 enum tree_code orig_code = code;
5117 /* Start by getting the comparison codes. Fail if anything is volatile.
5118 If one operand is a BIT_AND_EXPR with the constant one, treat it as if
5119 it were surrounded with a NE_EXPR. */
5121 if (TREE_SIDE_EFFECTS (lhs) || TREE_SIDE_EFFECTS (rhs))
5122 return 0;
5124 lcode = TREE_CODE (lhs);
5125 rcode = TREE_CODE (rhs);
5127 if (lcode == BIT_AND_EXPR && integer_onep (TREE_OPERAND (lhs, 1)))
5129 lhs = build2 (NE_EXPR, truth_type, lhs,
5130 build_int_cst (TREE_TYPE (lhs), 0));
5131 lcode = NE_EXPR;
5134 if (rcode == BIT_AND_EXPR && integer_onep (TREE_OPERAND (rhs, 1)))
5136 rhs = build2 (NE_EXPR, truth_type, rhs,
5137 build_int_cst (TREE_TYPE (rhs), 0));
5138 rcode = NE_EXPR;
5141 if (TREE_CODE_CLASS (lcode) != tcc_comparison
5142 || TREE_CODE_CLASS (rcode) != tcc_comparison)
5143 return 0;
5145 ll_arg = TREE_OPERAND (lhs, 0);
5146 lr_arg = TREE_OPERAND (lhs, 1);
5147 rl_arg = TREE_OPERAND (rhs, 0);
5148 rr_arg = TREE_OPERAND (rhs, 1);
5150 /* Simplify (x<y) && (x==y) into (x<=y) and related optimizations. */
5151 if (simple_operand_p (ll_arg)
5152 && simple_operand_p (lr_arg))
5154 tree result;
5155 if (operand_equal_p (ll_arg, rl_arg, 0)
5156 && operand_equal_p (lr_arg, rr_arg, 0))
5158 result = combine_comparisons (loc, code, lcode, rcode,
5159 truth_type, ll_arg, lr_arg);
5160 if (result)
5161 return result;
5163 else if (operand_equal_p (ll_arg, rr_arg, 0)
5164 && operand_equal_p (lr_arg, rl_arg, 0))
5166 result = combine_comparisons (loc, code, lcode,
5167 swap_tree_comparison (rcode),
5168 truth_type, ll_arg, lr_arg);
5169 if (result)
5170 return result;
5174 code = ((code == TRUTH_AND_EXPR || code == TRUTH_ANDIF_EXPR)
5175 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR);
5177 /* If the RHS can be evaluated unconditionally and its operands are
5178 simple, it wins to evaluate the RHS unconditionally on machines
5179 with expensive branches. In this case, this isn't a comparison
5180 that can be merged. Avoid doing this if the RHS is a floating-point
5181 comparison since those can trap. */
5183 if (BRANCH_COST (optimize_function_for_speed_p (cfun),
5184 false) >= 2
5185 && ! FLOAT_TYPE_P (TREE_TYPE (rl_arg))
5186 && simple_operand_p (rl_arg)
5187 && simple_operand_p (rr_arg))
5189 /* Convert (a != 0) || (b != 0) into (a | b) != 0. */
5190 if (code == TRUTH_OR_EXPR
5191 && lcode == NE_EXPR && integer_zerop (lr_arg)
5192 && rcode == NE_EXPR && integer_zerop (rr_arg)
5193 && TREE_TYPE (ll_arg) == TREE_TYPE (rl_arg)
5194 && INTEGRAL_TYPE_P (TREE_TYPE (ll_arg)))
5196 result = build2 (NE_EXPR, truth_type,
5197 build2 (BIT_IOR_EXPR, TREE_TYPE (ll_arg),
5198 ll_arg, rl_arg),
5199 build_int_cst (TREE_TYPE (ll_arg), 0));
5200 goto fold_truthop_exit;
5203 /* Convert (a == 0) && (b == 0) into (a | b) == 0. */
5204 if (code == TRUTH_AND_EXPR
5205 && lcode == EQ_EXPR && integer_zerop (lr_arg)
5206 && rcode == EQ_EXPR && integer_zerop (rr_arg)
5207 && TREE_TYPE (ll_arg) == TREE_TYPE (rl_arg)
5208 && INTEGRAL_TYPE_P (TREE_TYPE (ll_arg)))
5210 result = build2 (EQ_EXPR, truth_type,
5211 build2 (BIT_IOR_EXPR, TREE_TYPE (ll_arg),
5212 ll_arg, rl_arg),
5213 build_int_cst (TREE_TYPE (ll_arg), 0));
5214 goto fold_truthop_exit;
5217 if (LOGICAL_OP_NON_SHORT_CIRCUIT)
5219 if (code != orig_code || lhs != orig_lhs || rhs != orig_rhs)
5221 result = build2 (code, truth_type, lhs, rhs);
5222 goto fold_truthop_exit;
5224 return NULL_TREE;
5228 /* See if the comparisons can be merged. Then get all the parameters for
5229 each side. */
5231 if ((lcode != EQ_EXPR && lcode != NE_EXPR)
5232 || (rcode != EQ_EXPR && rcode != NE_EXPR))
5233 return 0;
5235 volatilep = 0;
5236 ll_inner = decode_field_reference (loc, ll_arg,
5237 &ll_bitsize, &ll_bitpos, &ll_mode,
5238 &ll_unsignedp, &volatilep, &ll_mask,
5239 &ll_and_mask);
5240 lr_inner = decode_field_reference (loc, lr_arg,
5241 &lr_bitsize, &lr_bitpos, &lr_mode,
5242 &lr_unsignedp, &volatilep, &lr_mask,
5243 &lr_and_mask);
5244 rl_inner = decode_field_reference (loc, rl_arg,
5245 &rl_bitsize, &rl_bitpos, &rl_mode,
5246 &rl_unsignedp, &volatilep, &rl_mask,
5247 &rl_and_mask);
5248 rr_inner = decode_field_reference (loc, rr_arg,
5249 &rr_bitsize, &rr_bitpos, &rr_mode,
5250 &rr_unsignedp, &volatilep, &rr_mask,
5251 &rr_and_mask);
5253 /* It must be true that the inner operation on the lhs of each
5254 comparison must be the same if we are to be able to do anything.
5255 Then see if we have constants. If not, the same must be true for
5256 the rhs's. */
5257 if (volatilep || ll_inner == 0 || rl_inner == 0
5258 || ! operand_equal_p (ll_inner, rl_inner, 0))
5259 return 0;
5261 if (TREE_CODE (lr_arg) == INTEGER_CST
5262 && TREE_CODE (rr_arg) == INTEGER_CST)
5263 l_const = lr_arg, r_const = rr_arg;
5264 else if (lr_inner == 0 || rr_inner == 0
5265 || ! operand_equal_p (lr_inner, rr_inner, 0))
5266 return 0;
5267 else
5268 l_const = r_const = 0;
5270 /* If either comparison code is not correct for our logical operation,
5271 fail. However, we can convert a one-bit comparison against zero into
5272 the opposite comparison against that bit being set in the field. */
5274 wanted_code = (code == TRUTH_AND_EXPR ? EQ_EXPR : NE_EXPR);
5275 if (lcode != wanted_code)
5277 if (l_const && integer_zerop (l_const) && integer_pow2p (ll_mask))
5279 /* Make the left operand unsigned, since we are only interested
5280 in the value of one bit. Otherwise we are doing the wrong
5281 thing below. */
5282 ll_unsignedp = 1;
5283 l_const = ll_mask;
5285 else
5286 return 0;
5289 /* This is analogous to the code for l_const above. */
5290 if (rcode != wanted_code)
5292 if (r_const && integer_zerop (r_const) && integer_pow2p (rl_mask))
5294 rl_unsignedp = 1;
5295 r_const = rl_mask;
5297 else
5298 return 0;
5301 /* See if we can find a mode that contains both fields being compared on
5302 the left. If we can't, fail. Otherwise, update all constants and masks
5303 to be relative to a field of that size. */
5304 first_bit = MIN (ll_bitpos, rl_bitpos);
5305 end_bit = MAX (ll_bitpos + ll_bitsize, rl_bitpos + rl_bitsize);
5306 lnmode = get_best_mode (end_bit - first_bit, first_bit,
5307 TYPE_ALIGN (TREE_TYPE (ll_inner)), word_mode,
5308 volatilep);
5309 if (lnmode == VOIDmode)
5310 return 0;
5312 lnbitsize = GET_MODE_BITSIZE (lnmode);
5313 lnbitpos = first_bit & ~ (lnbitsize - 1);
5314 lntype = lang_hooks.types.type_for_size (lnbitsize, 1);
5315 xll_bitpos = ll_bitpos - lnbitpos, xrl_bitpos = rl_bitpos - lnbitpos;
5317 if (BYTES_BIG_ENDIAN)
5319 xll_bitpos = lnbitsize - xll_bitpos - ll_bitsize;
5320 xrl_bitpos = lnbitsize - xrl_bitpos - rl_bitsize;
5323 ll_mask = const_binop (LSHIFT_EXPR, fold_convert_loc (loc, lntype, ll_mask),
5324 size_int (xll_bitpos));
5325 rl_mask = const_binop (LSHIFT_EXPR, fold_convert_loc (loc, lntype, rl_mask),
5326 size_int (xrl_bitpos));
5328 if (l_const)
5330 l_const = fold_convert_loc (loc, lntype, l_const);
5331 l_const = unextend (l_const, ll_bitsize, ll_unsignedp, ll_and_mask);
5332 l_const = const_binop (LSHIFT_EXPR, l_const, size_int (xll_bitpos));
5333 if (! integer_zerop (const_binop (BIT_AND_EXPR, l_const,
5334 fold_build1_loc (loc, BIT_NOT_EXPR,
5335 lntype, ll_mask))))
5337 warning (0, "comparison is always %d", wanted_code == NE_EXPR);
5339 return constant_boolean_node (wanted_code == NE_EXPR, truth_type);
5342 if (r_const)
5344 r_const = fold_convert_loc (loc, lntype, r_const);
5345 r_const = unextend (r_const, rl_bitsize, rl_unsignedp, rl_and_mask);
5346 r_const = const_binop (LSHIFT_EXPR, r_const, size_int (xrl_bitpos));
5347 if (! integer_zerop (const_binop (BIT_AND_EXPR, r_const,
5348 fold_build1_loc (loc, BIT_NOT_EXPR,
5349 lntype, rl_mask))))
5351 warning (0, "comparison is always %d", wanted_code == NE_EXPR);
5353 return constant_boolean_node (wanted_code == NE_EXPR, truth_type);
5357 /* If the right sides are not constant, do the same for it. Also,
5358 disallow this optimization if a size or signedness mismatch occurs
5359 between the left and right sides. */
5360 if (l_const == 0)
5362 if (ll_bitsize != lr_bitsize || rl_bitsize != rr_bitsize
5363 || ll_unsignedp != lr_unsignedp || rl_unsignedp != rr_unsignedp
5364 /* Make sure the two fields on the right
5365 correspond to the left without being swapped. */
5366 || ll_bitpos - rl_bitpos != lr_bitpos - rr_bitpos)
5367 return 0;
5369 first_bit = MIN (lr_bitpos, rr_bitpos);
5370 end_bit = MAX (lr_bitpos + lr_bitsize, rr_bitpos + rr_bitsize);
5371 rnmode = get_best_mode (end_bit - first_bit, first_bit,
5372 TYPE_ALIGN (TREE_TYPE (lr_inner)), word_mode,
5373 volatilep);
5374 if (rnmode == VOIDmode)
5375 return 0;
5377 rnbitsize = GET_MODE_BITSIZE (rnmode);
5378 rnbitpos = first_bit & ~ (rnbitsize - 1);
5379 rntype = lang_hooks.types.type_for_size (rnbitsize, 1);
5380 xlr_bitpos = lr_bitpos - rnbitpos, xrr_bitpos = rr_bitpos - rnbitpos;
5382 if (BYTES_BIG_ENDIAN)
5384 xlr_bitpos = rnbitsize - xlr_bitpos - lr_bitsize;
5385 xrr_bitpos = rnbitsize - xrr_bitpos - rr_bitsize;
5388 lr_mask = const_binop (LSHIFT_EXPR, fold_convert_loc (loc,
5389 rntype, lr_mask),
5390 size_int (xlr_bitpos));
5391 rr_mask = const_binop (LSHIFT_EXPR, fold_convert_loc (loc,
5392 rntype, rr_mask),
5393 size_int (xrr_bitpos));
5395 /* Make a mask that corresponds to both fields being compared.
5396 Do this for both items being compared. If the operands are the
5397 same size and the bits being compared are in the same position
5398 then we can do this by masking both and comparing the masked
5399 results. */
5400 ll_mask = const_binop (BIT_IOR_EXPR, ll_mask, rl_mask);
5401 lr_mask = const_binop (BIT_IOR_EXPR, lr_mask, rr_mask);
5402 if (lnbitsize == rnbitsize && xll_bitpos == xlr_bitpos)
5404 lhs = make_bit_field_ref (loc, ll_inner, lntype, lnbitsize, lnbitpos,
5405 ll_unsignedp || rl_unsignedp);
5406 if (! all_ones_mask_p (ll_mask, lnbitsize))
5407 lhs = build2 (BIT_AND_EXPR, lntype, lhs, ll_mask);
5409 rhs = make_bit_field_ref (loc, lr_inner, rntype, rnbitsize, rnbitpos,
5410 lr_unsignedp || rr_unsignedp);
5411 if (! all_ones_mask_p (lr_mask, rnbitsize))
5412 rhs = build2 (BIT_AND_EXPR, rntype, rhs, lr_mask);
5414 result = build2 (wanted_code, truth_type, lhs, rhs);
5415 goto fold_truthop_exit;
5418 /* There is still another way we can do something: If both pairs of
5419 fields being compared are adjacent, we may be able to make a wider
5420 field containing them both.
5422 Note that we still must mask the lhs/rhs expressions. Furthermore,
5423 the mask must be shifted to account for the shift done by
5424 make_bit_field_ref. */
5425 if ((ll_bitsize + ll_bitpos == rl_bitpos
5426 && lr_bitsize + lr_bitpos == rr_bitpos)
5427 || (ll_bitpos == rl_bitpos + rl_bitsize
5428 && lr_bitpos == rr_bitpos + rr_bitsize))
5430 tree type;
5432 lhs = make_bit_field_ref (loc, ll_inner, lntype,
5433 ll_bitsize + rl_bitsize,
5434 MIN (ll_bitpos, rl_bitpos), ll_unsignedp);
5435 rhs = make_bit_field_ref (loc, lr_inner, rntype,
5436 lr_bitsize + rr_bitsize,
5437 MIN (lr_bitpos, rr_bitpos), lr_unsignedp);
5439 ll_mask = const_binop (RSHIFT_EXPR, ll_mask,
5440 size_int (MIN (xll_bitpos, xrl_bitpos)));
5441 lr_mask = const_binop (RSHIFT_EXPR, lr_mask,
5442 size_int (MIN (xlr_bitpos, xrr_bitpos)));
5444 /* Convert to the smaller type before masking out unwanted bits. */
5445 type = lntype;
5446 if (lntype != rntype)
5448 if (lnbitsize > rnbitsize)
5450 lhs = fold_convert_loc (loc, rntype, lhs);
5451 ll_mask = fold_convert_loc (loc, rntype, ll_mask);
5452 type = rntype;
5454 else if (lnbitsize < rnbitsize)
5456 rhs = fold_convert_loc (loc, lntype, rhs);
5457 lr_mask = fold_convert_loc (loc, lntype, lr_mask);
5458 type = lntype;
5462 if (! all_ones_mask_p (ll_mask, ll_bitsize + rl_bitsize))
5463 lhs = build2 (BIT_AND_EXPR, type, lhs, ll_mask);
5465 if (! all_ones_mask_p (lr_mask, lr_bitsize + rr_bitsize))
5466 rhs = build2 (BIT_AND_EXPR, type, rhs, lr_mask);
5468 result = build2 (wanted_code, truth_type, lhs, rhs);
5469 goto fold_truthop_exit;
5472 return 0;
5475 /* Handle the case of comparisons with constants. If there is something in
5476 common between the masks, those bits of the constants must be the same.
5477 If not, the condition is always false. Test for this to avoid generating
5478 incorrect code below. */
5479 result = const_binop (BIT_AND_EXPR, ll_mask, rl_mask);
5480 if (! integer_zerop (result)
5481 && simple_cst_equal (const_binop (BIT_AND_EXPR, result, l_const),
5482 const_binop (BIT_AND_EXPR, result, r_const)) != 1)
5484 if (wanted_code == NE_EXPR)
5486 warning (0, "%<or%> of unmatched not-equal tests is always 1");
5487 return constant_boolean_node (true, truth_type);
5489 else
5491 warning (0, "%<and%> of mutually exclusive equal-tests is always 0");
5492 return constant_boolean_node (false, truth_type);
5496 /* Construct the expression we will return. First get the component
5497 reference we will make. Unless the mask is all ones the width of
5498 that field, perform the mask operation. Then compare with the
5499 merged constant. */
5500 result = make_bit_field_ref (loc, ll_inner, lntype, lnbitsize, lnbitpos,
5501 ll_unsignedp || rl_unsignedp);
5503 ll_mask = const_binop (BIT_IOR_EXPR, ll_mask, rl_mask);
5504 if (! all_ones_mask_p (ll_mask, lnbitsize))
5506 result = build2 (BIT_AND_EXPR, lntype, result, ll_mask);
5507 SET_EXPR_LOCATION (result, loc);
5510 result = build2 (wanted_code, truth_type, result,
5511 const_binop (BIT_IOR_EXPR, l_const, r_const));
5513 fold_truthop_exit:
5514 SET_EXPR_LOCATION (result, loc);
5515 return result;
5518 /* Optimize T, which is a comparison of a MIN_EXPR or MAX_EXPR with a
5519 constant. */
5521 static tree
5522 optimize_minmax_comparison (location_t loc, enum tree_code code, tree type,
5523 tree op0, tree op1)
5525 tree arg0 = op0;
5526 enum tree_code op_code;
5527 tree comp_const;
5528 tree minmax_const;
5529 int consts_equal, consts_lt;
5530 tree inner;
5532 STRIP_SIGN_NOPS (arg0);
5534 op_code = TREE_CODE (arg0);
5535 minmax_const = TREE_OPERAND (arg0, 1);
5536 comp_const = fold_convert_loc (loc, TREE_TYPE (arg0), op1);
5537 consts_equal = tree_int_cst_equal (minmax_const, comp_const);
5538 consts_lt = tree_int_cst_lt (minmax_const, comp_const);
5539 inner = TREE_OPERAND (arg0, 0);
5541 /* If something does not permit us to optimize, return the original tree. */
5542 if ((op_code != MIN_EXPR && op_code != MAX_EXPR)
5543 || TREE_CODE (comp_const) != INTEGER_CST
5544 || TREE_OVERFLOW (comp_const)
5545 || TREE_CODE (minmax_const) != INTEGER_CST
5546 || TREE_OVERFLOW (minmax_const))
5547 return NULL_TREE;
5549 /* Now handle all the various comparison codes. We only handle EQ_EXPR
5550 and GT_EXPR, doing the rest with recursive calls using logical
5551 simplifications. */
5552 switch (code)
5554 case NE_EXPR: case LT_EXPR: case LE_EXPR:
5556 tree tem
5557 = optimize_minmax_comparison (loc,
5558 invert_tree_comparison (code, false),
5559 type, op0, op1);
5560 if (tem)
5561 return invert_truthvalue_loc (loc, tem);
5562 return NULL_TREE;
5565 case GE_EXPR:
5566 return
5567 fold_build2_loc (loc, TRUTH_ORIF_EXPR, type,
5568 optimize_minmax_comparison
5569 (loc, EQ_EXPR, type, arg0, comp_const),
5570 optimize_minmax_comparison
5571 (loc, GT_EXPR, type, arg0, comp_const));
5573 case EQ_EXPR:
5574 if (op_code == MAX_EXPR && consts_equal)
5575 /* MAX (X, 0) == 0 -> X <= 0 */
5576 return fold_build2_loc (loc, LE_EXPR, type, inner, comp_const);
5578 else if (op_code == MAX_EXPR && consts_lt)
5579 /* MAX (X, 0) == 5 -> X == 5 */
5580 return fold_build2_loc (loc, EQ_EXPR, type, inner, comp_const);
5582 else if (op_code == MAX_EXPR)
5583 /* MAX (X, 0) == -1 -> false */
5584 return omit_one_operand_loc (loc, type, integer_zero_node, inner);
5586 else if (consts_equal)
5587 /* MIN (X, 0) == 0 -> X >= 0 */
5588 return fold_build2_loc (loc, GE_EXPR, type, inner, comp_const);
5590 else if (consts_lt)
5591 /* MIN (X, 0) == 5 -> false */
5592 return omit_one_operand_loc (loc, type, integer_zero_node, inner);
5594 else
5595 /* MIN (X, 0) == -1 -> X == -1 */
5596 return fold_build2_loc (loc, EQ_EXPR, type, inner, comp_const);
5598 case GT_EXPR:
5599 if (op_code == MAX_EXPR && (consts_equal || consts_lt))
5600 /* MAX (X, 0) > 0 -> X > 0
5601 MAX (X, 0) > 5 -> X > 5 */
5602 return fold_build2_loc (loc, GT_EXPR, type, inner, comp_const);
5604 else if (op_code == MAX_EXPR)
5605 /* MAX (X, 0) > -1 -> true */
5606 return omit_one_operand_loc (loc, type, integer_one_node, inner);
5608 else if (op_code == MIN_EXPR && (consts_equal || consts_lt))
5609 /* MIN (X, 0) > 0 -> false
5610 MIN (X, 0) > 5 -> false */
5611 return omit_one_operand_loc (loc, type, integer_zero_node, inner);
5613 else
5614 /* MIN (X, 0) > -1 -> X > -1 */
5615 return fold_build2_loc (loc, GT_EXPR, type, inner, comp_const);
5617 default:
5618 return NULL_TREE;
5622 /* T is an integer expression that is being multiplied, divided, or taken a
5623 modulus (CODE says which and what kind of divide or modulus) by a
5624 constant C. See if we can eliminate that operation by folding it with
5625 other operations already in T. WIDE_TYPE, if non-null, is a type that
5626 should be used for the computation if wider than our type.
5628 For example, if we are dividing (X * 8) + (Y * 16) by 4, we can return
5629 (X * 2) + (Y * 4). We must, however, be assured that either the original
5630 expression would not overflow or that overflow is undefined for the type
5631 in the language in question.
5633 If we return a non-null expression, it is an equivalent form of the
5634 original computation, but need not be in the original type.
5636 We set *STRICT_OVERFLOW_P to true if the return values depends on
5637 signed overflow being undefined. Otherwise we do not change
5638 *STRICT_OVERFLOW_P. */
5640 static tree
5641 extract_muldiv (tree t, tree c, enum tree_code code, tree wide_type,
5642 bool *strict_overflow_p)
5644 /* To avoid exponential search depth, refuse to allow recursion past
5645 three levels. Beyond that (1) it's highly unlikely that we'll find
5646 something interesting and (2) we've probably processed it before
5647 when we built the inner expression. */
5649 static int depth;
5650 tree ret;
5652 if (depth > 3)
5653 return NULL;
5655 depth++;
5656 ret = extract_muldiv_1 (t, c, code, wide_type, strict_overflow_p);
5657 depth--;
5659 return ret;
5662 static tree
5663 extract_muldiv_1 (tree t, tree c, enum tree_code code, tree wide_type,
5664 bool *strict_overflow_p)
5666 tree type = TREE_TYPE (t);
5667 enum tree_code tcode = TREE_CODE (t);
5668 tree ctype = (wide_type != 0 && (GET_MODE_SIZE (TYPE_MODE (wide_type))
5669 > GET_MODE_SIZE (TYPE_MODE (type)))
5670 ? wide_type : type);
5671 tree t1, t2;
5672 int same_p = tcode == code;
5673 tree op0 = NULL_TREE, op1 = NULL_TREE;
5674 bool sub_strict_overflow_p;
5676 /* Don't deal with constants of zero here; they confuse the code below. */
5677 if (integer_zerop (c))
5678 return NULL_TREE;
5680 if (TREE_CODE_CLASS (tcode) == tcc_unary)
5681 op0 = TREE_OPERAND (t, 0);
5683 if (TREE_CODE_CLASS (tcode) == tcc_binary)
5684 op0 = TREE_OPERAND (t, 0), op1 = TREE_OPERAND (t, 1);
5686 /* Note that we need not handle conditional operations here since fold
5687 already handles those cases. So just do arithmetic here. */
5688 switch (tcode)
5690 case INTEGER_CST:
5691 /* For a constant, we can always simplify if we are a multiply
5692 or (for divide and modulus) if it is a multiple of our constant. */
5693 if (code == MULT_EXPR
5694 || integer_zerop (const_binop (TRUNC_MOD_EXPR, t, c)))
5695 return const_binop (code, fold_convert (ctype, t),
5696 fold_convert (ctype, c));
5697 break;
5699 CASE_CONVERT: case NON_LVALUE_EXPR:
5700 /* If op0 is an expression ... */
5701 if ((COMPARISON_CLASS_P (op0)
5702 || UNARY_CLASS_P (op0)
5703 || BINARY_CLASS_P (op0)
5704 || VL_EXP_CLASS_P (op0)
5705 || EXPRESSION_CLASS_P (op0))
5706 /* ... and has wrapping overflow, and its type is smaller
5707 than ctype, then we cannot pass through as widening. */
5708 && ((TYPE_OVERFLOW_WRAPS (TREE_TYPE (op0))
5709 && ! (TREE_CODE (TREE_TYPE (op0)) == INTEGER_TYPE
5710 && TYPE_IS_SIZETYPE (TREE_TYPE (op0)))
5711 && (TYPE_PRECISION (ctype)
5712 > TYPE_PRECISION (TREE_TYPE (op0))))
5713 /* ... or this is a truncation (t is narrower than op0),
5714 then we cannot pass through this narrowing. */
5715 || (TYPE_PRECISION (type)
5716 < TYPE_PRECISION (TREE_TYPE (op0)))
5717 /* ... or signedness changes for division or modulus,
5718 then we cannot pass through this conversion. */
5719 || (code != MULT_EXPR
5720 && (TYPE_UNSIGNED (ctype)
5721 != TYPE_UNSIGNED (TREE_TYPE (op0))))
5722 /* ... or has undefined overflow while the converted to
5723 type has not, we cannot do the operation in the inner type
5724 as that would introduce undefined overflow. */
5725 || (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (op0))
5726 && !TYPE_OVERFLOW_UNDEFINED (type))))
5727 break;
5729 /* Pass the constant down and see if we can make a simplification. If
5730 we can, replace this expression with the inner simplification for
5731 possible later conversion to our or some other type. */
5732 if ((t2 = fold_convert (TREE_TYPE (op0), c)) != 0
5733 && TREE_CODE (t2) == INTEGER_CST
5734 && !TREE_OVERFLOW (t2)
5735 && (0 != (t1 = extract_muldiv (op0, t2, code,
5736 code == MULT_EXPR
5737 ? ctype : NULL_TREE,
5738 strict_overflow_p))))
5739 return t1;
5740 break;
5742 case ABS_EXPR:
5743 /* If widening the type changes it from signed to unsigned, then we
5744 must avoid building ABS_EXPR itself as unsigned. */
5745 if (TYPE_UNSIGNED (ctype) && !TYPE_UNSIGNED (type))
5747 tree cstype = (*signed_type_for) (ctype);
5748 if ((t1 = extract_muldiv (op0, c, code, cstype, strict_overflow_p))
5749 != 0)
5751 t1 = fold_build1 (tcode, cstype, fold_convert (cstype, t1));
5752 return fold_convert (ctype, t1);
5754 break;
5756 /* If the constant is negative, we cannot simplify this. */
5757 if (tree_int_cst_sgn (c) == -1)
5758 break;
5759 /* FALLTHROUGH */
5760 case NEGATE_EXPR:
5761 if ((t1 = extract_muldiv (op0, c, code, wide_type, strict_overflow_p))
5762 != 0)
5763 return fold_build1 (tcode, ctype, fold_convert (ctype, t1));
5764 break;
5766 case MIN_EXPR: case MAX_EXPR:
5767 /* If widening the type changes the signedness, then we can't perform
5768 this optimization as that changes the result. */
5769 if (TYPE_UNSIGNED (ctype) != TYPE_UNSIGNED (type))
5770 break;
5772 /* MIN (a, b) / 5 -> MIN (a / 5, b / 5) */
5773 sub_strict_overflow_p = false;
5774 if ((t1 = extract_muldiv (op0, c, code, wide_type,
5775 &sub_strict_overflow_p)) != 0
5776 && (t2 = extract_muldiv (op1, c, code, wide_type,
5777 &sub_strict_overflow_p)) != 0)
5779 if (tree_int_cst_sgn (c) < 0)
5780 tcode = (tcode == MIN_EXPR ? MAX_EXPR : MIN_EXPR);
5781 if (sub_strict_overflow_p)
5782 *strict_overflow_p = true;
5783 return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
5784 fold_convert (ctype, t2));
5786 break;
5788 case LSHIFT_EXPR: case RSHIFT_EXPR:
5789 /* If the second operand is constant, this is a multiplication
5790 or floor division, by a power of two, so we can treat it that
5791 way unless the multiplier or divisor overflows. Signed
5792 left-shift overflow is implementation-defined rather than
5793 undefined in C90, so do not convert signed left shift into
5794 multiplication. */
5795 if (TREE_CODE (op1) == INTEGER_CST
5796 && (tcode == RSHIFT_EXPR || TYPE_UNSIGNED (TREE_TYPE (op0)))
5797 /* const_binop may not detect overflow correctly,
5798 so check for it explicitly here. */
5799 && TYPE_PRECISION (TREE_TYPE (size_one_node)) > TREE_INT_CST_LOW (op1)
5800 && TREE_INT_CST_HIGH (op1) == 0
5801 && 0 != (t1 = fold_convert (ctype,
5802 const_binop (LSHIFT_EXPR,
5803 size_one_node,
5804 op1)))
5805 && !TREE_OVERFLOW (t1))
5806 return extract_muldiv (build2 (tcode == LSHIFT_EXPR
5807 ? MULT_EXPR : FLOOR_DIV_EXPR,
5808 ctype,
5809 fold_convert (ctype, op0),
5810 t1),
5811 c, code, wide_type, strict_overflow_p);
5812 break;
5814 case PLUS_EXPR: case MINUS_EXPR:
5815 /* See if we can eliminate the operation on both sides. If we can, we
5816 can return a new PLUS or MINUS. If we can't, the only remaining
5817 cases where we can do anything are if the second operand is a
5818 constant. */
5819 sub_strict_overflow_p = false;
5820 t1 = extract_muldiv (op0, c, code, wide_type, &sub_strict_overflow_p);
5821 t2 = extract_muldiv (op1, c, code, wide_type, &sub_strict_overflow_p);
5822 if (t1 != 0 && t2 != 0
5823 && (code == MULT_EXPR
5824 /* If not multiplication, we can only do this if both operands
5825 are divisible by c. */
5826 || (multiple_of_p (ctype, op0, c)
5827 && multiple_of_p (ctype, op1, c))))
5829 if (sub_strict_overflow_p)
5830 *strict_overflow_p = true;
5831 return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
5832 fold_convert (ctype, t2));
5835 /* If this was a subtraction, negate OP1 and set it to be an addition.
5836 This simplifies the logic below. */
5837 if (tcode == MINUS_EXPR)
5839 tcode = PLUS_EXPR, op1 = negate_expr (op1);
5840 /* If OP1 was not easily negatable, the constant may be OP0. */
5841 if (TREE_CODE (op0) == INTEGER_CST)
5843 tree tem = op0;
5844 op0 = op1;
5845 op1 = tem;
5846 tem = t1;
5847 t1 = t2;
5848 t2 = tem;
5852 if (TREE_CODE (op1) != INTEGER_CST)
5853 break;
5855 /* If either OP1 or C are negative, this optimization is not safe for
5856 some of the division and remainder types while for others we need
5857 to change the code. */
5858 if (tree_int_cst_sgn (op1) < 0 || tree_int_cst_sgn (c) < 0)
5860 if (code == CEIL_DIV_EXPR)
5861 code = FLOOR_DIV_EXPR;
5862 else if (code == FLOOR_DIV_EXPR)
5863 code = CEIL_DIV_EXPR;
5864 else if (code != MULT_EXPR
5865 && code != CEIL_MOD_EXPR && code != FLOOR_MOD_EXPR)
5866 break;
5869 /* If it's a multiply or a division/modulus operation of a multiple
5870 of our constant, do the operation and verify it doesn't overflow. */
5871 if (code == MULT_EXPR
5872 || integer_zerop (const_binop (TRUNC_MOD_EXPR, op1, c)))
5874 op1 = const_binop (code, fold_convert (ctype, op1),
5875 fold_convert (ctype, c));
5876 /* We allow the constant to overflow with wrapping semantics. */
5877 if (op1 == 0
5878 || (TREE_OVERFLOW (op1) && !TYPE_OVERFLOW_WRAPS (ctype)))
5879 break;
5881 else
5882 break;
5884 /* If we have an unsigned type is not a sizetype, we cannot widen
5885 the operation since it will change the result if the original
5886 computation overflowed. */
5887 if (TYPE_UNSIGNED (ctype)
5888 && ! (TREE_CODE (ctype) == INTEGER_TYPE && TYPE_IS_SIZETYPE (ctype))
5889 && ctype != type)
5890 break;
5892 /* If we were able to eliminate our operation from the first side,
5893 apply our operation to the second side and reform the PLUS. */
5894 if (t1 != 0 && (TREE_CODE (t1) != code || code == MULT_EXPR))
5895 return fold_build2 (tcode, ctype, fold_convert (ctype, t1), op1);
5897 /* The last case is if we are a multiply. In that case, we can
5898 apply the distributive law to commute the multiply and addition
5899 if the multiplication of the constants doesn't overflow. */
5900 if (code == MULT_EXPR)
5901 return fold_build2 (tcode, ctype,
5902 fold_build2 (code, ctype,
5903 fold_convert (ctype, op0),
5904 fold_convert (ctype, c)),
5905 op1);
5907 break;
5909 case MULT_EXPR:
5910 /* We have a special case here if we are doing something like
5911 (C * 8) % 4 since we know that's zero. */
5912 if ((code == TRUNC_MOD_EXPR || code == CEIL_MOD_EXPR
5913 || code == FLOOR_MOD_EXPR || code == ROUND_MOD_EXPR)
5914 /* If the multiplication can overflow we cannot optimize this.
5915 ??? Until we can properly mark individual operations as
5916 not overflowing we need to treat sizetype special here as
5917 stor-layout relies on this opimization to make
5918 DECL_FIELD_BIT_OFFSET always a constant. */
5919 && (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (t))
5920 || (TREE_CODE (TREE_TYPE (t)) == INTEGER_TYPE
5921 && TYPE_IS_SIZETYPE (TREE_TYPE (t))))
5922 && TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST
5923 && integer_zerop (const_binop (TRUNC_MOD_EXPR, op1, c)))
5925 *strict_overflow_p = true;
5926 return omit_one_operand (type, integer_zero_node, op0);
5929 /* ... fall through ... */
5931 case TRUNC_DIV_EXPR: case CEIL_DIV_EXPR: case FLOOR_DIV_EXPR:
5932 case ROUND_DIV_EXPR: case EXACT_DIV_EXPR:
5933 /* If we can extract our operation from the LHS, do so and return a
5934 new operation. Likewise for the RHS from a MULT_EXPR. Otherwise,
5935 do something only if the second operand is a constant. */
5936 if (same_p
5937 && (t1 = extract_muldiv (op0, c, code, wide_type,
5938 strict_overflow_p)) != 0)
5939 return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
5940 fold_convert (ctype, op1));
5941 else if (tcode == MULT_EXPR && code == MULT_EXPR
5942 && (t1 = extract_muldiv (op1, c, code, wide_type,
5943 strict_overflow_p)) != 0)
5944 return fold_build2 (tcode, ctype, fold_convert (ctype, op0),
5945 fold_convert (ctype, t1));
5946 else if (TREE_CODE (op1) != INTEGER_CST)
5947 return 0;
5949 /* If these are the same operation types, we can associate them
5950 assuming no overflow. */
5951 if (tcode == code
5952 && 0 != (t1 = int_const_binop (MULT_EXPR,
5953 fold_convert (ctype, op1),
5954 fold_convert (ctype, c), 1))
5955 && 0 != (t1 = force_fit_type_double (ctype, tree_to_double_int (t1),
5956 (TYPE_UNSIGNED (ctype)
5957 && tcode != MULT_EXPR) ? -1 : 1,
5958 TREE_OVERFLOW (t1)))
5959 && !TREE_OVERFLOW (t1))
5960 return fold_build2 (tcode, ctype, fold_convert (ctype, op0), t1);
5962 /* If these operations "cancel" each other, we have the main
5963 optimizations of this pass, which occur when either constant is a
5964 multiple of the other, in which case we replace this with either an
5965 operation or CODE or TCODE.
5967 If we have an unsigned type that is not a sizetype, we cannot do
5968 this since it will change the result if the original computation
5969 overflowed. */
5970 if ((TYPE_OVERFLOW_UNDEFINED (ctype)
5971 || (TREE_CODE (ctype) == INTEGER_TYPE && TYPE_IS_SIZETYPE (ctype)))
5972 && ((code == MULT_EXPR && tcode == EXACT_DIV_EXPR)
5973 || (tcode == MULT_EXPR
5974 && code != TRUNC_MOD_EXPR && code != CEIL_MOD_EXPR
5975 && code != FLOOR_MOD_EXPR && code != ROUND_MOD_EXPR
5976 && code != MULT_EXPR)))
5978 if (integer_zerop (const_binop (TRUNC_MOD_EXPR, op1, c)))
5980 if (TYPE_OVERFLOW_UNDEFINED (ctype))
5981 *strict_overflow_p = true;
5982 return fold_build2 (tcode, ctype, fold_convert (ctype, op0),
5983 fold_convert (ctype,
5984 const_binop (TRUNC_DIV_EXPR,
5985 op1, c)));
5987 else if (integer_zerop (const_binop (TRUNC_MOD_EXPR, c, op1)))
5989 if (TYPE_OVERFLOW_UNDEFINED (ctype))
5990 *strict_overflow_p = true;
5991 return fold_build2 (code, ctype, fold_convert (ctype, op0),
5992 fold_convert (ctype,
5993 const_binop (TRUNC_DIV_EXPR,
5994 c, op1)));
5997 break;
5999 default:
6000 break;
6003 return 0;
6006 /* Return a node which has the indicated constant VALUE (either 0 or
6007 1), and is of the indicated TYPE. */
6009 tree
6010 constant_boolean_node (int value, tree type)
6012 if (type == integer_type_node)
6013 return value ? integer_one_node : integer_zero_node;
6014 else if (type == boolean_type_node)
6015 return value ? boolean_true_node : boolean_false_node;
6016 else
6017 return build_int_cst (type, value);
6021 /* Transform `a + (b ? x : y)' into `b ? (a + x) : (a + y)'.
6022 Transform, `a + (x < y)' into `(x < y) ? (a + 1) : (a + 0)'. Here
6023 CODE corresponds to the `+', COND to the `(b ? x : y)' or `(x < y)'
6024 expression, and ARG to `a'. If COND_FIRST_P is nonzero, then the
6025 COND is the first argument to CODE; otherwise (as in the example
6026 given here), it is the second argument. TYPE is the type of the
6027 original expression. Return NULL_TREE if no simplification is
6028 possible. */
6030 static tree
6031 fold_binary_op_with_conditional_arg (location_t loc,
6032 enum tree_code code,
6033 tree type, tree op0, tree op1,
6034 tree cond, tree arg, int cond_first_p)
6036 tree cond_type = cond_first_p ? TREE_TYPE (op0) : TREE_TYPE (op1);
6037 tree arg_type = cond_first_p ? TREE_TYPE (op1) : TREE_TYPE (op0);
6038 tree test, true_value, false_value;
6039 tree lhs = NULL_TREE;
6040 tree rhs = NULL_TREE;
6042 if (TREE_CODE (cond) == COND_EXPR)
6044 test = TREE_OPERAND (cond, 0);
6045 true_value = TREE_OPERAND (cond, 1);
6046 false_value = TREE_OPERAND (cond, 2);
6047 /* If this operand throws an expression, then it does not make
6048 sense to try to perform a logical or arithmetic operation
6049 involving it. */
6050 if (VOID_TYPE_P (TREE_TYPE (true_value)))
6051 lhs = true_value;
6052 if (VOID_TYPE_P (TREE_TYPE (false_value)))
6053 rhs = false_value;
6055 else
6057 tree testtype = TREE_TYPE (cond);
6058 test = cond;
6059 true_value = constant_boolean_node (true, testtype);
6060 false_value = constant_boolean_node (false, testtype);
6063 /* This transformation is only worthwhile if we don't have to wrap ARG
6064 in a SAVE_EXPR and the operation can be simplified on at least one
6065 of the branches once its pushed inside the COND_EXPR. */
6066 if (!TREE_CONSTANT (arg)
6067 && (TREE_SIDE_EFFECTS (arg)
6068 || TREE_CONSTANT (true_value) || TREE_CONSTANT (false_value)))
6069 return NULL_TREE;
6071 arg = fold_convert_loc (loc, arg_type, arg);
6072 if (lhs == 0)
6074 true_value = fold_convert_loc (loc, cond_type, true_value);
6075 if (cond_first_p)
6076 lhs = fold_build2_loc (loc, code, type, true_value, arg);
6077 else
6078 lhs = fold_build2_loc (loc, code, type, arg, true_value);
6080 if (rhs == 0)
6082 false_value = fold_convert_loc (loc, cond_type, false_value);
6083 if (cond_first_p)
6084 rhs = fold_build2_loc (loc, code, type, false_value, arg);
6085 else
6086 rhs = fold_build2_loc (loc, code, type, arg, false_value);
6089 /* Check that we have simplified at least one of the branches. */
6090 if (!TREE_CONSTANT (arg) && !TREE_CONSTANT (lhs) && !TREE_CONSTANT (rhs))
6091 return NULL_TREE;
6093 return fold_build3_loc (loc, COND_EXPR, type, test, lhs, rhs);
6097 /* Subroutine of fold() that checks for the addition of +/- 0.0.
6099 If !NEGATE, return true if ADDEND is +/-0.0 and, for all X of type
6100 TYPE, X + ADDEND is the same as X. If NEGATE, return true if X -
6101 ADDEND is the same as X.
6103 X + 0 and X - 0 both give X when X is NaN, infinite, or nonzero
6104 and finite. The problematic cases are when X is zero, and its mode
6105 has signed zeros. In the case of rounding towards -infinity,
6106 X - 0 is not the same as X because 0 - 0 is -0. In other rounding
6107 modes, X + 0 is not the same as X because -0 + 0 is 0. */
6109 bool
6110 fold_real_zero_addition_p (const_tree type, const_tree addend, int negate)
6112 if (!real_zerop (addend))
6113 return false;
6115 /* Don't allow the fold with -fsignaling-nans. */
6116 if (HONOR_SNANS (TYPE_MODE (type)))
6117 return false;
6119 /* Allow the fold if zeros aren't signed, or their sign isn't important. */
6120 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (type)))
6121 return true;
6123 /* Treat x + -0 as x - 0 and x - -0 as x + 0. */
6124 if (TREE_CODE (addend) == REAL_CST
6125 && REAL_VALUE_MINUS_ZERO (TREE_REAL_CST (addend)))
6126 negate = !negate;
6128 /* The mode has signed zeros, and we have to honor their sign.
6129 In this situation, there is only one case we can return true for.
6130 X - 0 is the same as X unless rounding towards -infinity is
6131 supported. */
6132 return negate && !HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type));
6135 /* Subroutine of fold() that checks comparisons of built-in math
6136 functions against real constants.
6138 FCODE is the DECL_FUNCTION_CODE of the built-in, CODE is the comparison
6139 operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR, GE_EXPR or LE_EXPR. TYPE
6140 is the type of the result and ARG0 and ARG1 are the operands of the
6141 comparison. ARG1 must be a TREE_REAL_CST.
6143 The function returns the constant folded tree if a simplification
6144 can be made, and NULL_TREE otherwise. */
6146 static tree
6147 fold_mathfn_compare (location_t loc,
6148 enum built_in_function fcode, enum tree_code code,
6149 tree type, tree arg0, tree arg1)
6151 REAL_VALUE_TYPE c;
6153 if (BUILTIN_SQRT_P (fcode))
6155 tree arg = CALL_EXPR_ARG (arg0, 0);
6156 enum machine_mode mode = TYPE_MODE (TREE_TYPE (arg0));
6158 c = TREE_REAL_CST (arg1);
6159 if (REAL_VALUE_NEGATIVE (c))
6161 /* sqrt(x) < y is always false, if y is negative. */
6162 if (code == EQ_EXPR || code == LT_EXPR || code == LE_EXPR)
6163 return omit_one_operand_loc (loc, type, integer_zero_node, arg);
6165 /* sqrt(x) > y is always true, if y is negative and we
6166 don't care about NaNs, i.e. negative values of x. */
6167 if (code == NE_EXPR || !HONOR_NANS (mode))
6168 return omit_one_operand_loc (loc, type, integer_one_node, arg);
6170 /* sqrt(x) > y is the same as x >= 0, if y is negative. */
6171 return fold_build2_loc (loc, GE_EXPR, type, arg,
6172 build_real (TREE_TYPE (arg), dconst0));
6174 else if (code == GT_EXPR || code == GE_EXPR)
6176 REAL_VALUE_TYPE c2;
6178 REAL_ARITHMETIC (c2, MULT_EXPR, c, c);
6179 real_convert (&c2, mode, &c2);
6181 if (REAL_VALUE_ISINF (c2))
6183 /* sqrt(x) > y is x == +Inf, when y is very large. */
6184 if (HONOR_INFINITIES (mode))
6185 return fold_build2_loc (loc, EQ_EXPR, type, arg,
6186 build_real (TREE_TYPE (arg), c2));
6188 /* sqrt(x) > y is always false, when y is very large
6189 and we don't care about infinities. */
6190 return omit_one_operand_loc (loc, type, integer_zero_node, arg);
6193 /* sqrt(x) > c is the same as x > c*c. */
6194 return fold_build2_loc (loc, code, type, arg,
6195 build_real (TREE_TYPE (arg), c2));
6197 else if (code == LT_EXPR || code == LE_EXPR)
6199 REAL_VALUE_TYPE c2;
6201 REAL_ARITHMETIC (c2, MULT_EXPR, c, c);
6202 real_convert (&c2, mode, &c2);
6204 if (REAL_VALUE_ISINF (c2))
6206 /* sqrt(x) < y is always true, when y is a very large
6207 value and we don't care about NaNs or Infinities. */
6208 if (! HONOR_NANS (mode) && ! HONOR_INFINITIES (mode))
6209 return omit_one_operand_loc (loc, type, integer_one_node, arg);
6211 /* sqrt(x) < y is x != +Inf when y is very large and we
6212 don't care about NaNs. */
6213 if (! HONOR_NANS (mode))
6214 return fold_build2_loc (loc, NE_EXPR, type, arg,
6215 build_real (TREE_TYPE (arg), c2));
6217 /* sqrt(x) < y is x >= 0 when y is very large and we
6218 don't care about Infinities. */
6219 if (! HONOR_INFINITIES (mode))
6220 return fold_build2_loc (loc, GE_EXPR, type, arg,
6221 build_real (TREE_TYPE (arg), dconst0));
6223 /* sqrt(x) < y is x >= 0 && x != +Inf, when y is large. */
6224 if (lang_hooks.decls.global_bindings_p () != 0
6225 || CONTAINS_PLACEHOLDER_P (arg))
6226 return NULL_TREE;
6228 arg = save_expr (arg);
6229 return fold_build2_loc (loc, TRUTH_ANDIF_EXPR, type,
6230 fold_build2_loc (loc, GE_EXPR, type, arg,
6231 build_real (TREE_TYPE (arg),
6232 dconst0)),
6233 fold_build2_loc (loc, NE_EXPR, type, arg,
6234 build_real (TREE_TYPE (arg),
6235 c2)));
6238 /* sqrt(x) < c is the same as x < c*c, if we ignore NaNs. */
6239 if (! HONOR_NANS (mode))
6240 return fold_build2_loc (loc, code, type, arg,
6241 build_real (TREE_TYPE (arg), c2));
6243 /* sqrt(x) < c is the same as x >= 0 && x < c*c. */
6244 if (lang_hooks.decls.global_bindings_p () == 0
6245 && ! CONTAINS_PLACEHOLDER_P (arg))
6247 arg = save_expr (arg);
6248 return fold_build2_loc (loc, TRUTH_ANDIF_EXPR, type,
6249 fold_build2_loc (loc, GE_EXPR, type, arg,
6250 build_real (TREE_TYPE (arg),
6251 dconst0)),
6252 fold_build2_loc (loc, code, type, arg,
6253 build_real (TREE_TYPE (arg),
6254 c2)));
6259 return NULL_TREE;
6262 /* Subroutine of fold() that optimizes comparisons against Infinities,
6263 either +Inf or -Inf.
6265 CODE is the comparison operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR,
6266 GE_EXPR or LE_EXPR. TYPE is the type of the result and ARG0 and ARG1
6267 are the operands of the comparison. ARG1 must be a TREE_REAL_CST.
6269 The function returns the constant folded tree if a simplification
6270 can be made, and NULL_TREE otherwise. */
6272 static tree
6273 fold_inf_compare (location_t loc, enum tree_code code, tree type,
6274 tree arg0, tree arg1)
6276 enum machine_mode mode;
6277 REAL_VALUE_TYPE max;
6278 tree temp;
6279 bool neg;
6281 mode = TYPE_MODE (TREE_TYPE (arg0));
6283 /* For negative infinity swap the sense of the comparison. */
6284 neg = REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg1));
6285 if (neg)
6286 code = swap_tree_comparison (code);
6288 switch (code)
6290 case GT_EXPR:
6291 /* x > +Inf is always false, if with ignore sNANs. */
6292 if (HONOR_SNANS (mode))
6293 return NULL_TREE;
6294 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
6296 case LE_EXPR:
6297 /* x <= +Inf is always true, if we don't case about NaNs. */
6298 if (! HONOR_NANS (mode))
6299 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
6301 /* x <= +Inf is the same as x == x, i.e. isfinite(x). */
6302 if (lang_hooks.decls.global_bindings_p () == 0
6303 && ! CONTAINS_PLACEHOLDER_P (arg0))
6305 arg0 = save_expr (arg0);
6306 return fold_build2_loc (loc, EQ_EXPR, type, arg0, arg0);
6308 break;
6310 case EQ_EXPR:
6311 case GE_EXPR:
6312 /* x == +Inf and x >= +Inf are always equal to x > DBL_MAX. */
6313 real_maxval (&max, neg, mode);
6314 return fold_build2_loc (loc, neg ? LT_EXPR : GT_EXPR, type,
6315 arg0, build_real (TREE_TYPE (arg0), max));
6317 case LT_EXPR:
6318 /* x < +Inf is always equal to x <= DBL_MAX. */
6319 real_maxval (&max, neg, mode);
6320 return fold_build2_loc (loc, neg ? GE_EXPR : LE_EXPR, type,
6321 arg0, build_real (TREE_TYPE (arg0), max));
6323 case NE_EXPR:
6324 /* x != +Inf is always equal to !(x > DBL_MAX). */
6325 real_maxval (&max, neg, mode);
6326 if (! HONOR_NANS (mode))
6327 return fold_build2_loc (loc, neg ? GE_EXPR : LE_EXPR, type,
6328 arg0, build_real (TREE_TYPE (arg0), max));
6330 temp = fold_build2_loc (loc, neg ? LT_EXPR : GT_EXPR, type,
6331 arg0, build_real (TREE_TYPE (arg0), max));
6332 return fold_build1_loc (loc, TRUTH_NOT_EXPR, type, temp);
6334 default:
6335 break;
6338 return NULL_TREE;
6341 /* Subroutine of fold() that optimizes comparisons of a division by
6342 a nonzero integer constant against an integer constant, i.e.
6343 X/C1 op C2.
6345 CODE is the comparison operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR,
6346 GE_EXPR or LE_EXPR. TYPE is the type of the result and ARG0 and ARG1
6347 are the operands of the comparison. ARG1 must be a TREE_REAL_CST.
6349 The function returns the constant folded tree if a simplification
6350 can be made, and NULL_TREE otherwise. */
6352 static tree
6353 fold_div_compare (location_t loc,
6354 enum tree_code code, tree type, tree arg0, tree arg1)
6356 tree prod, tmp, hi, lo;
6357 tree arg00 = TREE_OPERAND (arg0, 0);
6358 tree arg01 = TREE_OPERAND (arg0, 1);
6359 double_int val;
6360 bool unsigned_p = TYPE_UNSIGNED (TREE_TYPE (arg0));
6361 bool neg_overflow;
6362 int overflow;
6364 /* We have to do this the hard way to detect unsigned overflow.
6365 prod = int_const_binop (MULT_EXPR, arg01, arg1, 0); */
6366 overflow = mul_double_with_sign (TREE_INT_CST_LOW (arg01),
6367 TREE_INT_CST_HIGH (arg01),
6368 TREE_INT_CST_LOW (arg1),
6369 TREE_INT_CST_HIGH (arg1),
6370 &val.low, &val.high, unsigned_p);
6371 prod = force_fit_type_double (TREE_TYPE (arg00), val, -1, overflow);
6372 neg_overflow = false;
6374 if (unsigned_p)
6376 tmp = int_const_binop (MINUS_EXPR, arg01,
6377 build_int_cst (TREE_TYPE (arg01), 1), 0);
6378 lo = prod;
6380 /* Likewise hi = int_const_binop (PLUS_EXPR, prod, tmp, 0). */
6381 overflow = add_double_with_sign (TREE_INT_CST_LOW (prod),
6382 TREE_INT_CST_HIGH (prod),
6383 TREE_INT_CST_LOW (tmp),
6384 TREE_INT_CST_HIGH (tmp),
6385 &val.low, &val.high, unsigned_p);
6386 hi = force_fit_type_double (TREE_TYPE (arg00), val,
6387 -1, overflow | TREE_OVERFLOW (prod));
6389 else if (tree_int_cst_sgn (arg01) >= 0)
6391 tmp = int_const_binop (MINUS_EXPR, arg01,
6392 build_int_cst (TREE_TYPE (arg01), 1), 0);
6393 switch (tree_int_cst_sgn (arg1))
6395 case -1:
6396 neg_overflow = true;
6397 lo = int_const_binop (MINUS_EXPR, prod, tmp, 0);
6398 hi = prod;
6399 break;
6401 case 0:
6402 lo = fold_negate_const (tmp, TREE_TYPE (arg0));
6403 hi = tmp;
6404 break;
6406 case 1:
6407 hi = int_const_binop (PLUS_EXPR, prod, tmp, 0);
6408 lo = prod;
6409 break;
6411 default:
6412 gcc_unreachable ();
6415 else
6417 /* A negative divisor reverses the relational operators. */
6418 code = swap_tree_comparison (code);
6420 tmp = int_const_binop (PLUS_EXPR, arg01,
6421 build_int_cst (TREE_TYPE (arg01), 1), 0);
6422 switch (tree_int_cst_sgn (arg1))
6424 case -1:
6425 hi = int_const_binop (MINUS_EXPR, prod, tmp, 0);
6426 lo = prod;
6427 break;
6429 case 0:
6430 hi = fold_negate_const (tmp, TREE_TYPE (arg0));
6431 lo = tmp;
6432 break;
6434 case 1:
6435 neg_overflow = true;
6436 lo = int_const_binop (PLUS_EXPR, prod, tmp, 0);
6437 hi = prod;
6438 break;
6440 default:
6441 gcc_unreachable ();
6445 switch (code)
6447 case EQ_EXPR:
6448 if (TREE_OVERFLOW (lo) && TREE_OVERFLOW (hi))
6449 return omit_one_operand_loc (loc, type, integer_zero_node, arg00);
6450 if (TREE_OVERFLOW (hi))
6451 return fold_build2_loc (loc, GE_EXPR, type, arg00, lo);
6452 if (TREE_OVERFLOW (lo))
6453 return fold_build2_loc (loc, LE_EXPR, type, arg00, hi);
6454 return build_range_check (loc, type, arg00, 1, lo, hi);
6456 case NE_EXPR:
6457 if (TREE_OVERFLOW (lo) && TREE_OVERFLOW (hi))
6458 return omit_one_operand_loc (loc, type, integer_one_node, arg00);
6459 if (TREE_OVERFLOW (hi))
6460 return fold_build2_loc (loc, LT_EXPR, type, arg00, lo);
6461 if (TREE_OVERFLOW (lo))
6462 return fold_build2_loc (loc, GT_EXPR, type, arg00, hi);
6463 return build_range_check (loc, type, arg00, 0, lo, hi);
6465 case LT_EXPR:
6466 if (TREE_OVERFLOW (lo))
6468 tmp = neg_overflow ? integer_zero_node : integer_one_node;
6469 return omit_one_operand_loc (loc, type, tmp, arg00);
6471 return fold_build2_loc (loc, LT_EXPR, type, arg00, lo);
6473 case LE_EXPR:
6474 if (TREE_OVERFLOW (hi))
6476 tmp = neg_overflow ? integer_zero_node : integer_one_node;
6477 return omit_one_operand_loc (loc, type, tmp, arg00);
6479 return fold_build2_loc (loc, LE_EXPR, type, arg00, hi);
6481 case GT_EXPR:
6482 if (TREE_OVERFLOW (hi))
6484 tmp = neg_overflow ? integer_one_node : integer_zero_node;
6485 return omit_one_operand_loc (loc, type, tmp, arg00);
6487 return fold_build2_loc (loc, GT_EXPR, type, arg00, hi);
6489 case GE_EXPR:
6490 if (TREE_OVERFLOW (lo))
6492 tmp = neg_overflow ? integer_one_node : integer_zero_node;
6493 return omit_one_operand_loc (loc, type, tmp, arg00);
6495 return fold_build2_loc (loc, GE_EXPR, type, arg00, lo);
6497 default:
6498 break;
6501 return NULL_TREE;
6505 /* If CODE with arguments ARG0 and ARG1 represents a single bit
6506 equality/inequality test, then return a simplified form of the test
6507 using a sign testing. Otherwise return NULL. TYPE is the desired
6508 result type. */
6510 static tree
6511 fold_single_bit_test_into_sign_test (location_t loc,
6512 enum tree_code code, tree arg0, tree arg1,
6513 tree result_type)
6515 /* If this is testing a single bit, we can optimize the test. */
6516 if ((code == NE_EXPR || code == EQ_EXPR)
6517 && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
6518 && integer_pow2p (TREE_OPERAND (arg0, 1)))
6520 /* If we have (A & C) != 0 where C is the sign bit of A, convert
6521 this into A < 0. Similarly for (A & C) == 0 into A >= 0. */
6522 tree arg00 = sign_bit_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg0, 1));
6524 if (arg00 != NULL_TREE
6525 /* This is only a win if casting to a signed type is cheap,
6526 i.e. when arg00's type is not a partial mode. */
6527 && TYPE_PRECISION (TREE_TYPE (arg00))
6528 == GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (arg00))))
6530 tree stype = signed_type_for (TREE_TYPE (arg00));
6531 return fold_build2_loc (loc, code == EQ_EXPR ? GE_EXPR : LT_EXPR,
6532 result_type,
6533 fold_convert_loc (loc, stype, arg00),
6534 build_int_cst (stype, 0));
6538 return NULL_TREE;
6541 /* If CODE with arguments ARG0 and ARG1 represents a single bit
6542 equality/inequality test, then return a simplified form of
6543 the test using shifts and logical operations. Otherwise return
6544 NULL. TYPE is the desired result type. */
6546 tree
6547 fold_single_bit_test (location_t loc, enum tree_code code,
6548 tree arg0, tree arg1, tree result_type)
6550 /* If this is testing a single bit, we can optimize the test. */
6551 if ((code == NE_EXPR || code == EQ_EXPR)
6552 && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
6553 && integer_pow2p (TREE_OPERAND (arg0, 1)))
6555 tree inner = TREE_OPERAND (arg0, 0);
6556 tree type = TREE_TYPE (arg0);
6557 int bitnum = tree_log2 (TREE_OPERAND (arg0, 1));
6558 enum machine_mode operand_mode = TYPE_MODE (type);
6559 int ops_unsigned;
6560 tree signed_type, unsigned_type, intermediate_type;
6561 tree tem, one;
6563 /* First, see if we can fold the single bit test into a sign-bit
6564 test. */
6565 tem = fold_single_bit_test_into_sign_test (loc, code, arg0, arg1,
6566 result_type);
6567 if (tem)
6568 return tem;
6570 /* Otherwise we have (A & C) != 0 where C is a single bit,
6571 convert that into ((A >> C2) & 1). Where C2 = log2(C).
6572 Similarly for (A & C) == 0. */
6574 /* If INNER is a right shift of a constant and it plus BITNUM does
6575 not overflow, adjust BITNUM and INNER. */
6576 if (TREE_CODE (inner) == RSHIFT_EXPR
6577 && TREE_CODE (TREE_OPERAND (inner, 1)) == INTEGER_CST
6578 && TREE_INT_CST_HIGH (TREE_OPERAND (inner, 1)) == 0
6579 && bitnum < TYPE_PRECISION (type)
6580 && 0 > compare_tree_int (TREE_OPERAND (inner, 1),
6581 bitnum - TYPE_PRECISION (type)))
6583 bitnum += TREE_INT_CST_LOW (TREE_OPERAND (inner, 1));
6584 inner = TREE_OPERAND (inner, 0);
6587 /* If we are going to be able to omit the AND below, we must do our
6588 operations as unsigned. If we must use the AND, we have a choice.
6589 Normally unsigned is faster, but for some machines signed is. */
6590 #ifdef LOAD_EXTEND_OP
6591 ops_unsigned = (LOAD_EXTEND_OP (operand_mode) == SIGN_EXTEND
6592 && !flag_syntax_only) ? 0 : 1;
6593 #else
6594 ops_unsigned = 1;
6595 #endif
6597 signed_type = lang_hooks.types.type_for_mode (operand_mode, 0);
6598 unsigned_type = lang_hooks.types.type_for_mode (operand_mode, 1);
6599 intermediate_type = ops_unsigned ? unsigned_type : signed_type;
6600 inner = fold_convert_loc (loc, intermediate_type, inner);
6602 if (bitnum != 0)
6603 inner = build2 (RSHIFT_EXPR, intermediate_type,
6604 inner, size_int (bitnum));
6606 one = build_int_cst (intermediate_type, 1);
6608 if (code == EQ_EXPR)
6609 inner = fold_build2_loc (loc, BIT_XOR_EXPR, intermediate_type, inner, one);
6611 /* Put the AND last so it can combine with more things. */
6612 inner = build2 (BIT_AND_EXPR, intermediate_type, inner, one);
6614 /* Make sure to return the proper type. */
6615 inner = fold_convert_loc (loc, result_type, inner);
6617 return inner;
6619 return NULL_TREE;
6622 /* Check whether we are allowed to reorder operands arg0 and arg1,
6623 such that the evaluation of arg1 occurs before arg0. */
6625 static bool
6626 reorder_operands_p (const_tree arg0, const_tree arg1)
6628 if (! flag_evaluation_order)
6629 return true;
6630 if (TREE_CONSTANT (arg0) || TREE_CONSTANT (arg1))
6631 return true;
6632 return ! TREE_SIDE_EFFECTS (arg0)
6633 && ! TREE_SIDE_EFFECTS (arg1);
6636 /* Test whether it is preferable two swap two operands, ARG0 and
6637 ARG1, for example because ARG0 is an integer constant and ARG1
6638 isn't. If REORDER is true, only recommend swapping if we can
6639 evaluate the operands in reverse order. */
6641 bool
6642 tree_swap_operands_p (const_tree arg0, const_tree arg1, bool reorder)
6644 STRIP_SIGN_NOPS (arg0);
6645 STRIP_SIGN_NOPS (arg1);
6647 if (TREE_CODE (arg1) == INTEGER_CST)
6648 return 0;
6649 if (TREE_CODE (arg0) == INTEGER_CST)
6650 return 1;
6652 if (TREE_CODE (arg1) == REAL_CST)
6653 return 0;
6654 if (TREE_CODE (arg0) == REAL_CST)
6655 return 1;
6657 if (TREE_CODE (arg1) == FIXED_CST)
6658 return 0;
6659 if (TREE_CODE (arg0) == FIXED_CST)
6660 return 1;
6662 if (TREE_CODE (arg1) == COMPLEX_CST)
6663 return 0;
6664 if (TREE_CODE (arg0) == COMPLEX_CST)
6665 return 1;
6667 if (TREE_CONSTANT (arg1))
6668 return 0;
6669 if (TREE_CONSTANT (arg0))
6670 return 1;
6672 if (optimize_function_for_size_p (cfun))
6673 return 0;
6675 if (reorder && flag_evaluation_order
6676 && (TREE_SIDE_EFFECTS (arg0) || TREE_SIDE_EFFECTS (arg1)))
6677 return 0;
6679 /* It is preferable to swap two SSA_NAME to ensure a canonical form
6680 for commutative and comparison operators. Ensuring a canonical
6681 form allows the optimizers to find additional redundancies without
6682 having to explicitly check for both orderings. */
6683 if (TREE_CODE (arg0) == SSA_NAME
6684 && TREE_CODE (arg1) == SSA_NAME
6685 && SSA_NAME_VERSION (arg0) > SSA_NAME_VERSION (arg1))
6686 return 1;
6688 /* Put SSA_NAMEs last. */
6689 if (TREE_CODE (arg1) == SSA_NAME)
6690 return 0;
6691 if (TREE_CODE (arg0) == SSA_NAME)
6692 return 1;
6694 /* Put variables last. */
6695 if (DECL_P (arg1))
6696 return 0;
6697 if (DECL_P (arg0))
6698 return 1;
6700 return 0;
6703 /* Fold comparison ARG0 CODE ARG1 (with result in TYPE), where
6704 ARG0 is extended to a wider type. */
6706 static tree
6707 fold_widened_comparison (location_t loc, enum tree_code code,
6708 tree type, tree arg0, tree arg1)
6710 tree arg0_unw = get_unwidened (arg0, NULL_TREE);
6711 tree arg1_unw;
6712 tree shorter_type, outer_type;
6713 tree min, max;
6714 bool above, below;
6716 if (arg0_unw == arg0)
6717 return NULL_TREE;
6718 shorter_type = TREE_TYPE (arg0_unw);
6720 #ifdef HAVE_canonicalize_funcptr_for_compare
6721 /* Disable this optimization if we're casting a function pointer
6722 type on targets that require function pointer canonicalization. */
6723 if (HAVE_canonicalize_funcptr_for_compare
6724 && TREE_CODE (shorter_type) == POINTER_TYPE
6725 && TREE_CODE (TREE_TYPE (shorter_type)) == FUNCTION_TYPE)
6726 return NULL_TREE;
6727 #endif
6729 if (TYPE_PRECISION (TREE_TYPE (arg0)) <= TYPE_PRECISION (shorter_type))
6730 return NULL_TREE;
6732 arg1_unw = get_unwidened (arg1, NULL_TREE);
6734 /* If possible, express the comparison in the shorter mode. */
6735 if ((code == EQ_EXPR || code == NE_EXPR
6736 || TYPE_UNSIGNED (TREE_TYPE (arg0)) == TYPE_UNSIGNED (shorter_type))
6737 && (TREE_TYPE (arg1_unw) == shorter_type
6738 || ((TYPE_PRECISION (shorter_type)
6739 >= TYPE_PRECISION (TREE_TYPE (arg1_unw)))
6740 && (TYPE_UNSIGNED (shorter_type)
6741 == TYPE_UNSIGNED (TREE_TYPE (arg1_unw))))
6742 || (TREE_CODE (arg1_unw) == INTEGER_CST
6743 && (TREE_CODE (shorter_type) == INTEGER_TYPE
6744 || TREE_CODE (shorter_type) == BOOLEAN_TYPE)
6745 && int_fits_type_p (arg1_unw, shorter_type))))
6746 return fold_build2_loc (loc, code, type, arg0_unw,
6747 fold_convert_loc (loc, shorter_type, arg1_unw));
6749 if (TREE_CODE (arg1_unw) != INTEGER_CST
6750 || TREE_CODE (shorter_type) != INTEGER_TYPE
6751 || !int_fits_type_p (arg1_unw, shorter_type))
6752 return NULL_TREE;
6754 /* If we are comparing with the integer that does not fit into the range
6755 of the shorter type, the result is known. */
6756 outer_type = TREE_TYPE (arg1_unw);
6757 min = lower_bound_in_type (outer_type, shorter_type);
6758 max = upper_bound_in_type (outer_type, shorter_type);
6760 above = integer_nonzerop (fold_relational_const (LT_EXPR, type,
6761 max, arg1_unw));
6762 below = integer_nonzerop (fold_relational_const (LT_EXPR, type,
6763 arg1_unw, min));
6765 switch (code)
6767 case EQ_EXPR:
6768 if (above || below)
6769 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
6770 break;
6772 case NE_EXPR:
6773 if (above || below)
6774 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
6775 break;
6777 case LT_EXPR:
6778 case LE_EXPR:
6779 if (above)
6780 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
6781 else if (below)
6782 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
6784 case GT_EXPR:
6785 case GE_EXPR:
6786 if (above)
6787 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
6788 else if (below)
6789 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
6791 default:
6792 break;
6795 return NULL_TREE;
6798 /* Fold comparison ARG0 CODE ARG1 (with result in TYPE), where for
6799 ARG0 just the signedness is changed. */
6801 static tree
6802 fold_sign_changed_comparison (location_t loc, enum tree_code code, tree type,
6803 tree arg0, tree arg1)
6805 tree arg0_inner;
6806 tree inner_type, outer_type;
6808 if (!CONVERT_EXPR_P (arg0))
6809 return NULL_TREE;
6811 outer_type = TREE_TYPE (arg0);
6812 arg0_inner = TREE_OPERAND (arg0, 0);
6813 inner_type = TREE_TYPE (arg0_inner);
6815 #ifdef HAVE_canonicalize_funcptr_for_compare
6816 /* Disable this optimization if we're casting a function pointer
6817 type on targets that require function pointer canonicalization. */
6818 if (HAVE_canonicalize_funcptr_for_compare
6819 && TREE_CODE (inner_type) == POINTER_TYPE
6820 && TREE_CODE (TREE_TYPE (inner_type)) == FUNCTION_TYPE)
6821 return NULL_TREE;
6822 #endif
6824 if (TYPE_PRECISION (inner_type) != TYPE_PRECISION (outer_type))
6825 return NULL_TREE;
6827 if (TREE_CODE (arg1) != INTEGER_CST
6828 && !(CONVERT_EXPR_P (arg1)
6829 && TREE_TYPE (TREE_OPERAND (arg1, 0)) == inner_type))
6830 return NULL_TREE;
6832 if ((TYPE_UNSIGNED (inner_type) != TYPE_UNSIGNED (outer_type)
6833 || POINTER_TYPE_P (inner_type) != POINTER_TYPE_P (outer_type))
6834 && code != NE_EXPR
6835 && code != EQ_EXPR)
6836 return NULL_TREE;
6838 if (TREE_CODE (arg1) == INTEGER_CST)
6839 arg1 = force_fit_type_double (inner_type, tree_to_double_int (arg1),
6840 0, TREE_OVERFLOW (arg1));
6841 else
6842 arg1 = fold_convert_loc (loc, inner_type, arg1);
6844 return fold_build2_loc (loc, code, type, arg0_inner, arg1);
6847 /* Tries to replace &a[idx] p+ s * delta with &a[idx + delta], if s is
6848 step of the array. Reconstructs s and delta in the case of s *
6849 delta being an integer constant (and thus already folded). ADDR is
6850 the address. MULT is the multiplicative expression. If the
6851 function succeeds, the new address expression is returned.
6852 Otherwise NULL_TREE is returned. LOC is the location of the
6853 resulting expression. */
6855 static tree
6856 try_move_mult_to_index (location_t loc, tree addr, tree op1)
6858 tree s, delta, step;
6859 tree ref = TREE_OPERAND (addr, 0), pref;
6860 tree ret, pos;
6861 tree itype;
6862 bool mdim = false;
6864 /* Strip the nops that might be added when converting op1 to sizetype. */
6865 STRIP_NOPS (op1);
6867 /* Canonicalize op1 into a possibly non-constant delta
6868 and an INTEGER_CST s. */
6869 if (TREE_CODE (op1) == MULT_EXPR)
6871 tree arg0 = TREE_OPERAND (op1, 0), arg1 = TREE_OPERAND (op1, 1);
6873 STRIP_NOPS (arg0);
6874 STRIP_NOPS (arg1);
6876 if (TREE_CODE (arg0) == INTEGER_CST)
6878 s = arg0;
6879 delta = arg1;
6881 else if (TREE_CODE (arg1) == INTEGER_CST)
6883 s = arg1;
6884 delta = arg0;
6886 else
6887 return NULL_TREE;
6889 else if (TREE_CODE (op1) == INTEGER_CST)
6891 delta = op1;
6892 s = NULL_TREE;
6894 else
6896 /* Simulate we are delta * 1. */
6897 delta = op1;
6898 s = integer_one_node;
6901 for (;; ref = TREE_OPERAND (ref, 0))
6903 if (TREE_CODE (ref) == ARRAY_REF)
6905 tree domain;
6907 /* Remember if this was a multi-dimensional array. */
6908 if (TREE_CODE (TREE_OPERAND (ref, 0)) == ARRAY_REF)
6909 mdim = true;
6911 domain = TYPE_DOMAIN (TREE_TYPE (TREE_OPERAND (ref, 0)));
6912 if (! domain)
6913 continue;
6914 itype = TREE_TYPE (domain);
6916 step = array_ref_element_size (ref);
6917 if (TREE_CODE (step) != INTEGER_CST)
6918 continue;
6920 if (s)
6922 if (! tree_int_cst_equal (step, s))
6923 continue;
6925 else
6927 /* Try if delta is a multiple of step. */
6928 tree tmp = div_if_zero_remainder (EXACT_DIV_EXPR, op1, step);
6929 if (! tmp)
6930 continue;
6931 delta = tmp;
6934 /* Only fold here if we can verify we do not overflow one
6935 dimension of a multi-dimensional array. */
6936 if (mdim)
6938 tree tmp;
6940 if (TREE_CODE (TREE_OPERAND (ref, 1)) != INTEGER_CST
6941 || !TYPE_MAX_VALUE (domain)
6942 || TREE_CODE (TYPE_MAX_VALUE (domain)) != INTEGER_CST)
6943 continue;
6945 tmp = fold_binary_loc (loc, PLUS_EXPR, itype,
6946 fold_convert_loc (loc, itype,
6947 TREE_OPERAND (ref, 1)),
6948 fold_convert_loc (loc, itype, delta));
6949 if (!tmp
6950 || TREE_CODE (tmp) != INTEGER_CST
6951 || tree_int_cst_lt (TYPE_MAX_VALUE (domain), tmp))
6952 continue;
6955 break;
6957 else
6958 mdim = false;
6960 if (!handled_component_p (ref))
6961 return NULL_TREE;
6964 /* We found the suitable array reference. So copy everything up to it,
6965 and replace the index. */
6967 pref = TREE_OPERAND (addr, 0);
6968 ret = copy_node (pref);
6969 SET_EXPR_LOCATION (ret, loc);
6970 pos = ret;
6972 while (pref != ref)
6974 pref = TREE_OPERAND (pref, 0);
6975 TREE_OPERAND (pos, 0) = copy_node (pref);
6976 pos = TREE_OPERAND (pos, 0);
6979 TREE_OPERAND (pos, 1) = fold_build2_loc (loc, PLUS_EXPR, itype,
6980 fold_convert_loc (loc, itype,
6981 TREE_OPERAND (pos, 1)),
6982 fold_convert_loc (loc, itype, delta));
6984 return fold_build1_loc (loc, ADDR_EXPR, TREE_TYPE (addr), ret);
6988 /* Fold A < X && A + 1 > Y to A < X && A >= Y. Normally A + 1 > Y
6989 means A >= Y && A != MAX, but in this case we know that
6990 A < X <= MAX. INEQ is A + 1 > Y, BOUND is A < X. */
6992 static tree
6993 fold_to_nonsharp_ineq_using_bound (location_t loc, tree ineq, tree bound)
6995 tree a, typea, type = TREE_TYPE (ineq), a1, diff, y;
6997 if (TREE_CODE (bound) == LT_EXPR)
6998 a = TREE_OPERAND (bound, 0);
6999 else if (TREE_CODE (bound) == GT_EXPR)
7000 a = TREE_OPERAND (bound, 1);
7001 else
7002 return NULL_TREE;
7004 typea = TREE_TYPE (a);
7005 if (!INTEGRAL_TYPE_P (typea)
7006 && !POINTER_TYPE_P (typea))
7007 return NULL_TREE;
7009 if (TREE_CODE (ineq) == LT_EXPR)
7011 a1 = TREE_OPERAND (ineq, 1);
7012 y = TREE_OPERAND (ineq, 0);
7014 else if (TREE_CODE (ineq) == GT_EXPR)
7016 a1 = TREE_OPERAND (ineq, 0);
7017 y = TREE_OPERAND (ineq, 1);
7019 else
7020 return NULL_TREE;
7022 if (TREE_TYPE (a1) != typea)
7023 return NULL_TREE;
7025 if (POINTER_TYPE_P (typea))
7027 /* Convert the pointer types into integer before taking the difference. */
7028 tree ta = fold_convert_loc (loc, ssizetype, a);
7029 tree ta1 = fold_convert_loc (loc, ssizetype, a1);
7030 diff = fold_binary_loc (loc, MINUS_EXPR, ssizetype, ta1, ta);
7032 else
7033 diff = fold_binary_loc (loc, MINUS_EXPR, typea, a1, a);
7035 if (!diff || !integer_onep (diff))
7036 return NULL_TREE;
7038 return fold_build2_loc (loc, GE_EXPR, type, a, y);
7041 /* Fold a sum or difference of at least one multiplication.
7042 Returns the folded tree or NULL if no simplification could be made. */
7044 static tree
7045 fold_plusminus_mult_expr (location_t loc, enum tree_code code, tree type,
7046 tree arg0, tree arg1)
7048 tree arg00, arg01, arg10, arg11;
7049 tree alt0 = NULL_TREE, alt1 = NULL_TREE, same;
7051 /* (A * C) +- (B * C) -> (A+-B) * C.
7052 (A * C) +- A -> A * (C+-1).
7053 We are most concerned about the case where C is a constant,
7054 but other combinations show up during loop reduction. Since
7055 it is not difficult, try all four possibilities. */
7057 if (TREE_CODE (arg0) == MULT_EXPR)
7059 arg00 = TREE_OPERAND (arg0, 0);
7060 arg01 = TREE_OPERAND (arg0, 1);
7062 else if (TREE_CODE (arg0) == INTEGER_CST)
7064 arg00 = build_one_cst (type);
7065 arg01 = arg0;
7067 else
7069 /* We cannot generate constant 1 for fract. */
7070 if (ALL_FRACT_MODE_P (TYPE_MODE (type)))
7071 return NULL_TREE;
7072 arg00 = arg0;
7073 arg01 = build_one_cst (type);
7075 if (TREE_CODE (arg1) == MULT_EXPR)
7077 arg10 = TREE_OPERAND (arg1, 0);
7078 arg11 = TREE_OPERAND (arg1, 1);
7080 else if (TREE_CODE (arg1) == INTEGER_CST)
7082 arg10 = build_one_cst (type);
7083 /* As we canonicalize A - 2 to A + -2 get rid of that sign for
7084 the purpose of this canonicalization. */
7085 if (TREE_INT_CST_HIGH (arg1) == -1
7086 && negate_expr_p (arg1)
7087 && code == PLUS_EXPR)
7089 arg11 = negate_expr (arg1);
7090 code = MINUS_EXPR;
7092 else
7093 arg11 = arg1;
7095 else
7097 /* We cannot generate constant 1 for fract. */
7098 if (ALL_FRACT_MODE_P (TYPE_MODE (type)))
7099 return NULL_TREE;
7100 arg10 = arg1;
7101 arg11 = build_one_cst (type);
7103 same = NULL_TREE;
7105 if (operand_equal_p (arg01, arg11, 0))
7106 same = arg01, alt0 = arg00, alt1 = arg10;
7107 else if (operand_equal_p (arg00, arg10, 0))
7108 same = arg00, alt0 = arg01, alt1 = arg11;
7109 else if (operand_equal_p (arg00, arg11, 0))
7110 same = arg00, alt0 = arg01, alt1 = arg10;
7111 else if (operand_equal_p (arg01, arg10, 0))
7112 same = arg01, alt0 = arg00, alt1 = arg11;
7114 /* No identical multiplicands; see if we can find a common
7115 power-of-two factor in non-power-of-two multiplies. This
7116 can help in multi-dimensional array access. */
7117 else if (host_integerp (arg01, 0)
7118 && host_integerp (arg11, 0))
7120 HOST_WIDE_INT int01, int11, tmp;
7121 bool swap = false;
7122 tree maybe_same;
7123 int01 = TREE_INT_CST_LOW (arg01);
7124 int11 = TREE_INT_CST_LOW (arg11);
7126 /* Move min of absolute values to int11. */
7127 if ((int01 >= 0 ? int01 : -int01)
7128 < (int11 >= 0 ? int11 : -int11))
7130 tmp = int01, int01 = int11, int11 = tmp;
7131 alt0 = arg00, arg00 = arg10, arg10 = alt0;
7132 maybe_same = arg01;
7133 swap = true;
7135 else
7136 maybe_same = arg11;
7138 if (exact_log2 (abs (int11)) > 0 && int01 % int11 == 0
7139 /* The remainder should not be a constant, otherwise we
7140 end up folding i * 4 + 2 to (i * 2 + 1) * 2 which has
7141 increased the number of multiplications necessary. */
7142 && TREE_CODE (arg10) != INTEGER_CST)
7144 alt0 = fold_build2_loc (loc, MULT_EXPR, TREE_TYPE (arg00), arg00,
7145 build_int_cst (TREE_TYPE (arg00),
7146 int01 / int11));
7147 alt1 = arg10;
7148 same = maybe_same;
7149 if (swap)
7150 maybe_same = alt0, alt0 = alt1, alt1 = maybe_same;
7154 if (same)
7155 return fold_build2_loc (loc, MULT_EXPR, type,
7156 fold_build2_loc (loc, code, type,
7157 fold_convert_loc (loc, type, alt0),
7158 fold_convert_loc (loc, type, alt1)),
7159 fold_convert_loc (loc, type, same));
7161 return NULL_TREE;
7164 /* Subroutine of native_encode_expr. Encode the INTEGER_CST
7165 specified by EXPR into the buffer PTR of length LEN bytes.
7166 Return the number of bytes placed in the buffer, or zero
7167 upon failure. */
7169 static int
7170 native_encode_int (const_tree expr, unsigned char *ptr, int len)
7172 tree type = TREE_TYPE (expr);
7173 int total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
7174 int byte, offset, word, words;
7175 unsigned char value;
7177 if (total_bytes > len)
7178 return 0;
7179 words = total_bytes / UNITS_PER_WORD;
7181 for (byte = 0; byte < total_bytes; byte++)
7183 int bitpos = byte * BITS_PER_UNIT;
7184 if (bitpos < HOST_BITS_PER_WIDE_INT)
7185 value = (unsigned char) (TREE_INT_CST_LOW (expr) >> bitpos);
7186 else
7187 value = (unsigned char) (TREE_INT_CST_HIGH (expr)
7188 >> (bitpos - HOST_BITS_PER_WIDE_INT));
7190 if (total_bytes > UNITS_PER_WORD)
7192 word = byte / UNITS_PER_WORD;
7193 if (WORDS_BIG_ENDIAN)
7194 word = (words - 1) - word;
7195 offset = word * UNITS_PER_WORD;
7196 if (BYTES_BIG_ENDIAN)
7197 offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
7198 else
7199 offset += byte % UNITS_PER_WORD;
7201 else
7202 offset = BYTES_BIG_ENDIAN ? (total_bytes - 1) - byte : byte;
7203 ptr[offset] = value;
7205 return total_bytes;
7209 /* Subroutine of native_encode_expr. Encode the REAL_CST
7210 specified by EXPR into the buffer PTR of length LEN bytes.
7211 Return the number of bytes placed in the buffer, or zero
7212 upon failure. */
7214 static int
7215 native_encode_real (const_tree expr, unsigned char *ptr, int len)
7217 tree type = TREE_TYPE (expr);
7218 int total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
7219 int byte, offset, word, words, bitpos;
7220 unsigned char value;
7222 /* There are always 32 bits in each long, no matter the size of
7223 the hosts long. We handle floating point representations with
7224 up to 192 bits. */
7225 long tmp[6];
7227 if (total_bytes > len)
7228 return 0;
7229 words = (32 / BITS_PER_UNIT) / UNITS_PER_WORD;
7231 real_to_target (tmp, TREE_REAL_CST_PTR (expr), TYPE_MODE (type));
7233 for (bitpos = 0; bitpos < total_bytes * BITS_PER_UNIT;
7234 bitpos += BITS_PER_UNIT)
7236 byte = (bitpos / BITS_PER_UNIT) & 3;
7237 value = (unsigned char) (tmp[bitpos / 32] >> (bitpos & 31));
7239 if (UNITS_PER_WORD < 4)
7241 word = byte / UNITS_PER_WORD;
7242 if (WORDS_BIG_ENDIAN)
7243 word = (words - 1) - word;
7244 offset = word * UNITS_PER_WORD;
7245 if (BYTES_BIG_ENDIAN)
7246 offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
7247 else
7248 offset += byte % UNITS_PER_WORD;
7250 else
7251 offset = BYTES_BIG_ENDIAN ? 3 - byte : byte;
7252 ptr[offset + ((bitpos / BITS_PER_UNIT) & ~3)] = value;
7254 return total_bytes;
7257 /* Subroutine of native_encode_expr. Encode the COMPLEX_CST
7258 specified by EXPR into the buffer PTR of length LEN bytes.
7259 Return the number of bytes placed in the buffer, or zero
7260 upon failure. */
7262 static int
7263 native_encode_complex (const_tree expr, unsigned char *ptr, int len)
7265 int rsize, isize;
7266 tree part;
7268 part = TREE_REALPART (expr);
7269 rsize = native_encode_expr (part, ptr, len);
7270 if (rsize == 0)
7271 return 0;
7272 part = TREE_IMAGPART (expr);
7273 isize = native_encode_expr (part, ptr+rsize, len-rsize);
7274 if (isize != rsize)
7275 return 0;
7276 return rsize + isize;
7280 /* Subroutine of native_encode_expr. Encode the VECTOR_CST
7281 specified by EXPR into the buffer PTR of length LEN bytes.
7282 Return the number of bytes placed in the buffer, or zero
7283 upon failure. */
7285 static int
7286 native_encode_vector (const_tree expr, unsigned char *ptr, int len)
7288 int i, size, offset, count;
7289 tree itype, elem, elements;
7291 offset = 0;
7292 elements = TREE_VECTOR_CST_ELTS (expr);
7293 count = TYPE_VECTOR_SUBPARTS (TREE_TYPE (expr));
7294 itype = TREE_TYPE (TREE_TYPE (expr));
7295 size = GET_MODE_SIZE (TYPE_MODE (itype));
7296 for (i = 0; i < count; i++)
7298 if (elements)
7300 elem = TREE_VALUE (elements);
7301 elements = TREE_CHAIN (elements);
7303 else
7304 elem = NULL_TREE;
7306 if (elem)
7308 if (native_encode_expr (elem, ptr+offset, len-offset) != size)
7309 return 0;
7311 else
7313 if (offset + size > len)
7314 return 0;
7315 memset (ptr+offset, 0, size);
7317 offset += size;
7319 return offset;
7323 /* Subroutine of native_encode_expr. Encode the STRING_CST
7324 specified by EXPR into the buffer PTR of length LEN bytes.
7325 Return the number of bytes placed in the buffer, or zero
7326 upon failure. */
7328 static int
7329 native_encode_string (const_tree expr, unsigned char *ptr, int len)
7331 tree type = TREE_TYPE (expr);
7332 HOST_WIDE_INT total_bytes;
7334 if (TREE_CODE (type) != ARRAY_TYPE
7335 || TREE_CODE (TREE_TYPE (type)) != INTEGER_TYPE
7336 || GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (type))) != BITS_PER_UNIT
7337 || !host_integerp (TYPE_SIZE_UNIT (type), 0))
7338 return 0;
7339 total_bytes = tree_low_cst (TYPE_SIZE_UNIT (type), 0);
7340 if (total_bytes > len)
7341 return 0;
7342 if (TREE_STRING_LENGTH (expr) < total_bytes)
7344 memcpy (ptr, TREE_STRING_POINTER (expr), TREE_STRING_LENGTH (expr));
7345 memset (ptr + TREE_STRING_LENGTH (expr), 0,
7346 total_bytes - TREE_STRING_LENGTH (expr));
7348 else
7349 memcpy (ptr, TREE_STRING_POINTER (expr), total_bytes);
7350 return total_bytes;
7354 /* Subroutine of fold_view_convert_expr. Encode the INTEGER_CST,
7355 REAL_CST, COMPLEX_CST or VECTOR_CST specified by EXPR into the
7356 buffer PTR of length LEN bytes. Return the number of bytes
7357 placed in the buffer, or zero upon failure. */
7360 native_encode_expr (const_tree expr, unsigned char *ptr, int len)
7362 switch (TREE_CODE (expr))
7364 case INTEGER_CST:
7365 return native_encode_int (expr, ptr, len);
7367 case REAL_CST:
7368 return native_encode_real (expr, ptr, len);
7370 case COMPLEX_CST:
7371 return native_encode_complex (expr, ptr, len);
7373 case VECTOR_CST:
7374 return native_encode_vector (expr, ptr, len);
7376 case STRING_CST:
7377 return native_encode_string (expr, ptr, len);
7379 default:
7380 return 0;
7385 /* Subroutine of native_interpret_expr. Interpret the contents of
7386 the buffer PTR of length LEN as an INTEGER_CST of type TYPE.
7387 If the buffer cannot be interpreted, return NULL_TREE. */
7389 static tree
7390 native_interpret_int (tree type, const unsigned char *ptr, int len)
7392 int total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
7393 int byte, offset, word, words;
7394 unsigned char value;
7395 double_int result;
7397 if (total_bytes > len)
7398 return NULL_TREE;
7399 if (total_bytes * BITS_PER_UNIT > 2 * HOST_BITS_PER_WIDE_INT)
7400 return NULL_TREE;
7402 result = double_int_zero;
7403 words = total_bytes / UNITS_PER_WORD;
7405 for (byte = 0; byte < total_bytes; byte++)
7407 int bitpos = byte * BITS_PER_UNIT;
7408 if (total_bytes > UNITS_PER_WORD)
7410 word = byte / UNITS_PER_WORD;
7411 if (WORDS_BIG_ENDIAN)
7412 word = (words - 1) - word;
7413 offset = word * UNITS_PER_WORD;
7414 if (BYTES_BIG_ENDIAN)
7415 offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
7416 else
7417 offset += byte % UNITS_PER_WORD;
7419 else
7420 offset = BYTES_BIG_ENDIAN ? (total_bytes - 1) - byte : byte;
7421 value = ptr[offset];
7423 if (bitpos < HOST_BITS_PER_WIDE_INT)
7424 result.low |= (unsigned HOST_WIDE_INT) value << bitpos;
7425 else
7426 result.high |= (unsigned HOST_WIDE_INT) value
7427 << (bitpos - HOST_BITS_PER_WIDE_INT);
7430 return double_int_to_tree (type, result);
7434 /* Subroutine of native_interpret_expr. Interpret the contents of
7435 the buffer PTR of length LEN as a REAL_CST of type TYPE.
7436 If the buffer cannot be interpreted, return NULL_TREE. */
7438 static tree
7439 native_interpret_real (tree type, const unsigned char *ptr, int len)
7441 enum machine_mode mode = TYPE_MODE (type);
7442 int total_bytes = GET_MODE_SIZE (mode);
7443 int byte, offset, word, words, bitpos;
7444 unsigned char value;
7445 /* There are always 32 bits in each long, no matter the size of
7446 the hosts long. We handle floating point representations with
7447 up to 192 bits. */
7448 REAL_VALUE_TYPE r;
7449 long tmp[6];
7451 total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
7452 if (total_bytes > len || total_bytes > 24)
7453 return NULL_TREE;
7454 words = (32 / BITS_PER_UNIT) / UNITS_PER_WORD;
7456 memset (tmp, 0, sizeof (tmp));
7457 for (bitpos = 0; bitpos < total_bytes * BITS_PER_UNIT;
7458 bitpos += BITS_PER_UNIT)
7460 byte = (bitpos / BITS_PER_UNIT) & 3;
7461 if (UNITS_PER_WORD < 4)
7463 word = byte / UNITS_PER_WORD;
7464 if (WORDS_BIG_ENDIAN)
7465 word = (words - 1) - word;
7466 offset = word * UNITS_PER_WORD;
7467 if (BYTES_BIG_ENDIAN)
7468 offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
7469 else
7470 offset += byte % UNITS_PER_WORD;
7472 else
7473 offset = BYTES_BIG_ENDIAN ? 3 - byte : byte;
7474 value = ptr[offset + ((bitpos / BITS_PER_UNIT) & ~3)];
7476 tmp[bitpos / 32] |= (unsigned long)value << (bitpos & 31);
7479 real_from_target (&r, tmp, mode);
7480 return build_real (type, r);
7484 /* Subroutine of native_interpret_expr. Interpret the contents of
7485 the buffer PTR of length LEN as a COMPLEX_CST of type TYPE.
7486 If the buffer cannot be interpreted, return NULL_TREE. */
7488 static tree
7489 native_interpret_complex (tree type, const unsigned char *ptr, int len)
7491 tree etype, rpart, ipart;
7492 int size;
7494 etype = TREE_TYPE (type);
7495 size = GET_MODE_SIZE (TYPE_MODE (etype));
7496 if (size * 2 > len)
7497 return NULL_TREE;
7498 rpart = native_interpret_expr (etype, ptr, size);
7499 if (!rpart)
7500 return NULL_TREE;
7501 ipart = native_interpret_expr (etype, ptr+size, size);
7502 if (!ipart)
7503 return NULL_TREE;
7504 return build_complex (type, rpart, ipart);
7508 /* Subroutine of native_interpret_expr. Interpret the contents of
7509 the buffer PTR of length LEN as a VECTOR_CST of type TYPE.
7510 If the buffer cannot be interpreted, return NULL_TREE. */
7512 static tree
7513 native_interpret_vector (tree type, const unsigned char *ptr, int len)
7515 tree etype, elem, elements;
7516 int i, size, count;
7518 etype = TREE_TYPE (type);
7519 size = GET_MODE_SIZE (TYPE_MODE (etype));
7520 count = TYPE_VECTOR_SUBPARTS (type);
7521 if (size * count > len)
7522 return NULL_TREE;
7524 elements = NULL_TREE;
7525 for (i = count - 1; i >= 0; i--)
7527 elem = native_interpret_expr (etype, ptr+(i*size), size);
7528 if (!elem)
7529 return NULL_TREE;
7530 elements = tree_cons (NULL_TREE, elem, elements);
7532 return build_vector (type, elements);
7536 /* Subroutine of fold_view_convert_expr. Interpret the contents of
7537 the buffer PTR of length LEN as a constant of type TYPE. For
7538 INTEGRAL_TYPE_P we return an INTEGER_CST, for SCALAR_FLOAT_TYPE_P
7539 we return a REAL_CST, etc... If the buffer cannot be interpreted,
7540 return NULL_TREE. */
7542 tree
7543 native_interpret_expr (tree type, const unsigned char *ptr, int len)
7545 switch (TREE_CODE (type))
7547 case INTEGER_TYPE:
7548 case ENUMERAL_TYPE:
7549 case BOOLEAN_TYPE:
7550 return native_interpret_int (type, ptr, len);
7552 case REAL_TYPE:
7553 return native_interpret_real (type, ptr, len);
7555 case COMPLEX_TYPE:
7556 return native_interpret_complex (type, ptr, len);
7558 case VECTOR_TYPE:
7559 return native_interpret_vector (type, ptr, len);
7561 default:
7562 return NULL_TREE;
7567 /* Fold a VIEW_CONVERT_EXPR of a constant expression EXPR to type
7568 TYPE at compile-time. If we're unable to perform the conversion
7569 return NULL_TREE. */
7571 static tree
7572 fold_view_convert_expr (tree type, tree expr)
7574 /* We support up to 512-bit values (for V8DFmode). */
7575 unsigned char buffer[64];
7576 int len;
7578 /* Check that the host and target are sane. */
7579 if (CHAR_BIT != 8 || BITS_PER_UNIT != 8)
7580 return NULL_TREE;
7582 len = native_encode_expr (expr, buffer, sizeof (buffer));
7583 if (len == 0)
7584 return NULL_TREE;
7586 return native_interpret_expr (type, buffer, len);
7589 /* Build an expression for the address of T. Folds away INDIRECT_REF
7590 to avoid confusing the gimplify process. */
7592 tree
7593 build_fold_addr_expr_with_type_loc (location_t loc, tree t, tree ptrtype)
7595 /* The size of the object is not relevant when talking about its address. */
7596 if (TREE_CODE (t) == WITH_SIZE_EXPR)
7597 t = TREE_OPERAND (t, 0);
7599 if (TREE_CODE (t) == INDIRECT_REF
7600 || TREE_CODE (t) == MISALIGNED_INDIRECT_REF)
7602 t = TREE_OPERAND (t, 0);
7604 if (TREE_TYPE (t) != ptrtype)
7606 t = build1 (NOP_EXPR, ptrtype, t);
7607 SET_EXPR_LOCATION (t, loc);
7610 else if (TREE_CODE (t) == MEM_REF
7611 && integer_zerop (TREE_OPERAND (t, 1)))
7612 return TREE_OPERAND (t, 0);
7613 else if (TREE_CODE (t) == VIEW_CONVERT_EXPR)
7615 t = build_fold_addr_expr_loc (loc, TREE_OPERAND (t, 0));
7617 if (TREE_TYPE (t) != ptrtype)
7618 t = fold_convert_loc (loc, ptrtype, t);
7620 else
7622 t = build1 (ADDR_EXPR, ptrtype, t);
7623 SET_EXPR_LOCATION (t, loc);
7626 return t;
7629 /* Build an expression for the address of T. */
7631 tree
7632 build_fold_addr_expr_loc (location_t loc, tree t)
7634 tree ptrtype = build_pointer_type (TREE_TYPE (t));
7636 return build_fold_addr_expr_with_type_loc (loc, t, ptrtype);
7639 /* Fold a unary expression of code CODE and type TYPE with operand
7640 OP0. Return the folded expression if folding is successful.
7641 Otherwise, return NULL_TREE. */
7643 tree
7644 fold_unary_loc (location_t loc, enum tree_code code, tree type, tree op0)
7646 tree tem;
7647 tree arg0;
7648 enum tree_code_class kind = TREE_CODE_CLASS (code);
7650 gcc_assert (IS_EXPR_CODE_CLASS (kind)
7651 && TREE_CODE_LENGTH (code) == 1);
7653 arg0 = op0;
7654 if (arg0)
7656 if (CONVERT_EXPR_CODE_P (code)
7657 || code == FLOAT_EXPR || code == ABS_EXPR)
7659 /* Don't use STRIP_NOPS, because signedness of argument type
7660 matters. */
7661 STRIP_SIGN_NOPS (arg0);
7663 else
7665 /* Strip any conversions that don't change the mode. This
7666 is safe for every expression, except for a comparison
7667 expression because its signedness is derived from its
7668 operands.
7670 Note that this is done as an internal manipulation within
7671 the constant folder, in order to find the simplest
7672 representation of the arguments so that their form can be
7673 studied. In any cases, the appropriate type conversions
7674 should be put back in the tree that will get out of the
7675 constant folder. */
7676 STRIP_NOPS (arg0);
7680 if (TREE_CODE_CLASS (code) == tcc_unary)
7682 if (TREE_CODE (arg0) == COMPOUND_EXPR)
7683 return build2 (COMPOUND_EXPR, type, TREE_OPERAND (arg0, 0),
7684 fold_build1_loc (loc, code, type,
7685 fold_convert_loc (loc, TREE_TYPE (op0),
7686 TREE_OPERAND (arg0, 1))));
7687 else if (TREE_CODE (arg0) == COND_EXPR)
7689 tree arg01 = TREE_OPERAND (arg0, 1);
7690 tree arg02 = TREE_OPERAND (arg0, 2);
7691 if (! VOID_TYPE_P (TREE_TYPE (arg01)))
7692 arg01 = fold_build1_loc (loc, code, type,
7693 fold_convert_loc (loc,
7694 TREE_TYPE (op0), arg01));
7695 if (! VOID_TYPE_P (TREE_TYPE (arg02)))
7696 arg02 = fold_build1_loc (loc, code, type,
7697 fold_convert_loc (loc,
7698 TREE_TYPE (op0), arg02));
7699 tem = fold_build3_loc (loc, COND_EXPR, type, TREE_OPERAND (arg0, 0),
7700 arg01, arg02);
7702 /* If this was a conversion, and all we did was to move into
7703 inside the COND_EXPR, bring it back out. But leave it if
7704 it is a conversion from integer to integer and the
7705 result precision is no wider than a word since such a
7706 conversion is cheap and may be optimized away by combine,
7707 while it couldn't if it were outside the COND_EXPR. Then return
7708 so we don't get into an infinite recursion loop taking the
7709 conversion out and then back in. */
7711 if ((CONVERT_EXPR_CODE_P (code)
7712 || code == NON_LVALUE_EXPR)
7713 && TREE_CODE (tem) == COND_EXPR
7714 && TREE_CODE (TREE_OPERAND (tem, 1)) == code
7715 && TREE_CODE (TREE_OPERAND (tem, 2)) == code
7716 && ! VOID_TYPE_P (TREE_OPERAND (tem, 1))
7717 && ! VOID_TYPE_P (TREE_OPERAND (tem, 2))
7718 && (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 1), 0))
7719 == TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 2), 0)))
7720 && (! (INTEGRAL_TYPE_P (TREE_TYPE (tem))
7721 && (INTEGRAL_TYPE_P
7722 (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 1), 0))))
7723 && TYPE_PRECISION (TREE_TYPE (tem)) <= BITS_PER_WORD)
7724 || flag_syntax_only))
7726 tem = build1 (code, type,
7727 build3 (COND_EXPR,
7728 TREE_TYPE (TREE_OPERAND
7729 (TREE_OPERAND (tem, 1), 0)),
7730 TREE_OPERAND (tem, 0),
7731 TREE_OPERAND (TREE_OPERAND (tem, 1), 0),
7732 TREE_OPERAND (TREE_OPERAND (tem, 2), 0)));
7733 SET_EXPR_LOCATION (tem, loc);
7735 return tem;
7737 else if (COMPARISON_CLASS_P (arg0))
7739 if (TREE_CODE (type) == BOOLEAN_TYPE)
7741 arg0 = copy_node (arg0);
7742 TREE_TYPE (arg0) = type;
7743 return arg0;
7745 else if (TREE_CODE (type) != INTEGER_TYPE)
7746 return fold_build3_loc (loc, COND_EXPR, type, arg0,
7747 fold_build1_loc (loc, code, type,
7748 integer_one_node),
7749 fold_build1_loc (loc, code, type,
7750 integer_zero_node));
7754 switch (code)
7756 case PAREN_EXPR:
7757 /* Re-association barriers around constants and other re-association
7758 barriers can be removed. */
7759 if (CONSTANT_CLASS_P (op0)
7760 || TREE_CODE (op0) == PAREN_EXPR)
7761 return fold_convert_loc (loc, type, op0);
7762 return NULL_TREE;
7764 CASE_CONVERT:
7765 case FLOAT_EXPR:
7766 case FIX_TRUNC_EXPR:
7767 if (TREE_TYPE (op0) == type)
7768 return op0;
7770 /* If we have (type) (a CMP b) and type is an integral type, return
7771 new expression involving the new type. */
7772 if (COMPARISON_CLASS_P (op0) && INTEGRAL_TYPE_P (type))
7773 return fold_build2_loc (loc, TREE_CODE (op0), type, TREE_OPERAND (op0, 0),
7774 TREE_OPERAND (op0, 1));
7776 /* Handle cases of two conversions in a row. */
7777 if (CONVERT_EXPR_P (op0))
7779 tree inside_type = TREE_TYPE (TREE_OPERAND (op0, 0));
7780 tree inter_type = TREE_TYPE (op0);
7781 int inside_int = INTEGRAL_TYPE_P (inside_type);
7782 int inside_ptr = POINTER_TYPE_P (inside_type);
7783 int inside_float = FLOAT_TYPE_P (inside_type);
7784 int inside_vec = TREE_CODE (inside_type) == VECTOR_TYPE;
7785 unsigned int inside_prec = TYPE_PRECISION (inside_type);
7786 int inside_unsignedp = TYPE_UNSIGNED (inside_type);
7787 int inter_int = INTEGRAL_TYPE_P (inter_type);
7788 int inter_ptr = POINTER_TYPE_P (inter_type);
7789 int inter_float = FLOAT_TYPE_P (inter_type);
7790 int inter_vec = TREE_CODE (inter_type) == VECTOR_TYPE;
7791 unsigned int inter_prec = TYPE_PRECISION (inter_type);
7792 int inter_unsignedp = TYPE_UNSIGNED (inter_type);
7793 int final_int = INTEGRAL_TYPE_P (type);
7794 int final_ptr = POINTER_TYPE_P (type);
7795 int final_float = FLOAT_TYPE_P (type);
7796 int final_vec = TREE_CODE (type) == VECTOR_TYPE;
7797 unsigned int final_prec = TYPE_PRECISION (type);
7798 int final_unsignedp = TYPE_UNSIGNED (type);
7800 /* In addition to the cases of two conversions in a row
7801 handled below, if we are converting something to its own
7802 type via an object of identical or wider precision, neither
7803 conversion is needed. */
7804 if (TYPE_MAIN_VARIANT (inside_type) == TYPE_MAIN_VARIANT (type)
7805 && (((inter_int || inter_ptr) && final_int)
7806 || (inter_float && final_float))
7807 && inter_prec >= final_prec)
7808 return fold_build1_loc (loc, code, type, TREE_OPERAND (op0, 0));
7810 /* Likewise, if the intermediate and initial types are either both
7811 float or both integer, we don't need the middle conversion if the
7812 former is wider than the latter and doesn't change the signedness
7813 (for integers). Avoid this if the final type is a pointer since
7814 then we sometimes need the middle conversion. Likewise if the
7815 final type has a precision not equal to the size of its mode. */
7816 if (((inter_int && inside_int)
7817 || (inter_float && inside_float)
7818 || (inter_vec && inside_vec))
7819 && inter_prec >= inside_prec
7820 && (inter_float || inter_vec
7821 || inter_unsignedp == inside_unsignedp)
7822 && ! (final_prec != GET_MODE_BITSIZE (TYPE_MODE (type))
7823 && TYPE_MODE (type) == TYPE_MODE (inter_type))
7824 && ! final_ptr
7825 && (! final_vec || inter_prec == inside_prec))
7826 return fold_build1_loc (loc, code, type, TREE_OPERAND (op0, 0));
7828 /* If we have a sign-extension of a zero-extended value, we can
7829 replace that by a single zero-extension. */
7830 if (inside_int && inter_int && final_int
7831 && inside_prec < inter_prec && inter_prec < final_prec
7832 && inside_unsignedp && !inter_unsignedp)
7833 return fold_build1_loc (loc, code, type, TREE_OPERAND (op0, 0));
7835 /* Two conversions in a row are not needed unless:
7836 - some conversion is floating-point (overstrict for now), or
7837 - some conversion is a vector (overstrict for now), or
7838 - the intermediate type is narrower than both initial and
7839 final, or
7840 - the intermediate type and innermost type differ in signedness,
7841 and the outermost type is wider than the intermediate, or
7842 - the initial type is a pointer type and the precisions of the
7843 intermediate and final types differ, or
7844 - the final type is a pointer type and the precisions of the
7845 initial and intermediate types differ. */
7846 if (! inside_float && ! inter_float && ! final_float
7847 && ! inside_vec && ! inter_vec && ! final_vec
7848 && (inter_prec >= inside_prec || inter_prec >= final_prec)
7849 && ! (inside_int && inter_int
7850 && inter_unsignedp != inside_unsignedp
7851 && inter_prec < final_prec)
7852 && ((inter_unsignedp && inter_prec > inside_prec)
7853 == (final_unsignedp && final_prec > inter_prec))
7854 && ! (inside_ptr && inter_prec != final_prec)
7855 && ! (final_ptr && inside_prec != inter_prec)
7856 && ! (final_prec != GET_MODE_BITSIZE (TYPE_MODE (type))
7857 && TYPE_MODE (type) == TYPE_MODE (inter_type)))
7858 return fold_build1_loc (loc, code, type, TREE_OPERAND (op0, 0));
7861 /* Handle (T *)&A.B.C for A being of type T and B and C
7862 living at offset zero. This occurs frequently in
7863 C++ upcasting and then accessing the base. */
7864 if (TREE_CODE (op0) == ADDR_EXPR
7865 && POINTER_TYPE_P (type)
7866 && handled_component_p (TREE_OPERAND (op0, 0)))
7868 HOST_WIDE_INT bitsize, bitpos;
7869 tree offset;
7870 enum machine_mode mode;
7871 int unsignedp, volatilep;
7872 tree base = TREE_OPERAND (op0, 0);
7873 base = get_inner_reference (base, &bitsize, &bitpos, &offset,
7874 &mode, &unsignedp, &volatilep, false);
7875 /* If the reference was to a (constant) zero offset, we can use
7876 the address of the base if it has the same base type
7877 as the result type and the pointer type is unqualified. */
7878 if (! offset && bitpos == 0
7879 && (TYPE_MAIN_VARIANT (TREE_TYPE (type))
7880 == TYPE_MAIN_VARIANT (TREE_TYPE (base)))
7881 && TYPE_QUALS (type) == TYPE_UNQUALIFIED)
7882 return fold_convert_loc (loc, type,
7883 build_fold_addr_expr_loc (loc, base));
7886 if (TREE_CODE (op0) == MODIFY_EXPR
7887 && TREE_CONSTANT (TREE_OPERAND (op0, 1))
7888 /* Detect assigning a bitfield. */
7889 && !(TREE_CODE (TREE_OPERAND (op0, 0)) == COMPONENT_REF
7890 && DECL_BIT_FIELD
7891 (TREE_OPERAND (TREE_OPERAND (op0, 0), 1))))
7893 /* Don't leave an assignment inside a conversion
7894 unless assigning a bitfield. */
7895 tem = fold_build1_loc (loc, code, type, TREE_OPERAND (op0, 1));
7896 /* First do the assignment, then return converted constant. */
7897 tem = build2 (COMPOUND_EXPR, TREE_TYPE (tem), op0, tem);
7898 TREE_NO_WARNING (tem) = 1;
7899 TREE_USED (tem) = 1;
7900 SET_EXPR_LOCATION (tem, loc);
7901 return tem;
7904 /* Convert (T)(x & c) into (T)x & (T)c, if c is an integer
7905 constants (if x has signed type, the sign bit cannot be set
7906 in c). This folds extension into the BIT_AND_EXPR.
7907 ??? We don't do it for BOOLEAN_TYPE or ENUMERAL_TYPE because they
7908 very likely don't have maximal range for their precision and this
7909 transformation effectively doesn't preserve non-maximal ranges. */
7910 if (TREE_CODE (type) == INTEGER_TYPE
7911 && TREE_CODE (op0) == BIT_AND_EXPR
7912 && TREE_CODE (TREE_OPERAND (op0, 1)) == INTEGER_CST)
7914 tree and_expr = op0;
7915 tree and0 = TREE_OPERAND (and_expr, 0);
7916 tree and1 = TREE_OPERAND (and_expr, 1);
7917 int change = 0;
7919 if (TYPE_UNSIGNED (TREE_TYPE (and_expr))
7920 || (TYPE_PRECISION (type)
7921 <= TYPE_PRECISION (TREE_TYPE (and_expr))))
7922 change = 1;
7923 else if (TYPE_PRECISION (TREE_TYPE (and1))
7924 <= HOST_BITS_PER_WIDE_INT
7925 && host_integerp (and1, 1))
7927 unsigned HOST_WIDE_INT cst;
7929 cst = tree_low_cst (and1, 1);
7930 cst &= (HOST_WIDE_INT) -1
7931 << (TYPE_PRECISION (TREE_TYPE (and1)) - 1);
7932 change = (cst == 0);
7933 #ifdef LOAD_EXTEND_OP
7934 if (change
7935 && !flag_syntax_only
7936 && (LOAD_EXTEND_OP (TYPE_MODE (TREE_TYPE (and0)))
7937 == ZERO_EXTEND))
7939 tree uns = unsigned_type_for (TREE_TYPE (and0));
7940 and0 = fold_convert_loc (loc, uns, and0);
7941 and1 = fold_convert_loc (loc, uns, and1);
7943 #endif
7945 if (change)
7947 tem = force_fit_type_double (type, tree_to_double_int (and1),
7948 0, TREE_OVERFLOW (and1));
7949 return fold_build2_loc (loc, BIT_AND_EXPR, type,
7950 fold_convert_loc (loc, type, and0), tem);
7954 /* Convert (T1)(X p+ Y) into ((T1)X p+ Y), for pointer type,
7955 when one of the new casts will fold away. Conservatively we assume
7956 that this happens when X or Y is NOP_EXPR or Y is INTEGER_CST. */
7957 if (POINTER_TYPE_P (type)
7958 && TREE_CODE (arg0) == POINTER_PLUS_EXPR
7959 && (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
7960 || TREE_CODE (TREE_OPERAND (arg0, 0)) == NOP_EXPR
7961 || TREE_CODE (TREE_OPERAND (arg0, 1)) == NOP_EXPR))
7963 tree arg00 = TREE_OPERAND (arg0, 0);
7964 tree arg01 = TREE_OPERAND (arg0, 1);
7966 return fold_build2_loc (loc,
7967 TREE_CODE (arg0), type,
7968 fold_convert_loc (loc, type, arg00),
7969 fold_convert_loc (loc, sizetype, arg01));
7972 /* Convert (T1)(~(T2)X) into ~(T1)X if T1 and T2 are integral types
7973 of the same precision, and X is an integer type not narrower than
7974 types T1 or T2, i.e. the cast (T2)X isn't an extension. */
7975 if (INTEGRAL_TYPE_P (type)
7976 && TREE_CODE (op0) == BIT_NOT_EXPR
7977 && INTEGRAL_TYPE_P (TREE_TYPE (op0))
7978 && CONVERT_EXPR_P (TREE_OPERAND (op0, 0))
7979 && TYPE_PRECISION (type) == TYPE_PRECISION (TREE_TYPE (op0)))
7981 tem = TREE_OPERAND (TREE_OPERAND (op0, 0), 0);
7982 if (INTEGRAL_TYPE_P (TREE_TYPE (tem))
7983 && TYPE_PRECISION (type) <= TYPE_PRECISION (TREE_TYPE (tem)))
7984 return fold_build1_loc (loc, BIT_NOT_EXPR, type,
7985 fold_convert_loc (loc, type, tem));
7988 /* Convert (T1)(X * Y) into (T1)X * (T1)Y if T1 is narrower than the
7989 type of X and Y (integer types only). */
7990 if (INTEGRAL_TYPE_P (type)
7991 && TREE_CODE (op0) == MULT_EXPR
7992 && INTEGRAL_TYPE_P (TREE_TYPE (op0))
7993 && TYPE_PRECISION (type) < TYPE_PRECISION (TREE_TYPE (op0)))
7995 /* Be careful not to introduce new overflows. */
7996 tree mult_type;
7997 if (TYPE_OVERFLOW_WRAPS (type))
7998 mult_type = type;
7999 else
8000 mult_type = unsigned_type_for (type);
8002 if (TYPE_PRECISION (mult_type) < TYPE_PRECISION (TREE_TYPE (op0)))
8004 tem = fold_build2_loc (loc, MULT_EXPR, mult_type,
8005 fold_convert_loc (loc, mult_type,
8006 TREE_OPERAND (op0, 0)),
8007 fold_convert_loc (loc, mult_type,
8008 TREE_OPERAND (op0, 1)));
8009 return fold_convert_loc (loc, type, tem);
8013 tem = fold_convert_const (code, type, op0);
8014 return tem ? tem : NULL_TREE;
8016 case ADDR_SPACE_CONVERT_EXPR:
8017 if (integer_zerop (arg0))
8018 return fold_convert_const (code, type, arg0);
8019 return NULL_TREE;
8021 case FIXED_CONVERT_EXPR:
8022 tem = fold_convert_const (code, type, arg0);
8023 return tem ? tem : NULL_TREE;
8025 case VIEW_CONVERT_EXPR:
8026 if (TREE_TYPE (op0) == type)
8027 return op0;
8028 if (TREE_CODE (op0) == VIEW_CONVERT_EXPR)
8029 return fold_build1_loc (loc, VIEW_CONVERT_EXPR,
8030 type, TREE_OPERAND (op0, 0));
8031 if (TREE_CODE (op0) == MEM_REF)
8032 return fold_build2_loc (loc, MEM_REF, type,
8033 TREE_OPERAND (op0, 0), TREE_OPERAND (op0, 1));
8035 /* For integral conversions with the same precision or pointer
8036 conversions use a NOP_EXPR instead. */
8037 if ((INTEGRAL_TYPE_P (type)
8038 || POINTER_TYPE_P (type))
8039 && (INTEGRAL_TYPE_P (TREE_TYPE (op0))
8040 || POINTER_TYPE_P (TREE_TYPE (op0)))
8041 && TYPE_PRECISION (type) == TYPE_PRECISION (TREE_TYPE (op0)))
8042 return fold_convert_loc (loc, type, op0);
8044 /* Strip inner integral conversions that do not change the precision. */
8045 if (CONVERT_EXPR_P (op0)
8046 && (INTEGRAL_TYPE_P (TREE_TYPE (op0))
8047 || POINTER_TYPE_P (TREE_TYPE (op0)))
8048 && (INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (op0, 0)))
8049 || POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (op0, 0))))
8050 && (TYPE_PRECISION (TREE_TYPE (op0))
8051 == TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (op0, 0)))))
8052 return fold_build1_loc (loc, VIEW_CONVERT_EXPR,
8053 type, TREE_OPERAND (op0, 0));
8055 return fold_view_convert_expr (type, op0);
8057 case NEGATE_EXPR:
8058 tem = fold_negate_expr (loc, arg0);
8059 if (tem)
8060 return fold_convert_loc (loc, type, tem);
8061 return NULL_TREE;
8063 case ABS_EXPR:
8064 if (TREE_CODE (arg0) == INTEGER_CST || TREE_CODE (arg0) == REAL_CST)
8065 return fold_abs_const (arg0, type);
8066 else if (TREE_CODE (arg0) == NEGATE_EXPR)
8067 return fold_build1_loc (loc, ABS_EXPR, type, TREE_OPERAND (arg0, 0));
8068 /* Convert fabs((double)float) into (double)fabsf(float). */
8069 else if (TREE_CODE (arg0) == NOP_EXPR
8070 && TREE_CODE (type) == REAL_TYPE)
8072 tree targ0 = strip_float_extensions (arg0);
8073 if (targ0 != arg0)
8074 return fold_convert_loc (loc, type,
8075 fold_build1_loc (loc, ABS_EXPR,
8076 TREE_TYPE (targ0),
8077 targ0));
8079 /* ABS_EXPR<ABS_EXPR<x>> = ABS_EXPR<x> even if flag_wrapv is on. */
8080 else if (TREE_CODE (arg0) == ABS_EXPR)
8081 return arg0;
8082 else if (tree_expr_nonnegative_p (arg0))
8083 return arg0;
8085 /* Strip sign ops from argument. */
8086 if (TREE_CODE (type) == REAL_TYPE)
8088 tem = fold_strip_sign_ops (arg0);
8089 if (tem)
8090 return fold_build1_loc (loc, ABS_EXPR, type,
8091 fold_convert_loc (loc, type, tem));
8093 return NULL_TREE;
8095 case CONJ_EXPR:
8096 if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
8097 return fold_convert_loc (loc, type, arg0);
8098 if (TREE_CODE (arg0) == COMPLEX_EXPR)
8100 tree itype = TREE_TYPE (type);
8101 tree rpart = fold_convert_loc (loc, itype, TREE_OPERAND (arg0, 0));
8102 tree ipart = fold_convert_loc (loc, itype, TREE_OPERAND (arg0, 1));
8103 return fold_build2_loc (loc, COMPLEX_EXPR, type, rpart,
8104 negate_expr (ipart));
8106 if (TREE_CODE (arg0) == COMPLEX_CST)
8108 tree itype = TREE_TYPE (type);
8109 tree rpart = fold_convert_loc (loc, itype, TREE_REALPART (arg0));
8110 tree ipart = fold_convert_loc (loc, itype, TREE_IMAGPART (arg0));
8111 return build_complex (type, rpart, negate_expr (ipart));
8113 if (TREE_CODE (arg0) == CONJ_EXPR)
8114 return fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
8115 return NULL_TREE;
8117 case BIT_NOT_EXPR:
8118 if (TREE_CODE (arg0) == INTEGER_CST)
8119 return fold_not_const (arg0, type);
8120 else if (TREE_CODE (arg0) == BIT_NOT_EXPR)
8121 return fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
8122 /* Convert ~ (-A) to A - 1. */
8123 else if (INTEGRAL_TYPE_P (type) && TREE_CODE (arg0) == NEGATE_EXPR)
8124 return fold_build2_loc (loc, MINUS_EXPR, type,
8125 fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0)),
8126 build_int_cst (type, 1));
8127 /* Convert ~ (A - 1) or ~ (A + -1) to -A. */
8128 else if (INTEGRAL_TYPE_P (type)
8129 && ((TREE_CODE (arg0) == MINUS_EXPR
8130 && integer_onep (TREE_OPERAND (arg0, 1)))
8131 || (TREE_CODE (arg0) == PLUS_EXPR
8132 && integer_all_onesp (TREE_OPERAND (arg0, 1)))))
8133 return fold_build1_loc (loc, NEGATE_EXPR, type,
8134 fold_convert_loc (loc, type,
8135 TREE_OPERAND (arg0, 0)));
8136 /* Convert ~(X ^ Y) to ~X ^ Y or X ^ ~Y if ~X or ~Y simplify. */
8137 else if (TREE_CODE (arg0) == BIT_XOR_EXPR
8138 && (tem = fold_unary_loc (loc, BIT_NOT_EXPR, type,
8139 fold_convert_loc (loc, type,
8140 TREE_OPERAND (arg0, 0)))))
8141 return fold_build2_loc (loc, BIT_XOR_EXPR, type, tem,
8142 fold_convert_loc (loc, type,
8143 TREE_OPERAND (arg0, 1)));
8144 else if (TREE_CODE (arg0) == BIT_XOR_EXPR
8145 && (tem = fold_unary_loc (loc, BIT_NOT_EXPR, type,
8146 fold_convert_loc (loc, type,
8147 TREE_OPERAND (arg0, 1)))))
8148 return fold_build2_loc (loc, BIT_XOR_EXPR, type,
8149 fold_convert_loc (loc, type,
8150 TREE_OPERAND (arg0, 0)), tem);
8151 /* Perform BIT_NOT_EXPR on each element individually. */
8152 else if (TREE_CODE (arg0) == VECTOR_CST)
8154 tree elements = TREE_VECTOR_CST_ELTS (arg0), elem, list = NULL_TREE;
8155 int count = TYPE_VECTOR_SUBPARTS (type), i;
8157 for (i = 0; i < count; i++)
8159 if (elements)
8161 elem = TREE_VALUE (elements);
8162 elem = fold_unary_loc (loc, BIT_NOT_EXPR, TREE_TYPE (type), elem);
8163 if (elem == NULL_TREE)
8164 break;
8165 elements = TREE_CHAIN (elements);
8167 else
8168 elem = build_int_cst (TREE_TYPE (type), -1);
8169 list = tree_cons (NULL_TREE, elem, list);
8171 if (i == count)
8172 return build_vector (type, nreverse (list));
8175 return NULL_TREE;
8177 case TRUTH_NOT_EXPR:
8178 /* The argument to invert_truthvalue must have Boolean type. */
8179 if (TREE_CODE (TREE_TYPE (arg0)) != BOOLEAN_TYPE)
8180 arg0 = fold_convert_loc (loc, boolean_type_node, arg0);
8182 /* Note that the operand of this must be an int
8183 and its values must be 0 or 1.
8184 ("true" is a fixed value perhaps depending on the language,
8185 but we don't handle values other than 1 correctly yet.) */
8186 tem = fold_truth_not_expr (loc, arg0);
8187 if (!tem)
8188 return NULL_TREE;
8189 return fold_convert_loc (loc, type, tem);
8191 case REALPART_EXPR:
8192 if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
8193 return fold_convert_loc (loc, type, arg0);
8194 if (TREE_CODE (arg0) == COMPLEX_EXPR)
8195 return omit_one_operand_loc (loc, type, TREE_OPERAND (arg0, 0),
8196 TREE_OPERAND (arg0, 1));
8197 if (TREE_CODE (arg0) == COMPLEX_CST)
8198 return fold_convert_loc (loc, type, TREE_REALPART (arg0));
8199 if (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
8201 tree itype = TREE_TYPE (TREE_TYPE (arg0));
8202 tem = fold_build2_loc (loc, TREE_CODE (arg0), itype,
8203 fold_build1_loc (loc, REALPART_EXPR, itype,
8204 TREE_OPERAND (arg0, 0)),
8205 fold_build1_loc (loc, REALPART_EXPR, itype,
8206 TREE_OPERAND (arg0, 1)));
8207 return fold_convert_loc (loc, type, tem);
8209 if (TREE_CODE (arg0) == CONJ_EXPR)
8211 tree itype = TREE_TYPE (TREE_TYPE (arg0));
8212 tem = fold_build1_loc (loc, REALPART_EXPR, itype,
8213 TREE_OPERAND (arg0, 0));
8214 return fold_convert_loc (loc, type, tem);
8216 if (TREE_CODE (arg0) == CALL_EXPR)
8218 tree fn = get_callee_fndecl (arg0);
8219 if (fn && DECL_BUILT_IN_CLASS (fn) == BUILT_IN_NORMAL)
8220 switch (DECL_FUNCTION_CODE (fn))
8222 CASE_FLT_FN (BUILT_IN_CEXPI):
8223 fn = mathfn_built_in (type, BUILT_IN_COS);
8224 if (fn)
8225 return build_call_expr_loc (loc, fn, 1, CALL_EXPR_ARG (arg0, 0));
8226 break;
8228 default:
8229 break;
8232 return NULL_TREE;
8234 case IMAGPART_EXPR:
8235 if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
8236 return fold_convert_loc (loc, type, integer_zero_node);
8237 if (TREE_CODE (arg0) == COMPLEX_EXPR)
8238 return omit_one_operand_loc (loc, type, TREE_OPERAND (arg0, 1),
8239 TREE_OPERAND (arg0, 0));
8240 if (TREE_CODE (arg0) == COMPLEX_CST)
8241 return fold_convert_loc (loc, type, TREE_IMAGPART (arg0));
8242 if (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
8244 tree itype = TREE_TYPE (TREE_TYPE (arg0));
8245 tem = fold_build2_loc (loc, TREE_CODE (arg0), itype,
8246 fold_build1_loc (loc, IMAGPART_EXPR, itype,
8247 TREE_OPERAND (arg0, 0)),
8248 fold_build1_loc (loc, IMAGPART_EXPR, itype,
8249 TREE_OPERAND (arg0, 1)));
8250 return fold_convert_loc (loc, type, tem);
8252 if (TREE_CODE (arg0) == CONJ_EXPR)
8254 tree itype = TREE_TYPE (TREE_TYPE (arg0));
8255 tem = fold_build1_loc (loc, IMAGPART_EXPR, itype, TREE_OPERAND (arg0, 0));
8256 return fold_convert_loc (loc, type, negate_expr (tem));
8258 if (TREE_CODE (arg0) == CALL_EXPR)
8260 tree fn = get_callee_fndecl (arg0);
8261 if (fn && DECL_BUILT_IN_CLASS (fn) == BUILT_IN_NORMAL)
8262 switch (DECL_FUNCTION_CODE (fn))
8264 CASE_FLT_FN (BUILT_IN_CEXPI):
8265 fn = mathfn_built_in (type, BUILT_IN_SIN);
8266 if (fn)
8267 return build_call_expr_loc (loc, fn, 1, CALL_EXPR_ARG (arg0, 0));
8268 break;
8270 default:
8271 break;
8274 return NULL_TREE;
8276 case INDIRECT_REF:
8277 /* Fold *&X to X if X is an lvalue. */
8278 if (TREE_CODE (op0) == ADDR_EXPR)
8280 tree op00 = TREE_OPERAND (op0, 0);
8281 if ((TREE_CODE (op00) == VAR_DECL
8282 || TREE_CODE (op00) == PARM_DECL
8283 || TREE_CODE (op00) == RESULT_DECL)
8284 && !TREE_READONLY (op00))
8285 return op00;
8287 return NULL_TREE;
8289 default:
8290 return NULL_TREE;
8291 } /* switch (code) */
8295 /* If the operation was a conversion do _not_ mark a resulting constant
8296 with TREE_OVERFLOW if the original constant was not. These conversions
8297 have implementation defined behavior and retaining the TREE_OVERFLOW
8298 flag here would confuse later passes such as VRP. */
8299 tree
8300 fold_unary_ignore_overflow_loc (location_t loc, enum tree_code code,
8301 tree type, tree op0)
8303 tree res = fold_unary_loc (loc, code, type, op0);
8304 if (res
8305 && TREE_CODE (res) == INTEGER_CST
8306 && TREE_CODE (op0) == INTEGER_CST
8307 && CONVERT_EXPR_CODE_P (code))
8308 TREE_OVERFLOW (res) = TREE_OVERFLOW (op0);
8310 return res;
8313 /* Fold a binary expression of code CODE and type TYPE with operands
8314 OP0 and OP1, containing either a MIN-MAX or a MAX-MIN combination.
8315 Return the folded expression if folding is successful. Otherwise,
8316 return NULL_TREE. */
8318 static tree
8319 fold_minmax (location_t loc, enum tree_code code, tree type, tree op0, tree op1)
8321 enum tree_code compl_code;
8323 if (code == MIN_EXPR)
8324 compl_code = MAX_EXPR;
8325 else if (code == MAX_EXPR)
8326 compl_code = MIN_EXPR;
8327 else
8328 gcc_unreachable ();
8330 /* MIN (MAX (a, b), b) == b. */
8331 if (TREE_CODE (op0) == compl_code
8332 && operand_equal_p (TREE_OPERAND (op0, 1), op1, 0))
8333 return omit_one_operand_loc (loc, type, op1, TREE_OPERAND (op0, 0));
8335 /* MIN (MAX (b, a), b) == b. */
8336 if (TREE_CODE (op0) == compl_code
8337 && operand_equal_p (TREE_OPERAND (op0, 0), op1, 0)
8338 && reorder_operands_p (TREE_OPERAND (op0, 1), op1))
8339 return omit_one_operand_loc (loc, type, op1, TREE_OPERAND (op0, 1));
8341 /* MIN (a, MAX (a, b)) == a. */
8342 if (TREE_CODE (op1) == compl_code
8343 && operand_equal_p (op0, TREE_OPERAND (op1, 0), 0)
8344 && reorder_operands_p (op0, TREE_OPERAND (op1, 1)))
8345 return omit_one_operand_loc (loc, type, op0, TREE_OPERAND (op1, 1));
8347 /* MIN (a, MAX (b, a)) == a. */
8348 if (TREE_CODE (op1) == compl_code
8349 && operand_equal_p (op0, TREE_OPERAND (op1, 1), 0)
8350 && reorder_operands_p (op0, TREE_OPERAND (op1, 0)))
8351 return omit_one_operand_loc (loc, type, op0, TREE_OPERAND (op1, 0));
8353 return NULL_TREE;
8356 /* Helper that tries to canonicalize the comparison ARG0 CODE ARG1
8357 by changing CODE to reduce the magnitude of constants involved in
8358 ARG0 of the comparison.
8359 Returns a canonicalized comparison tree if a simplification was
8360 possible, otherwise returns NULL_TREE.
8361 Set *STRICT_OVERFLOW_P to true if the canonicalization is only
8362 valid if signed overflow is undefined. */
8364 static tree
8365 maybe_canonicalize_comparison_1 (location_t loc, enum tree_code code, tree type,
8366 tree arg0, tree arg1,
8367 bool *strict_overflow_p)
8369 enum tree_code code0 = TREE_CODE (arg0);
8370 tree t, cst0 = NULL_TREE;
8371 int sgn0;
8372 bool swap = false;
8374 /* Match A +- CST code arg1 and CST code arg1. We can change the
8375 first form only if overflow is undefined. */
8376 if (!((TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0))
8377 /* In principle pointers also have undefined overflow behavior,
8378 but that causes problems elsewhere. */
8379 && !POINTER_TYPE_P (TREE_TYPE (arg0))
8380 && (code0 == MINUS_EXPR
8381 || code0 == PLUS_EXPR)
8382 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
8383 || code0 == INTEGER_CST))
8384 return NULL_TREE;
8386 /* Identify the constant in arg0 and its sign. */
8387 if (code0 == INTEGER_CST)
8388 cst0 = arg0;
8389 else
8390 cst0 = TREE_OPERAND (arg0, 1);
8391 sgn0 = tree_int_cst_sgn (cst0);
8393 /* Overflowed constants and zero will cause problems. */
8394 if (integer_zerop (cst0)
8395 || TREE_OVERFLOW (cst0))
8396 return NULL_TREE;
8398 /* See if we can reduce the magnitude of the constant in
8399 arg0 by changing the comparison code. */
8400 if (code0 == INTEGER_CST)
8402 /* CST <= arg1 -> CST-1 < arg1. */
8403 if (code == LE_EXPR && sgn0 == 1)
8404 code = LT_EXPR;
8405 /* -CST < arg1 -> -CST-1 <= arg1. */
8406 else if (code == LT_EXPR && sgn0 == -1)
8407 code = LE_EXPR;
8408 /* CST > arg1 -> CST-1 >= arg1. */
8409 else if (code == GT_EXPR && sgn0 == 1)
8410 code = GE_EXPR;
8411 /* -CST >= arg1 -> -CST-1 > arg1. */
8412 else if (code == GE_EXPR && sgn0 == -1)
8413 code = GT_EXPR;
8414 else
8415 return NULL_TREE;
8416 /* arg1 code' CST' might be more canonical. */
8417 swap = true;
8419 else
8421 /* A - CST < arg1 -> A - CST-1 <= arg1. */
8422 if (code == LT_EXPR
8423 && code0 == ((sgn0 == -1) ? PLUS_EXPR : MINUS_EXPR))
8424 code = LE_EXPR;
8425 /* A + CST > arg1 -> A + CST-1 >= arg1. */
8426 else if (code == GT_EXPR
8427 && code0 == ((sgn0 == -1) ? MINUS_EXPR : PLUS_EXPR))
8428 code = GE_EXPR;
8429 /* A + CST <= arg1 -> A + CST-1 < arg1. */
8430 else if (code == LE_EXPR
8431 && code0 == ((sgn0 == -1) ? MINUS_EXPR : PLUS_EXPR))
8432 code = LT_EXPR;
8433 /* A - CST >= arg1 -> A - CST-1 > arg1. */
8434 else if (code == GE_EXPR
8435 && code0 == ((sgn0 == -1) ? PLUS_EXPR : MINUS_EXPR))
8436 code = GT_EXPR;
8437 else
8438 return NULL_TREE;
8439 *strict_overflow_p = true;
8442 /* Now build the constant reduced in magnitude. But not if that
8443 would produce one outside of its types range. */
8444 if (INTEGRAL_TYPE_P (TREE_TYPE (cst0))
8445 && ((sgn0 == 1
8446 && TYPE_MIN_VALUE (TREE_TYPE (cst0))
8447 && tree_int_cst_equal (cst0, TYPE_MIN_VALUE (TREE_TYPE (cst0))))
8448 || (sgn0 == -1
8449 && TYPE_MAX_VALUE (TREE_TYPE (cst0))
8450 && tree_int_cst_equal (cst0, TYPE_MAX_VALUE (TREE_TYPE (cst0))))))
8451 /* We cannot swap the comparison here as that would cause us to
8452 endlessly recurse. */
8453 return NULL_TREE;
8455 t = int_const_binop (sgn0 == -1 ? PLUS_EXPR : MINUS_EXPR,
8456 cst0, build_int_cst (TREE_TYPE (cst0), 1), 0);
8457 if (code0 != INTEGER_CST)
8458 t = fold_build2_loc (loc, code0, TREE_TYPE (arg0), TREE_OPERAND (arg0, 0), t);
8460 /* If swapping might yield to a more canonical form, do so. */
8461 if (swap)
8462 return fold_build2_loc (loc, swap_tree_comparison (code), type, arg1, t);
8463 else
8464 return fold_build2_loc (loc, code, type, t, arg1);
8467 /* Canonicalize the comparison ARG0 CODE ARG1 with type TYPE with undefined
8468 overflow further. Try to decrease the magnitude of constants involved
8469 by changing LE_EXPR and GE_EXPR to LT_EXPR and GT_EXPR or vice versa
8470 and put sole constants at the second argument position.
8471 Returns the canonicalized tree if changed, otherwise NULL_TREE. */
8473 static tree
8474 maybe_canonicalize_comparison (location_t loc, enum tree_code code, tree type,
8475 tree arg0, tree arg1)
8477 tree t;
8478 bool strict_overflow_p;
8479 const char * const warnmsg = G_("assuming signed overflow does not occur "
8480 "when reducing constant in comparison");
8482 /* Try canonicalization by simplifying arg0. */
8483 strict_overflow_p = false;
8484 t = maybe_canonicalize_comparison_1 (loc, code, type, arg0, arg1,
8485 &strict_overflow_p);
8486 if (t)
8488 if (strict_overflow_p)
8489 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_MAGNITUDE);
8490 return t;
8493 /* Try canonicalization by simplifying arg1 using the swapped
8494 comparison. */
8495 code = swap_tree_comparison (code);
8496 strict_overflow_p = false;
8497 t = maybe_canonicalize_comparison_1 (loc, code, type, arg1, arg0,
8498 &strict_overflow_p);
8499 if (t && strict_overflow_p)
8500 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_MAGNITUDE);
8501 return t;
8504 /* Return whether BASE + OFFSET + BITPOS may wrap around the address
8505 space. This is used to avoid issuing overflow warnings for
8506 expressions like &p->x which can not wrap. */
8508 static bool
8509 pointer_may_wrap_p (tree base, tree offset, HOST_WIDE_INT bitpos)
8511 unsigned HOST_WIDE_INT offset_low, total_low;
8512 HOST_WIDE_INT size, offset_high, total_high;
8514 if (!POINTER_TYPE_P (TREE_TYPE (base)))
8515 return true;
8517 if (bitpos < 0)
8518 return true;
8520 if (offset == NULL_TREE)
8522 offset_low = 0;
8523 offset_high = 0;
8525 else if (TREE_CODE (offset) != INTEGER_CST || TREE_OVERFLOW (offset))
8526 return true;
8527 else
8529 offset_low = TREE_INT_CST_LOW (offset);
8530 offset_high = TREE_INT_CST_HIGH (offset);
8533 if (add_double_with_sign (offset_low, offset_high,
8534 bitpos / BITS_PER_UNIT, 0,
8535 &total_low, &total_high,
8536 true))
8537 return true;
8539 if (total_high != 0)
8540 return true;
8542 size = int_size_in_bytes (TREE_TYPE (TREE_TYPE (base)));
8543 if (size <= 0)
8544 return true;
8546 /* We can do slightly better for SIZE if we have an ADDR_EXPR of an
8547 array. */
8548 if (TREE_CODE (base) == ADDR_EXPR)
8550 HOST_WIDE_INT base_size;
8552 base_size = int_size_in_bytes (TREE_TYPE (TREE_OPERAND (base, 0)));
8553 if (base_size > 0 && size < base_size)
8554 size = base_size;
8557 return total_low > (unsigned HOST_WIDE_INT) size;
8560 /* Subroutine of fold_binary. This routine performs all of the
8561 transformations that are common to the equality/inequality
8562 operators (EQ_EXPR and NE_EXPR) and the ordering operators
8563 (LT_EXPR, LE_EXPR, GE_EXPR and GT_EXPR). Callers other than
8564 fold_binary should call fold_binary. Fold a comparison with
8565 tree code CODE and type TYPE with operands OP0 and OP1. Return
8566 the folded comparison or NULL_TREE. */
8568 static tree
8569 fold_comparison (location_t loc, enum tree_code code, tree type,
8570 tree op0, tree op1)
8572 tree arg0, arg1, tem;
8574 arg0 = op0;
8575 arg1 = op1;
8577 STRIP_SIGN_NOPS (arg0);
8578 STRIP_SIGN_NOPS (arg1);
8580 tem = fold_relational_const (code, type, arg0, arg1);
8581 if (tem != NULL_TREE)
8582 return tem;
8584 /* If one arg is a real or integer constant, put it last. */
8585 if (tree_swap_operands_p (arg0, arg1, true))
8586 return fold_build2_loc (loc, swap_tree_comparison (code), type, op1, op0);
8588 /* Transform comparisons of the form X +- C1 CMP C2 to X CMP C2 +- C1. */
8589 if ((TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
8590 && (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
8591 && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1))
8592 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
8593 && (TREE_CODE (arg1) == INTEGER_CST
8594 && !TREE_OVERFLOW (arg1)))
8596 tree const1 = TREE_OPERAND (arg0, 1);
8597 tree const2 = arg1;
8598 tree variable = TREE_OPERAND (arg0, 0);
8599 tree lhs;
8600 int lhs_add;
8601 lhs_add = TREE_CODE (arg0) != PLUS_EXPR;
8603 lhs = fold_build2_loc (loc, lhs_add ? PLUS_EXPR : MINUS_EXPR,
8604 TREE_TYPE (arg1), const2, const1);
8606 /* If the constant operation overflowed this can be
8607 simplified as a comparison against INT_MAX/INT_MIN. */
8608 if (TREE_CODE (lhs) == INTEGER_CST
8609 && TREE_OVERFLOW (lhs))
8611 int const1_sgn = tree_int_cst_sgn (const1);
8612 enum tree_code code2 = code;
8614 /* Get the sign of the constant on the lhs if the
8615 operation were VARIABLE + CONST1. */
8616 if (TREE_CODE (arg0) == MINUS_EXPR)
8617 const1_sgn = -const1_sgn;
8619 /* The sign of the constant determines if we overflowed
8620 INT_MAX (const1_sgn == -1) or INT_MIN (const1_sgn == 1).
8621 Canonicalize to the INT_MIN overflow by swapping the comparison
8622 if necessary. */
8623 if (const1_sgn == -1)
8624 code2 = swap_tree_comparison (code);
8626 /* We now can look at the canonicalized case
8627 VARIABLE + 1 CODE2 INT_MIN
8628 and decide on the result. */
8629 if (code2 == LT_EXPR
8630 || code2 == LE_EXPR
8631 || code2 == EQ_EXPR)
8632 return omit_one_operand_loc (loc, type, boolean_false_node, variable);
8633 else if (code2 == NE_EXPR
8634 || code2 == GE_EXPR
8635 || code2 == GT_EXPR)
8636 return omit_one_operand_loc (loc, type, boolean_true_node, variable);
8639 if (TREE_CODE (lhs) == TREE_CODE (arg1)
8640 && (TREE_CODE (lhs) != INTEGER_CST
8641 || !TREE_OVERFLOW (lhs)))
8643 fold_overflow_warning ("assuming signed overflow does not occur "
8644 "when changing X +- C1 cmp C2 to "
8645 "X cmp C1 +- C2",
8646 WARN_STRICT_OVERFLOW_COMPARISON);
8647 return fold_build2_loc (loc, code, type, variable, lhs);
8651 /* For comparisons of pointers we can decompose it to a compile time
8652 comparison of the base objects and the offsets into the object.
8653 This requires at least one operand being an ADDR_EXPR or a
8654 POINTER_PLUS_EXPR to do more than the operand_equal_p test below. */
8655 if (POINTER_TYPE_P (TREE_TYPE (arg0))
8656 && (TREE_CODE (arg0) == ADDR_EXPR
8657 || TREE_CODE (arg1) == ADDR_EXPR
8658 || TREE_CODE (arg0) == POINTER_PLUS_EXPR
8659 || TREE_CODE (arg1) == POINTER_PLUS_EXPR))
8661 tree base0, base1, offset0 = NULL_TREE, offset1 = NULL_TREE;
8662 HOST_WIDE_INT bitsize, bitpos0 = 0, bitpos1 = 0;
8663 enum machine_mode mode;
8664 int volatilep, unsignedp;
8665 bool indirect_base0 = false, indirect_base1 = false;
8667 /* Get base and offset for the access. Strip ADDR_EXPR for
8668 get_inner_reference, but put it back by stripping INDIRECT_REF
8669 off the base object if possible. indirect_baseN will be true
8670 if baseN is not an address but refers to the object itself. */
8671 base0 = arg0;
8672 if (TREE_CODE (arg0) == ADDR_EXPR)
8674 base0 = get_inner_reference (TREE_OPERAND (arg0, 0),
8675 &bitsize, &bitpos0, &offset0, &mode,
8676 &unsignedp, &volatilep, false);
8677 if (TREE_CODE (base0) == INDIRECT_REF)
8678 base0 = TREE_OPERAND (base0, 0);
8679 else
8680 indirect_base0 = true;
8682 else if (TREE_CODE (arg0) == POINTER_PLUS_EXPR)
8684 base0 = TREE_OPERAND (arg0, 0);
8685 if (TREE_CODE (base0) == ADDR_EXPR)
8687 base0 = TREE_OPERAND (base0, 0);
8688 indirect_base0 = true;
8690 offset0 = TREE_OPERAND (arg0, 1);
8693 base1 = arg1;
8694 if (TREE_CODE (arg1) == ADDR_EXPR)
8696 base1 = get_inner_reference (TREE_OPERAND (arg1, 0),
8697 &bitsize, &bitpos1, &offset1, &mode,
8698 &unsignedp, &volatilep, false);
8699 if (TREE_CODE (base1) == INDIRECT_REF)
8700 base1 = TREE_OPERAND (base1, 0);
8701 else
8702 indirect_base1 = true;
8704 else if (TREE_CODE (arg1) == POINTER_PLUS_EXPR)
8706 base1 = TREE_OPERAND (arg1, 0);
8707 if (TREE_CODE (base1) == ADDR_EXPR)
8709 base1 = TREE_OPERAND (base1, 0);
8710 indirect_base1 = true;
8712 offset1 = TREE_OPERAND (arg1, 1);
8715 /* A local variable can never be pointed to by
8716 the default SSA name of an incoming parameter. */
8717 if ((TREE_CODE (arg0) == ADDR_EXPR
8718 && indirect_base0
8719 && TREE_CODE (base0) == VAR_DECL
8720 && auto_var_in_fn_p (base0, current_function_decl)
8721 && !indirect_base1
8722 && TREE_CODE (base1) == SSA_NAME
8723 && TREE_CODE (SSA_NAME_VAR (base1)) == PARM_DECL
8724 && SSA_NAME_IS_DEFAULT_DEF (base1))
8725 || (TREE_CODE (arg1) == ADDR_EXPR
8726 && indirect_base1
8727 && TREE_CODE (base1) == VAR_DECL
8728 && auto_var_in_fn_p (base1, current_function_decl)
8729 && !indirect_base0
8730 && TREE_CODE (base0) == SSA_NAME
8731 && TREE_CODE (SSA_NAME_VAR (base0)) == PARM_DECL
8732 && SSA_NAME_IS_DEFAULT_DEF (base0)))
8734 if (code == NE_EXPR)
8735 return constant_boolean_node (1, type);
8736 else if (code == EQ_EXPR)
8737 return constant_boolean_node (0, type);
8739 /* If we have equivalent bases we might be able to simplify. */
8740 else if (indirect_base0 == indirect_base1
8741 && operand_equal_p (base0, base1, 0))
8743 /* We can fold this expression to a constant if the non-constant
8744 offset parts are equal. */
8745 if ((offset0 == offset1
8746 || (offset0 && offset1
8747 && operand_equal_p (offset0, offset1, 0)))
8748 && (code == EQ_EXPR
8749 || code == NE_EXPR
8750 || POINTER_TYPE_OVERFLOW_UNDEFINED))
8753 if (code != EQ_EXPR
8754 && code != NE_EXPR
8755 && bitpos0 != bitpos1
8756 && (pointer_may_wrap_p (base0, offset0, bitpos0)
8757 || pointer_may_wrap_p (base1, offset1, bitpos1)))
8758 fold_overflow_warning (("assuming pointer wraparound does not "
8759 "occur when comparing P +- C1 with "
8760 "P +- C2"),
8761 WARN_STRICT_OVERFLOW_CONDITIONAL);
8763 switch (code)
8765 case EQ_EXPR:
8766 return constant_boolean_node (bitpos0 == bitpos1, type);
8767 case NE_EXPR:
8768 return constant_boolean_node (bitpos0 != bitpos1, type);
8769 case LT_EXPR:
8770 return constant_boolean_node (bitpos0 < bitpos1, type);
8771 case LE_EXPR:
8772 return constant_boolean_node (bitpos0 <= bitpos1, type);
8773 case GE_EXPR:
8774 return constant_boolean_node (bitpos0 >= bitpos1, type);
8775 case GT_EXPR:
8776 return constant_boolean_node (bitpos0 > bitpos1, type);
8777 default:;
8780 /* We can simplify the comparison to a comparison of the variable
8781 offset parts if the constant offset parts are equal.
8782 Be careful to use signed size type here because otherwise we
8783 mess with array offsets in the wrong way. This is possible
8784 because pointer arithmetic is restricted to retain within an
8785 object and overflow on pointer differences is undefined as of
8786 6.5.6/8 and /9 with respect to the signed ptrdiff_t. */
8787 else if (bitpos0 == bitpos1
8788 && ((code == EQ_EXPR || code == NE_EXPR)
8789 || POINTER_TYPE_OVERFLOW_UNDEFINED))
8791 /* By converting to signed size type we cover middle-end pointer
8792 arithmetic which operates on unsigned pointer types of size
8793 type size and ARRAY_REF offsets which are properly sign or
8794 zero extended from their type in case it is narrower than
8795 size type. */
8796 if (offset0 == NULL_TREE)
8797 offset0 = build_int_cst (ssizetype, 0);
8798 else
8799 offset0 = fold_convert_loc (loc, ssizetype, offset0);
8800 if (offset1 == NULL_TREE)
8801 offset1 = build_int_cst (ssizetype, 0);
8802 else
8803 offset1 = fold_convert_loc (loc, ssizetype, offset1);
8805 if (code != EQ_EXPR
8806 && code != NE_EXPR
8807 && (pointer_may_wrap_p (base0, offset0, bitpos0)
8808 || pointer_may_wrap_p (base1, offset1, bitpos1)))
8809 fold_overflow_warning (("assuming pointer wraparound does not "
8810 "occur when comparing P +- C1 with "
8811 "P +- C2"),
8812 WARN_STRICT_OVERFLOW_COMPARISON);
8814 return fold_build2_loc (loc, code, type, offset0, offset1);
8817 /* For non-equal bases we can simplify if they are addresses
8818 of local binding decls or constants. */
8819 else if (indirect_base0 && indirect_base1
8820 /* We know that !operand_equal_p (base0, base1, 0)
8821 because the if condition was false. But make
8822 sure two decls are not the same. */
8823 && base0 != base1
8824 && TREE_CODE (arg0) == ADDR_EXPR
8825 && TREE_CODE (arg1) == ADDR_EXPR
8826 && (((TREE_CODE (base0) == VAR_DECL
8827 || TREE_CODE (base0) == PARM_DECL)
8828 && (targetm.binds_local_p (base0)
8829 || CONSTANT_CLASS_P (base1)))
8830 || CONSTANT_CLASS_P (base0))
8831 && (((TREE_CODE (base1) == VAR_DECL
8832 || TREE_CODE (base1) == PARM_DECL)
8833 && (targetm.binds_local_p (base1)
8834 || CONSTANT_CLASS_P (base0)))
8835 || CONSTANT_CLASS_P (base1)))
8837 if (code == EQ_EXPR)
8838 return omit_two_operands_loc (loc, type, boolean_false_node,
8839 arg0, arg1);
8840 else if (code == NE_EXPR)
8841 return omit_two_operands_loc (loc, type, boolean_true_node,
8842 arg0, arg1);
8844 /* For equal offsets we can simplify to a comparison of the
8845 base addresses. */
8846 else if (bitpos0 == bitpos1
8847 && (indirect_base0
8848 ? base0 != TREE_OPERAND (arg0, 0) : base0 != arg0)
8849 && (indirect_base1
8850 ? base1 != TREE_OPERAND (arg1, 0) : base1 != arg1)
8851 && ((offset0 == offset1)
8852 || (offset0 && offset1
8853 && operand_equal_p (offset0, offset1, 0))))
8855 if (indirect_base0)
8856 base0 = build_fold_addr_expr_loc (loc, base0);
8857 if (indirect_base1)
8858 base1 = build_fold_addr_expr_loc (loc, base1);
8859 return fold_build2_loc (loc, code, type, base0, base1);
8863 /* Transform comparisons of the form X +- C1 CMP Y +- C2 to
8864 X CMP Y +- C2 +- C1 for signed X, Y. This is valid if
8865 the resulting offset is smaller in absolute value than the
8866 original one. */
8867 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0))
8868 && (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
8869 && (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
8870 && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1)))
8871 && (TREE_CODE (arg1) == PLUS_EXPR || TREE_CODE (arg1) == MINUS_EXPR)
8872 && (TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
8873 && !TREE_OVERFLOW (TREE_OPERAND (arg1, 1))))
8875 tree const1 = TREE_OPERAND (arg0, 1);
8876 tree const2 = TREE_OPERAND (arg1, 1);
8877 tree variable1 = TREE_OPERAND (arg0, 0);
8878 tree variable2 = TREE_OPERAND (arg1, 0);
8879 tree cst;
8880 const char * const warnmsg = G_("assuming signed overflow does not "
8881 "occur when combining constants around "
8882 "a comparison");
8884 /* Put the constant on the side where it doesn't overflow and is
8885 of lower absolute value than before. */
8886 cst = int_const_binop (TREE_CODE (arg0) == TREE_CODE (arg1)
8887 ? MINUS_EXPR : PLUS_EXPR,
8888 const2, const1, 0);
8889 if (!TREE_OVERFLOW (cst)
8890 && tree_int_cst_compare (const2, cst) == tree_int_cst_sgn (const2))
8892 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_COMPARISON);
8893 return fold_build2_loc (loc, code, type,
8894 variable1,
8895 fold_build2_loc (loc,
8896 TREE_CODE (arg1), TREE_TYPE (arg1),
8897 variable2, cst));
8900 cst = int_const_binop (TREE_CODE (arg0) == TREE_CODE (arg1)
8901 ? MINUS_EXPR : PLUS_EXPR,
8902 const1, const2, 0);
8903 if (!TREE_OVERFLOW (cst)
8904 && tree_int_cst_compare (const1, cst) == tree_int_cst_sgn (const1))
8906 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_COMPARISON);
8907 return fold_build2_loc (loc, code, type,
8908 fold_build2_loc (loc, TREE_CODE (arg0), TREE_TYPE (arg0),
8909 variable1, cst),
8910 variable2);
8914 /* Transform comparisons of the form X * C1 CMP 0 to X CMP 0 in the
8915 signed arithmetic case. That form is created by the compiler
8916 often enough for folding it to be of value. One example is in
8917 computing loop trip counts after Operator Strength Reduction. */
8918 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0))
8919 && TREE_CODE (arg0) == MULT_EXPR
8920 && (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
8921 && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1)))
8922 && integer_zerop (arg1))
8924 tree const1 = TREE_OPERAND (arg0, 1);
8925 tree const2 = arg1; /* zero */
8926 tree variable1 = TREE_OPERAND (arg0, 0);
8927 enum tree_code cmp_code = code;
8929 /* Handle unfolded multiplication by zero. */
8930 if (integer_zerop (const1))
8931 return fold_build2_loc (loc, cmp_code, type, const1, const2);
8933 fold_overflow_warning (("assuming signed overflow does not occur when "
8934 "eliminating multiplication in comparison "
8935 "with zero"),
8936 WARN_STRICT_OVERFLOW_COMPARISON);
8938 /* If const1 is negative we swap the sense of the comparison. */
8939 if (tree_int_cst_sgn (const1) < 0)
8940 cmp_code = swap_tree_comparison (cmp_code);
8942 return fold_build2_loc (loc, cmp_code, type, variable1, const2);
8945 tem = maybe_canonicalize_comparison (loc, code, type, op0, op1);
8946 if (tem)
8947 return tem;
8949 if (FLOAT_TYPE_P (TREE_TYPE (arg0)))
8951 tree targ0 = strip_float_extensions (arg0);
8952 tree targ1 = strip_float_extensions (arg1);
8953 tree newtype = TREE_TYPE (targ0);
8955 if (TYPE_PRECISION (TREE_TYPE (targ1)) > TYPE_PRECISION (newtype))
8956 newtype = TREE_TYPE (targ1);
8958 /* Fold (double)float1 CMP (double)float2 into float1 CMP float2. */
8959 if (TYPE_PRECISION (newtype) < TYPE_PRECISION (TREE_TYPE (arg0)))
8960 return fold_build2_loc (loc, code, type,
8961 fold_convert_loc (loc, newtype, targ0),
8962 fold_convert_loc (loc, newtype, targ1));
8964 /* (-a) CMP (-b) -> b CMP a */
8965 if (TREE_CODE (arg0) == NEGATE_EXPR
8966 && TREE_CODE (arg1) == NEGATE_EXPR)
8967 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg1, 0),
8968 TREE_OPERAND (arg0, 0));
8970 if (TREE_CODE (arg1) == REAL_CST)
8972 REAL_VALUE_TYPE cst;
8973 cst = TREE_REAL_CST (arg1);
8975 /* (-a) CMP CST -> a swap(CMP) (-CST) */
8976 if (TREE_CODE (arg0) == NEGATE_EXPR)
8977 return fold_build2_loc (loc, swap_tree_comparison (code), type,
8978 TREE_OPERAND (arg0, 0),
8979 build_real (TREE_TYPE (arg1),
8980 real_value_negate (&cst)));
8982 /* IEEE doesn't distinguish +0 and -0 in comparisons. */
8983 /* a CMP (-0) -> a CMP 0 */
8984 if (REAL_VALUE_MINUS_ZERO (cst))
8985 return fold_build2_loc (loc, code, type, arg0,
8986 build_real (TREE_TYPE (arg1), dconst0));
8988 /* x != NaN is always true, other ops are always false. */
8989 if (REAL_VALUE_ISNAN (cst)
8990 && ! HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg1))))
8992 tem = (code == NE_EXPR) ? integer_one_node : integer_zero_node;
8993 return omit_one_operand_loc (loc, type, tem, arg0);
8996 /* Fold comparisons against infinity. */
8997 if (REAL_VALUE_ISINF (cst)
8998 && MODE_HAS_INFINITIES (TYPE_MODE (TREE_TYPE (arg1))))
9000 tem = fold_inf_compare (loc, code, type, arg0, arg1);
9001 if (tem != NULL_TREE)
9002 return tem;
9006 /* If this is a comparison of a real constant with a PLUS_EXPR
9007 or a MINUS_EXPR of a real constant, we can convert it into a
9008 comparison with a revised real constant as long as no overflow
9009 occurs when unsafe_math_optimizations are enabled. */
9010 if (flag_unsafe_math_optimizations
9011 && TREE_CODE (arg1) == REAL_CST
9012 && (TREE_CODE (arg0) == PLUS_EXPR
9013 || TREE_CODE (arg0) == MINUS_EXPR)
9014 && TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
9015 && 0 != (tem = const_binop (TREE_CODE (arg0) == PLUS_EXPR
9016 ? MINUS_EXPR : PLUS_EXPR,
9017 arg1, TREE_OPERAND (arg0, 1)))
9018 && !TREE_OVERFLOW (tem))
9019 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0), tem);
9021 /* Likewise, we can simplify a comparison of a real constant with
9022 a MINUS_EXPR whose first operand is also a real constant, i.e.
9023 (c1 - x) < c2 becomes x > c1-c2. Reordering is allowed on
9024 floating-point types only if -fassociative-math is set. */
9025 if (flag_associative_math
9026 && TREE_CODE (arg1) == REAL_CST
9027 && TREE_CODE (arg0) == MINUS_EXPR
9028 && TREE_CODE (TREE_OPERAND (arg0, 0)) == REAL_CST
9029 && 0 != (tem = const_binop (MINUS_EXPR, TREE_OPERAND (arg0, 0),
9030 arg1))
9031 && !TREE_OVERFLOW (tem))
9032 return fold_build2_loc (loc, swap_tree_comparison (code), type,
9033 TREE_OPERAND (arg0, 1), tem);
9035 /* Fold comparisons against built-in math functions. */
9036 if (TREE_CODE (arg1) == REAL_CST
9037 && flag_unsafe_math_optimizations
9038 && ! flag_errno_math)
9040 enum built_in_function fcode = builtin_mathfn_code (arg0);
9042 if (fcode != END_BUILTINS)
9044 tem = fold_mathfn_compare (loc, fcode, code, type, arg0, arg1);
9045 if (tem != NULL_TREE)
9046 return tem;
9051 if (TREE_CODE (TREE_TYPE (arg0)) == INTEGER_TYPE
9052 && CONVERT_EXPR_P (arg0))
9054 /* If we are widening one operand of an integer comparison,
9055 see if the other operand is similarly being widened. Perhaps we
9056 can do the comparison in the narrower type. */
9057 tem = fold_widened_comparison (loc, code, type, arg0, arg1);
9058 if (tem)
9059 return tem;
9061 /* Or if we are changing signedness. */
9062 tem = fold_sign_changed_comparison (loc, code, type, arg0, arg1);
9063 if (tem)
9064 return tem;
9067 /* If this is comparing a constant with a MIN_EXPR or a MAX_EXPR of a
9068 constant, we can simplify it. */
9069 if (TREE_CODE (arg1) == INTEGER_CST
9070 && (TREE_CODE (arg0) == MIN_EXPR
9071 || TREE_CODE (arg0) == MAX_EXPR)
9072 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
9074 tem = optimize_minmax_comparison (loc, code, type, op0, op1);
9075 if (tem)
9076 return tem;
9079 /* Simplify comparison of something with itself. (For IEEE
9080 floating-point, we can only do some of these simplifications.) */
9081 if (operand_equal_p (arg0, arg1, 0))
9083 switch (code)
9085 case EQ_EXPR:
9086 if (! FLOAT_TYPE_P (TREE_TYPE (arg0))
9087 || ! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
9088 return constant_boolean_node (1, type);
9089 break;
9091 case GE_EXPR:
9092 case LE_EXPR:
9093 if (! FLOAT_TYPE_P (TREE_TYPE (arg0))
9094 || ! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
9095 return constant_boolean_node (1, type);
9096 return fold_build2_loc (loc, EQ_EXPR, type, arg0, arg1);
9098 case NE_EXPR:
9099 /* For NE, we can only do this simplification if integer
9100 or we don't honor IEEE floating point NaNs. */
9101 if (FLOAT_TYPE_P (TREE_TYPE (arg0))
9102 && HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
9103 break;
9104 /* ... fall through ... */
9105 case GT_EXPR:
9106 case LT_EXPR:
9107 return constant_boolean_node (0, type);
9108 default:
9109 gcc_unreachable ();
9113 /* If we are comparing an expression that just has comparisons
9114 of two integer values, arithmetic expressions of those comparisons,
9115 and constants, we can simplify it. There are only three cases
9116 to check: the two values can either be equal, the first can be
9117 greater, or the second can be greater. Fold the expression for
9118 those three values. Since each value must be 0 or 1, we have
9119 eight possibilities, each of which corresponds to the constant 0
9120 or 1 or one of the six possible comparisons.
9122 This handles common cases like (a > b) == 0 but also handles
9123 expressions like ((x > y) - (y > x)) > 0, which supposedly
9124 occur in macroized code. */
9126 if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg0) != INTEGER_CST)
9128 tree cval1 = 0, cval2 = 0;
9129 int save_p = 0;
9131 if (twoval_comparison_p (arg0, &cval1, &cval2, &save_p)
9132 /* Don't handle degenerate cases here; they should already
9133 have been handled anyway. */
9134 && cval1 != 0 && cval2 != 0
9135 && ! (TREE_CONSTANT (cval1) && TREE_CONSTANT (cval2))
9136 && TREE_TYPE (cval1) == TREE_TYPE (cval2)
9137 && INTEGRAL_TYPE_P (TREE_TYPE (cval1))
9138 && TYPE_MAX_VALUE (TREE_TYPE (cval1))
9139 && TYPE_MAX_VALUE (TREE_TYPE (cval2))
9140 && ! operand_equal_p (TYPE_MIN_VALUE (TREE_TYPE (cval1)),
9141 TYPE_MAX_VALUE (TREE_TYPE (cval2)), 0))
9143 tree maxval = TYPE_MAX_VALUE (TREE_TYPE (cval1));
9144 tree minval = TYPE_MIN_VALUE (TREE_TYPE (cval1));
9146 /* We can't just pass T to eval_subst in case cval1 or cval2
9147 was the same as ARG1. */
9149 tree high_result
9150 = fold_build2_loc (loc, code, type,
9151 eval_subst (loc, arg0, cval1, maxval,
9152 cval2, minval),
9153 arg1);
9154 tree equal_result
9155 = fold_build2_loc (loc, code, type,
9156 eval_subst (loc, arg0, cval1, maxval,
9157 cval2, maxval),
9158 arg1);
9159 tree low_result
9160 = fold_build2_loc (loc, code, type,
9161 eval_subst (loc, arg0, cval1, minval,
9162 cval2, maxval),
9163 arg1);
9165 /* All three of these results should be 0 or 1. Confirm they are.
9166 Then use those values to select the proper code to use. */
9168 if (TREE_CODE (high_result) == INTEGER_CST
9169 && TREE_CODE (equal_result) == INTEGER_CST
9170 && TREE_CODE (low_result) == INTEGER_CST)
9172 /* Make a 3-bit mask with the high-order bit being the
9173 value for `>', the next for '=', and the low for '<'. */
9174 switch ((integer_onep (high_result) * 4)
9175 + (integer_onep (equal_result) * 2)
9176 + integer_onep (low_result))
9178 case 0:
9179 /* Always false. */
9180 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
9181 case 1:
9182 code = LT_EXPR;
9183 break;
9184 case 2:
9185 code = EQ_EXPR;
9186 break;
9187 case 3:
9188 code = LE_EXPR;
9189 break;
9190 case 4:
9191 code = GT_EXPR;
9192 break;
9193 case 5:
9194 code = NE_EXPR;
9195 break;
9196 case 6:
9197 code = GE_EXPR;
9198 break;
9199 case 7:
9200 /* Always true. */
9201 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
9204 if (save_p)
9206 tem = save_expr (build2 (code, type, cval1, cval2));
9207 SET_EXPR_LOCATION (tem, loc);
9208 return tem;
9210 return fold_build2_loc (loc, code, type, cval1, cval2);
9215 /* We can fold X/C1 op C2 where C1 and C2 are integer constants
9216 into a single range test. */
9217 if ((TREE_CODE (arg0) == TRUNC_DIV_EXPR
9218 || TREE_CODE (arg0) == EXACT_DIV_EXPR)
9219 && TREE_CODE (arg1) == INTEGER_CST
9220 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
9221 && !integer_zerop (TREE_OPERAND (arg0, 1))
9222 && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1))
9223 && !TREE_OVERFLOW (arg1))
9225 tem = fold_div_compare (loc, code, type, arg0, arg1);
9226 if (tem != NULL_TREE)
9227 return tem;
9230 /* Fold ~X op ~Y as Y op X. */
9231 if (TREE_CODE (arg0) == BIT_NOT_EXPR
9232 && TREE_CODE (arg1) == BIT_NOT_EXPR)
9234 tree cmp_type = TREE_TYPE (TREE_OPERAND (arg0, 0));
9235 return fold_build2_loc (loc, code, type,
9236 fold_convert_loc (loc, cmp_type,
9237 TREE_OPERAND (arg1, 0)),
9238 TREE_OPERAND (arg0, 0));
9241 /* Fold ~X op C as X op' ~C, where op' is the swapped comparison. */
9242 if (TREE_CODE (arg0) == BIT_NOT_EXPR
9243 && TREE_CODE (arg1) == INTEGER_CST)
9245 tree cmp_type = TREE_TYPE (TREE_OPERAND (arg0, 0));
9246 return fold_build2_loc (loc, swap_tree_comparison (code), type,
9247 TREE_OPERAND (arg0, 0),
9248 fold_build1_loc (loc, BIT_NOT_EXPR, cmp_type,
9249 fold_convert_loc (loc, cmp_type, arg1)));
9252 return NULL_TREE;
9256 /* Subroutine of fold_binary. Optimize complex multiplications of the
9257 form z * conj(z), as pow(realpart(z),2) + pow(imagpart(z),2). The
9258 argument EXPR represents the expression "z" of type TYPE. */
9260 static tree
9261 fold_mult_zconjz (location_t loc, tree type, tree expr)
9263 tree itype = TREE_TYPE (type);
9264 tree rpart, ipart, tem;
9266 if (TREE_CODE (expr) == COMPLEX_EXPR)
9268 rpart = TREE_OPERAND (expr, 0);
9269 ipart = TREE_OPERAND (expr, 1);
9271 else if (TREE_CODE (expr) == COMPLEX_CST)
9273 rpart = TREE_REALPART (expr);
9274 ipart = TREE_IMAGPART (expr);
9276 else
9278 expr = save_expr (expr);
9279 rpart = fold_build1_loc (loc, REALPART_EXPR, itype, expr);
9280 ipart = fold_build1_loc (loc, IMAGPART_EXPR, itype, expr);
9283 rpart = save_expr (rpart);
9284 ipart = save_expr (ipart);
9285 tem = fold_build2_loc (loc, PLUS_EXPR, itype,
9286 fold_build2_loc (loc, MULT_EXPR, itype, rpart, rpart),
9287 fold_build2_loc (loc, MULT_EXPR, itype, ipart, ipart));
9288 return fold_build2_loc (loc, COMPLEX_EXPR, type, tem,
9289 fold_convert_loc (loc, itype, integer_zero_node));
9293 /* Subroutine of fold_binary. If P is the value of EXPR, computes
9294 power-of-two M and (arbitrary) N such that M divides (P-N). This condition
9295 guarantees that P and N have the same least significant log2(M) bits.
9296 N is not otherwise constrained. In particular, N is not normalized to
9297 0 <= N < M as is common. In general, the precise value of P is unknown.
9298 M is chosen as large as possible such that constant N can be determined.
9300 Returns M and sets *RESIDUE to N.
9302 If ALLOW_FUNC_ALIGN is true, do take functions' DECL_ALIGN_UNIT into
9303 account. This is not always possible due to PR 35705.
9306 static unsigned HOST_WIDE_INT
9307 get_pointer_modulus_and_residue (tree expr, unsigned HOST_WIDE_INT *residue,
9308 bool allow_func_align)
9310 enum tree_code code;
9312 *residue = 0;
9314 code = TREE_CODE (expr);
9315 if (code == ADDR_EXPR)
9317 expr = TREE_OPERAND (expr, 0);
9318 if (handled_component_p (expr))
9320 HOST_WIDE_INT bitsize, bitpos;
9321 tree offset;
9322 enum machine_mode mode;
9323 int unsignedp, volatilep;
9325 expr = get_inner_reference (expr, &bitsize, &bitpos, &offset,
9326 &mode, &unsignedp, &volatilep, false);
9327 *residue = bitpos / BITS_PER_UNIT;
9328 if (offset)
9330 if (TREE_CODE (offset) == INTEGER_CST)
9331 *residue += TREE_INT_CST_LOW (offset);
9332 else
9333 /* We don't handle more complicated offset expressions. */
9334 return 1;
9338 if (DECL_P (expr)
9339 && (allow_func_align || TREE_CODE (expr) != FUNCTION_DECL))
9340 return DECL_ALIGN_UNIT (expr);
9342 else if (code == POINTER_PLUS_EXPR)
9344 tree op0, op1;
9345 unsigned HOST_WIDE_INT modulus;
9346 enum tree_code inner_code;
9348 op0 = TREE_OPERAND (expr, 0);
9349 STRIP_NOPS (op0);
9350 modulus = get_pointer_modulus_and_residue (op0, residue,
9351 allow_func_align);
9353 op1 = TREE_OPERAND (expr, 1);
9354 STRIP_NOPS (op1);
9355 inner_code = TREE_CODE (op1);
9356 if (inner_code == INTEGER_CST)
9358 *residue += TREE_INT_CST_LOW (op1);
9359 return modulus;
9361 else if (inner_code == MULT_EXPR)
9363 op1 = TREE_OPERAND (op1, 1);
9364 if (TREE_CODE (op1) == INTEGER_CST)
9366 unsigned HOST_WIDE_INT align;
9368 /* Compute the greatest power-of-2 divisor of op1. */
9369 align = TREE_INT_CST_LOW (op1);
9370 align &= -align;
9372 /* If align is non-zero and less than *modulus, replace
9373 *modulus with align., If align is 0, then either op1 is 0
9374 or the greatest power-of-2 divisor of op1 doesn't fit in an
9375 unsigned HOST_WIDE_INT. In either case, no additional
9376 constraint is imposed. */
9377 if (align)
9378 modulus = MIN (modulus, align);
9380 return modulus;
9385 /* If we get here, we were unable to determine anything useful about the
9386 expression. */
9387 return 1;
9391 /* Fold a binary expression of code CODE and type TYPE with operands
9392 OP0 and OP1. LOC is the location of the resulting expression.
9393 Return the folded expression if folding is successful. Otherwise,
9394 return NULL_TREE. */
9396 tree
9397 fold_binary_loc (location_t loc,
9398 enum tree_code code, tree type, tree op0, tree op1)
9400 enum tree_code_class kind = TREE_CODE_CLASS (code);
9401 tree arg0, arg1, tem;
9402 tree t1 = NULL_TREE;
9403 bool strict_overflow_p;
9405 gcc_assert (IS_EXPR_CODE_CLASS (kind)
9406 && TREE_CODE_LENGTH (code) == 2
9407 && op0 != NULL_TREE
9408 && op1 != NULL_TREE);
9410 arg0 = op0;
9411 arg1 = op1;
9413 /* Strip any conversions that don't change the mode. This is
9414 safe for every expression, except for a comparison expression
9415 because its signedness is derived from its operands. So, in
9416 the latter case, only strip conversions that don't change the
9417 signedness. MIN_EXPR/MAX_EXPR also need signedness of arguments
9418 preserved.
9420 Note that this is done as an internal manipulation within the
9421 constant folder, in order to find the simplest representation
9422 of the arguments so that their form can be studied. In any
9423 cases, the appropriate type conversions should be put back in
9424 the tree that will get out of the constant folder. */
9426 if (kind == tcc_comparison || code == MIN_EXPR || code == MAX_EXPR)
9428 STRIP_SIGN_NOPS (arg0);
9429 STRIP_SIGN_NOPS (arg1);
9431 else
9433 STRIP_NOPS (arg0);
9434 STRIP_NOPS (arg1);
9437 /* Note that TREE_CONSTANT isn't enough: static var addresses are
9438 constant but we can't do arithmetic on them. */
9439 if ((TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
9440 || (TREE_CODE (arg0) == REAL_CST && TREE_CODE (arg1) == REAL_CST)
9441 || (TREE_CODE (arg0) == FIXED_CST && TREE_CODE (arg1) == FIXED_CST)
9442 || (TREE_CODE (arg0) == FIXED_CST && TREE_CODE (arg1) == INTEGER_CST)
9443 || (TREE_CODE (arg0) == COMPLEX_CST && TREE_CODE (arg1) == COMPLEX_CST)
9444 || (TREE_CODE (arg0) == VECTOR_CST && TREE_CODE (arg1) == VECTOR_CST))
9446 if (kind == tcc_binary)
9448 /* Make sure type and arg0 have the same saturating flag. */
9449 gcc_assert (TYPE_SATURATING (type)
9450 == TYPE_SATURATING (TREE_TYPE (arg0)));
9451 tem = const_binop (code, arg0, arg1);
9453 else if (kind == tcc_comparison)
9454 tem = fold_relational_const (code, type, arg0, arg1);
9455 else
9456 tem = NULL_TREE;
9458 if (tem != NULL_TREE)
9460 if (TREE_TYPE (tem) != type)
9461 tem = fold_convert_loc (loc, type, tem);
9462 return tem;
9466 /* If this is a commutative operation, and ARG0 is a constant, move it
9467 to ARG1 to reduce the number of tests below. */
9468 if (commutative_tree_code (code)
9469 && tree_swap_operands_p (arg0, arg1, true))
9470 return fold_build2_loc (loc, code, type, op1, op0);
9472 /* ARG0 is the first operand of EXPR, and ARG1 is the second operand.
9474 First check for cases where an arithmetic operation is applied to a
9475 compound, conditional, or comparison operation. Push the arithmetic
9476 operation inside the compound or conditional to see if any folding
9477 can then be done. Convert comparison to conditional for this purpose.
9478 The also optimizes non-constant cases that used to be done in
9479 expand_expr.
9481 Before we do that, see if this is a BIT_AND_EXPR or a BIT_IOR_EXPR,
9482 one of the operands is a comparison and the other is a comparison, a
9483 BIT_AND_EXPR with the constant 1, or a truth value. In that case, the
9484 code below would make the expression more complex. Change it to a
9485 TRUTH_{AND,OR}_EXPR. Likewise, convert a similar NE_EXPR to
9486 TRUTH_XOR_EXPR and an EQ_EXPR to the inversion of a TRUTH_XOR_EXPR. */
9488 if ((code == BIT_AND_EXPR || code == BIT_IOR_EXPR
9489 || code == EQ_EXPR || code == NE_EXPR)
9490 && ((truth_value_p (TREE_CODE (arg0))
9491 && (truth_value_p (TREE_CODE (arg1))
9492 || (TREE_CODE (arg1) == BIT_AND_EXPR
9493 && integer_onep (TREE_OPERAND (arg1, 1)))))
9494 || (truth_value_p (TREE_CODE (arg1))
9495 && (truth_value_p (TREE_CODE (arg0))
9496 || (TREE_CODE (arg0) == BIT_AND_EXPR
9497 && integer_onep (TREE_OPERAND (arg0, 1)))))))
9499 tem = fold_build2_loc (loc, code == BIT_AND_EXPR ? TRUTH_AND_EXPR
9500 : code == BIT_IOR_EXPR ? TRUTH_OR_EXPR
9501 : TRUTH_XOR_EXPR,
9502 boolean_type_node,
9503 fold_convert_loc (loc, boolean_type_node, arg0),
9504 fold_convert_loc (loc, boolean_type_node, arg1));
9506 if (code == EQ_EXPR)
9507 tem = invert_truthvalue_loc (loc, tem);
9509 return fold_convert_loc (loc, type, tem);
9512 if (TREE_CODE_CLASS (code) == tcc_binary
9513 || TREE_CODE_CLASS (code) == tcc_comparison)
9515 if (TREE_CODE (arg0) == COMPOUND_EXPR)
9517 tem = fold_build2_loc (loc, code, type,
9518 fold_convert_loc (loc, TREE_TYPE (op0),
9519 TREE_OPERAND (arg0, 1)), op1);
9520 tem = build2 (COMPOUND_EXPR, type, TREE_OPERAND (arg0, 0), tem);
9521 goto fold_binary_exit;
9523 if (TREE_CODE (arg1) == COMPOUND_EXPR
9524 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
9526 tem = fold_build2_loc (loc, code, type, op0,
9527 fold_convert_loc (loc, TREE_TYPE (op1),
9528 TREE_OPERAND (arg1, 1)));
9529 tem = build2 (COMPOUND_EXPR, type, TREE_OPERAND (arg1, 0), tem);
9530 goto fold_binary_exit;
9533 if (TREE_CODE (arg0) == COND_EXPR || COMPARISON_CLASS_P (arg0))
9535 tem = fold_binary_op_with_conditional_arg (loc, code, type, op0, op1,
9536 arg0, arg1,
9537 /*cond_first_p=*/1);
9538 if (tem != NULL_TREE)
9539 return tem;
9542 if (TREE_CODE (arg1) == COND_EXPR || COMPARISON_CLASS_P (arg1))
9544 tem = fold_binary_op_with_conditional_arg (loc, code, type, op0, op1,
9545 arg1, arg0,
9546 /*cond_first_p=*/0);
9547 if (tem != NULL_TREE)
9548 return tem;
9552 switch (code)
9554 case MEM_REF:
9555 /* MEM[&MEM[p, CST1], CST2] -> MEM[p, CST1 + CST2]. */
9556 if (TREE_CODE (arg0) == ADDR_EXPR
9557 && TREE_CODE (TREE_OPERAND (arg0, 0)) == MEM_REF)
9559 tree iref = TREE_OPERAND (arg0, 0);
9560 return fold_build2 (MEM_REF, type,
9561 TREE_OPERAND (iref, 0),
9562 int_const_binop (PLUS_EXPR, arg1,
9563 TREE_OPERAND (iref, 1), 0));
9566 /* MEM[&a.b, CST2] -> MEM[&a, offsetof (a, b) + CST2]. */
9567 if (TREE_CODE (arg0) == ADDR_EXPR
9568 && handled_component_p (TREE_OPERAND (arg0, 0)))
9570 tree base;
9571 HOST_WIDE_INT coffset;
9572 base = get_addr_base_and_unit_offset (TREE_OPERAND (arg0, 0),
9573 &coffset);
9574 if (!base)
9575 return NULL_TREE;
9576 return fold_build2 (MEM_REF, type,
9577 build_fold_addr_expr (base),
9578 int_const_binop (PLUS_EXPR, arg1,
9579 size_int (coffset), 0));
9582 return NULL_TREE;
9584 case POINTER_PLUS_EXPR:
9585 /* 0 +p index -> (type)index */
9586 if (integer_zerop (arg0))
9587 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
9589 /* PTR +p 0 -> PTR */
9590 if (integer_zerop (arg1))
9591 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
9593 /* INT +p INT -> (PTR)(INT + INT). Stripping types allows for this. */
9594 if (INTEGRAL_TYPE_P (TREE_TYPE (arg1))
9595 && INTEGRAL_TYPE_P (TREE_TYPE (arg0)))
9596 return fold_convert_loc (loc, type,
9597 fold_build2_loc (loc, PLUS_EXPR, sizetype,
9598 fold_convert_loc (loc, sizetype,
9599 arg1),
9600 fold_convert_loc (loc, sizetype,
9601 arg0)));
9603 /* index +p PTR -> PTR +p index */
9604 if (POINTER_TYPE_P (TREE_TYPE (arg1))
9605 && INTEGRAL_TYPE_P (TREE_TYPE (arg0)))
9606 return fold_build2_loc (loc, POINTER_PLUS_EXPR, type,
9607 fold_convert_loc (loc, type, arg1),
9608 fold_convert_loc (loc, sizetype, arg0));
9610 /* (PTR +p B) +p A -> PTR +p (B + A) */
9611 if (TREE_CODE (arg0) == POINTER_PLUS_EXPR)
9613 tree inner;
9614 tree arg01 = fold_convert_loc (loc, sizetype, TREE_OPERAND (arg0, 1));
9615 tree arg00 = TREE_OPERAND (arg0, 0);
9616 inner = fold_build2_loc (loc, PLUS_EXPR, sizetype,
9617 arg01, fold_convert_loc (loc, sizetype, arg1));
9618 return fold_convert_loc (loc, type,
9619 fold_build2_loc (loc, POINTER_PLUS_EXPR,
9620 TREE_TYPE (arg00),
9621 arg00, inner));
9624 /* PTR_CST +p CST -> CST1 */
9625 if (TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
9626 return fold_build2_loc (loc, PLUS_EXPR, type, arg0,
9627 fold_convert_loc (loc, type, arg1));
9629 /* Try replacing &a[i1] +p c * i2 with &a[i1 + i2], if c is step
9630 of the array. Loop optimizer sometimes produce this type of
9631 expressions. */
9632 if (TREE_CODE (arg0) == ADDR_EXPR)
9634 tem = try_move_mult_to_index (loc, arg0,
9635 fold_convert_loc (loc, sizetype, arg1));
9636 if (tem)
9637 return fold_convert_loc (loc, type, tem);
9640 return NULL_TREE;
9642 case PLUS_EXPR:
9643 /* A + (-B) -> A - B */
9644 if (TREE_CODE (arg1) == NEGATE_EXPR)
9645 return fold_build2_loc (loc, MINUS_EXPR, type,
9646 fold_convert_loc (loc, type, arg0),
9647 fold_convert_loc (loc, type,
9648 TREE_OPERAND (arg1, 0)));
9649 /* (-A) + B -> B - A */
9650 if (TREE_CODE (arg0) == NEGATE_EXPR
9651 && reorder_operands_p (TREE_OPERAND (arg0, 0), arg1))
9652 return fold_build2_loc (loc, MINUS_EXPR, type,
9653 fold_convert_loc (loc, type, arg1),
9654 fold_convert_loc (loc, type,
9655 TREE_OPERAND (arg0, 0)));
9657 if (INTEGRAL_TYPE_P (type))
9659 /* Convert ~A + 1 to -A. */
9660 if (TREE_CODE (arg0) == BIT_NOT_EXPR
9661 && integer_onep (arg1))
9662 return fold_build1_loc (loc, NEGATE_EXPR, type,
9663 fold_convert_loc (loc, type,
9664 TREE_OPERAND (arg0, 0)));
9666 /* ~X + X is -1. */
9667 if (TREE_CODE (arg0) == BIT_NOT_EXPR
9668 && !TYPE_OVERFLOW_TRAPS (type))
9670 tree tem = TREE_OPERAND (arg0, 0);
9672 STRIP_NOPS (tem);
9673 if (operand_equal_p (tem, arg1, 0))
9675 t1 = build_int_cst_type (type, -1);
9676 return omit_one_operand_loc (loc, type, t1, arg1);
9680 /* X + ~X is -1. */
9681 if (TREE_CODE (arg1) == BIT_NOT_EXPR
9682 && !TYPE_OVERFLOW_TRAPS (type))
9684 tree tem = TREE_OPERAND (arg1, 0);
9686 STRIP_NOPS (tem);
9687 if (operand_equal_p (arg0, tem, 0))
9689 t1 = build_int_cst_type (type, -1);
9690 return omit_one_operand_loc (loc, type, t1, arg0);
9694 /* X + (X / CST) * -CST is X % CST. */
9695 if (TREE_CODE (arg1) == MULT_EXPR
9696 && TREE_CODE (TREE_OPERAND (arg1, 0)) == TRUNC_DIV_EXPR
9697 && operand_equal_p (arg0,
9698 TREE_OPERAND (TREE_OPERAND (arg1, 0), 0), 0))
9700 tree cst0 = TREE_OPERAND (TREE_OPERAND (arg1, 0), 1);
9701 tree cst1 = TREE_OPERAND (arg1, 1);
9702 tree sum = fold_binary_loc (loc, PLUS_EXPR, TREE_TYPE (cst1),
9703 cst1, cst0);
9704 if (sum && integer_zerop (sum))
9705 return fold_convert_loc (loc, type,
9706 fold_build2_loc (loc, TRUNC_MOD_EXPR,
9707 TREE_TYPE (arg0), arg0,
9708 cst0));
9712 /* Handle (A1 * C1) + (A2 * C2) with A1, A2 or C1, C2 being the
9713 same or one. Make sure type is not saturating.
9714 fold_plusminus_mult_expr will re-associate. */
9715 if ((TREE_CODE (arg0) == MULT_EXPR
9716 || TREE_CODE (arg1) == MULT_EXPR)
9717 && !TYPE_SATURATING (type)
9718 && (!FLOAT_TYPE_P (type) || flag_associative_math))
9720 tree tem = fold_plusminus_mult_expr (loc, code, type, arg0, arg1);
9721 if (tem)
9722 return tem;
9725 if (! FLOAT_TYPE_P (type))
9727 if (integer_zerop (arg1))
9728 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
9730 /* If we are adding two BIT_AND_EXPR's, both of which are and'ing
9731 with a constant, and the two constants have no bits in common,
9732 we should treat this as a BIT_IOR_EXPR since this may produce more
9733 simplifications. */
9734 if (TREE_CODE (arg0) == BIT_AND_EXPR
9735 && TREE_CODE (arg1) == BIT_AND_EXPR
9736 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
9737 && TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
9738 && integer_zerop (const_binop (BIT_AND_EXPR,
9739 TREE_OPERAND (arg0, 1),
9740 TREE_OPERAND (arg1, 1))))
9742 code = BIT_IOR_EXPR;
9743 goto bit_ior;
9746 /* Reassociate (plus (plus (mult) (foo)) (mult)) as
9747 (plus (plus (mult) (mult)) (foo)) so that we can
9748 take advantage of the factoring cases below. */
9749 if (((TREE_CODE (arg0) == PLUS_EXPR
9750 || TREE_CODE (arg0) == MINUS_EXPR)
9751 && TREE_CODE (arg1) == MULT_EXPR)
9752 || ((TREE_CODE (arg1) == PLUS_EXPR
9753 || TREE_CODE (arg1) == MINUS_EXPR)
9754 && TREE_CODE (arg0) == MULT_EXPR))
9756 tree parg0, parg1, parg, marg;
9757 enum tree_code pcode;
9759 if (TREE_CODE (arg1) == MULT_EXPR)
9760 parg = arg0, marg = arg1;
9761 else
9762 parg = arg1, marg = arg0;
9763 pcode = TREE_CODE (parg);
9764 parg0 = TREE_OPERAND (parg, 0);
9765 parg1 = TREE_OPERAND (parg, 1);
9766 STRIP_NOPS (parg0);
9767 STRIP_NOPS (parg1);
9769 if (TREE_CODE (parg0) == MULT_EXPR
9770 && TREE_CODE (parg1) != MULT_EXPR)
9771 return fold_build2_loc (loc, pcode, type,
9772 fold_build2_loc (loc, PLUS_EXPR, type,
9773 fold_convert_loc (loc, type,
9774 parg0),
9775 fold_convert_loc (loc, type,
9776 marg)),
9777 fold_convert_loc (loc, type, parg1));
9778 if (TREE_CODE (parg0) != MULT_EXPR
9779 && TREE_CODE (parg1) == MULT_EXPR)
9780 return
9781 fold_build2_loc (loc, PLUS_EXPR, type,
9782 fold_convert_loc (loc, type, parg0),
9783 fold_build2_loc (loc, pcode, type,
9784 fold_convert_loc (loc, type, marg),
9785 fold_convert_loc (loc, type,
9786 parg1)));
9789 else
9791 /* See if ARG1 is zero and X + ARG1 reduces to X. */
9792 if (fold_real_zero_addition_p (TREE_TYPE (arg0), arg1, 0))
9793 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
9795 /* Likewise if the operands are reversed. */
9796 if (fold_real_zero_addition_p (TREE_TYPE (arg1), arg0, 0))
9797 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
9799 /* Convert X + -C into X - C. */
9800 if (TREE_CODE (arg1) == REAL_CST
9801 && REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg1)))
9803 tem = fold_negate_const (arg1, type);
9804 if (!TREE_OVERFLOW (arg1) || !flag_trapping_math)
9805 return fold_build2_loc (loc, MINUS_EXPR, type,
9806 fold_convert_loc (loc, type, arg0),
9807 fold_convert_loc (loc, type, tem));
9810 /* Fold __complex__ ( x, 0 ) + __complex__ ( 0, y )
9811 to __complex__ ( x, y ). This is not the same for SNaNs or
9812 if signed zeros are involved. */
9813 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
9814 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
9815 && COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0)))
9817 tree rtype = TREE_TYPE (TREE_TYPE (arg0));
9818 tree arg0r = fold_unary_loc (loc, REALPART_EXPR, rtype, arg0);
9819 tree arg0i = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg0);
9820 bool arg0rz = false, arg0iz = false;
9821 if ((arg0r && (arg0rz = real_zerop (arg0r)))
9822 || (arg0i && (arg0iz = real_zerop (arg0i))))
9824 tree arg1r = fold_unary_loc (loc, REALPART_EXPR, rtype, arg1);
9825 tree arg1i = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg1);
9826 if (arg0rz && arg1i && real_zerop (arg1i))
9828 tree rp = arg1r ? arg1r
9829 : build1 (REALPART_EXPR, rtype, arg1);
9830 tree ip = arg0i ? arg0i
9831 : build1 (IMAGPART_EXPR, rtype, arg0);
9832 return fold_build2_loc (loc, COMPLEX_EXPR, type, rp, ip);
9834 else if (arg0iz && arg1r && real_zerop (arg1r))
9836 tree rp = arg0r ? arg0r
9837 : build1 (REALPART_EXPR, rtype, arg0);
9838 tree ip = arg1i ? arg1i
9839 : build1 (IMAGPART_EXPR, rtype, arg1);
9840 return fold_build2_loc (loc, COMPLEX_EXPR, type, rp, ip);
9845 if (flag_unsafe_math_optimizations
9846 && (TREE_CODE (arg0) == RDIV_EXPR || TREE_CODE (arg0) == MULT_EXPR)
9847 && (TREE_CODE (arg1) == RDIV_EXPR || TREE_CODE (arg1) == MULT_EXPR)
9848 && (tem = distribute_real_division (loc, code, type, arg0, arg1)))
9849 return tem;
9851 /* Convert x+x into x*2.0. */
9852 if (operand_equal_p (arg0, arg1, 0)
9853 && SCALAR_FLOAT_TYPE_P (type))
9854 return fold_build2_loc (loc, MULT_EXPR, type, arg0,
9855 build_real (type, dconst2));
9857 /* Convert a + (b*c + d*e) into (a + b*c) + d*e.
9858 We associate floats only if the user has specified
9859 -fassociative-math. */
9860 if (flag_associative_math
9861 && TREE_CODE (arg1) == PLUS_EXPR
9862 && TREE_CODE (arg0) != MULT_EXPR)
9864 tree tree10 = TREE_OPERAND (arg1, 0);
9865 tree tree11 = TREE_OPERAND (arg1, 1);
9866 if (TREE_CODE (tree11) == MULT_EXPR
9867 && TREE_CODE (tree10) == MULT_EXPR)
9869 tree tree0;
9870 tree0 = fold_build2_loc (loc, PLUS_EXPR, type, arg0, tree10);
9871 return fold_build2_loc (loc, PLUS_EXPR, type, tree0, tree11);
9874 /* Convert (b*c + d*e) + a into b*c + (d*e +a).
9875 We associate floats only if the user has specified
9876 -fassociative-math. */
9877 if (flag_associative_math
9878 && TREE_CODE (arg0) == PLUS_EXPR
9879 && TREE_CODE (arg1) != MULT_EXPR)
9881 tree tree00 = TREE_OPERAND (arg0, 0);
9882 tree tree01 = TREE_OPERAND (arg0, 1);
9883 if (TREE_CODE (tree01) == MULT_EXPR
9884 && TREE_CODE (tree00) == MULT_EXPR)
9886 tree tree0;
9887 tree0 = fold_build2_loc (loc, PLUS_EXPR, type, tree01, arg1);
9888 return fold_build2_loc (loc, PLUS_EXPR, type, tree00, tree0);
9893 bit_rotate:
9894 /* (A << C1) + (A >> C2) if A is unsigned and C1+C2 is the size of A
9895 is a rotate of A by C1 bits. */
9896 /* (A << B) + (A >> (Z - B)) if A is unsigned and Z is the size of A
9897 is a rotate of A by B bits. */
9899 enum tree_code code0, code1;
9900 tree rtype;
9901 code0 = TREE_CODE (arg0);
9902 code1 = TREE_CODE (arg1);
9903 if (((code0 == RSHIFT_EXPR && code1 == LSHIFT_EXPR)
9904 || (code1 == RSHIFT_EXPR && code0 == LSHIFT_EXPR))
9905 && operand_equal_p (TREE_OPERAND (arg0, 0),
9906 TREE_OPERAND (arg1, 0), 0)
9907 && (rtype = TREE_TYPE (TREE_OPERAND (arg0, 0)),
9908 TYPE_UNSIGNED (rtype))
9909 /* Only create rotates in complete modes. Other cases are not
9910 expanded properly. */
9911 && TYPE_PRECISION (rtype) == GET_MODE_PRECISION (TYPE_MODE (rtype)))
9913 tree tree01, tree11;
9914 enum tree_code code01, code11;
9916 tree01 = TREE_OPERAND (arg0, 1);
9917 tree11 = TREE_OPERAND (arg1, 1);
9918 STRIP_NOPS (tree01);
9919 STRIP_NOPS (tree11);
9920 code01 = TREE_CODE (tree01);
9921 code11 = TREE_CODE (tree11);
9922 if (code01 == INTEGER_CST
9923 && code11 == INTEGER_CST
9924 && TREE_INT_CST_HIGH (tree01) == 0
9925 && TREE_INT_CST_HIGH (tree11) == 0
9926 && ((TREE_INT_CST_LOW (tree01) + TREE_INT_CST_LOW (tree11))
9927 == TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg0, 0)))))
9929 tem = build2 (LROTATE_EXPR,
9930 TREE_TYPE (TREE_OPERAND (arg0, 0)),
9931 TREE_OPERAND (arg0, 0),
9932 code0 == LSHIFT_EXPR
9933 ? tree01 : tree11);
9934 SET_EXPR_LOCATION (tem, loc);
9935 return fold_convert_loc (loc, type, tem);
9937 else if (code11 == MINUS_EXPR)
9939 tree tree110, tree111;
9940 tree110 = TREE_OPERAND (tree11, 0);
9941 tree111 = TREE_OPERAND (tree11, 1);
9942 STRIP_NOPS (tree110);
9943 STRIP_NOPS (tree111);
9944 if (TREE_CODE (tree110) == INTEGER_CST
9945 && 0 == compare_tree_int (tree110,
9946 TYPE_PRECISION
9947 (TREE_TYPE (TREE_OPERAND
9948 (arg0, 0))))
9949 && operand_equal_p (tree01, tree111, 0))
9950 return
9951 fold_convert_loc (loc, type,
9952 build2 ((code0 == LSHIFT_EXPR
9953 ? LROTATE_EXPR
9954 : RROTATE_EXPR),
9955 TREE_TYPE (TREE_OPERAND (arg0, 0)),
9956 TREE_OPERAND (arg0, 0), tree01));
9958 else if (code01 == MINUS_EXPR)
9960 tree tree010, tree011;
9961 tree010 = TREE_OPERAND (tree01, 0);
9962 tree011 = TREE_OPERAND (tree01, 1);
9963 STRIP_NOPS (tree010);
9964 STRIP_NOPS (tree011);
9965 if (TREE_CODE (tree010) == INTEGER_CST
9966 && 0 == compare_tree_int (tree010,
9967 TYPE_PRECISION
9968 (TREE_TYPE (TREE_OPERAND
9969 (arg0, 0))))
9970 && operand_equal_p (tree11, tree011, 0))
9971 return fold_convert_loc
9972 (loc, type,
9973 build2 ((code0 != LSHIFT_EXPR
9974 ? LROTATE_EXPR
9975 : RROTATE_EXPR),
9976 TREE_TYPE (TREE_OPERAND (arg0, 0)),
9977 TREE_OPERAND (arg0, 0), tree11));
9982 associate:
9983 /* In most languages, can't associate operations on floats through
9984 parentheses. Rather than remember where the parentheses were, we
9985 don't associate floats at all, unless the user has specified
9986 -fassociative-math.
9987 And, we need to make sure type is not saturating. */
9989 if ((! FLOAT_TYPE_P (type) || flag_associative_math)
9990 && !TYPE_SATURATING (type))
9992 tree var0, con0, lit0, minus_lit0;
9993 tree var1, con1, lit1, minus_lit1;
9994 bool ok = true;
9996 /* Split both trees into variables, constants, and literals. Then
9997 associate each group together, the constants with literals,
9998 then the result with variables. This increases the chances of
9999 literals being recombined later and of generating relocatable
10000 expressions for the sum of a constant and literal. */
10001 var0 = split_tree (arg0, code, &con0, &lit0, &minus_lit0, 0);
10002 var1 = split_tree (arg1, code, &con1, &lit1, &minus_lit1,
10003 code == MINUS_EXPR);
10005 /* Recombine MINUS_EXPR operands by using PLUS_EXPR. */
10006 if (code == MINUS_EXPR)
10007 code = PLUS_EXPR;
10009 /* With undefined overflow we can only associate constants with one
10010 variable, and constants whose association doesn't overflow. */
10011 if ((POINTER_TYPE_P (type) && POINTER_TYPE_OVERFLOW_UNDEFINED)
10012 || (INTEGRAL_TYPE_P (type) && !TYPE_OVERFLOW_WRAPS (type)))
10014 if (var0 && var1)
10016 tree tmp0 = var0;
10017 tree tmp1 = var1;
10019 if (TREE_CODE (tmp0) == NEGATE_EXPR)
10020 tmp0 = TREE_OPERAND (tmp0, 0);
10021 if (TREE_CODE (tmp1) == NEGATE_EXPR)
10022 tmp1 = TREE_OPERAND (tmp1, 0);
10023 /* The only case we can still associate with two variables
10024 is if they are the same, modulo negation. */
10025 if (!operand_equal_p (tmp0, tmp1, 0))
10026 ok = false;
10029 if (ok && lit0 && lit1)
10031 tree tmp0 = fold_convert (type, lit0);
10032 tree tmp1 = fold_convert (type, lit1);
10034 if (!TREE_OVERFLOW (tmp0) && !TREE_OVERFLOW (tmp1)
10035 && TREE_OVERFLOW (fold_build2 (code, type, tmp0, tmp1)))
10036 ok = false;
10040 /* Only do something if we found more than two objects. Otherwise,
10041 nothing has changed and we risk infinite recursion. */
10042 if (ok
10043 && (2 < ((var0 != 0) + (var1 != 0)
10044 + (con0 != 0) + (con1 != 0)
10045 + (lit0 != 0) + (lit1 != 0)
10046 + (minus_lit0 != 0) + (minus_lit1 != 0))))
10048 var0 = associate_trees (loc, var0, var1, code, type);
10049 con0 = associate_trees (loc, con0, con1, code, type);
10050 lit0 = associate_trees (loc, lit0, lit1, code, type);
10051 minus_lit0 = associate_trees (loc, minus_lit0, minus_lit1, code, type);
10053 /* Preserve the MINUS_EXPR if the negative part of the literal is
10054 greater than the positive part. Otherwise, the multiplicative
10055 folding code (i.e extract_muldiv) may be fooled in case
10056 unsigned constants are subtracted, like in the following
10057 example: ((X*2 + 4) - 8U)/2. */
10058 if (minus_lit0 && lit0)
10060 if (TREE_CODE (lit0) == INTEGER_CST
10061 && TREE_CODE (minus_lit0) == INTEGER_CST
10062 && tree_int_cst_lt (lit0, minus_lit0))
10064 minus_lit0 = associate_trees (loc, minus_lit0, lit0,
10065 MINUS_EXPR, type);
10066 lit0 = 0;
10068 else
10070 lit0 = associate_trees (loc, lit0, minus_lit0,
10071 MINUS_EXPR, type);
10072 minus_lit0 = 0;
10075 if (minus_lit0)
10077 if (con0 == 0)
10078 return
10079 fold_convert_loc (loc, type,
10080 associate_trees (loc, var0, minus_lit0,
10081 MINUS_EXPR, type));
10082 else
10084 con0 = associate_trees (loc, con0, minus_lit0,
10085 MINUS_EXPR, type);
10086 return
10087 fold_convert_loc (loc, type,
10088 associate_trees (loc, var0, con0,
10089 PLUS_EXPR, type));
10093 con0 = associate_trees (loc, con0, lit0, code, type);
10094 return
10095 fold_convert_loc (loc, type, associate_trees (loc, var0, con0,
10096 code, type));
10100 return NULL_TREE;
10102 case MINUS_EXPR:
10103 /* Pointer simplifications for subtraction, simple reassociations. */
10104 if (POINTER_TYPE_P (TREE_TYPE (arg1)) && POINTER_TYPE_P (TREE_TYPE (arg0)))
10106 /* (PTR0 p+ A) - (PTR1 p+ B) -> (PTR0 - PTR1) + (A - B) */
10107 if (TREE_CODE (arg0) == POINTER_PLUS_EXPR
10108 && TREE_CODE (arg1) == POINTER_PLUS_EXPR)
10110 tree arg00 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
10111 tree arg01 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
10112 tree arg10 = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 0));
10113 tree arg11 = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 1));
10114 return fold_build2_loc (loc, PLUS_EXPR, type,
10115 fold_build2_loc (loc, MINUS_EXPR, type,
10116 arg00, arg10),
10117 fold_build2_loc (loc, MINUS_EXPR, type,
10118 arg01, arg11));
10120 /* (PTR0 p+ A) - PTR1 -> (PTR0 - PTR1) + A, assuming PTR0 - PTR1 simplifies. */
10121 else if (TREE_CODE (arg0) == POINTER_PLUS_EXPR)
10123 tree arg00 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
10124 tree arg01 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
10125 tree tmp = fold_binary_loc (loc, MINUS_EXPR, type, arg00,
10126 fold_convert_loc (loc, type, arg1));
10127 if (tmp)
10128 return fold_build2_loc (loc, PLUS_EXPR, type, tmp, arg01);
10131 /* A - (-B) -> A + B */
10132 if (TREE_CODE (arg1) == NEGATE_EXPR)
10133 return fold_build2_loc (loc, PLUS_EXPR, type, op0,
10134 fold_convert_loc (loc, type,
10135 TREE_OPERAND (arg1, 0)));
10136 /* (-A) - B -> (-B) - A where B is easily negated and we can swap. */
10137 if (TREE_CODE (arg0) == NEGATE_EXPR
10138 && (FLOAT_TYPE_P (type)
10139 || INTEGRAL_TYPE_P (type))
10140 && negate_expr_p (arg1)
10141 && reorder_operands_p (arg0, arg1))
10142 return fold_build2_loc (loc, MINUS_EXPR, type,
10143 fold_convert_loc (loc, type,
10144 negate_expr (arg1)),
10145 fold_convert_loc (loc, type,
10146 TREE_OPERAND (arg0, 0)));
10147 /* Convert -A - 1 to ~A. */
10148 if (INTEGRAL_TYPE_P (type)
10149 && TREE_CODE (arg0) == NEGATE_EXPR
10150 && integer_onep (arg1)
10151 && !TYPE_OVERFLOW_TRAPS (type))
10152 return fold_build1_loc (loc, BIT_NOT_EXPR, type,
10153 fold_convert_loc (loc, type,
10154 TREE_OPERAND (arg0, 0)));
10156 /* Convert -1 - A to ~A. */
10157 if (INTEGRAL_TYPE_P (type)
10158 && integer_all_onesp (arg0))
10159 return fold_build1_loc (loc, BIT_NOT_EXPR, type, op1);
10162 /* X - (X / CST) * CST is X % CST. */
10163 if (INTEGRAL_TYPE_P (type)
10164 && TREE_CODE (arg1) == MULT_EXPR
10165 && TREE_CODE (TREE_OPERAND (arg1, 0)) == TRUNC_DIV_EXPR
10166 && operand_equal_p (arg0,
10167 TREE_OPERAND (TREE_OPERAND (arg1, 0), 0), 0)
10168 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg1, 0), 1),
10169 TREE_OPERAND (arg1, 1), 0))
10170 return
10171 fold_convert_loc (loc, type,
10172 fold_build2_loc (loc, TRUNC_MOD_EXPR, TREE_TYPE (arg0),
10173 arg0, TREE_OPERAND (arg1, 1)));
10175 if (! FLOAT_TYPE_P (type))
10177 if (integer_zerop (arg0))
10178 return negate_expr (fold_convert_loc (loc, type, arg1));
10179 if (integer_zerop (arg1))
10180 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
10182 /* Fold A - (A & B) into ~B & A. */
10183 if (!TREE_SIDE_EFFECTS (arg0)
10184 && TREE_CODE (arg1) == BIT_AND_EXPR)
10186 if (operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0))
10188 tree arg10 = fold_convert_loc (loc, type,
10189 TREE_OPERAND (arg1, 0));
10190 return fold_build2_loc (loc, BIT_AND_EXPR, type,
10191 fold_build1_loc (loc, BIT_NOT_EXPR,
10192 type, arg10),
10193 fold_convert_loc (loc, type, arg0));
10195 if (operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
10197 tree arg11 = fold_convert_loc (loc,
10198 type, TREE_OPERAND (arg1, 1));
10199 return fold_build2_loc (loc, BIT_AND_EXPR, type,
10200 fold_build1_loc (loc, BIT_NOT_EXPR,
10201 type, arg11),
10202 fold_convert_loc (loc, type, arg0));
10206 /* Fold (A & ~B) - (A & B) into (A ^ B) - B, where B is
10207 any power of 2 minus 1. */
10208 if (TREE_CODE (arg0) == BIT_AND_EXPR
10209 && TREE_CODE (arg1) == BIT_AND_EXPR
10210 && operand_equal_p (TREE_OPERAND (arg0, 0),
10211 TREE_OPERAND (arg1, 0), 0))
10213 tree mask0 = TREE_OPERAND (arg0, 1);
10214 tree mask1 = TREE_OPERAND (arg1, 1);
10215 tree tem = fold_build1_loc (loc, BIT_NOT_EXPR, type, mask0);
10217 if (operand_equal_p (tem, mask1, 0))
10219 tem = fold_build2_loc (loc, BIT_XOR_EXPR, type,
10220 TREE_OPERAND (arg0, 0), mask1);
10221 return fold_build2_loc (loc, MINUS_EXPR, type, tem, mask1);
10226 /* See if ARG1 is zero and X - ARG1 reduces to X. */
10227 else if (fold_real_zero_addition_p (TREE_TYPE (arg0), arg1, 1))
10228 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
10230 /* (ARG0 - ARG1) is the same as (-ARG1 + ARG0). So check whether
10231 ARG0 is zero and X + ARG0 reduces to X, since that would mean
10232 (-ARG1 + ARG0) reduces to -ARG1. */
10233 else if (fold_real_zero_addition_p (TREE_TYPE (arg1), arg0, 0))
10234 return negate_expr (fold_convert_loc (loc, type, arg1));
10236 /* Fold __complex__ ( x, 0 ) - __complex__ ( 0, y ) to
10237 __complex__ ( x, -y ). This is not the same for SNaNs or if
10238 signed zeros are involved. */
10239 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
10240 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
10241 && COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0)))
10243 tree rtype = TREE_TYPE (TREE_TYPE (arg0));
10244 tree arg0r = fold_unary_loc (loc, REALPART_EXPR, rtype, arg0);
10245 tree arg0i = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg0);
10246 bool arg0rz = false, arg0iz = false;
10247 if ((arg0r && (arg0rz = real_zerop (arg0r)))
10248 || (arg0i && (arg0iz = real_zerop (arg0i))))
10250 tree arg1r = fold_unary_loc (loc, REALPART_EXPR, rtype, arg1);
10251 tree arg1i = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg1);
10252 if (arg0rz && arg1i && real_zerop (arg1i))
10254 tree rp = fold_build1_loc (loc, NEGATE_EXPR, rtype,
10255 arg1r ? arg1r
10256 : build1 (REALPART_EXPR, rtype, arg1));
10257 tree ip = arg0i ? arg0i
10258 : build1 (IMAGPART_EXPR, rtype, arg0);
10259 return fold_build2_loc (loc, COMPLEX_EXPR, type, rp, ip);
10261 else if (arg0iz && arg1r && real_zerop (arg1r))
10263 tree rp = arg0r ? arg0r
10264 : build1 (REALPART_EXPR, rtype, arg0);
10265 tree ip = fold_build1_loc (loc, NEGATE_EXPR, rtype,
10266 arg1i ? arg1i
10267 : build1 (IMAGPART_EXPR, rtype, arg1));
10268 return fold_build2_loc (loc, COMPLEX_EXPR, type, rp, ip);
10273 /* Fold &x - &x. This can happen from &x.foo - &x.
10274 This is unsafe for certain floats even in non-IEEE formats.
10275 In IEEE, it is unsafe because it does wrong for NaNs.
10276 Also note that operand_equal_p is always false if an operand
10277 is volatile. */
10279 if ((!FLOAT_TYPE_P (type) || !HONOR_NANS (TYPE_MODE (type)))
10280 && operand_equal_p (arg0, arg1, 0))
10281 return fold_convert_loc (loc, type, integer_zero_node);
10283 /* A - B -> A + (-B) if B is easily negatable. */
10284 if (negate_expr_p (arg1)
10285 && ((FLOAT_TYPE_P (type)
10286 /* Avoid this transformation if B is a positive REAL_CST. */
10287 && (TREE_CODE (arg1) != REAL_CST
10288 || REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg1))))
10289 || INTEGRAL_TYPE_P (type)))
10290 return fold_build2_loc (loc, PLUS_EXPR, type,
10291 fold_convert_loc (loc, type, arg0),
10292 fold_convert_loc (loc, type,
10293 negate_expr (arg1)));
10295 /* Try folding difference of addresses. */
10297 HOST_WIDE_INT diff;
10299 if ((TREE_CODE (arg0) == ADDR_EXPR
10300 || TREE_CODE (arg1) == ADDR_EXPR)
10301 && ptr_difference_const (arg0, arg1, &diff))
10302 return build_int_cst_type (type, diff);
10305 /* Fold &a[i] - &a[j] to i-j. */
10306 if (TREE_CODE (arg0) == ADDR_EXPR
10307 && TREE_CODE (TREE_OPERAND (arg0, 0)) == ARRAY_REF
10308 && TREE_CODE (arg1) == ADDR_EXPR
10309 && TREE_CODE (TREE_OPERAND (arg1, 0)) == ARRAY_REF)
10311 tree aref0 = TREE_OPERAND (arg0, 0);
10312 tree aref1 = TREE_OPERAND (arg1, 0);
10313 if (operand_equal_p (TREE_OPERAND (aref0, 0),
10314 TREE_OPERAND (aref1, 0), 0))
10316 tree op0 = fold_convert_loc (loc, type, TREE_OPERAND (aref0, 1));
10317 tree op1 = fold_convert_loc (loc, type, TREE_OPERAND (aref1, 1));
10318 tree esz = array_ref_element_size (aref0);
10319 tree diff = build2 (MINUS_EXPR, type, op0, op1);
10320 return fold_build2_loc (loc, MULT_EXPR, type, diff,
10321 fold_convert_loc (loc, type, esz));
10326 if (FLOAT_TYPE_P (type)
10327 && flag_unsafe_math_optimizations
10328 && (TREE_CODE (arg0) == RDIV_EXPR || TREE_CODE (arg0) == MULT_EXPR)
10329 && (TREE_CODE (arg1) == RDIV_EXPR || TREE_CODE (arg1) == MULT_EXPR)
10330 && (tem = distribute_real_division (loc, code, type, arg0, arg1)))
10331 return tem;
10333 /* Handle (A1 * C1) - (A2 * C2) with A1, A2 or C1, C2 being the
10334 same or one. Make sure type is not saturating.
10335 fold_plusminus_mult_expr will re-associate. */
10336 if ((TREE_CODE (arg0) == MULT_EXPR
10337 || TREE_CODE (arg1) == MULT_EXPR)
10338 && !TYPE_SATURATING (type)
10339 && (!FLOAT_TYPE_P (type) || flag_associative_math))
10341 tree tem = fold_plusminus_mult_expr (loc, code, type, arg0, arg1);
10342 if (tem)
10343 return tem;
10346 goto associate;
10348 case MULT_EXPR:
10349 /* (-A) * (-B) -> A * B */
10350 if (TREE_CODE (arg0) == NEGATE_EXPR && negate_expr_p (arg1))
10351 return fold_build2_loc (loc, MULT_EXPR, type,
10352 fold_convert_loc (loc, type,
10353 TREE_OPERAND (arg0, 0)),
10354 fold_convert_loc (loc, type,
10355 negate_expr (arg1)));
10356 if (TREE_CODE (arg1) == NEGATE_EXPR && negate_expr_p (arg0))
10357 return fold_build2_loc (loc, MULT_EXPR, type,
10358 fold_convert_loc (loc, type,
10359 negate_expr (arg0)),
10360 fold_convert_loc (loc, type,
10361 TREE_OPERAND (arg1, 0)));
10363 if (! FLOAT_TYPE_P (type))
10365 if (integer_zerop (arg1))
10366 return omit_one_operand_loc (loc, type, arg1, arg0);
10367 if (integer_onep (arg1))
10368 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
10369 /* Transform x * -1 into -x. Make sure to do the negation
10370 on the original operand with conversions not stripped
10371 because we can only strip non-sign-changing conversions. */
10372 if (integer_all_onesp (arg1))
10373 return fold_convert_loc (loc, type, negate_expr (op0));
10374 /* Transform x * -C into -x * C if x is easily negatable. */
10375 if (TREE_CODE (arg1) == INTEGER_CST
10376 && tree_int_cst_sgn (arg1) == -1
10377 && negate_expr_p (arg0)
10378 && (tem = negate_expr (arg1)) != arg1
10379 && !TREE_OVERFLOW (tem))
10380 return fold_build2_loc (loc, MULT_EXPR, type,
10381 fold_convert_loc (loc, type,
10382 negate_expr (arg0)),
10383 tem);
10385 /* (a * (1 << b)) is (a << b) */
10386 if (TREE_CODE (arg1) == LSHIFT_EXPR
10387 && integer_onep (TREE_OPERAND (arg1, 0)))
10388 return fold_build2_loc (loc, LSHIFT_EXPR, type, op0,
10389 TREE_OPERAND (arg1, 1));
10390 if (TREE_CODE (arg0) == LSHIFT_EXPR
10391 && integer_onep (TREE_OPERAND (arg0, 0)))
10392 return fold_build2_loc (loc, LSHIFT_EXPR, type, op1,
10393 TREE_OPERAND (arg0, 1));
10395 /* (A + A) * C -> A * 2 * C */
10396 if (TREE_CODE (arg0) == PLUS_EXPR
10397 && TREE_CODE (arg1) == INTEGER_CST
10398 && operand_equal_p (TREE_OPERAND (arg0, 0),
10399 TREE_OPERAND (arg0, 1), 0))
10400 return fold_build2_loc (loc, MULT_EXPR, type,
10401 omit_one_operand_loc (loc, type,
10402 TREE_OPERAND (arg0, 0),
10403 TREE_OPERAND (arg0, 1)),
10404 fold_build2_loc (loc, MULT_EXPR, type,
10405 build_int_cst (type, 2) , arg1));
10407 strict_overflow_p = false;
10408 if (TREE_CODE (arg1) == INTEGER_CST
10409 && 0 != (tem = extract_muldiv (op0, arg1, code, NULL_TREE,
10410 &strict_overflow_p)))
10412 if (strict_overflow_p)
10413 fold_overflow_warning (("assuming signed overflow does not "
10414 "occur when simplifying "
10415 "multiplication"),
10416 WARN_STRICT_OVERFLOW_MISC);
10417 return fold_convert_loc (loc, type, tem);
10420 /* Optimize z * conj(z) for integer complex numbers. */
10421 if (TREE_CODE (arg0) == CONJ_EXPR
10422 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
10423 return fold_mult_zconjz (loc, type, arg1);
10424 if (TREE_CODE (arg1) == CONJ_EXPR
10425 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
10426 return fold_mult_zconjz (loc, type, arg0);
10428 else
10430 /* Maybe fold x * 0 to 0. The expressions aren't the same
10431 when x is NaN, since x * 0 is also NaN. Nor are they the
10432 same in modes with signed zeros, since multiplying a
10433 negative value by 0 gives -0, not +0. */
10434 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0)))
10435 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
10436 && real_zerop (arg1))
10437 return omit_one_operand_loc (loc, type, arg1, arg0);
10438 /* In IEEE floating point, x*1 is not equivalent to x for snans.
10439 Likewise for complex arithmetic with signed zeros. */
10440 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
10441 && (!HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
10442 || !COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0)))
10443 && real_onep (arg1))
10444 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
10446 /* Transform x * -1.0 into -x. */
10447 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
10448 && (!HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
10449 || !COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0)))
10450 && real_minus_onep (arg1))
10451 return fold_convert_loc (loc, type, negate_expr (arg0));
10453 /* Convert (C1/X)*C2 into (C1*C2)/X. This transformation may change
10454 the result for floating point types due to rounding so it is applied
10455 only if -fassociative-math was specify. */
10456 if (flag_associative_math
10457 && TREE_CODE (arg0) == RDIV_EXPR
10458 && TREE_CODE (arg1) == REAL_CST
10459 && TREE_CODE (TREE_OPERAND (arg0, 0)) == REAL_CST)
10461 tree tem = const_binop (MULT_EXPR, TREE_OPERAND (arg0, 0),
10462 arg1);
10463 if (tem)
10464 return fold_build2_loc (loc, RDIV_EXPR, type, tem,
10465 TREE_OPERAND (arg0, 1));
10468 /* Strip sign operations from X in X*X, i.e. -Y*-Y -> Y*Y. */
10469 if (operand_equal_p (arg0, arg1, 0))
10471 tree tem = fold_strip_sign_ops (arg0);
10472 if (tem != NULL_TREE)
10474 tem = fold_convert_loc (loc, type, tem);
10475 return fold_build2_loc (loc, MULT_EXPR, type, tem, tem);
10479 /* Fold z * +-I to __complex__ (-+__imag z, +-__real z).
10480 This is not the same for NaNs or if signed zeros are
10481 involved. */
10482 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0)))
10483 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
10484 && COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0))
10485 && TREE_CODE (arg1) == COMPLEX_CST
10486 && real_zerop (TREE_REALPART (arg1)))
10488 tree rtype = TREE_TYPE (TREE_TYPE (arg0));
10489 if (real_onep (TREE_IMAGPART (arg1)))
10490 return
10491 fold_build2_loc (loc, COMPLEX_EXPR, type,
10492 negate_expr (fold_build1_loc (loc, IMAGPART_EXPR,
10493 rtype, arg0)),
10494 fold_build1_loc (loc, REALPART_EXPR, rtype, arg0));
10495 else if (real_minus_onep (TREE_IMAGPART (arg1)))
10496 return
10497 fold_build2_loc (loc, COMPLEX_EXPR, type,
10498 fold_build1_loc (loc, IMAGPART_EXPR, rtype, arg0),
10499 negate_expr (fold_build1_loc (loc, REALPART_EXPR,
10500 rtype, arg0)));
10503 /* Optimize z * conj(z) for floating point complex numbers.
10504 Guarded by flag_unsafe_math_optimizations as non-finite
10505 imaginary components don't produce scalar results. */
10506 if (flag_unsafe_math_optimizations
10507 && TREE_CODE (arg0) == CONJ_EXPR
10508 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
10509 return fold_mult_zconjz (loc, type, arg1);
10510 if (flag_unsafe_math_optimizations
10511 && TREE_CODE (arg1) == CONJ_EXPR
10512 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
10513 return fold_mult_zconjz (loc, type, arg0);
10515 if (flag_unsafe_math_optimizations)
10517 enum built_in_function fcode0 = builtin_mathfn_code (arg0);
10518 enum built_in_function fcode1 = builtin_mathfn_code (arg1);
10520 /* Optimizations of root(...)*root(...). */
10521 if (fcode0 == fcode1 && BUILTIN_ROOT_P (fcode0))
10523 tree rootfn, arg;
10524 tree arg00 = CALL_EXPR_ARG (arg0, 0);
10525 tree arg10 = CALL_EXPR_ARG (arg1, 0);
10527 /* Optimize sqrt(x)*sqrt(x) as x. */
10528 if (BUILTIN_SQRT_P (fcode0)
10529 && operand_equal_p (arg00, arg10, 0)
10530 && ! HONOR_SNANS (TYPE_MODE (type)))
10531 return arg00;
10533 /* Optimize root(x)*root(y) as root(x*y). */
10534 rootfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
10535 arg = fold_build2_loc (loc, MULT_EXPR, type, arg00, arg10);
10536 return build_call_expr_loc (loc, rootfn, 1, arg);
10539 /* Optimize expN(x)*expN(y) as expN(x+y). */
10540 if (fcode0 == fcode1 && BUILTIN_EXPONENT_P (fcode0))
10542 tree expfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
10543 tree arg = fold_build2_loc (loc, PLUS_EXPR, type,
10544 CALL_EXPR_ARG (arg0, 0),
10545 CALL_EXPR_ARG (arg1, 0));
10546 return build_call_expr_loc (loc, expfn, 1, arg);
10549 /* Optimizations of pow(...)*pow(...). */
10550 if ((fcode0 == BUILT_IN_POW && fcode1 == BUILT_IN_POW)
10551 || (fcode0 == BUILT_IN_POWF && fcode1 == BUILT_IN_POWF)
10552 || (fcode0 == BUILT_IN_POWL && fcode1 == BUILT_IN_POWL))
10554 tree arg00 = CALL_EXPR_ARG (arg0, 0);
10555 tree arg01 = CALL_EXPR_ARG (arg0, 1);
10556 tree arg10 = CALL_EXPR_ARG (arg1, 0);
10557 tree arg11 = CALL_EXPR_ARG (arg1, 1);
10559 /* Optimize pow(x,y)*pow(z,y) as pow(x*z,y). */
10560 if (operand_equal_p (arg01, arg11, 0))
10562 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
10563 tree arg = fold_build2_loc (loc, MULT_EXPR, type,
10564 arg00, arg10);
10565 return build_call_expr_loc (loc, powfn, 2, arg, arg01);
10568 /* Optimize pow(x,y)*pow(x,z) as pow(x,y+z). */
10569 if (operand_equal_p (arg00, arg10, 0))
10571 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
10572 tree arg = fold_build2_loc (loc, PLUS_EXPR, type,
10573 arg01, arg11);
10574 return build_call_expr_loc (loc, powfn, 2, arg00, arg);
10578 /* Optimize tan(x)*cos(x) as sin(x). */
10579 if (((fcode0 == BUILT_IN_TAN && fcode1 == BUILT_IN_COS)
10580 || (fcode0 == BUILT_IN_TANF && fcode1 == BUILT_IN_COSF)
10581 || (fcode0 == BUILT_IN_TANL && fcode1 == BUILT_IN_COSL)
10582 || (fcode0 == BUILT_IN_COS && fcode1 == BUILT_IN_TAN)
10583 || (fcode0 == BUILT_IN_COSF && fcode1 == BUILT_IN_TANF)
10584 || (fcode0 == BUILT_IN_COSL && fcode1 == BUILT_IN_TANL))
10585 && operand_equal_p (CALL_EXPR_ARG (arg0, 0),
10586 CALL_EXPR_ARG (arg1, 0), 0))
10588 tree sinfn = mathfn_built_in (type, BUILT_IN_SIN);
10590 if (sinfn != NULL_TREE)
10591 return build_call_expr_loc (loc, sinfn, 1,
10592 CALL_EXPR_ARG (arg0, 0));
10595 /* Optimize x*pow(x,c) as pow(x,c+1). */
10596 if (fcode1 == BUILT_IN_POW
10597 || fcode1 == BUILT_IN_POWF
10598 || fcode1 == BUILT_IN_POWL)
10600 tree arg10 = CALL_EXPR_ARG (arg1, 0);
10601 tree arg11 = CALL_EXPR_ARG (arg1, 1);
10602 if (TREE_CODE (arg11) == REAL_CST
10603 && !TREE_OVERFLOW (arg11)
10604 && operand_equal_p (arg0, arg10, 0))
10606 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg1), 0);
10607 REAL_VALUE_TYPE c;
10608 tree arg;
10610 c = TREE_REAL_CST (arg11);
10611 real_arithmetic (&c, PLUS_EXPR, &c, &dconst1);
10612 arg = build_real (type, c);
10613 return build_call_expr_loc (loc, powfn, 2, arg0, arg);
10617 /* Optimize pow(x,c)*x as pow(x,c+1). */
10618 if (fcode0 == BUILT_IN_POW
10619 || fcode0 == BUILT_IN_POWF
10620 || fcode0 == BUILT_IN_POWL)
10622 tree arg00 = CALL_EXPR_ARG (arg0, 0);
10623 tree arg01 = CALL_EXPR_ARG (arg0, 1);
10624 if (TREE_CODE (arg01) == REAL_CST
10625 && !TREE_OVERFLOW (arg01)
10626 && operand_equal_p (arg1, arg00, 0))
10628 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
10629 REAL_VALUE_TYPE c;
10630 tree arg;
10632 c = TREE_REAL_CST (arg01);
10633 real_arithmetic (&c, PLUS_EXPR, &c, &dconst1);
10634 arg = build_real (type, c);
10635 return build_call_expr_loc (loc, powfn, 2, arg1, arg);
10639 /* Optimize x*x as pow(x,2.0), which is expanded as x*x. */
10640 if (optimize_function_for_speed_p (cfun)
10641 && operand_equal_p (arg0, arg1, 0))
10643 tree powfn = mathfn_built_in (type, BUILT_IN_POW);
10645 if (powfn)
10647 tree arg = build_real (type, dconst2);
10648 return build_call_expr_loc (loc, powfn, 2, arg0, arg);
10653 goto associate;
10655 case BIT_IOR_EXPR:
10656 bit_ior:
10657 if (integer_all_onesp (arg1))
10658 return omit_one_operand_loc (loc, type, arg1, arg0);
10659 if (integer_zerop (arg1))
10660 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
10661 if (operand_equal_p (arg0, arg1, 0))
10662 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
10664 /* ~X | X is -1. */
10665 if (TREE_CODE (arg0) == BIT_NOT_EXPR
10666 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
10668 t1 = fold_convert_loc (loc, type, integer_zero_node);
10669 t1 = fold_unary_loc (loc, BIT_NOT_EXPR, type, t1);
10670 return omit_one_operand_loc (loc, type, t1, arg1);
10673 /* X | ~X is -1. */
10674 if (TREE_CODE (arg1) == BIT_NOT_EXPR
10675 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
10677 t1 = fold_convert_loc (loc, type, integer_zero_node);
10678 t1 = fold_unary_loc (loc, BIT_NOT_EXPR, type, t1);
10679 return omit_one_operand_loc (loc, type, t1, arg0);
10682 /* Canonicalize (X & C1) | C2. */
10683 if (TREE_CODE (arg0) == BIT_AND_EXPR
10684 && TREE_CODE (arg1) == INTEGER_CST
10685 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
10687 unsigned HOST_WIDE_INT hi1, lo1, hi2, lo2, hi3, lo3, mlo, mhi;
10688 int width = TYPE_PRECISION (type), w;
10689 hi1 = TREE_INT_CST_HIGH (TREE_OPERAND (arg0, 1));
10690 lo1 = TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1));
10691 hi2 = TREE_INT_CST_HIGH (arg1);
10692 lo2 = TREE_INT_CST_LOW (arg1);
10694 /* If (C1&C2) == C1, then (X&C1)|C2 becomes (X,C2). */
10695 if ((hi1 & hi2) == hi1 && (lo1 & lo2) == lo1)
10696 return omit_one_operand_loc (loc, type, arg1,
10697 TREE_OPERAND (arg0, 0));
10699 if (width > HOST_BITS_PER_WIDE_INT)
10701 mhi = (unsigned HOST_WIDE_INT) -1
10702 >> (2 * HOST_BITS_PER_WIDE_INT - width);
10703 mlo = -1;
10705 else
10707 mhi = 0;
10708 mlo = (unsigned HOST_WIDE_INT) -1
10709 >> (HOST_BITS_PER_WIDE_INT - width);
10712 /* If (C1|C2) == ~0 then (X&C1)|C2 becomes X|C2. */
10713 if ((~(hi1 | hi2) & mhi) == 0 && (~(lo1 | lo2) & mlo) == 0)
10714 return fold_build2_loc (loc, BIT_IOR_EXPR, type,
10715 TREE_OPERAND (arg0, 0), arg1);
10717 /* Minimize the number of bits set in C1, i.e. C1 := C1 & ~C2,
10718 unless (C1 & ~C2) | (C2 & C3) for some C3 is a mask of some
10719 mode which allows further optimizations. */
10720 hi1 &= mhi;
10721 lo1 &= mlo;
10722 hi2 &= mhi;
10723 lo2 &= mlo;
10724 hi3 = hi1 & ~hi2;
10725 lo3 = lo1 & ~lo2;
10726 for (w = BITS_PER_UNIT;
10727 w <= width && w <= HOST_BITS_PER_WIDE_INT;
10728 w <<= 1)
10730 unsigned HOST_WIDE_INT mask
10731 = (unsigned HOST_WIDE_INT) -1 >> (HOST_BITS_PER_WIDE_INT - w);
10732 if (((lo1 | lo2) & mask) == mask
10733 && (lo1 & ~mask) == 0 && hi1 == 0)
10735 hi3 = 0;
10736 lo3 = mask;
10737 break;
10740 if (hi3 != hi1 || lo3 != lo1)
10741 return fold_build2_loc (loc, BIT_IOR_EXPR, type,
10742 fold_build2_loc (loc, BIT_AND_EXPR, type,
10743 TREE_OPERAND (arg0, 0),
10744 build_int_cst_wide (type,
10745 lo3, hi3)),
10746 arg1);
10749 /* (X & Y) | Y is (X, Y). */
10750 if (TREE_CODE (arg0) == BIT_AND_EXPR
10751 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
10752 return omit_one_operand_loc (loc, type, arg1, TREE_OPERAND (arg0, 0));
10753 /* (X & Y) | X is (Y, X). */
10754 if (TREE_CODE (arg0) == BIT_AND_EXPR
10755 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
10756 && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
10757 return omit_one_operand_loc (loc, type, arg1, TREE_OPERAND (arg0, 1));
10758 /* X | (X & Y) is (Y, X). */
10759 if (TREE_CODE (arg1) == BIT_AND_EXPR
10760 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0)
10761 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 1)))
10762 return omit_one_operand_loc (loc, type, arg0, TREE_OPERAND (arg1, 1));
10763 /* X | (Y & X) is (Y, X). */
10764 if (TREE_CODE (arg1) == BIT_AND_EXPR
10765 && operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0)
10766 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
10767 return omit_one_operand_loc (loc, type, arg0, TREE_OPERAND (arg1, 0));
10769 t1 = distribute_bit_expr (loc, code, type, arg0, arg1);
10770 if (t1 != NULL_TREE)
10771 return t1;
10773 /* Convert (or (not arg0) (not arg1)) to (not (and (arg0) (arg1))).
10775 This results in more efficient code for machines without a NAND
10776 instruction. Combine will canonicalize to the first form
10777 which will allow use of NAND instructions provided by the
10778 backend if they exist. */
10779 if (TREE_CODE (arg0) == BIT_NOT_EXPR
10780 && TREE_CODE (arg1) == BIT_NOT_EXPR)
10782 return
10783 fold_build1_loc (loc, BIT_NOT_EXPR, type,
10784 build2 (BIT_AND_EXPR, type,
10785 fold_convert_loc (loc, type,
10786 TREE_OPERAND (arg0, 0)),
10787 fold_convert_loc (loc, type,
10788 TREE_OPERAND (arg1, 0))));
10791 /* See if this can be simplified into a rotate first. If that
10792 is unsuccessful continue in the association code. */
10793 goto bit_rotate;
10795 case BIT_XOR_EXPR:
10796 if (integer_zerop (arg1))
10797 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
10798 if (integer_all_onesp (arg1))
10799 return fold_build1_loc (loc, BIT_NOT_EXPR, type, op0);
10800 if (operand_equal_p (arg0, arg1, 0))
10801 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
10803 /* ~X ^ X is -1. */
10804 if (TREE_CODE (arg0) == BIT_NOT_EXPR
10805 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
10807 t1 = fold_convert_loc (loc, type, integer_zero_node);
10808 t1 = fold_unary_loc (loc, BIT_NOT_EXPR, type, t1);
10809 return omit_one_operand_loc (loc, type, t1, arg1);
10812 /* X ^ ~X is -1. */
10813 if (TREE_CODE (arg1) == BIT_NOT_EXPR
10814 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
10816 t1 = fold_convert_loc (loc, type, integer_zero_node);
10817 t1 = fold_unary_loc (loc, BIT_NOT_EXPR, type, t1);
10818 return omit_one_operand_loc (loc, type, t1, arg0);
10821 /* If we are XORing two BIT_AND_EXPR's, both of which are and'ing
10822 with a constant, and the two constants have no bits in common,
10823 we should treat this as a BIT_IOR_EXPR since this may produce more
10824 simplifications. */
10825 if (TREE_CODE (arg0) == BIT_AND_EXPR
10826 && TREE_CODE (arg1) == BIT_AND_EXPR
10827 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
10828 && TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
10829 && integer_zerop (const_binop (BIT_AND_EXPR,
10830 TREE_OPERAND (arg0, 1),
10831 TREE_OPERAND (arg1, 1))))
10833 code = BIT_IOR_EXPR;
10834 goto bit_ior;
10837 /* (X | Y) ^ X -> Y & ~ X*/
10838 if (TREE_CODE (arg0) == BIT_IOR_EXPR
10839 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
10841 tree t2 = TREE_OPERAND (arg0, 1);
10842 t1 = fold_build1_loc (loc, BIT_NOT_EXPR, TREE_TYPE (arg1),
10843 arg1);
10844 t1 = fold_build2_loc (loc, BIT_AND_EXPR, type,
10845 fold_convert_loc (loc, type, t2),
10846 fold_convert_loc (loc, type, t1));
10847 return t1;
10850 /* (Y | X) ^ X -> Y & ~ X*/
10851 if (TREE_CODE (arg0) == BIT_IOR_EXPR
10852 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
10854 tree t2 = TREE_OPERAND (arg0, 0);
10855 t1 = fold_build1_loc (loc, BIT_NOT_EXPR, TREE_TYPE (arg1),
10856 arg1);
10857 t1 = fold_build2_loc (loc, BIT_AND_EXPR, type,
10858 fold_convert_loc (loc, type, t2),
10859 fold_convert_loc (loc, type, t1));
10860 return t1;
10863 /* X ^ (X | Y) -> Y & ~ X*/
10864 if (TREE_CODE (arg1) == BIT_IOR_EXPR
10865 && operand_equal_p (TREE_OPERAND (arg1, 0), arg0, 0))
10867 tree t2 = TREE_OPERAND (arg1, 1);
10868 t1 = fold_build1_loc (loc, BIT_NOT_EXPR, TREE_TYPE (arg0),
10869 arg0);
10870 t1 = fold_build2_loc (loc, BIT_AND_EXPR, type,
10871 fold_convert_loc (loc, type, t2),
10872 fold_convert_loc (loc, type, t1));
10873 return t1;
10876 /* X ^ (Y | X) -> Y & ~ X*/
10877 if (TREE_CODE (arg1) == BIT_IOR_EXPR
10878 && operand_equal_p (TREE_OPERAND (arg1, 1), arg0, 0))
10880 tree t2 = TREE_OPERAND (arg1, 0);
10881 t1 = fold_build1_loc (loc, BIT_NOT_EXPR, TREE_TYPE (arg0),
10882 arg0);
10883 t1 = fold_build2_loc (loc, BIT_AND_EXPR, type,
10884 fold_convert_loc (loc, type, t2),
10885 fold_convert_loc (loc, type, t1));
10886 return t1;
10889 /* Convert ~X ^ ~Y to X ^ Y. */
10890 if (TREE_CODE (arg0) == BIT_NOT_EXPR
10891 && TREE_CODE (arg1) == BIT_NOT_EXPR)
10892 return fold_build2_loc (loc, code, type,
10893 fold_convert_loc (loc, type,
10894 TREE_OPERAND (arg0, 0)),
10895 fold_convert_loc (loc, type,
10896 TREE_OPERAND (arg1, 0)));
10898 /* Convert ~X ^ C to X ^ ~C. */
10899 if (TREE_CODE (arg0) == BIT_NOT_EXPR
10900 && TREE_CODE (arg1) == INTEGER_CST)
10901 return fold_build2_loc (loc, code, type,
10902 fold_convert_loc (loc, type,
10903 TREE_OPERAND (arg0, 0)),
10904 fold_build1_loc (loc, BIT_NOT_EXPR, type, arg1));
10906 /* Fold (X & 1) ^ 1 as (X & 1) == 0. */
10907 if (TREE_CODE (arg0) == BIT_AND_EXPR
10908 && integer_onep (TREE_OPERAND (arg0, 1))
10909 && integer_onep (arg1))
10910 return fold_build2_loc (loc, EQ_EXPR, type, arg0,
10911 build_int_cst (TREE_TYPE (arg0), 0));
10913 /* Fold (X & Y) ^ Y as ~X & Y. */
10914 if (TREE_CODE (arg0) == BIT_AND_EXPR
10915 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
10917 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
10918 return fold_build2_loc (loc, BIT_AND_EXPR, type,
10919 fold_build1_loc (loc, BIT_NOT_EXPR, type, tem),
10920 fold_convert_loc (loc, type, arg1));
10922 /* Fold (X & Y) ^ X as ~Y & X. */
10923 if (TREE_CODE (arg0) == BIT_AND_EXPR
10924 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
10925 && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
10927 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
10928 return fold_build2_loc (loc, BIT_AND_EXPR, type,
10929 fold_build1_loc (loc, BIT_NOT_EXPR, type, tem),
10930 fold_convert_loc (loc, type, arg1));
10932 /* Fold X ^ (X & Y) as X & ~Y. */
10933 if (TREE_CODE (arg1) == BIT_AND_EXPR
10934 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
10936 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 1));
10937 return fold_build2_loc (loc, BIT_AND_EXPR, type,
10938 fold_convert_loc (loc, type, arg0),
10939 fold_build1_loc (loc, BIT_NOT_EXPR, type, tem));
10941 /* Fold X ^ (Y & X) as ~Y & X. */
10942 if (TREE_CODE (arg1) == BIT_AND_EXPR
10943 && operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0)
10944 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
10946 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 0));
10947 return fold_build2_loc (loc, BIT_AND_EXPR, type,
10948 fold_build1_loc (loc, BIT_NOT_EXPR, type, tem),
10949 fold_convert_loc (loc, type, arg0));
10952 /* See if this can be simplified into a rotate first. If that
10953 is unsuccessful continue in the association code. */
10954 goto bit_rotate;
10956 case BIT_AND_EXPR:
10957 if (integer_all_onesp (arg1))
10958 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
10959 if (integer_zerop (arg1))
10960 return omit_one_operand_loc (loc, type, arg1, arg0);
10961 if (operand_equal_p (arg0, arg1, 0))
10962 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
10964 /* ~X & X is always zero. */
10965 if (TREE_CODE (arg0) == BIT_NOT_EXPR
10966 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
10967 return omit_one_operand_loc (loc, type, integer_zero_node, arg1);
10969 /* X & ~X is always zero. */
10970 if (TREE_CODE (arg1) == BIT_NOT_EXPR
10971 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
10972 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
10974 /* Canonicalize (X | C1) & C2 as (X & C2) | (C1 & C2). */
10975 if (TREE_CODE (arg0) == BIT_IOR_EXPR
10976 && TREE_CODE (arg1) == INTEGER_CST
10977 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
10979 tree tmp1 = fold_convert_loc (loc, type, arg1);
10980 tree tmp2 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
10981 tree tmp3 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
10982 tmp2 = fold_build2_loc (loc, BIT_AND_EXPR, type, tmp2, tmp1);
10983 tmp3 = fold_build2_loc (loc, BIT_AND_EXPR, type, tmp3, tmp1);
10984 return
10985 fold_convert_loc (loc, type,
10986 fold_build2_loc (loc, BIT_IOR_EXPR,
10987 type, tmp2, tmp3));
10990 /* (X | Y) & Y is (X, Y). */
10991 if (TREE_CODE (arg0) == BIT_IOR_EXPR
10992 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
10993 return omit_one_operand_loc (loc, type, arg1, TREE_OPERAND (arg0, 0));
10994 /* (X | Y) & X is (Y, X). */
10995 if (TREE_CODE (arg0) == BIT_IOR_EXPR
10996 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
10997 && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
10998 return omit_one_operand_loc (loc, type, arg1, TREE_OPERAND (arg0, 1));
10999 /* X & (X | Y) is (Y, X). */
11000 if (TREE_CODE (arg1) == BIT_IOR_EXPR
11001 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0)
11002 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 1)))
11003 return omit_one_operand_loc (loc, type, arg0, TREE_OPERAND (arg1, 1));
11004 /* X & (Y | X) is (Y, X). */
11005 if (TREE_CODE (arg1) == BIT_IOR_EXPR
11006 && operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0)
11007 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
11008 return omit_one_operand_loc (loc, type, arg0, TREE_OPERAND (arg1, 0));
11010 /* Fold (X ^ 1) & 1 as (X & 1) == 0. */
11011 if (TREE_CODE (arg0) == BIT_XOR_EXPR
11012 && integer_onep (TREE_OPERAND (arg0, 1))
11013 && integer_onep (arg1))
11015 tem = TREE_OPERAND (arg0, 0);
11016 return fold_build2_loc (loc, EQ_EXPR, type,
11017 fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (tem), tem,
11018 build_int_cst (TREE_TYPE (tem), 1)),
11019 build_int_cst (TREE_TYPE (tem), 0));
11021 /* Fold ~X & 1 as (X & 1) == 0. */
11022 if (TREE_CODE (arg0) == BIT_NOT_EXPR
11023 && integer_onep (arg1))
11025 tem = TREE_OPERAND (arg0, 0);
11026 return fold_build2_loc (loc, EQ_EXPR, type,
11027 fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (tem), tem,
11028 build_int_cst (TREE_TYPE (tem), 1)),
11029 build_int_cst (TREE_TYPE (tem), 0));
11032 /* Fold (X ^ Y) & Y as ~X & Y. */
11033 if (TREE_CODE (arg0) == BIT_XOR_EXPR
11034 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
11036 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
11037 return fold_build2_loc (loc, BIT_AND_EXPR, type,
11038 fold_build1_loc (loc, BIT_NOT_EXPR, type, tem),
11039 fold_convert_loc (loc, type, arg1));
11041 /* Fold (X ^ Y) & X as ~Y & X. */
11042 if (TREE_CODE (arg0) == BIT_XOR_EXPR
11043 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
11044 && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
11046 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
11047 return fold_build2_loc (loc, BIT_AND_EXPR, type,
11048 fold_build1_loc (loc, BIT_NOT_EXPR, type, tem),
11049 fold_convert_loc (loc, type, arg1));
11051 /* Fold X & (X ^ Y) as X & ~Y. */
11052 if (TREE_CODE (arg1) == BIT_XOR_EXPR
11053 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
11055 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 1));
11056 return fold_build2_loc (loc, BIT_AND_EXPR, type,
11057 fold_convert_loc (loc, type, arg0),
11058 fold_build1_loc (loc, BIT_NOT_EXPR, type, tem));
11060 /* Fold X & (Y ^ X) as ~Y & X. */
11061 if (TREE_CODE (arg1) == BIT_XOR_EXPR
11062 && operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0)
11063 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
11065 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 0));
11066 return fold_build2_loc (loc, BIT_AND_EXPR, type,
11067 fold_build1_loc (loc, BIT_NOT_EXPR, type, tem),
11068 fold_convert_loc (loc, type, arg0));
11071 t1 = distribute_bit_expr (loc, code, type, arg0, arg1);
11072 if (t1 != NULL_TREE)
11073 return t1;
11074 /* Simplify ((int)c & 0377) into (int)c, if c is unsigned char. */
11075 if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg0) == NOP_EXPR
11076 && TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg0, 0))))
11078 unsigned int prec
11079 = TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg0, 0)));
11081 if (prec < BITS_PER_WORD && prec < HOST_BITS_PER_WIDE_INT
11082 && (~TREE_INT_CST_LOW (arg1)
11083 & (((HOST_WIDE_INT) 1 << prec) - 1)) == 0)
11084 return
11085 fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
11088 /* Convert (and (not arg0) (not arg1)) to (not (or (arg0) (arg1))).
11090 This results in more efficient code for machines without a NOR
11091 instruction. Combine will canonicalize to the first form
11092 which will allow use of NOR instructions provided by the
11093 backend if they exist. */
11094 if (TREE_CODE (arg0) == BIT_NOT_EXPR
11095 && TREE_CODE (arg1) == BIT_NOT_EXPR)
11097 return fold_build1_loc (loc, BIT_NOT_EXPR, type,
11098 build2 (BIT_IOR_EXPR, type,
11099 fold_convert_loc (loc, type,
11100 TREE_OPERAND (arg0, 0)),
11101 fold_convert_loc (loc, type,
11102 TREE_OPERAND (arg1, 0))));
11105 /* If arg0 is derived from the address of an object or function, we may
11106 be able to fold this expression using the object or function's
11107 alignment. */
11108 if (POINTER_TYPE_P (TREE_TYPE (arg0)) && host_integerp (arg1, 1))
11110 unsigned HOST_WIDE_INT modulus, residue;
11111 unsigned HOST_WIDE_INT low = TREE_INT_CST_LOW (arg1);
11113 modulus = get_pointer_modulus_and_residue (arg0, &residue,
11114 integer_onep (arg1));
11116 /* This works because modulus is a power of 2. If this weren't the
11117 case, we'd have to replace it by its greatest power-of-2
11118 divisor: modulus & -modulus. */
11119 if (low < modulus)
11120 return build_int_cst (type, residue & low);
11123 /* Fold (X << C1) & C2 into (X << C1) & (C2 | ((1 << C1) - 1))
11124 (X >> C1) & C2 into (X >> C1) & (C2 | ~((type) -1 >> C1))
11125 if the new mask might be further optimized. */
11126 if ((TREE_CODE (arg0) == LSHIFT_EXPR
11127 || TREE_CODE (arg0) == RSHIFT_EXPR)
11128 && host_integerp (TREE_OPERAND (arg0, 1), 1)
11129 && host_integerp (arg1, TYPE_UNSIGNED (TREE_TYPE (arg1)))
11130 && tree_low_cst (TREE_OPERAND (arg0, 1), 1)
11131 < TYPE_PRECISION (TREE_TYPE (arg0))
11132 && TYPE_PRECISION (TREE_TYPE (arg0)) <= HOST_BITS_PER_WIDE_INT
11133 && tree_low_cst (TREE_OPERAND (arg0, 1), 1) > 0)
11135 unsigned int shiftc = tree_low_cst (TREE_OPERAND (arg0, 1), 1);
11136 unsigned HOST_WIDE_INT mask
11137 = tree_low_cst (arg1, TYPE_UNSIGNED (TREE_TYPE (arg1)));
11138 unsigned HOST_WIDE_INT newmask, zerobits = 0;
11139 tree shift_type = TREE_TYPE (arg0);
11141 if (TREE_CODE (arg0) == LSHIFT_EXPR)
11142 zerobits = ((((unsigned HOST_WIDE_INT) 1) << shiftc) - 1);
11143 else if (TREE_CODE (arg0) == RSHIFT_EXPR
11144 && TYPE_PRECISION (TREE_TYPE (arg0))
11145 == GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (arg0))))
11147 unsigned int prec = TYPE_PRECISION (TREE_TYPE (arg0));
11148 tree arg00 = TREE_OPERAND (arg0, 0);
11149 /* See if more bits can be proven as zero because of
11150 zero extension. */
11151 if (TREE_CODE (arg00) == NOP_EXPR
11152 && TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg00, 0))))
11154 tree inner_type = TREE_TYPE (TREE_OPERAND (arg00, 0));
11155 if (TYPE_PRECISION (inner_type)
11156 == GET_MODE_BITSIZE (TYPE_MODE (inner_type))
11157 && TYPE_PRECISION (inner_type) < prec)
11159 prec = TYPE_PRECISION (inner_type);
11160 /* See if we can shorten the right shift. */
11161 if (shiftc < prec)
11162 shift_type = inner_type;
11165 zerobits = ~(unsigned HOST_WIDE_INT) 0;
11166 zerobits >>= HOST_BITS_PER_WIDE_INT - shiftc;
11167 zerobits <<= prec - shiftc;
11168 /* For arithmetic shift if sign bit could be set, zerobits
11169 can contain actually sign bits, so no transformation is
11170 possible, unless MASK masks them all away. In that
11171 case the shift needs to be converted into logical shift. */
11172 if (!TYPE_UNSIGNED (TREE_TYPE (arg0))
11173 && prec == TYPE_PRECISION (TREE_TYPE (arg0)))
11175 if ((mask & zerobits) == 0)
11176 shift_type = unsigned_type_for (TREE_TYPE (arg0));
11177 else
11178 zerobits = 0;
11182 /* ((X << 16) & 0xff00) is (X, 0). */
11183 if ((mask & zerobits) == mask)
11184 return omit_one_operand_loc (loc, type,
11185 build_int_cst (type, 0), arg0);
11187 newmask = mask | zerobits;
11188 if (newmask != mask && (newmask & (newmask + 1)) == 0)
11190 unsigned int prec;
11192 /* Only do the transformation if NEWMASK is some integer
11193 mode's mask. */
11194 for (prec = BITS_PER_UNIT;
11195 prec < HOST_BITS_PER_WIDE_INT; prec <<= 1)
11196 if (newmask == (((unsigned HOST_WIDE_INT) 1) << prec) - 1)
11197 break;
11198 if (prec < HOST_BITS_PER_WIDE_INT
11199 || newmask == ~(unsigned HOST_WIDE_INT) 0)
11201 tree newmaskt;
11203 if (shift_type != TREE_TYPE (arg0))
11205 tem = fold_build2_loc (loc, TREE_CODE (arg0), shift_type,
11206 fold_convert_loc (loc, shift_type,
11207 TREE_OPERAND (arg0, 0)),
11208 TREE_OPERAND (arg0, 1));
11209 tem = fold_convert_loc (loc, type, tem);
11211 else
11212 tem = op0;
11213 newmaskt = build_int_cst_type (TREE_TYPE (op1), newmask);
11214 if (!tree_int_cst_equal (newmaskt, arg1))
11215 return fold_build2_loc (loc, BIT_AND_EXPR, type, tem, newmaskt);
11220 goto associate;
11222 case RDIV_EXPR:
11223 /* Don't touch a floating-point divide by zero unless the mode
11224 of the constant can represent infinity. */
11225 if (TREE_CODE (arg1) == REAL_CST
11226 && !MODE_HAS_INFINITIES (TYPE_MODE (TREE_TYPE (arg1)))
11227 && real_zerop (arg1))
11228 return NULL_TREE;
11230 /* Optimize A / A to 1.0 if we don't care about
11231 NaNs or Infinities. Skip the transformation
11232 for non-real operands. */
11233 if (SCALAR_FLOAT_TYPE_P (TREE_TYPE (arg0))
11234 && ! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0)))
11235 && ! HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (arg0)))
11236 && operand_equal_p (arg0, arg1, 0))
11238 tree r = build_real (TREE_TYPE (arg0), dconst1);
11240 return omit_two_operands_loc (loc, type, r, arg0, arg1);
11243 /* The complex version of the above A / A optimization. */
11244 if (COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0))
11245 && operand_equal_p (arg0, arg1, 0))
11247 tree elem_type = TREE_TYPE (TREE_TYPE (arg0));
11248 if (! HONOR_NANS (TYPE_MODE (elem_type))
11249 && ! HONOR_INFINITIES (TYPE_MODE (elem_type)))
11251 tree r = build_real (elem_type, dconst1);
11252 /* omit_two_operands will call fold_convert for us. */
11253 return omit_two_operands_loc (loc, type, r, arg0, arg1);
11257 /* (-A) / (-B) -> A / B */
11258 if (TREE_CODE (arg0) == NEGATE_EXPR && negate_expr_p (arg1))
11259 return fold_build2_loc (loc, RDIV_EXPR, type,
11260 TREE_OPERAND (arg0, 0),
11261 negate_expr (arg1));
11262 if (TREE_CODE (arg1) == NEGATE_EXPR && negate_expr_p (arg0))
11263 return fold_build2_loc (loc, RDIV_EXPR, type,
11264 negate_expr (arg0),
11265 TREE_OPERAND (arg1, 0));
11267 /* In IEEE floating point, x/1 is not equivalent to x for snans. */
11268 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
11269 && real_onep (arg1))
11270 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
11272 /* In IEEE floating point, x/-1 is not equivalent to -x for snans. */
11273 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
11274 && real_minus_onep (arg1))
11275 return non_lvalue_loc (loc, fold_convert_loc (loc, type,
11276 negate_expr (arg0)));
11278 /* If ARG1 is a constant, we can convert this to a multiply by the
11279 reciprocal. This does not have the same rounding properties,
11280 so only do this if -freciprocal-math. We can actually
11281 always safely do it if ARG1 is a power of two, but it's hard to
11282 tell if it is or not in a portable manner. */
11283 if (TREE_CODE (arg1) == REAL_CST)
11285 if (flag_reciprocal_math
11286 && 0 != (tem = const_binop (code, build_real (type, dconst1),
11287 arg1)))
11288 return fold_build2_loc (loc, MULT_EXPR, type, arg0, tem);
11289 /* Find the reciprocal if optimizing and the result is exact. */
11290 if (optimize)
11292 REAL_VALUE_TYPE r;
11293 r = TREE_REAL_CST (arg1);
11294 if (exact_real_inverse (TYPE_MODE(TREE_TYPE(arg0)), &r))
11296 tem = build_real (type, r);
11297 return fold_build2_loc (loc, MULT_EXPR, type,
11298 fold_convert_loc (loc, type, arg0), tem);
11302 /* Convert A/B/C to A/(B*C). */
11303 if (flag_reciprocal_math
11304 && TREE_CODE (arg0) == RDIV_EXPR)
11305 return fold_build2_loc (loc, RDIV_EXPR, type, TREE_OPERAND (arg0, 0),
11306 fold_build2_loc (loc, MULT_EXPR, type,
11307 TREE_OPERAND (arg0, 1), arg1));
11309 /* Convert A/(B/C) to (A/B)*C. */
11310 if (flag_reciprocal_math
11311 && TREE_CODE (arg1) == RDIV_EXPR)
11312 return fold_build2_loc (loc, MULT_EXPR, type,
11313 fold_build2_loc (loc, RDIV_EXPR, type, arg0,
11314 TREE_OPERAND (arg1, 0)),
11315 TREE_OPERAND (arg1, 1));
11317 /* Convert C1/(X*C2) into (C1/C2)/X. */
11318 if (flag_reciprocal_math
11319 && TREE_CODE (arg1) == MULT_EXPR
11320 && TREE_CODE (arg0) == REAL_CST
11321 && TREE_CODE (TREE_OPERAND (arg1, 1)) == REAL_CST)
11323 tree tem = const_binop (RDIV_EXPR, arg0,
11324 TREE_OPERAND (arg1, 1));
11325 if (tem)
11326 return fold_build2_loc (loc, RDIV_EXPR, type, tem,
11327 TREE_OPERAND (arg1, 0));
11330 if (flag_unsafe_math_optimizations)
11332 enum built_in_function fcode0 = builtin_mathfn_code (arg0);
11333 enum built_in_function fcode1 = builtin_mathfn_code (arg1);
11335 /* Optimize sin(x)/cos(x) as tan(x). */
11336 if (((fcode0 == BUILT_IN_SIN && fcode1 == BUILT_IN_COS)
11337 || (fcode0 == BUILT_IN_SINF && fcode1 == BUILT_IN_COSF)
11338 || (fcode0 == BUILT_IN_SINL && fcode1 == BUILT_IN_COSL))
11339 && operand_equal_p (CALL_EXPR_ARG (arg0, 0),
11340 CALL_EXPR_ARG (arg1, 0), 0))
11342 tree tanfn = mathfn_built_in (type, BUILT_IN_TAN);
11344 if (tanfn != NULL_TREE)
11345 return build_call_expr_loc (loc, tanfn, 1, CALL_EXPR_ARG (arg0, 0));
11348 /* Optimize cos(x)/sin(x) as 1.0/tan(x). */
11349 if (((fcode0 == BUILT_IN_COS && fcode1 == BUILT_IN_SIN)
11350 || (fcode0 == BUILT_IN_COSF && fcode1 == BUILT_IN_SINF)
11351 || (fcode0 == BUILT_IN_COSL && fcode1 == BUILT_IN_SINL))
11352 && operand_equal_p (CALL_EXPR_ARG (arg0, 0),
11353 CALL_EXPR_ARG (arg1, 0), 0))
11355 tree tanfn = mathfn_built_in (type, BUILT_IN_TAN);
11357 if (tanfn != NULL_TREE)
11359 tree tmp = build_call_expr_loc (loc, tanfn, 1,
11360 CALL_EXPR_ARG (arg0, 0));
11361 return fold_build2_loc (loc, RDIV_EXPR, type,
11362 build_real (type, dconst1), tmp);
11366 /* Optimize sin(x)/tan(x) as cos(x) if we don't care about
11367 NaNs or Infinities. */
11368 if (((fcode0 == BUILT_IN_SIN && fcode1 == BUILT_IN_TAN)
11369 || (fcode0 == BUILT_IN_SINF && fcode1 == BUILT_IN_TANF)
11370 || (fcode0 == BUILT_IN_SINL && fcode1 == BUILT_IN_TANL)))
11372 tree arg00 = CALL_EXPR_ARG (arg0, 0);
11373 tree arg01 = CALL_EXPR_ARG (arg1, 0);
11375 if (! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg00)))
11376 && ! HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (arg00)))
11377 && operand_equal_p (arg00, arg01, 0))
11379 tree cosfn = mathfn_built_in (type, BUILT_IN_COS);
11381 if (cosfn != NULL_TREE)
11382 return build_call_expr_loc (loc, cosfn, 1, arg00);
11386 /* Optimize tan(x)/sin(x) as 1.0/cos(x) if we don't care about
11387 NaNs or Infinities. */
11388 if (((fcode0 == BUILT_IN_TAN && fcode1 == BUILT_IN_SIN)
11389 || (fcode0 == BUILT_IN_TANF && fcode1 == BUILT_IN_SINF)
11390 || (fcode0 == BUILT_IN_TANL && fcode1 == BUILT_IN_SINL)))
11392 tree arg00 = CALL_EXPR_ARG (arg0, 0);
11393 tree arg01 = CALL_EXPR_ARG (arg1, 0);
11395 if (! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg00)))
11396 && ! HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (arg00)))
11397 && operand_equal_p (arg00, arg01, 0))
11399 tree cosfn = mathfn_built_in (type, BUILT_IN_COS);
11401 if (cosfn != NULL_TREE)
11403 tree tmp = build_call_expr_loc (loc, cosfn, 1, arg00);
11404 return fold_build2_loc (loc, RDIV_EXPR, type,
11405 build_real (type, dconst1),
11406 tmp);
11411 /* Optimize pow(x,c)/x as pow(x,c-1). */
11412 if (fcode0 == BUILT_IN_POW
11413 || fcode0 == BUILT_IN_POWF
11414 || fcode0 == BUILT_IN_POWL)
11416 tree arg00 = CALL_EXPR_ARG (arg0, 0);
11417 tree arg01 = CALL_EXPR_ARG (arg0, 1);
11418 if (TREE_CODE (arg01) == REAL_CST
11419 && !TREE_OVERFLOW (arg01)
11420 && operand_equal_p (arg1, arg00, 0))
11422 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
11423 REAL_VALUE_TYPE c;
11424 tree arg;
11426 c = TREE_REAL_CST (arg01);
11427 real_arithmetic (&c, MINUS_EXPR, &c, &dconst1);
11428 arg = build_real (type, c);
11429 return build_call_expr_loc (loc, powfn, 2, arg1, arg);
11433 /* Optimize a/root(b/c) into a*root(c/b). */
11434 if (BUILTIN_ROOT_P (fcode1))
11436 tree rootarg = CALL_EXPR_ARG (arg1, 0);
11438 if (TREE_CODE (rootarg) == RDIV_EXPR)
11440 tree rootfn = TREE_OPERAND (CALL_EXPR_FN (arg1), 0);
11441 tree b = TREE_OPERAND (rootarg, 0);
11442 tree c = TREE_OPERAND (rootarg, 1);
11444 tree tmp = fold_build2_loc (loc, RDIV_EXPR, type, c, b);
11446 tmp = build_call_expr_loc (loc, rootfn, 1, tmp);
11447 return fold_build2_loc (loc, MULT_EXPR, type, arg0, tmp);
11451 /* Optimize x/expN(y) into x*expN(-y). */
11452 if (BUILTIN_EXPONENT_P (fcode1))
11454 tree expfn = TREE_OPERAND (CALL_EXPR_FN (arg1), 0);
11455 tree arg = negate_expr (CALL_EXPR_ARG (arg1, 0));
11456 arg1 = build_call_expr_loc (loc,
11457 expfn, 1,
11458 fold_convert_loc (loc, type, arg));
11459 return fold_build2_loc (loc, MULT_EXPR, type, arg0, arg1);
11462 /* Optimize x/pow(y,z) into x*pow(y,-z). */
11463 if (fcode1 == BUILT_IN_POW
11464 || fcode1 == BUILT_IN_POWF
11465 || fcode1 == BUILT_IN_POWL)
11467 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg1), 0);
11468 tree arg10 = CALL_EXPR_ARG (arg1, 0);
11469 tree arg11 = CALL_EXPR_ARG (arg1, 1);
11470 tree neg11 = fold_convert_loc (loc, type,
11471 negate_expr (arg11));
11472 arg1 = build_call_expr_loc (loc, powfn, 2, arg10, neg11);
11473 return fold_build2_loc (loc, MULT_EXPR, type, arg0, arg1);
11476 return NULL_TREE;
11478 case TRUNC_DIV_EXPR:
11479 case FLOOR_DIV_EXPR:
11480 /* Simplify A / (B << N) where A and B are positive and B is
11481 a power of 2, to A >> (N + log2(B)). */
11482 strict_overflow_p = false;
11483 if (TREE_CODE (arg1) == LSHIFT_EXPR
11484 && (TYPE_UNSIGNED (type)
11485 || tree_expr_nonnegative_warnv_p (op0, &strict_overflow_p)))
11487 tree sval = TREE_OPERAND (arg1, 0);
11488 if (integer_pow2p (sval) && tree_int_cst_sgn (sval) > 0)
11490 tree sh_cnt = TREE_OPERAND (arg1, 1);
11491 unsigned long pow2 = exact_log2 (TREE_INT_CST_LOW (sval));
11493 if (strict_overflow_p)
11494 fold_overflow_warning (("assuming signed overflow does not "
11495 "occur when simplifying A / (B << N)"),
11496 WARN_STRICT_OVERFLOW_MISC);
11498 sh_cnt = fold_build2_loc (loc, PLUS_EXPR, TREE_TYPE (sh_cnt),
11499 sh_cnt, build_int_cst (NULL_TREE, pow2));
11500 return fold_build2_loc (loc, RSHIFT_EXPR, type,
11501 fold_convert_loc (loc, type, arg0), sh_cnt);
11505 /* For unsigned integral types, FLOOR_DIV_EXPR is the same as
11506 TRUNC_DIV_EXPR. Rewrite into the latter in this case. */
11507 if (INTEGRAL_TYPE_P (type)
11508 && TYPE_UNSIGNED (type)
11509 && code == FLOOR_DIV_EXPR)
11510 return fold_build2_loc (loc, TRUNC_DIV_EXPR, type, op0, op1);
11512 /* Fall thru */
11514 case ROUND_DIV_EXPR:
11515 case CEIL_DIV_EXPR:
11516 case EXACT_DIV_EXPR:
11517 if (integer_onep (arg1))
11518 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
11519 if (integer_zerop (arg1))
11520 return NULL_TREE;
11521 /* X / -1 is -X. */
11522 if (!TYPE_UNSIGNED (type)
11523 && TREE_CODE (arg1) == INTEGER_CST
11524 && TREE_INT_CST_LOW (arg1) == (unsigned HOST_WIDE_INT) -1
11525 && TREE_INT_CST_HIGH (arg1) == -1)
11526 return fold_convert_loc (loc, type, negate_expr (arg0));
11528 /* Convert -A / -B to A / B when the type is signed and overflow is
11529 undefined. */
11530 if ((!INTEGRAL_TYPE_P (type) || TYPE_OVERFLOW_UNDEFINED (type))
11531 && TREE_CODE (arg0) == NEGATE_EXPR
11532 && negate_expr_p (arg1))
11534 if (INTEGRAL_TYPE_P (type))
11535 fold_overflow_warning (("assuming signed overflow does not occur "
11536 "when distributing negation across "
11537 "division"),
11538 WARN_STRICT_OVERFLOW_MISC);
11539 return fold_build2_loc (loc, code, type,
11540 fold_convert_loc (loc, type,
11541 TREE_OPERAND (arg0, 0)),
11542 fold_convert_loc (loc, type,
11543 negate_expr (arg1)));
11545 if ((!INTEGRAL_TYPE_P (type) || TYPE_OVERFLOW_UNDEFINED (type))
11546 && TREE_CODE (arg1) == NEGATE_EXPR
11547 && negate_expr_p (arg0))
11549 if (INTEGRAL_TYPE_P (type))
11550 fold_overflow_warning (("assuming signed overflow does not occur "
11551 "when distributing negation across "
11552 "division"),
11553 WARN_STRICT_OVERFLOW_MISC);
11554 return fold_build2_loc (loc, code, type,
11555 fold_convert_loc (loc, type,
11556 negate_expr (arg0)),
11557 fold_convert_loc (loc, type,
11558 TREE_OPERAND (arg1, 0)));
11561 /* If arg0 is a multiple of arg1, then rewrite to the fastest div
11562 operation, EXACT_DIV_EXPR.
11564 Note that only CEIL_DIV_EXPR and FLOOR_DIV_EXPR are rewritten now.
11565 At one time others generated faster code, it's not clear if they do
11566 after the last round to changes to the DIV code in expmed.c. */
11567 if ((code == CEIL_DIV_EXPR || code == FLOOR_DIV_EXPR)
11568 && multiple_of_p (type, arg0, arg1))
11569 return fold_build2_loc (loc, EXACT_DIV_EXPR, type, arg0, arg1);
11571 strict_overflow_p = false;
11572 if (TREE_CODE (arg1) == INTEGER_CST
11573 && 0 != (tem = extract_muldiv (op0, arg1, code, NULL_TREE,
11574 &strict_overflow_p)))
11576 if (strict_overflow_p)
11577 fold_overflow_warning (("assuming signed overflow does not occur "
11578 "when simplifying division"),
11579 WARN_STRICT_OVERFLOW_MISC);
11580 return fold_convert_loc (loc, type, tem);
11583 return NULL_TREE;
11585 case CEIL_MOD_EXPR:
11586 case FLOOR_MOD_EXPR:
11587 case ROUND_MOD_EXPR:
11588 case TRUNC_MOD_EXPR:
11589 /* X % 1 is always zero, but be sure to preserve any side
11590 effects in X. */
11591 if (integer_onep (arg1))
11592 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
11594 /* X % 0, return X % 0 unchanged so that we can get the
11595 proper warnings and errors. */
11596 if (integer_zerop (arg1))
11597 return NULL_TREE;
11599 /* 0 % X is always zero, but be sure to preserve any side
11600 effects in X. Place this after checking for X == 0. */
11601 if (integer_zerop (arg0))
11602 return omit_one_operand_loc (loc, type, integer_zero_node, arg1);
11604 /* X % -1 is zero. */
11605 if (!TYPE_UNSIGNED (type)
11606 && TREE_CODE (arg1) == INTEGER_CST
11607 && TREE_INT_CST_LOW (arg1) == (unsigned HOST_WIDE_INT) -1
11608 && TREE_INT_CST_HIGH (arg1) == -1)
11609 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
11611 /* X % -C is the same as X % C. */
11612 if (code == TRUNC_MOD_EXPR
11613 && !TYPE_UNSIGNED (type)
11614 && TREE_CODE (arg1) == INTEGER_CST
11615 && !TREE_OVERFLOW (arg1)
11616 && TREE_INT_CST_HIGH (arg1) < 0
11617 && !TYPE_OVERFLOW_TRAPS (type)
11618 /* Avoid this transformation if C is INT_MIN, i.e. C == -C. */
11619 && !sign_bit_p (arg1, arg1))
11620 return fold_build2_loc (loc, code, type,
11621 fold_convert_loc (loc, type, arg0),
11622 fold_convert_loc (loc, type,
11623 negate_expr (arg1)));
11625 /* X % -Y is the same as X % Y. */
11626 if (code == TRUNC_MOD_EXPR
11627 && !TYPE_UNSIGNED (type)
11628 && TREE_CODE (arg1) == NEGATE_EXPR
11629 && !TYPE_OVERFLOW_TRAPS (type))
11630 return fold_build2_loc (loc, code, type, fold_convert_loc (loc, type, arg0),
11631 fold_convert_loc (loc, type,
11632 TREE_OPERAND (arg1, 0)));
11634 strict_overflow_p = false;
11635 if (TREE_CODE (arg1) == INTEGER_CST
11636 && 0 != (tem = extract_muldiv (op0, arg1, code, NULL_TREE,
11637 &strict_overflow_p)))
11639 if (strict_overflow_p)
11640 fold_overflow_warning (("assuming signed overflow does not occur "
11641 "when simplifying modulus"),
11642 WARN_STRICT_OVERFLOW_MISC);
11643 return fold_convert_loc (loc, type, tem);
11646 /* Optimize TRUNC_MOD_EXPR by a power of two into a BIT_AND_EXPR,
11647 i.e. "X % C" into "X & (C - 1)", if X and C are positive. */
11648 if ((code == TRUNC_MOD_EXPR || code == FLOOR_MOD_EXPR)
11649 && (TYPE_UNSIGNED (type)
11650 || tree_expr_nonnegative_warnv_p (op0, &strict_overflow_p)))
11652 tree c = arg1;
11653 /* Also optimize A % (C << N) where C is a power of 2,
11654 to A & ((C << N) - 1). */
11655 if (TREE_CODE (arg1) == LSHIFT_EXPR)
11656 c = TREE_OPERAND (arg1, 0);
11658 if (integer_pow2p (c) && tree_int_cst_sgn (c) > 0)
11660 tree mask
11661 = fold_build2_loc (loc, MINUS_EXPR, TREE_TYPE (arg1), arg1,
11662 build_int_cst (TREE_TYPE (arg1), 1));
11663 if (strict_overflow_p)
11664 fold_overflow_warning (("assuming signed overflow does not "
11665 "occur when simplifying "
11666 "X % (power of two)"),
11667 WARN_STRICT_OVERFLOW_MISC);
11668 return fold_build2_loc (loc, BIT_AND_EXPR, type,
11669 fold_convert_loc (loc, type, arg0),
11670 fold_convert_loc (loc, type, mask));
11674 return NULL_TREE;
11676 case LROTATE_EXPR:
11677 case RROTATE_EXPR:
11678 if (integer_all_onesp (arg0))
11679 return omit_one_operand_loc (loc, type, arg0, arg1);
11680 goto shift;
11682 case RSHIFT_EXPR:
11683 /* Optimize -1 >> x for arithmetic right shifts. */
11684 if (integer_all_onesp (arg0) && !TYPE_UNSIGNED (type)
11685 && tree_expr_nonnegative_p (arg1))
11686 return omit_one_operand_loc (loc, type, arg0, arg1);
11687 /* ... fall through ... */
11689 case LSHIFT_EXPR:
11690 shift:
11691 if (integer_zerop (arg1))
11692 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
11693 if (integer_zerop (arg0))
11694 return omit_one_operand_loc (loc, type, arg0, arg1);
11696 /* Since negative shift count is not well-defined,
11697 don't try to compute it in the compiler. */
11698 if (TREE_CODE (arg1) == INTEGER_CST && tree_int_cst_sgn (arg1) < 0)
11699 return NULL_TREE;
11701 /* Turn (a OP c1) OP c2 into a OP (c1+c2). */
11702 if (TREE_CODE (op0) == code && host_integerp (arg1, false)
11703 && TREE_INT_CST_LOW (arg1) < TYPE_PRECISION (type)
11704 && host_integerp (TREE_OPERAND (arg0, 1), false)
11705 && TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1)) < TYPE_PRECISION (type))
11707 HOST_WIDE_INT low = (TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1))
11708 + TREE_INT_CST_LOW (arg1));
11710 /* Deal with a OP (c1 + c2) being undefined but (a OP c1) OP c2
11711 being well defined. */
11712 if (low >= TYPE_PRECISION (type))
11714 if (code == LROTATE_EXPR || code == RROTATE_EXPR)
11715 low = low % TYPE_PRECISION (type);
11716 else if (TYPE_UNSIGNED (type) || code == LSHIFT_EXPR)
11717 return omit_one_operand_loc (loc, type, build_int_cst (type, 0),
11718 TREE_OPERAND (arg0, 0));
11719 else
11720 low = TYPE_PRECISION (type) - 1;
11723 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0),
11724 build_int_cst (type, low));
11727 /* Transform (x >> c) << c into x & (-1<<c), or transform (x << c) >> c
11728 into x & ((unsigned)-1 >> c) for unsigned types. */
11729 if (((code == LSHIFT_EXPR && TREE_CODE (arg0) == RSHIFT_EXPR)
11730 || (TYPE_UNSIGNED (type)
11731 && code == RSHIFT_EXPR && TREE_CODE (arg0) == LSHIFT_EXPR))
11732 && host_integerp (arg1, false)
11733 && TREE_INT_CST_LOW (arg1) < TYPE_PRECISION (type)
11734 && host_integerp (TREE_OPERAND (arg0, 1), false)
11735 && TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1)) < TYPE_PRECISION (type))
11737 HOST_WIDE_INT low0 = TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1));
11738 HOST_WIDE_INT low1 = TREE_INT_CST_LOW (arg1);
11739 tree lshift;
11740 tree arg00;
11742 if (low0 == low1)
11744 arg00 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
11746 lshift = build_int_cst (type, -1);
11747 lshift = int_const_binop (code, lshift, arg1, 0);
11749 return fold_build2_loc (loc, BIT_AND_EXPR, type, arg00, lshift);
11753 /* Rewrite an LROTATE_EXPR by a constant into an
11754 RROTATE_EXPR by a new constant. */
11755 if (code == LROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST)
11757 tree tem = build_int_cst (TREE_TYPE (arg1),
11758 TYPE_PRECISION (type));
11759 tem = const_binop (MINUS_EXPR, tem, arg1);
11760 return fold_build2_loc (loc, RROTATE_EXPR, type, op0, tem);
11763 /* If we have a rotate of a bit operation with the rotate count and
11764 the second operand of the bit operation both constant,
11765 permute the two operations. */
11766 if (code == RROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST
11767 && (TREE_CODE (arg0) == BIT_AND_EXPR
11768 || TREE_CODE (arg0) == BIT_IOR_EXPR
11769 || TREE_CODE (arg0) == BIT_XOR_EXPR)
11770 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
11771 return fold_build2_loc (loc, TREE_CODE (arg0), type,
11772 fold_build2_loc (loc, code, type,
11773 TREE_OPERAND (arg0, 0), arg1),
11774 fold_build2_loc (loc, code, type,
11775 TREE_OPERAND (arg0, 1), arg1));
11777 /* Two consecutive rotates adding up to the precision of the
11778 type can be ignored. */
11779 if (code == RROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST
11780 && TREE_CODE (arg0) == RROTATE_EXPR
11781 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
11782 && TREE_INT_CST_HIGH (arg1) == 0
11783 && TREE_INT_CST_HIGH (TREE_OPERAND (arg0, 1)) == 0
11784 && ((TREE_INT_CST_LOW (arg1)
11785 + TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1)))
11786 == (unsigned int) TYPE_PRECISION (type)))
11787 return TREE_OPERAND (arg0, 0);
11789 /* Fold (X & C2) << C1 into (X << C1) & (C2 << C1)
11790 (X & C2) >> C1 into (X >> C1) & (C2 >> C1)
11791 if the latter can be further optimized. */
11792 if ((code == LSHIFT_EXPR || code == RSHIFT_EXPR)
11793 && TREE_CODE (arg0) == BIT_AND_EXPR
11794 && TREE_CODE (arg1) == INTEGER_CST
11795 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
11797 tree mask = fold_build2_loc (loc, code, type,
11798 fold_convert_loc (loc, type,
11799 TREE_OPERAND (arg0, 1)),
11800 arg1);
11801 tree shift = fold_build2_loc (loc, code, type,
11802 fold_convert_loc (loc, type,
11803 TREE_OPERAND (arg0, 0)),
11804 arg1);
11805 tem = fold_binary_loc (loc, BIT_AND_EXPR, type, shift, mask);
11806 if (tem)
11807 return tem;
11810 return NULL_TREE;
11812 case MIN_EXPR:
11813 if (operand_equal_p (arg0, arg1, 0))
11814 return omit_one_operand_loc (loc, type, arg0, arg1);
11815 if (INTEGRAL_TYPE_P (type)
11816 && operand_equal_p (arg1, TYPE_MIN_VALUE (type), OEP_ONLY_CONST))
11817 return omit_one_operand_loc (loc, type, arg1, arg0);
11818 tem = fold_minmax (loc, MIN_EXPR, type, arg0, arg1);
11819 if (tem)
11820 return tem;
11821 goto associate;
11823 case MAX_EXPR:
11824 if (operand_equal_p (arg0, arg1, 0))
11825 return omit_one_operand_loc (loc, type, arg0, arg1);
11826 if (INTEGRAL_TYPE_P (type)
11827 && TYPE_MAX_VALUE (type)
11828 && operand_equal_p (arg1, TYPE_MAX_VALUE (type), OEP_ONLY_CONST))
11829 return omit_one_operand_loc (loc, type, arg1, arg0);
11830 tem = fold_minmax (loc, MAX_EXPR, type, arg0, arg1);
11831 if (tem)
11832 return tem;
11833 goto associate;
11835 case TRUTH_ANDIF_EXPR:
11836 /* Note that the operands of this must be ints
11837 and their values must be 0 or 1.
11838 ("true" is a fixed value perhaps depending on the language.) */
11839 /* If first arg is constant zero, return it. */
11840 if (integer_zerop (arg0))
11841 return fold_convert_loc (loc, type, arg0);
11842 case TRUTH_AND_EXPR:
11843 /* If either arg is constant true, drop it. */
11844 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
11845 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
11846 if (TREE_CODE (arg1) == INTEGER_CST && ! integer_zerop (arg1)
11847 /* Preserve sequence points. */
11848 && (code != TRUTH_ANDIF_EXPR || ! TREE_SIDE_EFFECTS (arg0)))
11849 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
11850 /* If second arg is constant zero, result is zero, but first arg
11851 must be evaluated. */
11852 if (integer_zerop (arg1))
11853 return omit_one_operand_loc (loc, type, arg1, arg0);
11854 /* Likewise for first arg, but note that only the TRUTH_AND_EXPR
11855 case will be handled here. */
11856 if (integer_zerop (arg0))
11857 return omit_one_operand_loc (loc, type, arg0, arg1);
11859 /* !X && X is always false. */
11860 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
11861 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
11862 return omit_one_operand_loc (loc, type, integer_zero_node, arg1);
11863 /* X && !X is always false. */
11864 if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
11865 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
11866 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
11868 /* A < X && A + 1 > Y ==> A < X && A >= Y. Normally A + 1 > Y
11869 means A >= Y && A != MAX, but in this case we know that
11870 A < X <= MAX. */
11872 if (!TREE_SIDE_EFFECTS (arg0)
11873 && !TREE_SIDE_EFFECTS (arg1))
11875 tem = fold_to_nonsharp_ineq_using_bound (loc, arg0, arg1);
11876 if (tem && !operand_equal_p (tem, arg0, 0))
11877 return fold_build2_loc (loc, code, type, tem, arg1);
11879 tem = fold_to_nonsharp_ineq_using_bound (loc, arg1, arg0);
11880 if (tem && !operand_equal_p (tem, arg1, 0))
11881 return fold_build2_loc (loc, code, type, arg0, tem);
11884 truth_andor:
11885 /* We only do these simplifications if we are optimizing. */
11886 if (!optimize)
11887 return NULL_TREE;
11889 /* Check for things like (A || B) && (A || C). We can convert this
11890 to A || (B && C). Note that either operator can be any of the four
11891 truth and/or operations and the transformation will still be
11892 valid. Also note that we only care about order for the
11893 ANDIF and ORIF operators. If B contains side effects, this
11894 might change the truth-value of A. */
11895 if (TREE_CODE (arg0) == TREE_CODE (arg1)
11896 && (TREE_CODE (arg0) == TRUTH_ANDIF_EXPR
11897 || TREE_CODE (arg0) == TRUTH_ORIF_EXPR
11898 || TREE_CODE (arg0) == TRUTH_AND_EXPR
11899 || TREE_CODE (arg0) == TRUTH_OR_EXPR)
11900 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (arg0, 1)))
11902 tree a00 = TREE_OPERAND (arg0, 0);
11903 tree a01 = TREE_OPERAND (arg0, 1);
11904 tree a10 = TREE_OPERAND (arg1, 0);
11905 tree a11 = TREE_OPERAND (arg1, 1);
11906 int commutative = ((TREE_CODE (arg0) == TRUTH_OR_EXPR
11907 || TREE_CODE (arg0) == TRUTH_AND_EXPR)
11908 && (code == TRUTH_AND_EXPR
11909 || code == TRUTH_OR_EXPR));
11911 if (operand_equal_p (a00, a10, 0))
11912 return fold_build2_loc (loc, TREE_CODE (arg0), type, a00,
11913 fold_build2_loc (loc, code, type, a01, a11));
11914 else if (commutative && operand_equal_p (a00, a11, 0))
11915 return fold_build2_loc (loc, TREE_CODE (arg0), type, a00,
11916 fold_build2_loc (loc, code, type, a01, a10));
11917 else if (commutative && operand_equal_p (a01, a10, 0))
11918 return fold_build2_loc (loc, TREE_CODE (arg0), type, a01,
11919 fold_build2_loc (loc, code, type, a00, a11));
11921 /* This case if tricky because we must either have commutative
11922 operators or else A10 must not have side-effects. */
11924 else if ((commutative || ! TREE_SIDE_EFFECTS (a10))
11925 && operand_equal_p (a01, a11, 0))
11926 return fold_build2_loc (loc, TREE_CODE (arg0), type,
11927 fold_build2_loc (loc, code, type, a00, a10),
11928 a01);
11931 /* See if we can build a range comparison. */
11932 if (0 != (tem = fold_range_test (loc, code, type, op0, op1)))
11933 return tem;
11935 if ((code == TRUTH_ANDIF_EXPR && TREE_CODE (arg0) == TRUTH_ORIF_EXPR)
11936 || (code == TRUTH_ORIF_EXPR && TREE_CODE (arg0) == TRUTH_ANDIF_EXPR))
11938 tem = merge_truthop_with_opposite_arm (loc, arg0, arg1, true);
11939 if (tem)
11940 return fold_build2_loc (loc, code, type, tem, arg1);
11943 if ((code == TRUTH_ANDIF_EXPR && TREE_CODE (arg1) == TRUTH_ORIF_EXPR)
11944 || (code == TRUTH_ORIF_EXPR && TREE_CODE (arg1) == TRUTH_ANDIF_EXPR))
11946 tem = merge_truthop_with_opposite_arm (loc, arg1, arg0, false);
11947 if (tem)
11948 return fold_build2_loc (loc, code, type, arg0, tem);
11951 /* Check for the possibility of merging component references. If our
11952 lhs is another similar operation, try to merge its rhs with our
11953 rhs. Then try to merge our lhs and rhs. */
11954 if (TREE_CODE (arg0) == code
11955 && 0 != (tem = fold_truthop (loc, code, type,
11956 TREE_OPERAND (arg0, 1), arg1)))
11957 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0), tem);
11959 if ((tem = fold_truthop (loc, code, type, arg0, arg1)) != 0)
11960 return tem;
11962 return NULL_TREE;
11964 case TRUTH_ORIF_EXPR:
11965 /* Note that the operands of this must be ints
11966 and their values must be 0 or true.
11967 ("true" is a fixed value perhaps depending on the language.) */
11968 /* If first arg is constant true, return it. */
11969 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
11970 return fold_convert_loc (loc, type, arg0);
11971 case TRUTH_OR_EXPR:
11972 /* If either arg is constant zero, drop it. */
11973 if (TREE_CODE (arg0) == INTEGER_CST && integer_zerop (arg0))
11974 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
11975 if (TREE_CODE (arg1) == INTEGER_CST && integer_zerop (arg1)
11976 /* Preserve sequence points. */
11977 && (code != TRUTH_ORIF_EXPR || ! TREE_SIDE_EFFECTS (arg0)))
11978 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
11979 /* If second arg is constant true, result is true, but we must
11980 evaluate first arg. */
11981 if (TREE_CODE (arg1) == INTEGER_CST && ! integer_zerop (arg1))
11982 return omit_one_operand_loc (loc, type, arg1, arg0);
11983 /* Likewise for first arg, but note this only occurs here for
11984 TRUTH_OR_EXPR. */
11985 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
11986 return omit_one_operand_loc (loc, type, arg0, arg1);
11988 /* !X || X is always true. */
11989 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
11990 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
11991 return omit_one_operand_loc (loc, type, integer_one_node, arg1);
11992 /* X || !X is always true. */
11993 if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
11994 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
11995 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
11997 goto truth_andor;
11999 case TRUTH_XOR_EXPR:
12000 /* If the second arg is constant zero, drop it. */
12001 if (integer_zerop (arg1))
12002 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
12003 /* If the second arg is constant true, this is a logical inversion. */
12004 if (integer_onep (arg1))
12006 /* Only call invert_truthvalue if operand is a truth value. */
12007 if (TREE_CODE (TREE_TYPE (arg0)) != BOOLEAN_TYPE)
12008 tem = fold_build1_loc (loc, TRUTH_NOT_EXPR, TREE_TYPE (arg0), arg0);
12009 else
12010 tem = invert_truthvalue_loc (loc, arg0);
12011 return non_lvalue_loc (loc, fold_convert_loc (loc, type, tem));
12013 /* Identical arguments cancel to zero. */
12014 if (operand_equal_p (arg0, arg1, 0))
12015 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
12017 /* !X ^ X is always true. */
12018 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
12019 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
12020 return omit_one_operand_loc (loc, type, integer_one_node, arg1);
12022 /* X ^ !X is always true. */
12023 if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
12024 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
12025 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
12027 return NULL_TREE;
12029 case EQ_EXPR:
12030 case NE_EXPR:
12031 tem = fold_comparison (loc, code, type, op0, op1);
12032 if (tem != NULL_TREE)
12033 return tem;
12035 /* bool_var != 0 becomes bool_var. */
12036 if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_zerop (arg1)
12037 && code == NE_EXPR)
12038 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
12040 /* bool_var == 1 becomes bool_var. */
12041 if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_onep (arg1)
12042 && code == EQ_EXPR)
12043 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
12045 /* bool_var != 1 becomes !bool_var. */
12046 if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_onep (arg1)
12047 && code == NE_EXPR)
12048 return fold_build1_loc (loc, TRUTH_NOT_EXPR, type,
12049 fold_convert_loc (loc, type, arg0));
12051 /* bool_var == 0 becomes !bool_var. */
12052 if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_zerop (arg1)
12053 && code == EQ_EXPR)
12054 return fold_build1_loc (loc, TRUTH_NOT_EXPR, type,
12055 fold_convert_loc (loc, type, arg0));
12057 /* !exp != 0 becomes !exp */
12058 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR && integer_zerop (arg1)
12059 && code == NE_EXPR)
12060 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
12062 /* If this is an equality comparison of the address of two non-weak,
12063 unaliased symbols neither of which are extern (since we do not
12064 have access to attributes for externs), then we know the result. */
12065 if (TREE_CODE (arg0) == ADDR_EXPR
12066 && VAR_OR_FUNCTION_DECL_P (TREE_OPERAND (arg0, 0))
12067 && ! DECL_WEAK (TREE_OPERAND (arg0, 0))
12068 && ! lookup_attribute ("alias",
12069 DECL_ATTRIBUTES (TREE_OPERAND (arg0, 0)))
12070 && ! DECL_EXTERNAL (TREE_OPERAND (arg0, 0))
12071 && TREE_CODE (arg1) == ADDR_EXPR
12072 && VAR_OR_FUNCTION_DECL_P (TREE_OPERAND (arg1, 0))
12073 && ! DECL_WEAK (TREE_OPERAND (arg1, 0))
12074 && ! lookup_attribute ("alias",
12075 DECL_ATTRIBUTES (TREE_OPERAND (arg1, 0)))
12076 && ! DECL_EXTERNAL (TREE_OPERAND (arg1, 0)))
12078 /* We know that we're looking at the address of two
12079 non-weak, unaliased, static _DECL nodes.
12081 It is both wasteful and incorrect to call operand_equal_p
12082 to compare the two ADDR_EXPR nodes. It is wasteful in that
12083 all we need to do is test pointer equality for the arguments
12084 to the two ADDR_EXPR nodes. It is incorrect to use
12085 operand_equal_p as that function is NOT equivalent to a
12086 C equality test. It can in fact return false for two
12087 objects which would test as equal using the C equality
12088 operator. */
12089 bool equal = TREE_OPERAND (arg0, 0) == TREE_OPERAND (arg1, 0);
12090 return constant_boolean_node (equal
12091 ? code == EQ_EXPR : code != EQ_EXPR,
12092 type);
12095 /* If this is an EQ or NE comparison of a constant with a PLUS_EXPR or
12096 a MINUS_EXPR of a constant, we can convert it into a comparison with
12097 a revised constant as long as no overflow occurs. */
12098 if (TREE_CODE (arg1) == INTEGER_CST
12099 && (TREE_CODE (arg0) == PLUS_EXPR
12100 || TREE_CODE (arg0) == MINUS_EXPR)
12101 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
12102 && 0 != (tem = const_binop (TREE_CODE (arg0) == PLUS_EXPR
12103 ? MINUS_EXPR : PLUS_EXPR,
12104 fold_convert_loc (loc, TREE_TYPE (arg0),
12105 arg1),
12106 TREE_OPERAND (arg0, 1)))
12107 && !TREE_OVERFLOW (tem))
12108 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0), tem);
12110 /* Similarly for a NEGATE_EXPR. */
12111 if (TREE_CODE (arg0) == NEGATE_EXPR
12112 && TREE_CODE (arg1) == INTEGER_CST
12113 && 0 != (tem = negate_expr (arg1))
12114 && TREE_CODE (tem) == INTEGER_CST
12115 && !TREE_OVERFLOW (tem))
12116 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0), tem);
12118 /* Similarly for a BIT_XOR_EXPR; X ^ C1 == C2 is X == (C1 ^ C2). */
12119 if (TREE_CODE (arg0) == BIT_XOR_EXPR
12120 && TREE_CODE (arg1) == INTEGER_CST
12121 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
12122 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0),
12123 fold_build2_loc (loc, BIT_XOR_EXPR, TREE_TYPE (arg0),
12124 fold_convert_loc (loc,
12125 TREE_TYPE (arg0),
12126 arg1),
12127 TREE_OPERAND (arg0, 1)));
12129 /* Transform comparisons of the form X +- Y CMP X to Y CMP 0. */
12130 if ((TREE_CODE (arg0) == PLUS_EXPR
12131 || TREE_CODE (arg0) == POINTER_PLUS_EXPR
12132 || TREE_CODE (arg0) == MINUS_EXPR)
12133 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
12134 && (INTEGRAL_TYPE_P (TREE_TYPE (arg0))
12135 || POINTER_TYPE_P (TREE_TYPE (arg0))))
12137 tree val = TREE_OPERAND (arg0, 1);
12138 return omit_two_operands_loc (loc, type,
12139 fold_build2_loc (loc, code, type,
12140 val,
12141 build_int_cst (TREE_TYPE (val),
12142 0)),
12143 TREE_OPERAND (arg0, 0), arg1);
12146 /* Transform comparisons of the form C - X CMP X if C % 2 == 1. */
12147 if (TREE_CODE (arg0) == MINUS_EXPR
12148 && TREE_CODE (TREE_OPERAND (arg0, 0)) == INTEGER_CST
12149 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0)
12150 && (TREE_INT_CST_LOW (TREE_OPERAND (arg0, 0)) & 1) == 1)
12152 return omit_two_operands_loc (loc, type,
12153 code == NE_EXPR
12154 ? boolean_true_node : boolean_false_node,
12155 TREE_OPERAND (arg0, 1), arg1);
12158 /* If we have X - Y == 0, we can convert that to X == Y and similarly
12159 for !=. Don't do this for ordered comparisons due to overflow. */
12160 if (TREE_CODE (arg0) == MINUS_EXPR
12161 && integer_zerop (arg1))
12162 return fold_build2_loc (loc, code, type,
12163 TREE_OPERAND (arg0, 0), TREE_OPERAND (arg0, 1));
12165 /* Convert ABS_EXPR<x> == 0 or ABS_EXPR<x> != 0 to x == 0 or x != 0. */
12166 if (TREE_CODE (arg0) == ABS_EXPR
12167 && (integer_zerop (arg1) || real_zerop (arg1)))
12168 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0), arg1);
12170 /* If this is an EQ or NE comparison with zero and ARG0 is
12171 (1 << foo) & bar, convert it to (bar >> foo) & 1. Both require
12172 two operations, but the latter can be done in one less insn
12173 on machines that have only two-operand insns or on which a
12174 constant cannot be the first operand. */
12175 if (TREE_CODE (arg0) == BIT_AND_EXPR
12176 && integer_zerop (arg1))
12178 tree arg00 = TREE_OPERAND (arg0, 0);
12179 tree arg01 = TREE_OPERAND (arg0, 1);
12180 if (TREE_CODE (arg00) == LSHIFT_EXPR
12181 && integer_onep (TREE_OPERAND (arg00, 0)))
12183 tree tem = fold_build2_loc (loc, RSHIFT_EXPR, TREE_TYPE (arg00),
12184 arg01, TREE_OPERAND (arg00, 1));
12185 tem = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg0), tem,
12186 build_int_cst (TREE_TYPE (arg0), 1));
12187 return fold_build2_loc (loc, code, type,
12188 fold_convert_loc (loc, TREE_TYPE (arg1), tem),
12189 arg1);
12191 else if (TREE_CODE (arg01) == LSHIFT_EXPR
12192 && integer_onep (TREE_OPERAND (arg01, 0)))
12194 tree tem = fold_build2_loc (loc, RSHIFT_EXPR, TREE_TYPE (arg01),
12195 arg00, TREE_OPERAND (arg01, 1));
12196 tem = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg0), tem,
12197 build_int_cst (TREE_TYPE (arg0), 1));
12198 return fold_build2_loc (loc, code, type,
12199 fold_convert_loc (loc, TREE_TYPE (arg1), tem),
12200 arg1);
12204 /* If this is an NE or EQ comparison of zero against the result of a
12205 signed MOD operation whose second operand is a power of 2, make
12206 the MOD operation unsigned since it is simpler and equivalent. */
12207 if (integer_zerop (arg1)
12208 && !TYPE_UNSIGNED (TREE_TYPE (arg0))
12209 && (TREE_CODE (arg0) == TRUNC_MOD_EXPR
12210 || TREE_CODE (arg0) == CEIL_MOD_EXPR
12211 || TREE_CODE (arg0) == FLOOR_MOD_EXPR
12212 || TREE_CODE (arg0) == ROUND_MOD_EXPR)
12213 && integer_pow2p (TREE_OPERAND (arg0, 1)))
12215 tree newtype = unsigned_type_for (TREE_TYPE (arg0));
12216 tree newmod = fold_build2_loc (loc, TREE_CODE (arg0), newtype,
12217 fold_convert_loc (loc, newtype,
12218 TREE_OPERAND (arg0, 0)),
12219 fold_convert_loc (loc, newtype,
12220 TREE_OPERAND (arg0, 1)));
12222 return fold_build2_loc (loc, code, type, newmod,
12223 fold_convert_loc (loc, newtype, arg1));
12226 /* Fold ((X >> C1) & C2) == 0 and ((X >> C1) & C2) != 0 where
12227 C1 is a valid shift constant, and C2 is a power of two, i.e.
12228 a single bit. */
12229 if (TREE_CODE (arg0) == BIT_AND_EXPR
12230 && TREE_CODE (TREE_OPERAND (arg0, 0)) == RSHIFT_EXPR
12231 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1))
12232 == INTEGER_CST
12233 && integer_pow2p (TREE_OPERAND (arg0, 1))
12234 && integer_zerop (arg1))
12236 tree itype = TREE_TYPE (arg0);
12237 unsigned HOST_WIDE_INT prec = TYPE_PRECISION (itype);
12238 tree arg001 = TREE_OPERAND (TREE_OPERAND (arg0, 0), 1);
12240 /* Check for a valid shift count. */
12241 if (TREE_INT_CST_HIGH (arg001) == 0
12242 && TREE_INT_CST_LOW (arg001) < prec)
12244 tree arg01 = TREE_OPERAND (arg0, 1);
12245 tree arg000 = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
12246 unsigned HOST_WIDE_INT log2 = tree_log2 (arg01);
12247 /* If (C2 << C1) doesn't overflow, then ((X >> C1) & C2) != 0
12248 can be rewritten as (X & (C2 << C1)) != 0. */
12249 if ((log2 + TREE_INT_CST_LOW (arg001)) < prec)
12251 tem = fold_build2_loc (loc, LSHIFT_EXPR, itype, arg01, arg001);
12252 tem = fold_build2_loc (loc, BIT_AND_EXPR, itype, arg000, tem);
12253 return fold_build2_loc (loc, code, type, tem, arg1);
12255 /* Otherwise, for signed (arithmetic) shifts,
12256 ((X >> C1) & C2) != 0 is rewritten as X < 0, and
12257 ((X >> C1) & C2) == 0 is rewritten as X >= 0. */
12258 else if (!TYPE_UNSIGNED (itype))
12259 return fold_build2_loc (loc, code == EQ_EXPR ? GE_EXPR : LT_EXPR, type,
12260 arg000, build_int_cst (itype, 0));
12261 /* Otherwise, of unsigned (logical) shifts,
12262 ((X >> C1) & C2) != 0 is rewritten as (X,false), and
12263 ((X >> C1) & C2) == 0 is rewritten as (X,true). */
12264 else
12265 return omit_one_operand_loc (loc, type,
12266 code == EQ_EXPR ? integer_one_node
12267 : integer_zero_node,
12268 arg000);
12272 /* If this is an NE comparison of zero with an AND of one, remove the
12273 comparison since the AND will give the correct value. */
12274 if (code == NE_EXPR
12275 && integer_zerop (arg1)
12276 && TREE_CODE (arg0) == BIT_AND_EXPR
12277 && integer_onep (TREE_OPERAND (arg0, 1)))
12278 return fold_convert_loc (loc, type, arg0);
12280 /* If we have (A & C) == C where C is a power of 2, convert this into
12281 (A & C) != 0. Similarly for NE_EXPR. */
12282 if (TREE_CODE (arg0) == BIT_AND_EXPR
12283 && integer_pow2p (TREE_OPERAND (arg0, 1))
12284 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
12285 return fold_build2_loc (loc, code == EQ_EXPR ? NE_EXPR : EQ_EXPR, type,
12286 arg0, fold_convert_loc (loc, TREE_TYPE (arg0),
12287 integer_zero_node));
12289 /* If we have (A & C) != 0 or (A & C) == 0 and C is the sign
12290 bit, then fold the expression into A < 0 or A >= 0. */
12291 tem = fold_single_bit_test_into_sign_test (loc, code, arg0, arg1, type);
12292 if (tem)
12293 return tem;
12295 /* If we have (A & C) == D where D & ~C != 0, convert this into 0.
12296 Similarly for NE_EXPR. */
12297 if (TREE_CODE (arg0) == BIT_AND_EXPR
12298 && TREE_CODE (arg1) == INTEGER_CST
12299 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
12301 tree notc = fold_build1_loc (loc, BIT_NOT_EXPR,
12302 TREE_TYPE (TREE_OPERAND (arg0, 1)),
12303 TREE_OPERAND (arg0, 1));
12304 tree dandnotc = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg0),
12305 arg1, notc);
12306 tree rslt = code == EQ_EXPR ? integer_zero_node : integer_one_node;
12307 if (integer_nonzerop (dandnotc))
12308 return omit_one_operand_loc (loc, type, rslt, arg0);
12311 /* If we have (A | C) == D where C & ~D != 0, convert this into 0.
12312 Similarly for NE_EXPR. */
12313 if (TREE_CODE (arg0) == BIT_IOR_EXPR
12314 && TREE_CODE (arg1) == INTEGER_CST
12315 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
12317 tree notd = fold_build1_loc (loc, BIT_NOT_EXPR, TREE_TYPE (arg1), arg1);
12318 tree candnotd = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg0),
12319 TREE_OPERAND (arg0, 1), notd);
12320 tree rslt = code == EQ_EXPR ? integer_zero_node : integer_one_node;
12321 if (integer_nonzerop (candnotd))
12322 return omit_one_operand_loc (loc, type, rslt, arg0);
12325 /* If this is a comparison of a field, we may be able to simplify it. */
12326 if ((TREE_CODE (arg0) == COMPONENT_REF
12327 || TREE_CODE (arg0) == BIT_FIELD_REF)
12328 /* Handle the constant case even without -O
12329 to make sure the warnings are given. */
12330 && (optimize || TREE_CODE (arg1) == INTEGER_CST))
12332 t1 = optimize_bit_field_compare (loc, code, type, arg0, arg1);
12333 if (t1)
12334 return t1;
12337 /* Optimize comparisons of strlen vs zero to a compare of the
12338 first character of the string vs zero. To wit,
12339 strlen(ptr) == 0 => *ptr == 0
12340 strlen(ptr) != 0 => *ptr != 0
12341 Other cases should reduce to one of these two (or a constant)
12342 due to the return value of strlen being unsigned. */
12343 if (TREE_CODE (arg0) == CALL_EXPR
12344 && integer_zerop (arg1))
12346 tree fndecl = get_callee_fndecl (arg0);
12348 if (fndecl
12349 && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
12350 && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_STRLEN
12351 && call_expr_nargs (arg0) == 1
12352 && TREE_CODE (TREE_TYPE (CALL_EXPR_ARG (arg0, 0))) == POINTER_TYPE)
12354 tree iref = build_fold_indirect_ref_loc (loc,
12355 CALL_EXPR_ARG (arg0, 0));
12356 return fold_build2_loc (loc, code, type, iref,
12357 build_int_cst (TREE_TYPE (iref), 0));
12361 /* Fold (X >> C) != 0 into X < 0 if C is one less than the width
12362 of X. Similarly fold (X >> C) == 0 into X >= 0. */
12363 if (TREE_CODE (arg0) == RSHIFT_EXPR
12364 && integer_zerop (arg1)
12365 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
12367 tree arg00 = TREE_OPERAND (arg0, 0);
12368 tree arg01 = TREE_OPERAND (arg0, 1);
12369 tree itype = TREE_TYPE (arg00);
12370 if (TREE_INT_CST_HIGH (arg01) == 0
12371 && TREE_INT_CST_LOW (arg01)
12372 == (unsigned HOST_WIDE_INT) (TYPE_PRECISION (itype) - 1))
12374 if (TYPE_UNSIGNED (itype))
12376 itype = signed_type_for (itype);
12377 arg00 = fold_convert_loc (loc, itype, arg00);
12379 return fold_build2_loc (loc, code == EQ_EXPR ? GE_EXPR : LT_EXPR,
12380 type, arg00, build_int_cst (itype, 0));
12384 /* (X ^ Y) == 0 becomes X == Y, and (X ^ Y) != 0 becomes X != Y. */
12385 if (integer_zerop (arg1)
12386 && TREE_CODE (arg0) == BIT_XOR_EXPR)
12387 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0),
12388 TREE_OPERAND (arg0, 1));
12390 /* (X ^ Y) == Y becomes X == 0. We know that Y has no side-effects. */
12391 if (TREE_CODE (arg0) == BIT_XOR_EXPR
12392 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
12393 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0),
12394 build_int_cst (TREE_TYPE (arg1), 0));
12395 /* Likewise (X ^ Y) == X becomes Y == 0. X has no side-effects. */
12396 if (TREE_CODE (arg0) == BIT_XOR_EXPR
12397 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
12398 && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
12399 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 1),
12400 build_int_cst (TREE_TYPE (arg1), 0));
12402 /* (X ^ C1) op C2 can be rewritten as X op (C1 ^ C2). */
12403 if (TREE_CODE (arg0) == BIT_XOR_EXPR
12404 && TREE_CODE (arg1) == INTEGER_CST
12405 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
12406 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0),
12407 fold_build2_loc (loc, BIT_XOR_EXPR, TREE_TYPE (arg1),
12408 TREE_OPERAND (arg0, 1), arg1));
12410 /* Fold (~X & C) == 0 into (X & C) != 0 and (~X & C) != 0 into
12411 (X & C) == 0 when C is a single bit. */
12412 if (TREE_CODE (arg0) == BIT_AND_EXPR
12413 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_NOT_EXPR
12414 && integer_zerop (arg1)
12415 && integer_pow2p (TREE_OPERAND (arg0, 1)))
12417 tem = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg0),
12418 TREE_OPERAND (TREE_OPERAND (arg0, 0), 0),
12419 TREE_OPERAND (arg0, 1));
12420 return fold_build2_loc (loc, code == EQ_EXPR ? NE_EXPR : EQ_EXPR,
12421 type, tem, arg1);
12424 /* Fold ((X & C) ^ C) eq/ne 0 into (X & C) ne/eq 0, when the
12425 constant C is a power of two, i.e. a single bit. */
12426 if (TREE_CODE (arg0) == BIT_XOR_EXPR
12427 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_AND_EXPR
12428 && integer_zerop (arg1)
12429 && integer_pow2p (TREE_OPERAND (arg0, 1))
12430 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
12431 TREE_OPERAND (arg0, 1), OEP_ONLY_CONST))
12433 tree arg00 = TREE_OPERAND (arg0, 0);
12434 return fold_build2_loc (loc, code == EQ_EXPR ? NE_EXPR : EQ_EXPR, type,
12435 arg00, build_int_cst (TREE_TYPE (arg00), 0));
12438 /* Likewise, fold ((X ^ C) & C) eq/ne 0 into (X & C) ne/eq 0,
12439 when is C is a power of two, i.e. a single bit. */
12440 if (TREE_CODE (arg0) == BIT_AND_EXPR
12441 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_XOR_EXPR
12442 && integer_zerop (arg1)
12443 && integer_pow2p (TREE_OPERAND (arg0, 1))
12444 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
12445 TREE_OPERAND (arg0, 1), OEP_ONLY_CONST))
12447 tree arg000 = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
12448 tem = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg000),
12449 arg000, TREE_OPERAND (arg0, 1));
12450 return fold_build2_loc (loc, code == EQ_EXPR ? NE_EXPR : EQ_EXPR, type,
12451 tem, build_int_cst (TREE_TYPE (tem), 0));
12454 if (integer_zerop (arg1)
12455 && tree_expr_nonzero_p (arg0))
12457 tree res = constant_boolean_node (code==NE_EXPR, type);
12458 return omit_one_operand_loc (loc, type, res, arg0);
12461 /* Fold -X op -Y as X op Y, where op is eq/ne. */
12462 if (TREE_CODE (arg0) == NEGATE_EXPR
12463 && TREE_CODE (arg1) == NEGATE_EXPR)
12464 return fold_build2_loc (loc, code, type,
12465 TREE_OPERAND (arg0, 0),
12466 TREE_OPERAND (arg1, 0));
12468 /* Fold (X & C) op (Y & C) as (X ^ Y) & C op 0", and symmetries. */
12469 if (TREE_CODE (arg0) == BIT_AND_EXPR
12470 && TREE_CODE (arg1) == BIT_AND_EXPR)
12472 tree arg00 = TREE_OPERAND (arg0, 0);
12473 tree arg01 = TREE_OPERAND (arg0, 1);
12474 tree arg10 = TREE_OPERAND (arg1, 0);
12475 tree arg11 = TREE_OPERAND (arg1, 1);
12476 tree itype = TREE_TYPE (arg0);
12478 if (operand_equal_p (arg01, arg11, 0))
12479 return fold_build2_loc (loc, code, type,
12480 fold_build2_loc (loc, BIT_AND_EXPR, itype,
12481 fold_build2_loc (loc,
12482 BIT_XOR_EXPR, itype,
12483 arg00, arg10),
12484 arg01),
12485 build_int_cst (itype, 0));
12487 if (operand_equal_p (arg01, arg10, 0))
12488 return fold_build2_loc (loc, code, type,
12489 fold_build2_loc (loc, BIT_AND_EXPR, itype,
12490 fold_build2_loc (loc,
12491 BIT_XOR_EXPR, itype,
12492 arg00, arg11),
12493 arg01),
12494 build_int_cst (itype, 0));
12496 if (operand_equal_p (arg00, arg11, 0))
12497 return fold_build2_loc (loc, code, type,
12498 fold_build2_loc (loc, BIT_AND_EXPR, itype,
12499 fold_build2_loc (loc,
12500 BIT_XOR_EXPR, itype,
12501 arg01, arg10),
12502 arg00),
12503 build_int_cst (itype, 0));
12505 if (operand_equal_p (arg00, arg10, 0))
12506 return fold_build2_loc (loc, code, type,
12507 fold_build2_loc (loc, BIT_AND_EXPR, itype,
12508 fold_build2_loc (loc,
12509 BIT_XOR_EXPR, itype,
12510 arg01, arg11),
12511 arg00),
12512 build_int_cst (itype, 0));
12515 if (TREE_CODE (arg0) == BIT_XOR_EXPR
12516 && TREE_CODE (arg1) == BIT_XOR_EXPR)
12518 tree arg00 = TREE_OPERAND (arg0, 0);
12519 tree arg01 = TREE_OPERAND (arg0, 1);
12520 tree arg10 = TREE_OPERAND (arg1, 0);
12521 tree arg11 = TREE_OPERAND (arg1, 1);
12522 tree itype = TREE_TYPE (arg0);
12524 /* Optimize (X ^ Z) op (Y ^ Z) as X op Y, and symmetries.
12525 operand_equal_p guarantees no side-effects so we don't need
12526 to use omit_one_operand on Z. */
12527 if (operand_equal_p (arg01, arg11, 0))
12528 return fold_build2_loc (loc, code, type, arg00, arg10);
12529 if (operand_equal_p (arg01, arg10, 0))
12530 return fold_build2_loc (loc, code, type, arg00, arg11);
12531 if (operand_equal_p (arg00, arg11, 0))
12532 return fold_build2_loc (loc, code, type, arg01, arg10);
12533 if (operand_equal_p (arg00, arg10, 0))
12534 return fold_build2_loc (loc, code, type, arg01, arg11);
12536 /* Optimize (X ^ C1) op (Y ^ C2) as (X ^ (C1 ^ C2)) op Y. */
12537 if (TREE_CODE (arg01) == INTEGER_CST
12538 && TREE_CODE (arg11) == INTEGER_CST)
12539 return fold_build2_loc (loc, code, type,
12540 fold_build2_loc (loc, BIT_XOR_EXPR, itype, arg00,
12541 fold_build2_loc (loc,
12542 BIT_XOR_EXPR, itype,
12543 arg01, arg11)),
12544 arg10);
12547 /* Attempt to simplify equality/inequality comparisons of complex
12548 values. Only lower the comparison if the result is known or
12549 can be simplified to a single scalar comparison. */
12550 if ((TREE_CODE (arg0) == COMPLEX_EXPR
12551 || TREE_CODE (arg0) == COMPLEX_CST)
12552 && (TREE_CODE (arg1) == COMPLEX_EXPR
12553 || TREE_CODE (arg1) == COMPLEX_CST))
12555 tree real0, imag0, real1, imag1;
12556 tree rcond, icond;
12558 if (TREE_CODE (arg0) == COMPLEX_EXPR)
12560 real0 = TREE_OPERAND (arg0, 0);
12561 imag0 = TREE_OPERAND (arg0, 1);
12563 else
12565 real0 = TREE_REALPART (arg0);
12566 imag0 = TREE_IMAGPART (arg0);
12569 if (TREE_CODE (arg1) == COMPLEX_EXPR)
12571 real1 = TREE_OPERAND (arg1, 0);
12572 imag1 = TREE_OPERAND (arg1, 1);
12574 else
12576 real1 = TREE_REALPART (arg1);
12577 imag1 = TREE_IMAGPART (arg1);
12580 rcond = fold_binary_loc (loc, code, type, real0, real1);
12581 if (rcond && TREE_CODE (rcond) == INTEGER_CST)
12583 if (integer_zerop (rcond))
12585 if (code == EQ_EXPR)
12586 return omit_two_operands_loc (loc, type, boolean_false_node,
12587 imag0, imag1);
12588 return fold_build2_loc (loc, NE_EXPR, type, imag0, imag1);
12590 else
12592 if (code == NE_EXPR)
12593 return omit_two_operands_loc (loc, type, boolean_true_node,
12594 imag0, imag1);
12595 return fold_build2_loc (loc, EQ_EXPR, type, imag0, imag1);
12599 icond = fold_binary_loc (loc, code, type, imag0, imag1);
12600 if (icond && TREE_CODE (icond) == INTEGER_CST)
12602 if (integer_zerop (icond))
12604 if (code == EQ_EXPR)
12605 return omit_two_operands_loc (loc, type, boolean_false_node,
12606 real0, real1);
12607 return fold_build2_loc (loc, NE_EXPR, type, real0, real1);
12609 else
12611 if (code == NE_EXPR)
12612 return omit_two_operands_loc (loc, type, boolean_true_node,
12613 real0, real1);
12614 return fold_build2_loc (loc, EQ_EXPR, type, real0, real1);
12619 return NULL_TREE;
12621 case LT_EXPR:
12622 case GT_EXPR:
12623 case LE_EXPR:
12624 case GE_EXPR:
12625 tem = fold_comparison (loc, code, type, op0, op1);
12626 if (tem != NULL_TREE)
12627 return tem;
12629 /* Transform comparisons of the form X +- C CMP X. */
12630 if ((TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
12631 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
12632 && ((TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
12633 && !HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0))))
12634 || (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
12635 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))))
12637 tree arg01 = TREE_OPERAND (arg0, 1);
12638 enum tree_code code0 = TREE_CODE (arg0);
12639 int is_positive;
12641 if (TREE_CODE (arg01) == REAL_CST)
12642 is_positive = REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg01)) ? -1 : 1;
12643 else
12644 is_positive = tree_int_cst_sgn (arg01);
12646 /* (X - c) > X becomes false. */
12647 if (code == GT_EXPR
12648 && ((code0 == MINUS_EXPR && is_positive >= 0)
12649 || (code0 == PLUS_EXPR && is_positive <= 0)))
12651 if (TREE_CODE (arg01) == INTEGER_CST
12652 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
12653 fold_overflow_warning (("assuming signed overflow does not "
12654 "occur when assuming that (X - c) > X "
12655 "is always false"),
12656 WARN_STRICT_OVERFLOW_ALL);
12657 return constant_boolean_node (0, type);
12660 /* Likewise (X + c) < X becomes false. */
12661 if (code == LT_EXPR
12662 && ((code0 == PLUS_EXPR && is_positive >= 0)
12663 || (code0 == MINUS_EXPR && is_positive <= 0)))
12665 if (TREE_CODE (arg01) == INTEGER_CST
12666 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
12667 fold_overflow_warning (("assuming signed overflow does not "
12668 "occur when assuming that "
12669 "(X + c) < X is always false"),
12670 WARN_STRICT_OVERFLOW_ALL);
12671 return constant_boolean_node (0, type);
12674 /* Convert (X - c) <= X to true. */
12675 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1)))
12676 && code == LE_EXPR
12677 && ((code0 == MINUS_EXPR && is_positive >= 0)
12678 || (code0 == PLUS_EXPR && is_positive <= 0)))
12680 if (TREE_CODE (arg01) == INTEGER_CST
12681 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
12682 fold_overflow_warning (("assuming signed overflow does not "
12683 "occur when assuming that "
12684 "(X - c) <= X is always true"),
12685 WARN_STRICT_OVERFLOW_ALL);
12686 return constant_boolean_node (1, type);
12689 /* Convert (X + c) >= X to true. */
12690 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1)))
12691 && code == GE_EXPR
12692 && ((code0 == PLUS_EXPR && is_positive >= 0)
12693 || (code0 == MINUS_EXPR && is_positive <= 0)))
12695 if (TREE_CODE (arg01) == INTEGER_CST
12696 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
12697 fold_overflow_warning (("assuming signed overflow does not "
12698 "occur when assuming that "
12699 "(X + c) >= X is always true"),
12700 WARN_STRICT_OVERFLOW_ALL);
12701 return constant_boolean_node (1, type);
12704 if (TREE_CODE (arg01) == INTEGER_CST)
12706 /* Convert X + c > X and X - c < X to true for integers. */
12707 if (code == GT_EXPR
12708 && ((code0 == PLUS_EXPR && is_positive > 0)
12709 || (code0 == MINUS_EXPR && is_positive < 0)))
12711 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
12712 fold_overflow_warning (("assuming signed overflow does "
12713 "not occur when assuming that "
12714 "(X + c) > X is always true"),
12715 WARN_STRICT_OVERFLOW_ALL);
12716 return constant_boolean_node (1, type);
12719 if (code == LT_EXPR
12720 && ((code0 == MINUS_EXPR && is_positive > 0)
12721 || (code0 == PLUS_EXPR && is_positive < 0)))
12723 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
12724 fold_overflow_warning (("assuming signed overflow does "
12725 "not occur when assuming that "
12726 "(X - c) < X is always true"),
12727 WARN_STRICT_OVERFLOW_ALL);
12728 return constant_boolean_node (1, type);
12731 /* Convert X + c <= X and X - c >= X to false for integers. */
12732 if (code == LE_EXPR
12733 && ((code0 == PLUS_EXPR && is_positive > 0)
12734 || (code0 == MINUS_EXPR && is_positive < 0)))
12736 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
12737 fold_overflow_warning (("assuming signed overflow does "
12738 "not occur when assuming that "
12739 "(X + c) <= X is always false"),
12740 WARN_STRICT_OVERFLOW_ALL);
12741 return constant_boolean_node (0, type);
12744 if (code == GE_EXPR
12745 && ((code0 == MINUS_EXPR && is_positive > 0)
12746 || (code0 == PLUS_EXPR && is_positive < 0)))
12748 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
12749 fold_overflow_warning (("assuming signed overflow does "
12750 "not occur when assuming that "
12751 "(X - c) >= X is always false"),
12752 WARN_STRICT_OVERFLOW_ALL);
12753 return constant_boolean_node (0, type);
12758 /* Comparisons with the highest or lowest possible integer of
12759 the specified precision will have known values. */
12761 tree arg1_type = TREE_TYPE (arg1);
12762 unsigned int width = TYPE_PRECISION (arg1_type);
12764 if (TREE_CODE (arg1) == INTEGER_CST
12765 && width <= 2 * HOST_BITS_PER_WIDE_INT
12766 && (INTEGRAL_TYPE_P (arg1_type) || POINTER_TYPE_P (arg1_type)))
12768 HOST_WIDE_INT signed_max_hi;
12769 unsigned HOST_WIDE_INT signed_max_lo;
12770 unsigned HOST_WIDE_INT max_hi, max_lo, min_hi, min_lo;
12772 if (width <= HOST_BITS_PER_WIDE_INT)
12774 signed_max_lo = ((unsigned HOST_WIDE_INT) 1 << (width - 1))
12775 - 1;
12776 signed_max_hi = 0;
12777 max_hi = 0;
12779 if (TYPE_UNSIGNED (arg1_type))
12781 max_lo = ((unsigned HOST_WIDE_INT) 2 << (width - 1)) - 1;
12782 min_lo = 0;
12783 min_hi = 0;
12785 else
12787 max_lo = signed_max_lo;
12788 min_lo = ((unsigned HOST_WIDE_INT) -1 << (width - 1));
12789 min_hi = -1;
12792 else
12794 width -= HOST_BITS_PER_WIDE_INT;
12795 signed_max_lo = -1;
12796 signed_max_hi = ((unsigned HOST_WIDE_INT) 1 << (width - 1))
12797 - 1;
12798 max_lo = -1;
12799 min_lo = 0;
12801 if (TYPE_UNSIGNED (arg1_type))
12803 max_hi = ((unsigned HOST_WIDE_INT) 2 << (width - 1)) - 1;
12804 min_hi = 0;
12806 else
12808 max_hi = signed_max_hi;
12809 min_hi = ((unsigned HOST_WIDE_INT) -1 << (width - 1));
12813 if ((unsigned HOST_WIDE_INT) TREE_INT_CST_HIGH (arg1) == max_hi
12814 && TREE_INT_CST_LOW (arg1) == max_lo)
12815 switch (code)
12817 case GT_EXPR:
12818 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
12820 case GE_EXPR:
12821 return fold_build2_loc (loc, EQ_EXPR, type, op0, op1);
12823 case LE_EXPR:
12824 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
12826 case LT_EXPR:
12827 return fold_build2_loc (loc, NE_EXPR, type, op0, op1);
12829 /* The GE_EXPR and LT_EXPR cases above are not normally
12830 reached because of previous transformations. */
12832 default:
12833 break;
12835 else if ((unsigned HOST_WIDE_INT) TREE_INT_CST_HIGH (arg1)
12836 == max_hi
12837 && TREE_INT_CST_LOW (arg1) == max_lo - 1)
12838 switch (code)
12840 case GT_EXPR:
12841 arg1 = const_binop (PLUS_EXPR, arg1,
12842 build_int_cst (TREE_TYPE (arg1), 1));
12843 return fold_build2_loc (loc, EQ_EXPR, type,
12844 fold_convert_loc (loc,
12845 TREE_TYPE (arg1), arg0),
12846 arg1);
12847 case LE_EXPR:
12848 arg1 = const_binop (PLUS_EXPR, arg1,
12849 build_int_cst (TREE_TYPE (arg1), 1));
12850 return fold_build2_loc (loc, NE_EXPR, type,
12851 fold_convert_loc (loc, TREE_TYPE (arg1),
12852 arg0),
12853 arg1);
12854 default:
12855 break;
12857 else if ((unsigned HOST_WIDE_INT) TREE_INT_CST_HIGH (arg1)
12858 == min_hi
12859 && TREE_INT_CST_LOW (arg1) == min_lo)
12860 switch (code)
12862 case LT_EXPR:
12863 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
12865 case LE_EXPR:
12866 return fold_build2_loc (loc, EQ_EXPR, type, op0, op1);
12868 case GE_EXPR:
12869 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
12871 case GT_EXPR:
12872 return fold_build2_loc (loc, NE_EXPR, type, op0, op1);
12874 default:
12875 break;
12877 else if ((unsigned HOST_WIDE_INT) TREE_INT_CST_HIGH (arg1)
12878 == min_hi
12879 && TREE_INT_CST_LOW (arg1) == min_lo + 1)
12880 switch (code)
12882 case GE_EXPR:
12883 arg1 = const_binop (MINUS_EXPR, arg1, integer_one_node);
12884 return fold_build2_loc (loc, NE_EXPR, type,
12885 fold_convert_loc (loc,
12886 TREE_TYPE (arg1), arg0),
12887 arg1);
12888 case LT_EXPR:
12889 arg1 = const_binop (MINUS_EXPR, arg1, integer_one_node);
12890 return fold_build2_loc (loc, EQ_EXPR, type,
12891 fold_convert_loc (loc, TREE_TYPE (arg1),
12892 arg0),
12893 arg1);
12894 default:
12895 break;
12898 else if (TREE_INT_CST_HIGH (arg1) == signed_max_hi
12899 && TREE_INT_CST_LOW (arg1) == signed_max_lo
12900 && TYPE_UNSIGNED (arg1_type)
12901 /* We will flip the signedness of the comparison operator
12902 associated with the mode of arg1, so the sign bit is
12903 specified by this mode. Check that arg1 is the signed
12904 max associated with this sign bit. */
12905 && width == GET_MODE_BITSIZE (TYPE_MODE (arg1_type))
12906 /* signed_type does not work on pointer types. */
12907 && INTEGRAL_TYPE_P (arg1_type))
12909 /* The following case also applies to X < signed_max+1
12910 and X >= signed_max+1 because previous transformations. */
12911 if (code == LE_EXPR || code == GT_EXPR)
12913 tree st;
12914 st = signed_type_for (TREE_TYPE (arg1));
12915 return fold_build2_loc (loc,
12916 code == LE_EXPR ? GE_EXPR : LT_EXPR,
12917 type, fold_convert_loc (loc, st, arg0),
12918 build_int_cst (st, 0));
12924 /* If we are comparing an ABS_EXPR with a constant, we can
12925 convert all the cases into explicit comparisons, but they may
12926 well not be faster than doing the ABS and one comparison.
12927 But ABS (X) <= C is a range comparison, which becomes a subtraction
12928 and a comparison, and is probably faster. */
12929 if (code == LE_EXPR
12930 && TREE_CODE (arg1) == INTEGER_CST
12931 && TREE_CODE (arg0) == ABS_EXPR
12932 && ! TREE_SIDE_EFFECTS (arg0)
12933 && (0 != (tem = negate_expr (arg1)))
12934 && TREE_CODE (tem) == INTEGER_CST
12935 && !TREE_OVERFLOW (tem))
12936 return fold_build2_loc (loc, TRUTH_ANDIF_EXPR, type,
12937 build2 (GE_EXPR, type,
12938 TREE_OPERAND (arg0, 0), tem),
12939 build2 (LE_EXPR, type,
12940 TREE_OPERAND (arg0, 0), arg1));
12942 /* Convert ABS_EXPR<x> >= 0 to true. */
12943 strict_overflow_p = false;
12944 if (code == GE_EXPR
12945 && (integer_zerop (arg1)
12946 || (! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0)))
12947 && real_zerop (arg1)))
12948 && tree_expr_nonnegative_warnv_p (arg0, &strict_overflow_p))
12950 if (strict_overflow_p)
12951 fold_overflow_warning (("assuming signed overflow does not occur "
12952 "when simplifying comparison of "
12953 "absolute value and zero"),
12954 WARN_STRICT_OVERFLOW_CONDITIONAL);
12955 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
12958 /* Convert ABS_EXPR<x> < 0 to false. */
12959 strict_overflow_p = false;
12960 if (code == LT_EXPR
12961 && (integer_zerop (arg1) || real_zerop (arg1))
12962 && tree_expr_nonnegative_warnv_p (arg0, &strict_overflow_p))
12964 if (strict_overflow_p)
12965 fold_overflow_warning (("assuming signed overflow does not occur "
12966 "when simplifying comparison of "
12967 "absolute value and zero"),
12968 WARN_STRICT_OVERFLOW_CONDITIONAL);
12969 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
12972 /* If X is unsigned, convert X < (1 << Y) into X >> Y == 0
12973 and similarly for >= into !=. */
12974 if ((code == LT_EXPR || code == GE_EXPR)
12975 && TYPE_UNSIGNED (TREE_TYPE (arg0))
12976 && TREE_CODE (arg1) == LSHIFT_EXPR
12977 && integer_onep (TREE_OPERAND (arg1, 0)))
12979 tem = build2 (code == LT_EXPR ? EQ_EXPR : NE_EXPR, type,
12980 build2 (RSHIFT_EXPR, TREE_TYPE (arg0), arg0,
12981 TREE_OPERAND (arg1, 1)),
12982 build_int_cst (TREE_TYPE (arg0), 0));
12983 goto fold_binary_exit;
12986 if ((code == LT_EXPR || code == GE_EXPR)
12987 && TYPE_UNSIGNED (TREE_TYPE (arg0))
12988 && CONVERT_EXPR_P (arg1)
12989 && TREE_CODE (TREE_OPERAND (arg1, 0)) == LSHIFT_EXPR
12990 && integer_onep (TREE_OPERAND (TREE_OPERAND (arg1, 0), 0)))
12992 tem = build2 (code == LT_EXPR ? EQ_EXPR : NE_EXPR, type,
12993 fold_convert_loc (loc, TREE_TYPE (arg0),
12994 build2 (RSHIFT_EXPR,
12995 TREE_TYPE (arg0), arg0,
12996 TREE_OPERAND (TREE_OPERAND (arg1, 0),
12997 1))),
12998 build_int_cst (TREE_TYPE (arg0), 0));
12999 goto fold_binary_exit;
13002 return NULL_TREE;
13004 case UNORDERED_EXPR:
13005 case ORDERED_EXPR:
13006 case UNLT_EXPR:
13007 case UNLE_EXPR:
13008 case UNGT_EXPR:
13009 case UNGE_EXPR:
13010 case UNEQ_EXPR:
13011 case LTGT_EXPR:
13012 if (TREE_CODE (arg0) == REAL_CST && TREE_CODE (arg1) == REAL_CST)
13014 t1 = fold_relational_const (code, type, arg0, arg1);
13015 if (t1 != NULL_TREE)
13016 return t1;
13019 /* If the first operand is NaN, the result is constant. */
13020 if (TREE_CODE (arg0) == REAL_CST
13021 && REAL_VALUE_ISNAN (TREE_REAL_CST (arg0))
13022 && (code != LTGT_EXPR || ! flag_trapping_math))
13024 t1 = (code == ORDERED_EXPR || code == LTGT_EXPR)
13025 ? integer_zero_node
13026 : integer_one_node;
13027 return omit_one_operand_loc (loc, type, t1, arg1);
13030 /* If the second operand is NaN, the result is constant. */
13031 if (TREE_CODE (arg1) == REAL_CST
13032 && REAL_VALUE_ISNAN (TREE_REAL_CST (arg1))
13033 && (code != LTGT_EXPR || ! flag_trapping_math))
13035 t1 = (code == ORDERED_EXPR || code == LTGT_EXPR)
13036 ? integer_zero_node
13037 : integer_one_node;
13038 return omit_one_operand_loc (loc, type, t1, arg0);
13041 /* Simplify unordered comparison of something with itself. */
13042 if ((code == UNLE_EXPR || code == UNGE_EXPR || code == UNEQ_EXPR)
13043 && operand_equal_p (arg0, arg1, 0))
13044 return constant_boolean_node (1, type);
13046 if (code == LTGT_EXPR
13047 && !flag_trapping_math
13048 && operand_equal_p (arg0, arg1, 0))
13049 return constant_boolean_node (0, type);
13051 /* Fold (double)float1 CMP (double)float2 into float1 CMP float2. */
13053 tree targ0 = strip_float_extensions (arg0);
13054 tree targ1 = strip_float_extensions (arg1);
13055 tree newtype = TREE_TYPE (targ0);
13057 if (TYPE_PRECISION (TREE_TYPE (targ1)) > TYPE_PRECISION (newtype))
13058 newtype = TREE_TYPE (targ1);
13060 if (TYPE_PRECISION (newtype) < TYPE_PRECISION (TREE_TYPE (arg0)))
13061 return fold_build2_loc (loc, code, type,
13062 fold_convert_loc (loc, newtype, targ0),
13063 fold_convert_loc (loc, newtype, targ1));
13066 return NULL_TREE;
13068 case COMPOUND_EXPR:
13069 /* When pedantic, a compound expression can be neither an lvalue
13070 nor an integer constant expression. */
13071 if (TREE_SIDE_EFFECTS (arg0) || TREE_CONSTANT (arg1))
13072 return NULL_TREE;
13073 /* Don't let (0, 0) be null pointer constant. */
13074 tem = integer_zerop (arg1) ? build1 (NOP_EXPR, type, arg1)
13075 : fold_convert_loc (loc, type, arg1);
13076 return pedantic_non_lvalue_loc (loc, tem);
13078 case COMPLEX_EXPR:
13079 if ((TREE_CODE (arg0) == REAL_CST
13080 && TREE_CODE (arg1) == REAL_CST)
13081 || (TREE_CODE (arg0) == INTEGER_CST
13082 && TREE_CODE (arg1) == INTEGER_CST))
13083 return build_complex (type, arg0, arg1);
13084 return NULL_TREE;
13086 case ASSERT_EXPR:
13087 /* An ASSERT_EXPR should never be passed to fold_binary. */
13088 gcc_unreachable ();
13090 default:
13091 return NULL_TREE;
13092 } /* switch (code) */
13093 fold_binary_exit:
13094 protected_set_expr_location (tem, loc);
13095 return tem;
13098 /* Callback for walk_tree, looking for LABEL_EXPR. Return *TP if it is
13099 a LABEL_EXPR; otherwise return NULL_TREE. Do not check the subtrees
13100 of GOTO_EXPR. */
13102 static tree
13103 contains_label_1 (tree *tp, int *walk_subtrees, void *data ATTRIBUTE_UNUSED)
13105 switch (TREE_CODE (*tp))
13107 case LABEL_EXPR:
13108 return *tp;
13110 case GOTO_EXPR:
13111 *walk_subtrees = 0;
13113 /* ... fall through ... */
13115 default:
13116 return NULL_TREE;
13120 /* Return whether the sub-tree ST contains a label which is accessible from
13121 outside the sub-tree. */
13123 static bool
13124 contains_label_p (tree st)
13126 return
13127 (walk_tree_without_duplicates (&st, contains_label_1 , NULL) != NULL_TREE);
13130 /* Fold a ternary expression of code CODE and type TYPE with operands
13131 OP0, OP1, and OP2. Return the folded expression if folding is
13132 successful. Otherwise, return NULL_TREE. */
13134 tree
13135 fold_ternary_loc (location_t loc, enum tree_code code, tree type,
13136 tree op0, tree op1, tree op2)
13138 tree tem;
13139 tree arg0 = NULL_TREE, arg1 = NULL_TREE;
13140 enum tree_code_class kind = TREE_CODE_CLASS (code);
13142 gcc_assert (IS_EXPR_CODE_CLASS (kind)
13143 && TREE_CODE_LENGTH (code) == 3);
13145 /* Strip any conversions that don't change the mode. This is safe
13146 for every expression, except for a comparison expression because
13147 its signedness is derived from its operands. So, in the latter
13148 case, only strip conversions that don't change the signedness.
13150 Note that this is done as an internal manipulation within the
13151 constant folder, in order to find the simplest representation of
13152 the arguments so that their form can be studied. In any cases,
13153 the appropriate type conversions should be put back in the tree
13154 that will get out of the constant folder. */
13155 if (op0)
13157 arg0 = op0;
13158 STRIP_NOPS (arg0);
13161 if (op1)
13163 arg1 = op1;
13164 STRIP_NOPS (arg1);
13167 switch (code)
13169 case COMPONENT_REF:
13170 if (TREE_CODE (arg0) == CONSTRUCTOR
13171 && ! type_contains_placeholder_p (TREE_TYPE (arg0)))
13173 unsigned HOST_WIDE_INT idx;
13174 tree field, value;
13175 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (arg0), idx, field, value)
13176 if (field == arg1)
13177 return value;
13179 return NULL_TREE;
13181 case COND_EXPR:
13182 /* Pedantic ANSI C says that a conditional expression is never an lvalue,
13183 so all simple results must be passed through pedantic_non_lvalue. */
13184 if (TREE_CODE (arg0) == INTEGER_CST)
13186 tree unused_op = integer_zerop (arg0) ? op1 : op2;
13187 tem = integer_zerop (arg0) ? op2 : op1;
13188 /* Only optimize constant conditions when the selected branch
13189 has the same type as the COND_EXPR. This avoids optimizing
13190 away "c ? x : throw", where the throw has a void type.
13191 Avoid throwing away that operand which contains label. */
13192 if ((!TREE_SIDE_EFFECTS (unused_op)
13193 || !contains_label_p (unused_op))
13194 && (! VOID_TYPE_P (TREE_TYPE (tem))
13195 || VOID_TYPE_P (type)))
13196 return pedantic_non_lvalue_loc (loc, tem);
13197 return NULL_TREE;
13199 if (operand_equal_p (arg1, op2, 0))
13200 return pedantic_omit_one_operand_loc (loc, type, arg1, arg0);
13202 /* If we have A op B ? A : C, we may be able to convert this to a
13203 simpler expression, depending on the operation and the values
13204 of B and C. Signed zeros prevent all of these transformations,
13205 for reasons given above each one.
13207 Also try swapping the arguments and inverting the conditional. */
13208 if (COMPARISON_CLASS_P (arg0)
13209 && operand_equal_for_comparison_p (TREE_OPERAND (arg0, 0),
13210 arg1, TREE_OPERAND (arg0, 1))
13211 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg1))))
13213 tem = fold_cond_expr_with_comparison (loc, type, arg0, op1, op2);
13214 if (tem)
13215 return tem;
13218 if (COMPARISON_CLASS_P (arg0)
13219 && operand_equal_for_comparison_p (TREE_OPERAND (arg0, 0),
13220 op2,
13221 TREE_OPERAND (arg0, 1))
13222 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (op2))))
13224 tem = fold_truth_not_expr (loc, arg0);
13225 if (tem && COMPARISON_CLASS_P (tem))
13227 tem = fold_cond_expr_with_comparison (loc, type, tem, op2, op1);
13228 if (tem)
13229 return tem;
13233 /* If the second operand is simpler than the third, swap them
13234 since that produces better jump optimization results. */
13235 if (truth_value_p (TREE_CODE (arg0))
13236 && tree_swap_operands_p (op1, op2, false))
13238 /* See if this can be inverted. If it can't, possibly because
13239 it was a floating-point inequality comparison, don't do
13240 anything. */
13241 tem = fold_truth_not_expr (loc, arg0);
13242 if (tem)
13243 return fold_build3_loc (loc, code, type, tem, op2, op1);
13246 /* Convert A ? 1 : 0 to simply A. */
13247 if (integer_onep (op1)
13248 && integer_zerop (op2)
13249 /* If we try to convert OP0 to our type, the
13250 call to fold will try to move the conversion inside
13251 a COND, which will recurse. In that case, the COND_EXPR
13252 is probably the best choice, so leave it alone. */
13253 && type == TREE_TYPE (arg0))
13254 return pedantic_non_lvalue_loc (loc, arg0);
13256 /* Convert A ? 0 : 1 to !A. This prefers the use of NOT_EXPR
13257 over COND_EXPR in cases such as floating point comparisons. */
13258 if (integer_zerop (op1)
13259 && integer_onep (op2)
13260 && truth_value_p (TREE_CODE (arg0)))
13261 return pedantic_non_lvalue_loc (loc,
13262 fold_convert_loc (loc, type,
13263 invert_truthvalue_loc (loc,
13264 arg0)));
13266 /* A < 0 ? <sign bit of A> : 0 is simply (A & <sign bit of A>). */
13267 if (TREE_CODE (arg0) == LT_EXPR
13268 && integer_zerop (TREE_OPERAND (arg0, 1))
13269 && integer_zerop (op2)
13270 && (tem = sign_bit_p (TREE_OPERAND (arg0, 0), arg1)))
13272 /* sign_bit_p only checks ARG1 bits within A's precision.
13273 If <sign bit of A> has wider type than A, bits outside
13274 of A's precision in <sign bit of A> need to be checked.
13275 If they are all 0, this optimization needs to be done
13276 in unsigned A's type, if they are all 1 in signed A's type,
13277 otherwise this can't be done. */
13278 if (TYPE_PRECISION (TREE_TYPE (tem))
13279 < TYPE_PRECISION (TREE_TYPE (arg1))
13280 && TYPE_PRECISION (TREE_TYPE (tem))
13281 < TYPE_PRECISION (type))
13283 unsigned HOST_WIDE_INT mask_lo;
13284 HOST_WIDE_INT mask_hi;
13285 int inner_width, outer_width;
13286 tree tem_type;
13288 inner_width = TYPE_PRECISION (TREE_TYPE (tem));
13289 outer_width = TYPE_PRECISION (TREE_TYPE (arg1));
13290 if (outer_width > TYPE_PRECISION (type))
13291 outer_width = TYPE_PRECISION (type);
13293 if (outer_width > HOST_BITS_PER_WIDE_INT)
13295 mask_hi = ((unsigned HOST_WIDE_INT) -1
13296 >> (2 * HOST_BITS_PER_WIDE_INT - outer_width));
13297 mask_lo = -1;
13299 else
13301 mask_hi = 0;
13302 mask_lo = ((unsigned HOST_WIDE_INT) -1
13303 >> (HOST_BITS_PER_WIDE_INT - outer_width));
13305 if (inner_width > HOST_BITS_PER_WIDE_INT)
13307 mask_hi &= ~((unsigned HOST_WIDE_INT) -1
13308 >> (HOST_BITS_PER_WIDE_INT - inner_width));
13309 mask_lo = 0;
13311 else
13312 mask_lo &= ~((unsigned HOST_WIDE_INT) -1
13313 >> (HOST_BITS_PER_WIDE_INT - inner_width));
13315 if ((TREE_INT_CST_HIGH (arg1) & mask_hi) == mask_hi
13316 && (TREE_INT_CST_LOW (arg1) & mask_lo) == mask_lo)
13318 tem_type = signed_type_for (TREE_TYPE (tem));
13319 tem = fold_convert_loc (loc, tem_type, tem);
13321 else if ((TREE_INT_CST_HIGH (arg1) & mask_hi) == 0
13322 && (TREE_INT_CST_LOW (arg1) & mask_lo) == 0)
13324 tem_type = unsigned_type_for (TREE_TYPE (tem));
13325 tem = fold_convert_loc (loc, tem_type, tem);
13327 else
13328 tem = NULL;
13331 if (tem)
13332 return
13333 fold_convert_loc (loc, type,
13334 fold_build2_loc (loc, BIT_AND_EXPR,
13335 TREE_TYPE (tem), tem,
13336 fold_convert_loc (loc,
13337 TREE_TYPE (tem),
13338 arg1)));
13341 /* (A >> N) & 1 ? (1 << N) : 0 is simply A & (1 << N). A & 1 was
13342 already handled above. */
13343 if (TREE_CODE (arg0) == BIT_AND_EXPR
13344 && integer_onep (TREE_OPERAND (arg0, 1))
13345 && integer_zerop (op2)
13346 && integer_pow2p (arg1))
13348 tree tem = TREE_OPERAND (arg0, 0);
13349 STRIP_NOPS (tem);
13350 if (TREE_CODE (tem) == RSHIFT_EXPR
13351 && TREE_CODE (TREE_OPERAND (tem, 1)) == INTEGER_CST
13352 && (unsigned HOST_WIDE_INT) tree_log2 (arg1) ==
13353 TREE_INT_CST_LOW (TREE_OPERAND (tem, 1)))
13354 return fold_build2_loc (loc, BIT_AND_EXPR, type,
13355 TREE_OPERAND (tem, 0), arg1);
13358 /* A & N ? N : 0 is simply A & N if N is a power of two. This
13359 is probably obsolete because the first operand should be a
13360 truth value (that's why we have the two cases above), but let's
13361 leave it in until we can confirm this for all front-ends. */
13362 if (integer_zerop (op2)
13363 && TREE_CODE (arg0) == NE_EXPR
13364 && integer_zerop (TREE_OPERAND (arg0, 1))
13365 && integer_pow2p (arg1)
13366 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_AND_EXPR
13367 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
13368 arg1, OEP_ONLY_CONST))
13369 return pedantic_non_lvalue_loc (loc,
13370 fold_convert_loc (loc, type,
13371 TREE_OPERAND (arg0, 0)));
13373 /* Convert A ? B : 0 into A && B if A and B are truth values. */
13374 if (integer_zerop (op2)
13375 && truth_value_p (TREE_CODE (arg0))
13376 && truth_value_p (TREE_CODE (arg1)))
13377 return fold_build2_loc (loc, TRUTH_ANDIF_EXPR, type,
13378 fold_convert_loc (loc, type, arg0),
13379 arg1);
13381 /* Convert A ? B : 1 into !A || B if A and B are truth values. */
13382 if (integer_onep (op2)
13383 && truth_value_p (TREE_CODE (arg0))
13384 && truth_value_p (TREE_CODE (arg1)))
13386 /* Only perform transformation if ARG0 is easily inverted. */
13387 tem = fold_truth_not_expr (loc, arg0);
13388 if (tem)
13389 return fold_build2_loc (loc, TRUTH_ORIF_EXPR, type,
13390 fold_convert_loc (loc, type, tem),
13391 arg1);
13394 /* Convert A ? 0 : B into !A && B if A and B are truth values. */
13395 if (integer_zerop (arg1)
13396 && truth_value_p (TREE_CODE (arg0))
13397 && truth_value_p (TREE_CODE (op2)))
13399 /* Only perform transformation if ARG0 is easily inverted. */
13400 tem = fold_truth_not_expr (loc, arg0);
13401 if (tem)
13402 return fold_build2_loc (loc, TRUTH_ANDIF_EXPR, type,
13403 fold_convert_loc (loc, type, tem),
13404 op2);
13407 /* Convert A ? 1 : B into A || B if A and B are truth values. */
13408 if (integer_onep (arg1)
13409 && truth_value_p (TREE_CODE (arg0))
13410 && truth_value_p (TREE_CODE (op2)))
13411 return fold_build2_loc (loc, TRUTH_ORIF_EXPR, type,
13412 fold_convert_loc (loc, type, arg0),
13413 op2);
13415 return NULL_TREE;
13417 case CALL_EXPR:
13418 /* CALL_EXPRs used to be ternary exprs. Catch any mistaken uses
13419 of fold_ternary on them. */
13420 gcc_unreachable ();
13422 case BIT_FIELD_REF:
13423 if ((TREE_CODE (arg0) == VECTOR_CST
13424 || (TREE_CODE (arg0) == CONSTRUCTOR && TREE_CONSTANT (arg0)))
13425 && type == TREE_TYPE (TREE_TYPE (arg0)))
13427 unsigned HOST_WIDE_INT width = tree_low_cst (arg1, 1);
13428 unsigned HOST_WIDE_INT idx = tree_low_cst (op2, 1);
13430 if (width != 0
13431 && simple_cst_equal (arg1, TYPE_SIZE (type)) == 1
13432 && (idx % width) == 0
13433 && (idx = idx / width)
13434 < TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0)))
13436 tree elements = NULL_TREE;
13438 if (TREE_CODE (arg0) == VECTOR_CST)
13439 elements = TREE_VECTOR_CST_ELTS (arg0);
13440 else
13442 unsigned HOST_WIDE_INT idx;
13443 tree value;
13445 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (arg0), idx, value)
13446 elements = tree_cons (NULL_TREE, value, elements);
13448 while (idx-- > 0 && elements)
13449 elements = TREE_CHAIN (elements);
13450 if (elements)
13451 return TREE_VALUE (elements);
13452 else
13453 return fold_convert_loc (loc, type, integer_zero_node);
13457 /* A bit-field-ref that referenced the full argument can be stripped. */
13458 if (INTEGRAL_TYPE_P (TREE_TYPE (arg0))
13459 && TYPE_PRECISION (TREE_TYPE (arg0)) == tree_low_cst (arg1, 1)
13460 && integer_zerop (op2))
13461 return fold_convert_loc (loc, type, arg0);
13463 return NULL_TREE;
13465 default:
13466 return NULL_TREE;
13467 } /* switch (code) */
13470 /* Perform constant folding and related simplification of EXPR.
13471 The related simplifications include x*1 => x, x*0 => 0, etc.,
13472 and application of the associative law.
13473 NOP_EXPR conversions may be removed freely (as long as we
13474 are careful not to change the type of the overall expression).
13475 We cannot simplify through a CONVERT_EXPR, FIX_EXPR or FLOAT_EXPR,
13476 but we can constant-fold them if they have constant operands. */
13478 #ifdef ENABLE_FOLD_CHECKING
13479 # define fold(x) fold_1 (x)
13480 static tree fold_1 (tree);
13481 static
13482 #endif
13483 tree
13484 fold (tree expr)
13486 const tree t = expr;
13487 enum tree_code code = TREE_CODE (t);
13488 enum tree_code_class kind = TREE_CODE_CLASS (code);
13489 tree tem;
13490 location_t loc = EXPR_LOCATION (expr);
13492 /* Return right away if a constant. */
13493 if (kind == tcc_constant)
13494 return t;
13496 /* CALL_EXPR-like objects with variable numbers of operands are
13497 treated specially. */
13498 if (kind == tcc_vl_exp)
13500 if (code == CALL_EXPR)
13502 tem = fold_call_expr (loc, expr, false);
13503 return tem ? tem : expr;
13505 return expr;
13508 if (IS_EXPR_CODE_CLASS (kind))
13510 tree type = TREE_TYPE (t);
13511 tree op0, op1, op2;
13513 switch (TREE_CODE_LENGTH (code))
13515 case 1:
13516 op0 = TREE_OPERAND (t, 0);
13517 tem = fold_unary_loc (loc, code, type, op0);
13518 return tem ? tem : expr;
13519 case 2:
13520 op0 = TREE_OPERAND (t, 0);
13521 op1 = TREE_OPERAND (t, 1);
13522 tem = fold_binary_loc (loc, code, type, op0, op1);
13523 return tem ? tem : expr;
13524 case 3:
13525 op0 = TREE_OPERAND (t, 0);
13526 op1 = TREE_OPERAND (t, 1);
13527 op2 = TREE_OPERAND (t, 2);
13528 tem = fold_ternary_loc (loc, code, type, op0, op1, op2);
13529 return tem ? tem : expr;
13530 default:
13531 break;
13535 switch (code)
13537 case ARRAY_REF:
13539 tree op0 = TREE_OPERAND (t, 0);
13540 tree op1 = TREE_OPERAND (t, 1);
13542 if (TREE_CODE (op1) == INTEGER_CST
13543 && TREE_CODE (op0) == CONSTRUCTOR
13544 && ! type_contains_placeholder_p (TREE_TYPE (op0)))
13546 VEC(constructor_elt,gc) *elts = CONSTRUCTOR_ELTS (op0);
13547 unsigned HOST_WIDE_INT end = VEC_length (constructor_elt, elts);
13548 unsigned HOST_WIDE_INT begin = 0;
13550 /* Find a matching index by means of a binary search. */
13551 while (begin != end)
13553 unsigned HOST_WIDE_INT middle = (begin + end) / 2;
13554 tree index = VEC_index (constructor_elt, elts, middle)->index;
13556 if (TREE_CODE (index) == INTEGER_CST
13557 && tree_int_cst_lt (index, op1))
13558 begin = middle + 1;
13559 else if (TREE_CODE (index) == INTEGER_CST
13560 && tree_int_cst_lt (op1, index))
13561 end = middle;
13562 else if (TREE_CODE (index) == RANGE_EXPR
13563 && tree_int_cst_lt (TREE_OPERAND (index, 1), op1))
13564 begin = middle + 1;
13565 else if (TREE_CODE (index) == RANGE_EXPR
13566 && tree_int_cst_lt (op1, TREE_OPERAND (index, 0)))
13567 end = middle;
13568 else
13569 return VEC_index (constructor_elt, elts, middle)->value;
13573 return t;
13576 case CONST_DECL:
13577 return fold (DECL_INITIAL (t));
13579 default:
13580 return t;
13581 } /* switch (code) */
13584 #ifdef ENABLE_FOLD_CHECKING
13585 #undef fold
13587 static void fold_checksum_tree (const_tree, struct md5_ctx *, htab_t);
13588 static void fold_check_failed (const_tree, const_tree);
13589 void print_fold_checksum (const_tree);
13591 /* When --enable-checking=fold, compute a digest of expr before
13592 and after actual fold call to see if fold did not accidentally
13593 change original expr. */
13595 tree
13596 fold (tree expr)
13598 tree ret;
13599 struct md5_ctx ctx;
13600 unsigned char checksum_before[16], checksum_after[16];
13601 htab_t ht;
13603 ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
13604 md5_init_ctx (&ctx);
13605 fold_checksum_tree (expr, &ctx, ht);
13606 md5_finish_ctx (&ctx, checksum_before);
13607 htab_empty (ht);
13609 ret = fold_1 (expr);
13611 md5_init_ctx (&ctx);
13612 fold_checksum_tree (expr, &ctx, ht);
13613 md5_finish_ctx (&ctx, checksum_after);
13614 htab_delete (ht);
13616 if (memcmp (checksum_before, checksum_after, 16))
13617 fold_check_failed (expr, ret);
13619 return ret;
13622 void
13623 print_fold_checksum (const_tree expr)
13625 struct md5_ctx ctx;
13626 unsigned char checksum[16], cnt;
13627 htab_t ht;
13629 ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
13630 md5_init_ctx (&ctx);
13631 fold_checksum_tree (expr, &ctx, ht);
13632 md5_finish_ctx (&ctx, checksum);
13633 htab_delete (ht);
13634 for (cnt = 0; cnt < 16; ++cnt)
13635 fprintf (stderr, "%02x", checksum[cnt]);
13636 putc ('\n', stderr);
13639 static void
13640 fold_check_failed (const_tree expr ATTRIBUTE_UNUSED, const_tree ret ATTRIBUTE_UNUSED)
13642 internal_error ("fold check: original tree changed by fold");
13645 static void
13646 fold_checksum_tree (const_tree expr, struct md5_ctx *ctx, htab_t ht)
13648 const void **slot;
13649 enum tree_code code;
13650 union tree_node buf;
13651 int i, len;
13653 recursive_label:
13655 gcc_assert ((sizeof (struct tree_exp) + 5 * sizeof (tree)
13656 <= sizeof (struct tree_function_decl))
13657 && sizeof (struct tree_type) <= sizeof (struct tree_function_decl));
13658 if (expr == NULL)
13659 return;
13660 slot = (const void **) htab_find_slot (ht, expr, INSERT);
13661 if (*slot != NULL)
13662 return;
13663 *slot = expr;
13664 code = TREE_CODE (expr);
13665 if (TREE_CODE_CLASS (code) == tcc_declaration
13666 && DECL_ASSEMBLER_NAME_SET_P (expr))
13668 /* Allow DECL_ASSEMBLER_NAME to be modified. */
13669 memcpy ((char *) &buf, expr, tree_size (expr));
13670 SET_DECL_ASSEMBLER_NAME ((tree)&buf, NULL);
13671 expr = (tree) &buf;
13673 else if (TREE_CODE_CLASS (code) == tcc_type
13674 && (TYPE_POINTER_TO (expr)
13675 || TYPE_REFERENCE_TO (expr)
13676 || TYPE_CACHED_VALUES_P (expr)
13677 || TYPE_CONTAINS_PLACEHOLDER_INTERNAL (expr)
13678 || TYPE_NEXT_VARIANT (expr)))
13680 /* Allow these fields to be modified. */
13681 tree tmp;
13682 memcpy ((char *) &buf, expr, tree_size (expr));
13683 expr = tmp = (tree) &buf;
13684 TYPE_CONTAINS_PLACEHOLDER_INTERNAL (tmp) = 0;
13685 TYPE_POINTER_TO (tmp) = NULL;
13686 TYPE_REFERENCE_TO (tmp) = NULL;
13687 TYPE_NEXT_VARIANT (tmp) = NULL;
13688 if (TYPE_CACHED_VALUES_P (tmp))
13690 TYPE_CACHED_VALUES_P (tmp) = 0;
13691 TYPE_CACHED_VALUES (tmp) = NULL;
13694 md5_process_bytes (expr, tree_size (expr), ctx);
13695 fold_checksum_tree (TREE_TYPE (expr), ctx, ht);
13696 if (TREE_CODE_CLASS (code) != tcc_type
13697 && TREE_CODE_CLASS (code) != tcc_declaration
13698 && code != TREE_LIST
13699 && code != SSA_NAME)
13700 fold_checksum_tree (TREE_CHAIN (expr), ctx, ht);
13701 switch (TREE_CODE_CLASS (code))
13703 case tcc_constant:
13704 switch (code)
13706 case STRING_CST:
13707 md5_process_bytes (TREE_STRING_POINTER (expr),
13708 TREE_STRING_LENGTH (expr), ctx);
13709 break;
13710 case COMPLEX_CST:
13711 fold_checksum_tree (TREE_REALPART (expr), ctx, ht);
13712 fold_checksum_tree (TREE_IMAGPART (expr), ctx, ht);
13713 break;
13714 case VECTOR_CST:
13715 fold_checksum_tree (TREE_VECTOR_CST_ELTS (expr), ctx, ht);
13716 break;
13717 default:
13718 break;
13720 break;
13721 case tcc_exceptional:
13722 switch (code)
13724 case TREE_LIST:
13725 fold_checksum_tree (TREE_PURPOSE (expr), ctx, ht);
13726 fold_checksum_tree (TREE_VALUE (expr), ctx, ht);
13727 expr = TREE_CHAIN (expr);
13728 goto recursive_label;
13729 break;
13730 case TREE_VEC:
13731 for (i = 0; i < TREE_VEC_LENGTH (expr); ++i)
13732 fold_checksum_tree (TREE_VEC_ELT (expr, i), ctx, ht);
13733 break;
13734 default:
13735 break;
13737 break;
13738 case tcc_expression:
13739 case tcc_reference:
13740 case tcc_comparison:
13741 case tcc_unary:
13742 case tcc_binary:
13743 case tcc_statement:
13744 case tcc_vl_exp:
13745 len = TREE_OPERAND_LENGTH (expr);
13746 for (i = 0; i < len; ++i)
13747 fold_checksum_tree (TREE_OPERAND (expr, i), ctx, ht);
13748 break;
13749 case tcc_declaration:
13750 fold_checksum_tree (DECL_NAME (expr), ctx, ht);
13751 fold_checksum_tree (DECL_CONTEXT (expr), ctx, ht);
13752 if (CODE_CONTAINS_STRUCT (TREE_CODE (expr), TS_DECL_COMMON))
13754 fold_checksum_tree (DECL_SIZE (expr), ctx, ht);
13755 fold_checksum_tree (DECL_SIZE_UNIT (expr), ctx, ht);
13756 fold_checksum_tree (DECL_INITIAL (expr), ctx, ht);
13757 fold_checksum_tree (DECL_ABSTRACT_ORIGIN (expr), ctx, ht);
13758 fold_checksum_tree (DECL_ATTRIBUTES (expr), ctx, ht);
13760 if (CODE_CONTAINS_STRUCT (TREE_CODE (expr), TS_DECL_WITH_VIS))
13761 fold_checksum_tree (DECL_SECTION_NAME (expr), ctx, ht);
13763 if (CODE_CONTAINS_STRUCT (TREE_CODE (expr), TS_DECL_NON_COMMON))
13765 fold_checksum_tree (DECL_VINDEX (expr), ctx, ht);
13766 fold_checksum_tree (DECL_RESULT_FLD (expr), ctx, ht);
13767 fold_checksum_tree (DECL_ARGUMENT_FLD (expr), ctx, ht);
13769 break;
13770 case tcc_type:
13771 if (TREE_CODE (expr) == ENUMERAL_TYPE)
13772 fold_checksum_tree (TYPE_VALUES (expr), ctx, ht);
13773 fold_checksum_tree (TYPE_SIZE (expr), ctx, ht);
13774 fold_checksum_tree (TYPE_SIZE_UNIT (expr), ctx, ht);
13775 fold_checksum_tree (TYPE_ATTRIBUTES (expr), ctx, ht);
13776 fold_checksum_tree (TYPE_NAME (expr), ctx, ht);
13777 if (INTEGRAL_TYPE_P (expr)
13778 || SCALAR_FLOAT_TYPE_P (expr))
13780 fold_checksum_tree (TYPE_MIN_VALUE (expr), ctx, ht);
13781 fold_checksum_tree (TYPE_MAX_VALUE (expr), ctx, ht);
13783 fold_checksum_tree (TYPE_MAIN_VARIANT (expr), ctx, ht);
13784 if (TREE_CODE (expr) == RECORD_TYPE
13785 || TREE_CODE (expr) == UNION_TYPE
13786 || TREE_CODE (expr) == QUAL_UNION_TYPE)
13787 fold_checksum_tree (TYPE_BINFO (expr), ctx, ht);
13788 fold_checksum_tree (TYPE_CONTEXT (expr), ctx, ht);
13789 break;
13790 default:
13791 break;
13795 /* Helper function for outputting the checksum of a tree T. When
13796 debugging with gdb, you can "define mynext" to be "next" followed
13797 by "call debug_fold_checksum (op0)", then just trace down till the
13798 outputs differ. */
13800 DEBUG_FUNCTION void
13801 debug_fold_checksum (const_tree t)
13803 int i;
13804 unsigned char checksum[16];
13805 struct md5_ctx ctx;
13806 htab_t ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
13808 md5_init_ctx (&ctx);
13809 fold_checksum_tree (t, &ctx, ht);
13810 md5_finish_ctx (&ctx, checksum);
13811 htab_empty (ht);
13813 for (i = 0; i < 16; i++)
13814 fprintf (stderr, "%d ", checksum[i]);
13816 fprintf (stderr, "\n");
13819 #endif
13821 /* Fold a unary tree expression with code CODE of type TYPE with an
13822 operand OP0. LOC is the location of the resulting expression.
13823 Return a folded expression if successful. Otherwise, return a tree
13824 expression with code CODE of type TYPE with an operand OP0. */
13826 tree
13827 fold_build1_stat_loc (location_t loc,
13828 enum tree_code code, tree type, tree op0 MEM_STAT_DECL)
13830 tree tem;
13831 #ifdef ENABLE_FOLD_CHECKING
13832 unsigned char checksum_before[16], checksum_after[16];
13833 struct md5_ctx ctx;
13834 htab_t ht;
13836 ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
13837 md5_init_ctx (&ctx);
13838 fold_checksum_tree (op0, &ctx, ht);
13839 md5_finish_ctx (&ctx, checksum_before);
13840 htab_empty (ht);
13841 #endif
13843 tem = fold_unary_loc (loc, code, type, op0);
13844 if (!tem)
13846 tem = build1_stat (code, type, op0 PASS_MEM_STAT);
13847 SET_EXPR_LOCATION (tem, loc);
13850 #ifdef ENABLE_FOLD_CHECKING
13851 md5_init_ctx (&ctx);
13852 fold_checksum_tree (op0, &ctx, ht);
13853 md5_finish_ctx (&ctx, checksum_after);
13854 htab_delete (ht);
13856 if (memcmp (checksum_before, checksum_after, 16))
13857 fold_check_failed (op0, tem);
13858 #endif
13859 return tem;
13862 /* Fold a binary tree expression with code CODE of type TYPE with
13863 operands OP0 and OP1. LOC is the location of the resulting
13864 expression. Return a folded expression if successful. Otherwise,
13865 return a tree expression with code CODE of type TYPE with operands
13866 OP0 and OP1. */
13868 tree
13869 fold_build2_stat_loc (location_t loc,
13870 enum tree_code code, tree type, tree op0, tree op1
13871 MEM_STAT_DECL)
13873 tree tem;
13874 #ifdef ENABLE_FOLD_CHECKING
13875 unsigned char checksum_before_op0[16],
13876 checksum_before_op1[16],
13877 checksum_after_op0[16],
13878 checksum_after_op1[16];
13879 struct md5_ctx ctx;
13880 htab_t ht;
13882 ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
13883 md5_init_ctx (&ctx);
13884 fold_checksum_tree (op0, &ctx, ht);
13885 md5_finish_ctx (&ctx, checksum_before_op0);
13886 htab_empty (ht);
13888 md5_init_ctx (&ctx);
13889 fold_checksum_tree (op1, &ctx, ht);
13890 md5_finish_ctx (&ctx, checksum_before_op1);
13891 htab_empty (ht);
13892 #endif
13894 tem = fold_binary_loc (loc, code, type, op0, op1);
13895 if (!tem)
13897 tem = build2_stat (code, type, op0, op1 PASS_MEM_STAT);
13898 SET_EXPR_LOCATION (tem, loc);
13901 #ifdef ENABLE_FOLD_CHECKING
13902 md5_init_ctx (&ctx);
13903 fold_checksum_tree (op0, &ctx, ht);
13904 md5_finish_ctx (&ctx, checksum_after_op0);
13905 htab_empty (ht);
13907 if (memcmp (checksum_before_op0, checksum_after_op0, 16))
13908 fold_check_failed (op0, tem);
13910 md5_init_ctx (&ctx);
13911 fold_checksum_tree (op1, &ctx, ht);
13912 md5_finish_ctx (&ctx, checksum_after_op1);
13913 htab_delete (ht);
13915 if (memcmp (checksum_before_op1, checksum_after_op1, 16))
13916 fold_check_failed (op1, tem);
13917 #endif
13918 return tem;
13921 /* Fold a ternary tree expression with code CODE of type TYPE with
13922 operands OP0, OP1, and OP2. Return a folded expression if
13923 successful. Otherwise, return a tree expression with code CODE of
13924 type TYPE with operands OP0, OP1, and OP2. */
13926 tree
13927 fold_build3_stat_loc (location_t loc, enum tree_code code, tree type,
13928 tree op0, tree op1, tree op2 MEM_STAT_DECL)
13930 tree tem;
13931 #ifdef ENABLE_FOLD_CHECKING
13932 unsigned char checksum_before_op0[16],
13933 checksum_before_op1[16],
13934 checksum_before_op2[16],
13935 checksum_after_op0[16],
13936 checksum_after_op1[16],
13937 checksum_after_op2[16];
13938 struct md5_ctx ctx;
13939 htab_t ht;
13941 ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
13942 md5_init_ctx (&ctx);
13943 fold_checksum_tree (op0, &ctx, ht);
13944 md5_finish_ctx (&ctx, checksum_before_op0);
13945 htab_empty (ht);
13947 md5_init_ctx (&ctx);
13948 fold_checksum_tree (op1, &ctx, ht);
13949 md5_finish_ctx (&ctx, checksum_before_op1);
13950 htab_empty (ht);
13952 md5_init_ctx (&ctx);
13953 fold_checksum_tree (op2, &ctx, ht);
13954 md5_finish_ctx (&ctx, checksum_before_op2);
13955 htab_empty (ht);
13956 #endif
13958 gcc_assert (TREE_CODE_CLASS (code) != tcc_vl_exp);
13959 tem = fold_ternary_loc (loc, code, type, op0, op1, op2);
13960 if (!tem)
13962 tem = build3_stat (code, type, op0, op1, op2 PASS_MEM_STAT);
13963 SET_EXPR_LOCATION (tem, loc);
13966 #ifdef ENABLE_FOLD_CHECKING
13967 md5_init_ctx (&ctx);
13968 fold_checksum_tree (op0, &ctx, ht);
13969 md5_finish_ctx (&ctx, checksum_after_op0);
13970 htab_empty (ht);
13972 if (memcmp (checksum_before_op0, checksum_after_op0, 16))
13973 fold_check_failed (op0, tem);
13975 md5_init_ctx (&ctx);
13976 fold_checksum_tree (op1, &ctx, ht);
13977 md5_finish_ctx (&ctx, checksum_after_op1);
13978 htab_empty (ht);
13980 if (memcmp (checksum_before_op1, checksum_after_op1, 16))
13981 fold_check_failed (op1, tem);
13983 md5_init_ctx (&ctx);
13984 fold_checksum_tree (op2, &ctx, ht);
13985 md5_finish_ctx (&ctx, checksum_after_op2);
13986 htab_delete (ht);
13988 if (memcmp (checksum_before_op2, checksum_after_op2, 16))
13989 fold_check_failed (op2, tem);
13990 #endif
13991 return tem;
13994 /* Fold a CALL_EXPR expression of type TYPE with operands FN and NARGS
13995 arguments in ARGARRAY, and a null static chain.
13996 Return a folded expression if successful. Otherwise, return a CALL_EXPR
13997 of type TYPE from the given operands as constructed by build_call_array. */
13999 tree
14000 fold_build_call_array_loc (location_t loc, tree type, tree fn,
14001 int nargs, tree *argarray)
14003 tree tem;
14004 #ifdef ENABLE_FOLD_CHECKING
14005 unsigned char checksum_before_fn[16],
14006 checksum_before_arglist[16],
14007 checksum_after_fn[16],
14008 checksum_after_arglist[16];
14009 struct md5_ctx ctx;
14010 htab_t ht;
14011 int i;
14013 ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
14014 md5_init_ctx (&ctx);
14015 fold_checksum_tree (fn, &ctx, ht);
14016 md5_finish_ctx (&ctx, checksum_before_fn);
14017 htab_empty (ht);
14019 md5_init_ctx (&ctx);
14020 for (i = 0; i < nargs; i++)
14021 fold_checksum_tree (argarray[i], &ctx, ht);
14022 md5_finish_ctx (&ctx, checksum_before_arglist);
14023 htab_empty (ht);
14024 #endif
14026 tem = fold_builtin_call_array (loc, type, fn, nargs, argarray);
14028 #ifdef ENABLE_FOLD_CHECKING
14029 md5_init_ctx (&ctx);
14030 fold_checksum_tree (fn, &ctx, ht);
14031 md5_finish_ctx (&ctx, checksum_after_fn);
14032 htab_empty (ht);
14034 if (memcmp (checksum_before_fn, checksum_after_fn, 16))
14035 fold_check_failed (fn, tem);
14037 md5_init_ctx (&ctx);
14038 for (i = 0; i < nargs; i++)
14039 fold_checksum_tree (argarray[i], &ctx, ht);
14040 md5_finish_ctx (&ctx, checksum_after_arglist);
14041 htab_delete (ht);
14043 if (memcmp (checksum_before_arglist, checksum_after_arglist, 16))
14044 fold_check_failed (NULL_TREE, tem);
14045 #endif
14046 return tem;
14049 /* Perform constant folding and related simplification of initializer
14050 expression EXPR. These behave identically to "fold_buildN" but ignore
14051 potential run-time traps and exceptions that fold must preserve. */
14053 #define START_FOLD_INIT \
14054 int saved_signaling_nans = flag_signaling_nans;\
14055 int saved_trapping_math = flag_trapping_math;\
14056 int saved_rounding_math = flag_rounding_math;\
14057 int saved_trapv = flag_trapv;\
14058 int saved_folding_initializer = folding_initializer;\
14059 flag_signaling_nans = 0;\
14060 flag_trapping_math = 0;\
14061 flag_rounding_math = 0;\
14062 flag_trapv = 0;\
14063 folding_initializer = 1;
14065 #define END_FOLD_INIT \
14066 flag_signaling_nans = saved_signaling_nans;\
14067 flag_trapping_math = saved_trapping_math;\
14068 flag_rounding_math = saved_rounding_math;\
14069 flag_trapv = saved_trapv;\
14070 folding_initializer = saved_folding_initializer;
14072 tree
14073 fold_build1_initializer_loc (location_t loc, enum tree_code code,
14074 tree type, tree op)
14076 tree result;
14077 START_FOLD_INIT;
14079 result = fold_build1_loc (loc, code, type, op);
14081 END_FOLD_INIT;
14082 return result;
14085 tree
14086 fold_build2_initializer_loc (location_t loc, enum tree_code code,
14087 tree type, tree op0, tree op1)
14089 tree result;
14090 START_FOLD_INIT;
14092 result = fold_build2_loc (loc, code, type, op0, op1);
14094 END_FOLD_INIT;
14095 return result;
14098 tree
14099 fold_build3_initializer_loc (location_t loc, enum tree_code code,
14100 tree type, tree op0, tree op1, tree op2)
14102 tree result;
14103 START_FOLD_INIT;
14105 result = fold_build3_loc (loc, code, type, op0, op1, op2);
14107 END_FOLD_INIT;
14108 return result;
14111 tree
14112 fold_build_call_array_initializer_loc (location_t loc, tree type, tree fn,
14113 int nargs, tree *argarray)
14115 tree result;
14116 START_FOLD_INIT;
14118 result = fold_build_call_array_loc (loc, type, fn, nargs, argarray);
14120 END_FOLD_INIT;
14121 return result;
14124 #undef START_FOLD_INIT
14125 #undef END_FOLD_INIT
14127 /* Determine if first argument is a multiple of second argument. Return 0 if
14128 it is not, or we cannot easily determined it to be.
14130 An example of the sort of thing we care about (at this point; this routine
14131 could surely be made more general, and expanded to do what the *_DIV_EXPR's
14132 fold cases do now) is discovering that
14134 SAVE_EXPR (I) * SAVE_EXPR (J * 8)
14136 is a multiple of
14138 SAVE_EXPR (J * 8)
14140 when we know that the two SAVE_EXPR (J * 8) nodes are the same node.
14142 This code also handles discovering that
14144 SAVE_EXPR (I) * SAVE_EXPR (J * 8)
14146 is a multiple of 8 so we don't have to worry about dealing with a
14147 possible remainder.
14149 Note that we *look* inside a SAVE_EXPR only to determine how it was
14150 calculated; it is not safe for fold to do much of anything else with the
14151 internals of a SAVE_EXPR, since it cannot know when it will be evaluated
14152 at run time. For example, the latter example above *cannot* be implemented
14153 as SAVE_EXPR (I) * J or any variant thereof, since the value of J at
14154 evaluation time of the original SAVE_EXPR is not necessarily the same at
14155 the time the new expression is evaluated. The only optimization of this
14156 sort that would be valid is changing
14158 SAVE_EXPR (I) * SAVE_EXPR (SAVE_EXPR (J) * 8)
14160 divided by 8 to
14162 SAVE_EXPR (I) * SAVE_EXPR (J)
14164 (where the same SAVE_EXPR (J) is used in the original and the
14165 transformed version). */
14168 multiple_of_p (tree type, const_tree top, const_tree bottom)
14170 if (operand_equal_p (top, bottom, 0))
14171 return 1;
14173 if (TREE_CODE (type) != INTEGER_TYPE)
14174 return 0;
14176 switch (TREE_CODE (top))
14178 case BIT_AND_EXPR:
14179 /* Bitwise and provides a power of two multiple. If the mask is
14180 a multiple of BOTTOM then TOP is a multiple of BOTTOM. */
14181 if (!integer_pow2p (bottom))
14182 return 0;
14183 /* FALLTHRU */
14185 case MULT_EXPR:
14186 return (multiple_of_p (type, TREE_OPERAND (top, 0), bottom)
14187 || multiple_of_p (type, TREE_OPERAND (top, 1), bottom));
14189 case PLUS_EXPR:
14190 case MINUS_EXPR:
14191 return (multiple_of_p (type, TREE_OPERAND (top, 0), bottom)
14192 && multiple_of_p (type, TREE_OPERAND (top, 1), bottom));
14194 case LSHIFT_EXPR:
14195 if (TREE_CODE (TREE_OPERAND (top, 1)) == INTEGER_CST)
14197 tree op1, t1;
14199 op1 = TREE_OPERAND (top, 1);
14200 /* const_binop may not detect overflow correctly,
14201 so check for it explicitly here. */
14202 if (TYPE_PRECISION (TREE_TYPE (size_one_node))
14203 > TREE_INT_CST_LOW (op1)
14204 && TREE_INT_CST_HIGH (op1) == 0
14205 && 0 != (t1 = fold_convert (type,
14206 const_binop (LSHIFT_EXPR,
14207 size_one_node,
14208 op1)))
14209 && !TREE_OVERFLOW (t1))
14210 return multiple_of_p (type, t1, bottom);
14212 return 0;
14214 case NOP_EXPR:
14215 /* Can't handle conversions from non-integral or wider integral type. */
14216 if ((TREE_CODE (TREE_TYPE (TREE_OPERAND (top, 0))) != INTEGER_TYPE)
14217 || (TYPE_PRECISION (type)
14218 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (top, 0)))))
14219 return 0;
14221 /* .. fall through ... */
14223 case SAVE_EXPR:
14224 return multiple_of_p (type, TREE_OPERAND (top, 0), bottom);
14226 case COND_EXPR:
14227 return (multiple_of_p (type, TREE_OPERAND (top, 1), bottom)
14228 && multiple_of_p (type, TREE_OPERAND (top, 2), bottom));
14230 case INTEGER_CST:
14231 if (TREE_CODE (bottom) != INTEGER_CST
14232 || integer_zerop (bottom)
14233 || (TYPE_UNSIGNED (type)
14234 && (tree_int_cst_sgn (top) < 0
14235 || tree_int_cst_sgn (bottom) < 0)))
14236 return 0;
14237 return integer_zerop (int_const_binop (TRUNC_MOD_EXPR,
14238 top, bottom, 0));
14240 default:
14241 return 0;
14245 /* Return true if CODE or TYPE is known to be non-negative. */
14247 static bool
14248 tree_simple_nonnegative_warnv_p (enum tree_code code, tree type)
14250 if ((TYPE_PRECISION (type) != 1 || TYPE_UNSIGNED (type))
14251 && truth_value_p (code))
14252 /* Truth values evaluate to 0 or 1, which is nonnegative unless we
14253 have a signed:1 type (where the value is -1 and 0). */
14254 return true;
14255 return false;
14258 /* Return true if (CODE OP0) is known to be non-negative. If the return
14259 value is based on the assumption that signed overflow is undefined,
14260 set *STRICT_OVERFLOW_P to true; otherwise, don't change
14261 *STRICT_OVERFLOW_P. */
14263 bool
14264 tree_unary_nonnegative_warnv_p (enum tree_code code, tree type, tree op0,
14265 bool *strict_overflow_p)
14267 if (TYPE_UNSIGNED (type))
14268 return true;
14270 switch (code)
14272 case ABS_EXPR:
14273 /* We can't return 1 if flag_wrapv is set because
14274 ABS_EXPR<INT_MIN> = INT_MIN. */
14275 if (!INTEGRAL_TYPE_P (type))
14276 return true;
14277 if (TYPE_OVERFLOW_UNDEFINED (type))
14279 *strict_overflow_p = true;
14280 return true;
14282 break;
14284 case NON_LVALUE_EXPR:
14285 case FLOAT_EXPR:
14286 case FIX_TRUNC_EXPR:
14287 return tree_expr_nonnegative_warnv_p (op0,
14288 strict_overflow_p);
14290 case NOP_EXPR:
14292 tree inner_type = TREE_TYPE (op0);
14293 tree outer_type = type;
14295 if (TREE_CODE (outer_type) == REAL_TYPE)
14297 if (TREE_CODE (inner_type) == REAL_TYPE)
14298 return tree_expr_nonnegative_warnv_p (op0,
14299 strict_overflow_p);
14300 if (TREE_CODE (inner_type) == INTEGER_TYPE)
14302 if (TYPE_UNSIGNED (inner_type))
14303 return true;
14304 return tree_expr_nonnegative_warnv_p (op0,
14305 strict_overflow_p);
14308 else if (TREE_CODE (outer_type) == INTEGER_TYPE)
14310 if (TREE_CODE (inner_type) == REAL_TYPE)
14311 return tree_expr_nonnegative_warnv_p (op0,
14312 strict_overflow_p);
14313 if (TREE_CODE (inner_type) == INTEGER_TYPE)
14314 return TYPE_PRECISION (inner_type) < TYPE_PRECISION (outer_type)
14315 && TYPE_UNSIGNED (inner_type);
14318 break;
14320 default:
14321 return tree_simple_nonnegative_warnv_p (code, type);
14324 /* We don't know sign of `t', so be conservative and return false. */
14325 return false;
14328 /* Return true if (CODE OP0 OP1) is known to be non-negative. If the return
14329 value is based on the assumption that signed overflow is undefined,
14330 set *STRICT_OVERFLOW_P to true; otherwise, don't change
14331 *STRICT_OVERFLOW_P. */
14333 bool
14334 tree_binary_nonnegative_warnv_p (enum tree_code code, tree type, tree op0,
14335 tree op1, bool *strict_overflow_p)
14337 if (TYPE_UNSIGNED (type))
14338 return true;
14340 switch (code)
14342 case POINTER_PLUS_EXPR:
14343 case PLUS_EXPR:
14344 if (FLOAT_TYPE_P (type))
14345 return (tree_expr_nonnegative_warnv_p (op0,
14346 strict_overflow_p)
14347 && tree_expr_nonnegative_warnv_p (op1,
14348 strict_overflow_p));
14350 /* zero_extend(x) + zero_extend(y) is non-negative if x and y are
14351 both unsigned and at least 2 bits shorter than the result. */
14352 if (TREE_CODE (type) == INTEGER_TYPE
14353 && TREE_CODE (op0) == NOP_EXPR
14354 && TREE_CODE (op1) == NOP_EXPR)
14356 tree inner1 = TREE_TYPE (TREE_OPERAND (op0, 0));
14357 tree inner2 = TREE_TYPE (TREE_OPERAND (op1, 0));
14358 if (TREE_CODE (inner1) == INTEGER_TYPE && TYPE_UNSIGNED (inner1)
14359 && TREE_CODE (inner2) == INTEGER_TYPE && TYPE_UNSIGNED (inner2))
14361 unsigned int prec = MAX (TYPE_PRECISION (inner1),
14362 TYPE_PRECISION (inner2)) + 1;
14363 return prec < TYPE_PRECISION (type);
14366 break;
14368 case MULT_EXPR:
14369 if (FLOAT_TYPE_P (type))
14371 /* x * x for floating point x is always non-negative. */
14372 if (operand_equal_p (op0, op1, 0))
14373 return true;
14374 return (tree_expr_nonnegative_warnv_p (op0,
14375 strict_overflow_p)
14376 && tree_expr_nonnegative_warnv_p (op1,
14377 strict_overflow_p));
14380 /* zero_extend(x) * zero_extend(y) is non-negative if x and y are
14381 both unsigned and their total bits is shorter than the result. */
14382 if (TREE_CODE (type) == INTEGER_TYPE
14383 && (TREE_CODE (op0) == NOP_EXPR || TREE_CODE (op0) == INTEGER_CST)
14384 && (TREE_CODE (op1) == NOP_EXPR || TREE_CODE (op1) == INTEGER_CST))
14386 tree inner0 = (TREE_CODE (op0) == NOP_EXPR)
14387 ? TREE_TYPE (TREE_OPERAND (op0, 0))
14388 : TREE_TYPE (op0);
14389 tree inner1 = (TREE_CODE (op1) == NOP_EXPR)
14390 ? TREE_TYPE (TREE_OPERAND (op1, 0))
14391 : TREE_TYPE (op1);
14393 bool unsigned0 = TYPE_UNSIGNED (inner0);
14394 bool unsigned1 = TYPE_UNSIGNED (inner1);
14396 if (TREE_CODE (op0) == INTEGER_CST)
14397 unsigned0 = unsigned0 || tree_int_cst_sgn (op0) >= 0;
14399 if (TREE_CODE (op1) == INTEGER_CST)
14400 unsigned1 = unsigned1 || tree_int_cst_sgn (op1) >= 0;
14402 if (TREE_CODE (inner0) == INTEGER_TYPE && unsigned0
14403 && TREE_CODE (inner1) == INTEGER_TYPE && unsigned1)
14405 unsigned int precision0 = (TREE_CODE (op0) == INTEGER_CST)
14406 ? tree_int_cst_min_precision (op0, /*unsignedp=*/true)
14407 : TYPE_PRECISION (inner0);
14409 unsigned int precision1 = (TREE_CODE (op1) == INTEGER_CST)
14410 ? tree_int_cst_min_precision (op1, /*unsignedp=*/true)
14411 : TYPE_PRECISION (inner1);
14413 return precision0 + precision1 < TYPE_PRECISION (type);
14416 return false;
14418 case BIT_AND_EXPR:
14419 case MAX_EXPR:
14420 return (tree_expr_nonnegative_warnv_p (op0,
14421 strict_overflow_p)
14422 || tree_expr_nonnegative_warnv_p (op1,
14423 strict_overflow_p));
14425 case BIT_IOR_EXPR:
14426 case BIT_XOR_EXPR:
14427 case MIN_EXPR:
14428 case RDIV_EXPR:
14429 case TRUNC_DIV_EXPR:
14430 case CEIL_DIV_EXPR:
14431 case FLOOR_DIV_EXPR:
14432 case ROUND_DIV_EXPR:
14433 return (tree_expr_nonnegative_warnv_p (op0,
14434 strict_overflow_p)
14435 && tree_expr_nonnegative_warnv_p (op1,
14436 strict_overflow_p));
14438 case TRUNC_MOD_EXPR:
14439 case CEIL_MOD_EXPR:
14440 case FLOOR_MOD_EXPR:
14441 case ROUND_MOD_EXPR:
14442 return tree_expr_nonnegative_warnv_p (op0,
14443 strict_overflow_p);
14444 default:
14445 return tree_simple_nonnegative_warnv_p (code, type);
14448 /* We don't know sign of `t', so be conservative and return false. */
14449 return false;
14452 /* Return true if T is known to be non-negative. If the return
14453 value is based on the assumption that signed overflow is undefined,
14454 set *STRICT_OVERFLOW_P to true; otherwise, don't change
14455 *STRICT_OVERFLOW_P. */
14457 bool
14458 tree_single_nonnegative_warnv_p (tree t, bool *strict_overflow_p)
14460 if (TYPE_UNSIGNED (TREE_TYPE (t)))
14461 return true;
14463 switch (TREE_CODE (t))
14465 case INTEGER_CST:
14466 return tree_int_cst_sgn (t) >= 0;
14468 case REAL_CST:
14469 return ! REAL_VALUE_NEGATIVE (TREE_REAL_CST (t));
14471 case FIXED_CST:
14472 return ! FIXED_VALUE_NEGATIVE (TREE_FIXED_CST (t));
14474 case COND_EXPR:
14475 return (tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 1),
14476 strict_overflow_p)
14477 && tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 2),
14478 strict_overflow_p));
14479 default:
14480 return tree_simple_nonnegative_warnv_p (TREE_CODE (t),
14481 TREE_TYPE (t));
14483 /* We don't know sign of `t', so be conservative and return false. */
14484 return false;
14487 /* Return true if T is known to be non-negative. If the return
14488 value is based on the assumption that signed overflow is undefined,
14489 set *STRICT_OVERFLOW_P to true; otherwise, don't change
14490 *STRICT_OVERFLOW_P. */
14492 bool
14493 tree_call_nonnegative_warnv_p (tree type, tree fndecl,
14494 tree arg0, tree arg1, bool *strict_overflow_p)
14496 if (fndecl && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL)
14497 switch (DECL_FUNCTION_CODE (fndecl))
14499 CASE_FLT_FN (BUILT_IN_ACOS):
14500 CASE_FLT_FN (BUILT_IN_ACOSH):
14501 CASE_FLT_FN (BUILT_IN_CABS):
14502 CASE_FLT_FN (BUILT_IN_COSH):
14503 CASE_FLT_FN (BUILT_IN_ERFC):
14504 CASE_FLT_FN (BUILT_IN_EXP):
14505 CASE_FLT_FN (BUILT_IN_EXP10):
14506 CASE_FLT_FN (BUILT_IN_EXP2):
14507 CASE_FLT_FN (BUILT_IN_FABS):
14508 CASE_FLT_FN (BUILT_IN_FDIM):
14509 CASE_FLT_FN (BUILT_IN_HYPOT):
14510 CASE_FLT_FN (BUILT_IN_POW10):
14511 CASE_INT_FN (BUILT_IN_FFS):
14512 CASE_INT_FN (BUILT_IN_PARITY):
14513 CASE_INT_FN (BUILT_IN_POPCOUNT):
14514 case BUILT_IN_BSWAP32:
14515 case BUILT_IN_BSWAP64:
14516 /* Always true. */
14517 return true;
14519 CASE_FLT_FN (BUILT_IN_SQRT):
14520 /* sqrt(-0.0) is -0.0. */
14521 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (type)))
14522 return true;
14523 return tree_expr_nonnegative_warnv_p (arg0,
14524 strict_overflow_p);
14526 CASE_FLT_FN (BUILT_IN_ASINH):
14527 CASE_FLT_FN (BUILT_IN_ATAN):
14528 CASE_FLT_FN (BUILT_IN_ATANH):
14529 CASE_FLT_FN (BUILT_IN_CBRT):
14530 CASE_FLT_FN (BUILT_IN_CEIL):
14531 CASE_FLT_FN (BUILT_IN_ERF):
14532 CASE_FLT_FN (BUILT_IN_EXPM1):
14533 CASE_FLT_FN (BUILT_IN_FLOOR):
14534 CASE_FLT_FN (BUILT_IN_FMOD):
14535 CASE_FLT_FN (BUILT_IN_FREXP):
14536 CASE_FLT_FN (BUILT_IN_LCEIL):
14537 CASE_FLT_FN (BUILT_IN_LDEXP):
14538 CASE_FLT_FN (BUILT_IN_LFLOOR):
14539 CASE_FLT_FN (BUILT_IN_LLCEIL):
14540 CASE_FLT_FN (BUILT_IN_LLFLOOR):
14541 CASE_FLT_FN (BUILT_IN_LLRINT):
14542 CASE_FLT_FN (BUILT_IN_LLROUND):
14543 CASE_FLT_FN (BUILT_IN_LRINT):
14544 CASE_FLT_FN (BUILT_IN_LROUND):
14545 CASE_FLT_FN (BUILT_IN_MODF):
14546 CASE_FLT_FN (BUILT_IN_NEARBYINT):
14547 CASE_FLT_FN (BUILT_IN_RINT):
14548 CASE_FLT_FN (BUILT_IN_ROUND):
14549 CASE_FLT_FN (BUILT_IN_SCALB):
14550 CASE_FLT_FN (BUILT_IN_SCALBLN):
14551 CASE_FLT_FN (BUILT_IN_SCALBN):
14552 CASE_FLT_FN (BUILT_IN_SIGNBIT):
14553 CASE_FLT_FN (BUILT_IN_SIGNIFICAND):
14554 CASE_FLT_FN (BUILT_IN_SINH):
14555 CASE_FLT_FN (BUILT_IN_TANH):
14556 CASE_FLT_FN (BUILT_IN_TRUNC):
14557 /* True if the 1st argument is nonnegative. */
14558 return tree_expr_nonnegative_warnv_p (arg0,
14559 strict_overflow_p);
14561 CASE_FLT_FN (BUILT_IN_FMAX):
14562 /* True if the 1st OR 2nd arguments are nonnegative. */
14563 return (tree_expr_nonnegative_warnv_p (arg0,
14564 strict_overflow_p)
14565 || (tree_expr_nonnegative_warnv_p (arg1,
14566 strict_overflow_p)));
14568 CASE_FLT_FN (BUILT_IN_FMIN):
14569 /* True if the 1st AND 2nd arguments are nonnegative. */
14570 return (tree_expr_nonnegative_warnv_p (arg0,
14571 strict_overflow_p)
14572 && (tree_expr_nonnegative_warnv_p (arg1,
14573 strict_overflow_p)));
14575 CASE_FLT_FN (BUILT_IN_COPYSIGN):
14576 /* True if the 2nd argument is nonnegative. */
14577 return tree_expr_nonnegative_warnv_p (arg1,
14578 strict_overflow_p);
14580 CASE_FLT_FN (BUILT_IN_POWI):
14581 /* True if the 1st argument is nonnegative or the second
14582 argument is an even integer. */
14583 if (TREE_CODE (arg1) == INTEGER_CST
14584 && (TREE_INT_CST_LOW (arg1) & 1) == 0)
14585 return true;
14586 return tree_expr_nonnegative_warnv_p (arg0,
14587 strict_overflow_p);
14589 CASE_FLT_FN (BUILT_IN_POW):
14590 /* True if the 1st argument is nonnegative or the second
14591 argument is an even integer valued real. */
14592 if (TREE_CODE (arg1) == REAL_CST)
14594 REAL_VALUE_TYPE c;
14595 HOST_WIDE_INT n;
14597 c = TREE_REAL_CST (arg1);
14598 n = real_to_integer (&c);
14599 if ((n & 1) == 0)
14601 REAL_VALUE_TYPE cint;
14602 real_from_integer (&cint, VOIDmode, n,
14603 n < 0 ? -1 : 0, 0);
14604 if (real_identical (&c, &cint))
14605 return true;
14608 return tree_expr_nonnegative_warnv_p (arg0,
14609 strict_overflow_p);
14611 default:
14612 break;
14614 return tree_simple_nonnegative_warnv_p (CALL_EXPR,
14615 type);
14618 /* Return true if T is known to be non-negative. If the return
14619 value is based on the assumption that signed overflow is undefined,
14620 set *STRICT_OVERFLOW_P to true; otherwise, don't change
14621 *STRICT_OVERFLOW_P. */
14623 bool
14624 tree_invalid_nonnegative_warnv_p (tree t, bool *strict_overflow_p)
14626 enum tree_code code = TREE_CODE (t);
14627 if (TYPE_UNSIGNED (TREE_TYPE (t)))
14628 return true;
14630 switch (code)
14632 case TARGET_EXPR:
14634 tree temp = TARGET_EXPR_SLOT (t);
14635 t = TARGET_EXPR_INITIAL (t);
14637 /* If the initializer is non-void, then it's a normal expression
14638 that will be assigned to the slot. */
14639 if (!VOID_TYPE_P (t))
14640 return tree_expr_nonnegative_warnv_p (t, strict_overflow_p);
14642 /* Otherwise, the initializer sets the slot in some way. One common
14643 way is an assignment statement at the end of the initializer. */
14644 while (1)
14646 if (TREE_CODE (t) == BIND_EXPR)
14647 t = expr_last (BIND_EXPR_BODY (t));
14648 else if (TREE_CODE (t) == TRY_FINALLY_EXPR
14649 || TREE_CODE (t) == TRY_CATCH_EXPR)
14650 t = expr_last (TREE_OPERAND (t, 0));
14651 else if (TREE_CODE (t) == STATEMENT_LIST)
14652 t = expr_last (t);
14653 else
14654 break;
14656 if (TREE_CODE (t) == MODIFY_EXPR
14657 && TREE_OPERAND (t, 0) == temp)
14658 return tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 1),
14659 strict_overflow_p);
14661 return false;
14664 case CALL_EXPR:
14666 tree arg0 = call_expr_nargs (t) > 0 ? CALL_EXPR_ARG (t, 0) : NULL_TREE;
14667 tree arg1 = call_expr_nargs (t) > 1 ? CALL_EXPR_ARG (t, 1) : NULL_TREE;
14669 return tree_call_nonnegative_warnv_p (TREE_TYPE (t),
14670 get_callee_fndecl (t),
14671 arg0,
14672 arg1,
14673 strict_overflow_p);
14675 case COMPOUND_EXPR:
14676 case MODIFY_EXPR:
14677 return tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 1),
14678 strict_overflow_p);
14679 case BIND_EXPR:
14680 return tree_expr_nonnegative_warnv_p (expr_last (TREE_OPERAND (t, 1)),
14681 strict_overflow_p);
14682 case SAVE_EXPR:
14683 return tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 0),
14684 strict_overflow_p);
14686 default:
14687 return tree_simple_nonnegative_warnv_p (TREE_CODE (t),
14688 TREE_TYPE (t));
14691 /* We don't know sign of `t', so be conservative and return false. */
14692 return false;
14695 /* Return true if T is known to be non-negative. If the return
14696 value is based on the assumption that signed overflow is undefined,
14697 set *STRICT_OVERFLOW_P to true; otherwise, don't change
14698 *STRICT_OVERFLOW_P. */
14700 bool
14701 tree_expr_nonnegative_warnv_p (tree t, bool *strict_overflow_p)
14703 enum tree_code code;
14704 if (t == error_mark_node)
14705 return false;
14707 code = TREE_CODE (t);
14708 switch (TREE_CODE_CLASS (code))
14710 case tcc_binary:
14711 case tcc_comparison:
14712 return tree_binary_nonnegative_warnv_p (TREE_CODE (t),
14713 TREE_TYPE (t),
14714 TREE_OPERAND (t, 0),
14715 TREE_OPERAND (t, 1),
14716 strict_overflow_p);
14718 case tcc_unary:
14719 return tree_unary_nonnegative_warnv_p (TREE_CODE (t),
14720 TREE_TYPE (t),
14721 TREE_OPERAND (t, 0),
14722 strict_overflow_p);
14724 case tcc_constant:
14725 case tcc_declaration:
14726 case tcc_reference:
14727 return tree_single_nonnegative_warnv_p (t, strict_overflow_p);
14729 default:
14730 break;
14733 switch (code)
14735 case TRUTH_AND_EXPR:
14736 case TRUTH_OR_EXPR:
14737 case TRUTH_XOR_EXPR:
14738 return tree_binary_nonnegative_warnv_p (TREE_CODE (t),
14739 TREE_TYPE (t),
14740 TREE_OPERAND (t, 0),
14741 TREE_OPERAND (t, 1),
14742 strict_overflow_p);
14743 case TRUTH_NOT_EXPR:
14744 return tree_unary_nonnegative_warnv_p (TREE_CODE (t),
14745 TREE_TYPE (t),
14746 TREE_OPERAND (t, 0),
14747 strict_overflow_p);
14749 case COND_EXPR:
14750 case CONSTRUCTOR:
14751 case OBJ_TYPE_REF:
14752 case ASSERT_EXPR:
14753 case ADDR_EXPR:
14754 case WITH_SIZE_EXPR:
14755 case SSA_NAME:
14756 return tree_single_nonnegative_warnv_p (t, strict_overflow_p);
14758 default:
14759 return tree_invalid_nonnegative_warnv_p (t, strict_overflow_p);
14763 /* Return true if `t' is known to be non-negative. Handle warnings
14764 about undefined signed overflow. */
14766 bool
14767 tree_expr_nonnegative_p (tree t)
14769 bool ret, strict_overflow_p;
14771 strict_overflow_p = false;
14772 ret = tree_expr_nonnegative_warnv_p (t, &strict_overflow_p);
14773 if (strict_overflow_p)
14774 fold_overflow_warning (("assuming signed overflow does not occur when "
14775 "determining that expression is always "
14776 "non-negative"),
14777 WARN_STRICT_OVERFLOW_MISC);
14778 return ret;
14782 /* Return true when (CODE OP0) is an address and is known to be nonzero.
14783 For floating point we further ensure that T is not denormal.
14784 Similar logic is present in nonzero_address in rtlanal.h.
14786 If the return value is based on the assumption that signed overflow
14787 is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
14788 change *STRICT_OVERFLOW_P. */
14790 bool
14791 tree_unary_nonzero_warnv_p (enum tree_code code, tree type, tree op0,
14792 bool *strict_overflow_p)
14794 switch (code)
14796 case ABS_EXPR:
14797 return tree_expr_nonzero_warnv_p (op0,
14798 strict_overflow_p);
14800 case NOP_EXPR:
14802 tree inner_type = TREE_TYPE (op0);
14803 tree outer_type = type;
14805 return (TYPE_PRECISION (outer_type) >= TYPE_PRECISION (inner_type)
14806 && tree_expr_nonzero_warnv_p (op0,
14807 strict_overflow_p));
14809 break;
14811 case NON_LVALUE_EXPR:
14812 return tree_expr_nonzero_warnv_p (op0,
14813 strict_overflow_p);
14815 default:
14816 break;
14819 return false;
14822 /* Return true when (CODE OP0 OP1) is an address and is known to be nonzero.
14823 For floating point we further ensure that T is not denormal.
14824 Similar logic is present in nonzero_address in rtlanal.h.
14826 If the return value is based on the assumption that signed overflow
14827 is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
14828 change *STRICT_OVERFLOW_P. */
14830 bool
14831 tree_binary_nonzero_warnv_p (enum tree_code code,
14832 tree type,
14833 tree op0,
14834 tree op1, bool *strict_overflow_p)
14836 bool sub_strict_overflow_p;
14837 switch (code)
14839 case POINTER_PLUS_EXPR:
14840 case PLUS_EXPR:
14841 if (TYPE_OVERFLOW_UNDEFINED (type))
14843 /* With the presence of negative values it is hard
14844 to say something. */
14845 sub_strict_overflow_p = false;
14846 if (!tree_expr_nonnegative_warnv_p (op0,
14847 &sub_strict_overflow_p)
14848 || !tree_expr_nonnegative_warnv_p (op1,
14849 &sub_strict_overflow_p))
14850 return false;
14851 /* One of operands must be positive and the other non-negative. */
14852 /* We don't set *STRICT_OVERFLOW_P here: even if this value
14853 overflows, on a twos-complement machine the sum of two
14854 nonnegative numbers can never be zero. */
14855 return (tree_expr_nonzero_warnv_p (op0,
14856 strict_overflow_p)
14857 || tree_expr_nonzero_warnv_p (op1,
14858 strict_overflow_p));
14860 break;
14862 case MULT_EXPR:
14863 if (TYPE_OVERFLOW_UNDEFINED (type))
14865 if (tree_expr_nonzero_warnv_p (op0,
14866 strict_overflow_p)
14867 && tree_expr_nonzero_warnv_p (op1,
14868 strict_overflow_p))
14870 *strict_overflow_p = true;
14871 return true;
14874 break;
14876 case MIN_EXPR:
14877 sub_strict_overflow_p = false;
14878 if (tree_expr_nonzero_warnv_p (op0,
14879 &sub_strict_overflow_p)
14880 && tree_expr_nonzero_warnv_p (op1,
14881 &sub_strict_overflow_p))
14883 if (sub_strict_overflow_p)
14884 *strict_overflow_p = true;
14886 break;
14888 case MAX_EXPR:
14889 sub_strict_overflow_p = false;
14890 if (tree_expr_nonzero_warnv_p (op0,
14891 &sub_strict_overflow_p))
14893 if (sub_strict_overflow_p)
14894 *strict_overflow_p = true;
14896 /* When both operands are nonzero, then MAX must be too. */
14897 if (tree_expr_nonzero_warnv_p (op1,
14898 strict_overflow_p))
14899 return true;
14901 /* MAX where operand 0 is positive is positive. */
14902 return tree_expr_nonnegative_warnv_p (op0,
14903 strict_overflow_p);
14905 /* MAX where operand 1 is positive is positive. */
14906 else if (tree_expr_nonzero_warnv_p (op1,
14907 &sub_strict_overflow_p)
14908 && tree_expr_nonnegative_warnv_p (op1,
14909 &sub_strict_overflow_p))
14911 if (sub_strict_overflow_p)
14912 *strict_overflow_p = true;
14913 return true;
14915 break;
14917 case BIT_IOR_EXPR:
14918 return (tree_expr_nonzero_warnv_p (op1,
14919 strict_overflow_p)
14920 || tree_expr_nonzero_warnv_p (op0,
14921 strict_overflow_p));
14923 default:
14924 break;
14927 return false;
14930 /* Return true when T is an address and is known to be nonzero.
14931 For floating point we further ensure that T is not denormal.
14932 Similar logic is present in nonzero_address in rtlanal.h.
14934 If the return value is based on the assumption that signed overflow
14935 is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
14936 change *STRICT_OVERFLOW_P. */
14938 bool
14939 tree_single_nonzero_warnv_p (tree t, bool *strict_overflow_p)
14941 bool sub_strict_overflow_p;
14942 switch (TREE_CODE (t))
14944 case INTEGER_CST:
14945 return !integer_zerop (t);
14947 case ADDR_EXPR:
14949 tree base = TREE_OPERAND (t, 0);
14950 if (!DECL_P (base))
14951 base = get_base_address (base);
14953 if (!base)
14954 return false;
14956 /* Weak declarations may link to NULL. Other things may also be NULL
14957 so protect with -fdelete-null-pointer-checks; but not variables
14958 allocated on the stack. */
14959 if (DECL_P (base)
14960 && (flag_delete_null_pointer_checks
14961 || (DECL_CONTEXT (base)
14962 && TREE_CODE (DECL_CONTEXT (base)) == FUNCTION_DECL
14963 && auto_var_in_fn_p (base, DECL_CONTEXT (base)))))
14964 return !VAR_OR_FUNCTION_DECL_P (base) || !DECL_WEAK (base);
14966 /* Constants are never weak. */
14967 if (CONSTANT_CLASS_P (base))
14968 return true;
14970 return false;
14973 case COND_EXPR:
14974 sub_strict_overflow_p = false;
14975 if (tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 1),
14976 &sub_strict_overflow_p)
14977 && tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 2),
14978 &sub_strict_overflow_p))
14980 if (sub_strict_overflow_p)
14981 *strict_overflow_p = true;
14982 return true;
14984 break;
14986 default:
14987 break;
14989 return false;
14992 /* Return true when T is an address and is known to be nonzero.
14993 For floating point we further ensure that T is not denormal.
14994 Similar logic is present in nonzero_address in rtlanal.h.
14996 If the return value is based on the assumption that signed overflow
14997 is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
14998 change *STRICT_OVERFLOW_P. */
15000 bool
15001 tree_expr_nonzero_warnv_p (tree t, bool *strict_overflow_p)
15003 tree type = TREE_TYPE (t);
15004 enum tree_code code;
15006 /* Doing something useful for floating point would need more work. */
15007 if (!INTEGRAL_TYPE_P (type) && !POINTER_TYPE_P (type))
15008 return false;
15010 code = TREE_CODE (t);
15011 switch (TREE_CODE_CLASS (code))
15013 case tcc_unary:
15014 return tree_unary_nonzero_warnv_p (code, type, TREE_OPERAND (t, 0),
15015 strict_overflow_p);
15016 case tcc_binary:
15017 case tcc_comparison:
15018 return tree_binary_nonzero_warnv_p (code, type,
15019 TREE_OPERAND (t, 0),
15020 TREE_OPERAND (t, 1),
15021 strict_overflow_p);
15022 case tcc_constant:
15023 case tcc_declaration:
15024 case tcc_reference:
15025 return tree_single_nonzero_warnv_p (t, strict_overflow_p);
15027 default:
15028 break;
15031 switch (code)
15033 case TRUTH_NOT_EXPR:
15034 return tree_unary_nonzero_warnv_p (code, type, TREE_OPERAND (t, 0),
15035 strict_overflow_p);
15037 case TRUTH_AND_EXPR:
15038 case TRUTH_OR_EXPR:
15039 case TRUTH_XOR_EXPR:
15040 return tree_binary_nonzero_warnv_p (code, type,
15041 TREE_OPERAND (t, 0),
15042 TREE_OPERAND (t, 1),
15043 strict_overflow_p);
15045 case COND_EXPR:
15046 case CONSTRUCTOR:
15047 case OBJ_TYPE_REF:
15048 case ASSERT_EXPR:
15049 case ADDR_EXPR:
15050 case WITH_SIZE_EXPR:
15051 case SSA_NAME:
15052 return tree_single_nonzero_warnv_p (t, strict_overflow_p);
15054 case COMPOUND_EXPR:
15055 case MODIFY_EXPR:
15056 case BIND_EXPR:
15057 return tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 1),
15058 strict_overflow_p);
15060 case SAVE_EXPR:
15061 return tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 0),
15062 strict_overflow_p);
15064 case CALL_EXPR:
15065 return alloca_call_p (t);
15067 default:
15068 break;
15070 return false;
15073 /* Return true when T is an address and is known to be nonzero.
15074 Handle warnings about undefined signed overflow. */
15076 bool
15077 tree_expr_nonzero_p (tree t)
15079 bool ret, strict_overflow_p;
15081 strict_overflow_p = false;
15082 ret = tree_expr_nonzero_warnv_p (t, &strict_overflow_p);
15083 if (strict_overflow_p)
15084 fold_overflow_warning (("assuming signed overflow does not occur when "
15085 "determining that expression is always "
15086 "non-zero"),
15087 WARN_STRICT_OVERFLOW_MISC);
15088 return ret;
15091 /* Given the components of a binary expression CODE, TYPE, OP0 and OP1,
15092 attempt to fold the expression to a constant without modifying TYPE,
15093 OP0 or OP1.
15095 If the expression could be simplified to a constant, then return
15096 the constant. If the expression would not be simplified to a
15097 constant, then return NULL_TREE. */
15099 tree
15100 fold_binary_to_constant (enum tree_code code, tree type, tree op0, tree op1)
15102 tree tem = fold_binary (code, type, op0, op1);
15103 return (tem && TREE_CONSTANT (tem)) ? tem : NULL_TREE;
15106 /* Given the components of a unary expression CODE, TYPE and OP0,
15107 attempt to fold the expression to a constant without modifying
15108 TYPE or OP0.
15110 If the expression could be simplified to a constant, then return
15111 the constant. If the expression would not be simplified to a
15112 constant, then return NULL_TREE. */
15114 tree
15115 fold_unary_to_constant (enum tree_code code, tree type, tree op0)
15117 tree tem = fold_unary (code, type, op0);
15118 return (tem && TREE_CONSTANT (tem)) ? tem : NULL_TREE;
15121 /* If EXP represents referencing an element in a constant string
15122 (either via pointer arithmetic or array indexing), return the
15123 tree representing the value accessed, otherwise return NULL. */
15125 tree
15126 fold_read_from_constant_string (tree exp)
15128 if ((TREE_CODE (exp) == INDIRECT_REF
15129 || TREE_CODE (exp) == ARRAY_REF)
15130 && TREE_CODE (TREE_TYPE (exp)) == INTEGER_TYPE)
15132 tree exp1 = TREE_OPERAND (exp, 0);
15133 tree index;
15134 tree string;
15135 location_t loc = EXPR_LOCATION (exp);
15137 if (TREE_CODE (exp) == INDIRECT_REF)
15138 string = string_constant (exp1, &index);
15139 else
15141 tree low_bound = array_ref_low_bound (exp);
15142 index = fold_convert_loc (loc, sizetype, TREE_OPERAND (exp, 1));
15144 /* Optimize the special-case of a zero lower bound.
15146 We convert the low_bound to sizetype to avoid some problems
15147 with constant folding. (E.g. suppose the lower bound is 1,
15148 and its mode is QI. Without the conversion,l (ARRAY
15149 +(INDEX-(unsigned char)1)) becomes ((ARRAY+(-(unsigned char)1))
15150 +INDEX), which becomes (ARRAY+255+INDEX). Oops!) */
15151 if (! integer_zerop (low_bound))
15152 index = size_diffop_loc (loc, index,
15153 fold_convert_loc (loc, sizetype, low_bound));
15155 string = exp1;
15158 if (string
15159 && TYPE_MODE (TREE_TYPE (exp)) == TYPE_MODE (TREE_TYPE (TREE_TYPE (string)))
15160 && TREE_CODE (string) == STRING_CST
15161 && TREE_CODE (index) == INTEGER_CST
15162 && compare_tree_int (index, TREE_STRING_LENGTH (string)) < 0
15163 && (GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_TYPE (string))))
15164 == MODE_INT)
15165 && (GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (TREE_TYPE (string)))) == 1))
15166 return build_int_cst_type (TREE_TYPE (exp),
15167 (TREE_STRING_POINTER (string)
15168 [TREE_INT_CST_LOW (index)]));
15170 return NULL;
15173 /* Return the tree for neg (ARG0) when ARG0 is known to be either
15174 an integer constant, real, or fixed-point constant.
15176 TYPE is the type of the result. */
15178 static tree
15179 fold_negate_const (tree arg0, tree type)
15181 tree t = NULL_TREE;
15183 switch (TREE_CODE (arg0))
15185 case INTEGER_CST:
15187 double_int val = tree_to_double_int (arg0);
15188 int overflow = neg_double (val.low, val.high, &val.low, &val.high);
15190 t = force_fit_type_double (type, val, 1,
15191 (overflow | TREE_OVERFLOW (arg0))
15192 && !TYPE_UNSIGNED (type));
15193 break;
15196 case REAL_CST:
15197 t = build_real (type, real_value_negate (&TREE_REAL_CST (arg0)));
15198 break;
15200 case FIXED_CST:
15202 FIXED_VALUE_TYPE f;
15203 bool overflow_p = fixed_arithmetic (&f, NEGATE_EXPR,
15204 &(TREE_FIXED_CST (arg0)), NULL,
15205 TYPE_SATURATING (type));
15206 t = build_fixed (type, f);
15207 /* Propagate overflow flags. */
15208 if (overflow_p | TREE_OVERFLOW (arg0))
15209 TREE_OVERFLOW (t) = 1;
15210 break;
15213 default:
15214 gcc_unreachable ();
15217 return t;
15220 /* Return the tree for abs (ARG0) when ARG0 is known to be either
15221 an integer constant or real constant.
15223 TYPE is the type of the result. */
15225 tree
15226 fold_abs_const (tree arg0, tree type)
15228 tree t = NULL_TREE;
15230 switch (TREE_CODE (arg0))
15232 case INTEGER_CST:
15234 double_int val = tree_to_double_int (arg0);
15236 /* If the value is unsigned or non-negative, then the absolute value
15237 is the same as the ordinary value. */
15238 if (TYPE_UNSIGNED (type)
15239 || !double_int_negative_p (val))
15240 t = arg0;
15242 /* If the value is negative, then the absolute value is
15243 its negation. */
15244 else
15246 int overflow;
15248 overflow = neg_double (val.low, val.high, &val.low, &val.high);
15249 t = force_fit_type_double (type, val, -1,
15250 overflow | TREE_OVERFLOW (arg0));
15253 break;
15255 case REAL_CST:
15256 if (REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg0)))
15257 t = build_real (type, real_value_negate (&TREE_REAL_CST (arg0)));
15258 else
15259 t = arg0;
15260 break;
15262 default:
15263 gcc_unreachable ();
15266 return t;
15269 /* Return the tree for not (ARG0) when ARG0 is known to be an integer
15270 constant. TYPE is the type of the result. */
15272 static tree
15273 fold_not_const (const_tree arg0, tree type)
15275 double_int val;
15277 gcc_assert (TREE_CODE (arg0) == INTEGER_CST);
15279 val = double_int_not (tree_to_double_int (arg0));
15280 return force_fit_type_double (type, val, 0, TREE_OVERFLOW (arg0));
15283 /* Given CODE, a relational operator, the target type, TYPE and two
15284 constant operands OP0 and OP1, return the result of the
15285 relational operation. If the result is not a compile time
15286 constant, then return NULL_TREE. */
15288 static tree
15289 fold_relational_const (enum tree_code code, tree type, tree op0, tree op1)
15291 int result, invert;
15293 /* From here on, the only cases we handle are when the result is
15294 known to be a constant. */
15296 if (TREE_CODE (op0) == REAL_CST && TREE_CODE (op1) == REAL_CST)
15298 const REAL_VALUE_TYPE *c0 = TREE_REAL_CST_PTR (op0);
15299 const REAL_VALUE_TYPE *c1 = TREE_REAL_CST_PTR (op1);
15301 /* Handle the cases where either operand is a NaN. */
15302 if (real_isnan (c0) || real_isnan (c1))
15304 switch (code)
15306 case EQ_EXPR:
15307 case ORDERED_EXPR:
15308 result = 0;
15309 break;
15311 case NE_EXPR:
15312 case UNORDERED_EXPR:
15313 case UNLT_EXPR:
15314 case UNLE_EXPR:
15315 case UNGT_EXPR:
15316 case UNGE_EXPR:
15317 case UNEQ_EXPR:
15318 result = 1;
15319 break;
15321 case LT_EXPR:
15322 case LE_EXPR:
15323 case GT_EXPR:
15324 case GE_EXPR:
15325 case LTGT_EXPR:
15326 if (flag_trapping_math)
15327 return NULL_TREE;
15328 result = 0;
15329 break;
15331 default:
15332 gcc_unreachable ();
15335 return constant_boolean_node (result, type);
15338 return constant_boolean_node (real_compare (code, c0, c1), type);
15341 if (TREE_CODE (op0) == FIXED_CST && TREE_CODE (op1) == FIXED_CST)
15343 const FIXED_VALUE_TYPE *c0 = TREE_FIXED_CST_PTR (op0);
15344 const FIXED_VALUE_TYPE *c1 = TREE_FIXED_CST_PTR (op1);
15345 return constant_boolean_node (fixed_compare (code, c0, c1), type);
15348 /* Handle equality/inequality of complex constants. */
15349 if (TREE_CODE (op0) == COMPLEX_CST && TREE_CODE (op1) == COMPLEX_CST)
15351 tree rcond = fold_relational_const (code, type,
15352 TREE_REALPART (op0),
15353 TREE_REALPART (op1));
15354 tree icond = fold_relational_const (code, type,
15355 TREE_IMAGPART (op0),
15356 TREE_IMAGPART (op1));
15357 if (code == EQ_EXPR)
15358 return fold_build2 (TRUTH_ANDIF_EXPR, type, rcond, icond);
15359 else if (code == NE_EXPR)
15360 return fold_build2 (TRUTH_ORIF_EXPR, type, rcond, icond);
15361 else
15362 return NULL_TREE;
15365 /* From here on we only handle LT, LE, GT, GE, EQ and NE.
15367 To compute GT, swap the arguments and do LT.
15368 To compute GE, do LT and invert the result.
15369 To compute LE, swap the arguments, do LT and invert the result.
15370 To compute NE, do EQ and invert the result.
15372 Therefore, the code below must handle only EQ and LT. */
15374 if (code == LE_EXPR || code == GT_EXPR)
15376 tree tem = op0;
15377 op0 = op1;
15378 op1 = tem;
15379 code = swap_tree_comparison (code);
15382 /* Note that it is safe to invert for real values here because we
15383 have already handled the one case that it matters. */
15385 invert = 0;
15386 if (code == NE_EXPR || code == GE_EXPR)
15388 invert = 1;
15389 code = invert_tree_comparison (code, false);
15392 /* Compute a result for LT or EQ if args permit;
15393 Otherwise return T. */
15394 if (TREE_CODE (op0) == INTEGER_CST && TREE_CODE (op1) == INTEGER_CST)
15396 if (code == EQ_EXPR)
15397 result = tree_int_cst_equal (op0, op1);
15398 else if (TYPE_UNSIGNED (TREE_TYPE (op0)))
15399 result = INT_CST_LT_UNSIGNED (op0, op1);
15400 else
15401 result = INT_CST_LT (op0, op1);
15403 else
15404 return NULL_TREE;
15406 if (invert)
15407 result ^= 1;
15408 return constant_boolean_node (result, type);
15411 /* If necessary, return a CLEANUP_POINT_EXPR for EXPR with the
15412 indicated TYPE. If no CLEANUP_POINT_EXPR is necessary, return EXPR
15413 itself. */
15415 tree
15416 fold_build_cleanup_point_expr (tree type, tree expr)
15418 /* If the expression does not have side effects then we don't have to wrap
15419 it with a cleanup point expression. */
15420 if (!TREE_SIDE_EFFECTS (expr))
15421 return expr;
15423 /* If the expression is a return, check to see if the expression inside the
15424 return has no side effects or the right hand side of the modify expression
15425 inside the return. If either don't have side effects set we don't need to
15426 wrap the expression in a cleanup point expression. Note we don't check the
15427 left hand side of the modify because it should always be a return decl. */
15428 if (TREE_CODE (expr) == RETURN_EXPR)
15430 tree op = TREE_OPERAND (expr, 0);
15431 if (!op || !TREE_SIDE_EFFECTS (op))
15432 return expr;
15433 op = TREE_OPERAND (op, 1);
15434 if (!TREE_SIDE_EFFECTS (op))
15435 return expr;
15438 return build1 (CLEANUP_POINT_EXPR, type, expr);
15441 /* Given a pointer value OP0 and a type TYPE, return a simplified version
15442 of an indirection through OP0, or NULL_TREE if no simplification is
15443 possible. */
15445 tree
15446 fold_indirect_ref_1 (location_t loc, tree type, tree op0)
15448 tree sub = op0;
15449 tree subtype;
15451 STRIP_NOPS (sub);
15452 subtype = TREE_TYPE (sub);
15453 if (!POINTER_TYPE_P (subtype))
15454 return NULL_TREE;
15456 if (TREE_CODE (sub) == ADDR_EXPR)
15458 tree op = TREE_OPERAND (sub, 0);
15459 tree optype = TREE_TYPE (op);
15460 /* *&CONST_DECL -> to the value of the const decl. */
15461 if (TREE_CODE (op) == CONST_DECL)
15462 return DECL_INITIAL (op);
15463 /* *&p => p; make sure to handle *&"str"[cst] here. */
15464 if (type == optype)
15466 tree fop = fold_read_from_constant_string (op);
15467 if (fop)
15468 return fop;
15469 else
15470 return op;
15472 /* *(foo *)&fooarray => fooarray[0] */
15473 else if (TREE_CODE (optype) == ARRAY_TYPE
15474 && type == TREE_TYPE (optype))
15476 tree type_domain = TYPE_DOMAIN (optype);
15477 tree min_val = size_zero_node;
15478 if (type_domain && TYPE_MIN_VALUE (type_domain))
15479 min_val = TYPE_MIN_VALUE (type_domain);
15480 op0 = build4 (ARRAY_REF, type, op, min_val, NULL_TREE, NULL_TREE);
15481 SET_EXPR_LOCATION (op0, loc);
15482 return op0;
15484 /* *(foo *)&complexfoo => __real__ complexfoo */
15485 else if (TREE_CODE (optype) == COMPLEX_TYPE
15486 && type == TREE_TYPE (optype))
15487 return fold_build1_loc (loc, REALPART_EXPR, type, op);
15488 /* *(foo *)&vectorfoo => BIT_FIELD_REF<vectorfoo,...> */
15489 else if (TREE_CODE (optype) == VECTOR_TYPE
15490 && type == TREE_TYPE (optype))
15492 tree part_width = TYPE_SIZE (type);
15493 tree index = bitsize_int (0);
15494 return fold_build3_loc (loc, BIT_FIELD_REF, type, op, part_width, index);
15498 /* ((foo*)&vectorfoo)[1] => BIT_FIELD_REF<vectorfoo,...> */
15499 if (TREE_CODE (sub) == POINTER_PLUS_EXPR
15500 && TREE_CODE (TREE_OPERAND (sub, 1)) == INTEGER_CST)
15502 tree op00 = TREE_OPERAND (sub, 0);
15503 tree op01 = TREE_OPERAND (sub, 1);
15504 tree op00type;
15506 STRIP_NOPS (op00);
15507 op00type = TREE_TYPE (op00);
15508 if (TREE_CODE (op00) == ADDR_EXPR
15509 && TREE_CODE (TREE_TYPE (op00type)) == VECTOR_TYPE
15510 && type == TREE_TYPE (TREE_TYPE (op00type)))
15512 HOST_WIDE_INT offset = tree_low_cst (op01, 0);
15513 tree part_width = TYPE_SIZE (type);
15514 unsigned HOST_WIDE_INT part_widthi = tree_low_cst (part_width, 0)/BITS_PER_UNIT;
15515 unsigned HOST_WIDE_INT indexi = offset * BITS_PER_UNIT;
15516 tree index = bitsize_int (indexi);
15518 if (offset/part_widthi <= TYPE_VECTOR_SUBPARTS (TREE_TYPE (op00type)))
15519 return fold_build3_loc (loc,
15520 BIT_FIELD_REF, type, TREE_OPERAND (op00, 0),
15521 part_width, index);
15527 /* ((foo*)&complexfoo)[1] => __imag__ complexfoo */
15528 if (TREE_CODE (sub) == POINTER_PLUS_EXPR
15529 && TREE_CODE (TREE_OPERAND (sub, 1)) == INTEGER_CST)
15531 tree op00 = TREE_OPERAND (sub, 0);
15532 tree op01 = TREE_OPERAND (sub, 1);
15533 tree op00type;
15535 STRIP_NOPS (op00);
15536 op00type = TREE_TYPE (op00);
15537 if (TREE_CODE (op00) == ADDR_EXPR
15538 && TREE_CODE (TREE_TYPE (op00type)) == COMPLEX_TYPE
15539 && type == TREE_TYPE (TREE_TYPE (op00type)))
15541 tree size = TYPE_SIZE_UNIT (type);
15542 if (tree_int_cst_equal (size, op01))
15543 return fold_build1_loc (loc, IMAGPART_EXPR, type,
15544 TREE_OPERAND (op00, 0));
15548 /* *(foo *)fooarrptr => (*fooarrptr)[0] */
15549 if (TREE_CODE (TREE_TYPE (subtype)) == ARRAY_TYPE
15550 && type == TREE_TYPE (TREE_TYPE (subtype)))
15552 tree type_domain;
15553 tree min_val = size_zero_node;
15554 sub = build_fold_indirect_ref_loc (loc, sub);
15555 type_domain = TYPE_DOMAIN (TREE_TYPE (sub));
15556 if (type_domain && TYPE_MIN_VALUE (type_domain))
15557 min_val = TYPE_MIN_VALUE (type_domain);
15558 op0 = build4 (ARRAY_REF, type, sub, min_val, NULL_TREE, NULL_TREE);
15559 SET_EXPR_LOCATION (op0, loc);
15560 return op0;
15563 return NULL_TREE;
15566 /* Builds an expression for an indirection through T, simplifying some
15567 cases. */
15569 tree
15570 build_fold_indirect_ref_loc (location_t loc, tree t)
15572 tree type = TREE_TYPE (TREE_TYPE (t));
15573 tree sub = fold_indirect_ref_1 (loc, type, t);
15575 if (sub)
15576 return sub;
15578 t = build1 (INDIRECT_REF, type, t);
15579 SET_EXPR_LOCATION (t, loc);
15580 return t;
15583 /* Given an INDIRECT_REF T, return either T or a simplified version. */
15585 tree
15586 fold_indirect_ref_loc (location_t loc, tree t)
15588 tree sub = fold_indirect_ref_1 (loc, TREE_TYPE (t), TREE_OPERAND (t, 0));
15590 if (sub)
15591 return sub;
15592 else
15593 return t;
15596 /* Strip non-trapping, non-side-effecting tree nodes from an expression
15597 whose result is ignored. The type of the returned tree need not be
15598 the same as the original expression. */
15600 tree
15601 fold_ignored_result (tree t)
15603 if (!TREE_SIDE_EFFECTS (t))
15604 return integer_zero_node;
15606 for (;;)
15607 switch (TREE_CODE_CLASS (TREE_CODE (t)))
15609 case tcc_unary:
15610 t = TREE_OPERAND (t, 0);
15611 break;
15613 case tcc_binary:
15614 case tcc_comparison:
15615 if (!TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1)))
15616 t = TREE_OPERAND (t, 0);
15617 else if (!TREE_SIDE_EFFECTS (TREE_OPERAND (t, 0)))
15618 t = TREE_OPERAND (t, 1);
15619 else
15620 return t;
15621 break;
15623 case tcc_expression:
15624 switch (TREE_CODE (t))
15626 case COMPOUND_EXPR:
15627 if (TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1)))
15628 return t;
15629 t = TREE_OPERAND (t, 0);
15630 break;
15632 case COND_EXPR:
15633 if (TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1))
15634 || TREE_SIDE_EFFECTS (TREE_OPERAND (t, 2)))
15635 return t;
15636 t = TREE_OPERAND (t, 0);
15637 break;
15639 default:
15640 return t;
15642 break;
15644 default:
15645 return t;
15649 /* Return the value of VALUE, rounded up to a multiple of DIVISOR.
15650 This can only be applied to objects of a sizetype. */
15652 tree
15653 round_up_loc (location_t loc, tree value, int divisor)
15655 tree div = NULL_TREE;
15657 gcc_assert (divisor > 0);
15658 if (divisor == 1)
15659 return value;
15661 /* See if VALUE is already a multiple of DIVISOR. If so, we don't
15662 have to do anything. Only do this when we are not given a const,
15663 because in that case, this check is more expensive than just
15664 doing it. */
15665 if (TREE_CODE (value) != INTEGER_CST)
15667 div = build_int_cst (TREE_TYPE (value), divisor);
15669 if (multiple_of_p (TREE_TYPE (value), value, div))
15670 return value;
15673 /* If divisor is a power of two, simplify this to bit manipulation. */
15674 if (divisor == (divisor & -divisor))
15676 if (TREE_CODE (value) == INTEGER_CST)
15678 double_int val = tree_to_double_int (value);
15679 bool overflow_p;
15681 if ((val.low & (divisor - 1)) == 0)
15682 return value;
15684 overflow_p = TREE_OVERFLOW (value);
15685 val.low &= ~(divisor - 1);
15686 val.low += divisor;
15687 if (val.low == 0)
15689 val.high++;
15690 if (val.high == 0)
15691 overflow_p = true;
15694 return force_fit_type_double (TREE_TYPE (value), val,
15695 -1, overflow_p);
15697 else
15699 tree t;
15701 t = build_int_cst (TREE_TYPE (value), divisor - 1);
15702 value = size_binop_loc (loc, PLUS_EXPR, value, t);
15703 t = build_int_cst (TREE_TYPE (value), -divisor);
15704 value = size_binop_loc (loc, BIT_AND_EXPR, value, t);
15707 else
15709 if (!div)
15710 div = build_int_cst (TREE_TYPE (value), divisor);
15711 value = size_binop_loc (loc, CEIL_DIV_EXPR, value, div);
15712 value = size_binop_loc (loc, MULT_EXPR, value, div);
15715 return value;
15718 /* Likewise, but round down. */
15720 tree
15721 round_down_loc (location_t loc, tree value, int divisor)
15723 tree div = NULL_TREE;
15725 gcc_assert (divisor > 0);
15726 if (divisor == 1)
15727 return value;
15729 /* See if VALUE is already a multiple of DIVISOR. If so, we don't
15730 have to do anything. Only do this when we are not given a const,
15731 because in that case, this check is more expensive than just
15732 doing it. */
15733 if (TREE_CODE (value) != INTEGER_CST)
15735 div = build_int_cst (TREE_TYPE (value), divisor);
15737 if (multiple_of_p (TREE_TYPE (value), value, div))
15738 return value;
15741 /* If divisor is a power of two, simplify this to bit manipulation. */
15742 if (divisor == (divisor & -divisor))
15744 tree t;
15746 t = build_int_cst (TREE_TYPE (value), -divisor);
15747 value = size_binop_loc (loc, BIT_AND_EXPR, value, t);
15749 else
15751 if (!div)
15752 div = build_int_cst (TREE_TYPE (value), divisor);
15753 value = size_binop_loc (loc, FLOOR_DIV_EXPR, value, div);
15754 value = size_binop_loc (loc, MULT_EXPR, value, div);
15757 return value;
15760 /* Returns the pointer to the base of the object addressed by EXP and
15761 extracts the information about the offset of the access, storing it
15762 to PBITPOS and POFFSET. */
15764 static tree
15765 split_address_to_core_and_offset (tree exp,
15766 HOST_WIDE_INT *pbitpos, tree *poffset)
15768 tree core;
15769 enum machine_mode mode;
15770 int unsignedp, volatilep;
15771 HOST_WIDE_INT bitsize;
15772 location_t loc = EXPR_LOCATION (exp);
15774 if (TREE_CODE (exp) == ADDR_EXPR)
15776 core = get_inner_reference (TREE_OPERAND (exp, 0), &bitsize, pbitpos,
15777 poffset, &mode, &unsignedp, &volatilep,
15778 false);
15779 core = build_fold_addr_expr_loc (loc, core);
15781 else
15783 core = exp;
15784 *pbitpos = 0;
15785 *poffset = NULL_TREE;
15788 return core;
15791 /* Returns true if addresses of E1 and E2 differ by a constant, false
15792 otherwise. If they do, E1 - E2 is stored in *DIFF. */
15794 bool
15795 ptr_difference_const (tree e1, tree e2, HOST_WIDE_INT *diff)
15797 tree core1, core2;
15798 HOST_WIDE_INT bitpos1, bitpos2;
15799 tree toffset1, toffset2, tdiff, type;
15801 core1 = split_address_to_core_and_offset (e1, &bitpos1, &toffset1);
15802 core2 = split_address_to_core_and_offset (e2, &bitpos2, &toffset2);
15804 if (bitpos1 % BITS_PER_UNIT != 0
15805 || bitpos2 % BITS_PER_UNIT != 0
15806 || !operand_equal_p (core1, core2, 0))
15807 return false;
15809 if (toffset1 && toffset2)
15811 type = TREE_TYPE (toffset1);
15812 if (type != TREE_TYPE (toffset2))
15813 toffset2 = fold_convert (type, toffset2);
15815 tdiff = fold_build2 (MINUS_EXPR, type, toffset1, toffset2);
15816 if (!cst_and_fits_in_hwi (tdiff))
15817 return false;
15819 *diff = int_cst_value (tdiff);
15821 else if (toffset1 || toffset2)
15823 /* If only one of the offsets is non-constant, the difference cannot
15824 be a constant. */
15825 return false;
15827 else
15828 *diff = 0;
15830 *diff += (bitpos1 - bitpos2) / BITS_PER_UNIT;
15831 return true;
15834 /* Simplify the floating point expression EXP when the sign of the
15835 result is not significant. Return NULL_TREE if no simplification
15836 is possible. */
15838 tree
15839 fold_strip_sign_ops (tree exp)
15841 tree arg0, arg1;
15842 location_t loc = EXPR_LOCATION (exp);
15844 switch (TREE_CODE (exp))
15846 case ABS_EXPR:
15847 case NEGATE_EXPR:
15848 arg0 = fold_strip_sign_ops (TREE_OPERAND (exp, 0));
15849 return arg0 ? arg0 : TREE_OPERAND (exp, 0);
15851 case MULT_EXPR:
15852 case RDIV_EXPR:
15853 if (HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (TREE_TYPE (exp))))
15854 return NULL_TREE;
15855 arg0 = fold_strip_sign_ops (TREE_OPERAND (exp, 0));
15856 arg1 = fold_strip_sign_ops (TREE_OPERAND (exp, 1));
15857 if (arg0 != NULL_TREE || arg1 != NULL_TREE)
15858 return fold_build2_loc (loc, TREE_CODE (exp), TREE_TYPE (exp),
15859 arg0 ? arg0 : TREE_OPERAND (exp, 0),
15860 arg1 ? arg1 : TREE_OPERAND (exp, 1));
15861 break;
15863 case COMPOUND_EXPR:
15864 arg0 = TREE_OPERAND (exp, 0);
15865 arg1 = fold_strip_sign_ops (TREE_OPERAND (exp, 1));
15866 if (arg1)
15867 return fold_build2_loc (loc, COMPOUND_EXPR, TREE_TYPE (exp), arg0, arg1);
15868 break;
15870 case COND_EXPR:
15871 arg0 = fold_strip_sign_ops (TREE_OPERAND (exp, 1));
15872 arg1 = fold_strip_sign_ops (TREE_OPERAND (exp, 2));
15873 if (arg0 || arg1)
15874 return fold_build3_loc (loc,
15875 COND_EXPR, TREE_TYPE (exp), TREE_OPERAND (exp, 0),
15876 arg0 ? arg0 : TREE_OPERAND (exp, 1),
15877 arg1 ? arg1 : TREE_OPERAND (exp, 2));
15878 break;
15880 case CALL_EXPR:
15882 const enum built_in_function fcode = builtin_mathfn_code (exp);
15883 switch (fcode)
15885 CASE_FLT_FN (BUILT_IN_COPYSIGN):
15886 /* Strip copysign function call, return the 1st argument. */
15887 arg0 = CALL_EXPR_ARG (exp, 0);
15888 arg1 = CALL_EXPR_ARG (exp, 1);
15889 return omit_one_operand_loc (loc, TREE_TYPE (exp), arg0, arg1);
15891 default:
15892 /* Strip sign ops from the argument of "odd" math functions. */
15893 if (negate_mathfn_p (fcode))
15895 arg0 = fold_strip_sign_ops (CALL_EXPR_ARG (exp, 0));
15896 if (arg0)
15897 return build_call_expr_loc (loc, get_callee_fndecl (exp), 1, arg0);
15899 break;
15902 break;
15904 default:
15905 break;
15907 return NULL_TREE;