/* Fold a constant sub-tree into a single node for C-compiler
   Copyright (C) 1987-2014 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

/*@@ This file should be rewritten to use an arbitrary precision
  @@ representation for "struct tree_int_cst" and "struct tree_real_cst".
  @@ Perhaps the routines could also be used for bc/dc, and made a lib.
  @@ The routines that translate from the ap rep should
  @@ warn if precision et al. is lost.
  @@ This would also make life easier when this technology is used
  @@ for cross-compilers.  */

/* The entry points in this file are fold, size_int_wide and size_binop.

   fold takes a tree as argument and returns a simplified tree.

   size_binop takes a tree code for an arithmetic operation
   and two operands that are trees, and produces a tree for the
   result, assuming the type comes from `sizetype'.

   size_int takes an integer value, and creates a tree constant
   with type from `sizetype'.

   Note: Since the folders get called on non-gimple code as well as
   gimple code, we need to handle GIMPLE tuples as well as their
   corresponding tree equivalents.  */

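/* Illustrative sketch (not part of the original sources): a typical
   middle-end use of the entry points described above, computing
   "sz * 4" in sizetype.  SZ and TOTAL are hypothetical names.

     tree sz = TYPE_SIZE_UNIT (type);
     tree total = size_binop (MULT_EXPR, sz, size_int (4));

   size_int wraps the integer 4 as a sizetype INTEGER_CST, and
   size_binop folds the multiplication when SZ is constant.  */
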
#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "flags.h"
#include "tree.h"
#include "stor-layout.h"
#include "calls.h"
#include "tree-iterator.h"
#include "realmpfr.h"
#include "rtl.h"
#include "expr.h"
#include "tm_p.h"
#include "target.h"
#include "diagnostic-core.h"
#include "intl.h"
#include "langhooks.h"
#include "md5.h"
#include "basic-block.h"
#include "tree-ssa-alias.h"
#include "internal-fn.h"
#include "tree-eh.h"
#include "gimple-expr.h"
#include "is-a.h"
#include "gimple.h"
#include "gimplify.h"
#include "tree-dfa.h"
#include "hash-table.h"  /* Required for ENABLE_FOLD_CHECKING.  */
#include "builtins.h"
#include "cgraph.h"

/* Nonzero if we are folding constants inside an initializer; zero
   otherwise.  */
int folding_initializer = 0;

/* The following constants represent a bit based encoding of GCC's
   comparison operators.  This encoding simplifies transformations
   on relational comparison operators, such as AND and OR.  */
enum comparison_code {
  COMPCODE_FALSE = 0,
  COMPCODE_LT = 1,
  COMPCODE_EQ = 2,
  COMPCODE_LE = 3,
  COMPCODE_GT = 4,
  COMPCODE_LTGT = 5,
  COMPCODE_GE = 6,
  COMPCODE_ORD = 7,
  COMPCODE_UNORD = 8,
  COMPCODE_UNLT = 9,
  COMPCODE_UNEQ = 10,
  COMPCODE_UNLE = 11,
  COMPCODE_UNGT = 12,
  COMPCODE_NE = 13,
  COMPCODE_UNGE = 14,
  COMPCODE_TRUE = 15
};

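/* Illustrative note (not part of the original sources): the encoding
   uses bit 0 for LT, bit 1 for EQ, bit 2 for GT and bit 3 for UNORD,
   so ORing codes corresponds to ORing predicates, e.g.

     COMPCODE_LE (3)   == COMPCODE_LT (1)    | COMPCODE_EQ (2)
     COMPCODE_UNGE (14) == COMPCODE_UNORD (8) | COMPCODE_GE (6)

   which is what makes transformations on ANDs and ORs of comparisons
   a simple bit operation.  */
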
static bool negate_mathfn_p (enum built_in_function);
static bool negate_expr_p (tree);
static tree negate_expr (tree);
static tree split_tree (tree, enum tree_code, tree *, tree *, tree *, int);
static tree associate_trees (location_t, tree, tree, enum tree_code, tree);
static tree const_binop (enum tree_code, tree, tree);
static enum comparison_code comparison_to_compcode (enum tree_code);
static enum tree_code compcode_to_comparison (enum comparison_code);
static int operand_equal_for_comparison_p (tree, tree, tree);
static int twoval_comparison_p (tree, tree *, tree *, int *);
static tree eval_subst (location_t, tree, tree, tree, tree, tree);
static tree pedantic_omit_one_operand_loc (location_t, tree, tree, tree);
static tree distribute_bit_expr (location_t, enum tree_code, tree, tree, tree);
static tree make_bit_field_ref (location_t, tree, tree,
                                HOST_WIDE_INT, HOST_WIDE_INT, int);
static tree optimize_bit_field_compare (location_t, enum tree_code,
                                        tree, tree, tree);
static tree decode_field_reference (location_t, tree, HOST_WIDE_INT *,
                                    HOST_WIDE_INT *,
                                    enum machine_mode *, int *, int *,
                                    tree *, tree *);
static tree sign_bit_p (tree, const_tree);
static int simple_operand_p (const_tree);
static bool simple_operand_p_2 (tree);
static tree range_binop (enum tree_code, tree, tree, int, tree, int);
static tree range_predecessor (tree);
static tree range_successor (tree);
static tree fold_range_test (location_t, enum tree_code, tree, tree, tree);
static tree fold_cond_expr_with_comparison (location_t, tree, tree, tree, tree);
static tree unextend (tree, int, int, tree);
static tree optimize_minmax_comparison (location_t, enum tree_code,
                                        tree, tree, tree);
static tree extract_muldiv (tree, tree, enum tree_code, tree, bool *);
static tree extract_muldiv_1 (tree, tree, enum tree_code, tree, bool *);
static tree fold_binary_op_with_conditional_arg (location_t,
                                                 enum tree_code, tree,
                                                 tree, tree,
                                                 tree, tree, int);
static tree fold_mathfn_compare (location_t,
                                 enum built_in_function, enum tree_code,
                                 tree, tree, tree);
static tree fold_inf_compare (location_t, enum tree_code, tree, tree, tree);
static tree fold_div_compare (location_t, enum tree_code, tree, tree, tree);
static bool reorder_operands_p (const_tree, const_tree);
static tree fold_negate_const (tree, tree);
static tree fold_not_const (const_tree, tree);
static tree fold_relational_const (enum tree_code, tree, tree, tree);
static tree fold_convert_const (enum tree_code, tree, tree);

/* Return EXPR_LOCATION of T if it is not UNKNOWN_LOCATION.
   Otherwise, return LOC.  */

static location_t
expr_location_or (tree t, location_t loc)
{
  location_t tloc = EXPR_LOCATION (t);
  return tloc == UNKNOWN_LOCATION ? loc : tloc;
}

/* Similar to protected_set_expr_location, but never modify X in place;
   if the location can and needs to be set, unshare X first.  */

static inline tree
protected_set_expr_location_unshare (tree x, location_t loc)
{
  if (CAN_HAVE_LOCATION_P (x)
      && EXPR_LOCATION (x) != loc
      && !(TREE_CODE (x) == SAVE_EXPR
           || TREE_CODE (x) == TARGET_EXPR
           || TREE_CODE (x) == BIND_EXPR))
    {
      x = copy_node (x);
      SET_EXPR_LOCATION (x, loc);
    }
  return x;
}

/* If ARG2 divides ARG1 with zero remainder, carries out the exact
   division and returns the quotient.  Otherwise returns
   NULL_TREE.  */

tree
div_if_zero_remainder (const_tree arg1, const_tree arg2)
{
  widest_int quo;

  if (wi::multiple_of_p (wi::to_widest (arg1), wi::to_widest (arg2),
                         SIGNED, &quo))
    return wide_int_to_tree (TREE_TYPE (arg1), quo);

  return NULL_TREE;
}

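/* Illustrative sketch (not part of the original sources): given
   INTEGER_CSTs built as below,

     tree four = build_int_cst (integer_type_node, 4);
     tree twelve = build_int_cst (integer_type_node, 12);
     tree q = div_if_zero_remainder (twelve, four);

   Q is the INTEGER_CST 3; had the dividend been 13 instead, the call
   would return NULL_TREE since the remainder is nonzero.  */
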
/* This is nonzero if we should defer warnings about undefined
   overflow.  This facility exists because these warnings are a
   special case.  The code to estimate loop iterations does not want
   to issue any warnings, since it works with expressions which do not
   occur in user code.  Various bits of cleanup code call fold(), but
   only use the result if it has certain characteristics (e.g., is a
   constant); that code only wants to issue a warning if the result is
   used.  */

static int fold_deferring_overflow_warnings;

/* If a warning about undefined overflow is deferred, this is the
   warning.  Note that this may cause us to turn two warnings into
   one, but that is fine since it is sufficient to only give one
   warning per expression.  */

static const char* fold_deferred_overflow_warning;

/* If a warning about undefined overflow is deferred, this is the
   level at which the warning should be emitted.  */

static enum warn_strict_overflow_code fold_deferred_overflow_code;

/* Start deferring overflow warnings.  We could use a stack here to
   permit nested calls, but at present it is not necessary.  */

void
fold_defer_overflow_warnings (void)
{
  ++fold_deferring_overflow_warnings;
}

/* Stop deferring overflow warnings.  If there is a pending warning,
   and ISSUE is true, then issue the warning if appropriate.  STMT is
   the statement with which the warning should be associated (used for
   location information); STMT may be NULL.  CODE is the level of the
   warning--a warn_strict_overflow_code value.  This function will use
   the smaller of CODE and the deferred code when deciding whether to
   issue the warning.  CODE may be zero to mean to always use the
   deferred code.  */

void
fold_undefer_overflow_warnings (bool issue, const_gimple stmt, int code)
{
  const char *warnmsg;
  location_t locus;

  gcc_assert (fold_deferring_overflow_warnings > 0);
  --fold_deferring_overflow_warnings;
  if (fold_deferring_overflow_warnings > 0)
    {
      if (fold_deferred_overflow_warning != NULL
          && code != 0
          && code < (int) fold_deferred_overflow_code)
        fold_deferred_overflow_code = (enum warn_strict_overflow_code) code;
      return;
    }

  warnmsg = fold_deferred_overflow_warning;
  fold_deferred_overflow_warning = NULL;

  if (!issue || warnmsg == NULL)
    return;

  if (gimple_no_warning_p (stmt))
    return;

  /* Use the smallest code level when deciding to issue the
     warning.  */
  if (code == 0 || code > (int) fold_deferred_overflow_code)
    code = fold_deferred_overflow_code;

  if (!issue_strict_overflow_warning (code))
    return;

  if (stmt == NULL)
    locus = input_location;
  else
    locus = gimple_location (stmt);
  warning_at (locus, OPT_Wstrict_overflow, "%s", warnmsg);
}

/* Stop deferring overflow warnings, ignoring any deferred
   warnings.  */

void
fold_undefer_and_ignore_overflow_warnings (void)
{
  fold_undefer_overflow_warnings (false, NULL, 0);
}

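/* Illustrative sketch (not part of the original sources): the typical
   pattern a caller such as the loop-iteration estimator follows.
   EXPR is a hypothetical tree.

     fold_defer_overflow_warnings ();
     tree res = fold (expr);
     bool used = res != NULL_TREE && TREE_CODE (res) == INTEGER_CST;
     fold_undefer_overflow_warnings (used, NULL, 0);

   Any "assuming signed overflow does not occur" warning produced while
   folding is then only emitted if the folded result was actually
   used.  */
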
/* Whether we are deferring overflow warnings.  */

bool
fold_deferring_overflow_warnings_p (void)
{
  return fold_deferring_overflow_warnings > 0;
}

/* This is called when we fold something based on the fact that signed
   overflow is undefined.  */

static void
fold_overflow_warning (const char* gmsgid, enum warn_strict_overflow_code wc)
{
  if (fold_deferring_overflow_warnings > 0)
    {
      if (fold_deferred_overflow_warning == NULL
          || wc < fold_deferred_overflow_code)
        {
          fold_deferred_overflow_warning = gmsgid;
          fold_deferred_overflow_code = wc;
        }
    }
  else if (issue_strict_overflow_warning (wc))
    warning (OPT_Wstrict_overflow, gmsgid);
}

/* Return true if the built-in mathematical function specified by CODE
   is odd, i.e. -f(x) == f(-x).  */

static bool
negate_mathfn_p (enum built_in_function code)
{
  switch (code)
    {
    CASE_FLT_FN (BUILT_IN_ASIN):
    CASE_FLT_FN (BUILT_IN_ASINH):
    CASE_FLT_FN (BUILT_IN_ATAN):
    CASE_FLT_FN (BUILT_IN_ATANH):
    CASE_FLT_FN (BUILT_IN_CASIN):
    CASE_FLT_FN (BUILT_IN_CASINH):
    CASE_FLT_FN (BUILT_IN_CATAN):
    CASE_FLT_FN (BUILT_IN_CATANH):
    CASE_FLT_FN (BUILT_IN_CBRT):
    CASE_FLT_FN (BUILT_IN_CPROJ):
    CASE_FLT_FN (BUILT_IN_CSIN):
    CASE_FLT_FN (BUILT_IN_CSINH):
    CASE_FLT_FN (BUILT_IN_CTAN):
    CASE_FLT_FN (BUILT_IN_CTANH):
    CASE_FLT_FN (BUILT_IN_ERF):
    CASE_FLT_FN (BUILT_IN_LLROUND):
    CASE_FLT_FN (BUILT_IN_LROUND):
    CASE_FLT_FN (BUILT_IN_ROUND):
    CASE_FLT_FN (BUILT_IN_SIN):
    CASE_FLT_FN (BUILT_IN_SINH):
    CASE_FLT_FN (BUILT_IN_TAN):
    CASE_FLT_FN (BUILT_IN_TANH):
    CASE_FLT_FN (BUILT_IN_TRUNC):
      return true;

    CASE_FLT_FN (BUILT_IN_LLRINT):
    CASE_FLT_FN (BUILT_IN_LRINT):
    CASE_FLT_FN (BUILT_IN_NEARBYINT):
    CASE_FLT_FN (BUILT_IN_RINT):
      return !flag_rounding_math;

    default:
      break;
    }
  return false;
}

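/* Illustrative note (not part of the original sources): "odd" here is
   the mathematical property f(-x) == -f(x), e.g. sin, tan, cbrt and
   round, which lets fold_negate_expr rewrite -sin(x) as sin(-x) and
   drop the negation.  The rint/nearbyint/lrint group is only odd when
   the rounding mode is symmetric about zero; a directed rounding mode
   breaks the identity, hence the !flag_rounding_math guard.  */
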
/* Check whether we may negate an integer constant T without causing
   overflow.  */

bool
may_negate_without_overflow_p (const_tree t)
{
  tree type;

  gcc_assert (TREE_CODE (t) == INTEGER_CST);

  type = TREE_TYPE (t);
  if (TYPE_UNSIGNED (type))
    return false;

  return !wi::only_sign_bit_p (t);
}

/* Determine whether an expression T can be cheaply negated using
   the function negate_expr without introducing undefined overflow.  */

static bool
negate_expr_p (tree t)
{
  tree type;

  if (t == 0)
    return false;

  type = TREE_TYPE (t);

  STRIP_SIGN_NOPS (t);
  switch (TREE_CODE (t))
    {
    case INTEGER_CST:
      if (TYPE_OVERFLOW_WRAPS (type))
        return true;

      /* Check that -CST will not overflow type.  */
      return may_negate_without_overflow_p (t);
    case BIT_NOT_EXPR:
      return (INTEGRAL_TYPE_P (type)
              && TYPE_OVERFLOW_WRAPS (type));

    case FIXED_CST:
    case NEGATE_EXPR:
      return true;

    case REAL_CST:
      /* We want to canonicalize to positive real constants.  Pretend
         that only negative ones can be easily negated.  */
      return REAL_VALUE_NEGATIVE (TREE_REAL_CST (t));

    case COMPLEX_CST:
      return negate_expr_p (TREE_REALPART (t))
             && negate_expr_p (TREE_IMAGPART (t));

    case VECTOR_CST:
      {
        if (FLOAT_TYPE_P (TREE_TYPE (type)) || TYPE_OVERFLOW_WRAPS (type))
          return true;

        int count = TYPE_VECTOR_SUBPARTS (type), i;

        for (i = 0; i < count; i++)
          if (!negate_expr_p (VECTOR_CST_ELT (t, i)))
            return false;

        return true;
      }

    case COMPLEX_EXPR:
      return negate_expr_p (TREE_OPERAND (t, 0))
             && negate_expr_p (TREE_OPERAND (t, 1));

    case CONJ_EXPR:
      return negate_expr_p (TREE_OPERAND (t, 0));

    case PLUS_EXPR:
      if (HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type))
          || HONOR_SIGNED_ZEROS (TYPE_MODE (type)))
        return false;
      /* -(A + B) -> (-B) - A.  */
      if (negate_expr_p (TREE_OPERAND (t, 1))
          && reorder_operands_p (TREE_OPERAND (t, 0),
                                 TREE_OPERAND (t, 1)))
        return true;
      /* -(A + B) -> (-A) - B.  */
      return negate_expr_p (TREE_OPERAND (t, 0));

    case MINUS_EXPR:
      /* We can't turn -(A-B) into B-A when we honor signed zeros.  */
      return !HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type))
             && !HONOR_SIGNED_ZEROS (TYPE_MODE (type))
             && reorder_operands_p (TREE_OPERAND (t, 0),
                                    TREE_OPERAND (t, 1));

    case MULT_EXPR:
      if (TYPE_UNSIGNED (TREE_TYPE (t)))
        break;

      /* Fall through.  */

    case RDIV_EXPR:
      if (! HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (TREE_TYPE (t))))
        return negate_expr_p (TREE_OPERAND (t, 1))
               || negate_expr_p (TREE_OPERAND (t, 0));
      break;

    case TRUNC_DIV_EXPR:
    case ROUND_DIV_EXPR:
    case EXACT_DIV_EXPR:
      /* In general we can't negate A / B, because if A is INT_MIN and
         B is 1, we may turn this into INT_MIN / -1 which is undefined
         and actually traps on some architectures.  But if overflow is
         undefined, we can negate, because - (INT_MIN / 1) is an
         overflow.  */
      if (INTEGRAL_TYPE_P (TREE_TYPE (t)))
        {
          if (!TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (t)))
            break;
          /* If overflow is undefined then we have to be careful because
             we ask whether it's ok to associate the negate with the
             division which is not ok for example for
             -((a - b) / c) where (-(a - b)) / c may invoke undefined
             overflow because of negating INT_MIN.  So do not use
             negate_expr_p here but open-code the two important cases.  */
          if (TREE_CODE (TREE_OPERAND (t, 0)) == NEGATE_EXPR
              || (TREE_CODE (TREE_OPERAND (t, 0)) == INTEGER_CST
                  && may_negate_without_overflow_p (TREE_OPERAND (t, 0))))
            return true;
        }
      else if (negate_expr_p (TREE_OPERAND (t, 0)))
        return true;
      return negate_expr_p (TREE_OPERAND (t, 1));

    case NOP_EXPR:
      /* Negate -((double)float) as (double)(-float).  */
      if (TREE_CODE (type) == REAL_TYPE)
        {
          tree tem = strip_float_extensions (t);
          if (tem != t)
            return negate_expr_p (tem);
        }
      break;

    case CALL_EXPR:
      /* Negate -f(x) as f(-x).  */
      if (negate_mathfn_p (builtin_mathfn_code (t)))
        return negate_expr_p (CALL_EXPR_ARG (t, 0));
      break;

    case RSHIFT_EXPR:
      /* Optimize -((int)x >> 31) into (unsigned)x >> 31 for int.  */
      if (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST)
        {
          tree op1 = TREE_OPERAND (t, 1);
          if (wi::eq_p (op1, TYPE_PRECISION (type) - 1))
            return true;
        }
      break;

    default:
      break;
    }
  return false;
}

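/* Illustrative note (not part of the original sources) on the
   MINUS_EXPR case above: with IEEE signed zeros, A == B makes A - B
   evaluate to +0.0, so -(A - B) is -0.0 while B - A is +0.0; the
   rewrite -(A - B) -> B - A would change the sign of a zero result,
   hence the HONOR_SIGNED_ZEROS guard.  */
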
/* Given T, an expression, return a folded tree for -T or NULL_TREE, if no
   simplification is possible.
   If negate_expr_p would return true for T, NULL_TREE will never be
   returned.  */

static tree
fold_negate_expr (location_t loc, tree t)
{
  tree type = TREE_TYPE (t);
  tree tem;

  switch (TREE_CODE (t))
    {
    /* Convert - (~A) to A + 1.  */
    case BIT_NOT_EXPR:
      if (INTEGRAL_TYPE_P (type))
        return fold_build2_loc (loc, PLUS_EXPR, type, TREE_OPERAND (t, 0),
                                build_one_cst (type));
      break;

    case INTEGER_CST:
      tem = fold_negate_const (t, type);
      if (TREE_OVERFLOW (tem) == TREE_OVERFLOW (t)
          || !TYPE_OVERFLOW_TRAPS (type))
        return tem;
      break;

    case REAL_CST:
      tem = fold_negate_const (t, type);
      /* Two's complement FP formats, such as c4x, may overflow.  */
      if (!TREE_OVERFLOW (tem) || !flag_trapping_math)
        return tem;
      break;

    case FIXED_CST:
      tem = fold_negate_const (t, type);
      return tem;

    case COMPLEX_CST:
      {
        tree rpart = negate_expr (TREE_REALPART (t));
        tree ipart = negate_expr (TREE_IMAGPART (t));

        if ((TREE_CODE (rpart) == REAL_CST
             && TREE_CODE (ipart) == REAL_CST)
            || (TREE_CODE (rpart) == INTEGER_CST
                && TREE_CODE (ipart) == INTEGER_CST))
          return build_complex (type, rpart, ipart);
      }
      break;

    case VECTOR_CST:
      {
        int count = TYPE_VECTOR_SUBPARTS (type), i;
        tree *elts = XALLOCAVEC (tree, count);

        for (i = 0; i < count; i++)
          {
            elts[i] = fold_negate_expr (loc, VECTOR_CST_ELT (t, i));
            if (elts[i] == NULL_TREE)
              return NULL_TREE;
          }

        return build_vector (type, elts);
      }

    case COMPLEX_EXPR:
      if (negate_expr_p (t))
        return fold_build2_loc (loc, COMPLEX_EXPR, type,
                                fold_negate_expr (loc, TREE_OPERAND (t, 0)),
                                fold_negate_expr (loc, TREE_OPERAND (t, 1)));
      break;

    case CONJ_EXPR:
      if (negate_expr_p (t))
        return fold_build1_loc (loc, CONJ_EXPR, type,
                                fold_negate_expr (loc, TREE_OPERAND (t, 0)));
      break;

    case NEGATE_EXPR:
      return TREE_OPERAND (t, 0);

    case PLUS_EXPR:
      if (!HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type))
          && !HONOR_SIGNED_ZEROS (TYPE_MODE (type)))
        {
          /* -(A + B) -> (-B) - A.  */
          if (negate_expr_p (TREE_OPERAND (t, 1))
              && reorder_operands_p (TREE_OPERAND (t, 0),
                                     TREE_OPERAND (t, 1)))
            {
              tem = negate_expr (TREE_OPERAND (t, 1));
              return fold_build2_loc (loc, MINUS_EXPR, type,
                                      tem, TREE_OPERAND (t, 0));
            }

          /* -(A + B) -> (-A) - B.  */
          if (negate_expr_p (TREE_OPERAND (t, 0)))
            {
              tem = negate_expr (TREE_OPERAND (t, 0));
              return fold_build2_loc (loc, MINUS_EXPR, type,
                                      tem, TREE_OPERAND (t, 1));
            }
        }
      break;

    case MINUS_EXPR:
      /* - (A - B) -> B - A  */
      if (!HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type))
          && !HONOR_SIGNED_ZEROS (TYPE_MODE (type))
          && reorder_operands_p (TREE_OPERAND (t, 0), TREE_OPERAND (t, 1)))
        return fold_build2_loc (loc, MINUS_EXPR, type,
                                TREE_OPERAND (t, 1), TREE_OPERAND (t, 0));
      break;

    case MULT_EXPR:
      if (TYPE_UNSIGNED (type))
        break;

      /* Fall through.  */

    case RDIV_EXPR:
      if (! HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type)))
        {
          tem = TREE_OPERAND (t, 1);
          if (negate_expr_p (tem))
            return fold_build2_loc (loc, TREE_CODE (t), type,
                                    TREE_OPERAND (t, 0), negate_expr (tem));
          tem = TREE_OPERAND (t, 0);
          if (negate_expr_p (tem))
            return fold_build2_loc (loc, TREE_CODE (t), type,
                                    negate_expr (tem), TREE_OPERAND (t, 1));
        }
      break;

    case TRUNC_DIV_EXPR:
    case ROUND_DIV_EXPR:
    case EXACT_DIV_EXPR:
      /* In general we can't negate A / B, because if A is INT_MIN and
         B is 1, we may turn this into INT_MIN / -1 which is undefined
         and actually traps on some architectures.  But if overflow is
         undefined, we can negate, because - (INT_MIN / 1) is an
         overflow.  */
      if (!INTEGRAL_TYPE_P (type) || TYPE_OVERFLOW_UNDEFINED (type))
        {
          const char * const warnmsg = G_("assuming signed overflow does not "
                                          "occur when negating a division");
          tem = TREE_OPERAND (t, 1);
          if (negate_expr_p (tem))
            {
              if (INTEGRAL_TYPE_P (type)
                  && (TREE_CODE (tem) != INTEGER_CST
                      || integer_onep (tem)))
                fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_MISC);
              return fold_build2_loc (loc, TREE_CODE (t), type,
                                      TREE_OPERAND (t, 0), negate_expr (tem));
            }
          /* If overflow is undefined then we have to be careful because
             we ask whether it's ok to associate the negate with the
             division which is not ok for example for
             -((a - b) / c) where (-(a - b)) / c may invoke undefined
             overflow because of negating INT_MIN.  So do not use
             negate_expr_p here but open-code the two important cases.  */
          tem = TREE_OPERAND (t, 0);
          if ((INTEGRAL_TYPE_P (type)
               && (TREE_CODE (tem) == NEGATE_EXPR
                   || (TREE_CODE (tem) == INTEGER_CST
                       && may_negate_without_overflow_p (tem))))
              || !INTEGRAL_TYPE_P (type))
            return fold_build2_loc (loc, TREE_CODE (t), type,
                                    negate_expr (tem), TREE_OPERAND (t, 1));
        }
      break;

    case NOP_EXPR:
      /* Convert -((double)float) into (double)(-float).  */
      if (TREE_CODE (type) == REAL_TYPE)
        {
          tem = strip_float_extensions (t);
          if (tem != t && negate_expr_p (tem))
            return fold_convert_loc (loc, type, negate_expr (tem));
        }
      break;

    case CALL_EXPR:
      /* Negate -f(x) as f(-x).  */
      if (negate_mathfn_p (builtin_mathfn_code (t))
          && negate_expr_p (CALL_EXPR_ARG (t, 0)))
        {
          tree fndecl, arg;

          fndecl = get_callee_fndecl (t);
          arg = negate_expr (CALL_EXPR_ARG (t, 0));
          return build_call_expr_loc (loc, fndecl, 1, arg);
        }
      break;

    case RSHIFT_EXPR:
      /* Optimize -((int)x >> 31) into (unsigned)x >> 31 for int.  */
      if (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST)
        {
          tree op1 = TREE_OPERAND (t, 1);
          if (wi::eq_p (op1, TYPE_PRECISION (type) - 1))
            {
              tree ntype = TYPE_UNSIGNED (type)
                           ? signed_type_for (type)
                           : unsigned_type_for (type);
              tree temp = fold_convert_loc (loc, ntype, TREE_OPERAND (t, 0));
              temp = fold_build2_loc (loc, RSHIFT_EXPR, ntype, temp, op1);
              return fold_convert_loc (loc, type, temp);
            }
        }
      break;

    default:
      break;
    }

  return NULL_TREE;
}

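/* Illustrative note (not part of the original sources) on the
   RSHIFT_EXPR case above: for 32-bit int, "(int) x >> 31" evaluates
   to 0 or -1 depending on the sign bit, while "(unsigned) x >> 31"
   evaluates to 0 or 1, so the negation of the former equals the
   latter and the NEGATE_EXPR can be dropped entirely.  */
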
/* Like fold_negate_expr, but return a NEGATE_EXPR tree, if T cannot be
   negated in a simpler way.  Also allow for T to be NULL_TREE, in which case
   return NULL_TREE.  */

static tree
negate_expr (tree t)
{
  tree type, tem;
  location_t loc;

  if (t == NULL_TREE)
    return NULL_TREE;

  loc = EXPR_LOCATION (t);
  type = TREE_TYPE (t);
  STRIP_SIGN_NOPS (t);

  tem = fold_negate_expr (loc, t);
  if (!tem)
    tem = build1_loc (loc, NEGATE_EXPR, TREE_TYPE (t), t);
  return fold_convert_loc (loc, type, tem);
}

/* Split a tree IN into a constant, literal and variable parts that could be
   combined with CODE to make IN.  "constant" means an expression with
   TREE_CONSTANT but that isn't an actual constant.  CODE must be a
   commutative arithmetic operation.  Store the constant part into *CONP,
   the literal in *LITP and return the variable part.  If a part isn't
   present, set it to null.  If the tree does not decompose in this way,
   return the entire tree as the variable part and the other parts as null.

   If CODE is PLUS_EXPR we also split trees that use MINUS_EXPR.  In that
   case, we negate an operand that was subtracted.  Except if it is a
   literal for which we use *MINUS_LITP instead.

   If NEGATE_P is true, we are negating all of IN, again except a literal
   for which we use *MINUS_LITP instead.

   If IN is itself a literal or constant, return it as appropriate.

   Note that we do not guarantee that any of the three values will be the
   same type as IN, but they will have the same signedness and mode.  */

static tree
split_tree (tree in, enum tree_code code, tree *conp, tree *litp,
            tree *minus_litp, int negate_p)
{
  tree var = 0;

  *conp = 0;
  *litp = 0;
  *minus_litp = 0;

  /* Strip any conversions that don't change the machine mode or signedness.  */
  STRIP_SIGN_NOPS (in);

  if (TREE_CODE (in) == INTEGER_CST || TREE_CODE (in) == REAL_CST
      || TREE_CODE (in) == FIXED_CST)
    *litp = in;
  else if (TREE_CODE (in) == code
           || ((! FLOAT_TYPE_P (TREE_TYPE (in)) || flag_associative_math)
               && ! SAT_FIXED_POINT_TYPE_P (TREE_TYPE (in))
               /* We can associate addition and subtraction together (even
                  though the C standard doesn't say so) for integers because
                  the value is not affected.  For reals, the value might be
                  affected, so we can't.  */
               && ((code == PLUS_EXPR && TREE_CODE (in) == MINUS_EXPR)
                   || (code == MINUS_EXPR && TREE_CODE (in) == PLUS_EXPR))))
    {
      tree op0 = TREE_OPERAND (in, 0);
      tree op1 = TREE_OPERAND (in, 1);
      int neg1_p = TREE_CODE (in) == MINUS_EXPR;
      int neg_litp_p = 0, neg_conp_p = 0, neg_var_p = 0;

      /* First see if either of the operands is a literal, then a constant.  */
      if (TREE_CODE (op0) == INTEGER_CST || TREE_CODE (op0) == REAL_CST
          || TREE_CODE (op0) == FIXED_CST)
        *litp = op0, op0 = 0;
      else if (TREE_CODE (op1) == INTEGER_CST || TREE_CODE (op1) == REAL_CST
               || TREE_CODE (op1) == FIXED_CST)
        *litp = op1, neg_litp_p = neg1_p, op1 = 0;

      if (op0 != 0 && TREE_CONSTANT (op0))
        *conp = op0, op0 = 0;
      else if (op1 != 0 && TREE_CONSTANT (op1))
        *conp = op1, neg_conp_p = neg1_p, op1 = 0;

      /* If we haven't dealt with either operand, this is not a case we can
         decompose.  Otherwise, VAR is either of the ones remaining, if any.  */
      if (op0 != 0 && op1 != 0)
        var = in;
      else if (op0 != 0)
        var = op0;
      else
        var = op1, neg_var_p = neg1_p;

      /* Now do any needed negations.  */
      if (neg_litp_p)
        *minus_litp = *litp, *litp = 0;
      if (neg_conp_p)
        *conp = negate_expr (*conp);
      if (neg_var_p)
        var = negate_expr (var);
    }
  else if (TREE_CODE (in) == BIT_NOT_EXPR
           && code == PLUS_EXPR)
    {
      /* -X - 1 is folded to ~X, undo that here.  */
      *minus_litp = build_one_cst (TREE_TYPE (in));
      var = negate_expr (TREE_OPERAND (in, 0));
    }
  else if (TREE_CONSTANT (in))
    *conp = in;
  else
    var = in;

  if (negate_p)
    {
      if (*litp)
        *minus_litp = *litp, *litp = 0;
      else if (*minus_litp)
        *litp = *minus_litp, *minus_litp = 0;
      *conp = negate_expr (*conp);
      var = negate_expr (var);
    }

  return var;
}

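/* Illustrative sketch (not part of the original sources): splitting
   "x + 5" with CODE == PLUS_EXPR yields *LITP == 5, *CONP == NULL and
   returns the variable part "x"; splitting "x - 5" yields the same
   variable part with *MINUS_LITP == 5 instead, ready for
   associate_trees below to recombine.  */
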
/* Re-associate trees split by the above function.  T1 and T2 are
   either expressions to associate or null.  Return the new
   expression, if any.  LOC is the location of the new expression.  If
   we build an operation, do it in TYPE and with CODE.  */

static tree
associate_trees (location_t loc, tree t1, tree t2, enum tree_code code, tree type)
{
  if (t1 == 0)
    return t2;
  else if (t2 == 0)
    return t1;

  /* If either input is CODE, a PLUS_EXPR, or a MINUS_EXPR, don't
     try to fold this since we will have infinite recursion.  But do
     deal with any NEGATE_EXPRs.  */
  if (TREE_CODE (t1) == code || TREE_CODE (t2) == code
      || TREE_CODE (t1) == MINUS_EXPR || TREE_CODE (t2) == MINUS_EXPR)
    {
      if (code == PLUS_EXPR)
        {
          if (TREE_CODE (t1) == NEGATE_EXPR)
            return build2_loc (loc, MINUS_EXPR, type,
                               fold_convert_loc (loc, type, t2),
                               fold_convert_loc (loc, type,
                                                 TREE_OPERAND (t1, 0)));
          else if (TREE_CODE (t2) == NEGATE_EXPR)
            return build2_loc (loc, MINUS_EXPR, type,
                               fold_convert_loc (loc, type, t1),
                               fold_convert_loc (loc, type,
                                                 TREE_OPERAND (t2, 0)));
          else if (integer_zerop (t2))
            return fold_convert_loc (loc, type, t1);
        }
      else if (code == MINUS_EXPR)
        {
          if (integer_zerop (t2))
            return fold_convert_loc (loc, type, t1);
        }

      return build2_loc (loc, code, type, fold_convert_loc (loc, type, t1),
                         fold_convert_loc (loc, type, t2));
    }

  return fold_build2_loc (loc, code, type, fold_convert_loc (loc, type, t1),
                          fold_convert_loc (loc, type, t2));
}

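/* Illustrative sketch (not part of the original sources): after
   split_tree decomposes "(x - 5) + 3", a caller can fold the two
   literals with const_binop and recombine, e.g.

     lit = const_binop (MINUS_EXPR, three, five);     => -2
     res = associate_trees (loc, var, lit, PLUS_EXPR, type);

   giving "x + -2".  THREE, FIVE, VAR, LOC and TYPE are hypothetical
   trees standing for the split pieces.  */
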
/* Check whether TYPE1 and TYPE2 are equivalent integer types, suitable
   for use in int_const_binop, size_binop and size_diffop.  */

static bool
int_binop_types_match_p (enum tree_code code, const_tree type1, const_tree type2)
{
  if (!INTEGRAL_TYPE_P (type1) && !POINTER_TYPE_P (type1))
    return false;
  if (!INTEGRAL_TYPE_P (type2) && !POINTER_TYPE_P (type2))
    return false;

  switch (code)
    {
    case LSHIFT_EXPR:
    case RSHIFT_EXPR:
    case LROTATE_EXPR:
    case RROTATE_EXPR:
      return true;

    default:
      break;
    }

  return TYPE_UNSIGNED (type1) == TYPE_UNSIGNED (type2)
         && TYPE_PRECISION (type1) == TYPE_PRECISION (type2)
         && TYPE_MODE (type1) == TYPE_MODE (type2);
}

/* Combine two integer constants ARG1 and ARG2 under operation CODE
   to produce a new constant.  Return NULL_TREE if we don't know how
   to evaluate CODE at compile-time.  */

static tree
int_const_binop_1 (enum tree_code code, const_tree arg1, const_tree parg2,
                   int overflowable)
{
  wide_int res;
  tree t;
  tree type = TREE_TYPE (arg1);
  signop sign = TYPE_SIGN (type);
  bool overflow = false;

  wide_int arg2 = wide_int::from (parg2, TYPE_PRECISION (type),
                                  TYPE_SIGN (TREE_TYPE (parg2)));

  switch (code)
    {
    case BIT_IOR_EXPR:
      res = wi::bit_or (arg1, arg2);
      break;

    case BIT_XOR_EXPR:
      res = wi::bit_xor (arg1, arg2);
      break;

    case BIT_AND_EXPR:
      res = wi::bit_and (arg1, arg2);
      break;

    case RSHIFT_EXPR:
    case LSHIFT_EXPR:
      if (wi::neg_p (arg2))
        {
          arg2 = -arg2;
          if (code == RSHIFT_EXPR)
            code = LSHIFT_EXPR;
          else
            code = RSHIFT_EXPR;
        }

      if (code == RSHIFT_EXPR)
        /* It's unclear from the C standard whether shifts can overflow.
           The following code ignores overflow; perhaps a C standard
           interpretation ruling is needed.  */
        res = wi::rshift (arg1, arg2, sign);
      else
        res = wi::lshift (arg1, arg2);
      break;

    case RROTATE_EXPR:
    case LROTATE_EXPR:
      if (wi::neg_p (arg2))
        {
          arg2 = -arg2;
          if (code == RROTATE_EXPR)
            code = LROTATE_EXPR;
          else
            code = RROTATE_EXPR;
        }

      if (code == RROTATE_EXPR)
        res = wi::rrotate (arg1, arg2);
      else
        res = wi::lrotate (arg1, arg2);
      break;

    case PLUS_EXPR:
      res = wi::add (arg1, arg2, sign, &overflow);
      break;

    case MINUS_EXPR:
      res = wi::sub (arg1, arg2, sign, &overflow);
      break;

    case MULT_EXPR:
      res = wi::mul (arg1, arg2, sign, &overflow);
      break;

    case MULT_HIGHPART_EXPR:
      res = wi::mul_high (arg1, arg2, sign);
      break;

    case TRUNC_DIV_EXPR:
    case EXACT_DIV_EXPR:
      if (arg2 == 0)
        return NULL_TREE;
      res = wi::div_trunc (arg1, arg2, sign, &overflow);
      break;

    case FLOOR_DIV_EXPR:
      if (arg2 == 0)
        return NULL_TREE;
      res = wi::div_floor (arg1, arg2, sign, &overflow);
      break;

    case CEIL_DIV_EXPR:
      if (arg2 == 0)
        return NULL_TREE;
      res = wi::div_ceil (arg1, arg2, sign, &overflow);
      break;

    case ROUND_DIV_EXPR:
      if (arg2 == 0)
        return NULL_TREE;
      res = wi::div_round (arg1, arg2, sign, &overflow);
      break;

    case TRUNC_MOD_EXPR:
      if (arg2 == 0)
        return NULL_TREE;
      res = wi::mod_trunc (arg1, arg2, sign, &overflow);
      break;

    case FLOOR_MOD_EXPR:
      if (arg2 == 0)
        return NULL_TREE;
      res = wi::mod_floor (arg1, arg2, sign, &overflow);
      break;

    case CEIL_MOD_EXPR:
      if (arg2 == 0)
        return NULL_TREE;
      res = wi::mod_ceil (arg1, arg2, sign, &overflow);
      break;

    case ROUND_MOD_EXPR:
      if (arg2 == 0)
        return NULL_TREE;
      res = wi::mod_round (arg1, arg2, sign, &overflow);
      break;

    case MIN_EXPR:
      res = wi::min (arg1, arg2, sign);
      break;

    case MAX_EXPR:
      res = wi::max (arg1, arg2, sign);
      break;

    default:
      return NULL_TREE;
    }

  t = force_fit_type (type, res, overflowable,
                      (((sign == SIGNED || overflowable == -1)
                        && overflow)
                       | TREE_OVERFLOW (arg1) | TREE_OVERFLOW (parg2)));

  return t;
}

tree
int_const_binop (enum tree_code code, const_tree arg1, const_tree arg2)
{
  return int_const_binop_1 (code, arg1, arg2, 1);
}

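/* Illustrative note (not part of the original sources): the division
   codes differ only in rounding, e.g. for -7 / 2:

     TRUNC_DIV_EXPR  -> -3   (toward zero)
     FLOOR_DIV_EXPR  -> -4   (toward negative infinity)
     CEIL_DIV_EXPR   -> -3   (toward positive infinity)
     ROUND_DIV_EXPR  -> -4   (to nearest; -3.5 rounds away from zero)

   and each matching *_MOD_EXPR code is defined so that
   a == (a / b) * b + a % b holds for the pair.  */
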
/* Combine two constants ARG1 and ARG2 under operation CODE to produce a new
   constant.  We assume ARG1 and ARG2 have the same data type, or at least
   are the same kind of constant and the same machine mode.  Return zero if
   combining the constants is not allowed in the current operating mode.  */

static tree
const_binop (enum tree_code code, tree arg1, tree arg2)
{
  /* Sanity check for the recursive cases.  */
  if (!arg1 || !arg2)
    return NULL_TREE;

  STRIP_NOPS (arg1);
  STRIP_NOPS (arg2);

  if (TREE_CODE (arg1) == INTEGER_CST)
    return int_const_binop (code, arg1, arg2);

  if (TREE_CODE (arg1) == REAL_CST)
    {
      enum machine_mode mode;
      REAL_VALUE_TYPE d1;
      REAL_VALUE_TYPE d2;
      REAL_VALUE_TYPE value;
      REAL_VALUE_TYPE result;
      bool inexact;
      tree t, type;

      /* The following codes are handled by real_arithmetic.  */
      switch (code)
        {
        case PLUS_EXPR:
        case MINUS_EXPR:
        case MULT_EXPR:
        case RDIV_EXPR:
        case MIN_EXPR:
        case MAX_EXPR:
          break;

        default:
          return NULL_TREE;
        }

      d1 = TREE_REAL_CST (arg1);
      d2 = TREE_REAL_CST (arg2);

      type = TREE_TYPE (arg1);
      mode = TYPE_MODE (type);

      /* Don't perform operation if we honor signaling NaNs and
         either operand is a NaN.  */
      if (HONOR_SNANS (mode)
          && (REAL_VALUE_ISNAN (d1) || REAL_VALUE_ISNAN (d2)))
        return NULL_TREE;

      /* Don't perform operation if it would raise a division
         by zero exception.  */
      if (code == RDIV_EXPR
          && REAL_VALUES_EQUAL (d2, dconst0)
          && (flag_trapping_math || ! MODE_HAS_INFINITIES (mode)))
        return NULL_TREE;

      /* If either operand is a NaN, just return it.  Otherwise, set up
         for floating-point trap; we return an overflow.  */
      if (REAL_VALUE_ISNAN (d1))
        return arg1;
      else if (REAL_VALUE_ISNAN (d2))
        return arg2;

      inexact = real_arithmetic (&value, code, &d1, &d2);
      real_convert (&result, mode, &value);

      /* Don't constant fold this floating point operation if
         the result has overflowed and flag_trapping_math.  */
      if (flag_trapping_math
          && MODE_HAS_INFINITIES (mode)
          && REAL_VALUE_ISINF (result)
          && !REAL_VALUE_ISINF (d1)
          && !REAL_VALUE_ISINF (d2))
        return NULL_TREE;

      /* Don't constant fold this floating point operation if the
         result may depend upon the run-time rounding mode and
         flag_rounding_math is set, or if GCC's software emulation
         is unable to accurately represent the result.  */
      if ((flag_rounding_math
           || (MODE_COMPOSITE_P (mode) && !flag_unsafe_math_optimizations))
          && (inexact || !real_identical (&result, &value)))
        return NULL_TREE;

      t = build_real (type, result);

      TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2);
      return t;
    }

  if (TREE_CODE (arg1) == FIXED_CST)
    {
      FIXED_VALUE_TYPE f1;
      FIXED_VALUE_TYPE f2;
      FIXED_VALUE_TYPE result;
      tree t, type;
      int sat_p;
      bool overflow_p;

      /* The following codes are handled by fixed_arithmetic.  */
      switch (code)
        {
        case PLUS_EXPR:
        case MINUS_EXPR:
        case MULT_EXPR:
        case TRUNC_DIV_EXPR:
          f2 = TREE_FIXED_CST (arg2);
          break;

        case LSHIFT_EXPR:
        case RSHIFT_EXPR:
          {
            wide_int w2 = arg2;
            f2.data.high = w2.elt (1);
            f2.data.low = w2.elt (0);
            f2.mode = SImode;
          }
          break;

        default:
          return NULL_TREE;
        }

      f1 = TREE_FIXED_CST (arg1);
      type = TREE_TYPE (arg1);
      sat_p = TYPE_SATURATING (type);
      overflow_p = fixed_arithmetic (&result, code, &f1, &f2, sat_p);
      t = build_fixed (type, result);
      /* Propagate overflow flags.  */
      if (overflow_p | TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2))
        TREE_OVERFLOW (t) = 1;
      return t;
    }

  if (TREE_CODE (arg1) == COMPLEX_CST)
    {
      tree type = TREE_TYPE (arg1);
      tree r1 = TREE_REALPART (arg1);
      tree i1 = TREE_IMAGPART (arg1);
      tree r2 = TREE_REALPART (arg2);
      tree i2 = TREE_IMAGPART (arg2);
      tree real, imag;

      switch (code)
        {
        case PLUS_EXPR:
        case MINUS_EXPR:
          real = const_binop (code, r1, r2);
          imag = const_binop (code, i1, i2);
          break;

        case MULT_EXPR:
          if (COMPLEX_FLOAT_TYPE_P (type))
            return do_mpc_arg2 (arg1, arg2, type,
                                /* do_nonfinite= */ folding_initializer,
                                mpc_mul);

          real = const_binop (MINUS_EXPR,
                              const_binop (MULT_EXPR, r1, r2),
                              const_binop (MULT_EXPR, i1, i2));
          imag = const_binop (PLUS_EXPR,
                              const_binop (MULT_EXPR, r1, i2),
                              const_binop (MULT_EXPR, i1, r2));
          break;

        case RDIV_EXPR:
          if (COMPLEX_FLOAT_TYPE_P (type))
            return do_mpc_arg2 (arg1, arg2, type,
                                /* do_nonfinite= */ folding_initializer,
                                mpc_div);
          /* Fallthru ...  */
        case TRUNC_DIV_EXPR:
        case CEIL_DIV_EXPR:
        case FLOOR_DIV_EXPR:
        case ROUND_DIV_EXPR:
          if (flag_complex_method == 0)
            {
              /* Keep this algorithm in sync with
                 tree-complex.c:expand_complex_div_straight().

                 Expand complex division to scalars, straightforward algorithm.
                 a / b = ((ar*br + ai*bi)/t) + i((ai*br - ar*bi)/t)
                 t = br*br + bi*bi  */
              tree magsquared
                = const_binop (PLUS_EXPR,
                               const_binop (MULT_EXPR, r2, r2),
                               const_binop (MULT_EXPR, i2, i2));
              tree t1
                = const_binop (PLUS_EXPR,
                               const_binop (MULT_EXPR, r1, r2),
                               const_binop (MULT_EXPR, i1, i2));
              tree t2
                = const_binop (MINUS_EXPR,
                               const_binop (MULT_EXPR, i1, r2),
                               const_binop (MULT_EXPR, r1, i2));

              real = const_binop (code, t1, magsquared);
              imag = const_binop (code, t2, magsquared);
            }
          else
            {
              /* Keep this algorithm in sync with
                 tree-complex.c:expand_complex_div_wide().

                 Expand complex division to scalars, modified algorithm to
                 minimize overflow with wide input ranges.  */
              tree compare = fold_build2 (LT_EXPR, boolean_type_node,
                                          fold_abs_const (r2, TREE_TYPE (type)),
                                          fold_abs_const (i2, TREE_TYPE (type)));

              if (integer_nonzerop (compare))
                {
                  /* In the TRUE branch, we compute
                     ratio = br/bi;
                     div = (br * ratio) + bi;
                     tr = (ar * ratio) + ai;
                     ti = (ai * ratio) - ar;
                     tr = tr / div;
                     ti = ti / div;  */
                  tree ratio = const_binop (code, r2, i2);
                  tree div = const_binop (PLUS_EXPR, i2,
                                          const_binop (MULT_EXPR, r2, ratio));
                  real = const_binop (MULT_EXPR, r1, ratio);
                  real = const_binop (PLUS_EXPR, real, i1);
                  real = const_binop (code, real, div);

                  imag = const_binop (MULT_EXPR, i1, ratio);
                  imag = const_binop (MINUS_EXPR, imag, r1);
                  imag = const_binop (code, imag, div);
                }
              else
                {
                  /* In the FALSE branch, we compute
                     ratio = bi/br;
                     div = (bi * ratio) + br;
                     tr = (ai * ratio) + ar;
                     ti = ai - (ar * ratio);
                     tr = tr / div;
                     ti = ti / div;  */
                  tree ratio = const_binop (code, i2, r2);
                  tree div = const_binop (PLUS_EXPR, r2,
                                          const_binop (MULT_EXPR, i2, ratio));

                  real = const_binop (MULT_EXPR, i1, ratio);
                  real = const_binop (PLUS_EXPR, real, r1);
                  real = const_binop (code, real, div);

                  imag = const_binop (MULT_EXPR, r1, ratio);
                  imag = const_binop (MINUS_EXPR, i1, imag);
                  imag = const_binop (code, imag, div);
                }
            }
          break;

        default:
          return NULL_TREE;
        }

      if (real && imag)
        return build_complex (type, real, imag);
    }

  if (TREE_CODE (arg1) == VECTOR_CST
      && TREE_CODE (arg2) == VECTOR_CST)
    {
      tree type = TREE_TYPE (arg1);
      int count = TYPE_VECTOR_SUBPARTS (type), i;
      tree *elts = XALLOCAVEC (tree, count);

      for (i = 0; i < count; i++)
        {
          tree elem1 = VECTOR_CST_ELT (arg1, i);
          tree elem2 = VECTOR_CST_ELT (arg2, i);

          elts[i] = const_binop (code, elem1, elem2);

          /* It is possible that const_binop cannot handle the given
             code and returns NULL_TREE.  */
          if (elts[i] == NULL_TREE)
            return NULL_TREE;
        }

      return build_vector (type, elts);
    }

  /* Shifts allow a scalar offset for a vector.  */
  if (TREE_CODE (arg1) == VECTOR_CST
      && TREE_CODE (arg2) == INTEGER_CST)
    {
      tree type = TREE_TYPE (arg1);
      int count = TYPE_VECTOR_SUBPARTS (type), i;
      tree *elts = XALLOCAVEC (tree, count);

      if (code == VEC_LSHIFT_EXPR
          || code == VEC_RSHIFT_EXPR)
        {
          if (!tree_fits_uhwi_p (arg2))
            return NULL_TREE;

          unsigned HOST_WIDE_INT shiftc = tree_to_uhwi (arg2);
          unsigned HOST_WIDE_INT outerc = tree_to_uhwi (TYPE_SIZE (type));
          unsigned HOST_WIDE_INT innerc
            = tree_to_uhwi (TYPE_SIZE (TREE_TYPE (type)));
          if (shiftc >= outerc || (shiftc % innerc) != 0)
            return NULL_TREE;
          int offset = shiftc / innerc;
          /* The direction of VEC_[LR]SHIFT_EXPR is endian dependent.
             For reductions, the compiler always emits VEC_RSHIFT_EXPR;
             for !BYTES_BIG_ENDIAN it picks the first vector element,
             but for BYTES_BIG_ENDIAN the last element of the vector.  */
          if ((code == VEC_RSHIFT_EXPR) ^ (!BYTES_BIG_ENDIAN))
            offset = -offset;
          tree zero = build_zero_cst (TREE_TYPE (type));
          for (i = 0; i < count; i++)
            {
              if (i + offset < 0 || i + offset >= count)
                elts[i] = zero;
              else
                elts[i] = VECTOR_CST_ELT (arg1, i + offset);
            }
        }
      else
        for (i = 0; i < count; i++)
          {
            tree elem1 = VECTOR_CST_ELT (arg1, i);

            elts[i] = const_binop (code, elem1, arg2);

            /* It is possible that const_binop cannot handle the given
               code and returns NULL_TREE.  */
            if (elts[i] == NULL_TREE)
              return NULL_TREE;
          }

      return build_vector (type, elts);
    }
  return NULL_TREE;
}

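/* Illustrative note (not part of the original sources): the wide-range
   complex division above is the classic Smith's algorithm.  Scaling by
   ratio -- whichever of br/bi or bi/br has magnitude at most 1 -- keeps
   the intermediate products near the magnitude of the operands, which
   avoids the premature overflow the straightforward
   (ar*br + ai*bi) / (br*br + bi*bi) form suffers when br or bi is
   close to the largest representable value.  */
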
/* Create a sizetype INT_CST node with NUMBER sign extended.  KIND
   indicates which particular sizetype to create.  */

tree
size_int_kind (HOST_WIDE_INT number, enum size_type_kind kind)
{
  return build_int_cst (sizetype_tab[(int) kind], number);
}

/* Combine operands OP1 and OP2 with arithmetic operation CODE.  CODE
   is a tree code.  The type of the result is taken from the operands.
   Both must be equivalent integer types, ala int_binop_types_match_p.
   If the operands are constant, so is the result.  */

tree
size_binop_loc (location_t loc, enum tree_code code, tree arg0, tree arg1)
{
  tree type = TREE_TYPE (arg0);

  if (arg0 == error_mark_node || arg1 == error_mark_node)
    return error_mark_node;

  gcc_assert (int_binop_types_match_p (code, TREE_TYPE (arg0),
                                       TREE_TYPE (arg1)));

  /* Handle the special case of two integer constants faster.  */
  if (TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
    {
      /* And some specific cases even faster than that.  */
      if (code == PLUS_EXPR)
        {
          if (integer_zerop (arg0) && !TREE_OVERFLOW (arg0))
            return arg1;
          if (integer_zerop (arg1) && !TREE_OVERFLOW (arg1))
            return arg0;
        }
      else if (code == MINUS_EXPR)
        {
          if (integer_zerop (arg1) && !TREE_OVERFLOW (arg1))
            return arg0;
        }
      else if (code == MULT_EXPR)
        {
          if (integer_onep (arg0) && !TREE_OVERFLOW (arg0))
            return arg1;
        }

      /* Handle general case of two integer constants.  For sizetype
         constant calculations we always want to know about overflow,
         even in the unsigned case.  */
      return int_const_binop_1 (code, arg0, arg1, -1);
    }

  return fold_build2_loc (loc, code, type, arg0, arg1);
}

/* Given two values, either both of sizetype or both of bitsizetype,
   compute the difference between the two values.  Return the value
   in signed type corresponding to the type of the operands.  */

tree
size_diffop_loc (location_t loc, tree arg0, tree arg1)
{
  tree type = TREE_TYPE (arg0);
  tree ctype;

  gcc_assert (int_binop_types_match_p (MINUS_EXPR, TREE_TYPE (arg0),
                                       TREE_TYPE (arg1)));

  /* If the type is already signed, just do the simple thing.  */
  if (!TYPE_UNSIGNED (type))
    return size_binop_loc (loc, MINUS_EXPR, arg0, arg1);

  if (type == sizetype)
    ctype = ssizetype;
  else if (type == bitsizetype)
    ctype = sbitsizetype;
  else
    ctype = signed_type_for (type);

  /* If either operand is not a constant, do the conversions to the signed
     type and subtract.  The hardware will do the right thing with any
     overflow in the subtraction.  */
  if (TREE_CODE (arg0) != INTEGER_CST || TREE_CODE (arg1) != INTEGER_CST)
    return size_binop_loc (loc, MINUS_EXPR,
                           fold_convert_loc (loc, ctype, arg0),
                           fold_convert_loc (loc, ctype, arg1));

  /* If ARG0 is larger than ARG1, subtract and return the result in CTYPE.
     Otherwise, subtract the other way, convert to CTYPE (we know that can't
     overflow) and negate (which can't either).  Special-case a result
     of zero while we're here.  */
  if (tree_int_cst_equal (arg0, arg1))
    return build_int_cst (ctype, 0);
  else if (tree_int_cst_lt (arg1, arg0))
    return fold_convert_loc (loc, ctype,
                             size_binop_loc (loc, MINUS_EXPR, arg0, arg1));
  else
    return size_binop_loc (loc, MINUS_EXPR, build_int_cst (ctype, 0),
                           fold_convert_loc (loc, ctype,
                                             size_binop_loc (loc,
                                                             MINUS_EXPR,
                                                             arg1, arg0)));
}

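/* Illustrative sketch (not part of the original sources), with LOC a
   hypothetical location:

     tree d = size_diffop_loc (loc, size_int (4), size_int (8));

   yields the ssizetype constant -4: because 4 < 8, the code computes
   8 - 4 in the unsigned sizetype (which cannot overflow) and then
   negates in the signed ssizetype, instead of letting 4 - 8 wrap.  */
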
/* A subroutine of fold_convert_const handling conversions of an
   INTEGER_CST to another integer type.  */

static tree
fold_convert_const_int_from_int (tree type, const_tree arg1)
{
  /* Given an integer constant, make new constant with new type,
     appropriately sign-extended or truncated.  Use widest_int
     so that any extension is done according to ARG1's type.  */
  return force_fit_type (type, wi::to_widest (arg1),
                         !POINTER_TYPE_P (TREE_TYPE (arg1)),
                         TREE_OVERFLOW (arg1));
}

/* A subroutine of fold_convert_const handling conversions of a REAL_CST
   to an integer type.  */

static tree
fold_convert_const_int_from_real (enum tree_code code, tree type, const_tree arg1)
{
  bool overflow = false;
  tree t;

  /* The following code implements the floating point to integer
     conversion rules required by the Java Language Specification,
     that IEEE NaNs are mapped to zero and values that overflow
     the target precision saturate, i.e. values greater than
     INT_MAX are mapped to INT_MAX, and values less than INT_MIN
     are mapped to INT_MIN.  These semantics are allowed by the
     C and C++ standards that simply state that the behavior of
     FP-to-integer conversion is unspecified upon overflow.  */

  wide_int val;
  REAL_VALUE_TYPE r;
  REAL_VALUE_TYPE x = TREE_REAL_CST (arg1);

  switch (code)
    {
    case FIX_TRUNC_EXPR:
      real_trunc (&r, VOIDmode, &x);
      break;

    default:
      gcc_unreachable ();
    }

  /* If R is NaN, return zero and show we have an overflow.  */
  if (REAL_VALUE_ISNAN (r))
    {
      overflow = true;
      val = wi::zero (TYPE_PRECISION (type));
    }

  /* See if R is less than the lower bound or greater than the
     upper bound.  */

  if (! overflow)
    {
      tree lt = TYPE_MIN_VALUE (type);
      REAL_VALUE_TYPE l = real_value_from_int_cst (NULL_TREE, lt);
      if (REAL_VALUES_LESS (r, l))
        {
          overflow = true;
          val = lt;
        }
    }

  if (! overflow)
    {
      tree ut = TYPE_MAX_VALUE (type);
      if (ut)
        {
          REAL_VALUE_TYPE u = real_value_from_int_cst (NULL_TREE, ut);
          if (REAL_VALUES_LESS (u, r))
            {
              overflow = true;
              val = ut;
            }
        }
    }

  if (! overflow)
    val = real_to_integer (&r, &overflow, TYPE_PRECISION (type));

  t = force_fit_type (type, val, -1, overflow | TREE_OVERFLOW (arg1));
  return t;
}

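/* Illustrative note (not part of the original sources): folding
   (int) 1.0e30 with the routine above saturates to INT_MAX and sets
   TREE_OVERFLOW on the result, while folding a NaN operand gives 0
   with TREE_OVERFLOW set, matching the Java-style semantics the
   comment above describes.  */
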
/* A subroutine of fold_convert_const handling conversions of a
   FIXED_CST to an integer type.  */

static tree
fold_convert_const_int_from_fixed (tree type, const_tree arg1)
{
  tree t;
  double_int temp, temp_trunc;
  unsigned int mode;

  /* Right shift FIXED_CST to temp by fbit.  */
  temp = TREE_FIXED_CST (arg1).data;
  mode = TREE_FIXED_CST (arg1).mode;
  if (GET_MODE_FBIT (mode) < HOST_BITS_PER_DOUBLE_INT)
    {
      temp = temp.rshift (GET_MODE_FBIT (mode),
                          HOST_BITS_PER_DOUBLE_INT,
                          SIGNED_FIXED_POINT_MODE_P (mode));

      /* Left shift temp to temp_trunc by fbit.  */
      temp_trunc = temp.lshift (GET_MODE_FBIT (mode),
                                HOST_BITS_PER_DOUBLE_INT,
                                SIGNED_FIXED_POINT_MODE_P (mode));
    }
  else
    {
      temp = double_int_zero;
      temp_trunc = double_int_zero;
    }

  /* If FIXED_CST is negative, we need to round the value toward 0:
     if any fractional bits are nonzero, add 1 to TEMP.  */
  if (SIGNED_FIXED_POINT_MODE_P (mode)
      && temp_trunc.is_negative ()
      && TREE_FIXED_CST (arg1).data != temp_trunc)
    temp += double_int_one;

  /* Given a fixed-point constant, make new constant with new type,
     appropriately sign-extended or truncated.  */
  t = force_fit_type (type, temp, -1,
                      (temp.is_negative ()
                       && (TYPE_UNSIGNED (type)
                           < TYPE_UNSIGNED (TREE_TYPE (arg1))))
                      | TREE_OVERFLOW (arg1));

  return t;
}

/* A subroutine of fold_convert_const handling conversions of a REAL_CST
   to another floating point type.  */

static tree
fold_convert_const_real_from_real (tree type, const_tree arg1)
{
  REAL_VALUE_TYPE value;
  tree t;

  real_convert (&value, TYPE_MODE (type), &TREE_REAL_CST (arg1));
  t = build_real (type, value);

  /* If converting an infinity or NAN to a representation that doesn't
     have one, set the overflow bit so that we can produce some kind of
     error message at the appropriate point if necessary.  It's not the
     most user-friendly message, but it's better than nothing.  */
  if (REAL_VALUE_ISINF (TREE_REAL_CST (arg1))
      && !MODE_HAS_INFINITIES (TYPE_MODE (type)))
    TREE_OVERFLOW (t) = 1;
  else if (REAL_VALUE_ISNAN (TREE_REAL_CST (arg1))
           && !MODE_HAS_NANS (TYPE_MODE (type)))
    TREE_OVERFLOW (t) = 1;
  /* Regular overflow, conversion produced an infinity in a mode that
     can't represent them.  */
  else if (!MODE_HAS_INFINITIES (TYPE_MODE (type))
           && REAL_VALUE_ISINF (value)
           && !REAL_VALUE_ISINF (TREE_REAL_CST (arg1)))
    TREE_OVERFLOW (t) = 1;
  else
    TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1);
  return t;
}

/* A subroutine of fold_convert_const handling conversions of a FIXED_CST
   to a floating point type.  */

static tree
fold_convert_const_real_from_fixed (tree type, const_tree arg1)
{
  REAL_VALUE_TYPE value;
  tree t;

  real_convert_from_fixed (&value, TYPE_MODE (type), &TREE_FIXED_CST (arg1));
  t = build_real (type, value);

  TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1);
  return t;
}

/* A subroutine of fold_convert_const handling conversions of a FIXED_CST
   to another fixed-point type.  */

static tree
fold_convert_const_fixed_from_fixed (tree type, const_tree arg1)
{
  FIXED_VALUE_TYPE value;
  tree t;
  bool overflow_p;

  overflow_p = fixed_convert (&value, TYPE_MODE (type), &TREE_FIXED_CST (arg1),
                              TYPE_SATURATING (type));
  t = build_fixed (type, value);

  /* Propagate overflow flags.  */
  if (overflow_p | TREE_OVERFLOW (arg1))
    TREE_OVERFLOW (t) = 1;
  return t;
}

/* A subroutine of fold_convert_const handling conversions of an INTEGER_CST
   to a fixed-point type.  */

static tree
fold_convert_const_fixed_from_int (tree type, const_tree arg1)
{
  FIXED_VALUE_TYPE value;
  tree t;
  bool overflow_p;
  double_int di;

  gcc_assert (TREE_INT_CST_NUNITS (arg1) <= 2);

  di.low = TREE_INT_CST_ELT (arg1, 0);
  if (TREE_INT_CST_NUNITS (arg1) == 1)
    di.high = (HOST_WIDE_INT) di.low < 0 ? (HOST_WIDE_INT) -1 : 0;
  else
    di.high = TREE_INT_CST_ELT (arg1, 1);

  overflow_p = fixed_convert_from_int (&value, TYPE_MODE (type), di,
                                       TYPE_UNSIGNED (TREE_TYPE (arg1)),
                                       TYPE_SATURATING (type));
  t = build_fixed (type, value);

  /* Propagate overflow flags.  */
  if (overflow_p | TREE_OVERFLOW (arg1))
    TREE_OVERFLOW (t) = 1;
  return t;
}

/* A subroutine of fold_convert_const handling conversions of a REAL_CST
   to a fixed-point type.  */

static tree
fold_convert_const_fixed_from_real (tree type, const_tree arg1)
{
  FIXED_VALUE_TYPE value;
  tree t;
  bool overflow_p;

  overflow_p = fixed_convert_from_real (&value, TYPE_MODE (type),
                                        &TREE_REAL_CST (arg1),
                                        TYPE_SATURATING (type));
  t = build_fixed (type, value);

  /* Propagate overflow flags.  */
  if (overflow_p | TREE_OVERFLOW (arg1))
    TREE_OVERFLOW (t) = 1;
  return t;
}

/* Attempt to fold type conversion operation CODE of expression ARG1 to
   type TYPE.  If no simplification can be done return NULL_TREE.  */

static tree
fold_convert_const (enum tree_code code, tree type, tree arg1)
{
  if (TREE_TYPE (arg1) == type)
    return arg1;

  if (POINTER_TYPE_P (type) || INTEGRAL_TYPE_P (type)
      || TREE_CODE (type) == OFFSET_TYPE)
    {
      if (TREE_CODE (arg1) == INTEGER_CST)
        return fold_convert_const_int_from_int (type, arg1);
      else if (TREE_CODE (arg1) == REAL_CST)
        return fold_convert_const_int_from_real (code, type, arg1);
      else if (TREE_CODE (arg1) == FIXED_CST)
        return fold_convert_const_int_from_fixed (type, arg1);
    }
  else if (TREE_CODE (type) == REAL_TYPE)
    {
      if (TREE_CODE (arg1) == INTEGER_CST)
        return build_real_from_int_cst (type, arg1);
      else if (TREE_CODE (arg1) == REAL_CST)
        return fold_convert_const_real_from_real (type, arg1);
      else if (TREE_CODE (arg1) == FIXED_CST)
        return fold_convert_const_real_from_fixed (type, arg1);
    }
  else if (TREE_CODE (type) == FIXED_POINT_TYPE)
    {
      if (TREE_CODE (arg1) == FIXED_CST)
        return fold_convert_const_fixed_from_fixed (type, arg1);
      else if (TREE_CODE (arg1) == INTEGER_CST)
        return fold_convert_const_fixed_from_int (type, arg1);
      else if (TREE_CODE (arg1) == REAL_CST)
        return fold_convert_const_fixed_from_real (type, arg1);
    }
  return NULL_TREE;
}

/* Construct a vector of zero elements of vector type TYPE.  */

static tree
build_zero_vector (tree type)
{
  tree t;

  t = fold_convert_const (NOP_EXPR, TREE_TYPE (type), integer_zero_node);
  return build_vector_from_val (type, t);
}

/* Returns true, if ARG is convertible to TYPE using a NOP_EXPR.  */

bool
fold_convertible_p (const_tree type, const_tree arg)
{
  tree orig = TREE_TYPE (arg);

  if (type == orig)
    return true;

  if (TREE_CODE (arg) == ERROR_MARK
      || TREE_CODE (type) == ERROR_MARK
      || TREE_CODE (orig) == ERROR_MARK)
    return false;

  if (TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (orig))
    return true;

  switch (TREE_CODE (type))
    {
    case INTEGER_TYPE: case ENUMERAL_TYPE: case BOOLEAN_TYPE:
    case POINTER_TYPE: case REFERENCE_TYPE:
    case OFFSET_TYPE:
      if (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig)
          || TREE_CODE (orig) == OFFSET_TYPE)
        return true;
      return (TREE_CODE (orig) == VECTOR_TYPE
              && tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (orig)));

    case REAL_TYPE:
    case FIXED_POINT_TYPE:
    case COMPLEX_TYPE:
    case VECTOR_TYPE:
    case VOID_TYPE:
      return TREE_CODE (type) == TREE_CODE (orig);

    default:
      return false;
    }
}

/* Convert expression ARG to type TYPE.  Used by the middle-end for
   simple conversions in preference to calling the front-end's convert.  */

tree
fold_convert_loc (location_t loc, tree type, tree arg)
{
  tree orig = TREE_TYPE (arg);
  tree tem;

  if (type == orig)
    return arg;

  if (TREE_CODE (arg) == ERROR_MARK
      || TREE_CODE (type) == ERROR_MARK
      || TREE_CODE (orig) == ERROR_MARK)
    return error_mark_node;

  switch (TREE_CODE (type))
    {
    case POINTER_TYPE:
    case REFERENCE_TYPE:
      /* Handle conversions between pointers to different address spaces.  */
      if (POINTER_TYPE_P (orig)
          && (TYPE_ADDR_SPACE (TREE_TYPE (type))
              != TYPE_ADDR_SPACE (TREE_TYPE (orig))))
        return fold_build1_loc (loc, ADDR_SPACE_CONVERT_EXPR, type, arg);
      /* fall through */

    case INTEGER_TYPE: case ENUMERAL_TYPE: case BOOLEAN_TYPE:
    case OFFSET_TYPE:
      if (TREE_CODE (arg) == INTEGER_CST)
        {
          tem = fold_convert_const (NOP_EXPR, type, arg);
          if (tem != NULL_TREE)
            return tem;
        }
      if (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig)
          || TREE_CODE (orig) == OFFSET_TYPE)
        return fold_build1_loc (loc, NOP_EXPR, type, arg);
      if (TREE_CODE (orig) == COMPLEX_TYPE)
        return fold_convert_loc (loc, type,
                                 fold_build1_loc (loc, REALPART_EXPR,
                                                  TREE_TYPE (orig), arg));
      gcc_assert (TREE_CODE (orig) == VECTOR_TYPE
                  && tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (orig)));
      return fold_build1_loc (loc, NOP_EXPR, type, arg);

    case REAL_TYPE:
      if (TREE_CODE (arg) == INTEGER_CST)
        {
          tem = fold_convert_const (FLOAT_EXPR, type, arg);
          if (tem != NULL_TREE)
            return tem;
        }
      else if (TREE_CODE (arg) == REAL_CST)
        {
          tem = fold_convert_const (NOP_EXPR, type, arg);
          if (tem != NULL_TREE)
            return tem;
        }
      else if (TREE_CODE (arg) == FIXED_CST)
        {
          tem = fold_convert_const (FIXED_CONVERT_EXPR, type, arg);
          if (tem != NULL_TREE)
            return tem;
        }

      switch (TREE_CODE (orig))
        {
        case INTEGER_TYPE:
        case BOOLEAN_TYPE: case ENUMERAL_TYPE:
        case POINTER_TYPE: case REFERENCE_TYPE:
          return fold_build1_loc (loc, FLOAT_EXPR, type, arg);

        case REAL_TYPE:
          return fold_build1_loc (loc, NOP_EXPR, type, arg);

        case FIXED_POINT_TYPE:
          return fold_build1_loc (loc, FIXED_CONVERT_EXPR, type, arg);

        case COMPLEX_TYPE:
          tem = fold_build1_loc (loc, REALPART_EXPR, TREE_TYPE (orig), arg);
          return fold_convert_loc (loc, type, tem);

        default:
          gcc_unreachable ();
        }

    case FIXED_POINT_TYPE:
      if (TREE_CODE (arg) == FIXED_CST || TREE_CODE (arg) == INTEGER_CST
          || TREE_CODE (arg) == REAL_CST)
        {
          tem = fold_convert_const (FIXED_CONVERT_EXPR, type, arg);
          if (tem != NULL_TREE)
            goto fold_convert_exit;
        }

      switch (TREE_CODE (orig))
        {
        case FIXED_POINT_TYPE:
        case INTEGER_TYPE:
        case ENUMERAL_TYPE:
        case BOOLEAN_TYPE:
        case REAL_TYPE:
          return fold_build1_loc (loc, FIXED_CONVERT_EXPR, type, arg);

        case COMPLEX_TYPE:
          tem = fold_build1_loc (loc, REALPART_EXPR, TREE_TYPE (orig), arg);
          return fold_convert_loc (loc, type, tem);

        default:
          gcc_unreachable ();
        }

    case COMPLEX_TYPE:
      switch (TREE_CODE (orig))
        {
        case INTEGER_TYPE:
        case BOOLEAN_TYPE: case ENUMERAL_TYPE:
        case POINTER_TYPE: case REFERENCE_TYPE:
        case REAL_TYPE:
        case FIXED_POINT_TYPE:
          return fold_build2_loc (loc, COMPLEX_EXPR, type,
                                  fold_convert_loc (loc, TREE_TYPE (type), arg),
                                  fold_convert_loc (loc, TREE_TYPE (type),
                                                    integer_zero_node));
        case COMPLEX_TYPE:
          {
            tree rpart, ipart;

            if (TREE_CODE (arg) == COMPLEX_EXPR)
              {
                rpart = fold_convert_loc (loc, TREE_TYPE (type),
                                          TREE_OPERAND (arg, 0));
                ipart = fold_convert_loc (loc, TREE_TYPE (type),
                                          TREE_OPERAND (arg, 1));
                return fold_build2_loc (loc, COMPLEX_EXPR, type, rpart, ipart);
              }

            arg = save_expr (arg);
            rpart = fold_build1_loc (loc, REALPART_EXPR, TREE_TYPE (orig), arg);
            ipart = fold_build1_loc (loc, IMAGPART_EXPR, TREE_TYPE (orig), arg);
            rpart = fold_convert_loc (loc, TREE_TYPE (type), rpart);
            ipart = fold_convert_loc (loc, TREE_TYPE (type), ipart);
            return fold_build2_loc (loc, COMPLEX_EXPR, type, rpart, ipart);
          }

        default:
          gcc_unreachable ();
        }

    case VECTOR_TYPE:
      if (integer_zerop (arg))
        return build_zero_vector (type);
      gcc_assert (tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (orig)));
      gcc_assert (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig)
                  || TREE_CODE (orig) == VECTOR_TYPE);
      return fold_build1_loc (loc, VIEW_CONVERT_EXPR, type, arg);

    case VOID_TYPE:
      tem = fold_ignored_result (arg);
      return fold_build1_loc (loc, NOP_EXPR, type, tem);

    default:
      if (TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (orig))
2073 return fold_build1_loc (loc, NOP_EXPR, type, arg);
2074 gcc_unreachable ();
2076 fold_convert_exit:
2077 protected_set_expr_location_unshare (tem, loc);
2078 return tem;
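/* Worked example (editorial; N stands for a hypothetical int-typed
   operand): converting N to complex double takes the COMPLEX_TYPE
   case above and materializes a zero imaginary part,

     fold_convert_loc (loc, complex_double_type_node, N)
       => COMPLEX_EXPR <(double) N, 0.0>

   while converting a COMPLEX_EXPR back to a scalar keeps only its
   real part via REALPART_EXPR.  */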
2081 /* Return false if expr can be assumed not to be an lvalue, true
2082 otherwise. */
2084 static bool
2085 maybe_lvalue_p (const_tree x)
2087 /* We only need to wrap lvalue tree codes. */
2088 switch (TREE_CODE (x))
2090 case VAR_DECL:
2091 case PARM_DECL:
2092 case RESULT_DECL:
2093 case LABEL_DECL:
2094 case FUNCTION_DECL:
2095 case SSA_NAME:
2097 case COMPONENT_REF:
2098 case MEM_REF:
2099 case INDIRECT_REF:
2100 case ARRAY_REF:
2101 case ARRAY_RANGE_REF:
2102 case BIT_FIELD_REF:
2103 case OBJ_TYPE_REF:
2105 case REALPART_EXPR:
2106 case IMAGPART_EXPR:
2107 case PREINCREMENT_EXPR:
2108 case PREDECREMENT_EXPR:
2109 case SAVE_EXPR:
2110 case TRY_CATCH_EXPR:
2111 case WITH_CLEANUP_EXPR:
2112 case COMPOUND_EXPR:
2113 case MODIFY_EXPR:
2114 case TARGET_EXPR:
2115 case COND_EXPR:
2116 case BIND_EXPR:
2117 break;
2119 default:
2120 /* Assume the worst for front-end tree codes. */
2121 if ((int)TREE_CODE (x) >= NUM_TREE_CODES)
2122 break;
2123 return false;
2126 return true;
2129 /* Return an expr equal to X but certainly not valid as an lvalue. */
2131 tree
2132 non_lvalue_loc (location_t loc, tree x)
2134 /* While we are in GIMPLE, NON_LVALUE_EXPR doesn't mean anything to
2135 us. */
2136 if (in_gimple_form)
2137 return x;
2139 if (! maybe_lvalue_p (x))
2140 return x;
2141 return build1_loc (loc, NON_LVALUE_EXPR, TREE_TYPE (x), x);
2144 /* Nonzero means lvalues are limited to those valid in pedantic ANSI C.
2145 Zero means allow extended lvalues. */
2147 int pedantic_lvalues;
2149 /* When pedantic, return an expr equal to X but certainly not valid as a
2150 pedantic lvalue. Otherwise, return X. */
2152 static tree
2153 pedantic_non_lvalue_loc (location_t loc, tree x)
2155 if (pedantic_lvalues)
2156 return non_lvalue_loc (loc, x);
2158 return protected_set_expr_location_unshare (x, loc);
2161 /* Given a tree comparison code, return the code that is the logical inverse.
2162 It is generally not safe to do this for floating-point comparisons, except
2163 for EQ_EXPR, NE_EXPR, ORDERED_EXPR and UNORDERED_EXPR, so we return
2164 ERROR_MARK in this case. */
2166 enum tree_code
2167 invert_tree_comparison (enum tree_code code, bool honor_nans)
2169 if (honor_nans && flag_trapping_math && code != EQ_EXPR && code != NE_EXPR
2170 && code != ORDERED_EXPR && code != UNORDERED_EXPR)
2171 return ERROR_MARK;
2173 switch (code)
2175 case EQ_EXPR:
2176 return NE_EXPR;
2177 case NE_EXPR:
2178 return EQ_EXPR;
2179 case GT_EXPR:
2180 return honor_nans ? UNLE_EXPR : LE_EXPR;
2181 case GE_EXPR:
2182 return honor_nans ? UNLT_EXPR : LT_EXPR;
2183 case LT_EXPR:
2184 return honor_nans ? UNGE_EXPR : GE_EXPR;
2185 case LE_EXPR:
2186 return honor_nans ? UNGT_EXPR : GT_EXPR;
2187 case LTGT_EXPR:
2188 return UNEQ_EXPR;
2189 case UNEQ_EXPR:
2190 return LTGT_EXPR;
2191 case UNGT_EXPR:
2192 return LE_EXPR;
2193 case UNGE_EXPR:
2194 return LT_EXPR;
2195 case UNLT_EXPR:
2196 return GE_EXPR;
2197 case UNLE_EXPR:
2198 return GT_EXPR;
2199 case ORDERED_EXPR:
2200 return UNORDERED_EXPR;
2201 case UNORDERED_EXPR:
2202 return ORDERED_EXPR;
2203 default:
2204 gcc_unreachable ();
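/* Hedged illustration of the NaN handling above, for operands X, Y:

     invert (X < Y), !honor_nans                 => X >= Y
     invert (X < Y), honor_nans, !trapping_math  => X unge Y
     invert (X < Y), honor_nans, trapping_math   => ERROR_MARK

   The last case refuses to fold: X < Y traps on unordered operands
   but UNGE does not, so the inversion would change trap behavior.  */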
2208 /* Similar, but return the comparison that results if the operands are
2209 swapped. This is safe for floating-point. */
2211 enum tree_code
2212 swap_tree_comparison (enum tree_code code)
2214 switch (code)
2216 case EQ_EXPR:
2217 case NE_EXPR:
2218 case ORDERED_EXPR:
2219 case UNORDERED_EXPR:
2220 case LTGT_EXPR:
2221 case UNEQ_EXPR:
2222 return code;
2223 case GT_EXPR:
2224 return LT_EXPR;
2225 case GE_EXPR:
2226 return LE_EXPR;
2227 case LT_EXPR:
2228 return GT_EXPR;
2229 case LE_EXPR:
2230 return GE_EXPR;
2231 case UNGT_EXPR:
2232 return UNLT_EXPR;
2233 case UNGE_EXPR:
2234 return UNLE_EXPR;
2235 case UNLT_EXPR:
2236 return UNGT_EXPR;
2237 case UNLE_EXPR:
2238 return UNGE_EXPR;
2239 default:
2240 gcc_unreachable ();
2245 /* Convert a comparison tree code from an enum tree_code representation
2246 into a compcode bit-based encoding. This function is the inverse of
2247 compcode_to_comparison. */
2249 static enum comparison_code
2250 comparison_to_compcode (enum tree_code code)
2252 switch (code)
2254 case LT_EXPR:
2255 return COMPCODE_LT;
2256 case EQ_EXPR:
2257 return COMPCODE_EQ;
2258 case LE_EXPR:
2259 return COMPCODE_LE;
2260 case GT_EXPR:
2261 return COMPCODE_GT;
2262 case NE_EXPR:
2263 return COMPCODE_NE;
2264 case GE_EXPR:
2265 return COMPCODE_GE;
2266 case ORDERED_EXPR:
2267 return COMPCODE_ORD;
2268 case UNORDERED_EXPR:
2269 return COMPCODE_UNORD;
2270 case UNLT_EXPR:
2271 return COMPCODE_UNLT;
2272 case UNEQ_EXPR:
2273 return COMPCODE_UNEQ;
2274 case UNLE_EXPR:
2275 return COMPCODE_UNLE;
2276 case UNGT_EXPR:
2277 return COMPCODE_UNGT;
2278 case LTGT_EXPR:
2279 return COMPCODE_LTGT;
2280 case UNGE_EXPR:
2281 return COMPCODE_UNGE;
2282 default:
2283 gcc_unreachable ();
2287 /* Convert a compcode bit-based encoding of a comparison operator back
2288 to GCC's enum tree_code representation. This function is the
2289 inverse of comparison_to_compcode. */
2291 static enum tree_code
2292 compcode_to_comparison (enum comparison_code code)
2294 switch (code)
2296 case COMPCODE_LT:
2297 return LT_EXPR;
2298 case COMPCODE_EQ:
2299 return EQ_EXPR;
2300 case COMPCODE_LE:
2301 return LE_EXPR;
2302 case COMPCODE_GT:
2303 return GT_EXPR;
2304 case COMPCODE_NE:
2305 return NE_EXPR;
2306 case COMPCODE_GE:
2307 return GE_EXPR;
2308 case COMPCODE_ORD:
2309 return ORDERED_EXPR;
2310 case COMPCODE_UNORD:
2311 return UNORDERED_EXPR;
2312 case COMPCODE_UNLT:
2313 return UNLT_EXPR;
2314 case COMPCODE_UNEQ:
2315 return UNEQ_EXPR;
2316 case COMPCODE_UNLE:
2317 return UNLE_EXPR;
2318 case COMPCODE_UNGT:
2319 return UNGT_EXPR;
2320 case COMPCODE_LTGT:
2321 return LTGT_EXPR;
2322 case COMPCODE_UNGE:
2323 return UNGE_EXPR;
2324 default:
2325 gcc_unreachable ();
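/* Worked illustration of the bit encoding (editorial note): bits 0-2
   stand for LT, EQ and GT, bit 3 for UNORD, so unions and
   intersections of comparisons become plain bit operations:

     COMPCODE_LT | COMPCODE_EQ                  = 1|2   = 3  = COMPCODE_LE
     COMPCODE_LT | COMPCODE_GT | COMPCODE_UNORD = 1|4|8 = 13 = COMPCODE_NE
     COMPCODE_LE & COMPCODE_GE                  = 3&6   = 2  = COMPCODE_EQ  */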
2329 /* Return a tree for the comparison which is the combination of
2330 doing the AND or OR (depending on CODE) of the two operations LCODE
2331 and RCODE on the identical operands LL_ARG and LR_ARG. Take into account
2332 the possibility of trapping if the mode has NaNs, and return NULL_TREE
2333 if this makes the transformation invalid. */
2335 tree
2336 combine_comparisons (location_t loc,
2337 enum tree_code code, enum tree_code lcode,
2338 enum tree_code rcode, tree truth_type,
2339 tree ll_arg, tree lr_arg)
2341 bool honor_nans = HONOR_NANS (TYPE_MODE (TREE_TYPE (ll_arg)));
2342 enum comparison_code lcompcode = comparison_to_compcode (lcode);
2343 enum comparison_code rcompcode = comparison_to_compcode (rcode);
2344 int compcode;
2346 switch (code)
2348 case TRUTH_AND_EXPR: case TRUTH_ANDIF_EXPR:
2349 compcode = lcompcode & rcompcode;
2350 break;
2352 case TRUTH_OR_EXPR: case TRUTH_ORIF_EXPR:
2353 compcode = lcompcode | rcompcode;
2354 break;
2356 default:
2357 return NULL_TREE;
2360 if (!honor_nans)
2362 /* Eliminate unordered comparisons, as well as LTGT and ORD
2363 which are not used unless the mode has NaNs. */
2364 compcode &= ~COMPCODE_UNORD;
2365 if (compcode == COMPCODE_LTGT)
2366 compcode = COMPCODE_NE;
2367 else if (compcode == COMPCODE_ORD)
2368 compcode = COMPCODE_TRUE;
2370 else if (flag_trapping_math)
2372 /* Check that the original operation and the optimized ones will trap
2373 under the same condition. */
2374 bool ltrap = (lcompcode & COMPCODE_UNORD) == 0
2375 && (lcompcode != COMPCODE_EQ)
2376 && (lcompcode != COMPCODE_ORD);
2377 bool rtrap = (rcompcode & COMPCODE_UNORD) == 0
2378 && (rcompcode != COMPCODE_EQ)
2379 && (rcompcode != COMPCODE_ORD);
2380 bool trap = (compcode & COMPCODE_UNORD) == 0
2381 && (compcode != COMPCODE_EQ)
2382 && (compcode != COMPCODE_ORD);
2384 /* In a short-circuited boolean expression the LHS might be
2385 such that the RHS, if evaluated, will never trap. For
2386 example, in ORD (x, y) && (x < y), we evaluate the RHS only
2387 if neither x nor y is NaN. (This is a mixed blessing: for
2388 example, the expression above will never trap, hence
2389 optimizing it to x < y would be invalid). */
2390 if ((code == TRUTH_ORIF_EXPR && (lcompcode & COMPCODE_UNORD))
2391 || (code == TRUTH_ANDIF_EXPR && !(lcompcode & COMPCODE_UNORD)))
2392 rtrap = false;
2394 /* If the comparison was short-circuited, and only the RHS
2395 trapped, we may now generate a spurious trap. */
2396 if (rtrap && !ltrap
2397 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR))
2398 return NULL_TREE;
2400 /* If we changed the conditions that cause a trap, we lose. */
2401 if ((ltrap || rtrap) != trap)
2402 return NULL_TREE;
2405 if (compcode == COMPCODE_TRUE)
2406 return constant_boolean_node (true, truth_type);
2407 else if (compcode == COMPCODE_FALSE)
2408 return constant_boolean_node (false, truth_type);
2409 else
2411 enum tree_code tcode;
2413 tcode = compcode_to_comparison ((enum comparison_code) compcode);
2414 return fold_build2_loc (loc, tcode, truth_type, ll_arg, lr_arg);
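/* Hedged usage sketch, for hypothetical operands A and B of a type
   where the NaN and trapping checks above do not intervene:

     combine_comparisons (loc, TRUTH_ANDIF_EXPR, LT_EXPR, GT_EXPR,
                          boolean_type_node, A, B)
       => compcode 1 & 4 = 0 (COMPCODE_FALSE) => constant false

     combine_comparisons (loc, TRUTH_ORIF_EXPR, LT_EXPR, EQ_EXPR,
                          boolean_type_node, A, B)
       => compcode 1 | 2 = 3 (COMPCODE_LE) => A <= B  */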
2418 /* Return nonzero if two operands (typically of the same tree node)
2419 are necessarily equal. If either argument has side-effects this
2420 function returns zero. FLAGS modifies behavior as follows:
2422 If OEP_ONLY_CONST is set, only return nonzero for constants.
2423 This function tests whether the operands are indistinguishable;
2424 it does not test whether they are equal using C's == operation.
2425 The distinction is important for IEEE floating point, because
2426 (1) -0.0 and 0.0 are distinguishable, but -0.0==0.0, and
2427 (2) two NaNs may be indistinguishable, but NaN!=NaN.
2429 If OEP_ONLY_CONST is unset, a VAR_DECL is considered equal to itself
2430 even though it may hold multiple values during a function.
2431 This is because a GCC tree node guarantees that nothing else is
2432 executed between the evaluation of its "operands" (which may often
2433 be evaluated in arbitrary order). Hence if the operands themselves
2434 don't side-effect, the VAR_DECLs, PARM_DECLs etc... must hold the
2435 same value in each operand/subexpression. Hence leaving OEP_ONLY_CONST
2436 unset means assuming isochronic (or instantaneous) tree equivalence.
2437 Unless comparing arbitrary expression trees, such as from different
2438 statements, this flag can usually be left unset.
2440 If OEP_PURE_SAME is set, then pure functions with identical arguments
2441 are considered the same. It is used when the caller has other ways
2442 to ensure that global memory is unchanged in between.
2444 If OEP_ALLOW_NULL is set, this routine will not crash on NULL operands,
2445 and two NULL operands are considered equal. This flag is usually set
2446 in the context of frontend when ARG0 and/or ARG1 may be NULL mostly due
2447 to recursion on partially built expressions (e.g. a CAST_EXPR on a NULL
2448 tree.) In this case, we certainly don't want the compiler to crash and
2449 it's OK to consider two NULL operands equal. On the other hand, when
2450 called in the context of code generation and optimization, if NULL
2451 operands are not expected, silently ignoring them could be dangerous
2452 and might cause problems downstream that are hard to find/debug. In that
2453 case, the flag should probably not be set. */
2455 int
2456 operand_equal_p (const_tree arg0, const_tree arg1, unsigned int flags)
2458 /* If either is NULL, they must be both NULL to be equal. We only do this
2459 check when OEP_ALLOW_NULL is set. */
2460 if ((flags & OEP_ALLOW_NULL) && (!arg0 || !arg1))
2461 return arg0 == arg1;
2463 /* If either is ERROR_MARK, they aren't equal. */
2464 if (TREE_CODE (arg0) == ERROR_MARK || TREE_CODE (arg1) == ERROR_MARK
2465 || TREE_TYPE (arg0) == error_mark_node
2466 || TREE_TYPE (arg1) == error_mark_node)
2467 return 0;
2469 /* Similarly, if either does not have a type (like a released SSA name),
2470 they aren't equal. */
2471 if (!TREE_TYPE (arg0) || !TREE_TYPE (arg1))
2473 /* If the caller chooses to allow the comparison of operands without
2474 types, we continue the comparison only when neither of them
2475 has a type. */
2476 if (!(flags & OEP_ALLOW_NO_TYPE) || TREE_TYPE (arg0) || TREE_TYPE (arg1))
2477 return 0;
2480 /* Check equality of integer constants before bailing out due to
2481 precision differences. */
2482 if (TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
2483 return tree_int_cst_equal (arg0, arg1);
2485 /* If both types don't have the same signedness, then we can't consider
2486 them equal. We must check this before the STRIP_NOPS calls
2487 because they may change the signedness of the arguments. As pointers
2488 strictly don't have a signedness, require either two pointers or
2489 two non-pointers as well. */
2490 if (TREE_TYPE (arg0)
2491 && (TYPE_UNSIGNED (TREE_TYPE (arg0)) != TYPE_UNSIGNED (TREE_TYPE (arg1))
2492 || POINTER_TYPE_P (TREE_TYPE (arg0))
2493 != POINTER_TYPE_P (TREE_TYPE (arg1))))
2494 return 0;
2496 /* We cannot consider pointers to different address space equal. */
2497 if (TREE_TYPE (arg0)
2498 && (POINTER_TYPE_P (TREE_TYPE (arg0)) && POINTER_TYPE_P (TREE_TYPE (arg1))
2499 && (TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (arg0)))
2500 != TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (arg1))))))
2501 return 0;
2503 /* If both types don't have the same precision, then it is not safe
2504 to strip NOPs. */
2505 if (element_precision (TREE_TYPE (arg0))
2506 != element_precision (TREE_TYPE (arg1)))
2507 return 0;
2509 STRIP_NOPS (arg0);
2510 STRIP_NOPS (arg1);
2512 /* In case both args are comparisons but with different comparison
2513 code, try to swap the comparison operands of one arg to produce
2514 a match and compare that variant. */
2515 if (TREE_CODE (arg0) != TREE_CODE (arg1)
2516 && COMPARISON_CLASS_P (arg0)
2517 && COMPARISON_CLASS_P (arg1))
2519 enum tree_code swap_code = swap_tree_comparison (TREE_CODE (arg1));
2521 if (TREE_CODE (arg0) == swap_code)
2522 return operand_equal_p (TREE_OPERAND (arg0, 0),
2523 TREE_OPERAND (arg1, 1), flags)
2524 && operand_equal_p (TREE_OPERAND (arg0, 1),
2525 TREE_OPERAND (arg1, 0), flags);
2528 if (TREE_CODE (arg0) != TREE_CODE (arg1)
2529 /* NOP_EXPR and CONVERT_EXPR are considered equal. */
2530 && !(CONVERT_EXPR_P (arg0) && CONVERT_EXPR_P (arg1)))
2531 return 0;
2533 /* This is needed for conversions and for COMPONENT_REF.
2534 Might as well play it safe and always test this. */
2535 if (TREE_CODE (TREE_TYPE (arg0)) == ERROR_MARK
2536 || TREE_CODE (TREE_TYPE (arg1)) == ERROR_MARK
2537 || TYPE_MODE (TREE_TYPE (arg0)) != TYPE_MODE (TREE_TYPE (arg1)))
2538 return 0;
2540 /* If ARG0 and ARG1 are the same SAVE_EXPR, they are necessarily equal.
2541 We don't care about side effects in that case because the SAVE_EXPR
2542 takes care of that for us. In all other cases, two expressions are
2543 equal if they have no side effects. If we have two identical
2544 expressions with side effects that should be treated the same due
2545 to the only side effects being identical SAVE_EXPR's, that will
2546 be detected in the recursive calls below.
2547 If we are taking an invariant address of two identical objects
2548 they are necessarily equal as well. */
2549 if (arg0 == arg1 && ! (flags & OEP_ONLY_CONST)
2550 && (TREE_CODE (arg0) == SAVE_EXPR
2551 || (flags & OEP_CONSTANT_ADDRESS_OF)
2552 || (! TREE_SIDE_EFFECTS (arg0) && ! TREE_SIDE_EFFECTS (arg1))))
2553 return 1;
2555 /* Next handle constant cases, those for which we can return 1 even
2556 if ONLY_CONST is set. */
2557 if (TREE_CONSTANT (arg0) && TREE_CONSTANT (arg1))
2558 switch (TREE_CODE (arg0))
2560 case INTEGER_CST:
2561 return tree_int_cst_equal (arg0, arg1);
2563 case FIXED_CST:
2564 return FIXED_VALUES_IDENTICAL (TREE_FIXED_CST (arg0),
2565 TREE_FIXED_CST (arg1));
2567 case REAL_CST:
2568 if (REAL_VALUES_IDENTICAL (TREE_REAL_CST (arg0),
2569 TREE_REAL_CST (arg1)))
2570 return 1;
2573 if (TREE_TYPE (arg0)
2574 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0))))
2576 /* If we do not distinguish between signed and unsigned zero,
2577 consider them equal. */
2578 if (real_zerop (arg0) && real_zerop (arg1))
2579 return 1;
2581 return 0;
2583 case VECTOR_CST:
2585 unsigned i;
2587 if (VECTOR_CST_NELTS (arg0) != VECTOR_CST_NELTS (arg1))
2588 return 0;
2590 for (i = 0; i < VECTOR_CST_NELTS (arg0); ++i)
2592 if (!operand_equal_p (VECTOR_CST_ELT (arg0, i),
2593 VECTOR_CST_ELT (arg1, i), flags))
2594 return 0;
2596 return 1;
2599 case COMPLEX_CST:
2600 return (operand_equal_p (TREE_REALPART (arg0), TREE_REALPART (arg1),
2601 flags)
2602 && operand_equal_p (TREE_IMAGPART (arg0), TREE_IMAGPART (arg1),
2603 flags));
2605 case STRING_CST:
2606 return (TREE_STRING_LENGTH (arg0) == TREE_STRING_LENGTH (arg1)
2607 && ! memcmp (TREE_STRING_POINTER (arg0),
2608 TREE_STRING_POINTER (arg1),
2609 TREE_STRING_LENGTH (arg0)));
2611 case ADDR_EXPR:
2612 return operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 0),
2613 TREE_CONSTANT (arg0) && TREE_CONSTANT (arg1)
2614 ? OEP_CONSTANT_ADDRESS_OF : 0);
2615 default:
2616 break;
2619 if (flags & OEP_ONLY_CONST)
2620 return 0;
2622 /* Define macros to test an operand from arg0 and arg1 for equality and a
2623 variant that allows null and views null as being different from any
2624 non-null value. In the latter case, if either is null, they both
2625 must be; otherwise, do the normal comparison. */
2626 #define OP_SAME(N) operand_equal_p (TREE_OPERAND (arg0, N), \
2627 TREE_OPERAND (arg1, N), flags)
2629 #define OP_SAME_WITH_NULL(N) \
2630 ((!TREE_OPERAND (arg0, N) || !TREE_OPERAND (arg1, N)) \
2631 ? TREE_OPERAND (arg0, N) == TREE_OPERAND (arg1, N) : OP_SAME (N))
2633 switch (TREE_CODE_CLASS (TREE_CODE (arg0)))
2635 case tcc_unary:
2636 /* Two conversions are equal only if signedness and modes match. */
2637 switch (TREE_CODE (arg0))
2639 CASE_CONVERT:
2640 case FIX_TRUNC_EXPR:
2641 if (TREE_TYPE (arg0)
2642 && (TYPE_UNSIGNED (TREE_TYPE (arg0))
2643 != TYPE_UNSIGNED (TREE_TYPE (arg1))))
2644 return 0;
2645 break;
2646 default:
2647 break;
2650 return OP_SAME (0);
2653 case tcc_comparison:
2654 case tcc_binary:
2655 if (OP_SAME (0) && OP_SAME (1))
2656 return 1;
2658 /* For commutative ops, allow the other order. */
2659 return (commutative_tree_code (TREE_CODE (arg0))
2660 && operand_equal_p (TREE_OPERAND (arg0, 0),
2661 TREE_OPERAND (arg1, 1), flags)
2662 && operand_equal_p (TREE_OPERAND (arg0, 1),
2663 TREE_OPERAND (arg1, 0), flags));
2665 case tcc_reference:
2666 /* If either of the pointer (or reference) expressions we are
2667 dereferencing contain a side effect, these cannot be equal,
2668 but their addresses can be. */
2669 if ((flags & OEP_CONSTANT_ADDRESS_OF) == 0
2670 && (TREE_SIDE_EFFECTS (arg0)
2671 || TREE_SIDE_EFFECTS (arg1)))
2672 return 0;
2674 switch (TREE_CODE (arg0))
2676 case INDIRECT_REF:
2677 flags &= ~OEP_CONSTANT_ADDRESS_OF;
2678 return OP_SAME (0);
2680 case REALPART_EXPR:
2681 case IMAGPART_EXPR:
2682 return OP_SAME (0);
2684 case TARGET_MEM_REF:
2685 flags &= ~OEP_CONSTANT_ADDRESS_OF;
2686 /* Require equal extra operands and then fall through to MEM_REF
2687 handling of the two common operands. */
2688 if (!OP_SAME_WITH_NULL (2)
2689 || !OP_SAME_WITH_NULL (3)
2690 || !OP_SAME_WITH_NULL (4))
2691 return 0;
2692 /* Fallthru. */
2693 case MEM_REF:
2694 flags &= ~OEP_CONSTANT_ADDRESS_OF;
2695 /* Require equal access sizes, and similar pointer types.
2696 We can have incomplete types for array references of
2697 variable-sized arrays from the Fortran frontend
2698 though. Also verify the types are compatible. */
2699 return (TREE_TYPE (arg0)
2700 && (TYPE_SIZE (TREE_TYPE (arg0))
2701 == TYPE_SIZE (TREE_TYPE (arg1))
2702 || (TYPE_SIZE (TREE_TYPE (arg0))
2703 && TYPE_SIZE (TREE_TYPE (arg1))
2704 && operand_equal_p (TYPE_SIZE (TREE_TYPE (arg0)),
2705 TYPE_SIZE (TREE_TYPE (arg1)),
2706 flags)))
2707 && types_compatible_p (TREE_TYPE (arg0), TREE_TYPE (arg1))
2708 && alias_ptr_types_compatible_p
2709 (TREE_TYPE (TREE_OPERAND (arg0, 1)),
2710 TREE_TYPE (TREE_OPERAND (arg1, 1)))
2711 && OP_SAME (0) && OP_SAME (1));
2713 case ARRAY_REF:
2714 case ARRAY_RANGE_REF:
2715 /* Operands 2 and 3 may be null.
2716 Compare the array index by value if it is constant first as we
2717 may have different types but same value here. */
2718 if (!OP_SAME (0))
2719 return 0;
2720 flags &= ~OEP_CONSTANT_ADDRESS_OF;
2721 return ((tree_int_cst_equal (TREE_OPERAND (arg0, 1),
2722 TREE_OPERAND (arg1, 1))
2723 || OP_SAME (1))
2724 && OP_SAME_WITH_NULL (2)
2725 && OP_SAME_WITH_NULL (3));
2727 case COMPONENT_REF:
2728 /* Handle operand 2 the same as for ARRAY_REF. Operand 0
2729 may be NULL when we're called to compare MEM_EXPRs. */
2730 if (!OP_SAME_WITH_NULL (0)
2731 || !OP_SAME (1))
2732 return 0;
2733 flags &= ~OEP_CONSTANT_ADDRESS_OF;
2734 return OP_SAME_WITH_NULL (2);
2736 case BIT_FIELD_REF:
2737 if (!OP_SAME (0))
2738 return 0;
2739 flags &= ~OEP_CONSTANT_ADDRESS_OF;
2740 return OP_SAME (1) && OP_SAME (2);
2742 default:
2743 return 0;
2746 case tcc_expression:
2747 switch (TREE_CODE (arg0))
2749 case ADDR_EXPR:
2750 case TRUTH_NOT_EXPR:
2751 return OP_SAME (0);
2753 case TRUTH_ANDIF_EXPR:
2754 case TRUTH_ORIF_EXPR:
2755 return OP_SAME (0) && OP_SAME (1);
2757 case FMA_EXPR:
2758 case WIDEN_MULT_PLUS_EXPR:
2759 case WIDEN_MULT_MINUS_EXPR:
2760 if (!OP_SAME (2))
2761 return 0;
2762 /* The multiplication operands are commutative. */
2763 /* FALLTHRU */
2765 case TRUTH_AND_EXPR:
2766 case TRUTH_OR_EXPR:
2767 case TRUTH_XOR_EXPR:
2768 if (OP_SAME (0) && OP_SAME (1))
2769 return 1;
2771 /* Otherwise take into account this is a commutative operation. */
2772 return (operand_equal_p (TREE_OPERAND (arg0, 0),
2773 TREE_OPERAND (arg1, 1), flags)
2774 && operand_equal_p (TREE_OPERAND (arg0, 1),
2775 TREE_OPERAND (arg1, 0), flags));
2777 case COND_EXPR:
2778 case VEC_COND_EXPR:
2779 case DOT_PROD_EXPR:
2780 return OP_SAME (0) && OP_SAME (1) && OP_SAME (2);
2782 default:
2783 return 0;
2786 case tcc_vl_exp:
2787 switch (TREE_CODE (arg0))
2789 case CALL_EXPR:
2790 /* If the CALL_EXPRs call different functions, then they
2791 clearly cannot be equal. */
2792 if (! operand_equal_p (CALL_EXPR_FN (arg0), CALL_EXPR_FN (arg1),
2793 flags))
2794 return 0;
2797 unsigned int cef = call_expr_flags (arg0);
2798 if (flags & OEP_PURE_SAME)
2799 cef &= ECF_CONST | ECF_PURE;
2800 else
2801 cef &= ECF_CONST;
2802 if (!cef)
2803 return 0;
2806 /* Now see if all the arguments are the same. */
2808 const_call_expr_arg_iterator iter0, iter1;
2809 const_tree a0, a1;
2810 for (a0 = first_const_call_expr_arg (arg0, &iter0),
2811 a1 = first_const_call_expr_arg (arg1, &iter1);
2812 a0 && a1;
2813 a0 = next_const_call_expr_arg (&iter0),
2814 a1 = next_const_call_expr_arg (&iter1))
2815 if (! operand_equal_p (a0, a1, flags))
2816 return 0;
2818 /* If we get here and both argument lists are exhausted
2819 then the CALL_EXPRs are equal. */
2820 return ! (a0 || a1);
2822 default:
2823 return 0;
2826 case tcc_declaration:
2827 /* Consider __builtin_sqrt equal to sqrt. */
2828 return (TREE_CODE (arg0) == FUNCTION_DECL
2829 && DECL_BUILT_IN (arg0) && DECL_BUILT_IN (arg1)
2830 && DECL_BUILT_IN_CLASS (arg0) == DECL_BUILT_IN_CLASS (arg1)
2831 && DECL_FUNCTION_CODE (arg0) == DECL_FUNCTION_CODE (arg1));
2833 default:
2834 return 0;
2837 #undef OP_SAME
2838 #undef OP_SAME_WITH_NULL
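/* Editorial examples of the rules above, assuming a and b are
   side-effect-free local variables of the same type:

     operand_equal_p (a + b, b + a, 0)  => 1  (commutative PLUS_EXPR)
     operand_equal_p (a < b, b > a, 0)  => 1  (swapped comparison)
     operand_equal_p (f (a), f (a), 0)  => 0 unless f is ECF_CONST
                                          (or ECF_PURE with OEP_PURE_SAME)  */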
2841 /* Similar to operand_equal_p, but see if ARG0 might have been made by
2842 shorten_compare from ARG1 when ARG1 was being compared with OTHER.
2844 When in doubt, return 0. */
2846 static int
2847 operand_equal_for_comparison_p (tree arg0, tree arg1, tree other)
2849 int unsignedp1, unsignedpo;
2850 tree primarg0, primarg1, primother;
2851 unsigned int correct_width;
2853 if (operand_equal_p (arg0, arg1, 0))
2854 return 1;
2856 if (! INTEGRAL_TYPE_P (TREE_TYPE (arg0))
2857 || ! INTEGRAL_TYPE_P (TREE_TYPE (arg1)))
2858 return 0;
2860 /* Discard any conversions that don't change the modes of ARG0 and ARG1
2861 and see if the inner values are the same. This removes any
2862 signedness comparison, which doesn't matter here. */
2863 primarg0 = arg0, primarg1 = arg1;
2864 STRIP_NOPS (primarg0);
2865 STRIP_NOPS (primarg1);
2866 if (operand_equal_p (primarg0, primarg1, 0))
2867 return 1;
2869 /* Duplicate what shorten_compare does to ARG1 and see if that gives the
2870 actual comparison operand, ARG0.
2872 First throw away any conversions to wider types
2873 already present in the operands. */
2875 primarg1 = get_narrower (arg1, &unsignedp1);
2876 primother = get_narrower (other, &unsignedpo);
2878 correct_width = TYPE_PRECISION (TREE_TYPE (arg1));
2879 if (unsignedp1 == unsignedpo
2880 && TYPE_PRECISION (TREE_TYPE (primarg1)) < correct_width
2881 && TYPE_PRECISION (TREE_TYPE (primother)) < correct_width)
2883 tree type = TREE_TYPE (arg0);
2885 /* Make sure shorter operand is extended the right way
2886 to match the longer operand. */
2887 primarg1 = fold_convert (signed_or_unsigned_type_for
2888 (unsignedp1, TREE_TYPE (primarg1)), primarg1);
2890 if (operand_equal_p (arg0, fold_convert (type, primarg1), 0))
2891 return 1;
2894 return 0;
2897 /* See if ARG is an expression that is either a comparison or is performing
2898 arithmetic on comparisons. The comparisons must only be comparing
2899 two different values, which will be stored in *CVAL1 and *CVAL2; if
2900 they are nonzero it means that some operands have already been found.
2901 No variables may be used anywhere else in the expression except in the
2902 comparisons. If SAVE_P is true it means we removed a SAVE_EXPR around
2903 the expression and save_expr needs to be called with CVAL1 and CVAL2.
2905 If this is true, return 1. Otherwise, return zero. */
2907 static int
2908 twoval_comparison_p (tree arg, tree *cval1, tree *cval2, int *save_p)
2910 enum tree_code code = TREE_CODE (arg);
2911 enum tree_code_class tclass = TREE_CODE_CLASS (code);
2913 /* We can handle some of the tcc_expression cases here. */
2914 if (tclass == tcc_expression && code == TRUTH_NOT_EXPR)
2915 tclass = tcc_unary;
2916 else if (tclass == tcc_expression
2917 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR
2918 || code == COMPOUND_EXPR))
2919 tclass = tcc_binary;
2921 else if (tclass == tcc_expression && code == SAVE_EXPR
2922 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (arg, 0)))
2924 /* If we've already found a CVAL1 or CVAL2, this expression is
2925 too complex to handle. */
2926 if (*cval1 || *cval2)
2927 return 0;
2929 tclass = tcc_unary;
2930 *save_p = 1;
2933 switch (tclass)
2935 case tcc_unary:
2936 return twoval_comparison_p (TREE_OPERAND (arg, 0), cval1, cval2, save_p);
2938 case tcc_binary:
2939 return (twoval_comparison_p (TREE_OPERAND (arg, 0), cval1, cval2, save_p)
2940 && twoval_comparison_p (TREE_OPERAND (arg, 1),
2941 cval1, cval2, save_p));
2943 case tcc_constant:
2944 return 1;
2946 case tcc_expression:
2947 if (code == COND_EXPR)
2948 return (twoval_comparison_p (TREE_OPERAND (arg, 0),
2949 cval1, cval2, save_p)
2950 && twoval_comparison_p (TREE_OPERAND (arg, 1),
2951 cval1, cval2, save_p)
2952 && twoval_comparison_p (TREE_OPERAND (arg, 2),
2953 cval1, cval2, save_p));
2954 return 0;
2956 case tcc_comparison:
2957 /* First see if we can handle the first operand, then the second. For
2958 the second operand, we know *CVAL1 can't be zero. It must be that
2959 one side of the comparison is each of the values; test for the
2960 case where this isn't true by failing if the two operands
2961 are the same. */
2963 if (operand_equal_p (TREE_OPERAND (arg, 0),
2964 TREE_OPERAND (arg, 1), 0))
2965 return 0;
2967 if (*cval1 == 0)
2968 *cval1 = TREE_OPERAND (arg, 0);
2969 else if (operand_equal_p (*cval1, TREE_OPERAND (arg, 0), 0))
2971 else if (*cval2 == 0)
2972 *cval2 = TREE_OPERAND (arg, 0);
2973 else if (operand_equal_p (*cval2, TREE_OPERAND (arg, 0), 0))
2975 else
2976 return 0;
2978 if (operand_equal_p (*cval1, TREE_OPERAND (arg, 1), 0))
2980 else if (*cval2 == 0)
2981 *cval2 = TREE_OPERAND (arg, 1);
2982 else if (operand_equal_p (*cval2, TREE_OPERAND (arg, 1), 0))
2984 else
2985 return 0;
2987 return 1;
2989 default:
2990 return 0;
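/* Hedged illustration: for ARG = (a < b) | (a == b) the walk above
   succeeds with *CVAL1 = a and *CVAL2 = b, whereas (a < b) | (c < d)
   fails as soon as c matches neither recorded value.  */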
2994 /* ARG is a tree that is known to contain just arithmetic operations and
2995 comparisons. Evaluate the operations in the tree substituting NEW0 for
2996 any occurrence of OLD0 as an operand of a comparison and likewise for
2997 NEW1 and OLD1. */
2999 static tree
3000 eval_subst (location_t loc, tree arg, tree old0, tree new0,
3001 tree old1, tree new1)
3003 tree type = TREE_TYPE (arg);
3004 enum tree_code code = TREE_CODE (arg);
3005 enum tree_code_class tclass = TREE_CODE_CLASS (code);
3007 /* We can handle some of the tcc_expression cases here. */
3008 if (tclass == tcc_expression && code == TRUTH_NOT_EXPR)
3009 tclass = tcc_unary;
3010 else if (tclass == tcc_expression
3011 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR))
3012 tclass = tcc_binary;
3014 switch (tclass)
3016 case tcc_unary:
3017 return fold_build1_loc (loc, code, type,
3018 eval_subst (loc, TREE_OPERAND (arg, 0),
3019 old0, new0, old1, new1));
3021 case tcc_binary:
3022 return fold_build2_loc (loc, code, type,
3023 eval_subst (loc, TREE_OPERAND (arg, 0),
3024 old0, new0, old1, new1),
3025 eval_subst (loc, TREE_OPERAND (arg, 1),
3026 old0, new0, old1, new1));
3028 case tcc_expression:
3029 switch (code)
3031 case SAVE_EXPR:
3032 return eval_subst (loc, TREE_OPERAND (arg, 0), old0, new0,
3033 old1, new1);
3035 case COMPOUND_EXPR:
3036 return eval_subst (loc, TREE_OPERAND (arg, 1), old0, new0,
3037 old1, new1);
3039 case COND_EXPR:
3040 return fold_build3_loc (loc, code, type,
3041 eval_subst (loc, TREE_OPERAND (arg, 0),
3042 old0, new0, old1, new1),
3043 eval_subst (loc, TREE_OPERAND (arg, 1),
3044 old0, new0, old1, new1),
3045 eval_subst (loc, TREE_OPERAND (arg, 2),
3046 old0, new0, old1, new1));
3047 default:
3048 break;
3050 /* Fall through - ??? */
3052 case tcc_comparison:
3054 tree arg0 = TREE_OPERAND (arg, 0);
3055 tree arg1 = TREE_OPERAND (arg, 1);
3057 /* We need to check both for exact equality and tree equality. The
3058 former will be true if the operand has a side-effect. In that
3059 case, we know the operand occurred exactly once. */
3061 if (arg0 == old0 || operand_equal_p (arg0, old0, 0))
3062 arg0 = new0;
3063 else if (arg0 == old1 || operand_equal_p (arg0, old1, 0))
3064 arg0 = new1;
3066 if (arg1 == old0 || operand_equal_p (arg1, old0, 0))
3067 arg1 = new0;
3068 else if (arg1 == old1 || operand_equal_p (arg1, old1, 0))
3069 arg1 = new1;
3071 return fold_build2_loc (loc, code, type, arg0, arg1);
3074 default:
3075 return arg;
3079 /* Return a tree for the case when the result of an expression is RESULT
3080 converted to TYPE and OMITTED was previously an operand of the expression
3081 but is now not needed (e.g., we folded OMITTED * 0).
3083 If OMITTED has side effects, we must evaluate it. Otherwise, just do
3084 the conversion of RESULT to TYPE. */
3086 tree
3087 omit_one_operand_loc (location_t loc, tree type, tree result, tree omitted)
3089 tree t = fold_convert_loc (loc, type, result);
3091 /* If the resulting operand is an empty statement, just return the omitted
3092 statement cast to void. */
3093 if (IS_EMPTY_STMT (t) && TREE_SIDE_EFFECTS (omitted))
3094 return build1_loc (loc, NOP_EXPR, void_type_node,
3095 fold_ignored_result (omitted));
3097 if (TREE_SIDE_EFFECTS (omitted))
3098 return build2_loc (loc, COMPOUND_EXPR, type,
3099 fold_ignored_result (omitted), t);
3101 return non_lvalue_loc (loc, t);
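/* Editorial usage sketch: when folding f () * 0 the result is 0, but
   the call must still execute, so a hypothetical caller would write

     omit_one_operand_loc (loc, type, integer_zero_node, call_to_f)
       => COMPOUND_EXPR <f (), 0>

   whereas a side-effect-free OMITTED simply disappears.  */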
3104 /* Similar, but call pedantic_non_lvalue instead of non_lvalue. */
3106 static tree
3107 pedantic_omit_one_operand_loc (location_t loc, tree type, tree result,
3108 tree omitted)
3110 tree t = fold_convert_loc (loc, type, result);
3112 /* If the resulting operand is an empty statement, just return the omitted
3113 statement cast to void. */
3114 if (IS_EMPTY_STMT (t) && TREE_SIDE_EFFECTS (omitted))
3115 return build1_loc (loc, NOP_EXPR, void_type_node,
3116 fold_ignored_result (omitted));
3118 if (TREE_SIDE_EFFECTS (omitted))
3119 return build2_loc (loc, COMPOUND_EXPR, type,
3120 fold_ignored_result (omitted), t);
3122 return pedantic_non_lvalue_loc (loc, t);
3125 /* Return a tree for the case when the result of an expression is RESULT
3126 converted to TYPE and OMITTED1 and OMITTED2 were previously operands
3127 of the expression but are now not needed.
3129 If OMITTED1 or OMITTED2 has side effects, they must be evaluated.
3130 If both OMITTED1 and OMITTED2 have side effects, OMITTED1 is
3131 evaluated before OMITTED2. Otherwise, if neither has side effects,
3132 just do the conversion of RESULT to TYPE. */
3134 tree
3135 omit_two_operands_loc (location_t loc, tree type, tree result,
3136 tree omitted1, tree omitted2)
3138 tree t = fold_convert_loc (loc, type, result);
3140 if (TREE_SIDE_EFFECTS (omitted2))
3141 t = build2_loc (loc, COMPOUND_EXPR, type, omitted2, t);
3142 if (TREE_SIDE_EFFECTS (omitted1))
3143 t = build2_loc (loc, COMPOUND_EXPR, type, omitted1, t);
3145 return TREE_CODE (t) != COMPOUND_EXPR ? non_lvalue_loc (loc, t) : t;
3149 /* Return a simplified tree node for the truth-negation of ARG. This
3150 never alters ARG itself. We assume that ARG is an operation that
3151 returns a truth value (0 or 1).
3153 FIXME: one would think we would fold the result, but it causes
3154 problems with the dominator optimizer. */
3156 static tree
3157 fold_truth_not_expr (location_t loc, tree arg)
3159 tree type = TREE_TYPE (arg);
3160 enum tree_code code = TREE_CODE (arg);
3161 location_t loc1, loc2;
3163 /* If this is a comparison, we can simply invert it, except for
3164 floating-point non-equality comparisons, in which case we just
3165 enclose a TRUTH_NOT_EXPR around what we have. */
3167 if (TREE_CODE_CLASS (code) == tcc_comparison)
3169 tree op_type = TREE_TYPE (TREE_OPERAND (arg, 0));
3170 if (FLOAT_TYPE_P (op_type)
3171 && flag_trapping_math
3172 && code != ORDERED_EXPR && code != UNORDERED_EXPR
3173 && code != NE_EXPR && code != EQ_EXPR)
3174 return NULL_TREE;
3176 code = invert_tree_comparison (code, HONOR_NANS (TYPE_MODE (op_type)));
3177 if (code == ERROR_MARK)
3178 return NULL_TREE;
3180 return build2_loc (loc, code, type, TREE_OPERAND (arg, 0),
3181 TREE_OPERAND (arg, 1));
3184 switch (code)
3186 case INTEGER_CST:
3187 return constant_boolean_node (integer_zerop (arg), type);
3189 case TRUTH_AND_EXPR:
3190 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
3191 loc2 = expr_location_or (TREE_OPERAND (arg, 1), loc);
3192 return build2_loc (loc, TRUTH_OR_EXPR, type,
3193 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)),
3194 invert_truthvalue_loc (loc2, TREE_OPERAND (arg, 1)));
3196 case TRUTH_OR_EXPR:
3197 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
3198 loc2 = expr_location_or (TREE_OPERAND (arg, 1), loc);
3199 return build2_loc (loc, TRUTH_AND_EXPR, type,
3200 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)),
3201 invert_truthvalue_loc (loc2, TREE_OPERAND (arg, 1)));
3203 case TRUTH_XOR_EXPR:
3204 /* Here we can invert either operand. We invert the first operand
3205 unless the second operand is a TRUTH_NOT_EXPR in which case our
3206 result is the XOR of the first operand with the inside of the
3207 negation of the second operand. */
3209 if (TREE_CODE (TREE_OPERAND (arg, 1)) == TRUTH_NOT_EXPR)
3210 return build2_loc (loc, TRUTH_XOR_EXPR, type, TREE_OPERAND (arg, 0),
3211 TREE_OPERAND (TREE_OPERAND (arg, 1), 0));
3212 else
3213 return build2_loc (loc, TRUTH_XOR_EXPR, type,
3214 invert_truthvalue_loc (loc, TREE_OPERAND (arg, 0)),
3215 TREE_OPERAND (arg, 1));
3217 case TRUTH_ANDIF_EXPR:
3218 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
3219 loc2 = expr_location_or (TREE_OPERAND (arg, 1), loc);
3220 return build2_loc (loc, TRUTH_ORIF_EXPR, type,
3221 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)),
3222 invert_truthvalue_loc (loc2, TREE_OPERAND (arg, 1)));
3224 case TRUTH_ORIF_EXPR:
3225 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
3226 loc2 = expr_location_or (TREE_OPERAND (arg, 1), loc);
3227 return build2_loc (loc, TRUTH_ANDIF_EXPR, type,
3228 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)),
3229 invert_truthvalue_loc (loc2, TREE_OPERAND (arg, 1)));
3231 case TRUTH_NOT_EXPR:
3232 return TREE_OPERAND (arg, 0);
3234 case COND_EXPR:
3236 tree arg1 = TREE_OPERAND (arg, 1);
3237 tree arg2 = TREE_OPERAND (arg, 2);
3239 loc1 = expr_location_or (TREE_OPERAND (arg, 1), loc);
3240 loc2 = expr_location_or (TREE_OPERAND (arg, 2), loc);
3242 /* A COND_EXPR may have a throw as one operand, which
3243 then has void type. Just leave void operands
3244 as they are. */
3245 return build3_loc (loc, COND_EXPR, type, TREE_OPERAND (arg, 0),
3246 VOID_TYPE_P (TREE_TYPE (arg1))
3247 ? arg1 : invert_truthvalue_loc (loc1, arg1),
3248 VOID_TYPE_P (TREE_TYPE (arg2))
3249 ? arg2 : invert_truthvalue_loc (loc2, arg2));
3252 case COMPOUND_EXPR:
3253 loc1 = expr_location_or (TREE_OPERAND (arg, 1), loc);
3254 return build2_loc (loc, COMPOUND_EXPR, type,
3255 TREE_OPERAND (arg, 0),
3256 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 1)));
3258 case NON_LVALUE_EXPR:
3259 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
3260 return invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0));
3262 CASE_CONVERT:
3263 if (TREE_CODE (TREE_TYPE (arg)) == BOOLEAN_TYPE)
3264 return build1_loc (loc, TRUTH_NOT_EXPR, type, arg);
3266 /* ... fall through ... */
3268 case FLOAT_EXPR:
3269 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
3270 return build1_loc (loc, TREE_CODE (arg), type,
3271 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)));
3273 case BIT_AND_EXPR:
3274 if (!integer_onep (TREE_OPERAND (arg, 1)))
3275 return NULL_TREE;
3276 return build2_loc (loc, EQ_EXPR, type, arg, build_int_cst (type, 0));
3278 case SAVE_EXPR:
3279 return build1_loc (loc, TRUTH_NOT_EXPR, type, arg);
3281 case CLEANUP_POINT_EXPR:
3282 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
3283 return build1_loc (loc, CLEANUP_POINT_EXPR, type,
3284 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)));
3286 default:
3287 return NULL_TREE;
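/* Worked De Morgan instances of the cases above (editorial):

     !(a && b)  => !a || !b     (TRUTH_ANDIF_EXPR case)
     !(a || b)  => !a && !b     (TRUTH_ORIF_EXPR case)
     !(x < y)   => x >= y for integers, but NULL_TREE for a trapping
                   floating-point comparison, per the check on entry.  */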
3291 /* Fold the truth-negation of ARG. This never alters ARG itself. We
3292 assume that ARG is an operation that returns a truth value (0 or 1
3293 for scalars, 0 or -1 for vectors). Return the folded expression if
3294 folding is successful. Otherwise, return NULL_TREE. */
3296 static tree
3297 fold_invert_truthvalue (location_t loc, tree arg)
3299 tree type = TREE_TYPE (arg);
3300 return fold_unary_loc (loc, VECTOR_TYPE_P (type)
3301 ? BIT_NOT_EXPR
3302 : TRUTH_NOT_EXPR,
3303 type, arg);
3306 /* Return a simplified tree node for the truth-negation of ARG. This
3307 never alters ARG itself. We assume that ARG is an operation that
3308 returns a truth value (0 or 1 for scalars, 0 or -1 for vectors). */
3310 tree
3311 invert_truthvalue_loc (location_t loc, tree arg)
3313 if (TREE_CODE (arg) == ERROR_MARK)
3314 return arg;
3316 tree type = TREE_TYPE (arg);
3317 return fold_build1_loc (loc, VECTOR_TYPE_P (type)
3318 ? BIT_NOT_EXPR
3319 : TRUTH_NOT_EXPR,
3320 type, arg);
3323 /* Given a bit-wise operation CODE applied to ARG0 and ARG1, see if both
3324 operands are another bit-wise operation with a common input. If so,
3325 distribute the bit operations to save an operation and possibly two if
3326 constants are involved. For example, convert
3327 (A | B) & (A | C) into A | (B & C)
3328 Further simplification will occur if B and C are constants.
3330 If this optimization cannot be done, 0 will be returned. */
3332 static tree
3333 distribute_bit_expr (location_t loc, enum tree_code code, tree type,
3334 tree arg0, tree arg1)
3336 tree common;
3337 tree left, right;
3339 if (TREE_CODE (arg0) != TREE_CODE (arg1)
3340 || TREE_CODE (arg0) == code
3341 || (TREE_CODE (arg0) != BIT_AND_EXPR
3342 && TREE_CODE (arg0) != BIT_IOR_EXPR))
3343 return 0;
3345 if (operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 0), 0))
3347 common = TREE_OPERAND (arg0, 0);
3348 left = TREE_OPERAND (arg0, 1);
3349 right = TREE_OPERAND (arg1, 1);
3351 else if (operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 1), 0))
3353 common = TREE_OPERAND (arg0, 0);
3354 left = TREE_OPERAND (arg0, 1);
3355 right = TREE_OPERAND (arg1, 0);
3357 else if (operand_equal_p (TREE_OPERAND (arg0, 1), TREE_OPERAND (arg1, 0), 0))
3359 common = TREE_OPERAND (arg0, 1);
3360 left = TREE_OPERAND (arg0, 0);
3361 right = TREE_OPERAND (arg1, 1);
3363 else if (operand_equal_p (TREE_OPERAND (arg0, 1), TREE_OPERAND (arg1, 1), 0))
3365 common = TREE_OPERAND (arg0, 1);
3366 left = TREE_OPERAND (arg0, 0);
3367 right = TREE_OPERAND (arg1, 0);
3369 else
3370 return 0;
3372 common = fold_convert_loc (loc, type, common);
3373 left = fold_convert_loc (loc, type, left);
3374 right = fold_convert_loc (loc, type, right);
3375 return fold_build2_loc (loc, TREE_CODE (arg0), type, common,
3376 fold_build2_loc (loc, code, type, left, right));
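/* Worked example (editorial): with CODE == BIT_AND_EXPR,

     (x | 4) & (x | 8)  =>  x | (4 & 8)  =>  x | 0

   which later folding reduces to x; the common operand is factored
   out and only one bit operation on the remaining operands is left.  */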
3379 /* Knowing that ARG0 and ARG1 are both RDIV_EXPRs, simplify a binary operation
3380 with code CODE. This optimization is unsafe. */
3381 static tree
3382 distribute_real_division (location_t loc, enum tree_code code, tree type,
3383 tree arg0, tree arg1)
3385 bool mul0 = TREE_CODE (arg0) == MULT_EXPR;
3386 bool mul1 = TREE_CODE (arg1) == MULT_EXPR;
3388 /* (A / C) +- (B / C) -> (A +- B) / C. */
3389 if (mul0 == mul1
3390 && operand_equal_p (TREE_OPERAND (arg0, 1),
3391 TREE_OPERAND (arg1, 1), 0))
3392 return fold_build2_loc (loc, mul0 ? MULT_EXPR : RDIV_EXPR, type,
3393 fold_build2_loc (loc, code, type,
3394 TREE_OPERAND (arg0, 0),
3395 TREE_OPERAND (arg1, 0)),
3396 TREE_OPERAND (arg0, 1));
3398 /* (A / C1) +- (A / C2) -> A * (1 / C1 +- 1 / C2). */
3399 if (operand_equal_p (TREE_OPERAND (arg0, 0),
3400 TREE_OPERAND (arg1, 0), 0)
3401 && TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
3402 && TREE_CODE (TREE_OPERAND (arg1, 1)) == REAL_CST)
3404 REAL_VALUE_TYPE r0, r1;
3405 r0 = TREE_REAL_CST (TREE_OPERAND (arg0, 1));
3406 r1 = TREE_REAL_CST (TREE_OPERAND (arg1, 1));
3407 if (!mul0)
3408 real_arithmetic (&r0, RDIV_EXPR, &dconst1, &r0);
3409 if (!mul1)
3410 real_arithmetic (&r1, RDIV_EXPR, &dconst1, &r1);
3411 real_arithmetic (&r0, code, &r0, &r1);
3412 return fold_build2_loc (loc, MULT_EXPR, type,
3413 TREE_OPERAND (arg0, 0),
3414 build_real (type, r0));
3417 return NULL_TREE;
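/* Worked instance of the second pattern above (editorial; unsafe
   exactly as noted, because it can change rounding):

     x / 2.0 + x / 4.0  =>  x * (1/2.0 + 1/4.0)  =>  x * 0.75  */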
3420 /* Return a BIT_FIELD_REF of type TYPE to refer to BITSIZE bits of INNER
3421 starting at BITPOS. The field is unsigned if UNSIGNEDP is nonzero. */
3423 static tree
3424 make_bit_field_ref (location_t loc, tree inner, tree type,
3425 HOST_WIDE_INT bitsize, HOST_WIDE_INT bitpos, int unsignedp)
3427 tree result, bftype;
3429 if (bitpos == 0)
3431 tree size = TYPE_SIZE (TREE_TYPE (inner));
3432 if ((INTEGRAL_TYPE_P (TREE_TYPE (inner))
3433 || POINTER_TYPE_P (TREE_TYPE (inner)))
3434 && tree_fits_shwi_p (size)
3435 && tree_to_shwi (size) == bitsize)
3436 return fold_convert_loc (loc, type, inner);
3439 bftype = type;
3440 if (TYPE_PRECISION (bftype) != bitsize
3441 || TYPE_UNSIGNED (bftype) == !unsignedp)
3442 bftype = build_nonstandard_integer_type (bitsize, 0);
3444 result = build3_loc (loc, BIT_FIELD_REF, bftype, inner,
3445 size_int (bitsize), bitsize_int (bitpos));
3447 if (bftype != type)
3448 result = fold_convert_loc (loc, type, result);
3450 return result;
3453 /* Optimize a bit-field compare.
3455 There are two cases: First is a compare against a constant and the
3456 second is a comparison of two items where the fields are at the same
3457 bit position relative to the start of a chunk (byte, halfword, word)
3458 large enough to contain it. In these cases we can avoid the shift
3459 implicit in bitfield extractions.
3461 For constants, we emit a compare of the shifted constant with the
3462 BIT_AND_EXPR of a mask and a byte, halfword, or word of the operand being
3463 compared. For two fields at the same position, we do the ANDs with the
3464 similar mask and compare the result of the ANDs.
3466 CODE is the comparison code, known to be either NE_EXPR or EQ_EXPR.
3467 COMPARE_TYPE is the type of the comparison, and LHS and RHS
3468 are the left and right operands of the comparison, respectively.
3470 If the optimization described above can be done, we return the resulting
3471 tree. Otherwise we return zero. */
3473 static tree
3474 optimize_bit_field_compare (location_t loc, enum tree_code code,
3475 tree compare_type, tree lhs, tree rhs)
3477 HOST_WIDE_INT lbitpos, lbitsize, rbitpos, rbitsize, nbitpos, nbitsize;
3478 tree type = TREE_TYPE (lhs);
3479 tree unsigned_type;
3480 int const_p = TREE_CODE (rhs) == INTEGER_CST;
3481 enum machine_mode lmode, rmode, nmode;
3482 int lunsignedp, runsignedp;
3483 int lvolatilep = 0, rvolatilep = 0;
3484 tree linner, rinner = NULL_TREE;
3485 tree mask;
3486 tree offset;
3488 /* Get all the information about the extractions being done. If the bit size
3489 is the same as the size of the underlying object, we aren't doing an
3490 extraction at all and so can do nothing. We also don't want to
3491 do anything if the inner expression is a PLACEHOLDER_EXPR since we
3492 then will no longer be able to replace it. */
3493 linner = get_inner_reference (lhs, &lbitsize, &lbitpos, &offset, &lmode,
3494 &lunsignedp, &lvolatilep, false);
3495 if (linner == lhs || lbitsize == GET_MODE_BITSIZE (lmode) || lbitsize < 0
3496 || offset != 0 || TREE_CODE (linner) == PLACEHOLDER_EXPR || lvolatilep)
3497 return 0;
3499 if (!const_p)
3501 /* If this is not a constant, we can only do something if bit positions,
3502 sizes, and signedness are the same. */
3503 rinner = get_inner_reference (rhs, &rbitsize, &rbitpos, &offset, &rmode,
3504 &runsignedp, &rvolatilep, false);
3506 if (rinner == rhs || lbitpos != rbitpos || lbitsize != rbitsize
3507 || lunsignedp != runsignedp || offset != 0
3508 || TREE_CODE (rinner) == PLACEHOLDER_EXPR || rvolatilep)
3509 return 0;
3512 /* See if we can find a mode to refer to this field. We should be able to,
3513 but fail if we can't. */
3514 nmode = get_best_mode (lbitsize, lbitpos, 0, 0,
3515 const_p ? TYPE_ALIGN (TREE_TYPE (linner))
3516 : MIN (TYPE_ALIGN (TREE_TYPE (linner)),
3517 TYPE_ALIGN (TREE_TYPE (rinner))),
3518 word_mode, false);
3519 if (nmode == VOIDmode)
3520 return 0;
3522 /* Set signed and unsigned types of the precision of this mode for the
3523 shifts below. */
3524 unsigned_type = lang_hooks.types.type_for_mode (nmode, 1);
3526 /* Compute the bit position and size for the new reference and our offset
3527 within it. If the new reference is the same size as the original, we
3528 won't optimize anything, so return zero. */
3529 nbitsize = GET_MODE_BITSIZE (nmode);
3530 nbitpos = lbitpos & ~ (nbitsize - 1);
3531 lbitpos -= nbitpos;
3532 if (nbitsize == lbitsize)
3533 return 0;
3535 if (BYTES_BIG_ENDIAN)
3536 lbitpos = nbitsize - lbitsize - lbitpos;
3538 /* Make the mask to be used against the extracted field. */
3539 mask = build_int_cst_type (unsigned_type, -1);
3540 mask = const_binop (LSHIFT_EXPR, mask, size_int (nbitsize - lbitsize));
3541 mask = const_binop (RSHIFT_EXPR, mask,
3542 size_int (nbitsize - lbitsize - lbitpos));
3544 if (! const_p)
3545 /* If not comparing with constant, just rework the comparison
3546 and return. */
3547 return fold_build2_loc (loc, code, compare_type,
3548 fold_build2_loc (loc, BIT_AND_EXPR, unsigned_type,
3549 make_bit_field_ref (loc, linner,
3550 unsigned_type,
3551 nbitsize, nbitpos,
3552 1),
3553 mask),
3554 fold_build2_loc (loc, BIT_AND_EXPR, unsigned_type,
3555 make_bit_field_ref (loc, rinner,
3556 unsigned_type,
3557 nbitsize, nbitpos,
3558 1),
3559 mask));
3561 /* Otherwise, we are handling the constant case. See if the constant is too
3562 big for the field. Warn and return a tree for 0 (false) if so. We do
3563 this not only for its own sake, but to avoid having to test for this
3564 error case below. If we didn't, we might generate wrong code.
3566 For unsigned fields, the constant shifted right by the field length should
3567 be all zero. For signed fields, the high-order bits should agree with
3568 the sign bit. */
3570 if (lunsignedp)
3572 if (wi::lrshift (rhs, lbitsize) != 0)
3574 warning (0, "comparison is always %d due to width of bit-field",
3575 code == NE_EXPR);
3576 return constant_boolean_node (code == NE_EXPR, compare_type);
3579 else
3581 wide_int tem = wi::arshift (rhs, lbitsize - 1);
3582 if (tem != 0 && tem != -1)
3584 warning (0, "comparison is always %d due to width of bit-field",
3585 code == NE_EXPR);
3586 return constant_boolean_node (code == NE_EXPR, compare_type);
3590 /* Single-bit compares should always be against zero. */
3591 if (lbitsize == 1 && ! integer_zerop (rhs))
3593 code = code == EQ_EXPR ? NE_EXPR : EQ_EXPR;
3594 rhs = build_int_cst (type, 0);
3597 /* Make a new bitfield reference, shift the constant over the
3598 appropriate number of bits and mask it with the computed mask
3599 (in case this was a signed field). If we changed it, make a new one. */
3600 lhs = make_bit_field_ref (loc, linner, unsigned_type, nbitsize, nbitpos, 1);
3602 rhs = const_binop (BIT_AND_EXPR,
3603 const_binop (LSHIFT_EXPR,
3604 fold_convert_loc (loc, unsigned_type, rhs),
3605 size_int (lbitpos)),
3606 mask);
3608 lhs = build2_loc (loc, code, compare_type,
3609 build2 (BIT_AND_EXPR, unsigned_type, lhs, mask), rhs);
3610 return lhs;
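/* Hedged worked example of the constant case: a hypothetical 3-bit
   field at bit offset 2 within an 8-bit chunk (nbitsize == 8,
   little-endian bit numbering):

     s.f == 5
       => mask = ((unsigned char) -1 << (8-3)) >> (8-3-2) = 0b00011100
       => (chunk & 0b00011100) == (5 << 2)

   i.e. one AND plus a compare instead of an extract-and-shift.  */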
3613 /* Subroutine for fold_truth_andor_1: decode a field reference.
3615 If EXP is a comparison reference, we return the innermost reference.
3617 *PBITSIZE is set to the number of bits in the reference, *PBITPOS is
3618 set to the starting bit number.
3620 If the innermost field can be completely contained in a mode-sized
3621 unit, *PMODE is set to that mode. Otherwise, it is set to VOIDmode.
3623 *PVOLATILEP is set to 1 if any expression encountered is volatile;
3624 otherwise it is not changed.
3626 *PUNSIGNEDP is set to the signedness of the field.
3628 *PMASK is set to the mask used. This is either contained in a
3629 BIT_AND_EXPR or derived from the width of the field.
3631 *PAND_MASK is set to the mask found in a BIT_AND_EXPR, if any.
3633 Return 0 if this is not a component reference or is one that we can't
3634 do anything with. */
3636 static tree
3637 decode_field_reference (location_t loc, tree exp, HOST_WIDE_INT *pbitsize,
3638 HOST_WIDE_INT *pbitpos, enum machine_mode *pmode,
3639 int *punsignedp, int *pvolatilep,
3640 tree *pmask, tree *pand_mask)
3642 tree outer_type = 0;
3643 tree and_mask = 0;
3644 tree mask, inner, offset;
3645 tree unsigned_type;
3646 unsigned int precision;
3648 /* All the optimizations using this function assume integer fields.
3649 There are problems with FP fields since the type_for_size call
3650 below can fail for, e.g., XFmode. */
3651 if (! INTEGRAL_TYPE_P (TREE_TYPE (exp)))
3652 return 0;
3654 /* We are interested in the bare arrangement of bits, so strip everything
3655 that doesn't affect the machine mode. However, record the type of the
3656 outermost expression if it may matter below. */
3657 if (CONVERT_EXPR_P (exp)
3658 || TREE_CODE (exp) == NON_LVALUE_EXPR)
3659 outer_type = TREE_TYPE (exp);
3660 STRIP_NOPS (exp);
3662 if (TREE_CODE (exp) == BIT_AND_EXPR)
3664 and_mask = TREE_OPERAND (exp, 1);
3665 exp = TREE_OPERAND (exp, 0);
3666 STRIP_NOPS (exp); STRIP_NOPS (and_mask);
3667 if (TREE_CODE (and_mask) != INTEGER_CST)
3668 return 0;
3671 inner = get_inner_reference (exp, pbitsize, pbitpos, &offset, pmode,
3672 punsignedp, pvolatilep, false);
3673 if ((inner == exp && and_mask == 0)
3674 || *pbitsize < 0 || offset != 0
3675 || TREE_CODE (inner) == PLACEHOLDER_EXPR)
3676 return 0;
3678 /* If the number of bits in the reference is the same as the bitsize of
3679 the outer type, then the outer type gives the signedness. Otherwise
3680 (in case of a small bitfield) the signedness is unchanged. */
3681 if (outer_type && *pbitsize == TYPE_PRECISION (outer_type))
3682 *punsignedp = TYPE_UNSIGNED (outer_type);
3684 /* Compute the mask to access the bitfield. */
3685 unsigned_type = lang_hooks.types.type_for_size (*pbitsize, 1);
3686 precision = TYPE_PRECISION (unsigned_type);
3688 mask = build_int_cst_type (unsigned_type, -1);
3690 mask = const_binop (LSHIFT_EXPR, mask, size_int (precision - *pbitsize));
3691 mask = const_binop (RSHIFT_EXPR, mask, size_int (precision - *pbitsize));
3693 /* Merge it with the mask we found in the BIT_AND_EXPR, if any. */
3694 if (and_mask != 0)
3695 mask = fold_build2_loc (loc, BIT_AND_EXPR, unsigned_type,
3696 fold_convert_loc (loc, unsigned_type, and_mask), mask);
3698 *pmask = mask;
3699 *pand_mask = and_mask;
3700 return inner;
3703 /* Return nonzero if MASK represents a mask of SIZE ones in the low-order
3704 bit positions and MASK is SIGNED. */
3706 static int
3707 all_ones_mask_p (const_tree mask, unsigned int size)
3709 tree type = TREE_TYPE (mask);
3710 unsigned int precision = TYPE_PRECISION (type);
3712 /* If this function returns true when the type of the mask is
3713 UNSIGNED, then there will be errors. In particular see
3714 gcc.c-torture/execute/990326-1.c. There does not appear to be
3715 any documentation paper trail as to why this is so. But the pre
3716 wide-int worked with that restriction and it has been preserved
3717 here. */
3718 if (size > precision || TYPE_SIGN (type) == UNSIGNED)
3719 return false;
3721 return wi::mask (size, false, precision) == mask;
3724 /* Subroutine for fold: determine if VAL is the INTEGER_CONST that
3725 represents the sign bit of EXP's type. If EXP represents a sign
3726 or zero extension, also test VAL against the unextended type.
3727 The return value is the (sub)expression whose sign bit is VAL,
3728 or NULL_TREE otherwise. */
3730 static tree
3731 sign_bit_p (tree exp, const_tree val)
3733 int width;
3734 tree t;
3736 /* Tree EXP must have an integral type. */
3737 t = TREE_TYPE (exp);
3738 if (! INTEGRAL_TYPE_P (t))
3739 return NULL_TREE;
3741 /* Tree VAL must be an integer constant. */
3742 if (TREE_CODE (val) != INTEGER_CST
3743 || TREE_OVERFLOW (val))
3744 return NULL_TREE;
3746 width = TYPE_PRECISION (t);
3747 if (wi::only_sign_bit_p (val, width))
3748 return exp;
3750 /* Handle extension from a narrower type. */
3751 if (TREE_CODE (exp) == NOP_EXPR
3752 && TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0))) < width)
3753 return sign_bit_p (TREE_OPERAND (exp, 0), val);
3755 return NULL_TREE;
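/* A plain-C sketch of the test wi::only_sign_bit_p performs above
   (hypothetical helper; assumes 0 < WIDTH <= the width of unsigned int):
   VAL is the sign bit of a WIDTH-bit type iff it is exactly
   1 << (WIDTH - 1).  */
static inline int
example_only_sign_bit_p (unsigned int val, int width)
{
  return val == 1u << (width - 1);
}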
3758 /* Subroutine for fold_truth_andor_1: determine if an operand is simple enough
3759 to be evaluated unconditionally. */
3761 static int
3762 simple_operand_p (const_tree exp)
3764 /* Strip any conversions that don't change the machine mode. */
3765 STRIP_NOPS (exp);
3767 return (CONSTANT_CLASS_P (exp)
3768 || TREE_CODE (exp) == SSA_NAME
3769 || (DECL_P (exp)
3770 && ! TREE_ADDRESSABLE (exp)
3771 && ! TREE_THIS_VOLATILE (exp)
3772 && ! DECL_NONLOCAL (exp)
3773 /* Don't regard global variables as simple. They may be
3774 allocated in ways unknown to the compiler (shared memory,
3775 #pragma weak, etc). */
3776 && ! TREE_PUBLIC (exp)
3777 && ! DECL_EXTERNAL (exp)
3778 /* Weakrefs are not safe to be read, since they can be NULL.
3779 They are !TREE_PUBLIC && !DECL_EXTERNAL but still
3780 have DECL_WEAK flag set. */
3781 && (! VAR_OR_FUNCTION_DECL_P (exp) || ! DECL_WEAK (exp))
3782 /* Loading a static variable is unduly expensive, but global
3783 registers aren't expensive. */
3784 && (! TREE_STATIC (exp) || DECL_REGISTER (exp))));
3787 /* Subroutine for fold_truth_andor: determine if an operand is simple enough
3788 to be evaluated unconditionally.
3789 In addition to simple_operand_p, we assume that comparisons, conversions,
3790 and logic-not operations are simple, if their operands are simple, too. */
3792 static bool
3793 simple_operand_p_2 (tree exp)
3795 enum tree_code code;
3797 if (TREE_SIDE_EFFECTS (exp)
3798 || tree_could_trap_p (exp))
3799 return false;
3801 while (CONVERT_EXPR_P (exp))
3802 exp = TREE_OPERAND (exp, 0);
3804 code = TREE_CODE (exp);
3806 if (TREE_CODE_CLASS (code) == tcc_comparison)
3807 return (simple_operand_p (TREE_OPERAND (exp, 0))
3808 && simple_operand_p (TREE_OPERAND (exp, 1)));
3810 if (code == TRUTH_NOT_EXPR)
3811 return simple_operand_p_2 (TREE_OPERAND (exp, 0));
3813 return simple_operand_p (exp);
3817 /* The following functions are subroutines to fold_range_test and allow it to
3818 try to change a logical combination of comparisons into a range test.
3820 For example, both
3821 X == 2 || X == 3 || X == 4 || X == 5
3822 and
3823 X >= 2 && X <= 5
3824 are converted to
3825 (unsigned) (X - 2) <= 3
3827 We describe each set of comparisons as being either inside or outside
3828 a range, using a variable named like IN_P, and then describe the
3829 range with a lower and upper bound. If one of the bounds is omitted,
3830 it represents either the highest or lowest value of the type.
3832 In the comments below, we represent a range by two numbers in brackets
3833 preceded by a "+" to designate being inside that range, or a "-" to
3834 designate being outside that range, so the condition can be inverted by
3835 flipping the prefix. An omitted bound is represented by a "-". For
3836 example, "- [-, 10]" means being outside the range starting at the lowest
3837 possible value and ending at 10, in other words, being greater than 10.
3838 The range "+ [-, -]" is always true and hence the range "- [-, -]" is
3839 always false.
3841 We set up things so that the missing bounds are handled in a consistent
3842 manner so neither a missing bound nor "true" and "false" need to be
3843 handled using a special case. */
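/* A standalone sketch of the conversion these routines aim for
   (hypothetical function, for illustration only): both forms in the
   example above collapse into one unsigned comparison.  */
static inline int
example_range_test (unsigned int x)
{
  /* X >= 2 && X <= 5 becomes (unsigned) (X - 2) <= 3: the subtraction
     wraps for X < 2, producing a value too large to pass the test.  */
  return x - 2u <= 3u;
}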
3845 /* Return the result of applying CODE to ARG0 and ARG1, but handle the case
3846 of ARG0 and/or ARG1 being omitted, meaning an unlimited range. UPPER0_P
3847 and UPPER1_P are nonzero if the respective argument is an upper bound
3848 and zero for a lower. TYPE, if nonzero, is the type of the result; it
3849 must be specified for a comparison. ARG1 will be converted to ARG0's
3850 type if both are specified. */
3852 static tree
3853 range_binop (enum tree_code code, tree type, tree arg0, int upper0_p,
3854 tree arg1, int upper1_p)
3856 tree tem;
3857 int result;
3858 int sgn0, sgn1;
3860 /* If neither arg represents infinity, do the normal operation.
3861 Else, if not a comparison, return infinity. Else handle the special
3862 comparison rules. Note that most of the cases below won't occur, but
3863 are handled for consistency. */
3865 if (arg0 != 0 && arg1 != 0)
3867 tem = fold_build2 (code, type != 0 ? type : TREE_TYPE (arg0),
3868 arg0, fold_convert (TREE_TYPE (arg0), arg1));
3869 STRIP_NOPS (tem);
3870 return TREE_CODE (tem) == INTEGER_CST ? tem : 0;
3873 if (TREE_CODE_CLASS (code) != tcc_comparison)
3874 return 0;
3876 /* Set SGN[01] to -1 if ARG[01] is a lower bound, 1 for upper, and 0
3877 for neither. In real mathematics, we cannot assume open-ended ranges
3878 are comparable. But this is computer arithmetic, where numbers are
3879 finite. We can therefore model any missing bound with a value Z
3880 greater than any representable number, which permits us to treat
3881 unbounded ranges as equal. */
3882 sgn0 = arg0 != 0 ? 0 : (upper0_p ? 1 : -1);
3883 sgn1 = arg1 != 0 ? 0 : (upper1_p ? 1 : -1);
3884 switch (code)
3886 case EQ_EXPR:
3887 result = sgn0 == sgn1;
3888 break;
3889 case NE_EXPR:
3890 result = sgn0 != sgn1;
3891 break;
3892 case LT_EXPR:
3893 result = sgn0 < sgn1;
3894 break;
3895 case LE_EXPR:
3896 result = sgn0 <= sgn1;
3897 break;
3898 case GT_EXPR:
3899 result = sgn0 > sgn1;
3900 break;
3901 case GE_EXPR:
3902 result = sgn0 >= sgn1;
3903 break;
3904 default:
3905 gcc_unreachable ();
3908 return constant_boolean_node (result, type);
3911 /* Helper routine for make_range. Perform one step for it, return
3912 new expression if the loop should continue or NULL_TREE if it should
3913 stop. */
3915 tree
3916 make_range_step (location_t loc, enum tree_code code, tree arg0, tree arg1,
3917 tree exp_type, tree *p_low, tree *p_high, int *p_in_p,
3918 bool *strict_overflow_p)
3920 tree arg0_type = TREE_TYPE (arg0);
3921 tree n_low, n_high, low = *p_low, high = *p_high;
3922 int in_p = *p_in_p, n_in_p;
3924 switch (code)
3926 case TRUTH_NOT_EXPR:
3927 /* We can only do something if the range is testing for zero. */
3928 if (low == NULL_TREE || high == NULL_TREE
3929 || ! integer_zerop (low) || ! integer_zerop (high))
3930 return NULL_TREE;
3931 *p_in_p = ! in_p;
3932 return arg0;
3934 case EQ_EXPR: case NE_EXPR:
3935 case LT_EXPR: case LE_EXPR: case GE_EXPR: case GT_EXPR:
3936 /* We can only do something if the range is testing for zero
3937 and if the second operand is an integer constant. Note that
3938 saying something is "in" the range we make is done by
3939 complementing IN_P, since it is set in the initial case of
3940 being not equal to zero; "out" is leaving it alone. */
3941 if (low == NULL_TREE || high == NULL_TREE
3942 || ! integer_zerop (low) || ! integer_zerop (high)
3943 || TREE_CODE (arg1) != INTEGER_CST)
3944 return NULL_TREE;
3946 switch (code)
3948 case NE_EXPR: /* - [c, c] */
3949 low = high = arg1;
3950 break;
3951 case EQ_EXPR: /* + [c, c] */
3952 in_p = ! in_p, low = high = arg1;
3953 break;
3954 case GT_EXPR: /* - [-, c] */
3955 low = 0, high = arg1;
3956 break;
3957 case GE_EXPR: /* + [c, -] */
3958 in_p = ! in_p, low = arg1, high = 0;
3959 break;
3960 case LT_EXPR: /* - [c, -] */
3961 low = arg1, high = 0;
3962 break;
3963 case LE_EXPR: /* + [-, c] */
3964 in_p = ! in_p, low = 0, high = arg1;
3965 break;
3966 default:
3967 gcc_unreachable ();
3970 /* If this is an unsigned comparison, we also know that EXP is
3971 greater than or equal to zero. We base the range tests we make
3972 on that fact, so we record it here so we can parse existing
3973 range tests. We test arg0_type since often the return type
3974 of, e.g. EQ_EXPR, is boolean. */
3975 if (TYPE_UNSIGNED (arg0_type) && (low == 0 || high == 0))
3977 if (! merge_ranges (&n_in_p, &n_low, &n_high,
3978 in_p, low, high, 1,
3979 build_int_cst (arg0_type, 0),
3980 NULL_TREE))
3981 return NULL_TREE;
3983 in_p = n_in_p, low = n_low, high = n_high;
3985 /* If the high bound is missing, but we have a nonzero low
3986 bound, reverse the range so it goes from zero to the low bound
3987 minus 1. */
3988 if (high == 0 && low && ! integer_zerop (low))
3990 in_p = ! in_p;
3991 high = range_binop (MINUS_EXPR, NULL_TREE, low, 0,
3992 build_int_cst (TREE_TYPE (low), 1), 0);
3993 low = build_int_cst (arg0_type, 0);
3997 *p_low = low;
3998 *p_high = high;
3999 *p_in_p = in_p;
4000 return arg0;
4002 case NEGATE_EXPR:
4003 /* If flag_wrapv and ARG0_TYPE is signed, make sure
4004 low and high are non-NULL, then normalize will DTRT. */
4005 if (!TYPE_UNSIGNED (arg0_type)
4006 && !TYPE_OVERFLOW_UNDEFINED (arg0_type))
4008 if (low == NULL_TREE)
4009 low = TYPE_MIN_VALUE (arg0_type);
4010 if (high == NULL_TREE)
4011 high = TYPE_MAX_VALUE (arg0_type);
4014 /* (-x) IN [a,b] -> x in [-b, -a] */
4015 n_low = range_binop (MINUS_EXPR, exp_type,
4016 build_int_cst (exp_type, 0),
4017 0, high, 1);
4018 n_high = range_binop (MINUS_EXPR, exp_type,
4019 build_int_cst (exp_type, 0),
4020 0, low, 0);
4021 if (n_high != 0 && TREE_OVERFLOW (n_high))
4022 return NULL_TREE;
4023 goto normalize;
4025 case BIT_NOT_EXPR:
4026 /* ~ X -> -X - 1 */
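/* (Two's complement identity: e.g. for X == 5, ~X == -6 == -5 - 1,
   so a range test on ~X becomes one on -X - 1.)  */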
4027 return build2_loc (loc, MINUS_EXPR, exp_type, negate_expr (arg0),
4028 build_int_cst (exp_type, 1));
4030 case PLUS_EXPR:
4031 case MINUS_EXPR:
4032 if (TREE_CODE (arg1) != INTEGER_CST)
4033 return NULL_TREE;
4035 /* If flag_wrapv and ARG0_TYPE is signed, then we cannot
4036 move a constant to the other side. */
4037 if (!TYPE_UNSIGNED (arg0_type)
4038 && !TYPE_OVERFLOW_UNDEFINED (arg0_type))
4039 return NULL_TREE;
4041 /* If EXP is signed, any overflow in the computation is undefined,
4042 so we don't worry about it so long as our computations on
4043 the bounds don't overflow. For unsigned, overflow is defined
4044 and this is exactly the right thing. */
4045 n_low = range_binop (code == MINUS_EXPR ? PLUS_EXPR : MINUS_EXPR,
4046 arg0_type, low, 0, arg1, 0);
4047 n_high = range_binop (code == MINUS_EXPR ? PLUS_EXPR : MINUS_EXPR,
4048 arg0_type, high, 1, arg1, 0);
4049 if ((n_low != 0 && TREE_OVERFLOW (n_low))
4050 || (n_high != 0 && TREE_OVERFLOW (n_high)))
4051 return NULL_TREE;
4053 if (TYPE_OVERFLOW_UNDEFINED (arg0_type))
4054 *strict_overflow_p = true;
4056 normalize:
4057 /* Check for an unsigned range which has wrapped around the maximum
4058 value thus making n_high < n_low, and normalize it. */
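/* E.g. for unsigned char, X - 250 in + [0, 9] gives the wrapped
   range + [250, 3], which is rewritten below as - [4, 249].  */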
4059 if (n_low && n_high && tree_int_cst_lt (n_high, n_low))
4061 low = range_binop (PLUS_EXPR, arg0_type, n_high, 0,
4062 build_int_cst (TREE_TYPE (n_high), 1), 0);
4063 high = range_binop (MINUS_EXPR, arg0_type, n_low, 0,
4064 build_int_cst (TREE_TYPE (n_low), 1), 0);
4066 /* If the range is of the form +/- [ x+1, x ], we won't
4067 be able to normalize it. But then, it represents the
4068 whole range or the empty set, so make it
4069 +/- [ -, - ]. */
4070 if (tree_int_cst_equal (n_low, low)
4071 && tree_int_cst_equal (n_high, high))
4072 low = high = 0;
4073 else
4074 in_p = ! in_p;
4076 else
4077 low = n_low, high = n_high;
4079 *p_low = low;
4080 *p_high = high;
4081 *p_in_p = in_p;
4082 return arg0;
4084 CASE_CONVERT:
4085 case NON_LVALUE_EXPR:
4086 if (TYPE_PRECISION (arg0_type) > TYPE_PRECISION (exp_type))
4087 return NULL_TREE;
4089 if (! INTEGRAL_TYPE_P (arg0_type)
4090 || (low != 0 && ! int_fits_type_p (low, arg0_type))
4091 || (high != 0 && ! int_fits_type_p (high, arg0_type)))
4092 return NULL_TREE;
4094 n_low = low, n_high = high;
4096 if (n_low != 0)
4097 n_low = fold_convert_loc (loc, arg0_type, n_low);
4099 if (n_high != 0)
4100 n_high = fold_convert_loc (loc, arg0_type, n_high);
4102 /* If we're converting arg0 from an unsigned type to exp's
4103 signed type, we will be doing the comparison as unsigned.
4104 The tests above have already verified that LOW and HIGH
4105 are both positive.
4107 So we have to ensure that we will handle large unsigned
4108 values the same way that the current signed bounds treat
4109 negative values. */
4111 if (!TYPE_UNSIGNED (exp_type) && TYPE_UNSIGNED (arg0_type))
4113 tree high_positive;
4114 tree equiv_type;
4115 /* For fixed-point modes, we need to pass the saturating flag
4116 as the 2nd parameter. */
4117 if (ALL_FIXED_POINT_MODE_P (TYPE_MODE (arg0_type)))
4118 equiv_type
4119 = lang_hooks.types.type_for_mode (TYPE_MODE (arg0_type),
4120 TYPE_SATURATING (arg0_type));
4121 else
4122 equiv_type
4123 = lang_hooks.types.type_for_mode (TYPE_MODE (arg0_type), 1);
4125 /* A range without an upper bound is, naturally, unbounded.
4126 Since convert would have cropped a very large value, use
4127 the max value for the destination type. */
4128 high_positive
4129 = TYPE_MAX_VALUE (equiv_type) ? TYPE_MAX_VALUE (equiv_type)
4130 : TYPE_MAX_VALUE (arg0_type);
4132 if (TYPE_PRECISION (exp_type) == TYPE_PRECISION (arg0_type))
4133 high_positive = fold_build2_loc (loc, RSHIFT_EXPR, arg0_type,
4134 fold_convert_loc (loc, arg0_type,
4135 high_positive),
4136 build_int_cst (arg0_type, 1));
4138 /* If the low bound is specified, "and" the range with the
4139 range for which the original unsigned value will be
4140 positive. */
4141 if (low != 0)
4143 if (! merge_ranges (&n_in_p, &n_low, &n_high, 1, n_low, n_high,
4144 1, fold_convert_loc (loc, arg0_type,
4145 integer_zero_node),
4146 high_positive))
4147 return NULL_TREE;
4149 in_p = (n_in_p == in_p);
4151 else
4153 /* Otherwise, "or" the range with the range of the input
4154 that will be interpreted as negative. */
4155 if (! merge_ranges (&n_in_p, &n_low, &n_high, 0, n_low, n_high,
4156 1, fold_convert_loc (loc, arg0_type,
4157 integer_zero_node),
4158 high_positive))
4159 return NULL_TREE;
4161 in_p = (in_p != n_in_p);
4165 *p_low = n_low;
4166 *p_high = n_high;
4167 *p_in_p = in_p;
4168 return arg0;
4170 default:
4171 return NULL_TREE;
4175 /* Given EXP, a logical expression, set the range it is testing into
4176 variables denoted by PIN_P, PLOW, and PHIGH. Return the expression
4177 actually being tested. *PLOW and *PHIGH will be made of the same
4178 type as the returned expression. If EXP is not a comparison, we
4179 will most likely not be returning a useful value and range. Set
4180 *STRICT_OVERFLOW_P to true if the return value is only valid
4181 because signed overflow is undefined; otherwise, do not change
4182 *STRICT_OVERFLOW_P. */
4184 tree
4185 make_range (tree exp, int *pin_p, tree *plow, tree *phigh,
4186 bool *strict_overflow_p)
4188 enum tree_code code;
4189 tree arg0, arg1 = NULL_TREE;
4190 tree exp_type, nexp;
4191 int in_p;
4192 tree low, high;
4193 location_t loc = EXPR_LOCATION (exp);
4195 /* Start with simply saying "EXP != 0" and then look at the code of EXP
4196 and see if we can refine the range. Some of the cases below may not
4197 happen, but it doesn't seem worth worrying about this. We "continue"
4198 the outer loop when we've changed something; otherwise we "break"
4199 the switch, which will "break" the while. */
4201 in_p = 0;
4202 low = high = build_int_cst (TREE_TYPE (exp), 0);
4204 while (1)
4206 code = TREE_CODE (exp);
4207 exp_type = TREE_TYPE (exp);
4208 arg0 = NULL_TREE;
4210 if (IS_EXPR_CODE_CLASS (TREE_CODE_CLASS (code)))
4212 if (TREE_OPERAND_LENGTH (exp) > 0)
4213 arg0 = TREE_OPERAND (exp, 0);
4214 if (TREE_CODE_CLASS (code) == tcc_binary
4215 || TREE_CODE_CLASS (code) == tcc_comparison
4216 || (TREE_CODE_CLASS (code) == tcc_expression
4217 && TREE_OPERAND_LENGTH (exp) > 1))
4218 arg1 = TREE_OPERAND (exp, 1);
4220 if (arg0 == NULL_TREE)
4221 break;
4223 nexp = make_range_step (loc, code, arg0, arg1, exp_type, &low,
4224 &high, &in_p, strict_overflow_p);
4225 if (nexp == NULL_TREE)
4226 break;
4227 exp = nexp;
4230 /* If EXP is a constant, we can evaluate whether this is true or false. */
4231 if (TREE_CODE (exp) == INTEGER_CST)
4233 in_p = in_p == (integer_onep (range_binop (GE_EXPR, integer_type_node,
4234 exp, 0, low, 0))
4235 && integer_onep (range_binop (LE_EXPR, integer_type_node,
4236 exp, 1, high, 1)));
4237 low = high = 0;
4238 exp = 0;
4241 *pin_p = in_p, *plow = low, *phigh = high;
4242 return exp;
4245 /* Given a range, LOW, HIGH, and IN_P, an expression, EXP, and a result
4246 type, TYPE, return an expression to test if EXP is in (or out of, depending
4247 on IN_P) the range. Return 0 if the test couldn't be created. */
4249 tree
4250 build_range_check (location_t loc, tree type, tree exp, int in_p,
4251 tree low, tree high)
4253 tree etype = TREE_TYPE (exp), value;
4255 #ifdef HAVE_canonicalize_funcptr_for_compare
4256 /* Disable this optimization for function pointer expressions
4257 on targets that require function pointer canonicalization. */
4258 if (HAVE_canonicalize_funcptr_for_compare
4259 && TREE_CODE (etype) == POINTER_TYPE
4260 && TREE_CODE (TREE_TYPE (etype)) == FUNCTION_TYPE)
4261 return NULL_TREE;
4262 #endif
4264 if (! in_p)
4266 value = build_range_check (loc, type, exp, 1, low, high);
4267 if (value != 0)
4268 return invert_truthvalue_loc (loc, value);
4270 return 0;
4273 if (low == 0 && high == 0)
4274 return omit_one_operand_loc (loc, type, build_int_cst (type, 1), exp);
4276 if (low == 0)
4277 return fold_build2_loc (loc, LE_EXPR, type, exp,
4278 fold_convert_loc (loc, etype, high));
4280 if (high == 0)
4281 return fold_build2_loc (loc, GE_EXPR, type, exp,
4282 fold_convert_loc (loc, etype, low));
4284 if (operand_equal_p (low, high, 0))
4285 return fold_build2_loc (loc, EQ_EXPR, type, exp,
4286 fold_convert_loc (loc, etype, low));
4288 if (integer_zerop (low))
4290 if (! TYPE_UNSIGNED (etype))
4292 etype = unsigned_type_for (etype);
4293 high = fold_convert_loc (loc, etype, high);
4294 exp = fold_convert_loc (loc, etype, exp);
4296 return build_range_check (loc, type, exp, 1, 0, high);
4299 /* Optimize (c>=1) && (c<=127) into (signed char)c > 0. */
4300 if (integer_onep (low) && TREE_CODE (high) == INTEGER_CST)
4302 int prec = TYPE_PRECISION (etype);
4304 if (wi::mask (prec - 1, false, prec) == high)
4306 if (TYPE_UNSIGNED (etype))
4308 tree signed_etype = signed_type_for (etype);
4309 if (TYPE_PRECISION (signed_etype) != TYPE_PRECISION (etype))
4310 etype
4311 = build_nonstandard_integer_type (TYPE_PRECISION (etype), 0);
4312 else
4313 etype = signed_etype;
4314 exp = fold_convert_loc (loc, etype, exp);
4316 return fold_build2_loc (loc, GT_EXPR, type, exp,
4317 build_int_cst (etype, 0));
4321 /* Optimize (c>=low) && (c<=high) into (c-low>=0) && (c-low<=high-low).
4322 This requires wrap-around arithmetic for the type of the expression.
4323 First make sure that arithmetic in this type is valid, then make sure
4324 that it wraps around. */
4325 if (TREE_CODE (etype) == ENUMERAL_TYPE || TREE_CODE (etype) == BOOLEAN_TYPE)
4326 etype = lang_hooks.types.type_for_size (TYPE_PRECISION (etype),
4327 TYPE_UNSIGNED (etype));
4329 if (TREE_CODE (etype) == INTEGER_TYPE && !TYPE_OVERFLOW_WRAPS (etype))
4331 tree utype, minv, maxv;
4333 /* Check if (unsigned) INT_MAX + 1 == (unsigned) INT_MIN
4334 for the type in question, as we rely on this here. */
4335 utype = unsigned_type_for (etype);
4336 maxv = fold_convert_loc (loc, utype, TYPE_MAX_VALUE (etype));
4337 maxv = range_binop (PLUS_EXPR, NULL_TREE, maxv, 1,
4338 build_int_cst (TREE_TYPE (maxv), 1), 1);
4339 minv = fold_convert_loc (loc, utype, TYPE_MIN_VALUE (etype));
4341 if (integer_zerop (range_binop (NE_EXPR, integer_type_node,
4342 minv, 1, maxv, 1)))
4343 etype = utype;
4344 else
4345 return 0;
4348 high = fold_convert_loc (loc, etype, high);
4349 low = fold_convert_loc (loc, etype, low);
4350 exp = fold_convert_loc (loc, etype, exp);
4352 value = const_binop (MINUS_EXPR, high, low);
4355 if (POINTER_TYPE_P (etype))
4357 if (value != 0 && !TREE_OVERFLOW (value))
4359 low = fold_build1_loc (loc, NEGATE_EXPR, TREE_TYPE (low), low);
4360 return build_range_check (loc, type,
4361 fold_build_pointer_plus_loc (loc, exp, low),
4362 1, build_int_cst (etype, 0), value);
4364 return 0;
4367 if (value != 0 && !TREE_OVERFLOW (value))
4368 return build_range_check (loc, type,
4369 fold_build2_loc (loc, MINUS_EXPR, etype, exp, low),
4370 1, build_int_cst (etype, 0), value);
4372 return 0;
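/* The shape of the check built above, as a plain-C sketch (hypothetical
   helper; assumes an unsigned type, so subtraction wraps): test
   LOW <= X && X <= HIGH by biasing X so the range starts at zero.  */
static inline int
example_range_check (unsigned int x, unsigned int low, unsigned int high)
{
  /* X - LOW wraps to a large value when X < LOW, so a single comparison
     against HIGH - LOW covers both bounds.  */
  return x - low <= high - low;
}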
4375 /* Return the predecessor of VAL in its type, handling the infinite case. */
4377 static tree
4378 range_predecessor (tree val)
4380 tree type = TREE_TYPE (val);
4382 if (INTEGRAL_TYPE_P (type)
4383 && operand_equal_p (val, TYPE_MIN_VALUE (type), 0))
4384 return 0;
4385 else
4386 return range_binop (MINUS_EXPR, NULL_TREE, val, 0,
4387 build_int_cst (TREE_TYPE (val), 1), 0);
4390 /* Return the successor of VAL in its type, handling the infinite case. */
4392 static tree
4393 range_successor (tree val)
4395 tree type = TREE_TYPE (val);
4397 if (INTEGRAL_TYPE_P (type)
4398 && operand_equal_p (val, TYPE_MAX_VALUE (type), 0))
4399 return 0;
4400 else
4401 return range_binop (PLUS_EXPR, NULL_TREE, val, 0,
4402 build_int_cst (TREE_TYPE (val), 1), 0);
4405 /* Given two ranges, see if we can merge them into one. Return 1 if we
4406 can, 0 if we can't. Set the output range into the specified parameters. */
4408 bool
4409 merge_ranges (int *pin_p, tree *plow, tree *phigh, int in0_p, tree low0,
4410 tree high0, int in1_p, tree low1, tree high1)
4412 int no_overlap;
4413 int subset;
4414 int temp;
4415 tree tem;
4416 int in_p;
4417 tree low, high;
4418 int lowequal = ((low0 == 0 && low1 == 0)
4419 || integer_onep (range_binop (EQ_EXPR, integer_type_node,
4420 low0, 0, low1, 0)));
4421 int highequal = ((high0 == 0 && high1 == 0)
4422 || integer_onep (range_binop (EQ_EXPR, integer_type_node,
4423 high0, 1, high1, 1)));
4425 /* Make range 0 be the range that starts first, or ends last if they
4426 start at the same value. Swap them otherwise. */
4427 if (integer_onep (range_binop (GT_EXPR, integer_type_node,
4428 low0, 0, low1, 0))
4429 || (lowequal
4430 && integer_onep (range_binop (GT_EXPR, integer_type_node,
4431 high1, 1, high0, 1))))
4433 temp = in0_p, in0_p = in1_p, in1_p = temp;
4434 tem = low0, low0 = low1, low1 = tem;
4435 tem = high0, high0 = high1, high1 = tem;
4438 /* Now flag two cases, whether the ranges are disjoint or whether the
4439 second range is totally subsumed in the first. Note that the tests
4440 below are simplified by the ones above. */
4441 no_overlap = integer_onep (range_binop (LT_EXPR, integer_type_node,
4442 high0, 1, low1, 0));
4443 subset = integer_onep (range_binop (LE_EXPR, integer_type_node,
4444 high1, 1, high0, 1));
4446 /* We now have four cases, depending on whether we are including or
4447 excluding the two ranges. */
4448 if (in0_p && in1_p)
4450 /* If they don't overlap, the result is false. If the second range
4451 is a subset, it is the result. Otherwise, the range is from the start
4452 of the second to the end of the first. */
4453 if (no_overlap)
4454 in_p = 0, low = high = 0;
4455 else if (subset)
4456 in_p = 1, low = low1, high = high1;
4457 else
4458 in_p = 1, low = low1, high = high0;
4461 else if (in0_p && ! in1_p)
4463 /* If they don't overlap, the result is the first range. If they are
4464 equal, the result is false. If the second range is a subset of the
4465 first, and the ranges begin at the same place, we go from just after
4466 the end of the second range to the end of the first. If the second
4467 range is not a subset of the first, or if it is a subset and both
4468 ranges end at the same place, the range starts at the start of the
4469 first range and ends just before the second range.
4470 Otherwise, we can't describe this as a single range. */
4471 if (no_overlap)
4472 in_p = 1, low = low0, high = high0;
4473 else if (lowequal && highequal)
4474 in_p = 0, low = high = 0;
4475 else if (subset && lowequal)
4477 low = range_successor (high1);
4478 high = high0;
4479 in_p = 1;
4480 if (low == 0)
4482 /* We are in the weird situation where high0 > high1 but
4483 high1 has no successor. Punt. */
4484 return 0;
4487 else if (! subset || highequal)
4489 low = low0;
4490 high = range_predecessor (low1);
4491 in_p = 1;
4492 if (high == 0)
4494 /* low0 < low1 but low1 has no predecessor. Punt. */
4495 return 0;
4498 else
4499 return 0;
4502 else if (! in0_p && in1_p)
4504 /* If they don't overlap, the result is the second range. If the second
4505 is a subset of the first, the result is false. Otherwise,
4506 the range starts just after the first range and ends at the
4507 end of the second. */
4508 if (no_overlap)
4509 in_p = 1, low = low1, high = high1;
4510 else if (subset || highequal)
4511 in_p = 0, low = high = 0;
4512 else
4514 low = range_successor (high0);
4515 high = high1;
4516 in_p = 1;
4517 if (low == 0)
4519 /* high1 > high0 but high0 has no successor. Punt. */
4520 return 0;
4525 else
4527 /* The case where we are excluding both ranges. Here the complex case
4528 is if they don't overlap. In that case, the only time we have a
4529 range is if they are adjacent. If the second is a subset of the
4530 first, the result is the first. Otherwise, the range to exclude
4531 starts at the beginning of the first range and ends at the end of the
4532 second. */
4533 if (no_overlap)
4535 if (integer_onep (range_binop (EQ_EXPR, integer_type_node,
4536 range_successor (high0),
4537 1, low1, 0)))
4538 in_p = 0, low = low0, high = high1;
4539 else
4541 /* Canonicalize - [min, x] into - [-, x]. */
4542 if (low0 && TREE_CODE (low0) == INTEGER_CST)
4543 switch (TREE_CODE (TREE_TYPE (low0)))
4545 case ENUMERAL_TYPE:
4546 if (TYPE_PRECISION (TREE_TYPE (low0))
4547 != GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (low0))))
4548 break;
4549 /* FALLTHROUGH */
4550 case INTEGER_TYPE:
4551 if (tree_int_cst_equal (low0,
4552 TYPE_MIN_VALUE (TREE_TYPE (low0))))
4553 low0 = 0;
4554 break;
4555 case POINTER_TYPE:
4556 if (TYPE_UNSIGNED (TREE_TYPE (low0))
4557 && integer_zerop (low0))
4558 low0 = 0;
4559 break;
4560 default:
4561 break;
4564 /* Canonicalize - [x, max] into - [x, -]. */
4565 if (high1 && TREE_CODE (high1) == INTEGER_CST)
4566 switch (TREE_CODE (TREE_TYPE (high1)))
4568 case ENUMERAL_TYPE:
4569 if (TYPE_PRECISION (TREE_TYPE (high1))
4570 != GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (high1))))
4571 break;
4572 /* FALLTHROUGH */
4573 case INTEGER_TYPE:
4574 if (tree_int_cst_equal (high1,
4575 TYPE_MAX_VALUE (TREE_TYPE (high1))))
4576 high1 = 0;
4577 break;
4578 case POINTER_TYPE:
4579 if (TYPE_UNSIGNED (TREE_TYPE (high1))
4580 && integer_zerop (range_binop (PLUS_EXPR, NULL_TREE,
4581 high1, 1,
4582 build_int_cst (TREE_TYPE (high1), 1),
4583 1)))
4584 high1 = 0;
4585 break;
4586 default:
4587 break;
4590 /* The ranges might also be adjacent between the maximum and
4591 minimum values of the given type. For
4592 - [{min,-}, x] and - [y, {max,-}] ranges where x + 1 < y
4593 return + [x + 1, y - 1]. */
4594 if (low0 == 0 && high1 == 0)
4596 low = range_successor (high0);
4597 high = range_predecessor (low1);
4598 if (low == 0 || high == 0)
4599 return 0;
4601 in_p = 1;
4603 else
4604 return 0;
4607 else if (subset)
4608 in_p = 0, low = low0, high = high0;
4609 else
4610 in_p = 0, low = low0, high = high1;
4613 *pin_p = in_p, *plow = low, *phigh = high;
4614 return 1;
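/* Worked examples of the merge above (illustrative): with both ranges
   included, + [2, 9] and + [5, 20] intersect to + [5, 9]; with both
   excluded, - [2, 9] and - [10, 20] are adjacent and combine to
   - [2, 20].  */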
4618 /* Subroutine of fold, looking inside expressions of the form
4619 A op B ? A : C, where ARG0, ARG1 and ARG2 are the three operands
4620 of the COND_EXPR. This function is also used to optimize
4621 A op B ? C : A, by reversing the comparison first.
4623 Return a folded expression whose code is not a COND_EXPR
4624 anymore, or NULL_TREE if no folding opportunity is found. */
4626 static tree
4627 fold_cond_expr_with_comparison (location_t loc, tree type,
4628 tree arg0, tree arg1, tree arg2)
4630 enum tree_code comp_code = TREE_CODE (arg0);
4631 tree arg00 = TREE_OPERAND (arg0, 0);
4632 tree arg01 = TREE_OPERAND (arg0, 1);
4633 tree arg1_type = TREE_TYPE (arg1);
4634 tree tem;
4636 STRIP_NOPS (arg1);
4637 STRIP_NOPS (arg2);
4639 /* If we have A op 0 ? A : -A, consider applying the following
4640 transformations:
4642 A == 0? A : -A same as -A
4643 A != 0? A : -A same as A
4644 A >= 0? A : -A same as abs (A)
4645 A > 0? A : -A same as abs (A)
4646 A <= 0? A : -A same as -abs (A)
4647 A < 0? A : -A same as -abs (A)
4649 None of these transformations work for modes with signed
4650 zeros. If A is +/-0, the first two transformations will
4651 change the sign of the result (from +0 to -0, or vice
4652 versa). The last four will fix the sign of the result,
4653 even though the original expressions could be positive or
4654 negative, depending on the sign of A.
4656 Note that all these transformations are correct if A is
4657 NaN, since the two alternatives (A and -A) are also NaNs. */
4658 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (type))
4659 && (FLOAT_TYPE_P (TREE_TYPE (arg01))
4660 ? real_zerop (arg01)
4661 : integer_zerop (arg01))
4662 && ((TREE_CODE (arg2) == NEGATE_EXPR
4663 && operand_equal_p (TREE_OPERAND (arg2, 0), arg1, 0))
4664 /* In the case that A is of the form X-Y, '-A' (arg2) may
4665 have already been folded to Y-X, check for that. */
4666 || (TREE_CODE (arg1) == MINUS_EXPR
4667 && TREE_CODE (arg2) == MINUS_EXPR
4668 && operand_equal_p (TREE_OPERAND (arg1, 0),
4669 TREE_OPERAND (arg2, 1), 0)
4670 && operand_equal_p (TREE_OPERAND (arg1, 1),
4671 TREE_OPERAND (arg2, 0), 0))))
4672 switch (comp_code)
4674 case EQ_EXPR:
4675 case UNEQ_EXPR:
4676 tem = fold_convert_loc (loc, arg1_type, arg1);
4677 return pedantic_non_lvalue_loc (loc,
4678 fold_convert_loc (loc, type,
4679 negate_expr (tem)));
4680 case NE_EXPR:
4681 case LTGT_EXPR:
4682 return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
4683 case UNGE_EXPR:
4684 case UNGT_EXPR:
4685 if (flag_trapping_math)
4686 break;
4687 /* Fall through. */
4688 case GE_EXPR:
4689 case GT_EXPR:
4690 if (TYPE_UNSIGNED (TREE_TYPE (arg1)))
4691 arg1 = fold_convert_loc (loc, signed_type_for
4692 (TREE_TYPE (arg1)), arg1);
4693 tem = fold_build1_loc (loc, ABS_EXPR, TREE_TYPE (arg1), arg1);
4694 return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, tem));
4695 case UNLE_EXPR:
4696 case UNLT_EXPR:
4697 if (flag_trapping_math)
4698 break;
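/* Fall through. */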
4699 case LE_EXPR:
4700 case LT_EXPR:
4701 if (TYPE_UNSIGNED (TREE_TYPE (arg1)))
4702 arg1 = fold_convert_loc (loc, signed_type_for
4703 (TREE_TYPE (arg1)), arg1);
4704 tem = fold_build1_loc (loc, ABS_EXPR, TREE_TYPE (arg1), arg1);
4705 return negate_expr (fold_convert_loc (loc, type, tem));
4706 default:
4707 gcc_assert (TREE_CODE_CLASS (comp_code) == tcc_comparison);
4708 break;
4711 /* A != 0 ? A : 0 is simply A, unless A is -0. Likewise
4712 A == 0 ? A : 0 is always 0 unless A is -0. Note that
4713 both transformations are correct when A is NaN: A != 0
4714 is then true, and A == 0 is false. */
4716 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (type))
4717 && integer_zerop (arg01) && integer_zerop (arg2))
4719 if (comp_code == NE_EXPR)
4720 return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
4721 else if (comp_code == EQ_EXPR)
4722 return build_zero_cst (type);
4725 /* Try some transformations of A op B ? A : B.
4727 A == B? A : B same as B
4728 A != B? A : B same as A
4729 A >= B? A : B same as max (A, B)
4730 A > B? A : B same as max (B, A)
4731 A <= B? A : B same as min (A, B)
4732 A < B? A : B same as min (B, A)
4734 As above, these transformations don't work in the presence
4735 of signed zeros. For example, if A and B are zeros of
4736 opposite sign, the first two transformations will change
4737 the sign of the result. In the last four, the original
4738 expressions give different results for (A=+0, B=-0) and
4739 (A=-0, B=+0), but the transformed expressions do not.
4741 The first two transformations are correct if either A or B
4742 is a NaN. In the first transformation, the condition will
4743 be false, and B will indeed be chosen. In the case of the
4744 second transformation, the condition A != B will be true,
4745 and A will be chosen.
4747 The conversions to max() and min() are not correct if B is
4748 a number and A is not. The conditions in the original
4749 expressions will be false, so all four give B. The min()
4750 and max() versions would give a NaN instead. */
4751 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (type))
4752 && operand_equal_for_comparison_p (arg01, arg2, arg00)
4753 /* Avoid these transformations if the COND_EXPR may be used
4754 as an lvalue in the C++ front-end. PR c++/19199. */
4755 && (in_gimple_form
4756 || VECTOR_TYPE_P (type)
4757 || (strcmp (lang_hooks.name, "GNU C++") != 0
4758 && strcmp (lang_hooks.name, "GNU Objective-C++") != 0)
4759 || ! maybe_lvalue_p (arg1)
4760 || ! maybe_lvalue_p (arg2)))
4762 tree comp_op0 = arg00;
4763 tree comp_op1 = arg01;
4764 tree comp_type = TREE_TYPE (comp_op0);
4766 /* Avoid adding NOP_EXPRs in case this is an lvalue. */
4767 if (TYPE_MAIN_VARIANT (comp_type) == TYPE_MAIN_VARIANT (type))
4769 comp_type = type;
4770 comp_op0 = arg1;
4771 comp_op1 = arg2;
4774 switch (comp_code)
4776 case EQ_EXPR:
4777 return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, arg2));
4778 case NE_EXPR:
4779 return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
4780 case LE_EXPR:
4781 case LT_EXPR:
4782 case UNLE_EXPR:
4783 case UNLT_EXPR:
4784 /* In C++ a ?: expression can be an lvalue, so put the
4785 operand which will be used if they are equal first
4786 so that we can convert this back to the
4787 corresponding COND_EXPR. */
4788 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
4790 comp_op0 = fold_convert_loc (loc, comp_type, comp_op0);
4791 comp_op1 = fold_convert_loc (loc, comp_type, comp_op1);
4792 tem = (comp_code == LE_EXPR || comp_code == UNLE_EXPR)
4793 ? fold_build2_loc (loc, MIN_EXPR, comp_type, comp_op0, comp_op1)
4794 : fold_build2_loc (loc, MIN_EXPR, comp_type,
4795 comp_op1, comp_op0);
4796 return pedantic_non_lvalue_loc (loc,
4797 fold_convert_loc (loc, type, tem));
4799 break;
4800 case GE_EXPR:
4801 case GT_EXPR:
4802 case UNGE_EXPR:
4803 case UNGT_EXPR:
4804 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
4806 comp_op0 = fold_convert_loc (loc, comp_type, comp_op0);
4807 comp_op1 = fold_convert_loc (loc, comp_type, comp_op1);
4808 tem = (comp_code == GE_EXPR || comp_code == UNGE_EXPR)
4809 ? fold_build2_loc (loc, MAX_EXPR, comp_type, comp_op0, comp_op1)
4810 : fold_build2_loc (loc, MAX_EXPR, comp_type,
4811 comp_op1, comp_op0);
4812 return pedantic_non_lvalue_loc (loc,
4813 fold_convert_loc (loc, type, tem));
4815 break;
4816 case UNEQ_EXPR:
4817 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
4818 return pedantic_non_lvalue_loc (loc,
4819 fold_convert_loc (loc, type, arg2));
4820 break;
4821 case LTGT_EXPR:
4822 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
4823 return pedantic_non_lvalue_loc (loc,
4824 fold_convert_loc (loc, type, arg1));
4825 break;
4826 default:
4827 gcc_assert (TREE_CODE_CLASS (comp_code) == tcc_comparison);
4828 break;
4832 /* If this is A op C1 ? A : C2 with C1 and C2 constant integers,
4833 we might still be able to simplify this. For example,
4834 if C1 is one less or one more than C2, this might have started
4835 out as a MIN or MAX and been transformed by this function.
4836 Only good for INTEGER_TYPEs, because we need TYPE_MAX_VALUE. */
4838 if (INTEGRAL_TYPE_P (type)
4839 && TREE_CODE (arg01) == INTEGER_CST
4840 && TREE_CODE (arg2) == INTEGER_CST)
4841 switch (comp_code)
4843 case EQ_EXPR:
4844 if (TREE_CODE (arg1) == INTEGER_CST)
4845 break;
4846 /* We can replace A with C1 in this case. */
4847 arg1 = fold_convert_loc (loc, type, arg01);
4848 return fold_build3_loc (loc, COND_EXPR, type, arg0, arg1, arg2);
4850 case LT_EXPR:
4851 /* If C1 is C2 + 1, this is min(A, C2), but use ARG00's type for
4852 MIN_EXPR, to preserve the signedness of the comparison. */
4853 if (! operand_equal_p (arg2, TYPE_MAX_VALUE (type),
4854 OEP_ONLY_CONST)
4855 && operand_equal_p (arg01,
4856 const_binop (PLUS_EXPR, arg2,
4857 build_int_cst (type, 1)),
4858 OEP_ONLY_CONST))
4860 tem = fold_build2_loc (loc, MIN_EXPR, TREE_TYPE (arg00), arg00,
4861 fold_convert_loc (loc, TREE_TYPE (arg00),
4862 arg2));
4863 return pedantic_non_lvalue_loc (loc,
4864 fold_convert_loc (loc, type, tem));
4866 break;
4868 case LE_EXPR:
4869 /* If C1 is C2 - 1, this is min(A, C2), with the same care
4870 as above. */
4871 if (! operand_equal_p (arg2, TYPE_MIN_VALUE (type),
4872 OEP_ONLY_CONST)
4873 && operand_equal_p (arg01,
4874 const_binop (MINUS_EXPR, arg2,
4875 build_int_cst (type, 1)),
4876 OEP_ONLY_CONST))
4878 tem = fold_build2_loc (loc, MIN_EXPR, TREE_TYPE (arg00), arg00,
4879 fold_convert_loc (loc, TREE_TYPE (arg00),
4880 arg2));
4881 return pedantic_non_lvalue_loc (loc,
4882 fold_convert_loc (loc, type, tem));
4884 break;
4886 case GT_EXPR:
4887 /* If C1 is C2 - 1, this is max(A, C2), but use ARG00's type for
4888 MAX_EXPR, to preserve the signedness of the comparison. */
4889 if (! operand_equal_p (arg2, TYPE_MIN_VALUE (type),
4890 OEP_ONLY_CONST)
4891 && operand_equal_p (arg01,
4892 const_binop (MINUS_EXPR, arg2,
4893 build_int_cst (type, 1)),
4894 OEP_ONLY_CONST))
4896 tem = fold_build2_loc (loc, MAX_EXPR, TREE_TYPE (arg00), arg00,
4897 fold_convert_loc (loc, TREE_TYPE (arg00),
4898 arg2));
4899 return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, tem));
4901 break;
4903 case GE_EXPR:
4904 /* If C1 is C2 + 1, this is max(A, C2), with the same care as above. */
4905 if (! operand_equal_p (arg2, TYPE_MAX_VALUE (type),
4906 OEP_ONLY_CONST)
4907 && operand_equal_p (arg01,
4908 const_binop (PLUS_EXPR, arg2,
4909 build_int_cst (type, 1)),
4910 OEP_ONLY_CONST))
4912 tem = fold_build2_loc (loc, MAX_EXPR, TREE_TYPE (arg00), arg00,
4913 fold_convert_loc (loc, TREE_TYPE (arg00),
4914 arg2));
4915 return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, tem));
4917 break;
4918 case NE_EXPR:
4919 break;
4920 default:
4921 gcc_unreachable ();
4924 return NULL_TREE;
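/* Plain-C sketches of two of the transformation families above
   (hypothetical helpers; integer operands, so signed zeros and NaNs
   do not arise):  */
static inline int
example_cond_to_abs (int a)
{
  return a >= 0 ? a : -a;       /* A >= 0 ? A : -A folds to abs (A) */
}
static inline int
example_cond_to_min (int a, int b)
{
  return a <= b ? a : b;        /* A <= B ? A : B folds to min (A, B) */
}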
4929 #ifndef LOGICAL_OP_NON_SHORT_CIRCUIT
4930 #define LOGICAL_OP_NON_SHORT_CIRCUIT \
4931 (BRANCH_COST (optimize_function_for_speed_p (cfun), \
4932 false) >= 2)
4933 #endif
4935 /* EXP is some logical combination of boolean tests. See if we can
4936 merge it into some range test. Return the new tree if so. */
4938 static tree
4939 fold_range_test (location_t loc, enum tree_code code, tree type,
4940 tree op0, tree op1)
4942 int or_op = (code == TRUTH_ORIF_EXPR
4943 || code == TRUTH_OR_EXPR);
4944 int in0_p, in1_p, in_p;
4945 tree low0, low1, low, high0, high1, high;
4946 bool strict_overflow_p = false;
4947 tree tem, lhs, rhs;
4948 const char * const warnmsg = G_("assuming signed overflow does not occur "
4949 "when simplifying range test");
4951 if (!INTEGRAL_TYPE_P (type))
4952 return 0;
4954 lhs = make_range (op0, &in0_p, &low0, &high0, &strict_overflow_p);
4955 rhs = make_range (op1, &in1_p, &low1, &high1, &strict_overflow_p);
4957 /* If this is an OR operation, invert both sides; we will invert
4958 again at the end. */
4959 if (or_op)
4960 in0_p = ! in0_p, in1_p = ! in1_p;
4962 /* If both expressions are the same, if we can merge the ranges, and we
4963 can build the range test, return it or it inverted. If one of the
4964 ranges is always true or always false, consider it to be the same
4965 expression as the other. */
4966 if ((lhs == 0 || rhs == 0 || operand_equal_p (lhs, rhs, 0))
4967 && merge_ranges (&in_p, &low, &high, in0_p, low0, high0,
4968 in1_p, low1, high1)
4969 && 0 != (tem = (build_range_check (loc, type,
4970 lhs != 0 ? lhs
4971 : rhs != 0 ? rhs : integer_zero_node,
4972 in_p, low, high))))
4974 if (strict_overflow_p)
4975 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_COMPARISON);
4976 return or_op ? invert_truthvalue_loc (loc, tem) : tem;
4979 /* On machines where branches are expensive, if this is a
4980 short-circuited branch and the underlying object on both sides
4981 is the same, make a non-short-circuit operation. */
4982 else if (LOGICAL_OP_NON_SHORT_CIRCUIT
4983 && lhs != 0 && rhs != 0
4984 && (code == TRUTH_ANDIF_EXPR
4985 || code == TRUTH_ORIF_EXPR)
4986 && operand_equal_p (lhs, rhs, 0))
4988 /* If simple enough, just rewrite. Otherwise, make a SAVE_EXPR
4989 unless we are at top level or LHS contains a PLACEHOLDER_EXPR, in
4990 which case we can't do this. */
4991 if (simple_operand_p (lhs))
4992 return build2_loc (loc, code == TRUTH_ANDIF_EXPR
4993 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR,
4994 type, op0, op1);
4996 else if (!lang_hooks.decls.global_bindings_p ()
4997 && !CONTAINS_PLACEHOLDER_P (lhs))
4999 tree common = save_expr (lhs);
5001 if (0 != (lhs = build_range_check (loc, type, common,
5002 or_op ? ! in0_p : in0_p,
5003 low0, high0))
5004 && (0 != (rhs = build_range_check (loc, type, common,
5005 or_op ? ! in1_p : in1_p,
5006 low1, high1))))
5008 if (strict_overflow_p)
5009 fold_overflow_warning (warnmsg,
5010 WARN_STRICT_OVERFLOW_COMPARISON);
5011 return build2_loc (loc, code == TRUTH_ANDIF_EXPR
5012 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR,
5013 type, lhs, rhs);
5018 return 0;
5021 /* Subroutine for fold_truth_andor_1: C is an INTEGER_CST interpreted as a P
5022 bit value. Arrange things so the extra bits will be set to zero if and
5023 only if C is sign-extended to its full width. If MASK is nonzero,
5024 it is an INTEGER_CST that should be AND'ed with the extra bits. */
5026 static tree
5027 unextend (tree c, int p, int unsignedp, tree mask)
5029 tree type = TREE_TYPE (c);
5030 int modesize = GET_MODE_BITSIZE (TYPE_MODE (type));
5031 tree temp;
5033 if (p == modesize || unsignedp)
5034 return c;
5036 /* We work by getting just the sign bit into the low-order bit, then
5037 into the high-order bit, then sign-extend. We then XOR that value
5038 with C. */
5039 temp = build_int_cst (TREE_TYPE (c), wi::extract_uhwi (c, p - 1, 1));
5041 /* We must use a signed type in order to get an arithmetic right shift.
5042 However, we must also avoid introducing accidental overflows, so that
5043 a subsequent call to integer_zerop will work. Hence we must
5044 do the type conversion here. At this point, the constant is either
5045 zero or one, and the conversion to a signed type can never overflow.
5046 We could get an overflow if this conversion is done anywhere else. */
5047 if (TYPE_UNSIGNED (type))
5048 temp = fold_convert (signed_type_for (type), temp);
5050 temp = const_binop (LSHIFT_EXPR, temp, size_int (modesize - 1));
5051 temp = const_binop (RSHIFT_EXPR, temp, size_int (modesize - p - 1));
5052 if (mask != 0)
5053 temp = const_binop (BIT_AND_EXPR, temp,
5054 fold_convert (TREE_TYPE (c), mask));
5055 /* If necessary, convert the type back to match the type of C. */
5056 if (TYPE_UNSIGNED (type))
5057 temp = fold_convert (type, temp);
5059 return fold_convert (type, const_binop (BIT_XOR_EXPR, c, temp));
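/* A plain-C sketch of the trick above for a 32-bit mode (hypothetical
   helper; assumes 0 < P < 32, two's complement, and an arithmetic right
   shift of signed int): the result's bits above P-1 are zero exactly
   when C was sign-extended from P bits.  */
static inline unsigned int
example_unextend (unsigned int c, int p)
{
  int temp = (int) (((c >> (p - 1)) & 1u) << 31); /* sign bit at the top */
  temp >>= 31 - p;                     /* smear it over bits P .. 31 */
  return c ^ (unsigned int) temp;      /* cancel the extension copies */
}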
5062 /* For an expression that has the form
5063 (A && B) || ~B
5064 or
5065 (A || B) && ~B,
5066 we can drop one of the inner expressions and simplify to
5067 A || ~B
5068 or
5069 A && ~B
5070 LOC is the location of the resulting expression. OP is the inner
5071 logical operation (the left-hand side in the examples above), while CMPOP
5072 is the right-hand side. RHS_ONLY is used to prevent us from accidentally
5073 removing a condition that guards another, as in
5074 (A != NULL && A->...) || A == NULL
5075 which we must not transform. If RHS_ONLY is true, only eliminate the
5076 right-most operand of the inner logical operation. */
5078 static tree
5079 merge_truthop_with_opposite_arm (location_t loc, tree op, tree cmpop,
5080 bool rhs_only)
5082 tree type = TREE_TYPE (cmpop);
5083 enum tree_code code = TREE_CODE (cmpop);
5084 enum tree_code truthop_code = TREE_CODE (op);
5085 tree lhs = TREE_OPERAND (op, 0);
5086 tree rhs = TREE_OPERAND (op, 1);
5087 tree orig_lhs = lhs, orig_rhs = rhs;
5088 enum tree_code rhs_code = TREE_CODE (rhs);
5089 enum tree_code lhs_code = TREE_CODE (lhs);
5090 enum tree_code inv_code;
5092 if (TREE_SIDE_EFFECTS (op) || TREE_SIDE_EFFECTS (cmpop))
5093 return NULL_TREE;
5095 if (TREE_CODE_CLASS (code) != tcc_comparison)
5096 return NULL_TREE;
5098 if (rhs_code == truthop_code)
5100 tree newrhs = merge_truthop_with_opposite_arm (loc, rhs, cmpop, rhs_only);
5101 if (newrhs != NULL_TREE)
5103 rhs = newrhs;
5104 rhs_code = TREE_CODE (rhs);
5107 if (lhs_code == truthop_code && !rhs_only)
5109 tree newlhs = merge_truthop_with_opposite_arm (loc, lhs, cmpop, false);
5110 if (newlhs != NULL_TREE)
5112 lhs = newlhs;
5113 lhs_code = TREE_CODE (lhs);
5117 inv_code = invert_tree_comparison (code, HONOR_NANS (TYPE_MODE (type)));
5118 if (inv_code == rhs_code
5119 && operand_equal_p (TREE_OPERAND (rhs, 0), TREE_OPERAND (cmpop, 0), 0)
5120 && operand_equal_p (TREE_OPERAND (rhs, 1), TREE_OPERAND (cmpop, 1), 0))
5121 return lhs;
5122 if (!rhs_only && inv_code == lhs_code
5123 && operand_equal_p (TREE_OPERAND (lhs, 0), TREE_OPERAND (cmpop, 0), 0)
5124 && operand_equal_p (TREE_OPERAND (lhs, 1), TREE_OPERAND (cmpop, 1), 0))
5125 return rhs;
5126 if (rhs != orig_rhs || lhs != orig_lhs)
5127 return fold_build2_loc (loc, truthop_code, TREE_TYPE (cmpop),
5128 lhs, rhs);
5129 return NULL_TREE;
5132 /* Find ways of folding logical expressions of LHS and RHS:
5133 Try to merge two comparisons to the same innermost item.
5134 Look for range tests like "ch >= '0' && ch <= '9'".
5135 Look for combinations of simple terms on machines with expensive branches
5136 and evaluate the RHS unconditionally.
5138 For example, if we have p->a == 2 && p->b == 4 and we can make an
5139 object large enough to span both A and B, we can do this with a comparison
5140 against the object ANDed with the a mask.
5142 If we have p->a == q->a && p->b == q->b, we may be able to use bit masking
5143 operations to do this with one comparison.
5145 We check for both normal comparisons and the BIT_AND_EXPRs made by this
5146 function and the one above.
5148 CODE is the logical operation being done. It can be TRUTH_ANDIF_EXPR,
5149 TRUTH_AND_EXPR, TRUTH_ORIF_EXPR, or TRUTH_OR_EXPR.
5151 TRUTH_TYPE is the type of the logical operand and LHS and RHS are its
5152 two operands.
5154 We return the simplified tree or 0 if no optimization is possible. */
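/* As a concrete sketch of the merged comparison (illustrative layout;
   assumes a little-endian target where fields A and B are adjacent
   bytes of one aligned halfword): p->a == 2 && p->b == 4 becomes a
   single test of that halfword against (4 << 8) | 2, under a mask
   that is all ones over both fields and zero elsewhere.  */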
5156 static tree
5157 fold_truth_andor_1 (location_t loc, enum tree_code code, tree truth_type,
5158 tree lhs, tree rhs)
5160 /* If this is the "or" of two comparisons, we can do something if
5161 the comparisons are NE_EXPR. If this is the "and", we can do something
5162 if the comparisons are EQ_EXPR. I.e.,
5163 (a->b == 2 && a->c == 4) can become (a->new == NEW).
5165 WANTED_CODE is this operation code. For single bit fields, we can
5166 convert EQ_EXPR to NE_EXPR so we need not reject the "wrong"
5167 comparison for one-bit fields. */
5169 enum tree_code wanted_code;
5170 enum tree_code lcode, rcode;
5171 tree ll_arg, lr_arg, rl_arg, rr_arg;
5172 tree ll_inner, lr_inner, rl_inner, rr_inner;
5173 HOST_WIDE_INT ll_bitsize, ll_bitpos, lr_bitsize, lr_bitpos;
5174 HOST_WIDE_INT rl_bitsize, rl_bitpos, rr_bitsize, rr_bitpos;
5175 HOST_WIDE_INT xll_bitpos, xlr_bitpos, xrl_bitpos, xrr_bitpos;
5176 HOST_WIDE_INT lnbitsize, lnbitpos, rnbitsize, rnbitpos;
5177 int ll_unsignedp, lr_unsignedp, rl_unsignedp, rr_unsignedp;
5178 enum machine_mode ll_mode, lr_mode, rl_mode, rr_mode;
5179 enum machine_mode lnmode, rnmode;
5180 tree ll_mask, lr_mask, rl_mask, rr_mask;
5181 tree ll_and_mask, lr_and_mask, rl_and_mask, rr_and_mask;
5182 tree l_const, r_const;
5183 tree lntype, rntype, result;
5184 HOST_WIDE_INT first_bit, end_bit;
5185 int volatilep;
5187 /* Start by getting the comparison codes. Fail if anything is volatile.
5188 If one operand is a BIT_AND_EXPR with the constant one, treat it as if
5189 it were surrounded with a NE_EXPR. */
5191 if (TREE_SIDE_EFFECTS (lhs) || TREE_SIDE_EFFECTS (rhs))
5192 return 0;
5194 lcode = TREE_CODE (lhs);
5195 rcode = TREE_CODE (rhs);
5197 if (lcode == BIT_AND_EXPR && integer_onep (TREE_OPERAND (lhs, 1)))
5199 lhs = build2 (NE_EXPR, truth_type, lhs,
5200 build_int_cst (TREE_TYPE (lhs), 0));
5201 lcode = NE_EXPR;
5204 if (rcode == BIT_AND_EXPR && integer_onep (TREE_OPERAND (rhs, 1)))
5206 rhs = build2 (NE_EXPR, truth_type, rhs,
5207 build_int_cst (TREE_TYPE (rhs), 0));
5208 rcode = NE_EXPR;
5211 if (TREE_CODE_CLASS (lcode) != tcc_comparison
5212 || TREE_CODE_CLASS (rcode) != tcc_comparison)
5213 return 0;
5215 ll_arg = TREE_OPERAND (lhs, 0);
5216 lr_arg = TREE_OPERAND (lhs, 1);
5217 rl_arg = TREE_OPERAND (rhs, 0);
5218 rr_arg = TREE_OPERAND (rhs, 1);
5220 /* Simplify (x<y) && (x==y) into (x<=y) and related optimizations. */
5221 if (simple_operand_p (ll_arg)
5222 && simple_operand_p (lr_arg))
5224 if (operand_equal_p (ll_arg, rl_arg, 0)
5225 && operand_equal_p (lr_arg, rr_arg, 0))
5227 result = combine_comparisons (loc, code, lcode, rcode,
5228 truth_type, ll_arg, lr_arg);
5229 if (result)
5230 return result;
5232 else if (operand_equal_p (ll_arg, rr_arg, 0)
5233 && operand_equal_p (lr_arg, rl_arg, 0))
5235 result = combine_comparisons (loc, code, lcode,
5236 swap_tree_comparison (rcode),
5237 truth_type, ll_arg, lr_arg);
5238 if (result)
5239 return result;
5243 code = ((code == TRUTH_AND_EXPR || code == TRUTH_ANDIF_EXPR)
5244 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR);
5246 /* If the RHS can be evaluated unconditionally and its operands are
5247 simple, it wins to evaluate the RHS unconditionally on machines
5248 with expensive branches. In this case, this isn't a comparison
5249 that can be merged. */
5251 if (BRANCH_COST (optimize_function_for_speed_p (cfun),
5252 false) >= 2
5253 && ! FLOAT_TYPE_P (TREE_TYPE (rl_arg))
5254 && simple_operand_p (rl_arg)
5255 && simple_operand_p (rr_arg))
5257 /* Convert (a != 0) || (b != 0) into (a | b) != 0. */
5258 if (code == TRUTH_OR_EXPR
5259 && lcode == NE_EXPR && integer_zerop (lr_arg)
5260 && rcode == NE_EXPR && integer_zerop (rr_arg)
5261 && TREE_TYPE (ll_arg) == TREE_TYPE (rl_arg)
5262 && INTEGRAL_TYPE_P (TREE_TYPE (ll_arg)))
5263 return build2_loc (loc, NE_EXPR, truth_type,
5264 build2 (BIT_IOR_EXPR, TREE_TYPE (ll_arg),
5265 ll_arg, rl_arg),
5266 build_int_cst (TREE_TYPE (ll_arg), 0));
5268 /* Convert (a == 0) && (b == 0) into (a | b) == 0. */
5269 if (code == TRUTH_AND_EXPR
5270 && lcode == EQ_EXPR && integer_zerop (lr_arg)
5271 && rcode == EQ_EXPR && integer_zerop (rr_arg)
5272 && TREE_TYPE (ll_arg) == TREE_TYPE (rl_arg)
5273 && INTEGRAL_TYPE_P (TREE_TYPE (ll_arg)))
5274 return build2_loc (loc, EQ_EXPR, truth_type,
5275 build2 (BIT_IOR_EXPR, TREE_TYPE (ll_arg),
5276 ll_arg, rl_arg),
5277 build_int_cst (TREE_TYPE (ll_arg), 0));
5280 /* See if the comparisons can be merged. Then get all the parameters for
5281 each side. */
5283 if ((lcode != EQ_EXPR && lcode != NE_EXPR)
5284 || (rcode != EQ_EXPR && rcode != NE_EXPR))
5285 return 0;
5287 volatilep = 0;
5288 ll_inner = decode_field_reference (loc, ll_arg,
5289 &ll_bitsize, &ll_bitpos, &ll_mode,
5290 &ll_unsignedp, &volatilep, &ll_mask,
5291 &ll_and_mask);
5292 lr_inner = decode_field_reference (loc, lr_arg,
5293 &lr_bitsize, &lr_bitpos, &lr_mode,
5294 &lr_unsignedp, &volatilep, &lr_mask,
5295 &lr_and_mask);
5296 rl_inner = decode_field_reference (loc, rl_arg,
5297 &rl_bitsize, &rl_bitpos, &rl_mode,
5298 &rl_unsignedp, &volatilep, &rl_mask,
5299 &rl_and_mask);
5300 rr_inner = decode_field_reference (loc, rr_arg,
5301 &rr_bitsize, &rr_bitpos, &rr_mode,
5302 &rr_unsignedp, &volatilep, &rr_mask,
5303 &rr_and_mask);
5305 /* The inner operation on the lhs of each comparison must be the
5306 same if we are to be able to do anything.
5307 Then see if we have constants. If not, the same must be true for
5308 the rhs's. */
5309 if (volatilep || ll_inner == 0 || rl_inner == 0
5310 || ! operand_equal_p (ll_inner, rl_inner, 0))
5311 return 0;
5313 if (TREE_CODE (lr_arg) == INTEGER_CST
5314 && TREE_CODE (rr_arg) == INTEGER_CST)
5315 l_const = lr_arg, r_const = rr_arg;
5316 else if (lr_inner == 0 || rr_inner == 0
5317 || ! operand_equal_p (lr_inner, rr_inner, 0))
5318 return 0;
5319 else
5320 l_const = r_const = 0;
5322 /* If either comparison code is not correct for our logical operation,
5323 fail. However, we can convert a one-bit comparison against zero into
5324 the opposite comparison against that bit being set in the field. */
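/* E.g. for a single bit B of X, (X & B) == 0 can be recast as
   (X & B) != B, letting a mismatched comparison code join the
   merge below.  */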
5326 wanted_code = (code == TRUTH_AND_EXPR ? EQ_EXPR : NE_EXPR);
5327 if (lcode != wanted_code)
5329 if (l_const && integer_zerop (l_const) && integer_pow2p (ll_mask))
5331 /* Make the left operand unsigned, since we are only interested
5332 in the value of one bit. Otherwise we are doing the wrong
5333 thing below. */
5334 ll_unsignedp = 1;
5335 l_const = ll_mask;
5337 else
5338 return 0;
5341 /* This is analogous to the code for l_const above. */
5342 if (rcode != wanted_code)
5344 if (r_const && integer_zerop (r_const) && integer_pow2p (rl_mask))
5346 rl_unsignedp = 1;
5347 r_const = rl_mask;
5349 else
5350 return 0;
5353 /* See if we can find a mode that contains both fields being compared on
5354 the left. If we can't, fail. Otherwise, update all constants and masks
5355 to be relative to a field of that size. */
5356 first_bit = MIN (ll_bitpos, rl_bitpos);
5357 end_bit = MAX (ll_bitpos + ll_bitsize, rl_bitpos + rl_bitsize);
5358 lnmode = get_best_mode (end_bit - first_bit, first_bit, 0, 0,
5359 TYPE_ALIGN (TREE_TYPE (ll_inner)), word_mode,
5360 volatilep);
5361 if (lnmode == VOIDmode)
5362 return 0;
5364 lnbitsize = GET_MODE_BITSIZE (lnmode);
5365 lnbitpos = first_bit & ~ (lnbitsize - 1);
5366 lntype = lang_hooks.types.type_for_size (lnbitsize, 1);
5367 xll_bitpos = ll_bitpos - lnbitpos, xrl_bitpos = rl_bitpos - lnbitpos;
5369 if (BYTES_BIG_ENDIAN)
5371 xll_bitpos = lnbitsize - xll_bitpos - ll_bitsize;
5372 xrl_bitpos = lnbitsize - xrl_bitpos - rl_bitsize;
5375 ll_mask = const_binop (LSHIFT_EXPR, fold_convert_loc (loc, lntype, ll_mask),
5376 size_int (xll_bitpos));
5377 rl_mask = const_binop (LSHIFT_EXPR, fold_convert_loc (loc, lntype, rl_mask),
5378 size_int (xrl_bitpos));
5380 if (l_const)
5382 l_const = fold_convert_loc (loc, lntype, l_const);
5383 l_const = unextend (l_const, ll_bitsize, ll_unsignedp, ll_and_mask);
5384 l_const = const_binop (LSHIFT_EXPR, l_const, size_int (xll_bitpos));
5385 if (! integer_zerop (const_binop (BIT_AND_EXPR, l_const,
5386 fold_build1_loc (loc, BIT_NOT_EXPR,
5387 lntype, ll_mask))))
5389 warning (0, "comparison is always %d", wanted_code == NE_EXPR);
5391 return constant_boolean_node (wanted_code == NE_EXPR, truth_type);
5394 if (r_const)
5396 r_const = fold_convert_loc (loc, lntype, r_const);
5397 r_const = unextend (r_const, rl_bitsize, rl_unsignedp, rl_and_mask);
5398 r_const = const_binop (LSHIFT_EXPR, r_const, size_int (xrl_bitpos));
5399 if (! integer_zerop (const_binop (BIT_AND_EXPR, r_const,
5400 fold_build1_loc (loc, BIT_NOT_EXPR,
5401 lntype, rl_mask))))
5403 warning (0, "comparison is always %d", wanted_code == NE_EXPR);
5405 return constant_boolean_node (wanted_code == NE_EXPR, truth_type);
5409 /* If the right sides are not constant, do the same for them. Also,
5410 disallow this optimization if a size or signedness mismatch occurs
5411 between the left and right sides. */
5412 if (l_const == 0)
5414 if (ll_bitsize != lr_bitsize || rl_bitsize != rr_bitsize
5415 || ll_unsignedp != lr_unsignedp || rl_unsignedp != rr_unsignedp
5416 /* Make sure the two fields on the right
5417 correspond to the left without being swapped. */
5418 || ll_bitpos - rl_bitpos != lr_bitpos - rr_bitpos)
5419 return 0;
5421 first_bit = MIN (lr_bitpos, rr_bitpos);
5422 end_bit = MAX (lr_bitpos + lr_bitsize, rr_bitpos + rr_bitsize);
5423 rnmode = get_best_mode (end_bit - first_bit, first_bit, 0, 0,
5424 TYPE_ALIGN (TREE_TYPE (lr_inner)), word_mode,
5425 volatilep);
5426 if (rnmode == VOIDmode)
5427 return 0;
5429 rnbitsize = GET_MODE_BITSIZE (rnmode);
5430 rnbitpos = first_bit & ~ (rnbitsize - 1);
5431 rntype = lang_hooks.types.type_for_size (rnbitsize, 1);
5432 xlr_bitpos = lr_bitpos - rnbitpos, xrr_bitpos = rr_bitpos - rnbitpos;
5434 if (BYTES_BIG_ENDIAN)
5436 xlr_bitpos = rnbitsize - xlr_bitpos - lr_bitsize;
5437 xrr_bitpos = rnbitsize - xrr_bitpos - rr_bitsize;
5440 lr_mask = const_binop (LSHIFT_EXPR, fold_convert_loc (loc,
5441 rntype, lr_mask),
5442 size_int (xlr_bitpos));
5443 rr_mask = const_binop (LSHIFT_EXPR, fold_convert_loc (loc,
5444 rntype, rr_mask),
5445 size_int (xrr_bitpos));
5447 /* Make a mask that corresponds to both fields being compared.
5448 Do this for both items being compared. If the operands are the
5449 same size and the bits being compared are in the same position
5450 then we can do this by masking both and comparing the masked
5451 results. */
5452 ll_mask = const_binop (BIT_IOR_EXPR, ll_mask, rl_mask);
5453 lr_mask = const_binop (BIT_IOR_EXPR, lr_mask, rr_mask);
5454 if (lnbitsize == rnbitsize && xll_bitpos == xlr_bitpos)
5456 lhs = make_bit_field_ref (loc, ll_inner, lntype, lnbitsize, lnbitpos,
5457 ll_unsignedp || rl_unsignedp);
5458 if (! all_ones_mask_p (ll_mask, lnbitsize))
5459 lhs = build2 (BIT_AND_EXPR, lntype, lhs, ll_mask);
5461 rhs = make_bit_field_ref (loc, lr_inner, rntype, rnbitsize, rnbitpos,
5462 lr_unsignedp || rr_unsignedp);
5463 if (! all_ones_mask_p (lr_mask, rnbitsize))
5464 rhs = build2 (BIT_AND_EXPR, rntype, rhs, lr_mask);
5466 return build2_loc (loc, wanted_code, truth_type, lhs, rhs);
5469 /* There is still another way we can do something: If both pairs of
5470 fields being compared are adjacent, we may be able to make a wider
5471 field containing them both.
5473 Note that we still must mask the lhs/rhs expressions. Furthermore,
5474 the mask must be shifted to account for the shift done by
5475 make_bit_field_ref. */
5476 if ((ll_bitsize + ll_bitpos == rl_bitpos
5477 && lr_bitsize + lr_bitpos == rr_bitpos)
5478 || (ll_bitpos == rl_bitpos + rl_bitsize
5479 && lr_bitpos == rr_bitpos + rr_bitsize))
5481 tree type;
5483 lhs = make_bit_field_ref (loc, ll_inner, lntype,
5484 ll_bitsize + rl_bitsize,
5485 MIN (ll_bitpos, rl_bitpos), ll_unsignedp);
5486 rhs = make_bit_field_ref (loc, lr_inner, rntype,
5487 lr_bitsize + rr_bitsize,
5488 MIN (lr_bitpos, rr_bitpos), lr_unsignedp);
5490 ll_mask = const_binop (RSHIFT_EXPR, ll_mask,
5491 size_int (MIN (xll_bitpos, xrl_bitpos)));
5492 lr_mask = const_binop (RSHIFT_EXPR, lr_mask,
5493 size_int (MIN (xlr_bitpos, xrr_bitpos)));
5495 /* Convert to the smaller type before masking out unwanted bits. */
5496 type = lntype;
5497 if (lntype != rntype)
5499 if (lnbitsize > rnbitsize)
5501 lhs = fold_convert_loc (loc, rntype, lhs);
5502 ll_mask = fold_convert_loc (loc, rntype, ll_mask);
5503 type = rntype;
5505 else if (lnbitsize < rnbitsize)
5507 rhs = fold_convert_loc (loc, lntype, rhs);
5508 lr_mask = fold_convert_loc (loc, lntype, lr_mask);
5509 type = lntype;
5513 if (! all_ones_mask_p (ll_mask, ll_bitsize + rl_bitsize))
5514 lhs = build2 (BIT_AND_EXPR, type, lhs, ll_mask);
5516 if (! all_ones_mask_p (lr_mask, lr_bitsize + rr_bitsize))
5517 rhs = build2 (BIT_AND_EXPR, type, rhs, lr_mask);
5519 return build2_loc (loc, wanted_code, truth_type, lhs, rhs);
5522 return 0;
5525 /* Handle the case of comparisons with constants. If there is something in
5526 common between the masks, those bits of the constants must be the same.
5527 If not, the condition is always false. Test for this to avoid generating
5528 incorrect code below. */
5529 result = const_binop (BIT_AND_EXPR, ll_mask, rl_mask);
5530 if (! integer_zerop (result)
5531 && simple_cst_equal (const_binop (BIT_AND_EXPR, result, l_const),
5532 const_binop (BIT_AND_EXPR, result, r_const)) != 1)
5534 if (wanted_code == NE_EXPR)
5536 warning (0, "%<or%> of unmatched not-equal tests is always 1");
5537 return constant_boolean_node (true, truth_type);
5539 else
5541 warning (0, "%<and%> of mutually exclusive equal-tests is always 0");
5542 return constant_boolean_node (false, truth_type);
5546 /* Construct the expression we will return. First get the component
5547 reference we will make. Unless the mask is all ones the width of
5548 that field, perform the mask operation. Then compare with the
5549 merged constant. */
5550 result = make_bit_field_ref (loc, ll_inner, lntype, lnbitsize, lnbitpos,
5551 ll_unsignedp || rl_unsignedp);
5553 ll_mask = const_binop (BIT_IOR_EXPR, ll_mask, rl_mask);
5554 if (! all_ones_mask_p (ll_mask, lnbitsize))
5555 result = build2_loc (loc, BIT_AND_EXPR, lntype, result, ll_mask);
5557 return build2_loc (loc, wanted_code, truth_type, result,
5558 const_binop (BIT_IOR_EXPR, l_const, r_const));
5561 /* Optimize T, which is a comparison of a MIN_EXPR or MAX_EXPR with a
5562 constant. */
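/* Editorial illustration (added for exposition; not from the GCC
   sources).  The transformations below amount to, e.g.:

     MAX (X, 3) == 3  -->  X <= 3
     MAX (X, 3) == 2  -->  false        (MAX (X, 3) is at least 3)
     MIN (X, 3) > 4   -->  false        (MIN (X, 3) is at most 3)
     MIN (X, 3) > 2   -->  X > 2

   NE, LT and LE recurse on the inverted comparison; GE is split
   into EQ || GT.  */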
5564 static tree
5565 optimize_minmax_comparison (location_t loc, enum tree_code code, tree type,
5566 tree op0, tree op1)
5568 tree arg0 = op0;
5569 enum tree_code op_code;
5570 tree comp_const;
5571 tree minmax_const;
5572 int consts_equal, consts_lt;
5573 tree inner;
5575 STRIP_SIGN_NOPS (arg0);
5577 op_code = TREE_CODE (arg0);
5578 minmax_const = TREE_OPERAND (arg0, 1);
5579 comp_const = fold_convert_loc (loc, TREE_TYPE (arg0), op1);
5580 consts_equal = tree_int_cst_equal (minmax_const, comp_const);
5581 consts_lt = tree_int_cst_lt (minmax_const, comp_const);
5582 inner = TREE_OPERAND (arg0, 0);
5584 /* If something does not permit us to optimize, return NULL_TREE. */
5585 if ((op_code != MIN_EXPR && op_code != MAX_EXPR)
5586 || TREE_CODE (comp_const) != INTEGER_CST
5587 || TREE_OVERFLOW (comp_const)
5588 || TREE_CODE (minmax_const) != INTEGER_CST
5589 || TREE_OVERFLOW (minmax_const))
5590 return NULL_TREE;
5592 /* Now handle all the various comparison codes. We only handle EQ_EXPR
5593 and GT_EXPR, doing the rest with recursive calls using logical
5594 simplifications. */
5595 switch (code)
5597 case NE_EXPR: case LT_EXPR: case LE_EXPR:
5599 tree tem
5600 = optimize_minmax_comparison (loc,
5601 invert_tree_comparison (code, false),
5602 type, op0, op1);
5603 if (tem)
5604 return invert_truthvalue_loc (loc, tem);
5605 return NULL_TREE;
5608 case GE_EXPR:
5609 return
5610 fold_build2_loc (loc, TRUTH_ORIF_EXPR, type,
5611 optimize_minmax_comparison
5612 (loc, EQ_EXPR, type, arg0, comp_const),
5613 optimize_minmax_comparison
5614 (loc, GT_EXPR, type, arg0, comp_const));
5616 case EQ_EXPR:
5617 if (op_code == MAX_EXPR && consts_equal)
5618 /* MAX (X, 0) == 0 -> X <= 0 */
5619 return fold_build2_loc (loc, LE_EXPR, type, inner, comp_const);
5621 else if (op_code == MAX_EXPR && consts_lt)
5622 /* MAX (X, 0) == 5 -> X == 5 */
5623 return fold_build2_loc (loc, EQ_EXPR, type, inner, comp_const);
5625 else if (op_code == MAX_EXPR)
5626 /* MAX (X, 0) == -1 -> false */
5627 return omit_one_operand_loc (loc, type, integer_zero_node, inner);
5629 else if (consts_equal)
5630 /* MIN (X, 0) == 0 -> X >= 0 */
5631 return fold_build2_loc (loc, GE_EXPR, type, inner, comp_const);
5633 else if (consts_lt)
5634 /* MIN (X, 0) == 5 -> false */
5635 return omit_one_operand_loc (loc, type, integer_zero_node, inner);
5637 else
5638 /* MIN (X, 0) == -1 -> X == -1 */
5639 return fold_build2_loc (loc, EQ_EXPR, type, inner, comp_const);
5641 case GT_EXPR:
5642 if (op_code == MAX_EXPR && (consts_equal || consts_lt))
5643 /* MAX (X, 0) > 0 -> X > 0
5644 MAX (X, 0) > 5 -> X > 5 */
5645 return fold_build2_loc (loc, GT_EXPR, type, inner, comp_const);
5647 else if (op_code == MAX_EXPR)
5648 /* MAX (X, 0) > -1 -> true */
5649 return omit_one_operand_loc (loc, type, integer_one_node, inner);
5651 else if (op_code == MIN_EXPR && (consts_equal || consts_lt))
5652 /* MIN (X, 0) > 0 -> false
5653 MIN (X, 0) > 5 -> false */
5654 return omit_one_operand_loc (loc, type, integer_zero_node, inner);
5656 else
5657 /* MIN (X, 0) > -1 -> X > -1 */
5658 return fold_build2_loc (loc, GT_EXPR, type, inner, comp_const);
5660 default:
5661 return NULL_TREE;
5665 /* T is an integer expression that is being multiplied, divided, or taken a
5666 modulus (CODE says which and what kind of divide or modulus) by a
5667 constant C. See if we can eliminate that operation by folding it with
5668 other operations already in T. WIDE_TYPE, if non-null, is a type that
5669 should be used for the computation if wider than our type.
5671 For example, if we are dividing (X * 8) + (Y * 16) by 4, we can return
5672 (X * 2) + (Y * 4). We must, however, be assured that either the original
5673 expression would not overflow or that overflow is undefined for the type
5674 in the language in question.
5676 If we return a non-null expression, it is an equivalent form of the
5677 original computation, but need not be in the original type.
5679 We set *STRICT_OVERFLOW_P to true if the return value depends on
5680 signed overflow being undefined. Otherwise we do not change
5681 *STRICT_OVERFLOW_P. */
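/* Editorial illustration (added for exposition; not from the GCC
   sources).  For signed X with undefined overflow, (X * 4) / 4
   simplifies to X and sets *STRICT_OVERFLOW_P; for unsigned 32-bit X
   it must be left alone, since e.g. X = 0x40000001 gives X * 4 == 4
   after wrapping, so (X * 4) / 4 == 1 != X.  */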
5683 static tree
5684 extract_muldiv (tree t, tree c, enum tree_code code, tree wide_type,
5685 bool *strict_overflow_p)
5687 /* To avoid exponential search depth, refuse to allow recursion past
5688 three levels. Beyond that (1) it's highly unlikely that we'll find
5689 something interesting and (2) we've probably processed it before
5690 when we built the inner expression. */
5692 static int depth;
5693 tree ret;
5695 if (depth > 3)
5696 return NULL;
5698 depth++;
5699 ret = extract_muldiv_1 (t, c, code, wide_type, strict_overflow_p);
5700 depth--;
5702 return ret;
5705 static tree
5706 extract_muldiv_1 (tree t, tree c, enum tree_code code, tree wide_type,
5707 bool *strict_overflow_p)
5709 tree type = TREE_TYPE (t);
5710 enum tree_code tcode = TREE_CODE (t);
5711 tree ctype = (wide_type != 0 && (GET_MODE_SIZE (TYPE_MODE (wide_type))
5712 > GET_MODE_SIZE (TYPE_MODE (type)))
5713 ? wide_type : type);
5714 tree t1, t2;
5715 int same_p = tcode == code;
5716 tree op0 = NULL_TREE, op1 = NULL_TREE;
5717 bool sub_strict_overflow_p;
5719 /* Don't deal with constants of zero here; they confuse the code below. */
5720 if (integer_zerop (c))
5721 return NULL_TREE;
5723 if (TREE_CODE_CLASS (tcode) == tcc_unary)
5724 op0 = TREE_OPERAND (t, 0);
5726 if (TREE_CODE_CLASS (tcode) == tcc_binary)
5727 op0 = TREE_OPERAND (t, 0), op1 = TREE_OPERAND (t, 1);
5729 /* Note that we need not handle conditional operations here since fold
5730 already handles those cases. So just do arithmetic here. */
5731 switch (tcode)
5733 case INTEGER_CST:
5734 /* For a constant, we can always simplify if we are a multiply
5735 or (for divide and modulus) if it is a multiple of our constant. */
5736 if (code == MULT_EXPR
5737 || wi::multiple_of_p (t, c, TYPE_SIGN (type)))
5738 return const_binop (code, fold_convert (ctype, t),
5739 fold_convert (ctype, c));
5740 break;
5742 CASE_CONVERT: case NON_LVALUE_EXPR:
5743 /* If op0 is an expression ... */
5744 if ((COMPARISON_CLASS_P (op0)
5745 || UNARY_CLASS_P (op0)
5746 || BINARY_CLASS_P (op0)
5747 || VL_EXP_CLASS_P (op0)
5748 || EXPRESSION_CLASS_P (op0))
5749 /* ... and has wrapping overflow, and its type is smaller
5750 than ctype, then we cannot pass through as widening. */
5751 && ((TYPE_OVERFLOW_WRAPS (TREE_TYPE (op0))
5752 && (TYPE_PRECISION (ctype)
5753 > TYPE_PRECISION (TREE_TYPE (op0))))
5754 /* ... or this is a truncation (t is narrower than op0),
5755 then we cannot pass through this narrowing. */
5756 || (TYPE_PRECISION (type)
5757 < TYPE_PRECISION (TREE_TYPE (op0)))
5758 /* ... or signedness changes for division or modulus,
5759 then we cannot pass through this conversion. */
5760 || (code != MULT_EXPR
5761 && (TYPE_UNSIGNED (ctype)
5762 != TYPE_UNSIGNED (TREE_TYPE (op0))))
5763 /* ... or op0's type has undefined overflow while the converted-to
5764 type has not, in which case we cannot do the operation in the
5765 inner type, as that would introduce undefined overflow. */
5766 || (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (op0))
5767 && !TYPE_OVERFLOW_UNDEFINED (type))))
5768 break;
5770 /* Pass the constant down and see if we can make a simplification. If
5771 we can, replace this expression with the inner simplification for
5772 possible later conversion to our or some other type. */
5773 if ((t2 = fold_convert (TREE_TYPE (op0), c)) != 0
5774 && TREE_CODE (t2) == INTEGER_CST
5775 && !TREE_OVERFLOW (t2)
5776 && (0 != (t1 = extract_muldiv (op0, t2, code,
5777 code == MULT_EXPR
5778 ? ctype : NULL_TREE,
5779 strict_overflow_p))))
5780 return t1;
5781 break;
5783 case ABS_EXPR:
5784 /* If widening the type changes it from signed to unsigned, then we
5785 must avoid building ABS_EXPR itself as unsigned. */
5786 if (TYPE_UNSIGNED (ctype) && !TYPE_UNSIGNED (type))
5788 tree cstype = (*signed_type_for) (ctype);
5789 if ((t1 = extract_muldiv (op0, c, code, cstype, strict_overflow_p))
5790 != 0)
5792 t1 = fold_build1 (tcode, cstype, fold_convert (cstype, t1));
5793 return fold_convert (ctype, t1);
5795 break;
5797 /* If the constant is negative, we cannot simplify this. */
5798 if (tree_int_cst_sgn (c) == -1)
5799 break;
5800 /* FALLTHROUGH */
5801 case NEGATE_EXPR:
5802 /* For division and modulus, type can't be unsigned, as e.g.
5803 (-(x / 2U)) / 2U isn't equal to -((x / 2U) / 2U) for x >= 2.
5804 For signed types, even with wrapping overflow, this is fine. */
5805 if (code != MULT_EXPR && TYPE_UNSIGNED (type))
5806 break;
5807 if ((t1 = extract_muldiv (op0, c, code, wide_type, strict_overflow_p))
5808 != 0)
5809 return fold_build1 (tcode, ctype, fold_convert (ctype, t1));
5810 break;
5812 case MIN_EXPR: case MAX_EXPR:
5813 /* If widening the type changes the signedness, then we can't perform
5814 this optimization as that changes the result. */
5815 if (TYPE_UNSIGNED (ctype) != TYPE_UNSIGNED (type))
5816 break;
5818 /* MIN (a, b) / 5 -> MIN (a / 5, b / 5) */
5819 sub_strict_overflow_p = false;
5820 if ((t1 = extract_muldiv (op0, c, code, wide_type,
5821 &sub_strict_overflow_p)) != 0
5822 && (t2 = extract_muldiv (op1, c, code, wide_type,
5823 &sub_strict_overflow_p)) != 0)
5825 if (tree_int_cst_sgn (c) < 0)
5826 tcode = (tcode == MIN_EXPR ? MAX_EXPR : MIN_EXPR);
5827 if (sub_strict_overflow_p)
5828 *strict_overflow_p = true;
5829 return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
5830 fold_convert (ctype, t2));
5832 break;
5834 case LSHIFT_EXPR: case RSHIFT_EXPR:
5835 /* If the second operand is constant, this is a multiplication
5836 or floor division, by a power of two, so we can treat it that
5837 way unless the multiplier or divisor overflows. Signed
5838 left-shift overflow is implementation-defined rather than
5839 undefined in C90, so do not convert signed left shift into
5840 multiplication. */
5841 if (TREE_CODE (op1) == INTEGER_CST
5842 && (tcode == RSHIFT_EXPR || TYPE_UNSIGNED (TREE_TYPE (op0)))
5843 /* const_binop may not detect overflow correctly,
5844 so check for it explicitly here. */
5845 && wi::gtu_p (TYPE_PRECISION (TREE_TYPE (size_one_node)), op1)
5846 && 0 != (t1 = fold_convert (ctype,
5847 const_binop (LSHIFT_EXPR,
5848 size_one_node,
5849 op1)))
5850 && !TREE_OVERFLOW (t1))
5851 return extract_muldiv (build2 (tcode == LSHIFT_EXPR
5852 ? MULT_EXPR : FLOOR_DIV_EXPR,
5853 ctype,
5854 fold_convert (ctype, op0),
5855 t1),
5856 c, code, wide_type, strict_overflow_p);
5857 break;
5859 case PLUS_EXPR: case MINUS_EXPR:
5860 /* See if we can eliminate the operation on both sides. If we can, we
5861 can return a new PLUS or MINUS. If we can't, the only remaining
5862 cases where we can do anything are if the second operand is a
5863 constant. */
5864 sub_strict_overflow_p = false;
5865 t1 = extract_muldiv (op0, c, code, wide_type, &sub_strict_overflow_p);
5866 t2 = extract_muldiv (op1, c, code, wide_type, &sub_strict_overflow_p);
5867 if (t1 != 0 && t2 != 0
5868 && (code == MULT_EXPR
5869 /* If not multiplication, we can only do this if both operands
5870 are divisible by c. */
5871 || (multiple_of_p (ctype, op0, c)
5872 && multiple_of_p (ctype, op1, c))))
5874 if (sub_strict_overflow_p)
5875 *strict_overflow_p = true;
5876 return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
5877 fold_convert (ctype, t2));
5880 /* If this was a subtraction, negate OP1 and set it to be an addition.
5881 This simplifies the logic below. */
5882 if (tcode == MINUS_EXPR)
5884 tcode = PLUS_EXPR, op1 = negate_expr (op1);
5885 /* If OP1 was not easily negatable, the constant may be OP0. */
5886 if (TREE_CODE (op0) == INTEGER_CST)
5888 tree tem = op0;
5889 op0 = op1;
5890 op1 = tem;
5891 tem = t1;
5892 t1 = t2;
5893 t2 = tem;
5897 if (TREE_CODE (op1) != INTEGER_CST)
5898 break;
5900 /* If either OP1 or C is negative, this optimization is not safe for
5901 some of the division and remainder types while for others we need
5902 to change the code. */
5903 if (tree_int_cst_sgn (op1) < 0 || tree_int_cst_sgn (c) < 0)
5905 if (code == CEIL_DIV_EXPR)
5906 code = FLOOR_DIV_EXPR;
5907 else if (code == FLOOR_DIV_EXPR)
5908 code = CEIL_DIV_EXPR;
5909 else if (code != MULT_EXPR
5910 && code != CEIL_MOD_EXPR && code != FLOOR_MOD_EXPR)
5911 break;
5914 /* If it's a multiply or a division/modulus operation of a multiple
5915 of our constant, do the operation and verify it doesn't overflow. */
5916 if (code == MULT_EXPR
5917 || wi::multiple_of_p (op1, c, TYPE_SIGN (type)))
5919 op1 = const_binop (code, fold_convert (ctype, op1),
5920 fold_convert (ctype, c));
5921 /* We allow the constant to overflow with wrapping semantics. */
5922 if (op1 == 0
5923 || (TREE_OVERFLOW (op1) && !TYPE_OVERFLOW_WRAPS (ctype)))
5924 break;
5926 else
5927 break;
5929 /* If we have an unsigned type, we cannot widen the operation since it
5930 will change the result if the original computation overflowed. */
5931 if (TYPE_UNSIGNED (ctype) && ctype != type)
5932 break;
5934 /* If we were able to eliminate our operation from the first side,
5935 apply our operation to the second side and reform the PLUS. */
5936 if (t1 != 0 && (TREE_CODE (t1) != code || code == MULT_EXPR))
5937 return fold_build2 (tcode, ctype, fold_convert (ctype, t1), op1);
5939 /* The last case is if we are a multiply. In that case, we can
5940 apply the distributive law to commute the multiply and addition
5941 if the multiplication of the constants doesn't overflow
5942 and overflow is defined. With undefined overflow
5943 op0 * c might overflow, while (op0 + orig_op1) * c doesn't. */
5944 if (code == MULT_EXPR && TYPE_OVERFLOW_WRAPS (ctype))
5945 return fold_build2 (tcode, ctype,
5946 fold_build2 (code, ctype,
5947 fold_convert (ctype, op0),
5948 fold_convert (ctype, c)),
5949 op1);
5951 break;
5953 case MULT_EXPR:
5954 /* We have a special case here if we are doing something like
5955 (C * 8) % 4 since we know that's zero. */
5956 if ((code == TRUNC_MOD_EXPR || code == CEIL_MOD_EXPR
5957 || code == FLOOR_MOD_EXPR || code == ROUND_MOD_EXPR)
5958 /* If the multiplication can overflow we cannot optimize this. */
5959 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (t))
5960 && TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST
5961 && wi::multiple_of_p (op1, c, TYPE_SIGN (type)))
5963 *strict_overflow_p = true;
5964 return omit_one_operand (type, integer_zero_node, op0);
5967 /* ... fall through ... */
5969 case TRUNC_DIV_EXPR: case CEIL_DIV_EXPR: case FLOOR_DIV_EXPR:
5970 case ROUND_DIV_EXPR: case EXACT_DIV_EXPR:
5971 /* If we can extract our operation from the LHS, do so and return a
5972 new operation. Likewise for the RHS from a MULT_EXPR. Otherwise,
5973 do something only if the second operand is a constant. */
5974 if (same_p
5975 && (t1 = extract_muldiv (op0, c, code, wide_type,
5976 strict_overflow_p)) != 0)
5977 return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
5978 fold_convert (ctype, op1));
5979 else if (tcode == MULT_EXPR && code == MULT_EXPR
5980 && (t1 = extract_muldiv (op1, c, code, wide_type,
5981 strict_overflow_p)) != 0)
5982 return fold_build2 (tcode, ctype, fold_convert (ctype, op0),
5983 fold_convert (ctype, t1));
5984 else if (TREE_CODE (op1) != INTEGER_CST)
5985 return 0;
5987 /* If these are the same operation types, we can associate them
5988 assuming no overflow. */
5989 if (tcode == code)
5991 bool overflow_p = false;
5992 bool overflow_mul_p;
5993 signop sign = TYPE_SIGN (ctype);
5994 wide_int mul = wi::mul (op1, c, sign, &overflow_mul_p);
5995 overflow_p = TREE_OVERFLOW (c) | TREE_OVERFLOW (op1);
5996 if (overflow_mul_p
5997 && ((sign == UNSIGNED && tcode != MULT_EXPR) || sign == SIGNED))
5998 overflow_p = true;
5999 if (!overflow_p)
6000 return fold_build2 (tcode, ctype, fold_convert (ctype, op0),
6001 wide_int_to_tree (ctype, mul));
6004 /* If these operations "cancel" each other, we have the main
6005 optimizations of this pass, which occur when either constant is a
6006 multiple of the other, in which case we replace this with either an
6007 operation of CODE or TCODE.
6009 If we have an unsigned type, we cannot do this since it will change
6010 the result if the original computation overflowed. */
6011 if (TYPE_OVERFLOW_UNDEFINED (ctype)
6012 && ((code == MULT_EXPR && tcode == EXACT_DIV_EXPR)
6013 || (tcode == MULT_EXPR
6014 && code != TRUNC_MOD_EXPR && code != CEIL_MOD_EXPR
6015 && code != FLOOR_MOD_EXPR && code != ROUND_MOD_EXPR
6016 && code != MULT_EXPR)))
6018 if (wi::multiple_of_p (op1, c, TYPE_SIGN (type)))
6020 if (TYPE_OVERFLOW_UNDEFINED (ctype))
6021 *strict_overflow_p = true;
6022 return fold_build2 (tcode, ctype, fold_convert (ctype, op0),
6023 fold_convert (ctype,
6024 const_binop (TRUNC_DIV_EXPR,
6025 op1, c)));
6027 else if (wi::multiple_of_p (c, op1, TYPE_SIGN (type)))
6029 if (TYPE_OVERFLOW_UNDEFINED (ctype))
6030 *strict_overflow_p = true;
6031 return fold_build2 (code, ctype, fold_convert (ctype, op0),
6032 fold_convert (ctype,
6033 const_binop (TRUNC_DIV_EXPR,
6034 c, op1)));
6037 break;
6039 default:
6040 break;
6043 return 0;
6046 /* Return a node which has the indicated constant VALUE (either 0 or
6047 1 for scalars or {-1,-1,..} or {0,0,...} for vectors),
6048 and is of the indicated TYPE. */
6050 tree
6051 constant_boolean_node (bool value, tree type)
6053 if (type == integer_type_node)
6054 return value ? integer_one_node : integer_zero_node;
6055 else if (type == boolean_type_node)
6056 return value ? boolean_true_node : boolean_false_node;
6057 else if (TREE_CODE (type) == VECTOR_TYPE)
6058 return build_vector_from_val (type,
6059 build_int_cst (TREE_TYPE (type),
6060 value ? -1 : 0));
6061 else
6062 return fold_convert (type, value ? integer_one_node : integer_zero_node);
6066 /* Transform `a + (b ? x : y)' into `b ? (a + x) : (a + y)'.
6067 Transform `a + (x < y)' into `(x < y) ? (a + 1) : (a + 0)'. Here
6068 CODE corresponds to the `+', COND to the `(b ? x : y)' or `(x < y)'
6069 expression, and ARG to `a'. If COND_FIRST_P is nonzero, then the
6070 COND is the first argument to CODE; otherwise (as in the example
6071 given here), it is the second argument. TYPE is the type of the
6072 original expression. Return NULL_TREE if no simplification is
6073 possible. */
6075 static tree
6076 fold_binary_op_with_conditional_arg (location_t loc,
6077 enum tree_code code,
6078 tree type, tree op0, tree op1,
6079 tree cond, tree arg, int cond_first_p)
6081 tree cond_type = cond_first_p ? TREE_TYPE (op0) : TREE_TYPE (op1);
6082 tree arg_type = cond_first_p ? TREE_TYPE (op1) : TREE_TYPE (op0);
6083 tree test, true_value, false_value;
6084 tree lhs = NULL_TREE;
6085 tree rhs = NULL_TREE;
6086 enum tree_code cond_code = COND_EXPR;
6088 if (TREE_CODE (cond) == COND_EXPR
6089 || TREE_CODE (cond) == VEC_COND_EXPR)
6091 test = TREE_OPERAND (cond, 0);
6092 true_value = TREE_OPERAND (cond, 1);
6093 false_value = TREE_OPERAND (cond, 2);
6094 /* If this operand throws an exception, then it does not make
6095 sense to try to perform a logical or arithmetic operation
6096 involving it. */
6097 if (VOID_TYPE_P (TREE_TYPE (true_value)))
6098 lhs = true_value;
6099 if (VOID_TYPE_P (TREE_TYPE (false_value)))
6100 rhs = false_value;
6102 else
6104 tree testtype = TREE_TYPE (cond);
6105 test = cond;
6106 true_value = constant_boolean_node (true, testtype);
6107 false_value = constant_boolean_node (false, testtype);
6110 if (TREE_CODE (TREE_TYPE (test)) == VECTOR_TYPE)
6111 cond_code = VEC_COND_EXPR;
6113 /* This transformation is only worthwhile if we don't have to wrap ARG
6114 in a SAVE_EXPR and the operation can be simplified without recursing
6115 on at least one of the branches once it's pushed inside the COND_EXPR. */
6116 if (!TREE_CONSTANT (arg)
6117 && (TREE_SIDE_EFFECTS (arg)
6118 || TREE_CODE (arg) == COND_EXPR || TREE_CODE (arg) == VEC_COND_EXPR
6119 || TREE_CONSTANT (true_value) || TREE_CONSTANT (false_value)))
6120 return NULL_TREE;
6122 arg = fold_convert_loc (loc, arg_type, arg);
6123 if (lhs == 0)
6125 true_value = fold_convert_loc (loc, cond_type, true_value);
6126 if (cond_first_p)
6127 lhs = fold_build2_loc (loc, code, type, true_value, arg);
6128 else
6129 lhs = fold_build2_loc (loc, code, type, arg, true_value);
6131 if (rhs == 0)
6133 false_value = fold_convert_loc (loc, cond_type, false_value);
6134 if (cond_first_p)
6135 rhs = fold_build2_loc (loc, code, type, false_value, arg);
6136 else
6137 rhs = fold_build2_loc (loc, code, type, arg, false_value);
6140 /* Check that we have simplified at least one of the branches. */
6141 if (!TREE_CONSTANT (arg) && !TREE_CONSTANT (lhs) && !TREE_CONSTANT (rhs))
6142 return NULL_TREE;
6144 return fold_build3_loc (loc, cond_code, type, test, lhs, rhs);
6148 /* Subroutine of fold() that checks for the addition of +/- 0.0.
6150 If !NEGATE, return true if ADDEND is +/-0.0 and, for all X of type
6151 TYPE, X + ADDEND is the same as X. If NEGATE, return true if X -
6152 ADDEND is the same as X.
6154 X + 0 and X - 0 both give X when X is NaN, infinite, or nonzero
6155 and finite. The problematic cases are when X is zero, and its mode
6156 has signed zeros. In the case of rounding towards -infinity,
6157 X - 0 is not the same as X because 0 - 0 is -0. In other rounding
6158 modes, X + 0 is not the same as X because -0 + 0 is 0. */
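/* Editorial note (added for exposition; not from the GCC sources):
   under round-to-nearest, -0.0 + 0.0 yields +0.0, so X + 0.0 cannot
   be folded to X once signed zeros are honored.  X - 0.0 still can,
   unless rounding towards -infinity is in effect, where 0.0 - 0.0
   yields -0.0.  */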
6160 bool
6161 fold_real_zero_addition_p (const_tree type, const_tree addend, int negate)
6163 if (!real_zerop (addend))
6164 return false;
6166 /* Don't allow the fold with -fsignaling-nans. */
6167 if (HONOR_SNANS (TYPE_MODE (type)))
6168 return false;
6170 /* Allow the fold if zeros aren't signed, or their sign isn't important. */
6171 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (type)))
6172 return true;
6174 /* In a vector or complex, we would need to check the sign of all zeros. */
6175 if (TREE_CODE (addend) != REAL_CST)
6176 return false;
6178 /* Treat x + -0 as x - 0 and x - -0 as x + 0. */
6179 if (REAL_VALUE_MINUS_ZERO (TREE_REAL_CST (addend)))
6180 negate = !negate;
6182 /* The mode has signed zeros, and we have to honor their sign.
6183 In this situation, there is only one case we can return true for.
6184 X - 0 is the same as X unless rounding towards -infinity is
6185 supported. */
6186 return negate && !HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type));
6189 /* Subroutine of fold() that checks comparisons of built-in math
6190 functions against real constants.
6192 FCODE is the DECL_FUNCTION_CODE of the built-in, CODE is the comparison
6193 operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR, GE_EXPR or LE_EXPR. TYPE
6194 is the type of the result and ARG0 and ARG1 are the operands of the
6195 comparison. ARG1 must be a TREE_REAL_CST.
6197 The function returns the constant folded tree if a simplification
6198 can be made, and NULL_TREE otherwise. */
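/* Editorial illustration (added for exposition; not from the GCC
   sources).  Typical folds performed below, for sqrt:

     sqrt(x) > 2.0   -->  x > 4.0
     sqrt(x) < 2.0   -->  x >= 0.0 && x < 4.0  (when NaNs are honored)
     sqrt(x) < -1.0  -->  false                (sqrt is never negative)  */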
6200 static tree
6201 fold_mathfn_compare (location_t loc,
6202 enum built_in_function fcode, enum tree_code code,
6203 tree type, tree arg0, tree arg1)
6205 REAL_VALUE_TYPE c;
6207 if (BUILTIN_SQRT_P (fcode))
6209 tree arg = CALL_EXPR_ARG (arg0, 0);
6210 enum machine_mode mode = TYPE_MODE (TREE_TYPE (arg0));
6212 c = TREE_REAL_CST (arg1);
6213 if (REAL_VALUE_NEGATIVE (c))
6215 /* sqrt(x) < y is always false, if y is negative. */
6216 if (code == EQ_EXPR || code == LT_EXPR || code == LE_EXPR)
6217 return omit_one_operand_loc (loc, type, integer_zero_node, arg);
6219 /* sqrt(x) > y is always true, if y is negative and we
6220 don't care about NaNs, i.e. negative values of x. */
6221 if (code == NE_EXPR || !HONOR_NANS (mode))
6222 return omit_one_operand_loc (loc, type, integer_one_node, arg);
6224 /* sqrt(x) > y is the same as x >= 0, if y is negative. */
6225 return fold_build2_loc (loc, GE_EXPR, type, arg,
6226 build_real (TREE_TYPE (arg), dconst0));
6228 else if (code == GT_EXPR || code == GE_EXPR)
6230 REAL_VALUE_TYPE c2;
6232 REAL_ARITHMETIC (c2, MULT_EXPR, c, c);
6233 real_convert (&c2, mode, &c2);
6235 if (REAL_VALUE_ISINF (c2))
6237 /* sqrt(x) > y is x == +Inf, when y is very large. */
6238 if (HONOR_INFINITIES (mode))
6239 return fold_build2_loc (loc, EQ_EXPR, type, arg,
6240 build_real (TREE_TYPE (arg), c2));
6242 /* sqrt(x) > y is always false, when y is very large
6243 and we don't care about infinities. */
6244 return omit_one_operand_loc (loc, type, integer_zero_node, arg);
6247 /* sqrt(x) > c is the same as x > c*c. */
6248 return fold_build2_loc (loc, code, type, arg,
6249 build_real (TREE_TYPE (arg), c2));
6251 else if (code == LT_EXPR || code == LE_EXPR)
6253 REAL_VALUE_TYPE c2;
6255 REAL_ARITHMETIC (c2, MULT_EXPR, c, c);
6256 real_convert (&c2, mode, &c2);
6258 if (REAL_VALUE_ISINF (c2))
6260 /* sqrt(x) < y is always true, when y is a very large
6261 value and we don't care about NaNs or Infinities. */
6262 if (! HONOR_NANS (mode) && ! HONOR_INFINITIES (mode))
6263 return omit_one_operand_loc (loc, type, integer_one_node, arg);
6265 /* sqrt(x) < y is x != +Inf when y is very large and we
6266 don't care about NaNs. */
6267 if (! HONOR_NANS (mode))
6268 return fold_build2_loc (loc, NE_EXPR, type, arg,
6269 build_real (TREE_TYPE (arg), c2));
6271 /* sqrt(x) < y is x >= 0 when y is very large and we
6272 don't care about Infinities. */
6273 if (! HONOR_INFINITIES (mode))
6274 return fold_build2_loc (loc, GE_EXPR, type, arg,
6275 build_real (TREE_TYPE (arg), dconst0));
6277 /* sqrt(x) < y is x >= 0 && x != +Inf, when y is large. */
6278 arg = save_expr (arg);
6279 return fold_build2_loc (loc, TRUTH_ANDIF_EXPR, type,
6280 fold_build2_loc (loc, GE_EXPR, type, arg,
6281 build_real (TREE_TYPE (arg),
6282 dconst0)),
6283 fold_build2_loc (loc, NE_EXPR, type, arg,
6284 build_real (TREE_TYPE (arg),
6285 c2)));
6288 /* sqrt(x) < c is the same as x < c*c, if we ignore NaNs. */
6289 if (! HONOR_NANS (mode))
6290 return fold_build2_loc (loc, code, type, arg,
6291 build_real (TREE_TYPE (arg), c2));
6293 /* sqrt(x) < c is the same as x >= 0 && x < c*c. */
6294 arg = save_expr (arg);
6295 return fold_build2_loc (loc, TRUTH_ANDIF_EXPR, type,
6296 fold_build2_loc (loc, GE_EXPR, type, arg,
6297 build_real (TREE_TYPE (arg),
6298 dconst0)),
6299 fold_build2_loc (loc, code, type, arg,
6300 build_real (TREE_TYPE (arg),
6301 c2)));
6305 return NULL_TREE;
6308 /* Subroutine of fold() that optimizes comparisons against Infinities,
6309 either +Inf or -Inf.
6311 CODE is the comparison operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR,
6312 GE_EXPR or LE_EXPR. TYPE is the type of the result and ARG0 and ARG1
6313 are the operands of the comparison. ARG1 must be a TREE_REAL_CST.
6315 The function returns the constant folded tree if a simplification
6316 can be made, and NULL_TREE otherwise. */
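/* Editorial illustration (added for exposition; not from the GCC
   sources).  For doubles this produces, e.g.:

     x >= +Inf  -->  x > DBL_MAX
     x <  +Inf  -->  x <= DBL_MAX
     x <= +Inf  -->  x == x          (i.e. ! isnan (x), when NaNs matter)

   Comparisons against -Inf are first turned around via
   swap_tree_comparison.  */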
6318 static tree
6319 fold_inf_compare (location_t loc, enum tree_code code, tree type,
6320 tree arg0, tree arg1)
6322 enum machine_mode mode;
6323 REAL_VALUE_TYPE max;
6324 tree temp;
6325 bool neg;
6327 mode = TYPE_MODE (TREE_TYPE (arg0));
6329 /* For negative infinity swap the sense of the comparison. */
6330 neg = REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg1));
6331 if (neg)
6332 code = swap_tree_comparison (code);
6334 switch (code)
6336 case GT_EXPR:
6337 /* x > +Inf is always false, if we ignore sNaNs. */
6338 if (HONOR_SNANS (mode))
6339 return NULL_TREE;
6340 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
6342 case LE_EXPR:
6343 /* x <= +Inf is always true, if we don't care about NaNs. */
6344 if (! HONOR_NANS (mode))
6345 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
6347 /* x <= +Inf is the same as x == x, i.e. ! isnan (x). */
6348 arg0 = save_expr (arg0);
6349 return fold_build2_loc (loc, EQ_EXPR, type, arg0, arg0);
6351 case EQ_EXPR:
6352 case GE_EXPR:
6353 /* x == +Inf and x >= +Inf are always equal to x > DBL_MAX. */
6354 real_maxval (&max, neg, mode);
6355 return fold_build2_loc (loc, neg ? LT_EXPR : GT_EXPR, type,
6356 arg0, build_real (TREE_TYPE (arg0), max));
6358 case LT_EXPR:
6359 /* x < +Inf is always equal to x <= DBL_MAX. */
6360 real_maxval (&max, neg, mode);
6361 return fold_build2_loc (loc, neg ? GE_EXPR : LE_EXPR, type,
6362 arg0, build_real (TREE_TYPE (arg0), max));
6364 case NE_EXPR:
6365 /* x != +Inf is always equal to !(x > DBL_MAX). */
6366 real_maxval (&max, neg, mode);
6367 if (! HONOR_NANS (mode))
6368 return fold_build2_loc (loc, neg ? GE_EXPR : LE_EXPR, type,
6369 arg0, build_real (TREE_TYPE (arg0), max));
6371 temp = fold_build2_loc (loc, neg ? LT_EXPR : GT_EXPR, type,
6372 arg0, build_real (TREE_TYPE (arg0), max));
6373 return fold_build1_loc (loc, TRUTH_NOT_EXPR, type, temp);
6375 default:
6376 break;
6379 return NULL_TREE;
6382 /* Subroutine of fold() that optimizes comparisons of a division by
6383 a nonzero integer constant against an integer constant, i.e.
6384 X/C1 op C2.
6386 CODE is the comparison operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR,
6387 GE_EXPR or LE_EXPR. TYPE is the type of the result and ARG0 and ARG1
6388 are the operands of the comparison. ARG1 must be an INTEGER_CST.
6390 The function returns the constant folded tree if a simplification
6391 can be made, and NULL_TREE otherwise. */
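/* Editorial illustration (added for exposition; not from the GCC
   sources).  For unsigned X,

     X / 4 == 2  -->  8 <= X && X <= 11

   since exactly the values 8..11 divide to 2.  LO and HI below are
   these range bounds; overflow while computing them settles the
   degenerate cases.  */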
6393 static tree
6394 fold_div_compare (location_t loc,
6395 enum tree_code code, tree type, tree arg0, tree arg1)
6397 tree prod, tmp, hi, lo;
6398 tree arg00 = TREE_OPERAND (arg0, 0);
6399 tree arg01 = TREE_OPERAND (arg0, 1);
6400 signop sign = TYPE_SIGN (TREE_TYPE (arg0));
6401 bool neg_overflow = false;
6402 bool overflow;
6404 /* We have to do this the hard way to detect unsigned overflow.
6405 prod = int_const_binop (MULT_EXPR, arg01, arg1); */
6406 wide_int val = wi::mul (arg01, arg1, sign, &overflow);
6407 prod = force_fit_type (TREE_TYPE (arg00), val, -1, overflow);
6408 neg_overflow = false;
6410 if (sign == UNSIGNED)
6412 tmp = int_const_binop (MINUS_EXPR, arg01,
6413 build_int_cst (TREE_TYPE (arg01), 1));
6414 lo = prod;
6416 /* Likewise hi = int_const_binop (PLUS_EXPR, prod, tmp). */
6417 val = wi::add (prod, tmp, sign, &overflow);
6418 hi = force_fit_type (TREE_TYPE (arg00), val,
6419 -1, overflow | TREE_OVERFLOW (prod));
6421 else if (tree_int_cst_sgn (arg01) >= 0)
6423 tmp = int_const_binop (MINUS_EXPR, arg01,
6424 build_int_cst (TREE_TYPE (arg01), 1));
6425 switch (tree_int_cst_sgn (arg1))
6427 case -1:
6428 neg_overflow = true;
6429 lo = int_const_binop (MINUS_EXPR, prod, tmp);
6430 hi = prod;
6431 break;
6433 case 0:
6434 lo = fold_negate_const (tmp, TREE_TYPE (arg0));
6435 hi = tmp;
6436 break;
6438 case 1:
6439 hi = int_const_binop (PLUS_EXPR, prod, tmp);
6440 lo = prod;
6441 break;
6443 default:
6444 gcc_unreachable ();
6447 else
6449 /* A negative divisor reverses the relational operators. */
6450 code = swap_tree_comparison (code);
6452 tmp = int_const_binop (PLUS_EXPR, arg01,
6453 build_int_cst (TREE_TYPE (arg01), 1));
6454 switch (tree_int_cst_sgn (arg1))
6456 case -1:
6457 hi = int_const_binop (MINUS_EXPR, prod, tmp);
6458 lo = prod;
6459 break;
6461 case 0:
6462 hi = fold_negate_const (tmp, TREE_TYPE (arg0));
6463 lo = tmp;
6464 break;
6466 case 1:
6467 neg_overflow = true;
6468 lo = int_const_binop (PLUS_EXPR, prod, tmp);
6469 hi = prod;
6470 break;
6472 default:
6473 gcc_unreachable ();
6477 switch (code)
6479 case EQ_EXPR:
6480 if (TREE_OVERFLOW (lo) && TREE_OVERFLOW (hi))
6481 return omit_one_operand_loc (loc, type, integer_zero_node, arg00);
6482 if (TREE_OVERFLOW (hi))
6483 return fold_build2_loc (loc, GE_EXPR, type, arg00, lo);
6484 if (TREE_OVERFLOW (lo))
6485 return fold_build2_loc (loc, LE_EXPR, type, arg00, hi);
6486 return build_range_check (loc, type, arg00, 1, lo, hi);
6488 case NE_EXPR:
6489 if (TREE_OVERFLOW (lo) && TREE_OVERFLOW (hi))
6490 return omit_one_operand_loc (loc, type, integer_one_node, arg00);
6491 if (TREE_OVERFLOW (hi))
6492 return fold_build2_loc (loc, LT_EXPR, type, arg00, lo);
6493 if (TREE_OVERFLOW (lo))
6494 return fold_build2_loc (loc, GT_EXPR, type, arg00, hi);
6495 return build_range_check (loc, type, arg00, 0, lo, hi);
6497 case LT_EXPR:
6498 if (TREE_OVERFLOW (lo))
6500 tmp = neg_overflow ? integer_zero_node : integer_one_node;
6501 return omit_one_operand_loc (loc, type, tmp, arg00);
6503 return fold_build2_loc (loc, LT_EXPR, type, arg00, lo);
6505 case LE_EXPR:
6506 if (TREE_OVERFLOW (hi))
6508 tmp = neg_overflow ? integer_zero_node : integer_one_node;
6509 return omit_one_operand_loc (loc, type, tmp, arg00);
6511 return fold_build2_loc (loc, LE_EXPR, type, arg00, hi);
6513 case GT_EXPR:
6514 if (TREE_OVERFLOW (hi))
6516 tmp = neg_overflow ? integer_one_node : integer_zero_node;
6517 return omit_one_operand_loc (loc, type, tmp, arg00);
6519 return fold_build2_loc (loc, GT_EXPR, type, arg00, hi);
6521 case GE_EXPR:
6522 if (TREE_OVERFLOW (lo))
6524 tmp = neg_overflow ? integer_one_node : integer_zero_node;
6525 return omit_one_operand_loc (loc, type, tmp, arg00);
6527 return fold_build2_loc (loc, GE_EXPR, type, arg00, lo);
6529 default:
6530 break;
6533 return NULL_TREE;
6537 /* If CODE with arguments ARG0 and ARG1 represents a single bit
6538 equality/inequality test, then return a simplified form of the test
6539 using a sign test. Otherwise return NULL. TYPE is the desired
6540 result type. */
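/* Editorial illustration (added for exposition; not from the GCC
   sources).  With a 32-bit int A:

     (A & 0x80000000) != 0  -->  A < 0
     (A & 0x80000000) == 0  -->  A >= 0

   after casting A to the corresponding signed type.  */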
6542 static tree
6543 fold_single_bit_test_into_sign_test (location_t loc,
6544 enum tree_code code, tree arg0, tree arg1,
6545 tree result_type)
6547 /* If this is testing a single bit, we can optimize the test. */
6548 if ((code == NE_EXPR || code == EQ_EXPR)
6549 && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
6550 && integer_pow2p (TREE_OPERAND (arg0, 1)))
6552 /* If we have (A & C) != 0 where C is the sign bit of A, convert
6553 this into A < 0. Similarly for (A & C) == 0 into A >= 0. */
6554 tree arg00 = sign_bit_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg0, 1));
6556 if (arg00 != NULL_TREE
6557 /* This is only a win if casting to a signed type is cheap,
6558 i.e. when arg00's type is not a partial mode. */
6559 && TYPE_PRECISION (TREE_TYPE (arg00))
6560 == GET_MODE_PRECISION (TYPE_MODE (TREE_TYPE (arg00))))
6562 tree stype = signed_type_for (TREE_TYPE (arg00));
6563 return fold_build2_loc (loc, code == EQ_EXPR ? GE_EXPR : LT_EXPR,
6564 result_type,
6565 fold_convert_loc (loc, stype, arg00),
6566 build_int_cst (stype, 0));
6570 return NULL_TREE;
6573 /* If CODE with arguments ARG0 and ARG1 represents a single bit
6574 equality/inequality test, then return a simplified form of
6575 the test using shifts and logical operations. Otherwise return
6576 NULL. TYPE is the desired result type. */
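/* Editorial illustration (added for exposition; not from the GCC
   sources):

     (A & 8) != 0  -->  (A >> 3) & 1
     (A & 8) == 0  -->  ((A >> 3) ^ 1) & 1

   computed in a (usually unsigned) intermediate type of the same
   mode.  */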
6578 tree
6579 fold_single_bit_test (location_t loc, enum tree_code code,
6580 tree arg0, tree arg1, tree result_type)
6582 /* If this is testing a single bit, we can optimize the test. */
6583 if ((code == NE_EXPR || code == EQ_EXPR)
6584 && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
6585 && integer_pow2p (TREE_OPERAND (arg0, 1)))
6587 tree inner = TREE_OPERAND (arg0, 0);
6588 tree type = TREE_TYPE (arg0);
6589 int bitnum = tree_log2 (TREE_OPERAND (arg0, 1));
6590 enum machine_mode operand_mode = TYPE_MODE (type);
6591 int ops_unsigned;
6592 tree signed_type, unsigned_type, intermediate_type;
6593 tree tem, one;
6595 /* First, see if we can fold the single bit test into a sign-bit
6596 test. */
6597 tem = fold_single_bit_test_into_sign_test (loc, code, arg0, arg1,
6598 result_type);
6599 if (tem)
6600 return tem;
6602 /* Otherwise we have (A & C) != 0 where C is a single bit,
6603 convert that into ((A >> C2) & 1), where C2 = log2(C).
6604 Similarly for (A & C) == 0. */
6606 /* If INNER is a right shift by a constant and it plus BITNUM does
6607 not overflow, adjust BITNUM and INNER. */
6608 if (TREE_CODE (inner) == RSHIFT_EXPR
6609 && TREE_CODE (TREE_OPERAND (inner, 1)) == INTEGER_CST
6610 && bitnum < TYPE_PRECISION (type)
6611 && wi::ltu_p (TREE_OPERAND (inner, 1),
6612 TYPE_PRECISION (type) - bitnum))
6614 bitnum += tree_to_uhwi (TREE_OPERAND (inner, 1));
6615 inner = TREE_OPERAND (inner, 0);
6618 /* If we are going to be able to omit the AND below, we must do our
6619 operations as unsigned. If we must use the AND, we have a choice.
6620 Normally unsigned is faster, but for some machines signed is. */
6621 #ifdef LOAD_EXTEND_OP
6622 ops_unsigned = (LOAD_EXTEND_OP (operand_mode) == SIGN_EXTEND
6623 && !flag_syntax_only) ? 0 : 1;
6624 #else
6625 ops_unsigned = 1;
6626 #endif
6628 signed_type = lang_hooks.types.type_for_mode (operand_mode, 0);
6629 unsigned_type = lang_hooks.types.type_for_mode (operand_mode, 1);
6630 intermediate_type = ops_unsigned ? unsigned_type : signed_type;
6631 inner = fold_convert_loc (loc, intermediate_type, inner);
6633 if (bitnum != 0)
6634 inner = build2 (RSHIFT_EXPR, intermediate_type,
6635 inner, size_int (bitnum));
6637 one = build_int_cst (intermediate_type, 1);
6639 if (code == EQ_EXPR)
6640 inner = fold_build2_loc (loc, BIT_XOR_EXPR, intermediate_type, inner, one);
6642 /* Put the AND last so it can combine with more things. */
6643 inner = build2 (BIT_AND_EXPR, intermediate_type, inner, one);
6645 /* Make sure to return the proper type. */
6646 inner = fold_convert_loc (loc, result_type, inner);
6648 return inner;
6650 return NULL_TREE;
6653 /* Check whether we are allowed to reorder operands arg0 and arg1,
6654 such that the evaluation of arg1 occurs before arg0. */
6656 static bool
6657 reorder_operands_p (const_tree arg0, const_tree arg1)
6659 if (! flag_evaluation_order)
6660 return true;
6661 if (TREE_CONSTANT (arg0) || TREE_CONSTANT (arg1))
6662 return true;
6663 return ! TREE_SIDE_EFFECTS (arg0)
6664 && ! TREE_SIDE_EFFECTS (arg1);
6667 /* Test whether it is preferable to swap two operands, ARG0 and
6668 ARG1, for example because ARG0 is an integer constant and ARG1
6669 isn't. If REORDER is true, only recommend swapping if we can
6670 evaluate the operands in reverse order. */
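/* Editorial note (added for exposition; not from the GCC sources):
   tree_swap_operands_p (1, X, false) is true, which is how fold
   canonicalizes 1 + X as X + 1.  Constants sort to the second
   operand; failing that, SSA_NAMEs (higher version number second);
   failing that, DECLs.  */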
6672 bool
6673 tree_swap_operands_p (const_tree arg0, const_tree arg1, bool reorder)
6675 STRIP_SIGN_NOPS (arg0);
6676 STRIP_SIGN_NOPS (arg1);
6678 if (TREE_CODE (arg1) == INTEGER_CST)
6679 return 0;
6680 if (TREE_CODE (arg0) == INTEGER_CST)
6681 return 1;
6683 if (TREE_CODE (arg1) == REAL_CST)
6684 return 0;
6685 if (TREE_CODE (arg0) == REAL_CST)
6686 return 1;
6688 if (TREE_CODE (arg1) == FIXED_CST)
6689 return 0;
6690 if (TREE_CODE (arg0) == FIXED_CST)
6691 return 1;
6693 if (TREE_CODE (arg1) == COMPLEX_CST)
6694 return 0;
6695 if (TREE_CODE (arg0) == COMPLEX_CST)
6696 return 1;
6698 if (TREE_CONSTANT (arg1))
6699 return 0;
6700 if (TREE_CONSTANT (arg0))
6701 return 1;
6703 if (optimize_function_for_size_p (cfun))
6704 return 0;
6706 if (reorder && flag_evaluation_order
6707 && (TREE_SIDE_EFFECTS (arg0) || TREE_SIDE_EFFECTS (arg1)))
6708 return 0;
6710 /* It is preferable to swap two SSA_NAME to ensure a canonical form
6711 for commutative and comparison operators. Ensuring a canonical
6712 form allows the optimizers to find additional redundancies without
6713 having to explicitly check for both orderings. */
6714 if (TREE_CODE (arg0) == SSA_NAME
6715 && TREE_CODE (arg1) == SSA_NAME
6716 && SSA_NAME_VERSION (arg0) > SSA_NAME_VERSION (arg1))
6717 return 1;
6719 /* Put SSA_NAMEs last. */
6720 if (TREE_CODE (arg1) == SSA_NAME)
6721 return 0;
6722 if (TREE_CODE (arg0) == SSA_NAME)
6723 return 1;
6725 /* Put variables last. */
6726 if (DECL_P (arg1))
6727 return 0;
6728 if (DECL_P (arg0))
6729 return 1;
6731 return 0;
6734 /* Fold comparison ARG0 CODE ARG1 (with result in TYPE), where
6735 ARG0 is extended to a wider type. */
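/* Editorial illustration (added for exposition; not from the GCC
   sources).  With unsigned char c:

     (int) c == 300  -->  false      (300 cannot fit in unsigned char)
     (int) c == 100  -->  c == 100   done in the narrower type  */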
6737 static tree
6738 fold_widened_comparison (location_t loc, enum tree_code code,
6739 tree type, tree arg0, tree arg1)
6741 tree arg0_unw = get_unwidened (arg0, NULL_TREE);
6742 tree arg1_unw;
6743 tree shorter_type, outer_type;
6744 tree min, max;
6745 bool above, below;
6747 if (arg0_unw == arg0)
6748 return NULL_TREE;
6749 shorter_type = TREE_TYPE (arg0_unw);
6751 #ifdef HAVE_canonicalize_funcptr_for_compare
6752 /* Disable this optimization if we're casting a function pointer
6753 type on targets that require function pointer canonicalization. */
6754 if (HAVE_canonicalize_funcptr_for_compare
6755 && TREE_CODE (shorter_type) == POINTER_TYPE
6756 && TREE_CODE (TREE_TYPE (shorter_type)) == FUNCTION_TYPE)
6757 return NULL_TREE;
6758 #endif
6760 if (TYPE_PRECISION (TREE_TYPE (arg0)) <= TYPE_PRECISION (shorter_type))
6761 return NULL_TREE;
6763 arg1_unw = get_unwidened (arg1, NULL_TREE);
6765 /* If possible, express the comparison in the shorter mode. */
6766 if ((code == EQ_EXPR || code == NE_EXPR
6767 || TYPE_UNSIGNED (TREE_TYPE (arg0)) == TYPE_UNSIGNED (shorter_type))
6768 && (TREE_TYPE (arg1_unw) == shorter_type
6769 || ((TYPE_PRECISION (shorter_type)
6770 >= TYPE_PRECISION (TREE_TYPE (arg1_unw)))
6771 && (TYPE_UNSIGNED (shorter_type)
6772 == TYPE_UNSIGNED (TREE_TYPE (arg1_unw))))
6773 || (TREE_CODE (arg1_unw) == INTEGER_CST
6774 && (TREE_CODE (shorter_type) == INTEGER_TYPE
6775 || TREE_CODE (shorter_type) == BOOLEAN_TYPE)
6776 && int_fits_type_p (arg1_unw, shorter_type))))
6777 return fold_build2_loc (loc, code, type, arg0_unw,
6778 fold_convert_loc (loc, shorter_type, arg1_unw));
6780 if (TREE_CODE (arg1_unw) != INTEGER_CST
6781 || TREE_CODE (shorter_type) != INTEGER_TYPE
6782 || !int_fits_type_p (arg1_unw, shorter_type))
6783 return NULL_TREE;
6785 /* If we are comparing with an integer that does not fit into the range
6786 of the shorter type, the result is known. */
6787 outer_type = TREE_TYPE (arg1_unw);
6788 min = lower_bound_in_type (outer_type, shorter_type);
6789 max = upper_bound_in_type (outer_type, shorter_type);
6791 above = integer_nonzerop (fold_relational_const (LT_EXPR, type,
6792 max, arg1_unw));
6793 below = integer_nonzerop (fold_relational_const (LT_EXPR, type,
6794 arg1_unw, min));
6796 switch (code)
6798 case EQ_EXPR:
6799 if (above || below)
6800 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
6801 break;
6803 case NE_EXPR:
6804 if (above || below)
6805 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
6806 break;
6808 case LT_EXPR:
6809 case LE_EXPR:
6810 if (above)
6811 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
6812 else if (below)
6813 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
6815 case GT_EXPR:
6816 case GE_EXPR:
6817 if (above)
6818 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
6819 else if (below)
6820 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
6822 default:
6823 break;
6826 return NULL_TREE;
6829 /* Fold comparison ARG0 CODE ARG1 (with result in TYPE), where for
6830 ARG0 just the signedness is changed. */
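/* Editorial illustration (added for exposition; not from the GCC
   sources).  With unsigned int u:

     (int) u == 5  -->  u == 5U

   valid for == and != regardless of signedness; the ordered
   comparisons are only rewritten when the signedness is unchanged.  */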
6832 static tree
6833 fold_sign_changed_comparison (location_t loc, enum tree_code code, tree type,
6834 tree arg0, tree arg1)
6836 tree arg0_inner;
6837 tree inner_type, outer_type;
6839 if (!CONVERT_EXPR_P (arg0))
6840 return NULL_TREE;
6842 outer_type = TREE_TYPE (arg0);
6843 arg0_inner = TREE_OPERAND (arg0, 0);
6844 inner_type = TREE_TYPE (arg0_inner);
6846 #ifdef HAVE_canonicalize_funcptr_for_compare
6847 /* Disable this optimization if we're casting a function pointer
6848 type on targets that require function pointer canonicalization. */
6849 if (HAVE_canonicalize_funcptr_for_compare
6850 && TREE_CODE (inner_type) == POINTER_TYPE
6851 && TREE_CODE (TREE_TYPE (inner_type)) == FUNCTION_TYPE)
6852 return NULL_TREE;
6853 #endif
6855 if (TYPE_PRECISION (inner_type) != TYPE_PRECISION (outer_type))
6856 return NULL_TREE;
6858 if (TREE_CODE (arg1) != INTEGER_CST
6859 && !(CONVERT_EXPR_P (arg1)
6860 && TREE_TYPE (TREE_OPERAND (arg1, 0)) == inner_type))
6861 return NULL_TREE;
6863 if (TYPE_UNSIGNED (inner_type) != TYPE_UNSIGNED (outer_type)
6864 && code != NE_EXPR
6865 && code != EQ_EXPR)
6866 return NULL_TREE;
6868 if (POINTER_TYPE_P (inner_type) != POINTER_TYPE_P (outer_type))
6869 return NULL_TREE;
6871 if (TREE_CODE (arg1) == INTEGER_CST)
6872 arg1 = force_fit_type (inner_type, wi::to_widest (arg1), 0,
6873 TREE_OVERFLOW (arg1));
6874 else
6875 arg1 = fold_convert_loc (loc, inner_type, arg1);
6877 return fold_build2_loc (loc, code, type, arg0_inner, arg1);
6880 /* Tries to replace &a[idx] p+ s * delta with &a[idx + delta], if s is
6881 the step of the array. Reconstructs s and delta in the case of s *
6882 delta being an integer constant (and thus already folded). ADDR is
6883 the address. MULT is the multiplicative expression. If the
6884 function succeeds, the new address expression is returned.
6885 Otherwise NULL_TREE is returned. LOC is the location of the
6886 resulting expression. */
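/* Editorial illustration (added for exposition; not from the GCC
   sources).  With int a[10], this rewrites

     &a[i] p+ sizeof (int) * k  -->  &a[i + k]

   after checking that the multiplication step matches the array
   element size, and, for multi-dimensional arrays, that the new
   index provably stays within the dimension's bounds.  */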
6888 static tree
6889 try_move_mult_to_index (location_t loc, tree addr, tree op1)
6891 tree s, delta, step;
6892 tree ref = TREE_OPERAND (addr, 0), pref;
6893 tree ret, pos;
6894 tree itype;
6895 bool mdim = false;
6897 /* Strip the nops that might be added when converting op1 to sizetype. */
6898 STRIP_NOPS (op1);
6900 /* Canonicalize op1 into a possibly non-constant delta
6901 and an INTEGER_CST s. */
6902 if (TREE_CODE (op1) == MULT_EXPR)
6904 tree arg0 = TREE_OPERAND (op1, 0), arg1 = TREE_OPERAND (op1, 1);
6906 STRIP_NOPS (arg0);
6907 STRIP_NOPS (arg1);
6909 if (TREE_CODE (arg0) == INTEGER_CST)
6911 s = arg0;
6912 delta = arg1;
6914 else if (TREE_CODE (arg1) == INTEGER_CST)
6916 s = arg1;
6917 delta = arg0;
6919 else
6920 return NULL_TREE;
6922 else if (TREE_CODE (op1) == INTEGER_CST)
6924 delta = op1;
6925 s = NULL_TREE;
6927 else
6929 /* Treat op1 as delta * 1. */
6930 delta = op1;
6931 s = integer_one_node;
6934 /* Handle &x.array the same as we would handle &x.array[0]. */
6935 if (TREE_CODE (ref) == COMPONENT_REF
6936 && TREE_CODE (TREE_TYPE (ref)) == ARRAY_TYPE)
6938 tree domain;
6940 /* Remember if this was a multi-dimensional array. */
6941 if (TREE_CODE (TREE_OPERAND (ref, 0)) == ARRAY_REF)
6942 mdim = true;
6944 domain = TYPE_DOMAIN (TREE_TYPE (ref));
6945 if (! domain)
6946 goto cont;
6947 itype = TREE_TYPE (domain);
6949 step = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (ref)));
6950 if (TREE_CODE (step) != INTEGER_CST)
6951 goto cont;
6953 if (s)
6955 if (! tree_int_cst_equal (step, s))
6956 goto cont;
6958 else
6960 /* Check whether delta is a multiple of step. */
6961 tree tmp = div_if_zero_remainder (op1, step);
6962 if (! tmp)
6963 goto cont;
6964 delta = tmp;
6967 /* Only fold here if we can verify we do not overflow one
6968 dimension of a multi-dimensional array. */
6969 if (mdim)
6971 tree tmp;
6973 if (!TYPE_MIN_VALUE (domain)
6974 || !TYPE_MAX_VALUE (domain)
6975 || TREE_CODE (TYPE_MAX_VALUE (domain)) != INTEGER_CST)
6976 goto cont;
6978 tmp = fold_binary_loc (loc, PLUS_EXPR, itype,
6979 fold_convert_loc (loc, itype,
6980 TYPE_MIN_VALUE (domain)),
6981 fold_convert_loc (loc, itype, delta));
6982 if (TREE_CODE (tmp) != INTEGER_CST
6983 || tree_int_cst_lt (TYPE_MAX_VALUE (domain), tmp))
6984 goto cont;
6987 /* We found a suitable component reference. */
6989 pref = TREE_OPERAND (addr, 0);
6990 ret = copy_node (pref);
6991 SET_EXPR_LOCATION (ret, loc);
6993 ret = build4_loc (loc, ARRAY_REF, TREE_TYPE (TREE_TYPE (ref)), ret,
6994 fold_build2_loc
6995 (loc, PLUS_EXPR, itype,
6996 fold_convert_loc (loc, itype,
6997 TYPE_MIN_VALUE
6998 (TYPE_DOMAIN (TREE_TYPE (ref)))),
6999 fold_convert_loc (loc, itype, delta)),
7000 NULL_TREE, NULL_TREE);
7001 return build_fold_addr_expr_loc (loc, ret);
7004 cont:
7006 for (;; ref = TREE_OPERAND (ref, 0))
7008 if (TREE_CODE (ref) == ARRAY_REF)
7010 tree domain;
7012 /* Remember if this was a multi-dimensional array. */
7013 if (TREE_CODE (TREE_OPERAND (ref, 0)) == ARRAY_REF)
7014 mdim = true;
7016 domain = TYPE_DOMAIN (TREE_TYPE (TREE_OPERAND (ref, 0)));
7017 if (! domain)
7018 continue;
7019 itype = TREE_TYPE (domain);
7021 step = array_ref_element_size (ref);
7022 if (TREE_CODE (step) != INTEGER_CST)
7023 continue;
7025 if (s)
7027 if (! tree_int_cst_equal (step, s))
7028 continue;
7030 else
7032 /* Check whether delta is a multiple of step. */
7033 tree tmp = div_if_zero_remainder (op1, step);
7034 if (! tmp)
7035 continue;
7036 delta = tmp;
7039 /* Only fold here if we can verify we do not overflow one
7040 dimension of a multi-dimensional array. */
7041 if (mdim)
7043 tree tmp;
7045 if (TREE_CODE (TREE_OPERAND (ref, 1)) != INTEGER_CST
7046 || !TYPE_MAX_VALUE (domain)
7047 || TREE_CODE (TYPE_MAX_VALUE (domain)) != INTEGER_CST)
7048 continue;
7050 tmp = fold_binary_loc (loc, PLUS_EXPR, itype,
7051 fold_convert_loc (loc, itype,
7052 TREE_OPERAND (ref, 1)),
7053 fold_convert_loc (loc, itype, delta));
7054 if (!tmp
7055 || TREE_CODE (tmp) != INTEGER_CST
7056 || tree_int_cst_lt (TYPE_MAX_VALUE (domain), tmp))
7057 continue;
7060 break;
7062 else
7063 mdim = false;
7065 if (!handled_component_p (ref))
7066 return NULL_TREE;
7069 /* We found a suitable array reference. So copy everything up to it,
7070 and replace the index. */
7072 pref = TREE_OPERAND (addr, 0);
7073 ret = copy_node (pref);
7074 SET_EXPR_LOCATION (ret, loc);
7075 pos = ret;
7077 while (pref != ref)
7079 pref = TREE_OPERAND (pref, 0);
7080 TREE_OPERAND (pos, 0) = copy_node (pref);
7081 pos = TREE_OPERAND (pos, 0);
7084 TREE_OPERAND (pos, 1)
7085 = fold_build2_loc (loc, PLUS_EXPR, itype,
7086 fold_convert_loc (loc, itype, TREE_OPERAND (pos, 1)),
7087 fold_convert_loc (loc, itype, delta));
7088 return fold_build1_loc (loc, ADDR_EXPR, TREE_TYPE (addr), ret);
7092 /* Fold A < X && A + 1 > Y to A < X && A >= Y. Normally A + 1 > Y
7093 means A >= Y && A != MAX, but in this case we know that
7094 A < X <= MAX. INEQ is A + 1 > Y, BOUND is A < X. */
7096 static tree
7097 fold_to_nonsharp_ineq_using_bound (location_t loc, tree ineq, tree bound)
7099 tree a, typea, type = TREE_TYPE (ineq), a1, diff, y;
7101 if (TREE_CODE (bound) == LT_EXPR)
7102 a = TREE_OPERAND (bound, 0);
7103 else if (TREE_CODE (bound) == GT_EXPR)
7104 a = TREE_OPERAND (bound, 1);
7105 else
7106 return NULL_TREE;
7108 typea = TREE_TYPE (a);
7109 if (!INTEGRAL_TYPE_P (typea)
7110 && !POINTER_TYPE_P (typea))
7111 return NULL_TREE;
7113 if (TREE_CODE (ineq) == LT_EXPR)
7115 a1 = TREE_OPERAND (ineq, 1);
7116 y = TREE_OPERAND (ineq, 0);
7118 else if (TREE_CODE (ineq) == GT_EXPR)
7120 a1 = TREE_OPERAND (ineq, 0);
7121 y = TREE_OPERAND (ineq, 1);
7123 else
7124 return NULL_TREE;
7126 if (TREE_TYPE (a1) != typea)
7127 return NULL_TREE;
7129 if (POINTER_TYPE_P (typea))
7131 /* Convert the pointer types into integers before taking the difference. */
7132 tree ta = fold_convert_loc (loc, ssizetype, a);
7133 tree ta1 = fold_convert_loc (loc, ssizetype, a1);
7134 diff = fold_binary_loc (loc, MINUS_EXPR, ssizetype, ta1, ta);
7136 else
7137 diff = fold_binary_loc (loc, MINUS_EXPR, typea, a1, a);
7139 if (!diff || !integer_onep (diff))
7140 return NULL_TREE;
7142 return fold_build2_loc (loc, GE_EXPR, type, a, y);
7145 /* Fold a sum or difference of at least one multiplication.
7146 Returns the folded tree or NULL if no simplification could be made. */
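/* Editorial illustration (added for exposition; not from the GCC
   sources):

     X*C + Y*C  -->  (X + Y) * C
     X*C + X    -->  X * (C + 1)
     X*4 + Y*2  -->  (X*2 + Y) * 2    (common power-of-two factor)  */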
7148 static tree
7149 fold_plusminus_mult_expr (location_t loc, enum tree_code code, tree type,
7150 tree arg0, tree arg1)
7152 tree arg00, arg01, arg10, arg11;
7153 tree alt0 = NULL_TREE, alt1 = NULL_TREE, same;
7155 /* (A * C) +- (B * C) -> (A+-B) * C.
7156 (A * C) +- A -> A * (C+-1).
7157 We are most concerned about the case where C is a constant,
7158 but other combinations show up during loop reduction. Since
7159 it is not difficult, try all four possibilities. */
7161 if (TREE_CODE (arg0) == MULT_EXPR)
7163 arg00 = TREE_OPERAND (arg0, 0);
7164 arg01 = TREE_OPERAND (arg0, 1);
7166 else if (TREE_CODE (arg0) == INTEGER_CST)
7168 arg00 = build_one_cst (type);
7169 arg01 = arg0;
7171 else
7173 /* We cannot generate constant 1 for fract. */
7174 if (ALL_FRACT_MODE_P (TYPE_MODE (type)))
7175 return NULL_TREE;
7176 arg00 = arg0;
7177 arg01 = build_one_cst (type);
7179 if (TREE_CODE (arg1) == MULT_EXPR)
7181 arg10 = TREE_OPERAND (arg1, 0);
7182 arg11 = TREE_OPERAND (arg1, 1);
7184 else if (TREE_CODE (arg1) == INTEGER_CST)
7186 arg10 = build_one_cst (type);
7187 /* As we canonicalize A - 2 to A + -2, get rid of that sign for
7188 the purpose of this canonicalization. */
7189 if (wi::neg_p (arg1, TYPE_SIGN (TREE_TYPE (arg1)))
7190 && negate_expr_p (arg1)
7191 && code == PLUS_EXPR)
7193 arg11 = negate_expr (arg1);
7194 code = MINUS_EXPR;
7196 else
7197 arg11 = arg1;
7199 else
7201 /* We cannot generate constant 1 for fract. */
7202 if (ALL_FRACT_MODE_P (TYPE_MODE (type)))
7203 return NULL_TREE;
7204 arg10 = arg1;
7205 arg11 = build_one_cst (type);
7207 same = NULL_TREE;
7209 if (operand_equal_p (arg01, arg11, 0))
7210 same = arg01, alt0 = arg00, alt1 = arg10;
7211 else if (operand_equal_p (arg00, arg10, 0))
7212 same = arg00, alt0 = arg01, alt1 = arg11;
7213 else if (operand_equal_p (arg00, arg11, 0))
7214 same = arg00, alt0 = arg01, alt1 = arg10;
7215 else if (operand_equal_p (arg01, arg10, 0))
7216 same = arg01, alt0 = arg00, alt1 = arg11;
7218 /* No identical multiplicands; see if we can find a common
7219 power-of-two factor in non-power-of-two multiplies. This
7220 can help in multi-dimensional array access. */
7221 else if (tree_fits_shwi_p (arg01)
7222 && tree_fits_shwi_p (arg11))
7224 HOST_WIDE_INT int01, int11, tmp;
7225 bool swap = false;
7226 tree maybe_same;
7227 int01 = tree_to_shwi (arg01);
7228 int11 = tree_to_shwi (arg11);
7230 /* Move min of absolute values to int11. */
7231 if (absu_hwi (int01) < absu_hwi (int11))
7233 tmp = int01, int01 = int11, int11 = tmp;
7234 alt0 = arg00, arg00 = arg10, arg10 = alt0;
7235 maybe_same = arg01;
7236 swap = true;
7238 else
7239 maybe_same = arg11;
7241 if (exact_log2 (absu_hwi (int11)) > 0 && int01 % int11 == 0
7242 /* The remainder should not be a constant, otherwise we
7243 end up folding i * 4 + 2 to (i * 2 + 1) * 2, which would
7244 increase the number of multiplications necessary. */
7245 && TREE_CODE (arg10) != INTEGER_CST)
7247 alt0 = fold_build2_loc (loc, MULT_EXPR, TREE_TYPE (arg00), arg00,
7248 build_int_cst (TREE_TYPE (arg00),
7249 int01 / int11));
7250 alt1 = arg10;
7251 same = maybe_same;
7252 if (swap)
7253 maybe_same = alt0, alt0 = alt1, alt1 = maybe_same;
7257 if (same)
7258 return fold_build2_loc (loc, MULT_EXPR, type,
7259 fold_build2_loc (loc, code, type,
7260 fold_convert_loc (loc, type, alt0),
7261 fold_convert_loc (loc, type, alt1)),
7262 fold_convert_loc (loc, type, same));
7264 return NULL_TREE;
7267 /* Subroutine of native_encode_expr. Encode the INTEGER_CST
7268 specified by EXPR into the buffer PTR of length LEN bytes.
7269 Return the number of bytes placed in the buffer, or zero
7270 upon failure. */
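/* Illustrative example: encoding the 32-bit INTEGER_CST 0x11223344
   with OFF == -1 on a little-endian target stores the bytes
   0x44 0x33 0x22 0x11 into PTR and returns 4 (assuming LEN >= 4).  */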
7272 static int
7273 native_encode_int (const_tree expr, unsigned char *ptr, int len, int off)
7275 tree type = TREE_TYPE (expr);
7276 int total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
7277 int byte, offset, word, words;
7278 unsigned char value;
7280 if ((off == -1 && total_bytes > len)
7281 || off >= total_bytes)
7282 return 0;
7283 if (off == -1)
7284 off = 0;
7285 words = total_bytes / UNITS_PER_WORD;
7287 for (byte = 0; byte < total_bytes; byte++)
7289 int bitpos = byte * BITS_PER_UNIT;
7290 /* Extend EXPR according to TYPE_SIGN if the precision isn't a whole
7291 number of bytes. */
7292 value = wi::extract_uhwi (wi::to_widest (expr), bitpos, BITS_PER_UNIT);
7294 if (total_bytes > UNITS_PER_WORD)
7296 word = byte / UNITS_PER_WORD;
7297 if (WORDS_BIG_ENDIAN)
7298 word = (words - 1) - word;
7299 offset = word * UNITS_PER_WORD;
7300 if (BYTES_BIG_ENDIAN)
7301 offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
7302 else
7303 offset += byte % UNITS_PER_WORD;
7305 else
7306 offset = BYTES_BIG_ENDIAN ? (total_bytes - 1) - byte : byte;
7307 if (offset >= off
7308 && offset - off < len)
7309 ptr[offset - off] = value;
7311 return MIN (len, total_bytes - off);
7315 /* Subroutine of native_encode_expr. Encode the FIXED_CST
7316 specified by EXPR into the buffer PTR of length LEN bytes.
7317 Return the number of bytes placed in the buffer, or zero
7318 upon failure. */
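/* Note: the byte shuffling itself is delegated to native_encode_int
   above by re-expressing the fixed-point payload as an integer
   constant of the same bit size.  */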
7320 static int
7321 native_encode_fixed (const_tree expr, unsigned char *ptr, int len, int off)
7323 tree type = TREE_TYPE (expr);
7324 enum machine_mode mode = TYPE_MODE (type);
7325 int total_bytes = GET_MODE_SIZE (mode);
7326 FIXED_VALUE_TYPE value;
7327 tree i_value, i_type;
7329 if (total_bytes * BITS_PER_UNIT > HOST_BITS_PER_DOUBLE_INT)
7330 return 0;
7332 i_type = lang_hooks.types.type_for_size (GET_MODE_BITSIZE (mode), 1);
7334 if (NULL_TREE == i_type
7335 || TYPE_PRECISION (i_type) != total_bytes * BITS_PER_UNIT)
7336 return 0;
7338 value = TREE_FIXED_CST (expr);
7339 i_value = double_int_to_tree (i_type, value.data);
7341 return native_encode_int (i_value, ptr, len, off);
7345 /* Subroutine of native_encode_expr. Encode the REAL_CST
7346 specified by EXPR into the buffer PTR of length LEN bytes.
7347 Return the number of bytes placed in the buffer, or zero
7348 upon failure. */
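/* Illustrative example: encoding the SFmode REAL_CST 1.0 yields the
   IEEE single-precision image 0x3f800000, stored as the bytes
   0x00 0x00 0x80 0x3f on a little-endian target.  */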
7350 static int
7351 native_encode_real (const_tree expr, unsigned char *ptr, int len, int off)
7353 tree type = TREE_TYPE (expr);
7354 int total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
7355 int byte, offset, word, words, bitpos;
7356 unsigned char value;
7358 /* Each element of TMP below carries exactly 32 bits of the target
7359 image, no matter the size of the host's long.  We handle floating
7360 point representations with up to 192 bits. */
7361 long tmp[6];
7363 if ((off == -1 && total_bytes > len)
7364 || off >= total_bytes)
7365 return 0;
7366 if (off == -1)
7367 off = 0;
7368 words = (32 / BITS_PER_UNIT) / UNITS_PER_WORD;
7370 real_to_target (tmp, TREE_REAL_CST_PTR (expr), TYPE_MODE (type));
7372 for (bitpos = 0; bitpos < total_bytes * BITS_PER_UNIT;
7373 bitpos += BITS_PER_UNIT)
7375 byte = (bitpos / BITS_PER_UNIT) & 3;
7376 value = (unsigned char) (tmp[bitpos / 32] >> (bitpos & 31));
7378 if (UNITS_PER_WORD < 4)
7380 word = byte / UNITS_PER_WORD;
7381 if (WORDS_BIG_ENDIAN)
7382 word = (words - 1) - word;
7383 offset = word * UNITS_PER_WORD;
7384 if (BYTES_BIG_ENDIAN)
7385 offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
7386 else
7387 offset += byte % UNITS_PER_WORD;
7389 else
7390 offset = BYTES_BIG_ENDIAN ? 3 - byte : byte;
7391 offset = offset + ((bitpos / BITS_PER_UNIT) & ~3);
7392 if (offset >= off
7393 && offset - off < len)
7394 ptr[offset - off] = value;
7396 return MIN (len, total_bytes - off);
7399 /* Subroutine of native_encode_expr. Encode the COMPLEX_CST
7400 specified by EXPR into the buffer PTR of length LEN bytes.
7401 Return the number of bytes placed in the buffer, or zero
7402 upon failure. */
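/* The real part is encoded first and the imaginary part immediately
   after it; e.g. a complex double occupies 16 bytes, the real part in
   the first 8 and the imaginary part in the last 8 (illustrative,
   assuming 8-byte doubles).  */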
7404 static int
7405 native_encode_complex (const_tree expr, unsigned char *ptr, int len, int off)
7407 int rsize, isize;
7408 tree part;
7410 part = TREE_REALPART (expr);
7411 rsize = native_encode_expr (part, ptr, len, off);
7412 if (off == -1
7413 && rsize == 0)
7414 return 0;
7415 part = TREE_IMAGPART (expr);
7416 if (off != -1)
7417 off = MAX (0, off - GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (part))));
7418 isize = native_encode_expr (part, ptr+rsize, len-rsize, off);
7419 if (off == -1
7420 && isize != rsize)
7421 return 0;
7422 return rsize + isize;
7426 /* Subroutine of native_encode_expr. Encode the VECTOR_CST
7427 specified by EXPR into the buffer PTR of length LEN bytes.
7428 Return the number of bytes placed in the buffer, or zero
7429 upon failure. */
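/* Elements are encoded in order; e.g. the V4SI constant {1, 2, 3, 4}
   produces four consecutive 4-byte integer encodings (illustrative,
   assuming 32-bit elements).  */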
7431 static int
7432 native_encode_vector (const_tree expr, unsigned char *ptr, int len, int off)
7434 unsigned i, count;
7435 int size, offset;
7436 tree itype, elem;
7438 offset = 0;
7439 count = VECTOR_CST_NELTS (expr);
7440 itype = TREE_TYPE (TREE_TYPE (expr));
7441 size = GET_MODE_SIZE (TYPE_MODE (itype));
7442 for (i = 0; i < count; i++)
7444 if (off >= size)
7446 off -= size;
7447 continue;
7449 elem = VECTOR_CST_ELT (expr, i);
7450 int res = native_encode_expr (elem, ptr+offset, len-offset, off);
7451 if ((off == -1 && res != size)
7452 || res == 0)
7453 return 0;
7454 offset += res;
7455 if (offset >= len)
7456 return offset;
7457 if (off != -1)
7458 off = 0;
7460 return offset;
7464 /* Subroutine of native_encode_expr. Encode the STRING_CST
7465 specified by EXPR into the buffer PTR of length LEN bytes.
7466 Return the number of bytes placed in the buffer, or zero
7467 upon failure. */
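/* Illustrative example: for char s[8] initialized from "ab", the
   STRING_CST holds 3 bytes ("ab\0"); encoding with OFF == -1 and
   LEN >= 8 copies those 3 bytes and zero-fills the remaining 5.  */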
7469 static int
7470 native_encode_string (const_tree expr, unsigned char *ptr, int len, int off)
7472 tree type = TREE_TYPE (expr);
7473 HOST_WIDE_INT total_bytes;
7475 if (TREE_CODE (type) != ARRAY_TYPE
7476 || TREE_CODE (TREE_TYPE (type)) != INTEGER_TYPE
7477 || GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (type))) != BITS_PER_UNIT
7478 || !tree_fits_shwi_p (TYPE_SIZE_UNIT (type)))
7479 return 0;
7480 total_bytes = tree_to_shwi (TYPE_SIZE_UNIT (type));
7481 if ((off == -1 && total_bytes > len)
7482 || off >= total_bytes)
7483 return 0;
7484 if (off == -1)
7485 off = 0;
7486 if (TREE_STRING_LENGTH (expr) - off < MIN (total_bytes, len))
7488 int written = 0;
7489 if (off < TREE_STRING_LENGTH (expr))
7491 written = MIN (len, TREE_STRING_LENGTH (expr) - off);
7492 memcpy (ptr, TREE_STRING_POINTER (expr) + off, written);
7494 memset (ptr + written, 0,
7495 MIN (total_bytes - written, len - written));
7497 else
7498 memcpy (ptr, TREE_STRING_POINTER (expr) + off, MIN (total_bytes, len));
7499 return MIN (total_bytes - off, len);
7503 /* Subroutine of fold_view_convert_expr.  Encode the INTEGER_CST,
7504 REAL_CST, FIXED_CST, COMPLEX_CST, VECTOR_CST or STRING_CST specified
7505 by EXPR into the buffer PTR of length LEN bytes.  If OFF is not -1 then start
7506 the encoding at byte offset OFF and encode at most LEN bytes.
7507 Return the number of bytes placed in the buffer, or zero upon failure. */
7509 int
7510 native_encode_expr (const_tree expr, unsigned char *ptr, int len, int off)
7512 switch (TREE_CODE (expr))
7514 case INTEGER_CST:
7515 return native_encode_int (expr, ptr, len, off);
7517 case REAL_CST:
7518 return native_encode_real (expr, ptr, len, off);
7520 case FIXED_CST:
7521 return native_encode_fixed (expr, ptr, len, off);
7523 case COMPLEX_CST:
7524 return native_encode_complex (expr, ptr, len, off);
7526 case VECTOR_CST:
7527 return native_encode_vector (expr, ptr, len, off);
7529 case STRING_CST:
7530 return native_encode_string (expr, ptr, len, off);
7532 default:
7533 return 0;
7538 /* Subroutine of native_interpret_expr. Interpret the contents of
7539 the buffer PTR of length LEN as an INTEGER_CST of type TYPE.
7540 If the buffer cannot be interpreted, return NULL_TREE. */
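/* Illustrative example: interpreting the bytes 0x44 0x33 0x22 0x11 as
   a 32-bit integer on a little-endian target reconstructs 0x11223344,
   the inverse of native_encode_int above.  */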
7542 static tree
7543 native_interpret_int (tree type, const unsigned char *ptr, int len)
7545 int total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
7547 if (total_bytes > len
7548 || total_bytes * BITS_PER_UNIT > HOST_BITS_PER_DOUBLE_INT)
7549 return NULL_TREE;
7551 wide_int result = wi::from_buffer (ptr, total_bytes);
7553 return wide_int_to_tree (type, result);
7557 /* Subroutine of native_interpret_expr. Interpret the contents of
7558 the buffer PTR of length LEN as a FIXED_CST of type TYPE.
7559 If the buffer cannot be interpreted, return NULL_TREE. */
7561 static tree
7562 native_interpret_fixed (tree type, const unsigned char *ptr, int len)
7564 int total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
7565 double_int result;
7566 FIXED_VALUE_TYPE fixed_value;
7568 if (total_bytes > len
7569 || total_bytes * BITS_PER_UNIT > HOST_BITS_PER_DOUBLE_INT)
7570 return NULL_TREE;
7572 result = double_int::from_buffer (ptr, total_bytes);
7573 fixed_value = fixed_from_double_int (result, TYPE_MODE (type));
7575 return build_fixed (type, fixed_value);
7579 /* Subroutine of native_interpret_expr. Interpret the contents of
7580 the buffer PTR of length LEN as a REAL_CST of type TYPE.
7581 If the buffer cannot be interpreted, return NULL_TREE. */
7583 static tree
7584 native_interpret_real (tree type, const unsigned char *ptr, int len)
7586 enum machine_mode mode = TYPE_MODE (type);
7587 int total_bytes = GET_MODE_SIZE (mode);
7588 int byte, offset, word, words, bitpos;
7589 unsigned char value;
7590 /* Each element of TMP below carries exactly 32 bits of the target
7591 image, no matter the size of the host's long.  We handle floating
7592 point representations with up to 192 bits. */
7593 REAL_VALUE_TYPE r;
7594 long tmp[6];
7596 total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
7597 if (total_bytes > len || total_bytes > 24)
7598 return NULL_TREE;
7599 words = (32 / BITS_PER_UNIT) / UNITS_PER_WORD;
7601 memset (tmp, 0, sizeof (tmp));
7602 for (bitpos = 0; bitpos < total_bytes * BITS_PER_UNIT;
7603 bitpos += BITS_PER_UNIT)
7605 byte = (bitpos / BITS_PER_UNIT) & 3;
7606 if (UNITS_PER_WORD < 4)
7608 word = byte / UNITS_PER_WORD;
7609 if (WORDS_BIG_ENDIAN)
7610 word = (words - 1) - word;
7611 offset = word * UNITS_PER_WORD;
7612 if (BYTES_BIG_ENDIAN)
7613 offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
7614 else
7615 offset += byte % UNITS_PER_WORD;
7617 else
7618 offset = BYTES_BIG_ENDIAN ? 3 - byte : byte;
7619 value = ptr[offset + ((bitpos / BITS_PER_UNIT) & ~3)];
7621 tmp[bitpos / 32] |= (unsigned long)value << (bitpos & 31);
7624 real_from_target (&r, tmp, mode);
7625 return build_real (type, r);
7629 /* Subroutine of native_interpret_expr. Interpret the contents of
7630 the buffer PTR of length LEN as a COMPLEX_CST of type TYPE.
7631 If the buffer cannot be interpreted, return NULL_TREE. */
7633 static tree
7634 native_interpret_complex (tree type, const unsigned char *ptr, int len)
7636 tree etype, rpart, ipart;
7637 int size;
7639 etype = TREE_TYPE (type);
7640 size = GET_MODE_SIZE (TYPE_MODE (etype));
7641 if (size * 2 > len)
7642 return NULL_TREE;
7643 rpart = native_interpret_expr (etype, ptr, size);
7644 if (!rpart)
7645 return NULL_TREE;
7646 ipart = native_interpret_expr (etype, ptr+size, size);
7647 if (!ipart)
7648 return NULL_TREE;
7649 return build_complex (type, rpart, ipart);
7653 /* Subroutine of native_interpret_expr. Interpret the contents of
7654 the buffer PTR of length LEN as a VECTOR_CST of type TYPE.
7655 If the buffer cannot be interpreted, return NULL_TREE. */
7657 static tree
7658 native_interpret_vector (tree type, const unsigned char *ptr, int len)
7660 tree etype, elem;
7661 int i, size, count;
7662 tree *elements;
7664 etype = TREE_TYPE (type);
7665 size = GET_MODE_SIZE (TYPE_MODE (etype));
7666 count = TYPE_VECTOR_SUBPARTS (type);
7667 if (size * count > len)
7668 return NULL_TREE;
7670 elements = XALLOCAVEC (tree, count);
7671 for (i = count - 1; i >= 0; i--)
7673 elem = native_interpret_expr (etype, ptr+(i*size), size);
7674 if (!elem)
7675 return NULL_TREE;
7676 elements[i] = elem;
7678 return build_vector (type, elements);
7682 /* Subroutine of fold_view_convert_expr. Interpret the contents of
7683 the buffer PTR of length LEN as a constant of type TYPE. For
7684 INTEGRAL_TYPE_P we return an INTEGER_CST, for SCALAR_FLOAT_TYPE_P
7685 we return a REAL_CST, etc... If the buffer cannot be interpreted,
7686 return NULL_TREE. */
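/* Usage sketch (illustrative):

     unsigned char buf[16];
     int len = native_encode_expr (expr, buf, sizeof (buf), -1);
     tree t = len ? native_interpret_expr (type, buf, len) : NULL_TREE;

   mirrors the round trip performed by fold_view_convert_expr below.  */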
7688 tree
7689 native_interpret_expr (tree type, const unsigned char *ptr, int len)
7691 switch (TREE_CODE (type))
7693 case INTEGER_TYPE:
7694 case ENUMERAL_TYPE:
7695 case BOOLEAN_TYPE:
7696 case POINTER_TYPE:
7697 case REFERENCE_TYPE:
7698 return native_interpret_int (type, ptr, len);
7700 case REAL_TYPE:
7701 return native_interpret_real (type, ptr, len);
7703 case FIXED_POINT_TYPE:
7704 return native_interpret_fixed (type, ptr, len);
7706 case COMPLEX_TYPE:
7707 return native_interpret_complex (type, ptr, len);
7709 case VECTOR_TYPE:
7710 return native_interpret_vector (type, ptr, len);
7712 default:
7713 return NULL_TREE;
7717 /* Returns true if we can interpret the contents of a native encoding
7718 as TYPE. */
7720 static bool
7721 can_native_interpret_type_p (tree type)
7723 switch (TREE_CODE (type))
7725 case INTEGER_TYPE:
7726 case ENUMERAL_TYPE:
7727 case BOOLEAN_TYPE:
7728 case POINTER_TYPE:
7729 case REFERENCE_TYPE:
7730 case FIXED_POINT_TYPE:
7731 case REAL_TYPE:
7732 case COMPLEX_TYPE:
7733 case VECTOR_TYPE:
7734 return true;
7735 default:
7736 return false;
7740 /* Fold a VIEW_CONVERT_EXPR of a constant expression EXPR to type
7741 TYPE at compile-time. If we're unable to perform the conversion
7742 return NULL_TREE. */
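/* Illustrative example: VIEW_CONVERT_EXPR<int>(1.0f) folds to the
   INTEGER_CST 0x3f800000 at compile time, assuming IEEE single
   precision and a 32-bit int.  */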
7744 static tree
7745 fold_view_convert_expr (tree type, tree expr)
7747 /* We support up to 512-bit values (for V8DFmode). */
7748 unsigned char buffer[64];
7749 int len;
7751 /* Check that the host and target are sane. */
7752 if (CHAR_BIT != 8 || BITS_PER_UNIT != 8)
7753 return NULL_TREE;
7755 len = native_encode_expr (expr, buffer, sizeof (buffer));
7756 if (len == 0)
7757 return NULL_TREE;
7759 return native_interpret_expr (type, buffer, len);
7762 /* Build an expression for the address of T. Folds away INDIRECT_REF
7763 to avoid confusing the gimplify process. */
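/* Illustrative example: for a pointer p, taking the address of *p
   simply yields p (with a NOP_EXPR cast if PTRTYPE differs), rather
   than building &*p.  */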
7765 tree
7766 build_fold_addr_expr_with_type_loc (location_t loc, tree t, tree ptrtype)
7768 /* The size of the object is not relevant when talking about its address. */
7769 if (TREE_CODE (t) == WITH_SIZE_EXPR)
7770 t = TREE_OPERAND (t, 0);
7772 if (TREE_CODE (t) == INDIRECT_REF)
7774 t = TREE_OPERAND (t, 0);
7776 if (TREE_TYPE (t) != ptrtype)
7777 t = build1_loc (loc, NOP_EXPR, ptrtype, t);
7779 else if (TREE_CODE (t) == MEM_REF
7780 && integer_zerop (TREE_OPERAND (t, 1)))
7781 return TREE_OPERAND (t, 0);
7782 else if (TREE_CODE (t) == MEM_REF
7783 && TREE_CODE (TREE_OPERAND (t, 0)) == INTEGER_CST)
7784 return fold_binary (POINTER_PLUS_EXPR, ptrtype,
7785 TREE_OPERAND (t, 0),
7786 convert_to_ptrofftype (TREE_OPERAND (t, 1)));
7787 else if (TREE_CODE (t) == VIEW_CONVERT_EXPR)
7789 t = build_fold_addr_expr_loc (loc, TREE_OPERAND (t, 0));
7791 if (TREE_TYPE (t) != ptrtype)
7792 t = fold_convert_loc (loc, ptrtype, t);
7794 else
7795 t = build1_loc (loc, ADDR_EXPR, ptrtype, t);
7797 return t;
7800 /* Build an expression for the address of T. */
7802 tree
7803 build_fold_addr_expr_loc (location_t loc, tree t)
7805 tree ptrtype = build_pointer_type (TREE_TYPE (t));
7807 return build_fold_addr_expr_with_type_loc (loc, t, ptrtype);
7810 static bool vec_cst_ctor_to_array (tree, tree *);
7812 /* Fold a unary expression of code CODE and type TYPE with operand
7813 OP0. Return the folded expression if folding is successful.
7814 Otherwise, return NULL_TREE. */
7816 static tree
7817 fold_unary_loc_1 (location_t loc, enum tree_code code, tree type, tree op0)
7819 tree tem;
7820 tree arg0;
7821 enum tree_code_class kind = TREE_CODE_CLASS (code);
7823 gcc_assert (IS_EXPR_CODE_CLASS (kind)
7824 && TREE_CODE_LENGTH (code) == 1);
7826 arg0 = op0;
7827 if (arg0)
7829 if (CONVERT_EXPR_CODE_P (code)
7830 || code == FLOAT_EXPR || code == ABS_EXPR || code == NEGATE_EXPR)
7832 /* Don't use STRIP_NOPS, because signedness of argument type
7833 matters. */
7834 STRIP_SIGN_NOPS (arg0);
7836 else
7838 /* Strip any conversions that don't change the mode. This
7839 is safe for every expression, except for a comparison
7840 expression because its signedness is derived from its
7841 operands.
7843 Note that this is done as an internal manipulation within
7844 the constant folder, in order to find the simplest
7845 representation of the arguments so that their form can be
7846 studied.  In any case, the appropriate type conversions
7847 should be put back in the tree that will get out of the
7848 constant folder. */
7849 STRIP_NOPS (arg0);
7853 if (TREE_CODE_CLASS (code) == tcc_unary)
7855 if (TREE_CODE (arg0) == COMPOUND_EXPR)
7856 return build2 (COMPOUND_EXPR, type, TREE_OPERAND (arg0, 0),
7857 fold_build1_loc (loc, code, type,
7858 fold_convert_loc (loc, TREE_TYPE (op0),
7859 TREE_OPERAND (arg0, 1))));
7860 else if (TREE_CODE (arg0) == COND_EXPR)
7862 tree arg01 = TREE_OPERAND (arg0, 1);
7863 tree arg02 = TREE_OPERAND (arg0, 2);
7864 if (! VOID_TYPE_P (TREE_TYPE (arg01)))
7865 arg01 = fold_build1_loc (loc, code, type,
7866 fold_convert_loc (loc,
7867 TREE_TYPE (op0), arg01));
7868 if (! VOID_TYPE_P (TREE_TYPE (arg02)))
7869 arg02 = fold_build1_loc (loc, code, type,
7870 fold_convert_loc (loc,
7871 TREE_TYPE (op0), arg02));
7872 tem = fold_build3_loc (loc, COND_EXPR, type, TREE_OPERAND (arg0, 0),
7873 arg01, arg02);
7875 /* If this was a conversion, and all we did was to move it
7876 inside the COND_EXPR, bring it back out.  But leave it if
7877 it is a conversion from integer to integer and the
7878 result precision is no wider than a word since such a
7879 conversion is cheap and may be optimized away by combine,
7880 while it couldn't if it were outside the COND_EXPR. Then return
7881 so we don't get into an infinite recursion loop taking the
7882 conversion out and then back in. */
7884 if ((CONVERT_EXPR_CODE_P (code)
7885 || code == NON_LVALUE_EXPR)
7886 && TREE_CODE (tem) == COND_EXPR
7887 && TREE_CODE (TREE_OPERAND (tem, 1)) == code
7888 && TREE_CODE (TREE_OPERAND (tem, 2)) == code
7889 && ! VOID_TYPE_P (TREE_OPERAND (tem, 1))
7890 && ! VOID_TYPE_P (TREE_OPERAND (tem, 2))
7891 && (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 1), 0))
7892 == TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 2), 0)))
7893 && (! (INTEGRAL_TYPE_P (TREE_TYPE (tem))
7894 && (INTEGRAL_TYPE_P
7895 (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 1), 0))))
7896 && TYPE_PRECISION (TREE_TYPE (tem)) <= BITS_PER_WORD)
7897 || flag_syntax_only))
7898 tem = build1_loc (loc, code, type,
7899 build3 (COND_EXPR,
7900 TREE_TYPE (TREE_OPERAND
7901 (TREE_OPERAND (tem, 1), 0)),
7902 TREE_OPERAND (tem, 0),
7903 TREE_OPERAND (TREE_OPERAND (tem, 1), 0),
7904 TREE_OPERAND (TREE_OPERAND (tem, 2),
7905 0)));
7906 return tem;
7910 switch (code)
7912 case PAREN_EXPR:
7913 /* Re-association barriers around constants and other re-association
7914 barriers can be removed. */
7915 if (CONSTANT_CLASS_P (op0)
7916 || TREE_CODE (op0) == PAREN_EXPR)
7917 return fold_convert_loc (loc, type, op0);
7918 return NULL_TREE;
7920 case NON_LVALUE_EXPR:
7921 if (!maybe_lvalue_p (op0))
7922 return fold_convert_loc (loc, type, op0);
7923 return NULL_TREE;
7925 CASE_CONVERT:
7926 case FLOAT_EXPR:
7927 case FIX_TRUNC_EXPR:
7928 if (TREE_TYPE (op0) == type)
7929 return op0;
7931 if (COMPARISON_CLASS_P (op0))
7933 /* If we have (type) (a CMP b) and type is an integral type, return a
7934 new expression involving the new type.  Canonicalize
7935 (type) (a CMP b) to (a CMP b) ? (type) true : (type) false for a
7936 non-integral type.
7937 Do not fold the result, as that would not simplify further; also,
7938 folding again would result in infinite recursion. */
7939 if (TREE_CODE (type) == BOOLEAN_TYPE)
7940 return build2_loc (loc, TREE_CODE (op0), type,
7941 TREE_OPERAND (op0, 0),
7942 TREE_OPERAND (op0, 1));
7943 else if (!INTEGRAL_TYPE_P (type) && !VOID_TYPE_P (type)
7944 && TREE_CODE (type) != VECTOR_TYPE)
7945 return build3_loc (loc, COND_EXPR, type, op0,
7946 constant_boolean_node (true, type),
7947 constant_boolean_node (false, type));
7950 /* Handle cases of two conversions in a row. */
7951 if (CONVERT_EXPR_P (op0))
7953 tree inside_type = TREE_TYPE (TREE_OPERAND (op0, 0));
7954 tree inter_type = TREE_TYPE (op0);
7955 int inside_int = INTEGRAL_TYPE_P (inside_type);
7956 int inside_ptr = POINTER_TYPE_P (inside_type);
7957 int inside_float = FLOAT_TYPE_P (inside_type);
7958 int inside_vec = TREE_CODE (inside_type) == VECTOR_TYPE;
7959 unsigned int inside_prec = TYPE_PRECISION (inside_type);
7960 int inside_unsignedp = TYPE_UNSIGNED (inside_type);
7961 int inter_int = INTEGRAL_TYPE_P (inter_type);
7962 int inter_ptr = POINTER_TYPE_P (inter_type);
7963 int inter_float = FLOAT_TYPE_P (inter_type);
7964 int inter_vec = TREE_CODE (inter_type) == VECTOR_TYPE;
7965 unsigned int inter_prec = TYPE_PRECISION (inter_type);
7966 int inter_unsignedp = TYPE_UNSIGNED (inter_type);
7967 int final_int = INTEGRAL_TYPE_P (type);
7968 int final_ptr = POINTER_TYPE_P (type);
7969 int final_float = FLOAT_TYPE_P (type);
7970 int final_vec = TREE_CODE (type) == VECTOR_TYPE;
7971 unsigned int final_prec = TYPE_PRECISION (type);
7972 int final_unsignedp = TYPE_UNSIGNED (type);
7974 /* In addition to the cases of two conversions in a row
7975 handled below, if we are converting something to its own
7976 type via an object of identical or wider precision, neither
7977 conversion is needed. */
7978 if (TYPE_MAIN_VARIANT (inside_type) == TYPE_MAIN_VARIANT (type)
7979 && (((inter_int || inter_ptr) && final_int)
7980 || (inter_float && final_float))
7981 && inter_prec >= final_prec)
7982 return fold_build1_loc (loc, code, type, TREE_OPERAND (op0, 0));
7984 /* Likewise, if the intermediate and initial types are either both
7985 float or both integer, we don't need the middle conversion if the
7986 former is wider than the latter and doesn't change the signedness
7987 (for integers). Avoid this if the final type is a pointer since
7988 then we sometimes need the middle conversion. Likewise if the
7989 final type has a precision not equal to the size of its mode. */
7990 if (((inter_int && inside_int)
7991 || (inter_float && inside_float)
7992 || (inter_vec && inside_vec))
7993 && inter_prec >= inside_prec
7994 && (inter_float || inter_vec
7995 || inter_unsignedp == inside_unsignedp)
7996 && ! (final_prec != GET_MODE_PRECISION (TYPE_MODE (type))
7997 && TYPE_MODE (type) == TYPE_MODE (inter_type))
7998 && ! final_ptr
7999 && (! final_vec || inter_prec == inside_prec))
8000 return fold_build1_loc (loc, code, type, TREE_OPERAND (op0, 0));
8002 /* If we have a sign-extension of a zero-extended value, we can
8003 replace that by a single zero-extension. Likewise if the
8004 final conversion does not change precision we can drop the
8005 intermediate conversion. */
8006 if (inside_int && inter_int && final_int
8007 && ((inside_prec < inter_prec && inter_prec < final_prec
8008 && inside_unsignedp && !inter_unsignedp)
8009 || final_prec == inter_prec))
8010 return fold_build1_loc (loc, code, type, TREE_OPERAND (op0, 0));
8012 /* Two conversions in a row are not needed unless:
8013 - some conversion is floating-point (overstrict for now), or
8014 - some conversion is a vector (overstrict for now), or
8015 - the intermediate type is narrower than both initial and
8016 final, or
8017 - the intermediate type and innermost type differ in signedness,
8018 and the outermost type is wider than the intermediate, or
8019 - the initial type is a pointer type and the precisions of the
8020 intermediate and final types differ, or
8021 - the final type is a pointer type and the precisions of the
8022 initial and intermediate types differ. */
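/* Illustrative example: for a signed char c, (int) (long) c drops the
   intermediate widening to long, since all three types are integral,
   the intermediate type is not narrower than either end, and the
   signedness does not change.  */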
8023 if (! inside_float && ! inter_float && ! final_float
8024 && ! inside_vec && ! inter_vec && ! final_vec
8025 && (inter_prec >= inside_prec || inter_prec >= final_prec)
8026 && ! (inside_int && inter_int
8027 && inter_unsignedp != inside_unsignedp
8028 && inter_prec < final_prec)
8029 && ((inter_unsignedp && inter_prec > inside_prec)
8030 == (final_unsignedp && final_prec > inter_prec))
8031 && ! (inside_ptr && inter_prec != final_prec)
8032 && ! (final_ptr && inside_prec != inter_prec)
8033 && ! (final_prec != GET_MODE_PRECISION (TYPE_MODE (type))
8034 && TYPE_MODE (type) == TYPE_MODE (inter_type)))
8035 return fold_build1_loc (loc, code, type, TREE_OPERAND (op0, 0));
8038 /* Handle (T *)&A.B.C for A being of type T and B and C
8039 living at offset zero. This occurs frequently in
8040 C++ upcasting and then accessing the base. */
8041 if (TREE_CODE (op0) == ADDR_EXPR
8042 && POINTER_TYPE_P (type)
8043 && handled_component_p (TREE_OPERAND (op0, 0)))
8045 HOST_WIDE_INT bitsize, bitpos;
8046 tree offset;
8047 enum machine_mode mode;
8048 int unsignedp, volatilep;
8049 tree base = TREE_OPERAND (op0, 0);
8050 base = get_inner_reference (base, &bitsize, &bitpos, &offset,
8051 &mode, &unsignedp, &volatilep, false);
8052 /* If the reference was to a (constant) zero offset, we can use
8053 the address of the base if it has the same base type
8054 as the result type and the pointer type is unqualified. */
8055 if (! offset && bitpos == 0
8056 && (TYPE_MAIN_VARIANT (TREE_TYPE (type))
8057 == TYPE_MAIN_VARIANT (TREE_TYPE (base)))
8058 && TYPE_QUALS (type) == TYPE_UNQUALIFIED)
8059 return fold_convert_loc (loc, type,
8060 build_fold_addr_expr_loc (loc, base));
8063 if (TREE_CODE (op0) == MODIFY_EXPR
8064 && TREE_CONSTANT (TREE_OPERAND (op0, 1))
8065 /* Detect assigning a bitfield. */
8066 && !(TREE_CODE (TREE_OPERAND (op0, 0)) == COMPONENT_REF
8067 && DECL_BIT_FIELD
8068 (TREE_OPERAND (TREE_OPERAND (op0, 0), 1))))
8070 /* Don't leave an assignment inside a conversion
8071 unless assigning a bitfield. */
8072 tem = fold_build1_loc (loc, code, type, TREE_OPERAND (op0, 1));
8073 /* First do the assignment, then return converted constant. */
8074 tem = build2_loc (loc, COMPOUND_EXPR, TREE_TYPE (tem), op0, tem);
8075 TREE_NO_WARNING (tem) = 1;
8076 TREE_USED (tem) = 1;
8077 return tem;
8080 /* Convert (T)(x & c) into (T)x & (T)c, if c is an integer
8081 constant (if x has signed type, the sign bit cannot be set
8082 in c). This folds extension into the BIT_AND_EXPR.
8083 ??? We don't do it for BOOLEAN_TYPE or ENUMERAL_TYPE because they
8084 very likely don't have maximal range for their precision and this
8085 transformation effectively doesn't preserve non-maximal ranges. */
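/* Illustrative example: for an unsigned short x, (unsigned int)
   (x & 0xff) becomes (unsigned int) x & 0xff, folding the widening
   into the BIT_AND_EXPR.  */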
8086 if (TREE_CODE (type) == INTEGER_TYPE
8087 && TREE_CODE (op0) == BIT_AND_EXPR
8088 && TREE_CODE (TREE_OPERAND (op0, 1)) == INTEGER_CST)
8090 tree and_expr = op0;
8091 tree and0 = TREE_OPERAND (and_expr, 0);
8092 tree and1 = TREE_OPERAND (and_expr, 1);
8093 int change = 0;
8095 if (TYPE_UNSIGNED (TREE_TYPE (and_expr))
8096 || (TYPE_PRECISION (type)
8097 <= TYPE_PRECISION (TREE_TYPE (and_expr))))
8098 change = 1;
8099 else if (TYPE_PRECISION (TREE_TYPE (and1))
8100 <= HOST_BITS_PER_WIDE_INT
8101 && tree_fits_uhwi_p (and1))
8103 unsigned HOST_WIDE_INT cst;
8105 cst = tree_to_uhwi (and1);
8106 cst &= HOST_WIDE_INT_M1U
8107 << (TYPE_PRECISION (TREE_TYPE (and1)) - 1);
8108 change = (cst == 0);
8109 #ifdef LOAD_EXTEND_OP
8110 if (change
8111 && !flag_syntax_only
8112 && (LOAD_EXTEND_OP (TYPE_MODE (TREE_TYPE (and0)))
8113 == ZERO_EXTEND))
8115 tree uns = unsigned_type_for (TREE_TYPE (and0));
8116 and0 = fold_convert_loc (loc, uns, and0);
8117 and1 = fold_convert_loc (loc, uns, and1);
8119 #endif
8121 if (change)
8123 tem = force_fit_type (type, wi::to_widest (and1), 0,
8124 TREE_OVERFLOW (and1));
8125 return fold_build2_loc (loc, BIT_AND_EXPR, type,
8126 fold_convert_loc (loc, type, and0), tem);
8130 /* Convert (T1)(X p+ Y) into ((T1)X p+ Y), for pointer type,
8131 when one of the new casts will fold away. Conservatively we assume
8132 that this happens when X or Y is NOP_EXPR or Y is INTEGER_CST. */
8133 if (POINTER_TYPE_P (type)
8134 && TREE_CODE (arg0) == POINTER_PLUS_EXPR
8135 && (!TYPE_RESTRICT (type) || TYPE_RESTRICT (TREE_TYPE (arg0)))
8136 && (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
8137 || TREE_CODE (TREE_OPERAND (arg0, 0)) == NOP_EXPR
8138 || TREE_CODE (TREE_OPERAND (arg0, 1)) == NOP_EXPR))
8140 tree arg00 = TREE_OPERAND (arg0, 0);
8141 tree arg01 = TREE_OPERAND (arg0, 1);
8143 return fold_build_pointer_plus_loc
8144 (loc, fold_convert_loc (loc, type, arg00), arg01);
8147 /* Convert (T1)(~(T2)X) into ~(T1)X if T1 and T2 are integral types
8148 of the same precision, and X is an integer type not narrower than
8149 types T1 or T2, i.e. the cast (T2)X isn't an extension. */
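/* Illustrative example: for an int x, (int) ~(unsigned int) x folds
   to ~x, since int and unsigned int have the same precision and the
   inner cast is not an extension.  */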
8150 if (INTEGRAL_TYPE_P (type)
8151 && TREE_CODE (op0) == BIT_NOT_EXPR
8152 && INTEGRAL_TYPE_P (TREE_TYPE (op0))
8153 && CONVERT_EXPR_P (TREE_OPERAND (op0, 0))
8154 && TYPE_PRECISION (type) == TYPE_PRECISION (TREE_TYPE (op0)))
8156 tem = TREE_OPERAND (TREE_OPERAND (op0, 0), 0);
8157 if (INTEGRAL_TYPE_P (TREE_TYPE (tem))
8158 && TYPE_PRECISION (type) <= TYPE_PRECISION (TREE_TYPE (tem)))
8159 return fold_build1_loc (loc, BIT_NOT_EXPR, type,
8160 fold_convert_loc (loc, type, tem));
8163 /* Convert (T1)(X * Y) into (T1)X * (T1)Y if T1 is narrower than the
8164 type of X and Y (integer types only). */
8165 if (INTEGRAL_TYPE_P (type)
8166 && TREE_CODE (op0) == MULT_EXPR
8167 && INTEGRAL_TYPE_P (TREE_TYPE (op0))
8168 && TYPE_PRECISION (type) < TYPE_PRECISION (TREE_TYPE (op0)))
8170 /* Be careful not to introduce new overflows. */
8171 tree mult_type;
8172 if (TYPE_OVERFLOW_WRAPS (type))
8173 mult_type = type;
8174 else
8175 mult_type = unsigned_type_for (type);
8177 if (TYPE_PRECISION (mult_type) < TYPE_PRECISION (TREE_TYPE (op0)))
8179 tem = fold_build2_loc (loc, MULT_EXPR, mult_type,
8180 fold_convert_loc (loc, mult_type,
8181 TREE_OPERAND (op0, 0)),
8182 fold_convert_loc (loc, mult_type,
8183 TREE_OPERAND (op0, 1)));
8184 return fold_convert_loc (loc, type, tem);
8188 tem = fold_convert_const (code, type, arg0);
8189 return tem ? tem : NULL_TREE;
8191 case ADDR_SPACE_CONVERT_EXPR:
8192 if (integer_zerop (arg0))
8193 return fold_convert_const (code, type, arg0);
8194 return NULL_TREE;
8196 case FIXED_CONVERT_EXPR:
8197 tem = fold_convert_const (code, type, arg0);
8198 return tem ? tem : NULL_TREE;
8200 case VIEW_CONVERT_EXPR:
8201 if (TREE_TYPE (op0) == type)
8202 return op0;
8203 if (TREE_CODE (op0) == VIEW_CONVERT_EXPR)
8204 return fold_build1_loc (loc, VIEW_CONVERT_EXPR,
8205 type, TREE_OPERAND (op0, 0));
8206 if (TREE_CODE (op0) == MEM_REF)
8207 return fold_build2_loc (loc, MEM_REF, type,
8208 TREE_OPERAND (op0, 0), TREE_OPERAND (op0, 1));
8210 /* For integral conversions with the same precision or pointer
8211 conversions use a NOP_EXPR instead. */
8212 if ((INTEGRAL_TYPE_P (type)
8213 || POINTER_TYPE_P (type))
8214 && (INTEGRAL_TYPE_P (TREE_TYPE (op0))
8215 || POINTER_TYPE_P (TREE_TYPE (op0)))
8216 && TYPE_PRECISION (type) == TYPE_PRECISION (TREE_TYPE (op0)))
8217 return fold_convert_loc (loc, type, op0);
8219 /* Strip inner integral conversions that do not change the precision. */
8220 if (CONVERT_EXPR_P (op0)
8221 && (INTEGRAL_TYPE_P (TREE_TYPE (op0))
8222 || POINTER_TYPE_P (TREE_TYPE (op0)))
8223 && (INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (op0, 0)))
8224 || POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (op0, 0))))
8225 && (TYPE_PRECISION (TREE_TYPE (op0))
8226 == TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (op0, 0)))))
8227 return fold_build1_loc (loc, VIEW_CONVERT_EXPR,
8228 type, TREE_OPERAND (op0, 0));
8230 return fold_view_convert_expr (type, op0);
8232 case NEGATE_EXPR:
8233 tem = fold_negate_expr (loc, arg0);
8234 if (tem)
8235 return fold_convert_loc (loc, type, tem);
8236 return NULL_TREE;
8238 case ABS_EXPR:
8239 if (TREE_CODE (arg0) == INTEGER_CST || TREE_CODE (arg0) == REAL_CST)
8240 return fold_abs_const (arg0, type);
8241 else if (TREE_CODE (arg0) == NEGATE_EXPR)
8242 return fold_build1_loc (loc, ABS_EXPR, type, TREE_OPERAND (arg0, 0));
8243 /* Convert fabs((double)float) into (double)fabsf(float). */
8244 else if (TREE_CODE (arg0) == NOP_EXPR
8245 && TREE_CODE (type) == REAL_TYPE)
8247 tree targ0 = strip_float_extensions (arg0);
8248 if (targ0 != arg0)
8249 return fold_convert_loc (loc, type,
8250 fold_build1_loc (loc, ABS_EXPR,
8251 TREE_TYPE (targ0),
8252 targ0));
8254 /* ABS_EXPR<ABS_EXPR<x>> = ABS_EXPR<x> even if flag_wrapv is on. */
8255 else if (TREE_CODE (arg0) == ABS_EXPR)
8256 return arg0;
8257 else if (tree_expr_nonnegative_p (arg0))
8258 return arg0;
8260 /* Strip sign ops from argument. */
8261 if (TREE_CODE (type) == REAL_TYPE)
8263 tem = fold_strip_sign_ops (arg0);
8264 if (tem)
8265 return fold_build1_loc (loc, ABS_EXPR, type,
8266 fold_convert_loc (loc, type, tem));
8268 return NULL_TREE;
8270 case CONJ_EXPR:
8271 if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
8272 return fold_convert_loc (loc, type, arg0);
8273 if (TREE_CODE (arg0) == COMPLEX_EXPR)
8275 tree itype = TREE_TYPE (type);
8276 tree rpart = fold_convert_loc (loc, itype, TREE_OPERAND (arg0, 0));
8277 tree ipart = fold_convert_loc (loc, itype, TREE_OPERAND (arg0, 1));
8278 return fold_build2_loc (loc, COMPLEX_EXPR, type, rpart,
8279 negate_expr (ipart));
8281 if (TREE_CODE (arg0) == COMPLEX_CST)
8283 tree itype = TREE_TYPE (type);
8284 tree rpart = fold_convert_loc (loc, itype, TREE_REALPART (arg0));
8285 tree ipart = fold_convert_loc (loc, itype, TREE_IMAGPART (arg0));
8286 return build_complex (type, rpart, negate_expr (ipart));
8288 if (TREE_CODE (arg0) == CONJ_EXPR)
8289 return fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
8290 return NULL_TREE;
8292 case BIT_NOT_EXPR:
8293 if (TREE_CODE (arg0) == INTEGER_CST)
8294 return fold_not_const (arg0, type);
8295 else if (TREE_CODE (arg0) == BIT_NOT_EXPR)
8296 return fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
8297 /* Convert ~ (-A) to A - 1. */
8298 else if (INTEGRAL_TYPE_P (type) && TREE_CODE (arg0) == NEGATE_EXPR)
8299 return fold_build2_loc (loc, MINUS_EXPR, type,
8300 fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0)),
8301 build_int_cst (type, 1));
8302 /* Convert ~ (A - 1) or ~ (A + -1) to -A. */
8303 else if (INTEGRAL_TYPE_P (type)
8304 && ((TREE_CODE (arg0) == MINUS_EXPR
8305 && integer_onep (TREE_OPERAND (arg0, 1)))
8306 || (TREE_CODE (arg0) == PLUS_EXPR
8307 && integer_all_onesp (TREE_OPERAND (arg0, 1)))))
8308 return fold_build1_loc (loc, NEGATE_EXPR, type,
8309 fold_convert_loc (loc, type,
8310 TREE_OPERAND (arg0, 0)));
8311 /* Convert ~(X ^ Y) to ~X ^ Y or X ^ ~Y if ~X or ~Y simplify. */
8312 else if (TREE_CODE (arg0) == BIT_XOR_EXPR
8313 && (tem = fold_unary_loc (loc, BIT_NOT_EXPR, type,
8314 fold_convert_loc (loc, type,
8315 TREE_OPERAND (arg0, 0)))))
8316 return fold_build2_loc (loc, BIT_XOR_EXPR, type, tem,
8317 fold_convert_loc (loc, type,
8318 TREE_OPERAND (arg0, 1)));
8319 else if (TREE_CODE (arg0) == BIT_XOR_EXPR
8320 && (tem = fold_unary_loc (loc, BIT_NOT_EXPR, type,
8321 fold_convert_loc (loc, type,
8322 TREE_OPERAND (arg0, 1)))))
8323 return fold_build2_loc (loc, BIT_XOR_EXPR, type,
8324 fold_convert_loc (loc, type,
8325 TREE_OPERAND (arg0, 0)), tem);
8326 /* Perform BIT_NOT_EXPR on each element individually. */
8327 else if (TREE_CODE (arg0) == VECTOR_CST)
8329 tree *elements;
8330 tree elem;
8331 unsigned count = VECTOR_CST_NELTS (arg0), i;
8333 elements = XALLOCAVEC (tree, count);
8334 for (i = 0; i < count; i++)
8336 elem = VECTOR_CST_ELT (arg0, i);
8337 elem = fold_unary_loc (loc, BIT_NOT_EXPR, TREE_TYPE (type), elem);
8338 if (elem == NULL_TREE)
8339 break;
8340 elements[i] = elem;
8342 if (i == count)
8343 return build_vector (type, elements);
8345 else if (COMPARISON_CLASS_P (arg0)
8346 && (VECTOR_TYPE_P (type)
8347 || (INTEGRAL_TYPE_P (type) && TYPE_PRECISION (type) == 1)))
8349 tree op_type = TREE_TYPE (TREE_OPERAND (arg0, 0));
8350 enum tree_code subcode = invert_tree_comparison (TREE_CODE (arg0),
8351 HONOR_NANS (TYPE_MODE (op_type)));
8352 if (subcode != ERROR_MARK)
8353 return build2_loc (loc, subcode, type, TREE_OPERAND (arg0, 0),
8354 TREE_OPERAND (arg0, 1));
8358 return NULL_TREE;
8360 case TRUTH_NOT_EXPR:
8361 /* Note that the operand of this must be an int
8362 and its values must be 0 or 1.
8363 ("true" is a fixed value perhaps depending on the language,
8364 but we don't handle values other than 1 correctly yet.) */
8365 tem = fold_truth_not_expr (loc, arg0);
8366 if (!tem)
8367 return NULL_TREE;
8368 return fold_convert_loc (loc, type, tem);
8370 case REALPART_EXPR:
8371 if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
8372 return fold_convert_loc (loc, type, arg0);
8373 if (TREE_CODE (arg0) == COMPLEX_EXPR)
8374 return omit_one_operand_loc (loc, type, TREE_OPERAND (arg0, 0),
8375 TREE_OPERAND (arg0, 1));
8376 if (TREE_CODE (arg0) == COMPLEX_CST)
8377 return fold_convert_loc (loc, type, TREE_REALPART (arg0));
8378 if (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
8380 tree itype = TREE_TYPE (TREE_TYPE (arg0));
8381 tem = fold_build2_loc (loc, TREE_CODE (arg0), itype,
8382 fold_build1_loc (loc, REALPART_EXPR, itype,
8383 TREE_OPERAND (arg0, 0)),
8384 fold_build1_loc (loc, REALPART_EXPR, itype,
8385 TREE_OPERAND (arg0, 1)));
8386 return fold_convert_loc (loc, type, tem);
8388 if (TREE_CODE (arg0) == CONJ_EXPR)
8390 tree itype = TREE_TYPE (TREE_TYPE (arg0));
8391 tem = fold_build1_loc (loc, REALPART_EXPR, itype,
8392 TREE_OPERAND (arg0, 0));
8393 return fold_convert_loc (loc, type, tem);
8395 if (TREE_CODE (arg0) == CALL_EXPR)
8397 tree fn = get_callee_fndecl (arg0);
8398 if (fn && DECL_BUILT_IN_CLASS (fn) == BUILT_IN_NORMAL)
8399 switch (DECL_FUNCTION_CODE (fn))
8401 CASE_FLT_FN (BUILT_IN_CEXPI):
8402 fn = mathfn_built_in (type, BUILT_IN_COS);
8403 if (fn)
8404 return build_call_expr_loc (loc, fn, 1, CALL_EXPR_ARG (arg0, 0));
8405 break;
8407 default:
8408 break;
8411 return NULL_TREE;
8413 case IMAGPART_EXPR:
8414 if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
8415 return build_zero_cst (type);
8416 if (TREE_CODE (arg0) == COMPLEX_EXPR)
8417 return omit_one_operand_loc (loc, type, TREE_OPERAND (arg0, 1),
8418 TREE_OPERAND (arg0, 0));
8419 if (TREE_CODE (arg0) == COMPLEX_CST)
8420 return fold_convert_loc (loc, type, TREE_IMAGPART (arg0));
8421 if (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
8423 tree itype = TREE_TYPE (TREE_TYPE (arg0));
8424 tem = fold_build2_loc (loc, TREE_CODE (arg0), itype,
8425 fold_build1_loc (loc, IMAGPART_EXPR, itype,
8426 TREE_OPERAND (arg0, 0)),
8427 fold_build1_loc (loc, IMAGPART_EXPR, itype,
8428 TREE_OPERAND (arg0, 1)));
8429 return fold_convert_loc (loc, type, tem);
8431 if (TREE_CODE (arg0) == CONJ_EXPR)
8433 tree itype = TREE_TYPE (TREE_TYPE (arg0));
8434 tem = fold_build1_loc (loc, IMAGPART_EXPR, itype, TREE_OPERAND (arg0, 0));
8435 return fold_convert_loc (loc, type, negate_expr (tem));
8437 if (TREE_CODE (arg0) == CALL_EXPR)
8439 tree fn = get_callee_fndecl (arg0);
8440 if (fn && DECL_BUILT_IN_CLASS (fn) == BUILT_IN_NORMAL)
8441 switch (DECL_FUNCTION_CODE (fn))
8443 CASE_FLT_FN (BUILT_IN_CEXPI):
8444 fn = mathfn_built_in (type, BUILT_IN_SIN);
8445 if (fn)
8446 return build_call_expr_loc (loc, fn, 1, CALL_EXPR_ARG (arg0, 0));
8447 break;
8449 default:
8450 break;
8453 return NULL_TREE;
8455 case INDIRECT_REF:
8456 /* Fold *&X to X if X is an lvalue. */
8457 if (TREE_CODE (op0) == ADDR_EXPR)
8459 tree op00 = TREE_OPERAND (op0, 0);
8460 if ((TREE_CODE (op00) == VAR_DECL
8461 || TREE_CODE (op00) == PARM_DECL
8462 || TREE_CODE (op00) == RESULT_DECL)
8463 && !TREE_READONLY (op00))
8464 return op00;
8466 return NULL_TREE;
8468 case VEC_UNPACK_LO_EXPR:
8469 case VEC_UNPACK_HI_EXPR:
8470 case VEC_UNPACK_FLOAT_LO_EXPR:
8471 case VEC_UNPACK_FLOAT_HI_EXPR:
8473 unsigned int nelts = TYPE_VECTOR_SUBPARTS (type), i;
8474 tree *elts;
8475 enum tree_code subcode;
8477 gcc_assert (TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0)) == nelts * 2);
8478 if (TREE_CODE (arg0) != VECTOR_CST)
8479 return NULL_TREE;
8481 elts = XALLOCAVEC (tree, nelts * 2);
8482 if (!vec_cst_ctor_to_array (arg0, elts))
8483 return NULL_TREE;
8485 if ((!BYTES_BIG_ENDIAN) ^ (code == VEC_UNPACK_LO_EXPR
8486 || code == VEC_UNPACK_FLOAT_LO_EXPR))
8487 elts += nelts;
8489 if (code == VEC_UNPACK_LO_EXPR || code == VEC_UNPACK_HI_EXPR)
8490 subcode = NOP_EXPR;
8491 else
8492 subcode = FLOAT_EXPR;
8494 for (i = 0; i < nelts; i++)
8496 elts[i] = fold_convert_const (subcode, TREE_TYPE (type), elts[i]);
8497 if (elts[i] == NULL_TREE || !CONSTANT_CLASS_P (elts[i]))
8498 return NULL_TREE;
8501 return build_vector (type, elts);
8504 case REDUC_MIN_EXPR:
8505 case REDUC_MAX_EXPR:
8506 case REDUC_PLUS_EXPR:
8508 unsigned int nelts = TYPE_VECTOR_SUBPARTS (type), i;
8509 tree *elts;
8510 enum tree_code subcode;
8512 if (TREE_CODE (op0) != VECTOR_CST)
8513 return NULL_TREE;
8515 elts = XALLOCAVEC (tree, nelts);
8516 if (!vec_cst_ctor_to_array (op0, elts))
8517 return NULL_TREE;
8519 switch (code)
8521 case REDUC_MIN_EXPR: subcode = MIN_EXPR; break;
8522 case REDUC_MAX_EXPR: subcode = MAX_EXPR; break;
8523 case REDUC_PLUS_EXPR: subcode = PLUS_EXPR; break;
8524 default: gcc_unreachable ();
8527 for (i = 1; i < nelts; i++)
8529 elts[0] = const_binop (subcode, elts[0], elts[i]);
8530 if (elts[0] == NULL_TREE || !CONSTANT_CLASS_P (elts[0]))
8531 return NULL_TREE;
8532 elts[i] = build_zero_cst (TREE_TYPE (type));
8535 return build_vector (type, elts);
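/* Illustrative example: REDUC_PLUS_EXPR on the constant vector
   {1, 2, 3, 4} accumulates 10 into element 0 and zeroes the rest,
   yielding {10, 0, 0, 0}.  */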
8538 default:
8539 return NULL_TREE;
8540 } /* switch (code) */
8543 /* Given an expression tree EXP, set the EXPR_FOLDED flag, and if it is
8544 a nop, recursively set the EXPR_FOLDED flag of its operand. */
8546 static void
8547 set_expr_folded_flag (tree exp)
8549 // Cannot be called any more -- the bit is now used for another
8550 // purpose (wide-int support).
8551 gcc_assert (0);
8552 /* FIXME -- cannot set the flag on SSA_NAME, the flag overlaps
8553 with the version member. */
8554 if (TREE_CODE (exp) == SSA_NAME)
8555 return;
8557 EXPR_FOLDED (exp) = 1;
8559 /* If EXP is a nop (i.e. NON_LVALUE_EXPRs and NOP_EXPRs), we need to
8560 recursively set the EXPR_FOLDED flag of its operand because the
8561 expression will be stripped later. */
8562 while ((CONVERT_EXPR_P (exp)
8563 || TREE_CODE (exp) == NON_LVALUE_EXPR)
8564 && TREE_OPERAND (exp, 0) != error_mark_node)
8566 exp = TREE_OPERAND (exp, 0);
8567 /* FIXME -- cannot set the flag on SSA_NAME, the flag overlaps
8568 with the version member. */
8569 if (TREE_CODE (exp) != SSA_NAME)
8570 EXPR_FOLDED (exp) = 1;
8574 /* Fold a unary expression of code CODE and type TYPE with operand
8575 OP0. Return the folded expression if folding is successful.
8576 Otherwise, return NULL_TREE.
8577 This is a wrapper around the fold_unary_loc_1 function (which does
8578 the actual folding).  Setting the EXPR_FOLDED flag here is disabled,
8579 as that bit is now used for wide-int support. */
8581 tree
8582 fold_unary_loc (location_t loc, enum tree_code code, tree type, tree op0)
8584 tree tem = fold_unary_loc_1 (loc, code, type, op0);
8585 return tem;
8588 /* If the operation was a conversion do _not_ mark a resulting constant
8589 with TREE_OVERFLOW if the original constant was not. These conversions
8590 have implementation defined behavior and retaining the TREE_OVERFLOW
8591 flag here would confuse later passes such as VRP. */
8592 tree
8593 fold_unary_ignore_overflow_loc (location_t loc, enum tree_code code,
8594 tree type, tree op0)
8596 tree res = fold_unary_loc (loc, code, type, op0);
8597 if (res
8598 && TREE_CODE (res) == INTEGER_CST
8599 && TREE_CODE (op0) == INTEGER_CST
8600 && CONVERT_EXPR_CODE_P (code))
8601 TREE_OVERFLOW (res) = TREE_OVERFLOW (op0);
8603 return res;
8606 /* Fold a binary bitwise/truth expression of code CODE and type TYPE with
8607 operands OP0 and OP1. LOC is the location of the resulting expression.
8608 ARG0 and ARG1 are the NOP-stripped forms of OP0 and OP1.
8609 Return the folded expression if folding is successful. Otherwise,
8610 return NULL_TREE. */
8611 static tree
8612 fold_truth_andor (location_t loc, enum tree_code code, tree type,
8613 tree arg0, tree arg1, tree op0, tree op1)
8615 tree tem;
8617 /* We only do these simplifications if we are optimizing. */
8618 if (!optimize)
8619 return NULL_TREE;
8621 /* Check for things like (A || B) && (A || C). We can convert this
8622 to A || (B && C). Note that either operator can be any of the four
8623 truth and/or operations and the transformation will still be
8624 valid. Also note that we only care about order for the
8625 ANDIF and ORIF operators. If B contains side effects, this
8626 might change the truth-value of A. */
8627 if (TREE_CODE (arg0) == TREE_CODE (arg1)
8628 && (TREE_CODE (arg0) == TRUTH_ANDIF_EXPR
8629 || TREE_CODE (arg0) == TRUTH_ORIF_EXPR
8630 || TREE_CODE (arg0) == TRUTH_AND_EXPR
8631 || TREE_CODE (arg0) == TRUTH_OR_EXPR)
8632 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (arg0, 1)))
8634 tree a00 = TREE_OPERAND (arg0, 0);
8635 tree a01 = TREE_OPERAND (arg0, 1);
8636 tree a10 = TREE_OPERAND (arg1, 0);
8637 tree a11 = TREE_OPERAND (arg1, 1);
8638 int commutative = ((TREE_CODE (arg0) == TRUTH_OR_EXPR
8639 || TREE_CODE (arg0) == TRUTH_AND_EXPR)
8640 && (code == TRUTH_AND_EXPR
8641 || code == TRUTH_OR_EXPR));
8643 if (operand_equal_p (a00, a10, 0))
8644 return fold_build2_loc (loc, TREE_CODE (arg0), type, a00,
8645 fold_build2_loc (loc, code, type, a01, a11));
8646 else if (commutative && operand_equal_p (a00, a11, 0))
8647 return fold_build2_loc (loc, TREE_CODE (arg0), type, a00,
8648 fold_build2_loc (loc, code, type, a01, a10));
8649 else if (commutative && operand_equal_p (a01, a10, 0))
8650 return fold_build2_loc (loc, TREE_CODE (arg0), type, a01,
8651 fold_build2_loc (loc, code, type, a00, a11));
8653 /* This case is tricky because we must either have commutative
8654 operators or else A10 must not have side-effects. */
8656 else if ((commutative || ! TREE_SIDE_EFFECTS (a10))
8657 && operand_equal_p (a01, a11, 0))
8658 return fold_build2_loc (loc, TREE_CODE (arg0), type,
8659 fold_build2_loc (loc, code, type, a00, a10),
8660 a01);
8663 /* See if we can build a range comparison. */
8664 if (0 != (tem = fold_range_test (loc, code, type, op0, op1)))
8665 return tem;
8667 if ((code == TRUTH_ANDIF_EXPR && TREE_CODE (arg0) == TRUTH_ORIF_EXPR)
8668 || (code == TRUTH_ORIF_EXPR && TREE_CODE (arg0) == TRUTH_ANDIF_EXPR))
8670 tem = merge_truthop_with_opposite_arm (loc, arg0, arg1, true);
8671 if (tem)
8672 return fold_build2_loc (loc, code, type, tem, arg1);
8675 if ((code == TRUTH_ANDIF_EXPR && TREE_CODE (arg1) == TRUTH_ORIF_EXPR)
8676 || (code == TRUTH_ORIF_EXPR && TREE_CODE (arg1) == TRUTH_ANDIF_EXPR))
8678 tem = merge_truthop_with_opposite_arm (loc, arg1, arg0, false);
8679 if (tem)
8680 return fold_build2_loc (loc, code, type, arg0, tem);
8683 /* Check for the possibility of merging component references. If our
8684 lhs is another similar operation, try to merge its rhs with our
8685 rhs. Then try to merge our lhs and rhs. */
8686 if (TREE_CODE (arg0) == code
8687 && 0 != (tem = fold_truth_andor_1 (loc, code, type,
8688 TREE_OPERAND (arg0, 1), arg1)))
8689 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0), tem);
8691 if ((tem = fold_truth_andor_1 (loc, code, type, arg0, arg1)) != 0)
8692 return tem;
8694 if (LOGICAL_OP_NON_SHORT_CIRCUIT
8695 && (code == TRUTH_AND_EXPR
8696 || code == TRUTH_ANDIF_EXPR
8697 || code == TRUTH_OR_EXPR
8698 || code == TRUTH_ORIF_EXPR))
8700 enum tree_code ncode, icode;
8702 ncode = (code == TRUTH_ANDIF_EXPR || code == TRUTH_AND_EXPR)
8703 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR;
8704 icode = ncode == TRUTH_AND_EXPR ? TRUTH_ANDIF_EXPR : TRUTH_ORIF_EXPR;
8706 /* Transform ((A AND-IF B) AND[-IF] C) into (A AND-IF (B AND C)),
8707 or ((A OR-IF B) OR[-IF] C) into (A OR-IF (B OR C)).
8708 We don't want to pack more than two leaves into a non-IF AND/OR
8709 expression.
8710 If the tree code of the left-hand operand isn't an AND/OR-IF code
8711 and isn't equal to IF-CODE, then we don't want to add the
8712 right-hand operand.  If the inner right-hand side of the left-hand
8713 operand has side effects, or isn't simple, then we can't add to it,
8714 as otherwise we might destroy the if-sequence. */
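/* Illustrative example: with simple, trap-free b and c,
   (a && b) && c becomes a && (b AND c), where the inner AND is the
   non-short-circuit TRUTH_AND_EXPR.  */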
8715 if (TREE_CODE (arg0) == icode
8716 && simple_operand_p_2 (arg1)
8717 /* Needed for sequence points to handle trappings, and
8718 side-effects. */
8719 && simple_operand_p_2 (TREE_OPERAND (arg0, 1)))
8721 tem = fold_build2_loc (loc, ncode, type, TREE_OPERAND (arg0, 1),
8722 arg1);
8723 return fold_build2_loc (loc, icode, type, TREE_OPERAND (arg0, 0),
8724 tem);
8726 /* Same as above but for (A AND[-IF] (B AND-IF C)) -> ((A AND B) AND-IF C),
8727 or (A OR[-IF] (B OR-IF C)) -> ((A OR B) OR-IF C). */
8728 else if (TREE_CODE (arg1) == icode
8729 && simple_operand_p_2 (arg0)
8730 /* Needed for sequence points to handle trappings, and
8731 side-effects. */
8732 && simple_operand_p_2 (TREE_OPERAND (arg1, 0)))
8734 tem = fold_build2_loc (loc, ncode, type,
8735 arg0, TREE_OPERAND (arg1, 0));
8736 return fold_build2_loc (loc, icode, type, tem,
8737 TREE_OPERAND (arg1, 1));
8739 /* Transform (A AND-IF B) into (A AND B), or (A OR-IF B)
8740 into (A OR B).
8741 For sequence point consistency, we need to check for trapping
8742 and side effects. */
8743 else if (code == icode && simple_operand_p_2 (arg0)
8744 && simple_operand_p_2 (arg1))
8745 return fold_build2_loc (loc, ncode, type, arg0, arg1);
8748 return NULL_TREE;
8751 /* Fold a binary expression of code CODE and type TYPE with operands
8752 OP0 and OP1, containing either a MIN-MAX or a MAX-MIN combination.
8753 Return the folded expression if folding is successful. Otherwise,
8754 return NULL_TREE. */
8756 static tree
8757 fold_minmax (location_t loc, enum tree_code code, tree type, tree op0, tree op1)
8759 enum tree_code compl_code;
8761 if (code == MIN_EXPR)
8762 compl_code = MAX_EXPR;
8763 else if (code == MAX_EXPR)
8764 compl_code = MIN_EXPR;
8765 else
8766 gcc_unreachable ();
8768 /* MIN (MAX (a, b), b) == b. */
8769 if (TREE_CODE (op0) == compl_code
8770 && operand_equal_p (TREE_OPERAND (op0, 1), op1, 0))
8771 return omit_one_operand_loc (loc, type, op1, TREE_OPERAND (op0, 0));
8773 /* MIN (MAX (b, a), b) == b. */
8774 if (TREE_CODE (op0) == compl_code
8775 && operand_equal_p (TREE_OPERAND (op0, 0), op1, 0)
8776 && reorder_operands_p (TREE_OPERAND (op0, 1), op1))
8777 return omit_one_operand_loc (loc, type, op1, TREE_OPERAND (op0, 1));
8779 /* MIN (a, MAX (a, b)) == a. */
8780 if (TREE_CODE (op1) == compl_code
8781 && operand_equal_p (op0, TREE_OPERAND (op1, 0), 0)
8782 && reorder_operands_p (op0, TREE_OPERAND (op1, 1)))
8783 return omit_one_operand_loc (loc, type, op0, TREE_OPERAND (op1, 1));
8785 /* MIN (a, MAX (b, a)) == a. */
8786 if (TREE_CODE (op1) == compl_code
8787 && operand_equal_p (op0, TREE_OPERAND (op1, 1), 0)
8788 && reorder_operands_p (op0, TREE_OPERAND (op1, 0)))
8789 return omit_one_operand_loc (loc, type, op0, TREE_OPERAND (op1, 0));
8791 return NULL_TREE;
8794 /* Helper that tries to canonicalize the comparison ARG0 CODE ARG1
8795 by changing CODE to reduce the magnitude of constants involved in
8796 ARG0 of the comparison.
8797 Returns a canonicalized comparison tree if a simplification was
8798 possible, otherwise returns NULL_TREE.
8799 Set *STRICT_OVERFLOW_P to true if the canonicalization is only
8800 valid if signed overflow is undefined. */
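/* Illustrative examples: 3 <= x becomes 2 < x (and is then swapped to
   x > 2), and x + 2 > y becomes x + 1 >= y, the latter being valid
   only when signed overflow is undefined.  */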
8802 static tree
8803 maybe_canonicalize_comparison_1 (location_t loc, enum tree_code code, tree type,
8804 tree arg0, tree arg1,
8805 bool *strict_overflow_p)
8807 enum tree_code code0 = TREE_CODE (arg0);
8808 tree t, cst0 = NULL_TREE;
8809 int sgn0;
8810 bool swap = false;
8812 /* Match A +- CST code arg1 and CST code arg1. We can change the
8813 first form only if overflow is undefined. */
8814 if (!((TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0))
8815 /* In principle pointers also have undefined overflow behavior,
8816 but that causes problems elsewhere. */
8817 && !POINTER_TYPE_P (TREE_TYPE (arg0))
8818 && (code0 == MINUS_EXPR
8819 || code0 == PLUS_EXPR)
8820 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
8821 || code0 == INTEGER_CST))
8822 return NULL_TREE;
8824 /* Identify the constant in arg0 and its sign. */
8825 if (code0 == INTEGER_CST)
8826 cst0 = arg0;
8827 else
8828 cst0 = TREE_OPERAND (arg0, 1);
8829 sgn0 = tree_int_cst_sgn (cst0);
8831 /* Overflowed constants and zero will cause problems. */
8832 if (integer_zerop (cst0)
8833 || TREE_OVERFLOW (cst0))
8834 return NULL_TREE;
8836 /* See if we can reduce the magnitude of the constant in
8837 arg0 by changing the comparison code. */
8838 if (code0 == INTEGER_CST)
8840 /* CST <= arg1 -> CST-1 < arg1. */
8841 if (code == LE_EXPR && sgn0 == 1)
8842 code = LT_EXPR;
8843 /* -CST < arg1 -> -CST-1 <= arg1. */
8844 else if (code == LT_EXPR && sgn0 == -1)
8845 code = LE_EXPR;
8846 /* CST > arg1 -> CST-1 >= arg1. */
8847 else if (code == GT_EXPR && sgn0 == 1)
8848 code = GE_EXPR;
8849 /* -CST >= arg1 -> -CST-1 > arg1. */
8850 else if (code == GE_EXPR && sgn0 == -1)
8851 code = GT_EXPR;
8852 else
8853 return NULL_TREE;
8854 /* arg1 code' CST' might be more canonical. */
8855 swap = true;
8857 else
8859 /* A - CST < arg1 -> A - CST-1 <= arg1. */
8860 if (code == LT_EXPR
8861 && code0 == ((sgn0 == -1) ? PLUS_EXPR : MINUS_EXPR))
8862 code = LE_EXPR;
8863 /* A + CST > arg1 -> A + CST-1 >= arg1. */
8864 else if (code == GT_EXPR
8865 && code0 == ((sgn0 == -1) ? MINUS_EXPR : PLUS_EXPR))
8866 code = GE_EXPR;
8867 /* A + CST <= arg1 -> A + CST-1 < arg1. */
8868 else if (code == LE_EXPR
8869 && code0 == ((sgn0 == -1) ? MINUS_EXPR : PLUS_EXPR))
8870 code = LT_EXPR;
8871 /* A - CST >= arg1 -> A - CST-1 > arg1. */
8872 else if (code == GE_EXPR
8873 && code0 == ((sgn0 == -1) ? PLUS_EXPR : MINUS_EXPR))
8874 code = GT_EXPR;
8875 else
8876 return NULL_TREE;
8877 *strict_overflow_p = true;
8880 /* Now build the constant reduced in magnitude.  But not if that
8881 would produce one outside of its type's range. */
8882 if (INTEGRAL_TYPE_P (TREE_TYPE (cst0))
8883 && ((sgn0 == 1
8884 && TYPE_MIN_VALUE (TREE_TYPE (cst0))
8885 && tree_int_cst_equal (cst0, TYPE_MIN_VALUE (TREE_TYPE (cst0))))
8886 || (sgn0 == -1
8887 && TYPE_MAX_VALUE (TREE_TYPE (cst0))
8888 && tree_int_cst_equal (cst0, TYPE_MAX_VALUE (TREE_TYPE (cst0))))))
8889 /* We cannot swap the comparison here as that would cause us to
8890 endlessly recurse. */
8891 return NULL_TREE;
8893 t = int_const_binop (sgn0 == -1 ? PLUS_EXPR : MINUS_EXPR,
8894 cst0, build_int_cst (TREE_TYPE (cst0), 1));
8895 if (code0 != INTEGER_CST)
8896 t = fold_build2_loc (loc, code0, TREE_TYPE (arg0), TREE_OPERAND (arg0, 0), t);
8897 t = fold_convert (TREE_TYPE (arg1), t);
8899 /* If swapping might yield a more canonical form, do so. */
8900 if (swap)
8901 return fold_build2_loc (loc, swap_tree_comparison (code), type, arg1, t);
8902 else
8903 return fold_build2_loc (loc, code, type, t, arg1);
8906 /* Canonicalize the comparison ARG0 CODE ARG1 with type TYPE with undefined
8907 overflow further. Try to decrease the magnitude of constants involved
8908 by changing LE_EXPR and GE_EXPR to LT_EXPR and GT_EXPR or vice versa
8909 and put sole constants at the second argument position.
8910 Returns the canonicalized tree if changed, otherwise NULL_TREE. */
8912 static tree
8913 maybe_canonicalize_comparison (location_t loc, enum tree_code code, tree type,
8914 tree arg0, tree arg1)
8916 tree t;
8917 bool strict_overflow_p;
8918 const char * const warnmsg = G_("assuming signed overflow does not occur "
8919 "when reducing constant in comparison");
8921 /* Try canonicalization by simplifying arg0. */
8922 strict_overflow_p = false;
8923 t = maybe_canonicalize_comparison_1 (loc, code, type, arg0, arg1,
8924 &strict_overflow_p);
8925 if (t)
8927 if (strict_overflow_p)
8928 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_MAGNITUDE);
8929 return t;
8932 /* Try canonicalization by simplifying arg1 using the swapped
8933 comparison. */
8934 code = swap_tree_comparison (code);
8935 strict_overflow_p = false;
8936 t = maybe_canonicalize_comparison_1 (loc, code, type, arg1, arg0,
8937 &strict_overflow_p);
8938 if (t && strict_overflow_p)
8939 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_MAGNITUDE);
8940 return t;
8943 /* Return whether BASE + OFFSET + BITPOS may wrap around the address
8944 space. This is used to avoid issuing overflow warnings for
8945 expressions like &p->x which cannot wrap. */
8947 static bool
8948 pointer_may_wrap_p (tree base, tree offset, HOST_WIDE_INT bitpos)
8950 if (!POINTER_TYPE_P (TREE_TYPE (base)))
8951 return true;
8953 if (bitpos < 0)
8954 return true;
8956 wide_int wi_offset;
8957 int precision = TYPE_PRECISION (TREE_TYPE (base));
8958 if (offset == NULL_TREE)
8959 wi_offset = wi::zero (precision);
8960 else if (TREE_CODE (offset) != INTEGER_CST || TREE_OVERFLOW (offset))
8961 return true;
8962 else
8963 wi_offset = offset;
8965 bool overflow;
8966 wide_int units = wi::shwi (bitpos / BITS_PER_UNIT, precision);
8967 wide_int total = wi::add (wi_offset, units, UNSIGNED, &overflow);
8968 if (overflow)
8969 return true;
8971 if (!wi::fits_uhwi_p (total))
8972 return true;
8974 HOST_WIDE_INT size = int_size_in_bytes (TREE_TYPE (TREE_TYPE (base)));
8975 if (size <= 0)
8976 return true;
8978 /* We can do slightly better for SIZE if we have an ADDR_EXPR of an
8979 array. */
8980 if (TREE_CODE (base) == ADDR_EXPR)
8982 HOST_WIDE_INT base_size;
8984 base_size = int_size_in_bytes (TREE_TYPE (TREE_OPERAND (base, 0)));
8985 if (base_size > 0 && size < base_size)
8986 size = base_size;
8989 return total.to_uhwi () > (unsigned HOST_WIDE_INT) size;
8992 /* Return the least significant HOST_BITS_PER_WIDE_INT bits of T,
8993 an INTEGER_CST of sizetype kind. This makes sure to properly
8994 sign-extend the constant. */
8996 static HOST_WIDE_INT
8997 size_low_cst (const_tree t)
8999 HOST_WIDE_INT w = TREE_INT_CST_ELT (t, 0);
9000 int prec = TYPE_PRECISION (TREE_TYPE (t));
9001 if (prec < HOST_BITS_PER_WIDE_INT)
9002 return sext_hwi (w, prec);
9003 return w;
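/* For instance, for an offset type of 16-bit precision the constant
   0xffff comes back as the HOST_WIDE_INT -1 rather than 65535.  */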
9006 /* Subroutine of fold_binary. This routine performs all of the
9007 transformations that are common to the equality/inequality
9008 operators (EQ_EXPR and NE_EXPR) and the ordering operators
9009 (LT_EXPR, LE_EXPR, GE_EXPR and GT_EXPR). Callers other than
9010 fold_binary itself should use fold_binary instead. Fold a comparison with
9011 tree code CODE and type TYPE with operands OP0 and OP1. Return
9012 the folded comparison or NULL_TREE. */
9014 static tree
9015 fold_comparison (location_t loc, enum tree_code code, tree type,
9016 tree op0, tree op1)
9018 const bool equality_code = (code == EQ_EXPR || code == NE_EXPR);
9019 tree arg0, arg1, tem;
9021 arg0 = op0;
9022 arg1 = op1;
9024 STRIP_SIGN_NOPS (arg0);
9025 STRIP_SIGN_NOPS (arg1);
9027 tem = fold_relational_const (code, type, arg0, arg1);
9028 if (tem != NULL_TREE)
9029 return tem;
9031 /* If one arg is a real or integer constant, put it last. */
9032 if (tree_swap_operands_p (arg0, arg1, true))
9033 return fold_build2_loc (loc, swap_tree_comparison (code), type, op1, op0);
9035 /* Transform comparisons of the form X +- C1 CMP C2 to X CMP C2 -+ C1. */
9036 if ((TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
9037 && (equality_code || TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0)))
9038 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
9039 && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1))
9040 && TREE_CODE (arg1) == INTEGER_CST
9041 && !TREE_OVERFLOW (arg1))
9043 const enum tree_code
9044 reverse_op = TREE_CODE (arg0) == PLUS_EXPR ? MINUS_EXPR : PLUS_EXPR;
9045 tree const1 = TREE_OPERAND (arg0, 1);
9046 tree const2 = fold_convert_loc (loc, TREE_TYPE (const1), arg1);
9047 tree variable = TREE_OPERAND (arg0, 0);
9048 tree new_const = int_const_binop (reverse_op, const2, const1);
9050 /* If the constant operation overflowed this can be
9051 simplified as a comparison against INT_MAX/INT_MIN. */
9052 if (TREE_OVERFLOW (new_const))
9054 int const1_sgn = tree_int_cst_sgn (const1);
9055 enum tree_code code2 = code;
9057 /* Get the sign of the constant on the lhs if the
9058 operation were VARIABLE + CONST1. */
9059 if (TREE_CODE (arg0) == MINUS_EXPR)
9060 const1_sgn = -const1_sgn;
9062 /* The sign of the constant determines if we overflowed
9063 INT_MAX (const1_sgn == -1) or INT_MIN (const1_sgn == 1).
9064 Canonicalize to the INT_MIN overflow by swapping the comparison
9065 if necessary. */
9066 if (const1_sgn == -1)
9067 code2 = swap_tree_comparison (code);
9069 /* We now can look at the canonicalized case
9070 VARIABLE + 1 CODE2 INT_MIN
9071 and decide on the result. */
9072 switch (code2)
9074 case EQ_EXPR:
9075 case LT_EXPR:
9076 case LE_EXPR:
9077 return
9078 omit_one_operand_loc (loc, type, boolean_false_node, variable);
9080 case NE_EXPR:
9081 case GE_EXPR:
9082 case GT_EXPR:
9083 return
9084 omit_one_operand_loc (loc, type, boolean_true_node, variable);
9086 default:
9087 gcc_unreachable ();
9090 else
9092 if (!equality_code)
9093 fold_overflow_warning ("assuming signed overflow does not occur "
9094 "when changing X +- C1 cmp C2 to "
9095 "X cmp C2 -+ C1",
9096 WARN_STRICT_OVERFLOW_COMPARISON);
9097 return fold_build2_loc (loc, code, type, variable, new_const);
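/* For instance, "X + 10 < 30" becomes "X < 20" here, while
   "X + 1 <= INT_MIN" hits the overflow path above and folds to
   false outright, since X + 1 cannot reach INT_MIN without the
   addition itself overflowing.  */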
9101 /* Transform comparisons of the form X - Y CMP 0 to X CMP Y. */
9102 if (TREE_CODE (arg0) == MINUS_EXPR
9103 && equality_code
9104 && integer_zerop (arg1))
9106 /* ??? The transformation is valid for the other operators if overflow
9107 is undefined for the type, but performing it here badly interacts
9108 with the transformation in fold_cond_expr_with_comparison which
9109 attempts to synthesize ABS_EXPR. */
9110 if (!equality_code)
9111 fold_overflow_warning ("assuming signed overflow does not occur "
9112 "when changing X - Y cmp 0 to X cmp Y",
9113 WARN_STRICT_OVERFLOW_COMPARISON);
9114 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0),
9115 TREE_OPERAND (arg0, 1));
9118 /* For comparisons of pointers we can decompose it to a compile time
9119 comparison of the base objects and the offsets into the object.
9120 This requires at least one operand being an ADDR_EXPR or a
9121 POINTER_PLUS_EXPR to do more than the operand_equal_p test below. */
9122 if (POINTER_TYPE_P (TREE_TYPE (arg0))
9123 && (TREE_CODE (arg0) == ADDR_EXPR
9124 || TREE_CODE (arg1) == ADDR_EXPR
9125 || TREE_CODE (arg0) == POINTER_PLUS_EXPR
9126 || TREE_CODE (arg1) == POINTER_PLUS_EXPR))
9128 tree base0, base1, offset0 = NULL_TREE, offset1 = NULL_TREE;
9129 HOST_WIDE_INT bitsize, bitpos0 = 0, bitpos1 = 0;
9130 enum machine_mode mode;
9131 int volatilep, unsignedp;
9132 bool indirect_base0 = false, indirect_base1 = false;
9134 /* Get base and offset for the access. Strip ADDR_EXPR for
9135 get_inner_reference, but put it back by stripping INDIRECT_REF
9136 off the base object if possible. indirect_baseN will be true
9137 if baseN is not an address but refers to the object itself. */
9138 base0 = arg0;
9139 if (TREE_CODE (arg0) == ADDR_EXPR)
9141 base0 = get_inner_reference (TREE_OPERAND (arg0, 0),
9142 &bitsize, &bitpos0, &offset0, &mode,
9143 &unsignedp, &volatilep, false);
9144 if (TREE_CODE (base0) == INDIRECT_REF)
9145 base0 = TREE_OPERAND (base0, 0);
9146 else
9147 indirect_base0 = true;
9149 else if (TREE_CODE (arg0) == POINTER_PLUS_EXPR)
9151 base0 = TREE_OPERAND (arg0, 0);
9152 STRIP_SIGN_NOPS (base0);
9153 if (TREE_CODE (base0) == ADDR_EXPR)
9155 base0 = TREE_OPERAND (base0, 0);
9156 indirect_base0 = true;
9158 offset0 = TREE_OPERAND (arg0, 1);
9159 if (tree_fits_shwi_p (offset0))
9161 HOST_WIDE_INT off = size_low_cst (offset0);
9162 if ((HOST_WIDE_INT) (((unsigned HOST_WIDE_INT) off)
9163 * BITS_PER_UNIT)
9164 / BITS_PER_UNIT == (HOST_WIDE_INT) off)
9166 bitpos0 = off * BITS_PER_UNIT;
9167 offset0 = NULL_TREE;
9172 base1 = arg1;
9173 if (TREE_CODE (arg1) == ADDR_EXPR)
9175 base1 = get_inner_reference (TREE_OPERAND (arg1, 0),
9176 &bitsize, &bitpos1, &offset1, &mode,
9177 &unsignedp, &volatilep, false);
9178 if (TREE_CODE (base1) == INDIRECT_REF)
9179 base1 = TREE_OPERAND (base1, 0);
9180 else
9181 indirect_base1 = true;
9183 else if (TREE_CODE (arg1) == POINTER_PLUS_EXPR)
9185 base1 = TREE_OPERAND (arg1, 0);
9186 STRIP_SIGN_NOPS (base1);
9187 if (TREE_CODE (base1) == ADDR_EXPR)
9189 base1 = TREE_OPERAND (base1, 0);
9190 indirect_base1 = true;
9192 offset1 = TREE_OPERAND (arg1, 1);
9193 if (tree_fits_shwi_p (offset1))
9195 HOST_WIDE_INT off = size_low_cst (offset1);
9196 if ((HOST_WIDE_INT) (((unsigned HOST_WIDE_INT) off)
9197 * BITS_PER_UNIT)
9198 / BITS_PER_UNIT == (HOST_WIDE_INT) off)
9200 bitpos1 = off * BITS_PER_UNIT;
9201 offset1 = NULL_TREE;
9206 /* A local variable can never be pointed to by
9207 the default SSA name of an incoming parameter. */
9208 if ((TREE_CODE (arg0) == ADDR_EXPR
9209 && indirect_base0
9210 && TREE_CODE (base0) == VAR_DECL
9211 && auto_var_in_fn_p (base0, current_function_decl)
9212 && !indirect_base1
9213 && TREE_CODE (base1) == SSA_NAME
9214 && SSA_NAME_IS_DEFAULT_DEF (base1)
9215 && TREE_CODE (SSA_NAME_VAR (base1)) == PARM_DECL)
9216 || (TREE_CODE (arg1) == ADDR_EXPR
9217 && indirect_base1
9218 && TREE_CODE (base1) == VAR_DECL
9219 && auto_var_in_fn_p (base1, current_function_decl)
9220 && !indirect_base0
9221 && TREE_CODE (base0) == SSA_NAME
9222 && SSA_NAME_IS_DEFAULT_DEF (base0)
9223 && TREE_CODE (SSA_NAME_VAR (base0)) == PARM_DECL))
9225 if (code == NE_EXPR)
9226 return constant_boolean_node (1, type);
9227 else if (code == EQ_EXPR)
9228 return constant_boolean_node (0, type);
9230 /* If we have equivalent bases we might be able to simplify. */
9231 else if (indirect_base0 == indirect_base1
9232 && operand_equal_p (base0, base1, 0))
9234 /* We can fold this expression to a constant if the non-constant
9235 offset parts are equal. */
9236 if ((offset0 == offset1
9237 || (offset0 && offset1
9238 && operand_equal_p (offset0, offset1, 0)))
9239 && (code == EQ_EXPR
9240 || code == NE_EXPR
9241 || (indirect_base0 && DECL_P (base0))
9242 || POINTER_TYPE_OVERFLOW_UNDEFINED))
9245 if (!equality_code
9246 && bitpos0 != bitpos1
9247 && (pointer_may_wrap_p (base0, offset0, bitpos0)
9248 || pointer_may_wrap_p (base1, offset1, bitpos1)))
9249 fold_overflow_warning (("assuming pointer wraparound does not "
9250 "occur when comparing P +- C1 with "
9251 "P +- C2"),
9252 WARN_STRICT_OVERFLOW_CONDITIONAL);
9254 switch (code)
9256 case EQ_EXPR:
9257 return constant_boolean_node (bitpos0 == bitpos1, type);
9258 case NE_EXPR:
9259 return constant_boolean_node (bitpos0 != bitpos1, type);
9260 case LT_EXPR:
9261 return constant_boolean_node (bitpos0 < bitpos1, type);
9262 case LE_EXPR:
9263 return constant_boolean_node (bitpos0 <= bitpos1, type);
9264 case GE_EXPR:
9265 return constant_boolean_node (bitpos0 >= bitpos1, type);
9266 case GT_EXPR:
9267 return constant_boolean_node (bitpos0 > bitpos1, type);
9268 default:;
9271 /* We can simplify the comparison to a comparison of the variable
9272 offset parts if the constant offset parts are equal.
9273 Be careful to use signed sizetype here because otherwise we
9274 mess with array offsets in the wrong way. This is possible
9275 because pointer arithmetic is restricted to remain within an
9276 object and overflow on pointer differences is undefined as of
9277 6.5.6/8 and /9 with respect to the signed ptrdiff_t. */
9278 else if (bitpos0 == bitpos1
9279 && (equality_code
9280 || (indirect_base0 && DECL_P (base0))
9281 || POINTER_TYPE_OVERFLOW_UNDEFINED))
9283 /* By converting to signed sizetype we cover middle-end pointer
9284 arithmetic which operates on unsigned pointer types of size
9285 type size and ARRAY_REF offsets which are properly sign or
9286 zero extended from their type in case it is narrower than
9287 sizetype. */
9288 if (offset0 == NULL_TREE)
9289 offset0 = build_int_cst (ssizetype, 0);
9290 else
9291 offset0 = fold_convert_loc (loc, ssizetype, offset0);
9292 if (offset1 == NULL_TREE)
9293 offset1 = build_int_cst (ssizetype, 0);
9294 else
9295 offset1 = fold_convert_loc (loc, ssizetype, offset1);
9297 if (!equality_code
9298 && (pointer_may_wrap_p (base0, offset0, bitpos0)
9299 || pointer_may_wrap_p (base1, offset1, bitpos1)))
9300 fold_overflow_warning (("assuming pointer wraparound does not "
9301 "occur when comparing P +- C1 with "
9302 "P +- C2"),
9303 WARN_STRICT_OVERFLOW_COMPARISON);
9305 return fold_build2_loc (loc, code, type, offset0, offset1);
9308 /* For non-equal bases we can simplify if they are addresses
9309 of local binding decls or constants. */
9310 else if (indirect_base0 && indirect_base1
9311 /* We know that !operand_equal_p (base0, base1, 0)
9312 because the if condition was false. But make
9313 sure two decls are not the same. */
9314 && base0 != base1
9315 && TREE_CODE (arg0) == ADDR_EXPR
9316 && TREE_CODE (arg1) == ADDR_EXPR
9317 && (((TREE_CODE (base0) == VAR_DECL
9318 || TREE_CODE (base0) == PARM_DECL)
9319 && (targetm.binds_local_p (base0)
9320 || CONSTANT_CLASS_P (base1)))
9321 || CONSTANT_CLASS_P (base0))
9322 && (((TREE_CODE (base1) == VAR_DECL
9323 || TREE_CODE (base1) == PARM_DECL)
9324 && (targetm.binds_local_p (base1)
9325 || CONSTANT_CLASS_P (base0)))
9326 || CONSTANT_CLASS_P (base1)))
9328 if (code == EQ_EXPR)
9329 return omit_two_operands_loc (loc, type, boolean_false_node,
9330 arg0, arg1);
9331 else if (code == NE_EXPR)
9332 return omit_two_operands_loc (loc, type, boolean_true_node,
9333 arg0, arg1);
9335 /* For equal offsets we can simplify to a comparison of the
9336 base addresses. */
9337 else if (bitpos0 == bitpos1
9338 && (indirect_base0
9339 ? base0 != TREE_OPERAND (arg0, 0) : base0 != arg0)
9340 && (indirect_base1
9341 ? base1 != TREE_OPERAND (arg1, 0) : base1 != arg1)
9342 && ((offset0 == offset1)
9343 || (offset0 && offset1
9344 && operand_equal_p (offset0, offset1, 0))))
9346 if (indirect_base0)
9347 base0 = build_fold_addr_expr_loc (loc, base0);
9348 if (indirect_base1)
9349 base1 = build_fold_addr_expr_loc (loc, base1);
9350 return fold_build2_loc (loc, code, type, base0, base1);
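/* For instance, "&a[4] < &a[6]" folds to true via the bitpos
   comparison above, "&a[i] == &a[j]" reduces to a comparison of the
   variable offsets, and comparing the addresses of two distinct
   local variables for equality folds to false.  */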
9354 /* Transform comparisons of the form X +- C1 CMP Y +- C2 to
9355 X CMP Y +- C2 +- C1 for signed X, Y. This is valid if
9356 the resulting offset is smaller in absolute value than the
9357 original one and has the same sign. */
9358 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0))
9359 && (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
9360 && (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
9361 && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1)))
9362 && (TREE_CODE (arg1) == PLUS_EXPR || TREE_CODE (arg1) == MINUS_EXPR)
9363 && (TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
9364 && !TREE_OVERFLOW (TREE_OPERAND (arg1, 1))))
9366 tree const1 = TREE_OPERAND (arg0, 1);
9367 tree const2 = TREE_OPERAND (arg1, 1);
9368 tree variable1 = TREE_OPERAND (arg0, 0);
9369 tree variable2 = TREE_OPERAND (arg1, 0);
9370 tree cst;
9371 const char * const warnmsg = G_("assuming signed overflow does not "
9372 "occur when combining constants around "
9373 "a comparison");
9375 /* Put the constant on the side where it doesn't overflow and is
9376 of lower absolute value and of the same sign as before. */
9377 cst = int_const_binop (TREE_CODE (arg0) == TREE_CODE (arg1)
9378 ? MINUS_EXPR : PLUS_EXPR,
9379 const2, const1);
9380 if (!TREE_OVERFLOW (cst)
9381 && tree_int_cst_compare (const2, cst) == tree_int_cst_sgn (const2)
9382 && tree_int_cst_sgn (cst) == tree_int_cst_sgn (const2))
9384 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_COMPARISON);
9385 return fold_build2_loc (loc, code, type,
9386 variable1,
9387 fold_build2_loc (loc, TREE_CODE (arg1),
9388 TREE_TYPE (arg1),
9389 variable2, cst));
9392 cst = int_const_binop (TREE_CODE (arg0) == TREE_CODE (arg1)
9393 ? MINUS_EXPR : PLUS_EXPR,
9394 const1, const2);
9395 if (!TREE_OVERFLOW (cst)
9396 && tree_int_cst_compare (const1, cst) == tree_int_cst_sgn (const1)
9397 && tree_int_cst_sgn (cst) == tree_int_cst_sgn (const1))
9399 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_COMPARISON);
9400 return fold_build2_loc (loc, code, type,
9401 fold_build2_loc (loc, TREE_CODE (arg0),
9402 TREE_TYPE (arg0),
9403 variable1, cst),
9404 variable2);
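/* For instance, "X + 9 < Y + 10" becomes "X < Y + 1" here: the
   combined constant 1 is smaller in magnitude than 10 and has the
   same sign, so the rewrite introduces no new overflow.  */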
9408 /* Transform comparisons of the form X * C1 CMP 0 to X CMP 0 in the
9409 signed arithmetic case. That form is created by the compiler
9410 often enough for folding it to be of value. One example is in
9411 computing loop trip counts after Operator Strength Reduction. */
9412 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0))
9413 && TREE_CODE (arg0) == MULT_EXPR
9414 && (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
9415 && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1)))
9416 && integer_zerop (arg1))
9418 tree const1 = TREE_OPERAND (arg0, 1);
9419 tree const2 = arg1; /* zero */
9420 tree variable1 = TREE_OPERAND (arg0, 0);
9421 enum tree_code cmp_code = code;
9423 /* Handle unfolded multiplication by zero. */
9424 if (integer_zerop (const1))
9425 return fold_build2_loc (loc, cmp_code, type, const1, const2);
9427 fold_overflow_warning (("assuming signed overflow does not occur when "
9428 "eliminating multiplication in comparison "
9429 "with zero"),
9430 WARN_STRICT_OVERFLOW_COMPARISON);
9432 /* If const1 is negative we swap the sense of the comparison. */
9433 if (tree_int_cst_sgn (const1) < 0)
9434 cmp_code = swap_tree_comparison (cmp_code);
9436 return fold_build2_loc (loc, cmp_code, type, variable1, const2);
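/* For instance, "X * 4 > 0" becomes "X > 0" here, while
   "X * -4 > 0" has the comparison sense swapped and becomes
   "X < 0".  */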
9439 tem = maybe_canonicalize_comparison (loc, code, type, arg0, arg1);
9440 if (tem)
9441 return tem;
9443 if (FLOAT_TYPE_P (TREE_TYPE (arg0)))
9445 tree targ0 = strip_float_extensions (arg0);
9446 tree targ1 = strip_float_extensions (arg1);
9447 tree newtype = TREE_TYPE (targ0);
9449 if (TYPE_PRECISION (TREE_TYPE (targ1)) > TYPE_PRECISION (newtype))
9450 newtype = TREE_TYPE (targ1);
9452 /* Fold (double)float1 CMP (double)float2 into float1 CMP float2. */
9453 if (TYPE_PRECISION (newtype) < TYPE_PRECISION (TREE_TYPE (arg0)))
9454 return fold_build2_loc (loc, code, type,
9455 fold_convert_loc (loc, newtype, targ0),
9456 fold_convert_loc (loc, newtype, targ1));
9458 /* (-a) CMP (-b) -> b CMP a */
9459 if (TREE_CODE (arg0) == NEGATE_EXPR
9460 && TREE_CODE (arg1) == NEGATE_EXPR)
9461 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg1, 0),
9462 TREE_OPERAND (arg0, 0));
9464 if (TREE_CODE (arg1) == REAL_CST)
9466 REAL_VALUE_TYPE cst;
9467 cst = TREE_REAL_CST (arg1);
9469 /* (-a) CMP CST -> a swap(CMP) (-CST) */
9470 if (TREE_CODE (arg0) == NEGATE_EXPR)
9471 return fold_build2_loc (loc, swap_tree_comparison (code), type,
9472 TREE_OPERAND (arg0, 0),
9473 build_real (TREE_TYPE (arg1),
9474 real_value_negate (&cst)));
9476 /* IEEE doesn't distinguish +0 and -0 in comparisons. */
9477 /* a CMP (-0) -> a CMP 0 */
9478 if (REAL_VALUE_MINUS_ZERO (cst))
9479 return fold_build2_loc (loc, code, type, arg0,
9480 build_real (TREE_TYPE (arg1), dconst0));
9482 /* x != NaN is always true, other ops are always false. */
9483 if (REAL_VALUE_ISNAN (cst)
9484 && ! HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg1))))
9486 tem = (code == NE_EXPR) ? integer_one_node : integer_zero_node;
9487 return omit_one_operand_loc (loc, type, tem, arg0);
9490 /* Fold comparisons against infinity. */
9491 if (REAL_VALUE_ISINF (cst)
9492 && MODE_HAS_INFINITIES (TYPE_MODE (TREE_TYPE (arg1))))
9494 tem = fold_inf_compare (loc, code, type, arg0, arg1);
9495 if (tem != NULL_TREE)
9496 return tem;
9500 /* If this is a comparison of a real constant with a PLUS_EXPR
9501 or a MINUS_EXPR of a real constant, we can convert it into a
9502 comparison with a revised real constant as long as no overflow
9503 occurs when unsafe_math_optimizations are enabled. */
9504 if (flag_unsafe_math_optimizations
9505 && TREE_CODE (arg1) == REAL_CST
9506 && (TREE_CODE (arg0) == PLUS_EXPR
9507 || TREE_CODE (arg0) == MINUS_EXPR)
9508 && TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
9509 && 0 != (tem = const_binop (TREE_CODE (arg0) == PLUS_EXPR
9510 ? MINUS_EXPR : PLUS_EXPR,
9511 arg1, TREE_OPERAND (arg0, 1)))
9512 && !TREE_OVERFLOW (tem))
9513 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0), tem);
9515 /* Likewise, we can simplify a comparison of a real constant with
9516 a MINUS_EXPR whose first operand is also a real constant, i.e.
9517 (c1 - x) < c2 becomes x > c1-c2. Reordering is allowed on
9518 floating-point types only if -fassociative-math is set. */
9519 if (flag_associative_math
9520 && TREE_CODE (arg1) == REAL_CST
9521 && TREE_CODE (arg0) == MINUS_EXPR
9522 && TREE_CODE (TREE_OPERAND (arg0, 0)) == REAL_CST
9523 && 0 != (tem = const_binop (MINUS_EXPR, TREE_OPERAND (arg0, 0),
9524 arg1))
9525 && !TREE_OVERFLOW (tem))
9526 return fold_build2_loc (loc, swap_tree_comparison (code), type,
9527 TREE_OPERAND (arg0, 1), tem);
9529 /* Fold comparisons against built-in math functions. */
9530 if (TREE_CODE (arg1) == REAL_CST
9531 && flag_unsafe_math_optimizations
9532 && ! flag_errno_math)
9534 enum built_in_function fcode = builtin_mathfn_code (arg0);
9536 if (fcode != END_BUILTINS)
9538 tem = fold_mathfn_compare (loc, fcode, code, type, arg0, arg1);
9539 if (tem != NULL_TREE)
9540 return tem;
9545 if (TREE_CODE (TREE_TYPE (arg0)) == INTEGER_TYPE
9546 && CONVERT_EXPR_P (arg0))
9548 /* If we are widening one operand of an integer comparison,
9549 see if the other operand is similarly being widened. Perhaps we
9550 can do the comparison in the narrower type. */
9551 tem = fold_widened_comparison (loc, code, type, arg0, arg1);
9552 if (tem)
9553 return tem;
9555 /* Or if we are changing signedness. */
9556 tem = fold_sign_changed_comparison (loc, code, type, arg0, arg1);
9557 if (tem)
9558 return tem;
9561 /* If this is comparing a constant with a MIN_EXPR or a MAX_EXPR of a
9562 constant, we can simplify it. */
9563 if (TREE_CODE (arg1) == INTEGER_CST
9564 && (TREE_CODE (arg0) == MIN_EXPR
9565 || TREE_CODE (arg0) == MAX_EXPR)
9566 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
9568 tem = optimize_minmax_comparison (loc, code, type, op0, op1);
9569 if (tem)
9570 return tem;
9573 /* Simplify comparison of something with itself. (For IEEE
9574 floating-point, we can only do some of these simplifications.) */
9575 if (operand_equal_p (arg0, arg1, 0))
9577 switch (code)
9579 case EQ_EXPR:
9580 if (! FLOAT_TYPE_P (TREE_TYPE (arg0))
9581 || ! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
9582 return constant_boolean_node (1, type);
9583 break;
9585 case GE_EXPR:
9586 case LE_EXPR:
9587 if (! FLOAT_TYPE_P (TREE_TYPE (arg0))
9588 || ! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
9589 return constant_boolean_node (1, type);
9590 return fold_build2_loc (loc, EQ_EXPR, type, arg0, arg1);
9592 case NE_EXPR:
9593 /* For NE, we can only do this simplification if the type is integer
9594 or we don't honor IEEE floating-point NaNs. */
9595 if (FLOAT_TYPE_P (TREE_TYPE (arg0))
9596 && HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
9597 break;
9598 /* ... fall through ... */
9599 case GT_EXPR:
9600 case LT_EXPR:
9601 return constant_boolean_node (0, type);
9602 default:
9603 gcc_unreachable ();
9607 /* If we are comparing an expression that just has comparisons
9608 of two integer values, arithmetic expressions of those comparisons,
9609 and constants, we can simplify it. There are only three cases
9610 to check: the two values can either be equal, the first can be
9611 greater, or the second can be greater. Fold the expression for
9612 those three values. Since each value must be 0 or 1, we have
9613 eight possibilities, each of which corresponds to the constant 0
9614 or 1 or one of the six possible comparisons.
9616 This handles common cases like (a > b) == 0 but also handles
9617 expressions like ((x > y) - (y > x)) > 0, which supposedly
9618 occur in macroized code. */
9620 if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg0) != INTEGER_CST)
9622 tree cval1 = 0, cval2 = 0;
9623 int save_p = 0;
9625 if (twoval_comparison_p (arg0, &cval1, &cval2, &save_p)
9626 /* Don't handle degenerate cases here; they should already
9627 have been handled anyway. */
9628 && cval1 != 0 && cval2 != 0
9629 && ! (TREE_CONSTANT (cval1) && TREE_CONSTANT (cval2))
9630 && TREE_TYPE (cval1) == TREE_TYPE (cval2)
9631 && INTEGRAL_TYPE_P (TREE_TYPE (cval1))
9632 && TYPE_MAX_VALUE (TREE_TYPE (cval1))
9633 && TYPE_MAX_VALUE (TREE_TYPE (cval2))
9634 && ! operand_equal_p (TYPE_MIN_VALUE (TREE_TYPE (cval1)),
9635 TYPE_MAX_VALUE (TREE_TYPE (cval2)), 0))
9637 tree maxval = TYPE_MAX_VALUE (TREE_TYPE (cval1));
9638 tree minval = TYPE_MIN_VALUE (TREE_TYPE (cval1));
9640 /* We can't just pass T to eval_subst in case cval1 or cval2
9641 was the same as ARG1. */
9643 tree high_result
9644 = fold_build2_loc (loc, code, type,
9645 eval_subst (loc, arg0, cval1, maxval,
9646 cval2, minval),
9647 arg1);
9648 tree equal_result
9649 = fold_build2_loc (loc, code, type,
9650 eval_subst (loc, arg0, cval1, maxval,
9651 cval2, maxval),
9652 arg1);
9653 tree low_result
9654 = fold_build2_loc (loc, code, type,
9655 eval_subst (loc, arg0, cval1, minval,
9656 cval2, maxval),
9657 arg1);
9659 /* All three of these results should be 0 or 1. Confirm they are.
9660 Then use those values to select the proper code to use. */
9662 if (TREE_CODE (high_result) == INTEGER_CST
9663 && TREE_CODE (equal_result) == INTEGER_CST
9664 && TREE_CODE (low_result) == INTEGER_CST)
9666 /* Make a 3-bit mask with the high-order bit being the
9667 value for `>', the next for `=', and the low for `<'. */
9668 switch ((integer_onep (high_result) * 4)
9669 + (integer_onep (equal_result) * 2)
9670 + integer_onep (low_result))
9672 case 0:
9673 /* Always false. */
9674 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
9675 case 1:
9676 code = LT_EXPR;
9677 break;
9678 case 2:
9679 code = EQ_EXPR;
9680 break;
9681 case 3:
9682 code = LE_EXPR;
9683 break;
9684 case 4:
9685 code = GT_EXPR;
9686 break;
9687 case 5:
9688 code = NE_EXPR;
9689 break;
9690 case 6:
9691 code = GE_EXPR;
9692 break;
9693 case 7:
9694 /* Always true. */
9695 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
9698 if (save_p)
9700 tem = save_expr (build2 (code, type, cval1, cval2));
9701 SET_EXPR_LOCATION (tem, loc);
9702 return tem;
9704 return fold_build2_loc (loc, code, type, cval1, cval2);
9709 /* We can fold X/C1 op C2 where C1 and C2 are integer constants
9710 into a single range test. */
9711 if ((TREE_CODE (arg0) == TRUNC_DIV_EXPR
9712 || TREE_CODE (arg0) == EXACT_DIV_EXPR)
9713 && TREE_CODE (arg1) == INTEGER_CST
9714 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
9715 && !integer_zerop (TREE_OPERAND (arg0, 1))
9716 && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1))
9717 && !TREE_OVERFLOW (arg1))
9719 tem = fold_div_compare (loc, code, type, arg0, arg1);
9720 if (tem != NULL_TREE)
9721 return tem;
9724 /* Fold ~X op ~Y as Y op X. */
9725 if (TREE_CODE (arg0) == BIT_NOT_EXPR
9726 && TREE_CODE (arg1) == BIT_NOT_EXPR)
9728 tree cmp_type = TREE_TYPE (TREE_OPERAND (arg0, 0));
9729 return fold_build2_loc (loc, code, type,
9730 fold_convert_loc (loc, cmp_type,
9731 TREE_OPERAND (arg1, 0)),
9732 TREE_OPERAND (arg0, 0));
9735 /* Fold ~X op C as X op' ~C, where op' is the swapped comparison. */
9736 if (TREE_CODE (arg0) == BIT_NOT_EXPR
9737 && (TREE_CODE (arg1) == INTEGER_CST || TREE_CODE (arg1) == VECTOR_CST))
9739 tree cmp_type = TREE_TYPE (TREE_OPERAND (arg0, 0));
9740 return fold_build2_loc (loc, swap_tree_comparison (code), type,
9741 TREE_OPERAND (arg0, 0),
9742 fold_build1_loc (loc, BIT_NOT_EXPR, cmp_type,
9743 fold_convert_loc (loc, cmp_type, arg1)));
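/* For instance, "~X == 5" becomes "X == ~5", i.e. "X == -6" once
   the constant is folded.  */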
9746 return NULL_TREE;
9750 /* Subroutine of fold_binary. Optimize complex multiplications of the
9751 form z * conj(z), as pow(realpart(z),2) + pow(imagpart(z),2). The
9752 argument EXPR represents the expression "z" of type TYPE. */
9754 static tree
9755 fold_mult_zconjz (location_t loc, tree type, tree expr)
9757 tree itype = TREE_TYPE (type);
9758 tree rpart, ipart, tem;
9760 if (TREE_CODE (expr) == COMPLEX_EXPR)
9762 rpart = TREE_OPERAND (expr, 0);
9763 ipart = TREE_OPERAND (expr, 1);
9765 else if (TREE_CODE (expr) == COMPLEX_CST)
9767 rpart = TREE_REALPART (expr);
9768 ipart = TREE_IMAGPART (expr);
9770 else
9772 expr = save_expr (expr);
9773 rpart = fold_build1_loc (loc, REALPART_EXPR, itype, expr);
9774 ipart = fold_build1_loc (loc, IMAGPART_EXPR, itype, expr);
9777 rpart = save_expr (rpart);
9778 ipart = save_expr (ipart);
9779 tem = fold_build2_loc (loc, PLUS_EXPR, itype,
9780 fold_build2_loc (loc, MULT_EXPR, itype, rpart, rpart),
9781 fold_build2_loc (loc, MULT_EXPR, itype, ipart, ipart));
9782 return fold_build2_loc (loc, COMPLEX_EXPR, type, tem,
9783 build_zero_cst (itype));
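/* The identity used above: for z = a + b*i,
   z * conj(z) = (a + b*i)(a - b*i) = a*a + b*b,
   whose imaginary part is zero, hence the COMPLEX_EXPR with a zero
   imaginary component.  */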
9787 /* Subroutine of fold_binary. If P is the value of EXPR, computes
9788 power-of-two M and (arbitrary) N such that M divides (P-N). This condition
9789 guarantees that P and N have the same least significant log2(M) bits.
9790 N is not otherwise constrained. In particular, N is not normalized to
9791 0 <= N < M as is common. In general, the precise value of P is unknown.
9792 M is chosen as large as possible such that constant N can be determined.
9794 Returns M and sets *RESIDUE to N.
9796 If ALLOW_FUNC_ALIGN is true, do take functions' DECL_ALIGN_UNIT into
9797 account. This is not always possible due to PR 35705.
9800 static unsigned HOST_WIDE_INT
9801 get_pointer_modulus_and_residue (tree expr, unsigned HOST_WIDE_INT *residue,
9802 bool allow_func_align)
9804 enum tree_code code;
9806 *residue = 0;
9808 code = TREE_CODE (expr);
9809 if (code == ADDR_EXPR)
9811 unsigned int bitalign;
9812 get_object_alignment_1 (TREE_OPERAND (expr, 0), &bitalign, residue);
9813 *residue /= BITS_PER_UNIT;
9814 return bitalign / BITS_PER_UNIT;
9816 else if (code == POINTER_PLUS_EXPR)
9818 tree op0, op1;
9819 unsigned HOST_WIDE_INT modulus;
9820 enum tree_code inner_code;
9822 op0 = TREE_OPERAND (expr, 0);
9823 STRIP_NOPS (op0);
9824 modulus = get_pointer_modulus_and_residue (op0, residue,
9825 allow_func_align);
9827 op1 = TREE_OPERAND (expr, 1);
9828 STRIP_NOPS (op1);
9829 inner_code = TREE_CODE (op1);
9830 if (inner_code == INTEGER_CST)
9832 *residue += TREE_INT_CST_LOW (op1);
9833 return modulus;
9835 else if (inner_code == MULT_EXPR)
9837 op1 = TREE_OPERAND (op1, 1);
9838 if (TREE_CODE (op1) == INTEGER_CST)
9840 unsigned HOST_WIDE_INT align;
9842 /* Compute the greatest power-of-2 divisor of op1. */
9843 align = TREE_INT_CST_LOW (op1);
9844 align &= -align;
9846 /* If align is non-zero and less than modulus, replace
9847 modulus with align. If align is 0, then either op1 is 0
9848 or the greatest power-of-2 divisor of op1 doesn't fit in an
9849 unsigned HOST_WIDE_INT. In either case, no additional
9850 constraint is imposed. */
9851 if (align)
9852 modulus = MIN (modulus, align);
9854 return modulus;
9859 /* If we get here, we were unable to determine anything useful about the
9860 expression. */
9861 return 1;
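/* For instance, for the address of an 8-byte-aligned variable with 4
   added to it (an ADDR_EXPR under a POINTER_PLUS_EXPR), this returns
   modulus 8 with *RESIDUE set to 4: the pointer value is known to be
   congruent to 4 modulo 8.  */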
9864 /* Helper function for fold_vec_perm. Store elements of VECTOR_CST or
9865 CONSTRUCTOR ARG into array ELTS and return true if successful. */
9867 static bool
9868 vec_cst_ctor_to_array (tree arg, tree *elts)
9870 unsigned int nelts = TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg)), i;
9872 if (TREE_CODE (arg) == VECTOR_CST)
9874 for (i = 0; i < VECTOR_CST_NELTS (arg); ++i)
9875 elts[i] = VECTOR_CST_ELT (arg, i);
9877 else if (TREE_CODE (arg) == CONSTRUCTOR)
9879 constructor_elt *elt;
9881 FOR_EACH_VEC_SAFE_ELT (CONSTRUCTOR_ELTS (arg), i, elt)
9882 if (i >= nelts || TREE_CODE (TREE_TYPE (elt->value)) == VECTOR_TYPE)
9883 return false;
9884 else
9885 elts[i] = elt->value;
9887 else
9888 return false;
9889 for (; i < nelts; i++)
9890 elts[i]
9891 = fold_convert (TREE_TYPE (TREE_TYPE (arg)), integer_zero_node);
9892 return true;
9895 /* Attempt to fold vector permutation of ARG0 and ARG1 vectors using SEL
9896 selector. Return the folded VECTOR_CST or CONSTRUCTOR if successful,
9897 NULL_TREE otherwise. */
9899 static tree
9900 fold_vec_perm (tree type, tree arg0, tree arg1, const unsigned char *sel)
9902 unsigned int nelts = TYPE_VECTOR_SUBPARTS (type), i;
9903 tree *elts;
9904 bool need_ctor = false;
9906 gcc_assert (TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0)) == nelts
9907 && TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg1)) == nelts);
9908 if (TREE_TYPE (TREE_TYPE (arg0)) != TREE_TYPE (type)
9909 || TREE_TYPE (TREE_TYPE (arg1)) != TREE_TYPE (type))
9910 return NULL_TREE;
9912 elts = XALLOCAVEC (tree, nelts * 3);
9913 if (!vec_cst_ctor_to_array (arg0, elts)
9914 || !vec_cst_ctor_to_array (arg1, elts + nelts))
9915 return NULL_TREE;
9917 for (i = 0; i < nelts; i++)
9919 if (!CONSTANT_CLASS_P (elts[sel[i]]))
9920 need_ctor = true;
9921 elts[i + 2 * nelts] = unshare_expr (elts[sel[i]]);
9924 if (need_ctor)
9926 vec<constructor_elt, va_gc> *v;
9927 vec_alloc (v, nelts);
9928 for (i = 0; i < nelts; i++)
9929 CONSTRUCTOR_APPEND_ELT (v, NULL_TREE, elts[2 * nelts + i]);
9930 return build_constructor (type, v);
9932 else
9933 return build_vector (type, &elts[2 * nelts]);
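/* For instance, with nelts 4, arg0 = {a,b,c,d}, arg1 = {e,f,g,h} and
   sel = {0,4,1,5} the result is {a,e,b,f}: selector values below
   nelts pick from ARG0 and the remaining ones from ARG1.  */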
9936 /* Try to fold a pointer difference of type TYPE between two address
9937 expressions of array references AREF0 and AREF1 using location LOC. Return a
9938 simplified expression for the difference or NULL_TREE. */
9940 static tree
9941 fold_addr_of_array_ref_difference (location_t loc, tree type,
9942 tree aref0, tree aref1)
9944 tree base0 = TREE_OPERAND (aref0, 0);
9945 tree base1 = TREE_OPERAND (aref1, 0);
9946 tree base_offset = build_int_cst (type, 0);
9948 /* If the bases are array references as well, recurse. If the bases
9949 are pointer indirections, compute the difference of the pointers.
9950 If the bases are equal, we are set. */
9951 if ((TREE_CODE (base0) == ARRAY_REF
9952 && TREE_CODE (base1) == ARRAY_REF
9953 && (base_offset
9954 = fold_addr_of_array_ref_difference (loc, type, base0, base1)))
9955 || (INDIRECT_REF_P (base0)
9956 && INDIRECT_REF_P (base1)
9957 && (base_offset = fold_binary_loc (loc, MINUS_EXPR, type,
9958 TREE_OPERAND (base0, 0),
9959 TREE_OPERAND (base1, 0))))
9960 || operand_equal_p (base0, base1, 0))
9962 tree op0 = fold_convert_loc (loc, type, TREE_OPERAND (aref0, 1));
9963 tree op1 = fold_convert_loc (loc, type, TREE_OPERAND (aref1, 1));
9964 tree esz = fold_convert_loc (loc, type, array_ref_element_size (aref0));
9965 tree diff = build2 (MINUS_EXPR, type, op0, op1);
9966 return fold_build2_loc (loc, PLUS_EXPR, type,
9967 base_offset,
9968 fold_build2_loc (loc, MULT_EXPR, type,
9969 diff, esz));
9971 return NULL_TREE;
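/* For instance, "&a[i] - &a[j]" folds here to
   "(i - j) * sizeof (a[0])", with a zero base offset since the two
   bases are equal.  */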
9974 /* If the real or vector real constant CST of type TYPE has an exact
9975 inverse, return it, else return NULL. */
9977 static tree
9978 exact_inverse (tree type, tree cst)
9980 REAL_VALUE_TYPE r;
9981 tree unit_type, *elts;
9982 enum machine_mode mode;
9983 unsigned vec_nelts, i;
9985 switch (TREE_CODE (cst))
9987 case REAL_CST:
9988 r = TREE_REAL_CST (cst);
9990 if (exact_real_inverse (TYPE_MODE (type), &r))
9991 return build_real (type, r);
9993 return NULL_TREE;
9995 case VECTOR_CST:
9996 vec_nelts = VECTOR_CST_NELTS (cst);
9997 elts = XALLOCAVEC (tree, vec_nelts);
9998 unit_type = TREE_TYPE (type);
9999 mode = TYPE_MODE (unit_type);
10001 for (i = 0; i < vec_nelts; i++)
10003 r = TREE_REAL_CST (VECTOR_CST_ELT (cst, i));
10004 if (!exact_real_inverse (mode, &r))
10005 return NULL_TREE;
10006 elts[i] = build_real (unit_type, r);
10009 return build_vector (type, elts);
10011 default:
10012 return NULL_TREE;
10016 /* Mask out the tz least significant bits of X of type TYPE where
10017 tz is the number of trailing zeroes in Y. */
10018 static wide_int
10019 mask_with_tz (tree type, const wide_int &x, const wide_int &y)
10021 int tz = wi::ctz (y);
10022 if (tz > 0)
10023 return wi::mask (tz, true, TYPE_PRECISION (type)) & x;
10024 return x;
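/* For instance, with X = 0b1011 and Y = 12 (two trailing zeroes) the
   result is 0b1000: the two least significant bits of X are
   cleared.  */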
10027 /* Return true when T is an address and is known to be nonzero.
10028 For floating point we further ensure that T is not denormal.
10029 Similar logic is present in nonzero_address in rtlanal.c.
10031 If the return value is based on the assumption that signed overflow
10032 is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
10033 change *STRICT_OVERFLOW_P. */
10035 static bool
10036 tree_expr_nonzero_warnv_p (tree t, bool *strict_overflow_p)
10038 tree type = TREE_TYPE (t);
10039 enum tree_code code;
10041 /* Doing something useful for floating point would need more work. */
10042 if (!INTEGRAL_TYPE_P (type) && !POINTER_TYPE_P (type))
10043 return false;
10045 code = TREE_CODE (t);
10046 switch (TREE_CODE_CLASS (code))
10048 case tcc_unary:
10049 return tree_unary_nonzero_warnv_p (code, type, TREE_OPERAND (t, 0),
10050 strict_overflow_p);
10051 case tcc_binary:
10052 case tcc_comparison:
10053 return tree_binary_nonzero_warnv_p (code, type,
10054 TREE_OPERAND (t, 0),
10055 TREE_OPERAND (t, 1),
10056 strict_overflow_p);
10057 case tcc_constant:
10058 case tcc_declaration:
10059 case tcc_reference:
10060 return tree_single_nonzero_warnv_p (t, strict_overflow_p);
10062 default:
10063 break;
10066 switch (code)
10068 case TRUTH_NOT_EXPR:
10069 return tree_unary_nonzero_warnv_p (code, type, TREE_OPERAND (t, 0),
10070 strict_overflow_p);
10072 case TRUTH_AND_EXPR:
10073 case TRUTH_OR_EXPR:
10074 case TRUTH_XOR_EXPR:
10075 return tree_binary_nonzero_warnv_p (code, type,
10076 TREE_OPERAND (t, 0),
10077 TREE_OPERAND (t, 1),
10078 strict_overflow_p);
10080 case COND_EXPR:
10081 case CONSTRUCTOR:
10082 case OBJ_TYPE_REF:
10083 case ASSERT_EXPR:
10084 case ADDR_EXPR:
10085 case WITH_SIZE_EXPR:
10086 case SSA_NAME:
10087 return tree_single_nonzero_warnv_p (t, strict_overflow_p);
10089 case COMPOUND_EXPR:
10090 case MODIFY_EXPR:
10091 case BIND_EXPR:
10092 return tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 1),
10093 strict_overflow_p);
10095 case SAVE_EXPR:
10096 return tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 0),
10097 strict_overflow_p);
10099 case CALL_EXPR:
10101 tree fndecl = get_callee_fndecl (t);
10102 if (!fndecl) return false;
10103 if (flag_delete_null_pointer_checks && !flag_check_new
10104 && DECL_IS_OPERATOR_NEW (fndecl)
10105 && !TREE_NOTHROW (fndecl))
10106 return true;
10107 if (flag_delete_null_pointer_checks
10108 && lookup_attribute ("returns_nonnull",
10109 TYPE_ATTRIBUTES (TREE_TYPE (fndecl))))
10110 return true;
10111 return alloca_call_p (t);
10114 default:
10115 break;
10117 return false;
10120 /* Return true when T is an address and is known to be nonzero.
10121 Handle warnings about undefined signed overflow. */
10123 static bool
10124 tree_expr_nonzero_p (tree t)
10126 bool ret, strict_overflow_p;
10128 strict_overflow_p = false;
10129 ret = tree_expr_nonzero_warnv_p (t, &strict_overflow_p);
10130 if (strict_overflow_p)
10131 fold_overflow_warning (("assuming signed overflow does not occur when "
10132 "determining that expression is always "
10133 "non-zero"),
10134 WARN_STRICT_OVERFLOW_MISC);
10135 return ret;
10138 /* Fold a binary expression of code CODE and type TYPE with operands
10139 OP0 and OP1. LOC is the location of the resulting expression.
10140 Return the folded expression if folding is successful. Otherwise,
10141 return NULL_TREE. */
10143 static tree
10144 fold_binary_loc_1 (location_t loc,
10145 enum tree_code code, tree type, tree op0, tree op1)
10147 enum tree_code_class kind = TREE_CODE_CLASS (code);
10148 tree arg0, arg1, tem;
10149 tree t1 = NULL_TREE;
10150 bool strict_overflow_p;
10151 unsigned int prec;
10153 gcc_assert (IS_EXPR_CODE_CLASS (kind)
10154 && TREE_CODE_LENGTH (code) == 2
10155 && op0 != NULL_TREE
10156 && op1 != NULL_TREE);
10158 arg0 = op0;
10159 arg1 = op1;
10161 /* Strip any conversions that don't change the mode. This is
10162 safe for every expression, except for a comparison expression
10163 because its signedness is derived from its operands. So, in
10164 the latter case, only strip conversions that don't change the
10165 signedness. MIN_EXPR/MAX_EXPR also need signedness of arguments
10166 preserved.
10168 Note that this is done as an internal manipulation within the
10169 constant folder, in order to find the simplest representation
10170 of the arguments so that their form can be studied. In any
10171 case, the appropriate type conversions should be put back in
10172 the tree that will get out of the constant folder. */
10174 if (kind == tcc_comparison || code == MIN_EXPR || code == MAX_EXPR)
10176 STRIP_SIGN_NOPS (arg0);
10177 STRIP_SIGN_NOPS (arg1);
10179 else
10181 STRIP_NOPS (arg0);
10182 STRIP_NOPS (arg1);
10185 /* Note that TREE_CONSTANT isn't enough: static var addresses are
10186 constant but we can't do arithmetic on them. */
10187 if ((TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
10188 || (TREE_CODE (arg0) == REAL_CST && TREE_CODE (arg1) == REAL_CST)
10189 || (TREE_CODE (arg0) == FIXED_CST && TREE_CODE (arg1) == FIXED_CST)
10190 || (TREE_CODE (arg0) == FIXED_CST && TREE_CODE (arg1) == INTEGER_CST)
10191 || (TREE_CODE (arg0) == COMPLEX_CST && TREE_CODE (arg1) == COMPLEX_CST)
10192 || (TREE_CODE (arg0) == VECTOR_CST && TREE_CODE (arg1) == VECTOR_CST)
10193 || (TREE_CODE (arg0) == VECTOR_CST && TREE_CODE (arg1) == INTEGER_CST))
10195 if (kind == tcc_binary)
10197 /* Make sure type and arg0 have the same saturating flag. */
10198 gcc_assert (TYPE_SATURATING (type)
10199 == TYPE_SATURATING (TREE_TYPE (arg0)));
10200 tem = const_binop (code, arg0, arg1);
10202 else if (kind == tcc_comparison)
10203 tem = fold_relational_const (code, type, arg0, arg1);
10204 else
10205 tem = NULL_TREE;
10207 if (tem != NULL_TREE)
10209 if (TREE_TYPE (tem) != type)
10210 tem = fold_convert_loc (loc, type, tem);
10211 return tem;
10215 /* If this is a commutative operation, and ARG0 is a constant, move it
10216 to ARG1 to reduce the number of tests below. */
10217 if (commutative_tree_code (code)
10218 && tree_swap_operands_p (arg0, arg1, true))
10219 return fold_build2_loc (loc, code, type, op1, op0);
10221 /* ARG0 is the first operand of EXPR, and ARG1 is the second operand.
10223 First check for cases where an arithmetic operation is applied to a
10224 compound, conditional, or comparison operation. Push the arithmetic
10225 operation inside the compound or conditional to see if any folding
10226 can then be done. Convert comparison to conditional for this purpose.
10227 This also optimizes non-constant cases that used to be done in
10228 expand_expr.
10230 Before we do that, see if this is a BIT_AND_EXPR or a BIT_IOR_EXPR where
10231 one of the operands is a comparison and the other is a comparison, a
10232 BIT_AND_EXPR with the constant 1, or a truth value. In that case, the
10233 code below would make the expression more complex. Change it to a
10234 TRUTH_{AND,OR}_EXPR. Likewise, convert a similar NE_EXPR to
10235 TRUTH_XOR_EXPR and an EQ_EXPR to the inversion of a TRUTH_XOR_EXPR. */
10237 if ((code == BIT_AND_EXPR || code == BIT_IOR_EXPR
10238 || code == EQ_EXPR || code == NE_EXPR)
10239 && TREE_CODE (type) != VECTOR_TYPE
10240 && ((truth_value_p (TREE_CODE (arg0))
10241 && (truth_value_p (TREE_CODE (arg1))
10242 || (TREE_CODE (arg1) == BIT_AND_EXPR
10243 && integer_onep (TREE_OPERAND (arg1, 1)))))
10244 || (truth_value_p (TREE_CODE (arg1))
10245 && (truth_value_p (TREE_CODE (arg0))
10246 || (TREE_CODE (arg0) == BIT_AND_EXPR
10247 && integer_onep (TREE_OPERAND (arg0, 1)))))))
10249 tem = fold_build2_loc (loc, code == BIT_AND_EXPR ? TRUTH_AND_EXPR
10250 : code == BIT_IOR_EXPR ? TRUTH_OR_EXPR
10251 : TRUTH_XOR_EXPR,
10252 boolean_type_node,
10253 fold_convert_loc (loc, boolean_type_node, arg0),
10254 fold_convert_loc (loc, boolean_type_node, arg1));
10256 if (code == EQ_EXPR)
10257 tem = invert_truthvalue_loc (loc, tem);
10259 return fold_convert_loc (loc, type, tem);
10262 if (TREE_CODE_CLASS (code) == tcc_binary
10263 || TREE_CODE_CLASS (code) == tcc_comparison)
10265 if (TREE_CODE (arg0) == COMPOUND_EXPR)
10267 tem = fold_build2_loc (loc, code, type,
10268 fold_convert_loc (loc, TREE_TYPE (op0),
10269 TREE_OPERAND (arg0, 1)), op1);
10270 return build2_loc (loc, COMPOUND_EXPR, type, TREE_OPERAND (arg0, 0),
10271 tem);
10273 if (TREE_CODE (arg1) == COMPOUND_EXPR
10274 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
10276 tem = fold_build2_loc (loc, code, type, op0,
10277 fold_convert_loc (loc, TREE_TYPE (op1),
10278 TREE_OPERAND (arg1, 1)));
10279 return build2_loc (loc, COMPOUND_EXPR, type, TREE_OPERAND (arg1, 0),
10280 tem);
10283 if (TREE_CODE (arg0) == COND_EXPR
10284 || TREE_CODE (arg0) == VEC_COND_EXPR
10285 || COMPARISON_CLASS_P (arg0))
10287 tem = fold_binary_op_with_conditional_arg (loc, code, type, op0, op1,
10288 arg0, arg1,
10289 /*cond_first_p=*/1);
10290 if (tem != NULL_TREE)
10291 return tem;
10294 if (TREE_CODE (arg1) == COND_EXPR
10295 || TREE_CODE (arg1) == VEC_COND_EXPR
10296 || COMPARISON_CLASS_P (arg1))
10298 tem = fold_binary_op_with_conditional_arg (loc, code, type, op0, op1,
10299 arg1, arg0,
10300 /*cond_first_p=*/0);
10301 if (tem != NULL_TREE)
10302 return tem;
10306 switch (code)
10308 case MEM_REF:
10309 /* MEM[&MEM[p, CST1], CST2] -> MEM[p, CST1 + CST2]. */
10310 if (TREE_CODE (arg0) == ADDR_EXPR
10311 && TREE_CODE (TREE_OPERAND (arg0, 0)) == MEM_REF)
10313 tree iref = TREE_OPERAND (arg0, 0);
10314 return fold_build2 (MEM_REF, type,
10315 TREE_OPERAND (iref, 0),
10316 int_const_binop (PLUS_EXPR, arg1,
10317 TREE_OPERAND (iref, 1)));
10320 /* MEM[&a.b, CST2] -> MEM[&a, offsetof (a, b) + CST2]. */
10321 if (TREE_CODE (arg0) == ADDR_EXPR
10322 && handled_component_p (TREE_OPERAND (arg0, 0)))
10324 tree base;
10325 HOST_WIDE_INT coffset;
10326 base = get_addr_base_and_unit_offset (TREE_OPERAND (arg0, 0),
10327 &coffset);
10328 if (!base)
10329 return NULL_TREE;
10330 return fold_build2 (MEM_REF, type,
10331 build_fold_addr_expr (base),
10332 int_const_binop (PLUS_EXPR, arg1,
10333 size_int (coffset)));
10336 return NULL_TREE;
10338 case POINTER_PLUS_EXPR:
10339 /* 0 +p index -> (type)index */
10340 if (integer_zerop (arg0))
10341 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
10343 /* PTR +p 0 -> PTR */
10344 if (integer_zerop (arg1))
10345 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
10347 /* INT +p INT -> (PTR)(INT + INT). Stripping types allows for this. */
10348 if (INTEGRAL_TYPE_P (TREE_TYPE (arg1))
10349 && INTEGRAL_TYPE_P (TREE_TYPE (arg0)))
10350 return fold_convert_loc (loc, type,
10351 fold_build2_loc (loc, PLUS_EXPR, sizetype,
10352 fold_convert_loc (loc, sizetype,
10353 arg1),
10354 fold_convert_loc (loc, sizetype,
10355 arg0)));
10357 /* (PTR +p B) +p A -> PTR +p (B + A) */
10358 if (TREE_CODE (arg0) == POINTER_PLUS_EXPR)
10360 tree inner;
10361 tree arg01 = fold_convert_loc (loc, sizetype, TREE_OPERAND (arg0, 1));
10362 tree arg00 = TREE_OPERAND (arg0, 0);
10363 inner = fold_build2_loc (loc, PLUS_EXPR, sizetype,
10364 arg01, fold_convert_loc (loc, sizetype, arg1));
10365 return fold_convert_loc (loc, type,
10366 fold_build_pointer_plus_loc (loc,
10367 arg00, inner));
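/* For instance, "(p +p 4) +p 8" is reassociated above into
   "p +p 12".  */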
10370 /* PTR_CST +p CST -> CST1 */
10371 if (TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
10372 return fold_build2_loc (loc, PLUS_EXPR, type, arg0,
10373 fold_convert_loc (loc, type, arg1));
10375 /* Try replacing &a[i1] +p c * i2 with &a[i1 + i2], if c is the step
10376 of the array. The loop optimizer sometimes produces this type of
10377 expression. */
10378 if (TREE_CODE (arg0) == ADDR_EXPR)
10380 tem = try_move_mult_to_index (loc, arg0,
10381 fold_convert_loc (loc,
10382 ssizetype, arg1));
10383 if (tem)
10384 return fold_convert_loc (loc, type, tem);
10387 return NULL_TREE;
10389 case PLUS_EXPR:
10390 /* A + (-B) -> A - B */
10391 if (TREE_CODE (arg1) == NEGATE_EXPR
10392 && (flag_sanitize & SANITIZE_SI_OVERFLOW) == 0)
10393 return fold_build2_loc (loc, MINUS_EXPR, type,
10394 fold_convert_loc (loc, type, arg0),
10395 fold_convert_loc (loc, type,
10396 TREE_OPERAND (arg1, 0)));
10397 /* (-A) + B -> B - A */
10398 if (TREE_CODE (arg0) == NEGATE_EXPR
10399 && reorder_operands_p (TREE_OPERAND (arg0, 0), arg1)
10400 && (flag_sanitize & SANITIZE_SI_OVERFLOW) == 0)
10401 return fold_build2_loc (loc, MINUS_EXPR, type,
10402 fold_convert_loc (loc, type, arg1),
10403 fold_convert_loc (loc, type,
10404 TREE_OPERAND (arg0, 0)));
10406 if (INTEGRAL_TYPE_P (type) || VECTOR_INTEGER_TYPE_P (type))
10408 /* Convert ~A + 1 to -A. */
10409 if (TREE_CODE (arg0) == BIT_NOT_EXPR
10410 && integer_onep (arg1))
10411 return fold_build1_loc (loc, NEGATE_EXPR, type,
10412 fold_convert_loc (loc, type,
10413 TREE_OPERAND (arg0, 0)));
10415 /* ~X + X is -1. */
10416 if (TREE_CODE (arg0) == BIT_NOT_EXPR
10417 && !TYPE_OVERFLOW_TRAPS (type))
10419 tree tem = TREE_OPERAND (arg0, 0);
10421 STRIP_NOPS (tem);
10422 if (operand_equal_p (tem, arg1, 0))
10424 t1 = build_all_ones_cst (type);
10425 return omit_one_operand_loc (loc, type, t1, arg1);
10429 /* X + ~X is -1. */
10430 if (TREE_CODE (arg1) == BIT_NOT_EXPR
10431 && !TYPE_OVERFLOW_TRAPS (type))
10433 tree tem = TREE_OPERAND (arg1, 0);
10435 STRIP_NOPS (tem);
10436 if (operand_equal_p (arg0, tem, 0))
10438 t1 = build_all_ones_cst (type);
10439 return omit_one_operand_loc (loc, type, t1, arg0);
10443 /* X + (X / CST) * -CST is X % CST. */
10444 if (TREE_CODE (arg1) == MULT_EXPR
10445 && TREE_CODE (TREE_OPERAND (arg1, 0)) == TRUNC_DIV_EXPR
10446 && operand_equal_p (arg0,
10447 TREE_OPERAND (TREE_OPERAND (arg1, 0), 0), 0))
10449 tree cst0 = TREE_OPERAND (TREE_OPERAND (arg1, 0), 1);
10450 tree cst1 = TREE_OPERAND (arg1, 1);
10451 tree sum = fold_binary_loc (loc, PLUS_EXPR, TREE_TYPE (cst1),
10452 cst1, cst0);
10453 if (sum && integer_zerop (sum))
10454 return fold_convert_loc (loc, type,
10455 fold_build2_loc (loc, TRUNC_MOD_EXPR,
10456 TREE_TYPE (arg0), arg0,
10457 cst0));
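/* For instance, "X + (X / 16) * -16" is recognized above and
   becomes "X % 16".  */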
10461 /* Handle (A1 * C1) + (A2 * C2) with A1, A2 or C1, C2 being the same or
10462 one. Make sure the type is not saturating and has the signedness of
10463 the stripped operands, as fold_plusminus_mult_expr will re-associate.
10464 ??? The latter condition should use TYPE_OVERFLOW_* flags instead. */
10465 if ((TREE_CODE (arg0) == MULT_EXPR
10466 || TREE_CODE (arg1) == MULT_EXPR)
10467 && !TYPE_SATURATING (type)
10468 && TYPE_UNSIGNED (type) == TYPE_UNSIGNED (TREE_TYPE (arg0))
10469 && TYPE_UNSIGNED (type) == TYPE_UNSIGNED (TREE_TYPE (arg1))
10470 && (!FLOAT_TYPE_P (type) || flag_associative_math))
10472 tree tem = fold_plusminus_mult_expr (loc, code, type, arg0, arg1);
10473 if (tem)
10474 return tem;
10477 if (! FLOAT_TYPE_P (type))
10479 if (integer_zerop (arg1))
10480 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
10482 /* If we are adding two BIT_AND_EXPR's, both of which are and'ing
10483 with a constant, and the two constants have no bits in common,
10484 we should treat this as a BIT_IOR_EXPR since this may produce more
10485 simplifications. */
10486 if (TREE_CODE (arg0) == BIT_AND_EXPR
10487 && TREE_CODE (arg1) == BIT_AND_EXPR
10488 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
10489 && TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
10490 && wi::bit_and (TREE_OPERAND (arg0, 1),
10491 TREE_OPERAND (arg1, 1)) == 0)
10493 code = BIT_IOR_EXPR;
10494 goto bit_ior;
10497 /* Reassociate (plus (plus (mult) (foo)) (mult)) as
10498 (plus (plus (mult) (mult)) (foo)) so that we can
10499 take advantage of the factoring cases below. */
10500 if (TYPE_OVERFLOW_WRAPS (type)
10501 && (((TREE_CODE (arg0) == PLUS_EXPR
10502 || TREE_CODE (arg0) == MINUS_EXPR)
10503 && TREE_CODE (arg1) == MULT_EXPR)
10504 || ((TREE_CODE (arg1) == PLUS_EXPR
10505 || TREE_CODE (arg1) == MINUS_EXPR)
10506 && TREE_CODE (arg0) == MULT_EXPR)))
10508 tree parg0, parg1, parg, marg;
10509 enum tree_code pcode;
10511 if (TREE_CODE (arg1) == MULT_EXPR)
10512 parg = arg0, marg = arg1;
10513 else
10514 parg = arg1, marg = arg0;
10515 pcode = TREE_CODE (parg);
10516 parg0 = TREE_OPERAND (parg, 0);
10517 parg1 = TREE_OPERAND (parg, 1);
10518 STRIP_NOPS (parg0);
10519 STRIP_NOPS (parg1);
10521 if (TREE_CODE (parg0) == MULT_EXPR
10522 && TREE_CODE (parg1) != MULT_EXPR)
10523 return fold_build2_loc (loc, pcode, type,
10524 fold_build2_loc (loc, PLUS_EXPR, type,
10525 fold_convert_loc (loc, type,
10526 parg0),
10527 fold_convert_loc (loc, type,
10528 marg)),
10529 fold_convert_loc (loc, type, parg1));
10530 if (TREE_CODE (parg0) != MULT_EXPR
10531 && TREE_CODE (parg1) == MULT_EXPR)
10532 return
10533 fold_build2_loc (loc, PLUS_EXPR, type,
10534 fold_convert_loc (loc, type, parg0),
10535 fold_build2_loc (loc, pcode, type,
10536 fold_convert_loc (loc, type, marg),
10537 fold_convert_loc (loc, type,
10538 parg1)));
10541 else
10543 /* See if ARG1 is zero and X + ARG1 reduces to X. */
10544 if (fold_real_zero_addition_p (TREE_TYPE (arg0), arg1, 0))
10545 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
10547 /* Likewise if the operands are reversed. */
10548 if (fold_real_zero_addition_p (TREE_TYPE (arg1), arg0, 0))
10549 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
10551 /* Convert X + -C into X - C. */
10552 if (TREE_CODE (arg1) == REAL_CST
10553 && REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg1)))
10555 tem = fold_negate_const (arg1, type);
10556 if (!TREE_OVERFLOW (arg1) || !flag_trapping_math)
10557 return fold_build2_loc (loc, MINUS_EXPR, type,
10558 fold_convert_loc (loc, type, arg0),
10559 fold_convert_loc (loc, type, tem));
10562 /* Fold __complex__ ( x, 0 ) + __complex__ ( 0, y )
10563 to __complex__ ( x, y ). This is not the same for SNaNs or
10564 if signed zeros are involved. */
10565 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
10566 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
10567 && COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0)))
10569 tree rtype = TREE_TYPE (TREE_TYPE (arg0));
10570 tree arg0r = fold_unary_loc (loc, REALPART_EXPR, rtype, arg0);
10571 tree arg0i = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg0);
10572 bool arg0rz = false, arg0iz = false;
10573 if ((arg0r && (arg0rz = real_zerop (arg0r)))
10574 || (arg0i && (arg0iz = real_zerop (arg0i))))
10576 tree arg1r = fold_unary_loc (loc, REALPART_EXPR, rtype, arg1);
10577 tree arg1i = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg1);
10578 if (arg0rz && arg1i && real_zerop (arg1i))
10580 tree rp = arg1r ? arg1r
10581 : build1 (REALPART_EXPR, rtype, arg1);
10582 tree ip = arg0i ? arg0i
10583 : build1 (IMAGPART_EXPR, rtype, arg0);
10584 return fold_build2_loc (loc, COMPLEX_EXPR, type, rp, ip);
10586 else if (arg0iz && arg1r && real_zerop (arg1r))
10588 tree rp = arg0r ? arg0r
10589 : build1 (REALPART_EXPR, rtype, arg0);
10590 tree ip = arg1i ? arg1i
10591 : build1 (IMAGPART_EXPR, rtype, arg1);
10592 return fold_build2_loc (loc, COMPLEX_EXPR, type, rp, ip);
10597 if (flag_unsafe_math_optimizations
10598 && (TREE_CODE (arg0) == RDIV_EXPR || TREE_CODE (arg0) == MULT_EXPR)
10599 && (TREE_CODE (arg1) == RDIV_EXPR || TREE_CODE (arg1) == MULT_EXPR)
10600 && (tem = distribute_real_division (loc, code, type, arg0, arg1)))
10601 return tem;
10603 /* Convert x+x into x*2.0. */
10604 if (operand_equal_p (arg0, arg1, 0)
10605 && SCALAR_FLOAT_TYPE_P (type))
10606 return fold_build2_loc (loc, MULT_EXPR, type, arg0,
10607 build_real (type, dconst2));
10609 /* Convert a + (b*c + d*e) into (a + b*c) + d*e.
10610 We associate floats only if the user has specified
10611 -fassociative-math. */
10612 if (flag_associative_math
10613 && TREE_CODE (arg1) == PLUS_EXPR
10614 && TREE_CODE (arg0) != MULT_EXPR)
10616 tree tree10 = TREE_OPERAND (arg1, 0);
10617 tree tree11 = TREE_OPERAND (arg1, 1);
10618 if (TREE_CODE (tree11) == MULT_EXPR
10619 && TREE_CODE (tree10) == MULT_EXPR)
10621 tree tree0;
10622 tree0 = fold_build2_loc (loc, PLUS_EXPR, type, arg0, tree10);
10623 return fold_build2_loc (loc, PLUS_EXPR, type, tree0, tree11);
10626 /* Convert (b*c + d*e) + a into b*c + (d*e + a).
10627 We associate floats only if the user has specified
10628 -fassociative-math. */
10629 if (flag_associative_math
10630 && TREE_CODE (arg0) == PLUS_EXPR
10631 && TREE_CODE (arg1) != MULT_EXPR)
10633 tree tree00 = TREE_OPERAND (arg0, 0);
10634 tree tree01 = TREE_OPERAND (arg0, 1);
10635 if (TREE_CODE (tree01) == MULT_EXPR
10636 && TREE_CODE (tree00) == MULT_EXPR)
10638 tree tree0;
10639 tree0 = fold_build2_loc (loc, PLUS_EXPR, type, tree01, arg1);
10640 return fold_build2_loc (loc, PLUS_EXPR, type, tree00, tree0);
10645 bit_rotate:
10646 /* (A << C1) + (A >> C2) if A is unsigned and C1+C2 is the size of A
10647 is a rotate of A by C1 bits. */
10648 /* (A << B) + (A >> (Z - B)) if A is unsigned and Z is the size of A
10649 is a rotate of A by B bits. */
10651 enum tree_code code0, code1;
10652 tree rtype;
10653 code0 = TREE_CODE (arg0);
10654 code1 = TREE_CODE (arg1);
10655 if (((code0 == RSHIFT_EXPR && code1 == LSHIFT_EXPR)
10656 || (code1 == RSHIFT_EXPR && code0 == LSHIFT_EXPR))
10657 && operand_equal_p (TREE_OPERAND (arg0, 0),
10658 TREE_OPERAND (arg1, 0), 0)
10659 && (rtype = TREE_TYPE (TREE_OPERAND (arg0, 0)),
10660 TYPE_UNSIGNED (rtype))
10661 /* Only create rotates in complete modes. Other cases are not
10662 expanded properly. */
10663 && (element_precision (rtype)
10664 == element_precision (TYPE_MODE (rtype))))
10666 tree tree01, tree11;
10667 enum tree_code code01, code11;
10669 tree01 = TREE_OPERAND (arg0, 1);
10670 tree11 = TREE_OPERAND (arg1, 1);
10671 STRIP_NOPS (tree01);
10672 STRIP_NOPS (tree11);
10673 code01 = TREE_CODE (tree01);
10674 code11 = TREE_CODE (tree11);
10675 if (code01 == INTEGER_CST
10676 && code11 == INTEGER_CST
10677 && (wi::to_widest (tree01) + wi::to_widest (tree11)
10678 == element_precision (TREE_TYPE (TREE_OPERAND (arg0, 0)))))
10680 tem = build2_loc (loc, LROTATE_EXPR,
10681 TREE_TYPE (TREE_OPERAND (arg0, 0)),
10682 TREE_OPERAND (arg0, 0),
10683 code0 == LSHIFT_EXPR ? tree01 : tree11);
10684 return fold_convert_loc (loc, type, tem);
10686 else if (code11 == MINUS_EXPR)
10688 tree tree110, tree111;
10689 tree110 = TREE_OPERAND (tree11, 0);
10690 tree111 = TREE_OPERAND (tree11, 1);
10691 STRIP_NOPS (tree110);
10692 STRIP_NOPS (tree111);
10693 if (TREE_CODE (tree110) == INTEGER_CST
10694 && 0 == compare_tree_int (tree110,
10695 element_precision
10696 (TREE_TYPE (TREE_OPERAND
10697 (arg0, 0))))
10698 && operand_equal_p (tree01, tree111, 0))
10699 return
10700 fold_convert_loc (loc, type,
10701 build2 ((code0 == LSHIFT_EXPR
10702 ? LROTATE_EXPR
10703 : RROTATE_EXPR),
10704 TREE_TYPE (TREE_OPERAND (arg0, 0)),
10705 TREE_OPERAND (arg0, 0), tree01));
10707 else if (code01 == MINUS_EXPR)
10709 tree tree010, tree011;
10710 tree010 = TREE_OPERAND (tree01, 0);
10711 tree011 = TREE_OPERAND (tree01, 1);
10712 STRIP_NOPS (tree010);
10713 STRIP_NOPS (tree011);
10714 if (TREE_CODE (tree010) == INTEGER_CST
10715 && 0 == compare_tree_int (tree010,
10716 element_precision
10717 (TREE_TYPE (TREE_OPERAND
10718 (arg0, 0))))
10719 && operand_equal_p (tree11, tree011, 0))
10720 return fold_convert_loc
10721 (loc, type,
10722 build2 ((code0 != LSHIFT_EXPR
10723 ? LROTATE_EXPR
10724 : RROTATE_EXPR),
10725 TREE_TYPE (TREE_OPERAND (arg0, 0)),
10726 TREE_OPERAND (arg0, 0), tree11));
10731 associate:
10732 /* In most languages, we can't associate operations on floats through
10733 parentheses. Rather than remember where the parentheses were, we
10734 don't associate floats at all, unless the user has specified
10735 -fassociative-math.
10736 Also, we need to make sure the type is not saturating. */
10738 if ((! FLOAT_TYPE_P (type) || flag_associative_math)
10739 && !TYPE_SATURATING (type))
10741 tree var0, con0, lit0, minus_lit0;
10742 tree var1, con1, lit1, minus_lit1;
10743 tree atype = type;
10744 bool ok = true;
10746 /* Split both trees into variables, constants, and literals. Then
10747 associate each group together, the constants with literals,
10748 then the result with variables. This increases the chances of
10749 literals being recombined later and of generating relocatable
10750 expressions for the sum of a constant and literal. */
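/* Worked example (added for clarity, not in the original source):
   folding (x + 1) + (y + 2) splits into variables x, y and literals
   1, 2; after association the literals combine, giving (x + y) + 3.  */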
10751 var0 = split_tree (arg0, code, &con0, &lit0, &minus_lit0, 0);
10752 var1 = split_tree (arg1, code, &con1, &lit1, &minus_lit1,
10753 code == MINUS_EXPR);
10755 /* Recombine MINUS_EXPR operands by using PLUS_EXPR. */
10756 if (code == MINUS_EXPR)
10757 code = PLUS_EXPR;
10759 /* With undefined overflow prefer doing association in a type
10760 which wraps on overflow, if that is one of the operand types. */
10761 if ((POINTER_TYPE_P (type) && POINTER_TYPE_OVERFLOW_UNDEFINED)
10762 || (INTEGRAL_TYPE_P (type) && !TYPE_OVERFLOW_WRAPS (type)))
10764 if (INTEGRAL_TYPE_P (TREE_TYPE (arg0))
10765 && TYPE_OVERFLOW_WRAPS (TREE_TYPE (arg0)))
10766 atype = TREE_TYPE (arg0);
10767 else if (INTEGRAL_TYPE_P (TREE_TYPE (arg1))
10768 && TYPE_OVERFLOW_WRAPS (TREE_TYPE (arg1)))
10769 atype = TREE_TYPE (arg1);
10770 gcc_assert (TYPE_PRECISION (atype) == TYPE_PRECISION (type));
10773 /* With undefined overflow we can only associate constants with one
10774 variable, and constants whose association doesn't overflow. */
10775 if ((POINTER_TYPE_P (atype) && POINTER_TYPE_OVERFLOW_UNDEFINED)
10776 || (INTEGRAL_TYPE_P (atype) && !TYPE_OVERFLOW_WRAPS (atype)))
10778 if (var0 && var1)
10780 tree tmp0 = var0;
10781 tree tmp1 = var1;
10783 if (TREE_CODE (tmp0) == NEGATE_EXPR)
10784 tmp0 = TREE_OPERAND (tmp0, 0);
10785 if (CONVERT_EXPR_P (tmp0)
10786 && INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (tmp0, 0)))
10787 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (tmp0, 0)))
10788 <= TYPE_PRECISION (atype)))
10789 tmp0 = TREE_OPERAND (tmp0, 0);
10790 if (TREE_CODE (tmp1) == NEGATE_EXPR)
10791 tmp1 = TREE_OPERAND (tmp1, 0);
10792 if (CONVERT_EXPR_P (tmp1)
10793 && INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (tmp1, 0)))
10794 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (tmp1, 0)))
10795 <= TYPE_PRECISION (atype)))
10796 tmp1 = TREE_OPERAND (tmp1, 0);
10797 /* The only case we can still associate with two variables
10798 is if they are the same, modulo negation and bit-pattern
10799 preserving conversions. */
10800 if (!operand_equal_p (tmp0, tmp1, 0))
10801 ok = false;
10805 /* Only do something if we found more than two objects. Otherwise,
10806 nothing has changed and we risk infinite recursion. */
10807 if (ok
10808 && (2 < ((var0 != 0) + (var1 != 0)
10809 + (con0 != 0) + (con1 != 0)
10810 + (lit0 != 0) + (lit1 != 0)
10811 + (minus_lit0 != 0) + (minus_lit1 != 0))))
10813 bool any_overflows = false;
10814 if (lit0) any_overflows |= TREE_OVERFLOW (lit0);
10815 if (lit1) any_overflows |= TREE_OVERFLOW (lit1);
10816 if (minus_lit0) any_overflows |= TREE_OVERFLOW (minus_lit0);
10817 if (minus_lit1) any_overflows |= TREE_OVERFLOW (minus_lit1);
10818 var0 = associate_trees (loc, var0, var1, code, atype);
10819 con0 = associate_trees (loc, con0, con1, code, atype);
10820 lit0 = associate_trees (loc, lit0, lit1, code, atype);
10821 minus_lit0 = associate_trees (loc, minus_lit0, minus_lit1,
10822 code, atype);
10824 /* Preserve the MINUS_EXPR if the negative part of the literal is
10825 greater than the positive part. Otherwise, the multiplicative
10826 folding code (i.e. extract_muldiv) may be fooled when
10827 unsigned constants are subtracted, as in the following
10828 example: ((X*2 + 4) - 8U)/2. */
10829 if (minus_lit0 && lit0)
10831 if (TREE_CODE (lit0) == INTEGER_CST
10832 && TREE_CODE (minus_lit0) == INTEGER_CST
10833 && tree_int_cst_lt (lit0, minus_lit0))
10835 minus_lit0 = associate_trees (loc, minus_lit0, lit0,
10836 MINUS_EXPR, atype);
10837 lit0 = 0;
10839 else
10841 lit0 = associate_trees (loc, lit0, minus_lit0,
10842 MINUS_EXPR, atype);
10843 minus_lit0 = 0;
10847 /* Don't introduce overflows through reassociation. */
10848 if (!any_overflows
10849 && ((lit0 && TREE_OVERFLOW (lit0))
10850 || (minus_lit0 && TREE_OVERFLOW (minus_lit0))))
10851 return NULL_TREE;
10853 if (minus_lit0)
10855 if (con0 == 0)
10856 return
10857 fold_convert_loc (loc, type,
10858 associate_trees (loc, var0, minus_lit0,
10859 MINUS_EXPR, atype));
10860 else
10862 con0 = associate_trees (loc, con0, minus_lit0,
10863 MINUS_EXPR, atype);
10864 return
10865 fold_convert_loc (loc, type,
10866 associate_trees (loc, var0, con0,
10867 PLUS_EXPR, atype));
10871 con0 = associate_trees (loc, con0, lit0, code, atype);
10872 return
10873 fold_convert_loc (loc, type, associate_trees (loc, var0, con0,
10874 code, atype));
10878 return NULL_TREE;
10880 case MINUS_EXPR:
10881 /* Pointer simplifications for subtraction, simple reassociations. */
10882 if (POINTER_TYPE_P (TREE_TYPE (arg1)) && POINTER_TYPE_P (TREE_TYPE (arg0)))
10884 /* (PTR0 p+ A) - (PTR1 p+ B) -> (PTR0 - PTR1) + (A - B) */
10885 if (TREE_CODE (arg0) == POINTER_PLUS_EXPR
10886 && TREE_CODE (arg1) == POINTER_PLUS_EXPR)
10888 tree arg00 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
10889 tree arg01 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
10890 tree arg10 = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 0));
10891 tree arg11 = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 1));
10892 return fold_build2_loc (loc, PLUS_EXPR, type,
10893 fold_build2_loc (loc, MINUS_EXPR, type,
10894 arg00, arg10),
10895 fold_build2_loc (loc, MINUS_EXPR, type,
10896 arg01, arg11));
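/* Illustrative example (added for clarity, not in the original source):
   (p p+ 4) - (p p+ 12) becomes (p - p) + (4 - 12), which further
   folds to the constant -8.  */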
10898 /* (PTR0 p+ A) - PTR1 -> (PTR0 - PTR1) + A, assuming PTR0 - PTR1 simplifies. */
10899 else if (TREE_CODE (arg0) == POINTER_PLUS_EXPR)
10901 tree arg00 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
10902 tree arg01 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
10903 tree tmp = fold_binary_loc (loc, MINUS_EXPR, type, arg00,
10904 fold_convert_loc (loc, type, arg1));
10905 if (tmp)
10906 return fold_build2_loc (loc, PLUS_EXPR, type, tmp, arg01);
10909 /* A - (-B) -> A + B */
10910 if (TREE_CODE (arg1) == NEGATE_EXPR)
10911 return fold_build2_loc (loc, PLUS_EXPR, type, op0,
10912 fold_convert_loc (loc, type,
10913 TREE_OPERAND (arg1, 0)));
10914 /* (-A) - B -> (-B) - A where B is easily negated and we can swap. */
10915 if (TREE_CODE (arg0) == NEGATE_EXPR
10916 && negate_expr_p (arg1)
10917 && reorder_operands_p (arg0, arg1))
10918 return fold_build2_loc (loc, MINUS_EXPR, type,
10919 fold_convert_loc (loc, type,
10920 negate_expr (arg1)),
10921 fold_convert_loc (loc, type,
10922 TREE_OPERAND (arg0, 0)));
10923 /* Convert -A - 1 to ~A. */
10924 if (TREE_CODE (type) != COMPLEX_TYPE
10925 && TREE_CODE (arg0) == NEGATE_EXPR
10926 && integer_onep (arg1)
10927 && !TYPE_OVERFLOW_TRAPS (type))
10928 return fold_build1_loc (loc, BIT_NOT_EXPR, type,
10929 fold_convert_loc (loc, type,
10930 TREE_OPERAND (arg0, 0)));
10932 /* Convert -1 - A to ~A. */
10933 if (TREE_CODE (type) != COMPLEX_TYPE
10934 && integer_all_onesp (arg0))
10935 return fold_build1_loc (loc, BIT_NOT_EXPR, type, op1);
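/* Worked example (added for clarity): in two's complement
   -A - 1 == ~A, e.g. A == 5 gives -5 - 1 == -6 == ~5; the same
   identity justifies folding -1 - A to ~A above.  */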
10938 /* X - (X / Y) * Y is X % Y. */
10939 if ((INTEGRAL_TYPE_P (type) || VECTOR_INTEGER_TYPE_P (type))
10940 && TREE_CODE (arg1) == MULT_EXPR
10941 && TREE_CODE (TREE_OPERAND (arg1, 0)) == TRUNC_DIV_EXPR
10942 && operand_equal_p (arg0,
10943 TREE_OPERAND (TREE_OPERAND (arg1, 0), 0), 0)
10944 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg1, 0), 1),
10945 TREE_OPERAND (arg1, 1), 0))
10946 return
10947 fold_convert_loc (loc, type,
10948 fold_build2_loc (loc, TRUNC_MOD_EXPR, TREE_TYPE (arg0),
10949 arg0, TREE_OPERAND (arg1, 1)));
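/* Worked example (added for clarity): with X == 17 and Y == 5,
   17 - (17/5)*5 == 17 - 15 == 2 == 17 % 5.  */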
10951 if (! FLOAT_TYPE_P (type))
10953 if (integer_zerop (arg0))
10954 return negate_expr (fold_convert_loc (loc, type, arg1));
10955 if (integer_zerop (arg1))
10956 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
10958 /* Fold A - (A & B) into ~B & A. */
10959 if (!TREE_SIDE_EFFECTS (arg0)
10960 && TREE_CODE (arg1) == BIT_AND_EXPR)
10962 if (operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0))
10964 tree arg10 = fold_convert_loc (loc, type,
10965 TREE_OPERAND (arg1, 0));
10966 return fold_build2_loc (loc, BIT_AND_EXPR, type,
10967 fold_build1_loc (loc, BIT_NOT_EXPR,
10968 type, arg10),
10969 fold_convert_loc (loc, type, arg0));
10971 if (operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
10973 tree arg11 = fold_convert_loc (loc,
10974 type, TREE_OPERAND (arg1, 1));
10975 return fold_build2_loc (loc, BIT_AND_EXPR, type,
10976 fold_build1_loc (loc, BIT_NOT_EXPR,
10977 type, arg11),
10978 fold_convert_loc (loc, type, arg0));
10982 /* Fold (A & ~B) - (A & B) into (A ^ B) - B, where B is
10983 any power of 2 minus 1. */
10984 if (TREE_CODE (arg0) == BIT_AND_EXPR
10985 && TREE_CODE (arg1) == BIT_AND_EXPR
10986 && operand_equal_p (TREE_OPERAND (arg0, 0),
10987 TREE_OPERAND (arg1, 0), 0))
10989 tree mask0 = TREE_OPERAND (arg0, 1);
10990 tree mask1 = TREE_OPERAND (arg1, 1);
10991 tree tem = fold_build1_loc (loc, BIT_NOT_EXPR, type, mask0);
10993 if (operand_equal_p (tem, mask1, 0))
10995 tem = fold_build2_loc (loc, BIT_XOR_EXPR, type,
10996 TREE_OPERAND (arg0, 0), mask1);
10997 return fold_build2_loc (loc, MINUS_EXPR, type, tem, mask1);
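/* Worked example (added for clarity): with B == 3 (2^2 - 1) and
   A == 0b1011, (A & ~B) - (A & B) == 8 - 3 == 5, and the replacement
   (A ^ B) - B == 8 - 3 == 5 as well.  */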
11002 /* See if ARG1 is zero and X - ARG1 reduces to X. */
11003 else if (fold_real_zero_addition_p (TREE_TYPE (arg0), arg1, 1))
11004 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
11006 /* (ARG0 - ARG1) is the same as (-ARG1 + ARG0). So check whether
11007 ARG0 is zero and X + ARG0 reduces to X, since that would mean
11008 (-ARG1 + ARG0) reduces to -ARG1. */
11009 else if (fold_real_zero_addition_p (TREE_TYPE (arg1), arg0, 0))
11010 return negate_expr (fold_convert_loc (loc, type, arg1));
11012 /* Fold __complex__ ( x, 0 ) - __complex__ ( 0, y ) to
11013 __complex__ ( x, -y ). This is not the same for SNaNs or if
11014 signed zeros are involved. */
11015 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
11016 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
11017 && COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0)))
11019 tree rtype = TREE_TYPE (TREE_TYPE (arg0));
11020 tree arg0r = fold_unary_loc (loc, REALPART_EXPR, rtype, arg0);
11021 tree arg0i = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg0);
11022 bool arg0rz = false, arg0iz = false;
11023 if ((arg0r && (arg0rz = real_zerop (arg0r)))
11024 || (arg0i && (arg0iz = real_zerop (arg0i))))
11026 tree arg1r = fold_unary_loc (loc, REALPART_EXPR, rtype, arg1);
11027 tree arg1i = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg1);
11028 if (arg0rz && arg1i && real_zerop (arg1i))
11030 tree rp = fold_build1_loc (loc, NEGATE_EXPR, rtype,
11031 arg1r ? arg1r
11032 : build1 (REALPART_EXPR, rtype, arg1));
11033 tree ip = arg0i ? arg0i
11034 : build1 (IMAGPART_EXPR, rtype, arg0);
11035 return fold_build2_loc (loc, COMPLEX_EXPR, type, rp, ip);
11037 else if (arg0iz && arg1r && real_zerop (arg1r))
11039 tree rp = arg0r ? arg0r
11040 : build1 (REALPART_EXPR, rtype, arg0);
11041 tree ip = fold_build1_loc (loc, NEGATE_EXPR, rtype,
11042 arg1i ? arg1i
11043 : build1 (IMAGPART_EXPR, rtype, arg1));
11044 return fold_build2_loc (loc, COMPLEX_EXPR, type, rp, ip);
11049 /* Fold &x - &x. This can happen from &x.foo - &x.
11050 This is unsafe for certain floats even in non-IEEE formats.
11051 In IEEE, it is unsafe because it gives the wrong result for NaNs.
11052 Also note that operand_equal_p is always false if an operand
11053 is volatile. */
11055 if ((!FLOAT_TYPE_P (type) || !HONOR_NANS (TYPE_MODE (type)))
11056 && operand_equal_p (arg0, arg1, 0))
11057 return build_zero_cst (type);
11059 /* A - B -> A + (-B) if B is easily negatable. */
11060 if (negate_expr_p (arg1)
11061 && ((FLOAT_TYPE_P (type)
11062 /* Avoid this transformation if B is a positive REAL_CST. */
11063 && (TREE_CODE (arg1) != REAL_CST
11064 || REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg1))))
11065 || INTEGRAL_TYPE_P (type)))
11066 return fold_build2_loc (loc, PLUS_EXPR, type,
11067 fold_convert_loc (loc, type, arg0),
11068 fold_convert_loc (loc, type,
11069 negate_expr (arg1)));
11071 /* Try folding difference of addresses. */
11073 HOST_WIDE_INT diff;
11075 if ((TREE_CODE (arg0) == ADDR_EXPR
11076 || TREE_CODE (arg1) == ADDR_EXPR)
11077 && ptr_difference_const (arg0, arg1, &diff))
11078 return build_int_cst_type (type, diff);
11081 /* Fold &a[i] - &a[j] to i-j. */
11082 if (TREE_CODE (arg0) == ADDR_EXPR
11083 && TREE_CODE (TREE_OPERAND (arg0, 0)) == ARRAY_REF
11084 && TREE_CODE (arg1) == ADDR_EXPR
11085 && TREE_CODE (TREE_OPERAND (arg1, 0)) == ARRAY_REF)
11087 tree tem = fold_addr_of_array_ref_difference (loc, type,
11088 TREE_OPERAND (arg0, 0),
11089 TREE_OPERAND (arg1, 0));
11090 if (tem)
11091 return tem;
11094 if (FLOAT_TYPE_P (type)
11095 && flag_unsafe_math_optimizations
11096 && (TREE_CODE (arg0) == RDIV_EXPR || TREE_CODE (arg0) == MULT_EXPR)
11097 && (TREE_CODE (arg1) == RDIV_EXPR || TREE_CODE (arg1) == MULT_EXPR)
11098 && (tem = distribute_real_division (loc, code, type, arg0, arg1)))
11099 return tem;
11101 /* Handle (A1 * C1) - (A2 * C2) with A1, A2 or C1, C2 being the same or
11102 equal to 1. Make sure the type is not saturating and has the signedness of
11103 the stripped operands, as fold_plusminus_mult_expr will re-associate.
11104 ??? The latter condition should use TYPE_OVERFLOW_* flags instead. */
11105 if ((TREE_CODE (arg0) == MULT_EXPR
11106 || TREE_CODE (arg1) == MULT_EXPR)
11107 && !TYPE_SATURATING (type)
11108 && TYPE_UNSIGNED (type) == TYPE_UNSIGNED (TREE_TYPE (arg0))
11109 && TYPE_UNSIGNED (type) == TYPE_UNSIGNED (TREE_TYPE (arg1))
11110 && (!FLOAT_TYPE_P (type) || flag_associative_math))
11112 tree tem = fold_plusminus_mult_expr (loc, code, type, arg0, arg1);
11113 if (tem)
11114 return tem;
11117 goto associate;
11119 case MULT_EXPR:
11120 /* (-A) * (-B) -> A * B */
11121 if (TREE_CODE (arg0) == NEGATE_EXPR && negate_expr_p (arg1))
11122 return fold_build2_loc (loc, MULT_EXPR, type,
11123 fold_convert_loc (loc, type,
11124 TREE_OPERAND (arg0, 0)),
11125 fold_convert_loc (loc, type,
11126 negate_expr (arg1)));
11127 if (TREE_CODE (arg1) == NEGATE_EXPR && negate_expr_p (arg0))
11128 return fold_build2_loc (loc, MULT_EXPR, type,
11129 fold_convert_loc (loc, type,
11130 negate_expr (arg0)),
11131 fold_convert_loc (loc, type,
11132 TREE_OPERAND (arg1, 0)));
11134 if (! FLOAT_TYPE_P (type))
11136 if (integer_zerop (arg1))
11137 return omit_one_operand_loc (loc, type, arg1, arg0);
11138 if (integer_onep (arg1))
11139 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
11140 /* Transform x * -1 into -x. Make sure to do the negation
11141 on the original operand with conversions not stripped
11142 because we can only strip non-sign-changing conversions. */
11143 if (integer_minus_onep (arg1))
11144 return fold_convert_loc (loc, type, negate_expr (op0));
11145 /* Transform x * -C into -x * C if x is easily negatable. */
11146 if (TREE_CODE (arg1) == INTEGER_CST
11147 && tree_int_cst_sgn (arg1) == -1
11148 && negate_expr_p (arg0)
11149 && (tem = negate_expr (arg1)) != arg1
11150 && !TREE_OVERFLOW (tem))
11151 return fold_build2_loc (loc, MULT_EXPR, type,
11152 fold_convert_loc (loc, type,
11153 negate_expr (arg0)),
11154 tem);
11156 /* (a * (1 << b)) is (a << b) */
11157 if (TREE_CODE (arg1) == LSHIFT_EXPR
11158 && integer_onep (TREE_OPERAND (arg1, 0)))
11159 return fold_build2_loc (loc, LSHIFT_EXPR, type, op0,
11160 TREE_OPERAND (arg1, 1));
11161 if (TREE_CODE (arg0) == LSHIFT_EXPR
11162 && integer_onep (TREE_OPERAND (arg0, 0)))
11163 return fold_build2_loc (loc, LSHIFT_EXPR, type, op1,
11164 TREE_OPERAND (arg0, 1));
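/* Illustrative example (added for clarity): a * (1 << 3) == a * 8
   == a << 3 for integer a.  */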
11166 /* (A + A) * C -> A * 2 * C */
11167 if (TREE_CODE (arg0) == PLUS_EXPR
11168 && TREE_CODE (arg1) == INTEGER_CST
11169 && operand_equal_p (TREE_OPERAND (arg0, 0),
11170 TREE_OPERAND (arg0, 1), 0))
11171 return fold_build2_loc (loc, MULT_EXPR, type,
11172 omit_one_operand_loc (loc, type,
11173 TREE_OPERAND (arg0, 0),
11174 TREE_OPERAND (arg0, 1)),
11175 fold_build2_loc (loc, MULT_EXPR, type,
11176 build_int_cst (type, 2), arg1));
11178 /* ((T) (X /[ex] C)) * C cancels out if the conversion is
11179 sign-changing only. */
11180 if (TREE_CODE (arg1) == INTEGER_CST
11181 && TREE_CODE (arg0) == EXACT_DIV_EXPR
11182 && operand_equal_p (arg1, TREE_OPERAND (arg0, 1), 0))
11183 return fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
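/* Illustrative example (added for clarity): (X /[ex] 4) * 4 folds back
   to X, since the /[ex] operator asserts that the division is exact.  */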
11185 strict_overflow_p = false;
11186 if (TREE_CODE (arg1) == INTEGER_CST
11187 && 0 != (tem = extract_muldiv (op0, arg1, code, NULL_TREE,
11188 &strict_overflow_p)))
11190 if (strict_overflow_p)
11191 fold_overflow_warning (("assuming signed overflow does not "
11192 "occur when simplifying "
11193 "multiplication"),
11194 WARN_STRICT_OVERFLOW_MISC);
11195 return fold_convert_loc (loc, type, tem);
11198 /* Optimize z * conj(z) for integer complex numbers. */
11199 if (TREE_CODE (arg0) == CONJ_EXPR
11200 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
11201 return fold_mult_zconjz (loc, type, arg1);
11202 if (TREE_CODE (arg1) == CONJ_EXPR
11203 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
11204 return fold_mult_zconjz (loc, type, arg0);
11206 else
11208 /* Maybe fold x * 0 to 0. The expressions aren't the same
11209 when x is NaN, since x * 0 is also NaN. Nor are they the
11210 same in modes with signed zeros, since multiplying a
11211 negative value by 0 gives -0, not +0. */
11212 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0)))
11213 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
11214 && real_zerop (arg1))
11215 return omit_one_operand_loc (loc, type, arg1, arg0);
11216 /* In IEEE floating point, x*1 is not equivalent to x for snans.
11217 Likewise for complex arithmetic with signed zeros. */
11218 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
11219 && (!HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
11220 || !COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0)))
11221 && real_onep (arg1))
11222 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
11224 /* Transform x * -1.0 into -x. */
11225 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
11226 && (!HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
11227 || !COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0)))
11228 && real_minus_onep (arg1))
11229 return fold_convert_loc (loc, type, negate_expr (arg0));
11231 /* Convert (C1/X)*C2 into (C1*C2)/X. This transformation may change
11232 the result for floating-point types due to rounding, so it is applied
11233 only if -fassociative-math was specified. */
11234 if (flag_associative_math
11235 && TREE_CODE (arg0) == RDIV_EXPR
11236 && TREE_CODE (arg1) == REAL_CST
11237 && TREE_CODE (TREE_OPERAND (arg0, 0)) == REAL_CST)
11239 tree tem = const_binop (MULT_EXPR, TREE_OPERAND (arg0, 0),
11240 arg1);
11241 if (tem)
11242 return fold_build2_loc (loc, RDIV_EXPR, type, tem,
11243 TREE_OPERAND (arg0, 1));
11246 /* Strip sign operations from X in X*X, i.e. -Y*-Y -> Y*Y. */
11247 if (operand_equal_p (arg0, arg1, 0))
11249 tree tem = fold_strip_sign_ops (arg0);
11250 if (tem != NULL_TREE)
11252 tem = fold_convert_loc (loc, type, tem);
11253 return fold_build2_loc (loc, MULT_EXPR, type, tem, tem);
11257 /* Fold z * +-I to __complex__ (-+__imag z, +-__real z).
11258 This is not the same for NaNs or if signed zeros are
11259 involved. */
11260 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0)))
11261 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
11262 && COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0))
11263 && TREE_CODE (arg1) == COMPLEX_CST
11264 && real_zerop (TREE_REALPART (arg1)))
11266 tree rtype = TREE_TYPE (TREE_TYPE (arg0));
11267 if (real_onep (TREE_IMAGPART (arg1)))
11268 return
11269 fold_build2_loc (loc, COMPLEX_EXPR, type,
11270 negate_expr (fold_build1_loc (loc, IMAGPART_EXPR,
11271 rtype, arg0)),
11272 fold_build1_loc (loc, REALPART_EXPR, rtype, arg0));
11273 else if (real_minus_onep (TREE_IMAGPART (arg1)))
11274 return
11275 fold_build2_loc (loc, COMPLEX_EXPR, type,
11276 fold_build1_loc (loc, IMAGPART_EXPR, rtype, arg0),
11277 negate_expr (fold_build1_loc (loc, REALPART_EXPR,
11278 rtype, arg0)));
11281 /* Optimize z * conj(z) for floating point complex numbers.
11282 Guarded by flag_unsafe_math_optimizations as non-finite
11283 imaginary components don't produce scalar results. */
11284 if (flag_unsafe_math_optimizations
11285 && TREE_CODE (arg0) == CONJ_EXPR
11286 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
11287 return fold_mult_zconjz (loc, type, arg1);
11288 if (flag_unsafe_math_optimizations
11289 && TREE_CODE (arg1) == CONJ_EXPR
11290 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
11291 return fold_mult_zconjz (loc, type, arg0);
11293 if (flag_unsafe_math_optimizations)
11295 enum built_in_function fcode0 = builtin_mathfn_code (arg0);
11296 enum built_in_function fcode1 = builtin_mathfn_code (arg1);
11298 /* Optimizations of root(...)*root(...). */
11299 if (fcode0 == fcode1 && BUILTIN_ROOT_P (fcode0))
11301 tree rootfn, arg;
11302 tree arg00 = CALL_EXPR_ARG (arg0, 0);
11303 tree arg10 = CALL_EXPR_ARG (arg1, 0);
11305 /* Optimize sqrt(x)*sqrt(x) as x. */
11306 if (BUILTIN_SQRT_P (fcode0)
11307 && operand_equal_p (arg00, arg10, 0)
11308 && ! HONOR_SNANS (TYPE_MODE (type)))
11309 return arg00;
11311 /* Optimize root(x)*root(y) as root(x*y). */
11312 rootfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
11313 arg = fold_build2_loc (loc, MULT_EXPR, type, arg00, arg10);
11314 return build_call_expr_loc (loc, rootfn, 1, arg);
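/* Illustrative example (added for clarity): sqrt (2.0) * sqrt (8.0)
   and sqrt (2.0 * 8.0) both evaluate to 4.0; the fold is guarded by
   flag_unsafe_math_optimizations since x and y may be negative.  */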
11317 /* Optimize expN(x)*expN(y) as expN(x+y). */
11318 if (fcode0 == fcode1 && BUILTIN_EXPONENT_P (fcode0))
11320 tree expfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
11321 tree arg = fold_build2_loc (loc, PLUS_EXPR, type,
11322 CALL_EXPR_ARG (arg0, 0),
11323 CALL_EXPR_ARG (arg1, 0));
11324 return build_call_expr_loc (loc, expfn, 1, arg);
11327 /* Optimizations of pow(...)*pow(...). */
11328 if ((fcode0 == BUILT_IN_POW && fcode1 == BUILT_IN_POW)
11329 || (fcode0 == BUILT_IN_POWF && fcode1 == BUILT_IN_POWF)
11330 || (fcode0 == BUILT_IN_POWL && fcode1 == BUILT_IN_POWL))
11332 tree arg00 = CALL_EXPR_ARG (arg0, 0);
11333 tree arg01 = CALL_EXPR_ARG (arg0, 1);
11334 tree arg10 = CALL_EXPR_ARG (arg1, 0);
11335 tree arg11 = CALL_EXPR_ARG (arg1, 1);
11337 /* Optimize pow(x,y)*pow(z,y) as pow(x*z,y). */
11338 if (operand_equal_p (arg01, arg11, 0))
11340 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
11341 tree arg = fold_build2_loc (loc, MULT_EXPR, type,
11342 arg00, arg10);
11343 return build_call_expr_loc (loc, powfn, 2, arg, arg01);
11346 /* Optimize pow(x,y)*pow(x,z) as pow(x,y+z). */
11347 if (operand_equal_p (arg00, arg10, 0))
11349 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
11350 tree arg = fold_build2_loc (loc, PLUS_EXPR, type,
11351 arg01, arg11);
11352 return build_call_expr_loc (loc, powfn, 2, arg00, arg);
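/* Illustrative examples (added for clarity): pow (x, y) * pow (z, y)
   becomes pow (x * z, y), and pow (x, 2.0) * pow (x, 3.0) becomes
   pow (x, 5.0).  */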
11356 /* Optimize tan(x)*cos(x) as sin(x). */
11357 if (((fcode0 == BUILT_IN_TAN && fcode1 == BUILT_IN_COS)
11358 || (fcode0 == BUILT_IN_TANF && fcode1 == BUILT_IN_COSF)
11359 || (fcode0 == BUILT_IN_TANL && fcode1 == BUILT_IN_COSL)
11360 || (fcode0 == BUILT_IN_COS && fcode1 == BUILT_IN_TAN)
11361 || (fcode0 == BUILT_IN_COSF && fcode1 == BUILT_IN_TANF)
11362 || (fcode0 == BUILT_IN_COSL && fcode1 == BUILT_IN_TANL))
11363 && operand_equal_p (CALL_EXPR_ARG (arg0, 0),
11364 CALL_EXPR_ARG (arg1, 0), 0))
11366 tree sinfn = mathfn_built_in (type, BUILT_IN_SIN);
11368 if (sinfn != NULL_TREE)
11369 return build_call_expr_loc (loc, sinfn, 1,
11370 CALL_EXPR_ARG (arg0, 0));
11373 /* Optimize x*pow(x,c) as pow(x,c+1). */
11374 if (fcode1 == BUILT_IN_POW
11375 || fcode1 == BUILT_IN_POWF
11376 || fcode1 == BUILT_IN_POWL)
11378 tree arg10 = CALL_EXPR_ARG (arg1, 0);
11379 tree arg11 = CALL_EXPR_ARG (arg1, 1);
11380 if (TREE_CODE (arg11) == REAL_CST
11381 && !TREE_OVERFLOW (arg11)
11382 && operand_equal_p (arg0, arg10, 0))
11384 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg1), 0);
11385 REAL_VALUE_TYPE c;
11386 tree arg;
11388 c = TREE_REAL_CST (arg11);
11389 real_arithmetic (&c, PLUS_EXPR, &c, &dconst1);
11390 arg = build_real (type, c);
11391 return build_call_expr_loc (loc, powfn, 2, arg0, arg);
11395 /* Optimize pow(x,c)*x as pow(x,c+1). */
11396 if (fcode0 == BUILT_IN_POW
11397 || fcode0 == BUILT_IN_POWF
11398 || fcode0 == BUILT_IN_POWL)
11400 tree arg00 = CALL_EXPR_ARG (arg0, 0);
11401 tree arg01 = CALL_EXPR_ARG (arg0, 1);
11402 if (TREE_CODE (arg01) == REAL_CST
11403 && !TREE_OVERFLOW (arg01)
11404 && operand_equal_p (arg1, arg00, 0))
11406 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
11407 REAL_VALUE_TYPE c;
11408 tree arg;
11410 c = TREE_REAL_CST (arg01);
11411 real_arithmetic (&c, PLUS_EXPR, &c, &dconst1);
11412 arg = build_real (type, c);
11413 return build_call_expr_loc (loc, powfn, 2, arg1, arg);
11417 /* Canonicalize x*x as pow(x,2.0), which is expanded as x*x. */
11418 if (!in_gimple_form
11419 && optimize
11420 && operand_equal_p (arg0, arg1, 0))
11422 tree powfn = mathfn_built_in (type, BUILT_IN_POW);
11424 if (powfn)
11426 tree arg = build_real (type, dconst2);
11427 return build_call_expr_loc (loc, powfn, 2, arg0, arg);
11432 goto associate;
11434 case BIT_IOR_EXPR:
11435 bit_ior:
11436 if (integer_all_onesp (arg1))
11437 return omit_one_operand_loc (loc, type, arg1, arg0);
11438 if (integer_zerop (arg1))
11439 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
11440 if (operand_equal_p (arg0, arg1, 0))
11441 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
11443 /* ~X | X is -1. */
11444 if (TREE_CODE (arg0) == BIT_NOT_EXPR
11445 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
11447 t1 = build_zero_cst (type);
11448 t1 = fold_unary_loc (loc, BIT_NOT_EXPR, type, t1);
11449 return omit_one_operand_loc (loc, type, t1, arg1);
11452 /* X | ~X is -1. */
11453 if (TREE_CODE (arg1) == BIT_NOT_EXPR
11454 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
11456 t1 = build_zero_cst (type);
11457 t1 = fold_unary_loc (loc, BIT_NOT_EXPR, type, t1);
11458 return omit_one_operand_loc (loc, type, t1, arg0);
11461 /* Canonicalize (X & C1) | C2. */
11462 if (TREE_CODE (arg0) == BIT_AND_EXPR
11463 && TREE_CODE (arg1) == INTEGER_CST
11464 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
11466 int width = TYPE_PRECISION (type), w;
11467 wide_int c1 = TREE_OPERAND (arg0, 1);
11468 wide_int c2 = arg1;
11470 /* If (C1&C2) == C1, then (X&C1)|C2 becomes (X,C2). */
11471 if ((c1 & c2) == c1)
11472 return omit_one_operand_loc (loc, type, arg1,
11473 TREE_OPERAND (arg0, 0));
11475 wide_int msk = wi::mask (width, false,
11476 TYPE_PRECISION (TREE_TYPE (arg1)));
11478 /* If (C1|C2) == ~0 then (X&C1)|C2 becomes X|C2. */
11479 if (msk.and_not (c1 | c2) == 0)
11480 return fold_build2_loc (loc, BIT_IOR_EXPR, type,
11481 TREE_OPERAND (arg0, 0), arg1);
11483 /* Minimize the number of bits set in C1, i.e. C1 := C1 & ~C2,
11484 unless (C1 & ~C2) | (C2 & C3) for some C3 is a mask of some
11485 mode which allows further optimizations. */
11486 c1 &= msk;
11487 c2 &= msk;
11488 wide_int c3 = c1.and_not (c2);
11489 for (w = BITS_PER_UNIT; w <= width; w <<= 1)
11491 wide_int mask = wi::mask (w, false,
11492 TYPE_PRECISION (type));
11493 if (((c1 | c2) & mask) == mask && c1.and_not (mask) == 0)
11495 c3 = mask;
11496 break;
11500 if (c3 != c1)
11501 return fold_build2_loc (loc, BIT_IOR_EXPR, type,
11502 fold_build2_loc (loc, BIT_AND_EXPR, type,
11503 TREE_OPERAND (arg0, 0),
11504 wide_int_to_tree (type,
11505 c3)),
11506 arg1);
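/* Worked example (added for clarity, 8-bit): (X & 0xF0) | 0x3C has
   C1 & C2 == 0x30 != C1 and C1 | C2 == 0xFC != ~0, so C1 is minimized
   to C3 = C1 & ~C2 == 0xC0, giving (X & 0xC0) | 0x3C.  */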
11509 /* (X & Y) | Y is (X, Y). */
11510 if (TREE_CODE (arg0) == BIT_AND_EXPR
11511 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
11512 return omit_one_operand_loc (loc, type, arg1, TREE_OPERAND (arg0, 0));
11513 /* (X & Y) | X is (Y, X). */
11514 if (TREE_CODE (arg0) == BIT_AND_EXPR
11515 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
11516 && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
11517 return omit_one_operand_loc (loc, type, arg1, TREE_OPERAND (arg0, 1));
11518 /* X | (X & Y) is (Y, X). */
11519 if (TREE_CODE (arg1) == BIT_AND_EXPR
11520 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0)
11521 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 1)))
11522 return omit_one_operand_loc (loc, type, arg0, TREE_OPERAND (arg1, 1));
11523 /* X | (Y & X) is (Y, X). */
11524 if (TREE_CODE (arg1) == BIT_AND_EXPR
11525 && operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0)
11526 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
11527 return omit_one_operand_loc (loc, type, arg0, TREE_OPERAND (arg1, 0));
11529 /* (X & ~Y) | (~X & Y) is X ^ Y */
11530 if (TREE_CODE (arg0) == BIT_AND_EXPR
11531 && TREE_CODE (arg1) == BIT_AND_EXPR)
11533 tree a0, a1, l0, l1, n0, n1;
11535 a0 = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 0));
11536 a1 = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 1));
11538 l0 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
11539 l1 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
11541 n0 = fold_build1_loc (loc, BIT_NOT_EXPR, type, l0);
11542 n1 = fold_build1_loc (loc, BIT_NOT_EXPR, type, l1);
11544 if ((operand_equal_p (n0, a0, 0)
11545 && operand_equal_p (n1, a1, 0))
11546 || (operand_equal_p (n0, a1, 0)
11547 && operand_equal_p (n1, a0, 0)))
11548 return fold_build2_loc (loc, BIT_XOR_EXPR, type, l0, n1);
11551 t1 = distribute_bit_expr (loc, code, type, arg0, arg1);
11552 if (t1 != NULL_TREE)
11553 return t1;
11555 /* Convert (or (not arg0) (not arg1)) to (not (and (arg0) (arg1))).
11557 This results in more efficient code for machines without a NAND
11558 instruction. Combine will canonicalize to the first form
11559 which will allow use of NAND instructions provided by the
11560 backend if they exist. */
11561 if (TREE_CODE (arg0) == BIT_NOT_EXPR
11562 && TREE_CODE (arg1) == BIT_NOT_EXPR)
11564 return
11565 fold_build1_loc (loc, BIT_NOT_EXPR, type,
11566 build2 (BIT_AND_EXPR, type,
11567 fold_convert_loc (loc, type,
11568 TREE_OPERAND (arg0, 0)),
11569 fold_convert_loc (loc, type,
11570 TREE_OPERAND (arg1, 0))));
11573 /* See if this can be simplified into a rotate first. If that
11574 is unsuccessful continue in the association code. */
11575 goto bit_rotate;
11577 case BIT_XOR_EXPR:
11578 if (integer_zerop (arg1))
11579 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
11580 if (integer_all_onesp (arg1))
11581 return fold_build1_loc (loc, BIT_NOT_EXPR, type, op0);
11582 if (operand_equal_p (arg0, arg1, 0))
11583 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
11585 /* ~X ^ X is -1. */
11586 if (TREE_CODE (arg0) == BIT_NOT_EXPR
11587 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
11589 t1 = build_zero_cst (type);
11590 t1 = fold_unary_loc (loc, BIT_NOT_EXPR, type, t1);
11591 return omit_one_operand_loc (loc, type, t1, arg1);
11594 /* X ^ ~X is -1. */
11595 if (TREE_CODE (arg1) == BIT_NOT_EXPR
11596 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
11598 t1 = build_zero_cst (type);
11599 t1 = fold_unary_loc (loc, BIT_NOT_EXPR, type, t1);
11600 return omit_one_operand_loc (loc, type, t1, arg0);
11603 /* If we are XORing two BIT_AND_EXPR's, both of which are and'ing
11604 with a constant, and the two constants have no bits in common,
11605 we should treat this as a BIT_IOR_EXPR since this may produce more
11606 simplifications. */
11607 if (TREE_CODE (arg0) == BIT_AND_EXPR
11608 && TREE_CODE (arg1) == BIT_AND_EXPR
11609 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
11610 && TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
11611 && wi::bit_and (TREE_OPERAND (arg0, 1),
11612 TREE_OPERAND (arg1, 1)) == 0)
11614 code = BIT_IOR_EXPR;
11615 goto bit_ior;
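/* Illustrative example (added for clarity): (X & 0x0F) ^ (X & 0xF0)
   has disjoint constants, so it is handled as (X & 0x0F) | (X & 0xF0),
   which other folds can merge further.  */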
11618 /* (X | Y) ^ X -> Y & ~X.  */
11619 if (TREE_CODE (arg0) == BIT_IOR_EXPR
11620 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
11622 tree t2 = TREE_OPERAND (arg0, 1);
11623 t1 = fold_build1_loc (loc, BIT_NOT_EXPR, TREE_TYPE (arg1),
11624 arg1);
11625 t1 = fold_build2_loc (loc, BIT_AND_EXPR, type,
11626 fold_convert_loc (loc, type, t2),
11627 fold_convert_loc (loc, type, t1));
11628 return t1;
11631 /* (Y | X) ^ X -> Y & ~X.  */
11632 if (TREE_CODE (arg0) == BIT_IOR_EXPR
11633 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
11635 tree t2 = TREE_OPERAND (arg0, 0);
11636 t1 = fold_build1_loc (loc, BIT_NOT_EXPR, TREE_TYPE (arg1),
11637 arg1);
11638 t1 = fold_build2_loc (loc, BIT_AND_EXPR, type,
11639 fold_convert_loc (loc, type, t2),
11640 fold_convert_loc (loc, type, t1));
11641 return t1;
11644 /* X ^ (X | Y) -> Y & ~X.  */
11645 if (TREE_CODE (arg1) == BIT_IOR_EXPR
11646 && operand_equal_p (TREE_OPERAND (arg1, 0), arg0, 0))
11648 tree t2 = TREE_OPERAND (arg1, 1);
11649 t1 = fold_build1_loc (loc, BIT_NOT_EXPR, TREE_TYPE (arg0),
11650 arg0);
11651 t1 = fold_build2_loc (loc, BIT_AND_EXPR, type,
11652 fold_convert_loc (loc, type, t2),
11653 fold_convert_loc (loc, type, t1));
11654 return t1;
11657 /* X ^ (Y | X) -> Y & ~X.  */
11658 if (TREE_CODE (arg1) == BIT_IOR_EXPR
11659 && operand_equal_p (TREE_OPERAND (arg1, 1), arg0, 0))
11661 tree t2 = TREE_OPERAND (arg1, 0);
11662 t1 = fold_build1_loc (loc, BIT_NOT_EXPR, TREE_TYPE (arg0),
11663 arg0);
11664 t1 = fold_build2_loc (loc, BIT_AND_EXPR, type,
11665 fold_convert_loc (loc, type, t2),
11666 fold_convert_loc (loc, type, t1));
11667 return t1;
11670 /* Convert ~X ^ ~Y to X ^ Y. */
11671 if (TREE_CODE (arg0) == BIT_NOT_EXPR
11672 && TREE_CODE (arg1) == BIT_NOT_EXPR)
11673 return fold_build2_loc (loc, code, type,
11674 fold_convert_loc (loc, type,
11675 TREE_OPERAND (arg0, 0)),
11676 fold_convert_loc (loc, type,
11677 TREE_OPERAND (arg1, 0)));
11679 /* Convert ~X ^ C to X ^ ~C. */
11680 if (TREE_CODE (arg0) == BIT_NOT_EXPR
11681 && TREE_CODE (arg1) == INTEGER_CST)
11682 return fold_build2_loc (loc, code, type,
11683 fold_convert_loc (loc, type,
11684 TREE_OPERAND (arg0, 0)),
11685 fold_build1_loc (loc, BIT_NOT_EXPR, type, arg1));
11687 /* Fold (X & 1) ^ 1 as (X & 1) == 0. */
11688 if (TREE_CODE (arg0) == BIT_AND_EXPR
11689 && integer_onep (TREE_OPERAND (arg0, 1))
11690 && integer_onep (arg1))
11691 return fold_build2_loc (loc, EQ_EXPR, type, arg0,
11692 build_zero_cst (TREE_TYPE (arg0)));
11694 /* Fold (X & Y) ^ Y as ~X & Y. */
11695 if (TREE_CODE (arg0) == BIT_AND_EXPR
11696 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
11698 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
11699 return fold_build2_loc (loc, BIT_AND_EXPR, type,
11700 fold_build1_loc (loc, BIT_NOT_EXPR, type, tem),
11701 fold_convert_loc (loc, type, arg1));
11703 /* Fold (X & Y) ^ X as ~Y & X. */
11704 if (TREE_CODE (arg0) == BIT_AND_EXPR
11705 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
11706 && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
11708 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
11709 return fold_build2_loc (loc, BIT_AND_EXPR, type,
11710 fold_build1_loc (loc, BIT_NOT_EXPR, type, tem),
11711 fold_convert_loc (loc, type, arg1));
11713 /* Fold X ^ (X & Y) as X & ~Y. */
11714 if (TREE_CODE (arg1) == BIT_AND_EXPR
11715 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
11717 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 1));
11718 return fold_build2_loc (loc, BIT_AND_EXPR, type,
11719 fold_convert_loc (loc, type, arg0),
11720 fold_build1_loc (loc, BIT_NOT_EXPR, type, tem));
11722 /* Fold X ^ (Y & X) as ~Y & X. */
11723 if (TREE_CODE (arg1) == BIT_AND_EXPR
11724 && operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0)
11725 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
11727 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 0));
11728 return fold_build2_loc (loc, BIT_AND_EXPR, type,
11729 fold_build1_loc (loc, BIT_NOT_EXPR, type, tem),
11730 fold_convert_loc (loc, type, arg0));
11733 /* See if this can be simplified into a rotate first. If that
11734 is unsuccessful continue in the association code. */
11735 goto bit_rotate;
11737 case BIT_AND_EXPR:
11738 if (integer_all_onesp (arg1))
11739 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
11740 if (integer_zerop (arg1))
11741 return omit_one_operand_loc (loc, type, arg1, arg0);
11742 if (operand_equal_p (arg0, arg1, 0))
11743 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
11745 /* ~X & X, (X == 0) & X, and !X & X are always zero. */
11746 if ((TREE_CODE (arg0) == BIT_NOT_EXPR
11747 || TREE_CODE (arg0) == TRUTH_NOT_EXPR
11748 || (TREE_CODE (arg0) == EQ_EXPR
11749 && integer_zerop (TREE_OPERAND (arg0, 1))))
11750 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
11751 return omit_one_operand_loc (loc, type, integer_zero_node, arg1);
11753 /* X & ~X , X & (X == 0), and X & !X are always zero. */
11754 if ((TREE_CODE (arg1) == BIT_NOT_EXPR
11755 || TREE_CODE (arg1) == TRUTH_NOT_EXPR
11756 || (TREE_CODE (arg1) == EQ_EXPR
11757 && integer_zerop (TREE_OPERAND (arg1, 1))))
11758 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
11759 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
11761 /* Canonicalize (X | C1) & C2 as (X & C2) | (C1 & C2). */
11762 if (TREE_CODE (arg0) == BIT_IOR_EXPR
11763 && TREE_CODE (arg1) == INTEGER_CST
11764 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
11766 tree tmp1 = fold_convert_loc (loc, type, arg1);
11767 tree tmp2 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
11768 tree tmp3 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
11769 tmp2 = fold_build2_loc (loc, BIT_AND_EXPR, type, tmp2, tmp1);
11770 tmp3 = fold_build2_loc (loc, BIT_AND_EXPR, type, tmp3, tmp1);
11771 return
11772 fold_convert_loc (loc, type,
11773 fold_build2_loc (loc, BIT_IOR_EXPR,
11774 type, tmp2, tmp3));
11777 /* (X | Y) & Y is (X, Y). */
11778 if (TREE_CODE (arg0) == BIT_IOR_EXPR
11779 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
11780 return omit_one_operand_loc (loc, type, arg1, TREE_OPERAND (arg0, 0));
11781 /* (X | Y) & X is (Y, X). */
11782 if (TREE_CODE (arg0) == BIT_IOR_EXPR
11783 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
11784 && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
11785 return omit_one_operand_loc (loc, type, arg1, TREE_OPERAND (arg0, 1));
11786 /* X & (X | Y) is (Y, X). */
11787 if (TREE_CODE (arg1) == BIT_IOR_EXPR
11788 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0)
11789 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 1)))
11790 return omit_one_operand_loc (loc, type, arg0, TREE_OPERAND (arg1, 1));
11791 /* X & (Y | X) is (Y, X). */
11792 if (TREE_CODE (arg1) == BIT_IOR_EXPR
11793 && operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0)
11794 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
11795 return omit_one_operand_loc (loc, type, arg0, TREE_OPERAND (arg1, 0));
11797 /* Fold (X ^ 1) & 1 as (X & 1) == 0. */
11798 if (TREE_CODE (arg0) == BIT_XOR_EXPR
11799 && integer_onep (TREE_OPERAND (arg0, 1))
11800 && integer_onep (arg1))
11802 tree tem2;
11803 tem = TREE_OPERAND (arg0, 0);
11804 tem2 = fold_convert_loc (loc, TREE_TYPE (tem), arg1);
11805 tem2 = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (tem),
11806 tem, tem2);
11807 return fold_build2_loc (loc, EQ_EXPR, type, tem2,
11808 build_zero_cst (TREE_TYPE (tem)));
11810 /* Fold ~X & 1 as (X & 1) == 0. */
11811 if (TREE_CODE (arg0) == BIT_NOT_EXPR
11812 && integer_onep (arg1))
11814 tree tem2;
11815 tem = TREE_OPERAND (arg0, 0);
11816 tem2 = fold_convert_loc (loc, TREE_TYPE (tem), arg1);
11817 tem2 = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (tem),
11818 tem, tem2);
11819 return fold_build2_loc (loc, EQ_EXPR, type, tem2,
11820 build_zero_cst (TREE_TYPE (tem)));
11822 /* Fold !X & 1 as X == 0. */
11823 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
11824 && integer_onep (arg1))
11826 tem = TREE_OPERAND (arg0, 0);
11827 return fold_build2_loc (loc, EQ_EXPR, type, tem,
11828 build_zero_cst (TREE_TYPE (tem)));
11831 /* Fold (X ^ Y) & Y as ~X & Y. */
11832 if (TREE_CODE (arg0) == BIT_XOR_EXPR
11833 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
11835 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
11836 return fold_build2_loc (loc, BIT_AND_EXPR, type,
11837 fold_build1_loc (loc, BIT_NOT_EXPR, type, tem),
11838 fold_convert_loc (loc, type, arg1));
11840 /* Fold (X ^ Y) & X as ~Y & X. */
11841 if (TREE_CODE (arg0) == BIT_XOR_EXPR
11842 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
11843 && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
11845 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
11846 return fold_build2_loc (loc, BIT_AND_EXPR, type,
11847 fold_build1_loc (loc, BIT_NOT_EXPR, type, tem),
11848 fold_convert_loc (loc, type, arg1));
11850 /* Fold X & (X ^ Y) as X & ~Y. */
11851 if (TREE_CODE (arg1) == BIT_XOR_EXPR
11852 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
11854 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 1));
11855 return fold_build2_loc (loc, BIT_AND_EXPR, type,
11856 fold_convert_loc (loc, type, arg0),
11857 fold_build1_loc (loc, BIT_NOT_EXPR, type, tem));
11859 /* Fold X & (Y ^ X) as ~Y & X. */
11860 if (TREE_CODE (arg1) == BIT_XOR_EXPR
11861 && operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0)
11862 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
11864 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 0));
11865 return fold_build2_loc (loc, BIT_AND_EXPR, type,
11866 fold_build1_loc (loc, BIT_NOT_EXPR, type, tem),
11867 fold_convert_loc (loc, type, arg0));
11870 /* Fold (X * Y) & -(1 << CST) to X * Y if Y is a constant
11871 multiple of 1 << CST. */
11872 if (TREE_CODE (arg1) == INTEGER_CST)
11874 wide_int cst1 = arg1;
11875 wide_int ncst1 = -cst1;
11876 if ((cst1 & ncst1) == ncst1
11877 && multiple_of_p (type, arg0,
11878 wide_int_to_tree (TREE_TYPE (arg1), ncst1)))
11879 return fold_convert_loc (loc, type, arg0);
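/* Worked example (added for clarity): (X * 8) & -8 folds to X * 8,
   since -8 == -(1 << 3) and X * 8 is always a multiple of 8, making
   the mask a no-op.  */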
11882 /* Fold (X * CST1) & CST2 to zero if we can, or drop known zero
11883 bits from CST2. */
11884 if (TREE_CODE (arg1) == INTEGER_CST
11885 && TREE_CODE (arg0) == MULT_EXPR
11886 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
11888 wide_int warg1 = arg1;
11889 wide_int masked = mask_with_tz (type, warg1, TREE_OPERAND (arg0, 1));
11891 if (masked == 0)
11892 return omit_two_operands_loc (loc, type, build_zero_cst (type),
11893 arg0, arg1);
11894 else if (masked != warg1)
11896 /* Avoid the transform if arg1 is a mask of some
11897 mode which allows further optimizations. */
11898 int pop = wi::popcount (warg1);
11899 if (!(pop >= BITS_PER_UNIT
11900 && exact_log2 (pop) != -1
11901 && wi::mask (pop, false, warg1.get_precision ()) == warg1))
11902 return fold_build2_loc (loc, code, type, op0,
11903 wide_int_to_tree (type, masked));
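/* Worked examples (added for clarity): (X * 4) & 3 folds to 0, since
   the low two bits of X * 4 are known zero; (X * 4) & 0x7F drops those
   bits to give (X * 4) & 0x7C.  With mask 0xFF the transform is
   skipped, because 0xFF is already a mode mask that other folds
   prefer to keep.  */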
11907 /* For constants M and N, if M == (1LL << cst) - 1 && (N & M) == M,
11908 ((A & N) + B) & M -> (A + B) & M
11909 Similarly if (N & M) == 0,
11910 ((A | N) + B) & M -> (A + B) & M
11911 and for - instead of + (or unary - instead of +)
11912 and/or ^ instead of |.
11913 If B is constant and (B & M) == 0, fold into A & M. */
11914 if (TREE_CODE (arg1) == INTEGER_CST)
11916 wide_int cst1 = arg1;
11917 if ((~cst1 != 0) && (cst1 & (cst1 + 1)) == 0
11918 && INTEGRAL_TYPE_P (TREE_TYPE (arg0))
11919 && (TREE_CODE (arg0) == PLUS_EXPR
11920 || TREE_CODE (arg0) == MINUS_EXPR
11921 || TREE_CODE (arg0) == NEGATE_EXPR)
11922 && (TYPE_OVERFLOW_WRAPS (TREE_TYPE (arg0))
11923 || TREE_CODE (TREE_TYPE (arg0)) == INTEGER_TYPE))
11925 tree pmop[2];
11926 int which = 0;
11927 wide_int cst0;
11929 /* Now we know that arg0 is (C + D) or (C - D) or
11930 -C and arg1 (M) == (1LL << cst) - 1.
11931 Store C into PMOP[0] and D into PMOP[1]. */
11932 pmop[0] = TREE_OPERAND (arg0, 0);
11933 pmop[1] = NULL;
11934 if (TREE_CODE (arg0) != NEGATE_EXPR)
11936 pmop[1] = TREE_OPERAND (arg0, 1);
11937 which = 1;
11940 if ((wi::max_value (TREE_TYPE (arg0)) & cst1) != cst1)
11941 which = -1;
11943 for (; which >= 0; which--)
11944 switch (TREE_CODE (pmop[which]))
11946 case BIT_AND_EXPR:
11947 case BIT_IOR_EXPR:
11948 case BIT_XOR_EXPR:
11949 if (TREE_CODE (TREE_OPERAND (pmop[which], 1))
11950 != INTEGER_CST)
11951 break;
11952 cst0 = TREE_OPERAND (pmop[which], 1);
11953 cst0 &= cst1;
11954 if (TREE_CODE (pmop[which]) == BIT_AND_EXPR)
11956 if (cst0 != cst1)
11957 break;
11959 else if (cst0 != 0)
11960 break;
11961 /* If C or D is of the form (A & N) where
11962 (N & M) == M, or of the form (A | N) or
11963 (A ^ N) where (N & M) == 0, replace it with A. */
11964 pmop[which] = TREE_OPERAND (pmop[which], 0);
11965 break;
11966 case INTEGER_CST:
11967 /* If C or D is a constant N where (N & M) == 0, it can be
11968 omitted (assumed 0). */
11969 if ((TREE_CODE (arg0) == PLUS_EXPR
11970 || (TREE_CODE (arg0) == MINUS_EXPR && which == 0))
11971 && (cst1 & pmop[which]) == 0)
11972 pmop[which] = NULL;
11973 break;
11974 default:
11975 break;
11978 /* Only build anything new if we optimized one or both arguments
11979 above. */
11980 if (pmop[0] != TREE_OPERAND (arg0, 0)
11981 || (TREE_CODE (arg0) != NEGATE_EXPR
11982 && pmop[1] != TREE_OPERAND (arg0, 1)))
11984 tree utype = TREE_TYPE (arg0);
11985 if (! TYPE_OVERFLOW_WRAPS (TREE_TYPE (arg0)))
11987 /* Perform the operations in a type that has defined
11988 overflow behavior. */
11989 utype = unsigned_type_for (TREE_TYPE (arg0));
11990 if (pmop[0] != NULL)
11991 pmop[0] = fold_convert_loc (loc, utype, pmop[0]);
11992 if (pmop[1] != NULL)
11993 pmop[1] = fold_convert_loc (loc, utype, pmop[1]);
11996 if (TREE_CODE (arg0) == NEGATE_EXPR)
11997 tem = fold_build1_loc (loc, NEGATE_EXPR, utype, pmop[0]);
11998 else if (TREE_CODE (arg0) == PLUS_EXPR)
12000 if (pmop[0] != NULL && pmop[1] != NULL)
12001 tem = fold_build2_loc (loc, PLUS_EXPR, utype,
12002 pmop[0], pmop[1]);
12003 else if (pmop[0] != NULL)
12004 tem = pmop[0];
12005 else if (pmop[1] != NULL)
12006 tem = pmop[1];
12007 else
12008 return build_int_cst (type, 0);
12010 else if (pmop[0] == NULL)
12011 tem = fold_build1_loc (loc, NEGATE_EXPR, utype, pmop[1]);
12012 else
12013 tem = fold_build2_loc (loc, MINUS_EXPR, utype,
12014 pmop[0], pmop[1]);
12015 /* TEM is now the new binary +, - or unary - replacement. */
12016 tem = fold_build2_loc (loc, BIT_AND_EXPR, utype, tem,
12017 fold_convert_loc (loc, utype, arg1));
12018 return fold_convert_loc (loc, type, tem);
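/* Worked example (added for clarity): with M == 0xFF,
   ((A & 0x1FF) + B) & 0xFF becomes (A + B) & 0xFF since N & M == M;
   ((A | 0x100) + B) & 0xFF folds the same way since N & M == 0.  */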
12023 t1 = distribute_bit_expr (loc, code, type, arg0, arg1);
12024 if (t1 != NULL_TREE)
12025 return t1;
12026 /* Simplify ((int)c & 0377) into (int)c, if c is unsigned char. */
12027 if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg0) == NOP_EXPR
12028 && TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg0, 0))))
12030 prec = TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg0, 0)));
12032 wide_int mask = wide_int::from (arg1, prec, UNSIGNED);
12033 if (mask == -1)
12034 return
12035 fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
12038 /* Convert (and (not arg0) (not arg1)) to (not (or (arg0) (arg1))).
12040 This results in more efficient code for machines without a NOR
12041 instruction. Combine will canonicalize to the first form
12042 which will allow use of NOR instructions provided by the
12043 backend if they exist. */
12044 if (TREE_CODE (arg0) == BIT_NOT_EXPR
12045 && TREE_CODE (arg1) == BIT_NOT_EXPR)
12047 return fold_build1_loc (loc, BIT_NOT_EXPR, type,
12048 build2 (BIT_IOR_EXPR, type,
12049 fold_convert_loc (loc, type,
12050 TREE_OPERAND (arg0, 0)),
12051 fold_convert_loc (loc, type,
12052 TREE_OPERAND (arg1, 0))));
12055 /* If arg0 is derived from the address of an object or function, we may
12056 be able to fold this expression using the object or function's
12057 alignment. */
12058 if (POINTER_TYPE_P (TREE_TYPE (arg0)) && tree_fits_uhwi_p (arg1))
12060 unsigned HOST_WIDE_INT modulus, residue;
12061 unsigned HOST_WIDE_INT low = tree_to_uhwi (arg1);
12063 modulus = get_pointer_modulus_and_residue (arg0, &residue,
12064 integer_onep (arg1));
12066 /* This works because modulus is a power of 2. If this weren't the
12067 case, we'd have to replace it by its greatest power-of-2
12068 divisor: modulus & -modulus. */
12069 if (low < modulus)
12070 return build_int_cst (type, residue & low);
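/* Illustrative example (added for clarity): if ARG0 derives from the
   address of an object with 16-byte alignment, modulus == 16 and
   residue == 0, so masking with 15 folds to the constant 0.  */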
12073 /* Fold (X << C1) & C2 into (X << C1) & (C2 | ((1 << C1) - 1))
12074 (X >> C1) & C2 into (X >> C1) & (C2 | ~((type) -1 >> C1))
12075 if the new mask might be further optimized. */
12076 if ((TREE_CODE (arg0) == LSHIFT_EXPR
12077 || TREE_CODE (arg0) == RSHIFT_EXPR)
12078 && TYPE_PRECISION (TREE_TYPE (arg0)) <= HOST_BITS_PER_WIDE_INT
12079 && TREE_CODE (arg1) == INTEGER_CST
12080 && tree_fits_uhwi_p (TREE_OPERAND (arg0, 1))
12081 && tree_to_uhwi (TREE_OPERAND (arg0, 1)) > 0
12082 && (tree_to_uhwi (TREE_OPERAND (arg0, 1))
12083 < TYPE_PRECISION (TREE_TYPE (arg0))))
12085 unsigned int shiftc = tree_to_uhwi (TREE_OPERAND (arg0, 1));
12086 unsigned HOST_WIDE_INT mask = TREE_INT_CST_LOW (arg1);
12087 unsigned HOST_WIDE_INT newmask, zerobits = 0;
12088 tree shift_type = TREE_TYPE (arg0);
12090 if (TREE_CODE (arg0) == LSHIFT_EXPR)
12091 zerobits = ((((unsigned HOST_WIDE_INT) 1) << shiftc) - 1);
12092 else if (TREE_CODE (arg0) == RSHIFT_EXPR
12093 && TYPE_PRECISION (TREE_TYPE (arg0))
12094 == GET_MODE_PRECISION (TYPE_MODE (TREE_TYPE (arg0))))
12096 prec = TYPE_PRECISION (TREE_TYPE (arg0));
12097 tree arg00 = TREE_OPERAND (arg0, 0);
12098 /* See if more bits can be proven as zero because of
12099 zero extension. */
12100 if (TREE_CODE (arg00) == NOP_EXPR
12101 && TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg00, 0))))
12103 tree inner_type = TREE_TYPE (TREE_OPERAND (arg00, 0));
12104 if (TYPE_PRECISION (inner_type)
12105 == GET_MODE_PRECISION (TYPE_MODE (inner_type))
12106 && TYPE_PRECISION (inner_type) < prec)
12108 prec = TYPE_PRECISION (inner_type);
12109 /* See if we can shorten the right shift. */
12110 if (shiftc < prec)
12111 shift_type = inner_type;
12112 /* Otherwise X >> C1 is all zeros, so we'll optimize
12113 it into (X, 0) later on by making sure zerobits
12114 is all ones. */
12117 zerobits = ~(unsigned HOST_WIDE_INT) 0;
12118 if (shiftc < prec)
12120 zerobits >>= HOST_BITS_PER_WIDE_INT - shiftc;
12121 zerobits <<= prec - shiftc;
12123 /* For an arithmetic shift, if the sign bit could be set, zerobits
12124 may actually contain sign bits, so no transformation is
12125 possible unless MASK masks them all away. In that
12126 case the shift needs to be converted into a logical shift. */
12127 if (!TYPE_UNSIGNED (TREE_TYPE (arg0))
12128 && prec == TYPE_PRECISION (TREE_TYPE (arg0)))
12130 if ((mask & zerobits) == 0)
12131 shift_type = unsigned_type_for (TREE_TYPE (arg0));
12132 else
12133 zerobits = 0;
12137 /* ((X << 16) & 0xff00) is (X, 0). */
12138 if ((mask & zerobits) == mask)
12139 return omit_one_operand_loc (loc, type,
12140 build_int_cst (type, 0), arg0);
12142 newmask = mask | zerobits;
12143 if (newmask != mask && (newmask & (newmask + 1)) == 0)
12145 /* Only do the transformation if NEWMASK is some integer
12146 mode's mask. */
12147 for (prec = BITS_PER_UNIT;
12148 prec < HOST_BITS_PER_WIDE_INT; prec <<= 1)
12149 if (newmask == (((unsigned HOST_WIDE_INT) 1) << prec) - 1)
12150 break;
12151 if (prec < HOST_BITS_PER_WIDE_INT
12152 || newmask == ~(unsigned HOST_WIDE_INT) 0)
12154 tree newmaskt;
12156 if (shift_type != TREE_TYPE (arg0))
12158 tem = fold_build2_loc (loc, TREE_CODE (arg0), shift_type,
12159 fold_convert_loc (loc, shift_type,
12160 TREE_OPERAND (arg0, 0)),
12161 TREE_OPERAND (arg0, 1));
12162 tem = fold_convert_loc (loc, type, tem);
12164 else
12165 tem = op0;
12166 newmaskt = build_int_cst_type (TREE_TYPE (op1), newmask);
12167 if (!tree_int_cst_equal (newmaskt, arg1))
12168 return fold_build2_loc (loc, BIT_AND_EXPR, type, tem, newmaskt);
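/* Worked example (added for clarity, 8-bit): (X << 2) & 0xFD has its
   low two bits known zero, so newmask == 0xFD | 0x03 == 0xFF, a mode
   mask, and the expression becomes (X << 2) & 0xFF.  */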
12173 goto associate;
12175 case RDIV_EXPR:
12176 /* Don't touch a floating-point divide by zero unless the mode
12177 of the constant can represent infinity. */
12178 if (TREE_CODE (arg1) == REAL_CST
12179 && !MODE_HAS_INFINITIES (TYPE_MODE (TREE_TYPE (arg1)))
12180 && real_zerop (arg1))
12181 return NULL_TREE;
12183 /* Optimize A / A to 1.0 if we don't care about
12184 NaNs or Infinities. Skip the transformation
12185 for non-real operands. */
12186 if (SCALAR_FLOAT_TYPE_P (TREE_TYPE (arg0))
12187 && ! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0)))
12188 && ! HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (arg0)))
12189 && operand_equal_p (arg0, arg1, 0))
12191 tree r = build_real (TREE_TYPE (arg0), dconst1);
12193 return omit_two_operands_loc (loc, type, r, arg0, arg1);
12196 /* The complex version of the above A / A optimization. */
12197 if (COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0))
12198 && operand_equal_p (arg0, arg1, 0))
12200 tree elem_type = TREE_TYPE (TREE_TYPE (arg0));
12201 if (! HONOR_NANS (TYPE_MODE (elem_type))
12202 && ! HONOR_INFINITIES (TYPE_MODE (elem_type)))
12204 tree r = build_real (elem_type, dconst1);
12205 /* omit_two_operands will call fold_convert for us. */
12206 return omit_two_operands_loc (loc, type, r, arg0, arg1);
12210 /* (-A) / (-B) -> A / B */
12211 if (TREE_CODE (arg0) == NEGATE_EXPR && negate_expr_p (arg1))
12212 return fold_build2_loc (loc, RDIV_EXPR, type,
12213 TREE_OPERAND (arg0, 0),
12214 negate_expr (arg1));
12215 if (TREE_CODE (arg1) == NEGATE_EXPR && negate_expr_p (arg0))
12216 return fold_build2_loc (loc, RDIV_EXPR, type,
12217 negate_expr (arg0),
12218 TREE_OPERAND (arg1, 0));
12220 /* In IEEE floating point, x/1 is not equivalent to x for snans. */
12221 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
12222 && real_onep (arg1))
12223 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
12225 /* In IEEE floating point, x/-1 is not equivalent to -x for snans. */
12226 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
12227 && real_minus_onep (arg1))
12228 return non_lvalue_loc (loc, fold_convert_loc (loc, type,
12229 negate_expr (arg0)));
12231 /* If ARG1 is a constant, we can convert this to a multiply by the
12232 reciprocal. This does not have the same rounding properties,
12233 so only do this if -freciprocal-math. We can actually
12234 always safely do it if ARG1 is a power of two, but it's hard to
12235 tell if it is or not in a portable manner. */
12236 if (optimize
12237 && (TREE_CODE (arg1) == REAL_CST
12238 || (TREE_CODE (arg1) == COMPLEX_CST
12239 && COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg1)))
12240 || (TREE_CODE (arg1) == VECTOR_CST
12241 && VECTOR_FLOAT_TYPE_P (TREE_TYPE (arg1)))))
12243 if (flag_reciprocal_math
12244 && 0 != (tem = const_binop (code, build_one_cst (type), arg1)))
12245 return fold_build2_loc (loc, MULT_EXPR, type, arg0, tem);
12246 /* Find the reciprocal if optimizing and the result is exact.
12247 TODO: Complex reciprocal not implemented. */
12248 if (TREE_CODE (arg1) != COMPLEX_CST)
12250 tree inverse = exact_inverse (TREE_TYPE (arg0), arg1);
12252 if (inverse)
12253 return fold_build2_loc (loc, MULT_EXPR, type, arg0, inverse);
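/* Illustrative examples (added for clarity): x / 5.0 becomes x * 0.2
   only under -freciprocal-math, while x / 4.0 becomes x * 0.25 via
   the exact-inverse path, since the reciprocal of a power of two is
   exact.  */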
12256 /* Convert A/B/C to A/(B*C). */
12257 if (flag_reciprocal_math
12258 && TREE_CODE (arg0) == RDIV_EXPR)
12259 return fold_build2_loc (loc, RDIV_EXPR, type, TREE_OPERAND (arg0, 0),
12260 fold_build2_loc (loc, MULT_EXPR, type,
12261 TREE_OPERAND (arg0, 1), arg1));
12263 /* Convert A/(B/C) to (A/B)*C. */
12264 if (flag_reciprocal_math
12265 && TREE_CODE (arg1) == RDIV_EXPR)
12266 return fold_build2_loc (loc, MULT_EXPR, type,
12267 fold_build2_loc (loc, RDIV_EXPR, type, arg0,
12268 TREE_OPERAND (arg1, 0)),
12269 TREE_OPERAND (arg1, 1));
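      /* Added illustration (annotation only): with -freciprocal-math the
	 two rewrites above turn

	   (a / b) / c   into   a / (b * c)
	   a / (b / c)   into   (a / b) * c

	 replacing one of the two divisions with a multiplication; the
	 C1/(X*C2) case below likewise pre-divides the two constants.  */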
12271 /* Convert C1/(X*C2) into (C1/C2)/X. */
12272 if (flag_reciprocal_math
12273 && TREE_CODE (arg1) == MULT_EXPR
12274 && TREE_CODE (arg0) == REAL_CST
12275 && TREE_CODE (TREE_OPERAND (arg1, 1)) == REAL_CST)
12277 tree tem = const_binop (RDIV_EXPR, arg0,
12278 TREE_OPERAND (arg1, 1));
12279 if (tem)
12280 return fold_build2_loc (loc, RDIV_EXPR, type, tem,
12281 TREE_OPERAND (arg1, 0));
12284 if (flag_unsafe_math_optimizations)
12286 enum built_in_function fcode0 = builtin_mathfn_code (arg0);
12287 enum built_in_function fcode1 = builtin_mathfn_code (arg1);
12289 /* Optimize sin(x)/cos(x) as tan(x). */
12290 if (((fcode0 == BUILT_IN_SIN && fcode1 == BUILT_IN_COS)
12291 || (fcode0 == BUILT_IN_SINF && fcode1 == BUILT_IN_COSF)
12292 || (fcode0 == BUILT_IN_SINL && fcode1 == BUILT_IN_COSL))
12293 && operand_equal_p (CALL_EXPR_ARG (arg0, 0),
12294 CALL_EXPR_ARG (arg1, 0), 0))
12296 tree tanfn = mathfn_built_in (type, BUILT_IN_TAN);
12298 if (tanfn != NULL_TREE)
12299 return build_call_expr_loc (loc, tanfn, 1, CALL_EXPR_ARG (arg0, 0));
12302 /* Optimize cos(x)/sin(x) as 1.0/tan(x). */
12303 if (((fcode0 == BUILT_IN_COS && fcode1 == BUILT_IN_SIN)
12304 || (fcode0 == BUILT_IN_COSF && fcode1 == BUILT_IN_SINF)
12305 || (fcode0 == BUILT_IN_COSL && fcode1 == BUILT_IN_SINL))
12306 && operand_equal_p (CALL_EXPR_ARG (arg0, 0),
12307 CALL_EXPR_ARG (arg1, 0), 0))
12309 tree tanfn = mathfn_built_in (type, BUILT_IN_TAN);
12311 if (tanfn != NULL_TREE)
12313 tree tmp = build_call_expr_loc (loc, tanfn, 1,
12314 CALL_EXPR_ARG (arg0, 0));
12315 return fold_build2_loc (loc, RDIV_EXPR, type,
12316 build_real (type, dconst1), tmp);
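      /* Added illustration (annotation only): with
	 -funsafe-math-optimizations,

	   #include <math.h>
	   double t (double x) { return sin (x) / cos (x); }

	 is folded into a single call tan (x), and cos (x) / sin (x) into
	 1.0 / tan (x).  The float and long double builtin variants
	 (sinf/cosf, sinl/cosl) are matched the same way.  */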
12320 /* Optimize sin(x)/tan(x) as cos(x) if we don't care about
12321 NaNs or Infinities. */
12322 if (((fcode0 == BUILT_IN_SIN && fcode1 == BUILT_IN_TAN)
12323 || (fcode0 == BUILT_IN_SINF && fcode1 == BUILT_IN_TANF)
12324 || (fcode0 == BUILT_IN_SINL && fcode1 == BUILT_IN_TANL)))
12326 tree arg00 = CALL_EXPR_ARG (arg0, 0);
12327 tree arg01 = CALL_EXPR_ARG (arg1, 0);
12329 if (! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg00)))
12330 && ! HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (arg00)))
12331 && operand_equal_p (arg00, arg01, 0))
12333 tree cosfn = mathfn_built_in (type, BUILT_IN_COS);
12335 if (cosfn != NULL_TREE)
12336 return build_call_expr_loc (loc, cosfn, 1, arg00);
12340 /* Optimize tan(x)/sin(x) as 1.0/cos(x) if we don't care about
12341 NaNs or Infinities. */
12342 if (((fcode0 == BUILT_IN_TAN && fcode1 == BUILT_IN_SIN)
12343 || (fcode0 == BUILT_IN_TANF && fcode1 == BUILT_IN_SINF)
12344 || (fcode0 == BUILT_IN_TANL && fcode1 == BUILT_IN_SINL)))
12346 tree arg00 = CALL_EXPR_ARG (arg0, 0);
12347 tree arg01 = CALL_EXPR_ARG (arg1, 0);
12349 if (! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg00)))
12350 && ! HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (arg00)))
12351 && operand_equal_p (arg00, arg01, 0))
12353 tree cosfn = mathfn_built_in (type, BUILT_IN_COS);
12355 if (cosfn != NULL_TREE)
12357 tree tmp = build_call_expr_loc (loc, cosfn, 1, arg00);
12358 return fold_build2_loc (loc, RDIV_EXPR, type,
12359 build_real (type, dconst1),
12360 tmp);
12365 /* Optimize pow(x,c)/x as pow(x,c-1). */
12366 if (fcode0 == BUILT_IN_POW
12367 || fcode0 == BUILT_IN_POWF
12368 || fcode0 == BUILT_IN_POWL)
12370 tree arg00 = CALL_EXPR_ARG (arg0, 0);
12371 tree arg01 = CALL_EXPR_ARG (arg0, 1);
12372 if (TREE_CODE (arg01) == REAL_CST
12373 && !TREE_OVERFLOW (arg01)
12374 && operand_equal_p (arg1, arg00, 0))
12376 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
12377 REAL_VALUE_TYPE c;
12378 tree arg;
12380 c = TREE_REAL_CST (arg01);
12381 real_arithmetic (&c, MINUS_EXPR, &c, &dconst1);
12382 arg = build_real (type, c);
12383 return build_call_expr_loc (loc, powfn, 2, arg1, arg);
12387 /* Optimize a/root(b/c) into a*root(c/b). */
12388 if (BUILTIN_ROOT_P (fcode1))
12390 tree rootarg = CALL_EXPR_ARG (arg1, 0);
12392 if (TREE_CODE (rootarg) == RDIV_EXPR)
12394 tree rootfn = TREE_OPERAND (CALL_EXPR_FN (arg1), 0);
12395 tree b = TREE_OPERAND (rootarg, 0);
12396 tree c = TREE_OPERAND (rootarg, 1);
12398 tree tmp = fold_build2_loc (loc, RDIV_EXPR, type, c, b);
12400 tmp = build_call_expr_loc (loc, rootfn, 1, tmp);
12401 return fold_build2_loc (loc, MULT_EXPR, type, arg0, tmp);
12405 /* Optimize x/expN(y) into x*expN(-y). */
12406 if (BUILTIN_EXPONENT_P (fcode1))
12408 tree expfn = TREE_OPERAND (CALL_EXPR_FN (arg1), 0);
12409 tree arg = negate_expr (CALL_EXPR_ARG (arg1, 0));
12410 arg1 = build_call_expr_loc (loc,
12411 expfn, 1,
12412 fold_convert_loc (loc, type, arg));
12413 return fold_build2_loc (loc, MULT_EXPR, type, arg0, arg1);
12416 /* Optimize x/pow(y,z) into x*pow(y,-z). */
12417 if (fcode1 == BUILT_IN_POW
12418 || fcode1 == BUILT_IN_POWF
12419 || fcode1 == BUILT_IN_POWL)
12421 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg1), 0);
12422 tree arg10 = CALL_EXPR_ARG (arg1, 0);
12423 tree arg11 = CALL_EXPR_ARG (arg1, 1);
12424 tree neg11 = fold_convert_loc (loc, type,
12425 negate_expr (arg11));
12426 arg1 = build_call_expr_loc (loc, powfn, 2, arg10, neg11);
12427 return fold_build2_loc (loc, MULT_EXPR, type, arg0, arg1);
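      /* Added illustration (annotation only): both exponential rewrites
	 above replace a division by a multiplication, e.g.

	   x / exp (y)      -->   x * exp (-y)
	   x / pow (y, z)   -->   x * pow (y, -z)

	 which is valid only under -funsafe-math-optimizations, since
	 rounding and overflow behavior differ.  */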
12430 return NULL_TREE;
12432 case TRUNC_DIV_EXPR:
12433 /* Optimize (X & (-A)) / A where A is a power of 2,
12434 to X >> log2(A) */
12435 if (TREE_CODE (arg0) == BIT_AND_EXPR
12436 && !TYPE_UNSIGNED (type) && TREE_CODE (arg1) == INTEGER_CST
12437 && integer_pow2p (arg1) && tree_int_cst_sgn (arg1) > 0)
12439 tree sum = fold_binary_loc (loc, PLUS_EXPR, TREE_TYPE (arg1),
12440 arg1, TREE_OPERAND (arg0, 1));
12441 if (sum && integer_zerop (sum)) {
12442 tree pow2 = build_int_cst (integer_type_node,
12443 wi::exact_log2 (arg1));
12444 return fold_build2_loc (loc, RSHIFT_EXPR, type,
12445 TREE_OPERAND (arg0, 0), pow2);
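      /* Added illustration (annotation only): for signed x,

	   (x & -8) / 8   -->   x >> 3

	 The fold_binary_loc check above verifies that the AND mask is
	 exactly -A, i.e. that A + mask folds to zero, so the low bits
	 discarded by the shift are already clear.  */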
12449 /* Fall through */
12451 case FLOOR_DIV_EXPR:
12452 /* Simplify A / (B << N) where A and B are positive and B is
12453 a power of 2, to A >> (N + log2(B)). */
12454 strict_overflow_p = false;
12455 if (TREE_CODE (arg1) == LSHIFT_EXPR
12456 && (TYPE_UNSIGNED (type)
12457 || tree_expr_nonnegative_warnv_p (op0, &strict_overflow_p)))
12459 tree sval = TREE_OPERAND (arg1, 0);
12460 if (integer_pow2p (sval) && tree_int_cst_sgn (sval) > 0)
12462 tree sh_cnt = TREE_OPERAND (arg1, 1);
12463 tree pow2 = build_int_cst (TREE_TYPE (sh_cnt),
12464 wi::exact_log2 (sval));
12466 if (strict_overflow_p)
12467 fold_overflow_warning (("assuming signed overflow does not "
12468 "occur when simplifying A / (B << N)"),
12469 WARN_STRICT_OVERFLOW_MISC);
12471 sh_cnt = fold_build2_loc (loc, PLUS_EXPR, TREE_TYPE (sh_cnt),
12472 sh_cnt, pow2);
12473 return fold_build2_loc (loc, RSHIFT_EXPR, type,
12474 fold_convert_loc (loc, type, arg0), sh_cnt);
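      /* Added illustration (annotation only): for unsigned a,

	   a / (2u << n)   -->   a >> (n + 1)

	 since dividing by B << N with B a power of two is a right shift
	 by N + log2 (B).  For signed types the fold is only done when a
	 is provably nonnegative, with a strict-overflow warning when that
	 relies on signed overflow being undefined.  */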
12478 /* For unsigned integral types, FLOOR_DIV_EXPR is the same as
12479 TRUNC_DIV_EXPR. Rewrite into the latter in this case. */
12480 if (INTEGRAL_TYPE_P (type)
12481 && TYPE_UNSIGNED (type)
12482 && code == FLOOR_DIV_EXPR)
12483 return fold_build2_loc (loc, TRUNC_DIV_EXPR, type, op0, op1);
12485 /* Fall through */
12487 case ROUND_DIV_EXPR:
12488 case CEIL_DIV_EXPR:
12489 case EXACT_DIV_EXPR:
12490 if (integer_onep (arg1))
12491 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
12492 if (integer_zerop (arg1))
12493 return NULL_TREE;
12494 /* X / -1 is -X. */
12495 if (!TYPE_UNSIGNED (type)
12496 && TREE_CODE (arg1) == INTEGER_CST
12497 && wi::eq_p (arg1, -1))
12498 return fold_convert_loc (loc, type, negate_expr (arg0));
12500 /* Convert -A / -B to A / B when the type is signed and overflow is
12501 undefined. */
12502 if ((!INTEGRAL_TYPE_P (type) || TYPE_OVERFLOW_UNDEFINED (type))
12503 && TREE_CODE (arg0) == NEGATE_EXPR
12504 && negate_expr_p (arg1))
12506 if (INTEGRAL_TYPE_P (type))
12507 fold_overflow_warning (("assuming signed overflow does not occur "
12508 "when distributing negation across "
12509 "division"),
12510 WARN_STRICT_OVERFLOW_MISC);
12511 return fold_build2_loc (loc, code, type,
12512 fold_convert_loc (loc, type,
12513 TREE_OPERAND (arg0, 0)),
12514 fold_convert_loc (loc, type,
12515 negate_expr (arg1)));
12517 if ((!INTEGRAL_TYPE_P (type) || TYPE_OVERFLOW_UNDEFINED (type))
12518 && TREE_CODE (arg1) == NEGATE_EXPR
12519 && negate_expr_p (arg0))
12521 if (INTEGRAL_TYPE_P (type))
12522 fold_overflow_warning (("assuming signed overflow does not occur "
12523 "when distributing negation across "
12524 "division"),
12525 WARN_STRICT_OVERFLOW_MISC);
12526 return fold_build2_loc (loc, code, type,
12527 fold_convert_loc (loc, type,
12528 negate_expr (arg0)),
12529 fold_convert_loc (loc, type,
12530 TREE_OPERAND (arg1, 0)));
12533 /* If arg0 is a multiple of arg1, then rewrite to the fastest div
12534 operation, EXACT_DIV_EXPR.
12536 Note that only CEIL_DIV_EXPR and FLOOR_DIV_EXPR are rewritten now.
12537 At one time others generated faster code, but it's not clear whether
12538 they still do after the last round of changes to the DIV code in expmed.c. */
12539 if ((code == CEIL_DIV_EXPR || code == FLOOR_DIV_EXPR)
12540 && multiple_of_p (type, arg0, arg1))
12541 return fold_build2_loc (loc, EXACT_DIV_EXPR, type, arg0, arg1);
12543 strict_overflow_p = false;
12544 if (TREE_CODE (arg1) == INTEGER_CST
12545 && 0 != (tem = extract_muldiv (op0, arg1, code, NULL_TREE,
12546 &strict_overflow_p)))
12548 if (strict_overflow_p)
12549 fold_overflow_warning (("assuming signed overflow does not occur "
12550 "when simplifying division"),
12551 WARN_STRICT_OVERFLOW_MISC);
12552 return fold_convert_loc (loc, type, tem);
12555 return NULL_TREE;
12557 case CEIL_MOD_EXPR:
12558 case FLOOR_MOD_EXPR:
12559 case ROUND_MOD_EXPR:
12560 case TRUNC_MOD_EXPR:
12561 /* X % 1 is always zero, but be sure to preserve any side
12562 effects in X. */
12563 if (integer_onep (arg1))
12564 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
12566 /* For X % 0, return the expression unchanged so that we can emit
12567 the proper warnings and errors. */
12568 if (integer_zerop (arg1))
12569 return NULL_TREE;
12571 /* 0 % X is always zero, but be sure to preserve any side
12572 effects in X. Place this after checking for X == 0. */
12573 if (integer_zerop (arg0))
12574 return omit_one_operand_loc (loc, type, integer_zero_node, arg1);
12576 /* X % -1 is zero. */
12577 if (!TYPE_UNSIGNED (type)
12578 && TREE_CODE (arg1) == INTEGER_CST
12579 && wi::eq_p (arg1, -1))
12580 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
12582 /* X % -C is the same as X % C. */
12583 if (code == TRUNC_MOD_EXPR
12584 && TYPE_SIGN (type) == SIGNED
12585 && TREE_CODE (arg1) == INTEGER_CST
12586 && !TREE_OVERFLOW (arg1)
12587 && wi::neg_p (arg1)
12588 && !TYPE_OVERFLOW_TRAPS (type)
12589 /* Avoid this transformation if C is INT_MIN, i.e. C == -C. */
12590 && !sign_bit_p (arg1, arg1))
12591 return fold_build2_loc (loc, code, type,
12592 fold_convert_loc (loc, type, arg0),
12593 fold_convert_loc (loc, type,
12594 negate_expr (arg1)));
12596 /* X % -Y is the same as X % Y. */
12597 if (code == TRUNC_MOD_EXPR
12598 && !TYPE_UNSIGNED (type)
12599 && TREE_CODE (arg1) == NEGATE_EXPR
12600 && !TYPE_OVERFLOW_TRAPS (type))
12601 return fold_build2_loc (loc, code, type, fold_convert_loc (loc, type, arg0),
12602 fold_convert_loc (loc, type,
12603 TREE_OPERAND (arg1, 0)));
12605 strict_overflow_p = false;
12606 if (TREE_CODE (arg1) == INTEGER_CST
12607 && 0 != (tem = extract_muldiv (op0, arg1, code, NULL_TREE,
12608 &strict_overflow_p)))
12610 if (strict_overflow_p)
12611 fold_overflow_warning (("assuming signed overflow does not occur "
12612 "when simplifying modulus"),
12613 WARN_STRICT_OVERFLOW_MISC);
12614 return fold_convert_loc (loc, type, tem);
12617 /* Optimize TRUNC_MOD_EXPR by a power of two into a BIT_AND_EXPR,
12618 i.e. "X % C" into "X & (C - 1)", if X and C are positive. */
12619 if ((code == TRUNC_MOD_EXPR || code == FLOOR_MOD_EXPR)
12620 && (TYPE_UNSIGNED (type)
12621 || tree_expr_nonnegative_warnv_p (op0, &strict_overflow_p)))
12623 tree c = arg1;
12624 /* Also optimize A % (C << N) where C is a power of 2,
12625 to A & ((C << N) - 1). */
12626 if (TREE_CODE (arg1) == LSHIFT_EXPR)
12627 c = TREE_OPERAND (arg1, 0);
12629 if (integer_pow2p (c) && tree_int_cst_sgn (c) > 0)
12631 tree mask
12632 = fold_build2_loc (loc, MINUS_EXPR, TREE_TYPE (arg1), arg1,
12633 build_int_cst (TREE_TYPE (arg1), 1));
12634 if (strict_overflow_p)
12635 fold_overflow_warning (("assuming signed overflow does not "
12636 "occur when simplifying "
12637 "X % (power of two)"),
12638 WARN_STRICT_OVERFLOW_MISC);
12639 return fold_build2_loc (loc, BIT_AND_EXPR, type,
12640 fold_convert_loc (loc, type, arg0),
12641 fold_convert_loc (loc, type, mask));
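      /* Added illustration (annotation only): for unsigned x,

	   x % 16        -->   x & 15
	   x % (4 << n)  -->   x & ((4 << n) - 1)

	 For signed operands the rewrite is only applied when x is known
	 nonnegative, since C's truncating % yields negative remainders
	 for negative x while the mask form never does.  */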
12645 return NULL_TREE;
12647 case LROTATE_EXPR:
12648 case RROTATE_EXPR:
12649 if (integer_all_onesp (arg0))
12650 return omit_one_operand_loc (loc, type, arg0, arg1);
12651 goto shift;
12653 case RSHIFT_EXPR:
12654 /* Optimize -1 >> x for arithmetic right shifts. */
12655 if (integer_all_onesp (arg0) && !TYPE_UNSIGNED (type)
12656 && tree_expr_nonnegative_p (arg1))
12657 return omit_one_operand_loc (loc, type, arg0, arg1);
12658 /* ... fall through ... */
12660 case LSHIFT_EXPR:
12661 shift:
12662 if (integer_zerop (arg1))
12663 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
12664 if (integer_zerop (arg0))
12665 return omit_one_operand_loc (loc, type, arg0, arg1);
12667 /* Prefer vector1 << scalar to vector1 << vector2
12668 if vector2 is uniform. */
12669 if (VECTOR_TYPE_P (TREE_TYPE (arg1))
12670 && (tem = uniform_vector_p (arg1)) != NULL_TREE)
12671 return fold_build2_loc (loc, code, type, op0, tem);
12673 /* Since a negative shift count is not well-defined,
12674 don't try to compute it in the compiler. */
12675 if (TREE_CODE (arg1) == INTEGER_CST && tree_int_cst_sgn (arg1) < 0)
12676 return NULL_TREE;
12678 prec = element_precision (type);
12680 /* Turn (a OP c1) OP c2 into a OP (c1+c2). */
12681 if (TREE_CODE (op0) == code && tree_fits_uhwi_p (arg1)
12682 && tree_to_uhwi (arg1) < prec
12683 && tree_fits_uhwi_p (TREE_OPERAND (arg0, 1))
12684 && tree_to_uhwi (TREE_OPERAND (arg0, 1)) < prec)
12686 unsigned int low = (tree_to_uhwi (TREE_OPERAND (arg0, 1))
12687 + tree_to_uhwi (arg1));
12689 /* Deal with a OP (c1 + c2) being undefined but (a OP c1) OP c2
12690 being well defined. */
12691 if (low >= prec)
12693 if (code == LROTATE_EXPR || code == RROTATE_EXPR)
12694 low = low % prec;
12695 else if (TYPE_UNSIGNED (type) || code == LSHIFT_EXPR)
12696 return omit_one_operand_loc (loc, type, build_zero_cst (type),
12697 TREE_OPERAND (arg0, 0));
12698 else
12699 low = prec - 1;
12702 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0),
12703 build_int_cst (TREE_TYPE (arg1), low));
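      /* Added illustration (annotation only, informal r>> for rotate):
	 for 32-bit x,

	   (x >> 2) >> 3        -->   x >> 5
	   (x << 20) << 20      -->   0              (count >= precision)
	   (x r>> 20) r>> 20    -->   x r>> 8        (rotates reduce mod 32)

	 and an arithmetic right shift whose combined count would overflow
	 is clamped to precision - 1, which preserves its value.  */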
12706 /* Transform (x >> c) << c into x & (-1<<c), or transform (x << c) >> c
12707 into x & ((unsigned)-1 >> c) for unsigned types. */
12708 if (((code == LSHIFT_EXPR && TREE_CODE (arg0) == RSHIFT_EXPR)
12709 || (TYPE_UNSIGNED (type)
12710 && code == RSHIFT_EXPR && TREE_CODE (arg0) == LSHIFT_EXPR))
12711 && tree_fits_uhwi_p (arg1)
12712 && tree_to_uhwi (arg1) < prec
12713 && tree_fits_uhwi_p (TREE_OPERAND (arg0, 1))
12714 && tree_to_uhwi (TREE_OPERAND (arg0, 1)) < prec)
12716 HOST_WIDE_INT low0 = tree_to_uhwi (TREE_OPERAND (arg0, 1));
12717 HOST_WIDE_INT low1 = tree_to_uhwi (arg1);
12718 tree lshift;
12719 tree arg00;
12721 if (low0 == low1)
12723 arg00 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
12725 lshift = build_minus_one_cst (type);
12726 lshift = const_binop (code, lshift, arg1);
12728 return fold_build2_loc (loc, BIT_AND_EXPR, type, arg00, lshift);
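      /* Added illustration (annotation only): for 32-bit values,

	   (x >> 4) << 4   -->   x & 0xfffffff0      (x & (-1 << 4))
	   (x << 4) >> 4   -->   x & 0x0fffffff      (unsigned x only)

	 i.e. a shift pair that clears bits is replaced by the equivalent
	 mask, computed here by shifting an all-ones constant.  */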
12732 /* Rewrite an LROTATE_EXPR by a constant into an
12733 RROTATE_EXPR by a new constant. */
12734 if (code == LROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST)
12736 tree tem = build_int_cst (TREE_TYPE (arg1), prec);
12737 tem = const_binop (MINUS_EXPR, tem, arg1);
12738 return fold_build2_loc (loc, RROTATE_EXPR, type, op0, tem);
12741 /* If we have a rotate of a bit operation with the rotate count and
12742 the second operand of the bit operation both constant,
12743 permute the two operations. */
12744 if (code == RROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST
12745 && (TREE_CODE (arg0) == BIT_AND_EXPR
12746 || TREE_CODE (arg0) == BIT_IOR_EXPR
12747 || TREE_CODE (arg0) == BIT_XOR_EXPR)
12748 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
12749 return fold_build2_loc (loc, TREE_CODE (arg0), type,
12750 fold_build2_loc (loc, code, type,
12751 TREE_OPERAND (arg0, 0), arg1),
12752 fold_build2_loc (loc, code, type,
12753 TREE_OPERAND (arg0, 1), arg1));
12755 /* Two consecutive rotates adding up to some integer
12756 multiple of the precision of the type can be ignored. */
12757 if (code == RROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST
12758 && TREE_CODE (arg0) == RROTATE_EXPR
12759 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
12760 && wi::umod_trunc (wi::add (arg1, TREE_OPERAND (arg0, 1)),
12761 prec) == 0)
12762 return TREE_OPERAND (arg0, 0);
12764 /* Fold (X & C2) << C1 into (X << C1) & (C2 << C1)
12765 (X & C2) >> C1 into (X >> C1) & (C2 >> C1)
12766 if the latter can be further optimized. */
12767 if ((code == LSHIFT_EXPR || code == RSHIFT_EXPR)
12768 && TREE_CODE (arg0) == BIT_AND_EXPR
12769 && TREE_CODE (arg1) == INTEGER_CST
12770 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
12772 tree mask = fold_build2_loc (loc, code, type,
12773 fold_convert_loc (loc, type,
12774 TREE_OPERAND (arg0, 1)),
12775 arg1);
12776 tree shift = fold_build2_loc (loc, code, type,
12777 fold_convert_loc (loc, type,
12778 TREE_OPERAND (arg0, 0)),
12779 arg1);
12780 tem = fold_binary_loc (loc, BIT_AND_EXPR, type, shift, mask);
12781 if (tem)
12782 return tem;
12785 return NULL_TREE;
12787 case MIN_EXPR:
12788 if (operand_equal_p (arg0, arg1, 0))
12789 return omit_one_operand_loc (loc, type, arg0, arg1);
12790 if (INTEGRAL_TYPE_P (type)
12791 && operand_equal_p (arg1, TYPE_MIN_VALUE (type), OEP_ONLY_CONST))
12792 return omit_one_operand_loc (loc, type, arg1, arg0);
12793 tem = fold_minmax (loc, MIN_EXPR, type, arg0, arg1);
12794 if (tem)
12795 return tem;
12796 goto associate;
12798 case MAX_EXPR:
12799 if (operand_equal_p (arg0, arg1, 0))
12800 return omit_one_operand_loc (loc, type, arg0, arg1);
12801 if (INTEGRAL_TYPE_P (type)
12802 && TYPE_MAX_VALUE (type)
12803 && operand_equal_p (arg1, TYPE_MAX_VALUE (type), OEP_ONLY_CONST))
12804 return omit_one_operand_loc (loc, type, arg1, arg0);
12805 tem = fold_minmax (loc, MAX_EXPR, type, arg0, arg1);
12806 if (tem)
12807 return tem;
12808 goto associate;
12810 case TRUTH_ANDIF_EXPR:
12811 /* Note that the operands of this must be ints
12812 and their values must be 0 or 1.
12813 ("true" is a fixed value perhaps depending on the language.) */
12814 /* If first arg is constant zero, return it. */
12815 if (integer_zerop (arg0))
12816 return fold_convert_loc (loc, type, arg0);
12817 case TRUTH_AND_EXPR:
12818 /* If either arg is constant true, drop it. */
12819 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
12820 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
12821 if (TREE_CODE (arg1) == INTEGER_CST && ! integer_zerop (arg1)
12822 /* Preserve sequence points. */
12823 && (code != TRUTH_ANDIF_EXPR || ! TREE_SIDE_EFFECTS (arg0)))
12824 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
12825 /* If second arg is constant zero, result is zero, but first arg
12826 must be evaluated. */
12827 if (integer_zerop (arg1))
12828 return omit_one_operand_loc (loc, type, arg1, arg0);
12829 /* Likewise for first arg, but note that only the TRUTH_AND_EXPR
12830 case will be handled here. */
12831 if (integer_zerop (arg0))
12832 return omit_one_operand_loc (loc, type, arg0, arg1);
12834 /* !X && X is always false. */
12835 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
12836 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
12837 return omit_one_operand_loc (loc, type, integer_zero_node, arg1);
12838 /* X && !X is always false. */
12839 if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
12840 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
12841 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
12843 /* A < X && A + 1 > Y ==> A < X && A >= Y. Normally A + 1 > Y
12844 means A >= Y && A != MAX, but in this case we know that
12845 A < X <= MAX. */
12847 if (!TREE_SIDE_EFFECTS (arg0)
12848 && !TREE_SIDE_EFFECTS (arg1))
12850 tem = fold_to_nonsharp_ineq_using_bound (loc, arg0, arg1);
12851 if (tem && !operand_equal_p (tem, arg0, 0))
12852 return fold_build2_loc (loc, code, type, tem, arg1);
12854 tem = fold_to_nonsharp_ineq_using_bound (loc, arg1, arg0);
12855 if (tem && !operand_equal_p (tem, arg1, 0))
12856 return fold_build2_loc (loc, code, type, arg0, tem);
12859 if ((tem = fold_truth_andor (loc, code, type, arg0, arg1, op0, op1))
12860 != NULL_TREE)
12861 return tem;
12863 return NULL_TREE;
12865 case TRUTH_ORIF_EXPR:
12866 /* Note that the operands of this must be ints
12867 and their values must be 0 or true.
12868 ("true" is a fixed value perhaps depending on the language.) */
12869 /* If first arg is constant true, return it. */
12870 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
12871 return fold_convert_loc (loc, type, arg0);
12872 case TRUTH_OR_EXPR:
12873 /* If either arg is constant zero, drop it. */
12874 if (TREE_CODE (arg0) == INTEGER_CST && integer_zerop (arg0))
12875 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
12876 if (TREE_CODE (arg1) == INTEGER_CST && integer_zerop (arg1)
12877 /* Preserve sequence points. */
12878 && (code != TRUTH_ORIF_EXPR || ! TREE_SIDE_EFFECTS (arg0)))
12879 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
12880 /* If second arg is constant true, result is true, but we must
12881 evaluate first arg. */
12882 if (TREE_CODE (arg1) == INTEGER_CST && ! integer_zerop (arg1))
12883 return omit_one_operand_loc (loc, type, arg1, arg0);
12884 /* Likewise for first arg, but note this only occurs here for
12885 TRUTH_OR_EXPR. */
12886 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
12887 return omit_one_operand_loc (loc, type, arg0, arg1);
12889 /* !X || X is always true. */
12890 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
12891 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
12892 return omit_one_operand_loc (loc, type, integer_one_node, arg1);
12893 /* X || !X is always true. */
12894 if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
12895 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
12896 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
12898 /* (X && !Y) || (!X && Y) is X ^ Y */
12899 if (TREE_CODE (arg0) == TRUTH_AND_EXPR
12900 && TREE_CODE (arg1) == TRUTH_AND_EXPR)
12902 tree a0, a1, l0, l1, n0, n1;
12904 a0 = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 0));
12905 a1 = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 1));
12907 l0 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
12908 l1 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
12910 n0 = fold_build1_loc (loc, TRUTH_NOT_EXPR, type, l0);
12911 n1 = fold_build1_loc (loc, TRUTH_NOT_EXPR, type, l1);
12913 if ((operand_equal_p (n0, a0, 0)
12914 && operand_equal_p (n1, a1, 0))
12915 || (operand_equal_p (n0, a1, 0)
12916 && operand_equal_p (n1, a0, 0)))
12917 return fold_build2_loc (loc, TRUTH_XOR_EXPR, type, l0, n1);
12920 if ((tem = fold_truth_andor (loc, code, type, arg0, arg1, op0, op1))
12921 != NULL_TREE)
12922 return tem;
12924 return NULL_TREE;
12926 case TRUTH_XOR_EXPR:
12927 /* If the second arg is constant zero, drop it. */
12928 if (integer_zerop (arg1))
12929 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
12930 /* If the second arg is constant true, this is a logical inversion. */
12931 if (integer_onep (arg1))
12933 tem = invert_truthvalue_loc (loc, arg0);
12934 return non_lvalue_loc (loc, fold_convert_loc (loc, type, tem));
12936 /* Identical arguments cancel to zero. */
12937 if (operand_equal_p (arg0, arg1, 0))
12938 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
12940 /* !X ^ X is always true. */
12941 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
12942 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
12943 return omit_one_operand_loc (loc, type, integer_one_node, arg1);
12945 /* X ^ !X is always true. */
12946 if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
12947 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
12948 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
12950 return NULL_TREE;
12952 case EQ_EXPR:
12953 case NE_EXPR:
12954 STRIP_NOPS (arg0);
12955 STRIP_NOPS (arg1);
12957 tem = fold_comparison (loc, code, type, op0, op1);
12958 if (tem != NULL_TREE)
12959 return tem;
12961 /* bool_var != 0 becomes bool_var. */
12962 if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_zerop (arg1)
12963 && code == NE_EXPR)
12964 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
12966 /* bool_var == 1 becomes bool_var. */
12967 if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_onep (arg1)
12968 && code == EQ_EXPR)
12969 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
12971 /* bool_var != 1 becomes !bool_var. */
12972 if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_onep (arg1)
12973 && code == NE_EXPR)
12974 return fold_convert_loc (loc, type,
12975 fold_build1_loc (loc, TRUTH_NOT_EXPR,
12976 TREE_TYPE (arg0), arg0));
12978 /* bool_var == 0 becomes !bool_var. */
12979 if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_zerop (arg1)
12980 && code == EQ_EXPR)
12981 return fold_convert_loc (loc, type,
12982 fold_build1_loc (loc, TRUTH_NOT_EXPR,
12983 TREE_TYPE (arg0), arg0));
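      /* Added illustration (annotation only): for a bool b, the four
	 rewrites above give

	   b != 0  -->  b          b == 1  -->  b
	   b != 1  -->  !b         b == 0  -->  !b

	 turning comparisons against the two boolean constants into the
	 value itself or its logical negation.  */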
12985 /* !exp != 0 becomes !exp */
12986 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR && integer_zerop (arg1)
12987 && code == NE_EXPR)
12988 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
12990 /* If this is an equality comparison of the address of two non-weak,
12991 unaliased symbols neither of which are extern (since we do not
12992 have access to attributes for externs), then we know the result. */
12993 if (TREE_CODE (arg0) == ADDR_EXPR
12994 && VAR_OR_FUNCTION_DECL_P (TREE_OPERAND (arg0, 0))
12995 && ! DECL_WEAK (TREE_OPERAND (arg0, 0))
12996 && ! lookup_attribute ("alias",
12997 DECL_ATTRIBUTES (TREE_OPERAND (arg0, 0)))
12998 && ! DECL_EXTERNAL (TREE_OPERAND (arg0, 0))
12999 && TREE_CODE (arg1) == ADDR_EXPR
13000 && VAR_OR_FUNCTION_DECL_P (TREE_OPERAND (arg1, 0))
13001 && ! DECL_WEAK (TREE_OPERAND (arg1, 0))
13002 && ! lookup_attribute ("alias",
13003 DECL_ATTRIBUTES (TREE_OPERAND (arg1, 0)))
13004 && ! DECL_EXTERNAL (TREE_OPERAND (arg1, 0)))
13006 /* We know that we're looking at the address of two
13007 non-weak, unaliased, static _DECL nodes.
13009 It is both wasteful and incorrect to call operand_equal_p
13010 to compare the two ADDR_EXPR nodes. It is wasteful in that
13011 all we need to do is test pointer equality for the arguments
13012 to the two ADDR_EXPR nodes. It is incorrect to use
13013 operand_equal_p as that function is NOT equivalent to a
13014 C equality test. It can in fact return false for two
13015 objects which would test as equal using the C equality
13016 operator. */
13017 bool equal = TREE_OPERAND (arg0, 0) == TREE_OPERAND (arg1, 0);
13018 return constant_boolean_node (equal
13019 ? code == EQ_EXPR : code != EQ_EXPR,
13020 type);
13023 /* Similarly for a NEGATE_EXPR: -X ==/!= C folds to X ==/!= -C when negating C does not overflow. */
13024 if (TREE_CODE (arg0) == NEGATE_EXPR
13025 && TREE_CODE (arg1) == INTEGER_CST
13026 && 0 != (tem = negate_expr (fold_convert_loc (loc, TREE_TYPE (arg0),
13027 arg1)))
13028 && TREE_CODE (tem) == INTEGER_CST
13029 && !TREE_OVERFLOW (tem))
13030 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0), tem);
13032 /* Similarly for a BIT_XOR_EXPR; X ^ C1 == C2 is X == (C1 ^ C2). */
13033 if (TREE_CODE (arg0) == BIT_XOR_EXPR
13034 && TREE_CODE (arg1) == INTEGER_CST
13035 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
13036 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0),
13037 fold_build2_loc (loc, BIT_XOR_EXPR, TREE_TYPE (arg0),
13038 fold_convert_loc (loc,
13039 TREE_TYPE (arg0),
13040 arg1),
13041 TREE_OPERAND (arg0, 1)));
13043 /* Transform comparisons of the form X +- Y CMP X to Y CMP 0. */
13044 if ((TREE_CODE (arg0) == PLUS_EXPR
13045 || TREE_CODE (arg0) == POINTER_PLUS_EXPR
13046 || TREE_CODE (arg0) == MINUS_EXPR)
13047 && operand_equal_p (tree_strip_nop_conversions (TREE_OPERAND (arg0,
13048 0)),
13049 arg1, 0)
13050 && (INTEGRAL_TYPE_P (TREE_TYPE (arg0))
13051 || POINTER_TYPE_P (TREE_TYPE (arg0))))
13053 tree val = TREE_OPERAND (arg0, 1);
13054 return omit_two_operands_loc (loc, type,
13055 fold_build2_loc (loc, code, type,
13056 val,
13057 build_int_cst (TREE_TYPE (val),
13058 0)),
13059 TREE_OPERAND (arg0, 0), arg1);
13062 /* Transform comparisons of the form C - X CMP X if C % 2 == 1; for odd C, C - X == X would need 2*X == C, which is impossible. */
13063 if (TREE_CODE (arg0) == MINUS_EXPR
13064 && TREE_CODE (TREE_OPERAND (arg0, 0)) == INTEGER_CST
13065 && operand_equal_p (tree_strip_nop_conversions (TREE_OPERAND (arg0,
13066 1)),
13067 arg1, 0)
13068 && wi::extract_uhwi (TREE_OPERAND (arg0, 0), 0, 1) == 1)
13070 return omit_two_operands_loc (loc, type,
13071 code == NE_EXPR
13072 ? boolean_true_node : boolean_false_node,
13073 TREE_OPERAND (arg0, 1), arg1);
13076 /* Convert ABS_EXPR<x> == 0 or ABS_EXPR<x> != 0 to x == 0 or x != 0. */
13077 if (TREE_CODE (arg0) == ABS_EXPR
13078 && (integer_zerop (arg1) || real_zerop (arg1)))
13079 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0), arg1);
13081 /* If this is an EQ or NE comparison with zero and ARG0 is
13082 (1 << foo) & bar, convert it to (bar >> foo) & 1. Both require
13083 two operations, but the latter can be done in one less insn
13084 on machines that have only two-operand insns or on which a
13085 constant cannot be the first operand. */
13086 if (TREE_CODE (arg0) == BIT_AND_EXPR
13087 && integer_zerop (arg1))
13089 tree arg00 = TREE_OPERAND (arg0, 0);
13090 tree arg01 = TREE_OPERAND (arg0, 1);
13091 if (TREE_CODE (arg00) == LSHIFT_EXPR
13092 && integer_onep (TREE_OPERAND (arg00, 0)))
13094 tree tem = fold_build2_loc (loc, RSHIFT_EXPR, TREE_TYPE (arg00),
13095 arg01, TREE_OPERAND (arg00, 1));
13096 tem = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg0), tem,
13097 build_int_cst (TREE_TYPE (arg0), 1));
13098 return fold_build2_loc (loc, code, type,
13099 fold_convert_loc (loc, TREE_TYPE (arg1), tem),
13100 arg1);
13102 else if (TREE_CODE (arg01) == LSHIFT_EXPR
13103 && integer_onep (TREE_OPERAND (arg01, 0)))
13105 tree tem = fold_build2_loc (loc, RSHIFT_EXPR, TREE_TYPE (arg01),
13106 arg00, TREE_OPERAND (arg01, 1));
13107 tem = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg0), tem,
13108 build_int_cst (TREE_TYPE (arg0), 1));
13109 return fold_build2_loc (loc, code, type,
13110 fold_convert_loc (loc, TREE_TYPE (arg1), tem),
13111 arg1);
13115 /* If this is an NE or EQ comparison of zero against the result of a
13116 signed MOD operation whose second operand is a power of 2, make
13117 the MOD operation unsigned since it is simpler and equivalent. */
13118 if (integer_zerop (arg1)
13119 && !TYPE_UNSIGNED (TREE_TYPE (arg0))
13120 && (TREE_CODE (arg0) == TRUNC_MOD_EXPR
13121 || TREE_CODE (arg0) == CEIL_MOD_EXPR
13122 || TREE_CODE (arg0) == FLOOR_MOD_EXPR
13123 || TREE_CODE (arg0) == ROUND_MOD_EXPR)
13124 && integer_pow2p (TREE_OPERAND (arg0, 1)))
13126 tree newtype = unsigned_type_for (TREE_TYPE (arg0));
13127 tree newmod = fold_build2_loc (loc, TREE_CODE (arg0), newtype,
13128 fold_convert_loc (loc, newtype,
13129 TREE_OPERAND (arg0, 0)),
13130 fold_convert_loc (loc, newtype,
13131 TREE_OPERAND (arg0, 1)));
13133 return fold_build2_loc (loc, code, type, newmod,
13134 fold_convert_loc (loc, newtype, arg1));
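      /* Added illustration (annotation only): for signed int x,

	   x % 4 == 0   -->   (unsigned) x % 4 == 0

	 which further folding reduces to ((unsigned) x & 3) == 0; the
	 sign of x cannot change whether the remainder is zero, so the
	 unsigned form is equivalent here.  */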
13137 /* Fold ((X >> C1) & C2) == 0 and ((X >> C1) & C2) != 0 where
13138 C1 is a valid shift constant, and C2 is a power of two, i.e.
13139 a single bit. */
13140 if (TREE_CODE (arg0) == BIT_AND_EXPR
13141 && TREE_CODE (TREE_OPERAND (arg0, 0)) == RSHIFT_EXPR
13142 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1))
13143 == INTEGER_CST
13144 && integer_pow2p (TREE_OPERAND (arg0, 1))
13145 && integer_zerop (arg1))
13147 tree itype = TREE_TYPE (arg0);
13148 tree arg001 = TREE_OPERAND (TREE_OPERAND (arg0, 0), 1);
13149 prec = TYPE_PRECISION (itype);
13151 /* Check for a valid shift count. */
13152 if (wi::ltu_p (arg001, prec))
13154 tree arg01 = TREE_OPERAND (arg0, 1);
13155 tree arg000 = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
13156 unsigned HOST_WIDE_INT log2 = tree_log2 (arg01);
13157 /* If (C2 << C1) doesn't overflow, then ((X >> C1) & C2) != 0
13158 can be rewritten as (X & (C2 << C1)) != 0. */
13159 if ((log2 + TREE_INT_CST_LOW (arg001)) < prec)
13161 tem = fold_build2_loc (loc, LSHIFT_EXPR, itype, arg01, arg001);
13162 tem = fold_build2_loc (loc, BIT_AND_EXPR, itype, arg000, tem);
13163 return fold_build2_loc (loc, code, type, tem,
13164 fold_convert_loc (loc, itype, arg1));
13166 /* Otherwise, for signed (arithmetic) shifts,
13167 ((X >> C1) & C2) != 0 is rewritten as X < 0, and
13168 ((X >> C1) & C2) == 0 is rewritten as X >= 0. */
13169 else if (!TYPE_UNSIGNED (itype))
13170 return fold_build2_loc (loc, code == EQ_EXPR ? GE_EXPR : LT_EXPR, type,
13171 arg000, build_int_cst (itype, 0));
13172 /* Otherwise, for unsigned (logical) shifts,
13173 ((X >> C1) & C2) != 0 is rewritten as (X,false), and
13174 ((X >> C1) & C2) == 0 is rewritten as (X,true). */
13175 else
13176 return omit_one_operand_loc (loc, type,
13177 code == EQ_EXPR ? integer_one_node
13178 : integer_zero_node,
13179 arg000);
13183 /* If we have (A & C) == C where C is a power of 2, convert this into
13184 (A & C) != 0. Similarly for NE_EXPR. */
13185 if (TREE_CODE (arg0) == BIT_AND_EXPR
13186 && integer_pow2p (TREE_OPERAND (arg0, 1))
13187 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
13188 return fold_build2_loc (loc, code == EQ_EXPR ? NE_EXPR : EQ_EXPR, type,
13189 arg0, fold_convert_loc (loc, TREE_TYPE (arg0),
13190 integer_zero_node));
13192 /* If we have (A & C) != 0 or (A & C) == 0 and C is the sign
13193 bit, then fold the expression into A < 0 or A >= 0. */
13194 tem = fold_single_bit_test_into_sign_test (loc, code, arg0, arg1, type);
13195 if (tem)
13196 return tem;
13198 /* If we have (A & C) == D where D & ~C != 0, convert this into 0.
13199 Similarly for NE_EXPR. */
13200 if (TREE_CODE (arg0) == BIT_AND_EXPR
13201 && TREE_CODE (arg1) == INTEGER_CST
13202 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
13204 tree notc = fold_build1_loc (loc, BIT_NOT_EXPR,
13205 TREE_TYPE (TREE_OPERAND (arg0, 1)),
13206 TREE_OPERAND (arg0, 1));
13207 tree dandnotc
13208 = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg0),
13209 fold_convert_loc (loc, TREE_TYPE (arg0), arg1),
13210 notc);
13211 tree rslt = code == EQ_EXPR ? integer_zero_node : integer_one_node;
13212 if (integer_nonzerop (dandnotc))
13213 return omit_one_operand_loc (loc, type, rslt, arg0);
13216 /* If we have (A | C) == D where C & ~D != 0, convert this into 0.
13217 Similarly for NE_EXPR. */
13218 if (TREE_CODE (arg0) == BIT_IOR_EXPR
13219 && TREE_CODE (arg1) == INTEGER_CST
13220 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
13222 tree notd = fold_build1_loc (loc, BIT_NOT_EXPR, TREE_TYPE (arg1), arg1);
13223 tree candnotd
13224 = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg0),
13225 TREE_OPERAND (arg0, 1),
13226 fold_convert_loc (loc, TREE_TYPE (arg0), notd));
13227 tree rslt = code == EQ_EXPR ? integer_zero_node : integer_one_node;
13228 if (integer_nonzerop (candnotd))
13229 return omit_one_operand_loc (loc, type, rslt, arg0);
13232 /* If this is a comparison of a field, we may be able to simplify it. */
13233 if ((TREE_CODE (arg0) == COMPONENT_REF
13234 || TREE_CODE (arg0) == BIT_FIELD_REF)
13235 /* Handle the constant case even without -O
13236 to make sure the warnings are given. */
13237 && (optimize || TREE_CODE (arg1) == INTEGER_CST))
13239 t1 = optimize_bit_field_compare (loc, code, type, arg0, arg1);
13240 if (t1)
13241 return t1;
13244 /* Optimize comparisons of strlen vs zero to a compare of the
13245 first character of the string vs zero. To wit,
13246 strlen(ptr) == 0 => *ptr == 0
13247 strlen(ptr) != 0 => *ptr != 0
13248 Other cases should reduce to one of these two (or a constant)
13249 due to the return value of strlen being unsigned. */
13250 if (TREE_CODE (arg0) == CALL_EXPR
13251 && integer_zerop (arg1))
13253 tree fndecl = get_callee_fndecl (arg0);
13255 if (fndecl
13256 && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
13257 && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_STRLEN
13258 && call_expr_nargs (arg0) == 1
13259 && TREE_CODE (TREE_TYPE (CALL_EXPR_ARG (arg0, 0))) == POINTER_TYPE)
13261 tree iref = build_fold_indirect_ref_loc (loc,
13262 CALL_EXPR_ARG (arg0, 0));
13263 return fold_build2_loc (loc, code, type, iref,
13264 build_int_cst (TREE_TYPE (iref), 0));
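      /* Added illustration (annotation only):

	   strlen (s) == 0   -->   *s == 0
	   strlen (s) != 0   -->   *s != 0

	 valid because strlen (s) is zero exactly when the first character
	 is the terminating NUL; the call must be the one-argument
	 BUILT_IN_STRLEN for this to fire.  */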
13268 /* Fold (X >> C) != 0 into X < 0 if C is one less than the width
13269 of X. Similarly fold (X >> C) == 0 into X >= 0. */
13270 if (TREE_CODE (arg0) == RSHIFT_EXPR
13271 && integer_zerop (arg1)
13272 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
13274 tree arg00 = TREE_OPERAND (arg0, 0);
13275 tree arg01 = TREE_OPERAND (arg0, 1);
13276 tree itype = TREE_TYPE (arg00);
13277 if (wi::eq_p (arg01, TYPE_PRECISION (itype) - 1))
13279 if (TYPE_UNSIGNED (itype))
13281 itype = signed_type_for (itype);
13282 arg00 = fold_convert_loc (loc, itype, arg00);
13284 return fold_build2_loc (loc, code == EQ_EXPR ? GE_EXPR : LT_EXPR,
13285 type, arg00, build_zero_cst (itype));
13289 /* (X ^ Y) == 0 becomes X == Y, and (X ^ Y) != 0 becomes X != Y. */
13290 if (integer_zerop (arg1)
13291 && TREE_CODE (arg0) == BIT_XOR_EXPR)
13292 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0),
13293 TREE_OPERAND (arg0, 1));
13295 /* (X ^ Y) == Y becomes X == 0. We know that Y has no side-effects. */
13296 if (TREE_CODE (arg0) == BIT_XOR_EXPR
13297 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
13298 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0),
13299 build_zero_cst (TREE_TYPE (arg0)));
13300 /* Likewise (X ^ Y) == X becomes Y == 0. X has no side-effects. */
13301 if (TREE_CODE (arg0) == BIT_XOR_EXPR
13302 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
13303 && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
13304 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 1),
13305 build_zero_cst (TREE_TYPE (arg0)));
13307 /* (X ^ C1) op C2 can be rewritten as X op (C1 ^ C2). */
13308 if (TREE_CODE (arg0) == BIT_XOR_EXPR
13309 && TREE_CODE (arg1) == INTEGER_CST
13310 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
13311 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0),
13312 fold_build2_loc (loc, BIT_XOR_EXPR, TREE_TYPE (arg1),
13313 TREE_OPERAND (arg0, 1), arg1));
13315 /* Fold (~X & C) == 0 into (X & C) != 0 and (~X & C) != 0 into
13316 (X & C) == 0 when C is a single bit. */
13317 if (TREE_CODE (arg0) == BIT_AND_EXPR
13318 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_NOT_EXPR
13319 && integer_zerop (arg1)
13320 && integer_pow2p (TREE_OPERAND (arg0, 1)))
13322 tem = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg0),
13323 TREE_OPERAND (TREE_OPERAND (arg0, 0), 0),
13324 TREE_OPERAND (arg0, 1));
13325 return fold_build2_loc (loc, code == EQ_EXPR ? NE_EXPR : EQ_EXPR,
13326 type, tem,
13327 fold_convert_loc (loc, TREE_TYPE (arg0),
13328 arg1));
13331 /* Fold ((X & C) ^ C) eq/ne 0 into (X & C) ne/eq 0, when the
13332 constant C is a power of two, i.e. a single bit. */
13333 if (TREE_CODE (arg0) == BIT_XOR_EXPR
13334 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_AND_EXPR
13335 && integer_zerop (arg1)
13336 && integer_pow2p (TREE_OPERAND (arg0, 1))
13337 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
13338 TREE_OPERAND (arg0, 1), OEP_ONLY_CONST))
13340 tree arg00 = TREE_OPERAND (arg0, 0);
13341 return fold_build2_loc (loc, code == EQ_EXPR ? NE_EXPR : EQ_EXPR, type,
13342 arg00, build_int_cst (TREE_TYPE (arg00), 0));
13345 /* Likewise, fold ((X ^ C) & C) eq/ne 0 into (X & C) ne/eq 0,
13346 when C is a power of two, i.e. a single bit. */
13347 if (TREE_CODE (arg0) == BIT_AND_EXPR
13348 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_XOR_EXPR
13349 && integer_zerop (arg1)
13350 && integer_pow2p (TREE_OPERAND (arg0, 1))
13351 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
13352 TREE_OPERAND (arg0, 1), OEP_ONLY_CONST))
13354 tree arg000 = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
13355 tem = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg000),
13356 arg000, TREE_OPERAND (arg0, 1));
13357 return fold_build2_loc (loc, code == EQ_EXPR ? NE_EXPR : EQ_EXPR, type,
13358 tem, build_int_cst (TREE_TYPE (tem), 0));
13361 if (integer_zerop (arg1)
13362 && tree_expr_nonzero_p (arg0))
13364 tree res = constant_boolean_node (code == NE_EXPR, type);
13365 return omit_one_operand_loc (loc, type, res, arg0);
13368 /* Fold -X op -Y as X op Y, where op is eq/ne. */
13369 if (TREE_CODE (arg0) == NEGATE_EXPR
13370 && TREE_CODE (arg1) == NEGATE_EXPR)
13371 return fold_build2_loc (loc, code, type,
13372 TREE_OPERAND (arg0, 0),
13373 fold_convert_loc (loc, TREE_TYPE (arg0),
13374 TREE_OPERAND (arg1, 0)));
13376 /* Fold (X & C) op (Y & C) as ((X ^ Y) & C) op 0, and symmetries. */
13377 if (TREE_CODE (arg0) == BIT_AND_EXPR
13378 && TREE_CODE (arg1) == BIT_AND_EXPR)
13380 tree arg00 = TREE_OPERAND (arg0, 0);
13381 tree arg01 = TREE_OPERAND (arg0, 1);
13382 tree arg10 = TREE_OPERAND (arg1, 0);
13383 tree arg11 = TREE_OPERAND (arg1, 1);
13384 tree itype = TREE_TYPE (arg0);
13386 if (operand_equal_p (arg01, arg11, 0))
13387 return fold_build2_loc (loc, code, type,
13388 fold_build2_loc (loc, BIT_AND_EXPR, itype,
13389 fold_build2_loc (loc,
13390 BIT_XOR_EXPR, itype,
13391 arg00, arg10),
13392 arg01),
13393 build_zero_cst (itype));
13395 if (operand_equal_p (arg01, arg10, 0))
13396 return fold_build2_loc (loc, code, type,
13397 fold_build2_loc (loc, BIT_AND_EXPR, itype,
13398 fold_build2_loc (loc,
13399 BIT_XOR_EXPR, itype,
13400 arg00, arg11),
13401 arg01),
13402 build_zero_cst (itype));
13404 if (operand_equal_p (arg00, arg11, 0))
13405 return fold_build2_loc (loc, code, type,
13406 fold_build2_loc (loc, BIT_AND_EXPR, itype,
13407 fold_build2_loc (loc,
13408 BIT_XOR_EXPR, itype,
13409 arg01, arg10),
13410 arg00),
13411 build_zero_cst (itype));
13413 if (operand_equal_p (arg00, arg10, 0))
13414 return fold_build2_loc (loc, code, type,
13415 fold_build2_loc (loc, BIT_AND_EXPR, itype,
13416 fold_build2_loc (loc,
13417 BIT_XOR_EXPR, itype,
13418 arg01, arg11),
13419 arg00),
13420 build_zero_cst (itype));
13423 if (TREE_CODE (arg0) == BIT_XOR_EXPR
13424 && TREE_CODE (arg1) == BIT_XOR_EXPR)
13426 tree arg00 = TREE_OPERAND (arg0, 0);
13427 tree arg01 = TREE_OPERAND (arg0, 1);
13428 tree arg10 = TREE_OPERAND (arg1, 0);
13429 tree arg11 = TREE_OPERAND (arg1, 1);
13430 tree itype = TREE_TYPE (arg0);
13432 /* Optimize (X ^ Z) op (Y ^ Z) as X op Y, and symmetries.
13433 operand_equal_p guarantees no side-effects so we don't need
13434 to use omit_one_operand on Z. */
13435 if (operand_equal_p (arg01, arg11, 0))
13436 return fold_build2_loc (loc, code, type, arg00,
13437 fold_convert_loc (loc, TREE_TYPE (arg00),
13438 arg10));
13439 if (operand_equal_p (arg01, arg10, 0))
13440 return fold_build2_loc (loc, code, type, arg00,
13441 fold_convert_loc (loc, TREE_TYPE (arg00),
13442 arg11));
13443 if (operand_equal_p (arg00, arg11, 0))
13444 return fold_build2_loc (loc, code, type, arg01,
13445 fold_convert_loc (loc, TREE_TYPE (arg01),
13446 arg10));
13447 if (operand_equal_p (arg00, arg10, 0))
13448 return fold_build2_loc (loc, code, type, arg01,
13449 fold_convert_loc (loc, TREE_TYPE (arg01),
13450 arg11));
13452 /* Optimize (X ^ C1) op (Y ^ C2) as (X ^ (C1 ^ C2)) op Y. */
13453 if (TREE_CODE (arg01) == INTEGER_CST
13454 && TREE_CODE (arg11) == INTEGER_CST)
13456 tem = fold_build2_loc (loc, BIT_XOR_EXPR, itype, arg01,
13457 fold_convert_loc (loc, itype, arg11));
13458 tem = fold_build2_loc (loc, BIT_XOR_EXPR, itype, arg00, tem);
13459 return fold_build2_loc (loc, code, type, tem,
13460 fold_convert_loc (loc, itype, arg10));
13464 /* Attempt to simplify equality/inequality comparisons of complex
13465 values. Only lower the comparison if the result is known or
13466 can be simplified to a single scalar comparison. */
13467 if ((TREE_CODE (arg0) == COMPLEX_EXPR
13468 || TREE_CODE (arg0) == COMPLEX_CST)
13469 && (TREE_CODE (arg1) == COMPLEX_EXPR
13470 || TREE_CODE (arg1) == COMPLEX_CST))
13472 tree real0, imag0, real1, imag1;
13473 tree rcond, icond;
13475 if (TREE_CODE (arg0) == COMPLEX_EXPR)
13477 real0 = TREE_OPERAND (arg0, 0);
13478 imag0 = TREE_OPERAND (arg0, 1);
13480 else
13482 real0 = TREE_REALPART (arg0);
13483 imag0 = TREE_IMAGPART (arg0);
13486 if (TREE_CODE (arg1) == COMPLEX_EXPR)
13488 real1 = TREE_OPERAND (arg1, 0);
13489 imag1 = TREE_OPERAND (arg1, 1);
13491 else
13493 real1 = TREE_REALPART (arg1);
13494 imag1 = TREE_IMAGPART (arg1);
13497 rcond = fold_binary_loc (loc, code, type, real0, real1);
13498 if (rcond && TREE_CODE (rcond) == INTEGER_CST)
13500 if (integer_zerop (rcond))
13502 if (code == EQ_EXPR)
13503 return omit_two_operands_loc (loc, type, boolean_false_node,
13504 imag0, imag1);
13505 return fold_build2_loc (loc, NE_EXPR, type, imag0, imag1);
13507 else
13509 if (code == NE_EXPR)
13510 return omit_two_operands_loc (loc, type, boolean_true_node,
13511 imag0, imag1);
13512 return fold_build2_loc (loc, EQ_EXPR, type, imag0, imag1);
13516 icond = fold_binary_loc (loc, code, type, imag0, imag1);
13517 if (icond && TREE_CODE (icond) == INTEGER_CST)
13519 if (integer_zerop (icond))
13521 if (code == EQ_EXPR)
13522 return omit_two_operands_loc (loc, type, boolean_false_node,
13523 real0, real1);
13524 return fold_build2_loc (loc, NE_EXPR, type, real0, real1);
13526 else
13528 if (code == NE_EXPR)
13529 return omit_two_operands_loc (loc, type, boolean_true_node,
13530 real0, real1);
13531 return fold_build2_loc (loc, EQ_EXPR, type, real0, real1);
13536 return NULL_TREE;
13538 case LT_EXPR:
13539 case GT_EXPR:
13540 case LE_EXPR:
13541 case GE_EXPR:
13542 tem = fold_comparison (loc, code, type, op0, op1);
13543 if (tem != NULL_TREE)
13544 return tem;
13546 /* Transform comparisons of the form X +- C CMP X. */
13547 if ((TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
13548 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
13549 && ((TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
13550 && !HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0))))
13551 || (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
13552 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))))
13554 tree arg01 = TREE_OPERAND (arg0, 1);
13555 enum tree_code code0 = TREE_CODE (arg0);
13556 int is_positive;
13558 if (TREE_CODE (arg01) == REAL_CST)
13559 is_positive = REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg01)) ? -1 : 1;
13560 else
13561 is_positive = tree_int_cst_sgn (arg01);
13563 /* (X - c) > X becomes false. */
13564 if (code == GT_EXPR
13565 && ((code0 == MINUS_EXPR && is_positive >= 0)
13566 || (code0 == PLUS_EXPR && is_positive <= 0)))
13568 if (TREE_CODE (arg01) == INTEGER_CST
13569 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
13570 fold_overflow_warning (("assuming signed overflow does not "
13571 "occur when assuming that (X - c) > X "
13572 "is always false"),
13573 WARN_STRICT_OVERFLOW_ALL);
13574 return constant_boolean_node (0, type);
13577 /* Likewise (X + c) < X becomes false. */
13578 if (code == LT_EXPR
13579 && ((code0 == PLUS_EXPR && is_positive >= 0)
13580 || (code0 == MINUS_EXPR && is_positive <= 0)))
13582 if (TREE_CODE (arg01) == INTEGER_CST
13583 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
13584 fold_overflow_warning (("assuming signed overflow does not "
13585 "occur when assuming that "
13586 "(X + c) < X is always false"),
13587 WARN_STRICT_OVERFLOW_ALL);
13588 return constant_boolean_node (0, type);
13591 /* Convert (X - c) <= X to true. */
13592 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1)))
13593 && code == LE_EXPR
13594 && ((code0 == MINUS_EXPR && is_positive >= 0)
13595 || (code0 == PLUS_EXPR && is_positive <= 0)))
13597 if (TREE_CODE (arg01) == INTEGER_CST
13598 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
13599 fold_overflow_warning (("assuming signed overflow does not "
13600 "occur when assuming that "
13601 "(X - c) <= X is always true"),
13602 WARN_STRICT_OVERFLOW_ALL);
13603 return constant_boolean_node (1, type);
13606 /* Convert (X + c) >= X to true. */
13607 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1)))
13608 && code == GE_EXPR
13609 && ((code0 == PLUS_EXPR && is_positive >= 0)
13610 || (code0 == MINUS_EXPR && is_positive <= 0)))
13612 if (TREE_CODE (arg01) == INTEGER_CST
13613 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
13614 fold_overflow_warning (("assuming signed overflow does not "
13615 "occur when assuming that "
13616 "(X + c) >= X is always true"),
13617 WARN_STRICT_OVERFLOW_ALL);
13618 return constant_boolean_node (1, type);
13621 if (TREE_CODE (arg01) == INTEGER_CST)
13623 /* Convert X + c > X and X - c < X to true for integers. */
13624 if (code == GT_EXPR
13625 && ((code0 == PLUS_EXPR && is_positive > 0)
13626 || (code0 == MINUS_EXPR && is_positive < 0)))
13628 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
13629 fold_overflow_warning (("assuming signed overflow does "
13630 "not occur when assuming that "
13631 "(X + c) > X is always true"),
13632 WARN_STRICT_OVERFLOW_ALL);
13633 return constant_boolean_node (1, type);
13636 if (code == LT_EXPR
13637 && ((code0 == MINUS_EXPR && is_positive > 0)
13638 || (code0 == PLUS_EXPR && is_positive < 0)))
13640 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
13641 fold_overflow_warning (("assuming signed overflow does "
13642 "not occur when assuming that "
13643 "(X - c) < X is always true"),
13644 WARN_STRICT_OVERFLOW_ALL);
13645 return constant_boolean_node (1, type);
13648 /* Convert X + c <= X and X - c >= X to false for integers. */
13649 if (code == LE_EXPR
13650 && ((code0 == PLUS_EXPR && is_positive > 0)
13651 || (code0 == MINUS_EXPR && is_positive < 0)))
13653 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
13654 fold_overflow_warning (("assuming signed overflow does "
13655 "not occur when assuming that "
13656 "(X + c) <= X is always false"),
13657 WARN_STRICT_OVERFLOW_ALL);
13658 return constant_boolean_node (0, type);
13661 if (code == GE_EXPR
13662 && ((code0 == MINUS_EXPR && is_positive > 0)
13663 || (code0 == PLUS_EXPR && is_positive < 0)))
13665 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
13666 fold_overflow_warning (("assuming signed overflow does "
13667 "not occur when assuming that "
13668 "(X - c) >= X is always false"),
13669 WARN_STRICT_OVERFLOW_ALL);
13670 return constant_boolean_node (0, type);
13675 /* Comparisons with the highest or lowest possible integer of
13676 the specified precision will have known values. */
13678 tree arg1_type = TREE_TYPE (arg1);
13679 unsigned int prec = TYPE_PRECISION (arg1_type);
13681 if (TREE_CODE (arg1) == INTEGER_CST
13682 && (INTEGRAL_TYPE_P (arg1_type) || POINTER_TYPE_P (arg1_type)))
13684 wide_int max = wi::max_value (arg1_type);
13685 wide_int signed_max = wi::max_value (prec, SIGNED);
13686 wide_int min = wi::min_value (arg1_type);
13688 if (wi::eq_p (arg1, max))
13689 switch (code)
13691 case GT_EXPR:
13692 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
13694 case GE_EXPR:
13695 return fold_build2_loc (loc, EQ_EXPR, type, op0, op1);
13697 case LE_EXPR:
13698 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
13700 case LT_EXPR:
13701 return fold_build2_loc (loc, NE_EXPR, type, op0, op1);
13703 /* The GE_EXPR and LT_EXPR cases above are not normally
13704 reached because of previous transformations. */
13706 default:
13707 break;
13709 else if (wi::eq_p (arg1, max - 1))
13710 switch (code)
13712 case GT_EXPR:
13713 arg1 = const_binop (PLUS_EXPR, arg1,
13714 build_int_cst (TREE_TYPE (arg1), 1));
13715 return fold_build2_loc (loc, EQ_EXPR, type,
13716 fold_convert_loc (loc,
13717 TREE_TYPE (arg1), arg0),
13718 arg1);
13719 case LE_EXPR:
13720 arg1 = const_binop (PLUS_EXPR, arg1,
13721 build_int_cst (TREE_TYPE (arg1), 1));
13722 return fold_build2_loc (loc, NE_EXPR, type,
13723 fold_convert_loc (loc, TREE_TYPE (arg1),
13724 arg0),
13725 arg1);
13726 default:
13727 break;
13729 else if (wi::eq_p (arg1, min))
13730 switch (code)
13732 case LT_EXPR:
13733 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
13735 case LE_EXPR:
13736 return fold_build2_loc (loc, EQ_EXPR, type, op0, op1);
13738 case GE_EXPR:
13739 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
13741 case GT_EXPR:
13742 return fold_build2_loc (loc, NE_EXPR, type, op0, op1);
13744 default:
13745 break;
13747 else if (wi::eq_p (arg1, min + 1))
13748 switch (code)
13750 case GE_EXPR:
13751 arg1 = const_binop (MINUS_EXPR, arg1,
13752 build_int_cst (TREE_TYPE (arg1), 1));
13753 return fold_build2_loc (loc, NE_EXPR, type,
13754 fold_convert_loc (loc,
13755 TREE_TYPE (arg1), arg0),
13756 arg1);
13757 case LT_EXPR:
13758 arg1 = const_binop (MINUS_EXPR, arg1,
13759 build_int_cst (TREE_TYPE (arg1), 1));
13760 return fold_build2_loc (loc, EQ_EXPR, type,
13761 fold_convert_loc (loc, TREE_TYPE (arg1),
13762 arg0),
13763 arg1);
13764 default:
13765 break;
13768 else if (wi::eq_p (arg1, signed_max)
13769 && TYPE_UNSIGNED (arg1_type)
13770 /* We will flip the signedness of the comparison operator
13771 associated with the mode of arg1, so the sign bit is
13772 specified by this mode. Check that arg1 is the signed
13773 max associated with this sign bit. */
13774 && prec == GET_MODE_PRECISION (TYPE_MODE (arg1_type))
13775 /* signed_type does not work on pointer types. */
13776 && INTEGRAL_TYPE_P (arg1_type))
13778 /* The following case also applies to X < signed_max+1
13779 and X >= signed_max+1 because of previous transformations. */
13780 if (code == LE_EXPR || code == GT_EXPR)
13782 tree st = signed_type_for (arg1_type);
13783 return fold_build2_loc (loc,
13784 code == LE_EXPR ? GE_EXPR : LT_EXPR,
13785 type, fold_convert_loc (loc, st, arg0),
13786 build_int_cst (st, 0));
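      /* Added illustration (annotation only): for 32-bit operands the
	 extreme-value cases above fold e.g.

	   (unsigned) x >  0xffffffffu   -->   false
	   (unsigned) x >= 0xffffffffu   -->   x == 0xffffffffu
	   (int) x      <= 0x7fffffff    -->   true
	   (unsigned) x <= 0x7fffffffu   -->   (int) x >= 0

	 the last one by flipping to a signed comparison against zero,
	 which tests the sign bit directly.  */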
13792 /* If we are comparing an ABS_EXPR with a constant, we can
13793 convert all the cases into explicit comparisons, but they may
13794 well not be faster than doing the ABS and one comparison.
13795 But ABS (X) <= C is a range comparison, which becomes a subtraction
13796 and a comparison, and is probably faster. */
13797 if (code == LE_EXPR
13798 && TREE_CODE (arg1) == INTEGER_CST
13799 && TREE_CODE (arg0) == ABS_EXPR
13800 && ! TREE_SIDE_EFFECTS (arg0)
13801 && (0 != (tem = negate_expr (arg1)))
13802 && TREE_CODE (tem) == INTEGER_CST
13803 && !TREE_OVERFLOW (tem))
13804 return fold_build2_loc (loc, TRUTH_ANDIF_EXPR, type,
13805 build2 (GE_EXPR, type,
13806 TREE_OPERAND (arg0, 0), tem),
13807 build2 (LE_EXPR, type,
13808 TREE_OPERAND (arg0, 0), arg1));
13810 /* Convert ABS_EXPR<x> >= 0 to true. */
13811 strict_overflow_p = false;
13812 if (code == GE_EXPR
13813 && (integer_zerop (arg1)
13814 || (! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0)))
13815 && real_zerop (arg1)))
13816 && tree_expr_nonnegative_warnv_p (arg0, &strict_overflow_p))
13818 if (strict_overflow_p)
13819 fold_overflow_warning (("assuming signed overflow does not occur "
13820 "when simplifying comparison of "
13821 "absolute value and zero"),
13822 WARN_STRICT_OVERFLOW_CONDITIONAL);
13823 return omit_one_operand_loc (loc, type,
13824 constant_boolean_node (true, type),
13825 arg0);
13828 /* Convert ABS_EXPR<x> < 0 to false. */
13829 strict_overflow_p = false;
13830 if (code == LT_EXPR
13831 && (integer_zerop (arg1) || real_zerop (arg1))
13832 && tree_expr_nonnegative_warnv_p (arg0, &strict_overflow_p))
13834 if (strict_overflow_p)
13835 fold_overflow_warning (("assuming signed overflow does not occur "
13836 "when simplifying comparison of "
13837 "absolute value and zero"),
13838 WARN_STRICT_OVERFLOW_CONDITIONAL);
13839 return omit_one_operand_loc (loc, type,
13840 constant_boolean_node (false, type),
13841 arg0);
13844 /* If X is unsigned, convert X < (1 << Y) into X >> Y == 0
13845 and similarly for >= into !=. */
13846 if ((code == LT_EXPR || code == GE_EXPR)
13847 && TYPE_UNSIGNED (TREE_TYPE (arg0))
13848 && TREE_CODE (arg1) == LSHIFT_EXPR
13849 && integer_onep (TREE_OPERAND (arg1, 0)))
13850 return build2_loc (loc, code == LT_EXPR ? EQ_EXPR : NE_EXPR, type,
13851 build2 (RSHIFT_EXPR, TREE_TYPE (arg0), arg0,
13852 TREE_OPERAND (arg1, 1)),
13853 build_zero_cst (TREE_TYPE (arg0)));
13855 /* Similarly for X < (cast) (1 << Y). But the cast can't be narrowing,
13856 otherwise Y might be >= # of bits in X's type and thus e.g.
13857 (unsigned char) (1 << Y) for Y == 15 might be 0.
13858 If the cast is widening, then 1 << Y should have unsigned type,
13859 otherwise if Y is the number of bits in the signed shift type minus 1,
13860 we can't optimize this. E.g. (unsigned long long) (1 << Y) for
13861 Y == 31 might be 0xffffffff80000000. */
13862 if ((code == LT_EXPR || code == GE_EXPR)
13863 && TYPE_UNSIGNED (TREE_TYPE (arg0))
13864 && CONVERT_EXPR_P (arg1)
13865 && TREE_CODE (TREE_OPERAND (arg1, 0)) == LSHIFT_EXPR
13866 && (TYPE_PRECISION (TREE_TYPE (arg1))
13867 >= TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg1, 0))))
13868 && (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg1, 0)))
13869 || (TYPE_PRECISION (TREE_TYPE (arg1))
13870 == TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg1, 0)))))
13871 && integer_onep (TREE_OPERAND (TREE_OPERAND (arg1, 0), 0)))
13873 tem = build2 (RSHIFT_EXPR, TREE_TYPE (arg0), arg0,
13874 TREE_OPERAND (TREE_OPERAND (arg1, 0), 1));
13875 return build2_loc (loc, code == LT_EXPR ? EQ_EXPR : NE_EXPR, type,
13876 fold_convert_loc (loc, TREE_TYPE (arg0), tem),
13877 build_zero_cst (TREE_TYPE (arg0)));
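/* Worked example for the two shift transformations above (illustrative
   only): with unsigned X,

     X < (1 << Y)   becomes  (X >> Y) == 0
     X >= (1 << Y)  becomes  (X >> Y) != 0

   since X < 2**Y exactly when no bit at position Y or above is set.  */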
13880 return NULL_TREE;
13882 case UNORDERED_EXPR:
13883 case ORDERED_EXPR:
13884 case UNLT_EXPR:
13885 case UNLE_EXPR:
13886 case UNGT_EXPR:
13887 case UNGE_EXPR:
13888 case UNEQ_EXPR:
13889 case LTGT_EXPR:
13890 if (TREE_CODE (arg0) == REAL_CST && TREE_CODE (arg1) == REAL_CST)
13892 t1 = fold_relational_const (code, type, arg0, arg1);
13893 if (t1 != NULL_TREE)
13894 return t1;
13897 /* If the first operand is NaN, the result is constant. */
13898 if (TREE_CODE (arg0) == REAL_CST
13899 && REAL_VALUE_ISNAN (TREE_REAL_CST (arg0))
13900 && (code != LTGT_EXPR || ! flag_trapping_math))
13902 t1 = (code == ORDERED_EXPR || code == LTGT_EXPR)
13903 ? integer_zero_node
13904 : integer_one_node;
13905 return omit_one_operand_loc (loc, type, t1, arg1);
13908 /* If the second operand is NaN, the result is constant. */
13909 if (TREE_CODE (arg1) == REAL_CST
13910 && REAL_VALUE_ISNAN (TREE_REAL_CST (arg1))
13911 && (code != LTGT_EXPR || ! flag_trapping_math))
13913 t1 = (code == ORDERED_EXPR || code == LTGT_EXPR)
13914 ? integer_zero_node
13915 : integer_one_node;
13916 return omit_one_operand_loc (loc, type, t1, arg0);
13919 /* Simplify unordered comparison of something with itself. */
13920 if ((code == UNLE_EXPR || code == UNGE_EXPR || code == UNEQ_EXPR)
13921 && operand_equal_p (arg0, arg1, 0))
13922 return constant_boolean_node (1, type);
13924 if (code == LTGT_EXPR
13925 && !flag_trapping_math
13926 && operand_equal_p (arg0, arg1, 0))
13927 return constant_boolean_node (0, type);
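/* For example (illustrative only): UNEQ_EXPR <X, X> is always true,
   because X == X unless X is a NaN, and any UN* comparison is true on
   NaNs.  Dually, LTGT_EXPR <X, X> is always false, but folding it may
   drop an invalid-operand trap on a signaling NaN, hence the
   !flag_trapping_math guard.  */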
13929 /* Fold (double)float1 CMP (double)float2 into float1 CMP float2. */
13931 tree targ0 = strip_float_extensions (arg0);
13932 tree targ1 = strip_float_extensions (arg1);
13933 tree newtype = TREE_TYPE (targ0);
13935 if (TYPE_PRECISION (TREE_TYPE (targ1)) > TYPE_PRECISION (newtype))
13936 newtype = TREE_TYPE (targ1);
13938 if (TYPE_PRECISION (newtype) < TYPE_PRECISION (TREE_TYPE (arg0)))
13939 return fold_build2_loc (loc, code, type,
13940 fold_convert_loc (loc, newtype, targ0),
13941 fold_convert_loc (loc, newtype, targ1));
13944 return NULL_TREE;
13946 case COMPOUND_EXPR:
13947 /* When pedantic, a compound expression can be neither an lvalue
13948 nor an integer constant expression. */
13949 if (TREE_SIDE_EFFECTS (arg0) || TREE_CONSTANT (arg1))
13950 return NULL_TREE;
13951 /* Don't let (0, 0) be a null pointer constant. */
13952 tem = integer_zerop (arg1) ? build1 (NOP_EXPR, type, arg1)
13953 : fold_convert_loc (loc, type, arg1);
13954 return pedantic_non_lvalue_loc (loc, tem);
13956 case COMPLEX_EXPR:
13957 if ((TREE_CODE (arg0) == REAL_CST
13958 && TREE_CODE (arg1) == REAL_CST)
13959 || (TREE_CODE (arg0) == INTEGER_CST
13960 && TREE_CODE (arg1) == INTEGER_CST))
13961 return build_complex (type, arg0, arg1);
13962 if (TREE_CODE (arg0) == REALPART_EXPR
13963 && TREE_CODE (arg1) == IMAGPART_EXPR
13964 && TREE_TYPE (TREE_OPERAND (arg0, 0)) == type
13965 && operand_equal_p (TREE_OPERAND (arg0, 0),
13966 TREE_OPERAND (arg1, 0), 0))
13967 return omit_one_operand_loc (loc, type, TREE_OPERAND (arg0, 0),
13968 TREE_OPERAND (arg1, 0));
13969 return NULL_TREE;
13971 case ASSERT_EXPR:
13972 /* An ASSERT_EXPR should never be passed to fold_binary. */
13973 gcc_unreachable ();
13975 case VEC_PACK_TRUNC_EXPR:
13976 case VEC_PACK_FIX_TRUNC_EXPR:
13978 unsigned int nelts = TYPE_VECTOR_SUBPARTS (type), i;
13979 tree *elts;
13981 gcc_assert (TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0)) == nelts / 2
13982 && TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg1)) == nelts / 2);
13983 if (TREE_CODE (arg0) != VECTOR_CST || TREE_CODE (arg1) != VECTOR_CST)
13984 return NULL_TREE;
13986 elts = XALLOCAVEC (tree, nelts);
13987 if (!vec_cst_ctor_to_array (arg0, elts)
13988 || !vec_cst_ctor_to_array (arg1, elts + nelts / 2))
13989 return NULL_TREE;
13991 for (i = 0; i < nelts; i++)
13993 elts[i] = fold_convert_const (code == VEC_PACK_TRUNC_EXPR
13994 ? NOP_EXPR : FIX_TRUNC_EXPR,
13995 TREE_TYPE (type), elts[i]);
13996 if (elts[i] == NULL_TREE || !CONSTANT_CLASS_P (elts[i]))
13997 return NULL_TREE;
14000 return build_vector (type, elts);
14003 case VEC_WIDEN_MULT_LO_EXPR:
14004 case VEC_WIDEN_MULT_HI_EXPR:
14005 case VEC_WIDEN_MULT_EVEN_EXPR:
14006 case VEC_WIDEN_MULT_ODD_EXPR:
14008 unsigned int nelts = TYPE_VECTOR_SUBPARTS (type);
14009 unsigned int out, ofs, scale;
14010 tree *elts;
14012 gcc_assert (TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0)) == nelts * 2
14013 && TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg1)) == nelts * 2);
14014 if (TREE_CODE (arg0) != VECTOR_CST || TREE_CODE (arg1) != VECTOR_CST)
14015 return NULL_TREE;
14017 elts = XALLOCAVEC (tree, nelts * 4);
14018 if (!vec_cst_ctor_to_array (arg0, elts)
14019 || !vec_cst_ctor_to_array (arg1, elts + nelts * 2))
14020 return NULL_TREE;
14022 if (code == VEC_WIDEN_MULT_LO_EXPR)
14023 scale = 0, ofs = BYTES_BIG_ENDIAN ? nelts : 0;
14024 else if (code == VEC_WIDEN_MULT_HI_EXPR)
14025 scale = 0, ofs = BYTES_BIG_ENDIAN ? 0 : nelts;
14026 else if (code == VEC_WIDEN_MULT_EVEN_EXPR)
14027 scale = 1, ofs = 0;
14028 else /* if (code == VEC_WIDEN_MULT_ODD_EXPR) */
14029 scale = 1, ofs = 1;
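/* Lane numbering sketch (illustrative only): for NELTS == 2 output
   lanes and V4 inputs on a little-endian target, the output lanes
   read input lanes

     LO:   0, 1       HI:   2, 3
     EVEN: 0, 2       ODD:  1, 3

   via IN1 = (OUT << SCALE) + OFS computed below.  */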
14031 for (out = 0; out < nelts; out++)
14033 unsigned int in1 = (out << scale) + ofs;
14034 unsigned int in2 = in1 + nelts * 2;
14035 tree t1, t2;
14037 t1 = fold_convert_const (NOP_EXPR, TREE_TYPE (type), elts[in1]);
14038 t2 = fold_convert_const (NOP_EXPR, TREE_TYPE (type), elts[in2]);
14040 if (t1 == NULL_TREE || t2 == NULL_TREE)
14041 return NULL_TREE;
14042 elts[out] = const_binop (MULT_EXPR, t1, t2);
14043 if (elts[out] == NULL_TREE || !CONSTANT_CLASS_P (elts[out]))
14044 return NULL_TREE;
14047 return build_vector (type, elts);
14050 default:
14051 return NULL_TREE;
14052 } /* switch (code) */
14055 /* Fold a binary expression of code CODE and type TYPE with operands
14056 OP0 and OP1. Return the folded expression if folding is
14057 successful. Otherwise, return NULL_TREE.
14058 This is a wrapper around the fold_binary_loc_1 function, which
14059 does the actual folding. */
14061 tree
14062 fold_binary_loc (location_t loc,
14063 enum tree_code code, tree type, tree op0, tree op1)
14065 tree tem = fold_binary_loc_1 (loc, code, type, op0, op1);
14066 return tem;
14069 /* Callback for walk_tree, looking for LABEL_EXPR. Return *TP if it is
14070 a LABEL_EXPR; otherwise return NULL_TREE. Do not check the subtrees
14071 of GOTO_EXPR. */
14073 static tree
14074 contains_label_1 (tree *tp, int *walk_subtrees, void *data ATTRIBUTE_UNUSED)
14076 switch (TREE_CODE (*tp))
14078 case LABEL_EXPR:
14079 return *tp;
14081 case GOTO_EXPR:
14082 *walk_subtrees = 0;
14084 /* ... fall through ... */
14086 default:
14087 return NULL_TREE;
14091 /* Return whether the sub-tree ST contains a label which is accessible from
14092 outside the sub-tree. */
14094 static bool
14095 contains_label_p (tree st)
14097 return
14098 (walk_tree_without_duplicates (&st, contains_label_1 , NULL) != NULL_TREE);
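/* Usage sketch (illustrative only): when folding a constant COND_EXPR
   such as 0 ? <arm containing "lab:"> : <other arm>, the dead arm may
   still be the target of a goto elsewhere, so contains_label_p is
   consulted below before that arm is discarded.  */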
14101 /* Fold a ternary expression of code CODE and type TYPE with operands
14102 OP0, OP1, and OP2. Return the folded expression if folding is
14103 successful. Otherwise, return NULL_TREE. */
14105 static tree
14106 fold_ternary_loc_1 (location_t loc, enum tree_code code, tree type,
14107 tree op0, tree op1, tree op2)
14109 tree tem;
14110 tree arg0 = NULL_TREE, arg1 = NULL_TREE, arg2 = NULL_TREE;
14111 enum tree_code_class kind = TREE_CODE_CLASS (code);
14113 gcc_assert (IS_EXPR_CODE_CLASS (kind)
14114 && TREE_CODE_LENGTH (code) == 3);
14116 /* Strip any conversions that don't change the mode. This is safe
14117 for every expression, except for a comparison expression because
14118 its signedness is derived from its operands. So, in the latter
14119 case, only strip conversions that don't change the signedness.
14121 Note that this is done as an internal manipulation within the
14122 constant folder, in order to find the simplest representation of
14123 the arguments so that their form can be studied. In any cases,
14124 the appropriate type conversions should be put back in the tree
14125 that will get out of the constant folder. */
14126 if (op0)
14128 arg0 = op0;
14129 STRIP_NOPS (arg0);
14132 if (op1)
14134 arg1 = op1;
14135 STRIP_NOPS (arg1);
14138 if (op2)
14140 arg2 = op2;
14141 STRIP_NOPS (arg2);
14144 switch (code)
14146 case COMPONENT_REF:
14147 if (TREE_CODE (arg0) == CONSTRUCTOR
14148 && ! type_contains_placeholder_p (TREE_TYPE (arg0)))
14150 unsigned HOST_WIDE_INT idx;
14151 tree field, value;
14152 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (arg0), idx, field, value)
14153 if (field == arg1)
14154 return value;
14156 return NULL_TREE;
14158 case COND_EXPR:
14159 case VEC_COND_EXPR:
14160 /* Pedantic ANSI C says that a conditional expression is never an lvalue,
14161 so all simple results must be passed through pedantic_non_lvalue. */
14162 if (TREE_CODE (arg0) == INTEGER_CST)
14164 tree unused_op = integer_zerop (arg0) ? op1 : op2;
14165 tem = integer_zerop (arg0) ? op2 : op1;
14166 /* Only optimize constant conditions when the selected branch
14167 has the same type as the COND_EXPR. This avoids optimizing
14168 away "c ? x : throw", where the throw has a void type.
14169 Avoid throwing away an operand which contains a label. */
14170 if ((!TREE_SIDE_EFFECTS (unused_op)
14171 || !contains_label_p (unused_op))
14172 && (! VOID_TYPE_P (TREE_TYPE (tem))
14173 || VOID_TYPE_P (type)))
14174 return pedantic_non_lvalue_loc (loc, tem);
14175 return NULL_TREE;
14177 else if (TREE_CODE (arg0) == VECTOR_CST)
14179 if (integer_all_onesp (arg0))
14180 return pedantic_omit_one_operand_loc (loc, type, arg1, arg2);
14181 if (integer_zerop (arg0))
14182 return pedantic_omit_one_operand_loc (loc, type, arg2, arg1);
14184 if ((TREE_CODE (arg1) == VECTOR_CST
14185 || TREE_CODE (arg1) == CONSTRUCTOR)
14186 && (TREE_CODE (arg2) == VECTOR_CST
14187 || TREE_CODE (arg2) == CONSTRUCTOR))
14189 unsigned int nelts = TYPE_VECTOR_SUBPARTS (type), i;
14190 unsigned char *sel = XALLOCAVEC (unsigned char, nelts);
14191 gcc_assert (nelts == VECTOR_CST_NELTS (arg0));
14192 for (i = 0; i < nelts; i++)
14194 tree val = VECTOR_CST_ELT (arg0, i);
14195 if (integer_all_onesp (val))
14196 sel[i] = i;
14197 else if (integer_zerop (val))
14198 sel[i] = nelts + i;
14199 else /* Currently unreachable. */
14200 return NULL_TREE;
14202 tree t = fold_vec_perm (type, arg1, arg2, sel);
14203 if (t != NULL_TREE)
14204 return t;
14208 if (operand_equal_p (arg1, op2, 0))
14209 return pedantic_omit_one_operand_loc (loc, type, arg1, arg0);
14211 /* If we have A op B ? A : C, we may be able to convert this to a
14212 simpler expression, depending on the operation and the values
14213 of B and C. Signed zeros prevent all of these transformations,
14214 for reasons given above each one.
14216 Also try swapping the arguments and inverting the conditional. */
14217 if (COMPARISON_CLASS_P (arg0)
14218 && operand_equal_for_comparison_p (TREE_OPERAND (arg0, 0),
14219 arg1, TREE_OPERAND (arg0, 1))
14220 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg1))))
14222 tem = fold_cond_expr_with_comparison (loc, type, arg0, op1, op2);
14223 if (tem)
14224 return tem;
14227 if (COMPARISON_CLASS_P (arg0)
14228 && operand_equal_for_comparison_p (TREE_OPERAND (arg0, 0),
14229 op2,
14230 TREE_OPERAND (arg0, 1))
14231 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (op2))))
14233 location_t loc0 = expr_location_or (arg0, loc);
14234 tem = fold_invert_truthvalue (loc0, arg0);
14235 if (tem && COMPARISON_CLASS_P (tem))
14237 tem = fold_cond_expr_with_comparison (loc, type, tem, op2, op1);
14238 if (tem)
14239 return tem;
14243 /* If the second operand is simpler than the third, swap them
14244 since that produces better jump optimization results. */
14245 if (truth_value_p (TREE_CODE (arg0))
14246 && tree_swap_operands_p (op1, op2, false))
14248 location_t loc0 = expr_location_or (arg0, loc);
14249 /* See if this can be inverted. If it can't, possibly because
14250 it was a floating-point inequality comparison, don't do
14251 anything. */
14252 tem = fold_invert_truthvalue (loc0, arg0);
14253 if (tem)
14254 return fold_build3_loc (loc, code, type, tem, op2, op1);
14257 /* Convert A ? 1 : 0 to simply A. */
14258 if ((code == VEC_COND_EXPR ? integer_all_onesp (op1)
14259 : (integer_onep (op1)
14260 && !VECTOR_TYPE_P (type)))
14261 && integer_zerop (op2)
14262 /* If we try to convert OP0 to our type, the
14263 call to fold will try to move the conversion inside
14264 a COND, which will recurse. In that case, the COND_EXPR
14265 is probably the best choice, so leave it alone. */
14266 && type == TREE_TYPE (arg0))
14267 return pedantic_non_lvalue_loc (loc, arg0);
14269 /* Convert A ? 0 : 1 to !A. This prefers the use of NOT_EXPR
14270 over COND_EXPR in cases such as floating point comparisons. */
14271 if (integer_zerop (op1)
14272 && (code == VEC_COND_EXPR ? integer_all_onesp (op2)
14273 : (integer_onep (op2)
14274 && !VECTOR_TYPE_P (type)))
14275 && truth_value_p (TREE_CODE (arg0)))
14276 return pedantic_non_lvalue_loc (loc,
14277 fold_convert_loc (loc, type,
14278 invert_truthvalue_loc (loc,
14279 arg0)));
14281 /* A < 0 ? <sign bit of A> : 0 is simply (A & <sign bit of A>). */
14282 if (TREE_CODE (arg0) == LT_EXPR
14283 && integer_zerop (TREE_OPERAND (arg0, 1))
14284 && integer_zerop (op2)
14285 && (tem = sign_bit_p (TREE_OPERAND (arg0, 0), arg1)))
14287 /* sign_bit_p looks through both zero and sign extensions,
14288 but for this optimization only sign extensions are
14289 usable. */
14290 tree tem2 = TREE_OPERAND (arg0, 0);
14291 while (tem != tem2)
14293 if (TREE_CODE (tem2) != NOP_EXPR
14294 || TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (tem2, 0))))
14296 tem = NULL_TREE;
14297 break;
14299 tem2 = TREE_OPERAND (tem2, 0);
14301 /* sign_bit_p only checks ARG1 bits within A's precision.
14302 If <sign bit of A> has wider type than A, bits outside
14303 of A's precision in <sign bit of A> need to be checked.
14304 If they are all 0, this optimization needs to be done
14305 in unsigned A's type, if they are all 1 in signed A's type,
14306 otherwise this can't be done. */
14307 if (tem
14308 && TYPE_PRECISION (TREE_TYPE (tem))
14309 < TYPE_PRECISION (TREE_TYPE (arg1))
14310 && TYPE_PRECISION (TREE_TYPE (tem))
14311 < TYPE_PRECISION (type))
14313 int inner_width, outer_width;
14314 tree tem_type;
14316 inner_width = TYPE_PRECISION (TREE_TYPE (tem));
14317 outer_width = TYPE_PRECISION (TREE_TYPE (arg1));
14318 if (outer_width > TYPE_PRECISION (type))
14319 outer_width = TYPE_PRECISION (type);
14321 wide_int mask = wi::shifted_mask
14322 (inner_width, outer_width - inner_width, false,
14323 TYPE_PRECISION (TREE_TYPE (arg1)));
14325 wide_int common = mask & arg1;
14326 if (common == mask)
14328 tem_type = signed_type_for (TREE_TYPE (tem));
14329 tem = fold_convert_loc (loc, tem_type, tem);
14331 else if (common == 0)
14333 tem_type = unsigned_type_for (TREE_TYPE (tem));
14334 tem = fold_convert_loc (loc, tem_type, tem);
14336 else
14337 tem = NULL;
14340 if (tem)
14341 return
14342 fold_convert_loc (loc, type,
14343 fold_build2_loc (loc, BIT_AND_EXPR,
14344 TREE_TYPE (tem), tem,
14345 fold_convert_loc (loc,
14346 TREE_TYPE (tem),
14347 arg1)));
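/* Concrete instance (illustrative only): for a 32-bit signed X,

     X < 0 ? 0x80000000 : 0  becomes  X & 0x80000000

   since the sign bit of X is set exactly when X < 0.  */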
14350 /* (A >> N) & 1 ? (1 << N) : 0 is simply A & (1 << N). A & 1 was
14351 already handled above. */
14352 if (TREE_CODE (arg0) == BIT_AND_EXPR
14353 && integer_onep (TREE_OPERAND (arg0, 1))
14354 && integer_zerop (op2)
14355 && integer_pow2p (arg1))
14357 tree tem = TREE_OPERAND (arg0, 0);
14358 STRIP_NOPS (tem);
14359 if (TREE_CODE (tem) == RSHIFT_EXPR
14360 && tree_fits_uhwi_p (TREE_OPERAND (tem, 1))
14361 && (unsigned HOST_WIDE_INT) tree_log2 (arg1) ==
14362 tree_to_uhwi (TREE_OPERAND (tem, 1)))
14363 return fold_build2_loc (loc, BIT_AND_EXPR, type,
14364 TREE_OPERAND (tem, 0), arg1);
14367 /* A & N ? N : 0 is simply A & N if N is a power of two. This
14368 is probably obsolete because the first operand should be a
14369 truth value (that's why we have the two cases above), but let's
14370 leave it in until we can confirm this for all front-ends. */
14371 if (integer_zerop (op2)
14372 && TREE_CODE (arg0) == NE_EXPR
14373 && integer_zerop (TREE_OPERAND (arg0, 1))
14374 && integer_pow2p (arg1)
14375 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_AND_EXPR
14376 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
14377 arg1, OEP_ONLY_CONST))
14378 return pedantic_non_lvalue_loc (loc,
14379 fold_convert_loc (loc, type,
14380 TREE_OPERAND (arg0, 0)));
14382 /* Disable the transformations below for vectors, since
14383 fold_binary_op_with_conditional_arg may undo them immediately,
14384 yielding an infinite loop. */
14385 if (code == VEC_COND_EXPR)
14386 return NULL_TREE;
14388 /* Convert A ? B : 0 into A && B if A and B are truth values. */
14389 if (integer_zerop (op2)
14390 && truth_value_p (TREE_CODE (arg0))
14391 && truth_value_p (TREE_CODE (arg1))
14392 && (code == VEC_COND_EXPR || !VECTOR_TYPE_P (type)))
14393 return fold_build2_loc (loc, code == VEC_COND_EXPR ? BIT_AND_EXPR
14394 : TRUTH_ANDIF_EXPR,
14395 type, fold_convert_loc (loc, type, arg0), arg1);
14397 /* Convert A ? B : 1 into !A || B if A and B are truth values. */
14398 if ((code == VEC_COND_EXPR ? integer_all_onesp (op2) : integer_onep (op2))
14399 && truth_value_p (TREE_CODE (arg0))
14400 && truth_value_p (TREE_CODE (arg1))
14401 && (code == VEC_COND_EXPR || !VECTOR_TYPE_P (type)))
14403 location_t loc0 = expr_location_or (arg0, loc);
14404 /* Only perform transformation if ARG0 is easily inverted. */
14405 tem = fold_invert_truthvalue (loc0, arg0);
14406 if (tem)
14407 return fold_build2_loc (loc, code == VEC_COND_EXPR
14408 ? BIT_IOR_EXPR
14409 : TRUTH_ORIF_EXPR,
14410 type, fold_convert_loc (loc, type, tem),
14411 arg1);
14414 /* Convert A ? 0 : B into !A && B if A and B are truth values. */
14415 if (integer_zerop (arg1)
14416 && truth_value_p (TREE_CODE (arg0))
14417 && truth_value_p (TREE_CODE (op2))
14418 && (code == VEC_COND_EXPR || !VECTOR_TYPE_P (type)))
14420 location_t loc0 = expr_location_or (arg0, loc);
14421 /* Only perform transformation if ARG0 is easily inverted. */
14422 tem = fold_invert_truthvalue (loc0, arg0);
14423 if (tem)
14424 return fold_build2_loc (loc, code == VEC_COND_EXPR
14425 ? BIT_AND_EXPR : TRUTH_ANDIF_EXPR,
14426 type, fold_convert_loc (loc, type, tem),
14427 op2);
14430 /* Convert A ? 1 : B into A || B if A and B are truth values. */
14431 if ((code == VEC_COND_EXPR ? integer_all_onesp (arg1) : integer_onep (arg1))
14432 && truth_value_p (TREE_CODE (arg0))
14433 && truth_value_p (TREE_CODE (op2))
14434 && (code == VEC_COND_EXPR || !VECTOR_TYPE_P (type)))
14435 return fold_build2_loc (loc, code == VEC_COND_EXPR
14436 ? BIT_IOR_EXPR : TRUTH_ORIF_EXPR,
14437 type, fold_convert_loc (loc, type, arg0), op2);
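/* Summary of the four truth-value conversions above (illustrative
   only), for truth values A and B:

     A ? B : 0  ->  A && B        A ? B : 1  ->  !A || B
     A ? 0 : B  ->  !A && B       A ? 1 : B  ->  A || B

   with the bitwise variants used for VEC_COND_EXPR.  */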
14439 return NULL_TREE;
14441 case CALL_EXPR:
14442 /* CALL_EXPRs used to be ternary exprs. Catch any mistaken uses
14443 of fold_ternary on them. */
14444 gcc_unreachable ();
14446 case BIT_FIELD_REF:
14447 if ((TREE_CODE (arg0) == VECTOR_CST
14448 || (TREE_CODE (arg0) == CONSTRUCTOR
14449 && TREE_CODE (TREE_TYPE (arg0)) == VECTOR_TYPE))
14450 && (type == TREE_TYPE (TREE_TYPE (arg0))
14451 || (TREE_CODE (type) == VECTOR_TYPE
14452 && TREE_TYPE (type) == TREE_TYPE (TREE_TYPE (arg0)))))
14454 tree eltype = TREE_TYPE (TREE_TYPE (arg0));
14455 unsigned HOST_WIDE_INT width = tree_to_uhwi (TYPE_SIZE (eltype));
14456 unsigned HOST_WIDE_INT n = tree_to_uhwi (arg1);
14457 unsigned HOST_WIDE_INT idx = tree_to_uhwi (op2);
14459 if (n != 0
14460 && (idx % width) == 0
14461 && (n % width) == 0
14462 && ((idx + n) / width) <= TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0)))
14464 idx = idx / width;
14465 n = n / width;
14467 if (TREE_CODE (arg0) == VECTOR_CST)
14469 if (n == 1)
14470 return VECTOR_CST_ELT (arg0, idx);
14472 tree *vals = XALLOCAVEC (tree, n);
14473 for (unsigned i = 0; i < n; ++i)
14474 vals[i] = VECTOR_CST_ELT (arg0, idx + i);
14475 return build_vector (type, vals);
14478 /* Constructor elements can be subvectors. */
14479 unsigned HOST_WIDE_INT k = 1;
14480 if (CONSTRUCTOR_NELTS (arg0) != 0)
14482 tree cons_elem = TREE_TYPE (CONSTRUCTOR_ELT (arg0, 0)->value);
14483 if (TREE_CODE (cons_elem) == VECTOR_TYPE)
14484 k = TYPE_VECTOR_SUBPARTS (cons_elem);
14487 /* We keep an exact subset of the constructor elements. */
14488 if ((idx % k) == 0 && (n % k) == 0)
14490 if (CONSTRUCTOR_NELTS (arg0) == 0)
14491 return build_constructor (type, NULL);
14492 idx /= k;
14493 n /= k;
14494 if (n == 1)
14496 if (idx < CONSTRUCTOR_NELTS (arg0))
14497 return CONSTRUCTOR_ELT (arg0, idx)->value;
14498 return build_zero_cst (type);
14501 vec<constructor_elt, va_gc> *vals;
14502 vec_alloc (vals, n);
14503 for (unsigned i = 0;
14504 i < n && idx + i < CONSTRUCTOR_NELTS (arg0);
14505 ++i)
14506 CONSTRUCTOR_APPEND_ELT (vals, NULL_TREE,
14507 CONSTRUCTOR_ELT
14508 (arg0, idx + i)->value);
14509 return build_constructor (type, vals);
14511 /* The bitfield references a single constructor element. */
14512 else if (idx + n <= (idx / k + 1) * k)
14514 if (CONSTRUCTOR_NELTS (arg0) <= idx / k)
14515 return build_zero_cst (type);
14516 else if (n == k)
14517 return CONSTRUCTOR_ELT (arg0, idx / k)->value;
14518 else
14519 return fold_build3_loc (loc, code, type,
14520 CONSTRUCTOR_ELT (arg0, idx / k)->value, op1,
14521 build_int_cst (TREE_TYPE (op2), (idx % k) * width));
14526 /* A bit-field-ref that referenced the full argument can be stripped. */
14527 if (INTEGRAL_TYPE_P (TREE_TYPE (arg0))
14528 && TYPE_PRECISION (TREE_TYPE (arg0)) == tree_to_uhwi (arg1)
14529 && integer_zerop (op2))
14530 return fold_convert_loc (loc, type, arg0);
14532 /* On constants we can use native encode/interpret to constant
14533 fold (nearly) all BIT_FIELD_REFs. */
14534 if (CONSTANT_CLASS_P (arg0)
14535 && can_native_interpret_type_p (type)
14536 && tree_fits_uhwi_p (TYPE_SIZE_UNIT (TREE_TYPE (arg0)))
14537 /* This limitation should not be necessary; we just need to
14538 round this up to mode size. */
14539 && tree_to_uhwi (op1) % BITS_PER_UNIT == 0
14540 /* Need bit-shifting of the buffer to relax the following. */
14541 && tree_to_uhwi (op2) % BITS_PER_UNIT == 0)
14543 unsigned HOST_WIDE_INT bitpos = tree_to_uhwi (op2);
14544 unsigned HOST_WIDE_INT bitsize = tree_to_uhwi (op1);
14545 unsigned HOST_WIDE_INT clen;
14546 clen = tree_to_uhwi (TYPE_SIZE_UNIT (TREE_TYPE (arg0)));
14547 /* ??? We cannot tell native_encode_expr to start at
14548 some random byte only. So limit us to a reasonable amount
14549 of work. */
14550 if (clen <= 4096)
14552 unsigned char *b = XALLOCAVEC (unsigned char, clen);
14553 unsigned HOST_WIDE_INT len = native_encode_expr (arg0, b, clen);
14554 if (len > 0
14555 && len * BITS_PER_UNIT >= bitpos + bitsize)
14557 tree v = native_interpret_expr (type,
14558 b + bitpos / BITS_PER_UNIT,
14559 bitsize / BITS_PER_UNIT);
14560 if (v)
14561 return v;
14566 return NULL_TREE;
14568 case FMA_EXPR:
14569 /* For integers we can decompose the FMA if possible. */
14570 if (TREE_CODE (arg0) == INTEGER_CST
14571 && TREE_CODE (arg1) == INTEGER_CST)
14572 return fold_build2_loc (loc, PLUS_EXPR, type,
14573 const_binop (MULT_EXPR, arg0, arg1), arg2);
14574 if (integer_zerop (arg2))
14575 return fold_build2_loc (loc, MULT_EXPR, type, arg0, arg1);
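/* For instance (illustrative only): FMA_EXPR <2, 3, 4> on an integer
   type folds to 2 * 3 + 4 == 10 via the PLUS_EXPR built above, and
   FMA_EXPR <X, Y, 0> degenerates to X * Y.  */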
14577 return fold_fma (loc, type, arg0, arg1, arg2);
14579 case VEC_PERM_EXPR:
14580 if (TREE_CODE (arg2) == VECTOR_CST)
14582 unsigned int nelts = TYPE_VECTOR_SUBPARTS (type), i, mask;
14583 unsigned char *sel = XALLOCAVEC (unsigned char, nelts);
14584 bool need_mask_canon = false;
14585 bool all_in_vec0 = true;
14586 bool all_in_vec1 = true;
14587 bool maybe_identity = true;
14588 bool single_arg = (op0 == op1);
14589 bool changed = false;
14591 mask = single_arg ? (nelts - 1) : (2 * nelts - 1);
14592 gcc_assert (nelts == VECTOR_CST_NELTS (arg2));
14593 for (i = 0; i < nelts; i++)
14595 tree val = VECTOR_CST_ELT (arg2, i);
14596 if (TREE_CODE (val) != INTEGER_CST)
14597 return NULL_TREE;
14599 /* Make sure that the perm value is in an acceptable
14600 range. */
14601 wide_int t = val;
14602 if (wi::gtu_p (t, mask))
14604 need_mask_canon = true;
14605 sel[i] = t.to_uhwi () & mask;
14607 else
14608 sel[i] = t.to_uhwi ();
14610 if (sel[i] < nelts)
14611 all_in_vec1 = false;
14612 else
14613 all_in_vec0 = false;
14615 if ((sel[i] & (nelts-1)) != i)
14616 maybe_identity = false;
14619 if (maybe_identity)
14621 if (all_in_vec0)
14622 return op0;
14623 if (all_in_vec1)
14624 return op1;
14627 if (all_in_vec0)
14628 op1 = op0;
14629 else if (all_in_vec1)
14631 op0 = op1;
14632 for (i = 0; i < nelts; i++)
14633 sel[i] -= nelts;
14634 need_mask_canon = true;
14637 if ((TREE_CODE (op0) == VECTOR_CST
14638 || TREE_CODE (op0) == CONSTRUCTOR)
14639 && (TREE_CODE (op1) == VECTOR_CST
14640 || TREE_CODE (op1) == CONSTRUCTOR))
14642 tree t = fold_vec_perm (type, op0, op1, sel);
14643 if (t != NULL_TREE)
14644 return t;
14647 if (op0 == op1 && !single_arg)
14648 changed = true;
14650 if (need_mask_canon && arg2 == op2)
14652 tree *tsel = XALLOCAVEC (tree, nelts);
14653 tree eltype = TREE_TYPE (TREE_TYPE (arg2));
14654 for (i = 0; i < nelts; i++)
14655 tsel[i] = build_int_cst (eltype, sel[i]);
14656 op2 = build_vector (TREE_TYPE (arg2), tsel);
14657 changed = true;
14660 if (changed)
14661 return build3_loc (loc, VEC_PERM_EXPR, type, op0, op1, op2);
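/* Mask canonicalization example (illustrative only): for a two-operand
   V4 permutation MASK is 7, so a selector element of 9 is rewritten as
   9 & 7 == 1 and NEED_MASK_CANON forces a fresh, in-range selector
   vector to be built.  */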
14663 return NULL_TREE;
14665 default:
14666 return NULL_TREE;
14667 } /* switch (code) */
14670 /* Fold a ternary expression of code CODE and type TYPE with operands
14671 OP0, OP1, and OP2. Return the folded expression if folding is
14672 successful. Otherwise, return NULL_TREE.
14673 This is a wrapper around the fold_ternary_loc_1 function, which
14674 does the actual folding. */
14677 tree
14678 fold_ternary_loc (location_t loc, enum tree_code code, tree type,
14679 tree op0, tree op1, tree op2)
14681 tree tem = fold_ternary_loc_1 (loc, code, type, op0, op1, op2);
14682 return tem;
14685 /* Perform constant folding and related simplification of EXPR.
14686 The related simplifications include x*1 => x, x*0 => 0, etc.,
14687 and application of the associative law.
14688 NOP_EXPR conversions may be removed freely (as long as we
14689 are careful not to change the type of the overall expression).
14690 We cannot simplify through a CONVERT_EXPR, FIX_EXPR or FLOAT_EXPR,
14691 but we can constant-fold them if they have constant operands. */
14693 #ifdef ENABLE_FOLD_CHECKING
14694 # define fold(x) fold_1 (x)
14695 static tree fold_1 (tree);
14696 static
14697 #endif
14698 tree
14699 fold (tree expr)
14701 const tree t = expr;
14702 enum tree_code code = TREE_CODE (t);
14703 enum tree_code_class kind = TREE_CODE_CLASS (code);
14704 tree tem;
14705 location_t loc = EXPR_LOCATION (expr);
14707 /* Return right away if a constant. */
14708 if (kind == tcc_constant)
14709 return t;
14711 /* CALL_EXPR-like objects with variable numbers of operands are
14712 treated specially. */
14713 if (kind == tcc_vl_exp)
14715 if (code == CALL_EXPR)
14717 tem = fold_call_expr (loc, expr, false);
14718 return tem ? tem : expr;
14720 return expr;
14723 if (IS_EXPR_CODE_CLASS (kind))
14725 tree type = TREE_TYPE (t);
14726 tree op0, op1, op2;
14728 switch (TREE_CODE_LENGTH (code))
14730 case 1:
14731 op0 = TREE_OPERAND (t, 0);
14732 tem = fold_unary_loc (loc, code, type, op0);
14733 return tem ? tem : expr;
14734 case 2:
14735 op0 = TREE_OPERAND (t, 0);
14736 op1 = TREE_OPERAND (t, 1);
14737 tem = fold_binary_loc (loc, code, type, op0, op1);
14738 return tem ? tem : expr;
14739 case 3:
14740 op0 = TREE_OPERAND (t, 0);
14741 op1 = TREE_OPERAND (t, 1);
14742 op2 = TREE_OPERAND (t, 2);
14743 tem = fold_ternary_loc (loc, code, type, op0, op1, op2);
14744 return tem ? tem : expr;
14745 default:
14746 break;
14750 switch (code)
14752 case ARRAY_REF:
14754 tree op0 = TREE_OPERAND (t, 0);
14755 tree op1 = TREE_OPERAND (t, 1);
14757 if (TREE_CODE (op1) == INTEGER_CST
14758 && TREE_CODE (op0) == CONSTRUCTOR
14759 && ! type_contains_placeholder_p (TREE_TYPE (op0)))
14761 vec<constructor_elt, va_gc> *elts = CONSTRUCTOR_ELTS (op0);
14762 unsigned HOST_WIDE_INT end = vec_safe_length (elts);
14763 unsigned HOST_WIDE_INT begin = 0;
14765 /* Find a matching index by means of a binary search. */
14766 while (begin != end)
14768 unsigned HOST_WIDE_INT middle = (begin + end) / 2;
14769 tree index = (*elts)[middle].index;
14771 if (TREE_CODE (index) == INTEGER_CST
14772 && tree_int_cst_lt (index, op1))
14773 begin = middle + 1;
14774 else if (TREE_CODE (index) == INTEGER_CST
14775 && tree_int_cst_lt (op1, index))
14776 end = middle;
14777 else if (TREE_CODE (index) == RANGE_EXPR
14778 && tree_int_cst_lt (TREE_OPERAND (index, 1), op1))
14779 begin = middle + 1;
14780 else if (TREE_CODE (index) == RANGE_EXPR
14781 && tree_int_cst_lt (op1, TREE_OPERAND (index, 0)))
14782 end = middle;
14783 else
14784 return (*elts)[middle].value;
14788 return t;
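/* The binary search above relies on the CONSTRUCTOR elements being
   sorted by index, as they are for array constructors.  Illustrative
   example: for indices {0, 1, 3}, looking up index 2 narrows BEGIN and
   END until they meet, and the unfolded tree T is returned.  */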
14791 /* Return a VECTOR_CST if possible. */
14792 case CONSTRUCTOR:
14794 tree type = TREE_TYPE (t);
14795 if (TREE_CODE (type) != VECTOR_TYPE)
14796 return t;
14798 tree *vec = XALLOCAVEC (tree, TYPE_VECTOR_SUBPARTS (type));
14799 unsigned HOST_WIDE_INT idx, pos = 0;
14800 tree value;
14802 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (t), idx, value)
14804 if (!CONSTANT_CLASS_P (value))
14805 return t;
14806 if (TREE_CODE (value) == VECTOR_CST)
14808 for (unsigned i = 0; i < VECTOR_CST_NELTS (value); ++i)
14809 vec[pos++] = VECTOR_CST_ELT (value, i);
14811 else
14812 vec[pos++] = value;
14814 for (; pos < TYPE_VECTOR_SUBPARTS (type); ++pos)
14815 vec[pos] = build_zero_cst (TREE_TYPE (type));
14817 return build_vector (type, vec);
14820 case CONST_DECL:
14821 return fold (DECL_INITIAL (t));
14823 default:
14824 return t;
14825 } /* switch (code) */
14828 #ifdef ENABLE_FOLD_CHECKING
14829 #undef fold
14831 static void fold_checksum_tree (const_tree, struct md5_ctx *,
14832 hash_table<pointer_hash<const tree_node> > *);
14833 static void fold_check_failed (const_tree, const_tree);
14834 void print_fold_checksum (const_tree);
14836 /* When --enable-checking=fold, compute a digest of expr before
14837 and after the actual fold call, to check that fold did not accidentally
14838 change the original expr. */
14840 tree
14841 fold (tree expr)
14843 tree ret;
14844 struct md5_ctx ctx;
14845 unsigned char checksum_before[16], checksum_after[16];
14846 hash_table<pointer_hash<const tree_node> > ht (32);
14848 md5_init_ctx (&ctx);
14849 fold_checksum_tree (expr, &ctx, &ht);
14850 md5_finish_ctx (&ctx, checksum_before);
14851 ht.empty ();
14853 ret = fold_1 (expr);
14855 md5_init_ctx (&ctx);
14856 fold_checksum_tree (expr, &ctx, &ht);
14857 md5_finish_ctx (&ctx, checksum_after);
14859 if (memcmp (checksum_before, checksum_after, 16))
14860 fold_check_failed (expr, ret);
14862 return ret;
14865 void
14866 print_fold_checksum (const_tree expr)
14868 struct md5_ctx ctx;
14869 unsigned char checksum[16], cnt;
14870 hash_table<pointer_hash<const tree_node> > ht (32);
14872 md5_init_ctx (&ctx);
14873 fold_checksum_tree (expr, &ctx, &ht);
14874 md5_finish_ctx (&ctx, checksum);
14875 for (cnt = 0; cnt < 16; ++cnt)
14876 fprintf (stderr, "%02x", checksum[cnt]);
14877 putc ('\n', stderr);
14880 static void
14881 fold_check_failed (const_tree expr ATTRIBUTE_UNUSED, const_tree ret ATTRIBUTE_UNUSED)
14883 internal_error ("fold check: original tree changed by fold");
14886 static void
14887 fold_checksum_tree (const_tree expr, struct md5_ctx *ctx,
14888 hash_table<pointer_hash <const tree_node> > *ht)
14890 const tree_node **slot;
14891 enum tree_code code;
14892 union tree_node buf;
14893 int i, len;
14895 recursive_label:
14896 if (expr == NULL)
14897 return;
14898 slot = ht->find_slot (expr, INSERT);
14899 if (*slot != NULL)
14900 return;
14901 *slot = expr;
14902 code = TREE_CODE (expr);
14903 if (TREE_CODE_CLASS (code) == tcc_declaration
14904 && DECL_ASSEMBLER_NAME_SET_P (expr))
14906 /* Allow DECL_ASSEMBLER_NAME to be modified. */
14907 memcpy ((char *) &buf, expr, tree_size (expr));
14908 SET_DECL_ASSEMBLER_NAME ((tree)&buf, NULL);
14909 expr = (tree) &buf;
14911 else if (TREE_CODE_CLASS (code) == tcc_type
14912 && (TYPE_POINTER_TO (expr)
14913 || TYPE_REFERENCE_TO (expr)
14914 || TYPE_CACHED_VALUES_P (expr)
14915 || TYPE_CONTAINS_PLACEHOLDER_INTERNAL (expr)
14916 || TYPE_NEXT_VARIANT (expr)))
14918 /* Allow these fields to be modified. */
14919 tree tmp;
14920 memcpy ((char *) &buf, expr, tree_size (expr));
14921 expr = tmp = (tree) &buf;
14922 TYPE_CONTAINS_PLACEHOLDER_INTERNAL (tmp) = 0;
14923 TYPE_POINTER_TO (tmp) = NULL;
14924 TYPE_REFERENCE_TO (tmp) = NULL;
14925 TYPE_NEXT_VARIANT (tmp) = NULL;
14926 if (TYPE_CACHED_VALUES_P (tmp))
14928 TYPE_CACHED_VALUES_P (tmp) = 0;
14929 TYPE_CACHED_VALUES (tmp) = NULL;
14932 md5_process_bytes (expr, tree_size (expr), ctx);
14933 if (CODE_CONTAINS_STRUCT (code, TS_TYPED))
14934 fold_checksum_tree (TREE_TYPE (expr), ctx, ht);
14935 if (TREE_CODE_CLASS (code) != tcc_type
14936 && TREE_CODE_CLASS (code) != tcc_declaration
14937 && code != TREE_LIST
14938 && code != SSA_NAME
14939 && CODE_CONTAINS_STRUCT (code, TS_COMMON))
14940 fold_checksum_tree (TREE_CHAIN (expr), ctx, ht);
14941 switch (TREE_CODE_CLASS (code))
14943 case tcc_constant:
14944 switch (code)
14946 case STRING_CST:
14947 md5_process_bytes (TREE_STRING_POINTER (expr),
14948 TREE_STRING_LENGTH (expr), ctx);
14949 break;
14950 case COMPLEX_CST:
14951 fold_checksum_tree (TREE_REALPART (expr), ctx, ht);
14952 fold_checksum_tree (TREE_IMAGPART (expr), ctx, ht);
14953 break;
14954 case VECTOR_CST:
14955 for (i = 0; i < (int) VECTOR_CST_NELTS (expr); ++i)
14956 fold_checksum_tree (VECTOR_CST_ELT (expr, i), ctx, ht);
14957 break;
14958 default:
14959 break;
14961 break;
14962 case tcc_exceptional:
14963 switch (code)
14965 case TREE_LIST:
14966 fold_checksum_tree (TREE_PURPOSE (expr), ctx, ht);
14967 fold_checksum_tree (TREE_VALUE (expr), ctx, ht);
14968 expr = TREE_CHAIN (expr);
14969 goto recursive_label;
14970 break;
14971 case TREE_VEC:
14972 for (i = 0; i < TREE_VEC_LENGTH (expr); ++i)
14973 fold_checksum_tree (TREE_VEC_ELT (expr, i), ctx, ht);
14974 break;
14975 default:
14976 break;
14978 break;
14979 case tcc_expression:
14980 case tcc_reference:
14981 case tcc_comparison:
14982 case tcc_unary:
14983 case tcc_binary:
14984 case tcc_statement:
14985 case tcc_vl_exp:
14986 len = TREE_OPERAND_LENGTH (expr);
14987 for (i = 0; i < len; ++i)
14988 fold_checksum_tree (TREE_OPERAND (expr, i), ctx, ht);
14989 break;
14990 case tcc_declaration:
14991 fold_checksum_tree (DECL_NAME (expr), ctx, ht);
14992 fold_checksum_tree (DECL_CONTEXT (expr), ctx, ht);
14993 if (CODE_CONTAINS_STRUCT (TREE_CODE (expr), TS_DECL_COMMON))
14995 fold_checksum_tree (DECL_SIZE (expr), ctx, ht);
14996 fold_checksum_tree (DECL_SIZE_UNIT (expr), ctx, ht);
14997 fold_checksum_tree (DECL_INITIAL (expr), ctx, ht);
14998 fold_checksum_tree (DECL_ABSTRACT_ORIGIN (expr), ctx, ht);
14999 fold_checksum_tree (DECL_ATTRIBUTES (expr), ctx, ht);
15002 if (CODE_CONTAINS_STRUCT (TREE_CODE (expr), TS_DECL_NON_COMMON))
15004 if (TREE_CODE (expr) == FUNCTION_DECL)
15006 fold_checksum_tree (DECL_VINDEX (expr), ctx, ht);
15007 fold_checksum_tree (DECL_ARGUMENTS (expr), ctx, ht);
15009 fold_checksum_tree (DECL_RESULT_FLD (expr), ctx, ht);
15011 break;
15012 case tcc_type:
15013 if (TREE_CODE (expr) == ENUMERAL_TYPE)
15014 fold_checksum_tree (TYPE_VALUES (expr), ctx, ht);
15015 fold_checksum_tree (TYPE_SIZE (expr), ctx, ht);
15016 fold_checksum_tree (TYPE_SIZE_UNIT (expr), ctx, ht);
15017 fold_checksum_tree (TYPE_ATTRIBUTES (expr), ctx, ht);
15018 fold_checksum_tree (TYPE_NAME (expr), ctx, ht);
15019 if (INTEGRAL_TYPE_P (expr)
15020 || SCALAR_FLOAT_TYPE_P (expr))
15022 fold_checksum_tree (TYPE_MIN_VALUE (expr), ctx, ht);
15023 fold_checksum_tree (TYPE_MAX_VALUE (expr), ctx, ht);
15025 fold_checksum_tree (TYPE_MAIN_VARIANT (expr), ctx, ht);
15026 if (TREE_CODE (expr) == RECORD_TYPE
15027 || TREE_CODE (expr) == UNION_TYPE
15028 || TREE_CODE (expr) == QUAL_UNION_TYPE)
15029 fold_checksum_tree (TYPE_BINFO (expr), ctx, ht);
15030 fold_checksum_tree (TYPE_CONTEXT (expr), ctx, ht);
15031 break;
15032 default:
15033 break;
15037 /* Helper function for outputting the checksum of a tree T. When
15038 debugging with gdb, you can "define mynext" to be "next" followed
15039 by "call debug_fold_checksum (op0)", then just trace down till the
15040 outputs differ. */
15042 DEBUG_FUNCTION void
15043 debug_fold_checksum (const_tree t)
15045 int i;
15046 unsigned char checksum[16];
15047 struct md5_ctx ctx;
15048 hash_table<pointer_hash<const tree_node> > ht (32);
15050 md5_init_ctx (&ctx);
15051 fold_checksum_tree (t, &ctx, &ht);
15052 md5_finish_ctx (&ctx, checksum);
15053 ht.empty ();
15055 for (i = 0; i < 16; i++)
15056 fprintf (stderr, "%d ", checksum[i]);
15058 fprintf (stderr, "\n");
15061 #endif
15063 /* Fold a unary tree expression with code CODE of type TYPE with an
15064 operand OP0. LOC is the location of the resulting expression.
15065 Return a folded expression if successful. Otherwise, return a tree
15066 expression with code CODE of type TYPE with an operand OP0. */
15068 tree
15069 fold_build1_stat_loc (location_t loc,
15070 enum tree_code code, tree type, tree op0 MEM_STAT_DECL)
15072 tree tem;
15073 #ifdef ENABLE_FOLD_CHECKING
15074 unsigned char checksum_before[16], checksum_after[16];
15075 struct md5_ctx ctx;
15076 hash_table<pointer_hash<const tree_node> > ht (32);
15078 md5_init_ctx (&ctx);
15079 fold_checksum_tree (op0, &ctx, &ht);
15080 md5_finish_ctx (&ctx, checksum_before);
15081 ht.empty ();
15082 #endif
15084 tem = fold_unary_loc (loc, code, type, op0);
15085 if (!tem)
15086 tem = build1_stat_loc (loc, code, type, op0 PASS_MEM_STAT);
15088 #ifdef ENABLE_FOLD_CHECKING
15089 md5_init_ctx (&ctx);
15090 fold_checksum_tree (op0, &ctx, &ht);
15091 md5_finish_ctx (&ctx, checksum_after);
15093 if (memcmp (checksum_before, checksum_after, 16))
15094 fold_check_failed (op0, tem);
15095 #endif
15096 return tem;
15099 /* Fold a binary tree expression with code CODE of type TYPE with
15100 operands OP0 and OP1. LOC is the location of the resulting
15101 expression. Return a folded expression if successful. Otherwise,
15102 return a tree expression with code CODE of type TYPE with operands
15103 OP0 and OP1. */
15105 tree
15106 fold_build2_stat_loc (location_t loc,
15107 enum tree_code code, tree type, tree op0, tree op1
15108 MEM_STAT_DECL)
15110 tree tem;
15111 #ifdef ENABLE_FOLD_CHECKING
15112 unsigned char checksum_before_op0[16],
15113 checksum_before_op1[16],
15114 checksum_after_op0[16],
15115 checksum_after_op1[16];
15116 struct md5_ctx ctx;
15117 hash_table<pointer_hash<const tree_node> > ht (32);
15119 md5_init_ctx (&ctx);
15120 fold_checksum_tree (op0, &ctx, &ht);
15121 md5_finish_ctx (&ctx, checksum_before_op0);
15122 ht.empty ();
15124 md5_init_ctx (&ctx);
15125 fold_checksum_tree (op1, &ctx, &ht);
15126 md5_finish_ctx (&ctx, checksum_before_op1);
15127 ht.empty ();
15128 #endif
15130 tem = fold_binary_loc (loc, code, type, op0, op1);
15131 if (!tem)
15132 tem = build2_stat_loc (loc, code, type, op0, op1 PASS_MEM_STAT);
15134 #ifdef ENABLE_FOLD_CHECKING
15135 md5_init_ctx (&ctx);
15136 fold_checksum_tree (op0, &ctx, &ht);
15137 md5_finish_ctx (&ctx, checksum_after_op0);
15138 ht.empty ();
15140 if (memcmp (checksum_before_op0, checksum_after_op0, 16))
15141 fold_check_failed (op0, tem);
15143 md5_init_ctx (&ctx);
15144 fold_checksum_tree (op1, &ctx, &ht);
15145 md5_finish_ctx (&ctx, checksum_after_op1);
15147 if (memcmp (checksum_before_op1, checksum_after_op1, 16))
15148 fold_check_failed (op1, tem);
15149 #endif
15150 return tem;
15153 /* Fold a ternary tree expression with code CODE of type TYPE with
15154 operands OP0, OP1, and OP2. Return a folded expression if
15155 successful. Otherwise, return a tree expression with code CODE of
15156 type TYPE with operands OP0, OP1, and OP2. */
15158 tree
15159 fold_build3_stat_loc (location_t loc, enum tree_code code, tree type,
15160 tree op0, tree op1, tree op2 MEM_STAT_DECL)
15162 tree tem;
15163 #ifdef ENABLE_FOLD_CHECKING
15164 unsigned char checksum_before_op0[16],
15165 checksum_before_op1[16],
15166 checksum_before_op2[16],
15167 checksum_after_op0[16],
15168 checksum_after_op1[16],
15169 checksum_after_op2[16];
15170 struct md5_ctx ctx;
15171 hash_table<pointer_hash<const tree_node> > ht (32);
15173 md5_init_ctx (&ctx);
15174 fold_checksum_tree (op0, &ctx, &ht);
15175 md5_finish_ctx (&ctx, checksum_before_op0);
15176 ht.empty ();
15178 md5_init_ctx (&ctx);
15179 fold_checksum_tree (op1, &ctx, &ht);
15180 md5_finish_ctx (&ctx, checksum_before_op1);
15181 ht.empty ();
15183 md5_init_ctx (&ctx);
15184 fold_checksum_tree (op2, &ctx, &ht);
15185 md5_finish_ctx (&ctx, checksum_before_op2);
15186 ht.empty ();
15187 #endif
15189 gcc_assert (TREE_CODE_CLASS (code) != tcc_vl_exp);
15190 tem = fold_ternary_loc (loc, code, type, op0, op1, op2);
15191 if (!tem)
15192 tem = build3_stat_loc (loc, code, type, op0, op1, op2 PASS_MEM_STAT);
15194 #ifdef ENABLE_FOLD_CHECKING
15195 md5_init_ctx (&ctx);
15196 fold_checksum_tree (op0, &ctx, &ht);
15197 md5_finish_ctx (&ctx, checksum_after_op0);
15198 ht.empty ();
15200 if (memcmp (checksum_before_op0, checksum_after_op0, 16))
15201 fold_check_failed (op0, tem);
15203 md5_init_ctx (&ctx);
15204 fold_checksum_tree (op1, &ctx, &ht);
15205 md5_finish_ctx (&ctx, checksum_after_op1);
15206 ht.empty ();
15208 if (memcmp (checksum_before_op1, checksum_after_op1, 16))
15209 fold_check_failed (op1, tem);
15211 md5_init_ctx (&ctx);
15212 fold_checksum_tree (op2, &ctx, &ht);
15213 md5_finish_ctx (&ctx, checksum_after_op2);
15215 if (memcmp (checksum_before_op2, checksum_after_op2, 16))
15216 fold_check_failed (op2, tem);
15217 #endif
15218 return tem;
15221 /* Fold a CALL_EXPR expression of type TYPE with operands FN and NARGS
15222 arguments in ARGARRAY, and a null static chain.
15223 Return a folded expression if successful. Otherwise, return a CALL_EXPR
15224 of type TYPE from the given operands as constructed by build_call_array. */
15226 tree
15227 fold_build_call_array_loc (location_t loc, tree type, tree fn,
15228 int nargs, tree *argarray)
15230 tree tem;
15231 #ifdef ENABLE_FOLD_CHECKING
15232 unsigned char checksum_before_fn[16],
15233 checksum_before_arglist[16],
15234 checksum_after_fn[16],
15235 checksum_after_arglist[16];
15236 struct md5_ctx ctx;
15237 hash_table<pointer_hash<const tree_node> > ht (32);
15238 int i;
15240 md5_init_ctx (&ctx);
15241 fold_checksum_tree (fn, &ctx, &ht);
15242 md5_finish_ctx (&ctx, checksum_before_fn);
15243 ht.empty ();
15245 md5_init_ctx (&ctx);
15246 for (i = 0; i < nargs; i++)
15247 fold_checksum_tree (argarray[i], &ctx, &ht);
15248 md5_finish_ctx (&ctx, checksum_before_arglist);
15249 ht.empty ();
15250 #endif
15252 tem = fold_builtin_call_array (loc, type, fn, nargs, argarray);
15254 #ifdef ENABLE_FOLD_CHECKING
15255 md5_init_ctx (&ctx);
15256 fold_checksum_tree (fn, &ctx, &ht);
15257 md5_finish_ctx (&ctx, checksum_after_fn);
15258 ht.empty ();
15260 if (memcmp (checksum_before_fn, checksum_after_fn, 16))
15261 fold_check_failed (fn, tem);
15263 md5_init_ctx (&ctx);
15264 for (i = 0; i < nargs; i++)
15265 fold_checksum_tree (argarray[i], &ctx, &ht);
15266 md5_finish_ctx (&ctx, checksum_after_arglist);
15268 if (memcmp (checksum_before_arglist, checksum_after_arglist, 16))
15269 fold_check_failed (NULL_TREE, tem);
15270 #endif
15271 return tem;
15274 /* Perform constant folding and related simplification of initializer
15275 expression EXPR. These behave identically to "fold_buildN" but ignore
15276 potential run-time traps and exceptions that fold must preserve. */
15278 #define START_FOLD_INIT \
15279 int saved_signaling_nans = flag_signaling_nans;\
15280 int saved_trapping_math = flag_trapping_math;\
15281 int saved_rounding_math = flag_rounding_math;\
15282 int saved_trapv = flag_trapv;\
15283 int saved_folding_initializer = folding_initializer;\
15284 flag_signaling_nans = 0;\
15285 flag_trapping_math = 0;\
15286 flag_rounding_math = 0;\
15287 flag_trapv = 0;\
15288 folding_initializer = 1;
15290 #define END_FOLD_INIT \
15291 flag_signaling_nans = saved_signaling_nans;\
15292 flag_trapping_math = saved_trapping_math;\
15293 flag_rounding_math = saved_rounding_math;\
15294 flag_trapv = saved_trapv;\
15295 folding_initializer = saved_folding_initializer;
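/* Usage sketch (hypothetical caller, not part of this file):

     tree t = fold_build2_initializer_loc (loc, RDIV_EXPR, dbl_type,
                                           one, three);

   folds 1.0 / 3.0 to a REAL_CST even when flag_rounding_math or
   flag_trapping_math is set, because the macros above temporarily
   clear those flags while the initializer is folded.  */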
15297 tree
15298 fold_build1_initializer_loc (location_t loc, enum tree_code code,
15299 tree type, tree op)
15301 tree result;
15302 START_FOLD_INIT;
15304 result = fold_build1_loc (loc, code, type, op);
15306 END_FOLD_INIT;
15307 return result;
15310 tree
15311 fold_build2_initializer_loc (location_t loc, enum tree_code code,
15312 tree type, tree op0, tree op1)
15314 tree result;
15315 START_FOLD_INIT;
15317 result = fold_build2_loc (loc, code, type, op0, op1);
15319 END_FOLD_INIT;
15320 return result;
15323 tree
15324 fold_build_call_array_initializer_loc (location_t loc, tree type, tree fn,
15325 int nargs, tree *argarray)
15327 tree result;
15328 START_FOLD_INIT;
15330 result = fold_build_call_array_loc (loc, type, fn, nargs, argarray);
15332 END_FOLD_INIT;
15333 return result;
15336 #undef START_FOLD_INIT
15337 #undef END_FOLD_INIT
15339 /* Determine if first argument is a multiple of second argument. Return 0 if
15340 it is not, or if we cannot easily determine it to be.
15342 An example of the sort of thing we care about (at this point; this routine
15343 could surely be made more general, and expanded to do what the *_DIV_EXPR's
15344 fold cases do now) is discovering that
15346 SAVE_EXPR (I) * SAVE_EXPR (J * 8)
15348 is a multiple of
15350 SAVE_EXPR (J * 8)
15352 when we know that the two SAVE_EXPR (J * 8) nodes are the same node.
15354 This code also handles discovering that
15356 SAVE_EXPR (I) * SAVE_EXPR (J * 8)
15358 is a multiple of 8 so we don't have to worry about dealing with a
15359 possible remainder.
15361 Note that we *look* inside a SAVE_EXPR only to determine how it was
15362 calculated; it is not safe for fold to do much of anything else with the
15363 internals of a SAVE_EXPR, since it cannot know when it will be evaluated
15364 at run time. For example, the latter example above *cannot* be implemented
15365 as SAVE_EXPR (I) * J or any variant thereof, since the value of J at
15366 evaluation time of the original SAVE_EXPR is not necessarily the same at
15367 the time the new expression is evaluated. The only optimization of this
15368 sort that would be valid is changing
15370 SAVE_EXPR (I) * SAVE_EXPR (SAVE_EXPR (J) * 8)
15372 divided by 8 to
15374 SAVE_EXPR (I) * SAVE_EXPR (J)
15376 (where the same SAVE_EXPR (J) is used in the original and the
15377 transformed version). */
15379 int
15380 multiple_of_p (tree type, const_tree top, const_tree bottom)
15382 if (operand_equal_p (top, bottom, 0))
15383 return 1;
15385 if (TREE_CODE (type) != INTEGER_TYPE)
15386 return 0;
15388 switch (TREE_CODE (top))
15390 case BIT_AND_EXPR:
15391 /* Bitwise and provides a power of two multiple. If the mask is
15392 a multiple of BOTTOM then TOP is a multiple of BOTTOM. */
15393 if (!integer_pow2p (bottom))
15394 return 0;
15395 /* FALLTHRU */
15397 case MULT_EXPR:
15398 return (multiple_of_p (type, TREE_OPERAND (top, 0), bottom)
15399 || multiple_of_p (type, TREE_OPERAND (top, 1), bottom));
15401 case PLUS_EXPR:
15402 case MINUS_EXPR:
15403 return (multiple_of_p (type, TREE_OPERAND (top, 0), bottom)
15404 && multiple_of_p (type, TREE_OPERAND (top, 1), bottom));
15406 case LSHIFT_EXPR:
15407 if (TREE_CODE (TREE_OPERAND (top, 1)) == INTEGER_CST)
15409 tree op1, t1;
15411 op1 = TREE_OPERAND (top, 1);
15412 /* const_binop may not detect overflow correctly,
15413 so check for it explicitly here. */
15414 if (wi::gtu_p (TYPE_PRECISION (TREE_TYPE (size_one_node)), op1)
15415 && 0 != (t1 = fold_convert (type,
15416 const_binop (LSHIFT_EXPR,
15417 size_one_node,
15418 op1)))
15419 && !TREE_OVERFLOW (t1))
15420 return multiple_of_p (type, t1, bottom);
15422 return 0;
15424 case NOP_EXPR:
15425 /* Can't handle conversions from non-integral or wider integral type. */
15426 if ((TREE_CODE (TREE_TYPE (TREE_OPERAND (top, 0))) != INTEGER_TYPE)
15427 || (TYPE_PRECISION (type)
15428 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (top, 0)))))
15429 return 0;
15431 /* .. fall through ... */
15433 case SAVE_EXPR:
15434 return multiple_of_p (type, TREE_OPERAND (top, 0), bottom);
15436 case COND_EXPR:
15437 return (multiple_of_p (type, TREE_OPERAND (top, 1), bottom)
15438 && multiple_of_p (type, TREE_OPERAND (top, 2), bottom));
15440 case INTEGER_CST:
15441 if (TREE_CODE (bottom) != INTEGER_CST
15442 || integer_zerop (bottom)
15443 || (TYPE_UNSIGNED (type)
15444 && (tree_int_cst_sgn (top) < 0
15445 || tree_int_cst_sgn (bottom) < 0)))
15446 return 0;
15447 return wi::multiple_of_p (wi::to_widest (top), wi::to_widest (bottom),
15448 SIGNED);
15450 default:
15451 return 0;
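/* Worked example (illustrative only): with TOP == SAVE_EXPR (I)
   * SAVE_EXPR (J * 8) and BOTTOM == 8, the MULT_EXPR case succeeds
   because the recursion reaches the INTEGER_CST 8, which is trivially
   a multiple of BOTTOM.  */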
15455 /* Return true if CODE or TYPE is known to be non-negative. */
15457 static bool
15458 tree_simple_nonnegative_warnv_p (enum tree_code code, tree type)
15460 if ((TYPE_PRECISION (type) != 1 || TYPE_UNSIGNED (type))
15461 && truth_value_p (code))
15462 /* Truth values evaluate to 0 or 1, which is nonnegative unless we
15463 have a signed:1 type (where the values are -1 and 0). */
15464 return true;
15465 return false;
15468 /* Return true if (CODE OP0) is known to be non-negative. If the return
15469 value is based on the assumption that signed overflow is undefined,
15470 set *STRICT_OVERFLOW_P to true; otherwise, don't change
15471 *STRICT_OVERFLOW_P. */
15473 bool
15474 tree_unary_nonnegative_warnv_p (enum tree_code code, tree type, tree op0,
15475 bool *strict_overflow_p)
15477 if (TYPE_UNSIGNED (type))
15478 return true;
15480 switch (code)
15482 case ABS_EXPR:
15483 /* We can't return 1 if flag_wrapv is set because
15484 ABS_EXPR<INT_MIN> = INT_MIN. */
15485 if (!INTEGRAL_TYPE_P (type))
15486 return true;
15487 if (TYPE_OVERFLOW_UNDEFINED (type))
15489 *strict_overflow_p = true;
15490 return true;
15492 break;
15494 case NON_LVALUE_EXPR:
15495 case FLOAT_EXPR:
15496 case FIX_TRUNC_EXPR:
15497 return tree_expr_nonnegative_warnv_p (op0,
15498 strict_overflow_p);
15500 case NOP_EXPR:
15502 tree inner_type = TREE_TYPE (op0);
15503 tree outer_type = type;
15505 if (TREE_CODE (outer_type) == REAL_TYPE)
15507 if (TREE_CODE (inner_type) == REAL_TYPE)
15508 return tree_expr_nonnegative_warnv_p (op0,
15509 strict_overflow_p);
15510 if (INTEGRAL_TYPE_P (inner_type))
15512 if (TYPE_UNSIGNED (inner_type))
15513 return true;
15514 return tree_expr_nonnegative_warnv_p (op0,
15515 strict_overflow_p);
15518 else if (INTEGRAL_TYPE_P (outer_type))
15520 if (TREE_CODE (inner_type) == REAL_TYPE)
15521 return tree_expr_nonnegative_warnv_p (op0,
15522 strict_overflow_p);
15523 if (INTEGRAL_TYPE_P (inner_type))
15524 return TYPE_PRECISION (inner_type) < TYPE_PRECISION (outer_type)
15525 && TYPE_UNSIGNED (inner_type);
15528 break;
15530 default:
15531 return tree_simple_nonnegative_warnv_p (code, type);
15534 /* We don't know the sign of `t', so be conservative and return false. */
15535 return false;
15538 /* Return true if (CODE OP0 OP1) is known to be non-negative. If the return
15539 value is based on the assumption that signed overflow is undefined,
15540 set *STRICT_OVERFLOW_P to true; otherwise, don't change
15541 *STRICT_OVERFLOW_P. */
15543 bool
15544 tree_binary_nonnegative_warnv_p (enum tree_code code, tree type, tree op0,
15545 tree op1, bool *strict_overflow_p)
15547 if (TYPE_UNSIGNED (type))
15548 return true;
15550 switch (code)
15552 case POINTER_PLUS_EXPR:
15553 case PLUS_EXPR:
15554 if (FLOAT_TYPE_P (type))
15555 return (tree_expr_nonnegative_warnv_p (op0,
15556 strict_overflow_p)
15557 && tree_expr_nonnegative_warnv_p (op1,
15558 strict_overflow_p));
15560 /* zero_extend(x) + zero_extend(y) is non-negative if x and y are
15561 both unsigned and at least 2 bits shorter than the result. */
15562 if (TREE_CODE (type) == INTEGER_TYPE
15563 && TREE_CODE (op0) == NOP_EXPR
15564 && TREE_CODE (op1) == NOP_EXPR)
15566 tree inner1 = TREE_TYPE (TREE_OPERAND (op0, 0));
15567 tree inner2 = TREE_TYPE (TREE_OPERAND (op1, 0));
15568 if (TREE_CODE (inner1) == INTEGER_TYPE && TYPE_UNSIGNED (inner1)
15569 && TREE_CODE (inner2) == INTEGER_TYPE && TYPE_UNSIGNED (inner2))
15571 unsigned int prec = MAX (TYPE_PRECISION (inner1),
15572 TYPE_PRECISION (inner2)) + 1;
15573 return prec < TYPE_PRECISION (type);
15576 break;
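/* For example (illustrative only): adding two zero-extended 8-bit
   values in a 32-bit int gives PREC == MAX (8, 8) + 1 == 9 < 32, and
   indeed the sum lies in [0, 510], so it is nonnegative.  */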
15578 case MULT_EXPR:
15579 if (FLOAT_TYPE_P (type) || TYPE_OVERFLOW_UNDEFINED (type))
15581 /* x * x is always non-negative for floating point x
15582 or without overflow. */
15583 if (operand_equal_p (op0, op1, 0)
15584 || (tree_expr_nonnegative_warnv_p (op0, strict_overflow_p)
15585 && tree_expr_nonnegative_warnv_p (op1, strict_overflow_p)))
15587 if (TYPE_OVERFLOW_UNDEFINED (type))
15588 *strict_overflow_p = true;
15589 return true;
15593 /* zero_extend(x) * zero_extend(y) is non-negative if x and y are
15594 both unsigned and the sum of their precisions is smaller than that of the result. */
15595 if (TREE_CODE (type) == INTEGER_TYPE
15596 && (TREE_CODE (op0) == NOP_EXPR || TREE_CODE (op0) == INTEGER_CST)
15597 && (TREE_CODE (op1) == NOP_EXPR || TREE_CODE (op1) == INTEGER_CST))
15599 tree inner0 = (TREE_CODE (op0) == NOP_EXPR)
15600 ? TREE_TYPE (TREE_OPERAND (op0, 0))
15601 : TREE_TYPE (op0);
15602 tree inner1 = (TREE_CODE (op1) == NOP_EXPR)
15603 ? TREE_TYPE (TREE_OPERAND (op1, 0))
15604 : TREE_TYPE (op1);
15606 bool unsigned0 = TYPE_UNSIGNED (inner0);
15607 bool unsigned1 = TYPE_UNSIGNED (inner1);
15609 if (TREE_CODE (op0) == INTEGER_CST)
15610 unsigned0 = unsigned0 || tree_int_cst_sgn (op0) >= 0;
15612 if (TREE_CODE (op1) == INTEGER_CST)
15613 unsigned1 = unsigned1 || tree_int_cst_sgn (op1) >= 0;
15615 if (TREE_CODE (inner0) == INTEGER_TYPE && unsigned0
15616 && TREE_CODE (inner1) == INTEGER_TYPE && unsigned1)
15618 unsigned int precision0 = (TREE_CODE (op0) == INTEGER_CST)
15619 ? tree_int_cst_min_precision (op0, UNSIGNED)
15620 : TYPE_PRECISION (inner0);
15622 unsigned int precision1 = (TREE_CODE (op1) == INTEGER_CST)
15623 ? tree_int_cst_min_precision (op1, UNSIGNED)
15624 : TYPE_PRECISION (inner1);
15626 return precision0 + precision1 < TYPE_PRECISION (type);
15629 return false;
15631 case BIT_AND_EXPR:
15632 case MAX_EXPR:
15633 return (tree_expr_nonnegative_warnv_p (op0,
15634 strict_overflow_p)
15635 || tree_expr_nonnegative_warnv_p (op1,
15636 strict_overflow_p));
15638 case BIT_IOR_EXPR:
15639 case BIT_XOR_EXPR:
15640 case MIN_EXPR:
15641 case RDIV_EXPR:
15642 case TRUNC_DIV_EXPR:
15643 case CEIL_DIV_EXPR:
15644 case FLOOR_DIV_EXPR:
15645 case ROUND_DIV_EXPR:
15646 return (tree_expr_nonnegative_warnv_p (op0,
15647 strict_overflow_p)
15648 && tree_expr_nonnegative_warnv_p (op1,
15649 strict_overflow_p));
15651 case TRUNC_MOD_EXPR:
15652 case CEIL_MOD_EXPR:
15653 case FLOOR_MOD_EXPR:
15654 case ROUND_MOD_EXPR:
15655 return tree_expr_nonnegative_warnv_p (op0,
15656 strict_overflow_p);
15657 default:
15658 return tree_simple_nonnegative_warnv_p (code, type);
15661 /* We don't know the sign of `t', so be conservative and return false. */
15662 return false;
15665 /* Return true if T is known to be non-negative. If the return
15666 value is based on the assumption that signed overflow is undefined,
15667 set *STRICT_OVERFLOW_P to true; otherwise, don't change
15668 *STRICT_OVERFLOW_P. */
15670 bool
15671 tree_single_nonnegative_warnv_p (tree t, bool *strict_overflow_p)
15673 if (TYPE_UNSIGNED (TREE_TYPE (t)))
15674 return true;
15676 switch (TREE_CODE (t))
15678 case INTEGER_CST:
15679 return tree_int_cst_sgn (t) >= 0;
15681 case REAL_CST:
15682 return ! REAL_VALUE_NEGATIVE (TREE_REAL_CST (t));
15684 case FIXED_CST:
15685 return ! FIXED_VALUE_NEGATIVE (TREE_FIXED_CST (t));
15687 case COND_EXPR:
15688 return (tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 1),
15689 strict_overflow_p)
15690 && tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 2),
15691 strict_overflow_p));
15692 default:
15693 return tree_simple_nonnegative_warnv_p (TREE_CODE (t),
15694 TREE_TYPE (t));
15696 /* We don't know the sign of `t', so be conservative and return false. */
15697 return false;
15700 /* Return true if T is known to be non-negative. If the return
15701 value is based on the assumption that signed overflow is undefined,
15702 set *STRICT_OVERFLOW_P to true; otherwise, don't change
15703 *STRICT_OVERFLOW_P. */
15705 bool
15706 tree_call_nonnegative_warnv_p (tree type, tree fndecl,
15707 tree arg0, tree arg1, bool *strict_overflow_p)
15709 if (fndecl && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL)
15710 switch (DECL_FUNCTION_CODE (fndecl))
15712 CASE_FLT_FN (BUILT_IN_ACOS):
15713 CASE_FLT_FN (BUILT_IN_ACOSH):
15714 CASE_FLT_FN (BUILT_IN_CABS):
15715 CASE_FLT_FN (BUILT_IN_COSH):
15716 CASE_FLT_FN (BUILT_IN_ERFC):
15717 CASE_FLT_FN (BUILT_IN_EXP):
15718 CASE_FLT_FN (BUILT_IN_EXP10):
15719 CASE_FLT_FN (BUILT_IN_EXP2):
15720 CASE_FLT_FN (BUILT_IN_FABS):
15721 CASE_FLT_FN (BUILT_IN_FDIM):
15722 CASE_FLT_FN (BUILT_IN_HYPOT):
15723 CASE_FLT_FN (BUILT_IN_POW10):
15724 CASE_INT_FN (BUILT_IN_FFS):
15725 CASE_INT_FN (BUILT_IN_PARITY):
15726 CASE_INT_FN (BUILT_IN_POPCOUNT):
15727 CASE_INT_FN (BUILT_IN_CLZ):
15728 CASE_INT_FN (BUILT_IN_CLRSB):
15729 case BUILT_IN_BSWAP32:
15730 case BUILT_IN_BSWAP64:
15731 /* Always true. */
15732 return true;
15734 CASE_FLT_FN (BUILT_IN_SQRT):
15735 /* sqrt(-0.0) is -0.0. */
15736 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (type)))
15737 return true;
15738 return tree_expr_nonnegative_warnv_p (arg0,
15739 strict_overflow_p);
15741 CASE_FLT_FN (BUILT_IN_ASINH):
15742 CASE_FLT_FN (BUILT_IN_ATAN):
15743 CASE_FLT_FN (BUILT_IN_ATANH):
15744 CASE_FLT_FN (BUILT_IN_CBRT):
15745 CASE_FLT_FN (BUILT_IN_CEIL):
15746 CASE_FLT_FN (BUILT_IN_ERF):
15747 CASE_FLT_FN (BUILT_IN_EXPM1):
15748 CASE_FLT_FN (BUILT_IN_FLOOR):
15749 CASE_FLT_FN (BUILT_IN_FMOD):
15750 CASE_FLT_FN (BUILT_IN_FREXP):
15751 CASE_FLT_FN (BUILT_IN_ICEIL):
15752 CASE_FLT_FN (BUILT_IN_IFLOOR):
15753 CASE_FLT_FN (BUILT_IN_IRINT):
15754 CASE_FLT_FN (BUILT_IN_IROUND):
15755 CASE_FLT_FN (BUILT_IN_LCEIL):
15756 CASE_FLT_FN (BUILT_IN_LDEXP):
15757 CASE_FLT_FN (BUILT_IN_LFLOOR):
15758 CASE_FLT_FN (BUILT_IN_LLCEIL):
15759 CASE_FLT_FN (BUILT_IN_LLFLOOR):
15760 CASE_FLT_FN (BUILT_IN_LLRINT):
15761 CASE_FLT_FN (BUILT_IN_LLROUND):
15762 CASE_FLT_FN (BUILT_IN_LRINT):
15763 CASE_FLT_FN (BUILT_IN_LROUND):
15764 CASE_FLT_FN (BUILT_IN_MODF):
15765 CASE_FLT_FN (BUILT_IN_NEARBYINT):
15766 CASE_FLT_FN (BUILT_IN_RINT):
15767 CASE_FLT_FN (BUILT_IN_ROUND):
15768 CASE_FLT_FN (BUILT_IN_SCALB):
15769 CASE_FLT_FN (BUILT_IN_SCALBLN):
15770 CASE_FLT_FN (BUILT_IN_SCALBN):
15771 CASE_FLT_FN (BUILT_IN_SIGNBIT):
15772 CASE_FLT_FN (BUILT_IN_SIGNIFICAND):
15773 CASE_FLT_FN (BUILT_IN_SINH):
15774 CASE_FLT_FN (BUILT_IN_TANH):
15775 CASE_FLT_FN (BUILT_IN_TRUNC):
15776 /* True if the 1st argument is nonnegative. */
15777 return tree_expr_nonnegative_warnv_p (arg0,
15778 strict_overflow_p);
15780 CASE_FLT_FN (BUILT_IN_FMAX):
15781 /* True if the 1st OR 2nd argument is nonnegative. */
15782 return (tree_expr_nonnegative_warnv_p (arg0,
15783 strict_overflow_p)
15784 || (tree_expr_nonnegative_warnv_p (arg1,
15785 strict_overflow_p)));
15787 CASE_FLT_FN (BUILT_IN_FMIN):
15788 /* True if the 1st AND 2nd arguments are nonnegative. */
15789 return (tree_expr_nonnegative_warnv_p (arg0,
15790 strict_overflow_p)
15791 && (tree_expr_nonnegative_warnv_p (arg1,
15792 strict_overflow_p)));
15794 CASE_FLT_FN (BUILT_IN_COPYSIGN):
15795 /* True if the 2nd argument is nonnegative. */
15796 return tree_expr_nonnegative_warnv_p (arg1,
15797 strict_overflow_p);
15799 CASE_FLT_FN (BUILT_IN_POWI):
15800 /* True if the 1st argument is nonnegative or the second
15801 argument is an even integer. */
15802 if (TREE_CODE (arg1) == INTEGER_CST
15803 && (TREE_INT_CST_LOW (arg1) & 1) == 0)
15804 return true;
15805 return tree_expr_nonnegative_warnv_p (arg0,
15806 strict_overflow_p);
15808 CASE_FLT_FN (BUILT_IN_POW):
15809 /* True if the 1st argument is nonnegative or the second
15810 argument is an even integer valued real. */
15811 if (TREE_CODE (arg1) == REAL_CST)
15813 REAL_VALUE_TYPE c;
15814 HOST_WIDE_INT n;
15816 c = TREE_REAL_CST (arg1);
15817 n = real_to_integer (&c);
15818 if ((n & 1) == 0)
15820 REAL_VALUE_TYPE cint;
15821 real_from_integer (&cint, VOIDmode, n, SIGNED);
15822 if (real_identical (&c, &cint))
15823 return true;
15826 return tree_expr_nonnegative_warnv_p (arg0,
15827 strict_overflow_p);
15829 default:
15830 break;
15832 return tree_simple_nonnegative_warnv_p (CALL_EXPR,
15833 type);
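/* Illustration of the BUILT_IN_POW case above (editorial, not part of
   the original source): for pow (x, 2.0) the exponent is a REAL_CST
   whose integer value 2 is even and round-trips through
   real_from_integer/real_identical, so the call is non-negative for
   any x; pow (x, 3.0) instead falls back to asking whether x itself
   is non-negative.  */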
15836 /* Return true if T is known to be non-negative. If the return
15837 value is based on the assumption that signed overflow is undefined,
15838 set *STRICT_OVERFLOW_P to true; otherwise, don't change
15839 *STRICT_OVERFLOW_P. */
15841 static bool
15842 tree_invalid_nonnegative_warnv_p (tree t, bool *strict_overflow_p)
15844 enum tree_code code = TREE_CODE (t);
15845 if (TYPE_UNSIGNED (TREE_TYPE (t)))
15846 return true;
15848 switch (code)
15850 case TARGET_EXPR:
15852 tree temp = TARGET_EXPR_SLOT (t);
15853 t = TARGET_EXPR_INITIAL (t);
15855 /* If the initializer is non-void, then it's a normal expression
15856 that will be assigned to the slot. */
15857 if (!VOID_TYPE_P (t))
15858 return tree_expr_nonnegative_warnv_p (t, strict_overflow_p);
15860 /* Otherwise, the initializer sets the slot in some way. One common
15861 way is an assignment statement at the end of the initializer. */
15862 while (1)
15864 if (TREE_CODE (t) == BIND_EXPR)
15865 t = expr_last (BIND_EXPR_BODY (t));
15866 else if (TREE_CODE (t) == TRY_FINALLY_EXPR
15867 || TREE_CODE (t) == TRY_CATCH_EXPR)
15868 t = expr_last (TREE_OPERAND (t, 0));
15869 else if (TREE_CODE (t) == STATEMENT_LIST)
15870 t = expr_last (t);
15871 else
15872 break;
15874 if (TREE_CODE (t) == MODIFY_EXPR
15875 && TREE_OPERAND (t, 0) == temp)
15876 return tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 1),
15877 strict_overflow_p);
15879 return false;
15882 case CALL_EXPR:
15884 tree arg0 = call_expr_nargs (t) > 0 ? CALL_EXPR_ARG (t, 0) : NULL_TREE;
15885 tree arg1 = call_expr_nargs (t) > 1 ? CALL_EXPR_ARG (t, 1) : NULL_TREE;
15887 return tree_call_nonnegative_warnv_p (TREE_TYPE (t),
15888 get_callee_fndecl (t),
15889 arg0,
15890 arg1,
15891 strict_overflow_p);
15893 case COMPOUND_EXPR:
15894 case MODIFY_EXPR:
15895 return tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 1),
15896 strict_overflow_p);
15897 case BIND_EXPR:
15898 return tree_expr_nonnegative_warnv_p (expr_last (TREE_OPERAND (t, 1)),
15899 strict_overflow_p);
15900 case SAVE_EXPR:
15901 return tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 0),
15902 strict_overflow_p);
15904 default:
15905 return tree_simple_nonnegative_warnv_p (TREE_CODE (t),
15906 TREE_TYPE (t));
15909 /* We don't know the sign of `t', so be conservative and return false. */
15910 return false;
15913 /* Return true if T is known to be non-negative. If the return
15914 value is based on the assumption that signed overflow is undefined,
15915 set *STRICT_OVERFLOW_P to true; otherwise, don't change
15916 *STRICT_OVERFLOW_P. */
15918 bool
15919 tree_expr_nonnegative_warnv_p (tree t, bool *strict_overflow_p)
15921 enum tree_code code;
15922 if (t == error_mark_node)
15923 return false;
15925 code = TREE_CODE (t);
15926 switch (TREE_CODE_CLASS (code))
15928 case tcc_binary:
15929 case tcc_comparison:
15930 return tree_binary_nonnegative_warnv_p (TREE_CODE (t),
15931 TREE_TYPE (t),
15932 TREE_OPERAND (t, 0),
15933 TREE_OPERAND (t, 1),
15934 strict_overflow_p);
15936 case tcc_unary:
15937 return tree_unary_nonnegative_warnv_p (TREE_CODE (t),
15938 TREE_TYPE (t),
15939 TREE_OPERAND (t, 0),
15940 strict_overflow_p);
15942 case tcc_constant:
15943 case tcc_declaration:
15944 case tcc_reference:
15945 return tree_single_nonnegative_warnv_p (t, strict_overflow_p);
15947 default:
15948 break;
15951 switch (code)
15953 case TRUTH_AND_EXPR:
15954 case TRUTH_OR_EXPR:
15955 case TRUTH_XOR_EXPR:
15956 return tree_binary_nonnegative_warnv_p (TREE_CODE (t),
15957 TREE_TYPE (t),
15958 TREE_OPERAND (t, 0),
15959 TREE_OPERAND (t, 1),
15960 strict_overflow_p);
15961 case TRUTH_NOT_EXPR:
15962 return tree_unary_nonnegative_warnv_p (TREE_CODE (t),
15963 TREE_TYPE (t),
15964 TREE_OPERAND (t, 0),
15965 strict_overflow_p);
15967 case COND_EXPR:
15968 case CONSTRUCTOR:
15969 case OBJ_TYPE_REF:
15970 case ASSERT_EXPR:
15971 case ADDR_EXPR:
15972 case WITH_SIZE_EXPR:
15973 case SSA_NAME:
15974 return tree_single_nonnegative_warnv_p (t, strict_overflow_p);
15976 default:
15977 return tree_invalid_nonnegative_warnv_p (t, strict_overflow_p);
15981 /* Return true if `t' is known to be non-negative. Handle warnings
15982 about undefined signed overflow. */
15984 bool
15985 tree_expr_nonnegative_p (tree t)
15987 bool ret, strict_overflow_p;
15989 strict_overflow_p = false;
15990 ret = tree_expr_nonnegative_warnv_p (t, &strict_overflow_p);
15991 if (strict_overflow_p)
15992 fold_overflow_warning (("assuming signed overflow does not occur when "
15993 "determining that expression is always "
15994 "non-negative"),
15995 WARN_STRICT_OVERFLOW_MISC);
15996 return ret;
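/* A minimal usage sketch of the warnv protocol (editorial, not part
   of the original source; the helper below is hypothetical).  A
   caller that must not rely on undefined signed overflow can inspect
   the flag instead of warning, unlike tree_expr_nonnegative_p above:  */
#if 0
static bool
expr_nonnegative_without_overflow_assumption (tree t)
{
  bool strict_overflow_p = false;
  bool ret = tree_expr_nonnegative_warnv_p (t, &strict_overflow_p);
  /* Reject proofs that assumed -fstrict-overflow semantics.  */
  return ret && !strict_overflow_p;
}
#endif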
16000 /* Return true when (CODE OP0) is known to be nonzero.
16001 For floating point we further ensure that OP0 is not denormal.
16002 Similar logic is present in nonzero_address in rtlanal.h.
16004 If the return value is based on the assumption that signed overflow
16005 is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
16006 change *STRICT_OVERFLOW_P. */
16008 bool
16009 tree_unary_nonzero_warnv_p (enum tree_code code, tree type, tree op0,
16010 bool *strict_overflow_p)
16012 switch (code)
16014 case ABS_EXPR:
16015 return tree_expr_nonzero_warnv_p (op0,
16016 strict_overflow_p);
16018 case NOP_EXPR:
16020 tree inner_type = TREE_TYPE (op0);
16021 tree outer_type = type;
16023 return (TYPE_PRECISION (outer_type) >= TYPE_PRECISION (inner_type)
16024 && tree_expr_nonzero_warnv_p (op0,
16025 strict_overflow_p));
16027 break;
16029 case NON_LVALUE_EXPR:
16030 return tree_expr_nonzero_warnv_p (op0,
16031 strict_overflow_p);
16033 default:
16034 break;
16037 return false;
16040 /* Return true when (CODE OP0 OP1) is known to be nonzero.
16041 For floating point we further ensure that the operands are not denormal.
16042 Similar logic is present in nonzero_address in rtlanal.h.
16044 If the return value is based on the assumption that signed overflow
16045 is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
16046 change *STRICT_OVERFLOW_P. */
16048 bool
16049 tree_binary_nonzero_warnv_p (enum tree_code code,
16050 tree type,
16051 tree op0,
16052 tree op1, bool *strict_overflow_p)
16054 bool sub_strict_overflow_p;
16055 switch (code)
16057 case POINTER_PLUS_EXPR:
16058 case PLUS_EXPR:
16059 if (TYPE_OVERFLOW_UNDEFINED (type))
16061 /* In the presence of negative values it is hard
16062 to say anything. */
16063 sub_strict_overflow_p = false;
16064 if (!tree_expr_nonnegative_warnv_p (op0,
16065 &sub_strict_overflow_p)
16066 || !tree_expr_nonnegative_warnv_p (op1,
16067 &sub_strict_overflow_p))
16068 return false;
16069 /* One of the operands must be positive and the other non-negative. */
16070 /* We don't set *STRICT_OVERFLOW_P here: even if this value
16071 overflows, on a twos-complement machine the sum of two
16072 nonnegative numbers can never be zero. */
16073 return (tree_expr_nonzero_warnv_p (op0,
16074 strict_overflow_p)
16075 || tree_expr_nonzero_warnv_p (op1,
16076 strict_overflow_p));
16078 break;
16080 case MULT_EXPR:
16081 if (TYPE_OVERFLOW_UNDEFINED (type))
16083 if (tree_expr_nonzero_warnv_p (op0,
16084 strict_overflow_p)
16085 && tree_expr_nonzero_warnv_p (op1,
16086 strict_overflow_p))
16088 *strict_overflow_p = true;
16089 return true;
16092 break;
16094 case MIN_EXPR:
16095 sub_strict_overflow_p = false;
16096 if (tree_expr_nonzero_warnv_p (op0,
16097 &sub_strict_overflow_p)
16098 && tree_expr_nonzero_warnv_p (op1,
16099 &sub_strict_overflow_p))
16101 if (sub_strict_overflow_p)
16102 *strict_overflow_p = true;
16104 break;
16106 case MAX_EXPR:
16107 sub_strict_overflow_p = false;
16108 if (tree_expr_nonzero_warnv_p (op0,
16109 &sub_strict_overflow_p))
16111 if (sub_strict_overflow_p)
16112 *strict_overflow_p = true;
16114 /* When both operands are nonzero, then MAX must be too. */
16115 if (tree_expr_nonzero_warnv_p (op1,
16116 strict_overflow_p))
16117 return true;
16119 /* MAX where operand 0 is positive is positive. */
16120 return tree_expr_nonnegative_warnv_p (op0,
16121 strict_overflow_p);
16123 /* MAX where operand 1 is positive is positive. */
16124 else if (tree_expr_nonzero_warnv_p (op1,
16125 &sub_strict_overflow_p)
16126 && tree_expr_nonnegative_warnv_p (op1,
16127 &sub_strict_overflow_p))
16129 if (sub_strict_overflow_p)
16130 *strict_overflow_p = true;
16131 return true;
16133 break;
16135 case BIT_IOR_EXPR:
16136 return (tree_expr_nonzero_warnv_p (op1,
16137 strict_overflow_p)
16138 || tree_expr_nonzero_warnv_p (op0,
16139 strict_overflow_p));
16141 default:
16142 break;
16145 return false;
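/* Example of the overflow reasoning above (editorial, not part of the
   original source): under TYPE_OVERFLOW_UNDEFINED, if A and B are
   both known non-negative and at least one is nonzero, A + B is
   nonzero without setting *STRICT_OVERFLOW_P, since a sum of two
   non-negative values cannot wrap to zero in twos complement.  For
   MULT_EXPR, A != 0 && B != 0 implies A * B != 0 only because
   overflow is undefined, so there the flag is set.  */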
16148 /* Return true when T is known to be nonzero.
16149 For floating point we further ensure that T is not denormal.
16150 Similar logic is present in nonzero_address in rtlanal.h.
16152 If the return value is based on the assumption that signed overflow
16153 is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
16154 change *STRICT_OVERFLOW_P. */
16156 bool
16157 tree_single_nonzero_warnv_p (tree t, bool *strict_overflow_p)
16159 bool sub_strict_overflow_p;
16160 switch (TREE_CODE (t))
16162 case INTEGER_CST:
16163 return !integer_zerop (t);
16165 case ADDR_EXPR:
16167 tree base = TREE_OPERAND (t, 0);
16169 if (!DECL_P (base))
16170 base = get_base_address (base);
16172 if (!base)
16173 return false;
16175 /* For objects in the symbol table, check whether we know they are non-zero.
16176 Don't do anything for variables and functions before the symtab is built;
16177 it is quite possible that they will be declared weak later. */
16178 if (DECL_P (base) && decl_in_symtab_p (base))
16180 struct symtab_node *symbol;
16182 symbol = symtab_node::get (base);
16183 if (symbol)
16184 return symbol->nonzero_address ();
16185 else
16186 return false;
16189 /* Function local objects are never NULL. */
16190 if (DECL_P (base)
16191 && (DECL_CONTEXT (base)
16192 && TREE_CODE (DECL_CONTEXT (base)) == FUNCTION_DECL
16193 && auto_var_in_fn_p (base, DECL_CONTEXT (base))))
16194 return true;
16196 /* Constants are never weak. */
16197 if (CONSTANT_CLASS_P (base))
16198 return true;
16200 return false;
16203 case COND_EXPR:
16204 sub_strict_overflow_p = false;
16205 if (tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 1),
16206 &sub_strict_overflow_p)
16207 && tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 2),
16208 &sub_strict_overflow_p))
16210 if (sub_strict_overflow_p)
16211 *strict_overflow_p = true;
16212 return true;
16214 break;
16216 default:
16217 break;
16219 return false;
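/* Illustration of the ADDR_EXPR case above (editorial, not part of
   the original source): inside a function, the address of one of its
   own auto variables is always nonzero, so "&local != 0" can fold to
   true.  The address of a file-scope variable, by contrast, is only
   treated as nonzero once the symbol table can vouch for it via
   symtab_node::nonzero_address, since the symbol might later be
   declared weak.  */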
16222 /* Given the components of a binary expression CODE, TYPE, OP0 and OP1,
16223 attempt to fold the expression to a constant without modifying TYPE,
16224 OP0 or OP1.
16226 If the expression could be simplified to a constant, then return
16227 the constant. If the expression would not be simplified to a
16228 constant, then return NULL_TREE. */
16230 tree
16231 fold_binary_to_constant (enum tree_code code, tree type, tree op0, tree op1)
16233 tree tem = fold_binary (code, type, op0, op1);
16234 return (tem && TREE_CONSTANT (tem)) ? tem : NULL_TREE;
16237 /* Given the components of a unary expression CODE, TYPE and OP0,
16238 attempt to fold the expression to a constant without modifying
16239 TYPE or OP0.
16241 If the expression could be simplified to a constant, then return
16242 the constant. If the expression would not be simplified to a
16243 constant, then return NULL_TREE. */
16245 tree
16246 fold_unary_to_constant (enum tree_code code, tree type, tree op0)
16248 tree tem = fold_unary (code, type, op0);
16249 return (tem && TREE_CONSTANT (tem)) ? tem : NULL_TREE;
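/* Usage sketch (editorial, not part of the original source; the
   function below is hypothetical).  Both helpers yield NULL_TREE
   unless folding produced a genuine constant:  */
#if 0
static void
example_fold_to_constant (void)
{
  tree two = build_int_cst (integer_type_node, 2);
  tree three = build_int_cst (integer_type_node, 3);
  /* Folds to the INTEGER_CST 5.  */
  tree sum = fold_binary_to_constant (PLUS_EXPR, integer_type_node,
				      two, three);
  /* Folds to the INTEGER_CST -2.  */
  tree neg = fold_unary_to_constant (NEGATE_EXPR, integer_type_node, two);
  (void) sum; (void) neg;
}
#endif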
16252 /* If EXP represents referencing an element in a constant string
16253 (either via pointer arithmetic or array indexing), return the
16254 tree representing the value accessed, otherwise return NULL. */
16256 tree
16257 fold_read_from_constant_string (tree exp)
16259 if ((TREE_CODE (exp) == INDIRECT_REF
16260 || TREE_CODE (exp) == ARRAY_REF)
16261 && TREE_CODE (TREE_TYPE (exp)) == INTEGER_TYPE)
16263 tree exp1 = TREE_OPERAND (exp, 0);
16264 tree index;
16265 tree string;
16266 location_t loc = EXPR_LOCATION (exp);
16268 if (TREE_CODE (exp) == INDIRECT_REF)
16269 string = string_constant (exp1, &index);
16270 else
16272 tree low_bound = array_ref_low_bound (exp);
16273 index = fold_convert_loc (loc, sizetype, TREE_OPERAND (exp, 1));
16275 /* Optimize the special case of a zero lower bound.
16277 We convert the low_bound to sizetype to avoid some problems
16278 with constant folding. (E.g. suppose the lower bound is 1,
16279 and its mode is QI. Without the conversion, (ARRAY
16280 +(INDEX-(unsigned char)1)) becomes ((ARRAY+(-(unsigned char)1))
16281 +INDEX), which becomes (ARRAY+255+INDEX). Oops!) */
16282 if (! integer_zerop (low_bound))
16283 index = size_diffop_loc (loc, index,
16284 fold_convert_loc (loc, sizetype, low_bound));
16286 string = exp1;
16289 if (string
16290 && TYPE_MODE (TREE_TYPE (exp)) == TYPE_MODE (TREE_TYPE (TREE_TYPE (string)))
16291 && TREE_CODE (string) == STRING_CST
16292 && TREE_CODE (index) == INTEGER_CST
16293 && compare_tree_int (index, TREE_STRING_LENGTH (string)) < 0
16294 && (GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_TYPE (string))))
16295 == MODE_INT)
16296 && (GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (TREE_TYPE (string)))) == 1))
16297 return build_int_cst_type (TREE_TYPE (exp),
16298 (TREE_STRING_POINTER (string)
16299 [TREE_INT_CST_LOW (index)]));
16301 return NULL;
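/* Example (editorial, not part of the original source): for

     static const char s[] = "abc";

   the access s[1] is an ARRAY_REF of a STRING_CST with a constant
   index that passes the checks above (1 < TREE_STRING_LENGTH, a
   single-byte integer mode), so it folds to the character constant
   'b'.  */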
16304 /* Return the tree for neg (ARG0) when ARG0 is known to be either
16305 an integer constant, real, or fixed-point constant.
16307 TYPE is the type of the result. */
16309 static tree
16310 fold_negate_const (tree arg0, tree type)
16312 tree t = NULL_TREE;
16314 switch (TREE_CODE (arg0))
16316 case INTEGER_CST:
16318 bool overflow;
16319 wide_int val = wi::neg (arg0, &overflow);
16320 t = force_fit_type (type, val, 1,
16321 (overflow | TREE_OVERFLOW (arg0))
16322 && !TYPE_UNSIGNED (type));
16323 break;
16326 case REAL_CST:
16327 t = build_real (type, real_value_negate (&TREE_REAL_CST (arg0)));
16328 break;
16330 case FIXED_CST:
16332 FIXED_VALUE_TYPE f;
16333 bool overflow_p = fixed_arithmetic (&f, NEGATE_EXPR,
16334 &(TREE_FIXED_CST (arg0)), NULL,
16335 TYPE_SATURATING (type));
16336 t = build_fixed (type, f);
16337 /* Propagate overflow flags. */
16338 if (overflow_p | TREE_OVERFLOW (arg0))
16339 TREE_OVERFLOW (t) = 1;
16340 break;
16343 default:
16344 gcc_unreachable ();
16347 return t;
16350 /* Return the tree for abs (ARG0) when ARG0 is known to be either
16351 an integer constant or real constant.
16353 TYPE is the type of the result. */
16355 tree
16356 fold_abs_const (tree arg0, tree type)
16358 tree t = NULL_TREE;
16360 switch (TREE_CODE (arg0))
16362 case INTEGER_CST:
16364 /* If the value is unsigned or non-negative, then the absolute value
16365 is the same as the ordinary value. */
16366 if (!wi::neg_p (arg0, TYPE_SIGN (type)))
16367 t = arg0;
16369 /* If the value is negative, then the absolute value is
16370 its negation. */
16371 else
16373 bool overflow;
16374 wide_int val = wi::neg (arg0, &overflow);
16375 t = force_fit_type (type, val, -1,
16376 overflow | TREE_OVERFLOW (arg0));
16379 break;
16381 case REAL_CST:
16382 if (REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg0)))
16383 t = build_real (type, real_value_negate (&TREE_REAL_CST (arg0)));
16384 else
16385 t = arg0;
16386 break;
16388 default:
16389 gcc_unreachable ();
16392 return t;
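/* Note on the INTEGER_CST path above (editorial, not part of the
   original source): for 32-bit int, abs (-2147483648) negates
   INT_MIN, which wraps back to INT_MIN; wi::neg reports the overflow
   and force_fit_type marks the result with TREE_OVERFLOW instead of
   silently returning a negative "absolute value".  */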
16395 /* Return the tree for not (ARG0) when ARG0 is known to be an integer
16396 constant. TYPE is the type of the result. */
16398 static tree
16399 fold_not_const (const_tree arg0, tree type)
16401 gcc_assert (TREE_CODE (arg0) == INTEGER_CST);
16403 return force_fit_type (type, wi::bit_not (arg0), 0, TREE_OVERFLOW (arg0));
16406 /* Given CODE, a relational operator, the target type, TYPE and two
16407 constant operands OP0 and OP1, return the result of the
16408 relational operation. If the result is not a compile time
16409 constant, then return NULL_TREE. */
16411 static tree
16412 fold_relational_const (enum tree_code code, tree type, tree op0, tree op1)
16414 int result, invert;
16416 /* From here on, the only cases we handle are when the result is
16417 known to be a constant. */
16419 if (TREE_CODE (op0) == REAL_CST && TREE_CODE (op1) == REAL_CST)
16421 const REAL_VALUE_TYPE *c0 = TREE_REAL_CST_PTR (op0);
16422 const REAL_VALUE_TYPE *c1 = TREE_REAL_CST_PTR (op1);
16424 /* Handle the cases where either operand is a NaN. */
16425 if (real_isnan (c0) || real_isnan (c1))
16427 switch (code)
16429 case EQ_EXPR:
16430 case ORDERED_EXPR:
16431 result = 0;
16432 break;
16434 case NE_EXPR:
16435 case UNORDERED_EXPR:
16436 case UNLT_EXPR:
16437 case UNLE_EXPR:
16438 case UNGT_EXPR:
16439 case UNGE_EXPR:
16440 case UNEQ_EXPR:
16441 result = 1;
16442 break;
16444 case LT_EXPR:
16445 case LE_EXPR:
16446 case GT_EXPR:
16447 case GE_EXPR:
16448 case LTGT_EXPR:
16449 if (flag_trapping_math)
16450 return NULL_TREE;
16451 result = 0;
16452 break;
16454 default:
16455 gcc_unreachable ();
16458 return constant_boolean_node (result, type);
16461 return constant_boolean_node (real_compare (code, c0, c1), type);
16464 if (TREE_CODE (op0) == FIXED_CST && TREE_CODE (op1) == FIXED_CST)
16466 const FIXED_VALUE_TYPE *c0 = TREE_FIXED_CST_PTR (op0);
16467 const FIXED_VALUE_TYPE *c1 = TREE_FIXED_CST_PTR (op1);
16468 return constant_boolean_node (fixed_compare (code, c0, c1), type);
16471 /* Handle equality/inequality of complex constants. */
16472 if (TREE_CODE (op0) == COMPLEX_CST && TREE_CODE (op1) == COMPLEX_CST)
16474 tree rcond = fold_relational_const (code, type,
16475 TREE_REALPART (op0),
16476 TREE_REALPART (op1));
16477 tree icond = fold_relational_const (code, type,
16478 TREE_IMAGPART (op0),
16479 TREE_IMAGPART (op1));
16480 if (code == EQ_EXPR)
16481 return fold_build2 (TRUTH_ANDIF_EXPR, type, rcond, icond);
16482 else if (code == NE_EXPR)
16483 return fold_build2 (TRUTH_ORIF_EXPR, type, rcond, icond);
16484 else
16485 return NULL_TREE;
16488 if (TREE_CODE (op0) == VECTOR_CST && TREE_CODE (op1) == VECTOR_CST)
16490 unsigned count = VECTOR_CST_NELTS (op0);
16491 tree *elts = XALLOCAVEC (tree, count);
16492 gcc_assert (VECTOR_CST_NELTS (op1) == count
16493 && TYPE_VECTOR_SUBPARTS (type) == count);
16495 for (unsigned i = 0; i < count; i++)
16497 tree elem_type = TREE_TYPE (type);
16498 tree elem0 = VECTOR_CST_ELT (op0, i);
16499 tree elem1 = VECTOR_CST_ELT (op1, i);
16501 tree tem = fold_relational_const (code, elem_type,
16502 elem0, elem1);
16504 if (tem == NULL_TREE)
16505 return NULL_TREE;
16507 elts[i] = build_int_cst (elem_type, integer_zerop (tem) ? 0 : -1);
16510 return build_vector (type, elts);
16513 /* From here on we only handle LT, LE, GT, GE, EQ and NE.
16515 To compute GT, swap the arguments and do LT.
16516 To compute GE, do LT and invert the result.
16517 To compute LE, swap the arguments, do LT and invert the result.
16518 To compute NE, do EQ and invert the result.
16520 Therefore, the code below must handle only EQ and LT. */
16522 if (code == LE_EXPR || code == GT_EXPR)
16524 tree tem = op0;
16525 op0 = op1;
16526 op1 = tem;
16527 code = swap_tree_comparison (code);
16530 /* Note that it is safe to invert for real values here because we
16531 have already handled the one case where it matters. */
16533 invert = 0;
16534 if (code == NE_EXPR || code == GE_EXPR)
16536 invert = 1;
16537 code = invert_tree_comparison (code, false);
16540 /* Compute a result for LT or EQ if the arguments permit;
16541 otherwise return NULL_TREE. */
16542 if (TREE_CODE (op0) == INTEGER_CST && TREE_CODE (op1) == INTEGER_CST)
16544 if (code == EQ_EXPR)
16545 result = tree_int_cst_equal (op0, op1);
16546 else
16547 result = tree_int_cst_lt (op0, op1);
16549 else
16550 return NULL_TREE;
16552 if (invert)
16553 result ^= 1;
16554 return constant_boolean_node (result, type);
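/* Worked example of the canonicalization above (editorial, not part
   of the original source): folding 3 >= 2 rewrites GE as "LT,
   inverted", computes tree_int_cst_lt (3, 2) == 0, and inverts it to
   yield the boolean constant 1; 3 > 2 would instead swap the operands
   and evaluate 2 < 3 directly.  */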
16557 /* If necessary, return a CLEANUP_POINT_EXPR for EXPR with the
16558 indicated TYPE. If no CLEANUP_POINT_EXPR is necessary, return EXPR
16559 itself. */
16561 tree
16562 fold_build_cleanup_point_expr (tree type, tree expr)
16564 /* If the expression does not have side effects then we don't have to wrap
16565 it with a cleanup point expression. */
16566 if (!TREE_SIDE_EFFECTS (expr))
16567 return expr;
16569 /* If the expression is a return, check whether the expression inside the
16570 return has side effects, and likewise for the right-hand side of the
16571 modify expression inside the return. If either has no side effects, we
16572 don't need to wrap the expression in a cleanup point expression. Note we
16573 don't check the left-hand side of the modify because it should always be a return decl. */
16574 if (TREE_CODE (expr) == RETURN_EXPR)
16576 tree op = TREE_OPERAND (expr, 0);
16577 if (!op || !TREE_SIDE_EFFECTS (op))
16578 return expr;
16579 op = TREE_OPERAND (op, 1);
16580 if (!TREE_SIDE_EFFECTS (op))
16581 return expr;
16584 return build1 (CLEANUP_POINT_EXPR, type, expr);
16587 /* Given a pointer value OP0 and a type TYPE, return a simplified version
16588 of an indirection through OP0, or NULL_TREE if no simplification is
16589 possible. */
16591 tree
16592 fold_indirect_ref_1 (location_t loc, tree type, tree op0)
16594 tree sub = op0;
16595 tree subtype;
16597 STRIP_NOPS (sub);
16598 subtype = TREE_TYPE (sub);
16599 if (!POINTER_TYPE_P (subtype))
16600 return NULL_TREE;
16602 if (TREE_CODE (sub) == ADDR_EXPR)
16604 tree op = TREE_OPERAND (sub, 0);
16605 tree optype = TREE_TYPE (op);
16606 /* *&CONST_DECL -> to the value of the const decl. */
16607 if (TREE_CODE (op) == CONST_DECL)
16608 return DECL_INITIAL (op);
16609 /* *&p => p; make sure to handle *&"str"[cst] here. */
16610 if (type == optype)
16612 tree fop = fold_read_from_constant_string (op);
16613 if (fop)
16614 return fop;
16615 else
16616 return op;
16618 /* *(foo *)&fooarray => fooarray[0] */
16619 else if (TREE_CODE (optype) == ARRAY_TYPE
16620 && type == TREE_TYPE (optype)
16621 && (!in_gimple_form
16622 || TREE_CODE (TYPE_SIZE (type)) == INTEGER_CST))
16624 tree type_domain = TYPE_DOMAIN (optype);
16625 tree min_val = size_zero_node;
16626 if (type_domain && TYPE_MIN_VALUE (type_domain))
16627 min_val = TYPE_MIN_VALUE (type_domain);
16628 if (in_gimple_form
16629 && TREE_CODE (min_val) != INTEGER_CST)
16630 return NULL_TREE;
16631 return build4_loc (loc, ARRAY_REF, type, op, min_val,
16632 NULL_TREE, NULL_TREE);
16634 /* *(foo *)&complexfoo => __real__ complexfoo */
16635 else if (TREE_CODE (optype) == COMPLEX_TYPE
16636 && type == TREE_TYPE (optype))
16637 return fold_build1_loc (loc, REALPART_EXPR, type, op);
16638 /* *(foo *)&vectorfoo => BIT_FIELD_REF<vectorfoo,...> */
16639 else if (TREE_CODE (optype) == VECTOR_TYPE
16640 && type == TREE_TYPE (optype))
16642 tree part_width = TYPE_SIZE (type);
16643 tree index = bitsize_int (0);
16644 return fold_build3_loc (loc, BIT_FIELD_REF, type, op, part_width, index);
16648 if (TREE_CODE (sub) == POINTER_PLUS_EXPR
16649 && TREE_CODE (TREE_OPERAND (sub, 1)) == INTEGER_CST)
16651 tree op00 = TREE_OPERAND (sub, 0);
16652 tree op01 = TREE_OPERAND (sub, 1);
16654 STRIP_NOPS (op00);
16655 if (TREE_CODE (op00) == ADDR_EXPR)
16657 tree op00type;
16658 op00 = TREE_OPERAND (op00, 0);
16659 op00type = TREE_TYPE (op00);
16661 /* ((foo*)&vectorfoo)[1] => BIT_FIELD_REF<vectorfoo,...> */
16662 if (TREE_CODE (op00type) == VECTOR_TYPE
16663 && type == TREE_TYPE (op00type))
16665 HOST_WIDE_INT offset = tree_to_shwi (op01);
16666 tree part_width = TYPE_SIZE (type);
16667 unsigned HOST_WIDE_INT part_widthi = tree_to_shwi (part_width)/BITS_PER_UNIT;
16668 unsigned HOST_WIDE_INT indexi = offset * BITS_PER_UNIT;
16669 tree index = bitsize_int (indexi);
16671 if (offset / part_widthi < TYPE_VECTOR_SUBPARTS (op00type))
16672 return fold_build3_loc (loc,
16673 BIT_FIELD_REF, type, op00,
16674 part_width, index);
16677 /* ((foo*)&complexfoo)[1] => __imag__ complexfoo */
16678 else if (TREE_CODE (op00type) == COMPLEX_TYPE
16679 && type == TREE_TYPE (op00type))
16681 tree size = TYPE_SIZE_UNIT (type);
16682 if (tree_int_cst_equal (size, op01))
16683 return fold_build1_loc (loc, IMAGPART_EXPR, type, op00);
16685 /* ((foo *)&fooarray)[1] => fooarray[1] */
16686 else if (TREE_CODE (op00type) == ARRAY_TYPE
16687 && type == TREE_TYPE (op00type))
16689 tree type_domain = TYPE_DOMAIN (op00type);
16690 tree min_val = size_zero_node;
16691 if (type_domain && TYPE_MIN_VALUE (type_domain))
16692 min_val = TYPE_MIN_VALUE (type_domain);
16693 op01 = size_binop_loc (loc, EXACT_DIV_EXPR, op01,
16694 TYPE_SIZE_UNIT (type));
16695 op01 = size_binop_loc (loc, PLUS_EXPR, op01, min_val);
16696 return build4_loc (loc, ARRAY_REF, type, op00, op01,
16697 NULL_TREE, NULL_TREE);
16702 /* *(foo *)fooarrptr => (*fooarrptr)[0] */
16703 if (TREE_CODE (TREE_TYPE (subtype)) == ARRAY_TYPE
16704 && type == TREE_TYPE (TREE_TYPE (subtype))
16705 && (!in_gimple_form
16706 || TREE_CODE (TYPE_SIZE (type)) == INTEGER_CST))
16708 tree type_domain;
16709 tree min_val = size_zero_node;
16710 sub = build_fold_indirect_ref_loc (loc, sub);
16711 type_domain = TYPE_DOMAIN (TREE_TYPE (sub));
16712 if (type_domain && TYPE_MIN_VALUE (type_domain))
16713 min_val = TYPE_MIN_VALUE (type_domain);
16714 if (in_gimple_form
16715 && TREE_CODE (min_val) != INTEGER_CST)
16716 return NULL_TREE;
16717 return build4_loc (loc, ARRAY_REF, type, sub, min_val, NULL_TREE,
16718 NULL_TREE);
16721 return NULL_TREE;
16724 /* Builds an expression for an indirection through T, simplifying some
16725 cases. */
16727 tree
16728 build_fold_indirect_ref_loc (location_t loc, tree t)
16730 tree type = TREE_TYPE (TREE_TYPE (t));
16731 tree sub = fold_indirect_ref_1 (loc, type, t);
16733 if (sub)
16734 return sub;
16736 return build1_loc (loc, INDIRECT_REF, type, t);
16739 /* Given an INDIRECT_REF T, return either T or a simplified version. */
16741 tree
16742 fold_indirect_ref_loc (location_t loc, tree t)
16744 tree sub = fold_indirect_ref_1 (loc, TREE_TYPE (t), TREE_OPERAND (t, 0));
16746 if (sub)
16747 return sub;
16748 else
16749 return t;
16752 /* Strip non-trapping, non-side-effecting tree nodes from an expression
16753 whose result is ignored. The type of the returned tree need not be
16754 the same as the original expression. */
16756 tree
16757 fold_ignored_result (tree t)
16759 if (!TREE_SIDE_EFFECTS (t))
16760 return integer_zero_node;
16762 for (;;)
16763 switch (TREE_CODE_CLASS (TREE_CODE (t)))
16765 case tcc_unary:
16766 t = TREE_OPERAND (t, 0);
16767 break;
16769 case tcc_binary:
16770 case tcc_comparison:
16771 if (!TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1)))
16772 t = TREE_OPERAND (t, 0);
16773 else if (!TREE_SIDE_EFFECTS (TREE_OPERAND (t, 0)))
16774 t = TREE_OPERAND (t, 1);
16775 else
16776 return t;
16777 break;
16779 case tcc_expression:
16780 switch (TREE_CODE (t))
16782 case COMPOUND_EXPR:
16783 if (TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1)))
16784 return t;
16785 t = TREE_OPERAND (t, 0);
16786 break;
16788 case COND_EXPR:
16789 if (TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1))
16790 || TREE_SIDE_EFFECTS (TREE_OPERAND (t, 2)))
16791 return t;
16792 t = TREE_OPERAND (t, 0);
16793 break;
16795 default:
16796 return t;
16798 break;
16800 default:
16801 return t;
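/* Examples (editorial, not part of the original source): with the
   result ignored, "x + y" has no side effects at all and collapses to
   integer_zero_node; "f () + y" keeps only "f ()"; and a
   COMPOUND_EXPR "(f (), g ())" is returned unchanged because its
   second operand still has side effects.  */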
16805 /* Return the value of VALUE, rounded up to a multiple of DIVISOR. */
16807 tree
16808 round_up_loc (location_t loc, tree value, unsigned int divisor)
16810 tree div = NULL_TREE;
16812 if (divisor == 1)
16813 return value;
16815 /* See if VALUE is already a multiple of DIVISOR. If so, we don't
16816 have to do anything. Only do this when we are not given a const,
16817 because for a constant this check is more expensive than simply
16818 performing the rounding. */
16819 if (TREE_CODE (value) != INTEGER_CST)
16821 div = build_int_cst (TREE_TYPE (value), divisor);
16823 if (multiple_of_p (TREE_TYPE (value), value, div))
16824 return value;
16827 /* If divisor is a power of two, simplify this to bit manipulation. */
16828 if (divisor == (divisor & -divisor))
16830 if (TREE_CODE (value) == INTEGER_CST)
16832 wide_int val = value;
16833 bool overflow_p;
16835 if ((val & (divisor - 1)) == 0)
16836 return value;
16838 overflow_p = TREE_OVERFLOW (value);
16839 val &= ~(divisor - 1);
16840 val += divisor;
16841 if (val == 0)
16842 overflow_p = true;
16844 return force_fit_type (TREE_TYPE (value), val, -1, overflow_p);
16846 else
16848 tree t;
16850 t = build_int_cst (TREE_TYPE (value), divisor - 1);
16851 value = size_binop_loc (loc, PLUS_EXPR, value, t);
16852 t = build_int_cst (TREE_TYPE (value), -divisor);
16853 value = size_binop_loc (loc, BIT_AND_EXPR, value, t);
16856 else
16858 if (!div)
16859 div = build_int_cst (TREE_TYPE (value), divisor);
16860 value = size_binop_loc (loc, CEIL_DIV_EXPR, value, div);
16861 value = size_binop_loc (loc, MULT_EXPR, value, div);
16864 return value;
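/* Worked example of the power-of-two path above (editorial, not part
   of the original source): rounding VALUE = 37 up to DIVISOR = 8
   computes (37 + 7) & -8 = 44 & ~7 = 40, which matches
   CEIL (37 / 8) * 8.  The INTEGER_CST branch performs the same
   arithmetic on wide_int, flagging overflow if the addition wraps
   to zero.  */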
16867 /* Likewise, but round down. */
16869 tree
16870 round_down_loc (location_t loc, tree value, int divisor)
16872 tree div = NULL_TREE;
16874 gcc_assert (divisor > 0);
16875 if (divisor == 1)
16876 return value;
16878 /* See if VALUE is already a multiple of DIVISOR. If so, we don't
16879 have to do anything. Only do this when we are not given a const,
16880 because for a constant this check is more expensive than simply
16881 performing the rounding. */
16882 if (TREE_CODE (value) != INTEGER_CST)
16884 div = build_int_cst (TREE_TYPE (value), divisor);
16886 if (multiple_of_p (TREE_TYPE (value), value, div))
16887 return value;
16890 /* If divisor is a power of two, simplify this to bit manipulation. */
16891 if (divisor == (divisor & -divisor))
16893 tree t;
16895 t = build_int_cst (TREE_TYPE (value), -divisor);
16896 value = size_binop_loc (loc, BIT_AND_EXPR, value, t);
16898 else
16900 if (!div)
16901 div = build_int_cst (TREE_TYPE (value), divisor);
16902 value = size_binop_loc (loc, FLOOR_DIV_EXPR, value, div);
16903 value = size_binop_loc (loc, MULT_EXPR, value, div);
16906 return value;
16909 /* Returns the pointer to the base of the object addressed by EXP and
16910 extracts the information about the offset of the access, storing it
16911 in *PBITPOS and *POFFSET. */
16913 static tree
16914 split_address_to_core_and_offset (tree exp,
16915 HOST_WIDE_INT *pbitpos, tree *poffset)
16917 tree core;
16918 enum machine_mode mode;
16919 int unsignedp, volatilep;
16920 HOST_WIDE_INT bitsize;
16921 location_t loc = EXPR_LOCATION (exp);
16923 if (TREE_CODE (exp) == ADDR_EXPR)
16925 core = get_inner_reference (TREE_OPERAND (exp, 0), &bitsize, pbitpos,
16926 poffset, &mode, &unsignedp, &volatilep,
16927 false);
16928 core = build_fold_addr_expr_loc (loc, core);
16930 else
16932 core = exp;
16933 *pbitpos = 0;
16934 *poffset = NULL_TREE;
16937 return core;
16940 /* Returns true if addresses of E1 and E2 differ by a constant, false
16941 otherwise. If they do, E1 - E2 is stored in *DIFF. */
16943 bool
16944 ptr_difference_const (tree e1, tree e2, HOST_WIDE_INT *diff)
16946 tree core1, core2;
16947 HOST_WIDE_INT bitpos1, bitpos2;
16948 tree toffset1, toffset2, tdiff, type;
16950 core1 = split_address_to_core_and_offset (e1, &bitpos1, &toffset1);
16951 core2 = split_address_to_core_and_offset (e2, &bitpos2, &toffset2);
16953 if (bitpos1 % BITS_PER_UNIT != 0
16954 || bitpos2 % BITS_PER_UNIT != 0
16955 || !operand_equal_p (core1, core2, 0))
16956 return false;
16958 if (toffset1 && toffset2)
16960 type = TREE_TYPE (toffset1);
16961 if (type != TREE_TYPE (toffset2))
16962 toffset2 = fold_convert (type, toffset2);
16964 tdiff = fold_build2 (MINUS_EXPR, type, toffset1, toffset2);
16965 if (!cst_and_fits_in_hwi (tdiff))
16966 return false;
16968 *diff = int_cst_value (tdiff);
16970 else if (toffset1 || toffset2)
16972 /* If only one of the offsets is non-constant, the difference cannot
16973 be a constant. */
16974 return false;
16976 else
16977 *diff = 0;
16979 *diff += (bitpos1 - bitpos2) / BITS_PER_UNIT;
16980 return true;
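/* Usage sketch (editorial, not part of the original source): given
   "int a[10];", calling ptr_difference_const on &a[3] and &a[1]
   resolves both cores to "a" with byte-aligned bit positions, and
   stores 2 * sizeof (int) in *DIFF.  If either address had a
   non-constant offset, the function would return false instead.  */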
16983 /* Simplify the floating point expression EXP when the sign of the
16984 result is not significant. Return NULL_TREE if no simplification
16985 is possible. */
16987 tree
16988 fold_strip_sign_ops (tree exp)
16990 tree arg0, arg1;
16991 location_t loc = EXPR_LOCATION (exp);
16993 switch (TREE_CODE (exp))
16995 case ABS_EXPR:
16996 case NEGATE_EXPR:
16997 arg0 = fold_strip_sign_ops (TREE_OPERAND (exp, 0));
16998 return arg0 ? arg0 : TREE_OPERAND (exp, 0);
17000 case MULT_EXPR:
17001 case RDIV_EXPR:
17002 if (HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (TREE_TYPE (exp))))
17003 return NULL_TREE;
17004 arg0 = fold_strip_sign_ops (TREE_OPERAND (exp, 0));
17005 arg1 = fold_strip_sign_ops (TREE_OPERAND (exp, 1));
17006 if (arg0 != NULL_TREE || arg1 != NULL_TREE)
17007 return fold_build2_loc (loc, TREE_CODE (exp), TREE_TYPE (exp),
17008 arg0 ? arg0 : TREE_OPERAND (exp, 0),
17009 arg1 ? arg1 : TREE_OPERAND (exp, 1));
17010 break;
17012 case COMPOUND_EXPR:
17013 arg0 = TREE_OPERAND (exp, 0);
17014 arg1 = fold_strip_sign_ops (TREE_OPERAND (exp, 1));
17015 if (arg1)
17016 return fold_build2_loc (loc, COMPOUND_EXPR, TREE_TYPE (exp), arg0, arg1);
17017 break;
17019 case COND_EXPR:
17020 arg0 = fold_strip_sign_ops (TREE_OPERAND (exp, 1));
17021 arg1 = fold_strip_sign_ops (TREE_OPERAND (exp, 2));
17022 if (arg0 || arg1)
17023 return fold_build3_loc (loc,
17024 COND_EXPR, TREE_TYPE (exp), TREE_OPERAND (exp, 0),
17025 arg0 ? arg0 : TREE_OPERAND (exp, 1),
17026 arg1 ? arg1 : TREE_OPERAND (exp, 2));
17027 break;
17029 case CALL_EXPR:
17031 const enum built_in_function fcode = builtin_mathfn_code (exp);
17032 switch (fcode)
17034 CASE_FLT_FN (BUILT_IN_COPYSIGN):
17035 /* Strip copysign function call, return the 1st argument. */
17036 arg0 = CALL_EXPR_ARG (exp, 0);
17037 arg1 = CALL_EXPR_ARG (exp, 1);
17038 return omit_one_operand_loc (loc, TREE_TYPE (exp), arg0, arg1);
17040 default:
17041 /* Strip sign ops from the argument of "odd" math functions. */
17042 if (negate_mathfn_p (fcode))
17044 arg0 = fold_strip_sign_ops (CALL_EXPR_ARG (exp, 0));
17045 if (arg0)
17046 return build_call_expr_loc (loc, get_callee_fndecl (exp), 1, arg0);
17048 break;
17051 break;
17053 default:
17054 break;
17056 return NULL_TREE;
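/* Illustration (editorial, not part of the original source): when the
   caller does not care about the sign of the result, "-x * y" strips
   to "x * y", "copysign (x, y)" strips to "x" (keeping any side
   effects of y), and for an odd math function such as sin,
   "sin (-x)" strips to "sin (x)".  */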