/* Fold a constant sub-tree into a single node for C-compiler
   Copyright (C) 1987-2014 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

/*@@ This file should be rewritten to use an arbitrary precision
  @@ representation for "struct tree_int_cst" and "struct tree_real_cst".
  @@ Perhaps the routines could also be used for bc/dc, and made a lib.
  @@ The routines that translate from the ap rep should
  @@ warn if precision et al. is lost.
  @@ This would also make life easier when this technology is used
  @@ for cross-compilers.  */
/* The entry points in this file are fold, size_int_wide and size_binop.

   fold takes a tree as argument and returns a simplified tree.

   size_binop takes a tree code for an arithmetic operation
   and two operands that are trees, and produces a tree for the
   result, assuming the type comes from `sizetype'.

   size_int takes an integer value, and creates a tree constant
   with type from `sizetype'.

   Note: Since the folders get called on non-gimple code as well as
   gimple code, we need to handle GIMPLE tuples as well as their
   corresponding tree equivalents.  */
#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "flags.h"
#include "tree.h"
#include "stor-layout.h"
#include "calls.h"
#include "tree-iterator.h"
#include "realmpfr.h"
#include "rtl.h"
#include "expr.h"
#include "tm_p.h"
#include "target.h"
#include "diagnostic-core.h"
#include "intl.h"
#include "langhooks.h"
#include "md5.h"
#include "predict.h"
#include "vec.h"
#include "hashtab.h"
#include "hash-set.h"
#include "machmode.h"
#include "hard-reg-set.h"
#include "input.h"
#include "function.h"
#include "basic-block.h"
#include "tree-ssa-alias.h"
#include "internal-fn.h"
#include "tree-eh.h"
#include "gimple-expr.h"
#include "is-a.h"
#include "gimple.h"
#include "gimplify.h"
#include "tree-dfa.h"
#include "hash-table.h"  /* Required for ENABLE_FOLD_CHECKING.  */
#include "builtins.h"
#include "hash-map.h"
#include "plugin-api.h"
#include "ipa-ref.h"
#include "cgraph.h"
#include "generic-match.h"
#include "optabs.h"
/* Nonzero if we are folding constants inside an initializer; zero
   otherwise.  */
int folding_initializer = 0;
/* The following constants represent a bit based encoding of GCC's
   comparison operators.  This encoding simplifies transformations
   on relational comparison operators, such as AND and OR.  */
enum comparison_code {
  COMPCODE_FALSE = 0,
  COMPCODE_LT = 1,
  COMPCODE_EQ = 2,
  COMPCODE_LE = 3,
  COMPCODE_GT = 4,
  COMPCODE_LTGT = 5,
  COMPCODE_GE = 6,
  COMPCODE_ORD = 7,
  COMPCODE_UNORD = 8,
  COMPCODE_UNLT = 9,
  COMPCODE_UNEQ = 10,
  COMPCODE_UNLE = 11,
  COMPCODE_UNGT = 12,
  COMPCODE_NE = 13,
  COMPCODE_UNGE = 14,
  COMPCODE_TRUE = 15
};
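
/* Reading the bits off the enumerator values above: bit 0 stands for
   "less", bit 1 for "equal", bit 2 for "greater" and bit 3 for
   "unordered".  For example, COMPCODE_LE == COMPCODE_LT | COMPCODE_EQ,
   so combining A < B with A == B under a logical OR is just a bitwise
   OR of the codes, and COMPCODE_NE == COMPCODE_LTGT | COMPCODE_UNORD.
   (Illustrative note; the bit interpretation is inferred from the
   enumerator values, not stated elsewhere in this file.)  */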
static bool negate_mathfn_p (enum built_in_function);
static bool negate_expr_p (tree);
static tree negate_expr (tree);
static tree split_tree (tree, enum tree_code, tree *, tree *, tree *, int);
static tree associate_trees (location_t, tree, tree, enum tree_code, tree);
static enum comparison_code comparison_to_compcode (enum tree_code);
static enum tree_code compcode_to_comparison (enum comparison_code);
static int operand_equal_for_comparison_p (tree, tree, tree);
static int twoval_comparison_p (tree, tree *, tree *, int *);
static tree eval_subst (location_t, tree, tree, tree, tree, tree);
static tree distribute_bit_expr (location_t, enum tree_code, tree, tree, tree);
static tree make_bit_field_ref (location_t, tree, tree,
                                HOST_WIDE_INT, HOST_WIDE_INT, int);
static tree optimize_bit_field_compare (location_t, enum tree_code,
                                        tree, tree, tree);
static tree decode_field_reference (location_t, tree, HOST_WIDE_INT *,
                                    HOST_WIDE_INT *,
                                    machine_mode *, int *, int *,
                                    tree *, tree *);
static int simple_operand_p (const_tree);
static bool simple_operand_p_2 (tree);
static tree range_binop (enum tree_code, tree, tree, int, tree, int);
static tree range_predecessor (tree);
static tree range_successor (tree);
static tree fold_range_test (location_t, enum tree_code, tree, tree, tree);
static tree fold_cond_expr_with_comparison (location_t, tree, tree, tree, tree);
static tree unextend (tree, int, int, tree);
static tree optimize_minmax_comparison (location_t, enum tree_code,
                                        tree, tree, tree);
static tree extract_muldiv (tree, tree, enum tree_code, tree, bool *);
static tree extract_muldiv_1 (tree, tree, enum tree_code, tree, bool *);
static tree fold_binary_op_with_conditional_arg (location_t,
                                                 enum tree_code, tree,
                                                 tree, tree,
                                                 tree, tree, int);
static tree fold_mathfn_compare (location_t,
                                 enum built_in_function, enum tree_code,
                                 tree, tree, tree);
static tree fold_inf_compare (location_t, enum tree_code, tree, tree, tree);
static tree fold_div_compare (location_t, enum tree_code, tree, tree, tree);
static bool reorder_operands_p (const_tree, const_tree);
static tree fold_negate_const (tree, tree);
static tree fold_not_const (const_tree, tree);
static tree fold_relational_const (enum tree_code, tree, tree, tree);
static tree fold_convert_const (enum tree_code, tree, tree);
static tree fold_view_convert_expr (tree, tree);
static bool vec_cst_ctor_to_array (tree, tree *);
/* Return EXPR_LOCATION of T if it is not UNKNOWN_LOCATION.
   Otherwise, return LOC.  */

static location_t
expr_location_or (tree t, location_t loc)
{
  location_t tloc = EXPR_LOCATION (t);
  return tloc == UNKNOWN_LOCATION ? loc : tloc;
}
/* Similar to protected_set_expr_location, but never modify X in place;
   if the location can and needs to be set, unshare X first.  */

static inline tree
protected_set_expr_location_unshare (tree x, location_t loc)
{
  if (CAN_HAVE_LOCATION_P (x)
      && EXPR_LOCATION (x) != loc
      && !(TREE_CODE (x) == SAVE_EXPR
           || TREE_CODE (x) == TARGET_EXPR
           || TREE_CODE (x) == BIND_EXPR))
    {
      x = copy_node (x);
      SET_EXPR_LOCATION (x, loc);
    }
  return x;
}
/* If ARG2 divides ARG1 with zero remainder, carries out the exact
   division and returns the quotient.  Otherwise returns
   NULL_TREE.  */

tree
div_if_zero_remainder (const_tree arg1, const_tree arg2)
{
  widest_int quo;

  if (wi::multiple_of_p (wi::to_widest (arg1), wi::to_widest (arg2),
                         SIGNED, &quo))
    return wide_int_to_tree (TREE_TYPE (arg1), quo);

  return NULL_TREE;
}
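
/* For example, div_if_zero_remainder on the integer constants 12 and 4
   yields a constant 3, while on 13 and 4 it yields NULL_TREE because
   the remainder is nonzero.  */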
/* This is nonzero if we should defer warnings about undefined
   overflow.  This facility exists because these warnings are a
   special case.  The code to estimate loop iterations does not want
   to issue any warnings, since it works with expressions which do not
   occur in user code.  Various bits of cleanup code call fold(), but
   only use the result if it has certain characteristics (e.g., is a
   constant); that code only wants to issue a warning if the result is
   used.  */

static int fold_deferring_overflow_warnings;

/* If a warning about undefined overflow is deferred, this is the
   warning.  Note that this may cause us to turn two warnings into
   one, but that is fine since it is sufficient to only give one
   warning per expression.  */

static const char* fold_deferred_overflow_warning;

/* If a warning about undefined overflow is deferred, this is the
   level at which the warning should be emitted.  */

static enum warn_strict_overflow_code fold_deferred_overflow_code;
/* Start deferring overflow warnings.  We could use a stack here to
   permit nested calls, but at present it is not necessary.  */

void
fold_defer_overflow_warnings (void)
{
  ++fold_deferring_overflow_warnings;
}
/* Stop deferring overflow warnings.  If there is a pending warning,
   and ISSUE is true, then issue the warning if appropriate.  STMT is
   the statement with which the warning should be associated (used for
   location information); STMT may be NULL.  CODE is the level of the
   warning--a warn_strict_overflow_code value.  This function will use
   the smaller of CODE and the deferred code when deciding whether to
   issue the warning.  CODE may be zero to mean to always use the
   deferred code.  */

void
fold_undefer_overflow_warnings (bool issue, const_gimple stmt, int code)
{
  const char *warnmsg;
  location_t locus;

  gcc_assert (fold_deferring_overflow_warnings > 0);
  --fold_deferring_overflow_warnings;
  if (fold_deferring_overflow_warnings > 0)
    {
      if (fold_deferred_overflow_warning != NULL
          && code != 0
          && code < (int) fold_deferred_overflow_code)
        fold_deferred_overflow_code = (enum warn_strict_overflow_code) code;
      return;
    }

  warnmsg = fold_deferred_overflow_warning;
  fold_deferred_overflow_warning = NULL;

  if (!issue || warnmsg == NULL)
    return;

  if (gimple_no_warning_p (stmt))
    return;

  /* Use the smallest code level when deciding to issue the
     warning.  */
  if (code == 0 || code > (int) fold_deferred_overflow_code)
    code = fold_deferred_overflow_code;

  if (!issue_strict_overflow_warning (code))
    return;

  if (stmt == NULL)
    locus = input_location;
  else
    locus = gimple_location (stmt);
  warning_at (locus, OPT_Wstrict_overflow, "%s", warnmsg);
}
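
/* A typical caller brackets a speculative fold with this deferral API,
   roughly (a sketch; the folded expression and the decision to keep the
   result are hypothetical):

     fold_defer_overflow_warnings ();
     t = fold_binary (PLUS_EXPR, type, op0, op1);
     fold_undefer_overflow_warnings (t != NULL_TREE, stmt, 0);

   so that any "assuming signed overflow does not occur" warning is only
   emitted when the folded result is actually kept.  */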
/* Stop deferring overflow warnings, ignoring any deferred
   warnings.  */

void
fold_undefer_and_ignore_overflow_warnings (void)
{
  fold_undefer_overflow_warnings (false, NULL, 0);
}

/* Whether we are deferring overflow warnings.  */

bool
fold_deferring_overflow_warnings_p (void)
{
  return fold_deferring_overflow_warnings > 0;
}
/* This is called when we fold something based on the fact that signed
   overflow is undefined.  */

static void
fold_overflow_warning (const char* gmsgid, enum warn_strict_overflow_code wc)
{
  if (fold_deferring_overflow_warnings > 0)
    {
      if (fold_deferred_overflow_warning == NULL
          || wc < fold_deferred_overflow_code)
        {
          fold_deferred_overflow_warning = gmsgid;
          fold_deferred_overflow_code = wc;
        }
    }
  else if (issue_strict_overflow_warning (wc))
    warning (OPT_Wstrict_overflow, gmsgid);
}
/* Return true if the built-in mathematical function specified by CODE
   is odd, i.e. -f(x) == f(-x).  */

static bool
negate_mathfn_p (enum built_in_function code)
{
  switch (code)
    {
    CASE_FLT_FN (BUILT_IN_ASIN):
    CASE_FLT_FN (BUILT_IN_ASINH):
    CASE_FLT_FN (BUILT_IN_ATAN):
    CASE_FLT_FN (BUILT_IN_ATANH):
    CASE_FLT_FN (BUILT_IN_CASIN):
    CASE_FLT_FN (BUILT_IN_CASINH):
    CASE_FLT_FN (BUILT_IN_CATAN):
    CASE_FLT_FN (BUILT_IN_CATANH):
    CASE_FLT_FN (BUILT_IN_CBRT):
    CASE_FLT_FN (BUILT_IN_CPROJ):
    CASE_FLT_FN (BUILT_IN_CSIN):
    CASE_FLT_FN (BUILT_IN_CSINH):
    CASE_FLT_FN (BUILT_IN_CTAN):
    CASE_FLT_FN (BUILT_IN_CTANH):
    CASE_FLT_FN (BUILT_IN_ERF):
    CASE_FLT_FN (BUILT_IN_LLROUND):
    CASE_FLT_FN (BUILT_IN_LROUND):
    CASE_FLT_FN (BUILT_IN_ROUND):
    CASE_FLT_FN (BUILT_IN_SIN):
    CASE_FLT_FN (BUILT_IN_SINH):
    CASE_FLT_FN (BUILT_IN_TAN):
    CASE_FLT_FN (BUILT_IN_TANH):
    CASE_FLT_FN (BUILT_IN_TRUNC):
      return true;

    CASE_FLT_FN (BUILT_IN_LLRINT):
    CASE_FLT_FN (BUILT_IN_LRINT):
    CASE_FLT_FN (BUILT_IN_NEARBYINT):
    CASE_FLT_FN (BUILT_IN_RINT):
      return false;

    default:
      break;
    }
  return false;
}
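
/* For example, sin is odd: sin (-x) == -sin (x), so -sin (x) may be
   rewritten as sin (-x) without changing the result.  round qualifies
   as well since round (-x) == -round (x), whereas rint only qualifies
   when -frounding-math is off: under a dynamic upward rounding mode,
   rint (0.5) is 1 but rint (-0.5) is -0, breaking the symmetry.  */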
/* Check whether we may negate an integer constant T without causing
   overflow.  */

bool
may_negate_without_overflow_p (const_tree t)
{
  tree type;

  gcc_assert (TREE_CODE (t) == INTEGER_CST);

  type = TREE_TYPE (t);
  if (TYPE_UNSIGNED (type))
    return false;

  return !wi::only_sign_bit_p (t);
}
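
/* For a 32-bit signed type this rejects exactly INT_MIN, the value
   with only the sign bit set, since -INT_MIN is not representable;
   every other value negates safely.  All unsigned types are rejected
   outright.  */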
/* Determine whether an expression T can be cheaply negated using
   the function negate_expr without introducing undefined overflow.  */

static bool
negate_expr_p (tree t)
{
  tree type;

  if (t == 0)
    return false;

  type = TREE_TYPE (t);

  STRIP_SIGN_NOPS (t);
  switch (TREE_CODE (t))
    {
    case INTEGER_CST:
      if (INTEGRAL_TYPE_P (type) && TYPE_OVERFLOW_WRAPS (type))
        return true;

      /* Check that -CST will not overflow type.  */
      return may_negate_without_overflow_p (t);
    case BIT_NOT_EXPR:
      return (INTEGRAL_TYPE_P (type)
              && TYPE_OVERFLOW_WRAPS (type));

    case FIXED_CST:
      return true;

    case NEGATE_EXPR:
      return !TYPE_OVERFLOW_SANITIZED (type);

    case REAL_CST:
      /* We want to canonicalize to positive real constants.  Pretend
         that only negative ones can be easily negated.  */
      return REAL_VALUE_NEGATIVE (TREE_REAL_CST (t));

    case COMPLEX_CST:
      return negate_expr_p (TREE_REALPART (t))
             && negate_expr_p (TREE_IMAGPART (t));

    case VECTOR_CST:
      {
        if (FLOAT_TYPE_P (TREE_TYPE (type)) || TYPE_OVERFLOW_WRAPS (type))
          return true;

        int count = TYPE_VECTOR_SUBPARTS (type), i;

        for (i = 0; i < count; i++)
          if (!negate_expr_p (VECTOR_CST_ELT (t, i)))
            return false;

        return true;
      }

    case COMPLEX_EXPR:
      return negate_expr_p (TREE_OPERAND (t, 0))
             && negate_expr_p (TREE_OPERAND (t, 1));

    case CONJ_EXPR:
      return negate_expr_p (TREE_OPERAND (t, 0));

    case PLUS_EXPR:
      if (HONOR_SIGN_DEPENDENT_ROUNDING (element_mode (type))
          || HONOR_SIGNED_ZEROS (element_mode (type)))
        return false;
      /* -(A + B) -> (-B) - A.  */
      if (negate_expr_p (TREE_OPERAND (t, 1))
          && reorder_operands_p (TREE_OPERAND (t, 0),
                                 TREE_OPERAND (t, 1)))
        return true;
      /* -(A + B) -> (-A) - B.  */
      return negate_expr_p (TREE_OPERAND (t, 0));

    case MINUS_EXPR:
      /* We can't turn -(A-B) into B-A when we honor signed zeros.  */
      return !HONOR_SIGN_DEPENDENT_ROUNDING (element_mode (type))
             && !HONOR_SIGNED_ZEROS (element_mode (type))
             && reorder_operands_p (TREE_OPERAND (t, 0),
                                    TREE_OPERAND (t, 1));

    case MULT_EXPR:
      if (TYPE_UNSIGNED (TREE_TYPE (t)))
        break;

      /* Fall through.  */

    case RDIV_EXPR:
      if (! HONOR_SIGN_DEPENDENT_ROUNDING (element_mode (TREE_TYPE (t))))
        return negate_expr_p (TREE_OPERAND (t, 1))
               || negate_expr_p (TREE_OPERAND (t, 0));
      break;

    case TRUNC_DIV_EXPR:
    case ROUND_DIV_EXPR:
    case EXACT_DIV_EXPR:
      /* In general we can't negate A / B, because if A is INT_MIN and
         B is 1, we may turn this into INT_MIN / -1 which is undefined
         and actually traps on some architectures.  But if overflow is
         undefined, we can negate, because - (INT_MIN / 1) is an
         overflow.  */
      if (INTEGRAL_TYPE_P (TREE_TYPE (t)))
        {
          if (!TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (t)))
            break;
          /* If overflow is undefined then we have to be careful because
             we ask whether it's ok to associate the negate with the
             division which is not ok for example for
             -((a - b) / c) where (-(a - b)) / c may invoke undefined
             overflow because of negating INT_MIN.  So do not use
             negate_expr_p here but open-code the two important cases.  */
          if (TREE_CODE (TREE_OPERAND (t, 0)) == NEGATE_EXPR
              || (TREE_CODE (TREE_OPERAND (t, 0)) == INTEGER_CST
                  && may_negate_without_overflow_p (TREE_OPERAND (t, 0))))
            return true;
        }
      else if (negate_expr_p (TREE_OPERAND (t, 0)))
        return true;
      return negate_expr_p (TREE_OPERAND (t, 1));

    case NOP_EXPR:
      /* Negate -((double)float) as (double)(-float).  */
      if (TREE_CODE (type) == REAL_TYPE)
        {
          tree tem = strip_float_extensions (t);
          if (tem != t)
            return negate_expr_p (tem);
        }
      break;

    case CALL_EXPR:
      /* Negate -f(x) as f(-x).  */
      if (negate_mathfn_p (builtin_mathfn_code (t)))
        return negate_expr_p (CALL_EXPR_ARG (t, 0));
      break;

    case RSHIFT_EXPR:
      /* Optimize -((int)x >> 31) into (unsigned)x >> 31 for int.  */
      if (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST)
        {
          tree op1 = TREE_OPERAND (t, 1);
          if (wi::eq_p (op1, TYPE_PRECISION (type) - 1))
            return true;
        }
      break;

    default:
      break;
    }
  return false;
}
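
/* So, for example, negate_expr_p typically holds for (a - b) in a
   float type when neither signed zeros nor sign-dependent rounding
   are honored (say, under -ffast-math), since the negation is just
   b - a, and for (x * 5) in a signed integer type, where the constant
   operand can absorb the sign.  */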
/* Given T, an expression, return a folded tree for -T or NULL_TREE, if no
   simplification is possible.
   If negate_expr_p would return true for T, NULL_TREE will never be
   returned.  */

static tree
fold_negate_expr (location_t loc, tree t)
{
  tree type = TREE_TYPE (t);
  tree tem;

  switch (TREE_CODE (t))
    {
    /* Convert - (~A) to A + 1.  */
    case BIT_NOT_EXPR:
      if (INTEGRAL_TYPE_P (type))
        return fold_build2_loc (loc, PLUS_EXPR, type, TREE_OPERAND (t, 0),
                                build_one_cst (type));
      break;

    case INTEGER_CST:
      tem = fold_negate_const (t, type);
      if (TREE_OVERFLOW (tem) == TREE_OVERFLOW (t)
          || (ANY_INTEGRAL_TYPE_P (type)
              && !TYPE_OVERFLOW_TRAPS (type)
              && TYPE_OVERFLOW_WRAPS (type))
          || (flag_sanitize & SANITIZE_SI_OVERFLOW) == 0)
        return tem;
      break;

    case REAL_CST:
      tem = fold_negate_const (t, type);
      return tem;

    case FIXED_CST:
      tem = fold_negate_const (t, type);
      return tem;

    case COMPLEX_CST:
      {
        tree rpart = fold_negate_expr (loc, TREE_REALPART (t));
        tree ipart = fold_negate_expr (loc, TREE_IMAGPART (t));
        if (rpart && ipart)
          return build_complex (type, rpart, ipart);
      }
      break;

    case VECTOR_CST:
      {
        int count = TYPE_VECTOR_SUBPARTS (type), i;
        tree *elts = XALLOCAVEC (tree, count);

        for (i = 0; i < count; i++)
          {
            elts[i] = fold_negate_expr (loc, VECTOR_CST_ELT (t, i));
            if (elts[i] == NULL_TREE)
              return NULL_TREE;
          }

        return build_vector (type, elts);
      }

    case COMPLEX_EXPR:
      if (negate_expr_p (t))
        return fold_build2_loc (loc, COMPLEX_EXPR, type,
                                fold_negate_expr (loc, TREE_OPERAND (t, 0)),
                                fold_negate_expr (loc, TREE_OPERAND (t, 1)));
      break;

    case CONJ_EXPR:
      if (negate_expr_p (t))
        return fold_build1_loc (loc, CONJ_EXPR, type,
                                fold_negate_expr (loc, TREE_OPERAND (t, 0)));
      break;

    case NEGATE_EXPR:
      if (!TYPE_OVERFLOW_SANITIZED (type))
        return TREE_OPERAND (t, 0);
      break;

    case PLUS_EXPR:
      if (!HONOR_SIGN_DEPENDENT_ROUNDING (element_mode (type))
          && !HONOR_SIGNED_ZEROS (element_mode (type)))
        {
          /* -(A + B) -> (-B) - A.  */
          if (negate_expr_p (TREE_OPERAND (t, 1))
              && reorder_operands_p (TREE_OPERAND (t, 0),
                                     TREE_OPERAND (t, 1)))
            {
              tem = negate_expr (TREE_OPERAND (t, 1));
              return fold_build2_loc (loc, MINUS_EXPR, type,
                                      tem, TREE_OPERAND (t, 0));
            }

          /* -(A + B) -> (-A) - B.  */
          if (negate_expr_p (TREE_OPERAND (t, 0)))
            {
              tem = negate_expr (TREE_OPERAND (t, 0));
              return fold_build2_loc (loc, MINUS_EXPR, type,
                                      tem, TREE_OPERAND (t, 1));
            }
        }
      break;

    case MINUS_EXPR:
      /* - (A - B) -> B - A  */
      if (!HONOR_SIGN_DEPENDENT_ROUNDING (element_mode (type))
          && !HONOR_SIGNED_ZEROS (element_mode (type))
          && reorder_operands_p (TREE_OPERAND (t, 0), TREE_OPERAND (t, 1)))
        return fold_build2_loc (loc, MINUS_EXPR, type,
                                TREE_OPERAND (t, 1), TREE_OPERAND (t, 0));
      break;

    case MULT_EXPR:
      if (TYPE_UNSIGNED (type))
        break;

      /* Fall through.  */

    case RDIV_EXPR:
      if (! HONOR_SIGN_DEPENDENT_ROUNDING (element_mode (type)))
        {
          tem = TREE_OPERAND (t, 1);
          if (negate_expr_p (tem))
            return fold_build2_loc (loc, TREE_CODE (t), type,
                                    TREE_OPERAND (t, 0), negate_expr (tem));
          tem = TREE_OPERAND (t, 0);
          if (negate_expr_p (tem))
            return fold_build2_loc (loc, TREE_CODE (t), type,
                                    negate_expr (tem), TREE_OPERAND (t, 1));
        }
      break;

    case TRUNC_DIV_EXPR:
    case ROUND_DIV_EXPR:
    case EXACT_DIV_EXPR:
      /* In general we can't negate A / B, because if A is INT_MIN and
         B is 1, we may turn this into INT_MIN / -1 which is undefined
         and actually traps on some architectures.  But if overflow is
         undefined, we can negate, because - (INT_MIN / 1) is an
         overflow.  */
      if (!INTEGRAL_TYPE_P (type) || TYPE_OVERFLOW_UNDEFINED (type))
        {
          const char * const warnmsg = G_("assuming signed overflow does not "
                                          "occur when negating a division");
          tem = TREE_OPERAND (t, 1);
          if (negate_expr_p (tem))
            {
              if (INTEGRAL_TYPE_P (type)
                  && (TREE_CODE (tem) != INTEGER_CST
                      || integer_onep (tem)))
                fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_MISC);
              return fold_build2_loc (loc, TREE_CODE (t), type,
                                      TREE_OPERAND (t, 0), negate_expr (tem));
            }
          /* If overflow is undefined then we have to be careful because
             we ask whether it's ok to associate the negate with the
             division which is not ok for example for
             -((a - b) / c) where (-(a - b)) / c may invoke undefined
             overflow because of negating INT_MIN.  So do not use
             negate_expr_p here but open-code the two important cases.  */
          tem = TREE_OPERAND (t, 0);
          if ((INTEGRAL_TYPE_P (type)
               && (TREE_CODE (tem) == NEGATE_EXPR
                   || (TREE_CODE (tem) == INTEGER_CST
                       && may_negate_without_overflow_p (tem))))
              || !INTEGRAL_TYPE_P (type))
            return fold_build2_loc (loc, TREE_CODE (t), type,
                                    negate_expr (tem), TREE_OPERAND (t, 1));
        }
      break;

    case NOP_EXPR:
      /* Convert -((double)float) into (double)(-float).  */
      if (TREE_CODE (type) == REAL_TYPE)
        {
          tem = strip_float_extensions (t);
          if (tem != t && negate_expr_p (tem))
            return fold_convert_loc (loc, type, negate_expr (tem));
        }
      break;

    case CALL_EXPR:
      /* Negate -f(x) as f(-x).  */
      if (negate_mathfn_p (builtin_mathfn_code (t))
          && negate_expr_p (CALL_EXPR_ARG (t, 0)))
        {
          tree fndecl, arg;

          fndecl = get_callee_fndecl (t);
          arg = negate_expr (CALL_EXPR_ARG (t, 0));
          return build_call_expr_loc (loc, fndecl, 1, arg);
        }
      break;

    case RSHIFT_EXPR:
      /* Optimize -((int)x >> 31) into (unsigned)x >> 31 for int.  */
      if (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST)
        {
          tree op1 = TREE_OPERAND (t, 1);
          if (wi::eq_p (op1, TYPE_PRECISION (type) - 1))
            {
              tree ntype = TYPE_UNSIGNED (type)
                           ? signed_type_for (type)
                           : unsigned_type_for (type);
              tree temp = fold_convert_loc (loc, ntype, TREE_OPERAND (t, 0));
              temp = fold_build2_loc (loc, RSHIFT_EXPR, ntype, temp, op1);
              return fold_convert_loc (loc, type, temp);
            }
        }
      break;

    default:
      break;
    }

  return NULL_TREE;
}
/* Like fold_negate_expr, but return a NEGATE_EXPR tree, if T cannot be
   negated in a simpler way.  Also allow for T to be NULL_TREE, in which case
   return NULL_TREE.  */

static tree
negate_expr (tree t)
{
  tree type, tem;
  location_t loc;

  if (t == NULL_TREE)
    return NULL_TREE;

  loc = EXPR_LOCATION (t);
  type = TREE_TYPE (t);
  STRIP_SIGN_NOPS (t);

  tem = fold_negate_expr (loc, t);
  if (!tem)
    tem = build1_loc (loc, NEGATE_EXPR, TREE_TYPE (t), t);
  return fold_convert_loc (loc, type, tem);
}
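
/* For instance, negate_expr on the INTEGER_CST 5 yields -5 directly,
   while on an expression it cannot simplify it falls back to wrapping
   the operand in a fresh NEGATE_EXPR node.  */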
/* Split a tree IN into a constant, literal and variable parts that could be
   combined with CODE to make IN.  "constant" means an expression with
   TREE_CONSTANT but that isn't an actual constant.  CODE must be a
   commutative arithmetic operation.  Store the constant part into *CONP,
   the literal in *LITP and return the variable part.  If a part isn't
   present, set it to null.  If the tree does not decompose in this way,
   return the entire tree as the variable part and the other parts as null.

   If CODE is PLUS_EXPR we also split trees that use MINUS_EXPR.  In that
   case, we negate an operand that was subtracted.  Except if it is a
   literal for which we use *MINUS_LITP instead.

   If NEGATE_P is true, we are negating all of IN, again except a literal
   for which we use *MINUS_LITP instead.

   If IN is itself a literal or constant, return it as appropriate.

   Note that we do not guarantee that any of the three values will be the
   same type as IN, but they will have the same signedness and mode.  */
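
/* As an illustration: splitting IN = x + 5 with CODE == PLUS_EXPR sets
   *LITP to 5 and returns x as the variable part, while splitting
   IN = x - 3 with CODE == PLUS_EXPR sets *MINUS_LITP to 3 (the
   subtracted literal) and likewise returns x.  */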
static tree
split_tree (tree in, enum tree_code code, tree *conp, tree *litp,
            tree *minus_litp, int negate_p)
{
  tree var = 0;

  *conp = 0;
  *litp = 0;
  *minus_litp = 0;

  /* Strip any conversions that don't change the machine mode or signedness.  */
  STRIP_SIGN_NOPS (in);

  if (TREE_CODE (in) == INTEGER_CST || TREE_CODE (in) == REAL_CST
      || TREE_CODE (in) == FIXED_CST)
    *litp = in;
  else if (TREE_CODE (in) == code
           || ((! FLOAT_TYPE_P (TREE_TYPE (in)) || flag_associative_math)
               && ! SAT_FIXED_POINT_TYPE_P (TREE_TYPE (in))
               /* We can associate addition and subtraction together (even
                  though the C standard doesn't say so) for integers because
                  the value is not affected.  For reals, the value might be
                  affected, so we can't.  */
               && ((code == PLUS_EXPR && TREE_CODE (in) == MINUS_EXPR)
                   || (code == MINUS_EXPR && TREE_CODE (in) == PLUS_EXPR))))
    {
      tree op0 = TREE_OPERAND (in, 0);
      tree op1 = TREE_OPERAND (in, 1);
      int neg1_p = TREE_CODE (in) == MINUS_EXPR;
      int neg_litp_p = 0, neg_conp_p = 0, neg_var_p = 0;

      /* First see if either of the operands is a literal, then a constant.  */
      if (TREE_CODE (op0) == INTEGER_CST || TREE_CODE (op0) == REAL_CST
          || TREE_CODE (op0) == FIXED_CST)
        *litp = op0, op0 = 0;
      else if (TREE_CODE (op1) == INTEGER_CST || TREE_CODE (op1) == REAL_CST
               || TREE_CODE (op1) == FIXED_CST)
        *litp = op1, neg_litp_p = neg1_p, op1 = 0;

      if (op0 != 0 && TREE_CONSTANT (op0))
        *conp = op0, op0 = 0;
      else if (op1 != 0 && TREE_CONSTANT (op1))
        *conp = op1, neg_conp_p = neg1_p, op1 = 0;

      /* If we haven't dealt with either operand, this is not a case we can
         decompose.  Otherwise, VAR is either of the ones remaining, if any.  */
      if (op0 != 0 && op1 != 0)
        var = in;
      else if (op0 != 0)
        var = op0;
      else
        var = op1, neg_var_p = neg1_p;

      /* Now do any needed negations.  */
      if (neg_litp_p)
        *minus_litp = *litp, *litp = 0;
      if (neg_conp_p)
        *conp = negate_expr (*conp);
      if (neg_var_p)
        var = negate_expr (var);
    }
  else if (TREE_CODE (in) == BIT_NOT_EXPR
           && code == PLUS_EXPR)
    {
      /* -X - 1 is folded to ~X, undo that here.  */
      *minus_litp = build_one_cst (TREE_TYPE (in));
      var = negate_expr (TREE_OPERAND (in, 0));
    }
  else if (TREE_CONSTANT (in))
    *conp = in;
  else
    var = in;

  if (negate_p)
    {
      if (*litp)
        *minus_litp = *litp, *litp = 0;
      else if (*minus_litp)
        *litp = *minus_litp, *minus_litp = 0;
      *conp = negate_expr (*conp);
      var = negate_expr (var);
    }

  return var;
}
/* Re-associate trees split by the above function.  T1 and T2 are
   either expressions to associate or null.  Return the new
   expression, if any.  LOC is the location of the new expression.  If
   we build an operation, do it in TYPE and with CODE.  */

static tree
associate_trees (location_t loc, tree t1, tree t2, enum tree_code code, tree type)
{
  if (t1 == 0)
    return t2;
  else if (t2 == 0)
    return t1;

  /* If either input is CODE, a PLUS_EXPR, or a MINUS_EXPR, don't
     try to fold this since we will have infinite recursion.  But do
     deal with any NEGATE_EXPRs.  */
  if (TREE_CODE (t1) == code || TREE_CODE (t2) == code
      || TREE_CODE (t1) == MINUS_EXPR || TREE_CODE (t2) == MINUS_EXPR)
    {
      if (code == PLUS_EXPR)
        {
          if (TREE_CODE (t1) == NEGATE_EXPR)
            return build2_loc (loc, MINUS_EXPR, type,
                               fold_convert_loc (loc, type, t2),
                               fold_convert_loc (loc, type,
                                                 TREE_OPERAND (t1, 0)));
          else if (TREE_CODE (t2) == NEGATE_EXPR)
            return build2_loc (loc, MINUS_EXPR, type,
                               fold_convert_loc (loc, type, t1),
                               fold_convert_loc (loc, type,
                                                 TREE_OPERAND (t2, 0)));
          else if (integer_zerop (t2))
            return fold_convert_loc (loc, type, t1);
        }
      else if (code == MINUS_EXPR)
        {
          if (integer_zerop (t2))
            return fold_convert_loc (loc, type, t1);
        }

      return build2_loc (loc, code, type, fold_convert_loc (loc, type, t1),
                         fold_convert_loc (loc, type, t2));
    }

  return fold_build2_loc (loc, code, type, fold_convert_loc (loc, type, t1),
                          fold_convert_loc (loc, type, t2));
}
/* Check whether TYPE1 and TYPE2 are equivalent integer types, suitable
   for use in int_const_binop, size_binop and size_diffop.  */

static bool
int_binop_types_match_p (enum tree_code code, const_tree type1, const_tree type2)
{
  if (!INTEGRAL_TYPE_P (type1) && !POINTER_TYPE_P (type1))
    return false;
  if (!INTEGRAL_TYPE_P (type2) && !POINTER_TYPE_P (type2))
    return false;

  switch (code)
    {
    case LSHIFT_EXPR:
    case RSHIFT_EXPR:
    case LROTATE_EXPR:
    case RROTATE_EXPR:
      return true;

    default:
      break;
    }

  return TYPE_UNSIGNED (type1) == TYPE_UNSIGNED (type2)
         && TYPE_PRECISION (type1) == TYPE_PRECISION (type2)
         && TYPE_MODE (type1) == TYPE_MODE (type2);
}
/* Combine two integer constants ARG1 and ARG2 under operation CODE
   to produce a new constant.  Return NULL_TREE if we don't know how
   to evaluate CODE at compile-time.  */

static tree
int_const_binop_1 (enum tree_code code, const_tree arg1, const_tree parg2,
                   int overflowable)
{
  wide_int res;
  tree t;
  tree type = TREE_TYPE (arg1);
  signop sign = TYPE_SIGN (type);
  bool overflow = false;

  wide_int arg2 = wide_int::from (parg2, TYPE_PRECISION (type),
                                  TYPE_SIGN (TREE_TYPE (parg2)));

  switch (code)
    {
    case BIT_IOR_EXPR:
      res = wi::bit_or (arg1, arg2);
      break;

    case BIT_XOR_EXPR:
      res = wi::bit_xor (arg1, arg2);
      break;

    case BIT_AND_EXPR:
      res = wi::bit_and (arg1, arg2);
      break;

    case RSHIFT_EXPR:
    case LSHIFT_EXPR:
      if (wi::neg_p (arg2))
        {
          arg2 = -arg2;
          if (code == RSHIFT_EXPR)
            code = LSHIFT_EXPR;
          else
            code = RSHIFT_EXPR;
        }

      if (code == RSHIFT_EXPR)
        /* It's unclear from the C standard whether shifts can overflow.
           The following code ignores overflow; perhaps a C standard
           interpretation ruling is needed.  */
        res = wi::rshift (arg1, arg2, sign);
      else
        res = wi::lshift (arg1, arg2);
      break;

    case RROTATE_EXPR:
    case LROTATE_EXPR:
      if (wi::neg_p (arg2))
        {
          arg2 = -arg2;
          if (code == RROTATE_EXPR)
            code = LROTATE_EXPR;
          else
            code = RROTATE_EXPR;
        }

      if (code == RROTATE_EXPR)
        res = wi::rrotate (arg1, arg2);
      else
        res = wi::lrotate (arg1, arg2);
      break;

    case PLUS_EXPR:
      res = wi::add (arg1, arg2, sign, &overflow);
      break;

    case MINUS_EXPR:
      res = wi::sub (arg1, arg2, sign, &overflow);
      break;

    case MULT_EXPR:
      res = wi::mul (arg1, arg2, sign, &overflow);
      break;

    case MULT_HIGHPART_EXPR:
      res = wi::mul_high (arg1, arg2, sign);
      break;

    case TRUNC_DIV_EXPR:
    case EXACT_DIV_EXPR:
      if (arg2 == 0)
        return NULL_TREE;
      res = wi::div_trunc (arg1, arg2, sign, &overflow);
      break;

    case FLOOR_DIV_EXPR:
      if (arg2 == 0)
        return NULL_TREE;
      res = wi::div_floor (arg1, arg2, sign, &overflow);
      break;

    case CEIL_DIV_EXPR:
      if (arg2 == 0)
        return NULL_TREE;
      res = wi::div_ceil (arg1, arg2, sign, &overflow);
      break;

    case ROUND_DIV_EXPR:
      if (arg2 == 0)
        return NULL_TREE;
      res = wi::div_round (arg1, arg2, sign, &overflow);
      break;

    case TRUNC_MOD_EXPR:
      if (arg2 == 0)
        return NULL_TREE;
      res = wi::mod_trunc (arg1, arg2, sign, &overflow);
      break;

    case FLOOR_MOD_EXPR:
      if (arg2 == 0)
        return NULL_TREE;
      res = wi::mod_floor (arg1, arg2, sign, &overflow);
      break;

    case CEIL_MOD_EXPR:
      if (arg2 == 0)
        return NULL_TREE;
      res = wi::mod_ceil (arg1, arg2, sign, &overflow);
      break;

    case ROUND_MOD_EXPR:
      if (arg2 == 0)
        return NULL_TREE;
      res = wi::mod_round (arg1, arg2, sign, &overflow);
      break;

    case MIN_EXPR:
      res = wi::min (arg1, arg2, sign);
      break;

    case MAX_EXPR:
      res = wi::max (arg1, arg2, sign);
      break;

    default:
      return NULL_TREE;
    }

  t = force_fit_type (type, res, overflowable,
                      (((sign == SIGNED || overflowable == -1)
                        && overflow)
                       | TREE_OVERFLOW (arg1) | TREE_OVERFLOW (parg2)));

  return t;
}
tree
int_const_binop (enum tree_code code, const_tree arg1, const_tree arg2)
{
  return int_const_binop_1 (code, arg1, arg2, 1);
}
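
/* For example, int_const_binop on the constants -7 and 2 yields -3
   for TRUNC_DIV_EXPR (rounding toward zero), -4 for FLOOR_DIV_EXPR
   and -3 for CEIL_DIV_EXPR; division by a zero constant yields
   NULL_TREE rather than a folded result.  */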
/* Combine two constants ARG1 and ARG2 under operation CODE to produce a new
   constant.  We assume ARG1 and ARG2 have the same data type, or at least
   are the same kind of constant and the same machine mode.  Return zero if
   combining the constants is not allowed in the current operating mode.  */

static tree
const_binop (enum tree_code code, tree arg1, tree arg2)
{
  /* Sanity check for the recursive cases.  */
  if (!arg1 || !arg2)
    return NULL_TREE;

  STRIP_NOPS (arg1);
  STRIP_NOPS (arg2);

  if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg2) == INTEGER_CST)
    {
      if (code == POINTER_PLUS_EXPR)
        return int_const_binop (PLUS_EXPR,
                                arg1, fold_convert (TREE_TYPE (arg1), arg2));

      return int_const_binop (code, arg1, arg2);
    }

  if (TREE_CODE (arg1) == REAL_CST && TREE_CODE (arg2) == REAL_CST)
    {
      machine_mode mode;
      REAL_VALUE_TYPE d1;
      REAL_VALUE_TYPE d2;
      REAL_VALUE_TYPE value;
      REAL_VALUE_TYPE result;
      bool inexact;
      tree t, type;

      /* The following codes are handled by real_arithmetic.  */
      switch (code)
        {
        case PLUS_EXPR:
        case MINUS_EXPR:
        case MULT_EXPR:
        case RDIV_EXPR:
        case MIN_EXPR:
        case MAX_EXPR:
          break;

        default:
          return NULL_TREE;
        }

      d1 = TREE_REAL_CST (arg1);
      d2 = TREE_REAL_CST (arg2);

      type = TREE_TYPE (arg1);
      mode = TYPE_MODE (type);

      /* Don't perform operation if we honor signaling NaNs and
         either operand is a NaN.  */
      if (HONOR_SNANS (mode)
          && (REAL_VALUE_ISNAN (d1) || REAL_VALUE_ISNAN (d2)))
        return NULL_TREE;

      /* Don't perform operation if it would raise a division
         by zero exception.  */
      if (code == RDIV_EXPR
          && REAL_VALUES_EQUAL (d2, dconst0)
          && (flag_trapping_math || ! MODE_HAS_INFINITIES (mode)))
        return NULL_TREE;

      /* If either operand is a NaN, just return it.  Otherwise, set up
         for floating-point trap; we return an overflow.  */
      if (REAL_VALUE_ISNAN (d1))
        return arg1;
      else if (REAL_VALUE_ISNAN (d2))
        return arg2;

      inexact = real_arithmetic (&value, code, &d1, &d2);
      real_convert (&result, mode, &value);

      /* Don't constant fold this floating point operation if
         the result has overflowed and flag_trapping_math.  */
      if (flag_trapping_math
          && MODE_HAS_INFINITIES (mode)
          && REAL_VALUE_ISINF (result)
          && !REAL_VALUE_ISINF (d1)
          && !REAL_VALUE_ISINF (d2))
        return NULL_TREE;

      /* Don't constant fold this floating point operation if the
         result may depend upon the run-time rounding mode and
         flag_rounding_math is set, or if GCC's software emulation
         is unable to accurately represent the result.  */
      if ((flag_rounding_math
           || (MODE_COMPOSITE_P (mode) && !flag_unsafe_math_optimizations))
          && (inexact || !real_identical (&result, &value)))
        return NULL_TREE;

      t = build_real (type, result);

      TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2);
      return t;
    }

  if (TREE_CODE (arg1) == FIXED_CST)
    {
      FIXED_VALUE_TYPE f1;
      FIXED_VALUE_TYPE f2;
      FIXED_VALUE_TYPE result;
      tree t, type;
      int sat_p;
      bool overflow_p;

      /* The following codes are handled by fixed_arithmetic.  */
      switch (code)
        {
        case PLUS_EXPR:
        case MINUS_EXPR:
        case MULT_EXPR:
        case TRUNC_DIV_EXPR:
          if (TREE_CODE (arg2) != FIXED_CST)
            return NULL_TREE;
          f2 = TREE_FIXED_CST (arg2);
          break;

        case LSHIFT_EXPR:
        case RSHIFT_EXPR:
          {
            if (TREE_CODE (arg2) != INTEGER_CST)
              return NULL_TREE;
            wide_int w2 = arg2;
            f2.data.high = w2.elt (1);
            f2.data.low = w2.elt (0);
            f2.mode = SImode;
          }
          break;

        default:
          return NULL_TREE;
        }

      f1 = TREE_FIXED_CST (arg1);
      type = TREE_TYPE (arg1);
      sat_p = TYPE_SATURATING (type);
      overflow_p = fixed_arithmetic (&result, code, &f1, &f2, sat_p);
      t = build_fixed (type, result);
      /* Propagate overflow flags.  */
      if (overflow_p | TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2))
        TREE_OVERFLOW (t) = 1;
      return t;
    }

  if (TREE_CODE (arg1) == COMPLEX_CST && TREE_CODE (arg2) == COMPLEX_CST)
    {
      tree type = TREE_TYPE (arg1);
      tree r1 = TREE_REALPART (arg1);
      tree i1 = TREE_IMAGPART (arg1);
      tree r2 = TREE_REALPART (arg2);
      tree i2 = TREE_IMAGPART (arg2);
      tree real, imag;

      switch (code)
        {
        case PLUS_EXPR:
        case MINUS_EXPR:
          real = const_binop (code, r1, r2);
          imag = const_binop (code, i1, i2);
          break;

        case MULT_EXPR:
          if (COMPLEX_FLOAT_TYPE_P (type))
            return do_mpc_arg2 (arg1, arg2, type,
                                /* do_nonfinite= */ folding_initializer,
                                mpc_mul);

          real = const_binop (MINUS_EXPR,
                              const_binop (MULT_EXPR, r1, r2),
                              const_binop (MULT_EXPR, i1, i2));
          imag = const_binop (PLUS_EXPR,
                              const_binop (MULT_EXPR, r1, i2),
                              const_binop (MULT_EXPR, i1, r2));
          break;

        case RDIV_EXPR:
          if (COMPLEX_FLOAT_TYPE_P (type))
            return do_mpc_arg2 (arg1, arg2, type,
                                /* do_nonfinite= */ folding_initializer,
                                mpc_div);
          /* Fallthru ...  */
        case TRUNC_DIV_EXPR:
        case CEIL_DIV_EXPR:
        case FLOOR_DIV_EXPR:
        case ROUND_DIV_EXPR:
          if (flag_complex_method == 0)
            {
              /* Keep this algorithm in sync with
                 tree-complex.c:expand_complex_div_straight().

                 Expand complex division to scalars, straightforward algorithm.
                 a / b = ((ar*br + ai*bi)/t) + i((ai*br - ar*bi)/t)
                 t = br*br + bi*bi  */
              tree magsquared
                = const_binop (PLUS_EXPR,
                               const_binop (MULT_EXPR, r2, r2),
                               const_binop (MULT_EXPR, i2, i2));
              tree t1
                = const_binop (PLUS_EXPR,
                               const_binop (MULT_EXPR, r1, r2),
                               const_binop (MULT_EXPR, i1, i2));
              tree t2
                = const_binop (MINUS_EXPR,
                               const_binop (MULT_EXPR, i1, r2),
                               const_binop (MULT_EXPR, r1, i2));

              real = const_binop (code, t1, magsquared);
              imag = const_binop (code, t2, magsquared);
            }
          else
            {
              /* Keep this algorithm in sync with
                 tree-complex.c:expand_complex_div_wide().

                 Expand complex division to scalars, modified algorithm to
                 minimize overflow with wide input ranges.  */
              tree compare = fold_build2 (LT_EXPR, boolean_type_node,
                                          fold_abs_const (r2, TREE_TYPE (type)),
                                          fold_abs_const (i2, TREE_TYPE (type)));

              if (integer_nonzerop (compare))
                {
                  /* In the TRUE branch, we compute
                     ratio = br/bi;
                     div = (br * ratio) + bi;
                     tr = (ar * ratio) + ai;
                     ti = (ai * ratio) - ar;
                     tr = tr / div;
                     ti = ti / div;  */
                  tree ratio = const_binop (code, r2, i2);
                  tree div = const_binop (PLUS_EXPR, i2,
                                          const_binop (MULT_EXPR, r2, ratio));
                  real = const_binop (MULT_EXPR, r1, ratio);
                  real = const_binop (PLUS_EXPR, real, i1);
                  real = const_binop (code, real, div);

                  imag = const_binop (MULT_EXPR, i1, ratio);
                  imag = const_binop (MINUS_EXPR, imag, r1);
                  imag = const_binop (code, imag, div);
                }
              else
                {
                  /* In the FALSE branch, we compute
                     ratio = bi/br;
                     div = (bi * ratio) + br;
                     tr = (ai * ratio) + ar;
                     ti = ai - (ar * ratio);
                     tr = tr / div;
                     ti = ti / div;  */
                  tree ratio = const_binop (code, i2, r2);
                  tree div = const_binop (PLUS_EXPR, r2,
                                          const_binop (MULT_EXPR, i2, ratio));

                  real = const_binop (MULT_EXPR, i1, ratio);
                  real = const_binop (PLUS_EXPR, real, r1);
                  real = const_binop (code, real, div);

                  imag = const_binop (MULT_EXPR, r1, ratio);
                  imag = const_binop (MINUS_EXPR, i1, imag);
                  imag = const_binop (code, imag, div);
                }
            }
          break;

        default:
          return NULL_TREE;
        }

      if (real && imag)
        return build_complex (type, real, imag);
    }

  if (TREE_CODE (arg1) == VECTOR_CST
      && TREE_CODE (arg2) == VECTOR_CST)
    {
      tree type = TREE_TYPE (arg1);
      int count = TYPE_VECTOR_SUBPARTS (type), i;
      tree *elts = XALLOCAVEC (tree, count);

      for (i = 0; i < count; i++)
        {
          tree elem1 = VECTOR_CST_ELT (arg1, i);
          tree elem2 = VECTOR_CST_ELT (arg2, i);

          elts[i] = const_binop (code, elem1, elem2);

          /* It is possible that const_binop cannot handle the given
             code and returns NULL_TREE.  */
          if (elts[i] == NULL_TREE)
            return NULL_TREE;
        }

      return build_vector (type, elts);
    }

  /* Shifts allow a scalar offset for a vector.  */
  if (TREE_CODE (arg1) == VECTOR_CST
      && TREE_CODE (arg2) == INTEGER_CST)
    {
      tree type = TREE_TYPE (arg1);
      int count = TYPE_VECTOR_SUBPARTS (type), i;
      tree *elts = XALLOCAVEC (tree, count);

      for (i = 0; i < count; i++)
        {
          tree elem1 = VECTOR_CST_ELT (arg1, i);

          elts[i] = const_binop (code, elem1, arg2);

          /* It is possible that const_binop cannot handle the given
             code and returns NULL_TREE.  */
          if (elts[i] == NULL_TREE)
            return NULL_TREE;
        }

      return build_vector (type, elts);
    }
  return NULL_TREE;
}
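
/* As a worked example of the straightforward complex multiply above:
   for arg1 = 1 + 2i and arg2 = 3 + 4i (integer complex constants),
   real = 1*3 - 2*4 = -5 and imag = 1*4 + 2*3 = 10, so the fold
   produces the constant -5 + 10i.  */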
/* Overload that adds a TYPE parameter to be able to dispatch
   to fold_relational_const.  */

tree
const_binop (enum tree_code code, tree type, tree arg1, tree arg2)
{
  if (TREE_CODE_CLASS (code) == tcc_comparison)
    return fold_relational_const (code, type, arg1, arg2);

  /* ??? Until we make the const_binop worker take the type of the
     result as argument put those cases that need it here.  */
  switch (code)
    {
    case COMPLEX_EXPR:
      if ((TREE_CODE (arg1) == REAL_CST
           && TREE_CODE (arg2) == REAL_CST)
          || (TREE_CODE (arg1) == INTEGER_CST
              && TREE_CODE (arg2) == INTEGER_CST))
        return build_complex (type, arg1, arg2);
      return NULL_TREE;

    case VEC_PACK_TRUNC_EXPR:
    case VEC_PACK_FIX_TRUNC_EXPR:
      {
        unsigned int nelts = TYPE_VECTOR_SUBPARTS (type), i;
        tree *elts;

        gcc_assert (TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg1)) == nelts / 2
                    && TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg2)) == nelts / 2);
        if (TREE_CODE (arg1) != VECTOR_CST
            || TREE_CODE (arg2) != VECTOR_CST)
          return NULL_TREE;

        elts = XALLOCAVEC (tree, nelts);
        if (!vec_cst_ctor_to_array (arg1, elts)
            || !vec_cst_ctor_to_array (arg2, elts + nelts / 2))
          return NULL_TREE;

        for (i = 0; i < nelts; i++)
          {
            elts[i] = fold_convert_const (code == VEC_PACK_TRUNC_EXPR
                                          ? NOP_EXPR : FIX_TRUNC_EXPR,
                                          TREE_TYPE (type), elts[i]);
            if (elts[i] == NULL_TREE || !CONSTANT_CLASS_P (elts[i]))
              return NULL_TREE;
          }

        return build_vector (type, elts);
      }

    case VEC_WIDEN_MULT_LO_EXPR:
    case VEC_WIDEN_MULT_HI_EXPR:
    case VEC_WIDEN_MULT_EVEN_EXPR:
    case VEC_WIDEN_MULT_ODD_EXPR:
      {
        unsigned int nelts = TYPE_VECTOR_SUBPARTS (type);
        unsigned int out, ofs, scale;
        tree *elts;

        gcc_assert (TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg1)) == nelts * 2
                    && TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg2)) == nelts * 2);
        if (TREE_CODE (arg1) != VECTOR_CST || TREE_CODE (arg2) != VECTOR_CST)
          return NULL_TREE;

        elts = XALLOCAVEC (tree, nelts * 4);
        if (!vec_cst_ctor_to_array (arg1, elts)
            || !vec_cst_ctor_to_array (arg2, elts + nelts * 2))
          return NULL_TREE;

        if (code == VEC_WIDEN_MULT_LO_EXPR)
          scale = 0, ofs = BYTES_BIG_ENDIAN ? nelts : 0;
        else if (code == VEC_WIDEN_MULT_HI_EXPR)
          scale = 0, ofs = BYTES_BIG_ENDIAN ? 0 : nelts;
        else if (code == VEC_WIDEN_MULT_EVEN_EXPR)
          scale = 1, ofs = 0;
        else /* if (code == VEC_WIDEN_MULT_ODD_EXPR) */
          scale = 1, ofs = 1;

        for (out = 0; out < nelts; out++)
          {
            unsigned int in1 = (out << scale) + ofs;
            unsigned int in2 = in1 + nelts * 2;
            tree t1, t2;

            t1 = fold_convert_const (NOP_EXPR, TREE_TYPE (type), elts[in1]);
            t2 = fold_convert_const (NOP_EXPR, TREE_TYPE (type), elts[in2]);

            if (t1 == NULL_TREE || t2 == NULL_TREE)
              return NULL_TREE;
            elts[out] = const_binop (MULT_EXPR, t1, t2);
            if (elts[out] == NULL_TREE || !CONSTANT_CLASS_P (elts[out]))
              return NULL_TREE;
          }

        return build_vector (type, elts);
      }

    default:;
    }

  /* Make sure type and arg0 have the same saturating flag.  */
  gcc_checking_assert (TYPE_SATURATING (type)
                       == TYPE_SATURATING (TREE_TYPE (arg1)));
  return const_binop (code, arg1, arg2);
}
/* Compute CODE ARG1 with resulting type TYPE with ARG1 being constant.
   Return zero if computing the constants is not possible.  */

tree
const_unop (enum tree_code code, tree type, tree arg0)
{
  switch (code)
    {
    CASE_CONVERT:
    case FLOAT_EXPR:
    case FIX_TRUNC_EXPR:
    case FIXED_CONVERT_EXPR:
      return fold_convert_const (code, type, arg0);

    case ADDR_SPACE_CONVERT_EXPR:
      if (integer_zerop (arg0))
        return fold_convert_const (code, type, arg0);
      break;

    case VIEW_CONVERT_EXPR:
      return fold_view_convert_expr (type, arg0);

    case NEGATE_EXPR:
      {
        /* Can't call fold_negate_const directly here as that doesn't
           handle all cases and we might not be able to negate some
           constants.  */
        tree tem = fold_negate_expr (UNKNOWN_LOCATION, arg0);
        if (tem && CONSTANT_CLASS_P (tem))
          return tem;
        break;
      }

    case ABS_EXPR:
      if (TREE_CODE (arg0) == INTEGER_CST || TREE_CODE (arg0) == REAL_CST)
        return fold_abs_const (arg0, type);
      break;

    case CONJ_EXPR:
      if (TREE_CODE (arg0) == COMPLEX_CST)
        {
          tree ipart = fold_negate_const (TREE_IMAGPART (arg0),
                                          TREE_TYPE (type));
          return build_complex (type, TREE_REALPART (arg0), ipart);
        }
      break;

    case BIT_NOT_EXPR:
      if (TREE_CODE (arg0) == INTEGER_CST)
        return fold_not_const (arg0, type);
      /* Perform BIT_NOT_EXPR on each element individually.  */
      else if (TREE_CODE (arg0) == VECTOR_CST)
        {
          tree *elements;
          tree elem;
          unsigned count = VECTOR_CST_NELTS (arg0), i;

          elements = XALLOCAVEC (tree, count);
          for (i = 0; i < count; i++)
            {
              elem = VECTOR_CST_ELT (arg0, i);
              elem = const_unop (BIT_NOT_EXPR, TREE_TYPE (type), elem);
              if (elem == NULL_TREE)
                break;
              elements[i] = elem;
            }
          if (i == count)
            return build_vector (type, elements);
        }
      break;

    case TRUTH_NOT_EXPR:
      if (TREE_CODE (arg0) == INTEGER_CST)
        return constant_boolean_node (integer_zerop (arg0), type);
      break;

    case REALPART_EXPR:
      if (TREE_CODE (arg0) == COMPLEX_CST)
        return fold_convert (type, TREE_REALPART (arg0));
      break;

    case IMAGPART_EXPR:
      if (TREE_CODE (arg0) == COMPLEX_CST)
        return fold_convert (type, TREE_IMAGPART (arg0));
      break;

    case VEC_UNPACK_LO_EXPR:
    case VEC_UNPACK_HI_EXPR:
    case VEC_UNPACK_FLOAT_LO_EXPR:
    case VEC_UNPACK_FLOAT_HI_EXPR:
      {
        unsigned int nelts = TYPE_VECTOR_SUBPARTS (type), i;
        tree *elts;
        enum tree_code subcode;

        gcc_assert (TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0)) == nelts * 2);
        if (TREE_CODE (arg0) != VECTOR_CST)
          return NULL_TREE;

        elts = XALLOCAVEC (tree, nelts * 2);
        if (!vec_cst_ctor_to_array (arg0, elts))
          return NULL_TREE;

        if ((!BYTES_BIG_ENDIAN) ^ (code == VEC_UNPACK_LO_EXPR
                                   || code == VEC_UNPACK_FLOAT_LO_EXPR))
          elts += nelts;

        if (code == VEC_UNPACK_LO_EXPR || code == VEC_UNPACK_HI_EXPR)
          subcode = NOP_EXPR;
        else
          subcode = FLOAT_EXPR;

        for (i = 0; i < nelts; i++)
          {
            elts[i] = fold_convert_const (subcode, TREE_TYPE (type), elts[i]);
            if (elts[i] == NULL_TREE || !CONSTANT_CLASS_P (elts[i]))
              return NULL_TREE;
          }

        return build_vector (type, elts);
      }

    case REDUC_MIN_EXPR:
    case REDUC_MAX_EXPR:
    case REDUC_PLUS_EXPR:
      {
        unsigned int nelts, i;
        tree *elts;
        enum tree_code subcode;

        if (TREE_CODE (arg0) != VECTOR_CST)
          return NULL_TREE;
        nelts = TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0));

        elts = XALLOCAVEC (tree, nelts);
        if (!vec_cst_ctor_to_array (arg0, elts))
          return NULL_TREE;

        switch (code)
          {
          case REDUC_MIN_EXPR: subcode = MIN_EXPR; break;
          case REDUC_MAX_EXPR: subcode = MAX_EXPR; break;
          case REDUC_PLUS_EXPR: subcode = PLUS_EXPR; break;
          default: gcc_unreachable ();
          }

        for (i = 1; i < nelts; i++)
          {
            elts[0] = const_binop (subcode, elts[0], elts[i]);
            if (elts[0] == NULL_TREE || !CONSTANT_CLASS_P (elts[0]))
              return NULL_TREE;
          }

        return elts[0];
      }

    default:
      break;
    }

  return NULL_TREE;
}
/* Create a sizetype INT_CST node with NUMBER sign extended.  KIND
   indicates which particular sizetype to create.  */

tree
size_int_kind (HOST_WIDE_INT number, enum size_type_kind kind)
{
  return build_int_cst (sizetype_tab[(int) kind], number);
}
/* Combine operands OP1 and OP2 with arithmetic operation CODE.  CODE
   is a tree code.  The type of the result is taken from the operands.
   Both must be equivalent integer types, ala int_binop_types_match_p.
   If the operands are constant, so is the result.  */

tree
size_binop_loc (location_t loc, enum tree_code code, tree arg0, tree arg1)
{
  tree type = TREE_TYPE (arg0);

  if (arg0 == error_mark_node || arg1 == error_mark_node)
    return error_mark_node;

  gcc_assert (int_binop_types_match_p (code, TREE_TYPE (arg0),
                                       TREE_TYPE (arg1)));

  /* Handle the special case of two integer constants faster.  */
  if (TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
    {
      /* And some specific cases even faster than that.  */
      if (code == PLUS_EXPR)
        {
          if (integer_zerop (arg0) && !TREE_OVERFLOW (arg0))
            return arg1;
          if (integer_zerop (arg1) && !TREE_OVERFLOW (arg1))
            return arg0;
        }
      else if (code == MINUS_EXPR)
        {
          if (integer_zerop (arg1) && !TREE_OVERFLOW (arg1))
            return arg0;
        }
      else if (code == MULT_EXPR)
        {
          if (integer_onep (arg0) && !TREE_OVERFLOW (arg0))
            return arg1;
        }

      /* Handle general case of two integer constants.  For sizetype
         constant calculations we always want to know about overflow,
         even in the unsigned case.  */
      return int_const_binop_1 (code, arg0, arg1, -1);
    }

  return fold_build2_loc (loc, code, type, arg0, arg1);
}
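
/* For example, size_binop (PLUS_EXPR, size_int (4), size_int (8))
   folds immediately to a sizetype constant 12 through the fast
   integer-constant path above; note that overflow is always tracked
   for sizetype calculations, even though sizetype is unsigned.  */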
/* Given two values, either both of sizetype or both of bitsizetype,
   compute the difference between the two values.  Return the value
   in signed type corresponding to the type of the operands.  */

tree
size_diffop_loc (location_t loc, tree arg0, tree arg1)
{
  tree type = TREE_TYPE (arg0);
  tree ctype;

  gcc_assert (int_binop_types_match_p (MINUS_EXPR, TREE_TYPE (arg0),
                                       TREE_TYPE (arg1)));

  /* If the type is already signed, just do the simple thing.  */
  if (!TYPE_UNSIGNED (type))
    return size_binop_loc (loc, MINUS_EXPR, arg0, arg1);

  if (type == sizetype)
    ctype = ssizetype;
  else if (type == bitsizetype)
    ctype = sbitsizetype;
  else
    ctype = signed_type_for (type);

  /* If either operand is not a constant, do the conversions to the signed
     type and subtract.  The hardware will do the right thing with any
     overflow in the subtraction.  */
  if (TREE_CODE (arg0) != INTEGER_CST || TREE_CODE (arg1) != INTEGER_CST)
    return size_binop_loc (loc, MINUS_EXPR,
                           fold_convert_loc (loc, ctype, arg0),
                           fold_convert_loc (loc, ctype, arg1));

  /* If ARG0 is larger than ARG1, subtract and return the result in CTYPE.
     Otherwise, subtract the other way, convert to CTYPE (we know that can't
     overflow) and negate (which can't either).  Special-case a result
     of zero while we're here.  */
  if (tree_int_cst_equal (arg0, arg1))
    return build_int_cst (ctype, 0);
  else if (tree_int_cst_lt (arg1, arg0))
    return fold_convert_loc (loc, ctype,
                             size_binop_loc (loc, MINUS_EXPR, arg0, arg1));
  else
    return size_binop_loc (loc, MINUS_EXPR, build_int_cst (ctype, 0),
                           fold_convert_loc (loc, ctype,
                                             size_binop_loc (loc,
                                                             MINUS_EXPR,
                                                             arg1, arg0)));
}
/* A subroutine of fold_convert_const handling conversions of an
   INTEGER_CST to another integer type.  */

static tree
fold_convert_const_int_from_int (tree type, const_tree arg1)
{
  /* Given an integer constant, make new constant with new type,
     appropriately sign-extended or truncated.  Use widest_int
     so that any extension is done according to ARG1's type.  */
  return force_fit_type (type, wi::to_widest (arg1),
                         !POINTER_TYPE_P (TREE_TYPE (arg1)),
                         TREE_OVERFLOW (arg1));
}
/* A subroutine of fold_convert_const handling conversions of a REAL_CST
   to an integer type.  */

static tree
fold_convert_const_int_from_real (enum tree_code code, tree type, const_tree arg1)
{
  bool overflow = false;
  tree t;

  /* The following code implements the floating point to integer
     conversion rules required by the Java Language Specification,
     that IEEE NaNs are mapped to zero and values that overflow
     the target precision saturate, i.e. values greater than
     INT_MAX are mapped to INT_MAX, and values less than INT_MIN
     are mapped to INT_MIN.  These semantics are allowed by the
     C and C++ standards that simply state that the behavior of
     FP-to-integer conversion is unspecified upon overflow.  */

  wide_int val;
  REAL_VALUE_TYPE r;
  REAL_VALUE_TYPE x = TREE_REAL_CST (arg1);

  switch (code)
    {
    case FIX_TRUNC_EXPR:
      real_trunc (&r, VOIDmode, &x);
      break;

    default:
      gcc_unreachable ();
    }

  /* If R is NaN, return zero and show we have an overflow.  */
  if (REAL_VALUE_ISNAN (r))
    {
      overflow = true;
      val = wi::zero (TYPE_PRECISION (type));
    }

  /* See if R is less than the lower bound or greater than the
     upper bound.  */

  if (! overflow)
    {
      tree lt = TYPE_MIN_VALUE (type);
      REAL_VALUE_TYPE l = real_value_from_int_cst (NULL_TREE, lt);
      if (REAL_VALUES_LESS (r, l))
        {
          overflow = true;
          val = lt;
        }
    }

  if (! overflow)
    {
      tree ut = TYPE_MAX_VALUE (type);
      if (ut)
        {
          REAL_VALUE_TYPE u = real_value_from_int_cst (NULL_TREE, ut);
          if (REAL_VALUES_LESS (u, r))
            {
              overflow = true;
              val = ut;
            }
        }
    }

  if (! overflow)
    val = real_to_integer (&r, &overflow, TYPE_PRECISION (type));

  t = force_fit_type (type, val, -1, overflow | TREE_OVERFLOW (arg1));
  return t;
}
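
/* Under these saturating semantics, converting the REAL_CST 1.0e30 to
   a 32-bit int yields INT_MAX with TREE_OVERFLOW set, converting
   -1.0e30 yields INT_MIN, and converting a NaN yields 0, again with
   the overflow flag set.  */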
/* A subroutine of fold_convert_const handling conversions of a
   FIXED_CST to an integer type.  */

static tree
fold_convert_const_int_from_fixed (tree type, const_tree arg1)
{
  tree t;
  double_int temp, temp_trunc;
  unsigned int mode;

  /* Right shift FIXED_CST to temp by fbit.  */
  temp = TREE_FIXED_CST (arg1).data;
  mode = TREE_FIXED_CST (arg1).mode;
  if (GET_MODE_FBIT (mode) < HOST_BITS_PER_DOUBLE_INT)
    {
      temp = temp.rshift (GET_MODE_FBIT (mode),
                          HOST_BITS_PER_DOUBLE_INT,
                          SIGNED_FIXED_POINT_MODE_P (mode));

      /* Left shift temp to temp_trunc by fbit.  */
      temp_trunc = temp.lshift (GET_MODE_FBIT (mode),
                                HOST_BITS_PER_DOUBLE_INT,
                                SIGNED_FIXED_POINT_MODE_P (mode));
    }
  else
    {
      temp = double_int_zero;
      temp_trunc = double_int_zero;
    }

  /* If FIXED_CST is negative, we need to round the value toward 0:
     if any of the fractional bits are nonzero, add 1 to TEMP.  */
  if (SIGNED_FIXED_POINT_MODE_P (mode)
      && temp_trunc.is_negative ()
      && TREE_FIXED_CST (arg1).data != temp_trunc)
    temp += double_int_one;

  /* Given a fixed-point constant, make new constant with new type,
     appropriately sign-extended or truncated.  */
  t = force_fit_type (type, temp, -1,
                      (temp.is_negative ()
                       && (TYPE_UNSIGNED (type)
                           < TYPE_UNSIGNED (TREE_TYPE (arg1))))
                      | TREE_OVERFLOW (arg1));

  return t;
}
/* A subroutine of fold_convert_const handling conversions of a REAL_CST
   to another floating point type.  */

static tree
fold_convert_const_real_from_real (tree type, const_tree arg1)
{
  REAL_VALUE_TYPE value;
  tree t;

  real_convert (&value, TYPE_MODE (type), &TREE_REAL_CST (arg1));
  t = build_real (type, value);

  /* If converting an infinity or NAN to a representation that doesn't
     have one, set the overflow bit so that we can produce some kind of
     error message at the appropriate point if necessary.  It's not the
     most user-friendly message, but it's better than nothing.  */
  if (REAL_VALUE_ISINF (TREE_REAL_CST (arg1))
      && !MODE_HAS_INFINITIES (TYPE_MODE (type)))
    TREE_OVERFLOW (t) = 1;
  else if (REAL_VALUE_ISNAN (TREE_REAL_CST (arg1))
           && !MODE_HAS_NANS (TYPE_MODE (type)))
    TREE_OVERFLOW (t) = 1;
  /* Regular overflow, conversion produced an infinity in a mode that
     can't represent them.  */
  else if (!MODE_HAS_INFINITIES (TYPE_MODE (type))
           && REAL_VALUE_ISINF (value)
           && !REAL_VALUE_ISINF (TREE_REAL_CST (arg1)))
    TREE_OVERFLOW (t) = 1;
  else
    TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1);
  return t;
}
1992 /* A subroutine of fold_convert_const handling conversions of a FIXED_CST
1993 to a floating point type. */
1995 static tree
1996 fold_convert_const_real_from_fixed (tree type, const_tree arg1)
1998 REAL_VALUE_TYPE value;
1999 tree t;
2001 real_convert_from_fixed (&value, TYPE_MODE (type), &TREE_FIXED_CST (arg1));
2002 t = build_real (type, value);
2004 TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1);
2005 return t;
2008 /* A subroutine of fold_convert_const handling conversions of a FIXED_CST
2009 to another fixed-point type. */
2011 static tree
2012 fold_convert_const_fixed_from_fixed (tree type, const_tree arg1)
2014 FIXED_VALUE_TYPE value;
2015 tree t;
2016 bool overflow_p;
2018 overflow_p = fixed_convert (&value, TYPE_MODE (type), &TREE_FIXED_CST (arg1),
2019 TYPE_SATURATING (type));
2020 t = build_fixed (type, value);
2022 /* Propagate overflow flags. */
2023 if (overflow_p | TREE_OVERFLOW (arg1))
2024 TREE_OVERFLOW (t) = 1;
2025 return t;
2028 /* A subroutine of fold_convert_const handling conversions of an INTEGER_CST
2029 to a fixed-point type. */
2031 static tree
2032 fold_convert_const_fixed_from_int (tree type, const_tree arg1)
2034 FIXED_VALUE_TYPE value;
2035 tree t;
2036 bool overflow_p;
2037 double_int di;
2039 gcc_assert (TREE_INT_CST_NUNITS (arg1) <= 2);
2041 di.low = TREE_INT_CST_ELT (arg1, 0);
2042 if (TREE_INT_CST_NUNITS (arg1) == 1)
2043 di.high = (HOST_WIDE_INT) di.low < 0 ? (HOST_WIDE_INT) -1 : 0;
2044 else
2045 di.high = TREE_INT_CST_ELT (arg1, 1);
2047 overflow_p = fixed_convert_from_int (&value, TYPE_MODE (type), di,
2048 TYPE_UNSIGNED (TREE_TYPE (arg1)),
2049 TYPE_SATURATING (type));
2050 t = build_fixed (type, value);
2052 /* Propagate overflow flags. */
2053 if (overflow_p | TREE_OVERFLOW (arg1))
2054 TREE_OVERFLOW (t) = 1;
2055 return t;
2058 /* A subroutine of fold_convert_const handling conversions of a REAL_CST
2059 to a fixed-point type. */
2061 static tree
2062 fold_convert_const_fixed_from_real (tree type, const_tree arg1)
2064 FIXED_VALUE_TYPE value;
2065 tree t;
2066 bool overflow_p;
2068 overflow_p = fixed_convert_from_real (&value, TYPE_MODE (type),
2069 &TREE_REAL_CST (arg1),
2070 TYPE_SATURATING (type));
2071 t = build_fixed (type, value);
2073 /* Propagate overflow flags. */
2074 if (overflow_p | TREE_OVERFLOW (arg1))
2075 TREE_OVERFLOW (t) = 1;
2076 return t;
2079 /* Attempt to fold type conversion operation CODE of expression ARG1 to
2080 type TYPE. If no simplification can be done return NULL_TREE. */
2082 static tree
2083 fold_convert_const (enum tree_code code, tree type, tree arg1)
2085 if (TREE_TYPE (arg1) == type)
2086 return arg1;
2088 if (POINTER_TYPE_P (type) || INTEGRAL_TYPE_P (type)
2089 || TREE_CODE (type) == OFFSET_TYPE)
2091 if (TREE_CODE (arg1) == INTEGER_CST)
2092 return fold_convert_const_int_from_int (type, arg1);
2093 else if (TREE_CODE (arg1) == REAL_CST)
2094 return fold_convert_const_int_from_real (code, type, arg1);
2095 else if (TREE_CODE (arg1) == FIXED_CST)
2096 return fold_convert_const_int_from_fixed (type, arg1);
2098 else if (TREE_CODE (type) == REAL_TYPE)
2100 if (TREE_CODE (arg1) == INTEGER_CST)
2101 return build_real_from_int_cst (type, arg1);
2102 else if (TREE_CODE (arg1) == REAL_CST)
2103 return fold_convert_const_real_from_real (type, arg1);
2104 else if (TREE_CODE (arg1) == FIXED_CST)
2105 return fold_convert_const_real_from_fixed (type, arg1);
2107 else if (TREE_CODE (type) == FIXED_POINT_TYPE)
2109 if (TREE_CODE (arg1) == FIXED_CST)
2110 return fold_convert_const_fixed_from_fixed (type, arg1);
2111 else if (TREE_CODE (arg1) == INTEGER_CST)
2112 return fold_convert_const_fixed_from_int (type, arg1);
2113 else if (TREE_CODE (arg1) == REAL_CST)
2114 return fold_convert_const_fixed_from_real (type, arg1);
2116 return NULL_TREE;
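/* Usage sketch (illustrative): folding (int) 3.5 reaches this
   function as fold_convert_const (FIX_TRUNC_EXPR, integer_type_node,
   <3.5>), dispatches to fold_convert_const_int_from_real, and
   truncates to the integer constant 3.  */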
2119 /* Construct a vector of zero elements of vector type TYPE. */
2121 static tree
2122 build_zero_vector (tree type)
2124 tree t;
2126 t = fold_convert_const (NOP_EXPR, TREE_TYPE (type), integer_zero_node);
2127 return build_vector_from_val (type, t);
2130 /* Returns true if ARG is convertible to TYPE using a NOP_EXPR. */
2132 bool
2133 fold_convertible_p (const_tree type, const_tree arg)
2135 tree orig = TREE_TYPE (arg);
2137 if (type == orig)
2138 return true;
2140 if (TREE_CODE (arg) == ERROR_MARK
2141 || TREE_CODE (type) == ERROR_MARK
2142 || TREE_CODE (orig) == ERROR_MARK)
2143 return false;
2145 if (TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (orig))
2146 return true;
2148 switch (TREE_CODE (type))
2150 case INTEGER_TYPE: case ENUMERAL_TYPE: case BOOLEAN_TYPE:
2151 case POINTER_TYPE: case REFERENCE_TYPE:
2152 case OFFSET_TYPE:
2153 if (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig)
2154 || TREE_CODE (orig) == OFFSET_TYPE)
2155 return true;
2156 return (TREE_CODE (orig) == VECTOR_TYPE
2157 && tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (orig)));
2159 case REAL_TYPE:
2160 case FIXED_POINT_TYPE:
2161 case COMPLEX_TYPE:
2162 case VECTOR_TYPE:
2163 case VOID_TYPE:
2164 return TREE_CODE (type) == TREE_CODE (orig);
2166 default:
2167 return false;
2171 /* Convert expression ARG to type TYPE. Used by the middle-end for
2172 simple conversions in preference to calling the front-end's convert. */
2174 tree
2175 fold_convert_loc (location_t loc, tree type, tree arg)
2177 tree orig = TREE_TYPE (arg);
2178 tree tem;
2180 if (type == orig)
2181 return arg;
2183 if (TREE_CODE (arg) == ERROR_MARK
2184 || TREE_CODE (type) == ERROR_MARK
2185 || TREE_CODE (orig) == ERROR_MARK)
2186 return error_mark_node;
2188 switch (TREE_CODE (type))
2190 case POINTER_TYPE:
2191 case REFERENCE_TYPE:
2192 /* Handle conversions between pointers to different address spaces. */
2193 if (POINTER_TYPE_P (orig)
2194 && (TYPE_ADDR_SPACE (TREE_TYPE (type))
2195 != TYPE_ADDR_SPACE (TREE_TYPE (orig))))
2196 return fold_build1_loc (loc, ADDR_SPACE_CONVERT_EXPR, type, arg);
2197 /* fall through */
2199 case INTEGER_TYPE: case ENUMERAL_TYPE: case BOOLEAN_TYPE:
2200 case OFFSET_TYPE:
2201 if (TREE_CODE (arg) == INTEGER_CST)
2203 tem = fold_convert_const (NOP_EXPR, type, arg);
2204 if (tem != NULL_TREE)
2205 return tem;
2207 if (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig)
2208 || TREE_CODE (orig) == OFFSET_TYPE)
2209 return fold_build1_loc (loc, NOP_EXPR, type, arg);
2210 if (TREE_CODE (orig) == COMPLEX_TYPE)
2211 return fold_convert_loc (loc, type,
2212 fold_build1_loc (loc, REALPART_EXPR,
2213 TREE_TYPE (orig), arg));
2214 gcc_assert (TREE_CODE (orig) == VECTOR_TYPE
2215 && tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (orig)));
2216 return fold_build1_loc (loc, NOP_EXPR, type, arg);
2218 case REAL_TYPE:
2219 if (TREE_CODE (arg) == INTEGER_CST)
2221 tem = fold_convert_const (FLOAT_EXPR, type, arg);
2222 if (tem != NULL_TREE)
2223 return tem;
2225 else if (TREE_CODE (arg) == REAL_CST)
2227 tem = fold_convert_const (NOP_EXPR, type, arg);
2228 if (tem != NULL_TREE)
2229 return tem;
2231 else if (TREE_CODE (arg) == FIXED_CST)
2233 tem = fold_convert_const (FIXED_CONVERT_EXPR, type, arg);
2234 if (tem != NULL_TREE)
2235 return tem;
2238 switch (TREE_CODE (orig))
2240 case INTEGER_TYPE:
2241 case BOOLEAN_TYPE: case ENUMERAL_TYPE:
2242 case POINTER_TYPE: case REFERENCE_TYPE:
2243 return fold_build1_loc (loc, FLOAT_EXPR, type, arg);
2245 case REAL_TYPE:
2246 return fold_build1_loc (loc, NOP_EXPR, type, arg);
2248 case FIXED_POINT_TYPE:
2249 return fold_build1_loc (loc, FIXED_CONVERT_EXPR, type, arg);
2251 case COMPLEX_TYPE:
2252 tem = fold_build1_loc (loc, REALPART_EXPR, TREE_TYPE (orig), arg);
2253 return fold_convert_loc (loc, type, tem);
2255 default:
2256 gcc_unreachable ();
2259 case FIXED_POINT_TYPE:
2260 if (TREE_CODE (arg) == FIXED_CST || TREE_CODE (arg) == INTEGER_CST
2261 || TREE_CODE (arg) == REAL_CST)
2263 tem = fold_convert_const (FIXED_CONVERT_EXPR, type, arg);
2264 if (tem != NULL_TREE)
2265 goto fold_convert_exit;
2268 switch (TREE_CODE (orig))
2270 case FIXED_POINT_TYPE:
2271 case INTEGER_TYPE:
2272 case ENUMERAL_TYPE:
2273 case BOOLEAN_TYPE:
2274 case REAL_TYPE:
2275 return fold_build1_loc (loc, FIXED_CONVERT_EXPR, type, arg);
2277 case COMPLEX_TYPE:
2278 tem = fold_build1_loc (loc, REALPART_EXPR, TREE_TYPE (orig), arg);
2279 return fold_convert_loc (loc, type, tem);
2281 default:
2282 gcc_unreachable ();
2285 case COMPLEX_TYPE:
2286 switch (TREE_CODE (orig))
2288 case INTEGER_TYPE:
2289 case BOOLEAN_TYPE: case ENUMERAL_TYPE:
2290 case POINTER_TYPE: case REFERENCE_TYPE:
2291 case REAL_TYPE:
2292 case FIXED_POINT_TYPE:
2293 return fold_build2_loc (loc, COMPLEX_EXPR, type,
2294 fold_convert_loc (loc, TREE_TYPE (type), arg),
2295 fold_convert_loc (loc, TREE_TYPE (type),
2296 integer_zero_node));
2297 case COMPLEX_TYPE:
2299 tree rpart, ipart;
2301 if (TREE_CODE (arg) == COMPLEX_EXPR)
2303 rpart = fold_convert_loc (loc, TREE_TYPE (type),
2304 TREE_OPERAND (arg, 0));
2305 ipart = fold_convert_loc (loc, TREE_TYPE (type),
2306 TREE_OPERAND (arg, 1));
2307 return fold_build2_loc (loc, COMPLEX_EXPR, type, rpart, ipart);
2310 arg = save_expr (arg);
2311 rpart = fold_build1_loc (loc, REALPART_EXPR, TREE_TYPE (orig), arg);
2312 ipart = fold_build1_loc (loc, IMAGPART_EXPR, TREE_TYPE (orig), arg);
2313 rpart = fold_convert_loc (loc, TREE_TYPE (type), rpart);
2314 ipart = fold_convert_loc (loc, TREE_TYPE (type), ipart);
2315 return fold_build2_loc (loc, COMPLEX_EXPR, type, rpart, ipart);
2318 default:
2319 gcc_unreachable ();
2322 case VECTOR_TYPE:
2323 if (integer_zerop (arg))
2324 return build_zero_vector (type);
2325 gcc_assert (tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (orig)));
2326 gcc_assert (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig)
2327 || TREE_CODE (orig) == VECTOR_TYPE);
2328 return fold_build1_loc (loc, VIEW_CONVERT_EXPR, type, arg);
2330 case VOID_TYPE:
2331 tem = fold_ignored_result (arg);
2332 return fold_build1_loc (loc, NOP_EXPR, type, tem);
2334 default:
2335 if (TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (orig))
2336 return fold_build1_loc (loc, NOP_EXPR, type, arg);
2337 gcc_unreachable ();
2339 fold_convert_exit:
2340 protected_set_expr_location_unshare (tem, loc);
2341 return tem;
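/* Illustrative examples (assuming the usual fold behavior): converting
   a COMPLEX_EXPR <r, i> to a scalar floating-point type goes through
   the REALPART_EXPR case above and is expected to fold to r, and
   converting the constant 0 to a vector type builds a zero vector via
   build_zero_vector.  */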
2344 /* Return false if expr can be assumed not to be an lvalue, true
2345 otherwise. */
2347 static bool
2348 maybe_lvalue_p (const_tree x)
2350 /* We only need to wrap lvalue tree codes. */
2351 switch (TREE_CODE (x))
2353 case VAR_DECL:
2354 case PARM_DECL:
2355 case RESULT_DECL:
2356 case LABEL_DECL:
2357 case FUNCTION_DECL:
2358 case SSA_NAME:
2360 case COMPONENT_REF:
2361 case MEM_REF:
2362 case INDIRECT_REF:
2363 case ARRAY_REF:
2364 case ARRAY_RANGE_REF:
2365 case BIT_FIELD_REF:
2366 case OBJ_TYPE_REF:
2368 case REALPART_EXPR:
2369 case IMAGPART_EXPR:
2370 case PREINCREMENT_EXPR:
2371 case PREDECREMENT_EXPR:
2372 case SAVE_EXPR:
2373 case TRY_CATCH_EXPR:
2374 case WITH_CLEANUP_EXPR:
2375 case COMPOUND_EXPR:
2376 case MODIFY_EXPR:
2377 case TARGET_EXPR:
2378 case COND_EXPR:
2379 case BIND_EXPR:
2380 break;
2382 default:
2383 /* Assume the worst for front-end tree codes. */
2384 if ((int)TREE_CODE (x) >= NUM_TREE_CODES)
2385 break;
2386 return false;
2389 return true;
2392 /* Return an expr equal to X but certainly not valid as an lvalue. */
2394 tree
2395 non_lvalue_loc (location_t loc, tree x)
2397 /* While we are in GIMPLE, NON_LVALUE_EXPR doesn't mean anything to
2398 us. */
2399 if (in_gimple_form)
2400 return x;
2402 if (! maybe_lvalue_p (x))
2403 return x;
2404 return build1_loc (loc, NON_LVALUE_EXPR, TREE_TYPE (x), x);
2407 /* When pedantic, return an expr equal to X but certainly not valid as a
2408 pedantic lvalue. Otherwise, return X. */
2410 static tree
2411 pedantic_non_lvalue_loc (location_t loc, tree x)
2413 return protected_set_expr_location_unshare (x, loc);
2416 /* Given a tree comparison code, return the code that is the logical inverse.
2417 It is generally not safe to do this for floating-point comparisons, except
2418 for EQ_EXPR, NE_EXPR, ORDERED_EXPR and UNORDERED_EXPR, so we return
2419 ERROR_MARK in this case. */
2421 enum tree_code
2422 invert_tree_comparison (enum tree_code code, bool honor_nans)
2424 if (honor_nans && flag_trapping_math && code != EQ_EXPR && code != NE_EXPR
2425 && code != ORDERED_EXPR && code != UNORDERED_EXPR)
2426 return ERROR_MARK;
2428 switch (code)
2430 case EQ_EXPR:
2431 return NE_EXPR;
2432 case NE_EXPR:
2433 return EQ_EXPR;
2434 case GT_EXPR:
2435 return honor_nans ? UNLE_EXPR : LE_EXPR;
2436 case GE_EXPR:
2437 return honor_nans ? UNLT_EXPR : LT_EXPR;
2438 case LT_EXPR:
2439 return honor_nans ? UNGE_EXPR : GE_EXPR;
2440 case LE_EXPR:
2441 return honor_nans ? UNGT_EXPR : GT_EXPR;
2442 case LTGT_EXPR:
2443 return UNEQ_EXPR;
2444 case UNEQ_EXPR:
2445 return LTGT_EXPR;
2446 case UNGT_EXPR:
2447 return LE_EXPR;
2448 case UNGE_EXPR:
2449 return LT_EXPR;
2450 case UNLT_EXPR:
2451 return GE_EXPR;
2452 case UNLE_EXPR:
2453 return GT_EXPR;
2454 case ORDERED_EXPR:
2455 return UNORDERED_EXPR;
2456 case UNORDERED_EXPR:
2457 return ORDERED_EXPR;
2458 default:
2459 gcc_unreachable ();
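/* Illustrative examples: without NaNs, !(x < y) inverts to x >= y.
   When NaNs are honored, !(x < y) must become x UNGE y instead, since
   both LT and GE are false on unordered operands; and under
   -ftrapping-math the inversion is refused (ERROR_MARK) because UNGE
   does not trap on NaN operands while LT does.  */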
2463 /* Similar, but return the comparison that results if the operands are
2464 swapped. This is safe for floating-point. */
2466 enum tree_code
2467 swap_tree_comparison (enum tree_code code)
2469 switch (code)
2471 case EQ_EXPR:
2472 case NE_EXPR:
2473 case ORDERED_EXPR:
2474 case UNORDERED_EXPR:
2475 case LTGT_EXPR:
2476 case UNEQ_EXPR:
2477 return code;
2478 case GT_EXPR:
2479 return LT_EXPR;
2480 case GE_EXPR:
2481 return LE_EXPR;
2482 case LT_EXPR:
2483 return GT_EXPR;
2484 case LE_EXPR:
2485 return GE_EXPR;
2486 case UNGT_EXPR:
2487 return UNLT_EXPR;
2488 case UNGE_EXPR:
2489 return UNLE_EXPR;
2490 case UNLT_EXPR:
2491 return UNGT_EXPR;
2492 case UNLE_EXPR:
2493 return UNGE_EXPR;
2494 default:
2495 gcc_unreachable ();
2500 /* Convert a comparison tree code from an enum tree_code representation
2501 into a compcode bit-based encoding. This function is the inverse of
2502 compcode_to_comparison. */
2504 static enum comparison_code
2505 comparison_to_compcode (enum tree_code code)
2507 switch (code)
2509 case LT_EXPR:
2510 return COMPCODE_LT;
2511 case EQ_EXPR:
2512 return COMPCODE_EQ;
2513 case LE_EXPR:
2514 return COMPCODE_LE;
2515 case GT_EXPR:
2516 return COMPCODE_GT;
2517 case NE_EXPR:
2518 return COMPCODE_NE;
2519 case GE_EXPR:
2520 return COMPCODE_GE;
2521 case ORDERED_EXPR:
2522 return COMPCODE_ORD;
2523 case UNORDERED_EXPR:
2524 return COMPCODE_UNORD;
2525 case UNLT_EXPR:
2526 return COMPCODE_UNLT;
2527 case UNEQ_EXPR:
2528 return COMPCODE_UNEQ;
2529 case UNLE_EXPR:
2530 return COMPCODE_UNLE;
2531 case UNGT_EXPR:
2532 return COMPCODE_UNGT;
2533 case LTGT_EXPR:
2534 return COMPCODE_LTGT;
2535 case UNGE_EXPR:
2536 return COMPCODE_UNGE;
2537 default:
2538 gcc_unreachable ();
2542 /* Convert a compcode bit-based encoding of a comparison operator back
2543 to GCC's enum tree_code representation. This function is the
2544 inverse of comparison_to_compcode. */
2546 static enum tree_code
2547 compcode_to_comparison (enum comparison_code code)
2549 switch (code)
2551 case COMPCODE_LT:
2552 return LT_EXPR;
2553 case COMPCODE_EQ:
2554 return EQ_EXPR;
2555 case COMPCODE_LE:
2556 return LE_EXPR;
2557 case COMPCODE_GT:
2558 return GT_EXPR;
2559 case COMPCODE_NE:
2560 return NE_EXPR;
2561 case COMPCODE_GE:
2562 return GE_EXPR;
2563 case COMPCODE_ORD:
2564 return ORDERED_EXPR;
2565 case COMPCODE_UNORD:
2566 return UNORDERED_EXPR;
2567 case COMPCODE_UNLT:
2568 return UNLT_EXPR;
2569 case COMPCODE_UNEQ:
2570 return UNEQ_EXPR;
2571 case COMPCODE_UNLE:
2572 return UNLE_EXPR;
2573 case COMPCODE_UNGT:
2574 return UNGT_EXPR;
2575 case COMPCODE_LTGT:
2576 return LTGT_EXPR;
2577 case COMPCODE_UNGE:
2578 return UNGE_EXPR;
2579 default:
2580 gcc_unreachable ();
2584 /* Return a tree for the comparison which is the combination of
2585 doing the AND or OR (depending on CODE) of the two operations LCODE
2586 and RCODE on the identical operands LL_ARG and LR_ARG. Take into account
2587 the possibility of trapping if the mode has NaNs, and return NULL_TREE
2588 if this makes the transformation invalid. */
2590 tree
2591 combine_comparisons (location_t loc,
2592 enum tree_code code, enum tree_code lcode,
2593 enum tree_code rcode, tree truth_type,
2594 tree ll_arg, tree lr_arg)
2596 bool honor_nans = HONOR_NANS (ll_arg);
2597 enum comparison_code lcompcode = comparison_to_compcode (lcode);
2598 enum comparison_code rcompcode = comparison_to_compcode (rcode);
2599 int compcode;
2601 switch (code)
2603 case TRUTH_AND_EXPR: case TRUTH_ANDIF_EXPR:
2604 compcode = lcompcode & rcompcode;
2605 break;
2607 case TRUTH_OR_EXPR: case TRUTH_ORIF_EXPR:
2608 compcode = lcompcode | rcompcode;
2609 break;
2611 default:
2612 return NULL_TREE;
2615 if (!honor_nans)
2617 /* Eliminate unordered comparisons, as well as LTGT and ORD
2618 which are not used unless the mode has NaNs. */
2619 compcode &= ~COMPCODE_UNORD;
2620 if (compcode == COMPCODE_LTGT)
2621 compcode = COMPCODE_NE;
2622 else if (compcode == COMPCODE_ORD)
2623 compcode = COMPCODE_TRUE;
2625 else if (flag_trapping_math)
2627 /* Check that the original operation and the optimized ones will trap
2628 under the same condition. */
2629 bool ltrap = (lcompcode & COMPCODE_UNORD) == 0
2630 && (lcompcode != COMPCODE_EQ)
2631 && (lcompcode != COMPCODE_ORD);
2632 bool rtrap = (rcompcode & COMPCODE_UNORD) == 0
2633 && (rcompcode != COMPCODE_EQ)
2634 && (rcompcode != COMPCODE_ORD);
2635 bool trap = (compcode & COMPCODE_UNORD) == 0
2636 && (compcode != COMPCODE_EQ)
2637 && (compcode != COMPCODE_ORD);
2639 /* In a short-circuited boolean expression the LHS might be
2640 such that the RHS, if evaluated, will never trap. For
2641 example, in ORD (x, y) && (x < y), we evaluate the RHS only
2642 if neither x nor y is NaN. (This is a mixed blessing: for
2643 example, the expression above will never trap, hence
2644 optimizing it to x < y would be invalid). */
2645 if ((code == TRUTH_ORIF_EXPR && (lcompcode & COMPCODE_UNORD))
2646 || (code == TRUTH_ANDIF_EXPR && !(lcompcode & COMPCODE_UNORD)))
2647 rtrap = false;
2649 /* If the comparison was short-circuited, and only the RHS
2650 trapped, we may now generate a spurious trap. */
2651 if (rtrap && !ltrap
2652 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR))
2653 return NULL_TREE;
2655 /* If we changed the conditions that cause a trap, we lose. */
2656 if ((ltrap || rtrap) != trap)
2657 return NULL_TREE;
2660 if (compcode == COMPCODE_TRUE)
2661 return constant_boolean_node (true, truth_type);
2662 else if (compcode == COMPCODE_FALSE)
2663 return constant_boolean_node (false, truth_type);
2664 else
2666 enum tree_code tcode;
2668 tcode = compcode_to_comparison ((enum comparison_code) compcode);
2669 return fold_build2_loc (loc, tcode, truth_type, ll_arg, lr_arg);
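/* Illustrative examples of the compcode arithmetic used above:
   (x < y) || (x == y) gives COMPCODE_LT | COMPCODE_EQ == COMPCODE_LE,
   so the pair folds to x <= y, while (x < y) && (x > y) gives
   COMPCODE_LT & COMPCODE_GT == COMPCODE_FALSE, which folds to a
   constant false (modulo the NaN and trapping checks above).  */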
2673 /* Return nonzero if two operands (typically of the same tree node)
2674 are necessarily equal. If either argument has side-effects this
2675 function returns zero. FLAGS modifies behavior as follows:
2677 If OEP_ONLY_CONST is set, only return nonzero for constants.
2678 This function tests whether the operands are indistinguishable;
2679 it does not test whether they are equal using C's == operation.
2680 The distinction is important for IEEE floating point, because
2681 (1) -0.0 and 0.0 are distinguishable, but -0.0==0.0, and
2682 (2) two NaNs may be indistinguishable, but NaN!=NaN.
2684 If OEP_ONLY_CONST is unset, a VAR_DECL is considered equal to itself
2685 even though it may hold multiple values during a function.
2686 This is because a GCC tree node guarantees that nothing else is
2687 executed between the evaluation of its "operands" (which may often
2688 be evaluated in arbitrary order). Hence if the operands themselves
2689 don't side-effect, the VAR_DECLs, PARM_DECLs etc... must hold the
2690 same value in each operand/subexpression. Hence leaving OEP_ONLY_CONST
2691 unset means assuming isochronic (or instantaneous) tree equivalence.
2692 Unless comparing arbitrary expression trees, such as from different
2693 statements, this flag can usually be left unset.
2695 If OEP_PURE_SAME is set, then pure functions with identical arguments
2696 are considered the same. It is used when the caller has other ways
2697 to ensure that global memory is unchanged in between. */
2699 int
2700 operand_equal_p (const_tree arg0, const_tree arg1, unsigned int flags)
2702 /* If either is ERROR_MARK, they aren't equal. */
2703 if (TREE_CODE (arg0) == ERROR_MARK || TREE_CODE (arg1) == ERROR_MARK
2704 || TREE_TYPE (arg0) == error_mark_node
2705 || TREE_TYPE (arg1) == error_mark_node)
2706 return 0;
2708 /* Similar, if either does not have a type (like a released SSA name),
2709 they aren't equal. */
2710 if (!TREE_TYPE (arg0) || !TREE_TYPE (arg1))
2711 return 0;
2713 /* Check equality of integer constants before bailing out due to
2714 precision differences. */
2715 if (TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
2716 return tree_int_cst_equal (arg0, arg1);
2718 /* If both types don't have the same signedness, then we can't consider
2719 them equal. We must check this before the STRIP_NOPS calls
2720 because they may change the signedness of the arguments. As pointers
2721 strictly don't have a signedness, require either two pointers or
2722 two non-pointers as well. */
2723 if (TYPE_UNSIGNED (TREE_TYPE (arg0)) != TYPE_UNSIGNED (TREE_TYPE (arg1))
2724 || POINTER_TYPE_P (TREE_TYPE (arg0)) != POINTER_TYPE_P (TREE_TYPE (arg1)))
2725 return 0;
2727 /* We cannot consider pointers to different address space equal. */
2728 if (POINTER_TYPE_P (TREE_TYPE (arg0)) && POINTER_TYPE_P (TREE_TYPE (arg1))
2729 && (TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (arg0)))
2730 != TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (arg1)))))
2731 return 0;
2733 /* If both types don't have the same precision, then it is not safe
2734 to strip NOPs. */
2735 if (element_precision (TREE_TYPE (arg0))
2736 != element_precision (TREE_TYPE (arg1)))
2737 return 0;
2739 STRIP_NOPS (arg0);
2740 STRIP_NOPS (arg1);
2742 /* In case both args are comparisons but with different comparison
2743 code, try to swap the comparison operands of one arg to produce
2744 a match and compare that variant. */
2745 if (TREE_CODE (arg0) != TREE_CODE (arg1)
2746 && COMPARISON_CLASS_P (arg0)
2747 && COMPARISON_CLASS_P (arg1))
2749 enum tree_code swap_code = swap_tree_comparison (TREE_CODE (arg1));
2751 if (TREE_CODE (arg0) == swap_code)
2752 return operand_equal_p (TREE_OPERAND (arg0, 0),
2753 TREE_OPERAND (arg1, 1), flags)
2754 && operand_equal_p (TREE_OPERAND (arg0, 1),
2755 TREE_OPERAND (arg1, 0), flags);
2758 if (TREE_CODE (arg0) != TREE_CODE (arg1)
2759 /* NOP_EXPR and CONVERT_EXPR are considered equal. */
2760 && !(CONVERT_EXPR_P (arg0) && CONVERT_EXPR_P (arg1)))
2761 return 0;
2763 /* This is needed for conversions and for COMPONENT_REF.
2764 Might as well play it safe and always test this. */
2765 if (TREE_CODE (TREE_TYPE (arg0)) == ERROR_MARK
2766 || TREE_CODE (TREE_TYPE (arg1)) == ERROR_MARK
2767 || TYPE_MODE (TREE_TYPE (arg0)) != TYPE_MODE (TREE_TYPE (arg1)))
2768 return 0;
2770 /* If ARG0 and ARG1 are the same SAVE_EXPR, they are necessarily equal.
2771 We don't care about side effects in that case because the SAVE_EXPR
2772 takes care of that for us. In all other cases, two expressions are
2773 equal if they have no side effects. If we have two identical
2774 expressions with side effects that should be treated the same due
2775 to the only side effects being identical SAVE_EXPR's, that will
2776 be detected in the recursive calls below.
2777 If we are taking an invariant address of two identical objects
2778 they are necessarily equal as well. */
2779 if (arg0 == arg1 && ! (flags & OEP_ONLY_CONST)
2780 && (TREE_CODE (arg0) == SAVE_EXPR
2781 || (flags & OEP_CONSTANT_ADDRESS_OF)
2782 || (! TREE_SIDE_EFFECTS (arg0) && ! TREE_SIDE_EFFECTS (arg1))))
2783 return 1;
2785 /* Next handle constant cases, those for which we can return 1 even
2786 if ONLY_CONST is set. */
2787 if (TREE_CONSTANT (arg0) && TREE_CONSTANT (arg1))
2788 switch (TREE_CODE (arg0))
2790 case INTEGER_CST:
2791 return tree_int_cst_equal (arg0, arg1);
2793 case FIXED_CST:
2794 return FIXED_VALUES_IDENTICAL (TREE_FIXED_CST (arg0),
2795 TREE_FIXED_CST (arg1));
2797 case REAL_CST:
2798 if (REAL_VALUES_IDENTICAL (TREE_REAL_CST (arg0),
2799 TREE_REAL_CST (arg1)))
2800 return 1;
2803 if (!HONOR_SIGNED_ZEROS (arg0))
2805 /* If we do not distinguish between signed and unsigned zero,
2806 consider them equal. */
2807 if (real_zerop (arg0) && real_zerop (arg1))
2808 return 1;
2810 return 0;
2812 case VECTOR_CST:
2814 unsigned i;
2816 if (VECTOR_CST_NELTS (arg0) != VECTOR_CST_NELTS (arg1))
2817 return 0;
2819 for (i = 0; i < VECTOR_CST_NELTS (arg0); ++i)
2821 if (!operand_equal_p (VECTOR_CST_ELT (arg0, i),
2822 VECTOR_CST_ELT (arg1, i), flags))
2823 return 0;
2825 return 1;
2828 case COMPLEX_CST:
2829 return (operand_equal_p (TREE_REALPART (arg0), TREE_REALPART (arg1),
2830 flags)
2831 && operand_equal_p (TREE_IMAGPART (arg0), TREE_IMAGPART (arg1),
2832 flags));
2834 case STRING_CST:
2835 return (TREE_STRING_LENGTH (arg0) == TREE_STRING_LENGTH (arg1)
2836 && ! memcmp (TREE_STRING_POINTER (arg0),
2837 TREE_STRING_POINTER (arg1),
2838 TREE_STRING_LENGTH (arg0)));
2840 case ADDR_EXPR:
2841 return operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 0),
2842 TREE_CONSTANT (arg0) && TREE_CONSTANT (arg1)
2843 ? OEP_CONSTANT_ADDRESS_OF : 0);
2844 default:
2845 break;
2848 if (flags & OEP_ONLY_CONST)
2849 return 0;
2851 /* Define macros to test an operand from arg0 and arg1 for equality and a
2852 variant that allows null and views null as being different from any
2853 non-null value. In the latter case, if either is null, then both
2854 must be; otherwise, do the normal comparison. */
2855 #define OP_SAME(N) operand_equal_p (TREE_OPERAND (arg0, N), \
2856 TREE_OPERAND (arg1, N), flags)
2858 #define OP_SAME_WITH_NULL(N) \
2859 ((!TREE_OPERAND (arg0, N) || !TREE_OPERAND (arg1, N)) \
2860 ? TREE_OPERAND (arg0, N) == TREE_OPERAND (arg1, N) : OP_SAME (N))
2862 switch (TREE_CODE_CLASS (TREE_CODE (arg0)))
2864 case tcc_unary:
2865 /* Two conversions are equal only if signedness and modes match. */
2866 switch (TREE_CODE (arg0))
2868 CASE_CONVERT:
2869 case FIX_TRUNC_EXPR:
2870 if (TYPE_UNSIGNED (TREE_TYPE (arg0))
2871 != TYPE_UNSIGNED (TREE_TYPE (arg1)))
2872 return 0;
2873 break;
2874 default:
2875 break;
2878 return OP_SAME (0);
2881 case tcc_comparison:
2882 case tcc_binary:
2883 if (OP_SAME (0) && OP_SAME (1))
2884 return 1;
2886 /* For commutative ops, allow the other order. */
2887 return (commutative_tree_code (TREE_CODE (arg0))
2888 && operand_equal_p (TREE_OPERAND (arg0, 0),
2889 TREE_OPERAND (arg1, 1), flags)
2890 && operand_equal_p (TREE_OPERAND (arg0, 1),
2891 TREE_OPERAND (arg1, 0), flags));
2893 case tcc_reference:
2894 /* If either of the pointer (or reference) expressions we are
2895 dereferencing contain a side effect, these cannot be equal,
2896 but their addresses can be. */
2897 if ((flags & OEP_CONSTANT_ADDRESS_OF) == 0
2898 && (TREE_SIDE_EFFECTS (arg0)
2899 || TREE_SIDE_EFFECTS (arg1)))
2900 return 0;
2902 switch (TREE_CODE (arg0))
2904 case INDIRECT_REF:
2905 flags &= ~OEP_CONSTANT_ADDRESS_OF;
2906 return OP_SAME (0);
2908 case REALPART_EXPR:
2909 case IMAGPART_EXPR:
2910 return OP_SAME (0);
2912 case TARGET_MEM_REF:
2913 flags &= ~OEP_CONSTANT_ADDRESS_OF;
2914 /* Require equal extra operands and then fall through to MEM_REF
2915 handling of the two common operands. */
2916 if (!OP_SAME_WITH_NULL (2)
2917 || !OP_SAME_WITH_NULL (3)
2918 || !OP_SAME_WITH_NULL (4))
2919 return 0;
2920 /* Fallthru. */
2921 case MEM_REF:
2922 flags &= ~OEP_CONSTANT_ADDRESS_OF;
2923 /* Require equal access sizes, and similar pointer types.
2924 We can have incomplete types for array references of
2925 variable-sized arrays from the Fortran frontend
2926 though. Also verify the types are compatible. */
2927 return ((TYPE_SIZE (TREE_TYPE (arg0)) == TYPE_SIZE (TREE_TYPE (arg1))
2928 || (TYPE_SIZE (TREE_TYPE (arg0))
2929 && TYPE_SIZE (TREE_TYPE (arg1))
2930 && operand_equal_p (TYPE_SIZE (TREE_TYPE (arg0)),
2931 TYPE_SIZE (TREE_TYPE (arg1)), flags)))
2932 && types_compatible_p (TREE_TYPE (arg0), TREE_TYPE (arg1))
2933 && alias_ptr_types_compatible_p
2934 (TREE_TYPE (TREE_OPERAND (arg0, 1)),
2935 TREE_TYPE (TREE_OPERAND (arg1, 1)))
2936 && OP_SAME (0) && OP_SAME (1));
2938 case ARRAY_REF:
2939 case ARRAY_RANGE_REF:
2940 /* Operands 2 and 3 may be null.
2941 Compare the array index by value first if it is constant, as we
2942 may have different types but the same value here. */
2943 if (!OP_SAME (0))
2944 return 0;
2945 flags &= ~OEP_CONSTANT_ADDRESS_OF;
2946 return ((tree_int_cst_equal (TREE_OPERAND (arg0, 1),
2947 TREE_OPERAND (arg1, 1))
2948 || OP_SAME (1))
2949 && OP_SAME_WITH_NULL (2)
2950 && OP_SAME_WITH_NULL (3));
2952 case COMPONENT_REF:
2953 /* Handle operand 2 the same as for ARRAY_REF. Operand 0
2954 may be NULL when we're called to compare MEM_EXPRs. */
2955 if (!OP_SAME_WITH_NULL (0)
2956 || !OP_SAME (1))
2957 return 0;
2958 flags &= ~OEP_CONSTANT_ADDRESS_OF;
2959 return OP_SAME_WITH_NULL (2);
2961 case BIT_FIELD_REF:
2962 if (!OP_SAME (0))
2963 return 0;
2964 flags &= ~OEP_CONSTANT_ADDRESS_OF;
2965 return OP_SAME (1) && OP_SAME (2);
2967 default:
2968 return 0;
2971 case tcc_expression:
2972 switch (TREE_CODE (arg0))
2974 case ADDR_EXPR:
2975 case TRUTH_NOT_EXPR:
2976 return OP_SAME (0);
2978 case TRUTH_ANDIF_EXPR:
2979 case TRUTH_ORIF_EXPR:
2980 return OP_SAME (0) && OP_SAME (1);
2982 case FMA_EXPR:
2983 case WIDEN_MULT_PLUS_EXPR:
2984 case WIDEN_MULT_MINUS_EXPR:
2985 if (!OP_SAME (2))
2986 return 0;
2987 /* The multiplication operands are commutative. */
2988 /* FALLTHRU */
2990 case TRUTH_AND_EXPR:
2991 case TRUTH_OR_EXPR:
2992 case TRUTH_XOR_EXPR:
2993 if (OP_SAME (0) && OP_SAME (1))
2994 return 1;
2996 /* Otherwise take into account this is a commutative operation. */
2997 return (operand_equal_p (TREE_OPERAND (arg0, 0),
2998 TREE_OPERAND (arg1, 1), flags)
2999 && operand_equal_p (TREE_OPERAND (arg0, 1),
3000 TREE_OPERAND (arg1, 0), flags));
3002 case COND_EXPR:
3003 case VEC_COND_EXPR:
3004 case DOT_PROD_EXPR:
3005 return OP_SAME (0) && OP_SAME (1) && OP_SAME (2);
3007 default:
3008 return 0;
3011 case tcc_vl_exp:
3012 switch (TREE_CODE (arg0))
3014 case CALL_EXPR:
3015 /* If the CALL_EXPRs call different functions, then they
3016 clearly cannot be equal. */
3017 if (! operand_equal_p (CALL_EXPR_FN (arg0), CALL_EXPR_FN (arg1),
3018 flags))
3019 return 0;
3022 unsigned int cef = call_expr_flags (arg0);
3023 if (flags & OEP_PURE_SAME)
3024 cef &= ECF_CONST | ECF_PURE;
3025 else
3026 cef &= ECF_CONST;
3027 if (!cef)
3028 return 0;
3031 /* Now see if all the arguments are the same. */
3033 const_call_expr_arg_iterator iter0, iter1;
3034 const_tree a0, a1;
3035 for (a0 = first_const_call_expr_arg (arg0, &iter0),
3036 a1 = first_const_call_expr_arg (arg1, &iter1);
3037 a0 && a1;
3038 a0 = next_const_call_expr_arg (&iter0),
3039 a1 = next_const_call_expr_arg (&iter1))
3040 if (! operand_equal_p (a0, a1, flags))
3041 return 0;
3043 /* If we get here and both argument lists are exhausted
3044 then the CALL_EXPRs are equal. */
3045 return ! (a0 || a1);
3047 default:
3048 return 0;
3051 case tcc_declaration:
3052 /* Consider __builtin_sqrt equal to sqrt. */
3053 return (TREE_CODE (arg0) == FUNCTION_DECL
3054 && DECL_BUILT_IN (arg0) && DECL_BUILT_IN (arg1)
3055 && DECL_BUILT_IN_CLASS (arg0) == DECL_BUILT_IN_CLASS (arg1)
3056 && DECL_FUNCTION_CODE (arg0) == DECL_FUNCTION_CODE (arg1));
3058 default:
3059 return 0;
3062 #undef OP_SAME
3063 #undef OP_SAME_WITH_NULL
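/* Illustrative examples: operand_equal_p considers a + b equal to
   b + a (commutative codes try both orders above), two identical
   SAVE_EXPRs equal despite side effects, and -0.0 different from 0.0
   whenever signed zeros are honored.  */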
3066 /* Similar to operand_equal_p, but see if ARG0 might have been made by
3067 shorten_compare from ARG1 when ARG1 was being compared with OTHER.
3069 When in doubt, return 0. */
3071 static int
3072 operand_equal_for_comparison_p (tree arg0, tree arg1, tree other)
3074 int unsignedp1, unsignedpo;
3075 tree primarg0, primarg1, primother;
3076 unsigned int correct_width;
3078 if (operand_equal_p (arg0, arg1, 0))
3079 return 1;
3081 if (! INTEGRAL_TYPE_P (TREE_TYPE (arg0))
3082 || ! INTEGRAL_TYPE_P (TREE_TYPE (arg1)))
3083 return 0;
3085 /* Discard any conversions that don't change the modes of ARG0 and ARG1
3086 and see if the inner values are the same. This removes any
3087 signedness comparison, which doesn't matter here. */
3088 primarg0 = arg0, primarg1 = arg1;
3089 STRIP_NOPS (primarg0);
3090 STRIP_NOPS (primarg1);
3091 if (operand_equal_p (primarg0, primarg1, 0))
3092 return 1;
3094 /* Duplicate what shorten_compare does to ARG1 and see if that gives the
3095 actual comparison operand, ARG0.
3097 First throw away any conversions to wider types
3098 already present in the operands. */
3100 primarg1 = get_narrower (arg1, &unsignedp1);
3101 primother = get_narrower (other, &unsignedpo);
3103 correct_width = TYPE_PRECISION (TREE_TYPE (arg1));
3104 if (unsignedp1 == unsignedpo
3105 && TYPE_PRECISION (TREE_TYPE (primarg1)) < correct_width
3106 && TYPE_PRECISION (TREE_TYPE (primother)) < correct_width)
3108 tree type = TREE_TYPE (arg0);
3110 /* Make sure shorter operand is extended the right way
3111 to match the longer operand. */
3112 primarg1 = fold_convert (signed_or_unsigned_type_for
3113 (unsignedp1, TREE_TYPE (primarg1)), primarg1);
3115 if (operand_equal_p (arg0, fold_convert (type, primarg1), 0))
3116 return 1;
3119 return 0;
3122 /* See if ARG is an expression that is either a comparison or is performing
3123 arithmetic on comparisons. The comparisons must only be comparing
3124 two different values, which will be stored in *CVAL1 and *CVAL2; if
3125 they are nonzero it means that some operands have already been found.
3126 No variables may be used anywhere else in the expression except in the
3127 comparisons. If SAVE_P is true it means we removed a SAVE_EXPR around
3128 the expression and save_expr needs to be called with CVAL1 and CVAL2.
3130 If this is true, return 1. Otherwise, return zero. */
3132 static int
3133 twoval_comparison_p (tree arg, tree *cval1, tree *cval2, int *save_p)
3135 enum tree_code code = TREE_CODE (arg);
3136 enum tree_code_class tclass = TREE_CODE_CLASS (code);
3138 /* We can handle some of the tcc_expression cases here. */
3139 if (tclass == tcc_expression && code == TRUTH_NOT_EXPR)
3140 tclass = tcc_unary;
3141 else if (tclass == tcc_expression
3142 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR
3143 || code == COMPOUND_EXPR))
3144 tclass = tcc_binary;
3146 else if (tclass == tcc_expression && code == SAVE_EXPR
3147 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (arg, 0)))
3149 /* If we've already found a CVAL1 or CVAL2, this expression is
3150 too complex to handle. */
3151 if (*cval1 || *cval2)
3152 return 0;
3154 tclass = tcc_unary;
3155 *save_p = 1;
3158 switch (tclass)
3160 case tcc_unary:
3161 return twoval_comparison_p (TREE_OPERAND (arg, 0), cval1, cval2, save_p);
3163 case tcc_binary:
3164 return (twoval_comparison_p (TREE_OPERAND (arg, 0), cval1, cval2, save_p)
3165 && twoval_comparison_p (TREE_OPERAND (arg, 1),
3166 cval1, cval2, save_p));
3168 case tcc_constant:
3169 return 1;
3171 case tcc_expression:
3172 if (code == COND_EXPR)
3173 return (twoval_comparison_p (TREE_OPERAND (arg, 0),
3174 cval1, cval2, save_p)
3175 && twoval_comparison_p (TREE_OPERAND (arg, 1),
3176 cval1, cval2, save_p)
3177 && twoval_comparison_p (TREE_OPERAND (arg, 2),
3178 cval1, cval2, save_p));
3179 return 0;
3181 case tcc_comparison:
3182 /* First see if we can handle the first operand, then the second. For
3183 the second operand, we know *CVAL1 can't be zero. It must be that
3184 one side of the comparison is each of the values; test for the
3185 case where this isn't true by failing if the two operands
3186 are the same. */
3188 if (operand_equal_p (TREE_OPERAND (arg, 0),
3189 TREE_OPERAND (arg, 1), 0))
3190 return 0;
3192 if (*cval1 == 0)
3193 *cval1 = TREE_OPERAND (arg, 0);
3194 else if (operand_equal_p (*cval1, TREE_OPERAND (arg, 0), 0))
3196 else if (*cval2 == 0)
3197 *cval2 = TREE_OPERAND (arg, 0);
3198 else if (operand_equal_p (*cval2, TREE_OPERAND (arg, 0), 0))
3200 else
3201 return 0;
3203 if (operand_equal_p (*cval1, TREE_OPERAND (arg, 1), 0))
3205 else if (*cval2 == 0)
3206 *cval2 = TREE_OPERAND (arg, 1);
3207 else if (operand_equal_p (*cval2, TREE_OPERAND (arg, 1), 0))
3209 else
3210 return 0;
3212 return 1;
3214 default:
3215 return 0;
3219 /* ARG is a tree that is known to contain just arithmetic operations and
3220 comparisons. Evaluate the operations in the tree substituting NEW0 for
3221 any occurrence of OLD0 as an operand of a comparison and likewise for
3222 NEW1 and OLD1. */
3224 static tree
3225 eval_subst (location_t loc, tree arg, tree old0, tree new0,
3226 tree old1, tree new1)
3228 tree type = TREE_TYPE (arg);
3229 enum tree_code code = TREE_CODE (arg);
3230 enum tree_code_class tclass = TREE_CODE_CLASS (code);
3232 /* We can handle some of the tcc_expression cases here. */
3233 if (tclass == tcc_expression && code == TRUTH_NOT_EXPR)
3234 tclass = tcc_unary;
3235 else if (tclass == tcc_expression
3236 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR))
3237 tclass = tcc_binary;
3239 switch (tclass)
3241 case tcc_unary:
3242 return fold_build1_loc (loc, code, type,
3243 eval_subst (loc, TREE_OPERAND (arg, 0),
3244 old0, new0, old1, new1));
3246 case tcc_binary:
3247 return fold_build2_loc (loc, code, type,
3248 eval_subst (loc, TREE_OPERAND (arg, 0),
3249 old0, new0, old1, new1),
3250 eval_subst (loc, TREE_OPERAND (arg, 1),
3251 old0, new0, old1, new1));
3253 case tcc_expression:
3254 switch (code)
3256 case SAVE_EXPR:
3257 return eval_subst (loc, TREE_OPERAND (arg, 0), old0, new0,
3258 old1, new1);
3260 case COMPOUND_EXPR:
3261 return eval_subst (loc, TREE_OPERAND (arg, 1), old0, new0,
3262 old1, new1);
3264 case COND_EXPR:
3265 return fold_build3_loc (loc, code, type,
3266 eval_subst (loc, TREE_OPERAND (arg, 0),
3267 old0, new0, old1, new1),
3268 eval_subst (loc, TREE_OPERAND (arg, 1),
3269 old0, new0, old1, new1),
3270 eval_subst (loc, TREE_OPERAND (arg, 2),
3271 old0, new0, old1, new1));
3272 default:
3273 break;
3275 /* Fall through - ??? */
3277 case tcc_comparison:
3279 tree arg0 = TREE_OPERAND (arg, 0);
3280 tree arg1 = TREE_OPERAND (arg, 1);
3282 /* We need to check both for exact equality and tree equality. The
3283 former will be true if the operand has a side-effect. In that
3284 case, we know the operand occurred exactly once. */
3286 if (arg0 == old0 || operand_equal_p (arg0, old0, 0))
3287 arg0 = new0;
3288 else if (arg0 == old1 || operand_equal_p (arg0, old1, 0))
3289 arg0 = new1;
3291 if (arg1 == old0 || operand_equal_p (arg1, old0, 0))
3292 arg1 = new0;
3293 else if (arg1 == old1 || operand_equal_p (arg1, old1, 0))
3294 arg1 = new1;
3296 return fold_build2_loc (loc, code, type, arg0, arg1);
3299 default:
3300 return arg;
3304 /* Return a tree for the case when the result of an expression is RESULT
3305 converted to TYPE and OMITTED was previously an operand of the expression
3306 but is now not needed (e.g., we folded OMITTED * 0).
3308 If OMITTED has side effects, we must evaluate it. Otherwise, just do
3309 the conversion of RESULT to TYPE. */
3311 tree
3312 omit_one_operand_loc (location_t loc, tree type, tree result, tree omitted)
3314 tree t = fold_convert_loc (loc, type, result);
3316 /* If the resulting operand is an empty statement, just return the omitted
3317 statement cast to void. */
3318 if (IS_EMPTY_STMT (t) && TREE_SIDE_EFFECTS (omitted))
3319 return build1_loc (loc, NOP_EXPR, void_type_node,
3320 fold_ignored_result (omitted));
3322 if (TREE_SIDE_EFFECTS (omitted))
3323 return build2_loc (loc, COMPOUND_EXPR, type,
3324 fold_ignored_result (omitted), t);
3326 return non_lvalue_loc (loc, t);
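/* Illustrative example: when folding f () * 0, a caller can use
   omit_one_operand_loc (loc, type, integer_zero_node, <f ()>), which
   yields the COMPOUND_EXPR (f (), 0), preserving the call's side
   effects while the multiplication disappears.  */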
3329 /* Return a tree for the case when the result of an expression is RESULT
3330 converted to TYPE and OMITTED1 and OMITTED2 were previously operands
3331 of the expression but are now not needed.
3333 If OMITTED1 or OMITTED2 has side effects, they must be evaluated.
3334 If both OMITTED1 and OMITTED2 have side effects, OMITTED1 is
3335 evaluated before OMITTED2. Otherwise, if neither has side effects,
3336 just do the conversion of RESULT to TYPE. */
3338 tree
3339 omit_two_operands_loc (location_t loc, tree type, tree result,
3340 tree omitted1, tree omitted2)
3342 tree t = fold_convert_loc (loc, type, result);
3344 if (TREE_SIDE_EFFECTS (omitted2))
3345 t = build2_loc (loc, COMPOUND_EXPR, type, omitted2, t);
3346 if (TREE_SIDE_EFFECTS (omitted1))
3347 t = build2_loc (loc, COMPOUND_EXPR, type, omitted1, t);
3349 return TREE_CODE (t) != COMPOUND_EXPR ? non_lvalue_loc (loc, t) : t;
3353 /* Return a simplified tree node for the truth-negation of ARG. This
3354 never alters ARG itself. We assume that ARG is an operation that
3355 returns a truth value (0 or 1).
3357 FIXME: one would think we would fold the result, but it causes
3358 problems with the dominator optimizer. */
3360 static tree
3361 fold_truth_not_expr (location_t loc, tree arg)
3363 tree type = TREE_TYPE (arg);
3364 enum tree_code code = TREE_CODE (arg);
3365 location_t loc1, loc2;
3367 /* If this is a comparison, we can simply invert it, except for
3368 floating-point non-equality comparisons, in which case we just
3369 enclose a TRUTH_NOT_EXPR around what we have. */
3371 if (TREE_CODE_CLASS (code) == tcc_comparison)
3373 tree op_type = TREE_TYPE (TREE_OPERAND (arg, 0));
3374 if (FLOAT_TYPE_P (op_type)
3375 && flag_trapping_math
3376 && code != ORDERED_EXPR && code != UNORDERED_EXPR
3377 && code != NE_EXPR && code != EQ_EXPR)
3378 return NULL_TREE;
3380 code = invert_tree_comparison (code, HONOR_NANS (op_type));
3381 if (code == ERROR_MARK)
3382 return NULL_TREE;
3384 return build2_loc (loc, code, type, TREE_OPERAND (arg, 0),
3385 TREE_OPERAND (arg, 1));
3388 switch (code)
3390 case INTEGER_CST:
3391 return constant_boolean_node (integer_zerop (arg), type);
3393 case TRUTH_AND_EXPR:
3394 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
3395 loc2 = expr_location_or (TREE_OPERAND (arg, 1), loc);
3396 return build2_loc (loc, TRUTH_OR_EXPR, type,
3397 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)),
3398 invert_truthvalue_loc (loc2, TREE_OPERAND (arg, 1)));
3400 case TRUTH_OR_EXPR:
3401 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
3402 loc2 = expr_location_or (TREE_OPERAND (arg, 1), loc);
3403 return build2_loc (loc, TRUTH_AND_EXPR, type,
3404 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)),
3405 invert_truthvalue_loc (loc2, TREE_OPERAND (arg, 1)));
3407 case TRUTH_XOR_EXPR:
3408 /* Here we can invert either operand. We invert the first operand
3409 unless the second operand is a TRUTH_NOT_EXPR in which case our
3410 result is the XOR of the first operand with the inside of the
3411 negation of the second operand. */
3413 if (TREE_CODE (TREE_OPERAND (arg, 1)) == TRUTH_NOT_EXPR)
3414 return build2_loc (loc, TRUTH_XOR_EXPR, type, TREE_OPERAND (arg, 0),
3415 TREE_OPERAND (TREE_OPERAND (arg, 1), 0));
3416 else
3417 return build2_loc (loc, TRUTH_XOR_EXPR, type,
3418 invert_truthvalue_loc (loc, TREE_OPERAND (arg, 0)),
3419 TREE_OPERAND (arg, 1));
3421 case TRUTH_ANDIF_EXPR:
3422 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
3423 loc2 = expr_location_or (TREE_OPERAND (arg, 1), loc);
3424 return build2_loc (loc, TRUTH_ORIF_EXPR, type,
3425 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)),
3426 invert_truthvalue_loc (loc2, TREE_OPERAND (arg, 1)));
3428 case TRUTH_ORIF_EXPR:
3429 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
3430 loc2 = expr_location_or (TREE_OPERAND (arg, 1), loc);
3431 return build2_loc (loc, TRUTH_ANDIF_EXPR, type,
3432 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)),
3433 invert_truthvalue_loc (loc2, TREE_OPERAND (arg, 1)));
3435 case TRUTH_NOT_EXPR:
3436 return TREE_OPERAND (arg, 0);
3438 case COND_EXPR:
3440 tree arg1 = TREE_OPERAND (arg, 1);
3441 tree arg2 = TREE_OPERAND (arg, 2);
3443 loc1 = expr_location_or (TREE_OPERAND (arg, 1), loc);
3444 loc2 = expr_location_or (TREE_OPERAND (arg, 2), loc);
3446 /* A COND_EXPR may have a throw as one operand, which
3447 then has void type. Just leave void operands
3448 as they are. */
3449 return build3_loc (loc, COND_EXPR, type, TREE_OPERAND (arg, 0),
3450 VOID_TYPE_P (TREE_TYPE (arg1))
3451 ? arg1 : invert_truthvalue_loc (loc1, arg1),
3452 VOID_TYPE_P (TREE_TYPE (arg2))
3453 ? arg2 : invert_truthvalue_loc (loc2, arg2));
3456 case COMPOUND_EXPR:
3457 loc1 = expr_location_or (TREE_OPERAND (arg, 1), loc);
3458 return build2_loc (loc, COMPOUND_EXPR, type,
3459 TREE_OPERAND (arg, 0),
3460 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 1)));
3462 case NON_LVALUE_EXPR:
3463 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
3464 return invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0));
3466 CASE_CONVERT:
3467 if (TREE_CODE (TREE_TYPE (arg)) == BOOLEAN_TYPE)
3468 return build1_loc (loc, TRUTH_NOT_EXPR, type, arg);
3470 /* ... fall through ... */
3472 case FLOAT_EXPR:
3473 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
3474 return build1_loc (loc, TREE_CODE (arg), type,
3475 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)));
3477 case BIT_AND_EXPR:
3478 if (!integer_onep (TREE_OPERAND (arg, 1)))
3479 return NULL_TREE;
3480 return build2_loc (loc, EQ_EXPR, type, arg, build_int_cst (type, 0));
3482 case SAVE_EXPR:
3483 return build1_loc (loc, TRUTH_NOT_EXPR, type, arg);
3485 case CLEANUP_POINT_EXPR:
3486 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
3487 return build1_loc (loc, CLEANUP_POINT_EXPR, type,
3488 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)));
3490 default:
3491 return NULL_TREE;
3495 /* Fold the truth-negation of ARG. This never alters ARG itself. We
3496 assume that ARG is an operation that returns a truth value (0 or 1
3497 for scalars, 0 or -1 for vectors). Return the folded expression if
3498 folding is successful. Otherwise, return NULL_TREE. */
3500 static tree
3501 fold_invert_truthvalue (location_t loc, tree arg)
3503 tree type = TREE_TYPE (arg);
3504 return fold_unary_loc (loc, VECTOR_TYPE_P (type)
3505 ? BIT_NOT_EXPR
3506 : TRUTH_NOT_EXPR,
3507 type, arg);
3510 /* Return a simplified tree node for the truth-negation of ARG. This
3511 never alters ARG itself. We assume that ARG is an operation that
3512 returns a truth value (0 or 1 for scalars, 0 or -1 for vectors). */
3514 tree
3515 invert_truthvalue_loc (location_t loc, tree arg)
3517 if (TREE_CODE (arg) == ERROR_MARK)
3518 return arg;
3520 tree type = TREE_TYPE (arg);
3521 return fold_build1_loc (loc, VECTOR_TYPE_P (type)
3522 ? BIT_NOT_EXPR
3523 : TRUTH_NOT_EXPR,
3524 type, arg);
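/* Note: vector truth values are 0 / -1 per element, so their negation
   is a BIT_NOT_EXPR rather than a TRUTH_NOT_EXPR; e.g. inverting the
   mask {0, -1} yields {-1, 0}.  */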
3527 /* Given a bit-wise operation CODE applied to ARG0 and ARG1, see if both
3528 operands are another bit-wise operation with a common input. If so,
3529 distribute the bit operations to save an operation and possibly two if
3530 constants are involved. For example, convert
3531 (A | B) & (A | C) into A | (B & C)
3532 Further simplification will occur if B and C are constants.
3534 If this optimization cannot be done, 0 will be returned. */
3536 static tree
3537 distribute_bit_expr (location_t loc, enum tree_code code, tree type,
3538 tree arg0, tree arg1)
3540 tree common;
3541 tree left, right;
3543 if (TREE_CODE (arg0) != TREE_CODE (arg1)
3544 || TREE_CODE (arg0) == code
3545 || (TREE_CODE (arg0) != BIT_AND_EXPR
3546 && TREE_CODE (arg0) != BIT_IOR_EXPR))
3547 return 0;
3549 if (operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 0), 0))
3551 common = TREE_OPERAND (arg0, 0);
3552 left = TREE_OPERAND (arg0, 1);
3553 right = TREE_OPERAND (arg1, 1);
3555 else if (operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 1), 0))
3557 common = TREE_OPERAND (arg0, 0);
3558 left = TREE_OPERAND (arg0, 1);
3559 right = TREE_OPERAND (arg1, 0);
3561 else if (operand_equal_p (TREE_OPERAND (arg0, 1), TREE_OPERAND (arg1, 0), 0))
3563 common = TREE_OPERAND (arg0, 1);
3564 left = TREE_OPERAND (arg0, 0);
3565 right = TREE_OPERAND (arg1, 1);
3567 else if (operand_equal_p (TREE_OPERAND (arg0, 1), TREE_OPERAND (arg1, 1), 0))
3569 common = TREE_OPERAND (arg0, 1);
3570 left = TREE_OPERAND (arg0, 0);
3571 right = TREE_OPERAND (arg1, 0);
3573 else
3574 return 0;
3576 common = fold_convert_loc (loc, type, common);
3577 left = fold_convert_loc (loc, type, left);
3578 right = fold_convert_loc (loc, type, right);
3579 return fold_build2_loc (loc, TREE_CODE (arg0), type, common,
3580 fold_build2_loc (loc, code, type, left, right));
3583 /* Knowing that ARG0 and ARG1 are both RDIV_EXPRs, simplify a binary operation
3584 with code CODE. This optimization is unsafe. */
3585 static tree
3586 distribute_real_division (location_t loc, enum tree_code code, tree type,
3587 tree arg0, tree arg1)
3589 bool mul0 = TREE_CODE (arg0) == MULT_EXPR;
3590 bool mul1 = TREE_CODE (arg1) == MULT_EXPR;
3592 /* (A / C) +- (B / C) -> (A +- B) / C. */
3593 if (mul0 == mul1
3594 && operand_equal_p (TREE_OPERAND (arg0, 1),
3595 TREE_OPERAND (arg1, 1), 0))
3596 return fold_build2_loc (loc, mul0 ? MULT_EXPR : RDIV_EXPR, type,
3597 fold_build2_loc (loc, code, type,
3598 TREE_OPERAND (arg0, 0),
3599 TREE_OPERAND (arg1, 0)),
3600 TREE_OPERAND (arg0, 1));
3602 /* (A / C1) +- (A / C2) -> A * (1 / C1 +- 1 / C2). */
3603 if (operand_equal_p (TREE_OPERAND (arg0, 0),
3604 TREE_OPERAND (arg1, 0), 0)
3605 && TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
3606 && TREE_CODE (TREE_OPERAND (arg1, 1)) == REAL_CST)
3608 REAL_VALUE_TYPE r0, r1;
3609 r0 = TREE_REAL_CST (TREE_OPERAND (arg0, 1));
3610 r1 = TREE_REAL_CST (TREE_OPERAND (arg1, 1));
3611 if (!mul0)
3612 real_arithmetic (&r0, RDIV_EXPR, &dconst1, &r0);
3613 if (!mul1)
3614 real_arithmetic (&r1, RDIV_EXPR, &dconst1, &r1);
3615 real_arithmetic (&r0, code, &r0, &r1);
3616 return fold_build2_loc (loc, MULT_EXPR, type,
3617 TREE_OPERAND (arg0, 0),
3618 build_real (type, r0));
3621 return NULL_TREE;
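/* Worked example (valid only under unsafe math): (A / 4.0) + (A / 8.0)
   becomes A * (1.0/4.0 + 1.0/8.0) == A * 0.375, replacing two
   divisions by one multiplication with a compile-time constant.  */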
3624 /* Return a BIT_FIELD_REF of type TYPE to refer to BITSIZE bits of INNER
3625 starting at BITPOS. The field is unsigned if UNSIGNEDP is nonzero. */
3627 static tree
3628 make_bit_field_ref (location_t loc, tree inner, tree type,
3629 HOST_WIDE_INT bitsize, HOST_WIDE_INT bitpos, int unsignedp)
3631 tree result, bftype;
3633 if (bitpos == 0)
3635 tree size = TYPE_SIZE (TREE_TYPE (inner));
3636 if ((INTEGRAL_TYPE_P (TREE_TYPE (inner))
3637 || POINTER_TYPE_P (TREE_TYPE (inner)))
3638 && tree_fits_shwi_p (size)
3639 && tree_to_shwi (size) == bitsize)
3640 return fold_convert_loc (loc, type, inner);
3643 bftype = type;
3644 if (TYPE_PRECISION (bftype) != bitsize
3645 || TYPE_UNSIGNED (bftype) == !unsignedp)
3646 bftype = build_nonstandard_integer_type (bitsize, 0);
3648 result = build3_loc (loc, BIT_FIELD_REF, bftype, inner,
3649 size_int (bitsize), bitsize_int (bitpos));
3651 if (bftype != type)
3652 result = fold_convert_loc (loc, type, result);
3654 return result;
3657 /* Optimize a bit-field compare.
3659 There are two cases: First is a compare against a constant and the
3660 second is a comparison of two items where the fields are at the same
3661 bit position relative to the start of a chunk (byte, halfword, word)
3662 large enough to contain it. In these cases we can avoid the shift
3663 implicit in bitfield extractions.
3665 For constants, we emit a compare of the shifted constant with the
3666 BIT_AND_EXPR of a mask and a byte, halfword, or word of the operand being
3667 compared. For two fields at the same position, we do the ANDs with the
3668 similar mask and compare the result of the ANDs.
3670 CODE is the comparison code, known to be either NE_EXPR or EQ_EXPR.
3671 COMPARE_TYPE is the type of the comparison, and LHS and RHS
3672 are the left and right operands of the comparison, respectively.
3674 If the optimization described above can be done, we return the resulting
3675 tree. Otherwise we return zero. */
3677 static tree
3678 optimize_bit_field_compare (location_t loc, enum tree_code code,
3679 tree compare_type, tree lhs, tree rhs)
3681 HOST_WIDE_INT lbitpos, lbitsize, rbitpos, rbitsize, nbitpos, nbitsize;
3682 tree type = TREE_TYPE (lhs);
3683 tree unsigned_type;
3684 int const_p = TREE_CODE (rhs) == INTEGER_CST;
3685 machine_mode lmode, rmode, nmode;
3686 int lunsignedp, runsignedp;
3687 int lvolatilep = 0, rvolatilep = 0;
3688 tree linner, rinner = NULL_TREE;
3689 tree mask;
3690 tree offset;
3692 /* Get all the information about the extractions being done. If the bit size
3693 is the same as the size of the underlying object, we aren't doing an
3694 extraction at all and so can do nothing. We also don't want to
3695 do anything if the inner expression is a PLACEHOLDER_EXPR since we
3696 then will no longer be able to replace it. */
3697 linner = get_inner_reference (lhs, &lbitsize, &lbitpos, &offset, &lmode,
3698 &lunsignedp, &lvolatilep, false);
3699 if (linner == lhs || lbitsize == GET_MODE_BITSIZE (lmode) || lbitsize < 0
3700 || offset != 0 || TREE_CODE (linner) == PLACEHOLDER_EXPR || lvolatilep)
3701 return 0;
3703 if (!const_p)
3705 /* If this is not a constant, we can only do something if bit positions,
3706 sizes, and signedness are the same. */
3707 rinner = get_inner_reference (rhs, &rbitsize, &rbitpos, &offset, &rmode,
3708 &runsignedp, &rvolatilep, false);
3710 if (rinner == rhs || lbitpos != rbitpos || lbitsize != rbitsize
3711 || lunsignedp != runsignedp || offset != 0
3712 || TREE_CODE (rinner) == PLACEHOLDER_EXPR || rvolatilep)
3713 return 0;
3716 /* See if we can find a mode to refer to this field. We should be able to,
3717 but fail if we can't. */
3718 nmode = get_best_mode (lbitsize, lbitpos, 0, 0,
3719 const_p ? TYPE_ALIGN (TREE_TYPE (linner))
3720 : MIN (TYPE_ALIGN (TREE_TYPE (linner)),
3721 TYPE_ALIGN (TREE_TYPE (rinner))),
3722 word_mode, false);
3723 if (nmode == VOIDmode)
3724 return 0;
3726 /* Set signed and unsigned types of the precision of this mode for the
3727 shifts below. */
3728 unsigned_type = lang_hooks.types.type_for_mode (nmode, 1);
3730 /* Compute the bit position and size for the new reference and our offset
3731 within it. If the new reference is the same size as the original, we
3732 won't optimize anything, so return zero. */
3733 nbitsize = GET_MODE_BITSIZE (nmode);
3734 nbitpos = lbitpos & ~ (nbitsize - 1);
3735 lbitpos -= nbitpos;
3736 if (nbitsize == lbitsize)
3737 return 0;
3739 if (BYTES_BIG_ENDIAN)
3740 lbitpos = nbitsize - lbitsize - lbitpos;
3742 /* Make the mask to be used against the extracted field. */
3743 mask = build_int_cst_type (unsigned_type, -1);
3744 mask = const_binop (LSHIFT_EXPR, mask, size_int (nbitsize - lbitsize));
3745 mask = const_binop (RSHIFT_EXPR, mask,
3746 size_int (nbitsize - lbitsize - lbitpos));
3748 if (! const_p)
3749 /* If not comparing with constant, just rework the comparison
3750 and return. */
3751 return fold_build2_loc (loc, code, compare_type,
3752 fold_build2_loc (loc, BIT_AND_EXPR, unsigned_type,
3753 make_bit_field_ref (loc, linner,
3754 unsigned_type,
3755 nbitsize, nbitpos,
3756 1),
3757 mask),
3758 fold_build2_loc (loc, BIT_AND_EXPR, unsigned_type,
3759 make_bit_field_ref (loc, rinner,
3760 unsigned_type,
3761 nbitsize, nbitpos,
3762 1),
3763 mask));
3765 /* Otherwise, we are handling the constant case. See if the constant is too
3766 big for the field. Warn and return a tree for 0 (false) if so. We do
3767 this not only for its own sake, but to avoid having to test for this
3768 error case below. If we didn't, we might generate wrong code.
3770 For unsigned fields, the constant shifted right by the field length should
3771 be all zero. For signed fields, the high-order bits should agree with
3772 the sign bit. */
3774 if (lunsignedp)
3776 if (wi::lrshift (rhs, lbitsize) != 0)
3778 warning (0, "comparison is always %d due to width of bit-field",
3779 code == NE_EXPR);
3780 return constant_boolean_node (code == NE_EXPR, compare_type);
3783 else
3785 wide_int tem = wi::arshift (rhs, lbitsize - 1);
3786 if (tem != 0 && tem != -1)
3788 warning (0, "comparison is always %d due to width of bit-field",
3789 code == NE_EXPR);
3790 return constant_boolean_node (code == NE_EXPR, compare_type);
3794 /* Single-bit compares should always be against zero. */
3795 if (lbitsize == 1 && ! integer_zerop (rhs))
3797 code = code == EQ_EXPR ? NE_EXPR : EQ_EXPR;
3798 rhs = build_int_cst (type, 0);
3801 /* Make a new bitfield reference, shift the constant over the
3802 appropriate number of bits and mask it with the computed mask
3803 (in case this was a signed field). If we changed it, make a new one. */
3804 lhs = make_bit_field_ref (loc, linner, unsigned_type, nbitsize, nbitpos, 1);
3806 rhs = const_binop (BIT_AND_EXPR,
3807 const_binop (LSHIFT_EXPR,
3808 fold_convert_loc (loc, unsigned_type, rhs),
3809 size_int (lbitpos)),
3810 mask);
3812 lhs = build2_loc (loc, code, compare_type,
3813 build2 (BIT_AND_EXPR, unsigned_type, lhs, mask), rhs);
3814 return lhs;
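/* Editor's sketch (not part of fold-const.c): the constant-fit check
   above, restated on plain integers. Assumes 0 < bitsize < 64 and an
   arithmetic right shift for signed values. For an unsigned field the
   constant must have no bits above the field; for a signed field every
   bit above the sign bit must copy the sign bit. */
static int
constant_fits_bit_field (long long rhs, int bitsize, int unsignedp)
{
  if (unsignedp)
    return (unsigned long long) rhs >> bitsize == 0;
  long long tem = rhs >> (bitsize - 1);  /* arithmetic shift */
  return tem == 0 || tem == -1;
}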
3817 /* Subroutine for fold_truth_andor_1: decode a field reference.
3819 If EXP is a comparison reference, we return the innermost reference.
3821 *PBITSIZE is set to the number of bits in the reference, *PBITPOS is
3822 set to the starting bit number.
3824 If the innermost field can be completely contained in a mode-sized
3825 unit, *PMODE is set to that mode. Otherwise, it is set to VOIDmode.
3827 *PVOLATILEP is set to 1 if any expression encountered is volatile;
3828 otherwise it is not changed.
3830 *PUNSIGNEDP is set to the signedness of the field.
3832 *PMASK is set to the mask used. This is either contained in a
3833 BIT_AND_EXPR or derived from the width of the field.
3835 *PAND_MASK is set to the mask found in a BIT_AND_EXPR, if any.
3837 Return 0 if this is not a component reference or is one that we can't
3838 do anything with. */
3840 static tree
3841 decode_field_reference (location_t loc, tree exp, HOST_WIDE_INT *pbitsize,
3842 HOST_WIDE_INT *pbitpos, machine_mode *pmode,
3843 int *punsignedp, int *pvolatilep,
3844 tree *pmask, tree *pand_mask)
3846 tree outer_type = 0;
3847 tree and_mask = 0;
3848 tree mask, inner, offset;
3849 tree unsigned_type;
3850 unsigned int precision;
3852 /* All the optimizations using this function assume integer fields.
3853 There are problems with FP fields since the type_for_size call
3854 below can fail for, e.g., XFmode. */
3855 if (! INTEGRAL_TYPE_P (TREE_TYPE (exp)))
3856 return 0;
3858 /* We are interested in the bare arrangement of bits, so strip everything
3859 that doesn't affect the machine mode. However, record the type of the
3860 outermost expression if it may matter below. */
3861 if (CONVERT_EXPR_P (exp)
3862 || TREE_CODE (exp) == NON_LVALUE_EXPR)
3863 outer_type = TREE_TYPE (exp);
3864 STRIP_NOPS (exp);
3866 if (TREE_CODE (exp) == BIT_AND_EXPR)
3868 and_mask = TREE_OPERAND (exp, 1);
3869 exp = TREE_OPERAND (exp, 0);
3870 STRIP_NOPS (exp); STRIP_NOPS (and_mask);
3871 if (TREE_CODE (and_mask) != INTEGER_CST)
3872 return 0;
3875 inner = get_inner_reference (exp, pbitsize, pbitpos, &offset, pmode,
3876 punsignedp, pvolatilep, false);
3877 if ((inner == exp && and_mask == 0)
3878 || *pbitsize < 0 || offset != 0
3879 || TREE_CODE (inner) == PLACEHOLDER_EXPR)
3880 return 0;
3882 /* If the number of bits in the reference is the same as the bitsize of
3883 the outer type, then the outer type gives the signedness. Otherwise
3884 (in case of a small bitfield) the signedness is unchanged. */
3885 if (outer_type && *pbitsize == TYPE_PRECISION (outer_type))
3886 *punsignedp = TYPE_UNSIGNED (outer_type);
3888 /* Compute the mask to access the bitfield. */
3889 unsigned_type = lang_hooks.types.type_for_size (*pbitsize, 1);
3890 precision = TYPE_PRECISION (unsigned_type);
3892 mask = build_int_cst_type (unsigned_type, -1);
3894 mask = const_binop (LSHIFT_EXPR, mask, size_int (precision - *pbitsize));
3895 mask = const_binop (RSHIFT_EXPR, mask, size_int (precision - *pbitsize));
3897 /* Merge it with the mask we found in the BIT_AND_EXPR, if any. */
3898 if (and_mask != 0)
3899 mask = fold_build2_loc (loc, BIT_AND_EXPR, unsigned_type,
3900 fold_convert_loc (loc, unsigned_type, and_mask), mask);
3902 *pmask = mask;
3903 *pand_mask = and_mask;
3904 return inner;
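/* Editor's sketch (hypothetical helper): the shift pair above builds a
   mask of N low-order ones. Unlike (1u << n) - 1, it stays well defined
   when N equals the full precision (here 32). Assumes 1 <= n <= 32. */
static unsigned int
low_order_mask (int n)
{
  unsigned int mask = ~0u;  /* all ones */
  mask <<= 32 - n;          /* keep only the top N bits */
  return mask >> (32 - n);  /* move them down to the bottom */
}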
3907 /* Return nonzero if MASK represents a mask of SIZE ones in the low-order
3908 bit positions and the type of MASK is signed.
3910 static int
3911 all_ones_mask_p (const_tree mask, unsigned int size)
3913 tree type = TREE_TYPE (mask);
3914 unsigned int precision = TYPE_PRECISION (type);
3916 /* If this function returns true when the type of the mask is
3917 UNSIGNED, then there will be errors. In particular see
3918 gcc.c-torture/execute/990326-1.c. There does not appear to be
3919 any documentation paper trail as to why this is so. But the pre
3920 wide-int code worked with that restriction and it has been preserved
3921 here. */
3922 if (size > precision || TYPE_SIGN (type) == UNSIGNED)
3923 return false;
3925 return wi::mask (size, false, precision) == mask;
3928 /* Subroutine for fold: determine if VAL is the INTEGER_CONST that
3929 represents the sign bit of EXP's type. If EXP represents a sign
3930 or zero extension, also test VAL against the unextended type.
3931 The return value is the (sub)expression whose sign bit is VAL,
3932 or NULL_TREE otherwise. */
3934 tree
3935 sign_bit_p (tree exp, const_tree val)
3937 int width;
3938 tree t;
3940 /* Tree EXP must have an integral type. */
3941 t = TREE_TYPE (exp);
3942 if (! INTEGRAL_TYPE_P (t))
3943 return NULL_TREE;
3945 /* Tree VAL must be an integer constant. */
3946 if (TREE_CODE (val) != INTEGER_CST
3947 || TREE_OVERFLOW (val))
3948 return NULL_TREE;
3950 width = TYPE_PRECISION (t);
3951 if (wi::only_sign_bit_p (val, width))
3952 return exp;
3954 /* Handle extension from a narrower type. */
3955 if (TREE_CODE (exp) == NOP_EXPR
3956 && TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0))) < width)
3957 return sign_bit_p (TREE_OPERAND (exp, 0), val);
3959 return NULL_TREE;
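/* Editor's sketch: on a 32-bit word, the wi::only_sign_bit_p test above
   amounts to an equality test against the lone sign bit. */
static int
only_sign_bit_32 (unsigned int val)
{
  return val == 0x80000000u;  /* 1u << 31 */
}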
3962 /* Subroutine for fold_truth_andor_1: determine if an operand is simple enough
3963 to be evaluated unconditionally. */
3965 static int
3966 simple_operand_p (const_tree exp)
3968 /* Strip any conversions that don't change the machine mode. */
3969 STRIP_NOPS (exp);
3971 return (CONSTANT_CLASS_P (exp)
3972 || TREE_CODE (exp) == SSA_NAME
3973 || (DECL_P (exp)
3974 && ! TREE_ADDRESSABLE (exp)
3975 && ! TREE_THIS_VOLATILE (exp)
3976 && ! DECL_NONLOCAL (exp)
3977 /* Don't regard global variables as simple. They may be
3978 allocated in ways unknown to the compiler (shared memory,
3979 #pragma weak, etc). */
3980 && ! TREE_PUBLIC (exp)
3981 && ! DECL_EXTERNAL (exp)
3982 /* Weakrefs are not safe to be read, since they can be NULL.
3983 They are !TREE_PUBLIC && !DECL_EXTERNAL but still
3984 have DECL_WEAK flag set. */
3985 && (! VAR_OR_FUNCTION_DECL_P (exp) || ! DECL_WEAK (exp))
3986 /* Loading a static variable is unduly expensive, but global
3987 registers aren't expensive. */
3988 && (! TREE_STATIC (exp) || DECL_REGISTER (exp))));
3991 /* Subroutine for fold_truth_andor: determine if an operand is simple enough
3992 to be evaluated unconditionally.
3993 In addition to simple_operand_p, we assume that comparisons, conversions,
3994 and logic-not operations are simple, if their operands are simple, too. */
3996 static bool
3997 simple_operand_p_2 (tree exp)
3999 enum tree_code code;
4001 if (TREE_SIDE_EFFECTS (exp)
4002 || tree_could_trap_p (exp))
4003 return false;
4005 while (CONVERT_EXPR_P (exp))
4006 exp = TREE_OPERAND (exp, 0);
4008 code = TREE_CODE (exp);
4010 if (TREE_CODE_CLASS (code) == tcc_comparison)
4011 return (simple_operand_p (TREE_OPERAND (exp, 0))
4012 && simple_operand_p (TREE_OPERAND (exp, 1)));
4014 if (code == TRUTH_NOT_EXPR)
4015 return simple_operand_p_2 (TREE_OPERAND (exp, 0));
4017 return simple_operand_p (exp);
4021 /* The following functions are subroutines to fold_range_test and allow it to
4022 try to change a logical combination of comparisons into a range test.
4024 For example, both
4025 X == 2 || X == 3 || X == 4 || X == 5
4026 and
4027 X >= 2 && X <= 5
4028 are converted to
4029 (unsigned) (X - 2) <= 3
4031 We describe each set of comparisons as being either inside or outside
4032 a range, using a variable named like IN_P, and then describe the
4033 range with a lower and upper bound. If one of the bounds is omitted,
4034 it represents either the highest or lowest value of the type.
4036 In the comments below, we represent a range by two numbers in brackets
4037 preceded by a "+" to designate being inside that range, or a "-" to
4038 designate being outside that range, so the condition can be inverted by
4039 flipping the prefix. An omitted bound is represented by a "-". For
4040 example, "- [-, 10]" means being outside the range starting at the lowest
4041 possible value and ending at 10, in other words, being greater than 10.
4042 The range "+ [-, -]" is always true and hence the range "- [-, -]" is
4043 always false.
4045 We set up things so that the missing bounds are handled in a consistent
4046 manner so neither a missing bound nor "true" and "false" need to be
4047 handled using a special case. */
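/* Editor's sketch: the conversion described above, written as source-level
   C. Both forms fold to the single unsigned comparison in the return
   statement; the subtraction is done unsigned to avoid overflow. */
static int
range_test_example (int x)
{
  /* x == 2 || x == 3 || x == 4 || x == 5
     x >= 2 && x <= 5  */
  return (unsigned int) x - 2u <= 3u;
}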
4049 /* Return the result of applying CODE to ARG0 and ARG1, but handle the case
4050 of ARG0 and/or ARG1 being omitted, meaning an unlimited range. UPPER0_P
4051 and UPPER1_P are nonzero if the respective argument is an upper bound
4052 and zero for a lower. TYPE, if nonzero, is the type of the result; it
4053 must be specified for a comparison. ARG1 will be converted to ARG0's
4054 type if both are specified. */
4056 static tree
4057 range_binop (enum tree_code code, tree type, tree arg0, int upper0_p,
4058 tree arg1, int upper1_p)
4060 tree tem;
4061 int result;
4062 int sgn0, sgn1;
4064 /* If neither arg represents infinity, do the normal operation.
4065 Else, if not a comparison, return infinity. Else handle the special
4066 comparison rules. Note that most of the cases below won't occur, but
4067 are handled for consistency. */
4069 if (arg0 != 0 && arg1 != 0)
4071 tem = fold_build2 (code, type != 0 ? type : TREE_TYPE (arg0),
4072 arg0, fold_convert (TREE_TYPE (arg0), arg1));
4073 STRIP_NOPS (tem);
4074 return TREE_CODE (tem) == INTEGER_CST ? tem : 0;
4077 if (TREE_CODE_CLASS (code) != tcc_comparison)
4078 return 0;
4080 /* Set SGN[01] to -1 if ARG[01] is a lower bound, 1 for upper, and 0
4081 for neither. In real maths, we cannot assume open ended ranges are
4082 the same. But, this is computer arithmetic, where numbers are finite.
4083 We can therefore make the transformation of any unbounded range with
4084 the value Z, Z being greater than any representable number. This permits
4085 us to treat unbounded ranges as equal. */
4086 sgn0 = arg0 != 0 ? 0 : (upper0_p ? 1 : -1);
4087 sgn1 = arg1 != 0 ? 0 : (upper1_p ? 1 : -1);
4088 switch (code)
4090 case EQ_EXPR:
4091 result = sgn0 == sgn1;
4092 break;
4093 case NE_EXPR:
4094 result = sgn0 != sgn1;
4095 break;
4096 case LT_EXPR:
4097 result = sgn0 < sgn1;
4098 break;
4099 case LE_EXPR:
4100 result = sgn0 <= sgn1;
4101 break;
4102 case GT_EXPR:
4103 result = sgn0 > sgn1;
4104 break;
4105 case GE_EXPR:
4106 result = sgn0 >= sgn1;
4107 break;
4108 default:
4109 gcc_unreachable ();
4112 return constant_boolean_node (result, type);
4115 /* Helper routine for make_range. Perform one step for it, return
4116 new expression if the loop should continue or NULL_TREE if it should
4117 stop. */
4119 tree
4120 make_range_step (location_t loc, enum tree_code code, tree arg0, tree arg1,
4121 tree exp_type, tree *p_low, tree *p_high, int *p_in_p,
4122 bool *strict_overflow_p)
4124 tree arg0_type = TREE_TYPE (arg0);
4125 tree n_low, n_high, low = *p_low, high = *p_high;
4126 int in_p = *p_in_p, n_in_p;
4128 switch (code)
4130 case TRUTH_NOT_EXPR:
4131 /* We can only do something if the range is testing for zero. */
4132 if (low == NULL_TREE || high == NULL_TREE
4133 || ! integer_zerop (low) || ! integer_zerop (high))
4134 return NULL_TREE;
4135 *p_in_p = ! in_p;
4136 return arg0;
4138 case EQ_EXPR: case NE_EXPR:
4139 case LT_EXPR: case LE_EXPR: case GE_EXPR: case GT_EXPR:
4140 /* We can only do something if the range is testing for zero
4141 and if the second operand is an integer constant. Note that
4142 saying something is "in" the range we make is done by
4143 complementing IN_P, since the range is initially set up for
4144 being not equal to zero; "out" is leaving it alone. */
4145 if (low == NULL_TREE || high == NULL_TREE
4146 || ! integer_zerop (low) || ! integer_zerop (high)
4147 || TREE_CODE (arg1) != INTEGER_CST)
4148 return NULL_TREE;
4150 switch (code)
4152 case NE_EXPR: /* - [c, c] */
4153 low = high = arg1;
4154 break;
4155 case EQ_EXPR: /* + [c, c] */
4156 in_p = ! in_p, low = high = arg1;
4157 break;
4158 case GT_EXPR: /* - [-, c] */
4159 low = 0, high = arg1;
4160 break;
4161 case GE_EXPR: /* + [c, -] */
4162 in_p = ! in_p, low = arg1, high = 0;
4163 break;
4164 case LT_EXPR: /* - [c, -] */
4165 low = arg1, high = 0;
4166 break;
4167 case LE_EXPR: /* + [-, c] */
4168 in_p = ! in_p, low = 0, high = arg1;
4169 break;
4170 default:
4171 gcc_unreachable ();
4174 /* If this is an unsigned comparison, we also know that EXP is
4175 greater than or equal to zero. We base the range tests we make
4176 on that fact, so we record it here so we can parse existing
4177 range tests. We test arg0_type since often the return type
4178 of, e.g. EQ_EXPR, is boolean. */
4179 if (TYPE_UNSIGNED (arg0_type) && (low == 0 || high == 0))
4181 if (! merge_ranges (&n_in_p, &n_low, &n_high,
4182 in_p, low, high, 1,
4183 build_int_cst (arg0_type, 0),
4184 NULL_TREE))
4185 return NULL_TREE;
4187 in_p = n_in_p, low = n_low, high = n_high;
4189 /* If the high bound is missing, but we have a nonzero low
4190 bound, reverse the range so it goes from zero to the low bound
4191 minus 1. */
4192 if (high == 0 && low && ! integer_zerop (low))
4194 in_p = ! in_p;
4195 high = range_binop (MINUS_EXPR, NULL_TREE, low, 0,
4196 build_int_cst (TREE_TYPE (low), 1), 0);
4197 low = build_int_cst (arg0_type, 0);
4201 *p_low = low;
4202 *p_high = high;
4203 *p_in_p = in_p;
4204 return arg0;
4206 case NEGATE_EXPR:
4207 /* If flag_wrapv and ARG0_TYPE is signed, make sure
4208 low and high are non-NULL, then normalize will DTRT. */
4209 if (!TYPE_UNSIGNED (arg0_type)
4210 && !TYPE_OVERFLOW_UNDEFINED (arg0_type))
4212 if (low == NULL_TREE)
4213 low = TYPE_MIN_VALUE (arg0_type);
4214 if (high == NULL_TREE)
4215 high = TYPE_MAX_VALUE (arg0_type);
4218 /* (-x) IN [a,b] -> x in [-b, -a] */
4219 n_low = range_binop (MINUS_EXPR, exp_type,
4220 build_int_cst (exp_type, 0),
4221 0, high, 1);
4222 n_high = range_binop (MINUS_EXPR, exp_type,
4223 build_int_cst (exp_type, 0),
4224 0, low, 0);
4225 if (n_high != 0 && TREE_OVERFLOW (n_high))
4226 return NULL_TREE;
4227 goto normalize;
4229 case BIT_NOT_EXPR:
4230 /* ~ X -> -X - 1 */
4231 return build2_loc (loc, MINUS_EXPR, exp_type, negate_expr (arg0),
4232 build_int_cst (exp_type, 1));
4234 case PLUS_EXPR:
4235 case MINUS_EXPR:
4236 if (TREE_CODE (arg1) != INTEGER_CST)
4237 return NULL_TREE;
4239 /* If flag_wrapv and ARG0_TYPE is signed, then we cannot
4240 move a constant to the other side. */
4241 if (!TYPE_UNSIGNED (arg0_type)
4242 && !TYPE_OVERFLOW_UNDEFINED (arg0_type))
4243 return NULL_TREE;
4245 /* If EXP is signed, any overflow in the computation is undefined,
4246 so we don't worry about it so long as our computations on
4247 the bounds don't overflow. For unsigned, overflow is defined
4248 and this is exactly the right thing. */
4249 n_low = range_binop (code == MINUS_EXPR ? PLUS_EXPR : MINUS_EXPR,
4250 arg0_type, low, 0, arg1, 0);
4251 n_high = range_binop (code == MINUS_EXPR ? PLUS_EXPR : MINUS_EXPR,
4252 arg0_type, high, 1, arg1, 0);
4253 if ((n_low != 0 && TREE_OVERFLOW (n_low))
4254 || (n_high != 0 && TREE_OVERFLOW (n_high)))
4255 return NULL_TREE;
4257 if (TYPE_OVERFLOW_UNDEFINED (arg0_type))
4258 *strict_overflow_p = true;
4260 normalize:
4261 /* Check for an unsigned range which has wrapped around the maximum
4262 value thus making n_high < n_low, and normalize it. */
4263 if (n_low && n_high && tree_int_cst_lt (n_high, n_low))
4265 low = range_binop (PLUS_EXPR, arg0_type, n_high, 0,
4266 build_int_cst (TREE_TYPE (n_high), 1), 0);
4267 high = range_binop (MINUS_EXPR, arg0_type, n_low, 0,
4268 build_int_cst (TREE_TYPE (n_low), 1), 0);
4270 /* If the range is of the form +/- [ x+1, x ], we won't
4271 be able to normalize it. But then, it represents the
4272 whole range or the empty set, so make it
4273 +/- [ -, - ]. */
4274 if (tree_int_cst_equal (n_low, low)
4275 && tree_int_cst_equal (n_high, high))
4276 low = high = 0;
4277 else
4278 in_p = ! in_p;
4280 else
4281 low = n_low, high = n_high;
4283 *p_low = low;
4284 *p_high = high;
4285 *p_in_p = in_p;
4286 return arg0;
4288 CASE_CONVERT:
4289 case NON_LVALUE_EXPR:
4290 if (TYPE_PRECISION (arg0_type) > TYPE_PRECISION (exp_type))
4291 return NULL_TREE;
4293 if (! INTEGRAL_TYPE_P (arg0_type)
4294 || (low != 0 && ! int_fits_type_p (low, arg0_type))
4295 || (high != 0 && ! int_fits_type_p (high, arg0_type)))
4296 return NULL_TREE;
4298 n_low = low, n_high = high;
4300 if (n_low != 0)
4301 n_low = fold_convert_loc (loc, arg0_type, n_low);
4303 if (n_high != 0)
4304 n_high = fold_convert_loc (loc, arg0_type, n_high);
4306 /* If we're converting arg0 from an unsigned type to exp's
4307 signed type, we will be doing the comparison as unsigned.
4308 The tests above have already verified that LOW and HIGH
4309 are both positive.
4311 So we have to ensure that we will handle large unsigned
4312 values the same way that the current signed bounds treat
4313 negative values. */
4315 if (!TYPE_UNSIGNED (exp_type) && TYPE_UNSIGNED (arg0_type))
4317 tree high_positive;
4318 tree equiv_type;
4319 /* For fixed-point modes, we need to pass the saturating flag
4320 as the 2nd parameter. */
4321 if (ALL_FIXED_POINT_MODE_P (TYPE_MODE (arg0_type)))
4322 equiv_type
4323 = lang_hooks.types.type_for_mode (TYPE_MODE (arg0_type),
4324 TYPE_SATURATING (arg0_type));
4325 else
4326 equiv_type
4327 = lang_hooks.types.type_for_mode (TYPE_MODE (arg0_type), 1);
4329 /* A range without an upper bound is, naturally, unbounded.
4330 Since convert would have cropped a very large value, use
4331 the max value for the destination type. */
4332 high_positive
4333 = TYPE_MAX_VALUE (equiv_type) ? TYPE_MAX_VALUE (equiv_type)
4334 : TYPE_MAX_VALUE (arg0_type);
4336 if (TYPE_PRECISION (exp_type) == TYPE_PRECISION (arg0_type))
4337 high_positive = fold_build2_loc (loc, RSHIFT_EXPR, arg0_type,
4338 fold_convert_loc (loc, arg0_type,
4339 high_positive),
4340 build_int_cst (arg0_type, 1));
4342 /* If the low bound is specified, "and" the range with the
4343 range for which the original unsigned value will be
4344 positive. */
4345 if (low != 0)
4347 if (! merge_ranges (&n_in_p, &n_low, &n_high, 1, n_low, n_high,
4348 1, fold_convert_loc (loc, arg0_type,
4349 integer_zero_node),
4350 high_positive))
4351 return NULL_TREE;
4353 in_p = (n_in_p == in_p);
4355 else
4357 /* Otherwise, "or" the range with the range of the input
4358 that will be interpreted as negative. */
4359 if (! merge_ranges (&n_in_p, &n_low, &n_high, 0, n_low, n_high,
4360 1, fold_convert_loc (loc, arg0_type,
4361 integer_zero_node),
4362 high_positive))
4363 return NULL_TREE;
4365 in_p = (in_p != n_in_p);
4369 *p_low = n_low;
4370 *p_high = n_high;
4371 *p_in_p = in_p;
4372 return arg0;
4374 default:
4375 return NULL_TREE;
4379 /* Given EXP, a logical expression, set the range it is testing into
4380 variables denoted by PIN_P, PLOW, and PHIGH. Return the expression
4381 actually being tested. *PLOW and *PHIGH will be made of the same
4382 type as the returned expression. If EXP is not a comparison, we
4383 will most likely not be returning a useful value and range. Set
4384 *STRICT_OVERFLOW_P to true if the return value is only valid
4385 because signed overflow is undefined; otherwise, do not change
4386 *STRICT_OVERFLOW_P. */
4388 tree
4389 make_range (tree exp, int *pin_p, tree *plow, tree *phigh,
4390 bool *strict_overflow_p)
4392 enum tree_code code;
4393 tree arg0, arg1 = NULL_TREE;
4394 tree exp_type, nexp;
4395 int in_p;
4396 tree low, high;
4397 location_t loc = EXPR_LOCATION (exp);
4399 /* Start with simply saying "EXP != 0" and then look at the code of EXP
4400 and see if we can refine the range. Some of the cases below may not
4401 happen, but it doesn't seem worth worrying about this. We "continue"
4402 the outer loop when we've changed something; otherwise we "break"
4403 the switch, which will "break" the while. */
4405 in_p = 0;
4406 low = high = build_int_cst (TREE_TYPE (exp), 0);
4408 while (1)
4410 code = TREE_CODE (exp);
4411 exp_type = TREE_TYPE (exp);
4412 arg0 = NULL_TREE;
4414 if (IS_EXPR_CODE_CLASS (TREE_CODE_CLASS (code)))
4416 if (TREE_OPERAND_LENGTH (exp) > 0)
4417 arg0 = TREE_OPERAND (exp, 0);
4418 if (TREE_CODE_CLASS (code) == tcc_binary
4419 || TREE_CODE_CLASS (code) == tcc_comparison
4420 || (TREE_CODE_CLASS (code) == tcc_expression
4421 && TREE_OPERAND_LENGTH (exp) > 1))
4422 arg1 = TREE_OPERAND (exp, 1);
4424 if (arg0 == NULL_TREE)
4425 break;
4427 nexp = make_range_step (loc, code, arg0, arg1, exp_type, &low,
4428 &high, &in_p, strict_overflow_p);
4429 if (nexp == NULL_TREE)
4430 break;
4431 exp = nexp;
4434 /* If EXP is a constant, we can evaluate whether this is true or false. */
4435 if (TREE_CODE (exp) == INTEGER_CST)
4437 in_p = in_p == (integer_onep (range_binop (GE_EXPR, integer_type_node,
4438 exp, 0, low, 0))
4439 && integer_onep (range_binop (LE_EXPR, integer_type_node,
4440 exp, 1, high, 1)));
4441 low = high = 0;
4442 exp = 0;
4445 *pin_p = in_p, *plow = low, *phigh = high;
4446 return exp;
4449 /* Given a range, LOW, HIGH, and IN_P, an expression, EXP, and a result
4450 type, TYPE, return an expression to test if EXP is in (or out of, depending
4451 on IN_P) the range. Return 0 if the test couldn't be created. */
4453 tree
4454 build_range_check (location_t loc, tree type, tree exp, int in_p,
4455 tree low, tree high)
4457 tree etype = TREE_TYPE (exp), value;
4459 #ifdef HAVE_canonicalize_funcptr_for_compare
4460 /* Disable this optimization for function pointer expressions
4461 on targets that require function pointer canonicalization. */
4462 if (HAVE_canonicalize_funcptr_for_compare
4463 && TREE_CODE (etype) == POINTER_TYPE
4464 && TREE_CODE (TREE_TYPE (etype)) == FUNCTION_TYPE)
4465 return NULL_TREE;
4466 #endif
4468 if (! in_p)
4470 value = build_range_check (loc, type, exp, 1, low, high);
4471 if (value != 0)
4472 return invert_truthvalue_loc (loc, value);
4474 return 0;
4477 if (low == 0 && high == 0)
4478 return omit_one_operand_loc (loc, type, build_int_cst (type, 1), exp);
4480 if (low == 0)
4481 return fold_build2_loc (loc, LE_EXPR, type, exp,
4482 fold_convert_loc (loc, etype, high));
4484 if (high == 0)
4485 return fold_build2_loc (loc, GE_EXPR, type, exp,
4486 fold_convert_loc (loc, etype, low));
4488 if (operand_equal_p (low, high, 0))
4489 return fold_build2_loc (loc, EQ_EXPR, type, exp,
4490 fold_convert_loc (loc, etype, low));
4492 if (integer_zerop (low))
4494 if (! TYPE_UNSIGNED (etype))
4496 etype = unsigned_type_for (etype);
4497 high = fold_convert_loc (loc, etype, high);
4498 exp = fold_convert_loc (loc, etype, exp);
4500 return build_range_check (loc, type, exp, 1, 0, high);
4503 /* Optimize (c>=1) && (c<=127) into (signed char)c > 0. */
4504 if (integer_onep (low) && TREE_CODE (high) == INTEGER_CST)
4506 int prec = TYPE_PRECISION (etype);
4508 if (wi::mask (prec - 1, false, prec) == high)
4510 if (TYPE_UNSIGNED (etype))
4512 tree signed_etype = signed_type_for (etype);
4513 if (TYPE_PRECISION (signed_etype) != TYPE_PRECISION (etype))
4514 etype
4515 = build_nonstandard_integer_type (TYPE_PRECISION (etype), 0);
4516 else
4517 etype = signed_etype;
4518 exp = fold_convert_loc (loc, etype, exp);
4520 return fold_build2_loc (loc, GT_EXPR, type, exp,
4521 build_int_cst (etype, 0));
4525 /* Optimize (c>=low) && (c<=high) into (c-low>=0) && (c-low<=high-low).
4526 This requires wrap-around arithmetic for the type of the expression.
4527 First make sure that arithmetic in this type is valid, then make sure
4528 that it wraps around. */
4529 if (TREE_CODE (etype) == ENUMERAL_TYPE || TREE_CODE (etype) == BOOLEAN_TYPE)
4530 etype = lang_hooks.types.type_for_size (TYPE_PRECISION (etype),
4531 TYPE_UNSIGNED (etype));
4533 if (TREE_CODE (etype) == INTEGER_TYPE && !TYPE_OVERFLOW_WRAPS (etype))
4535 tree utype, minv, maxv;
4537 /* Check if (unsigned) INT_MAX + 1 == (unsigned) INT_MIN
4538 for the type in question, as we rely on this here. */
4539 utype = unsigned_type_for (etype);
4540 maxv = fold_convert_loc (loc, utype, TYPE_MAX_VALUE (etype));
4541 maxv = range_binop (PLUS_EXPR, NULL_TREE, maxv, 1,
4542 build_int_cst (TREE_TYPE (maxv), 1), 1);
4543 minv = fold_convert_loc (loc, utype, TYPE_MIN_VALUE (etype));
4545 if (integer_zerop (range_binop (NE_EXPR, integer_type_node,
4546 minv, 1, maxv, 1)))
4547 etype = utype;
4548 else
4549 return 0;
4552 high = fold_convert_loc (loc, etype, high);
4553 low = fold_convert_loc (loc, etype, low);
4554 exp = fold_convert_loc (loc, etype, exp);
4556 value = const_binop (MINUS_EXPR, high, low);
4559 if (POINTER_TYPE_P (etype))
4561 if (value != 0 && !TREE_OVERFLOW (value))
4563 low = fold_build1_loc (loc, NEGATE_EXPR, TREE_TYPE (low), low);
4564 return build_range_check (loc, type,
4565 fold_build_pointer_plus_loc (loc, exp, low),
4566 1, build_int_cst (etype, 0), value);
4568 return 0;
4571 if (value != 0 && !TREE_OVERFLOW (value))
4572 return build_range_check (loc, type,
4573 fold_build2_loc (loc, MINUS_EXPR, etype, exp, low),
4574 1, build_int_cst (etype, 0), value);
4576 return 0;
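/* Editor's sketch: the rebasing build_range_check performs, on a concrete
   unsigned 8-bit example. Valid whenever low <= high, and it relies on
   the wrap-around behaviour checked for above. */
static int
in_range_u8 (unsigned char c, unsigned char low, unsigned char high)
{
  /* c >= low && c <= high  becomes one compare:  */
  return (unsigned char) (c - low) <= (unsigned char) (high - low);
}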
4579 /* Return the predecessor of VAL in its type, handling the infinite case. */
4581 static tree
4582 range_predecessor (tree val)
4584 tree type = TREE_TYPE (val);
4586 if (INTEGRAL_TYPE_P (type)
4587 && operand_equal_p (val, TYPE_MIN_VALUE (type), 0))
4588 return 0;
4589 else
4590 return range_binop (MINUS_EXPR, NULL_TREE, val, 0,
4591 build_int_cst (TREE_TYPE (val), 1), 0);
4594 /* Return the successor of VAL in its type, handling the infinite case. */
4596 static tree
4597 range_successor (tree val)
4599 tree type = TREE_TYPE (val);
4601 if (INTEGRAL_TYPE_P (type)
4602 && operand_equal_p (val, TYPE_MAX_VALUE (type), 0))
4603 return 0;
4604 else
4605 return range_binop (PLUS_EXPR, NULL_TREE, val, 0,
4606 build_int_cst (TREE_TYPE (val), 1), 0);
4609 /* Given two ranges, see if we can merge them into one. Return 1 if we
4610 can, 0 if we can't. Set the output range into the specified parameters. */
4612 bool
4613 merge_ranges (int *pin_p, tree *plow, tree *phigh, int in0_p, tree low0,
4614 tree high0, int in1_p, tree low1, tree high1)
4616 int no_overlap;
4617 int subset;
4618 int temp;
4619 tree tem;
4620 int in_p;
4621 tree low, high;
4622 int lowequal = ((low0 == 0 && low1 == 0)
4623 || integer_onep (range_binop (EQ_EXPR, integer_type_node,
4624 low0, 0, low1, 0)));
4625 int highequal = ((high0 == 0 && high1 == 0)
4626 || integer_onep (range_binop (EQ_EXPR, integer_type_node,
4627 high0, 1, high1, 1)));
4629 /* Make range 0 be the range that starts first, or ends last if they
4630 start at the same value. Swap them if it isn't. */
4631 if (integer_onep (range_binop (GT_EXPR, integer_type_node,
4632 low0, 0, low1, 0))
4633 || (lowequal
4634 && integer_onep (range_binop (GT_EXPR, integer_type_node,
4635 high1, 1, high0, 1))))
4637 temp = in0_p, in0_p = in1_p, in1_p = temp;
4638 tem = low0, low0 = low1, low1 = tem;
4639 tem = high0, high0 = high1, high1 = tem;
4642 /* Now flag two cases, whether the ranges are disjoint or whether the
4643 second range is totally subsumed in the first. Note that the tests
4644 below are simplified by the ones above. */
4645 no_overlap = integer_onep (range_binop (LT_EXPR, integer_type_node,
4646 high0, 1, low1, 0));
4647 subset = integer_onep (range_binop (LE_EXPR, integer_type_node,
4648 high1, 1, high0, 1));
4650 /* We now have four cases, depending on whether we are including or
4651 excluding the two ranges. */
4652 if (in0_p && in1_p)
4654 /* If they don't overlap, the result is false. If the second range
4655 is a subset it is the result. Otherwise, the range is from the start
4656 of the second to the end of the first. */
4657 if (no_overlap)
4658 in_p = 0, low = high = 0;
4659 else if (subset)
4660 in_p = 1, low = low1, high = high1;
4661 else
4662 in_p = 1, low = low1, high = high0;
4665 else if (in0_p && ! in1_p)
4667 /* If they don't overlap, the result is the first range. If they are
4668 equal, the result is false. If the second range is a subset of the
4669 first, and the ranges begin at the same place, we go from just after
4670 the end of the second range to the end of the first. If the second
4671 range is not a subset of the first, or if it is a subset and both
4672 ranges end at the same place, the range starts at the start of the
4673 first range and ends just before the second range.
4674 Otherwise, we can't describe this as a single range. */
4675 if (no_overlap)
4676 in_p = 1, low = low0, high = high0;
4677 else if (lowequal && highequal)
4678 in_p = 0, low = high = 0;
4679 else if (subset && lowequal)
4681 low = range_successor (high1);
4682 high = high0;
4683 in_p = 1;
4684 if (low == 0)
4686 /* We are in the weird situation where high0 > high1 but
4687 high1 has no successor. Punt. */
4688 return 0;
4691 else if (! subset || highequal)
4693 low = low0;
4694 high = range_predecessor (low1);
4695 in_p = 1;
4696 if (high == 0)
4698 /* low0 < low1 but low1 has no predecessor. Punt. */
4699 return 0;
4702 else
4703 return 0;
4706 else if (! in0_p && in1_p)
4708 /* If they don't overlap, the result is the second range. If the second
4709 is a subset of the first, the result is false. Otherwise,
4710 the range starts just after the first range and ends at the
4711 end of the second. */
4712 if (no_overlap)
4713 in_p = 1, low = low1, high = high1;
4714 else if (subset || highequal)
4715 in_p = 0, low = high = 0;
4716 else
4718 low = range_successor (high0);
4719 high = high1;
4720 in_p = 1;
4721 if (low == 0)
4723 /* high1 > high0 but high0 has no successor. Punt. */
4724 return 0;
4729 else
4731 /* The case where we are excluding both ranges. Here the complex case
4732 is if they don't overlap. In that case, the only time we have a
4733 range is if they are adjacent. If the second is a subset of the
4734 first, the result is the first. Otherwise, the range to exclude
4735 starts at the beginning of the first range and ends at the end of the
4736 second. */
4737 if (no_overlap)
4739 if (integer_onep (range_binop (EQ_EXPR, integer_type_node,
4740 range_successor (high0),
4741 1, low1, 0)))
4742 in_p = 0, low = low0, high = high1;
4743 else
4745 /* Canonicalize - [min, x] into - [-, x]. */
4746 if (low0 && TREE_CODE (low0) == INTEGER_CST)
4747 switch (TREE_CODE (TREE_TYPE (low0)))
4749 case ENUMERAL_TYPE:
4750 if (TYPE_PRECISION (TREE_TYPE (low0))
4751 != GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (low0))))
4752 break;
4753 /* FALLTHROUGH */
4754 case INTEGER_TYPE:
4755 if (tree_int_cst_equal (low0,
4756 TYPE_MIN_VALUE (TREE_TYPE (low0))))
4757 low0 = 0;
4758 break;
4759 case POINTER_TYPE:
4760 if (TYPE_UNSIGNED (TREE_TYPE (low0))
4761 && integer_zerop (low0))
4762 low0 = 0;
4763 break;
4764 default:
4765 break;
4768 /* Canonicalize - [x, max] into - [x, -]. */
4769 if (high1 && TREE_CODE (high1) == INTEGER_CST)
4770 switch (TREE_CODE (TREE_TYPE (high1)))
4772 case ENUMERAL_TYPE:
4773 if (TYPE_PRECISION (TREE_TYPE (high1))
4774 != GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (high1))))
4775 break;
4776 /* FALLTHROUGH */
4777 case INTEGER_TYPE:
4778 if (tree_int_cst_equal (high1,
4779 TYPE_MAX_VALUE (TREE_TYPE (high1))))
4780 high1 = 0;
4781 break;
4782 case POINTER_TYPE:
4783 if (TYPE_UNSIGNED (TREE_TYPE (high1))
4784 && integer_zerop (range_binop (PLUS_EXPR, NULL_TREE,
4785 high1, 1,
4786 build_int_cst (TREE_TYPE (high1), 1),
4787 1)))
4788 high1 = 0;
4789 break;
4790 default:
4791 break;
4794 /* The ranges might be also adjacent between the maximum and
4795 minimum values of the given type. For
4796 - [{min,-}, x] and - [y, {max,-}] ranges where x + 1 < y
4797 return + [x + 1, y - 1]. */
4798 if (low0 == 0 && high1 == 0)
4800 low = range_successor (high0);
4801 high = range_predecessor (low1);
4802 if (low == 0 || high == 0)
4803 return 0;
4805 in_p = 1;
4807 else
4808 return 0;
4811 else if (subset)
4812 in_p = 0, low = low0, high = high0;
4813 else
4814 in_p = 0, low = low0, high = high1;
4817 *pin_p = in_p, *plow = low, *phigh = high;
4818 return 1;
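/* Editor's sketch: merging two "inside" ranges is intersection once the
   swap above has ordered them; +[2, 9] and +[5, 12] merge to +[5, 9],
   so the combined condition is again a single range check. */
static int
merged_ranges_example (int x)
{
  /* (x >= 2 && x <= 9) && (x >= 5 && x <= 12)  ==  x >= 5 && x <= 9  */
  return (unsigned int) x - 5u <= 4u;
}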
4822 /* Subroutine of fold, looking inside expressions of the form
4823 A op B ? A : C, where ARG0, ARG1 and ARG2 are the three operands
4824 of the COND_EXPR. This function is also used to optimize
4825 A op B ? C : A, by reversing the comparison first.
4827 Return a folded expression whose code is not a COND_EXPR
4828 anymore, or NULL_TREE if no folding opportunity is found. */
4830 static tree
4831 fold_cond_expr_with_comparison (location_t loc, tree type,
4832 tree arg0, tree arg1, tree arg2)
4834 enum tree_code comp_code = TREE_CODE (arg0);
4835 tree arg00 = TREE_OPERAND (arg0, 0);
4836 tree arg01 = TREE_OPERAND (arg0, 1);
4837 tree arg1_type = TREE_TYPE (arg1);
4838 tree tem;
4840 STRIP_NOPS (arg1);
4841 STRIP_NOPS (arg2);
4843 /* If we have A op 0 ? A : -A, consider applying the following
4844 transformations:
4846 A == 0? A : -A same as -A
4847 A != 0? A : -A same as A
4848 A >= 0? A : -A same as abs (A)
4849 A > 0? A : -A same as abs (A)
4850 A <= 0? A : -A same as -abs (A)
4851 A < 0? A : -A same as -abs (A)
4853 None of these transformations work for modes with signed
4854 zeros. If A is +/-0, the first two transformations will
4855 change the sign of the result (from +0 to -0, or vice
4856 versa). The last four will fix the sign of the result,
4857 even though the original expressions could be positive or
4858 negative, depending on the sign of A.
4860 Note that all these transformations are correct if A is
4861 NaN, since the two alternatives (A and -A) are also NaNs. */
4862 if (!HONOR_SIGNED_ZEROS (element_mode (type))
4863 && (FLOAT_TYPE_P (TREE_TYPE (arg01))
4864 ? real_zerop (arg01)
4865 : integer_zerop (arg01))
4866 && ((TREE_CODE (arg2) == NEGATE_EXPR
4867 && operand_equal_p (TREE_OPERAND (arg2, 0), arg1, 0))
4868 /* In the case that A is of the form X-Y, '-A' (arg2) may
4869 have already been folded to Y-X, check for that. */
4870 || (TREE_CODE (arg1) == MINUS_EXPR
4871 && TREE_CODE (arg2) == MINUS_EXPR
4872 && operand_equal_p (TREE_OPERAND (arg1, 0),
4873 TREE_OPERAND (arg2, 1), 0)
4874 && operand_equal_p (TREE_OPERAND (arg1, 1),
4875 TREE_OPERAND (arg2, 0), 0))))
4876 switch (comp_code)
4878 case EQ_EXPR:
4879 case UNEQ_EXPR:
4880 tem = fold_convert_loc (loc, arg1_type, arg1);
4881 return pedantic_non_lvalue_loc (loc,
4882 fold_convert_loc (loc, type,
4883 negate_expr (tem)));
4884 case NE_EXPR:
4885 case LTGT_EXPR:
4886 return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
4887 case UNGE_EXPR:
4888 case UNGT_EXPR:
4889 if (flag_trapping_math)
4890 break;
4891 /* Fall through. */
4892 case GE_EXPR:
4893 case GT_EXPR:
4894 if (TYPE_UNSIGNED (TREE_TYPE (arg1)))
4895 arg1 = fold_convert_loc (loc, signed_type_for
4896 (TREE_TYPE (arg1)), arg1);
4897 tem = fold_build1_loc (loc, ABS_EXPR, TREE_TYPE (arg1), arg1);
4898 return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, tem));
4899 case UNLE_EXPR:
4900 case UNLT_EXPR:
4901 if (flag_trapping_math)
4902 break;
4903 case LE_EXPR:
4904 case LT_EXPR:
4905 if (TYPE_UNSIGNED (TREE_TYPE (arg1)))
4906 arg1 = fold_convert_loc (loc, signed_type_for
4907 (TREE_TYPE (arg1)), arg1);
4908 tem = fold_build1_loc (loc, ABS_EXPR, TREE_TYPE (arg1), arg1);
4909 return negate_expr (fold_convert_loc (loc, type, tem));
4910 default:
4911 gcc_assert (TREE_CODE_CLASS (comp_code) == tcc_comparison);
4912 break;
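/* Editor's sketch: the A op 0 ? A : -A rules above, for a type with no
   signed zeros. Integer absolute value is the canonical instance. */
static int
iabs_example (int x)
{
  return x >= 0 ? x : -x;  /* folds to ABS_EXPR <x> */
}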
4915 /* A != 0 ? A : 0 is simply A, unless A is -0. Likewise
4916 A == 0 ? A : 0 is always 0 unless A is -0. Note that
4917 both transformations are correct when A is NaN: A != 0
4918 is then true, and A == 0 is false. */
4920 if (!HONOR_SIGNED_ZEROS (element_mode (type))
4921 && integer_zerop (arg01) && integer_zerop (arg2))
4923 if (comp_code == NE_EXPR)
4924 return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
4925 else if (comp_code == EQ_EXPR)
4926 return build_zero_cst (type);
4929 /* Try some transformations of A op B ? A : B.
4931 A == B? A : B same as B
4932 A != B? A : B same as A
4933 A >= B? A : B same as max (A, B)
4934 A > B? A : B same as max (B, A)
4935 A <= B? A : B same as min (A, B)
4936 A < B? A : B same as min (B, A)
4938 As above, these transformations don't work in the presence
4939 of signed zeros. For example, if A and B are zeros of
4940 opposite sign, the first two transformations will change
4941 the sign of the result. In the last four, the original
4942 expressions give different results for (A=+0, B=-0) and
4943 (A=-0, B=+0), but the transformed expressions do not.
4945 The first two transformations are correct if either A or B
4946 is a NaN. In the first transformation, the condition will
4947 be false, and B will indeed be chosen. In the case of the
4948 second transformation, the condition A != B will be true,
4949 and A will be chosen.
4951 The conversions to max() and min() are not correct if B is
4952 a number and A is not. The conditions in the original
4953 expressions will be false, so all four give B. The min()
4954 and max() versions would give a NaN instead. */
4955 if (!HONOR_SIGNED_ZEROS (element_mode (type))
4956 && operand_equal_for_comparison_p (arg01, arg2, arg00)
4957 /* Avoid these transformations if the COND_EXPR may be used
4958 as an lvalue in the C++ front-end. PR c++/19199. */
4959 && (in_gimple_form
4960 || VECTOR_TYPE_P (type)
4961 || (! lang_GNU_CXX ()
4962 && strcmp (lang_hooks.name, "GNU Objective-C++") != 0)
4963 || ! maybe_lvalue_p (arg1)
4964 || ! maybe_lvalue_p (arg2)))
4966 tree comp_op0 = arg00;
4967 tree comp_op1 = arg01;
4968 tree comp_type = TREE_TYPE (comp_op0);
4970 /* Avoid adding NOP_EXPRs in case this is an lvalue. */
4971 if (TYPE_MAIN_VARIANT (comp_type) == TYPE_MAIN_VARIANT (type))
4973 comp_type = type;
4974 comp_op0 = arg1;
4975 comp_op1 = arg2;
4978 switch (comp_code)
4980 case EQ_EXPR:
4981 return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, arg2));
4982 case NE_EXPR:
4983 return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
4984 case LE_EXPR:
4985 case LT_EXPR:
4986 case UNLE_EXPR:
4987 case UNLT_EXPR:
4988 /* In C++ a ?: expression can be an lvalue, so put the
4989 operand which will be used if they are equal first
4990 so that we can convert this back to the
4991 corresponding COND_EXPR. */
4992 if (!HONOR_NANS (arg1))
4994 comp_op0 = fold_convert_loc (loc, comp_type, comp_op0);
4995 comp_op1 = fold_convert_loc (loc, comp_type, comp_op1);
4996 tem = (comp_code == LE_EXPR || comp_code == UNLE_EXPR)
4997 ? fold_build2_loc (loc, MIN_EXPR, comp_type, comp_op0, comp_op1)
4998 : fold_build2_loc (loc, MIN_EXPR, comp_type,
4999 comp_op1, comp_op0);
5000 return pedantic_non_lvalue_loc (loc,
5001 fold_convert_loc (loc, type, tem));
5003 break;
5004 case GE_EXPR:
5005 case GT_EXPR:
5006 case UNGE_EXPR:
5007 case UNGT_EXPR:
5008 if (!HONOR_NANS (arg1))
5010 comp_op0 = fold_convert_loc (loc, comp_type, comp_op0);
5011 comp_op1 = fold_convert_loc (loc, comp_type, comp_op1);
5012 tem = (comp_code == GE_EXPR || comp_code == UNGE_EXPR)
5013 ? fold_build2_loc (loc, MAX_EXPR, comp_type, comp_op0, comp_op1)
5014 : fold_build2_loc (loc, MAX_EXPR, comp_type,
5015 comp_op1, comp_op0);
5016 return pedantic_non_lvalue_loc (loc,
5017 fold_convert_loc (loc, type, tem));
5019 break;
5020 case UNEQ_EXPR:
5021 if (!HONOR_NANS (arg1))
5022 return pedantic_non_lvalue_loc (loc,
5023 fold_convert_loc (loc, type, arg2));
5024 break;
5025 case LTGT_EXPR:
5026 if (!HONOR_NANS (arg1))
5027 return pedantic_non_lvalue_loc (loc,
5028 fold_convert_loc (loc, type, arg1));
5029 break;
5030 default:
5031 gcc_assert (TREE_CODE_CLASS (comp_code) == tcc_comparison);
5032 break;
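/* Editor's sketch: the A op B ? A : B rules above, again for a type with
   neither NaNs nor signed zeros. */
static int
imin_example (int x, int y)
{
  return x <= y ? x : y;  /* folds to MIN_EXPR <x, y> */
}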
5036 /* If this is A op C1 ? A : C2 with C1 and C2 constant integers,
5037 we might still be able to simplify this. For example,
5038 if C1 is one less or one more than C2, this might have started
5039 out as a MIN or MAX and been transformed by this function.
5040 Only good for INTEGER_TYPEs, because we need TYPE_MAX_VALUE. */
5042 if (INTEGRAL_TYPE_P (type)
5043 && TREE_CODE (arg01) == INTEGER_CST
5044 && TREE_CODE (arg2) == INTEGER_CST)
5045 switch (comp_code)
5047 case EQ_EXPR:
5048 if (TREE_CODE (arg1) == INTEGER_CST)
5049 break;
5050 /* We can replace A with C1 in this case. */
5051 arg1 = fold_convert_loc (loc, type, arg01);
5052 return fold_build3_loc (loc, COND_EXPR, type, arg0, arg1, arg2);
5054 case LT_EXPR:
5055 /* If C1 is C2 + 1, this is min(A, C2), but use ARG00's type for
5056 MIN_EXPR, to preserve the signedness of the comparison. */
5057 if (! operand_equal_p (arg2, TYPE_MAX_VALUE (type),
5058 OEP_ONLY_CONST)
5059 && operand_equal_p (arg01,
5060 const_binop (PLUS_EXPR, arg2,
5061 build_int_cst (type, 1)),
5062 OEP_ONLY_CONST))
5064 tem = fold_build2_loc (loc, MIN_EXPR, TREE_TYPE (arg00), arg00,
5065 fold_convert_loc (loc, TREE_TYPE (arg00),
5066 arg2));
5067 return pedantic_non_lvalue_loc (loc,
5068 fold_convert_loc (loc, type, tem));
5070 break;
5072 case LE_EXPR:
5073 /* If C1 is C2 - 1, this is min(A, C2), with the same care
5074 as above. */
5075 if (! operand_equal_p (arg2, TYPE_MIN_VALUE (type),
5076 OEP_ONLY_CONST)
5077 && operand_equal_p (arg01,
5078 const_binop (MINUS_EXPR, arg2,
5079 build_int_cst (type, 1)),
5080 OEP_ONLY_CONST))
5082 tem = fold_build2_loc (loc, MIN_EXPR, TREE_TYPE (arg00), arg00,
5083 fold_convert_loc (loc, TREE_TYPE (arg00),
5084 arg2));
5085 return pedantic_non_lvalue_loc (loc,
5086 fold_convert_loc (loc, type, tem));
5088 break;
5090 case GT_EXPR:
5091 /* If C1 is C2 - 1, this is max(A, C2), but use ARG00's type for
5092 MAX_EXPR, to preserve the signedness of the comparison. */
5093 if (! operand_equal_p (arg2, TYPE_MIN_VALUE (type),
5094 OEP_ONLY_CONST)
5095 && operand_equal_p (arg01,
5096 const_binop (MINUS_EXPR, arg2,
5097 build_int_cst (type, 1)),
5098 OEP_ONLY_CONST))
5100 tem = fold_build2_loc (loc, MAX_EXPR, TREE_TYPE (arg00), arg00,
5101 fold_convert_loc (loc, TREE_TYPE (arg00),
5102 arg2));
5103 return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, tem));
5105 break;
5107 case GE_EXPR:
5108 /* If C1 is C2 + 1, this is max(A, C2), with the same care as above. */
5109 if (! operand_equal_p (arg2, TYPE_MAX_VALUE (type),
5110 OEP_ONLY_CONST)
5111 && operand_equal_p (arg01,
5112 const_binop (PLUS_EXPR, arg2,
5113 build_int_cst (type, 1)),
5114 OEP_ONLY_CONST))
5116 tem = fold_build2_loc (loc, MAX_EXPR, TREE_TYPE (arg00), arg00,
5117 fold_convert_loc (loc, TREE_TYPE (arg00),
5118 arg2));
5119 return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, tem));
5121 break;
5122 case NE_EXPR:
5123 break;
5124 default:
5125 gcc_unreachable ();
5128 return NULL_TREE;
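/* Editor's sketch: the constant case above. With C1 == C2 + 1, the
   LT_EXPR arm is recognized as a MIN against C2. */
static int
clamp_example (int x)
{
  return x < 3 ? x : 2;  /* same as MIN_EXPR <x, 2> */
}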
5133 #ifndef LOGICAL_OP_NON_SHORT_CIRCUIT
5134 #define LOGICAL_OP_NON_SHORT_CIRCUIT \
5135 (BRANCH_COST (optimize_function_for_speed_p (cfun), \
5136 false) >= 2)
5137 #endif
5139 /* EXP is some logical combination of boolean tests. See if we can
5140 merge it into some range test. Return the new tree if so. */
5142 static tree
5143 fold_range_test (location_t loc, enum tree_code code, tree type,
5144 tree op0, tree op1)
5146 int or_op = (code == TRUTH_ORIF_EXPR
5147 || code == TRUTH_OR_EXPR);
5148 int in0_p, in1_p, in_p;
5149 tree low0, low1, low, high0, high1, high;
5150 bool strict_overflow_p = false;
5151 tree tem, lhs, rhs;
5152 const char * const warnmsg = G_("assuming signed overflow does not occur "
5153 "when simplifying range test");
5155 if (!INTEGRAL_TYPE_P (type))
5156 return 0;
5158 lhs = make_range (op0, &in0_p, &low0, &high0, &strict_overflow_p);
5159 rhs = make_range (op1, &in1_p, &low1, &high1, &strict_overflow_p);
5161 /* If this is an OR operation, invert both sides; we will invert
5162 again at the end. */
5163 if (or_op)
5164 in0_p = ! in0_p, in1_p = ! in1_p;
5166 /* If both expressions are the same, if we can merge the ranges, and we
5167 can build the range test, return it or it inverted. If one of the
5168 ranges is always true or always false, consider it to be the same
5169 expression as the other. */
5170 if ((lhs == 0 || rhs == 0 || operand_equal_p (lhs, rhs, 0))
5171 && merge_ranges (&in_p, &low, &high, in0_p, low0, high0,
5172 in1_p, low1, high1)
5173 && 0 != (tem = (build_range_check (loc, type,
5174 lhs != 0 ? lhs
5175 : rhs != 0 ? rhs : integer_zero_node,
5176 in_p, low, high))))
5178 if (strict_overflow_p)
5179 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_COMPARISON);
5180 return or_op ? invert_truthvalue_loc (loc, tem) : tem;
5183 /* On machines where the branch cost is expensive, if this is a
5184 short-circuited branch and the underlying object on both sides
5185 is the same, make a non-short-circuit operation. */
5186 else if (LOGICAL_OP_NON_SHORT_CIRCUIT
5187 && lhs != 0 && rhs != 0
5188 && (code == TRUTH_ANDIF_EXPR
5189 || code == TRUTH_ORIF_EXPR)
5190 && operand_equal_p (lhs, rhs, 0))
5192 /* If simple enough, just rewrite. Otherwise, make a SAVE_EXPR
5193 unless we are at top level or LHS contains a PLACEHOLDER_EXPR, in
5194 which cases we can't do this. */
5195 if (simple_operand_p (lhs))
5196 return build2_loc (loc, code == TRUTH_ANDIF_EXPR
5197 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR,
5198 type, op0, op1);
5200 else if (!lang_hooks.decls.global_bindings_p ()
5201 && !CONTAINS_PLACEHOLDER_P (lhs))
5203 tree common = save_expr (lhs);
5205 if (0 != (lhs = build_range_check (loc, type, common,
5206 or_op ? ! in0_p : in0_p,
5207 low0, high0))
5208 && (0 != (rhs = build_range_check (loc, type, common,
5209 or_op ? ! in1_p : in1_p,
5210 low1, high1))))
5212 if (strict_overflow_p)
5213 fold_overflow_warning (warnmsg,
5214 WARN_STRICT_OVERFLOW_COMPARISON);
5215 return build2_loc (loc, code == TRUTH_ANDIF_EXPR
5216 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR,
5217 type, lhs, rhs);
5222 return 0;
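/* Editor's sketch: when LOGICAL_OP_NON_SHORT_CIRCUIT holds and the
   operands are simple, the short-circuit form is rewritten so the right
   side is evaluated unconditionally, trading a branch for an AND. */
static int
non_short_circuit_example (int a, int b)
{
  /* a > 0 && b > 0  is evaluated as  (a > 0) & (b > 0)  */
  return (a > 0) & (b > 0);
}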
5225 /* Subroutine for fold_truth_andor_1: C is an INTEGER_CST interpreted as a P
5226 bit value. Arrange things so the extra bits will be set to zero if and
5227 only if C is sign-extended to its full width. If MASK is nonzero,
5228 it is an INTEGER_CST that should be AND'ed with the extra bits. */
5230 static tree
5231 unextend (tree c, int p, int unsignedp, tree mask)
5233 tree type = TREE_TYPE (c);
5234 int modesize = GET_MODE_BITSIZE (TYPE_MODE (type));
5235 tree temp;
5237 if (p == modesize || unsignedp)
5238 return c;
5240 /* We work by getting just the sign bit into the low-order bit, then
5241 into the high-order bit, then sign-extend. We then XOR that value
5242 with C. */
5243 temp = build_int_cst (TREE_TYPE (c), wi::extract_uhwi (c, p - 1, 1));
5245 /* We must use a signed type in order to get an arithmetic right shift.
5246 However, we must also avoid introducing accidental overflows, so that
5247 a subsequent call to integer_zerop will work. Hence we must
5248 do the type conversion here. At this point, the constant is either
5249 zero or one, and the conversion to a signed type can never overflow.
5250 We could get an overflow if this conversion is done anywhere else. */
5251 if (TYPE_UNSIGNED (type))
5252 temp = fold_convert (signed_type_for (type), temp);
5254 temp = const_binop (LSHIFT_EXPR, temp, size_int (modesize - 1));
5255 temp = const_binop (RSHIFT_EXPR, temp, size_int (modesize - p - 1));
5256 if (mask != 0)
5257 temp = const_binop (BIT_AND_EXPR, temp,
5258 fold_convert (TREE_TYPE (c), mask));
5259 /* If necessary, convert the type back to match the type of C. */
5260 if (TYPE_UNSIGNED (type))
5261 temp = fold_convert (type, temp);
5263 return fold_convert (type, const_binop (BIT_XOR_EXPR, c, temp));
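/* Editor's sketch: unextend on a 32-bit word holding an 8-bit field.
   The bits above the field come out zero exactly when C arrived
   sign-extended, which is what later integer_zerop tests rely on. */
static int
unextend_example (int c)
{
  int sign = (c >> 7) & 1;                  /* the field's sign bit */
  int smear = sign ? (int) 0xffffff00 : 0;  /* sign copied over bits 8..31 */
  return c ^ smear;
}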
5266 /* For an expression that has the form
5267 (A && B) || ~B
5268 or
5269 (A || B) && ~B,
5270 we can drop one of the inner expressions and simplify to
5271 A || ~B
5272 or
5273 A && ~B
5274 LOC is the location of the resulting expression. OP is the inner
5275 logical operation, the left-hand side in the examples above, while CMPOP
5276 is the right-hand side. RHS_ONLY is used to prevent us from accidentally
5277 removing a condition that guards another, as in
5278 (A != NULL && A->...) || A == NULL
5279 which we must not transform. If RHS_ONLY is true, only eliminate the
5280 right-most operand of the inner logical operation. */
5282 static tree
5283 merge_truthop_with_opposite_arm (location_t loc, tree op, tree cmpop,
5284 bool rhs_only)
5286 tree type = TREE_TYPE (cmpop);
5287 enum tree_code code = TREE_CODE (cmpop);
5288 enum tree_code truthop_code = TREE_CODE (op);
5289 tree lhs = TREE_OPERAND (op, 0);
5290 tree rhs = TREE_OPERAND (op, 1);
5291 tree orig_lhs = lhs, orig_rhs = rhs;
5292 enum tree_code rhs_code = TREE_CODE (rhs);
5293 enum tree_code lhs_code = TREE_CODE (lhs);
5294 enum tree_code inv_code;
5296 if (TREE_SIDE_EFFECTS (op) || TREE_SIDE_EFFECTS (cmpop))
5297 return NULL_TREE;
5299 if (TREE_CODE_CLASS (code) != tcc_comparison)
5300 return NULL_TREE;
5302 if (rhs_code == truthop_code)
5304 tree newrhs = merge_truthop_with_opposite_arm (loc, rhs, cmpop, rhs_only);
5305 if (newrhs != NULL_TREE)
5307 rhs = newrhs;
5308 rhs_code = TREE_CODE (rhs);
5311 if (lhs_code == truthop_code && !rhs_only)
5313 tree newlhs = merge_truthop_with_opposite_arm (loc, lhs, cmpop, false);
5314 if (newlhs != NULL_TREE)
5316 lhs = newlhs;
5317 lhs_code = TREE_CODE (lhs);
5321 inv_code = invert_tree_comparison (code, HONOR_NANS (type));
5322 if (inv_code == rhs_code
5323 && operand_equal_p (TREE_OPERAND (rhs, 0), TREE_OPERAND (cmpop, 0), 0)
5324 && operand_equal_p (TREE_OPERAND (rhs, 1), TREE_OPERAND (cmpop, 1), 0))
5325 return lhs;
5326 if (!rhs_only && inv_code == lhs_code
5327 && operand_equal_p (TREE_OPERAND (lhs, 0), TREE_OPERAND (cmpop, 0), 0)
5328 && operand_equal_p (TREE_OPERAND (lhs, 1), TREE_OPERAND (cmpop, 1), 0))
5329 return rhs;
5330 if (rhs != orig_rhs || lhs != orig_lhs)
5331 return fold_build2_loc (loc, truthop_code, TREE_TYPE (cmpop),
5332 lhs, rhs);
5333 return NULL_TREE;
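/* Editor's sketch: the arm-dropping rule above on plain truth values;
   the RHS_ONLY guard exists so that pointer idioms such as
   (p != 0 && p->x) || p == 0 are left intact. */
static int
drop_opposite_arm (int a, int b)
{
  /* (a && b) || !b  simplifies to  a || !b  */
  return a || !b;
}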
5336 /* Find ways of folding logical expressions of LHS and RHS:
5337 Try to merge two comparisons to the same innermost item.
5338 Look for range tests like "ch >= '0' && ch <= '9'".
5339 Look for combinations of simple terms on machines with expensive branches
5340 and evaluate the RHS unconditionally.
5342 For example, if we have p->a == 2 && p->b == 4 and we can make an
5343 object large enough to span both A and B, we can do this with a comparison
5344 against the object ANDed with a mask.
5346 If we have p->a == q->a && p->b == q->b, we may be able to use bit masking
5347 operations to do this with one comparison.
5349 We check for both normal comparisons and the BIT_AND_EXPRs made by this
5350 function and the one above.
5352 CODE is the logical operation being done. It can be TRUTH_ANDIF_EXPR,
5353 TRUTH_AND_EXPR, TRUTH_ORIF_EXPR, or TRUTH_OR_EXPR.
5355 TRUTH_TYPE is the type of the logical operand and LHS and RHS are its
5356 two operands.
5358 We return the simplified tree or 0 if no optimization is possible. */
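/* Editor's sketch: the kind of merge fold_truth_andor_1 aims for. With
   both bit-fields in one addressable word, the two compares can become a
   single load, mask and compare, subject to get_best_mode finding such a
   word. The struct here is purely illustrative. */
struct bf_pair { unsigned a : 4; unsigned b : 4; };

static int
merged_field_compare (const struct bf_pair *p)
{
  return p->a == 2 && p->b == 4;  /* candidate for one masked compare */
}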
5360 static tree
5361 fold_truth_andor_1 (location_t loc, enum tree_code code, tree truth_type,
5362 tree lhs, tree rhs)
5364 /* If this is the "or" of two comparisons, we can do something if
5365 the comparisons are NE_EXPR. If this is the "and", we can do something
5366 if the comparisons are EQ_EXPR. I.e.,
5367 (a->b == 2 && a->c == 4) can become (a->new == NEW).
5369 WANTED_CODE is this operation code. For single bit fields, we can
5370 convert EQ_EXPR to NE_EXPR so we need not reject the "wrong"
5371 comparison for one-bit fields. */
5373 enum tree_code wanted_code;
5374 enum tree_code lcode, rcode;
5375 tree ll_arg, lr_arg, rl_arg, rr_arg;
5376 tree ll_inner, lr_inner, rl_inner, rr_inner;
5377 HOST_WIDE_INT ll_bitsize, ll_bitpos, lr_bitsize, lr_bitpos;
5378 HOST_WIDE_INT rl_bitsize, rl_bitpos, rr_bitsize, rr_bitpos;
5379 HOST_WIDE_INT xll_bitpos, xlr_bitpos, xrl_bitpos, xrr_bitpos;
5380 HOST_WIDE_INT lnbitsize, lnbitpos, rnbitsize, rnbitpos;
5381 int ll_unsignedp, lr_unsignedp, rl_unsignedp, rr_unsignedp;
5382 machine_mode ll_mode, lr_mode, rl_mode, rr_mode;
5383 machine_mode lnmode, rnmode;
5384 tree ll_mask, lr_mask, rl_mask, rr_mask;
5385 tree ll_and_mask, lr_and_mask, rl_and_mask, rr_and_mask;
5386 tree l_const, r_const;
5387 tree lntype, rntype, result;
5388 HOST_WIDE_INT first_bit, end_bit;
5389 int volatilep;
5391 /* Start by getting the comparison codes. Fail if anything is volatile.
5392 If one operand is a BIT_AND_EXPR with the constant one, treat it as if
5393 it were surrounded with a NE_EXPR. */
5395 if (TREE_SIDE_EFFECTS (lhs) || TREE_SIDE_EFFECTS (rhs))
5396 return 0;
5398 lcode = TREE_CODE (lhs);
5399 rcode = TREE_CODE (rhs);
5401 if (lcode == BIT_AND_EXPR && integer_onep (TREE_OPERAND (lhs, 1)))
5403 lhs = build2 (NE_EXPR, truth_type, lhs,
5404 build_int_cst (TREE_TYPE (lhs), 0));
5405 lcode = NE_EXPR;
5408 if (rcode == BIT_AND_EXPR && integer_onep (TREE_OPERAND (rhs, 1)))
5410 rhs = build2 (NE_EXPR, truth_type, rhs,
5411 build_int_cst (TREE_TYPE (rhs), 0));
5412 rcode = NE_EXPR;
5415 if (TREE_CODE_CLASS (lcode) != tcc_comparison
5416 || TREE_CODE_CLASS (rcode) != tcc_comparison)
5417 return 0;
5419 ll_arg = TREE_OPERAND (lhs, 0);
5420 lr_arg = TREE_OPERAND (lhs, 1);
5421 rl_arg = TREE_OPERAND (rhs, 0);
5422 rr_arg = TREE_OPERAND (rhs, 1);
5424 /* Simplify (x<y) && (x==y) into (x<=y) and related optimizations. */
5425 if (simple_operand_p (ll_arg)
5426 && simple_operand_p (lr_arg))
5428 if (operand_equal_p (ll_arg, rl_arg, 0)
5429 && operand_equal_p (lr_arg, rr_arg, 0))
5431 result = combine_comparisons (loc, code, lcode, rcode,
5432 truth_type, ll_arg, lr_arg);
5433 if (result)
5434 return result;
5436 else if (operand_equal_p (ll_arg, rr_arg, 0)
5437 && operand_equal_p (lr_arg, rl_arg, 0))
5439 result = combine_comparisons (loc, code, lcode,
5440 swap_tree_comparison (rcode),
5441 truth_type, ll_arg, lr_arg);
5442 if (result)
5443 return result;
5447 code = ((code == TRUTH_AND_EXPR || code == TRUTH_ANDIF_EXPR)
5448 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR);
5450 /* If the RHS can be evaluated unconditionally and its operands are
5451 simple, it wins to evaluate the RHS unconditionally on machines
5452 with expensive branches. In this case, this isn't a comparison
5453 that can be merged. */
5455 if (BRANCH_COST (optimize_function_for_speed_p (cfun),
5456 false) >= 2
5457 && ! FLOAT_TYPE_P (TREE_TYPE (rl_arg))
5458 && simple_operand_p (rl_arg)
5459 && simple_operand_p (rr_arg))
5461 /* Convert (a != 0) || (b != 0) into (a | b) != 0. */
5462 if (code == TRUTH_OR_EXPR
5463 && lcode == NE_EXPR && integer_zerop (lr_arg)
5464 && rcode == NE_EXPR && integer_zerop (rr_arg)
5465 && TREE_TYPE (ll_arg) == TREE_TYPE (rl_arg)
5466 && INTEGRAL_TYPE_P (TREE_TYPE (ll_arg)))
5467 return build2_loc (loc, NE_EXPR, truth_type,
5468 build2 (BIT_IOR_EXPR, TREE_TYPE (ll_arg),
5469 ll_arg, rl_arg),
5470 build_int_cst (TREE_TYPE (ll_arg), 0));
5472 /* Convert (a == 0) && (b == 0) into (a | b) == 0. */
5473 if (code == TRUTH_AND_EXPR
5474 && lcode == EQ_EXPR && integer_zerop (lr_arg)
5475 && rcode == EQ_EXPR && integer_zerop (rr_arg)
5476 && TREE_TYPE (ll_arg) == TREE_TYPE (rl_arg)
5477 && INTEGRAL_TYPE_P (TREE_TYPE (ll_arg)))
5478 return build2_loc (loc, EQ_EXPR, truth_type,
5479 build2 (BIT_IOR_EXPR, TREE_TYPE (ll_arg),
5480 ll_arg, rl_arg),
5481 build_int_cst (TREE_TYPE (ll_arg), 0));
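/* Editor's sketch: the two rewrites above in source form. They are safe
   only because both operands were checked to be simple, so evaluating
   the right side unconditionally cannot trap or cause side effects. */
static int
ior_zero_compare (int a, int b)
{
  /* (a != 0) || (b != 0)  ->  (a | b) != 0
     (a == 0) && (b == 0)  ->  (a | b) == 0  */
  return (a | b) != 0;
}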
5484 /* See if the comparisons can be merged. Then get all the parameters for
5485 each side. */
5487 if ((lcode != EQ_EXPR && lcode != NE_EXPR)
5488 || (rcode != EQ_EXPR && rcode != NE_EXPR))
5489 return 0;
5491 volatilep = 0;
5492 ll_inner = decode_field_reference (loc, ll_arg,
5493 &ll_bitsize, &ll_bitpos, &ll_mode,
5494 &ll_unsignedp, &volatilep, &ll_mask,
5495 &ll_and_mask);
5496 lr_inner = decode_field_reference (loc, lr_arg,
5497 &lr_bitsize, &lr_bitpos, &lr_mode,
5498 &lr_unsignedp, &volatilep, &lr_mask,
5499 &lr_and_mask);
5500 rl_inner = decode_field_reference (loc, rl_arg,
5501 &rl_bitsize, &rl_bitpos, &rl_mode,
5502 &rl_unsignedp, &volatilep, &rl_mask,
5503 &rl_and_mask);
5504 rr_inner = decode_field_reference (loc, rr_arg,
5505 &rr_bitsize, &rr_bitpos, &rr_mode,
5506 &rr_unsignedp, &volatilep, &rr_mask,
5507 &rr_and_mask);
5509 /* The inner operation on the lhs of each comparison must be the
5510 same if we are to be able to do anything.
5511 Then see if we have constants. If not, the same must be true for
5512 the rhs's. */
5513 if (volatilep || ll_inner == 0 || rl_inner == 0
5514 || ! operand_equal_p (ll_inner, rl_inner, 0))
5515 return 0;
5517 if (TREE_CODE (lr_arg) == INTEGER_CST
5518 && TREE_CODE (rr_arg) == INTEGER_CST)
5519 l_const = lr_arg, r_const = rr_arg;
5520 else if (lr_inner == 0 || rr_inner == 0
5521 || ! operand_equal_p (lr_inner, rr_inner, 0))
5522 return 0;
5523 else
5524 l_const = r_const = 0;
5526 /* If either comparison code is not correct for our logical operation,
5527 fail. However, we can convert a one-bit comparison against zero into
5528 the opposite comparison against that bit being set in the field. */
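/* E.g. if wanted_code is EQ_EXPR but the lhs test is (A & 8) != 0,
   the code below recasts it as (A & 8) == 8, using the
   power-of-two mask itself as the constant.  */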
5530 wanted_code = (code == TRUTH_AND_EXPR ? EQ_EXPR : NE_EXPR);
5531 if (lcode != wanted_code)
5533 if (l_const && integer_zerop (l_const) && integer_pow2p (ll_mask))
5535 /* Make the left operand unsigned, since we are only interested
5536 in the value of one bit. Otherwise we are doing the wrong
5537 thing below. */
5538 ll_unsignedp = 1;
5539 l_const = ll_mask;
5541 else
5542 return 0;
5545 /* This is analogous to the code for l_const above. */
5546 if (rcode != wanted_code)
5548 if (r_const && integer_zerop (r_const) && integer_pow2p (rl_mask))
5550 rl_unsignedp = 1;
5551 r_const = rl_mask;
5553 else
5554 return 0;
5557 /* See if we can find a mode that contains both fields being compared on
5558 the left. If we can't, fail. Otherwise, update all constants and masks
5559 to be relative to a field of that size. */
5560 first_bit = MIN (ll_bitpos, rl_bitpos);
5561 end_bit = MAX (ll_bitpos + ll_bitsize, rl_bitpos + rl_bitsize);
5562 lnmode = get_best_mode (end_bit - first_bit, first_bit, 0, 0,
5563 TYPE_ALIGN (TREE_TYPE (ll_inner)), word_mode,
5564 volatilep);
5565 if (lnmode == VOIDmode)
5566 return 0;
5568 lnbitsize = GET_MODE_BITSIZE (lnmode);
5569 lnbitpos = first_bit & ~ (lnbitsize - 1);
5570 lntype = lang_hooks.types.type_for_size (lnbitsize, 1);
5571 xll_bitpos = ll_bitpos - lnbitpos, xrl_bitpos = rl_bitpos - lnbitpos;
5573 if (BYTES_BIG_ENDIAN)
5575 xll_bitpos = lnbitsize - xll_bitpos - ll_bitsize;
5576 xrl_bitpos = lnbitsize - xrl_bitpos - rl_bitsize;
5579 ll_mask = const_binop (LSHIFT_EXPR, fold_convert_loc (loc, lntype, ll_mask),
5580 size_int (xll_bitpos));
5581 rl_mask = const_binop (LSHIFT_EXPR, fold_convert_loc (loc, lntype, rl_mask),
5582 size_int (xrl_bitpos));
5584 if (l_const)
5586 l_const = fold_convert_loc (loc, lntype, l_const);
5587 l_const = unextend (l_const, ll_bitsize, ll_unsignedp, ll_and_mask);
5588 l_const = const_binop (LSHIFT_EXPR, l_const, size_int (xll_bitpos));
5589 if (! integer_zerop (const_binop (BIT_AND_EXPR, l_const,
5590 fold_build1_loc (loc, BIT_NOT_EXPR,
5591 lntype, ll_mask))))
5593 warning (0, "comparison is always %d", wanted_code == NE_EXPR);
5595 return constant_boolean_node (wanted_code == NE_EXPR, truth_type);
5598 if (r_const)
5600 r_const = fold_convert_loc (loc, lntype, r_const);
5601 r_const = unextend (r_const, rl_bitsize, rl_unsignedp, rl_and_mask);
5602 r_const = const_binop (LSHIFT_EXPR, r_const, size_int (xrl_bitpos));
5603 if (! integer_zerop (const_binop (BIT_AND_EXPR, r_const,
5604 fold_build1_loc (loc, BIT_NOT_EXPR,
5605 lntype, rl_mask))))
5607 warning (0, "comparison is always %d", wanted_code == NE_EXPR);
5609 return constant_boolean_node (wanted_code == NE_EXPR, truth_type);
5613 /* If the right sides are not constant, do the same for them. Also,
5614 disallow this optimization if a size or signedness mismatch occurs
5615 between the left and right sides. */
5616 if (l_const == 0)
5618 if (ll_bitsize != lr_bitsize || rl_bitsize != rr_bitsize
5619 || ll_unsignedp != lr_unsignedp || rl_unsignedp != rr_unsignedp
5620 /* Make sure the two fields on the right
5621 correspond to the left without being swapped. */
5622 || ll_bitpos - rl_bitpos != lr_bitpos - rr_bitpos)
5623 return 0;
5625 first_bit = MIN (lr_bitpos, rr_bitpos);
5626 end_bit = MAX (lr_bitpos + lr_bitsize, rr_bitpos + rr_bitsize);
5627 rnmode = get_best_mode (end_bit - first_bit, first_bit, 0, 0,
5628 TYPE_ALIGN (TREE_TYPE (lr_inner)), word_mode,
5629 volatilep);
5630 if (rnmode == VOIDmode)
5631 return 0;
5633 rnbitsize = GET_MODE_BITSIZE (rnmode);
5634 rnbitpos = first_bit & ~ (rnbitsize - 1);
5635 rntype = lang_hooks.types.type_for_size (rnbitsize, 1);
5636 xlr_bitpos = lr_bitpos - rnbitpos, xrr_bitpos = rr_bitpos - rnbitpos;
5638 if (BYTES_BIG_ENDIAN)
5640 xlr_bitpos = rnbitsize - xlr_bitpos - lr_bitsize;
5641 xrr_bitpos = rnbitsize - xrr_bitpos - rr_bitsize;
5644 lr_mask = const_binop (LSHIFT_EXPR, fold_convert_loc (loc,
5645 rntype, lr_mask),
5646 size_int (xlr_bitpos));
5647 rr_mask = const_binop (LSHIFT_EXPR, fold_convert_loc (loc,
5648 rntype, rr_mask),
5649 size_int (xrr_bitpos));
5651 /* Make a mask that corresponds to both fields being compared.
5652 Do this for both items being compared. If the operands are the
5653 same size and the bits being compared are in the same position
5654 then we can do this by masking both and comparing the masked
5655 results. */
5656 ll_mask = const_binop (BIT_IOR_EXPR, ll_mask, rl_mask);
5657 lr_mask = const_binop (BIT_IOR_EXPR, lr_mask, rr_mask);
5658 if (lnbitsize == rnbitsize && xll_bitpos == xlr_bitpos)
5660 lhs = make_bit_field_ref (loc, ll_inner, lntype, lnbitsize, lnbitpos,
5661 ll_unsignedp || rl_unsignedp);
5662 if (! all_ones_mask_p (ll_mask, lnbitsize))
5663 lhs = build2 (BIT_AND_EXPR, lntype, lhs, ll_mask);
5665 rhs = make_bit_field_ref (loc, lr_inner, rntype, rnbitsize, rnbitpos,
5666 lr_unsignedp || rr_unsignedp);
5667 if (! all_ones_mask_p (lr_mask, rnbitsize))
5668 rhs = build2 (BIT_AND_EXPR, rntype, rhs, lr_mask);
5670 return build2_loc (loc, wanted_code, truth_type, lhs, rhs);
5673 /* There is still another way we can do something: If both pairs of
5674 fields being compared are adjacent, we may be able to make a wider
5675 field containing them both.
5677 Note that we still must mask the lhs/rhs expressions. Furthermore,
5678 the mask must be shifted to account for the shift done by
5679 make_bit_field_ref. */
5680 if ((ll_bitsize + ll_bitpos == rl_bitpos
5681 && lr_bitsize + lr_bitpos == rr_bitpos)
5682 || (ll_bitpos == rl_bitpos + rl_bitsize
5683 && lr_bitpos == rr_bitpos + rr_bitsize))
5685 tree type;
5687 lhs = make_bit_field_ref (loc, ll_inner, lntype,
5688 ll_bitsize + rl_bitsize,
5689 MIN (ll_bitpos, rl_bitpos), ll_unsignedp);
5690 rhs = make_bit_field_ref (loc, lr_inner, rntype,
5691 lr_bitsize + rr_bitsize,
5692 MIN (lr_bitpos, rr_bitpos), lr_unsignedp);
5694 ll_mask = const_binop (RSHIFT_EXPR, ll_mask,
5695 size_int (MIN (xll_bitpos, xrl_bitpos)));
5696 lr_mask = const_binop (RSHIFT_EXPR, lr_mask,
5697 size_int (MIN (xlr_bitpos, xrr_bitpos)));
5699 /* Convert to the smaller type before masking out unwanted bits. */
5700 type = lntype;
5701 if (lntype != rntype)
5703 if (lnbitsize > rnbitsize)
5705 lhs = fold_convert_loc (loc, rntype, lhs);
5706 ll_mask = fold_convert_loc (loc, rntype, ll_mask);
5707 type = rntype;
5709 else if (lnbitsize < rnbitsize)
5711 rhs = fold_convert_loc (loc, lntype, rhs);
5712 lr_mask = fold_convert_loc (loc, lntype, lr_mask);
5713 type = lntype;
5717 if (! all_ones_mask_p (ll_mask, ll_bitsize + rl_bitsize))
5718 lhs = build2 (BIT_AND_EXPR, type, lhs, ll_mask);
5720 if (! all_ones_mask_p (lr_mask, lr_bitsize + rr_bitsize))
5721 rhs = build2 (BIT_AND_EXPR, type, rhs, lr_mask);
5723 return build2_loc (loc, wanted_code, truth_type, lhs, rhs);
5726 return 0;
5729 /* Handle the case of comparisons with constants. If there is something in
5730 common between the masks, those bits of the constants must be the same.
5731 If not, the condition is always false. Test for this to avoid generating
5732 incorrect code below. */
5733 result = const_binop (BIT_AND_EXPR, ll_mask, rl_mask);
5734 if (! integer_zerop (result)
5735 && simple_cst_equal (const_binop (BIT_AND_EXPR, result, l_const),
5736 const_binop (BIT_AND_EXPR, result, r_const)) != 1)
5738 if (wanted_code == NE_EXPR)
5740 warning (0, "%<or%> of unmatched not-equal tests is always 1");
5741 return constant_boolean_node (true, truth_type);
5743 else
5745 warning (0, "%<and%> of mutually exclusive equal-tests is always 0");
5746 return constant_boolean_node (false, truth_type);
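/* Illustrative instance: (X & 3) == 1 && (X & 1) == 0 would need
   bit 0 to be both 1 and 0, so the common-mask check above folds
   it to constant false and warns.  */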
5750 /* Construct the expression we will return. First get the component
5751 reference we will make. Unless the mask is all ones for the width of
5752 that field, perform the mask operation. Then compare with the
5753 merged constant. */
5754 result = make_bit_field_ref (loc, ll_inner, lntype, lnbitsize, lnbitpos,
5755 ll_unsignedp || rl_unsignedp);
5757 ll_mask = const_binop (BIT_IOR_EXPR, ll_mask, rl_mask);
5758 if (! all_ones_mask_p (ll_mask, lnbitsize))
5759 result = build2_loc (loc, BIT_AND_EXPR, lntype, result, ll_mask);
5761 return build2_loc (loc, wanted_code, truth_type, result,
5762 const_binop (BIT_IOR_EXPR, l_const, r_const));
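/* End-to-end sketch (hypothetical layout): for
     struct S { unsigned a : 8; unsigned b : 8; } s;
   the test s.a == 1 && s.b == 2 can be folded by the code above
   into one load, mask and compare of the containing word, roughly
   (word & 0xffff) == 0x0201 on a little-endian target; the exact
   mode, masks and shifts come from get_best_mode and the
   BYTES_BIG_ENDIAN adjustments.  */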
5765 /* Optimize T, which is a comparison of a MIN_EXPR or MAX_EXPR with a
5766 constant. */
5768 static tree
5769 optimize_minmax_comparison (location_t loc, enum tree_code code, tree type,
5770 tree op0, tree op1)
5772 tree arg0 = op0;
5773 enum tree_code op_code;
5774 tree comp_const;
5775 tree minmax_const;
5776 int consts_equal, consts_lt;
5777 tree inner;
5779 STRIP_SIGN_NOPS (arg0);
5781 op_code = TREE_CODE (arg0);
5782 minmax_const = TREE_OPERAND (arg0, 1);
5783 comp_const = fold_convert_loc (loc, TREE_TYPE (arg0), op1);
5784 consts_equal = tree_int_cst_equal (minmax_const, comp_const);
5785 consts_lt = tree_int_cst_lt (minmax_const, comp_const);
5786 inner = TREE_OPERAND (arg0, 0);
5788 /* If something does not permit us to optimize, return the original tree. */
5789 if ((op_code != MIN_EXPR && op_code != MAX_EXPR)
5790 || TREE_CODE (comp_const) != INTEGER_CST
5791 || TREE_OVERFLOW (comp_const)
5792 || TREE_CODE (minmax_const) != INTEGER_CST
5793 || TREE_OVERFLOW (minmax_const))
5794 return NULL_TREE;
5796 /* Now handle all the various comparison codes. We only handle EQ_EXPR
5797 and GT_EXPR, doing the rest with recursive calls using logical
5798 simplifications. */
5799 switch (code)
5801 case NE_EXPR: case LT_EXPR: case LE_EXPR:
5803 tree tem
5804 = optimize_minmax_comparison (loc,
5805 invert_tree_comparison (code, false),
5806 type, op0, op1);
5807 if (tem)
5808 return invert_truthvalue_loc (loc, tem);
5809 return NULL_TREE;
5812 case GE_EXPR:
5813 return
5814 fold_build2_loc (loc, TRUTH_ORIF_EXPR, type,
5815 optimize_minmax_comparison
5816 (loc, EQ_EXPR, type, arg0, comp_const),
5817 optimize_minmax_comparison
5818 (loc, GT_EXPR, type, arg0, comp_const));
5820 case EQ_EXPR:
5821 if (op_code == MAX_EXPR && consts_equal)
5822 /* MAX (X, 0) == 0 -> X <= 0 */
5823 return fold_build2_loc (loc, LE_EXPR, type, inner, comp_const);
5825 else if (op_code == MAX_EXPR && consts_lt)
5826 /* MAX (X, 0) == 5 -> X == 5 */
5827 return fold_build2_loc (loc, EQ_EXPR, type, inner, comp_const);
5829 else if (op_code == MAX_EXPR)
5830 /* MAX (X, 0) == -1 -> false */
5831 return omit_one_operand_loc (loc, type, integer_zero_node, inner);
5833 else if (consts_equal)
5834 /* MIN (X, 0) == 0 -> X >= 0 */
5835 return fold_build2_loc (loc, GE_EXPR, type, inner, comp_const);
5837 else if (consts_lt)
5838 /* MIN (X, 0) == 5 -> false */
5839 return omit_one_operand_loc (loc, type, integer_zero_node, inner);
5841 else
5842 /* MIN (X, 0) == -1 -> X == -1 */
5843 return fold_build2_loc (loc, EQ_EXPR, type, inner, comp_const);
5845 case GT_EXPR:
5846 if (op_code == MAX_EXPR && (consts_equal || consts_lt))
5847 /* MAX (X, 0) > 0 -> X > 0
5848 MAX (X, 0) > 5 -> X > 5 */
5849 return fold_build2_loc (loc, GT_EXPR, type, inner, comp_const);
5851 else if (op_code == MAX_EXPR)
5852 /* MAX (X, 0) > -1 -> true */
5853 return omit_one_operand_loc (loc, type, integer_one_node, inner);
5855 else if (op_code == MIN_EXPR && (consts_equal || consts_lt))
5856 /* MIN (X, 0) > 0 -> false
5857 MIN (X, 0) > 5 -> false */
5858 return omit_one_operand_loc (loc, type, integer_zero_node, inner);
5860 else
5861 /* MIN (X, 0) > -1 -> X > -1 */
5862 return fold_build2_loc (loc, GT_EXPR, type, inner, comp_const);
5864 default:
5865 return NULL_TREE;
5869 /* T is an integer expression that is being multiplied or divided by, or
5870 taken modulo (CODE says which, and what kind of divide or modulus), a
5871 constant C. See if we can eliminate that operation by folding it with
5872 other operations already in T. WIDE_TYPE, if non-null, is a type that
5873 should be used for the computation if wider than our type.
5875 For example, if we are dividing (X * 8) + (Y * 16) by 4, we can return
5876 (X * 2) + (Y * 4). We must, however, be assured that either the original
5877 expression would not overflow or that overflow is undefined for the type
5878 in the language in question.
5880 If we return a non-null expression, it is an equivalent form of the
5881 original computation, but need not be in the original type.
5883 We set *STRICT_OVERFLOW_P to true if the return value depends on
5884 signed overflow being undefined. Otherwise we do not change
5885 *STRICT_OVERFLOW_P. */
5887 static tree
5888 extract_muldiv (tree t, tree c, enum tree_code code, tree wide_type,
5889 bool *strict_overflow_p)
5891 /* To avoid exponential search depth, refuse to allow recursion past
5892 three levels. Beyond that (1) it's highly unlikely that we'll find
5893 something interesting and (2) we've probably processed it before
5894 when we built the inner expression. */
5896 static int depth;
5897 tree ret;
5899 if (depth > 3)
5900 return NULL;
5902 depth++;
5903 ret = extract_muldiv_1 (t, c, code, wide_type, strict_overflow_p);
5904 depth--;
5906 return ret;
5909 static tree
5910 extract_muldiv_1 (tree t, tree c, enum tree_code code, tree wide_type,
5911 bool *strict_overflow_p)
5913 tree type = TREE_TYPE (t);
5914 enum tree_code tcode = TREE_CODE (t);
5915 tree ctype = (wide_type != 0 && (GET_MODE_SIZE (TYPE_MODE (wide_type))
5916 > GET_MODE_SIZE (TYPE_MODE (type)))
5917 ? wide_type : type);
5918 tree t1, t2;
5919 int same_p = tcode == code;
5920 tree op0 = NULL_TREE, op1 = NULL_TREE;
5921 bool sub_strict_overflow_p;
5923 /* Don't deal with constants of zero here; they confuse the code below. */
5924 if (integer_zerop (c))
5925 return NULL_TREE;
5927 if (TREE_CODE_CLASS (tcode) == tcc_unary)
5928 op0 = TREE_OPERAND (t, 0);
5930 if (TREE_CODE_CLASS (tcode) == tcc_binary)
5931 op0 = TREE_OPERAND (t, 0), op1 = TREE_OPERAND (t, 1);
5933 /* Note that we need not handle conditional operations here since fold
5934 already handles those cases. So just do arithmetic here. */
5935 switch (tcode)
5937 case INTEGER_CST:
5938 /* For a constant, we can always simplify if we are a multiply
5939 or (for divide and modulus) if it is a multiple of our constant. */
5940 if (code == MULT_EXPR
5941 || wi::multiple_of_p (t, c, TYPE_SIGN (type)))
5942 return const_binop (code, fold_convert (ctype, t),
5943 fold_convert (ctype, c));
5944 break;
5946 CASE_CONVERT: case NON_LVALUE_EXPR:
5947 /* If op0 is an expression ... */
5948 if ((COMPARISON_CLASS_P (op0)
5949 || UNARY_CLASS_P (op0)
5950 || BINARY_CLASS_P (op0)
5951 || VL_EXP_CLASS_P (op0)
5952 || EXPRESSION_CLASS_P (op0))
5953 /* ... and has wrapping overflow, and its type is smaller
5954 than ctype, then we cannot pass through as widening. */
5955 && (((ANY_INTEGRAL_TYPE_P (TREE_TYPE (op0))
5956 && TYPE_OVERFLOW_WRAPS (TREE_TYPE (op0)))
5957 && (TYPE_PRECISION (ctype)
5958 > TYPE_PRECISION (TREE_TYPE (op0))))
5959 /* ... or this is a truncation (t is narrower than op0),
5960 then we cannot pass through this narrowing. */
5961 || (TYPE_PRECISION (type)
5962 < TYPE_PRECISION (TREE_TYPE (op0)))
5963 /* ... or signedness changes for division or modulus,
5964 then we cannot pass through this conversion. */
5965 || (code != MULT_EXPR
5966 && (TYPE_UNSIGNED (ctype)
5967 != TYPE_UNSIGNED (TREE_TYPE (op0))))
5968 /* ... or has undefined overflow while the converted to
5969 type has not, we cannot do the operation in the inner type
5970 as that would introduce undefined overflow. */
5971 || ((ANY_INTEGRAL_TYPE_P (TREE_TYPE (op0))
5972 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (op0)))
5973 && !TYPE_OVERFLOW_UNDEFINED (type))))
5974 break;
5976 /* Pass the constant down and see if we can make a simplification. If
5977 we can, replace this expression with the inner simplification for
5978 possible later conversion to our or some other type. */
5979 if ((t2 = fold_convert (TREE_TYPE (op0), c)) != 0
5980 && TREE_CODE (t2) == INTEGER_CST
5981 && !TREE_OVERFLOW (t2)
5982 && (0 != (t1 = extract_muldiv (op0, t2, code,
5983 code == MULT_EXPR
5984 ? ctype : NULL_TREE,
5985 strict_overflow_p))))
5986 return t1;
5987 break;
5989 case ABS_EXPR:
5990 /* If widening the type changes it from signed to unsigned, then we
5991 must avoid building ABS_EXPR itself as unsigned. */
5992 if (TYPE_UNSIGNED (ctype) && !TYPE_UNSIGNED (type))
5994 tree cstype = (*signed_type_for) (ctype);
5995 if ((t1 = extract_muldiv (op0, c, code, cstype, strict_overflow_p))
5996 != 0)
5998 t1 = fold_build1 (tcode, cstype, fold_convert (cstype, t1));
5999 return fold_convert (ctype, t1);
6001 break;
6003 /* If the constant is negative, we cannot simplify this. */
6004 if (tree_int_cst_sgn (c) == -1)
6005 break;
6006 /* FALLTHROUGH */
6007 case NEGATE_EXPR:
6008 /* For division and modulus, type can't be unsigned, as e.g.
6009 (-(x / 2U)) / 2U isn't equal to -((x / 2U) / 2U) for x >= 2.
6010 For signed types, even with wrapping overflow, this is fine. */
6011 if (code != MULT_EXPR && TYPE_UNSIGNED (type))
6012 break;
6013 if ((t1 = extract_muldiv (op0, c, code, wide_type, strict_overflow_p))
6014 != 0)
6015 return fold_build1 (tcode, ctype, fold_convert (ctype, t1));
6016 break;
6018 case MIN_EXPR: case MAX_EXPR:
6019 /* If widening the type changes the signedness, then we can't perform
6020 this optimization as that changes the result. */
6021 if (TYPE_UNSIGNED (ctype) != TYPE_UNSIGNED (type))
6022 break;
6024 /* MIN (a, b) / 5 -> MIN (a / 5, b / 5) */
6025 sub_strict_overflow_p = false;
6026 if ((t1 = extract_muldiv (op0, c, code, wide_type,
6027 &sub_strict_overflow_p)) != 0
6028 && (t2 = extract_muldiv (op1, c, code, wide_type,
6029 &sub_strict_overflow_p)) != 0)
6031 if (tree_int_cst_sgn (c) < 0)
6032 tcode = (tcode == MIN_EXPR ? MAX_EXPR : MIN_EXPR);
6033 if (sub_strict_overflow_p)
6034 *strict_overflow_p = true;
6035 return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
6036 fold_convert (ctype, t2));
6038 break;
6040 case LSHIFT_EXPR: case RSHIFT_EXPR:
6041 /* If the second operand is constant, this is a multiplication
6042 or floor division by a power of two, so we can treat it that
6043 way unless the multiplier or divisor overflows. Signed
6044 left-shift overflow is implementation-defined rather than
6045 undefined in C90, so do not convert signed left shift into
6046 multiplication. */
6047 if (TREE_CODE (op1) == INTEGER_CST
6048 && (tcode == RSHIFT_EXPR || TYPE_UNSIGNED (TREE_TYPE (op0)))
6049 /* const_binop may not detect overflow correctly,
6050 so check for it explicitly here. */
6051 && wi::gtu_p (TYPE_PRECISION (TREE_TYPE (size_one_node)), op1)
6052 && 0 != (t1 = fold_convert (ctype,
6053 const_binop (LSHIFT_EXPR,
6054 size_one_node,
6055 op1)))
6056 && !TREE_OVERFLOW (t1))
6057 return extract_muldiv (build2 (tcode == LSHIFT_EXPR
6058 ? MULT_EXPR : FLOOR_DIV_EXPR,
6059 ctype,
6060 fold_convert (ctype, op0),
6061 t1),
6062 c, code, wide_type, strict_overflow_p);
6063 break;
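/* E.g. for unsigned x, extracting c == 4 from (x << 3) * 4: the
   shift is first rewritten as x * 8 here, and the recursive call
   then associates the two multiplications into x * 32.  */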
6065 case PLUS_EXPR: case MINUS_EXPR:
6066 /* See if we can eliminate the operation on both sides. If we can, we
6067 can return a new PLUS or MINUS. If we can't, the only remaining
6068 cases where we can do anything are if the second operand is a
6069 constant. */
6070 sub_strict_overflow_p = false;
6071 t1 = extract_muldiv (op0, c, code, wide_type, &sub_strict_overflow_p);
6072 t2 = extract_muldiv (op1, c, code, wide_type, &sub_strict_overflow_p);
6073 if (t1 != 0 && t2 != 0
6074 && (code == MULT_EXPR
6075 /* If not multiplication, we can only do this if both operands
6076 are divisible by c. */
6077 || (multiple_of_p (ctype, op0, c)
6078 && multiple_of_p (ctype, op1, c))))
6080 if (sub_strict_overflow_p)
6081 *strict_overflow_p = true;
6082 return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
6083 fold_convert (ctype, t2));
6086 /* If this was a subtraction, negate OP1 and set it to be an addition.
6087 This simplifies the logic below. */
6088 if (tcode == MINUS_EXPR)
6090 tcode = PLUS_EXPR, op1 = negate_expr (op1);
6091 /* If OP1 was not easily negatable, the constant may be OP0. */
6092 if (TREE_CODE (op0) == INTEGER_CST)
6094 tree tem = op0;
6095 op0 = op1;
6096 op1 = tem;
6097 tem = t1;
6098 t1 = t2;
6099 t2 = tem;
6103 if (TREE_CODE (op1) != INTEGER_CST)
6104 break;
6106 /* If either OP1 or C are negative, this optimization is not safe for
6107 some of the division and remainder types, while for others we need
6108 to change the code. */
6109 if (tree_int_cst_sgn (op1) < 0 || tree_int_cst_sgn (c) < 0)
6111 if (code == CEIL_DIV_EXPR)
6112 code = FLOOR_DIV_EXPR;
6113 else if (code == FLOOR_DIV_EXPR)
6114 code = CEIL_DIV_EXPR;
6115 else if (code != MULT_EXPR
6116 && code != CEIL_MOD_EXPR && code != FLOOR_MOD_EXPR)
6117 break;
6120 /* If this is a multiply, or if for division/modulus OP1 is a multiple
6121 of our constant, do the operation and verify it doesn't overflow. */
6122 if (code == MULT_EXPR
6123 || wi::multiple_of_p (op1, c, TYPE_SIGN (type)))
6125 op1 = const_binop (code, fold_convert (ctype, op1),
6126 fold_convert (ctype, c));
6127 /* We allow the constant to overflow with wrapping semantics. */
6128 if (op1 == 0
6129 || (TREE_OVERFLOW (op1) && !TYPE_OVERFLOW_WRAPS (ctype)))
6130 break;
6132 else
6133 break;
6135 /* If we have an unsigned type, we cannot widen the operation since it
6136 will change the result if the original computation overflowed. */
6137 if (TYPE_UNSIGNED (ctype) && ctype != type)
6138 break;
6140 /* If we were able to eliminate our operation from the first side,
6141 apply our operation to the second side and reform the PLUS. */
6142 if (t1 != 0 && (TREE_CODE (t1) != code || code == MULT_EXPR))
6143 return fold_build2 (tcode, ctype, fold_convert (ctype, t1), op1);
6145 /* The last case is if we are a multiply. In that case, we can
6146 apply the distributive law to commute the multiply and addition
6147 if the multiplication of the constants doesn't overflow
6148 and overflow is defined. With undefined overflow
6149 op0 * c might overflow, while (op0 + orig_op1) * c doesn't. */
6150 if (code == MULT_EXPR && TYPE_OVERFLOW_WRAPS (ctype))
6151 return fold_build2 (tcode, ctype,
6152 fold_build2 (code, ctype,
6153 fold_convert (ctype, op0),
6154 fold_convert (ctype, c)),
6155 op1);
6157 break;
6159 case MULT_EXPR:
6160 /* We have a special case here if we are doing something like
6161 (C * 8) % 4 since we know that's zero. */
6162 if ((code == TRUNC_MOD_EXPR || code == CEIL_MOD_EXPR
6163 || code == FLOOR_MOD_EXPR || code == ROUND_MOD_EXPR)
6164 /* If the multiplication can overflow we cannot optimize this. */
6165 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (t))
6166 && TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST
6167 && wi::multiple_of_p (op1, c, TYPE_SIGN (type)))
6169 *strict_overflow_p = true;
6170 return omit_one_operand (type, integer_zero_node, op0);
6173 /* ... fall through ... */
6175 case TRUNC_DIV_EXPR: case CEIL_DIV_EXPR: case FLOOR_DIV_EXPR:
6176 case ROUND_DIV_EXPR: case EXACT_DIV_EXPR:
6177 /* If we can extract our operation from the LHS, do so and return a
6178 new operation. Likewise for the RHS from a MULT_EXPR. Otherwise,
6179 do something only if the second operand is a constant. */
6180 if (same_p
6181 && (t1 = extract_muldiv (op0, c, code, wide_type,
6182 strict_overflow_p)) != 0)
6183 return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
6184 fold_convert (ctype, op1));
6185 else if (tcode == MULT_EXPR && code == MULT_EXPR
6186 && (t1 = extract_muldiv (op1, c, code, wide_type,
6187 strict_overflow_p)) != 0)
6188 return fold_build2 (tcode, ctype, fold_convert (ctype, op0),
6189 fold_convert (ctype, t1));
6190 else if (TREE_CODE (op1) != INTEGER_CST)
6191 return 0;
6193 /* If these are the same operation types, we can associate them
6194 assuming no overflow. */
6195 if (tcode == code)
6197 bool overflow_p = false;
6198 bool overflow_mul_p;
6199 signop sign = TYPE_SIGN (ctype);
6200 wide_int mul = wi::mul (op1, c, sign, &overflow_mul_p);
6201 overflow_p = TREE_OVERFLOW (c) | TREE_OVERFLOW (op1);
6202 if (overflow_mul_p
6203 && ((sign == UNSIGNED && tcode != MULT_EXPR) || sign == SIGNED))
6204 overflow_p = true;
6205 if (!overflow_p)
6206 return fold_build2 (tcode, ctype, fold_convert (ctype, op0),
6207 wide_int_to_tree (ctype, mul));
6210 /* If these operations "cancel" each other, we have the main
6211 optimizations of this pass, which occur when either constant is a
6212 multiple of the other, in which case we replace this with either an
6213 operation of CODE or TCODE.
6215 If we have an unsigned type, we cannot do this since it will change
6216 the result if the original computation overflowed. */
6217 if (TYPE_OVERFLOW_UNDEFINED (ctype)
6218 && ((code == MULT_EXPR && tcode == EXACT_DIV_EXPR)
6219 || (tcode == MULT_EXPR
6220 && code != TRUNC_MOD_EXPR && code != CEIL_MOD_EXPR
6221 && code != FLOOR_MOD_EXPR && code != ROUND_MOD_EXPR
6222 && code != MULT_EXPR)))
6224 if (wi::multiple_of_p (op1, c, TYPE_SIGN (type)))
6226 if (TYPE_OVERFLOW_UNDEFINED (ctype))
6227 *strict_overflow_p = true;
6228 return fold_build2 (tcode, ctype, fold_convert (ctype, op0),
6229 fold_convert (ctype,
6230 const_binop (TRUNC_DIV_EXPR,
6231 op1, c)));
6233 else if (wi::multiple_of_p (c, op1, TYPE_SIGN (type)))
6235 if (TYPE_OVERFLOW_UNDEFINED (ctype))
6236 *strict_overflow_p = true;
6237 return fold_build2 (code, ctype, fold_convert (ctype, op0),
6238 fold_convert (ctype,
6239 const_binop (TRUNC_DIV_EXPR,
6240 c, op1)));
6243 break;
6245 default:
6246 break;
6249 return 0;
6252 /* Return a node which has the indicated constant VALUE (either 0 or
6253 1 for scalars or {-1,-1,..} or {0,0,...} for vectors),
6254 and is of the indicated TYPE. */
6256 tree
6257 constant_boolean_node (bool value, tree type)
6259 if (type == integer_type_node)
6260 return value ? integer_one_node : integer_zero_node;
6261 else if (type == boolean_type_node)
6262 return value ? boolean_true_node : boolean_false_node;
6263 else if (TREE_CODE (type) == VECTOR_TYPE)
6264 return build_vector_from_val (type,
6265 build_int_cst (TREE_TYPE (type),
6266 value ? -1 : 0));
6267 else
6268 return fold_convert (type, value ? integer_one_node : integer_zero_node);
6272 /* Transform `a + (b ? x : y)' into `b ? (a + x) : (a + y)'.
6273 Transform, `a + (x < y)' into `(x < y) ? (a + 1) : (a + 0)'. Here
6274 CODE corresponds to the `+', COND to the `(b ? x : y)' or `(x < y)'
6275 expression, and ARG to `a'. If COND_FIRST_P is nonzero, then the
6276 COND is the first argument to CODE; otherwise (as in the example
6277 given here), it is the second argument. TYPE is the type of the
6278 original expression. Return NULL_TREE if no simplification is
6279 possible. */
6281 static tree
6282 fold_binary_op_with_conditional_arg (location_t loc,
6283 enum tree_code code,
6284 tree type, tree op0, tree op1,
6285 tree cond, tree arg, int cond_first_p)
6287 tree cond_type = cond_first_p ? TREE_TYPE (op0) : TREE_TYPE (op1);
6288 tree arg_type = cond_first_p ? TREE_TYPE (op1) : TREE_TYPE (op0);
6289 tree test, true_value, false_value;
6290 tree lhs = NULL_TREE;
6291 tree rhs = NULL_TREE;
6292 enum tree_code cond_code = COND_EXPR;
6294 if (TREE_CODE (cond) == COND_EXPR
6295 || TREE_CODE (cond) == VEC_COND_EXPR)
6297 test = TREE_OPERAND (cond, 0);
6298 true_value = TREE_OPERAND (cond, 1);
6299 false_value = TREE_OPERAND (cond, 2);
6300 /* If this operand throws an exception, then it does not make
6301 sense to try to perform a logical or arithmetic operation
6302 involving it. */
6303 if (VOID_TYPE_P (TREE_TYPE (true_value)))
6304 lhs = true_value;
6305 if (VOID_TYPE_P (TREE_TYPE (false_value)))
6306 rhs = false_value;
6308 else
6310 tree testtype = TREE_TYPE (cond);
6311 test = cond;
6312 true_value = constant_boolean_node (true, testtype);
6313 false_value = constant_boolean_node (false, testtype);
6316 if (TREE_CODE (TREE_TYPE (test)) == VECTOR_TYPE)
6317 cond_code = VEC_COND_EXPR;
6319 /* This transformation is only worthwhile if we don't have to wrap ARG
6320 in a SAVE_EXPR and the operation can be simplified without recursing
6321 on at least one of the branches once it's pushed inside the COND_EXPR. */
6322 if (!TREE_CONSTANT (arg)
6323 && (TREE_SIDE_EFFECTS (arg)
6324 || TREE_CODE (arg) == COND_EXPR || TREE_CODE (arg) == VEC_COND_EXPR
6325 || TREE_CONSTANT (true_value) || TREE_CONSTANT (false_value)))
6326 return NULL_TREE;
6328 arg = fold_convert_loc (loc, arg_type, arg);
6329 if (lhs == 0)
6331 true_value = fold_convert_loc (loc, cond_type, true_value);
6332 if (cond_first_p)
6333 lhs = fold_build2_loc (loc, code, type, true_value, arg);
6334 else
6335 lhs = fold_build2_loc (loc, code, type, arg, true_value);
6337 if (rhs == 0)
6339 false_value = fold_convert_loc (loc, cond_type, false_value);
6340 if (cond_first_p)
6341 rhs = fold_build2_loc (loc, code, type, false_value, arg);
6342 else
6343 rhs = fold_build2_loc (loc, code, type, arg, false_value);
6346 /* Check that we have simplified at least one of the branches. */
6347 if (!TREE_CONSTANT (arg) && !TREE_CONSTANT (lhs) && !TREE_CONSTANT (rhs))
6348 return NULL_TREE;
6350 return fold_build3_loc (loc, cond_code, type, test, lhs, rhs);
6354 /* Subroutine of fold() that checks for the addition of +/- 0.0.
6356 If !NEGATE, return true if ADDEND is +/-0.0 and, for all X of type
6357 TYPE, X + ADDEND is the same as X. If NEGATE, return true if X -
6358 ADDEND is the same as X.
6360 X + 0 and X - 0 both give X when X is NaN, infinite, or nonzero
6361 and finite. The problematic cases are when X is zero, and its mode
6362 has signed zeros. In the case of rounding towards -infinity,
6363 X - 0 is not the same as X because 0 - 0 is -0. In other rounding
6364 modes, X + 0 is not the same as X because -0 + 0 is 0. */
6366 bool
6367 fold_real_zero_addition_p (const_tree type, const_tree addend, int negate)
6369 if (!real_zerop (addend))
6370 return false;
6372 /* Don't allow the fold with -fsignaling-nans. */
6373 if (HONOR_SNANS (element_mode (type)))
6374 return false;
6376 /* Allow the fold if zeros aren't signed, or their sign isn't important. */
6377 if (!HONOR_SIGNED_ZEROS (element_mode (type)))
6378 return true;
6380 /* In a vector or complex, we would need to check the sign of all zeros. */
6381 if (TREE_CODE (addend) != REAL_CST)
6382 return false;
6384 /* Treat x + -0 as x - 0 and x - -0 as x + 0. */
6385 if (REAL_VALUE_MINUS_ZERO (TREE_REAL_CST (addend)))
6386 negate = !negate;
6388 /* The mode has signed zeros, and we have to honor their sign.
6389 In this situation, there is only one case we can return true for.
6390 X - 0 is the same as X unless rounding towards -infinity is
6391 supported. */
6392 return negate && !HONOR_SIGN_DEPENDENT_ROUNDING (element_mode (type));
6395 /* Subroutine of fold() that checks comparisons of built-in math
6396 functions against real constants.
6398 FCODE is the DECL_FUNCTION_CODE of the built-in, CODE is the comparison
6399 operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR, GE_EXPR or LE_EXPR. TYPE
6400 is the type of the result and ARG0 and ARG1 are the operands of the
6401 comparison. ARG1 must be a TREE_REAL_CST.
6403 The function returns the constant folded tree if a simplification
6404 can be made, and NULL_TREE otherwise. */
6406 static tree
6407 fold_mathfn_compare (location_t loc,
6408 enum built_in_function fcode, enum tree_code code,
6409 tree type, tree arg0, tree arg1)
6411 REAL_VALUE_TYPE c;
6413 if (BUILTIN_SQRT_P (fcode))
6415 tree arg = CALL_EXPR_ARG (arg0, 0);
6416 machine_mode mode = TYPE_MODE (TREE_TYPE (arg0));
6418 c = TREE_REAL_CST (arg1);
6419 if (REAL_VALUE_NEGATIVE (c))
6421 /* sqrt(x) < y is always false, if y is negative. */
6422 if (code == EQ_EXPR || code == LT_EXPR || code == LE_EXPR)
6423 return omit_one_operand_loc (loc, type, integer_zero_node, arg);
6425 /* sqrt(x) > y is always true, if y is negative and we
6426 don't care about NaNs, i.e. negative values of x. */
6427 if (code == NE_EXPR || !HONOR_NANS (mode))
6428 return omit_one_operand_loc (loc, type, integer_one_node, arg);
6430 /* sqrt(x) > y is the same as x >= 0, if y is negative. */
6431 return fold_build2_loc (loc, GE_EXPR, type, arg,
6432 build_real (TREE_TYPE (arg), dconst0));
6434 else if (code == GT_EXPR || code == GE_EXPR)
6436 REAL_VALUE_TYPE c2;
6438 REAL_ARITHMETIC (c2, MULT_EXPR, c, c);
6439 real_convert (&c2, mode, &c2);
6441 if (REAL_VALUE_ISINF (c2))
6443 /* sqrt(x) > y is x == +Inf, when y is very large. */
6444 if (HONOR_INFINITIES (mode))
6445 return fold_build2_loc (loc, EQ_EXPR, type, arg,
6446 build_real (TREE_TYPE (arg), c2));
6448 /* sqrt(x) > y is always false, when y is very large
6449 and we don't care about infinities. */
6450 return omit_one_operand_loc (loc, type, integer_zero_node, arg);
6453 /* sqrt(x) > c is the same as x > c*c. */
6454 return fold_build2_loc (loc, code, type, arg,
6455 build_real (TREE_TYPE (arg), c2));
6457 else if (code == LT_EXPR || code == LE_EXPR)
6459 REAL_VALUE_TYPE c2;
6461 REAL_ARITHMETIC (c2, MULT_EXPR, c, c);
6462 real_convert (&c2, mode, &c2);
6464 if (REAL_VALUE_ISINF (c2))
6466 /* sqrt(x) < y is always true, when y is a very large
6467 value and we don't care about NaNs or Infinities. */
6468 if (! HONOR_NANS (mode) && ! HONOR_INFINITIES (mode))
6469 return omit_one_operand_loc (loc, type, integer_one_node, arg);
6471 /* sqrt(x) < y is x != +Inf when y is very large and we
6472 don't care about NaNs. */
6473 if (! HONOR_NANS (mode))
6474 return fold_build2_loc (loc, NE_EXPR, type, arg,
6475 build_real (TREE_TYPE (arg), c2));
6477 /* sqrt(x) < y is x >= 0 when y is very large and we
6478 don't care about Infinities. */
6479 if (! HONOR_INFINITIES (mode))
6480 return fold_build2_loc (loc, GE_EXPR, type, arg,
6481 build_real (TREE_TYPE (arg), dconst0));
6483 /* sqrt(x) < y is x >= 0 && x != +Inf, when y is large. */
6484 arg = save_expr (arg);
6485 return fold_build2_loc (loc, TRUTH_ANDIF_EXPR, type,
6486 fold_build2_loc (loc, GE_EXPR, type, arg,
6487 build_real (TREE_TYPE (arg),
6488 dconst0)),
6489 fold_build2_loc (loc, NE_EXPR, type, arg,
6490 build_real (TREE_TYPE (arg),
6491 c2)));
6494 /* sqrt(x) < c is the same as x < c*c, if we ignore NaNs. */
6495 if (! HONOR_NANS (mode))
6496 return fold_build2_loc (loc, code, type, arg,
6497 build_real (TREE_TYPE (arg), c2));
6499 /* sqrt(x) < c is the same as x >= 0 && x < c*c. */
6500 arg = save_expr (arg);
6501 return fold_build2_loc (loc, TRUTH_ANDIF_EXPR, type,
6502 fold_build2_loc (loc, GE_EXPR, type, arg,
6503 build_real (TREE_TYPE (arg),
6504 dconst0)),
6505 fold_build2_loc (loc, code, type, arg,
6506 build_real (TREE_TYPE (arg),
6507 c2)));
6511 return NULL_TREE;
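/* Putting the sqrt cases together: sqrt (x) < 3.0 becomes
   x >= 0.0 && x < 9.0 when NaNs are honored (both forms are false
   for negative or NaN x), and simply x < 9.0 when they are not.  */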
6514 /* Subroutine of fold() that optimizes comparisons against Infinities,
6515 either +Inf or -Inf.
6517 CODE is the comparison operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR,
6518 GE_EXPR or LE_EXPR. TYPE is the type of the result and ARG0 and ARG1
6519 are the operands of the comparison. ARG1 must be a TREE_REAL_CST.
6521 The function returns the constant folded tree if a simplification
6522 can be made, and NULL_TREE otherwise. */
6524 static tree
6525 fold_inf_compare (location_t loc, enum tree_code code, tree type,
6526 tree arg0, tree arg1)
6528 machine_mode mode;
6529 REAL_VALUE_TYPE max;
6530 tree temp;
6531 bool neg;
6533 mode = TYPE_MODE (TREE_TYPE (arg0));
6535 /* For negative infinity swap the sense of the comparison. */
6536 neg = REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg1));
6537 if (neg)
6538 code = swap_tree_comparison (code);
6540 switch (code)
6542 case GT_EXPR:
6543 /* x > +Inf is always false, if we ignore sNaNs. */
6544 if (HONOR_SNANS (mode))
6545 return NULL_TREE;
6546 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
6548 case LE_EXPR:
6549 /* x <= +Inf is always true, if we don't care about NaNs. */
6550 if (! HONOR_NANS (mode))
6551 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
6553 /* x <= +Inf is the same as x == x, i.e. !isnan(x). */
6554 arg0 = save_expr (arg0);
6555 return fold_build2_loc (loc, EQ_EXPR, type, arg0, arg0);
6557 case EQ_EXPR:
6558 case GE_EXPR:
6559 /* x == +Inf and x >= +Inf are always equal to x > DBL_MAX. */
6560 real_maxval (&max, neg, mode);
6561 return fold_build2_loc (loc, neg ? LT_EXPR : GT_EXPR, type,
6562 arg0, build_real (TREE_TYPE (arg0), max));
6564 case LT_EXPR:
6565 /* x < +Inf is always equal to x <= DBL_MAX. */
6566 real_maxval (&max, neg, mode);
6567 return fold_build2_loc (loc, neg ? GE_EXPR : LE_EXPR, type,
6568 arg0, build_real (TREE_TYPE (arg0), max));
6570 case NE_EXPR:
6571 /* x != +Inf is always equal to !(x > DBL_MAX). */
6572 real_maxval (&max, neg, mode);
6573 if (! HONOR_NANS (mode))
6574 return fold_build2_loc (loc, neg ? GE_EXPR : LE_EXPR, type,
6575 arg0, build_real (TREE_TYPE (arg0), max));
6577 temp = fold_build2_loc (loc, neg ? LT_EXPR : GT_EXPR, type,
6578 arg0, build_real (TREE_TYPE (arg0), max));
6579 return fold_build1_loc (loc, TRUTH_NOT_EXPR, type, temp);
6581 default:
6582 break;
6585 return NULL_TREE;
6588 /* Subroutine of fold() that optimizes comparisons of a division by
6589 a nonzero integer constant against an integer constant, i.e.
6590 X/C1 op C2.
6592 CODE is the comparison operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR,
6593 GE_EXPR or LE_EXPR. TYPE is the type of the result and ARG0 and ARG1
6594 are the operands of the comparison. ARG1 must be an INTEGER_CST.
6596 The function returns the constant folded tree if a simplification
6597 can be made, and NULL_TREE otherwise. */
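/* Worked instance with signed arithmetic: for X/4 == 3 the code
   below computes prod == 12, tmp == 3, lo == 12, hi == 15, and the
   EQ_EXPR case emits the single range check X in [12, 15].  */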
6599 static tree
6600 fold_div_compare (location_t loc,
6601 enum tree_code code, tree type, tree arg0, tree arg1)
6603 tree prod, tmp, hi, lo;
6604 tree arg00 = TREE_OPERAND (arg0, 0);
6605 tree arg01 = TREE_OPERAND (arg0, 1);
6606 signop sign = TYPE_SIGN (TREE_TYPE (arg0));
6607 bool neg_overflow = false;
6608 bool overflow;
6610 /* We have to do this the hard way to detect unsigned overflow.
6611 prod = int_const_binop (MULT_EXPR, arg01, arg1); */
6612 wide_int val = wi::mul (arg01, arg1, sign, &overflow);
6613 prod = force_fit_type (TREE_TYPE (arg00), val, -1, overflow);
6614 neg_overflow = false;
6616 if (sign == UNSIGNED)
6618 tmp = int_const_binop (MINUS_EXPR, arg01,
6619 build_int_cst (TREE_TYPE (arg01), 1));
6620 lo = prod;
6622 /* Likewise hi = int_const_binop (PLUS_EXPR, prod, tmp). */
6623 val = wi::add (prod, tmp, sign, &overflow);
6624 hi = force_fit_type (TREE_TYPE (arg00), val,
6625 -1, overflow | TREE_OVERFLOW (prod));
6627 else if (tree_int_cst_sgn (arg01) >= 0)
6629 tmp = int_const_binop (MINUS_EXPR, arg01,
6630 build_int_cst (TREE_TYPE (arg01), 1));
6631 switch (tree_int_cst_sgn (arg1))
6633 case -1:
6634 neg_overflow = true;
6635 lo = int_const_binop (MINUS_EXPR, prod, tmp);
6636 hi = prod;
6637 break;
6639 case 0:
6640 lo = fold_negate_const (tmp, TREE_TYPE (arg0));
6641 hi = tmp;
6642 break;
6644 case 1:
6645 hi = int_const_binop (PLUS_EXPR, prod, tmp);
6646 lo = prod;
6647 break;
6649 default:
6650 gcc_unreachable ();
6653 else
6655 /* A negative divisor reverses the relational operators. */
6656 code = swap_tree_comparison (code);
6658 tmp = int_const_binop (PLUS_EXPR, arg01,
6659 build_int_cst (TREE_TYPE (arg01), 1));
6660 switch (tree_int_cst_sgn (arg1))
6662 case -1:
6663 hi = int_const_binop (MINUS_EXPR, prod, tmp);
6664 lo = prod;
6665 break;
6667 case 0:
6668 hi = fold_negate_const (tmp, TREE_TYPE (arg0));
6669 lo = tmp;
6670 break;
6672 case 1:
6673 neg_overflow = true;
6674 lo = int_const_binop (PLUS_EXPR, prod, tmp);
6675 hi = prod;
6676 break;
6678 default:
6679 gcc_unreachable ();
6683 switch (code)
6685 case EQ_EXPR:
6686 if (TREE_OVERFLOW (lo) && TREE_OVERFLOW (hi))
6687 return omit_one_operand_loc (loc, type, integer_zero_node, arg00);
6688 if (TREE_OVERFLOW (hi))
6689 return fold_build2_loc (loc, GE_EXPR, type, arg00, lo);
6690 if (TREE_OVERFLOW (lo))
6691 return fold_build2_loc (loc, LE_EXPR, type, arg00, hi);
6692 return build_range_check (loc, type, arg00, 1, lo, hi);
6694 case NE_EXPR:
6695 if (TREE_OVERFLOW (lo) && TREE_OVERFLOW (hi))
6696 return omit_one_operand_loc (loc, type, integer_one_node, arg00);
6697 if (TREE_OVERFLOW (hi))
6698 return fold_build2_loc (loc, LT_EXPR, type, arg00, lo);
6699 if (TREE_OVERFLOW (lo))
6700 return fold_build2_loc (loc, GT_EXPR, type, arg00, hi);
6701 return build_range_check (loc, type, arg00, 0, lo, hi);
6703 case LT_EXPR:
6704 if (TREE_OVERFLOW (lo))
6706 tmp = neg_overflow ? integer_zero_node : integer_one_node;
6707 return omit_one_operand_loc (loc, type, tmp, arg00);
6709 return fold_build2_loc (loc, LT_EXPR, type, arg00, lo);
6711 case LE_EXPR:
6712 if (TREE_OVERFLOW (hi))
6714 tmp = neg_overflow ? integer_zero_node : integer_one_node;
6715 return omit_one_operand_loc (loc, type, tmp, arg00);
6717 return fold_build2_loc (loc, LE_EXPR, type, arg00, hi);
6719 case GT_EXPR:
6720 if (TREE_OVERFLOW (hi))
6722 tmp = neg_overflow ? integer_one_node : integer_zero_node;
6723 return omit_one_operand_loc (loc, type, tmp, arg00);
6725 return fold_build2_loc (loc, GT_EXPR, type, arg00, hi);
6727 case GE_EXPR:
6728 if (TREE_OVERFLOW (lo))
6730 tmp = neg_overflow ? integer_one_node : integer_zero_node;
6731 return omit_one_operand_loc (loc, type, tmp, arg00);
6733 return fold_build2_loc (loc, GE_EXPR, type, arg00, lo);
6735 default:
6736 break;
6739 return NULL_TREE;
6743 /* If CODE with arguments ARG0 and ARG1 represents a single bit
6744 equality/inequality test, then return a simplified form of the test
6745 using a sign test. Otherwise return NULL. TYPE is the desired
6746 result type. */
6748 static tree
6749 fold_single_bit_test_into_sign_test (location_t loc,
6750 enum tree_code code, tree arg0, tree arg1,
6751 tree result_type)
6753 /* If this is testing a single bit, we can optimize the test. */
6754 if ((code == NE_EXPR || code == EQ_EXPR)
6755 && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
6756 && integer_pow2p (TREE_OPERAND (arg0, 1)))
6758 /* If we have (A & C) != 0 where C is the sign bit of A, convert
6759 this into A < 0. Similarly for (A & C) == 0 into A >= 0. */
6760 tree arg00 = sign_bit_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg0, 1));
6762 if (arg00 != NULL_TREE
6763 /* This is only a win if casting to a signed type is cheap,
6764 i.e. when arg00's type is not a partial mode. */
6765 && TYPE_PRECISION (TREE_TYPE (arg00))
6766 == GET_MODE_PRECISION (TYPE_MODE (TREE_TYPE (arg00))))
6768 tree stype = signed_type_for (TREE_TYPE (arg00));
6769 return fold_build2_loc (loc, code == EQ_EXPR ? GE_EXPR : LT_EXPR,
6770 result_type,
6771 fold_convert_loc (loc, stype, arg00),
6772 build_int_cst (stype, 0));
6776 return NULL_TREE;
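/* E.g. for a 32-bit int A, (A & 0x80000000) != 0 tests exactly the
   sign bit, so the code above rewrites it as A < 0 (and == 0 as
   A >= 0) in the corresponding signed type.  */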
6779 /* If CODE with arguments ARG0 and ARG1 represents a single bit
6780 equality/inequality test, then return a simplified form of
6781 the test using shifts and logical operations. Otherwise return
6782 NULL. TYPE is the desired result type. */
6784 tree
6785 fold_single_bit_test (location_t loc, enum tree_code code,
6786 tree arg0, tree arg1, tree result_type)
6788 /* If this is testing a single bit, we can optimize the test. */
6789 if ((code == NE_EXPR || code == EQ_EXPR)
6790 && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
6791 && integer_pow2p (TREE_OPERAND (arg0, 1)))
6793 tree inner = TREE_OPERAND (arg0, 0);
6794 tree type = TREE_TYPE (arg0);
6795 int bitnum = tree_log2 (TREE_OPERAND (arg0, 1));
6796 machine_mode operand_mode = TYPE_MODE (type);
6797 int ops_unsigned;
6798 tree signed_type, unsigned_type, intermediate_type;
6799 tree tem, one;
6801 /* First, see if we can fold the single bit test into a sign-bit
6802 test. */
6803 tem = fold_single_bit_test_into_sign_test (loc, code, arg0, arg1,
6804 result_type);
6805 if (tem)
6806 return tem;
6808 /* Otherwise we have (A & C) != 0 where C is a single bit,
6809 convert that into ((A >> C2) & 1), where C2 = log2(C).
6810 Similarly for (A & C) == 0. */
6812 /* If INNER is a right shift by a constant and the shift count plus
6813 BITNUM does not overflow, adjust BITNUM and INNER. */
6814 if (TREE_CODE (inner) == RSHIFT_EXPR
6815 && TREE_CODE (TREE_OPERAND (inner, 1)) == INTEGER_CST
6816 && bitnum < TYPE_PRECISION (type)
6817 && wi::ltu_p (TREE_OPERAND (inner, 1),
6818 TYPE_PRECISION (type) - bitnum))
6820 bitnum += tree_to_uhwi (TREE_OPERAND (inner, 1));
6821 inner = TREE_OPERAND (inner, 0);
6824 /* If we are going to be able to omit the AND below, we must do our
6825 operations as unsigned. If we must use the AND, we have a choice.
6826 Normally unsigned is faster, but for some machines signed is. */
6827 #ifdef LOAD_EXTEND_OP
6828 ops_unsigned = (LOAD_EXTEND_OP (operand_mode) == SIGN_EXTEND
6829 && !flag_syntax_only) ? 0 : 1;
6830 #else
6831 ops_unsigned = 1;
6832 #endif
6834 signed_type = lang_hooks.types.type_for_mode (operand_mode, 0);
6835 unsigned_type = lang_hooks.types.type_for_mode (operand_mode, 1);
6836 intermediate_type = ops_unsigned ? unsigned_type : signed_type;
6837 inner = fold_convert_loc (loc, intermediate_type, inner);
6839 if (bitnum != 0)
6840 inner = build2 (RSHIFT_EXPR, intermediate_type,
6841 inner, size_int (bitnum));
6843 one = build_int_cst (intermediate_type, 1);
6845 if (code == EQ_EXPR)
6846 inner = fold_build2_loc (loc, BIT_XOR_EXPR, intermediate_type, inner, one);
6848 /* Put the AND last so it can combine with more things. */
6849 inner = build2 (BIT_AND_EXPR, intermediate_type, inner, one);
6851 /* Make sure to return the proper type. */
6852 inner = fold_convert_loc (loc, result_type, inner);
6854 return inner;
6856 return NULL_TREE;
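/* E.g. (A & 8) != 0 becomes ((A >> 3) & 1), and (A & 8) == 0
   becomes (((A >> 3) ^ 1) & 1), with the shift, xor and mask done
   in the intermediate type chosen above.  */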
6859 /* Check whether we are allowed to reorder operands arg0 and arg1,
6860 such that the evaluation of arg1 occurs before arg0. */
6862 static bool
6863 reorder_operands_p (const_tree arg0, const_tree arg1)
6865 if (! flag_evaluation_order)
6866 return true;
6867 if (TREE_CONSTANT (arg0) || TREE_CONSTANT (arg1))
6868 return true;
6869 return ! TREE_SIDE_EFFECTS (arg0)
6870 && ! TREE_SIDE_EFFECTS (arg1);
6873 /* Test whether it is preferable to swap two operands, ARG0 and
6874 ARG1, for example because ARG0 is an integer constant and ARG1
6875 isn't. If REORDER is true, only recommend swapping if we can
6876 evaluate the operands in reverse order. */
6878 bool
6879 tree_swap_operands_p (const_tree arg0, const_tree arg1, bool reorder)
6881 if (CONSTANT_CLASS_P (arg1))
6882 return 0;
6883 if (CONSTANT_CLASS_P (arg0))
6884 return 1;
6886 STRIP_NOPS (arg0);
6887 STRIP_NOPS (arg1);
6889 if (TREE_CONSTANT (arg1))
6890 return 0;
6891 if (TREE_CONSTANT (arg0))
6892 return 1;
6894 if (reorder && flag_evaluation_order
6895 && (TREE_SIDE_EFFECTS (arg0) || TREE_SIDE_EFFECTS (arg1)))
6896 return 0;
6898 /* It is preferable to swap two SSA_NAMEs to ensure a canonical form
6899 for commutative and comparison operators. Ensuring a canonical
6900 form allows the optimizers to find additional redundancies without
6901 having to explicitly check for both orderings. */
6902 if (TREE_CODE (arg0) == SSA_NAME
6903 && TREE_CODE (arg1) == SSA_NAME
6904 && SSA_NAME_VERSION (arg0) > SSA_NAME_VERSION (arg1))
6905 return 1;
6907 /* Put SSA_NAMEs last. */
6908 if (TREE_CODE (arg1) == SSA_NAME)
6909 return 0;
6910 if (TREE_CODE (arg0) == SSA_NAME)
6911 return 1;
6913 /* Put variables last. */
6914 if (DECL_P (arg1))
6915 return 0;
6916 if (DECL_P (arg0))
6917 return 1;
6919 return 0;
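/* Callers such as fold_binary_loc use this to canonicalize
   commutative operations, e.g. rewriting 5 + x as x + 5 so that
   constants reliably end up as the second operand.  */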
6922 /* Fold comparison ARG0 CODE ARG1 (with result in TYPE), where
6923 ARG0 is extended to a wider type. */
6925 static tree
6926 fold_widened_comparison (location_t loc, enum tree_code code,
6927 tree type, tree arg0, tree arg1)
6929 tree arg0_unw = get_unwidened (arg0, NULL_TREE);
6930 tree arg1_unw;
6931 tree shorter_type, outer_type;
6932 tree min, max;
6933 bool above, below;
6935 if (arg0_unw == arg0)
6936 return NULL_TREE;
6937 shorter_type = TREE_TYPE (arg0_unw);
6939 #ifdef HAVE_canonicalize_funcptr_for_compare
6940 /* Disable this optimization if we're casting a function pointer
6941 type on targets that require function pointer canonicalization. */
6942 if (HAVE_canonicalize_funcptr_for_compare
6943 && TREE_CODE (shorter_type) == POINTER_TYPE
6944 && TREE_CODE (TREE_TYPE (shorter_type)) == FUNCTION_TYPE)
6945 return NULL_TREE;
6946 #endif
6948 if (TYPE_PRECISION (TREE_TYPE (arg0)) <= TYPE_PRECISION (shorter_type))
6949 return NULL_TREE;
6951 arg1_unw = get_unwidened (arg1, NULL_TREE);
6953 /* If possible, express the comparison in the shorter mode. */
6954 if ((code == EQ_EXPR || code == NE_EXPR
6955 || TYPE_UNSIGNED (TREE_TYPE (arg0)) == TYPE_UNSIGNED (shorter_type))
6956 && (TREE_TYPE (arg1_unw) == shorter_type
6957 || ((TYPE_PRECISION (shorter_type)
6958 >= TYPE_PRECISION (TREE_TYPE (arg1_unw)))
6959 && (TYPE_UNSIGNED (shorter_type)
6960 == TYPE_UNSIGNED (TREE_TYPE (arg1_unw))))
6961 || (TREE_CODE (arg1_unw) == INTEGER_CST
6962 && (TREE_CODE (shorter_type) == INTEGER_TYPE
6963 || TREE_CODE (shorter_type) == BOOLEAN_TYPE)
6964 && int_fits_type_p (arg1_unw, shorter_type))))
6965 return fold_build2_loc (loc, code, type, arg0_unw,
6966 fold_convert_loc (loc, shorter_type, arg1_unw));
6968 if (TREE_CODE (arg1_unw) != INTEGER_CST
6969 || TREE_CODE (shorter_type) != INTEGER_TYPE
6970 || !int_fits_type_p (arg1_unw, shorter_type))
6971 return NULL_TREE;
6973 /* If we are comparing with an integer that does not fit into the range
6974 of the shorter type, the result is known. */
6975 outer_type = TREE_TYPE (arg1_unw);
6976 min = lower_bound_in_type (outer_type, shorter_type);
6977 max = upper_bound_in_type (outer_type, shorter_type);
6979 above = integer_nonzerop (fold_relational_const (LT_EXPR, type,
6980 max, arg1_unw));
6981 below = integer_nonzerop (fold_relational_const (LT_EXPR, type,
6982 arg1_unw, min));
6984 switch (code)
6986 case EQ_EXPR:
6987 if (above || below)
6988 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
6989 break;
6991 case NE_EXPR:
6992 if (above || below)
6993 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
6994 break;
6996 case LT_EXPR:
6997 case LE_EXPR:
6998 if (above)
6999 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
7000 else if (below)
7001 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
7003 case GT_EXPR:
7004 case GE_EXPR:
7005 if (above)
7006 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
7007 else if (below)
7008 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
7010 default:
7011 break;
7014 return NULL_TREE;
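/* E.g. if C has type signed char, ((int) C) == 300 can never hold
   since 300 exceeds SCHAR_MAX, so the EQ_EXPR case above folds the
   comparison to constant 0 while preserving C for its side
   effects.  */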
7017 /* Fold comparison ARG0 CODE ARG1 (with result in TYPE), where for
7018 ARG0 just the signedness is changed. */
7020 static tree
7021 fold_sign_changed_comparison (location_t loc, enum tree_code code, tree type,
7022 tree arg0, tree arg1)
7024 tree arg0_inner;
7025 tree inner_type, outer_type;
7027 if (!CONVERT_EXPR_P (arg0))
7028 return NULL_TREE;
7030 outer_type = TREE_TYPE (arg0);
7031 arg0_inner = TREE_OPERAND (arg0, 0);
7032 inner_type = TREE_TYPE (arg0_inner);
7034 #ifdef HAVE_canonicalize_funcptr_for_compare
7035 /* Disable this optimization if we're casting a function pointer
7036 type on targets that require function pointer canonicalization. */
7037 if (HAVE_canonicalize_funcptr_for_compare
7038 && TREE_CODE (inner_type) == POINTER_TYPE
7039 && TREE_CODE (TREE_TYPE (inner_type)) == FUNCTION_TYPE)
7040 return NULL_TREE;
7041 #endif
7043 if (TYPE_PRECISION (inner_type) != TYPE_PRECISION (outer_type))
7044 return NULL_TREE;
7046 if (TREE_CODE (arg1) != INTEGER_CST
7047 && !(CONVERT_EXPR_P (arg1)
7048 && TREE_TYPE (TREE_OPERAND (arg1, 0)) == inner_type))
7049 return NULL_TREE;
7051 if (TYPE_UNSIGNED (inner_type) != TYPE_UNSIGNED (outer_type)
7052 && code != NE_EXPR
7053 && code != EQ_EXPR)
7054 return NULL_TREE;
7056 if (POINTER_TYPE_P (inner_type) != POINTER_TYPE_P (outer_type))
7057 return NULL_TREE;
7059 if (TREE_CODE (arg1) == INTEGER_CST)
7060 arg1 = force_fit_type (inner_type, wi::to_widest (arg1), 0,
7061 TREE_OVERFLOW (arg1));
7062 else
7063 arg1 = fold_convert_loc (loc, inner_type, arg1);
7065 return fold_build2_loc (loc, code, type, arg0_inner, arg1);
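/* E.g. for int X, ((unsigned) X) == 5U folds back to X == 5;
   ordered comparisons are rewritten only when the signedness is
   unchanged, since (unsigned) X < 5U and X < 5 disagree for
   negative X.  */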
7069 /* Fold A < X && A + 1 > Y to A < X && A >= Y. Normally A + 1 > Y
7070 means A >= Y && A != MAX, but in this case we know that
7071 A < X <= MAX. INEQ is A + 1 > Y, BOUND is A < X. */
7073 static tree
7074 fold_to_nonsharp_ineq_using_bound (location_t loc, tree ineq, tree bound)
7076 tree a, typea, type = TREE_TYPE (ineq), a1, diff, y;
7078 if (TREE_CODE (bound) == LT_EXPR)
7079 a = TREE_OPERAND (bound, 0);
7080 else if (TREE_CODE (bound) == GT_EXPR)
7081 a = TREE_OPERAND (bound, 1);
7082 else
7083 return NULL_TREE;
7085 typea = TREE_TYPE (a);
7086 if (!INTEGRAL_TYPE_P (typea)
7087 && !POINTER_TYPE_P (typea))
7088 return NULL_TREE;
7090 if (TREE_CODE (ineq) == LT_EXPR)
7092 a1 = TREE_OPERAND (ineq, 1);
7093 y = TREE_OPERAND (ineq, 0);
7095 else if (TREE_CODE (ineq) == GT_EXPR)
7097 a1 = TREE_OPERAND (ineq, 0);
7098 y = TREE_OPERAND (ineq, 1);
7100 else
7101 return NULL_TREE;
7103 if (TREE_TYPE (a1) != typea)
7104 return NULL_TREE;
7106 if (POINTER_TYPE_P (typea))
7108 /* Convert the pointer types into integer before taking the difference. */
7109 tree ta = fold_convert_loc (loc, ssizetype, a);
7110 tree ta1 = fold_convert_loc (loc, ssizetype, a1);
7111 diff = fold_binary_loc (loc, MINUS_EXPR, ssizetype, ta1, ta);
7113 else
7114 diff = fold_binary_loc (loc, MINUS_EXPR, typea, a1, a);
7116 if (!diff || !integer_onep (diff))
7117 return NULL_TREE;
7119 return fold_build2_loc (loc, GE_EXPR, type, a, y);
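/* E.g. with int A, combining A < X with A + 1 > Y: DIFF above is
   (A + 1) - A == 1, so the non-sharp test is rewritten as
   A >= Y.  */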
7122 /* Fold a sum or difference of at least one multiplication.
7123 Returns the folded tree or NULL if no simplification could be made. */
7125 static tree
7126 fold_plusminus_mult_expr (location_t loc, enum tree_code code, tree type,
7127 tree arg0, tree arg1)
7129 tree arg00, arg01, arg10, arg11;
7130 tree alt0 = NULL_TREE, alt1 = NULL_TREE, same;
7132 /* (A * C) +- (B * C) -> (A+-B) * C.
7133 (A * C) +- A -> A * (C+-1).
7134 We are most concerned about the case where C is a constant,
7135 but other combinations show up during loop reduction. Since
7136 it is not difficult, try all four possibilities. */
7138 if (TREE_CODE (arg0) == MULT_EXPR)
7140 arg00 = TREE_OPERAND (arg0, 0);
7141 arg01 = TREE_OPERAND (arg0, 1);
7143 else if (TREE_CODE (arg0) == INTEGER_CST)
7145 arg00 = build_one_cst (type);
7146 arg01 = arg0;
7148 else
7150 /* We cannot generate constant 1 for fract. */
7151 if (ALL_FRACT_MODE_P (TYPE_MODE (type)))
7152 return NULL_TREE;
7153 arg00 = arg0;
7154 arg01 = build_one_cst (type);
7156 if (TREE_CODE (arg1) == MULT_EXPR)
7158 arg10 = TREE_OPERAND (arg1, 0);
7159 arg11 = TREE_OPERAND (arg1, 1);
7161 else if (TREE_CODE (arg1) == INTEGER_CST)
7163 arg10 = build_one_cst (type);
7164 /* As we canonicalize A - 2 to A + -2, get rid of that sign for
7165 the purpose of this canonicalization. */
7166 if (wi::neg_p (arg1, TYPE_SIGN (TREE_TYPE (arg1)))
7167 && negate_expr_p (arg1)
7168 && code == PLUS_EXPR)
7170 arg11 = negate_expr (arg1);
7171 code = MINUS_EXPR;
7173 else
7174 arg11 = arg1;
7176 else
7178 /* We cannot generate constant 1 for fract. */
7179 if (ALL_FRACT_MODE_P (TYPE_MODE (type)))
7180 return NULL_TREE;
7181 arg10 = arg1;
7182 arg11 = build_one_cst (type);
7184 same = NULL_TREE;
7186 if (operand_equal_p (arg01, arg11, 0))
7187 same = arg01, alt0 = arg00, alt1 = arg10;
7188 else if (operand_equal_p (arg00, arg10, 0))
7189 same = arg00, alt0 = arg01, alt1 = arg11;
7190 else if (operand_equal_p (arg00, arg11, 0))
7191 same = arg00, alt0 = arg01, alt1 = arg10;
7192 else if (operand_equal_p (arg01, arg10, 0))
7193 same = arg01, alt0 = arg00, alt1 = arg11;
7195 /* No identical multiplicands; see if we can find a common
7196 power-of-two factor in non-power-of-two multiplies. This
7197 can help in multi-dimensional array access. */
7198 else if (tree_fits_shwi_p (arg01)
7199 && tree_fits_shwi_p (arg11))
7201 HOST_WIDE_INT int01, int11, tmp;
7202 bool swap = false;
7203 tree maybe_same;
7204 int01 = tree_to_shwi (arg01);
7205 int11 = tree_to_shwi (arg11);
7207 /* Move min of absolute values to int11. */
7208 if (absu_hwi (int01) < absu_hwi (int11))
7210 tmp = int01, int01 = int11, int11 = tmp;
7211 alt0 = arg00, arg00 = arg10, arg10 = alt0;
7212 maybe_same = arg01;
7213 swap = true;
7215 else
7216 maybe_same = arg11;
7218 if (exact_log2 (absu_hwi (int11)) > 0 && int01 % int11 == 0
7219 /* The remainder should not be a constant, otherwise we
7220 end up folding i * 4 + 2 to (i * 2 + 1) * 2, which would
7221 increase the number of multiplications necessary. */
7222 && TREE_CODE (arg10) != INTEGER_CST)
7224 alt0 = fold_build2_loc (loc, MULT_EXPR, TREE_TYPE (arg00), arg00,
7225 build_int_cst (TREE_TYPE (arg00),
7226 int01 / int11));
7227 alt1 = arg10;
7228 same = maybe_same;
7229 if (swap)
7230 maybe_same = alt0, alt0 = alt1, alt1 = maybe_same;
7234 if (same)
7235 return fold_build2_loc (loc, MULT_EXPR, type,
7236 fold_build2_loc (loc, code, type,
7237 fold_convert_loc (loc, type, alt0),
7238 fold_convert_loc (loc, type, alt1)),
7239 fold_convert_loc (loc, type, same));
7241 return NULL_TREE;
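/* A minimal standalone sketch (not part of GCC; the function name is
   illustrative) of the power-of-two branch above.  For arg0 = x * 12
   and arg1 = y * 4 we have int01 = 12, int11 = 4; 4 is a power of two
   and 12 % 4 == 0, so the sum is rebalanced around the common factor:  */

static int
example_plusminus_mult (int x, int y)
{
  /* x * 12 + y * 4 becomes (x * 3 + y) * 4, trading two
     multiplications for one plus a cheap scaling.  */
  return (x * 3 + y) * 4;
}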
7244 /* Subroutine of native_encode_expr. Encode the INTEGER_CST
7245 specified by EXPR into the buffer PTR of length LEN bytes.
7246 Return the number of bytes placed in the buffer, or zero
7247 upon failure. */
7249 static int
7250 native_encode_int (const_tree expr, unsigned char *ptr, int len, int off)
7252 tree type = TREE_TYPE (expr);
7253 int total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
7254 int byte, offset, word, words;
7255 unsigned char value;
7257 if ((off == -1 && total_bytes > len)
7258 || off >= total_bytes)
7259 return 0;
7260 if (off == -1)
7261 off = 0;
7262 words = total_bytes / UNITS_PER_WORD;
7264 for (byte = 0; byte < total_bytes; byte++)
7266 int bitpos = byte * BITS_PER_UNIT;
7267 /* Extend EXPR according to TYPE_SIGN if the precision isn't a whole
7268 number of bytes. */
7269 value = wi::extract_uhwi (wi::to_widest (expr), bitpos, BITS_PER_UNIT);
7271 if (total_bytes > UNITS_PER_WORD)
7273 word = byte / UNITS_PER_WORD;
7274 if (WORDS_BIG_ENDIAN)
7275 word = (words - 1) - word;
7276 offset = word * UNITS_PER_WORD;
7277 if (BYTES_BIG_ENDIAN)
7278 offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
7279 else
7280 offset += byte % UNITS_PER_WORD;
7282 else
7283 offset = BYTES_BIG_ENDIAN ? (total_bytes - 1) - byte : byte;
7284 if (offset >= off
7285 && offset - off < len)
7286 ptr[offset - off] = value;
7288 return MIN (len, total_bytes - off);
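/* A standalone sketch (not GCC code; names are illustrative) of the
   byte-placement logic above, for the common case where the value fits
   in a single host word so that only the BYTES_BIG_ENDIAN mapping
   matters:  */

static void
example_encode_int (unsigned long long value, unsigned char *buf,
                    int total_bytes, int bytes_big_endian)
{
  int byte;
  for (byte = 0; byte < total_bytes; byte++)
    {
      /* Extract the least significant byte first, as wi::extract_uhwi
         does above, then place it according to the byte order.  */
      unsigned char v = (unsigned char) (value >> (byte * 8));
      int offset = bytes_big_endian ? (total_bytes - 1) - byte : byte;
      buf[offset] = v;
    }
}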
7292 /* Subroutine of native_encode_expr. Encode the FIXED_CST
7293 specified by EXPR into the buffer PTR of length LEN bytes.
7294 Return the number of bytes placed in the buffer, or zero
7295 upon failure. */
7297 static int
7298 native_encode_fixed (const_tree expr, unsigned char *ptr, int len, int off)
7300 tree type = TREE_TYPE (expr);
7301 machine_mode mode = TYPE_MODE (type);
7302 int total_bytes = GET_MODE_SIZE (mode);
7303 FIXED_VALUE_TYPE value;
7304 tree i_value, i_type;
7306 if (total_bytes * BITS_PER_UNIT > HOST_BITS_PER_DOUBLE_INT)
7307 return 0;
7309 i_type = lang_hooks.types.type_for_size (GET_MODE_BITSIZE (mode), 1);
7311 if (NULL_TREE == i_type
7312 || TYPE_PRECISION (i_type) != total_bytes * BITS_PER_UNIT)
7313 return 0;
7315 value = TREE_FIXED_CST (expr);
7316 i_value = double_int_to_tree (i_type, value.data);
7318 return native_encode_int (i_value, ptr, len, off);
7322 /* Subroutine of native_encode_expr. Encode the REAL_CST
7323 specified by EXPR into the buffer PTR of length LEN bytes.
7324 Return the number of bytes placed in the buffer, or zero
7325 upon failure. */
7327 static int
7328 native_encode_real (const_tree expr, unsigned char *ptr, int len, int off)
7330 tree type = TREE_TYPE (expr);
7331 int total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
7332 int byte, offset, word, words, bitpos;
7333 unsigned char value;
7335 /* There are always 32 bits in each long, no matter the size of
7336 the host's long. We handle floating point representations with
7337 up to 192 bits. */
7338 long tmp[6];
7340 if ((off == -1 && total_bytes > len)
7341 || off >= total_bytes)
7342 return 0;
7343 if (off == -1)
7344 off = 0;
7345 words = (32 / BITS_PER_UNIT) / UNITS_PER_WORD;
7347 real_to_target (tmp, TREE_REAL_CST_PTR (expr), TYPE_MODE (type));
7349 for (bitpos = 0; bitpos < total_bytes * BITS_PER_UNIT;
7350 bitpos += BITS_PER_UNIT)
7352 byte = (bitpos / BITS_PER_UNIT) & 3;
7353 value = (unsigned char) (tmp[bitpos / 32] >> (bitpos & 31));
7355 if (UNITS_PER_WORD < 4)
7357 word = byte / UNITS_PER_WORD;
7358 if (WORDS_BIG_ENDIAN)
7359 word = (words - 1) - word;
7360 offset = word * UNITS_PER_WORD;
7361 if (BYTES_BIG_ENDIAN)
7362 offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
7363 else
7364 offset += byte % UNITS_PER_WORD;
7366 else
7367 offset = BYTES_BIG_ENDIAN ? 3 - byte : byte;
7368 offset = offset + ((bitpos / BITS_PER_UNIT) & ~3);
7369 if (offset >= off
7370 && offset - off < len)
7371 ptr[offset - off] = value;
7373 return MIN (len, total_bytes - off);
7376 /* Subroutine of native_encode_expr. Encode the COMPLEX_CST
7377 specified by EXPR into the buffer PTR of length LEN bytes.
7378 Return the number of bytes placed in the buffer, or zero
7379 upon failure. */
7381 static int
7382 native_encode_complex (const_tree expr, unsigned char *ptr, int len, int off)
7384 int rsize, isize;
7385 tree part;
7387 part = TREE_REALPART (expr);
7388 rsize = native_encode_expr (part, ptr, len, off);
7389 if (off == -1
7390 && rsize == 0)
7391 return 0;
7392 part = TREE_IMAGPART (expr);
7393 if (off != -1)
7394 off = MAX (0, off - GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (part))));
7395 isize = native_encode_expr (part, ptr+rsize, len-rsize, off);
7396 if (off == -1
7397 && isize != rsize)
7398 return 0;
7399 return rsize + isize;
7403 /* Subroutine of native_encode_expr. Encode the VECTOR_CST
7404 specified by EXPR into the buffer PTR of length LEN bytes.
7405 Return the number of bytes placed in the buffer, or zero
7406 upon failure. */
7408 static int
7409 native_encode_vector (const_tree expr, unsigned char *ptr, int len, int off)
7411 unsigned i, count;
7412 int size, offset;
7413 tree itype, elem;
7415 offset = 0;
7416 count = VECTOR_CST_NELTS (expr);
7417 itype = TREE_TYPE (TREE_TYPE (expr));
7418 size = GET_MODE_SIZE (TYPE_MODE (itype));
7419 for (i = 0; i < count; i++)
7421 if (off >= size)
7423 off -= size;
7424 continue;
7426 elem = VECTOR_CST_ELT (expr, i);
7427 int res = native_encode_expr (elem, ptr+offset, len-offset, off);
7428 if ((off == -1 && res != size)
7429 || res == 0)
7430 return 0;
7431 offset += res;
7432 if (offset >= len)
7433 return offset;
7434 if (off != -1)
7435 off = 0;
7437 return offset;
7441 /* Subroutine of native_encode_expr. Encode the STRING_CST
7442 specified by EXPR into the buffer PTR of length LEN bytes.
7443 Return the number of bytes placed in the buffer, or zero
7444 upon failure. */
7446 static int
7447 native_encode_string (const_tree expr, unsigned char *ptr, int len, int off)
7449 tree type = TREE_TYPE (expr);
7450 HOST_WIDE_INT total_bytes;
7452 if (TREE_CODE (type) != ARRAY_TYPE
7453 || TREE_CODE (TREE_TYPE (type)) != INTEGER_TYPE
7454 || GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (type))) != BITS_PER_UNIT
7455 || !tree_fits_shwi_p (TYPE_SIZE_UNIT (type)))
7456 return 0;
7457 total_bytes = tree_to_shwi (TYPE_SIZE_UNIT (type));
7458 if ((off == -1 && total_bytes > len)
7459 || off >= total_bytes)
7460 return 0;
7461 if (off == -1)
7462 off = 0;
7463 if (TREE_STRING_LENGTH (expr) - off < MIN (total_bytes, len))
7465 int written = 0;
7466 if (off < TREE_STRING_LENGTH (expr))
7468 written = MIN (len, TREE_STRING_LENGTH (expr) - off);
7469 memcpy (ptr, TREE_STRING_POINTER (expr) + off, written);
7471 memset (ptr + written, 0,
7472 MIN (total_bytes - written, len - written));
7474 else
7475 memcpy (ptr, TREE_STRING_POINTER (expr) + off, MIN (total_bytes, len));
7476 return MIN (total_bytes - off, len);
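/* A standalone sketch (not GCC code) of the padding case above: when
   the array type is longer than the string constant, e.g.
   char s[8] = "ab", the bytes past the STRING_CST are zero-filled,
   which is what the memset on the tail of the buffer implements.  */

#include <string.h>

static void
example_encode_short_string (unsigned char buf[8])
{
  const char str[] = "ab";	/* 3 bytes including the trailing NUL.  */
  memcpy (buf, str, sizeof (str));
  memset (buf + sizeof (str), 0, 8 - sizeof (str));
}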
7480 /* Subroutine of fold_view_convert_expr. Encode the INTEGER_CST,
7481 REAL_CST, COMPLEX_CST or VECTOR_CST specified by EXPR into the
7482 buffer PTR of length LEN bytes. If OFF is not -1 then start
7483 the encoding at byte offset OFF and encode at most LEN bytes.
7484 Return the number of bytes placed in the buffer, or zero upon failure. */
7486 int
7487 native_encode_expr (const_tree expr, unsigned char *ptr, int len, int off)
7489 switch (TREE_CODE (expr))
7491 case INTEGER_CST:
7492 return native_encode_int (expr, ptr, len, off);
7494 case REAL_CST:
7495 return native_encode_real (expr, ptr, len, off);
7497 case FIXED_CST:
7498 return native_encode_fixed (expr, ptr, len, off);
7500 case COMPLEX_CST:
7501 return native_encode_complex (expr, ptr, len, off);
7503 case VECTOR_CST:
7504 return native_encode_vector (expr, ptr, len, off);
7506 case STRING_CST:
7507 return native_encode_string (expr, ptr, len, off);
7509 default:
7510 return 0;
7515 /* Subroutine of native_interpret_expr. Interpret the contents of
7516 the buffer PTR of length LEN as an INTEGER_CST of type TYPE.
7517 If the buffer cannot be interpreted, return NULL_TREE. */
7519 static tree
7520 native_interpret_int (tree type, const unsigned char *ptr, int len)
7522 int total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
7524 if (total_bytes > len
7525 || total_bytes * BITS_PER_UNIT > HOST_BITS_PER_DOUBLE_INT)
7526 return NULL_TREE;
7528 wide_int result = wi::from_buffer (ptr, total_bytes);
7530 return wide_int_to_tree (type, result);
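/* A standalone sketch (not GCC code) of the inverse mapping that
   wi::from_buffer performs: reassemble a host integer from a little-
   or big-endian byte image.  Paired with the encoding sketch after
   native_encode_int, this round-trips the value.  */

static unsigned long long
example_decode_int (const unsigned char *buf, int total_bytes,
                    int bytes_big_endian)
{
  unsigned long long value = 0;
  int byte;
  for (byte = 0; byte < total_bytes; byte++)
    {
      int offset = bytes_big_endian ? (total_bytes - 1) - byte : byte;
      value |= (unsigned long long) buf[offset] << (byte * 8);
    }
  return value;
}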
7534 /* Subroutine of native_interpret_expr. Interpret the contents of
7535 the buffer PTR of length LEN as a FIXED_CST of type TYPE.
7536 If the buffer cannot be interpreted, return NULL_TREE. */
7538 static tree
7539 native_interpret_fixed (tree type, const unsigned char *ptr, int len)
7541 int total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
7542 double_int result;
7543 FIXED_VALUE_TYPE fixed_value;
7545 if (total_bytes > len
7546 || total_bytes * BITS_PER_UNIT > HOST_BITS_PER_DOUBLE_INT)
7547 return NULL_TREE;
7549 result = double_int::from_buffer (ptr, total_bytes);
7550 fixed_value = fixed_from_double_int (result, TYPE_MODE (type));
7552 return build_fixed (type, fixed_value);
7556 /* Subroutine of native_interpret_expr. Interpret the contents of
7557 the buffer PTR of length LEN as a REAL_CST of type TYPE.
7558 If the buffer cannot be interpreted, return NULL_TREE. */
7560 static tree
7561 native_interpret_real (tree type, const unsigned char *ptr, int len)
7563 machine_mode mode = TYPE_MODE (type);
7564 int total_bytes = GET_MODE_SIZE (mode);
7565 int byte, offset, word, words, bitpos;
7566 unsigned char value;
7567 /* There are always 32 bits in each long, no matter the size of
7568 the host's long. We handle floating point representations with
7569 up to 192 bits. */
7570 REAL_VALUE_TYPE r;
7571 long tmp[6];
7574 if (total_bytes > len || total_bytes > 24)
7575 return NULL_TREE;
7576 words = (32 / BITS_PER_UNIT) / UNITS_PER_WORD;
7578 memset (tmp, 0, sizeof (tmp));
7579 for (bitpos = 0; bitpos < total_bytes * BITS_PER_UNIT;
7580 bitpos += BITS_PER_UNIT)
7582 byte = (bitpos / BITS_PER_UNIT) & 3;
7583 if (UNITS_PER_WORD < 4)
7585 word = byte / UNITS_PER_WORD;
7586 if (WORDS_BIG_ENDIAN)
7587 word = (words - 1) - word;
7588 offset = word * UNITS_PER_WORD;
7589 if (BYTES_BIG_ENDIAN)
7590 offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
7591 else
7592 offset += byte % UNITS_PER_WORD;
7594 else
7595 offset = BYTES_BIG_ENDIAN ? 3 - byte : byte;
7596 value = ptr[offset + ((bitpos / BITS_PER_UNIT) & ~3)];
7598 tmp[bitpos / 32] |= (unsigned long)value << (bitpos & 31);
7601 real_from_target (&r, tmp, mode);
7602 return build_real (type, r);
7606 /* Subroutine of native_interpret_expr. Interpret the contents of
7607 the buffer PTR of length LEN as a COMPLEX_CST of type TYPE.
7608 If the buffer cannot be interpreted, return NULL_TREE. */
7610 static tree
7611 native_interpret_complex (tree type, const unsigned char *ptr, int len)
7613 tree etype, rpart, ipart;
7614 int size;
7616 etype = TREE_TYPE (type);
7617 size = GET_MODE_SIZE (TYPE_MODE (etype));
7618 if (size * 2 > len)
7619 return NULL_TREE;
7620 rpart = native_interpret_expr (etype, ptr, size);
7621 if (!rpart)
7622 return NULL_TREE;
7623 ipart = native_interpret_expr (etype, ptr+size, size);
7624 if (!ipart)
7625 return NULL_TREE;
7626 return build_complex (type, rpart, ipart);
7630 /* Subroutine of native_interpret_expr. Interpret the contents of
7631 the buffer PTR of length LEN as a VECTOR_CST of type TYPE.
7632 If the buffer cannot be interpreted, return NULL_TREE. */
7634 static tree
7635 native_interpret_vector (tree type, const unsigned char *ptr, int len)
7637 tree etype, elem;
7638 int i, size, count;
7639 tree *elements;
7641 etype = TREE_TYPE (type);
7642 size = GET_MODE_SIZE (TYPE_MODE (etype));
7643 count = TYPE_VECTOR_SUBPARTS (type);
7644 if (size * count > len)
7645 return NULL_TREE;
7647 elements = XALLOCAVEC (tree, count);
7648 for (i = count - 1; i >= 0; i--)
7650 elem = native_interpret_expr (etype, ptr+(i*size), size);
7651 if (!elem)
7652 return NULL_TREE;
7653 elements[i] = elem;
7655 return build_vector (type, elements);
7659 /* Subroutine of fold_view_convert_expr. Interpret the contents of
7660 the buffer PTR of length LEN as a constant of type TYPE. For
7661 INTEGRAL_TYPE_P we return an INTEGER_CST, for SCALAR_FLOAT_TYPE_P
7662 we return a REAL_CST, etc... If the buffer cannot be interpreted,
7663 return NULL_TREE. */
7665 tree
7666 native_interpret_expr (tree type, const unsigned char *ptr, int len)
7668 switch (TREE_CODE (type))
7670 case INTEGER_TYPE:
7671 case ENUMERAL_TYPE:
7672 case BOOLEAN_TYPE:
7673 case POINTER_TYPE:
7674 case REFERENCE_TYPE:
7675 return native_interpret_int (type, ptr, len);
7677 case REAL_TYPE:
7678 return native_interpret_real (type, ptr, len);
7680 case FIXED_POINT_TYPE:
7681 return native_interpret_fixed (type, ptr, len);
7683 case COMPLEX_TYPE:
7684 return native_interpret_complex (type, ptr, len);
7686 case VECTOR_TYPE:
7687 return native_interpret_vector (type, ptr, len);
7689 default:
7690 return NULL_TREE;
7694 /* Returns true if we can interpret the contents of a native encoding
7695 as TYPE. */
7697 static bool
7698 can_native_interpret_type_p (tree type)
7700 switch (TREE_CODE (type))
7702 case INTEGER_TYPE:
7703 case ENUMERAL_TYPE:
7704 case BOOLEAN_TYPE:
7705 case POINTER_TYPE:
7706 case REFERENCE_TYPE:
7707 case FIXED_POINT_TYPE:
7708 case REAL_TYPE:
7709 case COMPLEX_TYPE:
7710 case VECTOR_TYPE:
7711 return true;
7712 default:
7713 return false;
7717 /* Fold a VIEW_CONVERT_EXPR of a constant expression EXPR to type
7718 TYPE at compile-time. If we're unable to perform the conversion
7719 return NULL_TREE. */
7721 static tree
7722 fold_view_convert_expr (tree type, tree expr)
7724 /* We support up to 512-bit values (for V8DFmode). */
7725 unsigned char buffer[64];
7726 int len;
7728 /* Check that the host and target are sane. */
7729 if (CHAR_BIT != 8 || BITS_PER_UNIT != 8)
7730 return NULL_TREE;
7732 len = native_encode_expr (expr, buffer, sizeof (buffer));
7733 if (len == 0)
7734 return NULL_TREE;
7736 return native_interpret_expr (type, buffer, len);
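/* What fold_view_convert_expr does at compile time is the constant
   analogue of a run-time bit-cast.  A standalone sketch (not GCC code),
   assuming 4-byte float and unsigned int, of the same
   encode-then-reinterpret idea using a byte buffer:  */

#include <string.h>

static unsigned int
example_view_convert (float f)
{
  unsigned int u;
  /* Copy the object representation; the bytes are unchanged, only
     their interpretation differs, exactly as with VIEW_CONVERT_EXPR.  */
  memcpy (&u, &f, sizeof (u));
  return u;
}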
7739 /* Build an expression for the address of T. Folds away INDIRECT_REF
7740 to avoid confusing the gimplify process. */
7742 tree
7743 build_fold_addr_expr_with_type_loc (location_t loc, tree t, tree ptrtype)
7745 /* The size of the object is not relevant when talking about its address. */
7746 if (TREE_CODE (t) == WITH_SIZE_EXPR)
7747 t = TREE_OPERAND (t, 0);
7749 if (TREE_CODE (t) == INDIRECT_REF)
7751 t = TREE_OPERAND (t, 0);
7753 if (TREE_TYPE (t) != ptrtype)
7754 t = build1_loc (loc, NOP_EXPR, ptrtype, t);
7756 else if (TREE_CODE (t) == MEM_REF
7757 && integer_zerop (TREE_OPERAND (t, 1)))
7758 return TREE_OPERAND (t, 0);
7759 else if (TREE_CODE (t) == MEM_REF
7760 && TREE_CODE (TREE_OPERAND (t, 0)) == INTEGER_CST)
7761 return fold_binary (POINTER_PLUS_EXPR, ptrtype,
7762 TREE_OPERAND (t, 0),
7763 convert_to_ptrofftype (TREE_OPERAND (t, 1)));
7764 else if (TREE_CODE (t) == VIEW_CONVERT_EXPR)
7766 t = build_fold_addr_expr_loc (loc, TREE_OPERAND (t, 0));
7768 if (TREE_TYPE (t) != ptrtype)
7769 t = fold_convert_loc (loc, ptrtype, t);
7771 else
7772 t = build1_loc (loc, ADDR_EXPR, ptrtype, t);
7774 return t;
7777 /* Build an expression for the address of T. */
7779 tree
7780 build_fold_addr_expr_loc (location_t loc, tree t)
7782 tree ptrtype = build_pointer_type (TREE_TYPE (t));
7784 return build_fold_addr_expr_with_type_loc (loc, t, ptrtype);
7787 /* Fold a unary expression of code CODE and type TYPE with operand
7788 OP0. Return the folded expression if folding is successful.
7789 Otherwise, return NULL_TREE. */
7791 tree
7792 fold_unary_loc (location_t loc, enum tree_code code, tree type, tree op0)
7794 tree tem;
7795 tree arg0;
7796 enum tree_code_class kind = TREE_CODE_CLASS (code);
7798 gcc_assert (IS_EXPR_CODE_CLASS (kind)
7799 && TREE_CODE_LENGTH (code) == 1);
7801 arg0 = op0;
7802 if (arg0)
7804 if (CONVERT_EXPR_CODE_P (code)
7805 || code == FLOAT_EXPR || code == ABS_EXPR || code == NEGATE_EXPR)
7807 /* Don't use STRIP_NOPS, because signedness of argument type
7808 matters. */
7809 STRIP_SIGN_NOPS (arg0);
7811 else
7813 /* Strip any conversions that don't change the mode. This
7814 is safe for every expression, except for a comparison
7815 expression because its signedness is derived from its
7816 operands.
7818 Note that this is done as an internal manipulation within
7819 the constant folder, in order to find the simplest
7820 representation of the arguments so that their form can be
7821 studied. In any case, the appropriate type conversions
7822 should be put back in the tree that will get out of the
7823 constant folder. */
7824 STRIP_NOPS (arg0);
7827 if (CONSTANT_CLASS_P (arg0))
7829 tree tem = const_unop (code, type, arg0);
7830 if (tem)
7832 if (TREE_TYPE (tem) != type)
7833 tem = fold_convert_loc (loc, type, tem);
7834 return tem;
7839 tem = generic_simplify (loc, code, type, op0);
7840 if (tem)
7841 return tem;
7843 if (TREE_CODE_CLASS (code) == tcc_unary)
7845 if (TREE_CODE (arg0) == COMPOUND_EXPR)
7846 return build2 (COMPOUND_EXPR, type, TREE_OPERAND (arg0, 0),
7847 fold_build1_loc (loc, code, type,
7848 fold_convert_loc (loc, TREE_TYPE (op0),
7849 TREE_OPERAND (arg0, 1))));
7850 else if (TREE_CODE (arg0) == COND_EXPR)
7852 tree arg01 = TREE_OPERAND (arg0, 1);
7853 tree arg02 = TREE_OPERAND (arg0, 2);
7854 if (! VOID_TYPE_P (TREE_TYPE (arg01)))
7855 arg01 = fold_build1_loc (loc, code, type,
7856 fold_convert_loc (loc,
7857 TREE_TYPE (op0), arg01));
7858 if (! VOID_TYPE_P (TREE_TYPE (arg02)))
7859 arg02 = fold_build1_loc (loc, code, type,
7860 fold_convert_loc (loc,
7861 TREE_TYPE (op0), arg02));
7862 tem = fold_build3_loc (loc, COND_EXPR, type, TREE_OPERAND (arg0, 0),
7863 arg01, arg02);
7865 /* If this was a conversion, and all we did was to move it
7866 inside the COND_EXPR, bring it back out. But leave it if
7867 it is a conversion from integer to integer and the
7868 result precision is no wider than a word since such a
7869 conversion is cheap and may be optimized away by combine,
7870 while it couldn't if it were outside the COND_EXPR. Then return
7871 so we don't get into an infinite recursion loop taking the
7872 conversion out and then back in. */
7874 if ((CONVERT_EXPR_CODE_P (code)
7875 || code == NON_LVALUE_EXPR)
7876 && TREE_CODE (tem) == COND_EXPR
7877 && TREE_CODE (TREE_OPERAND (tem, 1)) == code
7878 && TREE_CODE (TREE_OPERAND (tem, 2)) == code
7879 && ! VOID_TYPE_P (TREE_OPERAND (tem, 1))
7880 && ! VOID_TYPE_P (TREE_OPERAND (tem, 2))
7881 && (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 1), 0))
7882 == TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 2), 0)))
7883 && (! (INTEGRAL_TYPE_P (TREE_TYPE (tem))
7884 && (INTEGRAL_TYPE_P
7885 (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 1), 0))))
7886 && TYPE_PRECISION (TREE_TYPE (tem)) <= BITS_PER_WORD)
7887 || flag_syntax_only))
7888 tem = build1_loc (loc, code, type,
7889 build3 (COND_EXPR,
7890 TREE_TYPE (TREE_OPERAND
7891 (TREE_OPERAND (tem, 1), 0)),
7892 TREE_OPERAND (tem, 0),
7893 TREE_OPERAND (TREE_OPERAND (tem, 1), 0),
7894 TREE_OPERAND (TREE_OPERAND (tem, 2),
7895 0)));
7896 return tem;
7900 switch (code)
7902 case NON_LVALUE_EXPR:
7903 if (!maybe_lvalue_p (op0))
7904 return fold_convert_loc (loc, type, op0);
7905 return NULL_TREE;
7907 CASE_CONVERT:
7908 case FLOAT_EXPR:
7909 case FIX_TRUNC_EXPR:
7910 if (COMPARISON_CLASS_P (op0))
7912 /* If we have (type) (a CMP b) and type is an integral type, return
7913 new expression involving the new type. Canonicalize
7914 (type) (a CMP b) to (a CMP b) ? (type) true : (type) false for
7915 non-integral type.
7916 Do not fold the result as that would not simplify further, also
7917 folding again results in recursions. */
7918 if (TREE_CODE (type) == BOOLEAN_TYPE)
7919 return build2_loc (loc, TREE_CODE (op0), type,
7920 TREE_OPERAND (op0, 0),
7921 TREE_OPERAND (op0, 1));
7922 else if (!INTEGRAL_TYPE_P (type) && !VOID_TYPE_P (type)
7923 && TREE_CODE (type) != VECTOR_TYPE)
7924 return build3_loc (loc, COND_EXPR, type, op0,
7925 constant_boolean_node (true, type),
7926 constant_boolean_node (false, type));
7929 /* Handle (T *)&A.B.C for A being of type T and B and C
7930 living at offset zero. This occurs frequently in
7931 C++ upcasting and then accessing the base. */
7932 if (TREE_CODE (op0) == ADDR_EXPR
7933 && POINTER_TYPE_P (type)
7934 && handled_component_p (TREE_OPERAND (op0, 0)))
7936 HOST_WIDE_INT bitsize, bitpos;
7937 tree offset;
7938 machine_mode mode;
7939 int unsignedp, volatilep;
7940 tree base = TREE_OPERAND (op0, 0);
7941 base = get_inner_reference (base, &bitsize, &bitpos, &offset,
7942 &mode, &unsignedp, &volatilep, false);
7943 /* If the reference was to a (constant) zero offset, we can use
7944 the address of the base if it has the same base type
7945 as the result type and the pointer type is unqualified. */
7946 if (! offset && bitpos == 0
7947 && (TYPE_MAIN_VARIANT (TREE_TYPE (type))
7948 == TYPE_MAIN_VARIANT (TREE_TYPE (base)))
7949 && TYPE_QUALS (type) == TYPE_UNQUALIFIED)
7950 return fold_convert_loc (loc, type,
7951 build_fold_addr_expr_loc (loc, base));
7954 if (TREE_CODE (op0) == MODIFY_EXPR
7955 && TREE_CONSTANT (TREE_OPERAND (op0, 1))
7956 /* Detect assigning a bitfield. */
7957 && !(TREE_CODE (TREE_OPERAND (op0, 0)) == COMPONENT_REF
7958 && DECL_BIT_FIELD
7959 (TREE_OPERAND (TREE_OPERAND (op0, 0), 1))))
7961 /* Don't leave an assignment inside a conversion
7962 unless assigning a bitfield. */
7963 tem = fold_build1_loc (loc, code, type, TREE_OPERAND (op0, 1));
7964 /* First do the assignment, then return converted constant. */
7965 tem = build2_loc (loc, COMPOUND_EXPR, TREE_TYPE (tem), op0, tem);
7966 TREE_NO_WARNING (tem) = 1;
7967 TREE_USED (tem) = 1;
7968 return tem;
7971 /* Convert (T)(x & c) into (T)x & (T)c, if c is an integer
7972 constant (if x has signed type, the sign bit cannot be set
7973 in c). This folds extension into the BIT_AND_EXPR.
7974 ??? We don't do it for BOOLEAN_TYPE or ENUMERAL_TYPE because they
7975 very likely don't have maximal range for their precision and this
7976 transformation effectively doesn't preserve non-maximal ranges. */
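/* For example, with signed char X and C = 0x40 the sign bit of
   X & C is known to be clear, so extending before or after the AND
   yields the same bits and (int) (X & 0x40) can become
   (int) X & 0x40; with C = 0x80 that would not be safe, which is
   what the sign-bit test on the constant below checks.  */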
7977 if (TREE_CODE (type) == INTEGER_TYPE
7978 && TREE_CODE (op0) == BIT_AND_EXPR
7979 && TREE_CODE (TREE_OPERAND (op0, 1)) == INTEGER_CST)
7981 tree and_expr = op0;
7982 tree and0 = TREE_OPERAND (and_expr, 0);
7983 tree and1 = TREE_OPERAND (and_expr, 1);
7984 int change = 0;
7986 if (TYPE_UNSIGNED (TREE_TYPE (and_expr))
7987 || (TYPE_PRECISION (type)
7988 <= TYPE_PRECISION (TREE_TYPE (and_expr))))
7989 change = 1;
7990 else if (TYPE_PRECISION (TREE_TYPE (and1))
7991 <= HOST_BITS_PER_WIDE_INT
7992 && tree_fits_uhwi_p (and1))
7994 unsigned HOST_WIDE_INT cst;
7996 cst = tree_to_uhwi (and1);
7997 cst &= HOST_WIDE_INT_M1U
7998 << (TYPE_PRECISION (TREE_TYPE (and1)) - 1);
7999 change = (cst == 0);
8000 #ifdef LOAD_EXTEND_OP
8001 if (change
8002 && !flag_syntax_only
8003 && (LOAD_EXTEND_OP (TYPE_MODE (TREE_TYPE (and0)))
8004 == ZERO_EXTEND))
8006 tree uns = unsigned_type_for (TREE_TYPE (and0));
8007 and0 = fold_convert_loc (loc, uns, and0);
8008 and1 = fold_convert_loc (loc, uns, and1);
8010 #endif
8012 if (change)
8014 tem = force_fit_type (type, wi::to_widest (and1), 0,
8015 TREE_OVERFLOW (and1));
8016 return fold_build2_loc (loc, BIT_AND_EXPR, type,
8017 fold_convert_loc (loc, type, and0), tem);
8021 /* Convert (T1)(X p+ Y) into ((T1)X p+ Y), for pointer type,
8022 when one of the new casts will fold away. Conservatively we assume
8023 that this happens when X or Y is NOP_EXPR or Y is INTEGER_CST. */
8024 if (POINTER_TYPE_P (type)
8025 && TREE_CODE (arg0) == POINTER_PLUS_EXPR
8026 && (!TYPE_RESTRICT (type) || TYPE_RESTRICT (TREE_TYPE (arg0)))
8027 && (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
8028 || TREE_CODE (TREE_OPERAND (arg0, 0)) == NOP_EXPR
8029 || TREE_CODE (TREE_OPERAND (arg0, 1)) == NOP_EXPR))
8031 tree arg00 = TREE_OPERAND (arg0, 0);
8032 tree arg01 = TREE_OPERAND (arg0, 1);
8034 return fold_build_pointer_plus_loc
8035 (loc, fold_convert_loc (loc, type, arg00), arg01);
8038 /* Convert (T1)(~(T2)X) into ~(T1)X if T1 and T2 are integral types
8039 of the same precision, and X has an integer type not narrower than
8040 types T1 or T2, i.e. the cast (T2)X isn't an extension. */
8041 if (INTEGRAL_TYPE_P (type)
8042 && TREE_CODE (op0) == BIT_NOT_EXPR
8043 && INTEGRAL_TYPE_P (TREE_TYPE (op0))
8044 && CONVERT_EXPR_P (TREE_OPERAND (op0, 0))
8045 && TYPE_PRECISION (type) == TYPE_PRECISION (TREE_TYPE (op0)))
8047 tem = TREE_OPERAND (TREE_OPERAND (op0, 0), 0);
8048 if (INTEGRAL_TYPE_P (TREE_TYPE (tem))
8049 && TYPE_PRECISION (type) <= TYPE_PRECISION (TREE_TYPE (tem)))
8050 return fold_build1_loc (loc, BIT_NOT_EXPR, type,
8051 fold_convert_loc (loc, type, tem));
8054 /* Convert (T1)(X * Y) into (T1)X * (T1)Y if T1 is narrower than the
8055 type of X and Y (integer types only). */
8056 if (INTEGRAL_TYPE_P (type)
8057 && TREE_CODE (op0) == MULT_EXPR
8058 && INTEGRAL_TYPE_P (TREE_TYPE (op0))
8059 && TYPE_PRECISION (type) < TYPE_PRECISION (TREE_TYPE (op0)))
8061 /* Be careful not to introduce new overflows. */
8062 tree mult_type;
8063 if (TYPE_OVERFLOW_WRAPS (type))
8064 mult_type = type;
8065 else
8066 mult_type = unsigned_type_for (type);
8068 if (TYPE_PRECISION (mult_type) < TYPE_PRECISION (TREE_TYPE (op0)))
8070 tem = fold_build2_loc (loc, MULT_EXPR, mult_type,
8071 fold_convert_loc (loc, mult_type,
8072 TREE_OPERAND (op0, 0)),
8073 fold_convert_loc (loc, mult_type,
8074 TREE_OPERAND (op0, 1)));
8075 return fold_convert_loc (loc, type, tem);
8079 return NULL_TREE;
8081 case VIEW_CONVERT_EXPR:
8082 if (TREE_CODE (op0) == MEM_REF)
8083 return fold_build2_loc (loc, MEM_REF, type,
8084 TREE_OPERAND (op0, 0), TREE_OPERAND (op0, 1));
8086 return NULL_TREE;
8088 case NEGATE_EXPR:
8089 tem = fold_negate_expr (loc, arg0);
8090 if (tem)
8091 return fold_convert_loc (loc, type, tem);
8092 return NULL_TREE;
8094 case ABS_EXPR:
8095 /* Convert fabs((double)float) into (double)fabsf(float). */
8096 if (TREE_CODE (arg0) == NOP_EXPR
8097 && TREE_CODE (type) == REAL_TYPE)
8099 tree targ0 = strip_float_extensions (arg0);
8100 if (targ0 != arg0)
8101 return fold_convert_loc (loc, type,
8102 fold_build1_loc (loc, ABS_EXPR,
8103 TREE_TYPE (targ0),
8104 targ0));
8106 /* ABS_EXPR<ABS_EXPR<x>> = ABS_EXPR<x> even if flag_wrapv is on. */
8107 else if (TREE_CODE (arg0) == ABS_EXPR)
8108 return arg0;
8110 /* Strip sign ops from argument. */
8111 if (TREE_CODE (type) == REAL_TYPE)
8113 tem = fold_strip_sign_ops (arg0);
8114 if (tem)
8115 return fold_build1_loc (loc, ABS_EXPR, type,
8116 fold_convert_loc (loc, type, tem));
8118 return NULL_TREE;
8120 case CONJ_EXPR:
8121 if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
8122 return fold_convert_loc (loc, type, arg0);
8123 if (TREE_CODE (arg0) == COMPLEX_EXPR)
8125 tree itype = TREE_TYPE (type);
8126 tree rpart = fold_convert_loc (loc, itype, TREE_OPERAND (arg0, 0));
8127 tree ipart = fold_convert_loc (loc, itype, TREE_OPERAND (arg0, 1));
8128 return fold_build2_loc (loc, COMPLEX_EXPR, type, rpart,
8129 negate_expr (ipart));
8131 if (TREE_CODE (arg0) == CONJ_EXPR)
8132 return fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
8133 return NULL_TREE;
8135 case BIT_NOT_EXPR:
8136 /* Convert ~ (-A) to A - 1. */
8137 if (INTEGRAL_TYPE_P (type) && TREE_CODE (arg0) == NEGATE_EXPR)
8138 return fold_build2_loc (loc, MINUS_EXPR, type,
8139 fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0)),
8140 build_int_cst (type, 1));
8141 /* Convert ~ (A - 1) or ~ (A + -1) to -A. */
8142 else if (INTEGRAL_TYPE_P (type)
8143 && ((TREE_CODE (arg0) == MINUS_EXPR
8144 && integer_onep (TREE_OPERAND (arg0, 1)))
8145 || (TREE_CODE (arg0) == PLUS_EXPR
8146 && integer_all_onesp (TREE_OPERAND (arg0, 1)))))
8148 /* Perform the negation in ARG0's type and only then convert
8149 to TYPE so as to avoid introducing undefined behavior. */
8150 tree t = fold_build1_loc (loc, NEGATE_EXPR,
8151 TREE_TYPE (TREE_OPERAND (arg0, 0)),
8152 TREE_OPERAND (arg0, 0));
8153 return fold_convert_loc (loc, type, t);
8155 /* Convert ~(X ^ Y) to ~X ^ Y or X ^ ~Y if ~X or ~Y simplify. */
8156 else if (TREE_CODE (arg0) == BIT_XOR_EXPR
8157 && (tem = fold_unary_loc (loc, BIT_NOT_EXPR, type,
8158 fold_convert_loc (loc, type,
8159 TREE_OPERAND (arg0, 0)))))
8160 return fold_build2_loc (loc, BIT_XOR_EXPR, type, tem,
8161 fold_convert_loc (loc, type,
8162 TREE_OPERAND (arg0, 1)));
8163 else if (TREE_CODE (arg0) == BIT_XOR_EXPR
8164 && (tem = fold_unary_loc (loc, BIT_NOT_EXPR, type,
8165 fold_convert_loc (loc, type,
8166 TREE_OPERAND (arg0, 1)))))
8167 return fold_build2_loc (loc, BIT_XOR_EXPR, type,
8168 fold_convert_loc (loc, type,
8169 TREE_OPERAND (arg0, 0)), tem);
8171 return NULL_TREE;
8173 case TRUTH_NOT_EXPR:
8174 /* Note that the operand of this must be an int
8175 and its values must be 0 or 1.
8176 ("true" is a fixed value perhaps depending on the language,
8177 but we don't handle values other than 1 correctly yet.) */
8178 tem = fold_truth_not_expr (loc, arg0);
8179 if (!tem)
8180 return NULL_TREE;
8181 return fold_convert_loc (loc, type, tem);
8183 case REALPART_EXPR:
8184 if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
8185 return fold_convert_loc (loc, type, arg0);
8186 if (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
8188 tree itype = TREE_TYPE (TREE_TYPE (arg0));
8189 tem = fold_build2_loc (loc, TREE_CODE (arg0), itype,
8190 fold_build1_loc (loc, REALPART_EXPR, itype,
8191 TREE_OPERAND (arg0, 0)),
8192 fold_build1_loc (loc, REALPART_EXPR, itype,
8193 TREE_OPERAND (arg0, 1)));
8194 return fold_convert_loc (loc, type, tem);
8196 if (TREE_CODE (arg0) == CONJ_EXPR)
8198 tree itype = TREE_TYPE (TREE_TYPE (arg0));
8199 tem = fold_build1_loc (loc, REALPART_EXPR, itype,
8200 TREE_OPERAND (arg0, 0));
8201 return fold_convert_loc (loc, type, tem);
8203 if (TREE_CODE (arg0) == CALL_EXPR)
8205 tree fn = get_callee_fndecl (arg0);
8206 if (fn && DECL_BUILT_IN_CLASS (fn) == BUILT_IN_NORMAL)
8207 switch (DECL_FUNCTION_CODE (fn))
8209 CASE_FLT_FN (BUILT_IN_CEXPI):
8210 fn = mathfn_built_in (type, BUILT_IN_COS);
8211 if (fn)
8212 return build_call_expr_loc (loc, fn, 1, CALL_EXPR_ARG (arg0, 0));
8213 break;
8215 default:
8216 break;
8219 return NULL_TREE;
8221 case IMAGPART_EXPR:
8222 if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
8223 return build_zero_cst (type);
8224 if (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
8226 tree itype = TREE_TYPE (TREE_TYPE (arg0));
8227 tem = fold_build2_loc (loc, TREE_CODE (arg0), itype,
8228 fold_build1_loc (loc, IMAGPART_EXPR, itype,
8229 TREE_OPERAND (arg0, 0)),
8230 fold_build1_loc (loc, IMAGPART_EXPR, itype,
8231 TREE_OPERAND (arg0, 1)));
8232 return fold_convert_loc (loc, type, tem);
8234 if (TREE_CODE (arg0) == CONJ_EXPR)
8236 tree itype = TREE_TYPE (TREE_TYPE (arg0));
8237 tem = fold_build1_loc (loc, IMAGPART_EXPR, itype, TREE_OPERAND (arg0, 0));
8238 return fold_convert_loc (loc, type, negate_expr (tem));
8240 if (TREE_CODE (arg0) == CALL_EXPR)
8242 tree fn = get_callee_fndecl (arg0);
8243 if (fn && DECL_BUILT_IN_CLASS (fn) == BUILT_IN_NORMAL)
8244 switch (DECL_FUNCTION_CODE (fn))
8246 CASE_FLT_FN (BUILT_IN_CEXPI):
8247 fn = mathfn_built_in (type, BUILT_IN_SIN);
8248 if (fn)
8249 return build_call_expr_loc (loc, fn, 1, CALL_EXPR_ARG (arg0, 0));
8250 break;
8252 default:
8253 break;
8256 return NULL_TREE;
8258 case INDIRECT_REF:
8259 /* Fold *&X to X if X is an lvalue. */
8260 if (TREE_CODE (op0) == ADDR_EXPR)
8262 tree op00 = TREE_OPERAND (op0, 0);
8263 if ((TREE_CODE (op00) == VAR_DECL
8264 || TREE_CODE (op00) == PARM_DECL
8265 || TREE_CODE (op00) == RESULT_DECL)
8266 && !TREE_READONLY (op00))
8267 return op00;
8269 return NULL_TREE;
8271 default:
8272 return NULL_TREE;
8273 } /* switch (code) */
8277 /* If the operation was a conversion do _not_ mark a resulting constant
8278 with TREE_OVERFLOW if the original constant was not. These conversions
8279 have implementation defined behavior and retaining the TREE_OVERFLOW
8280 flag here would confuse later passes such as VRP. */
8281 tree
8282 fold_unary_ignore_overflow_loc (location_t loc, enum tree_code code,
8283 tree type, tree op0)
8285 tree res = fold_unary_loc (loc, code, type, op0);
8286 if (res
8287 && TREE_CODE (res) == INTEGER_CST
8288 && TREE_CODE (op0) == INTEGER_CST
8289 && CONVERT_EXPR_CODE_P (code))
8290 TREE_OVERFLOW (res) = TREE_OVERFLOW (op0);
8292 return res;
8295 /* Fold a binary bitwise/truth expression of code CODE and type TYPE with
8296 operands OP0 and OP1. LOC is the location of the resulting expression.
8297 ARG0 and ARG1 are the NOP-stripped (STRIP_NOPS) results of OP0 and OP1.
8298 Return the folded expression if folding is successful. Otherwise,
8299 return NULL_TREE. */
8300 static tree
8301 fold_truth_andor (location_t loc, enum tree_code code, tree type,
8302 tree arg0, tree arg1, tree op0, tree op1)
8304 tree tem;
8306 /* We only do these simplifications if we are optimizing. */
8307 if (!optimize)
8308 return NULL_TREE;
8310 /* Check for things like (A || B) && (A || C). We can convert this
8311 to A || (B && C). Note that either operator can be any of the four
8312 truth and/or operations and the transformation will still be
8313 valid. Also note that we only care about order for the
8314 ANDIF and ORIF operators. If B contains side effects, this
8315 might change the truth-value of A. */
8316 if (TREE_CODE (arg0) == TREE_CODE (arg1)
8317 && (TREE_CODE (arg0) == TRUTH_ANDIF_EXPR
8318 || TREE_CODE (arg0) == TRUTH_ORIF_EXPR
8319 || TREE_CODE (arg0) == TRUTH_AND_EXPR
8320 || TREE_CODE (arg0) == TRUTH_OR_EXPR)
8321 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (arg0, 1)))
8323 tree a00 = TREE_OPERAND (arg0, 0);
8324 tree a01 = TREE_OPERAND (arg0, 1);
8325 tree a10 = TREE_OPERAND (arg1, 0);
8326 tree a11 = TREE_OPERAND (arg1, 1);
8327 int commutative = ((TREE_CODE (arg0) == TRUTH_OR_EXPR
8328 || TREE_CODE (arg0) == TRUTH_AND_EXPR)
8329 && (code == TRUTH_AND_EXPR
8330 || code == TRUTH_OR_EXPR));
8332 if (operand_equal_p (a00, a10, 0))
8333 return fold_build2_loc (loc, TREE_CODE (arg0), type, a00,
8334 fold_build2_loc (loc, code, type, a01, a11));
8335 else if (commutative && operand_equal_p (a00, a11, 0))
8336 return fold_build2_loc (loc, TREE_CODE (arg0), type, a00,
8337 fold_build2_loc (loc, code, type, a01, a10));
8338 else if (commutative && operand_equal_p (a01, a10, 0))
8339 return fold_build2_loc (loc, TREE_CODE (arg0), type, a01,
8340 fold_build2_loc (loc, code, type, a00, a11));
8342 /* This case is tricky because we must either have commutative
8343 operators or else A10 must not have side-effects. */
8345 else if ((commutative || ! TREE_SIDE_EFFECTS (a10))
8346 && operand_equal_p (a01, a11, 0))
8347 return fold_build2_loc (loc, TREE_CODE (arg0), type,
8348 fold_build2_loc (loc, code, type, a00, a10),
8349 a01);
8352 /* See if we can build a range comparison. */
8353 if (0 != (tem = fold_range_test (loc, code, type, op0, op1)))
8354 return tem;
8356 if ((code == TRUTH_ANDIF_EXPR && TREE_CODE (arg0) == TRUTH_ORIF_EXPR)
8357 || (code == TRUTH_ORIF_EXPR && TREE_CODE (arg0) == TRUTH_ANDIF_EXPR))
8359 tem = merge_truthop_with_opposite_arm (loc, arg0, arg1, true);
8360 if (tem)
8361 return fold_build2_loc (loc, code, type, tem, arg1);
8364 if ((code == TRUTH_ANDIF_EXPR && TREE_CODE (arg1) == TRUTH_ORIF_EXPR)
8365 || (code == TRUTH_ORIF_EXPR && TREE_CODE (arg1) == TRUTH_ANDIF_EXPR))
8367 tem = merge_truthop_with_opposite_arm (loc, arg1, arg0, false);
8368 if (tem)
8369 return fold_build2_loc (loc, code, type, arg0, tem);
8372 /* Check for the possibility of merging component references. If our
8373 lhs is another similar operation, try to merge its rhs with our
8374 rhs. Then try to merge our lhs and rhs. */
8375 if (TREE_CODE (arg0) == code
8376 && 0 != (tem = fold_truth_andor_1 (loc, code, type,
8377 TREE_OPERAND (arg0, 1), arg1)))
8378 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0), tem);
8380 if ((tem = fold_truth_andor_1 (loc, code, type, arg0, arg1)) != 0)
8381 return tem;
8383 if (LOGICAL_OP_NON_SHORT_CIRCUIT
8384 && (code == TRUTH_AND_EXPR
8385 || code == TRUTH_ANDIF_EXPR
8386 || code == TRUTH_OR_EXPR
8387 || code == TRUTH_ORIF_EXPR))
8389 enum tree_code ncode, icode;
8391 ncode = (code == TRUTH_ANDIF_EXPR || code == TRUTH_AND_EXPR)
8392 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR;
8393 icode = ncode == TRUTH_AND_EXPR ? TRUTH_ANDIF_EXPR : TRUTH_ORIF_EXPR;
8395 /* Transform ((A AND-IF B) AND[-IF] C) into (A AND-IF (B AND C)),
8396 or ((A OR-IF B) OR[-IF] C) into (A OR-IF (B OR C))
8397 We don't want to pack more than two leaves into a non-IF AND/OR
8398 expression.
8399 If the tree code of the left-hand operand isn't an AND/OR-IF code
8400 and is not equal to IF-CODE, then we don't want to add the
8401 right-hand operand. If the inner right-hand side of the left-hand
8402 operand has side-effects, or isn't simple, then we can't add to it,
8403 as otherwise we might destroy the if-sequence. */
8404 if (TREE_CODE (arg0) == icode
8405 && simple_operand_p_2 (arg1)
8406 /* Needed for sequence points to handle trappings, and
8407 side-effects. */
8408 && simple_operand_p_2 (TREE_OPERAND (arg0, 1)))
8410 tem = fold_build2_loc (loc, ncode, type, TREE_OPERAND (arg0, 1),
8411 arg1);
8412 return fold_build2_loc (loc, icode, type, TREE_OPERAND (arg0, 0),
8413 tem);
8415 /* Same as above but for (A AND[-IF] (B AND-IF C)) -> ((A AND B) AND-IF C),
8416 or (A OR[-IF] (B OR-IF C)) -> ((A OR B) OR-IF C). */
8417 else if (TREE_CODE (arg1) == icode
8418 && simple_operand_p_2 (arg0)
8419 /* Needed for sequence points to handle trappings, and
8420 side-effects. */
8421 && simple_operand_p_2 (TREE_OPERAND (arg1, 0)))
8423 tem = fold_build2_loc (loc, ncode, type,
8424 arg0, TREE_OPERAND (arg1, 0));
8425 return fold_build2_loc (loc, icode, type, tem,
8426 TREE_OPERAND (arg1, 1));
8428 /* Transform (A AND-IF B) into (A AND B), or (A OR-IF B)
8429 into (A OR B).
8430 For sequence point consistency, we need to check for trapping,
8431 and side-effects. */
8432 else if (code == icode && simple_operand_p_2 (arg0)
8433 && simple_operand_p_2 (arg1))
8434 return fold_build2_loc (loc, ncode, type, arg0, arg1);
8437 return NULL_TREE;
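/* The (A || B) && (A || C) -> A || (B && C) rewrite above is a plain
   boolean identity once side effects are excluded.  A standalone
   sketch (not GCC code) that verifies it exhaustively:  */

#include <assert.h>

static void
example_check_andor_distribution (void)
{
  int a, b, c;
  for (a = 0; a <= 1; a++)
    for (b = 0; b <= 1; b++)
      for (c = 0; c <= 1; c++)
        assert (((a || b) && (a || c)) == (a || (b && c)));
}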
8440 /* Fold a binary expression of code CODE and type TYPE with operands
8441 OP0 and OP1, containing either a MIN-MAX or a MAX-MIN combination.
8442 Return the folded expression if folding is successful. Otherwise,
8443 return NULL_TREE. */
8445 static tree
8446 fold_minmax (location_t loc, enum tree_code code, tree type, tree op0, tree op1)
8448 enum tree_code compl_code;
8450 if (code == MIN_EXPR)
8451 compl_code = MAX_EXPR;
8452 else if (code == MAX_EXPR)
8453 compl_code = MIN_EXPR;
8454 else
8455 gcc_unreachable ();
8457 /* MIN (MAX (a, b), b) == b. */
8458 if (TREE_CODE (op0) == compl_code
8459 && operand_equal_p (TREE_OPERAND (op0, 1), op1, 0))
8460 return omit_one_operand_loc (loc, type, op1, TREE_OPERAND (op0, 0));
8462 /* MIN (MAX (b, a), b) == b. */
8463 if (TREE_CODE (op0) == compl_code
8464 && operand_equal_p (TREE_OPERAND (op0, 0), op1, 0)
8465 && reorder_operands_p (TREE_OPERAND (op0, 1), op1))
8466 return omit_one_operand_loc (loc, type, op1, TREE_OPERAND (op0, 1));
8468 /* MIN (a, MAX (a, b)) == a. */
8469 if (TREE_CODE (op1) == compl_code
8470 && operand_equal_p (op0, TREE_OPERAND (op1, 0), 0)
8471 && reorder_operands_p (op0, TREE_OPERAND (op1, 1)))
8472 return omit_one_operand_loc (loc, type, op0, TREE_OPERAND (op1, 1));
8474 /* MIN (a, MAX (b, a)) == a. */
8475 if (TREE_CODE (op1) == compl_code
8476 && operand_equal_p (op0, TREE_OPERAND (op1, 1), 0)
8477 && reorder_operands_p (op0, TREE_OPERAND (op1, 0)))
8478 return omit_one_operand_loc (loc, type, op0, TREE_OPERAND (op1, 0));
8480 return NULL_TREE;
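/* The identities above are easy to sanity-check on plain integers.
   A standalone sketch (not GCC code) for MIN (MAX (a, b), b) == b and
   MIN (a, MAX (a, b)) == a:  */

#include <assert.h>

static int example_min (int x, int y) { return x < y ? x : y; }
static int example_max (int x, int y) { return x > y ? x : y; }

static void
example_check_minmax (int a, int b)
{
  assert (example_min (example_max (a, b), b) == b);
  assert (example_min (a, example_max (a, b)) == a);
}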
8483 /* Helper that tries to canonicalize the comparison ARG0 CODE ARG1
8484 by changing CODE to reduce the magnitude of constants involved in
8485 ARG0 of the comparison.
8486 Returns a canonicalized comparison tree if a simplification was
8487 possible, otherwise returns NULL_TREE.
8488 Set *STRICT_OVERFLOW_P to true if the canonicalization is only
8489 valid if signed overflow is undefined. */
8491 static tree
8492 maybe_canonicalize_comparison_1 (location_t loc, enum tree_code code, tree type,
8493 tree arg0, tree arg1,
8494 bool *strict_overflow_p)
8496 enum tree_code code0 = TREE_CODE (arg0);
8497 tree t, cst0 = NULL_TREE;
8498 int sgn0;
8499 bool swap = false;
8501 /* Match A +- CST code arg1 and CST code arg1. We can change the
8502 first form only if overflow is undefined. */
8503 if (!(((ANY_INTEGRAL_TYPE_P (TREE_TYPE (arg0))
8504 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0)))
8505 /* In principle pointers also have undefined overflow behavior,
8506 but that causes problems elsewhere. */
8507 && !POINTER_TYPE_P (TREE_TYPE (arg0))
8508 && (code0 == MINUS_EXPR
8509 || code0 == PLUS_EXPR)
8510 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
8511 || code0 == INTEGER_CST))
8512 return NULL_TREE;
8514 /* Identify the constant in arg0 and its sign. */
8515 if (code0 == INTEGER_CST)
8516 cst0 = arg0;
8517 else
8518 cst0 = TREE_OPERAND (arg0, 1);
8519 sgn0 = tree_int_cst_sgn (cst0);
8521 /* Overflowed constants and zero will cause problems. */
8522 if (integer_zerop (cst0)
8523 || TREE_OVERFLOW (cst0))
8524 return NULL_TREE;
8526 /* See if we can reduce the magnitude of the constant in
8527 arg0 by changing the comparison code. */
8528 if (code0 == INTEGER_CST)
8530 /* CST <= arg1 -> CST-1 < arg1. */
8531 if (code == LE_EXPR && sgn0 == 1)
8532 code = LT_EXPR;
8533 /* -CST < arg1 -> -CST-1 <= arg1. */
8534 else if (code == LT_EXPR && sgn0 == -1)
8535 code = LE_EXPR;
8536 /* CST > arg1 -> CST-1 >= arg1. */
8537 else if (code == GT_EXPR && sgn0 == 1)
8538 code = GE_EXPR;
8539 /* -CST >= arg1 -> -CST-1 > arg1. */
8540 else if (code == GE_EXPR && sgn0 == -1)
8541 code = GT_EXPR;
8542 else
8543 return NULL_TREE;
8544 /* arg1 code' CST' might be more canonical. */
8545 swap = true;
8547 else
8549 /* A - CST < arg1 -> A - CST-1 <= arg1. */
8550 if (code == LT_EXPR
8551 && code0 == ((sgn0 == -1) ? PLUS_EXPR : MINUS_EXPR))
8552 code = LE_EXPR;
8553 /* A + CST > arg1 -> A + CST-1 >= arg1. */
8554 else if (code == GT_EXPR
8555 && code0 == ((sgn0 == -1) ? MINUS_EXPR : PLUS_EXPR))
8556 code = GE_EXPR;
8557 /* A + CST <= arg1 -> A + CST-1 < arg1. */
8558 else if (code == LE_EXPR
8559 && code0 == ((sgn0 == -1) ? MINUS_EXPR : PLUS_EXPR))
8560 code = LT_EXPR;
8561 /* A - CST >= arg1 -> A - CST-1 > arg1. */
8562 else if (code == GE_EXPR
8563 && code0 == ((sgn0 == -1) ? PLUS_EXPR : MINUS_EXPR))
8564 code = GT_EXPR;
8565 else
8566 return NULL_TREE;
8567 *strict_overflow_p = true;
8570 /* Now build the constant reduced in magnitude. But not if that
8571 would produce one outside of its type's range. */
8572 if (INTEGRAL_TYPE_P (TREE_TYPE (cst0))
8573 && ((sgn0 == 1
8574 && TYPE_MIN_VALUE (TREE_TYPE (cst0))
8575 && tree_int_cst_equal (cst0, TYPE_MIN_VALUE (TREE_TYPE (cst0))))
8576 || (sgn0 == -1
8577 && TYPE_MAX_VALUE (TREE_TYPE (cst0))
8578 && tree_int_cst_equal (cst0, TYPE_MAX_VALUE (TREE_TYPE (cst0))))))
8579 /* We cannot swap the comparison here as that would cause us to
8580 endlessly recurse. */
8581 return NULL_TREE;
8583 t = int_const_binop (sgn0 == -1 ? PLUS_EXPR : MINUS_EXPR,
8584 cst0, build_int_cst (TREE_TYPE (cst0), 1));
8585 if (code0 != INTEGER_CST)
8586 t = fold_build2_loc (loc, code0, TREE_TYPE (arg0), TREE_OPERAND (arg0, 0), t);
8587 t = fold_convert (TREE_TYPE (arg1), t);
8589 /* If swapping might yield a more canonical form, do so. */
8590 if (swap)
8591 return fold_build2_loc (loc, swap_tree_comparison (code), type, arg1, t);
8592 else
8593 return fold_build2_loc (loc, code, type, t, arg1);
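/* The code reductions above rest on simple integer facts, e.g.
   CST <= x is the same as CST - 1 < x whenever CST - 1 does not wrap,
   which is what the TYPE_MIN_VALUE/TYPE_MAX_VALUE guard above ensures.
   A standalone sketch (not GCC code) of that equivalence:  */

#include <assert.h>
#include <limits.h>

static void
example_check_reduced_magnitude (int cst, int x)
{
  if (cst > INT_MIN)		/* Guard: CST - 1 must not wrap.  */
    assert ((cst <= x) == (cst - 1 < x));
}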
8596 /* Canonicalize the comparison ARG0 CODE ARG1 with type TYPE with undefined
8597 overflow further. Try to decrease the magnitude of constants involved
8598 by changing LE_EXPR and GE_EXPR to LT_EXPR and GT_EXPR or vice versa
8599 and put sole constants at the second argument position.
8600 Returns the canonicalized tree if changed, otherwise NULL_TREE. */
8602 static tree
8603 maybe_canonicalize_comparison (location_t loc, enum tree_code code, tree type,
8604 tree arg0, tree arg1)
8606 tree t;
8607 bool strict_overflow_p;
8608 const char * const warnmsg = G_("assuming signed overflow does not occur "
8609 "when reducing constant in comparison");
8611 /* Try canonicalization by simplifying arg0. */
8612 strict_overflow_p = false;
8613 t = maybe_canonicalize_comparison_1 (loc, code, type, arg0, arg1,
8614 &strict_overflow_p);
8615 if (t)
8617 if (strict_overflow_p)
8618 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_MAGNITUDE);
8619 return t;
8622 /* Try canonicalization by simplifying arg1 using the swapped
8623 comparison. */
8624 code = swap_tree_comparison (code);
8625 strict_overflow_p = false;
8626 t = maybe_canonicalize_comparison_1 (loc, code, type, arg1, arg0,
8627 &strict_overflow_p);
8628 if (t && strict_overflow_p)
8629 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_MAGNITUDE);
8630 return t;
8633 /* Return whether BASE + OFFSET + BITPOS may wrap around the address
8634 space. This is used to avoid issuing overflow warnings for
8635 expressions like &p->x which cannot wrap. */
8637 static bool
8638 pointer_may_wrap_p (tree base, tree offset, HOST_WIDE_INT bitpos)
8640 if (!POINTER_TYPE_P (TREE_TYPE (base)))
8641 return true;
8643 if (bitpos < 0)
8644 return true;
8646 wide_int wi_offset;
8647 int precision = TYPE_PRECISION (TREE_TYPE (base));
8648 if (offset == NULL_TREE)
8649 wi_offset = wi::zero (precision);
8650 else if (TREE_CODE (offset) != INTEGER_CST || TREE_OVERFLOW (offset))
8651 return true;
8652 else
8653 wi_offset = offset;
8655 bool overflow;
8656 wide_int units = wi::shwi (bitpos / BITS_PER_UNIT, precision);
8657 wide_int total = wi::add (wi_offset, units, UNSIGNED, &overflow);
8658 if (overflow)
8659 return true;
8661 if (!wi::fits_uhwi_p (total))
8662 return true;
8664 HOST_WIDE_INT size = int_size_in_bytes (TREE_TYPE (TREE_TYPE (base)));
8665 if (size <= 0)
8666 return true;
8668 /* We can do slightly better for SIZE if we have an ADDR_EXPR of an
8669 array. */
8670 if (TREE_CODE (base) == ADDR_EXPR)
8672 HOST_WIDE_INT base_size;
8674 base_size = int_size_in_bytes (TREE_TYPE (TREE_OPERAND (base, 0)));
8675 if (base_size > 0 && size < base_size)
8676 size = base_size;
8679 return total.to_uhwi () > (unsigned HOST_WIDE_INT) size;
8682 /* Return the HOST_WIDE_INT least significant bits of T, which must
8683 be an INTEGER_CST of sizetype kind. This makes sure to properly
8684 sign-extend the constant. */
8686 static HOST_WIDE_INT
8687 size_low_cst (const_tree t)
8689 HOST_WIDE_INT w = TREE_INT_CST_ELT (t, 0);
8690 int prec = TYPE_PRECISION (TREE_TYPE (t));
8691 if (prec < HOST_BITS_PER_WIDE_INT)
8692 return sext_hwi (w, prec);
8693 return w;
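/* A standalone sketch (not GCC code; a two's complement host is
   assumed) of the sign extension sext_hwi performs above: keep the low
   PREC bits of W and propagate bit PREC-1 through the upper bits using
   the well-defined unsigned xor-and-subtract idiom.  */

static long long
example_sext (unsigned long long w, int prec)
{
  unsigned long long sign = 1ULL << (prec - 1);
  if (prec < 64)
    w &= (1ULL << prec) - 1;	/* Keep only the low PREC bits.  */
  return (long long) ((w ^ sign) - sign);
}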
8696 /* Subroutine of fold_binary. This routine performs all of the
8697 transformations that are common to the equality/inequality
8698 operators (EQ_EXPR and NE_EXPR) and the ordering operators
8699 (LT_EXPR, LE_EXPR, GE_EXPR and GT_EXPR). Callers other than
8700 fold_binary itself should go through fold_binary. Fold a comparison with
8701 tree code CODE and type TYPE with operands OP0 and OP1. Return
8702 the folded comparison or NULL_TREE. */
8704 static tree
8705 fold_comparison (location_t loc, enum tree_code code, tree type,
8706 tree op0, tree op1)
8708 const bool equality_code = (code == EQ_EXPR || code == NE_EXPR);
8709 tree arg0, arg1, tem;
8711 arg0 = op0;
8712 arg1 = op1;
8714 STRIP_SIGN_NOPS (arg0);
8715 STRIP_SIGN_NOPS (arg1);
8717 /* Transform comparisons of the form X +- C1 CMP C2 to X CMP C2 -+ C1. */
8718 if ((TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
8719 && (equality_code
8720 || (ANY_INTEGRAL_TYPE_P (TREE_TYPE (arg0))
8721 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0))))
8722 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
8723 && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1))
8724 && TREE_CODE (arg1) == INTEGER_CST
8725 && !TREE_OVERFLOW (arg1))
8727 const enum tree_code
8728 reverse_op = TREE_CODE (arg0) == PLUS_EXPR ? MINUS_EXPR : PLUS_EXPR;
8729 tree const1 = TREE_OPERAND (arg0, 1);
8730 tree const2 = fold_convert_loc (loc, TREE_TYPE (const1), arg1);
8731 tree variable = TREE_OPERAND (arg0, 0);
8732 tree new_const = int_const_binop (reverse_op, const2, const1);
8734 /* If the constant operation overflowed this can be
8735 simplified as a comparison against INT_MAX/INT_MIN. */
8736 if (TREE_OVERFLOW (new_const)
8737 && !TYPE_OVERFLOW_WRAPS (TREE_TYPE (arg0)))
8739 int const1_sgn = tree_int_cst_sgn (const1);
8740 enum tree_code code2 = code;
8742 /* Get the sign of the constant on the lhs if the
8743 operation were VARIABLE + CONST1. */
8744 if (TREE_CODE (arg0) == MINUS_EXPR)
8745 const1_sgn = -const1_sgn;
8747 /* The sign of the constant determines if we overflowed
8748 INT_MAX (const1_sgn == -1) or INT_MIN (const1_sgn == 1).
8749 Canonicalize to the INT_MIN overflow by swapping the comparison
8750 if necessary. */
8751 if (const1_sgn == -1)
8752 code2 = swap_tree_comparison (code);
8754 /* We now can look at the canonicalized case
8755 VARIABLE + 1 CODE2 INT_MIN
8756 and decide on the result. */
8757 switch (code2)
8759 case EQ_EXPR:
8760 case LT_EXPR:
8761 case LE_EXPR:
8762 return
8763 omit_one_operand_loc (loc, type, boolean_false_node, variable);
8765 case NE_EXPR:
8766 case GE_EXPR:
8767 case GT_EXPR:
8768 return
8769 omit_one_operand_loc (loc, type, boolean_true_node, variable);
8771 default:
8772 gcc_unreachable ();
8775 else
8777 if (!equality_code)
8778 fold_overflow_warning ("assuming signed overflow does not occur "
8779 "when changing X +- C1 cmp C2 to "
8780 "X cmp C2 -+ C1",
8781 WARN_STRICT_OVERFLOW_COMPARISON);
8782 return fold_build2_loc (loc, code, type, variable, new_const);
8786 /* Transform comparisons of the form X - Y CMP 0 to X CMP Y. */
8787 if (TREE_CODE (arg0) == MINUS_EXPR
8788 && equality_code
8789 && integer_zerop (arg1))
8791 /* ??? The transformation is valid for the other operators if overflow
8792 is undefined for the type, but performing it here badly interacts
8793 with the transformation in fold_cond_expr_with_comparison which
8794 attempts to synthesize ABS_EXPR. */
8799 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0),
8800 TREE_OPERAND (arg0, 1));
8803 /* For comparisons of pointers we can decompose them to a compile time
8804 comparison of the base objects and the offsets into the object.
8805 This requires at least one operand being an ADDR_EXPR or a
8806 POINTER_PLUS_EXPR to do more than the operand_equal_p test below. */
8807 if (POINTER_TYPE_P (TREE_TYPE (arg0))
8808 && (TREE_CODE (arg0) == ADDR_EXPR
8809 || TREE_CODE (arg1) == ADDR_EXPR
8810 || TREE_CODE (arg0) == POINTER_PLUS_EXPR
8811 || TREE_CODE (arg1) == POINTER_PLUS_EXPR))
8813 tree base0, base1, offset0 = NULL_TREE, offset1 = NULL_TREE;
8814 HOST_WIDE_INT bitsize, bitpos0 = 0, bitpos1 = 0;
8815 machine_mode mode;
8816 int volatilep, unsignedp;
8817 bool indirect_base0 = false, indirect_base1 = false;
8819 /* Get base and offset for the access. Strip ADDR_EXPR for
8820 get_inner_reference, but put it back by stripping INDIRECT_REF
8821 off the base object if possible. indirect_baseN will be true
8822 if baseN is not an address but refers to the object itself. */
8823 base0 = arg0;
8824 if (TREE_CODE (arg0) == ADDR_EXPR)
8826 base0 = get_inner_reference (TREE_OPERAND (arg0, 0),
8827 &bitsize, &bitpos0, &offset0, &mode,
8828 &unsignedp, &volatilep, false);
8829 if (TREE_CODE (base0) == INDIRECT_REF)
8830 base0 = TREE_OPERAND (base0, 0);
8831 else
8832 indirect_base0 = true;
8834 else if (TREE_CODE (arg0) == POINTER_PLUS_EXPR)
8836 base0 = TREE_OPERAND (arg0, 0);
8837 STRIP_SIGN_NOPS (base0);
8838 if (TREE_CODE (base0) == ADDR_EXPR)
8840 base0 = TREE_OPERAND (base0, 0);
8841 indirect_base0 = true;
8843 offset0 = TREE_OPERAND (arg0, 1);
8844 if (tree_fits_shwi_p (offset0))
8846 HOST_WIDE_INT off = size_low_cst (offset0);
8847 if ((HOST_WIDE_INT) (((unsigned HOST_WIDE_INT) off)
8848 * BITS_PER_UNIT)
8849 / BITS_PER_UNIT == (HOST_WIDE_INT) off)
8851 bitpos0 = off * BITS_PER_UNIT;
8852 offset0 = NULL_TREE;
8857 base1 = arg1;
8858 if (TREE_CODE (arg1) == ADDR_EXPR)
8860 base1 = get_inner_reference (TREE_OPERAND (arg1, 0),
8861 &bitsize, &bitpos1, &offset1, &mode,
8862 &unsignedp, &volatilep, false);
8863 if (TREE_CODE (base1) == INDIRECT_REF)
8864 base1 = TREE_OPERAND (base1, 0);
8865 else
8866 indirect_base1 = true;
8868 else if (TREE_CODE (arg1) == POINTER_PLUS_EXPR)
8870 base1 = TREE_OPERAND (arg1, 0);
8871 STRIP_SIGN_NOPS (base1);
8872 if (TREE_CODE (base1) == ADDR_EXPR)
8874 base1 = TREE_OPERAND (base1, 0);
8875 indirect_base1 = true;
8877 offset1 = TREE_OPERAND (arg1, 1);
8878 if (tree_fits_shwi_p (offset1))
8880 HOST_WIDE_INT off = size_low_cst (offset1);
8881 if ((HOST_WIDE_INT) (((unsigned HOST_WIDE_INT) off)
8882 * BITS_PER_UNIT)
8883 / BITS_PER_UNIT == (HOST_WIDE_INT) off)
8885 bitpos1 = off * BITS_PER_UNIT;
8886 offset1 = NULL_TREE;
8891 /* A local variable can never be pointed to by
8892 the default SSA name of an incoming parameter. */
8893 if ((TREE_CODE (arg0) == ADDR_EXPR
8894 && indirect_base0
8895 && TREE_CODE (base0) == VAR_DECL
8896 && auto_var_in_fn_p (base0, current_function_decl)
8897 && !indirect_base1
8898 && TREE_CODE (base1) == SSA_NAME
8899 && SSA_NAME_IS_DEFAULT_DEF (base1)
8900 && TREE_CODE (SSA_NAME_VAR (base1)) == PARM_DECL)
8901 || (TREE_CODE (arg1) == ADDR_EXPR
8902 && indirect_base1
8903 && TREE_CODE (base1) == VAR_DECL
8904 && auto_var_in_fn_p (base1, current_function_decl)
8905 && !indirect_base0
8906 && TREE_CODE (base0) == SSA_NAME
8907 && SSA_NAME_IS_DEFAULT_DEF (base0)
8908 && TREE_CODE (SSA_NAME_VAR (base0)) == PARM_DECL))
8910 if (code == NE_EXPR)
8911 return constant_boolean_node (1, type);
8912 else if (code == EQ_EXPR)
8913 return constant_boolean_node (0, type);
8915 /* If we have equivalent bases we might be able to simplify. */
8916 else if (indirect_base0 == indirect_base1
8917 && operand_equal_p (base0, base1, 0))
8919 /* We can fold this expression to a constant if the non-constant
8920 offset parts are equal. */
8921 if ((offset0 == offset1
8922 || (offset0 && offset1
8923 && operand_equal_p (offset0, offset1, 0)))
8924 && (code == EQ_EXPR
8925 || code == NE_EXPR
8926 || (indirect_base0 && DECL_P (base0))
8927 || POINTER_TYPE_OVERFLOW_UNDEFINED))
8930 if (!equality_code
8931 && bitpos0 != bitpos1
8932 && (pointer_may_wrap_p (base0, offset0, bitpos0)
8933 || pointer_may_wrap_p (base1, offset1, bitpos1)))
8934 fold_overflow_warning (("assuming pointer wraparound does not "
8935 "occur when comparing P +- C1 with "
8936 "P +- C2"),
8937 WARN_STRICT_OVERFLOW_CONDITIONAL);
8939 switch (code)
8941 case EQ_EXPR:
8942 return constant_boolean_node (bitpos0 == bitpos1, type);
8943 case NE_EXPR:
8944 return constant_boolean_node (bitpos0 != bitpos1, type);
8945 case LT_EXPR:
8946 return constant_boolean_node (bitpos0 < bitpos1, type);
8947 case LE_EXPR:
8948 return constant_boolean_node (bitpos0 <= bitpos1, type);
8949 case GE_EXPR:
8950 return constant_boolean_node (bitpos0 >= bitpos1, type);
8951 case GT_EXPR:
8952 return constant_boolean_node (bitpos0 > bitpos1, type);
8953 default:;
8956 /* We can simplify the comparison to a comparison of the variable
8957 offset parts if the constant offset parts are equal.
8958 Be careful to use signed sizetype here because otherwise we
8959 mess with array offsets in the wrong way. This is possible
8960 because pointer arithmetic is restricted to remain within an
8961 object and overflow on pointer differences is undefined as of
8962 6.5.6/8 and /9 with respect to the signed ptrdiff_t. */
8963 else if (bitpos0 == bitpos1
8964 && (equality_code
8965 || (indirect_base0 && DECL_P (base0))
8966 || POINTER_TYPE_OVERFLOW_UNDEFINED))
8968 /* By converting to signed sizetype we cover middle-end pointer
8969 arithmetic, which operates on unsigned pointer types of sizetype
8970 precision, and ARRAY_REF offsets, which are properly sign- or
8971 zero-extended from their type in case it is narrower than
8972 sizetype. */
8973 if (offset0 == NULL_TREE)
8974 offset0 = build_int_cst (ssizetype, 0);
8975 else
8976 offset0 = fold_convert_loc (loc, ssizetype, offset0);
8977 if (offset1 == NULL_TREE)
8978 offset1 = build_int_cst (ssizetype, 0);
8979 else
8980 offset1 = fold_convert_loc (loc, ssizetype, offset1);
8982 if (!equality_code
8983 && (pointer_may_wrap_p (base0, offset0, bitpos0)
8984 || pointer_may_wrap_p (base1, offset1, bitpos1)))
8985 fold_overflow_warning (("assuming pointer wraparound does not "
8986 "occur when comparing P +- C1 with "
8987 "P +- C2"),
8988 WARN_STRICT_OVERFLOW_COMPARISON);
8990 return fold_build2_loc (loc, code, type, offset0, offset1);
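/* Example, assuming the usual lowering of p + i for int *p to
   p p+ (sizetype) i * 4: the comparison p + i == p + j has equal
   bases and bit positions, so the code above folds it to
   (ssizetype) (i * 4) == (ssizetype) (j * 4).  */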
8993 /* For non-equal bases we can simplify if they are addresses
8994 of declarations with different addresses. */
8995 else if (indirect_base0 && indirect_base1
8996 /* We know that !operand_equal_p (base0, base1, 0)
8997 because the if condition was false. But make
8998 sure the two decls are not the same. */
8999 && base0 != base1
9000 && TREE_CODE (arg0) == ADDR_EXPR
9001 && TREE_CODE (arg1) == ADDR_EXPR
9002 && DECL_P (base0)
9003 && DECL_P (base1)
9004 /* Watch for aliases. */
9005 && (!decl_in_symtab_p (base0)
9006 || !decl_in_symtab_p (base1)
9007 || !symtab_node::get_create (base0)->equal_address_to
9008 (symtab_node::get_create (base1))))
9010 if (code == EQ_EXPR)
9011 return omit_two_operands_loc (loc, type, boolean_false_node,
9012 arg0, arg1);
9013 else if (code == NE_EXPR)
9014 return omit_two_operands_loc (loc, type, boolean_true_node,
9015 arg0, arg1);
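/* Example: for distinct declarations
   int x, y;
   the address test &x == &y folds to false and &x != &y to true,
   provided the symtab check above shows neither decl can be an
   alias of the other.  */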
9017 /* For equal offsets we can simplify to a comparison of the
9018 base addresses. */
9019 else if (bitpos0 == bitpos1
9020 && (indirect_base0
9021 ? base0 != TREE_OPERAND (arg0, 0) : base0 != arg0)
9022 && (indirect_base1
9023 ? base1 != TREE_OPERAND (arg1, 0) : base1 != arg1)
9024 && ((offset0 == offset1)
9025 || (offset0 && offset1
9026 && operand_equal_p (offset0, offset1, 0))))
9028 if (indirect_base0)
9029 base0 = build_fold_addr_expr_loc (loc, base0);
9030 if (indirect_base1)
9031 base1 = build_fold_addr_expr_loc (loc, base1);
9032 return fold_build2_loc (loc, code, type, base0, base1);
9036 /* Transform comparisons of the form X +- C1 CMP Y +- C2 to
9037 X CMP Y +- C2 +- C1 for signed X, Y. This is valid if
9038 the resulting offset is smaller in absolute value than the
9039 original one and has the same sign. */
9040 if (ANY_INTEGRAL_TYPE_P (TREE_TYPE (arg0))
9041 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0))
9042 && (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
9043 && (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
9044 && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1)))
9045 && (TREE_CODE (arg1) == PLUS_EXPR || TREE_CODE (arg1) == MINUS_EXPR)
9046 && (TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
9047 && !TREE_OVERFLOW (TREE_OPERAND (arg1, 1))))
9049 tree const1 = TREE_OPERAND (arg0, 1);
9050 tree const2 = TREE_OPERAND (arg1, 1);
9051 tree variable1 = TREE_OPERAND (arg0, 0);
9052 tree variable2 = TREE_OPERAND (arg1, 0);
9053 tree cst;
9054 const char * const warnmsg = G_("assuming signed overflow does not "
9055 "occur when combining constants around "
9056 "a comparison");
9058 /* Put the constant on the side where it doesn't overflow and is
9059 of lower absolute value than, and of the same sign as, before. */
9060 cst = int_const_binop (TREE_CODE (arg0) == TREE_CODE (arg1)
9061 ? MINUS_EXPR : PLUS_EXPR,
9062 const2, const1);
9063 if (!TREE_OVERFLOW (cst)
9064 && tree_int_cst_compare (const2, cst) == tree_int_cst_sgn (const2)
9065 && tree_int_cst_sgn (cst) == tree_int_cst_sgn (const2))
9067 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_COMPARISON);
9068 return fold_build2_loc (loc, code, type,
9069 variable1,
9070 fold_build2_loc (loc, TREE_CODE (arg1),
9071 TREE_TYPE (arg1),
9072 variable2, cst));
9075 cst = int_const_binop (TREE_CODE (arg0) == TREE_CODE (arg1)
9076 ? MINUS_EXPR : PLUS_EXPR,
9077 const1, const2);
9078 if (!TREE_OVERFLOW (cst)
9079 && tree_int_cst_compare (const1, cst) == tree_int_cst_sgn (const1)
9080 && tree_int_cst_sgn (cst) == tree_int_cst_sgn (const1))
9082 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_COMPARISON);
9083 return fold_build2_loc (loc, code, type,
9084 fold_build2_loc (loc, TREE_CODE (arg0),
9085 TREE_TYPE (arg0),
9086 variable1, cst),
9087 variable2);
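/* Examples: for x + 2 < y + 5 the first attempt computes
   cst = 5 - 2 = 3, which keeps the sign of 5 and has smaller
   magnitude, giving x < y + 3.  For x + 9 < y + 5 the first
   attempt produces cst = -4 (sign change, rejected), but the
   second produces cst = 9 - 5 = 4, giving x + 4 < y.  Both rely
   on signed overflow being undefined.  */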
9091 /* Transform comparisons of the form X * C1 CMP 0 to X CMP 0 in the
9092 signed arithmetic case. That form is created by the compiler
9093 often enough for folding it to be of value. One example is in
9094 computing loop trip counts after Operator Strength Reduction. */
9095 if (ANY_INTEGRAL_TYPE_P (TREE_TYPE (arg0))
9096 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0))
9097 && TREE_CODE (arg0) == MULT_EXPR
9098 && (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
9099 && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1)))
9100 && integer_zerop (arg1))
9102 tree const1 = TREE_OPERAND (arg0, 1);
9103 tree const2 = arg1; /* zero */
9104 tree variable1 = TREE_OPERAND (arg0, 0);
9105 enum tree_code cmp_code = code;
9107 /* Handle unfolded multiplication by zero. */
9108 if (integer_zerop (const1))
9109 return fold_build2_loc (loc, cmp_code, type, const1, const2);
9111 fold_overflow_warning (("assuming signed overflow does not occur when "
9112 "eliminating multiplication in comparison "
9113 "with zero"),
9114 WARN_STRICT_OVERFLOW_COMPARISON);
9116 /* If const1 is negative we swap the sense of the comparison. */
9117 if (tree_int_cst_sgn (const1) < 0)
9118 cmp_code = swap_tree_comparison (cmp_code);
9120 return fold_build2_loc (loc, cmp_code, type, variable1, const2);
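/* Examples: x * 4 > 0 folds to x > 0, while x * -4 > 0 folds to
   x < 0 because the negative multiplier swaps the sense of the
   comparison.  */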
9123 tem = maybe_canonicalize_comparison (loc, code, type, arg0, arg1);
9124 if (tem)
9125 return tem;
9127 if (FLOAT_TYPE_P (TREE_TYPE (arg0)))
9129 tree targ0 = strip_float_extensions (arg0);
9130 tree targ1 = strip_float_extensions (arg1);
9131 tree newtype = TREE_TYPE (targ0);
9133 if (TYPE_PRECISION (TREE_TYPE (targ1)) > TYPE_PRECISION (newtype))
9134 newtype = TREE_TYPE (targ1);
9136 /* Fold (double)float1 CMP (double)float2 into float1 CMP float2. */
9137 if (TYPE_PRECISION (newtype) < TYPE_PRECISION (TREE_TYPE (arg0)))
9138 return fold_build2_loc (loc, code, type,
9139 fold_convert_loc (loc, newtype, targ0),
9140 fold_convert_loc (loc, newtype, targ1));
9142 /* (-a) CMP (-b) -> b CMP a */
9143 if (TREE_CODE (arg0) == NEGATE_EXPR
9144 && TREE_CODE (arg1) == NEGATE_EXPR)
9145 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg1, 0),
9146 TREE_OPERAND (arg0, 0));
9148 if (TREE_CODE (arg1) == REAL_CST)
9150 REAL_VALUE_TYPE cst;
9151 cst = TREE_REAL_CST (arg1);
9153 /* (-a) CMP CST -> a swap(CMP) (-CST) */
9154 if (TREE_CODE (arg0) == NEGATE_EXPR)
9155 return fold_build2_loc (loc, swap_tree_comparison (code), type,
9156 TREE_OPERAND (arg0, 0),
9157 build_real (TREE_TYPE (arg1),
9158 real_value_negate (&cst)));
9160 /* IEEE doesn't distinguish +0 and -0 in comparisons. */
9161 /* a CMP (-0) -> a CMP 0 */
9162 if (REAL_VALUE_MINUS_ZERO (cst))
9163 return fold_build2_loc (loc, code, type, arg0,
9164 build_real (TREE_TYPE (arg1), dconst0));
9166 /* x != NaN is always true, other ops are always false. */
9167 if (REAL_VALUE_ISNAN (cst)
9168 && ! HONOR_SNANS (arg1))
9170 tem = (code == NE_EXPR) ? integer_one_node : integer_zero_node;
9171 return omit_one_operand_loc (loc, type, tem, arg0);
9174 /* Fold comparisons against infinity. */
9175 if (REAL_VALUE_ISINF (cst)
9176 && MODE_HAS_INFINITIES (TYPE_MODE (TREE_TYPE (arg1))))
9178 tem = fold_inf_compare (loc, code, type, arg0, arg1);
9179 if (tem != NULL_TREE)
9180 return tem;
9184 /* If this is a comparison of a real constant with a PLUS_EXPR
9185 or a MINUS_EXPR of a real constant, we can convert it into a
9186 comparison with a revised real constant, provided no overflow
9187 occurs; this is only done when unsafe_math_optimizations are enabled.
9188 if (flag_unsafe_math_optimizations
9189 && TREE_CODE (arg1) == REAL_CST
9190 && (TREE_CODE (arg0) == PLUS_EXPR
9191 || TREE_CODE (arg0) == MINUS_EXPR)
9192 && TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
9193 && 0 != (tem = const_binop (TREE_CODE (arg0) == PLUS_EXPR
9194 ? MINUS_EXPR : PLUS_EXPR,
9195 arg1, TREE_OPERAND (arg0, 1)))
9196 && !TREE_OVERFLOW (tem))
9197 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0), tem);
9199 /* Likewise, we can simplify a comparison of a real constant with
9200 a MINUS_EXPR whose first operand is also a real constant, i.e.
9201 (c1 - x) < c2 becomes x > c1-c2. Reordering is allowed on
9202 floating-point types only if -fassociative-math is set. */
9203 if (flag_associative_math
9204 && TREE_CODE (arg1) == REAL_CST
9205 && TREE_CODE (arg0) == MINUS_EXPR
9206 && TREE_CODE (TREE_OPERAND (arg0, 0)) == REAL_CST
9207 && 0 != (tem = const_binop (MINUS_EXPR, TREE_OPERAND (arg0, 0),
9208 arg1))
9209 && !TREE_OVERFLOW (tem))
9210 return fold_build2_loc (loc, swap_tree_comparison (code), type,
9211 TREE_OPERAND (arg0, 1), tem);
9213 /* Fold comparisons against built-in math functions. */
9214 if (TREE_CODE (arg1) == REAL_CST
9215 && flag_unsafe_math_optimizations
9216 && ! flag_errno_math)
9218 enum built_in_function fcode = builtin_mathfn_code (arg0);
9220 if (fcode != END_BUILTINS)
9222 tem = fold_mathfn_compare (loc, fcode, code, type, arg0, arg1);
9223 if (tem != NULL_TREE)
9224 return tem;
9229 if (TREE_CODE (TREE_TYPE (arg0)) == INTEGER_TYPE
9230 && CONVERT_EXPR_P (arg0))
9232 /* If we are widening one operand of an integer comparison,
9233 see if the other operand is similarly being widened. Perhaps we
9234 can do the comparison in the narrower type. */
9235 tem = fold_widened_comparison (loc, code, type, arg0, arg1);
9236 if (tem)
9237 return tem;
9239 /* Or if we are changing signedness. */
9240 tem = fold_sign_changed_comparison (loc, code, type, arg0, arg1);
9241 if (tem)
9242 return tem;
9245 /* If this is comparing a constant with a MIN_EXPR or a MAX_EXPR of a
9246 constant, we can simplify it. */
9247 if (TREE_CODE (arg1) == INTEGER_CST
9248 && (TREE_CODE (arg0) == MIN_EXPR
9249 || TREE_CODE (arg0) == MAX_EXPR)
9250 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
9252 tem = optimize_minmax_comparison (loc, code, type, op0, op1);
9253 if (tem)
9254 return tem;
9257 /* Simplify comparison of something with itself. (For IEEE
9258 floating-point, we can only do some of these simplifications.) */
9259 if (operand_equal_p (arg0, arg1, 0))
9261 switch (code)
9263 case EQ_EXPR:
9264 if (! FLOAT_TYPE_P (TREE_TYPE (arg0))
9265 || ! HONOR_NANS (arg0))
9266 return constant_boolean_node (1, type);
9267 break;
9269 case GE_EXPR:
9270 case LE_EXPR:
9271 if (! FLOAT_TYPE_P (TREE_TYPE (arg0))
9272 || ! HONOR_NANS (arg0))
9273 return constant_boolean_node (1, type);
9274 return fold_build2_loc (loc, EQ_EXPR, type, arg0, arg1);
9276 case NE_EXPR:
9277 /* For NE, we can only do this simplification if the type is integer
9278 or we don't honor IEEE floating-point NaNs. */
9279 if (FLOAT_TYPE_P (TREE_TYPE (arg0))
9280 && HONOR_NANS (arg0))
9281 break;
9282 /* ... fall through ... */
9283 case GT_EXPR:
9284 case LT_EXPR:
9285 return constant_boolean_node (0, type);
9286 default:
9287 gcc_unreachable ();
9291 /* If we are comparing an expression that just has comparisons
9292 of two integer values, arithmetic expressions of those comparisons,
9293 and constants, we can simplify it. There are only three cases
9294 to check: the two values can either be equal, the first can be
9295 greater, or the second can be greater. Fold the expression for
9296 those three values. Since each value must be 0 or 1, we have
9297 eight possibilities, each of which corresponds to the constant 0
9298 or 1 or one of the six possible comparisons.
9300 This handles common cases like (a > b) == 0 but also handles
9301 expressions like ((x > y) - (y > x)) > 0, which supposedly
9302 occur in macroized code. */
9304 if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg0) != INTEGER_CST)
9306 tree cval1 = 0, cval2 = 0;
9307 int save_p = 0;
9309 if (twoval_comparison_p (arg0, &cval1, &cval2, &save_p)
9310 /* Don't handle degenerate cases here; they should already
9311 have been handled anyway. */
9312 && cval1 != 0 && cval2 != 0
9313 && ! (TREE_CONSTANT (cval1) && TREE_CONSTANT (cval2))
9314 && TREE_TYPE (cval1) == TREE_TYPE (cval2)
9315 && INTEGRAL_TYPE_P (TREE_TYPE (cval1))
9316 && TYPE_MAX_VALUE (TREE_TYPE (cval1))
9317 && TYPE_MAX_VALUE (TREE_TYPE (cval2))
9318 && ! operand_equal_p (TYPE_MIN_VALUE (TREE_TYPE (cval1)),
9319 TYPE_MAX_VALUE (TREE_TYPE (cval2)), 0))
9321 tree maxval = TYPE_MAX_VALUE (TREE_TYPE (cval1));
9322 tree minval = TYPE_MIN_VALUE (TREE_TYPE (cval1));
9324 /* We can't just pass T to eval_subst in case cval1 or cval2
9325 was the same as ARG1. */
9327 tree high_result
9328 = fold_build2_loc (loc, code, type,
9329 eval_subst (loc, arg0, cval1, maxval,
9330 cval2, minval),
9331 arg1);
9332 tree equal_result
9333 = fold_build2_loc (loc, code, type,
9334 eval_subst (loc, arg0, cval1, maxval,
9335 cval2, maxval),
9336 arg1);
9337 tree low_result
9338 = fold_build2_loc (loc, code, type,
9339 eval_subst (loc, arg0, cval1, minval,
9340 cval2, maxval),
9341 arg1);
9343 /* All three of these results should be 0 or 1. Confirm they are.
9344 Then use those values to select the proper code to use. */
9346 if (TREE_CODE (high_result) == INTEGER_CST
9347 && TREE_CODE (equal_result) == INTEGER_CST
9348 && TREE_CODE (low_result) == INTEGER_CST)
9350 /* Make a 3-bit mask with the high-order bit being the
9351 value for `>', the next for `=', and the low for `<'. */
9352 switch ((integer_onep (high_result) * 4)
9353 + (integer_onep (equal_result) * 2)
9354 + integer_onep (low_result))
9356 case 0:
9357 /* Always false. */
9358 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
9359 case 1:
9360 code = LT_EXPR;
9361 break;
9362 case 2:
9363 code = EQ_EXPR;
9364 break;
9365 case 3:
9366 code = LE_EXPR;
9367 break;
9368 case 4:
9369 code = GT_EXPR;
9370 break;
9371 case 5:
9372 code = NE_EXPR;
9373 break;
9374 case 6:
9375 code = GE_EXPR;
9376 break;
9377 case 7:
9378 /* Always true. */
9379 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
9382 if (save_p)
9384 tem = save_expr (build2 (code, type, cval1, cval2));
9385 SET_EXPR_LOCATION (tem, loc);
9386 return tem;
9388 return fold_build2_loc (loc, code, type, cval1, cval2);
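/* Example of the 3-bit mask above: for ((x > y) - (y > x)) > 0
   the three substituted evaluations give high_result = 1,
   equal_result = 0 and low_result = 0, i.e. mask value 4, so the
   whole expression folds to x > y.  */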
9393 /* We can fold X/C1 op C2 where C1 and C2 are integer constants
9394 into a single range test. */
9395 if ((TREE_CODE (arg0) == TRUNC_DIV_EXPR
9396 || TREE_CODE (arg0) == EXACT_DIV_EXPR)
9397 && TREE_CODE (arg1) == INTEGER_CST
9398 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
9399 && !integer_zerop (TREE_OPERAND (arg0, 1))
9400 && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1))
9401 && !TREE_OVERFLOW (arg1))
9403 tem = fold_div_compare (loc, code, type, arg0, arg1);
9404 if (tem != NULL_TREE)
9405 return tem;
9408 /* Fold ~X op ~Y as Y op X. */
9409 if (TREE_CODE (arg0) == BIT_NOT_EXPR
9410 && TREE_CODE (arg1) == BIT_NOT_EXPR)
9412 tree cmp_type = TREE_TYPE (TREE_OPERAND (arg0, 0));
9413 return fold_build2_loc (loc, code, type,
9414 fold_convert_loc (loc, cmp_type,
9415 TREE_OPERAND (arg1, 0)),
9416 TREE_OPERAND (arg0, 0));
9419 /* Fold ~X op C as X op' ~C, where op' is the swapped comparison. */
9420 if (TREE_CODE (arg0) == BIT_NOT_EXPR
9421 && (TREE_CODE (arg1) == INTEGER_CST || TREE_CODE (arg1) == VECTOR_CST))
9423 tree cmp_type = TREE_TYPE (TREE_OPERAND (arg0, 0));
9424 return fold_build2_loc (loc, swap_tree_comparison (code), type,
9425 TREE_OPERAND (arg0, 0),
9426 fold_build1_loc (loc, BIT_NOT_EXPR, cmp_type,
9427 fold_convert_loc (loc, cmp_type, arg1)));
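/* Examples of the two BIT_NOT folds above: ~x < ~y becomes y < x,
   and ~x < 5 becomes x > ~5, i.e. x > -6 in two's complement.  */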
9430 return NULL_TREE;
9434 /* Subroutine of fold_binary. Optimize complex multiplications of the
9435 form z * conj(z), as pow(realpart(z),2) + pow(imagpart(z),2). The
9436 argument EXPR represents the expression "z" of type TYPE. */
9438 static tree
9439 fold_mult_zconjz (location_t loc, tree type, tree expr)
9441 tree itype = TREE_TYPE (type);
9442 tree rpart, ipart, tem;
9444 if (TREE_CODE (expr) == COMPLEX_EXPR)
9446 rpart = TREE_OPERAND (expr, 0);
9447 ipart = TREE_OPERAND (expr, 1);
9449 else if (TREE_CODE (expr) == COMPLEX_CST)
9451 rpart = TREE_REALPART (expr);
9452 ipart = TREE_IMAGPART (expr);
9454 else
9456 expr = save_expr (expr);
9457 rpart = fold_build1_loc (loc, REALPART_EXPR, itype, expr);
9458 ipart = fold_build1_loc (loc, IMAGPART_EXPR, itype, expr);
9461 rpart = save_expr (rpart);
9462 ipart = save_expr (ipart);
9463 tem = fold_build2_loc (loc, PLUS_EXPR, itype,
9464 fold_build2_loc (loc, MULT_EXPR, itype, rpart, rpart),
9465 fold_build2_loc (loc, MULT_EXPR, itype, ipart, ipart));
9466 return fold_build2_loc (loc, COMPLEX_EXPR, type, tem,
9467 build_zero_cst (itype));
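/* Example: for z = 3 + 4i, z * conj (z) = (3 + 4i) * (3 - 4i)
   = 3*3 + 4*4 = 25, so the function builds
   COMPLEX_EXPR <3*3 + 4*4, 0>, i.e. 25 + 0i.  */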
9471 /* Subroutine of fold_binary. If P is the value of EXPR, computes
9472 power-of-two M and (arbitrary) N such that M divides (P-N). This condition
9473 guarantees that P and N have the same least significant log2(M) bits.
9474 N is not otherwise constrained. In particular, N is not normalized to
9475 0 <= N < M as is common. In general, the precise value of P is unknown.
9476 M is chosen as large as possible such that constant N can be determined.
9478 Returns M and sets *RESIDUE to N.
9480 If ALLOW_FUNC_ALIGN is true, do take functions' DECL_ALIGN_UNIT into
9481 account. This is not always possible due to PR 35705.
9484 static unsigned HOST_WIDE_INT
9485 get_pointer_modulus_and_residue (tree expr, unsigned HOST_WIDE_INT *residue,
9486 bool allow_func_align)
9488 enum tree_code code;
9490 *residue = 0;
9492 code = TREE_CODE (expr);
9493 if (code == ADDR_EXPR)
9495 unsigned int bitalign;
9496 get_object_alignment_1 (TREE_OPERAND (expr, 0), &bitalign, residue);
9497 *residue /= BITS_PER_UNIT;
9498 return bitalign / BITS_PER_UNIT;
9500 else if (code == POINTER_PLUS_EXPR)
9502 tree op0, op1;
9503 unsigned HOST_WIDE_INT modulus;
9504 enum tree_code inner_code;
9506 op0 = TREE_OPERAND (expr, 0);
9507 STRIP_NOPS (op0);
9508 modulus = get_pointer_modulus_and_residue (op0, residue,
9509 allow_func_align);
9511 op1 = TREE_OPERAND (expr, 1);
9512 STRIP_NOPS (op1);
9513 inner_code = TREE_CODE (op1);
9514 if (inner_code == INTEGER_CST)
9516 *residue += TREE_INT_CST_LOW (op1);
9517 return modulus;
9519 else if (inner_code == MULT_EXPR)
9521 op1 = TREE_OPERAND (op1, 1);
9522 if (TREE_CODE (op1) == INTEGER_CST)
9524 unsigned HOST_WIDE_INT align;
9526 /* Compute the greatest power-of-2 divisor of op1. */
9527 align = TREE_INT_CST_LOW (op1);
9528 align &= -align;
9530 /* If align is non-zero and less than modulus, replace
9531 modulus with align. If align is 0, then either op1 is 0
9532 or the greatest power-of-2 divisor of op1 doesn't fit in an
9533 unsigned HOST_WIDE_INT. In either case, no additional
9534 constraint is imposed. */
9535 if (align)
9536 modulus = MIN (modulus, align);
9538 return modulus;
9543 /* If we get here, we were unable to determine anything useful about the
9544 expression. */
9545 return 1;
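/* Example, assuming a 16-byte-aligned array "a": for the address
   &a p+ i * 4 the ADDR_EXPR case yields modulus 16 and residue 0,
   and the MULT_EXPR case reduces the modulus to MIN (16, 4) = 4,
   so the pointer value is known to be 0 modulo 4.  */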
9548 /* Helper function for fold_vec_perm. Store elements of VECTOR_CST or
9549 CONSTRUCTOR ARG into array ELTS and return true if successful. */
9551 static bool
9552 vec_cst_ctor_to_array (tree arg, tree *elts)
9554 unsigned int nelts = TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg)), i;
9556 if (TREE_CODE (arg) == VECTOR_CST)
9558 for (i = 0; i < VECTOR_CST_NELTS (arg); ++i)
9559 elts[i] = VECTOR_CST_ELT (arg, i);
9561 else if (TREE_CODE (arg) == CONSTRUCTOR)
9563 constructor_elt *elt;
9565 FOR_EACH_VEC_SAFE_ELT (CONSTRUCTOR_ELTS (arg), i, elt)
9566 if (i >= nelts || TREE_CODE (TREE_TYPE (elt->value)) == VECTOR_TYPE)
9567 return false;
9568 else
9569 elts[i] = elt->value;
9571 else
9572 return false;
9573 for (; i < nelts; i++)
9574 elts[i]
9575 = fold_convert (TREE_TYPE (TREE_TYPE (arg)), integer_zero_node);
9576 return true;
9579 /* Attempt to fold a vector permutation of vectors ARG0 and ARG1 using the
9580 selector SEL. Return the folded VECTOR_CST or CONSTRUCTOR if successful,
9581 NULL_TREE otherwise. */
9583 static tree
9584 fold_vec_perm (tree type, tree arg0, tree arg1, const unsigned char *sel)
9586 unsigned int nelts = TYPE_VECTOR_SUBPARTS (type), i;
9587 tree *elts;
9588 bool need_ctor = false;
9590 gcc_assert (TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0)) == nelts
9591 && TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg1)) == nelts);
9592 if (TREE_TYPE (TREE_TYPE (arg0)) != TREE_TYPE (type)
9593 || TREE_TYPE (TREE_TYPE (arg1)) != TREE_TYPE (type))
9594 return NULL_TREE;
9596 elts = XALLOCAVEC (tree, nelts * 3);
9597 if (!vec_cst_ctor_to_array (arg0, elts)
9598 || !vec_cst_ctor_to_array (arg1, elts + nelts))
9599 return NULL_TREE;
9601 for (i = 0; i < nelts; i++)
9603 if (!CONSTANT_CLASS_P (elts[sel[i]]))
9604 need_ctor = true;
9605 elts[i + 2 * nelts] = unshare_expr (elts[sel[i]]);
9608 if (need_ctor)
9610 vec<constructor_elt, va_gc> *v;
9611 vec_alloc (v, nelts);
9612 for (i = 0; i < nelts; i++)
9613 CONSTRUCTOR_APPEND_ELT (v, NULL_TREE, elts[2 * nelts + i]);
9614 return build_constructor (type, v);
9616 else
9617 return build_vector (type, &elts[2 * nelts]);
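/* Example, for 4-element vectors: with arg0 = {10,11,12,13},
   arg1 = {20,21,22,23} and sel = {0,5,2,7}, ELTS holds the eight
   input elements followed by the selection and the result is the
   VECTOR_CST {10,21,12,23}.  */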
9620 /* Try to fold a pointer difference of type TYPE between two address
9621 expressions of array references AREF0 and AREF1 using location LOC. Return a
9622 simplified expression for the difference or NULL_TREE. */
9624 static tree
9625 fold_addr_of_array_ref_difference (location_t loc, tree type,
9626 tree aref0, tree aref1)
9628 tree base0 = TREE_OPERAND (aref0, 0);
9629 tree base1 = TREE_OPERAND (aref1, 0);
9630 tree base_offset = build_int_cst (type, 0);
9632 /* If the bases are array references as well, recurse. If the bases
9633 are pointer indirections, compute the difference of the pointers.
9634 If the bases are equal, we are set. */
9635 if ((TREE_CODE (base0) == ARRAY_REF
9636 && TREE_CODE (base1) == ARRAY_REF
9637 && (base_offset
9638 = fold_addr_of_array_ref_difference (loc, type, base0, base1)))
9639 || (INDIRECT_REF_P (base0)
9640 && INDIRECT_REF_P (base1)
9641 && (base_offset = fold_binary_loc (loc, MINUS_EXPR, type,
9642 TREE_OPERAND (base0, 0),
9643 TREE_OPERAND (base1, 0))))
9644 || operand_equal_p (base0, base1, 0))
9646 tree op0 = fold_convert_loc (loc, type, TREE_OPERAND (aref0, 1));
9647 tree op1 = fold_convert_loc (loc, type, TREE_OPERAND (aref1, 1));
9648 tree esz = fold_convert_loc (loc, type, array_ref_element_size (aref0));
9649 tree diff = build2 (MINUS_EXPR, type, op0, op1);
9650 return fold_build2_loc (loc, PLUS_EXPR, type,
9651 base_offset,
9652 fold_build2_loc (loc, MULT_EXPR, type,
9653 diff, esz));
9655 return NULL_TREE;
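/* Example: for &a[i] - &a[j] the bases are equal, so the result is
   a base offset of 0 plus (i - j) times the element size of "a";
   nested ARRAY_REFs such as &a[2][i] - &a[3][j] contribute the
   outer row difference through the recursion.  */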
9658 /* If the real or vector real constant CST of type TYPE has an exact
9659 inverse, return it, else return NULL_TREE. */
9661 tree
9662 exact_inverse (tree type, tree cst)
9664 REAL_VALUE_TYPE r;
9665 tree unit_type, *elts;
9666 machine_mode mode;
9667 unsigned vec_nelts, i;
9669 switch (TREE_CODE (cst))
9671 case REAL_CST:
9672 r = TREE_REAL_CST (cst);
9674 if (exact_real_inverse (TYPE_MODE (type), &r))
9675 return build_real (type, r);
9677 return NULL_TREE;
9679 case VECTOR_CST:
9680 vec_nelts = VECTOR_CST_NELTS (cst);
9681 elts = XALLOCAVEC (tree, vec_nelts);
9682 unit_type = TREE_TYPE (type);
9683 mode = TYPE_MODE (unit_type);
9685 for (i = 0; i < vec_nelts; i++)
9687 r = TREE_REAL_CST (VECTOR_CST_ELT (cst, i));
9688 if (!exact_real_inverse (mode, &r))
9689 return NULL_TREE;
9690 elts[i] = build_real (unit_type, r);
9693 return build_vector (type, elts);
9695 default:
9696 return NULL_TREE;
9700 /* Mask out the tz least significant bits of X of type TYPE where
9701 tz is the number of trailing zeroes in Y. */
9702 static wide_int
9703 mask_with_tz (tree type, const wide_int &x, const wide_int &y)
9705 int tz = wi::ctz (y);
9706 if (tz > 0)
9707 return wi::mask (tz, true, TYPE_PRECISION (type)) & x;
9708 return x;
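/* Example, at 8-bit precision: y = 24 (0b00011000) has tz = 3
   trailing zeros, so x = 0b01101101 is masked to 0b01101000,
   i.e. its three least significant bits are cleared.  */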
9711 /* Return true when T is an address and is known to be nonzero.
9712 For floating point we further ensure that T is not denormal.
9713 Similar logic is present in nonzero_address in rtlanal.c.
9715 If the return value is based on the assumption that signed overflow
9716 is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
9717 change *STRICT_OVERFLOW_P. */
9719 static bool
9720 tree_expr_nonzero_warnv_p (tree t, bool *strict_overflow_p)
9722 tree type = TREE_TYPE (t);
9723 enum tree_code code;
9725 /* Doing something useful for floating point would need more work. */
9726 if (!INTEGRAL_TYPE_P (type) && !POINTER_TYPE_P (type))
9727 return false;
9729 code = TREE_CODE (t);
9730 switch (TREE_CODE_CLASS (code))
9732 case tcc_unary:
9733 return tree_unary_nonzero_warnv_p (code, type, TREE_OPERAND (t, 0),
9734 strict_overflow_p);
9735 case tcc_binary:
9736 case tcc_comparison:
9737 return tree_binary_nonzero_warnv_p (code, type,
9738 TREE_OPERAND (t, 0),
9739 TREE_OPERAND (t, 1),
9740 strict_overflow_p);
9741 case tcc_constant:
9742 case tcc_declaration:
9743 case tcc_reference:
9744 return tree_single_nonzero_warnv_p (t, strict_overflow_p);
9746 default:
9747 break;
9750 switch (code)
9752 case TRUTH_NOT_EXPR:
9753 return tree_unary_nonzero_warnv_p (code, type, TREE_OPERAND (t, 0),
9754 strict_overflow_p);
9756 case TRUTH_AND_EXPR:
9757 case TRUTH_OR_EXPR:
9758 case TRUTH_XOR_EXPR:
9759 return tree_binary_nonzero_warnv_p (code, type,
9760 TREE_OPERAND (t, 0),
9761 TREE_OPERAND (t, 1),
9762 strict_overflow_p);
9764 case COND_EXPR:
9765 case CONSTRUCTOR:
9766 case OBJ_TYPE_REF:
9767 case ASSERT_EXPR:
9768 case ADDR_EXPR:
9769 case WITH_SIZE_EXPR:
9770 case SSA_NAME:
9771 return tree_single_nonzero_warnv_p (t, strict_overflow_p);
9773 case COMPOUND_EXPR:
9774 case MODIFY_EXPR:
9775 case BIND_EXPR:
9776 return tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 1),
9777 strict_overflow_p);
9779 case SAVE_EXPR:
9780 return tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 0),
9781 strict_overflow_p);
9783 case CALL_EXPR:
9785 tree fndecl = get_callee_fndecl (t);
9786 if (!fndecl) return false;
9787 if (flag_delete_null_pointer_checks && !flag_check_new
9788 && DECL_IS_OPERATOR_NEW (fndecl)
9789 && !TREE_NOTHROW (fndecl))
9790 return true;
9791 if (flag_delete_null_pointer_checks
9792 && lookup_attribute ("returns_nonnull",
9793 TYPE_ATTRIBUTES (TREE_TYPE (fndecl))))
9794 return true;
9795 return alloca_call_p (t);
9798 default:
9799 break;
9801 return false;
9804 /* Return true when T is an address and is known to be nonzero.
9805 Handle warnings about undefined signed overflow. */
9807 static bool
9808 tree_expr_nonzero_p (tree t)
9810 bool ret, strict_overflow_p;
9812 strict_overflow_p = false;
9813 ret = tree_expr_nonzero_warnv_p (t, &strict_overflow_p);
9814 if (strict_overflow_p)
9815 fold_overflow_warning (("assuming signed overflow does not occur when "
9816 "determining that expression is always "
9817 "non-zero"),
9818 WARN_STRICT_OVERFLOW_MISC);
9819 return ret;
9822 /* Fold a binary expression of code CODE and type TYPE with operands
9823 OP0 and OP1. LOC is the location of the resulting expression.
9824 Return the folded expression if folding is successful. Otherwise,
9825 return NULL_TREE. */
9827 tree
9828 fold_binary_loc (location_t loc,
9829 enum tree_code code, tree type, tree op0, tree op1)
9831 enum tree_code_class kind = TREE_CODE_CLASS (code);
9832 tree arg0, arg1, tem;
9833 tree t1 = NULL_TREE;
9834 bool strict_overflow_p;
9835 unsigned int prec;
9837 gcc_assert (IS_EXPR_CODE_CLASS (kind)
9838 && TREE_CODE_LENGTH (code) == 2
9839 && op0 != NULL_TREE
9840 && op1 != NULL_TREE);
9842 arg0 = op0;
9843 arg1 = op1;
9845 /* Strip any conversions that don't change the mode. This is
9846 safe for every expression, except for a comparison expression
9847 because its signedness is derived from its operands. So, in
9848 the latter case, only strip conversions that don't change the
9849 signedness. MIN_EXPR/MAX_EXPR also need the signedness of their
9850 arguments preserved.
9852 Note that this is done as an internal manipulation within the
9853 constant folder, in order to find the simplest representation
9854 of the arguments so that their form can be studied. In any
9855 case, the appropriate type conversions should be put back in
9856 the tree that will get out of the constant folder. */
9858 if (kind == tcc_comparison || code == MIN_EXPR || code == MAX_EXPR)
9860 STRIP_SIGN_NOPS (arg0);
9861 STRIP_SIGN_NOPS (arg1);
9863 else
9865 STRIP_NOPS (arg0);
9866 STRIP_NOPS (arg1);
9869 /* Note that TREE_CONSTANT isn't enough: static var addresses are
9870 constant but we can't do arithmetic on them. */
9871 if (CONSTANT_CLASS_P (arg0) && CONSTANT_CLASS_P (arg1))
9873 tem = const_binop (code, type, arg0, arg1);
9874 if (tem != NULL_TREE)
9876 if (TREE_TYPE (tem) != type)
9877 tem = fold_convert_loc (loc, type, tem);
9878 return tem;
9882 /* If this is a commutative operation, and ARG0 is a constant, move it
9883 to ARG1 to reduce the number of tests below. */
9884 if (commutative_tree_code (code)
9885 && tree_swap_operands_p (arg0, arg1, true))
9886 return fold_build2_loc (loc, code, type, op1, op0);
9888 /* Likewise if this is a comparison, and ARG0 is a constant, move it
9889 to ARG1 to reduce the number of tests below. */
9890 if (kind == tcc_comparison
9891 && tree_swap_operands_p (arg0, arg1, true))
9892 return fold_build2_loc (loc, swap_tree_comparison (code), type, op1, op0);
9894 tem = generic_simplify (loc, code, type, op0, op1);
9895 if (tem)
9896 return tem;
9898 /* ARG0 is the first operand of EXPR, and ARG1 is the second operand.
9900 First check for cases where an arithmetic operation is applied to a
9901 compound, conditional, or comparison operation. Push the arithmetic
9902 operation inside the compound or conditional to see if any folding
9903 can then be done. Convert comparison to conditional for this purpose.
9904 This also optimizes non-constant cases that used to be done in
9905 expand_expr.
9907 Before we do that, see if this is a BIT_AND_EXPR or a BIT_IOR_EXPR
9908 where one of the operands is a comparison and the other is a comparison, a
9909 BIT_AND_EXPR with the constant 1, or a truth value. In that case, the
9910 code below would make the expression more complex. Change it to a
9911 TRUTH_{AND,OR}_EXPR. Likewise, convert a similar NE_EXPR to
9912 TRUTH_XOR_EXPR and an EQ_EXPR to the inversion of a TRUTH_XOR_EXPR. */
9914 if ((code == BIT_AND_EXPR || code == BIT_IOR_EXPR
9915 || code == EQ_EXPR || code == NE_EXPR)
9916 && TREE_CODE (type) != VECTOR_TYPE
9917 && ((truth_value_p (TREE_CODE (arg0))
9918 && (truth_value_p (TREE_CODE (arg1))
9919 || (TREE_CODE (arg1) == BIT_AND_EXPR
9920 && integer_onep (TREE_OPERAND (arg1, 1)))))
9921 || (truth_value_p (TREE_CODE (arg1))
9922 && (truth_value_p (TREE_CODE (arg0))
9923 || (TREE_CODE (arg0) == BIT_AND_EXPR
9924 && integer_onep (TREE_OPERAND (arg0, 1)))))))
9926 tem = fold_build2_loc (loc, code == BIT_AND_EXPR ? TRUTH_AND_EXPR
9927 : code == BIT_IOR_EXPR ? TRUTH_OR_EXPR
9928 : TRUTH_XOR_EXPR,
9929 boolean_type_node,
9930 fold_convert_loc (loc, boolean_type_node, arg0),
9931 fold_convert_loc (loc, boolean_type_node, arg1));
9933 if (code == EQ_EXPR)
9934 tem = invert_truthvalue_loc (loc, tem);
9936 return fold_convert_loc (loc, type, tem);
9939 if (TREE_CODE_CLASS (code) == tcc_binary
9940 || TREE_CODE_CLASS (code) == tcc_comparison)
9942 if (TREE_CODE (arg0) == COMPOUND_EXPR)
9944 tem = fold_build2_loc (loc, code, type,
9945 fold_convert_loc (loc, TREE_TYPE (op0),
9946 TREE_OPERAND (arg0, 1)), op1);
9947 return build2_loc (loc, COMPOUND_EXPR, type, TREE_OPERAND (arg0, 0),
9948 tem);
9950 if (TREE_CODE (arg1) == COMPOUND_EXPR
9951 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
9953 tem = fold_build2_loc (loc, code, type, op0,
9954 fold_convert_loc (loc, TREE_TYPE (op1),
9955 TREE_OPERAND (arg1, 1)));
9956 return build2_loc (loc, COMPOUND_EXPR, type, TREE_OPERAND (arg1, 0),
9957 tem);
9960 if (TREE_CODE (arg0) == COND_EXPR
9961 || TREE_CODE (arg0) == VEC_COND_EXPR
9962 || COMPARISON_CLASS_P (arg0))
9964 tem = fold_binary_op_with_conditional_arg (loc, code, type, op0, op1,
9965 arg0, arg1,
9966 /*cond_first_p=*/1);
9967 if (tem != NULL_TREE)
9968 return tem;
9971 if (TREE_CODE (arg1) == COND_EXPR
9972 || TREE_CODE (arg1) == VEC_COND_EXPR
9973 || COMPARISON_CLASS_P (arg1))
9975 tem = fold_binary_op_with_conditional_arg (loc, code, type, op0, op1,
9976 arg1, arg0,
9977 /*cond_first_p=*/0);
9978 if (tem != NULL_TREE)
9979 return tem;
9983 switch (code)
9985 case MEM_REF:
9986 /* MEM[&MEM[p, CST1], CST2] -> MEM[p, CST1 + CST2]. */
9987 if (TREE_CODE (arg0) == ADDR_EXPR
9988 && TREE_CODE (TREE_OPERAND (arg0, 0)) == MEM_REF)
9990 tree iref = TREE_OPERAND (arg0, 0);
9991 return fold_build2 (MEM_REF, type,
9992 TREE_OPERAND (iref, 0),
9993 int_const_binop (PLUS_EXPR, arg1,
9994 TREE_OPERAND (iref, 1)));
9997 /* MEM[&a.b, CST2] -> MEM[&a, offsetof (a, b) + CST2]. */
9998 if (TREE_CODE (arg0) == ADDR_EXPR
9999 && handled_component_p (TREE_OPERAND (arg0, 0)))
10001 tree base;
10002 HOST_WIDE_INT coffset;
10003 base = get_addr_base_and_unit_offset (TREE_OPERAND (arg0, 0),
10004 &coffset);
10005 if (!base)
10006 return NULL_TREE;
10007 return fold_build2 (MEM_REF, type,
10008 build_fold_addr_expr (base),
10009 int_const_binop (PLUS_EXPR, arg1,
10010 size_int (coffset)));
10013 return NULL_TREE;
10015 case POINTER_PLUS_EXPR:
10016 /* INT +p INT -> (PTR)(INT + INT). Stripping types allows for this. */
10017 if (INTEGRAL_TYPE_P (TREE_TYPE (arg1))
10018 && INTEGRAL_TYPE_P (TREE_TYPE (arg0)))
10019 return fold_convert_loc (loc, type,
10020 fold_build2_loc (loc, PLUS_EXPR, sizetype,
10021 fold_convert_loc (loc, sizetype,
10022 arg1),
10023 fold_convert_loc (loc, sizetype,
10024 arg0)));
10026 return NULL_TREE;
10028 case PLUS_EXPR:
10029 if (INTEGRAL_TYPE_P (type) || VECTOR_INTEGER_TYPE_P (type))
10031 /* X + (X / CST) * -CST is X % CST. */
10032 if (TREE_CODE (arg1) == MULT_EXPR
10033 && TREE_CODE (TREE_OPERAND (arg1, 0)) == TRUNC_DIV_EXPR
10034 && operand_equal_p (arg0,
10035 TREE_OPERAND (TREE_OPERAND (arg1, 0), 0), 0))
10037 tree cst0 = TREE_OPERAND (TREE_OPERAND (arg1, 0), 1);
10038 tree cst1 = TREE_OPERAND (arg1, 1);
10039 tree sum = fold_binary_loc (loc, PLUS_EXPR, TREE_TYPE (cst1),
10040 cst1, cst0);
10041 if (sum && integer_zerop (sum))
10042 return fold_convert_loc (loc, type,
10043 fold_build2_loc (loc, TRUNC_MOD_EXPR,
10044 TREE_TYPE (arg0), arg0,
10045 cst0));
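/* Example: x + (x / 16) * -16 has cst0 = 16 and cst1 = -16, whose
   sum folds to zero, so the expression becomes x % 16 with C's
   truncating division semantics.  */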
10049 /* Handle (A1 * C1) + (A2 * C2) with A1, A2 or C1, C2 being the same or
10050 one. Make sure the type is not saturating and has the signedness of
10051 the stripped operands, as fold_plusminus_mult_expr will re-associate.
10052 ??? The latter condition should use TYPE_OVERFLOW_* flags instead. */
10053 if ((TREE_CODE (arg0) == MULT_EXPR
10054 || TREE_CODE (arg1) == MULT_EXPR)
10055 && !TYPE_SATURATING (type)
10056 && TYPE_UNSIGNED (type) == TYPE_UNSIGNED (TREE_TYPE (arg0))
10057 && TYPE_UNSIGNED (type) == TYPE_UNSIGNED (TREE_TYPE (arg1))
10058 && (!FLOAT_TYPE_P (type) || flag_associative_math))
10060 tree tem = fold_plusminus_mult_expr (loc, code, type, arg0, arg1);
10061 if (tem)
10062 return tem;
10065 if (! FLOAT_TYPE_P (type))
10067 /* If we are adding two BIT_AND_EXPR's, both of which are and'ing
10068 with a constant, and the two constants have no bits in common,
10069 we should treat this as a BIT_IOR_EXPR since this may produce more
10070 simplifications. */
10071 if (TREE_CODE (arg0) == BIT_AND_EXPR
10072 && TREE_CODE (arg1) == BIT_AND_EXPR
10073 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
10074 && TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
10075 && wi::bit_and (TREE_OPERAND (arg0, 1),
10076 TREE_OPERAND (arg1, 1)) == 0)
10078 code = BIT_IOR_EXPR;
10079 goto bit_ior;
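/* Example: (x & 3) + (y & 12) can never carry between the two
   operands because the masks 3 and 12 share no bits, so it is
   rewritten as (x & 3) | (y & 12) and handled below.  */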
10082 /* Reassociate (plus (plus (mult) (foo)) (mult)) as
10083 (plus (plus (mult) (mult)) (foo)) so that we can
10084 take advantage of the factoring cases below. */
10085 if (ANY_INTEGRAL_TYPE_P (type)
10086 && TYPE_OVERFLOW_WRAPS (type)
10087 && (((TREE_CODE (arg0) == PLUS_EXPR
10088 || TREE_CODE (arg0) == MINUS_EXPR)
10089 && TREE_CODE (arg1) == MULT_EXPR)
10090 || ((TREE_CODE (arg1) == PLUS_EXPR
10091 || TREE_CODE (arg1) == MINUS_EXPR)
10092 && TREE_CODE (arg0) == MULT_EXPR)))
10094 tree parg0, parg1, parg, marg;
10095 enum tree_code pcode;
10097 if (TREE_CODE (arg1) == MULT_EXPR)
10098 parg = arg0, marg = arg1;
10099 else
10100 parg = arg1, marg = arg0;
10101 pcode = TREE_CODE (parg);
10102 parg0 = TREE_OPERAND (parg, 0);
10103 parg1 = TREE_OPERAND (parg, 1);
10104 STRIP_NOPS (parg0);
10105 STRIP_NOPS (parg1);
10107 if (TREE_CODE (parg0) == MULT_EXPR
10108 && TREE_CODE (parg1) != MULT_EXPR)
10109 return fold_build2_loc (loc, pcode, type,
10110 fold_build2_loc (loc, PLUS_EXPR, type,
10111 fold_convert_loc (loc, type,
10112 parg0),
10113 fold_convert_loc (loc, type,
10114 marg)),
10115 fold_convert_loc (loc, type, parg1));
10116 if (TREE_CODE (parg0) != MULT_EXPR
10117 && TREE_CODE (parg1) == MULT_EXPR)
10118 return
10119 fold_build2_loc (loc, PLUS_EXPR, type,
10120 fold_convert_loc (loc, type, parg0),
10121 fold_build2_loc (loc, pcode, type,
10122 fold_convert_loc (loc, type, marg),
10123 fold_convert_loc (loc, type,
10124 parg1)));
10127 else
10129 /* Fold __complex__ ( x, 0 ) + __complex__ ( 0, y )
10130 to __complex__ ( x, y ). This is not the same for SNaNs or
10131 if signed zeros are involved. */
10132 if (!HONOR_SNANS (element_mode (arg0))
10133 && !HONOR_SIGNED_ZEROS (element_mode (arg0))
10134 && COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0)))
10136 tree rtype = TREE_TYPE (TREE_TYPE (arg0));
10137 tree arg0r = fold_unary_loc (loc, REALPART_EXPR, rtype, arg0);
10138 tree arg0i = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg0);
10139 bool arg0rz = false, arg0iz = false;
10140 if ((arg0r && (arg0rz = real_zerop (arg0r)))
10141 || (arg0i && (arg0iz = real_zerop (arg0i))))
10143 tree arg1r = fold_unary_loc (loc, REALPART_EXPR, rtype, arg1);
10144 tree arg1i = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg1);
10145 if (arg0rz && arg1i && real_zerop (arg1i))
10147 tree rp = arg1r ? arg1r
10148 : build1 (REALPART_EXPR, rtype, arg1);
10149 tree ip = arg0i ? arg0i
10150 : build1 (IMAGPART_EXPR, rtype, arg0);
10151 return fold_build2_loc (loc, COMPLEX_EXPR, type, rp, ip);
10153 else if (arg0iz && arg1r && real_zerop (arg1r))
10155 tree rp = arg0r ? arg0r
10156 : build1 (REALPART_EXPR, rtype, arg0);
10157 tree ip = arg1i ? arg1i
10158 : build1 (IMAGPART_EXPR, rtype, arg1);
10159 return fold_build2_loc (loc, COMPLEX_EXPR, type, rp, ip);
10164 if (flag_unsafe_math_optimizations
10165 && (TREE_CODE (arg0) == RDIV_EXPR || TREE_CODE (arg0) == MULT_EXPR)
10166 && (TREE_CODE (arg1) == RDIV_EXPR || TREE_CODE (arg1) == MULT_EXPR)
10167 && (tem = distribute_real_division (loc, code, type, arg0, arg1)))
10168 return tem;
10170 /* Convert a + (b*c + d*e) into (a + b*c) + d*e.
10171 We associate floats only if the user has specified
10172 -fassociative-math. */
10173 if (flag_associative_math
10174 && TREE_CODE (arg1) == PLUS_EXPR
10175 && TREE_CODE (arg0) != MULT_EXPR)
10177 tree tree10 = TREE_OPERAND (arg1, 0);
10178 tree tree11 = TREE_OPERAND (arg1, 1);
10179 if (TREE_CODE (tree11) == MULT_EXPR
10180 && TREE_CODE (tree10) == MULT_EXPR)
10182 tree tree0;
10183 tree0 = fold_build2_loc (loc, PLUS_EXPR, type, arg0, tree10);
10184 return fold_build2_loc (loc, PLUS_EXPR, type, tree0, tree11);
10187 /* Convert (b*c + d*e) + a into b*c + (d*e + a).
10188 We associate floats only if the user has specified
10189 -fassociative-math. */
10190 if (flag_associative_math
10191 && TREE_CODE (arg0) == PLUS_EXPR
10192 && TREE_CODE (arg1) != MULT_EXPR)
10194 tree tree00 = TREE_OPERAND (arg0, 0);
10195 tree tree01 = TREE_OPERAND (arg0, 1);
10196 if (TREE_CODE (tree01) == MULT_EXPR
10197 && TREE_CODE (tree00) == MULT_EXPR)
10199 tree tree0;
10200 tree0 = fold_build2_loc (loc, PLUS_EXPR, type, tree01, arg1);
10201 return fold_build2_loc (loc, PLUS_EXPR, type, tree00, tree0);
10206 bit_rotate:
10207 /* (A << C1) + (A >> C2) if A is unsigned and C1+C2 is the size of A
10208 is a rotate of A by C1 bits. */
10209 /* (A << B) + (A >> (Z - B)) if A is unsigned and Z is the size of A
10210 is a rotate of A by B bits. */
10212 enum tree_code code0, code1;
10213 tree rtype;
10214 code0 = TREE_CODE (arg0);
10215 code1 = TREE_CODE (arg1);
10216 if (((code0 == RSHIFT_EXPR && code1 == LSHIFT_EXPR)
10217 || (code1 == RSHIFT_EXPR && code0 == LSHIFT_EXPR))
10218 && operand_equal_p (TREE_OPERAND (arg0, 0),
10219 TREE_OPERAND (arg1, 0), 0)
10220 && (rtype = TREE_TYPE (TREE_OPERAND (arg0, 0)),
10221 TYPE_UNSIGNED (rtype))
10222 /* Only create rotates in complete modes. Other cases are not
10223 expanded properly. */
10224 && (element_precision (rtype)
10225 == element_precision (TYPE_MODE (rtype))))
10227 tree tree01, tree11;
10228 enum tree_code code01, code11;
10230 tree01 = TREE_OPERAND (arg0, 1);
10231 tree11 = TREE_OPERAND (arg1, 1);
10232 STRIP_NOPS (tree01);
10233 STRIP_NOPS (tree11);
10234 code01 = TREE_CODE (tree01);
10235 code11 = TREE_CODE (tree11);
10236 if (code01 == INTEGER_CST
10237 && code11 == INTEGER_CST
10238 && (wi::to_widest (tree01) + wi::to_widest (tree11)
10239 == element_precision (TREE_TYPE (TREE_OPERAND (arg0, 0)))))
10241 tem = build2_loc (loc, LROTATE_EXPR,
10242 TREE_TYPE (TREE_OPERAND (arg0, 0)),
10243 TREE_OPERAND (arg0, 0),
10244 code0 == LSHIFT_EXPR ? tree01 : tree11);
10245 return fold_convert_loc (loc, type, tem);
10247 else if (code11 == MINUS_EXPR)
10249 tree tree110, tree111;
10250 tree110 = TREE_OPERAND (tree11, 0);
10251 tree111 = TREE_OPERAND (tree11, 1);
10252 STRIP_NOPS (tree110);
10253 STRIP_NOPS (tree111);
10254 if (TREE_CODE (tree110) == INTEGER_CST
10255 && 0 == compare_tree_int (tree110,
10256 element_precision
10257 (TREE_TYPE (TREE_OPERAND
10258 (arg0, 0))))
10259 && operand_equal_p (tree01, tree111, 0))
10260 return
10261 fold_convert_loc (loc, type,
10262 build2 ((code0 == LSHIFT_EXPR
10263 ? LROTATE_EXPR
10264 : RROTATE_EXPR),
10265 TREE_TYPE (TREE_OPERAND (arg0, 0)),
10266 TREE_OPERAND (arg0, 0), tree01));
10268 else if (code01 == MINUS_EXPR)
10270 tree tree010, tree011;
10271 tree010 = TREE_OPERAND (tree01, 0);
10272 tree011 = TREE_OPERAND (tree01, 1);
10273 STRIP_NOPS (tree010);
10274 STRIP_NOPS (tree011);
10275 if (TREE_CODE (tree010) == INTEGER_CST
10276 && 0 == compare_tree_int (tree010,
10277 element_precision
10278 (TREE_TYPE (TREE_OPERAND
10279 (arg0, 0))))
10280 && operand_equal_p (tree11, tree011, 0))
10281 return fold_convert_loc
10282 (loc, type,
10283 build2 ((code0 != LSHIFT_EXPR
10284 ? LROTATE_EXPR
10285 : RROTATE_EXPR),
10286 TREE_TYPE (TREE_OPERAND (arg0, 0)),
10287 TREE_OPERAND (arg0, 0), tree11));
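/* Examples, assuming a 32-bit unsigned A: (A << 3) + (A >> 29) is
   recognized above as a left rotate of A by 3, and
   (A << B) + (A >> (32 - B)) likewise becomes a rotate of A by B
   via the MINUS_EXPR cases.  */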
10292 associate:
10293 /* In most languages, we can't associate operations on floats through
10294 parentheses. Rather than remember where the parentheses were, we
10295 don't associate floats at all, unless the user has specified
10296 -fassociative-math.
10297 And, we need to make sure type is not saturating. */
10299 if ((! FLOAT_TYPE_P (type) || flag_associative_math)
10300 && !TYPE_SATURATING (type))
10302 tree var0, con0, lit0, minus_lit0;
10303 tree var1, con1, lit1, minus_lit1;
10304 tree atype = type;
10305 bool ok = true;
10307 /* Split both trees into variables, constants, and literals. Then
10308 associate each group together, the constants with literals,
10309 then the result with variables. This increases the chances of
10310 literals being recombined later and of generating relocatable
10311 expressions for the sum of a constant and literal. */
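/* Example: for (x + 3) + (y + 5) the split yields variables x and
   y and literals 3 and 5; associating each group separately gives
   (x + y) + 8, provided the count check below finds more than two
   objects.  */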
10312 var0 = split_tree (arg0, code, &con0, &lit0, &minus_lit0, 0);
10313 var1 = split_tree (arg1, code, &con1, &lit1, &minus_lit1,
10314 code == MINUS_EXPR);
10316 /* Recombine MINUS_EXPR operands by using PLUS_EXPR. */
10317 if (code == MINUS_EXPR)
10318 code = PLUS_EXPR;
10320 /* With undefined overflow prefer doing association in a type
10321 which wraps on overflow, if that is one of the operand types. */
10322 if ((POINTER_TYPE_P (type) && POINTER_TYPE_OVERFLOW_UNDEFINED)
10323 || (INTEGRAL_TYPE_P (type) && !TYPE_OVERFLOW_WRAPS (type)))
10325 if (INTEGRAL_TYPE_P (TREE_TYPE (arg0))
10326 && TYPE_OVERFLOW_WRAPS (TREE_TYPE (arg0)))
10327 atype = TREE_TYPE (arg0);
10328 else if (INTEGRAL_TYPE_P (TREE_TYPE (arg1))
10329 && TYPE_OVERFLOW_WRAPS (TREE_TYPE (arg1)))
10330 atype = TREE_TYPE (arg1);
10331 gcc_assert (TYPE_PRECISION (atype) == TYPE_PRECISION (type));
10334 /* With undefined overflow we can only associate constants with one
10335 variable, and constants whose association doesn't overflow. */
10336 if ((POINTER_TYPE_P (atype) && POINTER_TYPE_OVERFLOW_UNDEFINED)
10337 || (INTEGRAL_TYPE_P (atype) && !TYPE_OVERFLOW_WRAPS (atype)))
10339 if (var0 && var1)
10341 tree tmp0 = var0;
10342 tree tmp1 = var1;
10344 if (TREE_CODE (tmp0) == NEGATE_EXPR)
10345 tmp0 = TREE_OPERAND (tmp0, 0);
10346 if (CONVERT_EXPR_P (tmp0)
10347 && INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (tmp0, 0)))
10348 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (tmp0, 0)))
10349 <= TYPE_PRECISION (atype)))
10350 tmp0 = TREE_OPERAND (tmp0, 0);
10351 if (TREE_CODE (tmp1) == NEGATE_EXPR)
10352 tmp1 = TREE_OPERAND (tmp1, 0);
10353 if (CONVERT_EXPR_P (tmp1)
10354 && INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (tmp1, 0)))
10355 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (tmp1, 0)))
10356 <= TYPE_PRECISION (atype)))
10357 tmp1 = TREE_OPERAND (tmp1, 0);
10358 /* The only case we can still associate with two variables
10359 is if they are the same, modulo negation and bit-pattern
10360 preserving conversions. */
10361 if (!operand_equal_p (tmp0, tmp1, 0))
10362 ok = false;
10366 /* Only do something if we found more than two objects. Otherwise,
10367 nothing has changed and we risk infinite recursion. */
10368 if (ok
10369 && (2 < ((var0 != 0) + (var1 != 0)
10370 + (con0 != 0) + (con1 != 0)
10371 + (lit0 != 0) + (lit1 != 0)
10372 + (minus_lit0 != 0) + (minus_lit1 != 0))))
10374 bool any_overflows = false;
10375 if (lit0) any_overflows |= TREE_OVERFLOW (lit0);
10376 if (lit1) any_overflows |= TREE_OVERFLOW (lit1);
10377 if (minus_lit0) any_overflows |= TREE_OVERFLOW (minus_lit0);
10378 if (minus_lit1) any_overflows |= TREE_OVERFLOW (minus_lit1);
10379 var0 = associate_trees (loc, var0, var1, code, atype);
10380 con0 = associate_trees (loc, con0, con1, code, atype);
10381 lit0 = associate_trees (loc, lit0, lit1, code, atype);
10382 minus_lit0 = associate_trees (loc, minus_lit0, minus_lit1,
10383 code, atype);
10385 /* Preserve the MINUS_EXPR if the negative part of the literal is
10386 greater than the positive part. Otherwise, the multiplicative
10387 folding code (i.e. extract_muldiv) may be fooled in case
10388 unsigned constants are subtracted, like in the following
10389 example: ((X*2 + 4) - 8U)/2. */
10390 if (minus_lit0 && lit0)
10392 if (TREE_CODE (lit0) == INTEGER_CST
10393 && TREE_CODE (minus_lit0) == INTEGER_CST
10394 && tree_int_cst_lt (lit0, minus_lit0))
10396 minus_lit0 = associate_trees (loc, minus_lit0, lit0,
10397 MINUS_EXPR, atype);
10398 lit0 = 0;
10400 else
10402 lit0 = associate_trees (loc, lit0, minus_lit0,
10403 MINUS_EXPR, atype);
10404 minus_lit0 = 0;
10408 /* Don't introduce overflows through reassociation. */
10409 if (!any_overflows
10410 && ((lit0 && TREE_OVERFLOW_P (lit0))
10411 || (minus_lit0 && TREE_OVERFLOW_P (minus_lit0))))
10412 return NULL_TREE;
10414 if (minus_lit0)
10416 if (con0 == 0)
10417 return
10418 fold_convert_loc (loc, type,
10419 associate_trees (loc, var0, minus_lit0,
10420 MINUS_EXPR, atype));
10421 else
10423 con0 = associate_trees (loc, con0, minus_lit0,
10424 MINUS_EXPR, atype);
10425 return
10426 fold_convert_loc (loc, type,
10427 associate_trees (loc, var0, con0,
10428 PLUS_EXPR, atype));
10432 con0 = associate_trees (loc, con0, lit0, code, atype);
10433 return
10434 fold_convert_loc (loc, type, associate_trees (loc, var0, con0,
10435 code, atype));
10439 return NULL_TREE;
10441 case MINUS_EXPR:
10442 /* Pointer simplifications for subtraction, simple reassociations. */
10443 if (POINTER_TYPE_P (TREE_TYPE (arg1)) && POINTER_TYPE_P (TREE_TYPE (arg0)))
10445 /* (PTR0 p+ A) - (PTR1 p+ B) -> (PTR0 - PTR1) + (A - B) */
10446 if (TREE_CODE (arg0) == POINTER_PLUS_EXPR
10447 && TREE_CODE (arg1) == POINTER_PLUS_EXPR)
10449 tree arg00 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
10450 tree arg01 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
10451 tree arg10 = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 0));
10452 tree arg11 = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 1));
10453 return fold_build2_loc (loc, PLUS_EXPR, type,
10454 fold_build2_loc (loc, MINUS_EXPR, type,
10455 arg00, arg10),
10456 fold_build2_loc (loc, MINUS_EXPR, type,
10457 arg01, arg11));
10459 /* (PTR0 p+ A) - PTR1 -> (PTR0 - PTR1) + A, assuming PTR0 - PTR1 simplifies. */
10460 else if (TREE_CODE (arg0) == POINTER_PLUS_EXPR)
10462 tree arg00 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
10463 tree arg01 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
10464 tree tmp = fold_binary_loc (loc, MINUS_EXPR, type, arg00,
10465 fold_convert_loc (loc, type, arg1));
10466 if (tmp)
10467 return fold_build2_loc (loc, PLUS_EXPR, type, tmp, arg01);
10469 /* PTR0 - (PTR1 p+ A) -> (PTR0 - PTR1) - A, assuming PTR0 - PTR1
10470 simplifies. */
10471 else if (TREE_CODE (arg1) == POINTER_PLUS_EXPR)
10473 tree arg10 = fold_convert_loc (loc, type,
10474 TREE_OPERAND (arg1, 0));
10475 tree arg11 = fold_convert_loc (loc, type,
10476 TREE_OPERAND (arg1, 1));
10477 tree tmp = fold_binary_loc (loc, MINUS_EXPR, type,
10478 fold_convert_loc (loc, type, arg0),
10479 arg10);
10480 if (tmp)
10481 return fold_build2_loc (loc, MINUS_EXPR, type, tmp, arg11);
10484 /* (-A) - B -> (-B) - A where B is easily negated and we can swap. */
10485 if (TREE_CODE (arg0) == NEGATE_EXPR
10486 && negate_expr_p (arg1)
10487 && reorder_operands_p (arg0, arg1))
10488 return fold_build2_loc (loc, MINUS_EXPR, type,
10489 fold_convert_loc (loc, type,
10490 negate_expr (arg1)),
10491 fold_convert_loc (loc, type,
10492 TREE_OPERAND (arg0, 0)));
10494 /* X - (X / Y) * Y is X % Y. */
10495 if ((INTEGRAL_TYPE_P (type) || VECTOR_INTEGER_TYPE_P (type))
10496 && TREE_CODE (arg1) == MULT_EXPR
10497 && TREE_CODE (TREE_OPERAND (arg1, 0)) == TRUNC_DIV_EXPR
10498 && operand_equal_p (arg0,
10499 TREE_OPERAND (TREE_OPERAND (arg1, 0), 0), 0)
10500 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg1, 0), 1),
10501 TREE_OPERAND (arg1, 1), 0))
10502 return
10503 fold_convert_loc (loc, type,
10504 fold_build2_loc (loc, TRUNC_MOD_EXPR, TREE_TYPE (arg0),
10505 arg0, TREE_OPERAND (arg1, 1)));
10507 if (! FLOAT_TYPE_P (type))
10509 /* Fold A - (A & B) into ~B & A. */
10510 if (!TREE_SIDE_EFFECTS (arg0)
10511 && TREE_CODE (arg1) == BIT_AND_EXPR)
10513 if (operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0))
10515 tree arg10 = fold_convert_loc (loc, type,
10516 TREE_OPERAND (arg1, 0));
10517 return fold_build2_loc (loc, BIT_AND_EXPR, type,
10518 fold_build1_loc (loc, BIT_NOT_EXPR,
10519 type, arg10),
10520 fold_convert_loc (loc, type, arg0));
10522 if (operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
10524 tree arg11 = fold_convert_loc (loc,
10525 type, TREE_OPERAND (arg1, 1));
10526 return fold_build2_loc (loc, BIT_AND_EXPR, type,
10527 fold_build1_loc (loc, BIT_NOT_EXPR,
10528 type, arg11),
10529 fold_convert_loc (loc, type, arg0));
10533 /* Fold (A & ~B) - (A & B) into (A ^ B) - B, where B is
10534 any power of 2 minus 1. */
10535 if (TREE_CODE (arg0) == BIT_AND_EXPR
10536 && TREE_CODE (arg1) == BIT_AND_EXPR
10537 && operand_equal_p (TREE_OPERAND (arg0, 0),
10538 TREE_OPERAND (arg1, 0), 0))
10540 tree mask0 = TREE_OPERAND (arg0, 1);
10541 tree mask1 = TREE_OPERAND (arg1, 1);
10542 tree tem = fold_build1_loc (loc, BIT_NOT_EXPR, type, mask0);
10544 if (operand_equal_p (tem, mask1, 0))
10546 tem = fold_build2_loc (loc, BIT_XOR_EXPR, type,
10547 TREE_OPERAND (arg0, 0), mask1);
10548 return fold_build2_loc (loc, MINUS_EXPR, type, tem, mask1);
10553 /* Fold __complex__ ( x, 0 ) - __complex__ ( 0, y ) to
10554 __complex__ ( x, -y ). This is not the same for SNaNs or if
10555 signed zeros are involved. */
10556 if (!HONOR_SNANS (element_mode (arg0))
10557 && !HONOR_SIGNED_ZEROS (element_mode (arg0))
10558 && COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0)))
10560 tree rtype = TREE_TYPE (TREE_TYPE (arg0));
10561 tree arg0r = fold_unary_loc (loc, REALPART_EXPR, rtype, arg0);
10562 tree arg0i = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg0);
10563 bool arg0rz = false, arg0iz = false;
10564 if ((arg0r && (arg0rz = real_zerop (arg0r)))
10565 || (arg0i && (arg0iz = real_zerop (arg0i))))
10567 tree arg1r = fold_unary_loc (loc, REALPART_EXPR, rtype, arg1);
10568 tree arg1i = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg1);
10569 if (arg0rz && arg1i && real_zerop (arg1i))
10571 tree rp = fold_build1_loc (loc, NEGATE_EXPR, rtype,
10572 arg1r ? arg1r
10573 : build1 (REALPART_EXPR, rtype, arg1));
10574 tree ip = arg0i ? arg0i
10575 : build1 (IMAGPART_EXPR, rtype, arg0);
10576 return fold_build2_loc (loc, COMPLEX_EXPR, type, rp, ip);
10578 else if (arg0iz && arg1r && real_zerop (arg1r))
10580 tree rp = arg0r ? arg0r
10581 : build1 (REALPART_EXPR, rtype, arg0);
10582 tree ip = fold_build1_loc (loc, NEGATE_EXPR, rtype,
10583 arg1i ? arg1i
10584 : build1 (IMAGPART_EXPR, rtype, arg1));
10585 return fold_build2_loc (loc, COMPLEX_EXPR, type, rp, ip);
10590 /* A - B -> A + (-B) if B is easily negatable. */
10591 if (negate_expr_p (arg1)
10592 && !TYPE_OVERFLOW_SANITIZED (type)
10593 && ((FLOAT_TYPE_P (type)
10594 /* Avoid this transformation if B is a positive REAL_CST. */
10595 && (TREE_CODE (arg1) != REAL_CST
10596 || REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg1))))
10597 || INTEGRAL_TYPE_P (type)))
10598 return fold_build2_loc (loc, PLUS_EXPR, type,
10599 fold_convert_loc (loc, type, arg0),
10600 fold_convert_loc (loc, type,
10601 negate_expr (arg1)));
10603 /* Try folding difference of addresses. */
10605 HOST_WIDE_INT diff;
10607 if ((TREE_CODE (arg0) == ADDR_EXPR
10608 || TREE_CODE (arg1) == ADDR_EXPR)
10609 && ptr_difference_const (arg0, arg1, &diff))
10610 return build_int_cst_type (type, diff);
10613 /* Fold &a[i] - &a[j] to i-j. */
10614 if (TREE_CODE (arg0) == ADDR_EXPR
10615 && TREE_CODE (TREE_OPERAND (arg0, 0)) == ARRAY_REF
10616 && TREE_CODE (arg1) == ADDR_EXPR
10617 && TREE_CODE (TREE_OPERAND (arg1, 0)) == ARRAY_REF)
10619 tree tem = fold_addr_of_array_ref_difference (loc, type,
10620 TREE_OPERAND (arg0, 0),
10621 TREE_OPERAND (arg1, 0));
10622 if (tem)
10623 return tem;
10626 if (FLOAT_TYPE_P (type)
10627 && flag_unsafe_math_optimizations
10628 && (TREE_CODE (arg0) == RDIV_EXPR || TREE_CODE (arg0) == MULT_EXPR)
10629 && (TREE_CODE (arg1) == RDIV_EXPR || TREE_CODE (arg1) == MULT_EXPR)
10630 && (tem = distribute_real_division (loc, code, type, arg0, arg1)))
10631 return tem;
10633 /* Handle (A1 * C1) - (A2 * C2) with A1, A2 or C1, C2 being the same or
10634 one. Make sure the type is not saturating and has the signedness of
10635 the stripped operands, as fold_plusminus_mult_expr will re-associate.
10636 ??? The latter condition should use TYPE_OVERFLOW_* flags instead. */
10637 if ((TREE_CODE (arg0) == MULT_EXPR
10638 || TREE_CODE (arg1) == MULT_EXPR)
10639 && !TYPE_SATURATING (type)
10640 && TYPE_UNSIGNED (type) == TYPE_UNSIGNED (TREE_TYPE (arg0))
10641 && TYPE_UNSIGNED (type) == TYPE_UNSIGNED (TREE_TYPE (arg1))
10642 && (!FLOAT_TYPE_P (type) || flag_associative_math))
10644 tree tem = fold_plusminus_mult_expr (loc, code, type, arg0, arg1);
10645 if (tem)
10646 return tem;
10649 goto associate;
10651 case MULT_EXPR:
10652 /* (-A) * (-B) -> A * B */
10653 if (TREE_CODE (arg0) == NEGATE_EXPR && negate_expr_p (arg1))
10654 return fold_build2_loc (loc, MULT_EXPR, type,
10655 fold_convert_loc (loc, type,
10656 TREE_OPERAND (arg0, 0)),
10657 fold_convert_loc (loc, type,
10658 negate_expr (arg1)));
10659 if (TREE_CODE (arg1) == NEGATE_EXPR && negate_expr_p (arg0))
10660 return fold_build2_loc (loc, MULT_EXPR, type,
10661 fold_convert_loc (loc, type,
10662 negate_expr (arg0)),
10663 fold_convert_loc (loc, type,
10664 TREE_OPERAND (arg1, 0)));
10666 if (! FLOAT_TYPE_P (type))
10668 /* Transform x * -C into -x * C if x is easily negatable. */
10669 if (TREE_CODE (arg1) == INTEGER_CST
10670 && tree_int_cst_sgn (arg1) == -1
10671 && negate_expr_p (arg0)
10672 && (tem = negate_expr (arg1)) != arg1
10673 && !TREE_OVERFLOW (tem))
10674 return fold_build2_loc (loc, MULT_EXPR, type,
10675 fold_convert_loc (loc, type,
10676 negate_expr (arg0)),
10677 tem);
10679 /* (a * (1 << b)) is (a << b) */
10680 if (TREE_CODE (arg1) == LSHIFT_EXPR
10681 && integer_onep (TREE_OPERAND (arg1, 0)))
10682 return fold_build2_loc (loc, LSHIFT_EXPR, type, op0,
10683 TREE_OPERAND (arg1, 1));
10684 if (TREE_CODE (arg0) == LSHIFT_EXPR
10685 && integer_onep (TREE_OPERAND (arg0, 0)))
10686 return fold_build2_loc (loc, LSHIFT_EXPR, type, op1,
10687 TREE_OPERAND (arg0, 1));
10689 /* (A + A) * C -> A * 2 * C */
10690 if (TREE_CODE (arg0) == PLUS_EXPR
10691 && TREE_CODE (arg1) == INTEGER_CST
10692 && operand_equal_p (TREE_OPERAND (arg0, 0),
10693 TREE_OPERAND (arg0, 1), 0))
10694 return fold_build2_loc (loc, MULT_EXPR, type,
10695 omit_one_operand_loc (loc, type,
10696 TREE_OPERAND (arg0, 0),
10697 TREE_OPERAND (arg0, 1)),
10698 fold_build2_loc (loc, MULT_EXPR, type,
10699 build_int_cst (type, 2) , arg1));
10701 /* ((T) (X /[ex] C)) * C cancels out if the conversion is
10702 sign-changing only. */
10703 if (TREE_CODE (arg1) == INTEGER_CST
10704 && TREE_CODE (arg0) == EXACT_DIV_EXPR
10705 && operand_equal_p (arg1, TREE_OPERAND (arg0, 1), 0))
10706 return fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
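/* E.g. "(x /[ex] 4) * 4" folds back to "x": an exact division by
   definition discarded no bits, so the multiplication undoes it. */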
10708 strict_overflow_p = false;
10709 if (TREE_CODE (arg1) == INTEGER_CST
10710 && 0 != (tem = extract_muldiv (op0, arg1, code, NULL_TREE,
10711 &strict_overflow_p)))
10713 if (strict_overflow_p)
10714 fold_overflow_warning (("assuming signed overflow does not "
10715 "occur when simplifying "
10716 "multiplication"),
10717 WARN_STRICT_OVERFLOW_MISC);
10718 return fold_convert_loc (loc, type, tem);
10721 /* Optimize z * conj(z) for integer complex numbers. */
10722 if (TREE_CODE (arg0) == CONJ_EXPR
10723 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
10724 return fold_mult_zconjz (loc, type, arg1);
10725 if (TREE_CODE (arg1) == CONJ_EXPR
10726 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
10727 return fold_mult_zconjz (loc, type, arg0);
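/* Mathematically z * conj(z) == |z|^2; e.g. (3 + 4i) * (3 - 4i)
   folds to 25 + 0i, with the imaginary part known to be zero. */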
10729 else
10731 /* Convert (C1/X)*C2 into (C1*C2)/X. This transformation may change
10732 the result for floating point types due to rounding, so it is applied
10733 only if -fassociative-math was specified. */
10734 if (flag_associative_math
10735 && TREE_CODE (arg0) == RDIV_EXPR
10736 && TREE_CODE (arg1) == REAL_CST
10737 && TREE_CODE (TREE_OPERAND (arg0, 0)) == REAL_CST)
10739 tree tem = const_binop (MULT_EXPR, TREE_OPERAND (arg0, 0),
10740 arg1);
10741 if (tem)
10742 return fold_build2_loc (loc, RDIV_EXPR, type, tem,
10743 TREE_OPERAND (arg0, 1));
10746 /* Strip sign operations from X in X*X, i.e. -Y*-Y -> Y*Y. */
10747 if (operand_equal_p (arg0, arg1, 0))
10749 tree tem = fold_strip_sign_ops (arg0);
10750 if (tem != NULL_TREE)
10752 tem = fold_convert_loc (loc, type, tem);
10753 return fold_build2_loc (loc, MULT_EXPR, type, tem, tem);
10757 /* Fold z * +-I to __complex__ (-+__imag z, +-__real z).
10758 This is not the same for NaNs or if signed zeros are
10759 involved. */
10760 if (!HONOR_NANS (arg0)
10761 && !HONOR_SIGNED_ZEROS (element_mode (arg0))
10762 && COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0))
10763 && TREE_CODE (arg1) == COMPLEX_CST
10764 && real_zerop (TREE_REALPART (arg1)))
10766 tree rtype = TREE_TYPE (TREE_TYPE (arg0));
10767 if (real_onep (TREE_IMAGPART (arg1)))
10768 return
10769 fold_build2_loc (loc, COMPLEX_EXPR, type,
10770 negate_expr (fold_build1_loc (loc, IMAGPART_EXPR,
10771 rtype, arg0)),
10772 fold_build1_loc (loc, REALPART_EXPR, rtype, arg0));
10773 else if (real_minus_onep (TREE_IMAGPART (arg1)))
10774 return
10775 fold_build2_loc (loc, COMPLEX_EXPR, type,
10776 fold_build1_loc (loc, IMAGPART_EXPR, rtype, arg0),
10777 negate_expr (fold_build1_loc (loc, REALPART_EXPR,
10778 rtype, arg0)));
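/* E.g. for z = a + bi, "z * I" folds to -b + ai and "z * -I" folds
   to b - ai: just a swap and negation of the components. */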
10781 /* Optimize z * conj(z) for floating point complex numbers.
10782 Guarded by flag_unsafe_math_optimizations as non-finite
10783 imaginary components don't produce scalar results. */
10784 if (flag_unsafe_math_optimizations
10785 && TREE_CODE (arg0) == CONJ_EXPR
10786 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
10787 return fold_mult_zconjz (loc, type, arg1);
10788 if (flag_unsafe_math_optimizations
10789 && TREE_CODE (arg1) == CONJ_EXPR
10790 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
10791 return fold_mult_zconjz (loc, type, arg0);
10793 if (flag_unsafe_math_optimizations)
10795 enum built_in_function fcode0 = builtin_mathfn_code (arg0);
10796 enum built_in_function fcode1 = builtin_mathfn_code (arg1);
10798 /* Optimizations of root(...)*root(...). */
10799 if (fcode0 == fcode1 && BUILTIN_ROOT_P (fcode0))
10801 tree rootfn, arg;
10802 tree arg00 = CALL_EXPR_ARG (arg0, 0);
10803 tree arg10 = CALL_EXPR_ARG (arg1, 0);
10805 /* Optimize sqrt(x)*sqrt(x) as x. */
10806 if (BUILTIN_SQRT_P (fcode0)
10807 && operand_equal_p (arg00, arg10, 0)
10808 && ! HONOR_SNANS (element_mode (type)))
10809 return arg00;
10811 /* Optimize root(x)*root(y) as root(x*y). */
10812 rootfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
10813 arg = fold_build2_loc (loc, MULT_EXPR, type, arg00, arg10);
10814 return build_call_expr_loc (loc, rootfn, 1, arg);
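/* E.g. "sqrt (x) * sqrt (x)" becomes "x" (SNaNs permitting) and
   "sqrt (x) * sqrt (y)" becomes "sqrt (x * y)"; both depend on
   -funsafe-math-optimizations since the results can differ for
   special values such as negative arguments. */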
10817 /* Optimize expN(x)*expN(y) as expN(x+y). */
10818 if (fcode0 == fcode1 && BUILTIN_EXPONENT_P (fcode0))
10820 tree expfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
10821 tree arg = fold_build2_loc (loc, PLUS_EXPR, type,
10822 CALL_EXPR_ARG (arg0, 0),
10823 CALL_EXPR_ARG (arg1, 0));
10824 return build_call_expr_loc (loc, expfn, 1, arg);
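/* E.g. "exp (x) * exp (y)" folds to "exp (x + y)", and likewise
   for the other expN variants and their float/long double forms. */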
10827 /* Optimizations of pow(...)*pow(...). */
10828 if ((fcode0 == BUILT_IN_POW && fcode1 == BUILT_IN_POW)
10829 || (fcode0 == BUILT_IN_POWF && fcode1 == BUILT_IN_POWF)
10830 || (fcode0 == BUILT_IN_POWL && fcode1 == BUILT_IN_POWL))
10832 tree arg00 = CALL_EXPR_ARG (arg0, 0);
10833 tree arg01 = CALL_EXPR_ARG (arg0, 1);
10834 tree arg10 = CALL_EXPR_ARG (arg1, 0);
10835 tree arg11 = CALL_EXPR_ARG (arg1, 1);
10837 /* Optimize pow(x,y)*pow(z,y) as pow(x*z,y). */
10838 if (operand_equal_p (arg01, arg11, 0))
10840 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
10841 tree arg = fold_build2_loc (loc, MULT_EXPR, type,
10842 arg00, arg10);
10843 return build_call_expr_loc (loc, powfn, 2, arg, arg01);
10846 /* Optimize pow(x,y)*pow(x,z) as pow(x,y+z). */
10847 if (operand_equal_p (arg00, arg10, 0))
10849 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
10850 tree arg = fold_build2_loc (loc, PLUS_EXPR, type,
10851 arg01, arg11);
10852 return build_call_expr_loc (loc, powfn, 2, arg00, arg);
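/* E.g. "pow (x, 2.5) * pow (y, 2.5)" folds to "pow (x * y, 2.5)",
   and "pow (x, 2.0) * pow (x, 3.0)" folds to "pow (x, 5.0)". */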
10856 /* Optimize tan(x)*cos(x) as sin(x). */
10857 if (((fcode0 == BUILT_IN_TAN && fcode1 == BUILT_IN_COS)
10858 || (fcode0 == BUILT_IN_TANF && fcode1 == BUILT_IN_COSF)
10859 || (fcode0 == BUILT_IN_TANL && fcode1 == BUILT_IN_COSL)
10860 || (fcode0 == BUILT_IN_COS && fcode1 == BUILT_IN_TAN)
10861 || (fcode0 == BUILT_IN_COSF && fcode1 == BUILT_IN_TANF)
10862 || (fcode0 == BUILT_IN_COSL && fcode1 == BUILT_IN_TANL))
10863 && operand_equal_p (CALL_EXPR_ARG (arg0, 0),
10864 CALL_EXPR_ARG (arg1, 0), 0))
10866 tree sinfn = mathfn_built_in (type, BUILT_IN_SIN);
10868 if (sinfn != NULL_TREE)
10869 return build_call_expr_loc (loc, sinfn, 1,
10870 CALL_EXPR_ARG (arg0, 0));
10873 /* Optimize x*pow(x,c) as pow(x,c+1). */
10874 if (fcode1 == BUILT_IN_POW
10875 || fcode1 == BUILT_IN_POWF
10876 || fcode1 == BUILT_IN_POWL)
10878 tree arg10 = CALL_EXPR_ARG (arg1, 0);
10879 tree arg11 = CALL_EXPR_ARG (arg1, 1);
10880 if (TREE_CODE (arg11) == REAL_CST
10881 && !TREE_OVERFLOW (arg11)
10882 && operand_equal_p (arg0, arg10, 0))
10884 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg1), 0);
10885 REAL_VALUE_TYPE c;
10886 tree arg;
10888 c = TREE_REAL_CST (arg11);
10889 real_arithmetic (&c, PLUS_EXPR, &c, &dconst1);
10890 arg = build_real (type, c);
10891 return build_call_expr_loc (loc, powfn, 2, arg0, arg);
10895 /* Optimize pow(x,c)*x as pow(x,c+1). */
10896 if (fcode0 == BUILT_IN_POW
10897 || fcode0 == BUILT_IN_POWF
10898 || fcode0 == BUILT_IN_POWL)
10900 tree arg00 = CALL_EXPR_ARG (arg0, 0);
10901 tree arg01 = CALL_EXPR_ARG (arg0, 1);
10902 if (TREE_CODE (arg01) == REAL_CST
10903 && !TREE_OVERFLOW (arg01)
10904 && operand_equal_p (arg1, arg00, 0))
10906 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
10907 REAL_VALUE_TYPE c;
10908 tree arg;
10910 c = TREE_REAL_CST (arg01);
10911 real_arithmetic (&c, PLUS_EXPR, &c, &dconst1);
10912 arg = build_real (type, c);
10913 return build_call_expr_loc (loc, powfn, 2, arg1, arg);
10917 /* Canonicalize x*x as pow(x,2.0), which is expanded as x*x. */
10918 if (!in_gimple_form
10919 && optimize
10920 && operand_equal_p (arg0, arg1, 0))
10922 tree powfn = mathfn_built_in (type, BUILT_IN_POW);
10924 if (powfn)
10926 tree arg = build_real (type, dconst2);
10927 return build_call_expr_loc (loc, powfn, 2, arg0, arg);
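/* E.g. "x * x" at -O becomes "pow (x, 2.0)" in GENERIC; per the
   comment above, expansion turns it back into a single multiply,
   so this is a canonical form rather than a real libcall. */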
10932 goto associate;
10934 case BIT_IOR_EXPR:
10935 bit_ior:
10936 /* ~X | X is -1. */
10937 if (TREE_CODE (arg0) == BIT_NOT_EXPR
10938 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
10940 t1 = build_zero_cst (type);
10941 t1 = fold_unary_loc (loc, BIT_NOT_EXPR, type, t1);
10942 return omit_one_operand_loc (loc, type, t1, arg1);
10945 /* X | ~X is -1. */
10946 if (TREE_CODE (arg1) == BIT_NOT_EXPR
10947 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
10949 t1 = build_zero_cst (type);
10950 t1 = fold_unary_loc (loc, BIT_NOT_EXPR, type, t1);
10951 return omit_one_operand_loc (loc, type, t1, arg0);
10954 /* Canonicalize (X & C1) | C2. */
10955 if (TREE_CODE (arg0) == BIT_AND_EXPR
10956 && TREE_CODE (arg1) == INTEGER_CST
10957 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
10959 int width = TYPE_PRECISION (type), w;
10960 wide_int c1 = TREE_OPERAND (arg0, 1);
10961 wide_int c2 = arg1;
10963 /* If (C1&C2) == C1, then (X&C1)|C2 becomes (X,C2). */
10964 if ((c1 & c2) == c1)
10965 return omit_one_operand_loc (loc, type, arg1,
10966 TREE_OPERAND (arg0, 0));
10968 wide_int msk = wi::mask (width, false,
10969 TYPE_PRECISION (TREE_TYPE (arg1)));
10971 /* If (C1|C2) == ~0 then (X&C1)|C2 becomes X|C2. */
10972 if (msk.and_not (c1 | c2) == 0)
10973 return fold_build2_loc (loc, BIT_IOR_EXPR, type,
10974 TREE_OPERAND (arg0, 0), arg1);
10976 /* Minimize the number of bits set in C1, i.e. C1 := C1 & ~C2,
10977 unless (C1 & ~C2) | (C2 & C3) for some C3 is a mask of some
10978 mode which allows further optimizations. */
10979 c1 &= msk;
10980 c2 &= msk;
10981 wide_int c3 = c1.and_not (c2);
10982 for (w = BITS_PER_UNIT; w <= width; w <<= 1)
10984 wide_int mask = wi::mask (w, false,
10985 TYPE_PRECISION (type));
10986 if (((c1 | c2) & mask) == mask && c1.and_not (mask) == 0)
10988 c3 = mask;
10989 break;
10993 if (c3 != c1)
10994 return fold_build2_loc (loc, BIT_IOR_EXPR, type,
10995 fold_build2_loc (loc, BIT_AND_EXPR, type,
10996 TREE_OPERAND (arg0, 0),
10997 wide_int_to_tree (type,
10998 c3)),
10999 arg1);
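/* Worked example: "(x & 0x0ff0) | 0x00ff" becomes
   "(x & 0x0f00) | 0x00ff", dropping the C1 bits that C2 already
   supplies. The loop above instead widens C1 to a mode mask such
   as 0xffff when (C1 | C2) covers one, since e.g. "x & 0xffff"
   may enable narrower operations later. */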
11002 /* (X & ~Y) | (~X & Y) is X ^ Y */
11003 if (TREE_CODE (arg0) == BIT_AND_EXPR
11004 && TREE_CODE (arg1) == BIT_AND_EXPR)
11006 tree a0, a1, l0, l1, n0, n1;
11008 a0 = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 0));
11009 a1 = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 1));
11011 l0 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
11012 l1 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
11014 n0 = fold_build1_loc (loc, BIT_NOT_EXPR, type, l0);
11015 n1 = fold_build1_loc (loc, BIT_NOT_EXPR, type, l1);
11017 if ((operand_equal_p (n0, a0, 0)
11018 && operand_equal_p (n1, a1, 0))
11019 || (operand_equal_p (n0, a1, 0)
11020 && operand_equal_p (n1, a0, 0)))
11021 return fold_build2_loc (loc, BIT_XOR_EXPR, type, l0, n1);
11024 t1 = distribute_bit_expr (loc, code, type, arg0, arg1);
11025 if (t1 != NULL_TREE)
11026 return t1;
11028 /* Convert (or (not arg0) (not arg1)) to (not (and (arg0) (arg1))).
11030 This results in more efficient code for machines without a NAND
11031 instruction. Combine will canonicalize to the first form
11032 which will allow use of NAND instructions provided by the
11033 backend if they exist. */
11034 if (TREE_CODE (arg0) == BIT_NOT_EXPR
11035 && TREE_CODE (arg1) == BIT_NOT_EXPR)
11037 return
11038 fold_build1_loc (loc, BIT_NOT_EXPR, type,
11039 build2 (BIT_AND_EXPR, type,
11040 fold_convert_loc (loc, type,
11041 TREE_OPERAND (arg0, 0)),
11042 fold_convert_loc (loc, type,
11043 TREE_OPERAND (arg1, 0))));
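/* I.e. "~a | ~b" becomes "~(a & b)": one BIT_NOT instead of two,
   and a single NAND on targets that provide one. */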
11046 /* See if this can be simplified into a rotate first. If that
11047 is unsuccessful continue in the association code. */
11048 goto bit_rotate;
11050 case BIT_XOR_EXPR:
11051 /* ~X ^ X is -1. */
11052 if (TREE_CODE (arg0) == BIT_NOT_EXPR
11053 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
11055 t1 = build_zero_cst (type);
11056 t1 = fold_unary_loc (loc, BIT_NOT_EXPR, type, t1);
11057 return omit_one_operand_loc (loc, type, t1, arg1);
11060 /* X ^ ~X is -1. */
11061 if (TREE_CODE (arg1) == BIT_NOT_EXPR
11062 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
11064 t1 = build_zero_cst (type);
11065 t1 = fold_unary_loc (loc, BIT_NOT_EXPR, type, t1);
11066 return omit_one_operand_loc (loc, type, t1, arg0);
11069 /* If we are XORing two BIT_AND_EXPR's, both of which are and'ing
11070 with a constant, and the two constants have no bits in common,
11071 we should treat this as a BIT_IOR_EXPR since this may produce more
11072 simplifications. */
11073 if (TREE_CODE (arg0) == BIT_AND_EXPR
11074 && TREE_CODE (arg1) == BIT_AND_EXPR
11075 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
11076 && TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
11077 && wi::bit_and (TREE_OPERAND (arg0, 1),
11078 TREE_OPERAND (arg1, 1)) == 0)
11080 code = BIT_IOR_EXPR;
11081 goto bit_ior;
11084 /* (X | Y) ^ X -> Y & ~X. */
11085 if (TREE_CODE (arg0) == BIT_IOR_EXPR
11086 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
11088 tree t2 = TREE_OPERAND (arg0, 1);
11089 t1 = fold_build1_loc (loc, BIT_NOT_EXPR, TREE_TYPE (arg1),
11090 arg1);
11091 t1 = fold_build2_loc (loc, BIT_AND_EXPR, type,
11092 fold_convert_loc (loc, type, t2),
11093 fold_convert_loc (loc, type, t1));
11094 return t1;
11097 /* (Y | X) ^ X -> Y & ~X. */
11098 if (TREE_CODE (arg0) == BIT_IOR_EXPR
11099 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
11101 tree t2 = TREE_OPERAND (arg0, 0);
11102 t1 = fold_build1_loc (loc, BIT_NOT_EXPR, TREE_TYPE (arg1),
11103 arg1);
11104 t1 = fold_build2_loc (loc, BIT_AND_EXPR, type,
11105 fold_convert_loc (loc, type, t2),
11106 fold_convert_loc (loc, type, t1));
11107 return t1;
11110 /* X ^ (X | Y) -> Y & ~X. */
11111 if (TREE_CODE (arg1) == BIT_IOR_EXPR
11112 && operand_equal_p (TREE_OPERAND (arg1, 0), arg0, 0))
11114 tree t2 = TREE_OPERAND (arg1, 1);
11115 t1 = fold_build1_loc (loc, BIT_NOT_EXPR, TREE_TYPE (arg0),
11116 arg0);
11117 t1 = fold_build2_loc (loc, BIT_AND_EXPR, type,
11118 fold_convert_loc (loc, type, t2),
11119 fold_convert_loc (loc, type, t1));
11120 return t1;
11123 /* X ^ (Y | X) -> Y & ~X. */
11124 if (TREE_CODE (arg1) == BIT_IOR_EXPR
11125 && operand_equal_p (TREE_OPERAND (arg1, 1), arg0, 0))
11127 tree t2 = TREE_OPERAND (arg1, 0);
11128 t1 = fold_build1_loc (loc, BIT_NOT_EXPR, TREE_TYPE (arg0),
11129 arg0);
11130 t1 = fold_build2_loc (loc, BIT_AND_EXPR, type,
11131 fold_convert_loc (loc, type, t2),
11132 fold_convert_loc (loc, type, t1));
11133 return t1;
11136 /* Convert ~X ^ ~Y to X ^ Y. */
11137 if (TREE_CODE (arg0) == BIT_NOT_EXPR
11138 && TREE_CODE (arg1) == BIT_NOT_EXPR)
11139 return fold_build2_loc (loc, code, type,
11140 fold_convert_loc (loc, type,
11141 TREE_OPERAND (arg0, 0)),
11142 fold_convert_loc (loc, type,
11143 TREE_OPERAND (arg1, 0)));
11145 /* Convert ~X ^ C to X ^ ~C. */
11146 if (TREE_CODE (arg0) == BIT_NOT_EXPR
11147 && TREE_CODE (arg1) == INTEGER_CST)
11148 return fold_build2_loc (loc, code, type,
11149 fold_convert_loc (loc, type,
11150 TREE_OPERAND (arg0, 0)),
11151 fold_build1_loc (loc, BIT_NOT_EXPR, type, arg1));
11153 /* Fold (X & 1) ^ 1 as (X & 1) == 0. */
11154 if (TREE_CODE (arg0) == BIT_AND_EXPR
11155 && INTEGRAL_TYPE_P (type)
11156 && integer_onep (TREE_OPERAND (arg0, 1))
11157 && integer_onep (arg1))
11158 return fold_build2_loc (loc, EQ_EXPR, type, arg0,
11159 build_zero_cst (TREE_TYPE (arg0)));
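/* E.g. "(x & 1) ^ 1" becomes "(x & 1) == 0", exposing the low-bit
   test as a comparison that later folds understand. */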
11161 /* Fold (X & Y) ^ Y as ~X & Y. */
11162 if (TREE_CODE (arg0) == BIT_AND_EXPR
11163 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
11165 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
11166 return fold_build2_loc (loc, BIT_AND_EXPR, type,
11167 fold_build1_loc (loc, BIT_NOT_EXPR, type, tem),
11168 fold_convert_loc (loc, type, arg1));
11170 /* Fold (X & Y) ^ X as ~Y & X. */
11171 if (TREE_CODE (arg0) == BIT_AND_EXPR
11172 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
11173 && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
11175 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
11176 return fold_build2_loc (loc, BIT_AND_EXPR, type,
11177 fold_build1_loc (loc, BIT_NOT_EXPR, type, tem),
11178 fold_convert_loc (loc, type, arg1));
11180 /* Fold X ^ (X & Y) as X & ~Y. */
11181 if (TREE_CODE (arg1) == BIT_AND_EXPR
11182 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
11184 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 1));
11185 return fold_build2_loc (loc, BIT_AND_EXPR, type,
11186 fold_convert_loc (loc, type, arg0),
11187 fold_build1_loc (loc, BIT_NOT_EXPR, type, tem));
11189 /* Fold X ^ (Y & X) as ~Y & X. */
11190 if (TREE_CODE (arg1) == BIT_AND_EXPR
11191 && operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0)
11192 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
11194 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 0));
11195 return fold_build2_loc (loc, BIT_AND_EXPR, type,
11196 fold_build1_loc (loc, BIT_NOT_EXPR, type, tem),
11197 fold_convert_loc (loc, type, arg0));
11200 /* See if this can be simplified into a rotate first. If that
11201 is unsuccessful continue in the association code. */
11202 goto bit_rotate;
11204 case BIT_AND_EXPR:
11205 /* ~X & X, (X == 0) & X, and !X & X are always zero. */
11206 if ((TREE_CODE (arg0) == BIT_NOT_EXPR
11207 || TREE_CODE (arg0) == TRUTH_NOT_EXPR
11208 || (TREE_CODE (arg0) == EQ_EXPR
11209 && integer_zerop (TREE_OPERAND (arg0, 1))))
11210 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
11211 return omit_one_operand_loc (loc, type, integer_zero_node, arg1);
11213 /* X & ~X , X & (X == 0), and X & !X are always zero. */
11214 if ((TREE_CODE (arg1) == BIT_NOT_EXPR
11215 || TREE_CODE (arg1) == TRUTH_NOT_EXPR
11216 || (TREE_CODE (arg1) == EQ_EXPR
11217 && integer_zerop (TREE_OPERAND (arg1, 1))))
11218 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
11219 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
11221 /* Fold (X ^ 1) & 1 as (X & 1) == 0. */
11222 if (TREE_CODE (arg0) == BIT_XOR_EXPR
11223 && INTEGRAL_TYPE_P (type)
11224 && integer_onep (TREE_OPERAND (arg0, 1))
11225 && integer_onep (arg1))
11227 tree tem2;
11228 tem = TREE_OPERAND (arg0, 0);
11229 tem2 = fold_convert_loc (loc, TREE_TYPE (tem), arg1);
11230 tem2 = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (tem),
11231 tem, tem2);
11232 return fold_build2_loc (loc, EQ_EXPR, type, tem2,
11233 build_zero_cst (TREE_TYPE (tem)));
11235 /* Fold ~X & 1 as (X & 1) == 0. */
11236 if (TREE_CODE (arg0) == BIT_NOT_EXPR
11237 && INTEGRAL_TYPE_P (type)
11238 && integer_onep (arg1))
11240 tree tem2;
11241 tem = TREE_OPERAND (arg0, 0);
11242 tem2 = fold_convert_loc (loc, TREE_TYPE (tem), arg1);
11243 tem2 = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (tem),
11244 tem, tem2);
11245 return fold_build2_loc (loc, EQ_EXPR, type, tem2,
11246 build_zero_cst (TREE_TYPE (tem)));
11248 /* Fold !X & 1 as X == 0. */
11249 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
11250 && integer_onep (arg1))
11252 tem = TREE_OPERAND (arg0, 0);
11253 return fold_build2_loc (loc, EQ_EXPR, type, tem,
11254 build_zero_cst (TREE_TYPE (tem)));
11257 /* Fold (X ^ Y) & Y as ~X & Y. */
11258 if (TREE_CODE (arg0) == BIT_XOR_EXPR
11259 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
11261 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
11262 return fold_build2_loc (loc, BIT_AND_EXPR, type,
11263 fold_build1_loc (loc, BIT_NOT_EXPR, type, tem),
11264 fold_convert_loc (loc, type, arg1));
11266 /* Fold (X ^ Y) & X as ~Y & X. */
11267 if (TREE_CODE (arg0) == BIT_XOR_EXPR
11268 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
11269 && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
11271 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
11272 return fold_build2_loc (loc, BIT_AND_EXPR, type,
11273 fold_build1_loc (loc, BIT_NOT_EXPR, type, tem),
11274 fold_convert_loc (loc, type, arg1));
11276 /* Fold X & (X ^ Y) as X & ~Y. */
11277 if (TREE_CODE (arg1) == BIT_XOR_EXPR
11278 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
11280 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 1));
11281 return fold_build2_loc (loc, BIT_AND_EXPR, type,
11282 fold_convert_loc (loc, type, arg0),
11283 fold_build1_loc (loc, BIT_NOT_EXPR, type, tem));
11285 /* Fold X & (Y ^ X) as ~Y & X. */
11286 if (TREE_CODE (arg1) == BIT_XOR_EXPR
11287 && operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0)
11288 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
11290 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 0));
11291 return fold_build2_loc (loc, BIT_AND_EXPR, type,
11292 fold_build1_loc (loc, BIT_NOT_EXPR, type, tem),
11293 fold_convert_loc (loc, type, arg0));
11296 /* Fold (X * Y) & -(1 << CST) to X * Y if Y is a constant
11297 multiple of 1 << CST. */
11298 if (TREE_CODE (arg1) == INTEGER_CST)
11300 wide_int cst1 = arg1;
11301 wide_int ncst1 = -cst1;
11302 if ((cst1 & ncst1) == ncst1
11303 && multiple_of_p (type, arg0,
11304 wide_int_to_tree (TREE_TYPE (arg1), ncst1)))
11305 return fold_convert_loc (loc, type, arg0);
11308 /* Fold (X * CST1) & CST2 to zero if we can, or drop known zero
11309 bits from CST2. */
11310 if (TREE_CODE (arg1) == INTEGER_CST
11311 && TREE_CODE (arg0) == MULT_EXPR
11312 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
11314 wide_int warg1 = arg1;
11315 wide_int masked = mask_with_tz (type, warg1, TREE_OPERAND (arg0, 1));
11317 if (masked == 0)
11318 return omit_two_operands_loc (loc, type, build_zero_cst (type),
11319 arg0, arg1);
11320 else if (masked != warg1)
11322 /* Avoid the transform if arg1 is a mask of some
11323 mode which allows further optimizations. */
11324 int pop = wi::popcount (warg1);
11325 if (!(pop >= BITS_PER_UNIT
11326 && exact_log2 (pop) != -1
11327 && wi::mask (pop, false, warg1.get_precision ()) == warg1))
11328 return fold_build2_loc (loc, code, type, op0,
11329 wide_int_to_tree (type, masked));
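/* E.g. "(x * 4) & 3" folds to 0 outright, while "(x * 4) & 7"
   drops the known-zero low bits and becomes "(x * 4) & 4". */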
11333 /* For constants M and N, if M == (1LL << cst) - 1 && (N & M) == M,
11334 ((A & N) + B) & M -> (A + B) & M
11335 Similarly if (N & M) == 0,
11336 ((A | N) + B) & M -> (A + B) & M
11337 and for - instead of + (or unary - instead of +)
11338 and/or ^ instead of |.
11339 If B is constant and (B & M) == 0, fold into A & M. */
11340 if (TREE_CODE (arg1) == INTEGER_CST)
11342 wide_int cst1 = arg1;
11343 if ((~cst1 != 0) && (cst1 & (cst1 + 1)) == 0
11344 && INTEGRAL_TYPE_P (TREE_TYPE (arg0))
11345 && (TREE_CODE (arg0) == PLUS_EXPR
11346 || TREE_CODE (arg0) == MINUS_EXPR
11347 || TREE_CODE (arg0) == NEGATE_EXPR)
11348 && (TYPE_OVERFLOW_WRAPS (TREE_TYPE (arg0))
11349 || TREE_CODE (TREE_TYPE (arg0)) == INTEGER_TYPE))
11351 tree pmop[2];
11352 int which = 0;
11353 wide_int cst0;
11355 /* Now we know that arg0 is (C + D) or (C - D) or
11356 -C and arg1 (M) is == (1LL << cst) - 1.
11357 Store C into PMOP[0] and D into PMOP[1]. */
11358 pmop[0] = TREE_OPERAND (arg0, 0);
11359 pmop[1] = NULL;
11360 if (TREE_CODE (arg0) != NEGATE_EXPR)
11362 pmop[1] = TREE_OPERAND (arg0, 1);
11363 which = 1;
11366 if ((wi::max_value (TREE_TYPE (arg0)) & cst1) != cst1)
11367 which = -1;
11369 for (; which >= 0; which--)
11370 switch (TREE_CODE (pmop[which]))
11372 case BIT_AND_EXPR:
11373 case BIT_IOR_EXPR:
11374 case BIT_XOR_EXPR:
11375 if (TREE_CODE (TREE_OPERAND (pmop[which], 1))
11376 != INTEGER_CST)
11377 break;
11378 cst0 = TREE_OPERAND (pmop[which], 1);
11379 cst0 &= cst1;
11380 if (TREE_CODE (pmop[which]) == BIT_AND_EXPR)
11382 if (cst0 != cst1)
11383 break;
11385 else if (cst0 != 0)
11386 break;
11387 /* If C or D is of the form (A & N) where
11388 (N & M) == M, or of the form (A | N) or
11389 (A ^ N) where (N & M) == 0, replace it with A. */
11390 pmop[which] = TREE_OPERAND (pmop[which], 0);
11391 break;
11392 case INTEGER_CST:
11393 /* If C or D is a constant N where (N & M) == 0, it can
11394 be omitted (treated as 0). */
11395 if ((TREE_CODE (arg0) == PLUS_EXPR
11396 || (TREE_CODE (arg0) == MINUS_EXPR && which == 0))
11397 && (cst1 & pmop[which]) == 0)
11398 pmop[which] = NULL;
11399 break;
11400 default:
11401 break;
11404 /* Only build anything new if we optimized one or both arguments
11405 above. */
11406 if (pmop[0] != TREE_OPERAND (arg0, 0)
11407 || (TREE_CODE (arg0) != NEGATE_EXPR
11408 && pmop[1] != TREE_OPERAND (arg0, 1)))
11410 tree utype = TREE_TYPE (arg0);
11411 if (! TYPE_OVERFLOW_WRAPS (TREE_TYPE (arg0)))
11413 /* Perform the operations in a type that has defined
11414 overflow behavior. */
11415 utype = unsigned_type_for (TREE_TYPE (arg0));
11416 if (pmop[0] != NULL)
11417 pmop[0] = fold_convert_loc (loc, utype, pmop[0]);
11418 if (pmop[1] != NULL)
11419 pmop[1] = fold_convert_loc (loc, utype, pmop[1]);
11422 if (TREE_CODE (arg0) == NEGATE_EXPR)
11423 tem = fold_build1_loc (loc, NEGATE_EXPR, utype, pmop[0]);
11424 else if (TREE_CODE (arg0) == PLUS_EXPR)
11426 if (pmop[0] != NULL && pmop[1] != NULL)
11427 tem = fold_build2_loc (loc, PLUS_EXPR, utype,
11428 pmop[0], pmop[1]);
11429 else if (pmop[0] != NULL)
11430 tem = pmop[0];
11431 else if (pmop[1] != NULL)
11432 tem = pmop[1];
11433 else
11434 return build_int_cst (type, 0);
11436 else if (pmop[0] == NULL)
11437 tem = fold_build1_loc (loc, NEGATE_EXPR, utype, pmop[1]);
11438 else
11439 tem = fold_build2_loc (loc, MINUS_EXPR, utype,
11440 pmop[0], pmop[1]);
11441 /* TEM is now the new binary +, - or unary - replacement. */
11442 tem = fold_build2_loc (loc, BIT_AND_EXPR, utype, tem,
11443 fold_convert_loc (loc, utype, arg1));
11444 return fold_convert_loc (loc, type, tem);
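/* Worked example with M == 0xf: in "((a | 16) + b) & 0xf" the
   "| 16" contributes nothing below the mask (16 & 0xf == 0), so
   the whole thing becomes "(a + b) & 0xf", computed in an
   unsigned type first if the original type does not wrap. */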
11449 t1 = distribute_bit_expr (loc, code, type, arg0, arg1);
11450 if (t1 != NULL_TREE)
11451 return t1;
11452 /* Simplify ((int)c & 0377) into (int)c, if c is unsigned char. */
11453 if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg0) == NOP_EXPR
11454 && TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg0, 0))))
11456 prec = element_precision (TREE_TYPE (TREE_OPERAND (arg0, 0)));
11458 wide_int mask = wide_int::from (arg1, prec, UNSIGNED);
11459 if (mask == -1)
11460 return
11461 fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
11464 /* Convert (and (not arg0) (not arg1)) to (not (or (arg0) (arg1))).
11466 This results in more efficient code for machines without a NOR
11467 instruction. Combine will canonicalize to the first form
11468 which will allow use of NOR instructions provided by the
11469 backend if they exist. */
11470 if (TREE_CODE (arg0) == BIT_NOT_EXPR
11471 && TREE_CODE (arg1) == BIT_NOT_EXPR)
11473 return fold_build1_loc (loc, BIT_NOT_EXPR, type,
11474 build2 (BIT_IOR_EXPR, type,
11475 fold_convert_loc (loc, type,
11476 TREE_OPERAND (arg0, 0)),
11477 fold_convert_loc (loc, type,
11478 TREE_OPERAND (arg1, 0))));
11481 /* If arg0 is derived from the address of an object or function, we may
11482 be able to fold this expression using the object or function's
11483 alignment. */
11484 if (POINTER_TYPE_P (TREE_TYPE (arg0)) && tree_fits_uhwi_p (arg1))
11486 unsigned HOST_WIDE_INT modulus, residue;
11487 unsigned HOST_WIDE_INT low = tree_to_uhwi (arg1);
11489 modulus = get_pointer_modulus_and_residue (arg0, &residue,
11490 integer_onep (arg1));
11492 /* This works because modulus is a power of 2. If this weren't the
11493 case, we'd have to replace it by its greatest power-of-2
11494 divisor: modulus & -modulus. */
11495 if (low < modulus)
11496 return build_int_cst (type, residue & low);
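/* E.g. if arg0 is the address of a variable with known 16-byte
   alignment, masking it with 15 folds to the constant residue 0. */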
11499 /* Fold (X << C1) & C2 into (X << C1) & (C2 | ((1 << C1) - 1))
11500 (X >> C1) & C2 into (X >> C1) & (C2 | ~((type) -1 >> C1))
11501 if the new mask might be further optimized. */
11502 if ((TREE_CODE (arg0) == LSHIFT_EXPR
11503 || TREE_CODE (arg0) == RSHIFT_EXPR)
11504 && TYPE_PRECISION (TREE_TYPE (arg0)) <= HOST_BITS_PER_WIDE_INT
11505 && TREE_CODE (arg1) == INTEGER_CST
11506 && tree_fits_uhwi_p (TREE_OPERAND (arg0, 1))
11507 && tree_to_uhwi (TREE_OPERAND (arg0, 1)) > 0
11508 && (tree_to_uhwi (TREE_OPERAND (arg0, 1))
11509 < TYPE_PRECISION (TREE_TYPE (arg0))))
11511 unsigned int shiftc = tree_to_uhwi (TREE_OPERAND (arg0, 1));
11512 unsigned HOST_WIDE_INT mask = TREE_INT_CST_LOW (arg1);
11513 unsigned HOST_WIDE_INT newmask, zerobits = 0;
11514 tree shift_type = TREE_TYPE (arg0);
11516 if (TREE_CODE (arg0) == LSHIFT_EXPR)
11517 zerobits = ((((unsigned HOST_WIDE_INT) 1) << shiftc) - 1);
11518 else if (TREE_CODE (arg0) == RSHIFT_EXPR
11519 && TYPE_PRECISION (TREE_TYPE (arg0))
11520 == GET_MODE_PRECISION (TYPE_MODE (TREE_TYPE (arg0))))
11522 prec = TYPE_PRECISION (TREE_TYPE (arg0));
11523 tree arg00 = TREE_OPERAND (arg0, 0);
11524 /* See if more bits can be proven as zero because of
11525 zero extension. */
11526 if (TREE_CODE (arg00) == NOP_EXPR
11527 && TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg00, 0))))
11529 tree inner_type = TREE_TYPE (TREE_OPERAND (arg00, 0));
11530 if (TYPE_PRECISION (inner_type)
11531 == GET_MODE_PRECISION (TYPE_MODE (inner_type))
11532 && TYPE_PRECISION (inner_type) < prec)
11534 prec = TYPE_PRECISION (inner_type);
11535 /* See if we can shorten the right shift. */
11536 if (shiftc < prec)
11537 shift_type = inner_type;
11538 /* Otherwise X >> C1 is all zeros, so we'll optimize
11539 it into (X, 0) later on by making sure zerobits
11540 is all ones. */
11543 zerobits = ~(unsigned HOST_WIDE_INT) 0;
11544 if (shiftc < prec)
11546 zerobits >>= HOST_BITS_PER_WIDE_INT - shiftc;
11547 zerobits <<= prec - shiftc;
11549 /* For an arithmetic shift, if the sign bit could be set, zerobits
11550 may actually contain sign bits, so no transformation is
11551 possible, unless MASK masks them all away. In that
11552 case the shift needs to be converted into a logical shift. */
11553 if (!TYPE_UNSIGNED (TREE_TYPE (arg0))
11554 && prec == TYPE_PRECISION (TREE_TYPE (arg0)))
11556 if ((mask & zerobits) == 0)
11557 shift_type = unsigned_type_for (TREE_TYPE (arg0));
11558 else
11559 zerobits = 0;
11563 /* ((X << 16) & 0xff00) is (X, 0). */
11564 if ((mask & zerobits) == mask)
11565 return omit_one_operand_loc (loc, type,
11566 build_int_cst (type, 0), arg0);
11568 newmask = mask | zerobits;
11569 if (newmask != mask && (newmask & (newmask + 1)) == 0)
11571 /* Only do the transformation if NEWMASK is some integer
11572 mode's mask. */
11573 for (prec = BITS_PER_UNIT;
11574 prec < HOST_BITS_PER_WIDE_INT; prec <<= 1)
11575 if (newmask == (((unsigned HOST_WIDE_INT) 1) << prec) - 1)
11576 break;
11577 if (prec < HOST_BITS_PER_WIDE_INT
11578 || newmask == ~(unsigned HOST_WIDE_INT) 0)
11580 tree newmaskt;
11582 if (shift_type != TREE_TYPE (arg0))
11584 tem = fold_build2_loc (loc, TREE_CODE (arg0), shift_type,
11585 fold_convert_loc (loc, shift_type,
11586 TREE_OPERAND (arg0, 0)),
11587 TREE_OPERAND (arg0, 1));
11588 tem = fold_convert_loc (loc, type, tem);
11590 else
11591 tem = op0;
11592 newmaskt = build_int_cst_type (TREE_TYPE (op1), newmask);
11593 if (!tree_int_cst_equal (newmaskt, arg1))
11594 return fold_build2_loc (loc, BIT_AND_EXPR, type, tem, newmaskt);
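/* E.g. in "(x << 8) & 0xfff0" the shift already cleared the low
   byte, so the mask may widen to 0xfff0 | 0xff == 0xffff; since
   0xffff is some integer mode's mask, the result
   "(x << 8) & 0xffff" may optimize further. */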
11599 goto associate;
11601 case RDIV_EXPR:
11602 /* Don't touch a floating-point divide by zero unless the mode
11603 of the constant can represent infinity. */
11604 if (TREE_CODE (arg1) == REAL_CST
11605 && !MODE_HAS_INFINITIES (TYPE_MODE (TREE_TYPE (arg1)))
11606 && real_zerop (arg1))
11607 return NULL_TREE;
11609 /* (-A) / (-B) -> A / B */
11610 if (TREE_CODE (arg0) == NEGATE_EXPR && negate_expr_p (arg1))
11611 return fold_build2_loc (loc, RDIV_EXPR, type,
11612 TREE_OPERAND (arg0, 0),
11613 negate_expr (arg1));
11614 if (TREE_CODE (arg1) == NEGATE_EXPR && negate_expr_p (arg0))
11615 return fold_build2_loc (loc, RDIV_EXPR, type,
11616 negate_expr (arg0),
11617 TREE_OPERAND (arg1, 0));
11619 /* Convert A/B/C to A/(B*C). */
11620 if (flag_reciprocal_math
11621 && TREE_CODE (arg0) == RDIV_EXPR)
11622 return fold_build2_loc (loc, RDIV_EXPR, type, TREE_OPERAND (arg0, 0),
11623 fold_build2_loc (loc, MULT_EXPR, type,
11624 TREE_OPERAND (arg0, 1), arg1));
11626 /* Convert A/(B/C) to (A/B)*C. */
11627 if (flag_reciprocal_math
11628 && TREE_CODE (arg1) == RDIV_EXPR)
11629 return fold_build2_loc (loc, MULT_EXPR, type,
11630 fold_build2_loc (loc, RDIV_EXPR, type, arg0,
11631 TREE_OPERAND (arg1, 0)),
11632 TREE_OPERAND (arg1, 1));
11634 /* Convert C1/(X*C2) into (C1/C2)/X. */
11635 if (flag_reciprocal_math
11636 && TREE_CODE (arg1) == MULT_EXPR
11637 && TREE_CODE (arg0) == REAL_CST
11638 && TREE_CODE (TREE_OPERAND (arg1, 1)) == REAL_CST)
11640 tree tem = const_binop (RDIV_EXPR, arg0,
11641 TREE_OPERAND (arg1, 1));
11642 if (tem)
11643 return fold_build2_loc (loc, RDIV_EXPR, type, tem,
11644 TREE_OPERAND (arg1, 0));
11647 if (flag_unsafe_math_optimizations)
11649 enum built_in_function fcode0 = builtin_mathfn_code (arg0);
11650 enum built_in_function fcode1 = builtin_mathfn_code (arg1);
11652 /* Optimize sin(x)/cos(x) as tan(x). */
11653 if (((fcode0 == BUILT_IN_SIN && fcode1 == BUILT_IN_COS)
11654 || (fcode0 == BUILT_IN_SINF && fcode1 == BUILT_IN_COSF)
11655 || (fcode0 == BUILT_IN_SINL && fcode1 == BUILT_IN_COSL))
11656 && operand_equal_p (CALL_EXPR_ARG (arg0, 0),
11657 CALL_EXPR_ARG (arg1, 0), 0))
11659 tree tanfn = mathfn_built_in (type, BUILT_IN_TAN);
11661 if (tanfn != NULL_TREE)
11662 return build_call_expr_loc (loc, tanfn, 1, CALL_EXPR_ARG (arg0, 0));
11665 /* Optimize cos(x)/sin(x) as 1.0/tan(x). */
11666 if (((fcode0 == BUILT_IN_COS && fcode1 == BUILT_IN_SIN)
11667 || (fcode0 == BUILT_IN_COSF && fcode1 == BUILT_IN_SINF)
11668 || (fcode0 == BUILT_IN_COSL && fcode1 == BUILT_IN_SINL))
11669 && operand_equal_p (CALL_EXPR_ARG (arg0, 0),
11670 CALL_EXPR_ARG (arg1, 0), 0))
11672 tree tanfn = mathfn_built_in (type, BUILT_IN_TAN);
11674 if (tanfn != NULL_TREE)
11676 tree tmp = build_call_expr_loc (loc, tanfn, 1,
11677 CALL_EXPR_ARG (arg0, 0));
11678 return fold_build2_loc (loc, RDIV_EXPR, type,
11679 build_real (type, dconst1), tmp);
11683 /* Optimize sin(x)/tan(x) as cos(x) if we don't care about
11684 NaNs or Infinities. */
11685 if (((fcode0 == BUILT_IN_SIN && fcode1 == BUILT_IN_TAN)
11686 || (fcode0 == BUILT_IN_SINF && fcode1 == BUILT_IN_TANF)
11687 || (fcode0 == BUILT_IN_SINL && fcode1 == BUILT_IN_TANL)))
11689 tree arg00 = CALL_EXPR_ARG (arg0, 0);
11690 tree arg01 = CALL_EXPR_ARG (arg1, 0);
11692 if (! HONOR_NANS (arg00)
11693 && ! HONOR_INFINITIES (element_mode (arg00))
11694 && operand_equal_p (arg00, arg01, 0))
11696 tree cosfn = mathfn_built_in (type, BUILT_IN_COS);
11698 if (cosfn != NULL_TREE)
11699 return build_call_expr_loc (loc, cosfn, 1, arg00);
11703 /* Optimize tan(x)/sin(x) as 1.0/cos(x) if we don't care about
11704 NaNs or Infinities. */
11705 if (((fcode0 == BUILT_IN_TAN && fcode1 == BUILT_IN_SIN)
11706 || (fcode0 == BUILT_IN_TANF && fcode1 == BUILT_IN_SINF)
11707 || (fcode0 == BUILT_IN_TANL && fcode1 == BUILT_IN_SINL)))
11709 tree arg00 = CALL_EXPR_ARG (arg0, 0);
11710 tree arg01 = CALL_EXPR_ARG (arg1, 0);
11712 if (! HONOR_NANS (arg00)
11713 && ! HONOR_INFINITIES (element_mode (arg00))
11714 && operand_equal_p (arg00, arg01, 0))
11716 tree cosfn = mathfn_built_in (type, BUILT_IN_COS);
11718 if (cosfn != NULL_TREE)
11720 tree tmp = build_call_expr_loc (loc, cosfn, 1, arg00);
11721 return fold_build2_loc (loc, RDIV_EXPR, type,
11722 build_real (type, dconst1),
11723 tmp);
11728 /* Optimize pow(x,c)/x as pow(x,c-1). */
11729 if (fcode0 == BUILT_IN_POW
11730 || fcode0 == BUILT_IN_POWF
11731 || fcode0 == BUILT_IN_POWL)
11733 tree arg00 = CALL_EXPR_ARG (arg0, 0);
11734 tree arg01 = CALL_EXPR_ARG (arg0, 1);
11735 if (TREE_CODE (arg01) == REAL_CST
11736 && !TREE_OVERFLOW (arg01)
11737 && operand_equal_p (arg1, arg00, 0))
11739 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
11740 REAL_VALUE_TYPE c;
11741 tree arg;
11743 c = TREE_REAL_CST (arg01);
11744 real_arithmetic (&c, MINUS_EXPR, &c, &dconst1);
11745 arg = build_real (type, c);
11746 return build_call_expr_loc (loc, powfn, 2, arg1, arg);
11750 /* Optimize a/root(b/c) into a*root(c/b). */
11751 if (BUILTIN_ROOT_P (fcode1))
11753 tree rootarg = CALL_EXPR_ARG (arg1, 0);
11755 if (TREE_CODE (rootarg) == RDIV_EXPR)
11757 tree rootfn = TREE_OPERAND (CALL_EXPR_FN (arg1), 0);
11758 tree b = TREE_OPERAND (rootarg, 0);
11759 tree c = TREE_OPERAND (rootarg, 1);
11761 tree tmp = fold_build2_loc (loc, RDIV_EXPR, type, c, b);
11763 tmp = build_call_expr_loc (loc, rootfn, 1, tmp);
11764 return fold_build2_loc (loc, MULT_EXPR, type, arg0, tmp);
11768 /* Optimize x/expN(y) into x*expN(-y). */
11769 if (BUILTIN_EXPONENT_P (fcode1))
11771 tree expfn = TREE_OPERAND (CALL_EXPR_FN (arg1), 0);
11772 tree arg = negate_expr (CALL_EXPR_ARG (arg1, 0));
11773 arg1 = build_call_expr_loc (loc,
11774 expfn, 1,
11775 fold_convert_loc (loc, type, arg));
11776 return fold_build2_loc (loc, MULT_EXPR, type, arg0, arg1);
11779 /* Optimize x/pow(y,z) into x*pow(y,-z). */
11780 if (fcode1 == BUILT_IN_POW
11781 || fcode1 == BUILT_IN_POWF
11782 || fcode1 == BUILT_IN_POWL)
11784 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg1), 0);
11785 tree arg10 = CALL_EXPR_ARG (arg1, 0);
11786 tree arg11 = CALL_EXPR_ARG (arg1, 1);
11787 tree neg11 = fold_convert_loc (loc, type,
11788 negate_expr (arg11));
11789 arg1 = build_call_expr_loc (loc, powfn, 2, arg10, neg11);
11790 return fold_build2_loc (loc, MULT_EXPR, type, arg0, arg1);
11793 return NULL_TREE;
11795 case TRUNC_DIV_EXPR:
11796 /* Optimize (X & (-A)) / A where A is a power of 2,
11797 to X >> log2(A) */
11798 if (TREE_CODE (arg0) == BIT_AND_EXPR
11799 && !TYPE_UNSIGNED (type) && TREE_CODE (arg1) == INTEGER_CST
11800 && integer_pow2p (arg1) && tree_int_cst_sgn (arg1) > 0)
11802 tree sum = fold_binary_loc (loc, PLUS_EXPR, TREE_TYPE (arg1),
11803 arg1, TREE_OPERAND (arg0, 1));
11804 if (sum && integer_zerop (sum)) {
11805 tree pow2 = build_int_cst (integer_type_node,
11806 wi::exact_log2 (arg1));
11807 return fold_build2_loc (loc, RSHIFT_EXPR, type,
11808 TREE_OPERAND (arg0, 0), pow2);
11812 /* Fall through */
11814 case FLOOR_DIV_EXPR:
11815 /* Simplify A / (B << N) where A and B are positive and B is
11816 a power of 2, to A >> (N + log2(B)). */
11817 strict_overflow_p = false;
11818 if (TREE_CODE (arg1) == LSHIFT_EXPR
11819 && (TYPE_UNSIGNED (type)
11820 || tree_expr_nonnegative_warnv_p (op0, &strict_overflow_p)))
11822 tree sval = TREE_OPERAND (arg1, 0);
11823 if (integer_pow2p (sval) && tree_int_cst_sgn (sval) > 0)
11825 tree sh_cnt = TREE_OPERAND (arg1, 1);
11826 tree pow2 = build_int_cst (TREE_TYPE (sh_cnt),
11827 wi::exact_log2 (sval));
11829 if (strict_overflow_p)
11830 fold_overflow_warning (("assuming signed overflow does not "
11831 "occur when simplifying A / (B << N)"),
11832 WARN_STRICT_OVERFLOW_MISC);
11834 sh_cnt = fold_build2_loc (loc, PLUS_EXPR, TREE_TYPE (sh_cnt),
11835 sh_cnt, pow2);
11836 return fold_build2_loc (loc, RSHIFT_EXPR, type,
11837 fold_convert_loc (loc, type, arg0), sh_cnt);
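/* E.g. for unsigned x, "x / (4 << n)" folds to "x >> (n + 2)". */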
11841 /* Fall through */
11843 case ROUND_DIV_EXPR:
11844 case CEIL_DIV_EXPR:
11845 case EXACT_DIV_EXPR:
11846 if (integer_zerop (arg1))
11847 return NULL_TREE;
11849 /* Convert -A / -B to A / B when the type is signed and overflow is
11850 undefined. */
11851 if ((!INTEGRAL_TYPE_P (type) || TYPE_OVERFLOW_UNDEFINED (type))
11852 && TREE_CODE (arg0) == NEGATE_EXPR
11853 && negate_expr_p (arg1))
11855 if (INTEGRAL_TYPE_P (type))
11856 fold_overflow_warning (("assuming signed overflow does not occur "
11857 "when distributing negation across "
11858 "division"),
11859 WARN_STRICT_OVERFLOW_MISC);
11860 return fold_build2_loc (loc, code, type,
11861 fold_convert_loc (loc, type,
11862 TREE_OPERAND (arg0, 0)),
11863 fold_convert_loc (loc, type,
11864 negate_expr (arg1)));
11866 if ((!INTEGRAL_TYPE_P (type) || TYPE_OVERFLOW_UNDEFINED (type))
11867 && TREE_CODE (arg1) == NEGATE_EXPR
11868 && negate_expr_p (arg0))
11870 if (INTEGRAL_TYPE_P (type))
11871 fold_overflow_warning (("assuming signed overflow does not occur "
11872 "when distributing negation across "
11873 "division"),
11874 WARN_STRICT_OVERFLOW_MISC);
11875 return fold_build2_loc (loc, code, type,
11876 fold_convert_loc (loc, type,
11877 negate_expr (arg0)),
11878 fold_convert_loc (loc, type,
11879 TREE_OPERAND (arg1, 0)));
11882 /* If arg0 is a multiple of arg1, then rewrite to the fastest div
11883 operation, EXACT_DIV_EXPR.
11885 Note that only CEIL_DIV_EXPR and FLOOR_DIV_EXPR are rewritten now.
11886 At one time others generated faster code; it's not clear whether they do
11887 after the last round of changes to the DIV code in expmed.c. */
11888 if ((code == CEIL_DIV_EXPR || code == FLOOR_DIV_EXPR)
11889 && multiple_of_p (type, arg0, arg1))
11890 return fold_build2_loc (loc, EXACT_DIV_EXPR, type, arg0, arg1);
11892 strict_overflow_p = false;
11893 if (TREE_CODE (arg1) == INTEGER_CST
11894 && 0 != (tem = extract_muldiv (op0, arg1, code, NULL_TREE,
11895 &strict_overflow_p)))
11897 if (strict_overflow_p)
11898 fold_overflow_warning (("assuming signed overflow does not occur "
11899 "when simplifying division"),
11900 WARN_STRICT_OVERFLOW_MISC);
11901 return fold_convert_loc (loc, type, tem);
11904 return NULL_TREE;
11906 case CEIL_MOD_EXPR:
11907 case FLOOR_MOD_EXPR:
11908 case ROUND_MOD_EXPR:
11909 case TRUNC_MOD_EXPR:
11910 /* X % -Y is the same as X % Y. */
11911 if (code == TRUNC_MOD_EXPR
11912 && !TYPE_UNSIGNED (type)
11913 && TREE_CODE (arg1) == NEGATE_EXPR
11914 && !TYPE_OVERFLOW_TRAPS (type))
11915 return fold_build2_loc (loc, code, type, fold_convert_loc (loc, type, arg0),
11916 fold_convert_loc (loc, type,
11917 TREE_OPERAND (arg1, 0)));
11919 strict_overflow_p = false;
11920 if (TREE_CODE (arg1) == INTEGER_CST
11921 && 0 != (tem = extract_muldiv (op0, arg1, code, NULL_TREE,
11922 &strict_overflow_p)))
11924 if (strict_overflow_p)
11925 fold_overflow_warning (("assuming signed overflow does not occur "
11926 "when simplifying modulus"),
11927 WARN_STRICT_OVERFLOW_MISC);
11928 return fold_convert_loc (loc, type, tem);
11931 /* Optimize TRUNC_MOD_EXPR by a power of two into a BIT_AND_EXPR,
11932 i.e. "X % C" into "X & (C - 1)", if X and C are positive. */
11933 if ((code == TRUNC_MOD_EXPR || code == FLOOR_MOD_EXPR)
11934 && (TYPE_UNSIGNED (type)
11935 || tree_expr_nonnegative_warnv_p (op0, &strict_overflow_p)))
11937 tree c = arg1;
11938 /* Also optimize A % (C << N) where C is a power of 2,
11939 to A & ((C << N) - 1). */
11940 if (TREE_CODE (arg1) == LSHIFT_EXPR)
11941 c = TREE_OPERAND (arg1, 0);
11943 if (integer_pow2p (c) && tree_int_cst_sgn (c) > 0)
11945 tree mask
11946 = fold_build2_loc (loc, MINUS_EXPR, TREE_TYPE (arg1), arg1,
11947 build_int_cst (TREE_TYPE (arg1), 1));
11948 if (strict_overflow_p)
11949 fold_overflow_warning (("assuming signed overflow does not "
11950 "occur when simplifying "
11951 "X % (power of two)"),
11952 WARN_STRICT_OVERFLOW_MISC);
11953 return fold_build2_loc (loc, BIT_AND_EXPR, type,
11954 fold_convert_loc (loc, type, arg0),
11955 fold_convert_loc (loc, type, mask));
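/* E.g. for unsigned x, "x % 8" folds to "x & 7", and
   "x % (2 << n)" folds to "x & ((2 << n) - 1)". */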
11959 return NULL_TREE;
11961 case LROTATE_EXPR:
11962 case RROTATE_EXPR:
11963 case RSHIFT_EXPR:
11964 case LSHIFT_EXPR:
11965 /* Since a negative shift count is not well-defined,
11966 don't try to compute it in the compiler. */
11967 if (TREE_CODE (arg1) == INTEGER_CST && tree_int_cst_sgn (arg1) < 0)
11968 return NULL_TREE;
11970 prec = element_precision (type);
11972 /* Turn (a OP c1) OP c2 into a OP (c1+c2). */
11973 if (TREE_CODE (op0) == code && tree_fits_uhwi_p (arg1)
11974 && tree_to_uhwi (arg1) < prec
11975 && tree_fits_uhwi_p (TREE_OPERAND (arg0, 1))
11976 && tree_to_uhwi (TREE_OPERAND (arg0, 1)) < prec)
11978 unsigned int low = (tree_to_uhwi (TREE_OPERAND (arg0, 1))
11979 + tree_to_uhwi (arg1));
11981 /* Deal with a OP (c1 + c2) being undefined but (a OP c1) OP c2
11982 being well defined. */
11983 if (low >= prec)
11985 if (code == LROTATE_EXPR || code == RROTATE_EXPR)
11986 low = low % prec;
11987 else if (TYPE_UNSIGNED (type) || code == LSHIFT_EXPR)
11988 return omit_one_operand_loc (loc, type, build_zero_cst (type),
11989 TREE_OPERAND (arg0, 0));
11990 else
11991 low = prec - 1;
11994 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0),
11995 build_int_cst (TREE_TYPE (arg1), low));
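/* E.g. "(x >> 3) >> 5" folds to "x >> 8". When the combined count
   reaches the precision, left and unsigned right shifts fold to 0,
   rotates reduce the count modulo the precision, and signed right
   shifts clamp it to precision - 1. */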
11998 /* Transform (x >> c) << c into x & (-1<<c), or transform (x << c) >> c
11999 into x & ((unsigned)-1 >> c) for unsigned types. */
12000 if (((code == LSHIFT_EXPR && TREE_CODE (arg0) == RSHIFT_EXPR)
12001 || (TYPE_UNSIGNED (type)
12002 && code == RSHIFT_EXPR && TREE_CODE (arg0) == LSHIFT_EXPR))
12003 && tree_fits_uhwi_p (arg1)
12004 && tree_to_uhwi (arg1) < prec
12005 && tree_fits_uhwi_p (TREE_OPERAND (arg0, 1))
12006 && tree_to_uhwi (TREE_OPERAND (arg0, 1)) < prec)
12008 HOST_WIDE_INT low0 = tree_to_uhwi (TREE_OPERAND (arg0, 1));
12009 HOST_WIDE_INT low1 = tree_to_uhwi (arg1);
12010 tree lshift;
12011 tree arg00;
12013 if (low0 == low1)
12015 arg00 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
12017 lshift = build_minus_one_cst (type);
12018 lshift = const_binop (code, lshift, arg1);
12020 return fold_build2_loc (loc, BIT_AND_EXPR, type, arg00, lshift);
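/* E.g. for 32-bit x, "(x >> 4) << 4" folds to "x & 0xfffffff0",
   and for unsigned x, "(x << 4) >> 4" folds to "x & 0x0fffffff". */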
12024 /* If we have a rotate of a bit operation with the rotate count and
12025 the second operand of the bit operation both constant,
12026 permute the two operations. */
12027 if (code == RROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST
12028 && (TREE_CODE (arg0) == BIT_AND_EXPR
12029 || TREE_CODE (arg0) == BIT_IOR_EXPR
12030 || TREE_CODE (arg0) == BIT_XOR_EXPR)
12031 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
12032 return fold_build2_loc (loc, TREE_CODE (arg0), type,
12033 fold_build2_loc (loc, code, type,
12034 TREE_OPERAND (arg0, 0), arg1),
12035 fold_build2_loc (loc, code, type,
12036 TREE_OPERAND (arg0, 1), arg1));
12038 /* Two consecutive rotates adding up to some integer
12039 multiple of the precision of the type can be ignored. */
12040 if (code == RROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST
12041 && TREE_CODE (arg0) == RROTATE_EXPR
12042 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
12043 && wi::umod_trunc (wi::add (arg1, TREE_OPERAND (arg0, 1)),
12044 prec) == 0)
12045 return TREE_OPERAND (arg0, 0);
12047 /* Fold (X & C2) << C1 into (X << C1) & (C2 << C1)
12048 (X & C2) >> C1 into (X >> C1) & (C2 >> C1)
12049 if the latter can be further optimized. */
12050 if ((code == LSHIFT_EXPR || code == RSHIFT_EXPR)
12051 && TREE_CODE (arg0) == BIT_AND_EXPR
12052 && TREE_CODE (arg1) == INTEGER_CST
12053 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
12055 tree mask = fold_build2_loc (loc, code, type,
12056 fold_convert_loc (loc, type,
12057 TREE_OPERAND (arg0, 1)),
12058 arg1);
12059 tree shift = fold_build2_loc (loc, code, type,
12060 fold_convert_loc (loc, type,
12061 TREE_OPERAND (arg0, 0)),
12062 arg1);
12063 tem = fold_binary_loc (loc, BIT_AND_EXPR, type, shift, mask);
12064 if (tem)
12065 return tem;
12068 return NULL_TREE;
12070 case MIN_EXPR:
12071 tem = fold_minmax (loc, MIN_EXPR, type, arg0, arg1);
12072 if (tem)
12073 return tem;
12074 goto associate;
12076 case MAX_EXPR:
12077 tem = fold_minmax (loc, MAX_EXPR, type, arg0, arg1);
12078 if (tem)
12079 return tem;
12080 goto associate;
12082 case TRUTH_ANDIF_EXPR:
12083 /* Note that the operands of this must be ints
12084 and their values must be 0 or 1.
12085 ("true" is a fixed value perhaps depending on the language.) */
12086 /* If first arg is constant zero, return it. */
12087 if (integer_zerop (arg0))
12088 return fold_convert_loc (loc, type, arg0);
12089 case TRUTH_AND_EXPR:
12090 /* If either arg is constant true, drop it. */
12091 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
12092 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
12093 if (TREE_CODE (arg1) == INTEGER_CST && ! integer_zerop (arg1)
12094 /* Preserve sequence points. */
12095 && (code != TRUTH_ANDIF_EXPR || ! TREE_SIDE_EFFECTS (arg0)))
12096 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
12097 /* If second arg is constant zero, result is zero, but first arg
12098 must be evaluated. */
12099 if (integer_zerop (arg1))
12100 return omit_one_operand_loc (loc, type, arg1, arg0);
12101 /* Likewise for first arg, but note that only the TRUTH_AND_EXPR
12102 case will be handled here. */
12103 if (integer_zerop (arg0))
12104 return omit_one_operand_loc (loc, type, arg0, arg1);
12106 /* !X && X is always false. */
12107 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
12108 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
12109 return omit_one_operand_loc (loc, type, integer_zero_node, arg1);
12110 /* X && !X is always false. */
12111 if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
12112 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
12113 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
12115 /* A < X && A + 1 > Y ==> A < X && A >= Y. Normally A + 1 > Y
12116 means A >= Y && A != MAX, but in this case we know that
12117 A < X <= MAX. */
12119 if (!TREE_SIDE_EFFECTS (arg0)
12120 && !TREE_SIDE_EFFECTS (arg1))
12122 tem = fold_to_nonsharp_ineq_using_bound (loc, arg0, arg1);
12123 if (tem && !operand_equal_p (tem, arg0, 0))
12124 return fold_build2_loc (loc, code, type, tem, arg1);
12126 tem = fold_to_nonsharp_ineq_using_bound (loc, arg1, arg0);
12127 if (tem && !operand_equal_p (tem, arg1, 0))
12128 return fold_build2_loc (loc, code, type, arg0, tem);
12131 if ((tem = fold_truth_andor (loc, code, type, arg0, arg1, op0, op1))
12132 != NULL_TREE)
12133 return tem;
12135 return NULL_TREE;
12137 case TRUTH_ORIF_EXPR:
12138 /* Note that the operands of this must be ints
12139 and their values must be 0 or true.
12140 ("true" is a fixed value perhaps depending on the language.) */
12141 /* If first arg is constant true, return it. */
12142 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
12143 return fold_convert_loc (loc, type, arg0);
12144 case TRUTH_OR_EXPR:
12145 /* If either arg is constant zero, drop it. */
12146 if (TREE_CODE (arg0) == INTEGER_CST && integer_zerop (arg0))
12147 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
12148 if (TREE_CODE (arg1) == INTEGER_CST && integer_zerop (arg1)
12149 /* Preserve sequence points. */
12150 && (code != TRUTH_ORIF_EXPR || ! TREE_SIDE_EFFECTS (arg0)))
12151 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
12152 /* If second arg is constant true, result is true, but we must
12153 evaluate first arg. */
12154 if (TREE_CODE (arg1) == INTEGER_CST && ! integer_zerop (arg1))
12155 return omit_one_operand_loc (loc, type, arg1, arg0);
12156 /* Likewise for first arg, but note this only occurs here for
12157 TRUTH_OR_EXPR. */
12158 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
12159 return omit_one_operand_loc (loc, type, arg0, arg1);
12161 /* !X || X is always true. */
12162 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
12163 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
12164 return omit_one_operand_loc (loc, type, integer_one_node, arg1);
12165 /* X || !X is always true. */
12166 if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
12167 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
12168 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
12170 /* (X && !Y) || (!X && Y) is X ^ Y */
12171 if (TREE_CODE (arg0) == TRUTH_AND_EXPR
12172 && TREE_CODE (arg1) == TRUTH_AND_EXPR)
12174 tree a0, a1, l0, l1, n0, n1;
12176 a0 = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 0));
12177 a1 = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 1));
12179 l0 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
12180 l1 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
12182 n0 = fold_build1_loc (loc, TRUTH_NOT_EXPR, type, l0);
12183 n1 = fold_build1_loc (loc, TRUTH_NOT_EXPR, type, l1);
12185 if ((operand_equal_p (n0, a0, 0)
12186 && operand_equal_p (n1, a1, 0))
12187 || (operand_equal_p (n0, a1, 0)
12188 && operand_equal_p (n1, a0, 0)))
12189 return fold_build2_loc (loc, TRUTH_XOR_EXPR, type, l0, n1);
12192 if ((tem = fold_truth_andor (loc, code, type, arg0, arg1, op0, op1))
12193 != NULL_TREE)
12194 return tem;
12196 return NULL_TREE;
12198 case TRUTH_XOR_EXPR:
12199 /* If the second arg is constant zero, drop it. */
12200 if (integer_zerop (arg1))
12201 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
12202 /* If the second arg is constant true, this is a logical inversion. */
12203 if (integer_onep (arg1))
12205 tem = invert_truthvalue_loc (loc, arg0);
12206 return non_lvalue_loc (loc, fold_convert_loc (loc, type, tem));
12208 /* Identical arguments cancel to zero. */
12209 if (operand_equal_p (arg0, arg1, 0))
12210 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
12212 /* !X ^ X is always true. */
12213 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
12214 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
12215 return omit_one_operand_loc (loc, type, integer_one_node, arg1);
12217 /* X ^ !X is always true. */
12218 if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
12219 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
12220 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
12222 return NULL_TREE;
12224 case EQ_EXPR:
12225 case NE_EXPR:
12226 STRIP_NOPS (arg0);
12227 STRIP_NOPS (arg1);
12229 tem = fold_comparison (loc, code, type, op0, op1);
12230 if (tem != NULL_TREE)
12231 return tem;
12233 /* bool_var != 0 becomes bool_var. */
12234 if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_zerop (arg1)
12235 && code == NE_EXPR)
12236 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
12238 /* bool_var == 1 becomes bool_var. */
12239 if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_onep (arg1)
12240 && code == EQ_EXPR)
12241 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
12243 /* bool_var != 1 becomes !bool_var. */
12244 if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_onep (arg1)
12245 && code == NE_EXPR)
12246 return fold_convert_loc (loc, type,
12247 fold_build1_loc (loc, TRUTH_NOT_EXPR,
12248 TREE_TYPE (arg0), arg0));
12250 /* bool_var == 0 becomes !bool_var. */
12251 if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_zerop (arg1)
12252 && code == EQ_EXPR)
12253 return fold_convert_loc (loc, type,
12254 fold_build1_loc (loc, TRUTH_NOT_EXPR,
12255 TREE_TYPE (arg0), arg0));
12257 /* !exp != 0 becomes !exp */
12258 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR && integer_zerop (arg1)
12259 && code == NE_EXPR)
12260 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
12262 /* If this is an equality comparison of the addresses of two non-weak,
12263 unaliased symbols neither of which is extern (since we do not
12264 have access to attributes for externs), then we know the result. */
12265 if (TREE_CODE (arg0) == ADDR_EXPR
12266 && DECL_P (TREE_OPERAND (arg0, 0))
12267 && TREE_CODE (arg1) == ADDR_EXPR
12268 && DECL_P (TREE_OPERAND (arg1, 0)))
12270 int equal;
12272 if (decl_in_symtab_p (TREE_OPERAND (arg0, 0))
12273 && decl_in_symtab_p (TREE_OPERAND (arg1, 0)))
12274 equal = symtab_node::get_create (TREE_OPERAND (arg0, 0))
12275 ->equal_address_to (symtab_node::get_create
12276 (TREE_OPERAND (arg1, 0)));
12277 else
12278 equal = TREE_OPERAND (arg0, 0) == TREE_OPERAND (arg1, 0);
12279 if (equal != 2)
12280 return constant_boolean_node (equal
12281 ? code == EQ_EXPR : code != EQ_EXPR,
12282 type);
12285 /* Similarly for a NEGATE_EXPR. */
12286 if (TREE_CODE (arg0) == NEGATE_EXPR
12287 && TREE_CODE (arg1) == INTEGER_CST
12288 && 0 != (tem = negate_expr (fold_convert_loc (loc, TREE_TYPE (arg0),
12289 arg1)))
12290 && TREE_CODE (tem) == INTEGER_CST
12291 && !TREE_OVERFLOW (tem))
12292 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0), tem);
12294 /* Similarly for a BIT_XOR_EXPR; X ^ C1 == C2 is X == (C1 ^ C2). */
12295 if (TREE_CODE (arg0) == BIT_XOR_EXPR
12296 && TREE_CODE (arg1) == INTEGER_CST
12297 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
12298 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0),
12299 fold_build2_loc (loc, BIT_XOR_EXPR, TREE_TYPE (arg0),
12300 fold_convert_loc (loc,
12301 TREE_TYPE (arg0),
12302 arg1),
12303 TREE_OPERAND (arg0, 1)));
12305 /* Transform comparisons of the form X +- Y CMP X to Y CMP 0. */
12306 if ((TREE_CODE (arg0) == PLUS_EXPR
12307 || TREE_CODE (arg0) == POINTER_PLUS_EXPR
12308 || TREE_CODE (arg0) == MINUS_EXPR)
12309 && operand_equal_p (tree_strip_nop_conversions (TREE_OPERAND (arg0,
12310 0)),
12311 arg1, 0)
12312 && (INTEGRAL_TYPE_P (TREE_TYPE (arg0))
12313 || POINTER_TYPE_P (TREE_TYPE (arg0))))
12315 tree val = TREE_OPERAND (arg0, 1);
12316 return omit_two_operands_loc (loc, type,
12317 fold_build2_loc (loc, code, type,
12318 val,
12319 build_int_cst (TREE_TYPE (val),
12320 0)),
12321 TREE_OPERAND (arg0, 0), arg1);
12324 /* Transform comparisons of the form C - X CMP X if C % 2 == 1. */
12325 if (TREE_CODE (arg0) == MINUS_EXPR
12326 && TREE_CODE (TREE_OPERAND (arg0, 0)) == INTEGER_CST
12327 && operand_equal_p (tree_strip_nop_conversions (TREE_OPERAND (arg0,
12328 1)),
12329 arg1, 0)
12330 && wi::extract_uhwi (TREE_OPERAND (arg0, 0), 0, 1) == 1)
12332 return omit_two_operands_loc (loc, type,
12333 code == NE_EXPR
12334 ? boolean_true_node : boolean_false_node,
12335 TREE_OPERAND (arg0, 1), arg1);
12338 /* Convert ABS_EXPR<x> == 0 or ABS_EXPR<x> != 0 to x == 0 or x != 0. */
12339 if (TREE_CODE (arg0) == ABS_EXPR
12340 && (integer_zerop (arg1) || real_zerop (arg1)))
12341 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0), arg1);
12343 /* If this is an EQ or NE comparison with zero and ARG0 is
12344 (1 << foo) & bar, convert it to (bar >> foo) & 1. Both require
12345 two operations, but the latter can be done in one less insn
12346 on machines that have only two-operand insns or on which a
12347 constant cannot be the first operand. */
12348 if (TREE_CODE (arg0) == BIT_AND_EXPR
12349 && integer_zerop (arg1))
12351 tree arg00 = TREE_OPERAND (arg0, 0);
12352 tree arg01 = TREE_OPERAND (arg0, 1);
12353 if (TREE_CODE (arg00) == LSHIFT_EXPR
12354 && integer_onep (TREE_OPERAND (arg00, 0)))
12356 tree tem = fold_build2_loc (loc, RSHIFT_EXPR, TREE_TYPE (arg00),
12357 arg01, TREE_OPERAND (arg00, 1));
12358 tem = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg0), tem,
12359 build_int_cst (TREE_TYPE (arg0), 1));
12360 return fold_build2_loc (loc, code, type,
12361 fold_convert_loc (loc, TREE_TYPE (arg1), tem),
12362 arg1);
12364 else if (TREE_CODE (arg01) == LSHIFT_EXPR
12365 && integer_onep (TREE_OPERAND (arg01, 0)))
12367 tree tem = fold_build2_loc (loc, RSHIFT_EXPR, TREE_TYPE (arg01),
12368 arg00, TREE_OPERAND (arg01, 1));
12369 tem = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg0), tem,
12370 build_int_cst (TREE_TYPE (arg0), 1));
12371 return fold_build2_loc (loc, code, type,
12372 fold_convert_loc (loc, TREE_TYPE (arg1), tem),
12373 arg1);
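/* Both forms test bit FOO of BAR; the rewritten one avoids a constant
   first operand.  A standalone sketch, assuming valid shift counts:

     #include <assert.h>
     int main (void)
     {
       unsigned bar = 0xa5u;
       for (unsigned foo = 0; foo < 8u; foo++)
         assert ((((1u << foo) & bar) == 0) == (((bar >> foo) & 1u) == 0));
       return 0;
     }
 */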
12377 /* If this is an NE or EQ comparison of zero against the result of a
12378 signed MOD operation whose second operand is a power of 2, make
12379 the MOD operation unsigned since it is simpler and equivalent. */
12380 if (integer_zerop (arg1)
12381 && !TYPE_UNSIGNED (TREE_TYPE (arg0))
12382 && (TREE_CODE (arg0) == TRUNC_MOD_EXPR
12383 || TREE_CODE (arg0) == CEIL_MOD_EXPR
12384 || TREE_CODE (arg0) == FLOOR_MOD_EXPR
12385 || TREE_CODE (arg0) == ROUND_MOD_EXPR)
12386 && integer_pow2p (TREE_OPERAND (arg0, 1)))
12388 tree newtype = unsigned_type_for (TREE_TYPE (arg0));
12389 tree newmod = fold_build2_loc (loc, TREE_CODE (arg0), newtype,
12390 fold_convert_loc (loc, newtype,
12391 TREE_OPERAND (arg0, 0)),
12392 fold_convert_loc (loc, newtype,
12393 TREE_OPERAND (arg0, 1)));
12395 return fold_build2_loc (loc, code, type, newmod,
12396 fold_convert_loc (loc, newtype, arg1));
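/* For a power-of-two modulus, X % M == 0 only tests the low log2(M)
   bits of X, on which signed and unsigned division agree.  A
   standalone sketch, assuming two's complement:

     #include <assert.h>
     int main (void)
     {
       for (int x = -64; x <= 64; x++)
         assert ((x % 8 == 0) == ((unsigned) x % 8u == 0u));
       return 0;
     }
 */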
12399 /* Fold ((X >> C1) & C2) == 0 and ((X >> C1) & C2) != 0 where
12400 C1 is a valid shift constant, and C2 is a power of two, i.e.
12401 a single bit. */
12402 if (TREE_CODE (arg0) == BIT_AND_EXPR
12403 && TREE_CODE (TREE_OPERAND (arg0, 0)) == RSHIFT_EXPR
12404 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1))
12405 == INTEGER_CST
12406 && integer_pow2p (TREE_OPERAND (arg0, 1))
12407 && integer_zerop (arg1))
12409 tree itype = TREE_TYPE (arg0);
12410 tree arg001 = TREE_OPERAND (TREE_OPERAND (arg0, 0), 1);
12411 prec = TYPE_PRECISION (itype);
12413 /* Check for a valid shift count. */
12414 if (wi::ltu_p (arg001, prec))
12416 tree arg01 = TREE_OPERAND (arg0, 1);
12417 tree arg000 = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
12418 unsigned HOST_WIDE_INT log2 = tree_log2 (arg01);
12419 /* If (C2 << C1) doesn't overflow, then ((X >> C1) & C2) != 0
12420 can be rewritten as (X & (C2 << C1)) != 0. */
12421 if ((log2 + TREE_INT_CST_LOW (arg001)) < prec)
12423 tem = fold_build2_loc (loc, LSHIFT_EXPR, itype, arg01, arg001);
12424 tem = fold_build2_loc (loc, BIT_AND_EXPR, itype, arg000, tem);
12425 return fold_build2_loc (loc, code, type, tem,
12426 fold_convert_loc (loc, itype, arg1));
12428 /* Otherwise, for signed (arithmetic) shifts,
12429 ((X >> C1) & C2) != 0 is rewritten as X < 0, and
12430 ((X >> C1) & C2) == 0 is rewritten as X >= 0. */
12431 else if (!TYPE_UNSIGNED (itype))
12432 return fold_build2_loc (loc, code == EQ_EXPR ? GE_EXPR : LT_EXPR, type,
12433 arg000, build_int_cst (itype, 0));
12434 /* Otherwise, for unsigned (logical) shifts,
12435 ((X >> C1) & C2) != 0 is rewritten as (X,false), and
12436 ((X >> C1) & C2) == 0 is rewritten as (X,true). */
12437 else
12438 return omit_one_operand_loc (loc, type,
12439 code == EQ_EXPR ? integer_one_node
12440 : integer_zero_node,
12441 arg000);
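/* In the non-overflowing case both forms test bit log2(C2) + C1 of X.
   A standalone sketch:

     #include <assert.h>
     int main (void)
     {
       unsigned x = 0x5a5au;
       assert ((((x >> 3) & 4u) != 0u) == ((x & (4u << 3)) != 0u));
       return 0;
     }
 */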
12445 /* If we have (A & C) == C where C is a power of 2, convert this into
12446 (A & C) != 0. Similarly for NE_EXPR. */
12447 if (TREE_CODE (arg0) == BIT_AND_EXPR
12448 && integer_pow2p (TREE_OPERAND (arg0, 1))
12449 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
12450 return fold_build2_loc (loc, code == EQ_EXPR ? NE_EXPR : EQ_EXPR, type,
12451 arg0, fold_convert_loc (loc, TREE_TYPE (arg0),
12452 integer_zero_node));
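/* A & C can only be 0 or C when C has a single bit set, so comparing
   against C is the same as comparing against zero with the sense
   flipped.  A standalone sketch:

     #include <assert.h>
     int main (void)
     {
       for (unsigned a = 0; a < 256u; a++)
         assert (((a & 16u) == 16u) == ((a & 16u) != 0u));
       return 0;
     }
 */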
12454 /* If we have (A & C) != 0 or (A & C) == 0 and C is the sign
12455 bit, then fold the expression into A < 0 or A >= 0. */
12456 tem = fold_single_bit_test_into_sign_test (loc, code, arg0, arg1, type);
12457 if (tem)
12458 return tem;
12460 /* If we have (A & C) == D where D & ~C != 0, convert this into 0.
12461 Similarly for NE_EXPR. */
12462 if (TREE_CODE (arg0) == BIT_AND_EXPR
12463 && TREE_CODE (arg1) == INTEGER_CST
12464 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
12466 tree notc = fold_build1_loc (loc, BIT_NOT_EXPR,
12467 TREE_TYPE (TREE_OPERAND (arg0, 1)),
12468 TREE_OPERAND (arg0, 1));
12469 tree dandnotc
12470 = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg0),
12471 fold_convert_loc (loc, TREE_TYPE (arg0), arg1),
12472 notc);
12473 tree rslt = code == EQ_EXPR ? integer_zero_node : integer_one_node;
12474 if (integer_nonzerop (dandnotc))
12475 return omit_one_operand_loc (loc, type, rslt, arg0);
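/* A & C never has a bit outside C, so it cannot equal a D that does.
   A standalone sketch, with d & ~c nonzero:

     #include <assert.h>
     int main (void)
     {
       unsigned c = 0x0fu, d = 0x1fu;
       for (unsigned a = 0; a < 256u; a++)
         assert ((a & c) != d);
       return 0;
     }
 */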
12478 /* If we have (A | C) == D where C & ~D != 0, convert this into 0.
12479 Similarly for NE_EXPR. */
12480 if (TREE_CODE (arg0) == BIT_IOR_EXPR
12481 && TREE_CODE (arg1) == INTEGER_CST
12482 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
12484 tree notd = fold_build1_loc (loc, BIT_NOT_EXPR, TREE_TYPE (arg1), arg1);
12485 tree candnotd
12486 = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg0),
12487 TREE_OPERAND (arg0, 1),
12488 fold_convert_loc (loc, TREE_TYPE (arg0), notd));
12489 tree rslt = code == EQ_EXPR ? integer_zero_node : integer_one_node;
12490 if (integer_nonzerop (candnotd))
12491 return omit_one_operand_loc (loc, type, rslt, arg0);
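/* A | C always has every bit of C set, so it cannot equal a D that is
   missing one.  A standalone sketch, with c & ~d nonzero:

     #include <assert.h>
     int main (void)
     {
       unsigned c = 0x30u, d = 0x17u;
       for (unsigned a = 0; a < 256u; a++)
         assert ((a | c) != d);
       return 0;
     }
 */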
12494 /* If this is a comparison of a field, we may be able to simplify it. */
12495 if ((TREE_CODE (arg0) == COMPONENT_REF
12496 || TREE_CODE (arg0) == BIT_FIELD_REF)
12497 /* Handle the constant case even without -O
12498 to make sure the warnings are given. */
12499 && (optimize || TREE_CODE (arg1) == INTEGER_CST))
12501 t1 = optimize_bit_field_compare (loc, code, type, arg0, arg1);
12502 if (t1)
12503 return t1;
12506 /* Optimize comparisons of strlen vs zero to a compare of the
12507 first character of the string vs zero. To wit,
12508 strlen(ptr) == 0 => *ptr == 0
12509 strlen(ptr) != 0 => *ptr != 0
12510 Other cases should reduce to one of these two (or a constant)
12511 due to the return value of strlen being unsigned. */
12512 if (TREE_CODE (arg0) == CALL_EXPR
12513 && integer_zerop (arg1))
12515 tree fndecl = get_callee_fndecl (arg0);
12517 if (fndecl
12518 && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
12519 && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_STRLEN
12520 && call_expr_nargs (arg0) == 1
12521 && TREE_CODE (TREE_TYPE (CALL_EXPR_ARG (arg0, 0))) == POINTER_TYPE)
12523 tree iref = build_fold_indirect_ref_loc (loc,
12524 CALL_EXPR_ARG (arg0, 0));
12525 return fold_build2_loc (loc, code, type, iref,
12526 build_int_cst (TREE_TYPE (iref), 0));
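/* strlen (ptr) is zero exactly when the first character is the
   terminating NUL, so only *ptr needs inspecting.  A standalone
   sketch:

     #include <assert.h>
     #include <string.h>
     int main (void)
     {
       const char *s = "", *t = "x";
       assert ((strlen (s) == 0) == (*s == 0));
       assert ((strlen (t) == 0) == (*t == 0));
       return 0;
     }
 */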
12530 /* Fold (X >> C) != 0 into X < 0 if C is one less than the width
12531 of X. Similarly fold (X >> C) == 0 into X >= 0. */
12532 if (TREE_CODE (arg0) == RSHIFT_EXPR
12533 && integer_zerop (arg1)
12534 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
12536 tree arg00 = TREE_OPERAND (arg0, 0);
12537 tree arg01 = TREE_OPERAND (arg0, 1);
12538 tree itype = TREE_TYPE (arg00);
12539 if (wi::eq_p (arg01, element_precision (itype) - 1))
12541 if (TYPE_UNSIGNED (itype))
12543 itype = signed_type_for (itype);
12544 arg00 = fold_convert_loc (loc, itype, arg00);
12546 return fold_build2_loc (loc, code == EQ_EXPR ? GE_EXPR : LT_EXPR,
12547 type, arg00, build_zero_cst (itype));
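/* Shifting right by precision - 1 leaves only the sign bit, so the
   result is nonzero exactly for negative values.  A sketch for 32-bit
   int, assuming arithmetic right shift of negative values:

     #include <assert.h>
     int main (void)
     {
       int x = -5, y = 7;
       assert (((x >> 31) != 0) == (x < 0));
       assert (((y >> 31) != 0) == (y < 0));
       return 0;
     }
 */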
12551 /* (X ^ Y) == 0 becomes X == Y, and (X ^ Y) != 0 becomes X != Y. */
12552 if (integer_zerop (arg1)
12553 && TREE_CODE (arg0) == BIT_XOR_EXPR)
12554 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0),
12555 TREE_OPERAND (arg0, 1));
12557 /* (X ^ Y) == Y becomes X == 0. We know that Y has no side-effects. */
12558 if (TREE_CODE (arg0) == BIT_XOR_EXPR
12559 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
12560 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0),
12561 build_zero_cst (TREE_TYPE (arg0)));
12562 /* Likewise (X ^ Y) == X becomes Y == 0. X has no side-effects. */
12563 if (TREE_CODE (arg0) == BIT_XOR_EXPR
12564 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
12565 && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
12566 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 1),
12567 build_zero_cst (TREE_TYPE (arg0)));
12569 /* (X ^ C1) op C2 can be rewritten as X op (C1 ^ C2). */
12570 if (TREE_CODE (arg0) == BIT_XOR_EXPR
12571 && TREE_CODE (arg1) == INTEGER_CST
12572 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
12573 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0),
12574 fold_build2_loc (loc, BIT_XOR_EXPR, TREE_TYPE (arg1),
12575 TREE_OPERAND (arg0, 1), arg1));
12577 /* Fold (~X & C) == 0 into (X & C) != 0 and (~X & C) != 0 into
12578 (X & C) == 0 when C is a single bit. */
12579 if (TREE_CODE (arg0) == BIT_AND_EXPR
12580 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_NOT_EXPR
12581 && integer_zerop (arg1)
12582 && integer_pow2p (TREE_OPERAND (arg0, 1)))
12584 tem = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg0),
12585 TREE_OPERAND (TREE_OPERAND (arg0, 0), 0),
12586 TREE_OPERAND (arg0, 1));
12587 return fold_build2_loc (loc, code == EQ_EXPR ? NE_EXPR : EQ_EXPR,
12588 type, tem,
12589 fold_convert_loc (loc, TREE_TYPE (arg0),
12590 arg1));
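/* With a single-bit C, ~X & C tests the complement of the bit that
   X & C tests, so the comparison simply swaps == and !=.  A
   standalone sketch:

     #include <assert.h>
     int main (void)
     {
       for (unsigned x = 0; x < 256u; x++)
         assert (((~x & 8u) == 0u) == ((x & 8u) != 0u));
       return 0;
     }
 */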
12593 /* Fold ((X & C) ^ C) eq/ne 0 into (X & C) ne/eq 0, when the
12594 constant C is a power of two, i.e. a single bit. */
12595 if (TREE_CODE (arg0) == BIT_XOR_EXPR
12596 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_AND_EXPR
12597 && integer_zerop (arg1)
12598 && integer_pow2p (TREE_OPERAND (arg0, 1))
12599 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
12600 TREE_OPERAND (arg0, 1), OEP_ONLY_CONST))
12602 tree arg00 = TREE_OPERAND (arg0, 0);
12603 return fold_build2_loc (loc, code == EQ_EXPR ? NE_EXPR : EQ_EXPR, type,
12604 arg00, build_int_cst (TREE_TYPE (arg00), 0));
12607 /* Likewise, fold ((X ^ C) & C) eq/ne 0 into (X & C) ne/eq 0,
12608 when C is a power of two, i.e. a single bit. */
12609 if (TREE_CODE (arg0) == BIT_AND_EXPR
12610 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_XOR_EXPR
12611 && integer_zerop (arg1)
12612 && integer_pow2p (TREE_OPERAND (arg0, 1))
12613 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
12614 TREE_OPERAND (arg0, 1), OEP_ONLY_CONST))
12616 tree arg000 = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
12617 tem = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg000),
12618 arg000, TREE_OPERAND (arg0, 1));
12619 return fold_build2_loc (loc, code == EQ_EXPR ? NE_EXPR : EQ_EXPR, type,
12620 tem, build_int_cst (TREE_TYPE (tem), 0));
12623 if (integer_zerop (arg1)
12624 && tree_expr_nonzero_p (arg0))
12626 tree res = constant_boolean_node (code == NE_EXPR, type);
12627 return omit_one_operand_loc (loc, type, res, arg0);
12630 /* Fold -X op -Y as X op Y, where op is eq/ne. */
12631 if (TREE_CODE (arg0) == NEGATE_EXPR
12632 && TREE_CODE (arg1) == NEGATE_EXPR)
12633 return fold_build2_loc (loc, code, type,
12634 TREE_OPERAND (arg0, 0),
12635 fold_convert_loc (loc, TREE_TYPE (arg0),
12636 TREE_OPERAND (arg1, 0)));
12638 /* Fold (X & C) op (Y & C) as ((X ^ Y) & C) op 0, and symmetries. */
12639 if (TREE_CODE (arg0) == BIT_AND_EXPR
12640 && TREE_CODE (arg1) == BIT_AND_EXPR)
12642 tree arg00 = TREE_OPERAND (arg0, 0);
12643 tree arg01 = TREE_OPERAND (arg0, 1);
12644 tree arg10 = TREE_OPERAND (arg1, 0);
12645 tree arg11 = TREE_OPERAND (arg1, 1);
12646 tree itype = TREE_TYPE (arg0);
12648 if (operand_equal_p (arg01, arg11, 0))
12649 return fold_build2_loc (loc, code, type,
12650 fold_build2_loc (loc, BIT_AND_EXPR, itype,
12651 fold_build2_loc (loc,
12652 BIT_XOR_EXPR, itype,
12653 arg00, arg10),
12654 arg01),
12655 build_zero_cst (itype));
12657 if (operand_equal_p (arg01, arg10, 0))
12658 return fold_build2_loc (loc, code, type,
12659 fold_build2_loc (loc, BIT_AND_EXPR, itype,
12660 fold_build2_loc (loc,
12661 BIT_XOR_EXPR, itype,
12662 arg00, arg11),
12663 arg01),
12664 build_zero_cst (itype));
12666 if (operand_equal_p (arg00, arg11, 0))
12667 return fold_build2_loc (loc, code, type,
12668 fold_build2_loc (loc, BIT_AND_EXPR, itype,
12669 fold_build2_loc (loc,
12670 BIT_XOR_EXPR, itype,
12671 arg01, arg10),
12672 arg00),
12673 build_zero_cst (itype));
12675 if (operand_equal_p (arg00, arg10, 0))
12676 return fold_build2_loc (loc, code, type,
12677 fold_build2_loc (loc, BIT_AND_EXPR, itype,
12678 fold_build2_loc (loc,
12679 BIT_XOR_EXPR, itype,
12680 arg01, arg11),
12681 arg00),
12682 build_zero_cst (itype));
12685 if (TREE_CODE (arg0) == BIT_XOR_EXPR
12686 && TREE_CODE (arg1) == BIT_XOR_EXPR)
12688 tree arg00 = TREE_OPERAND (arg0, 0);
12689 tree arg01 = TREE_OPERAND (arg0, 1);
12690 tree arg10 = TREE_OPERAND (arg1, 0);
12691 tree arg11 = TREE_OPERAND (arg1, 1);
12692 tree itype = TREE_TYPE (arg0);
12694 /* Optimize (X ^ Z) op (Y ^ Z) as X op Y, and symmetries.
12695 operand_equal_p guarantees no side-effects so we don't need
12696 to use omit_one_operand on Z. */
12697 if (operand_equal_p (arg01, arg11, 0))
12698 return fold_build2_loc (loc, code, type, arg00,
12699 fold_convert_loc (loc, TREE_TYPE (arg00),
12700 arg10));
12701 if (operand_equal_p (arg01, arg10, 0))
12702 return fold_build2_loc (loc, code, type, arg00,
12703 fold_convert_loc (loc, TREE_TYPE (arg00),
12704 arg11));
12705 if (operand_equal_p (arg00, arg11, 0))
12706 return fold_build2_loc (loc, code, type, arg01,
12707 fold_convert_loc (loc, TREE_TYPE (arg01),
12708 arg10));
12709 if (operand_equal_p (arg00, arg10, 0))
12710 return fold_build2_loc (loc, code, type, arg01,
12711 fold_convert_loc (loc, TREE_TYPE (arg01),
12712 arg11));
12714 /* Optimize (X ^ C1) op (Y ^ C2) as (X ^ (C1 ^ C2)) op Y. */
12715 if (TREE_CODE (arg01) == INTEGER_CST
12716 && TREE_CODE (arg11) == INTEGER_CST)
12718 tem = fold_build2_loc (loc, BIT_XOR_EXPR, itype, arg01,
12719 fold_convert_loc (loc, itype, arg11));
12720 tem = fold_build2_loc (loc, BIT_XOR_EXPR, itype, arg00, tem);
12721 return fold_build2_loc (loc, code, type, tem,
12722 fold_convert_loc (loc, itype, arg10));
12726 /* Attempt to simplify equality/inequality comparisons of complex
12727 values. Only lower the comparison if the result is known or
12728 can be simplified to a single scalar comparison. */
12729 if ((TREE_CODE (arg0) == COMPLEX_EXPR
12730 || TREE_CODE (arg0) == COMPLEX_CST)
12731 && (TREE_CODE (arg1) == COMPLEX_EXPR
12732 || TREE_CODE (arg1) == COMPLEX_CST))
12734 tree real0, imag0, real1, imag1;
12735 tree rcond, icond;
12737 if (TREE_CODE (arg0) == COMPLEX_EXPR)
12739 real0 = TREE_OPERAND (arg0, 0);
12740 imag0 = TREE_OPERAND (arg0, 1);
12742 else
12744 real0 = TREE_REALPART (arg0);
12745 imag0 = TREE_IMAGPART (arg0);
12748 if (TREE_CODE (arg1) == COMPLEX_EXPR)
12750 real1 = TREE_OPERAND (arg1, 0);
12751 imag1 = TREE_OPERAND (arg1, 1);
12753 else
12755 real1 = TREE_REALPART (arg1);
12756 imag1 = TREE_IMAGPART (arg1);
12759 rcond = fold_binary_loc (loc, code, type, real0, real1);
12760 if (rcond && TREE_CODE (rcond) == INTEGER_CST)
12762 if (integer_zerop (rcond))
12764 if (code == EQ_EXPR)
12765 return omit_two_operands_loc (loc, type, boolean_false_node,
12766 imag0, imag1);
12767 return fold_build2_loc (loc, NE_EXPR, type, imag0, imag1);
12769 else
12771 if (code == NE_EXPR)
12772 return omit_two_operands_loc (loc, type, boolean_true_node,
12773 imag0, imag1);
12774 return fold_build2_loc (loc, EQ_EXPR, type, imag0, imag1);
12778 icond = fold_binary_loc (loc, code, type, imag0, imag1);
12779 if (icond && TREE_CODE (icond) == INTEGER_CST)
12781 if (integer_zerop (icond))
12783 if (code == EQ_EXPR)
12784 return omit_two_operands_loc (loc, type, boolean_false_node,
12785 real0, real1);
12786 return fold_build2_loc (loc, NE_EXPR, type, real0, real1);
12788 else
12790 if (code == NE_EXPR)
12791 return omit_two_operands_loc (loc, type, boolean_true_node,
12792 real0, real1);
12793 return fold_build2_loc (loc, EQ_EXPR, type, real0, real1);
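/* Complex equality holds exactly when both the real and the imaginary
   parts agree, so once one part is decided the comparison collapses
   to the other part.  A C99 standalone sketch:

     #include <assert.h>
     #include <complex.h>
     int main (void)
     {
       double complex a = 1.0 + 2.0 * I, b = 1.0 + 3.0 * I;
       assert ((a == b) == (creal (a) == creal (b)
                            && cimag (a) == cimag (b)));
       return 0;
     }
 */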
12798 return NULL_TREE;
12800 case LT_EXPR:
12801 case GT_EXPR:
12802 case LE_EXPR:
12803 case GE_EXPR:
12804 tem = fold_comparison (loc, code, type, op0, op1);
12805 if (tem != NULL_TREE)
12806 return tem;
12808 /* Transform comparisons of the form X +- C CMP X. */
12809 if ((TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
12810 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
12811 && ((TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
12812 && !HONOR_SNANS (arg0))
12813 || (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
12814 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))))
12816 tree arg01 = TREE_OPERAND (arg0, 1);
12817 enum tree_code code0 = TREE_CODE (arg0);
12818 int is_positive;
12820 if (TREE_CODE (arg01) == REAL_CST)
12821 is_positive = REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg01)) ? -1 : 1;
12822 else
12823 is_positive = tree_int_cst_sgn (arg01);
12825 /* (X - c) > X becomes false. */
12826 if (code == GT_EXPR
12827 && ((code0 == MINUS_EXPR && is_positive >= 0)
12828 || (code0 == PLUS_EXPR && is_positive <= 0)))
12830 if (TREE_CODE (arg01) == INTEGER_CST
12831 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
12832 fold_overflow_warning (("assuming signed overflow does not "
12833 "occur when assuming that (X - c) > X "
12834 "is always false"),
12835 WARN_STRICT_OVERFLOW_ALL);
12836 return constant_boolean_node (0, type);
12839 /* Likewise (X + c) < X becomes false. */
12840 if (code == LT_EXPR
12841 && ((code0 == PLUS_EXPR && is_positive >= 0)
12842 || (code0 == MINUS_EXPR && is_positive <= 0)))
12844 if (TREE_CODE (arg01) == INTEGER_CST
12845 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
12846 fold_overflow_warning (("assuming signed overflow does not "
12847 "occur when assuming that "
12848 "(X + c) < X is always false"),
12849 WARN_STRICT_OVERFLOW_ALL);
12850 return constant_boolean_node (0, type);
12853 /* Convert (X - c) <= X to true. */
12854 if (!HONOR_NANS (arg1)
12855 && code == LE_EXPR
12856 && ((code0 == MINUS_EXPR && is_positive >= 0)
12857 || (code0 == PLUS_EXPR && is_positive <= 0)))
12859 if (TREE_CODE (arg01) == INTEGER_CST
12860 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
12861 fold_overflow_warning (("assuming signed overflow does not "
12862 "occur when assuming that "
12863 "(X - c) <= X is always true"),
12864 WARN_STRICT_OVERFLOW_ALL);
12865 return constant_boolean_node (1, type);
12868 /* Convert (X + c) >= X to true. */
12869 if (!HONOR_NANS (arg1)
12870 && code == GE_EXPR
12871 && ((code0 == PLUS_EXPR && is_positive >= 0)
12872 || (code0 == MINUS_EXPR && is_positive <= 0)))
12874 if (TREE_CODE (arg01) == INTEGER_CST
12875 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
12876 fold_overflow_warning (("assuming signed overflow does not "
12877 "occur when assuming that "
12878 "(X + c) >= X is always true"),
12879 WARN_STRICT_OVERFLOW_ALL);
12880 return constant_boolean_node (1, type);
12883 if (TREE_CODE (arg01) == INTEGER_CST)
12885 /* Convert X + c > X and X - c < X to true for integers. */
12886 if (code == GT_EXPR
12887 && ((code0 == PLUS_EXPR && is_positive > 0)
12888 || (code0 == MINUS_EXPR && is_positive < 0)))
12890 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
12891 fold_overflow_warning (("assuming signed overflow does "
12892 "not occur when assuming that "
12893 "(X + c) > X is always true"),
12894 WARN_STRICT_OVERFLOW_ALL);
12895 return constant_boolean_node (1, type);
12898 if (code == LT_EXPR
12899 && ((code0 == MINUS_EXPR && is_positive > 0)
12900 || (code0 == PLUS_EXPR && is_positive < 0)))
12902 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
12903 fold_overflow_warning (("assuming signed overflow does "
12904 "not occur when assuming that "
12905 "(X - c) < X is always true"),
12906 WARN_STRICT_OVERFLOW_ALL);
12907 return constant_boolean_node (1, type);
12910 /* Convert X + c <= X and X - c >= X to false for integers. */
12911 if (code == LE_EXPR
12912 && ((code0 == PLUS_EXPR && is_positive > 0)
12913 || (code0 == MINUS_EXPR && is_positive < 0)))
12915 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
12916 fold_overflow_warning (("assuming signed overflow does "
12917 "not occur when assuming that "
12918 "(X + c) <= X is always false"),
12919 WARN_STRICT_OVERFLOW_ALL);
12920 return constant_boolean_node (0, type);
12923 if (code == GE_EXPR
12924 && ((code0 == MINUS_EXPR && is_positive > 0)
12925 || (code0 == PLUS_EXPR && is_positive < 0)))
12927 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
12928 fold_overflow_warning (("assuming signed overflow does "
12929 "not occur when assuming that "
12930 "(X - c) >= X is always false"),
12931 WARN_STRICT_OVERFLOW_ALL);
12932 return constant_boolean_node (0, type);
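/* All of the folds above rely on signed overflow being undefined;
   with wrapping arithmetic X + c can be smaller than X.  A standalone
   sketch of the wrapping counterexample, using unsigned arithmetic so
   the wraparound is well defined:

     #include <assert.h>
     #include <limits.h>
     int main (void)
     {
       unsigned x = UINT_MAX;
       assert (!(x + 1u > x));
       return 0;
     }
 */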
12937 /* Comparisons with the highest or lowest possible integer of
12938 the specified precision will have known values. */
12940 tree arg1_type = TREE_TYPE (arg1);
12941 unsigned int prec = TYPE_PRECISION (arg1_type);
12943 if (TREE_CODE (arg1) == INTEGER_CST
12944 && (INTEGRAL_TYPE_P (arg1_type) || POINTER_TYPE_P (arg1_type)))
12946 wide_int max = wi::max_value (arg1_type);
12947 wide_int signed_max = wi::max_value (prec, SIGNED);
12948 wide_int min = wi::min_value (arg1_type);
12950 if (wi::eq_p (arg1, max))
12951 switch (code)
12953 case GT_EXPR:
12954 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
12956 case GE_EXPR:
12957 return fold_build2_loc (loc, EQ_EXPR, type, op0, op1);
12959 case LE_EXPR:
12960 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
12962 case LT_EXPR:
12963 return fold_build2_loc (loc, NE_EXPR, type, op0, op1);
12965 /* The GE_EXPR and LT_EXPR cases above are not normally
12966 reached because of previous transformations. */
12968 default:
12969 break;
12971 else if (wi::eq_p (arg1, max - 1))
12972 switch (code)
12974 case GT_EXPR:
12975 arg1 = const_binop (PLUS_EXPR, arg1,
12976 build_int_cst (TREE_TYPE (arg1), 1));
12977 return fold_build2_loc (loc, EQ_EXPR, type,
12978 fold_convert_loc (loc,
12979 TREE_TYPE (arg1), arg0),
12980 arg1);
12981 case LE_EXPR:
12982 arg1 = const_binop (PLUS_EXPR, arg1,
12983 build_int_cst (TREE_TYPE (arg1), 1));
12984 return fold_build2_loc (loc, NE_EXPR, type,
12985 fold_convert_loc (loc, TREE_TYPE (arg1),
12986 arg0),
12987 arg1);
12988 default:
12989 break;
12991 else if (wi::eq_p (arg1, min))
12992 switch (code)
12994 case LT_EXPR:
12995 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
12997 case LE_EXPR:
12998 return fold_build2_loc (loc, EQ_EXPR, type, op0, op1);
13000 case GE_EXPR:
13001 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
13003 case GT_EXPR:
13004 return fold_build2_loc (loc, NE_EXPR, type, op0, op1);
13006 default:
13007 break;
13009 else if (wi::eq_p (arg1, min + 1))
13010 switch (code)
13012 case GE_EXPR:
13013 arg1 = const_binop (MINUS_EXPR, arg1,
13014 build_int_cst (TREE_TYPE (arg1), 1));
13015 return fold_build2_loc (loc, NE_EXPR, type,
13016 fold_convert_loc (loc,
13017 TREE_TYPE (arg1), arg0),
13018 arg1);
13019 case LT_EXPR:
13020 arg1 = const_binop (MINUS_EXPR, arg1,
13021 build_int_cst (TREE_TYPE (arg1), 1));
13022 return fold_build2_loc (loc, EQ_EXPR, type,
13023 fold_convert_loc (loc, TREE_TYPE (arg1),
13024 arg0),
13025 arg1);
13026 default:
13027 break;
13030 else if (wi::eq_p (arg1, signed_max)
13031 && TYPE_UNSIGNED (arg1_type)
13032 /* We will flip the signedness of the comparison operator
13033 associated with the mode of arg1, so the sign bit is
13034 specified by this mode. Check that arg1 is the signed
13035 max associated with this sign bit. */
13036 && prec == GET_MODE_PRECISION (TYPE_MODE (arg1_type))
13037 /* signed_type does not work on pointer types. */
13038 && INTEGRAL_TYPE_P (arg1_type))
13040 /* The following case also applies to X < signed_max+1
13041 and X >= signed_max+1 because of previous transformations. */
13042 if (code == LE_EXPR || code == GT_EXPR)
13044 tree st = signed_type_for (arg1_type);
13045 return fold_build2_loc (loc,
13046 code == LE_EXPR ? GE_EXPR : LT_EXPR,
13047 type, fold_convert_loc (loc, st, arg0),
13048 build_int_cst (st, 0));
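/* For unsigned X, X <= signed_max holds exactly when the sign bit is
   clear, i.e. when X reinterpreted as signed is nonnegative.  A
   sketch for 32-bit int, assuming the usual two's-complement
   conversion:

     #include <assert.h>
     #include <limits.h>
     int main (void)
     {
       unsigned x = 0x80000001u, y = 42u;
       assert ((x <= (unsigned) INT_MAX) == ((int) x >= 0));
       assert ((y <= (unsigned) INT_MAX) == ((int) y >= 0));
       return 0;
     }
 */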
13054 /* If we are comparing an ABS_EXPR with a constant, we can
13055 convert all the cases into explicit comparisons, but they may
13056 well not be faster than doing the ABS and one comparison.
13057 But ABS (X) <= C is a range comparison, which becomes a subtraction
13058 and a comparison, and is probably faster. */
13059 if (code == LE_EXPR
13060 && TREE_CODE (arg1) == INTEGER_CST
13061 && TREE_CODE (arg0) == ABS_EXPR
13062 && ! TREE_SIDE_EFFECTS (arg0)
13063 && (0 != (tem = negate_expr (arg1)))
13064 && TREE_CODE (tem) == INTEGER_CST
13065 && !TREE_OVERFLOW (tem))
13066 return fold_build2_loc (loc, TRUTH_ANDIF_EXPR, type,
13067 build2 (GE_EXPR, type,
13068 TREE_OPERAND (arg0, 0), tem),
13069 build2 (LE_EXPR, type,
13070 TREE_OPERAND (arg0, 0), arg1));
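/* ABS (X) <= C collapses to the range test -C <= X && X <= C, which
   needs no absolute value.  A standalone sketch:

     #include <assert.h>
     #include <stdlib.h>
     int main (void)
     {
       for (int x = -100; x <= 100; x++)
         assert ((abs (x) <= 10) == (x >= -10 && x <= 10));
       return 0;
     }
 */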
13072 /* Convert ABS_EXPR<x> >= 0 to true. */
13073 strict_overflow_p = false;
13074 if (code == GE_EXPR
13075 && (integer_zerop (arg1)
13076 || (! HONOR_NANS (arg0)
13077 && real_zerop (arg1)))
13078 && tree_expr_nonnegative_warnv_p (arg0, &strict_overflow_p))
13080 if (strict_overflow_p)
13081 fold_overflow_warning (("assuming signed overflow does not occur "
13082 "when simplifying comparison of "
13083 "absolute value and zero"),
13084 WARN_STRICT_OVERFLOW_CONDITIONAL);
13085 return omit_one_operand_loc (loc, type,
13086 constant_boolean_node (true, type),
13087 arg0);
13090 /* Convert ABS_EXPR<x> < 0 to false. */
13091 strict_overflow_p = false;
13092 if (code == LT_EXPR
13093 && (integer_zerop (arg1) || real_zerop (arg1))
13094 && tree_expr_nonnegative_warnv_p (arg0, &strict_overflow_p))
13096 if (strict_overflow_p)
13097 fold_overflow_warning (("assuming signed overflow does not occur "
13098 "when simplifying comparison of "
13099 "absolute value and zero"),
13100 WARN_STRICT_OVERFLOW_CONDITIONAL);
13101 return omit_one_operand_loc (loc, type,
13102 constant_boolean_node (false, type),
13103 arg0);
13106 /* If X is unsigned, convert X < (1 << Y) into X >> Y == 0
13107 and similarly for >= into !=. */
13108 if ((code == LT_EXPR || code == GE_EXPR)
13109 && TYPE_UNSIGNED (TREE_TYPE (arg0))
13110 && TREE_CODE (arg1) == LSHIFT_EXPR
13111 && integer_onep (TREE_OPERAND (arg1, 0)))
13112 return build2_loc (loc, code == LT_EXPR ? EQ_EXPR : NE_EXPR, type,
13113 build2 (RSHIFT_EXPR, TREE_TYPE (arg0), arg0,
13114 TREE_OPERAND (arg1, 1)),
13115 build_zero_cst (TREE_TYPE (arg0)));
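/* X < (1 << Y) says X has no bits at or above position Y, which is
   exactly (X >> Y) == 0.  A sketch for 32-bit unsigned, over all
   valid shift counts:

     #include <assert.h>
     int main (void)
     {
       unsigned x = 300u;
       for (unsigned y = 0; y < 32u; y++)
         assert ((x < (1u << y)) == ((x >> y) == 0u));
       return 0;
     }
 */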
13117 /* Similarly for X < (cast) (1 << Y). But cast can't be narrowing,
13118 otherwise Y might be >= # of bits in X's type and thus e.g.
13119 (unsigned char) (1 << Y) for Y 15 might be 0.
13120 If the cast is widening, then 1 << Y should have unsigned type,
13121 otherwise if Y is number of bits in the signed shift type minus 1,
13122 we can't optimize this. E.g. (unsigned long long) (1 << Y) for Y
13123 31 might be 0xffffffff80000000. */
13124 if ((code == LT_EXPR || code == GE_EXPR)
13125 && TYPE_UNSIGNED (TREE_TYPE (arg0))
13126 && CONVERT_EXPR_P (arg1)
13127 && TREE_CODE (TREE_OPERAND (arg1, 0)) == LSHIFT_EXPR
13128 && (element_precision (TREE_TYPE (arg1))
13129 >= element_precision (TREE_TYPE (TREE_OPERAND (arg1, 0))))
13130 && (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg1, 0)))
13131 || (element_precision (TREE_TYPE (arg1))
13132 == element_precision (TREE_TYPE (TREE_OPERAND (arg1, 0)))))
13133 && integer_onep (TREE_OPERAND (TREE_OPERAND (arg1, 0), 0)))
13135 tem = build2 (RSHIFT_EXPR, TREE_TYPE (arg0), arg0,
13136 TREE_OPERAND (TREE_OPERAND (arg1, 0), 1));
13137 return build2_loc (loc, code == LT_EXPR ? EQ_EXPR : NE_EXPR, type,
13138 fold_convert_loc (loc, TREE_TYPE (arg0), tem),
13139 build_zero_cst (TREE_TYPE (arg0)));
13142 return NULL_TREE;
13144 case UNORDERED_EXPR:
13145 case ORDERED_EXPR:
13146 case UNLT_EXPR:
13147 case UNLE_EXPR:
13148 case UNGT_EXPR:
13149 case UNGE_EXPR:
13150 case UNEQ_EXPR:
13151 case LTGT_EXPR:
13152 if (TREE_CODE (arg0) == REAL_CST && TREE_CODE (arg1) == REAL_CST)
13154 t1 = fold_relational_const (code, type, arg0, arg1);
13155 if (t1 != NULL_TREE)
13156 return t1;
13159 /* If the first operand is NaN, the result is constant. */
13160 if (TREE_CODE (arg0) == REAL_CST
13161 && REAL_VALUE_ISNAN (TREE_REAL_CST (arg0))
13162 && (code != LTGT_EXPR || ! flag_trapping_math))
13164 t1 = (code == ORDERED_EXPR || code == LTGT_EXPR)
13165 ? integer_zero_node
13166 : integer_one_node;
13167 return omit_one_operand_loc (loc, type, t1, arg1);
13170 /* If the second operand is NaN, the result is constant. */
13171 if (TREE_CODE (arg1) == REAL_CST
13172 && REAL_VALUE_ISNAN (TREE_REAL_CST (arg1))
13173 && (code != LTGT_EXPR || ! flag_trapping_math))
13175 t1 = (code == ORDERED_EXPR || code == LTGT_EXPR)
13176 ? integer_zero_node
13177 : integer_one_node;
13178 return omit_one_operand_loc (loc, type, t1, arg0);
13181 /* Simplify unordered comparison of something with itself. */
13182 if ((code == UNLE_EXPR || code == UNGE_EXPR || code == UNEQ_EXPR)
13183 && operand_equal_p (arg0, arg1, 0))
13184 return constant_boolean_node (1, type);
13186 if (code == LTGT_EXPR
13187 && !flag_trapping_math
13188 && operand_equal_p (arg0, arg1, 0))
13189 return constant_boolean_node (0, type);
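/* Any comparison against a NaN is unordered, so with one operand a
   known NaN the UN* predicates become constants.  A standalone
   sketch, assuming quiet NaNs are available:

     #include <assert.h>
     #include <math.h>
     int main (void)
     {
       double n = NAN, x = 1.0;
       assert (isunordered (n, x));
       assert (!(n < x) && !(n > x) && !(n == x));
       return 0;
     }
 */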
13191 /* Fold (double)float1 CMP (double)float2 into float1 CMP float2. */
13193 tree targ0 = strip_float_extensions (arg0);
13194 tree targ1 = strip_float_extensions (arg1);
13195 tree newtype = TREE_TYPE (targ0);
13197 if (TYPE_PRECISION (TREE_TYPE (targ1)) > TYPE_PRECISION (newtype))
13198 newtype = TREE_TYPE (targ1);
13200 if (TYPE_PRECISION (newtype) < TYPE_PRECISION (TREE_TYPE (arg0)))
13201 return fold_build2_loc (loc, code, type,
13202 fold_convert_loc (loc, newtype, targ0),
13203 fold_convert_loc (loc, newtype, targ1));
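/* Widening a float to double is exact and order preserving, so the
   comparison can be done in the narrower type.  A standalone sketch:

     #include <assert.h>
     int main (void)
     {
       float f1 = 1.5f, f2 = 2.25f;
       assert (((double) f1 < (double) f2) == (f1 < f2));
       return 0;
     }
 */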
13206 return NULL_TREE;
13208 case COMPOUND_EXPR:
13209 /* When pedantic, a compound expression can be neither an lvalue
13210 nor an integer constant expression. */
13211 if (TREE_SIDE_EFFECTS (arg0) || TREE_CONSTANT (arg1))
13212 return NULL_TREE;
13213 /* Don't let (0, 0) be a null pointer constant. */
13214 tem = integer_zerop (arg1) ? build1 (NOP_EXPR, type, arg1)
13215 : fold_convert_loc (loc, type, arg1);
13216 return pedantic_non_lvalue_loc (loc, tem);
13218 case ASSERT_EXPR:
13219 /* An ASSERT_EXPR should never be passed to fold_binary. */
13220 gcc_unreachable ();
13222 default:
13223 return NULL_TREE;
13224 } /* switch (code) */
13227 /* Callback for walk_tree, looking for LABEL_EXPR. Return *TP if it is
13228 a LABEL_EXPR; otherwise return NULL_TREE. Do not check the subtrees
13229 of GOTO_EXPR. */
13231 static tree
13232 contains_label_1 (tree *tp, int *walk_subtrees, void *data ATTRIBUTE_UNUSED)
13234 switch (TREE_CODE (*tp))
13236 case LABEL_EXPR:
13237 return *tp;
13239 case GOTO_EXPR:
13240 *walk_subtrees = 0;
13242 /* ... fall through ... */
13244 default:
13245 return NULL_TREE;
13249 /* Return whether the sub-tree ST contains a label which is accessible from
13250 outside the sub-tree. */
13252 static bool
13253 contains_label_p (tree st)
13255 return
13256 (walk_tree_without_duplicates (&st, contains_label_1, NULL) != NULL_TREE);
13259 /* Fold a ternary expression of code CODE and type TYPE with operands
13260 OP0, OP1, and OP2. Return the folded expression if folding is
13261 successful. Otherwise, return NULL_TREE. */
13263 tree
13264 fold_ternary_loc (location_t loc, enum tree_code code, tree type,
13265 tree op0, tree op1, tree op2)
13267 tree tem;
13268 tree arg0 = NULL_TREE, arg1 = NULL_TREE, arg2 = NULL_TREE;
13269 enum tree_code_class kind = TREE_CODE_CLASS (code);
13271 gcc_assert (IS_EXPR_CODE_CLASS (kind)
13272 && TREE_CODE_LENGTH (code) == 3);
13274 /* If this is a commutative operation, and OP0 is a constant, move it
13275 to OP1 to reduce the number of tests below. */
13276 if (commutative_ternary_tree_code (code)
13277 && tree_swap_operands_p (op0, op1, true))
13278 return fold_build3_loc (loc, code, type, op1, op0, op2);
13280 tem = generic_simplify (loc, code, type, op0, op1, op2);
13281 if (tem)
13282 return tem;
13284 /* Strip any conversions that don't change the mode. This is safe
13285 for every expression, except for a comparison expression because
13286 its signedness is derived from its operands. So, in the latter
13287 case, only strip conversions that don't change the signedness.
13289 Note that this is done as an internal manipulation within the
13290 constant folder, in order to find the simplest representation of
13291 the arguments so that their form can be studied. In any case,
13292 the appropriate type conversions should be put back in the tree
13293 that will get out of the constant folder. */
13294 if (op0)
13296 arg0 = op0;
13297 STRIP_NOPS (arg0);
13300 if (op1)
13302 arg1 = op1;
13303 STRIP_NOPS (arg1);
13306 if (op2)
13308 arg2 = op2;
13309 STRIP_NOPS (arg2);
13312 switch (code)
13314 case COMPONENT_REF:
13315 if (TREE_CODE (arg0) == CONSTRUCTOR
13316 && ! type_contains_placeholder_p (TREE_TYPE (arg0)))
13318 unsigned HOST_WIDE_INT idx;
13319 tree field, value;
13320 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (arg0), idx, field, value)
13321 if (field == arg1)
13322 return value;
13324 return NULL_TREE;
13326 case COND_EXPR:
13327 case VEC_COND_EXPR:
13328 /* Pedantic ANSI C says that a conditional expression is never an lvalue,
13329 so all simple results must be passed through pedantic_non_lvalue. */
13330 if (TREE_CODE (arg0) == INTEGER_CST)
13332 tree unused_op = integer_zerop (arg0) ? op1 : op2;
13333 tem = integer_zerop (arg0) ? op2 : op1;
13334 /* Only optimize constant conditions when the selected branch
13335 has the same type as the COND_EXPR. This avoids optimizing
13336 away "c ? x : throw", where the throw has a void type.
13337 Avoid throwing away the operand that contains a label. */
13338 if ((!TREE_SIDE_EFFECTS (unused_op)
13339 || !contains_label_p (unused_op))
13340 && (! VOID_TYPE_P (TREE_TYPE (tem))
13341 || VOID_TYPE_P (type)))
13342 return pedantic_non_lvalue_loc (loc, tem);
13343 return NULL_TREE;
13345 else if (TREE_CODE (arg0) == VECTOR_CST)
13347 if ((TREE_CODE (arg1) == VECTOR_CST
13348 || TREE_CODE (arg1) == CONSTRUCTOR)
13349 && (TREE_CODE (arg2) == VECTOR_CST
13350 || TREE_CODE (arg2) == CONSTRUCTOR))
13352 unsigned int nelts = TYPE_VECTOR_SUBPARTS (type), i;
13353 unsigned char *sel = XALLOCAVEC (unsigned char, nelts);
13354 gcc_assert (nelts == VECTOR_CST_NELTS (arg0));
13355 for (i = 0; i < nelts; i++)
13357 tree val = VECTOR_CST_ELT (arg0, i);
13358 if (integer_all_onesp (val))
13359 sel[i] = i;
13360 else if (integer_zerop (val))
13361 sel[i] = nelts + i;
13362 else /* Currently unreachable. */
13363 return NULL_TREE;
13365 tree t = fold_vec_perm (type, arg1, arg2, sel);
13366 if (t != NULL_TREE)
13367 return t;
13371 /* If we have A op B ? A : C, we may be able to convert this to a
13372 simpler expression, depending on the operation and the values
13373 of B and C. Signed zeros prevent all of these transformations,
13374 for reasons given above each one.
13376 Also try swapping the arguments and inverting the conditional. */
13377 if (COMPARISON_CLASS_P (arg0)
13378 && operand_equal_for_comparison_p (TREE_OPERAND (arg0, 0),
13379 arg1, TREE_OPERAND (arg0, 1))
13380 && !HONOR_SIGNED_ZEROS (element_mode (arg1)))
13382 tem = fold_cond_expr_with_comparison (loc, type, arg0, op1, op2);
13383 if (tem)
13384 return tem;
13387 if (COMPARISON_CLASS_P (arg0)
13388 && operand_equal_for_comparison_p (TREE_OPERAND (arg0, 0),
13389 op2,
13390 TREE_OPERAND (arg0, 1))
13391 && !HONOR_SIGNED_ZEROS (element_mode (op2)))
13393 location_t loc0 = expr_location_or (arg0, loc);
13394 tem = fold_invert_truthvalue (loc0, arg0);
13395 if (tem && COMPARISON_CLASS_P (tem))
13397 tem = fold_cond_expr_with_comparison (loc, type, tem, op2, op1);
13398 if (tem)
13399 return tem;
13403 /* If the second operand is simpler than the third, swap them
13404 since that produces better jump optimization results. */
13405 if (truth_value_p (TREE_CODE (arg0))
13406 && tree_swap_operands_p (op1, op2, false))
13408 location_t loc0 = expr_location_or (arg0, loc);
13409 /* See if this can be inverted. If it can't, possibly because
13410 it was a floating-point inequality comparison, don't do
13411 anything. */
13412 tem = fold_invert_truthvalue (loc0, arg0);
13413 if (tem)
13414 return fold_build3_loc (loc, code, type, tem, op2, op1);
13417 /* Convert A ? 1 : 0 to simply A. */
13418 if ((code == VEC_COND_EXPR ? integer_all_onesp (op1)
13419 : (integer_onep (op1)
13420 && !VECTOR_TYPE_P (type)))
13421 && integer_zerop (op2)
13422 /* If we try to convert OP0 to our type, the
13423 call to fold will try to move the conversion inside
13424 a COND, which will recurse. In that case, the COND_EXPR
13425 is probably the best choice, so leave it alone. */
13426 && type == TREE_TYPE (arg0))
13427 return pedantic_non_lvalue_loc (loc, arg0);
13429 /* Convert A ? 0 : 1 to !A. This prefers the use of NOT_EXPR
13430 over COND_EXPR in cases such as floating point comparisons. */
13431 if (integer_zerop (op1)
13432 && (code == VEC_COND_EXPR ? integer_all_onesp (op2)
13433 : (integer_onep (op2)
13434 && !VECTOR_TYPE_P (type)))
13435 && truth_value_p (TREE_CODE (arg0)))
13436 return pedantic_non_lvalue_loc (loc,
13437 fold_convert_loc (loc, type,
13438 invert_truthvalue_loc (loc,
13439 arg0)));
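/* For a truth-valued A the two constant-armed conditionals are just A
   and !A.  A standalone sketch:

     #include <assert.h>
     int main (void)
     {
       int a = (3 > 2);
       assert ((a ? 1 : 0) == a);
       assert ((a ? 0 : 1) == !a);
       return 0;
     }
 */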
13441 /* A < 0 ? <sign bit of A> : 0 is simply (A & <sign bit of A>). */
13442 if (TREE_CODE (arg0) == LT_EXPR
13443 && integer_zerop (TREE_OPERAND (arg0, 1))
13444 && integer_zerop (op2)
13445 && (tem = sign_bit_p (TREE_OPERAND (arg0, 0), arg1)))
13447 /* sign_bit_p looks through both zero and sign extensions,
13448 but for this optimization only sign extensions are
13449 usable. */
13450 tree tem2 = TREE_OPERAND (arg0, 0);
13451 while (tem != tem2)
13453 if (TREE_CODE (tem2) != NOP_EXPR
13454 || TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (tem2, 0))))
13456 tem = NULL_TREE;
13457 break;
13459 tem2 = TREE_OPERAND (tem2, 0);
13461 /* sign_bit_p only checks ARG1 bits within A's precision.
13462 If <sign bit of A> has wider type than A, bits outside
13463 of A's precision in <sign bit of A> need to be checked.
13464 If they are all 0, this optimization needs to be done
13465 in unsigned A's type, if they are all 1 in signed A's type,
13466 otherwise this can't be done. */
13467 if (tem
13468 && TYPE_PRECISION (TREE_TYPE (tem))
13469 < TYPE_PRECISION (TREE_TYPE (arg1))
13470 && TYPE_PRECISION (TREE_TYPE (tem))
13471 < TYPE_PRECISION (type))
13473 int inner_width, outer_width;
13474 tree tem_type;
13476 inner_width = TYPE_PRECISION (TREE_TYPE (tem));
13477 outer_width = TYPE_PRECISION (TREE_TYPE (arg1));
13478 if (outer_width > TYPE_PRECISION (type))
13479 outer_width = TYPE_PRECISION (type);
13481 wide_int mask = wi::shifted_mask
13482 (inner_width, outer_width - inner_width, false,
13483 TYPE_PRECISION (TREE_TYPE (arg1)));
13485 wide_int common = mask & arg1;
13486 if (common == mask)
13488 tem_type = signed_type_for (TREE_TYPE (tem));
13489 tem = fold_convert_loc (loc, tem_type, tem);
13491 else if (common == 0)
13493 tem_type = unsigned_type_for (TREE_TYPE (tem));
13494 tem = fold_convert_loc (loc, tem_type, tem);
13496 else
13497 tem = NULL;
13500 if (tem)
13501 return
13502 fold_convert_loc (loc, type,
13503 fold_build2_loc (loc, BIT_AND_EXPR,
13504 TREE_TYPE (tem), tem,
13505 fold_convert_loc (loc,
13506 TREE_TYPE (tem),
13507 arg1)));
13510 /* (A >> N) & 1 ? (1 << N) : 0 is simply A & (1 << N). A & 1 was
13511 already handled above. */
13512 if (TREE_CODE (arg0) == BIT_AND_EXPR
13513 && integer_onep (TREE_OPERAND (arg0, 1))
13514 && integer_zerop (op2)
13515 && integer_pow2p (arg1))
13517 tree tem = TREE_OPERAND (arg0, 0);
13518 STRIP_NOPS (tem);
13519 if (TREE_CODE (tem) == RSHIFT_EXPR
13520 && tree_fits_uhwi_p (TREE_OPERAND (tem, 1))
13521 && (unsigned HOST_WIDE_INT) tree_log2 (arg1) ==
13522 tree_to_uhwi (TREE_OPERAND (tem, 1)))
13523 return fold_build2_loc (loc, BIT_AND_EXPR, type,
13524 TREE_OPERAND (tem, 0), arg1);
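/* Selecting (1 << N) when bit N of A is set is the same as masking A
   with (1 << N).  A standalone sketch:

     #include <assert.h>
     int main (void)
     {
       unsigned a = 0xa5u, n = 5u;
       assert ((((a >> n) & 1u) ? (1u << n) : 0u) == (a & (1u << n)));
       return 0;
     }
 */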
13527 /* A & N ? N : 0 is simply A & N if N is a power of two. This
13528 is probably obsolete because the first operand should be a
13529 truth value (that's why we have the two cases above), but let's
13530 leave it in until we can confirm this for all front-ends. */
13531 if (integer_zerop (op2)
13532 && TREE_CODE (arg0) == NE_EXPR
13533 && integer_zerop (TREE_OPERAND (arg0, 1))
13534 && integer_pow2p (arg1)
13535 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_AND_EXPR
13536 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
13537 arg1, OEP_ONLY_CONST))
13538 return pedantic_non_lvalue_loc (loc,
13539 fold_convert_loc (loc, type,
13540 TREE_OPERAND (arg0, 0)));
13542 /* Disable the transformations below for vectors, since
13543 fold_binary_op_with_conditional_arg may undo them immediately,
13544 yielding an infinite loop. */
13545 if (code == VEC_COND_EXPR)
13546 return NULL_TREE;
13548 /* Convert A ? B : 0 into A && B if A and B are truth values. */
13549 if (integer_zerop (op2)
13550 && truth_value_p (TREE_CODE (arg0))
13551 && truth_value_p (TREE_CODE (arg1))
13552 && (code == VEC_COND_EXPR || !VECTOR_TYPE_P (type)))
13553 return fold_build2_loc (loc, code == VEC_COND_EXPR ? BIT_AND_EXPR
13554 : TRUTH_ANDIF_EXPR,
13555 type, fold_convert_loc (loc, type, arg0), arg1);
13557 /* Convert A ? B : 1 into !A || B if A and B are truth values. */
13558 if (code == VEC_COND_EXPR ? integer_all_onesp (op2) : integer_onep (op2)
13559 && truth_value_p (TREE_CODE (arg0))
13560 && truth_value_p (TREE_CODE (arg1))
13561 && (code == VEC_COND_EXPR || !VECTOR_TYPE_P (type)))
13563 location_t loc0 = expr_location_or (arg0, loc);
13564 /* Only perform transformation if ARG0 is easily inverted. */
13565 tem = fold_invert_truthvalue (loc0, arg0);
13566 if (tem)
13567 return fold_build2_loc (loc, code == VEC_COND_EXPR
13568 ? BIT_IOR_EXPR
13569 : TRUTH_ORIF_EXPR,
13570 type, fold_convert_loc (loc, type, tem),
13571 arg1);
13574 /* Convert A ? 0 : B into !A && B if A and B are truth values. */
13575 if (integer_zerop (arg1)
13576 && truth_value_p (TREE_CODE (arg0))
13577 && truth_value_p (TREE_CODE (op2))
13578 && (code == VEC_COND_EXPR || !VECTOR_TYPE_P (type)))
13580 location_t loc0 = expr_location_or (arg0, loc);
13581 /* Only perform transformation if ARG0 is easily inverted. */
13582 tem = fold_invert_truthvalue (loc0, arg0);
13583 if (tem)
13584 return fold_build2_loc (loc, code == VEC_COND_EXPR
13585 ? BIT_AND_EXPR : TRUTH_ANDIF_EXPR,
13586 type, fold_convert_loc (loc, type, tem),
13587 op2);
13590 /* Convert A ? 1 : B into A || B if A and B are truth values. */
13591 if (code == VEC_COND_EXPR ? integer_all_onesp (arg1) : integer_onep (arg1)
13592 && truth_value_p (TREE_CODE (arg0))
13593 && truth_value_p (TREE_CODE (op2))
13594 && (code == VEC_COND_EXPR || !VECTOR_TYPE_P (type)))
13595 return fold_build2_loc (loc, code == VEC_COND_EXPR
13596 ? BIT_IOR_EXPR : TRUTH_ORIF_EXPR,
13597 type, fold_convert_loc (loc, type, arg0), op2);
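/* With truth values in all positions, the four constant-arm
   conditionals map onto the short-circuit operators.  A standalone
   sketch over all boolean combinations:

     #include <assert.h>
     int main (void)
     {
       for (int a = 0; a <= 1; a++)
         for (int b = 0; b <= 1; b++)
           {
             assert ((a ? b : 0) == (a && b));
             assert ((a ? b : 1) == (!a || b));
             assert ((a ? 0 : b) == (!a && b));
             assert ((a ? 1 : b) == (a || b));
           }
       return 0;
     }
 */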
13599 return NULL_TREE;
13601 case CALL_EXPR:
13602 /* CALL_EXPRs used to be ternary exprs. Catch any mistaken uses
13603 of fold_ternary on them. */
13604 gcc_unreachable ();
13606 case BIT_FIELD_REF:
13607 if ((TREE_CODE (arg0) == VECTOR_CST
13608 || (TREE_CODE (arg0) == CONSTRUCTOR
13609 && TREE_CODE (TREE_TYPE (arg0)) == VECTOR_TYPE))
13610 && (type == TREE_TYPE (TREE_TYPE (arg0))
13611 || (TREE_CODE (type) == VECTOR_TYPE
13612 && TREE_TYPE (type) == TREE_TYPE (TREE_TYPE (arg0)))))
13614 tree eltype = TREE_TYPE (TREE_TYPE (arg0));
13615 unsigned HOST_WIDE_INT width = tree_to_uhwi (TYPE_SIZE (eltype));
13616 unsigned HOST_WIDE_INT n = tree_to_uhwi (arg1);
13617 unsigned HOST_WIDE_INT idx = tree_to_uhwi (op2);
13619 if (n != 0
13620 && (idx % width) == 0
13621 && (n % width) == 0
13622 && ((idx + n) / width) <= TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0)))
13624 idx = idx / width;
13625 n = n / width;
13627 if (TREE_CODE (arg0) == VECTOR_CST)
13629 if (n == 1)
13630 return VECTOR_CST_ELT (arg0, idx);
13632 tree *vals = XALLOCAVEC (tree, n);
13633 for (unsigned i = 0; i < n; ++i)
13634 vals[i] = VECTOR_CST_ELT (arg0, idx + i);
13635 return build_vector (type, vals);
13638 /* Constructor elements can be subvectors. */
13639 unsigned HOST_WIDE_INT k = 1;
13640 if (CONSTRUCTOR_NELTS (arg0) != 0)
13642 tree cons_elem = TREE_TYPE (CONSTRUCTOR_ELT (arg0, 0)->value);
13643 if (TREE_CODE (cons_elem) == VECTOR_TYPE)
13644 k = TYPE_VECTOR_SUBPARTS (cons_elem);
13647 /* We keep an exact subset of the constructor elements. */
13648 if ((idx % k) == 0 && (n % k) == 0)
13650 if (CONSTRUCTOR_NELTS (arg0) == 0)
13651 return build_constructor (type, NULL);
13652 idx /= k;
13653 n /= k;
13654 if (n == 1)
13656 if (idx < CONSTRUCTOR_NELTS (arg0))
13657 return CONSTRUCTOR_ELT (arg0, idx)->value;
13658 return build_zero_cst (type);
13661 vec<constructor_elt, va_gc> *vals;
13662 vec_alloc (vals, n);
13663 for (unsigned i = 0;
13664 i < n && idx + i < CONSTRUCTOR_NELTS (arg0);
13665 ++i)
13666 CONSTRUCTOR_APPEND_ELT (vals, NULL_TREE,
13667 CONSTRUCTOR_ELT
13668 (arg0, idx + i)->value);
13669 return build_constructor (type, vals);
13671 /* The bitfield references a single constructor element. */
13672 else if (idx + n <= (idx / k + 1) * k)
13674 if (CONSTRUCTOR_NELTS (arg0) <= idx / k)
13675 return build_zero_cst (type);
13676 else if (n == k)
13677 return CONSTRUCTOR_ELT (arg0, idx / k)->value;
13678 else
13679 return fold_build3_loc (loc, code, type,
13680 CONSTRUCTOR_ELT (arg0, idx / k)->value, op1,
13681 build_int_cst (TREE_TYPE (op2), (idx % k) * width));
13686 /* A bit-field-ref that referenced the full argument can be stripped. */
13687 if (INTEGRAL_TYPE_P (TREE_TYPE (arg0))
13688 && TYPE_PRECISION (TREE_TYPE (arg0)) == tree_to_uhwi (arg1)
13689 && integer_zerop (op2))
13690 return fold_convert_loc (loc, type, arg0);
13692 /* On constants we can use native encode/interpret to constant
13693 fold (nearly) all BIT_FIELD_REFs. */
13694 if (CONSTANT_CLASS_P (arg0)
13695 && can_native_interpret_type_p (type)
13696 && tree_fits_uhwi_p (TYPE_SIZE_UNIT (TREE_TYPE (arg0)))
13697 /* This limitation should not be necessary; we just need to
13698 round this up to mode size. */
13699 && tree_to_uhwi (op1) % BITS_PER_UNIT == 0
13700 /* Need bit-shifting of the buffer to relax the following. */
13701 && tree_to_uhwi (op2) % BITS_PER_UNIT == 0)
13703 unsigned HOST_WIDE_INT bitpos = tree_to_uhwi (op2);
13704 unsigned HOST_WIDE_INT bitsize = tree_to_uhwi (op1);
13705 unsigned HOST_WIDE_INT clen;
13706 clen = tree_to_uhwi (TYPE_SIZE_UNIT (TREE_TYPE (arg0)));
13707 /* ??? We cannot tell native_encode_expr to start at
13708 some random byte only. So limit us to a reasonable amount
13709 of work. */
13710 if (clen <= 4096)
13712 unsigned char *b = XALLOCAVEC (unsigned char, clen);
13713 unsigned HOST_WIDE_INT len = native_encode_expr (arg0, b, clen);
13714 if (len > 0
13715 && len * BITS_PER_UNIT >= bitpos + bitsize)
13717 tree v = native_interpret_expr (type,
13718 b + bitpos / BITS_PER_UNIT,
13719 bitsize / BITS_PER_UNIT);
13720 if (v)
13721 return v;
13726 return NULL_TREE;
13728 case FMA_EXPR:
13729 /* For integers we can decompose the FMA if possible. */
13730 if (TREE_CODE (arg0) == INTEGER_CST
13731 && TREE_CODE (arg1) == INTEGER_CST)
13732 return fold_build2_loc (loc, PLUS_EXPR, type,
13733 const_binop (MULT_EXPR, arg0, arg1), arg2);
13734 if (integer_zerop (arg2))
13735 return fold_build2_loc (loc, MULT_EXPR, type, arg0, arg1);
13737 return fold_fma (loc, type, arg0, arg1, arg2);
13739 case VEC_PERM_EXPR:
13740 if (TREE_CODE (arg2) == VECTOR_CST)
13742 unsigned int nelts = TYPE_VECTOR_SUBPARTS (type), i, mask, mask2;
13743 unsigned char *sel = XALLOCAVEC (unsigned char, 2 * nelts);
13744 unsigned char *sel2 = sel + nelts;
13745 bool need_mask_canon = false;
13746 bool need_mask_canon2 = false;
13747 bool all_in_vec0 = true;
13748 bool all_in_vec1 = true;
13749 bool maybe_identity = true;
13750 bool single_arg = (op0 == op1);
13751 bool changed = false;
13753 mask2 = 2 * nelts - 1;
13754 mask = single_arg ? (nelts - 1) : mask2;
13755 gcc_assert (nelts == VECTOR_CST_NELTS (arg2));
13756 for (i = 0; i < nelts; i++)
13758 tree val = VECTOR_CST_ELT (arg2, i);
13759 if (TREE_CODE (val) != INTEGER_CST)
13760 return NULL_TREE;
13762 /* Make sure that the perm value is in an acceptable
13763 range. */
13764 wide_int t = val;
13765 need_mask_canon |= wi::gtu_p (t, mask);
13766 need_mask_canon2 |= wi::gtu_p (t, mask2);
13767 sel[i] = t.to_uhwi () & mask;
13768 sel2[i] = t.to_uhwi () & mask2;
13770 if (sel[i] < nelts)
13771 all_in_vec1 = false;
13772 else
13773 all_in_vec0 = false;
13775 if ((sel[i] & (nelts-1)) != i)
13776 maybe_identity = false;
13779 if (maybe_identity)
13781 if (all_in_vec0)
13782 return op0;
13783 if (all_in_vec1)
13784 return op1;
13787 if (all_in_vec0)
13788 op1 = op0;
13789 else if (all_in_vec1)
13791 op0 = op1;
13792 for (i = 0; i < nelts; i++)
13793 sel[i] -= nelts;
13794 need_mask_canon = true;
13797 if ((TREE_CODE (op0) == VECTOR_CST
13798 || TREE_CODE (op0) == CONSTRUCTOR)
13799 && (TREE_CODE (op1) == VECTOR_CST
13800 || TREE_CODE (op1) == CONSTRUCTOR))
13802 tree t = fold_vec_perm (type, op0, op1, sel);
13803 if (t != NULL_TREE)
13804 return t;
13807 if (op0 == op1 && !single_arg)
13808 changed = true;
13810 /* Some targets are deficient and fail to expand a single
13811 argument permutation while still allowing an equivalent
13812 2-argument version. */
13813 if (need_mask_canon && arg2 == op2
13814 && !can_vec_perm_p (TYPE_MODE (type), false, sel)
13815 && can_vec_perm_p (TYPE_MODE (type), false, sel2))
13817 need_mask_canon = need_mask_canon2;
13818 sel = sel2;
13821 if (need_mask_canon && arg2 == op2)
13823 tree *tsel = XALLOCAVEC (tree, nelts);
13824 tree eltype = TREE_TYPE (TREE_TYPE (arg2));
13825 for (i = 0; i < nelts; i++)
13826 tsel[i] = build_int_cst (eltype, sel[i]);
13827 op2 = build_vector (TREE_TYPE (arg2), tsel);
13828 changed = true;
13831 if (changed)
13832 return build3_loc (loc, VEC_PERM_EXPR, type, op0, op1, op2);
13834 return NULL_TREE;
13836 default:
13837 return NULL_TREE;
13838 } /* switch (code) */
13841 /* Perform constant folding and related simplification of EXPR.
13842 The related simplifications include x*1 => x, x*0 => 0, etc.,
13843 and application of the associative law.
13844 NOP_EXPR conversions may be removed freely (as long as we
13845 are careful not to change the type of the overall expression).
13846 We cannot simplify through a CONVERT_EXPR, FIX_EXPR or FLOAT_EXPR,
13847 but we can constant-fold them if they have constant operands. */
13849 #ifdef ENABLE_FOLD_CHECKING
13850 # define fold(x) fold_1 (x)
13851 static tree fold_1 (tree);
13852 static
13853 #endif
13854 tree
13855 fold (tree expr)
13857 const tree t = expr;
13858 enum tree_code code = TREE_CODE (t);
13859 enum tree_code_class kind = TREE_CODE_CLASS (code);
13860 tree tem;
13861 location_t loc = EXPR_LOCATION (expr);
13863 /* Return right away if a constant. */
13864 if (kind == tcc_constant)
13865 return t;
13867 /* CALL_EXPR-like objects with variable numbers of operands are
13868 treated specially. */
13869 if (kind == tcc_vl_exp)
13871 if (code == CALL_EXPR)
13873 tem = fold_call_expr (loc, expr, false);
13874 return tem ? tem : expr;
13876 return expr;
13879 if (IS_EXPR_CODE_CLASS (kind))
13881 tree type = TREE_TYPE (t);
13882 tree op0, op1, op2;
13884 switch (TREE_CODE_LENGTH (code))
13886 case 1:
13887 op0 = TREE_OPERAND (t, 0);
13888 tem = fold_unary_loc (loc, code, type, op0);
13889 return tem ? tem : expr;
13890 case 2:
13891 op0 = TREE_OPERAND (t, 0);
13892 op1 = TREE_OPERAND (t, 1);
13893 tem = fold_binary_loc (loc, code, type, op0, op1);
13894 return tem ? tem : expr;
13895 case 3:
13896 op0 = TREE_OPERAND (t, 0);
13897 op1 = TREE_OPERAND (t, 1);
13898 op2 = TREE_OPERAND (t, 2);
13899 tem = fold_ternary_loc (loc, code, type, op0, op1, op2);
13900 return tem ? tem : expr;
13901 default:
13902 break;
13906 switch (code)
13908 case ARRAY_REF:
13910 tree op0 = TREE_OPERAND (t, 0);
13911 tree op1 = TREE_OPERAND (t, 1);
13913 if (TREE_CODE (op1) == INTEGER_CST
13914 && TREE_CODE (op0) == CONSTRUCTOR
13915 && ! type_contains_placeholder_p (TREE_TYPE (op0)))
13917 vec<constructor_elt, va_gc> *elts = CONSTRUCTOR_ELTS (op0);
13918 unsigned HOST_WIDE_INT end = vec_safe_length (elts);
13919 unsigned HOST_WIDE_INT begin = 0;
13921 /* Find a matching index by means of a binary search. */
13922 while (begin != end)
13924 unsigned HOST_WIDE_INT middle = (begin + end) / 2;
13925 tree index = (*elts)[middle].index;
13927 if (TREE_CODE (index) == INTEGER_CST
13928 && tree_int_cst_lt (index, op1))
13929 begin = middle + 1;
13930 else if (TREE_CODE (index) == INTEGER_CST
13931 && tree_int_cst_lt (op1, index))
13932 end = middle;
13933 else if (TREE_CODE (index) == RANGE_EXPR
13934 && tree_int_cst_lt (TREE_OPERAND (index, 1), op1))
13935 begin = middle + 1;
13936 else if (TREE_CODE (index) == RANGE_EXPR
13937 && tree_int_cst_lt (op1, TREE_OPERAND (index, 0)))
13938 end = middle;
13939 else
13940 return (*elts)[middle].value;
13944 return t;
13947 /* Return a VECTOR_CST if possible. */
13948 case CONSTRUCTOR:
13950 tree type = TREE_TYPE (t);
13951 if (TREE_CODE (type) != VECTOR_TYPE)
13952 return t;
13954 tree *vec = XALLOCAVEC (tree, TYPE_VECTOR_SUBPARTS (type));
13955 unsigned HOST_WIDE_INT idx, pos = 0;
13956 tree value;
13958 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (t), idx, value)
13960 if (!CONSTANT_CLASS_P (value))
13961 return t;
13962 if (TREE_CODE (value) == VECTOR_CST)
13964 for (unsigned i = 0; i < VECTOR_CST_NELTS (value); ++i)
13965 vec[pos++] = VECTOR_CST_ELT (value, i);
13967 else
13968 vec[pos++] = value;
13970 for (; pos < TYPE_VECTOR_SUBPARTS (type); ++pos)
13971 vec[pos] = build_zero_cst (TREE_TYPE (type));
13973 return build_vector (type, vec);
13976 case CONST_DECL:
13977 return fold (DECL_INITIAL (t));
13979 default:
13980 return t;
13981 } /* switch (code) */
13984 #ifdef ENABLE_FOLD_CHECKING
13985 #undef fold
13987 static void fold_checksum_tree (const_tree, struct md5_ctx *,
13988 hash_table<pointer_hash<const tree_node> > *);
13989 static void fold_check_failed (const_tree, const_tree);
13990 void print_fold_checksum (const_tree);
13992 /* When --enable-checking=fold, compute a digest of expr before
13993 and after the actual fold call to see whether fold accidentally
13994 changed the original expr. */
13996 tree
13997 fold (tree expr)
13999 tree ret;
14000 struct md5_ctx ctx;
14001 unsigned char checksum_before[16], checksum_after[16];
14002 hash_table<pointer_hash<const tree_node> > ht (32);
14004 md5_init_ctx (&ctx);
14005 fold_checksum_tree (expr, &ctx, &ht);
14006 md5_finish_ctx (&ctx, checksum_before);
14007 ht.empty ();
14009 ret = fold_1 (expr);
14011 md5_init_ctx (&ctx);
14012 fold_checksum_tree (expr, &ctx, &ht);
14013 md5_finish_ctx (&ctx, checksum_after);
14015 if (memcmp (checksum_before, checksum_after, 16))
14016 fold_check_failed (expr, ret);
14018 return ret;
14021 void
14022 print_fold_checksum (const_tree expr)
14024 struct md5_ctx ctx;
14025 unsigned char checksum[16], cnt;
14026 hash_table<pointer_hash<const tree_node> > ht (32);
14028 md5_init_ctx (&ctx);
14029 fold_checksum_tree (expr, &ctx, &ht);
14030 md5_finish_ctx (&ctx, checksum);
14031 for (cnt = 0; cnt < 16; ++cnt)
14032 fprintf (stderr, "%02x", checksum[cnt]);
14033 putc ('\n', stderr);
14036 static void
14037 fold_check_failed (const_tree expr ATTRIBUTE_UNUSED, const_tree ret ATTRIBUTE_UNUSED)
14039 internal_error ("fold check: original tree changed by fold");
14042 static void
14043 fold_checksum_tree (const_tree expr, struct md5_ctx *ctx,
14044 hash_table<pointer_hash <const tree_node> > *ht)
14046 const tree_node **slot;
14047 enum tree_code code;
14048 union tree_node buf;
14049 int i, len;
14051 recursive_label:
14052 if (expr == NULL)
14053 return;
14054 slot = ht->find_slot (expr, INSERT);
14055 if (*slot != NULL)
14056 return;
14057 *slot = expr;
14058 code = TREE_CODE (expr);
14059 if (TREE_CODE_CLASS (code) == tcc_declaration
14060 && DECL_ASSEMBLER_NAME_SET_P (expr))
14062 /* Allow DECL_ASSEMBLER_NAME to be modified. */
14063 memcpy ((char *) &buf, expr, tree_size (expr));
14064 SET_DECL_ASSEMBLER_NAME ((tree)&buf, NULL);
14065 expr = (tree) &buf;
14067 else if (TREE_CODE_CLASS (code) == tcc_type
14068 && (TYPE_POINTER_TO (expr)
14069 || TYPE_REFERENCE_TO (expr)
14070 || TYPE_CACHED_VALUES_P (expr)
14071 || TYPE_CONTAINS_PLACEHOLDER_INTERNAL (expr)
14072 || TYPE_NEXT_VARIANT (expr)))
14074 /* Allow these fields to be modified. */
14075 tree tmp;
14076 memcpy ((char *) &buf, expr, tree_size (expr));
14077 expr = tmp = (tree) &buf;
14078 TYPE_CONTAINS_PLACEHOLDER_INTERNAL (tmp) = 0;
14079 TYPE_POINTER_TO (tmp) = NULL;
14080 TYPE_REFERENCE_TO (tmp) = NULL;
14081 TYPE_NEXT_VARIANT (tmp) = NULL;
14082 if (TYPE_CACHED_VALUES_P (tmp))
14084 TYPE_CACHED_VALUES_P (tmp) = 0;
14085 TYPE_CACHED_VALUES (tmp) = NULL;
14088 md5_process_bytes (expr, tree_size (expr), ctx);
14089 if (CODE_CONTAINS_STRUCT (code, TS_TYPED))
14090 fold_checksum_tree (TREE_TYPE (expr), ctx, ht);
14091 if (TREE_CODE_CLASS (code) != tcc_type
14092 && TREE_CODE_CLASS (code) != tcc_declaration
14093 && code != TREE_LIST
14094 && code != SSA_NAME
14095 && CODE_CONTAINS_STRUCT (code, TS_COMMON))
14096 fold_checksum_tree (TREE_CHAIN (expr), ctx, ht);
14097 switch (TREE_CODE_CLASS (code))
14099 case tcc_constant:
14100 switch (code)
14102 case STRING_CST:
14103 md5_process_bytes (TREE_STRING_POINTER (expr),
14104 TREE_STRING_LENGTH (expr), ctx);
14105 break;
14106 case COMPLEX_CST:
14107 fold_checksum_tree (TREE_REALPART (expr), ctx, ht);
14108 fold_checksum_tree (TREE_IMAGPART (expr), ctx, ht);
14109 break;
14110 case VECTOR_CST:
14111 for (i = 0; i < (int) VECTOR_CST_NELTS (expr); ++i)
14112 fold_checksum_tree (VECTOR_CST_ELT (expr, i), ctx, ht);
14113 break;
14114 default:
14115 break;
14117 break;
14118 case tcc_exceptional:
14119 switch (code)
14121 case TREE_LIST:
14122 fold_checksum_tree (TREE_PURPOSE (expr), ctx, ht);
14123 fold_checksum_tree (TREE_VALUE (expr), ctx, ht);
14124 expr = TREE_CHAIN (expr);
14125 goto recursive_label;
14126 break;
14127 case TREE_VEC:
14128 for (i = 0; i < TREE_VEC_LENGTH (expr); ++i)
14129 fold_checksum_tree (TREE_VEC_ELT (expr, i), ctx, ht);
14130 break;
14131 default:
14132 break;
14134 break;
14135 case tcc_expression:
14136 case tcc_reference:
14137 case tcc_comparison:
14138 case tcc_unary:
14139 case tcc_binary:
14140 case tcc_statement:
14141 case tcc_vl_exp:
14142 len = TREE_OPERAND_LENGTH (expr);
14143 for (i = 0; i < len; ++i)
14144 fold_checksum_tree (TREE_OPERAND (expr, i), ctx, ht);
14145 break;
14146 case tcc_declaration:
14147 fold_checksum_tree (DECL_NAME (expr), ctx, ht);
14148 fold_checksum_tree (DECL_CONTEXT (expr), ctx, ht);
14149 if (CODE_CONTAINS_STRUCT (TREE_CODE (expr), TS_DECL_COMMON))
14151 fold_checksum_tree (DECL_SIZE (expr), ctx, ht);
14152 fold_checksum_tree (DECL_SIZE_UNIT (expr), ctx, ht);
14153 fold_checksum_tree (DECL_INITIAL (expr), ctx, ht);
14154 fold_checksum_tree (DECL_ABSTRACT_ORIGIN (expr), ctx, ht);
14155 fold_checksum_tree (DECL_ATTRIBUTES (expr), ctx, ht);
14158 if (CODE_CONTAINS_STRUCT (TREE_CODE (expr), TS_DECL_NON_COMMON))
14160 if (TREE_CODE (expr) == FUNCTION_DECL)
14162 fold_checksum_tree (DECL_VINDEX (expr), ctx, ht);
14163 fold_checksum_tree (DECL_ARGUMENTS (expr), ctx, ht);
14165 fold_checksum_tree (DECL_RESULT_FLD (expr), ctx, ht);
14167 break;
14168 case tcc_type:
14169 if (TREE_CODE (expr) == ENUMERAL_TYPE)
14170 fold_checksum_tree (TYPE_VALUES (expr), ctx, ht);
14171 fold_checksum_tree (TYPE_SIZE (expr), ctx, ht);
14172 fold_checksum_tree (TYPE_SIZE_UNIT (expr), ctx, ht);
14173 fold_checksum_tree (TYPE_ATTRIBUTES (expr), ctx, ht);
14174 fold_checksum_tree (TYPE_NAME (expr), ctx, ht);
14175 if (INTEGRAL_TYPE_P (expr)
14176 || SCALAR_FLOAT_TYPE_P (expr))
14178 fold_checksum_tree (TYPE_MIN_VALUE (expr), ctx, ht);
14179 fold_checksum_tree (TYPE_MAX_VALUE (expr), ctx, ht);
14181 fold_checksum_tree (TYPE_MAIN_VARIANT (expr), ctx, ht);
14182 if (TREE_CODE (expr) == RECORD_TYPE
14183 || TREE_CODE (expr) == UNION_TYPE
14184 || TREE_CODE (expr) == QUAL_UNION_TYPE)
14185 fold_checksum_tree (TYPE_BINFO (expr), ctx, ht);
14186 fold_checksum_tree (TYPE_CONTEXT (expr), ctx, ht);
14187 break;
14188 default:
14189 break;
14193 /* Helper function for outputting the checksum of a tree T. When
14194 debugging with gdb, you can "define mynext" to be "next" followed
14195 by "call debug_fold_checksum (op0)", then just trace down till the
14196 outputs differ. */
14198 DEBUG_FUNCTION void
14199 debug_fold_checksum (const_tree t)
14201 int i;
14202 unsigned char checksum[16];
14203 struct md5_ctx ctx;
14204 hash_table<pointer_hash<const tree_node> > ht (32);
14206 md5_init_ctx (&ctx);
14207 fold_checksum_tree (t, &ctx, &ht);
14208 md5_finish_ctx (&ctx, checksum);
14209 ht.empty ();
14211 for (i = 0; i < 16; i++)
14212 fprintf (stderr, "%d ", checksum[i]);
14214 fprintf (stderr, "\n");
14217 #endif
14219 /* Fold a unary tree expression with code CODE of type TYPE with an
14220 operand OP0. LOC is the location of the resulting expression.
14221 Return a folded expression if successful. Otherwise, return a tree
14222 expression with code CODE of type TYPE with an operand OP0. */
14224 tree
14225 fold_build1_stat_loc (location_t loc,
14226 enum tree_code code, tree type, tree op0 MEM_STAT_DECL)
14228 tree tem;
14229 #ifdef ENABLE_FOLD_CHECKING
14230 unsigned char checksum_before[16], checksum_after[16];
14231 struct md5_ctx ctx;
14232 hash_table<pointer_hash<const tree_node> > ht (32);
14234 md5_init_ctx (&ctx);
14235 fold_checksum_tree (op0, &ctx, &ht);
14236 md5_finish_ctx (&ctx, checksum_before);
14237 ht.empty ();
14238 #endif
14240 tem = fold_unary_loc (loc, code, type, op0);
14241 if (!tem)
14242 tem = build1_stat_loc (loc, code, type, op0 PASS_MEM_STAT);
14244 #ifdef ENABLE_FOLD_CHECKING
14245 md5_init_ctx (&ctx);
14246 fold_checksum_tree (op0, &ctx, &ht);
14247 md5_finish_ctx (&ctx, checksum_after);
14249 if (memcmp (checksum_before, checksum_after, 16))
14250 fold_check_failed (op0, tem);
14251 #endif
14252 return tem;
14255 /* Fold a binary tree expression with code CODE of type TYPE with
14256 operands OP0 and OP1. LOC is the location of the resulting
14257 expression. Return a folded expression if successful. Otherwise,
14258 return a tree expression with code CODE of type TYPE with operands
14259 OP0 and OP1. */
14261 tree
14262 fold_build2_stat_loc (location_t loc,
14263 enum tree_code code, tree type, tree op0, tree op1
14264 MEM_STAT_DECL)
14266 tree tem;
14267 #ifdef ENABLE_FOLD_CHECKING
14268 unsigned char checksum_before_op0[16],
14269 checksum_before_op1[16],
14270 checksum_after_op0[16],
14271 checksum_after_op1[16];
14272 struct md5_ctx ctx;
14273 hash_table<pointer_hash<const tree_node> > ht (32);
14275 md5_init_ctx (&ctx);
14276 fold_checksum_tree (op0, &ctx, &ht);
14277 md5_finish_ctx (&ctx, checksum_before_op0);
14278 ht.empty ();
14280 md5_init_ctx (&ctx);
14281 fold_checksum_tree (op1, &ctx, &ht);
14282 md5_finish_ctx (&ctx, checksum_before_op1);
14283 ht.empty ();
14284 #endif
14286 tem = fold_binary_loc (loc, code, type, op0, op1);
14287 if (!tem)
14288 tem = build2_stat_loc (loc, code, type, op0, op1 PASS_MEM_STAT);
14290 #ifdef ENABLE_FOLD_CHECKING
14291 md5_init_ctx (&ctx);
14292 fold_checksum_tree (op0, &ctx, &ht);
14293 md5_finish_ctx (&ctx, checksum_after_op0);
14294 ht.empty ();
14296 if (memcmp (checksum_before_op0, checksum_after_op0, 16))
14297 fold_check_failed (op0, tem);
14299 md5_init_ctx (&ctx);
14300 fold_checksum_tree (op1, &ctx, &ht);
14301 md5_finish_ctx (&ctx, checksum_after_op1);
14303 if (memcmp (checksum_before_op1, checksum_after_op1, 16))
14304 fold_check_failed (op1, tem);
14305 #endif
14306 return tem;
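/* Usage sketch for the fold_buildN entry points (illustrative only;
   LOC is an assumed location_t in scope):

     tree two = build_int_cst (integer_type_node, 2);
     tree three = build_int_cst (integer_type_node, 3);
     tree sum = fold_build2_loc (loc, PLUS_EXPR, integer_type_node,
                                 two, three);

   Because both operands are constants, SUM is an INTEGER_CST of
   value 5 rather than a freshly built PLUS_EXPR node.  */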
14309 /* Fold a ternary tree expression with code CODE of type TYPE with
14310 operands OP0, OP1, and OP2. Return a folded expression if
14311 successful. Otherwise, return a tree expression with code CODE of
14312 type TYPE with operands OP0, OP1, and OP2. */
14314 tree
14315 fold_build3_stat_loc (location_t loc, enum tree_code code, tree type,
14316 tree op0, tree op1, tree op2 MEM_STAT_DECL)
14318 tree tem;
14319 #ifdef ENABLE_FOLD_CHECKING
14320 unsigned char checksum_before_op0[16],
14321 checksum_before_op1[16],
14322 checksum_before_op2[16],
14323 checksum_after_op0[16],
14324 checksum_after_op1[16],
14325 checksum_after_op2[16];
14326 struct md5_ctx ctx;
14327 hash_table<pointer_hash<const tree_node> > ht (32);
14329 md5_init_ctx (&ctx);
14330 fold_checksum_tree (op0, &ctx, &ht);
14331 md5_finish_ctx (&ctx, checksum_before_op0);
14332 ht.empty ();
14334 md5_init_ctx (&ctx);
14335 fold_checksum_tree (op1, &ctx, &ht);
14336 md5_finish_ctx (&ctx, checksum_before_op1);
14337 ht.empty ();
14339 md5_init_ctx (&ctx);
14340 fold_checksum_tree (op2, &ctx, &ht);
14341 md5_finish_ctx (&ctx, checksum_before_op2);
14342 ht.empty ();
14343 #endif
14345 gcc_assert (TREE_CODE_CLASS (code) != tcc_vl_exp);
14346 tem = fold_ternary_loc (loc, code, type, op0, op1, op2);
14347 if (!tem)
14348 tem = build3_stat_loc (loc, code, type, op0, op1, op2 PASS_MEM_STAT);
14350 #ifdef ENABLE_FOLD_CHECKING
14351 md5_init_ctx (&ctx);
14352 fold_checksum_tree (op0, &ctx, &ht);
14353 md5_finish_ctx (&ctx, checksum_after_op0);
14354 ht.empty ();
14356 if (memcmp (checksum_before_op0, checksum_after_op0, 16))
14357 fold_check_failed (op0, tem);
14359 md5_init_ctx (&ctx);
14360 fold_checksum_tree (op1, &ctx, &ht);
14361 md5_finish_ctx (&ctx, checksum_after_op1);
14362 ht.empty ();
14364 if (memcmp (checksum_before_op1, checksum_after_op1, 16))
14365 fold_check_failed (op1, tem);
14367 md5_init_ctx (&ctx);
14368 fold_checksum_tree (op2, &ctx, &ht);
14369 md5_finish_ctx (&ctx, checksum_after_op2);
14371 if (memcmp (checksum_before_op2, checksum_after_op2, 16))
14372 fold_check_failed (op2, tem);
14373 #endif
14374 return tem;
14377 /* Fold a CALL_EXPR expression of type TYPE with callee FN and NARGS
14378 arguments in ARGARRAY, and a null static chain.
14379 Return a folded expression if successful. Otherwise, return a CALL_EXPR
14380 of type TYPE from the given operands as constructed by build_call_array. */
14382 tree
14383 fold_build_call_array_loc (location_t loc, tree type, tree fn,
14384 int nargs, tree *argarray)
14386 tree tem;
14387 #ifdef ENABLE_FOLD_CHECKING
14388 unsigned char checksum_before_fn[16],
14389 checksum_before_arglist[16],
14390 checksum_after_fn[16],
14391 checksum_after_arglist[16];
14392 struct md5_ctx ctx;
14393 hash_table<pointer_hash<const tree_node> > ht (32);
14394 int i;
14396 md5_init_ctx (&ctx);
14397 fold_checksum_tree (fn, &ctx, &ht);
14398 md5_finish_ctx (&ctx, checksum_before_fn);
14399 ht.empty ();
14401 md5_init_ctx (&ctx);
14402 for (i = 0; i < nargs; i++)
14403 fold_checksum_tree (argarray[i], &ctx, &ht);
14404 md5_finish_ctx (&ctx, checksum_before_arglist);
14405 ht.empty ();
14406 #endif
14408 tem = fold_builtin_call_array (loc, type, fn, nargs, argarray);
14409 if (!tem)
14410 tem = build_call_array_loc (loc, type, fn, nargs, argarray);
14412 #ifdef ENABLE_FOLD_CHECKING
14413 md5_init_ctx (&ctx);
14414 fold_checksum_tree (fn, &ctx, &ht);
14415 md5_finish_ctx (&ctx, checksum_after_fn);
14416 ht.empty ();
14418 if (memcmp (checksum_before_fn, checksum_after_fn, 16))
14419 fold_check_failed (fn, tem);
14421 md5_init_ctx (&ctx);
14422 for (i = 0; i < nargs; i++)
14423 fold_checksum_tree (argarray[i], &ctx, &ht);
14424 md5_finish_ctx (&ctx, checksum_after_arglist);
14426 if (memcmp (checksum_before_arglist, checksum_after_arglist, 16))
14427 fold_check_failed (NULL_TREE, tem);
14428 #endif
14429 return tem;
14432 /* Perform constant folding and related simplification of initializer
14433 expression EXPR. These behave identically to "fold_buildN" but ignore
14434 potential run-time traps and exceptions that fold must preserve. */
14436 #define START_FOLD_INIT \
14437 int saved_signaling_nans = flag_signaling_nans;\
14438 int saved_trapping_math = flag_trapping_math;\
14439 int saved_rounding_math = flag_rounding_math;\
14440 int saved_trapv = flag_trapv;\
14441 int saved_folding_initializer = folding_initializer;\
14442 flag_signaling_nans = 0;\
14443 flag_trapping_math = 0;\
14444 flag_rounding_math = 0;\
14445 flag_trapv = 0;\
14446 folding_initializer = 1;
14448 #define END_FOLD_INIT \
14449 flag_signaling_nans = saved_signaling_nans;\
14450 flag_trapping_math = saved_trapping_math;\
14451 flag_rounding_math = saved_rounding_math;\
14452 flag_trapv = saved_trapv;\
14453 folding_initializer = saved_folding_initializer;
14455 tree
14456 fold_build1_initializer_loc (location_t loc, enum tree_code code,
14457 tree type, tree op)
14459 tree result;
14460 START_FOLD_INIT;
14462 result = fold_build1_loc (loc, code, type, op);
14464 END_FOLD_INIT;
14465 return result;
14468 tree
14469 fold_build2_initializer_loc (location_t loc, enum tree_code code,
14470 tree type, tree op0, tree op1)
14472 tree result;
14473 START_FOLD_INIT;
14475 result = fold_build2_loc (loc, code, type, op0, op1);
14477 END_FOLD_INIT;
14478 return result;
14481 tree
14482 fold_build_call_array_initializer_loc (location_t loc, tree type, tree fn,
14483 int nargs, tree *argarray)
14485 tree result;
14486 START_FOLD_INIT;
14488 result = fold_build_call_array_loc (loc, type, fn, nargs, argarray);
14490 END_FOLD_INIT;
14491 return result;
14494 #undef START_FOLD_INIT
14495 #undef END_FOLD_INIT
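/* Illustrative sketch of the initializer variants (R0, R1 and LOC
   are assumed to be in scope):

     tree q = fold_build2_initializer_loc (loc, RDIV_EXPR,
                                           double_type_node, r0, r1);

   With flag_trapping_math and friends temporarily cleared by
   START_FOLD_INIT, the division may fold to a REAL_CST even when it
   could trap at run time, which plain fold_build2_loc must not do.  */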
14497 /* Determine if the first argument is a multiple of the second argument.
14498 Return 0 if it is not, or if we cannot easily determine it to be.
14500 An example of the sort of thing we care about (at this point; this routine
14501 could surely be made more general, and expanded to do what the *_DIV_EXPR's
14502 fold cases do now) is discovering that
14504 SAVE_EXPR (I) * SAVE_EXPR (J * 8)
14506 is a multiple of
14508 SAVE_EXPR (J * 8)
14510 when we know that the two SAVE_EXPR (J * 8) nodes are the same node.
14512 This code also handles discovering that
14514 SAVE_EXPR (I) * SAVE_EXPR (J * 8)
14516 is a multiple of 8 so we don't have to worry about dealing with a
14517 possible remainder.
14519 Note that we *look* inside a SAVE_EXPR only to determine how it was
14520 calculated; it is not safe for fold to do much of anything else with the
14521 internals of a SAVE_EXPR, since it cannot know when it will be evaluated
14522 at run time. For example, the latter example above *cannot* be implemented
14523 as SAVE_EXPR (I) * J or any variant thereof, since the value of J at
14524 evaluation time of the original SAVE_EXPR is not necessarily the same at
14525 the time the new expression is evaluated. The only optimization of this
14526 sort that would be valid is changing
14528 SAVE_EXPR (I) * SAVE_EXPR (SAVE_EXPR (J) * 8)
14530 divided by 8 to
14532 SAVE_EXPR (I) * SAVE_EXPR (J)
14534 (where the same SAVE_EXPR (J) is used in the original and the
14535 transformed version). */
14537 int
14538 multiple_of_p (tree type, const_tree top, const_tree bottom)
14540 if (operand_equal_p (top, bottom, 0))
14541 return 1;
14543 if (TREE_CODE (type) != INTEGER_TYPE)
14544 return 0;
14546 switch (TREE_CODE (top))
14548 case BIT_AND_EXPR:
14549 /* Bitwise and provides a power of two multiple. If the mask is
14550 a multiple of BOTTOM then TOP is a multiple of BOTTOM. */
14551 if (!integer_pow2p (bottom))
14552 return 0;
14553 /* FALLTHRU */
14555 case MULT_EXPR:
14556 return (multiple_of_p (type, TREE_OPERAND (top, 0), bottom)
14557 || multiple_of_p (type, TREE_OPERAND (top, 1), bottom));
14559 case PLUS_EXPR:
14560 case MINUS_EXPR:
14561 return (multiple_of_p (type, TREE_OPERAND (top, 0), bottom)
14562 && multiple_of_p (type, TREE_OPERAND (top, 1), bottom));
14564 case LSHIFT_EXPR:
14565 if (TREE_CODE (TREE_OPERAND (top, 1)) == INTEGER_CST)
14567 tree op1, t1;
14569 op1 = TREE_OPERAND (top, 1);
14570 /* const_binop may not detect overflow correctly,
14571 so check for it explicitly here. */
14572 if (wi::gtu_p (TYPE_PRECISION (TREE_TYPE (size_one_node)), op1)
14573 && 0 != (t1 = fold_convert (type,
14574 const_binop (LSHIFT_EXPR,
14575 size_one_node,
14576 op1)))
14577 && !TREE_OVERFLOW (t1))
14578 return multiple_of_p (type, t1, bottom);
14580 return 0;
14582 case NOP_EXPR:
14583 /* Can't handle conversions from non-integral or wider integral type. */
14584 if ((TREE_CODE (TREE_TYPE (TREE_OPERAND (top, 0))) != INTEGER_TYPE)
14585 || (TYPE_PRECISION (type)
14586 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (top, 0)))))
14587 return 0;
14589 /* ... fall through ... */
14591 case SAVE_EXPR:
14592 return multiple_of_p (type, TREE_OPERAND (top, 0), bottom);
14594 case COND_EXPR:
14595 return (multiple_of_p (type, TREE_OPERAND (top, 1), bottom)
14596 && multiple_of_p (type, TREE_OPERAND (top, 2), bottom));
14598 case INTEGER_CST:
14599 if (TREE_CODE (bottom) != INTEGER_CST
14600 || integer_zerop (bottom)
14601 || (TYPE_UNSIGNED (type)
14602 && (tree_int_cst_sgn (top) < 0
14603 || tree_int_cst_sgn (bottom) < 0)))
14604 return 0;
14605 return wi::multiple_of_p (wi::to_widest (top), wi::to_widest (bottom),
14606 SIGNED);
14608 default:
14609 return 0;
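/* A small sketch of the INTEGER_CST leg above (illustrative only):

     multiple_of_p (sizetype, size_int (24), size_int (8))

   returns 1, while swapping the arguments returns 0, since 8 is not
   a multiple of 24.  */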
14613 /* Return true if CODE or TYPE is known to be non-negative. */
14615 static bool
14616 tree_simple_nonnegative_warnv_p (enum tree_code code, tree type)
14618 if ((TYPE_PRECISION (type) != 1 || TYPE_UNSIGNED (type))
14619 && truth_value_p (code))
14620 /* Truth values evaluate to 0 or 1, which is nonnegative unless we
14621 have a signed:1 type (where the values are -1 and 0). */
14622 return true;
14623 return false;
14626 /* Return true if (CODE OP0) is known to be non-negative. If the return
14627 value is based on the assumption that signed overflow is undefined,
14628 set *STRICT_OVERFLOW_P to true; otherwise, don't change
14629 *STRICT_OVERFLOW_P. */
14631 bool
14632 tree_unary_nonnegative_warnv_p (enum tree_code code, tree type, tree op0,
14633 bool *strict_overflow_p)
14635 if (TYPE_UNSIGNED (type))
14636 return true;
14638 switch (code)
14640 case ABS_EXPR:
14641 /* We can't return 1 if flag_wrapv is set because
14642 ABS_EXPR<INT_MIN> = INT_MIN. */
14643 if (!INTEGRAL_TYPE_P (type))
14644 return true;
14645 if (TYPE_OVERFLOW_UNDEFINED (type))
14647 *strict_overflow_p = true;
14648 return true;
14650 break;
14652 case NON_LVALUE_EXPR:
14653 case FLOAT_EXPR:
14654 case FIX_TRUNC_EXPR:
14655 return tree_expr_nonnegative_warnv_p (op0,
14656 strict_overflow_p);
14658 CASE_CONVERT:
14660 tree inner_type = TREE_TYPE (op0);
14661 tree outer_type = type;
14663 if (TREE_CODE (outer_type) == REAL_TYPE)
14665 if (TREE_CODE (inner_type) == REAL_TYPE)
14666 return tree_expr_nonnegative_warnv_p (op0,
14667 strict_overflow_p);
14668 if (INTEGRAL_TYPE_P (inner_type))
14670 if (TYPE_UNSIGNED (inner_type))
14671 return true;
14672 return tree_expr_nonnegative_warnv_p (op0,
14673 strict_overflow_p);
14676 else if (INTEGRAL_TYPE_P (outer_type))
14678 if (TREE_CODE (inner_type) == REAL_TYPE)
14679 return tree_expr_nonnegative_warnv_p (op0,
14680 strict_overflow_p);
14681 if (INTEGRAL_TYPE_P (inner_type))
14682 return TYPE_PRECISION (inner_type) < TYPE_PRECISION (outer_type)
14683 && TYPE_UNSIGNED (inner_type);
14686 break;
14688 default:
14689 return tree_simple_nonnegative_warnv_p (code, type);
14692 /* We don't know sign of `t', so be conservative and return false. */
14693 return false;
14696 /* Return true if (CODE OP0 OP1) is known to be non-negative. If the return
14697 value is based on the assumption that signed overflow is undefined,
14698 set *STRICT_OVERFLOW_P to true; otherwise, don't change
14699 *STRICT_OVERFLOW_P. */
14701 bool
14702 tree_binary_nonnegative_warnv_p (enum tree_code code, tree type, tree op0,
14703 tree op1, bool *strict_overflow_p)
14705 if (TYPE_UNSIGNED (type))
14706 return true;
14708 switch (code)
14710 case POINTER_PLUS_EXPR:
14711 case PLUS_EXPR:
14712 if (FLOAT_TYPE_P (type))
14713 return (tree_expr_nonnegative_warnv_p (op0,
14714 strict_overflow_p)
14715 && tree_expr_nonnegative_warnv_p (op1,
14716 strict_overflow_p));
14718 /* zero_extend(x) + zero_extend(y) is non-negative if x and y are
14719 both unsigned and at least 2 bits shorter than the result. */
14720 if (TREE_CODE (type) == INTEGER_TYPE
14721 && TREE_CODE (op0) == NOP_EXPR
14722 && TREE_CODE (op1) == NOP_EXPR)
14724 tree inner1 = TREE_TYPE (TREE_OPERAND (op0, 0));
14725 tree inner2 = TREE_TYPE (TREE_OPERAND (op1, 0));
14726 if (TREE_CODE (inner1) == INTEGER_TYPE && TYPE_UNSIGNED (inner1)
14727 && TREE_CODE (inner2) == INTEGER_TYPE && TYPE_UNSIGNED (inner2))
14729 unsigned int prec = MAX (TYPE_PRECISION (inner1),
14730 TYPE_PRECISION (inner2)) + 1;
14731 return prec < TYPE_PRECISION (type);
14734 break;
14736 case MULT_EXPR:
14737 if (FLOAT_TYPE_P (type) || TYPE_OVERFLOW_UNDEFINED (type))
14739 /* x * x is always non-negative for floating point x
14740 or when signed overflow is undefined. */
14741 if (operand_equal_p (op0, op1, 0)
14742 || (tree_expr_nonnegative_warnv_p (op0, strict_overflow_p)
14743 && tree_expr_nonnegative_warnv_p (op1, strict_overflow_p)))
14745 if (ANY_INTEGRAL_TYPE_P (type)
14746 && TYPE_OVERFLOW_UNDEFINED (type))
14747 *strict_overflow_p = true;
14748 return true;
14752 /* zero_extend(x) * zero_extend(y) is non-negative if x and y are
14753 both unsigned and their total precision is smaller than that of the result. */
14754 if (TREE_CODE (type) == INTEGER_TYPE
14755 && (TREE_CODE (op0) == NOP_EXPR || TREE_CODE (op0) == INTEGER_CST)
14756 && (TREE_CODE (op1) == NOP_EXPR || TREE_CODE (op1) == INTEGER_CST))
14758 tree inner0 = (TREE_CODE (op0) == NOP_EXPR)
14759 ? TREE_TYPE (TREE_OPERAND (op0, 0))
14760 : TREE_TYPE (op0);
14761 tree inner1 = (TREE_CODE (op1) == NOP_EXPR)
14762 ? TREE_TYPE (TREE_OPERAND (op1, 0))
14763 : TREE_TYPE (op1);
14765 bool unsigned0 = TYPE_UNSIGNED (inner0);
14766 bool unsigned1 = TYPE_UNSIGNED (inner1);
14768 if (TREE_CODE (op0) == INTEGER_CST)
14769 unsigned0 = unsigned0 || tree_int_cst_sgn (op0) >= 0;
14771 if (TREE_CODE (op1) == INTEGER_CST)
14772 unsigned1 = unsigned1 || tree_int_cst_sgn (op1) >= 0;
14774 if (TREE_CODE (inner0) == INTEGER_TYPE && unsigned0
14775 && TREE_CODE (inner1) == INTEGER_TYPE && unsigned1)
14777 unsigned int precision0 = (TREE_CODE (op0) == INTEGER_CST)
14778 ? tree_int_cst_min_precision (op0, UNSIGNED)
14779 : TYPE_PRECISION (inner0);
14781 unsigned int precision1 = (TREE_CODE (op1) == INTEGER_CST)
14782 ? tree_int_cst_min_precision (op1, UNSIGNED)
14783 : TYPE_PRECISION (inner1);
14785 return precision0 + precision1 < TYPE_PRECISION (type);
14788 return false;
14790 case BIT_AND_EXPR:
14791 case MAX_EXPR:
14792 return (tree_expr_nonnegative_warnv_p (op0,
14793 strict_overflow_p)
14794 || tree_expr_nonnegative_warnv_p (op1,
14795 strict_overflow_p));
14797 case BIT_IOR_EXPR:
14798 case BIT_XOR_EXPR:
14799 case MIN_EXPR:
14800 case RDIV_EXPR:
14801 case TRUNC_DIV_EXPR:
14802 case CEIL_DIV_EXPR:
14803 case FLOOR_DIV_EXPR:
14804 case ROUND_DIV_EXPR:
14805 return (tree_expr_nonnegative_warnv_p (op0,
14806 strict_overflow_p)
14807 && tree_expr_nonnegative_warnv_p (op1,
14808 strict_overflow_p));
14810 case TRUNC_MOD_EXPR:
14811 case CEIL_MOD_EXPR:
14812 case FLOOR_MOD_EXPR:
14813 case ROUND_MOD_EXPR:
14814 return tree_expr_nonnegative_warnv_p (op0,
14815 strict_overflow_p);
14816 default:
14817 return tree_simple_nonnegative_warnv_p (code, type);
14820 /* We don't know sign of `t', so be conservative and return false. */
14821 return false;
14824 /* Return true if T is known to be non-negative. If the return
14825 value is based on the assumption that signed overflow is undefined,
14826 set *STRICT_OVERFLOW_P to true; otherwise, don't change
14827 *STRICT_OVERFLOW_P. */
14829 bool
14830 tree_single_nonnegative_warnv_p (tree t, bool *strict_overflow_p)
14832 if (TYPE_UNSIGNED (TREE_TYPE (t)))
14833 return true;
14835 switch (TREE_CODE (t))
14837 case INTEGER_CST:
14838 return tree_int_cst_sgn (t) >= 0;
14840 case REAL_CST:
14841 return ! REAL_VALUE_NEGATIVE (TREE_REAL_CST (t));
14843 case FIXED_CST:
14844 return ! FIXED_VALUE_NEGATIVE (TREE_FIXED_CST (t));
14846 case COND_EXPR:
14847 return (tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 1),
14848 strict_overflow_p)
14849 && tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 2),
14850 strict_overflow_p));
14851 default:
14852 return tree_simple_nonnegative_warnv_p (TREE_CODE (t),
14853 TREE_TYPE (t));
14855 /* We don't know sign of `t', so be conservative and return false. */
14856 return false;
14859 /* Return true if T is known to be non-negative. If the return
14860 value is based on the assumption that signed overflow is undefined,
14861 set *STRICT_OVERFLOW_P to true; otherwise, don't change
14862 *STRICT_OVERFLOW_P. */
14864 bool
14865 tree_call_nonnegative_warnv_p (tree type, tree fndecl,
14866 tree arg0, tree arg1, bool *strict_overflow_p)
14868 if (fndecl && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL)
14869 switch (DECL_FUNCTION_CODE (fndecl))
14871 CASE_FLT_FN (BUILT_IN_ACOS):
14872 CASE_FLT_FN (BUILT_IN_ACOSH):
14873 CASE_FLT_FN (BUILT_IN_CABS):
14874 CASE_FLT_FN (BUILT_IN_COSH):
14875 CASE_FLT_FN (BUILT_IN_ERFC):
14876 CASE_FLT_FN (BUILT_IN_EXP):
14877 CASE_FLT_FN (BUILT_IN_EXP10):
14878 CASE_FLT_FN (BUILT_IN_EXP2):
14879 CASE_FLT_FN (BUILT_IN_FABS):
14880 CASE_FLT_FN (BUILT_IN_FDIM):
14881 CASE_FLT_FN (BUILT_IN_HYPOT):
14882 CASE_FLT_FN (BUILT_IN_POW10):
14883 CASE_INT_FN (BUILT_IN_FFS):
14884 CASE_INT_FN (BUILT_IN_PARITY):
14885 CASE_INT_FN (BUILT_IN_POPCOUNT):
14886 CASE_INT_FN (BUILT_IN_CLZ):
14887 CASE_INT_FN (BUILT_IN_CLRSB):
14888 case BUILT_IN_BSWAP32:
14889 case BUILT_IN_BSWAP64:
14890 /* Always true. */
14891 return true;
14893 CASE_FLT_FN (BUILT_IN_SQRT):
14894 /* sqrt(-0.0) is -0.0. */
14895 if (!HONOR_SIGNED_ZEROS (element_mode (type)))
14896 return true;
14897 return tree_expr_nonnegative_warnv_p (arg0,
14898 strict_overflow_p);
14900 CASE_FLT_FN (BUILT_IN_ASINH):
14901 CASE_FLT_FN (BUILT_IN_ATAN):
14902 CASE_FLT_FN (BUILT_IN_ATANH):
14903 CASE_FLT_FN (BUILT_IN_CBRT):
14904 CASE_FLT_FN (BUILT_IN_CEIL):
14905 CASE_FLT_FN (BUILT_IN_ERF):
14906 CASE_FLT_FN (BUILT_IN_EXPM1):
14907 CASE_FLT_FN (BUILT_IN_FLOOR):
14908 CASE_FLT_FN (BUILT_IN_FMOD):
14909 CASE_FLT_FN (BUILT_IN_FREXP):
14910 CASE_FLT_FN (BUILT_IN_ICEIL):
14911 CASE_FLT_FN (BUILT_IN_IFLOOR):
14912 CASE_FLT_FN (BUILT_IN_IRINT):
14913 CASE_FLT_FN (BUILT_IN_IROUND):
14914 CASE_FLT_FN (BUILT_IN_LCEIL):
14915 CASE_FLT_FN (BUILT_IN_LDEXP):
14916 CASE_FLT_FN (BUILT_IN_LFLOOR):
14917 CASE_FLT_FN (BUILT_IN_LLCEIL):
14918 CASE_FLT_FN (BUILT_IN_LLFLOOR):
14919 CASE_FLT_FN (BUILT_IN_LLRINT):
14920 CASE_FLT_FN (BUILT_IN_LLROUND):
14921 CASE_FLT_FN (BUILT_IN_LRINT):
14922 CASE_FLT_FN (BUILT_IN_LROUND):
14923 CASE_FLT_FN (BUILT_IN_MODF):
14924 CASE_FLT_FN (BUILT_IN_NEARBYINT):
14925 CASE_FLT_FN (BUILT_IN_RINT):
14926 CASE_FLT_FN (BUILT_IN_ROUND):
14927 CASE_FLT_FN (BUILT_IN_SCALB):
14928 CASE_FLT_FN (BUILT_IN_SCALBLN):
14929 CASE_FLT_FN (BUILT_IN_SCALBN):
14930 CASE_FLT_FN (BUILT_IN_SIGNBIT):
14931 CASE_FLT_FN (BUILT_IN_SIGNIFICAND):
14932 CASE_FLT_FN (BUILT_IN_SINH):
14933 CASE_FLT_FN (BUILT_IN_TANH):
14934 CASE_FLT_FN (BUILT_IN_TRUNC):
14935 /* True if the 1st argument is nonnegative. */
14936 return tree_expr_nonnegative_warnv_p (arg0,
14937 strict_overflow_p);
14939 CASE_FLT_FN (BUILT_IN_FMAX):
14940 /* True if the 1st OR 2nd arguments are nonnegative. */
14941 return (tree_expr_nonnegative_warnv_p (arg0,
14942 strict_overflow_p)
14943 || (tree_expr_nonnegative_warnv_p (arg1,
14944 strict_overflow_p)));
14946 CASE_FLT_FN (BUILT_IN_FMIN):
14947 /* True if the 1st AND 2nd arguments are nonnegative. */
14948 return (tree_expr_nonnegative_warnv_p (arg0,
14949 strict_overflow_p)
14950 && (tree_expr_nonnegative_warnv_p (arg1,
14951 strict_overflow_p)));
14953 CASE_FLT_FN (BUILT_IN_COPYSIGN):
14954 /* True if the 2nd argument is nonnegative. */
14955 return tree_expr_nonnegative_warnv_p (arg1,
14956 strict_overflow_p);
14958 CASE_FLT_FN (BUILT_IN_POWI):
14959 /* True if the 1st argument is nonnegative or the second
14960 argument is an even integer. */
14961 if (TREE_CODE (arg1) == INTEGER_CST
14962 && (TREE_INT_CST_LOW (arg1) & 1) == 0)
14963 return true;
14964 return tree_expr_nonnegative_warnv_p (arg0,
14965 strict_overflow_p);
14967 CASE_FLT_FN (BUILT_IN_POW):
14968 /* True if the 1st argument is nonnegative or the second
14969 argument is an even integer-valued real. */
14970 if (TREE_CODE (arg1) == REAL_CST)
14972 REAL_VALUE_TYPE c;
14973 HOST_WIDE_INT n;
14975 c = TREE_REAL_CST (arg1);
14976 n = real_to_integer (&c);
14977 if ((n & 1) == 0)
14979 REAL_VALUE_TYPE cint;
14980 real_from_integer (&cint, VOIDmode, n, SIGNED);
14981 if (real_identical (&c, &cint))
14982 return true;
14985 return tree_expr_nonnegative_warnv_p (arg0,
14986 strict_overflow_p);
14988 default:
14989 break;
14991 return tree_simple_nonnegative_warnv_p (CALL_EXPR,
14992 type);
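/* Sketch of the BUILT_IN_POW leg above (X is an assumed
   floating-point operand): pow (x, 2.0) is known non-negative
   because 2.0 is an even integer-valued REAL_CST, regardless of the
   sign of X, whereas pow (x, 3.0) is non-negative only if X is.  */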
14995 /* Return true if T is known to be non-negative. If the return
14996 value is based on the assumption that signed overflow is undefined,
14997 set *STRICT_OVERFLOW_P to true; otherwise, don't change
14998 *STRICT_OVERFLOW_P. */
15000 static bool
15001 tree_invalid_nonnegative_warnv_p (tree t, bool *strict_overflow_p)
15003 enum tree_code code = TREE_CODE (t);
15004 if (TYPE_UNSIGNED (TREE_TYPE (t)))
15005 return true;
15007 switch (code)
15009 case TARGET_EXPR:
15011 tree temp = TARGET_EXPR_SLOT (t);
15012 t = TARGET_EXPR_INITIAL (t);
15014 /* If the initializer is non-void, then it's a normal expression
15015 that will be assigned to the slot. */
15016 if (!VOID_TYPE_P (t))
15017 return tree_expr_nonnegative_warnv_p (t, strict_overflow_p);
15019 /* Otherwise, the initializer sets the slot in some way. One common
15020 way is an assignment statement at the end of the initializer. */
15021 while (1)
15023 if (TREE_CODE (t) == BIND_EXPR)
15024 t = expr_last (BIND_EXPR_BODY (t));
15025 else if (TREE_CODE (t) == TRY_FINALLY_EXPR
15026 || TREE_CODE (t) == TRY_CATCH_EXPR)
15027 t = expr_last (TREE_OPERAND (t, 0));
15028 else if (TREE_CODE (t) == STATEMENT_LIST)
15029 t = expr_last (t);
15030 else
15031 break;
15033 if (TREE_CODE (t) == MODIFY_EXPR
15034 && TREE_OPERAND (t, 0) == temp)
15035 return tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 1),
15036 strict_overflow_p);
15038 return false;
15041 case CALL_EXPR:
15043 tree arg0 = call_expr_nargs (t) > 0 ? CALL_EXPR_ARG (t, 0) : NULL_TREE;
15044 tree arg1 = call_expr_nargs (t) > 1 ? CALL_EXPR_ARG (t, 1) : NULL_TREE;
15046 return tree_call_nonnegative_warnv_p (TREE_TYPE (t),
15047 get_callee_fndecl (t),
15048 arg0,
15049 arg1,
15050 strict_overflow_p);
15052 case COMPOUND_EXPR:
15053 case MODIFY_EXPR:
15054 return tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 1),
15055 strict_overflow_p);
15056 case BIND_EXPR:
15057 return tree_expr_nonnegative_warnv_p (expr_last (TREE_OPERAND (t, 1)),
15058 strict_overflow_p);
15059 case SAVE_EXPR:
15060 return tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 0),
15061 strict_overflow_p);
15063 default:
15064 return tree_simple_nonnegative_warnv_p (TREE_CODE (t),
15065 TREE_TYPE (t));
15068 /* We don't know sign of `t', so be conservative and return false. */
15069 return false;
15072 /* Return true if T is known to be non-negative. If the return
15073 value is based on the assumption that signed overflow is undefined,
15074 set *STRICT_OVERFLOW_P to true; otherwise, don't change
15075 *STRICT_OVERFLOW_P. */
15077 bool
15078 tree_expr_nonnegative_warnv_p (tree t, bool *strict_overflow_p)
15080 enum tree_code code;
15081 if (t == error_mark_node)
15082 return false;
15084 code = TREE_CODE (t);
15085 switch (TREE_CODE_CLASS (code))
15087 case tcc_binary:
15088 case tcc_comparison:
15089 return tree_binary_nonnegative_warnv_p (TREE_CODE (t),
15090 TREE_TYPE (t),
15091 TREE_OPERAND (t, 0),
15092 TREE_OPERAND (t, 1),
15093 strict_overflow_p);
15095 case tcc_unary:
15096 return tree_unary_nonnegative_warnv_p (TREE_CODE (t),
15097 TREE_TYPE (t),
15098 TREE_OPERAND (t, 0),
15099 strict_overflow_p);
15101 case tcc_constant:
15102 case tcc_declaration:
15103 case tcc_reference:
15104 return tree_single_nonnegative_warnv_p (t, strict_overflow_p);
15106 default:
15107 break;
15110 switch (code)
15112 case TRUTH_AND_EXPR:
15113 case TRUTH_OR_EXPR:
15114 case TRUTH_XOR_EXPR:
15115 return tree_binary_nonnegative_warnv_p (TREE_CODE (t),
15116 TREE_TYPE (t),
15117 TREE_OPERAND (t, 0),
15118 TREE_OPERAND (t, 1),
15119 strict_overflow_p);
15120 case TRUTH_NOT_EXPR:
15121 return tree_unary_nonnegative_warnv_p (TREE_CODE (t),
15122 TREE_TYPE (t),
15123 TREE_OPERAND (t, 0),
15124 strict_overflow_p);
15126 case COND_EXPR:
15127 case CONSTRUCTOR:
15128 case OBJ_TYPE_REF:
15129 case ASSERT_EXPR:
15130 case ADDR_EXPR:
15131 case WITH_SIZE_EXPR:
15132 case SSA_NAME:
15133 return tree_single_nonnegative_warnv_p (t, strict_overflow_p);
15135 default:
15136 return tree_invalid_nonnegative_warnv_p (t, strict_overflow_p);
15140 /* Return true if `t' is known to be non-negative. Handle warnings
15141 about undefined signed overflow. */
15143 bool
15144 tree_expr_nonnegative_p (tree t)
15146 bool ret, strict_overflow_p;
15148 strict_overflow_p = false;
15149 ret = tree_expr_nonnegative_warnv_p (t, &strict_overflow_p);
15150 if (strict_overflow_p)
15151 fold_overflow_warning (("assuming signed overflow does not occur when "
15152 "determining that expression is always "
15153 "non-negative"),
15154 WARN_STRICT_OVERFLOW_MISC);
15155 return ret;
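/* Usage sketch (X is an assumed signed integer operand in scope,
   with signed overflow undefined):

     if (tree_expr_nonnegative_p (fold_build1 (ABS_EXPR,
                                               integer_type_node, x)))
       ...

   answers true, and because the proof relies on undefined signed
   overflow (ABS_EXPR <INT_MIN> would wrap), fold_overflow_warning
   may emit a -Wstrict-overflow diagnostic.  */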
15159 /* Return true when (CODE OP0) is an address and is known to be nonzero.
15160 For floating point we further ensure that T is not denormal.
15161 Similar logic is present in nonzero_address_p in rtlanal.c.
15163 If the return value is based on the assumption that signed overflow
15164 is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
15165 change *STRICT_OVERFLOW_P. */
15167 bool
15168 tree_unary_nonzero_warnv_p (enum tree_code code, tree type, tree op0,
15169 bool *strict_overflow_p)
15171 switch (code)
15173 case ABS_EXPR:
15174 return tree_expr_nonzero_warnv_p (op0,
15175 strict_overflow_p);
15177 case NOP_EXPR:
15179 tree inner_type = TREE_TYPE (op0);
15180 tree outer_type = type;
15182 return (TYPE_PRECISION (outer_type) >= TYPE_PRECISION (inner_type)
15183 && tree_expr_nonzero_warnv_p (op0,
15184 strict_overflow_p));
15186 break;
15188 case NON_LVALUE_EXPR:
15189 return tree_expr_nonzero_warnv_p (op0,
15190 strict_overflow_p);
15192 default:
15193 break;
15196 return false;
15199 /* Return true when (CODE OP0 OP1) is an address and is known to be nonzero.
15200 For floating point we further ensure that T is not denormal.
15201 Similar logic is present in nonzero_address_p in rtlanal.c.
15203 If the return value is based on the assumption that signed overflow
15204 is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
15205 change *STRICT_OVERFLOW_P. */
15207 bool
15208 tree_binary_nonzero_warnv_p (enum tree_code code,
15209 tree type,
15210 tree op0,
15211 tree op1, bool *strict_overflow_p)
15213 bool sub_strict_overflow_p;
15214 switch (code)
15216 case POINTER_PLUS_EXPR:
15217 case PLUS_EXPR:
15218 if (ANY_INTEGRAL_TYPE_P (type) && TYPE_OVERFLOW_UNDEFINED (type))
15220 /* In the presence of negative values it is hard
15221 to say anything definite. */
15222 sub_strict_overflow_p = false;
15223 if (!tree_expr_nonnegative_warnv_p (op0,
15224 &sub_strict_overflow_p)
15225 || !tree_expr_nonnegative_warnv_p (op1,
15226 &sub_strict_overflow_p))
15227 return false;
15228 /* One of the operands must be positive and the other non-negative. */
15229 /* We don't set *STRICT_OVERFLOW_P here: even if this value
15230 overflows, on a two's-complement machine the sum of two
15231 nonnegative numbers can never be zero. */
15232 return (tree_expr_nonzero_warnv_p (op0,
15233 strict_overflow_p)
15234 || tree_expr_nonzero_warnv_p (op1,
15235 strict_overflow_p));
15237 break;
15239 case MULT_EXPR:
15240 if (TYPE_OVERFLOW_UNDEFINED (type))
15242 if (tree_expr_nonzero_warnv_p (op0,
15243 strict_overflow_p)
15244 && tree_expr_nonzero_warnv_p (op1,
15245 strict_overflow_p))
15247 *strict_overflow_p = true;
15248 return true;
15251 break;
15253 case MIN_EXPR:
15254 sub_strict_overflow_p = false;
15255 if (tree_expr_nonzero_warnv_p (op0,
15256 &sub_strict_overflow_p)
15257 && tree_expr_nonzero_warnv_p (op1,
15258 &sub_strict_overflow_p))
15260 if (sub_strict_overflow_p)
15261 *strict_overflow_p = true;
15263 break;
15265 case MAX_EXPR:
15266 sub_strict_overflow_p = false;
15267 if (tree_expr_nonzero_warnv_p (op0,
15268 &sub_strict_overflow_p))
15270 if (sub_strict_overflow_p)
15271 *strict_overflow_p = true;
15273 /* When both operands are nonzero, then MAX must be too. */
15274 if (tree_expr_nonzero_warnv_p (op1,
15275 strict_overflow_p))
15276 return true;
15278 /* MAX where operand 0 is positive is positive. */
15279 return tree_expr_nonnegative_warnv_p (op0,
15280 strict_overflow_p);
15282 /* MAX where operand 1 is positive is positive. */
15283 else if (tree_expr_nonzero_warnv_p (op1,
15284 &sub_strict_overflow_p)
15285 && tree_expr_nonnegative_warnv_p (op1,
15286 &sub_strict_overflow_p))
15288 if (sub_strict_overflow_p)
15289 *strict_overflow_p = true;
15290 return true;
15292 break;
15294 case BIT_IOR_EXPR:
15295 return (tree_expr_nonzero_warnv_p (op1,
15296 strict_overflow_p)
15297 || tree_expr_nonzero_warnv_p (op0,
15298 strict_overflow_p));
15300 default:
15301 break;
15304 return false;
15307 /* Return true when T is an address and is known to be nonzero.
15308 For floating point we further ensure that T is not denormal.
15309 Similar logic is present in nonzero_address_p in rtlanal.c.
15311 If the return value is based on the assumption that signed overflow
15312 is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
15313 change *STRICT_OVERFLOW_P. */
15315 bool
15316 tree_single_nonzero_warnv_p (tree t, bool *strict_overflow_p)
15318 bool sub_strict_overflow_p;
15319 switch (TREE_CODE (t))
15321 case INTEGER_CST:
15322 return !integer_zerop (t);
15324 case ADDR_EXPR:
15326 tree base = TREE_OPERAND (t, 0);
15328 if (!DECL_P (base))
15329 base = get_base_address (base);
15331 if (!base)
15332 return false;
15334 /* For objects in the symbol table, check whether we know they are nonzero.
15335 Don't do anything for variables and functions before the symtab is built;
15336 it is quite possible that they will be declared weak later. */
15337 if (DECL_P (base) && decl_in_symtab_p (base))
15339 struct symtab_node *symbol;
15341 symbol = symtab_node::get_create (base);
15342 if (symbol)
15343 return symbol->nonzero_address ();
15344 else
15345 return false;
15348 /* Function local objects are never NULL. */
15349 if (DECL_P (base)
15350 && (DECL_CONTEXT (base)
15351 && TREE_CODE (DECL_CONTEXT (base)) == FUNCTION_DECL
15352 && auto_var_in_fn_p (base, DECL_CONTEXT (base))))
15353 return true;
15355 /* Constants are never weak. */
15356 if (CONSTANT_CLASS_P (base))
15357 return true;
15359 return false;
15362 case COND_EXPR:
15363 sub_strict_overflow_p = false;
15364 if (tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 1),
15365 &sub_strict_overflow_p)
15366 && tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 2),
15367 &sub_strict_overflow_p))
15369 if (sub_strict_overflow_p)
15370 *strict_overflow_p = true;
15371 return true;
15373 break;
15375 default:
15376 break;
15378 return false;
15381 /* Given the components of a binary expression CODE, TYPE, OP0 and OP1,
15382 attempt to fold the expression to a constant without modifying TYPE,
15383 OP0 or OP1.
15385 If the expression could be simplified to a constant, then return
15386 the constant. If the expression would not be simplified to a
15387 constant, then return NULL_TREE. */
15389 tree
15390 fold_binary_to_constant (enum tree_code code, tree type, tree op0, tree op1)
15392 tree tem = fold_binary (code, type, op0, op1);
15393 return (tem && TREE_CONSTANT (tem)) ? tem : NULL_TREE;
15396 /* Given the components of a unary expression CODE, TYPE and OP0,
15397 attempt to fold the expression to a constant without modifying
15398 TYPE or OP0.
15400 If the expression could be simplified to a constant, then return
15401 the constant. If the expression would not be simplified to a
15402 constant, then return NULL_TREE. */
15404 tree
15405 fold_unary_to_constant (enum tree_code code, tree type, tree op0)
15407 tree tem = fold_unary (code, type, op0);
15408 return (tem && TREE_CONSTANT (tem)) ? tem : NULL_TREE;
15411 /* If EXP represents referencing an element in a constant string
15412 (either via pointer arithmetic or array indexing), return the
15413 tree representing the value accessed, otherwise return NULL. */
15415 tree
15416 fold_read_from_constant_string (tree exp)
15418 if ((TREE_CODE (exp) == INDIRECT_REF
15419 || TREE_CODE (exp) == ARRAY_REF)
15420 && TREE_CODE (TREE_TYPE (exp)) == INTEGER_TYPE)
15422 tree exp1 = TREE_OPERAND (exp, 0);
15423 tree index;
15424 tree string;
15425 location_t loc = EXPR_LOCATION (exp);
15427 if (TREE_CODE (exp) == INDIRECT_REF)
15428 string = string_constant (exp1, &index);
15429 else
15431 tree low_bound = array_ref_low_bound (exp);
15432 index = fold_convert_loc (loc, sizetype, TREE_OPERAND (exp, 1));
15434 /* Optimize the special case of a zero lower bound.
15436 We convert the low_bound to sizetype to avoid some problems
15437 with constant folding. (E.g. suppose the lower bound is 1,
15438 and its mode is QI. Without the conversion, (ARRAY
15439 +(INDEX-(unsigned char)1)) becomes ((ARRAY+(-(unsigned char)1))
15440 +INDEX), which becomes (ARRAY+255+INDEX). Oops!) */
15441 if (! integer_zerop (low_bound))
15442 index = size_diffop_loc (loc, index,
15443 fold_convert_loc (loc, sizetype, low_bound));
15445 string = exp1;
15448 if (string
15449 && TYPE_MODE (TREE_TYPE (exp)) == TYPE_MODE (TREE_TYPE (TREE_TYPE (string)))
15450 && TREE_CODE (string) == STRING_CST
15451 && TREE_CODE (index) == INTEGER_CST
15452 && compare_tree_int (index, TREE_STRING_LENGTH (string)) < 0
15453 && (GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_TYPE (string))))
15454 == MODE_INT)
15455 && (GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (TREE_TYPE (string)))) == 1))
15456 return build_int_cst_type (TREE_TYPE (exp),
15457 (TREE_STRING_POINTER (string)
15458 [TREE_INT_CST_LOW (index)]));
15460 return NULL;
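/* Illustrative sketch: given an ARRAY_REF of the STRING_CST "abc"
   with INTEGER_CST index 1 and a zero lower bound, the function
   folds the read to build_int_cst_type of 'b'; a variable or
   out-of-range index makes it return NULL instead.  */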
15463 /* Return the tree for neg (ARG0) when ARG0 is known to be either
15464 an integer constant, real, or fixed-point constant.
15466 TYPE is the type of the result. */
15468 static tree
15469 fold_negate_const (tree arg0, tree type)
15471 tree t = NULL_TREE;
15473 switch (TREE_CODE (arg0))
15475 case INTEGER_CST:
15477 bool overflow;
15478 wide_int val = wi::neg (arg0, &overflow);
15479 t = force_fit_type (type, val, 1,
15480 (overflow | TREE_OVERFLOW (arg0))
15481 && !TYPE_UNSIGNED (type));
15482 break;
15485 case REAL_CST:
15486 t = build_real (type, real_value_negate (&TREE_REAL_CST (arg0)));
15487 break;
15489 case FIXED_CST:
15491 FIXED_VALUE_TYPE f;
15492 bool overflow_p = fixed_arithmetic (&f, NEGATE_EXPR,
15493 &(TREE_FIXED_CST (arg0)), NULL,
15494 TYPE_SATURATING (type));
15495 t = build_fixed (type, f);
15496 /* Propagate overflow flags. */
15497 if (overflow_p | TREE_OVERFLOW (arg0))
15498 TREE_OVERFLOW (t) = 1;
15499 break;
15502 default:
15503 gcc_unreachable ();
15506 return t;
15509 /* Return the tree for abs (ARG0) when ARG0 is known to be either
15510 an integer constant or real constant.
15512 TYPE is the type of the result. */
15514 tree
15515 fold_abs_const (tree arg0, tree type)
15517 tree t = NULL_TREE;
15519 switch (TREE_CODE (arg0))
15521 case INTEGER_CST:
15523 /* If the value is unsigned or non-negative, then the absolute value
15524 is the same as the ordinary value. */
15525 if (!wi::neg_p (arg0, TYPE_SIGN (type)))
15526 t = arg0;
15528 /* If the value is negative, then the absolute value is
15529 its negation. */
15530 else
15532 bool overflow;
15533 wide_int val = wi::neg (arg0, &overflow);
15534 t = force_fit_type (type, val, -1,
15535 overflow | TREE_OVERFLOW (arg0));
15538 break;
15540 case REAL_CST:
15541 if (REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg0)))
15542 t = build_real (type, real_value_negate (&TREE_REAL_CST (arg0)));
15543 else
15544 t = arg0;
15545 break;
15547 default:
15548 gcc_unreachable ();
15551 return t;
15554 /* Return the tree for not (ARG0) when ARG0 is known to be an integer
15555 constant. TYPE is the type of the result. */
15557 static tree
15558 fold_not_const (const_tree arg0, tree type)
15560 gcc_assert (TREE_CODE (arg0) == INTEGER_CST);
15562 return force_fit_type (type, wi::bit_not (arg0), 0, TREE_OVERFLOW (arg0));
15565 /* Given CODE, a relational operator, the target type, TYPE and two
15566 constant operands OP0 and OP1, return the result of the
15567 relational operation. If the result is not a compile time
15568 constant, then return NULL_TREE. */
15570 static tree
15571 fold_relational_const (enum tree_code code, tree type, tree op0, tree op1)
15573 int result, invert;
15575 /* From here on, the only cases we handle are when the result is
15576 known to be a constant. */
15578 if (TREE_CODE (op0) == REAL_CST && TREE_CODE (op1) == REAL_CST)
15580 const REAL_VALUE_TYPE *c0 = TREE_REAL_CST_PTR (op0);
15581 const REAL_VALUE_TYPE *c1 = TREE_REAL_CST_PTR (op1);
15583 /* Handle the cases where either operand is a NaN. */
15584 if (real_isnan (c0) || real_isnan (c1))
15586 switch (code)
15588 case EQ_EXPR:
15589 case ORDERED_EXPR:
15590 result = 0;
15591 break;
15593 case NE_EXPR:
15594 case UNORDERED_EXPR:
15595 case UNLT_EXPR:
15596 case UNLE_EXPR:
15597 case UNGT_EXPR:
15598 case UNGE_EXPR:
15599 case UNEQ_EXPR:
15600 result = 1;
15601 break;
15603 case LT_EXPR:
15604 case LE_EXPR:
15605 case GT_EXPR:
15606 case GE_EXPR:
15607 case LTGT_EXPR:
15608 if (flag_trapping_math)
15609 return NULL_TREE;
15610 result = 0;
15611 break;
15613 default:
15614 gcc_unreachable ();
15617 return constant_boolean_node (result, type);
15620 return constant_boolean_node (real_compare (code, c0, c1), type);
15623 if (TREE_CODE (op0) == FIXED_CST && TREE_CODE (op1) == FIXED_CST)
15625 const FIXED_VALUE_TYPE *c0 = TREE_FIXED_CST_PTR (op0);
15626 const FIXED_VALUE_TYPE *c1 = TREE_FIXED_CST_PTR (op1);
15627 return constant_boolean_node (fixed_compare (code, c0, c1), type);
15630 /* Handle equality/inequality of complex constants. */
15631 if (TREE_CODE (op0) == COMPLEX_CST && TREE_CODE (op1) == COMPLEX_CST)
15633 tree rcond = fold_relational_const (code, type,
15634 TREE_REALPART (op0),
15635 TREE_REALPART (op1));
15636 tree icond = fold_relational_const (code, type,
15637 TREE_IMAGPART (op0),
15638 TREE_IMAGPART (op1));
15639 if (code == EQ_EXPR)
15640 return fold_build2 (TRUTH_ANDIF_EXPR, type, rcond, icond);
15641 else if (code == NE_EXPR)
15642 return fold_build2 (TRUTH_ORIF_EXPR, type, rcond, icond);
15643 else
15644 return NULL_TREE;
15647 if (TREE_CODE (op0) == VECTOR_CST && TREE_CODE (op1) == VECTOR_CST)
15649 unsigned count = VECTOR_CST_NELTS (op0);
15650 tree *elts = XALLOCAVEC (tree, count);
15651 gcc_assert (VECTOR_CST_NELTS (op1) == count
15652 && TYPE_VECTOR_SUBPARTS (type) == count);
15654 for (unsigned i = 0; i < count; i++)
15656 tree elem_type = TREE_TYPE (type);
15657 tree elem0 = VECTOR_CST_ELT (op0, i);
15658 tree elem1 = VECTOR_CST_ELT (op1, i);
15660 tree tem = fold_relational_const (code, elem_type,
15661 elem0, elem1);
15663 if (tem == NULL_TREE)
15664 return NULL_TREE;
15666 elts[i] = build_int_cst (elem_type, integer_zerop (tem) ? 0 : -1);
15669 return build_vector (type, elts);
15672 /* From here on we only handle LT, LE, GT, GE, EQ and NE.
15674 To compute GT, swap the arguments and do LT.
15675 To compute GE, do LT and invert the result.
15676 To compute LE, swap the arguments, do LT and invert the result.
15677 To compute NE, do EQ and invert the result.
15679 Therefore, the code below must handle only EQ and LT. */
15681 if (code == LE_EXPR || code == GT_EXPR)
15683 tree tem = op0;
15684 op0 = op1;
15685 op1 = tem;
15686 code = swap_tree_comparison (code);
15689 /* Note that it is safe to invert for real values here because we
15690 have already handled the one case where it matters. */
15692 invert = 0;
15693 if (code == NE_EXPR || code == GE_EXPR)
15695 invert = 1;
15696 code = invert_tree_comparison (code, false);
15699 /* Compute a result for LT or EQ if the arguments permit;
15700 otherwise return NULL_TREE. */
15701 if (TREE_CODE (op0) == INTEGER_CST && TREE_CODE (op1) == INTEGER_CST)
15703 if (code == EQ_EXPR)
15704 result = tree_int_cst_equal (op0, op1);
15705 else
15706 result = tree_int_cst_lt (op0, op1);
15708 else
15709 return NULL_TREE;
15711 if (invert)
15712 result ^= 1;
15713 return constant_boolean_node (result, type);
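/* Worked example of the tail above (illustrative only): for
   OP0 = 2, OP1 = 3 and CODE = GE_EXPR, no swap is needed, GE
   becomes LT with invert = 1, tree_int_cst_lt (2, 3) yields 1,
   inversion gives 0, and the result is constant_boolean_node
   (0, type) -- i.e. 2 >= 3 folds to false.  */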
15716 /* If necessary, return a CLEANUP_POINT_EXPR for EXPR with the
15717 indicated TYPE. If no CLEANUP_POINT_EXPR is necessary, return EXPR
15718 itself. */
15720 tree
15721 fold_build_cleanup_point_expr (tree type, tree expr)
15723 /* If the expression does not have side effects then we don't have to wrap
15724 it with a cleanup point expression. */
15725 if (!TREE_SIDE_EFFECTS (expr))
15726 return expr;
15728 /* If the expression is a return, check whether the expression inside the
15729 return has side effects, and likewise for the right hand side of any modify
15730 expression inside the return. If either has no side effects, we don't need
15731 to wrap the expression in a cleanup point expression. Note we don't check
15732 the left hand side of the modify because it should always be a return decl. */
15733 if (TREE_CODE (expr) == RETURN_EXPR)
15735 tree op = TREE_OPERAND (expr, 0);
15736 if (!op || !TREE_SIDE_EFFECTS (op))
15737 return expr;
15738 op = TREE_OPERAND (op, 1);
15739 if (!TREE_SIDE_EFFECTS (op))
15740 return expr;
15743 return build1 (CLEANUP_POINT_EXPR, type, expr);
15746 /* Given a pointer value OP0 and a type TYPE, return a simplified version
15747 of an indirection through OP0, or NULL_TREE if no simplification is
15748 possible. */
15750 tree
15751 fold_indirect_ref_1 (location_t loc, tree type, tree op0)
15753 tree sub = op0;
15754 tree subtype;
15756 STRIP_NOPS (sub);
15757 subtype = TREE_TYPE (sub);
15758 if (!POINTER_TYPE_P (subtype))
15759 return NULL_TREE;
15761 if (TREE_CODE (sub) == ADDR_EXPR)
15763 tree op = TREE_OPERAND (sub, 0);
15764 tree optype = TREE_TYPE (op);
15765 /* *&CONST_DECL -> to the value of the const decl. */
15766 if (TREE_CODE (op) == CONST_DECL)
15767 return DECL_INITIAL (op);
15768 /* *&p => p; make sure to handle *&"str"[cst] here. */
15769 if (type == optype)
15771 tree fop = fold_read_from_constant_string (op);
15772 if (fop)
15773 return fop;
15774 else
15775 return op;
15777 /* *(foo *)&fooarray => fooarray[0] */
15778 else if (TREE_CODE (optype) == ARRAY_TYPE
15779 && type == TREE_TYPE (optype)
15780 && (!in_gimple_form
15781 || TREE_CODE (TYPE_SIZE (type)) == INTEGER_CST))
15783 tree type_domain = TYPE_DOMAIN (optype);
15784 tree min_val = size_zero_node;
15785 if (type_domain && TYPE_MIN_VALUE (type_domain))
15786 min_val = TYPE_MIN_VALUE (type_domain);
15787 if (in_gimple_form
15788 && TREE_CODE (min_val) != INTEGER_CST)
15789 return NULL_TREE;
15790 return build4_loc (loc, ARRAY_REF, type, op, min_val,
15791 NULL_TREE, NULL_TREE);
15793 /* *(foo *)&complexfoo => __real__ complexfoo */
15794 else if (TREE_CODE (optype) == COMPLEX_TYPE
15795 && type == TREE_TYPE (optype))
15796 return fold_build1_loc (loc, REALPART_EXPR, type, op);
15797 /* *(foo *)&vectorfoo => BIT_FIELD_REF<vectorfoo,...> */
15798 else if (TREE_CODE (optype) == VECTOR_TYPE
15799 && type == TREE_TYPE (optype))
15801 tree part_width = TYPE_SIZE (type);
15802 tree index = bitsize_int (0);
15803 return fold_build3_loc (loc, BIT_FIELD_REF, type, op, part_width, index);
15807 if (TREE_CODE (sub) == POINTER_PLUS_EXPR
15808 && TREE_CODE (TREE_OPERAND (sub, 1)) == INTEGER_CST)
15810 tree op00 = TREE_OPERAND (sub, 0);
15811 tree op01 = TREE_OPERAND (sub, 1);
15813 STRIP_NOPS (op00);
15814 if (TREE_CODE (op00) == ADDR_EXPR)
15816 tree op00type;
15817 op00 = TREE_OPERAND (op00, 0);
15818 op00type = TREE_TYPE (op00);
15820 /* ((foo*)&vectorfoo)[1] => BIT_FIELD_REF<vectorfoo,...> */
15821 if (TREE_CODE (op00type) == VECTOR_TYPE
15822 && type == TREE_TYPE (op00type))
15824 HOST_WIDE_INT offset = tree_to_shwi (op01);
15825 tree part_width = TYPE_SIZE (type);
15826 unsigned HOST_WIDE_INT part_widthi = tree_to_shwi (part_width) / BITS_PER_UNIT;
15827 unsigned HOST_WIDE_INT indexi = offset * BITS_PER_UNIT;
15828 tree index = bitsize_int (indexi);
15830 if (offset / part_widthi < TYPE_VECTOR_SUBPARTS (op00type))
15831 return fold_build3_loc (loc,
15832 BIT_FIELD_REF, type, op00,
15833 part_width, index);
15836 /* ((foo*)&complexfoo)[1] => __imag__ complexfoo */
15837 else if (TREE_CODE (op00type) == COMPLEX_TYPE
15838 && type == TREE_TYPE (op00type))
15840 tree size = TYPE_SIZE_UNIT (type);
15841 if (tree_int_cst_equal (size, op01))
15842 return fold_build1_loc (loc, IMAGPART_EXPR, type, op00);
15844 /* ((foo *)&fooarray)[1] => fooarray[1] */
15845 else if (TREE_CODE (op00type) == ARRAY_TYPE
15846 && type == TREE_TYPE (op00type))
15848 tree type_domain = TYPE_DOMAIN (op00type);
15849 tree min_val = size_zero_node;
15850 if (type_domain && TYPE_MIN_VALUE (type_domain))
15851 min_val = TYPE_MIN_VALUE (type_domain);
15852 op01 = size_binop_loc (loc, EXACT_DIV_EXPR, op01,
15853 TYPE_SIZE_UNIT (type));
15854 op01 = size_binop_loc (loc, PLUS_EXPR, op01, min_val);
15855 return build4_loc (loc, ARRAY_REF, type, op00, op01,
15856 NULL_TREE, NULL_TREE);
15861 /* *(foo *)fooarrptr => (*fooarrptr)[0] */
15862 if (TREE_CODE (TREE_TYPE (subtype)) == ARRAY_TYPE
15863 && type == TREE_TYPE (TREE_TYPE (subtype))
15864 && (!in_gimple_form
15865 || TREE_CODE (TYPE_SIZE (type)) == INTEGER_CST))
15867 tree type_domain;
15868 tree min_val = size_zero_node;
15869 sub = build_fold_indirect_ref_loc (loc, sub);
15870 type_domain = TYPE_DOMAIN (TREE_TYPE (sub));
15871 if (type_domain && TYPE_MIN_VALUE (type_domain))
15872 min_val = TYPE_MIN_VALUE (type_domain);
15873 if (in_gimple_form
15874 && TREE_CODE (min_val) != INTEGER_CST)
15875 return NULL_TREE;
15876 return build4_loc (loc, ARRAY_REF, type, sub, min_val, NULL_TREE,
15877 NULL_TREE);
15880 return NULL_TREE;
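/* Illustrative sketch (IARR is an assumed array-of-int variable and
   LOC a location_t in scope):

     tree p = build_fold_addr_expr (iarr);
     tree r = fold_indirect_ref_1 (loc, integer_type_node, p);

   R is the ARRAY_REF iarr[0] via the *(foo *)&fooarray rule above;
   a pointer of unrelated type yields NULL_TREE.  */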
15883 /* Builds an expression for an indirection through T, simplifying some
15884 cases. */
15886 tree
15887 build_fold_indirect_ref_loc (location_t loc, tree t)
15889 tree type = TREE_TYPE (TREE_TYPE (t));
15890 tree sub = fold_indirect_ref_1 (loc, type, t);
15892 if (sub)
15893 return sub;
15895 return build1_loc (loc, INDIRECT_REF, type, t);
15898 /* Given an INDIRECT_REF T, return either T or a simplified version. */
15900 tree
15901 fold_indirect_ref_loc (location_t loc, tree t)
15903 tree sub = fold_indirect_ref_1 (loc, TREE_TYPE (t), TREE_OPERAND (t, 0));
15905 if (sub)
15906 return sub;
15907 else
15908 return t;
15911 /* Strip non-trapping, non-side-effecting tree nodes from an expression
15912 whose result is ignored. The type of the returned tree need not be
15913 the same as the original expression. */
15915 tree
15916 fold_ignored_result (tree t)
15918 if (!TREE_SIDE_EFFECTS (t))
15919 return integer_zero_node;
15921 for (;;)
15922 switch (TREE_CODE_CLASS (TREE_CODE (t)))
15924 case tcc_unary:
15925 t = TREE_OPERAND (t, 0);
15926 break;
15928 case tcc_binary:
15929 case tcc_comparison:
15930 if (!TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1)))
15931 t = TREE_OPERAND (t, 0);
15932 else if (!TREE_SIDE_EFFECTS (TREE_OPERAND (t, 0)))
15933 t = TREE_OPERAND (t, 1);
15934 else
15935 return t;
15936 break;
15938 case tcc_expression:
15939 switch (TREE_CODE (t))
15941 case COMPOUND_EXPR:
15942 if (TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1)))
15943 return t;
15944 t = TREE_OPERAND (t, 0);
15945 break;
15947 case COND_EXPR:
15948 if (TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1))
15949 || TREE_SIDE_EFFECTS (TREE_OPERAND (t, 2)))
15950 return t;
15951 t = TREE_OPERAND (t, 0);
15952 break;
15954 default:
15955 return t;
15957 break;
15959 default:
15960 return t;
15964 /* Return the value of VALUE, rounded up to a multiple of DIVISOR. */
15966 tree
15967 round_up_loc (location_t loc, tree value, unsigned int divisor)
15969 tree div = NULL_TREE;
15971 if (divisor == 1)
15972 return value;
15974 /* See if VALUE is already a multiple of DIVISOR. If so, we don't
15975 have to do anything. Only do this when VALUE is not a constant,
15976 because in that case this check is more expensive than just
15977 doing the rounding. */
15978 if (TREE_CODE (value) != INTEGER_CST)
15980 div = build_int_cst (TREE_TYPE (value), divisor);
15982 if (multiple_of_p (TREE_TYPE (value), value, div))
15983 return value;
15986 /* If divisor is a power of two, simplify this to bit manipulation. */
15987 if (divisor == (divisor & -divisor))
15989 if (TREE_CODE (value) == INTEGER_CST)
15991 wide_int val = value;
15992 bool overflow_p;
15994 if ((val & (divisor - 1)) == 0)
15995 return value;
15997 overflow_p = TREE_OVERFLOW (value);
15998 val &= ~(divisor - 1);
15999 val += divisor;
16000 if (val == 0)
16001 overflow_p = true;

          return force_fit_type (TREE_TYPE (value), val, -1, overflow_p);
        }
      else
        {
          tree t;

          t = build_int_cst (TREE_TYPE (value), divisor - 1);
          value = size_binop_loc (loc, PLUS_EXPR, value, t);
          t = build_int_cst (TREE_TYPE (value), -divisor);
          value = size_binop_loc (loc, BIT_AND_EXPR, value, t);
        }
    }
  else
    {
      if (!div)
        div = build_int_cst (TREE_TYPE (value), divisor);
      value = size_binop_loc (loc, CEIL_DIV_EXPR, value, div);
      value = size_binop_loc (loc, MULT_EXPR, value, div);
    }

  return value;
}
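
/* Illustrative model (not part of the original file) of the power-of-two
   fast path above: with D a power of two, rounding V up is
   (V + D - 1) & -D, exactly the PLUS_EXPR/BIT_AND_EXPR pair built at
   tree level.  Assumes two's-complement arithmetic.  */
#if 0
static unsigned HOST_WIDE_INT
round_up_model (unsigned HOST_WIDE_INT v, unsigned HOST_WIDE_INT d)
{
  /* -D is an all-ones mask with the low log2(D) bits clear.  */
  return (v + d - 1) & -d;
}
#endif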

/* Likewise, but round down.  */

tree
round_down_loc (location_t loc, tree value, int divisor)
{
  tree div = NULL_TREE;

  gcc_assert (divisor > 0);
  if (divisor == 1)
    return value;

  /* See if VALUE is already a multiple of DIVISOR.  If so, we don't
     have to do anything.  Only do this when we are not given a const,
     because in that case, this check is more expensive than just
     doing it.  */
  if (TREE_CODE (value) != INTEGER_CST)
    {
      div = build_int_cst (TREE_TYPE (value), divisor);

      if (multiple_of_p (TREE_TYPE (value), value, div))
        return value;
    }

  /* If divisor is a power of two, simplify this to bit manipulation.  */
  if (divisor == (divisor & -divisor))
    {
      tree t;

      t = build_int_cst (TREE_TYPE (value), -divisor);
      value = size_binop_loc (loc, BIT_AND_EXPR, value, t);
    }
  else
    {
      if (!div)
        div = build_int_cst (TREE_TYPE (value), divisor);
      value = size_binop_loc (loc, FLOOR_DIV_EXPR, value, div);
      value = size_binop_loc (loc, MULT_EXPR, value, div);
    }

  return value;
}
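
/* Likewise, an illustrative model (not part of the original file) of the
   power-of-two path above: rounding V down is simply V & -D.  */
#if 0
static unsigned HOST_WIDE_INT
round_down_model (unsigned HOST_WIDE_INT v, unsigned HOST_WIDE_INT d)
{
  /* Clearing the low log2(D) bits rounds toward zero, i.e. down.  */
  return v & -d;
}
#endif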

/* Returns the pointer to the base of the object addressed by EXP and
   extracts the information about the offset of the access, storing it
   to PBITPOS and POFFSET.  */

static tree
split_address_to_core_and_offset (tree exp,
                                  HOST_WIDE_INT *pbitpos, tree *poffset)
{
  tree core;
  machine_mode mode;
  int unsignedp, volatilep;
  HOST_WIDE_INT bitsize;
  location_t loc = EXPR_LOCATION (exp);

  if (TREE_CODE (exp) == ADDR_EXPR)
    {
      core = get_inner_reference (TREE_OPERAND (exp, 0), &bitsize, pbitpos,
                                  poffset, &mode, &unsignedp, &volatilep,
                                  false);
      core = build_fold_addr_expr_loc (loc, core);
    }
  else
    {
      core = exp;
      *pbitpos = 0;
      *poffset = NULL_TREE;
    }

  return core;
}
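
/* Usage sketch (illustrative, not part of the original file): for
   EXP == &s.f the core is &s, *PBITPOS is the bit offset of F within S,
   and *POFFSET carries any variable part of the offset (or NULL_TREE).
   The helper below is hypothetical.  */
#if 0
static tree
example_split_address (tree addr_expr)
{
  HOST_WIDE_INT bitpos;
  tree offset;
  tree core = split_address_to_core_and_offset (addr_expr, &bitpos, &offset);
  /* CORE plus BITPOS/OFFSET reconstitutes the original address.  */
  return core;
}
#endif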

/* Returns true if addresses of E1 and E2 differ by a constant, false
   otherwise.  If they do, E1 - E2 is stored in *DIFF.  */

bool
ptr_difference_const (tree e1, tree e2, HOST_WIDE_INT *diff)
{
  tree core1, core2;
  HOST_WIDE_INT bitpos1, bitpos2;
  tree toffset1, toffset2, tdiff, type;

  core1 = split_address_to_core_and_offset (e1, &bitpos1, &toffset1);
  core2 = split_address_to_core_and_offset (e2, &bitpos2, &toffset2);

  if (bitpos1 % BITS_PER_UNIT != 0
      || bitpos2 % BITS_PER_UNIT != 0
      || !operand_equal_p (core1, core2, 0))
    return false;

  if (toffset1 && toffset2)
    {
      type = TREE_TYPE (toffset1);
      if (type != TREE_TYPE (toffset2))
        toffset2 = fold_convert (type, toffset2);

      tdiff = fold_build2 (MINUS_EXPR, type, toffset1, toffset2);
      if (!cst_and_fits_in_hwi (tdiff))
        return false;

      *diff = int_cst_value (tdiff);
    }
  else if (toffset1 || toffset2)
    {
      /* If only one of the offsets is non-constant, the difference cannot
         be a constant.  */
      return false;
    }
  else
    *diff = 0;

  *diff += (bitpos1 - bitpos2) / BITS_PER_UNIT;
  return true;
}
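
/* Usage sketch (illustrative, not part of the original file): for
   E1 == &a[3] and E2 == &a[1], both cores are &a, the offsets are
   constant, and *DIFF receives 2 * sizeof (element) in bytes.  The
   helper below is hypothetical.  */
#if 0
static bool
example_ptr_difference (tree e1, tree e2, HOST_WIDE_INT *diff)
{
  /* Fails when the cores differ, an offset is non-constant, or an
     access is not byte-aligned.  */
  return ptr_difference_const (e1, e2, diff);
}
#endif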

/* Simplify the floating point expression EXP when the sign of the
   result is not significant.  Return NULL_TREE if no simplification
   is possible.  */

tree
fold_strip_sign_ops (tree exp)
{
  tree arg0, arg1;
  location_t loc = EXPR_LOCATION (exp);

  switch (TREE_CODE (exp))
    {
    case ABS_EXPR:
    case NEGATE_EXPR:
      arg0 = fold_strip_sign_ops (TREE_OPERAND (exp, 0));
      return arg0 ? arg0 : TREE_OPERAND (exp, 0);

    case MULT_EXPR:
    case RDIV_EXPR:
      if (HONOR_SIGN_DEPENDENT_ROUNDING (element_mode (exp)))
        return NULL_TREE;
      arg0 = fold_strip_sign_ops (TREE_OPERAND (exp, 0));
      arg1 = fold_strip_sign_ops (TREE_OPERAND (exp, 1));
      if (arg0 != NULL_TREE || arg1 != NULL_TREE)
        return fold_build2_loc (loc, TREE_CODE (exp), TREE_TYPE (exp),
                                arg0 ? arg0 : TREE_OPERAND (exp, 0),
                                arg1 ? arg1 : TREE_OPERAND (exp, 1));
      break;

    case COMPOUND_EXPR:
      arg0 = TREE_OPERAND (exp, 0);
      arg1 = fold_strip_sign_ops (TREE_OPERAND (exp, 1));
      if (arg1)
        return fold_build2_loc (loc, COMPOUND_EXPR, TREE_TYPE (exp), arg0, arg1);
      break;

    case COND_EXPR:
      arg0 = fold_strip_sign_ops (TREE_OPERAND (exp, 1));
      arg1 = fold_strip_sign_ops (TREE_OPERAND (exp, 2));
      if (arg0 || arg1)
        return fold_build3_loc (loc,
                                COND_EXPR, TREE_TYPE (exp), TREE_OPERAND (exp, 0),
                                arg0 ? arg0 : TREE_OPERAND (exp, 1),
                                arg1 ? arg1 : TREE_OPERAND (exp, 2));
      break;

    case CALL_EXPR:
      {
        const enum built_in_function fcode = builtin_mathfn_code (exp);
        switch (fcode)
          {
          CASE_FLT_FN (BUILT_IN_COPYSIGN):
            /* Strip copysign function call, return the 1st argument. */
            arg0 = CALL_EXPR_ARG (exp, 0);
            arg1 = CALL_EXPR_ARG (exp, 1);
            return omit_one_operand_loc (loc, TREE_TYPE (exp), arg0, arg1);

          default:
            /* Strip sign ops from the argument of "odd" math functions.  */
            if (negate_mathfn_p (fcode))
              {
                arg0 = fold_strip_sign_ops (CALL_EXPR_ARG (exp, 0));
                if (arg0)
                  return build_call_expr_loc (loc, get_callee_fndecl (exp), 1, arg0);
              }
            break;
          }
      }
      break;

    default:
      break;
    }
  return NULL_TREE;
}
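
/* Usage sketch (illustrative, not part of the original file): a caller
   folding an even function such as cos, where cos (-x) == cos (x), can
   discard sign operations on the argument.  The helper below is
   hypothetical.  */
#if 0
static tree
example_strip_sign_for_even_fn (tree arg)
{
  tree stripped = fold_strip_sign_ops (arg);
  /* NULL_TREE means no sign op could be removed; keep the original.  */
  return stripped ? stripped : arg;
}
#endif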