/* Fold a constant sub-tree into a single node for C-compiler
   Copyright (C) 1987-2015 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */
/*@@ This file should be rewritten to use an arbitrary precision
  @@ representation for "struct tree_int_cst" and "struct tree_real_cst".
  @@ Perhaps the routines could also be used for bc/dc, and made a lib.
  @@ The routines that translate from the ap rep should
  @@ warn if precision et al. is lost.
  @@ This would also make life easier when this technology is used
  @@ for cross-compilers.  */
/* The entry points in this file are fold, size_int_wide and size_binop.

   fold takes a tree as argument and returns a simplified tree.

   size_binop takes a tree code for an arithmetic operation
   and two operands that are trees, and produces a tree for the
   result, assuming the type comes from `sizetype'.

   size_int takes an integer value, and creates a tree constant
   with type from `sizetype'.

   Note: Since the folders get called on non-gimple code as well as
   gimple code, we need to handle GIMPLE tuples as well as their
   corresponding tree equivalents.  */
#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "backend.h"
#include "predict.h"
#include "tree.h"
#include "gimple.h"
#include "rtl.h"
#include "flags.h"
#include "alias.h"
#include "fold-const.h"
#include "stor-layout.h"
#include "calls.h"
#include "tree-iterator.h"
#include "realmpfr.h"
#include "insn-config.h"
#include "expmed.h"
#include "dojump.h"
#include "explow.h"
#include "emit-rtl.h"
#include "varasm.h"
#include "stmt.h"
#include "expr.h"
#include "tm_p.h"
#include "target.h"
#include "diagnostic-core.h"
#include "intl.h"
#include "langhooks.h"
#include "md5.h"
#include "internal-fn.h"
#include "tree-eh.h"
#include "gimplify.h"
#include "tree-dfa.h"
#include "builtins.h"
#include "cgraph.h"
#include "generic-match.h"
#include "optabs.h"

#ifndef LOAD_EXTEND_OP
#define LOAD_EXTEND_OP(M) UNKNOWN
#endif
/* Nonzero if we are folding constants inside an initializer; zero
   otherwise.  */
int folding_initializer = 0;

/* The following constants represent a bit based encoding of GCC's
   comparison operators.  This encoding simplifies transformations
   on relational comparison operators, such as AND and OR.  */
enum comparison_code {
  COMPCODE_FALSE = 0,
  COMPCODE_LT = 1,
  COMPCODE_EQ = 2,
  COMPCODE_LE = 3,
  COMPCODE_GT = 4,
  COMPCODE_LTGT = 5,
  COMPCODE_GE = 6,
  COMPCODE_ORD = 7,
  COMPCODE_UNORD = 8,
  COMPCODE_UNLT = 9,
  COMPCODE_UNEQ = 10,
  COMPCODE_UNLE = 11,
  COMPCODE_UNGT = 12,
  COMPCODE_NE = 13,
  COMPCODE_UNGE = 14,
  COMPCODE_TRUE = 15
};
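
/* Illustrative sketch of how this encoding composes (not part of the
   original source): bit 0 means "less", bit 1 "equal", bit 2 "greater"
   and bit 3 "unordered", so bitwise OR of two codes yields the
   comparison that holds when either does, and AND the one that holds
   when both do, e.g.

     COMPCODE_LT | COMPCODE_EQ == 1 | 2 == 3 == COMPCODE_LE
     COMPCODE_LT | COMPCODE_GT == 1 | 4 == 5 == COMPCODE_LTGT
     COMPCODE_LE & COMPCODE_GE == 3 & 6 == 2 == COMPCODE_EQ  */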
static bool negate_mathfn_p (enum built_in_function);
static bool negate_expr_p (tree);
static tree negate_expr (tree);
static tree split_tree (tree, enum tree_code, tree *, tree *, tree *, int);
static tree associate_trees (location_t, tree, tree, enum tree_code, tree);
static enum comparison_code comparison_to_compcode (enum tree_code);
static enum tree_code compcode_to_comparison (enum comparison_code);
static int operand_equal_for_comparison_p (tree, tree, tree);
static int twoval_comparison_p (tree, tree *, tree *, int *);
static tree eval_subst (location_t, tree, tree, tree, tree, tree);
static tree make_bit_field_ref (location_t, tree, tree,
                                HOST_WIDE_INT, HOST_WIDE_INT, int);
static tree optimize_bit_field_compare (location_t, enum tree_code,
                                        tree, tree, tree);
static tree decode_field_reference (location_t, tree, HOST_WIDE_INT *,
                                    HOST_WIDE_INT *,
                                    machine_mode *, int *, int *,
                                    tree *, tree *);
static int simple_operand_p (const_tree);
static bool simple_operand_p_2 (tree);
static tree range_binop (enum tree_code, tree, tree, int, tree, int);
static tree range_predecessor (tree);
static tree range_successor (tree);
static tree fold_range_test (location_t, enum tree_code, tree, tree, tree);
static tree fold_cond_expr_with_comparison (location_t, tree, tree, tree, tree);
static tree unextend (tree, int, int, tree);
static tree optimize_minmax_comparison (location_t, enum tree_code,
                                        tree, tree, tree);
static tree extract_muldiv (tree, tree, enum tree_code, tree, bool *);
static tree extract_muldiv_1 (tree, tree, enum tree_code, tree, bool *);
static tree fold_binary_op_with_conditional_arg (location_t,
                                                 enum tree_code, tree,
                                                 tree, tree,
                                                 tree, tree, int);
static tree fold_div_compare (location_t, enum tree_code, tree, tree, tree);
static bool reorder_operands_p (const_tree, const_tree);
static tree fold_negate_const (tree, tree);
static tree fold_not_const (const_tree, tree);
static tree fold_relational_const (enum tree_code, tree, tree, tree);
static tree fold_convert_const (enum tree_code, tree, tree);
static tree fold_view_convert_expr (tree, tree);
static bool vec_cst_ctor_to_array (tree, tree *);
/* Return EXPR_LOCATION of T if it is not UNKNOWN_LOCATION.
   Otherwise, return LOC.  */

static location_t
expr_location_or (tree t, location_t loc)
{
  location_t tloc = EXPR_LOCATION (t);
  return tloc == UNKNOWN_LOCATION ? loc : tloc;
}

/* Similar to protected_set_expr_location, but never modify x in place,
   if location can and needs to be set, unshare it.  */

static inline tree
protected_set_expr_location_unshare (tree x, location_t loc)
{
  if (CAN_HAVE_LOCATION_P (x)
      && EXPR_LOCATION (x) != loc
      && !(TREE_CODE (x) == SAVE_EXPR
           || TREE_CODE (x) == TARGET_EXPR
           || TREE_CODE (x) == BIND_EXPR))
    {
      x = copy_node (x);
      SET_EXPR_LOCATION (x, loc);
    }
  return x;
}

/* If ARG2 divides ARG1 with zero remainder, carries out the exact
   division and returns the quotient.  Otherwise returns
   NULL_TREE.  */

tree
div_if_zero_remainder (const_tree arg1, const_tree arg2)
{
  widest_int quo;

  if (wi::multiple_of_p (wi::to_widest (arg1), wi::to_widest (arg2),
                         SIGNED, &quo))
    return wide_int_to_tree (TREE_TYPE (arg1), quo);

  return NULL_TREE;
}
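
/* Illustrative usage sketch (not part of the original source): for
   INTEGER_CST operands 12 and 4 this returns the tree for 3, while
   for 13 and 4 the remainder is nonzero and NULL_TREE is returned:

     tree twelve = build_int_cst (integer_type_node, 12);
     tree four = build_int_cst (integer_type_node, 4);
     tree quo = div_if_zero_remainder (twelve, four);  /+ tree for 3 +/

   (comment delimiters inside the sketch written as /+ +/).  */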
/* This is nonzero if we should defer warnings about undefined
   overflow.  This facility exists because these warnings are a
   special case.  The code to estimate loop iterations does not want
   to issue any warnings, since it works with expressions which do not
   occur in user code.  Various bits of cleanup code call fold(), but
   only use the result if it has certain characteristics (e.g., is a
   constant); that code only wants to issue a warning if the result is
   used.  */

static int fold_deferring_overflow_warnings;

/* If a warning about undefined overflow is deferred, this is the
   warning.  Note that this may cause us to turn two warnings into
   one, but that is fine since it is sufficient to only give one
   warning per expression.  */

static const char* fold_deferred_overflow_warning;

/* If a warning about undefined overflow is deferred, this is the
   level at which the warning should be emitted.  */

static enum warn_strict_overflow_code fold_deferred_overflow_code;

/* Start deferring overflow warnings.  We could use a stack here to
   permit nested calls, but at present it is not necessary.  */

void
fold_defer_overflow_warnings (void)
{
  ++fold_deferring_overflow_warnings;
}

/* Stop deferring overflow warnings.  If there is a pending warning,
   and ISSUE is true, then issue the warning if appropriate.  STMT is
   the statement with which the warning should be associated (used for
   location information); STMT may be NULL.  CODE is the level of the
   warning--a warn_strict_overflow_code value.  This function will use
   the smaller of CODE and the deferred code when deciding whether to
   issue the warning.  CODE may be zero to mean to always use the
   deferred code.  */

void
fold_undefer_overflow_warnings (bool issue, const_gimple stmt, int code)
{
  const char *warnmsg;
  location_t locus;

  gcc_assert (fold_deferring_overflow_warnings > 0);
  --fold_deferring_overflow_warnings;
  if (fold_deferring_overflow_warnings > 0)
    {
      if (fold_deferred_overflow_warning != NULL
          && code != 0
          && code < (int) fold_deferred_overflow_code)
        fold_deferred_overflow_code = (enum warn_strict_overflow_code) code;
      return;
    }

  warnmsg = fold_deferred_overflow_warning;
  fold_deferred_overflow_warning = NULL;

  if (!issue || warnmsg == NULL)
    return;

  if (gimple_no_warning_p (stmt))
    return;

  /* Use the smallest code level when deciding to issue the
     warning.  */
  if (code == 0 || code > (int) fold_deferred_overflow_code)
    code = fold_deferred_overflow_code;

  if (!issue_strict_overflow_warning (code))
    return;

  if (stmt == NULL)
    locus = input_location;
  else
    locus = gimple_location (stmt);
  warning_at (locus, OPT_Wstrict_overflow, "%s", warnmsg);
}

/* Stop deferring overflow warnings, ignoring any deferred
   warnings.  */

void
fold_undefer_and_ignore_overflow_warnings (void)
{
  fold_undefer_overflow_warnings (false, NULL, 0);
}

/* Whether we are deferring overflow warnings.  */

bool
fold_deferring_overflow_warnings_p (void)
{
  return fold_deferring_overflow_warnings > 0;
}
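
/* Illustrative usage sketch (not part of the original source): callers
   bracket speculative folding with a defer/undefer pair so that any
   -Wstrict-overflow warning is emitted only if the folded result is
   actually used:

     fold_defer_overflow_warnings ();
     tree res = fold (expr);
     fold_undefer_overflow_warnings (result_is_used, stmt, 0);

   where result_is_used is a bool the caller computes; passing 0 for
   CODE means the deferred warning level is used as-is.  */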
/* This is called when we fold something based on the fact that signed
   overflow is undefined.  */

static void
fold_overflow_warning (const char* gmsgid, enum warn_strict_overflow_code wc)
{
  if (fold_deferring_overflow_warnings > 0)
    {
      if (fold_deferred_overflow_warning == NULL
          || wc < fold_deferred_overflow_code)
        {
          fold_deferred_overflow_warning = gmsgid;
          fold_deferred_overflow_code = wc;
        }
    }
  else if (issue_strict_overflow_warning (wc))
    warning (OPT_Wstrict_overflow, gmsgid);
}
/* Return true if the built-in mathematical function specified by CODE
   is odd, i.e. -f(x) == f(-x).  */

static bool
negate_mathfn_p (enum built_in_function code)
{
  switch (code)
    {
    CASE_FLT_FN (BUILT_IN_ASIN):
    CASE_FLT_FN (BUILT_IN_ASINH):
    CASE_FLT_FN (BUILT_IN_ATAN):
    CASE_FLT_FN (BUILT_IN_ATANH):
    CASE_FLT_FN (BUILT_IN_CASIN):
    CASE_FLT_FN (BUILT_IN_CASINH):
    CASE_FLT_FN (BUILT_IN_CATAN):
    CASE_FLT_FN (BUILT_IN_CATANH):
    CASE_FLT_FN (BUILT_IN_CBRT):
    CASE_FLT_FN (BUILT_IN_CPROJ):
    CASE_FLT_FN (BUILT_IN_CSIN):
    CASE_FLT_FN (BUILT_IN_CSINH):
    CASE_FLT_FN (BUILT_IN_CTAN):
    CASE_FLT_FN (BUILT_IN_CTANH):
    CASE_FLT_FN (BUILT_IN_ERF):
    CASE_FLT_FN (BUILT_IN_LLROUND):
    CASE_FLT_FN (BUILT_IN_LROUND):
    CASE_FLT_FN (BUILT_IN_ROUND):
    CASE_FLT_FN (BUILT_IN_SIN):
    CASE_FLT_FN (BUILT_IN_SINH):
    CASE_FLT_FN (BUILT_IN_TAN):
    CASE_FLT_FN (BUILT_IN_TANH):
    CASE_FLT_FN (BUILT_IN_TRUNC):
      return true;

    CASE_FLT_FN (BUILT_IN_LLRINT):
    CASE_FLT_FN (BUILT_IN_LRINT):
    CASE_FLT_FN (BUILT_IN_NEARBYINT):
    CASE_FLT_FN (BUILT_IN_RINT):
      return !flag_rounding_math;

    default:
      break;
    }
  return false;
}
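
/* Illustrative note (not part of the original source): sin is odd, so
   the folder may rewrite -sin(x) as sin(-x).  rint only qualifies when
   -frounding-math is off: under a directed rounding mode rint is not
   odd, e.g. rounding toward +inf gives rint(-0.5) == -0.0 while
   -rint(0.5) == -1.0.  */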
/* Check whether we may negate an integer constant T without causing
   overflow.  */

bool
may_negate_without_overflow_p (const_tree t)
{
  tree type;

  gcc_assert (TREE_CODE (t) == INTEGER_CST);

  type = TREE_TYPE (t);
  if (TYPE_UNSIGNED (type))
    return false;

  return !wi::only_sign_bit_p (t);
}
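
/* Illustrative note (not part of the original source): the only signed
   value whose negation does not fit is the most negative one, whose
   representation has just the sign bit set.  For 32-bit int,
   -(-2147483648) overflows, so this returns false exactly for INT_MIN
   and true for every other signed INTEGER_CST.  */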
/* Determine whether an expression T can be cheaply negated using
   the function negate_expr without introducing undefined overflow.  */

static bool
negate_expr_p (tree t)
{
  tree type;

  if (t == 0)
    return false;

  type = TREE_TYPE (t);

  STRIP_SIGN_NOPS (t);
  switch (TREE_CODE (t))
    {
    case INTEGER_CST:
      if (INTEGRAL_TYPE_P (type) && TYPE_OVERFLOW_WRAPS (type))
        return true;

      /* Check that -CST will not overflow type.  */
      return may_negate_without_overflow_p (t);
    case BIT_NOT_EXPR:
      return (INTEGRAL_TYPE_P (type)
              && TYPE_OVERFLOW_WRAPS (type));

    case FIXED_CST:
      return true;

    case NEGATE_EXPR:
      return !TYPE_OVERFLOW_SANITIZED (type);

    case REAL_CST:
      /* We want to canonicalize to positive real constants.  Pretend
         that only negative ones can be easily negated.  */
      return REAL_VALUE_NEGATIVE (TREE_REAL_CST (t));

    case COMPLEX_CST:
      return negate_expr_p (TREE_REALPART (t))
             && negate_expr_p (TREE_IMAGPART (t));

    case VECTOR_CST:
      {
        if (FLOAT_TYPE_P (TREE_TYPE (type)) || TYPE_OVERFLOW_WRAPS (type))
          return true;

        int count = TYPE_VECTOR_SUBPARTS (type), i;

        for (i = 0; i < count; i++)
          if (!negate_expr_p (VECTOR_CST_ELT (t, i)))
            return false;

        return true;
      }

    case COMPLEX_EXPR:
      return negate_expr_p (TREE_OPERAND (t, 0))
             && negate_expr_p (TREE_OPERAND (t, 1));

    case CONJ_EXPR:
      return negate_expr_p (TREE_OPERAND (t, 0));

    case PLUS_EXPR:
      if (HONOR_SIGN_DEPENDENT_ROUNDING (element_mode (type))
          || HONOR_SIGNED_ZEROS (element_mode (type)))
        return false;
      /* -(A + B) -> (-B) - A.  */
      if (negate_expr_p (TREE_OPERAND (t, 1))
          && reorder_operands_p (TREE_OPERAND (t, 0),
                                 TREE_OPERAND (t, 1)))
        return true;
      /* -(A + B) -> (-A) - B.  */
      return negate_expr_p (TREE_OPERAND (t, 0));

    case MINUS_EXPR:
      /* We can't turn -(A-B) into B-A when we honor signed zeros.  */
      return !HONOR_SIGN_DEPENDENT_ROUNDING (element_mode (type))
             && !HONOR_SIGNED_ZEROS (element_mode (type))
             && reorder_operands_p (TREE_OPERAND (t, 0),
                                    TREE_OPERAND (t, 1));

    case MULT_EXPR:
      if (TYPE_UNSIGNED (TREE_TYPE (t)))
        break;

      /* Fall through.  */

    case RDIV_EXPR:
      if (! HONOR_SIGN_DEPENDENT_ROUNDING (element_mode (TREE_TYPE (t))))
        return negate_expr_p (TREE_OPERAND (t, 1))
               || negate_expr_p (TREE_OPERAND (t, 0));
      break;

    case TRUNC_DIV_EXPR:
    case ROUND_DIV_EXPR:
    case EXACT_DIV_EXPR:
      /* In general we can't negate A / B, because if A is INT_MIN and
         B is 1, we may turn this into INT_MIN / -1 which is undefined
         and actually traps on some architectures.  But if overflow is
         undefined, we can negate, because - (INT_MIN / 1) is an
         overflow.  */
      if (INTEGRAL_TYPE_P (TREE_TYPE (t)))
        {
          if (!TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (t)))
            break;
          /* If overflow is undefined then we have to be careful because
             we ask whether it's ok to associate the negate with the
             division which is not ok for example for
             -((a - b) / c) where (-(a - b)) / c may invoke undefined
             overflow because of negating INT_MIN.  So do not use
             negate_expr_p here but open-code the two important cases.  */
          if (TREE_CODE (TREE_OPERAND (t, 0)) == NEGATE_EXPR
              || (TREE_CODE (TREE_OPERAND (t, 0)) == INTEGER_CST
                  && may_negate_without_overflow_p (TREE_OPERAND (t, 0))))
            return true;
        }
      else if (negate_expr_p (TREE_OPERAND (t, 0)))
        return true;
      return negate_expr_p (TREE_OPERAND (t, 1));

    case NOP_EXPR:
      /* Negate -((double)float) as (double)(-float).  */
      if (TREE_CODE (type) == REAL_TYPE)
        {
          tree tem = strip_float_extensions (t);
          if (tem != t)
            return negate_expr_p (tem);
        }
      break;

    case CALL_EXPR:
      /* Negate -f(x) as f(-x).  */
      if (negate_mathfn_p (builtin_mathfn_code (t)))
        return negate_expr_p (CALL_EXPR_ARG (t, 0));
      break;

    case RSHIFT_EXPR:
      /* Optimize -((int)x >> 31) into (unsigned)x >> 31 for int.  */
      if (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST)
        {
          tree op1 = TREE_OPERAND (t, 1);
          if (wi::eq_p (op1, TYPE_PRECISION (type) - 1))
            return true;
        }
      break;

    default:
      break;
    }
  return false;
}
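
/* Worked example for the RSHIFT_EXPR case (illustrative, not part of
   the original source): for 32-bit int, (int)x >> 31 is the
   arithmetic shift of the sign bit and yields 0 or -1; its negation
   is 0 or 1, which is exactly (unsigned)x >> 31.  This is why a right
   shift by precision - 1 is reported as cheaply negatable.  */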
/* Given T, an expression, return a folded tree for -T or NULL_TREE, if no
   simplification is possible.
   If negate_expr_p would return true for T, NULL_TREE will never be
   returned.  */

static tree
fold_negate_expr (location_t loc, tree t)
{
  tree type = TREE_TYPE (t);
  tree tem;

  switch (TREE_CODE (t))
    {
    /* Convert - (~A) to A + 1.  */
    case BIT_NOT_EXPR:
      if (INTEGRAL_TYPE_P (type))
        return fold_build2_loc (loc, PLUS_EXPR, type, TREE_OPERAND (t, 0),
                                build_one_cst (type));
      break;

    case INTEGER_CST:
      tem = fold_negate_const (t, type);
      if (TREE_OVERFLOW (tem) == TREE_OVERFLOW (t)
          || (ANY_INTEGRAL_TYPE_P (type)
              && !TYPE_OVERFLOW_TRAPS (type)
              && TYPE_OVERFLOW_WRAPS (type))
          || (flag_sanitize & SANITIZE_SI_OVERFLOW) == 0)
        return tem;
      break;

    case REAL_CST:
      tem = fold_negate_const (t, type);
      return tem;

    case FIXED_CST:
      tem = fold_negate_const (t, type);
      return tem;

    case COMPLEX_CST:
      {
        tree rpart = fold_negate_expr (loc, TREE_REALPART (t));
        tree ipart = fold_negate_expr (loc, TREE_IMAGPART (t));
        if (rpart && ipart)
          return build_complex (type, rpart, ipart);
      }
      break;

    case VECTOR_CST:
      {
        int count = TYPE_VECTOR_SUBPARTS (type), i;
        tree *elts = XALLOCAVEC (tree, count);

        for (i = 0; i < count; i++)
          {
            elts[i] = fold_negate_expr (loc, VECTOR_CST_ELT (t, i));
            if (elts[i] == NULL_TREE)
              return NULL_TREE;
          }

        return build_vector (type, elts);
      }

    case COMPLEX_EXPR:
      if (negate_expr_p (t))
        return fold_build2_loc (loc, COMPLEX_EXPR, type,
                                fold_negate_expr (loc, TREE_OPERAND (t, 0)),
                                fold_negate_expr (loc, TREE_OPERAND (t, 1)));
      break;

    case CONJ_EXPR:
      if (negate_expr_p (t))
        return fold_build1_loc (loc, CONJ_EXPR, type,
                                fold_negate_expr (loc, TREE_OPERAND (t, 0)));
      break;

    case NEGATE_EXPR:
      if (!TYPE_OVERFLOW_SANITIZED (type))
        return TREE_OPERAND (t, 0);
      break;

    case PLUS_EXPR:
      if (!HONOR_SIGN_DEPENDENT_ROUNDING (element_mode (type))
          && !HONOR_SIGNED_ZEROS (element_mode (type)))
        {
          /* -(A + B) -> (-B) - A.  */
          if (negate_expr_p (TREE_OPERAND (t, 1))
              && reorder_operands_p (TREE_OPERAND (t, 0),
                                     TREE_OPERAND (t, 1)))
            {
              tem = negate_expr (TREE_OPERAND (t, 1));
              return fold_build2_loc (loc, MINUS_EXPR, type,
                                      tem, TREE_OPERAND (t, 0));
            }

          /* -(A + B) -> (-A) - B.  */
          if (negate_expr_p (TREE_OPERAND (t, 0)))
            {
              tem = negate_expr (TREE_OPERAND (t, 0));
              return fold_build2_loc (loc, MINUS_EXPR, type,
                                      tem, TREE_OPERAND (t, 1));
            }
        }
      break;

    case MINUS_EXPR:
      /* - (A - B) -> B - A  */
      if (!HONOR_SIGN_DEPENDENT_ROUNDING (element_mode (type))
          && !HONOR_SIGNED_ZEROS (element_mode (type))
          && reorder_operands_p (TREE_OPERAND (t, 0), TREE_OPERAND (t, 1)))
        return fold_build2_loc (loc, MINUS_EXPR, type,
                                TREE_OPERAND (t, 1), TREE_OPERAND (t, 0));
      break;

    case MULT_EXPR:
      if (TYPE_UNSIGNED (type))
        break;

      /* Fall through.  */

    case RDIV_EXPR:
      if (! HONOR_SIGN_DEPENDENT_ROUNDING (element_mode (type)))
        {
          tem = TREE_OPERAND (t, 1);
          if (negate_expr_p (tem))
            return fold_build2_loc (loc, TREE_CODE (t), type,
                                    TREE_OPERAND (t, 0), negate_expr (tem));
          tem = TREE_OPERAND (t, 0);
          if (negate_expr_p (tem))
            return fold_build2_loc (loc, TREE_CODE (t), type,
                                    negate_expr (tem), TREE_OPERAND (t, 1));
        }
      break;

    case TRUNC_DIV_EXPR:
    case ROUND_DIV_EXPR:
    case EXACT_DIV_EXPR:
      /* In general we can't negate A / B, because if A is INT_MIN and
         B is 1, we may turn this into INT_MIN / -1 which is undefined
         and actually traps on some architectures.  But if overflow is
         undefined, we can negate, because - (INT_MIN / 1) is an
         overflow.  */
      if (!INTEGRAL_TYPE_P (type) || TYPE_OVERFLOW_UNDEFINED (type))
        {
          const char * const warnmsg = G_("assuming signed overflow does not "
                                          "occur when negating a division");
          tem = TREE_OPERAND (t, 1);
          if (negate_expr_p (tem))
            {
              if (INTEGRAL_TYPE_P (type)
                  && (TREE_CODE (tem) != INTEGER_CST
                      || integer_onep (tem)))
                fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_MISC);
              return fold_build2_loc (loc, TREE_CODE (t), type,
                                      TREE_OPERAND (t, 0), negate_expr (tem));
            }
          /* If overflow is undefined then we have to be careful because
             we ask whether it's ok to associate the negate with the
             division which is not ok for example for
             -((a - b) / c) where (-(a - b)) / c may invoke undefined
             overflow because of negating INT_MIN.  So do not use
             negate_expr_p here but open-code the two important cases.  */
          tem = TREE_OPERAND (t, 0);
          if ((INTEGRAL_TYPE_P (type)
               && (TREE_CODE (tem) == NEGATE_EXPR
                   || (TREE_CODE (tem) == INTEGER_CST
                       && may_negate_without_overflow_p (tem))))
              || !INTEGRAL_TYPE_P (type))
            return fold_build2_loc (loc, TREE_CODE (t), type,
                                    negate_expr (tem), TREE_OPERAND (t, 1));
        }
      break;

    case NOP_EXPR:
      /* Convert -((double)float) into (double)(-float).  */
      if (TREE_CODE (type) == REAL_TYPE)
        {
          tem = strip_float_extensions (t);
          if (tem != t && negate_expr_p (tem))
            return fold_convert_loc (loc, type, negate_expr (tem));
        }
      break;

    case CALL_EXPR:
      /* Negate -f(x) as f(-x).  */
      if (negate_mathfn_p (builtin_mathfn_code (t))
          && negate_expr_p (CALL_EXPR_ARG (t, 0)))
        {
          tree fndecl, arg;

          fndecl = get_callee_fndecl (t);
          arg = negate_expr (CALL_EXPR_ARG (t, 0));
          return build_call_expr_loc (loc, fndecl, 1, arg);
        }
      break;

    case RSHIFT_EXPR:
      /* Optimize -((int)x >> 31) into (unsigned)x >> 31 for int.  */
      if (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST)
        {
          tree op1 = TREE_OPERAND (t, 1);
          if (wi::eq_p (op1, TYPE_PRECISION (type) - 1))
            {
              tree ntype = TYPE_UNSIGNED (type)
                           ? signed_type_for (type)
                           : unsigned_type_for (type);
              tree temp = fold_convert_loc (loc, ntype, TREE_OPERAND (t, 0));
              temp = fold_build2_loc (loc, RSHIFT_EXPR, ntype, temp, op1);
              return fold_convert_loc (loc, type, temp);
            }
        }
      break;

    default:
      break;
    }

  return NULL_TREE;
}
/* Like fold_negate_expr, but return a NEGATE_EXPR tree, if T cannot be
   negated in a simpler way.  Also allow for T to be NULL_TREE, in which case
   return NULL_TREE.  */

static tree
negate_expr (tree t)
{
  tree type, tem;
  location_t loc;

  if (t == NULL_TREE)
    return NULL_TREE;

  loc = EXPR_LOCATION (t);
  type = TREE_TYPE (t);
  STRIP_SIGN_NOPS (t);

  tem = fold_negate_expr (loc, t);
  if (!tem)
    tem = build1_loc (loc, NEGATE_EXPR, TREE_TYPE (t), t);
  return fold_convert_loc (loc, type, tem);
}
/* Split a tree IN into a constant, literal and variable parts that could be
   combined with CODE to make IN.  "constant" means an expression with
   TREE_CONSTANT but that isn't an actual constant.  CODE must be a
   commutative arithmetic operation.  Store the constant part into *CONP,
   the literal in *LITP and return the variable part.  If a part isn't
   present, set it to null.  If the tree does not decompose in this way,
   return the entire tree as the variable part and the other parts as null.

   If CODE is PLUS_EXPR we also split trees that use MINUS_EXPR.  In that
   case, we negate an operand that was subtracted.  Except if it is a
   literal for which we use *MINUS_LITP instead.

   If NEGATE_P is true, we are negating all of IN, again except a literal
   for which we use *MINUS_LITP instead.

   If IN is itself a literal or constant, return it as appropriate.

   Note that we do not guarantee that any of the three values will be the
   same type as IN, but they will have the same signedness and mode.  */

static tree
split_tree (tree in, enum tree_code code, tree *conp, tree *litp,
            tree *minus_litp, int negate_p)
{
  tree var = 0;

  *conp = 0;
  *litp = 0;
  *minus_litp = 0;

  /* Strip any conversions that don't change the machine mode or signedness.  */
  STRIP_SIGN_NOPS (in);

  if (TREE_CODE (in) == INTEGER_CST || TREE_CODE (in) == REAL_CST
      || TREE_CODE (in) == FIXED_CST)
    *litp = in;
  else if (TREE_CODE (in) == code
           || ((! FLOAT_TYPE_P (TREE_TYPE (in)) || flag_associative_math)
               && ! SAT_FIXED_POINT_TYPE_P (TREE_TYPE (in))
               /* We can associate addition and subtraction together (even
                  though the C standard doesn't say so) for integers because
                  the value is not affected.  For reals, the value might be
                  affected, so we can't.  */
               && ((code == PLUS_EXPR && TREE_CODE (in) == MINUS_EXPR)
                   || (code == MINUS_EXPR && TREE_CODE (in) == PLUS_EXPR))))
    {
      tree op0 = TREE_OPERAND (in, 0);
      tree op1 = TREE_OPERAND (in, 1);
      int neg1_p = TREE_CODE (in) == MINUS_EXPR;
      int neg_litp_p = 0, neg_conp_p = 0, neg_var_p = 0;

      /* First see if either of the operands is a literal, then a constant.  */
      if (TREE_CODE (op0) == INTEGER_CST || TREE_CODE (op0) == REAL_CST
          || TREE_CODE (op0) == FIXED_CST)
        *litp = op0, op0 = 0;
      else if (TREE_CODE (op1) == INTEGER_CST || TREE_CODE (op1) == REAL_CST
               || TREE_CODE (op1) == FIXED_CST)
        *litp = op1, neg_litp_p = neg1_p, op1 = 0;

      if (op0 != 0 && TREE_CONSTANT (op0))
        *conp = op0, op0 = 0;
      else if (op1 != 0 && TREE_CONSTANT (op1))
        *conp = op1, neg_conp_p = neg1_p, op1 = 0;

      /* If we haven't dealt with either operand, this is not a case we can
         decompose.  Otherwise, VAR is either of the ones remaining, if any.  */
      if (op0 != 0 && op1 != 0)
        var = in;
      else if (op0 != 0)
        var = op0;
      else
        var = op1, neg_var_p = neg1_p;

      /* Now do any needed negations.  */
      if (neg_litp_p)
        *minus_litp = *litp, *litp = 0;
      if (neg_conp_p)
        *conp = negate_expr (*conp);
      if (neg_var_p)
        var = negate_expr (var);
    }
  else if (TREE_CODE (in) == BIT_NOT_EXPR
           && code == PLUS_EXPR)
    {
      /* -X - 1 is folded to ~X, undo that here.  */
      *minus_litp = build_one_cst (TREE_TYPE (in));
      var = negate_expr (TREE_OPERAND (in, 0));
    }
  else if (TREE_CONSTANT (in))
    *conp = in;
  else
    var = in;

  if (negate_p)
    {
      if (*litp)
        *minus_litp = *litp, *litp = 0;
      else if (*minus_litp)
        *litp = *minus_litp, *minus_litp = 0;
      *conp = negate_expr (*conp);
      var = negate_expr (var);
    }

  return var;
}
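
/* Worked example (illustrative, not part of the original source):
   splitting IN = (x - 5) with CODE = PLUS_EXPR treats the subtraction
   as addition of a negated operand, so the literal 5 lands in
   *MINUS_LITP, *CONP stays null, and the returned variable part is x.
   Recombining the parts with associate_trees below reconstructs a
   tree equivalent to IN.  */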
/* Re-associate trees split by the above function.  T1 and T2 are
   either expressions to associate or null.  Return the new
   expression, if any.  LOC is the location of the new expression.  If
   we build an operation, do it in TYPE and with CODE.  */

static tree
associate_trees (location_t loc, tree t1, tree t2, enum tree_code code, tree type)
{
  if (t1 == 0)
    return t2;
  else if (t2 == 0)
    return t1;

  /* If either input is CODE, a PLUS_EXPR, or a MINUS_EXPR, don't
     try to fold this since we will have infinite recursion.  But do
     deal with any NEGATE_EXPRs.  */
  if (TREE_CODE (t1) == code || TREE_CODE (t2) == code
      || TREE_CODE (t1) == MINUS_EXPR || TREE_CODE (t2) == MINUS_EXPR)
    {
      if (code == PLUS_EXPR)
        {
          if (TREE_CODE (t1) == NEGATE_EXPR)
            return build2_loc (loc, MINUS_EXPR, type,
                               fold_convert_loc (loc, type, t2),
                               fold_convert_loc (loc, type,
                                                 TREE_OPERAND (t1, 0)));
          else if (TREE_CODE (t2) == NEGATE_EXPR)
            return build2_loc (loc, MINUS_EXPR, type,
                               fold_convert_loc (loc, type, t1),
                               fold_convert_loc (loc, type,
                                                 TREE_OPERAND (t2, 0)));
          else if (integer_zerop (t2))
            return fold_convert_loc (loc, type, t1);
        }
      else if (code == MINUS_EXPR)
        {
          if (integer_zerop (t2))
            return fold_convert_loc (loc, type, t1);
        }

      return build2_loc (loc, code, type, fold_convert_loc (loc, type, t1),
                         fold_convert_loc (loc, type, t2));
    }

  return fold_build2_loc (loc, code, type, fold_convert_loc (loc, type, t1),
                          fold_convert_loc (loc, type, t2));
}
/* Check whether TYPE1 and TYPE2 are equivalent integer types, suitable
   for use in int_const_binop, size_binop and size_diffop.  */

static bool
int_binop_types_match_p (enum tree_code code, const_tree type1, const_tree type2)
{
  if (!INTEGRAL_TYPE_P (type1) && !POINTER_TYPE_P (type1))
    return false;
  if (!INTEGRAL_TYPE_P (type2) && !POINTER_TYPE_P (type2))
    return false;

  switch (code)
    {
    case LSHIFT_EXPR:
    case RSHIFT_EXPR:
    case LROTATE_EXPR:
    case RROTATE_EXPR:
      return true;

    default:
      break;
    }

  return TYPE_UNSIGNED (type1) == TYPE_UNSIGNED (type2)
         && TYPE_PRECISION (type1) == TYPE_PRECISION (type2)
         && TYPE_MODE (type1) == TYPE_MODE (type2);
}
/* Combine two integer constants ARG1 and ARG2 under operation CODE
   to produce a new constant.  Return NULL_TREE if we don't know how
   to evaluate CODE at compile-time.  */

static tree
int_const_binop_1 (enum tree_code code, const_tree arg1, const_tree parg2,
                   int overflowable)
{
  wide_int res;
  tree t;
  tree type = TREE_TYPE (arg1);
  signop sign = TYPE_SIGN (type);
  bool overflow = false;

  wide_int arg2 = wide_int::from (parg2, TYPE_PRECISION (type),
                                  TYPE_SIGN (TREE_TYPE (parg2)));

  switch (code)
    {
    case BIT_IOR_EXPR:
      res = wi::bit_or (arg1, arg2);
      break;

    case BIT_XOR_EXPR:
      res = wi::bit_xor (arg1, arg2);
      break;

    case BIT_AND_EXPR:
      res = wi::bit_and (arg1, arg2);
      break;

    case RSHIFT_EXPR:
    case LSHIFT_EXPR:
      if (wi::neg_p (arg2))
        {
          arg2 = -arg2;
          if (code == RSHIFT_EXPR)
            code = LSHIFT_EXPR;
          else
            code = RSHIFT_EXPR;
        }

      if (code == RSHIFT_EXPR)
        /* It's unclear from the C standard whether shifts can overflow.
           The following code ignores overflow; perhaps a C standard
           interpretation ruling is needed.  */
        res = wi::rshift (arg1, arg2, sign);
      else
        res = wi::lshift (arg1, arg2);
      break;

    case RROTATE_EXPR:
    case LROTATE_EXPR:
      if (wi::neg_p (arg2))
        {
          arg2 = -arg2;
          if (code == RROTATE_EXPR)
            code = LROTATE_EXPR;
          else
            code = RROTATE_EXPR;
        }

      if (code == RROTATE_EXPR)
        res = wi::rrotate (arg1, arg2);
      else
        res = wi::lrotate (arg1, arg2);
      break;

    case PLUS_EXPR:
      res = wi::add (arg1, arg2, sign, &overflow);
      break;

    case MINUS_EXPR:
      res = wi::sub (arg1, arg2, sign, &overflow);
      break;

    case MULT_EXPR:
      res = wi::mul (arg1, arg2, sign, &overflow);
      break;

    case MULT_HIGHPART_EXPR:
      res = wi::mul_high (arg1, arg2, sign);
      break;

    case TRUNC_DIV_EXPR:
    case EXACT_DIV_EXPR:
      if (arg2 == 0)
        return NULL_TREE;
      res = wi::div_trunc (arg1, arg2, sign, &overflow);
      break;

    case FLOOR_DIV_EXPR:
      if (arg2 == 0)
        return NULL_TREE;
      res = wi::div_floor (arg1, arg2, sign, &overflow);
      break;

    case CEIL_DIV_EXPR:
      if (arg2 == 0)
        return NULL_TREE;
      res = wi::div_ceil (arg1, arg2, sign, &overflow);
      break;

    case ROUND_DIV_EXPR:
      if (arg2 == 0)
        return NULL_TREE;
      res = wi::div_round (arg1, arg2, sign, &overflow);
      break;

    case TRUNC_MOD_EXPR:
      if (arg2 == 0)
        return NULL_TREE;
      res = wi::mod_trunc (arg1, arg2, sign, &overflow);
      break;

    case FLOOR_MOD_EXPR:
      if (arg2 == 0)
        return NULL_TREE;
      res = wi::mod_floor (arg1, arg2, sign, &overflow);
      break;

    case CEIL_MOD_EXPR:
      if (arg2 == 0)
        return NULL_TREE;
      res = wi::mod_ceil (arg1, arg2, sign, &overflow);
      break;

    case ROUND_MOD_EXPR:
      if (arg2 == 0)
        return NULL_TREE;
      res = wi::mod_round (arg1, arg2, sign, &overflow);
      break;

    case MIN_EXPR:
      res = wi::min (arg1, arg2, sign);
      break;

    case MAX_EXPR:
      res = wi::max (arg1, arg2, sign);
      break;

    default:
      return NULL_TREE;
    }

  t = force_fit_type (type, res, overflowable,
                      (((sign == SIGNED || overflowable == -1)
                        && overflow)
                       | TREE_OVERFLOW (arg1) | TREE_OVERFLOW (parg2)));

  return t;
}

tree
int_const_binop (enum tree_code code, const_tree arg1, const_tree arg2)
{
  return int_const_binop_1 (code, arg1, arg2, 1);
}
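
/* Illustrative usage sketch (not part of the original source):
   folding the constant sum 2 + 3 in int:

     tree two = build_int_cst (integer_type_node, 2);
     tree three = build_int_cst (integer_type_node, 3);
     tree five = int_const_binop (PLUS_EXPR, two, three);

   Division and modulus by a zero constant return NULL_TREE rather
   than folding, as handled case by case above.  */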
/* Combine two constants ARG1 and ARG2 under operation CODE to produce a new
   constant.  We assume ARG1 and ARG2 have the same data type, or at least
   are the same kind of constant and the same machine mode.  Return zero if
   combining the constants is not allowed in the current operating mode.  */

static tree
const_binop (enum tree_code code, tree arg1, tree arg2)
{
  /* Sanity check for the recursive cases.  */
  if (!arg1 || !arg2)
    return NULL_TREE;

  STRIP_NOPS (arg1);
  STRIP_NOPS (arg2);

  if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg2) == INTEGER_CST)
    {
      if (code == POINTER_PLUS_EXPR)
        return int_const_binop (PLUS_EXPR,
                                arg1, fold_convert (TREE_TYPE (arg1), arg2));

      return int_const_binop (code, arg1, arg2);
    }

  if (TREE_CODE (arg1) == REAL_CST && TREE_CODE (arg2) == REAL_CST)
    {
      machine_mode mode;
      REAL_VALUE_TYPE d1;
      REAL_VALUE_TYPE d2;
      REAL_VALUE_TYPE value;
      REAL_VALUE_TYPE result;
      bool inexact;
      tree t, type;

      /* The following codes are handled by real_arithmetic.  */
      switch (code)
        {
        case PLUS_EXPR:
        case MINUS_EXPR:
        case MULT_EXPR:
        case RDIV_EXPR:
        case MIN_EXPR:
        case MAX_EXPR:
          break;

        default:
          return NULL_TREE;
        }

      d1 = TREE_REAL_CST (arg1);
      d2 = TREE_REAL_CST (arg2);

      type = TREE_TYPE (arg1);
      mode = TYPE_MODE (type);

      /* Don't perform operation if we honor signaling NaNs and
         either operand is a NaN.  */
      if (HONOR_SNANS (mode)
          && (REAL_VALUE_ISNAN (d1) || REAL_VALUE_ISNAN (d2)))
        return NULL_TREE;

      /* Don't perform operation if it would raise a division
         by zero exception.  */
      if (code == RDIV_EXPR
          && REAL_VALUES_EQUAL (d2, dconst0)
          && (flag_trapping_math || ! MODE_HAS_INFINITIES (mode)))
        return NULL_TREE;

      /* If either operand is a NaN, just return it.  Otherwise, set up
         for floating-point trap; we return an overflow.  */
      if (REAL_VALUE_ISNAN (d1))
        return arg1;
      else if (REAL_VALUE_ISNAN (d2))
        return arg2;

      inexact = real_arithmetic (&value, code, &d1, &d2);
      real_convert (&result, mode, &value);

      /* Don't constant fold this floating point operation if
         the result has overflowed and flag_trapping_math.  */
      if (flag_trapping_math
          && MODE_HAS_INFINITIES (mode)
          && REAL_VALUE_ISINF (result)
          && !REAL_VALUE_ISINF (d1)
          && !REAL_VALUE_ISINF (d2))
        return NULL_TREE;

      /* Don't constant fold this floating point operation if the
         result may depend upon the run-time rounding mode and
         flag_rounding_math is set, or if GCC's software emulation
         is unable to accurately represent the result.  */
      if ((flag_rounding_math
           || (MODE_COMPOSITE_P (mode) && !flag_unsafe_math_optimizations))
          && (inexact || !real_identical (&result, &value)))
        return NULL_TREE;

      t = build_real (type, result);

      TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2);
      return t;
    }

  if (TREE_CODE (arg1) == FIXED_CST)
    {
      FIXED_VALUE_TYPE f1;
      FIXED_VALUE_TYPE f2;
      FIXED_VALUE_TYPE result;
      tree t, type;
      int sat_p;
      bool overflow_p;

      /* The following codes are handled by fixed_arithmetic.  */
      switch (code)
        {
        case PLUS_EXPR:
        case MINUS_EXPR:
        case MULT_EXPR:
        case TRUNC_DIV_EXPR:
          if (TREE_CODE (arg2) != FIXED_CST)
            return NULL_TREE;
          f2 = TREE_FIXED_CST (arg2);
          break;

        case LSHIFT_EXPR:
        case RSHIFT_EXPR:
          {
            if (TREE_CODE (arg2) != INTEGER_CST)
              return NULL_TREE;
            wide_int w2 = arg2;
            f2.data.high = w2.elt (1);
            f2.data.low = w2.elt (0);
            f2.mode = SImode;
          }
          break;

        default:
          return NULL_TREE;
        }

      f1 = TREE_FIXED_CST (arg1);
      type = TREE_TYPE (arg1);
      sat_p = TYPE_SATURATING (type);
      overflow_p = fixed_arithmetic (&result, code, &f1, &f2, sat_p);
      t = build_fixed (type, result);
      /* Propagate overflow flags.  */
      if (overflow_p | TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2))
        TREE_OVERFLOW (t) = 1;
      return t;
    }

  if (TREE_CODE (arg1) == COMPLEX_CST && TREE_CODE (arg2) == COMPLEX_CST)
    {
      tree type = TREE_TYPE (arg1);
      tree r1 = TREE_REALPART (arg1);
      tree i1 = TREE_IMAGPART (arg1);
      tree r2 = TREE_REALPART (arg2);
      tree i2 = TREE_IMAGPART (arg2);
      tree real, imag;

      switch (code)
        {
        case PLUS_EXPR:
        case MINUS_EXPR:
          real = const_binop (code, r1, r2);
          imag = const_binop (code, i1, i2);
          break;

        case MULT_EXPR:
          if (COMPLEX_FLOAT_TYPE_P (type))
            return do_mpc_arg2 (arg1, arg2, type,
                                /* do_nonfinite= */ folding_initializer,
                                mpc_mul);

          real = const_binop (MINUS_EXPR,
                              const_binop (MULT_EXPR, r1, r2),
                              const_binop (MULT_EXPR, i1, i2));
          imag = const_binop (PLUS_EXPR,
                              const_binop (MULT_EXPR, r1, i2),
                              const_binop (MULT_EXPR, i1, r2));
          break;

        case RDIV_EXPR:
          if (COMPLEX_FLOAT_TYPE_P (type))
            return do_mpc_arg2 (arg1, arg2, type,
                                /* do_nonfinite= */ folding_initializer,
                                mpc_div);
          /* Fallthru ... */
        case TRUNC_DIV_EXPR:
        case CEIL_DIV_EXPR:
        case FLOOR_DIV_EXPR:
        case ROUND_DIV_EXPR:
          if (flag_complex_method == 0)
            {
              /* Keep this algorithm in sync with
                 tree-complex.c:expand_complex_div_straight().

                 Expand complex division to scalars, straightforward algorithm.
                 a / b = ((ar*br + ai*bi)/t) + i((ai*br - ar*bi)/t)
                 t = br*br + bi*bi  */
              tree magsquared
                = const_binop (PLUS_EXPR,
                               const_binop (MULT_EXPR, r2, r2),
                               const_binop (MULT_EXPR, i2, i2));
              tree t1
                = const_binop (PLUS_EXPR,
                               const_binop (MULT_EXPR, r1, r2),
                               const_binop (MULT_EXPR, i1, i2));
              tree t2
                = const_binop (MINUS_EXPR,
                               const_binop (MULT_EXPR, i1, r2),
                               const_binop (MULT_EXPR, r1, i2));

              real = const_binop (code, t1, magsquared);
              imag = const_binop (code, t2, magsquared);
            }
          else
            {
              /* Keep this algorithm in sync with
                 tree-complex.c:expand_complex_div_wide().

                 Expand complex division to scalars, modified algorithm to minimize
                 overflow with wide input ranges.  */
              tree compare = fold_build2 (LT_EXPR, boolean_type_node,
                                          fold_abs_const (r2, TREE_TYPE (type)),
                                          fold_abs_const (i2, TREE_TYPE (type)));

              if (integer_nonzerop (compare))
                {
                  /* In the TRUE branch, we compute
                     ratio = br/bi;
                     div = (br * ratio) + bi;
                     tr = (ar * ratio) + ai;
                     ti = (ai * ratio) - ar;
                     tr = tr / div;
                     ti = ti / div;  */
                  tree ratio = const_binop (code, r2, i2);
                  tree div = const_binop (PLUS_EXPR, i2,
                                          const_binop (MULT_EXPR, r2, ratio));
                  real = const_binop (MULT_EXPR, r1, ratio);
                  real = const_binop (PLUS_EXPR, real, i1);
                  real = const_binop (code, real, div);

                  imag = const_binop (MULT_EXPR, i1, ratio);
                  imag = const_binop (MINUS_EXPR, imag, r1);
                  imag = const_binop (code, imag, div);
                }
              else
                {
                  /* In the FALSE branch, we compute
                     ratio = d/c;
                     divisor = (d * ratio) + c;
                     tr = (b * ratio) + a;
                     ti = b - (a * ratio);
                     tr = tr / div;
                     ti = ti / div;  */
                  tree ratio = const_binop (code, i2, r2);
                  tree div = const_binop (PLUS_EXPR, r2,
                                          const_binop (MULT_EXPR, i2, ratio));

                  real = const_binop (MULT_EXPR, i1, ratio);
                  real = const_binop (PLUS_EXPR, real, r1);
                  real = const_binop (code, real, div);

                  imag = const_binop (MULT_EXPR, r1, ratio);
                  imag = const_binop (MINUS_EXPR, i1, imag);
                  imag = const_binop (code, imag, div);
                }
            }
          break;

        default:
          return NULL_TREE;
        }

      if (real && imag)
        return build_complex (type, real, imag);
    }

  if (TREE_CODE (arg1) == VECTOR_CST
      && TREE_CODE (arg2) == VECTOR_CST)
    {
      tree type = TREE_TYPE (arg1);
      int count = TYPE_VECTOR_SUBPARTS (type), i;
      tree *elts = XALLOCAVEC (tree, count);

      for (i = 0; i < count; i++)
        {
          tree elem1 = VECTOR_CST_ELT (arg1, i);
          tree elem2 = VECTOR_CST_ELT (arg2, i);

          elts[i] = const_binop (code, elem1, elem2);

          /* It is possible that const_binop cannot handle the given
             code and returns NULL_TREE.  */
          if (elts[i] == NULL_TREE)
            return NULL_TREE;
        }

      return build_vector (type, elts);
    }

  /* Shifts allow a scalar offset for a vector.  */
  if (TREE_CODE (arg1) == VECTOR_CST
      && TREE_CODE (arg2) == INTEGER_CST)
    {
      tree type = TREE_TYPE (arg1);
      int count = TYPE_VECTOR_SUBPARTS (type), i;
      tree *elts = XALLOCAVEC (tree, count);

      for (i = 0; i < count; i++)
        {
          tree elem1 = VECTOR_CST_ELT (arg1, i);

          elts[i] = const_binop (code, elem1, arg2);

          /* It is possible that const_binop cannot handle the given
             code and returns NULL_TREE.  */
          if (elts[i] == NULL_TREE)
            return NULL_TREE;
        }

      return build_vector (type, elts);
    }
  return NULL_TREE;
}
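
/* Worked example (illustrative, not part of the original source): with
   the straight algorithm above, (3 + 2i) / (1 + 1i) computes
   t = 1*1 + 1*1 = 2, real = (3*1 + 2*1)/2 = 2.5 and
   imag = (2*1 - 3*1)/2 = -0.5, i.e. 2.5 - 0.5i.  The wide-range
   variant reaches the same value while avoiding overflow in
   br*br + bi*bi for operands near the limits of the type.  */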
/* Overload that adds a TYPE parameter to be able to dispatch
   to fold_relational_const.  */

tree
const_binop (enum tree_code code, tree type, tree arg1, tree arg2)
{
  if (TREE_CODE_CLASS (code) == tcc_comparison)
    return fold_relational_const (code, type, arg1, arg2);

  /* ??? Until we make the const_binop worker take the type of the
     result as argument put those cases that need it here.  */
  switch (code)
    {
    case COMPLEX_EXPR:
      if ((TREE_CODE (arg1) == REAL_CST
           && TREE_CODE (arg2) == REAL_CST)
          || (TREE_CODE (arg1) == INTEGER_CST
              && TREE_CODE (arg2) == INTEGER_CST))
        return build_complex (type, arg1, arg2);
      return NULL_TREE;

    case VEC_PACK_TRUNC_EXPR:
    case VEC_PACK_FIX_TRUNC_EXPR:
      {
        unsigned int nelts = TYPE_VECTOR_SUBPARTS (type), i;
        tree *elts;

        gcc_assert (TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg1)) == nelts / 2
                    && TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg2)) == nelts / 2);
        if (TREE_CODE (arg1) != VECTOR_CST
            || TREE_CODE (arg2) != VECTOR_CST)
          return NULL_TREE;

        elts = XALLOCAVEC (tree, nelts);
        if (!vec_cst_ctor_to_array (arg1, elts)
            || !vec_cst_ctor_to_array (arg2, elts + nelts / 2))
          return NULL_TREE;

        for (i = 0; i < nelts; i++)
          {
            elts[i] = fold_convert_const (code == VEC_PACK_TRUNC_EXPR
                                          ? NOP_EXPR : FIX_TRUNC_EXPR,
                                          TREE_TYPE (type), elts[i]);
            if (elts[i] == NULL_TREE || !CONSTANT_CLASS_P (elts[i]))
              return NULL_TREE;
          }

        return build_vector (type, elts);
      }

    case VEC_WIDEN_MULT_LO_EXPR:
    case VEC_WIDEN_MULT_HI_EXPR:
    case VEC_WIDEN_MULT_EVEN_EXPR:
    case VEC_WIDEN_MULT_ODD_EXPR:
      {
        unsigned int nelts = TYPE_VECTOR_SUBPARTS (type);
        unsigned int out, ofs, scale;
        tree *elts;

        gcc_assert (TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg1)) == nelts * 2
                    && TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg2)) == nelts * 2);
        if (TREE_CODE (arg1) != VECTOR_CST || TREE_CODE (arg2) != VECTOR_CST)
          return NULL_TREE;

        elts = XALLOCAVEC (tree, nelts * 4);
        if (!vec_cst_ctor_to_array (arg1, elts)
            || !vec_cst_ctor_to_array (arg2, elts + nelts * 2))
          return NULL_TREE;

        if (code == VEC_WIDEN_MULT_LO_EXPR)
          scale = 0, ofs = BYTES_BIG_ENDIAN ? nelts : 0;
        else if (code == VEC_WIDEN_MULT_HI_EXPR)
          scale = 0, ofs = BYTES_BIG_ENDIAN ? 0 : nelts;
        else if (code == VEC_WIDEN_MULT_EVEN_EXPR)
          scale = 1, ofs = 0;
        else /* if (code == VEC_WIDEN_MULT_ODD_EXPR) */
          scale = 1, ofs = 1;

        for (out = 0; out < nelts; out++)
          {
            unsigned int in1 = (out << scale) + ofs;
            unsigned int in2 = in1 + nelts * 2;
            tree t1, t2;

            t1 = fold_convert_const (NOP_EXPR, TREE_TYPE (type), elts[in1]);
            t2 = fold_convert_const (NOP_EXPR, TREE_TYPE (type), elts[in2]);

            if (t1 == NULL_TREE || t2 == NULL_TREE)
              return NULL_TREE;
            elts[out] = const_binop (MULT_EXPR, t1, t2);
            if (elts[out] == NULL_TREE || !CONSTANT_CLASS_P (elts[out]))
              return NULL_TREE;
          }

        return build_vector (type, elts);
      }

    default:;
    }

  if (TREE_CODE_CLASS (code) != tcc_binary)
    return NULL_TREE;

  /* Make sure type and arg0 have the same saturating flag.  */
  gcc_checking_assert (TYPE_SATURATING (type)
                       == TYPE_SATURATING (TREE_TYPE (arg1)));

  return const_binop (code, arg1, arg2);
}
/* Compute CODE ARG1 with resulting type TYPE with ARG1 being constant.
   Return zero if computing the constants is not possible.  */

tree
const_unop (enum tree_code code, tree type, tree arg0)
{
  switch (code)
    {
    CASE_CONVERT:
    case FLOAT_EXPR:
    case FIX_TRUNC_EXPR:
    case FIXED_CONVERT_EXPR:
      return fold_convert_const (code, type, arg0);

    case ADDR_SPACE_CONVERT_EXPR:
      if (integer_zerop (arg0))
        return fold_convert_const (code, type, arg0);
      break;

    case VIEW_CONVERT_EXPR:
      return fold_view_convert_expr (type, arg0);

    case NEGATE_EXPR:
      {
        /* Can't call fold_negate_const directly here as that doesn't
           handle all cases and we might not be able to negate some
           constants.  */
        tree tem = fold_negate_expr (UNKNOWN_LOCATION, arg0);
        if (tem && CONSTANT_CLASS_P (tem))
          return tem;
        break;
      }

    case ABS_EXPR:
      if (TREE_CODE (arg0) == INTEGER_CST || TREE_CODE (arg0) == REAL_CST)
        return fold_abs_const (arg0, type);
      break;

    case CONJ_EXPR:
      if (TREE_CODE (arg0) == COMPLEX_CST)
        {
          tree ipart = fold_negate_const (TREE_IMAGPART (arg0),
                                          TREE_TYPE (type));
          return build_complex (type, TREE_REALPART (arg0), ipart);
        }
      break;

    case BIT_NOT_EXPR:
      if (TREE_CODE (arg0) == INTEGER_CST)
        return fold_not_const (arg0, type);
      /* Perform BIT_NOT_EXPR on each element individually.  */
      else if (TREE_CODE (arg0) == VECTOR_CST)
        {
          tree *elements;
          tree elem;
          unsigned count = VECTOR_CST_NELTS (arg0), i;

          elements = XALLOCAVEC (tree, count);
          for (i = 0; i < count; i++)
            {
              elem = VECTOR_CST_ELT (arg0, i);
              elem = const_unop (BIT_NOT_EXPR, TREE_TYPE (type), elem);
              if (elem == NULL_TREE)
                break;
              elements[i] = elem;
            }
          if (i == count)
            return build_vector (type, elements);
        }
      break;

    case TRUTH_NOT_EXPR:
      if (TREE_CODE (arg0) == INTEGER_CST)
        return constant_boolean_node (integer_zerop (arg0), type);
      break;

    case REALPART_EXPR:
      if (TREE_CODE (arg0) == COMPLEX_CST)
        return fold_convert (type, TREE_REALPART (arg0));
      break;

    case IMAGPART_EXPR:
      if (TREE_CODE (arg0) == COMPLEX_CST)
        return fold_convert (type, TREE_IMAGPART (arg0));
      break;

    case VEC_UNPACK_LO_EXPR:
    case VEC_UNPACK_HI_EXPR:
    case VEC_UNPACK_FLOAT_LO_EXPR:
    case VEC_UNPACK_FLOAT_HI_EXPR:
      {
        unsigned int nelts = TYPE_VECTOR_SUBPARTS (type), i;
        tree *elts;
        enum tree_code subcode;

        gcc_assert (TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0)) == nelts * 2);
        if (TREE_CODE (arg0) != VECTOR_CST)
          return NULL_TREE;

        elts = XALLOCAVEC (tree, nelts * 2);
        if (!vec_cst_ctor_to_array (arg0, elts))
          return NULL_TREE;

        if ((!BYTES_BIG_ENDIAN) ^ (code == VEC_UNPACK_LO_EXPR
                                   || code == VEC_UNPACK_FLOAT_LO_EXPR))
          elts += nelts;

        if (code == VEC_UNPACK_LO_EXPR || code == VEC_UNPACK_HI_EXPR)
          subcode = NOP_EXPR;
        else
          subcode = FLOAT_EXPR;

        for (i = 0; i < nelts; i++)
          {
            elts[i] = fold_convert_const (subcode, TREE_TYPE (type), elts[i]);
            if (elts[i] == NULL_TREE || !CONSTANT_CLASS_P (elts[i]))
              return NULL_TREE;
          }

        return build_vector (type, elts);
      }

    case REDUC_MIN_EXPR:
    case REDUC_MAX_EXPR:
    case REDUC_PLUS_EXPR:
      {
        unsigned int nelts, i;
        tree *elts;
        enum tree_code subcode;

        if (TREE_CODE (arg0) != VECTOR_CST)
          return NULL_TREE;
        nelts = TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0));

        elts = XALLOCAVEC (tree, nelts);
        if (!vec_cst_ctor_to_array (arg0, elts))
          return NULL_TREE;

        switch (code)
          {
          case REDUC_MIN_EXPR: subcode = MIN_EXPR; break;
          case REDUC_MAX_EXPR: subcode = MAX_EXPR; break;
          case REDUC_PLUS_EXPR: subcode = PLUS_EXPR; break;
          default: gcc_unreachable ();
          }

        for (i = 1; i < nelts; i++)
          {
            elts[0] = const_binop (subcode, elts[0], elts[i]);
            if (elts[0] == NULL_TREE || !CONSTANT_CLASS_P (elts[0]))
              return NULL_TREE;
          }

        return elts[0];
      }

    default:
      break;
    }

  return NULL_TREE;
}
/* Create a sizetype INT_CST node with NUMBER sign extended.  KIND
   indicates which particular sizetype to create.  */

tree
size_int_kind (HOST_WIDE_INT number, enum size_type_kind kind)
{
  return build_int_cst (sizetype_tab[(int) kind], number);
}
/* Combine operands OP1 and OP2 with arithmetic operation CODE.  CODE
   is a tree code.  The type of the result is taken from the operands.
   Both must be equivalent integer types, ala int_binop_types_match_p.
   If the operands are constant, so is the result.  */

tree
size_binop_loc (location_t loc, enum tree_code code, tree arg0, tree arg1)
{
  tree type = TREE_TYPE (arg0);

  if (arg0 == error_mark_node || arg1 == error_mark_node)
    return error_mark_node;

  gcc_assert (int_binop_types_match_p (code, TREE_TYPE (arg0),
                                       TREE_TYPE (arg1)));

  /* Handle the special case of two integer constants faster.  */
  if (TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
    {
      /* And some specific cases even faster than that.  */
      if (code == PLUS_EXPR)
        {
          if (integer_zerop (arg0) && !TREE_OVERFLOW (arg0))
            return arg1;
          if (integer_zerop (arg1) && !TREE_OVERFLOW (arg1))
            return arg0;
        }
      else if (code == MINUS_EXPR)
        {
          if (integer_zerop (arg1) && !TREE_OVERFLOW (arg1))
            return arg0;
        }
      else if (code == MULT_EXPR)
        {
          if (integer_onep (arg0) && !TREE_OVERFLOW (arg0))
            return arg1;
        }

      /* Handle general case of two integer constants.  For sizetype
         constant calculations we always want to know about overflow,
         even in the unsigned case.  */
      return int_const_binop_1 (code, arg0, arg1, -1);
    }

  return fold_build2_loc (loc, code, type, arg0, arg1);
}
/* Given two values, either both of sizetype or both of bitsizetype,
   compute the difference between the two values.  Return the value
   in signed type corresponding to the type of the operands.  */

tree
size_diffop_loc (location_t loc, tree arg0, tree arg1)
{
  tree type = TREE_TYPE (arg0);
  tree ctype;

  gcc_assert (int_binop_types_match_p (MINUS_EXPR, TREE_TYPE (arg0),
                                       TREE_TYPE (arg1)));

  /* If the type is already signed, just do the simple thing.  */
  if (!TYPE_UNSIGNED (type))
    return size_binop_loc (loc, MINUS_EXPR, arg0, arg1);

  if (type == sizetype)
    ctype = ssizetype;
  else if (type == bitsizetype)
    ctype = sbitsizetype;
  else
    ctype = signed_type_for (type);

  /* If either operand is not a constant, do the conversions to the signed
     type and subtract.  The hardware will do the right thing with any
     overflow in the subtraction.  */
  if (TREE_CODE (arg0) != INTEGER_CST || TREE_CODE (arg1) != INTEGER_CST)
    return size_binop_loc (loc, MINUS_EXPR,
                           fold_convert_loc (loc, ctype, arg0),
                           fold_convert_loc (loc, ctype, arg1));

  /* If ARG0 is larger than ARG1, subtract and return the result in CTYPE.
     Otherwise, subtract the other way, convert to CTYPE (we know that can't
     overflow) and negate (which can't either).  Special-case a result
     of zero while we're here.  */
  if (tree_int_cst_equal (arg0, arg1))
    return build_int_cst (ctype, 0);
  else if (tree_int_cst_lt (arg1, arg0))
    return fold_convert_loc (loc, ctype,
                             size_binop_loc (loc, MINUS_EXPR, arg0, arg1));
  else
    return size_binop_loc (loc, MINUS_EXPR, build_int_cst (ctype, 0),
                           fold_convert_loc (loc, ctype,
                                             size_binop_loc (loc,
                                                             MINUS_EXPR,
                                                             arg1, arg0)));
}
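
/* Worked example (illustrative, not part of the original source): with
   sizetype constants arg0 = 4 and arg1 = 10, arg0 < arg1, so the code
   computes 10 - 4 = 6 in the unsigned sizetype (where it cannot
   overflow), converts to ssizetype and negates, yielding -6 instead
   of a huge wrapped unsigned difference.  */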
/* A subroutine of fold_convert_const handling conversions of an
   INTEGER_CST to another integer type.  */

static tree
fold_convert_const_int_from_int (tree type, const_tree arg1)
{
  /* Given an integer constant, make new constant with new type,
     appropriately sign-extended or truncated.  Use widest_int
     so that any extension is done according to ARG1's type.  */
  return force_fit_type (type, wi::to_widest (arg1),
                         !POINTER_TYPE_P (TREE_TYPE (arg1)),
                         TREE_OVERFLOW (arg1));
}
/* A subroutine of fold_convert_const handling conversions of a REAL_CST
   to an integer type.  */

static tree
fold_convert_const_int_from_real (enum tree_code code, tree type, const_tree arg1)
{
  bool overflow = false;
  tree t;

  /* The following code implements the floating point to integer
     conversion rules required by the Java Language Specification,
     that IEEE NaNs are mapped to zero and values that overflow
     the target precision saturate, i.e. values greater than
     INT_MAX are mapped to INT_MAX, and values less than INT_MIN
     are mapped to INT_MIN.  These semantics are allowed by the
     C and C++ standards that simply state that the behavior of
     FP-to-integer conversion is unspecified upon overflow.  */

  wide_int val;
  REAL_VALUE_TYPE r;
  REAL_VALUE_TYPE x = TREE_REAL_CST (arg1);

  switch (code)
    {
    case FIX_TRUNC_EXPR:
      real_trunc (&r, VOIDmode, &x);
      break;

    default:
      gcc_unreachable ();
    }

  /* If R is NaN, return zero and show we have an overflow.  */
  if (REAL_VALUE_ISNAN (r))
    {
      overflow = true;
      val = wi::zero (TYPE_PRECISION (type));
    }

  /* See if R is less than the lower bound or greater than the
     upper bound.  */

  if (! overflow)
    {
      tree lt = TYPE_MIN_VALUE (type);
      REAL_VALUE_TYPE l = real_value_from_int_cst (NULL_TREE, lt);
      if (REAL_VALUES_LESS (r, l))
        {
          overflow = true;
          val = lt;
        }
    }

  if (! overflow)
    {
      tree ut = TYPE_MAX_VALUE (type);
      if (ut)
        {
          REAL_VALUE_TYPE u = real_value_from_int_cst (NULL_TREE, ut);
          if (REAL_VALUES_LESS (u, r))
            {
              overflow = true;
              val = ut;
            }
        }
    }

  if (! overflow)
    val = real_to_integer (&r, &overflow, TYPE_PRECISION (type));

  t = force_fit_type (type, val, -1, overflow | TREE_OVERFLOW (arg1));
  return t;
}
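
/* Worked example (illustrative, not part of the original source):
   folding (int) 3.9 truncates toward zero to 3; (int) 1e30 exceeds
   INT_MAX, so the result saturates to INT_MAX with the overflow flag
   set; a NaN operand folds to 0, likewise flagged as overflowing.  */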
/* A subroutine of fold_convert_const handling conversions of a
   FIXED_CST to an integer type.  */

static tree
fold_convert_const_int_from_fixed (tree type, const_tree arg1)
{
  tree t;
  double_int temp, temp_trunc;
  unsigned int mode;

  /* Right shift FIXED_CST to temp by fbit.  */
  temp = TREE_FIXED_CST (arg1).data;
  mode = TREE_FIXED_CST (arg1).mode;
  if (GET_MODE_FBIT (mode) < HOST_BITS_PER_DOUBLE_INT)
    {
      temp = temp.rshift (GET_MODE_FBIT (mode),
                          HOST_BITS_PER_DOUBLE_INT,
                          SIGNED_FIXED_POINT_MODE_P (mode));

      /* Left shift temp to temp_trunc by fbit.  */
      temp_trunc = temp.lshift (GET_MODE_FBIT (mode),
                                HOST_BITS_PER_DOUBLE_INT,
                                SIGNED_FIXED_POINT_MODE_P (mode));
    }
  else
    {
      temp = double_int_zero;
      temp_trunc = double_int_zero;
    }

  /* If FIXED_CST is negative, we need to round the value toward 0 by
     adding 1 to temp when the discarded fractional bits are not zero.  */
  if (SIGNED_FIXED_POINT_MODE_P (mode)
      && temp_trunc.is_negative ()
      && TREE_FIXED_CST (arg1).data != temp_trunc)
    temp += double_int_one;

  /* Given a fixed-point constant, make new constant with new type,
     appropriately sign-extended or truncated.  */
  t = force_fit_type (type, temp, -1,
                      (temp.is_negative ()
                       && (TYPE_UNSIGNED (type)
                           < TYPE_UNSIGNED (TREE_TYPE (arg1))))
                      | TREE_OVERFLOW (arg1));

  return t;
}
1956 /* A subroutine of fold_convert_const handling conversions a REAL_CST
1957 to another floating point type. */
1959 static tree
1960 fold_convert_const_real_from_real (tree type, const_tree arg1)
1962 REAL_VALUE_TYPE value;
1963 tree t;
1965 real_convert (&value, TYPE_MODE (type), &TREE_REAL_CST (arg1));
1966 t = build_real (type, value);
1968 /* If converting an infinity or NAN to a representation that doesn't
1969 have one, set the overflow bit so that we can produce some kind of
1970 error message at the appropriate point if necessary. It's not the
1971 most user-friendly message, but it's better than nothing. */
1972 if (REAL_VALUE_ISINF (TREE_REAL_CST (arg1))
1973 && !MODE_HAS_INFINITIES (TYPE_MODE (type)))
1974 TREE_OVERFLOW (t) = 1;
1975 else if (REAL_VALUE_ISNAN (TREE_REAL_CST (arg1))
1976 && !MODE_HAS_NANS (TYPE_MODE (type)))
1977 TREE_OVERFLOW (t) = 1;
1978 /* Regular overflow: the conversion produced an infinity in a mode
1979 that cannot represent infinities. */
1980 else if (!MODE_HAS_INFINITIES (TYPE_MODE (type))
1981 && REAL_VALUE_ISINF (value)
1982 && !REAL_VALUE_ISINF (TREE_REAL_CST (arg1)))
1983 TREE_OVERFLOW (t) = 1;
1984 else
1985 TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1);
1986 return t;
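/* Illustrative example of the "regular overflow" case: narrowing a
   finite REAL_CST such as 1e308 to a float type rounds VALUE to +Inf.
   On an IEEE target the float mode has infinities and nothing special
   happens, but on a hypothetical mode without them the third branch
   above would set TREE_OVERFLOW, since a finite input became
   infinite.  */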
1989 /* A subroutine of fold_convert_const handling conversions of a FIXED_CST
1990 to a floating point type. */
1992 static tree
1993 fold_convert_const_real_from_fixed (tree type, const_tree arg1)
1995 REAL_VALUE_TYPE value;
1996 tree t;
1998 real_convert_from_fixed (&value, TYPE_MODE (type), &TREE_FIXED_CST (arg1));
1999 t = build_real (type, value);
2001 TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1);
2002 return t;
2005 /* A subroutine of fold_convert_const handling conversions of a FIXED_CST
2006 to another fixed-point type. */
2008 static tree
2009 fold_convert_const_fixed_from_fixed (tree type, const_tree arg1)
2011 FIXED_VALUE_TYPE value;
2012 tree t;
2013 bool overflow_p;
2015 overflow_p = fixed_convert (&value, TYPE_MODE (type), &TREE_FIXED_CST (arg1),
2016 TYPE_SATURATING (type));
2017 t = build_fixed (type, value);
2019 /* Propagate overflow flags. */
2020 if (overflow_p | TREE_OVERFLOW (arg1))
2021 TREE_OVERFLOW (t) = 1;
2022 return t;
2025 /* A subroutine of fold_convert_const handling conversions of an INTEGER_CST
2026 to a fixed-point type. */
2028 static tree
2029 fold_convert_const_fixed_from_int (tree type, const_tree arg1)
2031 FIXED_VALUE_TYPE value;
2032 tree t;
2033 bool overflow_p;
2034 double_int di;
2036 gcc_assert (TREE_INT_CST_NUNITS (arg1) <= 2);
2038 di.low = TREE_INT_CST_ELT (arg1, 0);
2039 if (TREE_INT_CST_NUNITS (arg1) == 1)
2040 di.high = (HOST_WIDE_INT) di.low < 0 ? (HOST_WIDE_INT) -1 : 0;
2041 else
2042 di.high = TREE_INT_CST_ELT (arg1, 1);
2044 overflow_p = fixed_convert_from_int (&value, TYPE_MODE (type), di,
2045 TYPE_UNSIGNED (TREE_TYPE (arg1)),
2046 TYPE_SATURATING (type));
2047 t = build_fixed (type, value);
2049 /* Propagate overflow flags. */
2050 if (overflow_p | TREE_OVERFLOW (arg1))
2051 TREE_OVERFLOW (t) = 1;
2052 return t;
2055 /* A subroutine of fold_convert_const handling conversions of a REAL_CST
2056 to a fixed-point type. */
2058 static tree
2059 fold_convert_const_fixed_from_real (tree type, const_tree arg1)
2061 FIXED_VALUE_TYPE value;
2062 tree t;
2063 bool overflow_p;
2065 overflow_p = fixed_convert_from_real (&value, TYPE_MODE (type),
2066 &TREE_REAL_CST (arg1),
2067 TYPE_SATURATING (type));
2068 t = build_fixed (type, value);
2070 /* Propagate overflow flags. */
2071 if (overflow_p | TREE_OVERFLOW (arg1))
2072 TREE_OVERFLOW (t) = 1;
2073 return t;
2076 /* Attempt to fold type conversion operation CODE of expression ARG1 to
2077 type TYPE. If no simplification can be done return NULL_TREE. */
2079 static tree
2080 fold_convert_const (enum tree_code code, tree type, tree arg1)
2082 if (TREE_TYPE (arg1) == type)
2083 return arg1;
2085 if (POINTER_TYPE_P (type) || INTEGRAL_TYPE_P (type)
2086 || TREE_CODE (type) == OFFSET_TYPE)
2088 if (TREE_CODE (arg1) == INTEGER_CST)
2089 return fold_convert_const_int_from_int (type, arg1);
2090 else if (TREE_CODE (arg1) == REAL_CST)
2091 return fold_convert_const_int_from_real (code, type, arg1);
2092 else if (TREE_CODE (arg1) == FIXED_CST)
2093 return fold_convert_const_int_from_fixed (type, arg1);
2095 else if (TREE_CODE (type) == REAL_TYPE)
2097 if (TREE_CODE (arg1) == INTEGER_CST)
2098 return build_real_from_int_cst (type, arg1);
2099 else if (TREE_CODE (arg1) == REAL_CST)
2100 return fold_convert_const_real_from_real (type, arg1);
2101 else if (TREE_CODE (arg1) == FIXED_CST)
2102 return fold_convert_const_real_from_fixed (type, arg1);
2104 else if (TREE_CODE (type) == FIXED_POINT_TYPE)
2106 if (TREE_CODE (arg1) == FIXED_CST)
2107 return fold_convert_const_fixed_from_fixed (type, arg1);
2108 else if (TREE_CODE (arg1) == INTEGER_CST)
2109 return fold_convert_const_fixed_from_int (type, arg1);
2110 else if (TREE_CODE (arg1) == REAL_CST)
2111 return fold_convert_const_fixed_from_real (type, arg1);
2113 return NULL_TREE;
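/* Usage sketch (hypothetical caller): constant-folding the C cast
   (int) 2.5 amounts to

     fold_convert_const (FIX_TRUNC_EXPR, integer_type_node, arg);

   with ARG a REAL_CST holding 2.5.  TYPE is integral and ARG is a
   REAL_CST, so the first branch dispatches to
   fold_convert_const_int_from_real and an INTEGER_CST of value 2
   comes back.  */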
2116 /* Construct a vector of zero elements of vector type TYPE. */
2118 static tree
2119 build_zero_vector (tree type)
2121 tree t;
2123 t = fold_convert_const (NOP_EXPR, TREE_TYPE (type), integer_zero_node);
2124 return build_vector_from_val (type, t);
2127 /* Returns true if ARG is convertible to TYPE using a NOP_EXPR. */
2129 bool
2130 fold_convertible_p (const_tree type, const_tree arg)
2132 tree orig = TREE_TYPE (arg);
2134 if (type == orig)
2135 return true;
2137 if (TREE_CODE (arg) == ERROR_MARK
2138 || TREE_CODE (type) == ERROR_MARK
2139 || TREE_CODE (orig) == ERROR_MARK)
2140 return false;
2142 if (TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (orig))
2143 return true;
2145 switch (TREE_CODE (type))
2147 case INTEGER_TYPE: case ENUMERAL_TYPE: case BOOLEAN_TYPE:
2148 case POINTER_TYPE: case REFERENCE_TYPE:
2149 case OFFSET_TYPE:
2150 if (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig)
2151 || TREE_CODE (orig) == OFFSET_TYPE)
2152 return true;
2153 return (TREE_CODE (orig) == VECTOR_TYPE
2154 && tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (orig)));
2156 case REAL_TYPE:
2157 case FIXED_POINT_TYPE:
2158 case COMPLEX_TYPE:
2159 case VECTOR_TYPE:
2160 case VOID_TYPE:
2161 return TREE_CODE (type) == TREE_CODE (orig);
2163 default:
2164 return false;
2168 /* Convert expression ARG to type TYPE. Used by the middle-end for
2169 simple conversions in preference to calling the front-end's convert. */
2171 tree
2172 fold_convert_loc (location_t loc, tree type, tree arg)
2174 tree orig = TREE_TYPE (arg);
2175 tree tem;
2177 if (type == orig)
2178 return arg;
2180 if (TREE_CODE (arg) == ERROR_MARK
2181 || TREE_CODE (type) == ERROR_MARK
2182 || TREE_CODE (orig) == ERROR_MARK)
2183 return error_mark_node;
2185 switch (TREE_CODE (type))
2187 case POINTER_TYPE:
2188 case REFERENCE_TYPE:
2189 /* Handle conversions between pointers to different address spaces. */
2190 if (POINTER_TYPE_P (orig)
2191 && (TYPE_ADDR_SPACE (TREE_TYPE (type))
2192 != TYPE_ADDR_SPACE (TREE_TYPE (orig))))
2193 return fold_build1_loc (loc, ADDR_SPACE_CONVERT_EXPR, type, arg);
2194 /* fall through */
2196 case INTEGER_TYPE: case ENUMERAL_TYPE: case BOOLEAN_TYPE:
2197 case OFFSET_TYPE:
2198 if (TREE_CODE (arg) == INTEGER_CST)
2200 tem = fold_convert_const (NOP_EXPR, type, arg);
2201 if (tem != NULL_TREE)
2202 return tem;
2204 if (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig)
2205 || TREE_CODE (orig) == OFFSET_TYPE)
2206 return fold_build1_loc (loc, NOP_EXPR, type, arg);
2207 if (TREE_CODE (orig) == COMPLEX_TYPE)
2208 return fold_convert_loc (loc, type,
2209 fold_build1_loc (loc, REALPART_EXPR,
2210 TREE_TYPE (orig), arg));
2211 gcc_assert (TREE_CODE (orig) == VECTOR_TYPE
2212 && tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (orig)));
2213 return fold_build1_loc (loc, NOP_EXPR, type, arg);
2215 case REAL_TYPE:
2216 if (TREE_CODE (arg) == INTEGER_CST)
2218 tem = fold_convert_const (FLOAT_EXPR, type, arg);
2219 if (tem != NULL_TREE)
2220 return tem;
2222 else if (TREE_CODE (arg) == REAL_CST)
2224 tem = fold_convert_const (NOP_EXPR, type, arg);
2225 if (tem != NULL_TREE)
2226 return tem;
2228 else if (TREE_CODE (arg) == FIXED_CST)
2230 tem = fold_convert_const (FIXED_CONVERT_EXPR, type, arg);
2231 if (tem != NULL_TREE)
2232 return tem;
2235 switch (TREE_CODE (orig))
2237 case INTEGER_TYPE:
2238 case BOOLEAN_TYPE: case ENUMERAL_TYPE:
2239 case POINTER_TYPE: case REFERENCE_TYPE:
2240 return fold_build1_loc (loc, FLOAT_EXPR, type, arg);
2242 case REAL_TYPE:
2243 return fold_build1_loc (loc, NOP_EXPR, type, arg);
2245 case FIXED_POINT_TYPE:
2246 return fold_build1_loc (loc, FIXED_CONVERT_EXPR, type, arg);
2248 case COMPLEX_TYPE:
2249 tem = fold_build1_loc (loc, REALPART_EXPR, TREE_TYPE (orig), arg);
2250 return fold_convert_loc (loc, type, tem);
2252 default:
2253 gcc_unreachable ();
2256 case FIXED_POINT_TYPE:
2257 if (TREE_CODE (arg) == FIXED_CST || TREE_CODE (arg) == INTEGER_CST
2258 || TREE_CODE (arg) == REAL_CST)
2260 tem = fold_convert_const (FIXED_CONVERT_EXPR, type, arg);
2261 if (tem != NULL_TREE)
2262 goto fold_convert_exit;
2265 switch (TREE_CODE (orig))
2267 case FIXED_POINT_TYPE:
2268 case INTEGER_TYPE:
2269 case ENUMERAL_TYPE:
2270 case BOOLEAN_TYPE:
2271 case REAL_TYPE:
2272 return fold_build1_loc (loc, FIXED_CONVERT_EXPR, type, arg);
2274 case COMPLEX_TYPE:
2275 tem = fold_build1_loc (loc, REALPART_EXPR, TREE_TYPE (orig), arg);
2276 return fold_convert_loc (loc, type, tem);
2278 default:
2279 gcc_unreachable ();
2282 case COMPLEX_TYPE:
2283 switch (TREE_CODE (orig))
2285 case INTEGER_TYPE:
2286 case BOOLEAN_TYPE: case ENUMERAL_TYPE:
2287 case POINTER_TYPE: case REFERENCE_TYPE:
2288 case REAL_TYPE:
2289 case FIXED_POINT_TYPE:
2290 return fold_build2_loc (loc, COMPLEX_EXPR, type,
2291 fold_convert_loc (loc, TREE_TYPE (type), arg),
2292 fold_convert_loc (loc, TREE_TYPE (type),
2293 integer_zero_node));
2294 case COMPLEX_TYPE:
2296 tree rpart, ipart;
2298 if (TREE_CODE (arg) == COMPLEX_EXPR)
2300 rpart = fold_convert_loc (loc, TREE_TYPE (type),
2301 TREE_OPERAND (arg, 0));
2302 ipart = fold_convert_loc (loc, TREE_TYPE (type),
2303 TREE_OPERAND (arg, 1));
2304 return fold_build2_loc (loc, COMPLEX_EXPR, type, rpart, ipart);
2307 arg = save_expr (arg);
2308 rpart = fold_build1_loc (loc, REALPART_EXPR, TREE_TYPE (orig), arg);
2309 ipart = fold_build1_loc (loc, IMAGPART_EXPR, TREE_TYPE (orig), arg);
2310 rpart = fold_convert_loc (loc, TREE_TYPE (type), rpart);
2311 ipart = fold_convert_loc (loc, TREE_TYPE (type), ipart);
2312 return fold_build2_loc (loc, COMPLEX_EXPR, type, rpart, ipart);
2315 default:
2316 gcc_unreachable ();
2319 case VECTOR_TYPE:
2320 if (integer_zerop (arg))
2321 return build_zero_vector (type);
2322 gcc_assert (tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (orig)));
2323 gcc_assert (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig)
2324 || TREE_CODE (orig) == VECTOR_TYPE);
2325 return fold_build1_loc (loc, VIEW_CONVERT_EXPR, type, arg);
2327 case VOID_TYPE:
2328 tem = fold_ignored_result (arg);
2329 return fold_build1_loc (loc, NOP_EXPR, type, tem);
2331 default:
2332 if (TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (orig))
2333 return fold_build1_loc (loc, NOP_EXPR, type, arg);
2334 gcc_unreachable ();
2336 fold_convert_exit:
2337 protected_set_expr_location_unshare (tem, loc);
2338 return tem;
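/* Illustrative examples: converting a scalar to a complex type, say
   (_Complex double) n, takes the COMPLEX_TYPE case above and builds
   COMPLEX_EXPR <(double) n, 0.0>; the reverse direction, converting
   a complex value to double, recurses on its REALPART_EXPR and so
   simply drops the imaginary part.  */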
2341 /* Return false if expr can be assumed not to be an lvalue, true
2342 otherwise. */
2344 static bool
2345 maybe_lvalue_p (const_tree x)
2347 /* We only need to wrap lvalue tree codes. */
2348 switch (TREE_CODE (x))
2350 case VAR_DECL:
2351 case PARM_DECL:
2352 case RESULT_DECL:
2353 case LABEL_DECL:
2354 case FUNCTION_DECL:
2355 case SSA_NAME:
2357 case COMPONENT_REF:
2358 case MEM_REF:
2359 case INDIRECT_REF:
2360 case ARRAY_REF:
2361 case ARRAY_RANGE_REF:
2362 case BIT_FIELD_REF:
2363 case OBJ_TYPE_REF:
2365 case REALPART_EXPR:
2366 case IMAGPART_EXPR:
2367 case PREINCREMENT_EXPR:
2368 case PREDECREMENT_EXPR:
2369 case SAVE_EXPR:
2370 case TRY_CATCH_EXPR:
2371 case WITH_CLEANUP_EXPR:
2372 case COMPOUND_EXPR:
2373 case MODIFY_EXPR:
2374 case TARGET_EXPR:
2375 case COND_EXPR:
2376 case BIND_EXPR:
2377 break;
2379 default:
2380 /* Assume the worst for front-end tree codes. */
2381 if ((int)TREE_CODE (x) >= NUM_TREE_CODES)
2382 break;
2383 return false;
2386 return true;
2389 /* Return an expr equal to X but certainly not valid as an lvalue. */
2391 tree
2392 non_lvalue_loc (location_t loc, tree x)
2394 /* While we are in GIMPLE, NON_LVALUE_EXPR doesn't mean anything to
2395 us. */
2396 if (in_gimple_form)
2397 return x;
2399 if (! maybe_lvalue_p (x))
2400 return x;
2401 return build1_loc (loc, NON_LVALUE_EXPR, TREE_TYPE (x), x);
2404 /* When pedantic, return an expr equal to X but certainly not valid as a
2405 pedantic lvalue. Otherwise, return X. */
2407 static tree
2408 pedantic_non_lvalue_loc (location_t loc, tree x)
2410 return protected_set_expr_location_unshare (x, loc);
2413 /* Given a tree comparison code, return the code that is the logical inverse.
2414 It is generally not safe to do this for floating-point comparisons, except
2415 for EQ_EXPR, NE_EXPR, ORDERED_EXPR and UNORDERED_EXPR, so we return
2416 ERROR_MARK in this case. */
2418 enum tree_code
2419 invert_tree_comparison (enum tree_code code, bool honor_nans)
2421 if (honor_nans && flag_trapping_math && code != EQ_EXPR && code != NE_EXPR
2422 && code != ORDERED_EXPR && code != UNORDERED_EXPR)
2423 return ERROR_MARK;
2425 switch (code)
2427 case EQ_EXPR:
2428 return NE_EXPR;
2429 case NE_EXPR:
2430 return EQ_EXPR;
2431 case GT_EXPR:
2432 return honor_nans ? UNLE_EXPR : LE_EXPR;
2433 case GE_EXPR:
2434 return honor_nans ? UNLT_EXPR : LT_EXPR;
2435 case LT_EXPR:
2436 return honor_nans ? UNGE_EXPR : GE_EXPR;
2437 case LE_EXPR:
2438 return honor_nans ? UNGT_EXPR : GT_EXPR;
2439 case LTGT_EXPR:
2440 return UNEQ_EXPR;
2441 case UNEQ_EXPR:
2442 return LTGT_EXPR;
2443 case UNGT_EXPR:
2444 return LE_EXPR;
2445 case UNGE_EXPR:
2446 return LT_EXPR;
2447 case UNLT_EXPR:
2448 return GE_EXPR;
2449 case UNLE_EXPR:
2450 return GT_EXPR;
2451 case ORDERED_EXPR:
2452 return UNORDERED_EXPR;
2453 case UNORDERED_EXPR:
2454 return ORDERED_EXPR;
2455 default:
2456 gcc_unreachable ();
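/* Illustrative examples: for integer operands,
   invert_tree_comparison (LT_EXPR, false) is GE_EXPR, so !(a < b)
   becomes a >= b.  When NaNs are honored but -fno-trapping-math is
   in effect, LT_EXPR inverts to UNGE_EXPR, which is also true for
   unordered operands.  With both NaNs and trapping math, ERROR_MARK
   is returned because the unordered form would not trap where the
   original does.  */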
2460 /* Similar, but return the comparison that results if the operands are
2461 swapped. This is safe for floating-point. */
2463 enum tree_code
2464 swap_tree_comparison (enum tree_code code)
2466 switch (code)
2468 case EQ_EXPR:
2469 case NE_EXPR:
2470 case ORDERED_EXPR:
2471 case UNORDERED_EXPR:
2472 case LTGT_EXPR:
2473 case UNEQ_EXPR:
2474 return code;
2475 case GT_EXPR:
2476 return LT_EXPR;
2477 case GE_EXPR:
2478 return LE_EXPR;
2479 case LT_EXPR:
2480 return GT_EXPR;
2481 case LE_EXPR:
2482 return GE_EXPR;
2483 case UNGT_EXPR:
2484 return UNLT_EXPR;
2485 case UNGE_EXPR:
2486 return UNLE_EXPR;
2487 case UNLT_EXPR:
2488 return UNGT_EXPR;
2489 case UNLE_EXPR:
2490 return UNGE_EXPR;
2491 default:
2492 gcc_unreachable ();
2497 /* Convert a comparison tree code from an enum tree_code representation
2498 into a compcode bit-based encoding. This function is the inverse of
2499 compcode_to_comparison. */
2501 static enum comparison_code
2502 comparison_to_compcode (enum tree_code code)
2504 switch (code)
2506 case LT_EXPR:
2507 return COMPCODE_LT;
2508 case EQ_EXPR:
2509 return COMPCODE_EQ;
2510 case LE_EXPR:
2511 return COMPCODE_LE;
2512 case GT_EXPR:
2513 return COMPCODE_GT;
2514 case NE_EXPR:
2515 return COMPCODE_NE;
2516 case GE_EXPR:
2517 return COMPCODE_GE;
2518 case ORDERED_EXPR:
2519 return COMPCODE_ORD;
2520 case UNORDERED_EXPR:
2521 return COMPCODE_UNORD;
2522 case UNLT_EXPR:
2523 return COMPCODE_UNLT;
2524 case UNEQ_EXPR:
2525 return COMPCODE_UNEQ;
2526 case UNLE_EXPR:
2527 return COMPCODE_UNLE;
2528 case UNGT_EXPR:
2529 return COMPCODE_UNGT;
2530 case LTGT_EXPR:
2531 return COMPCODE_LTGT;
2532 case UNGE_EXPR:
2533 return COMPCODE_UNGE;
2534 default:
2535 gcc_unreachable ();
2539 /* Convert a compcode bit-based encoding of a comparison operator back
2540 to GCC's enum tree_code representation. This function is the
2541 inverse of comparison_to_compcode. */
2543 static enum tree_code
2544 compcode_to_comparison (enum comparison_code code)
2546 switch (code)
2548 case COMPCODE_LT:
2549 return LT_EXPR;
2550 case COMPCODE_EQ:
2551 return EQ_EXPR;
2552 case COMPCODE_LE:
2553 return LE_EXPR;
2554 case COMPCODE_GT:
2555 return GT_EXPR;
2556 case COMPCODE_NE:
2557 return NE_EXPR;
2558 case COMPCODE_GE:
2559 return GE_EXPR;
2560 case COMPCODE_ORD:
2561 return ORDERED_EXPR;
2562 case COMPCODE_UNORD:
2563 return UNORDERED_EXPR;
2564 case COMPCODE_UNLT:
2565 return UNLT_EXPR;
2566 case COMPCODE_UNEQ:
2567 return UNEQ_EXPR;
2568 case COMPCODE_UNLE:
2569 return UNLE_EXPR;
2570 case COMPCODE_UNGT:
2571 return UNGT_EXPR;
2572 case COMPCODE_LTGT:
2573 return LTGT_EXPR;
2574 case COMPCODE_UNGE:
2575 return UNGE_EXPR;
2576 default:
2577 gcc_unreachable ();
2581 /* Return a tree for the comparison which is the combination of
2582 doing the AND or OR (depending on CODE) of the two operations LCODE
2583 and RCODE on the identical operands LL_ARG and LR_ARG. Take into account
2584 the possibility of trapping if the mode has NaNs, and return NULL_TREE
2585 if this makes the transformation invalid. */
2587 tree
2588 combine_comparisons (location_t loc,
2589 enum tree_code code, enum tree_code lcode,
2590 enum tree_code rcode, tree truth_type,
2591 tree ll_arg, tree lr_arg)
2593 bool honor_nans = HONOR_NANS (ll_arg);
2594 enum comparison_code lcompcode = comparison_to_compcode (lcode);
2595 enum comparison_code rcompcode = comparison_to_compcode (rcode);
2596 int compcode;
2598 switch (code)
2600 case TRUTH_AND_EXPR: case TRUTH_ANDIF_EXPR:
2601 compcode = lcompcode & rcompcode;
2602 break;
2604 case TRUTH_OR_EXPR: case TRUTH_ORIF_EXPR:
2605 compcode = lcompcode | rcompcode;
2606 break;
2608 default:
2609 return NULL_TREE;
2612 if (!honor_nans)
2614 /* Eliminate unordered comparisons, as well as LTGT and ORD
2615 which are not used unless the mode has NaNs. */
2616 compcode &= ~COMPCODE_UNORD;
2617 if (compcode == COMPCODE_LTGT)
2618 compcode = COMPCODE_NE;
2619 else if (compcode == COMPCODE_ORD)
2620 compcode = COMPCODE_TRUE;
2622 else if (flag_trapping_math)
2624 /* Check that the original operation and the optimized ones will trap
2625 under the same condition. */
2626 bool ltrap = (lcompcode & COMPCODE_UNORD) == 0
2627 && (lcompcode != COMPCODE_EQ)
2628 && (lcompcode != COMPCODE_ORD);
2629 bool rtrap = (rcompcode & COMPCODE_UNORD) == 0
2630 && (rcompcode != COMPCODE_EQ)
2631 && (rcompcode != COMPCODE_ORD);
2632 bool trap = (compcode & COMPCODE_UNORD) == 0
2633 && (compcode != COMPCODE_EQ)
2634 && (compcode != COMPCODE_ORD);
2636 /* In a short-circuited boolean expression the LHS might be
2637 such that the RHS, if evaluated, will never trap. For
2638 example, in ORD (x, y) && (x < y), we evaluate the RHS only
2639 if neither x nor y is NaN. (This is a mixed blessing: for
2640 example, the expression above will never trap, hence
2641 optimizing it to x < y would be invalid). */
2642 if ((code == TRUTH_ORIF_EXPR && (lcompcode & COMPCODE_UNORD))
2643 || (code == TRUTH_ANDIF_EXPR && !(lcompcode & COMPCODE_UNORD)))
2644 rtrap = false;
2646 /* If the comparison was short-circuited, and only the RHS
2647 trapped, we may now generate a spurious trap. */
2648 if (rtrap && !ltrap
2649 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR))
2650 return NULL_TREE;
2652 /* If we changed the conditions that cause a trap, we lose. */
2653 if ((ltrap || rtrap) != trap)
2654 return NULL_TREE;
2657 if (compcode == COMPCODE_TRUE)
2658 return constant_boolean_node (true, truth_type);
2659 else if (compcode == COMPCODE_FALSE)
2660 return constant_boolean_node (false, truth_type);
2661 else
2663 enum tree_code tcode;
2665 tcode = compcode_to_comparison ((enum comparison_code) compcode);
2666 return fold_build2_loc (loc, tcode, truth_type, ll_arg, lr_arg);
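/* Worked example (illustrative): combining "a < b || a == b" gives
   lcompcode = COMPCODE_LT (1) and rcompcode = COMPCODE_EQ (2), and
   for TRUTH_ORIF_EXPR the bitwise OR yields 3 = COMPCODE_LE, so the
   pair folds to a <= b.  Likewise "a < b && a > b" ANDs 1 with 4,
   producing COMPCODE_FALSE, a constant false.  The bit-based
   encoding is what reduces these combinations to a single AND or
   OR.  */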
2670 /* Return nonzero if two operands (typically of the same tree node)
2671 are necessarily equal. If either argument has side-effects this
2672 function returns zero. FLAGS modifies behavior as follows:
2674 If OEP_ONLY_CONST is set, only return nonzero for constants.
2675 This function tests whether the operands are indistinguishable;
2676 it does not test whether they are equal using C's == operation.
2677 The distinction is important for IEEE floating point, because
2678 (1) -0.0 and 0.0 are distinguishable, but -0.0==0.0, and
2679 (2) two NaNs may be indistinguishable, but NaN!=NaN.
2681 If OEP_ONLY_CONST is unset, a VAR_DECL is considered equal to itself
2682 even though it may hold multiple values during a function.
2683 This is because a GCC tree node guarantees that nothing else is
2684 executed between the evaluation of its "operands" (which may often
2685 be evaluated in arbitrary order). Hence if the operands themselves
2686 don't side-effect, the VAR_DECLs, PARM_DECLs etc... must hold the
2687 same value in each operand/subexpression. Hence leaving OEP_ONLY_CONST
2688 unset means assuming isochronic (or instantaneous) tree equivalence.
2689 Unless comparing arbitrary expression trees, such as from different
2690 statements, this flag can usually be left unset.
2692 If OEP_PURE_SAME is set, then pure functions with identical arguments
2693 are considered the same. It is used when the caller has other ways
2694 to ensure that global memory is unchanged in between. */
2696 int
2697 operand_equal_p (const_tree arg0, const_tree arg1, unsigned int flags)
2699 /* If either is ERROR_MARK, they aren't equal. */
2700 if (TREE_CODE (arg0) == ERROR_MARK || TREE_CODE (arg1) == ERROR_MARK
2701 || TREE_TYPE (arg0) == error_mark_node
2702 || TREE_TYPE (arg1) == error_mark_node)
2703 return 0;
2705 /* Similar, if either does not have a type (like a released SSA name),
2706 they aren't equal. */
2707 if (!TREE_TYPE (arg0) || !TREE_TYPE (arg1))
2708 return 0;
2710 /* Check equality of integer constants before bailing out due to
2711 precision differences. */
2712 if (TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
2713 return tree_int_cst_equal (arg0, arg1);
2715 /* If both types don't have the same signedness, then we can't consider
2716 them equal. We must check this before the STRIP_NOPS calls
2717 because they may change the signedness of the arguments. As pointers
2718 strictly don't have a signedness, require either two pointers or
2719 two non-pointers as well. */
2720 if (TYPE_UNSIGNED (TREE_TYPE (arg0)) != TYPE_UNSIGNED (TREE_TYPE (arg1))
2721 || POINTER_TYPE_P (TREE_TYPE (arg0)) != POINTER_TYPE_P (TREE_TYPE (arg1)))
2722 return 0;
2724 /* We cannot consider pointers to different address spaces equal. */
2725 if (POINTER_TYPE_P (TREE_TYPE (arg0)) && POINTER_TYPE_P (TREE_TYPE (arg1))
2726 && (TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (arg0)))
2727 != TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (arg1)))))
2728 return 0;
2730 /* If both types don't have the same precision, then it is not safe
2731 to strip NOPs. */
2732 if (element_precision (TREE_TYPE (arg0))
2733 != element_precision (TREE_TYPE (arg1)))
2734 return 0;
2736 STRIP_NOPS (arg0);
2737 STRIP_NOPS (arg1);
2739 /* In case both args are comparisons but with different comparison
2740 code, try to swap the comparison operands of one arg to produce
2741 a match and compare that variant. */
2742 if (TREE_CODE (arg0) != TREE_CODE (arg1)
2743 && COMPARISON_CLASS_P (arg0)
2744 && COMPARISON_CLASS_P (arg1))
2746 enum tree_code swap_code = swap_tree_comparison (TREE_CODE (arg1));
2748 if (TREE_CODE (arg0) == swap_code)
2749 return operand_equal_p (TREE_OPERAND (arg0, 0),
2750 TREE_OPERAND (arg1, 1), flags)
2751 && operand_equal_p (TREE_OPERAND (arg0, 1),
2752 TREE_OPERAND (arg1, 0), flags);
2755 if (TREE_CODE (arg0) != TREE_CODE (arg1)
2756 /* NOP_EXPR and CONVERT_EXPR are considered equal. */
2757 && !(CONVERT_EXPR_P (arg0) && CONVERT_EXPR_P (arg1)))
2758 return 0;
2760 /* This is needed for conversions and for COMPONENT_REF.
2761 Might as well play it safe and always test this. */
2762 if (TREE_CODE (TREE_TYPE (arg0)) == ERROR_MARK
2763 || TREE_CODE (TREE_TYPE (arg1)) == ERROR_MARK
2764 || TYPE_MODE (TREE_TYPE (arg0)) != TYPE_MODE (TREE_TYPE (arg1)))
2765 return 0;
2767 /* If ARG0 and ARG1 are the same SAVE_EXPR, they are necessarily equal.
2768 We don't care about side effects in that case because the SAVE_EXPR
2769 takes care of that for us. In all other cases, two expressions are
2770 equal if they have no side effects. If we have two identical
2771 expressions with side effects that should be treated the same due
2772 to the only side effects being identical SAVE_EXPR's, that will
2773 be detected in the recursive calls below.
2774 If we are taking an invariant address of two identical objects
2775 they are necessarily equal as well. */
2776 if (arg0 == arg1 && ! (flags & OEP_ONLY_CONST)
2777 && (TREE_CODE (arg0) == SAVE_EXPR
2778 || (flags & OEP_CONSTANT_ADDRESS_OF)
2779 || (! TREE_SIDE_EFFECTS (arg0) && ! TREE_SIDE_EFFECTS (arg1))))
2780 return 1;
2782 /* Next handle constant cases, those for which we can return 1 even
2783 if ONLY_CONST is set. */
2784 if (TREE_CONSTANT (arg0) && TREE_CONSTANT (arg1))
2785 switch (TREE_CODE (arg0))
2787 case INTEGER_CST:
2788 return tree_int_cst_equal (arg0, arg1);
2790 case FIXED_CST:
2791 return FIXED_VALUES_IDENTICAL (TREE_FIXED_CST (arg0),
2792 TREE_FIXED_CST (arg1));
2794 case REAL_CST:
2795 if (REAL_VALUES_IDENTICAL (TREE_REAL_CST (arg0),
2796 TREE_REAL_CST (arg1)))
2797 return 1;
2800 if (!HONOR_SIGNED_ZEROS (arg0))
2802 /* If we do not distinguish between positive and negative zero,
2803 consider them equal. */
2804 if (real_zerop (arg0) && real_zerop (arg1))
2805 return 1;
2807 return 0;
2809 case VECTOR_CST:
2811 unsigned i;
2813 if (VECTOR_CST_NELTS (arg0) != VECTOR_CST_NELTS (arg1))
2814 return 0;
2816 for (i = 0; i < VECTOR_CST_NELTS (arg0); ++i)
2818 if (!operand_equal_p (VECTOR_CST_ELT (arg0, i),
2819 VECTOR_CST_ELT (arg1, i), flags))
2820 return 0;
2822 return 1;
2825 case COMPLEX_CST:
2826 return (operand_equal_p (TREE_REALPART (arg0), TREE_REALPART (arg1),
2827 flags)
2828 && operand_equal_p (TREE_IMAGPART (arg0), TREE_IMAGPART (arg1),
2829 flags));
2831 case STRING_CST:
2832 return (TREE_STRING_LENGTH (arg0) == TREE_STRING_LENGTH (arg1)
2833 && ! memcmp (TREE_STRING_POINTER (arg0),
2834 TREE_STRING_POINTER (arg1),
2835 TREE_STRING_LENGTH (arg0)));
2837 case ADDR_EXPR:
2838 return operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 0),
2839 TREE_CONSTANT (arg0) && TREE_CONSTANT (arg1)
2840 ? OEP_CONSTANT_ADDRESS_OF | OEP_ADDRESS_OF : 0);
2841 default:
2842 break;
2845 if (flags & OEP_ONLY_CONST)
2846 return 0;
2848 /* Define macros to test an operand from arg0 and arg1 for equality and a
2849 variant that allows null and views null as being different from any
2850 non-null value. In the latter case, if either is null, then both
2851 must be null; otherwise, do the normal comparison. */
2852 #define OP_SAME(N) operand_equal_p (TREE_OPERAND (arg0, N), \
2853 TREE_OPERAND (arg1, N), flags)
2855 #define OP_SAME_WITH_NULL(N) \
2856 ((!TREE_OPERAND (arg0, N) || !TREE_OPERAND (arg1, N)) \
2857 ? TREE_OPERAND (arg0, N) == TREE_OPERAND (arg1, N) : OP_SAME (N))
2859 switch (TREE_CODE_CLASS (TREE_CODE (arg0)))
2861 case tcc_unary:
2862 /* Two conversions are equal only if signedness and modes match. */
2863 switch (TREE_CODE (arg0))
2865 CASE_CONVERT:
2866 case FIX_TRUNC_EXPR:
2867 if (TYPE_UNSIGNED (TREE_TYPE (arg0))
2868 != TYPE_UNSIGNED (TREE_TYPE (arg1)))
2869 return 0;
2870 break;
2871 default:
2872 break;
2875 return OP_SAME (0);
2878 case tcc_comparison:
2879 case tcc_binary:
2880 if (OP_SAME (0) && OP_SAME (1))
2881 return 1;
2883 /* For commutative ops, allow the other order. */
2884 return (commutative_tree_code (TREE_CODE (arg0))
2885 && operand_equal_p (TREE_OPERAND (arg0, 0),
2886 TREE_OPERAND (arg1, 1), flags)
2887 && operand_equal_p (TREE_OPERAND (arg0, 1),
2888 TREE_OPERAND (arg1, 0), flags));
2890 case tcc_reference:
2891 /* If either of the pointer (or reference) expressions we are
2892 dereferencing contain a side effect, these cannot be equal,
2893 but their addresses can be. */
2894 if ((flags & OEP_CONSTANT_ADDRESS_OF) == 0
2895 && (TREE_SIDE_EFFECTS (arg0)
2896 || TREE_SIDE_EFFECTS (arg1)))
2897 return 0;
2899 switch (TREE_CODE (arg0))
2901 case INDIRECT_REF:
2902 if (!(flags & OEP_ADDRESS_OF)
2903 && (TYPE_ALIGN (TREE_TYPE (arg0))
2904 != TYPE_ALIGN (TREE_TYPE (arg1))))
2905 return 0;
2906 flags &= ~(OEP_CONSTANT_ADDRESS_OF|OEP_ADDRESS_OF);
2907 return OP_SAME (0);
2909 case REALPART_EXPR:
2910 case IMAGPART_EXPR:
2911 return OP_SAME (0);
2913 case TARGET_MEM_REF:
2914 case MEM_REF:
2915 /* Require equal access sizes, and similar pointer types.
2916 We can have incomplete types for array references of
2917 variable-sized arrays from the Fortran frontend
2918 though. Also verify the types are compatible. */
2919 if (!((TYPE_SIZE (TREE_TYPE (arg0)) == TYPE_SIZE (TREE_TYPE (arg1))
2920 || (TYPE_SIZE (TREE_TYPE (arg0))
2921 && TYPE_SIZE (TREE_TYPE (arg1))
2922 && operand_equal_p (TYPE_SIZE (TREE_TYPE (arg0)),
2923 TYPE_SIZE (TREE_TYPE (arg1)), flags)))
2924 && types_compatible_p (TREE_TYPE (arg0), TREE_TYPE (arg1))
2925 && ((flags & OEP_ADDRESS_OF)
2926 || (alias_ptr_types_compatible_p
2927 (TREE_TYPE (TREE_OPERAND (arg0, 1)),
2928 TREE_TYPE (TREE_OPERAND (arg1, 1)))
2929 && (MR_DEPENDENCE_CLIQUE (arg0)
2930 == MR_DEPENDENCE_CLIQUE (arg1))
2931 && (MR_DEPENDENCE_BASE (arg0)
2932 == MR_DEPENDENCE_BASE (arg1))
2933 && (TYPE_ALIGN (TREE_TYPE (arg0))
2934 == TYPE_ALIGN (TREE_TYPE (arg1)))))))
2935 return 0;
2936 flags &= ~(OEP_CONSTANT_ADDRESS_OF|OEP_ADDRESS_OF);
2937 return (OP_SAME (0) && OP_SAME (1)
2938 /* TARGET_MEM_REFs require equal extra operands. */
2939 && (TREE_CODE (arg0) != TARGET_MEM_REF
2940 || (OP_SAME_WITH_NULL (2)
2941 && OP_SAME_WITH_NULL (3)
2942 && OP_SAME_WITH_NULL (4))));
2944 case ARRAY_REF:
2945 case ARRAY_RANGE_REF:
2946 /* Operands 2 and 3 may be null.
2947 Compare the array index by value first if it is constant, as we
2948 may have different types but the same value here. */
2949 if (!OP_SAME (0))
2950 return 0;
2951 flags &= ~(OEP_CONSTANT_ADDRESS_OF|OEP_ADDRESS_OF);
2952 return ((tree_int_cst_equal (TREE_OPERAND (arg0, 1),
2953 TREE_OPERAND (arg1, 1))
2954 || OP_SAME (1))
2955 && OP_SAME_WITH_NULL (2)
2956 && OP_SAME_WITH_NULL (3));
2958 case COMPONENT_REF:
2959 /* Handle operand 2 the same as for ARRAY_REF. Operand 0
2960 may be NULL when we're called to compare MEM_EXPRs. */
2961 if (!OP_SAME_WITH_NULL (0)
2962 || !OP_SAME (1))
2963 return 0;
2964 flags &= ~(OEP_CONSTANT_ADDRESS_OF|OEP_ADDRESS_OF);
2965 return OP_SAME_WITH_NULL (2);
2967 case BIT_FIELD_REF:
2968 if (!OP_SAME (0))
2969 return 0;
2970 flags &= ~(OEP_CONSTANT_ADDRESS_OF|OEP_ADDRESS_OF);
2971 return OP_SAME (1) && OP_SAME (2);
2973 default:
2974 return 0;
2977 case tcc_expression:
2978 switch (TREE_CODE (arg0))
2980 case ADDR_EXPR:
2981 return operand_equal_p (TREE_OPERAND (arg0, 0),
2982 TREE_OPERAND (arg1, 0),
2983 flags | OEP_ADDRESS_OF);
2985 case TRUTH_NOT_EXPR:
2986 return OP_SAME (0);
2988 case TRUTH_ANDIF_EXPR:
2989 case TRUTH_ORIF_EXPR:
2990 return OP_SAME (0) && OP_SAME (1);
2992 case FMA_EXPR:
2993 case WIDEN_MULT_PLUS_EXPR:
2994 case WIDEN_MULT_MINUS_EXPR:
2995 if (!OP_SAME (2))
2996 return 0;
2997 /* The multiplication operands are commutative. */
2998 /* FALLTHRU */
3000 case TRUTH_AND_EXPR:
3001 case TRUTH_OR_EXPR:
3002 case TRUTH_XOR_EXPR:
3003 if (OP_SAME (0) && OP_SAME (1))
3004 return 1;
3006 /* Otherwise take into account this is a commutative operation. */
3007 return (operand_equal_p (TREE_OPERAND (arg0, 0),
3008 TREE_OPERAND (arg1, 1), flags)
3009 && operand_equal_p (TREE_OPERAND (arg0, 1),
3010 TREE_OPERAND (arg1, 0), flags));
3012 case COND_EXPR:
3013 case VEC_COND_EXPR:
3014 case DOT_PROD_EXPR:
3015 return OP_SAME (0) && OP_SAME (1) && OP_SAME (2);
3017 default:
3018 return 0;
3021 case tcc_vl_exp:
3022 switch (TREE_CODE (arg0))
3024 case CALL_EXPR:
3025 if ((CALL_EXPR_FN (arg0) == NULL_TREE)
3026 != (CALL_EXPR_FN (arg1) == NULL_TREE))
3027 /* If the CALL_EXPRs are not both internal or both normal function
3028 calls, then they are not equal. */
3029 return 0;
3030 else if (CALL_EXPR_FN (arg0) == NULL_TREE)
3032 /* If the CALL_EXPRs call different internal functions, then they
3033 are not equal. */
3034 if (CALL_EXPR_IFN (arg0) != CALL_EXPR_IFN (arg1))
3035 return 0;
3037 else
3039 /* If the CALL_EXPRs call different functions, then they are not
3040 equal. */
3041 if (! operand_equal_p (CALL_EXPR_FN (arg0), CALL_EXPR_FN (arg1),
3042 flags))
3043 return 0;
3047 unsigned int cef = call_expr_flags (arg0);
3048 if (flags & OEP_PURE_SAME)
3049 cef &= ECF_CONST | ECF_PURE;
3050 else
3051 cef &= ECF_CONST;
3052 if (!cef)
3053 return 0;
3056 /* Now see if all the arguments are the same. */
3058 const_call_expr_arg_iterator iter0, iter1;
3059 const_tree a0, a1;
3060 for (a0 = first_const_call_expr_arg (arg0, &iter0),
3061 a1 = first_const_call_expr_arg (arg1, &iter1);
3062 a0 && a1;
3063 a0 = next_const_call_expr_arg (&iter0),
3064 a1 = next_const_call_expr_arg (&iter1))
3065 if (! operand_equal_p (a0, a1, flags))
3066 return 0;
3068 /* If we get here and both argument lists are exhausted
3069 then the CALL_EXPRs are equal. */
3070 return ! (a0 || a1);
3072 default:
3073 return 0;
3076 case tcc_declaration:
3077 /* Consider __builtin_sqrt equal to sqrt. */
3078 return (TREE_CODE (arg0) == FUNCTION_DECL
3079 && DECL_BUILT_IN (arg0) && DECL_BUILT_IN (arg1)
3080 && DECL_BUILT_IN_CLASS (arg0) == DECL_BUILT_IN_CLASS (arg1)
3081 && DECL_FUNCTION_CODE (arg0) == DECL_FUNCTION_CODE (arg1));
3083 default:
3084 return 0;
3087 #undef OP_SAME
3088 #undef OP_SAME_WITH_NULL
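/* Usage sketch (hypothetical): operand_equal_p (a, b, 0) asks
   whether two trees are interchangeable as values.  For example,
   two occurrences of the same side-effect-free VAR_DECL compare
   equal with FLAGS of zero, but not with OEP_ONLY_CONST, which
   restricts positive answers to constants.  */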
3091 /* Similar to operand_equal_p, but see if ARG0 might have been made by
3092 shorten_compare from ARG1 when ARG1 was being compared with OTHER.
3094 When in doubt, return 0. */
3096 static int
3097 operand_equal_for_comparison_p (tree arg0, tree arg1, tree other)
3099 int unsignedp1, unsignedpo;
3100 tree primarg0, primarg1, primother;
3101 unsigned int correct_width;
3103 if (operand_equal_p (arg0, arg1, 0))
3104 return 1;
3106 if (! INTEGRAL_TYPE_P (TREE_TYPE (arg0))
3107 || ! INTEGRAL_TYPE_P (TREE_TYPE (arg1)))
3108 return 0;
3110 /* Discard any conversions that don't change the modes of ARG0 and ARG1
3111 and see if the inner values are the same. This removes any
3112 signedness comparison, which doesn't matter here. */
3113 primarg0 = arg0, primarg1 = arg1;
3114 STRIP_NOPS (primarg0);
3115 STRIP_NOPS (primarg1);
3116 if (operand_equal_p (primarg0, primarg1, 0))
3117 return 1;
3119 /* Duplicate what shorten_compare does to ARG1 and see if that gives the
3120 actual comparison operand, ARG0.
3122 First throw away any conversions to wider types
3123 already present in the operands. */
3125 primarg1 = get_narrower (arg1, &unsignedp1);
3126 primother = get_narrower (other, &unsignedpo);
3128 correct_width = TYPE_PRECISION (TREE_TYPE (arg1));
3129 if (unsignedp1 == unsignedpo
3130 && TYPE_PRECISION (TREE_TYPE (primarg1)) < correct_width
3131 && TYPE_PRECISION (TREE_TYPE (primother)) < correct_width)
3133 tree type = TREE_TYPE (arg0);
3135 /* Make sure shorter operand is extended the right way
3136 to match the longer operand. */
3137 primarg1 = fold_convert (signed_or_unsigned_type_for
3138 (unsignedp1, TREE_TYPE (primarg1)), primarg1);
3140 if (operand_equal_p (arg0, fold_convert (type, primarg1), 0))
3141 return 1;
3144 return 0;
3147 /* See if ARG is an expression that is either a comparison or is performing
3148 arithmetic on comparisons. The comparisons must only be comparing
3149 two different values, which will be stored in *CVAL1 and *CVAL2; if
3150 they are nonzero it means that some operands have already been found.
3151 No variables may be used anywhere else in the expression except in the
3152 comparisons. If SAVE_P is true it means we removed a SAVE_EXPR around
3153 the expression and save_expr needs to be called with CVAL1 and CVAL2.
3155 If this is true, return 1. Otherwise, return zero. */
3157 static int
3158 twoval_comparison_p (tree arg, tree *cval1, tree *cval2, int *save_p)
3160 enum tree_code code = TREE_CODE (arg);
3161 enum tree_code_class tclass = TREE_CODE_CLASS (code);
3163 /* We can handle some of the tcc_expression cases here. */
3164 if (tclass == tcc_expression && code == TRUTH_NOT_EXPR)
3165 tclass = tcc_unary;
3166 else if (tclass == tcc_expression
3167 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR
3168 || code == COMPOUND_EXPR))
3169 tclass = tcc_binary;
3171 else if (tclass == tcc_expression && code == SAVE_EXPR
3172 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (arg, 0)))
3174 /* If we've already found a CVAL1 or CVAL2, this expression is
3175 too complex to handle. */
3176 if (*cval1 || *cval2)
3177 return 0;
3179 tclass = tcc_unary;
3180 *save_p = 1;
3183 switch (tclass)
3185 case tcc_unary:
3186 return twoval_comparison_p (TREE_OPERAND (arg, 0), cval1, cval2, save_p);
3188 case tcc_binary:
3189 return (twoval_comparison_p (TREE_OPERAND (arg, 0), cval1, cval2, save_p)
3190 && twoval_comparison_p (TREE_OPERAND (arg, 1),
3191 cval1, cval2, save_p));
3193 case tcc_constant:
3194 return 1;
3196 case tcc_expression:
3197 if (code == COND_EXPR)
3198 return (twoval_comparison_p (TREE_OPERAND (arg, 0),
3199 cval1, cval2, save_p)
3200 && twoval_comparison_p (TREE_OPERAND (arg, 1),
3201 cval1, cval2, save_p)
3202 && twoval_comparison_p (TREE_OPERAND (arg, 2),
3203 cval1, cval2, save_p));
3204 return 0;
3206 case tcc_comparison:
3207 /* First see if we can handle the first operand, then the second. For
3208 the second operand, we know *CVAL1 can't be zero. It must be that
3209 one side of the comparison is each of the values; test for the
3210 case where this isn't true by failing if the two operands
3211 are the same. */
3213 if (operand_equal_p (TREE_OPERAND (arg, 0),
3214 TREE_OPERAND (arg, 1), 0))
3215 return 0;
3217 if (*cval1 == 0)
3218 *cval1 = TREE_OPERAND (arg, 0);
3219 else if (operand_equal_p (*cval1, TREE_OPERAND (arg, 0), 0))
3221 else if (*cval2 == 0)
3222 *cval2 = TREE_OPERAND (arg, 0);
3223 else if (operand_equal_p (*cval2, TREE_OPERAND (arg, 0), 0))
3225 else
3226 return 0;
3228 if (operand_equal_p (*cval1, TREE_OPERAND (arg, 1), 0))
3230 else if (*cval2 == 0)
3231 *cval2 = TREE_OPERAND (arg, 1);
3232 else if (operand_equal_p (*cval2, TREE_OPERAND (arg, 1), 0))
3234 else
3235 return 0;
3237 return 1;
3239 default:
3240 return 0;
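/* Illustrative example: for ARG = "(a < b) | (a == b)" this returns
   1 with *CVAL1 = a and *CVAL2 = b, since both comparisons mention
   only those two values.  "(a < b) | (c == d)" fails because the
   second comparison would introduce a third and a fourth value.  */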
3244 /* ARG is a tree that is known to contain just arithmetic operations and
3245 comparisons. Evaluate the operations in the tree substituting NEW0 for
3246 any occurrence of OLD0 as an operand of a comparison and likewise for
3247 NEW1 and OLD1. */
3249 static tree
3250 eval_subst (location_t loc, tree arg, tree old0, tree new0,
3251 tree old1, tree new1)
3253 tree type = TREE_TYPE (arg);
3254 enum tree_code code = TREE_CODE (arg);
3255 enum tree_code_class tclass = TREE_CODE_CLASS (code);
3257 /* We can handle some of the tcc_expression cases here. */
3258 if (tclass == tcc_expression && code == TRUTH_NOT_EXPR)
3259 tclass = tcc_unary;
3260 else if (tclass == tcc_expression
3261 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR))
3262 tclass = tcc_binary;
3264 switch (tclass)
3266 case tcc_unary:
3267 return fold_build1_loc (loc, code, type,
3268 eval_subst (loc, TREE_OPERAND (arg, 0),
3269 old0, new0, old1, new1));
3271 case tcc_binary:
3272 return fold_build2_loc (loc, code, type,
3273 eval_subst (loc, TREE_OPERAND (arg, 0),
3274 old0, new0, old1, new1),
3275 eval_subst (loc, TREE_OPERAND (arg, 1),
3276 old0, new0, old1, new1));
3278 case tcc_expression:
3279 switch (code)
3281 case SAVE_EXPR:
3282 return eval_subst (loc, TREE_OPERAND (arg, 0), old0, new0,
3283 old1, new1);
3285 case COMPOUND_EXPR:
3286 return eval_subst (loc, TREE_OPERAND (arg, 1), old0, new0,
3287 old1, new1);
3289 case COND_EXPR:
3290 return fold_build3_loc (loc, code, type,
3291 eval_subst (loc, TREE_OPERAND (arg, 0),
3292 old0, new0, old1, new1),
3293 eval_subst (loc, TREE_OPERAND (arg, 1),
3294 old0, new0, old1, new1),
3295 eval_subst (loc, TREE_OPERAND (arg, 2),
3296 old0, new0, old1, new1));
3297 default:
3298 break;
3300 /* Fall through - ??? */
3302 case tcc_comparison:
3304 tree arg0 = TREE_OPERAND (arg, 0);
3305 tree arg1 = TREE_OPERAND (arg, 1);
3307 /* We need to check both for exact equality and tree equality. The
3308 former will be true if the operand has a side-effect. In that
3309 case, we know the operand occurred exactly once. */
3311 if (arg0 == old0 || operand_equal_p (arg0, old0, 0))
3312 arg0 = new0;
3313 else if (arg0 == old1 || operand_equal_p (arg0, old1, 0))
3314 arg0 = new1;
3316 if (arg1 == old0 || operand_equal_p (arg1, old0, 0))
3317 arg1 = new0;
3318 else if (arg1 == old1 || operand_equal_p (arg1, old1, 0))
3319 arg1 = new1;
3321 return fold_build2_loc (loc, code, type, arg0, arg1);
3324 default:
3325 return arg;
3329 /* Return a tree for the case when the result of an expression is RESULT
3330 converted to TYPE and OMITTED was previously an operand of the expression
3331 but is now not needed (e.g., we folded OMITTED * 0).
3333 If OMITTED has side effects, we must evaluate it. Otherwise, just do
3334 the conversion of RESULT to TYPE. */
3336 tree
3337 omit_one_operand_loc (location_t loc, tree type, tree result, tree omitted)
3339 tree t = fold_convert_loc (loc, type, result);
3341 /* If the resulting operand is an empty statement, just return the omitted
3342 statement cast to void. */
3343 if (IS_EMPTY_STMT (t) && TREE_SIDE_EFFECTS (omitted))
3344 return build1_loc (loc, NOP_EXPR, void_type_node,
3345 fold_ignored_result (omitted));
3347 if (TREE_SIDE_EFFECTS (omitted))
3348 return build2_loc (loc, COMPOUND_EXPR, type,
3349 fold_ignored_result (omitted), t);
3351 return non_lvalue_loc (loc, t);
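/* Illustrative example: folding "f () * 0" must keep the call for
   its side effects, so omit_one_operand_loc with RESULT of zero and
   OMITTED of the call produces COMPOUND_EXPR <f (), 0>: the result
   is 0 but f is still evaluated.  */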
3354 /* Return a tree for the case when the result of an expression is RESULT
3355 converted to TYPE and OMITTED1 and OMITTED2 were previously operands
3356 of the expression but are now not needed.
3358 If OMITTED1 or OMITTED2 has side effects, they must be evaluated.
3359 If both OMITTED1 and OMITTED2 have side effects, OMITTED1 is
3360 evaluated before OMITTED2. Otherwise, if neither has side effects,
3361 just do the conversion of RESULT to TYPE. */
3363 tree
3364 omit_two_operands_loc (location_t loc, tree type, tree result,
3365 tree omitted1, tree omitted2)
3367 tree t = fold_convert_loc (loc, type, result);
3369 if (TREE_SIDE_EFFECTS (omitted2))
3370 t = build2_loc (loc, COMPOUND_EXPR, type, omitted2, t);
3371 if (TREE_SIDE_EFFECTS (omitted1))
3372 t = build2_loc (loc, COMPOUND_EXPR, type, omitted1, t);
3374 return TREE_CODE (t) != COMPOUND_EXPR ? non_lvalue_loc (loc, t) : t;
3378 /* Return a simplified tree node for the truth-negation of ARG. This
3379 never alters ARG itself. We assume that ARG is an operation that
3380 returns a truth value (0 or 1).
3382 FIXME: one would think we would fold the result, but it causes
3383 problems with the dominator optimizer. */
3385 static tree
3386 fold_truth_not_expr (location_t loc, tree arg)
3388 tree type = TREE_TYPE (arg);
3389 enum tree_code code = TREE_CODE (arg);
3390 location_t loc1, loc2;
3392 /* If this is a comparison, we can simply invert it, except for
3393 floating-point non-equality comparisons, in which case we just
3394 enclose a TRUTH_NOT_EXPR around what we have. */
3396 if (TREE_CODE_CLASS (code) == tcc_comparison)
3398 tree op_type = TREE_TYPE (TREE_OPERAND (arg, 0));
3399 if (FLOAT_TYPE_P (op_type)
3400 && flag_trapping_math
3401 && code != ORDERED_EXPR && code != UNORDERED_EXPR
3402 && code != NE_EXPR && code != EQ_EXPR)
3403 return NULL_TREE;
3405 code = invert_tree_comparison (code, HONOR_NANS (op_type));
3406 if (code == ERROR_MARK)
3407 return NULL_TREE;
3409 return build2_loc (loc, code, type, TREE_OPERAND (arg, 0),
3410 TREE_OPERAND (arg, 1));
3413 switch (code)
3415 case INTEGER_CST:
3416 return constant_boolean_node (integer_zerop (arg), type);
3418 case TRUTH_AND_EXPR:
3419 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
3420 loc2 = expr_location_or (TREE_OPERAND (arg, 1), loc);
3421 return build2_loc (loc, TRUTH_OR_EXPR, type,
3422 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)),
3423 invert_truthvalue_loc (loc2, TREE_OPERAND (arg, 1)));
3425 case TRUTH_OR_EXPR:
3426 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
3427 loc2 = expr_location_or (TREE_OPERAND (arg, 1), loc);
3428 return build2_loc (loc, TRUTH_AND_EXPR, type,
3429 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)),
3430 invert_truthvalue_loc (loc2, TREE_OPERAND (arg, 1)));
3432 case TRUTH_XOR_EXPR:
3433 /* Here we can invert either operand. We invert the first operand
3434 unless the second operand is a TRUTH_NOT_EXPR in which case our
3435 result is the XOR of the first operand with the inside of the
3436 negation of the second operand. */
3438 if (TREE_CODE (TREE_OPERAND (arg, 1)) == TRUTH_NOT_EXPR)
3439 return build2_loc (loc, TRUTH_XOR_EXPR, type, TREE_OPERAND (arg, 0),
3440 TREE_OPERAND (TREE_OPERAND (arg, 1), 0));
3441 else
3442 return build2_loc (loc, TRUTH_XOR_EXPR, type,
3443 invert_truthvalue_loc (loc, TREE_OPERAND (arg, 0)),
3444 TREE_OPERAND (arg, 1));
3446 case TRUTH_ANDIF_EXPR:
3447 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
3448 loc2 = expr_location_or (TREE_OPERAND (arg, 1), loc);
3449 return build2_loc (loc, TRUTH_ORIF_EXPR, type,
3450 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)),
3451 invert_truthvalue_loc (loc2, TREE_OPERAND (arg, 1)));
3453 case TRUTH_ORIF_EXPR:
3454 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
3455 loc2 = expr_location_or (TREE_OPERAND (arg, 1), loc);
3456 return build2_loc (loc, TRUTH_ANDIF_EXPR, type,
3457 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)),
3458 invert_truthvalue_loc (loc2, TREE_OPERAND (arg, 1)));
3460 case TRUTH_NOT_EXPR:
3461 return TREE_OPERAND (arg, 0);
3463 case COND_EXPR:
3465 tree arg1 = TREE_OPERAND (arg, 1);
3466 tree arg2 = TREE_OPERAND (arg, 2);
3468 loc1 = expr_location_or (TREE_OPERAND (arg, 1), loc);
3469 loc2 = expr_location_or (TREE_OPERAND (arg, 2), loc);
3471 /* A COND_EXPR may have a throw as one operand, which
3472 then has void type. Just leave void operands
3473 as they are. */
3474 return build3_loc (loc, COND_EXPR, type, TREE_OPERAND (arg, 0),
3475 VOID_TYPE_P (TREE_TYPE (arg1))
3476 ? arg1 : invert_truthvalue_loc (loc1, arg1),
3477 VOID_TYPE_P (TREE_TYPE (arg2))
3478 ? arg2 : invert_truthvalue_loc (loc2, arg2));
3481 case COMPOUND_EXPR:
3482 loc1 = expr_location_or (TREE_OPERAND (arg, 1), loc);
3483 return build2_loc (loc, COMPOUND_EXPR, type,
3484 TREE_OPERAND (arg, 0),
3485 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 1)));
3487 case NON_LVALUE_EXPR:
3488 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
3489 return invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0));
3491 CASE_CONVERT:
3492 if (TREE_CODE (TREE_TYPE (arg)) == BOOLEAN_TYPE)
3493 return build1_loc (loc, TRUTH_NOT_EXPR, type, arg);
3495 /* ... fall through ... */
3497 case FLOAT_EXPR:
3498 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
3499 return build1_loc (loc, TREE_CODE (arg), type,
3500 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)));
3502 case BIT_AND_EXPR:
3503 if (!integer_onep (TREE_OPERAND (arg, 1)))
3504 return NULL_TREE;
3505 return build2_loc (loc, EQ_EXPR, type, arg, build_int_cst (type, 0));
3507 case SAVE_EXPR:
3508 return build1_loc (loc, TRUTH_NOT_EXPR, type, arg);
3510 case CLEANUP_POINT_EXPR:
3511 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
3512 return build1_loc (loc, CLEANUP_POINT_EXPR, type,
3513 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)));
3515 default:
3516 return NULL_TREE;
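/* Illustrative example: negating "a && b" (TRUTH_AND_EXPR) applies
   De Morgan's law above, yielding "!a || !b" with each operand
   inverted recursively through invert_truthvalue_loc.  A comparison
   such as a < b is instead inverted directly to a >= b (or an
   unordered variant) by the tcc_comparison case at the top.  */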
3520 /* Fold the truth-negation of ARG. This never alters ARG itself. We
3521 assume that ARG is an operation that returns a truth value (0 or 1
3522 for scalars, 0 or -1 for vectors). Return the folded expression if
3523 folding is successful. Otherwise, return NULL_TREE. */
3525 static tree
3526 fold_invert_truthvalue (location_t loc, tree arg)
3528 tree type = TREE_TYPE (arg);
3529 return fold_unary_loc (loc, VECTOR_TYPE_P (type)
3530 ? BIT_NOT_EXPR
3531 : TRUTH_NOT_EXPR,
3532 type, arg);
3535 /* Return a simplified tree node for the truth-negation of ARG. This
3536 never alters ARG itself. We assume that ARG is an operation that
3537 returns a truth value (0 or 1 for scalars, 0 or -1 for vectors). */
3539 tree
3540 invert_truthvalue_loc (location_t loc, tree arg)
3542 if (TREE_CODE (arg) == ERROR_MARK)
3543 return arg;
3545 tree type = TREE_TYPE (arg);
3546 return fold_build1_loc (loc, VECTOR_TYPE_P (type)
3547 ? BIT_NOT_EXPR
3548 : TRUTH_NOT_EXPR,
3549 type, arg);
3552 /* Knowing that ARG0 and ARG1 are both RDIV_EXPRs, simplify a binary operation
3553 with code CODE. This optimization is unsafe. */
3554 static tree
3555 distribute_real_division (location_t loc, enum tree_code code, tree type,
3556 tree arg0, tree arg1)
3558 bool mul0 = TREE_CODE (arg0) == MULT_EXPR;
3559 bool mul1 = TREE_CODE (arg1) == MULT_EXPR;
3561 /* (A / C) +- (B / C) -> (A +- B) / C. */
3562 if (mul0 == mul1
3563 && operand_equal_p (TREE_OPERAND (arg0, 1),
3564 TREE_OPERAND (arg1, 1), 0))
3565 return fold_build2_loc (loc, mul0 ? MULT_EXPR : RDIV_EXPR, type,
3566 fold_build2_loc (loc, code, type,
3567 TREE_OPERAND (arg0, 0),
3568 TREE_OPERAND (arg1, 0)),
3569 TREE_OPERAND (arg0, 1));
3571 /* (A / C1) +- (A / C2) -> A * (1 / C1 +- 1 / C2). */
3572 if (operand_equal_p (TREE_OPERAND (arg0, 0),
3573 TREE_OPERAND (arg1, 0), 0)
3574 && TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
3575 && TREE_CODE (TREE_OPERAND (arg1, 1)) == REAL_CST)
3577 REAL_VALUE_TYPE r0, r1;
3578 r0 = TREE_REAL_CST (TREE_OPERAND (arg0, 1));
3579 r1 = TREE_REAL_CST (TREE_OPERAND (arg1, 1));
3580 if (!mul0)
3581 real_arithmetic (&r0, RDIV_EXPR, &dconst1, &r0);
3582 if (!mul1)
3583 real_arithmetic (&r1, RDIV_EXPR, &dconst1, &r1);
3584 real_arithmetic (&r0, code, &r0, &r1);
3585 return fold_build2_loc (loc, MULT_EXPR, type,
3586 TREE_OPERAND (arg0, 0),
3587 build_real (type, r0));
3590 return NULL_TREE;
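/* Worked example of why this is unsafe (illustrative): rewriting
   x/3.0 + y/3.0 as (x + y)/3.0 changes the rounding and the range.
   With x = y = DBL_MAX each quotient is finite and the sum is about
   2*DBL_MAX/3, while the rewritten form overflows to +Inf in x + y.
   The second pattern above likewise replaces two divisions by
   constants with one multiplication by a pre-folded 1/C1 +- 1/C2.  */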
3593 /* Return a BIT_FIELD_REF of type TYPE to refer to BITSIZE bits of INNER
3594 starting at BITPOS. The field is unsigned if UNSIGNEDP is nonzero. */
3596 static tree
3597 make_bit_field_ref (location_t loc, tree inner, tree type,
3598 HOST_WIDE_INT bitsize, HOST_WIDE_INT bitpos, int unsignedp)
3600 tree result, bftype;
3602 if (bitpos == 0)
3604 tree size = TYPE_SIZE (TREE_TYPE (inner));
3605 if ((INTEGRAL_TYPE_P (TREE_TYPE (inner))
3606 || POINTER_TYPE_P (TREE_TYPE (inner)))
3607 && tree_fits_shwi_p (size)
3608 && tree_to_shwi (size) == bitsize)
3609 return fold_convert_loc (loc, type, inner);
3612 bftype = type;
3613 if (TYPE_PRECISION (bftype) != bitsize
3614 || TYPE_UNSIGNED (bftype) == !unsignedp)
3615 bftype = build_nonstandard_integer_type (bitsize, 0);
3617 result = build3_loc (loc, BIT_FIELD_REF, bftype, inner,
3618 size_int (bitsize), bitsize_int (bitpos));
3620 if (bftype != type)
3621 result = fold_convert_loc (loc, type, result);
3623 return result;
3626 /* Optimize a bit-field compare.
3628 There are two cases: First is a compare against a constant and the
3629 second is a comparison of two items where the fields are at the same
3630 bit position relative to the start of a chunk (byte, halfword, word)
3631 large enough to contain it. In these cases we can avoid the shift
3632 implicit in bitfield extractions.
3634 For constants, we emit a compare of the shifted constant with the
3635 BIT_AND_EXPR of a mask and a byte, halfword, or word of the operand being
3636 compared. For two fields at the same position, we do the ANDs with the
3637 similar mask and compare the result of the ANDs.
3639 CODE is the comparison code, known to be either NE_EXPR or EQ_EXPR.
3640 COMPARE_TYPE is the type of the comparison, and LHS and RHS
3641 are the left and right operands of the comparison, respectively.
3643 If the optimization described above can be done, we return the resulting
3644 tree. Otherwise we return zero. */
3646 static tree
3647 optimize_bit_field_compare (location_t loc, enum tree_code code,
3648 tree compare_type, tree lhs, tree rhs)
3650 HOST_WIDE_INT lbitpos, lbitsize, rbitpos, rbitsize, nbitpos, nbitsize;
3651 tree type = TREE_TYPE (lhs);
3652 tree unsigned_type;
3653 int const_p = TREE_CODE (rhs) == INTEGER_CST;
3654 machine_mode lmode, rmode, nmode;
3655 int lunsignedp, runsignedp;
3656 int lvolatilep = 0, rvolatilep = 0;
3657 tree linner, rinner = NULL_TREE;
3658 tree mask;
3659 tree offset;
3661 /* Get all the information about the extractions being done. If the bit size
3662 is the same as the size of the underlying object, we aren't doing an
3663 extraction at all and so can do nothing. We also don't want to
3664 do anything if the inner expression is a PLACEHOLDER_EXPR since we
3665 then will no longer be able to replace it. */
3666 linner = get_inner_reference (lhs, &lbitsize, &lbitpos, &offset, &lmode,
3667 &lunsignedp, &lvolatilep, false);
3668 if (linner == lhs || lbitsize == GET_MODE_BITSIZE (lmode) || lbitsize < 0
3669 || offset != 0 || TREE_CODE (linner) == PLACEHOLDER_EXPR || lvolatilep)
3670 return 0;
3672 if (!const_p)
3674 /* If this is not a constant, we can only do something if bit positions,
3675 sizes, and signedness are the same. */
3676 rinner = get_inner_reference (rhs, &rbitsize, &rbitpos, &offset, &rmode,
3677 &runsignedp, &rvolatilep, false);
3679 if (rinner == rhs || lbitpos != rbitpos || lbitsize != rbitsize
3680 || lunsignedp != runsignedp || offset != 0
3681 || TREE_CODE (rinner) == PLACEHOLDER_EXPR || rvolatilep)
3682 return 0;
3685 /* See if we can find a mode to refer to this field. We should be able to,
3686 but fail if we can't. */
3687 nmode = get_best_mode (lbitsize, lbitpos, 0, 0,
3688 const_p ? TYPE_ALIGN (TREE_TYPE (linner))
3689 : MIN (TYPE_ALIGN (TREE_TYPE (linner)),
3690 TYPE_ALIGN (TREE_TYPE (rinner))),
3691 word_mode, false);
3692 if (nmode == VOIDmode)
3693 return 0;
3695 /* Set signed and unsigned types of the precision of this mode for the
3696 shifts below. */
3697 unsigned_type = lang_hooks.types.type_for_mode (nmode, 1);
3699 /* Compute the bit position and size for the new reference and our offset
3700 within it. If the new reference is the same size as the original, we
3701 won't optimize anything, so return zero. */
3702 nbitsize = GET_MODE_BITSIZE (nmode);
3703 nbitpos = lbitpos & ~ (nbitsize - 1);
3704 lbitpos -= nbitpos;
3705 if (nbitsize == lbitsize)
3706 return 0;
3708 if (BYTES_BIG_ENDIAN)
3709 lbitpos = nbitsize - lbitsize - lbitpos;
3711 /* Make the mask to be used against the extracted field. */
3712 mask = build_int_cst_type (unsigned_type, -1);
3713 mask = const_binop (LSHIFT_EXPR, mask, size_int (nbitsize - lbitsize));
3714 mask = const_binop (RSHIFT_EXPR, mask,
3715 size_int (nbitsize - lbitsize - lbitpos));
3717 if (! const_p)
3718 /* If not comparing with constant, just rework the comparison
3719 and return. */
3720 return fold_build2_loc (loc, code, compare_type,
3721 fold_build2_loc (loc, BIT_AND_EXPR, unsigned_type,
3722 make_bit_field_ref (loc, linner,
3723 unsigned_type,
3724 nbitsize, nbitpos,
3725 1),
3726 mask),
3727 fold_build2_loc (loc, BIT_AND_EXPR, unsigned_type,
3728 make_bit_field_ref (loc, rinner,
3729 unsigned_type,
3730 nbitsize, nbitpos,
3731 1),
3732 mask));
3734 /* Otherwise, we are handling the constant case. See if the constant is too
3735 big for the field. Warn and return a tree for 0 (false) if so. We do
3736 this not only for its own sake, but to avoid having to test for this
3737 error case below. If we didn't, we might generate wrong code.
3739 For unsigned fields, the constant shifted right by the field length should
3740 be all zero. For signed fields, the high-order bits should agree with
3741 the sign bit. */
3743 if (lunsignedp)
3745 if (wi::lrshift (rhs, lbitsize) != 0)
3747 warning (0, "comparison is always %d due to width of bit-field",
3748 code == NE_EXPR);
3749 return constant_boolean_node (code == NE_EXPR, compare_type);
3752 else
3754 wide_int tem = wi::arshift (rhs, lbitsize - 1);
3755 if (tem != 0 && tem != -1)
3757 warning (0, "comparison is always %d due to width of bit-field",
3758 code == NE_EXPR);
3759 return constant_boolean_node (code == NE_EXPR, compare_type);
3763 /* Single-bit compares should always be against zero. */
3764 if (lbitsize == 1 && ! integer_zerop (rhs))
3766 code = code == EQ_EXPR ? NE_EXPR : EQ_EXPR;
3767 rhs = build_int_cst (type, 0);
3770 /* Make a new bitfield reference, shift the constant over the
3771 appropriate number of bits and mask it with the computed mask
3772 (in case this was a signed field). If we changed it, make a new one. */
3773 lhs = make_bit_field_ref (loc, linner, unsigned_type, nbitsize, nbitpos, 1);
3775 rhs = const_binop (BIT_AND_EXPR,
3776 const_binop (LSHIFT_EXPR,
3777 fold_convert_loc (loc, unsigned_type, rhs),
3778 size_int (lbitpos)),
3779 mask);
3781 lhs = build2_loc (loc, code, compare_type,
3782 build2 (BIT_AND_EXPR, unsigned_type, lhs, mask), rhs);
3783 return lhs;
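/* Worked mask example (illustrative): for a 4-bit field at bit
   position 8 inside a 32-bit word, nbitsize = 32, lbitsize = 4 and
   lbitpos = 8, so the mask is (-1 << 28) >> 20 = 0xf00 in the
   unsigned 32-bit type: exactly the field's bits within the
   containing word, with no shift needed on the loaded value.  */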
3786 /* Subroutine for fold_truth_andor_1: decode a field reference.
3788 If EXP is a comparison reference, we return the innermost reference.
3790 *PBITSIZE is set to the number of bits in the reference, *PBITPOS is
3791 set to the starting bit number.
3793 If the innermost field can be completely contained in a mode-sized
3794 unit, *PMODE is set to that mode. Otherwise, it is set to VOIDmode.
3796 *PVOLATILEP is set to 1 if any expression encountered is volatile;
3797 otherwise it is not changed.
3799 *PUNSIGNEDP is set to the signedness of the field.
3801 *PMASK is set to the mask used. This is either contained in a
3802 BIT_AND_EXPR or derived from the width of the field.
3804 *PAND_MASK is set to the mask found in a BIT_AND_EXPR, if any.
3806 Return 0 if this is not a component reference or is one that we can't
3807 do anything with. */
3809 static tree
3810 decode_field_reference (location_t loc, tree exp, HOST_WIDE_INT *pbitsize,
3811 HOST_WIDE_INT *pbitpos, machine_mode *pmode,
3812 int *punsignedp, int *pvolatilep,
3813 tree *pmask, tree *pand_mask)
3815 tree outer_type = 0;
3816 tree and_mask = 0;
3817 tree mask, inner, offset;
3818 tree unsigned_type;
3819 unsigned int precision;
3821 /* All the optimizations using this function assume integer fields.
3822 There are problems with FP fields since the type_for_size call
3823 below can fail for, e.g., XFmode. */
3824 if (! INTEGRAL_TYPE_P (TREE_TYPE (exp)))
3825 return 0;
3827 /* We are interested in the bare arrangement of bits, so strip everything
3828 that doesn't affect the machine mode. However, record the type of the
3829 outermost expression if it may matter below. */
3830 if (CONVERT_EXPR_P (exp)
3831 || TREE_CODE (exp) == NON_LVALUE_EXPR)
3832 outer_type = TREE_TYPE (exp);
3833 STRIP_NOPS (exp);
3835 if (TREE_CODE (exp) == BIT_AND_EXPR)
3837 and_mask = TREE_OPERAND (exp, 1);
3838 exp = TREE_OPERAND (exp, 0);
3839 STRIP_NOPS (exp); STRIP_NOPS (and_mask);
3840 if (TREE_CODE (and_mask) != INTEGER_CST)
3841 return 0;
3844 inner = get_inner_reference (exp, pbitsize, pbitpos, &offset, pmode,
3845 punsignedp, pvolatilep, false);
3846 if ((inner == exp && and_mask == 0)
3847 || *pbitsize < 0 || offset != 0
3848 || TREE_CODE (inner) == PLACEHOLDER_EXPR)
3849 return 0;
3851 /* If the number of bits in the reference is the same as the bitsize of
3852 the outer type, then the outer type gives the signedness. Otherwise
3853 (in case of a small bitfield) the signedness is unchanged. */
3854 if (outer_type && *pbitsize == TYPE_PRECISION (outer_type))
3855 *punsignedp = TYPE_UNSIGNED (outer_type);
3857 /* Compute the mask to access the bitfield. */
3858 unsigned_type = lang_hooks.types.type_for_size (*pbitsize, 1);
3859 precision = TYPE_PRECISION (unsigned_type);
3861 mask = build_int_cst_type (unsigned_type, -1);
3863 mask = const_binop (LSHIFT_EXPR, mask, size_int (precision - *pbitsize));
3864 mask = const_binop (RSHIFT_EXPR, mask, size_int (precision - *pbitsize));
3866 /* Merge it with the mask we found in the BIT_AND_EXPR, if any. */
3867 if (and_mask != 0)
3868 mask = fold_build2_loc (loc, BIT_AND_EXPR, unsigned_type,
3869 fold_convert_loc (loc, unsigned_type, and_mask), mask);
3871 *pmask = mask;
3872 *pand_mask = and_mask;
3873 return inner;
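/* Illustrative call (hypothetical field): for EXP = x.f & 3, where x.f is
   an 8-bit bit-field starting at bit 16, decode_field_reference returns
   the containing object and sets *PBITSIZE = 8, *PBITPOS = 16,
   *PAND_MASK = 3 and *PMASK = 3 (the 0xff field mask merged with the
   BIT_AND_EXPR mask).  */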
3876 /* Return nonzero if MASK represents a mask of SIZE ones in the low-order
3877 bit positions and the type of MASK is signed. */
3879 static int
3880 all_ones_mask_p (const_tree mask, unsigned int size)
3882 tree type = TREE_TYPE (mask);
3883 unsigned int precision = TYPE_PRECISION (type);
3885 /* If this function returns true when the type of the mask is
3886 UNSIGNED, then there will be errors. In particular see
3887 gcc.c-torture/execute/990326-1.c. There does not appear to be
3888 any documentation paper trail as to why this is so. But the pre
3889 wide-int worked with that restriction and it has been preserved
3890 here. */
3891 if (size > precision || TYPE_SIGN (type) == UNSIGNED)
3892 return false;
3894 return wi::mask (size, false, precision) == mask;
3897 /* Subroutine for fold: determine if VAL is the INTEGER_CST that
3898 represents the sign bit of EXP's type. If EXP represents a sign
3899 or zero extension, also test VAL against the unextended type.
3900 The return value is the (sub)expression whose sign bit is VAL,
3901 or NULL_TREE otherwise. */
3903 tree
3904 sign_bit_p (tree exp, const_tree val)
3906 int width;
3907 tree t;
3909 /* Tree EXP must have an integral type. */
3910 t = TREE_TYPE (exp);
3911 if (! INTEGRAL_TYPE_P (t))
3912 return NULL_TREE;
3914 /* Tree VAL must be an integer constant. */
3915 if (TREE_CODE (val) != INTEGER_CST
3916 || TREE_OVERFLOW (val))
3917 return NULL_TREE;
3919 width = TYPE_PRECISION (t);
3920 if (wi::only_sign_bit_p (val, width))
3921 return exp;
3923 /* Handle extension from a narrower type. */
3924 if (TREE_CODE (exp) == NOP_EXPR
3925 && TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0))) < width)
3926 return sign_bit_p (TREE_OPERAND (exp, 0), val);
3928 return NULL_TREE;
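/* Illustrative: with a 32-bit int X, sign_bit_p (X, 0x80000000) returns X.
   With a 16-bit short C appearing as (int) C, the NOP_EXPR case recurses
   and sign_bit_p ((int) C, 0x8000) returns C, 0x8000 being the sign bit
   of the narrower type.  */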
3931 /* Subroutine for fold_truth_andor_1: determine if an operand is simple enough
3932 to be evaluated unconditionally. */
3934 static int
3935 simple_operand_p (const_tree exp)
3937 /* Strip any conversions that don't change the machine mode. */
3938 STRIP_NOPS (exp);
3940 return (CONSTANT_CLASS_P (exp)
3941 || TREE_CODE (exp) == SSA_NAME
3942 || (DECL_P (exp)
3943 && ! TREE_ADDRESSABLE (exp)
3944 && ! TREE_THIS_VOLATILE (exp)
3945 && ! DECL_NONLOCAL (exp)
3946 /* Don't regard global variables as simple. They may be
3947 allocated in ways unknown to the compiler (shared memory,
3948 #pragma weak, etc). */
3949 && ! TREE_PUBLIC (exp)
3950 && ! DECL_EXTERNAL (exp)
3951 /* Weakrefs are not safe to be read, since they can be NULL.
3952 They are !TREE_PUBLIC && !DECL_EXTERNAL but still
3953 have DECL_WEAK flag set. */
3954 && (! VAR_OR_FUNCTION_DECL_P (exp) || ! DECL_WEAK (exp))
3955 /* Loading a static variable is unduly expensive, but global
3956 registers aren't expensive. */
3957 && (! TREE_STATIC (exp) || DECL_REGISTER (exp))));
3960 /* Subroutine for fold_truth_andor: determine if an operand is simple enough
3961 to be evaluated unconditionally.
3962 In addition to simple_operand_p, we assume that comparisons, conversions,
3963 and logic-not operations are simple if their operands are simple, too.
3965 static bool
3966 simple_operand_p_2 (tree exp)
3968 enum tree_code code;
3970 if (TREE_SIDE_EFFECTS (exp)
3971 || tree_could_trap_p (exp))
3972 return false;
3974 while (CONVERT_EXPR_P (exp))
3975 exp = TREE_OPERAND (exp, 0);
3977 code = TREE_CODE (exp);
3979 if (TREE_CODE_CLASS (code) == tcc_comparison)
3980 return (simple_operand_p (TREE_OPERAND (exp, 0))
3981 && simple_operand_p (TREE_OPERAND (exp, 1)));
3983 if (code == TRUTH_NOT_EXPR)
3984 return simple_operand_p_2 (TREE_OPERAND (exp, 0));
3986 return simple_operand_p (exp);
3990 /* The following functions are subroutines to fold_range_test and allow it to
3991 try to change a logical combination of comparisons into a range test.
3993 For example, both
3994 X == 2 || X == 3 || X == 4 || X == 5
3996 X >= 2 && X <= 5
3997 are converted to
3998 (unsigned) (X - 2) <= 3
4000 We describe each set of comparisons as being either inside or outside
4001 a range, using a variable named like IN_P, and then describe the
4002 range with a lower and upper bound. If one of the bounds is omitted,
4003 it represents either the highest or lowest value of the type.
4005 In the comments below, we represent a range by two numbers in brackets
4006 preceded by a "+" to designate being inside that range, or a "-" to
4007 designate being outside that range, so the condition can be inverted by
4008 flipping the prefix. An omitted bound is represented by a "-". For
4009 example, "- [-, 10]" means being outside the range starting at the lowest
4010 possible value and ending at 10, in other words, being greater than 10.
4011 The range "+ [-, -]" is always true and hence the range "- [-, -]" is
4012 always false.
4014 We set up things so that the missing bounds are handled in a consistent
4015 manner so neither a missing bound nor "true" and "false" need to be
4016 handled using a special case. */
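/* A worked example of this notation (illustrative): X == 2 is + [2, 2]
   and X == 3 is + [3, 3]; their disjunction is + [2, 3].  (fold_range_test
   gets there by inverting both, merging - [2, 2] with - [3, 3] into
   - [2, 3], and inverting back.)  Likewise "- [-, 10]", having no lower
   bound, is simply X > 10.  */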
4018 /* Return the result of applying CODE to ARG0 and ARG1, but handle the case
4019 of ARG0 and/or ARG1 being omitted, meaning an unlimited range. UPPER0_P
4020 and UPPER1_P are nonzero if the respective argument is an upper bound
4021 and zero for a lower. TYPE, if nonzero, is the type of the result; it
4022 must be specified for a comparison. ARG1 will be converted to ARG0's
4023 type if both are specified. */
4025 static tree
4026 range_binop (enum tree_code code, tree type, tree arg0, int upper0_p,
4027 tree arg1, int upper1_p)
4029 tree tem;
4030 int result;
4031 int sgn0, sgn1;
4033 /* If neither arg represents infinity, do the normal operation.
4034 Else, if not a comparison, return infinity. Else handle the special
4035 comparison rules. Note that most of the cases below won't occur, but
4036 are handled for consistency. */
4038 if (arg0 != 0 && arg1 != 0)
4040 tem = fold_build2 (code, type != 0 ? type : TREE_TYPE (arg0),
4041 arg0, fold_convert (TREE_TYPE (arg0), arg1));
4042 STRIP_NOPS (tem);
4043 return TREE_CODE (tem) == INTEGER_CST ? tem : 0;
4046 if (TREE_CODE_CLASS (code) != tcc_comparison)
4047 return 0;
4049 /* Set SGN[01] to -1 if ARG[01] is a lower bound, 1 for upper, and 0
4050 for neither. In real maths, we cannot assume open ended ranges are
4051 the same. But, this is computer arithmetic, where numbers are finite.
4052 We can therefore make the transformation of any unbounded range with
4053 the value Z, Z being greater than any representable number. This permits
4054 us to treat unbounded ranges as equal. */
4055 sgn0 = arg0 != 0 ? 0 : (upper0_p ? 1 : -1);
4056 sgn1 = arg1 != 0 ? 0 : (upper1_p ? 1 : -1);
4057 switch (code)
4059 case EQ_EXPR:
4060 result = sgn0 == sgn1;
4061 break;
4062 case NE_EXPR:
4063 result = sgn0 != sgn1;
4064 break;
4065 case LT_EXPR:
4066 result = sgn0 < sgn1;
4067 break;
4068 case LE_EXPR:
4069 result = sgn0 <= sgn1;
4070 break;
4071 case GT_EXPR:
4072 result = sgn0 > sgn1;
4073 break;
4074 case GE_EXPR:
4075 result = sgn0 >= sgn1;
4076 break;
4077 default:
4078 gcc_unreachable ();
4081 return constant_boolean_node (result, type);
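/* Illustrative: range_binop (LT_EXPR, type, 0, 0, high, 1) compares a
   missing lower bound (sgn0 == -1) against a present bound (sgn1 == 0)
   and yields true: the missing low end sorts below any finite value.  */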
4084 /* Helper routine for make_range. Perform one step for it, return
4085 new expression if the loop should continue or NULL_TREE if it should
4086 stop. */
4088 tree
4089 make_range_step (location_t loc, enum tree_code code, tree arg0, tree arg1,
4090 tree exp_type, tree *p_low, tree *p_high, int *p_in_p,
4091 bool *strict_overflow_p)
4093 tree arg0_type = TREE_TYPE (arg0);
4094 tree n_low, n_high, low = *p_low, high = *p_high;
4095 int in_p = *p_in_p, n_in_p;
4097 switch (code)
4099 case TRUTH_NOT_EXPR:
4100 /* We can only do something if the range is testing for zero. */
4101 if (low == NULL_TREE || high == NULL_TREE
4102 || ! integer_zerop (low) || ! integer_zerop (high))
4103 return NULL_TREE;
4104 *p_in_p = ! in_p;
4105 return arg0;
4107 case EQ_EXPR: case NE_EXPR:
4108 case LT_EXPR: case LE_EXPR: case GE_EXPR: case GT_EXPR:
4109 /* We can only do something if the range is testing for zero
4110 and if the second operand is an integer constant. Note that
4111 saying something is "in" the range we make is done by
4112 complementing IN_P since it will set in the initial case of
4113 being not equal to zero; "out" is leaving it alone. */
4114 if (low == NULL_TREE || high == NULL_TREE
4115 || ! integer_zerop (low) || ! integer_zerop (high)
4116 || TREE_CODE (arg1) != INTEGER_CST)
4117 return NULL_TREE;
4119 switch (code)
4121 case NE_EXPR: /* - [c, c] */
4122 low = high = arg1;
4123 break;
4124 case EQ_EXPR: /* + [c, c] */
4125 in_p = ! in_p, low = high = arg1;
4126 break;
4127 case GT_EXPR: /* - [-, c] */
4128 low = 0, high = arg1;
4129 break;
4130 case GE_EXPR: /* + [c, -] */
4131 in_p = ! in_p, low = arg1, high = 0;
4132 break;
4133 case LT_EXPR: /* - [c, -] */
4134 low = arg1, high = 0;
4135 break;
4136 case LE_EXPR: /* + [-, c] */
4137 in_p = ! in_p, low = 0, high = arg1;
4138 break;
4139 default:
4140 gcc_unreachable ();
4143 /* If this is an unsigned comparison, we also know that EXP is
4144 greater than or equal to zero. We base the range tests we make
4145 on that fact, so we record it here so we can parse existing
4146 range tests. We test arg0_type since often the return type
4147 of, e.g. EQ_EXPR, is boolean. */
4148 if (TYPE_UNSIGNED (arg0_type) && (low == 0 || high == 0))
4150 if (! merge_ranges (&n_in_p, &n_low, &n_high,
4151 in_p, low, high, 1,
4152 build_int_cst (arg0_type, 0),
4153 NULL_TREE))
4154 return NULL_TREE;
4156 in_p = n_in_p, low = n_low, high = n_high;
4158 /* If the high bound is missing, but we have a nonzero low
4159 bound, reverse the range so it goes from zero to the low bound
4160 minus 1. */
4161 if (high == 0 && low && ! integer_zerop (low))
4163 in_p = ! in_p;
4164 high = range_binop (MINUS_EXPR, NULL_TREE, low, 0,
4165 build_int_cst (TREE_TYPE (low), 1), 0);
4166 low = build_int_cst (arg0_type, 0);
4170 *p_low = low;
4171 *p_high = high;
4172 *p_in_p = in_p;
4173 return arg0;
4175 case NEGATE_EXPR:
4176 /* If flag_wrapv and ARG0_TYPE is signed, make sure
4177 low and high are non-NULL, then normalize will DTRT. */
4178 if (!TYPE_UNSIGNED (arg0_type)
4179 && !TYPE_OVERFLOW_UNDEFINED (arg0_type))
4181 if (low == NULL_TREE)
4182 low = TYPE_MIN_VALUE (arg0_type);
4183 if (high == NULL_TREE)
4184 high = TYPE_MAX_VALUE (arg0_type);
4187 /* (-x) IN [a,b] -> x in [-b, -a] */
4188 n_low = range_binop (MINUS_EXPR, exp_type,
4189 build_int_cst (exp_type, 0),
4190 0, high, 1);
4191 n_high = range_binop (MINUS_EXPR, exp_type,
4192 build_int_cst (exp_type, 0),
4193 0, low, 0);
4194 if (n_high != 0 && TREE_OVERFLOW (n_high))
4195 return NULL_TREE;
4196 goto normalize;
4198 case BIT_NOT_EXPR:
4199 /* ~ X -> -X - 1 */
4200 return build2_loc (loc, MINUS_EXPR, exp_type, negate_expr (arg0),
4201 build_int_cst (exp_type, 1));
4203 case PLUS_EXPR:
4204 case MINUS_EXPR:
4205 if (TREE_CODE (arg1) != INTEGER_CST)
4206 return NULL_TREE;
4208 /* If flag_wrapv and ARG0_TYPE is signed, then we cannot
4209 move a constant to the other side. */
4210 if (!TYPE_UNSIGNED (arg0_type)
4211 && !TYPE_OVERFLOW_UNDEFINED (arg0_type))
4212 return NULL_TREE;
4214 /* If EXP is signed, any overflow in the computation is undefined,
4215 so we don't worry about it so long as our computations on
4216 the bounds don't overflow. For unsigned, overflow is defined
4217 and this is exactly the right thing. */
4218 n_low = range_binop (code == MINUS_EXPR ? PLUS_EXPR : MINUS_EXPR,
4219 arg0_type, low, 0, arg1, 0);
4220 n_high = range_binop (code == MINUS_EXPR ? PLUS_EXPR : MINUS_EXPR,
4221 arg0_type, high, 1, arg1, 0);
4222 if ((n_low != 0 && TREE_OVERFLOW (n_low))
4223 || (n_high != 0 && TREE_OVERFLOW (n_high)))
4224 return NULL_TREE;
4226 if (TYPE_OVERFLOW_UNDEFINED (arg0_type))
4227 *strict_overflow_p = true;
4229 normalize:
4230 /* Check for an unsigned range which has wrapped around the maximum
4231 value thus making n_high < n_low, and normalize it. */
4232 if (n_low && n_high && tree_int_cst_lt (n_high, n_low))
4234 low = range_binop (PLUS_EXPR, arg0_type, n_high, 0,
4235 build_int_cst (TREE_TYPE (n_high), 1), 0);
4236 high = range_binop (MINUS_EXPR, arg0_type, n_low, 0,
4237 build_int_cst (TREE_TYPE (n_low), 1), 0);
4239 /* If the range is of the form +/- [ x+1, x ], we won't
4240 be able to normalize it. But then, it represents the
4241 whole range or the empty set, so make it
4242 +/- [ -, - ]. */
4243 if (tree_int_cst_equal (n_low, low)
4244 && tree_int_cst_equal (n_high, high))
4245 low = high = 0;
4246 else
4247 in_p = ! in_p;
4249 else
4250 low = n_low, high = n_high;
4252 *p_low = low;
4253 *p_high = high;
4254 *p_in_p = in_p;
4255 return arg0;
4257 CASE_CONVERT:
4258 case NON_LVALUE_EXPR:
4259 if (TYPE_PRECISION (arg0_type) > TYPE_PRECISION (exp_type))
4260 return NULL_TREE;
4262 if (! INTEGRAL_TYPE_P (arg0_type)
4263 || (low != 0 && ! int_fits_type_p (low, arg0_type))
4264 || (high != 0 && ! int_fits_type_p (high, arg0_type)))
4265 return NULL_TREE;
4267 n_low = low, n_high = high;
4269 if (n_low != 0)
4270 n_low = fold_convert_loc (loc, arg0_type, n_low);
4272 if (n_high != 0)
4273 n_high = fold_convert_loc (loc, arg0_type, n_high);
4275 /* If we're converting arg0 from an unsigned type to exp,
4276 a signed type, we will be doing the comparison as unsigned.
4277 The tests above have already verified that LOW and HIGH
4278 are both positive.
4280 So we have to ensure that we will handle large unsigned
4281 values the same way that the current signed bounds treat
4282 negative values. */
4284 if (!TYPE_UNSIGNED (exp_type) && TYPE_UNSIGNED (arg0_type))
4286 tree high_positive;
4287 tree equiv_type;
4288 /* For fixed-point modes, we need to pass the saturating flag
4289 as the 2nd parameter. */
4290 if (ALL_FIXED_POINT_MODE_P (TYPE_MODE (arg0_type)))
4291 equiv_type
4292 = lang_hooks.types.type_for_mode (TYPE_MODE (arg0_type),
4293 TYPE_SATURATING (arg0_type));
4294 else
4295 equiv_type
4296 = lang_hooks.types.type_for_mode (TYPE_MODE (arg0_type), 1);
4298 /* A range without an upper bound is, naturally, unbounded.
4299 Since convert would have cropped a very large value, use
4300 the max value for the destination type. */
4301 high_positive
4302 = TYPE_MAX_VALUE (equiv_type) ? TYPE_MAX_VALUE (equiv_type)
4303 : TYPE_MAX_VALUE (arg0_type);
4305 if (TYPE_PRECISION (exp_type) == TYPE_PRECISION (arg0_type))
4306 high_positive = fold_build2_loc (loc, RSHIFT_EXPR, arg0_type,
4307 fold_convert_loc (loc, arg0_type,
4308 high_positive),
4309 build_int_cst (arg0_type, 1));
4311 /* If the low bound is specified, "and" the range with the
4312 range for which the original unsigned value will be
4313 positive. */
4314 if (low != 0)
4316 if (! merge_ranges (&n_in_p, &n_low, &n_high, 1, n_low, n_high,
4317 1, fold_convert_loc (loc, arg0_type,
4318 integer_zero_node),
4319 high_positive))
4320 return NULL_TREE;
4322 in_p = (n_in_p == in_p);
4324 else
4326 /* Otherwise, "or" the range with the range of the input
4327 that will be interpreted as negative. */
4328 if (! merge_ranges (&n_in_p, &n_low, &n_high, 0, n_low, n_high,
4329 1, fold_convert_loc (loc, arg0_type,
4330 integer_zero_node),
4331 high_positive))
4332 return NULL_TREE;
4334 in_p = (in_p != n_in_p);
4338 *p_low = n_low;
4339 *p_high = n_high;
4340 *p_in_p = in_p;
4341 return arg0;
4343 default:
4344 return NULL_TREE;
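/* One illustrative step (assuming an 8-bit unsigned X): for the test
   (unsigned char) (X + 10) <= 19, i.e. + [0, 19] on X + 10, the
   PLUS_EXPR case computes n_low = 0 - 10 = 246 and n_high = 9; since
   n_high < n_low the range has wrapped, and "normalize" rewrites it as
   - [10, 245] on X, which denotes the same set of values.  */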
4348 /* Given EXP, a logical expression, set the range it is testing into
4349 variables denoted by PIN_P, PLOW, and PHIGH. Return the expression
4350 actually being tested. *PLOW and *PHIGH will be made of the same
4351 type as the returned expression. If EXP is not a comparison, we
4352 will most likely not be returning a useful value and range. Set
4353 *STRICT_OVERFLOW_P to true if the return value is only valid
4354 because signed overflow is undefined; otherwise, do not change
4355 *STRICT_OVERFLOW_P. */
4357 tree
4358 make_range (tree exp, int *pin_p, tree *plow, tree *phigh,
4359 bool *strict_overflow_p)
4361 enum tree_code code;
4362 tree arg0, arg1 = NULL_TREE;
4363 tree exp_type, nexp;
4364 int in_p;
4365 tree low, high;
4366 location_t loc = EXPR_LOCATION (exp);
4368 /* Start with simply saying "EXP != 0" and then look at the code of EXP
4369 and see if we can refine the range. Some of the cases below may not
4370 happen, but it doesn't seem worth worrying about this. We "continue"
4371 the outer loop when we've changed something; otherwise we "break"
4372 the switch, which will "break" the while. */
4374 in_p = 0;
4375 low = high = build_int_cst (TREE_TYPE (exp), 0);
4377 while (1)
4379 code = TREE_CODE (exp);
4380 exp_type = TREE_TYPE (exp);
4381 arg0 = NULL_TREE;
4383 if (IS_EXPR_CODE_CLASS (TREE_CODE_CLASS (code)))
4385 if (TREE_OPERAND_LENGTH (exp) > 0)
4386 arg0 = TREE_OPERAND (exp, 0);
4387 if (TREE_CODE_CLASS (code) == tcc_binary
4388 || TREE_CODE_CLASS (code) == tcc_comparison
4389 || (TREE_CODE_CLASS (code) == tcc_expression
4390 && TREE_OPERAND_LENGTH (exp) > 1))
4391 arg1 = TREE_OPERAND (exp, 1);
4393 if (arg0 == NULL_TREE)
4394 break;
4396 nexp = make_range_step (loc, code, arg0, arg1, exp_type, &low,
4397 &high, &in_p, strict_overflow_p);
4398 if (nexp == NULL_TREE)
4399 break;
4400 exp = nexp;
4403 /* If EXP is a constant, we can evaluate whether this is true or false. */
4404 if (TREE_CODE (exp) == INTEGER_CST)
4406 in_p = in_p == (integer_onep (range_binop (GE_EXPR, integer_type_node,
4407 exp, 0, low, 0))
4408 && integer_onep (range_binop (LE_EXPR, integer_type_node,
4409 exp, 1, high, 1)));
4410 low = high = 0;
4411 exp = 0;
4414 *pin_p = in_p, *plow = low, *phigh = high;
4415 return exp;
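/* Illustrative call (hypothetical operand): for EXP = (X > 5), one
   make_range_step application returns X with *PIN_P = 0, *PLOW = 0
   (no lower bound) and *PHIGH = 5 -- the "- [-, 5]" encoding of X > 5
   described above.  */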
4418 /* Given a range, LOW, HIGH, and IN_P, an expression, EXP, and a result
4419 type, TYPE, return an expression to test if EXP is in (or out of, depending
4420 on IN_P) the range. Return 0 if the test couldn't be created. */
4422 tree
4423 build_range_check (location_t loc, tree type, tree exp, int in_p,
4424 tree low, tree high)
4426 tree etype = TREE_TYPE (exp), value;
4428 /* Disable this optimization for function pointer expressions
4429 on targets that require function pointer canonicalization. */
4430 if (targetm.have_canonicalize_funcptr_for_compare ()
4431 && TREE_CODE (etype) == POINTER_TYPE
4432 && TREE_CODE (TREE_TYPE (etype)) == FUNCTION_TYPE)
4433 return NULL_TREE;
4435 if (! in_p)
4437 value = build_range_check (loc, type, exp, 1, low, high);
4438 if (value != 0)
4439 return invert_truthvalue_loc (loc, value);
4441 return 0;
4444 if (low == 0 && high == 0)
4445 return omit_one_operand_loc (loc, type, build_int_cst (type, 1), exp);
4447 if (low == 0)
4448 return fold_build2_loc (loc, LE_EXPR, type, exp,
4449 fold_convert_loc (loc, etype, high));
4451 if (high == 0)
4452 return fold_build2_loc (loc, GE_EXPR, type, exp,
4453 fold_convert_loc (loc, etype, low));
4455 if (operand_equal_p (low, high, 0))
4456 return fold_build2_loc (loc, EQ_EXPR, type, exp,
4457 fold_convert_loc (loc, etype, low));
4459 if (integer_zerop (low))
4461 if (! TYPE_UNSIGNED (etype))
4463 etype = unsigned_type_for (etype);
4464 high = fold_convert_loc (loc, etype, high);
4465 exp = fold_convert_loc (loc, etype, exp);
4467 return build_range_check (loc, type, exp, 1, 0, high);
4470 /* Optimize (c>=1) && (c<=127) into (signed char)c > 0. */
4471 if (integer_onep (low) && TREE_CODE (high) == INTEGER_CST)
4473 int prec = TYPE_PRECISION (etype);
4475 if (wi::mask (prec - 1, false, prec) == high)
4477 if (TYPE_UNSIGNED (etype))
4479 tree signed_etype = signed_type_for (etype);
4480 if (TYPE_PRECISION (signed_etype) != TYPE_PRECISION (etype))
4481 etype
4482 = build_nonstandard_integer_type (TYPE_PRECISION (etype), 0);
4483 else
4484 etype = signed_etype;
4485 exp = fold_convert_loc (loc, etype, exp);
4487 return fold_build2_loc (loc, GT_EXPR, type, exp,
4488 build_int_cst (etype, 0));
4492 /* Optimize (c>=low) && (c<=high) into (c-low>=0) && (c-low<=high-low).
4493 This requires wrap-around arithmetic for the type of the expression.
4494 First make sure that arithmetic in this type is valid, then make sure
4495 that it wraps around. */
4496 if (TREE_CODE (etype) == ENUMERAL_TYPE || TREE_CODE (etype) == BOOLEAN_TYPE)
4497 etype = lang_hooks.types.type_for_size (TYPE_PRECISION (etype),
4498 TYPE_UNSIGNED (etype));
4500 if (TREE_CODE (etype) == INTEGER_TYPE && !TYPE_OVERFLOW_WRAPS (etype))
4502 tree utype, minv, maxv;
4504 /* Check if (unsigned) INT_MAX + 1 == (unsigned) INT_MIN
4505 for the type in question, as we rely on this here. */
4506 utype = unsigned_type_for (etype);
4507 maxv = fold_convert_loc (loc, utype, TYPE_MAX_VALUE (etype));
4508 maxv = range_binop (PLUS_EXPR, NULL_TREE, maxv, 1,
4509 build_int_cst (TREE_TYPE (maxv), 1), 1);
4510 minv = fold_convert_loc (loc, utype, TYPE_MIN_VALUE (etype));
4512 if (integer_zerop (range_binop (NE_EXPR, integer_type_node,
4513 minv, 1, maxv, 1)))
4514 etype = utype;
4515 else
4516 return 0;
4519 high = fold_convert_loc (loc, etype, high);
4520 low = fold_convert_loc (loc, etype, low);
4521 exp = fold_convert_loc (loc, etype, exp);
4523 value = const_binop (MINUS_EXPR, high, low);
4526 if (POINTER_TYPE_P (etype))
4528 if (value != 0 && !TREE_OVERFLOW (value))
4530 low = fold_build1_loc (loc, NEGATE_EXPR, TREE_TYPE (low), low);
4531 return build_range_check (loc, type,
4532 fold_build_pointer_plus_loc (loc, exp, low),
4533 1, build_int_cst (etype, 0), value);
4535 return 0;
4538 if (value != 0 && !TREE_OVERFLOW (value))
4539 return build_range_check (loc, type,
4540 fold_build2_loc (loc, MINUS_EXPR, etype, exp, low),
4541 1, build_int_cst (etype, 0), value);
4543 return 0;
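/* Illustrative result (hypothetical types): a check for signed int X in
   [2, 5] takes the unsigned-conversion path above, since signed overflow
   does not wrap, and ends up as (unsigned) X - 2 <= 3 via the recursive
   call with a zero low bound.  */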
4546 /* Return the predecessor of VAL in its type, handling the infinite case. */
4548 static tree
4549 range_predecessor (tree val)
4551 tree type = TREE_TYPE (val);
4553 if (INTEGRAL_TYPE_P (type)
4554 && operand_equal_p (val, TYPE_MIN_VALUE (type), 0))
4555 return 0;
4556 else
4557 return range_binop (MINUS_EXPR, NULL_TREE, val, 0,
4558 build_int_cst (TREE_TYPE (val), 1), 0);
4561 /* Return the successor of VAL in its type, handling the infinite case. */
4563 static tree
4564 range_successor (tree val)
4566 tree type = TREE_TYPE (val);
4568 if (INTEGRAL_TYPE_P (type)
4569 && operand_equal_p (val, TYPE_MAX_VALUE (type), 0))
4570 return 0;
4571 else
4572 return range_binop (PLUS_EXPR, NULL_TREE, val, 0,
4573 build_int_cst (TREE_TYPE (val), 1), 0);
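/* Both helpers punt at the edge of the type instead of wrapping:
   range_successor of TYPE_MAX_VALUE and range_predecessor of
   TYPE_MIN_VALUE return 0, which the callers below treat as "no such
   neighbour".  */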
4576 /* Given two ranges, see if we can merge them into one. Return 1 if we
4577 can, 0 if we can't. Set the output range into the specified parameters. */
4579 bool
4580 merge_ranges (int *pin_p, tree *plow, tree *phigh, int in0_p, tree low0,
4581 tree high0, int in1_p, tree low1, tree high1)
4583 int no_overlap;
4584 int subset;
4585 int temp;
4586 tree tem;
4587 int in_p;
4588 tree low, high;
4589 int lowequal = ((low0 == 0 && low1 == 0)
4590 || integer_onep (range_binop (EQ_EXPR, integer_type_node,
4591 low0, 0, low1, 0)));
4592 int highequal = ((high0 == 0 && high1 == 0)
4593 || integer_onep (range_binop (EQ_EXPR, integer_type_node,
4594 high0, 1, high1, 1)));
4596 /* Make range 0 be the range that starts first, or ends last if they
4597 start at the same value. Swap them if it isn't. */
4598 if (integer_onep (range_binop (GT_EXPR, integer_type_node,
4599 low0, 0, low1, 0))
4600 || (lowequal
4601 && integer_onep (range_binop (GT_EXPR, integer_type_node,
4602 high1, 1, high0, 1))))
4604 temp = in0_p, in0_p = in1_p, in1_p = temp;
4605 tem = low0, low0 = low1, low1 = tem;
4606 tem = high0, high0 = high1, high1 = tem;
4609 /* Now flag two cases, whether the ranges are disjoint or whether the
4610 second range is totally subsumed in the first. Note that the tests
4611 below are simplified by the ones above. */
4612 no_overlap = integer_onep (range_binop (LT_EXPR, integer_type_node,
4613 high0, 1, low1, 0));
4614 subset = integer_onep (range_binop (LE_EXPR, integer_type_node,
4615 high1, 1, high0, 1));
4617 /* We now have four cases, depending on whether we are including or
4618 excluding the two ranges. */
4619 if (in0_p && in1_p)
4621 /* If they don't overlap, the result is false. If the second range
4622 is a subset it is the result. Otherwise, the range is from the start
4623 of the second to the end of the first. */
4624 if (no_overlap)
4625 in_p = 0, low = high = 0;
4626 else if (subset)
4627 in_p = 1, low = low1, high = high1;
4628 else
4629 in_p = 1, low = low1, high = high0;
4632 else if (in0_p && ! in1_p)
4634 /* If they don't overlap, the result is the first range. If they are
4635 equal, the result is false. If the second range is a subset of the
4636 first, and the ranges begin at the same place, we go from just after
4637 the end of the second range to the end of the first. If the second
4638 range is not a subset of the first, or if it is a subset and both
4639 ranges end at the same place, the range starts at the start of the
4640 first range and ends just before the second range.
4641 Otherwise, we can't describe this as a single range. */
4642 if (no_overlap)
4643 in_p = 1, low = low0, high = high0;
4644 else if (lowequal && highequal)
4645 in_p = 0, low = high = 0;
4646 else if (subset && lowequal)
4648 low = range_successor (high1);
4649 high = high0;
4650 in_p = 1;
4651 if (low == 0)
4653 /* We are in the weird situation where high0 > high1 but
4654 high1 has no successor. Punt. */
4655 return 0;
4658 else if (! subset || highequal)
4660 low = low0;
4661 high = range_predecessor (low1);
4662 in_p = 1;
4663 if (high == 0)
4665 /* low0 < low1 but low1 has no predecessor. Punt. */
4666 return 0;
4669 else
4670 return 0;
4673 else if (! in0_p && in1_p)
4675 /* If they don't overlap, the result is the second range. If the second
4676 is a subset of the first, the result is false. Otherwise,
4677 the range starts just after the first range and ends at the
4678 end of the second. */
4679 if (no_overlap)
4680 in_p = 1, low = low1, high = high1;
4681 else if (subset || highequal)
4682 in_p = 0, low = high = 0;
4683 else
4685 low = range_successor (high0);
4686 high = high1;
4687 in_p = 1;
4688 if (low == 0)
4690 /* high1 > high0 but high0 has no successor. Punt. */
4691 return 0;
4696 else
4698 /* The case where we are excluding both ranges. Here the complex case
4699 is if they don't overlap. In that case, the only time we have a
4700 range is if they are adjacent. If the second is a subset of the
4701 first, the result is the first. Otherwise, the range to exclude
4702 starts at the beginning of the first range and ends at the end of the
4703 second. */
4704 if (no_overlap)
4706 if (integer_onep (range_binop (EQ_EXPR, integer_type_node,
4707 range_successor (high0),
4708 1, low1, 0)))
4709 in_p = 0, low = low0, high = high1;
4710 else
4712 /* Canonicalize - [min, x] into - [-, x]. */
4713 if (low0 && TREE_CODE (low0) == INTEGER_CST)
4714 switch (TREE_CODE (TREE_TYPE (low0)))
4716 case ENUMERAL_TYPE:
4717 if (TYPE_PRECISION (TREE_TYPE (low0))
4718 != GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (low0))))
4719 break;
4720 /* FALLTHROUGH */
4721 case INTEGER_TYPE:
4722 if (tree_int_cst_equal (low0,
4723 TYPE_MIN_VALUE (TREE_TYPE (low0))))
4724 low0 = 0;
4725 break;
4726 case POINTER_TYPE:
4727 if (TYPE_UNSIGNED (TREE_TYPE (low0))
4728 && integer_zerop (low0))
4729 low0 = 0;
4730 break;
4731 default:
4732 break;
4735 /* Canonicalize - [x, max] into - [x, -]. */
4736 if (high1 && TREE_CODE (high1) == INTEGER_CST)
4737 switch (TREE_CODE (TREE_TYPE (high1)))
4739 case ENUMERAL_TYPE:
4740 if (TYPE_PRECISION (TREE_TYPE (high1))
4741 != GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (high1))))
4742 break;
4743 /* FALLTHROUGH */
4744 case INTEGER_TYPE:
4745 if (tree_int_cst_equal (high1,
4746 TYPE_MAX_VALUE (TREE_TYPE (high1))))
4747 high1 = 0;
4748 break;
4749 case POINTER_TYPE:
4750 if (TYPE_UNSIGNED (TREE_TYPE (high1))
4751 && integer_zerop (range_binop (PLUS_EXPR, NULL_TREE,
4752 high1, 1,
4753 build_int_cst (TREE_TYPE (high1), 1),
4754 1)))
4755 high1 = 0;
4756 break;
4757 default:
4758 break;
4761 /* The ranges might also be adjacent between the maximum and
4762 minimum values of the given type. For
4763 - [{min,-}, x] and - [y, {max,-}] ranges where x + 1 < y
4764 return + [x + 1, y - 1]. */
4765 if (low0 == 0 && high1 == 0)
4767 low = range_successor (high0);
4768 high = range_predecessor (low1);
4769 if (low == 0 || high == 0)
4770 return 0;
4772 in_p = 1;
4774 else
4775 return 0;
4778 else if (subset)
4779 in_p = 0, low = low0, high = high0;
4780 else
4781 in_p = 0, low = low0, high = high1;
4784 *pin_p = in_p, *plow = low, *phigh = high;
4785 return 1;
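/* Worked example (hypothetical ranges): merging + [2, 9] with + [5, 20]
   when in0_p and in1_p are both set (an AND of range tests) takes the
   overlapping branch of the first case and yields + [5, 9], the
   intersection.  The same pair with both flags clear (an OR, after
   fold_range_test's inversion) falls to the final case.  */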
4789 /* Subroutine of fold, looking inside expressions of the form
4790 A op B ? A : C, where ARG0, ARG1 and ARG2 are the three operands
4791 of the COND_EXPR. This function is being used also to optimize
4792 A op B ? C : A, by reversing the comparison first.
4794 Return a folded expression whose code is not a COND_EXPR
4795 anymore, or NULL_TREE if no folding opportunity is found. */
4797 static tree
4798 fold_cond_expr_with_comparison (location_t loc, tree type,
4799 tree arg0, tree arg1, tree arg2)
4801 enum tree_code comp_code = TREE_CODE (arg0);
4802 tree arg00 = TREE_OPERAND (arg0, 0);
4803 tree arg01 = TREE_OPERAND (arg0, 1);
4804 tree arg1_type = TREE_TYPE (arg1);
4805 tree tem;
4807 STRIP_NOPS (arg1);
4808 STRIP_NOPS (arg2);
4810 /* If we have A op 0 ? A : -A, consider applying the following
4811 transformations:
4813 A == 0? A : -A same as -A
4814 A != 0? A : -A same as A
4815 A >= 0? A : -A same as abs (A)
4816 A > 0? A : -A same as abs (A)
4817 A <= 0? A : -A same as -abs (A)
4818 A < 0? A : -A same as -abs (A)
4820 None of these transformations work for modes with signed
4821 zeros. If A is +/-0, the first two transformations will
4822 change the sign of the result (from +0 to -0, or vice
4823 versa). The last four will fix the sign of the result,
4824 even though the original expressions could be positive or
4825 negative, depending on the sign of A.
4827 Note that all these transformations are correct if A is
4828 NaN, since the two alternatives (A and -A) are also NaNs. */
4829 if (!HONOR_SIGNED_ZEROS (element_mode (type))
4830 && (FLOAT_TYPE_P (TREE_TYPE (arg01))
4831 ? real_zerop (arg01)
4832 : integer_zerop (arg01))
4833 && ((TREE_CODE (arg2) == NEGATE_EXPR
4834 && operand_equal_p (TREE_OPERAND (arg2, 0), arg1, 0))
4835 /* In the case that A is of the form X-Y, '-A' (arg2) may
4836 have already been folded to Y-X, check for that. */
4837 || (TREE_CODE (arg1) == MINUS_EXPR
4838 && TREE_CODE (arg2) == MINUS_EXPR
4839 && operand_equal_p (TREE_OPERAND (arg1, 0),
4840 TREE_OPERAND (arg2, 1), 0)
4841 && operand_equal_p (TREE_OPERAND (arg1, 1),
4842 TREE_OPERAND (arg2, 0), 0))))
4843 switch (comp_code)
4845 case EQ_EXPR:
4846 case UNEQ_EXPR:
4847 tem = fold_convert_loc (loc, arg1_type, arg1);
4848 return pedantic_non_lvalue_loc (loc,
4849 fold_convert_loc (loc, type,
4850 negate_expr (tem)));
4851 case NE_EXPR:
4852 case LTGT_EXPR:
4853 return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
4854 case UNGE_EXPR:
4855 case UNGT_EXPR:
4856 if (flag_trapping_math)
4857 break;
4858 /* Fall through. */
4859 case GE_EXPR:
4860 case GT_EXPR:
4861 if (TYPE_UNSIGNED (TREE_TYPE (arg1)))
4862 arg1 = fold_convert_loc (loc, signed_type_for
4863 (TREE_TYPE (arg1)), arg1);
4864 tem = fold_build1_loc (loc, ABS_EXPR, TREE_TYPE (arg1), arg1);
4865 return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, tem));
4866 case UNLE_EXPR:
4867 case UNLT_EXPR:
4868 if (flag_trapping_math)
4869 break;
4870 case LE_EXPR:
4871 case LT_EXPR:
4872 if (TYPE_UNSIGNED (TREE_TYPE (arg1)))
4873 arg1 = fold_convert_loc (loc, signed_type_for
4874 (TREE_TYPE (arg1)), arg1);
4875 tem = fold_build1_loc (loc, ABS_EXPR, TREE_TYPE (arg1), arg1);
4876 return negate_expr (fold_convert_loc (loc, type, tem));
4877 default:
4878 gcc_assert (TREE_CODE_CLASS (comp_code) == tcc_comparison);
4879 break;
4882 /* A != 0 ? A : 0 is simply A, unless A is -0. Likewise
4883 A == 0 ? A : 0 is always 0 unless A is -0. Note that
4884 both transformations are correct when A is NaN: A != 0
4885 is then true, and A == 0 is false. */
4887 if (!HONOR_SIGNED_ZEROS (element_mode (type))
4888 && integer_zerop (arg01) && integer_zerop (arg2))
4890 if (comp_code == NE_EXPR)
4891 return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
4892 else if (comp_code == EQ_EXPR)
4893 return build_zero_cst (type);
4896 /* Try some transformations of A op B ? A : B.
4898 A == B? A : B same as B
4899 A != B? A : B same as A
4900 A >= B? A : B same as max (A, B)
4901 A > B? A : B same as max (B, A)
4902 A <= B? A : B same as min (A, B)
4903 A < B? A : B same as min (B, A)
4905 As above, these transformations don't work in the presence
4906 of signed zeros. For example, if A and B are zeros of
4907 opposite sign, the first two transformations will change
4908 the sign of the result. In the last four, the original
4909 expressions give different results for (A=+0, B=-0) and
4910 (A=-0, B=+0), but the transformed expressions do not.
4912 The first two transformations are correct if either A or B
4913 is a NaN. In the first transformation, the condition will
4914 be false, and B will indeed be chosen. In the case of the
4915 second transformation, the condition A != B will be true,
4916 and A will be chosen.
4918 The conversions to max() and min() are not correct if B is
4919 a number and A is not. The conditions in the original
4920 expressions will be false, so all four give B. The min()
4921 and max() versions would give a NaN instead. */
4922 if (!HONOR_SIGNED_ZEROS (element_mode (type))
4923 && operand_equal_for_comparison_p (arg01, arg2, arg00)
4924 /* Avoid these transformations if the COND_EXPR may be used
4925 as an lvalue in the C++ front-end. PR c++/19199. */
4926 && (in_gimple_form
4927 || VECTOR_TYPE_P (type)
4928 || (! lang_GNU_CXX ()
4929 && strcmp (lang_hooks.name, "GNU Objective-C++") != 0)
4930 || ! maybe_lvalue_p (arg1)
4931 || ! maybe_lvalue_p (arg2)))
4933 tree comp_op0 = arg00;
4934 tree comp_op1 = arg01;
4935 tree comp_type = TREE_TYPE (comp_op0);
4937 /* Avoid adding NOP_EXPRs in case this is an lvalue. */
4938 if (TYPE_MAIN_VARIANT (comp_type) == TYPE_MAIN_VARIANT (type))
4940 comp_type = type;
4941 comp_op0 = arg1;
4942 comp_op1 = arg2;
4945 switch (comp_code)
4947 case EQ_EXPR:
4948 return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, arg2));
4949 case NE_EXPR:
4950 return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
4951 case LE_EXPR:
4952 case LT_EXPR:
4953 case UNLE_EXPR:
4954 case UNLT_EXPR:
4955 /* In C++ a ?: expression can be an lvalue, so put the
4956 operand which will be used if they are equal first
4957 so that we can convert this back to the
4958 corresponding COND_EXPR. */
4959 if (!HONOR_NANS (arg1))
4961 comp_op0 = fold_convert_loc (loc, comp_type, comp_op0);
4962 comp_op1 = fold_convert_loc (loc, comp_type, comp_op1);
4963 tem = (comp_code == LE_EXPR || comp_code == UNLE_EXPR)
4964 ? fold_build2_loc (loc, MIN_EXPR, comp_type, comp_op0, comp_op1)
4965 : fold_build2_loc (loc, MIN_EXPR, comp_type,
4966 comp_op1, comp_op0);
4967 return pedantic_non_lvalue_loc (loc,
4968 fold_convert_loc (loc, type, tem));
4970 break;
4971 case GE_EXPR:
4972 case GT_EXPR:
4973 case UNGE_EXPR:
4974 case UNGT_EXPR:
4975 if (!HONOR_NANS (arg1))
4977 comp_op0 = fold_convert_loc (loc, comp_type, comp_op0);
4978 comp_op1 = fold_convert_loc (loc, comp_type, comp_op1);
4979 tem = (comp_code == GE_EXPR || comp_code == UNGE_EXPR)
4980 ? fold_build2_loc (loc, MAX_EXPR, comp_type, comp_op0, comp_op1)
4981 : fold_build2_loc (loc, MAX_EXPR, comp_type,
4982 comp_op1, comp_op0);
4983 return pedantic_non_lvalue_loc (loc,
4984 fold_convert_loc (loc, type, tem));
4986 break;
4987 case UNEQ_EXPR:
4988 if (!HONOR_NANS (arg1))
4989 return pedantic_non_lvalue_loc (loc,
4990 fold_convert_loc (loc, type, arg2));
4991 break;
4992 case LTGT_EXPR:
4993 if (!HONOR_NANS (arg1))
4994 return pedantic_non_lvalue_loc (loc,
4995 fold_convert_loc (loc, type, arg1));
4996 break;
4997 default:
4998 gcc_assert (TREE_CODE_CLASS (comp_code) == tcc_comparison);
4999 break;
5003 /* If this is A op C1 ? A : C2 with C1 and C2 constant integers,
5004 we might still be able to simplify this. For example,
5005 if C1 is one less or one more than C2, this might have started
5006 out as a MIN or MAX and been transformed by this function.
5007 Only good for INTEGER_TYPEs, because we need TYPE_MAX_VALUE. */
5009 if (INTEGRAL_TYPE_P (type)
5010 && TREE_CODE (arg01) == INTEGER_CST
5011 && TREE_CODE (arg2) == INTEGER_CST)
5012 switch (comp_code)
5014 case EQ_EXPR:
5015 if (TREE_CODE (arg1) == INTEGER_CST)
5016 break;
5017 /* We can replace A with C1 in this case. */
5018 arg1 = fold_convert_loc (loc, type, arg01);
5019 return fold_build3_loc (loc, COND_EXPR, type, arg0, arg1, arg2);
5021 case LT_EXPR:
5022 /* If C1 is C2 + 1, this is min(A, C2), but use ARG00's type for
5023 MIN_EXPR, to preserve the signedness of the comparison. */
5024 if (! operand_equal_p (arg2, TYPE_MAX_VALUE (type),
5025 OEP_ONLY_CONST)
5026 && operand_equal_p (arg01,
5027 const_binop (PLUS_EXPR, arg2,
5028 build_int_cst (type, 1)),
5029 OEP_ONLY_CONST))
5031 tem = fold_build2_loc (loc, MIN_EXPR, TREE_TYPE (arg00), arg00,
5032 fold_convert_loc (loc, TREE_TYPE (arg00),
5033 arg2));
5034 return pedantic_non_lvalue_loc (loc,
5035 fold_convert_loc (loc, type, tem));
5037 break;
5039 case LE_EXPR:
5040 /* If C1 is C2 - 1, this is min(A, C2), with the same care
5041 as above. */
5042 if (! operand_equal_p (arg2, TYPE_MIN_VALUE (type),
5043 OEP_ONLY_CONST)
5044 && operand_equal_p (arg01,
5045 const_binop (MINUS_EXPR, arg2,
5046 build_int_cst (type, 1)),
5047 OEP_ONLY_CONST))
5049 tem = fold_build2_loc (loc, MIN_EXPR, TREE_TYPE (arg00), arg00,
5050 fold_convert_loc (loc, TREE_TYPE (arg00),
5051 arg2));
5052 return pedantic_non_lvalue_loc (loc,
5053 fold_convert_loc (loc, type, tem));
5055 break;
5057 case GT_EXPR:
5058 /* If C1 is C2 - 1, this is max(A, C2), but use ARG00's type for
5059 MAX_EXPR, to preserve the signedness of the comparison. */
5060 if (! operand_equal_p (arg2, TYPE_MIN_VALUE (type),
5061 OEP_ONLY_CONST)
5062 && operand_equal_p (arg01,
5063 const_binop (MINUS_EXPR, arg2,
5064 build_int_cst (type, 1)),
5065 OEP_ONLY_CONST))
5067 tem = fold_build2_loc (loc, MAX_EXPR, TREE_TYPE (arg00), arg00,
5068 fold_convert_loc (loc, TREE_TYPE (arg00),
5069 arg2));
5070 return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, tem));
5072 break;
5074 case GE_EXPR:
5075 /* If C1 is C2 + 1, this is max(A, C2), with the same care as above. */
5076 if (! operand_equal_p (arg2, TYPE_MAX_VALUE (type),
5077 OEP_ONLY_CONST)
5078 && operand_equal_p (arg01,
5079 const_binop (PLUS_EXPR, arg2,
5080 build_int_cst (type, 1)),
5081 OEP_ONLY_CONST))
5083 tem = fold_build2_loc (loc, MAX_EXPR, TREE_TYPE (arg00), arg00,
5084 fold_convert_loc (loc, TREE_TYPE (arg00),
5085 arg2));
5086 return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, tem));
5088 break;
5089 case NE_EXPR:
5090 break;
5091 default:
5092 gcc_unreachable ();
5095 return NULL_TREE;
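/* Illustrative folds performed above (hypothetical operands):
     x > 0 ? x : -x   ->  ABS_EXPR <x>      (the A op 0 ? A : -A block);
     x < y ? x : y    ->  a MIN_EXPR        (when NaNs are not honored);
     x > 4 ? x : 5    ->  MAX_EXPR <x, 5>   (the C1 == C2 - 1 case).  */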
5100 #ifndef LOGICAL_OP_NON_SHORT_CIRCUIT
5101 #define LOGICAL_OP_NON_SHORT_CIRCUIT \
5102 (BRANCH_COST (optimize_function_for_speed_p (cfun), \
5103 false) >= 2)
5104 #endif
5106 /* EXP is some logical combination of boolean tests. See if we can
5107 merge it into some range test. Return the new tree if so. */
5109 static tree
5110 fold_range_test (location_t loc, enum tree_code code, tree type,
5111 tree op0, tree op1)
5113 int or_op = (code == TRUTH_ORIF_EXPR
5114 || code == TRUTH_OR_EXPR);
5115 int in0_p, in1_p, in_p;
5116 tree low0, low1, low, high0, high1, high;
5117 bool strict_overflow_p = false;
5118 tree tem, lhs, rhs;
5119 const char * const warnmsg = G_("assuming signed overflow does not occur "
5120 "when simplifying range test");
5122 if (!INTEGRAL_TYPE_P (type))
5123 return 0;
5125 lhs = make_range (op0, &in0_p, &low0, &high0, &strict_overflow_p);
5126 rhs = make_range (op1, &in1_p, &low1, &high1, &strict_overflow_p);
5128 /* If this is an OR operation, invert both sides; we will invert
5129 again at the end. */
5130 if (or_op)
5131 in0_p = ! in0_p, in1_p = ! in1_p;
5133 /* If both expressions are the same, if we can merge the ranges, and we
5134 can build the range test, return it or it inverted. If one of the
5135 ranges is always true or always false, consider it to be the same
5136 expression as the other. */
5137 if ((lhs == 0 || rhs == 0 || operand_equal_p (lhs, rhs, 0))
5138 && merge_ranges (&in_p, &low, &high, in0_p, low0, high0,
5139 in1_p, low1, high1)
5140 && 0 != (tem = (build_range_check (loc, type,
5141 lhs != 0 ? lhs
5142 : rhs != 0 ? rhs : integer_zero_node,
5143 in_p, low, high))))
5145 if (strict_overflow_p)
5146 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_COMPARISON);
5147 return or_op ? invert_truthvalue_loc (loc, tem) : tem;
5150 /* On machines where the branch cost is expensive, if this is a
5151 short-circuited branch and the underlying object on both sides
5152 is the same, make a non-short-circuit operation. */
5153 else if (LOGICAL_OP_NON_SHORT_CIRCUIT
5154 && lhs != 0 && rhs != 0
5155 && (code == TRUTH_ANDIF_EXPR
5156 || code == TRUTH_ORIF_EXPR)
5157 && operand_equal_p (lhs, rhs, 0))
5159 /* If simple enough, just rewrite. Otherwise, make a SAVE_EXPR
5160 unless we are at top level or LHS contains a PLACEHOLDER_EXPR, in
5161 which cases we can't do this. */
5162 if (simple_operand_p (lhs))
5163 return build2_loc (loc, code == TRUTH_ANDIF_EXPR
5164 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR,
5165 type, op0, op1);
5167 else if (!lang_hooks.decls.global_bindings_p ()
5168 && !CONTAINS_PLACEHOLDER_P (lhs))
5170 tree common = save_expr (lhs);
5172 if (0 != (lhs = build_range_check (loc, type, common,
5173 or_op ? ! in0_p : in0_p,
5174 low0, high0))
5175 && (0 != (rhs = build_range_check (loc, type, common,
5176 or_op ? ! in1_p : in1_p,
5177 low1, high1))))
5179 if (strict_overflow_p)
5180 fold_overflow_warning (warnmsg,
5181 WARN_STRICT_OVERFLOW_COMPARISON);
5182 return build2_loc (loc, code == TRUTH_ANDIF_EXPR
5183 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR,
5184 type, lhs, rhs);
5189 return 0;
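/* Classic illustrative case: "ch >= '0' && ch <= '9'" yields the ranges
   + [48, -] and + [-, 57]; merge_ranges combines them into + [48, 57]
   and build_range_check emits (unsigned) (ch - 48) <= 9.  The exact
   trees depend on the type of ch.  */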
5192 /* Subroutine for fold_truth_andor_1: C is an INTEGER_CST interpreted as a P
5193 bit value. Arrange things so the extra bits will be set to zero if and
5194 only if C is sign-extended to its full width. If MASK is nonzero,
5195 it is an INTEGER_CST that should be AND'ed with the extra bits. */
5197 static tree
5198 unextend (tree c, int p, int unsignedp, tree mask)
5200 tree type = TREE_TYPE (c);
5201 int modesize = GET_MODE_BITSIZE (TYPE_MODE (type));
5202 tree temp;
5204 if (p == modesize || unsignedp)
5205 return c;
5207 /* We work by getting just the sign bit into the low-order bit, then
5208 into the high-order bit, then sign-extend. We then XOR that value
5209 with C. */
5210 temp = build_int_cst (TREE_TYPE (c), wi::extract_uhwi (c, p - 1, 1));
5212 /* We must use a signed type in order to get an arithmetic right shift.
5213 However, we must also avoid introducing accidental overflows, so that
5214 a subsequent call to integer_zerop will work. Hence we must
5215 do the type conversion here. At this point, the constant is either
5216 zero or one, and the conversion to a signed type can never overflow.
5217 We could get an overflow if this conversion is done anywhere else. */
5218 if (TYPE_UNSIGNED (type))
5219 temp = fold_convert (signed_type_for (type), temp);
5221 temp = const_binop (LSHIFT_EXPR, temp, size_int (modesize - 1));
5222 temp = const_binop (RSHIFT_EXPR, temp, size_int (modesize - p - 1));
5223 if (mask != 0)
5224 temp = const_binop (BIT_AND_EXPR, temp,
5225 fold_convert (TREE_TYPE (c), mask));
5226 /* If necessary, convert the type back to match the type of C. */
5227 if (TYPE_UNSIGNED (type))
5228 temp = fold_convert (type, temp);
5230 return fold_convert (type, const_binop (BIT_XOR_EXPR, c, temp));
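/* Illustrative (hypothetical 4-bit field in an 8-bit mode): for C = 10
   (0b1010) taken from a signed field, P = 4, the sign bit is 1; shifted
   to bit 7 and arithmetic-shifted back it smears bits 4..7 (0xF0), and
   the final XOR gives 0xFA -- C as it would look sign-extended to the
   full mode.  */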
5233 /* For an expression that has the form
5234 (A && B) || ~B
5235 or
5236 (A || B) && ~B,
5237 we can drop one of the inner expressions and simplify to
5238 A || ~B
5239 or
5240 A && ~B
5241 LOC is the location of the resulting expression. OP is the inner
5242 logical operation; the left-hand side in the examples above, while CMPOP
5243 is the right-hand side. RHS_ONLY is used to prevent us from accidentally
5244 removing a condition that guards another, as in
5245 (A != NULL && A->...) || A == NULL
5246 which we must not transform. If RHS_ONLY is true, only eliminate the
5247 right-most operand of the inner logical operation. */
5249 static tree
5250 merge_truthop_with_opposite_arm (location_t loc, tree op, tree cmpop,
5251 bool rhs_only)
5253 tree type = TREE_TYPE (cmpop);
5254 enum tree_code code = TREE_CODE (cmpop);
5255 enum tree_code truthop_code = TREE_CODE (op);
5256 tree lhs = TREE_OPERAND (op, 0);
5257 tree rhs = TREE_OPERAND (op, 1);
5258 tree orig_lhs = lhs, orig_rhs = rhs;
5259 enum tree_code rhs_code = TREE_CODE (rhs);
5260 enum tree_code lhs_code = TREE_CODE (lhs);
5261 enum tree_code inv_code;
5263 if (TREE_SIDE_EFFECTS (op) || TREE_SIDE_EFFECTS (cmpop))
5264 return NULL_TREE;
5266 if (TREE_CODE_CLASS (code) != tcc_comparison)
5267 return NULL_TREE;
5269 if (rhs_code == truthop_code)
5271 tree newrhs = merge_truthop_with_opposite_arm (loc, rhs, cmpop, rhs_only);
5272 if (newrhs != NULL_TREE)
5274 rhs = newrhs;
5275 rhs_code = TREE_CODE (rhs);
5278 if (lhs_code == truthop_code && !rhs_only)
5280 tree newlhs = merge_truthop_with_opposite_arm (loc, lhs, cmpop, false);
5281 if (newlhs != NULL_TREE)
5283 lhs = newlhs;
5284 lhs_code = TREE_CODE (lhs);
5288 inv_code = invert_tree_comparison (code, HONOR_NANS (type));
5289 if (inv_code == rhs_code
5290 && operand_equal_p (TREE_OPERAND (rhs, 0), TREE_OPERAND (cmpop, 0), 0)
5291 && operand_equal_p (TREE_OPERAND (rhs, 1), TREE_OPERAND (cmpop, 1), 0))
5292 return lhs;
5293 if (!rhs_only && inv_code == lhs_code
5294 && operand_equal_p (TREE_OPERAND (lhs, 0), TREE_OPERAND (cmpop, 0), 0)
5295 && operand_equal_p (TREE_OPERAND (lhs, 1), TREE_OPERAND (cmpop, 1), 0))
5296 return rhs;
5297 if (rhs != orig_rhs || lhs != orig_lhs)
5298 return fold_build2_loc (loc, truthop_code, TREE_TYPE (cmpop),
5299 lhs, rhs);
5300 return NULL_TREE;
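/* Illustrative uses (hypothetical expressions):
     (a < b && c) || a >= b  ->  c || a >= b,  dropping the redundant
   inner comparison; whereas with RHS_ONLY set,
     (a != 0 && a->f) || a == 0  keeps "a != 0" in place, since removing
   the guard could let a->f trap.  */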
5303 /* Find ways of folding logical expressions of LHS and RHS:
5304 Try to merge two comparisons to the same innermost item.
5305 Look for range tests like "ch >= '0' && ch <= '9'".
5306 Look for combinations of simple terms on machines with expensive branches
5307 and evaluate the RHS unconditionally.
5309 For example, if we have p->a == 2 && p->b == 4 and we can make an
5310 object large enough to span both A and B, we can do this with a comparison
5311 against the object ANDed with the a mask.
5313 If we have p->a == q->a && p->b == q->b, we may be able to use bit masking
5314 operations to do this with one comparison.
5316 We check for both normal comparisons and the BIT_AND_EXPRs made by this
5317 function and the one above.
5319 CODE is the logical operation being done. It can be TRUTH_ANDIF_EXPR,
5320 TRUTH_AND_EXPR, TRUTH_ORIF_EXPR, or TRUTH_OR_EXPR.
5322 TRUTH_TYPE is the type of the logical operand and LHS and RHS are its
5323 two operands.
5325 We return the simplified tree or 0 if no optimization is possible. */
5327 static tree
5328 fold_truth_andor_1 (location_t loc, enum tree_code code, tree truth_type,
5329 tree lhs, tree rhs)
5331 /* If this is the "or" of two comparisons, we can do something if
5332 the comparisons are NE_EXPR. If this is the "and", we can do something
5333 if the comparisons are EQ_EXPR. I.e.,
5334 (a->b == 2 && a->c == 4) can become (a->new == NEW).
5336 WANTED_CODE is this operation code. For single bit fields, we can
5337 convert EQ_EXPR to NE_EXPR so we need not reject the "wrong"
5338 comparison for one-bit fields. */
5340 enum tree_code wanted_code;
5341 enum tree_code lcode, rcode;
5342 tree ll_arg, lr_arg, rl_arg, rr_arg;
5343 tree ll_inner, lr_inner, rl_inner, rr_inner;
5344 HOST_WIDE_INT ll_bitsize, ll_bitpos, lr_bitsize, lr_bitpos;
5345 HOST_WIDE_INT rl_bitsize, rl_bitpos, rr_bitsize, rr_bitpos;
5346 HOST_WIDE_INT xll_bitpos, xlr_bitpos, xrl_bitpos, xrr_bitpos;
5347 HOST_WIDE_INT lnbitsize, lnbitpos, rnbitsize, rnbitpos;
5348 int ll_unsignedp, lr_unsignedp, rl_unsignedp, rr_unsignedp;
5349 machine_mode ll_mode, lr_mode, rl_mode, rr_mode;
5350 machine_mode lnmode, rnmode;
5351 tree ll_mask, lr_mask, rl_mask, rr_mask;
5352 tree ll_and_mask, lr_and_mask, rl_and_mask, rr_and_mask;
5353 tree l_const, r_const;
5354 tree lntype, rntype, result;
5355 HOST_WIDE_INT first_bit, end_bit;
5356 int volatilep;
5358 /* Start by getting the comparison codes. Fail if anything is volatile.
5359 If one operand is a BIT_AND_EXPR with the constant one, treat it as if
5360 it were surrounded with a NE_EXPR. */
5362 if (TREE_SIDE_EFFECTS (lhs) || TREE_SIDE_EFFECTS (rhs))
5363 return 0;
5365 lcode = TREE_CODE (lhs);
5366 rcode = TREE_CODE (rhs);
5368 if (lcode == BIT_AND_EXPR && integer_onep (TREE_OPERAND (lhs, 1)))
5370 lhs = build2 (NE_EXPR, truth_type, lhs,
5371 build_int_cst (TREE_TYPE (lhs), 0));
5372 lcode = NE_EXPR;
5375 if (rcode == BIT_AND_EXPR && integer_onep (TREE_OPERAND (rhs, 1)))
5377 rhs = build2 (NE_EXPR, truth_type, rhs,
5378 build_int_cst (TREE_TYPE (rhs), 0));
5379 rcode = NE_EXPR;
5382 if (TREE_CODE_CLASS (lcode) != tcc_comparison
5383 || TREE_CODE_CLASS (rcode) != tcc_comparison)
5384 return 0;
5386 ll_arg = TREE_OPERAND (lhs, 0);
5387 lr_arg = TREE_OPERAND (lhs, 1);
5388 rl_arg = TREE_OPERAND (rhs, 0);
5389 rr_arg = TREE_OPERAND (rhs, 1);
5391 /* Simplify (x<y) && (x==y) into (x<=y) and related optimizations. */
5392 if (simple_operand_p (ll_arg)
5393 && simple_operand_p (lr_arg))
5395 if (operand_equal_p (ll_arg, rl_arg, 0)
5396 && operand_equal_p (lr_arg, rr_arg, 0))
5398 result = combine_comparisons (loc, code, lcode, rcode,
5399 truth_type, ll_arg, lr_arg);
5400 if (result)
5401 return result;
5403 else if (operand_equal_p (ll_arg, rr_arg, 0)
5404 && operand_equal_p (lr_arg, rl_arg, 0))
5406 result = combine_comparisons (loc, code, lcode,
5407 swap_tree_comparison (rcode),
5408 truth_type, ll_arg, lr_arg);
5409 if (result)
5410 return result;
5414 code = ((code == TRUTH_AND_EXPR || code == TRUTH_ANDIF_EXPR)
5415 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR);
5417 /* If the RHS can be evaluated unconditionally and its operands are
5418 simple, it wins to evaluate the RHS unconditionally on machines
5419 with expensive branches. In this case, this isn't a comparison
5420 that can be merged. */
5422 if (BRANCH_COST (optimize_function_for_speed_p (cfun),
5423 false) >= 2
5424 && ! FLOAT_TYPE_P (TREE_TYPE (rl_arg))
5425 && simple_operand_p (rl_arg)
5426 && simple_operand_p (rr_arg))
5428 /* Convert (a != 0) || (b != 0) into (a | b) != 0. */
5429 if (code == TRUTH_OR_EXPR
5430 && lcode == NE_EXPR && integer_zerop (lr_arg)
5431 && rcode == NE_EXPR && integer_zerop (rr_arg)
5432 && TREE_TYPE (ll_arg) == TREE_TYPE (rl_arg)
5433 && INTEGRAL_TYPE_P (TREE_TYPE (ll_arg)))
5434 return build2_loc (loc, NE_EXPR, truth_type,
5435 build2 (BIT_IOR_EXPR, TREE_TYPE (ll_arg),
5436 ll_arg, rl_arg),
5437 build_int_cst (TREE_TYPE (ll_arg), 0));
5439 /* Convert (a == 0) && (b == 0) into (a | b) == 0. */
5440 if (code == TRUTH_AND_EXPR
5441 && lcode == EQ_EXPR && integer_zerop (lr_arg)
5442 && rcode == EQ_EXPR && integer_zerop (rr_arg)
5443 && TREE_TYPE (ll_arg) == TREE_TYPE (rl_arg)
5444 && INTEGRAL_TYPE_P (TREE_TYPE (ll_arg)))
5445 return build2_loc (loc, EQ_EXPR, truth_type,
5446 build2 (BIT_IOR_EXPR, TREE_TYPE (ll_arg),
5447 ll_arg, rl_arg),
5448 build_int_cst (TREE_TYPE (ll_arg), 0));
5451 /* See if the comparisons can be merged. Then get all the parameters for
5452 each side. */
5454 if ((lcode != EQ_EXPR && lcode != NE_EXPR)
5455 || (rcode != EQ_EXPR && rcode != NE_EXPR))
5456 return 0;
5458 volatilep = 0;
5459 ll_inner = decode_field_reference (loc, ll_arg,
5460 &ll_bitsize, &ll_bitpos, &ll_mode,
5461 &ll_unsignedp, &volatilep, &ll_mask,
5462 &ll_and_mask);
5463 lr_inner = decode_field_reference (loc, lr_arg,
5464 &lr_bitsize, &lr_bitpos, &lr_mode,
5465 &lr_unsignedp, &volatilep, &lr_mask,
5466 &lr_and_mask);
5467 rl_inner = decode_field_reference (loc, rl_arg,
5468 &rl_bitsize, &rl_bitpos, &rl_mode,
5469 &rl_unsignedp, &volatilep, &rl_mask,
5470 &rl_and_mask);
5471 rr_inner = decode_field_reference (loc, rr_arg,
5472 &rr_bitsize, &rr_bitpos, &rr_mode,
5473 &rr_unsignedp, &volatilep, &rr_mask,
5474 &rr_and_mask);
5476 /* The inner operation on the lhs of each comparison must be the
5477 same if we are to be able to do anything.
5478 Then see if we have constants. If not, the same must be true for
5479 the rhs's. */
5480 if (volatilep || ll_inner == 0 || rl_inner == 0
5481 || ! operand_equal_p (ll_inner, rl_inner, 0))
5482 return 0;
5484 if (TREE_CODE (lr_arg) == INTEGER_CST
5485 && TREE_CODE (rr_arg) == INTEGER_CST)
5486 l_const = lr_arg, r_const = rr_arg;
5487 else if (lr_inner == 0 || rr_inner == 0
5488 || ! operand_equal_p (lr_inner, rr_inner, 0))
5489 return 0;
5490 else
5491 l_const = r_const = 0;
5493 /* If either comparison code is not correct for our logical operation,
5494 fail. However, we can convert a one-bit comparison against zero into
5495 the opposite comparison against that bit being set in the field. */
5497 wanted_code = (code == TRUTH_AND_EXPR ? EQ_EXPR : NE_EXPR);
5498 if (lcode != wanted_code)
5500 if (l_const && integer_zerop (l_const) && integer_pow2p (ll_mask))
5502 /* Make the left operand unsigned, since we are only interested
5503 in the value of one bit. Otherwise we are doing the wrong
5504 thing below. */
5505 ll_unsignedp = 1;
5506 l_const = ll_mask;
5508 else
5509 return 0;
5512 /* This is analogous to the code for l_const above. */
5513 if (rcode != wanted_code)
5515 if (r_const && integer_zerop (r_const) && integer_pow2p (rl_mask))
5517 rl_unsignedp = 1;
5518 r_const = rl_mask;
5520 else
5521 return 0;
5524 /* See if we can find a mode that contains both fields being compared on
5525 the left. If we can't, fail. Otherwise, update all constants and masks
5526 to be relative to a field of that size. */
5527 first_bit = MIN (ll_bitpos, rl_bitpos);
5528 end_bit = MAX (ll_bitpos + ll_bitsize, rl_bitpos + rl_bitsize);
5529 lnmode = get_best_mode (end_bit - first_bit, first_bit, 0, 0,
5530 TYPE_ALIGN (TREE_TYPE (ll_inner)), word_mode,
5531 volatilep);
5532 if (lnmode == VOIDmode)
5533 return 0;
5535 lnbitsize = GET_MODE_BITSIZE (lnmode);
5536 lnbitpos = first_bit & ~ (lnbitsize - 1);
5537 lntype = lang_hooks.types.type_for_size (lnbitsize, 1);
5538 xll_bitpos = ll_bitpos - lnbitpos, xrl_bitpos = rl_bitpos - lnbitpos;
5540 if (BYTES_BIG_ENDIAN)
5542 xll_bitpos = lnbitsize - xll_bitpos - ll_bitsize;
5543 xrl_bitpos = lnbitsize - xrl_bitpos - rl_bitsize;
5546 ll_mask = const_binop (LSHIFT_EXPR, fold_convert_loc (loc, lntype, ll_mask),
5547 size_int (xll_bitpos));
5548 rl_mask = const_binop (LSHIFT_EXPR, fold_convert_loc (loc, lntype, rl_mask),
5549 size_int (xrl_bitpos));
5551 if (l_const)
5553 l_const = fold_convert_loc (loc, lntype, l_const);
5554 l_const = unextend (l_const, ll_bitsize, ll_unsignedp, ll_and_mask);
5555 l_const = const_binop (LSHIFT_EXPR, l_const, size_int (xll_bitpos));
5556 if (! integer_zerop (const_binop (BIT_AND_EXPR, l_const,
5557 fold_build1_loc (loc, BIT_NOT_EXPR,
5558 lntype, ll_mask))))
5560 warning (0, "comparison is always %d", wanted_code == NE_EXPR);
5562 return constant_boolean_node (wanted_code == NE_EXPR, truth_type);
5565 if (r_const)
5567 r_const = fold_convert_loc (loc, lntype, r_const);
5568 r_const = unextend (r_const, rl_bitsize, rl_unsignedp, rl_and_mask);
5569 r_const = const_binop (LSHIFT_EXPR, r_const, size_int (xrl_bitpos));
5570 if (! integer_zerop (const_binop (BIT_AND_EXPR, r_const,
5571 fold_build1_loc (loc, BIT_NOT_EXPR,
5572 lntype, rl_mask))))
5574 warning (0, "comparison is always %d", wanted_code == NE_EXPR);
5576 return constant_boolean_node (wanted_code == NE_EXPR, truth_type);
5580 /* If the right sides are not constant, do the same for them. Also,
5581 disallow this optimization if a size or signedness mismatch occurs
5582 between the left and right sides. */
5583 if (l_const == 0)
5585 if (ll_bitsize != lr_bitsize || rl_bitsize != rr_bitsize
5586 || ll_unsignedp != lr_unsignedp || rl_unsignedp != rr_unsignedp
5587 /* Make sure the two fields on the right
5588 correspond to the left without being swapped. */
5589 || ll_bitpos - rl_bitpos != lr_bitpos - rr_bitpos)
5590 return 0;
5592 first_bit = MIN (lr_bitpos, rr_bitpos);
5593 end_bit = MAX (lr_bitpos + lr_bitsize, rr_bitpos + rr_bitsize);
5594 rnmode = get_best_mode (end_bit - first_bit, first_bit, 0, 0,
5595 TYPE_ALIGN (TREE_TYPE (lr_inner)), word_mode,
5596 volatilep);
5597 if (rnmode == VOIDmode)
5598 return 0;
5600 rnbitsize = GET_MODE_BITSIZE (rnmode);
5601 rnbitpos = first_bit & ~ (rnbitsize - 1);
5602 rntype = lang_hooks.types.type_for_size (rnbitsize, 1);
5603 xlr_bitpos = lr_bitpos - rnbitpos, xrr_bitpos = rr_bitpos - rnbitpos;
5605 if (BYTES_BIG_ENDIAN)
5607 xlr_bitpos = rnbitsize - xlr_bitpos - lr_bitsize;
5608 xrr_bitpos = rnbitsize - xrr_bitpos - rr_bitsize;
5611 lr_mask = const_binop (LSHIFT_EXPR, fold_convert_loc (loc,
5612 rntype, lr_mask),
5613 size_int (xlr_bitpos));
5614 rr_mask = const_binop (LSHIFT_EXPR, fold_convert_loc (loc,
5615 rntype, rr_mask),
5616 size_int (xrr_bitpos));
5618 /* Make a mask that corresponds to both fields being compared.
5619 Do this for both items being compared. If the operands are the
5620 same size and the bits being compared are in the same position
5621 then we can do this by masking both and comparing the masked
5622 results. */
5623 ll_mask = const_binop (BIT_IOR_EXPR, ll_mask, rl_mask);
5624 lr_mask = const_binop (BIT_IOR_EXPR, lr_mask, rr_mask);
5625 if (lnbitsize == rnbitsize && xll_bitpos == xlr_bitpos)
5627 lhs = make_bit_field_ref (loc, ll_inner, lntype, lnbitsize, lnbitpos,
5628 ll_unsignedp || rl_unsignedp);
5629 if (! all_ones_mask_p (ll_mask, lnbitsize))
5630 lhs = build2 (BIT_AND_EXPR, lntype, lhs, ll_mask);
5632 rhs = make_bit_field_ref (loc, lr_inner, rntype, rnbitsize, rnbitpos,
5633 lr_unsignedp || rr_unsignedp);
5634 if (! all_ones_mask_p (lr_mask, rnbitsize))
5635 rhs = build2 (BIT_AND_EXPR, rntype, rhs, lr_mask);
5637 return build2_loc (loc, wanted_code, truth_type, lhs, rhs);
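/* A sketch of this case (a hypothetical layout; actual bit positions
   are target-dependent): given
       struct s { unsigned a : 4; unsigned b : 4; } x, y;
   the test x.a == y.a && x.b == y.b loads each containing word once,
   masks both with the IOR of the two field masks, and compares the
   masked words with a single EQ_EXPR.  */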
5640 /* There is still another way we can merge the tests: if both pairs of
5641 fields being compared are adjacent, we may be able to make a wider
5642 field containing them both.
5644 Note that we still must mask the lhs/rhs expressions. Furthermore,
5645 the mask must be shifted to account for the shift done by
5646 make_bit_field_ref. */
5647 if ((ll_bitsize + ll_bitpos == rl_bitpos
5648 && lr_bitsize + lr_bitpos == rr_bitpos)
5649 || (ll_bitpos == rl_bitpos + rl_bitsize
5650 && lr_bitpos == rr_bitpos + rr_bitsize))
5652 tree type;
5654 lhs = make_bit_field_ref (loc, ll_inner, lntype,
5655 ll_bitsize + rl_bitsize,
5656 MIN (ll_bitpos, rl_bitpos), ll_unsignedp);
5657 rhs = make_bit_field_ref (loc, lr_inner, rntype,
5658 lr_bitsize + rr_bitsize,
5659 MIN (lr_bitpos, rr_bitpos), lr_unsignedp);
5661 ll_mask = const_binop (RSHIFT_EXPR, ll_mask,
5662 size_int (MIN (xll_bitpos, xrl_bitpos)));
5663 lr_mask = const_binop (RSHIFT_EXPR, lr_mask,
5664 size_int (MIN (xlr_bitpos, xrr_bitpos)));
5666 /* Convert to the smaller type before masking out unwanted bits. */
5667 type = lntype;
5668 if (lntype != rntype)
5670 if (lnbitsize > rnbitsize)
5672 lhs = fold_convert_loc (loc, rntype, lhs);
5673 ll_mask = fold_convert_loc (loc, rntype, ll_mask);
5674 type = rntype;
5676 else if (lnbitsize < rnbitsize)
5678 rhs = fold_convert_loc (loc, lntype, rhs);
5679 lr_mask = fold_convert_loc (loc, lntype, lr_mask);
5680 type = lntype;
5684 if (! all_ones_mask_p (ll_mask, ll_bitsize + rl_bitsize))
5685 lhs = build2 (BIT_AND_EXPR, type, lhs, ll_mask);
5687 if (! all_ones_mask_p (lr_mask, lr_bitsize + rr_bitsize))
5688 rhs = build2 (BIT_AND_EXPR, type, rhs, lr_mask);
5690 return build2_loc (loc, wanted_code, truth_type, lhs, rhs);
5693 return 0;
5696 /* Handle the case of comparisons with constants. If there is something in
5697 common between the masks, those bits of the constants must be the same.
5698 If not, the condition is always false. Test for this to avoid generating
5699 incorrect code below. */
5700 result = const_binop (BIT_AND_EXPR, ll_mask, rl_mask);
5701 if (! integer_zerop (result)
5702 && simple_cst_equal (const_binop (BIT_AND_EXPR, result, l_const),
5703 const_binop (BIT_AND_EXPR, result, r_const)) != 1)
5705 if (wanted_code == NE_EXPR)
5707 warning (0, "%<or%> of unmatched not-equal tests is always 1");
5708 return constant_boolean_node (true, truth_type);
5710 else
5712 warning (0, "%<and%> of mutually exclusive equal-tests is always 0");
5713 return constant_boolean_node (false, truth_type);
5717 /* Construct the expression we will return. First get the component
5718 reference we will make. Unless the mask is all ones the width of
5719 that field, perform the mask operation. Then compare with the
5720 merged constant. */
5721 result = make_bit_field_ref (loc, ll_inner, lntype, lnbitsize, lnbitpos,
5722 ll_unsignedp || rl_unsignedp);
5724 ll_mask = const_binop (BIT_IOR_EXPR, ll_mask, rl_mask);
5725 if (! all_ones_mask_p (ll_mask, lnbitsize))
5726 result = build2_loc (loc, BIT_AND_EXPR, lntype, result, ll_mask);
5728 return build2_loc (loc, wanted_code, truth_type, result,
5729 const_binop (BIT_IOR_EXPR, l_const, r_const));
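/* A sketch of the constant case (hypothetical fields; bit positions
   are target-dependent): for
       struct s { unsigned a : 4; unsigned b : 4; } x;
   the test x.a == 3 && x.b == 5 becomes one load of the containing
   word, one BIT_AND with the merged mask, and one comparison against
   the merged constant (3 << a_pos) | (5 << b_pos).  */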
5732 /* Optimize T, which is a comparison of a MIN_EXPR or MAX_EXPR with a
5733 constant. */
5735 static tree
5736 optimize_minmax_comparison (location_t loc, enum tree_code code, tree type,
5737 tree op0, tree op1)
5739 tree arg0 = op0;
5740 enum tree_code op_code;
5741 tree comp_const;
5742 tree minmax_const;
5743 int consts_equal, consts_lt;
5744 tree inner;
5746 STRIP_SIGN_NOPS (arg0);
5748 op_code = TREE_CODE (arg0);
5749 minmax_const = TREE_OPERAND (arg0, 1);
5750 comp_const = fold_convert_loc (loc, TREE_TYPE (arg0), op1);
5751 consts_equal = tree_int_cst_equal (minmax_const, comp_const);
5752 consts_lt = tree_int_cst_lt (minmax_const, comp_const);
5753 inner = TREE_OPERAND (arg0, 0);
5755 /* If something does not permit us to optimize, return the original tree. */
5756 if ((op_code != MIN_EXPR && op_code != MAX_EXPR)
5757 || TREE_CODE (comp_const) != INTEGER_CST
5758 || TREE_OVERFLOW (comp_const)
5759 || TREE_CODE (minmax_const) != INTEGER_CST
5760 || TREE_OVERFLOW (minmax_const))
5761 return NULL_TREE;
5763 /* Now handle all the various comparison codes. We only handle EQ_EXPR
5764 and GT_EXPR, doing the rest with recursive calls using logical
5765 simplifications. */
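/* For instance, MIN (x, 4) < 3 is handled by inverting to
   MIN (x, 4) >= 3; the GE_EXPR case then expands that to
   MIN (x, 4) == 3 || MIN (x, 4) > 3, and each arm lands in the
   EQ_EXPR or GT_EXPR case below.  */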
5766 switch (code)
5768 case NE_EXPR: case LT_EXPR: case LE_EXPR:
5770 tree tem
5771 = optimize_minmax_comparison (loc,
5772 invert_tree_comparison (code, false),
5773 type, op0, op1);
5774 if (tem)
5775 return invert_truthvalue_loc (loc, tem);
5776 return NULL_TREE;
5779 case GE_EXPR:
5780 return
5781 fold_build2_loc (loc, TRUTH_ORIF_EXPR, type,
5782 optimize_minmax_comparison
5783 (loc, EQ_EXPR, type, arg0, comp_const),
5784 optimize_minmax_comparison
5785 (loc, GT_EXPR, type, arg0, comp_const));
5787 case EQ_EXPR:
5788 if (op_code == MAX_EXPR && consts_equal)
5789 /* MAX (X, 0) == 0 -> X <= 0 */
5790 return fold_build2_loc (loc, LE_EXPR, type, inner, comp_const);
5792 else if (op_code == MAX_EXPR && consts_lt)
5793 /* MAX (X, 0) == 5 -> X == 5 */
5794 return fold_build2_loc (loc, EQ_EXPR, type, inner, comp_const);
5796 else if (op_code == MAX_EXPR)
5797 /* MAX (X, 0) == -1 -> false */
5798 return omit_one_operand_loc (loc, type, integer_zero_node, inner);
5800 else if (consts_equal)
5801 /* MIN (X, 0) == 0 -> X >= 0 */
5802 return fold_build2_loc (loc, GE_EXPR, type, inner, comp_const);
5804 else if (consts_lt)
5805 /* MIN (X, 0) == 5 -> false */
5806 return omit_one_operand_loc (loc, type, integer_zero_node, inner);
5808 else
5809 /* MIN (X, 0) == -1 -> X == -1 */
5810 return fold_build2_loc (loc, EQ_EXPR, type, inner, comp_const);
5812 case GT_EXPR:
5813 if (op_code == MAX_EXPR && (consts_equal || consts_lt))
5814 /* MAX (X, 0) > 0 -> X > 0
5815 MAX (X, 0) > 5 -> X > 5 */
5816 return fold_build2_loc (loc, GT_EXPR, type, inner, comp_const);
5818 else if (op_code == MAX_EXPR)
5819 /* MAX (X, 0) > -1 -> true */
5820 return omit_one_operand_loc (loc, type, integer_one_node, inner);
5822 else if (op_code == MIN_EXPR && (consts_equal || consts_lt))
5823 /* MIN (X, 0) > 0 -> false
5824 MIN (X, 0) > 5 -> false */
5825 return omit_one_operand_loc (loc, type, integer_zero_node, inner);
5827 else
5828 /* MIN (X, 0) > -1 -> X > -1 */
5829 return fold_build2_loc (loc, GT_EXPR, type, inner, comp_const);
5831 default:
5832 return NULL_TREE;
5836 /* T is an integer expression that is being multiplied or divided by, or
5837 taken modulo, a constant C (CODE says which operation and what kind of
5838 divide or modulus). See if we can eliminate that operation by folding it with
5839 other operations already in T. WIDE_TYPE, if non-null, is a type that
5840 should be used for the computation if wider than our type.
5842 For example, if we are dividing (X * 8) + (Y * 16) by 4, we can return
5843 (X * 2) + (Y * 4). We must, however, be assured that either the original
5844 expression would not overflow or that overflow is undefined for the type
5845 in the language in question.
5847 If we return a non-null expression, it is an equivalent form of the
5848 original computation, but need not be in the original type.
5850 We set *STRICT_OVERFLOW_P to true if the return value depends on
5851 signed overflow being undefined. Otherwise we do not change
5852 *STRICT_OVERFLOW_P. */
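/* A concrete instance of the overflow caveat above, assuming a 32-bit
   signed int with wrapping semantics: for X = 0x30000000, X * 8 wraps
   to INT_MIN, so (X * 8) / 4 yields -0x20000000 while X * 2 yields
   0x60000000; the rewrite is only safe when overflow is undefined or
   provably absent.  */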
5854 static tree
5855 extract_muldiv (tree t, tree c, enum tree_code code, tree wide_type,
5856 bool *strict_overflow_p)
5858 /* To avoid exponential search depth, refuse to allow recursion past
5859 three levels. Beyond that (1) it's highly unlikely that we'll find
5860 something interesting and (2) we've probably processed it before
5861 when we built the inner expression. */
5863 static int depth;
5864 tree ret;
5866 if (depth > 3)
5867 return NULL;
5869 depth++;
5870 ret = extract_muldiv_1 (t, c, code, wide_type, strict_overflow_p);
5871 depth--;
5873 return ret;
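/* Note that DEPTH is a function-local static, so the cap is shared by
   every caller; that appears acceptable here since the folders are not
   run concurrently, and it keeps the mutual recursion with
   extract_muldiv_1 bounded regardless of call path.  */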
5876 static tree
5877 extract_muldiv_1 (tree t, tree c, enum tree_code code, tree wide_type,
5878 bool *strict_overflow_p)
5880 tree type = TREE_TYPE (t);
5881 enum tree_code tcode = TREE_CODE (t);
5882 tree ctype = (wide_type != 0 && (GET_MODE_SIZE (TYPE_MODE (wide_type))
5883 > GET_MODE_SIZE (TYPE_MODE (type)))
5884 ? wide_type : type);
5885 tree t1, t2;
5886 int same_p = tcode == code;
5887 tree op0 = NULL_TREE, op1 = NULL_TREE;
5888 bool sub_strict_overflow_p;
5890 /* Don't deal with constants of zero here; they confuse the code below. */
5891 if (integer_zerop (c))
5892 return NULL_TREE;
5894 if (TREE_CODE_CLASS (tcode) == tcc_unary)
5895 op0 = TREE_OPERAND (t, 0);
5897 if (TREE_CODE_CLASS (tcode) == tcc_binary)
5898 op0 = TREE_OPERAND (t, 0), op1 = TREE_OPERAND (t, 1);
5900 /* Note that we need not handle conditional operations here since fold
5901 already handles those cases. So just do arithmetic here. */
5902 switch (tcode)
5904 case INTEGER_CST:
5905 /* For a constant, we can always simplify if we are a multiply
5906 or (for divide and modulus) if it is a multiple of our constant. */
5907 if (code == MULT_EXPR
5908 || wi::multiple_of_p (t, c, TYPE_SIGN (type)))
5909 return const_binop (code, fold_convert (ctype, t),
5910 fold_convert (ctype, c));
5911 break;
5913 CASE_CONVERT: case NON_LVALUE_EXPR:
5914 /* If op0 is an expression ... */
5915 if ((COMPARISON_CLASS_P (op0)
5916 || UNARY_CLASS_P (op0)
5917 || BINARY_CLASS_P (op0)
5918 || VL_EXP_CLASS_P (op0)
5919 || EXPRESSION_CLASS_P (op0))
5920 /* ... and has wrapping overflow, and its type is smaller
5921 than ctype, then we cannot pass through as widening. */
5922 && (((ANY_INTEGRAL_TYPE_P (TREE_TYPE (op0))
5923 && TYPE_OVERFLOW_WRAPS (TREE_TYPE (op0)))
5924 && (TYPE_PRECISION (ctype)
5925 > TYPE_PRECISION (TREE_TYPE (op0))))
5926 /* ... or this is a truncation (t is narrower than op0),
5927 then we cannot pass through this narrowing. */
5928 || (TYPE_PRECISION (type)
5929 < TYPE_PRECISION (TREE_TYPE (op0)))
5930 /* ... or signedness changes for division or modulus,
5931 then we cannot pass through this conversion. */
5932 || (code != MULT_EXPR
5933 && (TYPE_UNSIGNED (ctype)
5934 != TYPE_UNSIGNED (TREE_TYPE (op0))))
5935 /* ... or has undefined overflow while the converted to
5936 type has not, we cannot do the operation in the inner type
5937 as that would introduce undefined overflow. */
5938 || ((ANY_INTEGRAL_TYPE_P (TREE_TYPE (op0))
5939 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (op0)))
5940 && !TYPE_OVERFLOW_UNDEFINED (type))))
5941 break;
5943 /* Pass the constant down and see if we can make a simplification. If
5944 we can, replace this expression with the inner simplification for
5945 possible later conversion to our or some other type. */
5946 if ((t2 = fold_convert (TREE_TYPE (op0), c)) != 0
5947 && TREE_CODE (t2) == INTEGER_CST
5948 && !TREE_OVERFLOW (t2)
5949 && (0 != (t1 = extract_muldiv (op0, t2, code,
5950 code == MULT_EXPR
5951 ? ctype : NULL_TREE,
5952 strict_overflow_p))))
5953 return t1;
5954 break;
5956 case ABS_EXPR:
5957 /* If widening the type changes it from signed to unsigned, then we
5958 must avoid building ABS_EXPR itself as unsigned. */
5959 if (TYPE_UNSIGNED (ctype) && !TYPE_UNSIGNED (type))
5961 tree cstype = (*signed_type_for) (ctype);
5962 if ((t1 = extract_muldiv (op0, c, code, cstype, strict_overflow_p))
5963 != 0)
5965 t1 = fold_build1 (tcode, cstype, fold_convert (cstype, t1));
5966 return fold_convert (ctype, t1);
5968 break;
5970 /* If the constant is negative, we cannot simplify this. */
5971 if (tree_int_cst_sgn (c) == -1)
5972 break;
5973 /* FALLTHROUGH */
5974 case NEGATE_EXPR:
5975 /* For division and modulus, type can't be unsigned, as e.g.
5976 (-(x / 2U)) / 2U isn't equal to -((x / 2U) / 2U) for x >= 2.
5977 For signed types, even with wrapping overflow, this is fine. */
5978 if (code != MULT_EXPR && TYPE_UNSIGNED (type))
5979 break;
5980 if ((t1 = extract_muldiv (op0, c, code, wide_type, strict_overflow_p))
5981 != 0)
5982 return fold_build1 (tcode, ctype, fold_convert (ctype, t1));
5983 break;
5985 case MIN_EXPR: case MAX_EXPR:
5986 /* If widening the type changes the signedness, then we can't perform
5987 this optimization as that changes the result. */
5988 if (TYPE_UNSIGNED (ctype) != TYPE_UNSIGNED (type))
5989 break;
5991 /* MIN (a, b) / 5 -> MIN (a / 5, b / 5) */
5992 sub_strict_overflow_p = false;
5993 if ((t1 = extract_muldiv (op0, c, code, wide_type,
5994 &sub_strict_overflow_p)) != 0
5995 && (t2 = extract_muldiv (op1, c, code, wide_type,
5996 &sub_strict_overflow_p)) != 0)
5998 if (tree_int_cst_sgn (c) < 0)
5999 tcode = (tcode == MIN_EXPR ? MAX_EXPR : MIN_EXPR);
6000 if (sub_strict_overflow_p)
6001 *strict_overflow_p = true;
6002 return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
6003 fold_convert (ctype, t2));
6005 break;
6007 case LSHIFT_EXPR: case RSHIFT_EXPR:
6008 /* If the second operand is constant, this is a multiplication
6009 or floor division, by a power of two, so we can treat it that
6010 way unless the multiplier or divisor overflows. Signed
6011 left-shift overflow is implementation-defined rather than
6012 undefined in C90, so do not convert signed left shift into
6013 multiplication. */
6014 if (TREE_CODE (op1) == INTEGER_CST
6015 && (tcode == RSHIFT_EXPR || TYPE_UNSIGNED (TREE_TYPE (op0)))
6016 /* const_binop may not detect overflow correctly,
6017 so check for it explicitly here. */
6018 && wi::gtu_p (TYPE_PRECISION (TREE_TYPE (size_one_node)), op1)
6019 && 0 != (t1 = fold_convert (ctype,
6020 const_binop (LSHIFT_EXPR,
6021 size_one_node,
6022 op1)))
6023 && !TREE_OVERFLOW (t1))
6024 return extract_muldiv (build2 (tcode == LSHIFT_EXPR
6025 ? MULT_EXPR : FLOOR_DIV_EXPR,
6026 ctype,
6027 fold_convert (ctype, op0),
6028 t1),
6029 c, code, wide_type, strict_overflow_p);
6030 break;
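/* For example, with unsigned x, x << 3 is rewritten here as x * 8 and
   x >> 3 as the floor division x / 8 before recursing; whether the
   recursion can then fold anything depends on the overflow rules
   checked in the MULT/DIV cases below.  */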
6032 case PLUS_EXPR: case MINUS_EXPR:
6033 /* See if we can eliminate the operation on both sides. If we can, we
6034 can return a new PLUS or MINUS. If we can't, the only remaining
6035 cases where we can do anything are if the second operand is a
6036 constant. */
6037 sub_strict_overflow_p = false;
6038 t1 = extract_muldiv (op0, c, code, wide_type, &sub_strict_overflow_p);
6039 t2 = extract_muldiv (op1, c, code, wide_type, &sub_strict_overflow_p);
6040 if (t1 != 0 && t2 != 0
6041 && (code == MULT_EXPR
6042 /* If not multiplication, we can only do this if both operands
6043 are divisible by c. */
6044 || (multiple_of_p (ctype, op0, c)
6045 && multiple_of_p (ctype, op1, c))))
6047 if (sub_strict_overflow_p)
6048 *strict_overflow_p = true;
6049 return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
6050 fold_convert (ctype, t2));
6053 /* If this was a subtraction, negate OP1 and set it to be an addition.
6054 This simplifies the logic below. */
6055 if (tcode == MINUS_EXPR)
6057 tcode = PLUS_EXPR, op1 = negate_expr (op1);
6058 /* If OP1 was not easily negatable, the constant may be OP0. */
6059 if (TREE_CODE (op0) == INTEGER_CST)
6061 std::swap (op0, op1);
6062 std::swap (t1, t2);
6066 if (TREE_CODE (op1) != INTEGER_CST)
6067 break;
6069 /* If either OP1 or C is negative, this optimization is not safe for
6070 some of the division and remainder types while for others we need
6071 to change the code. */
6072 if (tree_int_cst_sgn (op1) < 0 || tree_int_cst_sgn (c) < 0)
6074 if (code == CEIL_DIV_EXPR)
6075 code = FLOOR_DIV_EXPR;
6076 else if (code == FLOOR_DIV_EXPR)
6077 code = CEIL_DIV_EXPR;
6078 else if (code != MULT_EXPR
6079 && code != CEIL_MOD_EXPR && code != FLOOR_MOD_EXPR)
6080 break;
6083 /* If it's a multiply or a division/modulus operation of a multiple
6084 of our constant, do the operation and verify it doesn't overflow. */
6085 if (code == MULT_EXPR
6086 || wi::multiple_of_p (op1, c, TYPE_SIGN (type)))
6088 op1 = const_binop (code, fold_convert (ctype, op1),
6089 fold_convert (ctype, c));
6090 /* We allow the constant to overflow with wrapping semantics. */
6091 if (op1 == 0
6092 || (TREE_OVERFLOW (op1) && !TYPE_OVERFLOW_WRAPS (ctype)))
6093 break;
6095 else
6096 break;
6098 /* If we have an unsigned type, we cannot widen the operation since it
6099 will change the result if the original computation overflowed. */
6100 if (TYPE_UNSIGNED (ctype) && ctype != type)
6101 break;
6103 /* If we were able to eliminate our operation from the first side,
6104 apply our operation to the second side and reform the PLUS. */
6105 if (t1 != 0 && (TREE_CODE (t1) != code || code == MULT_EXPR))
6106 return fold_build2 (tcode, ctype, fold_convert (ctype, t1), op1);
6108 /* The last case is when CODE is a multiply. In that case, we can
6109 apply the distributive law to commute the multiply and addition
6110 if the multiplication of the constants doesn't overflow
6111 and overflow is defined. With undefined overflow
6112 op0 * c might overflow, while (op0 + orig_op1) * c doesn't. */
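/* For instance, with a 32-bit unsigned (wrapping) type, (a + 5) * 4
   distributes to a * 4 + 20 here; with undefined overflow this is
   skipped because a * 4 might overflow even when (a + 5) * 4 does
   not.  */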
6113 if (code == MULT_EXPR && TYPE_OVERFLOW_WRAPS (ctype))
6114 return fold_build2 (tcode, ctype,
6115 fold_build2 (code, ctype,
6116 fold_convert (ctype, op0),
6117 fold_convert (ctype, c)),
6118 op1);
6120 break;
6122 case MULT_EXPR:
6123 /* We have a special case here if we are doing something like
6124 (C * 8) % 4 since we know that's zero. */
6125 if ((code == TRUNC_MOD_EXPR || code == CEIL_MOD_EXPR
6126 || code == FLOOR_MOD_EXPR || code == ROUND_MOD_EXPR)
6127 /* If the multiplication can overflow we cannot optimize this. */
6128 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (t))
6129 && TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST
6130 && wi::multiple_of_p (op1, c, TYPE_SIGN (type)))
6132 *strict_overflow_p = true;
6133 return omit_one_operand (type, integer_zero_node, op0);
6136 /* ... fall through ... */
6138 case TRUNC_DIV_EXPR: case CEIL_DIV_EXPR: case FLOOR_DIV_EXPR:
6139 case ROUND_DIV_EXPR: case EXACT_DIV_EXPR:
6140 /* If we can extract our operation from the LHS, do so and return a
6141 new operation. Likewise for the RHS from a MULT_EXPR. Otherwise,
6142 do something only if the second operand is a constant. */
6143 if (same_p
6144 && (t1 = extract_muldiv (op0, c, code, wide_type,
6145 strict_overflow_p)) != 0)
6146 return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
6147 fold_convert (ctype, op1));
6148 else if (tcode == MULT_EXPR && code == MULT_EXPR
6149 && (t1 = extract_muldiv (op1, c, code, wide_type,
6150 strict_overflow_p)) != 0)
6151 return fold_build2 (tcode, ctype, fold_convert (ctype, op0),
6152 fold_convert (ctype, t1));
6153 else if (TREE_CODE (op1) != INTEGER_CST)
6154 return 0;
6156 /* If these are the same operation types, we can associate them
6157 assuming no overflow. */
6158 if (tcode == code)
6160 bool overflow_p = false;
6161 bool overflow_mul_p;
6162 signop sign = TYPE_SIGN (ctype);
6163 wide_int mul = wi::mul (op1, c, sign, &overflow_mul_p);
6164 overflow_p = TREE_OVERFLOW (c) | TREE_OVERFLOW (op1);
6165 if (overflow_mul_p
6166 && ((sign == UNSIGNED && tcode != MULT_EXPR) || sign == SIGNED))
6167 overflow_p = true;
6168 if (!overflow_p)
6169 return fold_build2 (tcode, ctype, fold_convert (ctype, op0),
6170 wide_int_to_tree (ctype, mul));
6173 /* If these operations "cancel" each other, we have the main
6174 optimizations of this pass, which occur when either constant is a
6175 multiple of the other, in which case we replace this with an
6176 operation of either CODE or TCODE.
6178 If we have an unsigned type, we cannot do this since it will change
6179 the result if the original computation overflowed. */
6180 if (TYPE_OVERFLOW_UNDEFINED (ctype)
6181 && ((code == MULT_EXPR && tcode == EXACT_DIV_EXPR)
6182 || (tcode == MULT_EXPR
6183 && code != TRUNC_MOD_EXPR && code != CEIL_MOD_EXPR
6184 && code != FLOOR_MOD_EXPR && code != ROUND_MOD_EXPR
6185 && code != MULT_EXPR)))
6187 if (wi::multiple_of_p (op1, c, TYPE_SIGN (type)))
6189 if (TYPE_OVERFLOW_UNDEFINED (ctype))
6190 *strict_overflow_p = true;
6191 return fold_build2 (tcode, ctype, fold_convert (ctype, op0),
6192 fold_convert (ctype,
6193 const_binop (TRUNC_DIV_EXPR,
6194 op1, c)));
6196 else if (wi::multiple_of_p (c, op1, TYPE_SIGN (type)))
6198 if (TYPE_OVERFLOW_UNDEFINED (ctype))
6199 *strict_overflow_p = true;
6200 return fold_build2 (code, ctype, fold_convert (ctype, op0),
6201 fold_convert (ctype,
6202 const_binop (TRUNC_DIV_EXPR,
6203 c, op1)));
6206 break;
6208 default:
6209 break;
6212 return 0;
6215 /* Return a node which has the indicated constant VALUE (either 0 or
6216 1 for scalars or {-1,-1,..} or {0,0,...} for vectors),
6217 and is of the indicated TYPE. */
6219 tree
6220 constant_boolean_node (bool value, tree type)
6222 if (type == integer_type_node)
6223 return value ? integer_one_node : integer_zero_node;
6224 else if (type == boolean_type_node)
6225 return value ? boolean_true_node : boolean_false_node;
6226 else if (TREE_CODE (type) == VECTOR_TYPE)
6227 return build_vector_from_val (type,
6228 build_int_cst (TREE_TYPE (type),
6229 value ? -1 : 0));
6230 else
6231 return fold_convert (type, value ? integer_one_node : integer_zero_node);
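/* Usage sketch: constant_boolean_node (true, boolean_type_node) is
   boolean_true_node, while for a vector type such as a 4-element
   integer vector it builds { -1, -1, -1, -1 }, matching the all-ones
   lane masks that vector comparisons produce.  */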
6235 /* Transform `a + (b ? x : y)' into `b ? (a + x) : (a + y)'.
6236 Transform `a + (x < y)' into `(x < y) ? (a + 1) : (a + 0)'. Here
6237 CODE corresponds to the `+', COND to the `(b ? x : y)' or `(x < y)'
6238 expression, and ARG to `a'. If COND_FIRST_P is nonzero, then the
6239 COND is the first argument to CODE; otherwise (as in the example
6240 given here), it is the second argument. TYPE is the type of the
6241 original expression. Return NULL_TREE if no simplification is
6242 possible. */
6244 static tree
6245 fold_binary_op_with_conditional_arg (location_t loc,
6246 enum tree_code code,
6247 tree type, tree op0, tree op1,
6248 tree cond, tree arg, int cond_first_p)
6250 tree cond_type = cond_first_p ? TREE_TYPE (op0) : TREE_TYPE (op1);
6251 tree arg_type = cond_first_p ? TREE_TYPE (op1) : TREE_TYPE (op0);
6252 tree test, true_value, false_value;
6253 tree lhs = NULL_TREE;
6254 tree rhs = NULL_TREE;
6255 enum tree_code cond_code = COND_EXPR;
6257 if (TREE_CODE (cond) == COND_EXPR
6258 || TREE_CODE (cond) == VEC_COND_EXPR)
6260 test = TREE_OPERAND (cond, 0);
6261 true_value = TREE_OPERAND (cond, 1);
6262 false_value = TREE_OPERAND (cond, 2);
6263 /* If this operand is an expression that throws (and hence has void
6264 type), it does not make sense to try to perform a logical or
6265 arithmetic operation involving it. */
6266 if (VOID_TYPE_P (TREE_TYPE (true_value)))
6267 lhs = true_value;
6268 if (VOID_TYPE_P (TREE_TYPE (false_value)))
6269 rhs = false_value;
6271 else
6273 tree testtype = TREE_TYPE (cond);
6274 test = cond;
6275 true_value = constant_boolean_node (true, testtype);
6276 false_value = constant_boolean_node (false, testtype);
6279 if (TREE_CODE (TREE_TYPE (test)) == VECTOR_TYPE)
6280 cond_code = VEC_COND_EXPR;
6282 /* This transformation is only worthwhile if we don't have to wrap ARG
6283 in a SAVE_EXPR and the operation can be simplified without recursing
6284 on at least one of the branches once it's pushed inside the COND_EXPR. */
6285 if (!TREE_CONSTANT (arg)
6286 && (TREE_SIDE_EFFECTS (arg)
6287 || TREE_CODE (arg) == COND_EXPR || TREE_CODE (arg) == VEC_COND_EXPR
6288 || TREE_CONSTANT (true_value) || TREE_CONSTANT (false_value)))
6289 return NULL_TREE;
6291 arg = fold_convert_loc (loc, arg_type, arg);
6292 if (lhs == 0)
6294 true_value = fold_convert_loc (loc, cond_type, true_value);
6295 if (cond_first_p)
6296 lhs = fold_build2_loc (loc, code, type, true_value, arg);
6297 else
6298 lhs = fold_build2_loc (loc, code, type, arg, true_value);
6300 if (rhs == 0)
6302 false_value = fold_convert_loc (loc, cond_type, false_value);
6303 if (cond_first_p)
6304 rhs = fold_build2_loc (loc, code, type, false_value, arg);
6305 else
6306 rhs = fold_build2_loc (loc, code, type, arg, false_value);
6309 /* Check that we have simplified at least one of the branches. */
6310 if (!TREE_CONSTANT (arg) && !TREE_CONSTANT (lhs) && !TREE_CONSTANT (rhs))
6311 return NULL_TREE;
6313 return fold_build3_loc (loc, cond_code, type, test, lhs, rhs);
6317 /* Subroutine of fold() that checks for the addition of +/- 0.0.
6319 If !NEGATE, return true if ADDEND is +/-0.0 and, for all X of type
6320 TYPE, X + ADDEND is the same as X. If NEGATE, return true if X -
6321 ADDEND is the same as X.
6323 X + 0 and X - 0 both give X when X is NaN, infinite, or nonzero
6324 and finite. The problematic cases are when X is zero, and its mode
6325 has signed zeros. In the case of rounding towards -infinity,
6326 X - 0 is not the same as X because 0 - 0 is -0. In other rounding
6327 modes, X + 0 is not the same as X because -0 + 0 is 0. */
6329 bool
6330 fold_real_zero_addition_p (const_tree type, const_tree addend, int negate)
6332 if (!real_zerop (addend))
6333 return false;
6335 /* Don't allow the fold with -fsignaling-nans. */
6336 if (HONOR_SNANS (element_mode (type)))
6337 return false;
6339 /* Allow the fold if zeros aren't signed, or their sign isn't important. */
6340 if (!HONOR_SIGNED_ZEROS (element_mode (type)))
6341 return true;
6343 /* In a vector or complex, we would need to check the sign of all zeros. */
6344 if (TREE_CODE (addend) != REAL_CST)
6345 return false;
6347 /* Treat x + -0 as x - 0 and x - -0 as x + 0. */
6348 if (REAL_VALUE_MINUS_ZERO (TREE_REAL_CST (addend)))
6349 negate = !negate;
6351 /* The mode has signed zeros, and we have to honor their sign.
6352 In this situation, there is only one case we can return true for.
6353 X - 0 is the same as X unless rounding towards -infinity is
6354 supported. */
6355 return negate && !HONOR_SIGN_DEPENDENT_ROUNDING (element_mode (type));
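/* Concretely: rounding toward -infinity makes 0.0 - 0.0 evaluate to
   -0.0, so X - 0.0 cannot be folded to X when X may be +0.0 and
   rounding is sign-dependent; in the other rounding modes
   -0.0 + 0.0 is +0.0, so X + 0.0 cannot be folded when X may be -0.0
   and signed zeros are honored.  */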
6358 /* Subroutine of fold() that optimizes comparisons of a division by
6359 a nonzero integer constant against an integer constant, i.e.
6360 X/C1 op C2.
6362 CODE is the comparison operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR,
6363 GE_EXPR or LE_EXPR. TYPE is the type of the result and ARG0 and ARG1
6364 are the operands of the comparison. ARG1 must be an INTEGER_CST.
6366 The function returns the constant folded tree if a simplification
6367 can be made, and NULL_TREE otherwise. */
6369 static tree
6370 fold_div_compare (location_t loc,
6371 enum tree_code code, tree type, tree arg0, tree arg1)
6373 tree prod, tmp, hi, lo;
6374 tree arg00 = TREE_OPERAND (arg0, 0);
6375 tree arg01 = TREE_OPERAND (arg0, 1);
6376 signop sign = TYPE_SIGN (TREE_TYPE (arg0));
6377 bool neg_overflow = false;
6378 bool overflow;
6380 /* We have to do this the hard way to detect unsigned overflow.
6381 prod = int_const_binop (MULT_EXPR, arg01, arg1); */
6382 wide_int val = wi::mul (arg01, arg1, sign, &overflow);
6383 prod = force_fit_type (TREE_TYPE (arg00), val, -1, overflow);
6384 neg_overflow = false;
6386 if (sign == UNSIGNED)
6388 tmp = int_const_binop (MINUS_EXPR, arg01,
6389 build_int_cst (TREE_TYPE (arg01), 1));
6390 lo = prod;
6392 /* Likewise hi = int_const_binop (PLUS_EXPR, prod, tmp). */
6393 val = wi::add (prod, tmp, sign, &overflow);
6394 hi = force_fit_type (TREE_TYPE (arg00), val,
6395 -1, overflow | TREE_OVERFLOW (prod));
6397 else if (tree_int_cst_sgn (arg01) >= 0)
6399 tmp = int_const_binop (MINUS_EXPR, arg01,
6400 build_int_cst (TREE_TYPE (arg01), 1));
6401 switch (tree_int_cst_sgn (arg1))
6403 case -1:
6404 neg_overflow = true;
6405 lo = int_const_binop (MINUS_EXPR, prod, tmp);
6406 hi = prod;
6407 break;
6409 case 0:
6410 lo = fold_negate_const (tmp, TREE_TYPE (arg0));
6411 hi = tmp;
6412 break;
6414 case 1:
6415 hi = int_const_binop (PLUS_EXPR, prod, tmp);
6416 lo = prod;
6417 break;
6419 default:
6420 gcc_unreachable ();
6423 else
6425 /* A negative divisor reverses the relational operators. */
6426 code = swap_tree_comparison (code);
6428 tmp = int_const_binop (PLUS_EXPR, arg01,
6429 build_int_cst (TREE_TYPE (arg01), 1));
6430 switch (tree_int_cst_sgn (arg1))
6432 case -1:
6433 hi = int_const_binop (MINUS_EXPR, prod, tmp);
6434 lo = prod;
6435 break;
6437 case 0:
6438 hi = fold_negate_const (tmp, TREE_TYPE (arg0));
6439 lo = tmp;
6440 break;
6442 case 1:
6443 neg_overflow = true;
6444 lo = int_const_binop (PLUS_EXPR, prod, tmp);
6445 hi = prod;
6446 break;
6448 default:
6449 gcc_unreachable ();
6453 switch (code)
6455 case EQ_EXPR:
6456 if (TREE_OVERFLOW (lo) && TREE_OVERFLOW (hi))
6457 return omit_one_operand_loc (loc, type, integer_zero_node, arg00);
6458 if (TREE_OVERFLOW (hi))
6459 return fold_build2_loc (loc, GE_EXPR, type, arg00, lo);
6460 if (TREE_OVERFLOW (lo))
6461 return fold_build2_loc (loc, LE_EXPR, type, arg00, hi);
6462 return build_range_check (loc, type, arg00, 1, lo, hi);
6464 case NE_EXPR:
6465 if (TREE_OVERFLOW (lo) && TREE_OVERFLOW (hi))
6466 return omit_one_operand_loc (loc, type, integer_one_node, arg00);
6467 if (TREE_OVERFLOW (hi))
6468 return fold_build2_loc (loc, LT_EXPR, type, arg00, lo);
6469 if (TREE_OVERFLOW (lo))
6470 return fold_build2_loc (loc, GT_EXPR, type, arg00, hi);
6471 return build_range_check (loc, type, arg00, 0, lo, hi);
6473 case LT_EXPR:
6474 if (TREE_OVERFLOW (lo))
6476 tmp = neg_overflow ? integer_zero_node : integer_one_node;
6477 return omit_one_operand_loc (loc, type, tmp, arg00);
6479 return fold_build2_loc (loc, LT_EXPR, type, arg00, lo);
6481 case LE_EXPR:
6482 if (TREE_OVERFLOW (hi))
6484 tmp = neg_overflow ? integer_zero_node : integer_one_node;
6485 return omit_one_operand_loc (loc, type, tmp, arg00);
6487 return fold_build2_loc (loc, LE_EXPR, type, arg00, hi);
6489 case GT_EXPR:
6490 if (TREE_OVERFLOW (hi))
6492 tmp = neg_overflow ? integer_one_node : integer_zero_node;
6493 return omit_one_operand_loc (loc, type, tmp, arg00);
6495 return fold_build2_loc (loc, GT_EXPR, type, arg00, hi);
6497 case GE_EXPR:
6498 if (TREE_OVERFLOW (lo))
6500 tmp = neg_overflow ? integer_one_node : integer_zero_node;
6501 return omit_one_operand_loc (loc, type, tmp, arg00);
6503 return fold_build2_loc (loc, GE_EXPR, type, arg00, lo);
6505 default:
6506 break;
6509 return NULL_TREE;
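/* Worked example for the code above, with unsigned 32-bit x:
   x / 4 == 3 holds exactly for x in [12, 15], so it folds to the
   range check built by build_range_check; x / 4 > 3 uses hi = 15 and
   folds to x > 15.  */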
6513 /* If CODE with arguments ARG0 and ARG1 represents a single bit
6514 equality/inequality test, then return a simplified form of the test
6515 using a sign test. Otherwise return NULL. TYPE is the desired
6516 result type. */
6518 static tree
6519 fold_single_bit_test_into_sign_test (location_t loc,
6520 enum tree_code code, tree arg0, tree arg1,
6521 tree result_type)
6523 /* If this is testing a single bit, we can optimize the test. */
6524 if ((code == NE_EXPR || code == EQ_EXPR)
6525 && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
6526 && integer_pow2p (TREE_OPERAND (arg0, 1)))
6528 /* If we have (A & C) != 0 where C is the sign bit of A, convert
6529 this into A < 0. Similarly for (A & C) == 0 into A >= 0. */
6530 tree arg00 = sign_bit_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg0, 1));
6532 if (arg00 != NULL_TREE
6533 /* This is only a win if casting to a signed type is cheap,
6534 i.e. when arg00's type is not a partial mode. */
6535 && TYPE_PRECISION (TREE_TYPE (arg00))
6536 == GET_MODE_PRECISION (TYPE_MODE (TREE_TYPE (arg00))))
6538 tree stype = signed_type_for (TREE_TYPE (arg00));
6539 return fold_build2_loc (loc, code == EQ_EXPR ? GE_EXPR : LT_EXPR,
6540 result_type,
6541 fold_convert_loc (loc, stype, arg00),
6542 build_int_cst (stype, 0));
6546 return NULL_TREE;
6549 /* If CODE with arguments ARG0 and ARG1 represents a single bit
6550 equality/inequality test, then return a simplified form of
6551 the test using shifts and logical operations. Otherwise return
6552 NULL. TYPE is the desired result type. */
6554 tree
6555 fold_single_bit_test (location_t loc, enum tree_code code,
6556 tree arg0, tree arg1, tree result_type)
6558 /* If this is testing a single bit, we can optimize the test. */
6559 if ((code == NE_EXPR || code == EQ_EXPR)
6560 && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
6561 && integer_pow2p (TREE_OPERAND (arg0, 1)))
6563 tree inner = TREE_OPERAND (arg0, 0);
6564 tree type = TREE_TYPE (arg0);
6565 int bitnum = tree_log2 (TREE_OPERAND (arg0, 1));
6566 machine_mode operand_mode = TYPE_MODE (type);
6567 int ops_unsigned;
6568 tree signed_type, unsigned_type, intermediate_type;
6569 tree tem, one;
6571 /* First, see if we can fold the single bit test into a sign-bit
6572 test. */
6573 tem = fold_single_bit_test_into_sign_test (loc, code, arg0, arg1,
6574 result_type);
6575 if (tem)
6576 return tem;
6578 /* Otherwise we have (A & C) != 0 where C is a single bit,
6579 convert that into ((A >> C2) & 1), where C2 = log2(C).
6580 Similarly for (A & C) == 0. */
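/* For instance, (A & 8) != 0 becomes ((A >> 3) & 1), and
   (A & 8) == 0 becomes (((A >> 3) ^ 1) & 1), the XOR flipping the
   tested bit before the final single-bit mask.  */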
6582 /* If INNER is a right shift of a constant and it plus BITNUM does
6583 not overflow, adjust BITNUM and INNER. */
6584 if (TREE_CODE (inner) == RSHIFT_EXPR
6585 && TREE_CODE (TREE_OPERAND (inner, 1)) == INTEGER_CST
6586 && bitnum < TYPE_PRECISION (type)
6587 && wi::ltu_p (TREE_OPERAND (inner, 1),
6588 TYPE_PRECISION (type) - bitnum))
6590 bitnum += tree_to_uhwi (TREE_OPERAND (inner, 1));
6591 inner = TREE_OPERAND (inner, 0);
6594 /* If we are going to be able to omit the AND below, we must do our
6595 operations as unsigned. If we must use the AND, we have a choice.
6596 Normally unsigned is faster, but for some machines signed is. */
6597 ops_unsigned = (LOAD_EXTEND_OP (operand_mode) == SIGN_EXTEND
6598 && !flag_syntax_only) ? 0 : 1;
6600 signed_type = lang_hooks.types.type_for_mode (operand_mode, 0);
6601 unsigned_type = lang_hooks.types.type_for_mode (operand_mode, 1);
6602 intermediate_type = ops_unsigned ? unsigned_type : signed_type;
6603 inner = fold_convert_loc (loc, intermediate_type, inner);
6605 if (bitnum != 0)
6606 inner = build2 (RSHIFT_EXPR, intermediate_type,
6607 inner, size_int (bitnum));
6609 one = build_int_cst (intermediate_type, 1);
6611 if (code == EQ_EXPR)
6612 inner = fold_build2_loc (loc, BIT_XOR_EXPR, intermediate_type, inner, one);
6614 /* Put the AND last so it can combine with more things. */
6615 inner = build2 (BIT_AND_EXPR, intermediate_type, inner, one);
6617 /* Make sure to return the proper type. */
6618 inner = fold_convert_loc (loc, result_type, inner);
6620 return inner;
6622 return NULL_TREE;
6625 /* Check whether we are allowed to reorder operands arg0 and arg1,
6626 such that the evaluation of arg1 occurs before arg0. */
6628 static bool
6629 reorder_operands_p (const_tree arg0, const_tree arg1)
6631 if (! flag_evaluation_order)
6632 return true;
6633 if (TREE_CONSTANT (arg0) || TREE_CONSTANT (arg1))
6634 return true;
6635 return ! TREE_SIDE_EFFECTS (arg0)
6636 && ! TREE_SIDE_EFFECTS (arg1);
6639 /* Test whether it is preferable to swap two operands, ARG0 and
6640 ARG1, for example because ARG0 is an integer constant and ARG1
6641 isn't. If REORDER is true, only recommend swapping if we can
6642 evaluate the operands in reverse order. */
6644 bool
6645 tree_swap_operands_p (const_tree arg0, const_tree arg1, bool reorder)
6647 if (CONSTANT_CLASS_P (arg1))
6648 return 0;
6649 if (CONSTANT_CLASS_P (arg0))
6650 return 1;
6652 STRIP_NOPS (arg0);
6653 STRIP_NOPS (arg1);
6655 if (TREE_CONSTANT (arg1))
6656 return 0;
6657 if (TREE_CONSTANT (arg0))
6658 return 1;
6660 if (reorder && flag_evaluation_order
6661 && (TREE_SIDE_EFFECTS (arg0) || TREE_SIDE_EFFECTS (arg1)))
6662 return 0;
6664 /* It is preferable to swap two SSA_NAMEs to ensure a canonical form
6665 for commutative and comparison operators. Ensuring a canonical
6666 form allows the optimizers to find additional redundancies without
6667 having to explicitly check for both orderings. */
6668 if (TREE_CODE (arg0) == SSA_NAME
6669 && TREE_CODE (arg1) == SSA_NAME
6670 && SSA_NAME_VERSION (arg0) > SSA_NAME_VERSION (arg1))
6671 return 1;
6673 /* Put SSA_NAMEs last. */
6674 if (TREE_CODE (arg1) == SSA_NAME)
6675 return 0;
6676 if (TREE_CODE (arg0) == SSA_NAME)
6677 return 1;
6679 /* Put variables last. */
6680 if (DECL_P (arg1))
6681 return 0;
6682 if (DECL_P (arg0))
6683 return 1;
6685 return 0;
6689 /* Fold A < X && A + 1 > Y to A < X && A >= Y. Normally A + 1 > Y
6690 means A >= Y && A != MAX, but in this case we know that
6691 A < X <= MAX. INEQ is A + 1 > Y, BOUND is A < X. */
6693 static tree
6694 fold_to_nonsharp_ineq_using_bound (location_t loc, tree ineq, tree bound)
6696 tree a, typea, type = TREE_TYPE (ineq), a1, diff, y;
6698 if (TREE_CODE (bound) == LT_EXPR)
6699 a = TREE_OPERAND (bound, 0);
6700 else if (TREE_CODE (bound) == GT_EXPR)
6701 a = TREE_OPERAND (bound, 1);
6702 else
6703 return NULL_TREE;
6705 typea = TREE_TYPE (a);
6706 if (!INTEGRAL_TYPE_P (typea)
6707 && !POINTER_TYPE_P (typea))
6708 return NULL_TREE;
6710 if (TREE_CODE (ineq) == LT_EXPR)
6712 a1 = TREE_OPERAND (ineq, 1);
6713 y = TREE_OPERAND (ineq, 0);
6715 else if (TREE_CODE (ineq) == GT_EXPR)
6717 a1 = TREE_OPERAND (ineq, 0);
6718 y = TREE_OPERAND (ineq, 1);
6720 else
6721 return NULL_TREE;
6723 if (TREE_TYPE (a1) != typea)
6724 return NULL_TREE;
6726 if (POINTER_TYPE_P (typea))
6728 /* Convert the pointer types into integer before taking the difference. */
6729 tree ta = fold_convert_loc (loc, ssizetype, a);
6730 tree ta1 = fold_convert_loc (loc, ssizetype, a1);
6731 diff = fold_binary_loc (loc, MINUS_EXPR, ssizetype, ta1, ta);
6733 else
6734 diff = fold_binary_loc (loc, MINUS_EXPR, typea, a1, a);
6736 if (!diff || !integer_onep (diff))
6737 return NULL_TREE;
6739 return fold_build2_loc (loc, GE_EXPR, type, a, y);
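/* Example: a < n && a + 1 > i folds to a < n && a >= i; the wraparound
   case a == MAX that normally makes A + 1 > Y weaker is excluded
   because the bound a < n already keeps a strictly below the maximum.  */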
6742 /* Fold a sum or difference of at least one multiplication.
6743 Returns the folded tree or NULL if no simplification could be made. */
6745 static tree
6746 fold_plusminus_mult_expr (location_t loc, enum tree_code code, tree type,
6747 tree arg0, tree arg1)
6749 tree arg00, arg01, arg10, arg11;
6750 tree alt0 = NULL_TREE, alt1 = NULL_TREE, same;
6752 /* (A * C) +- (B * C) -> (A+-B) * C.
6753 (A * C) +- A -> A * (C+-1).
6754 We are most concerned about the case where C is a constant,
6755 but other combinations show up during loop reduction. Since
6756 it is not difficult, try all four possibilities. */
6758 if (TREE_CODE (arg0) == MULT_EXPR)
6760 arg00 = TREE_OPERAND (arg0, 0);
6761 arg01 = TREE_OPERAND (arg0, 1);
6763 else if (TREE_CODE (arg0) == INTEGER_CST)
6765 arg00 = build_one_cst (type);
6766 arg01 = arg0;
6768 else
6770 /* We cannot generate constant 1 for fract. */
6771 if (ALL_FRACT_MODE_P (TYPE_MODE (type)))
6772 return NULL_TREE;
6773 arg00 = arg0;
6774 arg01 = build_one_cst (type);
6776 if (TREE_CODE (arg1) == MULT_EXPR)
6778 arg10 = TREE_OPERAND (arg1, 0);
6779 arg11 = TREE_OPERAND (arg1, 1);
6781 else if (TREE_CODE (arg1) == INTEGER_CST)
6783 arg10 = build_one_cst (type);
6784 /* As we canonicalize A - 2 to A + -2, get rid of that sign for
6785 the purpose of this canonicalization. */
6786 if (wi::neg_p (arg1, TYPE_SIGN (TREE_TYPE (arg1)))
6787 && negate_expr_p (arg1)
6788 && code == PLUS_EXPR)
6790 arg11 = negate_expr (arg1);
6791 code = MINUS_EXPR;
6793 else
6794 arg11 = arg1;
6796 else
6798 /* We cannot generate constant 1 for fract. */
6799 if (ALL_FRACT_MODE_P (TYPE_MODE (type)))
6800 return NULL_TREE;
6801 arg10 = arg1;
6802 arg11 = build_one_cst (type);
6804 same = NULL_TREE;
6806 if (operand_equal_p (arg01, arg11, 0))
6807 same = arg01, alt0 = arg00, alt1 = arg10;
6808 else if (operand_equal_p (arg00, arg10, 0))
6809 same = arg00, alt0 = arg01, alt1 = arg11;
6810 else if (operand_equal_p (arg00, arg11, 0))
6811 same = arg00, alt0 = arg01, alt1 = arg10;
6812 else if (operand_equal_p (arg01, arg10, 0))
6813 same = arg01, alt0 = arg00, alt1 = arg11;
6815 /* No identical multiplicands; see if we can find a common
6816 power-of-two factor in non-power-of-two multiplies. This
6817 can help in multi-dimensional array access. */
6818 else if (tree_fits_shwi_p (arg01)
6819 && tree_fits_shwi_p (arg11))
6821 HOST_WIDE_INT int01, int11, tmp;
6822 bool swap = false;
6823 tree maybe_same;
6824 int01 = tree_to_shwi (arg01);
6825 int11 = tree_to_shwi (arg11);
6827 /* Move min of absolute values to int11. */
6828 if (absu_hwi (int01) < absu_hwi (int11))
6830 tmp = int01, int01 = int11, int11 = tmp;
6831 alt0 = arg00, arg00 = arg10, arg10 = alt0;
6832 maybe_same = arg01;
6833 swap = true;
6835 else
6836 maybe_same = arg11;
6838 if (exact_log2 (absu_hwi (int11)) > 0 && int01 % int11 == 0
6839 /* The remainder should not be a constant, otherwise we
6840 end up folding i * 4 + 2 to (i * 2 + 1) * 2, which would increase
6841 the number of multiplications necessary. */
6842 && TREE_CODE (arg10) != INTEGER_CST)
6844 alt0 = fold_build2_loc (loc, MULT_EXPR, TREE_TYPE (arg00), arg00,
6845 build_int_cst (TREE_TYPE (arg00),
6846 int01 / int11));
6847 alt1 = arg10;
6848 same = maybe_same;
6849 if (swap)
6850 maybe_same = alt0, alt0 = alt1, alt1 = maybe_same;
6854 if (same)
6855 return fold_build2_loc (loc, MULT_EXPR, type,
6856 fold_build2_loc (loc, code, type,
6857 fold_convert_loc (loc, type, alt0),
6858 fold_convert_loc (loc, type, alt1)),
6859 fold_convert_loc (loc, type, same));
6861 return NULL_TREE;
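/* Examples of the folds above: a * 4 + b * 4 matches the identical-
   multiplicand checks and becomes (a + b) * 4, while a * 8 + b * 4
   takes the power-of-two path and becomes (a * 2 + b) * 4.  */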
6864 /* Subroutine of native_encode_expr. Encode the INTEGER_CST
6865 specified by EXPR into the buffer PTR of length LEN bytes.
6866 Return the number of bytes placed in the buffer, or zero
6867 upon failure. */
6869 static int
6870 native_encode_int (const_tree expr, unsigned char *ptr, int len, int off)
6872 tree type = TREE_TYPE (expr);
6873 int total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
6874 int byte, offset, word, words;
6875 unsigned char value;
6877 if ((off == -1 && total_bytes > len)
6878 || off >= total_bytes)
6879 return 0;
6880 if (off == -1)
6881 off = 0;
6882 words = total_bytes / UNITS_PER_WORD;
6884 for (byte = 0; byte < total_bytes; byte++)
6886 int bitpos = byte * BITS_PER_UNIT;
6887 /* Extend EXPR according to TYPE_SIGN if the precision isn't a whole
6888 number of bytes. */
6889 value = wi::extract_uhwi (wi::to_widest (expr), bitpos, BITS_PER_UNIT);
6891 if (total_bytes > UNITS_PER_WORD)
6893 word = byte / UNITS_PER_WORD;
6894 if (WORDS_BIG_ENDIAN)
6895 word = (words - 1) - word;
6896 offset = word * UNITS_PER_WORD;
6897 if (BYTES_BIG_ENDIAN)
6898 offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
6899 else
6900 offset += byte % UNITS_PER_WORD;
6902 else
6903 offset = BYTES_BIG_ENDIAN ? (total_bytes - 1) - byte : byte;
6904 if (offset >= off
6905 && offset - off < len)
6906 ptr[offset - off] = value;
6908 return MIN (len, total_bytes - off);
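/* Byte-order sketch, assuming 8-bit bytes on host and target: encoding
   the 16-bit constant 0x1234 stores { 0x34, 0x12 } for a little-endian
   target and { 0x12, 0x34 } for a big-endian one, with the word
   shuffle above applied when the value spans more than one target
   word.  */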
6912 /* Subroutine of native_encode_expr. Encode the FIXED_CST
6913 specified by EXPR into the buffer PTR of length LEN bytes.
6914 Return the number of bytes placed in the buffer, or zero
6915 upon failure. */
6917 static int
6918 native_encode_fixed (const_tree expr, unsigned char *ptr, int len, int off)
6920 tree type = TREE_TYPE (expr);
6921 machine_mode mode = TYPE_MODE (type);
6922 int total_bytes = GET_MODE_SIZE (mode);
6923 FIXED_VALUE_TYPE value;
6924 tree i_value, i_type;
6926 if (total_bytes * BITS_PER_UNIT > HOST_BITS_PER_DOUBLE_INT)
6927 return 0;
6929 i_type = lang_hooks.types.type_for_size (GET_MODE_BITSIZE (mode), 1);
6931 if (NULL_TREE == i_type
6932 || TYPE_PRECISION (i_type) != total_bytes * BITS_PER_UNIT)
6933 return 0;
6935 value = TREE_FIXED_CST (expr);
6936 i_value = double_int_to_tree (i_type, value.data);
6938 return native_encode_int (i_value, ptr, len, off);
6942 /* Subroutine of native_encode_expr. Encode the REAL_CST
6943 specified by EXPR into the buffer PTR of length LEN bytes.
6944 Return the number of bytes placed in the buffer, or zero
6945 upon failure. */
6947 static int
6948 native_encode_real (const_tree expr, unsigned char *ptr, int len, int off)
6950 tree type = TREE_TYPE (expr);
6951 int total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
6952 int byte, offset, word, words, bitpos;
6953 unsigned char value;
6955 /* There are always 32 bits in each long, no matter the size of
6956 the host's long. We handle floating point representations with
6957 up to 192 bits. */
6958 long tmp[6];
6960 if ((off == -1 && total_bytes > len)
6961 || off >= total_bytes)
6962 return 0;
6963 if (off == -1)
6964 off = 0;
6965 words = (32 / BITS_PER_UNIT) / UNITS_PER_WORD;
6967 real_to_target (tmp, TREE_REAL_CST_PTR (expr), TYPE_MODE (type));
6969 for (bitpos = 0; bitpos < total_bytes * BITS_PER_UNIT;
6970 bitpos += BITS_PER_UNIT)
6972 byte = (bitpos / BITS_PER_UNIT) & 3;
6973 value = (unsigned char) (tmp[bitpos / 32] >> (bitpos & 31));
6975 if (UNITS_PER_WORD < 4)
6977 word = byte / UNITS_PER_WORD;
6978 if (WORDS_BIG_ENDIAN)
6979 word = (words - 1) - word;
6980 offset = word * UNITS_PER_WORD;
6981 if (BYTES_BIG_ENDIAN)
6982 offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
6983 else
6984 offset += byte % UNITS_PER_WORD;
6986 else
6987 offset = BYTES_BIG_ENDIAN ? 3 - byte : byte;
6988 offset = offset + ((bitpos / BITS_PER_UNIT) & ~3);
6989 if (offset >= off
6990 && offset - off < len)
6991 ptr[offset - off] = value;
6993 return MIN (len, total_bytes - off);
6996 /* Subroutine of native_encode_expr. Encode the COMPLEX_CST
6997 specified by EXPR into the buffer PTR of length LEN bytes.
6998 Return the number of bytes placed in the buffer, or zero
6999 upon failure. */
7001 static int
7002 native_encode_complex (const_tree expr, unsigned char *ptr, int len, int off)
7004 int rsize, isize;
7005 tree part;
7007 part = TREE_REALPART (expr);
7008 rsize = native_encode_expr (part, ptr, len, off);
7009 if (off == -1
7010 && rsize == 0)
7011 return 0;
7012 part = TREE_IMAGPART (expr);
7013 if (off != -1)
7014 off = MAX (0, off - GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (part))));
7015 isize = native_encode_expr (part, ptr+rsize, len-rsize, off);
7016 if (off == -1
7017 && isize != rsize)
7018 return 0;
7019 return rsize + isize;
7023 /* Subroutine of native_encode_expr. Encode the VECTOR_CST
7024 specified by EXPR into the buffer PTR of length LEN bytes.
7025 Return the number of bytes placed in the buffer, or zero
7026 upon failure. */
7028 static int
7029 native_encode_vector (const_tree expr, unsigned char *ptr, int len, int off)
7031 unsigned i, count;
7032 int size, offset;
7033 tree itype, elem;
7035 offset = 0;
7036 count = VECTOR_CST_NELTS (expr);
7037 itype = TREE_TYPE (TREE_TYPE (expr));
7038 size = GET_MODE_SIZE (TYPE_MODE (itype));
7039 for (i = 0; i < count; i++)
7041 if (off >= size)
7043 off -= size;
7044 continue;
7046 elem = VECTOR_CST_ELT (expr, i);
7047 int res = native_encode_expr (elem, ptr+offset, len-offset, off);
7048 if ((off == -1 && res != size)
7049 || res == 0)
7050 return 0;
7051 offset += res;
7052 if (offset >= len)
7053 return offset;
7054 if (off != -1)
7055 off = 0;
7057 return offset;
7061 /* Subroutine of native_encode_expr. Encode the STRING_CST
7062 specified by EXPR into the buffer PTR of length LEN bytes.
7063 Return the number of bytes placed in the buffer, or zero
7064 upon failure. */
7066 static int
7067 native_encode_string (const_tree expr, unsigned char *ptr, int len, int off)
7069 tree type = TREE_TYPE (expr);
7070 HOST_WIDE_INT total_bytes;
7072 if (TREE_CODE (type) != ARRAY_TYPE
7073 || TREE_CODE (TREE_TYPE (type)) != INTEGER_TYPE
7074 || GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (type))) != BITS_PER_UNIT
7075 || !tree_fits_shwi_p (TYPE_SIZE_UNIT (type)))
7076 return 0;
7077 total_bytes = tree_to_shwi (TYPE_SIZE_UNIT (type));
7078 if ((off == -1 && total_bytes > len)
7079 || off >= total_bytes)
7080 return 0;
7081 if (off == -1)
7082 off = 0;
7083 if (TREE_STRING_LENGTH (expr) - off < MIN (total_bytes, len))
7085 int written = 0;
7086 if (off < TREE_STRING_LENGTH (expr))
7088 written = MIN (len, TREE_STRING_LENGTH (expr) - off);
7089 memcpy (ptr, TREE_STRING_POINTER (expr) + off, written);
7091 memset (ptr + written, 0,
7092 MIN (total_bytes - written, len - written));
7094 else
7095 memcpy (ptr, TREE_STRING_POINTER (expr) + off, MIN (total_bytes, len));
7096 return MIN (total_bytes - off, len);
7100 /* Subroutine of fold_view_convert_expr. Encode the INTEGER_CST,
7101 REAL_CST, COMPLEX_CST or VECTOR_CST specified by EXPR into the
7102 buffer PTR of length LEN bytes. If OFF is not -1 then start
7103 the encoding at byte offset OFF and encode at most LEN bytes.
7104 Return the number of bytes placed in the buffer, or zero upon failure. */
7106 int
7107 native_encode_expr (const_tree expr, unsigned char *ptr, int len, int off)
7109 switch (TREE_CODE (expr))
7111 case INTEGER_CST:
7112 return native_encode_int (expr, ptr, len, off);
7114 case REAL_CST:
7115 return native_encode_real (expr, ptr, len, off);
7117 case FIXED_CST:
7118 return native_encode_fixed (expr, ptr, len, off);
7120 case COMPLEX_CST:
7121 return native_encode_complex (expr, ptr, len, off);
7123 case VECTOR_CST:
7124 return native_encode_vector (expr, ptr, len, off);
7126 case STRING_CST:
7127 return native_encode_string (expr, ptr, len, off);
7129 default:
7130 return 0;
7135 /* Subroutine of native_interpret_expr. Interpret the contents of
7136 the buffer PTR of length LEN as an INTEGER_CST of type TYPE.
7137 If the buffer cannot be interpreted, return NULL_TREE. */
7139 static tree
7140 native_interpret_int (tree type, const unsigned char *ptr, int len)
7142 int total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
7144 if (total_bytes > len
7145 || total_bytes * BITS_PER_UNIT > HOST_BITS_PER_DOUBLE_INT)
7146 return NULL_TREE;
7148 wide_int result = wi::from_buffer (ptr, total_bytes);
7150 return wide_int_to_tree (type, result);
7154 /* Subroutine of native_interpret_expr. Interpret the contents of
7155 the buffer PTR of length LEN as a FIXED_CST of type TYPE.
7156 If the buffer cannot be interpreted, return NULL_TREE. */
7158 static tree
7159 native_interpret_fixed (tree type, const unsigned char *ptr, int len)
7161 int total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
7162 double_int result;
7163 FIXED_VALUE_TYPE fixed_value;
7165 if (total_bytes > len
7166 || total_bytes * BITS_PER_UNIT > HOST_BITS_PER_DOUBLE_INT)
7167 return NULL_TREE;
7169 result = double_int::from_buffer (ptr, total_bytes);
7170 fixed_value = fixed_from_double_int (result, TYPE_MODE (type));
7172 return build_fixed (type, fixed_value);
7176 /* Subroutine of native_interpret_expr. Interpret the contents of
7177 the buffer PTR of length LEN as a REAL_CST of type TYPE.
7178 If the buffer cannot be interpreted, return NULL_TREE. */
7180 static tree
7181 native_interpret_real (tree type, const unsigned char *ptr, int len)
7183 machine_mode mode = TYPE_MODE (type);
7184 int total_bytes = GET_MODE_SIZE (mode);
7185 int byte, offset, word, words, bitpos;
7186 unsigned char value;
7187 /* There are always 32 bits in each long, no matter the size of
7188 the host's long. We handle floating point representations with
7189 up to 192 bits. */
7190 REAL_VALUE_TYPE r;
7191 long tmp[6];
7193 total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
7194 if (total_bytes > len || total_bytes > 24)
7195 return NULL_TREE;
7196 words = (32 / BITS_PER_UNIT) / UNITS_PER_WORD;
7198 memset (tmp, 0, sizeof (tmp));
7199 for (bitpos = 0; bitpos < total_bytes * BITS_PER_UNIT;
7200 bitpos += BITS_PER_UNIT)
7202 byte = (bitpos / BITS_PER_UNIT) & 3;
7203 if (UNITS_PER_WORD < 4)
7205 word = byte / UNITS_PER_WORD;
7206 if (WORDS_BIG_ENDIAN)
7207 word = (words - 1) - word;
7208 offset = word * UNITS_PER_WORD;
7209 if (BYTES_BIG_ENDIAN)
7210 offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
7211 else
7212 offset += byte % UNITS_PER_WORD;
7214 else
7215 offset = BYTES_BIG_ENDIAN ? 3 - byte : byte;
7216 value = ptr[offset + ((bitpos / BITS_PER_UNIT) & ~3)];
7218 tmp[bitpos / 32] |= (unsigned long)value << (bitpos & 31);
7221 real_from_target (&r, tmp, mode);
7222 return build_real (type, r);
7226 /* Subroutine of native_interpret_expr. Interpret the contents of
7227 the buffer PTR of length LEN as a COMPLEX_CST of type TYPE.
7228 If the buffer cannot be interpreted, return NULL_TREE. */
7230 static tree
7231 native_interpret_complex (tree type, const unsigned char *ptr, int len)
7233 tree etype, rpart, ipart;
7234 int size;
7236 etype = TREE_TYPE (type);
7237 size = GET_MODE_SIZE (TYPE_MODE (etype));
7238 if (size * 2 > len)
7239 return NULL_TREE;
7240 rpart = native_interpret_expr (etype, ptr, size);
7241 if (!rpart)
7242 return NULL_TREE;
7243 ipart = native_interpret_expr (etype, ptr+size, size);
7244 if (!ipart)
7245 return NULL_TREE;
7246 return build_complex (type, rpart, ipart);
7250 /* Subroutine of native_interpret_expr. Interpret the contents of
7251 the buffer PTR of length LEN as a VECTOR_CST of type TYPE.
7252 If the buffer cannot be interpreted, return NULL_TREE. */
7254 static tree
7255 native_interpret_vector (tree type, const unsigned char *ptr, int len)
7257 tree etype, elem;
7258 int i, size, count;
7259 tree *elements;
7261 etype = TREE_TYPE (type);
7262 size = GET_MODE_SIZE (TYPE_MODE (etype));
7263 count = TYPE_VECTOR_SUBPARTS (type);
7264 if (size * count > len)
7265 return NULL_TREE;
7267 elements = XALLOCAVEC (tree, count);
7268 for (i = count - 1; i >= 0; i--)
7270 elem = native_interpret_expr (etype, ptr+(i*size), size);
7271 if (!elem)
7272 return NULL_TREE;
7273 elements[i] = elem;
7275 return build_vector (type, elements);
7279 /* Subroutine of fold_view_convert_expr. Interpret the contents of
7280 the buffer PTR of length LEN as a constant of type TYPE. For
7281 INTEGRAL_TYPE_P we return an INTEGER_CST, for SCALAR_FLOAT_TYPE_P
7282 we return a REAL_CST, etc... If the buffer cannot be interpreted,
7283 return NULL_TREE. */
7285 tree
7286 native_interpret_expr (tree type, const unsigned char *ptr, int len)
7288 switch (TREE_CODE (type))
7290 case INTEGER_TYPE:
7291 case ENUMERAL_TYPE:
7292 case BOOLEAN_TYPE:
7293 case POINTER_TYPE:
7294 case REFERENCE_TYPE:
7295 return native_interpret_int (type, ptr, len);
7297 case REAL_TYPE:
7298 return native_interpret_real (type, ptr, len);
7300 case FIXED_POINT_TYPE:
7301 return native_interpret_fixed (type, ptr, len);
7303 case COMPLEX_TYPE:
7304 return native_interpret_complex (type, ptr, len);
7306 case VECTOR_TYPE:
7307 return native_interpret_vector (type, ptr, len);
7309 default:
7310 return NULL_TREE;
7314 /* Returns true if we can interpret the contents of a native encoding
7315 as TYPE. */
7317 static bool
7318 can_native_interpret_type_p (tree type)
7320 switch (TREE_CODE (type))
7322 case INTEGER_TYPE:
7323 case ENUMERAL_TYPE:
7324 case BOOLEAN_TYPE:
7325 case POINTER_TYPE:
7326 case REFERENCE_TYPE:
7327 case FIXED_POINT_TYPE:
7328 case REAL_TYPE:
7329 case COMPLEX_TYPE:
7330 case VECTOR_TYPE:
7331 return true;
7332 default:
7333 return false;
7337 /* Fold a VIEW_CONVERT_EXPR of a constant expression EXPR to type
7338 TYPE at compile-time. If we're unable to perform the conversion
7339 return NULL_TREE. */
7341 static tree
7342 fold_view_convert_expr (tree type, tree expr)
7344 /* We support up to 512-bit values (for V8DFmode). */
7345 unsigned char buffer[64];
7346 int len;
7348 /* Check that the host and target are sane. */
7349 if (CHAR_BIT != 8 || BITS_PER_UNIT != 8)
7350 return NULL_TREE;
7352 len = native_encode_expr (expr, buffer, sizeof (buffer));
7353 if (len == 0)
7354 return NULL_TREE;
7356 return native_interpret_expr (type, buffer, len);
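/* Illustrative sketch (hypothetical helper, not from the original
   sources): fold_view_convert_expr implements compile-time type punning
   by round-tripping through the target byte image.  For example, on an
   IEEE-754 target with a 32-bit `unsigned int':  */
#if 0
static tree
example_pun_float_to_uint (void)
{
  /* Build the REAL_CST 1.0f ...  */
  tree fcst = build_real (float_type_node, dconst1);
  /* ... and reinterpret its bits; the expected result on such a
     target is the INTEGER_CST 0x3f800000.  */
  return fold_view_convert_expr (unsigned_type_node, fcst);
}
#endif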
7359 /* Build an expression for the address of T. Folds away INDIRECT_REF
7360 to avoid confusing the gimplify process. */
7362 tree
7363 build_fold_addr_expr_with_type_loc (location_t loc, tree t, tree ptrtype)
7365 /* The size of the object is not relevant when talking about its address. */
7366 if (TREE_CODE (t) == WITH_SIZE_EXPR)
7367 t = TREE_OPERAND (t, 0);
7369 if (TREE_CODE (t) == INDIRECT_REF)
7371 t = TREE_OPERAND (t, 0);
7373 if (TREE_TYPE (t) != ptrtype)
7374 t = build1_loc (loc, NOP_EXPR, ptrtype, t);
7376 else if (TREE_CODE (t) == MEM_REF
7377 && integer_zerop (TREE_OPERAND (t, 1)))
7378 return TREE_OPERAND (t, 0);
7379 else if (TREE_CODE (t) == MEM_REF
7380 && TREE_CODE (TREE_OPERAND (t, 0)) == INTEGER_CST)
7381 return fold_binary (POINTER_PLUS_EXPR, ptrtype,
7382 TREE_OPERAND (t, 0),
7383 convert_to_ptrofftype (TREE_OPERAND (t, 1)));
7384 else if (TREE_CODE (t) == VIEW_CONVERT_EXPR)
7386 t = build_fold_addr_expr_loc (loc, TREE_OPERAND (t, 0));
7388 if (TREE_TYPE (t) != ptrtype)
7389 t = fold_convert_loc (loc, ptrtype, t);
7391 else
7392 t = build1_loc (loc, ADDR_EXPR, ptrtype, t);
7394 return t;
7397 /* Build an expression for the address of T. */
7399 tree
7400 build_fold_addr_expr_loc (location_t loc, tree t)
7402 tree ptrtype = build_pointer_type (TREE_TYPE (t));
7404 return build_fold_addr_expr_with_type_loc (loc, t, ptrtype);
7407 /* Fold a unary expression of code CODE and type TYPE with operand
7408 OP0. Return the folded expression if folding is successful.
7409 Otherwise, return NULL_TREE. */
7411 tree
7412 fold_unary_loc (location_t loc, enum tree_code code, tree type, tree op0)
7414 tree tem;
7415 tree arg0;
7416 enum tree_code_class kind = TREE_CODE_CLASS (code);
7418 gcc_assert (IS_EXPR_CODE_CLASS (kind)
7419 && TREE_CODE_LENGTH (code) == 1);
7421 arg0 = op0;
7422 if (arg0)
7424 if (CONVERT_EXPR_CODE_P (code)
7425 || code == FLOAT_EXPR || code == ABS_EXPR || code == NEGATE_EXPR)
7427 /* Don't use STRIP_NOPS, because signedness of argument type
7428 matters. */
7429 STRIP_SIGN_NOPS (arg0);
7431 else
7433 /* Strip any conversions that don't change the mode. This
7434 is safe for every expression, except for a comparison
7435 expression because its signedness is derived from its
7436 operands.
7438 Note that this is done as an internal manipulation within
7439 the constant folder, in order to find the simplest
7440 representation of the arguments so that their form can be
7441 studied. In any case, the appropriate type conversions
7442 should be put back in the tree that will get out of the
7443 constant folder. */
7444 STRIP_NOPS (arg0);
7447 if (CONSTANT_CLASS_P (arg0))
7449 tree tem = const_unop (code, type, arg0);
7450 if (tem)
7452 if (TREE_TYPE (tem) != type)
7453 tem = fold_convert_loc (loc, type, tem);
7454 return tem;
7459 tem = generic_simplify (loc, code, type, op0);
7460 if (tem)
7461 return tem;
7463 if (TREE_CODE_CLASS (code) == tcc_unary)
7465 if (TREE_CODE (arg0) == COMPOUND_EXPR)
7466 return build2 (COMPOUND_EXPR, type, TREE_OPERAND (arg0, 0),
7467 fold_build1_loc (loc, code, type,
7468 fold_convert_loc (loc, TREE_TYPE (op0),
7469 TREE_OPERAND (arg0, 1))));
7470 else if (TREE_CODE (arg0) == COND_EXPR)
7472 tree arg01 = TREE_OPERAND (arg0, 1);
7473 tree arg02 = TREE_OPERAND (arg0, 2);
7474 if (! VOID_TYPE_P (TREE_TYPE (arg01)))
7475 arg01 = fold_build1_loc (loc, code, type,
7476 fold_convert_loc (loc,
7477 TREE_TYPE (op0), arg01));
7478 if (! VOID_TYPE_P (TREE_TYPE (arg02)))
7479 arg02 = fold_build1_loc (loc, code, type,
7480 fold_convert_loc (loc,
7481 TREE_TYPE (op0), arg02));
7482 tem = fold_build3_loc (loc, COND_EXPR, type, TREE_OPERAND (arg0, 0),
7483 arg01, arg02);
7485 /* If this was a conversion, and all we did was to move it
7486 inside the COND_EXPR, bring it back out. But leave it if
7487 it is a conversion from integer to integer and the
7488 result precision is no wider than a word since such a
7489 conversion is cheap and may be optimized away by combine,
7490 while it couldn't if it were outside the COND_EXPR. Then return
7491 so we don't get into an infinite recursion loop taking the
7492 conversion out and then back in. */
7494 if ((CONVERT_EXPR_CODE_P (code)
7495 || code == NON_LVALUE_EXPR)
7496 && TREE_CODE (tem) == COND_EXPR
7497 && TREE_CODE (TREE_OPERAND (tem, 1)) == code
7498 && TREE_CODE (TREE_OPERAND (tem, 2)) == code
7499 && ! VOID_TYPE_P (TREE_OPERAND (tem, 1))
7500 && ! VOID_TYPE_P (TREE_OPERAND (tem, 2))
7501 && (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 1), 0))
7502 == TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 2), 0)))
7503 && (! (INTEGRAL_TYPE_P (TREE_TYPE (tem))
7504 && (INTEGRAL_TYPE_P
7505 (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 1), 0))))
7506 && TYPE_PRECISION (TREE_TYPE (tem)) <= BITS_PER_WORD)
7507 || flag_syntax_only))
7508 tem = build1_loc (loc, code, type,
7509 build3 (COND_EXPR,
7510 TREE_TYPE (TREE_OPERAND
7511 (TREE_OPERAND (tem, 1), 0)),
7512 TREE_OPERAND (tem, 0),
7513 TREE_OPERAND (TREE_OPERAND (tem, 1), 0),
7514 TREE_OPERAND (TREE_OPERAND (tem, 2),
7515 0)));
7516 return tem;
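/* Illustrative example: the distribution above turns

     (long) (c ? i : j)   into   c ? (long) i : (long) j

   and the guard that follows pulls a cheap integer-to-integer
   conversion back out of the COND_EXPR again, returning early either
   way so the two rewrites cannot recurse into each other forever.  */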
7520 switch (code)
7522 case NON_LVALUE_EXPR:
7523 if (!maybe_lvalue_p (op0))
7524 return fold_convert_loc (loc, type, op0);
7525 return NULL_TREE;
7527 CASE_CONVERT:
7528 case FLOAT_EXPR:
7529 case FIX_TRUNC_EXPR:
7530 if (COMPARISON_CLASS_P (op0))
7532 /* If we have (type) (a CMP b) and type is an integral type, return
7533 new expression involving the new type. Canonicalize
7534 (type) (a CMP b) to (a CMP b) ? (type) true : (type) false for
7535 non-integral type.
7536 Do not fold the result as that would not simplify further;
7537 folding it again would also result in recursion. */
7538 if (TREE_CODE (type) == BOOLEAN_TYPE)
7539 return build2_loc (loc, TREE_CODE (op0), type,
7540 TREE_OPERAND (op0, 0),
7541 TREE_OPERAND (op0, 1));
7542 else if (!INTEGRAL_TYPE_P (type) && !VOID_TYPE_P (type)
7543 && TREE_CODE (type) != VECTOR_TYPE)
7544 return build3_loc (loc, COND_EXPR, type, op0,
7545 constant_boolean_node (true, type),
7546 constant_boolean_node (false, type));
7549 /* Handle (T *)&A.B.C for A being of type T and B and C
7550 living at offset zero. This occurs frequently in
7551 C++ upcasting and then accessing the base. */
7552 if (TREE_CODE (op0) == ADDR_EXPR
7553 && POINTER_TYPE_P (type)
7554 && handled_component_p (TREE_OPERAND (op0, 0)))
7556 HOST_WIDE_INT bitsize, bitpos;
7557 tree offset;
7558 machine_mode mode;
7559 int unsignedp, volatilep;
7560 tree base = TREE_OPERAND (op0, 0);
7561 base = get_inner_reference (base, &bitsize, &bitpos, &offset,
7562 &mode, &unsignedp, &volatilep, false);
7563 /* If the reference was to a (constant) zero offset, we can use
7564 the address of the base if it has the same base type
7565 as the result type and the pointer type is unqualified. */
7566 if (! offset && bitpos == 0
7567 && (TYPE_MAIN_VARIANT (TREE_TYPE (type))
7568 == TYPE_MAIN_VARIANT (TREE_TYPE (base)))
7569 && TYPE_QUALS (type) == TYPE_UNQUALIFIED)
7570 return fold_convert_loc (loc, type,
7571 build_fold_addr_expr_loc (loc, base));
7574 if (TREE_CODE (op0) == MODIFY_EXPR
7575 && TREE_CONSTANT (TREE_OPERAND (op0, 1))
7576 /* Detect assigning a bitfield. */
7577 && !(TREE_CODE (TREE_OPERAND (op0, 0)) == COMPONENT_REF
7578 && DECL_BIT_FIELD
7579 (TREE_OPERAND (TREE_OPERAND (op0, 0), 1))))
7581 /* Don't leave an assignment inside a conversion
7582 unless assigning a bitfield. */
7583 tem = fold_build1_loc (loc, code, type, TREE_OPERAND (op0, 1));
7584 /* First do the assignment, then return converted constant. */
7585 tem = build2_loc (loc, COMPOUND_EXPR, TREE_TYPE (tem), op0, tem);
7586 TREE_NO_WARNING (tem) = 1;
7587 TREE_USED (tem) = 1;
7588 return tem;
7591 /* Convert (T)(x & c) into (T)x & (T)c, if c is an integer
7592 constant (if x has signed type, the sign bit cannot be set
7593 in c). This folds extension into the BIT_AND_EXPR.
7594 ??? We don't do it for BOOLEAN_TYPE or ENUMERAL_TYPE because they
7595 very likely don't have maximal range for their precision and this
7596 transformation effectively doesn't preserve non-maximal ranges. */
7597 if (TREE_CODE (type) == INTEGER_TYPE
7598 && TREE_CODE (op0) == BIT_AND_EXPR
7599 && TREE_CODE (TREE_OPERAND (op0, 1)) == INTEGER_CST)
7601 tree and_expr = op0;
7602 tree and0 = TREE_OPERAND (and_expr, 0);
7603 tree and1 = TREE_OPERAND (and_expr, 1);
7604 int change = 0;
7606 if (TYPE_UNSIGNED (TREE_TYPE (and_expr))
7607 || (TYPE_PRECISION (type)
7608 <= TYPE_PRECISION (TREE_TYPE (and_expr))))
7609 change = 1;
7610 else if (TYPE_PRECISION (TREE_TYPE (and1))
7611 <= HOST_BITS_PER_WIDE_INT
7612 && tree_fits_uhwi_p (and1))
7614 unsigned HOST_WIDE_INT cst;
7616 cst = tree_to_uhwi (and1);
7617 cst &= HOST_WIDE_INT_M1U
7618 << (TYPE_PRECISION (TREE_TYPE (and1)) - 1);
7619 change = (cst == 0);
7620 if (change
7621 && !flag_syntax_only
7622 && (LOAD_EXTEND_OP (TYPE_MODE (TREE_TYPE (and0)))
7623 == ZERO_EXTEND))
7625 tree uns = unsigned_type_for (TREE_TYPE (and0));
7626 and0 = fold_convert_loc (loc, uns, and0);
7627 and1 = fold_convert_loc (loc, uns, and1);
7630 if (change)
7632 tem = force_fit_type (type, wi::to_widest (and1), 0,
7633 TREE_OVERFLOW (and1));
7634 return fold_build2_loc (loc, BIT_AND_EXPR, type,
7635 fold_convert_loc (loc, type, and0), tem);
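/* Illustrative example: with 8-bit `signed char c', the rule above
   licenses

     (int) (c & 0x7f)   ->   (int) c & 0x7f

   because the constant leaves the sign bit of `c' clear, so masking
   before or after the widening conversion yields the same value.  */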
7639 /* Convert (T1)(X p+ Y) into ((T1)X p+ Y), for pointer type,
7640 when one of the new casts will fold away. Conservatively we assume
7641 that this happens when X or Y is NOP_EXPR or Y is INTEGER_CST. */
7642 if (POINTER_TYPE_P (type)
7643 && TREE_CODE (arg0) == POINTER_PLUS_EXPR
7644 && (!TYPE_RESTRICT (type) || TYPE_RESTRICT (TREE_TYPE (arg0)))
7645 && (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
7646 || TREE_CODE (TREE_OPERAND (arg0, 0)) == NOP_EXPR
7647 || TREE_CODE (TREE_OPERAND (arg0, 1)) == NOP_EXPR))
7649 tree arg00 = TREE_OPERAND (arg0, 0);
7650 tree arg01 = TREE_OPERAND (arg0, 1);
7652 return fold_build_pointer_plus_loc
7653 (loc, fold_convert_loc (loc, type, arg00), arg01);
7656 /* Convert (T1)(~(T2)X) into ~(T1)X if T1 and T2 are integral types
7657 of the same precision, and X is an integer type not narrower than
7658 types T1 or T2, i.e. the cast (T2)X isn't an extension. */
7659 if (INTEGRAL_TYPE_P (type)
7660 && TREE_CODE (op0) == BIT_NOT_EXPR
7661 && INTEGRAL_TYPE_P (TREE_TYPE (op0))
7662 && CONVERT_EXPR_P (TREE_OPERAND (op0, 0))
7663 && TYPE_PRECISION (type) == TYPE_PRECISION (TREE_TYPE (op0)))
7665 tem = TREE_OPERAND (TREE_OPERAND (op0, 0), 0);
7666 if (INTEGRAL_TYPE_P (TREE_TYPE (tem))
7667 && TYPE_PRECISION (type) <= TYPE_PRECISION (TREE_TYPE (tem)))
7668 return fold_build1_loc (loc, BIT_NOT_EXPR, type,
7669 fold_convert_loc (loc, type, tem));
7672 /* Convert (T1)(X * Y) into (T1)X * (T1)Y if T1 is narrower than the
7673 type of X and Y (integer types only). */
7674 if (INTEGRAL_TYPE_P (type)
7675 && TREE_CODE (op0) == MULT_EXPR
7676 && INTEGRAL_TYPE_P (TREE_TYPE (op0))
7677 && TYPE_PRECISION (type) < TYPE_PRECISION (TREE_TYPE (op0)))
7679 /* Be careful not to introduce new overflows. */
7680 tree mult_type;
7681 if (TYPE_OVERFLOW_WRAPS (type))
7682 mult_type = type;
7683 else
7684 mult_type = unsigned_type_for (type);
7686 if (TYPE_PRECISION (mult_type) < TYPE_PRECISION (TREE_TYPE (op0)))
7688 tem = fold_build2_loc (loc, MULT_EXPR, mult_type,
7689 fold_convert_loc (loc, mult_type,
7690 TREE_OPERAND (op0, 0)),
7691 fold_convert_loc (loc, mult_type,
7692 TREE_OPERAND (op0, 1)));
7693 return fold_convert_loc (loc, type, tem);
7697 return NULL_TREE;
7699 case VIEW_CONVERT_EXPR:
7700 if (TREE_CODE (op0) == MEM_REF)
7701 return fold_build2_loc (loc, MEM_REF, type,
7702 TREE_OPERAND (op0, 0), TREE_OPERAND (op0, 1));
7704 return NULL_TREE;
7706 case NEGATE_EXPR:
7707 tem = fold_negate_expr (loc, arg0);
7708 if (tem)
7709 return fold_convert_loc (loc, type, tem);
7710 return NULL_TREE;
7712 case ABS_EXPR:
7713 /* Convert fabs((double)float) into (double)fabsf(float). */
7714 if (TREE_CODE (arg0) == NOP_EXPR
7715 && TREE_CODE (type) == REAL_TYPE)
7717 tree targ0 = strip_float_extensions (arg0);
7718 if (targ0 != arg0)
7719 return fold_convert_loc (loc, type,
7720 fold_build1_loc (loc, ABS_EXPR,
7721 TREE_TYPE (targ0),
7722 targ0));
7725 /* Strip sign ops from argument. */
7726 if (TREE_CODE (type) == REAL_TYPE)
7728 tem = fold_strip_sign_ops (arg0);
7729 if (tem)
7730 return fold_build1_loc (loc, ABS_EXPR, type,
7731 fold_convert_loc (loc, type, tem));
7733 return NULL_TREE;
7735 case CONJ_EXPR:
7736 if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
7737 return fold_convert_loc (loc, type, arg0);
7738 if (TREE_CODE (arg0) == COMPLEX_EXPR)
7740 tree itype = TREE_TYPE (type);
7741 tree rpart = fold_convert_loc (loc, itype, TREE_OPERAND (arg0, 0));
7742 tree ipart = fold_convert_loc (loc, itype, TREE_OPERAND (arg0, 1));
7743 return fold_build2_loc (loc, COMPLEX_EXPR, type, rpart,
7744 negate_expr (ipart));
7746 if (TREE_CODE (arg0) == CONJ_EXPR)
7747 return fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
7748 return NULL_TREE;
7750 case BIT_NOT_EXPR:
7751 /* Convert ~(X ^ Y) to ~X ^ Y or X ^ ~Y if ~X or ~Y simplify. */
7752 if (TREE_CODE (arg0) == BIT_XOR_EXPR
7753 && (tem = fold_unary_loc (loc, BIT_NOT_EXPR, type,
7754 fold_convert_loc (loc, type,
7755 TREE_OPERAND (arg0, 0)))))
7756 return fold_build2_loc (loc, BIT_XOR_EXPR, type, tem,
7757 fold_convert_loc (loc, type,
7758 TREE_OPERAND (arg0, 1)));
7759 else if (TREE_CODE (arg0) == BIT_XOR_EXPR
7760 && (tem = fold_unary_loc (loc, BIT_NOT_EXPR, type,
7761 fold_convert_loc (loc, type,
7762 TREE_OPERAND (arg0, 1)))))
7763 return fold_build2_loc (loc, BIT_XOR_EXPR, type,
7764 fold_convert_loc (loc, type,
7765 TREE_OPERAND (arg0, 0)), tem);
7767 return NULL_TREE;
7769 case TRUTH_NOT_EXPR:
7770 /* Note that the operand of this must be an int
7771 and its values must be 0 or 1.
7772 ("true" is a fixed value perhaps depending on the language,
7773 but we don't handle values other than 1 correctly yet.) */
7774 tem = fold_truth_not_expr (loc, arg0);
7775 if (!tem)
7776 return NULL_TREE;
7777 return fold_convert_loc (loc, type, tem);
7779 case REALPART_EXPR:
7780 if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
7781 return fold_convert_loc (loc, type, arg0);
7782 if (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
7784 tree itype = TREE_TYPE (TREE_TYPE (arg0));
7785 tem = fold_build2_loc (loc, TREE_CODE (arg0), itype,
7786 fold_build1_loc (loc, REALPART_EXPR, itype,
7787 TREE_OPERAND (arg0, 0)),
7788 fold_build1_loc (loc, REALPART_EXPR, itype,
7789 TREE_OPERAND (arg0, 1)));
7790 return fold_convert_loc (loc, type, tem);
7792 if (TREE_CODE (arg0) == CONJ_EXPR)
7794 tree itype = TREE_TYPE (TREE_TYPE (arg0));
7795 tem = fold_build1_loc (loc, REALPART_EXPR, itype,
7796 TREE_OPERAND (arg0, 0));
7797 return fold_convert_loc (loc, type, tem);
7799 if (TREE_CODE (arg0) == CALL_EXPR)
7801 tree fn = get_callee_fndecl (arg0);
7802 if (fn && DECL_BUILT_IN_CLASS (fn) == BUILT_IN_NORMAL)
7803 switch (DECL_FUNCTION_CODE (fn))
7805 CASE_FLT_FN (BUILT_IN_CEXPI):
7806 fn = mathfn_built_in (type, BUILT_IN_COS);
7807 if (fn)
7808 return build_call_expr_loc (loc, fn, 1, CALL_EXPR_ARG (arg0, 0));
7809 break;
7811 default:
7812 break;
7815 return NULL_TREE;
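/* Illustrative note: cexpi (x) computes cos (x) + i*sin (x), so the
   real part of such a call folds to the plain cos (x) call built
   above, and the imaginary part correspondingly folds to sin (x) in
   the IMAGPART_EXPR case below.  */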
7817 case IMAGPART_EXPR:
7818 if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
7819 return build_zero_cst (type);
7820 if (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
7822 tree itype = TREE_TYPE (TREE_TYPE (arg0));
7823 tem = fold_build2_loc (loc, TREE_CODE (arg0), itype,
7824 fold_build1_loc (loc, IMAGPART_EXPR, itype,
7825 TREE_OPERAND (arg0, 0)),
7826 fold_build1_loc (loc, IMAGPART_EXPR, itype,
7827 TREE_OPERAND (arg0, 1)));
7828 return fold_convert_loc (loc, type, tem);
7830 if (TREE_CODE (arg0) == CONJ_EXPR)
7832 tree itype = TREE_TYPE (TREE_TYPE (arg0));
7833 tem = fold_build1_loc (loc, IMAGPART_EXPR, itype, TREE_OPERAND (arg0, 0));
7834 return fold_convert_loc (loc, type, negate_expr (tem));
7836 if (TREE_CODE (arg0) == CALL_EXPR)
7838 tree fn = get_callee_fndecl (arg0);
7839 if (fn && DECL_BUILT_IN_CLASS (fn) == BUILT_IN_NORMAL)
7840 switch (DECL_FUNCTION_CODE (fn))
7842 CASE_FLT_FN (BUILT_IN_CEXPI):
7843 fn = mathfn_built_in (type, BUILT_IN_SIN);
7844 if (fn)
7845 return build_call_expr_loc (loc, fn, 1, CALL_EXPR_ARG (arg0, 0));
7846 break;
7848 default:
7849 break;
7852 return NULL_TREE;
7854 case INDIRECT_REF:
7855 /* Fold *&X to X if X is an lvalue. */
7856 if (TREE_CODE (op0) == ADDR_EXPR)
7858 tree op00 = TREE_OPERAND (op0, 0);
7859 if ((TREE_CODE (op00) == VAR_DECL
7860 || TREE_CODE (op00) == PARM_DECL
7861 || TREE_CODE (op00) == RESULT_DECL)
7862 && !TREE_READONLY (op00))
7863 return op00;
7865 return NULL_TREE;
7867 default:
7868 return NULL_TREE;
7869 } /* switch (code) */
7873 /* If the operation was a conversion do _not_ mark a resulting constant
7874 with TREE_OVERFLOW if the original constant was not. These conversions
7875 have implementation defined behavior and retaining the TREE_OVERFLOW
7876 flag here would confuse later passes such as VRP. */
7877 tree
7878 fold_unary_ignore_overflow_loc (location_t loc, enum tree_code code,
7879 tree type, tree op0)
7881 tree res = fold_unary_loc (loc, code, type, op0);
7882 if (res
7883 && TREE_CODE (res) == INTEGER_CST
7884 && TREE_CODE (op0) == INTEGER_CST
7885 && CONVERT_EXPR_CODE_P (code))
7886 TREE_OVERFLOW (res) = TREE_OVERFLOW (op0);
7888 return res;
7891 /* Fold a binary bitwise/truth expression of code CODE and type TYPE with
7892 operands OP0 and OP1. LOC is the location of the resulting expression.
7893 ARG0 and ARG1 are the NOP-stripped results of OP0 and OP1.
7894 Return the folded expression if folding is successful. Otherwise,
7895 return NULL_TREE. */
7896 static tree
7897 fold_truth_andor (location_t loc, enum tree_code code, tree type,
7898 tree arg0, tree arg1, tree op0, tree op1)
7900 tree tem;
7902 /* We only do these simplifications if we are optimizing. */
7903 if (!optimize)
7904 return NULL_TREE;
7906 /* Check for things like (A || B) && (A || C). We can convert this
7907 to A || (B && C). Note that either operator can be any of the four
7908 truth and/or operations and the transformation will still be
7909 valid. Also note that we only care about order for the
7910 ANDIF and ORIF operators. If B contains side effects, this
7911 might change the truth-value of A. */
7912 if (TREE_CODE (arg0) == TREE_CODE (arg1)
7913 && (TREE_CODE (arg0) == TRUTH_ANDIF_EXPR
7914 || TREE_CODE (arg0) == TRUTH_ORIF_EXPR
7915 || TREE_CODE (arg0) == TRUTH_AND_EXPR
7916 || TREE_CODE (arg0) == TRUTH_OR_EXPR)
7917 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (arg0, 1)))
7919 tree a00 = TREE_OPERAND (arg0, 0);
7920 tree a01 = TREE_OPERAND (arg0, 1);
7921 tree a10 = TREE_OPERAND (arg1, 0);
7922 tree a11 = TREE_OPERAND (arg1, 1);
7923 int commutative = ((TREE_CODE (arg0) == TRUTH_OR_EXPR
7924 || TREE_CODE (arg0) == TRUTH_AND_EXPR)
7925 && (code == TRUTH_AND_EXPR
7926 || code == TRUTH_OR_EXPR));
7928 if (operand_equal_p (a00, a10, 0))
7929 return fold_build2_loc (loc, TREE_CODE (arg0), type, a00,
7930 fold_build2_loc (loc, code, type, a01, a11));
7931 else if (commutative && operand_equal_p (a00, a11, 0))
7932 return fold_build2_loc (loc, TREE_CODE (arg0), type, a00,
7933 fold_build2_loc (loc, code, type, a01, a10));
7934 else if (commutative && operand_equal_p (a01, a10, 0))
7935 return fold_build2_loc (loc, TREE_CODE (arg0), type, a01,
7936 fold_build2_loc (loc, code, type, a00, a11));
7938 /* This case is tricky because we must either have commutative
7939 operators or else A10 must not have side-effects. */
7941 else if ((commutative || ! TREE_SIDE_EFFECTS (a10))
7942 && operand_equal_p (a01, a11, 0))
7943 return fold_build2_loc (loc, TREE_CODE (arg0), type,
7944 fold_build2_loc (loc, code, type, a00, a10),
7945 a01);
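/* Illustrative example: `(a || b) && (a || c)' matches the first arm
   above with a00 == a10 == `a' and becomes `a || (b && c)'; the
   side-effect test on the inner right-hand operand is what keeps
   something like `(a || b++) && (a || c)' from being rewritten.  */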
7948 /* See if we can build a range comparison. */
7949 if (0 != (tem = fold_range_test (loc, code, type, op0, op1)))
7950 return tem;
7952 if ((code == TRUTH_ANDIF_EXPR && TREE_CODE (arg0) == TRUTH_ORIF_EXPR)
7953 || (code == TRUTH_ORIF_EXPR && TREE_CODE (arg0) == TRUTH_ANDIF_EXPR))
7955 tem = merge_truthop_with_opposite_arm (loc, arg0, arg1, true);
7956 if (tem)
7957 return fold_build2_loc (loc, code, type, tem, arg1);
7960 if ((code == TRUTH_ANDIF_EXPR && TREE_CODE (arg1) == TRUTH_ORIF_EXPR)
7961 || (code == TRUTH_ORIF_EXPR && TREE_CODE (arg1) == TRUTH_ANDIF_EXPR))
7963 tem = merge_truthop_with_opposite_arm (loc, arg1, arg0, false);
7964 if (tem)
7965 return fold_build2_loc (loc, code, type, arg0, tem);
7968 /* Check for the possibility of merging component references. If our
7969 lhs is another similar operation, try to merge its rhs with our
7970 rhs. Then try to merge our lhs and rhs. */
7971 if (TREE_CODE (arg0) == code
7972 && 0 != (tem = fold_truth_andor_1 (loc, code, type,
7973 TREE_OPERAND (arg0, 1), arg1)))
7974 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0), tem);
7976 if ((tem = fold_truth_andor_1 (loc, code, type, arg0, arg1)) != 0)
7977 return tem;
7979 if (LOGICAL_OP_NON_SHORT_CIRCUIT
7980 && (code == TRUTH_AND_EXPR
7981 || code == TRUTH_ANDIF_EXPR
7982 || code == TRUTH_OR_EXPR
7983 || code == TRUTH_ORIF_EXPR))
7985 enum tree_code ncode, icode;
7987 ncode = (code == TRUTH_ANDIF_EXPR || code == TRUTH_AND_EXPR)
7988 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR;
7989 icode = ncode == TRUTH_AND_EXPR ? TRUTH_ANDIF_EXPR : TRUTH_ORIF_EXPR;
7991 /* Transform ((A AND-IF B) AND[-IF] C) into (A AND-IF (B AND C)),
7992 or ((A OR-IF B) OR[-IF] C) into (A OR-IF (B OR C))
7993 We don't want to pack more than two leaves into a non-IF AND/OR
7994 expression.
7995 If the tree code of the left-hand operand isn't an AND/OR-IF code and isn't
7996 equal to IF-CODE, then we don't want to add the right-hand operand.
7997 If the inner right-hand side of the left-hand operand has
7998 side-effects, or isn't simple, then we can't add to it,
7999 as otherwise we might destroy the if-sequence. */
8000 if (TREE_CODE (arg0) == icode
8001 && simple_operand_p_2 (arg1)
8002 /* Needed for sequence points to handle trapping
8003 operations and side-effects. */
8004 && simple_operand_p_2 (TREE_OPERAND (arg0, 1)))
8006 tem = fold_build2_loc (loc, ncode, type, TREE_OPERAND (arg0, 1),
8007 arg1);
8008 return fold_build2_loc (loc, icode, type, TREE_OPERAND (arg0, 0),
8009 tem);
8011 /* Same as above but for (A AND[-IF] (B AND-IF C)) -> ((A AND B) AND-IF C),
8012 or (A OR[-IF] (B OR-IF C)) -> ((A OR B) OR-IF C). */
8013 else if (TREE_CODE (arg1) == icode
8014 && simple_operand_p_2 (arg0)
8015 /* Needed for sequence points to handle trapping
8016 operations and side-effects. */
8017 && simple_operand_p_2 (TREE_OPERAND (arg1, 0)))
8019 tem = fold_build2_loc (loc, ncode, type,
8020 arg0, TREE_OPERAND (arg1, 0));
8021 return fold_build2_loc (loc, icode, type, tem,
8022 TREE_OPERAND (arg1, 1));
8024 /* Transform (A AND-IF B) into (A AND B), or (A OR-IF B)
8025 into (A OR B).
8026 For sequence point consistency, we need to check for trapping,
8027 and side-effects. */
8028 else if (code == icode && simple_operand_p_2 (arg0)
8029 && simple_operand_p_2 (arg1))
8030 return fold_build2_loc (loc, ncode, type, arg0, arg1);
8033 return NULL_TREE;
8036 /* Fold a binary expression of code CODE and type TYPE with operands
8037 OP0 and OP1, containing either a MIN-MAX or a MAX-MIN combination.
8038 Return the folded expression if folding is successful. Otherwise,
8039 return NULL_TREE. */
8041 static tree
8042 fold_minmax (location_t loc, enum tree_code code, tree type, tree op0, tree op1)
8044 enum tree_code compl_code;
8046 if (code == MIN_EXPR)
8047 compl_code = MAX_EXPR;
8048 else if (code == MAX_EXPR)
8049 compl_code = MIN_EXPR;
8050 else
8051 gcc_unreachable ();
8053 /* MIN (MAX (a, b), b) == b. */
8054 if (TREE_CODE (op0) == compl_code
8055 && operand_equal_p (TREE_OPERAND (op0, 1), op1, 0))
8056 return omit_one_operand_loc (loc, type, op1, TREE_OPERAND (op0, 0));
8058 /* MIN (MAX (b, a), b) == b. */
8059 if (TREE_CODE (op0) == compl_code
8060 && operand_equal_p (TREE_OPERAND (op0, 0), op1, 0)
8061 && reorder_operands_p (TREE_OPERAND (op0, 1), op1))
8062 return omit_one_operand_loc (loc, type, op1, TREE_OPERAND (op0, 1));
8064 /* MIN (a, MAX (a, b)) == a. */
8065 if (TREE_CODE (op1) == compl_code
8066 && operand_equal_p (op0, TREE_OPERAND (op1, 0), 0)
8067 && reorder_operands_p (op0, TREE_OPERAND (op1, 1)))
8068 return omit_one_operand_loc (loc, type, op0, TREE_OPERAND (op1, 1));
8070 /* MIN (a, MAX (b, a)) == a. */
8071 if (TREE_CODE (op1) == compl_code
8072 && operand_equal_p (op0, TREE_OPERAND (op1, 1), 0)
8073 && reorder_operands_p (op0, TREE_OPERAND (op1, 0)))
8074 return omit_one_operand_loc (loc, type, op0, TREE_OPERAND (op1, 0));
8076 return NULL_TREE;
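/* Illustrative sketch (not from the original sources): the four
   identities above in stand-alone scalar form.  They hold for any
   total order; the reorder_operands_p checks merely refuse to reorder
   operands that have side effects.  */
#if 0
#include <assert.h>

static int imin (int a, int b) { return a < b ? a : b; }
static int imax (int a, int b) { return a > b ? a : b; }

static void
check_minmax_identities (int a, int b)
{
  assert (imin (imax (a, b), b) == b);   /* MIN (MAX (a, b), b) == b */
  assert (imin (imax (b, a), b) == b);   /* MIN (MAX (b, a), b) == b */
  assert (imin (a, imax (a, b)) == a);   /* MIN (a, MAX (a, b)) == a */
  assert (imin (a, imax (b, a)) == a);   /* MIN (a, MAX (b, a)) == a */
}
#endif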
8079 /* Helper that tries to canonicalize the comparison ARG0 CODE ARG1
8080 by changing CODE to reduce the magnitude of constants involved in
8081 ARG0 of the comparison.
8082 Returns a canonicalized comparison tree if a simplification was
8083 possible, otherwise returns NULL_TREE.
8084 Set *STRICT_OVERFLOW_P to true if the canonicalization is only
8085 valid if signed overflow is undefined. */
8087 static tree
8088 maybe_canonicalize_comparison_1 (location_t loc, enum tree_code code, tree type,
8089 tree arg0, tree arg1,
8090 bool *strict_overflow_p)
8092 enum tree_code code0 = TREE_CODE (arg0);
8093 tree t, cst0 = NULL_TREE;
8094 int sgn0;
8096 /* Match A +- CST code arg1. We can change this only if overflow
8097 is undefined. */
8098 if (!((ANY_INTEGRAL_TYPE_P (TREE_TYPE (arg0))
8099 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0)))
8100 /* In principle pointers also have undefined overflow behavior,
8101 but that causes problems elsewhere. */
8102 && !POINTER_TYPE_P (TREE_TYPE (arg0))
8103 && (code0 == MINUS_EXPR
8104 || code0 == PLUS_EXPR)
8105 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST))
8106 return NULL_TREE;
8108 /* Identify the constant in arg0 and its sign. */
8109 cst0 = TREE_OPERAND (arg0, 1);
8110 sgn0 = tree_int_cst_sgn (cst0);
8112 /* Overflowed constants and zero will cause problems. */
8113 if (integer_zerop (cst0)
8114 || TREE_OVERFLOW (cst0))
8115 return NULL_TREE;
8117 /* See if we can reduce the magnitude of the constant in
8118 arg0 by changing the comparison code. */
8119 /* A - CST < arg1 -> A - CST-1 <= arg1. */
8120 if (code == LT_EXPR
8121 && code0 == ((sgn0 == -1) ? PLUS_EXPR : MINUS_EXPR))
8122 code = LE_EXPR;
8123 /* A + CST > arg1 -> A + CST-1 >= arg1. */
8124 else if (code == GT_EXPR
8125 && code0 == ((sgn0 == -1) ? MINUS_EXPR : PLUS_EXPR))
8126 code = GE_EXPR;
8127 /* A + CST <= arg1 -> A + CST-1 < arg1. */
8128 else if (code == LE_EXPR
8129 && code0 == ((sgn0 == -1) ? MINUS_EXPR : PLUS_EXPR))
8130 code = LT_EXPR;
8131 /* A - CST >= arg1 -> A - CST-1 > arg1. */
8132 else if (code == GE_EXPR
8133 && code0 == ((sgn0 == -1) ? PLUS_EXPR : MINUS_EXPR))
8134 code = GT_EXPR;
8135 else
8136 return NULL_TREE;
8137 *strict_overflow_p = true;
8139 /* Now build the constant reduced in magnitude. But not if that
8140 would produce one outside of its type's range. */
8141 if (INTEGRAL_TYPE_P (TREE_TYPE (cst0))
8142 && ((sgn0 == 1
8143 && TYPE_MIN_VALUE (TREE_TYPE (cst0))
8144 && tree_int_cst_equal (cst0, TYPE_MIN_VALUE (TREE_TYPE (cst0))))
8145 || (sgn0 == -1
8146 && TYPE_MAX_VALUE (TREE_TYPE (cst0))
8147 && tree_int_cst_equal (cst0, TYPE_MAX_VALUE (TREE_TYPE (cst0))))))
8148 return NULL_TREE;
8150 t = int_const_binop (sgn0 == -1 ? PLUS_EXPR : MINUS_EXPR,
8151 cst0, build_int_cst (TREE_TYPE (cst0), 1));
8152 t = fold_build2_loc (loc, code0, TREE_TYPE (arg0), TREE_OPERAND (arg0, 0), t);
8153 t = fold_convert (TREE_TYPE (arg1), t);
8155 return fold_build2_loc (loc, code, type, t, arg1);
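/* Illustrative example: assuming signed overflow is undefined, the
   rewrite above turns

     x + 3 > y   into   x + 2 >= y

   shrinking the constant's magnitude by one while preserving the
   comparison; refolding repeats this, driving constants toward zero so
   that equivalent comparisons converge to a common shape.  */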
8158 /* Canonicalize the comparison ARG0 CODE ARG1 with type TYPE with undefined
8159 overflow further. Try to decrease the magnitude of constants involved
8160 by changing LE_EXPR and GE_EXPR to LT_EXPR and GT_EXPR or vice versa
8161 and put sole constants at the second argument position.
8162 Returns the canonicalized tree if changed, otherwise NULL_TREE. */
8164 static tree
8165 maybe_canonicalize_comparison (location_t loc, enum tree_code code, tree type,
8166 tree arg0, tree arg1)
8168 tree t;
8169 bool strict_overflow_p;
8170 const char * const warnmsg = G_("assuming signed overflow does not occur "
8171 "when reducing constant in comparison");
8173 /* Try canonicalization by simplifying arg0. */
8174 strict_overflow_p = false;
8175 t = maybe_canonicalize_comparison_1 (loc, code, type, arg0, arg1,
8176 &strict_overflow_p);
8177 if (t)
8179 if (strict_overflow_p)
8180 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_MAGNITUDE);
8181 return t;
8184 /* Try canonicalization by simplifying arg1 using the swapped
8185 comparison. */
8186 code = swap_tree_comparison (code);
8187 strict_overflow_p = false;
8188 t = maybe_canonicalize_comparison_1 (loc, code, type, arg1, arg0,
8189 &strict_overflow_p);
8190 if (t && strict_overflow_p)
8191 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_MAGNITUDE);
8192 return t;
8195 /* Return whether BASE + OFFSET + BITPOS may wrap around the address
8196 space. This is used to avoid issuing overflow warnings for
8197 expressions like &p->x which cannot wrap. */
8199 static bool
8200 pointer_may_wrap_p (tree base, tree offset, HOST_WIDE_INT bitpos)
8202 if (!POINTER_TYPE_P (TREE_TYPE (base)))
8203 return true;
8205 if (bitpos < 0)
8206 return true;
8208 wide_int wi_offset;
8209 int precision = TYPE_PRECISION (TREE_TYPE (base));
8210 if (offset == NULL_TREE)
8211 wi_offset = wi::zero (precision);
8212 else if (TREE_CODE (offset) != INTEGER_CST || TREE_OVERFLOW (offset))
8213 return true;
8214 else
8215 wi_offset = offset;
8217 bool overflow;
8218 wide_int units = wi::shwi (bitpos / BITS_PER_UNIT, precision);
8219 wide_int total = wi::add (wi_offset, units, UNSIGNED, &overflow);
8220 if (overflow)
8221 return true;
8223 if (!wi::fits_uhwi_p (total))
8224 return true;
8226 HOST_WIDE_INT size = int_size_in_bytes (TREE_TYPE (TREE_TYPE (base)));
8227 if (size <= 0)
8228 return true;
8230 /* We can do slightly better for SIZE if we have an ADDR_EXPR of an
8231 array. */
8232 if (TREE_CODE (base) == ADDR_EXPR)
8234 HOST_WIDE_INT base_size;
8236 base_size = int_size_in_bytes (TREE_TYPE (TREE_OPERAND (base, 0)));
8237 if (base_size > 0 && size < base_size)
8238 size = base_size;
8241 return total.to_uhwi () > (unsigned HOST_WIDE_INT) size;
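/* Illustrative example: for `&p->x' with `*p' 16 bytes long and field
   `x' at byte offset 8, OFFSET is NULL and BITPOS is 64, so the
   computed byte total of 8 does not exceed the 16-byte object size and
   the function returns false -- such an address cannot wrap around.  */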
8244 /* Return the HOST_WIDE_INT least significant bits of T, a sizetype
8245 kind INTEGER_CST. This makes sure to properly sign-extend the
8246 constant. */
8248 static HOST_WIDE_INT
8249 size_low_cst (const_tree t)
8251 HOST_WIDE_INT w = TREE_INT_CST_ELT (t, 0);
8252 int prec = TYPE_PRECISION (TREE_TYPE (t));
8253 if (prec < HOST_BITS_PER_WIDE_INT)
8254 return sext_hwi (w, prec);
8255 return w;
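/* Illustrative example: on a target with a 32-bit sizetype and a
   64-bit HOST_WIDE_INT, the constant 0xffffffff comes back as -1
   rather than 4294967295, because only the low 32 bits are meaningful
   and sext_hwi sign-extends them.  */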
8258 /* Subroutine of fold_binary. This routine performs all of the
8259 transformations that are common to the equality/inequality
8260 operators (EQ_EXPR and NE_EXPR) and the ordering operators
8261 (LT_EXPR, LE_EXPR, GE_EXPR and GT_EXPR). Callers other than
8262 fold_binary should go through fold_binary itself. Fold a comparison with
8263 tree code CODE and type TYPE with operands OP0 and OP1. Return
8264 the folded comparison or NULL_TREE. */
8266 static tree
8267 fold_comparison (location_t loc, enum tree_code code, tree type,
8268 tree op0, tree op1)
8270 const bool equality_code = (code == EQ_EXPR || code == NE_EXPR);
8271 tree arg0, arg1, tem;
8273 arg0 = op0;
8274 arg1 = op1;
8276 STRIP_SIGN_NOPS (arg0);
8277 STRIP_SIGN_NOPS (arg1);
8279 /* Transform comparisons of the form X +- C1 CMP C2 to X CMP C2 -+ C1. */
8280 if ((TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
8281 && (equality_code
8282 || (ANY_INTEGRAL_TYPE_P (TREE_TYPE (arg0))
8283 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0))))
8284 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
8285 && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1))
8286 && TREE_CODE (arg1) == INTEGER_CST
8287 && !TREE_OVERFLOW (arg1))
8289 const enum tree_code
8290 reverse_op = TREE_CODE (arg0) == PLUS_EXPR ? MINUS_EXPR : PLUS_EXPR;
8291 tree const1 = TREE_OPERAND (arg0, 1);
8292 tree const2 = fold_convert_loc (loc, TREE_TYPE (const1), arg1);
8293 tree variable = TREE_OPERAND (arg0, 0);
8294 tree new_const = int_const_binop (reverse_op, const2, const1);
8296 /* If the constant operation overflowed this can be
8297 simplified as a comparison against INT_MAX/INT_MIN. */
8298 if (TREE_OVERFLOW (new_const)
8299 && !TYPE_OVERFLOW_WRAPS (TREE_TYPE (arg0)))
8301 int const1_sgn = tree_int_cst_sgn (const1);
8302 enum tree_code code2 = code;
8304 /* Get the sign of the constant on the lhs as if the
8305 operation were VARIABLE + CONST1. */
8306 if (TREE_CODE (arg0) == MINUS_EXPR)
8307 const1_sgn = -const1_sgn;
8309 /* The sign of the constant determines if we overflowed
8310 INT_MAX (const1_sgn == -1) or INT_MIN (const1_sgn == 1).
8311 Canonicalize to the INT_MIN overflow by swapping the comparison
8312 if necessary. */
8313 if (const1_sgn == -1)
8314 code2 = swap_tree_comparison (code);
8316 /* We now can look at the canonicalized case
8317 VARIABLE + 1 CODE2 INT_MIN
8318 and decide on the result. */
8319 switch (code2)
8321 case EQ_EXPR:
8322 case LT_EXPR:
8323 case LE_EXPR:
8324 return
8325 omit_one_operand_loc (loc, type, boolean_false_node, variable);
8327 case NE_EXPR:
8328 case GE_EXPR:
8329 case GT_EXPR:
8330 return
8331 omit_one_operand_loc (loc, type, boolean_true_node, variable);
8333 default:
8334 gcc_unreachable ();
8337 else
8339 if (!equality_code)
8340 fold_overflow_warning ("assuming signed overflow does not occur "
8341 "when changing X +- C1 cmp C2 to "
8342 "X cmp C2 -+ C1",
8343 WARN_STRICT_OVERFLOW_COMPARISON);
8344 return fold_build2_loc (loc, code, type, variable, new_const);
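/* Illustrative example: for signed `x' (undefined overflow), the
   transform above rewrites

     x + 10 < 30   into   x < 20

   and when the adjusted constant overflows, say for `x - 1 > INT_MAX',
   the comparison folds all the way to a constant false instead.  */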
8348 /* For comparisons of pointers we can decompose them into a compile-time
8349 comparison of the base objects and the offsets into the object.
8350 This requires at least one operand being an ADDR_EXPR or a
8351 POINTER_PLUS_EXPR to do more than the operand_equal_p test below. */
8352 if (POINTER_TYPE_P (TREE_TYPE (arg0))
8353 && (TREE_CODE (arg0) == ADDR_EXPR
8354 || TREE_CODE (arg1) == ADDR_EXPR
8355 || TREE_CODE (arg0) == POINTER_PLUS_EXPR
8356 || TREE_CODE (arg1) == POINTER_PLUS_EXPR))
8358 tree base0, base1, offset0 = NULL_TREE, offset1 = NULL_TREE;
8359 HOST_WIDE_INT bitsize, bitpos0 = 0, bitpos1 = 0;
8360 machine_mode mode;
8361 int volatilep, unsignedp;
8362 bool indirect_base0 = false, indirect_base1 = false;
8364 /* Get base and offset for the access. Strip ADDR_EXPR for
8365 get_inner_reference, but put it back by stripping INDIRECT_REF
8366 off the base object if possible. indirect_baseN will be true
8367 if baseN is not an address but refers to the object itself. */
8368 base0 = arg0;
8369 if (TREE_CODE (arg0) == ADDR_EXPR)
8371 base0 = get_inner_reference (TREE_OPERAND (arg0, 0),
8372 &bitsize, &bitpos0, &offset0, &mode,
8373 &unsignedp, &volatilep, false);
8374 if (TREE_CODE (base0) == INDIRECT_REF)
8375 base0 = TREE_OPERAND (base0, 0);
8376 else
8377 indirect_base0 = true;
8379 else if (TREE_CODE (arg0) == POINTER_PLUS_EXPR)
8381 base0 = TREE_OPERAND (arg0, 0);
8382 STRIP_SIGN_NOPS (base0);
8383 if (TREE_CODE (base0) == ADDR_EXPR)
8385 base0 = TREE_OPERAND (base0, 0);
8386 indirect_base0 = true;
8388 offset0 = TREE_OPERAND (arg0, 1);
8389 if (tree_fits_shwi_p (offset0))
8391 HOST_WIDE_INT off = size_low_cst (offset0);
8392 if ((HOST_WIDE_INT) (((unsigned HOST_WIDE_INT) off)
8393 * BITS_PER_UNIT)
8394 / BITS_PER_UNIT == (HOST_WIDE_INT) off)
8396 bitpos0 = off * BITS_PER_UNIT;
8397 offset0 = NULL_TREE;
8402 base1 = arg1;
8403 if (TREE_CODE (arg1) == ADDR_EXPR)
8405 base1 = get_inner_reference (TREE_OPERAND (arg1, 0),
8406 &bitsize, &bitpos1, &offset1, &mode,
8407 &unsignedp, &volatilep, false);
8408 if (TREE_CODE (base1) == INDIRECT_REF)
8409 base1 = TREE_OPERAND (base1, 0);
8410 else
8411 indirect_base1 = true;
8413 else if (TREE_CODE (arg1) == POINTER_PLUS_EXPR)
8415 base1 = TREE_OPERAND (arg1, 0);
8416 STRIP_SIGN_NOPS (base1);
8417 if (TREE_CODE (base1) == ADDR_EXPR)
8419 base1 = TREE_OPERAND (base1, 0);
8420 indirect_base1 = true;
8422 offset1 = TREE_OPERAND (arg1, 1);
8423 if (tree_fits_shwi_p (offset1))
8425 HOST_WIDE_INT off = size_low_cst (offset1);
8426 if ((HOST_WIDE_INT) (((unsigned HOST_WIDE_INT) off)
8427 * BITS_PER_UNIT)
8428 / BITS_PER_UNIT == (HOST_WIDE_INT) off)
8430 bitpos1 = off * BITS_PER_UNIT;
8431 offset1 = NULL_TREE;
8436 /* If we have equivalent bases we might be able to simplify. */
8437 if (indirect_base0 == indirect_base1
8438 && operand_equal_p (base0, base1, 0))
8440 /* We can fold this expression to a constant if the non-constant
8441 offset parts are equal. */
8442 if ((offset0 == offset1
8443 || (offset0 && offset1
8444 && operand_equal_p (offset0, offset1, 0)))
8445 && (code == EQ_EXPR
8446 || code == NE_EXPR
8447 || (indirect_base0 && DECL_P (base0))
8448 || POINTER_TYPE_OVERFLOW_UNDEFINED))
8451 if (!equality_code
8452 && bitpos0 != bitpos1
8453 && (pointer_may_wrap_p (base0, offset0, bitpos0)
8454 || pointer_may_wrap_p (base1, offset1, bitpos1)))
8455 fold_overflow_warning (("assuming pointer wraparound does not "
8456 "occur when comparing P +- C1 with "
8457 "P +- C2"),
8458 WARN_STRICT_OVERFLOW_CONDITIONAL);
8460 switch (code)
8462 case EQ_EXPR:
8463 return constant_boolean_node (bitpos0 == bitpos1, type);
8464 case NE_EXPR:
8465 return constant_boolean_node (bitpos0 != bitpos1, type);
8466 case LT_EXPR:
8467 return constant_boolean_node (bitpos0 < bitpos1, type);
8468 case LE_EXPR:
8469 return constant_boolean_node (bitpos0 <= bitpos1, type);
8470 case GE_EXPR:
8471 return constant_boolean_node (bitpos0 >= bitpos1, type);
8472 case GT_EXPR:
8473 return constant_boolean_node (bitpos0 > bitpos1, type);
8474 default:;
8477 /* We can simplify the comparison to a comparison of the variable
8478 offset parts if the constant offset parts are equal.
8479 Be careful to use signed sizetype here because otherwise we
8480 mess with array offsets in the wrong way. This is possible
8481 because pointer arithmetic is restricted to remain within an
8482 object and overflow on pointer differences is undefined as of
8483 6.5.6/8 and /9 with respect to the signed ptrdiff_t. */
8484 else if (bitpos0 == bitpos1
8485 && (equality_code
8486 || (indirect_base0 && DECL_P (base0))
8487 || POINTER_TYPE_OVERFLOW_UNDEFINED))
8489 /* By converting to signed sizetype we cover middle-end pointer
8490 arithmetic which operates on unsigned pointer types of size
8491 type size and ARRAY_REF offsets which are properly sign or
8492 zero extended from their type in case it is narrower than
8493 sizetype. */
8494 if (offset0 == NULL_TREE)
8495 offset0 = build_int_cst (ssizetype, 0);
8496 else
8497 offset0 = fold_convert_loc (loc, ssizetype, offset0);
8498 if (offset1 == NULL_TREE)
8499 offset1 = build_int_cst (ssizetype, 0);
8500 else
8501 offset1 = fold_convert_loc (loc, ssizetype, offset1);
8503 if (!equality_code
8504 && (pointer_may_wrap_p (base0, offset0, bitpos0)
8505 || pointer_may_wrap_p (base1, offset1, bitpos1)))
8506 fold_overflow_warning (("assuming pointer wraparound does not "
8507 "occur when comparing P +- C1 with "
8508 "P +- C2"),
8509 WARN_STRICT_OVERFLOW_COMPARISON);
8511 return fold_build2_loc (loc, code, type, offset0, offset1);
8514 /* For equal offsets we can simplify to a comparison of the
8515 base addresses. */
8516 else if (bitpos0 == bitpos1
8517 && (indirect_base0
8518 ? base0 != TREE_OPERAND (arg0, 0) : base0 != arg0)
8519 && (indirect_base1
8520 ? base1 != TREE_OPERAND (arg1, 0) : base1 != arg1)
8521 && ((offset0 == offset1)
8522 || (offset0 && offset1
8523 && operand_equal_p (offset0, offset1, 0))))
8525 if (indirect_base0)
8526 base0 = build_fold_addr_expr_loc (loc, base0);
8527 if (indirect_base1)
8528 base1 = build_fold_addr_expr_loc (loc, base1);
8529 return fold_build2_loc (loc, code, type, base0, base1);
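/* Illustrative example: given `struct { int f0, f1; } a;', the
   decomposition above folds `&a.f0 == &a.f1' to false directly from
   the differing constant bit positions, while two POINTER_PLUS_EXPRs
   off the same base with equal constant parts, such as `p + i' and
   `p + j', reduce to a signed (ssizetype) comparison of i and j.  */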
8533 /* Transform comparisons of the form X +- C1 CMP Y +- C2 to
8534 X CMP Y +- C2 +- C1 for signed X, Y. This is valid if
8535 the resulting offset is smaller in absolute value than the
8536 original one and has the same sign. */
8537 if (ANY_INTEGRAL_TYPE_P (TREE_TYPE (arg0))
8538 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0))
8539 && (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
8540 && (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
8541 && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1)))
8542 && (TREE_CODE (arg1) == PLUS_EXPR || TREE_CODE (arg1) == MINUS_EXPR)
8543 && (TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
8544 && !TREE_OVERFLOW (TREE_OPERAND (arg1, 1))))
8546 tree const1 = TREE_OPERAND (arg0, 1);
8547 tree const2 = TREE_OPERAND (arg1, 1);
8548 tree variable1 = TREE_OPERAND (arg0, 0);
8549 tree variable2 = TREE_OPERAND (arg1, 0);
8550 tree cst;
8551 const char * const warnmsg = G_("assuming signed overflow does not "
8552 "occur when combining constants around "
8553 "a comparison");
8555 /* Put the constant on the side where it doesn't overflow and is
8556 of lower absolute value and of the same sign as before. */
8557 cst = int_const_binop (TREE_CODE (arg0) == TREE_CODE (arg1)
8558 ? MINUS_EXPR : PLUS_EXPR,
8559 const2, const1);
8560 if (!TREE_OVERFLOW (cst)
8561 && tree_int_cst_compare (const2, cst) == tree_int_cst_sgn (const2)
8562 && tree_int_cst_sgn (cst) == tree_int_cst_sgn (const2))
8564 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_COMPARISON);
8565 return fold_build2_loc (loc, code, type,
8566 variable1,
8567 fold_build2_loc (loc, TREE_CODE (arg1),
8568 TREE_TYPE (arg1),
8569 variable2, cst));
8572 cst = int_const_binop (TREE_CODE (arg0) == TREE_CODE (arg1)
8573 ? MINUS_EXPR : PLUS_EXPR,
8574 const1, const2);
8575 if (!TREE_OVERFLOW (cst)
8576 && tree_int_cst_compare (const1, cst) == tree_int_cst_sgn (const1)
8577 && tree_int_cst_sgn (cst) == tree_int_cst_sgn (const1))
8579 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_COMPARISON);
8580 return fold_build2_loc (loc, code, type,
8581 fold_build2_loc (loc, TREE_CODE (arg0),
8582 TREE_TYPE (arg0),
8583 variable1, cst),
8584 variable2);
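/* Illustrative example: assuming undefined signed overflow, the code
   above combines the two constants on whichever side keeps the result
   smaller in magnitude and of unchanged sign, e.g.

     x + 7 < y + 3   ->   x + 4 < y

   (the combined constant 4 keeps the sign of the original 7 while
   losing magnitude, so the warning-guarded rewrite applies).  */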
8588 tem = maybe_canonicalize_comparison (loc, code, type, arg0, arg1);
8589 if (tem)
8590 return tem;
8592 /* If this is comparing a constant with a MIN_EXPR or a MAX_EXPR of a
8593 constant, we can simplify it. */
8594 if (TREE_CODE (arg1) == INTEGER_CST
8595 && (TREE_CODE (arg0) == MIN_EXPR
8596 || TREE_CODE (arg0) == MAX_EXPR)
8597 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
8599 tem = optimize_minmax_comparison (loc, code, type, op0, op1);
8600 if (tem)
8601 return tem;
8604 /* If we are comparing an expression that just has comparisons
8605 of two integer values, arithmetic expressions of those comparisons,
8606 and constants, we can simplify it. There are only three cases
8607 to check: the two values can either be equal, the first can be
8608 greater, or the second can be greater. Fold the expression for
8609 those three values. Since each value must be 0 or 1, we have
8610 eight possibilities, each of which corresponds to the constant 0
8611 or 1 or one of the six possible comparisons.
8613 This handles common cases like (a > b) == 0 but also handles
8614 expressions like ((x > y) - (y > x)) > 0, which supposedly
8615 occur in macroized code. */
8617 if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg0) != INTEGER_CST)
8619 tree cval1 = 0, cval2 = 0;
8620 int save_p = 0;
8622 if (twoval_comparison_p (arg0, &cval1, &cval2, &save_p)
8623 /* Don't handle degenerate cases here; they should already
8624 have been handled anyway. */
8625 && cval1 != 0 && cval2 != 0
8626 && ! (TREE_CONSTANT (cval1) && TREE_CONSTANT (cval2))
8627 && TREE_TYPE (cval1) == TREE_TYPE (cval2)
8628 && INTEGRAL_TYPE_P (TREE_TYPE (cval1))
8629 && TYPE_MAX_VALUE (TREE_TYPE (cval1))
8630 && TYPE_MAX_VALUE (TREE_TYPE (cval2))
8631 && ! operand_equal_p (TYPE_MIN_VALUE (TREE_TYPE (cval1)),
8632 TYPE_MAX_VALUE (TREE_TYPE (cval2)), 0))
8634 tree maxval = TYPE_MAX_VALUE (TREE_TYPE (cval1));
8635 tree minval = TYPE_MIN_VALUE (TREE_TYPE (cval1));
8637 /* We can't just pass T to eval_subst in case cval1 or cval2
8638 was the same as ARG1. */
8640 tree high_result
8641 = fold_build2_loc (loc, code, type,
8642 eval_subst (loc, arg0, cval1, maxval,
8643 cval2, minval),
8644 arg1);
8645 tree equal_result
8646 = fold_build2_loc (loc, code, type,
8647 eval_subst (loc, arg0, cval1, maxval,
8648 cval2, maxval),
8649 arg1);
8650 tree low_result
8651 = fold_build2_loc (loc, code, type,
8652 eval_subst (loc, arg0, cval1, minval,
8653 cval2, maxval),
8654 arg1);
8656 /* All three of these results should be 0 or 1. Confirm they are.
8657 Then use those values to select the proper code to use. */
8659 if (TREE_CODE (high_result) == INTEGER_CST
8660 && TREE_CODE (equal_result) == INTEGER_CST
8661 && TREE_CODE (low_result) == INTEGER_CST)
8663 /* Make a 3-bit mask with the high-order bit being the
8664 value for `>', the next for '=', and the low for '<'. */
8665 switch ((integer_onep (high_result) * 4)
8666 + (integer_onep (equal_result) * 2)
8667 + integer_onep (low_result))
8669 case 0:
8670 /* Always false. */
8671 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
8672 case 1:
8673 code = LT_EXPR;
8674 break;
8675 case 2:
8676 code = EQ_EXPR;
8677 break;
8678 case 3:
8679 code = LE_EXPR;
8680 break;
8681 case 4:
8682 code = GT_EXPR;
8683 break;
8684 case 5:
8685 code = NE_EXPR;
8686 break;
8687 case 6:
8688 code = GE_EXPR;
8689 break;
8690 case 7:
8691 /* Always true. */
8692 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
8695 if (save_p)
8697 tem = save_expr (build2 (code, type, cval1, cval2));
8698 SET_EXPR_LOCATION (tem, loc);
8699 return tem;
8701 return fold_build2_loc (loc, code, type, cval1, cval2);
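/* Illustrative worked example: for `(a > b) == 0' the three
   substitutions evaluate the left-hand side at (a,b) = (max,min),
   (max,max) and (min,max), yielding high_result = 0, equal_result = 1
   and low_result = 1.  That selects mask value 3 in the switch above,
   so the whole expression folds to `a <= b'.  */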
8706 /* We can fold X/C1 op C2 where C1 and C2 are integer constants
8707 into a single range test. */
8708 if ((TREE_CODE (arg0) == TRUNC_DIV_EXPR
8709 || TREE_CODE (arg0) == EXACT_DIV_EXPR)
8710 && TREE_CODE (arg1) == INTEGER_CST
8711 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
8712 && !integer_zerop (TREE_OPERAND (arg0, 1))
8713 && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1))
8714 && !TREE_OVERFLOW (arg1))
8716 tem = fold_div_compare (loc, code, type, arg0, arg1);
8717 if (tem != NULL_TREE)
8718 return tem;
8721 return NULL_TREE;
8725 /* Subroutine of fold_binary. Optimize complex multiplications of the
8726 form z * conj(z), as pow(realpart(z),2) + pow(imagpart(z),2). The
8727 argument EXPR represents the expression "z" of type TYPE. */
8729 static tree
8730 fold_mult_zconjz (location_t loc, tree type, tree expr)
8732 tree itype = TREE_TYPE (type);
8733 tree rpart, ipart, tem;
8735 if (TREE_CODE (expr) == COMPLEX_EXPR)
8737 rpart = TREE_OPERAND (expr, 0);
8738 ipart = TREE_OPERAND (expr, 1);
8740 else if (TREE_CODE (expr) == COMPLEX_CST)
8742 rpart = TREE_REALPART (expr);
8743 ipart = TREE_IMAGPART (expr);
8745 else
8747 expr = save_expr (expr);
8748 rpart = fold_build1_loc (loc, REALPART_EXPR, itype, expr);
8749 ipart = fold_build1_loc (loc, IMAGPART_EXPR, itype, expr);
8752 rpart = save_expr (rpart);
8753 ipart = save_expr (ipart);
8754 tem = fold_build2_loc (loc, PLUS_EXPR, itype,
8755 fold_build2_loc (loc, MULT_EXPR, itype, rpart, rpart),
8756 fold_build2_loc (loc, MULT_EXPR, itype, ipart, ipart));
8757 return fold_build2_loc (loc, COMPLEX_EXPR, type, tem,
8758 build_zero_cst (itype));
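/* Illustrative sketch (not from the original sources): the identity
   used above in stand-alone scalar form.  For z = a + b*i,
   z * conj(z) = (a + b*i) * (a - b*i) = a*a + b*b, with an identically
   zero imaginary part.  */
#if 0
#include <complex.h>

static double
model_zconjz_real (double complex z)
{
  double a = creal (z), b = cimag (z);
  return a * a + b * b;	/* the imaginary part is always zero */
}
#endif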
8762 /* Helper function for fold_vec_perm. Store elements of VECTOR_CST or
8763 CONSTRUCTOR ARG into array ELTS and return true if successful. */
8765 static bool
8766 vec_cst_ctor_to_array (tree arg, tree *elts)
8768 unsigned int nelts = TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg)), i;
8770 if (TREE_CODE (arg) == VECTOR_CST)
8772 for (i = 0; i < VECTOR_CST_NELTS (arg); ++i)
8773 elts[i] = VECTOR_CST_ELT (arg, i);
8775 else if (TREE_CODE (arg) == CONSTRUCTOR)
8777 constructor_elt *elt;
8779 FOR_EACH_VEC_SAFE_ELT (CONSTRUCTOR_ELTS (arg), i, elt)
8780 if (i >= nelts || TREE_CODE (TREE_TYPE (elt->value)) == VECTOR_TYPE)
8781 return false;
8782 else
8783 elts[i] = elt->value;
8785 else
8786 return false;
8787 for (; i < nelts; i++)
8788 elts[i]
8789 = fold_convert (TREE_TYPE (TREE_TYPE (arg)), integer_zero_node);
8790 return true;
8793 /* Attempt to fold vector permutation of ARG0 and ARG1 vectors using SEL
8794 selector. Return the folded VECTOR_CST or CONSTRUCTOR if successful,
8795 NULL_TREE otherwise. */
8797 static tree
8798 fold_vec_perm (tree type, tree arg0, tree arg1, const unsigned char *sel)
8800 unsigned int nelts = TYPE_VECTOR_SUBPARTS (type), i;
8801 tree *elts;
8802 bool need_ctor = false;
8804 gcc_assert (TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0)) == nelts
8805 && TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg1)) == nelts);
8806 if (TREE_TYPE (TREE_TYPE (arg0)) != TREE_TYPE (type)
8807 || TREE_TYPE (TREE_TYPE (arg1)) != TREE_TYPE (type))
8808 return NULL_TREE;
8810 elts = XALLOCAVEC (tree, nelts * 3);
8811 if (!vec_cst_ctor_to_array (arg0, elts)
8812 || !vec_cst_ctor_to_array (arg1, elts + nelts))
8813 return NULL_TREE;
8815 for (i = 0; i < nelts; i++)
8817 if (!CONSTANT_CLASS_P (elts[sel[i]]))
8818 need_ctor = true;
8819 elts[i + 2 * nelts] = unshare_expr (elts[sel[i]]);
8822 if (need_ctor)
8824 vec<constructor_elt, va_gc> *v;
8825 vec_alloc (v, nelts);
8826 for (i = 0; i < nelts; i++)
8827 CONSTRUCTOR_APPEND_ELT (v, NULL_TREE, elts[2 * nelts + i]);
8828 return build_constructor (type, v);
8830 else
8831 return build_vector (type, &elts[2 * nelts]);
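/* Illustrative sketch (not from the original sources): a stand-alone
   model of the selection rule for four-element vectors.  Selector
   values 0..3 pick from ARG0 and 4..7 pick from ARG1, so
   sel = {0, 4, 1, 5} interleaves the low halves into {a0, b0, a1, b1}.  */
#if 0
static void
model_vec_perm_4 (const int arg0[4], const int arg1[4],
                  const unsigned char sel[4], int result[4])
{
  int elts[8];
  for (int i = 0; i < 4; i++)
    {
      elts[i] = arg0[i];	/* selector values 0..3 */
      elts[i + 4] = arg1[i];	/* selector values 4..7 */
    }
  for (int i = 0; i < 4; i++)
    result[i] = elts[sel[i]];
}
#endif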
8834 /* Try to fold a pointer difference of type TYPE between two address expressions of
8835 array references AREF0 and AREF1 using location LOC. Return a
8836 simplified expression for the difference or NULL_TREE. */
8838 static tree
8839 fold_addr_of_array_ref_difference (location_t loc, tree type,
8840 tree aref0, tree aref1)
8842 tree base0 = TREE_OPERAND (aref0, 0);
8843 tree base1 = TREE_OPERAND (aref1, 0);
8844 tree base_offset = build_int_cst (type, 0);
8846 /* If the bases are array references as well, recurse. If the bases
8847 are pointer indirections compute the difference of the pointers.
8848 If the bases are equal, we are set. */
8849 if ((TREE_CODE (base0) == ARRAY_REF
8850 && TREE_CODE (base1) == ARRAY_REF
8851 && (base_offset
8852 = fold_addr_of_array_ref_difference (loc, type, base0, base1)))
8853 || (INDIRECT_REF_P (base0)
8854 && INDIRECT_REF_P (base1)
8855 && (base_offset = fold_binary_loc (loc, MINUS_EXPR, type,
8856 TREE_OPERAND (base0, 0),
8857 TREE_OPERAND (base1, 0))))
8858 || operand_equal_p (base0, base1, 0))
8860 tree op0 = fold_convert_loc (loc, type, TREE_OPERAND (aref0, 1));
8861 tree op1 = fold_convert_loc (loc, type, TREE_OPERAND (aref1, 1));
8862 tree esz = fold_convert_loc (loc, type, array_ref_element_size (aref0));
8863 tree diff = build2 (MINUS_EXPR, type, op0, op1);
8864 return fold_build2_loc (loc, PLUS_EXPR, type,
8865 base_offset,
8866 fold_build2_loc (loc, MULT_EXPR, type,
8867 diff, esz));
8869 return NULL_TREE;
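/* Illustrative sketch (not from the original sources): what the
   recursion above computes, expressed at the C level.  */
#if 0
#include <assert.h>
#include <stddef.h>

static void
check_array_ref_difference (double a[100], int i, int j)
{
  /* &a[i] - &a[j] has equal bases, so it folds to (i - j) scaled by
     the element size, i.e. a difference of i - j elements.  */
  ptrdiff_t d = &a[i] - &a[j];
  assert (d == (ptrdiff_t) i - (ptrdiff_t) j);
}
#endif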
8872 /* If the real or vector real constant CST of type TYPE has an exact
8873 inverse, return it, else return NULL. */
8875 tree
8876 exact_inverse (tree type, tree cst)
8878 REAL_VALUE_TYPE r;
8879 tree unit_type, *elts;
8880 machine_mode mode;
8881 unsigned vec_nelts, i;
8883 switch (TREE_CODE (cst))
8885 case REAL_CST:
8886 r = TREE_REAL_CST (cst);
8888 if (exact_real_inverse (TYPE_MODE (type), &r))
8889 return build_real (type, r);
8891 return NULL_TREE;
8893 case VECTOR_CST:
8894 vec_nelts = VECTOR_CST_NELTS (cst);
8895 elts = XALLOCAVEC (tree, vec_nelts);
8896 unit_type = TREE_TYPE (type);
8897 mode = TYPE_MODE (unit_type);
8899 for (i = 0; i < vec_nelts; i++)
8901 r = TREE_REAL_CST (VECTOR_CST_ELT (cst, i));
8902 if (!exact_real_inverse (mode, &r))
8903 return NULL_TREE;
8904 elts[i] = build_real (unit_type, r);
8907 return build_vector (type, elts);
8909 default:
8910 return NULL_TREE;
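/* Illustrative example: 1/0.25 == 4.0 is exactly representable in
   binary floating point, so a later transform may turn `x / 0.25' into
   the cheaper `x * 4.0'; 1/0.3 is not exact, so exact_inverse returns
   NULL_TREE and such a division is left alone.  */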
8914 /* Mask out the tz least significant bits of X of type TYPE where
8915 tz is the number of trailing zeroes in Y. */
8916 static wide_int
8917 mask_with_tz (tree type, const wide_int &x, const wide_int &y)
8919 int tz = wi::ctz (y);
8920 if (tz > 0)
8921 return wi::mask (tz, true, TYPE_PRECISION (type)) & x;
8922 return x;
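/* Illustrative example: for y = 12 (binary 1100, two trailing zeroes)
   the low two bits of x are cleared, e.g. x = 7 (0111) becomes
   4 (0100); bits that `x & y' can never have set are masked away up
   front.  */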
8925 /* Return true when T is an address and is known to be nonzero.
8926 For floating point we further ensure that T is not denormal.
8927 Similar logic is present in nonzero_address in rtlanal.h.
8929 If the return value is based on the assumption that signed overflow
8930 is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
8931 change *STRICT_OVERFLOW_P. */
8933 static bool
8934 tree_expr_nonzero_warnv_p (tree t, bool *strict_overflow_p)
8936 tree type = TREE_TYPE (t);
8937 enum tree_code code;
8939 /* Doing something useful for floating point would need more work. */
8940 if (!INTEGRAL_TYPE_P (type) && !POINTER_TYPE_P (type))
8941 return false;
8943 code = TREE_CODE (t);
8944 switch (TREE_CODE_CLASS (code))
8946 case tcc_unary:
8947 return tree_unary_nonzero_warnv_p (code, type, TREE_OPERAND (t, 0),
8948 strict_overflow_p);
8949 case tcc_binary:
8950 case tcc_comparison:
8951 return tree_binary_nonzero_warnv_p (code, type,
8952 TREE_OPERAND (t, 0),
8953 TREE_OPERAND (t, 1),
8954 strict_overflow_p);
8955 case tcc_constant:
8956 case tcc_declaration:
8957 case tcc_reference:
8958 return tree_single_nonzero_warnv_p (t, strict_overflow_p);
8960 default:
8961 break;
8964 switch (code)
8966 case TRUTH_NOT_EXPR:
8967 return tree_unary_nonzero_warnv_p (code, type, TREE_OPERAND (t, 0),
8968 strict_overflow_p);
8970 case TRUTH_AND_EXPR:
8971 case TRUTH_OR_EXPR:
8972 case TRUTH_XOR_EXPR:
8973 return tree_binary_nonzero_warnv_p (code, type,
8974 TREE_OPERAND (t, 0),
8975 TREE_OPERAND (t, 1),
8976 strict_overflow_p);
8978 case COND_EXPR:
8979 case CONSTRUCTOR:
8980 case OBJ_TYPE_REF:
8981 case ASSERT_EXPR:
8982 case ADDR_EXPR:
8983 case WITH_SIZE_EXPR:
8984 case SSA_NAME:
8985 return tree_single_nonzero_warnv_p (t, strict_overflow_p);
8987 case COMPOUND_EXPR:
8988 case MODIFY_EXPR:
8989 case BIND_EXPR:
8990 return tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 1),
8991 strict_overflow_p);
8993 case SAVE_EXPR:
8994 return tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 0),
8995 strict_overflow_p);
8997 case CALL_EXPR:
8999 tree fndecl = get_callee_fndecl (t);
9000 if (!fndecl) return false;
9001 if (flag_delete_null_pointer_checks && !flag_check_new
9002 && DECL_IS_OPERATOR_NEW (fndecl)
9003 && !TREE_NOTHROW (fndecl))
9004 return true;
9005 if (flag_delete_null_pointer_checks
9006 && lookup_attribute ("returns_nonnull",
9007 TYPE_ATTRIBUTES (TREE_TYPE (fndecl))))
9008 return true;
9009 return alloca_call_p (t);
9012 default:
9013 break;
9015 return false;
9018 /* Return true when T is an address and is known to be nonzero.
9019 Handle warnings about undefined signed overflow. */
9021 static bool
9022 tree_expr_nonzero_p (tree t)
9024 bool ret, strict_overflow_p;
9026 strict_overflow_p = false;
9027 ret = tree_expr_nonzero_warnv_p (t, &strict_overflow_p);
9028 if (strict_overflow_p)
9029 fold_overflow_warning (("assuming signed overflow does not occur when "
9030 "determining that expression is always "
9031 "non-zero"),
9032 WARN_STRICT_OVERFLOW_MISC);
9033 return ret;
9036 /* Fold a binary expression of code CODE and type TYPE with operands
9037 OP0 and OP1. LOC is the location of the resulting expression.
9038 Return the folded expression if folding is successful. Otherwise,
9039 return NULL_TREE. */
9041 tree
9042 fold_binary_loc (location_t loc,
9043 enum tree_code code, tree type, tree op0, tree op1)
9045 enum tree_code_class kind = TREE_CODE_CLASS (code);
9046 tree arg0, arg1, tem;
9047 tree t1 = NULL_TREE;
9048 bool strict_overflow_p;
9049 unsigned int prec;
9051 gcc_assert (IS_EXPR_CODE_CLASS (kind)
9052 && TREE_CODE_LENGTH (code) == 2
9053 && op0 != NULL_TREE
9054 && op1 != NULL_TREE);
9056 arg0 = op0;
9057 arg1 = op1;
9059 /* Strip any conversions that don't change the mode. This is
9060 safe for every expression, except for a comparison expression
9061 because its signedness is derived from its operands. So, in
9062 the latter case, only strip conversions that don't change the
9063 signedness. MIN_EXPR/MAX_EXPR also need signedness of arguments
9064 preserved.
9066 Note that this is done as an internal manipulation within the
9067 constant folder, in order to find the simplest representation
9068 of the arguments so that their form can be studied. In any
9069 case, the appropriate type conversions should be put back in
9070 the tree that will get out of the constant folder. */
9072 if (kind == tcc_comparison || code == MIN_EXPR || code == MAX_EXPR)
9074 STRIP_SIGN_NOPS (arg0);
9075 STRIP_SIGN_NOPS (arg1);
9077 else
9079 STRIP_NOPS (arg0);
9080 STRIP_NOPS (arg1);
9083 /* Note that TREE_CONSTANT isn't enough: static var addresses are
9084 constant but we can't do arithmetic on them. */
9085 if (CONSTANT_CLASS_P (arg0) && CONSTANT_CLASS_P (arg1))
9087 tem = const_binop (code, type, arg0, arg1);
9088 if (tem != NULL_TREE)
9090 if (TREE_TYPE (tem) != type)
9091 tem = fold_convert_loc (loc, type, tem);
9092 return tem;
9096 /* If this is a commutative operation, and ARG0 is a constant, move it
9097 to ARG1 to reduce the number of tests below. */
9098 if (commutative_tree_code (code)
9099 && tree_swap_operands_p (arg0, arg1, true))
9100 return fold_build2_loc (loc, code, type, op1, op0);
9102 /* Likewise if this is a comparison, and ARG0 is a constant, move it
9103 to ARG1 to reduce the number of tests below. */
9104 if (kind == tcc_comparison
9105 && tree_swap_operands_p (arg0, arg1, true))
9106 return fold_build2_loc (loc, swap_tree_comparison (code), type, op1, op0);
9108 tem = generic_simplify (loc, code, type, op0, op1);
9109 if (tem)
9110 return tem;
9112 /* ARG0 is the first operand of EXPR, and ARG1 is the second operand.
9114 First check for cases where an arithmetic operation is applied to a
9115 compound, conditional, or comparison operation. Push the arithmetic
9116 operation inside the compound or conditional to see if any folding
9117 can then be done. Convert comparison to conditional for this purpose.
9118 This also optimizes non-constant cases that used to be done in
9119 expand_expr.
9121 Before we do that, see if this is a BIT_AND_EXPR or a BIT_IOR_EXPR,
9122 one of the operands is a comparison and the other is a comparison, a
9123 BIT_AND_EXPR with the constant 1, or a truth value. In that case, the
9124 code below would make the expression more complex. Change it to a
9125 TRUTH_{AND,OR}_EXPR. Likewise, convert a similar NE_EXPR to
9126 TRUTH_XOR_EXPR and an EQ_EXPR to the inversion of a TRUTH_XOR_EXPR. */
9128 if ((code == BIT_AND_EXPR || code == BIT_IOR_EXPR
9129 || code == EQ_EXPR || code == NE_EXPR)
9130 && TREE_CODE (type) != VECTOR_TYPE
9131 && ((truth_value_p (TREE_CODE (arg0))
9132 && (truth_value_p (TREE_CODE (arg1))
9133 || (TREE_CODE (arg1) == BIT_AND_EXPR
9134 && integer_onep (TREE_OPERAND (arg1, 1)))))
9135 || (truth_value_p (TREE_CODE (arg1))
9136 && (truth_value_p (TREE_CODE (arg0))
9137 || (TREE_CODE (arg0) == BIT_AND_EXPR
9138 && integer_onep (TREE_OPERAND (arg0, 1)))))))
9140 tem = fold_build2_loc (loc, code == BIT_AND_EXPR ? TRUTH_AND_EXPR
9141 : code == BIT_IOR_EXPR ? TRUTH_OR_EXPR
9142 : TRUTH_XOR_EXPR,
9143 boolean_type_node,
9144 fold_convert_loc (loc, boolean_type_node, arg0),
9145 fold_convert_loc (loc, boolean_type_node, arg1));
9147 if (code == EQ_EXPR)
9148 tem = invert_truthvalue_loc (loc, tem);
9150 return fold_convert_loc (loc, type, tem);
9153 if (TREE_CODE_CLASS (code) == tcc_binary
9154 || TREE_CODE_CLASS (code) == tcc_comparison)
9156 if (TREE_CODE (arg0) == COMPOUND_EXPR)
9158 tem = fold_build2_loc (loc, code, type,
9159 fold_convert_loc (loc, TREE_TYPE (op0),
9160 TREE_OPERAND (arg0, 1)), op1);
9161 return build2_loc (loc, COMPOUND_EXPR, type, TREE_OPERAND (arg0, 0),
9162 tem);
9164 if (TREE_CODE (arg1) == COMPOUND_EXPR
9165 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
9167 tem = fold_build2_loc (loc, code, type, op0,
9168 fold_convert_loc (loc, TREE_TYPE (op1),
9169 TREE_OPERAND (arg1, 1)));
9170 return build2_loc (loc, COMPOUND_EXPR, type, TREE_OPERAND (arg1, 0),
9171 tem);
9174 if (TREE_CODE (arg0) == COND_EXPR
9175 || TREE_CODE (arg0) == VEC_COND_EXPR
9176 || COMPARISON_CLASS_P (arg0))
9178 tem = fold_binary_op_with_conditional_arg (loc, code, type, op0, op1,
9179 arg0, arg1,
9180 /*cond_first_p=*/1);
9181 if (tem != NULL_TREE)
9182 return tem;
9185 if (TREE_CODE (arg1) == COND_EXPR
9186 || TREE_CODE (arg1) == VEC_COND_EXPR
9187 || COMPARISON_CLASS_P (arg1))
9189 tem = fold_binary_op_with_conditional_arg (loc, code, type, op0, op1,
9190 arg1, arg0,
9191 /*cond_first_p=*/0);
9192 if (tem != NULL_TREE)
9193 return tem;
9197 switch (code)
9199 case MEM_REF:
9200 /* MEM[&MEM[p, CST1], CST2] -> MEM[p, CST1 + CST2]. */
9201 if (TREE_CODE (arg0) == ADDR_EXPR
9202 && TREE_CODE (TREE_OPERAND (arg0, 0)) == MEM_REF)
9204 tree iref = TREE_OPERAND (arg0, 0);
9205 return fold_build2 (MEM_REF, type,
9206 TREE_OPERAND (iref, 0),
9207 int_const_binop (PLUS_EXPR, arg1,
9208 TREE_OPERAND (iref, 1)));
9211 /* MEM[&a.b, CST2] -> MEM[&a, offsetof (a, b) + CST2]. */
9212 if (TREE_CODE (arg0) == ADDR_EXPR
9213 && handled_component_p (TREE_OPERAND (arg0, 0)))
9215 tree base;
9216 HOST_WIDE_INT coffset;
9217 base = get_addr_base_and_unit_offset (TREE_OPERAND (arg0, 0),
9218 &coffset);
9219 if (!base)
9220 return NULL_TREE;
9221 return fold_build2 (MEM_REF, type,
9222 build_fold_addr_expr (base),
9223 int_const_binop (PLUS_EXPR, arg1,
9224 size_int (coffset)));
9227 return NULL_TREE;
9229 case POINTER_PLUS_EXPR:
9230 /* INT +p INT -> (PTR)(INT + INT). Stripping types allows for this. */
9231 if (INTEGRAL_TYPE_P (TREE_TYPE (arg1))
9232 && INTEGRAL_TYPE_P (TREE_TYPE (arg0)))
9233 return fold_convert_loc (loc, type,
9234 fold_build2_loc (loc, PLUS_EXPR, sizetype,
9235 fold_convert_loc (loc, sizetype,
9236 arg1),
9237 fold_convert_loc (loc, sizetype,
9238 arg0)));
9240 return NULL_TREE;
9242 case PLUS_EXPR:
9243 if (INTEGRAL_TYPE_P (type) || VECTOR_INTEGER_TYPE_P (type))
9245 /* X + (X / CST) * -CST is X % CST. */
9246 if (TREE_CODE (arg1) == MULT_EXPR
9247 && TREE_CODE (TREE_OPERAND (arg1, 0)) == TRUNC_DIV_EXPR
9248 && operand_equal_p (arg0,
9249 TREE_OPERAND (TREE_OPERAND (arg1, 0), 0), 0))
9251 tree cst0 = TREE_OPERAND (TREE_OPERAND (arg1, 0), 1);
9252 tree cst1 = TREE_OPERAND (arg1, 1);
9253 tree sum = fold_binary_loc (loc, PLUS_EXPR, TREE_TYPE (cst1),
9254 cst1, cst0);
9255 if (sum && integer_zerop (sum))
9256 return fold_convert_loc (loc, type,
9257 fold_build2_loc (loc, TRUNC_MOD_EXPR,
9258 TREE_TYPE (arg0), arg0,
9259 cst0));
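/* For example, x + (x / 16) * -16 folds to x % 16 here, because
   the constants 16 and -16 sum to zero.  */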
9263 /* Handle (A1 * C1) + (A2 * C2) with A1, A2 or C1, C2 being the same or
9264 one. Make sure the type is not saturating and has the signedness of
9265 the stripped operands, as fold_plusminus_mult_expr will re-associate.
9266 ??? The latter condition should use TYPE_OVERFLOW_* flags instead. */
9267 if ((TREE_CODE (arg0) == MULT_EXPR
9268 || TREE_CODE (arg1) == MULT_EXPR)
9269 && !TYPE_SATURATING (type)
9270 && TYPE_UNSIGNED (type) == TYPE_UNSIGNED (TREE_TYPE (arg0))
9271 && TYPE_UNSIGNED (type) == TYPE_UNSIGNED (TREE_TYPE (arg1))
9272 && (!FLOAT_TYPE_P (type) || flag_associative_math))
9274 tree tem = fold_plusminus_mult_expr (loc, code, type, arg0, arg1);
9275 if (tem)
9276 return tem;
9279 if (! FLOAT_TYPE_P (type))
9281 /* Reassociate (plus (plus (mult) (foo)) (mult)) as
9282 (plus (plus (mult) (mult)) (foo)) so that we can
9283 take advantage of the factoring cases below. */
9284 if (ANY_INTEGRAL_TYPE_P (type)
9285 && TYPE_OVERFLOW_WRAPS (type)
9286 && (((TREE_CODE (arg0) == PLUS_EXPR
9287 || TREE_CODE (arg0) == MINUS_EXPR)
9288 && TREE_CODE (arg1) == MULT_EXPR)
9289 || ((TREE_CODE (arg1) == PLUS_EXPR
9290 || TREE_CODE (arg1) == MINUS_EXPR)
9291 && TREE_CODE (arg0) == MULT_EXPR)))
9293 tree parg0, parg1, parg, marg;
9294 enum tree_code pcode;
9296 if (TREE_CODE (arg1) == MULT_EXPR)
9297 parg = arg0, marg = arg1;
9298 else
9299 parg = arg1, marg = arg0;
9300 pcode = TREE_CODE (parg);
9301 parg0 = TREE_OPERAND (parg, 0);
9302 parg1 = TREE_OPERAND (parg, 1);
9303 STRIP_NOPS (parg0);
9304 STRIP_NOPS (parg1);
9306 if (TREE_CODE (parg0) == MULT_EXPR
9307 && TREE_CODE (parg1) != MULT_EXPR)
9308 return fold_build2_loc (loc, pcode, type,
9309 fold_build2_loc (loc, PLUS_EXPR, type,
9310 fold_convert_loc (loc, type,
9311 parg0),
9312 fold_convert_loc (loc, type,
9313 marg)),
9314 fold_convert_loc (loc, type, parg1));
9315 if (TREE_CODE (parg0) != MULT_EXPR
9316 && TREE_CODE (parg1) == MULT_EXPR)
9317 return
9318 fold_build2_loc (loc, PLUS_EXPR, type,
9319 fold_convert_loc (loc, type, parg0),
9320 fold_build2_loc (loc, pcode, type,
9321 fold_convert_loc (loc, type, marg),
9322 fold_convert_loc (loc, type,
9323 parg1)));
9326 else
9328 /* Fold __complex__ ( x, 0 ) + __complex__ ( 0, y )
9329 to __complex__ ( x, y ). This is not the same for SNaNs or
9330 if signed zeros are involved. */
9331 if (!HONOR_SNANS (element_mode (arg0))
9332 && !HONOR_SIGNED_ZEROS (element_mode (arg0))
9333 && COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0)))
9335 tree rtype = TREE_TYPE (TREE_TYPE (arg0));
9336 tree arg0r = fold_unary_loc (loc, REALPART_EXPR, rtype, arg0);
9337 tree arg0i = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg0);
9338 bool arg0rz = false, arg0iz = false;
9339 if ((arg0r && (arg0rz = real_zerop (arg0r)))
9340 || (arg0i && (arg0iz = real_zerop (arg0i))))
9342 tree arg1r = fold_unary_loc (loc, REALPART_EXPR, rtype, arg1);
9343 tree arg1i = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg1);
9344 if (arg0rz && arg1i && real_zerop (arg1i))
9346 tree rp = arg1r ? arg1r
9347 : build1 (REALPART_EXPR, rtype, arg1);
9348 tree ip = arg0i ? arg0i
9349 : build1 (IMAGPART_EXPR, rtype, arg0);
9350 return fold_build2_loc (loc, COMPLEX_EXPR, type, rp, ip);
9352 else if (arg0iz && arg1r && real_zerop (arg1r))
9354 tree rp = arg0r ? arg0r
9355 : build1 (REALPART_EXPR, rtype, arg0);
9356 tree ip = arg1i ? arg1i
9357 : build1 (IMAGPART_EXPR, rtype, arg1);
9358 return fold_build2_loc (loc, COMPLEX_EXPR, type, rp, ip);
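/* E.g. (x + 0.0i) + (0.0 + y*1.0i) becomes __complex__ (x, y).
   The signed-zero guard above is needed because with x == -0.0
   the real part of the addition would be -0.0 + 0.0 == +0.0,
   not -0.0.  */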
9363 if (flag_unsafe_math_optimizations
9364 && (TREE_CODE (arg0) == RDIV_EXPR || TREE_CODE (arg0) == MULT_EXPR)
9365 && (TREE_CODE (arg1) == RDIV_EXPR || TREE_CODE (arg1) == MULT_EXPR)
9366 && (tem = distribute_real_division (loc, code, type, arg0, arg1)))
9367 return tem;
9369 /* Convert a + (b*c + d*e) into (a + b*c) + d*e.
9370 We associate floats only if the user has specified
9371 -fassociative-math. */
9372 if (flag_associative_math
9373 && TREE_CODE (arg1) == PLUS_EXPR
9374 && TREE_CODE (arg0) != MULT_EXPR)
9376 tree tree10 = TREE_OPERAND (arg1, 0);
9377 tree tree11 = TREE_OPERAND (arg1, 1);
9378 if (TREE_CODE (tree11) == MULT_EXPR
9379 && TREE_CODE (tree10) == MULT_EXPR)
9381 tree tree0;
9382 tree0 = fold_build2_loc (loc, PLUS_EXPR, type, arg0, tree10);
9383 return fold_build2_loc (loc, PLUS_EXPR, type, tree0, tree11);
9386 /* Convert (b*c + d*e) + a into b*c + (d*e +a).
9387 We associate floats only if the user has specified
9388 -fassociative-math. */
9389 if (flag_associative_math
9390 && TREE_CODE (arg0) == PLUS_EXPR
9391 && TREE_CODE (arg1) != MULT_EXPR)
9393 tree tree00 = TREE_OPERAND (arg0, 0);
9394 tree tree01 = TREE_OPERAND (arg0, 1);
9395 if (TREE_CODE (tree01) == MULT_EXPR
9396 && TREE_CODE (tree00) == MULT_EXPR)
9398 tree tree0;
9399 tree0 = fold_build2_loc (loc, PLUS_EXPR, type, tree01, arg1);
9400 return fold_build2_loc (loc, PLUS_EXPR, type, tree00, tree0);
9405 bit_rotate:
9406 /* (A << C1) + (A >> C2) if A is unsigned and C1+C2 is the size of A
9407 is a rotate of A by C1 bits. */
9408 /* (A << B) + (A >> (Z - B)) if A is unsigned and Z is the size of A
9409 is a rotate of A by B bits. */
9411 enum tree_code code0, code1;
9412 tree rtype;
9413 code0 = TREE_CODE (arg0);
9414 code1 = TREE_CODE (arg1);
9415 if (((code0 == RSHIFT_EXPR && code1 == LSHIFT_EXPR)
9416 || (code1 == RSHIFT_EXPR && code0 == LSHIFT_EXPR))
9417 && operand_equal_p (TREE_OPERAND (arg0, 0),
9418 TREE_OPERAND (arg1, 0), 0)
9419 && (rtype = TREE_TYPE (TREE_OPERAND (arg0, 0)),
9420 TYPE_UNSIGNED (rtype))
9421 /* Only create rotates in complete modes. Other cases are not
9422 expanded properly. */
9423 && (element_precision (rtype)
9424 == GET_MODE_UNIT_PRECISION (TYPE_MODE (rtype))))
9426 tree tree01, tree11;
9427 enum tree_code code01, code11;
9429 tree01 = TREE_OPERAND (arg0, 1);
9430 tree11 = TREE_OPERAND (arg1, 1);
9431 STRIP_NOPS (tree01);
9432 STRIP_NOPS (tree11);
9433 code01 = TREE_CODE (tree01);
9434 code11 = TREE_CODE (tree11);
9435 if (code01 == INTEGER_CST
9436 && code11 == INTEGER_CST
9437 && (wi::to_widest (tree01) + wi::to_widest (tree11)
9438 == element_precision (TREE_TYPE (TREE_OPERAND (arg0, 0)))))
9440 tem = build2_loc (loc, LROTATE_EXPR,
9441 TREE_TYPE (TREE_OPERAND (arg0, 0)),
9442 TREE_OPERAND (arg0, 0),
9443 code0 == LSHIFT_EXPR
9444 ? TREE_OPERAND (arg0, 1)
9445 : TREE_OPERAND (arg1, 1));
9446 return fold_convert_loc (loc, type, tem);
9448 else if (code11 == MINUS_EXPR)
9450 tree tree110, tree111;
9451 tree110 = TREE_OPERAND (tree11, 0);
9452 tree111 = TREE_OPERAND (tree11, 1);
9453 STRIP_NOPS (tree110);
9454 STRIP_NOPS (tree111);
9455 if (TREE_CODE (tree110) == INTEGER_CST
9456 && 0 == compare_tree_int (tree110,
9457 element_precision
9458 (TREE_TYPE (TREE_OPERAND
9459 (arg0, 0))))
9460 && operand_equal_p (tree01, tree111, 0))
9461 return
9462 fold_convert_loc (loc, type,
9463 build2 ((code0 == LSHIFT_EXPR
9464 ? LROTATE_EXPR
9465 : RROTATE_EXPR),
9466 TREE_TYPE (TREE_OPERAND (arg0, 0)),
9467 TREE_OPERAND (arg0, 0),
9468 TREE_OPERAND (arg0, 1)));
9470 else if (code01 == MINUS_EXPR)
9472 tree tree010, tree011;
9473 tree010 = TREE_OPERAND (tree01, 0);
9474 tree011 = TREE_OPERAND (tree01, 1);
9475 STRIP_NOPS (tree010);
9476 STRIP_NOPS (tree011);
9477 if (TREE_CODE (tree010) == INTEGER_CST
9478 && 0 == compare_tree_int (tree010,
9479 element_precision
9480 (TREE_TYPE (TREE_OPERAND
9481 (arg0, 0))))
9482 && operand_equal_p (tree11, tree011, 0))
9483 return fold_convert_loc
9484 (loc, type,
9485 build2 ((code0 != LSHIFT_EXPR
9486 ? LROTATE_EXPR
9487 : RROTATE_EXPR),
9488 TREE_TYPE (TREE_OPERAND (arg0, 0)),
9489 TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 1)));
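/* Taken together, the cases above recognize the usual rotate
   idioms; e.g., assuming a 32-bit unsigned int x,
   (x << 5) + (x >> 27) and (x << n) | (x >> (32 - n)) both
   become single rotate-left expressions (the BIT_IOR_EXPR and
   BIT_XOR_EXPR cases enter here via goto bit_rotate).  */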
9494 associate:
9495 /* In most languages, we can't associate operations on floats through
9496 parentheses. Rather than remember where the parentheses were, we
9497 don't associate floats at all, unless the user has specified
9498 -fassociative-math.
9499 And, we need to make sure type is not saturating. */
9501 if ((! FLOAT_TYPE_P (type) || flag_associative_math)
9502 && !TYPE_SATURATING (type))
9504 tree var0, con0, lit0, minus_lit0;
9505 tree var1, con1, lit1, minus_lit1;
9506 tree atype = type;
9507 bool ok = true;
9509 /* Split both trees into variables, constants, and literals. Then
9510 associate each group together, the constants with literals,
9511 then the result with variables. This increases the chances of
9512 literals being recombined later and of generating relocatable
9513 expressions for the sum of a constant and literal. */
9514 var0 = split_tree (arg0, code, &con0, &lit0, &minus_lit0, 0);
9515 var1 = split_tree (arg1, code, &con1, &lit1, &minus_lit1,
9516 code == MINUS_EXPR);
9518 /* Recombine MINUS_EXPR operands by using PLUS_EXPR. */
9519 if (code == MINUS_EXPR)
9520 code = PLUS_EXPR;
9522 /* With undefined overflow prefer doing association in a type
9523 which wraps on overflow, if that is one of the operand types. */
9524 if ((POINTER_TYPE_P (type) && POINTER_TYPE_OVERFLOW_UNDEFINED)
9525 || (INTEGRAL_TYPE_P (type) && !TYPE_OVERFLOW_WRAPS (type)))
9527 if (INTEGRAL_TYPE_P (TREE_TYPE (arg0))
9528 && TYPE_OVERFLOW_WRAPS (TREE_TYPE (arg0)))
9529 atype = TREE_TYPE (arg0);
9530 else if (INTEGRAL_TYPE_P (TREE_TYPE (arg1))
9531 && TYPE_OVERFLOW_WRAPS (TREE_TYPE (arg1)))
9532 atype = TREE_TYPE (arg1);
9533 gcc_assert (TYPE_PRECISION (atype) == TYPE_PRECISION (type));
9536 /* With undefined overflow we can only associate constants with one
9537 variable, and constants whose association doesn't overflow. */
9538 if ((POINTER_TYPE_P (atype) && POINTER_TYPE_OVERFLOW_UNDEFINED)
9539 || (INTEGRAL_TYPE_P (atype) && !TYPE_OVERFLOW_WRAPS (atype)))
9541 if (var0 && var1)
9543 tree tmp0 = var0;
9544 tree tmp1 = var1;
9546 if (TREE_CODE (tmp0) == NEGATE_EXPR)
9547 tmp0 = TREE_OPERAND (tmp0, 0);
9548 if (CONVERT_EXPR_P (tmp0)
9549 && INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (tmp0, 0)))
9550 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (tmp0, 0)))
9551 <= TYPE_PRECISION (atype)))
9552 tmp0 = TREE_OPERAND (tmp0, 0);
9553 if (TREE_CODE (tmp1) == NEGATE_EXPR)
9554 tmp1 = TREE_OPERAND (tmp1, 0);
9555 if (CONVERT_EXPR_P (tmp1)
9556 && INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (tmp1, 0)))
9557 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (tmp1, 0)))
9558 <= TYPE_PRECISION (atype)))
9559 tmp1 = TREE_OPERAND (tmp1, 0);
9560 /* The only case we can still associate with two variables
9561 is if they are the same, modulo negation and bit-pattern
9562 preserving conversions. */
9563 if (!operand_equal_p (tmp0, tmp1, 0))
9564 ok = false;
9568 /* Only do something if we found more than two objects. Otherwise,
9569 nothing has changed and we risk infinite recursion. */
9570 if (ok
9571 && (2 < ((var0 != 0) + (var1 != 0)
9572 + (con0 != 0) + (con1 != 0)
9573 + (lit0 != 0) + (lit1 != 0)
9574 + (minus_lit0 != 0) + (minus_lit1 != 0))))
9576 bool any_overflows = false;
9577 if (lit0) any_overflows |= TREE_OVERFLOW (lit0);
9578 if (lit1) any_overflows |= TREE_OVERFLOW (lit1);
9579 if (minus_lit0) any_overflows |= TREE_OVERFLOW (minus_lit0);
9580 if (minus_lit1) any_overflows |= TREE_OVERFLOW (minus_lit1);
9581 var0 = associate_trees (loc, var0, var1, code, atype);
9582 con0 = associate_trees (loc, con0, con1, code, atype);
9583 lit0 = associate_trees (loc, lit0, lit1, code, atype);
9584 minus_lit0 = associate_trees (loc, minus_lit0, minus_lit1,
9585 code, atype);
9587 /* Preserve the MINUS_EXPR if the negative part of the literal is
9588 greater than the positive part. Otherwise, the multiplicative
9589 folding code (i.e. extract_muldiv) may be fooled when
9590 unsigned constants are subtracted, as in the following
9591 example: ((X*2 + 4) - 8U)/2. */
9592 if (minus_lit0 && lit0)
9594 if (TREE_CODE (lit0) == INTEGER_CST
9595 && TREE_CODE (minus_lit0) == INTEGER_CST
9596 && tree_int_cst_lt (lit0, minus_lit0))
9598 minus_lit0 = associate_trees (loc, minus_lit0, lit0,
9599 MINUS_EXPR, atype);
9600 lit0 = 0;
9602 else
9604 lit0 = associate_trees (loc, lit0, minus_lit0,
9605 MINUS_EXPR, atype);
9606 minus_lit0 = 0;
9610 /* Don't introduce overflows through reassociation. */
9611 if (!any_overflows
9612 && ((lit0 && TREE_OVERFLOW_P (lit0))
9613 || (minus_lit0 && TREE_OVERFLOW_P (minus_lit0))))
9614 return NULL_TREE;
9616 if (minus_lit0)
9618 if (con0 == 0)
9619 return
9620 fold_convert_loc (loc, type,
9621 associate_trees (loc, var0, minus_lit0,
9622 MINUS_EXPR, atype));
9623 else
9625 con0 = associate_trees (loc, con0, minus_lit0,
9626 MINUS_EXPR, atype);
9627 return
9628 fold_convert_loc (loc, type,
9629 associate_trees (loc, var0, con0,
9630 PLUS_EXPR, atype));
9634 con0 = associate_trees (loc, con0, lit0, code, atype);
9635 return
9636 fold_convert_loc (loc, type, associate_trees (loc, var0, con0,
9637 code, atype));
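/* For example, with x and y of unsigned (wrapping) type,
   (x + 1) + (y + 2) is split into variables {x, y} and literals
   {1, 2} and recombined as (x + y) + 3; for types with undefined
   overflow the rewrite is only done under the restrictions
   checked above.  */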
9641 return NULL_TREE;
9643 case MINUS_EXPR:
9644 /* Pointer simplifications for subtraction, simple reassociations. */
9645 if (POINTER_TYPE_P (TREE_TYPE (arg1)) && POINTER_TYPE_P (TREE_TYPE (arg0)))
9647 /* (PTR0 p+ A) - (PTR1 p+ B) -> (PTR0 - PTR1) + (A - B) */
9648 if (TREE_CODE (arg0) == POINTER_PLUS_EXPR
9649 && TREE_CODE (arg1) == POINTER_PLUS_EXPR)
9651 tree arg00 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
9652 tree arg01 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
9653 tree arg10 = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 0));
9654 tree arg11 = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 1));
9655 return fold_build2_loc (loc, PLUS_EXPR, type,
9656 fold_build2_loc (loc, MINUS_EXPR, type,
9657 arg00, arg10),
9658 fold_build2_loc (loc, MINUS_EXPR, type,
9659 arg01, arg11));
9661 /* (PTR0 p+ A) - PTR1 -> (PTR0 - PTR1) + A, assuming PTR0 - PTR1 simplifies. */
9662 else if (TREE_CODE (arg0) == POINTER_PLUS_EXPR)
9664 tree arg00 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
9665 tree arg01 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
9666 tree tmp = fold_binary_loc (loc, MINUS_EXPR, type, arg00,
9667 fold_convert_loc (loc, type, arg1));
9668 if (tmp)
9669 return fold_build2_loc (loc, PLUS_EXPR, type, tmp, arg01);
9671 /* PTR0 - (PTR1 p+ A) -> (PTR0 - PTR1) - A, assuming PTR0 - PTR1
9672 simplifies. */
9673 else if (TREE_CODE (arg1) == POINTER_PLUS_EXPR)
9675 tree arg10 = fold_convert_loc (loc, type,
9676 TREE_OPERAND (arg1, 0));
9677 tree arg11 = fold_convert_loc (loc, type,
9678 TREE_OPERAND (arg1, 1));
9679 tree tmp = fold_binary_loc (loc, MINUS_EXPR, type,
9680 fold_convert_loc (loc, type, arg0),
9681 arg10);
9682 if (tmp)
9683 return fold_build2_loc (loc, MINUS_EXPR, type, tmp, arg11);
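/* E.g. for char *p, *q:  (p p+ 4) - (q p+ 1) -> (p - q) + 3,
   and (p p+ i) - p -> i because p - p simplifies to zero.  */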
9686 /* (-A) - B -> (-B) - A where B is easily negated and we can swap. */
9687 if (TREE_CODE (arg0) == NEGATE_EXPR
9688 && negate_expr_p (arg1)
9689 && reorder_operands_p (arg0, arg1))
9690 return fold_build2_loc (loc, MINUS_EXPR, type,
9691 fold_convert_loc (loc, type,
9692 negate_expr (arg1)),
9693 fold_convert_loc (loc, type,
9694 TREE_OPERAND (arg0, 0)));
9696 if (! FLOAT_TYPE_P (type))
9698 /* Fold (A & ~B) - (A & B) into (A ^ B) - B, where B is
9699 any power of 2 minus 1. */
9700 if (TREE_CODE (arg0) == BIT_AND_EXPR
9701 && TREE_CODE (arg1) == BIT_AND_EXPR
9702 && operand_equal_p (TREE_OPERAND (arg0, 0),
9703 TREE_OPERAND (arg1, 0), 0))
9705 tree mask0 = TREE_OPERAND (arg0, 1);
9706 tree mask1 = TREE_OPERAND (arg1, 1);
9707 tree tem = fold_build1_loc (loc, BIT_NOT_EXPR, type, mask0);
9709 if (operand_equal_p (tem, mask1, 0))
9711 tem = fold_build2_loc (loc, BIT_XOR_EXPR, type,
9712 TREE_OPERAND (arg0, 0), mask1);
9713 return fold_build2_loc (loc, MINUS_EXPR, type, tem, mask1);
9718 /* Fold __complex__ ( x, 0 ) - __complex__ ( 0, y ) to
9719 __complex__ ( x, -y ). This is not the same for SNaNs or if
9720 signed zeros are involved. */
9721 if (!HONOR_SNANS (element_mode (arg0))
9722 && !HONOR_SIGNED_ZEROS (element_mode (arg0))
9723 && COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0)))
9725 tree rtype = TREE_TYPE (TREE_TYPE (arg0));
9726 tree arg0r = fold_unary_loc (loc, REALPART_EXPR, rtype, arg0);
9727 tree arg0i = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg0);
9728 bool arg0rz = false, arg0iz = false;
9729 if ((arg0r && (arg0rz = real_zerop (arg0r)))
9730 || (arg0i && (arg0iz = real_zerop (arg0i))))
9732 tree arg1r = fold_unary_loc (loc, REALPART_EXPR, rtype, arg1);
9733 tree arg1i = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg1);
9734 if (arg0rz && arg1i && real_zerop (arg1i))
9736 tree rp = fold_build1_loc (loc, NEGATE_EXPR, rtype,
9737 arg1r ? arg1r
9738 : build1 (REALPART_EXPR, rtype, arg1));
9739 tree ip = arg0i ? arg0i
9740 : build1 (IMAGPART_EXPR, rtype, arg0);
9741 return fold_build2_loc (loc, COMPLEX_EXPR, type, rp, ip);
9743 else if (arg0iz && arg1r && real_zerop (arg1r))
9745 tree rp = arg0r ? arg0r
9746 : build1 (REALPART_EXPR, rtype, arg0);
9747 tree ip = fold_build1_loc (loc, NEGATE_EXPR, rtype,
9748 arg1i ? arg1i
9749 : build1 (IMAGPART_EXPR, rtype, arg1));
9750 return fold_build2_loc (loc, COMPLEX_EXPR, type, rp, ip);
9755 /* A - B -> A + (-B) if B is easily negatable. */
9756 if (negate_expr_p (arg1)
9757 && !TYPE_OVERFLOW_SANITIZED (type)
9758 && ((FLOAT_TYPE_P (type)
9759 /* Avoid this transformation if B is a positive REAL_CST. */
9760 && (TREE_CODE (arg1) != REAL_CST
9761 || REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg1))))
9762 || INTEGRAL_TYPE_P (type)))
9763 return fold_build2_loc (loc, PLUS_EXPR, type,
9764 fold_convert_loc (loc, type, arg0),
9765 fold_convert_loc (loc, type,
9766 negate_expr (arg1)));
9768 /* Fold &a[i] - &a[j] to i-j. */
9769 if (TREE_CODE (arg0) == ADDR_EXPR
9770 && TREE_CODE (TREE_OPERAND (arg0, 0)) == ARRAY_REF
9771 && TREE_CODE (arg1) == ADDR_EXPR
9772 && TREE_CODE (TREE_OPERAND (arg1, 0)) == ARRAY_REF)
9774 tree tem = fold_addr_of_array_ref_difference (loc, type,
9775 TREE_OPERAND (arg0, 0),
9776 TREE_OPERAND (arg1, 0));
9777 if (tem)
9778 return tem;
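/* E.g. &a[i + 1] - &a[i] folds here to the constant element
   size: the helper returns the index difference (i + 1) - i
   scaled by the size of the array element.  */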
9781 if (FLOAT_TYPE_P (type)
9782 && flag_unsafe_math_optimizations
9783 && (TREE_CODE (arg0) == RDIV_EXPR || TREE_CODE (arg0) == MULT_EXPR)
9784 && (TREE_CODE (arg1) == RDIV_EXPR || TREE_CODE (arg1) == MULT_EXPR)
9785 && (tem = distribute_real_division (loc, code, type, arg0, arg1)))
9786 return tem;
9788 /* Handle (A1 * C1) - (A2 * C2) with A1, A2 or C1, C2 being the same or
9789 one. Make sure the type is not saturating and has the signedness of
9790 the stripped operands, as fold_plusminus_mult_expr will re-associate.
9791 ??? The latter condition should use TYPE_OVERFLOW_* flags instead. */
9792 if ((TREE_CODE (arg0) == MULT_EXPR
9793 || TREE_CODE (arg1) == MULT_EXPR)
9794 && !TYPE_SATURATING (type)
9795 && TYPE_UNSIGNED (type) == TYPE_UNSIGNED (TREE_TYPE (arg0))
9796 && TYPE_UNSIGNED (type) == TYPE_UNSIGNED (TREE_TYPE (arg1))
9797 && (!FLOAT_TYPE_P (type) || flag_associative_math))
9799 tree tem = fold_plusminus_mult_expr (loc, code, type, arg0, arg1);
9800 if (tem)
9801 return tem;
9804 goto associate;
9806 case MULT_EXPR:
9807 /* (-A) * (-B) -> A * B */
9808 if (TREE_CODE (arg0) == NEGATE_EXPR && negate_expr_p (arg1))
9809 return fold_build2_loc (loc, MULT_EXPR, type,
9810 fold_convert_loc (loc, type,
9811 TREE_OPERAND (arg0, 0)),
9812 fold_convert_loc (loc, type,
9813 negate_expr (arg1)));
9814 if (TREE_CODE (arg1) == NEGATE_EXPR && negate_expr_p (arg0))
9815 return fold_build2_loc (loc, MULT_EXPR, type,
9816 fold_convert_loc (loc, type,
9817 negate_expr (arg0)),
9818 fold_convert_loc (loc, type,
9819 TREE_OPERAND (arg1, 0)));
9821 if (! FLOAT_TYPE_P (type))
9823 /* Transform x * -C into -x * C if x is easily negatable. */
9824 if (TREE_CODE (arg1) == INTEGER_CST
9825 && tree_int_cst_sgn (arg1) == -1
9826 && negate_expr_p (arg0)
9827 && (tem = negate_expr (arg1)) != arg1
9828 && !TREE_OVERFLOW (tem))
9829 return fold_build2_loc (loc, MULT_EXPR, type,
9830 fold_convert_loc (loc, type,
9831 negate_expr (arg0)),
9832 tem);
9834 /* (a * (1 << b)) is (a << b) */
9835 if (TREE_CODE (arg1) == LSHIFT_EXPR
9836 && integer_onep (TREE_OPERAND (arg1, 0)))
9837 return fold_build2_loc (loc, LSHIFT_EXPR, type, op0,
9838 TREE_OPERAND (arg1, 1));
9839 if (TREE_CODE (arg0) == LSHIFT_EXPR
9840 && integer_onep (TREE_OPERAND (arg0, 0)))
9841 return fold_build2_loc (loc, LSHIFT_EXPR, type, op1,
9842 TREE_OPERAND (arg0, 1));
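/* E.g. a * (1 << b) becomes a << b; the second form catches
   (1 << b) * a.  */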
9844 /* (A + A) * C -> A * 2 * C */
9845 if (TREE_CODE (arg0) == PLUS_EXPR
9846 && TREE_CODE (arg1) == INTEGER_CST
9847 && operand_equal_p (TREE_OPERAND (arg0, 0),
9848 TREE_OPERAND (arg0, 1), 0))
9849 return fold_build2_loc (loc, MULT_EXPR, type,
9850 omit_one_operand_loc (loc, type,
9851 TREE_OPERAND (arg0, 0),
9852 TREE_OPERAND (arg0, 1)),
9853 fold_build2_loc (loc, MULT_EXPR, type,
9854 build_int_cst (type, 2), arg1));
9856 /* ((T) (X /[ex] C)) * C cancels out if the conversion is
9857 sign-changing only. */
9858 if (TREE_CODE (arg1) == INTEGER_CST
9859 && TREE_CODE (arg0) == EXACT_DIV_EXPR
9860 && operand_equal_p (arg1, TREE_OPERAND (arg0, 1), 0))
9861 return fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
9863 strict_overflow_p = false;
9864 if (TREE_CODE (arg1) == INTEGER_CST
9865 && 0 != (tem = extract_muldiv (op0, arg1, code, NULL_TREE,
9866 &strict_overflow_p)))
9868 if (strict_overflow_p)
9869 fold_overflow_warning (("assuming signed overflow does not "
9870 "occur when simplifying "
9871 "multiplication"),
9872 WARN_STRICT_OVERFLOW_MISC);
9873 return fold_convert_loc (loc, type, tem);
9876 /* Optimize z * conj(z) for integer complex numbers. */
9877 if (TREE_CODE (arg0) == CONJ_EXPR
9878 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
9879 return fold_mult_zconjz (loc, type, arg1);
9880 if (TREE_CODE (arg1) == CONJ_EXPR
9881 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
9882 return fold_mult_zconjz (loc, type, arg0);
9884 else
9886 /* Convert (C1/X)*C2 into (C1*C2)/X. This transformation may change
9887 the result for floating point types due to rounding, so it is applied
9888 only if -fassociative-math was specified. */
9889 if (flag_associative_math
9890 && TREE_CODE (arg0) == RDIV_EXPR
9891 && TREE_CODE (arg1) == REAL_CST
9892 && TREE_CODE (TREE_OPERAND (arg0, 0)) == REAL_CST)
9894 tree tem = const_binop (MULT_EXPR, TREE_OPERAND (arg0, 0),
9895 arg1);
9896 if (tem)
9897 return fold_build2_loc (loc, RDIV_EXPR, type, tem,
9898 TREE_OPERAND (arg0, 1));
9901 /* Strip sign operations from X in X*X, i.e. -Y*-Y -> Y*Y. */
9902 if (operand_equal_p (arg0, arg1, 0))
9904 tree tem = fold_strip_sign_ops (arg0);
9905 if (tem != NULL_TREE)
9907 tem = fold_convert_loc (loc, type, tem);
9908 return fold_build2_loc (loc, MULT_EXPR, type, tem, tem);
9912 /* Fold z * +-I to __complex__ (-+__imag z, +-__real z).
9913 This is not the same for NaNs or if signed zeros are
9914 involved. */
9915 if (!HONOR_NANS (arg0)
9916 && !HONOR_SIGNED_ZEROS (element_mode (arg0))
9917 && COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0))
9918 && TREE_CODE (arg1) == COMPLEX_CST
9919 && real_zerop (TREE_REALPART (arg1)))
9921 tree rtype = TREE_TYPE (TREE_TYPE (arg0));
9922 if (real_onep (TREE_IMAGPART (arg1)))
9923 return
9924 fold_build2_loc (loc, COMPLEX_EXPR, type,
9925 negate_expr (fold_build1_loc (loc, IMAGPART_EXPR,
9926 rtype, arg0)),
9927 fold_build1_loc (loc, REALPART_EXPR, rtype, arg0));
9928 else if (real_minus_onep (TREE_IMAGPART (arg1)))
9929 return
9930 fold_build2_loc (loc, COMPLEX_EXPR, type,
9931 fold_build1_loc (loc, IMAGPART_EXPR, rtype, arg0),
9932 negate_expr (fold_build1_loc (loc, REALPART_EXPR,
9933 rtype, arg0)));
9936 /* Optimize z * conj(z) for floating point complex numbers.
9937 Guarded by flag_unsafe_math_optimizations as non-finite
9938 imaginary components don't produce scalar results. */
9939 if (flag_unsafe_math_optimizations
9940 && TREE_CODE (arg0) == CONJ_EXPR
9941 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
9942 return fold_mult_zconjz (loc, type, arg1);
9943 if (flag_unsafe_math_optimizations
9944 && TREE_CODE (arg1) == CONJ_EXPR
9945 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
9946 return fold_mult_zconjz (loc, type, arg0);
9948 if (flag_unsafe_math_optimizations)
9950 enum built_in_function fcode0 = builtin_mathfn_code (arg0);
9951 enum built_in_function fcode1 = builtin_mathfn_code (arg1);
9953 /* Optimizations of root(...)*root(...). */
9954 if (fcode0 == fcode1 && BUILTIN_ROOT_P (fcode0))
9956 tree rootfn, arg;
9957 tree arg00 = CALL_EXPR_ARG (arg0, 0);
9958 tree arg10 = CALL_EXPR_ARG (arg1, 0);
9960 /* Optimize root(x)*root(y) as root(x*y). */
9961 rootfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
9962 arg = fold_build2_loc (loc, MULT_EXPR, type, arg00, arg10);
9963 return build_call_expr_loc (loc, rootfn, 1, arg);
9966 /* Optimize expN(x)*expN(y) as expN(x+y). */
9967 if (fcode0 == fcode1 && BUILTIN_EXPONENT_P (fcode0))
9969 tree expfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
9970 tree arg = fold_build2_loc (loc, PLUS_EXPR, type,
9971 CALL_EXPR_ARG (arg0, 0),
9972 CALL_EXPR_ARG (arg1, 0));
9973 return build_call_expr_loc (loc, expfn, 1, arg);
9976 /* Optimizations of pow(...)*pow(...). */
9977 if ((fcode0 == BUILT_IN_POW && fcode1 == BUILT_IN_POW)
9978 || (fcode0 == BUILT_IN_POWF && fcode1 == BUILT_IN_POWF)
9979 || (fcode0 == BUILT_IN_POWL && fcode1 == BUILT_IN_POWL))
9981 tree arg00 = CALL_EXPR_ARG (arg0, 0);
9982 tree arg01 = CALL_EXPR_ARG (arg0, 1);
9983 tree arg10 = CALL_EXPR_ARG (arg1, 0);
9984 tree arg11 = CALL_EXPR_ARG (arg1, 1);
9986 /* Optimize pow(x,y)*pow(x,z) as pow(x,y+z). */
9987 if (operand_equal_p (arg00, arg10, 0))
9989 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
9990 tree arg = fold_build2_loc (loc, PLUS_EXPR, type,
9991 arg01, arg11);
9992 return build_call_expr_loc (loc, powfn, 2, arg00, arg);
9996 /* Canonicalize x*x as pow(x,2.0), which is expanded as x*x. */
9997 if (!in_gimple_form
9998 && optimize
9999 && operand_equal_p (arg0, arg1, 0))
10001 tree powfn = mathfn_built_in (type, BUILT_IN_POW);
10003 if (powfn)
10005 tree arg = build_real (type, dconst2);
10006 return build_call_expr_loc (loc, powfn, 2, arg0, arg);
10011 goto associate;
10013 case BIT_IOR_EXPR:
10014 /* Canonicalize (X & C1) | C2. */
10015 if (TREE_CODE (arg0) == BIT_AND_EXPR
10016 && TREE_CODE (arg1) == INTEGER_CST
10017 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
10019 int width = TYPE_PRECISION (type), w;
10020 wide_int c1 = TREE_OPERAND (arg0, 1);
10021 wide_int c2 = arg1;
10023 /* If (C1&C2) == C1, then (X&C1)|C2 becomes (X,C2). */
10024 if ((c1 & c2) == c1)
10025 return omit_one_operand_loc (loc, type, arg1,
10026 TREE_OPERAND (arg0, 0));
10028 wide_int msk = wi::mask (width, false,
10029 TYPE_PRECISION (TREE_TYPE (arg1)));
10031 /* If (C1|C2) == ~0 then (X&C1)|C2 becomes X|C2. */
10032 if (msk.and_not (c1 | c2) == 0)
10033 return fold_build2_loc (loc, BIT_IOR_EXPR, type,
10034 TREE_OPERAND (arg0, 0), arg1);
10036 /* Minimize the number of bits set in C1, i.e. C1 := C1 & ~C2,
10037 unless (C1 & ~C2) | (C2 & C3) for some C3 is a mask of some
10038 mode which allows further optimizations. */
10039 c1 &= msk;
10040 c2 &= msk;
10041 wide_int c3 = c1.and_not (c2);
10042 for (w = BITS_PER_UNIT; w <= width; w <<= 1)
10044 wide_int mask = wi::mask (w, false,
10045 TYPE_PRECISION (type));
10046 if (((c1 | c2) & mask) == mask && c1.and_not (mask) == 0)
10048 c3 = mask;
10049 break;
10053 if (c3 != c1)
10054 return fold_build2_loc (loc, BIT_IOR_EXPR, type,
10055 fold_build2_loc (loc, BIT_AND_EXPR, type,
10056 TREE_OPERAND (arg0, 0),
10057 wide_int_to_tree (type,
10058 c3)),
10059 arg1);
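/* Examples: (x & 0x0f) | 0xff -> 0xff since C2 covers C1;
   (x & 0x3f) | 0x0f -> (x & 0x30) | 0x0f, dropping C1 bits
   that C2 already provides; but (x & 0xff) | 0x0f is left
   alone because 0xff is a mode mask the loop above detects.  */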
10062 /* (X & ~Y) | (~X & Y) is X ^ Y */
10063 if (TREE_CODE (arg0) == BIT_AND_EXPR
10064 && TREE_CODE (arg1) == BIT_AND_EXPR)
10066 tree a0, a1, l0, l1, n0, n1;
10068 a0 = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 0));
10069 a1 = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 1));
10071 l0 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
10072 l1 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
10074 n0 = fold_build1_loc (loc, BIT_NOT_EXPR, type, l0);
10075 n1 = fold_build1_loc (loc, BIT_NOT_EXPR, type, l1);
10077 if ((operand_equal_p (n0, a0, 0)
10078 && operand_equal_p (n1, a1, 0))
10079 || (operand_equal_p (n0, a1, 0)
10080 && operand_equal_p (n1, a0, 0)))
10081 return fold_build2_loc (loc, BIT_XOR_EXPR, type, l0, n1);
10084 /* See if this can be simplified into a rotate first. If that
10085 is unsuccessful continue in the association code. */
10086 goto bit_rotate;
10088 case BIT_XOR_EXPR:
10089 /* Fold (X & 1) ^ 1 as (X & 1) == 0. */
10090 if (TREE_CODE (arg0) == BIT_AND_EXPR
10091 && INTEGRAL_TYPE_P (type)
10092 && integer_onep (TREE_OPERAND (arg0, 1))
10093 && integer_onep (arg1))
10094 return fold_build2_loc (loc, EQ_EXPR, type, arg0,
10095 build_zero_cst (TREE_TYPE (arg0)));
10097 /* See if this can be simplified into a rotate first. If that
10098 is unsuccessful continue in the association code. */
10099 goto bit_rotate;
10101 case BIT_AND_EXPR:
10102 /* ~X & X, (X == 0) & X, and !X & X are always zero. */
10103 if ((TREE_CODE (arg0) == BIT_NOT_EXPR
10104 || TREE_CODE (arg0) == TRUTH_NOT_EXPR
10105 || (TREE_CODE (arg0) == EQ_EXPR
10106 && integer_zerop (TREE_OPERAND (arg0, 1))))
10107 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
10108 return omit_one_operand_loc (loc, type, integer_zero_node, arg1);
10110 /* X & ~X , X & (X == 0), and X & !X are always zero. */
10111 if ((TREE_CODE (arg1) == BIT_NOT_EXPR
10112 || TREE_CODE (arg1) == TRUTH_NOT_EXPR
10113 || (TREE_CODE (arg1) == EQ_EXPR
10114 && integer_zerop (TREE_OPERAND (arg1, 1))))
10115 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
10116 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
10118 /* Fold (X ^ 1) & 1 as (X & 1) == 0. */
10119 if (TREE_CODE (arg0) == BIT_XOR_EXPR
10120 && INTEGRAL_TYPE_P (type)
10121 && integer_onep (TREE_OPERAND (arg0, 1))
10122 && integer_onep (arg1))
10124 tree tem2;
10125 tem = TREE_OPERAND (arg0, 0);
10126 tem2 = fold_convert_loc (loc, TREE_TYPE (tem), arg1);
10127 tem2 = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (tem),
10128 tem, tem2);
10129 return fold_build2_loc (loc, EQ_EXPR, type, tem2,
10130 build_zero_cst (TREE_TYPE (tem)));
10132 /* Fold ~X & 1 as (X & 1) == 0. */
10133 if (TREE_CODE (arg0) == BIT_NOT_EXPR
10134 && INTEGRAL_TYPE_P (type)
10135 && integer_onep (arg1))
10137 tree tem2;
10138 tem = TREE_OPERAND (arg0, 0);
10139 tem2 = fold_convert_loc (loc, TREE_TYPE (tem), arg1);
10140 tem2 = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (tem),
10141 tem, tem2);
10142 return fold_build2_loc (loc, EQ_EXPR, type, tem2,
10143 build_zero_cst (TREE_TYPE (tem)));
10145 /* Fold !X & 1 as X == 0. */
10146 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
10147 && integer_onep (arg1))
10149 tem = TREE_OPERAND (arg0, 0);
10150 return fold_build2_loc (loc, EQ_EXPR, type, tem,
10151 build_zero_cst (TREE_TYPE (tem)));
10154 /* Fold (X ^ Y) & Y as ~X & Y. */
10155 if (TREE_CODE (arg0) == BIT_XOR_EXPR
10156 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
10158 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
10159 return fold_build2_loc (loc, BIT_AND_EXPR, type,
10160 fold_build1_loc (loc, BIT_NOT_EXPR, type, tem),
10161 fold_convert_loc (loc, type, arg1));
10163 /* Fold (X ^ Y) & X as ~Y & X. */
10164 if (TREE_CODE (arg0) == BIT_XOR_EXPR
10165 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
10166 && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
10168 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
10169 return fold_build2_loc (loc, BIT_AND_EXPR, type,
10170 fold_build1_loc (loc, BIT_NOT_EXPR, type, tem),
10171 fold_convert_loc (loc, type, arg1));
10173 /* Fold X & (X ^ Y) as X & ~Y. */
10174 if (TREE_CODE (arg1) == BIT_XOR_EXPR
10175 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
10177 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 1));
10178 return fold_build2_loc (loc, BIT_AND_EXPR, type,
10179 fold_convert_loc (loc, type, arg0),
10180 fold_build1_loc (loc, BIT_NOT_EXPR, type, tem));
10182 /* Fold X & (Y ^ X) as ~Y & X. */
10183 if (TREE_CODE (arg1) == BIT_XOR_EXPR
10184 && operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0)
10185 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
10187 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 0));
10188 return fold_build2_loc (loc, BIT_AND_EXPR, type,
10189 fold_build1_loc (loc, BIT_NOT_EXPR, type, tem),
10190 fold_convert_loc (loc, type, arg0));
10193 /* Fold (X * Y) & -(1 << CST) to X * Y if Y is a constant
10194 multiple of 1 << CST. */
10195 if (TREE_CODE (arg1) == INTEGER_CST)
10197 wide_int cst1 = arg1;
10198 wide_int ncst1 = -cst1;
10199 if ((cst1 & ncst1) == ncst1
10200 && multiple_of_p (type, arg0,
10201 wide_int_to_tree (TREE_TYPE (arg1), ncst1)))
10202 return fold_convert_loc (loc, type, arg0);
10205 /* Fold (X * CST1) & CST2 to zero if we can, or drop known zero
10206 bits from CST2. */
10207 if (TREE_CODE (arg1) == INTEGER_CST
10208 && TREE_CODE (arg0) == MULT_EXPR
10209 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
10211 wide_int warg1 = arg1;
10212 wide_int masked = mask_with_tz (type, warg1, TREE_OPERAND (arg0, 1));
10214 if (masked == 0)
10215 return omit_two_operands_loc (loc, type, build_zero_cst (type),
10216 arg0, arg1);
10217 else if (masked != warg1)
10219 /* Avoid the transform if arg1 is a mask of some
10220 mode which allows further optimizations. */
10221 int pop = wi::popcount (warg1);
10222 if (!(pop >= BITS_PER_UNIT
10223 && exact_log2 (pop) != -1
10224 && wi::mask (pop, false, warg1.get_precision ()) == warg1))
10225 return fold_build2_loc (loc, code, type, op0,
10226 wide_int_to_tree (type, masked));
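/* E.g. (x * 8) & 7 folds to 0, since the product has at least
   three trailing zero bits, and (x * 8) & 0x7f becomes
   (x * 8) & 0x78; (x * 8) & 0xff stays as-is because 0xff is
   a mode mask.  */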
10230 /* For constants M and N, if M == (1LL << cst) - 1 && (N & M) == M,
10231 ((A & N) + B) & M -> (A + B) & M
10232 Similarly if (N & M) == 0,
10233 ((A | N) + B) & M -> (A + B) & M
10234 and for - instead of + (or unary - instead of +)
10235 and/or ^ instead of |.
10236 If B is constant and (B & M) == 0, fold into A & M. */
10237 if (TREE_CODE (arg1) == INTEGER_CST)
10239 wide_int cst1 = arg1;
10240 if ((~cst1 != 0) && (cst1 & (cst1 + 1)) == 0
10241 && INTEGRAL_TYPE_P (TREE_TYPE (arg0))
10242 && (TREE_CODE (arg0) == PLUS_EXPR
10243 || TREE_CODE (arg0) == MINUS_EXPR
10244 || TREE_CODE (arg0) == NEGATE_EXPR)
10245 && (TYPE_OVERFLOW_WRAPS (TREE_TYPE (arg0))
10246 || TREE_CODE (TREE_TYPE (arg0)) == INTEGER_TYPE))
10248 tree pmop[2];
10249 int which = 0;
10250 wide_int cst0;
10252 /* Now we know that arg0 is (C + D) or (C - D) or
10253 -C and arg1 (M) is == (1LL << cst) - 1.
10254 Store C into PMOP[0] and D into PMOP[1]. */
10255 pmop[0] = TREE_OPERAND (arg0, 0);
10256 pmop[1] = NULL;
10257 if (TREE_CODE (arg0) != NEGATE_EXPR)
10259 pmop[1] = TREE_OPERAND (arg0, 1);
10260 which = 1;
10263 if ((wi::max_value (TREE_TYPE (arg0)) & cst1) != cst1)
10264 which = -1;
10266 for (; which >= 0; which--)
10267 switch (TREE_CODE (pmop[which]))
10269 case BIT_AND_EXPR:
10270 case BIT_IOR_EXPR:
10271 case BIT_XOR_EXPR:
10272 if (TREE_CODE (TREE_OPERAND (pmop[which], 1))
10273 != INTEGER_CST)
10274 break;
10275 cst0 = TREE_OPERAND (pmop[which], 1);
10276 cst0 &= cst1;
10277 if (TREE_CODE (pmop[which]) == BIT_AND_EXPR)
10279 if (cst0 != cst1)
10280 break;
10282 else if (cst0 != 0)
10283 break;
10284 /* If C or D is of the form (A & N) where
10285 (N & M) == M, or of the form (A | N) or
10286 (A ^ N) where (N & M) == 0, replace it with A. */
10287 pmop[which] = TREE_OPERAND (pmop[which], 0);
10288 break;
10289 case INTEGER_CST:
10290 /* If C or D is a constant N where (N & M) == 0, it can be
10291 omitted (assumed 0). */
10292 if ((TREE_CODE (arg0) == PLUS_EXPR
10293 || (TREE_CODE (arg0) == MINUS_EXPR && which == 0))
10294 && (cst1 & pmop[which]) == 0)
10295 pmop[which] = NULL;
10296 break;
10297 default:
10298 break;
10301 /* Only build anything new if we optimized one or both arguments
10302 above. */
10303 if (pmop[0] != TREE_OPERAND (arg0, 0)
10304 || (TREE_CODE (arg0) != NEGATE_EXPR
10305 && pmop[1] != TREE_OPERAND (arg0, 1)))
10307 tree utype = TREE_TYPE (arg0);
10308 if (! TYPE_OVERFLOW_WRAPS (TREE_TYPE (arg0)))
10310 /* Perform the operations in a type that has defined
10311 overflow behavior. */
10312 utype = unsigned_type_for (TREE_TYPE (arg0));
10313 if (pmop[0] != NULL)
10314 pmop[0] = fold_convert_loc (loc, utype, pmop[0]);
10315 if (pmop[1] != NULL)
10316 pmop[1] = fold_convert_loc (loc, utype, pmop[1]);
10319 if (TREE_CODE (arg0) == NEGATE_EXPR)
10320 tem = fold_build1_loc (loc, NEGATE_EXPR, utype, pmop[0]);
10321 else if (TREE_CODE (arg0) == PLUS_EXPR)
10323 if (pmop[0] != NULL && pmop[1] != NULL)
10324 tem = fold_build2_loc (loc, PLUS_EXPR, utype,
10325 pmop[0], pmop[1]);
10326 else if (pmop[0] != NULL)
10327 tem = pmop[0];
10328 else if (pmop[1] != NULL)
10329 tem = pmop[1];
10330 else
10331 return build_int_cst (type, 0);
10333 else if (pmop[0] == NULL)
10334 tem = fold_build1_loc (loc, NEGATE_EXPR, utype, pmop[1]);
10335 else
10336 tem = fold_build2_loc (loc, MINUS_EXPR, utype,
10337 pmop[0], pmop[1]);
10338 /* TEM is now the new binary +, - or unary - replacement. */
10339 tem = fold_build2_loc (loc, BIT_AND_EXPR, utype, tem,
10340 fold_convert_loc (loc, utype, arg1));
10341 return fold_convert_loc (loc, type, tem);
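/* E.g. with M == 7:  ((a & 7) + b) & 7 -> (a + b) & 7, and
   ((a | 8) - b) & 7 -> (a - b) & 7, since the masked-out bits
   of the inner operation cannot affect the low three bits of
   the result.  */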
10346 /* Simplify ((int)c & 0377) into (int)c, if c is unsigned char. */
10347 if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg0) == NOP_EXPR
10348 && TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg0, 0))))
10350 prec = element_precision (TREE_TYPE (TREE_OPERAND (arg0, 0)));
10352 wide_int mask = wide_int::from (arg1, prec, UNSIGNED);
10353 if (mask == -1)
10354 return
10355 fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
10358 goto associate;
10360 case RDIV_EXPR:
10361 /* Don't touch a floating-point divide by zero unless the mode
10362 of the constant can represent infinity. */
10363 if (TREE_CODE (arg1) == REAL_CST
10364 && !MODE_HAS_INFINITIES (TYPE_MODE (TREE_TYPE (arg1)))
10365 && real_zerop (arg1))
10366 return NULL_TREE;
10368 /* (-A) / (-B) -> A / B */
10369 if (TREE_CODE (arg0) == NEGATE_EXPR && negate_expr_p (arg1))
10370 return fold_build2_loc (loc, RDIV_EXPR, type,
10371 TREE_OPERAND (arg0, 0),
10372 negate_expr (arg1));
10373 if (TREE_CODE (arg1) == NEGATE_EXPR && negate_expr_p (arg0))
10374 return fold_build2_loc (loc, RDIV_EXPR, type,
10375 negate_expr (arg0),
10376 TREE_OPERAND (arg1, 0));
10378 /* Convert A/B/C to A/(B*C). */
10379 if (flag_reciprocal_math
10380 && TREE_CODE (arg0) == RDIV_EXPR)
10381 return fold_build2_loc (loc, RDIV_EXPR, type, TREE_OPERAND (arg0, 0),
10382 fold_build2_loc (loc, MULT_EXPR, type,
10383 TREE_OPERAND (arg0, 1), arg1));
10385 /* Convert A/(B/C) to (A/B)*C. */
10386 if (flag_reciprocal_math
10387 && TREE_CODE (arg1) == RDIV_EXPR)
10388 return fold_build2_loc (loc, MULT_EXPR, type,
10389 fold_build2_loc (loc, RDIV_EXPR, type, arg0,
10390 TREE_OPERAND (arg1, 0)),
10391 TREE_OPERAND (arg1, 1));
10393 /* Convert C1/(X*C2) into (C1/C2)/X. */
10394 if (flag_reciprocal_math
10395 && TREE_CODE (arg1) == MULT_EXPR
10396 && TREE_CODE (arg0) == REAL_CST
10397 && TREE_CODE (TREE_OPERAND (arg1, 1)) == REAL_CST)
10399 tree tem = const_binop (RDIV_EXPR, arg0,
10400 TREE_OPERAND (arg1, 1));
10401 if (tem)
10402 return fold_build2_loc (loc, RDIV_EXPR, type, tem,
10403 TREE_OPERAND (arg1, 0));
10406 if (flag_unsafe_math_optimizations)
10408 enum built_in_function fcode1 = builtin_mathfn_code (arg1);
10410 /* Optimize a/root(b/c) into a*root(c/b). */
10411 if (BUILTIN_CBRT_P (fcode1))
10413 tree rootarg = CALL_EXPR_ARG (arg1, 0);
10415 if (TREE_CODE (rootarg) == RDIV_EXPR)
10417 tree rootfn = TREE_OPERAND (CALL_EXPR_FN (arg1), 0);
10418 tree b = TREE_OPERAND (rootarg, 0);
10419 tree c = TREE_OPERAND (rootarg, 1);
10421 tree tmp = fold_build2_loc (loc, RDIV_EXPR, type, c, b);
10423 tmp = build_call_expr_loc (loc, rootfn, 1, tmp);
10424 return fold_build2_loc (loc, MULT_EXPR, type, arg0, tmp);
10428 /* Optimize x/expN(y) into x*expN(-y). */
10429 if (BUILTIN_EXPONENT_P (fcode1))
10431 tree expfn = TREE_OPERAND (CALL_EXPR_FN (arg1), 0);
10432 tree arg = negate_expr (CALL_EXPR_ARG (arg1, 0));
10433 arg1 = build_call_expr_loc (loc,
10434 expfn, 1,
10435 fold_convert_loc (loc, type, arg));
10436 return fold_build2_loc (loc, MULT_EXPR, type, arg0, arg1);
10440 return NULL_TREE;
10442 case TRUNC_DIV_EXPR:
10443 /* Optimize (X & (-A)) / A where A is a power of 2,
10444 to X >> log2(A) */
10445 if (TREE_CODE (arg0) == BIT_AND_EXPR
10446 && !TYPE_UNSIGNED (type) && TREE_CODE (arg1) == INTEGER_CST
10447 && integer_pow2p (arg1) && tree_int_cst_sgn (arg1) > 0)
10449 tree sum = fold_binary_loc (loc, PLUS_EXPR, TREE_TYPE (arg1),
10450 arg1, TREE_OPERAND (arg0, 1));
10451 if (sum && integer_zerop (sum)) {
10452 tree pow2 = build_int_cst (integer_type_node,
10453 wi::exact_log2 (arg1));
10454 return fold_build2_loc (loc, RSHIFT_EXPR, type,
10455 TREE_OPERAND (arg0, 0), pow2);
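/* E.g. for signed x, (x & -8) / 8 becomes x >> 3; the mask
   makes the dividend an exact multiple of 8, so the arithmetic
   shift is correct even for negative values.  */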
10459 /* Fall through */
10461 case FLOOR_DIV_EXPR:
10462 /* Simplify A / (B << N) where A and B are positive and B is
10463 a power of 2, to A >> (N + log2(B)). */
10464 strict_overflow_p = false;
10465 if (TREE_CODE (arg1) == LSHIFT_EXPR
10466 && (TYPE_UNSIGNED (type)
10467 || tree_expr_nonnegative_warnv_p (op0, &strict_overflow_p)))
10469 tree sval = TREE_OPERAND (arg1, 0);
10470 if (integer_pow2p (sval) && tree_int_cst_sgn (sval) > 0)
10472 tree sh_cnt = TREE_OPERAND (arg1, 1);
10473 tree pow2 = build_int_cst (TREE_TYPE (sh_cnt),
10474 wi::exact_log2 (sval));
10476 if (strict_overflow_p)
10477 fold_overflow_warning (("assuming signed overflow does not "
10478 "occur when simplifying A / (B << N)"),
10479 WARN_STRICT_OVERFLOW_MISC);
10481 sh_cnt = fold_build2_loc (loc, PLUS_EXPR, TREE_TYPE (sh_cnt),
10482 sh_cnt, pow2);
10483 return fold_build2_loc (loc, RSHIFT_EXPR, type,
10484 fold_convert_loc (loc, type, arg0), sh_cnt);
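/* E.g. for unsigned a, a / (4 << n) becomes a >> (n + 2).  */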
10488 /* Fall through */
10490 case ROUND_DIV_EXPR:
10491 case CEIL_DIV_EXPR:
10492 case EXACT_DIV_EXPR:
10493 if (integer_zerop (arg1))
10494 return NULL_TREE;
10496 /* Convert -A / -B to A / B when the type is signed and overflow is
10497 undefined. */
10498 if ((!INTEGRAL_TYPE_P (type) || TYPE_OVERFLOW_UNDEFINED (type))
10499 && TREE_CODE (arg0) == NEGATE_EXPR
10500 && negate_expr_p (arg1))
10502 if (INTEGRAL_TYPE_P (type))
10503 fold_overflow_warning (("assuming signed overflow does not occur "
10504 "when distributing negation across "
10505 "division"),
10506 WARN_STRICT_OVERFLOW_MISC);
10507 return fold_build2_loc (loc, code, type,
10508 fold_convert_loc (loc, type,
10509 TREE_OPERAND (arg0, 0)),
10510 fold_convert_loc (loc, type,
10511 negate_expr (arg1)));
10513 if ((!INTEGRAL_TYPE_P (type) || TYPE_OVERFLOW_UNDEFINED (type))
10514 && TREE_CODE (arg1) == NEGATE_EXPR
10515 && negate_expr_p (arg0))
10517 if (INTEGRAL_TYPE_P (type))
10518 fold_overflow_warning (("assuming signed overflow does not occur "
10519 "when distributing negation across "
10520 "division"),
10521 WARN_STRICT_OVERFLOW_MISC);
10522 return fold_build2_loc (loc, code, type,
10523 fold_convert_loc (loc, type,
10524 negate_expr (arg0)),
10525 fold_convert_loc (loc, type,
10526 TREE_OPERAND (arg1, 0)));
10529 /* If arg0 is a multiple of arg1, then rewrite to the fastest div
10530 operation, EXACT_DIV_EXPR.
10532 Note that only CEIL_DIV_EXPR and FLOOR_DIV_EXPR are rewritten now.
10533 At one time others generated faster code, but it's not clear whether they do
10534 after the last round of changes to the DIV code in expmed.c. */
10535 if ((code == CEIL_DIV_EXPR || code == FLOOR_DIV_EXPR)
10536 && multiple_of_p (type, arg0, arg1))
10537 return fold_build2_loc (loc, EXACT_DIV_EXPR, type,
10538 fold_convert (type, arg0),
10539 fold_convert (type, arg1));
10541 strict_overflow_p = false;
10542 if (TREE_CODE (arg1) == INTEGER_CST
10543 && 0 != (tem = extract_muldiv (op0, arg1, code, NULL_TREE,
10544 &strict_overflow_p)))
10546 if (strict_overflow_p)
10547 fold_overflow_warning (("assuming signed overflow does not occur "
10548 "when simplifying division"),
10549 WARN_STRICT_OVERFLOW_MISC);
10550 return fold_convert_loc (loc, type, tem);
10553 return NULL_TREE;
10555 case CEIL_MOD_EXPR:
10556 case FLOOR_MOD_EXPR:
10557 case ROUND_MOD_EXPR:
10558 case TRUNC_MOD_EXPR:
10559 strict_overflow_p = false;
10560 if (TREE_CODE (arg1) == INTEGER_CST
10561 && 0 != (tem = extract_muldiv (op0, arg1, code, NULL_TREE,
10562 &strict_overflow_p)))
10564 if (strict_overflow_p)
10565 fold_overflow_warning (("assuming signed overflow does not occur "
10566 "when simplifying modulus"),
10567 WARN_STRICT_OVERFLOW_MISC);
10568 return fold_convert_loc (loc, type, tem);
10571 return NULL_TREE;
10573 case LROTATE_EXPR:
10574 case RROTATE_EXPR:
10575 case RSHIFT_EXPR:
10576 case LSHIFT_EXPR:
10577 /* Since negative shift count is not well-defined,
10578 don't try to compute it in the compiler. */
10579 if (TREE_CODE (arg1) == INTEGER_CST && tree_int_cst_sgn (arg1) < 0)
10580 return NULL_TREE;
10582 prec = element_precision (type);
10584 /* Transform (x >> c) << c into x & (-1<<c), or transform (x << c) >> c
10585 into x & ((unsigned)-1 >> c) for unsigned types. */
10586 if (((code == LSHIFT_EXPR && TREE_CODE (arg0) == RSHIFT_EXPR)
10587 || (TYPE_UNSIGNED (type)
10588 && code == RSHIFT_EXPR && TREE_CODE (arg0) == LSHIFT_EXPR))
10589 && tree_fits_uhwi_p (arg1)
10590 && tree_to_uhwi (arg1) < prec
10591 && tree_fits_uhwi_p (TREE_OPERAND (arg0, 1))
10592 && tree_to_uhwi (TREE_OPERAND (arg0, 1)) < prec)
10594 HOST_WIDE_INT low0 = tree_to_uhwi (TREE_OPERAND (arg0, 1));
10595 HOST_WIDE_INT low1 = tree_to_uhwi (arg1);
10596 tree lshift;
10597 tree arg00;
10599 if (low0 == low1)
10601 arg00 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
10603 lshift = build_minus_one_cst (type);
10604 lshift = const_binop (code, lshift, arg1);
10606 return fold_build2_loc (loc, BIT_AND_EXPR, type, arg00, lshift);
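/* E.g. (x >> 4) << 4 becomes x & -16, and, for a 32-bit
   unsigned x, (x << 4) >> 4 becomes x & 0x0fffffff.  */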
10610 /* If we have a rotate of a bit operation with the rotate count and
10611 the second operand of the bit operation both constant,
10612 permute the two operations. */
10613 if (code == RROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST
10614 && (TREE_CODE (arg0) == BIT_AND_EXPR
10615 || TREE_CODE (arg0) == BIT_IOR_EXPR
10616 || TREE_CODE (arg0) == BIT_XOR_EXPR)
10617 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
10618 return fold_build2_loc (loc, TREE_CODE (arg0), type,
10619 fold_build2_loc (loc, code, type,
10620 TREE_OPERAND (arg0, 0), arg1),
10621 fold_build2_loc (loc, code, type,
10622 TREE_OPERAND (arg0, 1), arg1));
10624 /* Two consecutive rotates adding up to some integer
10625 multiple of the precision of the type can be ignored. */
10626 if (code == RROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST
10627 && TREE_CODE (arg0) == RROTATE_EXPR
10628 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
10629 && wi::umod_trunc (wi::add (arg1, TREE_OPERAND (arg0, 1)),
10630 prec) == 0)
10631 return TREE_OPERAND (arg0, 0);
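/* E.g. for a 32-bit type, (x rrotate 3) rrotate 29 collapses
   to x, as the two counts sum to the precision.  */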
10633 return NULL_TREE;
10635 case MIN_EXPR:
10636 tem = fold_minmax (loc, MIN_EXPR, type, arg0, arg1);
10637 if (tem)
10638 return tem;
10639 goto associate;
10641 case MAX_EXPR:
10642 tem = fold_minmax (loc, MAX_EXPR, type, arg0, arg1);
10643 if (tem)
10644 return tem;
10645 goto associate;
10647 case TRUTH_ANDIF_EXPR:
10648 /* Note that the operands of this must be ints
10649 and their values must be 0 or 1.
10650 ("true" is a fixed value perhaps depending on the language.) */
10651 /* If first arg is constant zero, return it. */
10652 if (integer_zerop (arg0))
10653 return fold_convert_loc (loc, type, arg0);
10654 case TRUTH_AND_EXPR:
10655 /* If either arg is constant true, drop it. */
10656 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
10657 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
10658 if (TREE_CODE (arg1) == INTEGER_CST && ! integer_zerop (arg1)
10659 /* Preserve sequence points. */
10660 && (code != TRUTH_ANDIF_EXPR || ! TREE_SIDE_EFFECTS (arg0)))
10661 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
10662 /* If second arg is constant zero, result is zero, but first arg
10663 must be evaluated. */
10664 if (integer_zerop (arg1))
10665 return omit_one_operand_loc (loc, type, arg1, arg0);
10666 /* Likewise for first arg, but note that only the TRUTH_AND_EXPR
10667 case will be handled here. */
10668 if (integer_zerop (arg0))
10669 return omit_one_operand_loc (loc, type, arg0, arg1);
10671 /* !X && X is always false. */
10672 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
10673 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
10674 return omit_one_operand_loc (loc, type, integer_zero_node, arg1);
10675 /* X && !X is always false. */
10676 if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
10677 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
10678 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
10680 /* A < X && A + 1 > Y ==> A < X && A >= Y. Normally A + 1 > Y
10681 means A >= Y && A != MAX, but in this case we know that
10682 A < X <= MAX. */
10684 if (!TREE_SIDE_EFFECTS (arg0)
10685 && !TREE_SIDE_EFFECTS (arg1))
10687 tem = fold_to_nonsharp_ineq_using_bound (loc, arg0, arg1);
10688 if (tem && !operand_equal_p (tem, arg0, 0))
10689 return fold_build2_loc (loc, code, type, tem, arg1);
10691 tem = fold_to_nonsharp_ineq_using_bound (loc, arg1, arg0);
10692 if (tem && !operand_equal_p (tem, arg1, 0))
10693 return fold_build2_loc (loc, code, type, arg0, tem);
10696 if ((tem = fold_truth_andor (loc, code, type, arg0, arg1, op0, op1))
10697 != NULL_TREE)
10698 return tem;
10700 return NULL_TREE;
10702 case TRUTH_ORIF_EXPR:
10703 /* Note that the operands of this must be ints
10704 and their values must be 0 or true.
10705 ("true" is a fixed value perhaps depending on the language.) */
10706 /* If first arg is constant true, return it. */
10707 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
10708 return fold_convert_loc (loc, type, arg0);
10709 case TRUTH_OR_EXPR:
10710 /* If either arg is constant zero, drop it. */
10711 if (TREE_CODE (arg0) == INTEGER_CST && integer_zerop (arg0))
10712 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
10713 if (TREE_CODE (arg1) == INTEGER_CST && integer_zerop (arg1)
10714 /* Preserve sequence points. */
10715 && (code != TRUTH_ORIF_EXPR || ! TREE_SIDE_EFFECTS (arg0)))
10716 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
10717 /* If second arg is constant true, result is true, but we must
10718 evaluate first arg. */
10719 if (TREE_CODE (arg1) == INTEGER_CST && ! integer_zerop (arg1))
10720 return omit_one_operand_loc (loc, type, arg1, arg0);
10721 /* Likewise for first arg, but note this only occurs here for
10722 TRUTH_OR_EXPR. */
10723 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
10724 return omit_one_operand_loc (loc, type, arg0, arg1);
10726 /* !X || X is always true. */
10727 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
10728 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
10729 return omit_one_operand_loc (loc, type, integer_one_node, arg1);
10730 /* X || !X is always true. */
10731 if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
10732 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
10733 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
10735 /* (X && !Y) || (!X && Y) is X ^ Y */
10736 if (TREE_CODE (arg0) == TRUTH_AND_EXPR
10737 && TREE_CODE (arg1) == TRUTH_AND_EXPR)
10739 tree a0, a1, l0, l1, n0, n1;
10741 a0 = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 0));
10742 a1 = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 1));
10744 l0 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
10745 l1 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
10747 n0 = fold_build1_loc (loc, TRUTH_NOT_EXPR, type, l0);
10748 n1 = fold_build1_loc (loc, TRUTH_NOT_EXPR, type, l1);
10750 if ((operand_equal_p (n0, a0, 0)
10751 && operand_equal_p (n1, a1, 0))
10752 || (operand_equal_p (n0, a1, 0)
10753 && operand_equal_p (n1, a0, 0)))
10754 return fold_build2_loc (loc, TRUTH_XOR_EXPR, type, l0, n1);
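/* The symmetric match above also catches (X && Y) || (!X && !Y),
   which folds to X ^ !Y, i.e. the logical XNOR of X and Y.  */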
10757 if ((tem = fold_truth_andor (loc, code, type, arg0, arg1, op0, op1))
10758 != NULL_TREE)
10759 return tem;
10761 return NULL_TREE;
10763 case TRUTH_XOR_EXPR:
10764 /* If the second arg is constant zero, drop it. */
10765 if (integer_zerop (arg1))
10766 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
10767 /* If the second arg is constant true, this is a logical inversion. */
10768 if (integer_onep (arg1))
10770 tem = invert_truthvalue_loc (loc, arg0);
10771 return non_lvalue_loc (loc, fold_convert_loc (loc, type, tem));
10773 /* Identical arguments cancel to zero. */
10774 if (operand_equal_p (arg0, arg1, 0))
10775 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
10777 /* !X ^ X is always true. */
10778 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
10779 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
10780 return omit_one_operand_loc (loc, type, integer_one_node, arg1);
10782 /* X ^ !X is always true. */
10783 if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
10784 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
10785 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
10787 return NULL_TREE;
10789 case EQ_EXPR:
10790 case NE_EXPR:
10791 STRIP_NOPS (arg0);
10792 STRIP_NOPS (arg1);
10794 tem = fold_comparison (loc, code, type, op0, op1);
10795 if (tem != NULL_TREE)
10796 return tem;
10798 /* bool_var != 1 becomes !bool_var. */
10799 if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_onep (arg1)
10800 && code == NE_EXPR)
10801 return fold_convert_loc (loc, type,
10802 fold_build1_loc (loc, TRUTH_NOT_EXPR,
10803 TREE_TYPE (arg0), arg0));
10805 /* bool_var == 0 becomes !bool_var. */
10806 if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_zerop (arg1)
10807 && code == EQ_EXPR)
10808 return fold_convert_loc (loc, type,
10809 fold_build1_loc (loc, TRUTH_NOT_EXPR,
10810 TREE_TYPE (arg0), arg0));
10812 /* !exp != 0 becomes !exp */
10813 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR && integer_zerop (arg1)
10814 && code == NE_EXPR)
10815 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
10817 /* Transform comparisons of the form X +- Y CMP X to Y CMP 0. */
10818 if ((TREE_CODE (arg0) == PLUS_EXPR
10819 || TREE_CODE (arg0) == POINTER_PLUS_EXPR
10820 || TREE_CODE (arg0) == MINUS_EXPR)
10821 && operand_equal_p (tree_strip_nop_conversions (TREE_OPERAND (arg0,
10822 0)),
10823 arg1, 0)
10824 && (INTEGRAL_TYPE_P (TREE_TYPE (arg0))
10825 || POINTER_TYPE_P (TREE_TYPE (arg0))))
10827 tree val = TREE_OPERAND (arg0, 1);
10828 return omit_two_operands_loc (loc, type,
10829 fold_build2_loc (loc, code, type,
10830 val,
10831 build_int_cst (TREE_TYPE (val),
10832 0)),
10833 TREE_OPERAND (arg0, 0), arg1);
10836 /* Transform comparisons of the form C - X CMP X if C % 2 == 1. */
10837 if (TREE_CODE (arg0) == MINUS_EXPR
10838 && TREE_CODE (TREE_OPERAND (arg0, 0)) == INTEGER_CST
10839 && operand_equal_p (tree_strip_nop_conversions (TREE_OPERAND (arg0,
10840 1)),
10841 arg1, 0)
10842 && wi::extract_uhwi (TREE_OPERAND (arg0, 0), 0, 1) == 1)
10844 return omit_two_operands_loc (loc, type,
10845 code == NE_EXPR
10846 ? boolean_true_node : boolean_false_node,
10847 TREE_OPERAND (arg0, 1), arg1);
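/* This is justified because in wrapping arithmetic C - X == X iff
   C == 2 * X (mod 2**prec), and 2 * X is always even; an odd C can
   therefore never be equal, so != is always true and == always
   false, whatever the value of X.  */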
10850 /* Convert ABS_EXPR<x> == 0 or ABS_EXPR<x> != 0 to x == 0 or x != 0. */
10851 if (TREE_CODE (arg0) == ABS_EXPR
10852 && (integer_zerop (arg1) || real_zerop (arg1)))
10853 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0), arg1);
10855 /* If this is an EQ or NE comparison with zero and ARG0 is
10856 (1 << foo) & bar, convert it to (bar >> foo) & 1. Both require
10857 two operations, but the latter can be done in one less insn
10858 on machines that have only two-operand insns or on which a
10859 constant cannot be the first operand. */
10860 if (TREE_CODE (arg0) == BIT_AND_EXPR
10861 && integer_zerop (arg1))
10863 tree arg00 = TREE_OPERAND (arg0, 0);
10864 tree arg01 = TREE_OPERAND (arg0, 1);
10865 if (TREE_CODE (arg00) == LSHIFT_EXPR
10866 && integer_onep (TREE_OPERAND (arg00, 0)))
10868 tree tem = fold_build2_loc (loc, RSHIFT_EXPR, TREE_TYPE (arg00),
10869 arg01, TREE_OPERAND (arg00, 1));
10870 tem = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg0), tem,
10871 build_int_cst (TREE_TYPE (arg0), 1));
10872 return fold_build2_loc (loc, code, type,
10873 fold_convert_loc (loc, TREE_TYPE (arg1), tem),
10874 arg1);
10876 else if (TREE_CODE (arg01) == LSHIFT_EXPR
10877 && integer_onep (TREE_OPERAND (arg01, 0)))
10879 tree tem = fold_build2_loc (loc, RSHIFT_EXPR, TREE_TYPE (arg01),
10880 arg00, TREE_OPERAND (arg01, 1));
10881 tem = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg0), tem,
10882 build_int_cst (TREE_TYPE (arg0), 1));
10883 return fold_build2_loc (loc, code, type,
10884 fold_convert_loc (loc, TREE_TYPE (arg1), tem),
10885 arg1);
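/* For example, ((1 << n) & flags) != 0 becomes
   ((flags >> n) & 1) != 0; both test bit n of flags, but the
   rewritten form no longer needs a constant as the first operand
   of the shift.  */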
10889 /* If this is an NE or EQ comparison of zero against the result of a
10890 signed MOD operation whose second operand is a power of 2, make
10891 the MOD operation unsigned since it is simpler and equivalent. */
10892 if (integer_zerop (arg1)
10893 && !TYPE_UNSIGNED (TREE_TYPE (arg0))
10894 && (TREE_CODE (arg0) == TRUNC_MOD_EXPR
10895 || TREE_CODE (arg0) == CEIL_MOD_EXPR
10896 || TREE_CODE (arg0) == FLOOR_MOD_EXPR
10897 || TREE_CODE (arg0) == ROUND_MOD_EXPR)
10898 && integer_pow2p (TREE_OPERAND (arg0, 1)))
10900 tree newtype = unsigned_type_for (TREE_TYPE (arg0));
10901 tree newmod = fold_build2_loc (loc, TREE_CODE (arg0), newtype,
10902 fold_convert_loc (loc, newtype,
10903 TREE_OPERAND (arg0, 0)),
10904 fold_convert_loc (loc, newtype,
10905 TREE_OPERAND (arg0, 1)));
10907 return fold_build2_loc (loc, code, type, newmod,
10908 fold_convert_loc (loc, newtype, arg1));
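/* For example, for signed x, x % 8 == 0 becomes
   (unsigned) x % 8 == 0, which is valid because 2**prec is
   divisible by 8, and which can later be expanded as the simpler
   test (x & 7) == 0.  */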
10911 /* Fold ((X >> C1) & C2) == 0 and ((X >> C1) & C2) != 0 where
10912 C1 is a valid shift constant, and C2 is a power of two, i.e.
10913 a single bit. */
10914 if (TREE_CODE (arg0) == BIT_AND_EXPR
10915 && TREE_CODE (TREE_OPERAND (arg0, 0)) == RSHIFT_EXPR
10916 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1))
10917 == INTEGER_CST
10918 && integer_pow2p (TREE_OPERAND (arg0, 1))
10919 && integer_zerop (arg1))
10921 tree itype = TREE_TYPE (arg0);
10922 tree arg001 = TREE_OPERAND (TREE_OPERAND (arg0, 0), 1);
10923 prec = TYPE_PRECISION (itype);
10925 /* Check for a valid shift count. */
10926 if (wi::ltu_p (arg001, prec))
10928 tree arg01 = TREE_OPERAND (arg0, 1);
10929 tree arg000 = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
10930 unsigned HOST_WIDE_INT log2 = tree_log2 (arg01);
10931 /* If (C2 << C1) doesn't overflow, then ((X >> C1) & C2) != 0
10932 can be rewritten as (X & (C2 << C1)) != 0. */
10933 if ((log2 + TREE_INT_CST_LOW (arg001)) < prec)
10935 tem = fold_build2_loc (loc, LSHIFT_EXPR, itype, arg01, arg001);
10936 tem = fold_build2_loc (loc, BIT_AND_EXPR, itype, arg000, tem);
10937 return fold_build2_loc (loc, code, type, tem,
10938 fold_convert_loc (loc, itype, arg1));
10940 /* Otherwise, for signed (arithmetic) shifts,
10941 ((X >> C1) & C2) != 0 is rewritten as X < 0, and
10942 ((X >> C1) & C2) == 0 is rewritten as X >= 0. */
10943 else if (!TYPE_UNSIGNED (itype))
10944 return fold_build2_loc (loc, code == EQ_EXPR ? GE_EXPR : LT_EXPR, type,
10945 arg000, build_int_cst (itype, 0));
10946 /* Otherwise, for unsigned (logical) shifts,
10947 ((X >> C1) & C2) != 0 is rewritten as (X,false), and
10948 ((X >> C1) & C2) == 0 is rewritten as (X,true). */
10949 else
10950 return omit_one_operand_loc (loc, type,
10951 code == EQ_EXPR ? integer_one_node
10952 : integer_zero_node,
10953 arg000);
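/* For example, assuming a 32-bit int: ((x >> 2) & 4) != 0 tests a
   bit below the precision (2 + 2 < 32) and becomes (x & 16) != 0,
   while ((x >> 31) & 2) != 0 only ever sees copies of the sign
   bit, so for signed x it is simply x < 0.  */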
10957 /* If we have (A & C) == C where C is a power of 2, convert this into
10958 (A & C) != 0. Similarly for NE_EXPR. */
10959 if (TREE_CODE (arg0) == BIT_AND_EXPR
10960 && integer_pow2p (TREE_OPERAND (arg0, 1))
10961 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
10962 return fold_build2_loc (loc, code == EQ_EXPR ? NE_EXPR : EQ_EXPR, type,
10963 arg0, fold_convert_loc (loc, TREE_TYPE (arg0),
10964 integer_zero_node));
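/* This holds because A & C can only evaluate to 0 or C when C has
   a single bit set, so equality with C and being nonzero are the
   same test.  */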
10966 /* If we have (A & C) != 0 or (A & C) == 0 and C is the sign
10967 bit, then fold the expression into A < 0 or A >= 0. */
10968 tem = fold_single_bit_test_into_sign_test (loc, code, arg0, arg1, type);
10969 if (tem)
10970 return tem;
10972 /* If we have (A & C) == D where D & ~C != 0, convert this into 0.
10973 Similarly for NE_EXPR. */
10974 if (TREE_CODE (arg0) == BIT_AND_EXPR
10975 && TREE_CODE (arg1) == INTEGER_CST
10976 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
10978 tree notc = fold_build1_loc (loc, BIT_NOT_EXPR,
10979 TREE_TYPE (TREE_OPERAND (arg0, 1)),
10980 TREE_OPERAND (arg0, 1));
10981 tree dandnotc
10982 = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg0),
10983 fold_convert_loc (loc, TREE_TYPE (arg0), arg1),
10984 notc);
10985 tree rslt = code == EQ_EXPR ? integer_zero_node : integer_one_node;
10986 if (integer_nonzerop (dandnotc))
10987 return omit_one_operand_loc (loc, type, rslt, arg0);
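/* For example, (x & 0x0f) == 0x10 is always false: x & 0x0f can
   never produce a bit outside the 0x0f mask.  */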
10990 /* If this is a comparison of a field, we may be able to simplify it. */
10991 if ((TREE_CODE (arg0) == COMPONENT_REF
10992 || TREE_CODE (arg0) == BIT_FIELD_REF)
10993 /* Handle the constant case even without -O
10994 to make sure the warnings are given. */
10995 && (optimize || TREE_CODE (arg1) == INTEGER_CST))
10997 t1 = optimize_bit_field_compare (loc, code, type, arg0, arg1);
10998 if (t1)
10999 return t1;
11002 /* Optimize comparisons of strlen vs zero to a compare of the
11003 first character of the string vs zero. To wit,
11004 strlen(ptr) == 0 => *ptr == 0
11005 strlen(ptr) != 0 => *ptr != 0
11006 Other cases should reduce to one of these two (or a constant)
11007 due to the return value of strlen being unsigned. */
11008 if (TREE_CODE (arg0) == CALL_EXPR
11009 && integer_zerop (arg1))
11011 tree fndecl = get_callee_fndecl (arg0);
11013 if (fndecl
11014 && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
11015 && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_STRLEN
11016 && call_expr_nargs (arg0) == 1
11017 && TREE_CODE (TREE_TYPE (CALL_EXPR_ARG (arg0, 0))) == POINTER_TYPE)
11019 tree iref = build_fold_indirect_ref_loc (loc,
11020 CALL_EXPR_ARG (arg0, 0));
11021 return fold_build2_loc (loc, code, type, iref,
11022 build_int_cst (TREE_TYPE (iref), 0));
11026 /* Fold (X >> C) != 0 into X < 0 if C is one less than the width
11027 of X. Similarly fold (X >> C) == 0 into X >= 0. */
11028 if (TREE_CODE (arg0) == RSHIFT_EXPR
11029 && integer_zerop (arg1)
11030 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
11032 tree arg00 = TREE_OPERAND (arg0, 0);
11033 tree arg01 = TREE_OPERAND (arg0, 1);
11034 tree itype = TREE_TYPE (arg00);
11035 if (wi::eq_p (arg01, element_precision (itype) - 1))
11037 if (TYPE_UNSIGNED (itype))
11039 itype = signed_type_for (itype);
11040 arg00 = fold_convert_loc (loc, itype, arg00);
11042 return fold_build2_loc (loc, code == EQ_EXPR ? GE_EXPR : LT_EXPR,
11043 type, arg00, build_zero_cst (itype));
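/* For example, with a 32-bit type, (x >> 31) != 0 is exactly
   x < 0 for signed x; for unsigned x the operand is first
   converted to the corresponding signed type so that the same
   sign test applies.  */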
11047 /* Fold (~X & C) == 0 into (X & C) != 0 and (~X & C) != 0 into
11048 (X & C) == 0 when C is a single bit. */
11049 if (TREE_CODE (arg0) == BIT_AND_EXPR
11050 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_NOT_EXPR
11051 && integer_zerop (arg1)
11052 && integer_pow2p (TREE_OPERAND (arg0, 1)))
11054 tem = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg0),
11055 TREE_OPERAND (TREE_OPERAND (arg0, 0), 0),
11056 TREE_OPERAND (arg0, 1));
11057 return fold_build2_loc (loc, code == EQ_EXPR ? NE_EXPR : EQ_EXPR,
11058 type, tem,
11059 fold_convert_loc (loc, TREE_TYPE (arg0),
11060 arg1));
11063 /* Fold ((X & C) ^ C) eq/ne 0 into (X & C) ne/eq 0, when the
11064 constant C is a power of two, i.e. a single bit. */
11065 if (TREE_CODE (arg0) == BIT_XOR_EXPR
11066 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_AND_EXPR
11067 && integer_zerop (arg1)
11068 && integer_pow2p (TREE_OPERAND (arg0, 1))
11069 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
11070 TREE_OPERAND (arg0, 1), OEP_ONLY_CONST))
11072 tree arg00 = TREE_OPERAND (arg0, 0);
11073 return fold_build2_loc (loc, code == EQ_EXPR ? NE_EXPR : EQ_EXPR, type,
11074 arg00, build_int_cst (TREE_TYPE (arg00), 0));
11077 /* Likewise, fold ((X ^ C) & C) eq/ne 0 into (X & C) ne/eq 0,
11078 when C is a power of two, i.e. a single bit. */
11079 if (TREE_CODE (arg0) == BIT_AND_EXPR
11080 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_XOR_EXPR
11081 && integer_zerop (arg1)
11082 && integer_pow2p (TREE_OPERAND (arg0, 1))
11083 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
11084 TREE_OPERAND (arg0, 1), OEP_ONLY_CONST))
11086 tree arg000 = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
11087 tem = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg000),
11088 arg000, TREE_OPERAND (arg0, 1));
11089 return fold_build2_loc (loc, code == EQ_EXPR ? NE_EXPR : EQ_EXPR, type,
11090 tem, build_int_cst (TREE_TYPE (tem), 0));
11093 if (integer_zerop (arg1)
11094 && tree_expr_nonzero_p (arg0))
11096 tree res = constant_boolean_node (code == NE_EXPR, type);
11097 return omit_one_operand_loc (loc, type, res, arg0);
11100 /* Fold (X & C) op (Y & C) as ((X ^ Y) & C) op 0, and symmetries. */
11101 if (TREE_CODE (arg0) == BIT_AND_EXPR
11102 && TREE_CODE (arg1) == BIT_AND_EXPR)
11104 tree arg00 = TREE_OPERAND (arg0, 0);
11105 tree arg01 = TREE_OPERAND (arg0, 1);
11106 tree arg10 = TREE_OPERAND (arg1, 0);
11107 tree arg11 = TREE_OPERAND (arg1, 1);
11108 tree itype = TREE_TYPE (arg0);
11110 if (operand_equal_p (arg01, arg11, 0))
11111 return fold_build2_loc (loc, code, type,
11112 fold_build2_loc (loc, BIT_AND_EXPR, itype,
11113 fold_build2_loc (loc,
11114 BIT_XOR_EXPR, itype,
11115 arg00, arg10),
11116 arg01),
11117 build_zero_cst (itype));
11119 if (operand_equal_p (arg01, arg10, 0))
11120 return fold_build2_loc (loc, code, type,
11121 fold_build2_loc (loc, BIT_AND_EXPR, itype,
11122 fold_build2_loc (loc,
11123 BIT_XOR_EXPR, itype,
11124 arg00, arg11),
11125 arg01),
11126 build_zero_cst (itype));
11128 if (operand_equal_p (arg00, arg11, 0))
11129 return fold_build2_loc (loc, code, type,
11130 fold_build2_loc (loc, BIT_AND_EXPR, itype,
11131 fold_build2_loc (loc,
11132 BIT_XOR_EXPR, itype,
11133 arg01, arg10),
11134 arg00),
11135 build_zero_cst (itype));
11137 if (operand_equal_p (arg00, arg10, 0))
11138 return fold_build2_loc (loc, code, type,
11139 fold_build2_loc (loc, BIT_AND_EXPR, itype,
11140 fold_build2_loc (loc,
11141 BIT_XOR_EXPR, itype,
11142 arg01, arg11),
11143 arg00),
11144 build_zero_cst (itype));
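/* All four variants above rely on the same identity:
   (X & C) == (Y & C) iff ((X ^ Y) & C) == 0, since the XOR
   exposes exactly the bits in which the two masked values
   differ.  */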
11147 if (TREE_CODE (arg0) == BIT_XOR_EXPR
11148 && TREE_CODE (arg1) == BIT_XOR_EXPR)
11150 tree arg00 = TREE_OPERAND (arg0, 0);
11151 tree arg01 = TREE_OPERAND (arg0, 1);
11152 tree arg10 = TREE_OPERAND (arg1, 0);
11153 tree arg11 = TREE_OPERAND (arg1, 1);
11154 tree itype = TREE_TYPE (arg0);
11156 /* Optimize (X ^ Z) op (Y ^ Z) as X op Y, and symmetries.
11157 operand_equal_p guarantees no side-effects so we don't need
11158 to use omit_one_operand on Z. */
11159 if (operand_equal_p (arg01, arg11, 0))
11160 return fold_build2_loc (loc, code, type, arg00,
11161 fold_convert_loc (loc, TREE_TYPE (arg00),
11162 arg10));
11163 if (operand_equal_p (arg01, arg10, 0))
11164 return fold_build2_loc (loc, code, type, arg00,
11165 fold_convert_loc (loc, TREE_TYPE (arg00),
11166 arg11));
11167 if (operand_equal_p (arg00, arg11, 0))
11168 return fold_build2_loc (loc, code, type, arg01,
11169 fold_convert_loc (loc, TREE_TYPE (arg01),
11170 arg10));
11171 if (operand_equal_p (arg00, arg10, 0))
11172 return fold_build2_loc (loc, code, type, arg01,
11173 fold_convert_loc (loc, TREE_TYPE (arg01),
11174 arg11));
11176 /* Optimize (X ^ C1) op (Y ^ C2) as (X ^ (C1 ^ C2)) op Y. */
11177 if (TREE_CODE (arg01) == INTEGER_CST
11178 && TREE_CODE (arg11) == INTEGER_CST)
11180 tem = fold_build2_loc (loc, BIT_XOR_EXPR, itype, arg01,
11181 fold_convert_loc (loc, itype, arg11));
11182 tem = fold_build2_loc (loc, BIT_XOR_EXPR, itype, arg00, tem);
11183 return fold_build2_loc (loc, code, type, tem,
11184 fold_convert_loc (loc, itype, arg10));
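/* For example, (x ^ 5) == (y ^ 3) becomes (x ^ (5 ^ 3)) == y,
   i.e. (x ^ 6) == y, combining both constants on one side.  */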
11188 /* Attempt to simplify equality/inequality comparisons of complex
11189 values. Only lower the comparison if the result is known or
11190 can be simplified to a single scalar comparison. */
11191 if ((TREE_CODE (arg0) == COMPLEX_EXPR
11192 || TREE_CODE (arg0) == COMPLEX_CST)
11193 && (TREE_CODE (arg1) == COMPLEX_EXPR
11194 || TREE_CODE (arg1) == COMPLEX_CST))
11196 tree real0, imag0, real1, imag1;
11197 tree rcond, icond;
11199 if (TREE_CODE (arg0) == COMPLEX_EXPR)
11201 real0 = TREE_OPERAND (arg0, 0);
11202 imag0 = TREE_OPERAND (arg0, 1);
11204 else
11206 real0 = TREE_REALPART (arg0);
11207 imag0 = TREE_IMAGPART (arg0);
11210 if (TREE_CODE (arg1) == COMPLEX_EXPR)
11212 real1 = TREE_OPERAND (arg1, 0);
11213 imag1 = TREE_OPERAND (arg1, 1);
11215 else
11217 real1 = TREE_REALPART (arg1);
11218 imag1 = TREE_IMAGPART (arg1);
11221 rcond = fold_binary_loc (loc, code, type, real0, real1);
11222 if (rcond && TREE_CODE (rcond) == INTEGER_CST)
11224 if (integer_zerop (rcond))
11226 if (code == EQ_EXPR)
11227 return omit_two_operands_loc (loc, type, boolean_false_node,
11228 imag0, imag1);
11229 return fold_build2_loc (loc, NE_EXPR, type, imag0, imag1);
11231 else
11233 if (code == NE_EXPR)
11234 return omit_two_operands_loc (loc, type, boolean_true_node,
11235 imag0, imag1);
11236 return fold_build2_loc (loc, EQ_EXPR, type, imag0, imag1);
11240 icond = fold_binary_loc (loc, code, type, imag0, imag1);
11241 if (icond && TREE_CODE (icond) == INTEGER_CST)
11243 if (integer_zerop (icond))
11245 if (code == EQ_EXPR)
11246 return omit_two_operands_loc (loc, type, boolean_false_node,
11247 real0, real1);
11248 return fold_build2_loc (loc, NE_EXPR, type, real0, real1);
11250 else
11252 if (code == NE_EXPR)
11253 return omit_two_operands_loc (loc, type, boolean_true_node,
11254 real0, real1);
11255 return fold_build2_loc (loc, EQ_EXPR, type, real0, real1);
11260 return NULL_TREE;
11262 case LT_EXPR:
11263 case GT_EXPR:
11264 case LE_EXPR:
11265 case GE_EXPR:
11266 tem = fold_comparison (loc, code, type, op0, op1);
11267 if (tem != NULL_TREE)
11268 return tem;
11270 /* Transform comparisons of the form X +- C CMP X. */
11271 if ((TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
11272 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
11273 && ((TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
11274 && !HONOR_SNANS (arg0))
11275 || (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
11276 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))))
11278 tree arg01 = TREE_OPERAND (arg0, 1);
11279 enum tree_code code0 = TREE_CODE (arg0);
11280 int is_positive;
11282 if (TREE_CODE (arg01) == REAL_CST)
11283 is_positive = REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg01)) ? -1 : 1;
11284 else
11285 is_positive = tree_int_cst_sgn (arg01);
11287 /* (X - c) > X becomes false. */
11288 if (code == GT_EXPR
11289 && ((code0 == MINUS_EXPR && is_positive >= 0)
11290 || (code0 == PLUS_EXPR && is_positive <= 0)))
11292 if (TREE_CODE (arg01) == INTEGER_CST
11293 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
11294 fold_overflow_warning (("assuming signed overflow does not "
11295 "occur when assuming that (X - c) > X "
11296 "is always false"),
11297 WARN_STRICT_OVERFLOW_ALL);
11298 return constant_boolean_node (0, type);
11301 /* Likewise (X + c) < X becomes false. */
11302 if (code == LT_EXPR
11303 && ((code0 == PLUS_EXPR && is_positive >= 0)
11304 || (code0 == MINUS_EXPR && is_positive <= 0)))
11306 if (TREE_CODE (arg01) == INTEGER_CST
11307 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
11308 fold_overflow_warning (("assuming signed overflow does not "
11309 "occur when assuming that "
11310 "(X + c) < X is always false"),
11311 WARN_STRICT_OVERFLOW_ALL);
11312 return constant_boolean_node (0, type);
11315 /* Convert (X - c) <= X to true. */
11316 if (!HONOR_NANS (arg1)
11317 && code == LE_EXPR
11318 && ((code0 == MINUS_EXPR && is_positive >= 0)
11319 || (code0 == PLUS_EXPR && is_positive <= 0)))
11321 if (TREE_CODE (arg01) == INTEGER_CST
11322 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
11323 fold_overflow_warning (("assuming signed overflow does not "
11324 "occur when assuming that "
11325 "(X - c) <= X is always true"),
11326 WARN_STRICT_OVERFLOW_ALL);
11327 return constant_boolean_node (1, type);
11330 /* Convert (X + c) >= X to true. */
11331 if (!HONOR_NANS (arg1)
11332 && code == GE_EXPR
11333 && ((code0 == PLUS_EXPR && is_positive >= 0)
11334 || (code0 == MINUS_EXPR && is_positive <= 0)))
11336 if (TREE_CODE (arg01) == INTEGER_CST
11337 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
11338 fold_overflow_warning (("assuming signed overflow does not "
11339 "occur when assuming that "
11340 "(X + c) >= X is always true"),
11341 WARN_STRICT_OVERFLOW_ALL);
11342 return constant_boolean_node (1, type);
11345 if (TREE_CODE (arg01) == INTEGER_CST)
11347 /* Convert X + c > X and X - c < X to true for integers. */
11348 if (code == GT_EXPR
11349 && ((code0 == PLUS_EXPR && is_positive > 0)
11350 || (code0 == MINUS_EXPR && is_positive < 0)))
11352 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
11353 fold_overflow_warning (("assuming signed overflow does "
11354 "not occur when assuming that "
11355 "(X + c) > X is always true"),
11356 WARN_STRICT_OVERFLOW_ALL);
11357 return constant_boolean_node (1, type);
11360 if (code == LT_EXPR
11361 && ((code0 == MINUS_EXPR && is_positive > 0)
11362 || (code0 == PLUS_EXPR && is_positive < 0)))
11364 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
11365 fold_overflow_warning (("assuming signed overflow does "
11366 "not occur when assuming that "
11367 "(X - c) < X is always true"),
11368 WARN_STRICT_OVERFLOW_ALL);
11369 return constant_boolean_node (1, type);
11372 /* Convert X + c <= X and X - c >= X to false for integers. */
11373 if (code == LE_EXPR
11374 && ((code0 == PLUS_EXPR && is_positive > 0)
11375 || (code0 == MINUS_EXPR && is_positive < 0)))
11377 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
11378 fold_overflow_warning (("assuming signed overflow does "
11379 "not occur when assuming that "
11380 "(X + c) <= X is always false"),
11381 WARN_STRICT_OVERFLOW_ALL);
11382 return constant_boolean_node (0, type);
11385 if (code == GE_EXPR
11386 && ((code0 == MINUS_EXPR && is_positive > 0)
11387 || (code0 == PLUS_EXPR && is_positive < 0)))
11389 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
11390 fold_overflow_warning (("assuming signed overflow does "
11391 "not occur when assuming that "
11392 "(X - c) >= X is always false"),
11393 WARN_STRICT_OVERFLOW_ALL);
11394 return constant_boolean_node (0, type);
11399 /* If we are comparing an ABS_EXPR with a constant, we can
11400 convert all the cases into explicit comparisons, but they may
11401 well not be faster than doing the ABS and one comparison.
11402 But ABS (X) <= C is a range comparison, which becomes a subtraction
11403 and a comparison, and is probably faster. */
11404 if (code == LE_EXPR
11405 && TREE_CODE (arg1) == INTEGER_CST
11406 && TREE_CODE (arg0) == ABS_EXPR
11407 && ! TREE_SIDE_EFFECTS (arg0)
11408 && (0 != (tem = negate_expr (arg1)))
11409 && TREE_CODE (tem) == INTEGER_CST
11410 && !TREE_OVERFLOW (tem))
11411 return fold_build2_loc (loc, TRUTH_ANDIF_EXPR, type,
11412 build2 (GE_EXPR, type,
11413 TREE_OPERAND (arg0, 0), tem),
11414 build2 (LE_EXPR, type,
11415 TREE_OPERAND (arg0, 0), arg1));
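/* For example, abs (x) <= 10 becomes x >= -10 && x <= 10, given
   that -10 is representable in the type without overflow, as
   checked above.  */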
11417 /* Convert ABS_EXPR<x> >= 0 to true. */
11418 strict_overflow_p = false;
11419 if (code == GE_EXPR
11420 && (integer_zerop (arg1)
11421 || (! HONOR_NANS (arg0)
11422 && real_zerop (arg1)))
11423 && tree_expr_nonnegative_warnv_p (arg0, &strict_overflow_p))
11425 if (strict_overflow_p)
11426 fold_overflow_warning (("assuming signed overflow does not occur "
11427 "when simplifying comparison of "
11428 "absolute value and zero"),
11429 WARN_STRICT_OVERFLOW_CONDITIONAL);
11430 return omit_one_operand_loc (loc, type,
11431 constant_boolean_node (true, type),
11432 arg0);
11435 /* Convert ABS_EXPR<x> < 0 to false. */
11436 strict_overflow_p = false;
11437 if (code == LT_EXPR
11438 && (integer_zerop (arg1) || real_zerop (arg1))
11439 && tree_expr_nonnegative_warnv_p (arg0, &strict_overflow_p))
11441 if (strict_overflow_p)
11442 fold_overflow_warning (("assuming signed overflow does not occur "
11443 "when simplifying comparison of "
11444 "absolute value and zero"),
11445 WARN_STRICT_OVERFLOW_CONDITIONAL);
11446 return omit_one_operand_loc (loc, type,
11447 constant_boolean_node (false, type),
11448 arg0);
11451 /* If X is unsigned, convert X < (1 << Y) into X >> Y == 0
11452 and similarly for >= into !=. */
11453 if ((code == LT_EXPR || code == GE_EXPR)
11454 && TYPE_UNSIGNED (TREE_TYPE (arg0))
11455 && TREE_CODE (arg1) == LSHIFT_EXPR
11456 && integer_onep (TREE_OPERAND (arg1, 0)))
11457 return build2_loc (loc, code == LT_EXPR ? EQ_EXPR : NE_EXPR, type,
11458 build2 (RSHIFT_EXPR, TREE_TYPE (arg0), arg0,
11459 TREE_OPERAND (arg1, 1)),
11460 build_zero_cst (TREE_TYPE (arg0)));
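/* For unsigned x, x < (1 << y) holds exactly when x has no bits
   set at position y or above, which is the same as (x >> y) == 0.  */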
11462 /* Similarly for X < (cast) (1 << Y). But cast can't be narrowing,
11463 otherwise Y might be >= # of bits in X's type and thus e.g.
11464 (unsigned char) (1 << Y) for Y 15 might be 0.
11465 If the cast is widening, then 1 << Y should have unsigned type,
11466 otherwise if Y is number of bits in the signed shift type minus 1,
11467 we can't optimize this. E.g. (unsigned long long) (1 << Y) for Y
11468 31 might be 0xffffffff80000000. */
11469 if ((code == LT_EXPR || code == GE_EXPR)
11470 && TYPE_UNSIGNED (TREE_TYPE (arg0))
11471 && CONVERT_EXPR_P (arg1)
11472 && TREE_CODE (TREE_OPERAND (arg1, 0)) == LSHIFT_EXPR
11473 && (element_precision (TREE_TYPE (arg1))
11474 >= element_precision (TREE_TYPE (TREE_OPERAND (arg1, 0))))
11475 && (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg1, 0)))
11476 || (element_precision (TREE_TYPE (arg1))
11477 == element_precision (TREE_TYPE (TREE_OPERAND (arg1, 0)))))
11478 && integer_onep (TREE_OPERAND (TREE_OPERAND (arg1, 0), 0)))
11480 tem = build2 (RSHIFT_EXPR, TREE_TYPE (arg0), arg0,
11481 TREE_OPERAND (TREE_OPERAND (arg1, 0), 1));
11482 return build2_loc (loc, code == LT_EXPR ? EQ_EXPR : NE_EXPR, type,
11483 fold_convert_loc (loc, TREE_TYPE (arg0), tem),
11484 build_zero_cst (TREE_TYPE (arg0)));
11487 return NULL_TREE;
11489 case UNORDERED_EXPR:
11490 case ORDERED_EXPR:
11491 case UNLT_EXPR:
11492 case UNLE_EXPR:
11493 case UNGT_EXPR:
11494 case UNGE_EXPR:
11495 case UNEQ_EXPR:
11496 case LTGT_EXPR:
11497 /* Fold (double)float1 CMP (double)float2 into float1 CMP float2. */
11499 tree targ0 = strip_float_extensions (arg0);
11500 tree targ1 = strip_float_extensions (arg1);
11501 tree newtype = TREE_TYPE (targ0);
11503 if (TYPE_PRECISION (TREE_TYPE (targ1)) > TYPE_PRECISION (newtype))
11504 newtype = TREE_TYPE (targ1);
11506 if (TYPE_PRECISION (newtype) < TYPE_PRECISION (TREE_TYPE (arg0)))
11507 return fold_build2_loc (loc, code, type,
11508 fold_convert_loc (loc, newtype, targ0),
11509 fold_convert_loc (loc, newtype, targ1));
11512 return NULL_TREE;
11514 case COMPOUND_EXPR:
11515 /* When pedantic, a compound expression can be neither an lvalue
11516 nor an integer constant expression. */
11517 if (TREE_SIDE_EFFECTS (arg0) || TREE_CONSTANT (arg1))
11518 return NULL_TREE;
11519 /* Don't let (0, 0) be null pointer constant. */
11520 tem = integer_zerop (arg1) ? build1 (NOP_EXPR, type, arg1)
11521 : fold_convert_loc (loc, type, arg1);
11522 return pedantic_non_lvalue_loc (loc, tem);
11524 case ASSERT_EXPR:
11525 /* An ASSERT_EXPR should never be passed to fold_binary. */
11526 gcc_unreachable ();
11528 default:
11529 return NULL_TREE;
11530 } /* switch (code) */
11533 /* Callback for walk_tree, looking for LABEL_EXPR. Return *TP if it is
11534 a LABEL_EXPR; otherwise return NULL_TREE. Do not check the subtrees
11535 of GOTO_EXPR. */
11537 static tree
11538 contains_label_1 (tree *tp, int *walk_subtrees, void *data ATTRIBUTE_UNUSED)
11540 switch (TREE_CODE (*tp))
11542 case LABEL_EXPR:
11543 return *tp;
11545 case GOTO_EXPR:
11546 *walk_subtrees = 0;
11548 /* ... fall through ... */
11550 default:
11551 return NULL_TREE;
11555 /* Return whether the sub-tree ST contains a label which is accessible from
11556 outside the sub-tree. */
11558 static bool
11559 contains_label_p (tree st)
11561 return
11562 (walk_tree_without_duplicates (&st, contains_label_1, NULL) != NULL_TREE);
11565 /* Fold a ternary expression of code CODE and type TYPE with operands
11566 OP0, OP1, and OP2. Return the folded expression if folding is
11567 successful. Otherwise, return NULL_TREE. */
11569 tree
11570 fold_ternary_loc (location_t loc, enum tree_code code, tree type,
11571 tree op0, tree op1, tree op2)
11573 tree tem;
11574 tree arg0 = NULL_TREE, arg1 = NULL_TREE, arg2 = NULL_TREE;
11575 enum tree_code_class kind = TREE_CODE_CLASS (code);
11577 gcc_assert (IS_EXPR_CODE_CLASS (kind)
11578 && TREE_CODE_LENGTH (code) == 3);
11580 /* If this is a commutative operation, and OP0 is a constant, move it
11581 to OP1 to reduce the number of tests below. */
11582 if (commutative_ternary_tree_code (code)
11583 && tree_swap_operands_p (op0, op1, true))
11584 return fold_build3_loc (loc, code, type, op1, op0, op2);
11586 tem = generic_simplify (loc, code, type, op0, op1, op2);
11587 if (tem)
11588 return tem;
11590 /* Strip any conversions that don't change the mode. This is safe
11591 for every expression, except for a comparison expression because
11592 its signedness is derived from its operands. So, in the latter
11593 case, only strip conversions that don't change the signedness.
11595 Note that this is done as an internal manipulation within the
11596 constant folder, in order to find the simplest representation of
11597 the arguments so that their form can be studied. In any case,
11598 the appropriate type conversions should be put back in the tree
11599 that will get out of the constant folder. */
11600 if (op0)
11602 arg0 = op0;
11603 STRIP_NOPS (arg0);
11606 if (op1)
11608 arg1 = op1;
11609 STRIP_NOPS (arg1);
11612 if (op2)
11614 arg2 = op2;
11615 STRIP_NOPS (arg2);
11618 switch (code)
11620 case COMPONENT_REF:
11621 if (TREE_CODE (arg0) == CONSTRUCTOR
11622 && ! type_contains_placeholder_p (TREE_TYPE (arg0)))
11624 unsigned HOST_WIDE_INT idx;
11625 tree field, value;
11626 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (arg0), idx, field, value)
11627 if (field == arg1)
11628 return value;
11630 return NULL_TREE;
11632 case COND_EXPR:
11633 case VEC_COND_EXPR:
11634 /* Pedantic ANSI C says that a conditional expression is never an lvalue,
11635 so all simple results must be passed through pedantic_non_lvalue. */
11636 if (TREE_CODE (arg0) == INTEGER_CST)
11638 tree unused_op = integer_zerop (arg0) ? op1 : op2;
11639 tem = integer_zerop (arg0) ? op2 : op1;
11640 /* Only optimize constant conditions when the selected branch
11641 has the same type as the COND_EXPR. This avoids optimizing
11642 away "c ? x : throw", where the throw has a void type.
11643 Avoid throwing away an operand that contains a label. */
11644 if ((!TREE_SIDE_EFFECTS (unused_op)
11645 || !contains_label_p (unused_op))
11646 && (! VOID_TYPE_P (TREE_TYPE (tem))
11647 || VOID_TYPE_P (type)))
11648 return pedantic_non_lvalue_loc (loc, tem);
11649 return NULL_TREE;
11651 else if (TREE_CODE (arg0) == VECTOR_CST)
11653 if ((TREE_CODE (arg1) == VECTOR_CST
11654 || TREE_CODE (arg1) == CONSTRUCTOR)
11655 && (TREE_CODE (arg2) == VECTOR_CST
11656 || TREE_CODE (arg2) == CONSTRUCTOR))
11658 unsigned int nelts = TYPE_VECTOR_SUBPARTS (type), i;
11659 unsigned char *sel = XALLOCAVEC (unsigned char, nelts);
11660 gcc_assert (nelts == VECTOR_CST_NELTS (arg0));
11661 for (i = 0; i < nelts; i++)
11663 tree val = VECTOR_CST_ELT (arg0, i);
11664 if (integer_all_onesp (val))
11665 sel[i] = i;
11666 else if (integer_zerop (val))
11667 sel[i] = nelts + i;
11668 else /* Currently unreachable. */
11669 return NULL_TREE;
11671 tree t = fold_vec_perm (type, arg1, arg2, sel);
11672 if (t != NULL_TREE)
11673 return t;
11677 /* If we have A op B ? A : C, we may be able to convert this to a
11678 simpler expression, depending on the operation and the values
11679 of B and C. Signed zeros prevent all of these transformations,
11680 for reasons given above each one.
11682 Also try swapping the arguments and inverting the conditional. */
11683 if (COMPARISON_CLASS_P (arg0)
11684 && operand_equal_for_comparison_p (TREE_OPERAND (arg0, 0),
11685 arg1, TREE_OPERAND (arg0, 1))
11686 && !HONOR_SIGNED_ZEROS (element_mode (arg1)))
11688 tem = fold_cond_expr_with_comparison (loc, type, arg0, op1, op2);
11689 if (tem)
11690 return tem;
11693 if (COMPARISON_CLASS_P (arg0)
11694 && operand_equal_for_comparison_p (TREE_OPERAND (arg0, 0),
11695 op2,
11696 TREE_OPERAND (arg0, 1))
11697 && !HONOR_SIGNED_ZEROS (element_mode (op2)))
11699 location_t loc0 = expr_location_or (arg0, loc);
11700 tem = fold_invert_truthvalue (loc0, arg0);
11701 if (tem && COMPARISON_CLASS_P (tem))
11703 tem = fold_cond_expr_with_comparison (loc, type, tem, op2, op1);
11704 if (tem)
11705 return tem;
11709 /* If the second operand is simpler than the third, swap them
11710 since that produces better jump optimization results. */
11711 if (truth_value_p (TREE_CODE (arg0))
11712 && tree_swap_operands_p (op1, op2, false))
11714 location_t loc0 = expr_location_or (arg0, loc);
11715 /* See if this can be inverted. If it can't, possibly because
11716 it was a floating-point inequality comparison, don't do
11717 anything. */
11718 tem = fold_invert_truthvalue (loc0, arg0);
11719 if (tem)
11720 return fold_build3_loc (loc, code, type, tem, op2, op1);
11723 /* Convert A ? 1 : 0 to simply A. */
11724 if ((code == VEC_COND_EXPR ? integer_all_onesp (op1)
11725 : (integer_onep (op1)
11726 && !VECTOR_TYPE_P (type)))
11727 && integer_zerop (op2)
11728 /* If we try to convert OP0 to our type, the
11729 call to fold will try to move the conversion inside
11730 a COND, which will recurse. In that case, the COND_EXPR
11731 is probably the best choice, so leave it alone. */
11732 && type == TREE_TYPE (arg0))
11733 return pedantic_non_lvalue_loc (loc, arg0);
11735 /* Convert A ? 0 : 1 to !A. This prefers the use of NOT_EXPR
11736 over COND_EXPR in cases such as floating point comparisons. */
11737 if (integer_zerop (op1)
11738 && (code == VEC_COND_EXPR ? integer_all_onesp (op2)
11739 : (integer_onep (op2)
11740 && !VECTOR_TYPE_P (type)))
11741 && truth_value_p (TREE_CODE (arg0)))
11742 return pedantic_non_lvalue_loc (loc,
11743 fold_convert_loc (loc, type,
11744 invert_truthvalue_loc (loc,
11745 arg0)));
11747 /* A < 0 ? <sign bit of A> : 0 is simply (A & <sign bit of A>). */
11748 if (TREE_CODE (arg0) == LT_EXPR
11749 && integer_zerop (TREE_OPERAND (arg0, 1))
11750 && integer_zerop (op2)
11751 && (tem = sign_bit_p (TREE_OPERAND (arg0, 0), arg1)))
11753 /* sign_bit_p looks through both zero and sign extensions,
11754 but for this optimization only sign extensions are
11755 usable. */
11756 tree tem2 = TREE_OPERAND (arg0, 0);
11757 while (tem != tem2)
11759 if (TREE_CODE (tem2) != NOP_EXPR
11760 || TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (tem2, 0))))
11762 tem = NULL_TREE;
11763 break;
11765 tem2 = TREE_OPERAND (tem2, 0);
11767 /* sign_bit_p only checks ARG1 bits within A's precision.
11768 If <sign bit of A> has wider type than A, bits outside
11769 of A's precision in <sign bit of A> need to be checked.
11770 If they are all 0, this optimization needs to be done
11771 in unsigned A's type; if they are all 1, in signed A's type;
11772 otherwise this can't be done. */
11773 if (tem
11774 && TYPE_PRECISION (TREE_TYPE (tem))
11775 < TYPE_PRECISION (TREE_TYPE (arg1))
11776 && TYPE_PRECISION (TREE_TYPE (tem))
11777 < TYPE_PRECISION (type))
11779 int inner_width, outer_width;
11780 tree tem_type;
11782 inner_width = TYPE_PRECISION (TREE_TYPE (tem));
11783 outer_width = TYPE_PRECISION (TREE_TYPE (arg1));
11784 if (outer_width > TYPE_PRECISION (type))
11785 outer_width = TYPE_PRECISION (type);
11787 wide_int mask = wi::shifted_mask
11788 (inner_width, outer_width - inner_width, false,
11789 TYPE_PRECISION (TREE_TYPE (arg1)));
11791 wide_int common = mask & arg1;
11792 if (common == mask)
11794 tem_type = signed_type_for (TREE_TYPE (tem));
11795 tem = fold_convert_loc (loc, tem_type, tem);
11797 else if (common == 0)
11799 tem_type = unsigned_type_for (TREE_TYPE (tem));
11800 tem = fold_convert_loc (loc, tem_type, tem);
11802 else
11803 tem = NULL;
11806 if (tem)
11807 return
11808 fold_convert_loc (loc, type,
11809 fold_build2_loc (loc, BIT_AND_EXPR,
11810 TREE_TYPE (tem), tem,
11811 fold_convert_loc (loc,
11812 TREE_TYPE (tem),
11813 arg1)));
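/* For example, assuming a 32-bit type, x < 0 ? 0x80000000 : 0
   folds to x & 0x80000000: the sign bit is set exactly when x is
   negative.  */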
11816 /* (A >> N) & 1 ? (1 << N) : 0 is simply A & (1 << N). A & 1 was
11817 already handled above. */
11818 if (TREE_CODE (arg0) == BIT_AND_EXPR
11819 && integer_onep (TREE_OPERAND (arg0, 1))
11820 && integer_zerop (op2)
11821 && integer_pow2p (arg1))
11823 tree tem = TREE_OPERAND (arg0, 0);
11824 STRIP_NOPS (tem);
11825 if (TREE_CODE (tem) == RSHIFT_EXPR
11826 && tree_fits_uhwi_p (TREE_OPERAND (tem, 1))
11827 && (unsigned HOST_WIDE_INT) tree_log2 (arg1) ==
11828 tree_to_uhwi (TREE_OPERAND (tem, 1)))
11829 return fold_build2_loc (loc, BIT_AND_EXPR, type,
11830 TREE_OPERAND (tem, 0), arg1);
11833 /* A & N ? N : 0 is simply A & N if N is a power of two. This
11834 is probably obsolete because the first operand should be a
11835 truth value (that's why we have the two cases above), but let's
11836 leave it in until we can confirm this for all front-ends. */
11837 if (integer_zerop (op2)
11838 && TREE_CODE (arg0) == NE_EXPR
11839 && integer_zerop (TREE_OPERAND (arg0, 1))
11840 && integer_pow2p (arg1)
11841 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_AND_EXPR
11842 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
11843 arg1, OEP_ONLY_CONST))
11844 return pedantic_non_lvalue_loc (loc,
11845 fold_convert_loc (loc, type,
11846 TREE_OPERAND (arg0, 0)));
11848 /* Disable the transformations below for vectors, since
11849 fold_binary_op_with_conditional_arg may undo them immediately,
11850 yielding an infinite loop. */
11851 if (code == VEC_COND_EXPR)
11852 return NULL_TREE;
11854 /* Convert A ? B : 0 into A && B if A and B are truth values. */
11855 if (integer_zerop (op2)
11856 && truth_value_p (TREE_CODE (arg0))
11857 && truth_value_p (TREE_CODE (arg1))
11858 && (code == VEC_COND_EXPR || !VECTOR_TYPE_P (type)))
11859 return fold_build2_loc (loc, code == VEC_COND_EXPR ? BIT_AND_EXPR
11860 : TRUTH_ANDIF_EXPR,
11861 type, fold_convert_loc (loc, type, arg0), arg1);
11863 /* Convert A ? B : 1 into !A || B if A and B are truth values. */
11864 if (code == VEC_COND_EXPR ? integer_all_onesp (op2) : integer_onep (op2)
11865 && truth_value_p (TREE_CODE (arg0))
11866 && truth_value_p (TREE_CODE (arg1))
11867 && (code == VEC_COND_EXPR || !VECTOR_TYPE_P (type)))
11869 location_t loc0 = expr_location_or (arg0, loc);
11870 /* Only perform transformation if ARG0 is easily inverted. */
11871 tem = fold_invert_truthvalue (loc0, arg0);
11872 if (tem)
11873 return fold_build2_loc (loc, code == VEC_COND_EXPR
11874 ? BIT_IOR_EXPR
11875 : TRUTH_ORIF_EXPR,
11876 type, fold_convert_loc (loc, type, tem),
11877 arg1);
11880 /* Convert A ? 0 : B into !A && B if A and B are truth values. */
11881 if (integer_zerop (arg1)
11882 && truth_value_p (TREE_CODE (arg0))
11883 && truth_value_p (TREE_CODE (op2))
11884 && (code == VEC_COND_EXPR || !VECTOR_TYPE_P (type)))
11886 location_t loc0 = expr_location_or (arg0, loc);
11887 /* Only perform transformation if ARG0 is easily inverted. */
11888 tem = fold_invert_truthvalue (loc0, arg0);
11889 if (tem)
11890 return fold_build2_loc (loc, code == VEC_COND_EXPR
11891 ? BIT_AND_EXPR : TRUTH_ANDIF_EXPR,
11892 type, fold_convert_loc (loc, type, tem),
11893 op2);
11896 /* Convert A ? 1 : B into A || B if A and B are truth values. */
11897 if (code == VEC_COND_EXPR ? integer_all_onesp (arg1) : integer_onep (arg1)
11898 && truth_value_p (TREE_CODE (arg0))
11899 && truth_value_p (TREE_CODE (op2))
11900 && (code == VEC_COND_EXPR || !VECTOR_TYPE_P (type)))
11901 return fold_build2_loc (loc, code == VEC_COND_EXPR
11902 ? BIT_IOR_EXPR : TRUTH_ORIF_EXPR,
11903 type, fold_convert_loc (loc, type, arg0), op2);
11905 return NULL_TREE;
11907 case CALL_EXPR:
11908 /* CALL_EXPRs used to be ternary exprs. Catch any mistaken uses
11909 of fold_ternary on them. */
11910 gcc_unreachable ();
11912 case BIT_FIELD_REF:
11913 if ((TREE_CODE (arg0) == VECTOR_CST
11914 || (TREE_CODE (arg0) == CONSTRUCTOR
11915 && TREE_CODE (TREE_TYPE (arg0)) == VECTOR_TYPE))
11916 && (type == TREE_TYPE (TREE_TYPE (arg0))
11917 || (TREE_CODE (type) == VECTOR_TYPE
11918 && TREE_TYPE (type) == TREE_TYPE (TREE_TYPE (arg0)))))
11920 tree eltype = TREE_TYPE (TREE_TYPE (arg0));
11921 unsigned HOST_WIDE_INT width = tree_to_uhwi (TYPE_SIZE (eltype));
11922 unsigned HOST_WIDE_INT n = tree_to_uhwi (arg1);
11923 unsigned HOST_WIDE_INT idx = tree_to_uhwi (op2);
11925 if (n != 0
11926 && (idx % width) == 0
11927 && (n % width) == 0
11928 && ((idx + n) / width) <= TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0)))
11930 idx = idx / width;
11931 n = n / width;
11933 if (TREE_CODE (arg0) == VECTOR_CST)
11935 if (n == 1)
11936 return VECTOR_CST_ELT (arg0, idx);
11938 tree *vals = XALLOCAVEC (tree, n);
11939 for (unsigned i = 0; i < n; ++i)
11940 vals[i] = VECTOR_CST_ELT (arg0, idx + i);
11941 return build_vector (type, vals);
11944 /* Constructor elements can be subvectors. */
11945 unsigned HOST_WIDE_INT k = 1;
11946 if (CONSTRUCTOR_NELTS (arg0) != 0)
11948 tree cons_elem = TREE_TYPE (CONSTRUCTOR_ELT (arg0, 0)->value);
11949 if (TREE_CODE (cons_elem) == VECTOR_TYPE)
11950 k = TYPE_VECTOR_SUBPARTS (cons_elem);
11953 /* We keep an exact subset of the constructor elements. */
11954 if ((idx % k) == 0 && (n % k) == 0)
11956 if (CONSTRUCTOR_NELTS (arg0) == 0)
11957 return build_constructor (type, NULL);
11958 idx /= k;
11959 n /= k;
11960 if (n == 1)
11962 if (idx < CONSTRUCTOR_NELTS (arg0))
11963 return CONSTRUCTOR_ELT (arg0, idx)->value;
11964 return build_zero_cst (type);
11967 vec<constructor_elt, va_gc> *vals;
11968 vec_alloc (vals, n);
11969 for (unsigned i = 0;
11970 i < n && idx + i < CONSTRUCTOR_NELTS (arg0);
11971 ++i)
11972 CONSTRUCTOR_APPEND_ELT (vals, NULL_TREE,
11973 CONSTRUCTOR_ELT
11974 (arg0, idx + i)->value);
11975 return build_constructor (type, vals);
11977 /* The bitfield references a single constructor element. */
11978 else if (idx + n <= (idx / k + 1) * k)
11980 if (CONSTRUCTOR_NELTS (arg0) <= idx / k)
11981 return build_zero_cst (type);
11982 else if (n == k)
11983 return CONSTRUCTOR_ELT (arg0, idx / k)->value;
11984 else
11985 return fold_build3_loc (loc, code, type,
11986 CONSTRUCTOR_ELT (arg0, idx / k)->value, op1,
11987 build_int_cst (TREE_TYPE (op2), (idx % k) * width));
11992 /* A bit-field-ref that referenced the full argument can be stripped. */
11993 if (INTEGRAL_TYPE_P (TREE_TYPE (arg0))
11994 && TYPE_PRECISION (TREE_TYPE (arg0)) == tree_to_uhwi (arg1)
11995 && integer_zerop (op2))
11996 return fold_convert_loc (loc, type, arg0);
11998 /* On constants we can use native encode/interpret to constant
11999 fold (nearly) all BIT_FIELD_REFs. */
12000 if (CONSTANT_CLASS_P (arg0)
12001 && can_native_interpret_type_p (type)
12002 && tree_fits_uhwi_p (TYPE_SIZE_UNIT (TREE_TYPE (arg0)))
12003 /* This limitation should not be necessary; we just need to
12004 round this up to the mode size. */
12005 && tree_to_uhwi (op1) % BITS_PER_UNIT == 0
12006 /* Need bit-shifting of the buffer to relax the following. */
12007 && tree_to_uhwi (op2) % BITS_PER_UNIT == 0)
12009 unsigned HOST_WIDE_INT bitpos = tree_to_uhwi (op2);
12010 unsigned HOST_WIDE_INT bitsize = tree_to_uhwi (op1);
12011 unsigned HOST_WIDE_INT clen;
12012 clen = tree_to_uhwi (TYPE_SIZE_UNIT (TREE_TYPE (arg0)));
12013 /* ??? We cannot tell native_encode_expr to start at
12014 some random byte only. So limit ourselves to a reasonable amount
12015 of work. */
12016 if (clen <= 4096)
12018 unsigned char *b = XALLOCAVEC (unsigned char, clen);
12019 unsigned HOST_WIDE_INT len = native_encode_expr (arg0, b, clen);
12020 if (len > 0
12021 && len * BITS_PER_UNIT >= bitpos + bitsize)
12023 tree v = native_interpret_expr (type,
12024 b + bitpos / BITS_PER_UNIT,
12025 bitsize / BITS_PER_UNIT);
12026 if (v)
12027 return v;
12032 return NULL_TREE;
12034 case FMA_EXPR:
12035 /* For integers we can decompose the FMA if possible. */
12036 if (TREE_CODE (arg0) == INTEGER_CST
12037 && TREE_CODE (arg1) == INTEGER_CST)
12038 return fold_build2_loc (loc, PLUS_EXPR, type,
12039 const_binop (MULT_EXPR, arg0, arg1), arg2);
12040 if (integer_zerop (arg2))
12041 return fold_build2_loc (loc, MULT_EXPR, type, arg0, arg1);
12043 return fold_fma (loc, type, arg0, arg1, arg2);
12045 case VEC_PERM_EXPR:
12046 if (TREE_CODE (arg2) == VECTOR_CST)
12048 unsigned int nelts = TYPE_VECTOR_SUBPARTS (type), i, mask, mask2;
12049 unsigned char *sel = XALLOCAVEC (unsigned char, 2 * nelts);
12050 unsigned char *sel2 = sel + nelts;
12051 bool need_mask_canon = false;
12052 bool need_mask_canon2 = false;
12053 bool all_in_vec0 = true;
12054 bool all_in_vec1 = true;
12055 bool maybe_identity = true;
12056 bool single_arg = (op0 == op1);
12057 bool changed = false;
12059 mask2 = 2 * nelts - 1;
12060 mask = single_arg ? (nelts - 1) : mask2;
12061 gcc_assert (nelts == VECTOR_CST_NELTS (arg2));
12062 for (i = 0; i < nelts; i++)
12064 tree val = VECTOR_CST_ELT (arg2, i);
12065 if (TREE_CODE (val) != INTEGER_CST)
12066 return NULL_TREE;
12068 /* Make sure that the perm value is in an acceptable
12069 range. */
12070 wide_int t = val;
12071 need_mask_canon |= wi::gtu_p (t, mask);
12072 need_mask_canon2 |= wi::gtu_p (t, mask2);
12073 sel[i] = t.to_uhwi () & mask;
12074 sel2[i] = t.to_uhwi () & mask2;
12076 if (sel[i] < nelts)
12077 all_in_vec1 = false;
12078 else
12079 all_in_vec0 = false;
12081 if ((sel[i] & (nelts-1)) != i)
12082 maybe_identity = false;
12085 if (maybe_identity)
12087 if (all_in_vec0)
12088 return op0;
12089 if (all_in_vec1)
12090 return op1;
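/* For example, with four elements, VEC_PERM <a, b, {0,1,2,3}> is
   just a and VEC_PERM <a, b, {4,5,6,7}> is just b, which is what
   the two returns above implement.  */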
12093 if (all_in_vec0)
12094 op1 = op0;
12095 else if (all_in_vec1)
12097 op0 = op1;
12098 for (i = 0; i < nelts; i++)
12099 sel[i] -= nelts;
12100 need_mask_canon = true;
12103 if ((TREE_CODE (op0) == VECTOR_CST
12104 || TREE_CODE (op0) == CONSTRUCTOR)
12105 && (TREE_CODE (op1) == VECTOR_CST
12106 || TREE_CODE (op1) == CONSTRUCTOR))
12108 tree t = fold_vec_perm (type, op0, op1, sel);
12109 if (t != NULL_TREE)
12110 return t;
12113 if (op0 == op1 && !single_arg)
12114 changed = true;
12116 /* Some targets are deficient and fail to expand a single
12117 argument permutation while still allowing an equivalent
12118 2-argument version. */
12119 if (need_mask_canon && arg2 == op2
12120 && !can_vec_perm_p (TYPE_MODE (type), false, sel)
12121 && can_vec_perm_p (TYPE_MODE (type), false, sel2))
12123 need_mask_canon = need_mask_canon2;
12124 sel = sel2;
12127 if (need_mask_canon && arg2 == op2)
12129 tree *tsel = XALLOCAVEC (tree, nelts);
12130 tree eltype = TREE_TYPE (TREE_TYPE (arg2));
12131 for (i = 0; i < nelts; i++)
12132 tsel[i] = build_int_cst (eltype, sel[i]);
12133 op2 = build_vector (TREE_TYPE (arg2), tsel);
12134 changed = true;
12137 if (changed)
12138 return build3_loc (loc, VEC_PERM_EXPR, type, op0, op1, op2);
12140 return NULL_TREE;
12142 default:
12143 return NULL_TREE;
12144 } /* switch (code) */
12147 /* Perform constant folding and related simplification of EXPR.
12148 The related simplifications include x*1 => x, x*0 => 0, etc.,
12149 and application of the associative law.
12150 NOP_EXPR conversions may be removed freely (as long as we
12151 are careful not to change the type of the overall expression).
12152 We cannot simplify through a CONVERT_EXPR, FIX_EXPR or FLOAT_EXPR,
12153 but we can constant-fold them if they have constant operands. */
12155 #ifdef ENABLE_FOLD_CHECKING
12156 # define fold(x) fold_1 (x)
12157 static tree fold_1 (tree);
12158 static
12159 #endif
12160 tree
12161 fold (tree expr)
12163 const tree t = expr;
12164 enum tree_code code = TREE_CODE (t);
12165 enum tree_code_class kind = TREE_CODE_CLASS (code);
12166 tree tem;
12167 location_t loc = EXPR_LOCATION (expr);
12169 /* Return right away if a constant. */
12170 if (kind == tcc_constant)
12171 return t;
12173 /* CALL_EXPR-like objects with variable numbers of operands are
12174 treated specially. */
12175 if (kind == tcc_vl_exp)
12177 if (code == CALL_EXPR)
12179 tem = fold_call_expr (loc, expr, false);
12180 return tem ? tem : expr;
12182 return expr;
12185 if (IS_EXPR_CODE_CLASS (kind))
12187 tree type = TREE_TYPE (t);
12188 tree op0, op1, op2;
12190 switch (TREE_CODE_LENGTH (code))
12192 case 1:
12193 op0 = TREE_OPERAND (t, 0);
12194 tem = fold_unary_loc (loc, code, type, op0);
12195 return tem ? tem : expr;
12196 case 2:
12197 op0 = TREE_OPERAND (t, 0);
12198 op1 = TREE_OPERAND (t, 1);
12199 tem = fold_binary_loc (loc, code, type, op0, op1);
12200 return tem ? tem : expr;
12201 case 3:
12202 op0 = TREE_OPERAND (t, 0);
12203 op1 = TREE_OPERAND (t, 1);
12204 op2 = TREE_OPERAND (t, 2);
12205 tem = fold_ternary_loc (loc, code, type, op0, op1, op2);
12206 return tem ? tem : expr;
12207 default:
12208 break;
12212 switch (code)
12214 case ARRAY_REF:
12216 tree op0 = TREE_OPERAND (t, 0);
12217 tree op1 = TREE_OPERAND (t, 1);
12219 if (TREE_CODE (op1) == INTEGER_CST
12220 && TREE_CODE (op0) == CONSTRUCTOR
12221 && ! type_contains_placeholder_p (TREE_TYPE (op0)))
12223 vec<constructor_elt, va_gc> *elts = CONSTRUCTOR_ELTS (op0);
12224 unsigned HOST_WIDE_INT end = vec_safe_length (elts);
12225 unsigned HOST_WIDE_INT begin = 0;
12227 /* Find a matching index by means of a binary search. */
12228 while (begin != end)
12230 unsigned HOST_WIDE_INT middle = (begin + end) / 2;
12231 tree index = (*elts)[middle].index;
12233 if (TREE_CODE (index) == INTEGER_CST
12234 && tree_int_cst_lt (index, op1))
12235 begin = middle + 1;
12236 else if (TREE_CODE (index) == INTEGER_CST
12237 && tree_int_cst_lt (op1, index))
12238 end = middle;
12239 else if (TREE_CODE (index) == RANGE_EXPR
12240 && tree_int_cst_lt (TREE_OPERAND (index, 1), op1))
12241 begin = middle + 1;
12242 else if (TREE_CODE (index) == RANGE_EXPR
12243 && tree_int_cst_lt (op1, TREE_OPERAND (index, 0)))
12244 end = middle;
12245 else
12246 return (*elts)[middle].value;
12250 return t;
12253 /* Return a VECTOR_CST if possible. */
12254 case CONSTRUCTOR:
12256 tree type = TREE_TYPE (t);
12257 if (TREE_CODE (type) != VECTOR_TYPE)
12258 return t;
12260 tree *vec = XALLOCAVEC (tree, TYPE_VECTOR_SUBPARTS (type));
12261 unsigned HOST_WIDE_INT idx, pos = 0;
12262 tree value;
12264 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (t), idx, value)
12266 if (!CONSTANT_CLASS_P (value))
12267 return t;
12268 if (TREE_CODE (value) == VECTOR_CST)
12270 for (unsigned i = 0; i < VECTOR_CST_NELTS (value); ++i)
12271 vec[pos++] = VECTOR_CST_ELT (value, i);
12273 else
12274 vec[pos++] = value;
12276 for (; pos < TYPE_VECTOR_SUBPARTS (type); ++pos)
12277 vec[pos] = build_zero_cst (TREE_TYPE (type));
12279 return build_vector (type, vec);
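/* For example, the constructor {1, 2} for a four-element vector
   type yields the VECTOR_CST {1, 2, 0, 0}; trailing elements not
   given in the constructor are implicitly zero.  */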
12282 case CONST_DECL:
12283 return fold (DECL_INITIAL (t));
12285 default:
12286 return t;
12287 } /* switch (code) */
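/* As a sketch of typical use, fold (build2 (PLUS_EXPR,
   integer_type_node, arg, integer_zero_node)) should hand back ARG
   itself, since x + 0 simplifies; when nothing simplifies, fold
   returns the input tree unchanged.  */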
12290 #ifdef ENABLE_FOLD_CHECKING
12291 #undef fold
12293 static void fold_checksum_tree (const_tree, struct md5_ctx *,
12294 hash_table<nofree_ptr_hash<const tree_node> > *);
12295 static void fold_check_failed (const_tree, const_tree);
12296 void print_fold_checksum (const_tree);
12298 /* When --enable-checking=fold, compute a digest of expr before
12299 and after the actual fold call to verify that fold did not
12300 accidentally change the original expr. */
12302 tree
12303 fold (tree expr)
12305 tree ret;
12306 struct md5_ctx ctx;
12307 unsigned char checksum_before[16], checksum_after[16];
12308 hash_table<nofree_ptr_hash<const tree_node> > ht (32);
12310 md5_init_ctx (&ctx);
12311 fold_checksum_tree (expr, &ctx, &ht);
12312 md5_finish_ctx (&ctx, checksum_before);
12313 ht.empty ();
12315 ret = fold_1 (expr);
12317 md5_init_ctx (&ctx);
12318 fold_checksum_tree (expr, &ctx, &ht);
12319 md5_finish_ctx (&ctx, checksum_after);
12321 if (memcmp (checksum_before, checksum_after, 16))
12322 fold_check_failed (expr, ret);
12324 return ret;
12327 void
12328 print_fold_checksum (const_tree expr)
12330 struct md5_ctx ctx;
12331 unsigned char checksum[16], cnt;
12332 hash_table<nofree_ptr_hash<const tree_node> > ht (32);
12334 md5_init_ctx (&ctx);
12335 fold_checksum_tree (expr, &ctx, &ht);
12336 md5_finish_ctx (&ctx, checksum);
12337 for (cnt = 0; cnt < 16; ++cnt)
12338 fprintf (stderr, "%02x", checksum[cnt]);
12339 putc ('\n', stderr);
12342 static void
12343 fold_check_failed (const_tree expr ATTRIBUTE_UNUSED, const_tree ret ATTRIBUTE_UNUSED)
12345 internal_error ("fold check: original tree changed by fold");
12348 static void
12349 fold_checksum_tree (const_tree expr, struct md5_ctx *ctx,
12350 hash_table<nofree_ptr_hash <const tree_node> > *ht)
12352 const tree_node **slot;
12353 enum tree_code code;
12354 union tree_node buf;
12355 int i, len;
12357 recursive_label:
12358 if (expr == NULL)
12359 return;
12360 slot = ht->find_slot (expr, INSERT);
12361 if (*slot != NULL)
12362 return;
12363 *slot = expr;
12364 code = TREE_CODE (expr);
12365 if (TREE_CODE_CLASS (code) == tcc_declaration
12366 && HAS_DECL_ASSEMBLER_NAME_P (expr))
12368 /* Allow DECL_ASSEMBLER_NAME and symtab_node to be modified. */
12369 memcpy ((char *) &buf, expr, tree_size (expr));
12370 SET_DECL_ASSEMBLER_NAME ((tree)&buf, NULL);
12371 buf.decl_with_vis.symtab_node = NULL;
12372 expr = (tree) &buf;
12374 else if (TREE_CODE_CLASS (code) == tcc_type
12375 && (TYPE_POINTER_TO (expr)
12376 || TYPE_REFERENCE_TO (expr)
12377 || TYPE_CACHED_VALUES_P (expr)
12378 || TYPE_CONTAINS_PLACEHOLDER_INTERNAL (expr)
12379 || TYPE_NEXT_VARIANT (expr)))
12381 /* Allow these fields to be modified. */
12382 tree tmp;
12383 memcpy ((char *) &buf, expr, tree_size (expr));
12384 expr = tmp = (tree) &buf;
12385 TYPE_CONTAINS_PLACEHOLDER_INTERNAL (tmp) = 0;
12386 TYPE_POINTER_TO (tmp) = NULL;
12387 TYPE_REFERENCE_TO (tmp) = NULL;
12388 TYPE_NEXT_VARIANT (tmp) = NULL;
12389 if (TYPE_CACHED_VALUES_P (tmp))
12391 TYPE_CACHED_VALUES_P (tmp) = 0;
12392 TYPE_CACHED_VALUES (tmp) = NULL;
12395 md5_process_bytes (expr, tree_size (expr), ctx);
12396 if (CODE_CONTAINS_STRUCT (code, TS_TYPED))
12397 fold_checksum_tree (TREE_TYPE (expr), ctx, ht);
12398 if (TREE_CODE_CLASS (code) != tcc_type
12399 && TREE_CODE_CLASS (code) != tcc_declaration
12400 && code != TREE_LIST
12401 && code != SSA_NAME
12402 && CODE_CONTAINS_STRUCT (code, TS_COMMON))
12403 fold_checksum_tree (TREE_CHAIN (expr), ctx, ht);
12404 switch (TREE_CODE_CLASS (code))
12406 case tcc_constant:
12407 switch (code)
12409 case STRING_CST:
12410 md5_process_bytes (TREE_STRING_POINTER (expr),
12411 TREE_STRING_LENGTH (expr), ctx);
12412 break;
12413 case COMPLEX_CST:
12414 fold_checksum_tree (TREE_REALPART (expr), ctx, ht);
12415 fold_checksum_tree (TREE_IMAGPART (expr), ctx, ht);
12416 break;
12417 case VECTOR_CST:
12418 for (i = 0; i < (int) VECTOR_CST_NELTS (expr); ++i)
12419 fold_checksum_tree (VECTOR_CST_ELT (expr, i), ctx, ht);
12420 break;
12421 default:
12422 break;
12424 break;
12425 case tcc_exceptional:
12426 switch (code)
12428 case TREE_LIST:
12429 fold_checksum_tree (TREE_PURPOSE (expr), ctx, ht);
12430 fold_checksum_tree (TREE_VALUE (expr), ctx, ht);
12431 expr = TREE_CHAIN (expr);
12432 goto recursive_label;
12433 break;
12434 case TREE_VEC:
12435 for (i = 0; i < TREE_VEC_LENGTH (expr); ++i)
12436 fold_checksum_tree (TREE_VEC_ELT (expr, i), ctx, ht);
12437 break;
12438 default:
12439 break;
12441 break;
12442 case tcc_expression:
12443 case tcc_reference:
12444 case tcc_comparison:
12445 case tcc_unary:
12446 case tcc_binary:
12447 case tcc_statement:
12448 case tcc_vl_exp:
12449 len = TREE_OPERAND_LENGTH (expr);
12450 for (i = 0; i < len; ++i)
12451 fold_checksum_tree (TREE_OPERAND (expr, i), ctx, ht);
12452 break;
12453 case tcc_declaration:
12454 fold_checksum_tree (DECL_NAME (expr), ctx, ht);
12455 fold_checksum_tree (DECL_CONTEXT (expr), ctx, ht);
12456 if (CODE_CONTAINS_STRUCT (TREE_CODE (expr), TS_DECL_COMMON))
12458 fold_checksum_tree (DECL_SIZE (expr), ctx, ht);
12459 fold_checksum_tree (DECL_SIZE_UNIT (expr), ctx, ht);
12460 fold_checksum_tree (DECL_INITIAL (expr), ctx, ht);
12461 fold_checksum_tree (DECL_ABSTRACT_ORIGIN (expr), ctx, ht);
12462 fold_checksum_tree (DECL_ATTRIBUTES (expr), ctx, ht);
12465 if (CODE_CONTAINS_STRUCT (TREE_CODE (expr), TS_DECL_NON_COMMON))
12467 if (TREE_CODE (expr) == FUNCTION_DECL)
12469 fold_checksum_tree (DECL_VINDEX (expr), ctx, ht);
12470 fold_checksum_tree (DECL_ARGUMENTS (expr), ctx, ht);
12472 fold_checksum_tree (DECL_RESULT_FLD (expr), ctx, ht);
12474 break;
12475 case tcc_type:
12476 if (TREE_CODE (expr) == ENUMERAL_TYPE)
12477 fold_checksum_tree (TYPE_VALUES (expr), ctx, ht);
12478 fold_checksum_tree (TYPE_SIZE (expr), ctx, ht);
12479 fold_checksum_tree (TYPE_SIZE_UNIT (expr), ctx, ht);
12480 fold_checksum_tree (TYPE_ATTRIBUTES (expr), ctx, ht);
12481 fold_checksum_tree (TYPE_NAME (expr), ctx, ht);
12482 if (INTEGRAL_TYPE_P (expr)
12483 || SCALAR_FLOAT_TYPE_P (expr))
12485 fold_checksum_tree (TYPE_MIN_VALUE (expr), ctx, ht);
12486 fold_checksum_tree (TYPE_MAX_VALUE (expr), ctx, ht);
12488 fold_checksum_tree (TYPE_MAIN_VARIANT (expr), ctx, ht);
12489 if (TREE_CODE (expr) == RECORD_TYPE
12490 || TREE_CODE (expr) == UNION_TYPE
12491 || TREE_CODE (expr) == QUAL_UNION_TYPE)
12492 fold_checksum_tree (TYPE_BINFO (expr), ctx, ht);
12493 fold_checksum_tree (TYPE_CONTEXT (expr), ctx, ht);
12494 break;
12495 default:
12496 break;
12500 /* Helper function for outputting the checksum of a tree T. When
12501 debugging with gdb, you can "define mynext" to be "next" followed
12502 by "call debug_fold_checksum (op0)", then just trace down till the
12503 outputs differ. */
12505 DEBUG_FUNCTION void
12506 debug_fold_checksum (const_tree t)
12508 int i;
12509 unsigned char checksum[16];
12510 struct md5_ctx ctx;
12511 hash_table<nofree_ptr_hash<const tree_node> > ht (32);
12513 md5_init_ctx (&ctx);
12514 fold_checksum_tree (t, &ctx, &ht);
12515 md5_finish_ctx (&ctx, checksum);
12516 ht.empty ();
12518 for (i = 0; i < 16; i++)
12519 fprintf (stderr, "%d ", checksum[i]);
12521 fprintf (stderr, "\n");
12524 #endif
12526 /* Fold a unary tree expression with code CODE of type TYPE with an
12527 operand OP0. LOC is the location of the resulting expression.
12528 Return a folded expression if successful. Otherwise, return a tree
12529 expression with code CODE of type TYPE with an operand OP0. */
12531 tree
12532 fold_build1_stat_loc (location_t loc,
12533 enum tree_code code, tree type, tree op0 MEM_STAT_DECL)
12535 tree tem;
12536 #ifdef ENABLE_FOLD_CHECKING
12537 unsigned char checksum_before[16], checksum_after[16];
12538 struct md5_ctx ctx;
12539 hash_table<nofree_ptr_hash<const tree_node> > ht (32);
12541 md5_init_ctx (&ctx);
12542 fold_checksum_tree (op0, &ctx, &ht);
12543 md5_finish_ctx (&ctx, checksum_before);
12544 ht.empty ();
12545 #endif
12547 tem = fold_unary_loc (loc, code, type, op0);
12548 if (!tem)
12549 tem = build1_stat_loc (loc, code, type, op0 PASS_MEM_STAT);
12551 #ifdef ENABLE_FOLD_CHECKING
12552 md5_init_ctx (&ctx);
12553 fold_checksum_tree (op0, &ctx, &ht);
12554 md5_finish_ctx (&ctx, checksum_after);
12556 if (memcmp (checksum_before, checksum_after, 16))
12557 fold_check_failed (op0, tem);
12558 #endif
12559 return tem;
12562 /* Fold a binary tree expression with code CODE of type TYPE with
12563 operands OP0 and OP1. LOC is the location of the resulting
12564 expression. Return a folded expression if successful. Otherwise,
12565 return a tree expression with code CODE of type TYPE with operands
12566 OP0 and OP1. */
12568 tree
12569 fold_build2_stat_loc (location_t loc,
12570 enum tree_code code, tree type, tree op0, tree op1
12571 MEM_STAT_DECL)
12573 tree tem;
12574 #ifdef ENABLE_FOLD_CHECKING
12575 unsigned char checksum_before_op0[16],
12576 checksum_before_op1[16],
12577 checksum_after_op0[16],
12578 checksum_after_op1[16];
12579 struct md5_ctx ctx;
12580 hash_table<nofree_ptr_hash<const tree_node> > ht (32);
12582 md5_init_ctx (&ctx);
12583 fold_checksum_tree (op0, &ctx, &ht);
12584 md5_finish_ctx (&ctx, checksum_before_op0);
12585 ht.empty ();
12587 md5_init_ctx (&ctx);
12588 fold_checksum_tree (op1, &ctx, &ht);
12589 md5_finish_ctx (&ctx, checksum_before_op1);
12590 ht.empty ();
12591 #endif
12593 tem = fold_binary_loc (loc, code, type, op0, op1);
12594 if (!tem)
12595 tem = build2_stat_loc (loc, code, type, op0, op1 PASS_MEM_STAT);
12597 #ifdef ENABLE_FOLD_CHECKING
12598 md5_init_ctx (&ctx);
12599 fold_checksum_tree (op0, &ctx, &ht);
12600 md5_finish_ctx (&ctx, checksum_after_op0);
12601 ht.empty ();
12603 if (memcmp (checksum_before_op0, checksum_after_op0, 16))
12604 fold_check_failed (op0, tem);
12606 md5_init_ctx (&ctx);
12607 fold_checksum_tree (op1, &ctx, &ht);
12608 md5_finish_ctx (&ctx, checksum_after_op1);
12610 if (memcmp (checksum_before_op1, checksum_after_op1, 16))
12611 fold_check_failed (op1, tem);
12612 #endif
12613 return tem;
12616 /* Fold a ternary tree expression with code CODE of type TYPE with
12617 operands OP0, OP1, and OP2. Return a folded expression if
12618 successful. Otherwise, return a tree expression with code CODE of
12619 type TYPE with operands OP0, OP1, and OP2. */
12621 tree
12622 fold_build3_stat_loc (location_t loc, enum tree_code code, tree type,
12623 tree op0, tree op1, tree op2 MEM_STAT_DECL)
12625 tree tem;
12626 #ifdef ENABLE_FOLD_CHECKING
12627 unsigned char checksum_before_op0[16],
12628 checksum_before_op1[16],
12629 checksum_before_op2[16],
12630 checksum_after_op0[16],
12631 checksum_after_op1[16],
12632 checksum_after_op2[16];
12633 struct md5_ctx ctx;
12634 hash_table<nofree_ptr_hash<const tree_node> > ht (32);
12636 md5_init_ctx (&ctx);
12637 fold_checksum_tree (op0, &ctx, &ht);
12638 md5_finish_ctx (&ctx, checksum_before_op0);
12639 ht.empty ();
12641 md5_init_ctx (&ctx);
12642 fold_checksum_tree (op1, &ctx, &ht);
12643 md5_finish_ctx (&ctx, checksum_before_op1);
12644 ht.empty ();
12646 md5_init_ctx (&ctx);
12647 fold_checksum_tree (op2, &ctx, &ht);
12648 md5_finish_ctx (&ctx, checksum_before_op2);
12649 ht.empty ();
12650 #endif
12652 gcc_assert (TREE_CODE_CLASS (code) != tcc_vl_exp);
12653 tem = fold_ternary_loc (loc, code, type, op0, op1, op2);
12654 if (!tem)
12655 tem = build3_stat_loc (loc, code, type, op0, op1, op2 PASS_MEM_STAT);
12657 #ifdef ENABLE_FOLD_CHECKING
12658 md5_init_ctx (&ctx);
12659 fold_checksum_tree (op0, &ctx, &ht);
12660 md5_finish_ctx (&ctx, checksum_after_op0);
12661 ht.empty ();
12663 if (memcmp (checksum_before_op0, checksum_after_op0, 16))
12664 fold_check_failed (op0, tem);
12666 md5_init_ctx (&ctx);
12667 fold_checksum_tree (op1, &ctx, &ht);
12668 md5_finish_ctx (&ctx, checksum_after_op1);
12669 ht.empty ();
12671 if (memcmp (checksum_before_op1, checksum_after_op1, 16))
12672 fold_check_failed (op1, tem);
12674 md5_init_ctx (&ctx);
12675 fold_checksum_tree (op2, &ctx, &ht);
12676 md5_finish_ctx (&ctx, checksum_after_op2);
12678 if (memcmp (checksum_before_op2, checksum_after_op2, 16))
12679 fold_check_failed (op2, tem);
12680 #endif
12681 return tem;
12684 /* Fold a CALL_EXPR of type TYPE whose function is FN and whose NARGS
12685 arguments are in ARGARRAY, with a null static chain.
12686 Return a folded expression if successful. Otherwise, return a CALL_EXPR
12687 of type TYPE from the given operands as constructed by build_call_array. */
12689 tree
12690 fold_build_call_array_loc (location_t loc, tree type, tree fn,
12691 int nargs, tree *argarray)
12693 tree tem;
12694 #ifdef ENABLE_FOLD_CHECKING
12695 unsigned char checksum_before_fn[16],
12696 checksum_before_arglist[16],
12697 checksum_after_fn[16],
12698 checksum_after_arglist[16];
12699 struct md5_ctx ctx;
12700 hash_table<nofree_ptr_hash<const tree_node> > ht (32);
12701 int i;
12703 md5_init_ctx (&ctx);
12704 fold_checksum_tree (fn, &ctx, &ht);
12705 md5_finish_ctx (&ctx, checksum_before_fn);
12706 ht.empty ();
12708 md5_init_ctx (&ctx);
12709 for (i = 0; i < nargs; i++)
12710 fold_checksum_tree (argarray[i], &ctx, &ht);
12711 md5_finish_ctx (&ctx, checksum_before_arglist);
12712 ht.empty ();
12713 #endif
12715 tem = fold_builtin_call_array (loc, type, fn, nargs, argarray);
12716 if (!tem)
12717 tem = build_call_array_loc (loc, type, fn, nargs, argarray);
12719 #ifdef ENABLE_FOLD_CHECKING
12720 md5_init_ctx (&ctx);
12721 fold_checksum_tree (fn, &ctx, &ht);
12722 md5_finish_ctx (&ctx, checksum_after_fn);
12723 ht.empty ();
12725 if (memcmp (checksum_before_fn, checksum_after_fn, 16))
12726 fold_check_failed (fn, tem);
12728 md5_init_ctx (&ctx);
12729 for (i = 0; i < nargs; i++)
12730 fold_checksum_tree (argarray[i], &ctx, &ht);
12731 md5_finish_ctx (&ctx, checksum_after_arglist);
12733 if (memcmp (checksum_before_arglist, checksum_after_arglist, 16))
12734 fold_check_failed (NULL_TREE, tem);
12735 #endif
12736 return tem;
12739 /* Perform constant folding and related simplification of initializer
12740 expression EXPR. These behave identically to "fold_buildN" but ignore
12741 potential run-time traps and exceptions that fold must preserve. */
12743 #define START_FOLD_INIT \
12744 int saved_signaling_nans = flag_signaling_nans;\
12745 int saved_trapping_math = flag_trapping_math;\
12746 int saved_rounding_math = flag_rounding_math;\
12747 int saved_trapv = flag_trapv;\
12748 int saved_folding_initializer = folding_initializer;\
12749 flag_signaling_nans = 0;\
12750 flag_trapping_math = 0;\
12751 flag_rounding_math = 0;\
12752 flag_trapv = 0;\
12753 folding_initializer = 1;
12755 #define END_FOLD_INIT \
12756 flag_signaling_nans = saved_signaling_nans;\
12757 flag_trapping_math = saved_trapping_math;\
12758 flag_rounding_math = saved_rounding_math;\
12759 flag_trapv = saved_trapv;\
12760 folding_initializer = saved_folding_initializer;
12762 tree
12763 fold_build1_initializer_loc (location_t loc, enum tree_code code,
12764 tree type, tree op)
12766 tree result;
12767 START_FOLD_INIT;
12769 result = fold_build1_loc (loc, code, type, op);
12771 END_FOLD_INIT;
12772 return result;
12775 tree
12776 fold_build2_initializer_loc (location_t loc, enum tree_code code,
12777 tree type, tree op0, tree op1)
12779 tree result;
12780 START_FOLD_INIT;
12782 result = fold_build2_loc (loc, code, type, op0, op1);
12784 END_FOLD_INIT;
12785 return result;
12788 tree
12789 fold_build_call_array_initializer_loc (location_t loc, tree type, tree fn,
12790 int nargs, tree *argarray)
12792 tree result;
12793 START_FOLD_INIT;
12795 result = fold_build_call_array_loc (loc, type, fn, nargs, argarray);
12797 END_FOLD_INIT;
12798 return result;
12801 #undef START_FOLD_INIT
12802 #undef END_FOLD_INIT
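/* [Editor's sketch -- not part of fold-const.c]  The START/END_FOLD_INIT
   pair above is the save-tweak-restore idiom for global flags.  Because
   the START macro declares block-scope locals, each pair must be used at
   most once per scope, as in the three wrappers above.  A minimal
   standalone rendering with a hypothetical flag:  */
#if 0
static int flag_example_traps = 1;

#define START_EXAMPLE_INIT \
  int saved_traps = flag_example_traps; \
  flag_example_traps = 0;

#define END_EXAMPLE_INIT \
  flag_example_traps = saved_traps;

static int
fold_example_initializer (int x)
{
  int result;
  START_EXAMPLE_INIT;
  result = x + 1;               /* "fold" with traps disabled */
  END_EXAMPLE_INIT;
  return result;
}
#endif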
12804 /* Determine if the first argument is a multiple of the second argument. Return 0 if
12805 it is not, or if we cannot easily determine that it is.
12807 An example of the sort of thing we care about (at this point; this routine
12808 could surely be made more general, and expanded to do what the *_DIV_EXPR's
12809 fold cases do now) is discovering that
12811 SAVE_EXPR (I) * SAVE_EXPR (J * 8)
12813 is a multiple of
12815 SAVE_EXPR (J * 8)
12817 when we know that the two SAVE_EXPR (J * 8) nodes are the same node.
12819 This code also handles discovering that
12821 SAVE_EXPR (I) * SAVE_EXPR (J * 8)
12823 is a multiple of 8 so we don't have to worry about dealing with a
12824 possible remainder.
12826 Note that we *look* inside a SAVE_EXPR only to determine how it was
12827 calculated; it is not safe for fold to do much of anything else with the
12828 internals of a SAVE_EXPR, since it cannot know when it will be evaluated
12829 at run time. For example, the latter example above *cannot* be implemented
12830 as SAVE_EXPR (I) * J or any variant thereof, since the value of J at
12831 evaluation time of the original SAVE_EXPR is not necessarily the same at
12832 the time the new expression is evaluated. The only optimization of this
12833 sort that would be valid is changing
12835 SAVE_EXPR (I) * SAVE_EXPR (SAVE_EXPR (J) * 8)
12837 divided by 8 to
12839 SAVE_EXPR (I) * SAVE_EXPR (J)
12841 (where the same SAVE_EXPR (J) is used in the original and the
12842 transformed version). */
12844 int
12845 multiple_of_p (tree type, const_tree top, const_tree bottom)
12847 if (operand_equal_p (top, bottom, 0))
12848 return 1;
12850 if (TREE_CODE (type) != INTEGER_TYPE)
12851 return 0;
12853 switch (TREE_CODE (top))
12855 case BIT_AND_EXPR:
12856 /* A bitwise AND can only guarantee a power-of-two multiple: if the
12857 mask is a multiple of BOTTOM, then TOP is a multiple of BOTTOM. */
12858 if (!integer_pow2p (bottom))
12859 return 0;
12860 /* FALLTHRU */
12862 case MULT_EXPR:
12863 return (multiple_of_p (type, TREE_OPERAND (top, 0), bottom)
12864 || multiple_of_p (type, TREE_OPERAND (top, 1), bottom));
12866 case PLUS_EXPR:
12867 case MINUS_EXPR:
12868 return (multiple_of_p (type, TREE_OPERAND (top, 0), bottom)
12869 && multiple_of_p (type, TREE_OPERAND (top, 1), bottom));
12871 case LSHIFT_EXPR:
12872 if (TREE_CODE (TREE_OPERAND (top, 1)) == INTEGER_CST)
12874 tree op1, t1;
12876 op1 = TREE_OPERAND (top, 1);
12877 /* const_binop may not detect overflow correctly,
12878 so check for it explicitly here. */
12879 if (wi::gtu_p (TYPE_PRECISION (TREE_TYPE (size_one_node)), op1)
12880 && 0 != (t1 = fold_convert (type,
12881 const_binop (LSHIFT_EXPR,
12882 size_one_node,
12883 op1)))
12884 && !TREE_OVERFLOW (t1))
12885 return multiple_of_p (type, t1, bottom);
12887 return 0;
12889 case NOP_EXPR:
12890 /* Can't handle conversions from non-integral or wider integral type. */
12891 if ((TREE_CODE (TREE_TYPE (TREE_OPERAND (top, 0))) != INTEGER_TYPE)
12892 || (TYPE_PRECISION (type)
12893 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (top, 0)))))
12894 return 0;
12896 /* ... fall through ... */
12898 case SAVE_EXPR:
12899 return multiple_of_p (type, TREE_OPERAND (top, 0), bottom);
12901 case COND_EXPR:
12902 return (multiple_of_p (type, TREE_OPERAND (top, 1), bottom)
12903 && multiple_of_p (type, TREE_OPERAND (top, 2), bottom));
12905 case INTEGER_CST:
12906 if (TREE_CODE (bottom) != INTEGER_CST
12907 || integer_zerop (bottom)
12908 || (TYPE_UNSIGNED (type)
12909 && (tree_int_cst_sgn (top) < 0
12910 || tree_int_cst_sgn (bottom) < 0)))
12911 return 0;
12912 return wi::multiple_of_p (wi::to_widest (top), wi::to_widest (bottom),
12913 SIGNED);
12915 default:
12916 return 0;
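/* [Editor's sketch -- not part of fold-const.c]  The recursion above,
   restated for a hypothetical mini-AST over machine integers.  Like
   multiple_of_p, it is conservative: 0 means "not a multiple, or could
   not easily tell".  */
#if 0
struct mini_expr
{
  char op;                      /* 'k' constant, '*' multiply, '+' add */
  long k;
  const struct mini_expr *l, *r;
};

static int
is_multiple (const struct mini_expr *e, long bottom)
{
  switch (e->op)
    {
    case 'k':                   /* constant: test directly */
      return bottom != 0 && e->k % bottom == 0;
    case '*':                   /* a*b: either factor suffices */
      return is_multiple (e->l, bottom) || is_multiple (e->r, bottom);
    case '+':                   /* a+b: both addends must be multiples */
      return is_multiple (e->l, bottom) && is_multiple (e->r, bottom);
    default:
      return 0;                 /* unknown shape: be conservative */
    }
}
#endif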
12920 /* Return true if CODE or TYPE is known to be non-negative. */
12922 static bool
12923 tree_simple_nonnegative_warnv_p (enum tree_code code, tree type)
12925 if ((TYPE_PRECISION (type) != 1 || TYPE_UNSIGNED (type))
12926 && truth_value_p (code))
12927 /* Truth values evaluate to 0 or 1, which is nonnegative unless we
12928 have a signed:1 type (where the values are -1 and 0). */
12929 return true;
12930 return false;
12933 /* Return true if (CODE OP0) is known to be non-negative. If the return
12934 value is based on the assumption that signed overflow is undefined,
12935 set *STRICT_OVERFLOW_P to true; otherwise, don't change
12936 *STRICT_OVERFLOW_P. */
12938 bool
12939 tree_unary_nonnegative_warnv_p (enum tree_code code, tree type, tree op0,
12940 bool *strict_overflow_p)
12942 if (TYPE_UNSIGNED (type))
12943 return true;
12945 switch (code)
12947 case ABS_EXPR:
12948 /* We can't return 1 if flag_wrapv is set because
12949 ABS_EXPR<INT_MIN> = INT_MIN. */
12950 if (!ANY_INTEGRAL_TYPE_P (type))
12951 return true;
12952 if (TYPE_OVERFLOW_UNDEFINED (type))
12954 *strict_overflow_p = true;
12955 return true;
12957 break;
12959 case NON_LVALUE_EXPR:
12960 case FLOAT_EXPR:
12961 case FIX_TRUNC_EXPR:
12962 return tree_expr_nonnegative_warnv_p (op0,
12963 strict_overflow_p);
12965 CASE_CONVERT:
12967 tree inner_type = TREE_TYPE (op0);
12968 tree outer_type = type;
12970 if (TREE_CODE (outer_type) == REAL_TYPE)
12972 if (TREE_CODE (inner_type) == REAL_TYPE)
12973 return tree_expr_nonnegative_warnv_p (op0,
12974 strict_overflow_p);
12975 if (INTEGRAL_TYPE_P (inner_type))
12977 if (TYPE_UNSIGNED (inner_type))
12978 return true;
12979 return tree_expr_nonnegative_warnv_p (op0,
12980 strict_overflow_p);
12983 else if (INTEGRAL_TYPE_P (outer_type))
12985 if (TREE_CODE (inner_type) == REAL_TYPE)
12986 return tree_expr_nonnegative_warnv_p (op0,
12987 strict_overflow_p);
12988 if (INTEGRAL_TYPE_P (inner_type))
12989 return TYPE_PRECISION (inner_type) < TYPE_PRECISION (outer_type)
12990 && TYPE_UNSIGNED (inner_type);
12993 break;
12995 default:
12996 return tree_simple_nonnegative_warnv_p (code, type);
12999 /* We don't know sign of `t', so be conservative and return false. */
13000 return false;
13003 /* Return true if (CODE OP0 OP1) is known to be non-negative. If the return
13004 value is based on the assumption that signed overflow is undefined,
13005 set *STRICT_OVERFLOW_P to true; otherwise, don't change
13006 *STRICT_OVERFLOW_P. */
13008 bool
13009 tree_binary_nonnegative_warnv_p (enum tree_code code, tree type, tree op0,
13010 tree op1, bool *strict_overflow_p)
13012 if (TYPE_UNSIGNED (type))
13013 return true;
13015 switch (code)
13017 case POINTER_PLUS_EXPR:
13018 case PLUS_EXPR:
13019 if (FLOAT_TYPE_P (type))
13020 return (tree_expr_nonnegative_warnv_p (op0,
13021 strict_overflow_p)
13022 && tree_expr_nonnegative_warnv_p (op1,
13023 strict_overflow_p));
13025 /* zero_extend(x) + zero_extend(y) is non-negative if x and y are
13026 both unsigned and at least 2 bits shorter than the result. */
13027 if (TREE_CODE (type) == INTEGER_TYPE
13028 && TREE_CODE (op0) == NOP_EXPR
13029 && TREE_CODE (op1) == NOP_EXPR)
13031 tree inner1 = TREE_TYPE (TREE_OPERAND (op0, 0));
13032 tree inner2 = TREE_TYPE (TREE_OPERAND (op1, 0));
13033 if (TREE_CODE (inner1) == INTEGER_TYPE && TYPE_UNSIGNED (inner1)
13034 && TREE_CODE (inner2) == INTEGER_TYPE && TYPE_UNSIGNED (inner2))
13036 unsigned int prec = MAX (TYPE_PRECISION (inner1),
13037 TYPE_PRECISION (inner2)) + 1;
13038 return prec < TYPE_PRECISION (type);
13041 break;
13043 case MULT_EXPR:
13044 if (FLOAT_TYPE_P (type) || TYPE_OVERFLOW_UNDEFINED (type))
13046 /* x * x is always non-negative for floating point x, or for
13047 integers when overflow is undefined. */
13048 if (operand_equal_p (op0, op1, 0)
13049 || (tree_expr_nonnegative_warnv_p (op0, strict_overflow_p)
13050 && tree_expr_nonnegative_warnv_p (op1, strict_overflow_p)))
13052 if (ANY_INTEGRAL_TYPE_P (type)
13053 && TYPE_OVERFLOW_UNDEFINED (type))
13054 *strict_overflow_p = true;
13055 return true;
13059 /* zero_extend(x) * zero_extend(y) is non-negative if x and y are
13060 both unsigned and their bit counts total less than the result's precision. */
13061 if (TREE_CODE (type) == INTEGER_TYPE
13062 && (TREE_CODE (op0) == NOP_EXPR || TREE_CODE (op0) == INTEGER_CST)
13063 && (TREE_CODE (op1) == NOP_EXPR || TREE_CODE (op1) == INTEGER_CST))
13065 tree inner0 = (TREE_CODE (op0) == NOP_EXPR)
13066 ? TREE_TYPE (TREE_OPERAND (op0, 0))
13067 : TREE_TYPE (op0);
13068 tree inner1 = (TREE_CODE (op1) == NOP_EXPR)
13069 ? TREE_TYPE (TREE_OPERAND (op1, 0))
13070 : TREE_TYPE (op1);
13072 bool unsigned0 = TYPE_UNSIGNED (inner0);
13073 bool unsigned1 = TYPE_UNSIGNED (inner1);
13075 if (TREE_CODE (op0) == INTEGER_CST)
13076 unsigned0 = unsigned0 || tree_int_cst_sgn (op0) >= 0;
13078 if (TREE_CODE (op1) == INTEGER_CST)
13079 unsigned1 = unsigned1 || tree_int_cst_sgn (op1) >= 0;
13081 if (TREE_CODE (inner0) == INTEGER_TYPE && unsigned0
13082 && TREE_CODE (inner1) == INTEGER_TYPE && unsigned1)
13084 unsigned int precision0 = (TREE_CODE (op0) == INTEGER_CST)
13085 ? tree_int_cst_min_precision (op0, UNSIGNED)
13086 : TYPE_PRECISION (inner0);
13088 unsigned int precision1 = (TREE_CODE (op1) == INTEGER_CST)
13089 ? tree_int_cst_min_precision (op1, UNSIGNED)
13090 : TYPE_PRECISION (inner1);
13092 return precision0 + precision1 < TYPE_PRECISION (type);
13095 return false;
13097 case BIT_AND_EXPR:
13098 case MAX_EXPR:
13099 return (tree_expr_nonnegative_warnv_p (op0,
13100 strict_overflow_p)
13101 || tree_expr_nonnegative_warnv_p (op1,
13102 strict_overflow_p));
13104 case BIT_IOR_EXPR:
13105 case BIT_XOR_EXPR:
13106 case MIN_EXPR:
13107 case RDIV_EXPR:
13108 case TRUNC_DIV_EXPR:
13109 case CEIL_DIV_EXPR:
13110 case FLOOR_DIV_EXPR:
13111 case ROUND_DIV_EXPR:
13112 return (tree_expr_nonnegative_warnv_p (op0,
13113 strict_overflow_p)
13114 && tree_expr_nonnegative_warnv_p (op1,
13115 strict_overflow_p));
13117 case TRUNC_MOD_EXPR:
13118 case CEIL_MOD_EXPR:
13119 case FLOOR_MOD_EXPR:
13120 case ROUND_MOD_EXPR:
13121 return tree_expr_nonnegative_warnv_p (op0,
13122 strict_overflow_p);
13123 default:
13124 return tree_simple_nonnegative_warnv_p (code, type);
13127 /* We don't know sign of `t', so be conservative and return false. */
13128 return false;
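/* [Editor's sketch -- not part of fold-const.c]  Concrete instances of
   the two precision rules used above, checked with fixed-width types.  */
#if 0
#include <assert.h>
#include <stdint.h>

int
main (void)
{
  uint8_t a = 255, b = 255;

  /* PLUS rule: MAX (8, 8) + 1 = 9 bits < 16, so the widened sum can
     never set the sign bit of a 16-bit result (it is at most 510).  */
  int16_t sum = (int16_t) ((int) a + (int) b);
  assert (sum >= 0);

  /* MULT rule: 8 + 8 = 16 bits is NOT < 16, and indeed 255 * 255 =
     65025 would not fit in int16_t; a 32-bit result is safe.  */
  int32_t prod = (int32_t) a * (int32_t) b;
  assert (prod >= 0);
  return 0;
}
#endif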
13131 /* Return true if T is known to be non-negative. If the return
13132 value is based on the assumption that signed overflow is undefined,
13133 set *STRICT_OVERFLOW_P to true; otherwise, don't change
13134 *STRICT_OVERFLOW_P. */
13136 bool
13137 tree_single_nonnegative_warnv_p (tree t, bool *strict_overflow_p)
13139 if (TYPE_UNSIGNED (TREE_TYPE (t)))
13140 return true;
13142 switch (TREE_CODE (t))
13144 case INTEGER_CST:
13145 return tree_int_cst_sgn (t) >= 0;
13147 case REAL_CST:
13148 return ! REAL_VALUE_NEGATIVE (TREE_REAL_CST (t));
13150 case FIXED_CST:
13151 return ! FIXED_VALUE_NEGATIVE (TREE_FIXED_CST (t));
13153 case COND_EXPR:
13154 return (tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 1),
13155 strict_overflow_p)
13156 && tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 2),
13157 strict_overflow_p));
13158 default:
13159 return tree_simple_nonnegative_warnv_p (TREE_CODE (t),
13160 TREE_TYPE (t));
13162 /* We don't know sign of `t', so be conservative and return false. */
13163 return false;
13166 /* Return true if T is known to be non-negative. If the return
13167 value is based on the assumption that signed overflow is undefined,
13168 set *STRICT_OVERFLOW_P to true; otherwise, don't change
13169 *STRICT_OVERFLOW_P. */
13171 bool
13172 tree_call_nonnegative_warnv_p (tree type, tree fndecl,
13173 tree arg0, tree arg1, bool *strict_overflow_p)
13175 if (fndecl && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL)
13176 switch (DECL_FUNCTION_CODE (fndecl))
13178 CASE_FLT_FN (BUILT_IN_ACOS):
13179 CASE_FLT_FN (BUILT_IN_ACOSH):
13180 CASE_FLT_FN (BUILT_IN_CABS):
13181 CASE_FLT_FN (BUILT_IN_COSH):
13182 CASE_FLT_FN (BUILT_IN_ERFC):
13183 CASE_FLT_FN (BUILT_IN_EXP):
13184 CASE_FLT_FN (BUILT_IN_EXP10):
13185 CASE_FLT_FN (BUILT_IN_EXP2):
13186 CASE_FLT_FN (BUILT_IN_FABS):
13187 CASE_FLT_FN (BUILT_IN_FDIM):
13188 CASE_FLT_FN (BUILT_IN_HYPOT):
13189 CASE_FLT_FN (BUILT_IN_POW10):
13190 CASE_INT_FN (BUILT_IN_FFS):
13191 CASE_INT_FN (BUILT_IN_PARITY):
13192 CASE_INT_FN (BUILT_IN_POPCOUNT):
13193 CASE_INT_FN (BUILT_IN_CLZ):
13194 CASE_INT_FN (BUILT_IN_CLRSB):
13195 case BUILT_IN_BSWAP32:
13196 case BUILT_IN_BSWAP64:
13197 /* Always true. */
13198 return true;
13200 CASE_FLT_FN (BUILT_IN_SQRT):
13201 /* sqrt(-0.0) is -0.0. */
13202 if (!HONOR_SIGNED_ZEROS (element_mode (type)))
13203 return true;
13204 return tree_expr_nonnegative_warnv_p (arg0,
13205 strict_overflow_p);
13207 CASE_FLT_FN (BUILT_IN_ASINH):
13208 CASE_FLT_FN (BUILT_IN_ATAN):
13209 CASE_FLT_FN (BUILT_IN_ATANH):
13210 CASE_FLT_FN (BUILT_IN_CBRT):
13211 CASE_FLT_FN (BUILT_IN_CEIL):
13212 CASE_FLT_FN (BUILT_IN_ERF):
13213 CASE_FLT_FN (BUILT_IN_EXPM1):
13214 CASE_FLT_FN (BUILT_IN_FLOOR):
13215 CASE_FLT_FN (BUILT_IN_FMOD):
13216 CASE_FLT_FN (BUILT_IN_FREXP):
13217 CASE_FLT_FN (BUILT_IN_ICEIL):
13218 CASE_FLT_FN (BUILT_IN_IFLOOR):
13219 CASE_FLT_FN (BUILT_IN_IRINT):
13220 CASE_FLT_FN (BUILT_IN_IROUND):
13221 CASE_FLT_FN (BUILT_IN_LCEIL):
13222 CASE_FLT_FN (BUILT_IN_LDEXP):
13223 CASE_FLT_FN (BUILT_IN_LFLOOR):
13224 CASE_FLT_FN (BUILT_IN_LLCEIL):
13225 CASE_FLT_FN (BUILT_IN_LLFLOOR):
13226 CASE_FLT_FN (BUILT_IN_LLRINT):
13227 CASE_FLT_FN (BUILT_IN_LLROUND):
13228 CASE_FLT_FN (BUILT_IN_LRINT):
13229 CASE_FLT_FN (BUILT_IN_LROUND):
13230 CASE_FLT_FN (BUILT_IN_MODF):
13231 CASE_FLT_FN (BUILT_IN_NEARBYINT):
13232 CASE_FLT_FN (BUILT_IN_RINT):
13233 CASE_FLT_FN (BUILT_IN_ROUND):
13234 CASE_FLT_FN (BUILT_IN_SCALB):
13235 CASE_FLT_FN (BUILT_IN_SCALBLN):
13236 CASE_FLT_FN (BUILT_IN_SCALBN):
13237 CASE_FLT_FN (BUILT_IN_SIGNBIT):
13238 CASE_FLT_FN (BUILT_IN_SIGNIFICAND):
13239 CASE_FLT_FN (BUILT_IN_SINH):
13240 CASE_FLT_FN (BUILT_IN_TANH):
13241 CASE_FLT_FN (BUILT_IN_TRUNC):
13242 /* True if the 1st argument is nonnegative. */
13243 return tree_expr_nonnegative_warnv_p (arg0,
13244 strict_overflow_p);
13246 CASE_FLT_FN (BUILT_IN_FMAX):
13247 /* True if the 1st OR 2nd arguments are nonnegative. */
13248 return (tree_expr_nonnegative_warnv_p (arg0,
13249 strict_overflow_p)
13250 || (tree_expr_nonnegative_warnv_p (arg1,
13251 strict_overflow_p)));
13253 CASE_FLT_FN (BUILT_IN_FMIN):
13254 /* True if the 1st AND 2nd arguments are nonnegative. */
13255 return (tree_expr_nonnegative_warnv_p (arg0,
13256 strict_overflow_p)
13257 && (tree_expr_nonnegative_warnv_p (arg1,
13258 strict_overflow_p)));
13260 CASE_FLT_FN (BUILT_IN_COPYSIGN):
13261 /* True if the 2nd argument is nonnegative. */
13262 return tree_expr_nonnegative_warnv_p (arg1,
13263 strict_overflow_p);
13265 CASE_FLT_FN (BUILT_IN_POWI):
13266 /* True if the 1st argument is nonnegative or the second
13267 argument is an even integer. */
13268 if (TREE_CODE (arg1) == INTEGER_CST
13269 && (TREE_INT_CST_LOW (arg1) & 1) == 0)
13270 return true;
13271 return tree_expr_nonnegative_warnv_p (arg0,
13272 strict_overflow_p);
13274 CASE_FLT_FN (BUILT_IN_POW):
13275 /* True if the 1st argument is nonnegative or the second
13276 argument is an even integer valued real. */
13277 if (TREE_CODE (arg1) == REAL_CST)
13279 REAL_VALUE_TYPE c;
13280 HOST_WIDE_INT n;
13282 c = TREE_REAL_CST (arg1);
13283 n = real_to_integer (&c);
13284 if ((n & 1) == 0)
13286 REAL_VALUE_TYPE cint;
13287 real_from_integer (&cint, VOIDmode, n, SIGNED);
13288 if (real_identical (&c, &cint))
13289 return true;
13292 return tree_expr_nonnegative_warnv_p (arg0,
13293 strict_overflow_p);
13295 default:
13296 break;
13298 return tree_simple_nonnegative_warnv_p (CALL_EXPR,
13299 type);
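/* [Editor's sketch -- not part of fold-const.c]  The pow/powi rule above
   at the math level: x**n with an even integer n equals (x**(n/2))**2,
   which is nonnegative regardless of the sign of x.  */
#if 0
#include <assert.h>
#include <math.h>

int
main (void)
{
  assert (pow (-3.0, 4.0) >= 0.0);      /* even exponent: nonnegative */
  assert (pow (-3.0, 3.0) < 0.0);       /* odd exponent: sign of base */
  return 0;
}
#endif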
13302 /* Return true if T is known to be non-negative. If the return
13303 value is based on the assumption that signed overflow is undefined,
13304 set *STRICT_OVERFLOW_P to true; otherwise, don't change
13305 *STRICT_OVERFLOW_P. */
13307 static bool
13308 tree_invalid_nonnegative_warnv_p (tree t, bool *strict_overflow_p)
13310 enum tree_code code = TREE_CODE (t);
13311 if (TYPE_UNSIGNED (TREE_TYPE (t)))
13312 return true;
13314 switch (code)
13316 case TARGET_EXPR:
13318 tree temp = TARGET_EXPR_SLOT (t);
13319 t = TARGET_EXPR_INITIAL (t);
13321 /* If the initializer is non-void, then it's a normal expression
13322 that will be assigned to the slot. */
13323 if (!VOID_TYPE_P (t))
13324 return tree_expr_nonnegative_warnv_p (t, strict_overflow_p);
13326 /* Otherwise, the initializer sets the slot in some way. One common
13327 way is an assignment statement at the end of the initializer. */
13328 while (1)
13330 if (TREE_CODE (t) == BIND_EXPR)
13331 t = expr_last (BIND_EXPR_BODY (t));
13332 else if (TREE_CODE (t) == TRY_FINALLY_EXPR
13333 || TREE_CODE (t) == TRY_CATCH_EXPR)
13334 t = expr_last (TREE_OPERAND (t, 0));
13335 else if (TREE_CODE (t) == STATEMENT_LIST)
13336 t = expr_last (t);
13337 else
13338 break;
13340 if (TREE_CODE (t) == MODIFY_EXPR
13341 && TREE_OPERAND (t, 0) == temp)
13342 return tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 1),
13343 strict_overflow_p);
13345 return false;
13348 case CALL_EXPR:
13350 tree arg0 = call_expr_nargs (t) > 0 ? CALL_EXPR_ARG (t, 0) : NULL_TREE;
13351 tree arg1 = call_expr_nargs (t) > 1 ? CALL_EXPR_ARG (t, 1) : NULL_TREE;
13353 return tree_call_nonnegative_warnv_p (TREE_TYPE (t),
13354 get_callee_fndecl (t),
13355 arg0,
13356 arg1,
13357 strict_overflow_p);
13359 case COMPOUND_EXPR:
13360 case MODIFY_EXPR:
13361 return tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 1),
13362 strict_overflow_p);
13363 case BIND_EXPR:
13364 return tree_expr_nonnegative_warnv_p (expr_last (TREE_OPERAND (t, 1)),
13365 strict_overflow_p);
13366 case SAVE_EXPR:
13367 return tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 0),
13368 strict_overflow_p);
13370 default:
13371 return tree_simple_nonnegative_warnv_p (TREE_CODE (t),
13372 TREE_TYPE (t));
13375 /* We don't know sign of `t', so be conservative and return false. */
13376 return false;
13379 /* Return true if T is known to be non-negative. If the return
13380 value is based on the assumption that signed overflow is undefined,
13381 set *STRICT_OVERFLOW_P to true; otherwise, don't change
13382 *STRICT_OVERFLOW_P. */
13384 bool
13385 tree_expr_nonnegative_warnv_p (tree t, bool *strict_overflow_p)
13387 enum tree_code code;
13388 if (t == error_mark_node)
13389 return false;
13391 code = TREE_CODE (t);
13392 switch (TREE_CODE_CLASS (code))
13394 case tcc_binary:
13395 case tcc_comparison:
13396 return tree_binary_nonnegative_warnv_p (TREE_CODE (t),
13397 TREE_TYPE (t),
13398 TREE_OPERAND (t, 0),
13399 TREE_OPERAND (t, 1),
13400 strict_overflow_p);
13402 case tcc_unary:
13403 return tree_unary_nonnegative_warnv_p (TREE_CODE (t),
13404 TREE_TYPE (t),
13405 TREE_OPERAND (t, 0),
13406 strict_overflow_p);
13408 case tcc_constant:
13409 case tcc_declaration:
13410 case tcc_reference:
13411 return tree_single_nonnegative_warnv_p (t, strict_overflow_p);
13413 default:
13414 break;
13417 switch (code)
13419 case TRUTH_AND_EXPR:
13420 case TRUTH_OR_EXPR:
13421 case TRUTH_XOR_EXPR:
13422 return tree_binary_nonnegative_warnv_p (TREE_CODE (t),
13423 TREE_TYPE (t),
13424 TREE_OPERAND (t, 0),
13425 TREE_OPERAND (t, 1),
13426 strict_overflow_p);
13427 case TRUTH_NOT_EXPR:
13428 return tree_unary_nonnegative_warnv_p (TREE_CODE (t),
13429 TREE_TYPE (t),
13430 TREE_OPERAND (t, 0),
13431 strict_overflow_p);
13433 case COND_EXPR:
13434 case CONSTRUCTOR:
13435 case OBJ_TYPE_REF:
13436 case ASSERT_EXPR:
13437 case ADDR_EXPR:
13438 case WITH_SIZE_EXPR:
13439 case SSA_NAME:
13440 return tree_single_nonnegative_warnv_p (t, strict_overflow_p);
13442 default:
13443 return tree_invalid_nonnegative_warnv_p (t, strict_overflow_p);
13447 /* Return true if `t' is known to be non-negative. Handle warnings
13448 about undefined signed overflow. */
13450 bool
13451 tree_expr_nonnegative_p (tree t)
13453 bool ret, strict_overflow_p;
13455 strict_overflow_p = false;
13456 ret = tree_expr_nonnegative_warnv_p (t, &strict_overflow_p);
13457 if (strict_overflow_p)
13458 fold_overflow_warning (("assuming signed overflow does not occur when "
13459 "determining that expression is always "
13460 "non-negative"),
13461 WARN_STRICT_OVERFLOW_MISC);
13462 return ret;
13466 /* Return true when (CODE OP0) is an address and is known to be nonzero.
13467 For floating point we further ensure that T is not denormal.
13468 Similar logic is present in nonzero_address_p in rtlanal.c.
13470 If the return value is based on the assumption that signed overflow
13471 is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
13472 change *STRICT_OVERFLOW_P. */
13474 bool
13475 tree_unary_nonzero_warnv_p (enum tree_code code, tree type, tree op0,
13476 bool *strict_overflow_p)
13478 switch (code)
13480 case ABS_EXPR:
13481 return tree_expr_nonzero_warnv_p (op0,
13482 strict_overflow_p);
13484 case NOP_EXPR:
13486 tree inner_type = TREE_TYPE (op0);
13487 tree outer_type = type;
13489 return (TYPE_PRECISION (outer_type) >= TYPE_PRECISION (inner_type)
13490 && tree_expr_nonzero_warnv_p (op0,
13491 strict_overflow_p));
13493 break;
13495 case NON_LVALUE_EXPR:
13496 return tree_expr_nonzero_warnv_p (op0,
13497 strict_overflow_p);
13499 default:
13500 break;
13503 return false;
13506 /* Return true when (CODE OP0 OP1) is an address and is known to be nonzero.
13507 For floating point we further ensure that T is not denormal.
13508 Similar logic is present in nonzero_address_p in rtlanal.c.
13510 If the return value is based on the assumption that signed overflow
13511 is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
13512 change *STRICT_OVERFLOW_P. */
13514 bool
13515 tree_binary_nonzero_warnv_p (enum tree_code code,
13516 tree type,
13517 tree op0,
13518 tree op1, bool *strict_overflow_p)
13520 bool sub_strict_overflow_p;
13521 switch (code)
13523 case POINTER_PLUS_EXPR:
13524 case PLUS_EXPR:
13525 if (ANY_INTEGRAL_TYPE_P (type) && TYPE_OVERFLOW_UNDEFINED (type))
13527 /* In the presence of negative values it is hard
13528 to say anything definite. */
13529 sub_strict_overflow_p = false;
13530 if (!tree_expr_nonnegative_warnv_p (op0,
13531 &sub_strict_overflow_p)
13532 || !tree_expr_nonnegative_warnv_p (op1,
13533 &sub_strict_overflow_p))
13534 return false;
13535 /* One of the operands must be positive and the other non-negative. */
13536 /* We don't set *STRICT_OVERFLOW_P here: even if this value
13537 overflows, on a twos-complement machine the sum of two
13538 nonnegative numbers can never be zero. */
13539 return (tree_expr_nonzero_warnv_p (op0,
13540 strict_overflow_p)
13541 || tree_expr_nonzero_warnv_p (op1,
13542 strict_overflow_p));
13544 break;
13546 case MULT_EXPR:
13547 if (TYPE_OVERFLOW_UNDEFINED (type))
13549 if (tree_expr_nonzero_warnv_p (op0,
13550 strict_overflow_p)
13551 && tree_expr_nonzero_warnv_p (op1,
13552 strict_overflow_p))
13554 *strict_overflow_p = true;
13555 return true;
13558 break;
13560 case MIN_EXPR:
13561 sub_strict_overflow_p = false;
13562 if (tree_expr_nonzero_warnv_p (op0,
13563 &sub_strict_overflow_p)
13564 && tree_expr_nonzero_warnv_p (op1,
13565 &sub_strict_overflow_p))
13567 if (sub_strict_overflow_p)
13568 *strict_overflow_p = true;
13570 break;
13572 case MAX_EXPR:
13573 sub_strict_overflow_p = false;
13574 if (tree_expr_nonzero_warnv_p (op0,
13575 &sub_strict_overflow_p))
13577 if (sub_strict_overflow_p)
13578 *strict_overflow_p = true;
13580 /* When both operands are nonzero, then MAX must be too. */
13581 if (tree_expr_nonzero_warnv_p (op1,
13582 strict_overflow_p))
13583 return true;
13585 /* MAX where operand 0 is positive is positive. */
13586 return tree_expr_nonnegative_warnv_p (op0,
13587 strict_overflow_p);
13589 /* MAX where operand 1 is positive is positive. */
13590 else if (tree_expr_nonzero_warnv_p (op1,
13591 &sub_strict_overflow_p)
13592 && tree_expr_nonnegative_warnv_p (op1,
13593 &sub_strict_overflow_p))
13595 if (sub_strict_overflow_p)
13596 *strict_overflow_p = true;
13597 return true;
13599 break;
13601 case BIT_IOR_EXPR:
13602 return (tree_expr_nonzero_warnv_p (op1,
13603 strict_overflow_p)
13604 || tree_expr_nonzero_warnv_p (op0,
13605 strict_overflow_p));
13607 default:
13608 break;
13611 return false;
13614 /* Return true when T is an address and is known to be nonzero.
13615 For floating point we further ensure that T is not denormal.
13616 Similar logic is present in nonzero_address_p in rtlanal.c.
13618 If the return value is based on the assumption that signed overflow
13619 is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
13620 change *STRICT_OVERFLOW_P. */
13622 bool
13623 tree_single_nonzero_warnv_p (tree t, bool *strict_overflow_p)
13625 bool sub_strict_overflow_p;
13626 switch (TREE_CODE (t))
13628 case INTEGER_CST:
13629 return !integer_zerop (t);
13631 case ADDR_EXPR:
13633 tree base = TREE_OPERAND (t, 0);
13635 if (!DECL_P (base))
13636 base = get_base_address (base);
13638 if (!base)
13639 return false;
13641 /* For objects in the symbol table, check whether we know they are non-zero.
13642 Don't do anything for variables and functions before symtab is built;
13643 it is quite possible that they will be declared weak later. */
13644 if (DECL_P (base) && decl_in_symtab_p (base))
13646 struct symtab_node *symbol;
13648 symbol = symtab_node::get_create (base);
13649 if (symbol)
13650 return symbol->nonzero_address ();
13651 else
13652 return false;
13655 /* Function local objects are never NULL. */
13656 if (DECL_P (base)
13657 && (DECL_CONTEXT (base)
13658 && TREE_CODE (DECL_CONTEXT (base)) == FUNCTION_DECL
13659 && auto_var_in_fn_p (base, DECL_CONTEXT (base))))
13660 return true;
13662 /* Constants are never weak. */
13663 if (CONSTANT_CLASS_P (base))
13664 return true;
13666 return false;
13669 case COND_EXPR:
13670 sub_strict_overflow_p = false;
13671 if (tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 1),
13672 &sub_strict_overflow_p)
13673 && tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 2),
13674 &sub_strict_overflow_p))
13676 if (sub_strict_overflow_p)
13677 *strict_overflow_p = true;
13678 return true;
13680 break;
13682 default:
13683 break;
13685 return false;
13688 /* Given the components of a binary expression CODE, TYPE, OP0 and OP1,
13689 attempt to fold the expression to a constant without modifying TYPE,
13690 OP0 or OP1.
13692 If the expression can be simplified to a constant, then return
13693 the constant. If the expression cannot be simplified to a
13694 constant, then return NULL_TREE.
13696 tree
13697 fold_binary_to_constant (enum tree_code code, tree type, tree op0, tree op1)
13699 tree tem = fold_binary (code, type, op0, op1);
13700 return (tem && TREE_CONSTANT (tem)) ? tem : NULL_TREE;
13703 /* Given the components of a unary expression CODE, TYPE and OP0,
13704 attempt to fold the expression to a constant without modifying
13705 TYPE or OP0.
13707 If the expression can be simplified to a constant, then return
13708 the constant. If the expression cannot be simplified to a
13709 constant, then return NULL_TREE.
13711 tree
13712 fold_unary_to_constant (enum tree_code code, tree type, tree op0)
13714 tree tem = fold_unary (code, type, op0);
13715 return (tem && TREE_CONSTANT (tem)) ? tem : NULL_TREE;
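/* [Editor's sketch -- not part of fold-const.c]  Typical use of the two
   wrappers above by a caller that only wants the simplification when it
   is a compile-time constant; off1 and off2 are hypothetical trees.  */
#if 0
  tree sum = fold_binary_to_constant (PLUS_EXPR, sizetype, off1, off2);
  if (sum == NULL_TREE)
    /* Not a constant: fall back to building the expression.  */
    sum = fold_build2 (PLUS_EXPR, sizetype, off1, off2);
#endif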
13718 /* If EXP represents referencing an element in a constant string
13719 (either via pointer arithmetic or array indexing), return the
13720 tree representing the value accessed, otherwise return NULL. */
13722 tree
13723 fold_read_from_constant_string (tree exp)
13725 if ((TREE_CODE (exp) == INDIRECT_REF
13726 || TREE_CODE (exp) == ARRAY_REF)
13727 && TREE_CODE (TREE_TYPE (exp)) == INTEGER_TYPE)
13729 tree exp1 = TREE_OPERAND (exp, 0);
13730 tree index;
13731 tree string;
13732 location_t loc = EXPR_LOCATION (exp);
13734 if (TREE_CODE (exp) == INDIRECT_REF)
13735 string = string_constant (exp1, &index);
13736 else
13738 tree low_bound = array_ref_low_bound (exp);
13739 index = fold_convert_loc (loc, sizetype, TREE_OPERAND (exp, 1));
13741 /* Optimize the special-case of a zero lower bound.
13743 We convert the low_bound to sizetype to avoid some problems
13744 with constant folding. (E.g. suppose the lower bound is 1,
13745 and its mode is QI. Without the conversion, (ARRAY
13746 +(INDEX-(unsigned char)1)) becomes ((ARRAY+(-(unsigned char)1))
13747 +INDEX), which becomes (ARRAY+255+INDEX). Oops!) */
13748 if (! integer_zerop (low_bound))
13749 index = size_diffop_loc (loc, index,
13750 fold_convert_loc (loc, sizetype, low_bound));
13752 string = exp1;
13755 if (string
13756 && TYPE_MODE (TREE_TYPE (exp)) == TYPE_MODE (TREE_TYPE (TREE_TYPE (string)))
13757 && TREE_CODE (string) == STRING_CST
13758 && TREE_CODE (index) == INTEGER_CST
13759 && compare_tree_int (index, TREE_STRING_LENGTH (string)) < 0
13760 && (GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_TYPE (string))))
13761 == MODE_INT)
13762 && (GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (TREE_TYPE (string)))) == 1))
13763 return build_int_cst_type (TREE_TYPE (exp),
13764 (TREE_STRING_POINTER (string)
13765 [TREE_INT_CST_LOW (index)]));
13767 return NULL;
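/* [Editor's sketch -- not part of fold-const.c]  The source-level shape
   the routine above folds: a constant index into a string literal
   becomes the character constant itself.  */
#if 0
  char c1 = "hello"[1];         /* before folding */
  char c2 = 'e';                /* what it folds to */
#endif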
13770 /* Return the tree for neg (ARG0) when ARG0 is known to be either
13771 an integer constant, real, or fixed-point constant.
13773 TYPE is the type of the result. */
13775 static tree
13776 fold_negate_const (tree arg0, tree type)
13778 tree t = NULL_TREE;
13780 switch (TREE_CODE (arg0))
13782 case INTEGER_CST:
13784 bool overflow;
13785 wide_int val = wi::neg (arg0, &overflow);
13786 t = force_fit_type (type, val, 1,
13787 (overflow | TREE_OVERFLOW (arg0))
13788 && !TYPE_UNSIGNED (type));
13789 break;
13792 case REAL_CST:
13793 t = build_real (type, real_value_negate (&TREE_REAL_CST (arg0)));
13794 break;
13796 case FIXED_CST:
13798 FIXED_VALUE_TYPE f;
13799 bool overflow_p = fixed_arithmetic (&f, NEGATE_EXPR,
13800 &(TREE_FIXED_CST (arg0)), NULL,
13801 TYPE_SATURATING (type));
13802 t = build_fixed (type, f);
13803 /* Propagate overflow flags. */
13804 if (overflow_p | TREE_OVERFLOW (arg0))
13805 TREE_OVERFLOW (t) = 1;
13806 break;
13809 default:
13810 gcc_unreachable ();
13813 return t;
13816 /* Return the tree for abs (ARG0) when ARG0 is known to be either
13817 an integer constant or real constant.
13819 TYPE is the type of the result. */
13821 tree
13822 fold_abs_const (tree arg0, tree type)
13824 tree t = NULL_TREE;
13826 switch (TREE_CODE (arg0))
13828 case INTEGER_CST:
13830 /* If the value is unsigned or non-negative, then the absolute value
13831 is the same as the ordinary value. */
13832 if (!wi::neg_p (arg0, TYPE_SIGN (type)))
13833 t = arg0;
13835 /* If the value is negative, then the absolute value is
13836 its negation. */
13837 else
13839 bool overflow;
13840 wide_int val = wi::neg (arg0, &overflow);
13841 t = force_fit_type (type, val, -1,
13842 overflow | TREE_OVERFLOW (arg0));
13845 break;
13847 case REAL_CST:
13848 if (REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg0)))
13849 t = build_real (type, real_value_negate (&TREE_REAL_CST (arg0)));
13850 else
13851 t = arg0;
13852 break;
13854 default:
13855 gcc_unreachable ();
13858 return t;
13861 /* Return the tree for not (ARG0) when ARG0 is known to be an integer
13862 constant. TYPE is the type of the result. */
13864 static tree
13865 fold_not_const (const_tree arg0, tree type)
13867 gcc_assert (TREE_CODE (arg0) == INTEGER_CST);
13869 return force_fit_type (type, wi::bit_not (arg0), 0, TREE_OVERFLOW (arg0));
13872 /* Given CODE, a relational operator, the target type, TYPE and two
13873 constant operands OP0 and OP1, return the result of the
13874 relational operation. If the result is not a compile time
13875 constant, then return NULL_TREE. */
13877 static tree
13878 fold_relational_const (enum tree_code code, tree type, tree op0, tree op1)
13880 int result, invert;
13882 /* From here on, the only cases we handle are when the result is
13883 known to be a constant. */
13885 if (TREE_CODE (op0) == REAL_CST && TREE_CODE (op1) == REAL_CST)
13887 const REAL_VALUE_TYPE *c0 = TREE_REAL_CST_PTR (op0);
13888 const REAL_VALUE_TYPE *c1 = TREE_REAL_CST_PTR (op1);
13890 /* Handle the cases where either operand is a NaN. */
13891 if (real_isnan (c0) || real_isnan (c1))
13893 switch (code)
13895 case EQ_EXPR:
13896 case ORDERED_EXPR:
13897 result = 0;
13898 break;
13900 case NE_EXPR:
13901 case UNORDERED_EXPR:
13902 case UNLT_EXPR:
13903 case UNLE_EXPR:
13904 case UNGT_EXPR:
13905 case UNGE_EXPR:
13906 case UNEQ_EXPR:
13907 result = 1;
13908 break;
13910 case LT_EXPR:
13911 case LE_EXPR:
13912 case GT_EXPR:
13913 case GE_EXPR:
13914 case LTGT_EXPR:
13915 if (flag_trapping_math)
13916 return NULL_TREE;
13917 result = 0;
13918 break;
13920 default:
13921 gcc_unreachable ();
13924 return constant_boolean_node (result, type);
13927 return constant_boolean_node (real_compare (code, c0, c1), type);
13930 if (TREE_CODE (op0) == FIXED_CST && TREE_CODE (op1) == FIXED_CST)
13932 const FIXED_VALUE_TYPE *c0 = TREE_FIXED_CST_PTR (op0);
13933 const FIXED_VALUE_TYPE *c1 = TREE_FIXED_CST_PTR (op1);
13934 return constant_boolean_node (fixed_compare (code, c0, c1), type);
13937 /* Handle equality/inequality of complex constants. */
13938 if (TREE_CODE (op0) == COMPLEX_CST && TREE_CODE (op1) == COMPLEX_CST)
13940 tree rcond = fold_relational_const (code, type,
13941 TREE_REALPART (op0),
13942 TREE_REALPART (op1));
13943 tree icond = fold_relational_const (code, type,
13944 TREE_IMAGPART (op0),
13945 TREE_IMAGPART (op1));
13946 if (code == EQ_EXPR)
13947 return fold_build2 (TRUTH_ANDIF_EXPR, type, rcond, icond);
13948 else if (code == NE_EXPR)
13949 return fold_build2 (TRUTH_ORIF_EXPR, type, rcond, icond);
13950 else
13951 return NULL_TREE;
13954 if (TREE_CODE (op0) == VECTOR_CST && TREE_CODE (op1) == VECTOR_CST)
13956 unsigned count = VECTOR_CST_NELTS (op0);
13957 tree *elts = XALLOCAVEC (tree, count);
13958 gcc_assert (VECTOR_CST_NELTS (op1) == count
13959 && TYPE_VECTOR_SUBPARTS (type) == count);
13961 for (unsigned i = 0; i < count; i++)
13963 tree elem_type = TREE_TYPE (type);
13964 tree elem0 = VECTOR_CST_ELT (op0, i);
13965 tree elem1 = VECTOR_CST_ELT (op1, i);
13967 tree tem = fold_relational_const (code, elem_type,
13968 elem0, elem1);
13970 if (tem == NULL_TREE)
13971 return NULL_TREE;
13973 elts[i] = build_int_cst (elem_type, integer_zerop (tem) ? 0 : -1);
13976 return build_vector (type, elts);
13979 /* From here on we only handle LT, LE, GT, GE, EQ and NE.
13981 To compute GT, swap the arguments and do LT.
13982 To compute GE, do LT and invert the result.
13983 To compute LE, swap the arguments, do LT and invert the result.
13984 To compute NE, do EQ and invert the result.
13986 Therefore, the code below must handle only EQ and LT. */
13988 if (code == LE_EXPR || code == GT_EXPR)
13990 std::swap (op0, op1);
13991 code = swap_tree_comparison (code);
13994 /* Note that it is safe to invert for real values here because we
13995 have already handled the one case where it matters. */
13997 invert = 0;
13998 if (code == NE_EXPR || code == GE_EXPR)
14000 invert = 1;
14001 code = invert_tree_comparison (code, false);
14004 /* Compute a result for LT or EQ if args permit;
14005 otherwise return NULL_TREE. */
14006 if (TREE_CODE (op0) == INTEGER_CST && TREE_CODE (op1) == INTEGER_CST)
14008 if (code == EQ_EXPR)
14009 result = tree_int_cst_equal (op0, op1);
14010 else
14011 result = tree_int_cst_lt (op0, op1);
14013 else
14014 return NULL_TREE;
14016 if (invert)
14017 result ^= 1;
14018 return constant_boolean_node (result, type);
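/* [Editor's sketch -- not part of fold-const.c]  The canonicalization
   described above, written out for plain integers: every comparison
   reduces to lt () and eq () by swapping operands and/or inverting.  */
#if 0
static int lt (long a, long b) { return a < b; }
static int eq (long a, long b) { return a == b; }

static int gt (long a, long b) { return lt (b, a); }   /* swap          */
static int ge (long a, long b) { return !lt (a, b); }  /* invert        */
static int le (long a, long b) { return !lt (b, a); }  /* swap + invert */
static int ne (long a, long b) { return !eq (a, b); }  /* invert        */
#endif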
14021 /* If necessary, return a CLEANUP_POINT_EXPR for EXPR with the
14022 indicated TYPE. If no CLEANUP_POINT_EXPR is necessary, return EXPR
14023 itself. */
14025 tree
14026 fold_build_cleanup_point_expr (tree type, tree expr)
14028 /* If the expression does not have side effects then we don't have to wrap
14029 it with a cleanup point expression. */
14030 if (!TREE_SIDE_EFFECTS (expr))
14031 return expr;
14033 /* If the expression is a return, check whether the expression inside the
14034 return, or the right-hand side of the modify expression inside the
14035 return, has side effects. If either has none, we don't need to
14036 wrap the expression in a cleanup point expression. Note we don't check the
14037 left-hand side of the modify because it should always be the return decl. */
14038 if (TREE_CODE (expr) == RETURN_EXPR)
14040 tree op = TREE_OPERAND (expr, 0);
14041 if (!op || !TREE_SIDE_EFFECTS (op))
14042 return expr;
14043 op = TREE_OPERAND (op, 1);
14044 if (!TREE_SIDE_EFFECTS (op))
14045 return expr;
14048 return build1 (CLEANUP_POINT_EXPR, type, expr);
14051 /* Given a pointer value OP0 and a type TYPE, return a simplified version
14052 of an indirection through OP0, or NULL_TREE if no simplification is
14053 possible. */
14055 tree
14056 fold_indirect_ref_1 (location_t loc, tree type, tree op0)
14058 tree sub = op0;
14059 tree subtype;
14061 STRIP_NOPS (sub);
14062 subtype = TREE_TYPE (sub);
14063 if (!POINTER_TYPE_P (subtype))
14064 return NULL_TREE;
14066 if (TREE_CODE (sub) == ADDR_EXPR)
14068 tree op = TREE_OPERAND (sub, 0);
14069 tree optype = TREE_TYPE (op);
14070 /* *&CONST_DECL => the value of the const decl. */
14071 if (TREE_CODE (op) == CONST_DECL)
14072 return DECL_INITIAL (op);
14073 /* *&p => p; make sure to handle *&"str"[cst] here. */
14074 if (type == optype)
14076 tree fop = fold_read_from_constant_string (op);
14077 if (fop)
14078 return fop;
14079 else
14080 return op;
14082 /* *(foo *)&fooarray => fooarray[0] */
14083 else if (TREE_CODE (optype) == ARRAY_TYPE
14084 && type == TREE_TYPE (optype)
14085 && (!in_gimple_form
14086 || TREE_CODE (TYPE_SIZE (type)) == INTEGER_CST))
14088 tree type_domain = TYPE_DOMAIN (optype);
14089 tree min_val = size_zero_node;
14090 if (type_domain && TYPE_MIN_VALUE (type_domain))
14091 min_val = TYPE_MIN_VALUE (type_domain);
14092 if (in_gimple_form
14093 && TREE_CODE (min_val) != INTEGER_CST)
14094 return NULL_TREE;
14095 return build4_loc (loc, ARRAY_REF, type, op, min_val,
14096 NULL_TREE, NULL_TREE);
14098 /* *(foo *)&complexfoo => __real__ complexfoo */
14099 else if (TREE_CODE (optype) == COMPLEX_TYPE
14100 && type == TREE_TYPE (optype))
14101 return fold_build1_loc (loc, REALPART_EXPR, type, op);
14102 /* *(foo *)&vectorfoo => BIT_FIELD_REF<vectorfoo,...> */
14103 else if (TREE_CODE (optype) == VECTOR_TYPE
14104 && type == TREE_TYPE (optype))
14106 tree part_width = TYPE_SIZE (type);
14107 tree index = bitsize_int (0);
14108 return fold_build3_loc (loc, BIT_FIELD_REF, type, op, part_width, index);
14112 if (TREE_CODE (sub) == POINTER_PLUS_EXPR
14113 && TREE_CODE (TREE_OPERAND (sub, 1)) == INTEGER_CST)
14115 tree op00 = TREE_OPERAND (sub, 0);
14116 tree op01 = TREE_OPERAND (sub, 1);
14118 STRIP_NOPS (op00);
14119 if (TREE_CODE (op00) == ADDR_EXPR)
14121 tree op00type;
14122 op00 = TREE_OPERAND (op00, 0);
14123 op00type = TREE_TYPE (op00);
14125 /* ((foo*)&vectorfoo)[1] => BIT_FIELD_REF<vectorfoo,...> */
14126 if (TREE_CODE (op00type) == VECTOR_TYPE
14127 && type == TREE_TYPE (op00type))
14129 HOST_WIDE_INT offset = tree_to_shwi (op01);
14130 tree part_width = TYPE_SIZE (type);
14131 unsigned HOST_WIDE_INT part_widthi = tree_to_shwi (part_width)/BITS_PER_UNIT;
14132 unsigned HOST_WIDE_INT indexi = offset * BITS_PER_UNIT;
14133 tree index = bitsize_int (indexi);
14135 if (offset / part_widthi < TYPE_VECTOR_SUBPARTS (op00type))
14136 return fold_build3_loc (loc,
14137 BIT_FIELD_REF, type, op00,
14138 part_width, index);
14141 /* ((foo*)&complexfoo)[1] => __imag__ complexfoo */
14142 else if (TREE_CODE (op00type) == COMPLEX_TYPE
14143 && type == TREE_TYPE (op00type))
14145 tree size = TYPE_SIZE_UNIT (type);
14146 if (tree_int_cst_equal (size, op01))
14147 return fold_build1_loc (loc, IMAGPART_EXPR, type, op00);
14149 /* ((foo *)&fooarray)[1] => fooarray[1] */
14150 else if (TREE_CODE (op00type) == ARRAY_TYPE
14151 && type == TREE_TYPE (op00type))
14153 tree type_domain = TYPE_DOMAIN (op00type);
14154 tree min_val = size_zero_node;
14155 if (type_domain && TYPE_MIN_VALUE (type_domain))
14156 min_val = TYPE_MIN_VALUE (type_domain);
14157 op01 = size_binop_loc (loc, EXACT_DIV_EXPR, op01,
14158 TYPE_SIZE_UNIT (type));
14159 op01 = size_binop_loc (loc, PLUS_EXPR, op01, min_val);
14160 return build4_loc (loc, ARRAY_REF, type, op00, op01,
14161 NULL_TREE, NULL_TREE);
14166 /* *(foo *)fooarrptr => (*fooarrptr)[0] */
14167 if (TREE_CODE (TREE_TYPE (subtype)) == ARRAY_TYPE
14168 && type == TREE_TYPE (TREE_TYPE (subtype))
14169 && (!in_gimple_form
14170 || TREE_CODE (TYPE_SIZE (type)) == INTEGER_CST))
14172 tree type_domain;
14173 tree min_val = size_zero_node;
14174 sub = build_fold_indirect_ref_loc (loc, sub);
14175 type_domain = TYPE_DOMAIN (TREE_TYPE (sub));
14176 if (type_domain && TYPE_MIN_VALUE (type_domain))
14177 min_val = TYPE_MIN_VALUE (type_domain);
14178 if (in_gimple_form
14179 && TREE_CODE (min_val) != INTEGER_CST)
14180 return NULL_TREE;
14181 return build4_loc (loc, ARRAY_REF, type, sub, min_val, NULL_TREE,
14182 NULL_TREE);
14185 return NULL_TREE;
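/* [Editor's sketch -- not part of fold-const.c]  Source-level shapes that
   fold_indirect_ref_1 recognizes (GNU C; uses the __real__ extension).  */
#if 0
static double
shapes (void)
{
  int arr[4] = { 1, 2, 3, 4 };
  int first = *(int *) &arr;        /* *(foo *)&fooarray => fooarray[0] */
  _Complex double z = 3.0;
  double re = *(double *) &z;       /* *(foo *)&complexfoo => __real__ z */
  return first + re;
}
#endif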
14188 /* Builds an expression for an indirection through T, simplifying some
14189 cases. */
14191 tree
14192 build_fold_indirect_ref_loc (location_t loc, tree t)
14194 tree type = TREE_TYPE (TREE_TYPE (t));
14195 tree sub = fold_indirect_ref_1 (loc, type, t);
14197 if (sub)
14198 return sub;
14200 return build1_loc (loc, INDIRECT_REF, type, t);
14203 /* Given an INDIRECT_REF T, return either T or a simplified version. */
14205 tree
14206 fold_indirect_ref_loc (location_t loc, tree t)
14208 tree sub = fold_indirect_ref_1 (loc, TREE_TYPE (t), TREE_OPERAND (t, 0));
14210 if (sub)
14211 return sub;
14212 else
14213 return t;
14216 /* Strip non-trapping, non-side-effecting tree nodes from an expression
14217 whose result is ignored. The type of the returned tree need not be
14218 the same as the original expression. */
14220 tree
14221 fold_ignored_result (tree t)
14223 if (!TREE_SIDE_EFFECTS (t))
14224 return integer_zero_node;
14226 for (;;)
14227 switch (TREE_CODE_CLASS (TREE_CODE (t)))
14229 case tcc_unary:
14230 t = TREE_OPERAND (t, 0);
14231 break;
14233 case tcc_binary:
14234 case tcc_comparison:
14235 if (!TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1)))
14236 t = TREE_OPERAND (t, 0);
14237 else if (!TREE_SIDE_EFFECTS (TREE_OPERAND (t, 0)))
14238 t = TREE_OPERAND (t, 1);
14239 else
14240 return t;
14241 break;
14243 case tcc_expression:
14244 switch (TREE_CODE (t))
14246 case COMPOUND_EXPR:
14247 if (TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1)))
14248 return t;
14249 t = TREE_OPERAND (t, 0);
14250 break;
14252 case COND_EXPR:
14253 if (TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1))
14254 || TREE_SIDE_EFFECTS (TREE_OPERAND (t, 2)))
14255 return t;
14256 t = TREE_OPERAND (t, 0);
14257 break;
14259 default:
14260 return t;
14262 break;
14264 default:
14265 return t;
14269 /* Return the value of VALUE, rounded up to a multiple of DIVISOR. */
14271 tree
14272 round_up_loc (location_t loc, tree value, unsigned int divisor)
14274 tree div = NULL_TREE;
14276 if (divisor == 1)
14277 return value;
14279 /* See if VALUE is already a multiple of DIVISOR. If so, we don't
14280 have to do anything. Only do this when we are not given a const,
14281 because in that case, this check is more expensive than just
14282 doing the rounding. */
14283 if (TREE_CODE (value) != INTEGER_CST)
14285 div = build_int_cst (TREE_TYPE (value), divisor);
14287 if (multiple_of_p (TREE_TYPE (value), value, div))
14288 return value;
14291 /* If divisor is a power of two, simplify this to bit manipulation. */
14292 if (divisor == (divisor & -divisor))
14294 if (TREE_CODE (value) == INTEGER_CST)
14296 wide_int val = value;
14297 bool overflow_p;
14299 if ((val & (divisor - 1)) == 0)
14300 return value;
14302 overflow_p = TREE_OVERFLOW (value);
14303 val += divisor - 1;
14304 val &= - (int) divisor;
14305 if (val == 0)
14306 overflow_p = true;
14308 return force_fit_type (TREE_TYPE (value), val, -1, overflow_p);
14310 else
14312 tree t;
14314 t = build_int_cst (TREE_TYPE (value), divisor - 1);
14315 value = size_binop_loc (loc, PLUS_EXPR, value, t);
14316 t = build_int_cst (TREE_TYPE (value), - (int) divisor);
14317 value = size_binop_loc (loc, BIT_AND_EXPR, value, t);
14320 else
14322 if (!div)
14323 div = build_int_cst (TREE_TYPE (value), divisor);
14324 value = size_binop_loc (loc, CEIL_DIV_EXPR, value, div);
14325 value = size_binop_loc (loc, MULT_EXPR, value, div);
14328 return value;
14331 /* Likewise, but round down. */
14333 tree
14334 round_down_loc (location_t loc, tree value, int divisor)
14336 tree div = NULL_TREE;
14338 gcc_assert (divisor > 0);
14339 if (divisor == 1)
14340 return value;
14342 /* See if VALUE is already a multiple of DIVISOR. If so, we don't
14343 have to do anything. Only do this when we are not given a const,
14344 because in that case, this check is more expensive than just
14345 doing the rounding. */
14346 if (TREE_CODE (value) != INTEGER_CST)
14348 div = build_int_cst (TREE_TYPE (value), divisor);
14350 if (multiple_of_p (TREE_TYPE (value), value, div))
14351 return value;
14354 /* If divisor is a power of two, simplify this to bit manipulation. */
14355 if (divisor == (divisor & -divisor))
14357 tree t;
14359 t = build_int_cst (TREE_TYPE (value), -divisor);
14360 value = size_binop_loc (loc, BIT_AND_EXPR, value, t);
14362 else
14364 if (!div)
14365 div = build_int_cst (TREE_TYPE (value), divisor);
14366 value = size_binop_loc (loc, FLOOR_DIV_EXPR, value, div);
14367 value = size_binop_loc (loc, MULT_EXPR, value, div);
14370 return value;
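
/* Likewise for rounding down (illustrative only): 21 & -8 == 16,
   i.e. the low log2 (DIVISOR) bits of VALUE are simply cleared.  */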
/* Returns the pointer to the base of the object addressed by EXP and
   extracts the information about the offset of the access, storing it
   in PBITPOS and POFFSET.  */

static tree
split_address_to_core_and_offset (tree exp,
                                  HOST_WIDE_INT *pbitpos, tree *poffset)
{
  tree core;
  machine_mode mode;
  int unsignedp, volatilep;
  HOST_WIDE_INT bitsize;
  location_t loc = EXPR_LOCATION (exp);

  if (TREE_CODE (exp) == ADDR_EXPR)
    {
      core = get_inner_reference (TREE_OPERAND (exp, 0), &bitsize, pbitpos,
                                  poffset, &mode, &unsignedp, &volatilep,
                                  false);
      core = build_fold_addr_expr_loc (loc, core);
    }
  else
    {
      core = exp;
      *pbitpos = 0;
      *poffset = NULL_TREE;
    }

  return core;
}
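
/* An illustrative sketch, not part of the original source: for
   EXP == &a[2] where "a" is an array of 4-byte integers, the core
   returned is &a with *PBITPOS == 64 and *POFFSET == NULL_TREE
   (assuming BITS_PER_UNIT == 8); anything other than an ADDR_EXPR
   comes back as its own core with a zero offset.  */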
/* Returns true if addresses of E1 and E2 differ by a constant, false
   otherwise.  If they do, E1 - E2 is stored in *DIFF.  */

bool
ptr_difference_const (tree e1, tree e2, HOST_WIDE_INT *diff)
{
  tree core1, core2;
  HOST_WIDE_INT bitpos1, bitpos2;
  tree toffset1, toffset2, tdiff, type;

  core1 = split_address_to_core_and_offset (e1, &bitpos1, &toffset1);
  core2 = split_address_to_core_and_offset (e2, &bitpos2, &toffset2);

  if (bitpos1 % BITS_PER_UNIT != 0
      || bitpos2 % BITS_PER_UNIT != 0
      || !operand_equal_p (core1, core2, 0))
    return false;

  if (toffset1 && toffset2)
    {
      type = TREE_TYPE (toffset1);
      if (type != TREE_TYPE (toffset2))
        toffset2 = fold_convert (type, toffset2);

      tdiff = fold_build2 (MINUS_EXPR, type, toffset1, toffset2);
      if (!cst_and_fits_in_hwi (tdiff))
        return false;

      *diff = int_cst_value (tdiff);
    }
  else if (toffset1 || toffset2)
    {
      /* If only one of the offsets is non-constant, the difference cannot
         be a constant.  */
      return false;
    }
  else
    *diff = 0;

  *diff += (bitpos1 - bitpos2) / BITS_PER_UNIT;
  return true;
}
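
/* A usage sketch, with hypothetical operands: for E1 == &a[3] and
   E2 == &a[1] over a 4-byte element type, both cores compare equal
   and the bit positions differ by 64, so

     HOST_WIDE_INT diff;
     if (ptr_difference_const (e1, e2, &diff))
       gcc_assert (diff == 8);

   succeeds; if exactly one address involved a variable offset, the
   function would return false instead.  */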
/* Simplify the floating point expression EXP when the sign of the
   result is not significant.  Return NULL_TREE if no simplification
   is possible.  */

tree
fold_strip_sign_ops (tree exp)
{
  tree arg0, arg1;
  location_t loc = EXPR_LOCATION (exp);

  switch (TREE_CODE (exp))
    {
    case ABS_EXPR:
    case NEGATE_EXPR:
      arg0 = fold_strip_sign_ops (TREE_OPERAND (exp, 0));
      return arg0 ? arg0 : TREE_OPERAND (exp, 0);

    case MULT_EXPR:
    case RDIV_EXPR:
      if (HONOR_SIGN_DEPENDENT_ROUNDING (element_mode (exp)))
        return NULL_TREE;
      arg0 = fold_strip_sign_ops (TREE_OPERAND (exp, 0));
      arg1 = fold_strip_sign_ops (TREE_OPERAND (exp, 1));
      if (arg0 != NULL_TREE || arg1 != NULL_TREE)
        return fold_build2_loc (loc, TREE_CODE (exp), TREE_TYPE (exp),
                                arg0 ? arg0 : TREE_OPERAND (exp, 0),
                                arg1 ? arg1 : TREE_OPERAND (exp, 1));
      break;

    case COMPOUND_EXPR:
      arg0 = TREE_OPERAND (exp, 0);
      arg1 = fold_strip_sign_ops (TREE_OPERAND (exp, 1));
      if (arg1)
        return fold_build2_loc (loc, COMPOUND_EXPR, TREE_TYPE (exp),
                                arg0, arg1);
      break;

    case COND_EXPR:
      arg0 = fold_strip_sign_ops (TREE_OPERAND (exp, 1));
      arg1 = fold_strip_sign_ops (TREE_OPERAND (exp, 2));
      if (arg0 || arg1)
        return fold_build3_loc (loc, COND_EXPR, TREE_TYPE (exp),
                                TREE_OPERAND (exp, 0),
                                arg0 ? arg0 : TREE_OPERAND (exp, 1),
                                arg1 ? arg1 : TREE_OPERAND (exp, 2));
      break;

    case CALL_EXPR:
      {
        const enum built_in_function fcode = builtin_mathfn_code (exp);
        switch (fcode)
          {
          CASE_FLT_FN (BUILT_IN_COPYSIGN):
            /* Strip the copysign call and return its first argument.  */
            arg0 = CALL_EXPR_ARG (exp, 0);
            arg1 = CALL_EXPR_ARG (exp, 1);
            return omit_one_operand_loc (loc, TREE_TYPE (exp), arg0, arg1);

          default:
            /* Strip sign ops from the argument of "odd" math functions.  */
            if (negate_mathfn_p (fcode))
              {
                arg0 = fold_strip_sign_ops (CALL_EXPR_ARG (exp, 0));
                if (arg0)
                  return build_call_expr_loc (loc, get_callee_fndecl (exp),
                                              1, arg0);
              }
            break;
          }
      }
      break;

    default:
      break;
    }
  return NULL_TREE;
}
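
/* An illustrative sketch, not part of the original source: when only
   the magnitude of the result matters, e.g. for the argument of
   fabs (), this routine rewrites "-x * y" as "x * y" and reduces
   "copysign (x, y)" to plain "x", with omit_one_operand_loc keeping
   "y" around only if it has side effects.  */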
/* Return OFF converted to a pointer offset type suitable as offset for
   POINTER_PLUS_EXPR.  Use location LOC for this conversion.  */
tree
convert_to_ptrofftype_loc (location_t loc, tree off)
{
  return fold_convert_loc (loc, sizetype, off);
}

/* Build and fold a POINTER_PLUS_EXPR at LOC offsetting PTR by OFF.  */
tree
fold_build_pointer_plus_loc (location_t loc, tree ptr, tree off)
{
  return fold_build2_loc (loc, POINTER_PLUS_EXPR, TREE_TYPE (ptr),
                          ptr, convert_to_ptrofftype_loc (loc, off));
}

/* Build and fold a POINTER_PLUS_EXPR at LOC offsetting PTR by OFF.  */
tree
fold_build_pointer_plus_hwi_loc (location_t loc, tree ptr, HOST_WIDE_INT off)
{
  return fold_build2_loc (loc, POINTER_PLUS_EXPR, TREE_TYPE (ptr),
                          ptr, size_int (off));
}
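
/* A usage sketch, with hypothetical operands: to form the address
   four bytes past what PTR points to, a caller can write

     tree addr = fold_build_pointer_plus_hwi_loc (loc, ptr, 4);

   which is equivalent to fold_build_pointer_plus_loc with
   size_int (4); both end up with a sizetype offset, the offset type
   POINTER_PLUS_EXPR expects.  */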