gcc/fold-const.c
/* Fold a constant sub-tree into a single node for C-compiler
   Copyright (C) 1987-2014 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */
/*@@ This file should be rewritten to use an arbitrary precision
  @@ representation for "struct tree_int_cst" and "struct tree_real_cst".
  @@ Perhaps the routines could also be used for bc/dc, and made a lib.
  @@ The routines that translate from the ap rep should
  @@ warn if precision et. al. is lost.
  @@ This would also make life easier when this technology is used
  @@ for cross-compilers.  */
/* The entry points in this file are fold, size_int_wide and size_binop.

   fold takes a tree as argument and returns a simplified tree.

   size_binop takes a tree code for an arithmetic operation
   and two operands that are trees, and produces a tree for the
   result, assuming the type comes from `sizetype'.

   size_int takes an integer value, and creates a tree constant
   with type from `sizetype'.

   Note: Since the folders get called on non-gimple code as well as
   gimple code, we need to handle GIMPLE tuples as well as their
   corresponding tree equivalents.  */
#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "flags.h"
#include "tree.h"
#include "stor-layout.h"
#include "calls.h"
#include "tree-iterator.h"
#include "realmpfr.h"
#include "rtl.h"
#include "expr.h"
#include "tm_p.h"
#include "target.h"
#include "diagnostic-core.h"
#include "intl.h"
#include "langhooks.h"
#include "md5.h"
#include "basic-block.h"
#include "tree-ssa-alias.h"
#include "internal-fn.h"
#include "tree-eh.h"
#include "gimple-expr.h"
#include "is-a.h"
#include "gimple.h"
#include "gimplify.h"
#include "tree-dfa.h"
#include "hash-table.h"  /* Required for ENABLE_FOLD_CHECKING.  */
/* Nonzero if we are folding constants inside an initializer; zero
   otherwise.  */
int folding_initializer = 0;
/* The following constants represent a bit based encoding of GCC's
   comparison operators.  This encoding simplifies transformations
   on relational comparison operators, such as AND and OR.  */
enum comparison_code {
  COMPCODE_FALSE = 0,
  COMPCODE_LT = 1,
  COMPCODE_EQ = 2,
  COMPCODE_LE = 3,
  COMPCODE_GT = 4,
  COMPCODE_LTGT = 5,
  COMPCODE_GE = 6,
  COMPCODE_ORD = 7,
  COMPCODE_UNORD = 8,
  COMPCODE_UNLT = 9,
  COMPCODE_UNEQ = 10,
  COMPCODE_UNLE = 11,
  COMPCODE_UNGT = 12,
  COMPCODE_NE = 13,
  COMPCODE_UNGE = 14,
  COMPCODE_TRUE = 15
};
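/* For example, the encoding makes combining comparisons a purely
   bitwise matter: "a < b || a == b" merges by OR-ing the codes,
   COMPCODE_LT | COMPCODE_EQ == 1 | 2 == 3 == COMPCODE_LE, and
   "a <= b && a >= b" merges by AND-ing them,
   COMPCODE_LE & COMPCODE_GE == 3 & 6 == 2 == COMPCODE_EQ.  */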
static bool negate_mathfn_p (enum built_in_function);
static bool negate_expr_p (tree);
static tree negate_expr (tree);
static tree split_tree (tree, enum tree_code, tree *, tree *, tree *, int);
static tree associate_trees (location_t, tree, tree, enum tree_code, tree);
static tree const_binop (enum tree_code, tree, tree);
static enum comparison_code comparison_to_compcode (enum tree_code);
static enum tree_code compcode_to_comparison (enum comparison_code);
static int operand_equal_for_comparison_p (tree, tree, tree);
static int twoval_comparison_p (tree, tree *, tree *, int *);
static tree eval_subst (location_t, tree, tree, tree, tree, tree);
static tree pedantic_omit_one_operand_loc (location_t, tree, tree, tree);
static tree distribute_bit_expr (location_t, enum tree_code, tree, tree, tree);
static tree make_bit_field_ref (location_t, tree, tree,
				HOST_WIDE_INT, HOST_WIDE_INT, int);
static tree optimize_bit_field_compare (location_t, enum tree_code,
					tree, tree, tree);
static tree decode_field_reference (location_t, tree, HOST_WIDE_INT *,
				    HOST_WIDE_INT *,
				    enum machine_mode *, int *, int *,
				    tree *, tree *);
static tree sign_bit_p (tree, const_tree);
static int simple_operand_p (const_tree);
static bool simple_operand_p_2 (tree);
static tree range_binop (enum tree_code, tree, tree, int, tree, int);
static tree range_predecessor (tree);
static tree range_successor (tree);
static tree fold_range_test (location_t, enum tree_code, tree, tree, tree);
static tree fold_cond_expr_with_comparison (location_t, tree, tree, tree, tree);
static tree unextend (tree, int, int, tree);
static tree optimize_minmax_comparison (location_t, enum tree_code,
					tree, tree, tree);
static tree extract_muldiv (tree, tree, enum tree_code, tree, bool *);
static tree extract_muldiv_1 (tree, tree, enum tree_code, tree, bool *);
static tree fold_binary_op_with_conditional_arg (location_t,
						 enum tree_code, tree,
						 tree, tree,
						 tree, tree, int);
static tree fold_mathfn_compare (location_t,
				 enum built_in_function, enum tree_code,
				 tree, tree, tree);
static tree fold_inf_compare (location_t, enum tree_code, tree, tree, tree);
static tree fold_div_compare (location_t, enum tree_code, tree, tree, tree);
static bool reorder_operands_p (const_tree, const_tree);
static tree fold_negate_const (tree, tree);
static tree fold_not_const (const_tree, tree);
static tree fold_relational_const (enum tree_code, tree, tree, tree);
static tree fold_convert_const (enum tree_code, tree, tree);
/* Return EXPR_LOCATION of T if it is not UNKNOWN_LOCATION.
   Otherwise, return LOC.  */

static location_t
expr_location_or (tree t, location_t loc)
{
  location_t tloc = EXPR_LOCATION (t);
  return tloc == UNKNOWN_LOCATION ? loc : tloc;
}
/* Similar to protected_set_expr_location, but never modify X in place;
   if the location can and needs to be set, unshare X first.  */

static inline tree
protected_set_expr_location_unshare (tree x, location_t loc)
{
  if (CAN_HAVE_LOCATION_P (x)
      && EXPR_LOCATION (x) != loc
      && !(TREE_CODE (x) == SAVE_EXPR
	   || TREE_CODE (x) == TARGET_EXPR
	   || TREE_CODE (x) == BIND_EXPR))
    {
      x = copy_node (x);
      SET_EXPR_LOCATION (x, loc);
    }
  return x;
}
/* If ARG2 divides ARG1 with zero remainder, carries out the exact
   division and returns the quotient.  Otherwise returns
   NULL_TREE.  */

tree
div_if_zero_remainder (const_tree arg1, const_tree arg2)
{
  widest_int quo;

  if (wi::multiple_of_p (wi::to_widest (arg1), wi::to_widest (arg2),
			 SIGNED, &quo))
    return wide_int_to_tree (TREE_TYPE (arg1), quo);

  return NULL_TREE;
}
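/* For example, for INTEGER_CST arguments 12 and 4 this returns the
   constant 3, while for 12 and 5 the remainder is nonzero and
   NULL_TREE is returned.  */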
/* This is nonzero if we should defer warnings about undefined
   overflow.  This facility exists because these warnings are a
   special case.  The code to estimate loop iterations does not want
   to issue any warnings, since it works with expressions which do not
   occur in user code.  Various bits of cleanup code call fold(), but
   only use the result if it has certain characteristics (e.g., is a
   constant); that code only wants to issue a warning if the result is
   used.  */

static int fold_deferring_overflow_warnings;

/* If a warning about undefined overflow is deferred, this is the
   warning.  Note that this may cause us to turn two warnings into
   one, but that is fine since it is sufficient to only give one
   warning per expression.  */

static const char* fold_deferred_overflow_warning;

/* If a warning about undefined overflow is deferred, this is the
   level at which the warning should be emitted.  */

static enum warn_strict_overflow_code fold_deferred_overflow_code;

/* Start deferring overflow warnings.  We could use a stack here to
   permit nested calls, but at present it is not necessary.  */

void
fold_defer_overflow_warnings (void)
{
  ++fold_deferring_overflow_warnings;
}
/* Stop deferring overflow warnings.  If there is a pending warning,
   and ISSUE is true, then issue the warning if appropriate.  STMT is
   the statement with which the warning should be associated (used for
   location information); STMT may be NULL.  CODE is the level of the
   warning--a warn_strict_overflow_code value.  This function will use
   the smaller of CODE and the deferred code when deciding whether to
   issue the warning.  CODE may be zero to mean to always use the
   deferred code.  */

void
fold_undefer_overflow_warnings (bool issue, const_gimple stmt, int code)
{
  const char *warnmsg;
  location_t locus;

  gcc_assert (fold_deferring_overflow_warnings > 0);
  --fold_deferring_overflow_warnings;
  if (fold_deferring_overflow_warnings > 0)
    {
      if (fold_deferred_overflow_warning != NULL
	  && code != 0
	  && code < (int) fold_deferred_overflow_code)
	fold_deferred_overflow_code = (enum warn_strict_overflow_code) code;
      return;
    }

  warnmsg = fold_deferred_overflow_warning;
  fold_deferred_overflow_warning = NULL;

  if (!issue || warnmsg == NULL)
    return;

  if (gimple_no_warning_p (stmt))
    return;

  /* Use the smallest code level when deciding to issue the
     warning.  */
  if (code == 0 || code > (int) fold_deferred_overflow_code)
    code = fold_deferred_overflow_code;

  if (!issue_strict_overflow_warning (code))
    return;

  if (stmt == NULL)
    locus = input_location;
  else
    locus = gimple_location (stmt);
  warning_at (locus, OPT_Wstrict_overflow, "%s", warnmsg);
}
/* Stop deferring overflow warnings, ignoring any deferred
   warnings.  */

void
fold_undefer_and_ignore_overflow_warnings (void)
{
  fold_undefer_overflow_warnings (false, NULL, 0);
}

/* Whether we are deferring overflow warnings.  */

bool
fold_deferring_overflow_warnings_p (void)
{
  return fold_deferring_overflow_warnings > 0;
}
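/* A typical use of the deferral machinery, sketched for illustration
   (not a quote of any particular caller):

     fold_defer_overflow_warnings ();
     folded = fold (expr);
     ... decide whether FOLDED is actually used ...
     fold_undefer_overflow_warnings (used_p, stmt, 0);

   so a warning about folding that relied on undefined signed overflow
   is only emitted if the folded result is really used.  */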
/* This is called when we fold something based on the fact that signed
   overflow is undefined.  */

static void
fold_overflow_warning (const char* gmsgid, enum warn_strict_overflow_code wc)
{
  if (fold_deferring_overflow_warnings > 0)
    {
      if (fold_deferred_overflow_warning == NULL
	  || wc < fold_deferred_overflow_code)
	{
	  fold_deferred_overflow_warning = gmsgid;
	  fold_deferred_overflow_code = wc;
	}
    }
  else if (issue_strict_overflow_warning (wc))
    warning (OPT_Wstrict_overflow, gmsgid);
}
/* Return true if the built-in mathematical function specified by CODE
   is odd, i.e. -f(x) == f(-x).  */

static bool
negate_mathfn_p (enum built_in_function code)
{
  switch (code)
    {
    CASE_FLT_FN (BUILT_IN_ASIN):
    CASE_FLT_FN (BUILT_IN_ASINH):
    CASE_FLT_FN (BUILT_IN_ATAN):
    CASE_FLT_FN (BUILT_IN_ATANH):
    CASE_FLT_FN (BUILT_IN_CASIN):
    CASE_FLT_FN (BUILT_IN_CASINH):
    CASE_FLT_FN (BUILT_IN_CATAN):
    CASE_FLT_FN (BUILT_IN_CATANH):
    CASE_FLT_FN (BUILT_IN_CBRT):
    CASE_FLT_FN (BUILT_IN_CPROJ):
    CASE_FLT_FN (BUILT_IN_CSIN):
    CASE_FLT_FN (BUILT_IN_CSINH):
    CASE_FLT_FN (BUILT_IN_CTAN):
    CASE_FLT_FN (BUILT_IN_CTANH):
    CASE_FLT_FN (BUILT_IN_ERF):
    CASE_FLT_FN (BUILT_IN_LLROUND):
    CASE_FLT_FN (BUILT_IN_LROUND):
    CASE_FLT_FN (BUILT_IN_ROUND):
    CASE_FLT_FN (BUILT_IN_SIN):
    CASE_FLT_FN (BUILT_IN_SINH):
    CASE_FLT_FN (BUILT_IN_TAN):
    CASE_FLT_FN (BUILT_IN_TANH):
    CASE_FLT_FN (BUILT_IN_TRUNC):
      return true;

    CASE_FLT_FN (BUILT_IN_LLRINT):
    CASE_FLT_FN (BUILT_IN_LRINT):
    CASE_FLT_FN (BUILT_IN_NEARBYINT):
    CASE_FLT_FN (BUILT_IN_RINT):
      return !flag_rounding_math;

    default:
      break;
    }
  return false;
}
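/* For example, sin is odd: -sin(x) == sin(-x), so a negation can be
   moved into the call's argument.  cos is even rather than odd and is
   therefore (correctly) absent from the list above.  */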
/* Check whether we may negate an integer constant T without causing
   overflow.  */

bool
may_negate_without_overflow_p (const_tree t)
{
  tree type;

  gcc_assert (TREE_CODE (t) == INTEGER_CST);

  type = TREE_TYPE (t);
  if (TYPE_UNSIGNED (type))
    return false;

  return !wi::only_sign_bit_p (t);
}
/* Determine whether an expression T can be cheaply negated using
   the function negate_expr without introducing undefined overflow.  */

static bool
negate_expr_p (tree t)
{
  tree type;

  if (t == 0)
    return false;

  type = TREE_TYPE (t);

  STRIP_SIGN_NOPS (t);
  switch (TREE_CODE (t))
    {
    case INTEGER_CST:
      if (TYPE_OVERFLOW_WRAPS (type))
	return true;

      /* Check that -CST will not overflow type.  */
      return may_negate_without_overflow_p (t);
    case BIT_NOT_EXPR:
      return (INTEGRAL_TYPE_P (type)
	      && TYPE_OVERFLOW_WRAPS (type));

    case FIXED_CST:
    case NEGATE_EXPR:
      return true;

    case REAL_CST:
      /* We want to canonicalize to positive real constants.  Pretend
	 that only negative ones can be easily negated.  */
      return REAL_VALUE_NEGATIVE (TREE_REAL_CST (t));

    case COMPLEX_CST:
      return negate_expr_p (TREE_REALPART (t))
	     && negate_expr_p (TREE_IMAGPART (t));

    case VECTOR_CST:
      {
	if (FLOAT_TYPE_P (TREE_TYPE (type)) || TYPE_OVERFLOW_WRAPS (type))
	  return true;

	int count = TYPE_VECTOR_SUBPARTS (type), i;

	for (i = 0; i < count; i++)
	  if (!negate_expr_p (VECTOR_CST_ELT (t, i)))
	    return false;

	return true;
      }

    case COMPLEX_EXPR:
      return negate_expr_p (TREE_OPERAND (t, 0))
	     && negate_expr_p (TREE_OPERAND (t, 1));

    case CONJ_EXPR:
      return negate_expr_p (TREE_OPERAND (t, 0));

    case PLUS_EXPR:
      if (HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type))
	  || HONOR_SIGNED_ZEROS (TYPE_MODE (type)))
	return false;
      /* -(A + B) -> (-B) - A.  */
      if (negate_expr_p (TREE_OPERAND (t, 1))
	  && reorder_operands_p (TREE_OPERAND (t, 0),
				 TREE_OPERAND (t, 1)))
	return true;
      /* -(A + B) -> (-A) - B.  */
      return negate_expr_p (TREE_OPERAND (t, 0));

    case MINUS_EXPR:
      /* We can't turn -(A-B) into B-A when we honor signed zeros.  */
      return !HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type))
	     && !HONOR_SIGNED_ZEROS (TYPE_MODE (type))
	     && reorder_operands_p (TREE_OPERAND (t, 0),
				    TREE_OPERAND (t, 1));

    case MULT_EXPR:
      if (TYPE_UNSIGNED (TREE_TYPE (t)))
	break;

      /* Fall through.  */

    case RDIV_EXPR:
      if (! HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (TREE_TYPE (t))))
	return negate_expr_p (TREE_OPERAND (t, 1))
	       || negate_expr_p (TREE_OPERAND (t, 0));
      break;

    case TRUNC_DIV_EXPR:
    case ROUND_DIV_EXPR:
    case EXACT_DIV_EXPR:
      /* In general we can't negate A / B, because if A is INT_MIN and
	 B is 1, we may turn this into INT_MIN / -1 which is undefined
	 and actually traps on some architectures.  But if overflow is
	 undefined, we can negate, because - (INT_MIN / 1) is an
	 overflow.  */
      if (INTEGRAL_TYPE_P (TREE_TYPE (t)))
	{
	  if (!TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (t)))
	    break;
	  /* If overflow is undefined then we have to be careful because
	     we ask whether it's ok to associate the negate with the
	     division which is not ok for example for
	     -((a - b) / c) where (-(a - b)) / c may invoke undefined
	     overflow because of negating INT_MIN.  So do not use
	     negate_expr_p here but open-code the two important cases.  */
	  if (TREE_CODE (TREE_OPERAND (t, 0)) == NEGATE_EXPR
	      || (TREE_CODE (TREE_OPERAND (t, 0)) == INTEGER_CST
		  && may_negate_without_overflow_p (TREE_OPERAND (t, 0))))
	    return true;
	}
      else if (negate_expr_p (TREE_OPERAND (t, 0)))
	return true;
      return negate_expr_p (TREE_OPERAND (t, 1));

    case NOP_EXPR:
      /* Negate -((double)float) as (double)(-float).  */
      if (TREE_CODE (type) == REAL_TYPE)
	{
	  tree tem = strip_float_extensions (t);
	  if (tem != t)
	    return negate_expr_p (tem);
	}
      break;

    case CALL_EXPR:
      /* Negate -f(x) as f(-x).  */
      if (negate_mathfn_p (builtin_mathfn_code (t)))
	return negate_expr_p (CALL_EXPR_ARG (t, 0));
      break;

    case RSHIFT_EXPR:
      /* Optimize -((int)x >> 31) into (unsigned)x >> 31 for int.  */
      if (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST)
	{
	  tree op1 = TREE_OPERAND (t, 1);
	  if (wi::eq_p (op1, TYPE_PRECISION (type) - 1))
	    return true;
	}
      break;

    default:
      break;
    }
  return false;
}
/* Given T, an expression, return a folded tree for -T or NULL_TREE, if no
   simplification is possible.
   If negate_expr_p would return true for T, NULL_TREE will never be
   returned.  */

static tree
fold_negate_expr (location_t loc, tree t)
{
  tree type = TREE_TYPE (t);
  tree tem;

  switch (TREE_CODE (t))
    {
    /* Convert - (~A) to A + 1.  */
    case BIT_NOT_EXPR:
      if (INTEGRAL_TYPE_P (type))
	return fold_build2_loc (loc, PLUS_EXPR, type, TREE_OPERAND (t, 0),
				build_one_cst (type));
      break;

    case INTEGER_CST:
      tem = fold_negate_const (t, type);
      if (TREE_OVERFLOW (tem) == TREE_OVERFLOW (t)
	  || !TYPE_OVERFLOW_TRAPS (type))
	return tem;
      break;

    case REAL_CST:
      tem = fold_negate_const (t, type);
      /* Two's complement FP formats, such as c4x, may overflow.  */
      if (!TREE_OVERFLOW (tem) || !flag_trapping_math)
	return tem;
      break;

    case FIXED_CST:
      tem = fold_negate_const (t, type);
      return tem;

    case COMPLEX_CST:
      {
	tree rpart = negate_expr (TREE_REALPART (t));
	tree ipart = negate_expr (TREE_IMAGPART (t));

	if ((TREE_CODE (rpart) == REAL_CST
	     && TREE_CODE (ipart) == REAL_CST)
	    || (TREE_CODE (rpart) == INTEGER_CST
		&& TREE_CODE (ipart) == INTEGER_CST))
	  return build_complex (type, rpart, ipart);
      }
      break;

    case VECTOR_CST:
      {
	int count = TYPE_VECTOR_SUBPARTS (type), i;
	tree *elts = XALLOCAVEC (tree, count);

	for (i = 0; i < count; i++)
	  {
	    elts[i] = fold_negate_expr (loc, VECTOR_CST_ELT (t, i));
	    if (elts[i] == NULL_TREE)
	      return NULL_TREE;
	  }

	return build_vector (type, elts);
      }

    case COMPLEX_EXPR:
      if (negate_expr_p (t))
	return fold_build2_loc (loc, COMPLEX_EXPR, type,
				fold_negate_expr (loc, TREE_OPERAND (t, 0)),
				fold_negate_expr (loc, TREE_OPERAND (t, 1)));
      break;

    case CONJ_EXPR:
      if (negate_expr_p (t))
	return fold_build1_loc (loc, CONJ_EXPR, type,
				fold_negate_expr (loc, TREE_OPERAND (t, 0)));
      break;

    case NEGATE_EXPR:
      return TREE_OPERAND (t, 0);

    case PLUS_EXPR:
      if (!HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type))
	  && !HONOR_SIGNED_ZEROS (TYPE_MODE (type)))
	{
	  /* -(A + B) -> (-B) - A.  */
	  if (negate_expr_p (TREE_OPERAND (t, 1))
	      && reorder_operands_p (TREE_OPERAND (t, 0),
				     TREE_OPERAND (t, 1)))
	    {
	      tem = negate_expr (TREE_OPERAND (t, 1));
	      return fold_build2_loc (loc, MINUS_EXPR, type,
				      tem, TREE_OPERAND (t, 0));
	    }

	  /* -(A + B) -> (-A) - B.  */
	  if (negate_expr_p (TREE_OPERAND (t, 0)))
	    {
	      tem = negate_expr (TREE_OPERAND (t, 0));
	      return fold_build2_loc (loc, MINUS_EXPR, type,
				      tem, TREE_OPERAND (t, 1));
	    }
	}
      break;

    case MINUS_EXPR:
      /* - (A - B) -> B - A  */
      if (!HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type))
	  && !HONOR_SIGNED_ZEROS (TYPE_MODE (type))
	  && reorder_operands_p (TREE_OPERAND (t, 0), TREE_OPERAND (t, 1)))
	return fold_build2_loc (loc, MINUS_EXPR, type,
				TREE_OPERAND (t, 1), TREE_OPERAND (t, 0));
      break;

    case MULT_EXPR:
      if (TYPE_UNSIGNED (type))
	break;

      /* Fall through.  */

    case RDIV_EXPR:
      if (! HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type)))
	{
	  tem = TREE_OPERAND (t, 1);
	  if (negate_expr_p (tem))
	    return fold_build2_loc (loc, TREE_CODE (t), type,
				    TREE_OPERAND (t, 0), negate_expr (tem));
	  tem = TREE_OPERAND (t, 0);
	  if (negate_expr_p (tem))
	    return fold_build2_loc (loc, TREE_CODE (t), type,
				    negate_expr (tem), TREE_OPERAND (t, 1));
	}
      break;

    case TRUNC_DIV_EXPR:
    case ROUND_DIV_EXPR:
    case EXACT_DIV_EXPR:
      /* In general we can't negate A / B, because if A is INT_MIN and
	 B is 1, we may turn this into INT_MIN / -1 which is undefined
	 and actually traps on some architectures.  But if overflow is
	 undefined, we can negate, because - (INT_MIN / 1) is an
	 overflow.  */
      if (!INTEGRAL_TYPE_P (type) || TYPE_OVERFLOW_UNDEFINED (type))
	{
	  const char * const warnmsg = G_("assuming signed overflow does not "
					  "occur when negating a division");
	  tem = TREE_OPERAND (t, 1);
	  if (negate_expr_p (tem))
	    {
	      if (INTEGRAL_TYPE_P (type)
		  && (TREE_CODE (tem) != INTEGER_CST
		      || integer_onep (tem)))
		fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_MISC);
	      return fold_build2_loc (loc, TREE_CODE (t), type,
				      TREE_OPERAND (t, 0), negate_expr (tem));
	    }
	  /* If overflow is undefined then we have to be careful because
	     we ask whether it's ok to associate the negate with the
	     division which is not ok for example for
	     -((a - b) / c) where (-(a - b)) / c may invoke undefined
	     overflow because of negating INT_MIN.  So do not use
	     negate_expr_p here but open-code the two important cases.  */
	  tem = TREE_OPERAND (t, 0);
	  if ((INTEGRAL_TYPE_P (type)
	       && (TREE_CODE (tem) == NEGATE_EXPR
		   || (TREE_CODE (tem) == INTEGER_CST
		       && may_negate_without_overflow_p (tem))))
	      || !INTEGRAL_TYPE_P (type))
	    return fold_build2_loc (loc, TREE_CODE (t), type,
				    negate_expr (tem), TREE_OPERAND (t, 1));
	}
      break;

    case NOP_EXPR:
      /* Convert -((double)float) into (double)(-float).  */
      if (TREE_CODE (type) == REAL_TYPE)
	{
	  tem = strip_float_extensions (t);
	  if (tem != t && negate_expr_p (tem))
	    return fold_convert_loc (loc, type, negate_expr (tem));
	}
      break;

    case CALL_EXPR:
      /* Negate -f(x) as f(-x).  */
      if (negate_mathfn_p (builtin_mathfn_code (t))
	  && negate_expr_p (CALL_EXPR_ARG (t, 0)))
	{
	  tree fndecl, arg;

	  fndecl = get_callee_fndecl (t);
	  arg = negate_expr (CALL_EXPR_ARG (t, 0));
	  return build_call_expr_loc (loc, fndecl, 1, arg);
	}
      break;

    case RSHIFT_EXPR:
      /* Optimize -((int)x >> 31) into (unsigned)x >> 31 for int.  */
      if (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST)
	{
	  tree op1 = TREE_OPERAND (t, 1);
	  if (wi::eq_p (op1, TYPE_PRECISION (type) - 1))
	    {
	      tree ntype = TYPE_UNSIGNED (type)
			   ? signed_type_for (type)
			   : unsigned_type_for (type);
	      tree temp = fold_convert_loc (loc, ntype, TREE_OPERAND (t, 0));
	      temp = fold_build2_loc (loc, RSHIFT_EXPR, ntype, temp, op1);
	      return fold_convert_loc (loc, type, temp);
	    }
	}
      break;

    default:
      break;
    }

  return NULL_TREE;
}
/* Like fold_negate_expr, but return a NEGATE_EXPR tree if T cannot be
   negated in a simpler way.  Also allow for T to be NULL_TREE, in which
   case return NULL_TREE.  */

static tree
negate_expr (tree t)
{
  tree type, tem;
  location_t loc;

  if (t == NULL_TREE)
    return NULL_TREE;

  loc = EXPR_LOCATION (t);
  type = TREE_TYPE (t);
  STRIP_SIGN_NOPS (t);

  tem = fold_negate_expr (loc, t);
  if (!tem)
    tem = build1_loc (loc, NEGATE_EXPR, TREE_TYPE (t), t);
  return fold_convert_loc (loc, type, tem);
}
/* Split a tree IN into a constant, literal and variable parts that could be
   combined with CODE to make IN.  "constant" means an expression with
   TREE_CONSTANT but that isn't an actual constant.  CODE must be a
   commutative arithmetic operation.  Store the constant part into *CONP,
   the literal in *LITP and return the variable part.  If a part isn't
   present, set it to null.  If the tree does not decompose in this way,
   return the entire tree as the variable part and the other parts as null.

   If CODE is PLUS_EXPR we also split trees that use MINUS_EXPR.  In that
   case, we negate an operand that was subtracted.  Except if it is a
   literal for which we use *MINUS_LITP instead.

   If NEGATE_P is true, we are negating all of IN, again except a literal
   for which we use *MINUS_LITP instead.

   If IN is itself a literal or constant, return it as appropriate.

   Note that we do not guarantee that any of the three values will be the
   same type as IN, but they will have the same signedness and mode.  */

static tree
split_tree (tree in, enum tree_code code, tree *conp, tree *litp,
	    tree *minus_litp, int negate_p)
{
  tree var = 0;

  *conp = 0;
  *litp = 0;
  *minus_litp = 0;

  /* Strip any conversions that don't change the machine mode or signedness.  */
  STRIP_SIGN_NOPS (in);

  if (TREE_CODE (in) == INTEGER_CST || TREE_CODE (in) == REAL_CST
      || TREE_CODE (in) == FIXED_CST)
    *litp = in;
  else if (TREE_CODE (in) == code
	   || ((! FLOAT_TYPE_P (TREE_TYPE (in)) || flag_associative_math)
	       && ! SAT_FIXED_POINT_TYPE_P (TREE_TYPE (in))
	       /* We can associate addition and subtraction together (even
		  though the C standard doesn't say so) for integers because
		  the value is not affected.  For reals, the value might be
		  affected, so we can't.  */
	       && ((code == PLUS_EXPR && TREE_CODE (in) == MINUS_EXPR)
		   || (code == MINUS_EXPR && TREE_CODE (in) == PLUS_EXPR))))
    {
      tree op0 = TREE_OPERAND (in, 0);
      tree op1 = TREE_OPERAND (in, 1);
      int neg1_p = TREE_CODE (in) == MINUS_EXPR;
      int neg_litp_p = 0, neg_conp_p = 0, neg_var_p = 0;

      /* First see if either of the operands is a literal, then a constant.  */
      if (TREE_CODE (op0) == INTEGER_CST || TREE_CODE (op0) == REAL_CST
	  || TREE_CODE (op0) == FIXED_CST)
	*litp = op0, op0 = 0;
      else if (TREE_CODE (op1) == INTEGER_CST || TREE_CODE (op1) == REAL_CST
	       || TREE_CODE (op1) == FIXED_CST)
	*litp = op1, neg_litp_p = neg1_p, op1 = 0;

      if (op0 != 0 && TREE_CONSTANT (op0))
	*conp = op0, op0 = 0;
      else if (op1 != 0 && TREE_CONSTANT (op1))
	*conp = op1, neg_conp_p = neg1_p, op1 = 0;

      /* If we haven't dealt with either operand, this is not a case we can
	 decompose.  Otherwise, VAR is either of the ones remaining, if any.  */
      if (op0 != 0 && op1 != 0)
	var = in;
      else if (op0 != 0)
	var = op0;
      else
	var = op1, neg_var_p = neg1_p;

      /* Now do any needed negations.  */
      if (neg_litp_p)
	*minus_litp = *litp, *litp = 0;
      if (neg_conp_p)
	*conp = negate_expr (*conp);
      if (neg_var_p)
	var = negate_expr (var);
    }
  else if (TREE_CODE (in) == BIT_NOT_EXPR
	   && code == PLUS_EXPR)
    {
      /* -X - 1 is folded to ~X, undo that here.  */
      *minus_litp = build_one_cst (TREE_TYPE (in));
      var = negate_expr (TREE_OPERAND (in, 0));
    }
  else if (TREE_CONSTANT (in))
    *conp = in;
  else
    var = in;

  if (negate_p)
    {
      if (*litp)
	*minus_litp = *litp, *litp = 0;
      else if (*minus_litp)
	*litp = *minus_litp, *minus_litp = 0;
      *conp = negate_expr (*conp);
      var = negate_expr (var);
    }

  return var;
}
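/* For example, with CODE == PLUS_EXPR, splitting "x - 4" yields
   VAR == x with *MINUS_LITP == 4 (the literal was subtracted), while
   splitting "4 - x" yields *LITP == 4 and VAR == -x (the subtracted
   variable part is negated).  */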
/* Re-associate trees split by the above function.  T1 and T2 are
   either expressions to associate or null.  Return the new
   expression, if any.  LOC is the location of the new expression.  If
   we build an operation, do it in TYPE and with CODE.  */

static tree
associate_trees (location_t loc, tree t1, tree t2, enum tree_code code, tree type)
{
  if (t1 == 0)
    return t2;
  else if (t2 == 0)
    return t1;

  /* If either input is CODE, a PLUS_EXPR, or a MINUS_EXPR, don't
     try to fold this since we will have infinite recursion.  But do
     deal with any NEGATE_EXPRs.  */
  if (TREE_CODE (t1) == code || TREE_CODE (t2) == code
      || TREE_CODE (t1) == MINUS_EXPR || TREE_CODE (t2) == MINUS_EXPR)
    {
      if (code == PLUS_EXPR)
	{
	  if (TREE_CODE (t1) == NEGATE_EXPR)
	    return build2_loc (loc, MINUS_EXPR, type,
			       fold_convert_loc (loc, type, t2),
			       fold_convert_loc (loc, type,
						 TREE_OPERAND (t1, 0)));
	  else if (TREE_CODE (t2) == NEGATE_EXPR)
	    return build2_loc (loc, MINUS_EXPR, type,
			       fold_convert_loc (loc, type, t1),
			       fold_convert_loc (loc, type,
						 TREE_OPERAND (t2, 0)));
	  else if (integer_zerop (t2))
	    return fold_convert_loc (loc, type, t1);
	}
      else if (code == MINUS_EXPR)
	{
	  if (integer_zerop (t2))
	    return fold_convert_loc (loc, type, t1);
	}

      return build2_loc (loc, code, type, fold_convert_loc (loc, type, t1),
			 fold_convert_loc (loc, type, t2));
    }

  return fold_build2_loc (loc, code, type, fold_convert_loc (loc, type, t1),
			  fold_convert_loc (loc, type, t2));
}
/* Check whether TYPE1 and TYPE2 are equivalent integer types, suitable
   for use in int_const_binop, size_binop and size_diffop.  */

static bool
int_binop_types_match_p (enum tree_code code, const_tree type1, const_tree type2)
{
  if (!INTEGRAL_TYPE_P (type1) && !POINTER_TYPE_P (type1))
    return false;
  if (!INTEGRAL_TYPE_P (type2) && !POINTER_TYPE_P (type2))
    return false;

  switch (code)
    {
    case LSHIFT_EXPR:
    case RSHIFT_EXPR:
    case LROTATE_EXPR:
    case RROTATE_EXPR:
      return true;

    default:
      break;
    }

  return TYPE_UNSIGNED (type1) == TYPE_UNSIGNED (type2)
	 && TYPE_PRECISION (type1) == TYPE_PRECISION (type2)
	 && TYPE_MODE (type1) == TYPE_MODE (type2);
}
/* Combine two integer constants ARG1 and ARG2 under operation CODE
   to produce a new constant.  Return NULL_TREE if we don't know how
   to evaluate CODE at compile-time.  */

static tree
int_const_binop_1 (enum tree_code code, const_tree arg1, const_tree parg2,
		   int overflowable)
{
  wide_int res;
  tree t;
  tree type = TREE_TYPE (arg1);
  signop sign = TYPE_SIGN (type);
  bool overflow = false;

  wide_int arg2 = wide_int::from (parg2, TYPE_PRECISION (type),
				  TYPE_SIGN (TREE_TYPE (parg2)));

  switch (code)
    {
    case BIT_IOR_EXPR:
      res = wi::bit_or (arg1, arg2);
      break;

    case BIT_XOR_EXPR:
      res = wi::bit_xor (arg1, arg2);
      break;

    case BIT_AND_EXPR:
      res = wi::bit_and (arg1, arg2);
      break;

    case RSHIFT_EXPR:
    case LSHIFT_EXPR:
      if (wi::neg_p (arg2))
	{
	  arg2 = -arg2;
	  if (code == RSHIFT_EXPR)
	    code = LSHIFT_EXPR;
	  else
	    code = RSHIFT_EXPR;
	}

      if (code == RSHIFT_EXPR)
	/* It's unclear from the C standard whether shifts can overflow.
	   The following code ignores overflow; perhaps a C standard
	   interpretation ruling is needed.  */
	res = wi::rshift (arg1, arg2, sign);
      else
	res = wi::lshift (arg1, arg2);
      break;

    case RROTATE_EXPR:
    case LROTATE_EXPR:
      if (wi::neg_p (arg2))
	{
	  arg2 = -arg2;
	  if (code == RROTATE_EXPR)
	    code = LROTATE_EXPR;
	  else
	    code = RROTATE_EXPR;
	}

      if (code == RROTATE_EXPR)
	res = wi::rrotate (arg1, arg2);
      else
	res = wi::lrotate (arg1, arg2);
      break;

    case PLUS_EXPR:
      res = wi::add (arg1, arg2, sign, &overflow);
      break;

    case MINUS_EXPR:
      res = wi::sub (arg1, arg2, sign, &overflow);
      break;

    case MULT_EXPR:
      res = wi::mul (arg1, arg2, sign, &overflow);
      break;

    case MULT_HIGHPART_EXPR:
      res = wi::mul_high (arg1, arg2, sign);
      break;

    case TRUNC_DIV_EXPR:
    case EXACT_DIV_EXPR:
      if (arg2 == 0)
	return NULL_TREE;
      res = wi::div_trunc (arg1, arg2, sign, &overflow);
      break;

    case FLOOR_DIV_EXPR:
      if (arg2 == 0)
	return NULL_TREE;
      res = wi::div_floor (arg1, arg2, sign, &overflow);
      break;

    case CEIL_DIV_EXPR:
      if (arg2 == 0)
	return NULL_TREE;
      res = wi::div_ceil (arg1, arg2, sign, &overflow);
      break;

    case ROUND_DIV_EXPR:
      if (arg2 == 0)
	return NULL_TREE;
      res = wi::div_round (arg1, arg2, sign, &overflow);
      break;

    case TRUNC_MOD_EXPR:
      if (arg2 == 0)
	return NULL_TREE;
      res = wi::mod_trunc (arg1, arg2, sign, &overflow);
      break;

    case FLOOR_MOD_EXPR:
      if (arg2 == 0)
	return NULL_TREE;
      res = wi::mod_floor (arg1, arg2, sign, &overflow);
      break;

    case CEIL_MOD_EXPR:
      if (arg2 == 0)
	return NULL_TREE;
      res = wi::mod_ceil (arg1, arg2, sign, &overflow);
      break;

    case ROUND_MOD_EXPR:
      if (arg2 == 0)
	return NULL_TREE;
      res = wi::mod_round (arg1, arg2, sign, &overflow);
      break;

    case MIN_EXPR:
      res = wi::min (arg1, arg2, sign);
      break;

    case MAX_EXPR:
      res = wi::max (arg1, arg2, sign);
      break;

    default:
      return NULL_TREE;
    }

  t = force_fit_type (type, res, overflowable,
		      (((sign == SIGNED || overflowable == -1)
			&& overflow)
		       | TREE_OVERFLOW (arg1) | TREE_OVERFLOW (parg2)));

  return t;
}

tree
int_const_binop (enum tree_code code, const_tree arg1, const_tree arg2)
{
  return int_const_binop_1 (code, arg1, arg2, 1);
}
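/* For example, int_const_binop (PLUS_EXPR, ...) on INTEGER_CSTs 2 and
   3 of type int folds to the INTEGER_CST 5; if the exact result does
   not fit the type, the returned constant carries TREE_OVERFLOW.  */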
/* Combine two constants ARG1 and ARG2 under operation CODE to produce a new
   constant.  We assume ARG1 and ARG2 have the same data type, or at least
   are the same kind of constant and the same machine mode.  Return zero if
   combining the constants is not allowed in the current operating mode.  */

static tree
const_binop (enum tree_code code, tree arg1, tree arg2)
{
  /* Sanity check for the recursive cases.  */
  if (!arg1 || !arg2)
    return NULL_TREE;

  STRIP_NOPS (arg1);
  STRIP_NOPS (arg2);

  if (TREE_CODE (arg1) == INTEGER_CST)
    return int_const_binop (code, arg1, arg2);

  if (TREE_CODE (arg1) == REAL_CST)
    {
      enum machine_mode mode;
      REAL_VALUE_TYPE d1;
      REAL_VALUE_TYPE d2;
      REAL_VALUE_TYPE value;
      REAL_VALUE_TYPE result;
      bool inexact;
      tree t, type;

      /* The following codes are handled by real_arithmetic.  */
      switch (code)
	{
	case PLUS_EXPR:
	case MINUS_EXPR:
	case MULT_EXPR:
	case RDIV_EXPR:
	case MIN_EXPR:
	case MAX_EXPR:
	  break;

	default:
	  return NULL_TREE;
	}

      d1 = TREE_REAL_CST (arg1);
      d2 = TREE_REAL_CST (arg2);

      type = TREE_TYPE (arg1);
      mode = TYPE_MODE (type);

      /* Don't perform operation if we honor signaling NaNs and
	 either operand is a NaN.  */
      if (HONOR_SNANS (mode)
	  && (REAL_VALUE_ISNAN (d1) || REAL_VALUE_ISNAN (d2)))
	return NULL_TREE;

      /* Don't perform operation if it would raise a division
	 by zero exception.  */
      if (code == RDIV_EXPR
	  && REAL_VALUES_EQUAL (d2, dconst0)
	  && (flag_trapping_math || ! MODE_HAS_INFINITIES (mode)))
	return NULL_TREE;

      /* If either operand is a NaN, just return it.  Otherwise, set up
	 for floating-point trap; we return an overflow.  */
      if (REAL_VALUE_ISNAN (d1))
	return arg1;
      else if (REAL_VALUE_ISNAN (d2))
	return arg2;

      inexact = real_arithmetic (&value, code, &d1, &d2);
      real_convert (&result, mode, &value);

      /* Don't constant fold this floating point operation if
	 the result has overflowed and flag_trapping_math.  */
      if (flag_trapping_math
	  && MODE_HAS_INFINITIES (mode)
	  && REAL_VALUE_ISINF (result)
	  && !REAL_VALUE_ISINF (d1)
	  && !REAL_VALUE_ISINF (d2))
	return NULL_TREE;

      /* Don't constant fold this floating point operation if the
	 result may depend upon the run-time rounding mode and
	 flag_rounding_math is set, or if GCC's software emulation
	 is unable to accurately represent the result.  */
      if ((flag_rounding_math
	   || (MODE_COMPOSITE_P (mode) && !flag_unsafe_math_optimizations))
	  && (inexact || !real_identical (&result, &value)))
	return NULL_TREE;

      t = build_real (type, result);

      TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2);
      return t;
    }

  if (TREE_CODE (arg1) == FIXED_CST)
    {
      FIXED_VALUE_TYPE f1;
      FIXED_VALUE_TYPE f2;
      FIXED_VALUE_TYPE result;
      tree t, type;
      int sat_p;
      bool overflow_p;

      /* The following codes are handled by fixed_arithmetic.  */
      switch (code)
	{
	case PLUS_EXPR:
	case MINUS_EXPR:
	case MULT_EXPR:
	case TRUNC_DIV_EXPR:
	  f2 = TREE_FIXED_CST (arg2);
	  break;

	case LSHIFT_EXPR:
	case RSHIFT_EXPR:
	  {
	    wide_int w2 = arg2;
	    f2.data.high = w2.elt (1);
	    f2.data.low = w2.elt (0);
	    f2.mode = SImode;
	  }
	  break;

	default:
	  return NULL_TREE;
	}

      f1 = TREE_FIXED_CST (arg1);
      type = TREE_TYPE (arg1);
      sat_p = TYPE_SATURATING (type);
      overflow_p = fixed_arithmetic (&result, code, &f1, &f2, sat_p);
      t = build_fixed (type, result);
      /* Propagate overflow flags.  */
      if (overflow_p | TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2))
	TREE_OVERFLOW (t) = 1;
      return t;
    }

  if (TREE_CODE (arg1) == COMPLEX_CST)
    {
      tree type = TREE_TYPE (arg1);
      tree r1 = TREE_REALPART (arg1);
      tree i1 = TREE_IMAGPART (arg1);
      tree r2 = TREE_REALPART (arg2);
      tree i2 = TREE_IMAGPART (arg2);
      tree real, imag;

      switch (code)
	{
	case PLUS_EXPR:
	case MINUS_EXPR:
	  real = const_binop (code, r1, r2);
	  imag = const_binop (code, i1, i2);
	  break;

	case MULT_EXPR:
	  if (COMPLEX_FLOAT_TYPE_P (type))
	    return do_mpc_arg2 (arg1, arg2, type,
				/* do_nonfinite= */ folding_initializer,
				mpc_mul);

	  real = const_binop (MINUS_EXPR,
			      const_binop (MULT_EXPR, r1, r2),
			      const_binop (MULT_EXPR, i1, i2));
	  imag = const_binop (PLUS_EXPR,
			      const_binop (MULT_EXPR, r1, i2),
			      const_binop (MULT_EXPR, i1, r2));
	  break;

	case RDIV_EXPR:
	  if (COMPLEX_FLOAT_TYPE_P (type))
	    return do_mpc_arg2 (arg1, arg2, type,
				/* do_nonfinite= */ folding_initializer,
				mpc_div);
	  /* Fallthru ... */
	case TRUNC_DIV_EXPR:
	case CEIL_DIV_EXPR:
	case FLOOR_DIV_EXPR:
	case ROUND_DIV_EXPR:
	  if (flag_complex_method == 0)
	    {
	      /* Keep this algorithm in sync with
		 tree-complex.c:expand_complex_div_straight().

		 Expand complex division to scalars, straightforward algorithm.
		 a / b = ((ar*br + ai*bi)/t) + i((ai*br - ar*bi)/t)
		 t = br*br + bi*bi  */
	      tree magsquared
		= const_binop (PLUS_EXPR,
			       const_binop (MULT_EXPR, r2, r2),
			       const_binop (MULT_EXPR, i2, i2));
	      tree t1
		= const_binop (PLUS_EXPR,
			       const_binop (MULT_EXPR, r1, r2),
			       const_binop (MULT_EXPR, i1, i2));
	      tree t2
		= const_binop (MINUS_EXPR,
			       const_binop (MULT_EXPR, i1, r2),
			       const_binop (MULT_EXPR, r1, i2));

	      real = const_binop (code, t1, magsquared);
	      imag = const_binop (code, t2, magsquared);
	    }
	  else
	    {
	      /* Keep this algorithm in sync with
		 tree-complex.c:expand_complex_div_wide().

		 Expand complex division to scalars, modified algorithm to
		 minimize overflow with wide input ranges.  */
	      tree compare = fold_build2 (LT_EXPR, boolean_type_node,
					  fold_abs_const (r2, TREE_TYPE (type)),
					  fold_abs_const (i2, TREE_TYPE (type)));

	      if (integer_nonzerop (compare))
		{
		  /* In the TRUE branch, we compute
		     ratio = br/bi;
		     div = (br * ratio) + bi;
		     tr = (ar * ratio) + ai;
		     ti = (ai * ratio) - ar;
		     tr = tr / div;
		     ti = ti / div;  */
		  tree ratio = const_binop (code, r2, i2);
		  tree div = const_binop (PLUS_EXPR, i2,
					  const_binop (MULT_EXPR, r2, ratio));
		  real = const_binop (MULT_EXPR, r1, ratio);
		  real = const_binop (PLUS_EXPR, real, i1);
		  real = const_binop (code, real, div);

		  imag = const_binop (MULT_EXPR, i1, ratio);
		  imag = const_binop (MINUS_EXPR, imag, r1);
		  imag = const_binop (code, imag, div);
		}
	      else
		{
		  /* In the FALSE branch, we compute
		     ratio = d/c;
		     divisor = (d * ratio) + c;
		     tr = (b * ratio) + a;
		     ti = b - (a * ratio);
		     tr = tr / div;
		     ti = ti / div;  */
		  tree ratio = const_binop (code, i2, r2);
		  tree div = const_binop (PLUS_EXPR, r2,
					  const_binop (MULT_EXPR, i2, ratio));

		  real = const_binop (MULT_EXPR, i1, ratio);
		  real = const_binop (PLUS_EXPR, real, r1);
		  real = const_binop (code, real, div);

		  imag = const_binop (MULT_EXPR, r1, ratio);
		  imag = const_binop (MINUS_EXPR, i1, imag);
		  imag = const_binop (code, imag, div);
		}
	    }
	  break;

	default:
	  return NULL_TREE;
	}

      if (real && imag)
	return build_complex (type, real, imag);
    }

  if (TREE_CODE (arg1) == VECTOR_CST
      && TREE_CODE (arg2) == VECTOR_CST)
    {
      tree type = TREE_TYPE (arg1);
      int count = TYPE_VECTOR_SUBPARTS (type), i;
      tree *elts = XALLOCAVEC (tree, count);

      for (i = 0; i < count; i++)
	{
	  tree elem1 = VECTOR_CST_ELT (arg1, i);
	  tree elem2 = VECTOR_CST_ELT (arg2, i);

	  elts[i] = const_binop (code, elem1, elem2);

	  /* It is possible that const_binop cannot handle the given
	     code and returns NULL_TREE.  */
	  if (elts[i] == NULL_TREE)
	    return NULL_TREE;
	}

      return build_vector (type, elts);
    }

  /* Shifts allow a scalar offset for a vector.  */
  if (TREE_CODE (arg1) == VECTOR_CST
      && TREE_CODE (arg2) == INTEGER_CST)
    {
      tree type = TREE_TYPE (arg1);
      int count = TYPE_VECTOR_SUBPARTS (type), i;
      tree *elts = XALLOCAVEC (tree, count);

      if (code == VEC_LSHIFT_EXPR
	  || code == VEC_RSHIFT_EXPR)
	{
	  if (!tree_fits_uhwi_p (arg2))
	    return NULL_TREE;

	  unsigned HOST_WIDE_INT shiftc = tree_to_uhwi (arg2);
	  unsigned HOST_WIDE_INT outerc = tree_to_uhwi (TYPE_SIZE (type));
	  unsigned HOST_WIDE_INT innerc
	    = tree_to_uhwi (TYPE_SIZE (TREE_TYPE (type)));
	  if (shiftc >= outerc || (shiftc % innerc) != 0)
	    return NULL_TREE;
	  int offset = shiftc / innerc;
	  /* The direction of VEC_[LR]SHIFT_EXPR is endian dependent.
	     For reductions, compiler emits VEC_RSHIFT_EXPR always,
	     for !BYTES_BIG_ENDIAN picks first vector element, but
	     for BYTES_BIG_ENDIAN last element from the vector.  */
	  if ((code == VEC_RSHIFT_EXPR) ^ (!BYTES_BIG_ENDIAN))
	    offset = -offset;
	  tree zero = build_zero_cst (TREE_TYPE (type));
	  for (i = 0; i < count; i++)
	    {
	      if (i + offset < 0 || i + offset >= count)
		elts[i] = zero;
	      else
		elts[i] = VECTOR_CST_ELT (arg1, i + offset);
	    }
	}
      else
	for (i = 0; i < count; i++)
	  {
	    tree elem1 = VECTOR_CST_ELT (arg1, i);

	    elts[i] = const_binop (code, elem1, arg2);

	    /* It is possible that const_binop cannot handle the given
	       code and returns NULL_TREE.  */
	    if (elts[i] == NULL_TREE)
	      return NULL_TREE;
	  }

      return build_vector (type, elts);
    }

  return NULL_TREE;
}
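/* For example, const_binop (MULT_EXPR, ...) on REAL_CSTs 2.0 and 3.0
   folds to the REAL_CST 6.0, whereas an unhandled combination simply
   yields NULL_TREE and the caller keeps the original expression.  */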
/* Create a sizetype INT_CST node with NUMBER sign extended.  KIND
   indicates which particular sizetype to create.  */

tree
size_int_kind (HOST_WIDE_INT number, enum size_type_kind kind)
{
  return build_int_cst (sizetype_tab[(int) kind], number);
}
/* Combine operands OP1 and OP2 with arithmetic operation CODE.  CODE
   is a tree code.  The type of the result is taken from the operands.
   Both must be equivalent integer types, ala int_binop_types_match_p.
   If the operands are constant, so is the result.  */

tree
size_binop_loc (location_t loc, enum tree_code code, tree arg0, tree arg1)
{
  tree type = TREE_TYPE (arg0);

  if (arg0 == error_mark_node || arg1 == error_mark_node)
    return error_mark_node;

  gcc_assert (int_binop_types_match_p (code, TREE_TYPE (arg0),
				       TREE_TYPE (arg1)));

  /* Handle the special case of two integer constants faster.  */
  if (TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
    {
      /* And some specific cases even faster than that.  */
      if (code == PLUS_EXPR)
	{
	  if (integer_zerop (arg0) && !TREE_OVERFLOW (arg0))
	    return arg1;
	  if (integer_zerop (arg1) && !TREE_OVERFLOW (arg1))
	    return arg0;
	}
      else if (code == MINUS_EXPR)
	{
	  if (integer_zerop (arg1) && !TREE_OVERFLOW (arg1))
	    return arg0;
	}
      else if (code == MULT_EXPR)
	{
	  if (integer_onep (arg0) && !TREE_OVERFLOW (arg0))
	    return arg1;
	}

      /* Handle general case of two integer constants.  For sizetype
	 constant calculations we always want to know about overflow,
	 even in the unsigned case.  */
      return int_const_binop_1 (code, arg0, arg1, -1);
    }

  return fold_build2_loc (loc, code, type, arg0, arg1);
}
/* Given two values, either both of sizetype or both of bitsizetype,
   compute the difference between the two values.  Return the value
   in signed type corresponding to the type of the operands.  */

tree
size_diffop_loc (location_t loc, tree arg0, tree arg1)
{
  tree type = TREE_TYPE (arg0);
  tree ctype;

  gcc_assert (int_binop_types_match_p (MINUS_EXPR, TREE_TYPE (arg0),
				       TREE_TYPE (arg1)));

  /* If the type is already signed, just do the simple thing.  */
  if (!TYPE_UNSIGNED (type))
    return size_binop_loc (loc, MINUS_EXPR, arg0, arg1);

  if (type == sizetype)
    ctype = ssizetype;
  else if (type == bitsizetype)
    ctype = sbitsizetype;
  else
    ctype = signed_type_for (type);

  /* If either operand is not a constant, do the conversions to the signed
     type and subtract.  The hardware will do the right thing with any
     overflow in the subtraction.  */
  if (TREE_CODE (arg0) != INTEGER_CST || TREE_CODE (arg1) != INTEGER_CST)
    return size_binop_loc (loc, MINUS_EXPR,
			   fold_convert_loc (loc, ctype, arg0),
			   fold_convert_loc (loc, ctype, arg1));

  /* If ARG0 is larger than ARG1, subtract and return the result in CTYPE.
     Otherwise, subtract the other way, convert to CTYPE (we know that can't
     overflow) and negate (which can't either).  Special-case a result
     of zero while we're here.  */
  if (tree_int_cst_equal (arg0, arg1))
    return build_int_cst (ctype, 0);
  else if (tree_int_cst_lt (arg1, arg0))
    return fold_convert_loc (loc, ctype,
			     size_binop_loc (loc, MINUS_EXPR, arg0, arg1));
  else
    return size_binop_loc (loc, MINUS_EXPR, build_int_cst (ctype, 0),
			   fold_convert_loc (loc, ctype,
					     size_binop_loc (loc,
							     MINUS_EXPR,
							     arg1, arg0)));
}
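/* For example, for sizetype constants 2 and 5, size_diffop folds the
   difference to the ssizetype constant -3: since 5 > 2, the
   subtraction is carried out as 5 - 2 (which cannot overflow) and the
   result negated after conversion to the signed type.  */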
/* A subroutine of fold_convert_const handling conversions of an
   INTEGER_CST to another integer type.  */

static tree
fold_convert_const_int_from_int (tree type, const_tree arg1)
{
  /* Given an integer constant, make new constant with new type,
     appropriately sign-extended or truncated.  Use widest_int
     so that any extension is done according to ARG1's type.  */
  return force_fit_type (type, wi::to_widest (arg1),
			 !POINTER_TYPE_P (TREE_TYPE (arg1)),
			 TREE_OVERFLOW (arg1));
}
/* A subroutine of fold_convert_const handling conversions of a REAL_CST
   to an integer type.  */

static tree
fold_convert_const_int_from_real (enum tree_code code, tree type, const_tree arg1)
{
  bool overflow = false;
  tree t;

  /* The following code implements the floating point to integer
     conversion rules required by the Java Language Specification,
     that IEEE NaNs are mapped to zero and values that overflow
     the target precision saturate, i.e. values greater than
     INT_MAX are mapped to INT_MAX, and values less than INT_MIN
     are mapped to INT_MIN.  These semantics are allowed by the
     C and C++ standards that simply state that the behavior of
     FP-to-integer conversion is unspecified upon overflow.  */

  wide_int val;
  REAL_VALUE_TYPE r;
  REAL_VALUE_TYPE x = TREE_REAL_CST (arg1);

  switch (code)
    {
    case FIX_TRUNC_EXPR:
      real_trunc (&r, VOIDmode, &x);
      break;

    default:
      gcc_unreachable ();
    }

  /* If R is NaN, return zero and show we have an overflow.  */
  if (REAL_VALUE_ISNAN (r))
    {
      overflow = true;
      val = wi::zero (TYPE_PRECISION (type));
    }

  /* See if R is less than the lower bound or greater than the
     upper bound.  */

  if (! overflow)
    {
      tree lt = TYPE_MIN_VALUE (type);
      REAL_VALUE_TYPE l = real_value_from_int_cst (NULL_TREE, lt);
      if (REAL_VALUES_LESS (r, l))
	{
	  overflow = true;
	  val = lt;
	}
    }

  if (! overflow)
    {
      tree ut = TYPE_MAX_VALUE (type);
      if (ut)
	{
	  REAL_VALUE_TYPE u = real_value_from_int_cst (NULL_TREE, ut);
	  if (REAL_VALUES_LESS (u, r))
	    {
	      overflow = true;
	      val = ut;
	    }
	}
    }

  if (! overflow)
    val = real_to_integer (&r, &overflow, TYPE_PRECISION (type));

  t = force_fit_type (type, val, -1, overflow | TREE_OVERFLOW (arg1));
  return t;
}
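/* For example, converting the REAL_CST 1.5 to int yields 1, since
   FIX_TRUNC_EXPR truncates toward zero; converting 1.0e30 to a 32-bit
   int saturates to INT_MAX with TREE_OVERFLOW set, and converting a
   NaN yields 0, likewise flagged as an overflow.  */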
/* A subroutine of fold_convert_const handling conversions of a
   FIXED_CST to an integer type.  */

static tree
fold_convert_const_int_from_fixed (tree type, const_tree arg1)
{
  tree t;
  double_int temp, temp_trunc;
  unsigned int mode;

  /* Right shift FIXED_CST to temp by fbit.  */
  temp = TREE_FIXED_CST (arg1).data;
  mode = TREE_FIXED_CST (arg1).mode;
  if (GET_MODE_FBIT (mode) < HOST_BITS_PER_DOUBLE_INT)
    {
      temp = temp.rshift (GET_MODE_FBIT (mode),
			  HOST_BITS_PER_DOUBLE_INT,
			  SIGNED_FIXED_POINT_MODE_P (mode));

      /* Left shift temp to temp_trunc by fbit.  */
      temp_trunc = temp.lshift (GET_MODE_FBIT (mode),
				HOST_BITS_PER_DOUBLE_INT,
				SIGNED_FIXED_POINT_MODE_P (mode));
    }
  else
    {
      temp = double_int_zero;
      temp_trunc = double_int_zero;
    }

  /* If FIXED_CST is negative, we need to round the value toward 0.
     Check whether the fractional bits are nonzero, and if so add 1
     to TEMP.  */
  if (SIGNED_FIXED_POINT_MODE_P (mode)
      && temp_trunc.is_negative ()
      && TREE_FIXED_CST (arg1).data != temp_trunc)
    temp += double_int_one;

  /* Given a fixed-point constant, make new constant with new type,
     appropriately sign-extended or truncated.  */
  t = force_fit_type (type, temp, -1,
		      (temp.is_negative ()
		       && (TYPE_UNSIGNED (type)
			   < TYPE_UNSIGNED (TREE_TYPE (arg1))))
		      | TREE_OVERFLOW (arg1));

  return t;
}
/* A subroutine of fold_convert_const handling conversions of a REAL_CST
   to another floating point type.  */

static tree
fold_convert_const_real_from_real (tree type, const_tree arg1)
{
  REAL_VALUE_TYPE value;
  tree t;

  real_convert (&value, TYPE_MODE (type), &TREE_REAL_CST (arg1));
  t = build_real (type, value);

  /* If converting an infinity or NAN to a representation that doesn't
     have one, set the overflow bit so that we can produce some kind of
     error message at the appropriate point if necessary.  It's not the
     most user-friendly message, but it's better than nothing.  */
  if (REAL_VALUE_ISINF (TREE_REAL_CST (arg1))
      && !MODE_HAS_INFINITIES (TYPE_MODE (type)))
    TREE_OVERFLOW (t) = 1;
  else if (REAL_VALUE_ISNAN (TREE_REAL_CST (arg1))
	   && !MODE_HAS_NANS (TYPE_MODE (type)))
    TREE_OVERFLOW (t) = 1;
  /* Regular overflow, conversion produced an infinity in a mode that
     can't represent them.  */
  else if (!MODE_HAS_INFINITIES (TYPE_MODE (type))
	   && REAL_VALUE_ISINF (value)
	   && !REAL_VALUE_ISINF (TREE_REAL_CST (arg1)))
    TREE_OVERFLOW (t) = 1;
  else
    TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1);
  return t;
}
/* A subroutine of fold_convert_const handling conversions of a FIXED_CST
   to a floating point type.  */

static tree
fold_convert_const_real_from_fixed (tree type, const_tree arg1)
{
  REAL_VALUE_TYPE value;
  tree t;

  real_convert_from_fixed (&value, TYPE_MODE (type), &TREE_FIXED_CST (arg1));
  t = build_real (type, value);

  TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1);
  return t;
}
/* A subroutine of fold_convert_const handling conversions of a FIXED_CST
   to another fixed-point type.  */

static tree
fold_convert_const_fixed_from_fixed (tree type, const_tree arg1)
{
  FIXED_VALUE_TYPE value;
  tree t;
  bool overflow_p;

  overflow_p = fixed_convert (&value, TYPE_MODE (type), &TREE_FIXED_CST (arg1),
			      TYPE_SATURATING (type));
  t = build_fixed (type, value);

  /* Propagate overflow flags.  */
  if (overflow_p | TREE_OVERFLOW (arg1))
    TREE_OVERFLOW (t) = 1;
  return t;
}
/* A subroutine of fold_convert_const handling conversions of an INTEGER_CST
   to a fixed-point type.  */

static tree
fold_convert_const_fixed_from_int (tree type, const_tree arg1)
{
  FIXED_VALUE_TYPE value;
  tree t;
  bool overflow_p;
  double_int di;

  gcc_assert (TREE_INT_CST_NUNITS (arg1) <= 2);

  di.low = TREE_INT_CST_ELT (arg1, 0);
  if (TREE_INT_CST_NUNITS (arg1) == 1)
    di.high = (HOST_WIDE_INT) di.low < 0 ? (HOST_WIDE_INT) -1 : 0;
  else
    di.high = TREE_INT_CST_ELT (arg1, 1);

  overflow_p = fixed_convert_from_int (&value, TYPE_MODE (type), di,
				       TYPE_UNSIGNED (TREE_TYPE (arg1)),
				       TYPE_SATURATING (type));
  t = build_fixed (type, value);

  /* Propagate overflow flags.  */
  if (overflow_p | TREE_OVERFLOW (arg1))
    TREE_OVERFLOW (t) = 1;
  return t;
}
/* A subroutine of fold_convert_const handling conversions of a REAL_CST
   to a fixed-point type.  */

static tree
fold_convert_const_fixed_from_real (tree type, const_tree arg1)
{
  FIXED_VALUE_TYPE value;
  tree t;
  bool overflow_p;

  overflow_p = fixed_convert_from_real (&value, TYPE_MODE (type),
					&TREE_REAL_CST (arg1),
					TYPE_SATURATING (type));
  t = build_fixed (type, value);

  /* Propagate overflow flags.  */
  if (overflow_p | TREE_OVERFLOW (arg1))
    TREE_OVERFLOW (t) = 1;
  return t;
}
/* Attempt to fold type conversion operation CODE of expression ARG1 to
   type TYPE.  If no simplification can be done return NULL_TREE.  */

static tree
fold_convert_const (enum tree_code code, tree type, tree arg1)
{
  if (TREE_TYPE (arg1) == type)
    return arg1;

  if (POINTER_TYPE_P (type) || INTEGRAL_TYPE_P (type)
      || TREE_CODE (type) == OFFSET_TYPE)
    {
      if (TREE_CODE (arg1) == INTEGER_CST)
	return fold_convert_const_int_from_int (type, arg1);
      else if (TREE_CODE (arg1) == REAL_CST)
	return fold_convert_const_int_from_real (code, type, arg1);
      else if (TREE_CODE (arg1) == FIXED_CST)
	return fold_convert_const_int_from_fixed (type, arg1);
    }
  else if (TREE_CODE (type) == REAL_TYPE)
    {
      if (TREE_CODE (arg1) == INTEGER_CST)
	return build_real_from_int_cst (type, arg1);
      else if (TREE_CODE (arg1) == REAL_CST)
	return fold_convert_const_real_from_real (type, arg1);
      else if (TREE_CODE (arg1) == FIXED_CST)
	return fold_convert_const_real_from_fixed (type, arg1);
    }
  else if (TREE_CODE (type) == FIXED_POINT_TYPE)
    {
      if (TREE_CODE (arg1) == FIXED_CST)
	return fold_convert_const_fixed_from_fixed (type, arg1);
      else if (TREE_CODE (arg1) == INTEGER_CST)
	return fold_convert_const_fixed_from_int (type, arg1);
      else if (TREE_CODE (arg1) == REAL_CST)
	return fold_convert_const_fixed_from_real (type, arg1);
    }
  return NULL_TREE;
}
/* Construct a vector of zero elements of vector type TYPE.  */

static tree
build_zero_vector (tree type)
{
  tree t;

  t = fold_convert_const (NOP_EXPR, TREE_TYPE (type), integer_zero_node);
  return build_vector_from_val (type, t);
}
/* Returns true, if ARG is convertible to TYPE using a NOP_EXPR.  */

bool
fold_convertible_p (const_tree type, const_tree arg)
{
  tree orig = TREE_TYPE (arg);

  if (type == orig)
    return true;

  if (TREE_CODE (arg) == ERROR_MARK
      || TREE_CODE (type) == ERROR_MARK
      || TREE_CODE (orig) == ERROR_MARK)
    return false;

  if (TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (orig))
    return true;

  switch (TREE_CODE (type))
    {
    case INTEGER_TYPE: case ENUMERAL_TYPE: case BOOLEAN_TYPE:
    case POINTER_TYPE: case REFERENCE_TYPE:
    case OFFSET_TYPE:
      if (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig)
	  || TREE_CODE (orig) == OFFSET_TYPE)
	return true;
      return (TREE_CODE (orig) == VECTOR_TYPE
	      && tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (orig)));

    case REAL_TYPE:
    case FIXED_POINT_TYPE:
    case COMPLEX_TYPE:
    case VECTOR_TYPE:
    case VOID_TYPE:
      return TREE_CODE (type) == TREE_CODE (orig);

    default:
      return false;
    }
}
/* Convert expression ARG to type TYPE.  Used by the middle-end for
   simple conversions in preference to calling the front-end's convert.  */

tree
fold_convert_loc (location_t loc, tree type, tree arg)
{
  tree orig = TREE_TYPE (arg);
  tree tem;

  if (type == orig)
    return arg;

  if (TREE_CODE (arg) == ERROR_MARK
      || TREE_CODE (type) == ERROR_MARK
      || TREE_CODE (orig) == ERROR_MARK)
    return error_mark_node;

  switch (TREE_CODE (type))
    {
    case POINTER_TYPE:
    case REFERENCE_TYPE:
      /* Handle conversions between pointers to different address spaces.  */
      if (POINTER_TYPE_P (orig)
	  && (TYPE_ADDR_SPACE (TREE_TYPE (type))
	      != TYPE_ADDR_SPACE (TREE_TYPE (orig))))
	return fold_build1_loc (loc, ADDR_SPACE_CONVERT_EXPR, type, arg);
      /* fall through */

    case INTEGER_TYPE: case ENUMERAL_TYPE: case BOOLEAN_TYPE:
    case OFFSET_TYPE:
      if (TREE_CODE (arg) == INTEGER_CST)
	{
	  tem = fold_convert_const (NOP_EXPR, type, arg);
	  if (tem != NULL_TREE)
	    return tem;
	}
      if (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig)
	  || TREE_CODE (orig) == OFFSET_TYPE)
	return fold_build1_loc (loc, NOP_EXPR, type, arg);
      if (TREE_CODE (orig) == COMPLEX_TYPE)
	return fold_convert_loc (loc, type,
				 fold_build1_loc (loc, REALPART_EXPR,
						  TREE_TYPE (orig), arg));
      gcc_assert (TREE_CODE (orig) == VECTOR_TYPE
		  && tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (orig)));
      return fold_build1_loc (loc, NOP_EXPR, type, arg);

    case REAL_TYPE:
      if (TREE_CODE (arg) == INTEGER_CST)
	{
	  tem = fold_convert_const (FLOAT_EXPR, type, arg);
	  if (tem != NULL_TREE)
	    return tem;
	}
      else if (TREE_CODE (arg) == REAL_CST)
	{
	  tem = fold_convert_const (NOP_EXPR, type, arg);
	  if (tem != NULL_TREE)
	    return tem;
	}
      else if (TREE_CODE (arg) == FIXED_CST)
	{
	  tem = fold_convert_const (FIXED_CONVERT_EXPR, type, arg);
	  if (tem != NULL_TREE)
	    return tem;
	}

      switch (TREE_CODE (orig))
	{
	case INTEGER_TYPE:
	case BOOLEAN_TYPE: case ENUMERAL_TYPE:
	case POINTER_TYPE: case REFERENCE_TYPE:
	  return fold_build1_loc (loc, FLOAT_EXPR, type, arg);

	case REAL_TYPE:
	  return fold_build1_loc (loc, NOP_EXPR, type, arg);

	case FIXED_POINT_TYPE:
	  return fold_build1_loc (loc, FIXED_CONVERT_EXPR, type, arg);

	case COMPLEX_TYPE:
	  tem = fold_build1_loc (loc, REALPART_EXPR, TREE_TYPE (orig), arg);
	  return fold_convert_loc (loc, type, tem);

	default:
	  gcc_unreachable ();
	}

    case FIXED_POINT_TYPE:
      if (TREE_CODE (arg) == FIXED_CST || TREE_CODE (arg) == INTEGER_CST
	  || TREE_CODE (arg) == REAL_CST)
	{
	  tem = fold_convert_const (FIXED_CONVERT_EXPR, type, arg);
	  if (tem != NULL_TREE)
	    goto fold_convert_exit;
	}

      switch (TREE_CODE (orig))
	{
	case FIXED_POINT_TYPE:
	case INTEGER_TYPE:
	case ENUMERAL_TYPE:
	case BOOLEAN_TYPE:
	case REAL_TYPE:
	  return fold_build1_loc (loc, FIXED_CONVERT_EXPR, type, arg);

	case COMPLEX_TYPE:
	  tem = fold_build1_loc (loc, REALPART_EXPR, TREE_TYPE (orig), arg);
	  return fold_convert_loc (loc, type, tem);

	default:
	  gcc_unreachable ();
	}

    case COMPLEX_TYPE:
      switch (TREE_CODE (orig))
	{
	case INTEGER_TYPE:
	case BOOLEAN_TYPE: case ENUMERAL_TYPE:
	case POINTER_TYPE: case REFERENCE_TYPE:
	case REAL_TYPE:
	case FIXED_POINT_TYPE:
	  return fold_build2_loc (loc, COMPLEX_EXPR, type,
				  fold_convert_loc (loc, TREE_TYPE (type), arg),
				  fold_convert_loc (loc, TREE_TYPE (type),
						    integer_zero_node));
	case COMPLEX_TYPE:
	  {
	    tree rpart, ipart;

	    if (TREE_CODE (arg) == COMPLEX_EXPR)
	      {
		rpart = fold_convert_loc (loc, TREE_TYPE (type),
					  TREE_OPERAND (arg, 0));
		ipart = fold_convert_loc (loc, TREE_TYPE (type),
					  TREE_OPERAND (arg, 1));
		return fold_build2_loc (loc, COMPLEX_EXPR, type, rpart, ipart);
	      }

	    arg = save_expr (arg);
	    rpart = fold_build1_loc (loc, REALPART_EXPR, TREE_TYPE (orig), arg);
	    ipart = fold_build1_loc (loc, IMAGPART_EXPR, TREE_TYPE (orig), arg);
	    rpart = fold_convert_loc (loc, TREE_TYPE (type), rpart);
	    ipart = fold_convert_loc (loc, TREE_TYPE (type), ipart);
	    return fold_build2_loc (loc, COMPLEX_EXPR, type, rpart, ipart);
	  }

	default:
	  gcc_unreachable ();
	}

    case VECTOR_TYPE:
      if (integer_zerop (arg))
	return build_zero_vector (type);
      gcc_assert (tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (orig)));
      gcc_assert (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig)
		  || TREE_CODE (orig) == VECTOR_TYPE);
      return fold_build1_loc (loc, VIEW_CONVERT_EXPR, type, arg);

    case VOID_TYPE:
      tem = fold_ignored_result (arg);
      return fold_build1_loc (loc, NOP_EXPR, type, tem);

    default:
      if (TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (orig))
	return fold_build1_loc (loc, NOP_EXPR, type, arg);
      gcc_unreachable ();
2074 fold_convert_exit:
2075 protected_set_expr_location_unshare (tem, loc);
2076 return tem;
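
/* Illustrative usage sketch (editor's addition, not part of fold-const.c):
   per the COMPLEX_TYPE handling above, converting a complex value to a
   scalar type keeps only the real part.  double_type_node is GCC's
   predefined tree for "double"; the wrapper name is hypothetical.  */

static tree
complex_to_double_sketch (location_t loc, tree complex_val)
{
  /* For a COMPLEX_TYPE argument this folds to a REALPART_EXPR
     converted to double, as in the switch above.  */
  return fold_convert_loc (loc, double_type_node, complex_val);
}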
2079 /* Return false if expr can be assumed not to be an lvalue, true
2080 otherwise. */
2082 static bool
2083 maybe_lvalue_p (const_tree x)
2085 /* We only need to wrap lvalue tree codes. */
2086 switch (TREE_CODE (x))
2088 case VAR_DECL:
2089 case PARM_DECL:
2090 case RESULT_DECL:
2091 case LABEL_DECL:
2092 case FUNCTION_DECL:
2093 case SSA_NAME:
2095 case COMPONENT_REF:
2096 case MEM_REF:
2097 case INDIRECT_REF:
2098 case ARRAY_REF:
2099 case ARRAY_RANGE_REF:
2100 case BIT_FIELD_REF:
2101 case OBJ_TYPE_REF:
2103 case REALPART_EXPR:
2104 case IMAGPART_EXPR:
2105 case PREINCREMENT_EXPR:
2106 case PREDECREMENT_EXPR:
2107 case SAVE_EXPR:
2108 case TRY_CATCH_EXPR:
2109 case WITH_CLEANUP_EXPR:
2110 case COMPOUND_EXPR:
2111 case MODIFY_EXPR:
2112 case TARGET_EXPR:
2113 case COND_EXPR:
2114 case BIND_EXPR:
2115 break;
2117 default:
2118 /* Assume the worst for front-end tree codes. */
2119 if ((int)TREE_CODE (x) >= NUM_TREE_CODES)
2120 break;
2121 return false;
2124 return true;
2127 /* Return an expr equal to X but certainly not valid as an lvalue. */
2129 tree
2130 non_lvalue_loc (location_t loc, tree x)
2132 /* While we are in GIMPLE, NON_LVALUE_EXPR doesn't mean anything to
2133 us. */
2134 if (in_gimple_form)
2135 return x;
2137 if (! maybe_lvalue_p (x))
2138 return x;
2139 return build1_loc (loc, NON_LVALUE_EXPR, TREE_TYPE (x), x);
2142 /* Nonzero means lvalues are limited to those valid in pedantic ANSI C.
2143 Zero means allow extended lvalues. */
2145 int pedantic_lvalues;
2147 /* When pedantic, return an expr equal to X but certainly not valid as a
2148 pedantic lvalue. Otherwise, return X. */
2150 static tree
2151 pedantic_non_lvalue_loc (location_t loc, tree x)
2153 if (pedantic_lvalues)
2154 return non_lvalue_loc (loc, x);
2156 return protected_set_expr_location_unshare (x, loc);
2159 /* Given a tree comparison code, return the code that is the logical inverse.
2160 It is generally not safe to do this for floating-point comparisons, except
2161 for EQ_EXPR, NE_EXPR, ORDERED_EXPR and UNORDERED_EXPR, so we return
2162 ERROR_MARK in this case. */
2164 enum tree_code
2165 invert_tree_comparison (enum tree_code code, bool honor_nans)
2167 if (honor_nans && flag_trapping_math && code != EQ_EXPR && code != NE_EXPR
2168 && code != ORDERED_EXPR && code != UNORDERED_EXPR)
2169 return ERROR_MARK;
2171 switch (code)
2173 case EQ_EXPR:
2174 return NE_EXPR;
2175 case NE_EXPR:
2176 return EQ_EXPR;
2177 case GT_EXPR:
2178 return honor_nans ? UNLE_EXPR : LE_EXPR;
2179 case GE_EXPR:
2180 return honor_nans ? UNLT_EXPR : LT_EXPR;
2181 case LT_EXPR:
2182 return honor_nans ? UNGE_EXPR : GE_EXPR;
2183 case LE_EXPR:
2184 return honor_nans ? UNGT_EXPR : GT_EXPR;
2185 case LTGT_EXPR:
2186 return UNEQ_EXPR;
2187 case UNEQ_EXPR:
2188 return LTGT_EXPR;
2189 case UNGT_EXPR:
2190 return LE_EXPR;
2191 case UNGE_EXPR:
2192 return LT_EXPR;
2193 case UNLT_EXPR:
2194 return GE_EXPR;
2195 case UNLE_EXPR:
2196 return GT_EXPR;
2197 case ORDERED_EXPR:
2198 return UNORDERED_EXPR;
2199 case UNORDERED_EXPR:
2200 return ORDERED_EXPR;
2201 default:
2202 gcc_unreachable ();
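
/* Standalone illustration (editor's addition, not GCC code) of why the
   table above maps LT_EXPR to UNGE_EXPR when honor_nans is set: with a
   NaN operand, "!(a < b)" is true while "a >= b" is false, so the naive
   inverse would be wrong.  !isless matches the unordered-or-GE sense.  */
#include <math.h>
#include <stdio.h>

int main (void)
{
  double a = NAN, b = 1.0;
  printf ("!(a < b)       = %d\n", !(a < b));        /* 1 */
  printf ("a >= b         = %d\n", a >= b);          /* 0: not the inverse */
  printf ("!isless (a, b) = %d\n", !isless (a, b));  /* 1: UNGE semantics */
  return 0;
}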
2206 /* Similar, but return the comparison that results if the operands are
2207 swapped. This is safe for floating-point. */
2209 enum tree_code
2210 swap_tree_comparison (enum tree_code code)
2212 switch (code)
2214 case EQ_EXPR:
2215 case NE_EXPR:
2216 case ORDERED_EXPR:
2217 case UNORDERED_EXPR:
2218 case LTGT_EXPR:
2219 case UNEQ_EXPR:
2220 return code;
2221 case GT_EXPR:
2222 return LT_EXPR;
2223 case GE_EXPR:
2224 return LE_EXPR;
2225 case LT_EXPR:
2226 return GT_EXPR;
2227 case LE_EXPR:
2228 return GE_EXPR;
2229 case UNGT_EXPR:
2230 return UNLT_EXPR;
2231 case UNGE_EXPR:
2232 return UNLE_EXPR;
2233 case UNLT_EXPR:
2234 return UNGT_EXPR;
2235 case UNLE_EXPR:
2236 return UNGE_EXPR;
2237 default:
2238 gcc_unreachable ();
2243 /* Convert a comparison tree code from an enum tree_code representation
2244 into a compcode bit-based encoding. This function is the inverse of
2245 compcode_to_comparison. */
2247 static enum comparison_code
2248 comparison_to_compcode (enum tree_code code)
2250 switch (code)
2252 case LT_EXPR:
2253 return COMPCODE_LT;
2254 case EQ_EXPR:
2255 return COMPCODE_EQ;
2256 case LE_EXPR:
2257 return COMPCODE_LE;
2258 case GT_EXPR:
2259 return COMPCODE_GT;
2260 case NE_EXPR:
2261 return COMPCODE_NE;
2262 case GE_EXPR:
2263 return COMPCODE_GE;
2264 case ORDERED_EXPR:
2265 return COMPCODE_ORD;
2266 case UNORDERED_EXPR:
2267 return COMPCODE_UNORD;
2268 case UNLT_EXPR:
2269 return COMPCODE_UNLT;
2270 case UNEQ_EXPR:
2271 return COMPCODE_UNEQ;
2272 case UNLE_EXPR:
2273 return COMPCODE_UNLE;
2274 case UNGT_EXPR:
2275 return COMPCODE_UNGT;
2276 case LTGT_EXPR:
2277 return COMPCODE_LTGT;
2278 case UNGE_EXPR:
2279 return COMPCODE_UNGE;
2280 default:
2281 gcc_unreachable ();
2285 /* Convert a compcode bit-based encoding of a comparison operator back
2286 to GCC's enum tree_code representation. This function is the
2287 inverse of comparison_to_compcode. */
2289 static enum tree_code
2290 compcode_to_comparison (enum comparison_code code)
2292 switch (code)
2294 case COMPCODE_LT:
2295 return LT_EXPR;
2296 case COMPCODE_EQ:
2297 return EQ_EXPR;
2298 case COMPCODE_LE:
2299 return LE_EXPR;
2300 case COMPCODE_GT:
2301 return GT_EXPR;
2302 case COMPCODE_NE:
2303 return NE_EXPR;
2304 case COMPCODE_GE:
2305 return GE_EXPR;
2306 case COMPCODE_ORD:
2307 return ORDERED_EXPR;
2308 case COMPCODE_UNORD:
2309 return UNORDERED_EXPR;
2310 case COMPCODE_UNLT:
2311 return UNLT_EXPR;
2312 case COMPCODE_UNEQ:
2313 return UNEQ_EXPR;
2314 case COMPCODE_UNLE:
2315 return UNLE_EXPR;
2316 case COMPCODE_UNGT:
2317 return UNGT_EXPR;
2318 case COMPCODE_LTGT:
2319 return LTGT_EXPR;
2320 case COMPCODE_UNGE:
2321 return UNGE_EXPR;
2322 default:
2323 gcc_unreachable ();
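
/* Quick standalone check (editor's addition, not GCC code) of the bit
   encoding documented with enum comparison_code: one bit each for LT,
   EQ, GT and UNORD, so AND/OR of two predicates on identical operands
   is plain bitwise AND/OR of their codes.  */
#include <stdio.h>

enum { CC_LT = 1, CC_EQ = 2, CC_GT = 4, CC_UNORD = 8 };

int main (void)
{
  /* a < b || a == b  has code LT|EQ == 3 == COMPCODE_LE.  */
  printf ("LT|EQ = %d (COMPCODE_LE)\n", CC_LT | CC_EQ);
  /* a <= b && a >= b  has code (LT|EQ) & (GT|EQ) == 2 == COMPCODE_EQ.  */
  printf ("LE&GE = %d (COMPCODE_EQ)\n", (CC_LT | CC_EQ) & (CC_GT | CC_EQ));
  /* a != b including NaNs: LT|GT|UNORD == 13 == COMPCODE_NE.  */
  printf ("LT|GT|UNORD = %d (COMPCODE_NE)\n", CC_LT | CC_GT | CC_UNORD);
  return 0;
}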
2327 /* Return a tree for the comparison which is the combination of
2328 doing the AND or OR (depending on CODE) of the two operations LCODE
2329 and RCODE on the identical operands LL_ARG and LR_ARG. Take into account
2330 the possibility of trapping if the mode has NaNs, and return NULL_TREE
2331 if this makes the transformation invalid. */
2333 tree
2334 combine_comparisons (location_t loc,
2335 enum tree_code code, enum tree_code lcode,
2336 enum tree_code rcode, tree truth_type,
2337 tree ll_arg, tree lr_arg)
2339 bool honor_nans = HONOR_NANS (TYPE_MODE (TREE_TYPE (ll_arg)));
2340 enum comparison_code lcompcode = comparison_to_compcode (lcode);
2341 enum comparison_code rcompcode = comparison_to_compcode (rcode);
2342 int compcode;
2344 switch (code)
2346 case TRUTH_AND_EXPR: case TRUTH_ANDIF_EXPR:
2347 compcode = lcompcode & rcompcode;
2348 break;
2350 case TRUTH_OR_EXPR: case TRUTH_ORIF_EXPR:
2351 compcode = lcompcode | rcompcode;
2352 break;
2354 default:
2355 return NULL_TREE;
2358 if (!honor_nans)
2360 /* Eliminate unordered comparisons, as well as LTGT and ORD
2361 which are not used unless the mode has NaNs. */
2362 compcode &= ~COMPCODE_UNORD;
2363 if (compcode == COMPCODE_LTGT)
2364 compcode = COMPCODE_NE;
2365 else if (compcode == COMPCODE_ORD)
2366 compcode = COMPCODE_TRUE;
2368 else if (flag_trapping_math)
2370 /* Check that the original operation and the optimized ones will trap
2371 under the same condition. */
2372 bool ltrap = (lcompcode & COMPCODE_UNORD) == 0
2373 && (lcompcode != COMPCODE_EQ)
2374 && (lcompcode != COMPCODE_ORD);
2375 bool rtrap = (rcompcode & COMPCODE_UNORD) == 0
2376 && (rcompcode != COMPCODE_EQ)
2377 && (rcompcode != COMPCODE_ORD);
2378 bool trap = (compcode & COMPCODE_UNORD) == 0
2379 && (compcode != COMPCODE_EQ)
2380 && (compcode != COMPCODE_ORD);
2382 /* In a short-circuited boolean expression the LHS might be
2383 such that the RHS, if evaluated, will never trap. For
2384 example, in ORD (x, y) && (x < y), we evaluate the RHS only
2385 if neither x nor y is NaN. (This is a mixed blessing: for
2386 example, the expression above will never trap, hence
2387 optimizing it to x < y would be invalid). */
2388 if ((code == TRUTH_ORIF_EXPR && (lcompcode & COMPCODE_UNORD))
2389 || (code == TRUTH_ANDIF_EXPR && !(lcompcode & COMPCODE_UNORD)))
2390 rtrap = false;
2392 /* If the comparison was short-circuited, and only the RHS
2393 trapped, we may now generate a spurious trap. */
2394 if (rtrap && !ltrap
2395 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR))
2396 return NULL_TREE;
2398 /* If we changed the conditions that cause a trap, we lose. */
2399 if ((ltrap || rtrap) != trap)
2400 return NULL_TREE;
2403 if (compcode == COMPCODE_TRUE)
2404 return constant_boolean_node (true, truth_type);
2405 else if (compcode == COMPCODE_FALSE)
2406 return constant_boolean_node (false, truth_type);
2407 else
2409 enum tree_code tcode;
2411 tcode = compcode_to_comparison ((enum comparison_code) compcode);
2412 return fold_build2_loc (loc, tcode, truth_type, ll_arg, lr_arg);
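
/* Hedged usage sketch (editor's addition): how a caller might fold
   "a < b || a == b" into "a <= b" with combine_comparisons.  The
   wrapper name is hypothetical; inside GCC the arguments would come
   from the operands of a TRUTH_ORIF_EXPR.  */

static tree
fold_lt_or_eq_sketch (location_t loc, tree truth_type, tree a, tree b)
{
  /* lcompcode | rcompcode == COMPCODE_LT | COMPCODE_EQ == COMPCODE_LE,
     so the result is the tree for a <= b (or NULL_TREE if trapping-math
     rules forbid the rewrite).  */
  return combine_comparisons (loc, TRUTH_ORIF_EXPR, LT_EXPR, EQ_EXPR,
                              truth_type, a, b);
}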
2416 /* Return nonzero if two operands (typically of the same tree node)
2417 are necessarily equal. If either argument has side-effects this
2418 function returns zero. FLAGS modifies behavior as follows:
2420 If OEP_ONLY_CONST is set, only return nonzero for constants.
2421 This function tests whether the operands are indistinguishable;
2422 it does not test whether they are equal using C's == operation.
2423 The distinction is important for IEEE floating point, because
2424 (1) -0.0 and 0.0 are distinguishable, but -0.0==0.0, and
2425 (2) two NaNs may be indistinguishable, but NaN!=NaN.
2427 If OEP_ONLY_CONST is unset, a VAR_DECL is considered equal to itself
2428 even though it may hold multiple values during a function.
2429 This is because a GCC tree node guarantees that nothing else is
2430 executed between the evaluation of its "operands" (which may often
2431 be evaluated in arbitrary order). Hence if the operands themselves
2432 don't side-effect, the VAR_DECLs, PARM_DECLs etc... must hold the
2433 same value in each operand/subexpression. Hence leaving OEP_ONLY_CONST
2434 unset means assuming isochronic (or instantaneous) tree equivalence.
2435 Unless comparing arbitrary expression trees, such as from different
2436 statements, this flag can usually be left unset.
2438 If OEP_PURE_SAME is set, then pure functions with identical arguments
2439 are considered the same. It is used when the caller has other ways
2440 to ensure that global memory is unchanged in between. */
2442 int
2443 operand_equal_p (const_tree arg0, const_tree arg1, unsigned int flags)
2445 /* If either is ERROR_MARK, they aren't equal. */
2446 if (TREE_CODE (arg0) == ERROR_MARK || TREE_CODE (arg1) == ERROR_MARK
2447 || TREE_TYPE (arg0) == error_mark_node
2448 || TREE_TYPE (arg1) == error_mark_node)
2449 return 0;
2451 /* Similar, if either does not have a type (like a released SSA name),
2452 they aren't equal. */
2453 if (!TREE_TYPE (arg0) || !TREE_TYPE (arg1))
2454 return 0;
2456 /* Check equality of integer constants before bailing out due to
2457 precision differences. */
2458 if (TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
2459 return tree_int_cst_equal (arg0, arg1);
2461 /* If the two types don't have the same signedness, then we can't consider
2462 them equal. We must check this before the STRIP_NOPS calls
2463 because they may change the signedness of the arguments. As pointers
2464 strictly don't have a signedness, require either two pointers or
2465 two non-pointers as well. */
2466 if (TYPE_UNSIGNED (TREE_TYPE (arg0)) != TYPE_UNSIGNED (TREE_TYPE (arg1))
2467 || POINTER_TYPE_P (TREE_TYPE (arg0)) != POINTER_TYPE_P (TREE_TYPE (arg1)))
2468 return 0;
2470 /* We cannot consider pointers to different address spaces equal. */
2471 if (POINTER_TYPE_P (TREE_TYPE (arg0)) && POINTER_TYPE_P (TREE_TYPE (arg1))
2472 && (TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (arg0)))
2473 != TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (arg1)))))
2474 return 0;
2476 /* If the two types don't have the same precision, then it is not safe
2477 to strip NOPs. */
2478 if (element_precision (TREE_TYPE (arg0))
2479 != element_precision (TREE_TYPE (arg1)))
2480 return 0;
2482 STRIP_NOPS (arg0);
2483 STRIP_NOPS (arg1);
2485 /* In case both args are comparisons but with different comparison
2486 code, try to swap the comparison operands of one arg to produce
2487 a match and compare that variant. */
2488 if (TREE_CODE (arg0) != TREE_CODE (arg1)
2489 && COMPARISON_CLASS_P (arg0)
2490 && COMPARISON_CLASS_P (arg1))
2492 enum tree_code swap_code = swap_tree_comparison (TREE_CODE (arg1));
2494 if (TREE_CODE (arg0) == swap_code)
2495 return operand_equal_p (TREE_OPERAND (arg0, 0),
2496 TREE_OPERAND (arg1, 1), flags)
2497 && operand_equal_p (TREE_OPERAND (arg0, 1),
2498 TREE_OPERAND (arg1, 0), flags);
2501 if (TREE_CODE (arg0) != TREE_CODE (arg1)
2502 /* NOP_EXPR and CONVERT_EXPR are considered equal. */
2503 && !(CONVERT_EXPR_P (arg0) && CONVERT_EXPR_P (arg1)))
2504 return 0;
2506 /* This is needed for conversions and for COMPONENT_REF.
2507 Might as well play it safe and always test this. */
2508 if (TREE_CODE (TREE_TYPE (arg0)) == ERROR_MARK
2509 || TREE_CODE (TREE_TYPE (arg1)) == ERROR_MARK
2510 || TYPE_MODE (TREE_TYPE (arg0)) != TYPE_MODE (TREE_TYPE (arg1)))
2511 return 0;
2513 /* If ARG0 and ARG1 are the same SAVE_EXPR, they are necessarily equal.
2514 We don't care about side effects in that case because the SAVE_EXPR
2515 takes care of that for us. In all other cases, two expressions are
2516 equal if they have no side effects. If we have two identical
2517 expressions with side effects that should be treated the same due
2518 to the only side effects being identical SAVE_EXPR's, that will
2519 be detected in the recursive calls below.
2520 If we are taking an invariant address of two identical objects
2521 they are necessarily equal as well. */
2522 if (arg0 == arg1 && ! (flags & OEP_ONLY_CONST)
2523 && (TREE_CODE (arg0) == SAVE_EXPR
2524 || (flags & OEP_CONSTANT_ADDRESS_OF)
2525 || (! TREE_SIDE_EFFECTS (arg0) && ! TREE_SIDE_EFFECTS (arg1))))
2526 return 1;
2528 /* Next handle constant cases, those for which we can return 1 even
2529 if ONLY_CONST is set. */
2530 if (TREE_CONSTANT (arg0) && TREE_CONSTANT (arg1))
2531 switch (TREE_CODE (arg0))
2533 case INTEGER_CST:
2534 return tree_int_cst_equal (arg0, arg1);
2536 case FIXED_CST:
2537 return FIXED_VALUES_IDENTICAL (TREE_FIXED_CST (arg0),
2538 TREE_FIXED_CST (arg1));
2540 case REAL_CST:
2541 if (REAL_VALUES_IDENTICAL (TREE_REAL_CST (arg0),
2542 TREE_REAL_CST (arg1)))
2543 return 1;
2546 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0))))
2548 /* If we do not distinguish between signed and unsigned zero,
2549 consider them equal. */
2550 if (real_zerop (arg0) && real_zerop (arg1))
2551 return 1;
2553 return 0;
2555 case VECTOR_CST:
2557 unsigned i;
2559 if (VECTOR_CST_NELTS (arg0) != VECTOR_CST_NELTS (arg1))
2560 return 0;
2562 for (i = 0; i < VECTOR_CST_NELTS (arg0); ++i)
2564 if (!operand_equal_p (VECTOR_CST_ELT (arg0, i),
2565 VECTOR_CST_ELT (arg1, i), flags))
2566 return 0;
2568 return 1;
2571 case COMPLEX_CST:
2572 return (operand_equal_p (TREE_REALPART (arg0), TREE_REALPART (arg1),
2573 flags)
2574 && operand_equal_p (TREE_IMAGPART (arg0), TREE_IMAGPART (arg1),
2575 flags));
2577 case STRING_CST:
2578 return (TREE_STRING_LENGTH (arg0) == TREE_STRING_LENGTH (arg1)
2579 && ! memcmp (TREE_STRING_POINTER (arg0),
2580 TREE_STRING_POINTER (arg1),
2581 TREE_STRING_LENGTH (arg0)));
2583 case ADDR_EXPR:
2584 return operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 0),
2585 TREE_CONSTANT (arg0) && TREE_CONSTANT (arg1)
2586 ? OEP_CONSTANT_ADDRESS_OF : 0);
2587 default:
2588 break;
2591 if (flags & OEP_ONLY_CONST)
2592 return 0;
2594 /* Define macros to test an operand from arg0 and arg1 for equality and a
2595 variant that allows null and views null as being different from any
2596 non-null value. In the latter case, if either is null, they both
2597 must be; otherwise, do the normal comparison. */
2598 #define OP_SAME(N) operand_equal_p (TREE_OPERAND (arg0, N), \
2599 TREE_OPERAND (arg1, N), flags)
2601 #define OP_SAME_WITH_NULL(N) \
2602 ((!TREE_OPERAND (arg0, N) || !TREE_OPERAND (arg1, N)) \
2603 ? TREE_OPERAND (arg0, N) == TREE_OPERAND (arg1, N) : OP_SAME (N))
2605 switch (TREE_CODE_CLASS (TREE_CODE (arg0)))
2607 case tcc_unary:
2608 /* Two conversions are equal only if signedness and modes match. */
2609 switch (TREE_CODE (arg0))
2611 CASE_CONVERT:
2612 case FIX_TRUNC_EXPR:
2613 if (TYPE_UNSIGNED (TREE_TYPE (arg0))
2614 != TYPE_UNSIGNED (TREE_TYPE (arg1)))
2615 return 0;
2616 break;
2617 default:
2618 break;
2621 return OP_SAME (0);
2624 case tcc_comparison:
2625 case tcc_binary:
2626 if (OP_SAME (0) && OP_SAME (1))
2627 return 1;
2629 /* For commutative ops, allow the other order. */
2630 return (commutative_tree_code (TREE_CODE (arg0))
2631 && operand_equal_p (TREE_OPERAND (arg0, 0),
2632 TREE_OPERAND (arg1, 1), flags)
2633 && operand_equal_p (TREE_OPERAND (arg0, 1),
2634 TREE_OPERAND (arg1, 0), flags));
2636 case tcc_reference:
2637 /* If either of the pointer (or reference) expressions we are
2638 dereferencing contain a side effect, these cannot be equal,
2639 but their addresses can be. */
2640 if ((flags & OEP_CONSTANT_ADDRESS_OF) == 0
2641 && (TREE_SIDE_EFFECTS (arg0)
2642 || TREE_SIDE_EFFECTS (arg1)))
2643 return 0;
2645 switch (TREE_CODE (arg0))
2647 case INDIRECT_REF:
2648 flags &= ~OEP_CONSTANT_ADDRESS_OF;
2649 return OP_SAME (0);
2651 case REALPART_EXPR:
2652 case IMAGPART_EXPR:
2653 return OP_SAME (0);
2655 case TARGET_MEM_REF:
2656 flags &= ~OEP_CONSTANT_ADDRESS_OF;
2657 /* Require equal extra operands and then fall through to MEM_REF
2658 handling of the two common operands. */
2659 if (!OP_SAME_WITH_NULL (2)
2660 || !OP_SAME_WITH_NULL (3)
2661 || !OP_SAME_WITH_NULL (4))
2662 return 0;
2663 /* Fallthru. */
2664 case MEM_REF:
2665 flags &= ~OEP_CONSTANT_ADDRESS_OF;
2666 /* Require equal access sizes, and similar pointer types.
2667 We can have incomplete types for array references of
2668 variable-sized arrays from the Fortran frontend
2669 though. Also verify the types are compatible. */
2670 return ((TYPE_SIZE (TREE_TYPE (arg0)) == TYPE_SIZE (TREE_TYPE (arg1))
2671 || (TYPE_SIZE (TREE_TYPE (arg0))
2672 && TYPE_SIZE (TREE_TYPE (arg1))
2673 && operand_equal_p (TYPE_SIZE (TREE_TYPE (arg0)),
2674 TYPE_SIZE (TREE_TYPE (arg1)), flags)))
2675 && types_compatible_p (TREE_TYPE (arg0), TREE_TYPE (arg1))
2676 && alias_ptr_types_compatible_p
2677 (TREE_TYPE (TREE_OPERAND (arg0, 1)),
2678 TREE_TYPE (TREE_OPERAND (arg1, 1)))
2679 && OP_SAME (0) && OP_SAME (1));
2681 case ARRAY_REF:
2682 case ARRAY_RANGE_REF:
2683 /* Operands 2 and 3 may be null.
2684 Compare the array index by value first if it is constant, since the
2685 indexes may have different types but the same value here. */
2686 if (!OP_SAME (0))
2687 return 0;
2688 flags &= ~OEP_CONSTANT_ADDRESS_OF;
2689 return ((tree_int_cst_equal (TREE_OPERAND (arg0, 1),
2690 TREE_OPERAND (arg1, 1))
2691 || OP_SAME (1))
2692 && OP_SAME_WITH_NULL (2)
2693 && OP_SAME_WITH_NULL (3));
2695 case COMPONENT_REF:
2696 /* Handle operand 2 the same as for ARRAY_REF. Operand 0
2697 may be NULL when we're called to compare MEM_EXPRs. */
2698 if (!OP_SAME_WITH_NULL (0)
2699 || !OP_SAME (1))
2700 return 0;
2701 flags &= ~OEP_CONSTANT_ADDRESS_OF;
2702 return OP_SAME_WITH_NULL (2);
2704 case BIT_FIELD_REF:
2705 if (!OP_SAME (0))
2706 return 0;
2707 flags &= ~OEP_CONSTANT_ADDRESS_OF;
2708 return OP_SAME (1) && OP_SAME (2);
2710 default:
2711 return 0;
2714 case tcc_expression:
2715 switch (TREE_CODE (arg0))
2717 case ADDR_EXPR:
2718 case TRUTH_NOT_EXPR:
2719 return OP_SAME (0);
2721 case TRUTH_ANDIF_EXPR:
2722 case TRUTH_ORIF_EXPR:
2723 return OP_SAME (0) && OP_SAME (1);
2725 case FMA_EXPR:
2726 case WIDEN_MULT_PLUS_EXPR:
2727 case WIDEN_MULT_MINUS_EXPR:
2728 if (!OP_SAME (2))
2729 return 0;
2730 /* The multiplication operands are commutative. */
2731 /* FALLTHRU */
2733 case TRUTH_AND_EXPR:
2734 case TRUTH_OR_EXPR:
2735 case TRUTH_XOR_EXPR:
2736 if (OP_SAME (0) && OP_SAME (1))
2737 return 1;
2739 /* Otherwise take into account this is a commutative operation. */
2740 return (operand_equal_p (TREE_OPERAND (arg0, 0),
2741 TREE_OPERAND (arg1, 1), flags)
2742 && operand_equal_p (TREE_OPERAND (arg0, 1),
2743 TREE_OPERAND (arg1, 0), flags));
2745 case COND_EXPR:
2746 case VEC_COND_EXPR:
2747 case DOT_PROD_EXPR:
2748 return OP_SAME (0) && OP_SAME (1) && OP_SAME (2);
2750 default:
2751 return 0;
2754 case tcc_vl_exp:
2755 switch (TREE_CODE (arg0))
2757 case CALL_EXPR:
2758 /* If the CALL_EXPRs call different functions, then they
2759 clearly cannot be equal. */
2760 if (! operand_equal_p (CALL_EXPR_FN (arg0), CALL_EXPR_FN (arg1),
2761 flags))
2762 return 0;
2765 unsigned int cef = call_expr_flags (arg0);
2766 if (flags & OEP_PURE_SAME)
2767 cef &= ECF_CONST | ECF_PURE;
2768 else
2769 cef &= ECF_CONST;
2770 if (!cef)
2771 return 0;
2774 /* Now see if all the arguments are the same. */
2776 const_call_expr_arg_iterator iter0, iter1;
2777 const_tree a0, a1;
2778 for (a0 = first_const_call_expr_arg (arg0, &iter0),
2779 a1 = first_const_call_expr_arg (arg1, &iter1);
2780 a0 && a1;
2781 a0 = next_const_call_expr_arg (&iter0),
2782 a1 = next_const_call_expr_arg (&iter1))
2783 if (! operand_equal_p (a0, a1, flags))
2784 return 0;
2786 /* If we get here and both argument lists are exhausted
2787 then the CALL_EXPRs are equal. */
2788 return ! (a0 || a1);
2790 default:
2791 return 0;
2794 case tcc_declaration:
2795 /* Consider __builtin_sqrt equal to sqrt. */
2796 return (TREE_CODE (arg0) == FUNCTION_DECL
2797 && DECL_BUILT_IN (arg0) && DECL_BUILT_IN (arg1)
2798 && DECL_BUILT_IN_CLASS (arg0) == DECL_BUILT_IN_CLASS (arg1)
2799 && DECL_FUNCTION_CODE (arg0) == DECL_FUNCTION_CODE (arg1));
2801 default:
2802 return 0;
2805 #undef OP_SAME
2806 #undef OP_SAME_WITH_NULL
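
/* Standalone illustration (editor's addition, not GCC code) of the
   distinction the OEP_ONLY_CONST comment draws: -0.0 == 0.0 under C's
   "==", yet the two values are distinguishable, which is why REAL_CSTs
   are compared with REAL_VALUES_IDENTICAL rather than "==".  */
#include <math.h>
#include <stdio.h>

int main (void)
{
  double pz = 0.0, nz = -0.0;
  printf ("pz == nz        : %d\n", pz == nz);                     /* 1 */
  printf ("signbits differ : %d\n", signbit (pz) != signbit (nz)); /* 1 */
  printf ("1/pz = %g  1/nz = %g\n", 1.0 / pz, 1.0 / nz);  /* inf vs -inf */
  return 0;
}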
2809 /* Similar to operand_equal_p, but see if ARG0 might have been made by
2810 shorten_compare from ARG1 when ARG1 was being compared with OTHER.
2812 When in doubt, return 0. */
2814 static int
2815 operand_equal_for_comparison_p (tree arg0, tree arg1, tree other)
2817 int unsignedp1, unsignedpo;
2818 tree primarg0, primarg1, primother;
2819 unsigned int correct_width;
2821 if (operand_equal_p (arg0, arg1, 0))
2822 return 1;
2824 if (! INTEGRAL_TYPE_P (TREE_TYPE (arg0))
2825 || ! INTEGRAL_TYPE_P (TREE_TYPE (arg1)))
2826 return 0;
2828 /* Discard any conversions that don't change the modes of ARG0 and ARG1
2829 and see if the inner values are the same. This removes any
2830 signedness comparison, which doesn't matter here. */
2831 primarg0 = arg0, primarg1 = arg1;
2832 STRIP_NOPS (primarg0);
2833 STRIP_NOPS (primarg1);
2834 if (operand_equal_p (primarg0, primarg1, 0))
2835 return 1;
2837 /* Duplicate what shorten_compare does to ARG1 and see if that gives the
2838 actual comparison operand, ARG0.
2840 First throw away any conversions to wider types
2841 already present in the operands. */
2843 primarg1 = get_narrower (arg1, &unsignedp1);
2844 primother = get_narrower (other, &unsignedpo);
2846 correct_width = TYPE_PRECISION (TREE_TYPE (arg1));
2847 if (unsignedp1 == unsignedpo
2848 && TYPE_PRECISION (TREE_TYPE (primarg1)) < correct_width
2849 && TYPE_PRECISION (TREE_TYPE (primother)) < correct_width)
2851 tree type = TREE_TYPE (arg0);
2853 /* Make sure shorter operand is extended the right way
2854 to match the longer operand. */
2855 primarg1 = fold_convert (signed_or_unsigned_type_for
2856 (unsignedp1, TREE_TYPE (primarg1)), primarg1);
2858 if (operand_equal_p (arg0, fold_convert (type, primarg1), 0))
2859 return 1;
2862 return 0;
2865 /* See if ARG is an expression that is either a comparison or is performing
2866 arithmetic on comparisons. The comparisons must only be comparing
2867 two different values, which will be stored in *CVAL1 and *CVAL2; if
2868 they are nonzero it means that some operands have already been found.
2869 No variables may be used anywhere else in the expression except in the
2870 comparisons. If SAVE_P is true it means we removed a SAVE_EXPR around
2871 the expression and save_expr needs to be called with CVAL1 and CVAL2.
2873 If this is true, return 1. Otherwise, return zero. */
2875 static int
2876 twoval_comparison_p (tree arg, tree *cval1, tree *cval2, int *save_p)
2878 enum tree_code code = TREE_CODE (arg);
2879 enum tree_code_class tclass = TREE_CODE_CLASS (code);
2881 /* We can handle some of the tcc_expression cases here. */
2882 if (tclass == tcc_expression && code == TRUTH_NOT_EXPR)
2883 tclass = tcc_unary;
2884 else if (tclass == tcc_expression
2885 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR
2886 || code == COMPOUND_EXPR))
2887 tclass = tcc_binary;
2889 else if (tclass == tcc_expression && code == SAVE_EXPR
2890 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (arg, 0)))
2892 /* If we've already found a CVAL1 or CVAL2, this expression is
2893 too complex to handle. */
2894 if (*cval1 || *cval2)
2895 return 0;
2897 tclass = tcc_unary;
2898 *save_p = 1;
2901 switch (tclass)
2903 case tcc_unary:
2904 return twoval_comparison_p (TREE_OPERAND (arg, 0), cval1, cval2, save_p);
2906 case tcc_binary:
2907 return (twoval_comparison_p (TREE_OPERAND (arg, 0), cval1, cval2, save_p)
2908 && twoval_comparison_p (TREE_OPERAND (arg, 1),
2909 cval1, cval2, save_p));
2911 case tcc_constant:
2912 return 1;
2914 case tcc_expression:
2915 if (code == COND_EXPR)
2916 return (twoval_comparison_p (TREE_OPERAND (arg, 0),
2917 cval1, cval2, save_p)
2918 && twoval_comparison_p (TREE_OPERAND (arg, 1),
2919 cval1, cval2, save_p)
2920 && twoval_comparison_p (TREE_OPERAND (arg, 2),
2921 cval1, cval2, save_p));
2922 return 0;
2924 case tcc_comparison:
2925 /* First see if we can handle the first operand, then the second. For
2926 the second operand, we know *CVAL1 can't be zero. It must be that
2927 one side of the comparison is each of the values; test for the
2928 case where this isn't true by failing if the two operands
2929 are the same. */
2931 if (operand_equal_p (TREE_OPERAND (arg, 0),
2932 TREE_OPERAND (arg, 1), 0))
2933 return 0;
2935 if (*cval1 == 0)
2936 *cval1 = TREE_OPERAND (arg, 0);
2937 else if (operand_equal_p (*cval1, TREE_OPERAND (arg, 0), 0))
2938 ;
2939 else if (*cval2 == 0)
2940 *cval2 = TREE_OPERAND (arg, 0);
2941 else if (operand_equal_p (*cval2, TREE_OPERAND (arg, 0), 0))
2942 ;
2943 else
2944 return 0;
2946 if (operand_equal_p (*cval1, TREE_OPERAND (arg, 1), 0))
2947 ;
2948 else if (*cval2 == 0)
2949 *cval2 = TREE_OPERAND (arg, 1);
2950 else if (operand_equal_p (*cval2, TREE_OPERAND (arg, 1), 0))
2951 ;
2952 else
2953 return 0;
2955 return 1;
2957 default:
2958 return 0;
2962 /* ARG is a tree that is known to contain just arithmetic operations and
2963 comparisons. Evaluate the operations in the tree substituting NEW0 for
2964 any occurrence of OLD0 as an operand of a comparison and likewise for
2965 NEW1 and OLD1. */
2967 static tree
2968 eval_subst (location_t loc, tree arg, tree old0, tree new0,
2969 tree old1, tree new1)
2971 tree type = TREE_TYPE (arg);
2972 enum tree_code code = TREE_CODE (arg);
2973 enum tree_code_class tclass = TREE_CODE_CLASS (code);
2975 /* We can handle some of the tcc_expression cases here. */
2976 if (tclass == tcc_expression && code == TRUTH_NOT_EXPR)
2977 tclass = tcc_unary;
2978 else if (tclass == tcc_expression
2979 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR))
2980 tclass = tcc_binary;
2982 switch (tclass)
2984 case tcc_unary:
2985 return fold_build1_loc (loc, code, type,
2986 eval_subst (loc, TREE_OPERAND (arg, 0),
2987 old0, new0, old1, new1));
2989 case tcc_binary:
2990 return fold_build2_loc (loc, code, type,
2991 eval_subst (loc, TREE_OPERAND (arg, 0),
2992 old0, new0, old1, new1),
2993 eval_subst (loc, TREE_OPERAND (arg, 1),
2994 old0, new0, old1, new1));
2996 case tcc_expression:
2997 switch (code)
2999 case SAVE_EXPR:
3000 return eval_subst (loc, TREE_OPERAND (arg, 0), old0, new0,
3001 old1, new1);
3003 case COMPOUND_EXPR:
3004 return eval_subst (loc, TREE_OPERAND (arg, 1), old0, new0,
3005 old1, new1);
3007 case COND_EXPR:
3008 return fold_build3_loc (loc, code, type,
3009 eval_subst (loc, TREE_OPERAND (arg, 0),
3010 old0, new0, old1, new1),
3011 eval_subst (loc, TREE_OPERAND (arg, 1),
3012 old0, new0, old1, new1),
3013 eval_subst (loc, TREE_OPERAND (arg, 2),
3014 old0, new0, old1, new1));
3015 default:
3016 break;
3018 /* Fall through - ??? */
3020 case tcc_comparison:
3022 tree arg0 = TREE_OPERAND (arg, 0);
3023 tree arg1 = TREE_OPERAND (arg, 1);
3025 /* We need to check both for exact equality and tree equality. The
3026 former will be true if the operand has a side-effect. In that
3027 case, we know the operand occurred exactly once. */
3029 if (arg0 == old0 || operand_equal_p (arg0, old0, 0))
3030 arg0 = new0;
3031 else if (arg0 == old1 || operand_equal_p (arg0, old1, 0))
3032 arg0 = new1;
3034 if (arg1 == old0 || operand_equal_p (arg1, old0, 0))
3035 arg1 = new0;
3036 else if (arg1 == old1 || operand_equal_p (arg1, old1, 0))
3037 arg1 = new1;
3039 return fold_build2_loc (loc, code, type, arg0, arg1);
3042 default:
3043 return arg;
3047 /* Return a tree for the case when the result of an expression is RESULT
3048 converted to TYPE and OMITTED was previously an operand of the expression
3049 but is now not needed (e.g., we folded OMITTED * 0).
3051 If OMITTED has side effects, we must evaluate it. Otherwise, just do
3052 the conversion of RESULT to TYPE. */
3054 tree
3055 omit_one_operand_loc (location_t loc, tree type, tree result, tree omitted)
3057 tree t = fold_convert_loc (loc, type, result);
3059 /* If the resulting operand is an empty statement, just return the omitted
3060 statement cast to void. */
3061 if (IS_EMPTY_STMT (t) && TREE_SIDE_EFFECTS (omitted))
3062 return build1_loc (loc, NOP_EXPR, void_type_node,
3063 fold_ignored_result (omitted));
3065 if (TREE_SIDE_EFFECTS (omitted))
3066 return build2_loc (loc, COMPOUND_EXPR, type,
3067 fold_ignored_result (omitted), t);
3069 return non_lvalue_loc (loc, t);
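
/* Hedged usage sketch (editor's addition): folding "f () * 0" to 0 must
   still evaluate f () for its side effects, so the helper above wraps
   the omitted call in a COMPOUND_EXPR.  The wrapper name is
   hypothetical.  */

static tree
fold_times_zero_sketch (location_t loc, tree type, tree call)
{
  /* Yields "call, 0" if CALL has side effects, otherwise just 0.  */
  return omit_one_operand_loc (loc, type, build_int_cst (type, 0), call);
}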
3072 /* Similar, but call pedantic_non_lvalue instead of non_lvalue. */
3074 static tree
3075 pedantic_omit_one_operand_loc (location_t loc, tree type, tree result,
3076 tree omitted)
3078 tree t = fold_convert_loc (loc, type, result);
3080 /* If the resulting operand is an empty statement, just return the omitted
3081 statement cast to void. */
3082 if (IS_EMPTY_STMT (t) && TREE_SIDE_EFFECTS (omitted))
3083 return build1_loc (loc, NOP_EXPR, void_type_node,
3084 fold_ignored_result (omitted));
3086 if (TREE_SIDE_EFFECTS (omitted))
3087 return build2_loc (loc, COMPOUND_EXPR, type,
3088 fold_ignored_result (omitted), t);
3090 return pedantic_non_lvalue_loc (loc, t);
3093 /* Return a tree for the case when the result of an expression is RESULT
3094 converted to TYPE and OMITTED1 and OMITTED2 were previously operands
3095 of the expression but are now not needed.
3097 If OMITTED1 or OMITTED2 has side effects, they must be evaluated.
3098 If both OMITTED1 and OMITTED2 have side effects, OMITTED1 is
3099 evaluated before OMITTED2. Otherwise, if neither has side effects,
3100 just do the conversion of RESULT to TYPE. */
3102 tree
3103 omit_two_operands_loc (location_t loc, tree type, tree result,
3104 tree omitted1, tree omitted2)
3106 tree t = fold_convert_loc (loc, type, result);
3108 if (TREE_SIDE_EFFECTS (omitted2))
3109 t = build2_loc (loc, COMPOUND_EXPR, type, omitted2, t);
3110 if (TREE_SIDE_EFFECTS (omitted1))
3111 t = build2_loc (loc, COMPOUND_EXPR, type, omitted1, t);
3113 return TREE_CODE (t) != COMPOUND_EXPR ? non_lvalue_loc (loc, t) : t;
3117 /* Return a simplified tree node for the truth-negation of ARG. This
3118 never alters ARG itself. We assume that ARG is an operation that
3119 returns a truth value (0 or 1).
3121 FIXME: one would think we would fold the result, but it causes
3122 problems with the dominator optimizer. */
3124 static tree
3125 fold_truth_not_expr (location_t loc, tree arg)
3127 tree type = TREE_TYPE (arg);
3128 enum tree_code code = TREE_CODE (arg);
3129 location_t loc1, loc2;
3131 /* If this is a comparison, we can simply invert it, except for
3132 floating-point non-equality comparisons, in which case we just
3133 enclose a TRUTH_NOT_EXPR around what we have. */
3135 if (TREE_CODE_CLASS (code) == tcc_comparison)
3137 tree op_type = TREE_TYPE (TREE_OPERAND (arg, 0));
3138 if (FLOAT_TYPE_P (op_type)
3139 && flag_trapping_math
3140 && code != ORDERED_EXPR && code != UNORDERED_EXPR
3141 && code != NE_EXPR && code != EQ_EXPR)
3142 return NULL_TREE;
3144 code = invert_tree_comparison (code, HONOR_NANS (TYPE_MODE (op_type)));
3145 if (code == ERROR_MARK)
3146 return NULL_TREE;
3148 return build2_loc (loc, code, type, TREE_OPERAND (arg, 0),
3149 TREE_OPERAND (arg, 1));
3152 switch (code)
3154 case INTEGER_CST:
3155 return constant_boolean_node (integer_zerop (arg), type);
3157 case TRUTH_AND_EXPR:
3158 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
3159 loc2 = expr_location_or (TREE_OPERAND (arg, 1), loc);
3160 return build2_loc (loc, TRUTH_OR_EXPR, type,
3161 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)),
3162 invert_truthvalue_loc (loc2, TREE_OPERAND (arg, 1)));
3164 case TRUTH_OR_EXPR:
3165 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
3166 loc2 = expr_location_or (TREE_OPERAND (arg, 1), loc);
3167 return build2_loc (loc, TRUTH_AND_EXPR, type,
3168 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)),
3169 invert_truthvalue_loc (loc2, TREE_OPERAND (arg, 1)));
3171 case TRUTH_XOR_EXPR:
3172 /* Here we can invert either operand. We invert the first operand
3173 unless the second operand is a TRUTH_NOT_EXPR in which case our
3174 result is the XOR of the first operand with the inside of the
3175 negation of the second operand. */
3177 if (TREE_CODE (TREE_OPERAND (arg, 1)) == TRUTH_NOT_EXPR)
3178 return build2_loc (loc, TRUTH_XOR_EXPR, type, TREE_OPERAND (arg, 0),
3179 TREE_OPERAND (TREE_OPERAND (arg, 1), 0));
3180 else
3181 return build2_loc (loc, TRUTH_XOR_EXPR, type,
3182 invert_truthvalue_loc (loc, TREE_OPERAND (arg, 0)),
3183 TREE_OPERAND (arg, 1));
3185 case TRUTH_ANDIF_EXPR:
3186 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
3187 loc2 = expr_location_or (TREE_OPERAND (arg, 1), loc);
3188 return build2_loc (loc, TRUTH_ORIF_EXPR, type,
3189 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)),
3190 invert_truthvalue_loc (loc2, TREE_OPERAND (arg, 1)));
3192 case TRUTH_ORIF_EXPR:
3193 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
3194 loc2 = expr_location_or (TREE_OPERAND (arg, 1), loc);
3195 return build2_loc (loc, TRUTH_ANDIF_EXPR, type,
3196 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)),
3197 invert_truthvalue_loc (loc2, TREE_OPERAND (arg, 1)));
3199 case TRUTH_NOT_EXPR:
3200 return TREE_OPERAND (arg, 0);
3202 case COND_EXPR:
3204 tree arg1 = TREE_OPERAND (arg, 1);
3205 tree arg2 = TREE_OPERAND (arg, 2);
3207 loc1 = expr_location_or (TREE_OPERAND (arg, 1), loc);
3208 loc2 = expr_location_or (TREE_OPERAND (arg, 2), loc);
3210 /* A COND_EXPR may have a throw as one operand, which
3211 then has void type. Just leave void operands
3212 as they are. */
3213 return build3_loc (loc, COND_EXPR, type, TREE_OPERAND (arg, 0),
3214 VOID_TYPE_P (TREE_TYPE (arg1))
3215 ? arg1 : invert_truthvalue_loc (loc1, arg1),
3216 VOID_TYPE_P (TREE_TYPE (arg2))
3217 ? arg2 : invert_truthvalue_loc (loc2, arg2));
3220 case COMPOUND_EXPR:
3221 loc1 = expr_location_or (TREE_OPERAND (arg, 1), loc);
3222 return build2_loc (loc, COMPOUND_EXPR, type,
3223 TREE_OPERAND (arg, 0),
3224 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 1)));
3226 case NON_LVALUE_EXPR:
3227 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
3228 return invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0));
3230 CASE_CONVERT:
3231 if (TREE_CODE (TREE_TYPE (arg)) == BOOLEAN_TYPE)
3232 return build1_loc (loc, TRUTH_NOT_EXPR, type, arg);
3234 /* ... fall through ... */
3236 case FLOAT_EXPR:
3237 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
3238 return build1_loc (loc, TREE_CODE (arg), type,
3239 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)));
3241 case BIT_AND_EXPR:
3242 if (!integer_onep (TREE_OPERAND (arg, 1)))
3243 return NULL_TREE;
3244 return build2_loc (loc, EQ_EXPR, type, arg, build_int_cst (type, 0));
3246 case SAVE_EXPR:
3247 return build1_loc (loc, TRUTH_NOT_EXPR, type, arg);
3249 case CLEANUP_POINT_EXPR:
3250 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
3251 return build1_loc (loc, CLEANUP_POINT_EXPR, type,
3252 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)));
3254 default:
3255 return NULL_TREE;
3259 /* Fold the truth-negation of ARG. This never alters ARG itself. We
3260 assume that ARG is an operation that returns a truth value (0 or 1
3261 for scalars, 0 or -1 for vectors). Return the folded expression if
3262 folding is successful. Otherwise, return NULL_TREE. */
3264 static tree
3265 fold_invert_truthvalue (location_t loc, tree arg)
3267 tree type = TREE_TYPE (arg);
3268 return fold_unary_loc (loc, VECTOR_TYPE_P (type)
3269 ? BIT_NOT_EXPR
3270 : TRUTH_NOT_EXPR,
3271 type, arg);
3274 /* Return a simplified tree node for the truth-negation of ARG. This
3275 never alters ARG itself. We assume that ARG is an operation that
3276 returns a truth value (0 or 1 for scalars, 0 or -1 for vectors). */
3278 tree
3279 invert_truthvalue_loc (location_t loc, tree arg)
3281 if (TREE_CODE (arg) == ERROR_MARK)
3282 return arg;
3284 tree type = TREE_TYPE (arg);
3285 return fold_build1_loc (loc, VECTOR_TYPE_P (type)
3286 ? BIT_NOT_EXPR
3287 : TRUTH_NOT_EXPR,
3288 type, arg);
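
/* Standalone check (editor's addition, not GCC code) of the De Morgan
   rewrites fold_truth_not_expr performs on TRUTH_AND_EXPR and
   TRUTH_OR_EXPR operands.  */
#include <assert.h>

int main (void)
{
  for (int a = 0; a <= 1; a++)
    for (int b = 0; b <= 1; b++)
      {
        assert (!(a && b) == (!a || !b));
        assert (!(a || b) == (!a && !b));
      }
  return 0;
}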
3291 /* Given a bit-wise operation CODE applied to ARG0 and ARG1, see if both
3292 operands are another bit-wise operation with a common input. If so,
3293 distribute the bit operations to save an operation and possibly two if
3294 constants are involved. For example, convert
3295 (A | B) & (A | C) into A | (B & C)
3296 Further simplification will occur if B and C are constants.
3298 If this optimization cannot be done, 0 will be returned. */
3300 static tree
3301 distribute_bit_expr (location_t loc, enum tree_code code, tree type,
3302 tree arg0, tree arg1)
3304 tree common;
3305 tree left, right;
3307 if (TREE_CODE (arg0) != TREE_CODE (arg1)
3308 || TREE_CODE (arg0) == code
3309 || (TREE_CODE (arg0) != BIT_AND_EXPR
3310 && TREE_CODE (arg0) != BIT_IOR_EXPR))
3311 return 0;
3313 if (operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 0), 0))
3315 common = TREE_OPERAND (arg0, 0);
3316 left = TREE_OPERAND (arg0, 1);
3317 right = TREE_OPERAND (arg1, 1);
3319 else if (operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 1), 0))
3321 common = TREE_OPERAND (arg0, 0);
3322 left = TREE_OPERAND (arg0, 1);
3323 right = TREE_OPERAND (arg1, 0);
3325 else if (operand_equal_p (TREE_OPERAND (arg0, 1), TREE_OPERAND (arg1, 0), 0))
3327 common = TREE_OPERAND (arg0, 1);
3328 left = TREE_OPERAND (arg0, 0);
3329 right = TREE_OPERAND (arg1, 1);
3331 else if (operand_equal_p (TREE_OPERAND (arg0, 1), TREE_OPERAND (arg1, 1), 0))
3333 common = TREE_OPERAND (arg0, 1);
3334 left = TREE_OPERAND (arg0, 0);
3335 right = TREE_OPERAND (arg1, 0);
3337 else
3338 return 0;
3340 common = fold_convert_loc (loc, type, common);
3341 left = fold_convert_loc (loc, type, left);
3342 right = fold_convert_loc (loc, type, right);
3343 return fold_build2_loc (loc, TREE_CODE (arg0), type, common,
3344 fold_build2_loc (loc, code, type, left, right));
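
/* Exhaustive standalone check (editor's addition, not GCC code) of the
   distribution identity implemented above, in both directions.  */
#include <assert.h>

int main (void)
{
  for (unsigned a = 0; a < 8; a++)
    for (unsigned b = 0; b < 8; b++)
      for (unsigned c = 0; c < 8; c++)
        {
          assert (((a | b) & (a | c)) == (a | (b & c)));
          assert (((a & b) | (a & c)) == (a & (b | c)));
        }
  return 0;
}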
3347 /* Knowing that ARG0 and ARG1 are both RDIV_EXPRs, simplify a binary operation
3348 with code CODE. This optimization is unsafe. */
3349 static tree
3350 distribute_real_division (location_t loc, enum tree_code code, tree type,
3351 tree arg0, tree arg1)
3353 bool mul0 = TREE_CODE (arg0) == MULT_EXPR;
3354 bool mul1 = TREE_CODE (arg1) == MULT_EXPR;
3356 /* (A / C) +- (B / C) -> (A +- B) / C. */
3357 if (mul0 == mul1
3358 && operand_equal_p (TREE_OPERAND (arg0, 1),
3359 TREE_OPERAND (arg1, 1), 0))
3360 return fold_build2_loc (loc, mul0 ? MULT_EXPR : RDIV_EXPR, type,
3361 fold_build2_loc (loc, code, type,
3362 TREE_OPERAND (arg0, 0),
3363 TREE_OPERAND (arg1, 0)),
3364 TREE_OPERAND (arg0, 1));
3366 /* (A / C1) +- (A / C2) -> A * (1 / C1 +- 1 / C2). */
3367 if (operand_equal_p (TREE_OPERAND (arg0, 0),
3368 TREE_OPERAND (arg1, 0), 0)
3369 && TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
3370 && TREE_CODE (TREE_OPERAND (arg1, 1)) == REAL_CST)
3372 REAL_VALUE_TYPE r0, r1;
3373 r0 = TREE_REAL_CST (TREE_OPERAND (arg0, 1));
3374 r1 = TREE_REAL_CST (TREE_OPERAND (arg1, 1));
3375 if (!mul0)
3376 real_arithmetic (&r0, RDIV_EXPR, &dconst1, &r0);
3377 if (!mul1)
3378 real_arithmetic (&r1, RDIV_EXPR, &dconst1, &r1);
3379 real_arithmetic (&r0, code, &r0, &r1);
3380 return fold_build2_loc (loc, MULT_EXPR, type,
3381 TREE_OPERAND (arg0, 0),
3382 build_real (type, r0));
3385 return NULL_TREE;
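
/* Standalone illustration (editor's addition, not GCC code) of why the
   comment above calls this unsafe: (A + B) / C can overflow where
   A / C + B / C does not, and rounding may differ.  Assumes IEEE
   single precision.  */
#include <stdio.h>

int main (void)
{
  float a = 3e38f, b = 3e38f, c = 4.0f;
  volatile float sum = a + b;                  /* overflows to inf */
  printf ("a/c + b/c = %g\n", a / c + b / c);  /* ~1.5e38, finite */
  printf ("(a+b)/c   = %g\n", sum / c);        /* inf */
  return 0;
}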
3388 /* Return a BIT_FIELD_REF of type TYPE to refer to BITSIZE bits of INNER
3389 starting at BITPOS. The field is unsigned if UNSIGNEDP is nonzero. */
3391 static tree
3392 make_bit_field_ref (location_t loc, tree inner, tree type,
3393 HOST_WIDE_INT bitsize, HOST_WIDE_INT bitpos, int unsignedp)
3395 tree result, bftype;
3397 if (bitpos == 0)
3399 tree size = TYPE_SIZE (TREE_TYPE (inner));
3400 if ((INTEGRAL_TYPE_P (TREE_TYPE (inner))
3401 || POINTER_TYPE_P (TREE_TYPE (inner)))
3402 && tree_fits_shwi_p (size)
3403 && tree_to_shwi (size) == bitsize)
3404 return fold_convert_loc (loc, type, inner);
3407 bftype = type;
3408 if (TYPE_PRECISION (bftype) != bitsize
3409 || TYPE_UNSIGNED (bftype) == !unsignedp)
3410 bftype = build_nonstandard_integer_type (bitsize, 0);
3412 result = build3_loc (loc, BIT_FIELD_REF, bftype, inner,
3413 size_int (bitsize), bitsize_int (bitpos));
3415 if (bftype != type)
3416 result = fold_convert_loc (loc, type, result);
3418 return result;
3421 /* Optimize a bit-field compare.
3423 There are two cases: First is a compare against a constant and the
3424 second is a comparison of two items where the fields are at the same
3425 bit position relative to the start of a chunk (byte, halfword, word)
3426 large enough to contain it. In these cases we can avoid the shift
3427 implicit in bitfield extractions.
3429 For constants, we emit a compare of the shifted constant with the
3430 BIT_AND_EXPR of a mask and a byte, halfword, or word of the operand being
3431 compared. For two fields at the same position, we do the ANDs with the
3432 similar mask and compare the result of the ANDs.
3434 CODE is the comparison code, known to be either NE_EXPR or EQ_EXPR.
3435 COMPARE_TYPE is the type of the comparison, and LHS and RHS
3436 are the left and right operands of the comparison, respectively.
3438 If the optimization described above can be done, we return the resulting
3439 tree. Otherwise we return zero. */
3441 static tree
3442 optimize_bit_field_compare (location_t loc, enum tree_code code,
3443 tree compare_type, tree lhs, tree rhs)
3445 HOST_WIDE_INT lbitpos, lbitsize, rbitpos, rbitsize, nbitpos, nbitsize;
3446 tree type = TREE_TYPE (lhs);
3447 tree unsigned_type;
3448 int const_p = TREE_CODE (rhs) == INTEGER_CST;
3449 enum machine_mode lmode, rmode, nmode;
3450 int lunsignedp, runsignedp;
3451 int lvolatilep = 0, rvolatilep = 0;
3452 tree linner, rinner = NULL_TREE;
3453 tree mask;
3454 tree offset;
3456 /* Get all the information about the extractions being done. If the bit size
3457 is the same as the size of the underlying object, we aren't doing an
3458 extraction at all and so can do nothing. We also don't want to
3459 do anything if the inner expression is a PLACEHOLDER_EXPR since we
3460 then will no longer be able to replace it. */
3461 linner = get_inner_reference (lhs, &lbitsize, &lbitpos, &offset, &lmode,
3462 &lunsignedp, &lvolatilep, false);
3463 if (linner == lhs || lbitsize == GET_MODE_BITSIZE (lmode) || lbitsize < 0
3464 || offset != 0 || TREE_CODE (linner) == PLACEHOLDER_EXPR || lvolatilep)
3465 return 0;
3467 if (!const_p)
3469 /* If this is not a constant, we can only do something if bit positions,
3470 sizes, and signedness are the same. */
3471 rinner = get_inner_reference (rhs, &rbitsize, &rbitpos, &offset, &rmode,
3472 &runsignedp, &rvolatilep, false);
3474 if (rinner == rhs || lbitpos != rbitpos || lbitsize != rbitsize
3475 || lunsignedp != runsignedp || offset != 0
3476 || TREE_CODE (rinner) == PLACEHOLDER_EXPR || rvolatilep)
3477 return 0;
3480 /* See if we can find a mode to refer to this field. We should be able to,
3481 but fail if we can't. */
3482 nmode = get_best_mode (lbitsize, lbitpos, 0, 0,
3483 const_p ? TYPE_ALIGN (TREE_TYPE (linner))
3484 : MIN (TYPE_ALIGN (TREE_TYPE (linner)),
3485 TYPE_ALIGN (TREE_TYPE (rinner))),
3486 word_mode, false);
3487 if (nmode == VOIDmode)
3488 return 0;
3490 /* Set signed and unsigned types of the precision of this mode for the
3491 shifts below. */
3492 unsigned_type = lang_hooks.types.type_for_mode (nmode, 1);
3494 /* Compute the bit position and size for the new reference and our offset
3495 within it. If the new reference is the same size as the original, we
3496 won't optimize anything, so return zero. */
3497 nbitsize = GET_MODE_BITSIZE (nmode);
3498 nbitpos = lbitpos & ~ (nbitsize - 1);
3499 lbitpos -= nbitpos;
3500 if (nbitsize == lbitsize)
3501 return 0;
3503 if (BYTES_BIG_ENDIAN)
3504 lbitpos = nbitsize - lbitsize - lbitpos;
3506 /* Make the mask to be used against the extracted field. */
3507 mask = build_int_cst_type (unsigned_type, -1);
3508 mask = const_binop (LSHIFT_EXPR, mask, size_int (nbitsize - lbitsize));
3509 mask = const_binop (RSHIFT_EXPR, mask,
3510 size_int (nbitsize - lbitsize - lbitpos));
3512 if (! const_p)
3513 /* If not comparing with constant, just rework the comparison
3514 and return. */
3515 return fold_build2_loc (loc, code, compare_type,
3516 fold_build2_loc (loc, BIT_AND_EXPR, unsigned_type,
3517 make_bit_field_ref (loc, linner,
3518 unsigned_type,
3519 nbitsize, nbitpos,
3520 1),
3521 mask),
3522 fold_build2_loc (loc, BIT_AND_EXPR, unsigned_type,
3523 make_bit_field_ref (loc, rinner,
3524 unsigned_type,
3525 nbitsize, nbitpos,
3526 1),
3527 mask));
3529 /* Otherwise, we are handling the constant case. See if the constant is too
3530 big for the field. Warn and return a tree for 0 (false) if so. We do
3531 this not only for its own sake, but to avoid having to test for this
3532 error case below. If we didn't, we might generate wrong code.
3534 For unsigned fields, the constant shifted right by the field length should
3535 be all zero. For signed fields, the high-order bits should agree with
3536 the sign bit. */
3538 if (lunsignedp)
3540 if (wi::lrshift (rhs, lbitsize) != 0)
3542 warning (0, "comparison is always %d due to width of bit-field",
3543 code == NE_EXPR);
3544 return constant_boolean_node (code == NE_EXPR, compare_type);
3547 else
3549 wide_int tem = wi::arshift (rhs, lbitsize - 1);
3550 if (tem != 0 && tem != -1)
3552 warning (0, "comparison is always %d due to width of bit-field",
3553 code == NE_EXPR);
3554 return constant_boolean_node (code == NE_EXPR, compare_type);
3558 /* Single-bit compares should always be against zero. */
3559 if (lbitsize == 1 && ! integer_zerop (rhs))
3561 code = code == EQ_EXPR ? NE_EXPR : EQ_EXPR;
3562 rhs = build_int_cst (type, 0);
3565 /* Make a new bitfield reference, shift the constant over the
3566 appropriate number of bits and mask it with the computed mask
3567 (in case this was a signed field). If we changed it, make a new one. */
3568 lhs = make_bit_field_ref (loc, linner, unsigned_type, nbitsize, nbitpos, 1);
3570 rhs = const_binop (BIT_AND_EXPR,
3571 const_binop (LSHIFT_EXPR,
3572 fold_convert_loc (loc, unsigned_type, rhs),
3573 size_int (lbitpos)),
3574 mask);
3576 lhs = build2_loc (loc, code, compare_type,
3577 build2 (BIT_AND_EXPR, unsigned_type, lhs, mask), rhs);
3578 return lhs;
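
/* Standalone sketch (editor's addition, not GCC code) of the rewrite:
   comparing a bit-field against a constant becomes a mask-and-compare
   on the containing word, avoiding the extract shift.  Bit-field layout
   is implementation-defined; the mask and position below assume a
   little-endian target that allocates F in the low bits.  */
#include <assert.h>
#include <stdint.h>
#include <string.h>

struct s { unsigned f : 3; unsigned pad : 29; };

int main (void)
{
  struct s x = { 5, 0 };
  uint32_t word;
  memcpy (&word, &x, sizeof word);
  /* "x.f == 5" becomes "(word & mask) == (5 << bitpos)", bitpos == 0.  */
  assert (((word & 0x7u) == 5u) == (x.f == 5));
  return 0;
}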
3581 /* Subroutine for fold_truth_andor_1: decode a field reference.
3583 If EXP is a comparison reference, we return the innermost reference.
3585 *PBITSIZE is set to the number of bits in the reference, *PBITPOS is
3586 set to the starting bit number.
3588 If the innermost field can be completely contained in a mode-sized
3589 unit, *PMODE is set to that mode. Otherwise, it is set to VOIDmode.
3591 *PVOLATILEP is set to 1 if any expression encountered is volatile;
3592 otherwise it is not changed.
3594 *PUNSIGNEDP is set to the signedness of the field.
3596 *PMASK is set to the mask used. This is either contained in a
3597 BIT_AND_EXPR or derived from the width of the field.
3599 *PAND_MASK is set to the mask found in a BIT_AND_EXPR, if any.
3601 Return 0 if this is not a component reference or is one that we can't
3602 do anything with. */
3604 static tree
3605 decode_field_reference (location_t loc, tree exp, HOST_WIDE_INT *pbitsize,
3606 HOST_WIDE_INT *pbitpos, enum machine_mode *pmode,
3607 int *punsignedp, int *pvolatilep,
3608 tree *pmask, tree *pand_mask)
3610 tree outer_type = 0;
3611 tree and_mask = 0;
3612 tree mask, inner, offset;
3613 tree unsigned_type;
3614 unsigned int precision;
3616 /* All the optimizations using this function assume integer fields.
3617 There are problems with FP fields since the type_for_size call
3618 below can fail for, e.g., XFmode. */
3619 if (! INTEGRAL_TYPE_P (TREE_TYPE (exp)))
3620 return 0;
3622 /* We are interested in the bare arrangement of bits, so strip everything
3623 that doesn't affect the machine mode. However, record the type of the
3624 outermost expression if it may matter below. */
3625 if (CONVERT_EXPR_P (exp)
3626 || TREE_CODE (exp) == NON_LVALUE_EXPR)
3627 outer_type = TREE_TYPE (exp);
3628 STRIP_NOPS (exp);
3630 if (TREE_CODE (exp) == BIT_AND_EXPR)
3632 and_mask = TREE_OPERAND (exp, 1);
3633 exp = TREE_OPERAND (exp, 0);
3634 STRIP_NOPS (exp); STRIP_NOPS (and_mask);
3635 if (TREE_CODE (and_mask) != INTEGER_CST)
3636 return 0;
3639 inner = get_inner_reference (exp, pbitsize, pbitpos, &offset, pmode,
3640 punsignedp, pvolatilep, false);
3641 if ((inner == exp && and_mask == 0)
3642 || *pbitsize < 0 || offset != 0
3643 || TREE_CODE (inner) == PLACEHOLDER_EXPR)
3644 return 0;
3646 /* If the number of bits in the reference is the same as the bitsize of
3647 the outer type, then the outer type gives the signedness. Otherwise
3648 (in case of a small bitfield) the signedness is unchanged. */
3649 if (outer_type && *pbitsize == TYPE_PRECISION (outer_type))
3650 *punsignedp = TYPE_UNSIGNED (outer_type);
3652 /* Compute the mask to access the bitfield. */
3653 unsigned_type = lang_hooks.types.type_for_size (*pbitsize, 1);
3654 precision = TYPE_PRECISION (unsigned_type);
3656 mask = build_int_cst_type (unsigned_type, -1);
3658 mask = const_binop (LSHIFT_EXPR, mask, size_int (precision - *pbitsize));
3659 mask = const_binop (RSHIFT_EXPR, mask, size_int (precision - *pbitsize));
3661 /* Merge it with the mask we found in the BIT_AND_EXPR, if any. */
3662 if (and_mask != 0)
3663 mask = fold_build2_loc (loc, BIT_AND_EXPR, unsigned_type,
3664 fold_convert_loc (loc, unsigned_type, and_mask), mask);
3666 *pmask = mask;
3667 *pand_mask = and_mask;
3668 return inner;
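
/* Standalone check (editor's addition, not GCC code) of the mask
   construction used above: shifting an all-ones value left and then
   right by PRECISION - BITSIZE leaves exactly BITSIZE low-order ones.  */
#include <assert.h>
#include <stdint.h>

int main (void)
{
  const unsigned precision = 32, bitsize = 5;
  uint32_t mask = UINT32_MAX;           /* build_int_cst_type (type, -1) */
  mask <<= precision - bitsize;         /* LSHIFT_EXPR */
  mask >>= precision - bitsize;         /* RSHIFT_EXPR on unsigned type */
  assert (mask == 0x1f);
  return 0;
}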
3671 /* Return nonzero if MASK represents a mask of SIZE ones in the low-order
3672 bit positions and MASK has a signed type. */
3674 static int
3675 all_ones_mask_p (const_tree mask, unsigned int size)
3677 tree type = TREE_TYPE (mask);
3678 unsigned int precision = TYPE_PRECISION (type);
3680 /* If this function returns true when the type of the mask is
3681 UNSIGNED, then there will be errors. In particular see
3682 gcc.c-torture/execute/990326-1.c. There does not appear to be
3683 any documentation paper trail as to why this is so. But the pre
3684 wide-int worked with that restriction and it has been preserved
3685 here. */
3686 if (size > precision || TYPE_SIGN (type) == UNSIGNED)
3687 return false;
3689 return wi::mask (size, false, precision) == mask;
3692 /* Subroutine for fold: determine if VAL is the INTEGER_CONST that
3693 represents the sign bit of EXP's type. If EXP represents a sign
3694 or zero extension, also test VAL against the unextended type.
3695 The return value is the (sub)expression whose sign bit is VAL,
3696 or NULL_TREE otherwise. */
3698 static tree
3699 sign_bit_p (tree exp, const_tree val)
3701 int width;
3702 tree t;
3704 /* Tree EXP must have an integral type. */
3705 t = TREE_TYPE (exp);
3706 if (! INTEGRAL_TYPE_P (t))
3707 return NULL_TREE;
3709 /* Tree VAL must be an integer constant. */
3710 if (TREE_CODE (val) != INTEGER_CST
3711 || TREE_OVERFLOW (val))
3712 return NULL_TREE;
3714 width = TYPE_PRECISION (t);
3715 if (wi::only_sign_bit_p (val, width))
3716 return exp;
3718 /* Handle extension from a narrower type. */
3719 if (TREE_CODE (exp) == NOP_EXPR
3720 && TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0))) < width)
3721 return sign_bit_p (TREE_OPERAND (exp, 0), val);
3723 return NULL_TREE;
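
/* Standalone illustration (editor's addition, not GCC code) of what
   sign_bit_p enables: once VAL is known to be the lone sign bit,
   "(x & VAL) != 0" is just a sign test.  For 32-bit int the constant
   has INT32_MIN's bit pattern, 0x80000000.  */
#include <assert.h>
#include <stdint.h>

int main (void)
{
  int32_t vals[] = { 0, 1, -1, 42, INT32_MIN, INT32_MAX };
  for (unsigned i = 0; i < sizeof vals / sizeof vals[0]; i++)
    assert (((vals[i] & INT32_MIN) != 0) == (vals[i] < 0));
  return 0;
}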
3726 /* Subroutine for fold_truth_andor_1: determine if an operand is simple enough
3727 to be evaluated unconditionally. */
3729 static int
3730 simple_operand_p (const_tree exp)
3732 /* Strip any conversions that don't change the machine mode. */
3733 STRIP_NOPS (exp);
3735 return (CONSTANT_CLASS_P (exp)
3736 || TREE_CODE (exp) == SSA_NAME
3737 || (DECL_P (exp)
3738 && ! TREE_ADDRESSABLE (exp)
3739 && ! TREE_THIS_VOLATILE (exp)
3740 && ! DECL_NONLOCAL (exp)
3741 /* Don't regard global variables as simple. They may be
3742 allocated in ways unknown to the compiler (shared memory,
3743 #pragma weak, etc). */
3744 && ! TREE_PUBLIC (exp)
3745 && ! DECL_EXTERNAL (exp)
3746 /* Weakrefs are not safe to be read, since they can be NULL.
3747 They are !TREE_PUBLIC && !DECL_EXTERNAL but still
3748 have DECL_WEAK flag set. */
3749 && (! VAR_OR_FUNCTION_DECL_P (exp) || ! DECL_WEAK (exp))
3750 /* Loading a static variable is unduly expensive, but global
3751 registers aren't expensive. */
3752 && (! TREE_STATIC (exp) || DECL_REGISTER (exp))));
3755 /* Subroutine for fold_truth_andor: determine if an operand is simple enough
3756 to be evaluated unconditionally.
3757 In addition to simple_operand_p, we assume that comparisons, conversions,
3758 and logic-not operations are simple, if their operands are simple, too. */
3760 static bool
3761 simple_operand_p_2 (tree exp)
3763 enum tree_code code;
3765 if (TREE_SIDE_EFFECTS (exp)
3766 || tree_could_trap_p (exp))
3767 return false;
3769 while (CONVERT_EXPR_P (exp))
3770 exp = TREE_OPERAND (exp, 0);
3772 code = TREE_CODE (exp);
3774 if (TREE_CODE_CLASS (code) == tcc_comparison)
3775 return (simple_operand_p (TREE_OPERAND (exp, 0))
3776 && simple_operand_p (TREE_OPERAND (exp, 1)));
3778 if (code == TRUTH_NOT_EXPR)
3779 return simple_operand_p_2 (TREE_OPERAND (exp, 0));
3781 return simple_operand_p (exp);
3785 /* The following functions are subroutines to fold_range_test and allow it to
3786 try to change a logical combination of comparisons into a range test.
3788 For example, both
3789 X == 2 || X == 3 || X == 4 || X == 5
3790 and
3791 X >= 2 && X <= 5
3792 are converted to
3793 (unsigned) (X - 2) <= 3
3795 We describe each set of comparisons as being either inside or outside
3796 a range, using a variable named like IN_P, and then describe the
3797 range with a lower and upper bound. If one of the bounds is omitted,
3798 it represents either the highest or lowest value of the type.
3800 In the comments below, we represent a range by two numbers in brackets
3801 preceded by a "+" to designate being inside that range, or a "-" to
3802 designate being outside that range, so the condition can be inverted by
3803 flipping the prefix. An omitted bound is represented by a "-". For
3804 example, "- [-, 10]" means being outside the range starting at the lowest
3805 possible value and ending at 10, in other words, being greater than 10.
3806 The range "+ [-, -]" is always true and hence the range "- [-, -]" is
3807 always false.
3809 We set up things so that the missing bounds are handled in a consistent
3810 manner so neither a missing bound nor "true" and "false" need to be
3811 handled using a special case. */
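/* Editorial sketch, not part of the original file: the example
   transformation above written out in plain C.  */
static int
in_range_2_to_5 (int x)
{
  /* Equivalent to (x == 2 || x == 3 || x == 4 || x == 5) and to
     (x >= 2 && x <= 5): the subtraction is done unsigned, so values
     below 2 wrap around to huge numbers and fail the comparison.  */
  return (unsigned int) x - 2u <= 3u;
}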
3813 /* Return the result of applying CODE to ARG0 and ARG1, but handle the case
3814 of ARG0 and/or ARG1 being omitted, meaning an unlimited range. UPPER0_P
3815 and UPPER1_P are nonzero if the respective argument is an upper bound
3816 and zero for a lower. TYPE, if nonzero, is the type of the result; it
3817 must be specified for a comparison. ARG1 will be converted to ARG0's
3818 type if both are specified. */
3820 static tree
3821 range_binop (enum tree_code code, tree type, tree arg0, int upper0_p,
3822 tree arg1, int upper1_p)
3824 tree tem;
3825 int result;
3826 int sgn0, sgn1;
3828 /* If neither arg represents infinity, do the normal operation.
3829 Else, if not a comparison, return infinity. Else handle the special
3830 comparison rules. Note that most of the cases below won't occur, but
3831 are handled for consistency. */
3833 if (arg0 != 0 && arg1 != 0)
3835 tem = fold_build2 (code, type != 0 ? type : TREE_TYPE (arg0),
3836 arg0, fold_convert (TREE_TYPE (arg0), arg1));
3837 STRIP_NOPS (tem);
3838 return TREE_CODE (tem) == INTEGER_CST ? tem : 0;
3841 if (TREE_CODE_CLASS (code) != tcc_comparison)
3842 return 0;
3844 /* Set SGN[01] to -1 if ARG[01] is a lower bound, 1 for upper, and 0
3845 for neither. In real mathematics, we cannot assume open-ended ranges
3846 are the same. But this is computer arithmetic, where numbers are finite,
3847 so we can model the missing bound of any unbounded range as a value Z
3848 greater than any representable number. This permits
3849 us to treat unbounded ranges as equal. */
3850 sgn0 = arg0 != 0 ? 0 : (upper0_p ? 1 : -1);
3851 sgn1 = arg1 != 0 ? 0 : (upper1_p ? 1 : -1);
3852 switch (code)
3854 case EQ_EXPR:
3855 result = sgn0 == sgn1;
3856 break;
3857 case NE_EXPR:
3858 result = sgn0 != sgn1;
3859 break;
3860 case LT_EXPR:
3861 result = sgn0 < sgn1;
3862 break;
3863 case LE_EXPR:
3864 result = sgn0 <= sgn1;
3865 break;
3866 case GT_EXPR:
3867 result = sgn0 > sgn1;
3868 break;
3869 case GE_EXPR:
3870 result = sgn0 >= sgn1;
3871 break;
3872 default:
3873 gcc_unreachable ();
3876 return constant_boolean_node (result, type);
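/* For example (editorial note): two omitted upper bounds get SGN == 1
   and therefore compare equal under EQ_EXPR, and an omitted upper bound
   compares greater than an omitted lower bound under GT_EXPR.  */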
3879 /* Helper routine for make_range. Perform one step for it; return the
3880 new expression if the loop should continue or NULL_TREE if it should
3881 stop. */
3883 tree
3884 make_range_step (location_t loc, enum tree_code code, tree arg0, tree arg1,
3885 tree exp_type, tree *p_low, tree *p_high, int *p_in_p,
3886 bool *strict_overflow_p)
3888 tree arg0_type = TREE_TYPE (arg0);
3889 tree n_low, n_high, low = *p_low, high = *p_high;
3890 int in_p = *p_in_p, n_in_p;
3892 switch (code)
3894 case TRUTH_NOT_EXPR:
3895 /* We can only do something if the range is testing for zero. */
3896 if (low == NULL_TREE || high == NULL_TREE
3897 || ! integer_zerop (low) || ! integer_zerop (high))
3898 return NULL_TREE;
3899 *p_in_p = ! in_p;
3900 return arg0;
3902 case EQ_EXPR: case NE_EXPR:
3903 case LT_EXPR: case LE_EXPR: case GE_EXPR: case GT_EXPR:
3904 /* We can only do something if the range is testing for zero
3905 and if the second operand is an integer constant. Note that
3906 saying something is "in" the range we make is done by
3907 complementing IN_P, since it is initially set for the case of
3908 being not equal to zero; "out" leaves it alone. */
3909 if (low == NULL_TREE || high == NULL_TREE
3910 || ! integer_zerop (low) || ! integer_zerop (high)
3911 || TREE_CODE (arg1) != INTEGER_CST)
3912 return NULL_TREE;
3914 switch (code)
3916 case NE_EXPR: /* - [c, c] */
3917 low = high = arg1;
3918 break;
3919 case EQ_EXPR: /* + [c, c] */
3920 in_p = ! in_p, low = high = arg1;
3921 break;
3922 case GT_EXPR: /* - [-, c] */
3923 low = 0, high = arg1;
3924 break;
3925 case GE_EXPR: /* + [c, -] */
3926 in_p = ! in_p, low = arg1, high = 0;
3927 break;
3928 case LT_EXPR: /* - [c, -] */
3929 low = arg1, high = 0;
3930 break;
3931 case LE_EXPR: /* + [-, c] */
3932 in_p = ! in_p, low = 0, high = arg1;
3933 break;
3934 default:
3935 gcc_unreachable ();
3938 /* If this is an unsigned comparison, we also know that EXP is
3939 greater than or equal to zero. We base the range tests we make
3940 on that fact, so we record it here so we can parse existing
3941 range tests. We test arg0_type since often the return type
3942 of, e.g. EQ_EXPR, is boolean. */
3943 if (TYPE_UNSIGNED (arg0_type) && (low == 0 || high == 0))
3945 if (! merge_ranges (&n_in_p, &n_low, &n_high,
3946 in_p, low, high, 1,
3947 build_int_cst (arg0_type, 0),
3948 NULL_TREE))
3949 return NULL_TREE;
3951 in_p = n_in_p, low = n_low, high = n_high;
3953 /* If the high bound is missing, but we have a nonzero low
3954 bound, reverse the range so it goes from zero to the low bound
3955 minus 1. */
3956 if (high == 0 && low && ! integer_zerop (low))
3958 in_p = ! in_p;
3959 high = range_binop (MINUS_EXPR, NULL_TREE, low, 0,
3960 build_int_cst (TREE_TYPE (low), 1), 0);
3961 low = build_int_cst (arg0_type, 0);
3965 *p_low = low;
3966 *p_high = high;
3967 *p_in_p = in_p;
3968 return arg0;
3970 case NEGATE_EXPR:
3971 /* If flag_wrapv and ARG0_TYPE is signed, make sure
3972 low and high are non-NULL, then normalize will DTRT. */
3973 if (!TYPE_UNSIGNED (arg0_type)
3974 && !TYPE_OVERFLOW_UNDEFINED (arg0_type))
3976 if (low == NULL_TREE)
3977 low = TYPE_MIN_VALUE (arg0_type);
3978 if (high == NULL_TREE)
3979 high = TYPE_MAX_VALUE (arg0_type);
3982 /* (-x) IN [a,b] -> x in [-b, -a] */
3983 n_low = range_binop (MINUS_EXPR, exp_type,
3984 build_int_cst (exp_type, 0),
3985 0, high, 1);
3986 n_high = range_binop (MINUS_EXPR, exp_type,
3987 build_int_cst (exp_type, 0),
3988 0, low, 0);
3989 if (n_high != 0 && TREE_OVERFLOW (n_high))
3990 return NULL_TREE;
3991 goto normalize;
3993 case BIT_NOT_EXPR:
3994 /* ~ X -> -X - 1 */
3995 return build2_loc (loc, MINUS_EXPR, exp_type, negate_expr (arg0),
3996 build_int_cst (exp_type, 1));
3998 case PLUS_EXPR:
3999 case MINUS_EXPR:
4000 if (TREE_CODE (arg1) != INTEGER_CST)
4001 return NULL_TREE;
4003 /* If flag_wrapv and ARG0_TYPE is signed, then we cannot
4004 move a constant to the other side. */
4005 if (!TYPE_UNSIGNED (arg0_type)
4006 && !TYPE_OVERFLOW_UNDEFINED (arg0_type))
4007 return NULL_TREE;
4009 /* If EXP is signed, any overflow in the computation is undefined,
4010 so we don't worry about it so long as our computations on
4011 the bounds don't overflow. For unsigned, overflow is defined
4012 and this is exactly the right thing. */
4013 n_low = range_binop (code == MINUS_EXPR ? PLUS_EXPR : MINUS_EXPR,
4014 arg0_type, low, 0, arg1, 0);
4015 n_high = range_binop (code == MINUS_EXPR ? PLUS_EXPR : MINUS_EXPR,
4016 arg0_type, high, 1, arg1, 0);
4017 if ((n_low != 0 && TREE_OVERFLOW (n_low))
4018 || (n_high != 0 && TREE_OVERFLOW (n_high)))
4019 return NULL_TREE;
4021 if (TYPE_OVERFLOW_UNDEFINED (arg0_type))
4022 *strict_overflow_p = true;
4024 normalize:
4025 /* Check for an unsigned range which has wrapped around the maximum
4026 value thus making n_high < n_low, and normalize it. */
4027 if (n_low && n_high && tree_int_cst_lt (n_high, n_low))
4029 low = range_binop (PLUS_EXPR, arg0_type, n_high, 0,
4030 build_int_cst (TREE_TYPE (n_high), 1), 0);
4031 high = range_binop (MINUS_EXPR, arg0_type, n_low, 0,
4032 build_int_cst (TREE_TYPE (n_low), 1), 0);
4034 /* If the range is of the form +/- [ x+1, x ], we won't
4035 be able to normalize it. But then, it represents the
4036 whole range or the empty set, so make it
4037 +/- [ -, - ]. */
4038 if (tree_int_cst_equal (n_low, low)
4039 && tree_int_cst_equal (n_high, high))
4040 low = high = 0;
4041 else
4042 in_p = ! in_p;
4044 else
4045 low = n_low, high = n_high;
4047 *p_low = low;
4048 *p_high = high;
4049 *p_in_p = in_p;
4050 return arg0;
4052 CASE_CONVERT:
4053 case NON_LVALUE_EXPR:
4054 if (TYPE_PRECISION (arg0_type) > TYPE_PRECISION (exp_type))
4055 return NULL_TREE;
4057 if (! INTEGRAL_TYPE_P (arg0_type)
4058 || (low != 0 && ! int_fits_type_p (low, arg0_type))
4059 || (high != 0 && ! int_fits_type_p (high, arg0_type)))
4060 return NULL_TREE;
4062 n_low = low, n_high = high;
4064 if (n_low != 0)
4065 n_low = fold_convert_loc (loc, arg0_type, n_low);
4067 if (n_high != 0)
4068 n_high = fold_convert_loc (loc, arg0_type, n_high);
4070 /* If we're converting arg0 from an unsigned type to exp's
4071 signed type, we will be doing the comparison as unsigned.
4072 The tests above have already verified that LOW and HIGH
4073 are both positive.
4075 So we have to ensure that we will handle large unsigned
4076 values the same way that the current signed bounds treat
4077 negative values. */
4079 if (!TYPE_UNSIGNED (exp_type) && TYPE_UNSIGNED (arg0_type))
4081 tree high_positive;
4082 tree equiv_type;
4083 /* For fixed-point modes, we need to pass the saturating flag
4084 as the 2nd parameter. */
4085 if (ALL_FIXED_POINT_MODE_P (TYPE_MODE (arg0_type)))
4086 equiv_type
4087 = lang_hooks.types.type_for_mode (TYPE_MODE (arg0_type),
4088 TYPE_SATURATING (arg0_type));
4089 else
4090 equiv_type
4091 = lang_hooks.types.type_for_mode (TYPE_MODE (arg0_type), 1);
4093 /* A range without an upper bound is, naturally, unbounded.
4094 Since convert would have cropped a very large value, use
4095 the max value for the destination type. */
4096 high_positive
4097 = TYPE_MAX_VALUE (equiv_type) ? TYPE_MAX_VALUE (equiv_type)
4098 : TYPE_MAX_VALUE (arg0_type);
4100 if (TYPE_PRECISION (exp_type) == TYPE_PRECISION (arg0_type))
4101 high_positive = fold_build2_loc (loc, RSHIFT_EXPR, arg0_type,
4102 fold_convert_loc (loc, arg0_type,
4103 high_positive),
4104 build_int_cst (arg0_type, 1));
4106 /* If the low bound is specified, "and" the range with the
4107 range for which the original unsigned value will be
4108 positive. */
4109 if (low != 0)
4111 if (! merge_ranges (&n_in_p, &n_low, &n_high, 1, n_low, n_high,
4112 1, fold_convert_loc (loc, arg0_type,
4113 integer_zero_node),
4114 high_positive))
4115 return NULL_TREE;
4117 in_p = (n_in_p == in_p);
4119 else
4121 /* Otherwise, "or" the range with the range of the input
4122 that will be interpreted as negative. */
4123 if (! merge_ranges (&n_in_p, &n_low, &n_high, 0, n_low, n_high,
4124 1, fold_convert_loc (loc, arg0_type,
4125 integer_zero_node),
4126 high_positive))
4127 return NULL_TREE;
4129 in_p = (in_p != n_in_p);
4133 *p_low = n_low;
4134 *p_high = n_high;
4135 *p_in_p = in_p;
4136 return arg0;
4138 default:
4139 return NULL_TREE;
4143 /* Given EXP, a logical expression, set the range it is testing into
4144 variables denoted by PIN_P, PLOW, and PHIGH. Return the expression
4145 actually being tested. *PLOW and *PHIGH will be made of the same
4146 type as the returned expression. If EXP is not a comparison, we
4147 will most likely not be returning a useful value and range. Set
4148 *STRICT_OVERFLOW_P to true if the return value is only valid
4149 because signed overflow is undefined; otherwise, do not change
4150 *STRICT_OVERFLOW_P. */
4152 tree
4153 make_range (tree exp, int *pin_p, tree *plow, tree *phigh,
4154 bool *strict_overflow_p)
4156 enum tree_code code;
4157 tree arg0, arg1 = NULL_TREE;
4158 tree exp_type, nexp;
4159 int in_p;
4160 tree low, high;
4161 location_t loc = EXPR_LOCATION (exp);
4163 /* Start with simply saying "EXP != 0" and then look at the code of EXP
4164 and see if we can refine the range. Some of the cases below may not
4165 happen, but it doesn't seem worth worrying about this. We keep
4166 iterating as long as make_range_step can refine the range; it
4167 returns NULL_TREE when no further refinement is possible. */
4169 in_p = 0;
4170 low = high = build_int_cst (TREE_TYPE (exp), 0);
4172 while (1)
4174 code = TREE_CODE (exp);
4175 exp_type = TREE_TYPE (exp);
4176 arg0 = NULL_TREE;
4178 if (IS_EXPR_CODE_CLASS (TREE_CODE_CLASS (code)))
4180 if (TREE_OPERAND_LENGTH (exp) > 0)
4181 arg0 = TREE_OPERAND (exp, 0);
4182 if (TREE_CODE_CLASS (code) == tcc_binary
4183 || TREE_CODE_CLASS (code) == tcc_comparison
4184 || (TREE_CODE_CLASS (code) == tcc_expression
4185 && TREE_OPERAND_LENGTH (exp) > 1))
4186 arg1 = TREE_OPERAND (exp, 1);
4188 if (arg0 == NULL_TREE)
4189 break;
4191 nexp = make_range_step (loc, code, arg0, arg1, exp_type, &low,
4192 &high, &in_p, strict_overflow_p);
4193 if (nexp == NULL_TREE)
4194 break;
4195 exp = nexp;
4198 /* If EXP is a constant, we can evaluate whether this is true or false. */
4199 if (TREE_CODE (exp) == INTEGER_CST)
4201 in_p = in_p == (integer_onep (range_binop (GE_EXPR, integer_type_node,
4202 exp, 0, low, 0))
4203 && integer_onep (range_binop (LE_EXPR, integer_type_node,
4204 exp, 1, high, 1)));
4205 low = high = 0;
4206 exp = 0;
4209 *pin_p = in_p, *plow = low, *phigh = high;
4210 return exp;
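/* For example (editorial note): for EXP "x > 10" with signed x, this
   returns "x" with *PIN_P == 0 and the range [-, 10], i.e. x lies
   outside [lowest, 10]; for EXP "(x - 2) <= 3" with unsigned x, it
   returns "x" with *PIN_P == 1 and the range [2, 5].  */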
4213 /* Given a range, LOW, HIGH, and IN_P, an expression, EXP, and a result
4214 type, TYPE, return an expression to test if EXP is in (or out of, depending
4215 on IN_P) the range. Return 0 if the test couldn't be created. */
4217 tree
4218 build_range_check (location_t loc, tree type, tree exp, int in_p,
4219 tree low, tree high)
4221 tree etype = TREE_TYPE (exp), value;
4223 #ifdef HAVE_canonicalize_funcptr_for_compare
4224 /* Disable this optimization for function pointer expressions
4225 on targets that require function pointer canonicalization. */
4226 if (HAVE_canonicalize_funcptr_for_compare
4227 && TREE_CODE (etype) == POINTER_TYPE
4228 && TREE_CODE (TREE_TYPE (etype)) == FUNCTION_TYPE)
4229 return NULL_TREE;
4230 #endif
4232 if (! in_p)
4234 value = build_range_check (loc, type, exp, 1, low, high);
4235 if (value != 0)
4236 return invert_truthvalue_loc (loc, value);
4238 return 0;
4241 if (low == 0 && high == 0)
4242 return omit_one_operand_loc (loc, type, build_int_cst (type, 1), exp);
4244 if (low == 0)
4245 return fold_build2_loc (loc, LE_EXPR, type, exp,
4246 fold_convert_loc (loc, etype, high));
4248 if (high == 0)
4249 return fold_build2_loc (loc, GE_EXPR, type, exp,
4250 fold_convert_loc (loc, etype, low));
4252 if (operand_equal_p (low, high, 0))
4253 return fold_build2_loc (loc, EQ_EXPR, type, exp,
4254 fold_convert_loc (loc, etype, low));
4256 if (integer_zerop (low))
4258 if (! TYPE_UNSIGNED (etype))
4260 etype = unsigned_type_for (etype);
4261 high = fold_convert_loc (loc, etype, high);
4262 exp = fold_convert_loc (loc, etype, exp);
4264 return build_range_check (loc, type, exp, 1, 0, high);
4267 /* Optimize (c>=1) && (c<=127) into (signed char)c > 0. */
4268 if (integer_onep (low) && TREE_CODE (high) == INTEGER_CST)
4270 int prec = TYPE_PRECISION (etype);
4272 if (wi::mask (prec - 1, false, prec) == high)
4274 if (TYPE_UNSIGNED (etype))
4276 tree signed_etype = signed_type_for (etype);
4277 if (TYPE_PRECISION (signed_etype) != TYPE_PRECISION (etype))
4278 etype
4279 = build_nonstandard_integer_type (TYPE_PRECISION (etype), 0);
4280 else
4281 etype = signed_etype;
4282 exp = fold_convert_loc (loc, etype, exp);
4284 return fold_build2_loc (loc, GT_EXPR, type, exp,
4285 build_int_cst (etype, 0));
4289 /* Optimize (c>=low) && (c<=high) into (c-low>=0) && (c-low<=high-low).
4290 This requires wrap-around arithmetic for the type of the expression.
4291 First make sure that arithmetic in this type is valid, then make sure
4292 that it wraps around. */
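/* For example (editorial note): "c >= '0' && c <= '9'" becomes
   "(unsigned) (c - '0') <= '9' - '0'" by this transformation.  */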
4293 if (TREE_CODE (etype) == ENUMERAL_TYPE || TREE_CODE (etype) == BOOLEAN_TYPE)
4294 etype = lang_hooks.types.type_for_size (TYPE_PRECISION (etype),
4295 TYPE_UNSIGNED (etype));
4297 if (TREE_CODE (etype) == INTEGER_TYPE && !TYPE_OVERFLOW_WRAPS (etype))
4299 tree utype, minv, maxv;
4301 /* Check if (unsigned) INT_MAX + 1 == (unsigned) INT_MIN
4302 for the type in question, as we rely on this here. */
4303 utype = unsigned_type_for (etype);
4304 maxv = fold_convert_loc (loc, utype, TYPE_MAX_VALUE (etype));
4305 maxv = range_binop (PLUS_EXPR, NULL_TREE, maxv, 1,
4306 build_int_cst (TREE_TYPE (maxv), 1), 1);
4307 minv = fold_convert_loc (loc, utype, TYPE_MIN_VALUE (etype));
4309 if (integer_zerop (range_binop (NE_EXPR, integer_type_node,
4310 minv, 1, maxv, 1)))
4311 etype = utype;
4312 else
4313 return 0;
4316 high = fold_convert_loc (loc, etype, high);
4317 low = fold_convert_loc (loc, etype, low);
4318 exp = fold_convert_loc (loc, etype, exp);
4320 value = const_binop (MINUS_EXPR, high, low);
4323 if (POINTER_TYPE_P (etype))
4325 if (value != 0 && !TREE_OVERFLOW (value))
4327 low = fold_build1_loc (loc, NEGATE_EXPR, TREE_TYPE (low), low);
4328 return build_range_check (loc, type,
4329 fold_build_pointer_plus_loc (loc, exp, low),
4330 1, build_int_cst (etype, 0), value);
4332 return 0;
4335 if (value != 0 && !TREE_OVERFLOW (value))
4336 return build_range_check (loc, type,
4337 fold_build2_loc (loc, MINUS_EXPR, etype, exp, low),
4338 1, build_int_cst (etype, 0), value);
4340 return 0;
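/* For example (editorial note): a range check for "x in [2, 5]" on a
   wrapping unsigned type comes out as "x - 2 <= 3"; with IN_P == 0 the
   built test is the inverse.  */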
4343 /* Return the predecessor of VAL in its type, handling the infinite case. */
4345 static tree
4346 range_predecessor (tree val)
4348 tree type = TREE_TYPE (val);
4350 if (INTEGRAL_TYPE_P (type)
4351 && operand_equal_p (val, TYPE_MIN_VALUE (type), 0))
4352 return 0;
4353 else
4354 return range_binop (MINUS_EXPR, NULL_TREE, val, 0,
4355 build_int_cst (TREE_TYPE (val), 1), 0);
4358 /* Return the successor of VAL in its type, handling the infinite case. */
4360 static tree
4361 range_successor (tree val)
4363 tree type = TREE_TYPE (val);
4365 if (INTEGRAL_TYPE_P (type)
4366 && operand_equal_p (val, TYPE_MAX_VALUE (type), 0))
4367 return 0;
4368 else
4369 return range_binop (PLUS_EXPR, NULL_TREE, val, 0,
4370 build_int_cst (TREE_TYPE (val), 1), 0);
4373 /* Given two ranges, see if we can merge them into one. Return 1 if we
4374 can, 0 if we can't. Set the output range into the specified parameters. */
4376 bool
4377 merge_ranges (int *pin_p, tree *plow, tree *phigh, int in0_p, tree low0,
4378 tree high0, int in1_p, tree low1, tree high1)
4380 int no_overlap;
4381 int subset;
4382 int temp;
4383 tree tem;
4384 int in_p;
4385 tree low, high;
4386 int lowequal = ((low0 == 0 && low1 == 0)
4387 || integer_onep (range_binop (EQ_EXPR, integer_type_node,
4388 low0, 0, low1, 0)));
4389 int highequal = ((high0 == 0 && high1 == 0)
4390 || integer_onep (range_binop (EQ_EXPR, integer_type_node,
4391 high0, 1, high1, 1)));
4393 /* Make range 0 be the range that starts first, or ends last if they
4394 start at the same value. Swap them if that is not already the case.
4395 if (integer_onep (range_binop (GT_EXPR, integer_type_node,
4396 low0, 0, low1, 0))
4397 || (lowequal
4398 && integer_onep (range_binop (GT_EXPR, integer_type_node,
4399 high1, 1, high0, 1))))
4401 temp = in0_p, in0_p = in1_p, in1_p = temp;
4402 tem = low0, low0 = low1, low1 = tem;
4403 tem = high0, high0 = high1, high1 = tem;
4406 /* Now flag two cases, whether the ranges are disjoint or whether the
4407 second range is totally subsumed in the first. Note that the tests
4408 below are simplified by the ones above. */
4409 no_overlap = integer_onep (range_binop (LT_EXPR, integer_type_node,
4410 high0, 1, low1, 0));
4411 subset = integer_onep (range_binop (LE_EXPR, integer_type_node,
4412 high1, 1, high0, 1));
4414 /* We now have four cases, depending on whether we are including or
4415 excluding the two ranges. */
4416 if (in0_p && in1_p)
4418 /* If they don't overlap, the result is false. If the second range
4419 is a subset it is the result. Otherwise, the range is from the start
4420 of the second to the end of the first. */
4421 if (no_overlap)
4422 in_p = 0, low = high = 0;
4423 else if (subset)
4424 in_p = 1, low = low1, high = high1;
4425 else
4426 in_p = 1, low = low1, high = high0;
4429 else if (in0_p && ! in1_p)
4431 /* If they don't overlap, the result is the first range. If they are
4432 equal, the result is false. If the second range is a subset of the
4433 first, and the ranges begin at the same place, we go from just after
4434 the end of the second range to the end of the first. If the second
4435 range is not a subset of the first, or if it is a subset and both
4436 ranges end at the same place, the range starts at the start of the
4437 first range and ends just before the second range.
4438 Otherwise, we can't describe this as a single range. */
4439 if (no_overlap)
4440 in_p = 1, low = low0, high = high0;
4441 else if (lowequal && highequal)
4442 in_p = 0, low = high = 0;
4443 else if (subset && lowequal)
4445 low = range_successor (high1);
4446 high = high0;
4447 in_p = 1;
4448 if (low == 0)
4450 /* We are in the weird situation where high0 > high1 but
4451 high1 has no successor. Punt. */
4452 return 0;
4455 else if (! subset || highequal)
4457 low = low0;
4458 high = range_predecessor (low1);
4459 in_p = 1;
4460 if (high == 0)
4462 /* low0 < low1 but low1 has no predecessor. Punt. */
4463 return 0;
4466 else
4467 return 0;
4470 else if (! in0_p && in1_p)
4472 /* If they don't overlap, the result is the second range. If the second
4473 is a subset of the first, the result is false. Otherwise,
4474 the range starts just after the first range and ends at the
4475 end of the second. */
4476 if (no_overlap)
4477 in_p = 1, low = low1, high = high1;
4478 else if (subset || highequal)
4479 in_p = 0, low = high = 0;
4480 else
4482 low = range_successor (high0);
4483 high = high1;
4484 in_p = 1;
4485 if (low == 0)
4487 /* high1 > high0 but high0 has no successor. Punt. */
4488 return 0;
4493 else
4495 /* The case where we are excluding both ranges. Here the complex case
4496 is if they don't overlap. In that case, the only time we have a
4497 range is if they are adjacent. If the second is a subset of the
4498 first, the result is the first. Otherwise, the range to exclude
4499 starts at the beginning of the first range and ends at the end of the
4500 second. */
4501 if (no_overlap)
4503 if (integer_onep (range_binop (EQ_EXPR, integer_type_node,
4504 range_successor (high0),
4505 1, low1, 0)))
4506 in_p = 0, low = low0, high = high1;
4507 else
4509 /* Canonicalize - [min, x] into - [-, x]. */
4510 if (low0 && TREE_CODE (low0) == INTEGER_CST)
4511 switch (TREE_CODE (TREE_TYPE (low0)))
4513 case ENUMERAL_TYPE:
4514 if (TYPE_PRECISION (TREE_TYPE (low0))
4515 != GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (low0))))
4516 break;
4517 /* FALLTHROUGH */
4518 case INTEGER_TYPE:
4519 if (tree_int_cst_equal (low0,
4520 TYPE_MIN_VALUE (TREE_TYPE (low0))))
4521 low0 = 0;
4522 break;
4523 case POINTER_TYPE:
4524 if (TYPE_UNSIGNED (TREE_TYPE (low0))
4525 && integer_zerop (low0))
4526 low0 = 0;
4527 break;
4528 default:
4529 break;
4532 /* Canonicalize - [x, max] into - [x, -]. */
4533 if (high1 && TREE_CODE (high1) == INTEGER_CST)
4534 switch (TREE_CODE (TREE_TYPE (high1)))
4536 case ENUMERAL_TYPE:
4537 if (TYPE_PRECISION (TREE_TYPE (high1))
4538 != GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (high1))))
4539 break;
4540 /* FALLTHROUGH */
4541 case INTEGER_TYPE:
4542 if (tree_int_cst_equal (high1,
4543 TYPE_MAX_VALUE (TREE_TYPE (high1))))
4544 high1 = 0;
4545 break;
4546 case POINTER_TYPE:
4547 if (TYPE_UNSIGNED (TREE_TYPE (high1))
4548 && integer_zerop (range_binop (PLUS_EXPR, NULL_TREE,
4549 high1, 1,
4550 build_int_cst (TREE_TYPE (high1), 1),
4551 1)))
4552 high1 = 0;
4553 break;
4554 default:
4555 break;
4558 /* The ranges might be also adjacent between the maximum and
4559 minimum values of the given type. For
4560 - [{min,-}, x] and - [y, {max,-}] ranges where x + 1 < y
4561 return + [x + 1, y - 1]. */
4562 if (low0 == 0 && high1 == 0)
4564 low = range_successor (high0);
4565 high = range_predecessor (low1);
4566 if (low == 0 || high == 0)
4567 return 0;
4569 in_p = 1;
4571 else
4572 return 0;
4575 else if (subset)
4576 in_p = 0, low = low0, high = high0;
4577 else
4578 in_p = 0, low = low0, high = high1;
4581 *pin_p = in_p, *plow = low, *phigh = high;
4582 return 1;
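/* For example (editorial note): merging "+ [2, 5]" with "+ [4, 9]"
   yields the overlap "+ [4, 5]", while merging the adjacent excluded
   ranges "- [2, 5]" and "- [6, 9]" yields "- [2, 9]".  */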
4586 /* Subroutine of fold, looking inside expressions of the form
4587 A op B ? A : C, where ARG0, ARG1 and ARG2 are the three operands
4588 of the COND_EXPR. This function is being used also to optimize
4589 A op B ? C : A, by reversing the comparison first.
4591 Return a folded expression whose code is not a COND_EXPR
4592 anymore, or NULL_TREE if no folding opportunity is found. */
4594 static tree
4595 fold_cond_expr_with_comparison (location_t loc, tree type,
4596 tree arg0, tree arg1, tree arg2)
4598 enum tree_code comp_code = TREE_CODE (arg0);
4599 tree arg00 = TREE_OPERAND (arg0, 0);
4600 tree arg01 = TREE_OPERAND (arg0, 1);
4601 tree arg1_type = TREE_TYPE (arg1);
4602 tree tem;
4604 STRIP_NOPS (arg1);
4605 STRIP_NOPS (arg2);
4607 /* If we have A op 0 ? A : -A, consider applying the following
4608 transformations:
4610 A == 0? A : -A same as -A
4611 A != 0? A : -A same as A
4612 A >= 0? A : -A same as abs (A)
4613 A > 0? A : -A same as abs (A)
4614 A <= 0? A : -A same as -abs (A)
4615 A < 0? A : -A same as -abs (A)
4617 None of these transformations work for modes with signed
4618 zeros. If A is +/-0, the first two transformations will
4619 change the sign of the result (from +0 to -0, or vice
4620 versa). The last four will fix the sign of the result,
4621 even though the original expressions could be positive or
4622 negative, depending on the sign of A.
4624 Note that all these transformations are correct if A is
4625 NaN, since the two alternatives (A and -A) are also NaNs. */
4626 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (type))
4627 && (FLOAT_TYPE_P (TREE_TYPE (arg01))
4628 ? real_zerop (arg01)
4629 : integer_zerop (arg01))
4630 && ((TREE_CODE (arg2) == NEGATE_EXPR
4631 && operand_equal_p (TREE_OPERAND (arg2, 0), arg1, 0))
4632 /* In the case that A is of the form X-Y, '-A' (arg2) may
4633 have already been folded to Y-X, check for that. */
4634 || (TREE_CODE (arg1) == MINUS_EXPR
4635 && TREE_CODE (arg2) == MINUS_EXPR
4636 && operand_equal_p (TREE_OPERAND (arg1, 0),
4637 TREE_OPERAND (arg2, 1), 0)
4638 && operand_equal_p (TREE_OPERAND (arg1, 1),
4639 TREE_OPERAND (arg2, 0), 0))))
4640 switch (comp_code)
4642 case EQ_EXPR:
4643 case UNEQ_EXPR:
4644 tem = fold_convert_loc (loc, arg1_type, arg1);
4645 return pedantic_non_lvalue_loc (loc,
4646 fold_convert_loc (loc, type,
4647 negate_expr (tem)));
4648 case NE_EXPR:
4649 case LTGT_EXPR:
4650 return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
4651 case UNGE_EXPR:
4652 case UNGT_EXPR:
4653 if (flag_trapping_math)
4654 break;
4655 /* Fall through. */
4656 case GE_EXPR:
4657 case GT_EXPR:
4658 if (TYPE_UNSIGNED (TREE_TYPE (arg1)))
4659 arg1 = fold_convert_loc (loc, signed_type_for
4660 (TREE_TYPE (arg1)), arg1);
4661 tem = fold_build1_loc (loc, ABS_EXPR, TREE_TYPE (arg1), arg1);
4662 return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, tem));
4663 case UNLE_EXPR:
4664 case UNLT_EXPR:
4665 if (flag_trapping_math)
4666 break;
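/* Fall through. */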
4667 case LE_EXPR:
4668 case LT_EXPR:
4669 if (TYPE_UNSIGNED (TREE_TYPE (arg1)))
4670 arg1 = fold_convert_loc (loc, signed_type_for
4671 (TREE_TYPE (arg1)), arg1);
4672 tem = fold_build1_loc (loc, ABS_EXPR, TREE_TYPE (arg1), arg1);
4673 return negate_expr (fold_convert_loc (loc, type, tem));
4674 default:
4675 gcc_assert (TREE_CODE_CLASS (comp_code) == tcc_comparison);
4676 break;
4679 /* A != 0 ? A : 0 is simply A, unless A is -0. Likewise
4680 A == 0 ? A : 0 is always 0 unless A is -0. Note that
4681 both transformations are correct when A is NaN: A != 0
4682 is then true, and A == 0 is false. */
4684 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (type))
4685 && integer_zerop (arg01) && integer_zerop (arg2))
4687 if (comp_code == NE_EXPR)
4688 return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
4689 else if (comp_code == EQ_EXPR)
4690 return build_zero_cst (type);
4693 /* Try some transformations of A op B ? A : B.
4695 A == B? A : B same as B
4696 A != B? A : B same as A
4697 A >= B? A : B same as max (A, B)
4698 A > B? A : B same as max (B, A)
4699 A <= B? A : B same as min (A, B)
4700 A < B? A : B same as min (B, A)
4702 As above, these transformations don't work in the presence
4703 of signed zeros. For example, if A and B are zeros of
4704 opposite sign, the first two transformations will change
4705 the sign of the result. In the last four, the original
4706 expressions give different results for (A=+0, B=-0) and
4707 (A=-0, B=+0), but the transformed expressions do not.
4709 The first two transformations are correct if either A or B
4710 is a NaN. In the first transformation, the condition will
4711 be false, and B will indeed be chosen. In the case of the
4712 second transformation, the condition A != B will be true,
4713 and A will be chosen.
4715 The conversions to max() and min() are not correct if B is
4716 a number and A is not. The conditions in the original
4717 expressions will be false, so all four give B. The min()
4718 and max() versions would give a NaN instead. */
4719 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (type))
4720 && operand_equal_for_comparison_p (arg01, arg2, arg00)
4721 /* Avoid these transformations if the COND_EXPR may be used
4722 as an lvalue in the C++ front-end. PR c++/19199. */
4723 && (in_gimple_form
4724 || VECTOR_TYPE_P (type)
4725 || (strcmp (lang_hooks.name, "GNU C++") != 0
4726 && strcmp (lang_hooks.name, "GNU Objective-C++") != 0)
4727 || ! maybe_lvalue_p (arg1)
4728 || ! maybe_lvalue_p (arg2)))
4730 tree comp_op0 = arg00;
4731 tree comp_op1 = arg01;
4732 tree comp_type = TREE_TYPE (comp_op0);
4734 /* Avoid adding NOP_EXPRs in case this is an lvalue. */
4735 if (TYPE_MAIN_VARIANT (comp_type) == TYPE_MAIN_VARIANT (type))
4737 comp_type = type;
4738 comp_op0 = arg1;
4739 comp_op1 = arg2;
4742 switch (comp_code)
4744 case EQ_EXPR:
4745 return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, arg2));
4746 case NE_EXPR:
4747 return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
4748 case LE_EXPR:
4749 case LT_EXPR:
4750 case UNLE_EXPR:
4751 case UNLT_EXPR:
4752 /* In C++ a ?: expression can be an lvalue, so put the
4753 operand which will be used if they are equal first
4754 so that we can convert this back to the
4755 corresponding COND_EXPR. */
4756 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
4758 comp_op0 = fold_convert_loc (loc, comp_type, comp_op0);
4759 comp_op1 = fold_convert_loc (loc, comp_type, comp_op1);
4760 tem = (comp_code == LE_EXPR || comp_code == UNLE_EXPR)
4761 ? fold_build2_loc (loc, MIN_EXPR, comp_type, comp_op0, comp_op1)
4762 : fold_build2_loc (loc, MIN_EXPR, comp_type,
4763 comp_op1, comp_op0);
4764 return pedantic_non_lvalue_loc (loc,
4765 fold_convert_loc (loc, type, tem));
4767 break;
4768 case GE_EXPR:
4769 case GT_EXPR:
4770 case UNGE_EXPR:
4771 case UNGT_EXPR:
4772 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
4774 comp_op0 = fold_convert_loc (loc, comp_type, comp_op0);
4775 comp_op1 = fold_convert_loc (loc, comp_type, comp_op1);
4776 tem = (comp_code == GE_EXPR || comp_code == UNGE_EXPR)
4777 ? fold_build2_loc (loc, MAX_EXPR, comp_type, comp_op0, comp_op1)
4778 : fold_build2_loc (loc, MAX_EXPR, comp_type,
4779 comp_op1, comp_op0);
4780 return pedantic_non_lvalue_loc (loc,
4781 fold_convert_loc (loc, type, tem));
4783 break;
4784 case UNEQ_EXPR:
4785 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
4786 return pedantic_non_lvalue_loc (loc,
4787 fold_convert_loc (loc, type, arg2));
4788 break;
4789 case LTGT_EXPR:
4790 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
4791 return pedantic_non_lvalue_loc (loc,
4792 fold_convert_loc (loc, type, arg1));
4793 break;
4794 default:
4795 gcc_assert (TREE_CODE_CLASS (comp_code) == tcc_comparison);
4796 break;
4800 /* If this is A op C1 ? A : C2 with C1 and C2 constant integers,
4801 we might still be able to simplify this. For example,
4802 if C1 is one less or one more than C2, this might have started
4803 out as a MIN or MAX and been transformed by this function.
4804 Only good for INTEGER_TYPEs, because we need TYPE_MAX_VALUE. */
4806 if (INTEGRAL_TYPE_P (type)
4807 && TREE_CODE (arg01) == INTEGER_CST
4808 && TREE_CODE (arg2) == INTEGER_CST)
4809 switch (comp_code)
4811 case EQ_EXPR:
4812 if (TREE_CODE (arg1) == INTEGER_CST)
4813 break;
4814 /* We can replace A with C1 in this case. */
4815 arg1 = fold_convert_loc (loc, type, arg01);
4816 return fold_build3_loc (loc, COND_EXPR, type, arg0, arg1, arg2);
4818 case LT_EXPR:
4819 /* If C1 is C2 + 1, this is min(A, C2), but use ARG00's type for
4820 MIN_EXPR, to preserve the signedness of the comparison. */
4821 if (! operand_equal_p (arg2, TYPE_MAX_VALUE (type),
4822 OEP_ONLY_CONST)
4823 && operand_equal_p (arg01,
4824 const_binop (PLUS_EXPR, arg2,
4825 build_int_cst (type, 1)),
4826 OEP_ONLY_CONST))
4828 tem = fold_build2_loc (loc, MIN_EXPR, TREE_TYPE (arg00), arg00,
4829 fold_convert_loc (loc, TREE_TYPE (arg00),
4830 arg2));
4831 return pedantic_non_lvalue_loc (loc,
4832 fold_convert_loc (loc, type, tem));
4834 break;
4836 case LE_EXPR:
4837 /* If C1 is C2 - 1, this is min(A, C2), with the same care
4838 as above. */
4839 if (! operand_equal_p (arg2, TYPE_MIN_VALUE (type),
4840 OEP_ONLY_CONST)
4841 && operand_equal_p (arg01,
4842 const_binop (MINUS_EXPR, arg2,
4843 build_int_cst (type, 1)),
4844 OEP_ONLY_CONST))
4846 tem = fold_build2_loc (loc, MIN_EXPR, TREE_TYPE (arg00), arg00,
4847 fold_convert_loc (loc, TREE_TYPE (arg00),
4848 arg2));
4849 return pedantic_non_lvalue_loc (loc,
4850 fold_convert_loc (loc, type, tem));
4852 break;
4854 case GT_EXPR:
4855 /* If C1 is C2 - 1, this is max(A, C2), but use ARG00's type for
4856 MAX_EXPR, to preserve the signedness of the comparison. */
4857 if (! operand_equal_p (arg2, TYPE_MIN_VALUE (type),
4858 OEP_ONLY_CONST)
4859 && operand_equal_p (arg01,
4860 const_binop (MINUS_EXPR, arg2,
4861 build_int_cst (type, 1)),
4862 OEP_ONLY_CONST))
4864 tem = fold_build2_loc (loc, MAX_EXPR, TREE_TYPE (arg00), arg00,
4865 fold_convert_loc (loc, TREE_TYPE (arg00),
4866 arg2));
4867 return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, tem));
4869 break;
4871 case GE_EXPR:
4872 /* If C1 is C2 + 1, this is max(A, C2), with the same care as above. */
4873 if (! operand_equal_p (arg2, TYPE_MAX_VALUE (type),
4874 OEP_ONLY_CONST)
4875 && operand_equal_p (arg01,
4876 const_binop (PLUS_EXPR, arg2,
4877 build_int_cst (type, 1)),
4878 OEP_ONLY_CONST))
4880 tem = fold_build2_loc (loc, MAX_EXPR, TREE_TYPE (arg00), arg00,
4881 fold_convert_loc (loc, TREE_TYPE (arg00),
4882 arg2));
4883 return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, tem));
4885 break;
4886 case NE_EXPR:
4887 break;
4888 default:
4889 gcc_unreachable ();
4892 return NULL_TREE;
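/* Editorial sketch, not part of the original file: two of the scalar
   identities used above, in plain C, where int has neither signed
   zeros nor NaNs.  */
static int
cond_min_example (int a, int b)
{
  return a < b ? a : b;    /* folds to MIN_EXPR <a, b> */
}

static int
cond_abs_example (int a)
{
  return a >= 0 ? a : -a;  /* folds to ABS_EXPR <a> */
}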
4897 #ifndef LOGICAL_OP_NON_SHORT_CIRCUIT
4898 #define LOGICAL_OP_NON_SHORT_CIRCUIT \
4899 (BRANCH_COST (optimize_function_for_speed_p (cfun), \
4900 false) >= 2)
4901 #endif
4903 /* EXP is some logical combination of boolean tests. See if we can
4904 merge it into some range test. Return the new tree if so. */
4906 static tree
4907 fold_range_test (location_t loc, enum tree_code code, tree type,
4908 tree op0, tree op1)
4910 int or_op = (code == TRUTH_ORIF_EXPR
4911 || code == TRUTH_OR_EXPR);
4912 int in0_p, in1_p, in_p;
4913 tree low0, low1, low, high0, high1, high;
4914 bool strict_overflow_p = false;
4915 tree tem, lhs, rhs;
4916 const char * const warnmsg = G_("assuming signed overflow does not occur "
4917 "when simplifying range test");
4919 if (!INTEGRAL_TYPE_P (type))
4920 return 0;
4922 lhs = make_range (op0, &in0_p, &low0, &high0, &strict_overflow_p);
4923 rhs = make_range (op1, &in1_p, &low1, &high1, &strict_overflow_p);
4925 /* If this is an OR operation, invert both sides; we will invert
4926 again at the end. */
4927 if (or_op)
4928 in0_p = ! in0_p, in1_p = ! in1_p;
4930 /* If both expressions are the same, if we can merge the ranges, and we
4931 can build the range test, return it or it inverted. If one of the
4932 ranges is always true or always false, consider it to be the same
4933 expression as the other. */
4934 if ((lhs == 0 || rhs == 0 || operand_equal_p (lhs, rhs, 0))
4935 && merge_ranges (&in_p, &low, &high, in0_p, low0, high0,
4936 in1_p, low1, high1)
4937 && 0 != (tem = (build_range_check (loc, type,
4938 lhs != 0 ? lhs
4939 : rhs != 0 ? rhs : integer_zero_node,
4940 in_p, low, high))))
4942 if (strict_overflow_p)
4943 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_COMPARISON);
4944 return or_op ? invert_truthvalue_loc (loc, tem) : tem;
4947 /* On machines where branches are expensive, if this is a
4948 short-circuited branch and the underlying object on both sides
4949 is the same, make a non-short-circuit operation. */
4950 else if (LOGICAL_OP_NON_SHORT_CIRCUIT
4951 && lhs != 0 && rhs != 0
4952 && (code == TRUTH_ANDIF_EXPR
4953 || code == TRUTH_ORIF_EXPR)
4954 && operand_equal_p (lhs, rhs, 0))
4956 /* If simple enough, just rewrite. Otherwise, make a SAVE_EXPR
4957 unless we are at top level or LHS contains a PLACEHOLDER_EXPR, in
4958 which cases we can't do this. */
4959 if (simple_operand_p (lhs))
4960 return build2_loc (loc, code == TRUTH_ANDIF_EXPR
4961 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR,
4962 type, op0, op1);
4964 else if (!lang_hooks.decls.global_bindings_p ()
4965 && !CONTAINS_PLACEHOLDER_P (lhs))
4967 tree common = save_expr (lhs);
4969 if (0 != (lhs = build_range_check (loc, type, common,
4970 or_op ? ! in0_p : in0_p,
4971 low0, high0))
4972 && (0 != (rhs = build_range_check (loc, type, common,
4973 or_op ? ! in1_p : in1_p,
4974 low1, high1))))
4976 if (strict_overflow_p)
4977 fold_overflow_warning (warnmsg,
4978 WARN_STRICT_OVERFLOW_COMPARISON);
4979 return build2_loc (loc, code == TRUTH_ANDIF_EXPR
4980 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR,
4981 type, lhs, rhs);
4986 return 0;
4989 /* Subroutine for fold_truth_andor_1: C is an INTEGER_CST interpreted as a
4990 P-bit value. Arrange things so the extra bits will be set to zero if and
4991 only if C is sign-extended to its full width. If MASK is nonzero,
4992 it is an INTEGER_CST that should be AND'ed with the extra bits. */
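/* Editorial sketch, not part of the original file: the effect of
   unextend in plain C for a field of P bits (0 < P < 32) in a 32-bit
   word, with the optional MASK handling left out.  The bits above the
   field end up zero iff C arrived sign-extended.  */
static unsigned int
unextend_example (unsigned int c, int p)
{
  unsigned int sign = (c >> (p - 1)) & 1;   /* sign bit of the field */
  unsigned int ext = sign ? ~0u << p : 0;   /* its copies above bit P-1 */
  return c ^ ext;   /* the XOR clears those copies when they match */
}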
4994 static tree
4995 unextend (tree c, int p, int unsignedp, tree mask)
4997 tree type = TREE_TYPE (c);
4998 int modesize = GET_MODE_BITSIZE (TYPE_MODE (type));
4999 tree temp;
5001 if (p == modesize || unsignedp)
5002 return c;
5004 /* We work by getting just the sign bit into the low-order bit, then
5005 into the high-order bit, then sign-extend. We then XOR that value
5006 with C. */
5007 temp = build_int_cst (TREE_TYPE (c), wi::extract_uhwi (c, p - 1, 1));
5009 /* We must use a signed type in order to get an arithmetic right shift.
5010 However, we must also avoid introducing accidental overflows, so that
5011 a subsequent call to integer_zerop will work. Hence we must
5012 do the type conversion here. At this point, the constant is either
5013 zero or one, and the conversion to a signed type can never overflow.
5014 We could get an overflow if this conversion is done anywhere else. */
5015 if (TYPE_UNSIGNED (type))
5016 temp = fold_convert (signed_type_for (type), temp);
5018 temp = const_binop (LSHIFT_EXPR, temp, size_int (modesize - 1));
5019 temp = const_binop (RSHIFT_EXPR, temp, size_int (modesize - p - 1));
5020 if (mask != 0)
5021 temp = const_binop (BIT_AND_EXPR, temp,
5022 fold_convert (TREE_TYPE (c), mask));
5023 /* If necessary, convert the type back to match the type of C. */
5024 if (TYPE_UNSIGNED (type))
5025 temp = fold_convert (type, temp);
5027 return fold_convert (type, const_binop (BIT_XOR_EXPR, c, temp));
5030 /* For an expression that has the form
5031 (A && B) || ~B
5032 or
5033 (A || B) && ~B,
5034 we can drop one of the inner expressions and simplify to
5035 A || ~B
5036 or
5037 A && ~B
5038 LOC is the location of the resulting expression. OP is the inner
5039 logical operation; the left-hand side in the examples above, while CMPOP
5040 is the right-hand side. RHS_ONLY is used to prevent us from accidentally
5041 removing a condition that guards another, as in
5042 (A != NULL && A->...) || A == NULL
5043 which we must not transform. If RHS_ONLY is true, only eliminate the
5044 right-most operand of the inner logical operation. */
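/* Editorial sketch, not part of the original file: the underlying
   boolean identity in plain C, reading ~B as logical negation.  */
static int
opposite_arm_example (int a, int b)
{
  /* (a && b) || !b evaluates to the same value as a || !b for every
     input, so the inner B can be dropped.  */
  return a || !b;
}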
5046 static tree
5047 merge_truthop_with_opposite_arm (location_t loc, tree op, tree cmpop,
5048 bool rhs_only)
5050 tree type = TREE_TYPE (cmpop);
5051 enum tree_code code = TREE_CODE (cmpop);
5052 enum tree_code truthop_code = TREE_CODE (op);
5053 tree lhs = TREE_OPERAND (op, 0);
5054 tree rhs = TREE_OPERAND (op, 1);
5055 tree orig_lhs = lhs, orig_rhs = rhs;
5056 enum tree_code rhs_code = TREE_CODE (rhs);
5057 enum tree_code lhs_code = TREE_CODE (lhs);
5058 enum tree_code inv_code;
5060 if (TREE_SIDE_EFFECTS (op) || TREE_SIDE_EFFECTS (cmpop))
5061 return NULL_TREE;
5063 if (TREE_CODE_CLASS (code) != tcc_comparison)
5064 return NULL_TREE;
5066 if (rhs_code == truthop_code)
5068 tree newrhs = merge_truthop_with_opposite_arm (loc, rhs, cmpop, rhs_only);
5069 if (newrhs != NULL_TREE)
5071 rhs = newrhs;
5072 rhs_code = TREE_CODE (rhs);
5075 if (lhs_code == truthop_code && !rhs_only)
5077 tree newlhs = merge_truthop_with_opposite_arm (loc, lhs, cmpop, false);
5078 if (newlhs != NULL_TREE)
5080 lhs = newlhs;
5081 lhs_code = TREE_CODE (lhs);
5085 inv_code = invert_tree_comparison (code, HONOR_NANS (TYPE_MODE (type)));
5086 if (inv_code == rhs_code
5087 && operand_equal_p (TREE_OPERAND (rhs, 0), TREE_OPERAND (cmpop, 0), 0)
5088 && operand_equal_p (TREE_OPERAND (rhs, 1), TREE_OPERAND (cmpop, 1), 0))
5089 return lhs;
5090 if (!rhs_only && inv_code == lhs_code
5091 && operand_equal_p (TREE_OPERAND (lhs, 0), TREE_OPERAND (cmpop, 0), 0)
5092 && operand_equal_p (TREE_OPERAND (lhs, 1), TREE_OPERAND (cmpop, 1), 0))
5093 return rhs;
5094 if (rhs != orig_rhs || lhs != orig_lhs)
5095 return fold_build2_loc (loc, truthop_code, TREE_TYPE (cmpop),
5096 lhs, rhs);
5097 return NULL_TREE;
5100 /* Find ways of folding logical expressions of LHS and RHS:
5101 Try to merge two comparisons to the same innermost item.
5102 Look for range tests like "ch >= '0' && ch <= '9'".
5103 Look for combinations of simple terms on machines with expensive branches
5104 and evaluate the RHS unconditionally.
5106 For example, if we have p->a == 2 && p->b == 4 and we can make an
5107 object large enough to span both A and B, we can do this with a comparison
5108 against the object ANDed with the a mask.
5110 If we have p->a == q->a && p->b == q->b, we may be able to use bit masking
5111 operations to do this with one comparison.
5113 We check for both normal comparisons and the BIT_AND_EXPRs made by this
5114 function and the one above.
5116 CODE is the logical operation being done. It can be TRUTH_ANDIF_EXPR,
5117 TRUTH_AND_EXPR, TRUTH_ORIF_EXPR, or TRUTH_OR_EXPR.
5119 TRUTH_TYPE is the type of the logical operand and LHS and RHS are its
5120 two operands.
5122 We return the simplified tree or 0 if no optimization is possible. */
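/* Editorial sketch, not part of the original file: the kind of merge
   described above for a hypothetical byte that carries field A in its
   low four bits and field B in the next two (the real layout is
   target-dependent, which is what the code below must work out).  */
static int
merged_compare_example (const unsigned char *p)
{
  /* "a == 2 && b == 1" done as one masked compare of the byte.  */
  return (*p & 0x3f) == ((1 << 4) | 2);
}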
5124 static tree
5125 fold_truth_andor_1 (location_t loc, enum tree_code code, tree truth_type,
5126 tree lhs, tree rhs)
5128 /* If this is the "or" of two comparisons, we can do something if
5129 the comparisons are NE_EXPR. If this is the "and", we can do something
5130 if the comparisons are EQ_EXPR. I.e.,
5131 (a->b == 2 && a->c == 4) can become (a->new == NEW).
5133 WANTED_CODE is this operation code. For single bit fields, we can
5134 convert EQ_EXPR to NE_EXPR so we need not reject the "wrong"
5135 comparison for one-bit fields. */
5137 enum tree_code wanted_code;
5138 enum tree_code lcode, rcode;
5139 tree ll_arg, lr_arg, rl_arg, rr_arg;
5140 tree ll_inner, lr_inner, rl_inner, rr_inner;
5141 HOST_WIDE_INT ll_bitsize, ll_bitpos, lr_bitsize, lr_bitpos;
5142 HOST_WIDE_INT rl_bitsize, rl_bitpos, rr_bitsize, rr_bitpos;
5143 HOST_WIDE_INT xll_bitpos, xlr_bitpos, xrl_bitpos, xrr_bitpos;
5144 HOST_WIDE_INT lnbitsize, lnbitpos, rnbitsize, rnbitpos;
5145 int ll_unsignedp, lr_unsignedp, rl_unsignedp, rr_unsignedp;
5146 enum machine_mode ll_mode, lr_mode, rl_mode, rr_mode;
5147 enum machine_mode lnmode, rnmode;
5148 tree ll_mask, lr_mask, rl_mask, rr_mask;
5149 tree ll_and_mask, lr_and_mask, rl_and_mask, rr_and_mask;
5150 tree l_const, r_const;
5151 tree lntype, rntype, result;
5152 HOST_WIDE_INT first_bit, end_bit;
5153 int volatilep;
5155 /* Start by getting the comparison codes. Fail if anything is volatile.
5156 If one operand is a BIT_AND_EXPR with the constant one, treat it as if
5157 it were surrounded with a NE_EXPR. */
5159 if (TREE_SIDE_EFFECTS (lhs) || TREE_SIDE_EFFECTS (rhs))
5160 return 0;
5162 lcode = TREE_CODE (lhs);
5163 rcode = TREE_CODE (rhs);
5165 if (lcode == BIT_AND_EXPR && integer_onep (TREE_OPERAND (lhs, 1)))
5167 lhs = build2 (NE_EXPR, truth_type, lhs,
5168 build_int_cst (TREE_TYPE (lhs), 0));
5169 lcode = NE_EXPR;
5172 if (rcode == BIT_AND_EXPR && integer_onep (TREE_OPERAND (rhs, 1)))
5174 rhs = build2 (NE_EXPR, truth_type, rhs,
5175 build_int_cst (TREE_TYPE (rhs), 0));
5176 rcode = NE_EXPR;
5179 if (TREE_CODE_CLASS (lcode) != tcc_comparison
5180 || TREE_CODE_CLASS (rcode) != tcc_comparison)
5181 return 0;
5183 ll_arg = TREE_OPERAND (lhs, 0);
5184 lr_arg = TREE_OPERAND (lhs, 1);
5185 rl_arg = TREE_OPERAND (rhs, 0);
5186 rr_arg = TREE_OPERAND (rhs, 1);
5188 /* Simplify (x<y) && (x==y) into (x<=y) and related optimizations. */
5189 if (simple_operand_p (ll_arg)
5190 && simple_operand_p (lr_arg))
5192 if (operand_equal_p (ll_arg, rl_arg, 0)
5193 && operand_equal_p (lr_arg, rr_arg, 0))
5195 result = combine_comparisons (loc, code, lcode, rcode,
5196 truth_type, ll_arg, lr_arg);
5197 if (result)
5198 return result;
5200 else if (operand_equal_p (ll_arg, rr_arg, 0)
5201 && operand_equal_p (lr_arg, rl_arg, 0))
5203 result = combine_comparisons (loc, code, lcode,
5204 swap_tree_comparison (rcode),
5205 truth_type, ll_arg, lr_arg);
5206 if (result)
5207 return result;
5211 code = ((code == TRUTH_AND_EXPR || code == TRUTH_ANDIF_EXPR)
5212 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR);
5214 /* If the RHS can be evaluated unconditionally and its operands are
5215 simple, it wins to evaluate the RHS unconditionally on machines
5216 with expensive branches. In this case, this isn't a comparison
5217 that can be merged. */
5219 if (BRANCH_COST (optimize_function_for_speed_p (cfun),
5220 false) >= 2
5221 && ! FLOAT_TYPE_P (TREE_TYPE (rl_arg))
5222 && simple_operand_p (rl_arg)
5223 && simple_operand_p (rr_arg))
5225 /* Convert (a != 0) || (b != 0) into (a | b) != 0. */
5226 if (code == TRUTH_OR_EXPR
5227 && lcode == NE_EXPR && integer_zerop (lr_arg)
5228 && rcode == NE_EXPR && integer_zerop (rr_arg)
5229 && TREE_TYPE (ll_arg) == TREE_TYPE (rl_arg)
5230 && INTEGRAL_TYPE_P (TREE_TYPE (ll_arg)))
5231 return build2_loc (loc, NE_EXPR, truth_type,
5232 build2 (BIT_IOR_EXPR, TREE_TYPE (ll_arg),
5233 ll_arg, rl_arg),
5234 build_int_cst (TREE_TYPE (ll_arg), 0));
5236 /* Convert (a == 0) && (b == 0) into (a | b) == 0. */
5237 if (code == TRUTH_AND_EXPR
5238 && lcode == EQ_EXPR && integer_zerop (lr_arg)
5239 && rcode == EQ_EXPR && integer_zerop (rr_arg)
5240 && TREE_TYPE (ll_arg) == TREE_TYPE (rl_arg)
5241 && INTEGRAL_TYPE_P (TREE_TYPE (ll_arg)))
5242 return build2_loc (loc, EQ_EXPR, truth_type,
5243 build2 (BIT_IOR_EXPR, TREE_TYPE (ll_arg),
5244 ll_arg, rl_arg),
5245 build_int_cst (TREE_TYPE (ll_arg), 0));
5248 /* See if the comparisons can be merged. Then get all the parameters for
5249 each side. */
5251 if ((lcode != EQ_EXPR && lcode != NE_EXPR)
5252 || (rcode != EQ_EXPR && rcode != NE_EXPR))
5253 return 0;
5255 volatilep = 0;
5256 ll_inner = decode_field_reference (loc, ll_arg,
5257 &ll_bitsize, &ll_bitpos, &ll_mode,
5258 &ll_unsignedp, &volatilep, &ll_mask,
5259 &ll_and_mask);
5260 lr_inner = decode_field_reference (loc, lr_arg,
5261 &lr_bitsize, &lr_bitpos, &lr_mode,
5262 &lr_unsignedp, &volatilep, &lr_mask,
5263 &lr_and_mask);
5264 rl_inner = decode_field_reference (loc, rl_arg,
5265 &rl_bitsize, &rl_bitpos, &rl_mode,
5266 &rl_unsignedp, &volatilep, &rl_mask,
5267 &rl_and_mask);
5268 rr_inner = decode_field_reference (loc, rr_arg,
5269 &rr_bitsize, &rr_bitpos, &rr_mode,
5270 &rr_unsignedp, &volatilep, &rr_mask,
5271 &rr_and_mask);
5273 /* The inner operation on the lhs of each
5274 comparison must be the same if we are to be able to do anything.
5275 Then see if we have constants. If not, the same must be true for
5276 the rhs's. */
5277 if (volatilep || ll_inner == 0 || rl_inner == 0
5278 || ! operand_equal_p (ll_inner, rl_inner, 0))
5279 return 0;
5281 if (TREE_CODE (lr_arg) == INTEGER_CST
5282 && TREE_CODE (rr_arg) == INTEGER_CST)
5283 l_const = lr_arg, r_const = rr_arg;
5284 else if (lr_inner == 0 || rr_inner == 0
5285 || ! operand_equal_p (lr_inner, rr_inner, 0))
5286 return 0;
5287 else
5288 l_const = r_const = 0;
5290 /* If either comparison code is not correct for our logical operation,
5291 fail. However, we can convert a one-bit comparison against zero into
5292 the opposite comparison against that bit being set in the field. */
5294 wanted_code = (code == TRUTH_AND_EXPR ? EQ_EXPR : NE_EXPR);
5295 if (lcode != wanted_code)
5297 if (l_const && integer_zerop (l_const) && integer_pow2p (ll_mask))
5299 /* Make the left operand unsigned, since we are only interested
5300 in the value of one bit. Otherwise we are doing the wrong
5301 thing below. */
5302 ll_unsignedp = 1;
5303 l_const = ll_mask;
5305 else
5306 return 0;
5309 /* This is analogous to the code for l_const above. */
5310 if (rcode != wanted_code)
5312 if (r_const && integer_zerop (r_const) && integer_pow2p (rl_mask))
5314 rl_unsignedp = 1;
5315 r_const = rl_mask;
5317 else
5318 return 0;
5321 /* See if we can find a mode that contains both fields being compared on
5322 the left. If we can't, fail. Otherwise, update all constants and masks
5323 to be relative to a field of that size. */
5324 first_bit = MIN (ll_bitpos, rl_bitpos);
5325 end_bit = MAX (ll_bitpos + ll_bitsize, rl_bitpos + rl_bitsize);
5326 lnmode = get_best_mode (end_bit - first_bit, first_bit, 0, 0,
5327 TYPE_ALIGN (TREE_TYPE (ll_inner)), word_mode,
5328 volatilep);
5329 if (lnmode == VOIDmode)
5330 return 0;
5332 lnbitsize = GET_MODE_BITSIZE (lnmode);
5333 lnbitpos = first_bit & ~ (lnbitsize - 1);
5334 lntype = lang_hooks.types.type_for_size (lnbitsize, 1);
5335 xll_bitpos = ll_bitpos - lnbitpos, xrl_bitpos = rl_bitpos - lnbitpos;
5337 if (BYTES_BIG_ENDIAN)
5339 xll_bitpos = lnbitsize - xll_bitpos - ll_bitsize;
5340 xrl_bitpos = lnbitsize - xrl_bitpos - rl_bitsize;
5343 ll_mask = const_binop (LSHIFT_EXPR, fold_convert_loc (loc, lntype, ll_mask),
5344 size_int (xll_bitpos));
5345 rl_mask = const_binop (LSHIFT_EXPR, fold_convert_loc (loc, lntype, rl_mask),
5346 size_int (xrl_bitpos));
5348 if (l_const)
5350 l_const = fold_convert_loc (loc, lntype, l_const);
5351 l_const = unextend (l_const, ll_bitsize, ll_unsignedp, ll_and_mask);
5352 l_const = const_binop (LSHIFT_EXPR, l_const, size_int (xll_bitpos));
5353 if (! integer_zerop (const_binop (BIT_AND_EXPR, l_const,
5354 fold_build1_loc (loc, BIT_NOT_EXPR,
5355 lntype, ll_mask))))
5357 warning (0, "comparison is always %d", wanted_code == NE_EXPR);
5359 return constant_boolean_node (wanted_code == NE_EXPR, truth_type);
5362 if (r_const)
5364 r_const = fold_convert_loc (loc, lntype, r_const);
5365 r_const = unextend (r_const, rl_bitsize, rl_unsignedp, rl_and_mask);
5366 r_const = const_binop (LSHIFT_EXPR, r_const, size_int (xrl_bitpos));
5367 if (! integer_zerop (const_binop (BIT_AND_EXPR, r_const,
5368 fold_build1_loc (loc, BIT_NOT_EXPR,
5369 lntype, rl_mask))))
5371 warning (0, "comparison is always %d", wanted_code == NE_EXPR);
5373 return constant_boolean_node (wanted_code == NE_EXPR, truth_type);
5377 /* If the right sides are not constant, do the same for them. Also,
5378 disallow this optimization if a size or signedness mismatch occurs
5379 between the left and right sides. */
5380 if (l_const == 0)
5382 if (ll_bitsize != lr_bitsize || rl_bitsize != rr_bitsize
5383 || ll_unsignedp != lr_unsignedp || rl_unsignedp != rr_unsignedp
5384 /* Make sure the two fields on the right
5385 correspond to the left without being swapped. */
5386 || ll_bitpos - rl_bitpos != lr_bitpos - rr_bitpos)
5387 return 0;
5389 first_bit = MIN (lr_bitpos, rr_bitpos);
5390 end_bit = MAX (lr_bitpos + lr_bitsize, rr_bitpos + rr_bitsize);
5391 rnmode = get_best_mode (end_bit - first_bit, first_bit, 0, 0,
5392 TYPE_ALIGN (TREE_TYPE (lr_inner)), word_mode,
5393 volatilep);
5394 if (rnmode == VOIDmode)
5395 return 0;
5397 rnbitsize = GET_MODE_BITSIZE (rnmode);
5398 rnbitpos = first_bit & ~ (rnbitsize - 1);
5399 rntype = lang_hooks.types.type_for_size (rnbitsize, 1);
5400 xlr_bitpos = lr_bitpos - rnbitpos, xrr_bitpos = rr_bitpos - rnbitpos;
5402 if (BYTES_BIG_ENDIAN)
5404 xlr_bitpos = rnbitsize - xlr_bitpos - lr_bitsize;
5405 xrr_bitpos = rnbitsize - xrr_bitpos - rr_bitsize;
5408 lr_mask = const_binop (LSHIFT_EXPR, fold_convert_loc (loc,
5409 rntype, lr_mask),
5410 size_int (xlr_bitpos));
5411 rr_mask = const_binop (LSHIFT_EXPR, fold_convert_loc (loc,
5412 rntype, rr_mask),
5413 size_int (xrr_bitpos));
5415 /* Make a mask that corresponds to both fields being compared.
5416 Do this for both items being compared. If the operands are the
5417 same size and the bits being compared are in the same position
5418 then we can do this by masking both and comparing the masked
5419 results. */
5420 ll_mask = const_binop (BIT_IOR_EXPR, ll_mask, rl_mask);
5421 lr_mask = const_binop (BIT_IOR_EXPR, lr_mask, rr_mask);
5422 if (lnbitsize == rnbitsize && xll_bitpos == xlr_bitpos)
5424 lhs = make_bit_field_ref (loc, ll_inner, lntype, lnbitsize, lnbitpos,
5425 ll_unsignedp || rl_unsignedp);
5426 if (! all_ones_mask_p (ll_mask, lnbitsize))
5427 lhs = build2 (BIT_AND_EXPR, lntype, lhs, ll_mask);
5429 rhs = make_bit_field_ref (loc, lr_inner, rntype, rnbitsize, rnbitpos,
5430 lr_unsignedp || rr_unsignedp);
5431 if (! all_ones_mask_p (lr_mask, rnbitsize))
5432 rhs = build2 (BIT_AND_EXPR, rntype, rhs, lr_mask);
5434 return build2_loc (loc, wanted_code, truth_type, lhs, rhs);
5437 /* There is still another way we can do something: If both pairs of
5438 fields being compared are adjacent, we may be able to make a wider
5439 field containing them both.
5441 Note that we still must mask the lhs/rhs expressions. Furthermore,
5442 the mask must be shifted to account for the shift done by
5443 make_bit_field_ref. */
5444 if ((ll_bitsize + ll_bitpos == rl_bitpos
5445 && lr_bitsize + lr_bitpos == rr_bitpos)
5446 || (ll_bitpos == rl_bitpos + rl_bitsize
5447 && lr_bitpos == rr_bitpos + rr_bitsize))
5449 tree type;
5451 lhs = make_bit_field_ref (loc, ll_inner, lntype,
5452 ll_bitsize + rl_bitsize,
5453 MIN (ll_bitpos, rl_bitpos), ll_unsignedp);
5454 rhs = make_bit_field_ref (loc, lr_inner, rntype,
5455 lr_bitsize + rr_bitsize,
5456 MIN (lr_bitpos, rr_bitpos), lr_unsignedp);
5458 ll_mask = const_binop (RSHIFT_EXPR, ll_mask,
5459 size_int (MIN (xll_bitpos, xrl_bitpos)));
5460 lr_mask = const_binop (RSHIFT_EXPR, lr_mask,
5461 size_int (MIN (xlr_bitpos, xrr_bitpos)));
5463 /* Convert to the smaller type before masking out unwanted bits. */
5464 type = lntype;
5465 if (lntype != rntype)
5467 if (lnbitsize > rnbitsize)
5469 lhs = fold_convert_loc (loc, rntype, lhs);
5470 ll_mask = fold_convert_loc (loc, rntype, ll_mask);
5471 type = rntype;
5473 else if (lnbitsize < rnbitsize)
5475 rhs = fold_convert_loc (loc, lntype, rhs);
5476 lr_mask = fold_convert_loc (loc, lntype, lr_mask);
5477 type = lntype;
5481 if (! all_ones_mask_p (ll_mask, ll_bitsize + rl_bitsize))
5482 lhs = build2 (BIT_AND_EXPR, type, lhs, ll_mask);
5484 if (! all_ones_mask_p (lr_mask, lr_bitsize + rr_bitsize))
5485 rhs = build2 (BIT_AND_EXPR, type, rhs, lr_mask);
5487 return build2_loc (loc, wanted_code, truth_type, lhs, rhs);
5490 return 0;
5493 /* Handle the case of comparisons with constants. If there is something in
5494 common between the masks, those bits of the constants must be the same.
5495 If not, the condition is always false. Test for this to avoid generating
5496 incorrect code below. */
5497 result = const_binop (BIT_AND_EXPR, ll_mask, rl_mask);
5498 if (! integer_zerop (result)
5499 && simple_cst_equal (const_binop (BIT_AND_EXPR, result, l_const),
5500 const_binop (BIT_AND_EXPR, result, r_const)) != 1)
5502 if (wanted_code == NE_EXPR)
5504 warning (0, "%<or%> of unmatched not-equal tests is always 1");
5505 return constant_boolean_node (true, truth_type);
5507 else
5509 warning (0, "%<and%> of mutually exclusive equal-tests is always 0");
5510 return constant_boolean_node (false, truth_type);
5514 /* Construct the expression we will return. First get the component
5515 reference we will make. Unless the mask is all ones the width of
5516 that field, perform the mask operation. Then compare with the
5517 merged constant. */
5518 result = make_bit_field_ref (loc, ll_inner, lntype, lnbitsize, lnbitpos,
5519 ll_unsignedp || rl_unsignedp);
5521 ll_mask = const_binop (BIT_IOR_EXPR, ll_mask, rl_mask);
5522 if (! all_ones_mask_p (ll_mask, lnbitsize))
5523 result = build2_loc (loc, BIT_AND_EXPR, lntype, result, ll_mask);
5525 return build2_loc (loc, wanted_code, truth_type, result,
5526 const_binop (BIT_IOR_EXPR, l_const, r_const));
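/* For example, given adjacent bit-fields a and b stored in the same
   word, "x.a == 1 && x.b == 2" is folded by the code above into one
   load of the containing word, one mask covering both fields, and a
   single comparison against the merged constant
   (1 << a's position) | (2 << b's position).  */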
5529 /* Optimize T, which is a comparison of a MIN_EXPR or MAX_EXPR with a
5530 constant. */
5532 static tree
5533 optimize_minmax_comparison (location_t loc, enum tree_code code, tree type,
5534 tree op0, tree op1)
5536 tree arg0 = op0;
5537 enum tree_code op_code;
5538 tree comp_const;
5539 tree minmax_const;
5540 int consts_equal, consts_lt;
5541 tree inner;
5543 STRIP_SIGN_NOPS (arg0);
5545 op_code = TREE_CODE (arg0);
5546 minmax_const = TREE_OPERAND (arg0, 1);
5547 comp_const = fold_convert_loc (loc, TREE_TYPE (arg0), op1);
5548 consts_equal = tree_int_cst_equal (minmax_const, comp_const);
5549 consts_lt = tree_int_cst_lt (minmax_const, comp_const);
5550 inner = TREE_OPERAND (arg0, 0);
5552 /* If something does not permit us to optimize, return NULL_TREE. */
5553 if ((op_code != MIN_EXPR && op_code != MAX_EXPR)
5554 || TREE_CODE (comp_const) != INTEGER_CST
5555 || TREE_OVERFLOW (comp_const)
5556 || TREE_CODE (minmax_const) != INTEGER_CST
5557 || TREE_OVERFLOW (minmax_const))
5558 return NULL_TREE;
5560 /* Now handle all the various comparison codes. We only handle EQ_EXPR
5561 and GT_EXPR, doing the rest with recursive calls using logical
5562 simplifications. */
5563 switch (code)
5565 case NE_EXPR: case LT_EXPR: case LE_EXPR:
5567 tree tem
5568 = optimize_minmax_comparison (loc,
5569 invert_tree_comparison (code, false),
5570 type, op0, op1);
5571 if (tem)
5572 return invert_truthvalue_loc (loc, tem);
5573 return NULL_TREE;
5576 case GE_EXPR:
5577 return
5578 fold_build2_loc (loc, TRUTH_ORIF_EXPR, type,
5579 optimize_minmax_comparison
5580 (loc, EQ_EXPR, type, arg0, comp_const),
5581 optimize_minmax_comparison
5582 (loc, GT_EXPR, type, arg0, comp_const));
5584 case EQ_EXPR:
5585 if (op_code == MAX_EXPR && consts_equal)
5586 /* MAX (X, 0) == 0 -> X <= 0 */
5587 return fold_build2_loc (loc, LE_EXPR, type, inner, comp_const);
5589 else if (op_code == MAX_EXPR && consts_lt)
5590 /* MAX (X, 0) == 5 -> X == 5 */
5591 return fold_build2_loc (loc, EQ_EXPR, type, inner, comp_const);
5593 else if (op_code == MAX_EXPR)
5594 /* MAX (X, 0) == -1 -> false */
5595 return omit_one_operand_loc (loc, type, integer_zero_node, inner);
5597 else if (consts_equal)
5598 /* MIN (X, 0) == 0 -> X >= 0 */
5599 return fold_build2_loc (loc, GE_EXPR, type, inner, comp_const);
5601 else if (consts_lt)
5602 /* MIN (X, 0) == 5 -> false */
5603 return omit_one_operand_loc (loc, type, integer_zero_node, inner);
5605 else
5606 /* MIN (X, 0) == -1 -> X == -1 */
5607 return fold_build2_loc (loc, EQ_EXPR, type, inner, comp_const);
5609 case GT_EXPR:
5610 if (op_code == MAX_EXPR && (consts_equal || consts_lt))
5611 /* MAX (X, 0) > 0 -> X > 0
5612 MAX (X, 0) > 5 -> X > 5 */
5613 return fold_build2_loc (loc, GT_EXPR, type, inner, comp_const);
5615 else if (op_code == MAX_EXPR)
5616 /* MAX (X, 0) > -1 -> true */
5617 return omit_one_operand_loc (loc, type, integer_one_node, inner);
5619 else if (op_code == MIN_EXPR && (consts_equal || consts_lt))
5620 /* MIN (X, 0) > 0 -> false
5621 MIN (X, 0) > 5 -> false */
5622 return omit_one_operand_loc (loc, type, integer_zero_node, inner);
5624 else
5625 /* MIN (X, 0) > -1 -> X > -1 */
5626 return fold_build2_loc (loc, GT_EXPR, type, inner, comp_const);
5628 default:
5629 return NULL_TREE;
5633 /* T is an integer expression that is being multiplied by, divided by, or
5634 taken modulo a constant C (CODE says which operation and what kind of
5635 division or modulus). See if we can eliminate that operation by folding it with
5636 other operations already in T. WIDE_TYPE, if non-null, is a type that
5637 should be used for the computation if wider than our type.
5639 For example, if we are dividing (X * 8) + (Y * 16) by 4, we can return
5640 (X * 2) + (Y * 4). We must, however, be assured that either the original
5641 expression would not overflow or that overflow is undefined for the type
5642 in the language in question.
5644 If we return a non-null expression, it is an equivalent form of the
5645 original computation, but need not be in the original type.
5647 We set *STRICT_OVERFLOW_P to true if the return value depends on
5648 signed overflow being undefined. Otherwise we do not change
5649 *STRICT_OVERFLOW_P. */
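/* For instance, (X * 4 + 8) / 4 can be rewritten as X + 2 only when
   signed overflow is undefined: with wrapping arithmetic X * 4 may
   wrap, and the two forms then disagree for some X.  Folds that rely
   on this record the fact by setting *STRICT_OVERFLOW_P.  */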
5651 static tree
5652 extract_muldiv (tree t, tree c, enum tree_code code, tree wide_type,
5653 bool *strict_overflow_p)
5655 /* To avoid exponential search depth, refuse to allow recursion past
5656 three levels. Beyond that (1) it's highly unlikely that we'll find
5657 something interesting and (2) we've probably processed it before
5658 when we built the inner expression. */
5660 static int depth;
5661 tree ret;
5663 if (depth > 3)
5664 return NULL;
5666 depth++;
5667 ret = extract_muldiv_1 (t, c, code, wide_type, strict_overflow_p);
5668 depth--;
5670 return ret;
5673 static tree
5674 extract_muldiv_1 (tree t, tree c, enum tree_code code, tree wide_type,
5675 bool *strict_overflow_p)
5677 tree type = TREE_TYPE (t);
5678 enum tree_code tcode = TREE_CODE (t);
5679 tree ctype = (wide_type != 0 && (GET_MODE_SIZE (TYPE_MODE (wide_type))
5680 > GET_MODE_SIZE (TYPE_MODE (type)))
5681 ? wide_type : type);
5682 tree t1, t2;
5683 int same_p = tcode == code;
5684 tree op0 = NULL_TREE, op1 = NULL_TREE;
5685 bool sub_strict_overflow_p;
5687 /* Don't deal with constants of zero here; they confuse the code below. */
5688 if (integer_zerop (c))
5689 return NULL_TREE;
5691 if (TREE_CODE_CLASS (tcode) == tcc_unary)
5692 op0 = TREE_OPERAND (t, 0);
5694 if (TREE_CODE_CLASS (tcode) == tcc_binary)
5695 op0 = TREE_OPERAND (t, 0), op1 = TREE_OPERAND (t, 1);
5697 /* Note that we need not handle conditional operations here since fold
5698 already handles those cases. So just do arithmetic here. */
5699 switch (tcode)
5701 case INTEGER_CST:
5702 /* For a constant, we can always simplify if we are a multiply
5703 or (for divide and modulus) if it is a multiple of our constant. */
5704 if (code == MULT_EXPR
5705 || wi::multiple_of_p (t, c, TYPE_SIGN (type)))
5706 return const_binop (code, fold_convert (ctype, t),
5707 fold_convert (ctype, c));
5708 break;
5710 CASE_CONVERT: case NON_LVALUE_EXPR:
5711 /* If op0 is an expression ... */
5712 if ((COMPARISON_CLASS_P (op0)
5713 || UNARY_CLASS_P (op0)
5714 || BINARY_CLASS_P (op0)
5715 || VL_EXP_CLASS_P (op0)
5716 || EXPRESSION_CLASS_P (op0))
5717 /* ... and has wrapping overflow, and its type is smaller
5718 than ctype, then we cannot pass through as widening. */
5719 && ((TYPE_OVERFLOW_WRAPS (TREE_TYPE (op0))
5720 && (TYPE_PRECISION (ctype)
5721 > TYPE_PRECISION (TREE_TYPE (op0))))
5722 /* ... or this is a truncation (t is narrower than op0),
5723 then we cannot pass through this narrowing. */
5724 || (TYPE_PRECISION (type)
5725 < TYPE_PRECISION (TREE_TYPE (op0)))
5726 /* ... or signedness changes for division or modulus,
5727 then we cannot pass through this conversion. */
5728 || (code != MULT_EXPR
5729 && (TYPE_UNSIGNED (ctype)
5730 != TYPE_UNSIGNED (TREE_TYPE (op0))))
5731 /* ... or OP0 has undefined overflow while the type it is
5732 converted to has not, then we cannot do the operation in the
5733 inner type, as that would introduce undefined overflow. */
5734 || (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (op0))
5735 && !TYPE_OVERFLOW_UNDEFINED (type))))
5736 break;
5738 /* Pass the constant down and see if we can make a simplification. If
5739 we can, replace this expression with the inner simplification for
5740 possible later conversion to our or some other type. */
5741 if ((t2 = fold_convert (TREE_TYPE (op0), c)) != 0
5742 && TREE_CODE (t2) == INTEGER_CST
5743 && !TREE_OVERFLOW (t2)
5744 && (0 != (t1 = extract_muldiv (op0, t2, code,
5745 code == MULT_EXPR
5746 ? ctype : NULL_TREE,
5747 strict_overflow_p))))
5748 return t1;
5749 break;
5751 case ABS_EXPR:
5752 /* If widening the type changes it from signed to unsigned, then we
5753 must avoid building ABS_EXPR itself as unsigned. */
5754 if (TYPE_UNSIGNED (ctype) && !TYPE_UNSIGNED (type))
5756 tree cstype = (*signed_type_for) (ctype);
5757 if ((t1 = extract_muldiv (op0, c, code, cstype, strict_overflow_p))
5758 != 0)
5760 t1 = fold_build1 (tcode, cstype, fold_convert (cstype, t1));
5761 return fold_convert (ctype, t1);
5763 break;
5765 /* If the constant is negative, we cannot simplify this. */
5766 if (tree_int_cst_sgn (c) == -1)
5767 break;
5768 /* FALLTHROUGH */
5769 case NEGATE_EXPR:
5770 /* For division and modulus, type can't be unsigned, as e.g.
5771 (-(x / 2U)) / 2U isn't equal to -((x / 2U) / 2U) for x >= 2.
5772 For signed types, even with wrapping overflow, this is fine. */
5773 if (code != MULT_EXPR && TYPE_UNSIGNED (type))
5774 break;
5775 if ((t1 = extract_muldiv (op0, c, code, wide_type, strict_overflow_p))
5776 != 0)
5777 return fold_build1 (tcode, ctype, fold_convert (ctype, t1));
5778 break;
5780 case MIN_EXPR: case MAX_EXPR:
5781 /* If widening the type changes the signedness, then we can't perform
5782 this optimization as that changes the result. */
5783 if (TYPE_UNSIGNED (ctype) != TYPE_UNSIGNED (type))
5784 break;
5786 /* MIN (a, b) / 5 -> MIN (a / 5, b / 5) */
5787 sub_strict_overflow_p = false;
5788 if ((t1 = extract_muldiv (op0, c, code, wide_type,
5789 &sub_strict_overflow_p)) != 0
5790 && (t2 = extract_muldiv (op1, c, code, wide_type,
5791 &sub_strict_overflow_p)) != 0)
5793 if (tree_int_cst_sgn (c) < 0)
5794 tcode = (tcode == MIN_EXPR ? MAX_EXPR : MIN_EXPR);
5795 if (sub_strict_overflow_p)
5796 *strict_overflow_p = true;
5797 return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
5798 fold_convert (ctype, t2));
5800 break;
5802 case LSHIFT_EXPR: case RSHIFT_EXPR:
5803 /* If the second operand is constant, this is a multiplication
5804 or floor division by a power of two, so we can treat it that
5805 way unless the multiplier or divisor overflows. Signed
5806 left-shift overflow is implementation-defined rather than
5807 undefined in C90, so do not convert signed left shift into
5808 multiplication. */
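/* For example, an unsigned X << 3 is treated as X * 8 and X >> 2 as
   the floor division X / 4; the recursive call below can then combine
   that multiplication or division with the outer operation on C.  */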
5809 if (TREE_CODE (op1) == INTEGER_CST
5810 && (tcode == RSHIFT_EXPR || TYPE_UNSIGNED (TREE_TYPE (op0)))
5811 /* const_binop may not detect overflow correctly,
5812 so check for it explicitly here. */
5813 && wi::gtu_p (TYPE_PRECISION (TREE_TYPE (size_one_node)), op1)
5814 && 0 != (t1 = fold_convert (ctype,
5815 const_binop (LSHIFT_EXPR,
5816 size_one_node,
5817 op1)))
5818 && !TREE_OVERFLOW (t1))
5819 return extract_muldiv (build2 (tcode == LSHIFT_EXPR
5820 ? MULT_EXPR : FLOOR_DIV_EXPR,
5821 ctype,
5822 fold_convert (ctype, op0),
5823 t1),
5824 c, code, wide_type, strict_overflow_p);
5825 break;
5827 case PLUS_EXPR: case MINUS_EXPR:
5828 /* See if we can eliminate the operation on both sides. If we can, we
5829 can return a new PLUS or MINUS. If we can't, the only remaining
5830 cases where we can do anything are if the second operand is a
5831 constant. */
5832 sub_strict_overflow_p = false;
5833 t1 = extract_muldiv (op0, c, code, wide_type, &sub_strict_overflow_p);
5834 t2 = extract_muldiv (op1, c, code, wide_type, &sub_strict_overflow_p);
5835 if (t1 != 0 && t2 != 0
5836 && (code == MULT_EXPR
5837 /* If not multiplication, we can only do this if both operands
5838 are divisible by c. */
5839 || (multiple_of_p (ctype, op0, c)
5840 && multiple_of_p (ctype, op1, c))))
5842 if (sub_strict_overflow_p)
5843 *strict_overflow_p = true;
5844 return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
5845 fold_convert (ctype, t2));
5848 /* If this was a subtraction, negate OP1 and set it to be an addition.
5849 This simplifies the logic below. */
5850 if (tcode == MINUS_EXPR)
5852 tcode = PLUS_EXPR, op1 = negate_expr (op1);
5853 /* If OP1 was not easily negatable, the constant may be OP0. */
5854 if (TREE_CODE (op0) == INTEGER_CST)
5856 tree tem = op0;
5857 op0 = op1;
5858 op1 = tem;
5859 tem = t1;
5860 t1 = t2;
5861 t2 = tem;
5865 if (TREE_CODE (op1) != INTEGER_CST)
5866 break;
5868 /* If either OP1 or C is negative, this optimization is not safe for
5869 some of the division and remainder types while for others we need
5870 to change the code. */
5871 if (tree_int_cst_sgn (op1) < 0 || tree_int_cst_sgn (c) < 0)
5873 if (code == CEIL_DIV_EXPR)
5874 code = FLOOR_DIV_EXPR;
5875 else if (code == FLOOR_DIV_EXPR)
5876 code = CEIL_DIV_EXPR;
5877 else if (code != MULT_EXPR
5878 && code != CEIL_MOD_EXPR && code != FLOOR_MOD_EXPR)
5879 break;
5882 /* If this is a multiply, or if OP1 is a multiple of our constant C,
5883 apply the operation to OP1 and C and verify it doesn't overflow. */
5884 if (code == MULT_EXPR
5885 || wi::multiple_of_p (op1, c, TYPE_SIGN (type)))
5887 op1 = const_binop (code, fold_convert (ctype, op1),
5888 fold_convert (ctype, c));
5889 /* We allow the constant to overflow with wrapping semantics. */
5890 if (op1 == 0
5891 || (TREE_OVERFLOW (op1) && !TYPE_OVERFLOW_WRAPS (ctype)))
5892 break;
5894 else
5895 break;
5897 /* If we have an unsigned type, we cannot widen the operation since it
5898 will change the result if the original computation overflowed. */
5899 if (TYPE_UNSIGNED (ctype) && ctype != type)
5900 break;
5902 /* If we were able to eliminate our operation from the first side,
5903 apply our operation to the second side and reform the PLUS. */
5904 if (t1 != 0 && (TREE_CODE (t1) != code || code == MULT_EXPR))
5905 return fold_build2 (tcode, ctype, fold_convert (ctype, t1), op1);
5907 /* The last case is when this is a multiply. In that case, we can
5908 apply the distributive law to commute the multiply and addition
5909 if the multiplication of the constants doesn't overflow
5910 and overflow is defined. With undefined overflow
5911 op0 * c might overflow, while (op0 + orig_op1) * c doesn't. */
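/* E.g. with -fwrapv, (X + 3) * 4 can become X * 4 + 12, since every
   intermediate result is well defined; with undefined overflow the
   rewrite is refused, because X * 4 might overflow even though
   (X + 3) * 4 does not.  */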
5912 if (code == MULT_EXPR && TYPE_OVERFLOW_WRAPS (ctype))
5913 return fold_build2 (tcode, ctype,
5914 fold_build2 (code, ctype,
5915 fold_convert (ctype, op0),
5916 fold_convert (ctype, c)),
5917 op1);
5919 break;
5921 case MULT_EXPR:
5922 /* We have a special case here if we are doing something like
5923 (C * 8) % 4 since we know that's zero. */
5924 if ((code == TRUNC_MOD_EXPR || code == CEIL_MOD_EXPR
5925 || code == FLOOR_MOD_EXPR || code == ROUND_MOD_EXPR)
5926 /* If the multiplication can overflow we cannot optimize this. */
5927 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (t))
5928 && TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST
5929 && wi::multiple_of_p (op1, c, TYPE_SIGN (type)))
5931 *strict_overflow_p = true;
5932 return omit_one_operand (type, integer_zero_node, op0);
5935 /* ... fall through ... */
5937 case TRUNC_DIV_EXPR: case CEIL_DIV_EXPR: case FLOOR_DIV_EXPR:
5938 case ROUND_DIV_EXPR: case EXACT_DIV_EXPR:
5939 /* If we can extract our operation from the LHS, do so and return a
5940 new operation. Likewise for the RHS from a MULT_EXPR. Otherwise,
5941 do something only if the second operand is a constant. */
5942 if (same_p
5943 && (t1 = extract_muldiv (op0, c, code, wide_type,
5944 strict_overflow_p)) != 0)
5945 return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
5946 fold_convert (ctype, op1));
5947 else if (tcode == MULT_EXPR && code == MULT_EXPR
5948 && (t1 = extract_muldiv (op1, c, code, wide_type,
5949 strict_overflow_p)) != 0)
5950 return fold_build2 (tcode, ctype, fold_convert (ctype, op0),
5951 fold_convert (ctype, t1));
5952 else if (TREE_CODE (op1) != INTEGER_CST)
5953 return 0;
5955 /* If these are the same operation types, we can associate them
5956 assuming no overflow. */
5957 if (tcode == code)
5959 bool overflow_p = false;
5960 bool overflow_mul_p;
5961 signop sign = TYPE_SIGN (ctype);
5962 wide_int mul = wi::mul (op1, c, sign, &overflow_mul_p);
5963 overflow_p = TREE_OVERFLOW (c) | TREE_OVERFLOW (op1);
5964 if (overflow_mul_p
5965 && ((sign == UNSIGNED && tcode != MULT_EXPR) || sign == SIGNED))
5966 overflow_p = true;
5967 if (!overflow_p)
5968 return fold_build2 (tcode, ctype, fold_convert (ctype, op0),
5969 wide_int_to_tree (ctype, mul));
5972 /* If these operations "cancel" each other, we have the main
5973 optimizations of this pass, which occur when either constant is a
5974 multiple of the other, in which case we replace this with an
5975 operation of either CODE or TCODE.
5977 If we have an unsigned type, we cannot do this since it will change
5978 the result if the original computation overflowed. */
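/* Concretely, (X * 12) / 4 becomes X * 3, and (X /[exact] 4) * 12
   becomes X * 3 as well: one constant divides the other, and the
   TRUNC_DIV_EXPR of the two constants below supplies the remaining
   factor.  Both folds set *STRICT_OVERFLOW_P, since they rely on the
   original computation not wrapping.  */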
5979 if (TYPE_OVERFLOW_UNDEFINED (ctype)
5980 && ((code == MULT_EXPR && tcode == EXACT_DIV_EXPR)
5981 || (tcode == MULT_EXPR
5982 && code != TRUNC_MOD_EXPR && code != CEIL_MOD_EXPR
5983 && code != FLOOR_MOD_EXPR && code != ROUND_MOD_EXPR
5984 && code != MULT_EXPR)))
5986 if (wi::multiple_of_p (op1, c, TYPE_SIGN (type)))
5988 if (TYPE_OVERFLOW_UNDEFINED (ctype))
5989 *strict_overflow_p = true;
5990 return fold_build2 (tcode, ctype, fold_convert (ctype, op0),
5991 fold_convert (ctype,
5992 const_binop (TRUNC_DIV_EXPR,
5993 op1, c)));
5995 else if (wi::multiple_of_p (c, op1, TYPE_SIGN (type)))
5997 if (TYPE_OVERFLOW_UNDEFINED (ctype))
5998 *strict_overflow_p = true;
5999 return fold_build2 (code, ctype, fold_convert (ctype, op0),
6000 fold_convert (ctype,
6001 const_binop (TRUNC_DIV_EXPR,
6002 c, op1)));
6005 break;
6007 default:
6008 break;
6011 return 0;
6014 /* Return a node which has the indicated constant VALUE (either 0 or
6015 1 for scalars or {-1,-1,..} or {0,0,...} for vectors),
6016 and is of the indicated TYPE. */
6018 tree
6019 constant_boolean_node (bool value, tree type)
6021 if (type == integer_type_node)
6022 return value ? integer_one_node : integer_zero_node;
6023 else if (type == boolean_type_node)
6024 return value ? boolean_true_node : boolean_false_node;
6025 else if (TREE_CODE (type) == VECTOR_TYPE)
6026 return build_vector_from_val (type,
6027 build_int_cst (TREE_TYPE (type),
6028 value ? -1 : 0));
6029 else
6030 return fold_convert (type, value ? integer_one_node : integer_zero_node);
6034 /* Transform `a + (b ? x : y)' into `b ? (a + x) : (a + y)'.
6035 Transform `a + (x < y)' into `(x < y) ? (a + 1) : (a + 0)'. Here
6036 CODE corresponds to the `+', COND to the `(b ? x : y)' or `(x < y)'
6037 expression, and ARG to `a'. If COND_FIRST_P is nonzero, then the
6038 COND is the first argument to CODE; otherwise (as in the example
6039 given here), it is the second argument. TYPE is the type of the
6040 original expression. Return NULL_TREE if no simplification is
6041 possible. */
6043 static tree
6044 fold_binary_op_with_conditional_arg (location_t loc,
6045 enum tree_code code,
6046 tree type, tree op0, tree op1,
6047 tree cond, tree arg, int cond_first_p)
6049 tree cond_type = cond_first_p ? TREE_TYPE (op0) : TREE_TYPE (op1);
6050 tree arg_type = cond_first_p ? TREE_TYPE (op1) : TREE_TYPE (op0);
6051 tree test, true_value, false_value;
6052 tree lhs = NULL_TREE;
6053 tree rhs = NULL_TREE;
6054 enum tree_code cond_code = COND_EXPR;
6056 if (TREE_CODE (cond) == COND_EXPR
6057 || TREE_CODE (cond) == VEC_COND_EXPR)
6059 test = TREE_OPERAND (cond, 0);
6060 true_value = TREE_OPERAND (cond, 1);
6061 false_value = TREE_OPERAND (cond, 2);
6062 /* If this operand throws an exception, then it does not make
6063 sense to try to perform a logical or arithmetic operation
6064 involving it. */
6065 if (VOID_TYPE_P (TREE_TYPE (true_value)))
6066 lhs = true_value;
6067 if (VOID_TYPE_P (TREE_TYPE (false_value)))
6068 rhs = false_value;
6070 else
6072 tree testtype = TREE_TYPE (cond);
6073 test = cond;
6074 true_value = constant_boolean_node (true, testtype);
6075 false_value = constant_boolean_node (false, testtype);
6078 if (TREE_CODE (TREE_TYPE (test)) == VECTOR_TYPE)
6079 cond_code = VEC_COND_EXPR;
6081 /* This transformation is only worthwhile if we don't have to wrap ARG
6082 in a SAVE_EXPR and the operation can be simplified without recursing
6083 on at least one of the branches once it's pushed inside the COND_EXPR. */
6084 if (!TREE_CONSTANT (arg)
6085 && (TREE_SIDE_EFFECTS (arg)
6086 || TREE_CODE (arg) == COND_EXPR || TREE_CODE (arg) == VEC_COND_EXPR
6087 || TREE_CONSTANT (true_value) || TREE_CONSTANT (false_value)))
6088 return NULL_TREE;
6090 arg = fold_convert_loc (loc, arg_type, arg);
6091 if (lhs == 0)
6093 true_value = fold_convert_loc (loc, cond_type, true_value);
6094 if (cond_first_p)
6095 lhs = fold_build2_loc (loc, code, type, true_value, arg);
6096 else
6097 lhs = fold_build2_loc (loc, code, type, arg, true_value);
6099 if (rhs == 0)
6101 false_value = fold_convert_loc (loc, cond_type, false_value);
6102 if (cond_first_p)
6103 rhs = fold_build2_loc (loc, code, type, false_value, arg);
6104 else
6105 rhs = fold_build2_loc (loc, code, type, arg, false_value);
6108 /* Check that we have simplified at least one of the branches. */
6109 if (!TREE_CONSTANT (arg) && !TREE_CONSTANT (lhs) && !TREE_CONSTANT (rhs))
6110 return NULL_TREE;
6112 return fold_build3_loc (loc, cond_code, type, test, lhs, rhs);
6116 /* Subroutine of fold() that checks for the addition of +/- 0.0.
6118 If !NEGATE, return true if ADDEND is +/-0.0 and, for all X of type
6119 TYPE, X + ADDEND is the same as X. If NEGATE, return true if X -
6120 ADDEND is the same as X.
6122 X + 0 and X - 0 both give X when X is NaN, infinite, or nonzero
6123 and finite. The problematic cases are when X is zero, and its mode
6124 has signed zeros. In the case of rounding towards -infinity,
6125 X - 0 is not the same as X because 0 - 0 is -0. In other rounding
6126 modes, X + 0 is not the same as X because -0 + 0 is 0. */
6128 bool
6129 fold_real_zero_addition_p (const_tree type, const_tree addend, int negate)
6131 if (!real_zerop (addend))
6132 return false;
6134 /* Don't allow the fold with -fsignaling-nans. */
6135 if (HONOR_SNANS (TYPE_MODE (type)))
6136 return false;
6138 /* Allow the fold if zeros aren't signed, or their sign isn't important. */
6139 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (type)))
6140 return true;
6142 /* In a vector or complex, we would need to check the sign of all zeros. */
6143 if (TREE_CODE (addend) != REAL_CST)
6144 return false;
6146 /* Treat x + -0 as x - 0 and x - -0 as x + 0. */
6147 if (REAL_VALUE_MINUS_ZERO (TREE_REAL_CST (addend)))
6148 negate = !negate;
6150 /* The mode has signed zeros, and we have to honor their sign.
6151 In this situation, there is only one case we can return true for.
6152 X - 0 is the same as X unless rounding towards -infinity is
6153 supported. */
6154 return negate && !HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type));
6157 /* Subroutine of fold() that checks comparisons of built-in math
6158 functions against real constants.
6160 FCODE is the DECL_FUNCTION_CODE of the built-in, CODE is the comparison
6161 operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR, GE_EXPR or LE_EXPR. TYPE
6162 is the type of the result and ARG0 and ARG1 are the operands of the
6163 comparison. ARG1 must be a TREE_REAL_CST.
6165 The function returns the constant folded tree if a simplification
6166 can be made, and NULL_TREE otherwise. */
6168 static tree
6169 fold_mathfn_compare (location_t loc,
6170 enum built_in_function fcode, enum tree_code code,
6171 tree type, tree arg0, tree arg1)
6173 REAL_VALUE_TYPE c;
6175 if (BUILTIN_SQRT_P (fcode))
6177 tree arg = CALL_EXPR_ARG (arg0, 0);
6178 enum machine_mode mode = TYPE_MODE (TREE_TYPE (arg0));
6180 c = TREE_REAL_CST (arg1);
6181 if (REAL_VALUE_NEGATIVE (c))
6183 /* sqrt(x) < y is always false, if y is negative. */
6184 if (code == EQ_EXPR || code == LT_EXPR || code == LE_EXPR)
6185 return omit_one_operand_loc (loc, type, integer_zero_node, arg);
6187 /* sqrt(x) > y is always true, if y is negative and we
6188 don't care about NaNs, i.e. negative values of x. */
6189 if (code == NE_EXPR || !HONOR_NANS (mode))
6190 return omit_one_operand_loc (loc, type, integer_one_node, arg);
6192 /* sqrt(x) > y is the same as x >= 0, if y is negative. */
6193 return fold_build2_loc (loc, GE_EXPR, type, arg,
6194 build_real (TREE_TYPE (arg), dconst0));
6196 else if (code == GT_EXPR || code == GE_EXPR)
6198 REAL_VALUE_TYPE c2;
6200 REAL_ARITHMETIC (c2, MULT_EXPR, c, c);
6201 real_convert (&c2, mode, &c2);
6203 if (REAL_VALUE_ISINF (c2))
6205 /* sqrt(x) > y is x == +Inf, when y is very large. */
6206 if (HONOR_INFINITIES (mode))
6207 return fold_build2_loc (loc, EQ_EXPR, type, arg,
6208 build_real (TREE_TYPE (arg), c2));
6210 /* sqrt(x) > y is always false, when y is very large
6211 and we don't care about infinities. */
6212 return omit_one_operand_loc (loc, type, integer_zero_node, arg);
6215 /* sqrt(x) > c is the same as x > c*c. */
6216 return fold_build2_loc (loc, code, type, arg,
6217 build_real (TREE_TYPE (arg), c2));
6219 else if (code == LT_EXPR || code == LE_EXPR)
6221 REAL_VALUE_TYPE c2;
6223 REAL_ARITHMETIC (c2, MULT_EXPR, c, c);
6224 real_convert (&c2, mode, &c2);
6226 if (REAL_VALUE_ISINF (c2))
6228 /* sqrt(x) < y is always true, when y is a very large
6229 value and we don't care about NaNs or Infinities. */
6230 if (! HONOR_NANS (mode) && ! HONOR_INFINITIES (mode))
6231 return omit_one_operand_loc (loc, type, integer_one_node, arg);
6233 /* sqrt(x) < y is x != +Inf when y is very large and we
6234 don't care about NaNs. */
6235 if (! HONOR_NANS (mode))
6236 return fold_build2_loc (loc, NE_EXPR, type, arg,
6237 build_real (TREE_TYPE (arg), c2));
6239 /* sqrt(x) < y is x >= 0 when y is very large and we
6240 don't care about Infinities. */
6241 if (! HONOR_INFINITIES (mode))
6242 return fold_build2_loc (loc, GE_EXPR, type, arg,
6243 build_real (TREE_TYPE (arg), dconst0));
6245 /* sqrt(x) < y is x >= 0 && x != +Inf, when y is large. */
6246 arg = save_expr (arg);
6247 return fold_build2_loc (loc, TRUTH_ANDIF_EXPR, type,
6248 fold_build2_loc (loc, GE_EXPR, type, arg,
6249 build_real (TREE_TYPE (arg),
6250 dconst0)),
6251 fold_build2_loc (loc, NE_EXPR, type, arg,
6252 build_real (TREE_TYPE (arg),
6253 c2)));
6256 /* sqrt(x) < c is the same as x < c*c, if we ignore NaNs. */
6257 if (! HONOR_NANS (mode))
6258 return fold_build2_loc (loc, code, type, arg,
6259 build_real (TREE_TYPE (arg), c2));
6261 /* sqrt(x) < c is the same as x >= 0 && x < c*c. */
6262 arg = save_expr (arg);
6263 return fold_build2_loc (loc, TRUTH_ANDIF_EXPR, type,
6264 fold_build2_loc (loc, GE_EXPR, type, arg,
6265 build_real (TREE_TYPE (arg),
6266 dconst0)),
6267 fold_build2_loc (loc, code, type, arg,
6268 build_real (TREE_TYPE (arg),
6269 c2)));
6273 return NULL_TREE;
6276 /* Subroutine of fold() that optimizes comparisons against Infinities,
6277 either +Inf or -Inf.
6279 CODE is the comparison operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR,
6280 GE_EXPR or LE_EXPR. TYPE is the type of the result and ARG0 and ARG1
6281 are the operands of the comparison. ARG1 must be a TREE_REAL_CST.
6283 The function returns the constant folded tree if a simplification
6284 can be made, and NULL_TREE otherwise. */
6286 static tree
6287 fold_inf_compare (location_t loc, enum tree_code code, tree type,
6288 tree arg0, tree arg1)
6290 enum machine_mode mode;
6291 REAL_VALUE_TYPE max;
6292 tree temp;
6293 bool neg;
6295 mode = TYPE_MODE (TREE_TYPE (arg0));
6297 /* For negative infinity swap the sense of the comparison. */
6298 neg = REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg1));
6299 if (neg)
6300 code = swap_tree_comparison (code);
6302 switch (code)
6304 case GT_EXPR:
6305 /* x > +Inf is always false, if we ignore sNaNs. */
6306 if (HONOR_SNANS (mode))
6307 return NULL_TREE;
6308 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
6310 case LE_EXPR:
6311 /* x <= +Inf is always true, if we don't care about NaNs. */
6312 if (! HONOR_NANS (mode))
6313 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
6315 /* x <= +Inf is the same as x == x, i.e. isfinite(x). */
6316 arg0 = save_expr (arg0);
6317 return fold_build2_loc (loc, EQ_EXPR, type, arg0, arg0);
6319 case EQ_EXPR:
6320 case GE_EXPR:
6321 /* x == +Inf and x >= +Inf are always equal to x > DBL_MAX. */
6322 real_maxval (&max, neg, mode);
6323 return fold_build2_loc (loc, neg ? LT_EXPR : GT_EXPR, type,
6324 arg0, build_real (TREE_TYPE (arg0), max));
6326 case LT_EXPR:
6327 /* x < +Inf is always equal to x <= DBL_MAX. */
6328 real_maxval (&max, neg, mode);
6329 return fold_build2_loc (loc, neg ? GE_EXPR : LE_EXPR, type,
6330 arg0, build_real (TREE_TYPE (arg0), max));
6332 case NE_EXPR:
6333 /* x != +Inf is always equal to !(x > DBL_MAX). */
6334 real_maxval (&max, neg, mode);
6335 if (! HONOR_NANS (mode))
6336 return fold_build2_loc (loc, neg ? GE_EXPR : LE_EXPR, type,
6337 arg0, build_real (TREE_TYPE (arg0), max));
6339 temp = fold_build2_loc (loc, neg ? LT_EXPR : GT_EXPR, type,
6340 arg0, build_real (TREE_TYPE (arg0), max));
6341 return fold_build1_loc (loc, TRUTH_NOT_EXPR, type, temp);
6343 default:
6344 break;
6347 return NULL_TREE;
6350 /* Subroutine of fold() that optimizes comparisons of a division by
6351 a nonzero integer constant against an integer constant, i.e.
6352 X/C1 op C2.
6354 CODE is the comparison operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR,
6355 GE_EXPR or LE_EXPR. TYPE is the type of the result and ARG0 and ARG1
6356 are the operands of the comparison. ARG1 must be an INTEGER_CST.
6358 The function returns the constant folded tree if a simplification
6359 can be made, and NULL_TREE otherwise. */
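/* For example, with signed truncating division, X / 4 == 3 holds
   exactly for 12 <= X <= 15, so the comparison becomes a range check
   of X against [12, 15]; below, LO and HI are derived from
   prod = C1 * C2 and the divisor-dependent slack C1 - 1, with signs
   and overflow handled case by case.  */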
6361 static tree
6362 fold_div_compare (location_t loc,
6363 enum tree_code code, tree type, tree arg0, tree arg1)
6365 tree prod, tmp, hi, lo;
6366 tree arg00 = TREE_OPERAND (arg0, 0);
6367 tree arg01 = TREE_OPERAND (arg0, 1);
6368 signop sign = TYPE_SIGN (TREE_TYPE (arg0));
6369 bool neg_overflow = false;
6370 bool overflow;
6372 /* We have to do this the hard way to detect unsigned overflow.
6373 prod = int_const_binop (MULT_EXPR, arg01, arg1); */
6374 wide_int val = wi::mul (arg01, arg1, sign, &overflow);
6375 prod = force_fit_type (TREE_TYPE (arg00), val, -1, overflow);
6376 neg_overflow = false;
6378 if (sign == UNSIGNED)
6380 tmp = int_const_binop (MINUS_EXPR, arg01,
6381 build_int_cst (TREE_TYPE (arg01), 1));
6382 lo = prod;
6384 /* Likewise hi = int_const_binop (PLUS_EXPR, prod, tmp). */
6385 val = wi::add (prod, tmp, sign, &overflow);
6386 hi = force_fit_type (TREE_TYPE (arg00), val,
6387 -1, overflow | TREE_OVERFLOW (prod));
6389 else if (tree_int_cst_sgn (arg01) >= 0)
6391 tmp = int_const_binop (MINUS_EXPR, arg01,
6392 build_int_cst (TREE_TYPE (arg01), 1));
6393 switch (tree_int_cst_sgn (arg1))
6395 case -1:
6396 neg_overflow = true;
6397 lo = int_const_binop (MINUS_EXPR, prod, tmp);
6398 hi = prod;
6399 break;
6401 case 0:
6402 lo = fold_negate_const (tmp, TREE_TYPE (arg0));
6403 hi = tmp;
6404 break;
6406 case 1:
6407 hi = int_const_binop (PLUS_EXPR, prod, tmp);
6408 lo = prod;
6409 break;
6411 default:
6412 gcc_unreachable ();
6415 else
6417 /* A negative divisor reverses the relational operators. */
6418 code = swap_tree_comparison (code);
6420 tmp = int_const_binop (PLUS_EXPR, arg01,
6421 build_int_cst (TREE_TYPE (arg01), 1));
6422 switch (tree_int_cst_sgn (arg1))
6424 case -1:
6425 hi = int_const_binop (MINUS_EXPR, prod, tmp);
6426 lo = prod;
6427 break;
6429 case 0:
6430 hi = fold_negate_const (tmp, TREE_TYPE (arg0));
6431 lo = tmp;
6432 break;
6434 case 1:
6435 neg_overflow = true;
6436 lo = int_const_binop (PLUS_EXPR, prod, tmp);
6437 hi = prod;
6438 break;
6440 default:
6441 gcc_unreachable ();
6445 switch (code)
6447 case EQ_EXPR:
6448 if (TREE_OVERFLOW (lo) && TREE_OVERFLOW (hi))
6449 return omit_one_operand_loc (loc, type, integer_zero_node, arg00);
6450 if (TREE_OVERFLOW (hi))
6451 return fold_build2_loc (loc, GE_EXPR, type, arg00, lo);
6452 if (TREE_OVERFLOW (lo))
6453 return fold_build2_loc (loc, LE_EXPR, type, arg00, hi);
6454 return build_range_check (loc, type, arg00, 1, lo, hi);
6456 case NE_EXPR:
6457 if (TREE_OVERFLOW (lo) && TREE_OVERFLOW (hi))
6458 return omit_one_operand_loc (loc, type, integer_one_node, arg00);
6459 if (TREE_OVERFLOW (hi))
6460 return fold_build2_loc (loc, LT_EXPR, type, arg00, lo);
6461 if (TREE_OVERFLOW (lo))
6462 return fold_build2_loc (loc, GT_EXPR, type, arg00, hi);
6463 return build_range_check (loc, type, arg00, 0, lo, hi);
6465 case LT_EXPR:
6466 if (TREE_OVERFLOW (lo))
6468 tmp = neg_overflow ? integer_zero_node : integer_one_node;
6469 return omit_one_operand_loc (loc, type, tmp, arg00);
6471 return fold_build2_loc (loc, LT_EXPR, type, arg00, lo);
6473 case LE_EXPR:
6474 if (TREE_OVERFLOW (hi))
6476 tmp = neg_overflow ? integer_zero_node : integer_one_node;
6477 return omit_one_operand_loc (loc, type, tmp, arg00);
6479 return fold_build2_loc (loc, LE_EXPR, type, arg00, hi);
6481 case GT_EXPR:
6482 if (TREE_OVERFLOW (hi))
6484 tmp = neg_overflow ? integer_one_node : integer_zero_node;
6485 return omit_one_operand_loc (loc, type, tmp, arg00);
6487 return fold_build2_loc (loc, GT_EXPR, type, arg00, hi);
6489 case GE_EXPR:
6490 if (TREE_OVERFLOW (lo))
6492 tmp = neg_overflow ? integer_one_node : integer_zero_node;
6493 return omit_one_operand_loc (loc, type, tmp, arg00);
6495 return fold_build2_loc (loc, GE_EXPR, type, arg00, lo);
6497 default:
6498 break;
6501 return NULL_TREE;
6505 /* If CODE with arguments ARG0 and ARG1 represents a single bit
6506 equality/inequality test, then return a simplified form of the test
6507 using a sign test. Otherwise return NULL. RESULT_TYPE is the desired
6508 result type. */
6510 static tree
6511 fold_single_bit_test_into_sign_test (location_t loc,
6512 enum tree_code code, tree arg0, tree arg1,
6513 tree result_type)
6515 /* If this is testing a single bit, we can optimize the test. */
6516 if ((code == NE_EXPR || code == EQ_EXPR)
6517 && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
6518 && integer_pow2p (TREE_OPERAND (arg0, 1)))
6520 /* If we have (A & C) != 0 where C is the sign bit of A, convert
6521 this into A < 0. Similarly for (A & C) == 0 into A >= 0. */
6522 tree arg00 = sign_bit_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg0, 1));
6524 if (arg00 != NULL_TREE
6525 /* This is only a win if casting to a signed type is cheap,
6526 i.e. when arg00's type is not a partial mode. */
6527 && TYPE_PRECISION (TREE_TYPE (arg00))
6528 == GET_MODE_PRECISION (TYPE_MODE (TREE_TYPE (arg00))))
6530 tree stype = signed_type_for (TREE_TYPE (arg00));
6531 return fold_build2_loc (loc, code == EQ_EXPR ? GE_EXPR : LT_EXPR,
6532 result_type,
6533 fold_convert_loc (loc, stype, arg00),
6534 build_int_cst (stype, 0));
6538 return NULL_TREE;
6541 /* If CODE with arguments ARG0 and ARG1 represents a single bit
6542 equality/inequality test, then return a simplified form of
6543 the test using shifts and logical operations. Otherwise return
6544 NULL. RESULT_TYPE is the desired result type. */
6546 tree
6547 fold_single_bit_test (location_t loc, enum tree_code code,
6548 tree arg0, tree arg1, tree result_type)
6550 /* If this is testing a single bit, we can optimize the test. */
6551 if ((code == NE_EXPR || code == EQ_EXPR)
6552 && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
6553 && integer_pow2p (TREE_OPERAND (arg0, 1)))
6555 tree inner = TREE_OPERAND (arg0, 0);
6556 tree type = TREE_TYPE (arg0);
6557 int bitnum = tree_log2 (TREE_OPERAND (arg0, 1));
6558 enum machine_mode operand_mode = TYPE_MODE (type);
6559 int ops_unsigned;
6560 tree signed_type, unsigned_type, intermediate_type;
6561 tree tem, one;
6563 /* First, see if we can fold the single bit test into a sign-bit
6564 test. */
6565 tem = fold_single_bit_test_into_sign_test (loc, code, arg0, arg1,
6566 result_type);
6567 if (tem)
6568 return tem;
6570 /* Otherwise we have (A & C) != 0 where C is a single bit,
6571 convert that into ((A >> C2) & 1). Where C2 = log2(C).
6572 Similarly for (A & C) == 0. */
6574 /* If INNER is a right shift of a constant and it plus BITNUM does
6575 not overflow, adjust BITNUM and INNER. */
6576 if (TREE_CODE (inner) == RSHIFT_EXPR
6577 && TREE_CODE (TREE_OPERAND (inner, 1)) == INTEGER_CST
6578 && bitnum < TYPE_PRECISION (type)
6579 && wi::ltu_p (TREE_OPERAND (inner, 1),
6580 TYPE_PRECISION (type) - bitnum))
6582 bitnum += tree_to_uhwi (TREE_OPERAND (inner, 1));
6583 inner = TREE_OPERAND (inner, 0);
6586 /* If we are going to be able to omit the AND below, we must do our
6587 operations as unsigned. If we must use the AND, we have a choice.
6588 Normally unsigned is faster, but for some machines signed is. */
6589 #ifdef LOAD_EXTEND_OP
6590 ops_unsigned = (LOAD_EXTEND_OP (operand_mode) == SIGN_EXTEND
6591 && !flag_syntax_only) ? 0 : 1;
6592 #else
6593 ops_unsigned = 1;
6594 #endif
6596 signed_type = lang_hooks.types.type_for_mode (operand_mode, 0);
6597 unsigned_type = lang_hooks.types.type_for_mode (operand_mode, 1);
6598 intermediate_type = ops_unsigned ? unsigned_type : signed_type;
6599 inner = fold_convert_loc (loc, intermediate_type, inner);
6601 if (bitnum != 0)
6602 inner = build2 (RSHIFT_EXPR, intermediate_type,
6603 inner, size_int (bitnum));
6605 one = build_int_cst (intermediate_type, 1);
6607 if (code == EQ_EXPR)
6608 inner = fold_build2_loc (loc, BIT_XOR_EXPR, intermediate_type, inner, one);
6610 /* Put the AND last so it can combine with more things. */
6611 inner = build2 (BIT_AND_EXPR, intermediate_type, inner, one);
6613 /* Make sure to return the proper type. */
6614 inner = fold_convert_loc (loc, result_type, inner);
6616 return inner;
6618 return NULL_TREE;
6621 /* Check whether we are allowed to reorder operands arg0 and arg1,
6622 such that the evaluation of arg1 occurs before arg0. */
6624 static bool
6625 reorder_operands_p (const_tree arg0, const_tree arg1)
6627 if (! flag_evaluation_order)
6628 return true;
6629 if (TREE_CONSTANT (arg0) || TREE_CONSTANT (arg1))
6630 return true;
6631 return ! TREE_SIDE_EFFECTS (arg0)
6632 && ! TREE_SIDE_EFFECTS (arg1);
6635 /* Test whether it is preferable to swap two operands, ARG0 and
6636 ARG1, for example because ARG0 is an integer constant and ARG1
6637 isn't. If REORDER is true, only recommend swapping if we can
6638 evaluate the operands in reverse order. */
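/* Callers use this to canonicalize operand order: e.g. the constant
   checks below cause "5 + X" to be rewritten as "X + 5" and "5 < X"
   as "X > 5", so later pattern matching need only consider one
   operand order.  */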
6640 bool
6641 tree_swap_operands_p (const_tree arg0, const_tree arg1, bool reorder)
6643 STRIP_SIGN_NOPS (arg0);
6644 STRIP_SIGN_NOPS (arg1);
6646 if (TREE_CODE (arg1) == INTEGER_CST)
6647 return 0;
6648 if (TREE_CODE (arg0) == INTEGER_CST)
6649 return 1;
6651 if (TREE_CODE (arg1) == REAL_CST)
6652 return 0;
6653 if (TREE_CODE (arg0) == REAL_CST)
6654 return 1;
6656 if (TREE_CODE (arg1) == FIXED_CST)
6657 return 0;
6658 if (TREE_CODE (arg0) == FIXED_CST)
6659 return 1;
6661 if (TREE_CODE (arg1) == COMPLEX_CST)
6662 return 0;
6663 if (TREE_CODE (arg0) == COMPLEX_CST)
6664 return 1;
6666 if (TREE_CONSTANT (arg1))
6667 return 0;
6668 if (TREE_CONSTANT (arg0))
6669 return 1;
6671 if (optimize_function_for_size_p (cfun))
6672 return 0;
6674 if (reorder && flag_evaluation_order
6675 && (TREE_SIDE_EFFECTS (arg0) || TREE_SIDE_EFFECTS (arg1)))
6676 return 0;
6678 /* It is preferable to swap two SSA_NAMEs to ensure a canonical form
6679 for commutative and comparison operators. Ensuring a canonical
6680 form allows the optimizers to find additional redundancies without
6681 having to explicitly check for both orderings. */
6682 if (TREE_CODE (arg0) == SSA_NAME
6683 && TREE_CODE (arg1) == SSA_NAME
6684 && SSA_NAME_VERSION (arg0) > SSA_NAME_VERSION (arg1))
6685 return 1;
6687 /* Put SSA_NAMEs last. */
6688 if (TREE_CODE (arg1) == SSA_NAME)
6689 return 0;
6690 if (TREE_CODE (arg0) == SSA_NAME)
6691 return 1;
6693 /* Put variables last. */
6694 if (DECL_P (arg1))
6695 return 0;
6696 if (DECL_P (arg0))
6697 return 1;
6699 return 0;
6702 /* Fold comparison ARG0 CODE ARG1 (with result in TYPE), where
6703 ARG0 is extended to a wider type. */
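/* E.g. if X has type signed char, (int) X == 1000 folds to constant
   false, because 1000 lies outside [-128, 127] (assuming an 8-bit
   signed char); while (int) X == 100 is narrowed to a comparison done
   directly in the signed char type.  */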
6705 static tree
6706 fold_widened_comparison (location_t loc, enum tree_code code,
6707 tree type, tree arg0, tree arg1)
6709 tree arg0_unw = get_unwidened (arg0, NULL_TREE);
6710 tree arg1_unw;
6711 tree shorter_type, outer_type;
6712 tree min, max;
6713 bool above, below;
6715 if (arg0_unw == arg0)
6716 return NULL_TREE;
6717 shorter_type = TREE_TYPE (arg0_unw);
6719 #ifdef HAVE_canonicalize_funcptr_for_compare
6720 /* Disable this optimization if we're casting a function pointer
6721 type on targets that require function pointer canonicalization. */
6722 if (HAVE_canonicalize_funcptr_for_compare
6723 && TREE_CODE (shorter_type) == POINTER_TYPE
6724 && TREE_CODE (TREE_TYPE (shorter_type)) == FUNCTION_TYPE)
6725 return NULL_TREE;
6726 #endif
6728 if (TYPE_PRECISION (TREE_TYPE (arg0)) <= TYPE_PRECISION (shorter_type))
6729 return NULL_TREE;
6731 arg1_unw = get_unwidened (arg1, NULL_TREE);
6733 /* If possible, express the comparison in the shorter mode. */
6734 if ((code == EQ_EXPR || code == NE_EXPR
6735 || TYPE_UNSIGNED (TREE_TYPE (arg0)) == TYPE_UNSIGNED (shorter_type))
6736 && (TREE_TYPE (arg1_unw) == shorter_type
6737 || ((TYPE_PRECISION (shorter_type)
6738 >= TYPE_PRECISION (TREE_TYPE (arg1_unw)))
6739 && (TYPE_UNSIGNED (shorter_type)
6740 == TYPE_UNSIGNED (TREE_TYPE (arg1_unw))))
6741 || (TREE_CODE (arg1_unw) == INTEGER_CST
6742 && (TREE_CODE (shorter_type) == INTEGER_TYPE
6743 || TREE_CODE (shorter_type) == BOOLEAN_TYPE)
6744 && int_fits_type_p (arg1_unw, shorter_type))))
6745 return fold_build2_loc (loc, code, type, arg0_unw,
6746 fold_convert_loc (loc, shorter_type, arg1_unw));
6748 if (TREE_CODE (arg1_unw) != INTEGER_CST
6749 || TREE_CODE (shorter_type) != INTEGER_TYPE
6750 || !int_fits_type_p (arg1_unw, shorter_type))
6751 return NULL_TREE;
6753 /* If we are comparing with an integer that does not fit into the range
6754 of the shorter type, the result is known. */
6755 outer_type = TREE_TYPE (arg1_unw);
6756 min = lower_bound_in_type (outer_type, shorter_type);
6757 max = upper_bound_in_type (outer_type, shorter_type);
6759 above = integer_nonzerop (fold_relational_const (LT_EXPR, type,
6760 max, arg1_unw));
6761 below = integer_nonzerop (fold_relational_const (LT_EXPR, type,
6762 arg1_unw, min));
6764 switch (code)
6766 case EQ_EXPR:
6767 if (above || below)
6768 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
6769 break;
6771 case NE_EXPR:
6772 if (above || below)
6773 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
6774 break;
6776 case LT_EXPR:
6777 case LE_EXPR:
6778 if (above)
6779 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
6780 else if (below)
6781 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
6783 case GT_EXPR:
6784 case GE_EXPR:
6785 if (above)
6786 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
6787 else if (below)
6788 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
6790 default:
6791 break;
6794 return NULL_TREE;
6797 /* Fold comparison ARG0 CODE ARG1 (with result in TYPE), where for
6798 ARG0 just the signedness is changed. */
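/* E.g. with U of type unsigned int, (int) U == 5 becomes U == 5U:
   a same-precision conversion cannot change equality.  Ordered
   comparisons, checked below, are rewritten only when the signedness
   of the two types agrees.  */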
6800 static tree
6801 fold_sign_changed_comparison (location_t loc, enum tree_code code, tree type,
6802 tree arg0, tree arg1)
6804 tree arg0_inner;
6805 tree inner_type, outer_type;
6807 if (!CONVERT_EXPR_P (arg0))
6808 return NULL_TREE;
6810 outer_type = TREE_TYPE (arg0);
6811 arg0_inner = TREE_OPERAND (arg0, 0);
6812 inner_type = TREE_TYPE (arg0_inner);
6814 #ifdef HAVE_canonicalize_funcptr_for_compare
6815 /* Disable this optimization if we're casting a function pointer
6816 type on targets that require function pointer canonicalization. */
6817 if (HAVE_canonicalize_funcptr_for_compare
6818 && TREE_CODE (inner_type) == POINTER_TYPE
6819 && TREE_CODE (TREE_TYPE (inner_type)) == FUNCTION_TYPE)
6820 return NULL_TREE;
6821 #endif
6823 if (TYPE_PRECISION (inner_type) != TYPE_PRECISION (outer_type))
6824 return NULL_TREE;
6826 if (TREE_CODE (arg1) != INTEGER_CST
6827 && !(CONVERT_EXPR_P (arg1)
6828 && TREE_TYPE (TREE_OPERAND (arg1, 0)) == inner_type))
6829 return NULL_TREE;
6831 if (TYPE_UNSIGNED (inner_type) != TYPE_UNSIGNED (outer_type)
6832 && code != NE_EXPR
6833 && code != EQ_EXPR)
6834 return NULL_TREE;
6836 if (POINTER_TYPE_P (inner_type) != POINTER_TYPE_P (outer_type))
6837 return NULL_TREE;
6839 if (TREE_CODE (arg1) == INTEGER_CST)
6840 arg1 = force_fit_type (inner_type, wi::to_widest (arg1), 0,
6841 TREE_OVERFLOW (arg1));
6842 else
6843 arg1 = fold_convert_loc (loc, inner_type, arg1);
6845 return fold_build2_loc (loc, code, type, arg0_inner, arg1);
6848 /* Tries to replace &a[idx] p+ s * delta with &a[idx + delta], if s is
6849 the step of the array. Reconstructs s and delta in the case of s *
6850 delta being an integer constant (and thus already folded). ADDR is
6851 the address. OP1 is the multiplicative expression. If the
6852 function succeeds, the new address expression is returned.
6853 Otherwise NULL_TREE is returned. LOC is the location of the
6854 resulting expression. */
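/* E.g. for int a[10] (assuming sizeof (int) == 4 for illustration),
   &a[1] p+ 12 carries op1 = 3 * 4 with s == 4 matching the array
   step, so the address is rewritten as &a[1 + 3], i.e. &a[4].  */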
6856 static tree
6857 try_move_mult_to_index (location_t loc, tree addr, tree op1)
6859 tree s, delta, step;
6860 tree ref = TREE_OPERAND (addr, 0), pref;
6861 tree ret, pos;
6862 tree itype;
6863 bool mdim = false;
6865 /* Strip the nops that might be added when converting op1 to sizetype. */
6866 STRIP_NOPS (op1);
6868 /* Canonicalize op1 into a possibly non-constant delta
6869 and an INTEGER_CST s. */
6870 if (TREE_CODE (op1) == MULT_EXPR)
6872 tree arg0 = TREE_OPERAND (op1, 0), arg1 = TREE_OPERAND (op1, 1);
6874 STRIP_NOPS (arg0);
6875 STRIP_NOPS (arg1);
6877 if (TREE_CODE (arg0) == INTEGER_CST)
6879 s = arg0;
6880 delta = arg1;
6882 else if (TREE_CODE (arg1) == INTEGER_CST)
6884 s = arg1;
6885 delta = arg0;
6887 else
6888 return NULL_TREE;
6890 else if (TREE_CODE (op1) == INTEGER_CST)
6892 delta = op1;
6893 s = NULL_TREE;
6895 else
6897 /* Treat op1 as delta * 1. */
6898 delta = op1;
6899 s = integer_one_node;
6902 /* Handle &x.array the same as we would handle &x.array[0]. */
6903 if (TREE_CODE (ref) == COMPONENT_REF
6904 && TREE_CODE (TREE_TYPE (ref)) == ARRAY_TYPE)
6906 tree domain;
6908 /* Remember if this was a multi-dimensional array. */
6909 if (TREE_CODE (TREE_OPERAND (ref, 0)) == ARRAY_REF)
6910 mdim = true;
6912 domain = TYPE_DOMAIN (TREE_TYPE (ref));
6913 if (! domain)
6914 goto cont;
6915 itype = TREE_TYPE (domain);
6917 step = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (ref)));
6918 if (TREE_CODE (step) != INTEGER_CST)
6919 goto cont;
6921 if (s)
6923 if (! tree_int_cst_equal (step, s))
6924 goto cont;
6926 else
6928 /* Check whether delta is a multiple of step. */
6929 tree tmp = div_if_zero_remainder (op1, step);
6930 if (! tmp)
6931 goto cont;
6932 delta = tmp;
6935 /* Only fold here if we can verify we do not overflow one
6936 dimension of a multi-dimensional array. */
6937 if (mdim)
6939 tree tmp;
6941 if (!TYPE_MIN_VALUE (domain)
6942 || !TYPE_MAX_VALUE (domain)
6943 || TREE_CODE (TYPE_MAX_VALUE (domain)) != INTEGER_CST)
6944 goto cont;
6946 tmp = fold_binary_loc (loc, PLUS_EXPR, itype,
6947 fold_convert_loc (loc, itype,
6948 TYPE_MIN_VALUE (domain)),
6949 fold_convert_loc (loc, itype, delta));
6950 if (TREE_CODE (tmp) != INTEGER_CST
6951 || tree_int_cst_lt (TYPE_MAX_VALUE (domain), tmp))
6952 goto cont;
6955 /* We found a suitable component reference. */
6957 pref = TREE_OPERAND (addr, 0);
6958 ret = copy_node (pref);
6959 SET_EXPR_LOCATION (ret, loc);
6961 ret = build4_loc (loc, ARRAY_REF, TREE_TYPE (TREE_TYPE (ref)), ret,
6962 fold_build2_loc
6963 (loc, PLUS_EXPR, itype,
6964 fold_convert_loc (loc, itype,
6965 TYPE_MIN_VALUE
6966 (TYPE_DOMAIN (TREE_TYPE (ref)))),
6967 fold_convert_loc (loc, itype, delta)),
6968 NULL_TREE, NULL_TREE);
6969 return build_fold_addr_expr_loc (loc, ret);
6972 cont:
6974 for (;; ref = TREE_OPERAND (ref, 0))
6976 if (TREE_CODE (ref) == ARRAY_REF)
6978 tree domain;
6980 /* Remember if this was a multi-dimensional array. */
6981 if (TREE_CODE (TREE_OPERAND (ref, 0)) == ARRAY_REF)
6982 mdim = true;
6984 domain = TYPE_DOMAIN (TREE_TYPE (TREE_OPERAND (ref, 0)));
6985 if (! domain)
6986 continue;
6987 itype = TREE_TYPE (domain);
6989 step = array_ref_element_size (ref);
6990 if (TREE_CODE (step) != INTEGER_CST)
6991 continue;
6993 if (s)
6995 if (! tree_int_cst_equal (step, s))
6996 continue;
6998 else
7000 /* Check whether delta is a multiple of step. */
7001 tree tmp = div_if_zero_remainder (op1, step);
7002 if (! tmp)
7003 continue;
7004 delta = tmp;
7007 /* Only fold here if we can verify we do not overflow one
7008 dimension of a multi-dimensional array. */
7009 if (mdim)
7011 tree tmp;
7013 if (TREE_CODE (TREE_OPERAND (ref, 1)) != INTEGER_CST
7014 || !TYPE_MAX_VALUE (domain)
7015 || TREE_CODE (TYPE_MAX_VALUE (domain)) != INTEGER_CST)
7016 continue;
7018 tmp = fold_binary_loc (loc, PLUS_EXPR, itype,
7019 fold_convert_loc (loc, itype,
7020 TREE_OPERAND (ref, 1)),
7021 fold_convert_loc (loc, itype, delta));
7022 if (!tmp
7023 || TREE_CODE (tmp) != INTEGER_CST
7024 || tree_int_cst_lt (TYPE_MAX_VALUE (domain), tmp))
7025 continue;
7028 break;
7030 else
7031 mdim = false;
7033 if (!handled_component_p (ref))
7034 return NULL_TREE;
7037 /* We found a suitable array reference. So copy everything up to it,
7038 and replace the index. */
7040 pref = TREE_OPERAND (addr, 0);
7041 ret = copy_node (pref);
7042 SET_EXPR_LOCATION (ret, loc);
7043 pos = ret;
7045 while (pref != ref)
7047 pref = TREE_OPERAND (pref, 0);
7048 TREE_OPERAND (pos, 0) = copy_node (pref);
7049 pos = TREE_OPERAND (pos, 0);
7052 TREE_OPERAND (pos, 1)
7053 = fold_build2_loc (loc, PLUS_EXPR, itype,
7054 fold_convert_loc (loc, itype, TREE_OPERAND (pos, 1)),
7055 fold_convert_loc (loc, itype, delta));
7056 return fold_build1_loc (loc, ADDR_EXPR, TREE_TYPE (addr), ret);
7060 /* Fold A < X && A + 1 > Y to A < X && A >= Y. Normally A + 1 > Y
7061 means A >= Y && A != MAX, but in this case we know that
7062 A < X <= MAX. INEQ is A + 1 > Y, BOUND is A < X. */
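/* The BOUND matters because, for A == MAX, A + 1 wraps (or overflows)
   and A + 1 > Y is then not equivalent to A >= Y.  Knowing A < X
   rules out A == MAX, which is what makes the non-sharp form safe.  */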
7064 static tree
7065 fold_to_nonsharp_ineq_using_bound (location_t loc, tree ineq, tree bound)
7067 tree a, typea, type = TREE_TYPE (ineq), a1, diff, y;
7069 if (TREE_CODE (bound) == LT_EXPR)
7070 a = TREE_OPERAND (bound, 0);
7071 else if (TREE_CODE (bound) == GT_EXPR)
7072 a = TREE_OPERAND (bound, 1);
7073 else
7074 return NULL_TREE;
7076 typea = TREE_TYPE (a);
7077 if (!INTEGRAL_TYPE_P (typea)
7078 && !POINTER_TYPE_P (typea))
7079 return NULL_TREE;
7081 if (TREE_CODE (ineq) == LT_EXPR)
7083 a1 = TREE_OPERAND (ineq, 1);
7084 y = TREE_OPERAND (ineq, 0);
7086 else if (TREE_CODE (ineq) == GT_EXPR)
7088 a1 = TREE_OPERAND (ineq, 0);
7089 y = TREE_OPERAND (ineq, 1);
7091 else
7092 return NULL_TREE;
7094 if (TREE_TYPE (a1) != typea)
7095 return NULL_TREE;
7097 if (POINTER_TYPE_P (typea))
7099 /* Convert the pointer types into integers before taking the difference. */
7100 tree ta = fold_convert_loc (loc, ssizetype, a);
7101 tree ta1 = fold_convert_loc (loc, ssizetype, a1);
7102 diff = fold_binary_loc (loc, MINUS_EXPR, ssizetype, ta1, ta);
7104 else
7105 diff = fold_binary_loc (loc, MINUS_EXPR, typea, a1, a);
7107 if (!diff || !integer_onep (diff))
7108 return NULL_TREE;
7110 return fold_build2_loc (loc, GE_EXPR, type, a, y);
7113 /* Fold a sum or difference of at least one multiplication.
7114 Returns the folded tree or NULL if no simplification could be made. */
7116 static tree
7117 fold_plusminus_mult_expr (location_t loc, enum tree_code code, tree type,
7118 tree arg0, tree arg1)
7120 tree arg00, arg01, arg10, arg11;
7121 tree alt0 = NULL_TREE, alt1 = NULL_TREE, same;
7123 /* (A * C) +- (B * C) -> (A+-B) * C.
7124 (A * C) +- A -> A * (C+-1).
7125 We are most concerned about the case where C is a constant,
7126 but other combinations show up during loop reduction. Since
7127 it is not difficult, try all four possibilities. */
7129 if (TREE_CODE (arg0) == MULT_EXPR)
7131 arg00 = TREE_OPERAND (arg0, 0);
7132 arg01 = TREE_OPERAND (arg0, 1);
7134 else if (TREE_CODE (arg0) == INTEGER_CST)
7136 arg00 = build_one_cst (type);
7137 arg01 = arg0;
7139 else
7141 /* We cannot generate constant 1 for fract. */
7142 if (ALL_FRACT_MODE_P (TYPE_MODE (type)))
7143 return NULL_TREE;
7144 arg00 = arg0;
7145 arg01 = build_one_cst (type);
7147 if (TREE_CODE (arg1) == MULT_EXPR)
7149 arg10 = TREE_OPERAND (arg1, 0);
7150 arg11 = TREE_OPERAND (arg1, 1);
7152 else if (TREE_CODE (arg1) == INTEGER_CST)
7154 arg10 = build_one_cst (type);
7155 /* As we canonicalize A - 2 to A + -2, get rid of that sign for
7156 the purpose of this canonicalization. */
7157 if (wi::neg_p (arg1, TYPE_SIGN (TREE_TYPE (arg1)))
7158 && negate_expr_p (arg1)
7159 && code == PLUS_EXPR)
7161 arg11 = negate_expr (arg1);
7162 code = MINUS_EXPR;
7164 else
7165 arg11 = arg1;
7167 else
7169 /* We cannot generate constant 1 for fract. */
7170 if (ALL_FRACT_MODE_P (TYPE_MODE (type)))
7171 return NULL_TREE;
7172 arg10 = arg1;
7173 arg11 = build_one_cst (type);
7175 same = NULL_TREE;
7177 if (operand_equal_p (arg01, arg11, 0))
7178 same = arg01, alt0 = arg00, alt1 = arg10;
7179 else if (operand_equal_p (arg00, arg10, 0))
7180 same = arg00, alt0 = arg01, alt1 = arg11;
7181 else if (operand_equal_p (arg00, arg11, 0))
7182 same = arg00, alt0 = arg01, alt1 = arg10;
7183 else if (operand_equal_p (arg01, arg10, 0))
7184 same = arg01, alt0 = arg00, alt1 = arg11;
7186 /* No identical multiplicands; see if we can find a common
7187 power-of-two factor in non-power-of-two multiplies. This
7188 can help in multi-dimensional array access. */
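/* E.g. i * 28 + j * 4 becomes (i * 7 + j) * 4: 4 is a power of two
   that divides 28, so pulling out the common factor leaves a form
   that a single shift or an addressing mode can absorb.  */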
7189 else if (tree_fits_shwi_p (arg01)
7190 && tree_fits_shwi_p (arg11))
7192 HOST_WIDE_INT int01, int11, tmp;
7193 bool swap = false;
7194 tree maybe_same;
7195 int01 = tree_to_shwi (arg01);
7196 int11 = tree_to_shwi (arg11);
7198 /* Move min of absolute values to int11. */
7199 if (absu_hwi (int01) < absu_hwi (int11))
7201 tmp = int01, int01 = int11, int11 = tmp;
7202 alt0 = arg00, arg00 = arg10, arg10 = alt0;
7203 maybe_same = arg01;
7204 swap = true;
7206 else
7207 maybe_same = arg11;
7209 if (exact_log2 (absu_hwi (int11)) > 0 && int01 % int11 == 0
7210 /* The remainder should not be a constant, otherwise we
7211 would end up folding i * 4 + 2 to (i * 2 + 1) * 2, which
7212 would increase the number of multiplications needed. */
7213 && TREE_CODE (arg10) != INTEGER_CST)
7215 alt0 = fold_build2_loc (loc, MULT_EXPR, TREE_TYPE (arg00), arg00,
7216 build_int_cst (TREE_TYPE (arg00),
7217 int01 / int11));
7218 alt1 = arg10;
7219 same = maybe_same;
7220 if (swap)
7221 maybe_same = alt0, alt0 = alt1, alt1 = maybe_same;
7225 if (same)
7226 return fold_build2_loc (loc, MULT_EXPR, type,
7227 fold_build2_loc (loc, code, type,
7228 fold_convert_loc (loc, type, alt0),
7229 fold_convert_loc (loc, type, alt1)),
7230 fold_convert_loc (loc, type, same));
7232 return NULL_TREE;
7235 /* Subroutine of native_encode_expr. Encode the INTEGER_CST
7236 specified by EXPR into the buffer PTR of length LEN bytes.
7237 Return the number of bytes placed in the buffer, or zero
7238 upon failure. */
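/* E.g. the 32-bit value 0x01020304 is emitted as the byte sequence
   04 03 02 01 for a little-endian target and 01 02 03 04 for a
   big-endian one; the word/byte arithmetic below also handles
   targets whose word order differs from their byte order.  */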
7240 static int
7241 native_encode_int (const_tree expr, unsigned char *ptr, int len)
7243 tree type = TREE_TYPE (expr);
7244 int total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
7245 int byte, offset, word, words;
7246 unsigned char value;
7248 if (total_bytes > len)
7249 return 0;
7250 words = total_bytes / UNITS_PER_WORD;
7252 for (byte = 0; byte < total_bytes; byte++)
7254 int bitpos = byte * BITS_PER_UNIT;
7255 /* Extend EXPR according to TYPE_SIGN if the precision isn't a whole
7256 number of bytes. */
7257 value = wi::extract_uhwi (wi::to_widest (expr), bitpos, BITS_PER_UNIT);
7259 if (total_bytes > UNITS_PER_WORD)
7261 word = byte / UNITS_PER_WORD;
7262 if (WORDS_BIG_ENDIAN)
7263 word = (words - 1) - word;
7264 offset = word * UNITS_PER_WORD;
7265 if (BYTES_BIG_ENDIAN)
7266 offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
7267 else
7268 offset += byte % UNITS_PER_WORD;
7270 else
7271 offset = BYTES_BIG_ENDIAN ? (total_bytes - 1) - byte : byte;
7272 ptr[offset] = value;
7274 return total_bytes;
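/* For example (a sketch assuming 8-bit bytes and 4-byte words): the
   32-bit INTEGER_CST 0x01020304 is encoded as the bytes 04 03 02 01
   on a little-endian target and 01 02 03 04 on a big-endian one,
   i.e. exactly the target's in-memory representation. */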
7278 /* Subroutine of native_encode_expr. Encode the FIXED_CST
7279 specified by EXPR into the buffer PTR of length LEN bytes.
7280 Return the number of bytes placed in the buffer, or zero
7281 upon failure. */
7283 static int
7284 native_encode_fixed (const_tree expr, unsigned char *ptr, int len)
7286 tree type = TREE_TYPE (expr);
7287 enum machine_mode mode = TYPE_MODE (type);
7288 int total_bytes = GET_MODE_SIZE (mode);
7289 FIXED_VALUE_TYPE value;
7290 tree i_value, i_type;
7292 if (total_bytes * BITS_PER_UNIT > HOST_BITS_PER_DOUBLE_INT)
7293 return 0;
7295 i_type = lang_hooks.types.type_for_size (GET_MODE_BITSIZE (mode), 1);
7297 if (NULL_TREE == i_type
7298 || TYPE_PRECISION (i_type) != total_bytes * BITS_PER_UNIT)
7299 return 0;
7301 value = TREE_FIXED_CST (expr);
7302 i_value = double_int_to_tree (i_type, value.data);
7304 return native_encode_int (i_value, ptr, len);
7308 /* Subroutine of native_encode_expr. Encode the REAL_CST
7309 specified by EXPR into the buffer PTR of length LEN bytes.
7310 Return the number of bytes placed in the buffer, or zero
7311 upon failure. */
7313 static int
7314 native_encode_real (const_tree expr, unsigned char *ptr, int len)
7316 tree type = TREE_TYPE (expr);
7317 int total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
7318 int byte, offset, word, words, bitpos;
7319 unsigned char value;
7321 /* There are always 32 bits in each long, no matter the size of
7322 the host's long. We handle floating point representations with
7323 up to 192 bits. */
7324 long tmp[6];
7326 if (total_bytes > len)
7327 return 0;
7328 words = (32 / BITS_PER_UNIT) / UNITS_PER_WORD;
7330 real_to_target (tmp, TREE_REAL_CST_PTR (expr), TYPE_MODE (type));
7332 for (bitpos = 0; bitpos < total_bytes * BITS_PER_UNIT;
7333 bitpos += BITS_PER_UNIT)
7335 byte = (bitpos / BITS_PER_UNIT) & 3;
7336 value = (unsigned char) (tmp[bitpos / 32] >> (bitpos & 31));
7338 if (UNITS_PER_WORD < 4)
7340 word = byte / UNITS_PER_WORD;
7341 if (WORDS_BIG_ENDIAN)
7342 word = (words - 1) - word;
7343 offset = word * UNITS_PER_WORD;
7344 if (BYTES_BIG_ENDIAN)
7345 offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
7346 else
7347 offset += byte % UNITS_PER_WORD;
7349 else
7350 offset = BYTES_BIG_ENDIAN ? 3 - byte : byte;
7351 ptr[offset + ((bitpos / BITS_PER_UNIT) & ~3)] = value;
7353 return total_bytes;
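/* Rough walk-through: for a 64-bit double, real_to_target fills
   tmp[0] and tmp[1] with two 32-bit groups; the loop above then
   scatters each group's bytes into PTR in target byte order, the
   UNITS_PER_WORD < 4 branch covering targets whose words are
   smaller than the 4-byte groups. */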
7356 /* Subroutine of native_encode_expr. Encode the COMPLEX_CST
7357 specified by EXPR into the buffer PTR of length LEN bytes.
7358 Return the number of bytes placed in the buffer, or zero
7359 upon failure. */
7361 static int
7362 native_encode_complex (const_tree expr, unsigned char *ptr, int len)
7364 int rsize, isize;
7365 tree part;
7367 part = TREE_REALPART (expr);
7368 rsize = native_encode_expr (part, ptr, len);
7369 if (rsize == 0)
7370 return 0;
7371 part = TREE_IMAGPART (expr);
7372 isize = native_encode_expr (part, ptr+rsize, len-rsize);
7373 if (isize != rsize)
7374 return 0;
7375 return rsize + isize;
7379 /* Subroutine of native_encode_expr. Encode the VECTOR_CST
7380 specified by EXPR into the buffer PTR of length LEN bytes.
7381 Return the number of bytes placed in the buffer, or zero
7382 upon failure. */
7384 static int
7385 native_encode_vector (const_tree expr, unsigned char *ptr, int len)
7387 unsigned i, count;
7388 int size, offset;
7389 tree itype, elem;
7391 offset = 0;
7392 count = VECTOR_CST_NELTS (expr);
7393 itype = TREE_TYPE (TREE_TYPE (expr));
7394 size = GET_MODE_SIZE (TYPE_MODE (itype));
7395 for (i = 0; i < count; i++)
7397 elem = VECTOR_CST_ELT (expr, i);
7398 if (native_encode_expr (elem, ptr+offset, len-offset) != size)
7399 return 0;
7400 offset += size;
7402 return offset;
7406 /* Subroutine of native_encode_expr. Encode the STRING_CST
7407 specified by EXPR into the buffer PTR of length LEN bytes.
7408 Return the number of bytes placed in the buffer, or zero
7409 upon failure. */
7411 static int
7412 native_encode_string (const_tree expr, unsigned char *ptr, int len)
7414 tree type = TREE_TYPE (expr);
7415 HOST_WIDE_INT total_bytes;
7417 if (TREE_CODE (type) != ARRAY_TYPE
7418 || TREE_CODE (TREE_TYPE (type)) != INTEGER_TYPE
7419 || GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (type))) != BITS_PER_UNIT
7420 || !tree_fits_shwi_p (TYPE_SIZE_UNIT (type)))
7421 return 0;
7422 total_bytes = tree_to_shwi (TYPE_SIZE_UNIT (type));
7423 if (total_bytes > len)
7424 return 0;
7425 if (TREE_STRING_LENGTH (expr) < total_bytes)
7427 memcpy (ptr, TREE_STRING_POINTER (expr), TREE_STRING_LENGTH (expr));
7428 memset (ptr + TREE_STRING_LENGTH (expr), 0,
7429 total_bytes - TREE_STRING_LENGTH (expr));
7431 else
7432 memcpy (ptr, TREE_STRING_POINTER (expr), total_bytes);
7433 return total_bytes;
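/* For example, a STRING_CST of type char[8] whose string holds only
   3 bytes is encoded as those 3 bytes followed by 5 zero bytes, the
   same 8 bytes the initialized object would contain in memory. */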
7437 /* Subroutine of fold_view_convert_expr. Encode the INTEGER_CST,
7438 REAL_CST, COMPLEX_CST or VECTOR_CST specified by EXPR into the
7439 buffer PTR of length LEN bytes. Return the number of bytes
7440 placed in the buffer, or zero upon failure. */
7442 int
7443 native_encode_expr (const_tree expr, unsigned char *ptr, int len)
7445 switch (TREE_CODE (expr))
7447 case INTEGER_CST:
7448 return native_encode_int (expr, ptr, len);
7450 case REAL_CST:
7451 return native_encode_real (expr, ptr, len);
7453 case FIXED_CST:
7454 return native_encode_fixed (expr, ptr, len);
7456 case COMPLEX_CST:
7457 return native_encode_complex (expr, ptr, len);
7459 case VECTOR_CST:
7460 return native_encode_vector (expr, ptr, len);
7462 case STRING_CST:
7463 return native_encode_string (expr, ptr, len);
7465 default:
7466 return 0;
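/* A hypothetical caller sketch (buffer size and names illustrative):

     unsigned char buf[64];
     int len = native_encode_expr (cst, buf, sizeof (buf));
     if (len == 0)
       return NULL_TREE;

   A zero return always means the whole encoding failed; partial
   results are never returned. */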
7471 /* Subroutine of native_interpret_expr. Interpret the contents of
7472 the buffer PTR of length LEN as an INTEGER_CST of type TYPE.
7473 If the buffer cannot be interpreted, return NULL_TREE. */
7475 static tree
7476 native_interpret_int (tree type, const unsigned char *ptr, int len)
7478 int total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
7480 if (total_bytes > len
7481 || total_bytes * BITS_PER_UNIT > HOST_BITS_PER_DOUBLE_INT)
7482 return NULL_TREE;
7484 wide_int result = wi::from_buffer (ptr, total_bytes);
7486 return wide_int_to_tree (type, result);
7490 /* Subroutine of native_interpret_expr. Interpret the contents of
7491 the buffer PTR of length LEN as a FIXED_CST of type TYPE.
7492 If the buffer cannot be interpreted, return NULL_TREE. */
7494 static tree
7495 native_interpret_fixed (tree type, const unsigned char *ptr, int len)
7497 int total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
7498 double_int result;
7499 FIXED_VALUE_TYPE fixed_value;
7501 if (total_bytes > len
7502 || total_bytes * BITS_PER_UNIT > HOST_BITS_PER_DOUBLE_INT)
7503 return NULL_TREE;
7505 result = double_int::from_buffer (ptr, total_bytes);
7506 fixed_value = fixed_from_double_int (result, TYPE_MODE (type));
7508 return build_fixed (type, fixed_value);
7512 /* Subroutine of native_interpret_expr. Interpret the contents of
7513 the buffer PTR of length LEN as a REAL_CST of type TYPE.
7514 If the buffer cannot be interpreted, return NULL_TREE. */
7516 static tree
7517 native_interpret_real (tree type, const unsigned char *ptr, int len)
7519 enum machine_mode mode = TYPE_MODE (type);
7520 int total_bytes = GET_MODE_SIZE (mode);
7521 int byte, offset, word, words, bitpos;
7522 unsigned char value;
7523 /* There are always 32 bits in each long, no matter the size of
7524 the host's long. We handle floating point representations with
7525 up to 192 bits. */
7526 REAL_VALUE_TYPE r;
7527 long tmp[6];
7529 total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
7530 if (total_bytes > len || total_bytes > 24)
7531 return NULL_TREE;
7532 words = (32 / BITS_PER_UNIT) / UNITS_PER_WORD;
7534 memset (tmp, 0, sizeof (tmp));
7535 for (bitpos = 0; bitpos < total_bytes * BITS_PER_UNIT;
7536 bitpos += BITS_PER_UNIT)
7538 byte = (bitpos / BITS_PER_UNIT) & 3;
7539 if (UNITS_PER_WORD < 4)
7541 word = byte / UNITS_PER_WORD;
7542 if (WORDS_BIG_ENDIAN)
7543 word = (words - 1) - word;
7544 offset = word * UNITS_PER_WORD;
7545 if (BYTES_BIG_ENDIAN)
7546 offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
7547 else
7548 offset += byte % UNITS_PER_WORD;
7550 else
7551 offset = BYTES_BIG_ENDIAN ? 3 - byte : byte;
7552 value = ptr[offset + ((bitpos / BITS_PER_UNIT) & ~3)];
7554 tmp[bitpos / 32] |= (unsigned long)value << (bitpos & 31);
7557 real_from_target (&r, tmp, mode);
7558 return build_real (type, r);
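/* This is the inverse of native_encode_real: bytes are gathered
   from PTR in target order back into 32-bit groups before
   real_from_target rebuilds the value, so encoding followed by
   interpreting round-trips for the supported modes. */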
7562 /* Subroutine of native_interpret_expr. Interpret the contents of
7563 the buffer PTR of length LEN as a COMPLEX_CST of type TYPE.
7564 If the buffer cannot be interpreted, return NULL_TREE. */
7566 static tree
7567 native_interpret_complex (tree type, const unsigned char *ptr, int len)
7569 tree etype, rpart, ipart;
7570 int size;
7572 etype = TREE_TYPE (type);
7573 size = GET_MODE_SIZE (TYPE_MODE (etype));
7574 if (size * 2 > len)
7575 return NULL_TREE;
7576 rpart = native_interpret_expr (etype, ptr, size);
7577 if (!rpart)
7578 return NULL_TREE;
7579 ipart = native_interpret_expr (etype, ptr+size, size);
7580 if (!ipart)
7581 return NULL_TREE;
7582 return build_complex (type, rpart, ipart);
7586 /* Subroutine of native_interpret_expr. Interpret the contents of
7587 the buffer PTR of length LEN as a VECTOR_CST of type TYPE.
7588 If the buffer cannot be interpreted, return NULL_TREE. */
7590 static tree
7591 native_interpret_vector (tree type, const unsigned char *ptr, int len)
7593 tree etype, elem;
7594 int i, size, count;
7595 tree *elements;
7597 etype = TREE_TYPE (type);
7598 size = GET_MODE_SIZE (TYPE_MODE (etype));
7599 count = TYPE_VECTOR_SUBPARTS (type);
7600 if (size * count > len)
7601 return NULL_TREE;
7603 elements = XALLOCAVEC (tree, count);
7604 for (i = count - 1; i >= 0; i--)
7606 elem = native_interpret_expr (etype, ptr+(i*size), size);
7607 if (!elem)
7608 return NULL_TREE;
7609 elements[i] = elem;
7611 return build_vector (type, elements);
7615 /* Subroutine of fold_view_convert_expr. Interpret the contents of
7616 the buffer PTR of length LEN as a constant of type TYPE. For
7617 INTEGRAL_TYPE_P we return an INTEGER_CST, for SCALAR_FLOAT_TYPE_P
7618 we return a REAL_CST, etc... If the buffer cannot be interpreted,
7619 return NULL_TREE. */
7621 tree
7622 native_interpret_expr (tree type, const unsigned char *ptr, int len)
7624 switch (TREE_CODE (type))
7626 case INTEGER_TYPE:
7627 case ENUMERAL_TYPE:
7628 case BOOLEAN_TYPE:
7629 case POINTER_TYPE:
7630 case REFERENCE_TYPE:
7631 return native_interpret_int (type, ptr, len);
7633 case REAL_TYPE:
7634 return native_interpret_real (type, ptr, len);
7636 case FIXED_POINT_TYPE:
7637 return native_interpret_fixed (type, ptr, len);
7639 case COMPLEX_TYPE:
7640 return native_interpret_complex (type, ptr, len);
7642 case VECTOR_TYPE:
7643 return native_interpret_vector (type, ptr, len);
7645 default:
7646 return NULL_TREE;
7650 /* Returns true if we can interpret the contents of a native encoding
7651 as TYPE. */
7653 static bool
7654 can_native_interpret_type_p (tree type)
7656 switch (TREE_CODE (type))
7658 case INTEGER_TYPE:
7659 case ENUMERAL_TYPE:
7660 case BOOLEAN_TYPE:
7661 case POINTER_TYPE:
7662 case REFERENCE_TYPE:
7663 case FIXED_POINT_TYPE:
7664 case REAL_TYPE:
7665 case COMPLEX_TYPE:
7666 case VECTOR_TYPE:
7667 return true;
7668 default:
7669 return false;
7673 /* Fold a VIEW_CONVERT_EXPR of a constant expression EXPR to type
7674 TYPE at compile-time. If we're unable to perform the conversion
7675 return NULL_TREE. */
7677 static tree
7678 fold_view_convert_expr (tree type, tree expr)
7680 /* We support up to 512-bit values (for V8DFmode). */
7681 unsigned char buffer[64];
7682 int len;
7684 /* Check that the host and target are sane. */
7685 if (CHAR_BIT != 8 || BITS_PER_UNIT != 8)
7686 return NULL_TREE;
7688 len = native_encode_expr (expr, buffer, sizeof (buffer));
7689 if (len == 0)
7690 return NULL_TREE;
7692 return native_interpret_expr (type, buffer, len);
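/* Worked example (assuming IEEE single precision and 32-bit int):
   VIEW_CONVERT_EXPR<int>(1.0f) encodes 1.0f as the bytes of
   0x3f800000 in target order and re-reads them as an int, folding
   to the constant 1065353216. */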
7695 /* Build an expression for the address of T. Folds away INDIRECT_REF
7696 to avoid confusing the gimplify process. */
7698 tree
7699 build_fold_addr_expr_with_type_loc (location_t loc, tree t, tree ptrtype)
7701 /* The size of the object is not relevant when talking about its address. */
7702 if (TREE_CODE (t) == WITH_SIZE_EXPR)
7703 t = TREE_OPERAND (t, 0);
7705 if (TREE_CODE (t) == INDIRECT_REF)
7707 t = TREE_OPERAND (t, 0);
7709 if (TREE_TYPE (t) != ptrtype)
7710 t = build1_loc (loc, NOP_EXPR, ptrtype, t);
7712 else if (TREE_CODE (t) == MEM_REF
7713 && integer_zerop (TREE_OPERAND (t, 1)))
7714 return TREE_OPERAND (t, 0);
7715 else if (TREE_CODE (t) == MEM_REF
7716 && TREE_CODE (TREE_OPERAND (t, 0)) == INTEGER_CST)
7717 return fold_binary (POINTER_PLUS_EXPR, ptrtype,
7718 TREE_OPERAND (t, 0),
7719 convert_to_ptrofftype (TREE_OPERAND (t, 1)));
7720 else if (TREE_CODE (t) == VIEW_CONVERT_EXPR)
7722 t = build_fold_addr_expr_loc (loc, TREE_OPERAND (t, 0));
7724 if (TREE_TYPE (t) != ptrtype)
7725 t = fold_convert_loc (loc, ptrtype, t);
7727 else
7728 t = build1_loc (loc, ADDR_EXPR, ptrtype, t);
7730 return t;
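/* Source-level effect of the cases above (illustrative):

     &*p             ->  p
     &MEM[p, 0]      ->  p
     &MEM[cst, off]  ->  cst p+ off

   A conversion to PTRTYPE is added in the INDIRECT_REF and
   VIEW_CONVERT_EXPR cases when the types differ. */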
7733 /* Build an expression for the address of T. */
7735 tree
7736 build_fold_addr_expr_loc (location_t loc, tree t)
7738 tree ptrtype = build_pointer_type (TREE_TYPE (t));
7740 return build_fold_addr_expr_with_type_loc (loc, t, ptrtype);
7743 static bool vec_cst_ctor_to_array (tree, tree *);
7745 /* Fold a unary expression of code CODE and type TYPE with operand
7746 OP0. Return the folded expression if folding is successful.
7747 Otherwise, return NULL_TREE. */
7749 tree
7750 fold_unary_loc (location_t loc, enum tree_code code, tree type, tree op0)
7752 tree tem;
7753 tree arg0;
7754 enum tree_code_class kind = TREE_CODE_CLASS (code);
7756 gcc_assert (IS_EXPR_CODE_CLASS (kind)
7757 && TREE_CODE_LENGTH (code) == 1);
7759 arg0 = op0;
7760 if (arg0)
7762 if (CONVERT_EXPR_CODE_P (code)
7763 || code == FLOAT_EXPR || code == ABS_EXPR || code == NEGATE_EXPR)
7765 /* Don't use STRIP_NOPS, because signedness of argument type
7766 matters. */
7767 STRIP_SIGN_NOPS (arg0);
7769 else
7771 /* Strip any conversions that don't change the mode. This
7772 is safe for every expression, except for a comparison
7773 expression because its signedness is derived from its
7774 operands.
7776 Note that this is done as an internal manipulation within
7777 the constant folder, in order to find the simplest
7778 representation of the arguments so that their form can be
7779 studied. In any case, the appropriate type conversions
7780 should be put back in the tree that will get out of the
7781 constant folder. */
7782 STRIP_NOPS (arg0);
7786 if (TREE_CODE_CLASS (code) == tcc_unary)
7788 if (TREE_CODE (arg0) == COMPOUND_EXPR)
7789 return build2 (COMPOUND_EXPR, type, TREE_OPERAND (arg0, 0),
7790 fold_build1_loc (loc, code, type,
7791 fold_convert_loc (loc, TREE_TYPE (op0),
7792 TREE_OPERAND (arg0, 1))));
7793 else if (TREE_CODE (arg0) == COND_EXPR)
7795 tree arg01 = TREE_OPERAND (arg0, 1);
7796 tree arg02 = TREE_OPERAND (arg0, 2);
7797 if (! VOID_TYPE_P (TREE_TYPE (arg01)))
7798 arg01 = fold_build1_loc (loc, code, type,
7799 fold_convert_loc (loc,
7800 TREE_TYPE (op0), arg01));
7801 if (! VOID_TYPE_P (TREE_TYPE (arg02)))
7802 arg02 = fold_build1_loc (loc, code, type,
7803 fold_convert_loc (loc,
7804 TREE_TYPE (op0), arg02));
7805 tem = fold_build3_loc (loc, COND_EXPR, type, TREE_OPERAND (arg0, 0),
7806 arg01, arg02);
7808 /* If this was a conversion, and all we did was to move it
7809 inside the COND_EXPR, bring it back out. But leave it if
7810 it is a conversion from integer to integer and the
7811 result precision is no wider than a word since such a
7812 conversion is cheap and may be optimized away by combine,
7813 while it couldn't if it were outside the COND_EXPR. Then return
7814 so we don't get into an infinite recursion loop taking the
7815 conversion out and then back in. */
7817 if ((CONVERT_EXPR_CODE_P (code)
7818 || code == NON_LVALUE_EXPR)
7819 && TREE_CODE (tem) == COND_EXPR
7820 && TREE_CODE (TREE_OPERAND (tem, 1)) == code
7821 && TREE_CODE (TREE_OPERAND (tem, 2)) == code
7822 && ! VOID_TYPE_P (TREE_OPERAND (tem, 1))
7823 && ! VOID_TYPE_P (TREE_OPERAND (tem, 2))
7824 && (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 1), 0))
7825 == TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 2), 0)))
7826 && (! (INTEGRAL_TYPE_P (TREE_TYPE (tem))
7827 && (INTEGRAL_TYPE_P
7828 (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 1), 0))))
7829 && TYPE_PRECISION (TREE_TYPE (tem)) <= BITS_PER_WORD)
7830 || flag_syntax_only))
7831 tem = build1_loc (loc, code, type,
7832 build3 (COND_EXPR,
7833 TREE_TYPE (TREE_OPERAND
7834 (TREE_OPERAND (tem, 1), 0)),
7835 TREE_OPERAND (tem, 0),
7836 TREE_OPERAND (TREE_OPERAND (tem, 1), 0),
7837 TREE_OPERAND (TREE_OPERAND (tem, 2),
7838 0)));
7839 return tem;
7843 switch (code)
7845 case PAREN_EXPR:
7846 /* Re-association barriers around constants and other re-association
7847 barriers can be removed. */
7848 if (CONSTANT_CLASS_P (op0)
7849 || TREE_CODE (op0) == PAREN_EXPR)
7850 return fold_convert_loc (loc, type, op0);
7851 return NULL_TREE;
7853 case NON_LVALUE_EXPR:
7854 if (!maybe_lvalue_p (op0))
7855 return fold_convert_loc (loc, type, op0);
7856 return NULL_TREE;
7858 CASE_CONVERT:
7859 case FLOAT_EXPR:
7860 case FIX_TRUNC_EXPR:
7861 if (TREE_TYPE (op0) == type)
7862 return op0;
7864 if (COMPARISON_CLASS_P (op0))
7866 /* If we have (type) (a CMP b) and type is an integral type, return
7867 a new expression involving the new type. Canonicalize
7868 (type) (a CMP b) to (a CMP b) ? (type) true : (type) false for
7869 non-integral type.
7870 Do not fold the result, as that would not simplify further;
7871 folding it again only results in recursion. */
7872 if (TREE_CODE (type) == BOOLEAN_TYPE)
7873 return build2_loc (loc, TREE_CODE (op0), type,
7874 TREE_OPERAND (op0, 0),
7875 TREE_OPERAND (op0, 1));
7876 else if (!INTEGRAL_TYPE_P (type) && !VOID_TYPE_P (type)
7877 && TREE_CODE (type) != VECTOR_TYPE)
7878 return build3_loc (loc, COND_EXPR, type, op0,
7879 constant_boolean_node (true, type),
7880 constant_boolean_node (false, type));
7883 /* Handle cases of two conversions in a row. */
7884 if (CONVERT_EXPR_P (op0))
7886 tree inside_type = TREE_TYPE (TREE_OPERAND (op0, 0));
7887 tree inter_type = TREE_TYPE (op0);
7888 int inside_int = INTEGRAL_TYPE_P (inside_type);
7889 int inside_ptr = POINTER_TYPE_P (inside_type);
7890 int inside_float = FLOAT_TYPE_P (inside_type);
7891 int inside_vec = TREE_CODE (inside_type) == VECTOR_TYPE;
7892 unsigned int inside_prec = TYPE_PRECISION (inside_type);
7893 int inside_unsignedp = TYPE_UNSIGNED (inside_type);
7894 int inter_int = INTEGRAL_TYPE_P (inter_type);
7895 int inter_ptr = POINTER_TYPE_P (inter_type);
7896 int inter_float = FLOAT_TYPE_P (inter_type);
7897 int inter_vec = TREE_CODE (inter_type) == VECTOR_TYPE;
7898 unsigned int inter_prec = TYPE_PRECISION (inter_type);
7899 int inter_unsignedp = TYPE_UNSIGNED (inter_type);
7900 int final_int = INTEGRAL_TYPE_P (type);
7901 int final_ptr = POINTER_TYPE_P (type);
7902 int final_float = FLOAT_TYPE_P (type);
7903 int final_vec = TREE_CODE (type) == VECTOR_TYPE;
7904 unsigned int final_prec = TYPE_PRECISION (type);
7905 int final_unsignedp = TYPE_UNSIGNED (type);
7907 /* In addition to the cases of two conversions in a row
7908 handled below, if we are converting something to its own
7909 type via an object of identical or wider precision, neither
7910 conversion is needed. */
7911 if (TYPE_MAIN_VARIANT (inside_type) == TYPE_MAIN_VARIANT (type)
7912 && (((inter_int || inter_ptr) && final_int)
7913 || (inter_float && final_float))
7914 && inter_prec >= final_prec)
7915 return fold_build1_loc (loc, code, type, TREE_OPERAND (op0, 0));
7917 /* Likewise, if the intermediate and initial types are either both
7918 float or both integer, we don't need the middle conversion if the
7919 former is wider than the latter and doesn't change the signedness
7920 (for integers). Avoid this if the final type is a pointer since
7921 then we sometimes need the middle conversion. Likewise if the
7922 final type has a precision not equal to the size of its mode. */
7923 if (((inter_int && inside_int)
7924 || (inter_float && inside_float)
7925 || (inter_vec && inside_vec))
7926 && inter_prec >= inside_prec
7927 && (inter_float || inter_vec
7928 || inter_unsignedp == inside_unsignedp)
7929 && ! (final_prec != GET_MODE_PRECISION (TYPE_MODE (type))
7930 && TYPE_MODE (type) == TYPE_MODE (inter_type))
7931 && ! final_ptr
7932 && (! final_vec || inter_prec == inside_prec))
7933 return fold_build1_loc (loc, code, type, TREE_OPERAND (op0, 0));
7935 /* If we have a sign-extension of a zero-extended value, we can
7936 replace that by a single zero-extension. Likewise if the
7937 final conversion does not change precision we can drop the
7938 intermediate conversion. */
7939 if (inside_int && inter_int && final_int
7940 && ((inside_prec < inter_prec && inter_prec < final_prec
7941 && inside_unsignedp && !inter_unsignedp)
7942 || final_prec == inter_prec))
7943 return fold_build1_loc (loc, code, type, TREE_OPERAND (op0, 0));
7945 /* Two conversions in a row are not needed unless:
7946 - some conversion is floating-point (overstrict for now), or
7947 - some conversion is a vector (overstrict for now), or
7948 - the intermediate type is narrower than both initial and
7949 final, or
7950 - the intermediate type and innermost type differ in signedness,
7951 and the outermost type is wider than the intermediate, or
7952 - the initial type is a pointer type and the precisions of the
7953 intermediate and final types differ, or
7954 - the final type is a pointer type and the precisions of the
7955 initial and intermediate types differ. */
7956 if (! inside_float && ! inter_float && ! final_float
7957 && ! inside_vec && ! inter_vec && ! final_vec
7958 && (inter_prec >= inside_prec || inter_prec >= final_prec)
7959 && ! (inside_int && inter_int
7960 && inter_unsignedp != inside_unsignedp
7961 && inter_prec < final_prec)
7962 && ((inter_unsignedp && inter_prec > inside_prec)
7963 == (final_unsignedp && final_prec > inter_prec))
7964 && ! (inside_ptr && inter_prec != final_prec)
7965 && ! (final_ptr && inside_prec != inter_prec)
7966 && ! (final_prec != GET_MODE_PRECISION (TYPE_MODE (type))
7967 && TYPE_MODE (type) == TYPE_MODE (inter_type)))
7968 return fold_build1_loc (loc, code, type, TREE_OPERAND (op0, 0));
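/* Concrete instances of the rules above (illustrative, assuming
   32-bit int, 16-bit short and 64-bit long):

     (int) (long) i, with int i   ->  (int) i, since the widening
         middle step cannot affect the final truncation;
     (int) (short) i, with int i  is left alone, since the
         intermediate type is narrower than both endpoints. */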
7971 /* Handle (T *)&A.B.C for A being of type T and B and C
7972 living at offset zero. This occurs frequently in
7973 C++ upcasting and then accessing the base. */
7974 if (TREE_CODE (op0) == ADDR_EXPR
7975 && POINTER_TYPE_P (type)
7976 && handled_component_p (TREE_OPERAND (op0, 0)))
7978 HOST_WIDE_INT bitsize, bitpos;
7979 tree offset;
7980 enum machine_mode mode;
7981 int unsignedp, volatilep;
7982 tree base = TREE_OPERAND (op0, 0);
7983 base = get_inner_reference (base, &bitsize, &bitpos, &offset,
7984 &mode, &unsignedp, &volatilep, false);
7985 /* If the reference was to a (constant) zero offset, we can use
7986 the address of the base if it has the same base type
7987 as the result type and the pointer type is unqualified. */
7988 if (! offset && bitpos == 0
7989 && (TYPE_MAIN_VARIANT (TREE_TYPE (type))
7990 == TYPE_MAIN_VARIANT (TREE_TYPE (base)))
7991 && TYPE_QUALS (type) == TYPE_UNQUALIFIED)
7992 return fold_convert_loc (loc, type,
7993 build_fold_addr_expr_loc (loc, base));
7996 if (TREE_CODE (op0) == MODIFY_EXPR
7997 && TREE_CONSTANT (TREE_OPERAND (op0, 1))
7998 /* Detect assigning a bitfield. */
7999 && !(TREE_CODE (TREE_OPERAND (op0, 0)) == COMPONENT_REF
8000 && DECL_BIT_FIELD
8001 (TREE_OPERAND (TREE_OPERAND (op0, 0), 1))))
8003 /* Don't leave an assignment inside a conversion
8004 unless assigning a bitfield. */
8005 tem = fold_build1_loc (loc, code, type, TREE_OPERAND (op0, 1));
8006 /* First do the assignment, then return converted constant. */
8007 tem = build2_loc (loc, COMPOUND_EXPR, TREE_TYPE (tem), op0, tem);
8008 TREE_NO_WARNING (tem) = 1;
8009 TREE_USED (tem) = 1;
8010 return tem;
8013 /* Convert (T)(x & c) into (T)x & (T)c, if c is an integer
8014 constants (if x has signed type, the sign bit cannot be set
8015 in c). This folds extension into the BIT_AND_EXPR.
8016 ??? We don't do it for BOOLEAN_TYPE or ENUMERAL_TYPE because they
8017 very likely don't have maximal range for their precision and this
8018 transformation effectively doesn't preserve non-maximal ranges. */
8019 if (TREE_CODE (type) == INTEGER_TYPE
8020 && TREE_CODE (op0) == BIT_AND_EXPR
8021 && TREE_CODE (TREE_OPERAND (op0, 1)) == INTEGER_CST)
8023 tree and_expr = op0;
8024 tree and0 = TREE_OPERAND (and_expr, 0);
8025 tree and1 = TREE_OPERAND (and_expr, 1);
8026 int change = 0;
8028 if (TYPE_UNSIGNED (TREE_TYPE (and_expr))
8029 || (TYPE_PRECISION (type)
8030 <= TYPE_PRECISION (TREE_TYPE (and_expr))))
8031 change = 1;
8032 else if (TYPE_PRECISION (TREE_TYPE (and1))
8033 <= HOST_BITS_PER_WIDE_INT
8034 && tree_fits_uhwi_p (and1))
8036 unsigned HOST_WIDE_INT cst;
8038 cst = tree_to_uhwi (and1);
8039 cst &= HOST_WIDE_INT_M1U
8040 << (TYPE_PRECISION (TREE_TYPE (and1)) - 1);
8041 change = (cst == 0);
8042 #ifdef LOAD_EXTEND_OP
8043 if (change
8044 && !flag_syntax_only
8045 && (LOAD_EXTEND_OP (TYPE_MODE (TREE_TYPE (and0)))
8046 == ZERO_EXTEND))
8048 tree uns = unsigned_type_for (TREE_TYPE (and0));
8049 and0 = fold_convert_loc (loc, uns, and0);
8050 and1 = fold_convert_loc (loc, uns, and1);
8052 #endif
8054 if (change)
8056 tem = force_fit_type (type, wi::to_widest (and1), 0,
8057 TREE_OVERFLOW (and1));
8058 return fold_build2_loc (loc, BIT_AND_EXPR, type,
8059 fold_convert_loc (loc, type, and0), tem);
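/* For instance (illustrative): with a signed char X,
   (unsigned int) (X & 0x7f) becomes (unsigned int) X & 0x7f, since
   the mask 0x7f leaves the sign bit clear and the extension can
   therefore be hoisted past the masking. */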
8063 /* Convert (T1)(X p+ Y) into ((T1)X p+ Y), for pointer type,
8064 when one of the new casts will fold away. Conservatively we assume
8065 that this happens when X or Y is NOP_EXPR or Y is INTEGER_CST. */
8066 if (POINTER_TYPE_P (type)
8067 && TREE_CODE (arg0) == POINTER_PLUS_EXPR
8068 && (!TYPE_RESTRICT (type) || TYPE_RESTRICT (TREE_TYPE (arg0)))
8069 && (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
8070 || TREE_CODE (TREE_OPERAND (arg0, 0)) == NOP_EXPR
8071 || TREE_CODE (TREE_OPERAND (arg0, 1)) == NOP_EXPR))
8073 tree arg00 = TREE_OPERAND (arg0, 0);
8074 tree arg01 = TREE_OPERAND (arg0, 1);
8076 return fold_build_pointer_plus_loc
8077 (loc, fold_convert_loc (loc, type, arg00), arg01);
8080 /* Convert (T1)(~(T2)X) into ~(T1)X if T1 and T2 are integral types
8081 of the same precision, and X is an integer type not narrower than
8082 types T1 or T2, i.e. the cast (T2)X isn't an extension. */
8083 if (INTEGRAL_TYPE_P (type)
8084 && TREE_CODE (op0) == BIT_NOT_EXPR
8085 && INTEGRAL_TYPE_P (TREE_TYPE (op0))
8086 && CONVERT_EXPR_P (TREE_OPERAND (op0, 0))
8087 && TYPE_PRECISION (type) == TYPE_PRECISION (TREE_TYPE (op0)))
8089 tem = TREE_OPERAND (TREE_OPERAND (op0, 0), 0);
8090 if (INTEGRAL_TYPE_P (TREE_TYPE (tem))
8091 && TYPE_PRECISION (type) <= TYPE_PRECISION (TREE_TYPE (tem)))
8092 return fold_build1_loc (loc, BIT_NOT_EXPR, type,
8093 fold_convert_loc (loc, type, tem));
8096 /* Convert (T1)(X * Y) into (T1)X * (T1)Y if T1 is narrower than the
8097 type of X and Y (integer types only). */
8098 if (INTEGRAL_TYPE_P (type)
8099 && TREE_CODE (op0) == MULT_EXPR
8100 && INTEGRAL_TYPE_P (TREE_TYPE (op0))
8101 && TYPE_PRECISION (type) < TYPE_PRECISION (TREE_TYPE (op0)))
8103 /* Be careful not to introduce new overflows. */
8104 tree mult_type;
8105 if (TYPE_OVERFLOW_WRAPS (type))
8106 mult_type = type;
8107 else
8108 mult_type = unsigned_type_for (type);
8110 if (TYPE_PRECISION (mult_type) < TYPE_PRECISION (TREE_TYPE (op0)))
8112 tem = fold_build2_loc (loc, MULT_EXPR, mult_type,
8113 fold_convert_loc (loc, mult_type,
8114 TREE_OPERAND (op0, 0)),
8115 fold_convert_loc (loc, mult_type,
8116 TREE_OPERAND (op0, 1)));
8117 return fold_convert_loc (loc, type, tem);
8121 tem = fold_convert_const (code, type, arg0);
8122 return tem ? tem : NULL_TREE;
8124 case ADDR_SPACE_CONVERT_EXPR:
8125 if (integer_zerop (arg0))
8126 return fold_convert_const (code, type, arg0);
8127 return NULL_TREE;
8129 case FIXED_CONVERT_EXPR:
8130 tem = fold_convert_const (code, type, arg0);
8131 return tem ? tem : NULL_TREE;
8133 case VIEW_CONVERT_EXPR:
8134 if (TREE_TYPE (op0) == type)
8135 return op0;
8136 if (TREE_CODE (op0) == VIEW_CONVERT_EXPR)
8137 return fold_build1_loc (loc, VIEW_CONVERT_EXPR,
8138 type, TREE_OPERAND (op0, 0));
8139 if (TREE_CODE (op0) == MEM_REF)
8140 return fold_build2_loc (loc, MEM_REF, type,
8141 TREE_OPERAND (op0, 0), TREE_OPERAND (op0, 1));
8143 /* For integral conversions with the same precision or pointer
8144 conversions use a NOP_EXPR instead. */
8145 if ((INTEGRAL_TYPE_P (type)
8146 || POINTER_TYPE_P (type))
8147 && (INTEGRAL_TYPE_P (TREE_TYPE (op0))
8148 || POINTER_TYPE_P (TREE_TYPE (op0)))
8149 && TYPE_PRECISION (type) == TYPE_PRECISION (TREE_TYPE (op0)))
8150 return fold_convert_loc (loc, type, op0);
8152 /* Strip inner integral conversions that do not change the precision. */
8153 if (CONVERT_EXPR_P (op0)
8154 && (INTEGRAL_TYPE_P (TREE_TYPE (op0))
8155 || POINTER_TYPE_P (TREE_TYPE (op0)))
8156 && (INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (op0, 0)))
8157 || POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (op0, 0))))
8158 && (TYPE_PRECISION (TREE_TYPE (op0))
8159 == TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (op0, 0)))))
8160 return fold_build1_loc (loc, VIEW_CONVERT_EXPR,
8161 type, TREE_OPERAND (op0, 0));
8163 return fold_view_convert_expr (type, op0);
8165 case NEGATE_EXPR:
8166 tem = fold_negate_expr (loc, arg0);
8167 if (tem)
8168 return fold_convert_loc (loc, type, tem);
8169 return NULL_TREE;
8171 case ABS_EXPR:
8172 if (TREE_CODE (arg0) == INTEGER_CST || TREE_CODE (arg0) == REAL_CST)
8173 return fold_abs_const (arg0, type);
8174 else if (TREE_CODE (arg0) == NEGATE_EXPR)
8175 return fold_build1_loc (loc, ABS_EXPR, type, TREE_OPERAND (arg0, 0));
8176 /* Convert fabs((double)float) into (double)fabsf(float). */
8177 else if (TREE_CODE (arg0) == NOP_EXPR
8178 && TREE_CODE (type) == REAL_TYPE)
8180 tree targ0 = strip_float_extensions (arg0);
8181 if (targ0 != arg0)
8182 return fold_convert_loc (loc, type,
8183 fold_build1_loc (loc, ABS_EXPR,
8184 TREE_TYPE (targ0),
8185 targ0));
8187 /* ABS_EXPR<ABS_EXPR<x>> = ABS_EXPR<x> even if flag_wrapv is on. */
8188 else if (TREE_CODE (arg0) == ABS_EXPR)
8189 return arg0;
8190 else if (tree_expr_nonnegative_p (arg0))
8191 return arg0;
8193 /* Strip sign ops from argument. */
8194 if (TREE_CODE (type) == REAL_TYPE)
8196 tem = fold_strip_sign_ops (arg0);
8197 if (tem)
8198 return fold_build1_loc (loc, ABS_EXPR, type,
8199 fold_convert_loc (loc, type, tem));
8201 return NULL_TREE;
8203 case CONJ_EXPR:
8204 if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
8205 return fold_convert_loc (loc, type, arg0);
8206 if (TREE_CODE (arg0) == COMPLEX_EXPR)
8208 tree itype = TREE_TYPE (type);
8209 tree rpart = fold_convert_loc (loc, itype, TREE_OPERAND (arg0, 0));
8210 tree ipart = fold_convert_loc (loc, itype, TREE_OPERAND (arg0, 1));
8211 return fold_build2_loc (loc, COMPLEX_EXPR, type, rpart,
8212 negate_expr (ipart));
8214 if (TREE_CODE (arg0) == COMPLEX_CST)
8216 tree itype = TREE_TYPE (type);
8217 tree rpart = fold_convert_loc (loc, itype, TREE_REALPART (arg0));
8218 tree ipart = fold_convert_loc (loc, itype, TREE_IMAGPART (arg0));
8219 return build_complex (type, rpart, negate_expr (ipart));
8221 if (TREE_CODE (arg0) == CONJ_EXPR)
8222 return fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
8223 return NULL_TREE;
8225 case BIT_NOT_EXPR:
8226 if (TREE_CODE (arg0) == INTEGER_CST)
8227 return fold_not_const (arg0, type);
8228 else if (TREE_CODE (arg0) == BIT_NOT_EXPR)
8229 return fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
8230 /* Convert ~ (-A) to A - 1. */
8231 else if (INTEGRAL_TYPE_P (type) && TREE_CODE (arg0) == NEGATE_EXPR)
8232 return fold_build2_loc (loc, MINUS_EXPR, type,
8233 fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0)),
8234 build_int_cst (type, 1));
8235 /* Convert ~ (A - 1) or ~ (A + -1) to -A. */
8236 else if (INTEGRAL_TYPE_P (type)
8237 && ((TREE_CODE (arg0) == MINUS_EXPR
8238 && integer_onep (TREE_OPERAND (arg0, 1)))
8239 || (TREE_CODE (arg0) == PLUS_EXPR
8240 && integer_all_onesp (TREE_OPERAND (arg0, 1)))))
8241 return fold_build1_loc (loc, NEGATE_EXPR, type,
8242 fold_convert_loc (loc, type,
8243 TREE_OPERAND (arg0, 0)));
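/* Both folds above are instances of the two's complement identity
   ~X == -X - 1; e.g. with A == 5, ~(-A) == 4 == A - 1, and
   ~(A - 1) == -(A - 1) - 1 == -A. */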
8244 /* Convert ~(X ^ Y) to ~X ^ Y or X ^ ~Y if ~X or ~Y simplify. */
8245 else if (TREE_CODE (arg0) == BIT_XOR_EXPR
8246 && (tem = fold_unary_loc (loc, BIT_NOT_EXPR, type,
8247 fold_convert_loc (loc, type,
8248 TREE_OPERAND (arg0, 0)))))
8249 return fold_build2_loc (loc, BIT_XOR_EXPR, type, tem,
8250 fold_convert_loc (loc, type,
8251 TREE_OPERAND (arg0, 1)));
8252 else if (TREE_CODE (arg0) == BIT_XOR_EXPR
8253 && (tem = fold_unary_loc (loc, BIT_NOT_EXPR, type,
8254 fold_convert_loc (loc, type,
8255 TREE_OPERAND (arg0, 1)))))
8256 return fold_build2_loc (loc, BIT_XOR_EXPR, type,
8257 fold_convert_loc (loc, type,
8258 TREE_OPERAND (arg0, 0)), tem);
8259 /* Perform BIT_NOT_EXPR on each element individually. */
8260 else if (TREE_CODE (arg0) == VECTOR_CST)
8262 tree *elements;
8263 tree elem;
8264 unsigned count = VECTOR_CST_NELTS (arg0), i;
8266 elements = XALLOCAVEC (tree, count);
8267 for (i = 0; i < count; i++)
8269 elem = VECTOR_CST_ELT (arg0, i);
8270 elem = fold_unary_loc (loc, BIT_NOT_EXPR, TREE_TYPE (type), elem);
8271 if (elem == NULL_TREE)
8272 break;
8273 elements[i] = elem;
8275 if (i == count)
8276 return build_vector (type, elements);
8278 else if (COMPARISON_CLASS_P (arg0)
8279 && (VECTOR_TYPE_P (type)
8280 || (INTEGRAL_TYPE_P (type) && TYPE_PRECISION (type) == 1)))
8282 tree op_type = TREE_TYPE (TREE_OPERAND (arg0, 0));
8283 enum tree_code subcode = invert_tree_comparison (TREE_CODE (arg0),
8284 HONOR_NANS (TYPE_MODE (op_type)));
8285 if (subcode != ERROR_MARK)
8286 return build2_loc (loc, subcode, type, TREE_OPERAND (arg0, 0),
8287 TREE_OPERAND (arg0, 1));
8291 return NULL_TREE;
8293 case TRUTH_NOT_EXPR:
8294 /* Note that the operand of this must be an int
8295 and its values must be 0 or 1.
8296 ("true" is a fixed value perhaps depending on the language,
8297 but we don't handle values other than 1 correctly yet.) */
8298 tem = fold_truth_not_expr (loc, arg0);
8299 if (!tem)
8300 return NULL_TREE;
8301 return fold_convert_loc (loc, type, tem);
8303 case REALPART_EXPR:
8304 if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
8305 return fold_convert_loc (loc, type, arg0);
8306 if (TREE_CODE (arg0) == COMPLEX_EXPR)
8307 return omit_one_operand_loc (loc, type, TREE_OPERAND (arg0, 0),
8308 TREE_OPERAND (arg0, 1));
8309 if (TREE_CODE (arg0) == COMPLEX_CST)
8310 return fold_convert_loc (loc, type, TREE_REALPART (arg0));
8311 if (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
8313 tree itype = TREE_TYPE (TREE_TYPE (arg0));
8314 tem = fold_build2_loc (loc, TREE_CODE (arg0), itype,
8315 fold_build1_loc (loc, REALPART_EXPR, itype,
8316 TREE_OPERAND (arg0, 0)),
8317 fold_build1_loc (loc, REALPART_EXPR, itype,
8318 TREE_OPERAND (arg0, 1)));
8319 return fold_convert_loc (loc, type, tem);
8321 if (TREE_CODE (arg0) == CONJ_EXPR)
8323 tree itype = TREE_TYPE (TREE_TYPE (arg0));
8324 tem = fold_build1_loc (loc, REALPART_EXPR, itype,
8325 TREE_OPERAND (arg0, 0));
8326 return fold_convert_loc (loc, type, tem);
8328 if (TREE_CODE (arg0) == CALL_EXPR)
8330 tree fn = get_callee_fndecl (arg0);
8331 if (fn && DECL_BUILT_IN_CLASS (fn) == BUILT_IN_NORMAL)
8332 switch (DECL_FUNCTION_CODE (fn))
8334 CASE_FLT_FN (BUILT_IN_CEXPI):
8335 fn = mathfn_built_in (type, BUILT_IN_COS);
8336 if (fn)
8337 return build_call_expr_loc (loc, fn, 1, CALL_EXPR_ARG (arg0, 0));
8338 break;
8340 default:
8341 break;
8344 return NULL_TREE;
8346 case IMAGPART_EXPR:
8347 if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
8348 return build_zero_cst (type);
8349 if (TREE_CODE (arg0) == COMPLEX_EXPR)
8350 return omit_one_operand_loc (loc, type, TREE_OPERAND (arg0, 1),
8351 TREE_OPERAND (arg0, 0));
8352 if (TREE_CODE (arg0) == COMPLEX_CST)
8353 return fold_convert_loc (loc, type, TREE_IMAGPART (arg0));
8354 if (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
8356 tree itype = TREE_TYPE (TREE_TYPE (arg0));
8357 tem = fold_build2_loc (loc, TREE_CODE (arg0), itype,
8358 fold_build1_loc (loc, IMAGPART_EXPR, itype,
8359 TREE_OPERAND (arg0, 0)),
8360 fold_build1_loc (loc, IMAGPART_EXPR, itype,
8361 TREE_OPERAND (arg0, 1)));
8362 return fold_convert_loc (loc, type, tem);
8364 if (TREE_CODE (arg0) == CONJ_EXPR)
8366 tree itype = TREE_TYPE (TREE_TYPE (arg0));
8367 tem = fold_build1_loc (loc, IMAGPART_EXPR, itype, TREE_OPERAND (arg0, 0));
8368 return fold_convert_loc (loc, type, negate_expr (tem));
8370 if (TREE_CODE (arg0) == CALL_EXPR)
8372 tree fn = get_callee_fndecl (arg0);
8373 if (fn && DECL_BUILT_IN_CLASS (fn) == BUILT_IN_NORMAL)
8374 switch (DECL_FUNCTION_CODE (fn))
8376 CASE_FLT_FN (BUILT_IN_CEXPI):
8377 fn = mathfn_built_in (type, BUILT_IN_SIN);
8378 if (fn)
8379 return build_call_expr_loc (loc, fn, 1, CALL_EXPR_ARG (arg0, 0));
8380 break;
8382 default:
8383 break;
8386 return NULL_TREE;
8388 case INDIRECT_REF:
8389 /* Fold *&X to X if X is an lvalue. */
8390 if (TREE_CODE (op0) == ADDR_EXPR)
8392 tree op00 = TREE_OPERAND (op0, 0);
8393 if ((TREE_CODE (op00) == VAR_DECL
8394 || TREE_CODE (op00) == PARM_DECL
8395 || TREE_CODE (op00) == RESULT_DECL)
8396 && !TREE_READONLY (op00))
8397 return op00;
8399 return NULL_TREE;
8401 case VEC_UNPACK_LO_EXPR:
8402 case VEC_UNPACK_HI_EXPR:
8403 case VEC_UNPACK_FLOAT_LO_EXPR:
8404 case VEC_UNPACK_FLOAT_HI_EXPR:
8406 unsigned int nelts = TYPE_VECTOR_SUBPARTS (type), i;
8407 tree *elts;
8408 enum tree_code subcode;
8410 gcc_assert (TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0)) == nelts * 2);
8411 if (TREE_CODE (arg0) != VECTOR_CST)
8412 return NULL_TREE;
8414 elts = XALLOCAVEC (tree, nelts * 2);
8415 if (!vec_cst_ctor_to_array (arg0, elts))
8416 return NULL_TREE;
8418 if ((!BYTES_BIG_ENDIAN) ^ (code == VEC_UNPACK_LO_EXPR
8419 || code == VEC_UNPACK_FLOAT_LO_EXPR))
8420 elts += nelts;
8422 if (code == VEC_UNPACK_LO_EXPR || code == VEC_UNPACK_HI_EXPR)
8423 subcode = NOP_EXPR;
8424 else
8425 subcode = FLOAT_EXPR;
8427 for (i = 0; i < nelts; i++)
8429 elts[i] = fold_convert_const (subcode, TREE_TYPE (type), elts[i]);
8430 if (elts[i] == NULL_TREE || !CONSTANT_CLASS_P (elts[i]))
8431 return NULL_TREE;
8434 return build_vector (type, elts);
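/* E.g. VEC_UNPACK_LO_EXPR on a V4HI constant {1, 2, 3, 4} with a
   V2SI result keeps one half of the elements (which half depends on
   BYTES_BIG_ENDIAN) and widens each one via a NOP conversion. */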
8437 case REDUC_MIN_EXPR:
8438 case REDUC_MAX_EXPR:
8439 case REDUC_PLUS_EXPR:
8441 unsigned int nelts = TYPE_VECTOR_SUBPARTS (type), i;
8442 tree *elts;
8443 enum tree_code subcode;
8445 if (TREE_CODE (op0) != VECTOR_CST)
8446 return NULL_TREE;
8448 elts = XALLOCAVEC (tree, nelts);
8449 if (!vec_cst_ctor_to_array (op0, elts))
8450 return NULL_TREE;
8452 switch (code)
8454 case REDUC_MIN_EXPR: subcode = MIN_EXPR; break;
8455 case REDUC_MAX_EXPR: subcode = MAX_EXPR; break;
8456 case REDUC_PLUS_EXPR: subcode = PLUS_EXPR; break;
8457 default: gcc_unreachable ();
8460 for (i = 1; i < nelts; i++)
8462 elts[0] = const_binop (subcode, elts[0], elts[i]);
8463 if (elts[0] == NULL_TREE || !CONSTANT_CLASS_P (elts[0]))
8464 return NULL_TREE;
8465 elts[i] = build_zero_cst (TREE_TYPE (type));
8468 return build_vector (type, elts);
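/* For example, REDUC_PLUS_EXPR on the constant vector {1, 2, 3, 4}
   folds to {10, 0, 0, 0}: the reduction accumulates into element 0
   and the remaining lanes are zeroed. */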
8471 default:
8472 return NULL_TREE;
8473 } /* switch (code) */
8477 /* If the operation was a conversion do _not_ mark a resulting constant
8478 with TREE_OVERFLOW if the original constant was not. These conversions
8479 have implementation defined behavior and retaining the TREE_OVERFLOW
8480 flag here would confuse later passes such as VRP. */
8481 tree
8482 fold_unary_ignore_overflow_loc (location_t loc, enum tree_code code,
8483 tree type, tree op0)
8485 tree res = fold_unary_loc (loc, code, type, op0);
8486 if (res
8487 && TREE_CODE (res) == INTEGER_CST
8488 && TREE_CODE (op0) == INTEGER_CST
8489 && CONVERT_EXPR_CODE_P (code))
8490 TREE_OVERFLOW (res) = TREE_OVERFLOW (op0);
8492 return res;
8495 /* Fold a binary bitwise/truth expression of code CODE and type TYPE with
8496 operands OP0 and OP1. LOC is the location of the resulting expression.
8497 ARG0 and ARG1 are the results of OP0 and OP1 with NOPs stripped.
8498 Return the folded expression if folding is successful. Otherwise,
8499 return NULL_TREE. */
8500 static tree
8501 fold_truth_andor (location_t loc, enum tree_code code, tree type,
8502 tree arg0, tree arg1, tree op0, tree op1)
8504 tree tem;
8506 /* We only do these simplifications if we are optimizing. */
8507 if (!optimize)
8508 return NULL_TREE;
8510 /* Check for things like (A || B) && (A || C). We can convert this
8511 to A || (B && C). Note that either operator can be any of the four
8512 truth and/or operations and the transformation will still be
8513 valid. Also note that we only care about order for the
8514 ANDIF and ORIF operators. If B contains side effects, this
8515 might change the truth-value of A. */
8516 if (TREE_CODE (arg0) == TREE_CODE (arg1)
8517 && (TREE_CODE (arg0) == TRUTH_ANDIF_EXPR
8518 || TREE_CODE (arg0) == TRUTH_ORIF_EXPR
8519 || TREE_CODE (arg0) == TRUTH_AND_EXPR
8520 || TREE_CODE (arg0) == TRUTH_OR_EXPR)
8521 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (arg0, 1)))
8523 tree a00 = TREE_OPERAND (arg0, 0);
8524 tree a01 = TREE_OPERAND (arg0, 1);
8525 tree a10 = TREE_OPERAND (arg1, 0);
8526 tree a11 = TREE_OPERAND (arg1, 1);
8527 int commutative = ((TREE_CODE (arg0) == TRUTH_OR_EXPR
8528 || TREE_CODE (arg0) == TRUTH_AND_EXPR)
8529 && (code == TRUTH_AND_EXPR
8530 || code == TRUTH_OR_EXPR));
8532 if (operand_equal_p (a00, a10, 0))
8533 return fold_build2_loc (loc, TREE_CODE (arg0), type, a00,
8534 fold_build2_loc (loc, code, type, a01, a11));
8535 else if (commutative && operand_equal_p (a00, a11, 0))
8536 return fold_build2_loc (loc, TREE_CODE (arg0), type, a00,
8537 fold_build2_loc (loc, code, type, a01, a10));
8538 else if (commutative && operand_equal_p (a01, a10, 0))
8539 return fold_build2_loc (loc, TREE_CODE (arg0), type, a01,
8540 fold_build2_loc (loc, code, type, a00, a11));
8542 /* This case is tricky because we must either have commutative
8543 operators or else A10 must not have side-effects. */
8545 else if ((commutative || ! TREE_SIDE_EFFECTS (a10))
8546 && operand_equal_p (a01, a11, 0))
8547 return fold_build2_loc (loc, TREE_CODE (arg0), type,
8548 fold_build2_loc (loc, code, type, a00, a10),
8549 a01);
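/* Illustrative rewrites performed by the block above (assuming the
   operands involved have no side effects):

     (a || b) && (a || c)  ->  a || (b && c)
     (a && b) || (c && b)  ->  (a || c) && b  */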
8552 /* See if we can build a range comparison. */
8553 if (0 != (tem = fold_range_test (loc, code, type, op0, op1)))
8554 return tem;
8556 if ((code == TRUTH_ANDIF_EXPR && TREE_CODE (arg0) == TRUTH_ORIF_EXPR)
8557 || (code == TRUTH_ORIF_EXPR && TREE_CODE (arg0) == TRUTH_ANDIF_EXPR))
8559 tem = merge_truthop_with_opposite_arm (loc, arg0, arg1, true);
8560 if (tem)
8561 return fold_build2_loc (loc, code, type, tem, arg1);
8564 if ((code == TRUTH_ANDIF_EXPR && TREE_CODE (arg1) == TRUTH_ORIF_EXPR)
8565 || (code == TRUTH_ORIF_EXPR && TREE_CODE (arg1) == TRUTH_ANDIF_EXPR))
8567 tem = merge_truthop_with_opposite_arm (loc, arg1, arg0, false);
8568 if (tem)
8569 return fold_build2_loc (loc, code, type, arg0, tem);
8572 /* Check for the possibility of merging component references. If our
8573 lhs is another similar operation, try to merge its rhs with our
8574 rhs. Then try to merge our lhs and rhs. */
8575 if (TREE_CODE (arg0) == code
8576 && 0 != (tem = fold_truth_andor_1 (loc, code, type,
8577 TREE_OPERAND (arg0, 1), arg1)))
8578 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0), tem);
8580 if ((tem = fold_truth_andor_1 (loc, code, type, arg0, arg1)) != 0)
8581 return tem;
8583 if (LOGICAL_OP_NON_SHORT_CIRCUIT
8584 && (code == TRUTH_AND_EXPR
8585 || code == TRUTH_ANDIF_EXPR
8586 || code == TRUTH_OR_EXPR
8587 || code == TRUTH_ORIF_EXPR))
8589 enum tree_code ncode, icode;
8591 ncode = (code == TRUTH_ANDIF_EXPR || code == TRUTH_AND_EXPR)
8592 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR;
8593 icode = ncode == TRUTH_AND_EXPR ? TRUTH_ANDIF_EXPR : TRUTH_ORIF_EXPR;
8595 /* Transform ((A AND-IF B) AND[-IF] C) into (A AND-IF (B AND C)),
8596 or ((A OR-IF B) OR[-IF] C) into (A OR-IF (B OR C)).
8597 We don't want to pack more than two leaves into a non-IF AND/OR
8598 expression.
8599 If the tree code of the left-hand operand isn't an AND/OR-IF code
8600 and isn't equal to IF-CODE, then we don't want to add the
8601 right-hand operand. If the inner right-hand side of the
8602 left-hand operand has side-effects, or isn't simple, then we
8603 can't add to it, as otherwise we might destroy the if-sequence. */
8604 if (TREE_CODE (arg0) == icode
8605 && simple_operand_p_2 (arg1)
8606 /* Needed for sequence points to handle trappings, and
8607 side-effects. */
8608 && simple_operand_p_2 (TREE_OPERAND (arg0, 1)))
8610 tem = fold_build2_loc (loc, ncode, type, TREE_OPERAND (arg0, 1),
8611 arg1);
8612 return fold_build2_loc (loc, icode, type, TREE_OPERAND (arg0, 0),
8613 tem);
8615 /* Same as above but for (A AND[-IF] (B AND-IF C)) -> ((A AND B) AND-IF C),
8616 or (A OR[-IF] (B OR-IF C)) -> ((A OR B) OR-IF C). */
8617 else if (TREE_CODE (arg1) == icode
8618 && simple_operand_p_2 (arg0)
8619 /* Needed for sequence points to handle trappings, and
8620 side-effects. */
8621 && simple_operand_p_2 (TREE_OPERAND (arg1, 0)))
8623 tem = fold_build2_loc (loc, ncode, type,
8624 arg0, TREE_OPERAND (arg1, 0));
8625 return fold_build2_loc (loc, icode, type, tem,
8626 TREE_OPERAND (arg1, 1));
8628 /* Transform (A AND-IF B) into (A AND B), or (A OR-IF B)
8629 into (A OR B).
8630 For sequence point consistency, we need to check for trapping,
8631 and side-effects. */
8632 else if (code == icode && simple_operand_p_2 (arg0)
8633 && simple_operand_p_2 (arg1))
8634 return fold_build2_loc (loc, ncode, type, arg0, arg1);
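/* For example, on targets where LOGICAL_OP_NON_SHORT_CIRCUIT holds,
   (a ANDIF b) ANDIF c becomes a ANDIF (b AND c), and a ANDIF b by
   itself becomes a AND b, provided the packed operands are simple
   and cannot trap. */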
8637 return NULL_TREE;
8640 /* Fold a binary expression of code CODE and type TYPE with operands
8641 OP0 and OP1, containing either a MIN-MAX or a MAX-MIN combination.
8642 Return the folded expression if folding is successful. Otherwise,
8643 return NULL_TREE. */
8645 static tree
8646 fold_minmax (location_t loc, enum tree_code code, tree type, tree op0, tree op1)
8648 enum tree_code compl_code;
8650 if (code == MIN_EXPR)
8651 compl_code = MAX_EXPR;
8652 else if (code == MAX_EXPR)
8653 compl_code = MIN_EXPR;
8654 else
8655 gcc_unreachable ();
8657 /* MIN (MAX (a, b), b) == b. */
8658 if (TREE_CODE (op0) == compl_code
8659 && operand_equal_p (TREE_OPERAND (op0, 1), op1, 0))
8660 return omit_one_operand_loc (loc, type, op1, TREE_OPERAND (op0, 0));
8662 /* MIN (MAX (b, a), b) == b. */
8663 if (TREE_CODE (op0) == compl_code
8664 && operand_equal_p (TREE_OPERAND (op0, 0), op1, 0)
8665 && reorder_operands_p (TREE_OPERAND (op0, 1), op1))
8666 return omit_one_operand_loc (loc, type, op1, TREE_OPERAND (op0, 1));
8668 /* MIN (a, MAX (a, b)) == a. */
8669 if (TREE_CODE (op1) == compl_code
8670 && operand_equal_p (op0, TREE_OPERAND (op1, 0), 0)
8671 && reorder_operands_p (op0, TREE_OPERAND (op1, 1)))
8672 return omit_one_operand_loc (loc, type, op0, TREE_OPERAND (op1, 1));
8674 /* MIN (a, MAX (b, a)) == a. */
8675 if (TREE_CODE (op1) == compl_code
8676 && operand_equal_p (op0, TREE_OPERAND (op1, 1), 0)
8677 && reorder_operands_p (op0, TREE_OPERAND (op1, 0)))
8678 return omit_one_operand_loc (loc, type, op0, TREE_OPERAND (op1, 0));
8680 return NULL_TREE;
8683 /* Helper that tries to canonicalize the comparison ARG0 CODE ARG1
8684 by changing CODE to reduce the magnitude of constants involved in
8685 ARG0 of the comparison.
8686 Returns a canonicalized comparison tree if a simplification was
8687 possible, otherwise returns NULL_TREE.
8688 Set *STRICT_OVERFLOW_P to true if the canonicalization is only
8689 valid if signed overflow is undefined. */
8691 static tree
8692 maybe_canonicalize_comparison_1 (location_t loc, enum tree_code code, tree type,
8693 tree arg0, tree arg1,
8694 bool *strict_overflow_p)
8696 enum tree_code code0 = TREE_CODE (arg0);
8697 tree t, cst0 = NULL_TREE;
8698 int sgn0;
8699 bool swap = false;
8701 /* Match A +- CST code arg1 and CST code arg1. We can change the
8702 first form only if overflow is undefined. */
8703 if (!((TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0))
8704 /* In principle pointers also have undefined overflow behavior,
8705 but that causes problems elsewhere. */
8706 && !POINTER_TYPE_P (TREE_TYPE (arg0))
8707 && (code0 == MINUS_EXPR
8708 || code0 == PLUS_EXPR)
8709 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
8710 || code0 == INTEGER_CST))
8711 return NULL_TREE;
8713 /* Identify the constant in arg0 and its sign. */
8714 if (code0 == INTEGER_CST)
8715 cst0 = arg0;
8716 else
8717 cst0 = TREE_OPERAND (arg0, 1);
8718 sgn0 = tree_int_cst_sgn (cst0);
8720 /* Overflowed constants and zero will cause problems. */
8721 if (integer_zerop (cst0)
8722 || TREE_OVERFLOW (cst0))
8723 return NULL_TREE;
8725 /* See if we can reduce the magnitude of the constant in
8726 arg0 by changing the comparison code. */
8727 if (code0 == INTEGER_CST)
8729 /* CST <= arg1 -> CST-1 < arg1. */
8730 if (code == LE_EXPR && sgn0 == 1)
8731 code = LT_EXPR;
8732 /* -CST < arg1 -> -CST-1 <= arg1. */
8733 else if (code == LT_EXPR && sgn0 == -1)
8734 code = LE_EXPR;
8735 /* CST > arg1 -> CST-1 >= arg1. */
8736 else if (code == GT_EXPR && sgn0 == 1)
8737 code = GE_EXPR;
8738 /* -CST >= arg1 -> -CST-1 > arg1. */
8739 else if (code == GE_EXPR && sgn0 == -1)
8740 code = GT_EXPR;
8741 else
8742 return NULL_TREE;
8743 /* arg1 code' CST' might be more canonical. */
8744 swap = true;
8746 else
8748 /* A - CST < arg1 -> A - CST-1 <= arg1. */
8749 if (code == LT_EXPR
8750 && code0 == ((sgn0 == -1) ? PLUS_EXPR : MINUS_EXPR))
8751 code = LE_EXPR;
8752 /* A + CST > arg1 -> A + CST-1 >= arg1. */
8753 else if (code == GT_EXPR
8754 && code0 == ((sgn0 == -1) ? MINUS_EXPR : PLUS_EXPR))
8755 code = GE_EXPR;
8756 /* A + CST <= arg1 -> A + CST-1 < arg1. */
8757 else if (code == LE_EXPR
8758 && code0 == ((sgn0 == -1) ? MINUS_EXPR : PLUS_EXPR))
8759 code = LT_EXPR;
8760 /* A - CST >= arg1 -> A - CST-1 > arg1. */
8761 else if (code == GE_EXPR
8762 && code0 == ((sgn0 == -1) ? PLUS_EXPR : MINUS_EXPR))
8763 code = GT_EXPR;
8764 else
8765 return NULL_TREE;
8766 *strict_overflow_p = true;
8769 /* Now build the constant reduced in magnitude. But not if that
8770 would produce one outside of its type's range. */
8771 if (INTEGRAL_TYPE_P (TREE_TYPE (cst0))
8772 && ((sgn0 == 1
8773 && TYPE_MIN_VALUE (TREE_TYPE (cst0))
8774 && tree_int_cst_equal (cst0, TYPE_MIN_VALUE (TREE_TYPE (cst0))))
8775 || (sgn0 == -1
8776 && TYPE_MAX_VALUE (TREE_TYPE (cst0))
8777 && tree_int_cst_equal (cst0, TYPE_MAX_VALUE (TREE_TYPE (cst0))))))
8778 /* We cannot swap the comparison here as that would cause us to
8779 endlessly recurse. */
8780 return NULL_TREE;
8782 t = int_const_binop (sgn0 == -1 ? PLUS_EXPR : MINUS_EXPR,
8783 cst0, build_int_cst (TREE_TYPE (cst0), 1));
8784 if (code0 != INTEGER_CST)
8785 t = fold_build2_loc (loc, code0, TREE_TYPE (arg0), TREE_OPERAND (arg0, 0), t);
8786 t = fold_convert (TREE_TYPE (arg1), t);
8788 /* If swapping might yield a more canonical form, do so. */
8789 if (swap)
8790 return fold_build2_loc (loc, swap_tree_comparison (code), type, arg1, t);
8791 else
8792 return fold_build2_loc (loc, code, type, t, arg1);
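/* Worked example (only valid when signed overflow is undefined):
   the comparison  x + 2 > y  is canonicalized to  x + 1 >= y,
   reducing the magnitude of the constant by one. */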
8795 /* Canonicalize the comparison ARG0 CODE ARG1 with type TYPE with undefined
8796 overflow further. Try to decrease the magnitude of constants involved
8797 by changing LE_EXPR and GE_EXPR to LT_EXPR and GT_EXPR or vice versa
8798 and put sole constants at the second argument position.
8799 Returns the canonicalized tree if changed, otherwise NULL_TREE. */
8801 static tree
8802 maybe_canonicalize_comparison (location_t loc, enum tree_code code, tree type,
8803 tree arg0, tree arg1)
8805 tree t;
8806 bool strict_overflow_p;
8807 const char * const warnmsg = G_("assuming signed overflow does not occur "
8808 "when reducing constant in comparison");
8810 /* Try canonicalization by simplifying arg0. */
8811 strict_overflow_p = false;
8812 t = maybe_canonicalize_comparison_1 (loc, code, type, arg0, arg1,
8813 &strict_overflow_p);
8814 if (t)
8816 if (strict_overflow_p)
8817 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_MAGNITUDE);
8818 return t;
8821 /* Try canonicalization by simplifying arg1 using the swapped
8822 comparison. */
8823 code = swap_tree_comparison (code);
8824 strict_overflow_p = false;
8825 t = maybe_canonicalize_comparison_1 (loc, code, type, arg1, arg0,
8826 &strict_overflow_p);
8827 if (t && strict_overflow_p)
8828 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_MAGNITUDE);
8829 return t;
8832 /* Return whether BASE + OFFSET + BITPOS may wrap around the address
8833 space. This is used to avoid issuing overflow warnings for
8834 expressions like &p->x which cannot wrap. */
8836 static bool
8837 pointer_may_wrap_p (tree base, tree offset, HOST_WIDE_INT bitpos)
8839 if (!POINTER_TYPE_P (TREE_TYPE (base)))
8840 return true;
8842 if (bitpos < 0)
8843 return true;
8845 wide_int wi_offset;
8846 int precision = TYPE_PRECISION (TREE_TYPE (base));
8847 if (offset == NULL_TREE)
8848 wi_offset = wi::zero (precision);
8849 else if (TREE_CODE (offset) != INTEGER_CST || TREE_OVERFLOW (offset))
8850 return true;
8851 else
8852 wi_offset = offset;
8854 bool overflow;
8855 wide_int units = wi::shwi (bitpos / BITS_PER_UNIT, precision);
8856 wide_int total = wi::add (wi_offset, units, UNSIGNED, &overflow);
8857 if (overflow)
8858 return true;
8860 if (!wi::fits_uhwi_p (total))
8861 return true;
8863 HOST_WIDE_INT size = int_size_in_bytes (TREE_TYPE (TREE_TYPE (base)));
8864 if (size <= 0)
8865 return true;
8867 /* We can do slightly better for SIZE if we have an ADDR_EXPR of an
8868 array. */
8869 if (TREE_CODE (base) == ADDR_EXPR)
8871 HOST_WIDE_INT base_size;
8873 base_size = int_size_in_bytes (TREE_TYPE (TREE_OPERAND (base, 0)));
8874 if (base_size > 0 && size < base_size)
8875 size = base_size;
8878 return total.to_uhwi () > (unsigned HOST_WIDE_INT) size;
8881 /* Return the HOST_WIDE_INT least significant bits of T, an INTEGER_CST
8882 of sizetype kind. This makes sure to properly sign-extend the
8883 constant. */
8885 static HOST_WIDE_INT
8886 size_low_cst (const_tree t)
8888 HOST_WIDE_INT w = TREE_INT_CST_ELT (t, 0);
8889 int prec = TYPE_PRECISION (TREE_TYPE (t));
8890 if (prec < HOST_BITS_PER_WIDE_INT)
8891 return sext_hwi (w, prec);
8892 return w;
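/* E.g. with a 32-bit sizetype on a 64-bit host, the INTEGER_CST
   0xffffffff is returned as -1 rather than 4294967295, since the
   low bits are sign-extended to the full HOST_WIDE_INT. */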
8895 /* Subroutine of fold_binary. This routine performs all of the
8896 transformations that are common to the equality/inequality
8897 operators (EQ_EXPR and NE_EXPR) and the ordering operators
8898 (LT_EXPR, LE_EXPR, GE_EXPR and GT_EXPR). Callers other than
8899 fold_binary should call fold_binary. Fold a comparison with
8900 tree code CODE and type TYPE with operands OP0 and OP1. Return
8901 the folded comparison or NULL_TREE. */
8903 static tree
8904 fold_comparison (location_t loc, enum tree_code code, tree type,
8905 tree op0, tree op1)
8907 tree arg0, arg1, tem;
8909 arg0 = op0;
8910 arg1 = op1;
8912 STRIP_SIGN_NOPS (arg0);
8913 STRIP_SIGN_NOPS (arg1);
8915 tem = fold_relational_const (code, type, arg0, arg1);
8916 if (tem != NULL_TREE)
8917 return tem;
8919 /* If one arg is a real or integer constant, put it last. */
8920 if (tree_swap_operands_p (arg0, arg1, true))
8921 return fold_build2_loc (loc, swap_tree_comparison (code), type, op1, op0);
8923 /* Transform comparisons of the form X +- C1 CMP C2 to X CMP C2 +- C1. */
8924 if ((TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
8925 && (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
8926 && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1))
8927 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
8928 && (TREE_CODE (arg1) == INTEGER_CST
8929 && !TREE_OVERFLOW (arg1)))
8931 tree const1 = TREE_OPERAND (arg0, 1);
8932 tree const2 = arg1;
8933 tree variable = TREE_OPERAND (arg0, 0);
8934 tree lhs;
8935 int lhs_add;
8936 lhs_add = TREE_CODE (arg0) != PLUS_EXPR;
8938 lhs = fold_build2_loc (loc, lhs_add ? PLUS_EXPR : MINUS_EXPR,
8939 TREE_TYPE (arg1), const2, const1);
8941 /* If the constant operation overflowed, this can be
8942 simplified as a comparison against INT_MAX/INT_MIN. */
8943 if (TREE_CODE (lhs) == INTEGER_CST
8944 && TREE_OVERFLOW (lhs))
8946 int const1_sgn = tree_int_cst_sgn (const1);
8947 enum tree_code code2 = code;
8949 /* Get the sign of the constant on the lhs if the
8950 operation were VARIABLE + CONST1. */
8951 if (TREE_CODE (arg0) == MINUS_EXPR)
8952 const1_sgn = -const1_sgn;
8954 /* The sign of the constant determines if we overflowed
8955 INT_MAX (const1_sgn == -1) or INT_MIN (const1_sgn == 1).
8956 Canonicalize to the INT_MIN overflow by swapping the comparison
8957 if necessary. */
8958 if (const1_sgn == -1)
8959 code2 = swap_tree_comparison (code);
8961 /* We now can look at the canonicalized case
8962 VARIABLE + 1 CODE2 INT_MIN
8963 and decide on the result. */
8964 if (code2 == LT_EXPR
8965 || code2 == LE_EXPR
8966 || code2 == EQ_EXPR)
8967 return omit_one_operand_loc (loc, type, boolean_false_node, variable);
8968 else if (code2 == NE_EXPR
8969 || code2 == GE_EXPR
8970 || code2 == GT_EXPR)
8971 return omit_one_operand_loc (loc, type, boolean_true_node, variable);
8974 if (TREE_CODE (lhs) == TREE_CODE (arg1)
8975 && (TREE_CODE (lhs) != INTEGER_CST
8976 || !TREE_OVERFLOW (lhs)))
8978 if (code != EQ_EXPR && code != NE_EXPR)
8979 fold_overflow_warning ("assuming signed overflow does not occur "
8980 "when changing X +- C1 cmp C2 to "
8981 "X cmp C1 +- C2",
8982 WARN_STRICT_OVERFLOW_COMPARISON);
8983 return fold_build2_loc (loc, code, type, variable, lhs);
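/* Editor's illustration (assumes signed overflow is undefined, i.e. no
   -fwrapv): the source-level effect of the transform above.  */
#include <limits.h>

int cmp1 (int x) { return x + 10 < 20; }    /* folds to  x < 10    */
int cmp2 (int x) { return x - 5 >= 100; }   /* folds to  x >= 105  */
/* Here C2 - C1 overflows INT_MIN, so the whole test is known false.  */
int cmp3 (int x) { return x + 10 < INT_MIN + 5; }   /* folds to  0  */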
8987 /* For comparisons of pointers we can decompose it to a compile time
8988 comparison of the base objects and the offsets into the object.
8989 This requires at least one operand being an ADDR_EXPR or a
8990 POINTER_PLUS_EXPR to do more than the operand_equal_p test below. */
8991 if (POINTER_TYPE_P (TREE_TYPE (arg0))
8992 && (TREE_CODE (arg0) == ADDR_EXPR
8993 || TREE_CODE (arg1) == ADDR_EXPR
8994 || TREE_CODE (arg0) == POINTER_PLUS_EXPR
8995 || TREE_CODE (arg1) == POINTER_PLUS_EXPR))
8997 tree base0, base1, offset0 = NULL_TREE, offset1 = NULL_TREE;
8998 HOST_WIDE_INT bitsize, bitpos0 = 0, bitpos1 = 0;
8999 enum machine_mode mode;
9000 int volatilep, unsignedp;
9001 bool indirect_base0 = false, indirect_base1 = false;
9003 /* Get base and offset for the access. Strip ADDR_EXPR for
9004 get_inner_reference, but put it back by stripping INDIRECT_REF
9005 off the base object if possible. indirect_baseN will be true
9006 if baseN is not an address but refers to the object itself. */
9007 base0 = arg0;
9008 if (TREE_CODE (arg0) == ADDR_EXPR)
9010 base0 = get_inner_reference (TREE_OPERAND (arg0, 0),
9011 &bitsize, &bitpos0, &offset0, &mode,
9012 &unsignedp, &volatilep, false);
9013 if (TREE_CODE (base0) == INDIRECT_REF)
9014 base0 = TREE_OPERAND (base0, 0);
9015 else
9016 indirect_base0 = true;
9018 else if (TREE_CODE (arg0) == POINTER_PLUS_EXPR)
9020 base0 = TREE_OPERAND (arg0, 0);
9021 STRIP_SIGN_NOPS (base0);
9022 if (TREE_CODE (base0) == ADDR_EXPR)
9024 base0 = TREE_OPERAND (base0, 0);
9025 indirect_base0 = true;
9027 offset0 = TREE_OPERAND (arg0, 1);
9028 if (tree_fits_shwi_p (offset0))
9030 HOST_WIDE_INT off = size_low_cst (offset0);
9031 if ((HOST_WIDE_INT) (((unsigned HOST_WIDE_INT) off)
9032 * BITS_PER_UNIT)
9033 / BITS_PER_UNIT == (HOST_WIDE_INT) off)
9035 bitpos0 = off * BITS_PER_UNIT;
9036 offset0 = NULL_TREE;
9041 base1 = arg1;
9042 if (TREE_CODE (arg1) == ADDR_EXPR)
9044 base1 = get_inner_reference (TREE_OPERAND (arg1, 0),
9045 &bitsize, &bitpos1, &offset1, &mode,
9046 &unsignedp, &volatilep, false);
9047 if (TREE_CODE (base1) == INDIRECT_REF)
9048 base1 = TREE_OPERAND (base1, 0);
9049 else
9050 indirect_base1 = true;
9052 else if (TREE_CODE (arg1) == POINTER_PLUS_EXPR)
9054 base1 = TREE_OPERAND (arg1, 0);
9055 STRIP_SIGN_NOPS (base1);
9056 if (TREE_CODE (base1) == ADDR_EXPR)
9058 base1 = TREE_OPERAND (base1, 0);
9059 indirect_base1 = true;
9061 offset1 = TREE_OPERAND (arg1, 1);
9062 if (tree_fits_shwi_p (offset1))
9064 HOST_WIDE_INT off = size_low_cst (offset1);
9065 if ((HOST_WIDE_INT) (((unsigned HOST_WIDE_INT) off)
9066 * BITS_PER_UNIT)
9067 / BITS_PER_UNIT == (HOST_WIDE_INT) off)
9069 bitpos1 = off * BITS_PER_UNIT;
9070 offset1 = NULL_TREE;
9075 /* A local variable can never be pointed to by
9076 the default SSA name of an incoming parameter. */
9077 if ((TREE_CODE (arg0) == ADDR_EXPR
9078 && indirect_base0
9079 && TREE_CODE (base0) == VAR_DECL
9080 && auto_var_in_fn_p (base0, current_function_decl)
9081 && !indirect_base1
9082 && TREE_CODE (base1) == SSA_NAME
9083 && SSA_NAME_IS_DEFAULT_DEF (base1)
9084 && TREE_CODE (SSA_NAME_VAR (base1)) == PARM_DECL)
9085 || (TREE_CODE (arg1) == ADDR_EXPR
9086 && indirect_base1
9087 && TREE_CODE (base1) == VAR_DECL
9088 && auto_var_in_fn_p (base1, current_function_decl)
9089 && !indirect_base0
9090 && TREE_CODE (base0) == SSA_NAME
9091 && SSA_NAME_IS_DEFAULT_DEF (base0)
9092 && TREE_CODE (SSA_NAME_VAR (base0)) == PARM_DECL))
9094 if (code == NE_EXPR)
9095 return constant_boolean_node (1, type);
9096 else if (code == EQ_EXPR)
9097 return constant_boolean_node (0, type);
9099 /* If we have equivalent bases we might be able to simplify. */
9100 else if (indirect_base0 == indirect_base1
9101 && operand_equal_p (base0, base1, 0))
9103 /* We can fold this expression to a constant if the non-constant
9104 offset parts are equal. */
9105 if ((offset0 == offset1
9106 || (offset0 && offset1
9107 && operand_equal_p (offset0, offset1, 0)))
9108 && (code == EQ_EXPR
9109 || code == NE_EXPR
9110 || (indirect_base0 && DECL_P (base0))
9111 || POINTER_TYPE_OVERFLOW_UNDEFINED))
9114 if (code != EQ_EXPR
9115 && code != NE_EXPR
9116 && bitpos0 != bitpos1
9117 && (pointer_may_wrap_p (base0, offset0, bitpos0)
9118 || pointer_may_wrap_p (base1, offset1, bitpos1)))
9119 fold_overflow_warning (("assuming pointer wraparound does not "
9120 "occur when comparing P +- C1 with "
9121 "P +- C2"),
9122 WARN_STRICT_OVERFLOW_CONDITIONAL);
9124 switch (code)
9126 case EQ_EXPR:
9127 return constant_boolean_node (bitpos0 == bitpos1, type);
9128 case NE_EXPR:
9129 return constant_boolean_node (bitpos0 != bitpos1, type);
9130 case LT_EXPR:
9131 return constant_boolean_node (bitpos0 < bitpos1, type);
9132 case LE_EXPR:
9133 return constant_boolean_node (bitpos0 <= bitpos1, type);
9134 case GE_EXPR:
9135 return constant_boolean_node (bitpos0 >= bitpos1, type);
9136 case GT_EXPR:
9137 return constant_boolean_node (bitpos0 > bitpos1, type);
9138 default:;
9141 /* We can simplify the comparison to a comparison of the variable
9142 offset parts if the constant offset parts are equal.
9143 Be careful to use signed sizetype here because otherwise we
9144 mess with array offsets in the wrong way. This is possible
9145 because pointer arithmetic is restricted to remain within an
9146 object and overflow on pointer differences is undefined as of
9147 6.5.6/8 and /9 with respect to the signed ptrdiff_t. */
9148 else if (bitpos0 == bitpos1
9149 && ((code == EQ_EXPR || code == NE_EXPR)
9150 || (indirect_base0 && DECL_P (base0))
9151 || POINTER_TYPE_OVERFLOW_UNDEFINED))
9153 /* By converting to signed sizetype we cover middle-end pointer
9154 arithmetic which operates on unsigned pointer types of size
9155 type size and ARRAY_REF offsets which are properly sign or
9156 zero extended from their type in case it is narrower than
9157 sizetype. */
9158 if (offset0 == NULL_TREE)
9159 offset0 = build_int_cst (ssizetype, 0);
9160 else
9161 offset0 = fold_convert_loc (loc, ssizetype, offset0);
9162 if (offset1 == NULL_TREE)
9163 offset1 = build_int_cst (ssizetype, 0);
9164 else
9165 offset1 = fold_convert_loc (loc, ssizetype, offset1);
9167 if (code != EQ_EXPR
9168 && code != NE_EXPR
9169 && (pointer_may_wrap_p (base0, offset0, bitpos0)
9170 || pointer_may_wrap_p (base1, offset1, bitpos1)))
9171 fold_overflow_warning (("assuming pointer wraparound does not "
9172 "occur when comparing P +- C1 with "
9173 "P +- C2"),
9174 WARN_STRICT_OVERFLOW_COMPARISON);
9176 return fold_build2_loc (loc, code, type, offset0, offset1);
9179 /* For non-equal bases we can simplify if they are addresses
9180 of local binding decls or constants. */
9181 else if (indirect_base0 && indirect_base1
9182 /* We know that !operand_equal_p (base0, base1, 0)
9183 because the if condition was false. But make
9184 sure two decls are not the same. */
9185 && base0 != base1
9186 && TREE_CODE (arg0) == ADDR_EXPR
9187 && TREE_CODE (arg1) == ADDR_EXPR
9188 && (((TREE_CODE (base0) == VAR_DECL
9189 || TREE_CODE (base0) == PARM_DECL)
9190 && (targetm.binds_local_p (base0)
9191 || CONSTANT_CLASS_P (base1)))
9192 || CONSTANT_CLASS_P (base0))
9193 && (((TREE_CODE (base1) == VAR_DECL
9194 || TREE_CODE (base1) == PARM_DECL)
9195 && (targetm.binds_local_p (base1)
9196 || CONSTANT_CLASS_P (base0)))
9197 || CONSTANT_CLASS_P (base1)))
9199 if (code == EQ_EXPR)
9200 return omit_two_operands_loc (loc, type, boolean_false_node,
9201 arg0, arg1);
9202 else if (code == NE_EXPR)
9203 return omit_two_operands_loc (loc, type, boolean_true_node,
9204 arg0, arg1);
9206 /* For equal offsets we can simplify to a comparison of the
9207 base addresses. */
9208 else if (bitpos0 == bitpos1
9209 && (indirect_base0
9210 ? base0 != TREE_OPERAND (arg0, 0) : base0 != arg0)
9211 && (indirect_base1
9212 ? base1 != TREE_OPERAND (arg1, 0) : base1 != arg1)
9213 && ((offset0 == offset1)
9214 || (offset0 && offset1
9215 && operand_equal_p (offset0, offset1, 0))))
9217 if (indirect_base0)
9218 base0 = build_fold_addr_expr_loc (loc, base0);
9219 if (indirect_base1)
9220 base1 = build_fold_addr_expr_loc (loc, base1);
9221 return fold_build2_loc (loc, code, type, base0, base1);
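/* Editor's illustration: comparisons with a common base decompose into a
   comparison of the constant byte offsets and fold at compile time.  */
static int a[8];
int lt_in_array (void) { return &a[1] < &a[5]; }   /* folds to 1 */
int eq_members (void)
{
  static struct { int x; int y; } s;
  return &s.x == &s.y;                             /* folds to 0 */
}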
9225 /* Transform comparisons of the form X +- C1 CMP Y +- C2 to
9226 X CMP Y +- C2 +- C1 for signed X, Y. This is valid if
9227 the resulting offset is smaller in absolute value than the
9228 original one. */
9229 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0))
9230 && (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
9231 && (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
9232 && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1)))
9233 && (TREE_CODE (arg1) == PLUS_EXPR || TREE_CODE (arg1) == MINUS_EXPR)
9234 && (TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
9235 && !TREE_OVERFLOW (TREE_OPERAND (arg1, 1))))
9237 tree const1 = TREE_OPERAND (arg0, 1);
9238 tree const2 = TREE_OPERAND (arg1, 1);
9239 tree variable1 = TREE_OPERAND (arg0, 0);
9240 tree variable2 = TREE_OPERAND (arg1, 0);
9241 tree cst;
9242 const char * const warnmsg = G_("assuming signed overflow does not "
9243 "occur when combining constants around "
9244 "a comparison");
9246 /* Put the constant on the side where it doesn't overflow and is
9247 of lower absolute value than before. */
9248 cst = int_const_binop (TREE_CODE (arg0) == TREE_CODE (arg1)
9249 ? MINUS_EXPR : PLUS_EXPR,
9250 const2, const1);
9251 if (!TREE_OVERFLOW (cst)
9252 && tree_int_cst_compare (const2, cst) == tree_int_cst_sgn (const2))
9254 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_COMPARISON);
9255 return fold_build2_loc (loc, code, type,
9256 variable1,
9257 fold_build2_loc (loc,
9258 TREE_CODE (arg1), TREE_TYPE (arg1),
9259 variable2, cst));
9262 cst = int_const_binop (TREE_CODE (arg0) == TREE_CODE (arg1)
9263 ? MINUS_EXPR : PLUS_EXPR,
9264 const1, const2);
9265 if (!TREE_OVERFLOW (cst)
9266 && tree_int_cst_compare (const1, cst) == tree_int_cst_sgn (const1))
9268 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_COMPARISON);
9269 return fold_build2_loc (loc, code, type,
9270 fold_build2_loc (loc, TREE_CODE (arg0), TREE_TYPE (arg0),
9271 variable1, cst),
9272 variable2);
9276 /* Transform comparisons of the form X * C1 CMP 0 to X CMP 0 in the
9277 signed arithmetic case. That form is created by the compiler
9278 often enough for folding it to be of value. One example is in
9279 computing loop trip counts after Operator Strength Reduction. */
9280 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0))
9281 && TREE_CODE (arg0) == MULT_EXPR
9282 && (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
9283 && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1)))
9284 && integer_zerop (arg1))
9286 tree const1 = TREE_OPERAND (arg0, 1);
9287 tree const2 = arg1; /* zero */
9288 tree variable1 = TREE_OPERAND (arg0, 0);
9289 enum tree_code cmp_code = code;
9291 /* Handle unfolded multiplication by zero. */
9292 if (integer_zerop (const1))
9293 return fold_build2_loc (loc, cmp_code, type, const1, const2);
9295 fold_overflow_warning (("assuming signed overflow does not occur when "
9296 "eliminating multiplication in comparison "
9297 "with zero"),
9298 WARN_STRICT_OVERFLOW_COMPARISON);
9300 /* If const1 is negative we swap the sense of the comparison. */
9301 if (tree_int_cst_sgn (const1) < 0)
9302 cmp_code = swap_tree_comparison (cmp_code);
9304 return fold_build2_loc (loc, cmp_code, type, variable1, const2);
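/* Editor's illustration (again assuming signed overflow is undefined):
   a nonzero constant factor cannot change whether the product compares
   against zero, only the direction when the constant is negative.  */
int sgn1 (int x) { return x * 4 > 0; }    /* folds to  x > 0  */
int sgn2 (int x) { return x * -2 > 0; }   /* folds to  x < 0  */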
9307 tem = maybe_canonicalize_comparison (loc, code, type, arg0, arg1);
9308 if (tem)
9309 return tem;
9311 if (FLOAT_TYPE_P (TREE_TYPE (arg0)))
9313 tree targ0 = strip_float_extensions (arg0);
9314 tree targ1 = strip_float_extensions (arg1);
9315 tree newtype = TREE_TYPE (targ0);
9317 if (TYPE_PRECISION (TREE_TYPE (targ1)) > TYPE_PRECISION (newtype))
9318 newtype = TREE_TYPE (targ1);
9320 /* Fold (double)float1 CMP (double)float2 into float1 CMP float2. */
9321 if (TYPE_PRECISION (newtype) < TYPE_PRECISION (TREE_TYPE (arg0)))
9322 return fold_build2_loc (loc, code, type,
9323 fold_convert_loc (loc, newtype, targ0),
9324 fold_convert_loc (loc, newtype, targ1));
9326 /* (-a) CMP (-b) -> b CMP a */
9327 if (TREE_CODE (arg0) == NEGATE_EXPR
9328 && TREE_CODE (arg1) == NEGATE_EXPR)
9329 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg1, 0),
9330 TREE_OPERAND (arg0, 0));
9332 if (TREE_CODE (arg1) == REAL_CST)
9334 REAL_VALUE_TYPE cst;
9335 cst = TREE_REAL_CST (arg1);
9337 /* (-a) CMP CST -> a swap(CMP) (-CST) */
9338 if (TREE_CODE (arg0) == NEGATE_EXPR)
9339 return fold_build2_loc (loc, swap_tree_comparison (code), type,
9340 TREE_OPERAND (arg0, 0),
9341 build_real (TREE_TYPE (arg1),
9342 real_value_negate (&cst)));
9344 /* IEEE doesn't distinguish +0 and -0 in comparisons. */
9345 /* a CMP (-0) -> a CMP 0 */
9346 if (REAL_VALUE_MINUS_ZERO (cst))
9347 return fold_build2_loc (loc, code, type, arg0,
9348 build_real (TREE_TYPE (arg1), dconst0));
9350 /* x != NaN is always true, other ops are always false. */
9351 if (REAL_VALUE_ISNAN (cst)
9352 && ! HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg1))))
9354 tem = (code == NE_EXPR) ? integer_one_node : integer_zero_node;
9355 return omit_one_operand_loc (loc, type, tem, arg0);
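/* Editor's illustration: a quiet NaN compares unordered with everything,
   so only != holds (assuming signaling NaNs need not be honored).  */
#include <math.h>
int nan1 (double x) { return x != NAN; }   /* folds to 1 */
int nan2 (double x) { return x < NAN; }    /* folds to 0 */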
9358 /* Fold comparisons against infinity. */
9359 if (REAL_VALUE_ISINF (cst)
9360 && MODE_HAS_INFINITIES (TYPE_MODE (TREE_TYPE (arg1))))
9362 tem = fold_inf_compare (loc, code, type, arg0, arg1);
9363 if (tem != NULL_TREE)
9364 return tem;
9368 /* If this is a comparison of a real constant with a PLUS_EXPR
9369 or a MINUS_EXPR of a real constant, we can convert it into a
9370 comparison with a revised real constant as long as no overflow
9371 occurs when unsafe_math_optimizations are enabled. */
9372 if (flag_unsafe_math_optimizations
9373 && TREE_CODE (arg1) == REAL_CST
9374 && (TREE_CODE (arg0) == PLUS_EXPR
9375 || TREE_CODE (arg0) == MINUS_EXPR)
9376 && TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
9377 && 0 != (tem = const_binop (TREE_CODE (arg0) == PLUS_EXPR
9378 ? MINUS_EXPR : PLUS_EXPR,
9379 arg1, TREE_OPERAND (arg0, 1)))
9380 && !TREE_OVERFLOW (tem))
9381 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0), tem);
9383 /* Likewise, we can simplify a comparison of a real constant with
9384 a MINUS_EXPR whose first operand is also a real constant, i.e.
9385 (c1 - x) < c2 becomes x > c1-c2. Reordering is allowed on
9386 floating-point types only if -fassociative-math is set. */
9387 if (flag_associative_math
9388 && TREE_CODE (arg1) == REAL_CST
9389 && TREE_CODE (arg0) == MINUS_EXPR
9390 && TREE_CODE (TREE_OPERAND (arg0, 0)) == REAL_CST
9391 && 0 != (tem = const_binop (MINUS_EXPR, TREE_OPERAND (arg0, 0),
9392 arg1))
9393 && !TREE_OVERFLOW (tem))
9394 return fold_build2_loc (loc, swap_tree_comparison (code), type,
9395 TREE_OPERAND (arg0, 1), tem);
9397 /* Fold comparisons against built-in math functions. */
9398 if (TREE_CODE (arg1) == REAL_CST
9399 && flag_unsafe_math_optimizations
9400 && ! flag_errno_math)
9402 enum built_in_function fcode = builtin_mathfn_code (arg0);
9404 if (fcode != END_BUILTINS)
9406 tem = fold_mathfn_compare (loc, fcode, code, type, arg0, arg1);
9407 if (tem != NULL_TREE)
9408 return tem;
9413 if (TREE_CODE (TREE_TYPE (arg0)) == INTEGER_TYPE
9414 && CONVERT_EXPR_P (arg0))
9416 /* If we are widening one operand of an integer comparison,
9417 see if the other operand is similarly being widened. Perhaps we
9418 can do the comparison in the narrower type. */
9419 tem = fold_widened_comparison (loc, code, type, arg0, arg1);
9420 if (tem)
9421 return tem;
9423 /* Or if we are changing signedness. */
9424 tem = fold_sign_changed_comparison (loc, code, type, arg0, arg1);
9425 if (tem)
9426 return tem;
9429 /* If this is comparing a constant with a MIN_EXPR or a MAX_EXPR of a
9430 constant, we can simplify it. */
9431 if (TREE_CODE (arg1) == INTEGER_CST
9432 && (TREE_CODE (arg0) == MIN_EXPR
9433 || TREE_CODE (arg0) == MAX_EXPR)
9434 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
9436 tem = optimize_minmax_comparison (loc, code, type, op0, op1);
9437 if (tem)
9438 return tem;
9441 /* Simplify comparison of something with itself. (For IEEE
9442 floating-point, we can only do some of these simplifications.) */
9443 if (operand_equal_p (arg0, arg1, 0))
9445 switch (code)
9447 case EQ_EXPR:
9448 if (! FLOAT_TYPE_P (TREE_TYPE (arg0))
9449 || ! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
9450 return constant_boolean_node (1, type);
9451 break;
9453 case GE_EXPR:
9454 case LE_EXPR:
9455 if (! FLOAT_TYPE_P (TREE_TYPE (arg0))
9456 || ! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
9457 return constant_boolean_node (1, type);
9458 return fold_build2_loc (loc, EQ_EXPR, type, arg0, arg1);
9460 case NE_EXPR:
9461 /* For NE, we can only do this simplification if the operands are
9462 integral or we don't honor IEEE floating point NaNs. */
9463 if (FLOAT_TYPE_P (TREE_TYPE (arg0))
9464 && HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
9465 break;
9466 /* ... fall through ... */
9467 case GT_EXPR:
9468 case LT_EXPR:
9469 return constant_boolean_node (0, type);
9470 default:
9471 gcc_unreachable ();
9475 /* If we are comparing an expression that just has comparisons
9476 of two integer values, arithmetic expressions of those comparisons,
9477 and constants, we can simplify it. There are only three cases
9478 to check: the two values can either be equal, the first can be
9479 greater, or the second can be greater. Fold the expression for
9480 those three values. Since each value must be 0 or 1, we have
9481 eight possibilities, each of which corresponds to the constant 0
9482 or 1 or one of the six possible comparisons.
9484 This handles common cases like (a > b) == 0 but also handles
9485 expressions like ((x > y) - (y > x)) > 0, which supposedly
9486 occur in macroized code. */
9488 if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg0) != INTEGER_CST)
9490 tree cval1 = 0, cval2 = 0;
9491 int save_p = 0;
9493 if (twoval_comparison_p (arg0, &cval1, &cval2, &save_p)
9494 /* Don't handle degenerate cases here; they should already
9495 have been handled anyway. */
9496 && cval1 != 0 && cval2 != 0
9497 && ! (TREE_CONSTANT (cval1) && TREE_CONSTANT (cval2))
9498 && TREE_TYPE (cval1) == TREE_TYPE (cval2)
9499 && INTEGRAL_TYPE_P (TREE_TYPE (cval1))
9500 && TYPE_MAX_VALUE (TREE_TYPE (cval1))
9501 && TYPE_MAX_VALUE (TREE_TYPE (cval2))
9502 && ! operand_equal_p (TYPE_MIN_VALUE (TREE_TYPE (cval1)),
9503 TYPE_MAX_VALUE (TREE_TYPE (cval2)), 0))
9505 tree maxval = TYPE_MAX_VALUE (TREE_TYPE (cval1));
9506 tree minval = TYPE_MIN_VALUE (TREE_TYPE (cval1));
9508 /* We can't just pass T to eval_subst in case cval1 or cval2
9509 was the same as ARG1. */
9511 tree high_result
9512 = fold_build2_loc (loc, code, type,
9513 eval_subst (loc, arg0, cval1, maxval,
9514 cval2, minval),
9515 arg1);
9516 tree equal_result
9517 = fold_build2_loc (loc, code, type,
9518 eval_subst (loc, arg0, cval1, maxval,
9519 cval2, maxval),
9520 arg1);
9521 tree low_result
9522 = fold_build2_loc (loc, code, type,
9523 eval_subst (loc, arg0, cval1, minval,
9524 cval2, maxval),
9525 arg1);
9527 /* All three of these results should be 0 or 1. Confirm they are.
9528 Then use those values to select the proper code to use. */
9530 if (TREE_CODE (high_result) == INTEGER_CST
9531 && TREE_CODE (equal_result) == INTEGER_CST
9532 && TREE_CODE (low_result) == INTEGER_CST)
9534 /* Make a 3-bit mask with the high-order bit being the
9535 value for `>', the next for '=', and the low for '<'. */
9536 switch ((integer_onep (high_result) * 4)
9537 + (integer_onep (equal_result) * 2)
9538 + integer_onep (low_result))
9540 case 0:
9541 /* Always false. */
9542 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
9543 case 1:
9544 code = LT_EXPR;
9545 break;
9546 case 2:
9547 code = EQ_EXPR;
9548 break;
9549 case 3:
9550 code = LE_EXPR;
9551 break;
9552 case 4:
9553 code = GT_EXPR;
9554 break;
9555 case 5:
9556 code = NE_EXPR;
9557 break;
9558 case 6:
9559 code = GE_EXPR;
9560 break;
9561 case 7:
9562 /* Always true. */
9563 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
9566 if (save_p)
9568 tem = save_expr (build2 (code, type, cval1, cval2));
9569 SET_EXPR_LOCATION (tem, loc);
9570 return tem;
9572 return fold_build2_loc (loc, code, type, cval1, cval2);
9577 /* We can fold X/C1 op C2 where C1 and C2 are integer constants
9578 into a single range test. */
9579 if ((TREE_CODE (arg0) == TRUNC_DIV_EXPR
9580 || TREE_CODE (arg0) == EXACT_DIV_EXPR)
9581 && TREE_CODE (arg1) == INTEGER_CST
9582 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
9583 && !integer_zerop (TREE_OPERAND (arg0, 1))
9584 && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1))
9585 && !TREE_OVERFLOW (arg1))
9587 tem = fold_div_compare (loc, code, type, arg0, arg1);
9588 if (tem != NULL_TREE)
9589 return tem;
9592 /* Fold ~X op ~Y as Y op X. */
9593 if (TREE_CODE (arg0) == BIT_NOT_EXPR
9594 && TREE_CODE (arg1) == BIT_NOT_EXPR)
9596 tree cmp_type = TREE_TYPE (TREE_OPERAND (arg0, 0));
9597 return fold_build2_loc (loc, code, type,
9598 fold_convert_loc (loc, cmp_type,
9599 TREE_OPERAND (arg1, 0)),
9600 TREE_OPERAND (arg0, 0));
9603 /* Fold ~X op C as X op' ~C, where op' is the swapped comparison. */
9604 if (TREE_CODE (arg0) == BIT_NOT_EXPR
9605 && (TREE_CODE (arg1) == INTEGER_CST || TREE_CODE (arg1) == VECTOR_CST))
9607 tree cmp_type = TREE_TYPE (TREE_OPERAND (arg0, 0));
9608 return fold_build2_loc (loc, swap_tree_comparison (code), type,
9609 TREE_OPERAND (arg0, 0),
9610 fold_build1_loc (loc, BIT_NOT_EXPR, cmp_type,
9611 fold_convert_loc (loc, cmp_type, arg1)));
9614 return NULL_TREE;
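/* Editor's illustration of the two BIT_NOT_EXPR folds above: ~x is
   -x - 1, a strictly decreasing function, so it flips the ordering.  */
int not1 (int x, int y) { return ~x < ~y; }   /* folds to  y < x    */
int not2 (int x)        { return ~x == 5; }   /* folds to  x == -6  */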
9618 /* Subroutine of fold_binary. Optimize complex multiplications of the
9619 form z * conj(z), as pow(realpart(z),2) + pow(imagpart(z),2). The
9620 argument EXPR represents the expression "z" of type TYPE. */
9622 static tree
9623 fold_mult_zconjz (location_t loc, tree type, tree expr)
9625 tree itype = TREE_TYPE (type);
9626 tree rpart, ipart, tem;
9628 if (TREE_CODE (expr) == COMPLEX_EXPR)
9630 rpart = TREE_OPERAND (expr, 0);
9631 ipart = TREE_OPERAND (expr, 1);
9633 else if (TREE_CODE (expr) == COMPLEX_CST)
9635 rpart = TREE_REALPART (expr);
9636 ipart = TREE_IMAGPART (expr);
9638 else
9640 expr = save_expr (expr);
9641 rpart = fold_build1_loc (loc, REALPART_EXPR, itype, expr);
9642 ipart = fold_build1_loc (loc, IMAGPART_EXPR, itype, expr);
9645 rpart = save_expr (rpart);
9646 ipart = save_expr (ipart);
9647 tem = fold_build2_loc (loc, PLUS_EXPR, itype,
9648 fold_build2_loc (loc, MULT_EXPR, itype, rpart, rpart),
9649 fold_build2_loc (loc, MULT_EXPR, itype, ipart, ipart));
9650 return fold_build2_loc (loc, COMPLEX_EXPR, type, tem,
9651 build_zero_cst (itype));
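/* Editor's sketch: z * conj(z) is purely real, equal to re*re + im*im,
   which is exactly the expression the builder above constructs.  */
#include <complex.h>

double squared_magnitude (double _Complex z)
{
  return creal (z * conj (z));   /* folded to creal(z)^2 + cimag(z)^2 */
}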
9655 /* Subroutine of fold_binary. If P is the value of EXPR, computes
9656 power-of-two M and (arbitrary) N such that M divides (P-N). This condition
9657 guarantees that P and N have the same least significant log2(M) bits.
9658 N is not otherwise constrained. In particular, N is not normalized to
9659 0 <= N < M as is common. In general, the precise value of P is unknown.
9660 M is chosen as large as possible such that constant N can be determined.
9662 Returns M and sets *RESIDUE to N.
9664 If ALLOW_FUNC_ALIGN is true, do take functions' DECL_ALIGN_UNIT into
9665 account. This is not always possible due to PR 35705.
9668 static unsigned HOST_WIDE_INT
9669 get_pointer_modulus_and_residue (tree expr, unsigned HOST_WIDE_INT *residue,
9670 bool allow_func_align)
9672 enum tree_code code;
9674 *residue = 0;
9676 code = TREE_CODE (expr);
9677 if (code == ADDR_EXPR)
9679 unsigned int bitalign;
9680 get_object_alignment_1 (TREE_OPERAND (expr, 0), &bitalign, residue);
9681 *residue /= BITS_PER_UNIT;
9682 return bitalign / BITS_PER_UNIT;
9684 else if (code == POINTER_PLUS_EXPR)
9686 tree op0, op1;
9687 unsigned HOST_WIDE_INT modulus;
9688 enum tree_code inner_code;
9690 op0 = TREE_OPERAND (expr, 0);
9691 STRIP_NOPS (op0);
9692 modulus = get_pointer_modulus_and_residue (op0, residue,
9693 allow_func_align);
9695 op1 = TREE_OPERAND (expr, 1);
9696 STRIP_NOPS (op1);
9697 inner_code = TREE_CODE (op1);
9698 if (inner_code == INTEGER_CST)
9700 *residue += TREE_INT_CST_LOW (op1);
9701 return modulus;
9703 else if (inner_code == MULT_EXPR)
9705 op1 = TREE_OPERAND (op1, 1);
9706 if (TREE_CODE (op1) == INTEGER_CST)
9708 unsigned HOST_WIDE_INT align;
9710 /* Compute the greatest power-of-2 divisor of op1. */
9711 align = TREE_INT_CST_LOW (op1);
9712 align &= -align;
9714 /* If align is non-zero and less than modulus, replace
9715 modulus with align. If align is 0, then either op1 is 0
9716 or the greatest power-of-2 divisor of op1 doesn't fit in an
9717 unsigned HOST_WIDE_INT. In either case, no additional
9718 constraint is imposed. */
9719 if (align)
9720 modulus = MIN (modulus, align);
9722 return modulus;
9727 /* If we get here, we were unable to determine anything useful about the
9728 expression. */
9729 return 1;
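/* Editor's sketch (hypothetical helper): the MULT_EXPR case above keeps
   only the greatest power-of-2 divisor of the constant factor, computed
   with the classic c & -c trick.  */
static unsigned long
greatest_pow2_divisor (unsigned long c)
{
  return c & -c;   /* 0 when c == 0, i.e. no constraint derived.  */
}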
9732 /* Helper function for fold_vec_perm. Store elements of VECTOR_CST or
9733 CONSTRUCTOR ARG into array ELTS and return true if successful. */
9735 static bool
9736 vec_cst_ctor_to_array (tree arg, tree *elts)
9738 unsigned int nelts = TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg)), i;
9740 if (TREE_CODE (arg) == VECTOR_CST)
9742 for (i = 0; i < VECTOR_CST_NELTS (arg); ++i)
9743 elts[i] = VECTOR_CST_ELT (arg, i);
9745 else if (TREE_CODE (arg) == CONSTRUCTOR)
9747 constructor_elt *elt;
9749 FOR_EACH_VEC_SAFE_ELT (CONSTRUCTOR_ELTS (arg), i, elt)
9750 if (i >= nelts || TREE_CODE (TREE_TYPE (elt->value)) == VECTOR_TYPE)
9751 return false;
9752 else
9753 elts[i] = elt->value;
9755 else
9756 return false;
9757 for (; i < nelts; i++)
9758 elts[i]
9759 = fold_convert (TREE_TYPE (TREE_TYPE (arg)), integer_zero_node);
9760 return true;
9763 /* Attempt to fold vector permutation of ARG0 and ARG1 vectors using SEL
9764 selector. Return the folded VECTOR_CST or CONSTRUCTOR if successful,
9765 NULL_TREE otherwise. */
9767 static tree
9768 fold_vec_perm (tree type, tree arg0, tree arg1, const unsigned char *sel)
9770 unsigned int nelts = TYPE_VECTOR_SUBPARTS (type), i;
9771 tree *elts;
9772 bool need_ctor = false;
9774 gcc_assert (TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0)) == nelts
9775 && TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg1)) == nelts);
9776 if (TREE_TYPE (TREE_TYPE (arg0)) != TREE_TYPE (type)
9777 || TREE_TYPE (TREE_TYPE (arg1)) != TREE_TYPE (type))
9778 return NULL_TREE;
9780 elts = XALLOCAVEC (tree, nelts * 3);
9781 if (!vec_cst_ctor_to_array (arg0, elts)
9782 || !vec_cst_ctor_to_array (arg1, elts + nelts))
9783 return NULL_TREE;
9785 for (i = 0; i < nelts; i++)
9787 if (!CONSTANT_CLASS_P (elts[sel[i]]))
9788 need_ctor = true;
9789 elts[i + 2 * nelts] = unshare_expr (elts[sel[i]]);
9792 if (need_ctor)
9794 vec<constructor_elt, va_gc> *v;
9795 vec_alloc (v, nelts);
9796 for (i = 0; i < nelts; i++)
9797 CONSTRUCTOR_APPEND_ELT (v, NULL_TREE, elts[2 * nelts + i]);
9798 return build_constructor (type, v);
9800 else
9801 return build_vector (type, &elts[2 * nelts]);
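/* Editor's sketch: the shuffle above on plain arrays.  SEL indexes the
   NELTS*2-element concatenation of ARG0 and ARG1, mirroring the layout
   of the elts array.  */
static void
vec_perm_sketch (const int *arg0, const int *arg1, const unsigned char *sel,
                 int *out, unsigned int nelts)
{
  for (unsigned int i = 0; i < nelts; i++)
    out[i] = sel[i] < nelts ? arg0[sel[i]] : arg1[sel[i] - nelts];
}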
9804 /* Try to fold a pointer difference of type TYPE between two address
9805 expressions of array references AREF0 and AREF1 using location LOC.
9806 Return a simplified expression for the difference or NULL_TREE. */
9808 static tree
9809 fold_addr_of_array_ref_difference (location_t loc, tree type,
9810 tree aref0, tree aref1)
9812 tree base0 = TREE_OPERAND (aref0, 0);
9813 tree base1 = TREE_OPERAND (aref1, 0);
9814 tree base_offset = build_int_cst (type, 0);
9816 /* If the bases are array references as well, recurse. If the bases
9817 are pointer indirections compute the difference of the pointers.
9818 If the bases are equal, we are set. */
9819 if ((TREE_CODE (base0) == ARRAY_REF
9820 && TREE_CODE (base1) == ARRAY_REF
9821 && (base_offset
9822 = fold_addr_of_array_ref_difference (loc, type, base0, base1)))
9823 || (INDIRECT_REF_P (base0)
9824 && INDIRECT_REF_P (base1)
9825 && (base_offset = fold_binary_loc (loc, MINUS_EXPR, type,
9826 TREE_OPERAND (base0, 0),
9827 TREE_OPERAND (base1, 0))))
9828 || operand_equal_p (base0, base1, 0))
9830 tree op0 = fold_convert_loc (loc, type, TREE_OPERAND (aref0, 1));
9831 tree op1 = fold_convert_loc (loc, type, TREE_OPERAND (aref1, 1));
9832 tree esz = fold_convert_loc (loc, type, array_ref_element_size (aref0));
9833 tree diff = build2 (MINUS_EXPR, type, op0, op1);
9834 return fold_build2_loc (loc, PLUS_EXPR, type,
9835 base_offset,
9836 fold_build2_loc (loc, MULT_EXPR, type,
9837 diff, esz));
9839 return NULL_TREE;
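/* Editor's illustration: for addresses into the same array the element
   size cancels, so the difference folds to the index difference
   (assuming I and J are valid indexes into the array).  */
#include <stddef.h>

ptrdiff_t index_diff (int i, int j)
{
  static int arr[100];
  return &arr[i] - &arr[j];   /* folds to  (ptrdiff_t) (i - j)  */
}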
9842 /* If the real or vector real constant CST of type TYPE has an exact
9843 inverse, return it, else return NULL_TREE. */
9845 static tree
9846 exact_inverse (tree type, tree cst)
9848 REAL_VALUE_TYPE r;
9849 tree unit_type, *elts;
9850 enum machine_mode mode;
9851 unsigned vec_nelts, i;
9853 switch (TREE_CODE (cst))
9855 case REAL_CST:
9856 r = TREE_REAL_CST (cst);
9858 if (exact_real_inverse (TYPE_MODE (type), &r))
9859 return build_real (type, r);
9861 return NULL_TREE;
9863 case VECTOR_CST:
9864 vec_nelts = VECTOR_CST_NELTS (cst);
9865 elts = XALLOCAVEC (tree, vec_nelts);
9866 unit_type = TREE_TYPE (type);
9867 mode = TYPE_MODE (unit_type);
9869 for (i = 0; i < vec_nelts; i++)
9871 r = TREE_REAL_CST (VECTOR_CST_ELT (cst, i));
9872 if (!exact_real_inverse (mode, &r))
9873 return NULL_TREE;
9874 elts[i] = build_real (unit_type, r);
9877 return build_vector (type, elts);
9879 default:
9880 return NULL_TREE;
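/* Editor's illustration: only reciprocals that are exactly representable
   qualify, i.e. powers of two in range; 1/3 is inexact and is rejected.  */
double quarter (double x) { return x / 4.0; }   /* may become  x * 0.25  */
double third (double x)   { return x / 3.0; }   /* kept as a division    */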
9884 /* Mask out the tz least significant bits of X of type TYPE where
9885 tz is the number of trailing zeroes in Y. */
9886 static wide_int
9887 mask_with_tz (tree type, const wide_int &x, const wide_int &y)
9889 int tz = wi::ctz (y);
9890 if (tz > 0)
9891 return wi::mask (tz, true, TYPE_PRECISION (type)) & x;
9892 return x;
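/* Editor's sketch of the same operation on host integers; assumes
   y != 0, since __builtin_ctz is undefined on zero.  */
static unsigned int
mask_low_tz (unsigned int x, unsigned int y)
{
  int tz = __builtin_ctz (y);                   /* trailing zeros of y  */
  return tz > 0 ? x & ~((1u << tz) - 1u) : x;   /* clear the low tz bits  */
}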
9895 /* Return true when T is an address and is known to be nonzero.
9896 For floating point we further ensure that T is not denormal.
9897 Similar logic is present in nonzero_address in rtlanal.h.
9899 If the return value is based on the assumption that signed overflow
9900 is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
9901 change *STRICT_OVERFLOW_P. */
9903 static bool
9904 tree_expr_nonzero_warnv_p (tree t, bool *strict_overflow_p)
9906 tree type = TREE_TYPE (t);
9907 enum tree_code code;
9909 /* Doing something useful for floating point would need more work. */
9910 if (!INTEGRAL_TYPE_P (type) && !POINTER_TYPE_P (type))
9911 return false;
9913 code = TREE_CODE (t);
9914 switch (TREE_CODE_CLASS (code))
9916 case tcc_unary:
9917 return tree_unary_nonzero_warnv_p (code, type, TREE_OPERAND (t, 0),
9918 strict_overflow_p);
9919 case tcc_binary:
9920 case tcc_comparison:
9921 return tree_binary_nonzero_warnv_p (code, type,
9922 TREE_OPERAND (t, 0),
9923 TREE_OPERAND (t, 1),
9924 strict_overflow_p);
9925 case tcc_constant:
9926 case tcc_declaration:
9927 case tcc_reference:
9928 return tree_single_nonzero_warnv_p (t, strict_overflow_p);
9930 default:
9931 break;
9934 switch (code)
9936 case TRUTH_NOT_EXPR:
9937 return tree_unary_nonzero_warnv_p (code, type, TREE_OPERAND (t, 0),
9938 strict_overflow_p);
9940 case TRUTH_AND_EXPR:
9941 case TRUTH_OR_EXPR:
9942 case TRUTH_XOR_EXPR:
9943 return tree_binary_nonzero_warnv_p (code, type,
9944 TREE_OPERAND (t, 0),
9945 TREE_OPERAND (t, 1),
9946 strict_overflow_p);
9948 case COND_EXPR:
9949 case CONSTRUCTOR:
9950 case OBJ_TYPE_REF:
9951 case ASSERT_EXPR:
9952 case ADDR_EXPR:
9953 case WITH_SIZE_EXPR:
9954 case SSA_NAME:
9955 return tree_single_nonzero_warnv_p (t, strict_overflow_p);
9957 case COMPOUND_EXPR:
9958 case MODIFY_EXPR:
9959 case BIND_EXPR:
9960 return tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 1),
9961 strict_overflow_p);
9963 case SAVE_EXPR:
9964 return tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 0),
9965 strict_overflow_p);
9967 case CALL_EXPR:
9969 tree fndecl = get_callee_fndecl (t);
9970 if (!fndecl) return false;
9971 if (flag_delete_null_pointer_checks && !flag_check_new
9972 && DECL_IS_OPERATOR_NEW (fndecl)
9973 && !TREE_NOTHROW (fndecl))
9974 return true;
9975 if (flag_delete_null_pointer_checks
9976 && lookup_attribute ("returns_nonnull",
9977 TYPE_ATTRIBUTES (TREE_TYPE (fndecl))))
9978 return true;
9979 return alloca_call_p (t);
9982 default:
9983 break;
9985 return false;
9988 /* Return true when T is an address and is known to be nonzero.
9989 Handle warnings about undefined signed overflow. */
9991 static bool
9992 tree_expr_nonzero_p (tree t)
9994 bool ret, strict_overflow_p;
9996 strict_overflow_p = false;
9997 ret = tree_expr_nonzero_warnv_p (t, &strict_overflow_p);
9998 if (strict_overflow_p)
9999 fold_overflow_warning (("assuming signed overflow does not occur when "
10000 "determining that expression is always "
10001 "non-zero"),
10002 WARN_STRICT_OVERFLOW_MISC);
10003 return ret;
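/* Editor's illustration: the address of a declared object is known
   nonzero, so a null test against it folds away.  */
int addr_nonzero (void)
{
  static int v;
  return &v != 0;   /* folds to 1 */
}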
10006 /* Fold a binary expression of code CODE and type TYPE with operands
10007 OP0 and OP1. LOC is the location of the resulting expression.
10008 Return the folded expression if folding is successful. Otherwise,
10009 return NULL_TREE. */
10011 tree
10012 fold_binary_loc (location_t loc,
10013 enum tree_code code, tree type, tree op0, tree op1)
10015 enum tree_code_class kind = TREE_CODE_CLASS (code);
10016 tree arg0, arg1, tem;
10017 tree t1 = NULL_TREE;
10018 bool strict_overflow_p;
10019 unsigned int prec;
10021 gcc_assert (IS_EXPR_CODE_CLASS (kind)
10022 && TREE_CODE_LENGTH (code) == 2
10023 && op0 != NULL_TREE
10024 && op1 != NULL_TREE);
10026 arg0 = op0;
10027 arg1 = op1;
10029 /* Strip any conversions that don't change the mode. This is
10030 safe for every expression, except for a comparison expression
10031 because its signedness is derived from its operands. So, in
10032 the latter case, only strip conversions that don't change the
10033 signedness. MIN_EXPR/MAX_EXPR also need signedness of arguments
10034 preserved.
10036 Note that this is done as an internal manipulation within the
10037 constant folder, in order to find the simplest representation
10038 of the arguments so that their form can be studied. In any
10039 case, the appropriate type conversions should be put back in
10040 the tree that will get out of the constant folder. */
10042 if (kind == tcc_comparison || code == MIN_EXPR || code == MAX_EXPR)
10044 STRIP_SIGN_NOPS (arg0);
10045 STRIP_SIGN_NOPS (arg1);
10047 else
10049 STRIP_NOPS (arg0);
10050 STRIP_NOPS (arg1);
10053 /* Note that TREE_CONSTANT isn't enough: static var addresses are
10054 constant but we can't do arithmetic on them. */
10055 if ((TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
10056 || (TREE_CODE (arg0) == REAL_CST && TREE_CODE (arg1) == REAL_CST)
10057 || (TREE_CODE (arg0) == FIXED_CST && TREE_CODE (arg1) == FIXED_CST)
10058 || (TREE_CODE (arg0) == FIXED_CST && TREE_CODE (arg1) == INTEGER_CST)
10059 || (TREE_CODE (arg0) == COMPLEX_CST && TREE_CODE (arg1) == COMPLEX_CST)
10060 || (TREE_CODE (arg0) == VECTOR_CST && TREE_CODE (arg1) == VECTOR_CST)
10061 || (TREE_CODE (arg0) == VECTOR_CST && TREE_CODE (arg1) == INTEGER_CST))
10063 if (kind == tcc_binary)
10065 /* Make sure type and arg0 have the same saturating flag. */
10066 gcc_assert (TYPE_SATURATING (type)
10067 == TYPE_SATURATING (TREE_TYPE (arg0)));
10068 tem = const_binop (code, arg0, arg1);
10070 else if (kind == tcc_comparison)
10071 tem = fold_relational_const (code, type, arg0, arg1);
10072 else
10073 tem = NULL_TREE;
10075 if (tem != NULL_TREE)
10077 if (TREE_TYPE (tem) != type)
10078 tem = fold_convert_loc (loc, type, tem);
10079 return tem;
10083 /* If this is a commutative operation, and ARG0 is a constant, move it
10084 to ARG1 to reduce the number of tests below. */
10085 if (commutative_tree_code (code)
10086 && tree_swap_operands_p (arg0, arg1, true))
10087 return fold_build2_loc (loc, code, type, op1, op0);
10089 /* ARG0 is the first operand of EXPR, and ARG1 is the second operand.
10091 First check for cases where an arithmetic operation is applied to a
10092 compound, conditional, or comparison operation. Push the arithmetic
10093 operation inside the compound or conditional to see if any folding
10094 can then be done. Convert comparison to conditional for this purpose.
10095 This also optimizes non-constant cases that used to be done in
10096 expand_expr.
10098 Before we do that, see if this is a BIT_AND_EXPR or a BIT_IOR_EXPR
10099 where one operand is a truth value and the other is a truth value, a
10100 comparison, or a BIT_AND_EXPR with the constant 1. In that case, the
10101 code below would make the expression more complex. Change it to a
10102 TRUTH_{AND,OR}_EXPR. Likewise, convert a similar NE_EXPR to
10103 TRUTH_XOR_EXPR and an EQ_EXPR to the inversion of a TRUTH_XOR_EXPR. */
10105 if ((code == BIT_AND_EXPR || code == BIT_IOR_EXPR
10106 || code == EQ_EXPR || code == NE_EXPR)
10107 && TREE_CODE (type) != VECTOR_TYPE
10108 && ((truth_value_p (TREE_CODE (arg0))
10109 && (truth_value_p (TREE_CODE (arg1))
10110 || (TREE_CODE (arg1) == BIT_AND_EXPR
10111 && integer_onep (TREE_OPERAND (arg1, 1)))))
10112 || (truth_value_p (TREE_CODE (arg1))
10113 && (truth_value_p (TREE_CODE (arg0))
10114 || (TREE_CODE (arg0) == BIT_AND_EXPR
10115 && integer_onep (TREE_OPERAND (arg0, 1)))))))
10117 tem = fold_build2_loc (loc, code == BIT_AND_EXPR ? TRUTH_AND_EXPR
10118 : code == BIT_IOR_EXPR ? TRUTH_OR_EXPR
10119 : TRUTH_XOR_EXPR,
10120 boolean_type_node,
10121 fold_convert_loc (loc, boolean_type_node, arg0),
10122 fold_convert_loc (loc, boolean_type_node, arg1));
10124 if (code == EQ_EXPR)
10125 tem = invert_truthvalue_loc (loc, tem);
10127 return fold_convert_loc (loc, type, tem);
10130 if (TREE_CODE_CLASS (code) == tcc_binary
10131 || TREE_CODE_CLASS (code) == tcc_comparison)
10133 if (TREE_CODE (arg0) == COMPOUND_EXPR)
10135 tem = fold_build2_loc (loc, code, type,
10136 fold_convert_loc (loc, TREE_TYPE (op0),
10137 TREE_OPERAND (arg0, 1)), op1);
10138 return build2_loc (loc, COMPOUND_EXPR, type, TREE_OPERAND (arg0, 0),
10139 tem);
10141 if (TREE_CODE (arg1) == COMPOUND_EXPR
10142 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
10144 tem = fold_build2_loc (loc, code, type, op0,
10145 fold_convert_loc (loc, TREE_TYPE (op1),
10146 TREE_OPERAND (arg1, 1)));
10147 return build2_loc (loc, COMPOUND_EXPR, type, TREE_OPERAND (arg1, 0),
10148 tem);
10151 if (TREE_CODE (arg0) == COND_EXPR
10152 || TREE_CODE (arg0) == VEC_COND_EXPR
10153 || COMPARISON_CLASS_P (arg0))
10155 tem = fold_binary_op_with_conditional_arg (loc, code, type, op0, op1,
10156 arg0, arg1,
10157 /*cond_first_p=*/1);
10158 if (tem != NULL_TREE)
10159 return tem;
10162 if (TREE_CODE (arg1) == COND_EXPR
10163 || TREE_CODE (arg1) == VEC_COND_EXPR
10164 || COMPARISON_CLASS_P (arg1))
10166 tem = fold_binary_op_with_conditional_arg (loc, code, type, op0, op1,
10167 arg1, arg0,
10168 /*cond_first_p=*/0);
10169 if (tem != NULL_TREE)
10170 return tem;
10174 switch (code)
10176 case MEM_REF:
10177 /* MEM[&MEM[p, CST1], CST2] -> MEM[p, CST1 + CST2]. */
10178 if (TREE_CODE (arg0) == ADDR_EXPR
10179 && TREE_CODE (TREE_OPERAND (arg0, 0)) == MEM_REF)
10181 tree iref = TREE_OPERAND (arg0, 0);
10182 return fold_build2 (MEM_REF, type,
10183 TREE_OPERAND (iref, 0),
10184 int_const_binop (PLUS_EXPR, arg1,
10185 TREE_OPERAND (iref, 1)));
10188 /* MEM[&a.b, CST2] -> MEM[&a, offsetof (a, b) + CST2]. */
10189 if (TREE_CODE (arg0) == ADDR_EXPR
10190 && handled_component_p (TREE_OPERAND (arg0, 0)))
10192 tree base;
10193 HOST_WIDE_INT coffset;
10194 base = get_addr_base_and_unit_offset (TREE_OPERAND (arg0, 0),
10195 &coffset);
10196 if (!base)
10197 return NULL_TREE;
10198 return fold_build2 (MEM_REF, type,
10199 build_fold_addr_expr (base),
10200 int_const_binop (PLUS_EXPR, arg1,
10201 size_int (coffset)));
10204 return NULL_TREE;
10206 case POINTER_PLUS_EXPR:
10207 /* 0 +p index -> (type)index */
10208 if (integer_zerop (arg0))
10209 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
10211 /* PTR +p 0 -> PTR */
10212 if (integer_zerop (arg1))
10213 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
10215 /* INT +p INT -> (PTR)(INT + INT). Stripping types allows for this. */
10216 if (INTEGRAL_TYPE_P (TREE_TYPE (arg1))
10217 && INTEGRAL_TYPE_P (TREE_TYPE (arg0)))
10218 return fold_convert_loc (loc, type,
10219 fold_build2_loc (loc, PLUS_EXPR, sizetype,
10220 fold_convert_loc (loc, sizetype,
10221 arg1),
10222 fold_convert_loc (loc, sizetype,
10223 arg0)));
10225 /* (PTR +p B) +p A -> PTR +p (B + A) */
10226 if (TREE_CODE (arg0) == POINTER_PLUS_EXPR)
10228 tree inner;
10229 tree arg01 = fold_convert_loc (loc, sizetype, TREE_OPERAND (arg0, 1));
10230 tree arg00 = TREE_OPERAND (arg0, 0);
10231 inner = fold_build2_loc (loc, PLUS_EXPR, sizetype,
10232 arg01, fold_convert_loc (loc, sizetype, arg1));
10233 return fold_convert_loc (loc, type,
10234 fold_build_pointer_plus_loc (loc,
10235 arg00, inner));
10238 /* PTR_CST +p CST -> CST1 */
10239 if (TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
10240 return fold_build2_loc (loc, PLUS_EXPR, type, arg0,
10241 fold_convert_loc (loc, type, arg1));
10243 /* Try replacing &a[i1] +p c * i2 with &a[i1 + i2], if c is the step
10244 of the array. The loop optimizer sometimes produces this type of
10245 expression. */
10246 if (TREE_CODE (arg0) == ADDR_EXPR)
10248 tem = try_move_mult_to_index (loc, arg0,
10249 fold_convert_loc (loc,
10250 ssizetype, arg1));
10251 if (tem)
10252 return fold_convert_loc (loc, type, tem);
10255 return NULL_TREE;
10257 case PLUS_EXPR:
10258 /* A + (-B) -> A - B */
10259 if (TREE_CODE (arg1) == NEGATE_EXPR
10260 && (flag_sanitize & SANITIZE_SI_OVERFLOW) == 0)
10261 return fold_build2_loc (loc, MINUS_EXPR, type,
10262 fold_convert_loc (loc, type, arg0),
10263 fold_convert_loc (loc, type,
10264 TREE_OPERAND (arg1, 0)));
10265 /* (-A) + B -> B - A */
10266 if (TREE_CODE (arg0) == NEGATE_EXPR
10267 && reorder_operands_p (TREE_OPERAND (arg0, 0), arg1)
10268 && (flag_sanitize & SANITIZE_SI_OVERFLOW) == 0)
10269 return fold_build2_loc (loc, MINUS_EXPR, type,
10270 fold_convert_loc (loc, type, arg1),
10271 fold_convert_loc (loc, type,
10272 TREE_OPERAND (arg0, 0)));
10274 if (INTEGRAL_TYPE_P (type) || VECTOR_INTEGER_TYPE_P (type))
10276 /* Convert ~A + 1 to -A. */
10277 if (TREE_CODE (arg0) == BIT_NOT_EXPR
10278 && integer_onep (arg1))
10279 return fold_build1_loc (loc, NEGATE_EXPR, type,
10280 fold_convert_loc (loc, type,
10281 TREE_OPERAND (arg0, 0)));
10283 /* ~X + X is -1. */
10284 if (TREE_CODE (arg0) == BIT_NOT_EXPR
10285 && !TYPE_OVERFLOW_TRAPS (type))
10287 tree tem = TREE_OPERAND (arg0, 0);
10289 STRIP_NOPS (tem);
10290 if (operand_equal_p (tem, arg1, 0))
10292 t1 = build_all_ones_cst (type);
10293 return omit_one_operand_loc (loc, type, t1, arg1);
10297 /* X + ~X is -1. */
10298 if (TREE_CODE (arg1) == BIT_NOT_EXPR
10299 && !TYPE_OVERFLOW_TRAPS (type))
10301 tree tem = TREE_OPERAND (arg1, 0);
10303 STRIP_NOPS (tem);
10304 if (operand_equal_p (arg0, tem, 0))
10306 t1 = build_all_ones_cst (type);
10307 return omit_one_operand_loc (loc, type, t1, arg0);
10311 /* X + (X / CST) * -CST is X % CST. */
10312 if (TREE_CODE (arg1) == MULT_EXPR
10313 && TREE_CODE (TREE_OPERAND (arg1, 0)) == TRUNC_DIV_EXPR
10314 && operand_equal_p (arg0,
10315 TREE_OPERAND (TREE_OPERAND (arg1, 0), 0), 0))
10317 tree cst0 = TREE_OPERAND (TREE_OPERAND (arg1, 0), 1);
10318 tree cst1 = TREE_OPERAND (arg1, 1);
10319 tree sum = fold_binary_loc (loc, PLUS_EXPR, TREE_TYPE (cst1),
10320 cst1, cst0);
10321 if (sum && integer_zerop (sum))
10322 return fold_convert_loc (loc, type,
10323 fold_build2_loc (loc, TRUNC_MOD_EXPR,
10324 TREE_TYPE (arg0), arg0,
10325 cst0));
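/* Editor's illustration: x - (x/d)*d is the definition of the C
   remainder, and the PLUS form above is the same thing with a negated
   product.  */
int rem16 (int x) { return x + (x / 16) * -16; }   /* folds to  x % 16  */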
10329 /* Handle (A1 * C1) + (A2 * C2) with A1, A2 or C1, C2 being the same or
10330 one. Make sure the type is not saturating and has the signedness of
10331 the stripped operands, as fold_plusminus_mult_expr will re-associate.
10332 ??? The latter condition should use TYPE_OVERFLOW_* flags instead. */
10333 if ((TREE_CODE (arg0) == MULT_EXPR
10334 || TREE_CODE (arg1) == MULT_EXPR)
10335 && !TYPE_SATURATING (type)
10336 && TYPE_UNSIGNED (type) == TYPE_UNSIGNED (TREE_TYPE (arg0))
10337 && TYPE_UNSIGNED (type) == TYPE_UNSIGNED (TREE_TYPE (arg1))
10338 && (!FLOAT_TYPE_P (type) || flag_associative_math))
10340 tree tem = fold_plusminus_mult_expr (loc, code, type, arg0, arg1);
10341 if (tem)
10342 return tem;
10345 if (! FLOAT_TYPE_P (type))
10347 if (integer_zerop (arg1))
10348 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
10350 /* If we are adding two BIT_AND_EXPR's, both of which are and'ing
10351 with a constant, and the two constants have no bits in common,
10352 we should treat this as a BIT_IOR_EXPR since this may produce more
10353 simplifications. */
10354 if (TREE_CODE (arg0) == BIT_AND_EXPR
10355 && TREE_CODE (arg1) == BIT_AND_EXPR
10356 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
10357 && TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
10358 && wi::bit_and (TREE_OPERAND (arg0, 1),
10359 TREE_OPERAND (arg1, 1)) == 0)
10361 code = BIT_IOR_EXPR;
10362 goto bit_ior;
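/* Editor's illustration: with disjoint masks no carries can occur, so
   the addition is an inclusive or, which may enable further folds.  */
int join (int x, int y) { return (x & 0x0f) + (y & 0xf0); }   /* -> ior */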
10365 /* Reassociate (plus (plus (mult) (foo)) (mult)) as
10366 (plus (plus (mult) (mult)) (foo)) so that we can
10367 take advantage of the factoring cases below. */
10368 if (TYPE_OVERFLOW_WRAPS (type)
10369 && (((TREE_CODE (arg0) == PLUS_EXPR
10370 || TREE_CODE (arg0) == MINUS_EXPR)
10371 && TREE_CODE (arg1) == MULT_EXPR)
10372 || ((TREE_CODE (arg1) == PLUS_EXPR
10373 || TREE_CODE (arg1) == MINUS_EXPR)
10374 && TREE_CODE (arg0) == MULT_EXPR)))
10376 tree parg0, parg1, parg, marg;
10377 enum tree_code pcode;
10379 if (TREE_CODE (arg1) == MULT_EXPR)
10380 parg = arg0, marg = arg1;
10381 else
10382 parg = arg1, marg = arg0;
10383 pcode = TREE_CODE (parg);
10384 parg0 = TREE_OPERAND (parg, 0);
10385 parg1 = TREE_OPERAND (parg, 1);
10386 STRIP_NOPS (parg0);
10387 STRIP_NOPS (parg1);
10389 if (TREE_CODE (parg0) == MULT_EXPR
10390 && TREE_CODE (parg1) != MULT_EXPR)
10391 return fold_build2_loc (loc, pcode, type,
10392 fold_build2_loc (loc, PLUS_EXPR, type,
10393 fold_convert_loc (loc, type,
10394 parg0),
10395 fold_convert_loc (loc, type,
10396 marg)),
10397 fold_convert_loc (loc, type, parg1));
10398 if (TREE_CODE (parg0) != MULT_EXPR
10399 && TREE_CODE (parg1) == MULT_EXPR)
10400 return
10401 fold_build2_loc (loc, PLUS_EXPR, type,
10402 fold_convert_loc (loc, type, parg0),
10403 fold_build2_loc (loc, pcode, type,
10404 fold_convert_loc (loc, type, marg),
10405 fold_convert_loc (loc, type,
10406 parg1)));
10409 else
10411 /* See if ARG1 is zero and X + ARG1 reduces to X. */
10412 if (fold_real_zero_addition_p (TREE_TYPE (arg0), arg1, 0))
10413 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
10415 /* Likewise if the operands are reversed. */
10416 if (fold_real_zero_addition_p (TREE_TYPE (arg1), arg0, 0))
10417 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
10419 /* Convert X + -C into X - C. */
10420 if (TREE_CODE (arg1) == REAL_CST
10421 && REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg1)))
10423 tem = fold_negate_const (arg1, type);
10424 if (!TREE_OVERFLOW (arg1) || !flag_trapping_math)
10425 return fold_build2_loc (loc, MINUS_EXPR, type,
10426 fold_convert_loc (loc, type, arg0),
10427 fold_convert_loc (loc, type, tem));
10430 /* Fold __complex__ ( x, 0 ) + __complex__ ( 0, y )
10431 to __complex__ ( x, y ). This is not the same for SNaNs or
10432 if signed zeros are involved. */
10433 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
10434 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
10435 && COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0)))
10437 tree rtype = TREE_TYPE (TREE_TYPE (arg0));
10438 tree arg0r = fold_unary_loc (loc, REALPART_EXPR, rtype, arg0);
10439 tree arg0i = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg0);
10440 bool arg0rz = false, arg0iz = false;
10441 if ((arg0r && (arg0rz = real_zerop (arg0r)))
10442 || (arg0i && (arg0iz = real_zerop (arg0i))))
10444 tree arg1r = fold_unary_loc (loc, REALPART_EXPR, rtype, arg1);
10445 tree arg1i = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg1);
10446 if (arg0rz && arg1i && real_zerop (arg1i))
10448 tree rp = arg1r ? arg1r
10449 : build1 (REALPART_EXPR, rtype, arg1);
10450 tree ip = arg0i ? arg0i
10451 : build1 (IMAGPART_EXPR, rtype, arg0);
10452 return fold_build2_loc (loc, COMPLEX_EXPR, type, rp, ip);
10454 else if (arg0iz && arg1r && real_zerop (arg1r))
10456 tree rp = arg0r ? arg0r
10457 : build1 (REALPART_EXPR, rtype, arg0);
10458 tree ip = arg1i ? arg1i
10459 : build1 (IMAGPART_EXPR, rtype, arg1);
10460 return fold_build2_loc (loc, COMPLEX_EXPR, type, rp, ip);
10465 if (flag_unsafe_math_optimizations
10466 && (TREE_CODE (arg0) == RDIV_EXPR || TREE_CODE (arg0) == MULT_EXPR)
10467 && (TREE_CODE (arg1) == RDIV_EXPR || TREE_CODE (arg1) == MULT_EXPR)
10468 && (tem = distribute_real_division (loc, code, type, arg0, arg1)))
10469 return tem;
10471 /* Convert x+x into x*2.0. */
10472 if (operand_equal_p (arg0, arg1, 0)
10473 && SCALAR_FLOAT_TYPE_P (type))
10474 return fold_build2_loc (loc, MULT_EXPR, type, arg0,
10475 build_real (type, dconst2));
10477 /* Convert a + (b*c + d*e) into (a + b*c) + d*e.
10478 We associate floats only if the user has specified
10479 -fassociative-math. */
10480 if (flag_associative_math
10481 && TREE_CODE (arg1) == PLUS_EXPR
10482 && TREE_CODE (arg0) != MULT_EXPR)
10484 tree tree10 = TREE_OPERAND (arg1, 0);
10485 tree tree11 = TREE_OPERAND (arg1, 1);
10486 if (TREE_CODE (tree11) == MULT_EXPR
10487 && TREE_CODE (tree10) == MULT_EXPR)
10489 tree tree0;
10490 tree0 = fold_build2_loc (loc, PLUS_EXPR, type, arg0, tree10);
10491 return fold_build2_loc (loc, PLUS_EXPR, type, tree0, tree11);
10494 /* Convert (b*c + d*e) + a into b*c + (d*e + a).
10495 We associate floats only if the user has specified
10496 -fassociative-math. */
10497 if (flag_associative_math
10498 && TREE_CODE (arg0) == PLUS_EXPR
10499 && TREE_CODE (arg1) != MULT_EXPR)
10501 tree tree00 = TREE_OPERAND (arg0, 0);
10502 tree tree01 = TREE_OPERAND (arg0, 1);
10503 if (TREE_CODE (tree01) == MULT_EXPR
10504 && TREE_CODE (tree00) == MULT_EXPR)
10506 tree tree0;
10507 tree0 = fold_build2_loc (loc, PLUS_EXPR, type, tree01, arg1);
10508 return fold_build2_loc (loc, PLUS_EXPR, type, tree00, tree0);
10513 bit_rotate:
10514 /* If A is unsigned and C1+C2 is the size of A, then
10515 (A << C1) + (A >> C2) is a rotate of A by C1 bits. */
10516 /* If A is unsigned and Z is the size of A, then
10517 (A << B) + (A >> (Z - B)) is a rotate of A by B bits. */
10519 enum tree_code code0, code1;
10520 tree rtype;
10521 code0 = TREE_CODE (arg0);
10522 code1 = TREE_CODE (arg1);
10523 if (((code0 == RSHIFT_EXPR && code1 == LSHIFT_EXPR)
10524 || (code1 == RSHIFT_EXPR && code0 == LSHIFT_EXPR))
10525 && operand_equal_p (TREE_OPERAND (arg0, 0),
10526 TREE_OPERAND (arg1, 0), 0)
10527 && (rtype = TREE_TYPE (TREE_OPERAND (arg0, 0)),
10528 TYPE_UNSIGNED (rtype))
10529 /* Only create rotates in complete modes. Other cases are not
10530 expanded properly. */
10531 && (element_precision (rtype)
10532 == element_precision (TYPE_MODE (rtype))))
10534 tree tree01, tree11;
10535 enum tree_code code01, code11;
10537 tree01 = TREE_OPERAND (arg0, 1);
10538 tree11 = TREE_OPERAND (arg1, 1);
10539 STRIP_NOPS (tree01);
10540 STRIP_NOPS (tree11);
10541 code01 = TREE_CODE (tree01);
10542 code11 = TREE_CODE (tree11);
10543 if (code01 == INTEGER_CST
10544 && code11 == INTEGER_CST
10545 && (wi::to_widest (tree01) + wi::to_widest (tree11)
10546 == element_precision (TREE_TYPE (TREE_OPERAND (arg0, 0)))))
10548 tem = build2_loc (loc, LROTATE_EXPR,
10549 TREE_TYPE (TREE_OPERAND (arg0, 0)),
10550 TREE_OPERAND (arg0, 0),
10551 code0 == LSHIFT_EXPR ? tree01 : tree11);
10552 return fold_convert_loc (loc, type, tem);
10554 else if (code11 == MINUS_EXPR)
10556 tree tree110, tree111;
10557 tree110 = TREE_OPERAND (tree11, 0);
10558 tree111 = TREE_OPERAND (tree11, 1);
10559 STRIP_NOPS (tree110);
10560 STRIP_NOPS (tree111);
10561 if (TREE_CODE (tree110) == INTEGER_CST
10562 && 0 == compare_tree_int (tree110,
10563 element_precision
10564 (TREE_TYPE (TREE_OPERAND
10565 (arg0, 0))))
10566 && operand_equal_p (tree01, tree111, 0))
10567 return
10568 fold_convert_loc (loc, type,
10569 build2 ((code0 == LSHIFT_EXPR
10570 ? LROTATE_EXPR
10571 : RROTATE_EXPR),
10572 TREE_TYPE (TREE_OPERAND (arg0, 0)),
10573 TREE_OPERAND (arg0, 0), tree01));
10575 else if (code01 == MINUS_EXPR)
10577 tree tree010, tree011;
10578 tree010 = TREE_OPERAND (tree01, 0);
10579 tree011 = TREE_OPERAND (tree01, 1);
10580 STRIP_NOPS (tree010);
10581 STRIP_NOPS (tree011);
10582 if (TREE_CODE (tree010) == INTEGER_CST
10583 && 0 == compare_tree_int (tree010,
10584 element_precision
10585 (TREE_TYPE (TREE_OPERAND
10586 (arg0, 0))))
10587 && operand_equal_p (tree11, tree011, 0))
10588 return fold_convert_loc
10589 (loc, type,
10590 build2 ((code0 != LSHIFT_EXPR
10591 ? LROTATE_EXPR
10592 : RROTATE_EXPR),
10593 TREE_TYPE (TREE_OPERAND (arg0, 0)),
10594 TREE_OPERAND (arg0, 0), tree11));
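/* Editor's illustration: the classic rotate idiom matched above,
   assuming a 32-bit unsigned int and 0 < n < 32 so both shifts are
   well defined.  */
unsigned int rotl32 (unsigned int x, unsigned int n)
{
  return (x << n) + (x >> (32 - n));   /* recognized as LROTATE_EXPR */
}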
10599 associate:
10600 /* In most languages, we can't associate operations on floats through
10601 parentheses. Rather than remember where the parentheses were, we
10602 don't associate floats at all, unless the user has specified
10603 -fassociative-math.
10604 And, we need to make sure type is not saturating. */
10606 if ((! FLOAT_TYPE_P (type) || flag_associative_math)
10607 && !TYPE_SATURATING (type))
10609 tree var0, con0, lit0, minus_lit0;
10610 tree var1, con1, lit1, minus_lit1;
10611 tree atype = type;
10612 bool ok = true;
10614 /* Split both trees into variables, constants, and literals. Then
10615 associate each group together, the constants with literals,
10616 then the result with variables. This increases the chances of
10617 literals being recombined later and of generating relocatable
10618 expressions for the sum of a constant and literal. */
10619 var0 = split_tree (arg0, code, &con0, &lit0, &minus_lit0, 0);
10620 var1 = split_tree (arg1, code, &con1, &lit1, &minus_lit1,
10621 code == MINUS_EXPR);
10623 /* Recombine MINUS_EXPR operands by using PLUS_EXPR. */
10624 if (code == MINUS_EXPR)
10625 code = PLUS_EXPR;
10627 /* With undefined overflow prefer doing association in a type
10628 which wraps on overflow, if that is one of the operand types. */
10629 if ((POINTER_TYPE_P (type) && POINTER_TYPE_OVERFLOW_UNDEFINED)
10630 || (INTEGRAL_TYPE_P (type) && !TYPE_OVERFLOW_WRAPS (type)))
10632 if (INTEGRAL_TYPE_P (TREE_TYPE (arg0))
10633 && TYPE_OVERFLOW_WRAPS (TREE_TYPE (arg0)))
10634 atype = TREE_TYPE (arg0);
10635 else if (INTEGRAL_TYPE_P (TREE_TYPE (arg1))
10636 && TYPE_OVERFLOW_WRAPS (TREE_TYPE (arg1)))
10637 atype = TREE_TYPE (arg1);
10638 gcc_assert (TYPE_PRECISION (atype) == TYPE_PRECISION (type));
10641 /* With undefined overflow we can only associate constants with one
10642 variable, and constants whose association doesn't overflow. */
10643 if ((POINTER_TYPE_P (atype) && POINTER_TYPE_OVERFLOW_UNDEFINED)
10644 || (INTEGRAL_TYPE_P (atype) && !TYPE_OVERFLOW_WRAPS (atype)))
10646 if (var0 && var1)
10648 tree tmp0 = var0;
10649 tree tmp1 = var1;
10651 if (TREE_CODE (tmp0) == NEGATE_EXPR)
10652 tmp0 = TREE_OPERAND (tmp0, 0);
10653 if (CONVERT_EXPR_P (tmp0)
10654 && INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (tmp0, 0)))
10655 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (tmp0, 0)))
10656 <= TYPE_PRECISION (atype)))
10657 tmp0 = TREE_OPERAND (tmp0, 0);
10658 if (TREE_CODE (tmp1) == NEGATE_EXPR)
10659 tmp1 = TREE_OPERAND (tmp1, 0);
10660 if (CONVERT_EXPR_P (tmp1)
10661 && INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (tmp1, 0)))
10662 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (tmp1, 0)))
10663 <= TYPE_PRECISION (atype)))
10664 tmp1 = TREE_OPERAND (tmp1, 0);
10665 /* The only case we can still associate with two variables
10666 is if they are the same, modulo negation and bit-pattern
10667 preserving conversions. */
10668 if (!operand_equal_p (tmp0, tmp1, 0))
10669 ok = false;
10673 /* Only do something if we found more than two objects. Otherwise,
10674 nothing has changed and we risk infinite recursion. */
10675 if (ok
10676 && (2 < ((var0 != 0) + (var1 != 0)
10677 + (con0 != 0) + (con1 != 0)
10678 + (lit0 != 0) + (lit1 != 0)
10679 + (minus_lit0 != 0) + (minus_lit1 != 0))))
10681 bool any_overflows = false;
10682 if (lit0) any_overflows |= TREE_OVERFLOW (lit0);
10683 if (lit1) any_overflows |= TREE_OVERFLOW (lit1);
10684 if (minus_lit0) any_overflows |= TREE_OVERFLOW (minus_lit0);
10685 if (minus_lit1) any_overflows |= TREE_OVERFLOW (minus_lit1);
10686 var0 = associate_trees (loc, var0, var1, code, atype);
10687 con0 = associate_trees (loc, con0, con1, code, atype);
10688 lit0 = associate_trees (loc, lit0, lit1, code, atype);
10689 minus_lit0 = associate_trees (loc, minus_lit0, minus_lit1,
10690 code, atype);
10692	      /* Preserve the MINUS_EXPR if the negative part of the literal is
10693		 greater than the positive part.  Otherwise, the multiplicative
10694		 folding code (i.e. extract_muldiv) may be fooled when unsigned
10695		 constants are subtracted, as in the following
10696		 example: ((X*2 + 4) - 8U)/2.  */
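	      /* Concretely, for X == 1 in 32-bit unsigned arithmetic,
		 ((X*2 + 4) - 8U)/2 == 0xFFFFFFFE / 2 == 0x7FFFFFFF, whereas
		 recombining the literals as (X*2 - 4)/2 would invite
		 extract_muldiv to produce X - 2 == 0xFFFFFFFF.  */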
10697 if (minus_lit0 && lit0)
10699 if (TREE_CODE (lit0) == INTEGER_CST
10700 && TREE_CODE (minus_lit0) == INTEGER_CST
10701 && tree_int_cst_lt (lit0, minus_lit0))
10703 minus_lit0 = associate_trees (loc, minus_lit0, lit0,
10704 MINUS_EXPR, atype);
10705 lit0 = 0;
10707 else
10709 lit0 = associate_trees (loc, lit0, minus_lit0,
10710 MINUS_EXPR, atype);
10711 minus_lit0 = 0;
10715 /* Don't introduce overflows through reassociation. */
10716 if (!any_overflows
10717 && ((lit0 && TREE_OVERFLOW (lit0))
10718 || (minus_lit0 && TREE_OVERFLOW (minus_lit0))))
10719 return NULL_TREE;
10721 if (minus_lit0)
10723 if (con0 == 0)
10724 return
10725 fold_convert_loc (loc, type,
10726 associate_trees (loc, var0, minus_lit0,
10727 MINUS_EXPR, atype));
10728 else
10730 con0 = associate_trees (loc, con0, minus_lit0,
10731 MINUS_EXPR, atype);
10732 return
10733 fold_convert_loc (loc, type,
10734 associate_trees (loc, var0, con0,
10735 PLUS_EXPR, atype));
10739 con0 = associate_trees (loc, con0, lit0, code, atype);
10740 return
10741 fold_convert_loc (loc, type, associate_trees (loc, var0, con0,
10742 code, atype));
10746 return NULL_TREE;
10748 case MINUS_EXPR:
10749 /* Pointer simplifications for subtraction, simple reassociations. */
10750 if (POINTER_TYPE_P (TREE_TYPE (arg1)) && POINTER_TYPE_P (TREE_TYPE (arg0)))
10752 /* (PTR0 p+ A) - (PTR1 p+ B) -> (PTR0 - PTR1) + (A - B) */
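	  /* E.g. (p p+ 4) - (q p+ 12), with byte offsets, becomes
	     (p - q) + (4 - 12), i.e. (p - q) - 8.  */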
10753 if (TREE_CODE (arg0) == POINTER_PLUS_EXPR
10754 && TREE_CODE (arg1) == POINTER_PLUS_EXPR)
10756 tree arg00 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
10757 tree arg01 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
10758 tree arg10 = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 0));
10759 tree arg11 = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 1));
10760 return fold_build2_loc (loc, PLUS_EXPR, type,
10761 fold_build2_loc (loc, MINUS_EXPR, type,
10762 arg00, arg10),
10763 fold_build2_loc (loc, MINUS_EXPR, type,
10764 arg01, arg11));
10766 /* (PTR0 p+ A) - PTR1 -> (PTR0 - PTR1) + A, assuming PTR0 - PTR1 simplifies. */
10767 else if (TREE_CODE (arg0) == POINTER_PLUS_EXPR)
10769 tree arg00 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
10770 tree arg01 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
10771 tree tmp = fold_binary_loc (loc, MINUS_EXPR, type, arg00,
10772 fold_convert_loc (loc, type, arg1));
10773 if (tmp)
10774 return fold_build2_loc (loc, PLUS_EXPR, type, tmp, arg01);
10777 /* A - (-B) -> A + B */
10778 if (TREE_CODE (arg1) == NEGATE_EXPR)
10779 return fold_build2_loc (loc, PLUS_EXPR, type, op0,
10780 fold_convert_loc (loc, type,
10781 TREE_OPERAND (arg1, 0)));
10782 /* (-A) - B -> (-B) - A where B is easily negated and we can swap. */
10783 if (TREE_CODE (arg0) == NEGATE_EXPR
10784 && negate_expr_p (arg1)
10785 && reorder_operands_p (arg0, arg1))
10786 return fold_build2_loc (loc, MINUS_EXPR, type,
10787 fold_convert_loc (loc, type,
10788 negate_expr (arg1)),
10789 fold_convert_loc (loc, type,
10790 TREE_OPERAND (arg0, 0)));
10791 /* Convert -A - 1 to ~A. */
10792 if (TREE_CODE (type) != COMPLEX_TYPE
10793 && TREE_CODE (arg0) == NEGATE_EXPR
10794 && integer_onep (arg1)
10795 && !TYPE_OVERFLOW_TRAPS (type))
10796 return fold_build1_loc (loc, BIT_NOT_EXPR, type,
10797 fold_convert_loc (loc, type,
10798 TREE_OPERAND (arg0, 0)));
10800 /* Convert -1 - A to ~A. */
10801 if (TREE_CODE (type) != COMPLEX_TYPE
10802 && integer_all_onesp (arg0))
10803 return fold_build1_loc (loc, BIT_NOT_EXPR, type, op1);
10806 /* X - (X / Y) * Y is X % Y. */
10807 if ((INTEGRAL_TYPE_P (type) || VECTOR_INTEGER_TYPE_P (type))
10808 && TREE_CODE (arg1) == MULT_EXPR
10809 && TREE_CODE (TREE_OPERAND (arg1, 0)) == TRUNC_DIV_EXPR
10810 && operand_equal_p (arg0,
10811 TREE_OPERAND (TREE_OPERAND (arg1, 0), 0), 0)
10812 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg1, 0), 1),
10813 TREE_OPERAND (arg1, 1), 0))
10814 return
10815 fold_convert_loc (loc, type,
10816 fold_build2_loc (loc, TRUNC_MOD_EXPR, TREE_TYPE (arg0),
10817 arg0, TREE_OPERAND (arg1, 1)));
10819 if (! FLOAT_TYPE_P (type))
10821 if (integer_zerop (arg0))
10822 return negate_expr (fold_convert_loc (loc, type, arg1));
10823 if (integer_zerop (arg1))
10824 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
10826 /* Fold A - (A & B) into ~B & A. */
10827 if (!TREE_SIDE_EFFECTS (arg0)
10828 && TREE_CODE (arg1) == BIT_AND_EXPR)
10830 if (operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0))
10832 tree arg10 = fold_convert_loc (loc, type,
10833 TREE_OPERAND (arg1, 0));
10834 return fold_build2_loc (loc, BIT_AND_EXPR, type,
10835 fold_build1_loc (loc, BIT_NOT_EXPR,
10836 type, arg10),
10837 fold_convert_loc (loc, type, arg0));
10839 if (operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
10841 tree arg11 = fold_convert_loc (loc,
10842 type, TREE_OPERAND (arg1, 1));
10843 return fold_build2_loc (loc, BIT_AND_EXPR, type,
10844 fold_build1_loc (loc, BIT_NOT_EXPR,
10845 type, arg11),
10846 fold_convert_loc (loc, type, arg0));
10850 /* Fold (A & ~B) - (A & B) into (A ^ B) - B, where B is
10851 any power of 2 minus 1. */
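	  /* E.g. for B == 7 (2^3 - 1) and A == 45 (0b101101):
	     (A & ~B) - (A & B) == 40 - 5 == 35, and equally
	     (A ^ B) - B == 42 - 7 == 35.  */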
10852 if (TREE_CODE (arg0) == BIT_AND_EXPR
10853 && TREE_CODE (arg1) == BIT_AND_EXPR
10854 && operand_equal_p (TREE_OPERAND (arg0, 0),
10855 TREE_OPERAND (arg1, 0), 0))
10857 tree mask0 = TREE_OPERAND (arg0, 1);
10858 tree mask1 = TREE_OPERAND (arg1, 1);
10859 tree tem = fold_build1_loc (loc, BIT_NOT_EXPR, type, mask0);
10861 if (operand_equal_p (tem, mask1, 0))
10863 tem = fold_build2_loc (loc, BIT_XOR_EXPR, type,
10864 TREE_OPERAND (arg0, 0), mask1);
10865 return fold_build2_loc (loc, MINUS_EXPR, type, tem, mask1);
10870 /* See if ARG1 is zero and X - ARG1 reduces to X. */
10871 else if (fold_real_zero_addition_p (TREE_TYPE (arg0), arg1, 1))
10872 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
10874 /* (ARG0 - ARG1) is the same as (-ARG1 + ARG0). So check whether
10875 ARG0 is zero and X + ARG0 reduces to X, since that would mean
10876 (-ARG1 + ARG0) reduces to -ARG1. */
10877 else if (fold_real_zero_addition_p (TREE_TYPE (arg1), arg0, 0))
10878 return negate_expr (fold_convert_loc (loc, type, arg1));
10880 /* Fold __complex__ ( x, 0 ) - __complex__ ( 0, y ) to
10881 __complex__ ( x, -y ). This is not the same for SNaNs or if
10882 signed zeros are involved. */
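	  /* E.g. for y == +0.0, the imaginary part of the subtraction is
	     0.0 - y == +0.0, but the folded form gives -y == -0.0; and an
	     SNaN operand would signal in the subtraction but not in the
	     negation.  */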
10883 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
10884 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
10885 && COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0)))
10887 tree rtype = TREE_TYPE (TREE_TYPE (arg0));
10888 tree arg0r = fold_unary_loc (loc, REALPART_EXPR, rtype, arg0);
10889 tree arg0i = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg0);
10890 bool arg0rz = false, arg0iz = false;
10891 if ((arg0r && (arg0rz = real_zerop (arg0r)))
10892 || (arg0i && (arg0iz = real_zerop (arg0i))))
10894 tree arg1r = fold_unary_loc (loc, REALPART_EXPR, rtype, arg1);
10895 tree arg1i = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg1);
10896 if (arg0rz && arg1i && real_zerop (arg1i))
10898 tree rp = fold_build1_loc (loc, NEGATE_EXPR, rtype,
10899 arg1r ? arg1r
10900 : build1 (REALPART_EXPR, rtype, arg1));
10901 tree ip = arg0i ? arg0i
10902 : build1 (IMAGPART_EXPR, rtype, arg0);
10903 return fold_build2_loc (loc, COMPLEX_EXPR, type, rp, ip);
10905 else if (arg0iz && arg1r && real_zerop (arg1r))
10907 tree rp = arg0r ? arg0r
10908 : build1 (REALPART_EXPR, rtype, arg0);
10909 tree ip = fold_build1_loc (loc, NEGATE_EXPR, rtype,
10910 arg1i ? arg1i
10911 : build1 (IMAGPART_EXPR, rtype, arg1));
10912 return fold_build2_loc (loc, COMPLEX_EXPR, type, rp, ip);
10917	      /* Fold &x - &x.  This can happen from &x.foo - &x.
10918		 This is unsafe for certain floats even in non-IEEE formats.
10919		 In IEEE, it is unsafe because it gives the wrong result for NaNs.
10920		 Also note that operand_equal_p is always false if an operand
10921		 is volatile.  */
10923 if ((!FLOAT_TYPE_P (type) || !HONOR_NANS (TYPE_MODE (type)))
10924 && operand_equal_p (arg0, arg1, 0))
10925 return build_zero_cst (type);
10927 /* A - B -> A + (-B) if B is easily negatable. */
10928 if (negate_expr_p (arg1)
10929 && ((FLOAT_TYPE_P (type)
10930 /* Avoid this transformation if B is a positive REAL_CST. */
10931 && (TREE_CODE (arg1) != REAL_CST
10932 || REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg1))))
10933 || INTEGRAL_TYPE_P (type)))
10934 return fold_build2_loc (loc, PLUS_EXPR, type,
10935 fold_convert_loc (loc, type, arg0),
10936 fold_convert_loc (loc, type,
10937 negate_expr (arg1)));
10939 /* Try folding difference of addresses. */
10941 HOST_WIDE_INT diff;
10943 if ((TREE_CODE (arg0) == ADDR_EXPR
10944 || TREE_CODE (arg1) == ADDR_EXPR)
10945 && ptr_difference_const (arg0, arg1, &diff))
10946 return build_int_cst_type (type, diff);
10949 /* Fold &a[i] - &a[j] to i-j. */
10950 if (TREE_CODE (arg0) == ADDR_EXPR
10951 && TREE_CODE (TREE_OPERAND (arg0, 0)) == ARRAY_REF
10952 && TREE_CODE (arg1) == ADDR_EXPR
10953 && TREE_CODE (TREE_OPERAND (arg1, 0)) == ARRAY_REF)
10955 tree tem = fold_addr_of_array_ref_difference (loc, type,
10956 TREE_OPERAND (arg0, 0),
10957 TREE_OPERAND (arg1, 0));
10958 if (tem)
10959 return tem;
10962 if (FLOAT_TYPE_P (type)
10963 && flag_unsafe_math_optimizations
10964 && (TREE_CODE (arg0) == RDIV_EXPR || TREE_CODE (arg0) == MULT_EXPR)
10965 && (TREE_CODE (arg1) == RDIV_EXPR || TREE_CODE (arg1) == MULT_EXPR)
10966 && (tem = distribute_real_division (loc, code, type, arg0, arg1)))
10967 return tem;
10969	      /* Handle (A1 * C1) - (A2 * C2) with A1, A2 or C1, C2 being the same
10970		 or one of them being 1.  Make sure the type is not saturating and has
10971		 the signedness of the stripped operands, as fold_plusminus_mult_expr will re-associate.
10972	         ??? The latter condition should use TYPE_OVERFLOW_* flags instead.  */
10973 if ((TREE_CODE (arg0) == MULT_EXPR
10974 || TREE_CODE (arg1) == MULT_EXPR)
10975 && !TYPE_SATURATING (type)
10976 && TYPE_UNSIGNED (type) == TYPE_UNSIGNED (TREE_TYPE (arg0))
10977 && TYPE_UNSIGNED (type) == TYPE_UNSIGNED (TREE_TYPE (arg1))
10978 && (!FLOAT_TYPE_P (type) || flag_associative_math))
10980 tree tem = fold_plusminus_mult_expr (loc, code, type, arg0, arg1);
10981 if (tem)
10982 return tem;
10985 goto associate;
10987 case MULT_EXPR:
10988 /* (-A) * (-B) -> A * B */
10989 if (TREE_CODE (arg0) == NEGATE_EXPR && negate_expr_p (arg1))
10990 return fold_build2_loc (loc, MULT_EXPR, type,
10991 fold_convert_loc (loc, type,
10992 TREE_OPERAND (arg0, 0)),
10993 fold_convert_loc (loc, type,
10994 negate_expr (arg1)));
10995 if (TREE_CODE (arg1) == NEGATE_EXPR && negate_expr_p (arg0))
10996 return fold_build2_loc (loc, MULT_EXPR, type,
10997 fold_convert_loc (loc, type,
10998 negate_expr (arg0)),
10999 fold_convert_loc (loc, type,
11000 TREE_OPERAND (arg1, 0)));
11002 if (! FLOAT_TYPE_P (type))
11004 if (integer_zerop (arg1))
11005 return omit_one_operand_loc (loc, type, arg1, arg0);
11006 if (integer_onep (arg1))
11007 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
11008 /* Transform x * -1 into -x. Make sure to do the negation
11009 on the original operand with conversions not stripped
11010 because we can only strip non-sign-changing conversions. */
11011 if (integer_minus_onep (arg1))
11012 return fold_convert_loc (loc, type, negate_expr (op0));
11013 /* Transform x * -C into -x * C if x is easily negatable. */
11014 if (TREE_CODE (arg1) == INTEGER_CST
11015 && tree_int_cst_sgn (arg1) == -1
11016 && negate_expr_p (arg0)
11017 && (tem = negate_expr (arg1)) != arg1
11018 && !TREE_OVERFLOW (tem))
11019 return fold_build2_loc (loc, MULT_EXPR, type,
11020 fold_convert_loc (loc, type,
11021 negate_expr (arg0)),
11022 tem);
11024 /* (a * (1 << b)) is (a << b) */
11025 if (TREE_CODE (arg1) == LSHIFT_EXPR
11026 && integer_onep (TREE_OPERAND (arg1, 0)))
11027 return fold_build2_loc (loc, LSHIFT_EXPR, type, op0,
11028 TREE_OPERAND (arg1, 1));
11029 if (TREE_CODE (arg0) == LSHIFT_EXPR
11030 && integer_onep (TREE_OPERAND (arg0, 0)))
11031 return fold_build2_loc (loc, LSHIFT_EXPR, type, op1,
11032 TREE_OPERAND (arg0, 1));
11034 /* (A + A) * C -> A * 2 * C */
11035 if (TREE_CODE (arg0) == PLUS_EXPR
11036 && TREE_CODE (arg1) == INTEGER_CST
11037 && operand_equal_p (TREE_OPERAND (arg0, 0),
11038 TREE_OPERAND (arg0, 1), 0))
11039 return fold_build2_loc (loc, MULT_EXPR, type,
11040 omit_one_operand_loc (loc, type,
11041 TREE_OPERAND (arg0, 0),
11042 TREE_OPERAND (arg0, 1)),
11043 fold_build2_loc (loc, MULT_EXPR, type,
11044 build_int_cst (type, 2) , arg1));
11046 /* ((T) (X /[ex] C)) * C cancels out if the conversion is
11047 sign-changing only. */
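	      /* E.g. if x is unsigned and known to be an exact multiple of 4,
		 (int) (x /[ex] 4) * 4 folds to (int) x: the division is
		 exact, so multiplying back recovers the value, and a
		 sign-changing conversion preserves the bit pattern.  */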
11048 if (TREE_CODE (arg1) == INTEGER_CST
11049 && TREE_CODE (arg0) == EXACT_DIV_EXPR
11050 && operand_equal_p (arg1, TREE_OPERAND (arg0, 1), 0))
11051 return fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
11053 strict_overflow_p = false;
11054 if (TREE_CODE (arg1) == INTEGER_CST
11055 && 0 != (tem = extract_muldiv (op0, arg1, code, NULL_TREE,
11056 &strict_overflow_p)))
11058 if (strict_overflow_p)
11059 fold_overflow_warning (("assuming signed overflow does not "
11060 "occur when simplifying "
11061 "multiplication"),
11062 WARN_STRICT_OVERFLOW_MISC);
11063 return fold_convert_loc (loc, type, tem);
11066 /* Optimize z * conj(z) for integer complex numbers. */
11067 if (TREE_CODE (arg0) == CONJ_EXPR
11068 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
11069 return fold_mult_zconjz (loc, type, arg1);
11070 if (TREE_CODE (arg1) == CONJ_EXPR
11071 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
11072 return fold_mult_zconjz (loc, type, arg0);
11074 else
11076 /* Maybe fold x * 0 to 0. The expressions aren't the same
11077 when x is NaN, since x * 0 is also NaN. Nor are they the
11078 same in modes with signed zeros, since multiplying a
11079 negative value by 0 gives -0, not +0. */
11080 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0)))
11081 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
11082 && real_zerop (arg1))
11083 return omit_one_operand_loc (loc, type, arg1, arg0);
11084 /* In IEEE floating point, x*1 is not equivalent to x for snans.
11085 Likewise for complex arithmetic with signed zeros. */
11086 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
11087 && (!HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
11088 || !COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0)))
11089 && real_onep (arg1))
11090 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
11092 /* Transform x * -1.0 into -x. */
11093 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
11094 && (!HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
11095 || !COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0)))
11096 && real_minus_onep (arg1))
11097 return fold_convert_loc (loc, type, negate_expr (arg0));
11099	      /* Convert (C1/X)*C2 into (C1*C2)/X.  This transformation may change
11100	         the result for floating-point types due to rounding, so it is applied
11101	         only if -fassociative-math was specified.  */
11102 if (flag_associative_math
11103 && TREE_CODE (arg0) == RDIV_EXPR
11104 && TREE_CODE (arg1) == REAL_CST
11105 && TREE_CODE (TREE_OPERAND (arg0, 0)) == REAL_CST)
11107 tree tem = const_binop (MULT_EXPR, TREE_OPERAND (arg0, 0),
11108 arg1);
11109 if (tem)
11110 return fold_build2_loc (loc, RDIV_EXPR, type, tem,
11111 TREE_OPERAND (arg0, 1));
11114 /* Strip sign operations from X in X*X, i.e. -Y*-Y -> Y*Y. */
11115 if (operand_equal_p (arg0, arg1, 0))
11117 tree tem = fold_strip_sign_ops (arg0);
11118 if (tem != NULL_TREE)
11120 tem = fold_convert_loc (loc, type, tem);
11121 return fold_build2_loc (loc, MULT_EXPR, type, tem, tem);
11125 /* Fold z * +-I to __complex__ (-+__imag z, +-__real z).
11126 This is not the same for NaNs or if signed zeros are
11127 involved. */
11128 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0)))
11129 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
11130 && COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0))
11131 && TREE_CODE (arg1) == COMPLEX_CST
11132 && real_zerop (TREE_REALPART (arg1)))
11134 tree rtype = TREE_TYPE (TREE_TYPE (arg0));
11135 if (real_onep (TREE_IMAGPART (arg1)))
11136 return
11137 fold_build2_loc (loc, COMPLEX_EXPR, type,
11138 negate_expr (fold_build1_loc (loc, IMAGPART_EXPR,
11139 rtype, arg0)),
11140 fold_build1_loc (loc, REALPART_EXPR, rtype, arg0));
11141 else if (real_minus_onep (TREE_IMAGPART (arg1)))
11142 return
11143 fold_build2_loc (loc, COMPLEX_EXPR, type,
11144 fold_build1_loc (loc, IMAGPART_EXPR, rtype, arg0),
11145 negate_expr (fold_build1_loc (loc, REALPART_EXPR,
11146 rtype, arg0)));
11149 /* Optimize z * conj(z) for floating point complex numbers.
11150 Guarded by flag_unsafe_math_optimizations as non-finite
11151 imaginary components don't produce scalar results. */
11152 if (flag_unsafe_math_optimizations
11153 && TREE_CODE (arg0) == CONJ_EXPR
11154 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
11155 return fold_mult_zconjz (loc, type, arg1);
11156 if (flag_unsafe_math_optimizations
11157 && TREE_CODE (arg1) == CONJ_EXPR
11158 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
11159 return fold_mult_zconjz (loc, type, arg0);
11161 if (flag_unsafe_math_optimizations)
11163 enum built_in_function fcode0 = builtin_mathfn_code (arg0);
11164 enum built_in_function fcode1 = builtin_mathfn_code (arg1);
11166 /* Optimizations of root(...)*root(...). */
11167 if (fcode0 == fcode1 && BUILTIN_ROOT_P (fcode0))
11169 tree rootfn, arg;
11170 tree arg00 = CALL_EXPR_ARG (arg0, 0);
11171 tree arg10 = CALL_EXPR_ARG (arg1, 0);
11173 /* Optimize sqrt(x)*sqrt(x) as x. */
11174 if (BUILTIN_SQRT_P (fcode0)
11175 && operand_equal_p (arg00, arg10, 0)
11176 && ! HONOR_SNANS (TYPE_MODE (type)))
11177 return arg00;
11179 /* Optimize root(x)*root(y) as root(x*y). */
11180 rootfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
11181 arg = fold_build2_loc (loc, MULT_EXPR, type, arg00, arg10);
11182 return build_call_expr_loc (loc, rootfn, 1, arg);
11185 /* Optimize expN(x)*expN(y) as expN(x+y). */
11186 if (fcode0 == fcode1 && BUILTIN_EXPONENT_P (fcode0))
11188 tree expfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
11189 tree arg = fold_build2_loc (loc, PLUS_EXPR, type,
11190 CALL_EXPR_ARG (arg0, 0),
11191 CALL_EXPR_ARG (arg1, 0));
11192 return build_call_expr_loc (loc, expfn, 1, arg);
11195 /* Optimizations of pow(...)*pow(...). */
11196 if ((fcode0 == BUILT_IN_POW && fcode1 == BUILT_IN_POW)
11197 || (fcode0 == BUILT_IN_POWF && fcode1 == BUILT_IN_POWF)
11198 || (fcode0 == BUILT_IN_POWL && fcode1 == BUILT_IN_POWL))
11200 tree arg00 = CALL_EXPR_ARG (arg0, 0);
11201 tree arg01 = CALL_EXPR_ARG (arg0, 1);
11202 tree arg10 = CALL_EXPR_ARG (arg1, 0);
11203 tree arg11 = CALL_EXPR_ARG (arg1, 1);
11205 /* Optimize pow(x,y)*pow(z,y) as pow(x*z,y). */
11206 if (operand_equal_p (arg01, arg11, 0))
11208 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
11209 tree arg = fold_build2_loc (loc, MULT_EXPR, type,
11210 arg00, arg10);
11211 return build_call_expr_loc (loc, powfn, 2, arg, arg01);
11214 /* Optimize pow(x,y)*pow(x,z) as pow(x,y+z). */
11215 if (operand_equal_p (arg00, arg10, 0))
11217 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
11218 tree arg = fold_build2_loc (loc, PLUS_EXPR, type,
11219 arg01, arg11);
11220 return build_call_expr_loc (loc, powfn, 2, arg00, arg);
11224 /* Optimize tan(x)*cos(x) as sin(x). */
11225 if (((fcode0 == BUILT_IN_TAN && fcode1 == BUILT_IN_COS)
11226 || (fcode0 == BUILT_IN_TANF && fcode1 == BUILT_IN_COSF)
11227 || (fcode0 == BUILT_IN_TANL && fcode1 == BUILT_IN_COSL)
11228 || (fcode0 == BUILT_IN_COS && fcode1 == BUILT_IN_TAN)
11229 || (fcode0 == BUILT_IN_COSF && fcode1 == BUILT_IN_TANF)
11230 || (fcode0 == BUILT_IN_COSL && fcode1 == BUILT_IN_TANL))
11231 && operand_equal_p (CALL_EXPR_ARG (arg0, 0),
11232 CALL_EXPR_ARG (arg1, 0), 0))
11234 tree sinfn = mathfn_built_in (type, BUILT_IN_SIN);
11236 if (sinfn != NULL_TREE)
11237 return build_call_expr_loc (loc, sinfn, 1,
11238 CALL_EXPR_ARG (arg0, 0));
11241 /* Optimize x*pow(x,c) as pow(x,c+1). */
11242 if (fcode1 == BUILT_IN_POW
11243 || fcode1 == BUILT_IN_POWF
11244 || fcode1 == BUILT_IN_POWL)
11246 tree arg10 = CALL_EXPR_ARG (arg1, 0);
11247 tree arg11 = CALL_EXPR_ARG (arg1, 1);
11248 if (TREE_CODE (arg11) == REAL_CST
11249 && !TREE_OVERFLOW (arg11)
11250 && operand_equal_p (arg0, arg10, 0))
11252 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg1), 0);
11253 REAL_VALUE_TYPE c;
11254 tree arg;
11256 c = TREE_REAL_CST (arg11);
11257 real_arithmetic (&c, PLUS_EXPR, &c, &dconst1);
11258 arg = build_real (type, c);
11259 return build_call_expr_loc (loc, powfn, 2, arg0, arg);
11263 /* Optimize pow(x,c)*x as pow(x,c+1). */
11264 if (fcode0 == BUILT_IN_POW
11265 || fcode0 == BUILT_IN_POWF
11266 || fcode0 == BUILT_IN_POWL)
11268 tree arg00 = CALL_EXPR_ARG (arg0, 0);
11269 tree arg01 = CALL_EXPR_ARG (arg0, 1);
11270 if (TREE_CODE (arg01) == REAL_CST
11271 && !TREE_OVERFLOW (arg01)
11272 && operand_equal_p (arg1, arg00, 0))
11274 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
11275 REAL_VALUE_TYPE c;
11276 tree arg;
11278 c = TREE_REAL_CST (arg01);
11279 real_arithmetic (&c, PLUS_EXPR, &c, &dconst1);
11280 arg = build_real (type, c);
11281 return build_call_expr_loc (loc, powfn, 2, arg1, arg);
11285 /* Canonicalize x*x as pow(x,2.0), which is expanded as x*x. */
11286 if (!in_gimple_form
11287 && optimize
11288 && operand_equal_p (arg0, arg1, 0))
11290 tree powfn = mathfn_built_in (type, BUILT_IN_POW);
11292 if (powfn)
11294 tree arg = build_real (type, dconst2);
11295 return build_call_expr_loc (loc, powfn, 2, arg0, arg);
11300 goto associate;
11302 case BIT_IOR_EXPR:
11303 bit_ior:
11304 if (integer_all_onesp (arg1))
11305 return omit_one_operand_loc (loc, type, arg1, arg0);
11306 if (integer_zerop (arg1))
11307 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
11308 if (operand_equal_p (arg0, arg1, 0))
11309 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
11311 /* ~X | X is -1. */
11312 if (TREE_CODE (arg0) == BIT_NOT_EXPR
11313 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
11315 t1 = build_zero_cst (type);
11316 t1 = fold_unary_loc (loc, BIT_NOT_EXPR, type, t1);
11317 return omit_one_operand_loc (loc, type, t1, arg1);
11320 /* X | ~X is -1. */
11321 if (TREE_CODE (arg1) == BIT_NOT_EXPR
11322 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
11324 t1 = build_zero_cst (type);
11325 t1 = fold_unary_loc (loc, BIT_NOT_EXPR, type, t1);
11326 return omit_one_operand_loc (loc, type, t1, arg0);
11329 /* Canonicalize (X & C1) | C2. */
11330 if (TREE_CODE (arg0) == BIT_AND_EXPR
11331 && TREE_CODE (arg1) == INTEGER_CST
11332 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
11334 int width = TYPE_PRECISION (type), w;
11335 wide_int c1 = TREE_OPERAND (arg0, 1);
11336 wide_int c2 = arg1;
11338 /* If (C1&C2) == C1, then (X&C1)|C2 becomes (X,C2). */
11339 if ((c1 & c2) == c1)
11340 return omit_one_operand_loc (loc, type, arg1,
11341 TREE_OPERAND (arg0, 0));
11343 wide_int msk = wi::mask (width, false,
11344 TYPE_PRECISION (TREE_TYPE (arg1)));
11346 /* If (C1|C2) == ~0 then (X&C1)|C2 becomes X|C2. */
11347 if (msk.and_not (c1 | c2) == 0)
11348 return fold_build2_loc (loc, BIT_IOR_EXPR, type,
11349 TREE_OPERAND (arg0, 0), arg1);
11351 /* Minimize the number of bits set in C1, i.e. C1 := C1 & ~C2,
11352 unless (C1 & ~C2) | (C2 & C3) for some C3 is a mask of some
11353 mode which allows further optimizations. */
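	  /* E.g. (X & 3) | 2 becomes (X & 1) | 2: bit 1 of the result is
	     forced to 1 by the IOR, so masking it first is redundant.  */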
11354 c1 &= msk;
11355 c2 &= msk;
11356 wide_int c3 = c1.and_not (c2);
11357 for (w = BITS_PER_UNIT; w <= width; w <<= 1)
11359 wide_int mask = wi::mask (w, false,
11360 TYPE_PRECISION (type));
11361 if (((c1 | c2) & mask) == mask && c1.and_not (mask) == 0)
11363 c3 = mask;
11364 break;
11368 if (c3 != c1)
11369 return fold_build2_loc (loc, BIT_IOR_EXPR, type,
11370 fold_build2_loc (loc, BIT_AND_EXPR, type,
11371 TREE_OPERAND (arg0, 0),
11372 wide_int_to_tree (type,
11373 c3)),
11374 arg1);
11377 /* (X & Y) | Y is (X, Y). */
11378 if (TREE_CODE (arg0) == BIT_AND_EXPR
11379 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
11380 return omit_one_operand_loc (loc, type, arg1, TREE_OPERAND (arg0, 0));
11381 /* (X & Y) | X is (Y, X). */
11382 if (TREE_CODE (arg0) == BIT_AND_EXPR
11383 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
11384 && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
11385 return omit_one_operand_loc (loc, type, arg1, TREE_OPERAND (arg0, 1));
11386 /* X | (X & Y) is (Y, X). */
11387 if (TREE_CODE (arg1) == BIT_AND_EXPR
11388 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0)
11389 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 1)))
11390 return omit_one_operand_loc (loc, type, arg0, TREE_OPERAND (arg1, 1));
11391 /* X | (Y & X) is (Y, X). */
11392 if (TREE_CODE (arg1) == BIT_AND_EXPR
11393 && operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0)
11394 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
11395 return omit_one_operand_loc (loc, type, arg0, TREE_OPERAND (arg1, 0));
11397 /* (X & ~Y) | (~X & Y) is X ^ Y */
11398 if (TREE_CODE (arg0) == BIT_AND_EXPR
11399 && TREE_CODE (arg1) == BIT_AND_EXPR)
11401 tree a0, a1, l0, l1, n0, n1;
11403 a0 = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 0));
11404 a1 = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 1));
11406 l0 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
11407 l1 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
11409 n0 = fold_build1_loc (loc, BIT_NOT_EXPR, type, l0);
11410 n1 = fold_build1_loc (loc, BIT_NOT_EXPR, type, l1);
11412 if ((operand_equal_p (n0, a0, 0)
11413 && operand_equal_p (n1, a1, 0))
11414 || (operand_equal_p (n0, a1, 0)
11415 && operand_equal_p (n1, a0, 0)))
11416 return fold_build2_loc (loc, BIT_XOR_EXPR, type, l0, n1);
11419 t1 = distribute_bit_expr (loc, code, type, arg0, arg1);
11420 if (t1 != NULL_TREE)
11421 return t1;
11423 /* Convert (or (not arg0) (not arg1)) to (not (and (arg0) (arg1))).
11425 This results in more efficient code for machines without a NAND
11426 instruction. Combine will canonicalize to the first form
11427 which will allow use of NAND instructions provided by the
11428 backend if they exist. */
11429 if (TREE_CODE (arg0) == BIT_NOT_EXPR
11430 && TREE_CODE (arg1) == BIT_NOT_EXPR)
11432 return
11433 fold_build1_loc (loc, BIT_NOT_EXPR, type,
11434 build2 (BIT_AND_EXPR, type,
11435 fold_convert_loc (loc, type,
11436 TREE_OPERAND (arg0, 0)),
11437 fold_convert_loc (loc, type,
11438 TREE_OPERAND (arg1, 0))));
11441 /* See if this can be simplified into a rotate first. If that
11442 is unsuccessful continue in the association code. */
11443 goto bit_rotate;
11445 case BIT_XOR_EXPR:
11446 if (integer_zerop (arg1))
11447 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
11448 if (integer_all_onesp (arg1))
11449 return fold_build1_loc (loc, BIT_NOT_EXPR, type, op0);
11450 if (operand_equal_p (arg0, arg1, 0))
11451 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
11453 /* ~X ^ X is -1. */
11454 if (TREE_CODE (arg0) == BIT_NOT_EXPR
11455 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
11457 t1 = build_zero_cst (type);
11458 t1 = fold_unary_loc (loc, BIT_NOT_EXPR, type, t1);
11459 return omit_one_operand_loc (loc, type, t1, arg1);
11462 /* X ^ ~X is -1. */
11463 if (TREE_CODE (arg1) == BIT_NOT_EXPR
11464 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
11466 t1 = build_zero_cst (type);
11467 t1 = fold_unary_loc (loc, BIT_NOT_EXPR, type, t1);
11468 return omit_one_operand_loc (loc, type, t1, arg0);
11471 /* If we are XORing two BIT_AND_EXPR's, both of which are and'ing
11472 with a constant, and the two constants have no bits in common,
11473 we should treat this as a BIT_IOR_EXPR since this may produce more
11474 simplifications. */
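      /* E.g. (X & 0xF0) ^ (Y & 0x0F) is the same as
	 (X & 0xF0) | (Y & 0x0F): with no bits in common, no bit of the
	 XOR can be cancelled, so it behaves exactly like an IOR.  */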
11475 if (TREE_CODE (arg0) == BIT_AND_EXPR
11476 && TREE_CODE (arg1) == BIT_AND_EXPR
11477 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
11478 && TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
11479 && wi::bit_and (TREE_OPERAND (arg0, 1),
11480 TREE_OPERAND (arg1, 1)) == 0)
11482 code = BIT_IOR_EXPR;
11483 goto bit_ior;
11486	      /* (X | Y) ^ X -> Y & ~X */
11487 if (TREE_CODE (arg0) == BIT_IOR_EXPR
11488 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
11490 tree t2 = TREE_OPERAND (arg0, 1);
11491 t1 = fold_build1_loc (loc, BIT_NOT_EXPR, TREE_TYPE (arg1),
11492 arg1);
11493 t1 = fold_build2_loc (loc, BIT_AND_EXPR, type,
11494 fold_convert_loc (loc, type, t2),
11495 fold_convert_loc (loc, type, t1));
11496 return t1;
11499	      /* (Y | X) ^ X -> Y & ~X */
11500 if (TREE_CODE (arg0) == BIT_IOR_EXPR
11501 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
11503 tree t2 = TREE_OPERAND (arg0, 0);
11504 t1 = fold_build1_loc (loc, BIT_NOT_EXPR, TREE_TYPE (arg1),
11505 arg1);
11506 t1 = fold_build2_loc (loc, BIT_AND_EXPR, type,
11507 fold_convert_loc (loc, type, t2),
11508 fold_convert_loc (loc, type, t1));
11509 return t1;
11512	      /* X ^ (X | Y) -> Y & ~X */
11513 if (TREE_CODE (arg1) == BIT_IOR_EXPR
11514 && operand_equal_p (TREE_OPERAND (arg1, 0), arg0, 0))
11516 tree t2 = TREE_OPERAND (arg1, 1);
11517 t1 = fold_build1_loc (loc, BIT_NOT_EXPR, TREE_TYPE (arg0),
11518 arg0);
11519 t1 = fold_build2_loc (loc, BIT_AND_EXPR, type,
11520 fold_convert_loc (loc, type, t2),
11521 fold_convert_loc (loc, type, t1));
11522 return t1;
11525	      /* X ^ (Y | X) -> Y & ~X */
11526 if (TREE_CODE (arg1) == BIT_IOR_EXPR
11527 && operand_equal_p (TREE_OPERAND (arg1, 1), arg0, 0))
11529 tree t2 = TREE_OPERAND (arg1, 0);
11530 t1 = fold_build1_loc (loc, BIT_NOT_EXPR, TREE_TYPE (arg0),
11531 arg0);
11532 t1 = fold_build2_loc (loc, BIT_AND_EXPR, type,
11533 fold_convert_loc (loc, type, t2),
11534 fold_convert_loc (loc, type, t1));
11535 return t1;
11538 /* Convert ~X ^ ~Y to X ^ Y. */
11539 if (TREE_CODE (arg0) == BIT_NOT_EXPR
11540 && TREE_CODE (arg1) == BIT_NOT_EXPR)
11541 return fold_build2_loc (loc, code, type,
11542 fold_convert_loc (loc, type,
11543 TREE_OPERAND (arg0, 0)),
11544 fold_convert_loc (loc, type,
11545 TREE_OPERAND (arg1, 0)));
11547 /* Convert ~X ^ C to X ^ ~C. */
11548 if (TREE_CODE (arg0) == BIT_NOT_EXPR
11549 && TREE_CODE (arg1) == INTEGER_CST)
11550 return fold_build2_loc (loc, code, type,
11551 fold_convert_loc (loc, type,
11552 TREE_OPERAND (arg0, 0)),
11553 fold_build1_loc (loc, BIT_NOT_EXPR, type, arg1));
11555 /* Fold (X & 1) ^ 1 as (X & 1) == 0. */
11556 if (TREE_CODE (arg0) == BIT_AND_EXPR
11557 && integer_onep (TREE_OPERAND (arg0, 1))
11558 && integer_onep (arg1))
11559 return fold_build2_loc (loc, EQ_EXPR, type, arg0,
11560 build_zero_cst (TREE_TYPE (arg0)));
11562 /* Fold (X & Y) ^ Y as ~X & Y. */
11563 if (TREE_CODE (arg0) == BIT_AND_EXPR
11564 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
11566 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
11567 return fold_build2_loc (loc, BIT_AND_EXPR, type,
11568 fold_build1_loc (loc, BIT_NOT_EXPR, type, tem),
11569 fold_convert_loc (loc, type, arg1));
11571 /* Fold (X & Y) ^ X as ~Y & X. */
11572 if (TREE_CODE (arg0) == BIT_AND_EXPR
11573 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
11574 && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
11576 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
11577 return fold_build2_loc (loc, BIT_AND_EXPR, type,
11578 fold_build1_loc (loc, BIT_NOT_EXPR, type, tem),
11579 fold_convert_loc (loc, type, arg1));
11581 /* Fold X ^ (X & Y) as X & ~Y. */
11582 if (TREE_CODE (arg1) == BIT_AND_EXPR
11583 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
11585 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 1));
11586 return fold_build2_loc (loc, BIT_AND_EXPR, type,
11587 fold_convert_loc (loc, type, arg0),
11588 fold_build1_loc (loc, BIT_NOT_EXPR, type, tem));
11590 /* Fold X ^ (Y & X) as ~Y & X. */
11591 if (TREE_CODE (arg1) == BIT_AND_EXPR
11592 && operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0)
11593 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
11595 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 0));
11596 return fold_build2_loc (loc, BIT_AND_EXPR, type,
11597 fold_build1_loc (loc, BIT_NOT_EXPR, type, tem),
11598 fold_convert_loc (loc, type, arg0));
11601 /* See if this can be simplified into a rotate first. If that
11602 is unsuccessful continue in the association code. */
11603 goto bit_rotate;
11605 case BIT_AND_EXPR:
11606 if (integer_all_onesp (arg1))
11607 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
11608 if (integer_zerop (arg1))
11609 return omit_one_operand_loc (loc, type, arg1, arg0);
11610 if (operand_equal_p (arg0, arg1, 0))
11611 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
11613 /* ~X & X, (X == 0) & X, and !X & X are always zero. */
11614 if ((TREE_CODE (arg0) == BIT_NOT_EXPR
11615 || TREE_CODE (arg0) == TRUTH_NOT_EXPR
11616 || (TREE_CODE (arg0) == EQ_EXPR
11617 && integer_zerop (TREE_OPERAND (arg0, 1))))
11618 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
11619 return omit_one_operand_loc (loc, type, integer_zero_node, arg1);
11621 /* X & ~X , X & (X == 0), and X & !X are always zero. */
11622 if ((TREE_CODE (arg1) == BIT_NOT_EXPR
11623 || TREE_CODE (arg1) == TRUTH_NOT_EXPR
11624 || (TREE_CODE (arg1) == EQ_EXPR
11625 && integer_zerop (TREE_OPERAND (arg1, 1))))
11626 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
11627 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
11629 /* Canonicalize (X | C1) & C2 as (X & C2) | (C1 & C2). */
11630 if (TREE_CODE (arg0) == BIT_IOR_EXPR
11631 && TREE_CODE (arg1) == INTEGER_CST
11632 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
11634 tree tmp1 = fold_convert_loc (loc, type, arg1);
11635 tree tmp2 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
11636 tree tmp3 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
11637 tmp2 = fold_build2_loc (loc, BIT_AND_EXPR, type, tmp2, tmp1);
11638 tmp3 = fold_build2_loc (loc, BIT_AND_EXPR, type, tmp3, tmp1);
11639 return
11640 fold_convert_loc (loc, type,
11641 fold_build2_loc (loc, BIT_IOR_EXPR,
11642 type, tmp2, tmp3));
11645 /* (X | Y) & Y is (X, Y). */
11646 if (TREE_CODE (arg0) == BIT_IOR_EXPR
11647 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
11648 return omit_one_operand_loc (loc, type, arg1, TREE_OPERAND (arg0, 0));
11649 /* (X | Y) & X is (Y, X). */
11650 if (TREE_CODE (arg0) == BIT_IOR_EXPR
11651 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
11652 && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
11653 return omit_one_operand_loc (loc, type, arg1, TREE_OPERAND (arg0, 1));
11654 /* X & (X | Y) is (Y, X). */
11655 if (TREE_CODE (arg1) == BIT_IOR_EXPR
11656 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0)
11657 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 1)))
11658 return omit_one_operand_loc (loc, type, arg0, TREE_OPERAND (arg1, 1));
11659 /* X & (Y | X) is (Y, X). */
11660 if (TREE_CODE (arg1) == BIT_IOR_EXPR
11661 && operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0)
11662 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
11663 return omit_one_operand_loc (loc, type, arg0, TREE_OPERAND (arg1, 0));
11665 /* Fold (X ^ 1) & 1 as (X & 1) == 0. */
11666 if (TREE_CODE (arg0) == BIT_XOR_EXPR
11667 && integer_onep (TREE_OPERAND (arg0, 1))
11668 && integer_onep (arg1))
11670 tree tem2;
11671 tem = TREE_OPERAND (arg0, 0);
11672 tem2 = fold_convert_loc (loc, TREE_TYPE (tem), arg1);
11673 tem2 = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (tem),
11674 tem, tem2);
11675 return fold_build2_loc (loc, EQ_EXPR, type, tem2,
11676 build_zero_cst (TREE_TYPE (tem)));
11678 /* Fold ~X & 1 as (X & 1) == 0. */
11679 if (TREE_CODE (arg0) == BIT_NOT_EXPR
11680 && integer_onep (arg1))
11682 tree tem2;
11683 tem = TREE_OPERAND (arg0, 0);
11684 tem2 = fold_convert_loc (loc, TREE_TYPE (tem), arg1);
11685 tem2 = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (tem),
11686 tem, tem2);
11687 return fold_build2_loc (loc, EQ_EXPR, type, tem2,
11688 build_zero_cst (TREE_TYPE (tem)));
11690 /* Fold !X & 1 as X == 0. */
11691 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
11692 && integer_onep (arg1))
11694 tem = TREE_OPERAND (arg0, 0);
11695 return fold_build2_loc (loc, EQ_EXPR, type, tem,
11696 build_zero_cst (TREE_TYPE (tem)));
11699 /* Fold (X ^ Y) & Y as ~X & Y. */
11700 if (TREE_CODE (arg0) == BIT_XOR_EXPR
11701 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
11703 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
11704 return fold_build2_loc (loc, BIT_AND_EXPR, type,
11705 fold_build1_loc (loc, BIT_NOT_EXPR, type, tem),
11706 fold_convert_loc (loc, type, arg1));
11708 /* Fold (X ^ Y) & X as ~Y & X. */
11709 if (TREE_CODE (arg0) == BIT_XOR_EXPR
11710 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
11711 && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
11713 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
11714 return fold_build2_loc (loc, BIT_AND_EXPR, type,
11715 fold_build1_loc (loc, BIT_NOT_EXPR, type, tem),
11716 fold_convert_loc (loc, type, arg1));
11718 /* Fold X & (X ^ Y) as X & ~Y. */
11719 if (TREE_CODE (arg1) == BIT_XOR_EXPR
11720 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
11722 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 1));
11723 return fold_build2_loc (loc, BIT_AND_EXPR, type,
11724 fold_convert_loc (loc, type, arg0),
11725 fold_build1_loc (loc, BIT_NOT_EXPR, type, tem));
11727 /* Fold X & (Y ^ X) as ~Y & X. */
11728 if (TREE_CODE (arg1) == BIT_XOR_EXPR
11729 && operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0)
11730 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
11732 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 0));
11733 return fold_build2_loc (loc, BIT_AND_EXPR, type,
11734 fold_build1_loc (loc, BIT_NOT_EXPR, type, tem),
11735 fold_convert_loc (loc, type, arg0));
11738 /* Fold (X * Y) & -(1 << CST) to X * Y if Y is a constant
11739 multiple of 1 << CST. */
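      /* E.g. (X * 8) & -8 folds to X * 8: the product already has its
	 low three bits clear, and -8 == ~7 only masks those bits.  */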
11740 if (TREE_CODE (arg1) == INTEGER_CST)
11742 wide_int cst1 = arg1;
11743 wide_int ncst1 = -cst1;
11744 if ((cst1 & ncst1) == ncst1
11745 && multiple_of_p (type, arg0,
11746 wide_int_to_tree (TREE_TYPE (arg1), ncst1)))
11747 return fold_convert_loc (loc, type, arg0);
11750 /* Fold (X * CST1) & CST2 to zero if we can, or drop known zero
11751 bits from CST2. */
11752 if (TREE_CODE (arg1) == INTEGER_CST
11753 && TREE_CODE (arg0) == MULT_EXPR
11754 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
11756 wide_int warg1 = arg1;
11757 wide_int masked = mask_with_tz (type, warg1, TREE_OPERAND (arg0, 1));
11759 if (masked == 0)
11760 return omit_two_operands_loc (loc, type, build_zero_cst (type),
11761 arg0, arg1);
11762 else if (masked != warg1)
11764 /* Avoid the transform if arg1 is a mask of some
11765 mode which allows further optimizations. */
11766 int pop = wi::popcount (warg1);
11767 if (!(pop >= BITS_PER_UNIT
11768 && exact_log2 (pop) != -1
11769 && wi::mask (pop, false, warg1.get_precision ()) == warg1))
11770 return fold_build2_loc (loc, code, type, op0,
11771 wide_int_to_tree (type, masked));
11775 /* For constants M and N, if M == (1LL << cst) - 1 && (N & M) == M,
11776 ((A & N) + B) & M -> (A + B) & M
11777 Similarly if (N & M) == 0,
11778 ((A | N) + B) & M -> (A + B) & M
11779 and for - instead of + (or unary - instead of +)
11780 and/or ^ instead of |.
11781 If B is constant and (B & M) == 0, fold into A & M. */
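      /* E.g. with M == 0xFF, ((A & 0x1FF) + B) & 0xFF folds to
	 (A + B) & 0xFF, since bits of A above M cannot influence the
	 masked sum; likewise ((A | 0x100) + B) & 0xFF folds the same
	 way, since 0x100 & 0xFF == 0.  */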
11782 if (TREE_CODE (arg1) == INTEGER_CST)
11784 wide_int cst1 = arg1;
11785 if ((~cst1 != 0) && (cst1 & (cst1 + 1)) == 0
11786 && INTEGRAL_TYPE_P (TREE_TYPE (arg0))
11787 && (TREE_CODE (arg0) == PLUS_EXPR
11788 || TREE_CODE (arg0) == MINUS_EXPR
11789 || TREE_CODE (arg0) == NEGATE_EXPR)
11790 && (TYPE_OVERFLOW_WRAPS (TREE_TYPE (arg0))
11791 || TREE_CODE (TREE_TYPE (arg0)) == INTEGER_TYPE))
11793 tree pmop[2];
11794 int which = 0;
11795 wide_int cst0;
11797	      /* Now we know that arg0 is (C + D) or (C - D) or
11798		 -C, and that arg1 (M) equals (1LL << cst) - 1.
11799		 Store C into PMOP[0] and D into PMOP[1].  */
11800 pmop[0] = TREE_OPERAND (arg0, 0);
11801 pmop[1] = NULL;
11802 if (TREE_CODE (arg0) != NEGATE_EXPR)
11804 pmop[1] = TREE_OPERAND (arg0, 1);
11805 which = 1;
11808 if ((wi::max_value (TREE_TYPE (arg0)) & cst1) != cst1)
11809 which = -1;
11811 for (; which >= 0; which--)
11812 switch (TREE_CODE (pmop[which]))
11814 case BIT_AND_EXPR:
11815 case BIT_IOR_EXPR:
11816 case BIT_XOR_EXPR:
11817 if (TREE_CODE (TREE_OPERAND (pmop[which], 1))
11818 != INTEGER_CST)
11819 break;
11820 cst0 = TREE_OPERAND (pmop[which], 1);
11821 cst0 &= cst1;
11822 if (TREE_CODE (pmop[which]) == BIT_AND_EXPR)
11824 if (cst0 != cst1)
11825 break;
11827 else if (cst0 != 0)
11828 break;
11829 /* If C or D is of the form (A & N) where
11830 (N & M) == M, or of the form (A | N) or
11831 (A ^ N) where (N & M) == 0, replace it with A. */
11832 pmop[which] = TREE_OPERAND (pmop[which], 0);
11833 break;
11834 case INTEGER_CST:
11835		      /* If C or D is a constant N where (N & M) == 0, it can be
11836			 omitted (assumed 0).  */
11837 if ((TREE_CODE (arg0) == PLUS_EXPR
11838 || (TREE_CODE (arg0) == MINUS_EXPR && which == 0))
11839 && (cst1 & pmop[which]) == 0)
11840 pmop[which] = NULL;
11841 break;
11842 default:
11843 break;
11846 /* Only build anything new if we optimized one or both arguments
11847 above. */
11848 if (pmop[0] != TREE_OPERAND (arg0, 0)
11849 || (TREE_CODE (arg0) != NEGATE_EXPR
11850 && pmop[1] != TREE_OPERAND (arg0, 1)))
11852 tree utype = TREE_TYPE (arg0);
11853 if (! TYPE_OVERFLOW_WRAPS (TREE_TYPE (arg0)))
11855 /* Perform the operations in a type that has defined
11856 overflow behavior. */
11857 utype = unsigned_type_for (TREE_TYPE (arg0));
11858 if (pmop[0] != NULL)
11859 pmop[0] = fold_convert_loc (loc, utype, pmop[0]);
11860 if (pmop[1] != NULL)
11861 pmop[1] = fold_convert_loc (loc, utype, pmop[1]);
11864 if (TREE_CODE (arg0) == NEGATE_EXPR)
11865 tem = fold_build1_loc (loc, NEGATE_EXPR, utype, pmop[0]);
11866 else if (TREE_CODE (arg0) == PLUS_EXPR)
11868 if (pmop[0] != NULL && pmop[1] != NULL)
11869 tem = fold_build2_loc (loc, PLUS_EXPR, utype,
11870 pmop[0], pmop[1]);
11871 else if (pmop[0] != NULL)
11872 tem = pmop[0];
11873 else if (pmop[1] != NULL)
11874 tem = pmop[1];
11875 else
11876 return build_int_cst (type, 0);
11878 else if (pmop[0] == NULL)
11879 tem = fold_build1_loc (loc, NEGATE_EXPR, utype, pmop[1]);
11880 else
11881 tem = fold_build2_loc (loc, MINUS_EXPR, utype,
11882 pmop[0], pmop[1]);
11883 /* TEM is now the new binary +, - or unary - replacement. */
11884 tem = fold_build2_loc (loc, BIT_AND_EXPR, utype, tem,
11885 fold_convert_loc (loc, utype, arg1));
11886 return fold_convert_loc (loc, type, tem);
11891 t1 = distribute_bit_expr (loc, code, type, arg0, arg1);
11892 if (t1 != NULL_TREE)
11893 return t1;
11894 /* Simplify ((int)c & 0377) into (int)c, if c is unsigned char. */
11895 if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg0) == NOP_EXPR
11896 && TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg0, 0))))
11898 prec = TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg0, 0)));
11900 wide_int mask = wide_int::from (arg1, prec, UNSIGNED);
11901 if (mask == -1)
11902 return
11903 fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
11906 /* Convert (and (not arg0) (not arg1)) to (not (or (arg0) (arg1))).
11908 This results in more efficient code for machines without a NOR
11909 instruction. Combine will canonicalize to the first form
11910 which will allow use of NOR instructions provided by the
11911 backend if they exist. */
11912 if (TREE_CODE (arg0) == BIT_NOT_EXPR
11913 && TREE_CODE (arg1) == BIT_NOT_EXPR)
11915 return fold_build1_loc (loc, BIT_NOT_EXPR, type,
11916 build2 (BIT_IOR_EXPR, type,
11917 fold_convert_loc (loc, type,
11918 TREE_OPERAND (arg0, 0)),
11919 fold_convert_loc (loc, type,
11920 TREE_OPERAND (arg1, 0))));
11923 /* If arg0 is derived from the address of an object or function, we may
11924 be able to fold this expression using the object or function's
11925 alignment. */
11926 if (POINTER_TYPE_P (TREE_TYPE (arg0)) && tree_fits_uhwi_p (arg1))
11928 unsigned HOST_WIDE_INT modulus, residue;
11929 unsigned HOST_WIDE_INT low = tree_to_uhwi (arg1);
11931 modulus = get_pointer_modulus_and_residue (arg0, &residue,
11932 integer_onep (arg1));
11934 /* This works because modulus is a power of 2. If this weren't the
11935 case, we'd have to replace it by its greatest power-of-2
11936 divisor: modulus & -modulus. */
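	  /* E.g. if ARG0 is the address of a 16-byte-aligned object,
	     modulus == 16 and residue == 0, so ARG0 & 7 folds to 0.  */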
11937 if (low < modulus)
11938 return build_int_cst (type, residue & low);
11941 /* Fold (X << C1) & C2 into (X << C1) & (C2 | ((1 << C1) - 1))
11942 (X >> C1) & C2 into (X >> C1) & (C2 | ~((type) -1 >> C1))
11943 if the new mask might be further optimized. */
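      /* E.g. for a 32-bit X, (X << 8) & 0xFF00 becomes
	 (X << 8) & 0xFFFF: the low eight bits of X << 8 are zero
	 anyway, and the widened mask 0xFFFF is a 16-bit mode mask that
	 other simplifications recognize.  */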
11944 if ((TREE_CODE (arg0) == LSHIFT_EXPR
11945 || TREE_CODE (arg0) == RSHIFT_EXPR)
11946 && TYPE_PRECISION (TREE_TYPE (arg0)) <= HOST_BITS_PER_WIDE_INT
11947 && TREE_CODE (arg1) == INTEGER_CST
11948 && tree_fits_uhwi_p (TREE_OPERAND (arg0, 1))
11949 && tree_to_uhwi (TREE_OPERAND (arg0, 1)) > 0
11950 && (tree_to_uhwi (TREE_OPERAND (arg0, 1))
11951 < TYPE_PRECISION (TREE_TYPE (arg0))))
11953 unsigned int shiftc = tree_to_uhwi (TREE_OPERAND (arg0, 1));
11954 unsigned HOST_WIDE_INT mask = TREE_INT_CST_LOW (arg1);
11955 unsigned HOST_WIDE_INT newmask, zerobits = 0;
11956 tree shift_type = TREE_TYPE (arg0);
11958 if (TREE_CODE (arg0) == LSHIFT_EXPR)
11959 zerobits = ((((unsigned HOST_WIDE_INT) 1) << shiftc) - 1);
11960 else if (TREE_CODE (arg0) == RSHIFT_EXPR
11961 && TYPE_PRECISION (TREE_TYPE (arg0))
11962 == GET_MODE_PRECISION (TYPE_MODE (TREE_TYPE (arg0))))
11964 prec = TYPE_PRECISION (TREE_TYPE (arg0));
11965 tree arg00 = TREE_OPERAND (arg0, 0);
11966 /* See if more bits can be proven as zero because of
11967 zero extension. */
11968 if (TREE_CODE (arg00) == NOP_EXPR
11969 && TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg00, 0))))
11971 tree inner_type = TREE_TYPE (TREE_OPERAND (arg00, 0));
11972 if (TYPE_PRECISION (inner_type)
11973 == GET_MODE_PRECISION (TYPE_MODE (inner_type))
11974 && TYPE_PRECISION (inner_type) < prec)
11976 prec = TYPE_PRECISION (inner_type);
11977 /* See if we can shorten the right shift. */
11978 if (shiftc < prec)
11979 shift_type = inner_type;
11980 /* Otherwise X >> C1 is all zeros, so we'll optimize
11981 it into (X, 0) later on by making sure zerobits
11982 is all ones. */
11985 zerobits = ~(unsigned HOST_WIDE_INT) 0;
11986 if (shiftc < prec)
11988 zerobits >>= HOST_BITS_PER_WIDE_INT - shiftc;
11989 zerobits <<= prec - shiftc;
11991	      /* For an arithmetic shift, if the sign bit could be set, zerobits
11992		 can actually contain sign bits, so no transformation is
11993		 possible, unless MASK masks them all away.  In that
11994		 case the shift needs to be converted into a logical shift.  */
11995 if (!TYPE_UNSIGNED (TREE_TYPE (arg0))
11996 && prec == TYPE_PRECISION (TREE_TYPE (arg0)))
11998 if ((mask & zerobits) == 0)
11999 shift_type = unsigned_type_for (TREE_TYPE (arg0));
12000 else
12001 zerobits = 0;
12005 /* ((X << 16) & 0xff00) is (X, 0). */
12006 if ((mask & zerobits) == mask)
12007 return omit_one_operand_loc (loc, type,
12008 build_int_cst (type, 0), arg0);
12010 newmask = mask | zerobits;
12011 if (newmask != mask && (newmask & (newmask + 1)) == 0)
12013 /* Only do the transformation if NEWMASK is some integer
12014 mode's mask. */
12015 for (prec = BITS_PER_UNIT;
12016 prec < HOST_BITS_PER_WIDE_INT; prec <<= 1)
12017 if (newmask == (((unsigned HOST_WIDE_INT) 1) << prec) - 1)
12018 break;
12019 if (prec < HOST_BITS_PER_WIDE_INT
12020 || newmask == ~(unsigned HOST_WIDE_INT) 0)
12022 tree newmaskt;
12024 if (shift_type != TREE_TYPE (arg0))
12026 tem = fold_build2_loc (loc, TREE_CODE (arg0), shift_type,
12027 fold_convert_loc (loc, shift_type,
12028 TREE_OPERAND (arg0, 0)),
12029 TREE_OPERAND (arg0, 1));
12030 tem = fold_convert_loc (loc, type, tem);
12032 else
12033 tem = op0;
12034 newmaskt = build_int_cst_type (TREE_TYPE (op1), newmask);
12035 if (!tree_int_cst_equal (newmaskt, arg1))
12036 return fold_build2_loc (loc, BIT_AND_EXPR, type, tem, newmaskt);
12041 goto associate;
12043 case RDIV_EXPR:
12044 /* Don't touch a floating-point divide by zero unless the mode
12045 of the constant can represent infinity. */
12046 if (TREE_CODE (arg1) == REAL_CST
12047 && !MODE_HAS_INFINITIES (TYPE_MODE (TREE_TYPE (arg1)))
12048 && real_zerop (arg1))
12049 return NULL_TREE;
12051 /* Optimize A / A to 1.0 if we don't care about
12052 NaNs or Infinities. Skip the transformation
12053 for non-real operands. */
12054 if (SCALAR_FLOAT_TYPE_P (TREE_TYPE (arg0))
12055 && ! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0)))
12056 && ! HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (arg0)))
12057 && operand_equal_p (arg0, arg1, 0))
12059 tree r = build_real (TREE_TYPE (arg0), dconst1);
12061 return omit_two_operands_loc (loc, type, r, arg0, arg1);
12064 /* The complex version of the above A / A optimization. */
12065 if (COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0))
12066 && operand_equal_p (arg0, arg1, 0))
12068 tree elem_type = TREE_TYPE (TREE_TYPE (arg0));
12069 if (! HONOR_NANS (TYPE_MODE (elem_type))
12070 && ! HONOR_INFINITIES (TYPE_MODE (elem_type)))
12072 tree r = build_real (elem_type, dconst1);
12073 /* omit_two_operands will call fold_convert for us. */
12074 return omit_two_operands_loc (loc, type, r, arg0, arg1);
12078 /* (-A) / (-B) -> A / B */
12079 if (TREE_CODE (arg0) == NEGATE_EXPR && negate_expr_p (arg1))
12080 return fold_build2_loc (loc, RDIV_EXPR, type,
12081 TREE_OPERAND (arg0, 0),
12082 negate_expr (arg1));
12083 if (TREE_CODE (arg1) == NEGATE_EXPR && negate_expr_p (arg0))
12084 return fold_build2_loc (loc, RDIV_EXPR, type,
12085 negate_expr (arg0),
12086 TREE_OPERAND (arg1, 0));
12088 /* In IEEE floating point, x/1 is not equivalent to x for snans. */
12089 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
12090 && real_onep (arg1))
12091 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
12093 /* In IEEE floating point, x/-1 is not equivalent to -x for snans. */
12094 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
12095 && real_minus_onep (arg1))
12096 return non_lvalue_loc (loc, fold_convert_loc (loc, type,
12097 negate_expr (arg0)));
12099 /* If ARG1 is a constant, we can convert this to a multiply by the
12100 reciprocal. This does not have the same rounding properties,
12101 so only do this if -freciprocal-math. We can actually
12102 always safely do it if ARG1 is a power of two, but it's hard to
12103 tell if it is or not in a portable manner. */
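      /* E.g. x / 4.0 and x * 0.25 round identically because 0.25 is
	 exactly representable in binary, whereas x * 0.1 need not equal
	 x / 10.0, since 0.1 has no exact binary representation.  */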
12104 if (optimize
12105 && (TREE_CODE (arg1) == REAL_CST
12106 || (TREE_CODE (arg1) == COMPLEX_CST
12107 && COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg1)))
12108 || (TREE_CODE (arg1) == VECTOR_CST
12109 && VECTOR_FLOAT_TYPE_P (TREE_TYPE (arg1)))))
12111 if (flag_reciprocal_math
12112 && 0 != (tem = const_binop (code, build_one_cst (type), arg1)))
12113 return fold_build2_loc (loc, MULT_EXPR, type, arg0, tem);
12114 /* Find the reciprocal if optimizing and the result is exact.
12115 TODO: Complex reciprocal not implemented. */
12116 if (TREE_CODE (arg1) != COMPLEX_CST)
12118 tree inverse = exact_inverse (TREE_TYPE (arg0), arg1);
12120 if (inverse)
12121 return fold_build2_loc (loc, MULT_EXPR, type, arg0, inverse);
12124 /* Convert A/B/C to A/(B*C). */
12125 if (flag_reciprocal_math
12126 && TREE_CODE (arg0) == RDIV_EXPR)
12127 return fold_build2_loc (loc, RDIV_EXPR, type, TREE_OPERAND (arg0, 0),
12128 fold_build2_loc (loc, MULT_EXPR, type,
12129 TREE_OPERAND (arg0, 1), arg1));
12131 /* Convert A/(B/C) to (A/B)*C. */
12132 if (flag_reciprocal_math
12133 && TREE_CODE (arg1) == RDIV_EXPR)
12134 return fold_build2_loc (loc, MULT_EXPR, type,
12135 fold_build2_loc (loc, RDIV_EXPR, type, arg0,
12136 TREE_OPERAND (arg1, 0)),
12137 TREE_OPERAND (arg1, 1));
12139 /* Convert C1/(X*C2) into (C1/C2)/X. */
12140 if (flag_reciprocal_math
12141 && TREE_CODE (arg1) == MULT_EXPR
12142 && TREE_CODE (arg0) == REAL_CST
12143 && TREE_CODE (TREE_OPERAND (arg1, 1)) == REAL_CST)
12145 tree tem = const_binop (RDIV_EXPR, arg0,
12146 TREE_OPERAND (arg1, 1));
12147 if (tem)
12148 return fold_build2_loc (loc, RDIV_EXPR, type, tem,
12149 TREE_OPERAND (arg1, 0));
12152 if (flag_unsafe_math_optimizations)
12154 enum built_in_function fcode0 = builtin_mathfn_code (arg0);
12155 enum built_in_function fcode1 = builtin_mathfn_code (arg1);
12157 /* Optimize sin(x)/cos(x) as tan(x). */
12158 if (((fcode0 == BUILT_IN_SIN && fcode1 == BUILT_IN_COS)
12159 || (fcode0 == BUILT_IN_SINF && fcode1 == BUILT_IN_COSF)
12160 || (fcode0 == BUILT_IN_SINL && fcode1 == BUILT_IN_COSL))
12161 && operand_equal_p (CALL_EXPR_ARG (arg0, 0),
12162 CALL_EXPR_ARG (arg1, 0), 0))
12164 tree tanfn = mathfn_built_in (type, BUILT_IN_TAN);
12166 if (tanfn != NULL_TREE)
12167 return build_call_expr_loc (loc, tanfn, 1, CALL_EXPR_ARG (arg0, 0));
12170 /* Optimize cos(x)/sin(x) as 1.0/tan(x). */
12171 if (((fcode0 == BUILT_IN_COS && fcode1 == BUILT_IN_SIN)
12172 || (fcode0 == BUILT_IN_COSF && fcode1 == BUILT_IN_SINF)
12173 || (fcode0 == BUILT_IN_COSL && fcode1 == BUILT_IN_SINL))
12174 && operand_equal_p (CALL_EXPR_ARG (arg0, 0),
12175 CALL_EXPR_ARG (arg1, 0), 0))
12177 tree tanfn = mathfn_built_in (type, BUILT_IN_TAN);
12179 if (tanfn != NULL_TREE)
12181 tree tmp = build_call_expr_loc (loc, tanfn, 1,
12182 CALL_EXPR_ARG (arg0, 0));
12183 return fold_build2_loc (loc, RDIV_EXPR, type,
12184 build_real (type, dconst1), tmp);
12188 /* Optimize sin(x)/tan(x) as cos(x) if we don't care about
12189 NaNs or Infinities. */
12190 if (((fcode0 == BUILT_IN_SIN && fcode1 == BUILT_IN_TAN)
12191 || (fcode0 == BUILT_IN_SINF && fcode1 == BUILT_IN_TANF)
12192 || (fcode0 == BUILT_IN_SINL && fcode1 == BUILT_IN_TANL)))
12194 tree arg00 = CALL_EXPR_ARG (arg0, 0);
12195 tree arg01 = CALL_EXPR_ARG (arg1, 0);
12197 if (! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg00)))
12198 && ! HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (arg00)))
12199 && operand_equal_p (arg00, arg01, 0))
12201 tree cosfn = mathfn_built_in (type, BUILT_IN_COS);
12203 if (cosfn != NULL_TREE)
12204 return build_call_expr_loc (loc, cosfn, 1, arg00);
12208 /* Optimize tan(x)/sin(x) as 1.0/cos(x) if we don't care about
12209 NaNs or Infinities. */
12210 if (((fcode0 == BUILT_IN_TAN && fcode1 == BUILT_IN_SIN)
12211 || (fcode0 == BUILT_IN_TANF && fcode1 == BUILT_IN_SINF)
12212 || (fcode0 == BUILT_IN_TANL && fcode1 == BUILT_IN_SINL)))
12214 tree arg00 = CALL_EXPR_ARG (arg0, 0);
12215 tree arg01 = CALL_EXPR_ARG (arg1, 0);
12217 if (! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg00)))
12218 && ! HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (arg00)))
12219 && operand_equal_p (arg00, arg01, 0))
12221 tree cosfn = mathfn_built_in (type, BUILT_IN_COS);
12223 if (cosfn != NULL_TREE)
12225 tree tmp = build_call_expr_loc (loc, cosfn, 1, arg00);
12226 return fold_build2_loc (loc, RDIV_EXPR, type,
12227 build_real (type, dconst1),
12228 tmp);
12233 /* Optimize pow(x,c)/x as pow(x,c-1). */
12234 if (fcode0 == BUILT_IN_POW
12235 || fcode0 == BUILT_IN_POWF
12236 || fcode0 == BUILT_IN_POWL)
12238 tree arg00 = CALL_EXPR_ARG (arg0, 0);
12239 tree arg01 = CALL_EXPR_ARG (arg0, 1);
12240 if (TREE_CODE (arg01) == REAL_CST
12241 && !TREE_OVERFLOW (arg01)
12242 && operand_equal_p (arg1, arg00, 0))
12244 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
12245 REAL_VALUE_TYPE c;
12246 tree arg;
12248 c = TREE_REAL_CST (arg01);
12249 real_arithmetic (&c, MINUS_EXPR, &c, &dconst1);
12250 arg = build_real (type, c);
12251 return build_call_expr_loc (loc, powfn, 2, arg1, arg);
12255 /* Optimize a/root(b/c) into a*root(c/b). */
12256 if (BUILTIN_ROOT_P (fcode1))
12258 tree rootarg = CALL_EXPR_ARG (arg1, 0);
12260 if (TREE_CODE (rootarg) == RDIV_EXPR)
12262 tree rootfn = TREE_OPERAND (CALL_EXPR_FN (arg1), 0);
12263 tree b = TREE_OPERAND (rootarg, 0);
12264 tree c = TREE_OPERAND (rootarg, 1);
12266 tree tmp = fold_build2_loc (loc, RDIV_EXPR, type, c, b);
12268 tmp = build_call_expr_loc (loc, rootfn, 1, tmp);
12269 return fold_build2_loc (loc, MULT_EXPR, type, arg0, tmp);
12273 /* Optimize x/expN(y) into x*expN(-y). */
12274 if (BUILTIN_EXPONENT_P (fcode1))
12276 tree expfn = TREE_OPERAND (CALL_EXPR_FN (arg1), 0);
12277 tree arg = negate_expr (CALL_EXPR_ARG (arg1, 0));
12278 arg1 = build_call_expr_loc (loc,
12279 expfn, 1,
12280 fold_convert_loc (loc, type, arg));
12281 return fold_build2_loc (loc, MULT_EXPR, type, arg0, arg1);
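	  /* For example (illustrative only), x / exp (y) is rebuilt as
	     x * exp (-y): the division becomes a multiplication and the
	     negation is folded into the call argument by negate_expr.  */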
12284 /* Optimize x/pow(y,z) into x*pow(y,-z). */
12285 if (fcode1 == BUILT_IN_POW
12286 || fcode1 == BUILT_IN_POWF
12287 || fcode1 == BUILT_IN_POWL)
12289 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg1), 0);
12290 tree arg10 = CALL_EXPR_ARG (arg1, 0);
12291 tree arg11 = CALL_EXPR_ARG (arg1, 1);
12292 tree neg11 = fold_convert_loc (loc, type,
12293 negate_expr (arg11));
12294 arg1 = build_call_expr_loc (loc, powfn, 2, arg10, neg11);
12295 return fold_build2_loc (loc, MULT_EXPR, type, arg0, arg1);
12298 return NULL_TREE;
12300 case TRUNC_DIV_EXPR:
12301 /* Optimize (X & (-A)) / A where A is a power of 2,
12302 to X >> log2(A) */
12303 if (TREE_CODE (arg0) == BIT_AND_EXPR
12304 && !TYPE_UNSIGNED (type) && TREE_CODE (arg1) == INTEGER_CST
12305 && integer_pow2p (arg1) && tree_int_cst_sgn (arg1) > 0)
12307 tree sum = fold_binary_loc (loc, PLUS_EXPR, TREE_TYPE (arg1),
12308 arg1, TREE_OPERAND (arg0, 1));
12309 if (sum && integer_zerop (sum)) {
12310 tree pow2 = build_int_cst (integer_type_node,
12311 wi::exact_log2 (arg1));
12312 return fold_build2_loc (loc, RSHIFT_EXPR, type,
12313 TREE_OPERAND (arg0, 0), pow2);
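	      /* Hypothetical instance: with A == 8, (x & -8) / 8 becomes
		 x >> 3; the check that arg1 + TREE_OPERAND (arg0, 1)
		 folds to zero is what proves the mask is exactly -A.  */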
12317 /* Fall through */
12319 case FLOOR_DIV_EXPR:
12320 /* Simplify A / (B << N) where A and B are positive and B is
12321 a power of 2, to A >> (N + log2(B)). */
12322 strict_overflow_p = false;
12323 if (TREE_CODE (arg1) == LSHIFT_EXPR
12324 && (TYPE_UNSIGNED (type)
12325 || tree_expr_nonnegative_warnv_p (op0, &strict_overflow_p)))
12327 tree sval = TREE_OPERAND (arg1, 0);
12328 if (integer_pow2p (sval) && tree_int_cst_sgn (sval) > 0)
12330 tree sh_cnt = TREE_OPERAND (arg1, 1);
12331 tree pow2 = build_int_cst (TREE_TYPE (sh_cnt),
12332 wi::exact_log2 (sval));
12334 if (strict_overflow_p)
12335 fold_overflow_warning (("assuming signed overflow does not "
12336 "occur when simplifying A / (B << N)"),
12337 WARN_STRICT_OVERFLOW_MISC);
12339 sh_cnt = fold_build2_loc (loc, PLUS_EXPR, TREE_TYPE (sh_cnt),
12340 sh_cnt, pow2);
12341 return fold_build2_loc (loc, RSHIFT_EXPR, type,
12342 fold_convert_loc (loc, type, arg0), sh_cnt);
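	      /* Illustrative: for unsigned a, a / (4 << n) is rewritten
		 as a >> (n + 2), the constant 2 being log2 (4) folded
		 into the shift count at compile time.  */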
12346 /* For unsigned integral types, FLOOR_DIV_EXPR is the same as
12347 TRUNC_DIV_EXPR. Rewrite into the latter in this case. */
12348 if (INTEGRAL_TYPE_P (type)
12349 && TYPE_UNSIGNED (type)
12350 && code == FLOOR_DIV_EXPR)
12351 return fold_build2_loc (loc, TRUNC_DIV_EXPR, type, op0, op1);
12353 /* Fall through */
12355 case ROUND_DIV_EXPR:
12356 case CEIL_DIV_EXPR:
12357 case EXACT_DIV_EXPR:
12358 if (integer_onep (arg1))
12359 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
12360 if (integer_zerop (arg1))
12361 return NULL_TREE;
12362 /* X / -1 is -X. */
12363 if (!TYPE_UNSIGNED (type)
12364 && TREE_CODE (arg1) == INTEGER_CST
12365 && wi::eq_p (arg1, -1))
12366 return fold_convert_loc (loc, type, negate_expr (arg0));
12368 /* Convert -A / -B to A / B when the type is signed and overflow is
12369 undefined. */
12370 if ((!INTEGRAL_TYPE_P (type) || TYPE_OVERFLOW_UNDEFINED (type))
12371 && TREE_CODE (arg0) == NEGATE_EXPR
12372 && negate_expr_p (arg1))
12374 if (INTEGRAL_TYPE_P (type))
12375 fold_overflow_warning (("assuming signed overflow does not occur "
12376 "when distributing negation across "
12377 "division"),
12378 WARN_STRICT_OVERFLOW_MISC);
12379 return fold_build2_loc (loc, code, type,
12380 fold_convert_loc (loc, type,
12381 TREE_OPERAND (arg0, 0)),
12382 fold_convert_loc (loc, type,
12383 negate_expr (arg1)));
12385 if ((!INTEGRAL_TYPE_P (type) || TYPE_OVERFLOW_UNDEFINED (type))
12386 && TREE_CODE (arg1) == NEGATE_EXPR
12387 && negate_expr_p (arg0))
12389 if (INTEGRAL_TYPE_P (type))
12390 fold_overflow_warning (("assuming signed overflow does not occur "
12391 "when distributing negation across "
12392 "division"),
12393 WARN_STRICT_OVERFLOW_MISC);
12394 return fold_build2_loc (loc, code, type,
12395 fold_convert_loc (loc, type,
12396 negate_expr (arg0)),
12397 fold_convert_loc (loc, type,
12398 TREE_OPERAND (arg1, 0)));
12401 /* If arg0 is a multiple of arg1, then rewrite to the fastest div
12402 operation, EXACT_DIV_EXPR.
12404 Note that only CEIL_DIV_EXPR and FLOOR_DIV_EXPR are rewritten now.
12405 At one time others generated faster code, it's not clear if they do
 12406 after the last round of changes to the DIV code in expmed.c. */
12407 if ((code == CEIL_DIV_EXPR || code == FLOOR_DIV_EXPR)
12408 && multiple_of_p (type, arg0, arg1))
12409 return fold_build2_loc (loc, EXACT_DIV_EXPR, type, arg0, arg1);
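	/* E.g. (illustrative) when arg0 is 8 * n and arg1 is 4,
	   multiple_of_p holds, so a FLOOR_DIV_EXPR here is retagged
	   EXACT_DIV_EXPR and gets the cheapest expansion in expmed.c.  */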
12411 strict_overflow_p = false;
12412 if (TREE_CODE (arg1) == INTEGER_CST
12413 && 0 != (tem = extract_muldiv (op0, arg1, code, NULL_TREE,
12414 &strict_overflow_p)))
12416 if (strict_overflow_p)
12417 fold_overflow_warning (("assuming signed overflow does not occur "
12418 "when simplifying division"),
12419 WARN_STRICT_OVERFLOW_MISC);
12420 return fold_convert_loc (loc, type, tem);
12423 return NULL_TREE;
12425 case CEIL_MOD_EXPR:
12426 case FLOOR_MOD_EXPR:
12427 case ROUND_MOD_EXPR:
12428 case TRUNC_MOD_EXPR:
12429 /* X % 1 is always zero, but be sure to preserve any side
12430 effects in X. */
12431 if (integer_onep (arg1))
12432 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
 12434 /* For X % 0, return X % 0 unchanged so that we can get the
12435 proper warnings and errors. */
12436 if (integer_zerop (arg1))
12437 return NULL_TREE;
12439 /* 0 % X is always zero, but be sure to preserve any side
12440 effects in X. Place this after checking for X == 0. */
12441 if (integer_zerop (arg0))
12442 return omit_one_operand_loc (loc, type, integer_zero_node, arg1);
12444 /* X % -1 is zero. */
12445 if (!TYPE_UNSIGNED (type)
12446 && TREE_CODE (arg1) == INTEGER_CST
12447 && wi::eq_p (arg1, -1))
12448 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
12450 /* X % -C is the same as X % C. */
12451 if (code == TRUNC_MOD_EXPR
12452 && TYPE_SIGN (type) == SIGNED
12453 && TREE_CODE (arg1) == INTEGER_CST
12454 && !TREE_OVERFLOW (arg1)
12455 && wi::neg_p (arg1)
12456 && !TYPE_OVERFLOW_TRAPS (type)
12457 /* Avoid this transformation if C is INT_MIN, i.e. C == -C. */
12458 && !sign_bit_p (arg1, arg1))
12459 return fold_build2_loc (loc, code, type,
12460 fold_convert_loc (loc, type, arg0),
12461 fold_convert_loc (loc, type,
12462 negate_expr (arg1)));
12464 /* X % -Y is the same as X % Y. */
12465 if (code == TRUNC_MOD_EXPR
12466 && !TYPE_UNSIGNED (type)
12467 && TREE_CODE (arg1) == NEGATE_EXPR
12468 && !TYPE_OVERFLOW_TRAPS (type))
12469 return fold_build2_loc (loc, code, type, fold_convert_loc (loc, type, arg0),
12470 fold_convert_loc (loc, type,
12471 TREE_OPERAND (arg1, 0)));
12473 strict_overflow_p = false;
12474 if (TREE_CODE (arg1) == INTEGER_CST
12475 && 0 != (tem = extract_muldiv (op0, arg1, code, NULL_TREE,
12476 &strict_overflow_p)))
12478 if (strict_overflow_p)
12479 fold_overflow_warning (("assuming signed overflow does not occur "
12480 "when simplifying modulus"),
12481 WARN_STRICT_OVERFLOW_MISC);
12482 return fold_convert_loc (loc, type, tem);
 12485 /* Optimize TRUNC_MOD_EXPR and FLOOR_MOD_EXPR by a power of two into a
 12486 BIT_AND_EXPR, i.e. "X % C" into "X & (C - 1)", if X and C are positive. */
12487 if ((code == TRUNC_MOD_EXPR || code == FLOOR_MOD_EXPR)
12488 && (TYPE_UNSIGNED (type)
12489 || tree_expr_nonnegative_warnv_p (op0, &strict_overflow_p)))
12491 tree c = arg1;
12492 /* Also optimize A % (C << N) where C is a power of 2,
12493 to A & ((C << N) - 1). */
12494 if (TREE_CODE (arg1) == LSHIFT_EXPR)
12495 c = TREE_OPERAND (arg1, 0);
12497 if (integer_pow2p (c) && tree_int_cst_sgn (c) > 0)
12499 tree mask
12500 = fold_build2_loc (loc, MINUS_EXPR, TREE_TYPE (arg1), arg1,
12501 build_int_cst (TREE_TYPE (arg1), 1));
12502 if (strict_overflow_p)
12503 fold_overflow_warning (("assuming signed overflow does not "
12504 "occur when simplifying "
12505 "X % (power of two)"),
12506 WARN_STRICT_OVERFLOW_MISC);
12507 return fold_build2_loc (loc, BIT_AND_EXPR, type,
12508 fold_convert_loc (loc, type, arg0),
12509 fold_convert_loc (loc, type, mask));
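	      /* Sketch: for unsigned x, x % 16 becomes x & 15, and
		 x % (2 << n) becomes x & ((2 << n) - 1); only the first
		 operand of the LSHIFT_EXPR must be a power of two.  */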
12513 return NULL_TREE;
12515 case LROTATE_EXPR:
12516 case RROTATE_EXPR:
12517 if (integer_all_onesp (arg0))
12518 return omit_one_operand_loc (loc, type, arg0, arg1);
12519 goto shift;
12521 case RSHIFT_EXPR:
12522 /* Optimize -1 >> x for arithmetic right shifts. */
12523 if (integer_all_onesp (arg0) && !TYPE_UNSIGNED (type)
12524 && tree_expr_nonnegative_p (arg1))
12525 return omit_one_operand_loc (loc, type, arg0, arg1);
12526 /* ... fall through ... */
12528 case LSHIFT_EXPR:
12529 shift:
12530 if (integer_zerop (arg1))
12531 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
12532 if (integer_zerop (arg0))
12533 return omit_one_operand_loc (loc, type, arg0, arg1);
12535 /* Prefer vector1 << scalar to vector1 << vector2
12536 if vector2 is uniform. */
12537 if (VECTOR_TYPE_P (TREE_TYPE (arg1))
12538 && (tem = uniform_vector_p (arg1)) != NULL_TREE)
12539 return fold_build2_loc (loc, code, type, op0, tem);
12541 /* Since negative shift count is not well-defined,
12542 don't try to compute it in the compiler. */
12543 if (TREE_CODE (arg1) == INTEGER_CST && tree_int_cst_sgn (arg1) < 0)
12544 return NULL_TREE;
12546 prec = element_precision (type);
12548 /* Turn (a OP c1) OP c2 into a OP (c1+c2). */
12549 if (TREE_CODE (op0) == code && tree_fits_uhwi_p (arg1)
12550 && tree_to_uhwi (arg1) < prec
12551 && tree_fits_uhwi_p (TREE_OPERAND (arg0, 1))
12552 && tree_to_uhwi (TREE_OPERAND (arg0, 1)) < prec)
12554 unsigned int low = (tree_to_uhwi (TREE_OPERAND (arg0, 1))
12555 + tree_to_uhwi (arg1));
12557 /* Deal with a OP (c1 + c2) being undefined but (a OP c1) OP c2
12558 being well defined. */
12559 if (low >= prec)
12561 if (code == LROTATE_EXPR || code == RROTATE_EXPR)
12562 low = low % prec;
12563 else if (TYPE_UNSIGNED (type) || code == LSHIFT_EXPR)
12564 return omit_one_operand_loc (loc, type, build_zero_cst (type),
12565 TREE_OPERAND (arg0, 0));
12566 else
12567 low = prec - 1;
12570 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0),
12571 build_int_cst (TREE_TYPE (arg1), low));
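	  /* Illustrative instance: (x >> 3) >> 2 becomes x >> 5.  If the
	     combined count reaches PREC, the cases above fold a logical
	     shift to zero and clamp an arithmetic right shift to
	     prec - 1.  */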
12574 /* Transform (x >> c) << c into x & (-1<<c), or transform (x << c) >> c
12575 into x & ((unsigned)-1 >> c) for unsigned types. */
12576 if (((code == LSHIFT_EXPR && TREE_CODE (arg0) == RSHIFT_EXPR)
12577 || (TYPE_UNSIGNED (type)
12578 && code == RSHIFT_EXPR && TREE_CODE (arg0) == LSHIFT_EXPR))
12579 && tree_fits_uhwi_p (arg1)
12580 && tree_to_uhwi (arg1) < prec
12581 && tree_fits_uhwi_p (TREE_OPERAND (arg0, 1))
12582 && tree_to_uhwi (TREE_OPERAND (arg0, 1)) < prec)
12584 HOST_WIDE_INT low0 = tree_to_uhwi (TREE_OPERAND (arg0, 1));
12585 HOST_WIDE_INT low1 = tree_to_uhwi (arg1);
12586 tree lshift;
12587 tree arg00;
12589 if (low0 == low1)
12591 arg00 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
12593 lshift = build_minus_one_cst (type);
12594 lshift = const_binop (code, lshift, arg1);
12596 return fold_build2_loc (loc, BIT_AND_EXPR, type, arg00, lshift);
12600 /* Rewrite an LROTATE_EXPR by a constant into an
12601 RROTATE_EXPR by a new constant. */
12602 if (code == LROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST)
12604 tree tem = build_int_cst (TREE_TYPE (arg1), prec);
12605 tem = const_binop (MINUS_EXPR, tem, arg1);
12606 return fold_build2_loc (loc, RROTATE_EXPR, type, op0, tem);
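	  /* E.g. (illustrative) a rotate-left of a 32-bit value by 5 is
	     canonicalized as a rotate-right by 27, so later passes only
	     see RROTATE_EXPR with constant counts.  */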
12609 /* If we have a rotate of a bit operation with the rotate count and
12610 the second operand of the bit operation both constant,
12611 permute the two operations. */
12612 if (code == RROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST
12613 && (TREE_CODE (arg0) == BIT_AND_EXPR
12614 || TREE_CODE (arg0) == BIT_IOR_EXPR
12615 || TREE_CODE (arg0) == BIT_XOR_EXPR)
12616 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
12617 return fold_build2_loc (loc, TREE_CODE (arg0), type,
12618 fold_build2_loc (loc, code, type,
12619 TREE_OPERAND (arg0, 0), arg1),
12620 fold_build2_loc (loc, code, type,
12621 TREE_OPERAND (arg0, 1), arg1));
 12623 /* Two consecutive rotates adding up to some integer
12624 multiple of the precision of the type can be ignored. */
12625 if (code == RROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST
12626 && TREE_CODE (arg0) == RROTATE_EXPR
12627 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
12628 && wi::umod_trunc (wi::add (arg1, TREE_OPERAND (arg0, 1)),
12629 prec) == 0)
12630 return TREE_OPERAND (arg0, 0);
12632 /* Fold (X & C2) << C1 into (X << C1) & (C2 << C1)
12633 (X & C2) >> C1 into (X >> C1) & (C2 >> C1)
12634 if the latter can be further optimized. */
12635 if ((code == LSHIFT_EXPR || code == RSHIFT_EXPR)
12636 && TREE_CODE (arg0) == BIT_AND_EXPR
12637 && TREE_CODE (arg1) == INTEGER_CST
12638 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
12640 tree mask = fold_build2_loc (loc, code, type,
12641 fold_convert_loc (loc, type,
12642 TREE_OPERAND (arg0, 1)),
12643 arg1);
12644 tree shift = fold_build2_loc (loc, code, type,
12645 fold_convert_loc (loc, type,
12646 TREE_OPERAND (arg0, 0)),
12647 arg1);
12648 tem = fold_binary_loc (loc, BIT_AND_EXPR, type, shift, mask);
12649 if (tem)
12650 return tem;
12653 return NULL_TREE;
12655 case MIN_EXPR:
12656 if (operand_equal_p (arg0, arg1, 0))
12657 return omit_one_operand_loc (loc, type, arg0, arg1);
12658 if (INTEGRAL_TYPE_P (type)
12659 && operand_equal_p (arg1, TYPE_MIN_VALUE (type), OEP_ONLY_CONST))
12660 return omit_one_operand_loc (loc, type, arg1, arg0);
12661 tem = fold_minmax (loc, MIN_EXPR, type, arg0, arg1);
12662 if (tem)
12663 return tem;
12664 goto associate;
12666 case MAX_EXPR:
12667 if (operand_equal_p (arg0, arg1, 0))
12668 return omit_one_operand_loc (loc, type, arg0, arg1);
12669 if (INTEGRAL_TYPE_P (type)
12670 && TYPE_MAX_VALUE (type)
12671 && operand_equal_p (arg1, TYPE_MAX_VALUE (type), OEP_ONLY_CONST))
12672 return omit_one_operand_loc (loc, type, arg1, arg0);
12673 tem = fold_minmax (loc, MAX_EXPR, type, arg0, arg1);
12674 if (tem)
12675 return tem;
12676 goto associate;
12678 case TRUTH_ANDIF_EXPR:
12679 /* Note that the operands of this must be ints
12680 and their values must be 0 or 1.
12681 ("true" is a fixed value perhaps depending on the language.) */
12682 /* If first arg is constant zero, return it. */
12683 if (integer_zerop (arg0))
12684 return fold_convert_loc (loc, type, arg0);
12685 case TRUTH_AND_EXPR:
12686 /* If either arg is constant true, drop it. */
12687 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
12688 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
12689 if (TREE_CODE (arg1) == INTEGER_CST && ! integer_zerop (arg1)
12690 /* Preserve sequence points. */
12691 && (code != TRUTH_ANDIF_EXPR || ! TREE_SIDE_EFFECTS (arg0)))
12692 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
12693 /* If second arg is constant zero, result is zero, but first arg
12694 must be evaluated. */
12695 if (integer_zerop (arg1))
12696 return omit_one_operand_loc (loc, type, arg1, arg0);
12697 /* Likewise for first arg, but note that only the TRUTH_AND_EXPR
12698 case will be handled here. */
12699 if (integer_zerop (arg0))
12700 return omit_one_operand_loc (loc, type, arg0, arg1);
12702 /* !X && X is always false. */
12703 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
12704 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
12705 return omit_one_operand_loc (loc, type, integer_zero_node, arg1);
12706 /* X && !X is always false. */
12707 if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
12708 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
12709 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
12711 /* A < X && A + 1 > Y ==> A < X && A >= Y. Normally A + 1 > Y
12712 means A >= Y && A != MAX, but in this case we know that
12713 A < X <= MAX. */
12715 if (!TREE_SIDE_EFFECTS (arg0)
12716 && !TREE_SIDE_EFFECTS (arg1))
12718 tem = fold_to_nonsharp_ineq_using_bound (loc, arg0, arg1);
12719 if (tem && !operand_equal_p (tem, arg0, 0))
12720 return fold_build2_loc (loc, code, type, tem, arg1);
12722 tem = fold_to_nonsharp_ineq_using_bound (loc, arg1, arg0);
12723 if (tem && !operand_equal_p (tem, arg1, 0))
12724 return fold_build2_loc (loc, code, type, arg0, tem);
12727 if ((tem = fold_truth_andor (loc, code, type, arg0, arg1, op0, op1))
12728 != NULL_TREE)
12729 return tem;
12731 return NULL_TREE;
12733 case TRUTH_ORIF_EXPR:
12734 /* Note that the operands of this must be ints
12735 and their values must be 0 or true.
12736 ("true" is a fixed value perhaps depending on the language.) */
12737 /* If first arg is constant true, return it. */
12738 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
12739 return fold_convert_loc (loc, type, arg0);
12740 case TRUTH_OR_EXPR:
12741 /* If either arg is constant zero, drop it. */
12742 if (TREE_CODE (arg0) == INTEGER_CST && integer_zerop (arg0))
12743 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
12744 if (TREE_CODE (arg1) == INTEGER_CST && integer_zerop (arg1)
12745 /* Preserve sequence points. */
12746 && (code != TRUTH_ORIF_EXPR || ! TREE_SIDE_EFFECTS (arg0)))
12747 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
12748 /* If second arg is constant true, result is true, but we must
12749 evaluate first arg. */
12750 if (TREE_CODE (arg1) == INTEGER_CST && ! integer_zerop (arg1))
12751 return omit_one_operand_loc (loc, type, arg1, arg0);
12752 /* Likewise for first arg, but note this only occurs here for
12753 TRUTH_OR_EXPR. */
12754 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
12755 return omit_one_operand_loc (loc, type, arg0, arg1);
12757 /* !X || X is always true. */
12758 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
12759 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
12760 return omit_one_operand_loc (loc, type, integer_one_node, arg1);
12761 /* X || !X is always true. */
12762 if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
12763 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
12764 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
12766 /* (X && !Y) || (!X && Y) is X ^ Y */
12767 if (TREE_CODE (arg0) == TRUTH_AND_EXPR
12768 && TREE_CODE (arg1) == TRUTH_AND_EXPR)
12770 tree a0, a1, l0, l1, n0, n1;
12772 a0 = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 0));
12773 a1 = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 1));
12775 l0 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
12776 l1 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
12778 n0 = fold_build1_loc (loc, TRUTH_NOT_EXPR, type, l0);
12779 n1 = fold_build1_loc (loc, TRUTH_NOT_EXPR, type, l1);
12781 if ((operand_equal_p (n0, a0, 0)
12782 && operand_equal_p (n1, a1, 0))
12783 || (operand_equal_p (n0, a1, 0)
12784 && operand_equal_p (n1, a0, 0)))
12785 return fold_build2_loc (loc, TRUTH_XOR_EXPR, type, l0, n1);
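	  /* Sketch of a matching input, with illustrative names:
	       (p && !q) || (!p && q)
	     where the negations pair up as checked above, collapses to
	     p ^ q, the TRUTH_XOR_EXPR built from l0 and n1.  */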
12788 if ((tem = fold_truth_andor (loc, code, type, arg0, arg1, op0, op1))
12789 != NULL_TREE)
12790 return tem;
12792 return NULL_TREE;
12794 case TRUTH_XOR_EXPR:
12795 /* If the second arg is constant zero, drop it. */
12796 if (integer_zerop (arg1))
12797 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
12798 /* If the second arg is constant true, this is a logical inversion. */
12799 if (integer_onep (arg1))
12801 tem = invert_truthvalue_loc (loc, arg0);
12802 return non_lvalue_loc (loc, fold_convert_loc (loc, type, tem));
12804 /* Identical arguments cancel to zero. */
12805 if (operand_equal_p (arg0, arg1, 0))
12806 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
12808 /* !X ^ X is always true. */
12809 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
12810 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
12811 return omit_one_operand_loc (loc, type, integer_one_node, arg1);
12813 /* X ^ !X is always true. */
12814 if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
12815 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
12816 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
12818 return NULL_TREE;
12820 case EQ_EXPR:
12821 case NE_EXPR:
12822 STRIP_NOPS (arg0);
12823 STRIP_NOPS (arg1);
12825 tem = fold_comparison (loc, code, type, op0, op1);
12826 if (tem != NULL_TREE)
12827 return tem;
12829 /* bool_var != 0 becomes bool_var. */
12830 if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_zerop (arg1)
12831 && code == NE_EXPR)
12832 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
12834 /* bool_var == 1 becomes bool_var. */
12835 if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_onep (arg1)
12836 && code == EQ_EXPR)
12837 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
12839 /* bool_var != 1 becomes !bool_var. */
12840 if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_onep (arg1)
12841 && code == NE_EXPR)
12842 return fold_convert_loc (loc, type,
12843 fold_build1_loc (loc, TRUTH_NOT_EXPR,
12844 TREE_TYPE (arg0), arg0));
12846 /* bool_var == 0 becomes !bool_var. */
12847 if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_zerop (arg1)
12848 && code == EQ_EXPR)
12849 return fold_convert_loc (loc, type,
12850 fold_build1_loc (loc, TRUTH_NOT_EXPR,
12851 TREE_TYPE (arg0), arg0));
12853 /* !exp != 0 becomes !exp */
12854 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR && integer_zerop (arg1)
12855 && code == NE_EXPR)
12856 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
12858 /* If this is an equality comparison of the address of two non-weak,
12859 unaliased symbols neither of which are extern (since we do not
12860 have access to attributes for externs), then we know the result. */
12861 if (TREE_CODE (arg0) == ADDR_EXPR
12862 && VAR_OR_FUNCTION_DECL_P (TREE_OPERAND (arg0, 0))
12863 && ! DECL_WEAK (TREE_OPERAND (arg0, 0))
12864 && ! lookup_attribute ("alias",
12865 DECL_ATTRIBUTES (TREE_OPERAND (arg0, 0)))
12866 && ! DECL_EXTERNAL (TREE_OPERAND (arg0, 0))
12867 && TREE_CODE (arg1) == ADDR_EXPR
12868 && VAR_OR_FUNCTION_DECL_P (TREE_OPERAND (arg1, 0))
12869 && ! DECL_WEAK (TREE_OPERAND (arg1, 0))
12870 && ! lookup_attribute ("alias",
12871 DECL_ATTRIBUTES (TREE_OPERAND (arg1, 0)))
12872 && ! DECL_EXTERNAL (TREE_OPERAND (arg1, 0)))
12874 /* We know that we're looking at the address of two
12875 non-weak, unaliased, static _DECL nodes.
12877 It is both wasteful and incorrect to call operand_equal_p
12878 to compare the two ADDR_EXPR nodes. It is wasteful in that
12879 all we need to do is test pointer equality for the arguments
12880 to the two ADDR_EXPR nodes. It is incorrect to use
12881 operand_equal_p as that function is NOT equivalent to a
12882 C equality test. It can in fact return false for two
12883 objects which would test as equal using the C equality
12884 operator. */
12885 bool equal = TREE_OPERAND (arg0, 0) == TREE_OPERAND (arg1, 0);
12886 return constant_boolean_node (equal
12887 ? code == EQ_EXPR : code != EQ_EXPR,
12888 type);
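	  /* Illustrative: with file-scope  static int a, b;  the tests
	     &a == &b and &a != &b fold here to 0 and 1 respectively,
	     using raw pointer identity of the _DECL nodes.  */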
12891 /* If this is an EQ or NE comparison of a constant with a PLUS_EXPR or
12892 a MINUS_EXPR of a constant, we can convert it into a comparison with
12893 a revised constant as long as no overflow occurs. */
12894 if (TREE_CODE (arg1) == INTEGER_CST
12895 && (TREE_CODE (arg0) == PLUS_EXPR
12896 || TREE_CODE (arg0) == MINUS_EXPR)
12897 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
12898 && 0 != (tem = const_binop (TREE_CODE (arg0) == PLUS_EXPR
12899 ? MINUS_EXPR : PLUS_EXPR,
12900 fold_convert_loc (loc, TREE_TYPE (arg0),
12901 arg1),
12902 TREE_OPERAND (arg0, 1)))
12903 && !TREE_OVERFLOW (tem))
12904 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0), tem);
12906 /* Similarly for a NEGATE_EXPR. */
12907 if (TREE_CODE (arg0) == NEGATE_EXPR
12908 && TREE_CODE (arg1) == INTEGER_CST
12909 && 0 != (tem = negate_expr (fold_convert_loc (loc, TREE_TYPE (arg0),
12910 arg1)))
12911 && TREE_CODE (tem) == INTEGER_CST
12912 && !TREE_OVERFLOW (tem))
12913 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0), tem);
12915 /* Similarly for a BIT_XOR_EXPR; X ^ C1 == C2 is X == (C1 ^ C2). */
12916 if (TREE_CODE (arg0) == BIT_XOR_EXPR
12917 && TREE_CODE (arg1) == INTEGER_CST
12918 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
12919 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0),
12920 fold_build2_loc (loc, BIT_XOR_EXPR, TREE_TYPE (arg0),
12921 fold_convert_loc (loc,
12922 TREE_TYPE (arg0),
12923 arg1),
12924 TREE_OPERAND (arg0, 1)));
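	/* For example (illustrative), (x ^ 5) == 3 becomes x == 6: the
	   inner BIT_XOR_EXPR of the two constants folds at compile
	   time.  */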
12926 /* Transform comparisons of the form X +- Y CMP X to Y CMP 0. */
12927 if ((TREE_CODE (arg0) == PLUS_EXPR
12928 || TREE_CODE (arg0) == POINTER_PLUS_EXPR
12929 || TREE_CODE (arg0) == MINUS_EXPR)
12930 && operand_equal_p (tree_strip_nop_conversions (TREE_OPERAND (arg0,
12931 0)),
12932 arg1, 0)
12933 && (INTEGRAL_TYPE_P (TREE_TYPE (arg0))
12934 || POINTER_TYPE_P (TREE_TYPE (arg0))))
12936 tree val = TREE_OPERAND (arg0, 1);
12937 return omit_two_operands_loc (loc, type,
12938 fold_build2_loc (loc, code, type,
12939 val,
12940 build_int_cst (TREE_TYPE (val),
12941 0)),
12942 TREE_OPERAND (arg0, 0), arg1);
12945 /* Transform comparisons of the form C - X CMP X if C % 2 == 1. */
12946 if (TREE_CODE (arg0) == MINUS_EXPR
12947 && TREE_CODE (TREE_OPERAND (arg0, 0)) == INTEGER_CST
12948 && operand_equal_p (tree_strip_nop_conversions (TREE_OPERAND (arg0,
12949 1)),
12950 arg1, 0)
12951 && wi::extract_uhwi (TREE_OPERAND (arg0, 0), 0, 1) == 1)
12953 return omit_two_operands_loc (loc, type,
12954 code == NE_EXPR
12955 ? boolean_true_node : boolean_false_node,
12956 TREE_OPERAND (arg0, 1), arg1);
12959 /* If we have X - Y == 0, we can convert that to X == Y and similarly
12960 for !=. Don't do this for ordered comparisons due to overflow. */
12961 if (TREE_CODE (arg0) == MINUS_EXPR
12962 && integer_zerop (arg1))
12963 return fold_build2_loc (loc, code, type,
12964 TREE_OPERAND (arg0, 0), TREE_OPERAND (arg0, 1));
12966 /* Convert ABS_EXPR<x> == 0 or ABS_EXPR<x> != 0 to x == 0 or x != 0. */
12967 if (TREE_CODE (arg0) == ABS_EXPR
12968 && (integer_zerop (arg1) || real_zerop (arg1)))
12969 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0), arg1);
12971 /* If this is an EQ or NE comparison with zero and ARG0 is
12972 (1 << foo) & bar, convert it to (bar >> foo) & 1. Both require
12973 two operations, but the latter can be done in one less insn
12974 on machines that have only two-operand insns or on which a
12975 constant cannot be the first operand. */
12976 if (TREE_CODE (arg0) == BIT_AND_EXPR
12977 && integer_zerop (arg1))
12979 tree arg00 = TREE_OPERAND (arg0, 0);
12980 tree arg01 = TREE_OPERAND (arg0, 1);
12981 if (TREE_CODE (arg00) == LSHIFT_EXPR
12982 && integer_onep (TREE_OPERAND (arg00, 0)))
12984 tree tem = fold_build2_loc (loc, RSHIFT_EXPR, TREE_TYPE (arg00),
12985 arg01, TREE_OPERAND (arg00, 1));
12986 tem = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg0), tem,
12987 build_int_cst (TREE_TYPE (arg0), 1));
12988 return fold_build2_loc (loc, code, type,
12989 fold_convert_loc (loc, TREE_TYPE (arg1), tem),
12990 arg1);
12992 else if (TREE_CODE (arg01) == LSHIFT_EXPR
12993 && integer_onep (TREE_OPERAND (arg01, 0)))
12995 tree tem = fold_build2_loc (loc, RSHIFT_EXPR, TREE_TYPE (arg01),
12996 arg00, TREE_OPERAND (arg01, 1));
12997 tem = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg0), tem,
12998 build_int_cst (TREE_TYPE (arg0), 1));
12999 return fold_build2_loc (loc, code, type,
13000 fold_convert_loc (loc, TREE_TYPE (arg1), tem),
13001 arg1);
13005 /* If this is an NE or EQ comparison of zero against the result of a
13006 signed MOD operation whose second operand is a power of 2, make
13007 the MOD operation unsigned since it is simpler and equivalent. */
13008 if (integer_zerop (arg1)
13009 && !TYPE_UNSIGNED (TREE_TYPE (arg0))
13010 && (TREE_CODE (arg0) == TRUNC_MOD_EXPR
13011 || TREE_CODE (arg0) == CEIL_MOD_EXPR
13012 || TREE_CODE (arg0) == FLOOR_MOD_EXPR
13013 || TREE_CODE (arg0) == ROUND_MOD_EXPR)
13014 && integer_pow2p (TREE_OPERAND (arg0, 1)))
13016 tree newtype = unsigned_type_for (TREE_TYPE (arg0));
13017 tree newmod = fold_build2_loc (loc, TREE_CODE (arg0), newtype,
13018 fold_convert_loc (loc, newtype,
13019 TREE_OPERAND (arg0, 0)),
13020 fold_convert_loc (loc, newtype,
13021 TREE_OPERAND (arg0, 1)));
13023 return fold_build2_loc (loc, code, type, newmod,
13024 fold_convert_loc (loc, newtype, arg1));
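	  /* Sketch: for signed x, (x % 4) != 0 is rewritten as
	     ((unsigned int) x % 4U) != 0, which the power-of-two fold for
	     unsigned MOD can then reduce to a mask test; the sign change
	     is only safe for eq/ne comparisons against zero.  */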
13027 /* Fold ((X >> C1) & C2) == 0 and ((X >> C1) & C2) != 0 where
13028 C1 is a valid shift constant, and C2 is a power of two, i.e.
13029 a single bit. */
13030 if (TREE_CODE (arg0) == BIT_AND_EXPR
13031 && TREE_CODE (TREE_OPERAND (arg0, 0)) == RSHIFT_EXPR
13032 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1))
13033 == INTEGER_CST
13034 && integer_pow2p (TREE_OPERAND (arg0, 1))
13035 && integer_zerop (arg1))
13037 tree itype = TREE_TYPE (arg0);
13038 tree arg001 = TREE_OPERAND (TREE_OPERAND (arg0, 0), 1);
13039 prec = TYPE_PRECISION (itype);
13041 /* Check for a valid shift count. */
13042 if (wi::ltu_p (arg001, prec))
13044 tree arg01 = TREE_OPERAND (arg0, 1);
13045 tree arg000 = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
13046 unsigned HOST_WIDE_INT log2 = tree_log2 (arg01);
13047 /* If (C2 << C1) doesn't overflow, then ((X >> C1) & C2) != 0
13048 can be rewritten as (X & (C2 << C1)) != 0. */
13049 if ((log2 + TREE_INT_CST_LOW (arg001)) < prec)
13051 tem = fold_build2_loc (loc, LSHIFT_EXPR, itype, arg01, arg001);
13052 tem = fold_build2_loc (loc, BIT_AND_EXPR, itype, arg000, tem);
13053 return fold_build2_loc (loc, code, type, tem,
13054 fold_convert_loc (loc, itype, arg1));
13056 /* Otherwise, for signed (arithmetic) shifts,
13057 ((X >> C1) & C2) != 0 is rewritten as X < 0, and
13058 ((X >> C1) & C2) == 0 is rewritten as X >= 0. */
13059 else if (!TYPE_UNSIGNED (itype))
13060 return fold_build2_loc (loc, code == EQ_EXPR ? GE_EXPR : LT_EXPR, type,
13061 arg000, build_int_cst (itype, 0));
 13062 /* Otherwise, for unsigned (logical) shifts,
13063 ((X >> C1) & C2) != 0 is rewritten as (X,false), and
13064 ((X >> C1) & C2) == 0 is rewritten as (X,true). */
13065 else
13066 return omit_one_operand_loc (loc, type,
13067 code == EQ_EXPR ? integer_one_node
13068 : integer_zero_node,
13069 arg000);
13073 /* If we have (A & C) == C where C is a power of 2, convert this into
13074 (A & C) != 0. Similarly for NE_EXPR. */
13075 if (TREE_CODE (arg0) == BIT_AND_EXPR
13076 && integer_pow2p (TREE_OPERAND (arg0, 1))
13077 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
13078 return fold_build2_loc (loc, code == EQ_EXPR ? NE_EXPR : EQ_EXPR, type,
13079 arg0, fold_convert_loc (loc, TREE_TYPE (arg0),
13080 integer_zero_node));
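	/* E.g. (illustrative) (flags & 8) == 8 becomes (flags & 8) != 0,
	   the canonical single-bit test; the rewrite relies on 8 being a
	   power of two.  */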
13082 /* If we have (A & C) != 0 or (A & C) == 0 and C is the sign
13083 bit, then fold the expression into A < 0 or A >= 0. */
13084 tem = fold_single_bit_test_into_sign_test (loc, code, arg0, arg1, type);
13085 if (tem)
13086 return tem;
13088 /* If we have (A & C) == D where D & ~C != 0, convert this into 0.
13089 Similarly for NE_EXPR. */
13090 if (TREE_CODE (arg0) == BIT_AND_EXPR
13091 && TREE_CODE (arg1) == INTEGER_CST
13092 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
13094 tree notc = fold_build1_loc (loc, BIT_NOT_EXPR,
13095 TREE_TYPE (TREE_OPERAND (arg0, 1)),
13096 TREE_OPERAND (arg0, 1));
13097 tree dandnotc
13098 = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg0),
13099 fold_convert_loc (loc, TREE_TYPE (arg0), arg1),
13100 notc);
13101 tree rslt = code == EQ_EXPR ? integer_zero_node : integer_one_node;
13102 if (integer_nonzerop (dandnotc))
13103 return omit_one_operand_loc (loc, type, rslt, arg0);
13106 /* If we have (A | C) == D where C & ~D != 0, convert this into 0.
13107 Similarly for NE_EXPR. */
13108 if (TREE_CODE (arg0) == BIT_IOR_EXPR
13109 && TREE_CODE (arg1) == INTEGER_CST
13110 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
13112 tree notd = fold_build1_loc (loc, BIT_NOT_EXPR, TREE_TYPE (arg1), arg1);
13113 tree candnotd
13114 = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg0),
13115 TREE_OPERAND (arg0, 1),
13116 fold_convert_loc (loc, TREE_TYPE (arg0), notd));
13117 tree rslt = code == EQ_EXPR ? integer_zero_node : integer_one_node;
13118 if (integer_nonzerop (candnotd))
13119 return omit_one_operand_loc (loc, type, rslt, arg0);
13122 /* If this is a comparison of a field, we may be able to simplify it. */
13123 if ((TREE_CODE (arg0) == COMPONENT_REF
13124 || TREE_CODE (arg0) == BIT_FIELD_REF)
13125 /* Handle the constant case even without -O
13126 to make sure the warnings are given. */
13127 && (optimize || TREE_CODE (arg1) == INTEGER_CST))
13129 t1 = optimize_bit_field_compare (loc, code, type, arg0, arg1);
13130 if (t1)
13131 return t1;
13134 /* Optimize comparisons of strlen vs zero to a compare of the
13135 first character of the string vs zero. To wit,
13136 strlen(ptr) == 0 => *ptr == 0
13137 strlen(ptr) != 0 => *ptr != 0
13138 Other cases should reduce to one of these two (or a constant)
13139 due to the return value of strlen being unsigned. */
13140 if (TREE_CODE (arg0) == CALL_EXPR
13141 && integer_zerop (arg1))
13143 tree fndecl = get_callee_fndecl (arg0);
13145 if (fndecl
13146 && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
13147 && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_STRLEN
13148 && call_expr_nargs (arg0) == 1
13149 && TREE_CODE (TREE_TYPE (CALL_EXPR_ARG (arg0, 0))) == POINTER_TYPE)
13151 tree iref = build_fold_indirect_ref_loc (loc,
13152 CALL_EXPR_ARG (arg0, 0));
13153 return fold_build2_loc (loc, code, type, iref,
13154 build_int_cst (TREE_TYPE (iref), 0));
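	      /* Sketch: strlen (s) == 0 folds to *s == 0, testing just
		 the first character instead of scanning the string.  */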
13158 /* Fold (X >> C) != 0 into X < 0 if C is one less than the width
13159 of X. Similarly fold (X >> C) == 0 into X >= 0. */
13160 if (TREE_CODE (arg0) == RSHIFT_EXPR
13161 && integer_zerop (arg1)
13162 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
13164 tree arg00 = TREE_OPERAND (arg0, 0);
13165 tree arg01 = TREE_OPERAND (arg0, 1);
13166 tree itype = TREE_TYPE (arg00);
13167 if (wi::eq_p (arg01, TYPE_PRECISION (itype) - 1))
13169 if (TYPE_UNSIGNED (itype))
13171 itype = signed_type_for (itype);
13172 arg00 = fold_convert_loc (loc, itype, arg00);
13174 return fold_build2_loc (loc, code == EQ_EXPR ? GE_EXPR : LT_EXPR,
13175 type, arg00, build_zero_cst (itype));
13179 /* (X ^ Y) == 0 becomes X == Y, and (X ^ Y) != 0 becomes X != Y. */
13180 if (integer_zerop (arg1)
13181 && TREE_CODE (arg0) == BIT_XOR_EXPR)
13182 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0),
13183 TREE_OPERAND (arg0, 1));
13185 /* (X ^ Y) == Y becomes X == 0. We know that Y has no side-effects. */
13186 if (TREE_CODE (arg0) == BIT_XOR_EXPR
13187 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
13188 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0),
13189 build_zero_cst (TREE_TYPE (arg0)));
13190 /* Likewise (X ^ Y) == X becomes Y == 0. X has no side-effects. */
13191 if (TREE_CODE (arg0) == BIT_XOR_EXPR
13192 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
13193 && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
13194 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 1),
13195 build_zero_cst (TREE_TYPE (arg0)));
13197 /* (X ^ C1) op C2 can be rewritten as X op (C1 ^ C2). */
13198 if (TREE_CODE (arg0) == BIT_XOR_EXPR
13199 && TREE_CODE (arg1) == INTEGER_CST
13200 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
13201 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0),
13202 fold_build2_loc (loc, BIT_XOR_EXPR, TREE_TYPE (arg1),
13203 TREE_OPERAND (arg0, 1), arg1));
13205 /* Fold (~X & C) == 0 into (X & C) != 0 and (~X & C) != 0 into
13206 (X & C) == 0 when C is a single bit. */
13207 if (TREE_CODE (arg0) == BIT_AND_EXPR
13208 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_NOT_EXPR
13209 && integer_zerop (arg1)
13210 && integer_pow2p (TREE_OPERAND (arg0, 1)))
13212 tem = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg0),
13213 TREE_OPERAND (TREE_OPERAND (arg0, 0), 0),
13214 TREE_OPERAND (arg0, 1));
13215 return fold_build2_loc (loc, code == EQ_EXPR ? NE_EXPR : EQ_EXPR,
13216 type, tem,
13217 fold_convert_loc (loc, TREE_TYPE (arg0),
13218 arg1));
13221 /* Fold ((X & C) ^ C) eq/ne 0 into (X & C) ne/eq 0, when the
13222 constant C is a power of two, i.e. a single bit. */
13223 if (TREE_CODE (arg0) == BIT_XOR_EXPR
13224 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_AND_EXPR
13225 && integer_zerop (arg1)
13226 && integer_pow2p (TREE_OPERAND (arg0, 1))
13227 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
13228 TREE_OPERAND (arg0, 1), OEP_ONLY_CONST))
13230 tree arg00 = TREE_OPERAND (arg0, 0);
13231 return fold_build2_loc (loc, code == EQ_EXPR ? NE_EXPR : EQ_EXPR, type,
13232 arg00, build_int_cst (TREE_TYPE (arg00), 0));
13235 /* Likewise, fold ((X ^ C) & C) eq/ne 0 into (X & C) ne/eq 0,
 13236 when C is a power of two, i.e. a single bit. */
13237 if (TREE_CODE (arg0) == BIT_AND_EXPR
13238 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_XOR_EXPR
13239 && integer_zerop (arg1)
13240 && integer_pow2p (TREE_OPERAND (arg0, 1))
13241 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
13242 TREE_OPERAND (arg0, 1), OEP_ONLY_CONST))
13244 tree arg000 = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
13245 tem = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg000),
13246 arg000, TREE_OPERAND (arg0, 1));
13247 return fold_build2_loc (loc, code == EQ_EXPR ? NE_EXPR : EQ_EXPR, type,
13248 tem, build_int_cst (TREE_TYPE (tem), 0));
13251 if (integer_zerop (arg1)
13252 && tree_expr_nonzero_p (arg0))
 13254 tree res = constant_boolean_node (code == NE_EXPR, type);
13255 return omit_one_operand_loc (loc, type, res, arg0);
13258 /* Fold -X op -Y as X op Y, where op is eq/ne. */
13259 if (TREE_CODE (arg0) == NEGATE_EXPR
13260 && TREE_CODE (arg1) == NEGATE_EXPR)
13261 return fold_build2_loc (loc, code, type,
13262 TREE_OPERAND (arg0, 0),
13263 fold_convert_loc (loc, TREE_TYPE (arg0),
13264 TREE_OPERAND (arg1, 0)));
 13266 /* Fold (X & C) op (Y & C) as ((X ^ Y) & C) op 0, and symmetries. */
13267 if (TREE_CODE (arg0) == BIT_AND_EXPR
13268 && TREE_CODE (arg1) == BIT_AND_EXPR)
13270 tree arg00 = TREE_OPERAND (arg0, 0);
13271 tree arg01 = TREE_OPERAND (arg0, 1);
13272 tree arg10 = TREE_OPERAND (arg1, 0);
13273 tree arg11 = TREE_OPERAND (arg1, 1);
13274 tree itype = TREE_TYPE (arg0);
13276 if (operand_equal_p (arg01, arg11, 0))
13277 return fold_build2_loc (loc, code, type,
13278 fold_build2_loc (loc, BIT_AND_EXPR, itype,
13279 fold_build2_loc (loc,
13280 BIT_XOR_EXPR, itype,
13281 arg00, arg10),
13282 arg01),
13283 build_zero_cst (itype));
13285 if (operand_equal_p (arg01, arg10, 0))
13286 return fold_build2_loc (loc, code, type,
13287 fold_build2_loc (loc, BIT_AND_EXPR, itype,
13288 fold_build2_loc (loc,
13289 BIT_XOR_EXPR, itype,
13290 arg00, arg11),
13291 arg01),
13292 build_zero_cst (itype));
13294 if (operand_equal_p (arg00, arg11, 0))
13295 return fold_build2_loc (loc, code, type,
13296 fold_build2_loc (loc, BIT_AND_EXPR, itype,
13297 fold_build2_loc (loc,
13298 BIT_XOR_EXPR, itype,
13299 arg01, arg10),
13300 arg00),
13301 build_zero_cst (itype));
13303 if (operand_equal_p (arg00, arg10, 0))
13304 return fold_build2_loc (loc, code, type,
13305 fold_build2_loc (loc, BIT_AND_EXPR, itype,
13306 fold_build2_loc (loc,
13307 BIT_XOR_EXPR, itype,
13308 arg01, arg11),
13309 arg00),
13310 build_zero_cst (itype));
13313 if (TREE_CODE (arg0) == BIT_XOR_EXPR
13314 && TREE_CODE (arg1) == BIT_XOR_EXPR)
13316 tree arg00 = TREE_OPERAND (arg0, 0);
13317 tree arg01 = TREE_OPERAND (arg0, 1);
13318 tree arg10 = TREE_OPERAND (arg1, 0);
13319 tree arg11 = TREE_OPERAND (arg1, 1);
13320 tree itype = TREE_TYPE (arg0);
13322 /* Optimize (X ^ Z) op (Y ^ Z) as X op Y, and symmetries.
13323 operand_equal_p guarantees no side-effects so we don't need
13324 to use omit_one_operand on Z. */
13325 if (operand_equal_p (arg01, arg11, 0))
13326 return fold_build2_loc (loc, code, type, arg00,
13327 fold_convert_loc (loc, TREE_TYPE (arg00),
13328 arg10));
13329 if (operand_equal_p (arg01, arg10, 0))
13330 return fold_build2_loc (loc, code, type, arg00,
13331 fold_convert_loc (loc, TREE_TYPE (arg00),
13332 arg11));
13333 if (operand_equal_p (arg00, arg11, 0))
13334 return fold_build2_loc (loc, code, type, arg01,
13335 fold_convert_loc (loc, TREE_TYPE (arg01),
13336 arg10));
13337 if (operand_equal_p (arg00, arg10, 0))
13338 return fold_build2_loc (loc, code, type, arg01,
13339 fold_convert_loc (loc, TREE_TYPE (arg01),
13340 arg11));
13342 /* Optimize (X ^ C1) op (Y ^ C2) as (X ^ (C1 ^ C2)) op Y. */
13343 if (TREE_CODE (arg01) == INTEGER_CST
13344 && TREE_CODE (arg11) == INTEGER_CST)
13346 tem = fold_build2_loc (loc, BIT_XOR_EXPR, itype, arg01,
13347 fold_convert_loc (loc, itype, arg11));
13348 tem = fold_build2_loc (loc, BIT_XOR_EXPR, itype, arg00, tem);
13349 return fold_build2_loc (loc, code, type, tem,
13350 fold_convert_loc (loc, itype, arg10));
13354 /* Attempt to simplify equality/inequality comparisons of complex
13355 values. Only lower the comparison if the result is known or
13356 can be simplified to a single scalar comparison. */
13357 if ((TREE_CODE (arg0) == COMPLEX_EXPR
13358 || TREE_CODE (arg0) == COMPLEX_CST)
13359 && (TREE_CODE (arg1) == COMPLEX_EXPR
13360 || TREE_CODE (arg1) == COMPLEX_CST))
13362 tree real0, imag0, real1, imag1;
13363 tree rcond, icond;
13365 if (TREE_CODE (arg0) == COMPLEX_EXPR)
13367 real0 = TREE_OPERAND (arg0, 0);
13368 imag0 = TREE_OPERAND (arg0, 1);
13370 else
13372 real0 = TREE_REALPART (arg0);
13373 imag0 = TREE_IMAGPART (arg0);
13376 if (TREE_CODE (arg1) == COMPLEX_EXPR)
13378 real1 = TREE_OPERAND (arg1, 0);
13379 imag1 = TREE_OPERAND (arg1, 1);
13381 else
13383 real1 = TREE_REALPART (arg1);
13384 imag1 = TREE_IMAGPART (arg1);
13387 rcond = fold_binary_loc (loc, code, type, real0, real1);
13388 if (rcond && TREE_CODE (rcond) == INTEGER_CST)
13390 if (integer_zerop (rcond))
13392 if (code == EQ_EXPR)
13393 return omit_two_operands_loc (loc, type, boolean_false_node,
13394 imag0, imag1);
13395 return fold_build2_loc (loc, NE_EXPR, type, imag0, imag1);
13397 else
13399 if (code == NE_EXPR)
13400 return omit_two_operands_loc (loc, type, boolean_true_node,
13401 imag0, imag1);
13402 return fold_build2_loc (loc, EQ_EXPR, type, imag0, imag1);
13406 icond = fold_binary_loc (loc, code, type, imag0, imag1);
13407 if (icond && TREE_CODE (icond) == INTEGER_CST)
13409 if (integer_zerop (icond))
13411 if (code == EQ_EXPR)
13412 return omit_two_operands_loc (loc, type, boolean_false_node,
13413 real0, real1);
13414 return fold_build2_loc (loc, NE_EXPR, type, real0, real1);
13416 else
13418 if (code == NE_EXPR)
13419 return omit_two_operands_loc (loc, type, boolean_true_node,
13420 real0, real1);
13421 return fold_build2_loc (loc, EQ_EXPR, type, real0, real1);
13426 return NULL_TREE;
13428 case LT_EXPR:
13429 case GT_EXPR:
13430 case LE_EXPR:
13431 case GE_EXPR:
13432 tem = fold_comparison (loc, code, type, op0, op1);
13433 if (tem != NULL_TREE)
13434 return tem;
13436 /* Transform comparisons of the form X +- C CMP X. */
13437 if ((TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
13438 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
13439 && ((TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
13440 && !HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0))))
13441 || (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
13442 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))))
13444 tree arg01 = TREE_OPERAND (arg0, 1);
13445 enum tree_code code0 = TREE_CODE (arg0);
13446 int is_positive;
13448 if (TREE_CODE (arg01) == REAL_CST)
13449 is_positive = REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg01)) ? -1 : 1;
13450 else
13451 is_positive = tree_int_cst_sgn (arg01);
13453 /* (X - c) > X becomes false. */
13454 if (code == GT_EXPR
13455 && ((code0 == MINUS_EXPR && is_positive >= 0)
13456 || (code0 == PLUS_EXPR && is_positive <= 0)))
13458 if (TREE_CODE (arg01) == INTEGER_CST
13459 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
13460 fold_overflow_warning (("assuming signed overflow does not "
13461 "occur when assuming that (X - c) > X "
13462 "is always false"),
13463 WARN_STRICT_OVERFLOW_ALL);
13464 return constant_boolean_node (0, type);
13467 /* Likewise (X + c) < X becomes false. */
13468 if (code == LT_EXPR
13469 && ((code0 == PLUS_EXPR && is_positive >= 0)
13470 || (code0 == MINUS_EXPR && is_positive <= 0)))
13472 if (TREE_CODE (arg01) == INTEGER_CST
13473 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
13474 fold_overflow_warning (("assuming signed overflow does not "
13475 "occur when assuming that "
13476 "(X + c) < X is always false"),
13477 WARN_STRICT_OVERFLOW_ALL);
13478 return constant_boolean_node (0, type);
13481 /* Convert (X - c) <= X to true. */
13482 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1)))
13483 && code == LE_EXPR
13484 && ((code0 == MINUS_EXPR && is_positive >= 0)
13485 || (code0 == PLUS_EXPR && is_positive <= 0)))
13487 if (TREE_CODE (arg01) == INTEGER_CST
13488 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
13489 fold_overflow_warning (("assuming signed overflow does not "
13490 "occur when assuming that "
13491 "(X - c) <= X is always true"),
13492 WARN_STRICT_OVERFLOW_ALL);
13493 return constant_boolean_node (1, type);
13496 /* Convert (X + c) >= X to true. */
13497 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1)))
13498 && code == GE_EXPR
13499 && ((code0 == PLUS_EXPR && is_positive >= 0)
13500 || (code0 == MINUS_EXPR && is_positive <= 0)))
13502 if (TREE_CODE (arg01) == INTEGER_CST
13503 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
13504 fold_overflow_warning (("assuming signed overflow does not "
13505 "occur when assuming that "
13506 "(X + c) >= X is always true"),
13507 WARN_STRICT_OVERFLOW_ALL);
13508 return constant_boolean_node (1, type);
13511 if (TREE_CODE (arg01) == INTEGER_CST)
13513 /* Convert X + c > X and X - c < X to true for integers. */
13514 if (code == GT_EXPR
13515 && ((code0 == PLUS_EXPR && is_positive > 0)
13516 || (code0 == MINUS_EXPR && is_positive < 0)))
13518 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
13519 fold_overflow_warning (("assuming signed overflow does "
13520 "not occur when assuming that "
13521 "(X + c) > X is always true"),
13522 WARN_STRICT_OVERFLOW_ALL);
13523 return constant_boolean_node (1, type);
13526 if (code == LT_EXPR
13527 && ((code0 == MINUS_EXPR && is_positive > 0)
13528 || (code0 == PLUS_EXPR && is_positive < 0)))
13530 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
13531 fold_overflow_warning (("assuming signed overflow does "
13532 "not occur when assuming that "
13533 "(X - c) < X is always true"),
13534 WARN_STRICT_OVERFLOW_ALL);
13535 return constant_boolean_node (1, type);
13538 /* Convert X + c <= X and X - c >= X to false for integers. */
13539 if (code == LE_EXPR
13540 && ((code0 == PLUS_EXPR && is_positive > 0)
13541 || (code0 == MINUS_EXPR && is_positive < 0)))
13543 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
13544 fold_overflow_warning (("assuming signed overflow does "
13545 "not occur when assuming that "
13546 "(X + c) <= X is always false"),
13547 WARN_STRICT_OVERFLOW_ALL);
13548 return constant_boolean_node (0, type);
13551 if (code == GE_EXPR
13552 && ((code0 == MINUS_EXPR && is_positive > 0)
13553 || (code0 == PLUS_EXPR && is_positive < 0)))
13555 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
13556 fold_overflow_warning (("assuming signed overflow does "
13557 "not occur when assuming that "
13558 "(X - c) >= X is always false"),
13559 WARN_STRICT_OVERFLOW_ALL);
13560 return constant_boolean_node (0, type);
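	  /* Illustrative consequence: for signed int i, i + 1 > i folds
	     to 1 here because the guard requires TYPE_OVERFLOW_UNDEFINED,
	     and -Wstrict-overflow can report the assumption; with -fwrapv
	     these integer folds do not fire.  */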
13565 /* Comparisons with the highest or lowest possible integer of
13566 the specified precision will have known values. */
13568 tree arg1_type = TREE_TYPE (arg1);
13569 unsigned int prec = TYPE_PRECISION (arg1_type);
13571 if (TREE_CODE (arg1) == INTEGER_CST
13572 && (INTEGRAL_TYPE_P (arg1_type) || POINTER_TYPE_P (arg1_type)))
13574 wide_int max = wi::max_value (arg1_type);
13575 wide_int signed_max = wi::max_value (prec, SIGNED);
13576 wide_int min = wi::min_value (arg1_type);
13578 if (wi::eq_p (arg1, max))
13579 switch (code)
13581 case GT_EXPR:
13582 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
13584 case GE_EXPR:
13585 return fold_build2_loc (loc, EQ_EXPR, type, op0, op1);
13587 case LE_EXPR:
13588 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
13590 case LT_EXPR:
13591 return fold_build2_loc (loc, NE_EXPR, type, op0, op1);
13593 /* The GE_EXPR and LT_EXPR cases above are not normally
13594 reached because of previous transformations. */
13596 default:
13597 break;
13599 else if (wi::eq_p (arg1, max - 1))
13600 switch (code)
13602 case GT_EXPR:
13603 arg1 = const_binop (PLUS_EXPR, arg1,
13604 build_int_cst (TREE_TYPE (arg1), 1));
13605 return fold_build2_loc (loc, EQ_EXPR, type,
13606 fold_convert_loc (loc,
13607 TREE_TYPE (arg1), arg0),
13608 arg1);
13609 case LE_EXPR:
13610 arg1 = const_binop (PLUS_EXPR, arg1,
13611 build_int_cst (TREE_TYPE (arg1), 1));
13612 return fold_build2_loc (loc, NE_EXPR, type,
13613 fold_convert_loc (loc, TREE_TYPE (arg1),
13614 arg0),
13615 arg1);
13616 default:
13617 break;
13619 else if (wi::eq_p (arg1, min))
13620 switch (code)
13622 case LT_EXPR:
13623 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
13625 case LE_EXPR:
13626 return fold_build2_loc (loc, EQ_EXPR, type, op0, op1);
13628 case GE_EXPR:
13629 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
13631 case GT_EXPR:
13632 return fold_build2_loc (loc, NE_EXPR, type, op0, op1);
13634 default:
13635 break;
13637 else if (wi::eq_p (arg1, min + 1))
13638 switch (code)
13640 case GE_EXPR:
13641 arg1 = const_binop (MINUS_EXPR, arg1,
13642 build_int_cst (TREE_TYPE (arg1), 1));
13643 return fold_build2_loc (loc, NE_EXPR, type,
13644 fold_convert_loc (loc,
13645 TREE_TYPE (arg1), arg0),
13646 arg1);
13647 case LT_EXPR:
13648 arg1 = const_binop (MINUS_EXPR, arg1,
13649 build_int_cst (TREE_TYPE (arg1), 1));
13650 return fold_build2_loc (loc, EQ_EXPR, type,
13651 fold_convert_loc (loc, TREE_TYPE (arg1),
13652 arg0),
13653 arg1);
13654 default:
13655 break;
13658 else if (wi::eq_p (arg1, signed_max)
13659 && TYPE_UNSIGNED (arg1_type)
13660 /* We will flip the signedness of the comparison operator
13661 associated with the mode of arg1, so the sign bit is
13662 specified by this mode. Check that arg1 is the signed
13663 max associated with this sign bit. */
13664 && prec == GET_MODE_PRECISION (TYPE_MODE (arg1_type))
13665 /* signed_type does not work on pointer types. */
13666 && INTEGRAL_TYPE_P (arg1_type))
13668 /* The following case also applies to X < signed_max+1
 13669 and X >= signed_max+1 because of previous transformations. */
13670 if (code == LE_EXPR || code == GT_EXPR)
13672 tree st = signed_type_for (arg1_type);
13673 return fold_build2_loc (loc,
13674 code == LE_EXPR ? GE_EXPR : LT_EXPR,
13675 type, fold_convert_loc (loc, st, arg0),
13676 build_int_cst (st, 0));
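	      /* Sketch: for unsigned int x, x <= 0x7fffffff becomes
		 (int) x >= 0 and x > 0x7fffffff becomes (int) x < 0,
		 reducing the comparison to a sign-bit test.  */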
13682 /* If we are comparing an ABS_EXPR with a constant, we can
13683 convert all the cases into explicit comparisons, but they may
13684 well not be faster than doing the ABS and one comparison.
13685 But ABS (X) <= C is a range comparison, which becomes a subtraction
13686 and a comparison, and is probably faster. */
13687 if (code == LE_EXPR
13688 && TREE_CODE (arg1) == INTEGER_CST
13689 && TREE_CODE (arg0) == ABS_EXPR
13690 && ! TREE_SIDE_EFFECTS (arg0)
13691 && (0 != (tem = negate_expr (arg1)))
13692 && TREE_CODE (tem) == INTEGER_CST
13693 && !TREE_OVERFLOW (tem))
13694 return fold_build2_loc (loc, TRUTH_ANDIF_EXPR, type,
13695 build2 (GE_EXPR, type,
13696 TREE_OPERAND (arg0, 0), tem),
13697 build2 (LE_EXPR, type,
13698 TREE_OPERAND (arg0, 0), arg1));
13700 /* Convert ABS_EXPR<x> >= 0 to true. */
13701 strict_overflow_p = false;
13702 if (code == GE_EXPR
13703 && (integer_zerop (arg1)
13704 || (! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0)))
13705 && real_zerop (arg1)))
13706 && tree_expr_nonnegative_warnv_p (arg0, &strict_overflow_p))
13708 if (strict_overflow_p)
13709 fold_overflow_warning (("assuming signed overflow does not occur "
13710 "when simplifying comparison of "
13711 "absolute value and zero"),
13712 WARN_STRICT_OVERFLOW_CONDITIONAL);
13713 return omit_one_operand_loc (loc, type,
13714 constant_boolean_node (true, type),
13715 arg0);
13718 /* Convert ABS_EXPR<x> < 0 to false. */
13719 strict_overflow_p = false;
13720 if (code == LT_EXPR
13721 && (integer_zerop (arg1) || real_zerop (arg1))
13722 && tree_expr_nonnegative_warnv_p (arg0, &strict_overflow_p))
13724 if (strict_overflow_p)
13725 fold_overflow_warning (("assuming signed overflow does not occur "
13726 "when simplifying comparison of "
13727 "absolute value and zero"),
13728 WARN_STRICT_OVERFLOW_CONDITIONAL);
13729 return omit_one_operand_loc (loc, type,
13730 constant_boolean_node (false, type),
13731 arg0);
13734 /* If X is unsigned, convert X < (1 << Y) into X >> Y == 0
13735 and similarly for >= into !=. */
13736 if ((code == LT_EXPR || code == GE_EXPR)
13737 && TYPE_UNSIGNED (TREE_TYPE (arg0))
13738 && TREE_CODE (arg1) == LSHIFT_EXPR
13739 && integer_onep (TREE_OPERAND (arg1, 0)))
13740 return build2_loc (loc, code == LT_EXPR ? EQ_EXPR : NE_EXPR, type,
13741 build2 (RSHIFT_EXPR, TREE_TYPE (arg0), arg0,
13742 TREE_OPERAND (arg1, 1)),
13743 build_zero_cst (TREE_TYPE (arg0)));
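	/* Illustrative: for unsigned x, x < (1 << y) becomes
	   (x >> y) == 0 and x >= (1 << y) becomes (x >> y) != 0, so the
	   shifted value 1 << y never needs to be materialized.  */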
13745 /* Similarly for X < (cast) (1 << Y). But cast can't be narrowing,
13746 otherwise Y might be >= # of bits in X's type and thus e.g.
 13747 (unsigned char) (1 << Y) for Y == 15 might be 0.
13748 If the cast is widening, then 1 << Y should have unsigned type,
13749 otherwise if Y is number of bits in the signed shift type minus 1,
 13750 we can't optimize this. E.g. (unsigned long long) (1 << Y) for
 13751 Y == 31 might be 0xffffffff80000000. */
13752 if ((code == LT_EXPR || code == GE_EXPR)
13753 && TYPE_UNSIGNED (TREE_TYPE (arg0))
13754 && CONVERT_EXPR_P (arg1)
13755 && TREE_CODE (TREE_OPERAND (arg1, 0)) == LSHIFT_EXPR
13756 && (TYPE_PRECISION (TREE_TYPE (arg1))
13757 >= TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg1, 0))))
13758 && (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg1, 0)))
13759 || (TYPE_PRECISION (TREE_TYPE (arg1))
13760 == TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg1, 0)))))
13761 && integer_onep (TREE_OPERAND (TREE_OPERAND (arg1, 0), 0)))
13763 tem = build2 (RSHIFT_EXPR, TREE_TYPE (arg0), arg0,
13764 TREE_OPERAND (TREE_OPERAND (arg1, 0), 1));
13765 return build2_loc (loc, code == LT_EXPR ? EQ_EXPR : NE_EXPR, type,
13766 fold_convert_loc (loc, TREE_TYPE (arg0), tem),
13767 build_zero_cst (TREE_TYPE (arg0)));
13770 return NULL_TREE;
13772 case UNORDERED_EXPR:
13773 case ORDERED_EXPR:
13774 case UNLT_EXPR:
13775 case UNLE_EXPR:
13776 case UNGT_EXPR:
13777 case UNGE_EXPR:
13778 case UNEQ_EXPR:
13779 case LTGT_EXPR:
13780 if (TREE_CODE (arg0) == REAL_CST && TREE_CODE (arg1) == REAL_CST)
13782 t1 = fold_relational_const (code, type, arg0, arg1);
13783 if (t1 != NULL_TREE)
13784 return t1;
13787 /* If the first operand is NaN, the result is constant. */
13788 if (TREE_CODE (arg0) == REAL_CST
13789 && REAL_VALUE_ISNAN (TREE_REAL_CST (arg0))
13790 && (code != LTGT_EXPR || ! flag_trapping_math))
13792 t1 = (code == ORDERED_EXPR || code == LTGT_EXPR)
13793 ? integer_zero_node
13794 : integer_one_node;
13795 return omit_one_operand_loc (loc, type, t1, arg1);
13798 /* If the second operand is NaN, the result is constant. */
13799 if (TREE_CODE (arg1) == REAL_CST
13800 && REAL_VALUE_ISNAN (TREE_REAL_CST (arg1))
13801 && (code != LTGT_EXPR || ! flag_trapping_math))
13803 t1 = (code == ORDERED_EXPR || code == LTGT_EXPR)
13804 ? integer_zero_node
13805 : integer_one_node;
13806 return omit_one_operand_loc (loc, type, t1, arg0);
13809 /* Simplify unordered comparison of something with itself. */
13810 if ((code == UNLE_EXPR || code == UNGE_EXPR || code == UNEQ_EXPR)
13811 && operand_equal_p (arg0, arg1, 0))
13812 return constant_boolean_node (1, type);
13814 if (code == LTGT_EXPR
13815 && !flag_trapping_math
13816 && operand_equal_p (arg0, arg1, 0))
13817 return constant_boolean_node (0, type);
13819 /* Fold (double)float1 CMP (double)float2 into float1 CMP float2. */
13821 tree targ0 = strip_float_extensions (arg0);
13822 tree targ1 = strip_float_extensions (arg1);
13823 tree newtype = TREE_TYPE (targ0);
13825 if (TYPE_PRECISION (TREE_TYPE (targ1)) > TYPE_PRECISION (newtype))
13826 newtype = TREE_TYPE (targ1);
13828 if (TYPE_PRECISION (newtype) < TYPE_PRECISION (TREE_TYPE (arg0)))
13829 return fold_build2_loc (loc, code, type,
13830 fold_convert_loc (loc, newtype, targ0),
13831 fold_convert_loc (loc, newtype, targ1));
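	  /* E.g. (illustrative) with  float f, g;  the comparison
	       (double) f < (double) g
	     is narrowed back to f < g, since widening both operands to
	     the common wider mode cannot change the result.  */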
13834 return NULL_TREE;
13836 case COMPOUND_EXPR:
13837 /* When pedantic, a compound expression can be neither an lvalue
13838 nor an integer constant expression. */
13839 if (TREE_SIDE_EFFECTS (arg0) || TREE_CONSTANT (arg1))
13840 return NULL_TREE;
13841 /* Don't let (0, 0) be null pointer constant. */
13842 tem = integer_zerop (arg1) ? build1 (NOP_EXPR, type, arg1)
13843 : fold_convert_loc (loc, type, arg1);
13844 return pedantic_non_lvalue_loc (loc, tem);
13846 case COMPLEX_EXPR:
13847 if ((TREE_CODE (arg0) == REAL_CST
13848 && TREE_CODE (arg1) == REAL_CST)
13849 || (TREE_CODE (arg0) == INTEGER_CST
13850 && TREE_CODE (arg1) == INTEGER_CST))
13851 return build_complex (type, arg0, arg1);
13852 if (TREE_CODE (arg0) == REALPART_EXPR
13853 && TREE_CODE (arg1) == IMAGPART_EXPR
13854 && TREE_TYPE (TREE_OPERAND (arg0, 0)) == type
13855 && operand_equal_p (TREE_OPERAND (arg0, 0),
13856 TREE_OPERAND (arg1, 0), 0))
13857 return omit_one_operand_loc (loc, type, TREE_OPERAND (arg0, 0),
13858 TREE_OPERAND (arg1, 0));
13859 return NULL_TREE;
13861 case ASSERT_EXPR:
13862 /* An ASSERT_EXPR should never be passed to fold_binary. */
13863 gcc_unreachable ();
13865 case VEC_PACK_TRUNC_EXPR:
13866 case VEC_PACK_FIX_TRUNC_EXPR:
13868 unsigned int nelts = TYPE_VECTOR_SUBPARTS (type), i;
13869 tree *elts;
13871 gcc_assert (TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0)) == nelts / 2
13872 && TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg1)) == nelts / 2);
13873 if (TREE_CODE (arg0) != VECTOR_CST || TREE_CODE (arg1) != VECTOR_CST)
13874 return NULL_TREE;
13876 elts = XALLOCAVEC (tree, nelts);
13877 if (!vec_cst_ctor_to_array (arg0, elts)
13878 || !vec_cst_ctor_to_array (arg1, elts + nelts / 2))
13879 return NULL_TREE;
13881 for (i = 0; i < nelts; i++)
13883 elts[i] = fold_convert_const (code == VEC_PACK_TRUNC_EXPR
13884 ? NOP_EXPR : FIX_TRUNC_EXPR,
13885 TREE_TYPE (type), elts[i]);
13886 if (elts[i] == NULL_TREE || !CONSTANT_CLASS_P (elts[i]))
13887 return NULL_TREE;
13890 return build_vector (type, elts);
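/* Illustrative sketch (not from the GCC sources): packing two
   constant V2SI vectors into one V4HI vector with VEC_PACK_TRUNC_EXPR
   truncates each element to the narrower element type:

     { 0x10001, 0x20002 }, { 3, 0x12345678 }
       ==> { 0x0001, 0x0002, 0x0003, 0x5678 }

   arg0 supplies the first nelts/2 lanes and arg1 the rest, in
   VECTOR_CST lane order; any element whose conversion does not yield
   a constant aborts the fold.  */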
13893 case VEC_WIDEN_MULT_LO_EXPR:
13894 case VEC_WIDEN_MULT_HI_EXPR:
13895 case VEC_WIDEN_MULT_EVEN_EXPR:
13896 case VEC_WIDEN_MULT_ODD_EXPR:
13898 unsigned int nelts = TYPE_VECTOR_SUBPARTS (type);
13899 unsigned int out, ofs, scale;
13900 tree *elts;
13902 gcc_assert (TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0)) == nelts * 2
13903 && TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg1)) == nelts * 2);
13904 if (TREE_CODE (arg0) != VECTOR_CST || TREE_CODE (arg1) != VECTOR_CST)
13905 return NULL_TREE;
13907 elts = XALLOCAVEC (tree, nelts * 4);
13908 if (!vec_cst_ctor_to_array (arg0, elts)
13909 || !vec_cst_ctor_to_array (arg1, elts + nelts * 2))
13910 return NULL_TREE;
13912 if (code == VEC_WIDEN_MULT_LO_EXPR)
13913 scale = 0, ofs = BYTES_BIG_ENDIAN ? nelts : 0;
13914 else if (code == VEC_WIDEN_MULT_HI_EXPR)
13915 scale = 0, ofs = BYTES_BIG_ENDIAN ? 0 : nelts;
13916 else if (code == VEC_WIDEN_MULT_EVEN_EXPR)
13917 scale = 1, ofs = 0;
13918 else /* if (code == VEC_WIDEN_MULT_ODD_EXPR) */
13919 scale = 1, ofs = 1;
13921 for (out = 0; out < nelts; out++)
13923 unsigned int in1 = (out << scale) + ofs;
13924 unsigned int in2 = in1 + nelts * 2;
13925 tree t1, t2;
13927 t1 = fold_convert_const (NOP_EXPR, TREE_TYPE (type), elts[in1]);
13928 t2 = fold_convert_const (NOP_EXPR, TREE_TYPE (type), elts[in2]);
13930 if (t1 == NULL_TREE || t2 == NULL_TREE)
13931 return NULL_TREE;
13932 elts[out] = const_binop (MULT_EXPR, t1, t2);
13933 if (elts[out] == NULL_TREE || !CONSTANT_CLASS_P (elts[out]))
13934 return NULL_TREE;
13937 return build_vector (type, elts);
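/* Illustrative sketch (not from the GCC sources): with constant V4HI
   inputs a = { 1, 2, 3, 4 } and b = { 5, 6, 7, 8 } producing a V2SI
   result, the lane selection above gives

     VEC_WIDEN_MULT_EVEN_EXPR  ==> { 1*5, 3*7 } = { 5, 21 }
     VEC_WIDEN_MULT_ODD_EXPR   ==> { 2*6, 4*8 } = { 12, 32 }
     VEC_WIDEN_MULT_LO_EXPR    ==> { 1*5, 2*6 } on little-endian
     VEC_WIDEN_MULT_HI_EXPR    ==> { 3*7, 4*8 } on little-endian

   with LO and HI swapped on big-endian targets, as encoded in the
   OFS computation.  */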
13940 default:
13941 return NULL_TREE;
13942 } /* switch (code) */
13945 /* Callback for walk_tree, looking for LABEL_EXPR. Return *TP if it is
13946 a LABEL_EXPR; otherwise return NULL_TREE. Do not check the subtrees
13947 of GOTO_EXPR. */
13949 static tree
13950 contains_label_1 (tree *tp, int *walk_subtrees, void *data ATTRIBUTE_UNUSED)
13952 switch (TREE_CODE (*tp))
13954 case LABEL_EXPR:
13955 return *tp;
13957 case GOTO_EXPR:
13958 *walk_subtrees = 0;
13960 /* ... fall through ... */
13962 default:
13963 return NULL_TREE;
13967 /* Return whether the sub-tree ST contains a label which is accessible from
13968 outside the sub-tree. */
13970 static bool
13971 contains_label_p (tree st)
13973 return
13974 (walk_tree_without_duplicates (&st, contains_label_1 , NULL) != NULL_TREE);
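/* Illustrative example (not from the GCC sources): with a constant
   condition, fold_ternary_loc below would normally discard the dead
   arm, but in GNU C that arm may carry a label that is still the
   target of a goto elsewhere, e.g.

     0 ? ({ lab: f (); }) : g ();

   contains_label_p detects the LABEL_EXPR and keeps the operand
   alive.  Subtrees of GOTO_EXPR are skipped because the label operand
   of a goto is a use, not a definition.  */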
13977 /* Fold a ternary expression of code CODE and type TYPE with operands
13978 OP0, OP1, and OP2. Return the folded expression if folding is
13979 successful. Otherwise, return NULL_TREE. */
13981 tree
13982 fold_ternary_loc (location_t loc, enum tree_code code, tree type,
13983 tree op0, tree op1, tree op2)
13985 tree tem;
13986 tree arg0 = NULL_TREE, arg1 = NULL_TREE, arg2 = NULL_TREE;
13987 enum tree_code_class kind = TREE_CODE_CLASS (code);
13989 gcc_assert (IS_EXPR_CODE_CLASS (kind)
13990 && TREE_CODE_LENGTH (code) == 3);
13992 /* Strip any conversions that don't change the mode. This is safe
13993 for every expression, except for a comparison expression because
13994 its signedness is derived from its operands. So, in the latter
13995 case, only strip conversions that don't change the signedness.
13997 Note that this is done as an internal manipulation within the
13998 constant folder, in order to find the simplest representation of
13999 the arguments so that their form can be studied. In any case,
14000 the appropriate type conversions should be put back in the tree
14001 that will get out of the constant folder. */
14002 if (op0)
14004 arg0 = op0;
14005 STRIP_NOPS (arg0);
14008 if (op1)
14010 arg1 = op1;
14011 STRIP_NOPS (arg1);
14014 if (op2)
14016 arg2 = op2;
14017 STRIP_NOPS (arg2);
14020 switch (code)
14022 case COMPONENT_REF:
14023 if (TREE_CODE (arg0) == CONSTRUCTOR
14024 && ! type_contains_placeholder_p (TREE_TYPE (arg0)))
14026 unsigned HOST_WIDE_INT idx;
14027 tree field, value;
14028 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (arg0), idx, field, value)
14029 if (field == arg1)
14030 return value;
14032 return NULL_TREE;
14034 case COND_EXPR:
14035 case VEC_COND_EXPR:
14036 /* Pedantic ANSI C says that a conditional expression is never an lvalue,
14037 so all simple results must be passed through pedantic_non_lvalue. */
14038 if (TREE_CODE (arg0) == INTEGER_CST)
14040 tree unused_op = integer_zerop (arg0) ? op1 : op2;
14041 tem = integer_zerop (arg0) ? op2 : op1;
14042 /* Only optimize constant conditions when the selected branch
14043 has the same type as the COND_EXPR. This avoids optimizing
14044 away "c ? x : throw", where the throw has a void type.
14045 Avoid throwing away the operand that contains a label. */
14046 if ((!TREE_SIDE_EFFECTS (unused_op)
14047 || !contains_label_p (unused_op))
14048 && (! VOID_TYPE_P (TREE_TYPE (tem))
14049 || VOID_TYPE_P (type)))
14050 return pedantic_non_lvalue_loc (loc, tem);
14051 return NULL_TREE;
14053 else if (TREE_CODE (arg0) == VECTOR_CST)
14055 if (integer_all_onesp (arg0))
14056 return pedantic_omit_one_operand_loc (loc, type, arg1, arg2);
14057 if (integer_zerop (arg0))
14058 return pedantic_omit_one_operand_loc (loc, type, arg2, arg1);
14060 if ((TREE_CODE (arg1) == VECTOR_CST
14061 || TREE_CODE (arg1) == CONSTRUCTOR)
14062 && (TREE_CODE (arg2) == VECTOR_CST
14063 || TREE_CODE (arg2) == CONSTRUCTOR))
14065 unsigned int nelts = TYPE_VECTOR_SUBPARTS (type), i;
14066 unsigned char *sel = XALLOCAVEC (unsigned char, nelts);
14067 gcc_assert (nelts == VECTOR_CST_NELTS (arg0));
14068 for (i = 0; i < nelts; i++)
14070 tree val = VECTOR_CST_ELT (arg0, i);
14071 if (integer_all_onesp (val))
14072 sel[i] = i;
14073 else if (integer_zerop (val))
14074 sel[i] = nelts + i;
14075 else /* Currently unreachable. */
14076 return NULL_TREE;
14078 tree t = fold_vec_perm (type, arg1, arg2, sel);
14079 if (t != NULL_TREE)
14080 return t;
14084 if (operand_equal_p (arg1, op2, 0))
14085 return pedantic_omit_one_operand_loc (loc, type, arg1, arg0);
14087 /* If we have A op B ? A : C, we may be able to convert this to a
14088 simpler expression, depending on the operation and the values
14089 of B and C. Signed zeros prevent all of these transformations,
14090 for reasons given above each one.
14092 Also try swapping the arguments and inverting the conditional. */
14093 if (COMPARISON_CLASS_P (arg0)
14094 && operand_equal_for_comparison_p (TREE_OPERAND (arg0, 0),
14095 arg1, TREE_OPERAND (arg0, 1))
14096 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg1))))
14098 tem = fold_cond_expr_with_comparison (loc, type, arg0, op1, op2);
14099 if (tem)
14100 return tem;
14103 if (COMPARISON_CLASS_P (arg0)
14104 && operand_equal_for_comparison_p (TREE_OPERAND (arg0, 0),
14105 op2,
14106 TREE_OPERAND (arg0, 1))
14107 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (op2))))
14109 location_t loc0 = expr_location_or (arg0, loc);
14110 tem = fold_invert_truthvalue (loc0, arg0);
14111 if (tem && COMPARISON_CLASS_P (tem))
14113 tem = fold_cond_expr_with_comparison (loc, type, tem, op2, op1);
14114 if (tem)
14115 return tem;
14119 /* If the second operand is simpler than the third, swap them
14120 since that produces better jump optimization results. */
14121 if (truth_value_p (TREE_CODE (arg0))
14122 && tree_swap_operands_p (op1, op2, false))
14124 location_t loc0 = expr_location_or (arg0, loc);
14125 /* See if this can be inverted. If it can't, possibly because
14126 it was a floating-point inequality comparison, don't do
14127 anything. */
14128 tem = fold_invert_truthvalue (loc0, arg0);
14129 if (tem)
14130 return fold_build3_loc (loc, code, type, tem, op2, op1);
14133 /* Convert A ? 1 : 0 to simply A. */
14134 if ((code == VEC_COND_EXPR ? integer_all_onesp (op1)
14135 : (integer_onep (op1)
14136 && !VECTOR_TYPE_P (type)))
14137 && integer_zerop (op2)
14138 /* If we try to convert OP0 to our type, the
14139 call to fold will try to move the conversion inside
14140 a COND, which will recurse. In that case, the COND_EXPR
14141 is probably the best choice, so leave it alone. */
14142 && type == TREE_TYPE (arg0))
14143 return pedantic_non_lvalue_loc (loc, arg0);
14145 /* Convert A ? 0 : 1 to !A. This prefers the use of NOT_EXPR
14146 over COND_EXPR in cases such as floating point comparisons. */
14147 if (integer_zerop (op1)
14148 && (code == VEC_COND_EXPR ? integer_all_onesp (op2)
14149 : (integer_onep (op2)
14150 && !VECTOR_TYPE_P (type)))
14151 && truth_value_p (TREE_CODE (arg0)))
14152 return pedantic_non_lvalue_loc (loc,
14153 fold_convert_loc (loc, type,
14154 invert_truthvalue_loc (loc,
14155 arg0)));
14157 /* A < 0 ? <sign bit of A> : 0 is simply (A & <sign bit of A>). */
14158 if (TREE_CODE (arg0) == LT_EXPR
14159 && integer_zerop (TREE_OPERAND (arg0, 1))
14160 && integer_zerop (op2)
14161 && (tem = sign_bit_p (TREE_OPERAND (arg0, 0), arg1)))
14163 /* sign_bit_p looks through both zero and sign extensions,
14164 but for this optimization only sign extensions are
14165 usable. */
14166 tree tem2 = TREE_OPERAND (arg0, 0);
14167 while (tem != tem2)
14169 if (TREE_CODE (tem2) != NOP_EXPR
14170 || TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (tem2, 0))))
14172 tem = NULL_TREE;
14173 break;
14175 tem2 = TREE_OPERAND (tem2, 0);
14177 /* sign_bit_p only checks ARG1 bits within A's precision.
14178 If <sign bit of A> has wider type than A, bits outside
14179 of A's precision in <sign bit of A> need to be checked.
14180 If they are all 0, this optimization needs to be done
14181 in unsigned A's type, if they are all 1 in signed A's type,
14182 otherwise this can't be done. */
14183 if (tem
14184 && TYPE_PRECISION (TREE_TYPE (tem))
14185 < TYPE_PRECISION (TREE_TYPE (arg1))
14186 && TYPE_PRECISION (TREE_TYPE (tem))
14187 < TYPE_PRECISION (type))
14189 int inner_width, outer_width;
14190 tree tem_type;
14192 inner_width = TYPE_PRECISION (TREE_TYPE (tem));
14193 outer_width = TYPE_PRECISION (TREE_TYPE (arg1));
14194 if (outer_width > TYPE_PRECISION (type))
14195 outer_width = TYPE_PRECISION (type);
14197 wide_int mask = wi::shifted_mask
14198 (inner_width, outer_width - inner_width, false,
14199 TYPE_PRECISION (TREE_TYPE (arg1)));
14201 wide_int common = mask & arg1;
14202 if (common == mask)
14204 tem_type = signed_type_for (TREE_TYPE (tem));
14205 tem = fold_convert_loc (loc, tem_type, tem);
14207 else if (common == 0)
14209 tem_type = unsigned_type_for (TREE_TYPE (tem));
14210 tem = fold_convert_loc (loc, tem_type, tem);
14212 else
14213 tem = NULL;
14216 if (tem)
14217 return
14218 fold_convert_loc (loc, type,
14219 fold_build2_loc (loc, BIT_AND_EXPR,
14220 TREE_TYPE (tem), tem,
14221 fold_convert_loc (loc,
14222 TREE_TYPE (tem),
14223 arg1)));
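/* Worked example (illustrative, not from the GCC sources): for a
   32-bit int a,

     a < 0 ? (int) 0x80000000 : 0   ==>   a & (int) 0x80000000

   since the sign bit of a is set exactly when a < 0.  The checks
   above reject zero-extended forms of A, where bits of the wider
   constant above a's precision would not be reproduced by the
   AND.  */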
14226 /* (A >> N) & 1 ? (1 << N) : 0 is simply A & (1 << N). A & 1 was
14227 already handled above. */
14228 if (TREE_CODE (arg0) == BIT_AND_EXPR
14229 && integer_onep (TREE_OPERAND (arg0, 1))
14230 && integer_zerop (op2)
14231 && integer_pow2p (arg1))
14233 tree tem = TREE_OPERAND (arg0, 0);
14234 STRIP_NOPS (tem);
14235 if (TREE_CODE (tem) == RSHIFT_EXPR
14236 && tree_fits_uhwi_p (TREE_OPERAND (tem, 1))
14237 && (unsigned HOST_WIDE_INT) tree_log2 (arg1) ==
14238 tree_to_uhwi (TREE_OPERAND (tem, 1)))
14239 return fold_build2_loc (loc, BIT_AND_EXPR, type,
14240 TREE_OPERAND (tem, 0), arg1);
14243 /* A & N ? N : 0 is simply A & N if N is a power of two. This
14244 is probably obsolete because the first operand should be a
14245 truth value (that's why we have the two cases above), but let's
14246 leave it in until we can confirm this for all front-ends. */
14247 if (integer_zerop (op2)
14248 && TREE_CODE (arg0) == NE_EXPR
14249 && integer_zerop (TREE_OPERAND (arg0, 1))
14250 && integer_pow2p (arg1)
14251 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_AND_EXPR
14252 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
14253 arg1, OEP_ONLY_CONST))
14254 return pedantic_non_lvalue_loc (loc,
14255 fold_convert_loc (loc, type,
14256 TREE_OPERAND (arg0, 0)));
14258 /* Disable the transformations below for vectors, since
14259 fold_binary_op_with_conditional_arg may undo them immediately,
14260 yielding an infinite loop. */
14261 if (code == VEC_COND_EXPR)
14262 return NULL_TREE;
14264 /* Convert A ? B : 0 into A && B if A and B are truth values. */
14265 if (integer_zerop (op2)
14266 && truth_value_p (TREE_CODE (arg0))
14267 && truth_value_p (TREE_CODE (arg1))
14268 && (code == VEC_COND_EXPR || !VECTOR_TYPE_P (type)))
14269 return fold_build2_loc (loc, code == VEC_COND_EXPR ? BIT_AND_EXPR
14270 : TRUTH_ANDIF_EXPR,
14271 type, fold_convert_loc (loc, type, arg0), arg1);
14273 /* Convert A ? B : 1 into !A || B if A and B are truth values. */
14274 if (code == VEC_COND_EXPR ? integer_all_onesp (op2) : integer_onep (op2)
14275 && truth_value_p (TREE_CODE (arg0))
14276 && truth_value_p (TREE_CODE (arg1))
14277 && (code == VEC_COND_EXPR || !VECTOR_TYPE_P (type)))
14279 location_t loc0 = expr_location_or (arg0, loc);
14280 /* Only perform transformation if ARG0 is easily inverted. */
14281 tem = fold_invert_truthvalue (loc0, arg0);
14282 if (tem)
14283 return fold_build2_loc (loc, code == VEC_COND_EXPR
14284 ? BIT_IOR_EXPR
14285 : TRUTH_ORIF_EXPR,
14286 type, fold_convert_loc (loc, type, tem),
14287 arg1);
14290 /* Convert A ? 0 : B into !A && B if A and B are truth values. */
14291 if (integer_zerop (arg1)
14292 && truth_value_p (TREE_CODE (arg0))
14293 && truth_value_p (TREE_CODE (op2))
14294 && (code == VEC_COND_EXPR || !VECTOR_TYPE_P (type)))
14296 location_t loc0 = expr_location_or (arg0, loc);
14297 /* Only perform transformation if ARG0 is easily inverted. */
14298 tem = fold_invert_truthvalue (loc0, arg0);
14299 if (tem)
14300 return fold_build2_loc (loc, code == VEC_COND_EXPR
14301 ? BIT_AND_EXPR : TRUTH_ANDIF_EXPR,
14302 type, fold_convert_loc (loc, type, tem),
14303 op2);
14306 /* Convert A ? 1 : B into A || B if A and B are truth values. */
14307 if (code == VEC_COND_EXPR ? integer_all_onesp (arg1) : integer_onep (arg1)
14308 && truth_value_p (TREE_CODE (arg0))
14309 && truth_value_p (TREE_CODE (op2))
14310 && (code == VEC_COND_EXPR || !VECTOR_TYPE_P (type)))
14311 return fold_build2_loc (loc, code == VEC_COND_EXPR
14312 ? BIT_IOR_EXPR : TRUTH_ORIF_EXPR,
14313 type, fold_convert_loc (loc, type, arg0), op2);
14315 return NULL_TREE;
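/* Summary example (illustrative, not from the GCC sources) of the
   truth-value conversions above, for truth-valued A and B:

     A ? B : 0   ==>  A && B
     A ? B : 1   ==>  !A || B
     A ? 0 : B   ==>  !A && B
     A ? 1 : B   ==>  A || B

   VEC_COND_EXPR uses the bitwise BIT_AND_EXPR/BIT_IOR_EXPR forms
   instead, since vector conditions are element-wise masks.  */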
14317 case CALL_EXPR:
14318 /* CALL_EXPRs used to be ternary exprs. Catch any mistaken uses
14319 of fold_ternary on them. */
14320 gcc_unreachable ();
14322 case BIT_FIELD_REF:
14323 if ((TREE_CODE (arg0) == VECTOR_CST
14324 || (TREE_CODE (arg0) == CONSTRUCTOR
14325 && TREE_CODE (TREE_TYPE (arg0)) == VECTOR_TYPE))
14326 && (type == TREE_TYPE (TREE_TYPE (arg0))
14327 || (TREE_CODE (type) == VECTOR_TYPE
14328 && TREE_TYPE (type) == TREE_TYPE (TREE_TYPE (arg0)))))
14330 tree eltype = TREE_TYPE (TREE_TYPE (arg0));
14331 unsigned HOST_WIDE_INT width = tree_to_uhwi (TYPE_SIZE (eltype));
14332 unsigned HOST_WIDE_INT n = tree_to_uhwi (arg1);
14333 unsigned HOST_WIDE_INT idx = tree_to_uhwi (op2);
14335 if (n != 0
14336 && (idx % width) == 0
14337 && (n % width) == 0
14338 && ((idx + n) / width) <= TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0)))
14340 idx = idx / width;
14341 n = n / width;
14343 if (TREE_CODE (arg0) == VECTOR_CST)
14345 if (n == 1)
14346 return VECTOR_CST_ELT (arg0, idx);
14348 tree *vals = XALLOCAVEC (tree, n);
14349 for (unsigned i = 0; i < n; ++i)
14350 vals[i] = VECTOR_CST_ELT (arg0, idx + i);
14351 return build_vector (type, vals);
14354 /* Constructor elements can be subvectors. */
14355 unsigned HOST_WIDE_INT k = 1;
14356 if (CONSTRUCTOR_NELTS (arg0) != 0)
14358 tree cons_elem = TREE_TYPE (CONSTRUCTOR_ELT (arg0, 0)->value);
14359 if (TREE_CODE (cons_elem) == VECTOR_TYPE)
14360 k = TYPE_VECTOR_SUBPARTS (cons_elem);
14363 /* We keep an exact subset of the constructor elements. */
14364 if ((idx % k) == 0 && (n % k) == 0)
14366 if (CONSTRUCTOR_NELTS (arg0) == 0)
14367 return build_constructor (type, NULL);
14368 idx /= k;
14369 n /= k;
14370 if (n == 1)
14372 if (idx < CONSTRUCTOR_NELTS (arg0))
14373 return CONSTRUCTOR_ELT (arg0, idx)->value;
14374 return build_zero_cst (type);
14377 vec<constructor_elt, va_gc> *vals;
14378 vec_alloc (vals, n);
14379 for (unsigned i = 0;
14380 i < n && idx + i < CONSTRUCTOR_NELTS (arg0);
14381 ++i)
14382 CONSTRUCTOR_APPEND_ELT (vals, NULL_TREE,
14383 CONSTRUCTOR_ELT
14384 (arg0, idx + i)->value);
14385 return build_constructor (type, vals);
14387 /* The bitfield references a single constructor element. */
14388 else if (idx + n <= (idx / k + 1) * k)
14390 if (CONSTRUCTOR_NELTS (arg0) <= idx / k)
14391 return build_zero_cst (type);
14392 else if (n == k)
14393 return CONSTRUCTOR_ELT (arg0, idx / k)->value;
14394 else
14395 return fold_build3_loc (loc, code, type,
14396 CONSTRUCTOR_ELT (arg0, idx / k)->value, op1,
14397 build_int_cst (TREE_TYPE (op2), (idx % k) * width));
14402 /* A bit-field-ref that referenced the full argument can be stripped. */
14403 if (INTEGRAL_TYPE_P (TREE_TYPE (arg0))
14404 && TYPE_PRECISION (TREE_TYPE (arg0)) == tree_to_uhwi (arg1)
14405 && integer_zerop (op2))
14406 return fold_convert_loc (loc, type, arg0);
14408 /* On constants we can use native encode/interpret to constant
14409 fold (nearly) all BIT_FIELD_REFs. */
14410 if (CONSTANT_CLASS_P (arg0)
14411 && can_native_interpret_type_p (type)
14412 && tree_fits_uhwi_p (TYPE_SIZE_UNIT (TREE_TYPE (arg0)))
14413 /* This limitation should not be necessary, we just need to
14414 round this up to mode size. */
14415 && tree_to_uhwi (op1) % BITS_PER_UNIT == 0
14416 /* Need bit-shifting of the buffer to relax the following. */
14417 && tree_to_uhwi (op2) % BITS_PER_UNIT == 0)
14419 unsigned HOST_WIDE_INT bitpos = tree_to_uhwi (op2);
14420 unsigned HOST_WIDE_INT bitsize = tree_to_uhwi (op1);
14421 unsigned HOST_WIDE_INT clen;
14422 clen = tree_to_uhwi (TYPE_SIZE_UNIT (TREE_TYPE (arg0)));
14423 /* ??? We cannot tell native_encode_expr to start at
14424 an arbitrary byte, so limit ourselves to a reasonable amount
14425 of work. */
14426 if (clen <= 4096)
14428 unsigned char *b = XALLOCAVEC (unsigned char, clen);
14429 unsigned HOST_WIDE_INT len = native_encode_expr (arg0, b, clen);
14430 if (len > 0
14431 && len * BITS_PER_UNIT >= bitpos + bitsize)
14433 tree v = native_interpret_expr (type,
14434 b + bitpos / BITS_PER_UNIT,
14435 bitsize / BITS_PER_UNIT);
14436 if (v)
14437 return v;
14442 return NULL_TREE;
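/* Illustrative sketch (not from the GCC sources): on a little-endian
   target, BIT_FIELD_REF <0x11223344, 16, 0> on a 32-bit constant is
   folded through the byte buffer as

     native_encode_expr     ==>  { 0x44, 0x33, 0x22, 0x11 }
     native_interpret_expr on bytes 0..1  ==>  0x3344

   The byte-alignment checks on OP1 and OP2 above exist because the
   buffer cannot currently be shifted by sub-byte amounts.  */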
14444 case FMA_EXPR:
14445 /* For integers we can decompose the FMA if possible. */
14446 if (TREE_CODE (arg0) == INTEGER_CST
14447 && TREE_CODE (arg1) == INTEGER_CST)
14448 return fold_build2_loc (loc, PLUS_EXPR, type,
14449 const_binop (MULT_EXPR, arg0, arg1), arg2);
14450 if (integer_zerop (arg2))
14451 return fold_build2_loc (loc, MULT_EXPR, type, arg0, arg1);
14453 return fold_fma (loc, type, arg0, arg1, arg2);
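/* Worked example (illustrative, not from the GCC sources): for
   integral types,

     FMA_EXPR <3, 4, x>   ==>  x + 12    (constant multiplicands)
     FMA_EXPR <a, b, 0>   ==>  a * b     (integer zero addend)

   Floating-point FMAs are left to fold_fma, since splitting them into
   a separate multiply and add would introduce an intermediate
   rounding step.  */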
14455 case VEC_PERM_EXPR:
14456 if (TREE_CODE (arg2) == VECTOR_CST)
14458 unsigned int nelts = TYPE_VECTOR_SUBPARTS (type), i, mask;
14459 unsigned char *sel = XALLOCAVEC (unsigned char, nelts);
14460 bool need_mask_canon = false;
14461 bool all_in_vec0 = true;
14462 bool all_in_vec1 = true;
14463 bool maybe_identity = true;
14464 bool single_arg = (op0 == op1);
14465 bool changed = false;
14467 mask = single_arg ? (nelts - 1) : (2 * nelts - 1);
14468 gcc_assert (nelts == VECTOR_CST_NELTS (arg2));
14469 for (i = 0; i < nelts; i++)
14471 tree val = VECTOR_CST_ELT (arg2, i);
14472 if (TREE_CODE (val) != INTEGER_CST)
14473 return NULL_TREE;
14475 /* Make sure that the perm value is in an acceptable
14476 range. */
14477 wide_int t = val;
14478 if (wi::gtu_p (t, mask))
14480 need_mask_canon = true;
14481 sel[i] = t.to_uhwi () & mask;
14483 else
14484 sel[i] = t.to_uhwi ();
14486 if (sel[i] < nelts)
14487 all_in_vec1 = false;
14488 else
14489 all_in_vec0 = false;
14491 if ((sel[i] & (nelts-1)) != i)
14492 maybe_identity = false;
14495 if (maybe_identity)
14497 if (all_in_vec0)
14498 return op0;
14499 if (all_in_vec1)
14500 return op1;
14503 if (all_in_vec0)
14504 op1 = op0;
14505 else if (all_in_vec1)
14507 op0 = op1;
14508 for (i = 0; i < nelts; i++)
14509 sel[i] -= nelts;
14510 need_mask_canon = true;
14513 if ((TREE_CODE (op0) == VECTOR_CST
14514 || TREE_CODE (op0) == CONSTRUCTOR)
14515 && (TREE_CODE (op1) == VECTOR_CST
14516 || TREE_CODE (op1) == CONSTRUCTOR))
14518 tree t = fold_vec_perm (type, op0, op1, sel);
14519 if (t != NULL_TREE)
14520 return t;
14523 if (op0 == op1 && !single_arg)
14524 changed = true;
14526 if (need_mask_canon && arg2 == op2)
14528 tree *tsel = XALLOCAVEC (tree, nelts);
14529 tree eltype = TREE_TYPE (TREE_TYPE (arg2));
14530 for (i = 0; i < nelts; i++)
14531 tsel[i] = build_int_cst (eltype, sel[i]);
14532 op2 = build_vector (TREE_TYPE (arg2), tsel);
14533 changed = true;
14536 if (changed)
14537 return build3_loc (loc, VEC_PERM_EXPR, type, op0, op1, op2);
14539 return NULL_TREE;
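/* Illustrative sketch (not from the GCC sources): for V4SI operands,
   a constant selector is reduced as follows:

     VEC_PERM_EXPR <a, b, { 0, 1, 2, 3 }>  ==>  a   (identity)
     VEC_PERM_EXPR <a, b, { 4, 5, 6, 7 }>  ==>  b
     VEC_PERM_EXPR <a, b, { 0, 4, 1, 5 }>  ==>  low-half interleave,
                                folded outright if a and b are constants

   Selector elements are taken modulo 2*nelts (nelts when both
   operands are the same), and out-of-range masks are canonicalized
   in place.  */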
14541 default:
14542 return NULL_TREE;
14543 } /* switch (code) */
14546 /* Perform constant folding and related simplification of EXPR.
14547 The related simplifications include x*1 => x, x*0 => 0, etc.,
14548 and application of the associative law.
14549 NOP_EXPR conversions may be removed freely (as long as we
14550 are careful not to change the type of the overall expression).
14551 We cannot simplify through a CONVERT_EXPR, FIX_EXPR or FLOAT_EXPR,
14552 but we can constant-fold them if they have constant operands. */
14554 #ifdef ENABLE_FOLD_CHECKING
14555 # define fold(x) fold_1 (x)
14556 static tree fold_1 (tree);
14557 static
14558 #endif
14559 tree
14560 fold (tree expr)
14562 const tree t = expr;
14563 enum tree_code code = TREE_CODE (t);
14564 enum tree_code_class kind = TREE_CODE_CLASS (code);
14565 tree tem;
14566 location_t loc = EXPR_LOCATION (expr);
14568 /* Return right away if a constant. */
14569 if (kind == tcc_constant)
14570 return t;
14572 /* CALL_EXPR-like objects with variable numbers of operands are
14573 treated specially. */
14574 if (kind == tcc_vl_exp)
14576 if (code == CALL_EXPR)
14578 tem = fold_call_expr (loc, expr, false);
14579 return tem ? tem : expr;
14581 return expr;
14584 if (IS_EXPR_CODE_CLASS (kind))
14586 tree type = TREE_TYPE (t);
14587 tree op0, op1, op2;
14589 switch (TREE_CODE_LENGTH (code))
14591 case 1:
14592 op0 = TREE_OPERAND (t, 0);
14593 tem = fold_unary_loc (loc, code, type, op0);
14594 return tem ? tem : expr;
14595 case 2:
14596 op0 = TREE_OPERAND (t, 0);
14597 op1 = TREE_OPERAND (t, 1);
14598 tem = fold_binary_loc (loc, code, type, op0, op1);
14599 return tem ? tem : expr;
14600 case 3:
14601 op0 = TREE_OPERAND (t, 0);
14602 op1 = TREE_OPERAND (t, 1);
14603 op2 = TREE_OPERAND (t, 2);
14604 tem = fold_ternary_loc (loc, code, type, op0, op1, op2);
14605 return tem ? tem : expr;
14606 default:
14607 break;
14611 switch (code)
14613 case ARRAY_REF:
14615 tree op0 = TREE_OPERAND (t, 0);
14616 tree op1 = TREE_OPERAND (t, 1);
14618 if (TREE_CODE (op1) == INTEGER_CST
14619 && TREE_CODE (op0) == CONSTRUCTOR
14620 && ! type_contains_placeholder_p (TREE_TYPE (op0)))
14622 vec<constructor_elt, va_gc> *elts = CONSTRUCTOR_ELTS (op0);
14623 unsigned HOST_WIDE_INT end = vec_safe_length (elts);
14624 unsigned HOST_WIDE_INT begin = 0;
14626 /* Find a matching index by means of a binary search. */
14627 while (begin != end)
14629 unsigned HOST_WIDE_INT middle = (begin + end) / 2;
14630 tree index = (*elts)[middle].index;
14632 if (TREE_CODE (index) == INTEGER_CST
14633 && tree_int_cst_lt (index, op1))
14634 begin = middle + 1;
14635 else if (TREE_CODE (index) == INTEGER_CST
14636 && tree_int_cst_lt (op1, index))
14637 end = middle;
14638 else if (TREE_CODE (index) == RANGE_EXPR
14639 && tree_int_cst_lt (TREE_OPERAND (index, 1), op1))
14640 begin = middle + 1;
14641 else if (TREE_CODE (index) == RANGE_EXPR
14642 && tree_int_cst_lt (op1, TREE_OPERAND (index, 0)))
14643 end = middle;
14644 else
14645 return (*elts)[middle].value;
14649 return t;
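/* Worked example (illustrative, not from the GCC sources): given

     static const int a[4] = { 10, 20, 30, 40 };

   folding the ARRAY_REF a[2] against the CONSTRUCTOR binary-searches
   the sorted index list and yields 30.  RANGE_EXPR indexes, as
   produced by designators like [0 ... 3] = 0, match when OP1 falls
   within their bounds.  */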
14652 /* Return a VECTOR_CST if possible. */
14653 case CONSTRUCTOR:
14655 tree type = TREE_TYPE (t);
14656 if (TREE_CODE (type) != VECTOR_TYPE)
14657 return t;
14659 tree *vec = XALLOCAVEC (tree, TYPE_VECTOR_SUBPARTS (type));
14660 unsigned HOST_WIDE_INT idx, pos = 0;
14661 tree value;
14663 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (t), idx, value)
14665 if (!CONSTANT_CLASS_P (value))
14666 return t;
14667 if (TREE_CODE (value) == VECTOR_CST)
14669 for (unsigned i = 0; i < VECTOR_CST_NELTS (value); ++i)
14670 vec[pos++] = VECTOR_CST_ELT (value, i);
14672 else
14673 vec[pos++] = value;
14675 for (; pos < TYPE_VECTOR_SUBPARTS (type); ++pos)
14676 vec[pos] = build_zero_cst (TREE_TYPE (type));
14678 return build_vector (type, vec);
14681 case CONST_DECL:
14682 return fold (DECL_INITIAL (t));
14684 default:
14685 return t;
14686 } /* switch (code) */
14689 #ifdef ENABLE_FOLD_CHECKING
14690 #undef fold
14692 static void fold_checksum_tree (const_tree, struct md5_ctx *,
14693 hash_table <pointer_hash <tree_node> >);
14694 static void fold_check_failed (const_tree, const_tree);
14695 void print_fold_checksum (const_tree);
14697 /* When --enable-checking=fold, compute a digest of expr before
14698 and after the actual fold call to verify that fold did not
14699 accidentally change the original expr. */
14701 tree
14702 fold (tree expr)
14704 tree ret;
14705 struct md5_ctx ctx;
14706 unsigned char checksum_before[16], checksum_after[16];
14707 hash_table <pointer_hash <tree_node> > ht;
14709 ht.create (32);
14710 md5_init_ctx (&ctx);
14711 fold_checksum_tree (expr, &ctx, ht);
14712 md5_finish_ctx (&ctx, checksum_before);
14713 ht.empty ();
14715 ret = fold_1 (expr);
14717 md5_init_ctx (&ctx);
14718 fold_checksum_tree (expr, &ctx, ht);
14719 md5_finish_ctx (&ctx, checksum_after);
14720 ht.dispose ();
14722 if (memcmp (checksum_before, checksum_after, 16))
14723 fold_check_failed (expr, ret);
14725 return ret;
14728 void
14729 print_fold_checksum (const_tree expr)
14731 struct md5_ctx ctx;
14732 unsigned char checksum[16], cnt;
14733 hash_table <pointer_hash <tree_node> > ht;
14735 ht.create (32);
14736 md5_init_ctx (&ctx);
14737 fold_checksum_tree (expr, &ctx, ht);
14738 md5_finish_ctx (&ctx, checksum);
14739 ht.dispose ();
14740 for (cnt = 0; cnt < 16; ++cnt)
14741 fprintf (stderr, "%02x", checksum[cnt]);
14742 putc ('\n', stderr);
14745 static void
14746 fold_check_failed (const_tree expr ATTRIBUTE_UNUSED, const_tree ret ATTRIBUTE_UNUSED)
14748 internal_error ("fold check: original tree changed by fold");
14751 static void
14752 fold_checksum_tree (const_tree expr, struct md5_ctx *ctx,
14753 hash_table <pointer_hash <tree_node> > ht)
14755 tree_node **slot;
14756 enum tree_code code;
14757 union tree_node buf;
14758 int i, len;
14760 recursive_label:
14761 if (expr == NULL)
14762 return;
14763 slot = ht.find_slot (expr, INSERT);
14764 if (*slot != NULL)
14765 return;
14766 *slot = CONST_CAST_TREE (expr);
14767 code = TREE_CODE (expr);
14768 if (TREE_CODE_CLASS (code) == tcc_declaration
14769 && DECL_ASSEMBLER_NAME_SET_P (expr))
14771 /* Allow DECL_ASSEMBLER_NAME to be modified. */
14772 memcpy ((char *) &buf, expr, tree_size (expr));
14773 SET_DECL_ASSEMBLER_NAME ((tree)&buf, NULL);
14774 expr = (tree) &buf;
14776 else if (TREE_CODE_CLASS (code) == tcc_type
14777 && (TYPE_POINTER_TO (expr)
14778 || TYPE_REFERENCE_TO (expr)
14779 || TYPE_CACHED_VALUES_P (expr)
14780 || TYPE_CONTAINS_PLACEHOLDER_INTERNAL (expr)
14781 || TYPE_NEXT_VARIANT (expr)))
14783 /* Allow these fields to be modified. */
14784 tree tmp;
14785 memcpy ((char *) &buf, expr, tree_size (expr));
14786 expr = tmp = (tree) &buf;
14787 TYPE_CONTAINS_PLACEHOLDER_INTERNAL (tmp) = 0;
14788 TYPE_POINTER_TO (tmp) = NULL;
14789 TYPE_REFERENCE_TO (tmp) = NULL;
14790 TYPE_NEXT_VARIANT (tmp) = NULL;
14791 if (TYPE_CACHED_VALUES_P (tmp))
14793 TYPE_CACHED_VALUES_P (tmp) = 0;
14794 TYPE_CACHED_VALUES (tmp) = NULL;
14797 md5_process_bytes (expr, tree_size (expr), ctx);
14798 if (CODE_CONTAINS_STRUCT (code, TS_TYPED))
14799 fold_checksum_tree (TREE_TYPE (expr), ctx, ht);
14800 if (TREE_CODE_CLASS (code) != tcc_type
14801 && TREE_CODE_CLASS (code) != tcc_declaration
14802 && code != TREE_LIST
14803 && code != SSA_NAME
14804 && CODE_CONTAINS_STRUCT (code, TS_COMMON))
14805 fold_checksum_tree (TREE_CHAIN (expr), ctx, ht);
14806 switch (TREE_CODE_CLASS (code))
14808 case tcc_constant:
14809 switch (code)
14811 case STRING_CST:
14812 md5_process_bytes (TREE_STRING_POINTER (expr),
14813 TREE_STRING_LENGTH (expr), ctx);
14814 break;
14815 case COMPLEX_CST:
14816 fold_checksum_tree (TREE_REALPART (expr), ctx, ht);
14817 fold_checksum_tree (TREE_IMAGPART (expr), ctx, ht);
14818 break;
14819 case VECTOR_CST:
14820 for (i = 0; i < (int) VECTOR_CST_NELTS (expr); ++i)
14821 fold_checksum_tree (VECTOR_CST_ELT (expr, i), ctx, ht);
14822 break;
14823 default:
14824 break;
14826 break;
14827 case tcc_exceptional:
14828 switch (code)
14830 case TREE_LIST:
14831 fold_checksum_tree (TREE_PURPOSE (expr), ctx, ht);
14832 fold_checksum_tree (TREE_VALUE (expr), ctx, ht);
14833 expr = TREE_CHAIN (expr);
14834 goto recursive_label;
14835 break;
14836 case TREE_VEC:
14837 for (i = 0; i < TREE_VEC_LENGTH (expr); ++i)
14838 fold_checksum_tree (TREE_VEC_ELT (expr, i), ctx, ht);
14839 break;
14840 default:
14841 break;
14843 break;
14844 case tcc_expression:
14845 case tcc_reference:
14846 case tcc_comparison:
14847 case tcc_unary:
14848 case tcc_binary:
14849 case tcc_statement:
14850 case tcc_vl_exp:
14851 len = TREE_OPERAND_LENGTH (expr);
14852 for (i = 0; i < len; ++i)
14853 fold_checksum_tree (TREE_OPERAND (expr, i), ctx, ht);
14854 break;
14855 case tcc_declaration:
14856 fold_checksum_tree (DECL_NAME (expr), ctx, ht);
14857 fold_checksum_tree (DECL_CONTEXT (expr), ctx, ht);
14858 if (CODE_CONTAINS_STRUCT (TREE_CODE (expr), TS_DECL_COMMON))
14860 fold_checksum_tree (DECL_SIZE (expr), ctx, ht);
14861 fold_checksum_tree (DECL_SIZE_UNIT (expr), ctx, ht);
14862 fold_checksum_tree (DECL_INITIAL (expr), ctx, ht);
14863 fold_checksum_tree (DECL_ABSTRACT_ORIGIN (expr), ctx, ht);
14864 fold_checksum_tree (DECL_ATTRIBUTES (expr), ctx, ht);
14866 if (CODE_CONTAINS_STRUCT (TREE_CODE (expr), TS_DECL_WITH_VIS))
14867 fold_checksum_tree (DECL_SECTION_NAME (expr), ctx, ht);
14869 if (CODE_CONTAINS_STRUCT (TREE_CODE (expr), TS_DECL_NON_COMMON))
14871 fold_checksum_tree (DECL_VINDEX (expr), ctx, ht);
14872 fold_checksum_tree (DECL_RESULT_FLD (expr), ctx, ht);
14873 fold_checksum_tree (DECL_ARGUMENT_FLD (expr), ctx, ht);
14875 break;
14876 case tcc_type:
14877 if (TREE_CODE (expr) == ENUMERAL_TYPE)
14878 fold_checksum_tree (TYPE_VALUES (expr), ctx, ht);
14879 fold_checksum_tree (TYPE_SIZE (expr), ctx, ht);
14880 fold_checksum_tree (TYPE_SIZE_UNIT (expr), ctx, ht);
14881 fold_checksum_tree (TYPE_ATTRIBUTES (expr), ctx, ht);
14882 fold_checksum_tree (TYPE_NAME (expr), ctx, ht);
14883 if (INTEGRAL_TYPE_P (expr)
14884 || SCALAR_FLOAT_TYPE_P (expr))
14886 fold_checksum_tree (TYPE_MIN_VALUE (expr), ctx, ht);
14887 fold_checksum_tree (TYPE_MAX_VALUE (expr), ctx, ht);
14889 fold_checksum_tree (TYPE_MAIN_VARIANT (expr), ctx, ht);
14890 if (TREE_CODE (expr) == RECORD_TYPE
14891 || TREE_CODE (expr) == UNION_TYPE
14892 || TREE_CODE (expr) == QUAL_UNION_TYPE)
14893 fold_checksum_tree (TYPE_BINFO (expr), ctx, ht);
14894 fold_checksum_tree (TYPE_CONTEXT (expr), ctx, ht);
14895 break;
14896 default:
14897 break;
14901 /* Helper function for outputting the checksum of a tree T. When
14902 debugging with gdb, you can "define mynext" to be "next" followed
14903 by "call debug_fold_checksum (op0)", then just trace down until the
14904 outputs differ. */
14906 DEBUG_FUNCTION void
14907 debug_fold_checksum (const_tree t)
14909 int i;
14910 unsigned char checksum[16];
14911 struct md5_ctx ctx;
14912 hash_table <pointer_hash <tree_node> > ht;
14913 ht.create (32);
14915 md5_init_ctx (&ctx);
14916 fold_checksum_tree (t, &ctx, ht);
14917 md5_finish_ctx (&ctx, checksum);
14918 ht.empty ();
14920 for (i = 0; i < 16; i++)
14921 fprintf (stderr, "%d ", checksum[i]);
14923 fprintf (stderr, "\n");
14926 #endif
14928 /* Fold a unary tree expression with code CODE of type TYPE with an
14929 operand OP0. LOC is the location of the resulting expression.
14930 Return a folded expression if successful. Otherwise, return a tree
14931 expression with code CODE of type TYPE with an operand OP0. */
14933 tree
14934 fold_build1_stat_loc (location_t loc,
14935 enum tree_code code, tree type, tree op0 MEM_STAT_DECL)
14937 tree tem;
14938 #ifdef ENABLE_FOLD_CHECKING
14939 unsigned char checksum_before[16], checksum_after[16];
14940 struct md5_ctx ctx;
14941 hash_table <pointer_hash <tree_node> > ht;
14943 ht.create (32);
14944 md5_init_ctx (&ctx);
14945 fold_checksum_tree (op0, &ctx, ht);
14946 md5_finish_ctx (&ctx, checksum_before);
14947 ht.empty ();
14948 #endif
14950 tem = fold_unary_loc (loc, code, type, op0);
14951 if (!tem)
14952 tem = build1_stat_loc (loc, code, type, op0 PASS_MEM_STAT);
14954 #ifdef ENABLE_FOLD_CHECKING
14955 md5_init_ctx (&ctx);
14956 fold_checksum_tree (op0, &ctx, ht);
14957 md5_finish_ctx (&ctx, checksum_after);
14958 ht.dispose ();
14960 if (memcmp (checksum_before, checksum_after, 16))
14961 fold_check_failed (op0, tem);
14962 #endif
14963 return tem;
14966 /* Fold a binary tree expression with code CODE of type TYPE with
14967 operands OP0 and OP1. LOC is the location of the resulting
14968 expression. Return a folded expression if successful. Otherwise,
14969 return a tree expression with code CODE of type TYPE with operands
14970 OP0 and OP1. */
14972 tree
14973 fold_build2_stat_loc (location_t loc,
14974 enum tree_code code, tree type, tree op0, tree op1
14975 MEM_STAT_DECL)
14977 tree tem;
14978 #ifdef ENABLE_FOLD_CHECKING
14979 unsigned char checksum_before_op0[16],
14980 checksum_before_op1[16],
14981 checksum_after_op0[16],
14982 checksum_after_op1[16];
14983 struct md5_ctx ctx;
14984 hash_table <pointer_hash <tree_node> > ht;
14986 ht.create (32);
14987 md5_init_ctx (&ctx);
14988 fold_checksum_tree (op0, &ctx, ht);
14989 md5_finish_ctx (&ctx, checksum_before_op0);
14990 ht.empty ();
14992 md5_init_ctx (&ctx);
14993 fold_checksum_tree (op1, &ctx, ht);
14994 md5_finish_ctx (&ctx, checksum_before_op1);
14995 ht.empty ();
14996 #endif
14998 tem = fold_binary_loc (loc, code, type, op0, op1);
14999 if (!tem)
15000 tem = build2_stat_loc (loc, code, type, op0, op1 PASS_MEM_STAT);
15002 #ifdef ENABLE_FOLD_CHECKING
15003 md5_init_ctx (&ctx);
15004 fold_checksum_tree (op0, &ctx, ht);
15005 md5_finish_ctx (&ctx, checksum_after_op0);
15006 ht.empty ();
15008 if (memcmp (checksum_before_op0, checksum_after_op0, 16))
15009 fold_check_failed (op0, tem);
15011 md5_init_ctx (&ctx);
15012 fold_checksum_tree (op1, &ctx, ht);
15013 md5_finish_ctx (&ctx, checksum_after_op1);
15014 ht.dispose ();
15016 if (memcmp (checksum_before_op1, checksum_after_op1, 16))
15017 fold_check_failed (op1, tem);
15018 #endif
15019 return tem;
15022 /* Fold a ternary tree expression with code CODE of type TYPE with
15023 operands OP0, OP1, and OP2. Return a folded expression if
15024 successful. Otherwise, return a tree expression with code CODE of
15025 type TYPE with operands OP0, OP1, and OP2. */
15027 tree
15028 fold_build3_stat_loc (location_t loc, enum tree_code code, tree type,
15029 tree op0, tree op1, tree op2 MEM_STAT_DECL)
15031 tree tem;
15032 #ifdef ENABLE_FOLD_CHECKING
15033 unsigned char checksum_before_op0[16],
15034 checksum_before_op1[16],
15035 checksum_before_op2[16],
15036 checksum_after_op0[16],
15037 checksum_after_op1[16],
15038 checksum_after_op2[16];
15039 struct md5_ctx ctx;
15040 hash_table <pointer_hash <tree_node> > ht;
15042 ht.create (32);
15043 md5_init_ctx (&ctx);
15044 fold_checksum_tree (op0, &ctx, ht);
15045 md5_finish_ctx (&ctx, checksum_before_op0);
15046 ht.empty ();
15048 md5_init_ctx (&ctx);
15049 fold_checksum_tree (op1, &ctx, ht);
15050 md5_finish_ctx (&ctx, checksum_before_op1);
15051 ht.empty ();
15053 md5_init_ctx (&ctx);
15054 fold_checksum_tree (op2, &ctx, ht);
15055 md5_finish_ctx (&ctx, checksum_before_op2);
15056 ht.empty ();
15057 #endif
15059 gcc_assert (TREE_CODE_CLASS (code) != tcc_vl_exp);
15060 tem = fold_ternary_loc (loc, code, type, op0, op1, op2);
15061 if (!tem)
15062 tem = build3_stat_loc (loc, code, type, op0, op1, op2 PASS_MEM_STAT);
15064 #ifdef ENABLE_FOLD_CHECKING
15065 md5_init_ctx (&ctx);
15066 fold_checksum_tree (op0, &ctx, ht);
15067 md5_finish_ctx (&ctx, checksum_after_op0);
15068 ht.empty ();
15070 if (memcmp (checksum_before_op0, checksum_after_op0, 16))
15071 fold_check_failed (op0, tem);
15073 md5_init_ctx (&ctx);
15074 fold_checksum_tree (op1, &ctx, ht);
15075 md5_finish_ctx (&ctx, checksum_after_op1);
15076 ht.empty ();
15078 if (memcmp (checksum_before_op1, checksum_after_op1, 16))
15079 fold_check_failed (op1, tem);
15081 md5_init_ctx (&ctx);
15082 fold_checksum_tree (op2, &ctx, ht);
15083 md5_finish_ctx (&ctx, checksum_after_op2);
15084 ht.dispose ();
15086 if (memcmp (checksum_before_op2, checksum_after_op2, 16))
15087 fold_check_failed (op2, tem);
15088 #endif
15089 return tem;
15092 /* Fold a CALL_EXPR expression of type TYPE with function FN and the
15093 NARGS arguments in ARGARRAY, and a null static chain.
15094 Return a folded expression if successful. Otherwise, return a CALL_EXPR
15095 of type TYPE from the given operands as constructed by build_call_array. */
15097 tree
15098 fold_build_call_array_loc (location_t loc, tree type, tree fn,
15099 int nargs, tree *argarray)
15101 tree tem;
15102 #ifdef ENABLE_FOLD_CHECKING
15103 unsigned char checksum_before_fn[16],
15104 checksum_before_arglist[16],
15105 checksum_after_fn[16],
15106 checksum_after_arglist[16];
15107 struct md5_ctx ctx;
15108 hash_table <pointer_hash <tree_node> > ht;
15109 int i;
15111 ht.create (32);
15112 md5_init_ctx (&ctx);
15113 fold_checksum_tree (fn, &ctx, ht);
15114 md5_finish_ctx (&ctx, checksum_before_fn);
15115 ht.empty ();
15117 md5_init_ctx (&ctx);
15118 for (i = 0; i < nargs; i++)
15119 fold_checksum_tree (argarray[i], &ctx, ht);
15120 md5_finish_ctx (&ctx, checksum_before_arglist);
15121 ht.empty ();
15122 #endif
15124 tem = fold_builtin_call_array (loc, type, fn, nargs, argarray);
15126 #ifdef ENABLE_FOLD_CHECKING
15127 md5_init_ctx (&ctx);
15128 fold_checksum_tree (fn, &ctx, ht);
15129 md5_finish_ctx (&ctx, checksum_after_fn);
15130 ht.empty ();
15132 if (memcmp (checksum_before_fn, checksum_after_fn, 16))
15133 fold_check_failed (fn, tem);
15135 md5_init_ctx (&ctx);
15136 for (i = 0; i < nargs; i++)
15137 fold_checksum_tree (argarray[i], &ctx, ht);
15138 md5_finish_ctx (&ctx, checksum_after_arglist);
15139 ht.dispose ();
15141 if (memcmp (checksum_before_arglist, checksum_after_arglist, 16))
15142 fold_check_failed (NULL_TREE, tem);
15143 #endif
15144 return tem;
15147 /* Perform constant folding and related simplification of initializer
15148 expression EXPR. These behave identically to "fold_buildN" but ignore
15149 potential run-time traps and exceptions that fold must preserve. */
15151 #define START_FOLD_INIT \
15152 int saved_signaling_nans = flag_signaling_nans;\
15153 int saved_trapping_math = flag_trapping_math;\
15154 int saved_rounding_math = flag_rounding_math;\
15155 int saved_trapv = flag_trapv;\
15156 int saved_folding_initializer = folding_initializer;\
15157 flag_signaling_nans = 0;\
15158 flag_trapping_math = 0;\
15159 flag_rounding_math = 0;\
15160 flag_trapv = 0;\
15161 folding_initializer = 1;
15163 #define END_FOLD_INIT \
15164 flag_signaling_nans = saved_signaling_nans;\
15165 flag_trapping_math = saved_trapping_math;\
15166 flag_rounding_math = saved_rounding_math;\
15167 flag_trapv = saved_trapv;\
15168 folding_initializer = saved_folding_initializer;
15170 tree
15171 fold_build1_initializer_loc (location_t loc, enum tree_code code,
15172 tree type, tree op)
15174 tree result;
15175 START_FOLD_INIT;
15177 result = fold_build1_loc (loc, code, type, op);
15179 END_FOLD_INIT;
15180 return result;
15183 tree
15184 fold_build2_initializer_loc (location_t loc, enum tree_code code,
15185 tree type, tree op0, tree op1)
15187 tree result;
15188 START_FOLD_INIT;
15190 result = fold_build2_loc (loc, code, type, op0, op1);
15192 END_FOLD_INIT;
15193 return result;
15196 tree
15197 fold_build_call_array_initializer_loc (location_t loc, tree type, tree fn,
15198 int nargs, tree *argarray)
15200 tree result;
15201 START_FOLD_INIT;
15203 result = fold_build_call_array_loc (loc, type, fn, nargs, argarray);
15205 END_FOLD_INIT;
15206 return result;
15209 #undef START_FOLD_INIT
15210 #undef END_FOLD_INIT
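/* Illustrative sketch (not from the GCC sources; ONE and THREE are
   hypothetical REAL_CST trees for 1.0 and 3.0):

     tree t = fold_build2_initializer_loc (loc, RDIV_EXPR,
                                           double_type_node,
                                           one, three);

   folds 1.0/3.0 to a constant even under -frounding-math or
   -ftrapping-math, because the START_FOLD_INIT machinery above clears
   those flags: a static initializer is evaluated once at compile time
   and can neither trap nor observe the run-time rounding mode.  */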
15212 /* Determine whether the first argument is a multiple of the second
15213 argument. Return 0 if it is not, or if we cannot easily determine that it is.
15215 An example of the sort of thing we care about (at this point; this routine
15216 could surely be made more general, and expanded to do what the *_DIV_EXPR's
15217 fold cases do now) is discovering that
15219 SAVE_EXPR (I) * SAVE_EXPR (J * 8)
15221 is a multiple of
15223 SAVE_EXPR (J * 8)
15225 when we know that the two SAVE_EXPR (J * 8) nodes are the same node.
15227 This code also handles discovering that
15229 SAVE_EXPR (I) * SAVE_EXPR (J * 8)
15231 is a multiple of 8 so we don't have to worry about dealing with a
15232 possible remainder.
15234 Note that we *look* inside a SAVE_EXPR only to determine how it was
15235 calculated; it is not safe for fold to do much of anything else with the
15236 internals of a SAVE_EXPR, since it cannot know when it will be evaluated
15237 at run time. For example, the latter example above *cannot* be implemented
15238 as SAVE_EXPR (I) * J or any variant thereof, since the value of J at
15239 evaluation time of the original SAVE_EXPR is not necessarily the same at
15240 the time the new expression is evaluated. The only optimization of this
15241 sort that would be valid is changing
15243 SAVE_EXPR (I) * SAVE_EXPR (SAVE_EXPR (J) * 8)
15245 divided by 8 to
15247 SAVE_EXPR (I) * SAVE_EXPR (J)
15249 (where the same SAVE_EXPR (J) is used in the original and the
15250 transformed version). */
15252 int
15253 multiple_of_p (tree type, const_tree top, const_tree bottom)
15255 if (operand_equal_p (top, bottom, 0))
15256 return 1;
15258 if (TREE_CODE (type) != INTEGER_TYPE)
15259 return 0;
15261 switch (TREE_CODE (top))
15263 case BIT_AND_EXPR:
15264 /* Bitwise and provides a power of two multiple. If the mask is
15265 a multiple of BOTTOM then TOP is a multiple of BOTTOM. */
15266 if (!integer_pow2p (bottom))
15267 return 0;
15268 /* FALLTHRU */
15270 case MULT_EXPR:
15271 return (multiple_of_p (type, TREE_OPERAND (top, 0), bottom)
15272 || multiple_of_p (type, TREE_OPERAND (top, 1), bottom));
15274 case PLUS_EXPR:
15275 case MINUS_EXPR:
15276 return (multiple_of_p (type, TREE_OPERAND (top, 0), bottom)
15277 && multiple_of_p (type, TREE_OPERAND (top, 1), bottom));
15279 case LSHIFT_EXPR:
15280 if (TREE_CODE (TREE_OPERAND (top, 1)) == INTEGER_CST)
15282 tree op1, t1;
15284 op1 = TREE_OPERAND (top, 1);
15285 /* const_binop may not detect overflow correctly,
15286 so check for it explicitly here. */
15287 if (wi::gtu_p (TYPE_PRECISION (TREE_TYPE (size_one_node)), op1)
15288 && 0 != (t1 = fold_convert (type,
15289 const_binop (LSHIFT_EXPR,
15290 size_one_node,
15291 op1)))
15292 && !TREE_OVERFLOW (t1))
15293 return multiple_of_p (type, t1, bottom);
15295 return 0;
15297 case NOP_EXPR:
15298 /* Can't handle conversions from non-integral or wider integral type. */
15299 if ((TREE_CODE (TREE_TYPE (TREE_OPERAND (top, 0))) != INTEGER_TYPE)
15300 || (TYPE_PRECISION (type)
15301 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (top, 0)))))
15302 return 0;
15304 /* ... fall through ... */
15306 case SAVE_EXPR:
15307 return multiple_of_p (type, TREE_OPERAND (top, 0), bottom);
15309 case COND_EXPR:
15310 return (multiple_of_p (type, TREE_OPERAND (top, 1), bottom)
15311 && multiple_of_p (type, TREE_OPERAND (top, 2), bottom));
15313 case INTEGER_CST:
15314 if (TREE_CODE (bottom) != INTEGER_CST
15315 || integer_zerop (bottom)
15316 || (TYPE_UNSIGNED (type)
15317 && (tree_int_cst_sgn (top) < 0
15318 || tree_int_cst_sgn (bottom) < 0)))
15319 return 0;
15320 return wi::multiple_of_p (wi::to_widest (top), wi::to_widest (bottom),
15321 SIGNED);
15323 default:
15324 return 0;
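/* Worked examples (illustrative, not from the GCC sources), with
   sizetype operands:

     multiple_of_p (type, J * 8,     8)   ==>  1   (MULT_EXPR rule)
     multiple_of_p (type, I * 4 + 8, 4)   ==>  1   (PLUS_EXPR rule)
     multiple_of_p (type, I * 4 + 2, 4)   ==>  0
     multiple_of_p (type, X & 48,    16)  ==>  1

   The BIT_AND_EXPR rule requires BOTTOM to be a power of two; X & 48
   is then a multiple of 16 because the constant mask 48 is.  */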
15328 /* Return true if an expression with code CODE and type TYPE is known to be non-negative. */
15330 static bool
15331 tree_simple_nonnegative_warnv_p (enum tree_code code, tree type)
15333 if ((TYPE_PRECISION (type) != 1 || TYPE_UNSIGNED (type))
15334 && truth_value_p (code))
15335 /* Truth values evaluate to 0 or 1, which is nonnegative unless we
15336 have a signed:1 type (where the values are -1 and 0). */
15337 return true;
15338 return false;
15341 /* Return true if (CODE OP0) is known to be non-negative. If the return
15342 value is based on the assumption that signed overflow is undefined,
15343 set *STRICT_OVERFLOW_P to true; otherwise, don't change
15344 *STRICT_OVERFLOW_P. */
15346 bool
15347 tree_unary_nonnegative_warnv_p (enum tree_code code, tree type, tree op0,
15348 bool *strict_overflow_p)
15350 if (TYPE_UNSIGNED (type))
15351 return true;
15353 switch (code)
15355 case ABS_EXPR:
15356 /* We can't return 1 if flag_wrapv is set because
15357 ABS_EXPR<INT_MIN> = INT_MIN. */
15358 if (!INTEGRAL_TYPE_P (type))
15359 return true;
15360 if (TYPE_OVERFLOW_UNDEFINED (type))
15362 *strict_overflow_p = true;
15363 return true;
15365 break;
15367 case NON_LVALUE_EXPR:
15368 case FLOAT_EXPR:
15369 case FIX_TRUNC_EXPR:
15370 return tree_expr_nonnegative_warnv_p (op0,
15371 strict_overflow_p);
15373 case NOP_EXPR:
15375 tree inner_type = TREE_TYPE (op0);
15376 tree outer_type = type;
15378 if (TREE_CODE (outer_type) == REAL_TYPE)
15380 if (TREE_CODE (inner_type) == REAL_TYPE)
15381 return tree_expr_nonnegative_warnv_p (op0,
15382 strict_overflow_p);
15383 if (INTEGRAL_TYPE_P (inner_type))
15385 if (TYPE_UNSIGNED (inner_type))
15386 return true;
15387 return tree_expr_nonnegative_warnv_p (op0,
15388 strict_overflow_p);
15391 else if (INTEGRAL_TYPE_P (outer_type))
15393 if (TREE_CODE (inner_type) == REAL_TYPE)
15394 return tree_expr_nonnegative_warnv_p (op0,
15395 strict_overflow_p);
15396 if (INTEGRAL_TYPE_P (inner_type))
15397 return TYPE_PRECISION (inner_type) < TYPE_PRECISION (outer_type)
15398 && TYPE_UNSIGNED (inner_type);
15401 break;
15403 default:
15404 return tree_simple_nonnegative_warnv_p (code, type);
15407 /* We don't know the sign of `t', so be conservative and return false. */
15408 return false;
15411 /* Return true if (CODE OP0 OP1) is known to be non-negative. If the return
15412 value is based on the assumption that signed overflow is undefined,
15413 set *STRICT_OVERFLOW_P to true; otherwise, don't change
15414 *STRICT_OVERFLOW_P. */
15416 bool
15417 tree_binary_nonnegative_warnv_p (enum tree_code code, tree type, tree op0,
15418 tree op1, bool *strict_overflow_p)
15420 if (TYPE_UNSIGNED (type))
15421 return true;
15423 switch (code)
15425 case POINTER_PLUS_EXPR:
15426 case PLUS_EXPR:
15427 if (FLOAT_TYPE_P (type))
15428 return (tree_expr_nonnegative_warnv_p (op0,
15429 strict_overflow_p)
15430 && tree_expr_nonnegative_warnv_p (op1,
15431 strict_overflow_p));
15433 /* zero_extend(x) + zero_extend(y) is non-negative if x and y are
15434 both unsigned and at least 2 bits shorter than the result. */
15435 if (TREE_CODE (type) == INTEGER_TYPE
15436 && TREE_CODE (op0) == NOP_EXPR
15437 && TREE_CODE (op1) == NOP_EXPR)
15439 tree inner1 = TREE_TYPE (TREE_OPERAND (op0, 0));
15440 tree inner2 = TREE_TYPE (TREE_OPERAND (op1, 0));
15441 if (TREE_CODE (inner1) == INTEGER_TYPE && TYPE_UNSIGNED (inner1)
15442 && TREE_CODE (inner2) == INTEGER_TYPE && TYPE_UNSIGNED (inner2))
15444 unsigned int prec = MAX (TYPE_PRECISION (inner1),
15445 TYPE_PRECISION (inner2)) + 1;
15446 return prec < TYPE_PRECISION (type);
15449 break;
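/* Worked example (illustrative, not from the GCC sources): for
   unsigned char values x and y added in a 32-bit int,

     (int) x + (int) y  <=  255 + 255  =  510

   which needs MAX (8, 8) + 1 = 9 bits, strictly less than the
   result's 32, so the sum is provably non-negative.  */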
15451 case MULT_EXPR:
15452 if (FLOAT_TYPE_P (type) || TYPE_OVERFLOW_UNDEFINED (type))
15454 /* x * x is always non-negative for floating point x
15455 or without overflow. */
15456 if (operand_equal_p (op0, op1, 0)
15457 || (tree_expr_nonnegative_warnv_p (op0, strict_overflow_p)
15458 && tree_expr_nonnegative_warnv_p (op1, strict_overflow_p)))
15460 if (TYPE_OVERFLOW_UNDEFINED (type))
15461 *strict_overflow_p = true;
15462 return true;
15466 /* zero_extend(x) * zero_extend(y) is non-negative if x and y are
15467 both unsigned and their combined precision is less than that of the result. */
15468 if (TREE_CODE (type) == INTEGER_TYPE
15469 && (TREE_CODE (op0) == NOP_EXPR || TREE_CODE (op0) == INTEGER_CST)
15470 && (TREE_CODE (op1) == NOP_EXPR || TREE_CODE (op1) == INTEGER_CST))
15472 tree inner0 = (TREE_CODE (op0) == NOP_EXPR)
15473 ? TREE_TYPE (TREE_OPERAND (op0, 0))
15474 : TREE_TYPE (op0);
15475 tree inner1 = (TREE_CODE (op1) == NOP_EXPR)
15476 ? TREE_TYPE (TREE_OPERAND (op1, 0))
15477 : TREE_TYPE (op1);
15479 bool unsigned0 = TYPE_UNSIGNED (inner0);
15480 bool unsigned1 = TYPE_UNSIGNED (inner1);
15482 if (TREE_CODE (op0) == INTEGER_CST)
15483 unsigned0 = unsigned0 || tree_int_cst_sgn (op0) >= 0;
15485 if (TREE_CODE (op1) == INTEGER_CST)
15486 unsigned1 = unsigned1 || tree_int_cst_sgn (op1) >= 0;
15488 if (TREE_CODE (inner0) == INTEGER_TYPE && unsigned0
15489 && TREE_CODE (inner1) == INTEGER_TYPE && unsigned1)
15491 unsigned int precision0 = (TREE_CODE (op0) == INTEGER_CST)
15492 ? tree_int_cst_min_precision (op0, UNSIGNED)
15493 : TYPE_PRECISION (inner0);
15495 unsigned int precision1 = (TREE_CODE (op1) == INTEGER_CST)
15496 ? tree_int_cst_min_precision (op1, UNSIGNED)
15497 : TYPE_PRECISION (inner1);
15499 return precision0 + precision1 < TYPE_PRECISION (type);
15502 return false;
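/* Worked example (illustrative, not from the GCC sources): for
   unsigned char values x and y multiplied in a 32-bit int,

     (int) x * (int) y  <=  255 * 255  =  65025

   which needs 8 + 8 = 16 bits, strictly less than 32, so the product
   is non-negative without relying on undefined overflow.  For a
   constant operand the minimum precision is used instead, e.g.
   (int) x * 100 needs only 8 + 7 = 15 bits.  */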
15504 case BIT_AND_EXPR:
15505 case MAX_EXPR:
15506 return (tree_expr_nonnegative_warnv_p (op0,
15507 strict_overflow_p)
15508 || tree_expr_nonnegative_warnv_p (op1,
15509 strict_overflow_p));
15511 case BIT_IOR_EXPR:
15512 case BIT_XOR_EXPR:
15513 case MIN_EXPR:
15514 case RDIV_EXPR:
15515 case TRUNC_DIV_EXPR:
15516 case CEIL_DIV_EXPR:
15517 case FLOOR_DIV_EXPR:
15518 case ROUND_DIV_EXPR:
15519 return (tree_expr_nonnegative_warnv_p (op0,
15520 strict_overflow_p)
15521 && tree_expr_nonnegative_warnv_p (op1,
15522 strict_overflow_p));
15524 case TRUNC_MOD_EXPR:
15525 case CEIL_MOD_EXPR:
15526 case FLOOR_MOD_EXPR:
15527 case ROUND_MOD_EXPR:
15528 return tree_expr_nonnegative_warnv_p (op0,
15529 strict_overflow_p);
15530 default:
15531 return tree_simple_nonnegative_warnv_p (code, type);
15534 /* We don't know the sign of `t', so be conservative and return false. */
15535 return false;
15538 /* Return true if T is known to be non-negative. If the return
15539 value is based on the assumption that signed overflow is undefined,
15540 set *STRICT_OVERFLOW_P to true; otherwise, don't change
15541 *STRICT_OVERFLOW_P. */
15543 bool
15544 tree_single_nonnegative_warnv_p (tree t, bool *strict_overflow_p)
15546 if (TYPE_UNSIGNED (TREE_TYPE (t)))
15547 return true;
15549 switch (TREE_CODE (t))
15551 case INTEGER_CST:
15552 return tree_int_cst_sgn (t) >= 0;
15554 case REAL_CST:
15555 return ! REAL_VALUE_NEGATIVE (TREE_REAL_CST (t));
15557 case FIXED_CST:
15558 return ! FIXED_VALUE_NEGATIVE (TREE_FIXED_CST (t));
15560 case COND_EXPR:
15561 return (tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 1),
15562 strict_overflow_p)
15563 && tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 2),
15564 strict_overflow_p));
15565 default:
15566 return tree_simple_nonnegative_warnv_p (TREE_CODE (t),
15567 TREE_TYPE (t));
15569 /* We don't know the sign of `t', so be conservative and return false. */
15570 return false;
15573 /* Return true if T is known to be non-negative. If the return
15574 value is based on the assumption that signed overflow is undefined,
15575 set *STRICT_OVERFLOW_P to true; otherwise, don't change
15576 *STRICT_OVERFLOW_P. */
15578 bool
15579 tree_call_nonnegative_warnv_p (tree type, tree fndecl,
15580 tree arg0, tree arg1, bool *strict_overflow_p)
15582 if (fndecl && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL)
15583 switch (DECL_FUNCTION_CODE (fndecl))
15585 CASE_FLT_FN (BUILT_IN_ACOS):
    CASE_FLT_FN (BUILT_IN_ACOSH):
    CASE_FLT_FN (BUILT_IN_CABS):
    CASE_FLT_FN (BUILT_IN_COSH):
    CASE_FLT_FN (BUILT_IN_ERFC):
    CASE_FLT_FN (BUILT_IN_EXP):
    CASE_FLT_FN (BUILT_IN_EXP10):
    CASE_FLT_FN (BUILT_IN_EXP2):
    CASE_FLT_FN (BUILT_IN_FABS):
    CASE_FLT_FN (BUILT_IN_FDIM):
    CASE_FLT_FN (BUILT_IN_HYPOT):
    CASE_FLT_FN (BUILT_IN_POW10):
    CASE_INT_FN (BUILT_IN_FFS):
    CASE_INT_FN (BUILT_IN_PARITY):
    CASE_INT_FN (BUILT_IN_POPCOUNT):
    CASE_INT_FN (BUILT_IN_CLZ):
    CASE_INT_FN (BUILT_IN_CLRSB):
    case BUILT_IN_BSWAP32:
    case BUILT_IN_BSWAP64:
      /* Always true.  */
      return true;

    CASE_FLT_FN (BUILT_IN_SQRT):
      /* sqrt(-0.0) is -0.0.  */
      if (!HONOR_SIGNED_ZEROS (TYPE_MODE (type)))
        return true;
      return tree_expr_nonnegative_warnv_p (arg0, strict_overflow_p);

    CASE_FLT_FN (BUILT_IN_ASINH):
    CASE_FLT_FN (BUILT_IN_ATAN):
    CASE_FLT_FN (BUILT_IN_ATANH):
    CASE_FLT_FN (BUILT_IN_CBRT):
    CASE_FLT_FN (BUILT_IN_CEIL):
    CASE_FLT_FN (BUILT_IN_ERF):
    CASE_FLT_FN (BUILT_IN_EXPM1):
    CASE_FLT_FN (BUILT_IN_FLOOR):
    CASE_FLT_FN (BUILT_IN_FMOD):
    CASE_FLT_FN (BUILT_IN_FREXP):
    CASE_FLT_FN (BUILT_IN_ICEIL):
    CASE_FLT_FN (BUILT_IN_IFLOOR):
    CASE_FLT_FN (BUILT_IN_IRINT):
    CASE_FLT_FN (BUILT_IN_IROUND):
    CASE_FLT_FN (BUILT_IN_LCEIL):
    CASE_FLT_FN (BUILT_IN_LDEXP):
    CASE_FLT_FN (BUILT_IN_LFLOOR):
    CASE_FLT_FN (BUILT_IN_LLCEIL):
    CASE_FLT_FN (BUILT_IN_LLFLOOR):
    CASE_FLT_FN (BUILT_IN_LLRINT):
    CASE_FLT_FN (BUILT_IN_LLROUND):
    CASE_FLT_FN (BUILT_IN_LRINT):
    CASE_FLT_FN (BUILT_IN_LROUND):
    CASE_FLT_FN (BUILT_IN_MODF):
    CASE_FLT_FN (BUILT_IN_NEARBYINT):
    CASE_FLT_FN (BUILT_IN_RINT):
    CASE_FLT_FN (BUILT_IN_ROUND):
    CASE_FLT_FN (BUILT_IN_SCALB):
    CASE_FLT_FN (BUILT_IN_SCALBLN):
    CASE_FLT_FN (BUILT_IN_SCALBN):
    CASE_FLT_FN (BUILT_IN_SIGNBIT):
    CASE_FLT_FN (BUILT_IN_SIGNIFICAND):
    CASE_FLT_FN (BUILT_IN_SINH):
    CASE_FLT_FN (BUILT_IN_TANH):
    CASE_FLT_FN (BUILT_IN_TRUNC):
      /* True if the 1st argument is nonnegative.  */
      return tree_expr_nonnegative_warnv_p (arg0, strict_overflow_p);

    CASE_FLT_FN (BUILT_IN_FMAX):
      /* True if the 1st OR 2nd arguments are nonnegative.  */
      return (tree_expr_nonnegative_warnv_p (arg0, strict_overflow_p)
              || tree_expr_nonnegative_warnv_p (arg1, strict_overflow_p));

    CASE_FLT_FN (BUILT_IN_FMIN):
      /* True if the 1st AND 2nd arguments are nonnegative.  */
      return (tree_expr_nonnegative_warnv_p (arg0, strict_overflow_p)
              && tree_expr_nonnegative_warnv_p (arg1, strict_overflow_p));

    CASE_FLT_FN (BUILT_IN_COPYSIGN):
      /* True if the 2nd argument is nonnegative.  */
      return tree_expr_nonnegative_warnv_p (arg1, strict_overflow_p);

    CASE_FLT_FN (BUILT_IN_POWI):
      /* True if the 1st argument is nonnegative or the second
         argument is an even integer.  */
      if (TREE_CODE (arg1) == INTEGER_CST
          && (TREE_INT_CST_LOW (arg1) & 1) == 0)
        return true;
      return tree_expr_nonnegative_warnv_p (arg0, strict_overflow_p);

    CASE_FLT_FN (BUILT_IN_POW):
      /* True if the 1st argument is nonnegative or the second
         argument is an even integer valued real.  */
      if (TREE_CODE (arg1) == REAL_CST)
        {
          REAL_VALUE_TYPE c;
          HOST_WIDE_INT n;

          c = TREE_REAL_CST (arg1);
          n = real_to_integer (&c);
          if ((n & 1) == 0)
            {
              REAL_VALUE_TYPE cint;
              real_from_integer (&cint, VOIDmode, n, SIGNED);
              if (real_identical (&c, &cint))
                return true;
            }
        }
      return tree_expr_nonnegative_warnv_p (arg0, strict_overflow_p);

    default:
      break;
    }
  return tree_simple_nonnegative_warnv_p (CALL_EXPR, type);
}
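
/* Illustrative examples (editor's addition, not part of the original
   source): under the rules above, pow (x, 4.0) is known non-negative for
   any x because 4.0 is an even integer-valued REAL_CST, while
   pow (x, 0.5) is non-negative only if x itself can be shown to be;
   copysign (x, y) is non-negative exactly when y is.  */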

/* Return true if T is known to be non-negative.  If the return
   value is based on the assumption that signed overflow is undefined,
   set *STRICT_OVERFLOW_P to true; otherwise, don't change
   *STRICT_OVERFLOW_P.  */

static bool
tree_invalid_nonnegative_warnv_p (tree t, bool *strict_overflow_p)
{
  enum tree_code code = TREE_CODE (t);
  if (TYPE_UNSIGNED (TREE_TYPE (t)))
    return true;

  switch (code)
    {
    case TARGET_EXPR:
      {
        tree temp = TARGET_EXPR_SLOT (t);
        t = TARGET_EXPR_INITIAL (t);

        /* If the initializer is non-void, then it's a normal expression
           that will be assigned to the slot.  */
        if (!VOID_TYPE_P (t))
          return tree_expr_nonnegative_warnv_p (t, strict_overflow_p);

        /* Otherwise, the initializer sets the slot in some way.  One common
           way is an assignment statement at the end of the initializer.  */
        while (1)
          {
            if (TREE_CODE (t) == BIND_EXPR)
              t = expr_last (BIND_EXPR_BODY (t));
            else if (TREE_CODE (t) == TRY_FINALLY_EXPR
                     || TREE_CODE (t) == TRY_CATCH_EXPR)
              t = expr_last (TREE_OPERAND (t, 0));
            else if (TREE_CODE (t) == STATEMENT_LIST)
              t = expr_last (t);
            else
              break;
          }
        if (TREE_CODE (t) == MODIFY_EXPR
            && TREE_OPERAND (t, 0) == temp)
          return tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 1),
                                                strict_overflow_p);

        return false;
      }

    case CALL_EXPR:
      {
        tree arg0 = call_expr_nargs (t) > 0 ? CALL_EXPR_ARG (t, 0) : NULL_TREE;
        tree arg1 = call_expr_nargs (t) > 1 ? CALL_EXPR_ARG (t, 1) : NULL_TREE;

        return tree_call_nonnegative_warnv_p (TREE_TYPE (t),
                                              get_callee_fndecl (t),
                                              arg0, arg1,
                                              strict_overflow_p);
      }

    case COMPOUND_EXPR:
    case MODIFY_EXPR:
      return tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 1),
                                            strict_overflow_p);

    case BIND_EXPR:
      return tree_expr_nonnegative_warnv_p (expr_last (TREE_OPERAND (t, 1)),
                                            strict_overflow_p);

    case SAVE_EXPR:
      return tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 0),
                                            strict_overflow_p);

    default:
      return tree_simple_nonnegative_warnv_p (TREE_CODE (t),
                                              TREE_TYPE (t));
    }

  /* We don't know sign of `t', so be conservative and return false.  */
  return false;
}
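
/* Illustrative example (editor's addition): for a temporary such as
   TARGET_EXPR <D.1234, { ...; D.1234 = foo (x); }> (slot name made up),
   the initializer is void, so the loop above walks to the trailing
   MODIFY_EXPR of the slot and the question reduces to whether its
   right-hand side, foo (x), is non-negative.  */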

/* Return true if T is known to be non-negative.  If the return
   value is based on the assumption that signed overflow is undefined,
   set *STRICT_OVERFLOW_P to true; otherwise, don't change
   *STRICT_OVERFLOW_P.  */

bool
tree_expr_nonnegative_warnv_p (tree t, bool *strict_overflow_p)
{
  enum tree_code code;

  if (t == error_mark_node)
    return false;

  code = TREE_CODE (t);
  switch (TREE_CODE_CLASS (code))
    {
    case tcc_binary:
    case tcc_comparison:
      return tree_binary_nonnegative_warnv_p (TREE_CODE (t),
                                              TREE_TYPE (t),
                                              TREE_OPERAND (t, 0),
                                              TREE_OPERAND (t, 1),
                                              strict_overflow_p);

    case tcc_unary:
      return tree_unary_nonnegative_warnv_p (TREE_CODE (t),
                                             TREE_TYPE (t),
                                             TREE_OPERAND (t, 0),
                                             strict_overflow_p);

    case tcc_constant:
    case tcc_declaration:
    case tcc_reference:
      return tree_single_nonnegative_warnv_p (t, strict_overflow_p);

    default:
      break;
    }

  switch (code)
    {
    case TRUTH_AND_EXPR:
    case TRUTH_OR_EXPR:
    case TRUTH_XOR_EXPR:
      return tree_binary_nonnegative_warnv_p (TREE_CODE (t),
                                              TREE_TYPE (t),
                                              TREE_OPERAND (t, 0),
                                              TREE_OPERAND (t, 1),
                                              strict_overflow_p);

    case TRUTH_NOT_EXPR:
      return tree_unary_nonnegative_warnv_p (TREE_CODE (t),
                                             TREE_TYPE (t),
                                             TREE_OPERAND (t, 0),
                                             strict_overflow_p);

    case COND_EXPR:
    case CONSTRUCTOR:
    case OBJ_TYPE_REF:
    case ASSERT_EXPR:
    case ADDR_EXPR:
    case WITH_SIZE_EXPR:
    case SSA_NAME:
      return tree_single_nonnegative_warnv_p (t, strict_overflow_p);

    default:
      return tree_invalid_nonnegative_warnv_p (t, strict_overflow_p);
    }
}

/* Return true if `t' is known to be non-negative.  Handle warnings
   about undefined signed overflow.  */

bool
tree_expr_nonnegative_p (tree t)
{
  bool ret, strict_overflow_p;

  strict_overflow_p = false;
  ret = tree_expr_nonnegative_warnv_p (t, &strict_overflow_p);
  if (strict_overflow_p)
    fold_overflow_warning (("assuming signed overflow does not occur when "
                            "determining that expression is always "
                            "non-negative"),
                           WARN_STRICT_OVERFLOW_MISC);
  return ret;
}
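
/* Minimal usage sketch (editor's addition, not present in the original
   file): how a caller might gate a sign-based simplification on this
   predicate.  The transformation shown, abs (x) -> x, is only an
   example.  */
static tree ATTRIBUTE_UNUSED
example_simplify_abs (tree arg)
{
  /* If ARG is provably non-negative, abs (ARG) is just ARG.  Any
     strict-overflow warning is emitted by tree_expr_nonnegative_p
     itself.  */
  if (tree_expr_nonnegative_p (arg))
    return arg;
  return NULL_TREE;
}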

/* Return true when (CODE OP0) is an address and is known to be nonzero.
   For floating point we further ensure that the value is not denormal.
   Similar logic is present in nonzero_address in rtlanal.c.

   If the return value is based on the assumption that signed overflow
   is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
   change *STRICT_OVERFLOW_P.  */

bool
tree_unary_nonzero_warnv_p (enum tree_code code, tree type, tree op0,
                            bool *strict_overflow_p)
{
  switch (code)
    {
    case ABS_EXPR:
      return tree_expr_nonzero_warnv_p (op0, strict_overflow_p);

    case NOP_EXPR:
      {
        tree inner_type = TREE_TYPE (op0);
        tree outer_type = type;

        return (TYPE_PRECISION (outer_type) >= TYPE_PRECISION (inner_type)
                && tree_expr_nonzero_warnv_p (op0, strict_overflow_p));
      }
      break;

    case NON_LVALUE_EXPR:
      return tree_expr_nonzero_warnv_p (op0, strict_overflow_p);

    default:
      break;
    }

  return false;
}

/* Return true when (CODE OP0 OP1) is an address and is known to be nonzero.
   For floating point we further ensure that the value is not denormal.
   Similar logic is present in nonzero_address in rtlanal.c.

   If the return value is based on the assumption that signed overflow
   is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
   change *STRICT_OVERFLOW_P.  */

bool
tree_binary_nonzero_warnv_p (enum tree_code code,
                             tree type,
                             tree op0,
                             tree op1, bool *strict_overflow_p)
{
  bool sub_strict_overflow_p;
  switch (code)
    {
    case POINTER_PLUS_EXPR:
    case PLUS_EXPR:
      if (TYPE_OVERFLOW_UNDEFINED (type))
        {
          /* In the presence of negative values it is hard
             to say anything.  */
          sub_strict_overflow_p = false;
          if (!tree_expr_nonnegative_warnv_p (op0,
                                              &sub_strict_overflow_p)
              || !tree_expr_nonnegative_warnv_p (op1,
                                                 &sub_strict_overflow_p))
            return false;
          /* One of the operands must be positive and the other
             non-negative.  */
          /* We don't set *STRICT_OVERFLOW_P here: even if this value
             overflows, on a twos-complement machine the sum of two
             nonnegative numbers can never be zero.  */
          return (tree_expr_nonzero_warnv_p (op0, strict_overflow_p)
                  || tree_expr_nonzero_warnv_p (op1, strict_overflow_p));
        }
      break;

    case MULT_EXPR:
      if (TYPE_OVERFLOW_UNDEFINED (type))
        {
          if (tree_expr_nonzero_warnv_p (op0, strict_overflow_p)
              && tree_expr_nonzero_warnv_p (op1, strict_overflow_p))
            {
              *strict_overflow_p = true;
              return true;
            }
        }
      break;

    case MIN_EXPR:
      sub_strict_overflow_p = false;
      if (tree_expr_nonzero_warnv_p (op0,
                                     &sub_strict_overflow_p)
          && tree_expr_nonzero_warnv_p (op1,
                                        &sub_strict_overflow_p))
        {
          if (sub_strict_overflow_p)
            *strict_overflow_p = true;
        }
      break;

    case MAX_EXPR:
      sub_strict_overflow_p = false;
      if (tree_expr_nonzero_warnv_p (op0,
                                     &sub_strict_overflow_p))
        {
          if (sub_strict_overflow_p)
            *strict_overflow_p = true;

          /* When both operands are nonzero, then MAX must be too.  */
          if (tree_expr_nonzero_warnv_p (op1, strict_overflow_p))
            return true;

          /* MAX where operand 0 is positive is positive.  */
          return tree_expr_nonnegative_warnv_p (op0, strict_overflow_p);
        }
      /* MAX where operand 1 is positive is positive.  */
      else if (tree_expr_nonzero_warnv_p (op1,
                                          &sub_strict_overflow_p)
               && tree_expr_nonnegative_warnv_p (op1,
                                                 &sub_strict_overflow_p))
        {
          if (sub_strict_overflow_p)
            *strict_overflow_p = true;
          return true;
        }
      break;

    case BIT_IOR_EXPR:
      return (tree_expr_nonzero_warnv_p (op1, strict_overflow_p)
              || tree_expr_nonzero_warnv_p (op0, strict_overflow_p));

    default:
      break;
    }

  return false;
}
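
/* Worked example (editor's addition): for a signed type with undefined
   overflow, `x * y' with x and y both known nonzero takes the MULT_EXPR
   path above and is reported nonzero with *STRICT_OVERFLOW_P set,
   because the conclusion really does rely on overflow being undefined:
   with wrapping arithmetic, 65536 * 65536 is 0 in 32 bits.  */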

/* Return true when T is an address and is known to be nonzero.
   For floating point we further ensure that T is not denormal.
   Similar logic is present in nonzero_address in rtlanal.c.

   If the return value is based on the assumption that signed overflow
   is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
   change *STRICT_OVERFLOW_P.  */

bool
tree_single_nonzero_warnv_p (tree t, bool *strict_overflow_p)
{
  bool sub_strict_overflow_p;
  switch (TREE_CODE (t))
    {
    case INTEGER_CST:
      return !integer_zerop (t);

    case ADDR_EXPR:
      {
        tree base = TREE_OPERAND (t, 0);

        if (!DECL_P (base))
          base = get_base_address (base);

        if (!base)
          return false;

        /* Weak declarations may link to NULL.  Other things may also be NULL
           so protect with -fdelete-null-pointer-checks; but not variables
           allocated on the stack.  */
        if (DECL_P (base)
            && (flag_delete_null_pointer_checks
                || (DECL_CONTEXT (base)
                    && TREE_CODE (DECL_CONTEXT (base)) == FUNCTION_DECL
                    && auto_var_in_fn_p (base, DECL_CONTEXT (base)))))
          return !VAR_OR_FUNCTION_DECL_P (base) || !DECL_WEAK (base);

        /* Constants are never weak.  */
        if (CONSTANT_CLASS_P (base))
          return true;

        return false;
      }

    case COND_EXPR:
      sub_strict_overflow_p = false;
      if (tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 1),
                                     &sub_strict_overflow_p)
          && tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 2),
                                        &sub_strict_overflow_p))
        {
          if (sub_strict_overflow_p)
            *strict_overflow_p = true;
          return true;
        }
      break;

    default:
      break;
    }
  return false;
}
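
/* Illustrative contrast (editor's addition, hypothetical names): &local
   is known nonzero when LOCAL is an auto variable of the current
   function, independent of -fdelete-null-pointer-checks, whereas
   &weak_sym is never treated as nonzero because an undefined weak
   symbol may resolve to address zero at link time.  */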

/* Given the components of a binary expression CODE, TYPE, OP0 and OP1,
   attempt to fold the expression to a constant without modifying TYPE,
   OP0 or OP1.

   If the expression can be simplified to a constant, then return
   the constant.  Otherwise return NULL_TREE.  */

tree
fold_binary_to_constant (enum tree_code code, tree type, tree op0, tree op1)
{
  tree tem = fold_binary (code, type, op0, op1);
  return (tem && TREE_CONSTANT (tem)) ? tem : NULL_TREE;
}
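
/* Minimal usage sketch (editor's addition, not in the original file):
   folding 13 + 3 down to a single INTEGER_CST.  */
static tree ATTRIBUTE_UNUSED
example_fold_constant_sum (void)
{
  tree a = build_int_cst (integer_type_node, 13);
  tree b = build_int_cst (integer_type_node, 3);

  /* Yields an INTEGER_CST of value 16; had either operand not folded
     to a constant, the result would be NULL_TREE instead.  */
  return fold_binary_to_constant (PLUS_EXPR, integer_type_node, a, b);
}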

/* Given the components of a unary expression CODE, TYPE and OP0,
   attempt to fold the expression to a constant without modifying
   TYPE or OP0.

   If the expression can be simplified to a constant, then return
   the constant.  Otherwise return NULL_TREE.  */

tree
fold_unary_to_constant (enum tree_code code, tree type, tree op0)
{
  tree tem = fold_unary (code, type, op0);
  return (tem && TREE_CONSTANT (tem)) ? tem : NULL_TREE;
}

/* If EXP represents referencing an element in a constant string
   (either via pointer arithmetic or array indexing), return the
   tree representing the value accessed, otherwise return NULL.  */

tree
fold_read_from_constant_string (tree exp)
{
  if ((TREE_CODE (exp) == INDIRECT_REF
       || TREE_CODE (exp) == ARRAY_REF)
      && TREE_CODE (TREE_TYPE (exp)) == INTEGER_TYPE)
    {
      tree exp1 = TREE_OPERAND (exp, 0);
      tree index;
      tree string;
      location_t loc = EXPR_LOCATION (exp);

      if (TREE_CODE (exp) == INDIRECT_REF)
        string = string_constant (exp1, &index);
      else
        {
          tree low_bound = array_ref_low_bound (exp);
          index = fold_convert_loc (loc, sizetype, TREE_OPERAND (exp, 1));

          /* Optimize the special-case of a zero lower bound.

             We convert the low_bound to sizetype to avoid some problems
             with constant folding.  (E.g. suppose the lower bound is 1,
             and its mode is QI.  Without the conversion, (ARRAY
             +(INDEX-(unsigned char)1)) becomes ((ARRAY+(-(unsigned char)1))
             +INDEX), which becomes (ARRAY+255+INDEX).  Oops!)  */
          if (! integer_zerop (low_bound))
            index = size_diffop_loc (loc, index,
                                     fold_convert_loc (loc, sizetype,
                                                       low_bound));

          string = exp1;
        }

      if (string
          && TYPE_MODE (TREE_TYPE (exp))
             == TYPE_MODE (TREE_TYPE (TREE_TYPE (string)))
          && TREE_CODE (string) == STRING_CST
          && TREE_CODE (index) == INTEGER_CST
          && compare_tree_int (index, TREE_STRING_LENGTH (string)) < 0
          && (GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_TYPE (string))))
              == MODE_INT)
          && (GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (TREE_TYPE (string)))) == 1))
        return build_int_cst_type (TREE_TYPE (exp),
                                   (TREE_STRING_POINTER (string)
                                    [TREE_INT_CST_LOW (index)]));
    }
  return NULL;
}
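
/* Worked example (editor's addition): for the C expression "abc"[1],
   STRING is the STRING_CST "abc" (TREE_STRING_LENGTH is 4, counting
   the terminating NUL) and INDEX is 1, so the result is an INTEGER_CST
   holding 'b'.  An index of 4 or more fails the length check and the
   function returns NULL.  */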

/* Return the tree for neg (ARG0) when ARG0 is known to be either
   an integer constant, real, or fixed-point constant.

   TYPE is the type of the result.  */

static tree
fold_negate_const (tree arg0, tree type)
{
  tree t = NULL_TREE;

  switch (TREE_CODE (arg0))
    {
    case INTEGER_CST:
      {
        bool overflow;
        wide_int val = wi::neg (arg0, &overflow);
        t = force_fit_type (type, val, 1,
                            (overflow | TREE_OVERFLOW (arg0))
                            && !TYPE_UNSIGNED (type));
        break;
      }

    case REAL_CST:
      t = build_real (type, real_value_negate (&TREE_REAL_CST (arg0)));
      break;

    case FIXED_CST:
      {
        FIXED_VALUE_TYPE f;
        bool overflow_p = fixed_arithmetic (&f, NEGATE_EXPR,
                                            &(TREE_FIXED_CST (arg0)), NULL,
                                            TYPE_SATURATING (type));
        t = build_fixed (type, f);
        /* Propagate overflow flags.  */
        if (overflow_p | TREE_OVERFLOW (arg0))
          TREE_OVERFLOW (t) = 1;
        break;
      }

    default:
      gcc_unreachable ();
    }

  return t;
}
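
/* Worked example (editor's addition): negating the 32-bit INTEGER_CST
   -2147483648 (INT_MIN) overflows because +2147483648 is not
   representable; wi::neg reports this and force_fit_type marks the
   result with TREE_OVERFLOW.  Negating the REAL_CST -0.0 simply flips
   the sign bit, giving +0.0.  */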

/* Return the tree for abs (ARG0) when ARG0 is known to be either
   an integer constant or real constant.

   TYPE is the type of the result.  */

tree
fold_abs_const (tree arg0, tree type)
{
  tree t = NULL_TREE;

  switch (TREE_CODE (arg0))
    {
    case INTEGER_CST:
      {
        /* If the value is unsigned or non-negative, then the absolute value
           is the same as the ordinary value.  */
        if (!wi::neg_p (arg0, TYPE_SIGN (type)))
          t = arg0;

        /* If the value is negative, then the absolute value is
           its negation.  */
        else
          {
            bool overflow;
            wide_int val = wi::neg (arg0, &overflow);
            t = force_fit_type (type, val, -1,
                                overflow | TREE_OVERFLOW (arg0));
          }
      }
      break;

    case REAL_CST:
      if (REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg0)))
        t = build_real (type, real_value_negate (&TREE_REAL_CST (arg0)));
      else
        t = arg0;
      break;

    default:
      gcc_unreachable ();
    }

  return t;
}

/* Return the tree for not (ARG0) when ARG0 is known to be an integer
   constant.  TYPE is the type of the result.  */

static tree
fold_not_const (const_tree arg0, tree type)
{
  gcc_assert (TREE_CODE (arg0) == INTEGER_CST);

  return force_fit_type (type, wi::bit_not (arg0), 0, TREE_OVERFLOW (arg0));
}

/* Given CODE, a relational operator, the target type, TYPE and two
   constant operands OP0 and OP1, return the result of the
   relational operation.  If the result is not a compile time
   constant, then return NULL_TREE.  */

static tree
fold_relational_const (enum tree_code code, tree type, tree op0, tree op1)
{
  int result, invert;

  /* From here on, the only cases we handle are when the result is
     known to be a constant.  */

  if (TREE_CODE (op0) == REAL_CST && TREE_CODE (op1) == REAL_CST)
    {
      const REAL_VALUE_TYPE *c0 = TREE_REAL_CST_PTR (op0);
      const REAL_VALUE_TYPE *c1 = TREE_REAL_CST_PTR (op1);

      /* Handle the cases where either operand is a NaN.  */
      if (real_isnan (c0) || real_isnan (c1))
        {
          switch (code)
            {
            case EQ_EXPR:
            case ORDERED_EXPR:
              result = 0;
              break;

            case NE_EXPR:
            case UNORDERED_EXPR:
            case UNLT_EXPR:
            case UNLE_EXPR:
            case UNGT_EXPR:
            case UNGE_EXPR:
            case UNEQ_EXPR:
              result = 1;
              break;

            case LT_EXPR:
            case LE_EXPR:
            case GT_EXPR:
            case GE_EXPR:
            case LTGT_EXPR:
              if (flag_trapping_math)
                return NULL_TREE;
              result = 0;
              break;

            default:
              gcc_unreachable ();
            }

          return constant_boolean_node (result, type);
        }

      return constant_boolean_node (real_compare (code, c0, c1), type);
    }

  if (TREE_CODE (op0) == FIXED_CST && TREE_CODE (op1) == FIXED_CST)
    {
      const FIXED_VALUE_TYPE *c0 = TREE_FIXED_CST_PTR (op0);
      const FIXED_VALUE_TYPE *c1 = TREE_FIXED_CST_PTR (op1);
      return constant_boolean_node (fixed_compare (code, c0, c1), type);
    }

  /* Handle equality/inequality of complex constants.  */
  if (TREE_CODE (op0) == COMPLEX_CST && TREE_CODE (op1) == COMPLEX_CST)
    {
      tree rcond = fold_relational_const (code, type,
                                          TREE_REALPART (op0),
                                          TREE_REALPART (op1));
      tree icond = fold_relational_const (code, type,
                                          TREE_IMAGPART (op0),
                                          TREE_IMAGPART (op1));
      if (code == EQ_EXPR)
        return fold_build2 (TRUTH_ANDIF_EXPR, type, rcond, icond);
      else if (code == NE_EXPR)
        return fold_build2 (TRUTH_ORIF_EXPR, type, rcond, icond);
      else
        return NULL_TREE;
    }

  if (TREE_CODE (op0) == VECTOR_CST && TREE_CODE (op1) == VECTOR_CST)
    {
      unsigned count = VECTOR_CST_NELTS (op0);
      tree *elts = XALLOCAVEC (tree, count);
      gcc_assert (VECTOR_CST_NELTS (op1) == count
                  && TYPE_VECTOR_SUBPARTS (type) == count);

      for (unsigned i = 0; i < count; i++)
        {
          tree elem_type = TREE_TYPE (type);
          tree elem0 = VECTOR_CST_ELT (op0, i);
          tree elem1 = VECTOR_CST_ELT (op1, i);

          tree tem = fold_relational_const (code, elem_type,
                                            elem0, elem1);

          if (tem == NULL_TREE)
            return NULL_TREE;

          elts[i] = build_int_cst (elem_type, integer_zerop (tem) ? 0 : -1);
        }

      return build_vector (type, elts);
    }

  /* From here on we only handle LT, LE, GT, GE, EQ and NE.

     To compute GT, swap the arguments and do LT.
     To compute GE, do LT and invert the result.
     To compute LE, swap the arguments, do LT and invert the result.
     To compute NE, do EQ and invert the result.

     Therefore, the code below must handle only EQ and LT.  */

  if (code == LE_EXPR || code == GT_EXPR)
    {
      tree tem = op0;
      op0 = op1;
      op1 = tem;
      code = swap_tree_comparison (code);
    }

  /* Note that it is safe to invert for real values here because we
     have already handled the one case that it matters.  */

  invert = 0;
  if (code == NE_EXPR || code == GE_EXPR)
    {
      invert = 1;
      code = invert_tree_comparison (code, false);
    }

  /* Compute a result for LT or EQ if args permit;
     otherwise return NULL_TREE.  */
  if (TREE_CODE (op0) == INTEGER_CST && TREE_CODE (op1) == INTEGER_CST)
    {
      if (code == EQ_EXPR)
        result = tree_int_cst_equal (op0, op1);
      else
        result = tree_int_cst_lt (op0, op1);
    }
  else
    return NULL_TREE;

  if (invert)
    result ^= 1;
  return constant_boolean_node (result, type);
}
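
/* Worked example (editor's addition): comparing the REAL_CSTs NaN and
   1.0 with LT_EXPR is not folded when -ftrapping-math is in effect,
   since the runtime comparison may raise an invalid-operand exception,
   but NaN != 1.0 folds to true unconditionally because NE_EXPR is an
   unordered-is-true comparison.  */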

/* If necessary, return a CLEANUP_POINT_EXPR for EXPR with the
   indicated TYPE.  If no CLEANUP_POINT_EXPR is necessary, return EXPR
   itself.  */

tree
fold_build_cleanup_point_expr (tree type, tree expr)
{
  /* If the expression does not have side effects then we don't have to wrap
     it with a cleanup point expression.  */
  if (!TREE_SIDE_EFFECTS (expr))
    return expr;

  /* If the expression is a return, check the operand of the return, or,
     when that operand is a modify expression, its right-hand side.  If
     either has no side effects, we don't need to wrap the expression in a
     cleanup point expression.  Note we don't check the left-hand side of
     the modify because it should always be the return decl.  */
  if (TREE_CODE (expr) == RETURN_EXPR)
    {
      tree op = TREE_OPERAND (expr, 0);
      if (!op || !TREE_SIDE_EFFECTS (op))
        return expr;
      op = TREE_OPERAND (op, 1);
      if (!TREE_SIDE_EFFECTS (op))
        return expr;
    }

  return build1 (CLEANUP_POINT_EXPR, type, expr);
}

/* Given a pointer value OP0 and a type TYPE, return a simplified version
   of an indirection through OP0, or NULL_TREE if no simplification is
   possible.  */

tree
fold_indirect_ref_1 (location_t loc, tree type, tree op0)
{
  tree sub = op0;
  tree subtype;

  STRIP_NOPS (sub);
  subtype = TREE_TYPE (sub);
  if (!POINTER_TYPE_P (subtype))
    return NULL_TREE;

  if (TREE_CODE (sub) == ADDR_EXPR)
    {
      tree op = TREE_OPERAND (sub, 0);
      tree optype = TREE_TYPE (op);

      /* *&CONST_DECL -> to the value of the const decl.  */
      if (TREE_CODE (op) == CONST_DECL)
        return DECL_INITIAL (op);
      /* *&p => p;  make sure to handle *&"str"[cst] here.  */
      if (type == optype)
        {
          tree fop = fold_read_from_constant_string (op);
          if (fop)
            return fop;
          else
            return op;
        }
      /* *(foo *)&fooarray => fooarray[0] */
      else if (TREE_CODE (optype) == ARRAY_TYPE
               && type == TREE_TYPE (optype)
               && (!in_gimple_form
                   || TREE_CODE (TYPE_SIZE (type)) == INTEGER_CST))
        {
          tree type_domain = TYPE_DOMAIN (optype);
          tree min_val = size_zero_node;
          if (type_domain && TYPE_MIN_VALUE (type_domain))
            min_val = TYPE_MIN_VALUE (type_domain);
          if (in_gimple_form
              && TREE_CODE (min_val) != INTEGER_CST)
            return NULL_TREE;
          return build4_loc (loc, ARRAY_REF, type, op, min_val,
                             NULL_TREE, NULL_TREE);
        }
      /* *(foo *)&complexfoo => __real__ complexfoo */
      else if (TREE_CODE (optype) == COMPLEX_TYPE
               && type == TREE_TYPE (optype))
        return fold_build1_loc (loc, REALPART_EXPR, type, op);
      /* *(foo *)&vectorfoo => BIT_FIELD_REF<vectorfoo,...> */
      else if (TREE_CODE (optype) == VECTOR_TYPE
               && type == TREE_TYPE (optype))
        {
          tree part_width = TYPE_SIZE (type);
          tree index = bitsize_int (0);
          return fold_build3_loc (loc, BIT_FIELD_REF, type, op,
                                  part_width, index);
        }
    }

  if (TREE_CODE (sub) == POINTER_PLUS_EXPR
      && TREE_CODE (TREE_OPERAND (sub, 1)) == INTEGER_CST)
    {
      tree op00 = TREE_OPERAND (sub, 0);
      tree op01 = TREE_OPERAND (sub, 1);

      STRIP_NOPS (op00);
      if (TREE_CODE (op00) == ADDR_EXPR)
        {
          tree op00type;
          op00 = TREE_OPERAND (op00, 0);
          op00type = TREE_TYPE (op00);

          /* ((foo*)&vectorfoo)[1] => BIT_FIELD_REF<vectorfoo,...> */
          if (TREE_CODE (op00type) == VECTOR_TYPE
              && type == TREE_TYPE (op00type))
            {
              HOST_WIDE_INT offset = tree_to_shwi (op01);
              tree part_width = TYPE_SIZE (type);
              unsigned HOST_WIDE_INT part_widthi
                = tree_to_shwi (part_width) / BITS_PER_UNIT;
              unsigned HOST_WIDE_INT indexi = offset * BITS_PER_UNIT;
              tree index = bitsize_int (indexi);

              if (offset / part_widthi < TYPE_VECTOR_SUBPARTS (op00type))
                return fold_build3_loc (loc,
                                        BIT_FIELD_REF, type, op00,
                                        part_width, index);
            }
          /* ((foo*)&complexfoo)[1] => __imag__ complexfoo */
          else if (TREE_CODE (op00type) == COMPLEX_TYPE
                   && type == TREE_TYPE (op00type))
            {
              tree size = TYPE_SIZE_UNIT (type);
              if (tree_int_cst_equal (size, op01))
                return fold_build1_loc (loc, IMAGPART_EXPR, type, op00);
            }
          /* ((foo *)&fooarray)[1] => fooarray[1] */
          else if (TREE_CODE (op00type) == ARRAY_TYPE
                   && type == TREE_TYPE (op00type))
            {
              tree type_domain = TYPE_DOMAIN (op00type);
              tree min_val = size_zero_node;
              if (type_domain && TYPE_MIN_VALUE (type_domain))
                min_val = TYPE_MIN_VALUE (type_domain);
              op01 = size_binop_loc (loc, EXACT_DIV_EXPR, op01,
                                     TYPE_SIZE_UNIT (type));
              op01 = size_binop_loc (loc, PLUS_EXPR, op01, min_val);
              return build4_loc (loc, ARRAY_REF, type, op00, op01,
                                 NULL_TREE, NULL_TREE);
            }
        }
    }

  /* *(foo *)fooarrptr => (*fooarrptr)[0] */
  if (TREE_CODE (TREE_TYPE (subtype)) == ARRAY_TYPE
      && type == TREE_TYPE (TREE_TYPE (subtype))
      && (!in_gimple_form
          || TREE_CODE (TYPE_SIZE (type)) == INTEGER_CST))
    {
      tree type_domain;
      tree min_val = size_zero_node;
      sub = build_fold_indirect_ref_loc (loc, sub);
      type_domain = TYPE_DOMAIN (TREE_TYPE (sub));
      if (type_domain && TYPE_MIN_VALUE (type_domain))
        min_val = TYPE_MIN_VALUE (type_domain);
      if (in_gimple_form
          && TREE_CODE (min_val) != INTEGER_CST)
        return NULL_TREE;
      return build4_loc (loc, ARRAY_REF, type, sub, min_val, NULL_TREE,
                         NULL_TREE);
    }

  return NULL_TREE;
}
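
/* Illustrative folds (editor's addition), writing `c' for a variable
   of type _Complex double and `v' for a vector of four floats (names
   made up):
     *(double *) &c      => __real__ c                (REALPART_EXPR)
     ((double *) &c)[1]  => __imag__ c                (IMAGPART_EXPR)
     *(float *) &v       => BIT_FIELD_REF <v, 32, 0>  */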

/* Builds an expression for an indirection through T, simplifying some
   cases.  */

tree
build_fold_indirect_ref_loc (location_t loc, tree t)
{
  tree type = TREE_TYPE (TREE_TYPE (t));
  tree sub = fold_indirect_ref_1 (loc, type, t);

  if (sub)
    return sub;

  return build1_loc (loc, INDIRECT_REF, type, t);
}

/* Given an INDIRECT_REF T, return either T or a simplified version.  */

tree
fold_indirect_ref_loc (location_t loc, tree t)
{
  tree sub = fold_indirect_ref_1 (loc, TREE_TYPE (t), TREE_OPERAND (t, 0));

  if (sub)
    return sub;
  else
    return t;
}

/* Strip non-trapping, non-side-effecting tree nodes from an expression
   whose result is ignored.  The type of the returned tree need not be
   the same as the original expression.  */

tree
fold_ignored_result (tree t)
{
  if (!TREE_SIDE_EFFECTS (t))
    return integer_zero_node;

  for (;;)
    switch (TREE_CODE_CLASS (TREE_CODE (t)))
      {
      case tcc_unary:
        t = TREE_OPERAND (t, 0);
        break;

      case tcc_binary:
      case tcc_comparison:
        if (!TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1)))
          t = TREE_OPERAND (t, 0);
        else if (!TREE_SIDE_EFFECTS (TREE_OPERAND (t, 0)))
          t = TREE_OPERAND (t, 1);
        else
          return t;
        break;

      case tcc_expression:
        switch (TREE_CODE (t))
          {
          case COMPOUND_EXPR:
            if (TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1)))
              return t;
            t = TREE_OPERAND (t, 0);
            break;

          case COND_EXPR:
            if (TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1))
                || TREE_SIDE_EFFECTS (TREE_OPERAND (t, 2)))
              return t;
            t = TREE_OPERAND (t, 0);
            break;

          default:
            return t;
          }
        break;

      default:
        return t;
      }
}
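
/* Illustrative example (editor's addition): stripping the ignored
   expression `(f (), x + 1)' yields just `f ()': the COMPOUND_EXPR case
   drops the side-effect-free `x + 1', and the remaining call is
   returned as-is because its side effects must be preserved.  */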

/* Return the value of VALUE, rounded up to a multiple of DIVISOR.  */

tree
round_up_loc (location_t loc, tree value, int divisor)
{
  tree div = NULL_TREE;

  gcc_assert (divisor > 0);
  if (divisor == 1)
    return value;

  /* See if VALUE is already a multiple of DIVISOR.  If so, we don't
     have to do anything.  Only do this when we are not given a const,
     because for a constant this check is more expensive than just
     doing the rounding.  */
  if (TREE_CODE (value) != INTEGER_CST)
    {
      div = build_int_cst (TREE_TYPE (value), divisor);

      if (multiple_of_p (TREE_TYPE (value), value, div))
        return value;
    }

  /* If divisor is a power of two, simplify this to bit manipulation.  */
  if (divisor == (divisor & -divisor))
    {
      if (TREE_CODE (value) == INTEGER_CST)
        {
          wide_int val = value;
          bool overflow_p;

          if ((val & (divisor - 1)) == 0)
            return value;

          overflow_p = TREE_OVERFLOW (value);
          val &= ~(divisor - 1);
          val += divisor;
          if (val == 0)
            overflow_p = true;

          return force_fit_type (TREE_TYPE (value), val, -1, overflow_p);
        }
      else
        {
          tree t;

          t = build_int_cst (TREE_TYPE (value), divisor - 1);
          value = size_binop_loc (loc, PLUS_EXPR, value, t);
          t = build_int_cst (TREE_TYPE (value), -divisor);
          value = size_binop_loc (loc, BIT_AND_EXPR, value, t);
        }
    }
  else
    {
      if (!div)
        div = build_int_cst (TREE_TYPE (value), divisor);
      value = size_binop_loc (loc, CEIL_DIV_EXPR, value, div);
      value = size_binop_loc (loc, MULT_EXPR, value, div);
    }

  return value;
}
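
/* Worked example (editor's addition): rounding 13 up to a multiple of
   8 takes the power-of-two path, (13 + 7) & -8 == 16, while a divisor
   of 12 falls back to the CEIL_DIV_EXPR/MULT_EXPR pair,
   ceil (13 / 12) * 12 == 24.  */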

/* Likewise, but round down.  */

tree
round_down_loc (location_t loc, tree value, int divisor)
{
  tree div = NULL_TREE;

  gcc_assert (divisor > 0);
  if (divisor == 1)
    return value;

  /* See if VALUE is already a multiple of DIVISOR.  If so, we don't
     have to do anything.  Only do this when we are not given a const,
     because for a constant this check is more expensive than just
     doing the rounding.  */
  if (TREE_CODE (value) != INTEGER_CST)
    {
      div = build_int_cst (TREE_TYPE (value), divisor);

      if (multiple_of_p (TREE_TYPE (value), value, div))
        return value;
    }

  /* If divisor is a power of two, simplify this to bit manipulation.  */
  if (divisor == (divisor & -divisor))
    {
      tree t;

      t = build_int_cst (TREE_TYPE (value), -divisor);
      value = size_binop_loc (loc, BIT_AND_EXPR, value, t);
    }
  else
    {
      if (!div)
        div = build_int_cst (TREE_TYPE (value), divisor);
      value = size_binop_loc (loc, FLOOR_DIV_EXPR, value, div);
      value = size_binop_loc (loc, MULT_EXPR, value, div);
    }

  return value;
}

/* Returns the pointer to the base of the object addressed by EXP and
   extracts the information about the offset of the access, storing it
   to PBITPOS and POFFSET.  */

static tree
split_address_to_core_and_offset (tree exp,
                                  HOST_WIDE_INT *pbitpos, tree *poffset)
{
  tree core;
  enum machine_mode mode;
  int unsignedp, volatilep;
  HOST_WIDE_INT bitsize;
  location_t loc = EXPR_LOCATION (exp);

  if (TREE_CODE (exp) == ADDR_EXPR)
    {
      core = get_inner_reference (TREE_OPERAND (exp, 0), &bitsize, pbitpos,
                                  poffset, &mode, &unsignedp, &volatilep,
                                  false);
      core = build_fold_addr_expr_loc (loc, core);
    }
  else
    {
      core = exp;
      *pbitpos = 0;
      *poffset = NULL_TREE;
    }

  return core;
}

/* Returns true if addresses of E1 and E2 differ by a constant, false
   otherwise.  If they do, E1 - E2 is stored in *DIFF.  */

bool
ptr_difference_const (tree e1, tree e2, HOST_WIDE_INT *diff)
{
  tree core1, core2;
  HOST_WIDE_INT bitpos1, bitpos2;
  tree toffset1, toffset2, tdiff, type;

  core1 = split_address_to_core_and_offset (e1, &bitpos1, &toffset1);
  core2 = split_address_to_core_and_offset (e2, &bitpos2, &toffset2);

  if (bitpos1 % BITS_PER_UNIT != 0
      || bitpos2 % BITS_PER_UNIT != 0
      || !operand_equal_p (core1, core2, 0))
    return false;

  if (toffset1 && toffset2)
    {
      type = TREE_TYPE (toffset1);
      if (type != TREE_TYPE (toffset2))
        toffset2 = fold_convert (type, toffset2);

      tdiff = fold_build2 (MINUS_EXPR, type, toffset1, toffset2);
      if (!cst_and_fits_in_hwi (tdiff))
        return false;

      *diff = int_cst_value (tdiff);
    }
  else if (toffset1 || toffset2)
    {
      /* If only one of the offsets is non-constant, the difference cannot
         be a constant.  */
      return false;
    }
  else
    *diff = 0;

  *diff += (bitpos1 - bitpos2) / BITS_PER_UNIT;
  return true;
}
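
/* Worked example (editor's addition): given int A[10], the addresses
   &A[7] and &A[3] share the core &A and have constant bit positions
   224 and 96, so *DIFF is set to (224 - 96) / 8 == 16 bytes and the
   function returns true.  Comparing &A[i] with &A[3] fails, because
   only one side has a non-constant offset.  */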

/* Simplify the floating point expression EXP when the sign of the
   result is not significant.  Return NULL_TREE if no simplification
   is possible.  */

tree
fold_strip_sign_ops (tree exp)
{
  tree arg0, arg1;
  location_t loc = EXPR_LOCATION (exp);

  switch (TREE_CODE (exp))
    {
    case ABS_EXPR:
    case NEGATE_EXPR:
      arg0 = fold_strip_sign_ops (TREE_OPERAND (exp, 0));
      return arg0 ? arg0 : TREE_OPERAND (exp, 0);

    case MULT_EXPR:
    case RDIV_EXPR:
      if (HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (TREE_TYPE (exp))))
        return NULL_TREE;
      arg0 = fold_strip_sign_ops (TREE_OPERAND (exp, 0));
      arg1 = fold_strip_sign_ops (TREE_OPERAND (exp, 1));
      if (arg0 != NULL_TREE || arg1 != NULL_TREE)
        return fold_build2_loc (loc, TREE_CODE (exp), TREE_TYPE (exp),
                                arg0 ? arg0 : TREE_OPERAND (exp, 0),
                                arg1 ? arg1 : TREE_OPERAND (exp, 1));
      break;

    case COMPOUND_EXPR:
      arg0 = TREE_OPERAND (exp, 0);
      arg1 = fold_strip_sign_ops (TREE_OPERAND (exp, 1));
      if (arg1)
        return fold_build2_loc (loc, COMPOUND_EXPR, TREE_TYPE (exp),
                                arg0, arg1);
      break;

    case COND_EXPR:
      arg0 = fold_strip_sign_ops (TREE_OPERAND (exp, 1));
      arg1 = fold_strip_sign_ops (TREE_OPERAND (exp, 2));
      if (arg0 || arg1)
        return fold_build3_loc (loc,
                                COND_EXPR, TREE_TYPE (exp),
                                TREE_OPERAND (exp, 0),
                                arg0 ? arg0 : TREE_OPERAND (exp, 1),
                                arg1 ? arg1 : TREE_OPERAND (exp, 2));
      break;

    case CALL_EXPR:
      {
        const enum built_in_function fcode = builtin_mathfn_code (exp);
        switch (fcode)
          {
          CASE_FLT_FN (BUILT_IN_COPYSIGN):
            /* Strip copysign function call, return the 1st argument.  */
            arg0 = CALL_EXPR_ARG (exp, 0);
            arg1 = CALL_EXPR_ARG (exp, 1);
            return omit_one_operand_loc (loc, TREE_TYPE (exp), arg0, arg1);

          default:
            /* Strip sign ops from the argument of "odd" math functions.  */
            if (negate_mathfn_p (fcode))
              {
                arg0 = fold_strip_sign_ops (CALL_EXPR_ARG (exp, 0));
                if (arg0)
                  return build_call_expr_loc (loc, get_callee_fndecl (exp),
                                              1, arg0);
              }
            break;
          }
      }
      break;

    default:
      break;
    }
  return NULL_TREE;
}
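
/* Illustrative example (editor's addition): when a caller only needs
   the magnitude of the result, e.g. because it immediately takes the
   absolute value, (-x) * (-y) strips to x * y, and a copysign (x, y)
   call strips to x (while preserving any side effects of y).  */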