[PR67828] don't unswitch on default defs of non-parms
[official-gcc.git] / gcc / fold-const.c
blob 5d8822fde8e89b0d661eda8a2264ede45332692e
/* Fold a constant sub-tree into a single node for C-compiler
   Copyright (C) 1987-2015 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

/*@@ This file should be rewritten to use an arbitrary precision
  @@ representation for "struct tree_int_cst" and "struct tree_real_cst".
  @@ Perhaps the routines could also be used for bc/dc, and made a lib.
  @@ The routines that translate from the ap rep should
  @@ warn if precision et. al. is lost.
  @@ This would also make life easier when this technology is used
  @@ for cross-compilers.  */

/* The entry points in this file are fold, size_int_wide and size_binop.

   fold takes a tree as argument and returns a simplified tree.

   size_binop takes a tree code for an arithmetic operation
   and two operands that are trees, and produces a tree for the
   result, assuming the type comes from `sizetype'.

   size_int takes an integer value, and creates a tree constant
   with type from `sizetype'.

   Note: Since the folders get called on non-gimple code as well as
   gimple code, we need to handle GIMPLE tuples as well as their
   corresponding tree equivalents.  */
#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "backend.h"
#include "predict.h"
#include "tree.h"
#include "gimple.h"
#include "rtl.h"
#include "flags.h"
#include "alias.h"
#include "fold-const.h"
#include "stor-layout.h"
#include "calls.h"
#include "tree-iterator.h"
#include "realmpfr.h"
#include "insn-config.h"
#include "expmed.h"
#include "dojump.h"
#include "explow.h"
#include "emit-rtl.h"
#include "varasm.h"
#include "stmt.h"
#include "expr.h"
#include "tm_p.h"
#include "target.h"
#include "diagnostic-core.h"
#include "intl.h"
#include "langhooks.h"
#include "md5.h"
#include "internal-fn.h"
#include "tree-eh.h"
#include "gimplify.h"
#include "tree-dfa.h"
#include "builtins.h"
#include "cgraph.h"
#include "generic-match.h"
#include "optabs-query.h"
#include "gimple-fold.h"
#include "params.h"

#ifndef LOAD_EXTEND_OP
#define LOAD_EXTEND_OP(M) UNKNOWN
#endif
/* Nonzero if we are folding constants inside an initializer; zero
   otherwise.  */
int folding_initializer = 0;

/* The following constants represent a bit based encoding of GCC's
   comparison operators.  This encoding simplifies transformations
   on relational comparison operators, such as AND and OR.  */
enum comparison_code {
  COMPCODE_FALSE = 0,
  COMPCODE_LT = 1,
  COMPCODE_EQ = 2,
  COMPCODE_LE = 3,
  COMPCODE_GT = 4,
  COMPCODE_LTGT = 5,
  COMPCODE_GE = 6,
  COMPCODE_ORD = 7,
  COMPCODE_UNORD = 8,
  COMPCODE_UNLT = 9,
  COMPCODE_UNEQ = 10,
  COMPCODE_UNLE = 11,
  COMPCODE_UNGT = 12,
  COMPCODE_NE = 13,
  COMPCODE_UNGE = 14,
  COMPCODE_TRUE = 15
};
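/* For illustration: the low three bits encode LT, EQ and GT and bit 3
   encodes UNORD, so each composite code is the bitwise OR of its parts,
   e.g. COMPCODE_LE == (COMPCODE_LT | COMPCODE_EQ) and
   COMPCODE_NE == (COMPCODE_LTGT | COMPCODE_UNORD).  That is what makes
   AND and OR of comparisons expressible as bit operations on codes.  */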
static bool negate_mathfn_p (enum built_in_function);
static bool negate_expr_p (tree);
static tree negate_expr (tree);
static tree split_tree (tree, enum tree_code, tree *, tree *, tree *, int);
static tree associate_trees (location_t, tree, tree, enum tree_code, tree);
static enum comparison_code comparison_to_compcode (enum tree_code);
static enum tree_code compcode_to_comparison (enum comparison_code);
static int operand_equal_for_comparison_p (tree, tree, tree);
static int twoval_comparison_p (tree, tree *, tree *, int *);
static tree eval_subst (location_t, tree, tree, tree, tree, tree);
static tree make_bit_field_ref (location_t, tree, tree,
                                HOST_WIDE_INT, HOST_WIDE_INT, int);
static tree optimize_bit_field_compare (location_t, enum tree_code,
                                        tree, tree, tree);
static tree decode_field_reference (location_t, tree, HOST_WIDE_INT *,
                                    HOST_WIDE_INT *,
                                    machine_mode *, int *, int *,
                                    tree *, tree *);
static int simple_operand_p (const_tree);
static bool simple_operand_p_2 (tree);
static tree range_binop (enum tree_code, tree, tree, int, tree, int);
static tree range_predecessor (tree);
static tree range_successor (tree);
static tree fold_range_test (location_t, enum tree_code, tree, tree, tree);
static tree fold_cond_expr_with_comparison (location_t, tree, tree, tree, tree);
static tree unextend (tree, int, int, tree);
static tree optimize_minmax_comparison (location_t, enum tree_code,
                                        tree, tree, tree);
static tree extract_muldiv (tree, tree, enum tree_code, tree, bool *);
static tree extract_muldiv_1 (tree, tree, enum tree_code, tree, bool *);
static tree fold_binary_op_with_conditional_arg (location_t,
                                                 enum tree_code, tree,
                                                 tree, tree,
                                                 tree, tree, int);
static tree fold_div_compare (location_t, enum tree_code, tree, tree, tree);
static bool reorder_operands_p (const_tree, const_tree);
static tree fold_negate_const (tree, tree);
static tree fold_not_const (const_tree, tree);
static tree fold_relational_const (enum tree_code, tree, tree, tree);
static tree fold_convert_const (enum tree_code, tree, tree);
static tree fold_view_convert_expr (tree, tree);
static bool vec_cst_ctor_to_array (tree, tree *);
/* Return EXPR_LOCATION of T if it is not UNKNOWN_LOCATION.
   Otherwise, return LOC.  */

static location_t
expr_location_or (tree t, location_t loc)
{
  location_t tloc = EXPR_LOCATION (t);
  return tloc == UNKNOWN_LOCATION ? loc : tloc;
}
/* Similar to protected_set_expr_location, but never modify X in place;
   if the location can and needs to be set, unshare X first.  */

static inline tree
protected_set_expr_location_unshare (tree x, location_t loc)
{
  if (CAN_HAVE_LOCATION_P (x)
      && EXPR_LOCATION (x) != loc
      && !(TREE_CODE (x) == SAVE_EXPR
           || TREE_CODE (x) == TARGET_EXPR
           || TREE_CODE (x) == BIND_EXPR))
    {
      x = copy_node (x);
      SET_EXPR_LOCATION (x, loc);
    }
  return x;
}
/* If ARG2 divides ARG1 with zero remainder, carries out the exact
   division and returns the quotient.  Otherwise returns
   NULL_TREE.  */
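/* For example, with ARG1 == 12 and ARG2 == 4 this returns the
   INTEGER_CST 3, while ARG1 == 7 and ARG2 == 2 returns NULL_TREE
   because the remainder is nonzero.  */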
tree
div_if_zero_remainder (const_tree arg1, const_tree arg2)
{
  widest_int quo;

  if (wi::multiple_of_p (wi::to_widest (arg1), wi::to_widest (arg2),
                         SIGNED, &quo))
    return wide_int_to_tree (TREE_TYPE (arg1), quo);

  return NULL_TREE;
}
/* This is nonzero if we should defer warnings about undefined
   overflow.  This facility exists because these warnings are a
   special case.  The code to estimate loop iterations does not want
   to issue any warnings, since it works with expressions which do not
   occur in user code.  Various bits of cleanup code call fold(), but
   only use the result if it has certain characteristics (e.g., is a
   constant); that code only wants to issue a warning if the result is
   used.  */

static int fold_deferring_overflow_warnings;

/* If a warning about undefined overflow is deferred, this is the
   warning.  Note that this may cause us to turn two warnings into
   one, but that is fine since it is sufficient to only give one
   warning per expression.  */

static const char* fold_deferred_overflow_warning;

/* If a warning about undefined overflow is deferred, this is the
   level at which the warning should be emitted.  */

static enum warn_strict_overflow_code fold_deferred_overflow_code;
/* Start deferring overflow warnings.  We could use a stack here to
   permit nested calls, but at present it is not necessary.  */

void
fold_defer_overflow_warnings (void)
{
  ++fold_deferring_overflow_warnings;
}
/* Stop deferring overflow warnings.  If there is a pending warning,
   and ISSUE is true, then issue the warning if appropriate.  STMT is
   the statement with which the warning should be associated (used for
   location information); STMT may be NULL.  CODE is the level of the
   warning--a warn_strict_overflow_code value.  This function will use
   the smaller of CODE and the deferred code when deciding whether to
   issue the warning.  CODE may be zero to mean to always use the
   deferred code.  */

void
fold_undefer_overflow_warnings (bool issue, const gimple *stmt, int code)
{
  const char *warnmsg;
  location_t locus;

  gcc_assert (fold_deferring_overflow_warnings > 0);
  --fold_deferring_overflow_warnings;
  if (fold_deferring_overflow_warnings > 0)
    {
      if (fold_deferred_overflow_warning != NULL
          && code != 0
          && code < (int) fold_deferred_overflow_code)
        fold_deferred_overflow_code = (enum warn_strict_overflow_code) code;
      return;
    }

  warnmsg = fold_deferred_overflow_warning;
  fold_deferred_overflow_warning = NULL;

  if (!issue || warnmsg == NULL)
    return;

  if (gimple_no_warning_p (stmt))
    return;

  /* Use the smallest code level when deciding to issue the
     warning.  */
  if (code == 0 || code > (int) fold_deferred_overflow_code)
    code = fold_deferred_overflow_code;

  if (!issue_strict_overflow_warning (code))
    return;

  if (stmt == NULL)
    locus = input_location;
  else
    locus = gimple_location (stmt);
  warning_at (locus, OPT_Wstrict_overflow, "%s", warnmsg);
}
/* Stop deferring overflow warnings, ignoring any deferred
   warnings.  */

void
fold_undefer_and_ignore_overflow_warnings (void)
{
  fold_undefer_overflow_warnings (false, NULL, 0);
}

/* Whether we are deferring overflow warnings.  */

bool
fold_deferring_overflow_warnings_p (void)
{
  return fold_deferring_overflow_warnings > 0;
}
/* This is called when we fold something based on the fact that signed
   overflow is undefined.  */

static void
fold_overflow_warning (const char* gmsgid, enum warn_strict_overflow_code wc)
{
  if (fold_deferring_overflow_warnings > 0)
    {
      if (fold_deferred_overflow_warning == NULL
          || wc < fold_deferred_overflow_code)
        {
          fold_deferred_overflow_warning = gmsgid;
          fold_deferred_overflow_code = wc;
        }
    }
  else if (issue_strict_overflow_warning (wc))
    warning (OPT_Wstrict_overflow, gmsgid);
}
/* Return true if the built-in mathematical function specified by CODE
   is odd, i.e. -f(x) == f(-x).  */
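/* For instance, sin qualifies since sin(-x) == -sin(x), whereas an even
   function such as cos (cos(-x) == cos(x)) does not, and is therefore
   absent from the list below.  */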
static bool
negate_mathfn_p (enum built_in_function code)
{
  switch (code)
    {
    CASE_FLT_FN (BUILT_IN_ASIN):
    CASE_FLT_FN (BUILT_IN_ASINH):
    CASE_FLT_FN (BUILT_IN_ATAN):
    CASE_FLT_FN (BUILT_IN_ATANH):
    CASE_FLT_FN (BUILT_IN_CASIN):
    CASE_FLT_FN (BUILT_IN_CASINH):
    CASE_FLT_FN (BUILT_IN_CATAN):
    CASE_FLT_FN (BUILT_IN_CATANH):
    CASE_FLT_FN (BUILT_IN_CBRT):
    CASE_FLT_FN (BUILT_IN_CPROJ):
    CASE_FLT_FN (BUILT_IN_CSIN):
    CASE_FLT_FN (BUILT_IN_CSINH):
    CASE_FLT_FN (BUILT_IN_CTAN):
    CASE_FLT_FN (BUILT_IN_CTANH):
    CASE_FLT_FN (BUILT_IN_ERF):
    CASE_FLT_FN (BUILT_IN_LLROUND):
    CASE_FLT_FN (BUILT_IN_LROUND):
    CASE_FLT_FN (BUILT_IN_ROUND):
    CASE_FLT_FN (BUILT_IN_SIN):
    CASE_FLT_FN (BUILT_IN_SINH):
    CASE_FLT_FN (BUILT_IN_TAN):
    CASE_FLT_FN (BUILT_IN_TANH):
    CASE_FLT_FN (BUILT_IN_TRUNC):
      return true;

    CASE_FLT_FN (BUILT_IN_LLRINT):
    CASE_FLT_FN (BUILT_IN_LRINT):
    CASE_FLT_FN (BUILT_IN_NEARBYINT):
    CASE_FLT_FN (BUILT_IN_RINT):
      return !flag_rounding_math;

    default:
      break;
    }
  return false;
}
/* Check whether we may negate an integer constant T without causing
   overflow.  */

bool
may_negate_without_overflow_p (const_tree t)
{
  tree type;

  gcc_assert (TREE_CODE (t) == INTEGER_CST);

  type = TREE_TYPE (t);
  if (TYPE_UNSIGNED (type))
    return false;

  return !wi::only_sign_bit_p (t);
}
/* Determine whether an expression T can be cheaply negated using
   the function negate_expr without introducing undefined overflow.  */

static bool
negate_expr_p (tree t)
{
  tree type;

  if (t == 0)
    return false;

  type = TREE_TYPE (t);

  STRIP_SIGN_NOPS (t);
  switch (TREE_CODE (t))
    {
    case INTEGER_CST:
      if (INTEGRAL_TYPE_P (type) && TYPE_OVERFLOW_WRAPS (type))
        return true;

      /* Check that -CST will not overflow type.  */
      return may_negate_without_overflow_p (t);
    case BIT_NOT_EXPR:
      return (INTEGRAL_TYPE_P (type)
              && TYPE_OVERFLOW_WRAPS (type));

    case FIXED_CST:
      return true;

    case NEGATE_EXPR:
      return !TYPE_OVERFLOW_SANITIZED (type);

    case REAL_CST:
      /* We want to canonicalize to positive real constants.  Pretend
         that only negative ones can be easily negated.  */
      return REAL_VALUE_NEGATIVE (TREE_REAL_CST (t));

    case COMPLEX_CST:
      return negate_expr_p (TREE_REALPART (t))
             && negate_expr_p (TREE_IMAGPART (t));

    case VECTOR_CST:
      {
        if (FLOAT_TYPE_P (TREE_TYPE (type)) || TYPE_OVERFLOW_WRAPS (type))
          return true;

        int count = TYPE_VECTOR_SUBPARTS (type), i;

        for (i = 0; i < count; i++)
          if (!negate_expr_p (VECTOR_CST_ELT (t, i)))
            return false;

        return true;
      }

    case COMPLEX_EXPR:
      return negate_expr_p (TREE_OPERAND (t, 0))
             && negate_expr_p (TREE_OPERAND (t, 1));

    case CONJ_EXPR:
      return negate_expr_p (TREE_OPERAND (t, 0));

    case PLUS_EXPR:
      if (HONOR_SIGN_DEPENDENT_ROUNDING (element_mode (type))
          || HONOR_SIGNED_ZEROS (element_mode (type)))
        return false;
      /* -(A + B) -> (-B) - A.  */
      if (negate_expr_p (TREE_OPERAND (t, 1))
          && reorder_operands_p (TREE_OPERAND (t, 0),
                                 TREE_OPERAND (t, 1)))
        return true;
      /* -(A + B) -> (-A) - B.  */
      return negate_expr_p (TREE_OPERAND (t, 0));

    case MINUS_EXPR:
      /* We can't turn -(A-B) into B-A when we honor signed zeros.  */
      return !HONOR_SIGN_DEPENDENT_ROUNDING (element_mode (type))
             && !HONOR_SIGNED_ZEROS (element_mode (type))
             && reorder_operands_p (TREE_OPERAND (t, 0),
                                    TREE_OPERAND (t, 1));

    case MULT_EXPR:
      if (TYPE_UNSIGNED (TREE_TYPE (t)))
        break;

      /* Fall through.  */

    case RDIV_EXPR:
      if (! HONOR_SIGN_DEPENDENT_ROUNDING (element_mode (TREE_TYPE (t))))
        return negate_expr_p (TREE_OPERAND (t, 1))
               || negate_expr_p (TREE_OPERAND (t, 0));
      break;

    case TRUNC_DIV_EXPR:
    case ROUND_DIV_EXPR:
    case EXACT_DIV_EXPR:
      /* In general we can't negate A / B, because if A is INT_MIN and
         B is 1, we may turn this into INT_MIN / -1 which is undefined
         and actually traps on some architectures.  But if overflow is
         undefined, we can negate, because - (INT_MIN / 1) is an
         overflow.  */
      if (INTEGRAL_TYPE_P (TREE_TYPE (t)))
        {
          if (!TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (t)))
            break;
          /* If overflow is undefined then we have to be careful because
             we ask whether it's ok to associate the negate with the
             division which is not ok for example for
             -((a - b) / c) where (-(a - b)) / c may invoke undefined
             overflow because of negating INT_MIN.  So do not use
             negate_expr_p here but open-code the two important cases.  */
          if (TREE_CODE (TREE_OPERAND (t, 0)) == NEGATE_EXPR
              || (TREE_CODE (TREE_OPERAND (t, 0)) == INTEGER_CST
                  && may_negate_without_overflow_p (TREE_OPERAND (t, 0))))
            return true;
        }
      else if (negate_expr_p (TREE_OPERAND (t, 0)))
        return true;
      return negate_expr_p (TREE_OPERAND (t, 1));

    case NOP_EXPR:
      /* Negate -((double)float) as (double)(-float).  */
      if (TREE_CODE (type) == REAL_TYPE)
        {
          tree tem = strip_float_extensions (t);
          if (tem != t)
            return negate_expr_p (tem);
        }
      break;

    case CALL_EXPR:
      /* Negate -f(x) as f(-x).  */
      if (negate_mathfn_p (builtin_mathfn_code (t)))
        return negate_expr_p (CALL_EXPR_ARG (t, 0));
      break;

    case RSHIFT_EXPR:
      /* Optimize -((int)x >> 31) into (unsigned)x >> 31 for int.  */
      if (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST)
        {
          tree op1 = TREE_OPERAND (t, 1);
          if (wi::eq_p (op1, TYPE_PRECISION (type) - 1))
            return true;
        }
      break;

    default:
      break;
    }
  return false;
}
/* Given T, an expression, return a folded tree for -T or NULL_TREE, if no
   simplification is possible.
   If negate_expr_p would return true for T, NULL_TREE will never be
   returned.  */

static tree
fold_negate_expr (location_t loc, tree t)
{
  tree type = TREE_TYPE (t);
  tree tem;

  switch (TREE_CODE (t))
    {
    /* Convert - (~A) to A + 1.  */
    case BIT_NOT_EXPR:
      if (INTEGRAL_TYPE_P (type))
        return fold_build2_loc (loc, PLUS_EXPR, type, TREE_OPERAND (t, 0),
                                build_one_cst (type));
      break;

    case INTEGER_CST:
      tem = fold_negate_const (t, type);
      if (TREE_OVERFLOW (tem) == TREE_OVERFLOW (t)
          || (ANY_INTEGRAL_TYPE_P (type)
              && !TYPE_OVERFLOW_TRAPS (type)
              && TYPE_OVERFLOW_WRAPS (type))
          || (flag_sanitize & SANITIZE_SI_OVERFLOW) == 0)
        return tem;
      break;

    case REAL_CST:
      tem = fold_negate_const (t, type);
      return tem;

    case FIXED_CST:
      tem = fold_negate_const (t, type);
      return tem;

    case COMPLEX_CST:
      {
        tree rpart = fold_negate_expr (loc, TREE_REALPART (t));
        tree ipart = fold_negate_expr (loc, TREE_IMAGPART (t));
        if (rpart && ipart)
          return build_complex (type, rpart, ipart);
      }
      break;

    case VECTOR_CST:
      {
        int count = TYPE_VECTOR_SUBPARTS (type), i;
        tree *elts = XALLOCAVEC (tree, count);

        for (i = 0; i < count; i++)
          {
            elts[i] = fold_negate_expr (loc, VECTOR_CST_ELT (t, i));
            if (elts[i] == NULL_TREE)
              return NULL_TREE;
          }

        return build_vector (type, elts);
      }

    case COMPLEX_EXPR:
      if (negate_expr_p (t))
        return fold_build2_loc (loc, COMPLEX_EXPR, type,
                                fold_negate_expr (loc, TREE_OPERAND (t, 0)),
                                fold_negate_expr (loc, TREE_OPERAND (t, 1)));
      break;

    case CONJ_EXPR:
      if (negate_expr_p (t))
        return fold_build1_loc (loc, CONJ_EXPR, type,
                                fold_negate_expr (loc, TREE_OPERAND (t, 0)));
      break;

    case NEGATE_EXPR:
      if (!TYPE_OVERFLOW_SANITIZED (type))
        return TREE_OPERAND (t, 0);
      break;

    case PLUS_EXPR:
      if (!HONOR_SIGN_DEPENDENT_ROUNDING (element_mode (type))
          && !HONOR_SIGNED_ZEROS (element_mode (type)))
        {
          /* -(A + B) -> (-B) - A.  */
          if (negate_expr_p (TREE_OPERAND (t, 1))
              && reorder_operands_p (TREE_OPERAND (t, 0),
                                     TREE_OPERAND (t, 1)))
            {
              tem = negate_expr (TREE_OPERAND (t, 1));
              return fold_build2_loc (loc, MINUS_EXPR, type,
                                      tem, TREE_OPERAND (t, 0));
            }

          /* -(A + B) -> (-A) - B.  */
          if (negate_expr_p (TREE_OPERAND (t, 0)))
            {
              tem = negate_expr (TREE_OPERAND (t, 0));
              return fold_build2_loc (loc, MINUS_EXPR, type,
                                      tem, TREE_OPERAND (t, 1));
            }
        }
      break;

    case MINUS_EXPR:
      /* - (A - B) -> B - A  */
      if (!HONOR_SIGN_DEPENDENT_ROUNDING (element_mode (type))
          && !HONOR_SIGNED_ZEROS (element_mode (type))
          && reorder_operands_p (TREE_OPERAND (t, 0), TREE_OPERAND (t, 1)))
        return fold_build2_loc (loc, MINUS_EXPR, type,
                                TREE_OPERAND (t, 1), TREE_OPERAND (t, 0));
      break;

    case MULT_EXPR:
      if (TYPE_UNSIGNED (type))
        break;

      /* Fall through.  */

    case RDIV_EXPR:
      if (! HONOR_SIGN_DEPENDENT_ROUNDING (element_mode (type)))
        {
          tem = TREE_OPERAND (t, 1);
          if (negate_expr_p (tem))
            return fold_build2_loc (loc, TREE_CODE (t), type,
                                    TREE_OPERAND (t, 0), negate_expr (tem));
          tem = TREE_OPERAND (t, 0);
          if (negate_expr_p (tem))
            return fold_build2_loc (loc, TREE_CODE (t), type,
                                    negate_expr (tem), TREE_OPERAND (t, 1));
        }
      break;

    case TRUNC_DIV_EXPR:
    case ROUND_DIV_EXPR:
    case EXACT_DIV_EXPR:
      /* In general we can't negate A / B, because if A is INT_MIN and
         B is 1, we may turn this into INT_MIN / -1 which is undefined
         and actually traps on some architectures.  But if overflow is
         undefined, we can negate, because - (INT_MIN / 1) is an
         overflow.  */
      if (!INTEGRAL_TYPE_P (type) || TYPE_OVERFLOW_UNDEFINED (type))
        {
          const char * const warnmsg = G_("assuming signed overflow does not "
                                          "occur when negating a division");
          tem = TREE_OPERAND (t, 1);
          if (negate_expr_p (tem))
            {
              if (INTEGRAL_TYPE_P (type)
                  && (TREE_CODE (tem) != INTEGER_CST
                      || integer_onep (tem)))
                fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_MISC);
              return fold_build2_loc (loc, TREE_CODE (t), type,
                                      TREE_OPERAND (t, 0), negate_expr (tem));
            }
          /* If overflow is undefined then we have to be careful because
             we ask whether it's ok to associate the negate with the
             division which is not ok for example for
             -((a - b) / c) where (-(a - b)) / c may invoke undefined
             overflow because of negating INT_MIN.  So do not use
             negate_expr_p here but open-code the two important cases.  */
          tem = TREE_OPERAND (t, 0);
          if ((INTEGRAL_TYPE_P (type)
               && (TREE_CODE (tem) == NEGATE_EXPR
                   || (TREE_CODE (tem) == INTEGER_CST
                       && may_negate_without_overflow_p (tem))))
              || !INTEGRAL_TYPE_P (type))
            return fold_build2_loc (loc, TREE_CODE (t), type,
                                    negate_expr (tem), TREE_OPERAND (t, 1));
        }
      break;

    case NOP_EXPR:
      /* Convert -((double)float) into (double)(-float).  */
      if (TREE_CODE (type) == REAL_TYPE)
        {
          tem = strip_float_extensions (t);
          if (tem != t && negate_expr_p (tem))
            return fold_convert_loc (loc, type, negate_expr (tem));
        }
      break;

    case CALL_EXPR:
      /* Negate -f(x) as f(-x).  */
      if (negate_mathfn_p (builtin_mathfn_code (t))
          && negate_expr_p (CALL_EXPR_ARG (t, 0)))
        {
          tree fndecl, arg;

          fndecl = get_callee_fndecl (t);
          arg = negate_expr (CALL_EXPR_ARG (t, 0));
          return build_call_expr_loc (loc, fndecl, 1, arg);
        }
      break;

    case RSHIFT_EXPR:
      /* Optimize -((int)x >> 31) into (unsigned)x >> 31 for int.  */
      if (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST)
        {
          tree op1 = TREE_OPERAND (t, 1);
          if (wi::eq_p (op1, TYPE_PRECISION (type) - 1))
            {
              tree ntype = TYPE_UNSIGNED (type)
                           ? signed_type_for (type)
                           : unsigned_type_for (type);
              tree temp = fold_convert_loc (loc, ntype, TREE_OPERAND (t, 0));
              temp = fold_build2_loc (loc, RSHIFT_EXPR, ntype, temp, op1);
              return fold_convert_loc (loc, type, temp);
            }
        }
      break;

    default:
      break;
    }

  return NULL_TREE;
}
/* Like fold_negate_expr, but return a NEGATE_EXPR tree, if T can not be
   negated in a simpler way.  Also allow for T to be NULL_TREE, in which case
   return NULL_TREE.  */

static tree
negate_expr (tree t)
{
  tree type, tem;
  location_t loc;

  if (t == NULL_TREE)
    return NULL_TREE;

  loc = EXPR_LOCATION (t);
  type = TREE_TYPE (t);
  STRIP_SIGN_NOPS (t);

  tem = fold_negate_expr (loc, t);
  if (!tem)
    tem = build1_loc (loc, NEGATE_EXPR, TREE_TYPE (t), t);
  return fold_convert_loc (loc, type, tem);
}
/* Split a tree IN into a constant, literal and variable parts that could be
   combined with CODE to make IN.  "constant" means an expression with
   TREE_CONSTANT but that isn't an actual constant.  CODE must be a
   commutative arithmetic operation.  Store the constant part into *CONP,
   the literal in *LITP and return the variable part.  If a part isn't
   present, set it to null.  If the tree does not decompose in this way,
   return the entire tree as the variable part and the other parts as null.

   If CODE is PLUS_EXPR we also split trees that use MINUS_EXPR.  In that
   case, we negate an operand that was subtracted.  Except if it is a
   literal for which we use *MINUS_LITP instead.

   If NEGATE_P is true, we are negating all of IN, again except a literal
   for which we use *MINUS_LITP instead.

   If IN is itself a literal or constant, return it as appropriate.

   Note that we do not guarantee that any of the three values will be the
   same type as IN, but they will have the same signedness and mode.  */
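/* A sketch of the common case: splitting IN == X + 4 with CODE ==
   PLUS_EXPR stores 4 in *LITP, leaves *CONP null and returns X, while
   splitting IN == X - 4 stores 4 in *MINUS_LITP instead.  */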
static tree
split_tree (tree in, enum tree_code code, tree *conp, tree *litp,
            tree *minus_litp, int negate_p)
{
  tree var = 0;

  *conp = 0;
  *litp = 0;
  *minus_litp = 0;

  /* Strip any conversions that don't change the machine mode or signedness.  */
  STRIP_SIGN_NOPS (in);

  if (TREE_CODE (in) == INTEGER_CST || TREE_CODE (in) == REAL_CST
      || TREE_CODE (in) == FIXED_CST)
    *litp = in;
  else if (TREE_CODE (in) == code
           || ((! FLOAT_TYPE_P (TREE_TYPE (in)) || flag_associative_math)
               && ! SAT_FIXED_POINT_TYPE_P (TREE_TYPE (in))
               /* We can associate addition and subtraction together (even
                  though the C standard doesn't say so) for integers because
                  the value is not affected.  For reals, the value might be
                  affected, so we can't.  */
               && ((code == PLUS_EXPR && TREE_CODE (in) == MINUS_EXPR)
                   || (code == MINUS_EXPR && TREE_CODE (in) == PLUS_EXPR))))
    {
      tree op0 = TREE_OPERAND (in, 0);
      tree op1 = TREE_OPERAND (in, 1);
      int neg1_p = TREE_CODE (in) == MINUS_EXPR;
      int neg_litp_p = 0, neg_conp_p = 0, neg_var_p = 0;

      /* First see if either of the operands is a literal, then a constant.  */
      if (TREE_CODE (op0) == INTEGER_CST || TREE_CODE (op0) == REAL_CST
          || TREE_CODE (op0) == FIXED_CST)
        *litp = op0, op0 = 0;
      else if (TREE_CODE (op1) == INTEGER_CST || TREE_CODE (op1) == REAL_CST
               || TREE_CODE (op1) == FIXED_CST)
        *litp = op1, neg_litp_p = neg1_p, op1 = 0;

      if (op0 != 0 && TREE_CONSTANT (op0))
        *conp = op0, op0 = 0;
      else if (op1 != 0 && TREE_CONSTANT (op1))
        *conp = op1, neg_conp_p = neg1_p, op1 = 0;

      /* If we haven't dealt with either operand, this is not a case we can
         decompose.  Otherwise, VAR is either of the ones remaining, if any.  */
      if (op0 != 0 && op1 != 0)
        var = in;
      else if (op0 != 0)
        var = op0;
      else
        var = op1, neg_var_p = neg1_p;

      /* Now do any needed negations.  */
      if (neg_litp_p)
        *minus_litp = *litp, *litp = 0;
      if (neg_conp_p)
        *conp = negate_expr (*conp);
      if (neg_var_p)
        var = negate_expr (var);
    }
  else if (TREE_CODE (in) == BIT_NOT_EXPR
           && code == PLUS_EXPR)
    {
      /* -X - 1 is folded to ~X, undo that here.  */
      *minus_litp = build_one_cst (TREE_TYPE (in));
      var = negate_expr (TREE_OPERAND (in, 0));
    }
  else if (TREE_CONSTANT (in))
    *conp = in;
  else
    var = in;

  if (negate_p)
    {
      if (*litp)
        *minus_litp = *litp, *litp = 0;
      else if (*minus_litp)
        *litp = *minus_litp, *minus_litp = 0;
      *conp = negate_expr (*conp);
      var = negate_expr (var);
    }

  return var;
}
/* Re-associate trees split by the above function.  T1 and T2 are
   either expressions to associate or null.  Return the new
   expression, if any.  LOC is the location of the new expression.  If
   we build an operation, do it in TYPE and with CODE.  */

static tree
associate_trees (location_t loc, tree t1, tree t2, enum tree_code code, tree type)
{
  if (t1 == 0)
    return t2;
  else if (t2 == 0)
    return t1;

  /* If either input is CODE, a PLUS_EXPR, or a MINUS_EXPR, don't
     try to fold this since we will have infinite recursion.  But do
     deal with any NEGATE_EXPRs.  */
  if (TREE_CODE (t1) == code || TREE_CODE (t2) == code
      || TREE_CODE (t1) == MINUS_EXPR || TREE_CODE (t2) == MINUS_EXPR)
    {
      if (code == PLUS_EXPR)
        {
          if (TREE_CODE (t1) == NEGATE_EXPR)
            return build2_loc (loc, MINUS_EXPR, type,
                               fold_convert_loc (loc, type, t2),
                               fold_convert_loc (loc, type,
                                                 TREE_OPERAND (t1, 0)));
          else if (TREE_CODE (t2) == NEGATE_EXPR)
            return build2_loc (loc, MINUS_EXPR, type,
                               fold_convert_loc (loc, type, t1),
                               fold_convert_loc (loc, type,
                                                 TREE_OPERAND (t2, 0)));
          else if (integer_zerop (t2))
            return fold_convert_loc (loc, type, t1);
        }
      else if (code == MINUS_EXPR)
        {
          if (integer_zerop (t2))
            return fold_convert_loc (loc, type, t1);
        }

      return build2_loc (loc, code, type, fold_convert_loc (loc, type, t1),
                         fold_convert_loc (loc, type, t2));
    }

  return fold_build2_loc (loc, code, type, fold_convert_loc (loc, type, t1),
                          fold_convert_loc (loc, type, t2));
}
/* Check whether TYPE1 and TYPE2 are equivalent integer types, suitable
   for use in int_const_binop, size_binop and size_diffop.  */
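/* Note that for the shift and rotate codes handled below the two
   operand types need not agree (a sizetype value shifted by a plain
   int is fine), which is why those codes return true unconditionally.  */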
static bool
int_binop_types_match_p (enum tree_code code, const_tree type1, const_tree type2)
{
  if (!INTEGRAL_TYPE_P (type1) && !POINTER_TYPE_P (type1))
    return false;
  if (!INTEGRAL_TYPE_P (type2) && !POINTER_TYPE_P (type2))
    return false;

  switch (code)
    {
    case LSHIFT_EXPR:
    case RSHIFT_EXPR:
    case LROTATE_EXPR:
    case RROTATE_EXPR:
      return true;

    default:
      break;
    }

  return TYPE_UNSIGNED (type1) == TYPE_UNSIGNED (type2)
         && TYPE_PRECISION (type1) == TYPE_PRECISION (type2)
         && TYPE_MODE (type1) == TYPE_MODE (type2);
}
/* Combine two integer constants ARG1 and ARG2 under operation CODE
   to produce a new constant.  Return NULL_TREE if we don't know how
   to evaluate CODE at compile-time.  */
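/* For example, combining the INTEGER_CSTs 2 and 3 under PLUS_EXPR
   yields the INTEGER_CST 5; the division and modulo codes instead
   return NULL_TREE when ARG2 is zero.  */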
static tree
int_const_binop_1 (enum tree_code code, const_tree arg1, const_tree parg2,
                   int overflowable)
{
  wide_int res;
  tree t;
  tree type = TREE_TYPE (arg1);
  signop sign = TYPE_SIGN (type);
  bool overflow = false;

  wide_int arg2 = wide_int::from (parg2, TYPE_PRECISION (type),
                                  TYPE_SIGN (TREE_TYPE (parg2)));

  switch (code)
    {
    case BIT_IOR_EXPR:
      res = wi::bit_or (arg1, arg2);
      break;

    case BIT_XOR_EXPR:
      res = wi::bit_xor (arg1, arg2);
      break;

    case BIT_AND_EXPR:
      res = wi::bit_and (arg1, arg2);
      break;

    case RSHIFT_EXPR:
    case LSHIFT_EXPR:
      if (wi::neg_p (arg2))
        {
          arg2 = -arg2;
          if (code == RSHIFT_EXPR)
            code = LSHIFT_EXPR;
          else
            code = RSHIFT_EXPR;
        }

      if (code == RSHIFT_EXPR)
        /* It's unclear from the C standard whether shifts can overflow.
           The following code ignores overflow; perhaps a C standard
           interpretation ruling is needed.  */
        res = wi::rshift (arg1, arg2, sign);
      else
        res = wi::lshift (arg1, arg2);
      break;

    case RROTATE_EXPR:
    case LROTATE_EXPR:
      if (wi::neg_p (arg2))
        {
          arg2 = -arg2;
          if (code == RROTATE_EXPR)
            code = LROTATE_EXPR;
          else
            code = RROTATE_EXPR;
        }

      if (code == RROTATE_EXPR)
        res = wi::rrotate (arg1, arg2);
      else
        res = wi::lrotate (arg1, arg2);
      break;

    case PLUS_EXPR:
      res = wi::add (arg1, arg2, sign, &overflow);
      break;

    case MINUS_EXPR:
      res = wi::sub (arg1, arg2, sign, &overflow);
      break;

    case MULT_EXPR:
      res = wi::mul (arg1, arg2, sign, &overflow);
      break;

    case MULT_HIGHPART_EXPR:
      res = wi::mul_high (arg1, arg2, sign);
      break;

    case TRUNC_DIV_EXPR:
    case EXACT_DIV_EXPR:
      if (arg2 == 0)
        return NULL_TREE;
      res = wi::div_trunc (arg1, arg2, sign, &overflow);
      break;

    case FLOOR_DIV_EXPR:
      if (arg2 == 0)
        return NULL_TREE;
      res = wi::div_floor (arg1, arg2, sign, &overflow);
      break;

    case CEIL_DIV_EXPR:
      if (arg2 == 0)
        return NULL_TREE;
      res = wi::div_ceil (arg1, arg2, sign, &overflow);
      break;

    case ROUND_DIV_EXPR:
      if (arg2 == 0)
        return NULL_TREE;
      res = wi::div_round (arg1, arg2, sign, &overflow);
      break;

    case TRUNC_MOD_EXPR:
      if (arg2 == 0)
        return NULL_TREE;
      res = wi::mod_trunc (arg1, arg2, sign, &overflow);
      break;

    case FLOOR_MOD_EXPR:
      if (arg2 == 0)
        return NULL_TREE;
      res = wi::mod_floor (arg1, arg2, sign, &overflow);
      break;

    case CEIL_MOD_EXPR:
      if (arg2 == 0)
        return NULL_TREE;
      res = wi::mod_ceil (arg1, arg2, sign, &overflow);
      break;

    case ROUND_MOD_EXPR:
      if (arg2 == 0)
        return NULL_TREE;
      res = wi::mod_round (arg1, arg2, sign, &overflow);
      break;

    case MIN_EXPR:
      res = wi::min (arg1, arg2, sign);
      break;

    case MAX_EXPR:
      res = wi::max (arg1, arg2, sign);
      break;

    default:
      return NULL_TREE;
    }

  t = force_fit_type (type, res, overflowable,
                      (((sign == SIGNED || overflowable == -1)
                        && overflow)
                       | TREE_OVERFLOW (arg1) | TREE_OVERFLOW (parg2)));

  return t;
}
tree
int_const_binop (enum tree_code code, const_tree arg1, const_tree arg2)
{
  return int_const_binop_1 (code, arg1, arg2, 1);
}
/* Combine two constants ARG1 and ARG2 under operation CODE to produce a new
   constant.  We assume ARG1 and ARG2 have the same data type, or at least
   are the same kind of constant and the same machine mode.  Return zero if
   combining the constants is not allowed in the current operating mode.  */

static tree
const_binop (enum tree_code code, tree arg1, tree arg2)
{
  /* Sanity check for the recursive cases.  */
  if (!arg1 || !arg2)
    return NULL_TREE;

  STRIP_NOPS (arg1);
  STRIP_NOPS (arg2);

  if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg2) == INTEGER_CST)
    {
      if (code == POINTER_PLUS_EXPR)
        return int_const_binop (PLUS_EXPR,
                                arg1, fold_convert (TREE_TYPE (arg1), arg2));

      return int_const_binop (code, arg1, arg2);
    }

  if (TREE_CODE (arg1) == REAL_CST && TREE_CODE (arg2) == REAL_CST)
    {
      machine_mode mode;
      REAL_VALUE_TYPE d1;
      REAL_VALUE_TYPE d2;
      REAL_VALUE_TYPE value;
      REAL_VALUE_TYPE result;
      bool inexact;
      tree t, type;

      /* The following codes are handled by real_arithmetic.  */
      switch (code)
        {
        case PLUS_EXPR:
        case MINUS_EXPR:
        case MULT_EXPR:
        case RDIV_EXPR:
        case MIN_EXPR:
        case MAX_EXPR:
          break;

        default:
          return NULL_TREE;
        }

      d1 = TREE_REAL_CST (arg1);
      d2 = TREE_REAL_CST (arg2);

      type = TREE_TYPE (arg1);
      mode = TYPE_MODE (type);

      /* Don't perform operation if we honor signaling NaNs and
         either operand is a NaN.  */
      if (HONOR_SNANS (mode)
          && (REAL_VALUE_ISNAN (d1) || REAL_VALUE_ISNAN (d2)))
        return NULL_TREE;

      /* Don't perform operation if it would raise a division
         by zero exception.  */
      if (code == RDIV_EXPR
          && real_equal (&d2, &dconst0)
          && (flag_trapping_math || ! MODE_HAS_INFINITIES (mode)))
        return NULL_TREE;

      /* If either operand is a NaN, just return it.  Otherwise, set up
         for floating-point trap; we return an overflow.  */
      if (REAL_VALUE_ISNAN (d1))
        return arg1;
      else if (REAL_VALUE_ISNAN (d2))
        return arg2;

      inexact = real_arithmetic (&value, code, &d1, &d2);
      real_convert (&result, mode, &value);

      /* Don't constant fold this floating point operation if
         the result has overflowed and flag_trapping_math.  */
      if (flag_trapping_math
          && MODE_HAS_INFINITIES (mode)
          && REAL_VALUE_ISINF (result)
          && !REAL_VALUE_ISINF (d1)
          && !REAL_VALUE_ISINF (d2))
        return NULL_TREE;

      /* Don't constant fold this floating point operation if the
         result may dependent upon the run-time rounding mode and
         flag_rounding_math is set, or if GCC's software emulation
         is unable to accurately represent the result.  */
      if ((flag_rounding_math
           || (MODE_COMPOSITE_P (mode) && !flag_unsafe_math_optimizations))
          && (inexact || !real_identical (&result, &value)))
        return NULL_TREE;

      t = build_real (type, result);

      TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2);
      return t;
    }

  if (TREE_CODE (arg1) == FIXED_CST)
    {
      FIXED_VALUE_TYPE f1;
      FIXED_VALUE_TYPE f2;
      FIXED_VALUE_TYPE result;
      tree t, type;
      int sat_p;
      bool overflow_p;

      /* The following codes are handled by fixed_arithmetic.  */
      switch (code)
        {
        case PLUS_EXPR:
        case MINUS_EXPR:
        case MULT_EXPR:
        case TRUNC_DIV_EXPR:
          if (TREE_CODE (arg2) != FIXED_CST)
            return NULL_TREE;
          f2 = TREE_FIXED_CST (arg2);
          break;

        case LSHIFT_EXPR:
        case RSHIFT_EXPR:
          {
            if (TREE_CODE (arg2) != INTEGER_CST)
              return NULL_TREE;
            wide_int w2 = arg2;
            f2.data.high = w2.elt (1);
            f2.data.low = w2.elt (0);
            f2.mode = SImode;
          }
          break;

        default:
          return NULL_TREE;
        }

      f1 = TREE_FIXED_CST (arg1);
      type = TREE_TYPE (arg1);
      sat_p = TYPE_SATURATING (type);
      overflow_p = fixed_arithmetic (&result, code, &f1, &f2, sat_p);
      t = build_fixed (type, result);
      /* Propagate overflow flags.  */
      if (overflow_p | TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2))
        TREE_OVERFLOW (t) = 1;
      return t;
    }

  if (TREE_CODE (arg1) == COMPLEX_CST && TREE_CODE (arg2) == COMPLEX_CST)
    {
      tree type = TREE_TYPE (arg1);
      tree r1 = TREE_REALPART (arg1);
      tree i1 = TREE_IMAGPART (arg1);
      tree r2 = TREE_REALPART (arg2);
      tree i2 = TREE_IMAGPART (arg2);
      tree real, imag;

      switch (code)
        {
        case PLUS_EXPR:
        case MINUS_EXPR:
          real = const_binop (code, r1, r2);
          imag = const_binop (code, i1, i2);
          break;

        case MULT_EXPR:
          if (COMPLEX_FLOAT_TYPE_P (type))
            return do_mpc_arg2 (arg1, arg2, type,
                                /* do_nonfinite= */ folding_initializer,
                                mpc_mul);

          real = const_binop (MINUS_EXPR,
                              const_binop (MULT_EXPR, r1, r2),
                              const_binop (MULT_EXPR, i1, i2));
          imag = const_binop (PLUS_EXPR,
                              const_binop (MULT_EXPR, r1, i2),
                              const_binop (MULT_EXPR, i1, r2));
          break;

        case RDIV_EXPR:
          if (COMPLEX_FLOAT_TYPE_P (type))
            return do_mpc_arg2 (arg1, arg2, type,
                                /* do_nonfinite= */ folding_initializer,
                                mpc_div);
          /* Fallthru ... */
        case TRUNC_DIV_EXPR:
        case CEIL_DIV_EXPR:
        case FLOOR_DIV_EXPR:
        case ROUND_DIV_EXPR:
          if (flag_complex_method == 0)
            {
              /* Keep this algorithm in sync with
                 tree-complex.c:expand_complex_div_straight().

                 Expand complex division to scalars, straightforward algorithm.
                 a / b = ((ar*br + ai*bi)/t) + i((ai*br - ar*bi)/t)
                 t = br*br + bi*bi  */
              tree magsquared
                = const_binop (PLUS_EXPR,
                               const_binop (MULT_EXPR, r2, r2),
                               const_binop (MULT_EXPR, i2, i2));
              tree t1
                = const_binop (PLUS_EXPR,
                               const_binop (MULT_EXPR, r1, r2),
                               const_binop (MULT_EXPR, i1, i2));
              tree t2
                = const_binop (MINUS_EXPR,
                               const_binop (MULT_EXPR, i1, r2),
                               const_binop (MULT_EXPR, r1, i2));

              real = const_binop (code, t1, magsquared);
              imag = const_binop (code, t2, magsquared);
            }
          else
            {
              /* Keep this algorithm in sync with
                 tree-complex.c:expand_complex_div_wide().

                 Expand complex division to scalars, modified algorithm to
                 minimize overflow with wide input ranges.  */
              tree compare = fold_build2 (LT_EXPR, boolean_type_node,
                                          fold_abs_const (r2, TREE_TYPE (type)),
                                          fold_abs_const (i2, TREE_TYPE (type)));

              if (integer_nonzerop (compare))
                {
                  /* In the TRUE branch, we compute
                     ratio = br/bi;
                     div = (br * ratio) + bi;
                     tr = (ar * ratio) + ai;
                     ti = (ai * ratio) - ar;
                     tr = tr / div;
                     ti = ti / div;  */
                  tree ratio = const_binop (code, r2, i2);
                  tree div = const_binop (PLUS_EXPR, i2,
                                          const_binop (MULT_EXPR, r2, ratio));
                  real = const_binop (MULT_EXPR, r1, ratio);
                  real = const_binop (PLUS_EXPR, real, i1);
                  real = const_binop (code, real, div);

                  imag = const_binop (MULT_EXPR, i1, ratio);
                  imag = const_binop (MINUS_EXPR, imag, r1);
                  imag = const_binop (code, imag, div);
                }
              else
                {
                  /* In the FALSE branch, we compute
                     ratio = bi/br;
                     div = (bi * ratio) + br;
                     tr = (ai * ratio) + ar;
                     ti = ai - (ar * ratio);
                     tr = tr / div;
                     ti = ti / div;  */
                  tree ratio = const_binop (code, i2, r2);
                  tree div = const_binop (PLUS_EXPR, r2,
                                          const_binop (MULT_EXPR, i2, ratio));

                  real = const_binop (MULT_EXPR, i1, ratio);
                  real = const_binop (PLUS_EXPR, real, r1);
                  real = const_binop (code, real, div);

                  imag = const_binop (MULT_EXPR, r1, ratio);
                  imag = const_binop (MINUS_EXPR, i1, imag);
                  imag = const_binop (code, imag, div);
                }
            }
          break;

        default:
          return NULL_TREE;
        }

      if (real && imag)
        return build_complex (type, real, imag);
    }

  if (TREE_CODE (arg1) == VECTOR_CST
      && TREE_CODE (arg2) == VECTOR_CST)
    {
      tree type = TREE_TYPE (arg1);
      int count = TYPE_VECTOR_SUBPARTS (type), i;
      tree *elts = XALLOCAVEC (tree, count);

      for (i = 0; i < count; i++)
        {
          tree elem1 = VECTOR_CST_ELT (arg1, i);
          tree elem2 = VECTOR_CST_ELT (arg2, i);

          elts[i] = const_binop (code, elem1, elem2);

          /* It is possible that const_binop cannot handle the given
             code and returns NULL_TREE.  */
          if (elts[i] == NULL_TREE)
            return NULL_TREE;
        }

      return build_vector (type, elts);
    }

  /* Shifts allow a scalar offset for a vector.  */
  if (TREE_CODE (arg1) == VECTOR_CST
      && TREE_CODE (arg2) == INTEGER_CST)
    {
      tree type = TREE_TYPE (arg1);
      int count = TYPE_VECTOR_SUBPARTS (type), i;
      tree *elts = XALLOCAVEC (tree, count);

      for (i = 0; i < count; i++)
        {
          tree elem1 = VECTOR_CST_ELT (arg1, i);

          elts[i] = const_binop (code, elem1, arg2);

          /* It is possible that const_binop cannot handle the given
             code and returns NULL_TREE.  */
          if (elts[i] == NULL_TREE)
            return NULL_TREE;
        }

      return build_vector (type, elts);
    }
  return NULL_TREE;
}
/* Overload that adds a TYPE parameter to be able to dispatch
   to fold_relational_const.  */

tree
const_binop (enum tree_code code, tree type, tree arg1, tree arg2)
{
  if (TREE_CODE_CLASS (code) == tcc_comparison)
    return fold_relational_const (code, type, arg1, arg2);

  /* ??? Until we make the const_binop worker take the type of the
     result as argument put those cases that need it here.  */
  switch (code)
    {
    case COMPLEX_EXPR:
      if ((TREE_CODE (arg1) == REAL_CST
           && TREE_CODE (arg2) == REAL_CST)
          || (TREE_CODE (arg1) == INTEGER_CST
              && TREE_CODE (arg2) == INTEGER_CST))
        return build_complex (type, arg1, arg2);
      return NULL_TREE;

    case VEC_PACK_TRUNC_EXPR:
    case VEC_PACK_FIX_TRUNC_EXPR:
      {
        unsigned int nelts = TYPE_VECTOR_SUBPARTS (type), i;
        tree *elts;

        gcc_assert (TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg1)) == nelts / 2
                    && TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg2)) == nelts / 2);
        if (TREE_CODE (arg1) != VECTOR_CST
            || TREE_CODE (arg2) != VECTOR_CST)
          return NULL_TREE;

        elts = XALLOCAVEC (tree, nelts);
        if (!vec_cst_ctor_to_array (arg1, elts)
            || !vec_cst_ctor_to_array (arg2, elts + nelts / 2))
          return NULL_TREE;

        for (i = 0; i < nelts; i++)
          {
            elts[i] = fold_convert_const (code == VEC_PACK_TRUNC_EXPR
                                          ? NOP_EXPR : FIX_TRUNC_EXPR,
                                          TREE_TYPE (type), elts[i]);
            if (elts[i] == NULL_TREE || !CONSTANT_CLASS_P (elts[i]))
              return NULL_TREE;
          }

        return build_vector (type, elts);
      }

    case VEC_WIDEN_MULT_LO_EXPR:
    case VEC_WIDEN_MULT_HI_EXPR:
    case VEC_WIDEN_MULT_EVEN_EXPR:
    case VEC_WIDEN_MULT_ODD_EXPR:
      {
        unsigned int nelts = TYPE_VECTOR_SUBPARTS (type);
        unsigned int out, ofs, scale;
        tree *elts;

        gcc_assert (TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg1)) == nelts * 2
                    && TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg2)) == nelts * 2);
        if (TREE_CODE (arg1) != VECTOR_CST || TREE_CODE (arg2) != VECTOR_CST)
          return NULL_TREE;

        elts = XALLOCAVEC (tree, nelts * 4);
        if (!vec_cst_ctor_to_array (arg1, elts)
            || !vec_cst_ctor_to_array (arg2, elts + nelts * 2))
          return NULL_TREE;

        if (code == VEC_WIDEN_MULT_LO_EXPR)
          scale = 0, ofs = BYTES_BIG_ENDIAN ? nelts : 0;
        else if (code == VEC_WIDEN_MULT_HI_EXPR)
          scale = 0, ofs = BYTES_BIG_ENDIAN ? 0 : nelts;
        else if (code == VEC_WIDEN_MULT_EVEN_EXPR)
          scale = 1, ofs = 0;
        else /* if (code == VEC_WIDEN_MULT_ODD_EXPR) */
          scale = 1, ofs = 1;

        for (out = 0; out < nelts; out++)
          {
            unsigned int in1 = (out << scale) + ofs;
            unsigned int in2 = in1 + nelts * 2;
            tree t1, t2;

            t1 = fold_convert_const (NOP_EXPR, TREE_TYPE (type), elts[in1]);
            t2 = fold_convert_const (NOP_EXPR, TREE_TYPE (type), elts[in2]);

            if (t1 == NULL_TREE || t2 == NULL_TREE)
              return NULL_TREE;
            elts[out] = const_binop (MULT_EXPR, t1, t2);
            if (elts[out] == NULL_TREE || !CONSTANT_CLASS_P (elts[out]))
              return NULL_TREE;
          }

        return build_vector (type, elts);
      }

    default:;
    }

  if (TREE_CODE_CLASS (code) != tcc_binary)
    return NULL_TREE;

  /* Make sure type and arg0 have the same saturating flag.  */
  gcc_checking_assert (TYPE_SATURATING (type)
                       == TYPE_SATURATING (TREE_TYPE (arg1)));

  return const_binop (code, arg1, arg2);
}
/* Compute CODE ARG0 with resulting type TYPE with ARG0 being constant.
   Return zero if computing the constants is not possible.  */
tree
const_unop (enum tree_code code, tree type, tree arg0)
{
  switch (code)
    {
    CASE_CONVERT:
    case FLOAT_EXPR:
    case FIX_TRUNC_EXPR:
    case FIXED_CONVERT_EXPR:
      return fold_convert_const (code, type, arg0);

    case ADDR_SPACE_CONVERT_EXPR:
      if (integer_zerop (arg0))
        return fold_convert_const (code, type, arg0);
      break;

    case VIEW_CONVERT_EXPR:
      return fold_view_convert_expr (type, arg0);

    case NEGATE_EXPR:
      {
        /* Can't call fold_negate_const directly here as that doesn't
           handle all cases and we might not be able to negate some
           constants.  */
        tree tem = fold_negate_expr (UNKNOWN_LOCATION, arg0);
        if (tem && CONSTANT_CLASS_P (tem))
          return tem;
        break;
      }

    case ABS_EXPR:
      if (TREE_CODE (arg0) == INTEGER_CST || TREE_CODE (arg0) == REAL_CST)
        return fold_abs_const (arg0, type);
      break;

    case CONJ_EXPR:
      if (TREE_CODE (arg0) == COMPLEX_CST)
        {
          tree ipart = fold_negate_const (TREE_IMAGPART (arg0),
                                          TREE_TYPE (type));
          return build_complex (type, TREE_REALPART (arg0), ipart);
        }
      break;

    case BIT_NOT_EXPR:
      if (TREE_CODE (arg0) == INTEGER_CST)
        return fold_not_const (arg0, type);
      /* Perform BIT_NOT_EXPR on each element individually.  */
      else if (TREE_CODE (arg0) == VECTOR_CST)
        {
          tree *elements;
          tree elem;
          unsigned count = VECTOR_CST_NELTS (arg0), i;

          elements = XALLOCAVEC (tree, count);
          for (i = 0; i < count; i++)
            {
              elem = VECTOR_CST_ELT (arg0, i);
              elem = const_unop (BIT_NOT_EXPR, TREE_TYPE (type), elem);
              if (elem == NULL_TREE)
                break;
              elements[i] = elem;
            }
          if (i == count)
            return build_vector (type, elements);
        }
      break;

    case TRUTH_NOT_EXPR:
      if (TREE_CODE (arg0) == INTEGER_CST)
        return constant_boolean_node (integer_zerop (arg0), type);
      break;

    case REALPART_EXPR:
      if (TREE_CODE (arg0) == COMPLEX_CST)
        return fold_convert (type, TREE_REALPART (arg0));
      break;

    case IMAGPART_EXPR:
      if (TREE_CODE (arg0) == COMPLEX_CST)
        return fold_convert (type, TREE_IMAGPART (arg0));
      break;

    case VEC_UNPACK_LO_EXPR:
    case VEC_UNPACK_HI_EXPR:
    case VEC_UNPACK_FLOAT_LO_EXPR:
    case VEC_UNPACK_FLOAT_HI_EXPR:
      {
        unsigned int nelts = TYPE_VECTOR_SUBPARTS (type), i;
        tree *elts;
        enum tree_code subcode;

        gcc_assert (TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0)) == nelts * 2);
        if (TREE_CODE (arg0) != VECTOR_CST)
          return NULL_TREE;

        elts = XALLOCAVEC (tree, nelts * 2);
        if (!vec_cst_ctor_to_array (arg0, elts))
          return NULL_TREE;

        if ((!BYTES_BIG_ENDIAN) ^ (code == VEC_UNPACK_LO_EXPR
                                   || code == VEC_UNPACK_FLOAT_LO_EXPR))
          elts += nelts;

        if (code == VEC_UNPACK_LO_EXPR || code == VEC_UNPACK_HI_EXPR)
          subcode = NOP_EXPR;
        else
          subcode = FLOAT_EXPR;

        for (i = 0; i < nelts; i++)
          {
            elts[i] = fold_convert_const (subcode, TREE_TYPE (type), elts[i]);
            if (elts[i] == NULL_TREE || !CONSTANT_CLASS_P (elts[i]))
              return NULL_TREE;
          }

        return build_vector (type, elts);
      }

    case REDUC_MIN_EXPR:
    case REDUC_MAX_EXPR:
    case REDUC_PLUS_EXPR:
      {
        unsigned int nelts, i;
        tree *elts;
        enum tree_code subcode;

        if (TREE_CODE (arg0) != VECTOR_CST)
          return NULL_TREE;
        nelts = TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0));

        elts = XALLOCAVEC (tree, nelts);
        if (!vec_cst_ctor_to_array (arg0, elts))
          return NULL_TREE;

        switch (code)
          {
          case REDUC_MIN_EXPR: subcode = MIN_EXPR; break;
          case REDUC_MAX_EXPR: subcode = MAX_EXPR; break;
          case REDUC_PLUS_EXPR: subcode = PLUS_EXPR; break;
          default: gcc_unreachable ();
          }

        for (i = 1; i < nelts; i++)
          {
            elts[0] = const_binop (subcode, elts[0], elts[i]);
            if (elts[0] == NULL_TREE || !CONSTANT_CLASS_P (elts[0]))
              return NULL_TREE;
          }

        return elts[0];
      }

    default:
      break;
    }

  return NULL_TREE;
}
/* Create a sizetype INT_CST node with NUMBER sign extended.  KIND
   indicates which particular sizetype to create.  */

tree
size_int_kind (HOST_WIDE_INT number, enum size_type_kind kind)
{
  return build_int_cst (sizetype_tab[(int) kind], number);
}
/* Combine operands ARG0 and ARG1 with arithmetic operation CODE.  CODE
   is a tree code.  The type of the result is taken from the operands.
   Both must be equivalent integer types, ala int_binop_types_match_p.
   If the operands are constant, so is the result.  */
tree
size_binop_loc (location_t loc, enum tree_code code, tree arg0, tree arg1)
{
  tree type = TREE_TYPE (arg0);

  if (arg0 == error_mark_node || arg1 == error_mark_node)
    return error_mark_node;

  gcc_assert (int_binop_types_match_p (code, TREE_TYPE (arg0),
                                       TREE_TYPE (arg1)));

  /* Handle the special case of two integer constants faster.  */
  if (TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
    {
      /* And some specific cases even faster than that.  */
      if (code == PLUS_EXPR)
        {
          if (integer_zerop (arg0) && !TREE_OVERFLOW (arg0))
            return arg1;
          if (integer_zerop (arg1) && !TREE_OVERFLOW (arg1))
            return arg0;
        }
      else if (code == MINUS_EXPR)
        {
          if (integer_zerop (arg1) && !TREE_OVERFLOW (arg1))
            return arg0;
        }
      else if (code == MULT_EXPR)
        {
          if (integer_onep (arg0) && !TREE_OVERFLOW (arg0))
            return arg1;
        }

      /* Handle general case of two integer constants.  For sizetype
         constant calculations we always want to know about overflow,
         even in the unsigned case.  */
      return int_const_binop_1 (code, arg0, arg1, -1);
    }

  return fold_build2_loc (loc, code, type, arg0, arg1);
}
/* Given two values, either both of sizetype or both of bitsizetype,
   compute the difference between the two values.  Return the value
   in signed type corresponding to the type of the operands.  */
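/* For instance, given the sizetype constants 4 and 8 this returns the
   ssizetype constant -4 rather than a huge unsigned value.  */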
tree
size_diffop_loc (location_t loc, tree arg0, tree arg1)
{
  tree type = TREE_TYPE (arg0);
  tree ctype;

  gcc_assert (int_binop_types_match_p (MINUS_EXPR, TREE_TYPE (arg0),
                                       TREE_TYPE (arg1)));

  /* If the type is already signed, just do the simple thing.  */
  if (!TYPE_UNSIGNED (type))
    return size_binop_loc (loc, MINUS_EXPR, arg0, arg1);

  if (type == sizetype)
    ctype = ssizetype;
  else if (type == bitsizetype)
    ctype = sbitsizetype;
  else
    ctype = signed_type_for (type);

  /* If either operand is not a constant, do the conversions to the signed
     type and subtract.  The hardware will do the right thing with any
     overflow in the subtraction.  */
  if (TREE_CODE (arg0) != INTEGER_CST || TREE_CODE (arg1) != INTEGER_CST)
    return size_binop_loc (loc, MINUS_EXPR,
                           fold_convert_loc (loc, ctype, arg0),
                           fold_convert_loc (loc, ctype, arg1));

  /* If ARG0 is larger than ARG1, subtract and return the result in CTYPE.
     Otherwise, subtract the other way, convert to CTYPE (we know that can't
     overflow) and negate (which can't either).  Special-case a result
     of zero while we're here.  */
  if (tree_int_cst_equal (arg0, arg1))
    return build_int_cst (ctype, 0);
  else if (tree_int_cst_lt (arg1, arg0))
    return fold_convert_loc (loc, ctype,
                             size_binop_loc (loc, MINUS_EXPR, arg0, arg1));
  else
    return size_binop_loc (loc, MINUS_EXPR, build_int_cst (ctype, 0),
                           fold_convert_loc (loc, ctype,
                                             size_binop_loc (loc,
                                                             MINUS_EXPR,
                                                             arg1, arg0)));
}
/* A subroutine of fold_convert_const handling conversions of an
   INTEGER_CST to another integer type.  */

static tree
fold_convert_const_int_from_int (tree type, const_tree arg1)
{
  /* Given an integer constant, make new constant with new type,
     appropriately sign-extended or truncated.  Use widest_int
     so that any extension is done according to ARG1's type.  */
  return force_fit_type (type, wi::to_widest (arg1),
                         !POINTER_TYPE_P (TREE_TYPE (arg1)),
                         TREE_OVERFLOW (arg1));
}
/* A subroutine of fold_convert_const handling conversions of a REAL_CST
   to an integer type.  */

static tree
fold_convert_const_int_from_real (enum tree_code code, tree type, const_tree arg1)
{
  bool overflow = false;
  tree t;

  /* The following code implements the floating point to integer
     conversion rules required by the Java Language Specification,
     that IEEE NaNs are mapped to zero and values that overflow
     the target precision saturate, i.e. values greater than
     INT_MAX are mapped to INT_MAX, and values less than INT_MIN
     are mapped to INT_MIN.  These semantics are allowed by the
     C and C++ standards that simply state that the behavior of
     FP-to-integer conversion is unspecified upon overflow.  */
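  /* Concretely: (int) NaN folds to 0, (int) 1e30 folds to INT_MAX and
     (int) -1e30 folds to INT_MIN, in each case with TREE_OVERFLOW set
     on the result.  */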
  wide_int val;
  REAL_VALUE_TYPE r;
  REAL_VALUE_TYPE x = TREE_REAL_CST (arg1);

  switch (code)
    {
    case FIX_TRUNC_EXPR:
      real_trunc (&r, VOIDmode, &x);
      break;

    default:
      gcc_unreachable ();
    }

  /* If R is NaN, return zero and show we have an overflow.  */
  if (REAL_VALUE_ISNAN (r))
    {
      overflow = true;
      val = wi::zero (TYPE_PRECISION (type));
    }

  /* See if R is less than the lower bound or greater than the
     upper bound.  */

  if (! overflow)
    {
      tree lt = TYPE_MIN_VALUE (type);
      REAL_VALUE_TYPE l = real_value_from_int_cst (NULL_TREE, lt);
      if (real_less (&r, &l))
        {
          overflow = true;
          val = lt;
        }
    }

  if (! overflow)
    {
      tree ut = TYPE_MAX_VALUE (type);
      if (ut)
        {
          REAL_VALUE_TYPE u = real_value_from_int_cst (NULL_TREE, ut);
          if (real_less (&u, &r))
            {
              overflow = true;
              val = ut;
            }
        }
    }

  if (! overflow)
    val = real_to_integer (&r, &overflow, TYPE_PRECISION (type));

  t = force_fit_type (type, val, -1, overflow | TREE_OVERFLOW (arg1));
  return t;
}
/* A subroutine of fold_convert_const handling conversions of a
   FIXED_CST to an integer type.  */

static tree
fold_convert_const_int_from_fixed (tree type, const_tree arg1)
{
  tree t;
  double_int temp, temp_trunc;
  unsigned int mode;

  /* Right shift FIXED_CST to temp by fbit.  */
  temp = TREE_FIXED_CST (arg1).data;
  mode = TREE_FIXED_CST (arg1).mode;
  if (GET_MODE_FBIT (mode) < HOST_BITS_PER_DOUBLE_INT)
    {
      temp = temp.rshift (GET_MODE_FBIT (mode),
                          HOST_BITS_PER_DOUBLE_INT,
                          SIGNED_FIXED_POINT_MODE_P (mode));

      /* Left shift temp to temp_trunc by fbit.  */
      temp_trunc = temp.lshift (GET_MODE_FBIT (mode),
                                HOST_BITS_PER_DOUBLE_INT,
                                SIGNED_FIXED_POINT_MODE_P (mode));
    }
  else
    {
      temp = double_int_zero;
      temp_trunc = double_int_zero;
    }

  /* If FIXED_CST is negative, we need to round the value toward 0.
     We do so by checking whether the fractional bits are nonzero,
     and if they are, adding 1 to TEMP.  */
  if (SIGNED_FIXED_POINT_MODE_P (mode)
      && temp_trunc.is_negative ()
      && TREE_FIXED_CST (arg1).data != temp_trunc)
    temp += double_int_one;

  /* Given a fixed-point constant, make new constant with new type,
     appropriately sign-extended or truncated.  */
  t = force_fit_type (type, temp, -1,
                      (temp.is_negative ()
                       && (TYPE_UNSIGNED (type)
                           < TYPE_UNSIGNED (TREE_TYPE (arg1))))
                      | TREE_OVERFLOW (arg1));

  return t;
}
1958 /* A subroutine of fold_convert_const handling conversions a REAL_CST
1959 to another floating point type. */
1961 static tree
1962 fold_convert_const_real_from_real (tree type, const_tree arg1)
1964 REAL_VALUE_TYPE value;
1965 tree t;
1967 real_convert (&value, TYPE_MODE (type), &TREE_REAL_CST (arg1));
1968 t = build_real (type, value);
1970 /* If converting an infinity or NAN to a representation that doesn't
1971 have one, set the overflow bit so that we can produce some kind of
1972 error message at the appropriate point if necessary. It's not the
1973 most user-friendly message, but it's better than nothing. */
1974 if (REAL_VALUE_ISINF (TREE_REAL_CST (arg1))
1975 && !MODE_HAS_INFINITIES (TYPE_MODE (type)))
1976 TREE_OVERFLOW (t) = 1;
1977 else if (REAL_VALUE_ISNAN (TREE_REAL_CST (arg1))
1978 && !MODE_HAS_NANS (TYPE_MODE (type)))
1979 TREE_OVERFLOW (t) = 1;
1980 /* Regular overflow: the conversion produced an infinity in a mode
1981 that can't represent infinities. */
1982 else if (!MODE_HAS_INFINITIES (TYPE_MODE (type))
1983 && REAL_VALUE_ISINF (value)
1984 && !REAL_VALUE_ISINF (TREE_REAL_CST (arg1)))
1985 TREE_OVERFLOW (t) = 1;
1986 else
1987 TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1);
1988 return t;
1991 /* A subroutine of fold_convert_const handling conversions of a FIXED_CST
1992 to a floating point type. */
1994 static tree
1995 fold_convert_const_real_from_fixed (tree type, const_tree arg1)
1997 REAL_VALUE_TYPE value;
1998 tree t;
2000 real_convert_from_fixed (&value, TYPE_MODE (type), &TREE_FIXED_CST (arg1));
2001 t = build_real (type, value);
2003 TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1);
2004 return t;
2007 /* A subroutine of fold_convert_const handling conversions of a FIXED_CST
2008 to another fixed-point type. */
2010 static tree
2011 fold_convert_const_fixed_from_fixed (tree type, const_tree arg1)
2013 FIXED_VALUE_TYPE value;
2014 tree t;
2015 bool overflow_p;
2017 overflow_p = fixed_convert (&value, TYPE_MODE (type), &TREE_FIXED_CST (arg1),
2018 TYPE_SATURATING (type));
2019 t = build_fixed (type, value);
2021 /* Propagate overflow flags. */
2022 if (overflow_p | TREE_OVERFLOW (arg1))
2023 TREE_OVERFLOW (t) = 1;
2024 return t;
2027 /* A subroutine of fold_convert_const handling conversions of an INTEGER_CST
2028 to a fixed-point type. */
2030 static tree
2031 fold_convert_const_fixed_from_int (tree type, const_tree arg1)
2033 FIXED_VALUE_TYPE value;
2034 tree t;
2035 bool overflow_p;
2036 double_int di;
2038 gcc_assert (TREE_INT_CST_NUNITS (arg1) <= 2);
2040 di.low = TREE_INT_CST_ELT (arg1, 0);
2041 if (TREE_INT_CST_NUNITS (arg1) == 1)
2042 di.high = (HOST_WIDE_INT) di.low < 0 ? (HOST_WIDE_INT) -1 : 0;
2043 else
2044 di.high = TREE_INT_CST_ELT (arg1, 1);
2046 overflow_p = fixed_convert_from_int (&value, TYPE_MODE (type), di,
2047 TYPE_UNSIGNED (TREE_TYPE (arg1)),
2048 TYPE_SATURATING (type));
2049 t = build_fixed (type, value);
2051 /* Propagate overflow flags. */
2052 if (overflow_p | TREE_OVERFLOW (arg1))
2053 TREE_OVERFLOW (t) = 1;
2054 return t;
2057 /* A subroutine of fold_convert_const handling conversions of a REAL_CST
2058 to a fixed-point type. */
2060 static tree
2061 fold_convert_const_fixed_from_real (tree type, const_tree arg1)
2063 FIXED_VALUE_TYPE value;
2064 tree t;
2065 bool overflow_p;
2067 overflow_p = fixed_convert_from_real (&value, TYPE_MODE (type),
2068 &TREE_REAL_CST (arg1),
2069 TYPE_SATURATING (type));
2070 t = build_fixed (type, value);
2072 /* Propagate overflow flags. */
2073 if (overflow_p | TREE_OVERFLOW (arg1))
2074 TREE_OVERFLOW (t) = 1;
2075 return t;
2078 /* Attempt to fold type conversion operation CODE of expression ARG1 to
2079 type TYPE. If no simplification can be done return NULL_TREE. */
2081 static tree
2082 fold_convert_const (enum tree_code code, tree type, tree arg1)
2084 if (TREE_TYPE (arg1) == type)
2085 return arg1;
2087 if (POINTER_TYPE_P (type) || INTEGRAL_TYPE_P (type)
2088 || TREE_CODE (type) == OFFSET_TYPE)
2090 if (TREE_CODE (arg1) == INTEGER_CST)
2091 return fold_convert_const_int_from_int (type, arg1);
2092 else if (TREE_CODE (arg1) == REAL_CST)
2093 return fold_convert_const_int_from_real (code, type, arg1);
2094 else if (TREE_CODE (arg1) == FIXED_CST)
2095 return fold_convert_const_int_from_fixed (type, arg1);
2097 else if (TREE_CODE (type) == REAL_TYPE)
2099 if (TREE_CODE (arg1) == INTEGER_CST)
2100 return build_real_from_int_cst (type, arg1);
2101 else if (TREE_CODE (arg1) == REAL_CST)
2102 return fold_convert_const_real_from_real (type, arg1);
2103 else if (TREE_CODE (arg1) == FIXED_CST)
2104 return fold_convert_const_real_from_fixed (type, arg1);
2106 else if (TREE_CODE (type) == FIXED_POINT_TYPE)
2108 if (TREE_CODE (arg1) == FIXED_CST)
2109 return fold_convert_const_fixed_from_fixed (type, arg1);
2110 else if (TREE_CODE (arg1) == INTEGER_CST)
2111 return fold_convert_const_fixed_from_int (type, arg1);
2112 else if (TREE_CODE (arg1) == REAL_CST)
2113 return fold_convert_const_fixed_from_real (type, arg1);
2115 return NULL_TREE;
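/* A minimal usage sketch (with a hypothetical ARG1 holding the REAL_CST
   2.5):

     tree t = fold_convert_const (FIX_TRUNC_EXPR, integer_type_node, arg1);

   dispatches to fold_convert_const_int_from_real and yields the
   INTEGER_CST 2; any unhandled type/constant combination yields
   NULL_TREE instead.  */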
2118 /* Construct a vector of zero elements of vector type TYPE. */
2120 static tree
2121 build_zero_vector (tree type)
2123 tree t;
2125 t = fold_convert_const (NOP_EXPR, TREE_TYPE (type), integer_zero_node);
2126 return build_vector_from_val (type, t);
2129 /* Returns true, if ARG is convertible to TYPE using a NOP_EXPR. */
2131 bool
2132 fold_convertible_p (const_tree type, const_tree arg)
2134 tree orig = TREE_TYPE (arg);
2136 if (type == orig)
2137 return true;
2139 if (TREE_CODE (arg) == ERROR_MARK
2140 || TREE_CODE (type) == ERROR_MARK
2141 || TREE_CODE (orig) == ERROR_MARK)
2142 return false;
2144 if (TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (orig))
2145 return true;
2147 switch (TREE_CODE (type))
2149 case INTEGER_TYPE: case ENUMERAL_TYPE: case BOOLEAN_TYPE:
2150 case POINTER_TYPE: case REFERENCE_TYPE:
2151 case OFFSET_TYPE:
2152 if (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig)
2153 || TREE_CODE (orig) == OFFSET_TYPE)
2154 return true;
2155 return (TREE_CODE (orig) == VECTOR_TYPE
2156 && tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (orig)));
2158 case REAL_TYPE:
2159 case FIXED_POINT_TYPE:
2160 case COMPLEX_TYPE:
2161 case VECTOR_TYPE:
2162 case VOID_TYPE:
2163 return TREE_CODE (type) == TREE_CODE (orig);
2165 default:
2166 return false;
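/* For example, a float expression is NOP-convertible to double (both are
   REAL_TYPE), while a double is not NOP-convertible to int: that
   conversion changes the representation and needs FIX_TRUNC_EXPR.  */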
2170 /* Convert expression ARG to type TYPE. Used by the middle-end for
2171 simple conversions in preference to calling the front-end's convert. */
2173 tree
2174 fold_convert_loc (location_t loc, tree type, tree arg)
2176 tree orig = TREE_TYPE (arg);
2177 tree tem;
2179 if (type == orig)
2180 return arg;
2182 if (TREE_CODE (arg) == ERROR_MARK
2183 || TREE_CODE (type) == ERROR_MARK
2184 || TREE_CODE (orig) == ERROR_MARK)
2185 return error_mark_node;
2187 switch (TREE_CODE (type))
2189 case POINTER_TYPE:
2190 case REFERENCE_TYPE:
2191 /* Handle conversions between pointers to different address spaces. */
2192 if (POINTER_TYPE_P (orig)
2193 && (TYPE_ADDR_SPACE (TREE_TYPE (type))
2194 != TYPE_ADDR_SPACE (TREE_TYPE (orig))))
2195 return fold_build1_loc (loc, ADDR_SPACE_CONVERT_EXPR, type, arg);
2196 /* fall through */
2198 case INTEGER_TYPE: case ENUMERAL_TYPE: case BOOLEAN_TYPE:
2199 case OFFSET_TYPE:
2200 if (TREE_CODE (arg) == INTEGER_CST)
2202 tem = fold_convert_const (NOP_EXPR, type, arg);
2203 if (tem != NULL_TREE)
2204 return tem;
2206 if (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig)
2207 || TREE_CODE (orig) == OFFSET_TYPE)
2208 return fold_build1_loc (loc, NOP_EXPR, type, arg);
2209 if (TREE_CODE (orig) == COMPLEX_TYPE)
2210 return fold_convert_loc (loc, type,
2211 fold_build1_loc (loc, REALPART_EXPR,
2212 TREE_TYPE (orig), arg));
2213 gcc_assert (TREE_CODE (orig) == VECTOR_TYPE
2214 && tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (orig)));
2215 return fold_build1_loc (loc, NOP_EXPR, type, arg);
2217 case REAL_TYPE:
2218 if (TREE_CODE (arg) == INTEGER_CST)
2220 tem = fold_convert_const (FLOAT_EXPR, type, arg);
2221 if (tem != NULL_TREE)
2222 return tem;
2224 else if (TREE_CODE (arg) == REAL_CST)
2226 tem = fold_convert_const (NOP_EXPR, type, arg);
2227 if (tem != NULL_TREE)
2228 return tem;
2230 else if (TREE_CODE (arg) == FIXED_CST)
2232 tem = fold_convert_const (FIXED_CONVERT_EXPR, type, arg);
2233 if (tem != NULL_TREE)
2234 return tem;
2237 switch (TREE_CODE (orig))
2239 case INTEGER_TYPE:
2240 case BOOLEAN_TYPE: case ENUMERAL_TYPE:
2241 case POINTER_TYPE: case REFERENCE_TYPE:
2242 return fold_build1_loc (loc, FLOAT_EXPR, type, arg);
2244 case REAL_TYPE:
2245 return fold_build1_loc (loc, NOP_EXPR, type, arg);
2247 case FIXED_POINT_TYPE:
2248 return fold_build1_loc (loc, FIXED_CONVERT_EXPR, type, arg);
2250 case COMPLEX_TYPE:
2251 tem = fold_build1_loc (loc, REALPART_EXPR, TREE_TYPE (orig), arg);
2252 return fold_convert_loc (loc, type, tem);
2254 default:
2255 gcc_unreachable ();
2258 case FIXED_POINT_TYPE:
2259 if (TREE_CODE (arg) == FIXED_CST || TREE_CODE (arg) == INTEGER_CST
2260 || TREE_CODE (arg) == REAL_CST)
2262 tem = fold_convert_const (FIXED_CONVERT_EXPR, type, arg);
2263 if (tem != NULL_TREE)
2264 goto fold_convert_exit;
2267 switch (TREE_CODE (orig))
2269 case FIXED_POINT_TYPE:
2270 case INTEGER_TYPE:
2271 case ENUMERAL_TYPE:
2272 case BOOLEAN_TYPE:
2273 case REAL_TYPE:
2274 return fold_build1_loc (loc, FIXED_CONVERT_EXPR, type, arg);
2276 case COMPLEX_TYPE:
2277 tem = fold_build1_loc (loc, REALPART_EXPR, TREE_TYPE (orig), arg);
2278 return fold_convert_loc (loc, type, tem);
2280 default:
2281 gcc_unreachable ();
2284 case COMPLEX_TYPE:
2285 switch (TREE_CODE (orig))
2287 case INTEGER_TYPE:
2288 case BOOLEAN_TYPE: case ENUMERAL_TYPE:
2289 case POINTER_TYPE: case REFERENCE_TYPE:
2290 case REAL_TYPE:
2291 case FIXED_POINT_TYPE:
2292 return fold_build2_loc (loc, COMPLEX_EXPR, type,
2293 fold_convert_loc (loc, TREE_TYPE (type), arg),
2294 fold_convert_loc (loc, TREE_TYPE (type),
2295 integer_zero_node));
2296 case COMPLEX_TYPE:
2298 tree rpart, ipart;
2300 if (TREE_CODE (arg) == COMPLEX_EXPR)
2302 rpart = fold_convert_loc (loc, TREE_TYPE (type),
2303 TREE_OPERAND (arg, 0));
2304 ipart = fold_convert_loc (loc, TREE_TYPE (type),
2305 TREE_OPERAND (arg, 1));
2306 return fold_build2_loc (loc, COMPLEX_EXPR, type, rpart, ipart);
2309 arg = save_expr (arg);
2310 rpart = fold_build1_loc (loc, REALPART_EXPR, TREE_TYPE (orig), arg);
2311 ipart = fold_build1_loc (loc, IMAGPART_EXPR, TREE_TYPE (orig), arg);
2312 rpart = fold_convert_loc (loc, TREE_TYPE (type), rpart);
2313 ipart = fold_convert_loc (loc, TREE_TYPE (type), ipart);
2314 return fold_build2_loc (loc, COMPLEX_EXPR, type, rpart, ipart);
2317 default:
2318 gcc_unreachable ();
2321 case VECTOR_TYPE:
2322 if (integer_zerop (arg))
2323 return build_zero_vector (type);
2324 gcc_assert (tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (orig)));
2325 gcc_assert (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig)
2326 || TREE_CODE (orig) == VECTOR_TYPE);
2327 return fold_build1_loc (loc, VIEW_CONVERT_EXPR, type, arg);
2329 case VOID_TYPE:
2330 tem = fold_ignored_result (arg);
2331 return fold_build1_loc (loc, NOP_EXPR, type, tem);
2333 default:
2334 if (TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (orig))
2335 return fold_build1_loc (loc, NOP_EXPR, type, arg);
2336 gcc_unreachable ();
2338 fold_convert_exit:
2339 protected_set_expr_location_unshare (tem, loc);
2340 return tem;
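/* Two illustrative cases of the dispatch above: converting a
   _Complex double value C to double folds to REALPART_EXPR <C>, while
   converting a double D to _Complex double folds to
   COMPLEX_EXPR <D, 0.0>.  */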
2343 /* Return false if expr can be assumed not to be an lvalue, true
2344 otherwise. */
2346 static bool
2347 maybe_lvalue_p (const_tree x)
2349 /* We only need to wrap lvalue tree codes. */
2350 switch (TREE_CODE (x))
2352 case VAR_DECL:
2353 case PARM_DECL:
2354 case RESULT_DECL:
2355 case LABEL_DECL:
2356 case FUNCTION_DECL:
2357 case SSA_NAME:
2359 case COMPONENT_REF:
2360 case MEM_REF:
2361 case INDIRECT_REF:
2362 case ARRAY_REF:
2363 case ARRAY_RANGE_REF:
2364 case BIT_FIELD_REF:
2365 case OBJ_TYPE_REF:
2367 case REALPART_EXPR:
2368 case IMAGPART_EXPR:
2369 case PREINCREMENT_EXPR:
2370 case PREDECREMENT_EXPR:
2371 case SAVE_EXPR:
2372 case TRY_CATCH_EXPR:
2373 case WITH_CLEANUP_EXPR:
2374 case COMPOUND_EXPR:
2375 case MODIFY_EXPR:
2376 case TARGET_EXPR:
2377 case COND_EXPR:
2378 case BIND_EXPR:
2379 break;
2381 default:
2382 /* Assume the worst for front-end tree codes. */
2383 if ((int)TREE_CODE (x) >= NUM_TREE_CODES)
2384 break;
2385 return false;
2388 return true;
2391 /* Return an expr equal to X but certainly not valid as an lvalue. */
2393 tree
2394 non_lvalue_loc (location_t loc, tree x)
2396 /* While we are in GIMPLE, NON_LVALUE_EXPR doesn't mean anything to
2397 us. */
2398 if (in_gimple_form)
2399 return x;
2401 if (! maybe_lvalue_p (x))
2402 return x;
2403 return build1_loc (loc, NON_LVALUE_EXPR, TREE_TYPE (x), x);
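/* For example, wrapping a VAR_DECL yields NON_LVALUE_EXPR <var>, whereas
   an INTEGER_CST is returned unchanged since maybe_lvalue_p already
   rejects it, and everything passes through untouched in GIMPLE form.  */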
2406 /* When pedantic, return an expr equal to X but certainly not valid as a
2407 pedantic lvalue. Otherwise, return X. */
2409 static tree
2410 pedantic_non_lvalue_loc (location_t loc, tree x)
2412 return protected_set_expr_location_unshare (x, loc);
2415 /* Given a tree comparison code, return the code that is the logical inverse.
2416 It is generally not safe to do this for floating-point comparisons, except
2417 for EQ_EXPR, NE_EXPR, ORDERED_EXPR and UNORDERED_EXPR, so we return
2418 ERROR_MARK in this case. */
2420 enum tree_code
2421 invert_tree_comparison (enum tree_code code, bool honor_nans)
2423 if (honor_nans && flag_trapping_math && code != EQ_EXPR && code != NE_EXPR
2424 && code != ORDERED_EXPR && code != UNORDERED_EXPR)
2425 return ERROR_MARK;
2427 switch (code)
2429 case EQ_EXPR:
2430 return NE_EXPR;
2431 case NE_EXPR:
2432 return EQ_EXPR;
2433 case GT_EXPR:
2434 return honor_nans ? UNLE_EXPR : LE_EXPR;
2435 case GE_EXPR:
2436 return honor_nans ? UNLT_EXPR : LT_EXPR;
2437 case LT_EXPR:
2438 return honor_nans ? UNGE_EXPR : GE_EXPR;
2439 case LE_EXPR:
2440 return honor_nans ? UNGT_EXPR : GT_EXPR;
2441 case LTGT_EXPR:
2442 return UNEQ_EXPR;
2443 case UNEQ_EXPR:
2444 return LTGT_EXPR;
2445 case UNGT_EXPR:
2446 return LE_EXPR;
2447 case UNGE_EXPR:
2448 return LT_EXPR;
2449 case UNLT_EXPR:
2450 return GE_EXPR;
2451 case UNLE_EXPR:
2452 return GT_EXPR;
2453 case ORDERED_EXPR:
2454 return UNORDERED_EXPR;
2455 case UNORDERED_EXPR:
2456 return ORDERED_EXPR;
2457 default:
2458 gcc_unreachable ();
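/* For example, invert_tree_comparison (LT_EXPR, false) is GE_EXPR, but
   with HONOR_NANS it is UNGE_EXPR, since !(x < y) must also be true when
   either operand is a NaN; under -ftrapping-math the NaN-honoring LT
   case returns ERROR_MARK because the unordered form would lose the
   trap.  */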
2462 /* Similar, but return the comparison that results if the operands are
2463 swapped. This is safe for floating-point. */
2465 enum tree_code
2466 swap_tree_comparison (enum tree_code code)
2468 switch (code)
2470 case EQ_EXPR:
2471 case NE_EXPR:
2472 case ORDERED_EXPR:
2473 case UNORDERED_EXPR:
2474 case LTGT_EXPR:
2475 case UNEQ_EXPR:
2476 return code;
2477 case GT_EXPR:
2478 return LT_EXPR;
2479 case GE_EXPR:
2480 return LE_EXPR;
2481 case LT_EXPR:
2482 return GT_EXPR;
2483 case LE_EXPR:
2484 return GE_EXPR;
2485 case UNGT_EXPR:
2486 return UNLT_EXPR;
2487 case UNGE_EXPR:
2488 return UNLE_EXPR;
2489 case UNLT_EXPR:
2490 return UNGT_EXPR;
2491 case UNLE_EXPR:
2492 return UNGE_EXPR;
2493 default:
2494 gcc_unreachable ();
2499 /* Convert a comparison tree code from an enum tree_code representation
2500 into a compcode bit-based encoding. This function is the inverse of
2501 compcode_to_comparison. */
2503 static enum comparison_code
2504 comparison_to_compcode (enum tree_code code)
2506 switch (code)
2508 case LT_EXPR:
2509 return COMPCODE_LT;
2510 case EQ_EXPR:
2511 return COMPCODE_EQ;
2512 case LE_EXPR:
2513 return COMPCODE_LE;
2514 case GT_EXPR:
2515 return COMPCODE_GT;
2516 case NE_EXPR:
2517 return COMPCODE_NE;
2518 case GE_EXPR:
2519 return COMPCODE_GE;
2520 case ORDERED_EXPR:
2521 return COMPCODE_ORD;
2522 case UNORDERED_EXPR:
2523 return COMPCODE_UNORD;
2524 case UNLT_EXPR:
2525 return COMPCODE_UNLT;
2526 case UNEQ_EXPR:
2527 return COMPCODE_UNEQ;
2528 case UNLE_EXPR:
2529 return COMPCODE_UNLE;
2530 case UNGT_EXPR:
2531 return COMPCODE_UNGT;
2532 case LTGT_EXPR:
2533 return COMPCODE_LTGT;
2534 case UNGE_EXPR:
2535 return COMPCODE_UNGE;
2536 default:
2537 gcc_unreachable ();
2541 /* Convert a compcode bit-based encoding of a comparison operator back
2542 to GCC's enum tree_code representation. This function is the
2543 inverse of comparison_to_compcode. */
2545 static enum tree_code
2546 compcode_to_comparison (enum comparison_code code)
2548 switch (code)
2550 case COMPCODE_LT:
2551 return LT_EXPR;
2552 case COMPCODE_EQ:
2553 return EQ_EXPR;
2554 case COMPCODE_LE:
2555 return LE_EXPR;
2556 case COMPCODE_GT:
2557 return GT_EXPR;
2558 case COMPCODE_NE:
2559 return NE_EXPR;
2560 case COMPCODE_GE:
2561 return GE_EXPR;
2562 case COMPCODE_ORD:
2563 return ORDERED_EXPR;
2564 case COMPCODE_UNORD:
2565 return UNORDERED_EXPR;
2566 case COMPCODE_UNLT:
2567 return UNLT_EXPR;
2568 case COMPCODE_UNEQ:
2569 return UNEQ_EXPR;
2570 case COMPCODE_UNLE:
2571 return UNLE_EXPR;
2572 case COMPCODE_UNGT:
2573 return UNGT_EXPR;
2574 case COMPCODE_LTGT:
2575 return LTGT_EXPR;
2576 case COMPCODE_UNGE:
2577 return UNGE_EXPR;
2578 default:
2579 gcc_unreachable ();
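/* The bit encoding is what makes the combination below cheap: e.g.
   COMPCODE_LE (3) is COMPCODE_LT (1) | COMPCODE_EQ (2), so ANDing or
   ORing two codes directly yields the code of the combined test.  */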
2583 /* Return a tree for the comparison which is the combination of
2584 doing the AND or OR (depending on CODE) of the two operations LCODE
2585 and RCODE on the identical operands LL_ARG and LR_ARG. Take into account
2586 the possibility of trapping if the mode has NaNs, and return NULL_TREE
2587 if this makes the transformation invalid. */
2589 tree
2590 combine_comparisons (location_t loc,
2591 enum tree_code code, enum tree_code lcode,
2592 enum tree_code rcode, tree truth_type,
2593 tree ll_arg, tree lr_arg)
2595 bool honor_nans = HONOR_NANS (ll_arg);
2596 enum comparison_code lcompcode = comparison_to_compcode (lcode);
2597 enum comparison_code rcompcode = comparison_to_compcode (rcode);
2598 int compcode;
2600 switch (code)
2602 case TRUTH_AND_EXPR: case TRUTH_ANDIF_EXPR:
2603 compcode = lcompcode & rcompcode;
2604 break;
2606 case TRUTH_OR_EXPR: case TRUTH_ORIF_EXPR:
2607 compcode = lcompcode | rcompcode;
2608 break;
2610 default:
2611 return NULL_TREE;
2614 if (!honor_nans)
2616 /* Eliminate unordered comparisons, as well as LTGT and ORD
2617 which are not used unless the mode has NaNs. */
2618 compcode &= ~COMPCODE_UNORD;
2619 if (compcode == COMPCODE_LTGT)
2620 compcode = COMPCODE_NE;
2621 else if (compcode == COMPCODE_ORD)
2622 compcode = COMPCODE_TRUE;
2624 else if (flag_trapping_math)
2626 /* Check that the original operation and the optimized ones will trap
2627 under the same condition. */
2628 bool ltrap = (lcompcode & COMPCODE_UNORD) == 0
2629 && (lcompcode != COMPCODE_EQ)
2630 && (lcompcode != COMPCODE_ORD);
2631 bool rtrap = (rcompcode & COMPCODE_UNORD) == 0
2632 && (rcompcode != COMPCODE_EQ)
2633 && (rcompcode != COMPCODE_ORD);
2634 bool trap = (compcode & COMPCODE_UNORD) == 0
2635 && (compcode != COMPCODE_EQ)
2636 && (compcode != COMPCODE_ORD);
2638 /* In a short-circuited boolean expression the LHS might be
2639 such that the RHS, if evaluated, will never trap. For
2640 example, in ORD (x, y) && (x < y), we evaluate the RHS only
2641 if neither x nor y is NaN. (This is a mixed blessing: for
2642 example, the expression above will never trap, hence
2643 optimizing it to x < y would be invalid). */
2644 if ((code == TRUTH_ORIF_EXPR && (lcompcode & COMPCODE_UNORD))
2645 || (code == TRUTH_ANDIF_EXPR && !(lcompcode & COMPCODE_UNORD)))
2646 rtrap = false;
2648 /* If the comparison was short-circuited, and only the RHS
2649 trapped, we may now generate a spurious trap. */
2650 if (rtrap && !ltrap
2651 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR))
2652 return NULL_TREE;
2654 /* If we changed the conditions that cause a trap, we lose. */
2655 if ((ltrap || rtrap) != trap)
2656 return NULL_TREE;
2659 if (compcode == COMPCODE_TRUE)
2660 return constant_boolean_node (true, truth_type);
2661 else if (compcode == COMPCODE_FALSE)
2662 return constant_boolean_node (false, truth_type);
2663 else
2665 enum tree_code tcode;
2667 tcode = compcode_to_comparison ((enum comparison_code) compcode);
2668 return fold_build2_loc (loc, tcode, truth_type, ll_arg, lr_arg);
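/* A sketch with hypothetical integer operands X and Y (so NaNs are not
   honored):

     combine_comparisons (loc, TRUTH_ANDIF_EXPR, LT_EXPR, EQ_EXPR,
                          boolean_type_node, x, y)

   computes COMPCODE_LT & COMPCODE_EQ == COMPCODE_FALSE and returns
   constant_boolean_node (false, ...), i.e. "x < y && x == y" is always
   false.  */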
2672 /* Return nonzero if two operands (typically of the same tree node)
2673 are necessarily equal. If either argument has side-effects this
2674 function returns zero. FLAGS modifies behavior as follows:
2676 If OEP_ONLY_CONST is set, only return nonzero for constants.
2677 This function tests whether the operands are indistinguishable;
2678 it does not test whether they are equal using C's == operation.
2679 The distinction is important for IEEE floating point, because
2680 (1) -0.0 and 0.0 are distinguishable, but -0.0==0.0, and
2681 (2) two NaNs may be indistinguishable, but NaN!=NaN.
2683 If OEP_ONLY_CONST is unset, a VAR_DECL is considered equal to itself
2684 even though it may hold multiple values during a function.
2685 This is because a GCC tree node guarantees that nothing else is
2686 executed between the evaluation of its "operands" (which may often
2687 be evaluated in arbitrary order). Hence if the operands themselves
2688 don't side-effect, the VAR_DECLs, PARM_DECLs etc... must hold the
2689 same value in each operand/subexpression. Hence leaving OEP_ONLY_CONST
2690 unset means assuming isochronic (or instantaneous) tree equivalence.
2691 Unless comparing arbitrary expression trees, such as from different
2692 statements, this flag can usually be left unset.
2694 If OEP_PURE_SAME is set, then pure functions with identical arguments
2695 are considered the same. It is used when the caller has other ways
2696 to ensure that global memory is unchanged in between. */
2698 int
2699 operand_equal_p (const_tree arg0, const_tree arg1, unsigned int flags)
2701 /* If either is ERROR_MARK, they aren't equal. */
2702 if (TREE_CODE (arg0) == ERROR_MARK || TREE_CODE (arg1) == ERROR_MARK
2703 || TREE_TYPE (arg0) == error_mark_node
2704 || TREE_TYPE (arg1) == error_mark_node)
2705 return 0;
2707 /* Similarly, if either does not have a type (like a released SSA name),
2708 they aren't equal. */
2709 if (!TREE_TYPE (arg0) || !TREE_TYPE (arg1))
2710 return 0;
2712 /* Check equality of integer constants before bailing out due to
2713 precision differences. */
2714 if (TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
2715 return tree_int_cst_equal (arg0, arg1);
2717 /* If both types don't have the same signedness, then we can't consider
2718 them equal. We must check this before the STRIP_NOPS calls
2719 because they may change the signedness of the arguments. As pointers
2720 strictly don't have a signedness, require either two pointers or
2721 two non-pointers as well. */
2722 if (TYPE_UNSIGNED (TREE_TYPE (arg0)) != TYPE_UNSIGNED (TREE_TYPE (arg1))
2723 || POINTER_TYPE_P (TREE_TYPE (arg0)) != POINTER_TYPE_P (TREE_TYPE (arg1)))
2724 return 0;
2726 /* We cannot consider pointers to different address spaces equal. */
2727 if (POINTER_TYPE_P (TREE_TYPE (arg0)) && POINTER_TYPE_P (TREE_TYPE (arg1))
2728 && (TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (arg0)))
2729 != TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (arg1)))))
2730 return 0;
2732 /* If both types don't have the same precision, then it is not safe
2733 to strip NOPs. */
2734 if (element_precision (TREE_TYPE (arg0))
2735 != element_precision (TREE_TYPE (arg1)))
2736 return 0;
2738 STRIP_NOPS (arg0);
2739 STRIP_NOPS (arg1);
2741 /* In case both args are comparisons but with different comparison
2742 code, try to swap the comparison operands of one arg to produce
2743 a match and compare that variant. */
2744 if (TREE_CODE (arg0) != TREE_CODE (arg1)
2745 && COMPARISON_CLASS_P (arg0)
2746 && COMPARISON_CLASS_P (arg1))
2748 enum tree_code swap_code = swap_tree_comparison (TREE_CODE (arg1));
2750 if (TREE_CODE (arg0) == swap_code)
2751 return operand_equal_p (TREE_OPERAND (arg0, 0),
2752 TREE_OPERAND (arg1, 1), flags)
2753 && operand_equal_p (TREE_OPERAND (arg0, 1),
2754 TREE_OPERAND (arg1, 0), flags);
2757 if (TREE_CODE (arg0) != TREE_CODE (arg1))
2759 /* NOP_EXPR and CONVERT_EXPR are considered equal. */
2760 if (CONVERT_EXPR_P (arg0) && CONVERT_EXPR_P (arg1))
2762 else if (flags & OEP_ADDRESS_OF)
2764 /* If we are interested in comparing addresses ignore
2765 MEM_REF wrappings of the base that can appear just for
2766 TBAA reasons. */
2767 if (TREE_CODE (arg0) == MEM_REF
2768 && DECL_P (arg1)
2769 && TREE_CODE (TREE_OPERAND (arg0, 0)) == ADDR_EXPR
2770 && TREE_OPERAND (TREE_OPERAND (arg0, 0), 0) == arg1
2771 && integer_zerop (TREE_OPERAND (arg0, 1)))
2772 return 1;
2773 else if (TREE_CODE (arg1) == MEM_REF
2774 && DECL_P (arg0)
2775 && TREE_CODE (TREE_OPERAND (arg1, 0)) == ADDR_EXPR
2776 && TREE_OPERAND (TREE_OPERAND (arg1, 0), 0) == arg0
2777 && integer_zerop (TREE_OPERAND (arg1, 1)))
2778 return 1;
2779 return 0;
2781 else
2782 return 0;
2785 /* This is needed for conversions and for COMPONENT_REF.
2786 Might as well play it safe and always test this. */
2787 if (TREE_CODE (TREE_TYPE (arg0)) == ERROR_MARK
2788 || TREE_CODE (TREE_TYPE (arg1)) == ERROR_MARK
2789 || TYPE_MODE (TREE_TYPE (arg0)) != TYPE_MODE (TREE_TYPE (arg1)))
2790 return 0;
2792 /* If ARG0 and ARG1 are the same SAVE_EXPR, they are necessarily equal.
2793 We don't care about side effects in that case because the SAVE_EXPR
2794 takes care of that for us. In all other cases, two expressions are
2795 equal if they have no side effects. If we have two identical
2796 expressions with side effects that should be treated the same due
2797 to the only side effects being identical SAVE_EXPR's, that will
2798 be detected in the recursive calls below.
2799 If we are taking an invariant address of two identical objects
2800 they are necessarily equal as well. */
2801 if (arg0 == arg1 && ! (flags & OEP_ONLY_CONST)
2802 && (TREE_CODE (arg0) == SAVE_EXPR
2803 || (flags & OEP_CONSTANT_ADDRESS_OF)
2804 || (! TREE_SIDE_EFFECTS (arg0) && ! TREE_SIDE_EFFECTS (arg1))))
2805 return 1;
2807 /* Next handle constant cases, those for which we can return 1 even
2808 if ONLY_CONST is set. */
2809 if (TREE_CONSTANT (arg0) && TREE_CONSTANT (arg1))
2810 switch (TREE_CODE (arg0))
2812 case INTEGER_CST:
2813 return tree_int_cst_equal (arg0, arg1);
2815 case FIXED_CST:
2816 return FIXED_VALUES_IDENTICAL (TREE_FIXED_CST (arg0),
2817 TREE_FIXED_CST (arg1));
2819 case REAL_CST:
2820 if (real_identical (&TREE_REAL_CST (arg0), &TREE_REAL_CST (arg1)))
2821 return 1;
2824 if (!HONOR_SIGNED_ZEROS (arg0))
2826 /* If we do not distinguish between signed and unsigned zero,
2827 consider them equal. */
2828 if (real_zerop (arg0) && real_zerop (arg1))
2829 return 1;
2831 return 0;
2833 case VECTOR_CST:
2835 unsigned i;
2837 if (VECTOR_CST_NELTS (arg0) != VECTOR_CST_NELTS (arg1))
2838 return 0;
2840 for (i = 0; i < VECTOR_CST_NELTS (arg0); ++i)
2842 if (!operand_equal_p (VECTOR_CST_ELT (arg0, i),
2843 VECTOR_CST_ELT (arg1, i), flags))
2844 return 0;
2846 return 1;
2849 case COMPLEX_CST:
2850 return (operand_equal_p (TREE_REALPART (arg0), TREE_REALPART (arg1),
2851 flags)
2852 && operand_equal_p (TREE_IMAGPART (arg0), TREE_IMAGPART (arg1),
2853 flags));
2855 case STRING_CST:
2856 return (TREE_STRING_LENGTH (arg0) == TREE_STRING_LENGTH (arg1)
2857 && ! memcmp (TREE_STRING_POINTER (arg0),
2858 TREE_STRING_POINTER (arg1),
2859 TREE_STRING_LENGTH (arg0)));
2861 case ADDR_EXPR:
2862 return operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 0),
2863 TREE_CONSTANT (arg0) && TREE_CONSTANT (arg1)
2864 ? OEP_CONSTANT_ADDRESS_OF | OEP_ADDRESS_OF : 0);
2865 default:
2866 break;
2869 if (flags & OEP_ONLY_CONST)
2870 return 0;
2872 /* Define macros to test an operand from arg0 and arg1 for equality and a
2873 variant that allows null and views null as being different from any
2874 non-null value. In the latter case, if either is null, then both
2875 must be; otherwise, do the normal comparison. */
2876 #define OP_SAME(N) operand_equal_p (TREE_OPERAND (arg0, N), \
2877 TREE_OPERAND (arg1, N), flags)
2879 #define OP_SAME_WITH_NULL(N) \
2880 ((!TREE_OPERAND (arg0, N) || !TREE_OPERAND (arg1, N)) \
2881 ? TREE_OPERAND (arg0, N) == TREE_OPERAND (arg1, N) : OP_SAME (N))
2883 switch (TREE_CODE_CLASS (TREE_CODE (arg0)))
2885 case tcc_unary:
2886 /* Two conversions are equal only if signedness and modes match. */
2887 switch (TREE_CODE (arg0))
2889 CASE_CONVERT:
2890 case FIX_TRUNC_EXPR:
2891 if (TYPE_UNSIGNED (TREE_TYPE (arg0))
2892 != TYPE_UNSIGNED (TREE_TYPE (arg1)))
2893 return 0;
2894 break;
2895 default:
2896 break;
2899 return OP_SAME (0);
2902 case tcc_comparison:
2903 case tcc_binary:
2904 if (OP_SAME (0) && OP_SAME (1))
2905 return 1;
2907 /* For commutative ops, allow the other order. */
2908 return (commutative_tree_code (TREE_CODE (arg0))
2909 && operand_equal_p (TREE_OPERAND (arg0, 0),
2910 TREE_OPERAND (arg1, 1), flags)
2911 && operand_equal_p (TREE_OPERAND (arg0, 1),
2912 TREE_OPERAND (arg1, 0), flags));
2914 case tcc_reference:
2915 /* If either of the pointer (or reference) expressions we are
2916 dereferencing contain a side effect, these cannot be equal,
2917 but their addresses can be. */
2918 if ((flags & OEP_CONSTANT_ADDRESS_OF) == 0
2919 && (TREE_SIDE_EFFECTS (arg0)
2920 || TREE_SIDE_EFFECTS (arg1)))
2921 return 0;
2923 switch (TREE_CODE (arg0))
2925 case INDIRECT_REF:
2926 if (!(flags & OEP_ADDRESS_OF)
2927 && (TYPE_ALIGN (TREE_TYPE (arg0))
2928 != TYPE_ALIGN (TREE_TYPE (arg1))))
2929 return 0;
2930 flags &= ~(OEP_CONSTANT_ADDRESS_OF|OEP_ADDRESS_OF);
2931 return OP_SAME (0);
2933 case REALPART_EXPR:
2934 case IMAGPART_EXPR:
2935 return OP_SAME (0);
2937 case TARGET_MEM_REF:
2938 case MEM_REF:
2939 /* Require equal access sizes, and similar pointer types.
2940 We can have incomplete types for array references of
2941 variable-sized arrays from the Fortran frontend
2942 though. Also verify the types are compatible. */
2943 if (!((TYPE_SIZE (TREE_TYPE (arg0)) == TYPE_SIZE (TREE_TYPE (arg1))
2944 || (TYPE_SIZE (TREE_TYPE (arg0))
2945 && TYPE_SIZE (TREE_TYPE (arg1))
2946 && operand_equal_p (TYPE_SIZE (TREE_TYPE (arg0)),
2947 TYPE_SIZE (TREE_TYPE (arg1)), flags)))
2948 && types_compatible_p (TREE_TYPE (arg0), TREE_TYPE (arg1))
2949 && ((flags & OEP_ADDRESS_OF)
2950 || (alias_ptr_types_compatible_p
2951 (TREE_TYPE (TREE_OPERAND (arg0, 1)),
2952 TREE_TYPE (TREE_OPERAND (arg1, 1)))
2953 && (MR_DEPENDENCE_CLIQUE (arg0)
2954 == MR_DEPENDENCE_CLIQUE (arg1))
2955 && (MR_DEPENDENCE_BASE (arg0)
2956 == MR_DEPENDENCE_BASE (arg1))
2957 && (TYPE_ALIGN (TREE_TYPE (arg0))
2958 == TYPE_ALIGN (TREE_TYPE (arg1)))))))
2959 return 0;
2960 flags &= ~(OEP_CONSTANT_ADDRESS_OF|OEP_ADDRESS_OF);
2961 return (OP_SAME (0) && OP_SAME (1)
2962 /* TARGET_MEM_REFs require equal extra operands. */
2963 && (TREE_CODE (arg0) != TARGET_MEM_REF
2964 || (OP_SAME_WITH_NULL (2)
2965 && OP_SAME_WITH_NULL (3)
2966 && OP_SAME_WITH_NULL (4))));
2968 case ARRAY_REF:
2969 case ARRAY_RANGE_REF:
2970 /* Operands 2 and 3 may be null.
2971 Compare the array index by value first if it is constant, as we
2972 may have different types but the same value here. */
2973 if (!OP_SAME (0))
2974 return 0;
2975 flags &= ~(OEP_CONSTANT_ADDRESS_OF|OEP_ADDRESS_OF);
2976 return ((tree_int_cst_equal (TREE_OPERAND (arg0, 1),
2977 TREE_OPERAND (arg1, 1))
2978 || OP_SAME (1))
2979 && OP_SAME_WITH_NULL (2)
2980 && OP_SAME_WITH_NULL (3));
2982 case COMPONENT_REF:
2983 /* Handle operand 2 the same as for ARRAY_REF. Operand 0
2984 may be NULL when we're called to compare MEM_EXPRs. */
2985 if (!OP_SAME_WITH_NULL (0)
2986 || !OP_SAME (1))
2987 return 0;
2988 flags &= ~(OEP_CONSTANT_ADDRESS_OF|OEP_ADDRESS_OF);
2989 return OP_SAME_WITH_NULL (2);
2991 case BIT_FIELD_REF:
2992 if (!OP_SAME (0))
2993 return 0;
2994 flags &= ~(OEP_CONSTANT_ADDRESS_OF|OEP_ADDRESS_OF);
2995 return OP_SAME (1) && OP_SAME (2);
2997 default:
2998 return 0;
3001 case tcc_expression:
3002 switch (TREE_CODE (arg0))
3004 case ADDR_EXPR:
3005 return operand_equal_p (TREE_OPERAND (arg0, 0),
3006 TREE_OPERAND (arg1, 0),
3007 flags | OEP_ADDRESS_OF);
3009 case TRUTH_NOT_EXPR:
3010 return OP_SAME (0);
3012 case TRUTH_ANDIF_EXPR:
3013 case TRUTH_ORIF_EXPR:
3014 return OP_SAME (0) && OP_SAME (1);
3016 case FMA_EXPR:
3017 case WIDEN_MULT_PLUS_EXPR:
3018 case WIDEN_MULT_MINUS_EXPR:
3019 if (!OP_SAME (2))
3020 return 0;
3021 /* The multiplication operands are commutative. */
3022 /* FALLTHRU */
3024 case TRUTH_AND_EXPR:
3025 case TRUTH_OR_EXPR:
3026 case TRUTH_XOR_EXPR:
3027 if (OP_SAME (0) && OP_SAME (1))
3028 return 1;
3030 /* Otherwise take into account this is a commutative operation. */
3031 return (operand_equal_p (TREE_OPERAND (arg0, 0),
3032 TREE_OPERAND (arg1, 1), flags)
3033 && operand_equal_p (TREE_OPERAND (arg0, 1),
3034 TREE_OPERAND (arg1, 0), flags));
3036 case COND_EXPR:
3037 case VEC_COND_EXPR:
3038 case DOT_PROD_EXPR:
3039 return OP_SAME (0) && OP_SAME (1) && OP_SAME (2);
3041 default:
3042 return 0;
3045 case tcc_vl_exp:
3046 switch (TREE_CODE (arg0))
3048 case CALL_EXPR:
3049 if ((CALL_EXPR_FN (arg0) == NULL_TREE)
3050 != (CALL_EXPR_FN (arg1) == NULL_TREE))
3051 /* If the two CALL_EXPRs are not both internal or both normal
3052 function calls, then they are not equal. */
3053 return 0;
3054 else if (CALL_EXPR_FN (arg0) == NULL_TREE)
3056 /* If the CALL_EXPRs call different internal functions, then they
3057 are not equal. */
3058 if (CALL_EXPR_IFN (arg0) != CALL_EXPR_IFN (arg1))
3059 return 0;
3061 else
3063 /* If the CALL_EXPRs call different functions, then they are not
3064 equal. */
3065 if (! operand_equal_p (CALL_EXPR_FN (arg0), CALL_EXPR_FN (arg1),
3066 flags))
3067 return 0;
3071 unsigned int cef = call_expr_flags (arg0);
3072 if (flags & OEP_PURE_SAME)
3073 cef &= ECF_CONST | ECF_PURE;
3074 else
3075 cef &= ECF_CONST;
3076 if (!cef)
3077 return 0;
3080 /* Now see if all the arguments are the same. */
3082 const_call_expr_arg_iterator iter0, iter1;
3083 const_tree a0, a1;
3084 for (a0 = first_const_call_expr_arg (arg0, &iter0),
3085 a1 = first_const_call_expr_arg (arg1, &iter1);
3086 a0 && a1;
3087 a0 = next_const_call_expr_arg (&iter0),
3088 a1 = next_const_call_expr_arg (&iter1))
3089 if (! operand_equal_p (a0, a1, flags))
3090 return 0;
3092 /* If we get here and both argument lists are exhausted
3093 then the CALL_EXPRs are equal. */
3094 return ! (a0 || a1);
3096 default:
3097 return 0;
3100 case tcc_declaration:
3101 /* Consider __builtin_sqrt equal to sqrt. */
3102 return (TREE_CODE (arg0) == FUNCTION_DECL
3103 && DECL_BUILT_IN (arg0) && DECL_BUILT_IN (arg1)
3104 && DECL_BUILT_IN_CLASS (arg0) == DECL_BUILT_IN_CLASS (arg1)
3105 && DECL_FUNCTION_CODE (arg0) == DECL_FUNCTION_CODE (arg1));
3107 default:
3108 return 0;
3111 #undef OP_SAME
3112 #undef OP_SAME_WITH_NULL
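/* Illustrations of the above: two structurally identical "a + b" trees
   compare equal provided neither has side effects; "a < b" matches
   "b > a" through swap_tree_comparison; and the REAL_CSTs -0.0 and 0.0
   compare unequal whenever signed zeros are honored.  */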
3115 /* Similar to operand_equal_p, but see if ARG0 might have been made by
3116 shorten_compare from ARG1 when ARG1 was being compared with OTHER.
3118 When in doubt, return 0. */
3120 static int
3121 operand_equal_for_comparison_p (tree arg0, tree arg1, tree other)
3123 int unsignedp1, unsignedpo;
3124 tree primarg0, primarg1, primother;
3125 unsigned int correct_width;
3127 if (operand_equal_p (arg0, arg1, 0))
3128 return 1;
3130 if (! INTEGRAL_TYPE_P (TREE_TYPE (arg0))
3131 || ! INTEGRAL_TYPE_P (TREE_TYPE (arg1)))
3132 return 0;
3134 /* Discard any conversions that don't change the modes of ARG0 and ARG1
3135 and see if the inner values are the same. This removes any
3136 signedness comparison, which doesn't matter here. */
3137 primarg0 = arg0, primarg1 = arg1;
3138 STRIP_NOPS (primarg0);
3139 STRIP_NOPS (primarg1);
3140 if (operand_equal_p (primarg0, primarg1, 0))
3141 return 1;
3143 /* Duplicate what shorten_compare does to ARG1 and see if that gives the
3144 actual comparison operand, ARG0.
3146 First throw away any conversions to wider types
3147 already present in the operands. */
3149 primarg1 = get_narrower (arg1, &unsignedp1);
3150 primother = get_narrower (other, &unsignedpo);
3152 correct_width = TYPE_PRECISION (TREE_TYPE (arg1));
3153 if (unsignedp1 == unsignedpo
3154 && TYPE_PRECISION (TREE_TYPE (primarg1)) < correct_width
3155 && TYPE_PRECISION (TREE_TYPE (primother)) < correct_width)
3157 tree type = TREE_TYPE (arg0);
3159 /* Make sure shorter operand is extended the right way
3160 to match the longer operand. */
3161 primarg1 = fold_convert (signed_or_unsigned_type_for
3162 (unsignedp1, TREE_TYPE (primarg1)), primarg1);
3164 if (operand_equal_p (arg0, fold_convert (type, primarg1), 0))
3165 return 1;
3168 return 0;
3171 /* See if ARG is an expression that is either a comparison or is performing
3172 arithmetic on comparisons. The comparisons must only be comparing
3173 two different values, which will be stored in *CVAL1 and *CVAL2; if
3174 they are nonzero it means that some operands have already been found.
3175 No variables may be used anywhere else in the expression except in the
3176 comparisons. If SAVE_P is true it means we removed a SAVE_EXPR around
3177 the expression and save_expr needs to be called with CVAL1 and CVAL2.
3179 If this is true, return 1. Otherwise, return zero. */
3181 static int
3182 twoval_comparison_p (tree arg, tree *cval1, tree *cval2, int *save_p)
3184 enum tree_code code = TREE_CODE (arg);
3185 enum tree_code_class tclass = TREE_CODE_CLASS (code);
3187 /* We can handle some of the tcc_expression cases here. */
3188 if (tclass == tcc_expression && code == TRUTH_NOT_EXPR)
3189 tclass = tcc_unary;
3190 else if (tclass == tcc_expression
3191 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR
3192 || code == COMPOUND_EXPR))
3193 tclass = tcc_binary;
3195 else if (tclass == tcc_expression && code == SAVE_EXPR
3196 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (arg, 0)))
3198 /* If we've already found a CVAL1 or CVAL2, this expression is
3199 too complex to handle. */
3200 if (*cval1 || *cval2)
3201 return 0;
3203 tclass = tcc_unary;
3204 *save_p = 1;
3207 switch (tclass)
3209 case tcc_unary:
3210 return twoval_comparison_p (TREE_OPERAND (arg, 0), cval1, cval2, save_p);
3212 case tcc_binary:
3213 return (twoval_comparison_p (TREE_OPERAND (arg, 0), cval1, cval2, save_p)
3214 && twoval_comparison_p (TREE_OPERAND (arg, 1),
3215 cval1, cval2, save_p));
3217 case tcc_constant:
3218 return 1;
3220 case tcc_expression:
3221 if (code == COND_EXPR)
3222 return (twoval_comparison_p (TREE_OPERAND (arg, 0),
3223 cval1, cval2, save_p)
3224 && twoval_comparison_p (TREE_OPERAND (arg, 1),
3225 cval1, cval2, save_p)
3226 && twoval_comparison_p (TREE_OPERAND (arg, 2),
3227 cval1, cval2, save_p));
3228 return 0;
3230 case tcc_comparison:
3231 /* First see if we can handle the first operand, then the second. For
3232 the second operand, we know *CVAL1 can't be zero. It must be that
3233 one side of the comparison is each of the values; test for the
3234 case where this isn't true by failing if the two operands
3235 are the same. */
3237 if (operand_equal_p (TREE_OPERAND (arg, 0),
3238 TREE_OPERAND (arg, 1), 0))
3239 return 0;
3241 if (*cval1 == 0)
3242 *cval1 = TREE_OPERAND (arg, 0);
3243 else if (operand_equal_p (*cval1, TREE_OPERAND (arg, 0), 0))
3245 else if (*cval2 == 0)
3246 *cval2 = TREE_OPERAND (arg, 0);
3247 else if (operand_equal_p (*cval2, TREE_OPERAND (arg, 0), 0))
3249 else
3250 return 0;
3252 if (operand_equal_p (*cval1, TREE_OPERAND (arg, 1), 0))
3254 else if (*cval2 == 0)
3255 *cval2 = TREE_OPERAND (arg, 1);
3256 else if (operand_equal_p (*cval2, TREE_OPERAND (arg, 1), 0))
3258 else
3259 return 0;
3261 return 1;
3263 default:
3264 return 0;
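/* For example (hypothetical variables), "a < b || a == b" succeeds with
   *CVAL1 = a and *CVAL2 = b, whereas "a < b || b < c" fails because a
   third value C appears in the comparisons.  */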
3268 /* ARG is a tree that is known to contain just arithmetic operations and
3269 comparisons. Evaluate the operations in the tree substituting NEW0 for
3270 any occurrence of OLD0 as an operand of a comparison and likewise for
3271 NEW1 and OLD1. */
3273 static tree
3274 eval_subst (location_t loc, tree arg, tree old0, tree new0,
3275 tree old1, tree new1)
3277 tree type = TREE_TYPE (arg);
3278 enum tree_code code = TREE_CODE (arg);
3279 enum tree_code_class tclass = TREE_CODE_CLASS (code);
3281 /* We can handle some of the tcc_expression cases here. */
3282 if (tclass == tcc_expression && code == TRUTH_NOT_EXPR)
3283 tclass = tcc_unary;
3284 else if (tclass == tcc_expression
3285 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR))
3286 tclass = tcc_binary;
3288 switch (tclass)
3290 case tcc_unary:
3291 return fold_build1_loc (loc, code, type,
3292 eval_subst (loc, TREE_OPERAND (arg, 0),
3293 old0, new0, old1, new1));
3295 case tcc_binary:
3296 return fold_build2_loc (loc, code, type,
3297 eval_subst (loc, TREE_OPERAND (arg, 0),
3298 old0, new0, old1, new1),
3299 eval_subst (loc, TREE_OPERAND (arg, 1),
3300 old0, new0, old1, new1));
3302 case tcc_expression:
3303 switch (code)
3305 case SAVE_EXPR:
3306 return eval_subst (loc, TREE_OPERAND (arg, 0), old0, new0,
3307 old1, new1);
3309 case COMPOUND_EXPR:
3310 return eval_subst (loc, TREE_OPERAND (arg, 1), old0, new0,
3311 old1, new1);
3313 case COND_EXPR:
3314 return fold_build3_loc (loc, code, type,
3315 eval_subst (loc, TREE_OPERAND (arg, 0),
3316 old0, new0, old1, new1),
3317 eval_subst (loc, TREE_OPERAND (arg, 1),
3318 old0, new0, old1, new1),
3319 eval_subst (loc, TREE_OPERAND (arg, 2),
3320 old0, new0, old1, new1));
3321 default:
3322 break;
3324 /* Fall through - ??? */
3326 case tcc_comparison:
3328 tree arg0 = TREE_OPERAND (arg, 0);
3329 tree arg1 = TREE_OPERAND (arg, 1);
3331 /* We need to check both for exact equality and tree equality. The
3332 former will be true if the operand has a side-effect. In that
3333 case, we know the operand occurred exactly once. */
3335 if (arg0 == old0 || operand_equal_p (arg0, old0, 0))
3336 arg0 = new0;
3337 else if (arg0 == old1 || operand_equal_p (arg0, old1, 0))
3338 arg0 = new1;
3340 if (arg1 == old0 || operand_equal_p (arg1, old0, 0))
3341 arg1 = new0;
3342 else if (arg1 == old1 || operand_equal_p (arg1, old1, 0))
3343 arg1 = new1;
3345 return fold_build2_loc (loc, code, type, arg0, arg1);
3348 default:
3349 return arg;
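/* A sketch with hypothetical trees A, B and C: given
   ARG = (a < b) || (b == c), eval_subst (loc, arg, a, zero, b, one)
   rebuilds (and folds) the tree as (zero < one) || (one == c).  */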
3353 /* Return a tree for the case when the result of an expression is RESULT
3354 converted to TYPE and OMITTED was previously an operand of the expression
3355 but is now not needed (e.g., we folded OMITTED * 0).
3357 If OMITTED has side effects, we must evaluate it. Otherwise, just do
3358 the conversion of RESULT to TYPE. */
3360 tree
3361 omit_one_operand_loc (location_t loc, tree type, tree result, tree omitted)
3363 tree t = fold_convert_loc (loc, type, result);
3365 /* If the resulting operand is an empty statement, just return the omitted
3366 statement casted to void. */
3367 if (IS_EMPTY_STMT (t) && TREE_SIDE_EFFECTS (omitted))
3368 return build1_loc (loc, NOP_EXPR, void_type_node,
3369 fold_ignored_result (omitted));
3371 if (TREE_SIDE_EFFECTS (omitted))
3372 return build2_loc (loc, COMPOUND_EXPR, type,
3373 fold_ignored_result (omitted), t);
3375 return non_lvalue_loc (loc, t);
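/* For example, when folding "f () * 0" to 0, the call still has side
   effects, so omit_one_operand_loc returns the COMPOUND_EXPR
   <f (), 0>, keeping the call while discarding its value.  */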
3378 /* Return a tree for the case when the result of an expression is RESULT
3379 converted to TYPE and OMITTED1 and OMITTED2 were previously operands
3380 of the expression but are now not needed.
3382 If OMITTED1 or OMITTED2 has side effects, they must be evaluated.
3383 If both OMITTED1 and OMITTED2 have side effects, OMITTED1 is
3384 evaluated before OMITTED2. Otherwise, if neither has side effects,
3385 just do the conversion of RESULT to TYPE. */
3387 tree
3388 omit_two_operands_loc (location_t loc, tree type, tree result,
3389 tree omitted1, tree omitted2)
3391 tree t = fold_convert_loc (loc, type, result);
3393 if (TREE_SIDE_EFFECTS (omitted2))
3394 t = build2_loc (loc, COMPOUND_EXPR, type, omitted2, t);
3395 if (TREE_SIDE_EFFECTS (omitted1))
3396 t = build2_loc (loc, COMPOUND_EXPR, type, omitted1, t);
3398 return TREE_CODE (t) != COMPOUND_EXPR ? non_lvalue_loc (loc, t) : t;
3402 /* Return a simplified tree node for the truth-negation of ARG. This
3403 never alters ARG itself. We assume that ARG is an operation that
3404 returns a truth value (0 or 1).
3406 FIXME: one would think we would fold the result, but it causes
3407 problems with the dominator optimizer. */
3409 static tree
3410 fold_truth_not_expr (location_t loc, tree arg)
3412 tree type = TREE_TYPE (arg);
3413 enum tree_code code = TREE_CODE (arg);
3414 location_t loc1, loc2;
3416 /* If this is a comparison, we can simply invert it, except for
3417 floating-point non-equality comparisons, in which case we just
3418 enclose a TRUTH_NOT_EXPR around what we have. */
3420 if (TREE_CODE_CLASS (code) == tcc_comparison)
3422 tree op_type = TREE_TYPE (TREE_OPERAND (arg, 0));
3423 if (FLOAT_TYPE_P (op_type)
3424 && flag_trapping_math
3425 && code != ORDERED_EXPR && code != UNORDERED_EXPR
3426 && code != NE_EXPR && code != EQ_EXPR)
3427 return NULL_TREE;
3429 code = invert_tree_comparison (code, HONOR_NANS (op_type));
3430 if (code == ERROR_MARK)
3431 return NULL_TREE;
3433 return build2_loc (loc, code, type, TREE_OPERAND (arg, 0),
3434 TREE_OPERAND (arg, 1));
3437 switch (code)
3439 case INTEGER_CST:
3440 return constant_boolean_node (integer_zerop (arg), type);
3442 case TRUTH_AND_EXPR:
3443 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
3444 loc2 = expr_location_or (TREE_OPERAND (arg, 1), loc);
3445 return build2_loc (loc, TRUTH_OR_EXPR, type,
3446 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)),
3447 invert_truthvalue_loc (loc2, TREE_OPERAND (arg, 1)));
3449 case TRUTH_OR_EXPR:
3450 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
3451 loc2 = expr_location_or (TREE_OPERAND (arg, 1), loc);
3452 return build2_loc (loc, TRUTH_AND_EXPR, type,
3453 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)),
3454 invert_truthvalue_loc (loc2, TREE_OPERAND (arg, 1)));
3456 case TRUTH_XOR_EXPR:
3457 /* Here we can invert either operand. We invert the first operand
3458 unless the second operand is a TRUTH_NOT_EXPR in which case our
3459 result is the XOR of the first operand with the inside of the
3460 negation of the second operand. */
3462 if (TREE_CODE (TREE_OPERAND (arg, 1)) == TRUTH_NOT_EXPR)
3463 return build2_loc (loc, TRUTH_XOR_EXPR, type, TREE_OPERAND (arg, 0),
3464 TREE_OPERAND (TREE_OPERAND (arg, 1), 0));
3465 else
3466 return build2_loc (loc, TRUTH_XOR_EXPR, type,
3467 invert_truthvalue_loc (loc, TREE_OPERAND (arg, 0)),
3468 TREE_OPERAND (arg, 1));
3470 case TRUTH_ANDIF_EXPR:
3471 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
3472 loc2 = expr_location_or (TREE_OPERAND (arg, 1), loc);
3473 return build2_loc (loc, TRUTH_ORIF_EXPR, type,
3474 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)),
3475 invert_truthvalue_loc (loc2, TREE_OPERAND (arg, 1)));
3477 case TRUTH_ORIF_EXPR:
3478 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
3479 loc2 = expr_location_or (TREE_OPERAND (arg, 1), loc);
3480 return build2_loc (loc, TRUTH_ANDIF_EXPR, type,
3481 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)),
3482 invert_truthvalue_loc (loc2, TREE_OPERAND (arg, 1)));
3484 case TRUTH_NOT_EXPR:
3485 return TREE_OPERAND (arg, 0);
3487 case COND_EXPR:
3489 tree arg1 = TREE_OPERAND (arg, 1);
3490 tree arg2 = TREE_OPERAND (arg, 2);
3492 loc1 = expr_location_or (TREE_OPERAND (arg, 1), loc);
3493 loc2 = expr_location_or (TREE_OPERAND (arg, 2), loc);
3495 /* A COND_EXPR may have a throw as one operand, which
3496 then has void type. Just leave void operands
3497 as they are. */
3498 return build3_loc (loc, COND_EXPR, type, TREE_OPERAND (arg, 0),
3499 VOID_TYPE_P (TREE_TYPE (arg1))
3500 ? arg1 : invert_truthvalue_loc (loc1, arg1),
3501 VOID_TYPE_P (TREE_TYPE (arg2))
3502 ? arg2 : invert_truthvalue_loc (loc2, arg2));
3505 case COMPOUND_EXPR:
3506 loc1 = expr_location_or (TREE_OPERAND (arg, 1), loc);
3507 return build2_loc (loc, COMPOUND_EXPR, type,
3508 TREE_OPERAND (arg, 0),
3509 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 1)));
3511 case NON_LVALUE_EXPR:
3512 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
3513 return invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0));
3515 CASE_CONVERT:
3516 if (TREE_CODE (TREE_TYPE (arg)) == BOOLEAN_TYPE)
3517 return build1_loc (loc, TRUTH_NOT_EXPR, type, arg);
3519 /* ... fall through ... */
3521 case FLOAT_EXPR:
3522 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
3523 return build1_loc (loc, TREE_CODE (arg), type,
3524 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)));
3526 case BIT_AND_EXPR:
3527 if (!integer_onep (TREE_OPERAND (arg, 1)))
3528 return NULL_TREE;
3529 return build2_loc (loc, EQ_EXPR, type, arg, build_int_cst (type, 0));
3531 case SAVE_EXPR:
3532 return build1_loc (loc, TRUTH_NOT_EXPR, type, arg);
3534 case CLEANUP_POINT_EXPR:
3535 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
3536 return build1_loc (loc, CLEANUP_POINT_EXPR, type,
3537 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)));
3539 default:
3540 return NULL_TREE;
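/* Examples of the cases above: !(a && b) becomes !a || !b via the
   TRUTH_ANDIF_EXPR case, while negating "x < y" on floats under
   -ftrapping-math returns NULL_TREE so the caller keeps an explicit
   TRUTH_NOT_EXPR.  */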
3544 /* Fold the truth-negation of ARG. This never alters ARG itself. We
3545 assume that ARG is an operation that returns a truth value (0 or 1
3546 for scalars, 0 or -1 for vectors). Return the folded expression if
3547 folding is successful. Otherwise, return NULL_TREE. */
3549 static tree
3550 fold_invert_truthvalue (location_t loc, tree arg)
3552 tree type = TREE_TYPE (arg);
3553 return fold_unary_loc (loc, VECTOR_TYPE_P (type)
3554 ? BIT_NOT_EXPR
3555 : TRUTH_NOT_EXPR,
3556 type, arg);
3559 /* Return a simplified tree node for the truth-negation of ARG. This
3560 never alters ARG itself. We assume that ARG is an operation that
3561 returns a truth value (0 or 1 for scalars, 0 or -1 for vectors). */
3563 tree
3564 invert_truthvalue_loc (location_t loc, tree arg)
3566 if (TREE_CODE (arg) == ERROR_MARK)
3567 return arg;
3569 tree type = TREE_TYPE (arg);
3570 return fold_build1_loc (loc, VECTOR_TYPE_P (type)
3571 ? BIT_NOT_EXPR
3572 : TRUTH_NOT_EXPR,
3573 type, arg);
3576 /* Knowing that ARG0 and ARG1 are both RDIV_EXPRs, simplify a binary operation
3577 with code CODE. This optimization is unsafe. */
3578 static tree
3579 distribute_real_division (location_t loc, enum tree_code code, tree type,
3580 tree arg0, tree arg1)
3582 bool mul0 = TREE_CODE (arg0) == MULT_EXPR;
3583 bool mul1 = TREE_CODE (arg1) == MULT_EXPR;
3585 /* (A / C) +- (B / C) -> (A +- B) / C. */
3586 if (mul0 == mul1
3587 && operand_equal_p (TREE_OPERAND (arg0, 1),
3588 TREE_OPERAND (arg1, 1), 0))
3589 return fold_build2_loc (loc, mul0 ? MULT_EXPR : RDIV_EXPR, type,
3590 fold_build2_loc (loc, code, type,
3591 TREE_OPERAND (arg0, 0),
3592 TREE_OPERAND (arg1, 0)),
3593 TREE_OPERAND (arg0, 1));
3595 /* (A / C1) +- (A / C2) -> A * (1 / C1 +- 1 / C2). */
3596 if (operand_equal_p (TREE_OPERAND (arg0, 0),
3597 TREE_OPERAND (arg1, 0), 0)
3598 && TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
3599 && TREE_CODE (TREE_OPERAND (arg1, 1)) == REAL_CST)
3601 REAL_VALUE_TYPE r0, r1;
3602 r0 = TREE_REAL_CST (TREE_OPERAND (arg0, 1));
3603 r1 = TREE_REAL_CST (TREE_OPERAND (arg1, 1));
3604 if (!mul0)
3605 real_arithmetic (&r0, RDIV_EXPR, &dconst1, &r0);
3606 if (!mul1)
3607 real_arithmetic (&r1, RDIV_EXPR, &dconst1, &r1);
3608 real_arithmetic (&r0, code, &r0, &r1);
3609 return fold_build2_loc (loc, MULT_EXPR, type,
3610 TREE_OPERAND (arg0, 0),
3611 build_real (type, r0));
3614 return NULL_TREE;
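/* For illustration (only valid under unsafe math): "x / 3.0 + y / 3.0"
   becomes "(x + y) / 3.0" by the first pattern, and "x / 2.0 + x / 4.0"
   becomes "x * 0.75" by the second.  */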
3617 /* Return a BIT_FIELD_REF of type TYPE to refer to BITSIZE bits of INNER
3618 starting at BITPOS. The field is unsigned if UNSIGNEDP is nonzero. */
3620 static tree
3621 make_bit_field_ref (location_t loc, tree inner, tree type,
3622 HOST_WIDE_INT bitsize, HOST_WIDE_INT bitpos, int unsignedp)
3624 tree result, bftype;
3626 if (bitpos == 0)
3628 tree size = TYPE_SIZE (TREE_TYPE (inner));
3629 if ((INTEGRAL_TYPE_P (TREE_TYPE (inner))
3630 || POINTER_TYPE_P (TREE_TYPE (inner)))
3631 && tree_fits_shwi_p (size)
3632 && tree_to_shwi (size) == bitsize)
3633 return fold_convert_loc (loc, type, inner);
3636 bftype = type;
3637 if (TYPE_PRECISION (bftype) != bitsize
3638 || TYPE_UNSIGNED (bftype) == !unsignedp)
3639 bftype = build_nonstandard_integer_type (bitsize, 0);
3641 result = build3_loc (loc, BIT_FIELD_REF, bftype, inner,
3642 size_int (bitsize), bitsize_int (bitpos));
3644 if (bftype != type)
3645 result = fold_convert_loc (loc, type, result);
3647 return result;
3650 /* Optimize a bit-field compare.
3652 There are two cases: First is a compare against a constant and the
3653 second is a comparison of two items where the fields are at the same
3654 bit position relative to the start of a chunk (byte, halfword, word)
3655 large enough to contain it. In these cases we can avoid the shift
3656 implicit in bitfield extractions.
3658 For constants, we emit a compare of the shifted constant with the
3659 BIT_AND_EXPR of a mask and a byte, halfword, or word of the operand being
3660 compared. For two fields at the same position, we do the ANDs with the
3661 similar mask and compare the result of the ANDs.
3663 CODE is the comparison code, known to be either NE_EXPR or EQ_EXPR.
3664 COMPARE_TYPE is the type of the comparison, and LHS and RHS
3665 are the left and right operands of the comparison, respectively.
3667 If the optimization described above can be done, we return the resulting
3668 tree. Otherwise we return zero. */
3670 static tree
3671 optimize_bit_field_compare (location_t loc, enum tree_code code,
3672 tree compare_type, tree lhs, tree rhs)
3674 HOST_WIDE_INT lbitpos, lbitsize, rbitpos, rbitsize, nbitpos, nbitsize;
3675 tree type = TREE_TYPE (lhs);
3676 tree unsigned_type;
3677 int const_p = TREE_CODE (rhs) == INTEGER_CST;
3678 machine_mode lmode, rmode, nmode;
3679 int lunsignedp, runsignedp;
3680 int lvolatilep = 0, rvolatilep = 0;
3681 tree linner, rinner = NULL_TREE;
3682 tree mask;
3683 tree offset;
3685 /* Get all the information about the extractions being done. If the bit size
3686 is the same as the size of the underlying object, we aren't doing an
3687 extraction at all and so can do nothing. We also don't want to
3688 do anything if the inner expression is a PLACEHOLDER_EXPR since we
3689 then will no longer be able to replace it. */
3690 linner = get_inner_reference (lhs, &lbitsize, &lbitpos, &offset, &lmode,
3691 &lunsignedp, &lvolatilep, false);
3692 if (linner == lhs || lbitsize == GET_MODE_BITSIZE (lmode) || lbitsize < 0
3693 || offset != 0 || TREE_CODE (linner) == PLACEHOLDER_EXPR || lvolatilep)
3694 return 0;
3696 if (!const_p)
3698 /* If this is not a constant, we can only do something if bit positions,
3699 sizes, and signedness are the same. */
3700 rinner = get_inner_reference (rhs, &rbitsize, &rbitpos, &offset, &rmode,
3701 &runsignedp, &rvolatilep, false);
3703 if (rinner == rhs || lbitpos != rbitpos || lbitsize != rbitsize
3704 || lunsignedp != runsignedp || offset != 0
3705 || TREE_CODE (rinner) == PLACEHOLDER_EXPR || rvolatilep)
3706 return 0;
3709 /* See if we can find a mode to refer to this field. We should be able to,
3710 but fail if we can't. */
3711 nmode = get_best_mode (lbitsize, lbitpos, 0, 0,
3712 const_p ? TYPE_ALIGN (TREE_TYPE (linner))
3713 : MIN (TYPE_ALIGN (TREE_TYPE (linner)),
3714 TYPE_ALIGN (TREE_TYPE (rinner))),
3715 word_mode, false);
3716 if (nmode == VOIDmode)
3717 return 0;
3719 /* Set signed and unsigned types of the precision of this mode for the
3720 shifts below. */
3721 unsigned_type = lang_hooks.types.type_for_mode (nmode, 1);
3723 /* Compute the bit position and size for the new reference and our offset
3724 within it. If the new reference is the same size as the original, we
3725 won't optimize anything, so return zero. */
3726 nbitsize = GET_MODE_BITSIZE (nmode);
3727 nbitpos = lbitpos & ~ (nbitsize - 1);
3728 lbitpos -= nbitpos;
3729 if (nbitsize == lbitsize)
3730 return 0;
3732 if (BYTES_BIG_ENDIAN)
3733 lbitpos = nbitsize - lbitsize - lbitpos;
3735 /* Make the mask to be used against the extracted field. */
3736 mask = build_int_cst_type (unsigned_type, -1);
3737 mask = const_binop (LSHIFT_EXPR, mask, size_int (nbitsize - lbitsize));
3738 mask = const_binop (RSHIFT_EXPR, mask,
3739 size_int (nbitsize - lbitsize - lbitpos));
3741 if (! const_p)
3742 /* If not comparing with constant, just rework the comparison
3743 and return. */
3744 return fold_build2_loc (loc, code, compare_type,
3745 fold_build2_loc (loc, BIT_AND_EXPR, unsigned_type,
3746 make_bit_field_ref (loc, linner,
3747 unsigned_type,
3748 nbitsize, nbitpos,
3750 mask),
3751 fold_build2_loc (loc, BIT_AND_EXPR, unsigned_type,
3752 make_bit_field_ref (loc, rinner,
3753 unsigned_type,
3754 nbitsize, nbitpos,
3756 mask));
3758 /* Otherwise, we are handling the constant case. See if the constant is too
3759 big for the field. Warn and return a tree for 0 (false) if so. We do
3760 this not only for its own sake, but to avoid having to test for this
3761 error case below. If we didn't, we might generate wrong code.
3763 For unsigned fields, the constant shifted right by the field length should
3764 be all zero. For signed fields, the high-order bits should agree with
3765 the sign bit. */
3767 if (lunsignedp)
3769 if (wi::lrshift (rhs, lbitsize) != 0)
3771 warning (0, "comparison is always %d due to width of bit-field",
3772 code == NE_EXPR);
3773 return constant_boolean_node (code == NE_EXPR, compare_type);
3776 else
3778 wide_int tem = wi::arshift (rhs, lbitsize - 1);
3779 if (tem != 0 && tem != -1)
3781 warning (0, "comparison is always %d due to width of bit-field",
3782 code == NE_EXPR);
3783 return constant_boolean_node (code == NE_EXPR, compare_type);
3787 /* Single-bit compares should always be against zero. */
3788 if (lbitsize == 1 && ! integer_zerop (rhs))
3790 code = code == EQ_EXPR ? NE_EXPR : EQ_EXPR;
3791 rhs = build_int_cst (type, 0);
3794 /* Make a new bitfield reference, shift the constant over the
3795 appropriate number of bits and mask it with the computed mask
3796 (in case this was a signed field). If we changed it, make a new one. */
3797 lhs = make_bit_field_ref (loc, linner, unsigned_type, nbitsize, nbitpos, 1);
3799 rhs = const_binop (BIT_AND_EXPR,
3800 const_binop (LSHIFT_EXPR,
3801 fold_convert_loc (loc, unsigned_type, rhs),
3802 size_int (lbitpos)),
3803 mask);
3805 lhs = build2_loc (loc, code, compare_type,
3806 build2 (BIT_AND_EXPR, unsigned_type, lhs, mask), rhs);
3807 return lhs;
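/* As a rough example of the constant case above, assuming a
   little-endian ILP32 target where the field lands in the low bits
   of the byte get_best_mode picks:

	struct s { unsigned f : 3; } x;
	... x.f == 5 ...

   becomes, approximately,

	(BIT_FIELD_REF <x, 8, 0> & 7) == 5

   i.e. a mode-sized load, a mask, and a constant comparison instead
   of a separate extraction of the 3-bit field.  */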
3810 /* Subroutine for fold_truth_andor_1: decode a field reference.
3812 If EXP is a comparison reference, we return the innermost reference.
3814 *PBITSIZE is set to the number of bits in the reference, *PBITPOS is
3815 set to the starting bit number.
3817 If the innermost field can be completely contained in a mode-sized
3818 unit, *PMODE is set to that mode. Otherwise, it is set to VOIDmode.
3820 *PVOLATILEP is set to 1 if any expression encountered is volatile;
3821 otherwise it is not changed.
3823 *PUNSIGNEDP is set to the signedness of the field.
3825 *PMASK is set to the mask used. This is either contained in a
3826 BIT_AND_EXPR or derived from the width of the field.
3828 *PAND_MASK is set to the mask found in a BIT_AND_EXPR, if any.
3830 Return 0 if this is not a component reference or is one that we can't
3831 do anything with. */
3833 static tree
3834 decode_field_reference (location_t loc, tree exp, HOST_WIDE_INT *pbitsize,
3835 HOST_WIDE_INT *pbitpos, machine_mode *pmode,
3836 int *punsignedp, int *pvolatilep,
3837 tree *pmask, tree *pand_mask)
3839 tree outer_type = 0;
3840 tree and_mask = 0;
3841 tree mask, inner, offset;
3842 tree unsigned_type;
3843 unsigned int precision;
3845 /* All the optimizations using this function assume integer fields.
3846 There are problems with FP fields since the type_for_size call
3847 below can fail for, e.g., XFmode. */
3848 if (! INTEGRAL_TYPE_P (TREE_TYPE (exp)))
3849 return 0;
3851 /* We are interested in the bare arrangement of bits, so strip everything
3852 that doesn't affect the machine mode. However, record the type of the
3853 outermost expression if it may matter below. */
3854 if (CONVERT_EXPR_P (exp)
3855 || TREE_CODE (exp) == NON_LVALUE_EXPR)
3856 outer_type = TREE_TYPE (exp);
3857 STRIP_NOPS (exp);
3859 if (TREE_CODE (exp) == BIT_AND_EXPR)
3861 and_mask = TREE_OPERAND (exp, 1);
3862 exp = TREE_OPERAND (exp, 0);
3863 STRIP_NOPS (exp); STRIP_NOPS (and_mask);
3864 if (TREE_CODE (and_mask) != INTEGER_CST)
3865 return 0;
3868 inner = get_inner_reference (exp, pbitsize, pbitpos, &offset, pmode,
3869 punsignedp, pvolatilep, false);
3870 if ((inner == exp && and_mask == 0)
3871 || *pbitsize < 0 || offset != 0
3872 || TREE_CODE (inner) == PLACEHOLDER_EXPR)
3873 return 0;
3875 /* If the number of bits in the reference is the same as the bitsize of
3876 the outer type, then the outer type gives the signedness. Otherwise
3877 (in case of a small bitfield) the signedness is unchanged. */
3878 if (outer_type && *pbitsize == TYPE_PRECISION (outer_type))
3879 *punsignedp = TYPE_UNSIGNED (outer_type);
3881 /* Compute the mask to access the bitfield. */
3882 unsigned_type = lang_hooks.types.type_for_size (*pbitsize, 1);
3883 precision = TYPE_PRECISION (unsigned_type);
3885 mask = build_int_cst_type (unsigned_type, -1);
3887 mask = const_binop (LSHIFT_EXPR, mask, size_int (precision - *pbitsize));
3888 mask = const_binop (RSHIFT_EXPR, mask, size_int (precision - *pbitsize));
3890 /* Merge it with the mask we found in the BIT_AND_EXPR, if any. */
3891 if (and_mask != 0)
3892 mask = fold_build2_loc (loc, BIT_AND_EXPR, unsigned_type,
3893 fold_convert_loc (loc, unsigned_type, and_mask), mask);
3895 *pmask = mask;
3896 *pand_mask = and_mask;
3897 return inner;
3900 /* Return nonzero if MASK represents a mask of SIZE ones in the low-order
3901 bit positions and MASK is SIGNED. */
3903 static int
3904 all_ones_mask_p (const_tree mask, unsigned int size)
3906 tree type = TREE_TYPE (mask);
3907 unsigned int precision = TYPE_PRECISION (type);
3909 /* If this function returns true when the type of the mask is
3910 UNSIGNED, then there will be errors. In particular see
3911 gcc.c-torture/execute/990326-1.c. There does not appear to be
3912 any documentation paper trail as to why this is so. But the pre
3913 wide-int worked with that restriction and it has been preserved
3914 here. */
3915 if (size > precision || TYPE_SIGN (type) == UNSIGNED)
3916 return false;
3918 return wi::mask (size, false, precision) == mask;
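/* For instance, with an 8-bit signed MASK, all_ones_mask_p (mask, 4)
   holds exactly for MASK == 0b00001111; an unsigned MASK is rejected
   outright, per the comment above.  */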
3921 /* Subroutine for fold: determine if VAL is the INTEGER_CST that
3922 represents the sign bit of EXP's type. If EXP represents a sign
3923 or zero extension, also test VAL against the unextended type.
3924 The return value is the (sub)expression whose sign bit is VAL,
3925 or NULL_TREE otherwise. */
3927 tree
3928 sign_bit_p (tree exp, const_tree val)
3930 int width;
3931 tree t;
3933 /* Tree EXP must have an integral type. */
3934 t = TREE_TYPE (exp);
3935 if (! INTEGRAL_TYPE_P (t))
3936 return NULL_TREE;
3938 /* Tree VAL must be an integer constant. */
3939 if (TREE_CODE (val) != INTEGER_CST
3940 || TREE_OVERFLOW (val))
3941 return NULL_TREE;
3943 width = TYPE_PRECISION (t);
3944 if (wi::only_sign_bit_p (val, width))
3945 return exp;
3947 /* Handle extension from a narrower type. */
3948 if (TREE_CODE (exp) == NOP_EXPR
3949 && TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0))) < width)
3950 return sign_bit_p (TREE_OPERAND (exp, 0), val);
3952 return NULL_TREE;
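/* For example, assuming 32-bit int and 8-bit signed char:

	sign_bit_p (x, 0x80000000)	returns X itself, and
	sign_bit_p ((int) c, 0x80)	returns C,

   the latter because 0x80 is the sign bit of the unextended 8-bit
   type even though it is not the sign bit of int.  */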
3955 /* Subroutine for fold_truth_andor_1: determine if an operand is simple enough
3956 to be evaluated unconditionally. */
3958 static int
3959 simple_operand_p (const_tree exp)
3961 /* Strip any conversions that don't change the machine mode. */
3962 STRIP_NOPS (exp);
3964 return (CONSTANT_CLASS_P (exp)
3965 || TREE_CODE (exp) == SSA_NAME
3966 || (DECL_P (exp)
3967 && ! TREE_ADDRESSABLE (exp)
3968 && ! TREE_THIS_VOLATILE (exp)
3969 && ! DECL_NONLOCAL (exp)
3970 /* Don't regard global variables as simple. They may be
3971 allocated in ways unknown to the compiler (shared memory,
3972 #pragma weak, etc). */
3973 && ! TREE_PUBLIC (exp)
3974 && ! DECL_EXTERNAL (exp)
3975 /* Weakrefs are not safe to be read, since they can be NULL.
3976 They are !TREE_PUBLIC && !DECL_EXTERNAL but still
3977 have DECL_WEAK flag set. */
3978 && (! VAR_OR_FUNCTION_DECL_P (exp) || ! DECL_WEAK (exp))
3979 /* Loading a static variable is unduly expensive, but global
3980 registers aren't expensive. */
3981 && (! TREE_STATIC (exp) || DECL_REGISTER (exp))));
3984 /* Subroutine for fold_truth_andor: determine if an operand is simple enough
3985 to be evaluated unconditionally.
3986 In addition to simple_operand_p, we assume that comparisons, conversions,
3987 and logic-not operations are simple, if their operands are simple, too. */
3989 static bool
3990 simple_operand_p_2 (tree exp)
3992 enum tree_code code;
3994 if (TREE_SIDE_EFFECTS (exp)
3995 || tree_could_trap_p (exp))
3996 return false;
3998 while (CONVERT_EXPR_P (exp))
3999 exp = TREE_OPERAND (exp, 0);
4001 code = TREE_CODE (exp);
4003 if (TREE_CODE_CLASS (code) == tcc_comparison)
4004 return (simple_operand_p (TREE_OPERAND (exp, 0))
4005 && simple_operand_p (TREE_OPERAND (exp, 1)));
4007 if (code == TRUTH_NOT_EXPR)
4008 return simple_operand_p_2 (TREE_OPERAND (exp, 0));
4010 return simple_operand_p (exp);
4014 /* The following functions are subroutines to fold_range_test and allow it to
4015 try to change a logical combination of comparisons into a range test.
4017 For example, both
4018 X == 2 || X == 3 || X == 4 || X == 5
4019 and
4020 X >= 2 && X <= 5
4021 are converted to
4022 (unsigned) (X - 2) <= 3
4024 We describe each set of comparisons as being either inside or outside
4025 a range, using a variable named like IN_P, and then describe the
4026 range with a lower and upper bound. If one of the bounds is omitted,
4027 it represents either the highest or lowest value of the type.
4029 In the comments below, we represent a range by two numbers in brackets
4030 preceded by a "+" to designate being inside that range, or a "-" to
4031 designate being outside that range, so the condition can be inverted by
4032 flipping the prefix. An omitted bound is represented by a "-". For
4033 example, "- [-, 10]" means being outside the range starting at the lowest
4034 possible value and ending at 10, in other words, being greater than 10.
4035 The range "+ [-, -]" is always true and hence the range "- [-, -]" is
4036 always false.
4038 We set up things so that the missing bounds are handled in a consistent
4039 manner so neither a missing bound nor "true" and "false" need to be
4040 handled using a special case. */
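/* As a concrete reading of this notation:

	X == 2 || X == 3 || X == 4 || X == 5	is "+ [2, 5]",
	X > 10					is "- [-, 10]", and
	(unsigned) (X - 2) <= 3			is "+ [2, 5]" again,

   which is why the chains above collapse into a single subtraction
   and unsigned comparison.  */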
4042 /* Return the result of applying CODE to ARG0 and ARG1, but handle the case
4043 of ARG0 and/or ARG1 being omitted, meaning an unlimited range. UPPER0_P
4044 and UPPER1_P are nonzero if the respective argument is an upper bound
4045 and zero for a lower. TYPE, if nonzero, is the type of the result; it
4046 must be specified for a comparison. ARG1 will be converted to ARG0's
4047 type if both are specified. */
4049 static tree
4050 range_binop (enum tree_code code, tree type, tree arg0, int upper0_p,
4051 tree arg1, int upper1_p)
4053 tree tem;
4054 int result;
4055 int sgn0, sgn1;
4057 /* If neither arg represents infinity, do the normal operation.
4058 Else, if not a comparison, return infinity. Else handle the special
4059 comparison rules. Note that most of the cases below won't occur, but
4060 are handled for consistency. */
4062 if (arg0 != 0 && arg1 != 0)
4064 tem = fold_build2 (code, type != 0 ? type : TREE_TYPE (arg0),
4065 arg0, fold_convert (TREE_TYPE (arg0), arg1));
4066 STRIP_NOPS (tem);
4067 return TREE_CODE (tem) == INTEGER_CST ? tem : 0;
4070 if (TREE_CODE_CLASS (code) != tcc_comparison)
4071 return 0;
4073 /* Set SGN[01] to -1 if ARG[01] is an omitted lower bound, 1 for an
4074 omitted upper bound, and 0 if the bound is present. In real maths we
4075 cannot assume open ended ranges are the same. But this is computer
4076 arithmetic, where numbers are finite. We can therefore stand in for any
4077 omitted bound with a value Z greater in magnitude than any representable
4078 number, which permits us to treat omitted bounds as equal. */
4079 sgn0 = arg0 != 0 ? 0 : (upper0_p ? 1 : -1);
4080 sgn1 = arg1 != 0 ? 0 : (upper1_p ? 1 : -1);
4081 switch (code)
4083 case EQ_EXPR:
4084 result = sgn0 == sgn1;
4085 break;
4086 case NE_EXPR:
4087 result = sgn0 != sgn1;
4088 break;
4089 case LT_EXPR:
4090 result = sgn0 < sgn1;
4091 break;
4092 case LE_EXPR:
4093 result = sgn0 <= sgn1;
4094 break;
4095 case GT_EXPR:
4096 result = sgn0 > sgn1;
4097 break;
4098 case GE_EXPR:
4099 result = sgn0 >= sgn1;
4100 break;
4101 default:
4102 gcc_unreachable ();
4105 return constant_boolean_node (result, type);
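/* For example, range_binop (LE_EXPR, type, NULL_TREE, 0, val, 1)
   compares an omitted lower bound against an upper bound VAL: SGN0 is
   -1 and SGN1 is 0, so the result is true -- the missing lower bound
   behaves like minus infinity.  */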
4108 /* Helper routine for make_range. Perform one step for it, return
4109 new expression if the loop should continue or NULL_TREE if it should
4110 stop. */
4112 tree
4113 make_range_step (location_t loc, enum tree_code code, tree arg0, tree arg1,
4114 tree exp_type, tree *p_low, tree *p_high, int *p_in_p,
4115 bool *strict_overflow_p)
4117 tree arg0_type = TREE_TYPE (arg0);
4118 tree n_low, n_high, low = *p_low, high = *p_high;
4119 int in_p = *p_in_p, n_in_p;
4121 switch (code)
4123 case TRUTH_NOT_EXPR:
4124 /* We can only do something if the range is testing for zero. */
4125 if (low == NULL_TREE || high == NULL_TREE
4126 || ! integer_zerop (low) || ! integer_zerop (high))
4127 return NULL_TREE;
4128 *p_in_p = ! in_p;
4129 return arg0;
4131 case EQ_EXPR: case NE_EXPR:
4132 case LT_EXPR: case LE_EXPR: case GE_EXPR: case GT_EXPR:
4133 /* We can only do something if the range is testing for zero
4134 and if the second operand is an integer constant. Note that
4135 saying something is "in" the range we make is done by
4136 complementing IN_P, since IN_P starts out set for the case of
4137 being not equal to zero; "out" is leaving it alone. */
4138 if (low == NULL_TREE || high == NULL_TREE
4139 || ! integer_zerop (low) || ! integer_zerop (high)
4140 || TREE_CODE (arg1) != INTEGER_CST)
4141 return NULL_TREE;
4143 switch (code)
4145 case NE_EXPR: /* - [c, c] */
4146 low = high = arg1;
4147 break;
4148 case EQ_EXPR: /* + [c, c] */
4149 in_p = ! in_p, low = high = arg1;
4150 break;
4151 case GT_EXPR: /* - [-, c] */
4152 low = 0, high = arg1;
4153 break;
4154 case GE_EXPR: /* + [c, -] */
4155 in_p = ! in_p, low = arg1, high = 0;
4156 break;
4157 case LT_EXPR: /* - [c, -] */
4158 low = arg1, high = 0;
4159 break;
4160 case LE_EXPR: /* + [-, c] */
4161 in_p = ! in_p, low = 0, high = arg1;
4162 break;
4163 default:
4164 gcc_unreachable ();
4167 /* If this is an unsigned comparison, we also know that EXP is
4168 greater than or equal to zero. We base the range tests we make
4169 on that fact, so we record it here so we can parse existing
4170 range tests. We test arg0_type since often the return type
4171 of, e.g. EQ_EXPR, is boolean. */
4172 if (TYPE_UNSIGNED (arg0_type) && (low == 0 || high == 0))
4174 if (! merge_ranges (&n_in_p, &n_low, &n_high,
4175 in_p, low, high, 1,
4176 build_int_cst (arg0_type, 0),
4177 NULL_TREE))
4178 return NULL_TREE;
4180 in_p = n_in_p, low = n_low, high = n_high;
4182 /* If the high bound is missing, but we have a nonzero low
4183 bound, reverse the range so it goes from zero to the low bound
4184 minus 1. */
4185 if (high == 0 && low && ! integer_zerop (low))
4187 in_p = ! in_p;
4188 high = range_binop (MINUS_EXPR, NULL_TREE, low, 0,
4189 build_int_cst (TREE_TYPE (low), 1), 0);
4190 low = build_int_cst (arg0_type, 0);
4194 *p_low = low;
4195 *p_high = high;
4196 *p_in_p = in_p;
4197 return arg0;
4199 case NEGATE_EXPR:
4200 /* If flag_wrapv and ARG0_TYPE is signed, make sure
4201 low and high are non-NULL, then normalize will DTRT. */
4202 if (!TYPE_UNSIGNED (arg0_type)
4203 && !TYPE_OVERFLOW_UNDEFINED (arg0_type))
4205 if (low == NULL_TREE)
4206 low = TYPE_MIN_VALUE (arg0_type);
4207 if (high == NULL_TREE)
4208 high = TYPE_MAX_VALUE (arg0_type);
4211 /* (-x) IN [a,b] -> x in [-b, -a] */
4212 n_low = range_binop (MINUS_EXPR, exp_type,
4213 build_int_cst (exp_type, 0),
4214 0, high, 1);
4215 n_high = range_binop (MINUS_EXPR, exp_type,
4216 build_int_cst (exp_type, 0),
4217 0, low, 0);
4218 if (n_high != 0 && TREE_OVERFLOW (n_high))
4219 return NULL_TREE;
4220 goto normalize;
4222 case BIT_NOT_EXPR:
4223 /* ~ X -> -X - 1 */
4224 return build2_loc (loc, MINUS_EXPR, exp_type, negate_expr (arg0),
4225 build_int_cst (exp_type, 1));
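/* E.g. a test on ~x becomes a test on -x - 1, which the MINUS_EXPR
   and NEGATE_EXPR cases can then process: for x in [a, b], ~x lies
   in [-b - 1, -a - 1].  */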
4227 case PLUS_EXPR:
4228 case MINUS_EXPR:
4229 if (TREE_CODE (arg1) != INTEGER_CST)
4230 return NULL_TREE;
4232 /* If flag_wrapv and ARG0_TYPE is signed, then we cannot
4233 move a constant to the other side. */
4234 if (!TYPE_UNSIGNED (arg0_type)
4235 && !TYPE_OVERFLOW_UNDEFINED (arg0_type))
4236 return NULL_TREE;
4238 /* If EXP is signed, any overflow in the computation is undefined,
4239 so we don't worry about it so long as our computations on
4240 the bounds don't overflow. For unsigned, overflow is defined
4241 and this is exactly the right thing. */
4242 n_low = range_binop (code == MINUS_EXPR ? PLUS_EXPR : MINUS_EXPR,
4243 arg0_type, low, 0, arg1, 0);
4244 n_high = range_binop (code == MINUS_EXPR ? PLUS_EXPR : MINUS_EXPR,
4245 arg0_type, high, 1, arg1, 0);
4246 if ((n_low != 0 && TREE_OVERFLOW (n_low))
4247 || (n_high != 0 && TREE_OVERFLOW (n_high)))
4248 return NULL_TREE;
4250 if (TYPE_OVERFLOW_UNDEFINED (arg0_type))
4251 *strict_overflow_p = true;
4253 normalize:
4254 /* Check for an unsigned range which has wrapped around the maximum
4255 value thus making n_high < n_low, and normalize it. */
4256 if (n_low && n_high && tree_int_cst_lt (n_high, n_low))
4258 low = range_binop (PLUS_EXPR, arg0_type, n_high, 0,
4259 build_int_cst (TREE_TYPE (n_high), 1), 0);
4260 high = range_binop (MINUS_EXPR, arg0_type, n_low, 0,
4261 build_int_cst (TREE_TYPE (n_low), 1), 0);
4263 /* If the range is of the form +/- [ x+1, x ], we won't
4264 be able to normalize it. But then, it represents the
4265 whole range or the empty set, so make it
4266 +/- [ -, - ]. */
4267 if (tree_int_cst_equal (n_low, low)
4268 && tree_int_cst_equal (n_high, high))
4269 low = high = 0;
4270 else
4271 in_p = ! in_p;
4273 else
4274 low = n_low, high = n_high;
4276 *p_low = low;
4277 *p_high = high;
4278 *p_in_p = in_p;
4279 return arg0;
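/* As a rough 8-bit unsigned example: X - 250 in + [0, 10] first
   becomes X in + [250, 4], which has wrapped around; the
   normalization above turns it into X in - [5, 249].  */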
4281 CASE_CONVERT:
4282 case NON_LVALUE_EXPR:
4283 if (TYPE_PRECISION (arg0_type) > TYPE_PRECISION (exp_type))
4284 return NULL_TREE;
4286 if (! INTEGRAL_TYPE_P (arg0_type)
4287 || (low != 0 && ! int_fits_type_p (low, arg0_type))
4288 || (high != 0 && ! int_fits_type_p (high, arg0_type)))
4289 return NULL_TREE;
4291 n_low = low, n_high = high;
4293 if (n_low != 0)
4294 n_low = fold_convert_loc (loc, arg0_type, n_low);
4296 if (n_high != 0)
4297 n_high = fold_convert_loc (loc, arg0_type, n_high);
4299 /* If we're converting arg0 from an unsigned type to exp's
4300 signed type, we will be doing the comparison as unsigned.
4301 The tests above have already verified that LOW and HIGH
4302 are both positive.
4304 So we have to ensure that we will handle large unsigned
4305 values the same way that the current signed bounds treat
4306 negative values. */
4308 if (!TYPE_UNSIGNED (exp_type) && TYPE_UNSIGNED (arg0_type))
4310 tree high_positive;
4311 tree equiv_type;
4312 /* For fixed-point modes, we need to pass the saturating flag
4313 as the 2nd parameter. */
4314 if (ALL_FIXED_POINT_MODE_P (TYPE_MODE (arg0_type)))
4315 equiv_type
4316 = lang_hooks.types.type_for_mode (TYPE_MODE (arg0_type),
4317 TYPE_SATURATING (arg0_type));
4318 else
4319 equiv_type
4320 = lang_hooks.types.type_for_mode (TYPE_MODE (arg0_type), 1);
4322 /* A range without an upper bound is, naturally, unbounded.
4323 Since convert would have cropped a very large value, use
4324 the max value for the destination type. */
4325 high_positive
4326 = TYPE_MAX_VALUE (equiv_type) ? TYPE_MAX_VALUE (equiv_type)
4327 : TYPE_MAX_VALUE (arg0_type);
4329 if (TYPE_PRECISION (exp_type) == TYPE_PRECISION (arg0_type))
4330 high_positive = fold_build2_loc (loc, RSHIFT_EXPR, arg0_type,
4331 fold_convert_loc (loc, arg0_type,
4332 high_positive),
4333 build_int_cst (arg0_type, 1));
4335 /* If the low bound is specified, "and" the range with the
4336 range for which the original unsigned value will be
4337 positive. */
4338 if (low != 0)
4340 if (! merge_ranges (&n_in_p, &n_low, &n_high, 1, n_low, n_high,
4341 1, fold_convert_loc (loc, arg0_type,
4342 integer_zero_node),
4343 high_positive))
4344 return NULL_TREE;
4346 in_p = (n_in_p == in_p);
4348 else
4350 /* Otherwise, "or" the range with the range of the input
4351 that will be interpreted as negative. */
4352 if (! merge_ranges (&n_in_p, &n_low, &n_high, 0, n_low, n_high,
4353 1, fold_convert_loc (loc, arg0_type,
4354 integer_zero_node),
4355 high_positive))
4356 return NULL_TREE;
4358 in_p = (in_p != n_in_p);
4362 *p_low = n_low;
4363 *p_high = n_high;
4364 *p_in_p = in_p;
4365 return arg0;
4367 default:
4368 return NULL_TREE;
4372 /* Given EXP, a logical expression, set the range it is testing into
4373 variables denoted by PIN_P, PLOW, and PHIGH. Return the expression
4374 actually being tested. *PLOW and *PHIGH will be made of the same
4375 type as the returned expression. If EXP is not a comparison, we
4376 will most likely not be returning a useful value and range. Set
4377 *STRICT_OVERFLOW_P to true if the return value is only valid
4378 because signed overflow is undefined; otherwise, do not change
4379 *STRICT_OVERFLOW_P. */
4381 tree
4382 make_range (tree exp, int *pin_p, tree *plow, tree *phigh,
4383 bool *strict_overflow_p)
4385 enum tree_code code;
4386 tree arg0, arg1 = NULL_TREE;
4387 tree exp_type, nexp;
4388 int in_p;
4389 tree low, high;
4390 location_t loc = EXPR_LOCATION (exp);
4392 /* Start with simply saying "EXP != 0" and then look at the code of EXP
4393 and see if we can refine the range. Some of the cases below may not
4394 happen, but it doesn't seem worth worrying about this. We keep
4395 iterating as long as make_range_step can refine the range, and
4396 stop once it returns NULL_TREE. */
4398 in_p = 0;
4399 low = high = build_int_cst (TREE_TYPE (exp), 0);
4401 while (1)
4403 code = TREE_CODE (exp);
4404 exp_type = TREE_TYPE (exp);
4405 arg0 = NULL_TREE;
4407 if (IS_EXPR_CODE_CLASS (TREE_CODE_CLASS (code)))
4409 if (TREE_OPERAND_LENGTH (exp) > 0)
4410 arg0 = TREE_OPERAND (exp, 0);
4411 if (TREE_CODE_CLASS (code) == tcc_binary
4412 || TREE_CODE_CLASS (code) == tcc_comparison
4413 || (TREE_CODE_CLASS (code) == tcc_expression
4414 && TREE_OPERAND_LENGTH (exp) > 1))
4415 arg1 = TREE_OPERAND (exp, 1);
4417 if (arg0 == NULL_TREE)
4418 break;
4420 nexp = make_range_step (loc, code, arg0, arg1, exp_type, &low,
4421 &high, &in_p, strict_overflow_p);
4422 if (nexp == NULL_TREE)
4423 break;
4424 exp = nexp;
4427 /* If EXP is a constant, we can evaluate whether this is true or false. */
4428 if (TREE_CODE (exp) == INTEGER_CST)
4430 in_p = in_p == (integer_onep (range_binop (GE_EXPR, integer_type_node,
4431 exp, 0, low, 0))
4432 && integer_onep (range_binop (LE_EXPR, integer_type_node,
4433 exp, 1, high, 1)));
4434 low = high = 0;
4435 exp = 0;
4438 *pin_p = in_p, *plow = low, *phigh = high;
4439 return exp;
4442 /* Given a range, LOW, HIGH, and IN_P, an expression, EXP, and a result
4443 type, TYPE, return an expression to test if EXP is in (or out of, depending
4444 on IN_P) the range. Return 0 if the test couldn't be created. */
4446 tree
4447 build_range_check (location_t loc, tree type, tree exp, int in_p,
4448 tree low, tree high)
4450 tree etype = TREE_TYPE (exp), value;
4452 /* Disable this optimization for function pointer expressions
4453 on targets that require function pointer canonicalization. */
4454 if (targetm.have_canonicalize_funcptr_for_compare ()
4455 && TREE_CODE (etype) == POINTER_TYPE
4456 && TREE_CODE (TREE_TYPE (etype)) == FUNCTION_TYPE)
4457 return NULL_TREE;
4459 if (! in_p)
4461 value = build_range_check (loc, type, exp, 1, low, high);
4462 if (value != 0)
4463 return invert_truthvalue_loc (loc, value);
4465 return 0;
4468 if (low == 0 && high == 0)
4469 return omit_one_operand_loc (loc, type, build_int_cst (type, 1), exp);
4471 if (low == 0)
4472 return fold_build2_loc (loc, LE_EXPR, type, exp,
4473 fold_convert_loc (loc, etype, high));
4475 if (high == 0)
4476 return fold_build2_loc (loc, GE_EXPR, type, exp,
4477 fold_convert_loc (loc, etype, low));
4479 if (operand_equal_p (low, high, 0))
4480 return fold_build2_loc (loc, EQ_EXPR, type, exp,
4481 fold_convert_loc (loc, etype, low));
4483 if (integer_zerop (low))
4485 if (! TYPE_UNSIGNED (etype))
4487 etype = unsigned_type_for (etype);
4488 high = fold_convert_loc (loc, etype, high);
4489 exp = fold_convert_loc (loc, etype, exp);
4491 return build_range_check (loc, type, exp, 1, 0, high);
4494 /* Optimize (c>=1) && (c<=127) into (signed char)c > 0. */
4495 if (integer_onep (low) && TREE_CODE (high) == INTEGER_CST)
4497 int prec = TYPE_PRECISION (etype);
4499 if (wi::mask (prec - 1, false, prec) == high)
4501 if (TYPE_UNSIGNED (etype))
4503 tree signed_etype = signed_type_for (etype);
4504 if (TYPE_PRECISION (signed_etype) != TYPE_PRECISION (etype))
4505 etype
4506 = build_nonstandard_integer_type (TYPE_PRECISION (etype), 0);
4507 else
4508 etype = signed_etype;
4509 exp = fold_convert_loc (loc, etype, exp);
4511 return fold_build2_loc (loc, GT_EXPR, type, exp,
4512 build_int_cst (etype, 0));
4516 /* Optimize (c>=low) && (c<=high) into (c-low>=0) && (c-low<=high-low).
4517 This requires wrap-around arithmetic for the type of the expression.
4518 First make sure that arithmetic in this type is valid, then make sure
4519 that it wraps around. */
4520 if (TREE_CODE (etype) == ENUMERAL_TYPE || TREE_CODE (etype) == BOOLEAN_TYPE)
4521 etype = lang_hooks.types.type_for_size (TYPE_PRECISION (etype),
4522 TYPE_UNSIGNED (etype));
4524 if (TREE_CODE (etype) == INTEGER_TYPE && !TYPE_OVERFLOW_WRAPS (etype))
4526 tree utype, minv, maxv;
4528 /* Check if (unsigned) INT_MAX + 1 == (unsigned) INT_MIN
4529 for the type in question, as we rely on this here. */
4530 utype = unsigned_type_for (etype);
4531 maxv = fold_convert_loc (loc, utype, TYPE_MAX_VALUE (etype));
4532 maxv = range_binop (PLUS_EXPR, NULL_TREE, maxv, 1,
4533 build_int_cst (TREE_TYPE (maxv), 1), 1);
4534 minv = fold_convert_loc (loc, utype, TYPE_MIN_VALUE (etype));
4536 if (integer_zerop (range_binop (NE_EXPR, integer_type_node,
4537 minv, 1, maxv, 1)))
4538 etype = utype;
4539 else
4540 return 0;
4543 high = fold_convert_loc (loc, etype, high);
4544 low = fold_convert_loc (loc, etype, low);
4545 exp = fold_convert_loc (loc, etype, exp);
4547 value = const_binop (MINUS_EXPR, high, low);
4550 if (POINTER_TYPE_P (etype))
4552 if (value != 0 && !TREE_OVERFLOW (value))
4554 low = fold_build1_loc (loc, NEGATE_EXPR, TREE_TYPE (low), low);
4555 return build_range_check (loc, type,
4556 fold_build_pointer_plus_loc (loc, exp, low),
4557 1, build_int_cst (etype, 0), value);
4559 return 0;
4562 if (value != 0 && !TREE_OVERFLOW (value))
4563 return build_range_check (loc, type,
4564 fold_build2_loc (loc, MINUS_EXPR, etype, exp, low),
4565 1, build_int_cst (etype, 0), value);
4567 return 0;
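/* For instance, an inclusive check of an unsigned char EXP against
   the range ['0', '9'] is built, roughly, as

	(unsigned char) (EXP - 48) <= 9

   one subtraction and one unsigned comparison for the whole range.  */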
4570 /* Return the predecessor of VAL in its type, handling the infinite case. */
4572 static tree
4573 range_predecessor (tree val)
4575 tree type = TREE_TYPE (val);
4577 if (INTEGRAL_TYPE_P (type)
4578 && operand_equal_p (val, TYPE_MIN_VALUE (type), 0))
4579 return 0;
4580 else
4581 return range_binop (MINUS_EXPR, NULL_TREE, val, 0,
4582 build_int_cst (TREE_TYPE (val), 1), 0);
4585 /* Return the successor of VAL in its type, handling the infinite case. */
4587 static tree
4588 range_successor (tree val)
4590 tree type = TREE_TYPE (val);
4592 if (INTEGRAL_TYPE_P (type)
4593 && operand_equal_p (val, TYPE_MAX_VALUE (type), 0))
4594 return 0;
4595 else
4596 return range_binop (PLUS_EXPR, NULL_TREE, val, 0,
4597 build_int_cst (TREE_TYPE (val), 1), 0);
4600 /* Given two ranges, see if we can merge them into one. Return 1 if we
4601 can, 0 if we can't. Set the output range into the specified parameters. */
4603 bool
4604 merge_ranges (int *pin_p, tree *plow, tree *phigh, int in0_p, tree low0,
4605 tree high0, int in1_p, tree low1, tree high1)
4607 int no_overlap;
4608 int subset;
4609 int temp;
4610 tree tem;
4611 int in_p;
4612 tree low, high;
4613 int lowequal = ((low0 == 0 && low1 == 0)
4614 || integer_onep (range_binop (EQ_EXPR, integer_type_node,
4615 low0, 0, low1, 0)));
4616 int highequal = ((high0 == 0 && high1 == 0)
4617 || integer_onep (range_binop (EQ_EXPR, integer_type_node,
4618 high0, 1, high1, 1)));
4620 /* Make range 0 be the range that starts first, or ends last if they
4621 start at the same value. Swap them if that is not the case. */
4622 if (integer_onep (range_binop (GT_EXPR, integer_type_node,
4623 low0, 0, low1, 0))
4624 || (lowequal
4625 && integer_onep (range_binop (GT_EXPR, integer_type_node,
4626 high1, 1, high0, 1))))
4628 temp = in0_p, in0_p = in1_p, in1_p = temp;
4629 tem = low0, low0 = low1, low1 = tem;
4630 tem = high0, high0 = high1, high1 = tem;
4633 /* Now flag two cases, whether the ranges are disjoint or whether the
4634 second range is totally subsumed in the first. Note that the tests
4635 below are simplified by the ones above. */
4636 no_overlap = integer_onep (range_binop (LT_EXPR, integer_type_node,
4637 high0, 1, low1, 0));
4638 subset = integer_onep (range_binop (LE_EXPR, integer_type_node,
4639 high1, 1, high0, 1));
4641 /* We now have four cases, depending on whether we are including or
4642 excluding the two ranges. */
4643 if (in0_p && in1_p)
4645 /* If they don't overlap, the result is false. If the second range
4646 is a subset it is the result. Otherwise, the range is from the start
4647 of the second to the end of the first. */
4648 if (no_overlap)
4649 in_p = 0, low = high = 0;
4650 else if (subset)
4651 in_p = 1, low = low1, high = high1;
4652 else
4653 in_p = 1, low = low1, high = high0;
4656 else if (in0_p && ! in1_p)
4658 /* If they don't overlap, the result is the first range. If they are
4659 equal, the result is false. If the second range is a subset of the
4660 first, and the ranges begin at the same place, we go from just after
4661 the end of the second range to the end of the first. If the second
4662 range is not a subset of the first, or if it is a subset and both
4663 ranges end at the same place, the range starts at the start of the
4664 first range and ends just before the second range.
4665 Otherwise, we can't describe this as a single range. */
4666 if (no_overlap)
4667 in_p = 1, low = low0, high = high0;
4668 else if (lowequal && highequal)
4669 in_p = 0, low = high = 0;
4670 else if (subset && lowequal)
4672 low = range_successor (high1);
4673 high = high0;
4674 in_p = 1;
4675 if (low == 0)
4677 /* We are in the weird situation where high0 > high1 but
4678 high1 has no successor. Punt. */
4679 return 0;
4682 else if (! subset || highequal)
4684 low = low0;
4685 high = range_predecessor (low1);
4686 in_p = 1;
4687 if (high == 0)
4689 /* low0 < low1 but low1 has no predecessor. Punt. */
4690 return 0;
4693 else
4694 return 0;
4697 else if (! in0_p && in1_p)
4699 /* If they don't overlap, the result is the second range. If the second
4700 is a subset of the first, the result is false. Otherwise,
4701 the range starts just after the first range and ends at the
4702 end of the second. */
4703 if (no_overlap)
4704 in_p = 1, low = low1, high = high1;
4705 else if (subset || highequal)
4706 in_p = 0, low = high = 0;
4707 else
4709 low = range_successor (high0);
4710 high = high1;
4711 in_p = 1;
4712 if (low == 0)
4714 /* high1 > high0 but high0 has no successor. Punt. */
4715 return 0;
4720 else
4722 /* The case where we are excluding both ranges. Here the complex case
4723 is if they don't overlap. In that case, the only time we have a
4724 range is if they are adjacent. If the second is a subset of the
4725 first, the result is the first. Otherwise, the range to exclude
4726 starts at the beginning of the first range and ends at the end of the
4727 second. */
4728 if (no_overlap)
4730 if (integer_onep (range_binop (EQ_EXPR, integer_type_node,
4731 range_successor (high0),
4732 1, low1, 0)))
4733 in_p = 0, low = low0, high = high1;
4734 else
4736 /* Canonicalize - [min, x] into - [-, x]. */
4737 if (low0 && TREE_CODE (low0) == INTEGER_CST)
4738 switch (TREE_CODE (TREE_TYPE (low0)))
4740 case ENUMERAL_TYPE:
4741 if (TYPE_PRECISION (TREE_TYPE (low0))
4742 != GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (low0))))
4743 break;
4744 /* FALLTHROUGH */
4745 case INTEGER_TYPE:
4746 if (tree_int_cst_equal (low0,
4747 TYPE_MIN_VALUE (TREE_TYPE (low0))))
4748 low0 = 0;
4749 break;
4750 case POINTER_TYPE:
4751 if (TYPE_UNSIGNED (TREE_TYPE (low0))
4752 && integer_zerop (low0))
4753 low0 = 0;
4754 break;
4755 default:
4756 break;
4759 /* Canonicalize - [x, max] into - [x, -]. */
4760 if (high1 && TREE_CODE (high1) == INTEGER_CST)
4761 switch (TREE_CODE (TREE_TYPE (high1)))
4763 case ENUMERAL_TYPE:
4764 if (TYPE_PRECISION (TREE_TYPE (high1))
4765 != GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (high1))))
4766 break;
4767 /* FALLTHROUGH */
4768 case INTEGER_TYPE:
4769 if (tree_int_cst_equal (high1,
4770 TYPE_MAX_VALUE (TREE_TYPE (high1))))
4771 high1 = 0;
4772 break;
4773 case POINTER_TYPE:
4774 if (TYPE_UNSIGNED (TREE_TYPE (high1))
4775 && integer_zerop (range_binop (PLUS_EXPR, NULL_TREE,
4776 high1, 1,
4777 build_int_cst (TREE_TYPE (high1), 1),
4778 1)))
4779 high1 = 0;
4780 break;
4781 default:
4782 break;
4785 /* The ranges might be also adjacent between the maximum and
4786 minimum values of the given type. For
4787 - [{min,-}, x] and - [y, {max,-}] ranges where x + 1 < y
4788 return + [x + 1, y - 1]. */
4789 if (low0 == 0 && high1 == 0)
4791 low = range_successor (high0);
4792 high = range_predecessor (low1);
4793 if (low == 0 || high == 0)
4794 return 0;
4796 in_p = 1;
4798 else
4799 return 0;
4802 else if (subset)
4803 in_p = 0, low = low0, high = high0;
4804 else
4805 in_p = 0, low = low0, high = high1;
4808 *pin_p = in_p, *plow = low, *phigh = high;
4809 return 1;
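/* For example, merging + [2, 5] with + [4, 10] (both included, as for
   the AND of two range tests) yields + [4, 5], while merging - [0, 3]
   with - [4, 9] (both excluded) yields - [0, 9], the two excluded
   ranges being adjacent.  */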
4813 /* Subroutine of fold, looking inside expressions of the form
4814 A op B ? A : C, where ARG0, ARG1 and ARG2 are the three operands
4815 of the COND_EXPR. This function is being used also to optimize
4816 A op B ? C : A, by reversing the comparison first.
4818 Return a folded expression whose code is not a COND_EXPR
4819 anymore, or NULL_TREE if no folding opportunity is found. */
4821 static tree
4822 fold_cond_expr_with_comparison (location_t loc, tree type,
4823 tree arg0, tree arg1, tree arg2)
4825 enum tree_code comp_code = TREE_CODE (arg0);
4826 tree arg00 = TREE_OPERAND (arg0, 0);
4827 tree arg01 = TREE_OPERAND (arg0, 1);
4828 tree arg1_type = TREE_TYPE (arg1);
4829 tree tem;
4831 STRIP_NOPS (arg1);
4832 STRIP_NOPS (arg2);
4834 /* If we have A op 0 ? A : -A, consider applying the following
4835 transformations:
4837 A == 0? A : -A same as -A
4838 A != 0? A : -A same as A
4839 A >= 0? A : -A same as abs (A)
4840 A > 0? A : -A same as abs (A)
4841 A <= 0? A : -A same as -abs (A)
4842 A < 0? A : -A same as -abs (A)
4844 None of these transformations work for modes with signed
4845 zeros. If A is +/-0, the first two transformations will
4846 change the sign of the result (from +0 to -0, or vice
4847 versa). The last four will fix the sign of the result,
4848 even though the original expressions could be positive or
4849 negative, depending on the sign of A.
4851 Note that all these transformations are correct if A is
4852 NaN, since the two alternatives (A and -A) are also NaNs. */
4853 if (!HONOR_SIGNED_ZEROS (element_mode (type))
4854 && (FLOAT_TYPE_P (TREE_TYPE (arg01))
4855 ? real_zerop (arg01)
4856 : integer_zerop (arg01))
4857 && ((TREE_CODE (arg2) == NEGATE_EXPR
4858 && operand_equal_p (TREE_OPERAND (arg2, 0), arg1, 0))
4859 /* In the case that A is of the form X-Y, '-A' (arg2) may
4860 have already been folded to Y-X, check for that. */
4861 || (TREE_CODE (arg1) == MINUS_EXPR
4862 && TREE_CODE (arg2) == MINUS_EXPR
4863 && operand_equal_p (TREE_OPERAND (arg1, 0),
4864 TREE_OPERAND (arg2, 1), 0)
4865 && operand_equal_p (TREE_OPERAND (arg1, 1),
4866 TREE_OPERAND (arg2, 0), 0))))
4867 switch (comp_code)
4869 case EQ_EXPR:
4870 case UNEQ_EXPR:
4871 tem = fold_convert_loc (loc, arg1_type, arg1);
4872 return pedantic_non_lvalue_loc (loc,
4873 fold_convert_loc (loc, type,
4874 negate_expr (tem)));
4875 case NE_EXPR:
4876 case LTGT_EXPR:
4877 return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
4878 case UNGE_EXPR:
4879 case UNGT_EXPR:
4880 if (flag_trapping_math)
4881 break;
4882 /* Fall through. */
4883 case GE_EXPR:
4884 case GT_EXPR:
4885 if (TYPE_UNSIGNED (TREE_TYPE (arg1)))
4886 arg1 = fold_convert_loc (loc, signed_type_for
4887 (TREE_TYPE (arg1)), arg1);
4888 tem = fold_build1_loc (loc, ABS_EXPR, TREE_TYPE (arg1), arg1);
4889 return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, tem));
4890 case UNLE_EXPR:
4891 case UNLT_EXPR:
4892 if (flag_trapping_math)
4893 break;
4894 case LE_EXPR:
4895 case LT_EXPR:
4896 if (TYPE_UNSIGNED (TREE_TYPE (arg1)))
4897 arg1 = fold_convert_loc (loc, signed_type_for
4898 (TREE_TYPE (arg1)), arg1);
4899 tem = fold_build1_loc (loc, ABS_EXPR, TREE_TYPE (arg1), arg1);
4900 return negate_expr (fold_convert_loc (loc, type, tem));
4901 default:
4902 gcc_assert (TREE_CODE_CLASS (comp_code) == tcc_comparison);
4903 break;
4906 /* A != 0 ? A : 0 is simply A, unless A is -0. Likewise
4907 A == 0 ? A : 0 is always 0 unless A is -0. Note that
4908 both transformations are correct when A is NaN: A != 0
4909 is then true, and A == 0 is false. */
4911 if (!HONOR_SIGNED_ZEROS (element_mode (type))
4912 && integer_zerop (arg01) && integer_zerop (arg2))
4914 if (comp_code == NE_EXPR)
4915 return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
4916 else if (comp_code == EQ_EXPR)
4917 return build_zero_cst (type);
4920 /* Try some transformations of A op B ? A : B.
4922 A == B? A : B same as B
4923 A != B? A : B same as A
4924 A >= B? A : B same as max (A, B)
4925 A > B? A : B same as max (B, A)
4926 A <= B? A : B same as min (A, B)
4927 A < B? A : B same as min (B, A)
4929 As above, these transformations don't work in the presence
4930 of signed zeros. For example, if A and B are zeros of
4931 opposite sign, the first two transformations will change
4932 the sign of the result. In the last four, the original
4933 expressions give different results for (A=+0, B=-0) and
4934 (A=-0, B=+0), but the transformed expressions do not.
4936 The first two transformations are correct if either A or B
4937 is a NaN. In the first transformation, the condition will
4938 be false, and B will indeed be chosen. In the case of the
4939 second transformation, the condition A != B will be true,
4940 and A will be chosen.
4942 The conversions to max() and min() are not correct if B is
4943 a number and A is not. The conditions in the original
4944 expressions will be false, so all four give B. The min()
4945 and max() versions would give a NaN instead. */
4946 if (!HONOR_SIGNED_ZEROS (element_mode (type))
4947 && operand_equal_for_comparison_p (arg01, arg2, arg00)
4948 /* Avoid these transformations if the COND_EXPR may be used
4949 as an lvalue in the C++ front-end. PR c++/19199. */
4950 && (in_gimple_form
4951 || VECTOR_TYPE_P (type)
4952 || (! lang_GNU_CXX ()
4953 && strcmp (lang_hooks.name, "GNU Objective-C++") != 0)
4954 || ! maybe_lvalue_p (arg1)
4955 || ! maybe_lvalue_p (arg2)))
4957 tree comp_op0 = arg00;
4958 tree comp_op1 = arg01;
4959 tree comp_type = TREE_TYPE (comp_op0);
4961 /* Avoid adding NOP_EXPRs in case this is an lvalue. */
4962 if (TYPE_MAIN_VARIANT (comp_type) == TYPE_MAIN_VARIANT (type))
4964 comp_type = type;
4965 comp_op0 = arg1;
4966 comp_op1 = arg2;
4969 switch (comp_code)
4971 case EQ_EXPR:
4972 return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, arg2));
4973 case NE_EXPR:
4974 return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
4975 case LE_EXPR:
4976 case LT_EXPR:
4977 case UNLE_EXPR:
4978 case UNLT_EXPR:
4979 /* In C++ a ?: expression can be an lvalue, so put the
4980 operand which will be used if they are equal first
4981 so that we can convert this back to the
4982 corresponding COND_EXPR. */
4983 if (!HONOR_NANS (arg1))
4985 comp_op0 = fold_convert_loc (loc, comp_type, comp_op0);
4986 comp_op1 = fold_convert_loc (loc, comp_type, comp_op1);
4987 tem = (comp_code == LE_EXPR || comp_code == UNLE_EXPR)
4988 ? fold_build2_loc (loc, MIN_EXPR, comp_type, comp_op0, comp_op1)
4989 : fold_build2_loc (loc, MIN_EXPR, comp_type,
4990 comp_op1, comp_op0);
4991 return pedantic_non_lvalue_loc (loc,
4992 fold_convert_loc (loc, type, tem));
4994 break;
4995 case GE_EXPR:
4996 case GT_EXPR:
4997 case UNGE_EXPR:
4998 case UNGT_EXPR:
4999 if (!HONOR_NANS (arg1))
5001 comp_op0 = fold_convert_loc (loc, comp_type, comp_op0);
5002 comp_op1 = fold_convert_loc (loc, comp_type, comp_op1);
5003 tem = (comp_code == GE_EXPR || comp_code == UNGE_EXPR)
5004 ? fold_build2_loc (loc, MAX_EXPR, comp_type, comp_op0, comp_op1)
5005 : fold_build2_loc (loc, MAX_EXPR, comp_type,
5006 comp_op1, comp_op0);
5007 return pedantic_non_lvalue_loc (loc,
5008 fold_convert_loc (loc, type, tem));
5010 break;
5011 case UNEQ_EXPR:
5012 if (!HONOR_NANS (arg1))
5013 return pedantic_non_lvalue_loc (loc,
5014 fold_convert_loc (loc, type, arg2));
5015 break;
5016 case LTGT_EXPR:
5017 if (!HONOR_NANS (arg1))
5018 return pedantic_non_lvalue_loc (loc,
5019 fold_convert_loc (loc, type, arg1));
5020 break;
5021 default:
5022 gcc_assert (TREE_CODE_CLASS (comp_code) == tcc_comparison);
5023 break;
5027 /* If this is A op C1 ? A : C2 with C1 and C2 constant integers,
5028 we might still be able to simplify this. For example,
5029 if C1 is one less or one more than C2, this might have started
5030 out as a MIN or MAX and been transformed by this function.
5031 Only good for INTEGER_TYPEs, because we need TYPE_MAX_VALUE. */
5033 if (INTEGRAL_TYPE_P (type)
5034 && TREE_CODE (arg01) == INTEGER_CST
5035 && TREE_CODE (arg2) == INTEGER_CST)
5036 switch (comp_code)
5038 case EQ_EXPR:
5039 if (TREE_CODE (arg1) == INTEGER_CST)
5040 break;
5041 /* We can replace A with C1 in this case. */
5042 arg1 = fold_convert_loc (loc, type, arg01);
5043 return fold_build3_loc (loc, COND_EXPR, type, arg0, arg1, arg2);
5045 case LT_EXPR:
5046 /* If C1 is C2 + 1, this is min(A, C2), but use ARG00's type for
5047 MIN_EXPR, to preserve the signedness of the comparison. */
5048 if (! operand_equal_p (arg2, TYPE_MAX_VALUE (type),
5049 OEP_ONLY_CONST)
5050 && operand_equal_p (arg01,
5051 const_binop (PLUS_EXPR, arg2,
5052 build_int_cst (type, 1)),
5053 OEP_ONLY_CONST))
5055 tem = fold_build2_loc (loc, MIN_EXPR, TREE_TYPE (arg00), arg00,
5056 fold_convert_loc (loc, TREE_TYPE (arg00),
5057 arg2));
5058 return pedantic_non_lvalue_loc (loc,
5059 fold_convert_loc (loc, type, tem));
5061 break;
5063 case LE_EXPR:
5064 /* If C1 is C2 - 1, this is min(A, C2), with the same care
5065 as above. */
5066 if (! operand_equal_p (arg2, TYPE_MIN_VALUE (type),
5067 OEP_ONLY_CONST)
5068 && operand_equal_p (arg01,
5069 const_binop (MINUS_EXPR, arg2,
5070 build_int_cst (type, 1)),
5071 OEP_ONLY_CONST))
5073 tem = fold_build2_loc (loc, MIN_EXPR, TREE_TYPE (arg00), arg00,
5074 fold_convert_loc (loc, TREE_TYPE (arg00),
5075 arg2));
5076 return pedantic_non_lvalue_loc (loc,
5077 fold_convert_loc (loc, type, tem));
5079 break;
5081 case GT_EXPR:
5082 /* If C1 is C2 - 1, this is max(A, C2), but use ARG00's type for
5083 MAX_EXPR, to preserve the signedness of the comparison. */
5084 if (! operand_equal_p (arg2, TYPE_MIN_VALUE (type),
5085 OEP_ONLY_CONST)
5086 && operand_equal_p (arg01,
5087 const_binop (MINUS_EXPR, arg2,
5088 build_int_cst (type, 1)),
5089 OEP_ONLY_CONST))
5091 tem = fold_build2_loc (loc, MAX_EXPR, TREE_TYPE (arg00), arg00,
5092 fold_convert_loc (loc, TREE_TYPE (arg00),
5093 arg2));
5094 return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, tem));
5096 break;
5098 case GE_EXPR:
5099 /* If C1 is C2 + 1, this is max(A, C2), with the same care as above. */
5100 if (! operand_equal_p (arg2, TYPE_MAX_VALUE (type),
5101 OEP_ONLY_CONST)
5102 && operand_equal_p (arg01,
5103 const_binop (PLUS_EXPR, arg2,
5104 build_int_cst (type, 1)),
5105 OEP_ONLY_CONST))
5107 tem = fold_build2_loc (loc, MAX_EXPR, TREE_TYPE (arg00), arg00,
5108 fold_convert_loc (loc, TREE_TYPE (arg00),
5109 arg2));
5110 return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, tem));
5112 break;
5113 case NE_EXPR:
5114 break;
5115 default:
5116 gcc_unreachable ();
5119 return NULL_TREE;
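/* Taken together, for integer operands with no NaNs or signed zeros
   to honor, the cases above fold e.g.

	x > 0 ? x : -x		into ABS_EXPR <x>,
	x < y ? x : y		into MIN_EXPR <y, x>, and
	x < 10 ? x : 9		into MIN_EXPR <x, 9>,

   the last one via the A op C1 ? A : C2 case, since C1 == C2 + 1.  */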
5124 #ifndef LOGICAL_OP_NON_SHORT_CIRCUIT
5125 #define LOGICAL_OP_NON_SHORT_CIRCUIT \
5126 (BRANCH_COST (optimize_function_for_speed_p (cfun), \
5127 false) >= 2)
5128 #endif
5130 /* EXP is some logical combination of boolean tests. See if we can
5131 merge it into some range test. Return the new tree if so. */
5133 static tree
5134 fold_range_test (location_t loc, enum tree_code code, tree type,
5135 tree op0, tree op1)
5137 int or_op = (code == TRUTH_ORIF_EXPR
5138 || code == TRUTH_OR_EXPR);
5139 int in0_p, in1_p, in_p;
5140 tree low0, low1, low, high0, high1, high;
5141 bool strict_overflow_p = false;
5142 tree tem, lhs, rhs;
5143 const char * const warnmsg = G_("assuming signed overflow does not occur "
5144 "when simplifying range test");
5146 if (!INTEGRAL_TYPE_P (type))
5147 return 0;
5149 lhs = make_range (op0, &in0_p, &low0, &high0, &strict_overflow_p);
5150 rhs = make_range (op1, &in1_p, &low1, &high1, &strict_overflow_p);
5152 /* If this is an OR operation, invert both sides; we will invert
5153 again at the end. */
5154 if (or_op)
5155 in0_p = ! in0_p, in1_p = ! in1_p;
5157 /* If both expressions are the same, if we can merge the ranges, and we
5158 can build the range test, return it or it inverted. If one of the
5159 ranges is always true or always false, consider it to be the same
5160 expression as the other. */
5161 if ((lhs == 0 || rhs == 0 || operand_equal_p (lhs, rhs, 0))
5162 && merge_ranges (&in_p, &low, &high, in0_p, low0, high0,
5163 in1_p, low1, high1)
5164 && 0 != (tem = (build_range_check (loc, type,
5165 lhs != 0 ? lhs
5166 : rhs != 0 ? rhs : integer_zero_node,
5167 in_p, low, high))))
5169 if (strict_overflow_p)
5170 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_COMPARISON);
5171 return or_op ? invert_truthvalue_loc (loc, tem) : tem;
5174 /* On machines where the branch cost is expensive, if this is a
5175 short-circuited branch and the underlying object on both sides
5176 is the same, make a non-short-circuit operation. */
5177 else if (LOGICAL_OP_NON_SHORT_CIRCUIT
5178 && lhs != 0 && rhs != 0
5179 && (code == TRUTH_ANDIF_EXPR
5180 || code == TRUTH_ORIF_EXPR)
5181 && operand_equal_p (lhs, rhs, 0))
5183 /* If simple enough, just rewrite. Otherwise, make a SAVE_EXPR
5184 unless we are at top level or LHS contains a PLACEHOLDER_EXPR, in
5185 which cases we can't do this. */
5186 if (simple_operand_p (lhs))
5187 return build2_loc (loc, code == TRUTH_ANDIF_EXPR
5188 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR,
5189 type, op0, op1);
5191 else if (!lang_hooks.decls.global_bindings_p ()
5192 && !CONTAINS_PLACEHOLDER_P (lhs))
5194 tree common = save_expr (lhs);
5196 if (0 != (lhs = build_range_check (loc, type, common,
5197 or_op ? ! in0_p : in0_p,
5198 low0, high0))
5199 && (0 != (rhs = build_range_check (loc, type, common,
5200 or_op ? ! in1_p : in1_p,
5201 low1, high1))))
5203 if (strict_overflow_p)
5204 fold_overflow_warning (warnmsg,
5205 WARN_STRICT_OVERFLOW_COMPARISON);
5206 return build2_loc (loc, code == TRUTH_ANDIF_EXPR
5207 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR,
5208 type, lhs, rhs);
5213 return 0;
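/* E.g. for the classic

	ch >= '0' && ch <= '9'

   both operands describe ranges of the same variable CH; merge_ranges
   combines them into + ['0', '9'] and build_range_check then emits a
   single unsigned subtract-and-compare, as sketched earlier.  */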
5216 /* Subroutine for fold_truth_andor_1: C is an INTEGER_CST interpreted as a P
5217 bit value. Arrange things so the extra bits will be set to zero if and
5218 only if C is sign-extended to its full width. If MASK is nonzero,
5219 it is an INTEGER_CST that should be AND'ed with the extra bits. */
5221 static tree
5222 unextend (tree c, int p, int unsignedp, tree mask)
5224 tree type = TREE_TYPE (c);
5225 int modesize = GET_MODE_BITSIZE (TYPE_MODE (type));
5226 tree temp;
5228 if (p == modesize || unsignedp)
5229 return c;
5231 /* We work by getting just the sign bit into the low-order bit, then
5232 into the high-order bit, then sign-extend. We then XOR that value
5233 with C. */
5234 temp = build_int_cst (TREE_TYPE (c), wi::extract_uhwi (c, p - 1, 1));
5236 /* We must use a signed type in order to get an arithmetic right shift.
5237 However, we must also avoid introducing accidental overflows, so that
5238 a subsequent call to integer_zerop will work. Hence we must
5239 do the type conversion here. At this point, the constant is either
5240 zero or one, and the conversion to a signed type can never overflow.
5241 We could get an overflow if this conversion is done anywhere else. */
5242 if (TYPE_UNSIGNED (type))
5243 temp = fold_convert (signed_type_for (type), temp);
5245 temp = const_binop (LSHIFT_EXPR, temp, size_int (modesize - 1));
5246 temp = const_binop (RSHIFT_EXPR, temp, size_int (modesize - p - 1));
5247 if (mask != 0)
5248 temp = const_binop (BIT_AND_EXPR, temp,
5249 fold_convert (TREE_TYPE (c), mask));
5250 /* If necessary, convert the type back to match the type of C. */
5251 if (TYPE_UNSIGNED (type))
5252 temp = fold_convert (type, temp);
5254 return fold_convert (type, const_binop (BIT_XOR_EXPR, c, temp));
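/* A rough example with an 8-bit mode, P == 3, C == 0b101 and no MASK:
   bit P - 1 of C is 1, the two shifts turn it into TEMP == 0b11111000,
   and C ^ TEMP == 0b11111101 -- the extra bits are nonzero here
   precisely because C was not already sign-extended from 3 bits.  */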
5257 /* For an expression that has the form
5258 (A && B) || ~B
5260 (A || B) && ~B,
5261 we can drop one of the inner expressions and simplify to
5262 A || ~B
5264 A && ~B
5265 LOC is the location of the resulting expression. OP is the inner
5266 logical operation; the left-hand side in the examples above, while CMPOP
5267 is the right-hand side. RHS_ONLY is used to prevent us from accidentally
5268 removing a condition that guards another, as in
5269 (A != NULL && A->...) || A == NULL
5270 which we must not transform. If RHS_ONLY is true, only eliminate the
5271 right-most operand of the inner logical operation. */
5273 static tree
5274 merge_truthop_with_opposite_arm (location_t loc, tree op, tree cmpop,
5275 bool rhs_only)
5277 tree type = TREE_TYPE (cmpop);
5278 enum tree_code code = TREE_CODE (cmpop);
5279 enum tree_code truthop_code = TREE_CODE (op);
5280 tree lhs = TREE_OPERAND (op, 0);
5281 tree rhs = TREE_OPERAND (op, 1);
5282 tree orig_lhs = lhs, orig_rhs = rhs;
5283 enum tree_code rhs_code = TREE_CODE (rhs);
5284 enum tree_code lhs_code = TREE_CODE (lhs);
5285 enum tree_code inv_code;
5287 if (TREE_SIDE_EFFECTS (op) || TREE_SIDE_EFFECTS (cmpop))
5288 return NULL_TREE;
5290 if (TREE_CODE_CLASS (code) != tcc_comparison)
5291 return NULL_TREE;
5293 if (rhs_code == truthop_code)
5295 tree newrhs = merge_truthop_with_opposite_arm (loc, rhs, cmpop, rhs_only);
5296 if (newrhs != NULL_TREE)
5298 rhs = newrhs;
5299 rhs_code = TREE_CODE (rhs);
5302 if (lhs_code == truthop_code && !rhs_only)
5304 tree newlhs = merge_truthop_with_opposite_arm (loc, lhs, cmpop, false);
5305 if (newlhs != NULL_TREE)
5307 lhs = newlhs;
5308 lhs_code = TREE_CODE (lhs);
5312 inv_code = invert_tree_comparison (code, HONOR_NANS (type));
5313 if (inv_code == rhs_code
5314 && operand_equal_p (TREE_OPERAND (rhs, 0), TREE_OPERAND (cmpop, 0), 0)
5315 && operand_equal_p (TREE_OPERAND (rhs, 1), TREE_OPERAND (cmpop, 1), 0))
5316 return lhs;
5317 if (!rhs_only && inv_code == lhs_code
5318 && operand_equal_p (TREE_OPERAND (lhs, 0), TREE_OPERAND (cmpop, 0), 0)
5319 && operand_equal_p (TREE_OPERAND (lhs, 1), TREE_OPERAND (cmpop, 1), 0))
5320 return rhs;
5321 if (rhs != orig_rhs || lhs != orig_lhs)
5322 return fold_build2_loc (loc, truthop_code, TREE_TYPE (cmpop),
5323 lhs, rhs);
5324 return NULL_TREE;
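/* Concretely, with OP == (x != 0 && y < 5), CMPOP == (x == 0) and
   RHS_ONLY false, the inverted comparison code NE_EXPR matches the
   x != 0 arm, so (x != 0 && y < 5) || x == 0 simplifies to
   (y < 5) || x == 0.  */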
5327 /* Find ways of folding logical expressions of LHS and RHS:
5328 Try to merge two comparisons to the same innermost item.
5329 Look for range tests like "ch >= '0' && ch <= '9'".
5330 Look for combinations of simple terms on machines with expensive branches
5331 and evaluate the RHS unconditionally.
5333 For example, if we have p->a == 2 && p->b == 4 and we can make an
5334 object large enough to span both A and B, we can do this with a comparison
5335 against the object ANDed with the a mask.
5337 If we have p->a == q->a && p->b == q->b, we may be able to use bit masking
5338 operations to do this with one comparison.
5340 We check for both normal comparisons and the BIT_AND_EXPRs made by this
5341 function and the one above.
5343 CODE is the logical operation being done. It can be TRUTH_ANDIF_EXPR,
5344 TRUTH_AND_EXPR, TRUTH_ORIF_EXPR, or TRUTH_OR_EXPR.
5346 TRUTH_TYPE is the type of the logical operand and LHS and RHS are its
5347 two operands.
5349 We return the simplified tree or 0 if no optimization is possible. */
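/* As a rough sketch of the single-comparison case, assuming two
   adjacent 4-bit fields that land in one byte with a little-endian
   bit layout:

	struct s { unsigned a : 4; unsigned b : 4; } *p;
	... p->a == 2 && p->b == 4 ...

   can become, approximately,

	(BIT_FIELD_REF <*p, 8, 0> & 0xff) == 0x42

   one load and one comparison instead of two.  */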
5351 static tree
5352 fold_truth_andor_1 (location_t loc, enum tree_code code, tree truth_type,
5353 tree lhs, tree rhs)
5355 /* If this is the "or" of two comparisons, we can do something if
5356 the comparisons are NE_EXPR. If this is the "and", we can do something
5357 if the comparisons are EQ_EXPR. I.e.,
5358 (a->b == 2 && a->c == 4) can become (a->new == NEW).
5360 WANTED_CODE is this operation code. For single bit fields, we can
5361 convert EQ_EXPR to NE_EXPR so we need not reject the "wrong"
5362 comparison for one-bit fields. */
5364 enum tree_code wanted_code;
5365 enum tree_code lcode, rcode;
5366 tree ll_arg, lr_arg, rl_arg, rr_arg;
5367 tree ll_inner, lr_inner, rl_inner, rr_inner;
5368 HOST_WIDE_INT ll_bitsize, ll_bitpos, lr_bitsize, lr_bitpos;
5369 HOST_WIDE_INT rl_bitsize, rl_bitpos, rr_bitsize, rr_bitpos;
5370 HOST_WIDE_INT xll_bitpos, xlr_bitpos, xrl_bitpos, xrr_bitpos;
5371 HOST_WIDE_INT lnbitsize, lnbitpos, rnbitsize, rnbitpos;
5372 int ll_unsignedp, lr_unsignedp, rl_unsignedp, rr_unsignedp;
5373 machine_mode ll_mode, lr_mode, rl_mode, rr_mode;
5374 machine_mode lnmode, rnmode;
5375 tree ll_mask, lr_mask, rl_mask, rr_mask;
5376 tree ll_and_mask, lr_and_mask, rl_and_mask, rr_and_mask;
5377 tree l_const, r_const;
5378 tree lntype, rntype, result;
5379 HOST_WIDE_INT first_bit, end_bit;
5380 int volatilep;
5382 /* Start by getting the comparison codes. Fail if anything is volatile.
5383 If one operand is a BIT_AND_EXPR with the constant one, treat it as if
5384 it were surrounded with a NE_EXPR. */
5386 if (TREE_SIDE_EFFECTS (lhs) || TREE_SIDE_EFFECTS (rhs))
5387 return 0;
5389 lcode = TREE_CODE (lhs);
5390 rcode = TREE_CODE (rhs);
5392 if (lcode == BIT_AND_EXPR && integer_onep (TREE_OPERAND (lhs, 1)))
5394 lhs = build2 (NE_EXPR, truth_type, lhs,
5395 build_int_cst (TREE_TYPE (lhs), 0));
5396 lcode = NE_EXPR;
5399 if (rcode == BIT_AND_EXPR && integer_onep (TREE_OPERAND (rhs, 1)))
5401 rhs = build2 (NE_EXPR, truth_type, rhs,
5402 build_int_cst (TREE_TYPE (rhs), 0));
5403 rcode = NE_EXPR;
5406 if (TREE_CODE_CLASS (lcode) != tcc_comparison
5407 || TREE_CODE_CLASS (rcode) != tcc_comparison)
5408 return 0;
5410 ll_arg = TREE_OPERAND (lhs, 0);
5411 lr_arg = TREE_OPERAND (lhs, 1);
5412 rl_arg = TREE_OPERAND (rhs, 0);
5413 rr_arg = TREE_OPERAND (rhs, 1);
5415 /* Simplify (x<y) && (x==y) into (x<=y) and related optimizations. */
5416 if (simple_operand_p (ll_arg)
5417 && simple_operand_p (lr_arg))
5419 if (operand_equal_p (ll_arg, rl_arg, 0)
5420 && operand_equal_p (lr_arg, rr_arg, 0))
5422 result = combine_comparisons (loc, code, lcode, rcode,
5423 truth_type, ll_arg, lr_arg);
5424 if (result)
5425 return result;
5427 else if (operand_equal_p (ll_arg, rr_arg, 0)
5428 && operand_equal_p (lr_arg, rl_arg, 0))
5430 result = combine_comparisons (loc, code, lcode,
5431 swap_tree_comparison (rcode),
5432 truth_type, ll_arg, lr_arg);
5433 if (result)
5434 return result;
5438 code = ((code == TRUTH_AND_EXPR || code == TRUTH_ANDIF_EXPR)
5439 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR);
5441 /* If the RHS can be evaluated unconditionally and its operands are
5442 simple, it wins to evaluate the RHS unconditionally on machines
5443 with expensive branches. In this case, this isn't a comparison
5444 that can be merged. */
5446 if (BRANCH_COST (optimize_function_for_speed_p (cfun),
5447 false) >= 2
5448 && ! FLOAT_TYPE_P (TREE_TYPE (rl_arg))
5449 && simple_operand_p (rl_arg)
5450 && simple_operand_p (rr_arg))
5452 /* Convert (a != 0) || (b != 0) into (a | b) != 0. */
5453 if (code == TRUTH_OR_EXPR
5454 && lcode == NE_EXPR && integer_zerop (lr_arg)
5455 && rcode == NE_EXPR && integer_zerop (rr_arg)
5456 && TREE_TYPE (ll_arg) == TREE_TYPE (rl_arg)
5457 && INTEGRAL_TYPE_P (TREE_TYPE (ll_arg)))
5458 return build2_loc (loc, NE_EXPR, truth_type,
5459 build2 (BIT_IOR_EXPR, TREE_TYPE (ll_arg),
5460 ll_arg, rl_arg),
5461 build_int_cst (TREE_TYPE (ll_arg), 0));
5463 /* Convert (a == 0) && (b == 0) into (a | b) == 0. */
5464 if (code == TRUTH_AND_EXPR
5465 && lcode == EQ_EXPR && integer_zerop (lr_arg)
5466 && rcode == EQ_EXPR && integer_zerop (rr_arg)
5467 && TREE_TYPE (ll_arg) == TREE_TYPE (rl_arg)
5468 && INTEGRAL_TYPE_P (TREE_TYPE (ll_arg)))
5469 return build2_loc (loc, EQ_EXPR, truth_type,
5470 build2 (BIT_IOR_EXPR, TREE_TYPE (ll_arg),
5471 ll_arg, rl_arg),
5472 build_int_cst (TREE_TYPE (ll_arg), 0));
5475 /* See if the comparisons can be merged. Then get all the parameters for
5476 each side. */
5478 if ((lcode != EQ_EXPR && lcode != NE_EXPR)
5479 || (rcode != EQ_EXPR && rcode != NE_EXPR))
5480 return 0;
5482 volatilep = 0;
5483 ll_inner = decode_field_reference (loc, ll_arg,
5484 &ll_bitsize, &ll_bitpos, &ll_mode,
5485 &ll_unsignedp, &volatilep, &ll_mask,
5486 &ll_and_mask);
5487 lr_inner = decode_field_reference (loc, lr_arg,
5488 &lr_bitsize, &lr_bitpos, &lr_mode,
5489 &lr_unsignedp, &volatilep, &lr_mask,
5490 &lr_and_mask);
5491 rl_inner = decode_field_reference (loc, rl_arg,
5492 &rl_bitsize, &rl_bitpos, &rl_mode,
5493 &rl_unsignedp, &volatilep, &rl_mask,
5494 &rl_and_mask);
5495 rr_inner = decode_field_reference (loc, rr_arg,
5496 &rr_bitsize, &rr_bitpos, &rr_mode,
5497 &rr_unsignedp, &volatilep, &rr_mask,
5498 &rr_and_mask);
5500 /* The inner operation on the lhs of each comparison must be the
5501 same if we are to be able to do anything.
5502 Then see if we have constants. If not, the same must be true for
5503 the rhs's. */
5504 if (volatilep || ll_inner == 0 || rl_inner == 0
5505 || ! operand_equal_p (ll_inner, rl_inner, 0))
5506 return 0;
5508 if (TREE_CODE (lr_arg) == INTEGER_CST
5509 && TREE_CODE (rr_arg) == INTEGER_CST)
5510 l_const = lr_arg, r_const = rr_arg;
5511 else if (lr_inner == 0 || rr_inner == 0
5512 || ! operand_equal_p (lr_inner, rr_inner, 0))
5513 return 0;
5514 else
5515 l_const = r_const = 0;
5517 /* If either comparison code is not correct for our logical operation,
5518 fail. However, we can convert a one-bit comparison against zero into
5519 the opposite comparison against that bit being set in the field. */
5521 wanted_code = (code == TRUTH_AND_EXPR ? EQ_EXPR : NE_EXPR);
5522 if (lcode != wanted_code)
5524 if (l_const && integer_zerop (l_const) && integer_pow2p (ll_mask))
5526 /* Make the left operand unsigned, since we are only interested
5527 in the value of one bit. Otherwise we are doing the wrong
5528 thing below. */
5529 ll_unsignedp = 1;
5530 l_const = ll_mask;
5532 else
5533 return 0;
5536 /* This is analogous to the code for l_const above. */
5537 if (rcode != wanted_code)
5539 if (r_const && integer_zerop (r_const) && integer_pow2p (rl_mask))
5541 rl_unsignedp = 1;
5542 r_const = rl_mask;
5544 else
5545 return 0;
5548 /* See if we can find a mode that contains both fields being compared on
5549 the left. If we can't, fail. Otherwise, update all constants and masks
5550 to be relative to a field of that size. */
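/* For instance, given
     struct S { unsigned a : 4; unsigned b : 4; } s;
   the test s.a == 1 && s.b == 2 compares two 4-bit fields that both
   fit in a single byte, so get_best_mode can return a QImode-sized
   field and both comparisons become one masked compare of that byte.
   (Illustration only; the mode actually chosen depends on the
   target's alignment and access rules.)  */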
5551 first_bit = MIN (ll_bitpos, rl_bitpos);
5552 end_bit = MAX (ll_bitpos + ll_bitsize, rl_bitpos + rl_bitsize);
5553 lnmode = get_best_mode (end_bit - first_bit, first_bit, 0, 0,
5554 TYPE_ALIGN (TREE_TYPE (ll_inner)), word_mode,
5555 volatilep);
5556 if (lnmode == VOIDmode)
5557 return 0;
5559 lnbitsize = GET_MODE_BITSIZE (lnmode);
5560 lnbitpos = first_bit & ~ (lnbitsize - 1);
5561 lntype = lang_hooks.types.type_for_size (lnbitsize, 1);
5562 xll_bitpos = ll_bitpos - lnbitpos, xrl_bitpos = rl_bitpos - lnbitpos;
5564 if (BYTES_BIG_ENDIAN)
5566 xll_bitpos = lnbitsize - xll_bitpos - ll_bitsize;
5567 xrl_bitpos = lnbitsize - xrl_bitpos - rl_bitsize;
5570 ll_mask = const_binop (LSHIFT_EXPR, fold_convert_loc (loc, lntype, ll_mask),
5571 size_int (xll_bitpos));
5572 rl_mask = const_binop (LSHIFT_EXPR, fold_convert_loc (loc, lntype, rl_mask),
5573 size_int (xrl_bitpos));
5575 if (l_const)
5577 l_const = fold_convert_loc (loc, lntype, l_const);
5578 l_const = unextend (l_const, ll_bitsize, ll_unsignedp, ll_and_mask);
5579 l_const = const_binop (LSHIFT_EXPR, l_const, size_int (xll_bitpos));
5580 if (! integer_zerop (const_binop (BIT_AND_EXPR, l_const,
5581 fold_build1_loc (loc, BIT_NOT_EXPR,
5582 lntype, ll_mask))))
5584 warning (0, "comparison is always %d", wanted_code == NE_EXPR);
5586 return constant_boolean_node (wanted_code == NE_EXPR, truth_type);
5589 if (r_const)
5591 r_const = fold_convert_loc (loc, lntype, r_const);
5592 r_const = unextend (r_const, rl_bitsize, rl_unsignedp, rl_and_mask);
5593 r_const = const_binop (LSHIFT_EXPR, r_const, size_int (xrl_bitpos));
5594 if (! integer_zerop (const_binop (BIT_AND_EXPR, r_const,
5595 fold_build1_loc (loc, BIT_NOT_EXPR,
5596 lntype, rl_mask))))
5598 warning (0, "comparison is always %d", wanted_code == NE_EXPR);
5600 return constant_boolean_node (wanted_code == NE_EXPR, truth_type);
5604 /* If the right sides are not constant, do the same for them. Also,
5605 disallow this optimization if a size or signedness mismatch occurs
5606 between the left and right sides. */
5607 if (l_const == 0)
5609 if (ll_bitsize != lr_bitsize || rl_bitsize != rr_bitsize
5610 || ll_unsignedp != lr_unsignedp || rl_unsignedp != rr_unsignedp
5611 /* Make sure the two fields on the right
5612 correspond to the left without being swapped. */
5613 || ll_bitpos - rl_bitpos != lr_bitpos - rr_bitpos)
5614 return 0;
5616 first_bit = MIN (lr_bitpos, rr_bitpos);
5617 end_bit = MAX (lr_bitpos + lr_bitsize, rr_bitpos + rr_bitsize);
5618 rnmode = get_best_mode (end_bit - first_bit, first_bit, 0, 0,
5619 TYPE_ALIGN (TREE_TYPE (lr_inner)), word_mode,
5620 volatilep);
5621 if (rnmode == VOIDmode)
5622 return 0;
5624 rnbitsize = GET_MODE_BITSIZE (rnmode);
5625 rnbitpos = first_bit & ~ (rnbitsize - 1);
5626 rntype = lang_hooks.types.type_for_size (rnbitsize, 1);
5627 xlr_bitpos = lr_bitpos - rnbitpos, xrr_bitpos = rr_bitpos - rnbitpos;
5629 if (BYTES_BIG_ENDIAN)
5631 xlr_bitpos = rnbitsize - xlr_bitpos - lr_bitsize;
5632 xrr_bitpos = rnbitsize - xrr_bitpos - rr_bitsize;
5635 lr_mask = const_binop (LSHIFT_EXPR, fold_convert_loc (loc,
5636 rntype, lr_mask),
5637 size_int (xlr_bitpos));
5638 rr_mask = const_binop (LSHIFT_EXPR, fold_convert_loc (loc,
5639 rntype, rr_mask),
5640 size_int (xrr_bitpos));
5642 /* Make a mask that corresponds to both fields being compared.
5643 Do this for both items being compared. If the operands are the
5644 same size and the bits being compared are in the same position
5645 then we can do this by masking both and comparing the masked
5646 results. */
5647 ll_mask = const_binop (BIT_IOR_EXPR, ll_mask, rl_mask);
5648 lr_mask = const_binop (BIT_IOR_EXPR, lr_mask, rr_mask);
5649 if (lnbitsize == rnbitsize && xll_bitpos == xlr_bitpos)
5651 lhs = make_bit_field_ref (loc, ll_inner, lntype, lnbitsize, lnbitpos,
5652 ll_unsignedp || rl_unsignedp);
5653 if (! all_ones_mask_p (ll_mask, lnbitsize))
5654 lhs = build2 (BIT_AND_EXPR, lntype, lhs, ll_mask);
5656 rhs = make_bit_field_ref (loc, lr_inner, rntype, rnbitsize, rnbitpos,
5657 lr_unsignedp || rr_unsignedp);
5658 if (! all_ones_mask_p (lr_mask, rnbitsize))
5659 rhs = build2 (BIT_AND_EXPR, rntype, rhs, lr_mask);
5661 return build2_loc (loc, wanted_code, truth_type, lhs, rhs);
5664 /* There is still another way we can do something: If both pairs of
5665 fields being compared are adjacent, we may be able to make a wider
5666 field containing them both.
5668 Note that we still must mask the lhs/rhs expressions. Furthermore,
5669 the mask must be shifted to account for the shift done by
5670 make_bit_field_ref. */
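/* For example, if both s1 and s2 contain adjacent 4-bit fields a
   (bits 0-3) and b (bits 4-7), then
     s1.a == s2.a && s1.b == s2.b
   can be tested with a single 8-bit load and compare on each side,
   instead of two 4-bit extractions per side.  */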
5671 if ((ll_bitsize + ll_bitpos == rl_bitpos
5672 && lr_bitsize + lr_bitpos == rr_bitpos)
5673 || (ll_bitpos == rl_bitpos + rl_bitsize
5674 && lr_bitpos == rr_bitpos + rr_bitsize))
5676 tree type;
5678 lhs = make_bit_field_ref (loc, ll_inner, lntype,
5679 ll_bitsize + rl_bitsize,
5680 MIN (ll_bitpos, rl_bitpos), ll_unsignedp);
5681 rhs = make_bit_field_ref (loc, lr_inner, rntype,
5682 lr_bitsize + rr_bitsize,
5683 MIN (lr_bitpos, rr_bitpos), lr_unsignedp);
5685 ll_mask = const_binop (RSHIFT_EXPR, ll_mask,
5686 size_int (MIN (xll_bitpos, xrl_bitpos)));
5687 lr_mask = const_binop (RSHIFT_EXPR, lr_mask,
5688 size_int (MIN (xlr_bitpos, xrr_bitpos)));
5690 /* Convert to the smaller type before masking out unwanted bits. */
5691 type = lntype;
5692 if (lntype != rntype)
5694 if (lnbitsize > rnbitsize)
5696 lhs = fold_convert_loc (loc, rntype, lhs);
5697 ll_mask = fold_convert_loc (loc, rntype, ll_mask);
5698 type = rntype;
5700 else if (lnbitsize < rnbitsize)
5702 rhs = fold_convert_loc (loc, lntype, rhs);
5703 lr_mask = fold_convert_loc (loc, lntype, lr_mask);
5704 type = lntype;
5708 if (! all_ones_mask_p (ll_mask, ll_bitsize + rl_bitsize))
5709 lhs = build2 (BIT_AND_EXPR, type, lhs, ll_mask);
5711 if (! all_ones_mask_p (lr_mask, lr_bitsize + rr_bitsize))
5712 rhs = build2 (BIT_AND_EXPR, type, rhs, lr_mask);
5714 return build2_loc (loc, wanted_code, truth_type, lhs, rhs);
5717 return 0;
5720 /* Handle the case of comparisons with constants. If there is something in
5721 common between the masks, those bits of the constants must be the same.
5722 If not, the condition is always false. Test for this to avoid generating
5723 incorrect code below. */
5724 result = const_binop (BIT_AND_EXPR, ll_mask, rl_mask);
5725 if (! integer_zerop (result)
5726 && simple_cst_equal (const_binop (BIT_AND_EXPR, result, l_const),
5727 const_binop (BIT_AND_EXPR, result, r_const)) != 1)
5729 if (wanted_code == NE_EXPR)
5731 warning (0, "%<or%> of unmatched not-equal tests is always 1");
5732 return constant_boolean_node (true, truth_type);
5734 else
5736 warning (0, "%<and%> of mutually exclusive equal-tests is always 0");
5737 return constant_boolean_node (false, truth_type);
5741 /* Construct the expression we will return. First get the component
5742 reference we will make. Unless the mask is all ones the width of
5743 that field, perform the mask operation. Then compare with the
5744 merged constant. */
5745 result = make_bit_field_ref (loc, ll_inner, lntype, lnbitsize, lnbitpos,
5746 ll_unsignedp || rl_unsignedp);
5748 ll_mask = const_binop (BIT_IOR_EXPR, ll_mask, rl_mask);
5749 if (! all_ones_mask_p (ll_mask, lnbitsize))
5750 result = build2_loc (loc, BIT_AND_EXPR, lntype, result, ll_mask);
5752 return build2_loc (loc, wanted_code, truth_type, result,
5753 const_binop (BIT_IOR_EXPR, l_const, r_const));
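/* A concrete instance of this constant path: for
     (x & 1) == 1 && (x & 2) == 2
   the masks IOR to 3 and the constants IOR to 3, so the whole test
   becomes (x & 3) == 3, i.e. a single load, mask and compare.  */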
5756 /* Optimize T, which is a comparison of a MIN_EXPR or MAX_EXPR with a
5757 constant. */
5759 static tree
5760 optimize_minmax_comparison (location_t loc, enum tree_code code, tree type,
5761 tree op0, tree op1)
5763 tree arg0 = op0;
5764 enum tree_code op_code;
5765 tree comp_const;
5766 tree minmax_const;
5767 int consts_equal, consts_lt;
5768 tree inner;
5770 STRIP_SIGN_NOPS (arg0);
5772 op_code = TREE_CODE (arg0);
5773 minmax_const = TREE_OPERAND (arg0, 1);
5774 comp_const = fold_convert_loc (loc, TREE_TYPE (arg0), op1);
5775 consts_equal = tree_int_cst_equal (minmax_const, comp_const);
5776 consts_lt = tree_int_cst_lt (minmax_const, comp_const);
5777 inner = TREE_OPERAND (arg0, 0);
5779 /* If something does not permit us to optimize, return NULL_TREE. */
5780 if ((op_code != MIN_EXPR && op_code != MAX_EXPR)
5781 || TREE_CODE (comp_const) != INTEGER_CST
5782 || TREE_OVERFLOW (comp_const)
5783 || TREE_CODE (minmax_const) != INTEGER_CST
5784 || TREE_OVERFLOW (minmax_const))
5785 return NULL_TREE;
5787 /* Now handle all the various comparison codes. We only handle EQ_EXPR
5788 and GT_EXPR, doing the rest with recursive calls using logical
5789 simplifications. */
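/* For instance, MAX (X, 0) >= 5 is handled via the GE_EXPR case as
     MAX (X, 0) == 5 || MAX (X, 0) > 5  ->  X == 5 || X > 5,
   while MIN (X, 4) < 5 is handled by folding the inverted test
   MIN (X, 4) >= 5 and then inverting the result.  */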
5790 switch (code)
5792 case NE_EXPR: case LT_EXPR: case LE_EXPR:
5794 tree tem
5795 = optimize_minmax_comparison (loc,
5796 invert_tree_comparison (code, false),
5797 type, op0, op1);
5798 if (tem)
5799 return invert_truthvalue_loc (loc, tem);
5800 return NULL_TREE;
5803 case GE_EXPR:
5804 return
5805 fold_build2_loc (loc, TRUTH_ORIF_EXPR, type,
5806 optimize_minmax_comparison
5807 (loc, EQ_EXPR, type, arg0, comp_const),
5808 optimize_minmax_comparison
5809 (loc, GT_EXPR, type, arg0, comp_const));
5811 case EQ_EXPR:
5812 if (op_code == MAX_EXPR && consts_equal)
5813 /* MAX (X, 0) == 0 -> X <= 0 */
5814 return fold_build2_loc (loc, LE_EXPR, type, inner, comp_const);
5816 else if (op_code == MAX_EXPR && consts_lt)
5817 /* MAX (X, 0) == 5 -> X == 5 */
5818 return fold_build2_loc (loc, EQ_EXPR, type, inner, comp_const);
5820 else if (op_code == MAX_EXPR)
5821 /* MAX (X, 0) == -1 -> false */
5822 return omit_one_operand_loc (loc, type, integer_zero_node, inner);
5824 else if (consts_equal)
5825 /* MIN (X, 0) == 0 -> X >= 0 */
5826 return fold_build2_loc (loc, GE_EXPR, type, inner, comp_const);
5828 else if (consts_lt)
5829 /* MIN (X, 0) == 5 -> false */
5830 return omit_one_operand_loc (loc, type, integer_zero_node, inner);
5832 else
5833 /* MIN (X, 0) == -1 -> X == -1 */
5834 return fold_build2_loc (loc, EQ_EXPR, type, inner, comp_const);
5836 case GT_EXPR:
5837 if (op_code == MAX_EXPR && (consts_equal || consts_lt))
5838 /* MAX (X, 0) > 0 -> X > 0
5839 MAX (X, 0) > 5 -> X > 5 */
5840 return fold_build2_loc (loc, GT_EXPR, type, inner, comp_const);
5842 else if (op_code == MAX_EXPR)
5843 /* MAX (X, 0) > -1 -> true */
5844 return omit_one_operand_loc (loc, type, integer_one_node, inner);
5846 else if (op_code == MIN_EXPR && (consts_equal || consts_lt))
5847 /* MIN (X, 0) > 0 -> false
5848 MIN (X, 0) > 5 -> false */
5849 return omit_one_operand_loc (loc, type, integer_zero_node, inner);
5851 else
5852 /* MIN (X, 0) > -1 -> X > -1 */
5853 return fold_build2_loc (loc, GT_EXPR, type, inner, comp_const);
5855 default:
5856 return NULL_TREE;
5860 /* T is an integer expression that is being multiplied or divided by,
5861 or taken modulo, a constant C (CODE says which operation and what
5862 kind of divide or modulus). See if we can eliminate that operation
5863 by folding it with other operations already in T. WIDE_TYPE, if
5864 non-null, is a type that should be used for the computation if wider than our type.
5866 For example, if we are dividing (X * 8) + (Y * 16) by 4, we can return
5867 (X * 2) + (Y * 4). We must, however, be assured that either the original
5868 expression would not overflow or that overflow is undefined for the type
5869 in the language in question.
5871 If we return a non-null expression, it is an equivalent form of the
5872 original computation, but need not be in the original type.
5874 We set *STRICT_OVERFLOW_P to true if the return value depends on
5875 signed overflow being undefined. Otherwise we do not change
5876 *STRICT_OVERFLOW_P. */
5878 static tree
5879 extract_muldiv (tree t, tree c, enum tree_code code, tree wide_type,
5880 bool *strict_overflow_p)
5882 /* To avoid exponential search depth, refuse to allow recursion past
5883 three levels. Beyond that (1) it's highly unlikely that we'll find
5884 something interesting and (2) we've probably processed it before
5885 when we built the inner expression. */
5887 static int depth;
5888 tree ret;
5890 if (depth > 3)
5891 return NULL;
5893 depth++;
5894 ret = extract_muldiv_1 (t, c, code, wide_type, strict_overflow_p);
5895 depth--;
5897 return ret;
5900 static tree
5901 extract_muldiv_1 (tree t, tree c, enum tree_code code, tree wide_type,
5902 bool *strict_overflow_p)
5904 tree type = TREE_TYPE (t);
5905 enum tree_code tcode = TREE_CODE (t);
5906 tree ctype = (wide_type != 0 && (GET_MODE_SIZE (TYPE_MODE (wide_type))
5907 > GET_MODE_SIZE (TYPE_MODE (type)))
5908 ? wide_type : type);
5909 tree t1, t2;
5910 int same_p = tcode == code;
5911 tree op0 = NULL_TREE, op1 = NULL_TREE;
5912 bool sub_strict_overflow_p;
5914 /* Don't deal with constants of zero here; they confuse the code below. */
5915 if (integer_zerop (c))
5916 return NULL_TREE;
5918 if (TREE_CODE_CLASS (tcode) == tcc_unary)
5919 op0 = TREE_OPERAND (t, 0);
5921 if (TREE_CODE_CLASS (tcode) == tcc_binary)
5922 op0 = TREE_OPERAND (t, 0), op1 = TREE_OPERAND (t, 1);
5924 /* Note that we need not handle conditional operations here since fold
5925 already handles those cases. So just do arithmetic here. */
5926 switch (tcode)
5928 case INTEGER_CST:
5929 /* For a constant, we can always simplify if we are a multiply
5930 or (for divide and modulus) if it is a multiple of our constant. */
5931 if (code == MULT_EXPR
5932 || wi::multiple_of_p (t, c, TYPE_SIGN (type)))
5933 return const_binop (code, fold_convert (ctype, t),
5934 fold_convert (ctype, c));
5935 break;
5937 CASE_CONVERT: case NON_LVALUE_EXPR:
5938 /* If op0 is an expression ... */
5939 if ((COMPARISON_CLASS_P (op0)
5940 || UNARY_CLASS_P (op0)
5941 || BINARY_CLASS_P (op0)
5942 || VL_EXP_CLASS_P (op0)
5943 || EXPRESSION_CLASS_P (op0))
5944 /* ... and has wrapping overflow, and its type is smaller
5945 than ctype, then we cannot pass through as widening. */
5946 && (((ANY_INTEGRAL_TYPE_P (TREE_TYPE (op0))
5947 && TYPE_OVERFLOW_WRAPS (TREE_TYPE (op0)))
5948 && (TYPE_PRECISION (ctype)
5949 > TYPE_PRECISION (TREE_TYPE (op0))))
5950 /* ... or this is a truncation (t is narrower than op0),
5951 then we cannot pass through this narrowing. */
5952 || (TYPE_PRECISION (type)
5953 < TYPE_PRECISION (TREE_TYPE (op0)))
5954 /* ... or signedness changes for division or modulus,
5955 then we cannot pass through this conversion. */
5956 || (code != MULT_EXPR
5957 && (TYPE_UNSIGNED (ctype)
5958 != TYPE_UNSIGNED (TREE_TYPE (op0))))
5959 /* ... or has undefined overflow while the converted to
5960 type has not, we cannot do the operation in the inner type
5961 as that would introduce undefined overflow. */
5962 || ((ANY_INTEGRAL_TYPE_P (TREE_TYPE (op0))
5963 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (op0)))
5964 && !TYPE_OVERFLOW_UNDEFINED (type))))
5965 break;
5967 /* Pass the constant down and see if we can make a simplification. If
5968 we can, replace this expression with the inner simplification for
5969 possible later conversion to our or some other type. */
5970 if ((t2 = fold_convert (TREE_TYPE (op0), c)) != 0
5971 && TREE_CODE (t2) == INTEGER_CST
5972 && !TREE_OVERFLOW (t2)
5973 && (0 != (t1 = extract_muldiv (op0, t2, code,
5974 code == MULT_EXPR
5975 ? ctype : NULL_TREE,
5976 strict_overflow_p))))
5977 return t1;
5978 break;
5980 case ABS_EXPR:
5981 /* If widening the type changes it from signed to unsigned, then we
5982 must avoid building ABS_EXPR itself as unsigned. */
5983 if (TYPE_UNSIGNED (ctype) && !TYPE_UNSIGNED (type))
5985 tree cstype = (*signed_type_for) (ctype);
5986 if ((t1 = extract_muldiv (op0, c, code, cstype, strict_overflow_p))
5987 != 0)
5989 t1 = fold_build1 (tcode, cstype, fold_convert (cstype, t1));
5990 return fold_convert (ctype, t1);
5992 break;
5994 /* If the constant is negative, we cannot simplify this. */
5995 if (tree_int_cst_sgn (c) == -1)
5996 break;
5997 /* FALLTHROUGH */
5998 case NEGATE_EXPR:
5999 /* For division and modulus, type can't be unsigned, as e.g.
6000 (-(x / 2U)) / 2U isn't equal to -((x / 2U) / 2U) for x >= 2.
6001 For signed types, even with wrapping overflow, this is fine. */
6002 if (code != MULT_EXPR && TYPE_UNSIGNED (type))
6003 break;
6004 if ((t1 = extract_muldiv (op0, c, code, wide_type, strict_overflow_p))
6005 != 0)
6006 return fold_build1 (tcode, ctype, fold_convert (ctype, t1));
6007 break;
6009 case MIN_EXPR: case MAX_EXPR:
6010 /* If widening the type changes the signedness, then we can't perform
6011 this optimization as that changes the result. */
6012 if (TYPE_UNSIGNED (ctype) != TYPE_UNSIGNED (type))
6013 break;
6015 /* MIN (a, b) / 5 -> MIN (a / 5, b / 5) */
6016 sub_strict_overflow_p = false;
6017 if ((t1 = extract_muldiv (op0, c, code, wide_type,
6018 &sub_strict_overflow_p)) != 0
6019 && (t2 = extract_muldiv (op1, c, code, wide_type,
6020 &sub_strict_overflow_p)) != 0)
6022 if (tree_int_cst_sgn (c) < 0)
6023 tcode = (tcode == MIN_EXPR ? MAX_EXPR : MIN_EXPR);
6024 if (sub_strict_overflow_p)
6025 *strict_overflow_p = true;
6026 return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
6027 fold_convert (ctype, t2));
6029 break;
6031 case LSHIFT_EXPR: case RSHIFT_EXPR:
6032 /* If the second operand is constant, this is a multiplication
6033 or floor division by a power of two, so we can treat it that
6034 way unless the multiplier or divisor overflows. Signed
6035 left-shift overflow is implementation-defined rather than
6036 undefined in C90, so do not convert signed left shift into
6037 multiplication. */
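/* E.g. for unsigned X, (X << 3) * 4 is rewritten here as
   (X * 8) * 4, which the recursive call then folds to X * 32.  */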
6038 if (TREE_CODE (op1) == INTEGER_CST
6039 && (tcode == RSHIFT_EXPR || TYPE_UNSIGNED (TREE_TYPE (op0)))
6040 /* const_binop may not detect overflow correctly,
6041 so check for it explicitly here. */
6042 && wi::gtu_p (TYPE_PRECISION (TREE_TYPE (size_one_node)), op1)
6043 && 0 != (t1 = fold_convert (ctype,
6044 const_binop (LSHIFT_EXPR,
6045 size_one_node,
6046 op1)))
6047 && !TREE_OVERFLOW (t1))
6048 return extract_muldiv (build2 (tcode == LSHIFT_EXPR
6049 ? MULT_EXPR : FLOOR_DIV_EXPR,
6050 ctype,
6051 fold_convert (ctype, op0),
6052 t1),
6053 c, code, wide_type, strict_overflow_p);
6054 break;
6056 case PLUS_EXPR: case MINUS_EXPR:
6057 /* See if we can eliminate the operation on both sides. If we can, we
6058 can return a new PLUS or MINUS. If we can't, the only remaining
6059 cases where we can do anything are if the second operand is a
6060 constant. */
6061 sub_strict_overflow_p = false;
6062 t1 = extract_muldiv (op0, c, code, wide_type, &sub_strict_overflow_p);
6063 t2 = extract_muldiv (op1, c, code, wide_type, &sub_strict_overflow_p);
6064 if (t1 != 0 && t2 != 0
6065 && (code == MULT_EXPR
6066 /* If not multiplication, we can only do this if both operands
6067 are divisible by c. */
6068 || (multiple_of_p (ctype, op0, c)
6069 && multiple_of_p (ctype, op1, c))))
6071 if (sub_strict_overflow_p)
6072 *strict_overflow_p = true;
6073 return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
6074 fold_convert (ctype, t2));
6077 /* If this was a subtraction, negate OP1 and set it to be an addition.
6078 This simplifies the logic below. */
6079 if (tcode == MINUS_EXPR)
6081 tcode = PLUS_EXPR, op1 = negate_expr (op1);
6082 /* If OP1 was not easily negatable, the constant may be OP0. */
6083 if (TREE_CODE (op0) == INTEGER_CST)
6085 std::swap (op0, op1);
6086 std::swap (t1, t2);
6090 if (TREE_CODE (op1) != INTEGER_CST)
6091 break;
6093 /* If either OP1 or C are negative, this optimization is not safe for
6094 some of the division and remainder types while for others we need
6095 to change the code. */
6096 if (tree_int_cst_sgn (op1) < 0 || tree_int_cst_sgn (c) < 0)
6098 if (code == CEIL_DIV_EXPR)
6099 code = FLOOR_DIV_EXPR;
6100 else if (code == FLOOR_DIV_EXPR)
6101 code = CEIL_DIV_EXPR;
6102 else if (code != MULT_EXPR
6103 && code != CEIL_MOD_EXPR && code != FLOOR_MOD_EXPR)
6104 break;
6107 /* If it's a multiply or a division/modulus operation of a multiple
6108 of our constant, do the operation and verify it doesn't overflow. */
6109 if (code == MULT_EXPR
6110 || wi::multiple_of_p (op1, c, TYPE_SIGN (type)))
6112 op1 = const_binop (code, fold_convert (ctype, op1),
6113 fold_convert (ctype, c));
6114 /* We allow the constant to overflow with wrapping semantics. */
6115 if (op1 == 0
6116 || (TREE_OVERFLOW (op1) && !TYPE_OVERFLOW_WRAPS (ctype)))
6117 break;
6119 else
6120 break;
6122 /* If we have an unsigned type, we cannot widen the operation since it
6123 will change the result if the original computation overflowed. */
6124 if (TYPE_UNSIGNED (ctype) && ctype != type)
6125 break;
6127 /* If we were able to eliminate our operation from the first side,
6128 apply our operation to the second side and reform the PLUS. */
6129 if (t1 != 0 && (TREE_CODE (t1) != code || code == MULT_EXPR))
6130 return fold_build2 (tcode, ctype, fold_convert (ctype, t1), op1);
6132 /* The last case is if we are a multiply. In that case, we can
6133 apply the distributive law to commute the multiply and addition
6134 if the multiplication of the constants doesn't overflow
6135 and overflow is defined. With undefined overflow
6136 op0 * c might overflow, while (op0 + orig_op1) * c doesn't. */
6137 if (code == MULT_EXPR && TYPE_OVERFLOW_WRAPS (ctype))
6138 return fold_build2 (tcode, ctype,
6139 fold_build2 (code, ctype,
6140 fold_convert (ctype, op0),
6141 fold_convert (ctype, c)),
6142 op1);
6144 break;
6146 case MULT_EXPR:
6147 /* We have a special case here if we are doing something like
6148 (C * 8) % 4 since we know that's zero. */
6149 if ((code == TRUNC_MOD_EXPR || code == CEIL_MOD_EXPR
6150 || code == FLOOR_MOD_EXPR || code == ROUND_MOD_EXPR)
6151 /* If the multiplication can overflow we cannot optimize this. */
6152 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (t))
6153 && TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST
6154 && wi::multiple_of_p (op1, c, TYPE_SIGN (type)))
6156 *strict_overflow_p = true;
6157 return omit_one_operand (type, integer_zero_node, op0);
6160 /* ... fall through ... */
6162 case TRUNC_DIV_EXPR: case CEIL_DIV_EXPR: case FLOOR_DIV_EXPR:
6163 case ROUND_DIV_EXPR: case EXACT_DIV_EXPR:
6164 /* If we can extract our operation from the LHS, do so and return a
6165 new operation. Likewise for the RHS from a MULT_EXPR. Otherwise,
6166 do something only if the second operand is a constant. */
6167 if (same_p
6168 && (t1 = extract_muldiv (op0, c, code, wide_type,
6169 strict_overflow_p)) != 0)
6170 return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
6171 fold_convert (ctype, op1));
6172 else if (tcode == MULT_EXPR && code == MULT_EXPR
6173 && (t1 = extract_muldiv (op1, c, code, wide_type,
6174 strict_overflow_p)) != 0)
6175 return fold_build2 (tcode, ctype, fold_convert (ctype, op0),
6176 fold_convert (ctype, t1));
6177 else if (TREE_CODE (op1) != INTEGER_CST)
6178 return 0;
6180 /* If these are the same operation types, we can associate them
6181 assuming no overflow. */
6182 if (tcode == code)
6184 bool overflow_p = false;
6185 bool overflow_mul_p;
6186 signop sign = TYPE_SIGN (ctype);
6187 wide_int mul = wi::mul (op1, c, sign, &overflow_mul_p);
6188 overflow_p = TREE_OVERFLOW (c) | TREE_OVERFLOW (op1);
6189 if (overflow_mul_p
6190 && ((sign == UNSIGNED && tcode != MULT_EXPR) || sign == SIGNED))
6191 overflow_p = true;
6192 if (!overflow_p)
6194 mul = wide_int::from (mul, TYPE_PRECISION (ctype),
6195 TYPE_SIGN (TREE_TYPE (op1)));
6196 return fold_build2 (tcode, ctype, fold_convert (ctype, op0),
6197 wide_int_to_tree (ctype, mul));
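/* E.g. (X / 4) / 6 reaches this point with tcode == code ==
   TRUNC_DIV_EXPR and is associated into X / 24, as long as the
   constant product 4 * 6 does not overflow in ctype.  */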
6201 /* If these operations "cancel" each other, we have the main
6202 optimizations of this pass, which occur when either constant is a
6203 multiple of the other, in which case we replace this with an
6204 operation of either CODE or TCODE.
6206 If we have an unsigned type, we cannot do this since it will change
6207 the result if the original computation overflowed. */
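/* E.g. with signed X, where overflow is undefined,
     (X * 8) / 4  ->  X * (8 / 4)  ->  X * 2, and
     (X * 4) / 8  ->  X / (8 / 4)  ->  X / 2,
   depending on which constant divides the other.  */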
6208 if (TYPE_OVERFLOW_UNDEFINED (ctype)
6209 && ((code == MULT_EXPR && tcode == EXACT_DIV_EXPR)
6210 || (tcode == MULT_EXPR
6211 && code != TRUNC_MOD_EXPR && code != CEIL_MOD_EXPR
6212 && code != FLOOR_MOD_EXPR && code != ROUND_MOD_EXPR
6213 && code != MULT_EXPR)))
6215 if (wi::multiple_of_p (op1, c, TYPE_SIGN (type)))
6217 if (TYPE_OVERFLOW_UNDEFINED (ctype))
6218 *strict_overflow_p = true;
6219 return fold_build2 (tcode, ctype, fold_convert (ctype, op0),
6220 fold_convert (ctype,
6221 const_binop (TRUNC_DIV_EXPR,
6222 op1, c)));
6224 else if (wi::multiple_of_p (c, op1, TYPE_SIGN (type)))
6226 if (TYPE_OVERFLOW_UNDEFINED (ctype))
6227 *strict_overflow_p = true;
6228 return fold_build2 (code, ctype, fold_convert (ctype, op0),
6229 fold_convert (ctype,
6230 const_binop (TRUNC_DIV_EXPR,
6231 c, op1)));
6234 break;
6236 default:
6237 break;
6240 return 0;
6243 /* Return a node which has the indicated constant VALUE (either 0 or
6244 1 for scalars or {-1,-1,..} or {0,0,...} for vectors),
6245 and is of the indicated TYPE. */
6247 tree
6248 constant_boolean_node (bool value, tree type)
6250 if (type == integer_type_node)
6251 return value ? integer_one_node : integer_zero_node;
6252 else if (type == boolean_type_node)
6253 return value ? boolean_true_node : boolean_false_node;
6254 else if (TREE_CODE (type) == VECTOR_TYPE)
6255 return build_vector_from_val (type,
6256 build_int_cst (TREE_TYPE (type),
6257 value ? -1 : 0));
6258 else
6259 return fold_convert (type, value ? integer_one_node : integer_zero_node);
6263 /* Transform `a + (b ? x : y)' into `b ? (a + x) : (a + y)'.
6264 Transform `a + (x < y)' into `(x < y) ? (a + 1) : (a + 0)'. Here
6265 CODE corresponds to the `+', COND to the `(b ? x : y)' or `(x < y)'
6266 expression, and ARG to `a'. If COND_FIRST_P is nonzero, then the
6267 COND is the first argument to CODE; otherwise (as in the example
6268 given here), it is the second argument. TYPE is the type of the
6269 original expression. Return NULL_TREE if no simplification is
6270 possible. */
6272 static tree
6273 fold_binary_op_with_conditional_arg (location_t loc,
6274 enum tree_code code,
6275 tree type, tree op0, tree op1,
6276 tree cond, tree arg, int cond_first_p)
6278 tree cond_type = cond_first_p ? TREE_TYPE (op0) : TREE_TYPE (op1);
6279 tree arg_type = cond_first_p ? TREE_TYPE (op1) : TREE_TYPE (op0);
6280 tree test, true_value, false_value;
6281 tree lhs = NULL_TREE;
6282 tree rhs = NULL_TREE;
6283 enum tree_code cond_code = COND_EXPR;
6285 if (TREE_CODE (cond) == COND_EXPR
6286 || TREE_CODE (cond) == VEC_COND_EXPR)
6288 test = TREE_OPERAND (cond, 0);
6289 true_value = TREE_OPERAND (cond, 1);
6290 false_value = TREE_OPERAND (cond, 2);
6291 /* If this operand throws an exception, then it does not make
6292 sense to try to perform a logical or arithmetic operation
6293 involving it. */
6294 if (VOID_TYPE_P (TREE_TYPE (true_value)))
6295 lhs = true_value;
6296 if (VOID_TYPE_P (TREE_TYPE (false_value)))
6297 rhs = false_value;
6299 else
6301 tree testtype = TREE_TYPE (cond);
6302 test = cond;
6303 true_value = constant_boolean_node (true, testtype);
6304 false_value = constant_boolean_node (false, testtype);
6307 if (TREE_CODE (TREE_TYPE (test)) == VECTOR_TYPE)
6308 cond_code = VEC_COND_EXPR;
6310 /* This transformation is only worthwhile if we don't have to wrap ARG
6311 in a SAVE_EXPR and the operation can be simplified without recursing
6312 on at least one of the branches once it's pushed inside the COND_EXPR. */
6313 if (!TREE_CONSTANT (arg)
6314 && (TREE_SIDE_EFFECTS (arg)
6315 || TREE_CODE (arg) == COND_EXPR || TREE_CODE (arg) == VEC_COND_EXPR
6316 || TREE_CONSTANT (true_value) || TREE_CONSTANT (false_value)))
6317 return NULL_TREE;
6319 arg = fold_convert_loc (loc, arg_type, arg);
6320 if (lhs == 0)
6322 true_value = fold_convert_loc (loc, cond_type, true_value);
6323 if (cond_first_p)
6324 lhs = fold_build2_loc (loc, code, type, true_value, arg);
6325 else
6326 lhs = fold_build2_loc (loc, code, type, arg, true_value);
6328 if (rhs == 0)
6330 false_value = fold_convert_loc (loc, cond_type, false_value);
6331 if (cond_first_p)
6332 rhs = fold_build2_loc (loc, code, type, false_value, arg);
6333 else
6334 rhs = fold_build2_loc (loc, code, type, arg, false_value);
6337 /* Check that we have simplified at least one of the branches. */
6338 if (!TREE_CONSTANT (arg) && !TREE_CONSTANT (lhs) && !TREE_CONSTANT (rhs))
6339 return NULL_TREE;
6341 return fold_build3_loc (loc, cond_code, type, test, lhs, rhs);
6345 /* Subroutine of fold() that checks for the addition of +/- 0.0.
6347 If !NEGATE, return true if ADDEND is +/-0.0 and, for all X of type
6348 TYPE, X + ADDEND is the same as X. If NEGATE, return true if X -
6349 ADDEND is the same as X.
6351 X + 0 and X - 0 both give X when X is NaN, infinite, or nonzero
6352 and finite. The problematic cases are when X is zero, and its mode
6353 has signed zeros. In the case of rounding towards -infinity,
6354 X - 0 is not the same as X because 0 - 0 is -0. In other rounding
6355 modes, X + 0 is not the same as X because -0 + 0 is 0. */
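/* Concretely: if X is -0.0, then X + 0.0 evaluates to +0.0 under the
   default rounding mode, so the addition cannot be dropped when
   signed zeros are honored; X - 0.0 yields -0.0 and is safe to drop
   unless rounding towards -infinity is in effect.  */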
6357 bool
6358 fold_real_zero_addition_p (const_tree type, const_tree addend, int negate)
6360 if (!real_zerop (addend))
6361 return false;
6363 /* Don't allow the fold with -fsignaling-nans. */
6364 if (HONOR_SNANS (element_mode (type)))
6365 return false;
6367 /* Allow the fold if zeros aren't signed, or their sign isn't important. */
6368 if (!HONOR_SIGNED_ZEROS (element_mode (type)))
6369 return true;
6371 /* In a vector or complex, we would need to check the sign of all zeros. */
6372 if (TREE_CODE (addend) != REAL_CST)
6373 return false;
6375 /* Treat x + -0 as x - 0 and x - -0 as x + 0. */
6376 if (REAL_VALUE_MINUS_ZERO (TREE_REAL_CST (addend)))
6377 negate = !negate;
6379 /* The mode has signed zeros, and we have to honor their sign.
6380 In this situation, there is only one case we can return true for.
6381 X - 0 is the same as X unless rounding towards -infinity is
6382 supported. */
6383 return negate && !HONOR_SIGN_DEPENDENT_ROUNDING (element_mode (type));
6386 /* Subroutine of fold() that optimizes comparisons of a division by
6387 a nonzero integer constant against an integer constant, i.e.
6388 X/C1 op C2.
6390 CODE is the comparison operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR,
6391 GE_EXPR or LE_EXPR. TYPE is the type of the result and ARG0 and ARG1
6392 are the operands of the comparison. ARG1 must be an INTEGER_CST.
6394 The function returns the constant folded tree if a simplification
6395 can be made, and NULL_TREE otherwise. */
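/* A worked example: for unsigned X, the test X / 4 == 3 holds exactly
   when X is in [12, 15] (prod = 12, tmp = 3, hi = 15), so the EQ_EXPR
   case below emits a single range check instead of a division.  */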
6397 static tree
6398 fold_div_compare (location_t loc,
6399 enum tree_code code, tree type, tree arg0, tree arg1)
6401 tree prod, tmp, hi, lo;
6402 tree arg00 = TREE_OPERAND (arg0, 0);
6403 tree arg01 = TREE_OPERAND (arg0, 1);
6404 signop sign = TYPE_SIGN (TREE_TYPE (arg0));
6405 bool neg_overflow = false;
6406 bool overflow;
6408 /* We have to do this the hard way to detect unsigned overflow.
6409 prod = int_const_binop (MULT_EXPR, arg01, arg1); */
6410 wide_int val = wi::mul (arg01, arg1, sign, &overflow);
6411 prod = force_fit_type (TREE_TYPE (arg00), val, -1, overflow);
6412 neg_overflow = false;
6414 if (sign == UNSIGNED)
6416 tmp = int_const_binop (MINUS_EXPR, arg01,
6417 build_int_cst (TREE_TYPE (arg01), 1));
6418 lo = prod;
6420 /* Likewise hi = int_const_binop (PLUS_EXPR, prod, tmp). */
6421 val = wi::add (prod, tmp, sign, &overflow);
6422 hi = force_fit_type (TREE_TYPE (arg00), val,
6423 -1, overflow | TREE_OVERFLOW (prod));
6425 else if (tree_int_cst_sgn (arg01) >= 0)
6427 tmp = int_const_binop (MINUS_EXPR, arg01,
6428 build_int_cst (TREE_TYPE (arg01), 1));
6429 switch (tree_int_cst_sgn (arg1))
6431 case -1:
6432 neg_overflow = true;
6433 lo = int_const_binop (MINUS_EXPR, prod, tmp);
6434 hi = prod;
6435 break;
6437 case 0:
6438 lo = fold_negate_const (tmp, TREE_TYPE (arg0));
6439 hi = tmp;
6440 break;
6442 case 1:
6443 hi = int_const_binop (PLUS_EXPR, prod, tmp);
6444 lo = prod;
6445 break;
6447 default:
6448 gcc_unreachable ();
6451 else
6453 /* A negative divisor reverses the relational operators. */
6454 code = swap_tree_comparison (code);
6456 tmp = int_const_binop (PLUS_EXPR, arg01,
6457 build_int_cst (TREE_TYPE (arg01), 1));
6458 switch (tree_int_cst_sgn (arg1))
6460 case -1:
6461 hi = int_const_binop (MINUS_EXPR, prod, tmp);
6462 lo = prod;
6463 break;
6465 case 0:
6466 hi = fold_negate_const (tmp, TREE_TYPE (arg0));
6467 lo = tmp;
6468 break;
6470 case 1:
6471 neg_overflow = true;
6472 lo = int_const_binop (PLUS_EXPR, prod, tmp);
6473 hi = prod;
6474 break;
6476 default:
6477 gcc_unreachable ();
6481 switch (code)
6483 case EQ_EXPR:
6484 if (TREE_OVERFLOW (lo) && TREE_OVERFLOW (hi))
6485 return omit_one_operand_loc (loc, type, integer_zero_node, arg00);
6486 if (TREE_OVERFLOW (hi))
6487 return fold_build2_loc (loc, GE_EXPR, type, arg00, lo);
6488 if (TREE_OVERFLOW (lo))
6489 return fold_build2_loc (loc, LE_EXPR, type, arg00, hi);
6490 return build_range_check (loc, type, arg00, 1, lo, hi);
6492 case NE_EXPR:
6493 if (TREE_OVERFLOW (lo) && TREE_OVERFLOW (hi))
6494 return omit_one_operand_loc (loc, type, integer_one_node, arg00);
6495 if (TREE_OVERFLOW (hi))
6496 return fold_build2_loc (loc, LT_EXPR, type, arg00, lo);
6497 if (TREE_OVERFLOW (lo))
6498 return fold_build2_loc (loc, GT_EXPR, type, arg00, hi);
6499 return build_range_check (loc, type, arg00, 0, lo, hi);
6501 case LT_EXPR:
6502 if (TREE_OVERFLOW (lo))
6504 tmp = neg_overflow ? integer_zero_node : integer_one_node;
6505 return omit_one_operand_loc (loc, type, tmp, arg00);
6507 return fold_build2_loc (loc, LT_EXPR, type, arg00, lo);
6509 case LE_EXPR:
6510 if (TREE_OVERFLOW (hi))
6512 tmp = neg_overflow ? integer_zero_node : integer_one_node;
6513 return omit_one_operand_loc (loc, type, tmp, arg00);
6515 return fold_build2_loc (loc, LE_EXPR, type, arg00, hi);
6517 case GT_EXPR:
6518 if (TREE_OVERFLOW (hi))
6520 tmp = neg_overflow ? integer_one_node : integer_zero_node;
6521 return omit_one_operand_loc (loc, type, tmp, arg00);
6523 return fold_build2_loc (loc, GT_EXPR, type, arg00, hi);
6525 case GE_EXPR:
6526 if (TREE_OVERFLOW (lo))
6528 tmp = neg_overflow ? integer_one_node : integer_zero_node;
6529 return omit_one_operand_loc (loc, type, tmp, arg00);
6531 return fold_build2_loc (loc, GE_EXPR, type, arg00, lo);
6533 default:
6534 break;
6537 return NULL_TREE;
6541 /* If CODE with arguments ARG0 and ARG1 represents a single bit
6542 equality/inequality test, then return a simplified form of the test
6543 using a sign test. Otherwise return NULL_TREE. RESULT_TYPE is the
6544 desired result type. */
6546 static tree
6547 fold_single_bit_test_into_sign_test (location_t loc,
6548 enum tree_code code, tree arg0, tree arg1,
6549 tree result_type)
6551 /* If this is testing a single bit, we can optimize the test. */
6552 if ((code == NE_EXPR || code == EQ_EXPR)
6553 && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
6554 && integer_pow2p (TREE_OPERAND (arg0, 1)))
6556 /* If we have (A & C) != 0 where C is the sign bit of A, convert
6557 this into A < 0. Similarly for (A & C) == 0 into A >= 0. */
6558 tree arg00 = sign_bit_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg0, 1));
6560 if (arg00 != NULL_TREE
6561 /* This is only a win if casting to a signed type is cheap,
6562 i.e. when arg00's type is not a partial mode. */
6563 && TYPE_PRECISION (TREE_TYPE (arg00))
6564 == GET_MODE_PRECISION (TYPE_MODE (TREE_TYPE (arg00))))
6566 tree stype = signed_type_for (TREE_TYPE (arg00));
6567 return fold_build2_loc (loc, code == EQ_EXPR ? GE_EXPR : LT_EXPR,
6568 result_type,
6569 fold_convert_loc (loc, stype, arg00),
6570 build_int_cst (stype, 0));
6574 return NULL_TREE;
6577 /* If CODE with arguments ARG0 and ARG1 represents a single bit
6578 equality/inequality test, then return a simplified form of
6579 the test using shifts and logical operations. Otherwise return
6580 NULL_TREE. RESULT_TYPE is the desired result type. */
6582 tree
6583 fold_single_bit_test (location_t loc, enum tree_code code,
6584 tree arg0, tree arg1, tree result_type)
6586 /* If this is testing a single bit, we can optimize the test. */
6587 if ((code == NE_EXPR || code == EQ_EXPR)
6588 && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
6589 && integer_pow2p (TREE_OPERAND (arg0, 1)))
6591 tree inner = TREE_OPERAND (arg0, 0);
6592 tree type = TREE_TYPE (arg0);
6593 int bitnum = tree_log2 (TREE_OPERAND (arg0, 1));
6594 machine_mode operand_mode = TYPE_MODE (type);
6595 int ops_unsigned;
6596 tree signed_type, unsigned_type, intermediate_type;
6597 tree tem, one;
6599 /* First, see if we can fold the single bit test into a sign-bit
6600 test. */
6601 tem = fold_single_bit_test_into_sign_test (loc, code, arg0, arg1,
6602 result_type);
6603 if (tem)
6604 return tem;
6606 /* Otherwise we have (A & C) != 0 where C is a single bit,
6607 convert that into ((A >> C2) & 1). Where C2 = log2(C).
6608 Similarly for (A & C) == 0. */
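/* E.g. (A & 8) != 0 becomes (A >> 3) & 1, and (A & 8) == 0 becomes
   ((A >> 3) ^ 1) & 1, using the XOR with one emitted below.  */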
6610 /* If INNER is a right shift of a constant and it plus BITNUM does
6611 not overflow, adjust BITNUM and INNER. */
6612 if (TREE_CODE (inner) == RSHIFT_EXPR
6613 && TREE_CODE (TREE_OPERAND (inner, 1)) == INTEGER_CST
6614 && bitnum < TYPE_PRECISION (type)
6615 && wi::ltu_p (TREE_OPERAND (inner, 1),
6616 TYPE_PRECISION (type) - bitnum))
6618 bitnum += tree_to_uhwi (TREE_OPERAND (inner, 1));
6619 inner = TREE_OPERAND (inner, 0);
6622 /* If we are going to be able to omit the AND below, we must do our
6623 operations as unsigned. If we must use the AND, we have a choice.
6624 Normally unsigned is faster, but for some machines signed is. */
6625 ops_unsigned = (LOAD_EXTEND_OP (operand_mode) == SIGN_EXTEND
6626 && !flag_syntax_only) ? 0 : 1;
6628 signed_type = lang_hooks.types.type_for_mode (operand_mode, 0);
6629 unsigned_type = lang_hooks.types.type_for_mode (operand_mode, 1);
6630 intermediate_type = ops_unsigned ? unsigned_type : signed_type;
6631 inner = fold_convert_loc (loc, intermediate_type, inner);
6633 if (bitnum != 0)
6634 inner = build2 (RSHIFT_EXPR, intermediate_type,
6635 inner, size_int (bitnum));
6637 one = build_int_cst (intermediate_type, 1);
6639 if (code == EQ_EXPR)
6640 inner = fold_build2_loc (loc, BIT_XOR_EXPR, intermediate_type, inner, one);
6642 /* Put the AND last so it can combine with more things. */
6643 inner = build2 (BIT_AND_EXPR, intermediate_type, inner, one);
6645 /* Make sure to return the proper type. */
6646 inner = fold_convert_loc (loc, result_type, inner);
6648 return inner;
6650 return NULL_TREE;
6653 /* Check whether we are allowed to reorder operands arg0 and arg1,
6654 such that the evaluation of arg1 occurs before arg0. */
6656 static bool
6657 reorder_operands_p (const_tree arg0, const_tree arg1)
6659 if (! flag_evaluation_order)
6660 return true;
6661 if (TREE_CONSTANT (arg0) || TREE_CONSTANT (arg1))
6662 return true;
6663 return ! TREE_SIDE_EFFECTS (arg0)
6664 && ! TREE_SIDE_EFFECTS (arg1);
6667 /* Test whether it is preferable to swap two operands, ARG0 and
6668 ARG1, for example because ARG0 is an integer constant and ARG1
6669 isn't. If REORDER is true, only recommend swapping if we can
6670 evaluate the operands in reverse order. */
6672 bool
6673 tree_swap_operands_p (const_tree arg0, const_tree arg1, bool reorder)
6675 if (CONSTANT_CLASS_P (arg1))
6676 return 0;
6677 if (CONSTANT_CLASS_P (arg0))
6678 return 1;
6680 STRIP_NOPS (arg0);
6681 STRIP_NOPS (arg1);
6683 if (TREE_CONSTANT (arg1))
6684 return 0;
6685 if (TREE_CONSTANT (arg0))
6686 return 1;
6688 if (reorder && flag_evaluation_order
6689 && (TREE_SIDE_EFFECTS (arg0) || TREE_SIDE_EFFECTS (arg1)))
6690 return 0;
6692 /* It is preferable to swap two SSA_NAME to ensure a canonical form
6693 for commutative and comparison operators. Ensuring a canonical
6694 form allows the optimizers to find additional redundancies without
6695 having to explicitly check for both orderings. */
6696 if (TREE_CODE (arg0) == SSA_NAME
6697 && TREE_CODE (arg1) == SSA_NAME
6698 && SSA_NAME_VERSION (arg0) > SSA_NAME_VERSION (arg1))
6699 return 1;
6701 /* Put SSA_NAMEs last. */
6702 if (TREE_CODE (arg1) == SSA_NAME)
6703 return 0;
6704 if (TREE_CODE (arg0) == SSA_NAME)
6705 return 1;
6707 /* Put variables last. */
6708 if (DECL_P (arg1))
6709 return 0;
6710 if (DECL_P (arg0))
6711 return 1;
6713 return 0;
6717 /* Fold A < X && A + 1 > Y to A < X && A >= Y. Normally A + 1 > Y
6718 means A >= Y && A != MAX, but in this case we know that
6719 A < X <= MAX. INEQ is A + 1 > Y, BOUND is A < X. */
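/* E.g. for signed ints i, n and j, the test i < n && i + 1 > j folds
   to i < n && i >= j: since i < n, computing i + 1 cannot wrap past
   the maximum, so i + 1 > j is exactly i >= j here.  */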
6721 static tree
6722 fold_to_nonsharp_ineq_using_bound (location_t loc, tree ineq, tree bound)
6724 tree a, typea, type = TREE_TYPE (ineq), a1, diff, y;
6726 if (TREE_CODE (bound) == LT_EXPR)
6727 a = TREE_OPERAND (bound, 0);
6728 else if (TREE_CODE (bound) == GT_EXPR)
6729 a = TREE_OPERAND (bound, 1);
6730 else
6731 return NULL_TREE;
6733 typea = TREE_TYPE (a);
6734 if (!INTEGRAL_TYPE_P (typea)
6735 && !POINTER_TYPE_P (typea))
6736 return NULL_TREE;
6738 if (TREE_CODE (ineq) == LT_EXPR)
6740 a1 = TREE_OPERAND (ineq, 1);
6741 y = TREE_OPERAND (ineq, 0);
6743 else if (TREE_CODE (ineq) == GT_EXPR)
6745 a1 = TREE_OPERAND (ineq, 0);
6746 y = TREE_OPERAND (ineq, 1);
6748 else
6749 return NULL_TREE;
6751 if (TREE_TYPE (a1) != typea)
6752 return NULL_TREE;
6754 if (POINTER_TYPE_P (typea))
6756 /* Convert the pointer types into integers before taking the difference. */
6757 tree ta = fold_convert_loc (loc, ssizetype, a);
6758 tree ta1 = fold_convert_loc (loc, ssizetype, a1);
6759 diff = fold_binary_loc (loc, MINUS_EXPR, ssizetype, ta1, ta);
6761 else
6762 diff = fold_binary_loc (loc, MINUS_EXPR, typea, a1, a);
6764 if (!diff || !integer_onep (diff))
6765 return NULL_TREE;
6767 return fold_build2_loc (loc, GE_EXPR, type, a, y);
6770 /* Fold a sum or difference of at least one multiplication.
6771 Returns the folded tree or NULL if no simplification could be made. */
6773 static tree
6774 fold_plusminus_mult_expr (location_t loc, enum tree_code code, tree type,
6775 tree arg0, tree arg1)
6777 tree arg00, arg01, arg10, arg11;
6778 tree alt0 = NULL_TREE, alt1 = NULL_TREE, same;
6780 /* (A * C) +- (B * C) -> (A+-B) * C.
6781 (A * C) +- A -> A * (C+-1).
6782 We are most concerned about the case where C is a constant,
6783 but other combinations show up during loop reduction. Since
6784 it is not difficult, try all four possibilities. */
6786 if (TREE_CODE (arg0) == MULT_EXPR)
6788 arg00 = TREE_OPERAND (arg0, 0);
6789 arg01 = TREE_OPERAND (arg0, 1);
6791 else if (TREE_CODE (arg0) == INTEGER_CST)
6793 arg00 = build_one_cst (type);
6794 arg01 = arg0;
6796 else
6798 /* We cannot generate constant 1 for fract. */
6799 if (ALL_FRACT_MODE_P (TYPE_MODE (type)))
6800 return NULL_TREE;
6801 arg00 = arg0;
6802 arg01 = build_one_cst (type);
6804 if (TREE_CODE (arg1) == MULT_EXPR)
6806 arg10 = TREE_OPERAND (arg1, 0);
6807 arg11 = TREE_OPERAND (arg1, 1);
6809 else if (TREE_CODE (arg1) == INTEGER_CST)
6811 arg10 = build_one_cst (type);
6812 /* As we canonicalize A - 2 to A + -2, get rid of that sign for
6813 the purpose of this canonicalization. */
6814 if (wi::neg_p (arg1, TYPE_SIGN (TREE_TYPE (arg1)))
6815 && negate_expr_p (arg1)
6816 && code == PLUS_EXPR)
6818 arg11 = negate_expr (arg1);
6819 code = MINUS_EXPR;
6821 else
6822 arg11 = arg1;
6824 else
6826 /* We cannot generate constant 1 for fract. */
6827 if (ALL_FRACT_MODE_P (TYPE_MODE (type)))
6828 return NULL_TREE;
6829 arg10 = arg1;
6830 arg11 = build_one_cst (type);
6832 same = NULL_TREE;
6834 if (operand_equal_p (arg01, arg11, 0))
6835 same = arg01, alt0 = arg00, alt1 = arg10;
6836 else if (operand_equal_p (arg00, arg10, 0))
6837 same = arg00, alt0 = arg01, alt1 = arg11;
6838 else if (operand_equal_p (arg00, arg11, 0))
6839 same = arg00, alt0 = arg01, alt1 = arg10;
6840 else if (operand_equal_p (arg01, arg10, 0))
6841 same = arg01, alt0 = arg00, alt1 = arg11;
6843 /* No identical multiplicands; see if we can find a common
6844 power-of-two factor in non-power-of-two multiplies. This
6845 can help in multi-dimensional array access. */
6846 else if (tree_fits_shwi_p (arg01)
6847 && tree_fits_shwi_p (arg11))
6849 HOST_WIDE_INT int01, int11, tmp;
6850 bool swap = false;
6851 tree maybe_same;
6852 int01 = tree_to_shwi (arg01);
6853 int11 = tree_to_shwi (arg11);
6855 /* Move min of absolute values to int11. */
6856 if (absu_hwi (int01) < absu_hwi (int11))
6858 tmp = int01, int01 = int11, int11 = tmp;
6859 alt0 = arg00, arg00 = arg10, arg10 = alt0;
6860 maybe_same = arg01;
6861 swap = true;
6863 else
6864 maybe_same = arg11;
6866 if (exact_log2 (absu_hwi (int11)) > 0 && int01 % int11 == 0
6867 /* The remainder should not be a constant, otherwise we
6868 end up folding i * 4 + 2 to (i * 2 + 1) * 2 which has
6869 increased the number of multiplications necessary. */
6870 && TREE_CODE (arg10) != INTEGER_CST)
6872 alt0 = fold_build2_loc (loc, MULT_EXPR, TREE_TYPE (arg00), arg00,
6873 build_int_cst (TREE_TYPE (arg00),
6874 int01 / int11));
6875 alt1 = arg10;
6876 same = maybe_same;
6877 if (swap)
6878 maybe_same = alt0, alt0 = alt1, alt1 = maybe_same;
6882 if (same)
6883 return fold_build2_loc (loc, MULT_EXPR, type,
6884 fold_build2_loc (loc, code, type,
6885 fold_convert_loc (loc, type, alt0),
6886 fold_convert_loc (loc, type, alt1)),
6887 fold_convert_loc (loc, type, same));
6889 return NULL_TREE;
6892 /* Subroutine of native_encode_expr. Encode the INTEGER_CST
6893 specified by EXPR into the buffer PTR of length LEN bytes.
6894 Return the number of bytes placed in the buffer, or zero
6895 upon failure. */
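/* For example, encoding the 32-bit INTEGER_CST 0x01020304 on a
   little-endian target stores the bytes 04 03 02 01 into PTR, while a
   big-endian target stores 01 02 03 04; the loop below computes each
   byte's position from BYTES_BIG_ENDIAN and WORDS_BIG_ENDIAN.  */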
6897 static int
6898 native_encode_int (const_tree expr, unsigned char *ptr, int len, int off)
6900 tree type = TREE_TYPE (expr);
6901 int total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
6902 int byte, offset, word, words;
6903 unsigned char value;
6905 if ((off == -1 && total_bytes > len)
6906 || off >= total_bytes)
6907 return 0;
6908 if (off == -1)
6909 off = 0;
6910 words = total_bytes / UNITS_PER_WORD;
6912 for (byte = 0; byte < total_bytes; byte++)
6914 int bitpos = byte * BITS_PER_UNIT;
6915 /* Extend EXPR according to TYPE_SIGN if the precision isn't a whole
6916 number of bytes. */
6917 value = wi::extract_uhwi (wi::to_widest (expr), bitpos, BITS_PER_UNIT);
6919 if (total_bytes > UNITS_PER_WORD)
6921 word = byte / UNITS_PER_WORD;
6922 if (WORDS_BIG_ENDIAN)
6923 word = (words - 1) - word;
6924 offset = word * UNITS_PER_WORD;
6925 if (BYTES_BIG_ENDIAN)
6926 offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
6927 else
6928 offset += byte % UNITS_PER_WORD;
6930 else
6931 offset = BYTES_BIG_ENDIAN ? (total_bytes - 1) - byte : byte;
6932 if (offset >= off
6933 && offset - off < len)
6934 ptr[offset - off] = value;
6936 return MIN (len, total_bytes - off);
6940 /* Subroutine of native_encode_expr. Encode the FIXED_CST
6941 specified by EXPR into the buffer PTR of length LEN bytes.
6942 Return the number of bytes placed in the buffer, or zero
6943 upon failure. */
6945 static int
6946 native_encode_fixed (const_tree expr, unsigned char *ptr, int len, int off)
6948 tree type = TREE_TYPE (expr);
6949 machine_mode mode = TYPE_MODE (type);
6950 int total_bytes = GET_MODE_SIZE (mode);
6951 FIXED_VALUE_TYPE value;
6952 tree i_value, i_type;
6954 if (total_bytes * BITS_PER_UNIT > HOST_BITS_PER_DOUBLE_INT)
6955 return 0;
6957 i_type = lang_hooks.types.type_for_size (GET_MODE_BITSIZE (mode), 1);
6959 if (NULL_TREE == i_type
6960 || TYPE_PRECISION (i_type) != total_bytes * BITS_PER_UNIT)
6961 return 0;
6963 value = TREE_FIXED_CST (expr);
6964 i_value = double_int_to_tree (i_type, value.data);
6966 return native_encode_int (i_value, ptr, len, off);
6970 /* Subroutine of native_encode_expr. Encode the REAL_CST
6971 specified by EXPR into the buffer PTR of length LEN bytes.
6972 Return the number of bytes placed in the buffer, or zero
6973 upon failure. */
6975 static int
6976 native_encode_real (const_tree expr, unsigned char *ptr, int len, int off)
6978 tree type = TREE_TYPE (expr);
6979 int total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
6980 int byte, offset, word, words, bitpos;
6981 unsigned char value;
6983 /* There are always 32 bits in each long, no matter the size of
6984 the host's long. We handle floating point representations with
6985 up to 192 bits. */
6986 long tmp[6];
6988 if ((off == -1 && total_bytes > len)
6989 || off >= total_bytes)
6990 return 0;
6991 if (off == -1)
6992 off = 0;
6993 words = (32 / BITS_PER_UNIT) / UNITS_PER_WORD;
6995 real_to_target (tmp, TREE_REAL_CST_PTR (expr), TYPE_MODE (type));
6997 for (bitpos = 0; bitpos < total_bytes * BITS_PER_UNIT;
6998 bitpos += BITS_PER_UNIT)
7000 byte = (bitpos / BITS_PER_UNIT) & 3;
7001 value = (unsigned char) (tmp[bitpos / 32] >> (bitpos & 31));
7003 if (UNITS_PER_WORD < 4)
7005 word = byte / UNITS_PER_WORD;
7006 if (WORDS_BIG_ENDIAN)
7007 word = (words - 1) - word;
7008 offset = word * UNITS_PER_WORD;
7009 if (BYTES_BIG_ENDIAN)
7010 offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
7011 else
7012 offset += byte % UNITS_PER_WORD;
7014 else
7015 offset = BYTES_BIG_ENDIAN ? 3 - byte : byte;
7016 offset = offset + ((bitpos / BITS_PER_UNIT) & ~3);
7017 if (offset >= off
7018 && offset - off < len)
7019 ptr[offset - off] = value;
7021 return MIN (len, total_bytes - off);
7024 /* Subroutine of native_encode_expr. Encode the COMPLEX_CST
7025 specified by EXPR into the buffer PTR of length LEN bytes.
7026 Return the number of bytes placed in the buffer, or zero
7027 upon failure. */
7029 static int
7030 native_encode_complex (const_tree expr, unsigned char *ptr, int len, int off)
7032 int rsize, isize;
7033 tree part;
7035 part = TREE_REALPART (expr);
7036 rsize = native_encode_expr (part, ptr, len, off);
7037 if (off == -1
7038 && rsize == 0)
7039 return 0;
7040 part = TREE_IMAGPART (expr);
7041 if (off != -1)
7042 off = MAX (0, off - GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (part))));
7043 isize = native_encode_expr (part, ptr+rsize, len-rsize, off);
7044 if (off == -1
7045 && isize != rsize)
7046 return 0;
7047 return rsize + isize;
7051 /* Subroutine of native_encode_expr. Encode the VECTOR_CST
7052 specified by EXPR into the buffer PTR of length LEN bytes.
7053 Return the number of bytes placed in the buffer, or zero
7054 upon failure. */
7056 static int
7057 native_encode_vector (const_tree expr, unsigned char *ptr, int len, int off)
7059 unsigned i, count;
7060 int size, offset;
7061 tree itype, elem;
7063 offset = 0;
7064 count = VECTOR_CST_NELTS (expr);
7065 itype = TREE_TYPE (TREE_TYPE (expr));
7066 size = GET_MODE_SIZE (TYPE_MODE (itype));
7067 for (i = 0; i < count; i++)
7069 if (off >= size)
7071 off -= size;
7072 continue;
7074 elem = VECTOR_CST_ELT (expr, i);
7075 int res = native_encode_expr (elem, ptr+offset, len-offset, off);
7076 if ((off == -1 && res != size)
7077 || res == 0)
7078 return 0;
7079 offset += res;
7080 if (offset >= len)
7081 return offset;
7082 if (off != -1)
7083 off = 0;
7085 return offset;
7089 /* Subroutine of native_encode_expr. Encode the STRING_CST
7090 specified by EXPR into the buffer PTR of length LEN bytes.
7091 Return the number of bytes placed in the buffer, or zero
7092 upon failure. */
7094 static int
7095 native_encode_string (const_tree expr, unsigned char *ptr, int len, int off)
7097 tree type = TREE_TYPE (expr);
7098 HOST_WIDE_INT total_bytes;
7100 if (TREE_CODE (type) != ARRAY_TYPE
7101 || TREE_CODE (TREE_TYPE (type)) != INTEGER_TYPE
7102 || GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (type))) != BITS_PER_UNIT
7103 || !tree_fits_shwi_p (TYPE_SIZE_UNIT (type)))
7104 return 0;
7105 total_bytes = tree_to_shwi (TYPE_SIZE_UNIT (type));
7106 if ((off == -1 && total_bytes > len)
7107 || off >= total_bytes)
7108 return 0;
7109 if (off == -1)
7110 off = 0;
7111 if (TREE_STRING_LENGTH (expr) - off < MIN (total_bytes, len))
7113 int written = 0;
7114 if (off < TREE_STRING_LENGTH (expr))
7116 written = MIN (len, TREE_STRING_LENGTH (expr) - off);
7117 memcpy (ptr, TREE_STRING_POINTER (expr) + off, written);
7119 memset (ptr + written, 0,
7120 MIN (total_bytes - written, len - written));
7122 else
7123 memcpy (ptr, TREE_STRING_POINTER (expr) + off, MIN (total_bytes, len));
7124 return MIN (total_bytes - off, len);
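/* E.g. encoding the STRING_CST "ab" for a char[4] array writes the
   bytes 'a' 'b' 0 0: anything past TREE_STRING_LENGTH is zero-filled
   by the memset above.  */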
7128 /* Subroutine of fold_view_convert_expr. Encode the INTEGER_CST,
7129 REAL_CST, COMPLEX_CST or VECTOR_CST specified by EXPR into the
7130 buffer PTR of length LEN bytes. If OFF is not -1 then start
7131 the encoding at byte offset OFF and encode at most LEN bytes.
7132 Return the number of bytes placed in the buffer, or zero upon failure. */
7134 int
7135 native_encode_expr (const_tree expr, unsigned char *ptr, int len, int off)
7137 /* We don't support starting at negative offset and -1 is special. */
7138 if (off < -1)
7139 return 0;
7141 switch (TREE_CODE (expr))
7143 case INTEGER_CST:
7144 return native_encode_int (expr, ptr, len, off);
7146 case REAL_CST:
7147 return native_encode_real (expr, ptr, len, off);
7149 case FIXED_CST:
7150 return native_encode_fixed (expr, ptr, len, off);
7152 case COMPLEX_CST:
7153 return native_encode_complex (expr, ptr, len, off);
7155 case VECTOR_CST:
7156 return native_encode_vector (expr, ptr, len, off);
7158 case STRING_CST:
7159 return native_encode_string (expr, ptr, len, off);
7161 default:
7162 return 0;
7167 /* Subroutine of native_interpret_expr. Interpret the contents of
7168 the buffer PTR of length LEN as an INTEGER_CST of type TYPE.
7169 If the buffer cannot be interpreted, return NULL_TREE. */
7171 static tree
7172 native_interpret_int (tree type, const unsigned char *ptr, int len)
7174 int total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
7176 if (total_bytes > len
7177 || total_bytes * BITS_PER_UNIT > HOST_BITS_PER_DOUBLE_INT)
7178 return NULL_TREE;
7180 wide_int result = wi::from_buffer (ptr, total_bytes);
7182 return wide_int_to_tree (type, result);
7186 /* Subroutine of native_interpret_expr. Interpret the contents of
7187 the buffer PTR of length LEN as a FIXED_CST of type TYPE.
7188 If the buffer cannot be interpreted, return NULL_TREE. */
7190 static tree
7191 native_interpret_fixed (tree type, const unsigned char *ptr, int len)
7193 int total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
7194 double_int result;
7195 FIXED_VALUE_TYPE fixed_value;
7197 if (total_bytes > len
7198 || total_bytes * BITS_PER_UNIT > HOST_BITS_PER_DOUBLE_INT)
7199 return NULL_TREE;
7201 result = double_int::from_buffer (ptr, total_bytes);
7202 fixed_value = fixed_from_double_int (result, TYPE_MODE (type));
7204 return build_fixed (type, fixed_value);
7208 /* Subroutine of native_interpret_expr. Interpret the contents of
7209 the buffer PTR of length LEN as a REAL_CST of type TYPE.
7210 If the buffer cannot be interpreted, return NULL_TREE. */
7212 static tree
7213 native_interpret_real (tree type, const unsigned char *ptr, int len)
7215 machine_mode mode = TYPE_MODE (type);
7216 int total_bytes = GET_MODE_SIZE (mode);
7217 unsigned char value;
7218 /* There are always 32 bits in each long, no matter the size of
7219 the host's long. We handle floating point representations with
7220 up to 192 bits. */
7221 REAL_VALUE_TYPE r;
7222 long tmp[6];
7224 total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
7225 if (total_bytes > len || total_bytes > 24)
7226 return NULL_TREE;
7227 int words = (32 / BITS_PER_UNIT) / UNITS_PER_WORD;
7229 memset (tmp, 0, sizeof (tmp));
7230 for (int bitpos = 0; bitpos < total_bytes * BITS_PER_UNIT;
7231 bitpos += BITS_PER_UNIT)
7233 /* Both OFFSET and BYTE index within a long;
7234 bitpos indexes the whole float. */
7235 int offset, byte = (bitpos / BITS_PER_UNIT) & 3;
7236 if (UNITS_PER_WORD < 4)
7238 int word = byte / UNITS_PER_WORD;
7239 if (WORDS_BIG_ENDIAN)
7240 word = (words - 1) - word;
7241 offset = word * UNITS_PER_WORD;
7242 if (BYTES_BIG_ENDIAN)
7243 offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
7244 else
7245 offset += byte % UNITS_PER_WORD;
7247 else
7249 offset = byte;
7250 if (BYTES_BIG_ENDIAN)
7252 /* Reverse bytes within each long, or within the entire float
7253 if it's smaller than a long (for HFmode). */
7254 offset = MIN (3, total_bytes - 1) - offset;
7255 gcc_assert (offset >= 0);
7258 value = ptr[offset + ((bitpos / BITS_PER_UNIT) & ~3)];
7260 tmp[bitpos / 32] |= (unsigned long)value << (bitpos & 31);
7263 real_from_target (&r, tmp, mode);
7264 return build_real (type, r);
7268 /* Subroutine of native_interpret_expr. Interpret the contents of
7269 the buffer PTR of length LEN as a COMPLEX_CST of type TYPE.
7270 If the buffer cannot be interpreted, return NULL_TREE. */
7272 static tree
7273 native_interpret_complex (tree type, const unsigned char *ptr, int len)
7275 tree etype, rpart, ipart;
7276 int size;
7278 etype = TREE_TYPE (type);
7279 size = GET_MODE_SIZE (TYPE_MODE (etype));
7280 if (size * 2 > len)
7281 return NULL_TREE;
7282 rpart = native_interpret_expr (etype, ptr, size);
7283 if (!rpart)
7284 return NULL_TREE;
7285 ipart = native_interpret_expr (etype, ptr+size, size);
7286 if (!ipart)
7287 return NULL_TREE;
7288 return build_complex (type, rpart, ipart);
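/* The buffer layout assumed here matches the usual representation of
   C's _Complex: the real part occupies the first SIZE bytes and the
   imaginary part the next SIZE bytes.  A hypothetical standalone
   sketch for float (plain C, not GCC internals):

     #include <complex.h>
     #include <string.h>

     static float _Complex
     interpret_complex_float (const unsigned char *ptr)
     {
       float re, im;
       memcpy (&re, ptr, sizeof (float));
       memcpy (&im, ptr + sizeof (float), sizeof (float));
       return re + im * I;
     }
*/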
7292 /* Subroutine of native_interpret_expr. Interpret the contents of
7293 the buffer PTR of length LEN as a VECTOR_CST of type TYPE.
7294 If the buffer cannot be interpreted, return NULL_TREE. */
7296 static tree
7297 native_interpret_vector (tree type, const unsigned char *ptr, int len)
7299 tree etype, elem;
7300 int i, size, count;
7301 tree *elements;
7303 etype = TREE_TYPE (type);
7304 size = GET_MODE_SIZE (TYPE_MODE (etype));
7305 count = TYPE_VECTOR_SUBPARTS (type);
7306 if (size * count > len)
7307 return NULL_TREE;
7309 elements = XALLOCAVEC (tree, count);
7310 for (i = count - 1; i >= 0; i--)
7312 elem = native_interpret_expr (etype, ptr+(i*size), size);
7313 if (!elem)
7314 return NULL_TREE;
7315 elements[i] = elem;
7317 return build_vector (type, elements);
7321 /* Subroutine of fold_view_convert_expr. Interpret the contents of
7322 the buffer PTR of length LEN as a constant of type TYPE. For
7323 INTEGRAL_TYPE_P we return an INTEGER_CST, for SCALAR_FLOAT_TYPE_P
7324 we return a REAL_CST, etc... If the buffer cannot be interpreted,
7325 return NULL_TREE. */
7327 tree
7328 native_interpret_expr (tree type, const unsigned char *ptr, int len)
7330 switch (TREE_CODE (type))
7332 case INTEGER_TYPE:
7333 case ENUMERAL_TYPE:
7334 case BOOLEAN_TYPE:
7335 case POINTER_TYPE:
7336 case REFERENCE_TYPE:
7337 return native_interpret_int (type, ptr, len);
7339 case REAL_TYPE:
7340 return native_interpret_real (type, ptr, len);
7342 case FIXED_POINT_TYPE:
7343 return native_interpret_fixed (type, ptr, len);
7345 case COMPLEX_TYPE:
7346 return native_interpret_complex (type, ptr, len);
7348 case VECTOR_TYPE:
7349 return native_interpret_vector (type, ptr, len);
7351 default:
7352 return NULL_TREE;
7356 /* Returns true if we can interpret the contents of a native encoding
7357 as TYPE. */
7359 static bool
7360 can_native_interpret_type_p (tree type)
7362 switch (TREE_CODE (type))
7364 case INTEGER_TYPE:
7365 case ENUMERAL_TYPE:
7366 case BOOLEAN_TYPE:
7367 case POINTER_TYPE:
7368 case REFERENCE_TYPE:
7369 case FIXED_POINT_TYPE:
7370 case REAL_TYPE:
7371 case COMPLEX_TYPE:
7372 case VECTOR_TYPE:
7373 return true;
7374 default:
7375 return false;
7379 /* Fold a VIEW_CONVERT_EXPR of a constant expression EXPR to type
7380 TYPE at compile-time. If we're unable to perform the conversion
7381 return NULL_TREE. */
7383 static tree
7384 fold_view_convert_expr (tree type, tree expr)
7386 /* We support up to 512-bit values (for V8DFmode). */
7387 unsigned char buffer[64];
7388 int len;
7390 /* Check that the host and target are sane. */
7391 if (CHAR_BIT != 8 || BITS_PER_UNIT != 8)
7392 return NULL_TREE;
7394 len = native_encode_expr (expr, buffer, sizeof (buffer));
7395 if (len == 0)
7396 return NULL_TREE;
7398 return native_interpret_expr (type, buffer, len);
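/* Semantically this is a compile-time bit cast: encode the constant
   into its target byte image, then reinterpret those bytes in the new
   type.  The run-time C equivalent is the memcpy type-punning idiom,
   sketched below under the assumption that source and destination have
   the same size:

     #include <string.h>
     #include <stdint.h>

     static uint32_t
     bits_of_float (float f)
     {
       uint32_t u;
       memcpy (&u, &f, sizeof (u));  // well-defined, unlike *(uint32_t *)&f
       return u;
     }
*/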
7401 /* Build an expression for the address of T. Folds away INDIRECT_REF
7402 to avoid confusing the gimplify process. */
7404 tree
7405 build_fold_addr_expr_with_type_loc (location_t loc, tree t, tree ptrtype)
7407 /* The size of the object is not relevant when talking about its address. */
7408 if (TREE_CODE (t) == WITH_SIZE_EXPR)
7409 t = TREE_OPERAND (t, 0);
7411 if (TREE_CODE (t) == INDIRECT_REF)
7413 t = TREE_OPERAND (t, 0);
7415 if (TREE_TYPE (t) != ptrtype)
7416 t = build1_loc (loc, NOP_EXPR, ptrtype, t);
7418 else if (TREE_CODE (t) == MEM_REF
7419 && integer_zerop (TREE_OPERAND (t, 1)))
7420 return TREE_OPERAND (t, 0);
7421 else if (TREE_CODE (t) == MEM_REF
7422 && TREE_CODE (TREE_OPERAND (t, 0)) == INTEGER_CST)
7423 return fold_binary (POINTER_PLUS_EXPR, ptrtype,
7424 TREE_OPERAND (t, 0),
7425 convert_to_ptrofftype (TREE_OPERAND (t, 1)));
7426 else if (TREE_CODE (t) == VIEW_CONVERT_EXPR)
7428 t = build_fold_addr_expr_loc (loc, TREE_OPERAND (t, 0));
7430 if (TREE_TYPE (t) != ptrtype)
7431 t = fold_convert_loc (loc, ptrtype, t);
7433 else
7434 t = build1_loc (loc, ADDR_EXPR, ptrtype, t);
7436 return t;
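/* At the source level the INDIRECT_REF folding is the identity
   &*p == p; a trivial sketch in plain C:

     static int *
     addr_of_deref (int *q)
     {
       return &*q;   // folds to plain q; no dereference takes place
     }

   The MEM_REF cases are the same idea with an optional constant
   offset folded into pointer arithmetic.  */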
7439 /* Build an expression for the address of T. */
7441 tree
7442 build_fold_addr_expr_loc (location_t loc, tree t)
7444 tree ptrtype = build_pointer_type (TREE_TYPE (t));
7446 return build_fold_addr_expr_with_type_loc (loc, t, ptrtype);
7449 /* Fold a unary expression of code CODE and type TYPE with operand
7450 OP0. Return the folded expression if folding is successful.
7451 Otherwise, return NULL_TREE. */
7453 tree
7454 fold_unary_loc (location_t loc, enum tree_code code, tree type, tree op0)
7456 tree tem;
7457 tree arg0;
7458 enum tree_code_class kind = TREE_CODE_CLASS (code);
7460 gcc_assert (IS_EXPR_CODE_CLASS (kind)
7461 && TREE_CODE_LENGTH (code) == 1);
7463 arg0 = op0;
7464 if (arg0)
7466 if (CONVERT_EXPR_CODE_P (code)
7467 || code == FLOAT_EXPR || code == ABS_EXPR || code == NEGATE_EXPR)
7469 /* Don't use STRIP_NOPS, because signedness of argument type
7470 matters. */
7471 STRIP_SIGN_NOPS (arg0);
7473 else
7475 /* Strip any conversions that don't change the mode. This
7476 is safe for every expression, except for a comparison
7477 expression because its signedness is derived from its
7478 operands.
7480 Note that this is done as an internal manipulation within
7481 the constant folder, in order to find the simplest
7482 representation of the arguments so that their form can be
7483 studied. In any cases, the appropriate type conversions
7484 should be put back in the tree that will get out of the
7485 constant folder. */
7486 STRIP_NOPS (arg0);
7489 if (CONSTANT_CLASS_P (arg0))
7491 tree tem = const_unop (code, type, arg0);
7492 if (tem)
7494 if (TREE_TYPE (tem) != type)
7495 tem = fold_convert_loc (loc, type, tem);
7496 return tem;
7501 tem = generic_simplify (loc, code, type, op0);
7502 if (tem)
7503 return tem;
7505 if (TREE_CODE_CLASS (code) == tcc_unary)
7507 if (TREE_CODE (arg0) == COMPOUND_EXPR)
7508 return build2 (COMPOUND_EXPR, type, TREE_OPERAND (arg0, 0),
7509 fold_build1_loc (loc, code, type,
7510 fold_convert_loc (loc, TREE_TYPE (op0),
7511 TREE_OPERAND (arg0, 1))));
7512 else if (TREE_CODE (arg0) == COND_EXPR)
7514 tree arg01 = TREE_OPERAND (arg0, 1);
7515 tree arg02 = TREE_OPERAND (arg0, 2);
7516 if (! VOID_TYPE_P (TREE_TYPE (arg01)))
7517 arg01 = fold_build1_loc (loc, code, type,
7518 fold_convert_loc (loc,
7519 TREE_TYPE (op0), arg01));
7520 if (! VOID_TYPE_P (TREE_TYPE (arg02)))
7521 arg02 = fold_build1_loc (loc, code, type,
7522 fold_convert_loc (loc,
7523 TREE_TYPE (op0), arg02));
7524 tem = fold_build3_loc (loc, COND_EXPR, type, TREE_OPERAND (arg0, 0),
7525 arg01, arg02);
7527 /* If this was a conversion, and all we did was to move into
7528 inside the COND_EXPR, bring it back out. But leave it if
7529 it is a conversion from integer to integer and the
7530 result precision is no wider than a word since such a
7531 conversion is cheap and may be optimized away by combine,
7532 while it couldn't if it were outside the COND_EXPR. Then return
7533 so we don't get into an infinite recursion loop taking the
7534 conversion out and then back in. */
7536 if ((CONVERT_EXPR_CODE_P (code)
7537 || code == NON_LVALUE_EXPR)
7538 && TREE_CODE (tem) == COND_EXPR
7539 && TREE_CODE (TREE_OPERAND (tem, 1)) == code
7540 && TREE_CODE (TREE_OPERAND (tem, 2)) == code
7541 && ! VOID_TYPE_P (TREE_OPERAND (tem, 1))
7542 && ! VOID_TYPE_P (TREE_OPERAND (tem, 2))
7543 && (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 1), 0))
7544 == TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 2), 0)))
7545 && (! (INTEGRAL_TYPE_P (TREE_TYPE (tem))
7546 && (INTEGRAL_TYPE_P
7547 (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 1), 0))))
7548 && TYPE_PRECISION (TREE_TYPE (tem)) <= BITS_PER_WORD)
7549 || flag_syntax_only))
7550 tem = build1_loc (loc, code, type,
7551 build3 (COND_EXPR,
7552 TREE_TYPE (TREE_OPERAND
7553 (TREE_OPERAND (tem, 1), 0)),
7554 TREE_OPERAND (tem, 0),
7555 TREE_OPERAND (TREE_OPERAND (tem, 1), 0),
7556 TREE_OPERAND (TREE_OPERAND (tem, 2),
7557 0)));
7558 return tem;
7562 switch (code)
7564 case NON_LVALUE_EXPR:
7565 if (!maybe_lvalue_p (op0))
7566 return fold_convert_loc (loc, type, op0);
7567 return NULL_TREE;
7569 CASE_CONVERT:
7570 case FLOAT_EXPR:
7571 case FIX_TRUNC_EXPR:
7572 if (COMPARISON_CLASS_P (op0))
7574 /* If we have (type) (a CMP b) and type is an integral type, return
7575 new expression involving the new type. Canonicalize
7576 (type) (a CMP b) to (a CMP b) ? (type) true : (type) false for
7577 non-integral type.
7578 Do not fold the result as that would not simplify further; folding
7579 it again would only lead to infinite recursion. */
7580 if (TREE_CODE (type) == BOOLEAN_TYPE)
7581 return build2_loc (loc, TREE_CODE (op0), type,
7582 TREE_OPERAND (op0, 0),
7583 TREE_OPERAND (op0, 1));
7584 else if (!INTEGRAL_TYPE_P (type) && !VOID_TYPE_P (type)
7585 && TREE_CODE (type) != VECTOR_TYPE)
7586 return build3_loc (loc, COND_EXPR, type, op0,
7587 constant_boolean_node (true, type),
7588 constant_boolean_node (false, type));
7591 /* Handle (T *)&A.B.C for A being of type T and B and C
7592 living at offset zero. This occurs frequently in
7593 C++ upcasting and then accessing the base. */
7594 if (TREE_CODE (op0) == ADDR_EXPR
7595 && POINTER_TYPE_P (type)
7596 && handled_component_p (TREE_OPERAND (op0, 0)))
7598 HOST_WIDE_INT bitsize, bitpos;
7599 tree offset;
7600 machine_mode mode;
7601 int unsignedp, volatilep;
7602 tree base = TREE_OPERAND (op0, 0);
7603 base = get_inner_reference (base, &bitsize, &bitpos, &offset,
7604 &mode, &unsignedp, &volatilep, false);
7605 /* If the reference was to a (constant) zero offset, we can use
7606 the address of the base if it has the same base type
7607 as the result type and the pointer type is unqualified. */
7608 if (! offset && bitpos == 0
7609 && (TYPE_MAIN_VARIANT (TREE_TYPE (type))
7610 == TYPE_MAIN_VARIANT (TREE_TYPE (base)))
7611 && TYPE_QUALS (type) == TYPE_UNQUALIFIED)
7612 return fold_convert_loc (loc, type,
7613 build_fold_addr_expr_loc (loc, base));
7616 if (TREE_CODE (op0) == MODIFY_EXPR
7617 && TREE_CONSTANT (TREE_OPERAND (op0, 1))
7618 /* Detect assigning a bitfield. */
7619 && !(TREE_CODE (TREE_OPERAND (op0, 0)) == COMPONENT_REF
7620 && DECL_BIT_FIELD
7621 (TREE_OPERAND (TREE_OPERAND (op0, 0), 1))))
7623 /* Don't leave an assignment inside a conversion
7624 unless assigning a bitfield. */
7625 tem = fold_build1_loc (loc, code, type, TREE_OPERAND (op0, 1));
7626 /* First do the assignment, then return converted constant. */
7627 tem = build2_loc (loc, COMPOUND_EXPR, TREE_TYPE (tem), op0, tem);
7628 TREE_NO_WARNING (tem) = 1;
7629 TREE_USED (tem) = 1;
7630 return tem;
7633 /* Convert (T)(x & c) into (T)x & (T)c, if c is an integer
7634 constants (if x has signed type, the sign bit cannot be set
7635 in c). This folds extension into the BIT_AND_EXPR.
7636 ??? We don't do it for BOOLEAN_TYPE or ENUMERAL_TYPE because they
7637 very likely don't have maximal range for their precision and this
7638 transformation effectively doesn't preserve non-maximal ranges. */
7639 if (TREE_CODE (type) == INTEGER_TYPE
7640 && TREE_CODE (op0) == BIT_AND_EXPR
7641 && TREE_CODE (TREE_OPERAND (op0, 1)) == INTEGER_CST)
7643 tree and_expr = op0;
7644 tree and0 = TREE_OPERAND (and_expr, 0);
7645 tree and1 = TREE_OPERAND (and_expr, 1);
7646 int change = 0;
7648 if (TYPE_UNSIGNED (TREE_TYPE (and_expr))
7649 || (TYPE_PRECISION (type)
7650 <= TYPE_PRECISION (TREE_TYPE (and_expr))))
7651 change = 1;
7652 else if (TYPE_PRECISION (TREE_TYPE (and1))
7653 <= HOST_BITS_PER_WIDE_INT
7654 && tree_fits_uhwi_p (and1))
7656 unsigned HOST_WIDE_INT cst;
7658 cst = tree_to_uhwi (and1);
7659 cst &= HOST_WIDE_INT_M1U
7660 << (TYPE_PRECISION (TREE_TYPE (and1)) - 1);
7661 change = (cst == 0);
7662 if (change
7663 && !flag_syntax_only
7664 && (LOAD_EXTEND_OP (TYPE_MODE (TREE_TYPE (and0)))
7665 == ZERO_EXTEND))
7667 tree uns = unsigned_type_for (TREE_TYPE (and0));
7668 and0 = fold_convert_loc (loc, uns, and0);
7669 and1 = fold_convert_loc (loc, uns, and1);
7672 if (change)
7674 tem = force_fit_type (type, wi::to_widest (and1), 0,
7675 TREE_OVERFLOW (and1));
7676 return fold_build2_loc (loc, BIT_AND_EXPR, type,
7677 fold_convert_loc (loc, type, and0), tem);
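/* A concrete instance of this fold, assuming T = unsigned char and a
   mask whose sign bit is clear (plain C, not GCC internals):

     static unsigned char
     low_bits (int x)
     {
       return (unsigned char) (x & 0x7f);   // same value as ((unsigned char) x) & 0x7f
     }

   Truncation commutes with BIT_AND here because the discarded high
   bits of X cannot influence the surviving low bits.  */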
7681 /* Convert (T1)(X p+ Y) into ((T1)X p+ Y), for pointer type,
7682 when one of the new casts will fold away. Conservatively we assume
7683 that this happens when X or Y is NOP_EXPR or Y is INTEGER_CST. */
7684 if (POINTER_TYPE_P (type)
7685 && TREE_CODE (arg0) == POINTER_PLUS_EXPR
7686 && (!TYPE_RESTRICT (type) || TYPE_RESTRICT (TREE_TYPE (arg0)))
7687 && (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
7688 || TREE_CODE (TREE_OPERAND (arg0, 0)) == NOP_EXPR
7689 || TREE_CODE (TREE_OPERAND (arg0, 1)) == NOP_EXPR))
7691 tree arg00 = TREE_OPERAND (arg0, 0);
7692 tree arg01 = TREE_OPERAND (arg0, 1);
7694 return fold_build_pointer_plus_loc
7695 (loc, fold_convert_loc (loc, type, arg00), arg01);
7698 /* Convert (T1)(~(T2)X) into ~(T1)X if T1 and T2 are integral types
7699 of the same precision, and X is an integer type not narrower than
7700 types T1 or T2, i.e. the cast (T2)X isn't an extension. */
7701 if (INTEGRAL_TYPE_P (type)
7702 && TREE_CODE (op0) == BIT_NOT_EXPR
7703 && INTEGRAL_TYPE_P (TREE_TYPE (op0))
7704 && CONVERT_EXPR_P (TREE_OPERAND (op0, 0))
7705 && TYPE_PRECISION (type) == TYPE_PRECISION (TREE_TYPE (op0)))
7707 tem = TREE_OPERAND (TREE_OPERAND (op0, 0), 0);
7708 if (INTEGRAL_TYPE_P (TREE_TYPE (tem))
7709 && TYPE_PRECISION (type) <= TYPE_PRECISION (TREE_TYPE (tem)))
7710 return fold_build1_loc (loc, BIT_NOT_EXPR, type,
7711 fold_convert_loc (loc, type, tem));
7714 /* Convert (T1)(X * Y) into (T1)X * (T1)Y if T1 is narrower than the
7715 type of X and Y (integer types only). */
7716 if (INTEGRAL_TYPE_P (type)
7717 && TREE_CODE (op0) == MULT_EXPR
7718 && INTEGRAL_TYPE_P (TREE_TYPE (op0))
7719 && TYPE_PRECISION (type) < TYPE_PRECISION (TREE_TYPE (op0)))
7721 /* Be careful not to introduce new overflows. */
7722 tree mult_type;
7723 if (TYPE_OVERFLOW_WRAPS (type))
7724 mult_type = type;
7725 else
7726 mult_type = unsigned_type_for (type);
7728 if (TYPE_PRECISION (mult_type) < TYPE_PRECISION (TREE_TYPE (op0)))
7730 tem = fold_build2_loc (loc, MULT_EXPR, mult_type,
7731 fold_convert_loc (loc, mult_type,
7732 TREE_OPERAND (op0, 0)),
7733 fold_convert_loc (loc, mult_type,
7734 TREE_OPERAND (op0, 1)));
7735 return fold_convert_loc (loc, type, tem);
7739 return NULL_TREE;
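/* A concrete check of the narrowing-multiply rule just above, in the
   unsigned (wrapping) case it relies on: reduction mod 2^N commutes
   with multiplication.  Plain C sketch, not GCC internals:

     static int
     narrow_mul_agrees (unsigned x, unsigned y)
     {
       unsigned char a = (unsigned char) (x * y);
       unsigned char b = (unsigned char) ((unsigned char) x * (unsigned char) y);
       return a == b;   // holds for all x, y
     }
*/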
7741 case VIEW_CONVERT_EXPR:
7742 if (TREE_CODE (op0) == MEM_REF)
7743 return fold_build2_loc (loc, MEM_REF, type,
7744 TREE_OPERAND (op0, 0), TREE_OPERAND (op0, 1));
7746 return NULL_TREE;
7748 case NEGATE_EXPR:
7749 tem = fold_negate_expr (loc, arg0);
7750 if (tem)
7751 return fold_convert_loc (loc, type, tem);
7752 return NULL_TREE;
7754 case ABS_EXPR:
7755 /* Convert fabs((double)float) into (double)fabsf(float). */
7756 if (TREE_CODE (arg0) == NOP_EXPR
7757 && TREE_CODE (type) == REAL_TYPE)
7759 tree targ0 = strip_float_extensions (arg0);
7760 if (targ0 != arg0)
7761 return fold_convert_loc (loc, type,
7762 fold_build1_loc (loc, ABS_EXPR,
7763 TREE_TYPE (targ0),
7764 targ0));
7767 /* Strip sign ops from argument. */
7768 if (TREE_CODE (type) == REAL_TYPE)
7770 tem = fold_strip_sign_ops (arg0);
7771 if (tem)
7772 return fold_build1_loc (loc, ABS_EXPR, type,
7773 fold_convert_loc (loc, type, tem));
7775 return NULL_TREE;
7777 case BIT_NOT_EXPR:
7778 /* Convert ~(X ^ Y) to ~X ^ Y or X ^ ~Y if ~X or ~Y simplify. */
7779 if (TREE_CODE (arg0) == BIT_XOR_EXPR
7780 && (tem = fold_unary_loc (loc, BIT_NOT_EXPR, type,
7781 fold_convert_loc (loc, type,
7782 TREE_OPERAND (arg0, 0)))))
7783 return fold_build2_loc (loc, BIT_XOR_EXPR, type, tem,
7784 fold_convert_loc (loc, type,
7785 TREE_OPERAND (arg0, 1)));
7786 else if (TREE_CODE (arg0) == BIT_XOR_EXPR
7787 && (tem = fold_unary_loc (loc, BIT_NOT_EXPR, type,
7788 fold_convert_loc (loc, type,
7789 TREE_OPERAND (arg0, 1)))))
7790 return fold_build2_loc (loc, BIT_XOR_EXPR, type,
7791 fold_convert_loc (loc, type,
7792 TREE_OPERAND (arg0, 0)), tem);
7794 return NULL_TREE;
7796 case TRUTH_NOT_EXPR:
7797 /* Note that the operand of this must be an int
7798 and its values must be 0 or 1.
7799 ("true" is a fixed value perhaps depending on the language,
7800 but we don't handle values other than 1 correctly yet.) */
7801 tem = fold_truth_not_expr (loc, arg0);
7802 if (!tem)
7803 return NULL_TREE;
7804 return fold_convert_loc (loc, type, tem);
7806 case INDIRECT_REF:
7807 /* Fold *&X to X if X is an lvalue. */
7808 if (TREE_CODE (op0) == ADDR_EXPR)
7810 tree op00 = TREE_OPERAND (op0, 0);
7811 if ((TREE_CODE (op00) == VAR_DECL
7812 || TREE_CODE (op00) == PARM_DECL
7813 || TREE_CODE (op00) == RESULT_DECL)
7814 && !TREE_READONLY (op00))
7815 return op00;
7817 return NULL_TREE;
7819 default:
7820 return NULL_TREE;
7821 } /* switch (code) */
7825 /* If the operation was a conversion do _not_ mark a resulting constant
7826 with TREE_OVERFLOW if the original constant was not. These conversions
7827 have implementation defined behavior and retaining the TREE_OVERFLOW
7828 flag here would confuse later passes such as VRP. */
7829 tree
7830 fold_unary_ignore_overflow_loc (location_t loc, enum tree_code code,
7831 tree type, tree op0)
7833 tree res = fold_unary_loc (loc, code, type, op0);
7834 if (res
7835 && TREE_CODE (res) == INTEGER_CST
7836 && TREE_CODE (op0) == INTEGER_CST
7837 && CONVERT_EXPR_CODE_P (code))
7838 TREE_OVERFLOW (res) = TREE_OVERFLOW (op0);
7840 return res;
7843 /* Fold a binary bitwise/truth expression of code CODE and type TYPE with
7844 operands OP0 and OP1. LOC is the location of the resulting expression.
7845 ARG0 and ARG1 are the NOP-stripped results of OP0 and OP1.
7846 Return the folded expression if folding is successful. Otherwise,
7847 return NULL_TREE. */
7848 static tree
7849 fold_truth_andor (location_t loc, enum tree_code code, tree type,
7850 tree arg0, tree arg1, tree op0, tree op1)
7852 tree tem;
7854 /* We only do these simplifications if we are optimizing. */
7855 if (!optimize)
7856 return NULL_TREE;
7858 /* Check for things like (A || B) && (A || C). We can convert this
7859 to A || (B && C). Note that either operator can be any of the four
7860 truth and/or operations and the transformation will still be
7861 valid. Also note that we only care about order for the
7862 ANDIF and ORIF operators. If B contains side effects, this
7863 might change the truth-value of A. */
7864 if (TREE_CODE (arg0) == TREE_CODE (arg1)
7865 && (TREE_CODE (arg0) == TRUTH_ANDIF_EXPR
7866 || TREE_CODE (arg0) == TRUTH_ORIF_EXPR
7867 || TREE_CODE (arg0) == TRUTH_AND_EXPR
7868 || TREE_CODE (arg0) == TRUTH_OR_EXPR)
7869 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (arg0, 1)))
7871 tree a00 = TREE_OPERAND (arg0, 0);
7872 tree a01 = TREE_OPERAND (arg0, 1);
7873 tree a10 = TREE_OPERAND (arg1, 0);
7874 tree a11 = TREE_OPERAND (arg1, 1);
7875 int commutative = ((TREE_CODE (arg0) == TRUTH_OR_EXPR
7876 || TREE_CODE (arg0) == TRUTH_AND_EXPR)
7877 && (code == TRUTH_AND_EXPR
7878 || code == TRUTH_OR_EXPR));
7880 if (operand_equal_p (a00, a10, 0))
7881 return fold_build2_loc (loc, TREE_CODE (arg0), type, a00,
7882 fold_build2_loc (loc, code, type, a01, a11));
7883 else if (commutative && operand_equal_p (a00, a11, 0))
7884 return fold_build2_loc (loc, TREE_CODE (arg0), type, a00,
7885 fold_build2_loc (loc, code, type, a01, a10));
7886 else if (commutative && operand_equal_p (a01, a10, 0))
7887 return fold_build2_loc (loc, TREE_CODE (arg0), type, a01,
7888 fold_build2_loc (loc, code, type, a00, a11));
7890 /* This case is tricky because we must either have commutative
7891 operators or else A10 must not have side-effects. */
7893 else if ((commutative || ! TREE_SIDE_EFFECTS (a10))
7894 && operand_equal_p (a01, a11, 0))
7895 return fold_build2_loc (loc, TREE_CODE (arg0), type,
7896 fold_build2_loc (loc, code, type, a00, a10),
7897 a01);
7900 /* See if we can build a range comparison. */
7901 if (0 != (tem = fold_range_test (loc, code, type, op0, op1)))
7902 return tem;
7904 if ((code == TRUTH_ANDIF_EXPR && TREE_CODE (arg0) == TRUTH_ORIF_EXPR)
7905 || (code == TRUTH_ORIF_EXPR && TREE_CODE (arg0) == TRUTH_ANDIF_EXPR))
7907 tem = merge_truthop_with_opposite_arm (loc, arg0, arg1, true);
7908 if (tem)
7909 return fold_build2_loc (loc, code, type, tem, arg1);
7912 if ((code == TRUTH_ANDIF_EXPR && TREE_CODE (arg1) == TRUTH_ORIF_EXPR)
7913 || (code == TRUTH_ORIF_EXPR && TREE_CODE (arg1) == TRUTH_ANDIF_EXPR))
7915 tem = merge_truthop_with_opposite_arm (loc, arg1, arg0, false);
7916 if (tem)
7917 return fold_build2_loc (loc, code, type, arg0, tem);
7920 /* Check for the possibility of merging component references. If our
7921 lhs is another similar operation, try to merge its rhs with our
7922 rhs. Then try to merge our lhs and rhs. */
7923 if (TREE_CODE (arg0) == code
7924 && 0 != (tem = fold_truth_andor_1 (loc, code, type,
7925 TREE_OPERAND (arg0, 1), arg1)))
7926 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0), tem);
7928 if ((tem = fold_truth_andor_1 (loc, code, type, arg0, arg1)) != 0)
7929 return tem;
7931 if (LOGICAL_OP_NON_SHORT_CIRCUIT
7932 && (code == TRUTH_AND_EXPR
7933 || code == TRUTH_ANDIF_EXPR
7934 || code == TRUTH_OR_EXPR
7935 || code == TRUTH_ORIF_EXPR))
7937 enum tree_code ncode, icode;
7939 ncode = (code == TRUTH_ANDIF_EXPR || code == TRUTH_AND_EXPR)
7940 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR;
7941 icode = ncode == TRUTH_AND_EXPR ? TRUTH_ANDIF_EXPR : TRUTH_ORIF_EXPR;
7943 /* Transform ((A AND-IF B) AND[-IF] C) into (A AND-IF (B AND C)),
7944 or ((A OR-IF B) OR[-IF] C) into (A OR-IF (B OR C))
7945 We don't want to pack more than two leaves into a non-IF AND/OR
7946 expression.
7947 If the tree code of the left-hand operand isn't an AND/OR-IF code and
7948 isn't equal to IF-CODE, then we don't want to add the right-hand operand.
7949 If the inner right-hand side of the left-hand operand has
7950 side-effects, or isn't simple, then we can't add to it,
7951 as otherwise we might destroy the if-sequence. */
7952 if (TREE_CODE (arg0) == icode
7953 && simple_operand_p_2 (arg1)
7954 /* Needed for sequence points to handle trappings, and
7955 side-effects. */
7956 && simple_operand_p_2 (TREE_OPERAND (arg0, 1)))
7958 tem = fold_build2_loc (loc, ncode, type, TREE_OPERAND (arg0, 1),
7959 arg1);
7960 return fold_build2_loc (loc, icode, type, TREE_OPERAND (arg0, 0),
7961 tem);
7963 /* Same as above but for (A AND[-IF] (B AND-IF C)) -> ((A AND B) AND-IF C),
7964 or (A OR[-IF] (B OR-IF C)) -> ((A OR B) OR-IF C). */
7965 else if (TREE_CODE (arg1) == icode
7966 && simple_operand_p_2 (arg0)
7967 /* Needed for sequence points to handle trappings, and
7968 side-effects. */
7969 && simple_operand_p_2 (TREE_OPERAND (arg1, 0)))
7971 tem = fold_build2_loc (loc, ncode, type,
7972 arg0, TREE_OPERAND (arg1, 0));
7973 return fold_build2_loc (loc, icode, type, tem,
7974 TREE_OPERAND (arg1, 1));
7976 /* Transform (A AND-IF B) into (A AND B), or (A OR-IF B)
7977 into (A OR B).
7978 For sequence point consistency, we need to check for trapping
7979 and side-effects. */
7980 else if (code == icode && simple_operand_p_2 (arg0)
7981 && simple_operand_p_2 (arg1))
7982 return fold_build2_loc (loc, ncode, type, arg0, arg1);
7985 return NULL_TREE;
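/* The factoring at the top of this function in concrete form (plain C,
   assuming b and c are free of side effects so evaluation order does
   not matter):

     int lhs = (a || b) && (a || c);
     int rhs = a || (b && c);   // lhs == rhs for every combination of truth values

   The remaining cases likewise just re-associate AND/OR chains while
   respecting the short-circuit (-IF) forms.  */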
7988 /* Fold a binary expression of code CODE and type TYPE with operands
7989 OP0 and OP1, containing either a MIN-MAX or a MAX-MIN combination.
7990 Return the folded expression if folding is successful. Otherwise,
7991 return NULL_TREE. */
7993 static tree
7994 fold_minmax (location_t loc, enum tree_code code, tree type, tree op0, tree op1)
7996 enum tree_code compl_code;
7998 if (code == MIN_EXPR)
7999 compl_code = MAX_EXPR;
8000 else if (code == MAX_EXPR)
8001 compl_code = MIN_EXPR;
8002 else
8003 gcc_unreachable ();
8005 /* MIN (MAX (a, b), b) == b. */
8006 if (TREE_CODE (op0) == compl_code
8007 && operand_equal_p (TREE_OPERAND (op0, 1), op1, 0))
8008 return omit_one_operand_loc (loc, type, op1, TREE_OPERAND (op0, 0));
8010 /* MIN (MAX (b, a), b) == b. */
8011 if (TREE_CODE (op0) == compl_code
8012 && operand_equal_p (TREE_OPERAND (op0, 0), op1, 0)
8013 && reorder_operands_p (TREE_OPERAND (op0, 1), op1))
8014 return omit_one_operand_loc (loc, type, op1, TREE_OPERAND (op0, 1));
8016 /* MIN (a, MAX (a, b)) == a. */
8017 if (TREE_CODE (op1) == compl_code
8018 && operand_equal_p (op0, TREE_OPERAND (op1, 0), 0)
8019 && reorder_operands_p (op0, TREE_OPERAND (op1, 1)))
8020 return omit_one_operand_loc (loc, type, op0, TREE_OPERAND (op1, 1));
8022 /* MIN (a, MAX (b, a)) == a. */
8023 if (TREE_CODE (op1) == compl_code
8024 && operand_equal_p (op0, TREE_OPERAND (op1, 1), 0)
8025 && reorder_operands_p (op0, TREE_OPERAND (op1, 0)))
8026 return omit_one_operand_loc (loc, type, op0, TREE_OPERAND (op1, 0));
8028 return NULL_TREE;
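/* These identities are easy to verify by cases with the usual macro
   definitions (plain C, operands assumed free of side effects):

     #define MIN(a, b) ((a) < (b) ? (a) : (b))
     #define MAX(a, b) ((a) > (b) ? (a) : (b))

     // MIN (MAX (a, b), b):  if a >= b it is MIN (a, b) = b,
     //                       if a <  b it is MIN (b, b) = b.
*/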
8031 /* Helper that tries to canonicalize the comparison ARG0 CODE ARG1
8032 by changing CODE to reduce the magnitude of constants involved in
8033 ARG0 of the comparison.
8034 Returns a canonicalized comparison tree if a simplification was
8035 possible, otherwise returns NULL_TREE.
8036 Set *STRICT_OVERFLOW_P to true if the canonicalization is only
8037 valid if signed overflow is undefined. */
8039 static tree
8040 maybe_canonicalize_comparison_1 (location_t loc, enum tree_code code, tree type,
8041 tree arg0, tree arg1,
8042 bool *strict_overflow_p)
8044 enum tree_code code0 = TREE_CODE (arg0);
8045 tree t, cst0 = NULL_TREE;
8046 int sgn0;
8048 /* Match A +- CST code arg1. We can change this only if overflow
8049 is undefined. */
8050 if (!((ANY_INTEGRAL_TYPE_P (TREE_TYPE (arg0))
8051 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0)))
8052 /* In principle pointers also have undefined overflow behavior,
8053 but that causes problems elsewhere. */
8054 && !POINTER_TYPE_P (TREE_TYPE (arg0))
8055 && (code0 == MINUS_EXPR
8056 || code0 == PLUS_EXPR)
8057 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST))
8058 return NULL_TREE;
8060 /* Identify the constant in arg0 and its sign. */
8061 cst0 = TREE_OPERAND (arg0, 1);
8062 sgn0 = tree_int_cst_sgn (cst0);
8064 /* Overflowed constants and zero will cause problems. */
8065 if (integer_zerop (cst0)
8066 || TREE_OVERFLOW (cst0))
8067 return NULL_TREE;
8069 /* See if we can reduce the magnitude of the constant in
8070 arg0 by changing the comparison code. */
8071 /* A - CST < arg1 -> A - CST-1 <= arg1. */
8072 if (code == LT_EXPR
8073 && code0 == ((sgn0 == -1) ? PLUS_EXPR : MINUS_EXPR))
8074 code = LE_EXPR;
8075 /* A + CST > arg1 -> A + CST-1 >= arg1. */
8076 else if (code == GT_EXPR
8077 && code0 == ((sgn0 == -1) ? MINUS_EXPR : PLUS_EXPR))
8078 code = GE_EXPR;
8079 /* A + CST <= arg1 -> A + CST-1 < arg1. */
8080 else if (code == LE_EXPR
8081 && code0 == ((sgn0 == -1) ? MINUS_EXPR : PLUS_EXPR))
8082 code = LT_EXPR;
8083 /* A - CST >= arg1 -> A - CST-1 > arg1. */
8084 else if (code == GE_EXPR
8085 && code0 == ((sgn0 == -1) ? PLUS_EXPR : MINUS_EXPR))
8086 code = GT_EXPR;
8087 else
8088 return NULL_TREE;
8089 *strict_overflow_p = true;
8091 /* Now build the constant reduced in magnitude. But not if that
8092 would produce one outside of its type's range. */
8093 if (INTEGRAL_TYPE_P (TREE_TYPE (cst0))
8094 && ((sgn0 == 1
8095 && TYPE_MIN_VALUE (TREE_TYPE (cst0))
8096 && tree_int_cst_equal (cst0, TYPE_MIN_VALUE (TREE_TYPE (cst0))))
8097 || (sgn0 == -1
8098 && TYPE_MAX_VALUE (TREE_TYPE (cst0))
8099 && tree_int_cst_equal (cst0, TYPE_MAX_VALUE (TREE_TYPE (cst0))))))
8100 return NULL_TREE;
8102 t = int_const_binop (sgn0 == -1 ? PLUS_EXPR : MINUS_EXPR,
8103 cst0, build_int_cst (TREE_TYPE (cst0), 1));
8104 t = fold_build2_loc (loc, code0, TREE_TYPE (arg0), TREE_OPERAND (arg0, 0), t);
8105 t = fold_convert (TREE_TYPE (arg1), t);
8107 return fold_build2_loc (loc, code, type, t, arg1);
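/* A concrete instance of the rewrite, valid because signed overflow is
   assumed undefined so A - CST cannot wrap (plain C sketch):

     int before (int a, int b) { return a - 10 <  b; }
     int after  (int a, int b) { return a - 9  <= b; }   // same result, smaller constant
*/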
8110 /* Canonicalize the comparison ARG0 CODE ARG1 with type TYPE with undefined
8111 overflow further. Try to decrease the magnitude of constants involved
8112 by changing LE_EXPR and GE_EXPR to LT_EXPR and GT_EXPR or vice versa
8113 and put sole constants at the second argument position.
8114 Returns the canonicalized tree if changed, otherwise NULL_TREE. */
8116 static tree
8117 maybe_canonicalize_comparison (location_t loc, enum tree_code code, tree type,
8118 tree arg0, tree arg1)
8120 tree t;
8121 bool strict_overflow_p;
8122 const char * const warnmsg = G_("assuming signed overflow does not occur "
8123 "when reducing constant in comparison");
8125 /* Try canonicalization by simplifying arg0. */
8126 strict_overflow_p = false;
8127 t = maybe_canonicalize_comparison_1 (loc, code, type, arg0, arg1,
8128 &strict_overflow_p);
8129 if (t)
8131 if (strict_overflow_p)
8132 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_MAGNITUDE);
8133 return t;
8136 /* Try canonicalization by simplifying arg1 using the swapped
8137 comparison. */
8138 code = swap_tree_comparison (code);
8139 strict_overflow_p = false;
8140 t = maybe_canonicalize_comparison_1 (loc, code, type, arg1, arg0,
8141 &strict_overflow_p);
8142 if (t && strict_overflow_p)
8143 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_MAGNITUDE);
8144 return t;
8147 /* Return whether BASE + OFFSET + BITPOS may wrap around the address
8148 space. This is used to avoid issuing overflow warnings for
8149 expressions like &p->x which cannot wrap. */
8151 static bool
8152 pointer_may_wrap_p (tree base, tree offset, HOST_WIDE_INT bitpos)
8154 if (!POINTER_TYPE_P (TREE_TYPE (base)))
8155 return true;
8157 if (bitpos < 0)
8158 return true;
8160 wide_int wi_offset;
8161 int precision = TYPE_PRECISION (TREE_TYPE (base));
8162 if (offset == NULL_TREE)
8163 wi_offset = wi::zero (precision);
8164 else if (TREE_CODE (offset) != INTEGER_CST || TREE_OVERFLOW (offset))
8165 return true;
8166 else
8167 wi_offset = offset;
8169 bool overflow;
8170 wide_int units = wi::shwi (bitpos / BITS_PER_UNIT, precision);
8171 wide_int total = wi::add (wi_offset, units, UNSIGNED, &overflow);
8172 if (overflow)
8173 return true;
8175 if (!wi::fits_uhwi_p (total))
8176 return true;
8178 HOST_WIDE_INT size = int_size_in_bytes (TREE_TYPE (TREE_TYPE (base)));
8179 if (size <= 0)
8180 return true;
8182 /* We can do slightly better for SIZE if we have an ADDR_EXPR of an
8183 array. */
8184 if (TREE_CODE (base) == ADDR_EXPR)
8186 HOST_WIDE_INT base_size;
8188 base_size = int_size_in_bytes (TREE_TYPE (TREE_OPERAND (base, 0)));
8189 if (base_size > 0 && size < base_size)
8190 size = base_size;
8193 return total.to_uhwi () > (unsigned HOST_WIDE_INT) size;
8196 /* Return the HOST_WIDE_INT least significant bits of T, a sizetype
8197 kind INTEGER_CST. This makes sure to properly sign-extend the
8198 constant. */
8200 static HOST_WIDE_INT
8201 size_low_cst (const_tree t)
8203 HOST_WIDE_INT w = TREE_INT_CST_ELT (t, 0);
8204 int prec = TYPE_PRECISION (TREE_TYPE (t));
8205 if (prec < HOST_BITS_PER_WIDE_INT)
8206 return sext_hwi (w, prec);
8207 return w;
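/* Sketch of the sign extension sext_hwi performs for a precision
   narrower than HOST_WIDE_INT, assuming a 64-bit HWI and arithmetic
   right shifts (an illustrative idiom, not the GCC implementation):

     static long long
     sext (long long w, int prec)
     {
       return (w << (64 - prec)) >> (64 - prec);
     }

   E.g. sext (0xFF, 8) == -1, matching a sizetype constant whose low
   8 bits are all ones.  */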
8210 /* Subroutine of fold_binary. This routine performs all of the
8211 transformations that are common to the equality/inequality
8212 operators (EQ_EXPR and NE_EXPR) and the ordering operators
8213 (LT_EXPR, LE_EXPR, GE_EXPR and GT_EXPR). Callers other than
8214 fold_binary itself should go through fold_binary. Fold a comparison with
8215 tree code CODE and type TYPE with operands OP0 and OP1. Return
8216 the folded comparison or NULL_TREE. */
8218 static tree
8219 fold_comparison (location_t loc, enum tree_code code, tree type,
8220 tree op0, tree op1)
8222 const bool equality_code = (code == EQ_EXPR || code == NE_EXPR);
8223 tree arg0, arg1, tem;
8225 arg0 = op0;
8226 arg1 = op1;
8228 STRIP_SIGN_NOPS (arg0);
8229 STRIP_SIGN_NOPS (arg1);
8231 /* Transform comparisons of the form X +- C1 CMP C2 to X CMP C2 -+ C1. */
8232 if ((TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
8233 && (equality_code
8234 || (ANY_INTEGRAL_TYPE_P (TREE_TYPE (arg0))
8235 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0))))
8236 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
8237 && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1))
8238 && TREE_CODE (arg1) == INTEGER_CST
8239 && !TREE_OVERFLOW (arg1))
8241 const enum tree_code
8242 reverse_op = TREE_CODE (arg0) == PLUS_EXPR ? MINUS_EXPR : PLUS_EXPR;
8243 tree const1 = TREE_OPERAND (arg0, 1);
8244 tree const2 = fold_convert_loc (loc, TREE_TYPE (const1), arg1);
8245 tree variable = TREE_OPERAND (arg0, 0);
8246 tree new_const = int_const_binop (reverse_op, const2, const1);
8248 /* If the constant operation overflowed this can be
8249 simplified as a comparison against INT_MAX/INT_MIN. */
8250 if (TREE_OVERFLOW (new_const)
8251 && !TYPE_OVERFLOW_WRAPS (TREE_TYPE (arg0)))
8253 int const1_sgn = tree_int_cst_sgn (const1);
8254 enum tree_code code2 = code;
8256 /* Get the sign of the constant on the lhs if the
8257 operation were VARIABLE + CONST1. */
8258 if (TREE_CODE (arg0) == MINUS_EXPR)
8259 const1_sgn = -const1_sgn;
8261 /* The sign of the constant determines if we overflowed
8262 INT_MAX (const1_sgn == -1) or INT_MIN (const1_sgn == 1).
8263 Canonicalize to the INT_MIN overflow by swapping the comparison
8264 if necessary. */
8265 if (const1_sgn == -1)
8266 code2 = swap_tree_comparison (code);
8268 /* We now can look at the canonicalized case
8269 VARIABLE + 1 CODE2 INT_MIN
8270 and decide on the result. */
8271 switch (code2)
8273 case EQ_EXPR:
8274 case LT_EXPR:
8275 case LE_EXPR:
8276 return
8277 omit_one_operand_loc (loc, type, boolean_false_node, variable);
8279 case NE_EXPR:
8280 case GE_EXPR:
8281 case GT_EXPR:
8282 return
8283 omit_one_operand_loc (loc, type, boolean_true_node, variable);
8285 default:
8286 gcc_unreachable ();
8289 else
8291 if (!equality_code)
8292 fold_overflow_warning ("assuming signed overflow does not occur "
8293 "when changing X +- C1 cmp C2 to "
8294 "X cmp C2 -+ C1",
8295 WARN_STRICT_OVERFLOW_COMPARISON);
8296 return fold_build2_loc (loc, code, type, variable, new_const);
8300 /* For comparisons of pointers we can decompose it to a compile time
8301 comparison of the base objects and the offsets into the object.
8302 This requires at least one operand being an ADDR_EXPR or a
8303 POINTER_PLUS_EXPR to do more than the operand_equal_p test below. */
8304 if (POINTER_TYPE_P (TREE_TYPE (arg0))
8305 && (TREE_CODE (arg0) == ADDR_EXPR
8306 || TREE_CODE (arg1) == ADDR_EXPR
8307 || TREE_CODE (arg0) == POINTER_PLUS_EXPR
8308 || TREE_CODE (arg1) == POINTER_PLUS_EXPR))
8310 tree base0, base1, offset0 = NULL_TREE, offset1 = NULL_TREE;
8311 HOST_WIDE_INT bitsize, bitpos0 = 0, bitpos1 = 0;
8312 machine_mode mode;
8313 int volatilep, unsignedp;
8314 bool indirect_base0 = false, indirect_base1 = false;
8316 /* Get base and offset for the access. Strip ADDR_EXPR for
8317 get_inner_reference, but put it back by stripping INDIRECT_REF
8318 off the base object if possible. indirect_baseN will be true
8319 if baseN is not an address but refers to the object itself. */
8320 base0 = arg0;
8321 if (TREE_CODE (arg0) == ADDR_EXPR)
8323 base0 = get_inner_reference (TREE_OPERAND (arg0, 0),
8324 &bitsize, &bitpos0, &offset0, &mode,
8325 &unsignedp, &volatilep, false);
8326 if (TREE_CODE (base0) == INDIRECT_REF)
8327 base0 = TREE_OPERAND (base0, 0);
8328 else
8329 indirect_base0 = true;
8331 else if (TREE_CODE (arg0) == POINTER_PLUS_EXPR)
8333 base0 = TREE_OPERAND (arg0, 0);
8334 STRIP_SIGN_NOPS (base0);
8335 if (TREE_CODE (base0) == ADDR_EXPR)
8337 base0 = TREE_OPERAND (base0, 0);
8338 indirect_base0 = true;
8340 offset0 = TREE_OPERAND (arg0, 1);
8341 if (tree_fits_shwi_p (offset0))
8343 HOST_WIDE_INT off = size_low_cst (offset0);
8344 if ((HOST_WIDE_INT) (((unsigned HOST_WIDE_INT) off)
8345 * BITS_PER_UNIT)
8346 / BITS_PER_UNIT == (HOST_WIDE_INT) off)
8348 bitpos0 = off * BITS_PER_UNIT;
8349 offset0 = NULL_TREE;
8354 base1 = arg1;
8355 if (TREE_CODE (arg1) == ADDR_EXPR)
8357 base1 = get_inner_reference (TREE_OPERAND (arg1, 0),
8358 &bitsize, &bitpos1, &offset1, &mode,
8359 &unsignedp, &volatilep, false);
8360 if (TREE_CODE (base1) == INDIRECT_REF)
8361 base1 = TREE_OPERAND (base1, 0);
8362 else
8363 indirect_base1 = true;
8365 else if (TREE_CODE (arg1) == POINTER_PLUS_EXPR)
8367 base1 = TREE_OPERAND (arg1, 0);
8368 STRIP_SIGN_NOPS (base1);
8369 if (TREE_CODE (base1) == ADDR_EXPR)
8371 base1 = TREE_OPERAND (base1, 0);
8372 indirect_base1 = true;
8374 offset1 = TREE_OPERAND (arg1, 1);
8375 if (tree_fits_shwi_p (offset1))
8377 HOST_WIDE_INT off = size_low_cst (offset1);
8378 if ((HOST_WIDE_INT) (((unsigned HOST_WIDE_INT) off)
8379 * BITS_PER_UNIT)
8380 / BITS_PER_UNIT == (HOST_WIDE_INT) off)
8382 bitpos1 = off * BITS_PER_UNIT;
8383 offset1 = NULL_TREE;
8388 /* If we have equivalent bases we might be able to simplify. */
8389 if (indirect_base0 == indirect_base1
8390 && operand_equal_p (base0, base1, 0))
8392 /* We can fold this expression to a constant if the non-constant
8393 offset parts are equal. */
8394 if ((offset0 == offset1
8395 || (offset0 && offset1
8396 && operand_equal_p (offset0, offset1, 0)))
8397 && (code == EQ_EXPR
8398 || code == NE_EXPR
8399 || (indirect_base0 && DECL_P (base0))
8400 || POINTER_TYPE_OVERFLOW_UNDEFINED))
8403 if (!equality_code
8404 && bitpos0 != bitpos1
8405 && (pointer_may_wrap_p (base0, offset0, bitpos0)
8406 || pointer_may_wrap_p (base1, offset1, bitpos1)))
8407 fold_overflow_warning (("assuming pointer wraparound does not "
8408 "occur when comparing P +- C1 with "
8409 "P +- C2"),
8410 WARN_STRICT_OVERFLOW_CONDITIONAL);
8412 switch (code)
8414 case EQ_EXPR:
8415 return constant_boolean_node (bitpos0 == bitpos1, type);
8416 case NE_EXPR:
8417 return constant_boolean_node (bitpos0 != bitpos1, type);
8418 case LT_EXPR:
8419 return constant_boolean_node (bitpos0 < bitpos1, type);
8420 case LE_EXPR:
8421 return constant_boolean_node (bitpos0 <= bitpos1, type);
8422 case GE_EXPR:
8423 return constant_boolean_node (bitpos0 >= bitpos1, type);
8424 case GT_EXPR:
8425 return constant_boolean_node (bitpos0 > bitpos1, type);
8426 default:;
8429 /* We can simplify the comparison to a comparison of the variable
8430 offset parts if the constant offset parts are equal.
8431 Be careful to use signed sizetype here because otherwise we
8432 mess with array offsets in the wrong way. This is possible
8433 because pointer arithmetic is restricted to remain within an
8434 object and overflow on pointer differences is undefined as of
8435 6.5.6/8 and /9 with respect to the signed ptrdiff_t. */
8436 else if (bitpos0 == bitpos1
8437 && (equality_code
8438 || (indirect_base0 && DECL_P (base0))
8439 || POINTER_TYPE_OVERFLOW_UNDEFINED))
8441 /* By converting to signed sizetype we cover middle-end pointer
8442 arithmetic which operates on unsigned pointer types of size
8443 type size and ARRAY_REF offsets which are properly sign or
8444 zero extended from their type in case it is narrower than
8445 sizetype. */
8446 if (offset0 == NULL_TREE)
8447 offset0 = build_int_cst (ssizetype, 0);
8448 else
8449 offset0 = fold_convert_loc (loc, ssizetype, offset0);
8450 if (offset1 == NULL_TREE)
8451 offset1 = build_int_cst (ssizetype, 0);
8452 else
8453 offset1 = fold_convert_loc (loc, ssizetype, offset1);
8455 if (!equality_code
8456 && (pointer_may_wrap_p (base0, offset0, bitpos0)
8457 || pointer_may_wrap_p (base1, offset1, bitpos1)))
8458 fold_overflow_warning (("assuming pointer wraparound does not "
8459 "occur when comparing P +- C1 with "
8460 "P +- C2"),
8461 WARN_STRICT_OVERFLOW_COMPARISON);
8463 return fold_build2_loc (loc, code, type, offset0, offset1);
8466 /* For equal offsets we can simplify to a comparison of the
8467 base addresses. */
8468 else if (bitpos0 == bitpos1
8469 && (indirect_base0
8470 ? base0 != TREE_OPERAND (arg0, 0) : base0 != arg0)
8471 && (indirect_base1
8472 ? base1 != TREE_OPERAND (arg1, 0) : base1 != arg1)
8473 && ((offset0 == offset1)
8474 || (offset0 && offset1
8475 && operand_equal_p (offset0, offset1, 0))))
8477 if (indirect_base0)
8478 base0 = build_fold_addr_expr_loc (loc, base0);
8479 if (indirect_base1)
8480 base1 = build_fold_addr_expr_loc (loc, base1);
8481 return fold_build2_loc (loc, code, type, base0, base1);
8485 /* Transform comparisons of the form X +- C1 CMP Y +- C2 to
8486 X CMP Y +- C2 +- C1 for signed X, Y. This is valid if
8487 the resulting offset is smaller in absolute value than the
8488 original one and has the same sign. */
8489 if (ANY_INTEGRAL_TYPE_P (TREE_TYPE (arg0))
8490 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0))
8491 && (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
8492 && (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
8493 && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1)))
8494 && (TREE_CODE (arg1) == PLUS_EXPR || TREE_CODE (arg1) == MINUS_EXPR)
8495 && (TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
8496 && !TREE_OVERFLOW (TREE_OPERAND (arg1, 1))))
8498 tree const1 = TREE_OPERAND (arg0, 1);
8499 tree const2 = TREE_OPERAND (arg1, 1);
8500 tree variable1 = TREE_OPERAND (arg0, 0);
8501 tree variable2 = TREE_OPERAND (arg1, 0);
8502 tree cst;
8503 const char * const warnmsg = G_("assuming signed overflow does not "
8504 "occur when combining constants around "
8505 "a comparison");
8507 /* Put the constant on the side where it doesn't overflow and is
8508 of lower absolute value and of the same sign as before. */
8509 cst = int_const_binop (TREE_CODE (arg0) == TREE_CODE (arg1)
8510 ? MINUS_EXPR : PLUS_EXPR,
8511 const2, const1);
8512 if (!TREE_OVERFLOW (cst)
8513 && tree_int_cst_compare (const2, cst) == tree_int_cst_sgn (const2)
8514 && tree_int_cst_sgn (cst) == tree_int_cst_sgn (const2))
8516 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_COMPARISON);
8517 return fold_build2_loc (loc, code, type,
8518 variable1,
8519 fold_build2_loc (loc, TREE_CODE (arg1),
8520 TREE_TYPE (arg1),
8521 variable2, cst));
8524 cst = int_const_binop (TREE_CODE (arg0) == TREE_CODE (arg1)
8525 ? MINUS_EXPR : PLUS_EXPR,
8526 const1, const2);
8527 if (!TREE_OVERFLOW (cst)
8528 && tree_int_cst_compare (const1, cst) == tree_int_cst_sgn (const1)
8529 && tree_int_cst_sgn (cst) == tree_int_cst_sgn (const1))
8531 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_COMPARISON);
8532 return fold_build2_loc (loc, code, type,
8533 fold_build2_loc (loc, TREE_CODE (arg0),
8534 TREE_TYPE (arg0),
8535 variable1, cst),
8536 variable2);
8540 tem = maybe_canonicalize_comparison (loc, code, type, arg0, arg1);
8541 if (tem)
8542 return tem;
8544 /* If this is comparing a constant with a MIN_EXPR or a MAX_EXPR of a
8545 constant, we can simplify it. */
8546 if (TREE_CODE (arg1) == INTEGER_CST
8547 && (TREE_CODE (arg0) == MIN_EXPR
8548 || TREE_CODE (arg0) == MAX_EXPR)
8549 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
8551 tem = optimize_minmax_comparison (loc, code, type, op0, op1);
8552 if (tem)
8553 return tem;
8556 /* If we are comparing an expression that just has comparisons
8557 of two integer values, arithmetic expressions of those comparisons,
8558 and constants, we can simplify it. There are only three cases
8559 to check: the two values can either be equal, the first can be
8560 greater, or the second can be greater. Fold the expression for
8561 those three values. Since each value must be 0 or 1, we have
8562 eight possibilities, each of which corresponds to the constant 0
8563 or 1 or one of the six possible comparisons.
8565 This handles common cases like (a > b) == 0 but also handles
8566 expressions like ((x > y) - (y > x)) > 0, which supposedly
8567 occur in macroized code. */
8569 if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg0) != INTEGER_CST)
8571 tree cval1 = 0, cval2 = 0;
8572 int save_p = 0;
8574 if (twoval_comparison_p (arg0, &cval1, &cval2, &save_p)
8575 /* Don't handle degenerate cases here; they should already
8576 have been handled anyway. */
8577 && cval1 != 0 && cval2 != 0
8578 && ! (TREE_CONSTANT (cval1) && TREE_CONSTANT (cval2))
8579 && TREE_TYPE (cval1) == TREE_TYPE (cval2)
8580 && INTEGRAL_TYPE_P (TREE_TYPE (cval1))
8581 && TYPE_MAX_VALUE (TREE_TYPE (cval1))
8582 && TYPE_MAX_VALUE (TREE_TYPE (cval2))
8583 && ! operand_equal_p (TYPE_MIN_VALUE (TREE_TYPE (cval1)),
8584 TYPE_MAX_VALUE (TREE_TYPE (cval2)), 0))
8586 tree maxval = TYPE_MAX_VALUE (TREE_TYPE (cval1));
8587 tree minval = TYPE_MIN_VALUE (TREE_TYPE (cval1));
8589 /* We can't just pass T to eval_subst in case cval1 or cval2
8590 was the same as ARG1. */
8592 tree high_result
8593 = fold_build2_loc (loc, code, type,
8594 eval_subst (loc, arg0, cval1, maxval,
8595 cval2, minval),
8596 arg1);
8597 tree equal_result
8598 = fold_build2_loc (loc, code, type,
8599 eval_subst (loc, arg0, cval1, maxval,
8600 cval2, maxval),
8601 arg1);
8602 tree low_result
8603 = fold_build2_loc (loc, code, type,
8604 eval_subst (loc, arg0, cval1, minval,
8605 cval2, maxval),
8606 arg1);
8608 /* All three of these results should be 0 or 1. Confirm they are.
8609 Then use those values to select the proper code to use. */
8611 if (TREE_CODE (high_result) == INTEGER_CST
8612 && TREE_CODE (equal_result) == INTEGER_CST
8613 && TREE_CODE (low_result) == INTEGER_CST)
8615 /* Make a 3-bit mask with the high-order bit being the
8616 value for `>', the next for '=', and the low for '<'. */
8617 switch ((integer_onep (high_result) * 4)
8618 + (integer_onep (equal_result) * 2)
8619 + integer_onep (low_result))
8621 case 0:
8622 /* Always false. */
8623 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
8624 case 1:
8625 code = LT_EXPR;
8626 break;
8627 case 2:
8628 code = EQ_EXPR;
8629 break;
8630 case 3:
8631 code = LE_EXPR;
8632 break;
8633 case 4:
8634 code = GT_EXPR;
8635 break;
8636 case 5:
8637 code = NE_EXPR;
8638 break;
8639 case 6:
8640 code = GE_EXPR;
8641 break;
8642 case 7:
8643 /* Always true. */
8644 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
8647 if (save_p)
8649 tem = save_expr (build2 (code, type, cval1, cval2));
8650 SET_EXPR_LOCATION (tem, loc);
8651 return tem;
8653 return fold_build2_loc (loc, code, type, cval1, cval2);
8658 /* We can fold X/C1 op C2 where C1 and C2 are integer constants
8659 into a single range test. */
8660 if ((TREE_CODE (arg0) == TRUNC_DIV_EXPR
8661 || TREE_CODE (arg0) == EXACT_DIV_EXPR)
8662 && TREE_CODE (arg1) == INTEGER_CST
8663 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
8664 && !integer_zerop (TREE_OPERAND (arg0, 1))
8665 && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1))
8666 && !TREE_OVERFLOW (arg1))
8668 tem = fold_div_compare (loc, code, type, arg0, arg1);
8669 if (tem != NULL_TREE)
8670 return tem;
8673 return NULL_TREE;
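/* The first transformation in this function in concrete form, again
   assuming undefined signed overflow (plain C sketch):

     int before (int x) { return x + 5 < 20; }
     int after  (int x) { return x < 15; }   // C2 - C1 folded at compile time
*/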
8677 /* Subroutine of fold_binary. Optimize complex multiplications of the
8678 form z * conj(z), as pow(realpart(z),2) + pow(imagpart(z),2). The
8679 argument EXPR represents the expression "z" of type TYPE. */
8681 static tree
8682 fold_mult_zconjz (location_t loc, tree type, tree expr)
8684 tree itype = TREE_TYPE (type);
8685 tree rpart, ipart, tem;
8687 if (TREE_CODE (expr) == COMPLEX_EXPR)
8689 rpart = TREE_OPERAND (expr, 0);
8690 ipart = TREE_OPERAND (expr, 1);
8692 else if (TREE_CODE (expr) == COMPLEX_CST)
8694 rpart = TREE_REALPART (expr);
8695 ipart = TREE_IMAGPART (expr);
8697 else
8699 expr = save_expr (expr);
8700 rpart = fold_build1_loc (loc, REALPART_EXPR, itype, expr);
8701 ipart = fold_build1_loc (loc, IMAGPART_EXPR, itype, expr);
8704 rpart = save_expr (rpart);
8705 ipart = save_expr (ipart);
8706 tem = fold_build2_loc (loc, PLUS_EXPR, itype,
8707 fold_build2_loc (loc, MULT_EXPR, itype, rpart, rpart),
8708 fold_build2_loc (loc, MULT_EXPR, itype, ipart, ipart));
8709 return fold_build2_loc (loc, COMPLEX_EXPR, type, tem,
8710 build_zero_cst (itype));
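/* Correctness check: if z = a + b*i then conj(z) = a - b*i and
   z * conj(z) = a*a + b*b with a zero imaginary part, which is what
   the built expression computes.  Plain C sketch:

     #include <complex.h>

     static double _Complex
     zconjz (double _Complex z)
     {
       return z * conj (z);   // == creal (z) * creal (z) + cimag (z) * cimag (z)
     }
*/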
8714 /* Helper function for fold_vec_perm. Store elements of VECTOR_CST or
8715 CONSTRUCTOR ARG into array ELTS and return true if successful. */
8717 static bool
8718 vec_cst_ctor_to_array (tree arg, tree *elts)
8720 unsigned int nelts = TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg)), i;
8722 if (TREE_CODE (arg) == VECTOR_CST)
8724 for (i = 0; i < VECTOR_CST_NELTS (arg); ++i)
8725 elts[i] = VECTOR_CST_ELT (arg, i);
8727 else if (TREE_CODE (arg) == CONSTRUCTOR)
8729 constructor_elt *elt;
8731 FOR_EACH_VEC_SAFE_ELT (CONSTRUCTOR_ELTS (arg), i, elt)
8732 if (i >= nelts || TREE_CODE (TREE_TYPE (elt->value)) == VECTOR_TYPE)
8733 return false;
8734 else
8735 elts[i] = elt->value;
8737 else
8738 return false;
8739 for (; i < nelts; i++)
8740 elts[i]
8741 = fold_convert (TREE_TYPE (TREE_TYPE (arg)), integer_zero_node);
8742 return true;
8745 /* Attempt to fold vector permutation of ARG0 and ARG1 vectors using SEL
8746 selector. Return the folded VECTOR_CST or CONSTRUCTOR if successful,
8747 NULL_TREE otherwise. */
8749 static tree
8750 fold_vec_perm (tree type, tree arg0, tree arg1, const unsigned char *sel)
8752 unsigned int nelts = TYPE_VECTOR_SUBPARTS (type), i;
8753 tree *elts;
8754 bool need_ctor = false;
8756 gcc_assert (TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0)) == nelts
8757 && TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg1)) == nelts);
8758 if (TREE_TYPE (TREE_TYPE (arg0)) != TREE_TYPE (type)
8759 || TREE_TYPE (TREE_TYPE (arg1)) != TREE_TYPE (type))
8760 return NULL_TREE;
8762 elts = XALLOCAVEC (tree, nelts * 3);
8763 if (!vec_cst_ctor_to_array (arg0, elts)
8764 || !vec_cst_ctor_to_array (arg1, elts + nelts))
8765 return NULL_TREE;
8767 for (i = 0; i < nelts; i++)
8769 if (!CONSTANT_CLASS_P (elts[sel[i]]))
8770 need_ctor = true;
8771 elts[i + 2 * nelts] = unshare_expr (elts[sel[i]]);
8774 if (need_ctor)
8776 vec<constructor_elt, va_gc> *v;
8777 vec_alloc (v, nelts);
8778 for (i = 0; i < nelts; i++)
8779 CONSTRUCTOR_APPEND_ELT (v, NULL_TREE, elts[2 * nelts + i]);
8780 return build_constructor (type, v);
8782 else
8783 return build_vector (type, &elts[2 * nelts]);
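/* The selector semantics implemented above: the two input vectors are
   laid out back to back, and element I of the result is taken from
   that combined array.  Scalar sketch (plain C):

     // combined[0 .. nelts-1]        = elements of arg0
     // combined[nelts .. 2*nelts-1]  = elements of arg1
     for (i = 0; i < nelts; i++)
       out[i] = combined[sel[i]];
*/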
8786 /* Try to fold a pointer difference of type TYPE between two address expressions of
8787 array references AREF0 and AREF1 using location LOC. Return a
8788 simplified expression for the difference or NULL_TREE. */
8790 static tree
8791 fold_addr_of_array_ref_difference (location_t loc, tree type,
8792 tree aref0, tree aref1)
8794 tree base0 = TREE_OPERAND (aref0, 0);
8795 tree base1 = TREE_OPERAND (aref1, 0);
8796 tree base_offset = build_int_cst (type, 0);
8798 /* If the bases are array references as well, recurse. If the bases
8799 are pointer indirections compute the difference of the pointers.
8800 If the bases are equal, we are set. */
8801 if ((TREE_CODE (base0) == ARRAY_REF
8802 && TREE_CODE (base1) == ARRAY_REF
8803 && (base_offset
8804 = fold_addr_of_array_ref_difference (loc, type, base0, base1)))
8805 || (INDIRECT_REF_P (base0)
8806 && INDIRECT_REF_P (base1)
8807 && (base_offset = fold_binary_loc (loc, MINUS_EXPR, type,
8808 TREE_OPERAND (base0, 0),
8809 TREE_OPERAND (base1, 0))))
8810 || operand_equal_p (base0, base1, 0))
8812 tree op0 = fold_convert_loc (loc, type, TREE_OPERAND (aref0, 1));
8813 tree op1 = fold_convert_loc (loc, type, TREE_OPERAND (aref1, 1));
8814 tree esz = fold_convert_loc (loc, type, array_ref_element_size (aref0));
8815 tree diff = build2 (MINUS_EXPR, type, op0, op1);
8816 return fold_build2_loc (loc, PLUS_EXPR, type,
8817 base_offset,
8818 fold_build2_loc (loc, MULT_EXPR, type,
8819 diff, esz));
8821 return NULL_TREE;
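/* A concrete instance of this folding (plain C): for any array a,

     ptrdiff_t d = &a[i] - &a[j];   // simplifies to i - j

   at the byte level the difference is (i - j) * sizeof (a[0]), which is
   exactly the (op0 - op1) * element_size expression built above.  */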
8824 /* If the real or vector real constant CST of type TYPE has an exact
8825 inverse, return it, else return NULL. */
8827 tree
8828 exact_inverse (tree type, tree cst)
8830 REAL_VALUE_TYPE r;
8831 tree unit_type, *elts;
8832 machine_mode mode;
8833 unsigned vec_nelts, i;
8835 switch (TREE_CODE (cst))
8837 case REAL_CST:
8838 r = TREE_REAL_CST (cst);
8840 if (exact_real_inverse (TYPE_MODE (type), &r))
8841 return build_real (type, r);
8843 return NULL_TREE;
8845 case VECTOR_CST:
8846 vec_nelts = VECTOR_CST_NELTS (cst);
8847 elts = XALLOCAVEC (tree, vec_nelts);
8848 unit_type = TREE_TYPE (type);
8849 mode = TYPE_MODE (unit_type);
8851 for (i = 0; i < vec_nelts; i++)
8853 r = TREE_REAL_CST (VECTOR_CST_ELT (cst, i));
8854 if (!exact_real_inverse (mode, &r))
8855 return NULL_TREE;
8856 elts[i] = build_real (unit_type, r);
8859 return build_vector (type, elts);
8861 default:
8862 return NULL_TREE;
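/* In binary floating point only constants whose reciprocal is exactly
   representable qualify, essentially powers of two.  Plain C sketch of
   the division-to-multiplication rewrite this enables:

     static double
     quarter (double x)
     {
       return x / 0.25;   // may be rewritten as x * 4.0, exactly
     }

   3.0, by contrast, has no exact reciprocal, so x / 3.0 is left alone.  */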
8866 /* Mask out the tz least significant bits of X of type TYPE where
8867 tz is the number of trailing zeroes in Y. */
8868 static wide_int
8869 mask_with_tz (tree type, const wide_int &x, const wide_int &y)
8871 int tz = wi::ctz (y);
8872 if (tz > 0)
8873 return wi::mask (tz, true, TYPE_PRECISION (type)) & x;
8874 return x;
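/* Sketch of the effect, assuming 32-bit operands (plain C): if Y ends
   in tz zero bits, the low tz bits of X are cleared as well, since a
   subsequent BIT_AND with Y would wipe them anyway:

     unsigned masked = x & ~((1u << tz) - 1);
*/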
8877 /* Return true when T is an address and is known to be nonzero.
8878 For floating point we further ensure that T is not denormal.
8879 Similar logic is present in nonzero_address in rtlanal.c.
8881 If the return value is based on the assumption that signed overflow
8882 is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
8883 change *STRICT_OVERFLOW_P. */
8885 static bool
8886 tree_expr_nonzero_warnv_p (tree t, bool *strict_overflow_p)
8888 tree type = TREE_TYPE (t);
8889 enum tree_code code;
8891 /* Doing something useful for floating point would need more work. */
8892 if (!INTEGRAL_TYPE_P (type) && !POINTER_TYPE_P (type))
8893 return false;
8895 code = TREE_CODE (t);
8896 switch (TREE_CODE_CLASS (code))
8898 case tcc_unary:
8899 return tree_unary_nonzero_warnv_p (code, type, TREE_OPERAND (t, 0),
8900 strict_overflow_p);
8901 case tcc_binary:
8902 case tcc_comparison:
8903 return tree_binary_nonzero_warnv_p (code, type,
8904 TREE_OPERAND (t, 0),
8905 TREE_OPERAND (t, 1),
8906 strict_overflow_p);
8907 case tcc_constant:
8908 case tcc_declaration:
8909 case tcc_reference:
8910 return tree_single_nonzero_warnv_p (t, strict_overflow_p);
8912 default:
8913 break;
8916 switch (code)
8918 case TRUTH_NOT_EXPR:
8919 return tree_unary_nonzero_warnv_p (code, type, TREE_OPERAND (t, 0),
8920 strict_overflow_p);
8922 case TRUTH_AND_EXPR:
8923 case TRUTH_OR_EXPR:
8924 case TRUTH_XOR_EXPR:
8925 return tree_binary_nonzero_warnv_p (code, type,
8926 TREE_OPERAND (t, 0),
8927 TREE_OPERAND (t, 1),
8928 strict_overflow_p);
8930 case COND_EXPR:
8931 case CONSTRUCTOR:
8932 case OBJ_TYPE_REF:
8933 case ASSERT_EXPR:
8934 case ADDR_EXPR:
8935 case WITH_SIZE_EXPR:
8936 case SSA_NAME:
8937 return tree_single_nonzero_warnv_p (t, strict_overflow_p);
8939 case COMPOUND_EXPR:
8940 case MODIFY_EXPR:
8941 case BIND_EXPR:
8942 return tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 1),
8943 strict_overflow_p);
8945 case SAVE_EXPR:
8946 return tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 0),
8947 strict_overflow_p);
8949 case CALL_EXPR:
8951 tree fndecl = get_callee_fndecl (t);
8952 if (!fndecl) return false;
8953 if (flag_delete_null_pointer_checks && !flag_check_new
8954 && DECL_IS_OPERATOR_NEW (fndecl)
8955 && !TREE_NOTHROW (fndecl))
8956 return true;
8957 if (flag_delete_null_pointer_checks
8958 && lookup_attribute ("returns_nonnull",
8959 TYPE_ATTRIBUTES (TREE_TYPE (fndecl))))
8960 return true;
8961 return alloca_call_p (t);
8964 default:
8965 break;
8967 return false;
8970 /* Return true when T is an address and is known to be nonzero.
8971 Handle warnings about undefined signed overflow. */
8973 static bool
8974 tree_expr_nonzero_p (tree t)
8976 bool ret, strict_overflow_p;
8978 strict_overflow_p = false;
8979 ret = tree_expr_nonzero_warnv_p (t, &strict_overflow_p);
8980 if (strict_overflow_p)
8981 fold_overflow_warning (("assuming signed overflow does not occur when "
8982 "determining that expression is always "
8983 "non-zero"),
8984 WARN_STRICT_OVERFLOW_MISC);
8985 return ret;
8988 /* Fold a binary expression of code CODE and type TYPE with operands
8989 OP0 and OP1. LOC is the location of the resulting expression.
8990 Return the folded expression if folding is successful. Otherwise,
8991 return NULL_TREE. */
8993 tree
8994 fold_binary_loc (location_t loc,
8995 enum tree_code code, tree type, tree op0, tree op1)
8997 enum tree_code_class kind = TREE_CODE_CLASS (code);
8998 tree arg0, arg1, tem;
8999 tree t1 = NULL_TREE;
9000 bool strict_overflow_p;
9001 unsigned int prec;
9003 gcc_assert (IS_EXPR_CODE_CLASS (kind)
9004 && TREE_CODE_LENGTH (code) == 2
9005 && op0 != NULL_TREE
9006 && op1 != NULL_TREE);
9008 arg0 = op0;
9009 arg1 = op1;
9011 /* Strip any conversions that don't change the mode. This is
9012 safe for every expression, except for a comparison expression
9013 because its signedness is derived from its operands. So, in
9014 the latter case, only strip conversions that don't change the
9015 signedness. MIN_EXPR/MAX_EXPR also need signedness of arguments
9016 preserved.
9018 Note that this is done as an internal manipulation within the
9019 constant folder, in order to find the simplest representation
9020 of the arguments so that their form can be studied. In any
9021 cases, the appropriate type conversions should be put back in
9022 the tree that will get out of the constant folder. */
9024 if (kind == tcc_comparison || code == MIN_EXPR || code == MAX_EXPR)
9026 STRIP_SIGN_NOPS (arg0);
9027 STRIP_SIGN_NOPS (arg1);
9029 else
9031 STRIP_NOPS (arg0);
9032 STRIP_NOPS (arg1);
9035 /* Note that TREE_CONSTANT isn't enough: static var addresses are
9036 constant but we can't do arithmetic on them. */
9037 if (CONSTANT_CLASS_P (arg0) && CONSTANT_CLASS_P (arg1))
9039 tem = const_binop (code, type, arg0, arg1);
9040 if (tem != NULL_TREE)
9042 if (TREE_TYPE (tem) != type)
9043 tem = fold_convert_loc (loc, type, tem);
9044 return tem;
9048 /* If this is a commutative operation, and ARG0 is a constant, move it
9049 to ARG1 to reduce the number of tests below. */
9050 if (commutative_tree_code (code)
9051 && tree_swap_operands_p (arg0, arg1, true))
9052 return fold_build2_loc (loc, code, type, op1, op0);
9054 /* Likewise if this is a comparison, and ARG0 is a constant, move it
9055 to ARG1 to reduce the number of tests below. */
9056 if (kind == tcc_comparison
9057 && tree_swap_operands_p (arg0, arg1, true))
9058 return fold_build2_loc (loc, swap_tree_comparison (code), type, op1, op0);
9060 tem = generic_simplify (loc, code, type, op0, op1);
9061 if (tem)
9062 return tem;
9064 /* ARG0 is the first operand of EXPR, and ARG1 is the second operand.
9066 First check for cases where an arithmetic operation is applied to a
9067 compound, conditional, or comparison operation. Push the arithmetic
9068 operation inside the compound or conditional to see if any folding
9069 can then be done. Convert comparison to conditional for this purpose.
9070 This also optimizes non-constant cases that used to be done in
9071 expand_expr.
9073 Before we do that, see if this is a BIT_AND_EXPR or a BIT_IOR_EXPR,
9074 one of the operands is a comparison and the other is a comparison, a
9075 BIT_AND_EXPR with the constant 1, or a truth value. In that case, the
9076 code below would make the expression more complex. Change it to a
9077 TRUTH_{AND,OR}_EXPR. Likewise, convert a similar NE_EXPR to
9078 TRUTH_XOR_EXPR and an EQ_EXPR to the inversion of a TRUTH_XOR_EXPR. */
9080 if ((code == BIT_AND_EXPR || code == BIT_IOR_EXPR
9081 || code == EQ_EXPR || code == NE_EXPR)
9082 && TREE_CODE (type) != VECTOR_TYPE
9083 && ((truth_value_p (TREE_CODE (arg0))
9084 && (truth_value_p (TREE_CODE (arg1))
9085 || (TREE_CODE (arg1) == BIT_AND_EXPR
9086 && integer_onep (TREE_OPERAND (arg1, 1)))))
9087 || (truth_value_p (TREE_CODE (arg1))
9088 && (truth_value_p (TREE_CODE (arg0))
9089 || (TREE_CODE (arg0) == BIT_AND_EXPR
9090 && integer_onep (TREE_OPERAND (arg0, 1)))))))
9092 tem = fold_build2_loc (loc, code == BIT_AND_EXPR ? TRUTH_AND_EXPR
9093 : code == BIT_IOR_EXPR ? TRUTH_OR_EXPR
9094 : TRUTH_XOR_EXPR,
9095 boolean_type_node,
9096 fold_convert_loc (loc, boolean_type_node, arg0),
9097 fold_convert_loc (loc, boolean_type_node, arg1));
9099 if (code == EQ_EXPR)
9100 tem = invert_truthvalue_loc (loc, tem);
9102 return fold_convert_loc (loc, type, tem);
9105 if (TREE_CODE_CLASS (code) == tcc_binary
9106 || TREE_CODE_CLASS (code) == tcc_comparison)
9108 if (TREE_CODE (arg0) == COMPOUND_EXPR)
9110 tem = fold_build2_loc (loc, code, type,
9111 fold_convert_loc (loc, TREE_TYPE (op0),
9112 TREE_OPERAND (arg0, 1)), op1);
9113 return build2_loc (loc, COMPOUND_EXPR, type, TREE_OPERAND (arg0, 0),
9114 tem);
9116 if (TREE_CODE (arg1) == COMPOUND_EXPR
9117 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
9119 tem = fold_build2_loc (loc, code, type, op0,
9120 fold_convert_loc (loc, TREE_TYPE (op1),
9121 TREE_OPERAND (arg1, 1)));
9122 return build2_loc (loc, COMPOUND_EXPR, type, TREE_OPERAND (arg1, 0),
9123 tem);
9126 if (TREE_CODE (arg0) == COND_EXPR
9127 || TREE_CODE (arg0) == VEC_COND_EXPR
9128 || COMPARISON_CLASS_P (arg0))
9130 tem = fold_binary_op_with_conditional_arg (loc, code, type, op0, op1,
9131 arg0, arg1,
9132 /*cond_first_p=*/1);
9133 if (tem != NULL_TREE)
9134 return tem;
9137 if (TREE_CODE (arg1) == COND_EXPR
9138 || TREE_CODE (arg1) == VEC_COND_EXPR
9139 || COMPARISON_CLASS_P (arg1))
9141 tem = fold_binary_op_with_conditional_arg (loc, code, type, op0, op1,
9142 arg1, arg0,
9143 /*cond_first_p=*/0);
9144 if (tem != NULL_TREE)
9145 return tem;
9149 switch (code)
9151 case MEM_REF:
9152 /* MEM[&MEM[p, CST1], CST2] -> MEM[p, CST1 + CST2]. */
9153 if (TREE_CODE (arg0) == ADDR_EXPR
9154 && TREE_CODE (TREE_OPERAND (arg0, 0)) == MEM_REF)
9156 tree iref = TREE_OPERAND (arg0, 0);
9157 return fold_build2 (MEM_REF, type,
9158 TREE_OPERAND (iref, 0),
9159 int_const_binop (PLUS_EXPR, arg1,
9160 TREE_OPERAND (iref, 1)));
9163 /* MEM[&a.b, CST2] -> MEM[&a, offsetof (a, b) + CST2]. */
9164 if (TREE_CODE (arg0) == ADDR_EXPR
9165 && handled_component_p (TREE_OPERAND (arg0, 0)))
9167 tree base;
9168 HOST_WIDE_INT coffset;
9169 base = get_addr_base_and_unit_offset (TREE_OPERAND (arg0, 0),
9170 &coffset);
9171 if (!base)
9172 return NULL_TREE;
9173 return fold_build2 (MEM_REF, type,
9174 build_fold_addr_expr (base),
9175 int_const_binop (PLUS_EXPR, arg1,
9176 size_int (coffset)));
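/* Illustrative example (hypothetical struct): given
   "struct S { int a; int f; } s" with f at byte offset 4, MEM[&s.f, 4]
   has base &s and unit offset 4, so it folds to MEM[&s, 8].  */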
9179 return NULL_TREE;
9181 case POINTER_PLUS_EXPR:
9182 /* INT +p INT -> (PTR)(INT + INT). Stripping types allows for this. */
9183 if (INTEGRAL_TYPE_P (TREE_TYPE (arg1))
9184 && INTEGRAL_TYPE_P (TREE_TYPE (arg0)))
9185 return fold_convert_loc (loc, type,
9186 fold_build2_loc (loc, PLUS_EXPR, sizetype,
9187 fold_convert_loc (loc, sizetype,
9188 arg1),
9189 fold_convert_loc (loc, sizetype,
9190 arg0)));
9192 return NULL_TREE;
9194 case PLUS_EXPR:
9195 if (INTEGRAL_TYPE_P (type) || VECTOR_INTEGER_TYPE_P (type))
9197 /* X + (X / CST) * -CST is X % CST. */
9198 if (TREE_CODE (arg1) == MULT_EXPR
9199 && TREE_CODE (TREE_OPERAND (arg1, 0)) == TRUNC_DIV_EXPR
9200 && operand_equal_p (arg0,
9201 TREE_OPERAND (TREE_OPERAND (arg1, 0), 0), 0))
9203 tree cst0 = TREE_OPERAND (TREE_OPERAND (arg1, 0), 1);
9204 tree cst1 = TREE_OPERAND (arg1, 1);
9205 tree sum = fold_binary_loc (loc, PLUS_EXPR, TREE_TYPE (cst1),
9206 cst1, cst0);
9207 if (sum && integer_zerop (sum))
9208 return fold_convert_loc (loc, type,
9209 fold_build2_loc (loc, TRUNC_MOD_EXPR,
9210 TREE_TYPE (arg0), arg0,
9211 cst0));
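/* Illustrative example: for integral x, "x + (x / 16) * -16" folds here
   to "x % 16", because cst1 + cst0 == -16 + 16 sums to zero.  */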
9215 /* Handle (A1 * C1) + (A2 * C2) with A1, A2 or C1, C2 being the same or
9216 one. Make sure the type is not saturating and has the signedness of
9217 the stripped operands, as fold_plusminus_mult_expr will re-associate.
9218 ??? The latter condition should use TYPE_OVERFLOW_* flags instead. */
9219 if ((TREE_CODE (arg0) == MULT_EXPR
9220 || TREE_CODE (arg1) == MULT_EXPR)
9221 && !TYPE_SATURATING (type)
9222 && TYPE_UNSIGNED (type) == TYPE_UNSIGNED (TREE_TYPE (arg0))
9223 && TYPE_UNSIGNED (type) == TYPE_UNSIGNED (TREE_TYPE (arg1))
9224 && (!FLOAT_TYPE_P (type) || flag_associative_math))
9226 tree tem = fold_plusminus_mult_expr (loc, code, type, arg0, arg1);
9227 if (tem)
9228 return tem;
9231 if (! FLOAT_TYPE_P (type))
9233 /* Reassociate (plus (plus (mult) (foo)) (mult)) as
9234 (plus (plus (mult) (mult)) (foo)) so that we can
9235 take advantage of the factoring cases below. */
9236 if (ANY_INTEGRAL_TYPE_P (type)
9237 && TYPE_OVERFLOW_WRAPS (type)
9238 && (((TREE_CODE (arg0) == PLUS_EXPR
9239 || TREE_CODE (arg0) == MINUS_EXPR)
9240 && TREE_CODE (arg1) == MULT_EXPR)
9241 || ((TREE_CODE (arg1) == PLUS_EXPR
9242 || TREE_CODE (arg1) == MINUS_EXPR)
9243 && TREE_CODE (arg0) == MULT_EXPR)))
9245 tree parg0, parg1, parg, marg;
9246 enum tree_code pcode;
9248 if (TREE_CODE (arg1) == MULT_EXPR)
9249 parg = arg0, marg = arg1;
9250 else
9251 parg = arg1, marg = arg0;
9252 pcode = TREE_CODE (parg);
9253 parg0 = TREE_OPERAND (parg, 0);
9254 parg1 = TREE_OPERAND (parg, 1);
9255 STRIP_NOPS (parg0);
9256 STRIP_NOPS (parg1);
9258 if (TREE_CODE (parg0) == MULT_EXPR
9259 && TREE_CODE (parg1) != MULT_EXPR)
9260 return fold_build2_loc (loc, pcode, type,
9261 fold_build2_loc (loc, PLUS_EXPR, type,
9262 fold_convert_loc (loc, type,
9263 parg0),
9264 fold_convert_loc (loc, type,
9265 marg)),
9266 fold_convert_loc (loc, type, parg1));
9267 if (TREE_CODE (parg0) != MULT_EXPR
9268 && TREE_CODE (parg1) == MULT_EXPR)
9269 return
9270 fold_build2_loc (loc, PLUS_EXPR, type,
9271 fold_convert_loc (loc, type, parg0),
9272 fold_build2_loc (loc, pcode, type,
9273 fold_convert_loc (loc, type, marg),
9274 fold_convert_loc (loc, type,
9275 parg1)));
9278 else
9280 /* Fold __complex__ ( x, 0 ) + __complex__ ( 0, y )
9281 to __complex__ ( x, y ). This is not the same for SNaNs or
9282 if signed zeros are involved. */
9283 if (!HONOR_SNANS (element_mode (arg0))
9284 && !HONOR_SIGNED_ZEROS (element_mode (arg0))
9285 && COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0)))
9287 tree rtype = TREE_TYPE (TREE_TYPE (arg0));
9288 tree arg0r = fold_unary_loc (loc, REALPART_EXPR, rtype, arg0);
9289 tree arg0i = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg0);
9290 bool arg0rz = false, arg0iz = false;
9291 if ((arg0r && (arg0rz = real_zerop (arg0r)))
9292 || (arg0i && (arg0iz = real_zerop (arg0i))))
9294 tree arg1r = fold_unary_loc (loc, REALPART_EXPR, rtype, arg1);
9295 tree arg1i = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg1);
9296 if (arg0rz && arg1i && real_zerop (arg1i))
9298 tree rp = arg1r ? arg1r
9299 : build1 (REALPART_EXPR, rtype, arg1);
9300 tree ip = arg0i ? arg0i
9301 : build1 (IMAGPART_EXPR, rtype, arg0);
9302 return fold_build2_loc (loc, COMPLEX_EXPR, type, rp, ip);
9304 else if (arg0iz && arg1r && real_zerop (arg1r))
9306 tree rp = arg0r ? arg0r
9307 : build1 (REALPART_EXPR, rtype, arg0);
9308 tree ip = arg1i ? arg1i
9309 : build1 (IMAGPART_EXPR, rtype, arg1);
9310 return fold_build2_loc (loc, COMPLEX_EXPR, type, rp, ip);
9315 if (flag_unsafe_math_optimizations
9316 && (TREE_CODE (arg0) == RDIV_EXPR || TREE_CODE (arg0) == MULT_EXPR)
9317 && (TREE_CODE (arg1) == RDIV_EXPR || TREE_CODE (arg1) == MULT_EXPR)
9318 && (tem = distribute_real_division (loc, code, type, arg0, arg1)))
9319 return tem;
9321 /* Convert a + (b*c + d*e) into (a + b*c) + d*e.
9322 We associate floats only if the user has specified
9323 -fassociative-math. */
9324 if (flag_associative_math
9325 && TREE_CODE (arg1) == PLUS_EXPR
9326 && TREE_CODE (arg0) != MULT_EXPR)
9328 tree tree10 = TREE_OPERAND (arg1, 0);
9329 tree tree11 = TREE_OPERAND (arg1, 1);
9330 if (TREE_CODE (tree11) == MULT_EXPR
9331 && TREE_CODE (tree10) == MULT_EXPR)
9333 tree tree0;
9334 tree0 = fold_build2_loc (loc, PLUS_EXPR, type, arg0, tree10);
9335 return fold_build2_loc (loc, PLUS_EXPR, type, tree0, tree11);
9338 /* Convert (b*c + d*e) + a into b*c + (d*e + a).
9339 We associate floats only if the user has specified
9340 -fassociative-math. */
9341 if (flag_associative_math
9342 && TREE_CODE (arg0) == PLUS_EXPR
9343 && TREE_CODE (arg1) != MULT_EXPR)
9345 tree tree00 = TREE_OPERAND (arg0, 0);
9346 tree tree01 = TREE_OPERAND (arg0, 1);
9347 if (TREE_CODE (tree01) == MULT_EXPR
9348 && TREE_CODE (tree00) == MULT_EXPR)
9350 tree tree0;
9351 tree0 = fold_build2_loc (loc, PLUS_EXPR, type, tree01, arg1);
9352 return fold_build2_loc (loc, PLUS_EXPR, type, tree00, tree0);
9357 bit_rotate:
9358 /* (A << C1) + (A >> C2) if A is unsigned and C1+C2 is the size of A
9359 is a rotate of A by C1 bits. */
9360 /* (A << B) + (A >> (Z - B)) if A is unsigned and Z is the size of A
9361 is a rotate of A by B bits. */
9363 enum tree_code code0, code1;
9364 tree rtype;
9365 code0 = TREE_CODE (arg0);
9366 code1 = TREE_CODE (arg1);
9367 if (((code0 == RSHIFT_EXPR && code1 == LSHIFT_EXPR)
9368 || (code1 == RSHIFT_EXPR && code0 == LSHIFT_EXPR))
9369 && operand_equal_p (TREE_OPERAND (arg0, 0),
9370 TREE_OPERAND (arg1, 0), 0)
9371 && (rtype = TREE_TYPE (TREE_OPERAND (arg0, 0)),
9372 TYPE_UNSIGNED (rtype))
9373 /* Only create rotates in complete modes. Other cases are not
9374 expanded properly. */
9375 && (element_precision (rtype)
9376 == GET_MODE_UNIT_PRECISION (TYPE_MODE (rtype))))
9378 tree tree01, tree11;
9379 enum tree_code code01, code11;
9381 tree01 = TREE_OPERAND (arg0, 1);
9382 tree11 = TREE_OPERAND (arg1, 1);
9383 STRIP_NOPS (tree01);
9384 STRIP_NOPS (tree11);
9385 code01 = TREE_CODE (tree01);
9386 code11 = TREE_CODE (tree11);
9387 if (code01 == INTEGER_CST
9388 && code11 == INTEGER_CST
9389 && (wi::to_widest (tree01) + wi::to_widest (tree11)
9390 == element_precision (TREE_TYPE (TREE_OPERAND (arg0, 0)))))
9392 tem = build2_loc (loc, LROTATE_EXPR,
9393 TREE_TYPE (TREE_OPERAND (arg0, 0)),
9394 TREE_OPERAND (arg0, 0),
9395 code0 == LSHIFT_EXPR
9396 ? TREE_OPERAND (arg0, 1)
9397 : TREE_OPERAND (arg1, 1));
9398 return fold_convert_loc (loc, type, tem);
9400 else if (code11 == MINUS_EXPR)
9402 tree tree110, tree111;
9403 tree110 = TREE_OPERAND (tree11, 0);
9404 tree111 = TREE_OPERAND (tree11, 1);
9405 STRIP_NOPS (tree110);
9406 STRIP_NOPS (tree111);
9407 if (TREE_CODE (tree110) == INTEGER_CST
9408 && 0 == compare_tree_int (tree110,
9409 element_precision
9410 (TREE_TYPE (TREE_OPERAND
9411 (arg0, 0))))
9412 && operand_equal_p (tree01, tree111, 0))
9413 return
9414 fold_convert_loc (loc, type,
9415 build2 ((code0 == LSHIFT_EXPR
9416 ? LROTATE_EXPR
9417 : RROTATE_EXPR),
9418 TREE_TYPE (TREE_OPERAND (arg0, 0)),
9419 TREE_OPERAND (arg0, 0),
9420 TREE_OPERAND (arg0, 1)));
9422 else if (code01 == MINUS_EXPR)
9424 tree tree010, tree011;
9425 tree010 = TREE_OPERAND (tree01, 0);
9426 tree011 = TREE_OPERAND (tree01, 1);
9427 STRIP_NOPS (tree010);
9428 STRIP_NOPS (tree011);
9429 if (TREE_CODE (tree010) == INTEGER_CST
9430 && 0 == compare_tree_int (tree010,
9431 element_precision
9432 (TREE_TYPE (TREE_OPERAND
9433 (arg0, 0))))
9434 && operand_equal_p (tree11, tree011, 0))
9435 return fold_convert_loc
9436 (loc, type,
9437 build2 ((code0 != LSHIFT_EXPR
9438 ? LROTATE_EXPR
9439 : RROTATE_EXPR),
9440 TREE_TYPE (TREE_OPERAND (arg0, 0)),
9441 TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 1)));
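/* Illustrative examples, assuming a 32-bit unsigned x: the constant form
   "(x << 3) + (x >> 29)" folds to a left-rotate of x by 3, and the
   variable form "(x << b) + (x >> (32 - b))" folds to a rotate by b via
   the MINUS_EXPR cases above.  */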
9446 associate:
9447 /* In most languages, we can't associate operations on floats through
9448 parentheses. Rather than remember where the parentheses were, we
9449 don't associate floats at all, unless the user has specified
9450 -fassociative-math.
9451 And, we need to make sure type is not saturating. */
9453 if ((! FLOAT_TYPE_P (type) || flag_associative_math)
9454 && !TYPE_SATURATING (type))
9456 tree var0, con0, lit0, minus_lit0;
9457 tree var1, con1, lit1, minus_lit1;
9458 tree atype = type;
9459 bool ok = true;
9461 /* Split both trees into variables, constants, and literals. Then
9462 associate each group together, the constants with literals,
9463 then the result with variables. This increases the chances of
9464 literals being recombined later and of generating relocatable
9465 expressions for the sum of a constant and literal. */
9466 var0 = split_tree (arg0, code, &con0, &lit0, &minus_lit0, 0);
9467 var1 = split_tree (arg1, code, &con1, &lit1, &minus_lit1,
9468 code == MINUS_EXPR);
9470 /* Recombine MINUS_EXPR operands by using PLUS_EXPR. */
9471 if (code == MINUS_EXPR)
9472 code = PLUS_EXPR;
9474 /* With undefined overflow prefer doing association in a type
9475 which wraps on overflow, if that is one of the operand types. */
9476 if ((POINTER_TYPE_P (type) && POINTER_TYPE_OVERFLOW_UNDEFINED)
9477 || (INTEGRAL_TYPE_P (type) && !TYPE_OVERFLOW_WRAPS (type)))
9479 if (INTEGRAL_TYPE_P (TREE_TYPE (arg0))
9480 && TYPE_OVERFLOW_WRAPS (TREE_TYPE (arg0)))
9481 atype = TREE_TYPE (arg0);
9482 else if (INTEGRAL_TYPE_P (TREE_TYPE (arg1))
9483 && TYPE_OVERFLOW_WRAPS (TREE_TYPE (arg1)))
9484 atype = TREE_TYPE (arg1);
9485 gcc_assert (TYPE_PRECISION (atype) == TYPE_PRECISION (type));
9488 /* With undefined overflow we can only associate constants with one
9489 variable, and constants whose association doesn't overflow. */
9490 if ((POINTER_TYPE_P (atype) && POINTER_TYPE_OVERFLOW_UNDEFINED)
9491 || (INTEGRAL_TYPE_P (atype) && !TYPE_OVERFLOW_WRAPS (atype)))
9493 if (var0 && var1)
9495 tree tmp0 = var0;
9496 tree tmp1 = var1;
9497 bool one_neg = false;
9499 if (TREE_CODE (tmp0) == NEGATE_EXPR)
9501 tmp0 = TREE_OPERAND (tmp0, 0);
9502 one_neg = !one_neg;
9504 if (CONVERT_EXPR_P (tmp0)
9505 && INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (tmp0, 0)))
9506 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (tmp0, 0)))
9507 <= TYPE_PRECISION (atype)))
9508 tmp0 = TREE_OPERAND (tmp0, 0);
9509 if (TREE_CODE (tmp1) == NEGATE_EXPR)
9511 tmp1 = TREE_OPERAND (tmp1, 0);
9512 one_neg = !one_neg;
9514 if (CONVERT_EXPR_P (tmp1)
9515 && INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (tmp1, 0)))
9516 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (tmp1, 0)))
9517 <= TYPE_PRECISION (atype)))
9518 tmp1 = TREE_OPERAND (tmp1, 0);
9519 /* The only case we can still associate with two variables
9520 is if they cancel out. */
9521 if (!one_neg
9522 || !operand_equal_p (tmp0, tmp1, 0))
9523 ok = false;
9527 /* Only do something if we found more than two objects. Otherwise,
9528 nothing has changed and we risk infinite recursion. */
9529 if (ok
9530 && (2 < ((var0 != 0) + (var1 != 0)
9531 + (con0 != 0) + (con1 != 0)
9532 + (lit0 != 0) + (lit1 != 0)
9533 + (minus_lit0 != 0) + (minus_lit1 != 0))))
9535 bool any_overflows = false;
9536 if (lit0) any_overflows |= TREE_OVERFLOW (lit0);
9537 if (lit1) any_overflows |= TREE_OVERFLOW (lit1);
9538 if (minus_lit0) any_overflows |= TREE_OVERFLOW (minus_lit0);
9539 if (minus_lit1) any_overflows |= TREE_OVERFLOW (minus_lit1);
9540 var0 = associate_trees (loc, var0, var1, code, atype);
9541 con0 = associate_trees (loc, con0, con1, code, atype);
9542 lit0 = associate_trees (loc, lit0, lit1, code, atype);
9543 minus_lit0 = associate_trees (loc, minus_lit0, minus_lit1,
9544 code, atype);
9546 /* Preserve the MINUS_EXPR if the negative part of the literal is
9547 greater than the positive part. Otherwise, the multiplicative
9548 folding code (i.e. extract_muldiv) may be fooled when
9549 unsigned constants are subtracted, as in the following
9550 example: ((X*2 + 4) - 8U)/2. */
9551 if (minus_lit0 && lit0)
9553 if (TREE_CODE (lit0) == INTEGER_CST
9554 && TREE_CODE (minus_lit0) == INTEGER_CST
9555 && tree_int_cst_lt (lit0, minus_lit0))
9557 minus_lit0 = associate_trees (loc, minus_lit0, lit0,
9558 MINUS_EXPR, atype);
9559 lit0 = 0;
9561 else
9563 lit0 = associate_trees (loc, lit0, minus_lit0,
9564 MINUS_EXPR, atype);
9565 minus_lit0 = 0;
9569 /* Don't introduce overflows through reassociation. */
9570 if (!any_overflows
9571 && ((lit0 && TREE_OVERFLOW_P (lit0))
9572 || (minus_lit0 && TREE_OVERFLOW_P (minus_lit0))))
9573 return NULL_TREE;
9575 if (minus_lit0)
9577 if (con0 == 0)
9578 return
9579 fold_convert_loc (loc, type,
9580 associate_trees (loc, var0, minus_lit0,
9581 MINUS_EXPR, atype));
9582 else
9584 con0 = associate_trees (loc, con0, minus_lit0,
9585 MINUS_EXPR, atype);
9586 return
9587 fold_convert_loc (loc, type,
9588 associate_trees (loc, var0, con0,
9589 PLUS_EXPR, atype));
9593 con0 = associate_trees (loc, con0, lit0, code, atype);
9594 return
9595 fold_convert_loc (loc, type, associate_trees (loc, var0, con0,
9596 code, atype));
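/* Illustrative example, assuming a wrapping (e.g. unsigned) type:
   "(x + 1) + (y + 2)" splits into variables x, y and literals 1, 2,
   which reassociate to "(x + y) + 3" -- four objects were found, so
   the rewrite makes progress.  */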
9600 return NULL_TREE;
9602 case MINUS_EXPR:
9603 /* Pointer simplifications for subtraction, simple reassociations. */
9604 if (POINTER_TYPE_P (TREE_TYPE (arg1)) && POINTER_TYPE_P (TREE_TYPE (arg0)))
9606 /* (PTR0 p+ A) - (PTR1 p+ B) -> (PTR0 - PTR1) + (A - B) */
9607 if (TREE_CODE (arg0) == POINTER_PLUS_EXPR
9608 && TREE_CODE (arg1) == POINTER_PLUS_EXPR)
9610 tree arg00 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
9611 tree arg01 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
9612 tree arg10 = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 0));
9613 tree arg11 = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 1));
9614 return fold_build2_loc (loc, PLUS_EXPR, type,
9615 fold_build2_loc (loc, MINUS_EXPR, type,
9616 arg00, arg10),
9617 fold_build2_loc (loc, MINUS_EXPR, type,
9618 arg01, arg11));
9620 /* (PTR0 p+ A) - PTR1 -> (PTR0 - PTR1) + A, assuming PTR0 - PTR1 simplifies. */
9621 else if (TREE_CODE (arg0) == POINTER_PLUS_EXPR)
9623 tree arg00 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
9624 tree arg01 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
9625 tree tmp = fold_binary_loc (loc, MINUS_EXPR, type, arg00,
9626 fold_convert_loc (loc, type, arg1));
9627 if (tmp)
9628 return fold_build2_loc (loc, PLUS_EXPR, type, tmp, arg01);
9630 /* PTR0 - (PTR1 p+ A) -> (PTR0 - PTR1) - A, assuming PTR0 - PTR1
9631 simplifies. */
9632 else if (TREE_CODE (arg1) == POINTER_PLUS_EXPR)
9634 tree arg10 = fold_convert_loc (loc, type,
9635 TREE_OPERAND (arg1, 0));
9636 tree arg11 = fold_convert_loc (loc, type,
9637 TREE_OPERAND (arg1, 1));
9638 tree tmp = fold_binary_loc (loc, MINUS_EXPR, type,
9639 fold_convert_loc (loc, type, arg0),
9640 arg10);
9641 if (tmp)
9642 return fold_build2_loc (loc, MINUS_EXPR, type, tmp, arg11);
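/* Illustrative examples: "(p p+ 4) - (p p+ 8)" becomes
   "(p - p) + (4 - 8)", which folds to -4, and "(p p+ i) - p" becomes i,
   because p - p simplifies to zero.  */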
9645 /* (-A) - B -> (-B) - A where B is easily negated and we can swap. */
9646 if (TREE_CODE (arg0) == NEGATE_EXPR
9647 && negate_expr_p (arg1)
9648 && reorder_operands_p (arg0, arg1))
9649 return fold_build2_loc (loc, MINUS_EXPR, type,
9650 fold_convert_loc (loc, type,
9651 negate_expr (arg1)),
9652 fold_convert_loc (loc, type,
9653 TREE_OPERAND (arg0, 0)));
9655 if (! FLOAT_TYPE_P (type))
9657 /* Fold (A & ~B) - (A & B) into (A ^ B) - B, where B is
9658 any power of 2 minus 1. */
9659 if (TREE_CODE (arg0) == BIT_AND_EXPR
9660 && TREE_CODE (arg1) == BIT_AND_EXPR
9661 && operand_equal_p (TREE_OPERAND (arg0, 0),
9662 TREE_OPERAND (arg1, 0), 0))
9664 tree mask0 = TREE_OPERAND (arg0, 1);
9665 tree mask1 = TREE_OPERAND (arg1, 1);
9666 tree tem = fold_build1_loc (loc, BIT_NOT_EXPR, type, mask0);
9668 if (operand_equal_p (tem, mask1, 0))
9670 tem = fold_build2_loc (loc, BIT_XOR_EXPR, type,
9671 TREE_OPERAND (arg0, 0), mask1);
9672 return fold_build2_loc (loc, MINUS_EXPR, type, tem, mask1);
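/* Illustrative example, with B == 7 (a power of 2 minus 1):
   "(a & ~7) - (a & 7)" folds to "(a ^ 7) - 7"; both compute the high
   bits of a minus its low three bits.  */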
9677 /* Fold __complex__ ( x, 0 ) - __complex__ ( 0, y ) to
9678 __complex__ ( x, -y ). This is not the same for SNaNs or if
9679 signed zeros are involved. */
9680 if (!HONOR_SNANS (element_mode (arg0))
9681 && !HONOR_SIGNED_ZEROS (element_mode (arg0))
9682 && COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0)))
9684 tree rtype = TREE_TYPE (TREE_TYPE (arg0));
9685 tree arg0r = fold_unary_loc (loc, REALPART_EXPR, rtype, arg0);
9686 tree arg0i = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg0);
9687 bool arg0rz = false, arg0iz = false;
9688 if ((arg0r && (arg0rz = real_zerop (arg0r)))
9689 || (arg0i && (arg0iz = real_zerop (arg0i))))
9691 tree arg1r = fold_unary_loc (loc, REALPART_EXPR, rtype, arg1);
9692 tree arg1i = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg1);
9693 if (arg0rz && arg1i && real_zerop (arg1i))
9695 tree rp = fold_build1_loc (loc, NEGATE_EXPR, rtype,
9696 arg1r ? arg1r
9697 : build1 (REALPART_EXPR, rtype, arg1));
9698 tree ip = arg0i ? arg0i
9699 : build1 (IMAGPART_EXPR, rtype, arg0);
9700 return fold_build2_loc (loc, COMPLEX_EXPR, type, rp, ip);
9702 else if (arg0iz && arg1r && real_zerop (arg1r))
9704 tree rp = arg0r ? arg0r
9705 : build1 (REALPART_EXPR, rtype, arg0);
9706 tree ip = fold_build1_loc (loc, NEGATE_EXPR, rtype,
9707 arg1i ? arg1i
9708 : build1 (IMAGPART_EXPR, rtype, arg1));
9709 return fold_build2_loc (loc, COMPLEX_EXPR, type, rp, ip);
9714 /* A - B -> A + (-B) if B is easily negatable. */
9715 if (negate_expr_p (arg1)
9716 && !TYPE_OVERFLOW_SANITIZED (type)
9717 && ((FLOAT_TYPE_P (type)
9718 /* Avoid this transformation if B is a positive REAL_CST. */
9719 && (TREE_CODE (arg1) != REAL_CST
9720 || REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg1))))
9721 || INTEGRAL_TYPE_P (type)))
9722 return fold_build2_loc (loc, PLUS_EXPR, type,
9723 fold_convert_loc (loc, type, arg0),
9724 fold_convert_loc (loc, type,
9725 negate_expr (arg1)));
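/* Illustrative example: integral "x - 5" becomes "x + -5", exposing the
   reassociation done under the associate label, while "x - 5.0" is left
   alone because 5.0 is a positive REAL_CST.  */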
9727 /* Fold &a[i] - &a[j] to i-j. */
9728 if (TREE_CODE (arg0) == ADDR_EXPR
9729 && TREE_CODE (TREE_OPERAND (arg0, 0)) == ARRAY_REF
9730 && TREE_CODE (arg1) == ADDR_EXPR
9731 && TREE_CODE (TREE_OPERAND (arg1, 0)) == ARRAY_REF)
9733 tree tem = fold_addr_of_array_ref_difference (loc, type,
9734 TREE_OPERAND (arg0, 0),
9735 TREE_OPERAND (arg1, 0));
9736 if (tem)
9737 return tem;
9740 if (FLOAT_TYPE_P (type)
9741 && flag_unsafe_math_optimizations
9742 && (TREE_CODE (arg0) == RDIV_EXPR || TREE_CODE (arg0) == MULT_EXPR)
9743 && (TREE_CODE (arg1) == RDIV_EXPR || TREE_CODE (arg1) == MULT_EXPR)
9744 && (tem = distribute_real_division (loc, code, type, arg0, arg1)))
9745 return tem;
9747 /* Handle (A1 * C1) - (A2 * C2) with A1, A2 or C1, C2 being the same or
9748 one. Make sure the type is not saturating and has the signedness of
9749 the stripped operands, as fold_plusminus_mult_expr will re-associate.
9750 ??? The latter condition should use TYPE_OVERFLOW_* flags instead. */
9751 if ((TREE_CODE (arg0) == MULT_EXPR
9752 || TREE_CODE (arg1) == MULT_EXPR)
9753 && !TYPE_SATURATING (type)
9754 && TYPE_UNSIGNED (type) == TYPE_UNSIGNED (TREE_TYPE (arg0))
9755 && TYPE_UNSIGNED (type) == TYPE_UNSIGNED (TREE_TYPE (arg1))
9756 && (!FLOAT_TYPE_P (type) || flag_associative_math))
9758 tree tem = fold_plusminus_mult_expr (loc, code, type, arg0, arg1);
9759 if (tem)
9760 return tem;
9763 goto associate;
9765 case MULT_EXPR:
9766 /* (-A) * (-B) -> A * B */
9767 if (TREE_CODE (arg0) == NEGATE_EXPR && negate_expr_p (arg1))
9768 return fold_build2_loc (loc, MULT_EXPR, type,
9769 fold_convert_loc (loc, type,
9770 TREE_OPERAND (arg0, 0)),
9771 fold_convert_loc (loc, type,
9772 negate_expr (arg1)));
9773 if (TREE_CODE (arg1) == NEGATE_EXPR && negate_expr_p (arg0))
9774 return fold_build2_loc (loc, MULT_EXPR, type,
9775 fold_convert_loc (loc, type,
9776 negate_expr (arg0)),
9777 fold_convert_loc (loc, type,
9778 TREE_OPERAND (arg1, 0)));
9780 if (! FLOAT_TYPE_P (type))
9782 /* Transform x * -C into -x * C if x is easily negatable. */
9783 if (TREE_CODE (arg1) == INTEGER_CST
9784 && tree_int_cst_sgn (arg1) == -1
9785 && negate_expr_p (arg0)
9786 && (tem = negate_expr (arg1)) != arg1
9787 && !TREE_OVERFLOW (tem))
9788 return fold_build2_loc (loc, MULT_EXPR, type,
9789 fold_convert_loc (loc, type,
9790 negate_expr (arg0)),
9791 tem);
9793 /* (a * (1 << b)) is (a << b) */
9794 if (TREE_CODE (arg1) == LSHIFT_EXPR
9795 && integer_onep (TREE_OPERAND (arg1, 0)))
9796 return fold_build2_loc (loc, LSHIFT_EXPR, type, op0,
9797 TREE_OPERAND (arg1, 1));
9798 if (TREE_CODE (arg0) == LSHIFT_EXPR
9799 && integer_onep (TREE_OPERAND (arg0, 0)))
9800 return fold_build2_loc (loc, LSHIFT_EXPR, type, op1,
9801 TREE_OPERAND (arg0, 1));
9803 /* (A + A) * C -> A * 2 * C */
9804 if (TREE_CODE (arg0) == PLUS_EXPR
9805 && TREE_CODE (arg1) == INTEGER_CST
9806 && operand_equal_p (TREE_OPERAND (arg0, 0),
9807 TREE_OPERAND (arg0, 1), 0))
9808 return fold_build2_loc (loc, MULT_EXPR, type,
9809 omit_one_operand_loc (loc, type,
9810 TREE_OPERAND (arg0, 0),
9811 TREE_OPERAND (arg0, 1)),
9812 fold_build2_loc (loc, MULT_EXPR, type,
9813 build_int_cst (type, 2), arg1));
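/* Illustrative example: "(a + a) * 3" becomes "a * 6" -- the duplicate
   operand is dropped by omit_one_operand_loc and the inner 2 * 3 folds
   to 6.  */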
9815 /* ((T) (X /[ex] C)) * C cancels out if the conversion is
9816 sign-changing only. */
9817 if (TREE_CODE (arg1) == INTEGER_CST
9818 && TREE_CODE (arg0) == EXACT_DIV_EXPR
9819 && operand_equal_p (arg1, TREE_OPERAND (arg0, 1), 0))
9820 return fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
9822 strict_overflow_p = false;
9823 if (TREE_CODE (arg1) == INTEGER_CST
9824 && 0 != (tem = extract_muldiv (op0, arg1, code, NULL_TREE,
9825 &strict_overflow_p)))
9827 if (strict_overflow_p)
9828 fold_overflow_warning (("assuming signed overflow does not "
9829 "occur when simplifying "
9830 "multiplication"),
9831 WARN_STRICT_OVERFLOW_MISC);
9832 return fold_convert_loc (loc, type, tem);
9835 /* Optimize z * conj(z) for integer complex numbers. */
9836 if (TREE_CODE (arg0) == CONJ_EXPR
9837 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
9838 return fold_mult_zconjz (loc, type, arg1);
9839 if (TREE_CODE (arg1) == CONJ_EXPR
9840 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
9841 return fold_mult_zconjz (loc, type, arg0);
9843 else
9845 /* Convert (C1/X)*C2 into (C1*C2)/X. This transformation may change
9846 the result for floating point types due to rounding, so it is applied
9847 only if -fassociative-math was specified. */
9848 if (flag_associative_math
9849 && TREE_CODE (arg0) == RDIV_EXPR
9850 && TREE_CODE (arg1) == REAL_CST
9851 && TREE_CODE (TREE_OPERAND (arg0, 0)) == REAL_CST)
9853 tree tem = const_binop (MULT_EXPR, TREE_OPERAND (arg0, 0),
9854 arg1);
9855 if (tem)
9856 return fold_build2_loc (loc, RDIV_EXPR, type, tem,
9857 TREE_OPERAND (arg0, 1));
9860 /* Strip sign operations from X in X*X, i.e. -Y*-Y -> Y*Y. */
9861 if (operand_equal_p (arg0, arg1, 0))
9863 tree tem = fold_strip_sign_ops (arg0);
9864 if (tem != NULL_TREE)
9866 tem = fold_convert_loc (loc, type, tem);
9867 return fold_build2_loc (loc, MULT_EXPR, type, tem, tem);
9871 /* Fold z * +-I to __complex__ (-+__imag z, +-__real z).
9872 This is not the same for NaNs or if signed zeros are
9873 involved. */
9874 if (!HONOR_NANS (arg0)
9875 && !HONOR_SIGNED_ZEROS (element_mode (arg0))
9876 && COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0))
9877 && TREE_CODE (arg1) == COMPLEX_CST
9878 && real_zerop (TREE_REALPART (arg1)))
9880 tree rtype = TREE_TYPE (TREE_TYPE (arg0));
9881 if (real_onep (TREE_IMAGPART (arg1)))
9882 return
9883 fold_build2_loc (loc, COMPLEX_EXPR, type,
9884 negate_expr (fold_build1_loc (loc, IMAGPART_EXPR,
9885 rtype, arg0)),
9886 fold_build1_loc (loc, REALPART_EXPR, rtype, arg0));
9887 else if (real_minus_onep (TREE_IMAGPART (arg1)))
9888 return
9889 fold_build2_loc (loc, COMPLEX_EXPR, type,
9890 fold_build1_loc (loc, IMAGPART_EXPR, rtype, arg0),
9891 negate_expr (fold_build1_loc (loc, REALPART_EXPR,
9892 rtype, arg0)));
9895 /* Optimize z * conj(z) for floating point complex numbers.
9896 Guarded by flag_unsafe_math_optimizations as non-finite
9897 imaginary components don't produce scalar results. */
9898 if (flag_unsafe_math_optimizations
9899 && TREE_CODE (arg0) == CONJ_EXPR
9900 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
9901 return fold_mult_zconjz (loc, type, arg1);
9902 if (flag_unsafe_math_optimizations
9903 && TREE_CODE (arg1) == CONJ_EXPR
9904 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
9905 return fold_mult_zconjz (loc, type, arg0);
9907 if (flag_unsafe_math_optimizations)
9910 /* Canonicalize x*x as pow(x,2.0), which is expanded as x*x. */
9911 if (!in_gimple_form
9912 && optimize
9913 && operand_equal_p (arg0, arg1, 0))
9915 tree powfn = mathfn_built_in (type, BUILT_IN_POW);
9917 if (powfn)
9919 tree arg = build_real (type, dconst2);
9920 return build_call_expr_loc (loc, powfn, 2, arg0, arg);
9925 goto associate;
9927 case BIT_IOR_EXPR:
9928 /* Canonicalize (X & C1) | C2. */
9929 if (TREE_CODE (arg0) == BIT_AND_EXPR
9930 && TREE_CODE (arg1) == INTEGER_CST
9931 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
9933 int width = TYPE_PRECISION (type), w;
9934 wide_int c1 = TREE_OPERAND (arg0, 1);
9935 wide_int c2 = arg1;
9937 /* If (C1&C2) == C1, then (X&C1)|C2 becomes (X,C2). */
9938 if ((c1 & c2) == c1)
9939 return omit_one_operand_loc (loc, type, arg1,
9940 TREE_OPERAND (arg0, 0));
9942 wide_int msk = wi::mask (width, false,
9943 TYPE_PRECISION (TREE_TYPE (arg1)));
9945 /* If (C1|C2) == ~0 then (X&C1)|C2 becomes X|C2. */
9946 if (msk.and_not (c1 | c2) == 0)
9947 return fold_build2_loc (loc, BIT_IOR_EXPR, type,
9948 TREE_OPERAND (arg0, 0), arg1);
9950 /* Minimize the number of bits set in C1, i.e. C1 := C1 & ~C2,
9951 unless (C1 & ~C2) | (C2 & C3) for some C3 is a mask of some
9952 mode which allows further optimizations. */
9953 c1 &= msk;
9954 c2 &= msk;
9955 wide_int c3 = c1.and_not (c2);
9956 for (w = BITS_PER_UNIT; w <= width; w <<= 1)
9958 wide_int mask = wi::mask (w, false,
9959 TYPE_PRECISION (type));
9960 if (((c1 | c2) & mask) == mask && c1.and_not (mask) == 0)
9962 c3 = mask;
9963 break;
9967 if (c3 != c1)
9968 return fold_build2_loc (loc, BIT_IOR_EXPR, type,
9969 fold_build2_loc (loc, BIT_AND_EXPR, type,
9970 TREE_OPERAND (arg0, 0),
9971 wide_int_to_tree (type,
9972 c3)),
9973 arg1);
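/* Illustrative examples for 8-bit x: "(x & 0xf0) | 0x0f" drops the AND
   entirely because (C1 | C2) covers the whole mode, giving "x | 0x0f";
   "(x & 0x3c) | 0x0c" shrinks C1 to C1 & ~C2, giving
   "(x & 0x30) | 0x0c".  */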
9976 /* (X & ~Y) | (~X & Y) is X ^ Y */
9977 if (TREE_CODE (arg0) == BIT_AND_EXPR
9978 && TREE_CODE (arg1) == BIT_AND_EXPR)
9980 tree a0, a1, l0, l1, n0, n1;
9982 a0 = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 0));
9983 a1 = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 1));
9985 l0 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
9986 l1 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
9988 n0 = fold_build1_loc (loc, BIT_NOT_EXPR, type, l0);
9989 n1 = fold_build1_loc (loc, BIT_NOT_EXPR, type, l1);
9991 if ((operand_equal_p (n0, a0, 0)
9992 && operand_equal_p (n1, a1, 0))
9993 || (operand_equal_p (n0, a1, 0)
9994 && operand_equal_p (n1, a0, 0)))
9995 return fold_build2_loc (loc, BIT_XOR_EXPR, type, l0, n1);
9998 /* See if this can be simplified into a rotate first. If that
9999 is unsuccessful, continue in the association code. */
10000 goto bit_rotate;
10002 case BIT_XOR_EXPR:
10003 /* Fold (X & 1) ^ 1 as (X & 1) == 0. */
10004 if (TREE_CODE (arg0) == BIT_AND_EXPR
10005 && INTEGRAL_TYPE_P (type)
10006 && integer_onep (TREE_OPERAND (arg0, 1))
10007 && integer_onep (arg1))
10008 return fold_build2_loc (loc, EQ_EXPR, type, arg0,
10009 build_zero_cst (TREE_TYPE (arg0)));
10011 /* See if this can be simplified into a rotate first. If that
10012 is unsuccessful, continue in the association code. */
10013 goto bit_rotate;
10015 case BIT_AND_EXPR:
10016 /* ~X & X, (X == 0) & X, and !X & X are always zero. */
10017 if ((TREE_CODE (arg0) == BIT_NOT_EXPR
10018 || TREE_CODE (arg0) == TRUTH_NOT_EXPR
10019 || (TREE_CODE (arg0) == EQ_EXPR
10020 && integer_zerop (TREE_OPERAND (arg0, 1))))
10021 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
10022 return omit_one_operand_loc (loc, type, integer_zero_node, arg1);
10024 /* X & ~X , X & (X == 0), and X & !X are always zero. */
10025 if ((TREE_CODE (arg1) == BIT_NOT_EXPR
10026 || TREE_CODE (arg1) == TRUTH_NOT_EXPR
10027 || (TREE_CODE (arg1) == EQ_EXPR
10028 && integer_zerop (TREE_OPERAND (arg1, 1))))
10029 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
10030 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
10032 /* Fold (X ^ 1) & 1 as (X & 1) == 0. */
10033 if (TREE_CODE (arg0) == BIT_XOR_EXPR
10034 && INTEGRAL_TYPE_P (type)
10035 && integer_onep (TREE_OPERAND (arg0, 1))
10036 && integer_onep (arg1))
10038 tree tem2;
10039 tem = TREE_OPERAND (arg0, 0);
10040 tem2 = fold_convert_loc (loc, TREE_TYPE (tem), arg1);
10041 tem2 = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (tem),
10042 tem, tem2);
10043 return fold_build2_loc (loc, EQ_EXPR, type, tem2,
10044 build_zero_cst (TREE_TYPE (tem)));
10046 /* Fold ~X & 1 as (X & 1) == 0. */
10047 if (TREE_CODE (arg0) == BIT_NOT_EXPR
10048 && INTEGRAL_TYPE_P (type)
10049 && integer_onep (arg1))
10051 tree tem2;
10052 tem = TREE_OPERAND (arg0, 0);
10053 tem2 = fold_convert_loc (loc, TREE_TYPE (tem), arg1);
10054 tem2 = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (tem),
10055 tem, tem2);
10056 return fold_build2_loc (loc, EQ_EXPR, type, tem2,
10057 build_zero_cst (TREE_TYPE (tem)));
10059 /* Fold !X & 1 as X == 0. */
10060 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
10061 && integer_onep (arg1))
10063 tem = TREE_OPERAND (arg0, 0);
10064 return fold_build2_loc (loc, EQ_EXPR, type, tem,
10065 build_zero_cst (TREE_TYPE (tem)));
10068 /* Fold (X ^ Y) & Y as ~X & Y. */
10069 if (TREE_CODE (arg0) == BIT_XOR_EXPR
10070 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
10072 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
10073 return fold_build2_loc (loc, BIT_AND_EXPR, type,
10074 fold_build1_loc (loc, BIT_NOT_EXPR, type, tem),
10075 fold_convert_loc (loc, type, arg1));
10077 /* Fold (X ^ Y) & X as ~Y & X. */
10078 if (TREE_CODE (arg0) == BIT_XOR_EXPR
10079 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
10080 && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
10082 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
10083 return fold_build2_loc (loc, BIT_AND_EXPR, type,
10084 fold_build1_loc (loc, BIT_NOT_EXPR, type, tem),
10085 fold_convert_loc (loc, type, arg1));
10087 /* Fold X & (X ^ Y) as X & ~Y. */
10088 if (TREE_CODE (arg1) == BIT_XOR_EXPR
10089 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
10091 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 1));
10092 return fold_build2_loc (loc, BIT_AND_EXPR, type,
10093 fold_convert_loc (loc, type, arg0),
10094 fold_build1_loc (loc, BIT_NOT_EXPR, type, tem));
10096 /* Fold X & (Y ^ X) as ~Y & X. */
10097 if (TREE_CODE (arg1) == BIT_XOR_EXPR
10098 && operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0)
10099 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
10101 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 0));
10102 return fold_build2_loc (loc, BIT_AND_EXPR, type,
10103 fold_build1_loc (loc, BIT_NOT_EXPR, type, tem),
10104 fold_convert_loc (loc, type, arg0));
10107 /* Fold (X * Y) & -(1 << CST) to X * Y if Y is a constant
10108 multiple of 1 << CST. */
10109 if (TREE_CODE (arg1) == INTEGER_CST)
10111 wide_int cst1 = arg1;
10112 wide_int ncst1 = -cst1;
10113 if ((cst1 & ncst1) == ncst1
10114 && multiple_of_p (type, arg0,
10115 wide_int_to_tree (TREE_TYPE (arg1), ncst1)))
10116 return fold_convert_loc (loc, type, arg0);
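/* Illustrative example: "(x * 12) & -4" folds to "x * 12", since 12 is
   a multiple of 4 == 1 << 2 and -4 keeps exactly the bits above that.  */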
10119 /* Fold (X * CST1) & CST2 to zero if we can, or drop known zero
10120 bits from CST2. */
10121 if (TREE_CODE (arg1) == INTEGER_CST
10122 && TREE_CODE (arg0) == MULT_EXPR
10123 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
10125 wide_int warg1 = arg1;
10126 wide_int masked = mask_with_tz (type, warg1, TREE_OPERAND (arg0, 1));
10128 if (masked == 0)
10129 return omit_two_operands_loc (loc, type, build_zero_cst (type),
10130 arg0, arg1);
10131 else if (masked != warg1)
10133 /* Avoid the transform if arg1 is a mask of some
10134 mode which allows further optimizations. */
10135 int pop = wi::popcount (warg1);
10136 if (!(pop >= BITS_PER_UNIT
10137 && exact_log2 (pop) != -1
10138 && wi::mask (pop, false, warg1.get_precision ()) == warg1))
10139 return fold_build2_loc (loc, code, type, op0,
10140 wide_int_to_tree (type, masked));
10144 /* For constants M and N, if M == (1LL << cst) - 1 && (N & M) == M,
10145 ((A & N) + B) & M -> (A + B) & M
10146 Similarly if (N & M) == 0,
10147 ((A | N) + B) & M -> (A + B) & M
10148 and for - instead of + (or unary - instead of +)
10149 and/or ^ instead of |.
10150 If B is constant and (B & M) == 0, fold into A & M. */
10151 if (TREE_CODE (arg1) == INTEGER_CST)
10153 wide_int cst1 = arg1;
10154 if ((~cst1 != 0) && (cst1 & (cst1 + 1)) == 0
10155 && INTEGRAL_TYPE_P (TREE_TYPE (arg0))
10156 && (TREE_CODE (arg0) == PLUS_EXPR
10157 || TREE_CODE (arg0) == MINUS_EXPR
10158 || TREE_CODE (arg0) == NEGATE_EXPR)
10159 && (TYPE_OVERFLOW_WRAPS (TREE_TYPE (arg0))
10160 || TREE_CODE (TREE_TYPE (arg0)) == INTEGER_TYPE))
10162 tree pmop[2];
10163 int which = 0;
10164 wide_int cst0;
10166 /* Now we know that arg0 is (C + D) or (C - D) or
10167 -C and arg1 (M) is == (1LL << cst) - 1.
10168 Store C into PMOP[0] and D into PMOP[1]. */
10169 pmop[0] = TREE_OPERAND (arg0, 0);
10170 pmop[1] = NULL;
10171 if (TREE_CODE (arg0) != NEGATE_EXPR)
10173 pmop[1] = TREE_OPERAND (arg0, 1);
10174 which = 1;
10177 if ((wi::max_value (TREE_TYPE (arg0)) & cst1) != cst1)
10178 which = -1;
10180 for (; which >= 0; which--)
10181 switch (TREE_CODE (pmop[which]))
10183 case BIT_AND_EXPR:
10184 case BIT_IOR_EXPR:
10185 case BIT_XOR_EXPR:
10186 if (TREE_CODE (TREE_OPERAND (pmop[which], 1))
10187 != INTEGER_CST)
10188 break;
10189 cst0 = TREE_OPERAND (pmop[which], 1);
10190 cst0 &= cst1;
10191 if (TREE_CODE (pmop[which]) == BIT_AND_EXPR)
10193 if (cst0 != cst1)
10194 break;
10196 else if (cst0 != 0)
10197 break;
10198 /* If C or D is of the form (A & N) where
10199 (N & M) == M, or of the form (A | N) or
10200 (A ^ N) where (N & M) == 0, replace it with A. */
10201 pmop[which] = TREE_OPERAND (pmop[which], 0);
10202 break;
10203 case INTEGER_CST:
10204 /* If C or D is a N where (N & M) == 0, it can be
10205 omitted (assumed 0). */
10206 if ((TREE_CODE (arg0) == PLUS_EXPR
10207 || (TREE_CODE (arg0) == MINUS_EXPR && which == 0))
10208 && (cst1 & pmop[which]) == 0)
10209 pmop[which] = NULL;
10210 break;
10211 default:
10212 break;
10215 /* Only build anything new if we optimized one or both arguments
10216 above. */
10217 if (pmop[0] != TREE_OPERAND (arg0, 0)
10218 || (TREE_CODE (arg0) != NEGATE_EXPR
10219 && pmop[1] != TREE_OPERAND (arg0, 1)))
10221 tree utype = TREE_TYPE (arg0);
10222 if (! TYPE_OVERFLOW_WRAPS (TREE_TYPE (arg0)))
10224 /* Perform the operations in a type that has defined
10225 overflow behavior. */
10226 utype = unsigned_type_for (TREE_TYPE (arg0));
10227 if (pmop[0] != NULL)
10228 pmop[0] = fold_convert_loc (loc, utype, pmop[0]);
10229 if (pmop[1] != NULL)
10230 pmop[1] = fold_convert_loc (loc, utype, pmop[1]);
10233 if (TREE_CODE (arg0) == NEGATE_EXPR)
10234 tem = fold_build1_loc (loc, NEGATE_EXPR, utype, pmop[0]);
10235 else if (TREE_CODE (arg0) == PLUS_EXPR)
10237 if (pmop[0] != NULL && pmop[1] != NULL)
10238 tem = fold_build2_loc (loc, PLUS_EXPR, utype,
10239 pmop[0], pmop[1]);
10240 else if (pmop[0] != NULL)
10241 tem = pmop[0];
10242 else if (pmop[1] != NULL)
10243 tem = pmop[1];
10244 else
10245 return build_int_cst (type, 0);
10247 else if (pmop[0] == NULL)
10248 tem = fold_build1_loc (loc, NEGATE_EXPR, utype, pmop[1]);
10249 else
10250 tem = fold_build2_loc (loc, MINUS_EXPR, utype,
10251 pmop[0], pmop[1]);
10252 /* TEM is now the new binary +, - or unary - replacement. */
10253 tem = fold_build2_loc (loc, BIT_AND_EXPR, utype, tem,
10254 fold_convert_loc (loc, utype, arg1));
10255 return fold_convert_loc (loc, type, tem);
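/* Illustrative example of the transform above, with M == 3 (so
   M == (1 << 2) - 1): in "((a | 4) + b) & 3" we have N == 4 and
   (N & M) == 0, so it folds to "(a + b) & 3", with the addition done in
   an unsigned type when the original type has undefined overflow.  */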
10260 /* Simplify ((int)c & 0377) into (int)c, if c is unsigned char. */
10261 if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg0) == NOP_EXPR
10262 && TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg0, 0))))
10264 prec = element_precision (TREE_TYPE (TREE_OPERAND (arg0, 0)));
10266 wide_int mask = wide_int::from (arg1, prec, UNSIGNED);
10267 if (mask == -1)
10268 return
10269 fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
10272 goto associate;
10274 case RDIV_EXPR:
10275 /* Don't touch a floating-point divide by zero unless the mode
10276 of the constant can represent infinity. */
10277 if (TREE_CODE (arg1) == REAL_CST
10278 && !MODE_HAS_INFINITIES (TYPE_MODE (TREE_TYPE (arg1)))
10279 && real_zerop (arg1))
10280 return NULL_TREE;
10282 /* (-A) / (-B) -> A / B */
10283 if (TREE_CODE (arg0) == NEGATE_EXPR && negate_expr_p (arg1))
10284 return fold_build2_loc (loc, RDIV_EXPR, type,
10285 TREE_OPERAND (arg0, 0),
10286 negate_expr (arg1));
10287 if (TREE_CODE (arg1) == NEGATE_EXPR && negate_expr_p (arg0))
10288 return fold_build2_loc (loc, RDIV_EXPR, type,
10289 negate_expr (arg0),
10290 TREE_OPERAND (arg1, 0));
10292 /* Convert A/B/C to A/(B*C). */
10293 if (flag_reciprocal_math
10294 && TREE_CODE (arg0) == RDIV_EXPR)
10295 return fold_build2_loc (loc, RDIV_EXPR, type, TREE_OPERAND (arg0, 0),
10296 fold_build2_loc (loc, MULT_EXPR, type,
10297 TREE_OPERAND (arg0, 1), arg1));
10299 /* Convert A/(B/C) to (A/B)*C. */
10300 if (flag_reciprocal_math
10301 && TREE_CODE (arg1) == RDIV_EXPR)
10302 return fold_build2_loc (loc, MULT_EXPR, type,
10303 fold_build2_loc (loc, RDIV_EXPR, type, arg0,
10304 TREE_OPERAND (arg1, 0)),
10305 TREE_OPERAND (arg1, 1));
10307 /* Convert C1/(X*C2) into (C1/C2)/X. */
10308 if (flag_reciprocal_math
10309 && TREE_CODE (arg1) == MULT_EXPR
10310 && TREE_CODE (arg0) == REAL_CST
10311 && TREE_CODE (TREE_OPERAND (arg1, 1)) == REAL_CST)
10313 tree tem = const_binop (RDIV_EXPR, arg0,
10314 TREE_OPERAND (arg1, 1));
10315 if (tem)
10316 return fold_build2_loc (loc, RDIV_EXPR, type, tem,
10317 TREE_OPERAND (arg1, 0));
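/* Illustrative examples under -freciprocal-math: "a / b / c" becomes
   "a / (b * c)" and "a / (b / c)" becomes "(a / b) * c", each trading a
   division for a multiplication.  */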
10320 return NULL_TREE;
10322 case TRUNC_DIV_EXPR:
10323 /* Optimize (X & (-A)) / A where A is a power of 2,
10324 to X >> log2(A) */
10325 if (TREE_CODE (arg0) == BIT_AND_EXPR
10326 && !TYPE_UNSIGNED (type) && TREE_CODE (arg1) == INTEGER_CST
10327 && integer_pow2p (arg1) && tree_int_cst_sgn (arg1) > 0)
10329 tree sum = fold_binary_loc (loc, PLUS_EXPR, TREE_TYPE (arg1),
10330 arg1, TREE_OPERAND (arg0, 1));
10331 if (sum && integer_zerop (sum))
10332 tree pow2 = build_int_cst (integer_type_node,
10333 wi::exact_log2 (arg1));
10334 return fold_build2_loc (loc, RSHIFT_EXPR, type,
10335 TREE_OPERAND (arg0, 0), pow2);
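/* Illustrative example: for signed x, "(x & -8) / 8" folds to "x >> 3";
   the AND clears the low bits, so the division is an exact shift.  */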
10339 /* Fall through */
10341 case FLOOR_DIV_EXPR:
10342 /* Simplify A / (B << N) where A and B are positive and B is
10343 a power of 2, to A >> (N + log2(B)). */
10344 strict_overflow_p = false;
10345 if (TREE_CODE (arg1) == LSHIFT_EXPR
10346 && (TYPE_UNSIGNED (type)
10347 || tree_expr_nonnegative_warnv_p (op0, &strict_overflow_p)))
10349 tree sval = TREE_OPERAND (arg1, 0);
10350 if (integer_pow2p (sval) && tree_int_cst_sgn (sval) > 0)
10352 tree sh_cnt = TREE_OPERAND (arg1, 1);
10353 tree pow2 = build_int_cst (TREE_TYPE (sh_cnt),
10354 wi::exact_log2 (sval));
10356 if (strict_overflow_p)
10357 fold_overflow_warning (("assuming signed overflow does not "
10358 "occur when simplifying A / (B << N)"),
10359 WARN_STRICT_OVERFLOW_MISC);
10361 sh_cnt = fold_build2_loc (loc, PLUS_EXPR, TREE_TYPE (sh_cnt),
10362 sh_cnt, pow2);
10363 return fold_build2_loc (loc, RSHIFT_EXPR, type,
10364 fold_convert_loc (loc, type, arg0), sh_cnt);
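/* Illustrative example: for unsigned a, "a / (4 << n)" folds to
   "a >> (n + 2)", folding log2(4) into the shift count.  */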
10368 /* Fall through */
10370 case ROUND_DIV_EXPR:
10371 case CEIL_DIV_EXPR:
10372 case EXACT_DIV_EXPR:
10373 if (integer_zerop (arg1))
10374 return NULL_TREE;
10376 /* Convert -A / -B to A / B when the type is signed and overflow is
10377 undefined. */
10378 if ((!INTEGRAL_TYPE_P (type) || TYPE_OVERFLOW_UNDEFINED (type))
10379 && TREE_CODE (arg0) == NEGATE_EXPR
10380 && negate_expr_p (arg1))
10382 if (INTEGRAL_TYPE_P (type))
10383 fold_overflow_warning (("assuming signed overflow does not occur "
10384 "when distributing negation across "
10385 "division"),
10386 WARN_STRICT_OVERFLOW_MISC);
10387 return fold_build2_loc (loc, code, type,
10388 fold_convert_loc (loc, type,
10389 TREE_OPERAND (arg0, 0)),
10390 fold_convert_loc (loc, type,
10391 negate_expr (arg1)));
10393 if ((!INTEGRAL_TYPE_P (type) || TYPE_OVERFLOW_UNDEFINED (type))
10394 && TREE_CODE (arg1) == NEGATE_EXPR
10395 && negate_expr_p (arg0))
10397 if (INTEGRAL_TYPE_P (type))
10398 fold_overflow_warning (("assuming signed overflow does not occur "
10399 "when distributing negation across "
10400 "division"),
10401 WARN_STRICT_OVERFLOW_MISC);
10402 return fold_build2_loc (loc, code, type,
10403 fold_convert_loc (loc, type,
10404 negate_expr (arg0)),
10405 fold_convert_loc (loc, type,
10406 TREE_OPERAND (arg1, 0)));
10409 /* If arg0 is a multiple of arg1, then rewrite to the fastest div
10410 operation, EXACT_DIV_EXPR.
10412 Note that only CEIL_DIV_EXPR and FLOOR_DIV_EXPR are rewritten now.
10413 At one time others generated faster code; it's not clear if they do
10414 after the last round of changes to the DIV code in expmed.c. */
10415 if ((code == CEIL_DIV_EXPR || code == FLOOR_DIV_EXPR)
10416 && multiple_of_p (type, arg0, arg1))
10417 return fold_build2_loc (loc, EXACT_DIV_EXPR, type,
10418 fold_convert (type, arg0),
10419 fold_convert (type, arg1));
10421 strict_overflow_p = false;
10422 if (TREE_CODE (arg1) == INTEGER_CST
10423 && 0 != (tem = extract_muldiv (op0, arg1, code, NULL_TREE,
10424 &strict_overflow_p)))
10426 if (strict_overflow_p)
10427 fold_overflow_warning (("assuming signed overflow does not occur "
10428 "when simplifying division"),
10429 WARN_STRICT_OVERFLOW_MISC);
10430 return fold_convert_loc (loc, type, tem);
10433 return NULL_TREE;
10435 case CEIL_MOD_EXPR:
10436 case FLOOR_MOD_EXPR:
10437 case ROUND_MOD_EXPR:
10438 case TRUNC_MOD_EXPR:
10439 strict_overflow_p = false;
10440 if (TREE_CODE (arg1) == INTEGER_CST
10441 && 0 != (tem = extract_muldiv (op0, arg1, code, NULL_TREE,
10442 &strict_overflow_p)))
10444 if (strict_overflow_p)
10445 fold_overflow_warning (("assuming signed overflow does not occur "
10446 "when simplifying modulus"),
10447 WARN_STRICT_OVERFLOW_MISC);
10448 return fold_convert_loc (loc, type, tem);
10451 return NULL_TREE;
10453 case LROTATE_EXPR:
10454 case RROTATE_EXPR:
10455 case RSHIFT_EXPR:
10456 case LSHIFT_EXPR:
10457 /* Since a negative shift count is not well-defined,
10458 don't try to compute it in the compiler. */
10459 if (TREE_CODE (arg1) == INTEGER_CST && tree_int_cst_sgn (arg1) < 0)
10460 return NULL_TREE;
10462 prec = element_precision (type);
10464 /* If we have a rotate of a bit operation with the rotate count and
10465 the second operand of the bit operation both constant,
10466 permute the two operations. */
10467 if (code == RROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST
10468 && (TREE_CODE (arg0) == BIT_AND_EXPR
10469 || TREE_CODE (arg0) == BIT_IOR_EXPR
10470 || TREE_CODE (arg0) == BIT_XOR_EXPR)
10471 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
10472 return fold_build2_loc (loc, TREE_CODE (arg0), type,
10473 fold_build2_loc (loc, code, type,
10474 TREE_OPERAND (arg0, 0), arg1),
10475 fold_build2_loc (loc, code, type,
10476 TREE_OPERAND (arg0, 1), arg1));
10478 /* Two consecutive rotates adding up to some integer
10479 multiple of the precision of the type can be ignored. */
10480 if (code == RROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST
10481 && TREE_CODE (arg0) == RROTATE_EXPR
10482 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
10483 && wi::umod_trunc (wi::add (arg1, TREE_OPERAND (arg0, 1)),
10484 prec) == 0)
10485 return TREE_OPERAND (arg0, 0);
10487 return NULL_TREE;
10489 case MIN_EXPR:
10490 tem = fold_minmax (loc, MIN_EXPR, type, arg0, arg1);
10491 if (tem)
10492 return tem;
10493 goto associate;
10495 case MAX_EXPR:
10496 tem = fold_minmax (loc, MAX_EXPR, type, arg0, arg1);
10497 if (tem)
10498 return tem;
10499 goto associate;
10501 case TRUTH_ANDIF_EXPR:
10502 /* Note that the operands of this must be ints
10503 and their values must be 0 or 1.
10504 ("true" is a fixed value perhaps depending on the language.) */
10505 /* If first arg is constant zero, return it. */
10506 if (integer_zerop (arg0))
10507 return fold_convert_loc (loc, type, arg0);
10508 case TRUTH_AND_EXPR:
10509 /* If either arg is constant true, drop it. */
10510 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
10511 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
10512 if (TREE_CODE (arg1) == INTEGER_CST && ! integer_zerop (arg1)
10513 /* Preserve sequence points. */
10514 && (code != TRUTH_ANDIF_EXPR || ! TREE_SIDE_EFFECTS (arg0)))
10515 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
10516 /* If second arg is constant zero, result is zero, but first arg
10517 must be evaluated. */
10518 if (integer_zerop (arg1))
10519 return omit_one_operand_loc (loc, type, arg1, arg0);
10520 /* Likewise for first arg, but note that only the TRUTH_AND_EXPR
10521 case will be handled here. */
10522 if (integer_zerop (arg0))
10523 return omit_one_operand_loc (loc, type, arg0, arg1);
10525 /* !X && X is always false. */
10526 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
10527 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
10528 return omit_one_operand_loc (loc, type, integer_zero_node, arg1);
10529 /* X && !X is always false. */
10530 if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
10531 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
10532 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
10534 /* A < X && A + 1 > Y ==> A < X && A >= Y. Normally A + 1 > Y
10535 means A >= Y && A != MAX, but in this case we know that
10536 A < X <= MAX. */
10538 if (!TREE_SIDE_EFFECTS (arg0)
10539 && !TREE_SIDE_EFFECTS (arg1))
10541 tem = fold_to_nonsharp_ineq_using_bound (loc, arg0, arg1);
10542 if (tem && !operand_equal_p (tem, arg0, 0))
10543 return fold_build2_loc (loc, code, type, tem, arg1);
10545 tem = fold_to_nonsharp_ineq_using_bound (loc, arg1, arg0);
10546 if (tem && !operand_equal_p (tem, arg1, 0))
10547 return fold_build2_loc (loc, code, type, arg0, tem);
10550 if ((tem = fold_truth_andor (loc, code, type, arg0, arg1, op0, op1))
10551 != NULL_TREE)
10552 return tem;
10554 return NULL_TREE;
10556 case TRUTH_ORIF_EXPR:
10557 /* Note that the operands of this must be ints
10558 and their values must be 0 or true.
10559 ("true" is a fixed value perhaps depending on the language.) */
10560 /* If first arg is constant true, return it. */
10561 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
10562 return fold_convert_loc (loc, type, arg0);
10563 case TRUTH_OR_EXPR:
10564 /* If either arg is constant zero, drop it. */
10565 if (TREE_CODE (arg0) == INTEGER_CST && integer_zerop (arg0))
10566 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
10567 if (TREE_CODE (arg1) == INTEGER_CST && integer_zerop (arg1)
10568 /* Preserve sequence points. */
10569 && (code != TRUTH_ORIF_EXPR || ! TREE_SIDE_EFFECTS (arg0)))
10570 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
10571 /* If second arg is constant true, result is true, but we must
10572 evaluate first arg. */
10573 if (TREE_CODE (arg1) == INTEGER_CST && ! integer_zerop (arg1))
10574 return omit_one_operand_loc (loc, type, arg1, arg0);
10575 /* Likewise for first arg, but note this only occurs here for
10576 TRUTH_OR_EXPR. */
10577 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
10578 return omit_one_operand_loc (loc, type, arg0, arg1);
10580 /* !X || X is always true. */
10581 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
10582 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
10583 return omit_one_operand_loc (loc, type, integer_one_node, arg1);
10584 /* X || !X is always true. */
10585 if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
10586 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
10587 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
10589 /* (X && !Y) || (!X && Y) is X ^ Y */
10590 if (TREE_CODE (arg0) == TRUTH_AND_EXPR
10591 && TREE_CODE (arg1) == TRUTH_AND_EXPR)
10593 tree a0, a1, l0, l1, n0, n1;
10595 a0 = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 0));
10596 a1 = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 1));
10598 l0 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
10599 l1 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
10601 n0 = fold_build1_loc (loc, TRUTH_NOT_EXPR, type, l0);
10602 n1 = fold_build1_loc (loc, TRUTH_NOT_EXPR, type, l1);
10604 if ((operand_equal_p (n0, a0, 0)
10605 && operand_equal_p (n1, a1, 0))
10606 || (operand_equal_p (n0, a1, 0)
10607 && operand_equal_p (n1, a0, 0)))
10608 return fold_build2_loc (loc, TRUTH_XOR_EXPR, type, l0, n1);
10611 if ((tem = fold_truth_andor (loc, code, type, arg0, arg1, op0, op1))
10612 != NULL_TREE)
10613 return tem;
10615 return NULL_TREE;
10617 case TRUTH_XOR_EXPR:
10618 /* If the second arg is constant zero, drop it. */
10619 if (integer_zerop (arg1))
10620 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
10621 /* If the second arg is constant true, this is a logical inversion. */
10622 if (integer_onep (arg1))
10624 tem = invert_truthvalue_loc (loc, arg0);
10625 return non_lvalue_loc (loc, fold_convert_loc (loc, type, tem));
10627 /* Identical arguments cancel to zero. */
10628 if (operand_equal_p (arg0, arg1, 0))
10629 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
10631 /* !X ^ X is always true. */
10632 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
10633 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
10634 return omit_one_operand_loc (loc, type, integer_one_node, arg1);
10636 /* X ^ !X is always true. */
10637 if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
10638 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
10639 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
10641 return NULL_TREE;
10643 case EQ_EXPR:
10644 case NE_EXPR:
10645 STRIP_NOPS (arg0);
10646 STRIP_NOPS (arg1);
10648 tem = fold_comparison (loc, code, type, op0, op1);
10649 if (tem != NULL_TREE)
10650 return tem;
10652 /* bool_var != 1 becomes !bool_var. */
10653 if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_onep (arg1)
10654 && code == NE_EXPR)
10655 return fold_convert_loc (loc, type,
10656 fold_build1_loc (loc, TRUTH_NOT_EXPR,
10657 TREE_TYPE (arg0), arg0));
10659 /* bool_var == 0 becomes !bool_var. */
10660 if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_zerop (arg1)
10661 && code == EQ_EXPR)
10662 return fold_convert_loc (loc, type,
10663 fold_build1_loc (loc, TRUTH_NOT_EXPR,
10664 TREE_TYPE (arg0), arg0));
10666 /* !exp != 0 becomes !exp */
10667 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR && integer_zerop (arg1)
10668 && code == NE_EXPR)
10669 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
10671 /* Transform comparisons of the form X +- Y CMP X to Y CMP 0. */
10672 if ((TREE_CODE (arg0) == PLUS_EXPR
10673 || TREE_CODE (arg0) == POINTER_PLUS_EXPR
10674 || TREE_CODE (arg0) == MINUS_EXPR)
10675 && operand_equal_p (tree_strip_nop_conversions (TREE_OPERAND (arg0,
10676 0)),
10677 arg1, 0)
10678 && (INTEGRAL_TYPE_P (TREE_TYPE (arg0))
10679 || POINTER_TYPE_P (TREE_TYPE (arg0))))
10681 tree val = TREE_OPERAND (arg0, 1);
10682 return omit_two_operands_loc (loc, type,
10683 fold_build2_loc (loc, code, type,
10684 val,
10685 build_int_cst (TREE_TYPE (val),
10686 0)),
10687 TREE_OPERAND (arg0, 0), arg1);
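/* Illustrative example: "x + y == x" folds to "y == 0", with x still
   evaluated for side effects via omit_two_operands_loc.  */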
10690 /* Transform comparisons of the form C - X CMP X if C % 2 == 1. */
10691 if (TREE_CODE (arg0) == MINUS_EXPR
10692 && TREE_CODE (TREE_OPERAND (arg0, 0)) == INTEGER_CST
10693 && operand_equal_p (tree_strip_nop_conversions (TREE_OPERAND (arg0,
10694 1)),
10695 arg1, 0)
10696 && wi::extract_uhwi (TREE_OPERAND (arg0, 0), 0, 1) == 1)
10698 return omit_two_operands_loc (loc, type,
10699 code == NE_EXPR
10700 ? boolean_true_node : boolean_false_node,
10701 TREE_OPERAND (arg0, 1), arg1);
10704 /* If this is an EQ or NE comparison with zero and ARG0 is
10705 (1 << foo) & bar, convert it to (bar >> foo) & 1. Both require
10706 two operations, but the latter can be done in one less insn
10707 on machines that have only two-operand insns or on which a
10708 constant cannot be the first operand. */
10709 if (TREE_CODE (arg0) == BIT_AND_EXPR
10710 && integer_zerop (arg1))
10712 tree arg00 = TREE_OPERAND (arg0, 0);
10713 tree arg01 = TREE_OPERAND (arg0, 1);
10714 if (TREE_CODE (arg00) == LSHIFT_EXPR
10715 && integer_onep (TREE_OPERAND (arg00, 0)))
10717 tree tem = fold_build2_loc (loc, RSHIFT_EXPR, TREE_TYPE (arg00),
10718 arg01, TREE_OPERAND (arg00, 1));
10719 tem = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg0), tem,
10720 build_int_cst (TREE_TYPE (arg0), 1));
10721 return fold_build2_loc (loc, code, type,
10722 fold_convert_loc (loc, TREE_TYPE (arg1), tem),
10723 arg1);
10725 else if (TREE_CODE (arg01) == LSHIFT_EXPR
10726 && integer_onep (TREE_OPERAND (arg01, 0)))
10728 tree tem = fold_build2_loc (loc, RSHIFT_EXPR, TREE_TYPE (arg01),
10729 arg00, TREE_OPERAND (arg01, 1));
10730 tem = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg0), tem,
10731 build_int_cst (TREE_TYPE (arg0), 1));
10732 return fold_build2_loc (loc, code, type,
10733 fold_convert_loc (loc, TREE_TYPE (arg1), tem),
10734 arg1);
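/* E.g. ((1 << f) & x) != 0 becomes ((x >> f) & 1) != 0; both
   isolate bit F of X, but the second form shifts the variable
   rather than the constant 1.  */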
10738 /* If this is an NE or EQ comparison of zero against the result of a
10739 signed MOD operation whose second operand is a power of 2, make
10740 the MOD operation unsigned since it is simpler and equivalent. */
10741 if (integer_zerop (arg1)
10742 && !TYPE_UNSIGNED (TREE_TYPE (arg0))
10743 && (TREE_CODE (arg0) == TRUNC_MOD_EXPR
10744 || TREE_CODE (arg0) == CEIL_MOD_EXPR
10745 || TREE_CODE (arg0) == FLOOR_MOD_EXPR
10746 || TREE_CODE (arg0) == ROUND_MOD_EXPR)
10747 && integer_pow2p (TREE_OPERAND (arg0, 1)))
10749 tree newtype = unsigned_type_for (TREE_TYPE (arg0));
10750 tree newmod = fold_build2_loc (loc, TREE_CODE (arg0), newtype,
10751 fold_convert_loc (loc, newtype,
10752 TREE_OPERAND (arg0, 0)),
10753 fold_convert_loc (loc, newtype,
10754 TREE_OPERAND (arg0, 1)));
10756 return fold_build2_loc (loc, code, type, newmod,
10757 fold_convert_loc (loc, newtype, arg1));
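/* Illustrative example, assuming two's complement:
     x % 8 == 0 with signed X becomes (unsigned) x % 8U == 0;
   whether the remainder is zero depends only on the low three
   bits, on which signed and unsigned TRUNC_MOD by 8 agree.  */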
10760 /* Fold ((X >> C1) & C2) == 0 and ((X >> C1) & C2) != 0 where
10761 C1 is a valid shift constant, and C2 is a power of two, i.e.
10762 a single bit. */
10763 if (TREE_CODE (arg0) == BIT_AND_EXPR
10764 && TREE_CODE (TREE_OPERAND (arg0, 0)) == RSHIFT_EXPR
10765 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1))
10766 == INTEGER_CST
10767 && integer_pow2p (TREE_OPERAND (arg0, 1))
10768 && integer_zerop (arg1))
10770 tree itype = TREE_TYPE (arg0);
10771 tree arg001 = TREE_OPERAND (TREE_OPERAND (arg0, 0), 1);
10772 prec = TYPE_PRECISION (itype);
10774 /* Check for a valid shift count. */
10775 if (wi::ltu_p (arg001, prec))
10777 tree arg01 = TREE_OPERAND (arg0, 1);
10778 tree arg000 = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
10779 unsigned HOST_WIDE_INT log2 = tree_log2 (arg01);
10780 /* If (C2 << C1) doesn't overflow, then ((X >> C1) & C2) != 0
10781 can be rewritten as (X & (C2 << C1)) != 0. */
10782 if ((log2 + TREE_INT_CST_LOW (arg001)) < prec)
10784 tem = fold_build2_loc (loc, LSHIFT_EXPR, itype, arg01, arg001);
10785 tem = fold_build2_loc (loc, BIT_AND_EXPR, itype, arg000, tem);
10786 return fold_build2_loc (loc, code, type, tem,
10787 fold_convert_loc (loc, itype, arg1));
10789 /* Otherwise, for signed (arithmetic) shifts,
10790 ((X >> C1) & C2) != 0 is rewritten as X < 0, and
10791 ((X >> C1) & C2) == 0 is rewritten as X >= 0. */
10792 else if (!TYPE_UNSIGNED (itype))
10793 return fold_build2_loc (loc, code == EQ_EXPR ? GE_EXPR : LT_EXPR, type,
10794 arg000, build_int_cst (itype, 0));
10795 /* Otherwise, for unsigned (logical) shifts,
10796 ((X >> C1) & C2) != 0 is rewritten as (X,false), and
10797 ((X >> C1) & C2) == 0 is rewritten as (X,true). */
10798 else
10799 return omit_one_operand_loc (loc, type,
10800 code == EQ_EXPR ? integer_one_node
10801 : integer_zero_node,
10802 arg000);
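/* E.g. ((x >> 2) & 4) != 0 becomes (x & 16) != 0 since 4 << 2
   does not overflow, while ((x >> 31) & 2) != 0 on 32-bit
   signed X becomes x < 0 via the arithmetic-shift case.  */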
10806 /* If we have (A & C) == D where D & ~C != 0, convert this into 0.
10807 Similarly for NE_EXPR. */
10808 if (TREE_CODE (arg0) == BIT_AND_EXPR
10809 && TREE_CODE (arg1) == INTEGER_CST
10810 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
10812 tree notc = fold_build1_loc (loc, BIT_NOT_EXPR,
10813 TREE_TYPE (TREE_OPERAND (arg0, 1)),
10814 TREE_OPERAND (arg0, 1));
10815 tree dandnotc
10816 = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg0),
10817 fold_convert_loc (loc, TREE_TYPE (arg0), arg1),
10818 notc);
10819 tree rslt = code == EQ_EXPR ? integer_zero_node : integer_one_node;
10820 if (integer_nonzerop (dandnotc))
10821 return omit_one_operand_loc (loc, type, rslt, arg0);
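/* E.g. (x & 3) == 4 folds to 0: bit 2 can never survive the
   masking with 3, so a nonzero D & ~C decides the comparison
   outright.  */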
10824 /* If this is a comparison of a field, we may be able to simplify it. */
10825 if ((TREE_CODE (arg0) == COMPONENT_REF
10826 || TREE_CODE (arg0) == BIT_FIELD_REF)
10827 /* Handle the constant case even without -O
10828 to make sure the warnings are given. */
10829 && (optimize || TREE_CODE (arg1) == INTEGER_CST))
10831 t1 = optimize_bit_field_compare (loc, code, type, arg0, arg1);
10832 if (t1)
10833 return t1;
10836 /* Optimize comparisons of strlen vs zero to a compare of the
10837 first character of the string vs zero. To wit,
10838 strlen(ptr) == 0 => *ptr == 0
10839 strlen(ptr) != 0 => *ptr != 0
10840 Other cases should reduce to one of these two (or a constant)
10841 due to the return value of strlen being unsigned. */
10842 if (TREE_CODE (arg0) == CALL_EXPR
10843 && integer_zerop (arg1))
10845 tree fndecl = get_callee_fndecl (arg0);
10847 if (fndecl
10848 && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
10849 && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_STRLEN
10850 && call_expr_nargs (arg0) == 1
10851 && TREE_CODE (TREE_TYPE (CALL_EXPR_ARG (arg0, 0))) == POINTER_TYPE)
10853 tree iref = build_fold_indirect_ref_loc (loc,
10854 CALL_EXPR_ARG (arg0, 0));
10855 return fold_build2_loc (loc, code, type, iref,
10856 build_int_cst (TREE_TYPE (iref), 0));
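/* E.g. strlen (s) != 0 becomes *s != 0, replacing a library
   call with a single byte load.  */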
10860 /* Fold (X >> C) != 0 into X < 0 if C is one less than the width
10861 of X. Similarly fold (X >> C) == 0 into X >= 0. */
10862 if (TREE_CODE (arg0) == RSHIFT_EXPR
10863 && integer_zerop (arg1)
10864 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
10866 tree arg00 = TREE_OPERAND (arg0, 0);
10867 tree arg01 = TREE_OPERAND (arg0, 1);
10868 tree itype = TREE_TYPE (arg00);
10869 if (wi::eq_p (arg01, element_precision (itype) - 1))
10871 if (TYPE_UNSIGNED (itype))
10873 itype = signed_type_for (itype);
10874 arg00 = fold_convert_loc (loc, itype, arg00);
10876 return fold_build2_loc (loc, code == EQ_EXPR ? GE_EXPR : LT_EXPR,
10877 type, arg00, build_zero_cst (itype));
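/* E.g. for 32-bit X, (x >> 31) != 0 becomes x < 0 for signed X,
   and (x >> 31) == 0 becomes (int) x >= 0 for unsigned X.  */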
10881 /* Fold (~X & C) == 0 into (X & C) != 0 and (~X & C) != 0 into
10882 (X & C) == 0 when C is a single bit. */
10883 if (TREE_CODE (arg0) == BIT_AND_EXPR
10884 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_NOT_EXPR
10885 && integer_zerop (arg1)
10886 && integer_pow2p (TREE_OPERAND (arg0, 1)))
10888 tem = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg0),
10889 TREE_OPERAND (TREE_OPERAND (arg0, 0), 0),
10890 TREE_OPERAND (arg0, 1));
10891 return fold_build2_loc (loc, code == EQ_EXPR ? NE_EXPR : EQ_EXPR,
10892 type, tem,
10893 fold_convert_loc (loc, TREE_TYPE (arg0),
10894 arg1));
10897 /* Fold ((X & C) ^ C) eq/ne 0 into (X & C) ne/eq 0, when the
10898 constant C is a power of two, i.e. a single bit. */
10899 if (TREE_CODE (arg0) == BIT_XOR_EXPR
10900 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_AND_EXPR
10901 && integer_zerop (arg1)
10902 && integer_pow2p (TREE_OPERAND (arg0, 1))
10903 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
10904 TREE_OPERAND (arg0, 1), OEP_ONLY_CONST))
10906 tree arg00 = TREE_OPERAND (arg0, 0);
10907 return fold_build2_loc (loc, code == EQ_EXPR ? NE_EXPR : EQ_EXPR, type,
10908 arg00, build_int_cst (TREE_TYPE (arg00), 0));
10911 /* Likewise, fold ((X ^ C) & C) eq/ne 0 into (X & C) ne/eq 0,
10912 when C is a power of two, i.e. a single bit. */
10913 if (TREE_CODE (arg0) == BIT_AND_EXPR
10914 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_XOR_EXPR
10915 && integer_zerop (arg1)
10916 && integer_pow2p (TREE_OPERAND (arg0, 1))
10917 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
10918 TREE_OPERAND (arg0, 1), OEP_ONLY_CONST))
10920 tree arg000 = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
10921 tem = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg000),
10922 arg000, TREE_OPERAND (arg0, 1));
10923 return fold_build2_loc (loc, code == EQ_EXPR ? NE_EXPR : EQ_EXPR, type,
10924 tem, build_int_cst (TREE_TYPE (tem), 0));
10927 if (integer_zerop (arg1)
10928 && tree_expr_nonzero_p (arg0))
10930 tree res = constant_boolean_node (code == NE_EXPR, type);
10931 return omit_one_operand_loc (loc, type, res, arg0);
10934 /* Fold (X & C) op (Y & C) as ((X ^ Y) & C) op 0, and symmetries. */
10935 if (TREE_CODE (arg0) == BIT_AND_EXPR
10936 && TREE_CODE (arg1) == BIT_AND_EXPR)
10938 tree arg00 = TREE_OPERAND (arg0, 0);
10939 tree arg01 = TREE_OPERAND (arg0, 1);
10940 tree arg10 = TREE_OPERAND (arg1, 0);
10941 tree arg11 = TREE_OPERAND (arg1, 1);
10942 tree itype = TREE_TYPE (arg0);
10944 if (operand_equal_p (arg01, arg11, 0))
10945 return fold_build2_loc (loc, code, type,
10946 fold_build2_loc (loc, BIT_AND_EXPR, itype,
10947 fold_build2_loc (loc,
10948 BIT_XOR_EXPR, itype,
10949 arg00, arg10),
10950 arg01),
10951 build_zero_cst (itype));
10953 if (operand_equal_p (arg01, arg10, 0))
10954 return fold_build2_loc (loc, code, type,
10955 fold_build2_loc (loc, BIT_AND_EXPR, itype,
10956 fold_build2_loc (loc,
10957 BIT_XOR_EXPR, itype,
10958 arg00, arg11),
10959 arg01),
10960 build_zero_cst (itype));
10962 if (operand_equal_p (arg00, arg11, 0))
10963 return fold_build2_loc (loc, code, type,
10964 fold_build2_loc (loc, BIT_AND_EXPR, itype,
10965 fold_build2_loc (loc,
10966 BIT_XOR_EXPR, itype,
10967 arg01, arg10),
10968 arg00),
10969 build_zero_cst (itype));
10971 if (operand_equal_p (arg00, arg10, 0))
10972 return fold_build2_loc (loc, code, type,
10973 fold_build2_loc (loc, BIT_AND_EXPR, itype,
10974 fold_build2_loc (loc,
10975 BIT_XOR_EXPR, itype,
10976 arg01, arg11),
10977 arg00),
10978 build_zero_cst (itype));
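/* E.g. (x & 7) == (y & 7) becomes ((x ^ y) & 7) == 0, a single
   test against zero instead of two masked values.  */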
10981 if (TREE_CODE (arg0) == BIT_XOR_EXPR
10982 && TREE_CODE (arg1) == BIT_XOR_EXPR)
10984 tree arg00 = TREE_OPERAND (arg0, 0);
10985 tree arg01 = TREE_OPERAND (arg0, 1);
10986 tree arg10 = TREE_OPERAND (arg1, 0);
10987 tree arg11 = TREE_OPERAND (arg1, 1);
10988 tree itype = TREE_TYPE (arg0);
10990 /* Optimize (X ^ Z) op (Y ^ Z) as X op Y, and symmetries.
10991 operand_equal_p guarantees no side-effects so we don't need
10992 to use omit_one_operand on Z. */
10993 if (operand_equal_p (arg01, arg11, 0))
10994 return fold_build2_loc (loc, code, type, arg00,
10995 fold_convert_loc (loc, TREE_TYPE (arg00),
10996 arg10));
10997 if (operand_equal_p (arg01, arg10, 0))
10998 return fold_build2_loc (loc, code, type, arg00,
10999 fold_convert_loc (loc, TREE_TYPE (arg00),
11000 arg11));
11001 if (operand_equal_p (arg00, arg11, 0))
11002 return fold_build2_loc (loc, code, type, arg01,
11003 fold_convert_loc (loc, TREE_TYPE (arg01),
11004 arg10));
11005 if (operand_equal_p (arg00, arg10, 0))
11006 return fold_build2_loc (loc, code, type, arg01,
11007 fold_convert_loc (loc, TREE_TYPE (arg01),
11008 arg11));
11010 /* Optimize (X ^ C1) op (Y ^ C2) as (X ^ (C1 ^ C2)) op Y. */
11011 if (TREE_CODE (arg01) == INTEGER_CST
11012 && TREE_CODE (arg11) == INTEGER_CST)
11014 tem = fold_build2_loc (loc, BIT_XOR_EXPR, itype, arg01,
11015 fold_convert_loc (loc, itype, arg11));
11016 tem = fold_build2_loc (loc, BIT_XOR_EXPR, itype, arg00, tem);
11017 return fold_build2_loc (loc, code, type, tem,
11018 fold_convert_loc (loc, itype, arg10));
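/* E.g. (x ^ 5) == (y ^ 3) becomes (x ^ 6) == y, since the two
   constants combine as 5 ^ 3 == 6.  */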
11022 /* Attempt to simplify equality/inequality comparisons of complex
11023 values. Only lower the comparison if the result is known or
11024 can be simplified to a single scalar comparison. */
11025 if ((TREE_CODE (arg0) == COMPLEX_EXPR
11026 || TREE_CODE (arg0) == COMPLEX_CST)
11027 && (TREE_CODE (arg1) == COMPLEX_EXPR
11028 || TREE_CODE (arg1) == COMPLEX_CST))
11030 tree real0, imag0, real1, imag1;
11031 tree rcond, icond;
11033 if (TREE_CODE (arg0) == COMPLEX_EXPR)
11035 real0 = TREE_OPERAND (arg0, 0);
11036 imag0 = TREE_OPERAND (arg0, 1);
11038 else
11040 real0 = TREE_REALPART (arg0);
11041 imag0 = TREE_IMAGPART (arg0);
11044 if (TREE_CODE (arg1) == COMPLEX_EXPR)
11046 real1 = TREE_OPERAND (arg1, 0);
11047 imag1 = TREE_OPERAND (arg1, 1);
11049 else
11051 real1 = TREE_REALPART (arg1);
11052 imag1 = TREE_IMAGPART (arg1);
11055 rcond = fold_binary_loc (loc, code, type, real0, real1);
11056 if (rcond && TREE_CODE (rcond) == INTEGER_CST)
11058 if (integer_zerop (rcond))
11060 if (code == EQ_EXPR)
11061 return omit_two_operands_loc (loc, type, boolean_false_node,
11062 imag0, imag1);
11063 return fold_build2_loc (loc, NE_EXPR, type, imag0, imag1);
11065 else
11067 if (code == NE_EXPR)
11068 return omit_two_operands_loc (loc, type, boolean_true_node,
11069 imag0, imag1);
11070 return fold_build2_loc (loc, EQ_EXPR, type, imag0, imag1);
11074 icond = fold_binary_loc (loc, code, type, imag0, imag1);
11075 if (icond && TREE_CODE (icond) == INTEGER_CST)
11077 if (integer_zerop (icond))
11079 if (code == EQ_EXPR)
11080 return omit_two_operands_loc (loc, type, boolean_false_node,
11081 real0, real1);
11082 return fold_build2_loc (loc, NE_EXPR, type, real0, real1);
11084 else
11086 if (code == NE_EXPR)
11087 return omit_two_operands_loc (loc, type, boolean_true_node,
11088 real0, real1);
11089 return fold_build2_loc (loc, EQ_EXPR, type, real0, real1);
11094 return NULL_TREE;
11096 case LT_EXPR:
11097 case GT_EXPR:
11098 case LE_EXPR:
11099 case GE_EXPR:
11100 tem = fold_comparison (loc, code, type, op0, op1);
11101 if (tem != NULL_TREE)
11102 return tem;
11104 /* Transform comparisons of the form X +- C CMP X. */
11105 if ((TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
11106 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
11107 && ((TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
11108 && !HONOR_SNANS (arg0))
11109 || (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
11110 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))))
11112 tree arg01 = TREE_OPERAND (arg0, 1);
11113 enum tree_code code0 = TREE_CODE (arg0);
11114 int is_positive;
11116 if (TREE_CODE (arg01) == REAL_CST)
11117 is_positive = REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg01)) ? -1 : 1;
11118 else
11119 is_positive = tree_int_cst_sgn (arg01);
11121 /* (X - c) > X becomes false. */
11122 if (code == GT_EXPR
11123 && ((code0 == MINUS_EXPR && is_positive >= 0)
11124 || (code0 == PLUS_EXPR && is_positive <= 0)))
11126 if (TREE_CODE (arg01) == INTEGER_CST
11127 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
11128 fold_overflow_warning (("assuming signed overflow does not "
11129 "occur when assuming that (X - c) > X "
11130 "is always false"),
11131 WARN_STRICT_OVERFLOW_ALL);
11132 return constant_boolean_node (0, type);
11135 /* Likewise (X + c) < X becomes false. */
11136 if (code == LT_EXPR
11137 && ((code0 == PLUS_EXPR && is_positive >= 0)
11138 || (code0 == MINUS_EXPR && is_positive <= 0)))
11140 if (TREE_CODE (arg01) == INTEGER_CST
11141 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
11142 fold_overflow_warning (("assuming signed overflow does not "
11143 "occur when assuming that "
11144 "(X + c) < X is always false"),
11145 WARN_STRICT_OVERFLOW_ALL);
11146 return constant_boolean_node (0, type);
11149 /* Convert (X - c) <= X to true. */
11150 if (!HONOR_NANS (arg1)
11151 && code == LE_EXPR
11152 && ((code0 == MINUS_EXPR && is_positive >= 0)
11153 || (code0 == PLUS_EXPR && is_positive <= 0)))
11155 if (TREE_CODE (arg01) == INTEGER_CST
11156 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
11157 fold_overflow_warning (("assuming signed overflow does not "
11158 "occur when assuming that "
11159 "(X - c) <= X is always true"),
11160 WARN_STRICT_OVERFLOW_ALL);
11161 return constant_boolean_node (1, type);
11164 /* Convert (X + c) >= X to true. */
11165 if (!HONOR_NANS (arg1)
11166 && code == GE_EXPR
11167 && ((code0 == PLUS_EXPR && is_positive >= 0)
11168 || (code0 == MINUS_EXPR && is_positive <= 0)))
11170 if (TREE_CODE (arg01) == INTEGER_CST
11171 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
11172 fold_overflow_warning (("assuming signed overflow does not "
11173 "occur when assuming that "
11174 "(X + c) >= X is always true"),
11175 WARN_STRICT_OVERFLOW_ALL);
11176 return constant_boolean_node (1, type);
11179 if (TREE_CODE (arg01) == INTEGER_CST)
11181 /* Convert X + c > X and X - c < X to true for integers. */
11182 if (code == GT_EXPR
11183 && ((code0 == PLUS_EXPR && is_positive > 0)
11184 || (code0 == MINUS_EXPR && is_positive < 0)))
11186 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
11187 fold_overflow_warning (("assuming signed overflow does "
11188 "not occur when assuming that "
11189 "(X + c) > X is always true"),
11190 WARN_STRICT_OVERFLOW_ALL);
11191 return constant_boolean_node (1, type);
11194 if (code == LT_EXPR
11195 && ((code0 == MINUS_EXPR && is_positive > 0)
11196 || (code0 == PLUS_EXPR && is_positive < 0)))
11198 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
11199 fold_overflow_warning (("assuming signed overflow does "
11200 "not occur when assuming that "
11201 "(X - c) < X is always true"),
11202 WARN_STRICT_OVERFLOW_ALL);
11203 return constant_boolean_node (1, type);
11206 /* Convert X + c <= X and X - c >= X to false for integers. */
11207 if (code == LE_EXPR
11208 && ((code0 == PLUS_EXPR && is_positive > 0)
11209 || (code0 == MINUS_EXPR && is_positive < 0)))
11211 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
11212 fold_overflow_warning (("assuming signed overflow does "
11213 "not occur when assuming that "
11214 "(X + c) <= X is always false"),
11215 WARN_STRICT_OVERFLOW_ALL);
11216 return constant_boolean_node (0, type);
11219 if (code == GE_EXPR
11220 && ((code0 == MINUS_EXPR && is_positive > 0)
11221 || (code0 == PLUS_EXPR && is_positive < 0)))
11223 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
11224 fold_overflow_warning (("assuming signed overflow does "
11225 "not occur when assuming that "
11226 "(X - c) >= X is always false"),
11227 WARN_STRICT_OVERFLOW_ALL);
11228 return constant_boolean_node (0, type);
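/* E.g. x + 1 > x with signed X folds to 1 here; this is only
   valid because signed overflow is assumed not to occur, hence
   the fold_overflow_warning calls above.  */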
11233 /* If we are comparing an ABS_EXPR with a constant, we can
11234 convert all the cases into explicit comparisons, but they may
11235 well not be faster than doing the ABS and one comparison.
11236 But ABS (X) <= C is a range comparison, which becomes a subtraction
11237 and a comparison, and is probably faster. */
11238 if (code == LE_EXPR
11239 && TREE_CODE (arg1) == INTEGER_CST
11240 && TREE_CODE (arg0) == ABS_EXPR
11241 && ! TREE_SIDE_EFFECTS (arg0)
11242 && (0 != (tem = negate_expr (arg1)))
11243 && TREE_CODE (tem) == INTEGER_CST
11244 && !TREE_OVERFLOW (tem))
11245 return fold_build2_loc (loc, TRUTH_ANDIF_EXPR, type,
11246 build2 (GE_EXPR, type,
11247 TREE_OPERAND (arg0, 0), tem),
11248 build2 (LE_EXPR, type,
11249 TREE_OPERAND (arg0, 0), arg1));
11251 /* Convert ABS_EXPR<x> >= 0 to true. */
11252 strict_overflow_p = false;
11253 if (code == GE_EXPR
11254 && (integer_zerop (arg1)
11255 || (! HONOR_NANS (arg0)
11256 && real_zerop (arg1)))
11257 && tree_expr_nonnegative_warnv_p (arg0, &strict_overflow_p))
11259 if (strict_overflow_p)
11260 fold_overflow_warning (("assuming signed overflow does not occur "
11261 "when simplifying comparison of "
11262 "absolute value and zero"),
11263 WARN_STRICT_OVERFLOW_CONDITIONAL);
11264 return omit_one_operand_loc (loc, type,
11265 constant_boolean_node (true, type),
11266 arg0);
11269 /* Convert ABS_EXPR<x> < 0 to false. */
11270 strict_overflow_p = false;
11271 if (code == LT_EXPR
11272 && (integer_zerop (arg1) || real_zerop (arg1))
11273 && tree_expr_nonnegative_warnv_p (arg0, &strict_overflow_p))
11275 if (strict_overflow_p)
11276 fold_overflow_warning (("assuming signed overflow does not occur "
11277 "when simplifying comparison of "
11278 "absolute value and zero"),
11279 WARN_STRICT_OVERFLOW_CONDITIONAL);
11280 return omit_one_operand_loc (loc, type,
11281 constant_boolean_node (false, type),
11282 arg0);
11285 /* If X is unsigned, convert X < (1 << Y) into X >> Y == 0
11286 and similarly for >= into !=. */
11287 if ((code == LT_EXPR || code == GE_EXPR)
11288 && TYPE_UNSIGNED (TREE_TYPE (arg0))
11289 && TREE_CODE (arg1) == LSHIFT_EXPR
11290 && integer_onep (TREE_OPERAND (arg1, 0)))
11291 return build2_loc (loc, code == LT_EXPR ? EQ_EXPR : NE_EXPR, type,
11292 build2 (RSHIFT_EXPR, TREE_TYPE (arg0), arg0,
11293 TREE_OPERAND (arg1, 1)),
11294 build_zero_cst (TREE_TYPE (arg0)));
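/* E.g. for unsigned X, x < (1 << y) becomes (x >> y) == 0:
   X is below 1 << Y iff no bit at position Y or above is set.  */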
11296 /* Similarly for X < (cast) (1 << Y). But the cast can't be narrowing,
11297 otherwise Y might be >= # of bits in X's type and thus e.g.
11298 (unsigned char) (1 << Y) for Y == 15 might be 0.
11299 If the cast is widening, then 1 << Y should have unsigned type,
11300 otherwise if Y is the number of bits in the signed shift type minus 1,
11301 we can't optimize this. E.g. (unsigned long long) (1 << Y) for
11302 Y == 31 might be 0xffffffff80000000. */
11303 if ((code == LT_EXPR || code == GE_EXPR)
11304 && TYPE_UNSIGNED (TREE_TYPE (arg0))
11305 && CONVERT_EXPR_P (arg1)
11306 && TREE_CODE (TREE_OPERAND (arg1, 0)) == LSHIFT_EXPR
11307 && (element_precision (TREE_TYPE (arg1))
11308 >= element_precision (TREE_TYPE (TREE_OPERAND (arg1, 0))))
11309 && (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg1, 0)))
11310 || (element_precision (TREE_TYPE (arg1))
11311 == element_precision (TREE_TYPE (TREE_OPERAND (arg1, 0)))))
11312 && integer_onep (TREE_OPERAND (TREE_OPERAND (arg1, 0), 0)))
11314 tem = build2 (RSHIFT_EXPR, TREE_TYPE (arg0), arg0,
11315 TREE_OPERAND (TREE_OPERAND (arg1, 0), 1));
11316 return build2_loc (loc, code == LT_EXPR ? EQ_EXPR : NE_EXPR, type,
11317 fold_convert_loc (loc, TREE_TYPE (arg0), tem),
11318 build_zero_cst (TREE_TYPE (arg0)));
11321 return NULL_TREE;
11323 case UNORDERED_EXPR:
11324 case ORDERED_EXPR:
11325 case UNLT_EXPR:
11326 case UNLE_EXPR:
11327 case UNGT_EXPR:
11328 case UNGE_EXPR:
11329 case UNEQ_EXPR:
11330 case LTGT_EXPR:
11331 /* Fold (double)float1 CMP (double)float2 into float1 CMP float2. */
11333 tree targ0 = strip_float_extensions (arg0);
11334 tree targ1 = strip_float_extensions (arg1);
11335 tree newtype = TREE_TYPE (targ0);
11337 if (TYPE_PRECISION (TREE_TYPE (targ1)) > TYPE_PRECISION (newtype))
11338 newtype = TREE_TYPE (targ1);
11340 if (TYPE_PRECISION (newtype) < TYPE_PRECISION (TREE_TYPE (arg0)))
11341 return fold_build2_loc (loc, code, type,
11342 fold_convert_loc (loc, newtype, targ0),
11343 fold_convert_loc (loc, newtype, targ1));
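/* E.g. (double) f1 > (double) f2 with float F1 and F2 is
   evaluated as f1 > f2 directly; widening both operands is
   exact and so cannot change the (possibly unordered) result.  */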
11346 return NULL_TREE;
11348 case COMPOUND_EXPR:
11349 /* When pedantic, a compound expression can be neither an lvalue
11350 nor an integer constant expression. */
11351 if (TREE_SIDE_EFFECTS (arg0) || TREE_CONSTANT (arg1))
11352 return NULL_TREE;
11353 /* Don't let (0, 0) be a null pointer constant. */
11354 tem = integer_zerop (arg1) ? build1 (NOP_EXPR, type, arg1)
11355 : fold_convert_loc (loc, type, arg1);
11356 return pedantic_non_lvalue_loc (loc, tem);
11358 case ASSERT_EXPR:
11359 /* An ASSERT_EXPR should never be passed to fold_binary. */
11360 gcc_unreachable ();
11362 default:
11363 return NULL_TREE;
11364 } /* switch (code) */
11367 /* Callback for walk_tree, looking for LABEL_EXPR. Return *TP if it is
11368 a LABEL_EXPR; otherwise return NULL_TREE. Do not check the subtrees
11369 of GOTO_EXPR. */
11371 static tree
11372 contains_label_1 (tree *tp, int *walk_subtrees, void *data ATTRIBUTE_UNUSED)
11374 switch (TREE_CODE (*tp))
11376 case LABEL_EXPR:
11377 return *tp;
11379 case GOTO_EXPR:
11380 *walk_subtrees = 0;
11382 /* ... fall through ... */
11384 default:
11385 return NULL_TREE;
11389 /* Return whether the sub-tree ST contains a label which is accessible from
11390 outside the sub-tree. */
11392 static bool
11393 contains_label_p (tree st)
11395 return
11396 (walk_tree_without_duplicates (&st, contains_label_1, NULL) != NULL_TREE);
11399 /* Fold a ternary expression of code CODE and type TYPE with operands
11400 OP0, OP1, and OP2. Return the folded expression if folding is
11401 successful. Otherwise, return NULL_TREE. */
11403 tree
11404 fold_ternary_loc (location_t loc, enum tree_code code, tree type,
11405 tree op0, tree op1, tree op2)
11407 tree tem;
11408 tree arg0 = NULL_TREE, arg1 = NULL_TREE, arg2 = NULL_TREE;
11409 enum tree_code_class kind = TREE_CODE_CLASS (code);
11411 gcc_assert (IS_EXPR_CODE_CLASS (kind)
11412 && TREE_CODE_LENGTH (code) == 3);
11414 /* If this is a commutative operation, and OP0 is a constant, move it
11415 to OP1 to reduce the number of tests below. */
11416 if (commutative_ternary_tree_code (code)
11417 && tree_swap_operands_p (op0, op1, true))
11418 return fold_build3_loc (loc, code, type, op1, op0, op2);
11420 tem = generic_simplify (loc, code, type, op0, op1, op2);
11421 if (tem)
11422 return tem;
11424 /* Strip any conversions that don't change the mode. This is safe
11425 for every expression, except for a comparison expression because
11426 its signedness is derived from its operands. So, in the latter
11427 case, only strip conversions that don't change the signedness.
11429 Note that this is done as an internal manipulation within the
11430 constant folder, in order to find the simplest representation of
11431 the arguments so that their form can be studied. In any case,
11432 the appropriate type conversions should be put back in the tree
11433 that will get out of the constant folder. */
11434 if (op0)
11436 arg0 = op0;
11437 STRIP_NOPS (arg0);
11440 if (op1)
11442 arg1 = op1;
11443 STRIP_NOPS (arg1);
11446 if (op2)
11448 arg2 = op2;
11449 STRIP_NOPS (arg2);
11452 switch (code)
11454 case COMPONENT_REF:
11455 if (TREE_CODE (arg0) == CONSTRUCTOR
11456 && ! type_contains_placeholder_p (TREE_TYPE (arg0)))
11458 unsigned HOST_WIDE_INT idx;
11459 tree field, value;
11460 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (arg0), idx, field, value)
11461 if (field == arg1)
11462 return value;
11464 return NULL_TREE;
11466 case COND_EXPR:
11467 case VEC_COND_EXPR:
11468 /* Pedantic ANSI C says that a conditional expression is never an lvalue,
11469 so all simple results must be passed through pedantic_non_lvalue. */
11470 if (TREE_CODE (arg0) == INTEGER_CST)
11472 tree unused_op = integer_zerop (arg0) ? op1 : op2;
11473 tem = integer_zerop (arg0) ? op2 : op1;
11474 /* Only optimize constant conditions when the selected branch
11475 has the same type as the COND_EXPR. This avoids optimizing
11476 away "c ? x : throw", where the throw has a void type.
11477 Avoid throwing away the operand that contains a label. */
11478 if ((!TREE_SIDE_EFFECTS (unused_op)
11479 || !contains_label_p (unused_op))
11480 && (! VOID_TYPE_P (TREE_TYPE (tem))
11481 || VOID_TYPE_P (type)))
11482 return pedantic_non_lvalue_loc (loc, tem);
11483 return NULL_TREE;
11485 else if (TREE_CODE (arg0) == VECTOR_CST)
11487 if ((TREE_CODE (arg1) == VECTOR_CST
11488 || TREE_CODE (arg1) == CONSTRUCTOR)
11489 && (TREE_CODE (arg2) == VECTOR_CST
11490 || TREE_CODE (arg2) == CONSTRUCTOR))
11492 unsigned int nelts = TYPE_VECTOR_SUBPARTS (type), i;
11493 unsigned char *sel = XALLOCAVEC (unsigned char, nelts);
11494 gcc_assert (nelts == VECTOR_CST_NELTS (arg0));
11495 for (i = 0; i < nelts; i++)
11497 tree val = VECTOR_CST_ELT (arg0, i);
11498 if (integer_all_onesp (val))
11499 sel[i] = i;
11500 else if (integer_zerop (val))
11501 sel[i] = nelts + i;
11502 else /* Currently unreachable. */
11503 return NULL_TREE;
11505 tree t = fold_vec_perm (type, arg1, arg2, sel);
11506 if (t != NULL_TREE)
11507 return t;
11511 /* If we have A op B ? A : C, we may be able to convert this to a
11512 simpler expression, depending on the operation and the values
11513 of B and C. Signed zeros prevent all of these transformations,
11514 for reasons given above each one.
11516 Also try swapping the arguments and inverting the conditional. */
11517 if (COMPARISON_CLASS_P (arg0)
11518 && operand_equal_for_comparison_p (TREE_OPERAND (arg0, 0),
11519 arg1, TREE_OPERAND (arg0, 1))
11520 && !HONOR_SIGNED_ZEROS (element_mode (arg1)))
11522 tem = fold_cond_expr_with_comparison (loc, type, arg0, op1, op2);
11523 if (tem)
11524 return tem;
11527 if (COMPARISON_CLASS_P (arg0)
11528 && operand_equal_for_comparison_p (TREE_OPERAND (arg0, 0),
11529 op2,
11530 TREE_OPERAND (arg0, 1))
11531 && !HONOR_SIGNED_ZEROS (element_mode (op2)))
11533 location_t loc0 = expr_location_or (arg0, loc);
11534 tem = fold_invert_truthvalue (loc0, arg0);
11535 if (tem && COMPARISON_CLASS_P (tem))
11537 tem = fold_cond_expr_with_comparison (loc, type, tem, op2, op1);
11538 if (tem)
11539 return tem;
11543 /* If the second operand is simpler than the third, swap them
11544 since that produces better jump optimization results. */
11545 if (truth_value_p (TREE_CODE (arg0))
11546 && tree_swap_operands_p (op1, op2, false))
11548 location_t loc0 = expr_location_or (arg0, loc);
11549 /* See if this can be inverted. If it can't, possibly because
11550 it was a floating-point inequality comparison, don't do
11551 anything. */
11552 tem = fold_invert_truthvalue (loc0, arg0);
11553 if (tem)
11554 return fold_build3_loc (loc, code, type, tem, op2, op1);
11557 /* Convert A ? 1 : 0 to simply A. */
11558 if ((code == VEC_COND_EXPR ? integer_all_onesp (op1)
11559 : (integer_onep (op1)
11560 && !VECTOR_TYPE_P (type)))
11561 && integer_zerop (op2)
11562 /* If we try to convert OP0 to our type, the
11563 call to fold will try to move the conversion inside
11564 a COND, which will recurse. In that case, the COND_EXPR
11565 is probably the best choice, so leave it alone. */
11566 && type == TREE_TYPE (arg0))
11567 return pedantic_non_lvalue_loc (loc, arg0);
11569 /* Convert A ? 0 : 1 to !A. This prefers the use of NOT_EXPR
11570 over COND_EXPR in cases such as floating point comparisons. */
11571 if (integer_zerop (op1)
11572 && (code == VEC_COND_EXPR ? integer_all_onesp (op2)
11573 : (integer_onep (op2)
11574 && !VECTOR_TYPE_P (type)))
11575 && truth_value_p (TREE_CODE (arg0)))
11576 return pedantic_non_lvalue_loc (loc,
11577 fold_convert_loc (loc, type,
11578 invert_truthvalue_loc (loc,
11579 arg0)));
11581 /* A < 0 ? <sign bit of A> : 0 is simply (A & <sign bit of A>). */
11582 if (TREE_CODE (arg0) == LT_EXPR
11583 && integer_zerop (TREE_OPERAND (arg0, 1))
11584 && integer_zerop (op2)
11585 && (tem = sign_bit_p (TREE_OPERAND (arg0, 0), arg1)))
11587 /* sign_bit_p looks through both zero and sign extensions,
11588 but for this optimization only sign extensions are
11589 usable. */
11590 tree tem2 = TREE_OPERAND (arg0, 0);
11591 while (tem != tem2)
11593 if (TREE_CODE (tem2) != NOP_EXPR
11594 || TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (tem2, 0))))
11596 tem = NULL_TREE;
11597 break;
11599 tem2 = TREE_OPERAND (tem2, 0);
11601 /* sign_bit_p only checks ARG1 bits within A's precision.
11602 If <sign bit of A> has wider type than A, bits outside
11603 of A's precision in <sign bit of A> need to be checked.
11604 If they are all 0, this optimization needs to be done
11605 in unsigned A's type; if they are all 1, in signed A's type;
11606 otherwise this can't be done. */
11607 if (tem
11608 && TYPE_PRECISION (TREE_TYPE (tem))
11609 < TYPE_PRECISION (TREE_TYPE (arg1))
11610 && TYPE_PRECISION (TREE_TYPE (tem))
11611 < TYPE_PRECISION (type))
11613 int inner_width, outer_width;
11614 tree tem_type;
11616 inner_width = TYPE_PRECISION (TREE_TYPE (tem));
11617 outer_width = TYPE_PRECISION (TREE_TYPE (arg1));
11618 if (outer_width > TYPE_PRECISION (type))
11619 outer_width = TYPE_PRECISION (type);
11621 wide_int mask = wi::shifted_mask
11622 (inner_width, outer_width - inner_width, false,
11623 TYPE_PRECISION (TREE_TYPE (arg1)));
11625 wide_int common = mask & arg1;
11626 if (common == mask)
11628 tem_type = signed_type_for (TREE_TYPE (tem));
11629 tem = fold_convert_loc (loc, tem_type, tem);
11631 else if (common == 0)
11633 tem_type = unsigned_type_for (TREE_TYPE (tem));
11634 tem = fold_convert_loc (loc, tem_type, tem);
11636 else
11637 tem = NULL;
11640 if (tem)
11641 return
11642 fold_convert_loc (loc, type,
11643 fold_build2_loc (loc, BIT_AND_EXPR,
11644 TREE_TYPE (tem), tem,
11645 fold_convert_loc (loc,
11646 TREE_TYPE (tem),
11647 arg1)));
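/* E.g. x < 0 ? INT_MIN : 0 with 32-bit int X folds to
   x & INT_MIN, selecting exactly the sign bit of X.  */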
11650 /* (A >> N) & 1 ? (1 << N) : 0 is simply A & (1 << N). A & 1 was
11651 already handled above. */
11652 if (TREE_CODE (arg0) == BIT_AND_EXPR
11653 && integer_onep (TREE_OPERAND (arg0, 1))
11654 && integer_zerop (op2)
11655 && integer_pow2p (arg1))
11657 tree tem = TREE_OPERAND (arg0, 0);
11658 STRIP_NOPS (tem);
11659 if (TREE_CODE (tem) == RSHIFT_EXPR
11660 && tree_fits_uhwi_p (TREE_OPERAND (tem, 1))
11661 && (unsigned HOST_WIDE_INT) tree_log2 (arg1) ==
11662 tree_to_uhwi (TREE_OPERAND (tem, 1)))
11663 return fold_build2_loc (loc, BIT_AND_EXPR, type,
11664 TREE_OPERAND (tem, 0), arg1);
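/* E.g. ((x >> 3) & 1) ? 8 : 0 folds to x & 8.  */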
11667 /* A & N ? N : 0 is simply A & N if N is a power of two. This
11668 is probably obsolete because the first operand should be a
11669 truth value (that's why we have the two cases above), but let's
11670 leave it in until we can confirm this for all front-ends. */
11671 if (integer_zerop (op2)
11672 && TREE_CODE (arg0) == NE_EXPR
11673 && integer_zerop (TREE_OPERAND (arg0, 1))
11674 && integer_pow2p (arg1)
11675 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_AND_EXPR
11676 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
11677 arg1, OEP_ONLY_CONST))
11678 return pedantic_non_lvalue_loc (loc,
11679 fold_convert_loc (loc, type,
11680 TREE_OPERAND (arg0, 0)));
11682 /* Disable the transformations below for vectors, since
11683 fold_binary_op_with_conditional_arg may undo them immediately,
11684 yielding an infinite loop. */
11685 if (code == VEC_COND_EXPR)
11686 return NULL_TREE;
11688 /* Convert A ? B : 0 into A && B if A and B are truth values. */
11689 if (integer_zerop (op2)
11690 && truth_value_p (TREE_CODE (arg0))
11691 && truth_value_p (TREE_CODE (arg1))
11692 && (code == VEC_COND_EXPR || !VECTOR_TYPE_P (type)))
11693 return fold_build2_loc (loc, code == VEC_COND_EXPR ? BIT_AND_EXPR
11694 : TRUTH_ANDIF_EXPR,
11695 type, fold_convert_loc (loc, type, arg0), arg1);
11697 /* Convert A ? B : 1 into !A || B if A and B are truth values. */
11698 if ((code == VEC_COND_EXPR ? integer_all_onesp (op2) : integer_onep (op2))
11699 && truth_value_p (TREE_CODE (arg0))
11700 && truth_value_p (TREE_CODE (arg1))
11701 && (code == VEC_COND_EXPR || !VECTOR_TYPE_P (type)))
11703 location_t loc0 = expr_location_or (arg0, loc);
11704 /* Only perform transformation if ARG0 is easily inverted. */
11705 tem = fold_invert_truthvalue (loc0, arg0);
11706 if (tem)
11707 return fold_build2_loc (loc, code == VEC_COND_EXPR
11708 ? BIT_IOR_EXPR
11709 : TRUTH_ORIF_EXPR,
11710 type, fold_convert_loc (loc, type, tem),
11711 arg1);
11714 /* Convert A ? 0 : B into !A && B if A and B are truth values. */
11715 if (integer_zerop (arg1)
11716 && truth_value_p (TREE_CODE (arg0))
11717 && truth_value_p (TREE_CODE (op2))
11718 && (code == VEC_COND_EXPR || !VECTOR_TYPE_P (type)))
11720 location_t loc0 = expr_location_or (arg0, loc);
11721 /* Only perform transformation if ARG0 is easily inverted. */
11722 tem = fold_invert_truthvalue (loc0, arg0);
11723 if (tem)
11724 return fold_build2_loc (loc, code == VEC_COND_EXPR
11725 ? BIT_AND_EXPR : TRUTH_ANDIF_EXPR,
11726 type, fold_convert_loc (loc, type, tem),
11727 op2);
11730 /* Convert A ? 1 : B into A || B if A and B are truth values. */
11731 if ((code == VEC_COND_EXPR ? integer_all_onesp (arg1) : integer_onep (arg1))
11732 && truth_value_p (TREE_CODE (arg0))
11733 && truth_value_p (TREE_CODE (op2))
11734 && (code == VEC_COND_EXPR || !VECTOR_TYPE_P (type)))
11735 return fold_build2_loc (loc, code == VEC_COND_EXPR
11736 ? BIT_IOR_EXPR : TRUTH_ORIF_EXPR,
11737 type, fold_convert_loc (loc, type, arg0), op2);
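/* Taken together, the four conversions above turn a COND_EXPR on
   truth values into the corresponding short-circuit form, e.g.
     a ? b : 0 => a && b	a ? b : 1 => !a || b
     a ? 0 : b => !a && b	a ? 1 : b => a || b  */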
11739 return NULL_TREE;
11741 case CALL_EXPR:
11742 /* CALL_EXPRs used to be ternary exprs. Catch any mistaken uses
11743 of fold_ternary on them. */
11744 gcc_unreachable ();
11746 case BIT_FIELD_REF:
11747 if ((TREE_CODE (arg0) == VECTOR_CST
11748 || (TREE_CODE (arg0) == CONSTRUCTOR
11749 && TREE_CODE (TREE_TYPE (arg0)) == VECTOR_TYPE))
11750 && (type == TREE_TYPE (TREE_TYPE (arg0))
11751 || (TREE_CODE (type) == VECTOR_TYPE
11752 && TREE_TYPE (type) == TREE_TYPE (TREE_TYPE (arg0)))))
11754 tree eltype = TREE_TYPE (TREE_TYPE (arg0));
11755 unsigned HOST_WIDE_INT width = tree_to_uhwi (TYPE_SIZE (eltype));
11756 unsigned HOST_WIDE_INT n = tree_to_uhwi (arg1);
11757 unsigned HOST_WIDE_INT idx = tree_to_uhwi (op2);
11759 if (n != 0
11760 && (idx % width) == 0
11761 && (n % width) == 0
11762 && ((idx + n) / width) <= TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0)))
11764 idx = idx / width;
11765 n = n / width;
11767 if (TREE_CODE (arg0) == VECTOR_CST)
11769 if (n == 1)
11770 return VECTOR_CST_ELT (arg0, idx);
11772 tree *vals = XALLOCAVEC (tree, n);
11773 for (unsigned i = 0; i < n; ++i)
11774 vals[i] = VECTOR_CST_ELT (arg0, idx + i);
11775 return build_vector (type, vals);
11778 /* Constructor elements can be subvectors. */
11779 unsigned HOST_WIDE_INT k = 1;
11780 if (CONSTRUCTOR_NELTS (arg0) != 0)
11782 tree cons_elem = TREE_TYPE (CONSTRUCTOR_ELT (arg0, 0)->value);
11783 if (TREE_CODE (cons_elem) == VECTOR_TYPE)
11784 k = TYPE_VECTOR_SUBPARTS (cons_elem);
11787 /* We keep an exact subset of the constructor elements. */
11788 if ((idx % k) == 0 && (n % k) == 0)
11790 if (CONSTRUCTOR_NELTS (arg0) == 0)
11791 return build_constructor (type, NULL);
11792 idx /= k;
11793 n /= k;
11794 if (n == 1)
11796 if (idx < CONSTRUCTOR_NELTS (arg0))
11797 return CONSTRUCTOR_ELT (arg0, idx)->value;
11798 return build_zero_cst (type);
11801 vec<constructor_elt, va_gc> *vals;
11802 vec_alloc (vals, n);
11803 for (unsigned i = 0;
11804 i < n && idx + i < CONSTRUCTOR_NELTS (arg0);
11805 ++i)
11806 CONSTRUCTOR_APPEND_ELT (vals, NULL_TREE,
11807 CONSTRUCTOR_ELT
11808 (arg0, idx + i)->value);
11809 return build_constructor (type, vals);
11811 /* The bitfield references a single constructor element. */
11812 else if (idx + n <= (idx / k + 1) * k)
11814 if (CONSTRUCTOR_NELTS (arg0) <= idx / k)
11815 return build_zero_cst (type);
11816 else if (n == k)
11817 return CONSTRUCTOR_ELT (arg0, idx / k)->value;
11818 else
11819 return fold_build3_loc (loc, code, type,
11820 CONSTRUCTOR_ELT (arg0, idx / k)->value, op1,
11821 build_int_cst (TREE_TYPE (op2), (idx % k) * width));
11826 /* A bit-field-ref that referenced the full argument can be stripped. */
11827 if (INTEGRAL_TYPE_P (TREE_TYPE (arg0))
11828 && TYPE_PRECISION (TREE_TYPE (arg0)) == tree_to_uhwi (arg1)
11829 && integer_zerop (op2))
11830 return fold_convert_loc (loc, type, arg0);
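/* E.g. BIT_FIELD_REF <x, 32, 0> of a 32-bit integer X is just a
   conversion of X itself.  */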
11832 /* On constants we can use native encode/interpret to constant
11833 fold (nearly) all BIT_FIELD_REFs. */
11834 if (CONSTANT_CLASS_P (arg0)
11835 && can_native_interpret_type_p (type)
11836 && tree_fits_uhwi_p (TYPE_SIZE_UNIT (TREE_TYPE (arg0)))
11837 /* This limitation should not be necessary; we just need to
11838 round this up to the mode size. */
11839 && tree_to_uhwi (op1) % BITS_PER_UNIT == 0
11840 /* Need bit-shifting of the buffer to relax the following. */
11841 && tree_to_uhwi (op2) % BITS_PER_UNIT == 0)
11843 unsigned HOST_WIDE_INT bitpos = tree_to_uhwi (op2);
11844 unsigned HOST_WIDE_INT bitsize = tree_to_uhwi (op1);
11845 unsigned HOST_WIDE_INT clen;
11846 clen = tree_to_uhwi (TYPE_SIZE_UNIT (TREE_TYPE (arg0)));
11847 /* ??? We cannot tell native_encode_expr to start at
11848 some random byte only. So limit us to a reasonable amount
11849 of work. */
11850 if (clen <= 4096)
11852 unsigned char *b = XALLOCAVEC (unsigned char, clen);
11853 unsigned HOST_WIDE_INT len = native_encode_expr (arg0, b, clen);
11854 if (len > 0
11855 && len * BITS_PER_UNIT >= bitpos + bitsize)
11857 tree v = native_interpret_expr (type,
11858 b + bitpos / BITS_PER_UNIT,
11859 bitsize / BITS_PER_UNIT);
11860 if (v)
11861 return v;
11866 return NULL_TREE;
11868 case FMA_EXPR:
11869 /* For integers we can decompose the FMA if possible. */
11870 if (TREE_CODE (arg0) == INTEGER_CST
11871 && TREE_CODE (arg1) == INTEGER_CST)
11872 return fold_build2_loc (loc, PLUS_EXPR, type,
11873 const_binop (MULT_EXPR, arg0, arg1), arg2);
11874 if (integer_zerop (arg2))
11875 return fold_build2_loc (loc, MULT_EXPR, type, arg0, arg1);
11877 return fold_fma (loc, type, arg0, arg1, arg2);
11879 case VEC_PERM_EXPR:
11880 if (TREE_CODE (arg2) == VECTOR_CST)
11882 unsigned int nelts = TYPE_VECTOR_SUBPARTS (type), i, mask, mask2;
11883 unsigned char *sel = XALLOCAVEC (unsigned char, 2 * nelts);
11884 unsigned char *sel2 = sel + nelts;
11885 bool need_mask_canon = false;
11886 bool need_mask_canon2 = false;
11887 bool all_in_vec0 = true;
11888 bool all_in_vec1 = true;
11889 bool maybe_identity = true;
11890 bool single_arg = (op0 == op1);
11891 bool changed = false;
11893 mask2 = 2 * nelts - 1;
11894 mask = single_arg ? (nelts - 1) : mask2;
11895 gcc_assert (nelts == VECTOR_CST_NELTS (arg2));
11896 for (i = 0; i < nelts; i++)
11898 tree val = VECTOR_CST_ELT (arg2, i);
11899 if (TREE_CODE (val) != INTEGER_CST)
11900 return NULL_TREE;
11902 /* Make sure that the perm value is in an acceptable
11903 range. */
11904 wide_int t = val;
11905 need_mask_canon |= wi::gtu_p (t, mask);
11906 need_mask_canon2 |= wi::gtu_p (t, mask2);
11907 sel[i] = t.to_uhwi () & mask;
11908 sel2[i] = t.to_uhwi () & mask2;
11910 if (sel[i] < nelts)
11911 all_in_vec1 = false;
11912 else
11913 all_in_vec0 = false;
11915 if ((sel[i] & (nelts-1)) != i)
11916 maybe_identity = false;
11919 if (maybe_identity)
11921 if (all_in_vec0)
11922 return op0;
11923 if (all_in_vec1)
11924 return op1;
11927 if (all_in_vec0)
11928 op1 = op0;
11929 else if (all_in_vec1)
11931 op0 = op1;
11932 for (i = 0; i < nelts; i++)
11933 sel[i] -= nelts;
11934 need_mask_canon = true;
11937 if ((TREE_CODE (op0) == VECTOR_CST
11938 || TREE_CODE (op0) == CONSTRUCTOR)
11939 && (TREE_CODE (op1) == VECTOR_CST
11940 || TREE_CODE (op1) == CONSTRUCTOR))
11942 tree t = fold_vec_perm (type, op0, op1, sel);
11943 if (t != NULL_TREE)
11944 return t;
11947 if (op0 == op1 && !single_arg)
11948 changed = true;
11950 /* Some targets are deficient and fail to expand a single
11951 argument permutation while still allowing an equivalent
11952 2-argument version. */
11953 if (need_mask_canon && arg2 == op2
11954 && !can_vec_perm_p (TYPE_MODE (type), false, sel)
11955 && can_vec_perm_p (TYPE_MODE (type), false, sel2))
11957 need_mask_canon = need_mask_canon2;
11958 sel = sel2;
11961 if (need_mask_canon && arg2 == op2)
11963 tree *tsel = XALLOCAVEC (tree, nelts);
11964 tree eltype = TREE_TYPE (TREE_TYPE (arg2));
11965 for (i = 0; i < nelts; i++)
11966 tsel[i] = build_int_cst (eltype, sel[i]);
11967 op2 = build_vector (TREE_TYPE (arg2), tsel);
11968 changed = true;
11971 if (changed)
11972 return build3_loc (loc, VEC_PERM_EXPR, type, op0, op1, op2);
11974 return NULL_TREE;
11976 default:
11977 return NULL_TREE;
11978 } /* switch (code) */
11981 /* Perform constant folding and related simplification of EXPR.
11982 The related simplifications include x*1 => x, x*0 => 0, etc.,
11983 and application of the associative law.
11984 NOP_EXPR conversions may be removed freely (as long as we
11985 are careful not to change the type of the overall expression).
11986 We cannot simplify through a CONVERT_EXPR, FIX_EXPR or FLOAT_EXPR,
11987 but we can constant-fold them if they have constant operands. */
11989 #ifdef ENABLE_FOLD_CHECKING
11990 # define fold(x) fold_1 (x)
11991 static tree fold_1 (tree);
11992 static
11993 #endif
11994 tree
11995 fold (tree expr)
11997 const tree t = expr;
11998 enum tree_code code = TREE_CODE (t);
11999 enum tree_code_class kind = TREE_CODE_CLASS (code);
12000 tree tem;
12001 location_t loc = EXPR_LOCATION (expr);
12003 /* Return right away if a constant. */
12004 if (kind == tcc_constant)
12005 return t;
12007 /* CALL_EXPR-like objects with variable numbers of operands are
12008 treated specially. */
12009 if (kind == tcc_vl_exp)
12011 if (code == CALL_EXPR)
12013 tem = fold_call_expr (loc, expr, false);
12014 return tem ? tem : expr;
12016 return expr;
12019 if (IS_EXPR_CODE_CLASS (kind))
12021 tree type = TREE_TYPE (t);
12022 tree op0, op1, op2;
12024 switch (TREE_CODE_LENGTH (code))
12026 case 1:
12027 op0 = TREE_OPERAND (t, 0);
12028 tem = fold_unary_loc (loc, code, type, op0);
12029 return tem ? tem : expr;
12030 case 2:
12031 op0 = TREE_OPERAND (t, 0);
12032 op1 = TREE_OPERAND (t, 1);
12033 tem = fold_binary_loc (loc, code, type, op0, op1);
12034 return tem ? tem : expr;
12035 case 3:
12036 op0 = TREE_OPERAND (t, 0);
12037 op1 = TREE_OPERAND (t, 1);
12038 op2 = TREE_OPERAND (t, 2);
12039 tem = fold_ternary_loc (loc, code, type, op0, op1, op2);
12040 return tem ? tem : expr;
12041 default:
12042 break;
12046 switch (code)
12048 case ARRAY_REF:
12050 tree op0 = TREE_OPERAND (t, 0);
12051 tree op1 = TREE_OPERAND (t, 1);
12053 if (TREE_CODE (op1) == INTEGER_CST
12054 && TREE_CODE (op0) == CONSTRUCTOR
12055 && ! type_contains_placeholder_p (TREE_TYPE (op0)))
12057 vec<constructor_elt, va_gc> *elts = CONSTRUCTOR_ELTS (op0);
12058 unsigned HOST_WIDE_INT end = vec_safe_length (elts);
12059 unsigned HOST_WIDE_INT begin = 0;
12061 /* Find a matching index by means of a binary search. */
12062 while (begin != end)
12064 unsigned HOST_WIDE_INT middle = (begin + end) / 2;
12065 tree index = (*elts)[middle].index;
12067 if (TREE_CODE (index) == INTEGER_CST
12068 && tree_int_cst_lt (index, op1))
12069 begin = middle + 1;
12070 else if (TREE_CODE (index) == INTEGER_CST
12071 && tree_int_cst_lt (op1, index))
12072 end = middle;
12073 else if (TREE_CODE (index) == RANGE_EXPR
12074 && tree_int_cst_lt (TREE_OPERAND (index, 1), op1))
12075 begin = middle + 1;
12076 else if (TREE_CODE (index) == RANGE_EXPR
12077 && tree_int_cst_lt (op1, TREE_OPERAND (index, 0)))
12078 end = middle;
12079 else
12080 return (*elts)[middle].value;
12084 return t;
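/* E.g. an ARRAY_REF of a CONSTRUCTOR with constant index 5 is
   resolved by binary-searching index 5 among the sorted element
   indices (including RANGE_EXPRs) and returning the matching
   initializer value, if any.  */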
12087 /* Return a VECTOR_CST if possible. */
12088 case CONSTRUCTOR:
12090 tree type = TREE_TYPE (t);
12091 if (TREE_CODE (type) != VECTOR_TYPE)
12092 return t;
12094 tree *vec = XALLOCAVEC (tree, TYPE_VECTOR_SUBPARTS (type));
12095 unsigned HOST_WIDE_INT idx, pos = 0;
12096 tree value;
12098 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (t), idx, value)
12100 if (!CONSTANT_CLASS_P (value))
12101 return t;
12102 if (TREE_CODE (value) == VECTOR_CST)
12104 for (unsigned i = 0; i < VECTOR_CST_NELTS (value); ++i)
12105 vec[pos++] = VECTOR_CST_ELT (value, i);
12107 else
12108 vec[pos++] = value;
12110 for (; pos < TYPE_VECTOR_SUBPARTS (type); ++pos)
12111 vec[pos] = build_zero_cst (TREE_TYPE (type));
12113 return build_vector (type, vec);
12116 case CONST_DECL:
12117 return fold (DECL_INITIAL (t));
12119 default:
12120 return t;
12121 } /* switch (code) */
12124 #ifdef ENABLE_FOLD_CHECKING
12125 #undef fold
12127 static void fold_checksum_tree (const_tree, struct md5_ctx *,
12128 hash_table<nofree_ptr_hash<const tree_node> > *);
12129 static void fold_check_failed (const_tree, const_tree);
12130 void print_fold_checksum (const_tree);
12132 /* When --enable-checking=fold, compute a digest of expr before
12133 and after the actual fold call to verify that fold did not
12134 accidentally change the original expr. */
12136 tree
12137 fold (tree expr)
12139 tree ret;
12140 struct md5_ctx ctx;
12141 unsigned char checksum_before[16], checksum_after[16];
12142 hash_table<nofree_ptr_hash<const tree_node> > ht (32);
12144 md5_init_ctx (&ctx);
12145 fold_checksum_tree (expr, &ctx, &ht);
12146 md5_finish_ctx (&ctx, checksum_before);
12147 ht.empty ();
12149 ret = fold_1 (expr);
12151 md5_init_ctx (&ctx);
12152 fold_checksum_tree (expr, &ctx, &ht);
12153 md5_finish_ctx (&ctx, checksum_after);
12155 if (memcmp (checksum_before, checksum_after, 16))
12156 fold_check_failed (expr, ret);
12158 return ret;
12161 void
12162 print_fold_checksum (const_tree expr)
12164 struct md5_ctx ctx;
12165 unsigned char checksum[16], cnt;
12166 hash_table<nofree_ptr_hash<const tree_node> > ht (32);
12168 md5_init_ctx (&ctx);
12169 fold_checksum_tree (expr, &ctx, &ht);
12170 md5_finish_ctx (&ctx, checksum);
12171 for (cnt = 0; cnt < 16; ++cnt)
12172 fprintf (stderr, "%02x", checksum[cnt]);
12173 putc ('\n', stderr);
12176 static void
12177 fold_check_failed (const_tree expr ATTRIBUTE_UNUSED, const_tree ret ATTRIBUTE_UNUSED)
12179 internal_error ("fold check: original tree changed by fold");
12182 static void
12183 fold_checksum_tree (const_tree expr, struct md5_ctx *ctx,
12184 hash_table<nofree_ptr_hash <const tree_node> > *ht)
12186 const tree_node **slot;
12187 enum tree_code code;
12188 union tree_node buf;
12189 int i, len;
12191 recursive_label:
12192 if (expr == NULL)
12193 return;
12194 slot = ht->find_slot (expr, INSERT);
12195 if (*slot != NULL)
12196 return;
12197 *slot = expr;
12198 code = TREE_CODE (expr);
12199 if (TREE_CODE_CLASS (code) == tcc_declaration
12200 && HAS_DECL_ASSEMBLER_NAME_P (expr))
12202 /* Allow DECL_ASSEMBLER_NAME and symtab_node to be modified. */
12203 memcpy ((char *) &buf, expr, tree_size (expr));
12204 SET_DECL_ASSEMBLER_NAME ((tree)&buf, NULL);
12205 buf.decl_with_vis.symtab_node = NULL;
12206 expr = (tree) &buf;
12208 else if (TREE_CODE_CLASS (code) == tcc_type
12209 && (TYPE_POINTER_TO (expr)
12210 || TYPE_REFERENCE_TO (expr)
12211 || TYPE_CACHED_VALUES_P (expr)
12212 || TYPE_CONTAINS_PLACEHOLDER_INTERNAL (expr)
12213 || TYPE_NEXT_VARIANT (expr)))
12215 /* Allow these fields to be modified. */
12216 tree tmp;
12217 memcpy ((char *) &buf, expr, tree_size (expr));
12218 expr = tmp = (tree) &buf;
12219 TYPE_CONTAINS_PLACEHOLDER_INTERNAL (tmp) = 0;
12220 TYPE_POINTER_TO (tmp) = NULL;
12221 TYPE_REFERENCE_TO (tmp) = NULL;
12222 TYPE_NEXT_VARIANT (tmp) = NULL;
12223 if (TYPE_CACHED_VALUES_P (tmp))
12225 TYPE_CACHED_VALUES_P (tmp) = 0;
12226 TYPE_CACHED_VALUES (tmp) = NULL;
12229 md5_process_bytes (expr, tree_size (expr), ctx);
12230 if (CODE_CONTAINS_STRUCT (code, TS_TYPED))
12231 fold_checksum_tree (TREE_TYPE (expr), ctx, ht);
12232 if (TREE_CODE_CLASS (code) != tcc_type
12233 && TREE_CODE_CLASS (code) != tcc_declaration
12234 && code != TREE_LIST
12235 && code != SSA_NAME
12236 && CODE_CONTAINS_STRUCT (code, TS_COMMON))
12237 fold_checksum_tree (TREE_CHAIN (expr), ctx, ht);
12238 switch (TREE_CODE_CLASS (code))
12240 case tcc_constant:
12241 switch (code)
12243 case STRING_CST:
12244 md5_process_bytes (TREE_STRING_POINTER (expr),
12245 TREE_STRING_LENGTH (expr), ctx);
12246 break;
12247 case COMPLEX_CST:
12248 fold_checksum_tree (TREE_REALPART (expr), ctx, ht);
12249 fold_checksum_tree (TREE_IMAGPART (expr), ctx, ht);
12250 break;
12251 case VECTOR_CST:
12252 for (i = 0; i < (int) VECTOR_CST_NELTS (expr); ++i)
12253 fold_checksum_tree (VECTOR_CST_ELT (expr, i), ctx, ht);
12254 break;
12255 default:
12256 break;
12258 break;
12259 case tcc_exceptional:
12260 switch (code)
12262 case TREE_LIST:
12263 fold_checksum_tree (TREE_PURPOSE (expr), ctx, ht);
12264 fold_checksum_tree (TREE_VALUE (expr), ctx, ht);
12265 expr = TREE_CHAIN (expr);
12266 goto recursive_label;
12267 break;
12268 case TREE_VEC:
12269 for (i = 0; i < TREE_VEC_LENGTH (expr); ++i)
12270 fold_checksum_tree (TREE_VEC_ELT (expr, i), ctx, ht);
12271 break;
12272 default:
12273 break;
12275 break;
12276 case tcc_expression:
12277 case tcc_reference:
12278 case tcc_comparison:
12279 case tcc_unary:
12280 case tcc_binary:
12281 case tcc_statement:
12282 case tcc_vl_exp:
12283 len = TREE_OPERAND_LENGTH (expr);
12284 for (i = 0; i < len; ++i)
12285 fold_checksum_tree (TREE_OPERAND (expr, i), ctx, ht);
12286 break;
12287 case tcc_declaration:
12288 fold_checksum_tree (DECL_NAME (expr), ctx, ht);
12289 fold_checksum_tree (DECL_CONTEXT (expr), ctx, ht);
12290 if (CODE_CONTAINS_STRUCT (TREE_CODE (expr), TS_DECL_COMMON))
12292 fold_checksum_tree (DECL_SIZE (expr), ctx, ht);
12293 fold_checksum_tree (DECL_SIZE_UNIT (expr), ctx, ht);
12294 fold_checksum_tree (DECL_INITIAL (expr), ctx, ht);
12295 fold_checksum_tree (DECL_ABSTRACT_ORIGIN (expr), ctx, ht);
12296 fold_checksum_tree (DECL_ATTRIBUTES (expr), ctx, ht);
12299 if (CODE_CONTAINS_STRUCT (TREE_CODE (expr), TS_DECL_NON_COMMON))
12301 if (TREE_CODE (expr) == FUNCTION_DECL)
12303 fold_checksum_tree (DECL_VINDEX (expr), ctx, ht);
12304 fold_checksum_tree (DECL_ARGUMENTS (expr), ctx, ht);
12306 fold_checksum_tree (DECL_RESULT_FLD (expr), ctx, ht);
12308 break;
12309 case tcc_type:
12310 if (TREE_CODE (expr) == ENUMERAL_TYPE)
12311 fold_checksum_tree (TYPE_VALUES (expr), ctx, ht);
12312 fold_checksum_tree (TYPE_SIZE (expr), ctx, ht);
12313 fold_checksum_tree (TYPE_SIZE_UNIT (expr), ctx, ht);
12314 fold_checksum_tree (TYPE_ATTRIBUTES (expr), ctx, ht);
12315 fold_checksum_tree (TYPE_NAME (expr), ctx, ht);
12316 if (INTEGRAL_TYPE_P (expr)
12317 || SCALAR_FLOAT_TYPE_P (expr))
12319 fold_checksum_tree (TYPE_MIN_VALUE (expr), ctx, ht);
12320 fold_checksum_tree (TYPE_MAX_VALUE (expr), ctx, ht);
12322 fold_checksum_tree (TYPE_MAIN_VARIANT (expr), ctx, ht);
12323 if (TREE_CODE (expr) == RECORD_TYPE
12324 || TREE_CODE (expr) == UNION_TYPE
12325 || TREE_CODE (expr) == QUAL_UNION_TYPE)
12326 fold_checksum_tree (TYPE_BINFO (expr), ctx, ht);
12327 fold_checksum_tree (TYPE_CONTEXT (expr), ctx, ht);
12328 break;
12329 default:
12330 break;
12334 /* Helper function for outputting the checksum of a tree T. When
12335 debugging with gdb, you can "define mynext" to be "next" followed
12336 by "call debug_fold_checksum (op0)", then just trace down till the
12337 outputs differ. */
12339 DEBUG_FUNCTION void
12340 debug_fold_checksum (const_tree t)
12342 int i;
12343 unsigned char checksum[16];
12344 struct md5_ctx ctx;
12345 hash_table<nofree_ptr_hash<const tree_node> > ht (32);
12347 md5_init_ctx (&ctx);
12348 fold_checksum_tree (t, &ctx, &ht);
12349 md5_finish_ctx (&ctx, checksum);
12350 ht.empty ();
12352 for (i = 0; i < 16; i++)
12353 fprintf (stderr, "%d ", checksum[i]);
12355 fprintf (stderr, "\n");
12358 #endif
12360 /* Fold a unary tree expression with code CODE of type TYPE with an
12361 operand OP0. LOC is the location of the resulting expression.
12362 Return a folded expression if successful. Otherwise, return a tree
12363 expression with code CODE of type TYPE with an operand OP0. */
12365 tree
12366 fold_build1_stat_loc (location_t loc,
12367 enum tree_code code, tree type, tree op0 MEM_STAT_DECL)
12369 tree tem;
12370 #ifdef ENABLE_FOLD_CHECKING
12371 unsigned char checksum_before[16], checksum_after[16];
12372 struct md5_ctx ctx;
12373 hash_table<nofree_ptr_hash<const tree_node> > ht (32);
12375 md5_init_ctx (&ctx);
12376 fold_checksum_tree (op0, &ctx, &ht);
12377 md5_finish_ctx (&ctx, checksum_before);
12378 ht.empty ();
12379 #endif
12381 tem = fold_unary_loc (loc, code, type, op0);
12382 if (!tem)
12383 tem = build1_stat_loc (loc, code, type, op0 PASS_MEM_STAT);
12385 #ifdef ENABLE_FOLD_CHECKING
12386 md5_init_ctx (&ctx);
12387 fold_checksum_tree (op0, &ctx, &ht);
12388 md5_finish_ctx (&ctx, checksum_after);
12390 if (memcmp (checksum_before, checksum_after, 16))
12391 fold_check_failed (op0, tem);
12392 #endif
12393 return tem;
12396 /* Fold a binary tree expression with code CODE of type TYPE with
12397 operands OP0 and OP1. LOC is the location of the resulting
12398 expression. Return a folded expression if successful. Otherwise,
12399 return a tree expression with code CODE of type TYPE with operands
12400 OP0 and OP1. */
12402 tree
12403 fold_build2_stat_loc (location_t loc,
12404 enum tree_code code, tree type, tree op0, tree op1
12405 MEM_STAT_DECL)
12407 tree tem;
12408 #ifdef ENABLE_FOLD_CHECKING
12409 unsigned char checksum_before_op0[16],
12410 checksum_before_op1[16],
12411 checksum_after_op0[16],
12412 checksum_after_op1[16];
12413 struct md5_ctx ctx;
12414 hash_table<nofree_ptr_hash<const tree_node> > ht (32);
12416 md5_init_ctx (&ctx);
12417 fold_checksum_tree (op0, &ctx, &ht);
12418 md5_finish_ctx (&ctx, checksum_before_op0);
12419 ht.empty ();
12421 md5_init_ctx (&ctx);
12422 fold_checksum_tree (op1, &ctx, &ht);
12423 md5_finish_ctx (&ctx, checksum_before_op1);
12424 ht.empty ();
12425 #endif
12427 tem = fold_binary_loc (loc, code, type, op0, op1);
12428 if (!tem)
12429 tem = build2_stat_loc (loc, code, type, op0, op1 PASS_MEM_STAT);
12431 #ifdef ENABLE_FOLD_CHECKING
12432 md5_init_ctx (&ctx);
12433 fold_checksum_tree (op0, &ctx, &ht);
12434 md5_finish_ctx (&ctx, checksum_after_op0);
12435 ht.empty ();
12437 if (memcmp (checksum_before_op0, checksum_after_op0, 16))
12438 fold_check_failed (op0, tem);
12440 md5_init_ctx (&ctx);
12441 fold_checksum_tree (op1, &ctx, &ht);
12442 md5_finish_ctx (&ctx, checksum_after_op1);
12444 if (memcmp (checksum_before_op1, checksum_after_op1, 16))
12445 fold_check_failed (op1, tem);
12446 #endif
12447 return tem;
12450 /* Fold a ternary tree expression with code CODE of type TYPE with
12451 operands OP0, OP1, and OP2. Return a folded expression if
12452 successful. Otherwise, return a tree expression with code CODE of
12453 type TYPE with operands OP0, OP1, and OP2. */
12455 tree
12456 fold_build3_stat_loc (location_t loc, enum tree_code code, tree type,
12457 tree op0, tree op1, tree op2 MEM_STAT_DECL)
12459 tree tem;
12460 #ifdef ENABLE_FOLD_CHECKING
12461 unsigned char checksum_before_op0[16],
12462 checksum_before_op1[16],
12463 checksum_before_op2[16],
12464 checksum_after_op0[16],
12465 checksum_after_op1[16],
12466 checksum_after_op2[16];
12467 struct md5_ctx ctx;
12468 hash_table<nofree_ptr_hash<const tree_node> > ht (32);
12470 md5_init_ctx (&ctx);
12471 fold_checksum_tree (op0, &ctx, &ht);
12472 md5_finish_ctx (&ctx, checksum_before_op0);
12473 ht.empty ();
12475 md5_init_ctx (&ctx);
12476 fold_checksum_tree (op1, &ctx, &ht);
12477 md5_finish_ctx (&ctx, checksum_before_op1);
12478 ht.empty ();
12480 md5_init_ctx (&ctx);
12481 fold_checksum_tree (op2, &ctx, &ht);
12482 md5_finish_ctx (&ctx, checksum_before_op2);
12483 ht.empty ();
12484 #endif
12486 gcc_assert (TREE_CODE_CLASS (code) != tcc_vl_exp);
12487 tem = fold_ternary_loc (loc, code, type, op0, op1, op2);
12488 if (!tem)
12489 tem = build3_stat_loc (loc, code, type, op0, op1, op2 PASS_MEM_STAT);
12491 #ifdef ENABLE_FOLD_CHECKING
12492 md5_init_ctx (&ctx);
12493 fold_checksum_tree (op0, &ctx, &ht);
12494 md5_finish_ctx (&ctx, checksum_after_op0);
12495 ht.empty ();
12497 if (memcmp (checksum_before_op0, checksum_after_op0, 16))
12498 fold_check_failed (op0, tem);
12500 md5_init_ctx (&ctx);
12501 fold_checksum_tree (op1, &ctx, &ht);
12502 md5_finish_ctx (&ctx, checksum_after_op1);
12503 ht.empty ();
12505 if (memcmp (checksum_before_op1, checksum_after_op1, 16))
12506 fold_check_failed (op1, tem);
12508 md5_init_ctx (&ctx);
12509 fold_checksum_tree (op2, &ctx, &ht);
12510 md5_finish_ctx (&ctx, checksum_after_op2);
12512 if (memcmp (checksum_before_op2, checksum_after_op2, 16))
12513 fold_check_failed (op2, tem);
12514 #endif
12515 return tem;
12518 /* Fold a CALL_EXPR expression of type TYPE with operands FN and NARGS
12519 arguments in ARGARRAY, and a null static chain.
12520 Return a folded expression if successful. Otherwise, return a CALL_EXPR
12521 of type TYPE from the given operands as constructed by build_call_array. */
12523 tree
12524 fold_build_call_array_loc (location_t loc, tree type, tree fn,
12525 int nargs, tree *argarray)
12527 tree tem;
12528 #ifdef ENABLE_FOLD_CHECKING
12529 unsigned char checksum_before_fn[16],
12530 checksum_before_arglist[16],
12531 checksum_after_fn[16],
12532 checksum_after_arglist[16];
12533 struct md5_ctx ctx;
12534 hash_table<nofree_ptr_hash<const tree_node> > ht (32);
12535 int i;
12537 md5_init_ctx (&ctx);
12538 fold_checksum_tree (fn, &ctx, &ht);
12539 md5_finish_ctx (&ctx, checksum_before_fn);
12540 ht.empty ();
12542 md5_init_ctx (&ctx);
12543 for (i = 0; i < nargs; i++)
12544 fold_checksum_tree (argarray[i], &ctx, &ht);
12545 md5_finish_ctx (&ctx, checksum_before_arglist);
12546 ht.empty ();
12547 #endif
12549 tem = fold_builtin_call_array (loc, type, fn, nargs, argarray);
12550 if (!tem)
12551 tem = build_call_array_loc (loc, type, fn, nargs, argarray);
12553 #ifdef ENABLE_FOLD_CHECKING
12554 md5_init_ctx (&ctx);
12555 fold_checksum_tree (fn, &ctx, &ht);
12556 md5_finish_ctx (&ctx, checksum_after_fn);
12557 ht.empty ();
12559 if (memcmp (checksum_before_fn, checksum_after_fn, 16))
12560 fold_check_failed (fn, tem);
12562 md5_init_ctx (&ctx);
12563 for (i = 0; i < nargs; i++)
12564 fold_checksum_tree (argarray[i], &ctx, &ht);
12565 md5_finish_ctx (&ctx, checksum_after_arglist);
12567 if (memcmp (checksum_before_arglist, checksum_after_arglist, 16))
12568 fold_check_failed (NULL_TREE, tem);
12569 #endif
12570 return tem;
12573 /* Perform constant folding and related simplification of initializer
12574 expression EXPR. These behave identically to "fold_buildN" but ignore
12575 potential run-time traps and exceptions that fold must preserve. */
12577 #define START_FOLD_INIT \
12578 int saved_signaling_nans = flag_signaling_nans;\
12579 int saved_trapping_math = flag_trapping_math;\
12580 int saved_rounding_math = flag_rounding_math;\
12581 int saved_trapv = flag_trapv;\
12582 int saved_folding_initializer = folding_initializer;\
12583 flag_signaling_nans = 0;\
12584 flag_trapping_math = 0;\
12585 flag_rounding_math = 0;\
12586 flag_trapv = 0;\
12587 folding_initializer = 1;
12589 #define END_FOLD_INIT \
12590 flag_signaling_nans = saved_signaling_nans;\
12591 flag_trapping_math = saved_trapping_math;\
12592 flag_rounding_math = saved_rounding_math;\
12593 flag_trapv = saved_trapv;\
12594 folding_initializer = saved_folding_initializer;
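/* Illustrative effect of the macros above: with flag_rounding_math
   temporarily cleared, a static initializer such as 1.0 / 3.0 can
   still be folded to a constant, since a translation-time initializer
   cannot observe the run-time rounding mode.  */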
12596 tree
12597 fold_build1_initializer_loc (location_t loc, enum tree_code code,
12598 tree type, tree op)
12600 tree result;
12601 START_FOLD_INIT;
12603 result = fold_build1_loc (loc, code, type, op);
12605 END_FOLD_INIT;
12606 return result;
12609 tree
12610 fold_build2_initializer_loc (location_t loc, enum tree_code code,
12611 tree type, tree op0, tree op1)
12613 tree result;
12614 START_FOLD_INIT;
12616 result = fold_build2_loc (loc, code, type, op0, op1);
12618 END_FOLD_INIT;
12619 return result;
12622 tree
12623 fold_build_call_array_initializer_loc (location_t loc, tree type, tree fn,
12624 int nargs, tree *argarray)
12626 tree result;
12627 START_FOLD_INIT;
12629 result = fold_build_call_array_loc (loc, type, fn, nargs, argarray);
12631 END_FOLD_INIT;
12632 return result;
12635 #undef START_FOLD_INIT
12636 #undef END_FOLD_INIT
12638 /* Determine if first argument is a multiple of second argument. Return 0 if
12639 it is not, or we cannot easily determine it to be.
12641 An example of the sort of thing we care about (at this point; this routine
12642 could surely be made more general, and expanded to do what the *_DIV_EXPR's
12643 fold cases do now) is discovering that
12645 SAVE_EXPR (I) * SAVE_EXPR (J * 8)
12647 is a multiple of
12649 SAVE_EXPR (J * 8)
12651 when we know that the two SAVE_EXPR (J * 8) nodes are the same node.
12653 This code also handles discovering that
12655 SAVE_EXPR (I) * SAVE_EXPR (J * 8)
12657 is a multiple of 8 so we don't have to worry about dealing with a
12658 possible remainder.
12660 Note that we *look* inside a SAVE_EXPR only to determine how it was
12661 calculated; it is not safe for fold to do much of anything else with the
12662 internals of a SAVE_EXPR, since it cannot know when it will be evaluated
12663 at run time. For example, the latter example above *cannot* be implemented
12664 as SAVE_EXPR (I) * J or any variant thereof, since the value of J at
12665 evaluation time of the original SAVE_EXPR is not necessarily the same at
12666 the time the new expression is evaluated. The only optimization of this
12667 sort that would be valid is changing
12669 SAVE_EXPR (I) * SAVE_EXPR (SAVE_EXPR (J) * 8)
12671 divided by 8 to
12673 SAVE_EXPR (I) * SAVE_EXPR (J)
12675 (where the same SAVE_EXPR (J) is used in the original and the
12676 transformed version). */
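/* For example (illustrative), multiple_of_p (int, i * 8 + 16, 8)
   returns 1: the PLUS_EXPR case requires both operands to be
   multiples, the MULT_EXPR case finds the constant factor 8, and the
   INTEGER_CST case verifies 16.  */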
12678 int
12679 multiple_of_p (tree type, const_tree top, const_tree bottom)
12681 if (operand_equal_p (top, bottom, 0))
12682 return 1;
12684 if (TREE_CODE (type) != INTEGER_TYPE)
12685 return 0;
12687 switch (TREE_CODE (top))
12689 case BIT_AND_EXPR:
12690 /* Bitwise and provides a power of two multiple. If the mask is
12691 a multiple of BOTTOM then TOP is a multiple of BOTTOM. */
12692 if (!integer_pow2p (bottom))
12693 return 0;
12694 /* FALLTHRU */
12696 case MULT_EXPR:
12697 return (multiple_of_p (type, TREE_OPERAND (top, 0), bottom)
12698 || multiple_of_p (type, TREE_OPERAND (top, 1), bottom));
12700 case PLUS_EXPR:
12701 case MINUS_EXPR:
12702 return (multiple_of_p (type, TREE_OPERAND (top, 0), bottom)
12703 && multiple_of_p (type, TREE_OPERAND (top, 1), bottom));
12705 case LSHIFT_EXPR:
12706 if (TREE_CODE (TREE_OPERAND (top, 1)) == INTEGER_CST)
12708 tree op1, t1;
12710 op1 = TREE_OPERAND (top, 1);
12711 /* const_binop may not detect overflow correctly,
12712 so check for it explicitly here. */
12713 if (wi::gtu_p (TYPE_PRECISION (TREE_TYPE (size_one_node)), op1)
12714 && 0 != (t1 = fold_convert (type,
12715 const_binop (LSHIFT_EXPR,
12716 size_one_node,
12717 op1)))
12718 && !TREE_OVERFLOW (t1))
12719 return multiple_of_p (type, t1, bottom);
12721 return 0;
12723 case NOP_EXPR:
12724 /* Can't handle conversions from non-integral or wider integral type. */
12725 if ((TREE_CODE (TREE_TYPE (TREE_OPERAND (top, 0))) != INTEGER_TYPE)
12726 || (TYPE_PRECISION (type)
12727 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (top, 0)))))
12728 return 0;
12730 /* .. fall through ... */
12732 case SAVE_EXPR:
12733 return multiple_of_p (type, TREE_OPERAND (top, 0), bottom);
12735 case COND_EXPR:
12736 return (multiple_of_p (type, TREE_OPERAND (top, 1), bottom)
12737 && multiple_of_p (type, TREE_OPERAND (top, 2), bottom));
12739 case INTEGER_CST:
12740 if (TREE_CODE (bottom) != INTEGER_CST
12741 || integer_zerop (bottom)
12742 || (TYPE_UNSIGNED (type)
12743 && (tree_int_cst_sgn (top) < 0
12744 || tree_int_cst_sgn (bottom) < 0)))
12745 return 0;
12746 return wi::multiple_of_p (wi::to_widest (top), wi::to_widest (bottom),
12747 SIGNED);
12749 default:
12750 return 0;
12754 #define tree_expr_nonnegative_warnv_p(X, Y) \
12755 _Pragma ("GCC error \"Use RECURSE for recursive calls\"") 0
12757 #define RECURSE(X) \
12758 ((tree_expr_nonnegative_warnv_p) (X, strict_overflow_p, depth + 1))
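/* Note: the #define above turns any direct call to
   tree_expr_nonnegative_warnv_p into a compile-time error, so
   recursive queries must go through RECURSE, which parenthesizes the
   function name to defeat the macro and passes DEPTH + 1, bounding
   the recursion depth.  */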
12760 /* Return true if CODE or TYPE is known to be non-negative. */
12762 static bool
12763 tree_simple_nonnegative_warnv_p (enum tree_code code, tree type)
12765 if ((TYPE_PRECISION (type) != 1 || TYPE_UNSIGNED (type))
12766 && truth_value_p (code))
12767 /* Truth values evaluate to 0 or 1, which is nonnegative unless we
12768 have a signed:1 type (where the values are -1 and 0). */
12769 return true;
12770 return false;
12773 /* Return true if (CODE OP0) is known to be non-negative. If the return
12774 value is based on the assumption that signed overflow is undefined,
12775 set *STRICT_OVERFLOW_P to true; otherwise, don't change
12776 *STRICT_OVERFLOW_P. DEPTH is the current nesting depth of the query. */
12778 bool
12779 tree_unary_nonnegative_warnv_p (enum tree_code code, tree type, tree op0,
12780 bool *strict_overflow_p, int depth)
12782 if (TYPE_UNSIGNED (type))
12783 return true;
12785 switch (code)
12787 case ABS_EXPR:
12788 /* We can't return 1 if flag_wrapv is set because
12789 ABS_EXPR<INT_MIN> = INT_MIN. */
12790 if (!ANY_INTEGRAL_TYPE_P (type))
12791 return true;
12792 if (TYPE_OVERFLOW_UNDEFINED (type))
12794 *strict_overflow_p = true;
12795 return true;
12797 break;
12799 case NON_LVALUE_EXPR:
12800 case FLOAT_EXPR:
12801 case FIX_TRUNC_EXPR:
12802 return RECURSE (op0);
12804 CASE_CONVERT:
12806 tree inner_type = TREE_TYPE (op0);
12807 tree outer_type = type;
12809 if (TREE_CODE (outer_type) == REAL_TYPE)
12811 if (TREE_CODE (inner_type) == REAL_TYPE)
12812 return RECURSE (op0);
12813 if (INTEGRAL_TYPE_P (inner_type))
12815 if (TYPE_UNSIGNED (inner_type))
12816 return true;
12817 return RECURSE (op0);
12820 else if (INTEGRAL_TYPE_P (outer_type))
12822 if (TREE_CODE (inner_type) == REAL_TYPE)
12823 return RECURSE (op0);
12824 if (INTEGRAL_TYPE_P (inner_type))
12825 return TYPE_PRECISION (inner_type) < TYPE_PRECISION (outer_type)
12826 && TYPE_UNSIGNED (inner_type);
12829 break;
12831 default:
12832 return tree_simple_nonnegative_warnv_p (code, type);
12835 /* We don't know sign of `t', so be conservative and return false. */
12836 return false;
12839 /* Return true if (CODE OP0 OP1) is known to be non-negative. If the return
12840 value is based on the assumption that signed overflow is undefined,
12841 set *STRICT_OVERFLOW_P to true; otherwise, don't change
12842 *STRICT_OVERFLOW_P. DEPTH is the current nesting depth of the query. */
12844 bool
12845 tree_binary_nonnegative_warnv_p (enum tree_code code, tree type, tree op0,
12846 tree op1, bool *strict_overflow_p,
12847 int depth)
12849 if (TYPE_UNSIGNED (type))
12850 return true;
12852 switch (code)
12854 case POINTER_PLUS_EXPR:
12855 case PLUS_EXPR:
12856 if (FLOAT_TYPE_P (type))
12857 return RECURSE (op0) && RECURSE (op1);
12859 /* zero_extend(x) + zero_extend(y) is non-negative if x and y are
12860 both unsigned and at least 2 bits shorter than the result. */
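/* E.g. (int) (unsigned char) a + (int) (unsigned char) b needs at
   most 9 bits, and 9 < 32, so the sum cannot be negative
   (illustrative, assuming 8-bit char and 32-bit int).  */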
12861 if (TREE_CODE (type) == INTEGER_TYPE
12862 && TREE_CODE (op0) == NOP_EXPR
12863 && TREE_CODE (op1) == NOP_EXPR)
12865 tree inner1 = TREE_TYPE (TREE_OPERAND (op0, 0));
12866 tree inner2 = TREE_TYPE (TREE_OPERAND (op1, 0));
12867 if (TREE_CODE (inner1) == INTEGER_TYPE && TYPE_UNSIGNED (inner1)
12868 && TREE_CODE (inner2) == INTEGER_TYPE && TYPE_UNSIGNED (inner2))
12870 unsigned int prec = MAX (TYPE_PRECISION (inner1),
12871 TYPE_PRECISION (inner2)) + 1;
12872 return prec < TYPE_PRECISION (type);
12875 break;
12877 case MULT_EXPR:
12878 if (FLOAT_TYPE_P (type) || TYPE_OVERFLOW_UNDEFINED (type))
12880 /* x * x is always non-negative for floating point x
12881 or when signed overflow is undefined. */
12882 if (operand_equal_p (op0, op1, 0)
12883 || (RECURSE (op0) && RECURSE (op1)))
12885 if (ANY_INTEGRAL_TYPE_P (type)
12886 && TYPE_OVERFLOW_UNDEFINED (type))
12887 *strict_overflow_p = true;
12888 return true;
12892 /* zero_extend(x) * zero_extend(y) is non-negative if x and y are
12893 both unsigned and their combined precision is less than that of the result. */
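/* E.g. (int) (unsigned char) a * (int) (unsigned char) b needs at
   most 8 + 8 = 16 bits, and 16 < 32, so the product cannot be
   negative (illustrative, assuming 8-bit char and 32-bit int).  */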
12894 if (TREE_CODE (type) == INTEGER_TYPE
12895 && (TREE_CODE (op0) == NOP_EXPR || TREE_CODE (op0) == INTEGER_CST)
12896 && (TREE_CODE (op1) == NOP_EXPR || TREE_CODE (op1) == INTEGER_CST))
12898 tree inner0 = (TREE_CODE (op0) == NOP_EXPR)
12899 ? TREE_TYPE (TREE_OPERAND (op0, 0))
12900 : TREE_TYPE (op0);
12901 tree inner1 = (TREE_CODE (op1) == NOP_EXPR)
12902 ? TREE_TYPE (TREE_OPERAND (op1, 0))
12903 : TREE_TYPE (op1);
12905 bool unsigned0 = TYPE_UNSIGNED (inner0);
12906 bool unsigned1 = TYPE_UNSIGNED (inner1);
12908 if (TREE_CODE (op0) == INTEGER_CST)
12909 unsigned0 = unsigned0 || tree_int_cst_sgn (op0) >= 0;
12911 if (TREE_CODE (op1) == INTEGER_CST)
12912 unsigned1 = unsigned1 || tree_int_cst_sgn (op1) >= 0;
12914 if (TREE_CODE (inner0) == INTEGER_TYPE && unsigned0
12915 && TREE_CODE (inner1) == INTEGER_TYPE && unsigned1)
12917 unsigned int precision0 = (TREE_CODE (op0) == INTEGER_CST)
12918 ? tree_int_cst_min_precision (op0, UNSIGNED)
12919 : TYPE_PRECISION (inner0);
12921 unsigned int precision1 = (TREE_CODE (op1) == INTEGER_CST)
12922 ? tree_int_cst_min_precision (op1, UNSIGNED)
12923 : TYPE_PRECISION (inner1);
12925 return precision0 + precision1 < TYPE_PRECISION (type);
12928 return false;
12930 case BIT_AND_EXPR:
12931 case MAX_EXPR:
12932 return RECURSE (op0) || RECURSE (op1);
12934 case BIT_IOR_EXPR:
12935 case BIT_XOR_EXPR:
12936 case MIN_EXPR:
12937 case RDIV_EXPR:
12938 case TRUNC_DIV_EXPR:
12939 case CEIL_DIV_EXPR:
12940 case FLOOR_DIV_EXPR:
12941 case ROUND_DIV_EXPR:
12942 return RECURSE (op0) && RECURSE (op1);
12944 case TRUNC_MOD_EXPR:
12945 case CEIL_MOD_EXPR:
12946 case FLOOR_MOD_EXPR:
12947 case ROUND_MOD_EXPR:
12948 return RECURSE (op0);
12950 default:
12951 return tree_simple_nonnegative_warnv_p (code, type);
12954 /* We don't know sign of `t', so be conservative and return false. */
12955 return false;
12958 /* Return true if SSA name T is known to be non-negative. If the return
12959 value is based on the assumption that signed overflow is undefined,
12960 set *STRICT_OVERFLOW_P to true; otherwise, don't change
12961 *STRICT_OVERFLOW_P. DEPTH is the current nesting depth of the query. */
12963 static bool
12964 tree_ssa_name_nonnegative_warnv_p (tree t, bool *strict_overflow_p, int depth)
12966 /* Limit the depth of recursion to avoid quadratic behavior.
12967 This is expected to catch almost all occurrences in practice.
12968 If this code misses important cases that unbounded recursion
12969 would not, passes that need this information could be revised
12970 to provide it through dataflow propagation. */
12971 if (depth < PARAM_VALUE (PARAM_MAX_SSA_NAME_QUERY_DEPTH))
12972 return gimple_stmt_nonnegative_warnv_p (SSA_NAME_DEF_STMT (t),
12973 strict_overflow_p, depth);
12974 return tree_simple_nonnegative_warnv_p (TREE_CODE (t), TREE_TYPE (t));
12977 /* Return true if T is known to be non-negative. If the return
12978 value is based on the assumption that signed overflow is undefined,
12979 set *STRICT_OVERFLOW_P to true; otherwise, don't change
12980 *STRICT_OVERFLOW_P. DEPTH is the current nesting depth of the query. */
12982 bool
12983 tree_single_nonnegative_warnv_p (tree t, bool *strict_overflow_p, int depth)
12985 if (TYPE_UNSIGNED (TREE_TYPE (t)))
12986 return true;
12988 switch (TREE_CODE (t))
12990 case INTEGER_CST:
12991 return tree_int_cst_sgn (t) >= 0;
12993 case REAL_CST:
12994 return ! REAL_VALUE_NEGATIVE (TREE_REAL_CST (t));
12996 case FIXED_CST:
12997 return ! FIXED_VALUE_NEGATIVE (TREE_FIXED_CST (t));
12999 case COND_EXPR:
13000 return RECURSE (TREE_OPERAND (t, 1)) && RECURSE (TREE_OPERAND (t, 2));
13002 case SSA_NAME:
13003 return tree_ssa_name_nonnegative_warnv_p (t, strict_overflow_p, depth);
13005 default:
13006 return tree_simple_nonnegative_warnv_p (TREE_CODE (t), TREE_TYPE (t));
13010 /* Return true if T is known to be non-negative. If the return
13011 value is based on the assumption that signed overflow is undefined,
13012 set *STRICT_OVERFLOW_P to true; otherwise, don't change
13013 *STRICT_OVERFLOW_P. DEPTH is the current nesting depth of the query. */
13015 bool
13016 tree_call_nonnegative_warnv_p (tree type, tree fndecl, tree arg0, tree arg1,
13017 bool *strict_overflow_p, int depth)
13019 if (fndecl && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL)
13020 switch (DECL_FUNCTION_CODE (fndecl))
13022 CASE_FLT_FN (BUILT_IN_ACOS):
13023 CASE_FLT_FN (BUILT_IN_ACOSH):
13024 CASE_FLT_FN (BUILT_IN_CABS):
13025 CASE_FLT_FN (BUILT_IN_COSH):
13026 CASE_FLT_FN (BUILT_IN_ERFC):
13027 CASE_FLT_FN (BUILT_IN_EXP):
13028 CASE_FLT_FN (BUILT_IN_EXP10):
13029 CASE_FLT_FN (BUILT_IN_EXP2):
13030 CASE_FLT_FN (BUILT_IN_FABS):
13031 CASE_FLT_FN (BUILT_IN_FDIM):
13032 CASE_FLT_FN (BUILT_IN_HYPOT):
13033 CASE_FLT_FN (BUILT_IN_POW10):
13034 CASE_INT_FN (BUILT_IN_FFS):
13035 CASE_INT_FN (BUILT_IN_PARITY):
13036 CASE_INT_FN (BUILT_IN_POPCOUNT):
13037 CASE_INT_FN (BUILT_IN_CLZ):
13038 CASE_INT_FN (BUILT_IN_CLRSB):
13039 case BUILT_IN_BSWAP32:
13040 case BUILT_IN_BSWAP64:
13041 /* Always true. */
13042 return true;
13044 CASE_FLT_FN (BUILT_IN_SQRT):
13045 /* sqrt(-0.0) is -0.0. */
13046 if (!HONOR_SIGNED_ZEROS (element_mode (type)))
13047 return true;
13048 return RECURSE (arg0);
13050 CASE_FLT_FN (BUILT_IN_ASINH):
13051 CASE_FLT_FN (BUILT_IN_ATAN):
13052 CASE_FLT_FN (BUILT_IN_ATANH):
13053 CASE_FLT_FN (BUILT_IN_CBRT):
13054 CASE_FLT_FN (BUILT_IN_CEIL):
13055 CASE_FLT_FN (BUILT_IN_ERF):
13056 CASE_FLT_FN (BUILT_IN_EXPM1):
13057 CASE_FLT_FN (BUILT_IN_FLOOR):
13058 CASE_FLT_FN (BUILT_IN_FMOD):
13059 CASE_FLT_FN (BUILT_IN_FREXP):
13060 CASE_FLT_FN (BUILT_IN_ICEIL):
13061 CASE_FLT_FN (BUILT_IN_IFLOOR):
13062 CASE_FLT_FN (BUILT_IN_IRINT):
13063 CASE_FLT_FN (BUILT_IN_IROUND):
13064 CASE_FLT_FN (BUILT_IN_LCEIL):
13065 CASE_FLT_FN (BUILT_IN_LDEXP):
13066 CASE_FLT_FN (BUILT_IN_LFLOOR):
13067 CASE_FLT_FN (BUILT_IN_LLCEIL):
13068 CASE_FLT_FN (BUILT_IN_LLFLOOR):
13069 CASE_FLT_FN (BUILT_IN_LLRINT):
13070 CASE_FLT_FN (BUILT_IN_LLROUND):
13071 CASE_FLT_FN (BUILT_IN_LRINT):
13072 CASE_FLT_FN (BUILT_IN_LROUND):
13073 CASE_FLT_FN (BUILT_IN_MODF):
13074 CASE_FLT_FN (BUILT_IN_NEARBYINT):
13075 CASE_FLT_FN (BUILT_IN_RINT):
13076 CASE_FLT_FN (BUILT_IN_ROUND):
13077 CASE_FLT_FN (BUILT_IN_SCALB):
13078 CASE_FLT_FN (BUILT_IN_SCALBLN):
13079 CASE_FLT_FN (BUILT_IN_SCALBN):
13080 CASE_FLT_FN (BUILT_IN_SIGNBIT):
13081 CASE_FLT_FN (BUILT_IN_SIGNIFICAND):
13082 CASE_FLT_FN (BUILT_IN_SINH):
13083 CASE_FLT_FN (BUILT_IN_TANH):
13084 CASE_FLT_FN (BUILT_IN_TRUNC):
13085 /* True if the 1st argument is nonnegative. */
13086 return RECURSE (arg0);
13088 CASE_FLT_FN (BUILT_IN_FMAX):
13089 /* True if the 1st OR 2nd arguments are nonnegative. */
13090 return RECURSE (arg0) || RECURSE (arg1);
13092 CASE_FLT_FN (BUILT_IN_FMIN):
13093 /* True if the 1st AND 2nd arguments are nonnegative. */
13094 return RECURSE (arg0) && RECURSE (arg1);
13096 CASE_FLT_FN (BUILT_IN_COPYSIGN):
13097 /* True if the 2nd argument is nonnegative. */
13098 return RECURSE (arg1);
13100 CASE_FLT_FN (BUILT_IN_POWI):
13101 /* True if the 1st argument is nonnegative or the second
13102 argument is an even integer. */
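/* E.g. powi (x, 4) == (x*x)*(x*x), so an even constant exponent
   suffices regardless of the sign of x (illustrative).  */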
13103 if (TREE_CODE (arg1) == INTEGER_CST
13104 && (TREE_INT_CST_LOW (arg1) & 1) == 0)
13105 return true;
13106 return RECURSE (arg0);
13108 CASE_FLT_FN (BUILT_IN_POW):
13109 /* True if the 1st argument is nonnegative or the second
13110 argument is an even integer valued real. */
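/* E.g. pow (x, 2.0) qualifies. pow (x, 2.5) does not: the code below
   truncates 2.5 to the integer 2, converts back to 2.0, and
   real_identical rejects the mismatch (illustrative).  */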
13111 if (TREE_CODE (arg1) == REAL_CST)
13113 REAL_VALUE_TYPE c;
13114 HOST_WIDE_INT n;
13116 c = TREE_REAL_CST (arg1);
13117 n = real_to_integer (&c);
13118 if ((n & 1) == 0)
13120 REAL_VALUE_TYPE cint;
13121 real_from_integer (&cint, VOIDmode, n, SIGNED);
13122 if (real_identical (&c, &cint))
13123 return true;
13126 return RECURSE (arg0);
13128 default:
13129 break;
13131 return tree_simple_nonnegative_warnv_p (CALL_EXPR, type);
13134 /* Return true if T is known to be non-negative. If the return
13135 value is based on the assumption that signed overflow is undefined,
13136 set *STRICT_OVERFLOW_P to true; otherwise, don't change
13137 *STRICT_OVERFLOW_P. DEPTH is the current nesting depth of the query. */
13139 static bool
13140 tree_invalid_nonnegative_warnv_p (tree t, bool *strict_overflow_p, int depth)
13142 enum tree_code code = TREE_CODE (t);
13143 if (TYPE_UNSIGNED (TREE_TYPE (t)))
13144 return true;
13146 switch (code)
13148 case TARGET_EXPR:
13150 tree temp = TARGET_EXPR_SLOT (t);
13151 t = TARGET_EXPR_INITIAL (t);
13153 /* If the initializer is non-void, then it's a normal expression
13154 that will be assigned to the slot. */
13155 if (!VOID_TYPE_P (t))
13156 return RECURSE (t);
13158 /* Otherwise, the initializer sets the slot in some way. One common
13159 way is an assignment statement at the end of the initializer. */
13160 while (1)
13162 if (TREE_CODE (t) == BIND_EXPR)
13163 t = expr_last (BIND_EXPR_BODY (t));
13164 else if (TREE_CODE (t) == TRY_FINALLY_EXPR
13165 || TREE_CODE (t) == TRY_CATCH_EXPR)
13166 t = expr_last (TREE_OPERAND (t, 0));
13167 else if (TREE_CODE (t) == STATEMENT_LIST)
13168 t = expr_last (t);
13169 else
13170 break;
13172 if (TREE_CODE (t) == MODIFY_EXPR
13173 && TREE_OPERAND (t, 0) == temp)
13174 return RECURSE (TREE_OPERAND (t, 1));
13176 return false;
13179 case CALL_EXPR:
13181 tree arg0 = call_expr_nargs (t) > 0 ? CALL_EXPR_ARG (t, 0) : NULL_TREE;
13182 tree arg1 = call_expr_nargs (t) > 1 ? CALL_EXPR_ARG (t, 1) : NULL_TREE;
13184 return tree_call_nonnegative_warnv_p (TREE_TYPE (t),
13185 get_callee_fndecl (t),
13186 arg0,
13187 arg1,
13188 strict_overflow_p, depth);
13190 case COMPOUND_EXPR:
13191 case MODIFY_EXPR:
13192 return RECURSE (TREE_OPERAND (t, 1));
13194 case BIND_EXPR:
13195 return RECURSE (expr_last (TREE_OPERAND (t, 1)));
13197 case SAVE_EXPR:
13198 return RECURSE (TREE_OPERAND (t, 0));
13200 default:
13201 return tree_simple_nonnegative_warnv_p (TREE_CODE (t), TREE_TYPE (t));
13205 #undef RECURSE
13206 #undef tree_expr_nonnegative_warnv_p
13208 /* Return true if T is known to be non-negative. If the return
13209 value is based on the assumption that signed overflow is undefined,
13210 set *STRICT_OVERFLOW_P to true; otherwise, don't change
13211 *STRICT_OVERFLOW_P. DEPTH is the current nesting depth of the query. */
13213 bool
13214 tree_expr_nonnegative_warnv_p (tree t, bool *strict_overflow_p, int depth)
13216 enum tree_code code;
13217 if (t == error_mark_node)
13218 return false;
13220 code = TREE_CODE (t);
13221 switch (TREE_CODE_CLASS (code))
13223 case tcc_binary:
13224 case tcc_comparison:
13225 return tree_binary_nonnegative_warnv_p (TREE_CODE (t),
13226 TREE_TYPE (t),
13227 TREE_OPERAND (t, 0),
13228 TREE_OPERAND (t, 1),
13229 strict_overflow_p, depth);
13231 case tcc_unary:
13232 return tree_unary_nonnegative_warnv_p (TREE_CODE (t),
13233 TREE_TYPE (t),
13234 TREE_OPERAND (t, 0),
13235 strict_overflow_p, depth);
13237 case tcc_constant:
13238 case tcc_declaration:
13239 case tcc_reference:
13240 return tree_single_nonnegative_warnv_p (t, strict_overflow_p, depth);
13242 default:
13243 break;
13246 switch (code)
13248 case TRUTH_AND_EXPR:
13249 case TRUTH_OR_EXPR:
13250 case TRUTH_XOR_EXPR:
13251 return tree_binary_nonnegative_warnv_p (TREE_CODE (t),
13252 TREE_TYPE (t),
13253 TREE_OPERAND (t, 0),
13254 TREE_OPERAND (t, 1),
13255 strict_overflow_p, depth);
13256 case TRUTH_NOT_EXPR:
13257 return tree_unary_nonnegative_warnv_p (TREE_CODE (t),
13258 TREE_TYPE (t),
13259 TREE_OPERAND (t, 0),
13260 strict_overflow_p, depth);
13262 case COND_EXPR:
13263 case CONSTRUCTOR:
13264 case OBJ_TYPE_REF:
13265 case ASSERT_EXPR:
13266 case ADDR_EXPR:
13267 case WITH_SIZE_EXPR:
13268 case SSA_NAME:
13269 return tree_single_nonnegative_warnv_p (t, strict_overflow_p, depth);
13271 default:
13272 return tree_invalid_nonnegative_warnv_p (t, strict_overflow_p, depth);
13276 /* Return true if `t' is known to be non-negative. Handle warnings
13277 about undefined signed overflow. */
13279 bool
13280 tree_expr_nonnegative_p (tree t)
13282 bool ret, strict_overflow_p;
13284 strict_overflow_p = false;
13285 ret = tree_expr_nonnegative_warnv_p (t, &strict_overflow_p);
13286 if (strict_overflow_p)
13287 fold_overflow_warning (("assuming signed overflow does not occur when "
13288 "determining that expression is always "
13289 "non-negative"),
13290 WARN_STRICT_OVERFLOW_MISC);
13291 return ret;
13295 /* Return true when (CODE OP0) is an address and is known to be nonzero.
13296 For floating point we further ensure that T is not denormal.
13297 Similar logic is present in nonzero_address in rtlanal.h.
13299 If the return value is based on the assumption that signed overflow
13300 is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
13301 change *STRICT_OVERFLOW_P. */
13303 bool
13304 tree_unary_nonzero_warnv_p (enum tree_code code, tree type, tree op0,
13305 bool *strict_overflow_p)
13307 switch (code)
13309 case ABS_EXPR:
13310 return tree_expr_nonzero_warnv_p (op0,
13311 strict_overflow_p);
13313 case NOP_EXPR:
13315 tree inner_type = TREE_TYPE (op0);
13316 tree outer_type = type;
13318 return (TYPE_PRECISION (outer_type) >= TYPE_PRECISION (inner_type)
13319 && tree_expr_nonzero_warnv_p (op0,
13320 strict_overflow_p));
13322 break;
13324 case NON_LVALUE_EXPR:
13325 return tree_expr_nonzero_warnv_p (op0,
13326 strict_overflow_p);
13328 default:
13329 break;
13332 return false;
13335 /* Return true when (CODE OP0 OP1) is an address and is known to be nonzero.
13336 For floating point we further ensure that T is not denormal.
13337 Similar logic is present in nonzero_address in rtlanal.h.
13339 If the return value is based on the assumption that signed overflow
13340 is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
13341 change *STRICT_OVERFLOW_P. */
13343 bool
13344 tree_binary_nonzero_warnv_p (enum tree_code code,
13345 tree type,
13346 tree op0,
13347 tree op1, bool *strict_overflow_p)
13349 bool sub_strict_overflow_p;
13350 switch (code)
13352 case POINTER_PLUS_EXPR:
13353 case PLUS_EXPR:
13354 if (ANY_INTEGRAL_TYPE_P (type) && TYPE_OVERFLOW_UNDEFINED (type))
13356 /* In the presence of negative values it is hard
13357 to say anything. */
13358 sub_strict_overflow_p = false;
13359 if (!tree_expr_nonnegative_warnv_p (op0,
13360 &sub_strict_overflow_p)
13361 || !tree_expr_nonnegative_warnv_p (op1,
13362 &sub_strict_overflow_p))
13363 return false;
13364 /* One of the operands must be positive and the other non-negative. */
13365 /* We don't set *STRICT_OVERFLOW_P here: even if this value
13366 overflows, on a twos-complement machine the sum of two
13367 nonnegative numbers can never be zero. */
13368 return (tree_expr_nonzero_warnv_p (op0,
13369 strict_overflow_p)
13370 || tree_expr_nonzero_warnv_p (op1,
13371 strict_overflow_p));
13373 break;
13375 case MULT_EXPR:
13376 if (TYPE_OVERFLOW_UNDEFINED (type))
13378 if (tree_expr_nonzero_warnv_p (op0,
13379 strict_overflow_p)
13380 && tree_expr_nonzero_warnv_p (op1,
13381 strict_overflow_p))
13383 *strict_overflow_p = true;
13384 return true;
13387 break;
13389 case MIN_EXPR:
13390 sub_strict_overflow_p = false;
13391 if (tree_expr_nonzero_warnv_p (op0,
13392 &sub_strict_overflow_p)
13393 && tree_expr_nonzero_warnv_p (op1,
13394 &sub_strict_overflow_p))
13396 if (sub_strict_overflow_p)
13397 *strict_overflow_p = true;
13399 break;
13401 case MAX_EXPR:
13402 sub_strict_overflow_p = false;
13403 if (tree_expr_nonzero_warnv_p (op0,
13404 &sub_strict_overflow_p))
13406 if (sub_strict_overflow_p)
13407 *strict_overflow_p = true;
13409 /* When both operands are nonzero, then MAX must be too. */
13410 if (tree_expr_nonzero_warnv_p (op1,
13411 strict_overflow_p))
13412 return true;
13414 /* MAX where operand 0 is positive is positive. */
13415 return tree_expr_nonnegative_warnv_p (op0,
13416 strict_overflow_p);
13418 /* MAX where operand 1 is positive is positive. */
13419 else if (tree_expr_nonzero_warnv_p (op1,
13420 &sub_strict_overflow_p)
13421 && tree_expr_nonnegative_warnv_p (op1,
13422 &sub_strict_overflow_p))
13424 if (sub_strict_overflow_p)
13425 *strict_overflow_p = true;
13426 return true;
13428 break;
13430 case BIT_IOR_EXPR:
13431 return (tree_expr_nonzero_warnv_p (op1,
13432 strict_overflow_p)
13433 || tree_expr_nonzero_warnv_p (op0,
13434 strict_overflow_p));
13436 default:
13437 break;
13440 return false;
13443 /* Return true when T is an address and is known to be nonzero.
13444 For floating point we further ensure that T is not denormal.
13445 Similar logic is present in nonzero_address in rtlanal.h.
13447 If the return value is based on the assumption that signed overflow
13448 is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
13449 change *STRICT_OVERFLOW_P. */
13451 bool
13452 tree_single_nonzero_warnv_p (tree t, bool *strict_overflow_p)
13454 bool sub_strict_overflow_p;
13455 switch (TREE_CODE (t))
13457 case INTEGER_CST:
13458 return !integer_zerop (t);
13460 case ADDR_EXPR:
13462 tree base = TREE_OPERAND (t, 0);
13464 if (!DECL_P (base))
13465 base = get_base_address (base);
13467 if (!base)
13468 return false;
13470 /* For objects in symbol table check if we know they are non-zero.
13471 Don't do anything for variables and functions before symtab is built;
13472 it is quite possible that they will be declared weak later. */
13473 if (DECL_P (base) && decl_in_symtab_p (base))
13475 struct symtab_node *symbol;
13477 symbol = symtab_node::get_create (base);
13478 if (symbol)
13479 return symbol->nonzero_address ();
13480 else
13481 return false;
13484 /* Function local objects are never NULL. */
13485 if (DECL_P (base)
13486 && (DECL_CONTEXT (base)
13487 && TREE_CODE (DECL_CONTEXT (base)) == FUNCTION_DECL
13488 && auto_var_in_fn_p (base, DECL_CONTEXT (base))))
13489 return true;
13491 /* Constants are never weak. */
13492 if (CONSTANT_CLASS_P (base))
13493 return true;
13495 return false;
13498 case COND_EXPR:
13499 sub_strict_overflow_p = false;
13500 if (tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 1),
13501 &sub_strict_overflow_p)
13502 && tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 2),
13503 &sub_strict_overflow_p))
13505 if (sub_strict_overflow_p)
13506 *strict_overflow_p = true;
13507 return true;
13509 break;
13511 default:
13512 break;
13514 return false;
13517 /* Given the components of a binary expression CODE, TYPE, OP0 and OP1,
13518 attempt to fold the expression to a constant without modifying TYPE,
13519 OP0 or OP1.
13521 If the expression could be simplified to a constant, then return
13522 the constant. If the expression would not be simplified to a
13523 constant, then return NULL_TREE. */
13525 tree
13526 fold_binary_to_constant (enum tree_code code, tree type, tree op0, tree op1)
13528 tree tem = fold_binary (code, type, op0, op1);
13529 return (tem && TREE_CONSTANT (tem)) ? tem : NULL_TREE;
13532 /* Given the components of a unary expression CODE, TYPE and OP0,
13533 attempt to fold the expression to a constant without modifying
13534 TYPE or OP0.
13536 If the expression could be simplified to a constant, then return
13537 the constant. If the expression would not be simplified to a
13538 constant, then return NULL_TREE. */
13540 tree
13541 fold_unary_to_constant (enum tree_code code, tree type, tree op0)
13543 tree tem = fold_unary (code, type, op0);
13544 return (tem && TREE_CONSTANT (tem)) ? tem : NULL_TREE;
13547 /* If EXP represents referencing an element in a constant string
13548 (either via pointer arithmetic or array indexing), return the
13549 tree representing the value accessed, otherwise return NULL. */
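/* E.g. the ARRAY_REF "abc"[1] folds to the character constant 'b'
   (illustrative).  */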
13551 tree
13552 fold_read_from_constant_string (tree exp)
13554 if ((TREE_CODE (exp) == INDIRECT_REF
13555 || TREE_CODE (exp) == ARRAY_REF)
13556 && TREE_CODE (TREE_TYPE (exp)) == INTEGER_TYPE)
13558 tree exp1 = TREE_OPERAND (exp, 0);
13559 tree index;
13560 tree string;
13561 location_t loc = EXPR_LOCATION (exp);
13563 if (TREE_CODE (exp) == INDIRECT_REF)
13564 string = string_constant (exp1, &index);
13565 else
13567 tree low_bound = array_ref_low_bound (exp);
13568 index = fold_convert_loc (loc, sizetype, TREE_OPERAND (exp, 1));
13570 /* Optimize the special-case of a zero lower bound.
13572 We convert the low_bound to sizetype to avoid some problems
13573 with constant folding. (E.g. suppose the lower bound is 1,
13574 and its mode is QI. Without the conversion, (ARRAY
13575 +(INDEX-(unsigned char)1)) becomes ((ARRAY+(-(unsigned char)1))
13576 +INDEX), which becomes (ARRAY+255+INDEX). Oops!) */
13577 if (! integer_zerop (low_bound))
13578 index = size_diffop_loc (loc, index,
13579 fold_convert_loc (loc, sizetype, low_bound));
13581 string = exp1;
13584 if (string
13585 && TYPE_MODE (TREE_TYPE (exp)) == TYPE_MODE (TREE_TYPE (TREE_TYPE (string)))
13586 && TREE_CODE (string) == STRING_CST
13587 && TREE_CODE (index) == INTEGER_CST
13588 && compare_tree_int (index, TREE_STRING_LENGTH (string)) < 0
13589 && (GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_TYPE (string))))
13590 == MODE_INT)
13591 && (GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (TREE_TYPE (string)))) == 1))
13592 return build_int_cst_type (TREE_TYPE (exp),
13593 (TREE_STRING_POINTER (string)
13594 [TREE_INT_CST_LOW (index)]));
13596 return NULL;
13599 /* Return the tree for neg (ARG0) when ARG0 is known to be either
13600 an integer constant, real, or fixed-point constant.
13602 TYPE is the type of the result. */
13604 static tree
13605 fold_negate_const (tree arg0, tree type)
13607 tree t = NULL_TREE;
13609 switch (TREE_CODE (arg0))
13611 case INTEGER_CST:
13613 bool overflow;
13614 wide_int val = wi::neg (arg0, &overflow);
13615 t = force_fit_type (type, val, 1,
13616 (overflow | TREE_OVERFLOW (arg0))
13617 && !TYPE_UNSIGNED (type));
13618 break;
13621 case REAL_CST:
13622 t = build_real (type, real_value_negate (&TREE_REAL_CST (arg0)));
13623 break;
13625 case FIXED_CST:
13627 FIXED_VALUE_TYPE f;
13628 bool overflow_p = fixed_arithmetic (&f, NEGATE_EXPR,
13629 &(TREE_FIXED_CST (arg0)), NULL,
13630 TYPE_SATURATING (type));
13631 t = build_fixed (type, f);
13632 /* Propagate overflow flags. */
13633 if (overflow_p | TREE_OVERFLOW (arg0))
13634 TREE_OVERFLOW (t) = 1;
13635 break;
13638 default:
13639 gcc_unreachable ();
13642 return t;
13645 /* Return the tree for abs (ARG0) when ARG0 is known to be either
13646 an integer constant or real constant.
13648 TYPE is the type of the result. */
13650 tree
13651 fold_abs_const (tree arg0, tree type)
13653 tree t = NULL_TREE;
13655 switch (TREE_CODE (arg0))
13657 case INTEGER_CST:
13659 /* If the value is unsigned or non-negative, then the absolute value
13660 is the same as the ordinary value. */
13661 if (!wi::neg_p (arg0, TYPE_SIGN (type)))
13662 t = arg0;
13664 /* If the value is negative, then the absolute value is
13665 its negation. */
13666 else
13668 bool overflow;
13669 wide_int val = wi::neg (arg0, &overflow);
13670 t = force_fit_type (type, val, -1,
13671 overflow | TREE_OVERFLOW (arg0));
13674 break;
13676 case REAL_CST:
13677 if (REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg0)))
13678 t = build_real (type, real_value_negate (&TREE_REAL_CST (arg0)));
13679 else
13680 t = arg0;
13681 break;
13683 default:
13684 gcc_unreachable ();
13687 return t;
13690 /* Return the tree for not (ARG0) when ARG0 is known to be an integer
13691 constant. TYPE is the type of the result. */
13693 static tree
13694 fold_not_const (const_tree arg0, tree type)
13696 gcc_assert (TREE_CODE (arg0) == INTEGER_CST);
13698 return force_fit_type (type, wi::bit_not (arg0), 0, TREE_OVERFLOW (arg0));
13701 /* Given CODE, a relational operator, the target type, TYPE and two
13702 constant operands OP0 and OP1, return the result of the
13703 relational operation. If the result is not a compile time
13704 constant, then return NULL_TREE. */
13706 static tree
13707 fold_relational_const (enum tree_code code, tree type, tree op0, tree op1)
13709 int result, invert;
13711 /* From here on, the only cases we handle are when the result is
13712 known to be a constant. */
13714 if (TREE_CODE (op0) == REAL_CST && TREE_CODE (op1) == REAL_CST)
13716 const REAL_VALUE_TYPE *c0 = TREE_REAL_CST_PTR (op0);
13717 const REAL_VALUE_TYPE *c1 = TREE_REAL_CST_PTR (op1);
13719 /* Handle the cases where either operand is a NaN. */
13720 if (real_isnan (c0) || real_isnan (c1))
13722 switch (code)
13724 case EQ_EXPR:
13725 case ORDERED_EXPR:
13726 result = 0;
13727 break;
13729 case NE_EXPR:
13730 case UNORDERED_EXPR:
13731 case UNLT_EXPR:
13732 case UNLE_EXPR:
13733 case UNGT_EXPR:
13734 case UNGE_EXPR:
13735 case UNEQ_EXPR:
13736 result = 1;
13737 break;
13739 case LT_EXPR:
13740 case LE_EXPR:
13741 case GT_EXPR:
13742 case GE_EXPR:
13743 case LTGT_EXPR:
13744 if (flag_trapping_math)
13745 return NULL_TREE;
13746 result = 0;
13747 break;
13749 default:
13750 gcc_unreachable ();
13753 return constant_boolean_node (result, type);
13756 return constant_boolean_node (real_compare (code, c0, c1), type);
13759 if (TREE_CODE (op0) == FIXED_CST && TREE_CODE (op1) == FIXED_CST)
13761 const FIXED_VALUE_TYPE *c0 = TREE_FIXED_CST_PTR (op0);
13762 const FIXED_VALUE_TYPE *c1 = TREE_FIXED_CST_PTR (op1);
13763 return constant_boolean_node (fixed_compare (code, c0, c1), type);
13766 /* Handle equality/inequality of complex constants. */
13767 if (TREE_CODE (op0) == COMPLEX_CST && TREE_CODE (op1) == COMPLEX_CST)
13769 tree rcond = fold_relational_const (code, type,
13770 TREE_REALPART (op0),
13771 TREE_REALPART (op1));
13772 tree icond = fold_relational_const (code, type,
13773 TREE_IMAGPART (op0),
13774 TREE_IMAGPART (op1));
13775 if (code == EQ_EXPR)
13776 return fold_build2 (TRUTH_ANDIF_EXPR, type, rcond, icond);
13777 else if (code == NE_EXPR)
13778 return fold_build2 (TRUTH_ORIF_EXPR, type, rcond, icond);
13779 else
13780 return NULL_TREE;
13783 if (TREE_CODE (op0) == VECTOR_CST && TREE_CODE (op1) == VECTOR_CST)
13785 unsigned count = VECTOR_CST_NELTS (op0);
13786 tree *elts = XALLOCAVEC (tree, count);
13787 gcc_assert (VECTOR_CST_NELTS (op1) == count
13788 && TYPE_VECTOR_SUBPARTS (type) == count);
13790 for (unsigned i = 0; i < count; i++)
13792 tree elem_type = TREE_TYPE (type);
13793 tree elem0 = VECTOR_CST_ELT (op0, i);
13794 tree elem1 = VECTOR_CST_ELT (op1, i);
13796 tree tem = fold_relational_const (code, elem_type,
13797 elem0, elem1);
13799 if (tem == NULL_TREE)
13800 return NULL_TREE;
13802 elts[i] = build_int_cst (elem_type, integer_zerop (tem) ? 0 : -1);
13805 return build_vector (type, elts);
13808 /* From here on we only handle LT, LE, GT, GE, EQ and NE.
13810 To compute GT, swap the arguments and do LT.
13811 To compute GE, do LT and invert the result.
13812 To compute LE, swap the arguments, do LT and invert the result.
13813 To compute NE, do EQ and invert the result.
13815 Therefore, the code below must handle only EQ and LT. */
13817 if (code == LE_EXPR || code == GT_EXPR)
13819 std::swap (op0, op1);
13820 code = swap_tree_comparison (code);
13823 /* Note that it is safe to invert for real values here because we
13824 have already handled the one case where it matters. */
13826 invert = 0;
13827 if (code == NE_EXPR || code == GE_EXPR)
13829 invert = 1;
13830 code = invert_tree_comparison (code, false);
13833 /* Compute a result for LT or EQ if args permit;
13834 otherwise return NULL_TREE. */
13835 if (TREE_CODE (op0) == INTEGER_CST && TREE_CODE (op1) == INTEGER_CST)
13837 if (code == EQ_EXPR)
13838 result = tree_int_cst_equal (op0, op1);
13839 else
13840 result = tree_int_cst_lt (op0, op1);
13842 else
13843 return NULL_TREE;
13845 if (invert)
13846 result ^= 1;
13847 return constant_boolean_node (result, type);
13850 /* If necessary, return a CLEANUP_POINT_EXPR for EXPR with the
13851 indicated TYPE. If no CLEANUP_POINT_EXPR is necessary, return EXPR
13852 itself. */
13854 tree
13855 fold_build_cleanup_point_expr (tree type, tree expr)
13857 /* If the expression does not have side effects then we don't have to wrap
13858 it with a cleanup point expression. */
13859 if (!TREE_SIDE_EFFECTS (expr))
13860 return expr;
13862 /* If the expression is a return, check whether the expression inside
13863 the return, or the right-hand side of the modify expression inside
13864 the return, has side effects. If either has none, we don't need to
13865 wrap the expression in a cleanup point expression. Note we don't check the
13866 left-hand side of the modify because it should always be a return decl. */
13867 if (TREE_CODE (expr) == RETURN_EXPR)
13869 tree op = TREE_OPERAND (expr, 0);
13870 if (!op || !TREE_SIDE_EFFECTS (op))
13871 return expr;
13872 op = TREE_OPERAND (op, 1);
13873 if (!TREE_SIDE_EFFECTS (op))
13874 return expr;
13877 return build1 (CLEANUP_POINT_EXPR, type, expr);
13880 /* Given a pointer value OP0 and a type TYPE, return a simplified version
13881 of an indirection through OP0, or NULL_TREE if no simplification is
13882 possible. */
13884 tree
13885 fold_indirect_ref_1 (location_t loc, tree type, tree op0)
13887 tree sub = op0;
13888 tree subtype;
13890 STRIP_NOPS (sub);
13891 subtype = TREE_TYPE (sub);
13892 if (!POINTER_TYPE_P (subtype))
13893 return NULL_TREE;
13895 if (TREE_CODE (sub) == ADDR_EXPR)
13897 tree op = TREE_OPERAND (sub, 0);
13898 tree optype = TREE_TYPE (op);
13899 /* *&CONST_DECL -> to the value of the const decl. */
13900 if (TREE_CODE (op) == CONST_DECL)
13901 return DECL_INITIAL (op);
13902 /* *&p => p; make sure to handle *&"str"[cst] here. */
13903 if (type == optype)
13905 tree fop = fold_read_from_constant_string (op);
13906 if (fop)
13907 return fop;
13908 else
13909 return op;
13911 /* *(foo *)&fooarray => fooarray[0] */
13912 else if (TREE_CODE (optype) == ARRAY_TYPE
13913 && type == TREE_TYPE (optype)
13914 && (!in_gimple_form
13915 || TREE_CODE (TYPE_SIZE (type)) == INTEGER_CST))
13917 tree type_domain = TYPE_DOMAIN (optype);
13918 tree min_val = size_zero_node;
13919 if (type_domain && TYPE_MIN_VALUE (type_domain))
13920 min_val = TYPE_MIN_VALUE (type_domain);
13921 if (in_gimple_form
13922 && TREE_CODE (min_val) != INTEGER_CST)
13923 return NULL_TREE;
13924 return build4_loc (loc, ARRAY_REF, type, op, min_val,
13925 NULL_TREE, NULL_TREE);
13927 /* *(foo *)&complexfoo => __real__ complexfoo */
13928 else if (TREE_CODE (optype) == COMPLEX_TYPE
13929 && type == TREE_TYPE (optype))
13930 return fold_build1_loc (loc, REALPART_EXPR, type, op);
13931 /* *(foo *)&vectorfoo => BIT_FIELD_REF<vectorfoo,...> */
13932 else if (TREE_CODE (optype) == VECTOR_TYPE
13933 && type == TREE_TYPE (optype))
13935 tree part_width = TYPE_SIZE (type);
13936 tree index = bitsize_int (0);
13937 return fold_build3_loc (loc, BIT_FIELD_REF, type, op, part_width, index);
13941 if (TREE_CODE (sub) == POINTER_PLUS_EXPR
13942 && TREE_CODE (TREE_OPERAND (sub, 1)) == INTEGER_CST)
13944 tree op00 = TREE_OPERAND (sub, 0);
13945 tree op01 = TREE_OPERAND (sub, 1);
13947 STRIP_NOPS (op00);
13948 if (TREE_CODE (op00) == ADDR_EXPR)
13950 tree op00type;
13951 op00 = TREE_OPERAND (op00, 0);
13952 op00type = TREE_TYPE (op00);
13954 /* ((foo*)&vectorfoo)[1] => BIT_FIELD_REF<vectorfoo,...> */
13955 if (TREE_CODE (op00type) == VECTOR_TYPE
13956 && type == TREE_TYPE (op00type))
13958 HOST_WIDE_INT offset = tree_to_shwi (op01);
13959 tree part_width = TYPE_SIZE (type);
13960 unsigned HOST_WIDE_INT part_widthi = tree_to_shwi (part_width)/BITS_PER_UNIT;
13961 unsigned HOST_WIDE_INT indexi = offset * BITS_PER_UNIT;
13962 tree index = bitsize_int (indexi);
13964 if (offset / part_widthi < TYPE_VECTOR_SUBPARTS (op00type))
13965 return fold_build3_loc (loc,
13966 BIT_FIELD_REF, type, op00,
13967 part_width, index);
13970 /* ((foo*)&complexfoo)[1] => __imag__ complexfoo */
13971 else if (TREE_CODE (op00type) == COMPLEX_TYPE
13972 && type == TREE_TYPE (op00type))
13974 tree size = TYPE_SIZE_UNIT (type);
13975 if (tree_int_cst_equal (size, op01))
13976 return fold_build1_loc (loc, IMAGPART_EXPR, type, op00);
13978 /* ((foo *)&fooarray)[1] => fooarray[1] */
13979 else if (TREE_CODE (op00type) == ARRAY_TYPE
13980 && type == TREE_TYPE (op00type))
13982 tree type_domain = TYPE_DOMAIN (op00type);
13983 tree min_val = size_zero_node;
13984 if (type_domain && TYPE_MIN_VALUE (type_domain))
13985 min_val = TYPE_MIN_VALUE (type_domain);
13986 op01 = size_binop_loc (loc, EXACT_DIV_EXPR, op01,
13987 TYPE_SIZE_UNIT (type));
13988 op01 = size_binop_loc (loc, PLUS_EXPR, op01, min_val);
13989 return build4_loc (loc, ARRAY_REF, type, op00, op01,
13990 NULL_TREE, NULL_TREE);
13995 /* *(foo *)fooarrptr => (*fooarrptr)[0] */
13996 if (TREE_CODE (TREE_TYPE (subtype)) == ARRAY_TYPE
13997 && type == TREE_TYPE (TREE_TYPE (subtype))
13998 && (!in_gimple_form
13999 || TREE_CODE (TYPE_SIZE (type)) == INTEGER_CST))
14001 tree type_domain;
14002 tree min_val = size_zero_node;
14003 sub = build_fold_indirect_ref_loc (loc, sub);
14004 type_domain = TYPE_DOMAIN (TREE_TYPE (sub));
14005 if (type_domain && TYPE_MIN_VALUE (type_domain))
14006 min_val = TYPE_MIN_VALUE (type_domain);
14007 if (in_gimple_form
14008 && TREE_CODE (min_val) != INTEGER_CST)
14009 return NULL_TREE;
14010 return build4_loc (loc, ARRAY_REF, type, sub, min_val, NULL_TREE,
14011 NULL_TREE);
14014 return NULL_TREE;
14017 /* Builds an expression for an indirection through T, simplifying some
14018 cases. */
14020 tree
14021 build_fold_indirect_ref_loc (location_t loc, tree t)
14023 tree type = TREE_TYPE (TREE_TYPE (t));
14024 tree sub = fold_indirect_ref_1 (loc, type, t);
14026 if (sub)
14027 return sub;
14029 return build1_loc (loc, INDIRECT_REF, type, t);
14032 /* Given an INDIRECT_REF T, return either T or a simplified version. */
14034 tree
14035 fold_indirect_ref_loc (location_t loc, tree t)
14037 tree sub = fold_indirect_ref_1 (loc, TREE_TYPE (t), TREE_OPERAND (t, 0));
14039 if (sub)
14040 return sub;
14041 else
14042 return t;
14045 /* Strip non-trapping, non-side-effecting tree nodes from an expression
14046 whose result is ignored. The type of the returned tree need not be
14047 the same as the original expression. */
14049 tree
14050 fold_ignored_result (tree t)
14052 if (!TREE_SIDE_EFFECTS (t))
14053 return integer_zero_node;
14055 for (;;)
14056 switch (TREE_CODE_CLASS (TREE_CODE (t)))
14058 case tcc_unary:
14059 t = TREE_OPERAND (t, 0);
14060 break;
14062 case tcc_binary:
14063 case tcc_comparison:
14064 if (!TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1)))
14065 t = TREE_OPERAND (t, 0);
14066 else if (!TREE_SIDE_EFFECTS (TREE_OPERAND (t, 0)))
14067 t = TREE_OPERAND (t, 1);
14068 else
14069 return t;
14070 break;
14072 case tcc_expression:
14073 switch (TREE_CODE (t))
14075 case COMPOUND_EXPR:
14076 if (TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1)))
14077 return t;
14078 t = TREE_OPERAND (t, 0);
14079 break;
14081 case COND_EXPR:
14082 if (TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1))
14083 || TREE_SIDE_EFFECTS (TREE_OPERAND (t, 2)))
14084 return t;
14085 t = TREE_OPERAND (t, 0);
14086 break;
14088 default:
14089 return t;
14091 break;
14093 default:
14094 return t;
14098 /* Return the value of VALUE, rounded up to a multiple of DIVISOR. */
14100 tree
14101 round_up_loc (location_t loc, tree value, unsigned int divisor)
14103 tree div = NULL_TREE;
14105 if (divisor == 1)
14106 return value;
14108 /* See if VALUE is already a multiple of DIVISOR. If so, we don't
14109 have to do anything. Only do this when we are not given a const,
14110 because in that case, this check is more expensive than just
14111 doing it. */
14112 if (TREE_CODE (value) != INTEGER_CST)
14114 div = build_int_cst (TREE_TYPE (value), divisor);
14116 if (multiple_of_p (TREE_TYPE (value), value, div))
14117 return value;
14120 /* If divisor is a power of two, simplify this to bit manipulation. */
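/* E.g. for value 13 and divisor 8, (13 + 7) & -8 yields 16, the next
   multiple of 8; values already aligned pass through unchanged
   (illustrative).  */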
14121 if (divisor == (divisor & -divisor))
14123 if (TREE_CODE (value) == INTEGER_CST)
14125 wide_int val = value;
14126 bool overflow_p;
14128 if ((val & (divisor - 1)) == 0)
14129 return value;
14131 overflow_p = TREE_OVERFLOW (value);
14132 val += divisor - 1;
14133 val &= - (int) divisor;
14134 if (val == 0)
14135 overflow_p = true;
14137 return force_fit_type (TREE_TYPE (value), val, -1, overflow_p);
14139 else
14141 tree t;
14143 t = build_int_cst (TREE_TYPE (value), divisor - 1);
14144 value = size_binop_loc (loc, PLUS_EXPR, value, t);
14145 t = build_int_cst (TREE_TYPE (value), - (int) divisor);
14146 value = size_binop_loc (loc, BIT_AND_EXPR, value, t);
14149 else
14151 if (!div)
14152 div = build_int_cst (TREE_TYPE (value), divisor);
14153 value = size_binop_loc (loc, CEIL_DIV_EXPR, value, div);
14154 value = size_binop_loc (loc, MULT_EXPR, value, div);
14157 return value;
14160 /* Likewise, but round down. */
14162 tree
14163 round_down_loc (location_t loc, tree value, int divisor)
14165 tree div = NULL_TREE;
14167 gcc_assert (divisor > 0);
14168 if (divisor == 1)
14169 return value;
14171 /* See if VALUE is already a multiple of DIVISOR. If so, we don't
14172 have to do anything. Only do this when we are not given a const,
14173 because in that case, this check is more expensive than just
14174 doing it. */
14175 if (TREE_CODE (value) != INTEGER_CST)
14177 div = build_int_cst (TREE_TYPE (value), divisor);
14179 if (multiple_of_p (TREE_TYPE (value), value, div))
14180 return value;
14183 /* If divisor is a power of two, simplify this to bit manipulation. */
14184 if (divisor == (divisor & -divisor))
14186 tree t;
14188 t = build_int_cst (TREE_TYPE (value), -divisor);
14189 value = size_binop_loc (loc, BIT_AND_EXPR, value, t);
14191 else
14193 if (!div)
14194 div = build_int_cst (TREE_TYPE (value), divisor);
14195 value = size_binop_loc (loc, FLOOR_DIV_EXPR, value, div);
14196 value = size_binop_loc (loc, MULT_EXPR, value, div);
14199 return value;
14202 /* Returns the pointer to the base of the object addressed by EXP and
14203 extracts the information about the offset of the access, storing it
14204 to PBITPOS and POFFSET. */
14206 static tree
14207 split_address_to_core_and_offset (tree exp,
14208 HOST_WIDE_INT *pbitpos, tree *poffset)
14210 tree core;
14211 machine_mode mode;
14212 int unsignedp, volatilep;
14213 HOST_WIDE_INT bitsize;
14214 location_t loc = EXPR_LOCATION (exp);
14216 if (TREE_CODE (exp) == ADDR_EXPR)
14218 core = get_inner_reference (TREE_OPERAND (exp, 0), &bitsize, pbitpos,
14219 poffset, &mode, &unsignedp, &volatilep,
14220 false);
14221 core = build_fold_addr_expr_loc (loc, core);
14223 else
14225 core = exp;
14226 *pbitpos = 0;
14227 *poffset = NULL_TREE;
14230 return core;
14233 /* Returns true if addresses of E1 and E2 differ by a constant, false
14234 otherwise. If they do, E1 - E2 is stored in *DIFF. */
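/* E.g. for int a[16], &a[10] and &a[4] share the core &a with byte
   offsets 40 and 16, so *DIFF is set to 24 and true is returned
   (illustrative, assuming 4-byte int).  */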
14236 bool
14237 ptr_difference_const (tree e1, tree e2, HOST_WIDE_INT *diff)
14239 tree core1, core2;
14240 HOST_WIDE_INT bitpos1, bitpos2;
14241 tree toffset1, toffset2, tdiff, type;
14243 core1 = split_address_to_core_and_offset (e1, &bitpos1, &toffset1);
14244 core2 = split_address_to_core_and_offset (e2, &bitpos2, &toffset2);
14246 if (bitpos1 % BITS_PER_UNIT != 0
14247 || bitpos2 % BITS_PER_UNIT != 0
14248 || !operand_equal_p (core1, core2, 0))
14249 return false;
14251 if (toffset1 && toffset2)
14253 type = TREE_TYPE (toffset1);
14254 if (type != TREE_TYPE (toffset2))
14255 toffset2 = fold_convert (type, toffset2);
14257 tdiff = fold_build2 (MINUS_EXPR, type, toffset1, toffset2);
14258 if (!cst_and_fits_in_hwi (tdiff))
14259 return false;
14261 *diff = int_cst_value (tdiff);
14263 else if (toffset1 || toffset2)
14265 /* If only one of the offsets is non-constant, the difference cannot
14266 be a constant. */
14267 return false;
14269 else
14270 *diff = 0;
14272 *diff += (bitpos1 - bitpos2) / BITS_PER_UNIT;
14273 return true;
14276 /* Simplify the floating point expression EXP when the sign of the
14277 result is not significant. Return NULL_TREE if no simplification
14278 is possible. */
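/* E.g. when only the magnitude of the result matters, -x * y can be
   rewritten as x * y, fabs (x) as x, and copysign (x, y) as x
   (illustrative).  */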
14280 tree
14281 fold_strip_sign_ops (tree exp)
14283 tree arg0, arg1;
14284 location_t loc = EXPR_LOCATION (exp);
14286 switch (TREE_CODE (exp))
14288 case ABS_EXPR:
14289 case NEGATE_EXPR:
14290 arg0 = fold_strip_sign_ops (TREE_OPERAND (exp, 0));
14291 return arg0 ? arg0 : TREE_OPERAND (exp, 0);
14293 case MULT_EXPR:
14294 case RDIV_EXPR:
14295 if (HONOR_SIGN_DEPENDENT_ROUNDING (element_mode (exp)))
14296 return NULL_TREE;
14297 arg0 = fold_strip_sign_ops (TREE_OPERAND (exp, 0));
14298 arg1 = fold_strip_sign_ops (TREE_OPERAND (exp, 1));
14299 if (arg0 != NULL_TREE || arg1 != NULL_TREE)
14300 return fold_build2_loc (loc, TREE_CODE (exp), TREE_TYPE (exp),
14301 arg0 ? arg0 : TREE_OPERAND (exp, 0),
14302 arg1 ? arg1 : TREE_OPERAND (exp, 1));
14303 break;
14305 case COMPOUND_EXPR:
14306 arg0 = TREE_OPERAND (exp, 0);
14307 arg1 = fold_strip_sign_ops (TREE_OPERAND (exp, 1));
14308 if (arg1)
14309 return fold_build2_loc (loc, COMPOUND_EXPR, TREE_TYPE (exp), arg0, arg1);
14310 break;
14312 case COND_EXPR:
14313 arg0 = fold_strip_sign_ops (TREE_OPERAND (exp, 1));
14314 arg1 = fold_strip_sign_ops (TREE_OPERAND (exp, 2));
14315 if (arg0 || arg1)
14316 return fold_build3_loc (loc,
14317 COND_EXPR, TREE_TYPE (exp), TREE_OPERAND (exp, 0),
14318 arg0 ? arg0 : TREE_OPERAND (exp, 1),
14319 arg1 ? arg1 : TREE_OPERAND (exp, 2));
14320 break;
14322 case CALL_EXPR:
14324 const enum built_in_function fcode = builtin_mathfn_code (exp);
14325 switch (fcode)
14327 CASE_FLT_FN (BUILT_IN_COPYSIGN):
14328 /* Strip copysign function call, return the 1st argument. */
14329 arg0 = CALL_EXPR_ARG (exp, 0);
14330 arg1 = CALL_EXPR_ARG (exp, 1);
14331 return omit_one_operand_loc (loc, TREE_TYPE (exp), arg0, arg1);
14333 default:
14334 /* Strip sign ops from the argument of "odd" math functions. */
14335 if (negate_mathfn_p (fcode))
14337 arg0 = fold_strip_sign_ops (CALL_EXPR_ARG (exp, 0));
14338 if (arg0)
14339 return build_call_expr_loc (loc, get_callee_fndecl (exp), 1, arg0);
14341 break;
14344 break;
14346 default:
14347 break;
14349 return NULL_TREE;
14352 /* Return OFF converted to a pointer offset type suitable as offset for
14353 POINTER_PLUS_EXPR. Use location LOC for this conversion. */
14354 tree
14355 convert_to_ptrofftype_loc (location_t loc, tree off)
14357 return fold_convert_loc (loc, sizetype, off);
14360 /* Build and fold a POINTER_PLUS_EXPR at LOC offsetting PTR by OFF. */
14361 tree
14362 fold_build_pointer_plus_loc (location_t loc, tree ptr, tree off)
14364 return fold_build2_loc (loc, POINTER_PLUS_EXPR, TREE_TYPE (ptr),
14365 ptr, convert_to_ptrofftype_loc (loc, off));
14368 /* Build and fold a POINTER_PLUS_EXPR at LOC offsetting PTR by OFF. */
14369 tree
14370 fold_build_pointer_plus_hwi_loc (location_t loc, tree ptr, HOST_WIDE_INT off)
14372 return fold_build2_loc (loc, POINTER_PLUS_EXPR, TREE_TYPE (ptr),
14373 ptr, size_int (off));